agno-2.3.1-py3-none-any.whl → agno-2.3.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +514 -186
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +176 -0
- agno/db/dynamo/dynamo.py +11 -0
- agno/db/firestore/firestore.py +5 -1
- agno/db/gcs_json/gcs_json_db.py +5 -2
- agno/db/in_memory/in_memory_db.py +5 -2
- agno/db/json/json_db.py +5 -1
- agno/db/migrations/manager.py +4 -4
- agno/db/mongo/async_mongo.py +158 -34
- agno/db/mongo/mongo.py +6 -2
- agno/db/mysql/mysql.py +48 -54
- agno/db/postgres/async_postgres.py +61 -51
- agno/db/postgres/postgres.py +42 -50
- agno/db/redis/redis.py +5 -0
- agno/db/redis/utils.py +5 -5
- agno/db/schemas/memory.py +7 -5
- agno/db/singlestore/singlestore.py +99 -108
- agno/db/sqlite/async_sqlite.py +32 -30
- agno/db/sqlite/sqlite.py +34 -30
- agno/knowledge/reader/pdf_reader.py +2 -2
- agno/knowledge/reader/tavily_reader.py +0 -1
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +223 -8
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +67 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/anthropic/claude.py +84 -80
- agno/models/aws/bedrock.py +38 -16
- agno/models/aws/claude.py +97 -277
- agno/models/azure/ai_foundry.py +8 -4
- agno/models/base.py +101 -14
- agno/models/cerebras/cerebras.py +18 -7
- agno/models/cerebras/cerebras_openai.py +4 -2
- agno/models/cohere/chat.py +8 -4
- agno/models/google/gemini.py +578 -20
- agno/models/groq/groq.py +18 -5
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/litellm/chat.py +17 -7
- agno/models/message.py +19 -5
- agno/models/meta/llama.py +20 -4
- agno/models/mistral/mistral.py +8 -4
- agno/models/ollama/chat.py +17 -6
- agno/models/openai/chat.py +17 -6
- agno/models/openai/responses.py +23 -9
- agno/models/vertexai/claude.py +99 -5
- agno/os/interfaces/agui/router.py +1 -0
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/router.py +16 -1
- agno/os/routers/memory/memory.py +146 -0
- agno/os/routers/memory/schemas.py +26 -0
- agno/os/schema.py +21 -6
- agno/os/utils.py +134 -10
- agno/run/base.py +2 -1
- agno/run/workflow.py +1 -1
- agno/team/team.py +571 -225
- agno/tools/mcp/mcp.py +1 -1
- agno/utils/agent.py +119 -1
- agno/utils/dttm.py +33 -0
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +12 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/print_response/agent.py +37 -2
- agno/utils/print_response/team.py +52 -0
- agno/utils/tokens.py +41 -0
- agno/workflow/types.py +2 -2
- {agno-2.3.1.dist-info → agno-2.3.3.dist-info}/METADATA +45 -40
- {agno-2.3.1.dist-info → agno-2.3.3.dist-info}/RECORD +75 -68
- {agno-2.3.1.dist-info → agno-2.3.3.dist-info}/WHEEL +0 -0
- {agno-2.3.1.dist-info → agno-2.3.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.1.dist-info → agno-2.3.3.dist-info}/top_level.txt +0 -0
agno/models/google/gemini.py
CHANGED
@@ -1,3 +1,4 @@
+import asyncio
 import base64
 import json
 import time
@@ -27,12 +28,14 @@ try:
     from google.genai.types import (
         Content,
         DynamicRetrievalConfig,
+        FileSearch,
         FunctionCallingConfigMode,
         GenerateContentConfig,
         GenerateContentResponse,
         GenerateContentResponseUsageMetadata,
         GoogleSearch,
         GoogleSearchRetrieval,
+        Operation,
         Part,
         Retrieval,
         ThinkingConfig,
@@ -44,7 +47,9 @@ try:
         File as GeminiFile,
     )
 except ImportError:
-    raise ImportError(
+    raise ImportError(
+        "`google-genai` not installed or not at the latest version. Please install it using `pip install -U google-genai`"
+    )
 
 
 @dataclass
@@ -79,6 +84,10 @@ class Gemini(Model):
     vertexai_search: bool = False
     vertexai_search_datastore: Optional[str] = None
 
+    # Gemini File Search capabilities
+    file_search_store_names: Optional[List[str]] = None
+    file_search_metadata_filter: Optional[str] = None
+
     temperature: Optional[float] = None
     top_p: Optional[float] = None
     top_k: Optional[int] = None
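The two new dataclass fields are the whole public switch for Gemini File Search on the model. A minimal sketch of how they would be set (the store name and filter value are placeholders, not values taken from this diff):

```python
# Hedged sketch: enable Gemini File Search on an agno model instance.
# "fileSearchStores/my-store-123" is a placeholder; real names come from
# create_file_search_store(), added later in this diff.
from agno.models.google import Gemini

model = Gemini(
    id="gemini-2.5-flash",
    file_search_store_names=["fileSearchStores/my-store-123"],
    # Optional: only retrieve from documents whose custom metadata matches
    file_search_metadata_filter='author = "John Doe"',
)
```

When `file_search_store_names` is set, the new `_append_file_search_tool()` method (added below) appends a `FileSearch` built-in tool to every request.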
@@ -93,6 +102,7 @@ class Gemini(Model):
     cached_content: Optional[Any] = None
     thinking_budget: Optional[int] = None  # Thinking budget for Gemini 2.5 models
     include_thoughts: Optional[bool] = None  # Include thought summaries in response
+    thinking_level: Optional[str] = None  # "low", "high"
     request_params: Optional[Dict[str, Any]] = None
 
     # Client parameters
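`thinking_level` joins `thinking_budget` and `include_thoughts` and is simply forwarded to `ThinkingConfig` in `get_request_params()` (see the hunk below). A hedged sketch, with a placeholder model id:

```python
# Hedged sketch: the new thinking_level knob. The id below is a placeholder;
# pick a model that accepts thinking_level rather than thinking_budget.
from agno.models.google import Gemini

model = Gemini(
    id="gemini-3-pro-preview",
    thinking_level="low",   # "low" or "high", per the field comment above
    include_thoughts=True,  # also surface thought summaries in the response
)
```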
@@ -147,6 +157,21 @@ class Gemini(Model):
         self.client = genai.Client(**client_params)
         return self.client
 
+    def _append_file_search_tool(self, builtin_tools: List[Tool]) -> None:
+        """Append Gemini File Search tool to builtin_tools if file search is enabled.
+
+        Args:
+            builtin_tools: List of built-in tools to append to.
+        """
+        if not self.file_search_store_names:
+            return
+
+        log_debug("Gemini File Search enabled.")
+        file_search_config: Dict[str, Any] = {"file_search_store_names": self.file_search_store_names}
+        if self.file_search_metadata_filter:
+            file_search_config["metadata_filter"] = self.file_search_metadata_filter
+        builtin_tools.append(Tool(file_search=FileSearch(**file_search_config)))  # type: ignore[arg-type]
+
     def get_request_params(
         self,
         system_message: Optional[str] = None,
@@ -198,11 +223,13 @@ class Gemini(Model):
             config["response_schema"] = prepare_response_schema(response_format)
 
         # Add thinking configuration
-        thinking_config_params = {}
+        thinking_config_params: Dict[str, Any] = {}
         if self.thinking_budget is not None:
             thinking_config_params["thinking_budget"] = self.thinking_budget
         if self.include_thoughts is not None:
             thinking_config_params["include_thoughts"] = self.include_thoughts
+        if self.thinking_level is not None:
+            thinking_config_params["thinking_level"] = self.thinking_level
         if thinking_config_params:
             config["thinking_config"] = ThinkingConfig(**thinking_config_params)
 
@@ -240,6 +267,8 @@ class Gemini(Model):
                 Tool(retrieval=Retrieval(vertex_ai_search=VertexAISearch(datastore=self.vertexai_search_datastore)))
             )
 
+        self._append_file_search_tool(builtin_tools)
+
         # Set tools in config
         if builtin_tools:
             if tools:
@@ -281,11 +310,12 @@ class Gemini(Model):
         tools: Optional[List[Dict[str, Any]]] = None,
         tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
         run_response: Optional[RunOutput] = None,
+        compress_tool_results: bool = False,
     ) -> ModelResponse:
         """
         Invokes the model with a list of messages and returns the response.
         """
-        formatted_messages, system_message = self._format_messages(messages)
+        formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
         request_kwargs = self.get_request_params(
             system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
         )
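All four invocation paths (`invoke` here, and the streaming/async variants in the next three hunks) gain the same `compress_tool_results` flag and thread it into `_format_messages()`. A hedged sketch of a direct call; in normal use the flag is presumably driven by the new `agno/compression` manager rather than set by hand:

```python
# Hedged sketch: passing compress_tool_results directly to invoke().
# Assumes GOOGLE_API_KEY is set in the environment.
from agno.models.google import Gemini
from agno.models.message import Message

model = Gemini(id="gemini-2.5-flash")
response = model.invoke(
    messages=[Message(role="user", content="Summarize the tool output.")],
    compress_tool_results=True,  # send compressed tool results to the API when available
)
```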
@@ -326,11 +356,12 @@ class Gemini(Model):
         tools: Optional[List[Dict[str, Any]]] = None,
         tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
         run_response: Optional[RunOutput] = None,
+        compress_tool_results: bool = False,
     ) -> Iterator[ModelResponse]:
         """
         Invokes the model with a list of messages and returns the response as a stream.
         """
-        formatted_messages, system_message = self._format_messages(messages)
+        formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
 
         request_kwargs = self.get_request_params(
             system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
@@ -369,11 +400,12 @@ class Gemini(Model):
         tools: Optional[List[Dict[str, Any]]] = None,
         tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
         run_response: Optional[RunOutput] = None,
+        compress_tool_results: bool = False,
     ) -> ModelResponse:
         """
         Invokes the model with a list of messages and returns the response.
         """
-        formatted_messages, system_message = self._format_messages(messages)
+        formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
 
         request_kwargs = self.get_request_params(
             system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
@@ -415,11 +447,12 @@ class Gemini(Model):
         tools: Optional[List[Dict[str, Any]]] = None,
         tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
         run_response: Optional[RunOutput] = None,
+        compress_tool_results: bool = False,
     ) -> AsyncIterator[ModelResponse]:
         """
         Invokes the model with a list of messages and returns the response as a stream.
         """
-        formatted_messages, system_message = self._format_messages(messages)
+        formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
 
         request_kwargs = self.get_request_params(
             system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
@@ -453,16 +486,18 @@ class Gemini(Model):
             log_error(f"Unknown error from Gemini API: {e}")
             raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
 
-    def _format_messages(self, messages: List[Message]):
+    def _format_messages(self, messages: List[Message], compress_tool_results: bool = False):
         """
         Converts a list of Message objects to the Gemini-compatible format.
 
         Args:
             messages (List[Message]): The list of messages to convert.
+            compress_tool_results: Whether to compress tool results.
         """
         formatted_messages: List = []
         file_content: Optional[Union[GeminiFile, Part]] = None
         system_message = None
+
         for message in messages:
             role = message.role
             if role in ["system", "developer"]:
@@ -473,7 +508,8 @@ class Gemini(Model):
             role = self.reverse_role_map.get(role, role)
 
             # Add content to the message for the model
-            content = message.
+            content = message.get_content(use_compressed_content=compress_tool_results)
+
             # Initialize message_parts to be used for Gemini
             message_parts: List[Any] = []
 
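A minimal sketch of the `Message.get_content()` accessor used above. The accessor and its `use_compressed_content` parameter are confirmed by this diff; the fallback behavior shown in the comments is an assumption inferred from how the diff uses it:

```python
# Hedged sketch, not the confirmed Message API. Assumption: get_content()
# returns the compressed copy when one was stored and use_compressed_content
# is True, and falls back to the original content otherwise.
from agno.models.message import Message

msg = Message(role="tool", content="full tool output ...")
print(msg.get_content(use_compressed_content=False))  # -> "full tool output ..."
print(msg.get_content(use_compressed_content=True))   # -> compressed copy if stored, else the original
```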
@@ -495,11 +531,25 @@ class Gemini(Model):
                     message_parts.append(part)
             # Function call results
             elif message.tool_calls is not None and len(message.tool_calls) > 0:
-                for tool_call in message.tool_calls:
+                for idx, tool_call in enumerate(message.tool_calls):
+                    if isinstance(content, list) and idx < len(content):
+                        original_from_list = content[idx]
+
+                        if compress_tool_results:
+                            compressed_from_tool_call = tool_call.get("content")
+                            tc_content = compressed_from_tool_call if compressed_from_tool_call else original_from_list
+                        else:
+                            tc_content = original_from_list
+                    else:
+                        tc_content = message.get_content(use_compressed_content=compress_tool_results)
+
+                    if tc_content is None:
+                        tc_content = tool_call.get("content")
+                    if tc_content is None:
+                        tc_content = content
+
                     message_parts.append(
-                        Part.from_function_response(
-                            name=tool_call["tool_name"], response={"result": tool_call["content"]}
-                        )
+                        Part.from_function_response(name=tool_call["tool_name"], response={"result": tc_content})
                     )
             # Regular text content
             else:
@@ -767,24 +817,41 @@ class Gemini(Model):
         return None
 
     def format_function_call_results(
-        self,
+        self,
+        messages: List[Message],
+        function_call_results: List[Message],
+        compress_tool_results: bool = False,
+        **kwargs,
     ) -> None:
         """
-        Format function call results.
+        Format function call results for Gemini.
+
+        For combined messages:
+        - content: list of ORIGINAL content (for preservation)
+        - tool_calls[i]["content"]: compressed content if available (for API sending)
+
+        This allows the message to be saved with both original and compressed versions.
         """
-
+        combined_original_content: List = []
         combined_function_result: List = []
         message_metrics = Metrics()
+
         if len(function_call_results) > 0:
-            for result in function_call_results:
-
-
+            for idx, result in enumerate(function_call_results):
+                combined_original_content.append(result.content)
+                compressed_content = result.get_content(use_compressed_content=compress_tool_results)
+                combined_function_result.append(
+                    {"tool_call_id": result.tool_call_id, "tool_name": result.tool_name, "content": compressed_content}
+                )
                 message_metrics += result.metrics
 
-        if
+        if combined_original_content:
             messages.append(
                 Message(
-                    role="tool",
+                    role="tool",
+                    content=combined_original_content,
+                    tool_calls=combined_function_result,
+                    metrics=message_metrics,
                 )
             )
 
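The net effect is that a single combined "tool" message now carries both versions: the originals on `content`, and the compressed copies inside `tool_calls`. A hedged sketch of the resulting message shape (payload strings are placeholders):

```python
# Hedged sketch of the combined tool message this method builds.
from agno.models.message import Message

combined = Message(
    role="tool",
    # Originals, kept for persistence:
    content=["<full weather payload>", "<full news payload>"],
    # Compressed copies, used by _format_messages() when sending to Gemini:
    tool_calls=[
        {"tool_call_id": "call_1", "tool_name": "get_weather", "content": "<compressed weather>"},
        {"tool_call_id": "call_2", "tool_name": "get_news", "content": "<compressed news>"},
    ],
)
```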
@@ -1115,3 +1182,494 @@ class Gemini(Model):
             metrics.provider_metrics = {"traffic_type": response_usage.traffic_type}
 
         return metrics
+
+    def create_file_search_store(self, display_name: Optional[str] = None) -> Any:
+        """
+        Create a new File Search store.
+
+        Args:
+            display_name: Optional display name for the store
+
+        Returns:
+            FileSearchStore: The created File Search store object
+        """
+        config: Dict[str, Any] = {}
+        if display_name:
+            config["display_name"] = display_name
+
+        try:
+            store = self.get_client().file_search_stores.create(config=config or None)  # type: ignore[arg-type]
+            log_info(f"Created File Search store: {store.name}")
+            return store
+        except Exception as e:
+            log_error(f"Error creating File Search store: {e}")
+            raise
+
+    async def async_create_file_search_store(self, display_name: Optional[str] = None) -> Any:
+        """
+        Args:
+            display_name: Optional display name for the store
+
+        Returns:
+            FileSearchStore: The created File Search store object
+        """
+        config: Dict[str, Any] = {}
+        if display_name:
+            config["display_name"] = display_name
+
+        try:
+            store = await self.get_client().aio.file_search_stores.create(config=config or None)  # type: ignore[arg-type]
+            log_info(f"Created File Search store: {store.name}")
+            return store
+        except Exception as e:
+            log_error(f"Error creating File Search store: {e}")
+            raise
+
+    def list_file_search_stores(self, page_size: int = 100) -> List[Any]:
+        """
+        List all File Search stores.
+
+        Args:
+            page_size: Maximum number of stores to return per page
+
+        Returns:
+            List: List of FileSearchStore objects
+        """
+        try:
+            stores = []
+            for store in self.get_client().file_search_stores.list(config={"page_size": page_size}):
+                stores.append(store)
+            log_debug(f"Found {len(stores)} File Search stores")
+            return stores
+        except Exception as e:
+            log_error(f"Error listing File Search stores: {e}")
+            raise
+
+    async def async_list_file_search_stores(self, page_size: int = 100) -> List[Any]:
+        """
+        Async version of list_file_search_stores.
+
+        Args:
+            page_size: Maximum number of stores to return per page
+
+        Returns:
+            List: List of FileSearchStore objects
+        """
+        try:
+            stores = []
+            async for store in await self.get_client().aio.file_search_stores.list(config={"page_size": page_size}):
+                stores.append(store)
+            log_debug(f"Found {len(stores)} File Search stores")
+            return stores
+        except Exception as e:
+            log_error(f"Error listing File Search stores: {e}")
+            raise
+
+    def get_file_search_store(self, name: str) -> Any:
+        """
+        Get a specific File Search store by name.
+
+        Args:
+            name: The name of the store (e.g., 'fileSearchStores/my-store-123')
+
+        Returns:
+            FileSearchStore: The File Search store object
+        """
+        try:
+            store = self.get_client().file_search_stores.get(name=name)
+            log_debug(f"Retrieved File Search store: {name}")
+            return store
+        except Exception as e:
+            log_error(f"Error getting File Search store {name}: {e}")
+            raise
+
+    async def async_get_file_search_store(self, name: str) -> Any:
+        """
+        Args:
+            name: The name of the store
+
+        Returns:
+            FileSearchStore: The File Search store object
+        """
+        try:
+            store = await self.get_client().aio.file_search_stores.get(name=name)
+            log_debug(f"Retrieved File Search store: {name}")
+            return store
+        except Exception as e:
+            log_error(f"Error getting File Search store {name}: {e}")
+            raise
+
+    def delete_file_search_store(self, name: str, force: bool = False) -> None:
+        """
+        Delete a File Search store.
+
+        Args:
+            name: The name of the store to delete
+            force: If True, force delete even if store contains documents
+        """
+        try:
+            self.get_client().file_search_stores.delete(name=name, config={"force": force})
+            log_info(f"Deleted File Search store: {name}")
+        except Exception as e:
+            log_error(f"Error deleting File Search store {name}: {e}")
+            raise
+
+    async def async_delete_file_search_store(self, name: str, force: bool = True) -> None:
+        """
+        Async version of delete_file_search_store.
+
+        Args:
+            name: The name of the store to delete
+            force: If True, force delete even if store contains documents
+        """
+        try:
+            await self.get_client().aio.file_search_stores.delete(name=name, config={"force": force})
+            log_info(f"Deleted File Search store: {name}")
+        except Exception as e:
+            log_error(f"Error deleting File Search store {name}: {e}")
+            raise
+
+    def wait_for_operation(self, operation: Operation, poll_interval: int = 5, max_wait: int = 600) -> Operation:
+        """
+        Wait for a long-running operation to complete.
+
+        Args:
+            operation: The operation object to wait for
+            poll_interval: Seconds to wait between status checks
+            max_wait: Maximum seconds to wait before timing out
+
+        Returns:
+            Operation: The completed operation object
+
+        Raises:
+            TimeoutError: If operation doesn't complete within max_wait seconds
+        """
+        elapsed = 0
+        while not operation.done:
+            if elapsed >= max_wait:
+                raise TimeoutError(f"Operation timed out after {max_wait} seconds")
+            time.sleep(poll_interval)
+            elapsed += poll_interval
+            operation = self.get_client().operations.get(operation)
+            log_debug(f"Waiting for operation... ({elapsed}s elapsed)")
+
+        log_info("Operation completed successfully")
+        return operation
+
+    async def async_wait_for_operation(
+        self, operation: Operation, poll_interval: int = 5, max_wait: int = 600
+    ) -> Operation:
+        """
+        Async version of wait_for_operation.
+
+        Args:
+            operation: The operation object to wait for
+            poll_interval: Seconds to wait between status checks
+            max_wait: Maximum seconds to wait before timing out
+
+        Returns:
+            Operation: The completed operation object
+        """
+        elapsed = 0
+        while not operation.done:
+            if elapsed >= max_wait:
+                raise TimeoutError(f"Operation timed out after {max_wait} seconds")
+            await asyncio.sleep(poll_interval)
+            elapsed += poll_interval
+            operation = await self.get_client().aio.operations.get(operation)
+            log_debug(f"Waiting for operation... ({elapsed}s elapsed)")
+
+        log_info("Operation completed successfully")
+        return operation
+
+    def upload_to_file_search_store(
+        self,
+        file_path: Union[str, Path],
+        store_name: str,
+        display_name: Optional[str] = None,
+        chunking_config: Optional[Dict[str, Any]] = None,
+        custom_metadata: Optional[List[Dict[str, Any]]] = None,
+    ) -> Any:
+        """
+        Upload a file directly to a File Search store.
+
+        Args:
+            file_path: Path to the file to upload
+            store_name: Name of the File Search store
+            display_name: Optional display name for the file (will be visible in citations)
+            chunking_config: Optional chunking configuration
+                Example: {
+                    "white_space_config": {
+                        "max_tokens_per_chunk": 200,
+                        "max_overlap_tokens": 20
+                    }
+                }
+            custom_metadata: Optional custom metadata as list of dicts
+                Example: [
+                    {"key": "author", "string_value": "John Doe"},
+                    {"key": "year", "numeric_value": 2024}
+                ]
+
+        Returns:
+            Operation: Long-running operation object. Use wait_for_operation() to wait for completion.
+        """
+        file_path = file_path if isinstance(file_path, Path) else Path(file_path)
+
+        if not file_path.exists():
+            raise FileNotFoundError(f"File not found: {file_path}")
+
+        config: Dict[str, Any] = {}
+        if display_name:
+            config["display_name"] = display_name
+        if chunking_config:
+            config["chunking_config"] = chunking_config
+        if custom_metadata:
+            config["custom_metadata"] = custom_metadata
+
+        try:
+            log_info(f"Uploading file {file_path.name} to File Search store {store_name}")
+            operation = self.get_client().file_search_stores.upload_to_file_search_store(
+                file=file_path,
+                file_search_store_name=store_name,
+                config=config or None,  # type: ignore[arg-type]
+            )
+            log_info(f"Upload initiated for {file_path.name}")
+            return operation
+        except Exception as e:
+            log_error(f"Error uploading file to File Search store: {e}")
+            raise
+
+    async def async_upload_to_file_search_store(
+        self,
+        file_path: Union[str, Path],
+        store_name: str,
+        display_name: Optional[str] = None,
+        chunking_config: Optional[Dict[str, Any]] = None,
+        custom_metadata: Optional[List[Dict[str, Any]]] = None,
+    ) -> Any:
+        """
+        Args:
+            file_path: Path to the file to upload
+            store_name: Name of the File Search store
+            display_name: Optional display name for the file
+            chunking_config: Optional chunking configuration
+            custom_metadata: Optional custom metadata
+
+        Returns:
+            Operation: Long-running operation object
+        """
+        file_path = file_path if isinstance(file_path, Path) else Path(file_path)
+
+        if not file_path.exists():
+            raise FileNotFoundError(f"File not found: {file_path}")
+
+        config: Dict[str, Any] = {}
+        if display_name:
+            config["display_name"] = display_name
+        if chunking_config:
+            config["chunking_config"] = chunking_config
+        if custom_metadata:
+            config["custom_metadata"] = custom_metadata
+
+        try:
+            log_info(f"Uploading file {file_path.name} to File Search store {store_name}")
+            operation = await self.get_client().aio.file_search_stores.upload_to_file_search_store(
+                file=file_path,
+                file_search_store_name=store_name,
+                config=config or None,  # type: ignore[arg-type]
+            )
+            log_info(f"Upload initiated for {file_path.name}")
+            return operation
+        except Exception as e:
+            log_error(f"Error uploading file to File Search store: {e}")
+            raise
+
+    def import_file_to_store(
+        self,
+        file_name: str,
+        store_name: str,
+        chunking_config: Optional[Dict[str, Any]] = None,
+        custom_metadata: Optional[List[Dict[str, Any]]] = None,
+    ) -> Any:
+        """
+        Import an existing uploaded file (via Files API) into a File Search store.
+
+        Args:
+            file_name: Name of the file already uploaded via Files API
+            store_name: Name of the File Search store
+            chunking_config: Optional chunking configuration
+            custom_metadata: Optional custom metadata
+
+        Returns:
+            Operation: Long-running operation object. Use wait_for_operation() to wait for completion.
+        """
+        config: Dict[str, Any] = {}
+        if chunking_config:
+            config["chunking_config"] = chunking_config
+        if custom_metadata:
+            config["custom_metadata"] = custom_metadata
+
+        try:
+            log_info(f"Importing file {file_name} to File Search store {store_name}")
+            operation = self.get_client().file_search_stores.import_file(
+                file_search_store_name=store_name,
+                file_name=file_name,
+                config=config or None,  # type: ignore[arg-type]
+            )
+            log_info(f"Import initiated for {file_name}")
+            return operation
+        except Exception as e:
+            log_error(f"Error importing file to File Search store: {e}")
+            raise
+
+    async def async_import_file_to_store(
+        self,
+        file_name: str,
+        store_name: str,
+        chunking_config: Optional[Dict[str, Any]] = None,
+        custom_metadata: Optional[List[Dict[str, Any]]] = None,
+    ) -> Any:
+        """
+        Args:
+            file_name: Name of the file already uploaded via Files API
+            store_name: Name of the File Search store
+            chunking_config: Optional chunking configuration
+            custom_metadata: Optional custom metadata
+
+        Returns:
+            Operation: Long-running operation object
+        """
+        config: Dict[str, Any] = {}
+        if chunking_config:
+            config["chunking_config"] = chunking_config
+        if custom_metadata:
+            config["custom_metadata"] = custom_metadata
+
+        try:
+            log_info(f"Importing file {file_name} to File Search store {store_name}")
+            operation = await self.get_client().aio.file_search_stores.import_file(
+                file_search_store_name=store_name,
+                file_name=file_name,
+                config=config or None,  # type: ignore[arg-type]
+            )
+            log_info(f"Import initiated for {file_name}")
+            return operation
+        except Exception as e:
+            log_error(f"Error importing file to File Search store: {e}")
+            raise
+
+    def list_documents(self, store_name: str, page_size: int = 20) -> List[Any]:
+        """
+        Args:
+            store_name: Name of the File Search store
+            page_size: Maximum number of documents to return per page
+
+        Returns:
+            List: List of document objects
+        """
+        try:
+            documents = []
+            for doc in self.get_client().file_search_stores.documents.list(
+                parent=store_name, config={"page_size": page_size}
+            ):
+                documents.append(doc)
+            log_debug(f"Found {len(documents)} documents in store {store_name}")
+            return documents
+        except Exception as e:
+            log_error(f"Error listing documents in store {store_name}: {e}")
+            raise
+
+    async def async_list_documents(self, store_name: str, page_size: int = 20) -> List[Any]:
+        """
+        Async version of list_documents.
+
+        Args:
+            store_name: Name of the File Search store
+            page_size: Maximum number of documents to return per page
+
+        Returns:
+            List: List of document objects
+        """
+        try:
+            documents = []
+            # Await the AsyncPager first, then iterate
+            async for doc in await self.get_client().aio.file_search_stores.documents.list(
+                parent=store_name, config={"page_size": page_size}
+            ):
+                documents.append(doc)
+            log_debug(f"Found {len(documents)} documents in store {store_name}")
+            return documents
+        except Exception as e:
+            log_error(f"Error listing documents in store {store_name}: {e}")
+            raise
+
+    def get_document(self, document_name: str) -> Any:
+        """
+        Get a specific document by name.
+
+        Args:
+            document_name: Full name of the document
+                (e.g., 'fileSearchStores/store-123/documents/doc-456')
+
+        Returns:
+            Document object
+        """
+        try:
+            doc = self.get_client().file_search_stores.documents.get(name=document_name)
+            log_debug(f"Retrieved document: {document_name}")
+            return doc
+        except Exception as e:
+            log_error(f"Error getting document {document_name}: {e}")
+            raise
+
+    async def async_get_document(self, document_name: str) -> Any:
+        """
+        Async version of get_document.
+
+        Args:
+            document_name: Full name of the document
+
+        Returns:
+            Document object
+        """
+        try:
+            doc = await self.get_client().aio.file_search_stores.documents.get(name=document_name)
+            log_debug(f"Retrieved document: {document_name}")
+            return doc
+        except Exception as e:
+            log_error(f"Error getting document {document_name}: {e}")
+            raise
+
+    def delete_document(self, document_name: str) -> None:
+        """
+        Delete a document from a File Search store.
+
+        Args:
+            document_name: Full name of the document to delete
+
+        Example:
+            ```python
+            model = Gemini(id="gemini-2.5-flash")
+            model.delete_document("fileSearchStores/store-123/documents/doc-456")
+            ```
+        """
+        try:
+            self.get_client().file_search_stores.documents.delete(name=document_name)
+            log_info(f"Deleted document: {document_name}")
+        except Exception as e:
+            log_error(f"Error deleting document {document_name}: {e}")
+            raise
+
+    async def async_delete_document(self, document_name: str) -> None:
+        """
+        Async version of delete_document.
+
+        Args:
+            document_name: Full name of the document to delete
+        """
+        try:
+            await self.get_client().aio.file_search_stores.documents.delete(name=document_name)
+            log_info(f"Deleted document: {document_name}")
+        except Exception as e:
+            log_error(f"Error deleting document {document_name}: {e}")
+            raise
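Taken together, the new helpers cover the full store lifecycle. A hedged end-to-end sketch using only methods added in this hunk (`knowledge.pdf` is a placeholder path; assumes GOOGLE_API_KEY is set):

```python
# Hedged lifecycle sketch: create a store, upload a file, query, clean up.
from agno.models.google import Gemini

model = Gemini(id="gemini-2.5-flash")

# Create a store and upload a document (upload is a long-running operation).
store = model.create_file_search_store(display_name="docs")
operation = model.upload_to_file_search_store(
    file_path="knowledge.pdf",
    store_name=store.name,
    custom_metadata=[{"key": "author", "string_value": "John Doe"}],
)
model.wait_for_operation(operation)  # polls operations.get() until done

# Point the model at the populated store; FileSearch is then appended
# as a built-in tool on every request (see _append_file_search_tool above).
model.file_search_store_names = [store.name]

# Inspect and clean up.
print([doc.name for doc in model.list_documents(store.name)])
model.delete_file_search_store(store.name, force=True)
```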