agno 2.3.2__py3-none-any.whl → 2.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90):
  1. agno/agent/agent.py +513 -185
  2. agno/compression/__init__.py +3 -0
  3. agno/compression/manager.py +176 -0
  4. agno/db/dynamo/dynamo.py +11 -0
  5. agno/db/firestore/firestore.py +5 -1
  6. agno/db/gcs_json/gcs_json_db.py +5 -2
  7. agno/db/in_memory/in_memory_db.py +5 -2
  8. agno/db/json/json_db.py +5 -1
  9. agno/db/migrations/manager.py +4 -4
  10. agno/db/mongo/async_mongo.py +158 -34
  11. agno/db/mongo/mongo.py +6 -2
  12. agno/db/mysql/mysql.py +48 -54
  13. agno/db/postgres/async_postgres.py +66 -52
  14. agno/db/postgres/postgres.py +42 -50
  15. agno/db/redis/redis.py +5 -0
  16. agno/db/redis/utils.py +5 -5
  17. agno/db/singlestore/singlestore.py +99 -108
  18. agno/db/sqlite/async_sqlite.py +29 -27
  19. agno/db/sqlite/sqlite.py +30 -26
  20. agno/knowledge/reader/pdf_reader.py +2 -2
  21. agno/knowledge/reader/tavily_reader.py +0 -1
  22. agno/memory/__init__.py +14 -1
  23. agno/memory/manager.py +217 -4
  24. agno/memory/strategies/__init__.py +15 -0
  25. agno/memory/strategies/base.py +67 -0
  26. agno/memory/strategies/summarize.py +196 -0
  27. agno/memory/strategies/types.py +37 -0
  28. agno/models/aimlapi/aimlapi.py +18 -0
  29. agno/models/anthropic/claude.py +87 -81
  30. agno/models/aws/bedrock.py +38 -16
  31. agno/models/aws/claude.py +97 -277
  32. agno/models/azure/ai_foundry.py +8 -4
  33. agno/models/base.py +101 -14
  34. agno/models/cerebras/cerebras.py +25 -9
  35. agno/models/cerebras/cerebras_openai.py +22 -2
  36. agno/models/cohere/chat.py +18 -6
  37. agno/models/cometapi/cometapi.py +19 -1
  38. agno/models/deepinfra/deepinfra.py +19 -1
  39. agno/models/fireworks/fireworks.py +19 -1
  40. agno/models/google/gemini.py +583 -21
  41. agno/models/groq/groq.py +23 -6
  42. agno/models/huggingface/huggingface.py +22 -7
  43. agno/models/ibm/watsonx.py +21 -7
  44. agno/models/internlm/internlm.py +19 -1
  45. agno/models/langdb/langdb.py +10 -0
  46. agno/models/litellm/chat.py +17 -7
  47. agno/models/litellm/litellm_openai.py +19 -1
  48. agno/models/message.py +19 -5
  49. agno/models/meta/llama.py +25 -5
  50. agno/models/meta/llama_openai.py +18 -0
  51. agno/models/mistral/mistral.py +13 -5
  52. agno/models/nvidia/nvidia.py +19 -1
  53. agno/models/ollama/chat.py +17 -6
  54. agno/models/openai/chat.py +22 -7
  55. agno/models/openai/responses.py +28 -10
  56. agno/models/openrouter/openrouter.py +20 -0
  57. agno/models/perplexity/perplexity.py +17 -0
  58. agno/models/requesty/requesty.py +18 -0
  59. agno/models/sambanova/sambanova.py +19 -1
  60. agno/models/siliconflow/siliconflow.py +19 -1
  61. agno/models/together/together.py +19 -1
  62. agno/models/vercel/v0.py +19 -1
  63. agno/models/vertexai/claude.py +99 -5
  64. agno/models/xai/xai.py +18 -0
  65. agno/os/interfaces/agui/router.py +1 -0
  66. agno/os/interfaces/agui/utils.py +97 -57
  67. agno/os/router.py +16 -0
  68. agno/os/routers/memory/memory.py +143 -0
  69. agno/os/routers/memory/schemas.py +26 -0
  70. agno/os/schema.py +33 -6
  71. agno/os/utils.py +134 -10
  72. agno/run/base.py +2 -1
  73. agno/run/workflow.py +1 -1
  74. agno/team/team.py +566 -219
  75. agno/tools/mcp/mcp.py +1 -1
  76. agno/utils/agent.py +119 -1
  77. agno/utils/models/ai_foundry.py +9 -2
  78. agno/utils/models/claude.py +12 -5
  79. agno/utils/models/cohere.py +9 -2
  80. agno/utils/models/llama.py +9 -2
  81. agno/utils/models/mistral.py +4 -2
  82. agno/utils/print_response/agent.py +37 -2
  83. agno/utils/print_response/team.py +52 -0
  84. agno/utils/tokens.py +41 -0
  85. agno/workflow/types.py +2 -2
  86. {agno-2.3.2.dist-info → agno-2.3.4.dist-info}/METADATA +45 -40
  87. {agno-2.3.2.dist-info → agno-2.3.4.dist-info}/RECORD +90 -83
  88. {agno-2.3.2.dist-info → agno-2.3.4.dist-info}/WHEEL +0 -0
  89. {agno-2.3.2.dist-info → agno-2.3.4.dist-info}/licenses/LICENSE +0 -0
  90. {agno-2.3.2.dist-info → agno-2.3.4.dist-info}/top_level.txt +0 -0
@@ -1,3 +1,4 @@
1
+ import asyncio
1
2
  import base64
2
3
  import json
3
4
  import time
@@ -27,12 +28,14 @@ try:
27
28
  from google.genai.types import (
28
29
  Content,
29
30
  DynamicRetrievalConfig,
31
+ FileSearch,
30
32
  FunctionCallingConfigMode,
31
33
  GenerateContentConfig,
32
34
  GenerateContentResponse,
33
35
  GenerateContentResponseUsageMetadata,
34
36
  GoogleSearch,
35
37
  GoogleSearchRetrieval,
38
+ Operation,
36
39
  Part,
37
40
  Retrieval,
38
41
  ThinkingConfig,
@@ -44,7 +47,9 @@ try:
44
47
  File as GeminiFile,
45
48
  )
46
49
  except ImportError:
47
- raise ImportError("`google-genai` not installed. Please install it using `pip install google-genai`")
50
+ raise ImportError(
51
+ "`google-genai` not installed or not at the latest version. Please install it using `pip install -U google-genai`"
52
+ )
48
53
 
49
54
 
50
55
  @dataclass
@@ -79,6 +84,10 @@ class Gemini(Model):
79
84
  vertexai_search: bool = False
80
85
  vertexai_search_datastore: Optional[str] = None
81
86
 
87
+ # Gemini File Search capabilities
88
+ file_search_store_names: Optional[List[str]] = None
89
+ file_search_metadata_filter: Optional[str] = None
90
+
82
91
  temperature: Optional[float] = None
83
92
  top_p: Optional[float] = None
84
93
  top_k: Optional[int] = None
@@ -93,6 +102,7 @@ class Gemini(Model):
93
102
  cached_content: Optional[Any] = None
94
103
  thinking_budget: Optional[int] = None # Thinking budget for Gemini 2.5 models
95
104
  include_thoughts: Optional[bool] = None # Include thought summaries in response
105
+ thinking_level: Optional[str] = None # "low", "high"
96
106
  request_params: Optional[Dict[str, Any]] = None
97
107
 
98
108
  # Client parameters
@@ -131,7 +141,11 @@ class Gemini(Model):
131
141
  if not vertexai:
132
142
  self.api_key = self.api_key or getenv("GOOGLE_API_KEY")
133
143
  if not self.api_key:
134
- log_error("GOOGLE_API_KEY not set. Please set the GOOGLE_API_KEY environment variable.")
144
+ raise ModelProviderError(
145
+ message="GOOGLE_API_KEY not set. Please set the GOOGLE_API_KEY environment variable.",
146
+ model_name=self.name,
147
+ model_id=self.id,
148
+ )
135
149
  client_params["api_key"] = self.api_key
136
150
  else:
137
151
  log_info("Using Vertex AI API")
@@ -147,6 +161,21 @@ class Gemini(Model):
147
161
  self.client = genai.Client(**client_params)
148
162
  return self.client
149
163
 
164
+ def _append_file_search_tool(self, builtin_tools: List[Tool]) -> None:
165
+ """Append Gemini File Search tool to builtin_tools if file search is enabled.
166
+
167
+ Args:
168
+ builtin_tools: List of built-in tools to append to.
169
+ """
170
+ if not self.file_search_store_names:
171
+ return
172
+
173
+ log_debug("Gemini File Search enabled.")
174
+ file_search_config: Dict[str, Any] = {"file_search_store_names": self.file_search_store_names}
175
+ if self.file_search_metadata_filter:
176
+ file_search_config["metadata_filter"] = self.file_search_metadata_filter
177
+ builtin_tools.append(Tool(file_search=FileSearch(**file_search_config))) # type: ignore[arg-type]
178
+
150
179
  def get_request_params(
151
180
  self,
152
181
  system_message: Optional[str] = None,
@@ -198,11 +227,13 @@ class Gemini(Model):
198
227
  config["response_schema"] = prepare_response_schema(response_format)
199
228
 
200
229
  # Add thinking configuration
201
- thinking_config_params = {}
230
+ thinking_config_params: Dict[str, Any] = {}
202
231
  if self.thinking_budget is not None:
203
232
  thinking_config_params["thinking_budget"] = self.thinking_budget
204
233
  if self.include_thoughts is not None:
205
234
  thinking_config_params["include_thoughts"] = self.include_thoughts
235
+ if self.thinking_level is not None:
236
+ thinking_config_params["thinking_level"] = self.thinking_level
206
237
  if thinking_config_params:
207
238
  config["thinking_config"] = ThinkingConfig(**thinking_config_params)
208
239
 
@@ -240,6 +271,8 @@ class Gemini(Model):
240
271
  Tool(retrieval=Retrieval(vertex_ai_search=VertexAISearch(datastore=self.vertexai_search_datastore)))
241
272
  )
242
273
 
274
+ self._append_file_search_tool(builtin_tools)
275
+
243
276
  # Set tools in config
244
277
  if builtin_tools:
245
278
  if tools:
@@ -281,11 +314,12 @@ class Gemini(Model):
281
314
  tools: Optional[List[Dict[str, Any]]] = None,
282
315
  tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
283
316
  run_response: Optional[RunOutput] = None,
317
+ compress_tool_results: bool = False,
284
318
  ) -> ModelResponse:
285
319
  """
286
320
  Invokes the model with a list of messages and returns the response.
287
321
  """
288
- formatted_messages, system_message = self._format_messages(messages)
322
+ formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
289
323
  request_kwargs = self.get_request_params(
290
324
  system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
291
325
  )
@@ -326,11 +360,12 @@ class Gemini(Model):
326
360
  tools: Optional[List[Dict[str, Any]]] = None,
327
361
  tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
328
362
  run_response: Optional[RunOutput] = None,
363
+ compress_tool_results: bool = False,
329
364
  ) -> Iterator[ModelResponse]:
330
365
  """
331
366
  Invokes the model with a list of messages and returns the response as a stream.
332
367
  """
333
- formatted_messages, system_message = self._format_messages(messages)
368
+ formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
334
369
 
335
370
  request_kwargs = self.get_request_params(
336
371
  system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
@@ -369,11 +404,12 @@ class Gemini(Model):
369
404
  tools: Optional[List[Dict[str, Any]]] = None,
370
405
  tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
371
406
  run_response: Optional[RunOutput] = None,
407
+ compress_tool_results: bool = False,
372
408
  ) -> ModelResponse:
373
409
  """
374
410
  Invokes the model with a list of messages and returns the response.
375
411
  """
376
- formatted_messages, system_message = self._format_messages(messages)
412
+ formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
377
413
 
378
414
  request_kwargs = self.get_request_params(
379
415
  system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
@@ -415,11 +451,12 @@ class Gemini(Model):
415
451
  tools: Optional[List[Dict[str, Any]]] = None,
416
452
  tool_choice: Optional[Union[str, Dict[str, Any]]] = None,
417
453
  run_response: Optional[RunOutput] = None,
454
+ compress_tool_results: bool = False,
418
455
  ) -> AsyncIterator[ModelResponse]:
419
456
  """
420
457
  Invokes the model with a list of messages and returns the response as a stream.
421
458
  """
422
- formatted_messages, system_message = self._format_messages(messages)
459
+ formatted_messages, system_message = self._format_messages(messages, compress_tool_results)
423
460
 
424
461
  request_kwargs = self.get_request_params(
425
462
  system_message, response_format=response_format, tools=tools, tool_choice=tool_choice
@@ -453,16 +490,18 @@ class Gemini(Model):
453
490
  log_error(f"Unknown error from Gemini API: {e}")
454
491
  raise ModelProviderError(message=str(e), model_name=self.name, model_id=self.id) from e
455
492
 
456
- def _format_messages(self, messages: List[Message]):
493
+ def _format_messages(self, messages: List[Message], compress_tool_results: bool = False):
457
494
  """
458
495
  Converts a list of Message objects to the Gemini-compatible format.
459
496
 
460
497
  Args:
461
498
  messages (List[Message]): The list of messages to convert.
499
+ compress_tool_results: Whether to compress tool results.
462
500
  """
463
501
  formatted_messages: List = []
464
502
  file_content: Optional[Union[GeminiFile, Part]] = None
465
503
  system_message = None
504
+
466
505
  for message in messages:
467
506
  role = message.role
468
507
  if role in ["system", "developer"]:
@@ -473,7 +512,8 @@ class Gemini(Model):
473
512
  role = self.reverse_role_map.get(role, role)
474
513
 
475
514
  # Add content to the message for the model
476
- content = message.content
515
+ content = message.get_content(use_compressed_content=compress_tool_results)
516
+
477
517
  # Initialize message_parts to be used for Gemini
478
518
  message_parts: List[Any] = []
479
519
 
@@ -495,11 +535,25 @@ class Gemini(Model):
495
535
  message_parts.append(part)
496
536
  # Function call results
497
537
  elif message.tool_calls is not None and len(message.tool_calls) > 0:
498
- for tool_call in message.tool_calls:
538
+ for idx, tool_call in enumerate(message.tool_calls):
539
+ if isinstance(content, list) and idx < len(content):
540
+ original_from_list = content[idx]
541
+
542
+ if compress_tool_results:
543
+ compressed_from_tool_call = tool_call.get("content")
544
+ tc_content = compressed_from_tool_call if compressed_from_tool_call else original_from_list
545
+ else:
546
+ tc_content = original_from_list
547
+ else:
548
+ tc_content = message.get_content(use_compressed_content=compress_tool_results)
549
+
550
+ if tc_content is None:
551
+ tc_content = tool_call.get("content")
552
+ if tc_content is None:
553
+ tc_content = content
554
+
499
555
  message_parts.append(
500
- Part.from_function_response(
501
- name=tool_call["tool_name"], response={"result": tool_call["content"]}
502
- )
556
+ Part.from_function_response(name=tool_call["tool_name"], response={"result": tc_content})
503
557
  )
504
558
  # Regular text content
505
559
  else:
@@ -767,24 +821,41 @@ class Gemini(Model):
767
821
  return None
768
822
 
769
823
  def format_function_call_results(
770
- self, messages: List[Message], function_call_results: List[Message], **kwargs
824
+ self,
825
+ messages: List[Message],
826
+ function_call_results: List[Message],
827
+ compress_tool_results: bool = False,
828
+ **kwargs,
771
829
  ) -> None:
772
830
  """
773
- Format function call results.
831
+ Format function call results for Gemini.
832
+
833
+ For combined messages:
834
+ - content: list of ORIGINAL content (for preservation)
835
+ - tool_calls[i]["content"]: compressed content if available (for API sending)
836
+
837
+ This allows the message to be saved with both original and compressed versions.
774
838
  """
775
- combined_content: List = []
839
+ combined_original_content: List = []
776
840
  combined_function_result: List = []
777
841
  message_metrics = Metrics()
842
+
778
843
  if len(function_call_results) > 0:
779
- for result in function_call_results:
780
- combined_content.append(result.content)
781
- combined_function_result.append({"tool_name": result.tool_name, "content": result.content})
844
+ for idx, result in enumerate(function_call_results):
845
+ combined_original_content.append(result.content)
846
+ compressed_content = result.get_content(use_compressed_content=compress_tool_results)
847
+ combined_function_result.append(
848
+ {"tool_call_id": result.tool_call_id, "tool_name": result.tool_name, "content": compressed_content}
849
+ )
782
850
  message_metrics += result.metrics
783
851
 
784
- if combined_content:
852
+ if combined_original_content:
785
853
  messages.append(
786
854
  Message(
787
- role="tool", content=combined_content, tool_calls=combined_function_result, metrics=message_metrics
855
+ role="tool",
856
+ content=combined_original_content,
857
+ tool_calls=combined_function_result,
858
+ metrics=message_metrics,
788
859
  )
789
860
  )
790
861
 
@@ -1115,3 +1186,494 @@ class Gemini(Model):
1115
1186
  metrics.provider_metrics = {"traffic_type": response_usage.traffic_type}
1116
1187
 
1117
1188
  return metrics
1189
+
1190
+ def create_file_search_store(self, display_name: Optional[str] = None) -> Any:
1191
+ """
1192
+ Create a new File Search store.
1193
+
1194
+ Args:
1195
+ display_name: Optional display name for the store
1196
+
1197
+ Returns:
1198
+ FileSearchStore: The created File Search store object
1199
+ """
1200
+ config: Dict[str, Any] = {}
1201
+ if display_name:
1202
+ config["display_name"] = display_name
1203
+
1204
+ try:
1205
+ store = self.get_client().file_search_stores.create(config=config or None) # type: ignore[arg-type]
1206
+ log_info(f"Created File Search store: {store.name}")
1207
+ return store
1208
+ except Exception as e:
1209
+ log_error(f"Error creating File Search store: {e}")
1210
+ raise
1211
+
1212
+ async def async_create_file_search_store(self, display_name: Optional[str] = None) -> Any:
1213
+ """
1214
+ Args:
1215
+ display_name: Optional display name for the store
1216
+
1217
+ Returns:
1218
+ FileSearchStore: The created File Search store object
1219
+ """
1220
+ config: Dict[str, Any] = {}
1221
+ if display_name:
1222
+ config["display_name"] = display_name
1223
+
1224
+ try:
1225
+ store = await self.get_client().aio.file_search_stores.create(config=config or None) # type: ignore[arg-type]
1226
+ log_info(f"Created File Search store: {store.name}")
1227
+ return store
1228
+ except Exception as e:
1229
+ log_error(f"Error creating File Search store: {e}")
1230
+ raise
1231
+
1232
+ def list_file_search_stores(self, page_size: int = 100) -> List[Any]:
1233
+ """
1234
+ List all File Search stores.
1235
+
1236
+ Args:
1237
+ page_size: Maximum number of stores to return per page
1238
+
1239
+ Returns:
1240
+ List: List of FileSearchStore objects
1241
+ """
1242
+ try:
1243
+ stores = []
1244
+ for store in self.get_client().file_search_stores.list(config={"page_size": page_size}):
1245
+ stores.append(store)
1246
+ log_debug(f"Found {len(stores)} File Search stores")
1247
+ return stores
1248
+ except Exception as e:
1249
+ log_error(f"Error listing File Search stores: {e}")
1250
+ raise
1251
+
1252
+ async def async_list_file_search_stores(self, page_size: int = 100) -> List[Any]:
1253
+ """
1254
+ Async version of list_file_search_stores.
1255
+
1256
+ Args:
1257
+ page_size: Maximum number of stores to return per page
1258
+
1259
+ Returns:
1260
+ List: List of FileSearchStore objects
1261
+ """
1262
+ try:
1263
+ stores = []
1264
+ async for store in await self.get_client().aio.file_search_stores.list(config={"page_size": page_size}):
1265
+ stores.append(store)
1266
+ log_debug(f"Found {len(stores)} File Search stores")
1267
+ return stores
1268
+ except Exception as e:
1269
+ log_error(f"Error listing File Search stores: {e}")
1270
+ raise
1271
+
1272
+ def get_file_search_store(self, name: str) -> Any:
1273
+ """
1274
+ Get a specific File Search store by name.
1275
+
1276
+ Args:
1277
+ name: The name of the store (e.g., 'fileSearchStores/my-store-123')
1278
+
1279
+ Returns:
1280
+ FileSearchStore: The File Search store object
1281
+ """
1282
+ try:
1283
+ store = self.get_client().file_search_stores.get(name=name)
1284
+ log_debug(f"Retrieved File Search store: {name}")
1285
+ return store
1286
+ except Exception as e:
1287
+ log_error(f"Error getting File Search store {name}: {e}")
1288
+ raise
1289
+
1290
+ async def async_get_file_search_store(self, name: str) -> Any:
1291
+ """
1292
+ Args:
1293
+ name: The name of the store
1294
+
1295
+ Returns:
1296
+ FileSearchStore: The File Search store object
1297
+ """
1298
+ try:
1299
+ store = await self.get_client().aio.file_search_stores.get(name=name)
1300
+ log_debug(f"Retrieved File Search store: {name}")
1301
+ return store
1302
+ except Exception as e:
1303
+ log_error(f"Error getting File Search store {name}: {e}")
1304
+ raise
1305
+
1306
+ def delete_file_search_store(self, name: str, force: bool = False) -> None:
1307
+ """
1308
+ Delete a File Search store.
1309
+
1310
+ Args:
1311
+ name: The name of the store to delete
1312
+ force: If True, force delete even if store contains documents
1313
+ """
1314
+ try:
1315
+ self.get_client().file_search_stores.delete(name=name, config={"force": force})
1316
+ log_info(f"Deleted File Search store: {name}")
1317
+ except Exception as e:
1318
+ log_error(f"Error deleting File Search store {name}: {e}")
1319
+ raise
1320
+
1321
+ async def async_delete_file_search_store(self, name: str, force: bool = True) -> None:
1322
+ """
1323
+ Async version of delete_file_search_store.
1324
+
1325
+ Args:
1326
+ name: The name of the store to delete
1327
+ force: If True, force delete even if store contains documents
1328
+ """
1329
+ try:
1330
+ await self.get_client().aio.file_search_stores.delete(name=name, config={"force": force})
1331
+ log_info(f"Deleted File Search store: {name}")
1332
+ except Exception as e:
1333
+ log_error(f"Error deleting File Search store {name}: {e}")
1334
+ raise
1335
+
1336
+ def wait_for_operation(self, operation: Operation, poll_interval: int = 5, max_wait: int = 600) -> Operation:
1337
+ """
1338
+ Wait for a long-running operation to complete.
1339
+
1340
+ Args:
1341
+ operation: The operation object to wait for
1342
+ poll_interval: Seconds to wait between status checks
1343
+ max_wait: Maximum seconds to wait before timing out
1344
+
1345
+ Returns:
1346
+ Operation: The completed operation object
1347
+
1348
+ Raises:
1349
+ TimeoutError: If operation doesn't complete within max_wait seconds
1350
+ """
1351
+ elapsed = 0
1352
+ while not operation.done:
1353
+ if elapsed >= max_wait:
1354
+ raise TimeoutError(f"Operation timed out after {max_wait} seconds")
1355
+ time.sleep(poll_interval)
1356
+ elapsed += poll_interval
1357
+ operation = self.get_client().operations.get(operation)
1358
+ log_debug(f"Waiting for operation... ({elapsed}s elapsed)")
1359
+
1360
+ log_info("Operation completed successfully")
1361
+ return operation
1362
+
1363
+ async def async_wait_for_operation(
1364
+ self, operation: Operation, poll_interval: int = 5, max_wait: int = 600
1365
+ ) -> Operation:
1366
+ """
1367
+ Async version of wait_for_operation.
1368
+
1369
+ Args:
1370
+ operation: The operation object to wait for
1371
+ poll_interval: Seconds to wait between status checks
1372
+ max_wait: Maximum seconds to wait before timing out
1373
+
1374
+ Returns:
1375
+ Operation: The completed operation object
1376
+ """
1377
+ elapsed = 0
1378
+ while not operation.done:
1379
+ if elapsed >= max_wait:
1380
+ raise TimeoutError(f"Operation timed out after {max_wait} seconds")
1381
+ await asyncio.sleep(poll_interval)
1382
+ elapsed += poll_interval
1383
+ operation = await self.get_client().aio.operations.get(operation)
1384
+ log_debug(f"Waiting for operation... ({elapsed}s elapsed)")
1385
+
1386
+ log_info("Operation completed successfully")
1387
+ return operation
1388
+
1389
+ def upload_to_file_search_store(
1390
+ self,
1391
+ file_path: Union[str, Path],
1392
+ store_name: str,
1393
+ display_name: Optional[str] = None,
1394
+ chunking_config: Optional[Dict[str, Any]] = None,
1395
+ custom_metadata: Optional[List[Dict[str, Any]]] = None,
1396
+ ) -> Any:
1397
+ """
1398
+ Upload a file directly to a File Search store.
1399
+
1400
+ Args:
1401
+ file_path: Path to the file to upload
1402
+ store_name: Name of the File Search store
1403
+ display_name: Optional display name for the file (will be visible in citations)
1404
+ chunking_config: Optional chunking configuration
1405
+ Example: {
1406
+ "white_space_config": {
1407
+ "max_tokens_per_chunk": 200,
1408
+ "max_overlap_tokens": 20
1409
+ }
1410
+ }
1411
+ custom_metadata: Optional custom metadata as list of dicts
1412
+ Example: [
1413
+ {"key": "author", "string_value": "John Doe"},
1414
+ {"key": "year", "numeric_value": 2024}
1415
+ ]
1416
+
1417
+ Returns:
1418
+ Operation: Long-running operation object. Use wait_for_operation() to wait for completion.
1419
+ """
1420
+ file_path = file_path if isinstance(file_path, Path) else Path(file_path)
1421
+
1422
+ if not file_path.exists():
1423
+ raise FileNotFoundError(f"File not found: {file_path}")
1424
+
1425
+ config: Dict[str, Any] = {}
1426
+ if display_name:
1427
+ config["display_name"] = display_name
1428
+ if chunking_config:
1429
+ config["chunking_config"] = chunking_config
1430
+ if custom_metadata:
1431
+ config["custom_metadata"] = custom_metadata
1432
+
1433
+ try:
1434
+ log_info(f"Uploading file {file_path.name} to File Search store {store_name}")
1435
+ operation = self.get_client().file_search_stores.upload_to_file_search_store(
1436
+ file=file_path,
1437
+ file_search_store_name=store_name,
1438
+ config=config or None, # type: ignore[arg-type]
1439
+ )
1440
+ log_info(f"Upload initiated for {file_path.name}")
1441
+ return operation
1442
+ except Exception as e:
1443
+ log_error(f"Error uploading file to File Search store: {e}")
1444
+ raise
1445
+
1446
+ async def async_upload_to_file_search_store(
1447
+ self,
1448
+ file_path: Union[str, Path],
1449
+ store_name: str,
1450
+ display_name: Optional[str] = None,
1451
+ chunking_config: Optional[Dict[str, Any]] = None,
1452
+ custom_metadata: Optional[List[Dict[str, Any]]] = None,
1453
+ ) -> Any:
1454
+ """
1455
+ Args:
1456
+ file_path: Path to the file to upload
1457
+ store_name: Name of the File Search store
1458
+ display_name: Optional display name for the file
1459
+ chunking_config: Optional chunking configuration
1460
+ custom_metadata: Optional custom metadata
1461
+
1462
+ Returns:
1463
+ Operation: Long-running operation object
1464
+ """
1465
+ file_path = file_path if isinstance(file_path, Path) else Path(file_path)
1466
+
1467
+ if not file_path.exists():
1468
+ raise FileNotFoundError(f"File not found: {file_path}")
1469
+
1470
+ config: Dict[str, Any] = {}
1471
+ if display_name:
1472
+ config["display_name"] = display_name
1473
+ if chunking_config:
1474
+ config["chunking_config"] = chunking_config
1475
+ if custom_metadata:
1476
+ config["custom_metadata"] = custom_metadata
1477
+
1478
+ try:
1479
+ log_info(f"Uploading file {file_path.name} to File Search store {store_name}")
1480
+ operation = await self.get_client().aio.file_search_stores.upload_to_file_search_store(
1481
+ file=file_path,
1482
+ file_search_store_name=store_name,
1483
+ config=config or None, # type: ignore[arg-type]
1484
+ )
1485
+ log_info(f"Upload initiated for {file_path.name}")
1486
+ return operation
1487
+ except Exception as e:
1488
+ log_error(f"Error uploading file to File Search store: {e}")
1489
+ raise
1490
+
1491
+ def import_file_to_store(
1492
+ self,
1493
+ file_name: str,
1494
+ store_name: str,
1495
+ chunking_config: Optional[Dict[str, Any]] = None,
1496
+ custom_metadata: Optional[List[Dict[str, Any]]] = None,
1497
+ ) -> Any:
1498
+ """
1499
+ Import an existing uploaded file (via Files API) into a File Search store.
1500
+
1501
+ Args:
1502
+ file_name: Name of the file already uploaded via Files API
1503
+ store_name: Name of the File Search store
1504
+ chunking_config: Optional chunking configuration
1505
+ custom_metadata: Optional custom metadata
1506
+
1507
+ Returns:
1508
+ Operation: Long-running operation object. Use wait_for_operation() to wait for completion.
1509
+ """
1510
+ config: Dict[str, Any] = {}
1511
+ if chunking_config:
1512
+ config["chunking_config"] = chunking_config
1513
+ if custom_metadata:
1514
+ config["custom_metadata"] = custom_metadata
1515
+
1516
+ try:
1517
+ log_info(f"Importing file {file_name} to File Search store {store_name}")
1518
+ operation = self.get_client().file_search_stores.import_file(
1519
+ file_search_store_name=store_name,
1520
+ file_name=file_name,
1521
+ config=config or None, # type: ignore[arg-type]
1522
+ )
1523
+ log_info(f"Import initiated for {file_name}")
1524
+ return operation
1525
+ except Exception as e:
1526
+ log_error(f"Error importing file to File Search store: {e}")
1527
+ raise
1528
+
1529
+ async def async_import_file_to_store(
1530
+ self,
1531
+ file_name: str,
1532
+ store_name: str,
1533
+ chunking_config: Optional[Dict[str, Any]] = None,
1534
+ custom_metadata: Optional[List[Dict[str, Any]]] = None,
1535
+ ) -> Any:
1536
+ """
1537
+ Args:
1538
+ file_name: Name of the file already uploaded via Files API
1539
+ store_name: Name of the File Search store
1540
+ chunking_config: Optional chunking configuration
1541
+ custom_metadata: Optional custom metadata
1542
+
1543
+ Returns:
1544
+ Operation: Long-running operation object
1545
+ """
1546
+ config: Dict[str, Any] = {}
1547
+ if chunking_config:
1548
+ config["chunking_config"] = chunking_config
1549
+ if custom_metadata:
1550
+ config["custom_metadata"] = custom_metadata
1551
+
1552
+ try:
1553
+ log_info(f"Importing file {file_name} to File Search store {store_name}")
1554
+ operation = await self.get_client().aio.file_search_stores.import_file(
1555
+ file_search_store_name=store_name,
1556
+ file_name=file_name,
1557
+ config=config or None, # type: ignore[arg-type]
1558
+ )
1559
+ log_info(f"Import initiated for {file_name}")
1560
+ return operation
1561
+ except Exception as e:
1562
+ log_error(f"Error importing file to File Search store: {e}")
1563
+ raise
1564
+
1565
+ def list_documents(self, store_name: str, page_size: int = 20) -> List[Any]:
1566
+ """
1567
+ Args:
1568
+ store_name: Name of the File Search store
1569
+ page_size: Maximum number of documents to return per page
1570
+
1571
+ Returns:
1572
+ List: List of document objects
1573
+ """
1574
+ try:
1575
+ documents = []
1576
+ for doc in self.get_client().file_search_stores.documents.list(
1577
+ parent=store_name, config={"page_size": page_size}
1578
+ ):
1579
+ documents.append(doc)
1580
+ log_debug(f"Found {len(documents)} documents in store {store_name}")
1581
+ return documents
1582
+ except Exception as e:
1583
+ log_error(f"Error listing documents in store {store_name}: {e}")
1584
+ raise
1585
+
1586
+ async def async_list_documents(self, store_name: str, page_size: int = 20) -> List[Any]:
1587
+ """
1588
+ Async version of list_documents.
1589
+
1590
+ Args:
1591
+ store_name: Name of the File Search store
1592
+ page_size: Maximum number of documents to return per page
1593
+
1594
+ Returns:
1595
+ List: List of document objects
1596
+ """
1597
+ try:
1598
+ documents = []
1599
+ # Await the AsyncPager first, then iterate
1600
+ async for doc in await self.get_client().aio.file_search_stores.documents.list(
1601
+ parent=store_name, config={"page_size": page_size}
1602
+ ):
1603
+ documents.append(doc)
1604
+ log_debug(f"Found {len(documents)} documents in store {store_name}")
1605
+ return documents
1606
+ except Exception as e:
1607
+ log_error(f"Error listing documents in store {store_name}: {e}")
1608
+ raise
1609
+
1610
+ def get_document(self, document_name: str) -> Any:
1611
+ """
1612
+ Get a specific document by name.
1613
+
1614
+ Args:
1615
+ document_name: Full name of the document
1616
+ (e.g., 'fileSearchStores/store-123/documents/doc-456')
1617
+
1618
+ Returns:
1619
+ Document object
1620
+ """
1621
+ try:
1622
+ doc = self.get_client().file_search_stores.documents.get(name=document_name)
1623
+ log_debug(f"Retrieved document: {document_name}")
1624
+ return doc
1625
+ except Exception as e:
1626
+ log_error(f"Error getting document {document_name}: {e}")
1627
+ raise
1628
+
1629
+ async def async_get_document(self, document_name: str) -> Any:
1630
+ """
1631
+ Async version of get_document.
1632
+
1633
+ Args:
1634
+ document_name: Full name of the document
1635
+
1636
+ Returns:
1637
+ Document object
1638
+ """
1639
+ try:
1640
+ doc = await self.get_client().aio.file_search_stores.documents.get(name=document_name)
1641
+ log_debug(f"Retrieved document: {document_name}")
1642
+ return doc
1643
+ except Exception as e:
1644
+ log_error(f"Error getting document {document_name}: {e}")
1645
+ raise
1646
+
1647
+ def delete_document(self, document_name: str) -> None:
1648
+ """
1649
+ Delete a document from a File Search store.
1650
+
1651
+ Args:
1652
+ document_name: Full name of the document to delete
1653
+
1654
+ Example:
1655
+ ```python
1656
+ model = Gemini(id="gemini-2.5-flash")
1657
+ model.delete_document("fileSearchStores/store-123/documents/doc-456")
1658
+ ```
1659
+ """
1660
+ try:
1661
+ self.get_client().file_search_stores.documents.delete(name=document_name)
1662
+ log_info(f"Deleted document: {document_name}")
1663
+ except Exception as e:
1664
+ log_error(f"Error deleting document {document_name}: {e}")
1665
+ raise
1666
+
1667
+ async def async_delete_document(self, document_name: str) -> None:
1668
+ """
1669
+ Async version of delete_document.
1670
+
1671
+ Args:
1672
+ document_name: Full name of the document to delete
1673
+ """
1674
+ try:
1675
+ await self.get_client().aio.file_search_stores.documents.delete(name=document_name)
1676
+ log_info(f"Deleted document: {document_name}")
1677
+ except Exception as e:
1678
+ log_error(f"Error deleting document {document_name}: {e}")
1679
+ raise