google-adk 1.5.0__py3-none-any.whl → 1.6.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. google/adk/a2a/converters/event_converter.py +257 -36
  2. google/adk/a2a/converters/part_converter.py +93 -25
  3. google/adk/a2a/converters/request_converter.py +12 -32
  4. google/adk/a2a/converters/utils.py +22 -4
  5. google/adk/a2a/executor/__init__.py +13 -0
  6. google/adk/a2a/executor/a2a_agent_executor.py +260 -0
  7. google/adk/a2a/executor/task_result_aggregator.py +71 -0
  8. google/adk/a2a/logs/__init__.py +13 -0
  9. google/adk/a2a/logs/log_utils.py +349 -0
  10. google/adk/agents/base_agent.py +54 -0
  11. google/adk/agents/llm_agent.py +15 -0
  12. google/adk/agents/remote_a2a_agent.py +532 -0
  13. google/adk/artifacts/in_memory_artifact_service.py +6 -3
  14. google/adk/cli/browser/chunk-EQDQRRRY.js +1 -0
  15. google/adk/cli/browser/chunk-TXJFAAIW.js +2 -0
  16. google/adk/cli/browser/index.html +4 -3
  17. google/adk/cli/browser/main-RXDVX3K6.js +3914 -0
  18. google/adk/cli/browser/polyfills-FFHMD2TL.js +17 -0
  19. google/adk/cli/cli_deploy.py +4 -1
  20. google/adk/cli/cli_eval.py +8 -6
  21. google/adk/cli/cli_tools_click.py +30 -10
  22. google/adk/cli/fast_api.py +120 -5
  23. google/adk/cli/utils/agent_loader.py +12 -0
  24. google/adk/evaluation/agent_evaluator.py +107 -10
  25. google/adk/evaluation/base_eval_service.py +157 -0
  26. google/adk/evaluation/constants.py +20 -0
  27. google/adk/evaluation/eval_case.py +3 -3
  28. google/adk/evaluation/eval_metrics.py +39 -0
  29. google/adk/evaluation/evaluation_generator.py +1 -1
  30. google/adk/evaluation/final_response_match_v2.py +230 -0
  31. google/adk/evaluation/llm_as_judge.py +141 -0
  32. google/adk/evaluation/llm_as_judge_utils.py +48 -0
  33. google/adk/evaluation/metric_evaluator_registry.py +89 -0
  34. google/adk/evaluation/response_evaluator.py +38 -211
  35. google/adk/evaluation/safety_evaluator.py +54 -0
  36. google/adk/evaluation/trajectory_evaluator.py +16 -2
  37. google/adk/evaluation/vertex_ai_eval_facade.py +147 -0
  38. google/adk/events/event.py +2 -4
  39. google/adk/flows/llm_flows/base_llm_flow.py +2 -0
  40. google/adk/memory/in_memory_memory_service.py +3 -2
  41. google/adk/models/lite_llm.py +50 -10
  42. google/adk/runners.py +27 -10
  43. google/adk/sessions/database_session_service.py +25 -7
  44. google/adk/sessions/in_memory_session_service.py +5 -1
  45. google/adk/sessions/vertex_ai_session_service.py +67 -42
  46. google/adk/tools/bigquery/config.py +11 -1
  47. google/adk/tools/bigquery/query_tool.py +306 -12
  48. google/adk/tools/enterprise_search_tool.py +2 -2
  49. google/adk/tools/function_tool.py +7 -1
  50. google/adk/tools/google_search_tool.py +1 -1
  51. google/adk/tools/mcp_tool/mcp_session_manager.py +44 -30
  52. google/adk/tools/mcp_tool/mcp_tool.py +44 -7
  53. google/adk/version.py +1 -1
  54. {google_adk-1.5.0.dist-info → google_adk-1.6.1.dist-info}/METADATA +6 -4
  55. {google_adk-1.5.0.dist-info → google_adk-1.6.1.dist-info}/RECORD +58 -42
  56. google/adk/cli/browser/main-JAAWEV7F.js +0 -92
  57. google/adk/cli/browser/polyfills-B6TNHZQ6.js +0 -17
  58. {google_adk-1.5.0.dist-info → google_adk-1.6.1.dist-info}/WHEEL +0 -0
  59. {google_adk-1.5.0.dist-info → google_adk-1.6.1.dist-info}/entry_points.txt +0 -0
  60. {google_adk-1.5.0.dist-info → google_adk-1.6.1.dist-info}/licenses/LICENSE +0 -0
google/adk/runners.py CHANGED
@@ -307,27 +307,44 @@ class Runner:
307
307
  root_agent = self.agent
308
308
  invocation_context.agent = self._find_agent_to_run(session, root_agent)
309
309
 
310
+ # Pre-processing for live streaming tools
311
+ # Inspect the tool's parameters to find if it uses LiveRequestQueue
310
312
  invocation_context.active_streaming_tools = {}
311
313
  # TODO(hangfei): switch to use canonical_tools.
312
314
  # for shell agents, there is no tools associated with it so we should skip.
313
315
  if hasattr(invocation_context.agent, 'tools'):
314
- for tool in invocation_context.agent.tools:
315
- # replicate a LiveRequestQueue for streaming tools that relis on
316
- # LiveRequestQueue
317
- from typing import get_type_hints
316
+ import inspect
318
317
 
319
- type_hints = get_type_hints(tool)
320
- for arg_type in type_hints.values():
321
- if arg_type is LiveRequestQueue:
318
+ for tool in invocation_context.agent.tools:
319
+ # We use `inspect.signature()` to examine the tool's underlying function (`tool.func`).
320
+ # This approach is deliberately chosen over `typing.get_type_hints()` for robustness.
321
+ #
322
+ # The Problem with `get_type_hints()`:
323
+ # `get_type_hints()` attempts to resolve forward-referenced (string-based) type
324
+ # annotations. This resolution can easily fail with a `NameError` (e.g., "Union not found")
325
+ # if the type isn't available in the scope where `get_type_hints()` is called.
326
+ # This is a common and brittle issue in framework code that inspects functions
327
+ # defined in separate user modules.
328
+ #
329
+ # Why `inspect.signature()` is Better Here:
330
+ # `inspect.signature()` does NOT resolve the annotations; it retrieves the raw
331
+ # annotation object as it was defined on the function. This allows us to
332
+ # perform a direct and reliable identity check (`param.annotation is LiveRequestQueue`)
333
+ # without risking a `NameError`.
334
+ callable_to_inspect = tool.func if hasattr(tool, 'func') else tool
335
+ # Ensure the target is actually callable before inspecting to avoid errors.
336
+ if not callable(callable_to_inspect):
337
+ continue
338
+ for param in inspect.signature(callable_to_inspect).parameters.values():
339
+ if param.annotation is LiveRequestQueue:
322
340
  if not invocation_context.active_streaming_tools:
323
341
  invocation_context.active_streaming_tools = {}
324
- active_streaming_tools = ActiveStreamingTool(
342
+ active_streaming_tool = ActiveStreamingTool(
325
343
  stream=LiveRequestQueue()
326
344
  )
327
345
  invocation_context.active_streaming_tools[tool.__name__] = (
328
- active_streaming_tools
346
+ active_streaming_tool
329
347
  )
330
-
331
348
  async for event in invocation_context.agent.run_live(invocation_context):
332
349
  await self.session_service.append_event(session=session, event=event)
333
350
  yield event
@@ -15,6 +15,7 @@ from __future__ import annotations
15
15
 
16
16
  import copy
17
17
  from datetime import datetime
18
+ from datetime import timezone
18
19
  import json
19
20
  import logging
20
21
  from typing import Any
@@ -144,6 +145,21 @@ class StorageSession(Base):
144
145
  def __repr__(self):
145
146
  return f"<StorageSession(id={self.id}, update_time={self.update_time})>"
146
147
 
148
+ @property
149
+ def _dialect_name(self) -> Optional[str]:
150
+ session = inspect(self).session
151
+ return session.bind.dialect.name if session else None
152
+
153
+ @property
154
+ def update_timestamp_tz(self) -> datetime:
155
+ """Returns the time zone aware update timestamp."""
156
+ if self._dialect_name == "sqlite":
157
+ # SQLite does not support timezone. SQLAlchemy returns a naive datetime
158
+ # object without timezone information. We need to convert it to UTC
159
+ # manually.
160
+ return self.update_time.replace(tzinfo=timezone.utc).timestamp()
161
+ return self.update_time.timestamp()
162
+
147
163
 
148
164
  class StorageEvent(Base):
149
165
  """Represents an event stored in the database."""
@@ -412,7 +428,7 @@ class DatabaseSessionService(BaseSessionService):
412
428
  user_id=str(storage_session.user_id),
413
429
  id=str(storage_session.id),
414
430
  state=merged_state,
415
- last_update_time=storage_session.update_time.timestamp(),
431
+ last_update_time=storage_session.update_timestamp_tz,
416
432
  )
417
433
  return session
418
434
 
@@ -443,7 +459,9 @@ class DatabaseSessionService(BaseSessionService):
443
459
 
444
460
  storage_events = (
445
461
  session_factory.query(StorageEvent)
462
+ .filter(StorageEvent.app_name == app_name)
446
463
  .filter(StorageEvent.session_id == storage_session.id)
464
+ .filter(StorageEvent.user_id == user_id)
447
465
  .filter(timestamp_filter)
448
466
  .order_by(StorageEvent.timestamp.desc())
449
467
  .limit(
@@ -473,7 +491,7 @@ class DatabaseSessionService(BaseSessionService):
473
491
  user_id=user_id,
474
492
  id=session_id,
475
493
  state=merged_state,
476
- last_update_time=storage_session.update_time.timestamp(),
494
+ last_update_time=storage_session.update_timestamp_tz,
477
495
  )
478
496
  session.events = [e.to_event() for e in reversed(storage_events)]
479
497
  return session
@@ -496,7 +514,7 @@ class DatabaseSessionService(BaseSessionService):
496
514
  user_id=user_id,
497
515
  id=storage_session.id,
498
516
  state={},
499
- last_update_time=storage_session.update_time.timestamp(),
517
+ last_update_time=storage_session.update_timestamp_tz,
500
518
  )
501
519
  sessions.append(session)
502
520
  return ListSessionsResponse(sessions=sessions)
@@ -529,13 +547,13 @@ class DatabaseSessionService(BaseSessionService):
529
547
  StorageSession, (session.app_name, session.user_id, session.id)
530
548
  )
531
549
 
532
- if storage_session.update_time.timestamp() > session.last_update_time:
550
+ if storage_session.update_timestamp_tz > session.last_update_time:
533
551
  raise ValueError(
534
552
  "The last_update_time provided in the session object"
535
553
  f" {datetime.fromtimestamp(session.last_update_time):'%Y-%m-%d %H:%M:%S'} is"
536
554
  " earlier than the update_time in the storage_session"
537
- f" {storage_session.update_time:'%Y-%m-%d %H:%M:%S'}. Please check"
538
- " if it is a stale session."
555
+ f" {datetime.fromtimestamp(storage_session.update_timestamp_tz):'%Y-%m-%d %H:%M:%S'}."
556
+ " Please check if it is a stale session."
539
557
  )
540
558
 
541
559
  # Fetch states from storage
@@ -577,7 +595,7 @@ class DatabaseSessionService(BaseSessionService):
577
595
  session_factory.refresh(storage_session)
578
596
 
579
597
  # Update timestamp with commit time
580
- session.last_update_time = storage_session.update_time.timestamp()
598
+ session.last_update_time = storage_session.update_timestamp_tz
581
599
 
582
600
  # Also update the in-memory session
583
601
  await super().append_event(session=session, event=event)
@@ -33,7 +33,11 @@ logger = logging.getLogger('google_adk.' + __name__)
33
33
 
34
34
 
35
35
  class InMemorySessionService(BaseSessionService):
36
- """An in-memory implementation of the session service."""
36
+ """An in-memory implementation of the session service.
37
+
38
+ It is not suitable for multi-threaded production environments. Use it for
39
+ testing and development only.
40
+ """
37
41
 
38
42
  def __init__(self):
39
43
  # A map from app name to a map from user ID to a map from session ID to
@@ -25,6 +25,11 @@ import urllib.parse
25
25
 
26
26
  from dateutil import parser
27
27
  from google.genai.errors import ClientError
28
+ from tenacity import retry
29
+ from tenacity import retry_if_result
30
+ from tenacity import RetryError
31
+ from tenacity import stop_after_attempt
32
+ from tenacity import wait_exponential
28
33
  from typing_extensions import override
29
34
 
30
35
  from google import genai
@@ -64,6 +69,20 @@ class VertexAiSessionService(BaseSessionService):
64
69
  self._location = location
65
70
  self._agent_engine_id = agent_engine_id
66
71
 
72
+ async def _get_session_api_response(
73
+ self,
74
+ reasoning_engine_id: str,
75
+ session_id: str,
76
+ api_client: genai.ApiClient,
77
+ ):
78
+ get_session_api_response = await api_client.async_request(
79
+ http_method='GET',
80
+ path=f'reasoningEngines/{reasoning_engine_id}/sessions/{session_id}',
81
+ request_dict={},
82
+ )
83
+ get_session_api_response = _convert_api_response(get_session_api_response)
84
+ return get_session_api_response
85
+
67
86
  @override
68
87
  async def create_session(
69
88
  self,
@@ -95,66 +114,68 @@ class VertexAiSessionService(BaseSessionService):
95
114
 
96
115
  session_id = api_response['name'].split('/')[-3]
97
116
  operation_id = api_response['name'].split('/')[-1]
98
-
99
- max_retry_attempt = 5
100
-
101
117
  if _is_vertex_express_mode(self._project, self._location):
102
118
  # Express mode doesn't support LRO, so we need to poll
103
119
  # the session resource.
104
120
  # TODO: remove this once LRO polling is supported in Express mode.
105
- for i in range(max_retry_attempt):
121
+ @retry(
122
+ stop=stop_after_attempt(5),
123
+ wait=wait_exponential(multiplier=1, min=1, max=3),
124
+ retry=retry_if_result(lambda response: not response),
125
+ reraise=True,
126
+ )
127
+ async def _poll_session_resource():
106
128
  try:
107
- await api_client.async_request(
108
- http_method='GET',
109
- path=(
110
- f'reasoningEngines/{reasoning_engine_id}/sessions/{session_id}'
111
- ),
112
- request_dict={},
129
+ return await self._get_session_api_response(
130
+ reasoning_engine_id, session_id, api_client
113
131
  )
114
- break
115
- except ClientError as e:
116
- logger.info('Polling for session %s: %s', session_id, e)
117
- # Add slight exponential backoff to avoid excessive polling.
118
- await asyncio.sleep(1 + 0.5 * i)
119
- else:
120
- raise TimeoutError('Session creation failed.')
132
+ except ClientError:
133
+ logger.info(f'Polling session resource')
134
+ return None
135
+
136
+ try:
137
+ await _poll_session_resource()
138
+ except Exception as exc:
139
+ raise ValueError('Failed to create session.') from exc
121
140
  else:
122
- lro_response = None
123
- for _ in range(max_retry_attempt):
141
+
142
+ @retry(
143
+ stop=stop_after_attempt(5),
144
+ wait=wait_exponential(multiplier=1, min=1, max=3),
145
+ retry=retry_if_result(
146
+ lambda response: not response.get('done', False),
147
+ ),
148
+ reraise=True,
149
+ )
150
+ async def _poll_lro():
124
151
  lro_response = await api_client.async_request(
125
152
  http_method='GET',
126
153
  path=f'operations/{operation_id}',
127
154
  request_dict={},
128
155
  )
129
156
  lro_response = _convert_api_response(lro_response)
157
+ return lro_response
130
158
 
131
- if lro_response.get('done', None):
132
- break
133
-
134
- await asyncio.sleep(1)
135
-
136
- if lro_response is None or not lro_response.get('done', None):
159
+ try:
160
+ await _poll_lro()
161
+ except RetryError as exc:
137
162
  raise TimeoutError(
138
163
  f'Timeout waiting for operation {operation_id} to complete.'
139
- )
164
+ ) from exc
165
+ except Exception as exc:
166
+ raise ValueError('Failed to create session.') from exc
140
167
 
141
- # Get session resource
142
- get_session_api_response = await api_client.async_request(
143
- http_method='GET',
144
- path=f'reasoningEngines/{reasoning_engine_id}/sessions/{session_id}',
145
- request_dict={},
168
+ get_session_api_response = await self._get_session_api_response(
169
+ reasoning_engine_id, session_id, api_client
146
170
  )
147
- get_session_api_response = _convert_api_response(get_session_api_response)
148
-
149
- update_timestamp = isoparse(
150
- get_session_api_response['updateTime']
151
- ).timestamp()
152
171
  session = Session(
153
172
  app_name=str(app_name),
154
173
  user_id=str(user_id),
155
174
  id=str(session_id),
156
175
  state=get_session_api_response.get('sessionState', {}),
157
- last_update_time=update_timestamp,
176
+ last_update_time=isoparse(
177
+ get_session_api_response['updateTime']
178
+ ).timestamp(),
158
179
  )
159
180
  return session
160
181
 
@@ -171,12 +192,12 @@ class VertexAiSessionService(BaseSessionService):
171
192
  api_client = self._get_api_client()
172
193
 
173
194
  # Get session resource
174
- get_session_api_response = await api_client.async_request(
175
- http_method='GET',
176
- path=f'reasoningEngines/{reasoning_engine_id}/sessions/{session_id}',
177
- request_dict={},
195
+ get_session_api_response = await self._get_session_api_response(
196
+ reasoning_engine_id, session_id, api_client
178
197
  )
179
- get_session_api_response = _convert_api_response(get_session_api_response)
198
+
199
+ if get_session_api_response['userId'] != user_id:
200
+ raise ValueError(f'Session not found: {session_id}')
180
201
 
181
202
  session_id = get_session_api_response['name'].split('/')[-1]
182
203
  update_timestamp = isoparse(
@@ -360,6 +381,7 @@ def _convert_event_to_json(event: Event) -> Dict[str, Any]:
360
381
  'turn_complete': event.turn_complete,
361
382
  'interrupted': event.interrupted,
362
383
  'branch': event.branch,
384
+ 'custom_metadata': event.custom_metadata,
363
385
  'long_running_tool_ids': (
364
386
  list(event.long_running_tool_ids)
365
387
  if event.long_running_tool_ids
@@ -439,6 +461,9 @@ def _from_api_event(api_event: Dict[str, Any]) -> Event:
439
461
  event.turn_complete = api_event['eventMetadata'].get('turnComplete', None)
440
462
  event.interrupted = api_event['eventMetadata'].get('interrupted', None)
441
463
  event.branch = api_event['eventMetadata'].get('branch', None)
464
+ event.custom_metadata = api_event['eventMetadata'].get(
465
+ 'customMetadata', None
466
+ )
442
467
  event.grounding_metadata = _session_util.decode_grounding_metadata(
443
468
  api_event['eventMetadata'].get('groundingMetadata', None)
444
469
  )
@@ -26,10 +26,20 @@ class WriteMode(Enum):
26
26
 
27
27
  BLOCKED = 'blocked'
28
28
  """No write operations are allowed.
29
-
29
+
30
30
  This mode implies that only read (i.e. SELECT query) operations are allowed.
31
31
  """
32
32
 
33
+ PROTECTED = 'protected'
34
+ """Only protected write operations are allowed in a BigQuery session.
35
+
36
+ In this mode write operations in the anonymous dataset of a BigQuery session
37
+ are allowed. For example, a temporary table can be created, manipulated and
38
+ deleted in the anonymous dataset during Agent interaction, while protecting
39
+ permanent tables from being modified or deleted. To learn more about BigQuery
40
+ sessions, see https://cloud.google.com/bigquery/docs/sessions-intro.
41
+ """
42
+
33
43
  ALLOWED = 'allowed'
34
44
  """All write operations are allowed."""
35
45