agent-starter-pack 0.18.2-py3-none-any.whl → 0.21.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_starter_pack/agents/{langgraph_base_react → adk_a2a_base}/.template/templateconfig.yaml +5 -12
- agent_starter_pack/agents/adk_a2a_base/README.md +37 -0
- agent_starter_pack/{frontends/streamlit/frontend/style/app_markdown.py → agents/adk_a2a_base/app/__init__.py} +3 -23
- agent_starter_pack/agents/adk_a2a_base/app/agent.py +70 -0
- agent_starter_pack/agents/adk_a2a_base/notebooks/adk_a2a_app_testing.ipynb +583 -0
- agent_starter_pack/agents/{crewai_coding_crew/notebooks/evaluating_crewai_agent.ipynb → adk_a2a_base/notebooks/evaluating_adk_agent.ipynb} +163 -199
- agent_starter_pack/agents/adk_a2a_base/tests/integration/test_agent.py +58 -0
- agent_starter_pack/agents/adk_base/app/__init__.py +2 -2
- agent_starter_pack/agents/adk_base/app/agent.py +3 -0
- agent_starter_pack/agents/adk_base/notebooks/adk_app_testing.ipynb +13 -28
- agent_starter_pack/agents/adk_live/app/__init__.py +17 -0
- agent_starter_pack/agents/adk_live/app/agent.py +3 -0
- agent_starter_pack/agents/agentic_rag/app/__init__.py +2 -2
- agent_starter_pack/agents/agentic_rag/app/agent.py +3 -0
- agent_starter_pack/agents/agentic_rag/notebooks/adk_app_testing.ipynb +13 -28
- agent_starter_pack/agents/{crewai_coding_crew → langgraph_base}/.template/templateconfig.yaml +12 -9
- agent_starter_pack/agents/langgraph_base/README.md +30 -0
- agent_starter_pack/agents/langgraph_base/app/__init__.py +17 -0
- agent_starter_pack/agents/{langgraph_base_react → langgraph_base}/app/agent.py +4 -4
- agent_starter_pack/agents/{langgraph_base_react → langgraph_base}/tests/integration/test_agent.py +1 -1
- agent_starter_pack/base_template/.gitignore +4 -2
- agent_starter_pack/base_template/Makefile +110 -16
- agent_starter_pack/base_template/README.md +97 -12
- agent_starter_pack/base_template/deployment/terraform/dev/apis.tf +4 -6
- agent_starter_pack/base_template/deployment/terraform/dev/providers.tf +5 -1
- agent_starter_pack/base_template/deployment/terraform/dev/variables.tf +5 -3
- agent_starter_pack/base_template/deployment/terraform/dev/{% if cookiecutter.is_adk %}telemetry.tf{% else %}unused_telemetry.tf{% endif %} +193 -0
- agent_starter_pack/base_template/deployment/terraform/github.tf +16 -9
- agent_starter_pack/base_template/deployment/terraform/locals.tf +7 -7
- agent_starter_pack/base_template/deployment/terraform/providers.tf +5 -1
- agent_starter_pack/base_template/deployment/terraform/sql/completions.sql +138 -0
- agent_starter_pack/base_template/deployment/terraform/storage.tf +0 -9
- agent_starter_pack/base_template/deployment/terraform/variables.tf +15 -19
- agent_starter_pack/base_template/deployment/terraform/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}build_triggers.tf{% else %}unused_build_triggers.tf{% endif %} +20 -22
- agent_starter_pack/base_template/deployment/terraform/{% if cookiecutter.is_adk %}telemetry.tf{% else %}unused_telemetry.tf{% endif %} +206 -0
- agent_starter_pack/base_template/pyproject.toml +5 -17
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'github_actions' %}.github{% else %}unused_github{% endif %}/workflows/deploy-to-prod.yaml +19 -4
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'github_actions' %}.github{% else %}unused_github{% endif %}/workflows/staging.yaml +36 -11
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}.cloudbuild{% else %}unused_.cloudbuild{% endif %}/deploy-to-prod.yaml +24 -5
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}.cloudbuild{% else %}unused_.cloudbuild{% endif %}/staging.yaml +44 -9
- agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/telemetry.py +96 -0
- agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/{utils → app_utils}/typing.py +4 -6
- agent_starter_pack/{agents/crewai_coding_crew/app/crew/config/agents.yaml → base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}converters{% else %}unused_converters{% endif %}/__init__.py} +9 -23
- agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}converters{% else %}unused_converters{% endif %}/part_converter.py +138 -0
- agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/__init__.py +13 -0
- agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/a2a_agent_executor.py +265 -0
- agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/task_result_aggregator.py +152 -0
- agent_starter_pack/cli/commands/create.py +40 -4
- agent_starter_pack/cli/commands/enhance.py +1 -1
- agent_starter_pack/cli/commands/register_gemini_enterprise.py +1070 -0
- agent_starter_pack/cli/main.py +2 -0
- agent_starter_pack/cli/utils/cicd.py +20 -4
- agent_starter_pack/cli/utils/template.py +257 -25
- agent_starter_pack/deployment_targets/agent_engine/tests/integration/test_agent_engine_app.py +113 -16
- agent_starter_pack/deployment_targets/agent_engine/tests/load_test/README.md +2 -2
- agent_starter_pack/deployment_targets/agent_engine/tests/load_test/load_test.py +178 -9
- agent_starter_pack/deployment_targets/agent_engine/tests/{% if cookiecutter.is_a2a %}helpers.py{% else %}unused_helpers.py{% endif %} +138 -0
- agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/agent_engine_app.py +193 -307
- agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/app_utils/deploy.py +414 -0
- agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/{utils → app_utils}/{% if cookiecutter.is_adk_live %}expose_app.py{% else %}unused_expose_app.py{% endif %} +13 -14
- agent_starter_pack/deployment_targets/cloud_run/Dockerfile +4 -1
- agent_starter_pack/deployment_targets/cloud_run/deployment/terraform/dev/service.tf +85 -86
- agent_starter_pack/deployment_targets/cloud_run/deployment/terraform/service.tf +139 -107
- agent_starter_pack/deployment_targets/cloud_run/tests/integration/test_server_e2e.py +228 -12
- agent_starter_pack/deployment_targets/cloud_run/tests/load_test/README.md +4 -4
- agent_starter_pack/deployment_targets/cloud_run/tests/load_test/load_test.py +92 -12
- agent_starter_pack/deployment_targets/cloud_run/{{cookiecutter.agent_directory}}/{server.py → fast_api_app.py} +194 -121
- agent_starter_pack/frontends/adk_live_react/frontend/package-lock.json +18 -18
- agent_starter_pack/frontends/adk_live_react/frontend/src/multimodal-live-types.ts +5 -3
- agent_starter_pack/resources/docs/adk-cheatsheet.md +198 -41
- agent_starter_pack/resources/locks/uv-adk_a2a_base-agent_engine.lock +4966 -0
- agent_starter_pack/resources/locks/uv-adk_a2a_base-cloud_run.lock +5011 -0
- agent_starter_pack/resources/locks/uv-adk_base-agent_engine.lock +1443 -709
- agent_starter_pack/resources/locks/uv-adk_base-cloud_run.lock +1058 -874
- agent_starter_pack/resources/locks/uv-adk_live-agent_engine.lock +1443 -709
- agent_starter_pack/resources/locks/uv-adk_live-cloud_run.lock +1058 -874
- agent_starter_pack/resources/locks/uv-agentic_rag-agent_engine.lock +1568 -749
- agent_starter_pack/resources/locks/uv-agentic_rag-cloud_run.lock +1123 -929
- agent_starter_pack/resources/locks/{uv-langgraph_base_react-agent_engine.lock → uv-langgraph_base-agent_engine.lock} +1714 -1689
- agent_starter_pack/resources/locks/{uv-langgraph_base_react-cloud_run.lock → uv-langgraph_base-cloud_run.lock} +1285 -2374
- agent_starter_pack/utils/watch_and_rebuild.py +1 -1
- {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/METADATA +3 -6
- {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/RECORD +89 -93
- agent_starter_pack-0.21.0.dist-info/entry_points.txt +2 -0
- llm.txt +4 -5
- agent_starter_pack/agents/crewai_coding_crew/README.md +0 -34
- agent_starter_pack/agents/crewai_coding_crew/app/agent.py +0 -47
- agent_starter_pack/agents/crewai_coding_crew/app/crew/config/tasks.yaml +0 -37
- agent_starter_pack/agents/crewai_coding_crew/app/crew/crew.py +0 -71
- agent_starter_pack/agents/crewai_coding_crew/tests/integration/test_agent.py +0 -47
- agent_starter_pack/agents/langgraph_base_react/README.md +0 -9
- agent_starter_pack/agents/langgraph_base_react/notebooks/evaluating_langgraph_agent.ipynb +0 -1574
- agent_starter_pack/base_template/deployment/terraform/dev/log_sinks.tf +0 -69
- agent_starter_pack/base_template/deployment/terraform/log_sinks.tf +0 -79
- agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/utils/tracing.py +0 -155
- agent_starter_pack/cli/utils/register_gemini_enterprise.py +0 -406
- agent_starter_pack/deployment_targets/agent_engine/deployment/terraform/{% if not cookiecutter.is_adk_live %}service.tf{% else %}unused_service.tf{% endif %} +0 -82
- agent_starter_pack/deployment_targets/agent_engine/notebooks/intro_agent_engine.ipynb +0 -1025
- agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/utils/deployment.py +0 -99
- agent_starter_pack/frontends/streamlit/frontend/side_bar.py +0 -214
- agent_starter_pack/frontends/streamlit/frontend/streamlit_app.py +0 -265
- agent_starter_pack/frontends/streamlit/frontend/utils/chat_utils.py +0 -67
- agent_starter_pack/frontends/streamlit/frontend/utils/local_chat_history.py +0 -127
- agent_starter_pack/frontends/streamlit/frontend/utils/message_editing.py +0 -59
- agent_starter_pack/frontends/streamlit/frontend/utils/multimodal_utils.py +0 -217
- agent_starter_pack/frontends/streamlit/frontend/utils/stream_handler.py +0 -310
- agent_starter_pack/frontends/streamlit/frontend/utils/title_summary.py +0 -94
- agent_starter_pack/resources/locks/uv-crewai_coding_crew-agent_engine.lock +0 -6650
- agent_starter_pack/resources/locks/uv-crewai_coding_crew-cloud_run.lock +0 -7825
- agent_starter_pack-0.18.2.dist-info/entry_points.txt +0 -3
- /agent_starter_pack/agents/{crewai_coding_crew → langgraph_base}/notebooks/evaluating_langgraph_agent.ipynb +0 -0
- /agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/{utils → app_utils}/gcs.py +0 -0
- {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/WHEEL +0 -0
- {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/licenses/LICENSE +0 -0
agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}.cloudbuild{% else %}unused_.cloudbuild{% endif %}/staging.yaml
CHANGED

@@ -39,6 +39,18 @@ steps:
       - 'PATH=/usr/local/bin:/usr/bin:~/.local/bin'
 {%- endif %}
 {%- if cookiecutter.deployment_target == 'cloud_run' %}
+  {%- if cookiecutter.is_a2a %}
+  # Extract version from pyproject.toml
+  - name: "gcr.io/cloud-builders/gcloud-slim"
+    id: extract-version
+    entrypoint: /bin/bash
+    args:
+      - "-c"
+      - |
+        VERSION=$(awk -F'"' '/^version = / {print $$2}' pyproject.toml || echo '0.0.0')
+        echo "$${VERSION}" > /workspace/agent_version.txt
+
+  {%- endif %}
   # Build and Push
   - name: "gcr.io/cloud-builders/docker"
     args:

@@ -48,6 +60,10 @@ steps:
         "$_REGION-docker.pkg.dev/$PROJECT_ID/$_ARTIFACT_REGISTRY_REPO_NAME/$_CONTAINER_NAME",
         "--build-arg",
         "COMMIT_SHA=$COMMIT_SHA",
+        {%- if cookiecutter.is_a2a %}
+        "--build-arg",
+        "AGENT_VERSION=$(cat /workspace/agent_version.txt || echo '0.0.0')",
+        {%- endif %}
         ".",
       ]
   - name: "gcr.io/cloud-builders/docker"

@@ -91,6 +107,18 @@ steps:
       - |
         echo $(gcloud auth print-identity-token -q) > id_token.txt
 {%- elif cookiecutter.deployment_target == 'agent_engine' %}
+  {%- if cookiecutter.is_a2a %}
+  # Extract version from pyproject.toml
+  - name: "gcr.io/cloud-builders/gcloud-slim"
+    id: extract-version
+    entrypoint: /bin/bash
+    args:
+      - "-c"
+      - |
+        VERSION=$(awk -F'"' '/^version = / {print $$2}' pyproject.toml || echo '0.0.0')
+        echo "$${VERSION}" > /workspace/agent_version.txt
+
+  {%- endif %}
   - name: "python:3.12-slim"
     id: install-dependencies
     entrypoint: /bin/bash

@@ -107,12 +135,19 @@ steps:
     args:
       - "-c"
       - |
-        …
-        …
+        {%- if cookiecutter.is_a2a %}
+        AGENT_VERSION=$(cat /workspace/agent_version.txt || echo '0.0.0')
+        {%- endif %}
+        uv export --no-hashes --no-sources --no-header --no-dev --no-emit-project --no-annotate --locked > {{cookiecutter.agent_directory}}/app_utils/.requirements.txt
+        uv run python -m {{cookiecutter.agent_directory}}.app_utils.deploy \
           --project ${_STAGING_PROJECT_ID} \
           --location ${_REGION} \
-          --…
-          --…
+          --source-packages=./{{cookiecutter.agent_directory}} \
+          --entrypoint-module={{cookiecutter.agent_directory}}.agent_engine_app \
+          --entrypoint-object=agent_engine \
+          --requirements-file={{cookiecutter.agent_directory}}/app_utils/.requirements.txt \
+          --service-account=${_APP_SERVICE_ACCOUNT_STAGING} \
+          --set-env-vars="COMMIT_SHA=${COMMIT_SHA}{%- if cookiecutter.is_a2a %},AGENT_VERSION=$${AGENT_VERSION}{%- endif %},LOGS_BUCKET_NAME=${_LOGS_BUCKET_NAME_STAGING}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID=${_DATA_STORE_ID_STAGING},DATA_STORE_REGION=${_DATA_STORE_REGION}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX=${_VECTOR_SEARCH_INDEX_STAGING},VECTOR_SEARCH_INDEX_ENDPOINT=${_VECTOR_SEARCH_INDEX_ENDPOINT_STAGING},VECTOR_SEARCH_BUCKET=${_VECTOR_SEARCH_BUCKET_STAGING}{%- endif %}{%- endif %}"
     env:
       - 'PATH=/usr/local/bin:/usr/bin:~/.local/bin'
 

@@ -175,7 +210,7 @@ steps:
       - |
         export _ID_TOKEN=$(cat id_token.txt)
         export _STAGING_URL=$(cat staging_url.txt)
-        pip install locust==2.31.1 --user
+        pip install locust==2.31.1{%- if cookiecutter.is_a2a %} a2a-sdk~=0.3.9{%- endif %} --user
         locust -f tests/load_test/load_test.py \
           --headless \
           -H $$_STAGING_URL \

@@ -192,7 +227,7 @@ steps:
       - "-c"
       - |
         # Start expose app in remote mode (uses deployment_metadata.json by default)
-        uv run python -m {{cookiecutter.agent_directory}}.…
+        uv run python -m {{cookiecutter.agent_directory}}.app_utils.expose_app --mode remote &
         EXPOSE_PID=$$!
 
         # Wait for expose app to be ready

@@ -243,10 +278,10 @@ steps:
       - "-c"
       - |
         export _TIMESTAMP=$(date +%Y%m%d-%H%M%S)
-        gsutil -m cp -r tests/load_test/.results gs://${…
+        gsutil -m cp -r tests/load_test/.results gs://${_LOGS_BUCKET_NAME_STAGING}/load-test-results/results-$${_TIMESTAMP}
         echo "_________________________________________________________________________"
-        echo "Load test results copied to gs://${…
-        echo "HTTP link: https://console.cloud.google.com/storage/browser/${…
+        echo "Load test results copied to gs://${_LOGS_BUCKET_NAME_STAGING}/load-test-results/results-$${_TIMESTAMP}"
+        echo "HTTP link: https://console.cloud.google.com/storage/browser/${_LOGS_BUCKET_NAME_STAGING}/load-test-results/results-$${_TIMESTAMP}"
         echo "_________________________________________________________________________"
 
         # Trigger Prod Deployment
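A note on the new extract-version step: the doubled `$$` is Cloud Build's escape for a literal `$`, so `${VERSION}` is expanded by the step's shell rather than by Cloud Build substitutions. As a reading aid, here is the same parse expressed in Python; this is a sketch only (the pipeline runs the awk one-liner), with the file name and `0.0.0` fallback mirroring the diff:

```python
# Reading aid only: Python equivalent of
#   awk -F'"' '/^version = / {print $2}' pyproject.toml || echo '0.0.0'
import re


def extract_version(pyproject_path: str = "pyproject.toml") -> str:
    """Return the top-level `version = "..."` value, or "0.0.0" as a fallback."""
    try:
        with open(pyproject_path, encoding="utf-8") as f:
            for line in f:
                # awk splits on double quotes and prints the second field;
                # this regex captures the same span.
                match = re.match(r'^version = "([^"]*)"', line)
                if match:
                    return match.group(1)
    except OSError:
        pass
    return "0.0.0"


print(extract_version())  # e.g. "0.21.0"
```
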
agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/telemetry.py
ADDED

@@ -0,0 +1,96 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{%- if cookiecutter.is_adk %}
+import logging
+import os
+{%- if cookiecutter.is_adk and cookiecutter.is_a2a %}
+
+import google.auth
+from google.adk.cli.adk_web_server import _setup_instrumentation_lib_if_installed
+from google.adk.telemetry.google_cloud import get_gcp_exporters, get_gcp_resource
+from google.adk.telemetry.setup import maybe_set_otel_providers
+{%- endif %}
+
+
+def setup_telemetry() -> str | None:
+    """Configure OpenTelemetry and GenAI telemetry with GCS upload."""
+    {%- if cookiecutter.deployment_target == 'agent_engine' %}
+    os.environ.setdefault("GOOGLE_CLOUD_AGENT_ENGINE_ENABLE_TELEMETRY", "true")
+    {%- endif %}
+
+    bucket = os.environ.get("LOGS_BUCKET_NAME")
+    capture_content = os.environ.get(
+        "OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT", "false"
+    )
+    if bucket and capture_content != "false":
+        logging.info("Setting up GenAI telemetry with GCS upload...")
+        os.environ["OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"] = "NO_CONTENT"
+        os.environ.setdefault("OTEL_INSTRUMENTATION_GENAI_UPLOAD_FORMAT", "jsonl")
+        os.environ.setdefault("OTEL_INSTRUMENTATION_GENAI_COMPLETION_HOOK", "upload")
+        os.environ.setdefault(
+            "OTEL_SEMCONV_STABILITY_OPT_IN", "gen_ai_latest_experimental"
+        )
+        commit_sha = os.environ.get("COMMIT_SHA", "dev")
+        os.environ.setdefault(
+            "OTEL_RESOURCE_ATTRIBUTES",
+            f"service.namespace={{cookiecutter.project_name}},service.version={commit_sha}",
+        )
+        path = os.environ.get("GENAI_TELEMETRY_PATH", "completions")
+        os.environ.setdefault(
+            "OTEL_INSTRUMENTATION_GENAI_UPLOAD_BASE_PATH",
+            f"gs://{bucket}/{path}",
+        )
+        {%- if cookiecutter.is_adk and cookiecutter.is_a2a %}
+
+        # Set up OpenTelemetry exporters for Cloud Trace and Cloud Logging
+        credentials, project_id = google.auth.default()
+        otel_hooks = get_gcp_exporters(
+            enable_cloud_tracing=True,
+            enable_cloud_metrics=False,
+            enable_cloud_logging=True,
+            google_auth=(credentials, project_id),
+        )
+        otel_resource = get_gcp_resource(project_id)
+        maybe_set_otel_providers(
+            otel_hooks_to_setup=[otel_hooks],
+            otel_resource=otel_resource,
+        )
+
+        # Set up GenAI SDK instrumentation
+        _setup_instrumentation_lib_if_installed()
+        {%- endif %}
+
+    return bucket
+{%- else %}
+import logging
+
+from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter
+
+
+def setup_telemetry() -> None:
+    """Initialize Traceloop telemetry for LangGraph agents."""
+    try:
+        from traceloop.sdk import Instruments, Traceloop
+
+        Traceloop.init(
+            app_name="{{cookiecutter.project_name}}",
+            disable_batch=False,
+            telemetry_enabled=False,
+            exporter=CloudTraceSpanExporter(),
+            instruments={Instruments.LANGCHAIN},
+        )
+    except Exception as e:
+        logging.error("Failed to initialize Telemetry: %s", str(e))
+{%- endif %}
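For orientation, the ADK branch of `setup_telemetry()` is driven entirely by environment variables. A minimal usage sketch, assuming the template has been rendered with `{{cookiecutter.agent_directory}}` as `app` (the bucket name and commit SHA below are placeholders):

```python
# Sketch only: exercise the env-var contract of setup_telemetry().
import os

os.environ["LOGS_BUCKET_NAME"] = "my-logs-bucket"   # placeholder
os.environ["OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"] = "true"
os.environ["COMMIT_SHA"] = "abc1234"                # placeholder

from app.app_utils.telemetry import setup_telemetry  # rendered module path assumed

bucket = setup_telemetry()
if bucket:
    # With the defaults above, GenAI records are uploaded as JSONL under
    # gs://<bucket>/completions (override via GENAI_TELEMETRY_PATH).
    print(f"GenAI telemetry uploads to gs://{bucket}/completions")
```
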
agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/{utils → app_utils}/typing.py
RENAMED

@@ -26,12 +26,14 @@ from pydantic import (
     Field,
 )
 {%- else %}
+import uuid
 from typing import (
     Literal,
 )
 
 from pydantic import (
     BaseModel,
+    Field,
 )
 {%- endif %}
 {%- else %}

@@ -103,14 +105,10 @@ class Feedback(BaseModel):
 
     score: int | float
     text: str | None = ""
-    {%- if cookiecutter.is_adk %}
-    invocation_id: str
-    {%- else %}
-    run_id: str
-    {%- endif %}
     log_type: Literal["feedback"] = "feedback"
     service_name: Literal["{{cookiecutter.project_name}}"] = "{{cookiecutter.project_name}}"
-    user_id: str = …
+    user_id: str = Field(default_factory=lambda: str(uuid.uuid4()))
+    session_id: str = Field(default_factory=lambda: str(uuid.uuid4()))
 {% if not cookiecutter.is_adk %}
 
 def ensure_valid_config(config: RunnableConfig | None) -> RunnableConfig:
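Net effect of the typing.py change: the framework-specific `invocation_id`/`run_id` field is dropped, and `user_id`/`session_id` become optional with fresh-UUID defaults. A standalone sketch of the resulting pattern (the service name is hard-coded here in place of the templated project name):

```python
# Standalone sketch of the new Feedback shape; the real class lives in
# app_utils/typing.py and templates service_name from the project name.
import uuid
from typing import Literal

from pydantic import BaseModel, Field


class Feedback(BaseModel):
    score: int | float
    text: str | None = ""
    log_type: Literal["feedback"] = "feedback"
    service_name: Literal["my-agent"] = "my-agent"  # placeholder project name
    user_id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    session_id: str = Field(default_factory=lambda: str(uuid.uuid4()))


fb = Feedback(score=1)                       # ids auto-generated
fb2 = Feedback(score=0, user_id="user-123")  # or supplied explicitly
assert fb.user_id != fb2.user_id
```
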
agent_starter_pack/{agents/crewai_coding_crew/app/crew/config/agents.yaml → base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}converters{% else %}unused_converters{% endif %}/__init__.py}
RENAMED

@@ -12,28 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-  role: >
-    Senior Software Engineer
+"""Converters for A2A and LangChain types."""
 
+from .part_converter import (
+    convert_a2a_parts_to_langchain_content,
+    convert_langchain_content_to_a2a_parts,
+)
 
-
-
-
-
-  backstory: >
-    You are a Senior Software Engineer at a leading tech company. You are an expert Python programmer and do your best to produce perfect code.
-
-
-chief_qa_engineer_agent:
-  role: >
-    Chief Software Quality Control Engineer
-
-
-  goal: >
-    Ensure that the code does the job that it is supposed to do and that it is error free.
-
-
-  backstory: >
-    You feel that programmers always do only half the job, so you are super dedicate to make high quality code.
-
+__all__ = [
+    "convert_a2a_parts_to_langchain_content",
+    "convert_langchain_content_to_a2a_parts",
+]
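With these re-exports in place, callers import the list-level helpers from the package itself. A round-trip sketch, again assuming the rendered package is `app`; the functions are defined in part_converter.py below:

```python
# Round-trip sketch using the package-level re-exports; `app` is an assumed
# rendering of {{cookiecutter.agent_directory}}.
from a2a.types import Part, TextPart

from app.app_utils.converters import (
    convert_a2a_parts_to_langchain_content,
    convert_langchain_content_to_a2a_parts,
)

parts = [Part(root=TextPart(text="hello"))]
content = convert_a2a_parts_to_langchain_content(parts)
# -> [{"type": "text", "text": "hello"}]
round_tripped = convert_langchain_content_to_a2a_parts(content)
assert round_tripped[0].root.text == "hello"
```
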
agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}converters{% else %}unused_converters{% endif %}/part_converter.py
ADDED

@@ -0,0 +1,138 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Converters between A2A Parts and LangChain message content."""
+
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+from a2a.types import FilePart, FileWithBytes, FileWithUri, Part, TextPart
+
+logger = logging.getLogger(__name__)
+
+
+LangChainContent = str | list[str | dict[str, Any]]
+LangChainContentDict = dict[str, Any]
+
+
+def convert_a2a_part_to_langchain_content(part: Part) -> LangChainContentDict | str:
+    """Convert an A2A Part to LangChain message content format."""
+
+    root = part.root
+
+    if isinstance(root, TextPart):
+        return {"type": "text", "text": root.text}
+
+    elif isinstance(root, FilePart):
+        file_data = root.file
+        mime_type = file_data.mime_type if hasattr(file_data, "mime_type") else None
+
+        # Determine media type from mime_type
+        media_type = "image"  # default
+        if mime_type:
+            if mime_type.startswith("audio/"):
+                media_type = "audio"
+            elif mime_type.startswith("video/"):
+                media_type = "video"
+
+        if isinstance(file_data, FileWithUri):
+            return {"type": media_type, "url": file_data.uri}
+        else:
+            # Base64 data should already be encoded
+            return {
+                "type": media_type,
+                "base64": file_data.bytes,
+                "mime_type": mime_type or "application/octet-stream",
+            }
+
+    else:
+        import json
+
+        data_str = json.dumps(root.data, indent=2)
+        return {"type": "text", "text": f"[Structured Data]\n{data_str}"}
+
+
+def convert_langchain_content_to_a2a_part(content: Any) -> Part:
+    """Convert LangChain message content to an A2A Part."""
+
+    if isinstance(content, str):
+        return Part(root=TextPart(text=content))
+
+    if isinstance(content, dict):
+        content_type = content.get("type")
+
+        if content_type == "text":
+            text = content.get("text", "")
+            return Part(root=TextPart(text=text))
+
+        elif content_type in ("image", "audio", "video"):
+            # Handle URL-based media
+            if "url" in content:
+                return Part(root=FilePart(file=FileWithUri(uri=content["url"])))
+
+            # Handle base64-encoded media
+            elif "base64" in content:
+                mime_type = content.get("mime_type")
+                return Part(
+                    root=FilePart(
+                        file=FileWithBytes(bytes=content["base64"], mime_type=mime_type)
+                    )
+                )
+
+            # Handle file_id-based media
+            elif "file_id" in content:
+                return Part(
+                    root=FilePart(file=FileWithUri(uri=f"file://{content['file_id']}"))
+                )
+
+        else:
+            import json
+
+            text = json.dumps(content)
+            logger.warning(f"Unknown content type '{content_type}', converting to text")
+            return Part(root=TextPart(text=text))
+
+    logger.warning(f"Unknown content type: {type(content)}, converting to text")
+    return Part(root=TextPart(text=str(content)))
+
+
+def convert_a2a_parts_to_langchain_content(parts: list[Part]) -> LangChainContent:
+    """Convert a list of A2A Parts to LangChain message content."""
+
+    if not parts:
+        return ""
+
+    converted: list[str | dict[str, Any]] = []
+    for part in parts:
+        result = convert_a2a_part_to_langchain_content(part)
+        converted.append(result)
+
+    if len(converted) == 1 and isinstance(converted[0], str):
+        return converted[0]
+
+    return converted
+
+
+def convert_langchain_content_to_a2a_parts(content: LangChainContent) -> list[Part]:
+    """Convert LangChain message content to a list of A2A Parts."""
+
+    if isinstance(content, str):
+        return [Part(root=TextPart(text=content))]
+
+    result: list[Part] = []
+    for item in content:
+        result.append(convert_langchain_content_to_a2a_part(item))
+    return result
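The per-part converters map A2A file parts onto LangChain-style content dicts keyed by `url`, `base64`, or `file_id`. A sketch of the URI path (the URL and MIME type are placeholders; the `mime_type` keyword on `FileWithUri` is assumed from the attribute access in the code above):

```python
# Sketch of the media path: a URL-backed image part becomes a LangChain
# content dict, and the dict converts back to a FilePart.
from a2a.types import FilePart, FileWithUri, Part

from app.app_utils.converters.part_converter import (  # rendered path assumed
    convert_a2a_part_to_langchain_content,
    convert_langchain_content_to_a2a_part,
)

part = Part(
    root=FilePart(
        file=FileWithUri(uri="https://example.com/cat.png", mime_type="image/png")
    )
)
content = convert_a2a_part_to_langchain_content(part)
# -> {"type": "image", "url": "https://example.com/cat.png"}

back = convert_langchain_content_to_a2a_part(content)
assert isinstance(back.root, FilePart)
```
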
agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/__init__.py
ADDED

@@ -0,0 +1,13 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/a2a_agent_executor.py
ADDED

@@ -0,0 +1,265 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import uuid
+from datetime import datetime, timezone
+
+from a2a.server.agent_execution import AgentExecutor, RequestContext
+from a2a.server.events import EventQueue
+from a2a.types import (
+    Artifact,
+    Message,
+    Part,
+    Role,
+    TaskArtifactUpdateEvent,
+    TaskState,
+    TaskStatus,
+    TaskStatusUpdateEvent,
+    TextPart,
+    UnsupportedOperationError,
+)
+from a2a.utils.errors import ServerError
+from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
+from langgraph.graph.state import CompiledStateGraph
+from pydantic import BaseModel
+from typing_extensions import override
+
+from ..converters import (
+    convert_a2a_parts_to_langchain_content,
+    convert_langchain_content_to_a2a_parts,
+)
+from .task_result_aggregator import LangGraphTaskResultAggregator
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+class LangGraphAgentExecutorConfig(BaseModel):
+    """Configuration for the LangGraphAgentExecutor."""
+
+    enable_streaming: bool = True
+
+
+class LangGraphAgentExecutor(AgentExecutor):
+    """An AgentExecutor that runs a LangGraph agent against an A2A request and
+    publishes updates to an event queue."""
+
+    def __init__(
+        self,
+        *,
+        graph: CompiledStateGraph,
+        config: LangGraphAgentExecutorConfig | None = None,
+    ):
+        super().__init__()
+        self._graph = graph
+        self._config = config or LangGraphAgentExecutorConfig()
+
+    @override
+    async def cancel(self, context: RequestContext, event_queue: EventQueue) -> None:
+        """Cancel the execution."""
+        # TODO: Implement proper cancellation logic if needed
+        raise ServerError(error=UnsupportedOperationError())
+
+    @override
+    async def execute(
+        self,
+        context: RequestContext,
+        event_queue: EventQueue,
+    ) -> None:
+        """Executes an A2A request and publishes updates to the event queue."""
+
+        if not context.message:
+            raise ValueError("A2A request must have a message")
+
+        if not context.task_id:
+            raise ValueError("task_id is required")
+        if not context.context_id:
+            raise ValueError("context_id is required")
+
+        task_id = context.task_id
+        context_id = context.context_id
+
+        if not context.current_task:
+            await event_queue.enqueue_event(
+                TaskStatusUpdateEvent(
+                    task_id=task_id,
+                    status=TaskStatus(
+                        state=TaskState.submitted,
+                        message=context.message,
+                        timestamp=datetime.now(timezone.utc).isoformat(),
+                    ),
+                    context_id=context_id,
+                    final=False,
+                )
+            )
+
+        try:
+            await self._handle_request(context, event_queue)
+        except Exception as e:
+            logger.error("Error handling A2A request: %s", e, exc_info=True)
+            try:
+                await event_queue.enqueue_event(
+                    TaskStatusUpdateEvent(
+                        task_id=task_id,
+                        status=TaskStatus(
+                            state=TaskState.failed,
+                            timestamp=datetime.now(timezone.utc).isoformat(),
+                            message=Message(
+                                message_id=str(uuid.uuid4()),
+                                role=Role.agent,
+                                parts=[Part(root=TextPart(text=str(e)))],
+                            ),
+                        ),
+                        context_id=context_id,
+                        final=True,
+                    )
+                )
+            except Exception as enqueue_error:
+                logger.error(
+                    "Failed to publish failure event: %s", enqueue_error, exc_info=True
+                )
+
+    async def _handle_request(
+        self,
+        context: RequestContext,
+        event_queue: EventQueue,
+    ) -> None:
+        """Handle the A2A request and publish events."""
+
+        graph = self._graph
+
+        if not context.task_id:
+            raise ValueError("task_id is required")
+        if not context.context_id:
+            raise ValueError("context_id is required")
+
+        task_id = context.task_id
+        context_id = context.context_id
+
+        await event_queue.enqueue_event(
+            TaskStatusUpdateEvent(
+                task_id=task_id,
+                status=TaskStatus(
+                    state=TaskState.working,
+                    timestamp=datetime.now(timezone.utc).isoformat(),
+                ),
+                context_id=context_id,
+                final=False,
+            )
+        )
+
+        # Convert A2A message parts to LangChain content
+        message_content = (
+            convert_a2a_parts_to_langchain_content(context.message.parts)
+            if context.message
+            else ""
+        )
+        messages = [HumanMessage(content=message_content)]
+        input_dict = {"messages": messages}
+
+        task_result_aggregator = LangGraphTaskResultAggregator()
+
+        try:
+            if self._config.enable_streaming:
+                async for chunk in graph.astream(input_dict, stream_mode="messages"):
+                    if isinstance(chunk, tuple) and len(chunk) > 0:
+                        message = chunk[0]
+
+                        # Process AIMessage chunks
+                        if isinstance(message, AIMessage) and message.content:
+                            task_result_aggregator.process_message(message)
+
+                            parts = convert_langchain_content_to_a2a_parts(
+                                message.content
+                            )
+                            await event_queue.enqueue_event(
+                                TaskStatusUpdateEvent(
+                                    task_id=task_id,
+                                    status=TaskStatus(
+                                        state=TaskState.working,
+                                        timestamp=datetime.now(
+                                            timezone.utc
+                                        ).isoformat(),
+                                        message=Message(
+                                            message_id=str(uuid.uuid4()),
+                                            role=Role.agent,
+                                            parts=parts,
+                                        ),
+                                    ),
+                                    context_id=context_id,
+                                    final=False,
+                                )
+                            )
+
+                        # Process ToolMessage chunks (for multimodal content)
+                        elif isinstance(message, ToolMessage):
+                            task_result_aggregator.process_message(message)
+            else:
+                result = await graph.ainvoke(input_dict)
+                if "messages" in result:
+                    for msg in result["messages"]:
+                        if isinstance(msg, (AIMessage, ToolMessage)) and msg.content:
+                            task_result_aggregator.process_message(msg)
+            if (
+                task_result_aggregator.task_state == TaskState.working
+                and task_result_aggregator.task_status_message is not None
+                and task_result_aggregator.task_status_message.parts
+            ):
+                # Publish the artifact update event as the final result
+                await event_queue.enqueue_event(
+                    TaskArtifactUpdateEvent(
+                        task_id=task_id,
+                        last_chunk=True,
+                        context_id=context_id,
+                        artifact=Artifact(
+                            artifact_id=str(uuid.uuid4()),
+                            parts=task_result_aggregator.get_final_parts(),
+                        ),
+                    )
+                )
+                # Publish the final status update event
+                await event_queue.enqueue_event(
+                    TaskStatusUpdateEvent(
+                        task_id=task_id,
+                        status=TaskStatus(
+                            state=TaskState.completed,
+                            timestamp=datetime.now(timezone.utc).isoformat(),
+                        ),
+                        context_id=context_id,
+                        final=True,
+                    )
+                )
+            else:
+                # Publish final status with current task_state and message
+                await event_queue.enqueue_event(
+                    TaskStatusUpdateEvent(
+                        task_id=task_id,
+                        status=TaskStatus(
+                            state=task_result_aggregator.task_state,
+                            timestamp=datetime.now(timezone.utc).isoformat(),
+                            message=task_result_aggregator.task_status_message,
+                        ),
+                        context_id=context_id,
+                        final=True,
+                    )
+                )
+
+        except Exception as e:
+            logger.error("Error during graph execution: %s", e, exc_info=True)
+            # Update task state to failed using aggregator
+            task_result_aggregator.set_failed(str(e))
+            raise