agent-starter-pack 0.18.1-py3-none-any.whl → 0.19.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_starter_pack/agents/adk_a2a_base/.template/templateconfig.yaml +22 -0
- agent_starter_pack/agents/adk_a2a_base/README.md +22 -0
- agent_starter_pack/agents/adk_a2a_base/app/__init__.py +17 -0
- agent_starter_pack/agents/adk_a2a_base/app/agent.py +70 -0
- agent_starter_pack/agents/adk_a2a_base/notebooks/adk_a2a_app_testing.ipynb +600 -0
- agent_starter_pack/agents/adk_a2a_base/notebooks/evaluating_adk_agent.ipynb +1535 -0
- agent_starter_pack/agents/adk_a2a_base/tests/integration/test_agent.py +58 -0
- agent_starter_pack/base_template/.gitignore +1 -1
- agent_starter_pack/base_template/Makefile +11 -11
- agent_starter_pack/base_template/README.md +1 -1
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'github_actions' %}.github{% else %}unused_github{% endif %}/workflows/deploy-to-prod.yaml +10 -2
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'github_actions' %}.github{% else %}unused_github{% endif %}/workflows/staging.yaml +26 -5
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}.cloudbuild{% else %}unused_.cloudbuild{% endif %}/deploy-to-prod.yaml +18 -3
- agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}.cloudbuild{% else %}unused_.cloudbuild{% endif %}/staging.yaml +34 -3
- agent_starter_pack/cli/utils/cicd.py +20 -4
- agent_starter_pack/cli/utils/register_gemini_enterprise.py +79 -84
- agent_starter_pack/cli/utils/template.py +2 -0
- agent_starter_pack/deployment_targets/agent_engine/tests/integration/test_agent_engine_app.py +104 -2
- agent_starter_pack/deployment_targets/agent_engine/tests/load_test/load_test.py +144 -0
- agent_starter_pack/deployment_targets/agent_engine/tests/{% if cookiecutter.is_adk_a2a %}helpers.py{% else %}unused_helpers.py{% endif %} +138 -0
- agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/agent_engine_app.py +88 -4
- agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/utils/deployment.py +4 -0
- agent_starter_pack/deployment_targets/cloud_run/Dockerfile +3 -0
- agent_starter_pack/deployment_targets/cloud_run/deployment/terraform/dev/service.tf +7 -0
- agent_starter_pack/deployment_targets/cloud_run/deployment/terraform/service.tf +16 -2
- agent_starter_pack/deployment_targets/cloud_run/tests/integration/test_server_e2e.py +218 -1
- agent_starter_pack/deployment_targets/cloud_run/tests/load_test/README.md +2 -2
- agent_starter_pack/deployment_targets/cloud_run/tests/load_test/load_test.py +51 -4
- agent_starter_pack/deployment_targets/cloud_run/{{cookiecutter.agent_directory}}/server.py +66 -0
- agent_starter_pack/resources/locks/uv-adk_a2a_base-agent_engine.lock +4224 -0
- agent_starter_pack/resources/locks/uv-adk_a2a_base-cloud_run.lock +4819 -0
- agent_starter_pack/resources/locks/uv-adk_base-agent_engine.lock +230 -236
- agent_starter_pack/resources/locks/uv-adk_base-cloud_run.lock +290 -296
- agent_starter_pack/resources/locks/uv-adk_live-agent_engine.lock +230 -236
- agent_starter_pack/resources/locks/uv-adk_live-cloud_run.lock +290 -296
- agent_starter_pack/resources/locks/uv-agentic_rag-agent_engine.lock +234 -239
- agent_starter_pack/resources/locks/uv-agentic_rag-cloud_run.lock +294 -299
- agent_starter_pack/resources/locks/uv-crewai_coding_crew-agent_engine.lock +221 -228
- agent_starter_pack/resources/locks/uv-crewai_coding_crew-cloud_run.lock +279 -286
- agent_starter_pack/resources/locks/uv-langgraph_base_react-agent_engine.lock +226 -233
- agent_starter_pack/resources/locks/uv-langgraph_base_react-cloud_run.lock +298 -305
- {agent_starter_pack-0.18.1.dist-info → agent_starter_pack-0.19.0.dist-info}/METADATA +2 -1
- {agent_starter_pack-0.18.1.dist-info → agent_starter_pack-0.19.0.dist-info}/RECORD +46 -36
- {agent_starter_pack-0.18.1.dist-info → agent_starter_pack-0.19.0.dist-info}/WHEEL +0 -0
- {agent_starter_pack-0.18.1.dist-info → agent_starter_pack-0.19.0.dist-info}/entry_points.txt +0 -0
- {agent_starter_pack-0.18.1.dist-info → agent_starter_pack-0.19.0.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,58 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# mypy: disable-error-code="union-attr"
+from google.adk.agents.run_config import RunConfig, StreamingMode
+from google.adk.runners import Runner
+from google.adk.sessions import InMemorySessionService
+from google.genai import types
+
+from {{cookiecutter.agent_directory}}.agent import root_agent
+
+
+def test_agent_stream() -> None:
+    """
+    Integration test for the agent stream functionality.
+    Tests that the agent returns valid streaming responses.
+    """
+
+    session_service = InMemorySessionService()
+
+    session = session_service.create_session_sync(user_id="test_user", app_name="test")
+    runner = Runner(agent=root_agent, session_service=session_service, app_name="test")
+
+    message = types.Content(
+        role="user", parts=[types.Part.from_text(text="Why is the sky blue?")]
+    )
+
+    events = list(
+        runner.run(
+            new_message=message,
+            user_id="test_user",
+            session_id=session.id,
+            run_config=RunConfig(streaming_mode=StreamingMode.SSE),
+        )
+    )
+    assert len(events) > 0, "Expected at least one message"
+
+    has_text_content = False
+    for event in events:
+        if (
+            event.content
+            and event.content.parts
+            and any(part.text for part in event.content.parts)
+        ):
+            has_text_content = True
+            break
+    assert has_text_content, "Expected at least one message with text content"
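As a point of reference, here is a minimal sketch (not part of the template) of how the streamed text could be gathered from the events returned by runner.run() above; it assumes the same session and runner setup as the test:

    # Hypothetical follow-up to the test above: concatenate the text parts of all streamed events.
    streamed_text = "".join(
        part.text
        for event in events
        if event.content and event.content.parts
        for part in event.content.parts
        if part.text
    )
    print(streamed_text)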
@@ -161,6 +161,9 @@ playground-dev:
 deploy:
 {%- if cookiecutter.deployment_target == 'cloud_run' %}
     PROJECT_ID=$$(gcloud config get-value project) && \
+{%- if cookiecutter.is_adk_a2a %}
+    PROJECT_NUMBER=$$(gcloud projects describe $$PROJECT_ID --format="value(projectNumber)") && \
+{%- endif %}
     gcloud beta run deploy {{cookiecutter.project_name}} \
         --source . \
         --memory "4Gi" \
@@ -169,14 +172,16 @@ deploy:
         --no-allow-unauthenticated \
         --no-cpu-throttling \
         --labels "{% if cookiecutter.is_adk %}created-by=adk{% if cookiecutter.agent_garden %},{% endif %}{% endif %}{% if cookiecutter.agent_garden %}deployed-with=agent-garden{% if cookiecutter.agent_sample_id %},vertex-agent-sample-id={{cookiecutter.agent_sample_id}},vertex-agent-sample-publisher={{cookiecutter.agent_sample_publisher}}{% endif %}{% endif %}" \
+        --update-build-env-vars "AGENT_VERSION=$(shell awk -F'"' '/^version = / {print $$2}' pyproject.toml || echo '0.0.0')" \
         --set-env-vars \
-        "COMMIT_SHA=$(shell git rev-parse HEAD){%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID={{cookiecutter.project_name}}-datastore,DATA_STORE_REGION=us{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX={{cookiecutter.project_name}}-vector-search,VECTOR_SEARCH_INDEX_ENDPOINT={{cookiecutter.project_name}}-vector-search-endpoint,VECTOR_SEARCH_BUCKET=$$PROJECT_ID-{{cookiecutter.project_name}}-vs{%- endif %}{%- endif %}" \
+        "COMMIT_SHA=$(shell git rev-parse HEAD){%- if cookiecutter.is_adk_a2a %},APP_URL=https://{{cookiecutter.project_name}}-$$PROJECT_NUMBER.us-central1.run.app{%- endif %}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID={{cookiecutter.project_name}}-datastore,DATA_STORE_REGION=us{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX={{cookiecutter.project_name}}-vector-search,VECTOR_SEARCH_INDEX_ENDPOINT={{cookiecutter.project_name}}-vector-search-endpoint,VECTOR_SEARCH_BUCKET=$$PROJECT_ID-{{cookiecutter.project_name}}-vs{%- endif %}{%- endif %}" \
         $(if $(IAP),--iap) \
         $(if $(PORT),--port=$(PORT))
 {%- elif cookiecutter.deployment_target == 'agent_engine' %}
     # Export dependencies to requirements file using uv export.
-    uv export --no-hashes --no-header --no-dev --no-emit-project --no-annotate > .requirements.txt 2>/dev/null || \
-    uv export --no-hashes --no-header --no-dev --no-emit-project > .requirements.txt &&
+    (uv export --no-hashes --no-header --no-dev --no-emit-project --no-annotate > .requirements.txt 2>/dev/null || \
+    uv export --no-hashes --no-header --no-dev --no-emit-project > .requirements.txt) && \
+    uv run -m {{cookiecutter.agent_directory}}.agent_engine_app
 {%- endif %}

 # Alias for 'make deploy' for backward compatibility
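The new --update-build-env-vars flag feeds an AGENT_VERSION derived from pyproject.toml into the Cloud Run build. A hedged Python equivalent of the awk one-liner used above (illustration only; the templates themselves use awk in the Makefile and CI configs):

    import re
    from pathlib import Path

    def extract_agent_version(pyproject: str = "pyproject.toml") -> str:
        # Mirrors: awk -F'"' '/^version = / {print $2}' pyproject.toml || echo '0.0.0'
        try:
            for line in Path(pyproject).read_text(encoding="utf-8").splitlines():
                match = re.match(r'^version = "([^"]+)"', line)
                if match:
                    return match.group(1)
        except OSError:
            pass
        return "0.0.0"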
@@ -238,13 +243,8 @@ lint:
 # ==============================================================================

 # Register the deployed agent to Gemini Enterprise
-# Usage: make register-gemini-enterprise
+# Usage: ID="projects/.../engines/xxx" make register-gemini-enterprise
+# Optional env vars: GEMINI_DISPLAY_NAME, GEMINI_DESCRIPTION, GEMINI_TOOL_DESCRIPTION, AGENT_ENGINE_ID
 register-gemini-enterprise:
-    uvx --from agent-starter-pack agent-starter-pack-register-gemini-enterprise
-        $(if $(GEMINI_ENTERPRISE_APP_ID),--gemini-enterprise-app-id="$(GEMINI_ENTERPRISE_APP_ID)",) \
-        $(if $(AGENT_ENGINE_ID),--agent-engine-id="$(AGENT_ENGINE_ID)",) \
-        $(if $(GEMINI_DISPLAY_NAME),--display-name="$(GEMINI_DISPLAY_NAME)",) \
-        $(if $(GEMINI_DESCRIPTION),--description="$(GEMINI_DESCRIPTION)",) \
-        $(if $(GEMINI_TOOL_DESCRIPTION),--tool-description="$(GEMINI_TOOL_DESCRIPTION)",) \
-        $(if $(GEMINI_AUTHORIZATION_ID),--authorization-id="$(GEMINI_AUTHORIZATION_ID)",)
+    uvx --from agent-starter-pack agent-starter-pack-register-gemini-enterprise
 {%- endif %}
@@ -72,7 +72,7 @@ make install && make playground
 | `make build-frontend` | Build the frontend for production |
 {%- endif %}
 {%- if cookiecutter.is_adk %}
-| `make register-gemini-enterprise` | Register deployed agent to Gemini Enterprise (
+| `make register-gemini-enterprise` | Register deployed agent to Gemini Enterprise ([docs](https://googlecloudplatform.github.io/agent-starter-pack/cli/register_gemini_enterprise.html)) |
 {%- endif %}
 {%- endif %}
 | `make test` | Run unit and integration tests |
@@ -69,6 +69,14 @@ jobs:
         run: |
           pip install uv==0.8.13
           uv sync --locked
+{%- if cookiecutter.is_adk_a2a %}
+
+      - name: Extract version from pyproject.toml
+        id: extract-version
+        run: |
+          VERSION=$(awk -F'"' '/^version = / {print $2}' pyproject.toml || echo '0.0.0')
+          echo "version=${VERSION}" >> $GITHUB_OUTPUT
+{%- endif %}
 {%- endif %}

 {%- if cookiecutter.data_ingestion %}
@@ -108,10 +116,10 @@ jobs:
       - name: Deploy to Production (Agent Engine)
         run: |
           uv export --no-hashes --no-sources --no-header --no-dev --no-emit-project --no-annotate --locked > .requirements.txt
-          uv run {{cookiecutter.agent_directory}}
+          uv run -m {{cookiecutter.agent_directory}}.agent_engine_app \
             --project {% raw %}${{ vars.PROD_PROJECT_ID }}{% endraw %} \
             --location {% raw %}${{ vars.REGION }}{% endraw %} \
             --artifacts-bucket-name {% raw %}${{ vars.LOGS_BUCKET_NAME_PROD }}{% endraw %} \
-            --set-env-vars="COMMIT_SHA={% raw %}${{ github.sha }}{% endraw %}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID={% raw %}${{ vars.DATA_STORE_ID_PROD }}{% endraw %},DATA_STORE_REGION={% raw %}${{ vars.DATA_STORE_REGION }}{% endraw %}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX={% raw %}${{ vars.VECTOR_SEARCH_INDEX_PROD }}{% endraw %},VECTOR_SEARCH_INDEX_ENDPOINT={% raw %}${{ vars.VECTOR_SEARCH_INDEX_ENDPOINT_PROD }}{% endraw %},VECTOR_SEARCH_BUCKET={% raw %}${{ vars.VECTOR_SEARCH_BUCKET_PROD }}{% endraw %}{%- endif %}{%- endif %}"
+            --set-env-vars="COMMIT_SHA={% raw %}${{ github.sha }}{% endraw %}{%- if cookiecutter.is_adk_a2a %},AGENT_VERSION={% raw %}${{ steps.extract-version.outputs.version }}{% endraw %}{%- endif %}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID={% raw %}${{ vars.DATA_STORE_ID_PROD }}{% endraw %},DATA_STORE_REGION={% raw %}${{ vars.DATA_STORE_REGION }}{% endraw %}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX={% raw %}${{ vars.VECTOR_SEARCH_INDEX_PROD }}{% endraw %},VECTOR_SEARCH_INDEX_ENDPOINT={% raw %}${{ vars.VECTOR_SEARCH_INDEX_ENDPOINT_PROD }}{% endraw %},VECTOR_SEARCH_BUCKET={% raw %}${{ vars.VECTOR_SEARCH_BUCKET_PROD }}{% endraw %}{%- endif %}{%- endif %}"
 {%- endif %}

@@ -81,11 +81,23 @@ jobs:
       - name: Configure Docker for Artifact Registry
         run: |
           gcloud auth configure-docker {% raw %}${{ vars.REGION }}{% endraw %}-docker.pkg.dev --quiet
-
+{%- if cookiecutter.is_adk_a2a %}
+
+      - name: Extract version from pyproject.toml
+        id: extract-version
+        run: |
+          VERSION=$(awk -F'"' '/^version = / {print $2}' pyproject.toml || echo '0.0.0')
+          echo "version=${VERSION}" >> $GITHUB_OUTPUT
+{%- endif %}
+
       - name: Build and Push Docker Image
         run: |
           docker build -t {% raw %}${{ vars.REGION }}{% endraw %}-docker.pkg.dev/{% raw %}${{ vars.CICD_PROJECT_ID }}{% endraw %}/{% raw %}${{ vars.ARTIFACT_REGISTRY_REPO_NAME }}{% endraw %}/{% raw %}${{ vars.CONTAINER_NAME }}{% endraw %} \
-            --build-arg COMMIT_SHA={% raw %}${{ github.sha }}{% endraw %}
+            --build-arg COMMIT_SHA={% raw %}${{ github.sha }}{% endraw %} \
+{%- if cookiecutter.is_adk_a2a %}
+            --build-arg AGENT_VERSION={% raw %}${{ steps.extract-version.outputs.version }}{% endraw %} \
+{%- endif %}
+            .
           docker push {% raw %}${{ vars.REGION }}{% endraw %}-docker.pkg.dev/{% raw %}${{ vars.CICD_PROJECT_ID }}{% endraw %}/{% raw %}${{ vars.ARTIFACT_REGISTRY_REPO_NAME }}{% endraw %}/{% raw %}${{ vars.CONTAINER_NAME }}{% endraw %}

       - name: Deploy to Staging (Cloud Run)
@@ -115,15 +127,24 @@ jobs:
         run: |
           pip install uv==0.8.13
           uv sync --locked
+{%- if cookiecutter.is_adk_a2a %}
+
+      - name: Extract version from pyproject.toml
+        id: extract-version
+        run: |
+          VERSION=$(awk -F'"' '/^version = / {print $2}' pyproject.toml || echo '0.0.0')
+          echo "version=${VERSION}" >> $GITHUB_OUTPUT
+          echo "Extracted version: ${VERSION}"
+{%- endif %}

       - name: Deploy to Staging (Agent Engine)
         run: |
           uv export --no-hashes --no-sources --no-header --no-dev --no-emit-project --no-annotate --locked > .requirements.txt
-          uv run {{cookiecutter.agent_directory}}
+          uv run -m {{cookiecutter.agent_directory}}.agent_engine_app \
             --project {% raw %}${{ vars.STAGING_PROJECT_ID }}{% endraw %} \
             --location {% raw %}${{ vars.REGION }}{% endraw %} \
             --artifacts-bucket-name {% raw %}${{ vars.LOGS_BUCKET_NAME_STAGING }}{% endraw %} \
-            --set-env-vars="COMMIT_SHA={% raw %}${{ github.sha }}{% endraw %}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID={% raw %}${{ vars.DATA_STORE_ID_STAGING }}{% endraw %},DATA_STORE_REGION={% raw %}${{ vars.DATA_STORE_REGION }}{% endraw %}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX={% raw %}${{ vars.VECTOR_SEARCH_INDEX_STAGING }}{% endraw %},VECTOR_SEARCH_INDEX_ENDPOINT={% raw %}${{ vars.VECTOR_SEARCH_INDEX_ENDPOINT_STAGING }}{% endraw %},VECTOR_SEARCH_BUCKET={% raw %}${{ vars.VECTOR_SEARCH_BUCKET_STAGING }}{% endraw %}{%- endif %}{%- endif %}"
+            --set-env-vars="COMMIT_SHA={% raw %}${{ github.sha }}{% endraw %}{%- if cookiecutter.is_adk_a2a %},AGENT_VERSION={% raw %}${{ steps.extract-version.outputs.version }}{% endraw %}{%- endif %}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID={% raw %}${{ vars.DATA_STORE_ID_STAGING }}{% endraw %},DATA_STORE_REGION={% raw %}${{ vars.DATA_STORE_REGION }}{% endraw %}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX={% raw %}${{ vars.VECTOR_SEARCH_INDEX_STAGING }}{% endraw %},VECTOR_SEARCH_INDEX_ENDPOINT={% raw %}${{ vars.VECTOR_SEARCH_INDEX_ENDPOINT_STAGING }}{% endraw %},VECTOR_SEARCH_BUCKET={% raw %}${{ vars.VECTOR_SEARCH_BUCKET_STAGING }}{% endraw %}{%- endif %}{%- endif %}"

       - name: Fetch Auth Token
         id: fetch-token
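How the deployed agent consumes these values is not part of this diff, but a plausible sketch of reading the injected variables on the application side (names taken from the --set-env-vars string above; the fallbacks are assumptions):

    import os

    commit_sha = os.environ.get("COMMIT_SHA", "unknown")
    agent_version = os.environ.get("AGENT_VERSION", "0.0.0")  # only set for adk_a2a templates
    print(f"agent version {agent_version} at commit {commit_sha}")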
@@ -174,7 +195,7 @@ jobs:
 {%- elif cookiecutter.deployment_target == 'cloud_run' %}
           export _ID_TOKEN="{% raw %}${{ steps.fetch-token.outputs._id_token }}{% endraw %}"
           export _STAGING_URL="{% raw %}${{ steps.fetch-url.outputs._staging_url }}{% endraw %}"
-          pip install locust==2.31.1
+          pip install locust==2.31.1{%- if cookiecutter.is_adk_a2a %} a2a-sdk~=0.3.9{%- endif %}
           locust -f tests/load_test/load_test.py \
             --headless \
             -H ${_STAGING_URL} \
@@ -42,7 +42,7 @@ steps:
 {%- endif %}
 {%- if cookiecutter.deployment_target == 'cloud_run' %}

-  - name: "gcr.io/cloud-builders/gcloud"
+  - name: "gcr.io/cloud-builders/gcloud-slim"
    id: trigger-deployment
    entrypoint: gcloud
    args:
@@ -57,6 +57,18 @@ steps:
       - $_PROD_PROJECT_ID

 {%- elif cookiecutter.deployment_target == 'agent_engine' %}
+{%- if cookiecutter.is_adk_a2a %}
+  # Extract version from pyproject.toml
+  - name: "gcr.io/cloud-builders/gcloud-slim"
+    id: extract-version
+    entrypoint: /bin/bash
+    args:
+      - "-c"
+      - |
+        VERSION=$(awk -F'"' '/^version = / {print $$2}' pyproject.toml || echo '0.0.0')
+        echo "$${VERSION}" > /workspace/agent_version.txt
+
+{%- endif %}
   - name: "python:3.12-slim"
     id: install-dependencies
     entrypoint: /bin/bash
@@ -73,12 +85,15 @@ steps:
     args:
       - "-c"
       - |
+{%- if cookiecutter.is_adk_a2a %}
+        AGENT_VERSION=$(cat /workspace/agent_version.txt || echo '0.0.0')
+{%- endif %}
         uv export --no-hashes --no-sources --no-header --no-dev --no-emit-project --no-annotate --locked > .requirements.txt
-        uv run {{cookiecutter.agent_directory}}
+        uv run -m {{cookiecutter.agent_directory}}.agent_engine_app \
           --project ${_PROD_PROJECT_ID} \
           --location ${_REGION} \
           --artifacts-bucket-name ${_LOGS_BUCKET_NAME_PROD} \
-          --set-env-vars="COMMIT_SHA=${COMMIT_SHA}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID=${_DATA_STORE_ID_PROD},DATA_STORE_REGION=${_DATA_STORE_REGION}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX=${_VECTOR_SEARCH_INDEX_PROD},VECTOR_SEARCH_INDEX_ENDPOINT=${_VECTOR_SEARCH_INDEX_ENDPOINT_PROD},VECTOR_SEARCH_BUCKET=${_VECTOR_SEARCH_BUCKET_PROD}{%- endif %}{%- endif %}"
+          --set-env-vars="COMMIT_SHA=${COMMIT_SHA}{%- if cookiecutter.is_adk_a2a %},AGENT_VERSION=$${AGENT_VERSION}{%- endif %}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID=${_DATA_STORE_ID_PROD},DATA_STORE_REGION=${_DATA_STORE_REGION}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX=${_VECTOR_SEARCH_INDEX_PROD},VECTOR_SEARCH_INDEX_ENDPOINT=${_VECTOR_SEARCH_INDEX_ENDPOINT_PROD},VECTOR_SEARCH_BUCKET=${_VECTOR_SEARCH_BUCKET_PROD}{%- endif %}{%- endif %}"
     env:
       - 'PATH=/usr/local/bin:/usr/bin:~/.local/bin'
 {%- endif %}
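The Cloud Build steps hand the version between steps through /workspace/agent_version.txt, with cat ... || echo '0.0.0' as the fallback. A hedged Python equivalent of that read-with-fallback (illustration only; the steps themselves use bash):

    from pathlib import Path

    def read_agent_version(path: str = "/workspace/agent_version.txt") -> str:
        # Mirrors: cat /workspace/agent_version.txt || echo '0.0.0'
        try:
            return Path(path).read_text(encoding="utf-8").strip()
        except OSError:
            return "0.0.0"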
@@ -39,6 +39,18 @@ steps:
       - 'PATH=/usr/local/bin:/usr/bin:~/.local/bin'
 {%- endif %}
 {%- if cookiecutter.deployment_target == 'cloud_run' %}
+{%- if cookiecutter.is_adk_a2a %}
+  # Extract version from pyproject.toml
+  - name: "gcr.io/cloud-builders/gcloud-slim"
+    id: extract-version
+    entrypoint: /bin/bash
+    args:
+      - "-c"
+      - |
+        VERSION=$(awk -F'"' '/^version = / {print $$2}' pyproject.toml || echo '0.0.0')
+        echo "$${VERSION}" > /workspace/agent_version.txt
+
+{%- endif %}
   # Build and Push
   - name: "gcr.io/cloud-builders/docker"
     args:
@@ -48,6 +60,10 @@ steps:
         "$_REGION-docker.pkg.dev/$PROJECT_ID/$_ARTIFACT_REGISTRY_REPO_NAME/$_CONTAINER_NAME",
         "--build-arg",
         "COMMIT_SHA=$COMMIT_SHA",
+{%- if cookiecutter.is_adk_a2a %}
+        "--build-arg",
+        "AGENT_VERSION=$(cat /workspace/agent_version.txt || echo '0.0.0')",
+{%- endif %}
         ".",
       ]
   - name: "gcr.io/cloud-builders/docker"
@@ -91,6 +107,18 @@ steps:
       - |
         echo $(gcloud auth print-identity-token -q) > id_token.txt
 {%- elif cookiecutter.deployment_target == 'agent_engine' %}
+{%- if cookiecutter.is_adk_a2a %}
+  # Extract version from pyproject.toml
+  - name: "gcr.io/cloud-builders/gcloud-slim"
+    id: extract-version
+    entrypoint: /bin/bash
+    args:
+      - "-c"
+      - |
+        VERSION=$(awk -F'"' '/^version = / {print $$2}' pyproject.toml || echo '0.0.0')
+        echo "$${VERSION}" > /workspace/agent_version.txt
+
+{%- endif %}
   - name: "python:3.12-slim"
     id: install-dependencies
     entrypoint: /bin/bash
@@ -107,12 +135,15 @@ steps:
     args:
       - "-c"
      - |
+{%- if cookiecutter.is_adk_a2a %}
+        AGENT_VERSION=$(cat /workspace/agent_version.txt || echo '0.0.0')
+{%- endif %}
         uv export --no-hashes --no-sources --no-header --no-dev --no-emit-project --no-annotate --locked > .requirements.txt
-        uv run {{cookiecutter.agent_directory}}
+        uv run -m {{cookiecutter.agent_directory}}.agent_engine_app \
           --project ${_STAGING_PROJECT_ID} \
           --location ${_REGION} \
           --artifacts-bucket-name ${_LOGS_BUCKET_NAME_STAGING} \
-          --set-env-vars="COMMIT_SHA=${COMMIT_SHA}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID=${_DATA_STORE_ID_STAGING},DATA_STORE_REGION=${_DATA_STORE_REGION}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX=${_VECTOR_SEARCH_INDEX_STAGING},VECTOR_SEARCH_INDEX_ENDPOINT=${_VECTOR_SEARCH_INDEX_ENDPOINT_STAGING},VECTOR_SEARCH_BUCKET=${_VECTOR_SEARCH_BUCKET_STAGING}{%- endif %}{%- endif %}"
+          --set-env-vars="COMMIT_SHA=${COMMIT_SHA}{%- if cookiecutter.is_adk_a2a %},AGENT_VERSION=$${AGENT_VERSION}{%- endif %}{%- if cookiecutter.data_ingestion %}{%- if cookiecutter.datastore_type == "vertex_ai_search" %},DATA_STORE_ID=${_DATA_STORE_ID_STAGING},DATA_STORE_REGION=${_DATA_STORE_REGION}{%- elif cookiecutter.datastore_type == "vertex_ai_vector_search" %},VECTOR_SEARCH_INDEX=${_VECTOR_SEARCH_INDEX_STAGING},VECTOR_SEARCH_INDEX_ENDPOINT=${_VECTOR_SEARCH_INDEX_ENDPOINT_STAGING},VECTOR_SEARCH_BUCKET=${_VECTOR_SEARCH_BUCKET_STAGING}{%- endif %}{%- endif %}"
     env:
       - 'PATH=/usr/local/bin:/usr/bin:~/.local/bin'

@@ -175,7 +206,7 @@ steps:
       - |
         export _ID_TOKEN=$(cat id_token.txt)
         export _STAGING_URL=$(cat staging_url.txt)
-        pip install locust==2.31.1 --user
+        pip install locust==2.31.1{%- if cookiecutter.is_adk_a2a %} a2a-sdk~=0.3.9{%- endif %} --user
         locust -f tests/load_test/load_test.py \
           --headless \
           -H $$_STAGING_URL \
@@ -103,9 +103,10 @@ def create_github_connection(
     """
     console.print("\n🔗 Creating GitHub connection...")

-    # First, ensure
-    console.print("🔧 Ensuring
+    # First, ensure required APIs are enabled
+    console.print("🔧 Ensuring required APIs are enabled...")
     try:
+        # Enable Cloud Build API
         run_command(
             [
                 "gcloud",
@@ -120,13 +121,28 @@ def create_github_connection(
         )
         console.print("✅ Cloud Build API enabled")

-        #
+        # Enable Secret Manager API
+        run_command(
+            [
+                "gcloud",
+                "services",
+                "enable",
+                "secretmanager.googleapis.com",
+                "--project",
+                project_id,
+            ],
+            capture_output=True,
+            check=False,  # Don't fail if already enabled
+        )
+        console.print("✅ Secret Manager API enabled")
+
+        # Wait for the APIs to fully initialize and create the service account
         console.print(
             "⏳ Waiting for Cloud Build service account to be created (this typically takes 5-10 seconds)..."
         )
         time.sleep(10)
     except subprocess.CalledProcessError as e:
-        console.print(f"⚠️ Could not enable
+        console.print(f"⚠️ Could not enable required APIs: {e}", style="yellow")

     # Get the Cloud Build service account and grant permissions with retry logic
     try:
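The same "enable the API and tolerate it already being enabled" pattern, sketched with subprocess directly (run_command above is the starter pack's own helper; this standalone variant is only an illustration):

    import subprocess

    def enable_api(service: str, project_id: str) -> None:
        # check=False mirrors the diff: an already-enabled API must not abort the flow.
        subprocess.run(
            ["gcloud", "services", "enable", service, "--project", project_id],
            capture_output=True,
            check=False,
        )

    enable_api("secretmanager.googleapis.com", "example-project")  # hypothetical project ID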
@@ -15,12 +15,12 @@

 """Utility to register an Agent Engine to Gemini Enterprise."""

-import argparse
 import json
 import os
 import sys
 from pathlib import Path

+import click
 import requests
 import vertexai
 from google.auth import default
@@ -53,7 +53,7 @@ def get_agent_engine_id(
             "Please provide --agent-engine-id or deploy your agent first."
         )

-    with open(metadata_path) as f:
+    with open(metadata_path, encoding="utf-8") as f:
         metadata = json.load(f)
     return metadata["remote_agent_engine_id"]

@@ -302,104 +302,99 @@ def register_agent(
         raise


-[47 removed lines (old 305-351) are not rendered in this diff view]
+@click.command()
+@click.option(
+    "--agent-engine-id",
+    envvar="AGENT_ENGINE_ID",
+    help="Agent Engine resource name (e.g., projects/.../reasoningEngines/...). "
+    "If not provided, reads from deployment_metadata.json.",
+)
+@click.option(
+    "--metadata-file",
+    default="deployment_metadata.json",
+    help="Path to deployment metadata file (default: deployment_metadata.json).",
+)
+@click.option(
+    "--gemini-enterprise-app-id",
+    help="Gemini Enterprise app full resource name "
+    "(e.g., projects/{project_number}/locations/{location}/collections/{collection}/engines/{engine_id}). "
+    "Can also be set via ID or GEMINI_ENTERPRISE_APP_ID env var.",
+)
+@click.option(
+    "--display-name",
+    envvar="GEMINI_DISPLAY_NAME",
+    help="Display name for the agent.",
+)
+@click.option(
+    "--description",
+    envvar="GEMINI_DESCRIPTION",
+    help="Description of the agent.",
+)
+@click.option(
+    "--tool-description",
+    envvar="GEMINI_TOOL_DESCRIPTION",
+    help="Description of what the tool does.",
+)
+@click.option(
+    "--project-id",
+    envvar="GOOGLE_CLOUD_PROJECT",
+    help="GCP project ID (extracted from agent-engine-id if not provided).",
+)
+@click.option(
+    "--authorization-id",
+    envvar="GEMINI_AUTHORIZATION_ID",
+    help="OAuth authorization resource name "
+    "(e.g., projects/{project_number}/locations/global/authorizations/{auth_id}).",
+)
+def main(
+    agent_engine_id: str | None,
+    metadata_file: str,
+    gemini_enterprise_app_id: str | None,
+    display_name: str | None,
+    description: str | None,
+    tool_description: str | None,
+    project_id: str | None,
+    authorization_id: str | None,
+) -> None:
+    """Register an Agent Engine to Gemini Enterprise."""
     # Get agent engine ID
     try:
-
+        resolved_agent_engine_id = get_agent_engine_id(agent_engine_id, metadata_file)
     except ValueError as e:
-
-        sys.exit(1)
+        raise click.ClickException(str(e)) from e

     # Auto-detect display_name and description from Agent Engine
-    auto_display_name, auto_description = get_agent_engine_metadata(
+    auto_display_name, auto_description = get_agent_engine_metadata(
+        resolved_agent_engine_id
+    )

-
-
+    # Handle gemini_enterprise_app_id with fallback to ID env var
+    resolved_gemini_enterprise_app_id = (
+        gemini_enterprise_app_id
+        or os.getenv("ID")
+        or os.getenv("GEMINI_ENTERPRISE_APP_ID")
     )
-    if not
-
-            "Error: --gemini-enterprise-app-id or GEMINI_ENTERPRISE_APP_ID env var required"
-            file=sys.stderr,
+    if not resolved_gemini_enterprise_app_id:
+        raise click.ClickException(
+            "Error: --gemini-enterprise-app-id or ID/GEMINI_ENTERPRISE_APP_ID env var required"
         )
-        sys.exit(1)

-
-
-
-        or auto_display_name
-        or "My Agent"
-    )
-    description = (
-        args.description
-        or os.getenv("GEMINI_DESCRIPTION")
-        or auto_description
-        or "AI Agent"
-    )
-    tool_description = (
-        args.tool_description or os.getenv("GEMINI_TOOL_DESCRIPTION") or description
-    )
-    project_id = args.project_id or os.getenv("GOOGLE_CLOUD_PROJECT")
-    authorization_id = args.authorization_id or os.getenv("GEMINI_AUTHORIZATION_ID")
+    resolved_display_name = display_name or auto_display_name or "My Agent"
+    resolved_description = description or auto_description or "AI Agent"
+    resolved_tool_description = tool_description or resolved_description

     try:
         register_agent(
-            agent_engine_id=
-            gemini_enterprise_app_id=
-            display_name=
-            description=
-            tool_description=
+            agent_engine_id=resolved_agent_engine_id,
+            gemini_enterprise_app_id=resolved_gemini_enterprise_app_id,
+            display_name=resolved_display_name,
+            description=resolved_description,
+            tool_description=resolved_tool_description,
             project_id=project_id,
             authorization_id=authorization_id,
         )
     except Exception as e:
-
-        sys.exit(1)
+        raise click.ClickException(f"Error during registration: {e}") from e


 if __name__ == "__main__":
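The switch from argparse to click is what lets the Makefile target drop its long $(if ...) flag list: each option now falls back to an environment variable via envvar=... A minimal, self-contained sketch of that pattern (not the actual script):

    import click

    @click.command()
    @click.option("--display-name", envvar="GEMINI_DISPLAY_NAME", help="Display name for the agent.")
    def demo(display_name: str | None) -> None:
        # GEMINI_DISPLAY_NAME=... demo  and  demo --display-name ...  resolve to the same value here.
        click.echo(display_name or "My Agent")

    if __name__ == "__main__":
        demo()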
@@ -112,6 +112,7 @@ def get_available_agents(deployment_target: str | None = None) -> dict:
     # Define priority agents that should appear first
     PRIORITY_AGENTS = [
         "adk_base",
+        "adk_a2a_base",
         "adk_live",
         "agentic_rag",
         "langgraph_base_react",
@@ -738,6 +739,7 @@ def process_template(
         "tags": tags,
         "is_adk": "adk" in tags,
         "is_adk_live": "adk_live" in tags,
+        "is_adk_a2a": "a2a" in tags,
         "deployment_target": deployment_target or "",
         "cicd_runner": cicd_runner or "google_cloud_build",
         "session_type": session_type or "",
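For context, a small sketch (hypothetical values) of how template tags translate into the cookiecutter flags shown above; a template tagged with "a2a" gets is_adk_a2a set, which switches on the A2A-specific blocks in the Makefile and CI/CD configs earlier in this diff:

    tags = ["adk", "a2a"]  # assumed example, e.g. from the adk_a2a_base templateconfig.yaml
    flags = {
        "is_adk": "adk" in tags,
        "is_adk_live": "adk_live" in tags,
        "is_adk_a2a": "a2a" in tags,
    }
    assert flags["is_adk_a2a"] is True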