agent-starter-pack 0.18.2__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. agent_starter_pack/agents/{langgraph_base_react → adk_a2a_base}/.template/templateconfig.yaml +5 -12
  2. agent_starter_pack/agents/adk_a2a_base/README.md +37 -0
  3. agent_starter_pack/{frontends/streamlit/frontend/style/app_markdown.py → agents/adk_a2a_base/app/__init__.py} +3 -23
  4. agent_starter_pack/agents/adk_a2a_base/app/agent.py +70 -0
  5. agent_starter_pack/agents/adk_a2a_base/notebooks/adk_a2a_app_testing.ipynb +583 -0
  6. agent_starter_pack/agents/{crewai_coding_crew/notebooks/evaluating_crewai_agent.ipynb → adk_a2a_base/notebooks/evaluating_adk_agent.ipynb} +163 -199
  7. agent_starter_pack/agents/adk_a2a_base/tests/integration/test_agent.py +58 -0
  8. agent_starter_pack/agents/adk_base/app/__init__.py +2 -2
  9. agent_starter_pack/agents/adk_base/app/agent.py +3 -0
  10. agent_starter_pack/agents/adk_base/notebooks/adk_app_testing.ipynb +13 -28
  11. agent_starter_pack/agents/adk_live/app/__init__.py +17 -0
  12. agent_starter_pack/agents/adk_live/app/agent.py +3 -0
  13. agent_starter_pack/agents/agentic_rag/app/__init__.py +2 -2
  14. agent_starter_pack/agents/agentic_rag/app/agent.py +3 -0
  15. agent_starter_pack/agents/agentic_rag/notebooks/adk_app_testing.ipynb +13 -28
  16. agent_starter_pack/agents/{crewai_coding_crew → langgraph_base}/.template/templateconfig.yaml +12 -9
  17. agent_starter_pack/agents/langgraph_base/README.md +30 -0
  18. agent_starter_pack/agents/langgraph_base/app/__init__.py +17 -0
  19. agent_starter_pack/agents/{langgraph_base_react → langgraph_base}/app/agent.py +4 -4
  20. agent_starter_pack/agents/{langgraph_base_react → langgraph_base}/tests/integration/test_agent.py +1 -1
  21. agent_starter_pack/base_template/.gitignore +4 -2
  22. agent_starter_pack/base_template/Makefile +110 -16
  23. agent_starter_pack/base_template/README.md +97 -12
  24. agent_starter_pack/base_template/deployment/terraform/dev/apis.tf +4 -6
  25. agent_starter_pack/base_template/deployment/terraform/dev/providers.tf +5 -1
  26. agent_starter_pack/base_template/deployment/terraform/dev/variables.tf +5 -3
  27. agent_starter_pack/base_template/deployment/terraform/dev/{% if cookiecutter.is_adk %}telemetry.tf{% else %}unused_telemetry.tf{% endif %} +193 -0
  28. agent_starter_pack/base_template/deployment/terraform/github.tf +16 -9
  29. agent_starter_pack/base_template/deployment/terraform/locals.tf +7 -7
  30. agent_starter_pack/base_template/deployment/terraform/providers.tf +5 -1
  31. agent_starter_pack/base_template/deployment/terraform/sql/completions.sql +138 -0
  32. agent_starter_pack/base_template/deployment/terraform/storage.tf +0 -9
  33. agent_starter_pack/base_template/deployment/terraform/variables.tf +15 -19
  34. agent_starter_pack/base_template/deployment/terraform/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}build_triggers.tf{% else %}unused_build_triggers.tf{% endif %} +20 -22
  35. agent_starter_pack/base_template/deployment/terraform/{% if cookiecutter.is_adk %}telemetry.tf{% else %}unused_telemetry.tf{% endif %} +206 -0
  36. agent_starter_pack/base_template/pyproject.toml +5 -17
  37. agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'github_actions' %}.github{% else %}unused_github{% endif %}/workflows/deploy-to-prod.yaml +19 -4
  38. agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'github_actions' %}.github{% else %}unused_github{% endif %}/workflows/staging.yaml +36 -11
  39. agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}.cloudbuild{% else %}unused_.cloudbuild{% endif %}/deploy-to-prod.yaml +24 -5
  40. agent_starter_pack/base_template/{% if cookiecutter.cicd_runner == 'google_cloud_build' %}.cloudbuild{% else %}unused_.cloudbuild{% endif %}/staging.yaml +44 -9
  41. agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/telemetry.py +96 -0
  42. agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/{utils → app_utils}/typing.py +4 -6
  43. agent_starter_pack/{agents/crewai_coding_crew/app/crew/config/agents.yaml → base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}converters{% else %}unused_converters{% endif %}/__init__.py } +9 -23
  44. agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}converters{% else %}unused_converters{% endif %}/part_converter.py +138 -0
  45. agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/__init__.py +13 -0
  46. agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/a2a_agent_executor.py +265 -0
  47. agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/app_utils/{% if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}executor{% else %}unused_executor{% endif %}/task_result_aggregator.py +152 -0
  48. agent_starter_pack/cli/commands/create.py +40 -4
  49. agent_starter_pack/cli/commands/enhance.py +1 -1
  50. agent_starter_pack/cli/commands/register_gemini_enterprise.py +1070 -0
  51. agent_starter_pack/cli/main.py +2 -0
  52. agent_starter_pack/cli/utils/cicd.py +20 -4
  53. agent_starter_pack/cli/utils/template.py +257 -25
  54. agent_starter_pack/deployment_targets/agent_engine/tests/integration/test_agent_engine_app.py +113 -16
  55. agent_starter_pack/deployment_targets/agent_engine/tests/load_test/README.md +2 -2
  56. agent_starter_pack/deployment_targets/agent_engine/tests/load_test/load_test.py +178 -9
  57. agent_starter_pack/deployment_targets/agent_engine/tests/{% if cookiecutter.is_a2a %}helpers.py{% else %}unused_helpers.py{% endif %} +138 -0
  58. agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/agent_engine_app.py +193 -307
  59. agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/app_utils/deploy.py +414 -0
  60. agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/{utils → app_utils}/{% if cookiecutter.is_adk_live %}expose_app.py{% else %}unused_expose_app.py{% endif %} +13 -14
  61. agent_starter_pack/deployment_targets/cloud_run/Dockerfile +4 -1
  62. agent_starter_pack/deployment_targets/cloud_run/deployment/terraform/dev/service.tf +85 -86
  63. agent_starter_pack/deployment_targets/cloud_run/deployment/terraform/service.tf +139 -107
  64. agent_starter_pack/deployment_targets/cloud_run/tests/integration/test_server_e2e.py +228 -12
  65. agent_starter_pack/deployment_targets/cloud_run/tests/load_test/README.md +4 -4
  66. agent_starter_pack/deployment_targets/cloud_run/tests/load_test/load_test.py +92 -12
  67. agent_starter_pack/deployment_targets/cloud_run/{{cookiecutter.agent_directory}}/{server.py → fast_api_app.py} +194 -121
  68. agent_starter_pack/frontends/adk_live_react/frontend/package-lock.json +18 -18
  69. agent_starter_pack/frontends/adk_live_react/frontend/src/multimodal-live-types.ts +5 -3
  70. agent_starter_pack/resources/docs/adk-cheatsheet.md +198 -41
  71. agent_starter_pack/resources/locks/uv-adk_a2a_base-agent_engine.lock +4966 -0
  72. agent_starter_pack/resources/locks/uv-adk_a2a_base-cloud_run.lock +5011 -0
  73. agent_starter_pack/resources/locks/uv-adk_base-agent_engine.lock +1443 -709
  74. agent_starter_pack/resources/locks/uv-adk_base-cloud_run.lock +1058 -874
  75. agent_starter_pack/resources/locks/uv-adk_live-agent_engine.lock +1443 -709
  76. agent_starter_pack/resources/locks/uv-adk_live-cloud_run.lock +1058 -874
  77. agent_starter_pack/resources/locks/uv-agentic_rag-agent_engine.lock +1568 -749
  78. agent_starter_pack/resources/locks/uv-agentic_rag-cloud_run.lock +1123 -929
  79. agent_starter_pack/resources/locks/{uv-langgraph_base_react-agent_engine.lock → uv-langgraph_base-agent_engine.lock} +1714 -1689
  80. agent_starter_pack/resources/locks/{uv-langgraph_base_react-cloud_run.lock → uv-langgraph_base-cloud_run.lock} +1285 -2374
  81. agent_starter_pack/utils/watch_and_rebuild.py +1 -1
  82. {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/METADATA +3 -6
  83. {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/RECORD +89 -93
  84. agent_starter_pack-0.21.0.dist-info/entry_points.txt +2 -0
  85. llm.txt +4 -5
  86. agent_starter_pack/agents/crewai_coding_crew/README.md +0 -34
  87. agent_starter_pack/agents/crewai_coding_crew/app/agent.py +0 -47
  88. agent_starter_pack/agents/crewai_coding_crew/app/crew/config/tasks.yaml +0 -37
  89. agent_starter_pack/agents/crewai_coding_crew/app/crew/crew.py +0 -71
  90. agent_starter_pack/agents/crewai_coding_crew/tests/integration/test_agent.py +0 -47
  91. agent_starter_pack/agents/langgraph_base_react/README.md +0 -9
  92. agent_starter_pack/agents/langgraph_base_react/notebooks/evaluating_langgraph_agent.ipynb +0 -1574
  93. agent_starter_pack/base_template/deployment/terraform/dev/log_sinks.tf +0 -69
  94. agent_starter_pack/base_template/deployment/terraform/log_sinks.tf +0 -79
  95. agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/utils/tracing.py +0 -155
  96. agent_starter_pack/cli/utils/register_gemini_enterprise.py +0 -406
  97. agent_starter_pack/deployment_targets/agent_engine/deployment/terraform/{% if not cookiecutter.is_adk_live %}service.tf{% else %}unused_service.tf{% endif %} +0 -82
  98. agent_starter_pack/deployment_targets/agent_engine/notebooks/intro_agent_engine.ipynb +0 -1025
  99. agent_starter_pack/deployment_targets/agent_engine/{{cookiecutter.agent_directory}}/utils/deployment.py +0 -99
  100. agent_starter_pack/frontends/streamlit/frontend/side_bar.py +0 -214
  101. agent_starter_pack/frontends/streamlit/frontend/streamlit_app.py +0 -265
  102. agent_starter_pack/frontends/streamlit/frontend/utils/chat_utils.py +0 -67
  103. agent_starter_pack/frontends/streamlit/frontend/utils/local_chat_history.py +0 -127
  104. agent_starter_pack/frontends/streamlit/frontend/utils/message_editing.py +0 -59
  105. agent_starter_pack/frontends/streamlit/frontend/utils/multimodal_utils.py +0 -217
  106. agent_starter_pack/frontends/streamlit/frontend/utils/stream_handler.py +0 -310
  107. agent_starter_pack/frontends/streamlit/frontend/utils/title_summary.py +0 -94
  108. agent_starter_pack/resources/locks/uv-crewai_coding_crew-agent_engine.lock +0 -6650
  109. agent_starter_pack/resources/locks/uv-crewai_coding_crew-cloud_run.lock +0 -7825
  110. agent_starter_pack-0.18.2.dist-info/entry_points.txt +0 -3
  111. /agent_starter_pack/agents/{crewai_coding_crew → langgraph_base}/notebooks/evaluating_langgraph_agent.ipynb +0 -0
  112. /agent_starter_pack/base_template/{{cookiecutter.agent_directory}}/{utils → app_utils}/gcs.py +0 -0
  113. {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/WHEEL +0 -0
  114. {agent_starter_pack-0.18.2.dist-info → agent_starter_pack-0.21.0.dist-info}/licenses/LICENSE +0 -0
@@ -12,11 +12,15 @@ This project is organized as follows:
  ├── {{cookiecutter.agent_directory}}/ # Core application code
  │ ├── agent.py # Main agent logic
  {%- if cookiecutter.deployment_target == 'cloud_run' %}
- │ ├── server.py # FastAPI Backend server
+ │ ├── fast_api_app.py # FastAPI Backend server
  {%- elif cookiecutter.deployment_target == 'agent_engine' %}
  │ ├── agent_engine_app.py # Agent Engine application logic
  {%- endif %}
- │ └── utils/ # Utility functions and helpers
+ │ └── app_utils/ # App utilities and helpers
+ {%- if cookiecutter.is_a2a and cookiecutter.agent_name == 'langgraph_base' %}
+ │ ├── executor/ # A2A protocol executor implementation
+ │ └── converters/ # Message converters for A2A protocol
+ {%- endif %}
  {%- if cookiecutter.cicd_runner == 'google_cloud_build' %}
  ├── .cloudbuild/ # CI/CD pipeline configurations for Google Cloud Build
  {%- elif cookiecutter.cicd_runner == 'github_actions' %}
@@ -62,7 +66,7 @@ make install && make playground
  | `make deploy` | Deploy agent to Cloud Run (use `IAP=true` to enable Identity-Aware Proxy, `PORT=8080` to specify container port) |
  | `make local-backend` | Launch local development server with hot-reload |
  {%- elif cookiecutter.deployment_target == 'agent_engine' %}
- | `make playground` | Launch Streamlit interface for testing agent locally and remotely |
+ | `make playground` | Launch local development environment for testing agent |
  | `make deploy` | Deploy agent to Agent Engine |
  {%- if cookiecutter.is_adk_live %}
  | `make local-backend` | Launch local development server with hot-reload |
@@ -71,9 +75,12 @@ make install && make playground
  | `make playground-remote` | Connect to remote deployed agent with local frontend |
  | `make build-frontend` | Build the frontend for production |
  {%- endif %}
- {%- if cookiecutter.is_adk %}
- | `make register-gemini-enterprise` | Register deployed agent to Gemini Enterprise (see Makefile for parameters) |
- {%- endif %}
+ {%- if cookiecutter.is_adk or cookiecutter.is_a2a %}
+ | `make register-gemini-enterprise` | Register deployed agent to Gemini Enterprise ([docs](https://googlecloudplatform.github.io/agent-starter-pack/cli/register_gemini_enterprise.html)) |
+ {%- endif -%}
+ {%- endif -%}
+ {%- if cookiecutter.is_a2a %}
+ | `make inspector` | Launch A2A Protocol Inspector to test your agent implementation |
  {%- endif %}
  | `make test` | Run unit and integration tests |
  | `make lint` | Run code quality checks (codespell, ruff, mypy) |
@@ -84,6 +91,72 @@ make install && make playground

  For full command options and usage, refer to the [Makefile](Makefile).

+ {%- if cookiecutter.is_a2a %}
+
+ ## Using the A2A Inspector
+
+ This agent implements the [Agent2Agent (A2A) Protocol](https://a2a-protocol.org/), enabling interoperability with agents across different frameworks and languages.
+
+ The [A2A Inspector](https://github.com/a2aproject/a2a-inspector) provides the following core features:
+ - 🔍 View agent card and capabilities
+ - ✅ Validate A2A specification compliance
+ - 💬 Test communication with live chat interface
+ - 🐛 Debug with the raw message console
+
+ ### Local Testing
+ {%- if cookiecutter.deployment_target == 'cloud_run' %}
+
+ 1. Start your agent:
+ ```bash
+ make local-backend
+ ```
+
+ 2. In a separate terminal, launch the A2A Protocol Inspector:
+ ```bash
+ make inspector
+ ```
+
+ 3. Open http://localhost:5001 and connect to `http://localhost:8000`
+ {%- else %}
+
+ > **Note:** For Agent Engine deployments, local testing with A2A endpoints requires deployment first, as `make playground` uses the ADK web interface. For local development, use `make playground`. To test A2A protocol compliance, follow the Remote Testing instructions below.
+ {%- endif %}
+
+ ### Remote Testing
+
+ 1. Deploy your agent:
+ ```bash
+ make deploy
+ ```
+
+ 2. Launch the inspector:
+ ```bash
+ make inspector
+ ```
+
+ 3. Get an authentication token:
+ ```bash
+ {%- if cookiecutter.deployment_target == 'cloud_run' %}
+ gcloud auth print-identity-token
+ {%- else %}
+ gcloud auth print-access-token
+ {%- endif %}
+ ```
+
+ 4. In the inspector UI at http://localhost:5001:
+ - Add an HTTP header with name: `Authorization`
+ - Set the value to: `Bearer <your-token-from-step-3>`
+ {%- if cookiecutter.deployment_target == 'cloud_run' %}
+ - Connect to your deployed Cloud Run URL
+ {%- else %}
+ - Connect to your Agent Engine URL using this format:
+ ```
+ https://us-central1-aiplatform.googleapis.com/v1beta1/projects/{PROJECT_ID}/locations/{REGION}/reasoningEngines/{ENGINE_ID}/a2a/v1/card
+ ```
+ Find your `PROJECT_ID`, `REGION`, and `ENGINE_ID` in the `latest_deployment_metadata.json` file created after deployment.
+ {%- endif %}
+ {%- endif %}
+
  {% if cookiecutter.is_adk_live %}
  ## Usage

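For a quick check outside the inspector UI, the agent card added by this template can also be fetched directly with `curl`. This is a hedged sketch, not part of the template: the Cloud Run well-known path and the placeholder host/IDs are assumptions (the exact card path depends on the a2a-sdk version); the Agent Engine URL format is the one documented in the README hunk above.

```bash
# Sketch: fetch the A2A agent card directly (placeholder URLs/paths are illustrative assumptions).

# Cloud Run: the card is commonly served under /.well-known/ (path may vary by a2a-sdk version).
curl -H "Authorization: Bearer $(gcloud auth print-identity-token)" \
  "https://YOUR-CLOUD-RUN-URL/.well-known/agent-card.json"

# Agent Engine: card URL format as documented above; substitute PROJECT_ID, REGION, ENGINE_ID.
curl -H "Authorization: Bearer $(gcloud auth print-access-token)" \
  "https://us-central1-aiplatform.googleapis.com/v1beta1/projects/PROJECT_ID/locations/REGION/reasoningEngines/ENGINE_ID/a2a/v1/card"
```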
@@ -124,7 +197,7 @@ Here’s the recommended workflow for local development:
  ```bash
  make ui
  ```
- This launches the Streamlit application, which connects to the backend server at `http://localhost:8000`.
+ This launches the frontend application, which connects to the backend server at `http://localhost:8000`.
  </details>
  <br>

@@ -143,9 +216,9 @@ This template follows a "bring your own agent" approach - you focus on your busi

  1. **Prototype:** Build your Generative AI Agent using the intro notebooks in `notebooks/` for guidance. Use Vertex AI Evaluation to assess performance.
  2. **Integrate:** Import your agent into the app by editing `{{cookiecutter.agent_directory}}/agent.py`.
- 3. **Test:** Explore your agent functionality using the Streamlit playground with `make playground`. The playground offers features like chat history, user feedback, and various input types, and automatically reloads your agent on code changes.
+ 3. **Test:** Explore your agent functionality using the local playground with `make playground`. The playground automatically reloads your agent on code changes.
  4. **Deploy:** Set up and initiate the CI/CD pipelines, customizing tests as necessary. Refer to the [deployment section](#deployment) for comprehensive instructions. For streamlined infrastructure deployment, simply run `uvx agent-starter-pack setup-cicd`. Check out the [`agent-starter-pack setup-cicd` CLI command](https://googlecloudplatform.github.io/agent-starter-pack/cli/setup_cicd.html). Currently supports GitHub with both Google Cloud Build and GitHub Actions as CI/CD runners.
- 5. **Monitor:** Track performance and gather insights using Cloud Logging, Tracing, and the Looker Studio dashboard to iterate on your application.
+ 5. **Monitor:** Track performance and gather insights using BigQuery telemetry data, Cloud Logging, and Cloud Trace to iterate on your application.

  The project includes a `GEMINI.md` file that provides context for AI tools like Gemini CLI when asking questions about your template.
  {% endif %}
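For reference, the infrastructure bootstrap mentioned in step 4 of the workflow above is a single command run from the project root. A minimal sketch; the command may prompt interactively for project IDs and repository details depending on the flags you pass.

```bash
# Sketch: bootstrap CI/CD infrastructure as described in the template README's step 4.
uvx agent-starter-pack setup-cicd
```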
@@ -175,8 +248,20 @@ The repository includes a Terraform configuration for the setup of a production

  {% if not cookiecutter.is_adk_live %}
  ## Monitoring and Observability
- > You can use [this Looker Studio dashboard]({%- if cookiecutter.is_adk %}https://lookerstudio.google.com/reporting/46b35167-b38b-4e44-bd37-701ef4307418/page/tEnnC{%- else %}https://lookerstudio.google.com/c/reporting/fa742264-4b4b-4c56-81e6-a667dd0f853f/page/tEnnC{%- endif %}
- ) template for visualizing events being logged in BigQuery. See the "Setup Instructions" tab to getting started.

- The application uses OpenTelemetry for comprehensive observability with all events being sent to Google Cloud Trace and Logging for monitoring and to BigQuery for long term storage.
+ The application uses [OpenTelemetry GenAI instrumentation](https://opentelemetry.io/docs/specs/semconv/gen-ai/) for comprehensive observability. Telemetry data is automatically captured and exported to:
+
+ - **Google Cloud Storage**: GenAI telemetry in JSONL format for efficient querying
+ - **BigQuery**: External tables and linked datasets provide immediate access to telemetry data via SQL queries
+ - **Cloud Logging**: Dedicated logging bucket with 10-year retention for GenAI operation logs
+
+ **Query your telemetry data:**
+
+ ```bash
+ # Example: Query recent completions
+ bq query --use_legacy_sql=false \
+ "SELECT * FROM \`{{cookiecutter.project_name}}_telemetry.completions\` LIMIT 10"
+ ```
+
+ For detailed setup instructions, example queries, testing in dev, and optional dashboard visualization, see the [starter pack observability guide](https://googlecloudplatform.github.io/agent-starter-pack/guide/observability.html).
  {%- endif %}
@@ -25,12 +25,10 @@ locals {
  "serviceusage.googleapis.com",
  "logging.googleapis.com",
  "cloudtrace.googleapis.com",
- {%- if cookiecutter.is_adk and cookiecutter.session_type == "alloydb" %}
- "compute.googleapis.com",
- "servicenetworking.googleapis.com",
- "alloydb.googleapis.com",
- "secretmanager.googleapis.com",
- "dns.googleapis.com"
+ "telemetry.googleapis.com",
+ {%- if cookiecutter.is_adk and cookiecutter.session_type == "cloud_sql" %}
+ "sqladmin.googleapis.com",
+ "secretmanager.googleapis.com"
  {%- endif %}
  ]
  }
@@ -17,7 +17,11 @@ terraform {
  required_providers {
  google = {
  source = "hashicorp/google"
- version = "> 7.0.0"
+ version = "~> 7.10.0"
+ }
+ random = {
+ source = "hashicorp/random"
+ version = "~> 3.7.0"
  }
  }
  }
@@ -49,15 +49,17 @@ variable "app_sa_roles" {
  description = "List of roles to assign to the application service account"
  type = list(string)
  default = [
- {%- if cookiecutter.session_type == "alloydb" %}
- "roles/secretmanager.secretAccessor",
- {%- endif %}
+
  "roles/aiplatform.user",
  "roles/discoveryengine.editor",
  "roles/logging.logWriter",
  "roles/cloudtrace.agent",
  "roles/storage.admin",
  "roles/serviceusage.serviceUsageConsumer",
+ {%- if cookiecutter.session_type == "cloud_sql" %}
+ "roles/cloudsql.client",
+ "roles/secretmanager.secretAccessor",
+ {%- endif %}
  ]
  }
  {% if cookiecutter.data_ingestion %}
@@ -0,0 +1,193 @@
+ # Copyright 2025 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # BigQuery dataset for telemetry external tables
+ resource "google_bigquery_dataset" "telemetry_dataset" {
+ project = var.dev_project_id
+ dataset_id = replace("${var.project_name}_telemetry", "-", "_")
+ friendly_name = "${var.project_name} Telemetry"
+ location = var.region
+ description = "Dataset for GenAI telemetry data stored in GCS"
+ depends_on = [google_project_service.services]
+ }
+
+ # BigQuery connection for accessing GCS telemetry data
+ resource "google_bigquery_connection" "genai_telemetry_connection" {
+ project = var.dev_project_id
+ location = var.region
+ connection_id = "${var.project_name}-genai-telemetry"
+ friendly_name = "${var.project_name} GenAI Telemetry Connection"
+
+ cloud_resource {}
+
+ depends_on = [google_project_service.services]
+ }
+
+ # Wait for the BigQuery connection service account to propagate in IAM
+ resource "time_sleep" "wait_for_bq_connection_sa" {
+ create_duration = "10s"
+
+ depends_on = [google_bigquery_connection.genai_telemetry_connection]
+ }
+
+ # Grant the BigQuery connection service account access to read from the logs bucket
+ resource "google_storage_bucket_iam_member" "telemetry_connection_access" {
+ bucket = google_storage_bucket.logs_data_bucket.name
+ role = "roles/storage.objectViewer"
+ member = "serviceAccount:${google_bigquery_connection.genai_telemetry_connection.cloud_resource[0].service_account_id}"
+
+ depends_on = [time_sleep.wait_for_bq_connection_sa]
+ }
+
+ # ====================================================================
+ # Dedicated Cloud Logging Bucket for GenAI Telemetry
+ # ====================================================================
+
+ # Create a custom Cloud Logging bucket for GenAI telemetry logs with long-term retention
+ resource "google_logging_project_bucket_config" "genai_telemetry_bucket" {
+ project = var.dev_project_id
+ location = var.region
+ bucket_id = "${var.project_name}-genai-telemetry"
+ retention_days = 3650 # 10 years retention (maximum allowed)
+ enable_analytics = true # Required for linked datasets
+ description = "Dedicated Cloud Logging bucket for ${var.project_name} GenAI telemetry with 10 year retention"
+
+ depends_on = [google_project_service.services]
+ }
+
+ # Log sink to route only GenAI telemetry logs to the dedicated bucket
+ # Filter by bucket name in the GCS path (which includes project_name) to isolate this agent's logs
+ resource "google_logging_project_sink" "genai_logs_to_bucket" {
+ name = "${var.project_name}-genai-logs"
+ project = var.dev_project_id
+ destination = "logging.googleapis.com/projects/${var.dev_project_id}/locations/${var.region}/buckets/${google_logging_project_bucket_config.genai_telemetry_bucket.bucket_id}"
+ filter = "log_name=\"projects/${var.dev_project_id}/logs/gen_ai.client.inference.operation.details\" AND (labels.\"gen_ai.input.messages_ref\" =~ \".*${var.project_name}.*\" OR labels.\"gen_ai.output.messages_ref\" =~ \".*${var.project_name}.*\")"
+
+ unique_writer_identity = true
+ depends_on = [google_logging_project_bucket_config.genai_telemetry_bucket]
+ }
+
+ # Create a linked dataset to the GenAI telemetry logs bucket for querying via BigQuery
+ resource "google_logging_linked_dataset" "genai_logs_linked_dataset" {
+ link_id = replace("${var.project_name}_genai_telemetry_logs", "-", "_")
+ bucket = google_logging_project_bucket_config.genai_telemetry_bucket.bucket_id
+ description = "Linked dataset for ${var.project_name} GenAI telemetry Cloud Logging bucket"
+ location = var.region
+ parent = "projects/${var.dev_project_id}"
+
+ depends_on = [
+ google_logging_project_bucket_config.genai_telemetry_bucket,
+ google_logging_project_sink.genai_logs_to_bucket
+ ]
+ }
+
+ # Wait for linked dataset to fully propagate
+ resource "time_sleep" "wait_for_linked_dataset" {
+ create_duration = "10s"
+
+ depends_on = [google_logging_linked_dataset.genai_logs_linked_dataset]
+ }
+
+ # ====================================================================
+ # Feedback Logs to Cloud Logging Bucket
+ # ====================================================================
+
+ # Log sink for user feedback logs - routes to the same Cloud Logging bucket
+ resource "google_logging_project_sink" "feedback_logs_to_bucket" {
+ name = "${var.project_name}-feedback"
+ project = var.dev_project_id
+ destination = "logging.googleapis.com/projects/${var.dev_project_id}/locations/${var.region}/buckets/${google_logging_project_bucket_config.genai_telemetry_bucket.bucket_id}"
+ filter = var.feedback_logs_filter
+
+ unique_writer_identity = true
+ depends_on = [google_logging_project_bucket_config.genai_telemetry_bucket]
+ }
+
+ # ====================================================================
+ # Completions External Table (GCS-based)
+ # ====================================================================
+
+ # External table for completions data (messages/parts) stored in GCS
+ resource "google_bigquery_table" "completions_external_table" {
+ project = var.dev_project_id
+ dataset_id = google_bigquery_dataset.telemetry_dataset.dataset_id
+ table_id = "completions"
+ deletion_protection = false
+
+ external_data_configuration {
+ autodetect = false
+ source_format = "NEWLINE_DELIMITED_JSON"
+ source_uris = ["gs://${google_storage_bucket.logs_data_bucket.name}/completions/*"]
+ connection_id = google_bigquery_connection.genai_telemetry_connection.name
+ ignore_unknown_values = true
+ max_bad_records = 1000
+ }
+
+ # Schema matching the ADK completions format
+ schema = jsonencode([
+ {
+ name = "parts"
+ type = "RECORD"
+ mode = "REPEATED"
+ fields = [
+ { name = "type", type = "STRING", mode = "NULLABLE" },
+ { name = "content", type = "STRING", mode = "NULLABLE" },
+ { name = "mime_type", type = "STRING", mode = "NULLABLE" },
+ { name = "uri", type = "STRING", mode = "NULLABLE" },
+ { name = "data", type = "BYTES", mode = "NULLABLE" },
+ { name = "id", type = "STRING", mode = "NULLABLE" },
+ { name = "name", type = "STRING", mode = "NULLABLE" },
+ { name = "arguments", type = "JSON", mode = "NULLABLE" },
+ { name = "response", type = "JSON", mode = "NULLABLE" }
+ ]
+ },
+ { name = "role", type = "STRING", mode = "NULLABLE" },
+ { name = "index", type = "INTEGER", mode = "NULLABLE" }
+ ])
+
+ depends_on = [
+ google_storage_bucket.logs_data_bucket,
+ google_bigquery_connection.genai_telemetry_connection,
+ google_storage_bucket_iam_member.telemetry_connection_access
+ ]
+ }
+
+ # ====================================================================
+ # Completions View (Joins Logs with GCS Data)
+ # ====================================================================
+
+ # View that joins Cloud Logging data with GCS-stored completions data
+ resource "google_bigquery_table" "completions_view" {
+ project = var.dev_project_id
+ dataset_id = google_bigquery_dataset.telemetry_dataset.dataset_id
+ table_id = "completions_view"
+ description = "View of GenAI completion logs joined with the GCS prompt/response external table"
+ deletion_protection = false
+
+ view {
+ query = templatefile("${path.module}/../sql/completions.sql", {
+ project_id = var.dev_project_id
+ dataset_id = google_bigquery_dataset.telemetry_dataset.dataset_id
+ completions_external_table = google_bigquery_table.completions_external_table.table_id
+ logs_link_id = google_logging_linked_dataset.genai_logs_linked_dataset.link_id
+ })
+ use_legacy_sql = false
+ }
+
+ depends_on = [
+ google_logging_linked_dataset.genai_logs_linked_dataset,
+ google_bigquery_table.completions_external_table,
+ time_sleep.wait_for_linked_dataset
+ ]
+ }
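Once this Terraform is applied, the `completions_view` it defines can be queried from the `bq` CLI. A minimal sketch, assuming the dev project and the `<project_name>_telemetry` dataset naming produced by the `replace(...)` expression above; substitute your own PROJECT_ID and DATASET, and note the selected columns come from the view's output schema.

```bash
# Sketch: inspect recent GenAI completions via the view created by the Terraform above.
# DATASET follows the Terraform naming: "<project_name>_telemetry" with "-" replaced by "_".
bq query --use_legacy_sql=false \
  "SELECT timestamp, trace, message_type, role, content
   FROM \`PROJECT_ID.DATASET.completions_view\`
   ORDER BY timestamp DESC
   LIMIT 20"
```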
@@ -98,13 +98,6 @@ resource "github_actions_variable" "cicd_project_id" {
  depends_on = [github_repository.repo]
  }

- resource "github_actions_variable" "bucket_name_load_test_results" {
- repository = var.repository_name
- variable_name = "BUCKET_NAME_LOAD_TEST_RESULTS"
- value = google_storage_bucket.bucket_load_test_results.name
- depends_on = [github_repository.repo]
- }
-
  resource "github_actions_variable" "app_sa_email_staging" {
  repository = var.repository_name
  variable_name = "APP_SA_EMAIL_STAGING"
@@ -119,17 +112,31 @@ resource "github_actions_variable" "app_sa_email_prod" {
  depends_on = [github_repository.repo]
  }

+ resource "github_actions_variable" "app_service_account_staging" {
+ repository = var.repository_name
+ variable_name = "APP_SERVICE_ACCOUNT_STAGING"
+ value = google_service_account.app_sa["staging"].email
+ depends_on = [github_repository.repo]
+ }
+
+ resource "github_actions_variable" "app_service_account_prod" {
+ repository = var.repository_name
+ variable_name = "APP_SERVICE_ACCOUNT_PROD"
+ value = google_service_account.app_sa["prod"].email
+ depends_on = [github_repository.repo]
+ }
+
  resource "github_actions_variable" "logs_bucket_name_staging" {
  repository = var.repository_name
  variable_name = "LOGS_BUCKET_NAME_STAGING"
- value = google_storage_bucket.logs_data_bucket[var.staging_project_id].url
+ value = google_storage_bucket.logs_data_bucket[var.staging_project_id].name
  depends_on = [github_repository.repo]
  }

  resource "github_actions_variable" "logs_bucket_name_prod" {
  repository = var.repository_name
  variable_name = "LOGS_BUCKET_NAME_PROD"
- value = google_storage_bucket.logs_data_bucket[var.prod_project_id].url
+ value = google_storage_bucket.logs_data_bucket[var.prod_project_id].name
  depends_on = [github_repository.repo]
  }

@@ -20,7 +20,10 @@ locals {
  "serviceusage.googleapis.com",
  "bigquery.googleapis.com",
  "cloudresourcemanager.googleapis.com",
- "cloudtrace.googleapis.com"
+ "cloudtrace.googleapis.com",
+ {%- if cookiecutter.is_adk and cookiecutter.session_type == "cloud_sql" %}
+ "sqladmin.googleapis.com",
+ {%- endif %}
  ]

  deploy_project_services = [
@@ -33,12 +36,9 @@ locals {
  "serviceusage.googleapis.com",
  "logging.googleapis.com",
  "cloudtrace.googleapis.com",
- {%- if cookiecutter.is_adk and cookiecutter.session_type == "alloydb" %}
- "compute.googleapis.com",
- "servicenetworking.googleapis.com",
- "alloydb.googleapis.com",
- "secretmanager.googleapis.com",
- "dns.googleapis.com"
+ {%- if cookiecutter.is_adk and cookiecutter.session_type == "cloud_sql" %}
+ "sqladmin.googleapis.com",
+ "secretmanager.googleapis.com"
  {%- endif %}
  ]

@@ -18,12 +18,16 @@ terraform {
  required_providers {
  google = {
  source = "hashicorp/google"
- version = "> 7.0.0"
+ version = "~> 7.10.0"
  }
  github = {
  source = "integrations/github"
  version = "~> 6.5.0"
  }
+ random = {
+ source = "hashicorp/random"
+ version = "~> 3.7.0"
+ }
  }
  }

@@ -0,0 +1,138 @@
+ -- Optimized join of Cloud Logging data with GCS-stored prompt/response data.
+ -- This query extracts both input and output messages referenced in logs.
+ -- Note: Input files contain full conversation history, so messages may appear multiple times.
+
+ -- Extract message references from Cloud Logging (scan once, extract both input/output)
+ WITH log_refs AS (
+ SELECT
+ insert_id,
+ timestamp,
+ labels,
+ trace,
+ span_id,
+ JSON_VALUE(labels, '$.\"gen_ai.input.messages_ref\"') AS input_ref,
+ JSON_VALUE(labels, '$.\"gen_ai.output.messages_ref\"') AS output_ref
+ FROM `${project_id}.${logs_link_id}._AllLogs`
+ WHERE JSON_VALUE(labels, '$.\"gen_ai.input.messages_ref\"') IS NOT NULL
+ OR JSON_VALUE(labels, '$.\"gen_ai.output.messages_ref\"') IS NOT NULL
+ ),
+
+ -- Unpivot to get one row per message reference
+ unpivoted_refs AS (
+ SELECT
+ insert_id,
+ timestamp,
+ labels,
+ trace,
+ span_id,
+ input_ref AS messages_ref_uri,
+ 'input' AS message_type
+ FROM log_refs
+ WHERE input_ref IS NOT NULL
+
+ UNION ALL
+
+ SELECT
+ insert_id,
+ timestamp,
+ labels,
+ trace,
+ span_id,
+ output_ref AS messages_ref_uri,
+ 'output' AS message_type
+ FROM log_refs
+ WHERE output_ref IS NOT NULL
+ ),
+
+ -- Join with completions external table and extract api_call_id once
+ joined_data AS (
+ SELECT
+ lr.insert_id,
+ lr.timestamp,
+ lr.labels,
+ lr.trace,
+ lr.span_id,
+ lr.messages_ref_uri,
+ lr.message_type,
+ SPLIT(REGEXP_EXTRACT(lr.messages_ref_uri, r'/([^/]+)\.jsonl'), '_')[OFFSET(0)] AS api_call_id,
+ c.role,
+ c.parts,
+ c.index AS message_idx
+ FROM unpivoted_refs lr
+ JOIN `${project_id}.${dataset_id}.${completions_external_table}` c
+ ON lr.messages_ref_uri = c._FILE_NAME
+ ),
+
+ -- Flatten the parts array
+ flattened AS (
+ SELECT
+ insert_id,
+ timestamp,
+ labels,
+ trace,
+ span_id,
+ messages_ref_uri,
+ message_type,
+ api_call_id,
+ role,
+ message_idx,
+ part_idx,
+ part.type AS part_type,
+ part.content,
+ part.uri,
+ part.mime_type,
+ TO_HEX(MD5(part.data)) AS data_md5_hex,
+ part.id AS tool_id,
+ part.name AS tool_name,
+ part.arguments AS tool_args,
+ part.response AS tool_response
+ FROM joined_data
+ CROSS JOIN UNNEST(parts) AS part WITH OFFSET AS part_idx
+ ),
+
+ -- Deduplicate by trace: keep only the latest log entry per trace
+ -- (Tool calls create multiple log entries with same trace but different timestamps)
+ deduplicated AS (
+ SELECT
+ *,
+ ROW_NUMBER() OVER (
+ PARTITION BY trace, message_type, role, message_idx, part_idx
+ ORDER BY timestamp DESC
+ ) AS row_num
+ FROM flattened
+ )
+
+ SELECT
+ -- Core identifiers and timestamps
+ timestamp,
+ insert_id,
+ trace,
+ span_id,
+ api_call_id,
+
+ -- Message metadata
+ message_type,
+ role,
+ message_idx,
+ part_idx,
+
+ -- Message content
+ content,
+
+ -- Tool/function calling
+ part_type,
+ tool_name,
+ tool_args,
+ tool_response,
+
+ -- Additional metadata
+ uri,
+ mime_type,
+ data_md5_hex,
+
+ -- Raw fields
+ labels,
+ messages_ref_uri
+ FROM deduplicated
+ WHERE row_num = 1 -- Keep only the latest entry per trace/message/part
+ ORDER BY trace ASC, message_type ASC, message_idx ASC, part_idx ASC
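The view above also exposes the flattened tool-calling columns (`tool_name`, `tool_args`, `tool_response`), which makes function-call auditing a simple filter. A hedged sketch using the same `bq` CLI pattern as the README example; PROJECT_ID and DATASET are placeholders you would substitute with your own values.

```bash
# Sketch: list recent tool/function calls captured in the telemetry view defined above.
bq query --use_legacy_sql=false \
  "SELECT timestamp, trace, tool_name, tool_args, tool_response
   FROM \`PROJECT_ID.DATASET.completions_view\`
   WHERE tool_name IS NOT NULL
   ORDER BY timestamp DESC
   LIMIT 20"
```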
@@ -17,15 +17,6 @@ provider "google" {
  user_project_override = true
  }

- resource "google_storage_bucket" "bucket_load_test_results" {
- name = "${var.cicd_runner_project_id}-${var.project_name}-load-test"
- location = var.region
- project = var.cicd_runner_project_id
- uniform_bucket_level_access = true
- force_destroy = true
- depends_on = [resource.google_project_service.cicd_services, resource.google_project_service.deploy_project_services]
- }
-
  resource "google_storage_bucket" "logs_data_bucket" {
  for_each = toset(local.all_project_ids)
  name = "${each.value}-${var.project_name}-logs"