agent-starter-pack 0.12.0__py3-none-any.whl → 0.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. {agent_starter_pack-0.12.0.dist-info → agent_starter_pack-0.13.0.dist-info}/METADATA +2 -1
  2. {agent_starter_pack-0.12.0.dist-info → agent_starter_pack-0.13.0.dist-info}/RECORD +27 -29
  3. src/base_template/Makefile +18 -6
  4. src/base_template/README.md +13 -48
  5. src/cli/commands/create.py +54 -53
  6. src/cli/commands/enhance.py +6 -2
  7. src/cli/commands/list.py +65 -26
  8. src/cli/utils/gcp.py +122 -1
  9. src/cli/utils/remote_template.py +175 -4
  10. src/cli/utils/template.py +72 -1
  11. src/deployment_targets/cloud_run/Dockerfile +16 -0
  12. src/deployment_targets/cloud_run/{{cookiecutter.agent_directory}}/server.py +49 -1
  13. src/frontends/live_api_react/frontend/package-lock.json +9 -9
  14. src/frontends/live_api_react/frontend/src/App.tsx +3 -2
  15. src/frontends/live_api_react/frontend/src/utils/multimodal-live-client.ts +3 -1
  16. src/resources/locks/uv-adk_base-agent_engine.lock +452 -452
  17. src/resources/locks/uv-adk_base-cloud_run.lock +571 -568
  18. src/resources/locks/uv-agentic_rag-agent_engine.lock +565 -566
  19. src/resources/locks/uv-agentic_rag-cloud_run.lock +716 -713
  20. src/resources/locks/uv-crewai_coding_crew-agent_engine.lock +729 -735
  21. src/resources/locks/uv-crewai_coding_crew-cloud_run.lock +923 -940
  22. src/resources/locks/uv-langgraph_base_react-agent_engine.lock +658 -664
  23. src/resources/locks/uv-langgraph_base_react-cloud_run.lock +852 -869
  24. src/resources/locks/uv-live_api-cloud_run.lock +758 -775
  25. src/resources/locks/uv-adk_gemini_fullstack-agent_engine.lock +0 -3938
  26. src/resources/locks/uv-adk_gemini_fullstack-cloud_run.lock +0 -4501
  27. {agent_starter_pack-0.12.0.dist-info → agent_starter_pack-0.13.0.dist-info}/WHEEL +0 -0
  28. {agent_starter_pack-0.12.0.dist-info → agent_starter_pack-0.13.0.dist-info}/entry_points.txt +0 -0
  29. {agent_starter_pack-0.12.0.dist-info → agent_starter_pack-0.13.0.dist-info}/licenses/LICENSE +0 -0
src/cli/utils/gcp.py CHANGED
@@ -14,9 +14,11 @@
14
14
 
15
15
  # ruff: noqa: E722
16
16
  import subprocess
17
+ import time
17
18
 
18
19
  import google.auth
19
20
  from google.api_core.client_options import ClientOptions
21
+ from google.api_core.exceptions import PermissionDenied
20
22
  from google.api_core.gapic_v1.client_info import ClientInfo
21
23
  from google.cloud.aiplatform import initializer
22
24
  from google.cloud.aiplatform_v1beta1.services.prediction_service import (
@@ -25,9 +27,94 @@ from google.cloud.aiplatform_v1beta1.services.prediction_service import (
25
27
  from google.cloud.aiplatform_v1beta1.types.prediction_service import (
26
28
  CountTokensRequest,
27
29
  )
30
+ from rich.console import Console
31
+ from rich.prompt import Confirm
28
32
 
29
33
  from src.cli.utils.version import PACKAGE_NAME, get_current_version
30
34
 
35
+ console = Console()
36
+
37
+
38
+ def enable_vertex_ai_api(project_id: str, auto_approve: bool = False) -> bool:
39
+ """Enable Vertex AI API with user confirmation and propagation waiting."""
40
+ api_name = "aiplatform.googleapis.com"
41
+
42
+ # First test if API is already working with a direct connection
43
+ if _test_vertex_ai_connection(project_id):
44
+ return True
45
+
46
+ if not auto_approve:
47
+ console.print(
48
+ f"Vertex AI API is not enabled in project '{project_id}'.", style="yellow"
49
+ )
50
+ console.print(
51
+ "To continue, we need to enable the Vertex AI API.", style="yellow"
52
+ )
53
+
54
+ if not Confirm.ask(
55
+ "Do you want to enable the Vertex AI API now?", default=True
56
+ ):
57
+ return False
58
+
59
+ try:
60
+ console.print("Enabling Vertex AI API...")
61
+ subprocess.run(
62
+ [
63
+ "gcloud",
64
+ "services",
65
+ "enable",
66
+ api_name,
67
+ "--project",
68
+ project_id,
69
+ ],
70
+ check=True,
71
+ capture_output=True,
72
+ text=True,
73
+ )
74
+ console.print("✓ Vertex AI API enabled successfully")
75
+
76
+ # Wait for API propagation
77
+ console.print("⏳ Waiting for API availability to propagate...")
78
+ max_wait_time = 180 # 3 minutes
79
+ check_interval = 10 # 10 seconds
80
+ start_time = time.time()
81
+
82
+ while time.time() - start_time < max_wait_time:
83
+ if _test_vertex_ai_connection(project_id):
84
+ console.print("✓ Vertex AI API is now available")
85
+ return True
86
+ time.sleep(check_interval)
87
+ console.print("⏳ Still waiting for API propagation...")
88
+
89
+ console.print(
90
+ "⚠️ API propagation took longer than expected, but continuing...",
91
+ style="yellow",
92
+ )
93
+ return True
94
+
95
+ except subprocess.CalledProcessError as e:
96
+ console.print(f"Failed to enable Vertex AI API: {e.stderr}", style="bold red")
97
+ return False
98
+
99
+
100
+ def _test_vertex_ai_connection(project_id: str, location: str = "us-central1") -> bool:
101
+ """Test Vertex AI connection without raising exceptions."""
102
+ try:
103
+ credentials, _ = google.auth.default()
104
+ client = PredictionServiceClient(
105
+ credentials=credentials,
106
+ client_options=ClientOptions(
107
+ api_endpoint=f"{location}-aiplatform.googleapis.com"
108
+ ),
109
+ client_info=get_client_info(),
110
+ transport=initializer.global_config._api_transport,
111
+ )
112
+ request = get_dummy_request(project_id=project_id)
113
+ client.count_tokens(request=request)
114
+ return True
115
+ except Exception:
116
+ return False
117
+
31
118
 
32
119
  def get_user_agent() -> str:
33
120
  """Returns custom user agent header tuple (version, agent string)."""
@@ -52,8 +139,18 @@ def get_dummy_request(project_id: str) -> CountTokensRequest:
52
139
  def verify_vertex_connection(
53
140
  project_id: str,
54
141
  location: str = "us-central1",
142
+ auto_approve: bool = False,
55
143
  ) -> None:
56
144
  """Verifies Vertex AI connection with a test Gemini request."""
145
+ # First try direct connection - if it works, we're done
146
+ if _test_vertex_ai_connection(project_id, location):
147
+ return
148
+
149
+ # If that failed, try to enable the API
150
+ if not enable_vertex_ai_api(project_id, auto_approve):
151
+ raise Exception("Vertex AI API is not enabled and user declined to enable it")
152
+
153
+ # After enabling, test again with proper error handling
57
154
  credentials, _ = google.auth.default()
58
155
  client = PredictionServiceClient(
59
156
  credentials=credentials,
@@ -64,7 +161,31 @@ def verify_vertex_connection(
64
161
  transport=initializer.global_config._api_transport,
65
162
  )
66
163
  request = get_dummy_request(project_id=project_id)
67
- client.count_tokens(request=request)
164
+
165
+ try:
166
+ client.count_tokens(request=request)
167
+ except PermissionDenied as e:
168
+ error_message = str(e)
169
+ # Check if the error is specifically about API not being enabled
170
+ if (
171
+ "has not been used" in error_message
172
+ and "aiplatform.googleapis.com" in error_message
173
+ ):
174
+ # This shouldn't happen since we checked above, but handle it gracefully
175
+ console.print(
176
+ "⚠️ API may still be propagating, retrying in 30 seconds...",
177
+ style="yellow",
178
+ )
179
+ time.sleep(30)
180
+ try:
181
+ client.count_tokens(request=request)
182
+ except PermissionDenied:
183
+ raise Exception(
184
+ "Vertex AI API is enabled but not yet available. Please wait a few more minutes and try again."
185
+ ) from e
186
+ else:
187
+ # Re-raise other permission errors
188
+ raise
68
189
 
69
190
 
70
191
  def verify_credentials() -> dict:
@@ -18,12 +18,17 @@ import pathlib
18
18
  import re
19
19
  import shutil
20
20
  import subprocess
21
+ import sys
21
22
  import tempfile
22
- import tomllib
23
23
  from dataclasses import dataclass
24
24
  from typing import Any
25
25
 
26
+ if sys.version_info >= (3, 11):
27
+ import tomllib
28
+ else:
29
+ import tomli as tomllib
26
30
  from jinja2 import Environment
31
+ from rich.console import Console
27
32
 
28
33
 
29
34
  @dataclass
@@ -90,10 +95,14 @@ def parse_agent_spec(agent_spec: str) -> RemoteTemplateSpec | None:
90
95
  template_path = path_parts[0]
91
96
  git_ref = path_parts[1]
92
97
 
98
+ # Check if this is the ADK samples repository
99
+ is_adk_samples = repo_url == "https://github.com/google/adk-samples"
100
+
93
101
  return RemoteTemplateSpec(
94
102
  repo_url=repo_url,
95
103
  template_path=template_path.strip("/"),
96
104
  git_ref=git_ref,
105
+ is_adk_samples=is_adk_samples,
97
106
  )
98
107
 
99
108
  # GitHub shorthand: <org>/<repo>[/<path>][@<ref>]
@@ -104,10 +113,15 @@ def parse_agent_spec(agent_spec: str) -> RemoteTemplateSpec | None:
104
113
  repo = match.group(2)
105
114
  template_path = match.group(3) or ""
106
115
  git_ref = match.group(4) or "main"
116
+
117
+ # Check if this is the ADK samples repository
118
+ is_adk_samples = org == "google" and repo == "adk-samples"
119
+
107
120
  return RemoteTemplateSpec(
108
121
  repo_url=f"https://github.com/{org}/{repo}",
109
122
  template_path=template_path,
110
123
  git_ref=git_ref,
124
+ is_adk_samples=is_adk_samples,
111
125
  )
112
126
 
113
127
  return None
@@ -183,29 +197,66 @@ def fetch_remote_template(
183
197
  ) from e
184
198
 
185
199
 
200
+ def _infer_agent_directory_for_adk(
201
+ template_dir: pathlib.Path, is_adk_sample: bool
202
+ ) -> dict[str, Any]:
203
+ """Infer agent configuration for ADK samples only using Python conventions.
204
+
205
+ Args:
206
+ template_dir: Path to template directory
207
+ is_adk_sample: Whether this is an ADK sample
208
+
209
+ Returns:
210
+ Dictionary with inferred configuration, or empty dict if not ADK sample
211
+ """
212
+ if not is_adk_sample:
213
+ return {}
214
+
215
+ # Convert folder name to Python package convention (hyphens to underscores)
216
+ folder_name = template_dir.name
217
+ agent_directory = folder_name.replace("-", "_")
218
+
219
+ logging.debug(
220
+ f"Inferred agent_directory '{agent_directory}' from folder name '{folder_name}' for ADK sample"
221
+ )
222
+
223
+ return {
224
+ "settings": {
225
+ "agent_directory": agent_directory,
226
+ },
227
+ "has_explicit_config": False, # Track that this was inferred
228
+ }
229
+
230
+
186
231
  def load_remote_template_config(
187
- template_dir: pathlib.Path, cli_overrides: dict[str, Any] | None = None
232
+ template_dir: pathlib.Path,
233
+ cli_overrides: dict[str, Any] | None = None,
234
+ is_adk_sample: bool = False,
188
235
  ) -> dict[str, Any]:
189
236
  """Load template configuration from remote template's pyproject.toml with CLI overrides.
190
237
 
191
238
  Loads configuration from [tool.agent-starter-pack] section with fallbacks
192
239
  to [project] section for name and description if not specified. CLI overrides
193
- take precedence over all other sources.
240
+ take precedence over all other sources. For ADK samples without explicit config,
241
+ uses smart inference for agent directory naming.
194
242
 
195
243
  Args:
196
244
  template_dir: Path to template directory
197
245
  cli_overrides: Configuration overrides from CLI (takes highest precedence)
246
+ is_adk_sample: Whether this is an ADK sample (enables smart inference)
198
247
 
199
248
  Returns:
200
249
  Template configuration dictionary with merged sources
201
250
  """
202
- config = {}
251
+ config: dict[str, Any] = {}
252
+ has_explicit_config = False
203
253
 
204
254
  # Start with defaults
205
255
  defaults = {
206
256
  "base_template": "adk_base",
207
257
  "name": template_dir.name,
208
258
  "description": "",
259
+ "agent_directory": "app", # Default for non-ADK samples
209
260
  }
210
261
  config.update(defaults)
211
262
 
@@ -222,9 +273,13 @@ def load_remote_template_config(
222
273
  # Fallback to [project] fields if not specified in agent-starter-pack section
223
274
  project_info = pyproject_data.get("project", {})
224
275
 
276
+ # Track if we have explicit configuration
277
+ has_explicit_config = bool(toml_config)
278
+
225
279
  # Apply pyproject.toml configuration (overrides defaults)
226
280
  if toml_config:
227
281
  config.update(toml_config)
282
+ logging.debug("Found explicit [tool.agent-starter-pack] configuration")
228
283
 
229
284
  # Apply [project] fallbacks if not already set
230
285
  if "name" not in toml_config and "name" in project_info:
@@ -236,6 +291,31 @@ def load_remote_template_config(
236
291
  logging.debug(f"Loaded template config from {pyproject_path}")
237
292
  except Exception as e:
238
293
  logging.error(f"Error loading pyproject.toml config: {e}")
294
+ else:
295
+ # No pyproject.toml found
296
+ if is_adk_sample:
297
+ logging.debug(
298
+ f"No pyproject.toml found for ADK sample {template_dir.name}, will use inference"
299
+ )
300
+ else:
301
+ logging.debug(
302
+ f"No pyproject.toml found for template {template_dir.name}, using defaults"
303
+ )
304
+
305
+ # Apply ADK inference if no explicit config and this is an ADK sample
306
+ if not has_explicit_config and is_adk_sample:
307
+ try:
308
+ inferred_config = _infer_agent_directory_for_adk(
309
+ template_dir, is_adk_sample
310
+ )
311
+ config.update(inferred_config)
312
+ logging.debug("Applied ADK inference for template without explicit config")
313
+ except Exception as e:
314
+ logging.warning(f"Failed to apply ADK inference for {template_dir}: {e}")
315
+ # Continue with default configuration
316
+
317
+ # Add metadata about configuration source
318
+ config["has_explicit_config"] = bool(has_explicit_config)
239
319
 
240
320
  # Apply CLI overrides (highest precedence) using deep merge
241
321
  if cli_overrides:
@@ -287,6 +367,97 @@ def merge_template_configs(
287
367
  return deep_merge(merged_config, remote_config)
288
368
 
289
369
 
370
+ def discover_adk_agents(repo_path: pathlib.Path) -> dict[int, dict[str, Any]]:
371
+ """Discover and load all ADK agents from a repository with inference support.
372
+
373
+ Args:
374
+ repo_path: Path to the cloned ADK samples repository
375
+
376
+ Returns:
377
+ Dictionary mapping agent numbers to agent info with keys:
378
+ - name: Agent display name
379
+ - description: Agent description
380
+ - path: Relative path from repo root
381
+ - spec: adk@ specification string
382
+ - has_explicit_config: Whether agent has explicit configuration
383
+ """
384
+ import logging
385
+
386
+ adk_agents = {}
387
+
388
+ # Search specifically for agents in python/agents/* directories
389
+ agents_dir = repo_path / "python" / "agents"
390
+ logging.debug(f"Looking for agents in: {agents_dir}")
391
+ if agents_dir.exists():
392
+ all_items = list(agents_dir.iterdir())
393
+ logging.debug(
394
+ f"Found items in agents directory: {[item.name for item in all_items]}"
395
+ )
396
+
397
+ # Collect all agents first, then sort by configuration type
398
+ all_agents = []
399
+
400
+ for agent_dir in sorted(agents_dir.iterdir()):
401
+ if not agent_dir.is_dir():
402
+ logging.debug(f"Skipping non-directory: {agent_dir.name}")
403
+ continue
404
+ logging.debug(f"Processing agent directory: {agent_dir.name}")
405
+
406
+ try:
407
+ # Load configuration with ADK inference support
408
+ config = load_remote_template_config(
409
+ template_dir=agent_dir, is_adk_sample=True
410
+ )
411
+
412
+ agent_name = config.get("name", agent_dir.name)
413
+ description = config.get("description", "")
414
+ has_explicit_config = config.get("has_explicit_config", False)
415
+
416
+ # Get the relative path from repo root
417
+ relative_path = agent_dir.relative_to(repo_path)
418
+ agent_spec_name = agent_dir.name
419
+
420
+ agent_info = {
421
+ "name": agent_name,
422
+ "description": description,
423
+ "path": str(relative_path),
424
+ "spec": f"adk@{agent_spec_name}",
425
+ "has_explicit_config": has_explicit_config,
426
+ }
427
+ all_agents.append(agent_info)
428
+
429
+ except Exception as e:
430
+ logging.warning(f"Could not load agent from {agent_dir}: {e}")
431
+
432
+ # Sort agents: explicit config first, then inferred (both alphabetically within their groups)
433
+ all_agents.sort(key=lambda x: (not x["has_explicit_config"], x["name"].lower()))
434
+
435
+ # Convert to numbered dictionary
436
+ for i, agent_info in enumerate(all_agents, 1):
437
+ adk_agents[i] = agent_info
438
+
439
+ return adk_agents
440
+
441
+
442
+ def display_adk_caveat_if_needed(agents: dict[int, dict[str, Any]]) -> None:
443
+ """Display helpful note for agents that use inference.
444
+
445
+ Args:
446
+ agents: Dictionary of agent info from discover_adk_agents
447
+ """
448
+ console = Console()
449
+ inferred_agents = [
450
+ a for a in agents.values() if not a.get("has_explicit_config", True)
451
+ ]
452
+ if inferred_agents:
453
+ console.print(
454
+ "\n[blue]ℹ️ Note: Agents marked with * are templated using starter pack heuristics for ADK samples.[/]"
455
+ )
456
+ console.print(
457
+ "[dim] The starter pack attempts to create a working codebase, but you'll need to follow the generated README for complete setup.[/]"
458
+ )
459
+
460
+
290
461
  def render_and_merge_makefiles(
291
462
  base_template_path: pathlib.Path,
292
463
  final_destination: pathlib.Path,
src/cli/utils/template.py CHANGED
@@ -686,7 +686,7 @@ def process_template(
686
686
  llm_txt_content = f.read()
687
687
 
688
688
  cookiecutter_config = {
689
- "project_name": "my-project",
689
+ "project_name": project_name,
690
690
  "agent_name": agent_name,
691
691
  "package_version": get_current_version(),
692
692
  "agent_description": template_config.get("description", ""),
@@ -752,6 +752,53 @@ def process_template(
752
752
  logging.debug(
753
753
  f"Copying remote template files from {remote_template_path} to {generated_project_dir}"
754
754
  )
755
+
756
+ # Preserve base template README and pyproject.toml files before overwriting
757
+ preserve_files = ["README.md"]
758
+
759
+ # Only preserve pyproject.toml if the remote template doesn't have starter pack integration
760
+ remote_pyproject = remote_template_path / "pyproject.toml"
761
+ if remote_pyproject.exists():
762
+ try:
763
+ remote_pyproject_content = remote_pyproject.read_text()
764
+ # Check for starter pack integration markers
765
+ has_starter_pack_integration = (
766
+ "[tool.agent-starter-pack]" in remote_pyproject_content
767
+ )
768
+ if not has_starter_pack_integration:
769
+ preserve_files.append("pyproject.toml")
770
+ logging.debug(
771
+ "Remote pyproject.toml lacks starter pack integration - will preserve base template version"
772
+ )
773
+ else:
774
+ logging.debug(
775
+ "Remote pyproject.toml has starter pack integration - using remote version only"
776
+ )
777
+ except Exception as e:
778
+ logging.warning(
779
+ f"Could not read remote pyproject.toml: {e}. Will preserve base template version."
780
+ )
781
+ preserve_files.append("pyproject.toml")
782
+ else:
783
+ preserve_files.append("pyproject.toml")
784
+
785
+ for preserve_file in preserve_files:
786
+ base_file = generated_project_dir / preserve_file
787
+ remote_file = remote_template_path / preserve_file
788
+
789
+ if base_file.exists() and remote_file.exists():
790
+ # Preserve the base template file with starter_pack prefix
791
+ base_name = pathlib.Path(preserve_file).stem
792
+ extension = pathlib.Path(preserve_file).suffix
793
+ preserved_file = (
794
+ generated_project_dir
795
+ / f"starter_pack_{base_name}{extension}"
796
+ )
797
+ shutil.copy2(base_file, preserved_file)
798
+ logging.debug(
799
+ f"Preserved base template {preserve_file} as starter_pack_{base_name}{extension}"
800
+ )
801
+
755
802
  copy_files(
756
803
  remote_template_path,
757
804
  generated_project_dir,
@@ -884,11 +931,35 @@ def process_template(
884
931
  )
885
932
 
886
933
  if generated_project_dir.exists():
934
+ # Check for existing README and pyproject.toml files before removing destination
935
+ existing_preserved_files = []
887
936
  if final_destination.exists():
937
+ for item in final_destination.iterdir():
938
+ if item.is_file() and (
939
+ item.name.lower().startswith("readme")
940
+ or item.name == "pyproject.toml"
941
+ ):
942
+ existing_preserved_files.append(
943
+ (item.name, item.read_text())
944
+ )
888
945
  shutil.rmtree(final_destination)
946
+
889
947
  shutil.copytree(
890
948
  generated_project_dir, final_destination, dirs_exist_ok=True
891
949
  )
950
+
951
+ # Restore existing README and pyproject.toml files with starter_pack prefix
952
+ for file_name, file_content in existing_preserved_files:
953
+ base_name = pathlib.Path(file_name).stem
954
+ extension = pathlib.Path(file_name).suffix
955
+ preserved_file_path = (
956
+ final_destination / f"starter_pack_{base_name}{extension}"
957
+ )
958
+ preserved_file_path.write_text(file_content)
959
+ logging.debug(
960
+ f"File preservation: existing {file_name} preserved as starter_pack_{base_name}{extension}"
961
+ )
962
+
892
963
  logging.debug(
893
964
  f"Project successfully created at {final_destination}"
894
965
  )
@@ -16,12 +16,28 @@ FROM python:3.11-slim
16
16
 
17
17
  RUN pip install --no-cache-dir uv==0.6.12
18
18
 
19
+ {%- if cookiecutter.agent_name == 'live_api' %}
20
+ # Install Node.js for frontend build
21
+ RUN apt-get update && apt-get install -y \
22
+ curl \
23
+ && curl -fsSL https://deb.nodesource.com/setup_18.x | bash - \
24
+ && apt-get install -y nodejs \
25
+ && apt-get clean \
26
+ && rm -rf /var/lib/apt/lists/*
27
+ {%- endif %}
28
+
19
29
  WORKDIR /code
20
30
 
21
31
  COPY ./pyproject.toml ./README.md ./uv.lock* ./
22
32
 
23
33
  COPY ./{{cookiecutter.agent_directory}} ./{{cookiecutter.agent_directory}}
24
34
 
35
+ {%- if cookiecutter.agent_name == 'live_api' %}
36
+ # Copy and build frontend
37
+ COPY ./frontend ./frontend
38
+ RUN cd frontend && npm ci && npm run build
39
+ {%- endif %}
40
+
25
41
  RUN uv sync --frozen
26
42
 
27
43
  ARG COMMIT_SHA=""
@@ -16,11 +16,14 @@ import asyncio
16
16
  import json
17
17
  import logging
18
18
  from collections.abc import Callable
19
+ from pathlib import Path
19
20
  from typing import Any, Literal
20
21
 
21
22
  import backoff
22
- from fastapi import FastAPI, WebSocket
23
+ from fastapi import FastAPI, HTTPException, WebSocket
23
24
  from fastapi.middleware.cors import CORSMiddleware
25
+ from fastapi.responses import FileResponse
26
+ from fastapi.staticfiles import StaticFiles
24
27
  from google.cloud import logging as google_cloud_logging
25
28
  from google.genai import types
26
29
  from google.genai.types import LiveServerToolCall
@@ -36,6 +39,18 @@ app.add_middleware(
36
39
  allow_methods=["*"],
37
40
  allow_headers=["*"],
38
41
  )
42
+
43
+ # Get the path to the frontend build directory
44
+ current_dir = Path(__file__).parent
45
+ frontend_build_dir = current_dir.parent / "frontend" / "build"
46
+
47
+ # Mount static files if build directory exists
48
+ if frontend_build_dir.exists():
49
+ app.mount(
50
+ "/static",
51
+ StaticFiles(directory=str(frontend_build_dir / "static")),
52
+ name="static",
53
+ )
39
54
  logging_client = google_cloud_logging.Client()
40
55
  logger = logging_client.logger(__name__)
41
56
  logging.basicConfig(level=logging.INFO)
@@ -382,8 +397,41 @@ def collect_feedback(feedback: Feedback) -> dict[str, str]:
382
397
  """
383
398
  logger.log_struct(feedback.model_dump(), severity="INFO")
384
399
  return {"status": "success"}
400
+ {% if cookiecutter.agent_name == "live_api" %}
401
+
402
+ @app.get("/")
403
+ async def serve_frontend_root() -> FileResponse:
404
+ """Serve the frontend index.html at the root path."""
405
+ index_file = frontend_build_dir / "index.html"
406
+ if index_file.exists():
407
+ return FileResponse(str(index_file))
408
+ raise HTTPException(
409
+ status_code=404,
410
+ detail="Frontend not built. Run 'npm run build' in the frontend directory.",
411
+ )
385
412
 
386
413
 
414
+ @app.get("/{full_path:path}")
415
+ async def serve_frontend_spa(full_path: str) -> FileResponse:
416
+ """Catch-all route to serve the frontend for SPA routing.
417
+
418
+ This ensures that client-side routes are handled by the React app.
419
+ Excludes API routes (ws, feedback) and static assets.
420
+ """
421
+ # Don't intercept API routes
422
+ if full_path.startswith(("ws", "feedback", "static", "api")):
423
+ raise HTTPException(status_code=404, detail="Not found")
424
+
425
+ # Serve index.html for all other routes (SPA routing)
426
+ index_file = frontend_build_dir / "index.html"
427
+ if index_file.exists():
428
+ return FileResponse(str(index_file))
429
+ raise HTTPException(
430
+ status_code=404,
431
+ detail="Frontend not built. Run 'npm run build' in the frontend directory.",
432
+ )
433
+ {% endif %}
434
+
387
435
  # Main execution
388
436
  if __name__ == "__main__":
389
437
  import uvicorn
@@ -5914,9 +5914,9 @@
5914
5914
  "license": "ISC"
5915
5915
  },
5916
5916
  "node_modules/brace-expansion": {
5917
- "version": "1.1.11",
5918
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
5919
- "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
5917
+ "version": "1.1.12",
5918
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
5919
+ "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
5920
5920
  "license": "MIT",
5921
5921
  "dependencies": {
5922
5922
  "balanced-match": "^1.0.0",
@@ -9051,9 +9051,9 @@
9051
9051
  }
9052
9052
  },
9053
9053
  "node_modules/filelist/node_modules/brace-expansion": {
9054
- "version": "2.0.1",
9055
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
9056
- "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
9054
+ "version": "2.0.2",
9055
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
9056
+ "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
9057
9057
  "license": "MIT",
9058
9058
  "dependencies": {
9059
9059
  "balanced-match": "^1.0.0"
@@ -16819,9 +16819,9 @@
16819
16819
  }
16820
16820
  },
16821
16821
  "node_modules/sucrase/node_modules/brace-expansion": {
16822
- "version": "2.0.1",
16823
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
16824
- "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
16822
+ "version": "2.0.2",
16823
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
16824
+ "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
16825
16825
  "license": "MIT",
16826
16826
  "dependencies": {
16827
16827
  "balanced-match": "^1.0.0"
@@ -21,8 +21,9 @@ import SidePanel from "./components/side-panel/SidePanel";
21
21
  import ControlTray from "./components/control-tray/ControlTray";
22
22
  import cn from "classnames";
23
23
 
24
- const defaultHost = "localhost:8000";
25
- const defaultUri = `ws://${defaultHost}/`;
24
+ // Use relative URLs that work with integrated setup and deployments
25
+ const defaultHost = window.location.host;
26
+ const defaultUri = `${window.location.protocol === 'https:' ? 'wss:' : 'ws:'}//${defaultHost}/`;
26
27
 
27
28
  function App() {
28
29
  const videoRef = useRef<HTMLVideoElement>(null);