ai-pipeline-core 0.2.6__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. ai_pipeline_core/__init__.py +78 -125
  2. ai_pipeline_core/deployment/__init__.py +34 -0
  3. ai_pipeline_core/deployment/base.py +861 -0
  4. ai_pipeline_core/deployment/contract.py +80 -0
  5. ai_pipeline_core/deployment/deploy.py +561 -0
  6. ai_pipeline_core/deployment/helpers.py +97 -0
  7. ai_pipeline_core/deployment/progress.py +126 -0
  8. ai_pipeline_core/deployment/remote.py +116 -0
  9. ai_pipeline_core/docs_generator/__init__.py +54 -0
  10. ai_pipeline_core/docs_generator/__main__.py +5 -0
  11. ai_pipeline_core/docs_generator/cli.py +196 -0
  12. ai_pipeline_core/docs_generator/extractor.py +324 -0
  13. ai_pipeline_core/docs_generator/guide_builder.py +644 -0
  14. ai_pipeline_core/docs_generator/trimmer.py +35 -0
  15. ai_pipeline_core/docs_generator/validator.py +114 -0
  16. ai_pipeline_core/document_store/__init__.py +13 -0
  17. ai_pipeline_core/document_store/_summary.py +9 -0
  18. ai_pipeline_core/document_store/_summary_worker.py +170 -0
  19. ai_pipeline_core/document_store/clickhouse.py +492 -0
  20. ai_pipeline_core/document_store/factory.py +38 -0
  21. ai_pipeline_core/document_store/local.py +312 -0
  22. ai_pipeline_core/document_store/memory.py +85 -0
  23. ai_pipeline_core/document_store/protocol.py +68 -0
  24. ai_pipeline_core/documents/__init__.py +12 -14
  25. ai_pipeline_core/documents/_context_vars.py +85 -0
  26. ai_pipeline_core/documents/_hashing.py +52 -0
  27. ai_pipeline_core/documents/attachment.py +85 -0
  28. ai_pipeline_core/documents/context.py +128 -0
  29. ai_pipeline_core/documents/document.py +318 -1434
  30. ai_pipeline_core/documents/mime_type.py +37 -82
  31. ai_pipeline_core/documents/utils.py +4 -12
  32. ai_pipeline_core/exceptions.py +10 -62
  33. ai_pipeline_core/images/__init__.py +309 -0
  34. ai_pipeline_core/images/_processing.py +151 -0
  35. ai_pipeline_core/llm/__init__.py +6 -4
  36. ai_pipeline_core/llm/ai_messages.py +130 -81
  37. ai_pipeline_core/llm/client.py +327 -193
  38. ai_pipeline_core/llm/model_options.py +14 -86
  39. ai_pipeline_core/llm/model_response.py +60 -103
  40. ai_pipeline_core/llm/model_types.py +16 -34
  41. ai_pipeline_core/logging/__init__.py +2 -7
  42. ai_pipeline_core/logging/logging.yml +1 -1
  43. ai_pipeline_core/logging/logging_config.py +27 -37
  44. ai_pipeline_core/logging/logging_mixin.py +15 -41
  45. ai_pipeline_core/observability/__init__.py +32 -0
  46. ai_pipeline_core/observability/_debug/__init__.py +30 -0
  47. ai_pipeline_core/observability/_debug/_auto_summary.py +94 -0
  48. ai_pipeline_core/observability/_debug/_config.py +95 -0
  49. ai_pipeline_core/observability/_debug/_content.py +764 -0
  50. ai_pipeline_core/observability/_debug/_processor.py +98 -0
  51. ai_pipeline_core/observability/_debug/_summary.py +312 -0
  52. ai_pipeline_core/observability/_debug/_types.py +75 -0
  53. ai_pipeline_core/observability/_debug/_writer.py +843 -0
  54. ai_pipeline_core/observability/_document_tracking.py +146 -0
  55. ai_pipeline_core/observability/_initialization.py +194 -0
  56. ai_pipeline_core/observability/_logging_bridge.py +57 -0
  57. ai_pipeline_core/observability/_summary.py +81 -0
  58. ai_pipeline_core/observability/_tracking/__init__.py +6 -0
  59. ai_pipeline_core/observability/_tracking/_client.py +178 -0
  60. ai_pipeline_core/observability/_tracking/_internal.py +28 -0
  61. ai_pipeline_core/observability/_tracking/_models.py +138 -0
  62. ai_pipeline_core/observability/_tracking/_processor.py +158 -0
  63. ai_pipeline_core/observability/_tracking/_service.py +311 -0
  64. ai_pipeline_core/observability/_tracking/_writer.py +229 -0
  65. ai_pipeline_core/{tracing.py → observability/tracing.py} +139 -283
  66. ai_pipeline_core/pipeline/__init__.py +10 -0
  67. ai_pipeline_core/pipeline/decorators.py +915 -0
  68. ai_pipeline_core/pipeline/options.py +16 -0
  69. ai_pipeline_core/prompt_manager.py +16 -102
  70. ai_pipeline_core/settings.py +26 -31
  71. ai_pipeline_core/testing.py +9 -0
  72. ai_pipeline_core-0.4.1.dist-info/METADATA +807 -0
  73. ai_pipeline_core-0.4.1.dist-info/RECORD +76 -0
  74. {ai_pipeline_core-0.2.6.dist-info → ai_pipeline_core-0.4.1.dist-info}/WHEEL +1 -1
  75. ai_pipeline_core/documents/document_list.py +0 -420
  76. ai_pipeline_core/documents/flow_document.py +0 -112
  77. ai_pipeline_core/documents/task_document.py +0 -117
  78. ai_pipeline_core/documents/temporary_document.py +0 -74
  79. ai_pipeline_core/flow/__init__.py +0 -9
  80. ai_pipeline_core/flow/config.py +0 -483
  81. ai_pipeline_core/flow/options.py +0 -75
  82. ai_pipeline_core/pipeline.py +0 -718
  83. ai_pipeline_core/prefect.py +0 -63
  84. ai_pipeline_core/simple_runner/__init__.py +0 -14
  85. ai_pipeline_core/simple_runner/cli.py +0 -254
  86. ai_pipeline_core/simple_runner/simple_runner.py +0 -247
  87. ai_pipeline_core/storage/__init__.py +0 -8
  88. ai_pipeline_core/storage/storage.py +0 -628
  89. ai_pipeline_core/utils/__init__.py +0 -8
  90. ai_pipeline_core/utils/deploy.py +0 -373
  91. ai_pipeline_core/utils/remote_deployment.py +0 -269
  92. ai_pipeline_core-0.2.6.dist-info/METADATA +0 -500
  93. ai_pipeline_core-0.2.6.dist-info/RECORD +0 -41
  94. {ai_pipeline_core-0.2.6.dist-info → ai_pipeline_core-0.4.1.dist-info}/licenses/LICENSE +0 -0
ai_pipeline_core/deployment/contract.py
@@ -0,0 +1,80 @@
+ """Unified pipeline run response contract.
+
+ Single source of truth for the response shape used by both
+ webhook push (ai-pipeline-core) and polling pull (unified-middleware).
+ """
+
+ from datetime import datetime
+ from typing import Annotated, Literal
+ from uuid import UUID
+
+ from pydantic import BaseModel, ConfigDict, Discriminator
+
+
+ class _RunBase(BaseModel):
+     """Common fields on every run response variant."""
+
+     flow_run_id: UUID
+     project_name: str
+     state: str  # PENDING, RUNNING, COMPLETED, FAILED, CRASHED, CANCELLED
+     timestamp: datetime
+
+     model_config = ConfigDict(frozen=True)
+
+
+ class PendingRun(_RunBase):
+     """Pipeline queued or running but no progress reported yet."""
+
+     type: Literal["pending"] = "pending"
+
+
+ class ProgressRun(_RunBase):
+     """Pipeline running with step-level progress data."""
+
+     type: Literal["progress"] = "progress"
+     step: int
+     total_steps: int
+     flow_name: str
+     status: str  # "started", "completed", "cached"
+     progress: float  # overall 0.0-1.0
+     step_progress: float  # within step 0.0-1.0
+     message: str
+
+
+ class DeploymentResultData(BaseModel):
+     """Typed result payload — always has success + optional error."""
+
+     success: bool
+     error: str | None = None
+
+     model_config = ConfigDict(frozen=True, extra="allow")
+
+
+ class CompletedRun(_RunBase):
+     """Pipeline finished (Prefect COMPLETED). Check result.success for business outcome."""
+
+     type: Literal["completed"] = "completed"
+     result: DeploymentResultData
+
+
+ class FailedRun(_RunBase):
+     """Pipeline crashed — execution error, not business logic."""
+
+     type: Literal["failed"] = "failed"
+     error: str
+     result: DeploymentResultData | None = None
+
+
+ RunResponse = Annotated[
+     PendingRun | ProgressRun | CompletedRun | FailedRun,
+     Discriminator("type"),
+ ]
+
+ __all__ = [
+     "CompletedRun",
+     "DeploymentResultData",
+     "FailedRun",
+     "PendingRun",
+     "ProgressRun",
+     "RunResponse",
+ ]
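
The contract module's RunResponse is a discriminated union resolved by the "type" field. A minimal consumption sketch, not part of the package (payload values are invented; the import path follows the file list above):

    from pydantic import TypeAdapter

    from ai_pipeline_core.deployment.contract import PendingRun, RunResponse

    # TypeAdapter applies Discriminator("type") to pick the right variant
    adapter = TypeAdapter(RunResponse)

    payload = {
        "type": "pending",
        "flow_run_id": "00000000-0000-0000-0000-000000000000",
        "project_name": "demo",
        "state": "PENDING",
        "timestamp": "2024-01-01T00:00:00Z",
    }

    run = adapter.validate_python(payload)
    assert isinstance(run, PendingRun)  # frozen pydantic model
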
ai_pipeline_core/deployment/deploy.py
@@ -0,0 +1,561 @@
+ #!/usr/bin/env python3
+ """Universal Prefect deployment script using the Python API.
+
+ This script:
+ 1. Builds a Python package from pyproject.toml
+ 2. Uploads it to Google Cloud Storage
+ 3. Creates/updates a Prefect deployment using the RunnerDeployment pattern
+
+ Requirements:
+     - Settings configured with PREFECT_API_URL and optionally PREFECT_API_KEY
+     - Settings configured with PREFECT_GCS_BUCKET
+     - pyproject.toml with project name and version
+     - Local package installed for flow metadata extraction
+
+ Usage:
+     python -m ai_pipeline_core.deployment.deploy
+ """
+
+ import argparse
+ import asyncio
+ import json
+ import subprocess
+ import sys
+ import tempfile
+ import tomllib
+ import traceback
+ from datetime import UTC, datetime
+ from pathlib import Path
+ from typing import Any
+
+ from prefect.cli.deploy._storage import _PullStepStorage  # type: ignore
+ from prefect.client.orchestration import get_client
+ from prefect.deployments.runner import RunnerDeployment
+ from prefect.flows import load_flow_from_entrypoint
+ from prefect_gcp.cloud_storage import GcpCredentials, GcsBucket  # pyright: ignore[reportMissingTypeStubs]
+
+ from ai_pipeline_core.settings import settings
+
+ # ============================================================================
+ # Deployer Class
+ # ============================================================================
+
+
+ class Deployer:
+     """Deploy Prefect flows using the RunnerDeployment pattern.
+
+     This is the official Prefect approach that handles flow registration,
+     deployment creation/updates, and all edge cases automatically.
+     """
+
+     def __init__(self):
+         """Initialize deployer."""
+         self.config = self._load_config()
+         self._validate_prefect_settings()
+
+     def _load_config(self) -> dict[str, Any]:
+         """Load and normalize project configuration from pyproject.toml.
+
+         Returns:
+             Configuration dictionary with project metadata and deployment settings.
+         """
+         if not settings.prefect_gcs_bucket:
+             self._die("PREFECT_GCS_BUCKET not configured in settings.\nConfigure via environment variable or .env file:\n  PREFECT_GCS_BUCKET=your-bucket-name")
+
+         pyproject_path = Path("pyproject.toml")
+         if not pyproject_path.exists():
+             self._die("pyproject.toml not found. Run from project root.")
+
+         with open(pyproject_path, "rb") as f:
+             data = tomllib.load(f)
+
+         self._pyproject_data = data
+
+         project = data.get("project", {})
+         name = project.get("name")
+         version = project.get("version")
+
+         if not name:
+             self._die("Project name not found in pyproject.toml")
+         if not version:
+             self._die("Project version not found in pyproject.toml")
+
+         # Normalize naming conventions
+         # Hyphens in package names become underscores in Python imports
+         package_name = name.replace("-", "_")
+         flow_folder = name.replace("_", "-")
+
+         return {
+             "name": name,
+             "package": package_name,
+             "version": version,
+             "bucket": settings.prefect_gcs_bucket,
+             "folder": f"flows/{flow_folder}",
+             "tarball": f"{package_name}-{version}.tar.gz",
+             "work_pool": settings.prefect_work_pool_name,
+             "work_queue": settings.prefect_work_queue_name,
+         }
+
+     def _validate_prefect_settings(self):
+         """Validate that required Prefect settings are configured."""
+         self.api_url = settings.prefect_api_url
+         if not self.api_url:
+             self._die(
+                 "PREFECT_API_URL not configured in settings.\n"
+                 "Configure via environment variable or .env file:\n"
+                 "  PREFECT_API_URL=https://api.prefect.cloud/api/accounts/.../workspaces/..."
+             )
+
+     def _run(self, cmd: str, *, check: bool = True) -> str | None:
+         """Execute shell command and return output.
+
+         Args:
+             cmd: Shell command to execute
+             check: Whether to raise on non-zero exit code
+
+         Returns:
+             Command stdout if successful, None if failed and check=False
+         """
+         result = subprocess.run(cmd, shell=True, capture_output=True, text=True, check=False)
+
+         if check and result.returncode != 0:
+             self._die(f"Command failed: {cmd}\n{result.stderr}")
+
+         return result.stdout.strip() if result.returncode == 0 else None
+
+     @staticmethod
+     def _info(msg: str):
+         """Print info message."""
+         print(f"→ {msg}")
+
+     @staticmethod
+     def _success(msg: str):
+         """Print success message."""
+         print(f"✓ {msg}")
+
+     @staticmethod
+     def _die(msg: str):
+         """Print error and exit."""
+         print(f"✗ {msg}", file=sys.stderr)
+         sys.exit(1)
+
+     def _build_package(self) -> Path:
+         """Build Python package using `python -m build`.
+
+         Returns:
+             Path to the built tarball
+         """
+         self._info(f"Building {self.config['name']} v{self.config['version']}")
+
+         # Build sdist (source distribution)
+         build_cmd = "python -m build --sdist"
+
+         self._run(build_cmd)
+
+         # Verify tarball was created
+         tarball_path = Path("dist") / self.config["tarball"]
+         if not tarball_path.exists():
+             self._die(f"Build artifact not found: {tarball_path}\nExpected tarball name: {self.config['tarball']}\nCheck that pyproject.toml version matches.")
+
+         self._success(f"Built {tarball_path.name} ({tarball_path.stat().st_size // 1024} KB)")
+         return tarball_path
+
+     # -- Agent build/upload support --
+
+     def _load_agent_config(self) -> dict[str, dict[str, Any]]:
+         """Load [tool.deploy.agents] from pyproject.toml.
+
+         Returns:
+             Dict mapping agent name to config (path, extra_vendor).
+             Empty dict if no agents configured.
+         """
+         return self._pyproject_data.get("tool", {}).get("deploy", {}).get("agents", {})
+
+     def _get_cli_agents_source(self) -> str | None:
+         """Get cli_agents_source path from [tool.deploy]."""
+         return self._pyproject_data.get("tool", {}).get("deploy", {}).get("cli_agents_source")
+
+     def _build_wheel_from_source(self, source_dir: Path) -> Path:
+         """Build a wheel from a source directory.
+
+         Args:
+             source_dir: Directory containing pyproject.toml
+
+         Returns:
+             Path to built .whl file in a temp dist directory
+         """
+         if not (source_dir / "pyproject.toml").exists():
+             self._die(f"No pyproject.toml in {source_dir}")
+
+         with tempfile.TemporaryDirectory() as tmpdir:
+             tmp_dist = Path(tmpdir) / "dist"
+             result = subprocess.run(
+                 [sys.executable, "-m", "build", "--wheel", "--outdir", str(tmp_dist)],
+                 cwd=source_dir,
+                 capture_output=True,
+                 text=True,
+                 check=False,
+             )
+             if result.returncode != 0:
+                 self._die(f"Wheel build failed for {source_dir.name}:\n{result.stderr}")
+
+             wheels = list(tmp_dist.glob("*.whl"))
+             if not wheels:
+                 self._die(f"No wheel produced for {source_dir.name}")
+
+             # Copy to persistent dist/ under source_dir
+             dist_dir = source_dir / "dist"
+             dist_dir.mkdir(exist_ok=True)
+             output = dist_dir / wheels[0].name
+             output.write_bytes(wheels[0].read_bytes())
+             return output
+
+     def _build_agents(self) -> dict[str, dict[str, Any]]:  # noqa: PLR0914
+         """Build agent wheels and manifests for all configured agents.
+
+         Returns:
+             Dict mapping agent name to build info:
+                 {name: {"manifest_json": str, "files": {filename: Path}}}
+             Empty dict if no agents configured.
+         """
+         agent_config = self._load_agent_config()
+         if not agent_config:
+             return {}
+
+         cli_agents_source = self._get_cli_agents_source()
+         if not cli_agents_source:
+             self._die(
+                 "Agents configured in [tool.deploy.agents] but "
+                 "[tool.deploy].cli_agents_source is not set.\n"
+                 "Add to pyproject.toml:\n"
+                 '  [tool.deploy]\n  cli_agents_source = "vendor/cli-agents"'
+             )
+
+         self._info(f"Building {len(agent_config)} agent(s): {', '.join(agent_config)}")
+
+         # Build cli-agents wheel once (shared across all agents)
+         cli_agents_dir = Path(cli_agents_source).resolve()  # pyright: ignore[reportArgumentType]
+         if not (cli_agents_dir / "pyproject.toml").exists():
+             self._die(f"cli-agents source not found at {cli_agents_dir}")
+
+         cli_agents_wheel = self._build_wheel_from_source(cli_agents_dir)
+         self._success(f"Built cli-agents wheel: {cli_agents_wheel.name}")
+
+         builds: dict[str, dict[str, Any]] = {}
+
+         for agent_name, config in agent_config.items():
+             agent_path = Path(config["path"]).resolve()
+             if not (agent_path / "pyproject.toml").exists():
+                 self._die(f"Agent '{agent_name}' path not found: {agent_path}\nCheck [tool.deploy.agents.{agent_name}].path in pyproject.toml")
+
+             # Read module_name from agent's pyproject.toml
+             with open(agent_path / "pyproject.toml", "rb") as f:
+                 agent_pyproject = tomllib.load(f)
+
+             module_name = agent_pyproject.get("tool", {}).get("agent", {}).get("module")
+             if not module_name:
+                 self._die(
+                     f"Agent '{agent_name}' missing [tool.agent].module in "
+                     f"{agent_path / 'pyproject.toml'}\n"
+                     f'Add:\n  [tool.agent]\n  module = "agent_{agent_name}"'
+                 )
+
+             # Build agent wheel
+             agent_wheel = self._build_wheel_from_source(agent_path)
+             self._success(f"Built agent wheel: {agent_wheel.name}")
+
+             # Collect all files for this agent bundle
+             files: dict[str, Path] = {
+                 agent_wheel.name: agent_wheel,
+                 cli_agents_wheel.name: cli_agents_wheel,
+             }
+
+             # Build extra_vendor packages from repo root
+             vendor_packages: list[str] = []
+             extra_built: set[str] = set()
+             for vendor_name in config.get("extra_vendor", []):
+                 extra_source_dir = Path(vendor_name).resolve()
+                 if not (extra_source_dir / "pyproject.toml").exists():
+                     self._die(
+                         f"Extra vendor '{vendor_name}' for agent '{agent_name}' "
+                         f"not found at {extra_source_dir}\n"
+                         f"Ensure the directory exists at repo root with pyproject.toml"
+                     )
+                 vendor_wheel = self._build_wheel_from_source(extra_source_dir)
+                 files[vendor_wheel.name] = vendor_wheel
+                 vendor_packages.append(vendor_wheel.name)
+                 extra_built.add(extra_source_dir.name.replace("-", "_"))
+                 self._success(f"Built vendor wheel: {vendor_wheel.name}")
+
+             # Collect existing vendor/*.whl and vendor/*.tar.gz from agent directory,
+             # skipping packages already built from extra_vendor
+             agent_vendor_dir = agent_path / "vendor"
+             if agent_vendor_dir.exists():
+                 for pkg in list(agent_vendor_dir.glob("*.whl")) + list(agent_vendor_dir.glob("*.tar.gz")):
+                     pkg_base = pkg.name.split("-")[0].replace("-", "_")
+                     if pkg.name not in files and pkg_base not in extra_built:
+                         files[pkg.name] = pkg
+                         vendor_packages.append(pkg.name)
+
+             # Write manifest (plain JSON dict, compatible with AgentManifest schema)
+             manifest = {
+                 "module_name": module_name,
+                 "agent_wheel": agent_wheel.name,
+                 "cli_agents_wheel": cli_agents_wheel.name,
+                 "vendor_packages": vendor_packages,
+                 "built_at": datetime.now(UTC).isoformat(),
+             }
+             manifest_json = json.dumps(manifest, indent=2)
+
+             builds[agent_name] = {"manifest_json": manifest_json, "files": files}
+             self._success(f"Agent '{agent_name}' bundle ready ({module_name}, {len(files)} files)")
+
+         return builds
+
+     def _create_gcs_bucket(self, bucket_folder: str) -> Any:
+         """Create a GcsBucket instance for uploading files.
+
+         Args:
+             bucket_folder: Folder path within the bucket.
+         """
+         creds = GcpCredentials()
+         if hasattr(settings, "gcs_service_account_file") and settings.gcs_service_account_file:
+             creds = GcpCredentials(service_account_file=Path(settings.gcs_service_account_file))
+         return GcsBucket(bucket=self.config["bucket"], bucket_folder=bucket_folder, gcp_credentials=creds)
+
+     async def _upload_agents(self, agent_builds: dict[str, dict[str, Any]]):
+         """Upload agent bundles to GCS.
+
+         Args:
+             agent_builds: Output from _build_agents()
+         """
+         if not agent_builds:
+             return
+
+         flow_folder = self.config["folder"]
+
+         for agent_name, build_info in agent_builds.items():
+             agent_folder = f"{flow_folder}/agents/{agent_name}"
+             bucket = self._create_gcs_bucket(agent_folder)
+             self._info(f"Uploading agent '{agent_name}' bundle to gs://{self.config['bucket']}/{agent_folder}")
+
+             # Upload manifest
+             await bucket.write_path("manifest.json", build_info["manifest_json"].encode())
+
+             # Upload wheels
+             for filename, filepath in build_info["files"].items():
+                 await bucket.write_path(filename, filepath.read_bytes())
+
+             self._success(f"Agent '{agent_name}' uploaded ({len(build_info['files'])} files)")
+
+     async def _upload_package(self, tarball: Path, vendor_wheels: list[Path] | None = None):
+         """Upload package tarball and vendor wheels to Google Cloud Storage.
+
+         Args:
+             tarball: Path to the tarball to upload
+             vendor_wheels: Optional private dependency wheels to upload alongside
+         """
+         flow_folder = self.config["folder"]
+         bucket = self._create_gcs_bucket(flow_folder)
+
+         dest_uri = f"gs://{self.config['bucket']}/{flow_folder}/{tarball.name}"
+         self._info(f"Uploading to {dest_uri}")
+
+         tarball_bytes = tarball.read_bytes()  # noqa: ASYNC240
+         await bucket.write_path(tarball.name, tarball_bytes)
+
+         self._success(f"Package uploaded to {flow_folder}/{tarball.name}")
+
+         for wheel in vendor_wheels or []:
+             await bucket.write_path(wheel.name, wheel.read_bytes())
+             self._success(f"Vendor wheel uploaded: {wheel.name}")
+
+     async def _deploy_via_api(self, agent_builds: dict[str, dict[str, Any]] | None = None):
+         """Create or update Prefect deployment using RunnerDeployment pattern.
+
+         This is the official Prefect approach that:
+         1. Automatically creates/updates the flow registration
+         2. Handles deployment create vs update logic
+         3. Properly formats all parameters for the API
+
+         Args:
+             agent_builds: Output from _build_agents(). If non-empty, sets
+                 AGENT_BUNDLES_URI env var on the deployment.
+         """
+         # Define entrypoint (assumes flow function has same name as package)
+         entrypoint = f"{self.config['package']}:{self.config['package']}"
+
+         # Load flow to get metadata
+         # This requires the package to be installed locally (typical dev workflow)
+         self._info(f"Loading flow from entrypoint: {entrypoint}")
+         try:
+             flow = load_flow_from_entrypoint(entrypoint)
+             self._success(f"Loaded flow: {flow.name}")
+         except ImportError as e:
+             self._die(
+                 f"Failed to import flow: {e}\n\n"
+                 f"The package must be installed locally to extract flow metadata.\n"
+                 f"Install it with: pip install -e .\n\n"
+                 f"Expected entrypoint: {entrypoint}\n"
+                 f"This means: Python package '{self.config['package']}' "
+                 f"with flow function '{self.config['package']}'"
+             )
+         except AttributeError as e:
+             self._die(
+                 f"Flow function not found: {e}\n\n"
+                 f"Expected flow function named '{self.config['package']}' "
+                 f"in package '{self.config['package']}'.\n"
+                 f"Check that your flow is decorated with @flow and named correctly."
+             )
+
+         # Define pull steps for workers
+         # These steps tell workers how to get and install the flow code
+         pull_steps = [
+             {
+                 "prefect_gcp.deployments.steps.pull_from_gcs": {
+                     "id": "pull_code",
+                     "requires": "prefect-gcp>=0.6",
+                     "bucket": self.config["bucket"],
+                     "folder": self.config["folder"],
+                 }
+             },
+             {
+                 "prefect.deployments.steps.run_shell_script": {
+                     "id": "install_project",
+                     "stream_output": True,
+                     "directory": "{{ pull_code.directory }}",
+                     # Use uv for fast installation (worker has it installed)
+                     # --find-links . resolves private dependencies from co-uploaded wheels
+                     "script": f"uv pip install --system --find-links . ./{self.config['tarball']}",
+                 }
+             },
+         ]
+
+         # Create RunnerDeployment
+         # This is the official Prefect pattern that handles all the complexity
+         self._info(f"Creating deployment for flow '{flow.name}'")  # pyright: ignore[reportPossiblyUnboundVariable]
+
+         # Set AGENT_BUNDLES_URI env var if agents were built
+         job_variables: dict[str, Any] = {}
+         if agent_builds:
+             bundles_uri = f"gs://{self.config['bucket']}/{self.config['folder']}/agents"
+             job_variables["env"] = {"AGENT_BUNDLES_URI": bundles_uri}
+             self._info(f"Setting AGENT_BUNDLES_URI={bundles_uri}")
+
+         deployment = RunnerDeployment(
+             name=self.config["package"],
+             flow_name=flow.name,  # pyright: ignore[reportPossiblyUnboundVariable]
+             entrypoint=entrypoint,
+             work_pool_name=self.config["work_pool"],
+             work_queue_name=self.config["work_queue"],
+             tags=[self.config["name"]],
+             version=self.config["version"],
+             description=flow.description or f"Deployment for {self.config['package']} v{self.config['version']}",  # pyright: ignore[reportPossiblyUnboundVariable]
+             storage=_PullStepStorage(pull_steps),
+             parameters={},
+             job_variables=job_variables,
+             paused=False,
+         )
+
+         # Verify work pool exists before deploying
+         async with get_client() as client:
+             try:
+                 work_pool = await client.read_work_pool(self.config["work_pool"])
+                 self._success(f"Work pool '{self.config['work_pool']}' verified (type: {work_pool.type})")
+             except Exception as e:
+                 self._die(f"Work pool '{self.config['work_pool']}' not accessible: {e}\nCreate it in the Prefect UI or with: prefect work-pool create")
+
+         # Apply deployment
+         # This automatically handles create vs update based on whether deployment exists
+         self._info("Applying deployment (create or update)...")
+         try:
+             deployment_id = await deployment.apply()  # type: ignore
+             self._success(f"Deployment ID: {deployment_id}")
+
+             # Print helpful URLs
+             if self.api_url:
+                 ui_url = self.api_url.replace("/api/", "/")
+                 print(f"\n🌐 View deployment: {ui_url}/deployments/deployment/{deployment_id}")
+                 print(f"🚀 Run now: prefect deployment run '{flow.name}/{self.config['package']}'")  # pyright: ignore[reportPossiblyUnboundVariable]
+         except Exception as e:
+             self._die(f"Failed to apply deployment: {e}")
+
+     async def run(self):
+         """Execute the complete deployment pipeline."""
+         print("=" * 70)
+         print(f"Prefect Deployment: {self.config['name']} v{self.config['version']}")
+         print(f"Target: gs://{self.config['bucket']}/{self.config['folder']}")
+         print("=" * 70)
+         print()
+
+         # Phase 1: Build flow package
+         tarball = self._build_package()
+
+         # Phase 2: Build agent bundles (if configured)
+         agent_builds = self._build_agents()
+
+         # Phase 3: Upload flow package (include private dependency wheels from agent builds)
+         vendor_wheels: list[Path] = []
+         if agent_builds:
+             seen: set[str] = set()
+             for build_info in agent_builds.values():
+                 for filename, filepath in build_info["files"].items():
+                     if filename.endswith(".whl") and filename not in seen and "cli_agents" in filename:
+                         vendor_wheels.append(filepath)
+                         seen.add(filename)
+         await self._upload_package(tarball, vendor_wheels)
+
+         # Phase 4: Upload agent bundles
+         await self._upload_agents(agent_builds)
+
+         # Phase 5: Create/update Prefect deployment
+         await self._deploy_via_api(agent_builds)
+
+         print()
+         print("=" * 70)
+         self._success("Deployment complete!")
+         print("=" * 70)
+
+
+ # ============================================================================
+ # CLI Entry Point
+ # ============================================================================
+
+
+ def main():
+     """Command-line interface for deployment script."""
+     parser = argparse.ArgumentParser(
+         description="Deploy Prefect flows to GCP using the official RunnerDeployment pattern",
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+         epilog="""
+ Prerequisites:
+   - Settings configured with PREFECT_API_URL (and optionally PREFECT_API_KEY)
+   - Settings configured with PREFECT_GCS_BUCKET
+   - pyproject.toml with project name and version
+   - Package installed locally: pip install -e .
+   - GCP authentication configured (via service account or default credentials)
+   - Work pool created in Prefect UI or CLI
+
+ Settings can be configured via:
+   - Environment variables (e.g., export PREFECT_API_URL=...)
+   - .env file in the current directory
+ """,
+     )
+
+     parser.parse_args()
+
+     try:
+         deployer = Deployer()
+         asyncio.run(deployer.run())
+     except KeyboardInterrupt:
+         print("\n✗ Deployment cancelled by user", file=sys.stderr)
+         sys.exit(1)
+     except Exception as e:
+         print(f"\n✗ Unexpected error: {e}", file=sys.stderr)
+
+         traceback.print_exc()
+         sys.exit(1)
+
+
+ if __name__ == "__main__":
+     main()
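
The agent bundling above is driven entirely by tables that deploy.py reads from pyproject.toml ([tool.deploy], [tool.deploy.agents.*], and each agent's own [tool.agent]). A hypothetical layout matching those lookups (agent, path, and vendor names are invented for illustration):

    # pyproject.toml (project root)
    [tool.deploy]
    cli_agents_source = "vendor/cli-agents"

    [tool.deploy.agents.researcher]
    path = "agents/researcher"        # must contain its own pyproject.toml
    extra_vendor = ["shared-utils"]   # optional wheels built from repo-root directories

    # agents/researcher/pyproject.toml
    [tool.agent]
    module = "agent_researcher"

With PREFECT_API_URL and PREFECT_GCS_BUCKET configured, the script is run from the project root as python -m ai_pipeline_core.deployment.deploy; it builds the sdist, builds any agent bundles, uploads everything to gs://&lt;bucket&gt;/flows/&lt;project&gt;/, and applies the deployment.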