ai-pipeline-core 0.4.4-py3-none-any.whl → 0.4.6-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
ai_pipeline_core/__init__.py

@@ -64,7 +64,7 @@ from .prompt_manager import PromptManager
 from .settings import Settings
 from .testing import disable_run_logger, prefect_test_harness

-__version__ = "0.4.4"
+__version__ = "0.4.6"

 __all__ = [
     "AIMessageType",

ai_pipeline_core/deployment/deploy.py

@@ -312,6 +312,28 @@ class Deployer:

         return builds

+    def _build_vendor_packages(self) -> list[Path]:
+        """Build vendor wheels from [tool.deploy].vendor_packages paths.
+
+        Returns:
+            List of built wheel paths, deduplicated by filename.
+        """
+        vendor_paths: list[str] = self._pyproject_data.get("tool", {}).get("deploy", {}).get("vendor_packages", [])
+        if not vendor_paths:
+            return []
+
+        self._info(f"Building {len(vendor_paths)} vendor package(s)")
+        wheels: list[Path] = []
+        seen: set[str] = set()
+        for vendor_path_str in vendor_paths:
+            vendor_dir = Path(vendor_path_str).resolve()
+            wheel = self._build_wheel_from_source(vendor_dir)
+            if wheel.name not in seen:
+                wheels.append(wheel)
+                seen.add(wheel.name)
+                self._success(f"Built vendor wheel: {wheel.name}")
+        return wheels
+
     def _create_gcs_bucket(self, bucket_folder: str) -> Any:
         """Create a GcsBucket instance for uploading files.

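
For reference, the vendor_packages list above is read from the project's pyproject.toml under [tool.deploy]. A minimal sketch of that lookup outside the Deployer, using a made-up excerpt (the paths are placeholders, not values from this package):

    # Sketch: parse a hypothetical pyproject.toml excerpt and read
    # tool -> deploy -> vendor_packages the same way _build_vendor_packages does.
    import tomllib  # stdlib, Python 3.11+

    pyproject_excerpt = """
    [tool.deploy]
    vendor_packages = ["../shared-lib", "vendor/private-utils"]
    """

    data = tomllib.loads(pyproject_excerpt)
    vendor_paths = data.get("tool", {}).get("deploy", {}).get("vendor_packages", [])
    print(vendor_paths)  # ['../shared-lib', 'vendor/private-utils']
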
@@ -361,7 +383,7 @@ class Deployer:
         dest_uri = f"gs://{self.config['bucket']}/{flow_folder}/{tarball.name}"
         self._info(f"Uploading to {dest_uri}")

-        tarball_bytes = tarball.read_bytes()  # noqa: ASYNC240
+        tarball_bytes = tarball.read_bytes()
         await bucket.write_path(tarball.name, tarball_bytes)

         self._success(f"Package uploaded to {flow_folder}/{tarball.name}")
@@ -457,6 +479,14 @@ class Deployer:
             paused=False,
         )

+        # Populate parameter schema from flow function signature
+        deployment._set_defaults_from_flow(flow)  # pyright: ignore[reportPossiblyUnboundVariable]
+
+        # Inject result type schema so consumers can discover the response shape
+        return_type = getattr(flow.fn, "__annotations__", {}).get("return")  # pyright: ignore[reportPossiblyUnboundVariable]
+        if return_type is not None and hasattr(return_type, "model_json_schema"):
+            deployment._parameter_openapi_schema.definitions["_ResultSchema"] = return_type.model_json_schema()
+
         # Verify work pool exists before deploying
         async with get_client() as client:
             try:
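
The _ResultSchema definition injected above is the Pydantic JSON schema of the flow's return annotation. A small illustration with a made-up result model (the class and fields below are hypothetical, not from this package):

    # Hypothetical result model; shows what model_json_schema() contributes
    # to the deployment's parameter schema under "_ResultSchema".
    from pydantic import BaseModel

    class ExampleResult(BaseModel):
        ok: bool
        report_url: str | None = None

    schema = ExampleResult.model_json_schema()
    print(schema["title"])              # ExampleResult
    print(list(schema["properties"]))   # ['ok', 'report_url']
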
@@ -494,21 +524,27 @@ class Deployer:
         # Phase 2: Build agent bundles (if configured)
         agent_builds = self._build_agents()

-        # Phase 3: Upload flow package (include private dependency wheels from agent builds)
-        vendor_wheels: list[Path] = []
-        if agent_builds:
-            seen: set[str] = set()
-            for build_info in agent_builds.values():
-                for filename, filepath in build_info["files"].items():
-                    if filename.endswith(".whl") and filename not in seen and "cli_agents" in filename:
-                        vendor_wheels.append(filepath)
-                        seen.add(filename)
+        # Phase 3: Build vendor packages from [tool.deploy].vendor_packages
+        vendor_wheels = self._build_vendor_packages()
+
+        # Build cli-agents wheel if source is configured — it's a private package
+        # not on PyPI, so the worker needs the wheel even when no agents are deployed
+        cli_agents_source = self._get_cli_agents_source()
+        if cli_agents_source:
+            cli_dir = Path(cli_agents_source).resolve()
+            if (cli_dir / "pyproject.toml").exists():
+                cli_wheel = self._build_wheel_from_source(cli_dir)
+                if cli_wheel.name not in {w.name for w in vendor_wheels}:
+                    vendor_wheels.append(cli_wheel)
+                    self._success(f"Built cli-agents vendor wheel: {cli_wheel.name}")
+
+        # Phase 4: Upload flow package + vendor wheels
         await self._upload_package(tarball, vendor_wheels)

-        # Phase 4: Upload agent bundles
+        # Phase 5: Upload agent bundles
         await self._upload_agents(agent_builds)

-        # Phase 5: Create/update Prefect deployment
+        # Phase 6: Create/update Prefect deployment
         await self._deploy_via_api(agent_builds)

         print()

ai_pipeline_core/deployment/remote.py

@@ -1,9 +1,10 @@
 """Remote deployment utilities for calling PipelineDeployment flows via Prefect."""

-import inspect
-from collections.abc import Callable
+import asyncio
+from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
-from typing import Any, ParamSpec, TypeVar, cast
+from typing import Any, TypeVar, cast
+from uuid import UUID

 from prefect import get_client
 from prefect.client.orchestration import PrefectClient
@@ -13,14 +14,20 @@ from prefect.deployments.flow_runs import run_deployment
 from prefect.exceptions import ObjectNotFound

 from ai_pipeline_core.deployment import DeploymentContext, DeploymentResult, PipelineDeployment
+from ai_pipeline_core.documents import Document
+from ai_pipeline_core.logging import get_pipeline_logger
 from ai_pipeline_core.observability.tracing import TraceLevel, set_trace_cost, trace
 from ai_pipeline_core.pipeline.options import FlowOptions
 from ai_pipeline_core.settings import settings

-P = ParamSpec("P")
+logger = get_pipeline_logger(__name__)
+
 TOptions = TypeVar("TOptions", bound=FlowOptions)
 TResult = TypeVar("TResult", bound=DeploymentResult)

+ProgressCallback = Callable[[float, str], Awaitable[None]]
+"""Signature for remote deployment progress callbacks: (fraction, message) -> None."""
+

 def _is_already_traced(func: Callable[..., Any]) -> bool:
     """Check if function or its __wrapped__ has __is_traced__ attribute."""
@@ -30,17 +37,80 @@ def _is_already_traced(func: Callable[..., Any]) -> bool:
     return getattr(wrapped, "__is_traced__", False) if wrapped else False


-async def run_remote_deployment(deployment_name: str, parameters: dict[str, Any]) -> Any:
-    """Run a remote Prefect deployment, trying local client first then remote."""
+_POLL_INTERVAL = 5.0
+
+
+async def _poll_remote_flow_run(
+    client: PrefectClient,
+    flow_run_id: UUID,
+    deployment_name: str,
+    poll_interval: float = _POLL_INTERVAL,
+    on_progress: ProgressCallback | None = None,
+) -> Any:
+    """Poll a remote flow run until final, invoking on_progress callback with progress.
+
+    Reads the remote flow run's progress labels on each poll cycle and calls
+    on_progress(fraction, message) if provided. Without a callback, no progress
+    is reported. Only sends 1.0 on successful completion (not failure).
+    """
+    last_fraction = 0.0

-    async def _run(client: PrefectClient, as_subflow: bool) -> Any:
-        fr: FlowRun = await run_deployment(client=client, name=deployment_name, parameters=parameters, as_subflow=as_subflow)  # type: ignore
-        return await fr.state.result()  # type: ignore
+    while True:
+        try:
+            flow_run = await client.read_flow_run(flow_run_id)
+        except Exception:
+            logger.warning("Failed to poll remote flow run %s", flow_run_id, exc_info=True)
+            await asyncio.sleep(poll_interval)
+            continue
+
+        state = flow_run.state
+        if state and state.is_final():
+            if on_progress and state.is_completed():
+                await on_progress(1.0, f"[{deployment_name}] Completed")
+            return await state.result()  # type: ignore[union-attr]
+
+        if on_progress:
+            labels: dict[str, Any] = flow_run.labels or {}
+            progress_val = labels.get("progress.progress")
+
+            if progress_val is not None:
+                fraction = max(float(progress_val), last_fraction)
+                last_fraction = fraction
+                flow_name = str(labels.get("progress.flow_name", ""))
+                message = str(labels.get("progress.message", ""))
+                display = f"[{deployment_name}] {flow_name}: {message}" if flow_name else f"[{deployment_name}] Running"
+                await on_progress(fraction, display)
+            else:
+                await on_progress(last_fraction, f"[{deployment_name}] Waiting to start")
+
+        await asyncio.sleep(poll_interval)
+
+
+async def run_remote_deployment(
+    deployment_name: str,
+    parameters: dict[str, Any],
+    on_progress: ProgressCallback | None = None,
+) -> Any:
+    """Run a remote Prefect deployment with optional progress callback.
+
+    Creates the remote flow run immediately (timeout=0) then polls its state,
+    invoking on_progress(fraction, message) on each poll cycle if provided.
+    """
+
+    async def _create_and_poll(client: PrefectClient, as_subflow: bool) -> Any:
+        fr: FlowRun = await run_deployment(
+            client=client,
+            name=deployment_name,
+            parameters=parameters,
+            as_subflow=as_subflow,
+            timeout=0,
+        )  # type: ignore
+        return await _poll_remote_flow_run(client, fr.id, deployment_name, on_progress=on_progress)

     async with get_client() as client:
         try:
             await client.read_deployment_by_name(name=deployment_name)
-            return await _run(client, True)  # noqa: FBT003
+            return await _create_and_poll(client, True)  # noqa: FBT003
         except ObjectNotFound:
             pass

@@ -56,7 +126,7 @@ async def run_remote_deployment(deployment_name: str, parameters: dict[str, Any]
             await client.read_deployment_by_name(name=deployment_name)
             ctx = AsyncClientContext.model_construct(client=client, _httpx_settings=None, _context_stack=0)
             with ctx:
-                return await _run(client, False)  # noqa: FBT003
+                return await _create_and_poll(client, False)  # noqa: FBT003
         except ObjectNotFound:
             pass

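
A usage sketch of the new polling entry point, assuming a reachable Prefect API and an existing deployment; the deployment name, parameters, and the show callback are placeholders, and the import path follows the module location shown in the RECORD below:

    # Placeholder names; requires a configured Prefect API and an existing deployment.
    import asyncio

    from ai_pipeline_core.deployment.remote import run_remote_deployment

    async def show(fraction: float, message: str) -> None:
        print(f"{fraction:.0%} {message}")

    async def main() -> None:
        result = await run_remote_deployment(
            "my-flow/my_flow",           # "<flow name>/<deployment name>", placeholder
            {"project_name": "demo"},    # parameters expected by the deployed flow
            on_progress=show,
        )
        print(result)

    asyncio.run(main())
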
@@ -70,32 +140,38 @@ def remote_deployment(
     name: str | None = None,
     trace_level: TraceLevel = "always",
     trace_cost: float | None = None,
-) -> Callable[[Callable[P, TResult]], Callable[P, TResult]]:
-    """Decorator to call PipelineDeployment flows remotely with automatic serialization."""
+) -> Callable[[Callable[..., Any]], Callable[..., Coroutine[Any, Any, TResult]]]:
+    """Decorator to call PipelineDeployment flows remotely with automatic serialization.
+
+    The decorated function's body is never executed — it serves as a typed stub.
+    The wrapper enforces the deployment contract: (project_name, documents, options, context).
+    """

-    def decorator(func: Callable[P, TResult]) -> Callable[P, TResult]:
+    def decorator(func: Callable[..., Any]) -> Callable[..., Coroutine[Any, Any, TResult]]:
         fname = getattr(func, "__name__", deployment_class.name)

         if _is_already_traced(func):
             raise TypeError(f"@remote_deployment target '{fname}' already has @trace")

         @wraps(func)
-        async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> TResult:
-            sig = inspect.signature(func)
-            bound = sig.bind(*args, **kwargs)
-            bound.apply_defaults()
-
-            # Pass parameters with proper types - Prefect handles Pydantic serialization
-            parameters: dict[str, Any] = {}
-            for pname, value in bound.arguments.items():
-                if value is None and pname == "context":
-                    parameters[pname] = DeploymentContext()
-                else:
-                    parameters[pname] = value
-
-            full_name = f"{deployment_class.name}/{deployment_name or deployment_class.name}"
-
-            result = await run_remote_deployment(full_name, parameters)
+        async def _wrapper(
+            project_name: str,
+            documents: list[Document],
+            options: TOptions,
+            context: DeploymentContext | None = None,
+            *,
+            on_progress: ProgressCallback | None = None,
+        ) -> TResult:
+            parameters: dict[str, Any] = {
+                "project_name": project_name,
+                "documents": documents,
+                "options": options,
+                "context": context if context is not None else DeploymentContext(),
+            }
+
+            full_name = f"{deployment_class.name}/{deployment_name or deployment_class.name.replace('-', '_')}"
+
+            result = await run_remote_deployment(full_name, parameters, on_progress=on_progress)

             if trace_cost is not None and trace_cost > 0:
                 set_trace_cost(trace_cost)
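
One behavioral detail in the wrapper above: when no explicit deployment_name is given, the deployment segment of the name now replaces hyphens with underscores while the flow segment keeps them. A tiny sketch of that composition; _full_name is a hypothetical helper that just mirrors the wrapper's f-string:

    # Hypothetical helper mirroring the wrapper's full_name f-string.
    def _full_name(class_name: str, deployment_name: str | None = None) -> str:
        return f"{class_name}/{deployment_name or class_name.replace('-', '_')}"

    assert _full_name("my-pipeline") == "my-pipeline/my_pipeline"
    assert _full_name("my-pipeline", "staging") == "my-pipeline/staging"
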
@@ -111,6 +187,6 @@ def remote_deployment(
             name=name or deployment_class.name,
         )(_wrapper)

-        return traced_wrapper  # type: ignore[return-value]
+        return traced_wrapper

     return decorator

ai_pipeline_core-0.4.4.dist-info/METADATA → ai_pipeline_core-0.4.6.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ai-pipeline-core
-Version: 0.4.4
+Version: 0.4.6
 Summary: Core utilities for AI-powered processing pipelines using prefect
 Project-URL: Homepage, https://github.com/bbarwik/ai-pipeline-core
 Project-URL: Repository, https://github.com/bbarwik/ai-pipeline-core

ai_pipeline_core-0.4.4.dist-info/RECORD → ai_pipeline_core-0.4.6.dist-info/RECORD

@@ -1,4 +1,4 @@
-ai_pipeline_core/__init__.py,sha256=VcU67cNpAB6EJf2V_FxUx4ZFosbpb6IzFFwpJauHJzk,3270
+ai_pipeline_core/__init__.py,sha256=QAQSyrKafsNov4dy9vNqTVarh2nDdrthMfYh7X-3Mcg,3270
 ai_pipeline_core/exceptions.py,sha256=csAl7vq6xjSFBF8-UM9WZODCbhsOdOG5zH6IbA8iteM,1280
 ai_pipeline_core/prompt_manager.py,sha256=3wFkL5rrjtUT1cLInkgyhS8hKnO4MeD1cdXAEuLhgoE,9459
 ai_pipeline_core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -7,10 +7,10 @@ ai_pipeline_core/testing.py,sha256=jIRrLxNvTwdamucfJoHET2qMeRhhMZV9uEJXO5vAfis,2
 ai_pipeline_core/deployment/__init__.py,sha256=wTkVK6gcEQvqBajFMTAuodRONpN25yHbR1jtcumf0WQ,900
 ai_pipeline_core/deployment/base.py,sha256=bGSnDdrw6cLM_TItAiwptnwApbw5wkoIGY9pnwDvOTQ,37485
 ai_pipeline_core/deployment/contract.py,sha256=a1qbHhneTGB27oSOUy79CUIhOIzOoq37M63XoIMzA4Y,1952
-ai_pipeline_core/deployment/deploy.py,sha256=TCF4fH5f-K1ADODZHEyf-x7PJzDbv4qtWxlpoCe_mTs,22909
+ai_pipeline_core/deployment/deploy.py,sha256=y5FxKMm7nGwkjzA74pTffO7A82MaDuajx6LHGTem8bI,24662
 ai_pipeline_core/deployment/helpers.py,sha256=yVtGFUs4AFXkpLkiQ_ale0nXXt5btfWSb5PAbikQHNs,3312
 ai_pipeline_core/deployment/progress.py,sha256=rO2g8VIh7EpzxzGGAroXEpveWoWZkk66jkDW22BY4j8,4827
-ai_pipeline_core/deployment/remote.py,sha256=tOexisKEeeBoHLGYZWqcjr2H-nqqYc6kvoDL72AW78w,4661
+ai_pipeline_core/deployment/remote.py,sha256=tOBbICtPXbJHN8QA9juCqkeP9PqEc16mPyBrwaOwEt4,7434
 ai_pipeline_core/docs_generator/__init__.py,sha256=JbWbk-Lw5GgWrCMRuw8zvKNTZY2jXv7XqoMiBYudvRI,1255
 ai_pipeline_core/docs_generator/__main__.py,sha256=CH4agiM2suFJ63MhTg5m0GuXdc40z-6o4ojR72JQWVA,145
 ai_pipeline_core/docs_generator/cli.py,sha256=8OjdMtzQraPxWN3uPapSNJnKyPLPtnygKL0rF5JL2GY,7172
@@ -70,7 +70,7 @@ ai_pipeline_core/observability/_tracking/_writer.py,sha256=xZjwYyIxDzzzPxqkKjYAY
 ai_pipeline_core/pipeline/__init__.py,sha256=uMv1jwSyq8Ym8Hbn5097twBJLdwN1iMeqnVM4EWyrhA,282
 ai_pipeline_core/pipeline/decorators.py,sha256=CDJAeOjGLt5Ewc0Jc9zEuwLZwKyutOv89LSRS9dcXmI,37456
 ai_pipeline_core/pipeline/options.py,sha256=KF4FcT085-IwX8r649v0a9ua5xnApM0qG2wJHWbq39A,438
-ai_pipeline_core-0.4.4.dist-info/METADATA,sha256=DWL9spC2_pILAtrr6BARsTIjqWk3-VJO6WNyTqdEqyQ,29947
-ai_pipeline_core-0.4.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-ai_pipeline_core-0.4.4.dist-info/licenses/LICENSE,sha256=kKj8mfbdWwkyG3U6n7ztB3bAZlEwShTkAsvaY657i3I,1074
-ai_pipeline_core-0.4.4.dist-info/RECORD,,
+ai_pipeline_core-0.4.6.dist-info/METADATA,sha256=hyy3vHyR5xZ5GRg4Nrp8BxKojp3pa-RDg1KWPPm5_O8,29947
+ai_pipeline_core-0.4.6.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ai_pipeline_core-0.4.6.dist-info/licenses/LICENSE,sha256=kKj8mfbdWwkyG3U6n7ztB3bAZlEwShTkAsvaY657i3I,1074
+ai_pipeline_core-0.4.6.dist-info/RECORD,,