beamflow-cli 0.3.4__tar.gz → 0.3.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/PKG-INFO +1 -1
  2. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/commands/deploy.py +68 -33
  3. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/main.py +20 -4
  4. beamflow_cli-0.3.6/beamflow/templates/AGENTS.md +222 -0
  5. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_.dockerignore +0 -1
  6. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_api_main.py +1 -1
  7. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_config/[env]/(backend-dramatiq)/backend.yaml +2 -2
  8. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_config/[env]/_backend.yaml +1 -1
  9. beamflow_cli-0.3.6/beamflow/templates/_config/shared/.env +1 -0
  10. {beamflow_cli-0.3.4/beamflow/templates/_config → beamflow_cli-0.3.6/beamflow/templates/_config/shared}/clients/demoClient.yaml +1 -1
  11. beamflow_cli-0.3.6/beamflow/templates/_config/shared/clients.yaml +3 -0
  12. beamflow_cli-0.3.6/beamflow/templates/_deployment/shared/api.Dockerfile +18 -0
  13. beamflow_cli-0.3.6/beamflow/templates/_deployment/shared/worker.Dockerfile +18 -0
  14. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_worker_main.py +2 -2
  15. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/pyproject.toml +1 -1
  16. beamflow_cli-0.3.4/beamflow/templates/_deployment/shared/api.Dockerfile +0 -10
  17. beamflow_cli-0.3.4/beamflow/templates/_deployment/shared/worker.Dockerfile +0 -9
  18. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/__init__.py +0 -0
  19. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/commands/__init__.py +0 -0
  20. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/commands/auth.py +0 -0
  21. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/commands/build.py +0 -0
  22. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/commands/project.py +0 -0
  23. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/commands/run.py +0 -0
  24. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/core/__init__.py +0 -0
  25. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/core/api_client.py +0 -0
  26. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/core/auth_server.py +0 -0
  27. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/core/builder.py +0 -0
  28. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/core/config.py +0 -0
  29. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/core/docker_utils.py +0 -0
  30. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/README.md +0 -0
  31. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_.beamflow +0 -0
  32. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_config/[env]/(backend-asyncio)/backend.yaml +0 -0
  33. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_config/[env]/(backend-managed)/backend.yaml +0 -0
  34. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_config/shared/backend.yaml +0 -0
  35. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_deployment/[env]/(backend-asyncio)/docker-compose.yaml +0 -0
  36. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_deployment/[env]/(backend-dramatiq)/docker-compose.yaml +0 -0
  37. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_deployment/[env]/.env +0 -0
  38. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_deployment/shared/.env +0 -0
  39. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_pyproject.toml +0 -0
  40. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_api/__init__.py +0 -0
  41. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_api/_routes/webhooks.py +0 -0
  42. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_shared/__init__.py +0 -0
  43. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_shared/clients/client.py +0 -0
  44. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_shared/models/models.py +0 -0
  45. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_shared/tasks/sharedTasks.py +0 -0
  46. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_worker/__init__.py +0 -0
  47. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/templates/_src/_worker/tasks/tasks.py +0 -0
  48. {beamflow_cli-0.3.4 → beamflow_cli-0.3.6}/beamflow/ui/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: beamflow-cli
3
- Version: 0.3.4
3
+ Version: 0.3.6
4
4
  Summary: CLI for the Beamflow Managed Platform
5
5
  Author: juraj.bezdek@gmail.com
6
6
  Author-email: juraj.bezdek@gmail.com
@@ -1,6 +1,6 @@
1
1
  from typing import Optional
2
- import typer
3
2
  import asyncio
3
+ import typer
4
4
  from pathlib import Path
5
5
  from rich.console import Console
6
6
  from rich.progress import Progress, SpinnerColumn, TextColumn
@@ -11,12 +11,26 @@ from .build import run_build_flow
11
11
  app = typer.Typer()
12
12
  console = Console()
13
13
 
14
+ # Status labels shown to the user while polling
15
+ _STATUS_LABELS = {
16
+ "QUEUED": "Queued, waiting for deployment to start...",
17
+ "DEPLOYING": "Deploying services to Cloud Run...",
18
+ "INIT_PENDING": "Waiting for worker to register scheduled tasks...",
19
+ "INIT_RECEIVED": "Tasks registered, running health checks...",
20
+ "HEALTH_CHECK": "Health-checking API and Worker...",
21
+ "DEPLOYED": "Deployed successfully ✓",
22
+ "FAILED": "Deployment failed.",
23
+ }
24
+
25
+ _TERMINAL_STATUSES = {"DEPLOYED", "FAILED"}
26
+
27
+
14
28
  async def run_deploy_flow(artifact_id: Optional[str] = None):
15
29
  project_config = load_project_config()
16
30
  if not project_config:
17
31
  console.print("[red]No .beamflow found. Please run 'beamflow init' first.[/red]")
18
32
  raise typer.Exit(code=1)
19
-
33
+
20
34
  project_id = project_config.project_id
21
35
  if not project_id:
22
36
  console.print("[red]project_id not found in .beamflow[/red]")
@@ -41,44 +55,39 @@ async def run_deploy_flow(artifact_id: Optional[str] = None):
41
55
  deploy_data = await api.post("/v1/deploy", json={
42
56
  "project_id": project_id,
43
57
  "artifact_id": artifact_id,
44
- "env_vars": {} # TODO: load from .beamflow.yaml or env
58
+ "env_vars": {}
45
59
  })
46
60
  deploy_id = deploy_data["deploy_id"]
47
61
  progress.update(task, description=f"Deployment triggered (ID: {deploy_id}).")
48
-
49
- # 3. Poll status
50
- task = progress.add_task(description="Deploying...", total=None)
62
+
63
+ # 3. Poll status until terminal
64
+ last_status = None
51
65
  while True:
52
66
  status_data = await api.get(f"/v1/deploy/{deploy_id}/status")
53
- status = status_data["status"]
54
-
55
- if status == "SUCCESS":
56
- progress.update(task, description="Deployment successful!")
57
- return status_data
58
- elif status in ["FAILURE", "INTERNAL_ERROR", "TIMEOUT", "CANCELLED"]:
59
- progress.update(task, description=f"Deployment failed with status: {status}")
60
- console.print(f"[red]Deployment failed: {status}[/red]")
67
+ current_status = status_data["status"]
68
+
69
+ if current_status != last_status:
70
+ label = _STATUS_LABELS.get(current_status, f"Status: {current_status}")
71
+ progress.update(task, description=label)
72
+ last_status = current_status
73
+
74
+ if current_status == "DEPLOYED":
75
+ break
76
+ elif current_status == "FAILED":
77
+ error = status_data.get("error_message", "Unknown error")
78
+ console.print(f"[red]Deployment failed: {error}[/red]")
61
79
  raise typer.Exit(code=1)
62
-
63
- # Since deployment is mostly a placeholder in API for now,
64
- # we might want to just return if it's QUEUED or something
65
- # but to be correct we should poll.
66
- progress.update(task, description=f"Deploying... ({status})")
67
-
68
- # NOTE: For now, the API just returns QUEUED and doesn't update.
69
- # I'll add a safety break if it stays QUEUED for too long or just return Success for demo if it's QUEUED.
70
- # Actually, I'll just return it so the user sees it.
71
- if status == "QUEUED":
72
- # For demo purposes, we can assume it will eventually succeed or just stop here.
73
- # But let's follow the polling pattern.
74
- pass
75
-
80
+
76
81
  await asyncio.sleep(5)
77
82
 
83
+ return status_data
84
+
85
+
78
86
  @app.command()
79
87
  def deploy(
80
88
  env: str = typer.Argument(..., help="Environment to deploy to"),
81
- artifact: Optional[str] = typer.Option(None, "--artifact", "-a", help="Artifact ID to deploy")
89
+ artifact: Optional[str] = typer.Option(None, "--artifact", "-a", help="Artifact ID to deploy"),
90
+ non_interactive: bool = typer.Option(False, "--non-interactive", help="Do not ask for confirmation")
82
91
  ):
83
92
  """Deploy an artifact to the managed platform for a specified environment."""
84
93
  project_config = load_project_config()
@@ -86,19 +95,45 @@ def deploy(
86
95
  console.print("[red]No .beamflow found. Please run 'beamflow init' first.[/red]")
87
96
  raise typer.Exit(code=1)
88
97
 
98
+ # Ask for confirmation unless non-interactive is provided
99
+ if not non_interactive:
100
+ from rich.prompt import Confirm
101
+ if not Confirm.ask(f"Are you sure you want to deploy to '{env}'?"):
102
+ console.print("Deployment cancelled.")
103
+ raise typer.Exit(code=0)
104
+
89
105
  # Check if env is managed
90
106
  is_managed = False
91
107
  for em in project_config.environments:
92
108
  if em.name == env:
93
109
  is_managed = em.managed
94
110
  break
95
-
111
+
96
112
  if not is_managed:
97
113
  console.print(f"[red]Environment '{env}' is not managed. Deployment is only supported for managed environments.[/red]")
98
114
  console.print("[yellow]Update your .beamflow environments if this is incorrect.[/yellow]")
99
115
  raise typer.Exit(code=1)
100
116
 
117
+ project_id = project_config.project_id
118
+ if not project_id:
119
+ if not non_interactive:
120
+ from rich.prompt import Confirm
121
+ if Confirm.ask("Project ID not found. Would you like to link to a managed project now?"):
122
+ from .project import _link_project
123
+ _link_project(project_config)
124
+ project_id = project_config.project_id
125
+
126
+ if not project_id:
127
+ console.print("[red]project_id not found in .beamflow. Use 'beamflow init' or link to a project.[/red]")
128
+ raise typer.Exit(code=1)
129
+
101
130
  result = asyncio.run(run_deploy_flow(artifact))
102
- console.print(f"[green]Deployment finished![/green]")
103
- console.print(f"Deployment ID: [bold]{result['deploy_id']}[/bold]")
104
- console.print(f"Status: [bold]{result['status']}[/bold]")
131
+
132
+ api_url = result.get("api_url", "")
133
+ worker_url = result.get("worker_url", "")
134
+ console.print(f"[green]✓ Deployment complete![/green]")
135
+ console.print(f" Deployment ID : [bold]{result['deploy_id']}[/bold]")
136
+ if api_url:
137
+ console.print(f" API URL : [bold]{api_url}[/bold]")
138
+ if worker_url:
139
+ console.print(f" Worker URL : [bold]{worker_url}[/bold]")
@@ -73,6 +73,21 @@ def check():
73
73
  """Inspect the current project structure."""
74
74
  project_cmds.check()
75
75
 
76
+ @app.command()
77
+ def link():
78
+ """
79
+ [bold cyan]Link[/bold cyan] your current project to a Beamflow Managed Project.
80
+ """
81
+ from .core.config import load_project_config
82
+ from .commands.project import _link_project
83
+
84
+ project_config = load_project_config()
85
+ if not project_config:
86
+ console.print("[red]No .beamflow found. Please run 'beamflow init' first.[/red]")
87
+ raise typer.Exit(code=1)
88
+
89
+ _link_project(project_config)
90
+
76
91
  @env_app.command("add")
77
92
  def env_add(
78
93
  env_name: Optional[str] = typer.Argument(None, help="Environment name to add"),
@@ -115,7 +130,7 @@ def env_init_vscode(
115
130
  def run(
116
131
  ctx: typer.Context,
117
132
  env: str = typer.Argument("local", help="Environment to run (default: local)"),
118
- build: bool = typer.Option(False, "--build", help="Build images before starting"),
133
+ build: bool = typer.Option(True, "--build/--no-build", help="Build images before starting"),
119
134
  detach: bool = typer.Option(False, "--detach", "-d", help="Run in background"),
120
135
  logs: bool = typer.Option(False, "--logs", help="Follow logs (useful with --detach)")
121
136
  ):
@@ -187,7 +202,8 @@ def build(
187
202
  def deploy(
188
203
  ctx: typer.Context,
189
204
  env: str = typer.Argument("prod", help="Environment to deploy to (default: prod)"),
190
- artifact: Optional[str] = typer.Option(None, "--artifact", "-a", help="Artifact ID to deploy")
205
+ artifact: Optional[str] = typer.Option(None, "--artifact", "-a", help="Artifact ID to deploy"),
206
+ non_interactive: bool = typer.Option(False, "--non-interactive", help="Do not ask for confirmation")
191
207
  ):
192
208
  """
193
209
  [bold cyan]Deploy[/bold cyan] your project to the Beamflow Managed Platform.
@@ -213,7 +229,7 @@ def deploy(
213
229
 
214
230
  if not env_mode:
215
231
  if env == "prod":
216
- if Confirm.ask(f"Environment '{env}' not found. Would you like to set it up now?"):
232
+ if not non_interactive and Confirm.ask(f"Environment '{env}' not found. Would you like to set it up now?"):
217
233
  # We need to pass the actual project_config object to add_environment
218
234
  # But project_cmds is a module, we should be careful about cyclic imports or just use it.
219
235
  project_cmds.add_environment(project_config, env_name=env)
@@ -232,7 +248,7 @@ def deploy(
232
248
  console.print(f"To deploy to a specific environment, use: [bold]beamflow deploy <env_name>[/bold]")
233
249
  raise typer.Exit(code=1)
234
250
 
235
- deploy_cmds.deploy(env=env, artifact=artifact)
251
+ deploy_cmds.deploy(env=env, artifact=artifact, non_interactive=non_interactive)
236
252
 
237
253
  @app.command()
238
254
  def whoami():
@@ -0,0 +1,222 @@
1
+ # Beamflow Agents Guide
2
+
3
+ This guide explains how to build AI Agents and robust integrations using Beamflow. It covers the core concepts you need to know to bring data in, process it, and interact with external systems.
4
+
5
+ ## Core Concepts
6
+
7
+ Beamflow integrations revolve around the following core ideas:
8
+ - **Integration Context**: Automatically tracks the current integration, pipeline, run, and context across distributed operations.
9
+ - **Ingress**: How data enters your pipelines (via Webhooks or Polled schedules).
10
+ - **Clients**: How you make authenticated requests to external APIs.
11
+ - **Records Feed**: How you queue and deduplicate records for processing.
12
+ - **Project Structure**: How to organize code between API, Worker, and Shared modules.
13
+
14
+ ## Bringing Data In: Ingress
15
+
16
+ You can trigger your pipelines through two main ingress decorators provided by `beamflow_lib.pipelines.ingress`:
17
+
18
+ ### 1. Webhooks (`@ingress.webhook`)
19
+ Use webhooks when an external system can push events to your application. This decorator registers the handler as part of the integration but leaves the exact handling logic to you.
20
+
21
+ ```python
22
+ from beamflow_lib.pipelines.ingress import ingress
23
+
24
+ @router.post("/webhook/slack")
25
+ @ingress.webhook(pipeline="slack.messages", integration="slack")
26
+ async def handle_slack_webhook(request):
27
+ data = await request.json()
28
+ # Read the webhook and process the data!
29
+ ```
30
+
31
+ ### 2. Polling (`@ingress.poll`)
32
+ Use polling when you need to fetch data on a schedule. This decorator acts as a specialized scheduler entrypoint that injects a durable `state` dictionary into your function. This is perfect for remembering "watermarks" (like the last processed token or timestamp) to paginate stateful APIs.
33
+
34
+ The state is automatically saved for you as long as the function executes without exceptions.
35
+
36
+ ```python
37
+ from beamflow_lib.pipelines.ingress import ingress
38
+ from datetime import datetime
39
+
40
+ @ingress.poll(pipeline="slack.messages", integration="slack", schedule="*/5 * * * *")
41
+ async def poll_slack(state: dict):
42
+ # Retrieve the watermark from the previous run
43
+ since = state.get("since")
44
+
45
+ # Fetch new data using the watermark...
46
+ page = await fetch_messages_from_api(since=since)
47
+
48
+ # Process or publish data
49
+
50
+ # Update the watermark; it will be automatically saved!
51
+ state["since"] = datetime.now()
52
+ ```
53
+
54
+ ## Making External Requests: Clients
55
+
56
+ Beamflow makes it easy to interact with external APIs via Clients. They handle setting Base URLs, automatic Authentication headers, observability tracing, and defaults out of the box.
57
+
58
+ ### Adding a Client
59
+ To configure a new REST client, you place a configuration file (like `slackClient.yaml`) in your shared clients folder. Beamflow dynamically loads these clients if your configuration registry points to the folder. For example, if you have `_config/shared/clients.yaml` configured as follows:
60
+ ```yaml
61
+ path: clients
62
+ pattern: "*.yaml"
63
+ ```
64
+ You can simply define `clients/slackClient.yaml` and the system will expose it to your tasks.
65
+
66
+ ### Using a Client
67
+ Once configured, you can retrieve the standard HTTP client (from the `beamflow_clients` package) anywhere.
68
+
69
+ ```python
70
+ from beamflow_clients import get_client
71
+ from beamflow_lib.decorators import integration_task
72
+
73
+ # Load the client configuration
74
+ slack_client = get_client("slackClient")
75
+
76
+ @integration_task(integration="slack", integration_pipeline="send_message")
77
+ async def send_slack_message_task(message: str):
78
+ # Authorization and base URL logic are automatically handled here
79
+ await slack_client.request("POST", "/chat.postMessage", json={"text": message})
80
+ ```
81
+
82
+ ### Custom Clients
83
+ If you have an API you interact with heavily, you can define a custom typed client. This encapsulates specific API routes for a better developer experience.
84
+
85
+ ```python
86
+ from beamflow_lib.clients import HttpClient, client
87
+ from typing import List
88
+
89
+ # Extending HttpClient and registering with the @client decorator
90
+ @client("DemoClient")
91
+ class DemoClient(HttpClient):
92
+ """Custom client for the Demo API."""
93
+
94
+ async def get_users(self) -> List[dict]:
95
+ response = await self.request("GET", "/users")
96
+ return response.json()
97
+
98
+ async def get_user(self, user_id: int) -> dict:
99
+ response = await self.request("GET", f"/users/{user_id}")
100
+ return response.json()
101
+
102
+ # You can then resolve this custom client by name:
103
+ # demo = get_client("DemoClient")
104
+ ```
105
+
106
+ ## Processing Data: Records Feed
107
+
108
+ When you receive payloads via Webhooks or Polling, you usually want to process them robustly and asynchronously on the backend. The `RecordsFeed` module provides an opinionated record queue with built-in deduplication that prevents overwhelming pipelines.
109
+
110
+ ### Publishing Records
111
+ The framework resolves deduplication using a unique combination of `(integration, record_type, record_id)`. If multiple records arrive with the same identity, "latest-wins" semantics are applied (keeping the one with the latest timestamp).
112
+
113
+ It also seamlessly supports feeding massive data payloads -- large objects (> 5KB) are automatically offloaded to a BlobStore.
114
+
115
+ ```python
116
+ from beamflow_lib.pipelines.records_feed import RecordsFeed
117
+ from beamflow_lib.pipelines.records_model import RecordData
118
+
119
+ # Retrieve your specific feed
120
+ feed = RecordsFeed.get(feed_id="slack.messages")
121
+
122
+ # A common pattern is inserting data retrieved via an `@ingress.poll` into a feed
123
+ await feed.publish(RecordData(
124
+ record_id="msg_123",
125
+ record_type="message",
126
+ data={"text": "Hello World", "user": "U123"}
127
+ ))
128
+ ```
129
+
130
+ ### Consuming Records
131
+ To process the data asynchronously, decorate a function with `@feed_consumer`. This supports robust configurations like automatic batched receiving, delays, rate-limiting, and concurrency control.
132
+
133
+ ```python
134
+ from beamflow_lib import feed_consumer, RecordData
135
+
136
+ @feed_consumer(
137
+ feed_id="slack.messages",
138
+ batch=True,
139
+ max_batch_size=50,
140
+ max_delay_ms=2000 # wait up to 2 seconds for the queue to fill
141
+ )
142
+ async def process_batch_messages(records: list[RecordData]):
143
+ """
144
+ Consumes a maximum of 50 records in a batch, waiting
145
+ up to 2 seconds for the queue to fill.
146
+ """
147
+ print(f"Began processing {len(records)} records.")
148
+ for record in records:
149
+ print(f"Record: {record.record_id} of type {record.record_type}")
150
+ print(f"Data Payload: {record.data}")
151
+ ```
152
+
153
+ ```python
154
+ @feed_consumer(
155
+ feed_id="slack.messages",
156
+ batch=False
157
+ )
158
+ async def process_batch_messages(record: RecordData):
159
+ """
160
+ Consumes a single record at a time.
161
+ """
162
+ print(f"Began processing {record.record_id} of type {record.record_type}")
163
+ print(f"Data Payload: {record.data}")
164
+ ```
165
+ ## Project Structure
166
+
167
+ A typical Beamflow project is organized into three main areas: Configuration, Deployment, and Source Code.
168
+
169
+ ```text
170
+ .
171
+ ├── config/ # Application configuration
172
+ │ ├── shared/ # Base configuration for all environments
173
+ │ │ ├── backend.yaml # Task backend settings (shared)
174
+ │ │ ├── clients.yaml # Points to the clients folder
175
+ │ │ ├── clients/ # Shared client configurations
176
+ │ │ │ └── fooClient.yaml
177
+ │ │ └── .env # Shared environment variables
178
+ │ └── local/ # Environment-specific overrides (e.g., local, dev, prod)
179
+ │ ├── backend.yaml # Local-specific backend settings
180
+ │ └── .env # Local-specific environment variables
181
+ ├── deployment/ # deployment-related files
182
+ │ ├── shared/ # Base Dockerfiles
183
+ │ │ ├── api.Dockerfile
184
+ │ │ └── worker.Dockerfile
185
+ │ └── local/ # Local deployment configuration
186
+ │ └── docker-compose.yaml
187
+ ├── src/ # Source code for your integration
188
+ │ ├── api/ # API-specific code (Routes, Webhooks)
189
+ │ │ └── routes/
190
+ │ │ └── webhooks.py # typical place for @ingress.webhook handlers; webhooks can also live in purpose-specific files, e.g. slack_webhooks.py / stripe_webhooks.py
191
+ │ │ └── routes.py # standard API routes (e.g. health checks) can also go here
192
+ │ ├── worker/ # Worker-specific code (Tasks, Consumers)
193
+ │ │ └── tasks/
194
+ │ │ └── xyzTask.py # place for feed consumers, polling tasks, integration tasks etc
195
+ │ └── shared/ # Shared code (Models, Clients, Utils)
196
+ │ ├── clients/ # custom typed clients go here
197
+ │ └── models/ # custom models go here
198
+ ├── api_main.py # API Entry point
199
+ └── worker_main.py # Worker Entry point
200
+ ```
201
+
202
+ ### Source Organization (`src/`)
203
+
204
+ Beamflow separates code based on where it executes to ensure clean isolation and efficient scaling:
205
+
206
+ 1. **API (`src/api/`)**: Code that runs on the webserver. Its primary role is to handle incoming requests, typically defined using the `@ingress.webhook` decorator.
207
+ 2. **Worker (`src/worker/`)**: Code that runs on the background worker. This is where the "heavy lifting" happens, including `@integration_task`, `@ingress.poll`, and `@feed_consumer` handlers.
208
+ 3. **Shared (`src/shared/`)**: Logic used by both the API and Worker, such as custom typed Clients, Pydantic models, and utility functions.
209
+
210
+ ### Configuration & Environments
211
+
212
+ Beamflow uses a tiered configuration system that allows for seamless transitions between environments:
213
+
214
+ - **Inheritance**: Configuration is loaded from `config/shared/` first, and then overridden by environment-specific files in `config/{env}/`.
215
+ - **Backend Setup**: The `backend.yaml` file defines how tasks are executed (e.g., `asyncio` for local dev or `dramatiq` or `managed` for production).
216
+ - **Environment Variables**: `.env` files in both the shared and environment folders are automatically resolved and injected into the application.
217
+
218
+ ### Execution Entry Points
219
+
220
+ - **`api_main.py`**: The entry point for the web server. It sets up the FastAPI application and automatically imports modules from `src/api` to register routes.
221
+ - **`worker_main.py`**: The entry point for the background process. It connects to the task backend (like Redis) and automatically imports modules from `src/worker` to register task signatures.
222
+
@@ -4,6 +4,5 @@ __pycache__
4
4
  .pytest_cache
5
5
  *.pyc
6
6
  .beamflow
7
- pyproject.toml
8
7
  README.md
9
8
  tests
@@ -3,7 +3,7 @@ from fastapi import Request
3
3
  from beamflow_lib.config.env_loader import load_config_dir
4
4
  from beamflow_runtime.ingress.app import create_fastapi_app
5
5
  import os
6
- env = os.getenv("ENVIVROMENT", "dev")
6
+ env = os.getenv("ENVIRONMENT", "dev")
7
7
  # Load configuration relative to this file
8
8
  config = load_config_dir("config", environment=env)
9
9
 
@@ -1,7 +1,7 @@
1
1
  backend:
2
2
  type: dramatiq
3
- dramatiq:
4
- redis_url: redis://redis:6380/0
3
+
4
+
5
5
  webhooks:
6
6
  prefix: /webhooks
7
7
 
@@ -1,4 +1,4 @@
1
1
  # Environment-specific overrides for backend
2
2
  backend:
3
- # The type of backend can be 'dramatiq', 'async_backend', or 'managed'
3
+ # The type of backend can be 'dramatiq', 'asyncio', or 'managed'
4
4
  type: managed | dramatiq | asyncio
@@ -0,0 +1 @@
1
+ BEAMFLOW_API_URL="http://localhost:8080"
@@ -13,7 +13,7 @@ auth:
13
13
  extra_params: {}
14
14
 
15
15
  # Authentication Alternatives (Commented out)
16
- # ------------------------------------------
16
+ # ------------------------------------------
17
17
  # Basic Auth:
18
18
  # auth:
19
19
  # type: basic
@@ -0,0 +1,3 @@
1
+ path: clients
2
+ pattern: "*.yaml"
3
+ recursive: false
@@ -0,0 +1,18 @@
1
+ # API Dockerfile for {project_name}
2
+ FROM beamflow/beamflow-base:latest
3
+
4
+ WORKDIR /app
5
+ COPY . .
6
+
7
+ # Build arg that chooses which extra(s) to install
8
+ # e.g. SERVICE_EXTRAS="api" or "worker" or "api,worker"
9
+ ARG SERVICE_EXTRAS="api"
10
+
11
+ # Install base deps + extras into system site-packages
12
+ # Base deps:
13
+ RUN uv pip install --system -r pyproject.toml
14
+ RUN uv pip install --system -r pyproject.toml --extra api || echo "no api extra; skipping"
15
+ RUN rm -rf src/worker
16
+ EXPOSE 8000
17
+ ENV PYTHONPATH=/app/src:/app
18
+ CMD ["python", "api_main.py"]
@@ -0,0 +1,18 @@
1
+ # Worker Dockerfile for {project_name}
2
+ FROM beamflow/beamflow-base:latest
3
+
4
+ WORKDIR /app
5
+ COPY . .
6
+
7
+ # Build arg that chooses which extra(s) to install
8
+ # e.g. SERVICE_EXTRAS="api" or "worker" or "api,worker"
9
+ ARG SERVICE_EXTRAS="worker"
10
+
11
+ # Install base deps + extras into system site-packages
12
+ # Base deps:
13
+ RUN uv pip install --system -r pyproject.toml
14
+ RUN uv pip install --system -r pyproject.toml --extra worker || echo "no worker extra; skipping"
15
+
16
+ RUN rm -rf src/api
17
+ ENV PYTHONPATH=/app/src:/app
18
+ CMD ["python", "-u", "worker_main.py"]
@@ -8,8 +8,8 @@ from beamflow_runtime import initialize_runtime, run_worker
8
8
  logging.basicConfig(level=logging.INFO)
9
9
 
10
10
  # Load config
11
- environment = os.getenv("ENVIRONMENT", "dev")
12
- config = load_config_dir("config", environment=environment)
11
+ env = os.getenv("ENVIRONMENT", "dev")
12
+ config = load_config_dir("config", environment=env)
13
13
 
14
14
  # Initialize runtime with auto-import of the tasks directory
15
15
  runtime = initialize_runtime(
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "beamflow-cli"
3
- version = "0.3.4"
3
+ version = "0.3.6"
4
4
  description = "CLI for the Beamflow Managed Platform"
5
5
  authors = [{name = "juraj.bezdek@gmail.com", email = "juraj.bezdek@gmail.com"}]
6
6
  requires-python = ">=3.11"
@@ -1,10 +0,0 @@
1
- # API Dockerfile for {project_name}
2
- FROM beamflow/beamflow-base:latest
3
-
4
- WORKDIR /app
5
- COPY . .
6
-
7
- RUN rm -rf src/worker
8
- EXPOSE 8000
9
- ENV PYTHONPATH=/app/src:/app
10
- CMD ["python", "api_main.py"]
@@ -1,9 +0,0 @@
1
- # Worker Dockerfile for {project_name}
2
- FROM beamflow/beamflow-base:latest
3
-
4
- WORKDIR /app
5
- COPY . .
6
-
7
- RUN rm -rf src/api
8
- ENV PYTHONPATH=/app/src:/app
9
- CMD ["python", "-u", "worker_main.py"]