conduit-shared 0.0.1 (tar.gz)

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the package as it appears in the public registry; since 0.0.1 is a first release, every file shows as added.
@@ -0,0 +1,111 @@
+ # =============================================================================
+ # Python
+ # =============================================================================
+ __pycache__/
+ *.py[cod]
+ *$py.class
+ *.so
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+ *.manifest
+ pip-log.txt
+ pip-delete-this-directory.txt
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+ *.mo
+ *.pot
+ .venv/
+ venv/
+ ENV/
+ env/
+ .pdm.toml
+ .pdm-python
+ .pdm-build/
+ __pypackages__/
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+ .pyre/
+ .pytype/
+ cython_debug/
+ .ruff_cache/
+ .pypirc
+
+ # =============================================================================
+ # Node / Next.js
+ # =============================================================================
+ node_modules/
+ .pnp
+ .pnp.js
+ .next/
+ out/
+ next-env.d.ts
+ .source/
+ npm-debug.log*
+ yarn-debug.log*
+ yarn-error.log*
+ .pnpm-debug.log*
+ *.tsbuildinfo
+ .vercel
+
+ # =============================================================================
+ # Database
+ # =============================================================================
+ *.sqlite
+ *.sqlite3
+ *.sqlite3-journal
+ db.sqlite
+ db.sqlite3
+
+ # =============================================================================
+ # Environment & Secrets
+ # =============================================================================
+ .env
+ .env*.local
+ .env.prod
+
+ # =============================================================================
+ # IDE & OS
+ # =============================================================================
+ .DS_Store
+ *.pem
+ .idea/
+ *.swp
+ *.swo
+ *~
+
+ # =============================================================================
+ # Testing & Coverage
+ # =============================================================================
+ coverage/
+ coverage_html/
+ test-results/
+ playwright-report/
+ blob-report/
+ .vitest/
+
+ # =============================================================================
+ # TanStack Start
+ # =============================================================================
+ .output/
+ .tanstack/
@@ -0,0 +1,6 @@
+ Metadata-Version: 2.4
+ Name: conduit-shared
+ Version: 0.0.1
+ Summary: Shared models for Conduit packages
+ Requires-Python: >=3.12
+ Requires-Dist: pydantic>=2.10.0
File without changes
@@ -0,0 +1,16 @@
+ [project]
+ name = "conduit-shared"
+ version = "0.0.1"
+ description = "Shared models for Conduit packages"
+ readme = "README.md"
+ requires-python = ">=3.12"
+ dependencies = [
+     "pydantic>=2.10.0",
+ ]
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.hatch.build.targets.wheel]
+ packages = ["src/shared"]
@@ -0,0 +1,115 @@
+ """Shared models for Conduit packages."""
+
+ from shared.api import API_PREFIX, HOURLY_BUCKET_TTL, MINUTE_BUCKET_TTL, REGISTRY_TTL
+ from shared.artifacts import ArtifactType
+ from shared.events import (
+     BatchEventItem,
+     BatchEventRequest,
+     EventRequest,
+     EventType,
+     HealthResponse,
+     JobCheckpointEvent,
+     JobCompletedEvent,
+     JobFailedEvent,
+     JobProgressEvent,
+     JobRetryingEvent,
+     JobStartedEvent,
+ )
+ from shared.models import Job, JobStatus, StateTransition
+ from shared.patterns import get_matching_patterns, matches_event_pattern
+ from shared.schemas import (
+     ApiDetailResponse,
+     ApiEndpoint,
+     ApiInfo,
+     ApiInstance,
+     ApisListResponse,
+     ApiStats,
+     ApiTrendHour,
+     ApiTrendsResponse,
+     ArtifactCreateResponse,
+     ArtifactListResponse,
+     ArtifactQueuedResponse,
+     ArtifactResponse,
+     CreateArtifactRequest,
+     DashboardStats,
+     EndpointsListResponse,
+     ErrorResponse,
+     FunctionDetailResponse,
+     FunctionInfo,
+     FunctionsListResponse,
+     FunctionType,
+     JobHistoryResponse,
+     JobListResponse,
+     JobStatsResponse,
+     JobTrendHour,
+     JobTrendsResponse,
+     Run,
+     RunStats,
+     WorkerInfo,
+     WorkerInstance,
+     WorkersListResponse,
+     WorkerStats,
+ )
+
+ __all__ = [
+     # API tracking constants
+     "API_PREFIX",
+     "MINUTE_BUCKET_TTL",
+     "HOURLY_BUCKET_TTL",
+     "REGISTRY_TTL",
+     # Core models
+     "Job",
+     "JobStatus",
+     "StateTransition",
+     # Events
+     "EventType",
+     "EventRequest",
+     "BatchEventItem",
+     "BatchEventRequest",
+     "JobStartedEvent",
+     "JobCompletedEvent",
+     "JobFailedEvent",
+     "JobRetryingEvent",
+     "JobProgressEvent",
+     "JobCheckpointEvent",
+     # Artifacts
+     "ArtifactType",
+     "ArtifactCreateResponse",
+     "ArtifactQueuedResponse",
+     "ArtifactResponse",
+     "ArtifactListResponse",
+     "CreateArtifactRequest",
+     "ErrorResponse",
+     # Health
+     "HealthResponse",
+     # Pattern matching
+     "matches_event_pattern",
+     "get_matching_patterns",
+     # API Schemas
+     "ApiInstance",
+     "ApiEndpoint",
+     "ApiInfo",
+     "ApisListResponse",
+     "EndpointsListResponse",
+     "ApiDetailResponse",
+     "ApiTrendHour",
+     "ApiTrendsResponse",
+     "FunctionType",
+     "Run",
+     "WorkerInstance",
+     "WorkerInfo",
+     "WorkersListResponse",
+     "WorkerStats",
+     "FunctionInfo",
+     "FunctionsListResponse",
+     "FunctionDetailResponse",
+     "RunStats",
+     "ApiStats",
+     "DashboardStats",
+     # Job History
+     "JobHistoryResponse",
+     "JobListResponse",
+     "JobStatsResponse",
+     "JobTrendHour",
+     "JobTrendsResponse",
+ ]
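
Everything above is re-exported at the package root, so downstream code imports from `shared` directly rather than from the submodules. A minimal usage sketch (the function key `emails.send_welcome` is a made-up placeholder):

```python
# Minimal usage sketch: everything re-exported here is importable from the
# top-level `shared` namespace once conduit-shared is installed.
from shared import API_PREFIX, EventType, Job, JobStatus

job = Job(function="emails.send_welcome", kwargs={"user_id": 42})
print(job.id)                            # e.g. "job_3f9c2a1b8d4e5f60"
print(job.status is JobStatus.PENDING)   # True
print(EventType.JOB_STARTED)             # "job.started" (StrEnum)
print(API_PREFIX)                        # "conduit:api"
```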
@@ -0,0 +1,26 @@
+ """API tracking constants shared between SDK middleware and server reader.
+
+ Redis key structure:
+     conduit:api:registry                                    -> SET of api names
+     conduit:api:{api}:endpoints                             -> SET of "METHOD:path"
+     conduit:api:{api}:{method}:{path}:m:{YYYY-MM-DDTHH:MM}  -> HASH (minute bucket)
+     conduit:api:{api}:{method}:{path}:h:{YYYY-MM-DDTHH}     -> HASH (hourly bucket)
+
+ Each hash contains:
+     requests, errors, total_latency_ms, status_2xx, status_4xx, status_5xx
+ """
+
+ # Key prefix for all API tracking keys
+ API_PREFIX = "conduit:api"
+
+ # Key prefix for API instance heartbeats
+ API_INSTANCE_PREFIX = "conduit:api_instances"
+
+ # TTLs (seconds)
+ MINUTE_BUCKET_TTL = 600  # 10 minutes
+ HOURLY_BUCKET_TTL = 2_592_000  # 30 days
+ REGISTRY_TTL = 2_592_000  # 30 days
+
+ # API instance TTL - must heartbeat within this time
+ # With 10s heartbeat interval, this gives 3 missed heartbeats before expiry
+ API_INSTANCE_TTL = 30
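
The module docstring pins down the Redis key layout, but the middleware that writes these buckets lives outside this package. A hedged sketch of what a writer might look like, assuming a redis-py client; counting only 5xx responses as `errors` is an assumption, not something this package specifies:

```python
# Illustrative sketch (not part of this package): one way a middleware could
# record a request into the minute bucket described in the module docstring.
from datetime import UTC, datetime

import redis

from shared.api import API_PREFIX, MINUTE_BUCKET_TTL

r = redis.Redis()


def record_request(api: str, method: str, path: str, status: int, latency_ms: float) -> None:
    minute = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M")
    key = f"{API_PREFIX}:{api}:{method}:{path}:m:{minute}"
    pipe = r.pipeline()
    pipe.hincrby(key, "requests", 1)
    pipe.hincrbyfloat(key, "total_latency_ms", latency_ms)
    if status // 100 in (2, 4, 5):  # only the buckets named in the docstring
        pipe.hincrby(key, f"status_{status // 100}xx", 1)
    if status >= 500:  # assumption: only 5xx counts as an error
        pipe.hincrby(key, "errors", 1)
    pipe.expire(key, MINUTE_BUCKET_TTL)  # minute buckets live for 10 minutes
    pipe.execute()


record_request("billing", "GET", "/invoices", 200, 12.4)
```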
@@ -0,0 +1,20 @@
+ """Artifact types for job outputs."""
+
+ from enum import StrEnum
+
+
+ class ArtifactType(StrEnum):
+     """Artifact types for job outputs."""
+
+     TEXT = "text"
+     JSON = "json"
+     PNG = "image/png"
+     JPEG = "image/jpeg"
+     WEBP = "image/webp"
+     GIF = "image/gif"
+     SVG = "image/svg"
+     PDF = "file/pdf"
+     CSV = "file/csv"
+     XML = "file/xml"
+     HTML = "file/html"
+     BINARY = "file/binary"
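
Because `ArtifactType` is a `StrEnum`, its members are plain strings: they compare equal to raw wire values and serialize to JSON without custom encoders. A quick sketch:

```python
# StrEnum members are str instances, so they round-trip through JSON as-is.
import json

from shared.artifacts import ArtifactType

assert ArtifactType.PNG == "image/png"               # compares equal to raw values
assert ArtifactType("file/csv") is ArtifactType.CSV  # lookup by wire value
print(json.dumps({"type": ArtifactType.JSON}))       # {"type": "json"}
```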
@@ -0,0 +1,173 @@
+ """Event schemas for job tracking between workers and API."""
+
+ from datetime import datetime
+ from enum import StrEnum
+ from typing import Any
+
+ from pydantic import BaseModel, ConfigDict, Field
+
+ EVENTS_STREAM = "conduit:status:events"
+ API_REQUESTS_STREAM = "conduit:api:requests"
+ EVENTS_PUBSUB_CHANNEL = "conduit:status:events:pubsub"
+
+
+ class EventType(StrEnum):
+     """Event types sent from workers to API."""
+
+     JOB_STARTED = "job.started"
+     JOB_COMPLETED = "job.completed"
+     JOB_FAILED = "job.failed"
+     JOB_RETRYING = "job.retrying"
+     JOB_PROGRESS = "job.progress"
+     JOB_CHECKPOINT = "job.checkpoint"
+
+
+ class JobStartedEvent(BaseModel):
+     """Event data for job.started."""
+
+     job_id: str
+     function: str
+     function_name: str
+     parent_id: str | None = None
+     root_id: str
+     kwargs: dict[str, Any] = Field(default_factory=dict)
+     metadata: dict[str, Any] = Field(default_factory=dict)
+     attempt: int = 1
+     max_retries: int = 0
+     worker_id: str | None = None
+     started_at: datetime
+
+
+ class JobCompletedEvent(BaseModel):
+     """Event data for job.completed."""
+
+     job_id: str
+     function: str
+     function_name: str
+     parent_id: str | None = None
+     root_id: str
+     result: Any = None
+     duration_ms: float | None = None
+     attempt: int = 1
+     completed_at: datetime
+
+
+ class JobFailedEvent(BaseModel):
+     """Event data for job.failed."""
+
+     job_id: str
+     function: str
+     function_name: str
+     parent_id: str | None = None
+     root_id: str
+     error: str
+     traceback: str | None = None
+     attempt: int = 1
+     max_retries: int = 0
+     will_retry: bool = False
+     failed_at: datetime
+
+
+ class JobRetryingEvent(BaseModel):
+     """Event data for job.retrying."""
+
+     job_id: str
+     function: str
+     function_name: str
+     parent_id: str | None = None
+     root_id: str
+     error: str
+     delay_seconds: float
+     current_attempt: int
+     next_attempt: int
+     retry_at: datetime
+
+
+ class JobProgressEvent(BaseModel):
+     """Event data for job.progress."""
+
+     job_id: str
+     parent_id: str | None = None
+     root_id: str
+     progress: float
+     message: str | None = None
+     updated_at: datetime
+
+
+ class JobCheckpointEvent(BaseModel):
+     """Event data for job.checkpoint."""
+
+     job_id: str
+     parent_id: str | None = None
+     root_id: str
+     state: dict[str, Any]
+     checkpointed_at: datetime
+
+
+ class SSEJobEvent(BaseModel):
+     """Event payload streamed to browser clients via SSE.
+
+     Only includes fields the dashboard needs — sensitive data like
+     kwargs, result, traceback, and checkpoint state are excluded.
+     Extra fields from the source event are silently dropped.
+     """
+
+     model_config = ConfigDict(extra="ignore")
+
+     type: str
+     job_id: str = ""
+     worker_id: str = ""
+     function: str | None = None
+     function_name: str | None = None
+     parent_id: str | None = None
+     root_id: str
+     # job.started / job.completed / job.failed
+     attempt: int | None = None
+     max_retries: int | None = None
+     started_at: datetime | None = None
+     # job.completed
+     duration_ms: float | None = None
+     completed_at: datetime | None = None
+     # job.failed
+     error: str | None = None
+     failed_at: datetime | None = None
+     # job.retrying
+     current_attempt: int | None = None
+     next_attempt: int | None = None
+     # job.progress
+     progress: float | None = None
+     message: str | None = None
+
+
+ class EventRequest(BaseModel):
+     """Generic event request from workers."""
+
+     type: str
+     data: dict[str, Any]
+     worker_id: str | None = None
+
+
+ class BatchEventItem(BaseModel):
+     """Single event in a batch."""
+
+     type: str
+     job_id: str
+     worker_id: str
+     timestamp: float
+     data: dict[str, Any] = Field(default_factory=dict)
+
+
+ class BatchEventRequest(BaseModel):
+     """Batch of events from workers."""
+
+     events: list[BatchEventItem]
+     worker_id: str | None = None
+
+
+ class HealthResponse(BaseModel):
+     """Health check response."""
+
+     status: str = "ok"
+     version: str
+     tier: str = "free"
+     features: list[str] = Field(default_factory=list)
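
A sketch of how a worker-side emitter might wrap one of these event models into the batch envelope, and how `SSEJobEvent`'s `extra="ignore"` strips fields the dashboard must not see. The IDs (`job_abc123`, `worker-1`) are placeholders, and the transport (HTTP POST, Redis stream) is outside this package:

```python
# Sketch: wrapping a JobStartedEvent into the batch envelope, then showing
# SSEJobEvent dropping sensitive fields. All identifiers are placeholders.
import time
from datetime import UTC, datetime

from shared.events import (
    BatchEventItem,
    BatchEventRequest,
    EventType,
    JobStartedEvent,
    SSEJobEvent,
)

started = JobStartedEvent(
    job_id="job_abc123",
    function="reports.build",
    function_name="Build report",
    root_id="job_abc123",
    kwargs={"customer_id": 7},  # sensitive: never forwarded to the browser
    started_at=datetime.now(UTC),
)

batch = BatchEventRequest(
    events=[
        BatchEventItem(
            type=EventType.JOB_STARTED,
            job_id=started.job_id,
            worker_id="worker-1",
            timestamp=time.time(),
            data=started.model_dump(mode="json"),
        )
    ],
    worker_id="worker-1",
)
print(batch.model_dump_json())

# extra="ignore" silently drops kwargs when fanning out to SSE clients:
sse = SSEJobEvent(type="job.started", root_id="job_abc123", kwargs={"customer_id": 7})
assert "kwargs" not in sse.model_dump()
```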
@@ -0,0 +1,335 @@
+ """Core data models for Conduit."""
+
+ from __future__ import annotations
+
+ import json
+ import uuid
+ from dataclasses import dataclass, field
+ from datetime import UTC, datetime
+ from enum import Enum
+ from typing import Any
+
+
+ @dataclass
+ class StateTransition:
+     """
+     Records a state transition for a job.
+
+     Used to build timeline views showing job progression.
+     """
+
+     state: str  # JobStatus value
+     timestamp: datetime
+     message: str | None = None
+     worker_id: str | None = None
+
+     def to_dict(self) -> dict[str, Any]:
+         """Serialize to dictionary."""
+         return {
+             "state": self.state,
+             "timestamp": self.timestamp.isoformat(),
+             "message": self.message,
+             "worker_id": self.worker_id,
+         }
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> StateTransition:
+         """Deserialize from dictionary."""
+         return cls(
+             state=data["state"],
+             timestamp=datetime.fromisoformat(data["timestamp"]),
+             message=data.get("message"),
+             worker_id=data.get("worker_id"),
+         )
+
+
+ class JobStatus(str, Enum):
+     """Job execution status."""
+
+     PENDING = "pending"
+     QUEUED = "queued"
+     ACTIVE = "active"
+     COMPLETE = "complete"
+     FAILED = "failed"
+     CANCELLED = "cancelled"
+     RETRYING = "retrying"
+
+     def is_terminal(self) -> bool:
+         """Check if this is a terminal state."""
+         return self in (JobStatus.COMPLETE, JobStatus.FAILED, JobStatus.CANCELLED)
+
+     def can_transition_to(self, target: JobStatus) -> bool:
+         """Check if transition to target state is valid."""
+         valid_transitions: dict[JobStatus, set[JobStatus]] = {
+             JobStatus.PENDING: {JobStatus.QUEUED, JobStatus.CANCELLED},
+             JobStatus.QUEUED: {JobStatus.ACTIVE, JobStatus.CANCELLED},
+             JobStatus.ACTIVE: {
+                 JobStatus.COMPLETE,
+                 JobStatus.FAILED,
+                 JobStatus.CANCELLED,
+                 JobStatus.RETRYING,
+             },
+             JobStatus.RETRYING: {
+                 JobStatus.QUEUED,
+                 JobStatus.FAILED,
+                 JobStatus.CANCELLED,
+             },
+             JobStatus.COMPLETE: set(),
+             JobStatus.FAILED: {JobStatus.QUEUED},  # Allow retry
+             JobStatus.CANCELLED: set(),
+         }
+         return target in valid_transitions.get(self, set())
+
+
+ def _generate_job_id() -> str:
+     """Generate a unique job ID."""
+     return f"job_{uuid.uuid4().hex[:16]}"
+
+
+ def _utc_now() -> datetime:
+     """Get current UTC time."""
+     return datetime.now(UTC)
+
+
+ @dataclass
+ class Job:
+     """
+     Represents a job to be executed.
+
+     Jobs are the fundamental unit of work in Conduit.
+     """
+
+     # Identity
+     function: str
+     function_name: str = ""
+     kwargs: dict[str, Any] = field(default_factory=dict)
+     id: str = field(default_factory=_generate_job_id)
+     key: str = ""  # Deduplication key (defaults to id if not set)
+
+     # Status
+     status: JobStatus = JobStatus.PENDING
+
+     # Timing
+     scheduled_at: datetime = field(default_factory=_utc_now)
+     started_at: datetime | None = None
+     completed_at: datetime | None = None
+     timeout: float | None = None
+     lease_time: float | None = None
+
+     # Execution
+     # `attempts` is the number of times the job has been started (1-indexed when running).
+     # - 0 = not yet started
+     # - 1 = first attempt (initial execution)
+     # - 2 = second attempt (first retry)
+     # - N = Nth attempt (N-1 retries)
+     # Job can retry when: attempts <= max_retries
+     # See also: can_retry() method
+     attempts: int = 0
+     max_retries: int = (
+         0  # Number of retry attempts (0 = no retries, just the initial attempt)
+     )
+     retry_delay: float = 1.0  # Base delay in seconds between retries
+     retry_backoff: float = 2.0  # Exponential backoff multiplier
+     worker_id: str | None = None
+     parent_id: str | None = None
+     root_id: str = ""
+
+     # Progress tracking
+     progress: float = 0.0
+     metadata: dict[str, Any] = field(default_factory=dict)
+
+     # Result
+     result: Any = None
+     error: str | None = None
+     error_traceback: str | None = None
+
+     # Cron scheduling
+     schedule: str | None = None
+
+     # State history
+     state_history: list[StateTransition] = field(default_factory=list)
+
+     def __post_init__(self) -> None:
+         """Set defaults after initialization."""
+         if not self.key:
+             self.key = self.id
+         if not self.root_id:
+             self.root_id = self.id
+         if not self.function_name:
+             self.function_name = self.function
+
+     def _record_transition(
+         self,
+         state: JobStatus,
+         message: str | None = None,
+         worker_id: str | None = None,
+     ) -> None:
+         """Record a state transition to history."""
+         self.state_history.append(
+             StateTransition(
+                 state=state.value,
+                 timestamp=_utc_now(),
+                 message=message,
+                 worker_id=worker_id,
+             )
+         )
+
+     def to_dict(self) -> dict[str, Any]:
+         """Serialize job to dictionary."""
+         return {
+             "id": self.id,
+             "key": self.key,
+             "function": self.function,
+             "function_name": self.function_name,
+             "kwargs": self.kwargs,
+             "status": self.status.value,
+             "scheduled_at": self.scheduled_at.isoformat(),
+             "started_at": self.started_at.isoformat() if self.started_at else None,
+             "completed_at": self.completed_at.isoformat()
+             if self.completed_at
+             else None,
+             "timeout": self.timeout,
+             "lease_time": self.lease_time,
+             "attempts": self.attempts,
+             "max_retries": self.max_retries,
+             "retry_delay": self.retry_delay,
+             "retry_backoff": self.retry_backoff,
+             "worker_id": self.worker_id,
+             "parent_id": self.parent_id,
+             "root_id": self.root_id,
+             "progress": self.progress,
+             "metadata": self.metadata,
+             "result": self.result,
+             "error": self.error,
+             "error_traceback": self.error_traceback,
+             "schedule": self.schedule,
+             "state_history": [t.to_dict() for t in self.state_history],
+         }
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> Job:
+         """Deserialize job from dictionary."""
+         return cls(
+             id=data["id"],
+             key=data["key"],
+             function=data["function"],
+             function_name=data.get("function_name", data["function"]),
+             kwargs=data.get("kwargs", {}),
+             status=JobStatus(data["status"]),
+             scheduled_at=datetime.fromisoformat(data["scheduled_at"]),
+             started_at=(
+                 datetime.fromisoformat(data["started_at"])
+                 if data.get("started_at")
+                 else None
+             ),
+             completed_at=(
+                 datetime.fromisoformat(data["completed_at"])
+                 if data.get("completed_at")
+                 else None
+             ),
+             timeout=data.get("timeout"),
+             lease_time=data.get("lease_time"),
+             attempts=data.get("attempts", 0),
+             max_retries=data.get("max_retries", 0),
+             retry_delay=data.get("retry_delay", 1.0),
+             retry_backoff=data.get("retry_backoff", 2.0),
+             worker_id=data.get("worker_id"),
+             parent_id=data.get("parent_id"),
+             root_id=data.get("root_id", data["id"]),
+             progress=data.get("progress", 0.0),
+             metadata=data.get("metadata", {}),
+             result=data.get("result"),
+             error=data.get("error"),
+             error_traceback=data.get("error_traceback"),
+             schedule=data.get("schedule"),
+             state_history=[
+                 StateTransition.from_dict(t) for t in data.get("state_history", [])
+             ],
+         )
+
+     def to_json(self) -> str:
+         """Serialize job to JSON string."""
+         return json.dumps(self.to_dict())
+
+     @classmethod
+     def from_json(cls, json_str: str) -> Job:
+         """Deserialize job from JSON string."""
+         return cls.from_dict(json.loads(json_str))
+
+     def calculate_retry_delay(self) -> float:
+         """Calculate delay before next retry using exponential backoff."""
+         return self.retry_delay * (self.retry_backoff ** (self.attempts - 1))
+
+     def can_retry(self) -> bool:
+         """Check if job can be retried."""
+         return self.attempts <= self.max_retries
+
+     @property
+     def duration_ms(self) -> int | None:
+         """Calculate job duration in milliseconds."""
+         if self.started_at and self.completed_at:
+             delta = self.completed_at - self.started_at
+             return int(delta.total_seconds() * 1000)
+         return None
+
+     def mark_queued(self, message: str | None = None) -> None:
+         """Mark job as queued."""
+         self.status = JobStatus.QUEUED
+         self._record_transition(JobStatus.QUEUED, message or "Job enqueued")
+
+     def mark_started(self, worker_id: str) -> None:
+         """Mark job as started."""
+         self.status = JobStatus.ACTIVE
+         self.started_at = _utc_now()
+         self.worker_id = worker_id
+         self.attempts += 1
+         self._record_transition(
+             JobStatus.ACTIVE,
+             f"Started (attempt {self.attempts})",
+             worker_id=worker_id,
+         )
+
+     def mark_complete(self, result: Any = None) -> None:
+         """Mark job as complete."""
+         self.status = JobStatus.COMPLETE
+         self.completed_at = _utc_now()
+         self.result = result
+         self.progress = 1.0
+         self._record_transition(JobStatus.COMPLETE, "Completed successfully")
+
+     def mark_failed(self, error: str, traceback: str | None = None) -> None:
+         """Mark job as failed."""
+         self.status = JobStatus.FAILED
+         self.completed_at = _utc_now()
+         self.error = error
+         self.error_traceback = traceback
+         self._record_transition(JobStatus.FAILED, f"Failed: {error[:100]}")
+
+     def mark_cancelled(self) -> None:
+         """Mark job as cancelled."""
+         self.status = JobStatus.CANCELLED
+         self.completed_at = _utc_now()
+         self._record_transition(JobStatus.CANCELLED, "Cancelled")
+
+     def mark_retrying(self, delay: float) -> None:
+         """Mark job as retrying."""
+         self.status = JobStatus.RETRYING
+         self._record_transition(
+             JobStatus.RETRYING,
+             f"Scheduled for retry in {delay:.1f}s (attempt {self.attempts + 1})",
+         )
+
+     @property
+     def is_cron(self) -> bool:
+         """Check if this is a recurring cron job.
+
+         A job is a cron if it has a schedule AND was marked as cron in metadata.
+         This distinguishes cron jobs from regular delayed jobs.
+         """
+         if self.schedule is None:
+             return False
+         # Check metadata for explicit cron marker (set by seed_cron/reschedule_cron)
+         if self.metadata and self.metadata.get("cron"):
+             return True
+         # Fallback: if schedule is set, treat as cron for backward compatibility
+         return True
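
A sketch walking a `Job` through the state machine above; it exercises the retry bookkeeping (`mark_started` increments `attempts`), the backoff formula, the FAILED -> QUEUED transition, and the JSON round-trip. The function key is a placeholder:

```python
# Sketch: one failed attempt with max_retries=2, then the retry math and a
# JSON round-trip that preserves the state history.
from shared.models import Job, JobStatus

job = Job(function="images.resize", max_retries=2, retry_delay=1.0, retry_backoff=2.0)
job.mark_queued()
job.mark_started(worker_id="worker-1")  # attempts: 0 -> 1
job.mark_failed("timeout")

assert job.can_retry()                  # 1 <= max_retries (2)
print(job.calculate_retry_delay())      # 1.0 * 2.0 ** (1 - 1) = 1.0 seconds
assert job.status.can_transition_to(JobStatus.QUEUED)  # FAILED -> QUEUED (retry)

clone = Job.from_json(job.to_json())
assert [t.state for t in clone.state_history] == ["queued", "active", "failed"]
```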
@@ -0,0 +1,88 @@
+ """Pattern matching utilities for event names.
+
+ This module provides wildcard pattern matching for event routing.
+ Both EventRouter and Registry use this shared implementation.
+ """
+
+ from __future__ import annotations
+
+ import re
+
+
+ def _segment_regex(pattern: str) -> str:
+     """Compile one dot-delimited segment pattern into a regex fragment."""
+     out: list[str] = []
+     for char in pattern:
+         if char == "*":
+             # Single-segment wildcard: any chars except dot.
+             out.append("[^.]+")
+         else:
+             out.append(re.escape(char))
+     return "".join(out)
+
+
+ def matches_event_pattern(event: str, pattern: str) -> bool:
+     """
+     Check if an event name matches a pattern.
+
+     Supports glob-style wildcards:
+     - Exact match: "user.signup" matches "user.signup"
+     - Single wildcard: "user.*" matches "user.signup" but not "user.signup.email"
+     - Multi-wildcard: "user.**" matches "user.signup" and "user.signup.email"
+
+     Args:
+         event: Event name to check (e.g., "user.signup")
+         pattern: Pattern to match against (e.g., "user.*")
+
+     Returns:
+         True if the event matches the pattern
+
+     Examples:
+         >>> matches_event_pattern("user.signup", "user.signup")
+         True
+         >>> matches_event_pattern("user.signup", "user.*")
+         True
+         >>> matches_event_pattern("user.signup.email", "user.*")
+         False
+         >>> matches_event_pattern("user.signup.email", "user.**")
+         True
+         >>> matches_event_pattern("order.item.added", "order.*.added")
+         True
+     """
+     if pattern == "**":
+         return True
+
+     event_parts = event.split(".")
+     pattern_parts = pattern.split(".")
+
+     i = 0
+     j = 0
+     while i < len(pattern_parts) and j < len(event_parts):
+         segment = pattern_parts[i]
+         if segment == "**":
+             # Multi-segment wildcard: consume any remaining segments.
+             return True
+         if not re.fullmatch(_segment_regex(segment), event_parts[j]):
+             return False
+         i += 1
+         j += 1
+
+     if i == len(pattern_parts) and j == len(event_parts):
+         return True
+     if i == len(pattern_parts) - 1 and pattern_parts[i] == "**":
+         return True
+     return False
+
+
+ def get_matching_patterns(event: str, patterns: list[str]) -> list[str]:
+     """
+     Find all patterns that match an event name.
+
+     Args:
+         event: Event name to match
+         patterns: List of patterns to check
+
+     Returns:
+         List of matching patterns
+     """
+     return [p for p in patterns if matches_event_pattern(event, p)]
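
A small routing sketch built on `get_matching_patterns`; the subscription list is invented for illustration:

```python
# Routing sketch: which subscriptions receive a given event.
from shared.patterns import get_matching_patterns

subscriptions = ["user.*", "user.**", "order.*.added", "**"]

print(get_matching_patterns("user.signup", subscriptions))
# ['user.*', 'user.**', '**']
print(get_matching_patterns("order.item.added", subscriptions))
# ['order.*.added', '**']
```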
@@ -0,0 +1,366 @@
+ """Shared API schemas for Conduit."""
+
+ from datetime import datetime
+ from enum import StrEnum
+ from typing import Any, Literal
+
+ from pydantic import BaseModel, ConfigDict
+
+ from shared.artifacts import ArtifactType
+
+ # =============================================================================
+ # Common Types
+ # =============================================================================
+
+
+ class FunctionType(StrEnum):
+     TASK = "task"
+     CRON = "cron"
+     EVENT = "event"
+
+
+ # =============================================================================
+ # Job History Schemas
+ # =============================================================================
+
+
+ class JobHistoryResponse(BaseModel):
+     """Single job history response."""
+
+     id: str
+     function: str
+     function_name: str
+     status: str
+     created_at: datetime | None = None
+     scheduled_at: datetime | None = None
+     started_at: datetime | None = None
+     completed_at: datetime | None = None
+     attempts: int = 1
+     max_retries: int = 0
+     timeout: float | None = None
+     worker_id: str | None = None
+     parent_id: str | None = None
+     root_id: str
+     progress: float = 0.0
+     kwargs: dict[str, Any] = {}
+     metadata: dict[str, Any] = {}
+     result: Any = None
+     error: str | None = None
+     duration_ms: float | None = None
+
+     model_config = ConfigDict(from_attributes=True)
+
+
+ class JobListResponse(BaseModel):
+     """List of jobs response."""
+
+     jobs: list[JobHistoryResponse]
+     total: int
+     has_more: bool = False
+
+
+ class JobStatsResponse(BaseModel):
+     """Job statistics response."""
+
+     total: int
+     success_count: int
+     failure_count: int
+     cancelled_count: int
+     success_rate: float
+     avg_duration_ms: float | None = None
+
+
+ class JobTrendHour(BaseModel):
+     """Hourly job counts by status."""
+
+     hour: str  # ISO format: "2024-01-15T14:00:00Z"
+     complete: int = 0
+     failed: int = 0
+     retrying: int = 0
+     active: int = 0
+
+
+ class JobTrendsResponse(BaseModel):
+     """Job trends response."""
+
+     hourly: list[JobTrendHour]
+
+
+ # =============================================================================
+ # Artifact Schemas
+ # =============================================================================
+
+
+ class ArtifactResponse(BaseModel):
+     """Single artifact response."""
+
+     id: int
+     job_id: str
+     name: str
+     type: str  # text, json, image/png, image/jpeg, file/pdf, etc.
+     size_bytes: int | None = None
+     data: Any = None
+     path: str | None = None
+     created_at: datetime
+
+     model_config = ConfigDict(from_attributes=True)
+
+
+ class ArtifactListResponse(BaseModel):
+     """List of artifacts response."""
+
+     artifacts: list[ArtifactResponse]
+     total: int
+
+
+ class CreateArtifactRequest(BaseModel):
+     """Request to create an artifact."""
+
+     name: str
+     type: ArtifactType
+     data: Any = None
+
+
+ class ArtifactQueuedResponse(BaseModel):
+     """Artifact accepted but queued until the job row is available."""
+
+     status: Literal["queued"] = "queued"
+     job_id: str
+     pending_id: int
+
+
+ ArtifactCreateResponse = ArtifactResponse | ArtifactQueuedResponse
+
+
+ class ErrorResponse(BaseModel):
+     """Standard API error payload."""
+
+     detail: str
+
+
+ # =============================================================================
+ # Run/Job Schemas
+ # =============================================================================
+
+
+ class Run(BaseModel):
+     """Run model for job executions."""
+
+     id: str
+     function: str
+     function_name: str
+     status: str
+     started_at: str | None = None
+     completed_at: str | None = None
+     duration_ms: float | None = None
+     error: str | None = None
+     worker_id: str | None = None
+     attempts: int = 1
+     progress: float = 0.0
+
+
+ # =============================================================================
+ # Worker Schemas
+ # =============================================================================
+
+
+ class WorkerInstance(BaseModel):
+     """Worker instance model (registered via Redis heartbeat)."""
+
+     id: str
+     worker_name: str
+     started_at: str
+     last_heartbeat: str
+     functions: list[str] = []
+     function_names: dict[str, str] = {}
+     concurrency: int = 1
+     active_jobs: int = 0
+     jobs_processed: int = 0
+     jobs_failed: int = 0
+     hostname: str | None = None
+
+
+ class WorkerInfo(BaseModel):
+     """Worker-level aggregate info (grouped by worker name)."""
+
+     name: str
+     active: bool = False
+     instance_count: int = 0
+     instances: list[WorkerInstance] = []
+     functions: list[str] = []
+     function_names: dict[str, str] = {}
+     concurrency: int = 0
+
+
+ class WorkersListResponse(BaseModel):
+     """Workers list response."""
+
+     workers: list[WorkerInfo]
+     total: int
+
+
+ class WorkerStats(BaseModel):
+     """Worker statistics."""
+
+     total: int
+
+
+ # =============================================================================
+ # API Instance Schemas
+ # =============================================================================
+
+
+ class ApiInstance(BaseModel):
+     """API instance model (registered via Redis heartbeat)."""
+
+     id: str
+     api_name: str
+     started_at: str
+     last_heartbeat: str
+     host: str = "0.0.0.0"
+     port: int = 8000
+     endpoints: list[str] = []
+     hostname: str | None = None
+
+
+ HttpMethod = Literal["GET", "POST", "PUT", "PATCH", "DELETE"]
+
+
+ class ApiEndpoint(BaseModel):
+     """API endpoint info."""
+
+     method: HttpMethod
+     path: str
+     requests_24h: int = 0
+     avg_latency_ms: float = 0.0
+     p50_latency_ms: float = 0.0
+     p95_latency_ms: float = 0.0
+     p99_latency_ms: float = 0.0
+     error_rate: float = 0.0
+     last_request_at: str | None = None
+
+
+ class ApiInfo(BaseModel):
+     """API-level aggregate info (grouped by API name)."""
+
+     name: str
+     active: bool = False
+     instance_count: int = 0
+     instances: list[ApiInstance] = []
+     endpoint_count: int = 0
+     requests_24h: int = 0
+     avg_latency_ms: float = 0.0
+     error_rate: float = 0.0
+     requests_per_min: float = 0.0
+
+
+ class ApisListResponse(BaseModel):
+     """APIs list response."""
+
+     apis: list[ApiInfo]
+     total: int
+
+
+ class EndpointsListResponse(BaseModel):
+     """Endpoints list response (per-endpoint detail)."""
+
+     endpoints: list[ApiEndpoint]
+     total: int
+
+
+ class ApiDetailResponse(ApiEndpoint):
+     """API endpoint detail response."""
+
+     hourly_stats: list = []
+     recent_errors: list = []
+
+
+ class ApiTrendHour(BaseModel):
+     """Hourly API response counts by status category."""
+
+     hour: str
+     success_2xx: int = 0
+     client_4xx: int = 0
+     server_5xx: int = 0
+
+
+ class ApiTrendsResponse(BaseModel):
+     """API trends response."""
+
+     hourly: list[ApiTrendHour]
+
+
+ # =============================================================================
+ # Function Schemas
+ # =============================================================================
+
+
+ class FunctionInfo(BaseModel):
+     """Function info."""
+
+     key: str
+     name: str
+     type: FunctionType
+     active: bool = False
+     # Task config
+     timeout: int | None = None
+     max_retries: int | None = None
+     retry_delay: int | None = None
+     # Cron config
+     schedule: str | None = None
+     next_run_at: str | None = None
+     # Event config
+     pattern: str | None = None
+     # Workers currently handling this function
+     workers: list[str] = []
+     # Stats
+     runs_24h: int = 0
+     success_rate: float = 100.0
+     avg_duration_ms: float = 0.0
+     p95_duration_ms: float | None = None
+     last_run_at: str | None = None
+     last_run_status: str | None = None
+
+
+ class FunctionsListResponse(BaseModel):
+     """Functions list response."""
+
+     functions: list[FunctionInfo]
+     total: int
+
+
+ class FunctionDetailResponse(FunctionInfo):
+     """Function detail response."""
+
+     recent_runs: list[Run] = []
+
+
+ # =============================================================================
+ # Dashboard Schemas
+ # =============================================================================
+
+
+ class RunStats(BaseModel):
+     """Run statistics."""
+
+     total_24h: int
+     success_rate: float
+     active_count: int
+
+
+ class ApiStats(BaseModel):
+     """API statistics."""
+
+     requests_24h: int
+     avg_latency_ms: float
+     error_rate: float
+
+
+ class DashboardStats(BaseModel):
+     """Dashboard stats response."""
+
+     runs: RunStats
+     workers: WorkerStats
+     apis: ApiStats
+     recent_runs: list[Run]
+     recent_failures: list[Run]
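
These are ordinary Pydantic v2 models, so server handlers can validate raw dicts and emit JSON directly. A short sketch with invented values:

```python
# Sketch: validating a raw dict into ApiEndpoint and emitting a list response.
from shared.schemas import ApiEndpoint, EndpointsListResponse

ep = ApiEndpoint.model_validate(
    {
        "method": "GET",  # anything outside the HttpMethod Literal is rejected
        "path": "/invoices",
        "requests_24h": 1200,
        "avg_latency_ms": 14.2,
        "error_rate": 0.01,
    }
)
page = EndpointsListResponse(endpoints=[ep], total=1)
print(page.model_dump_json(indent=2))
```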
@@ -0,0 +1,19 @@
+ """Worker constants shared between SDK and server."""
+
+ # Key prefix for worker instance heartbeats
+ WORKER_KEY_PREFIX = "conduit:workers"
+
+ # Key prefix for persistent worker definitions (keyed by worker name)
+ WORKER_DEF_PREFIX = "conduit:worker_defs"
+
+ # Key prefix for function definitions
+ FUNCTION_KEY_PREFIX = "conduit:functions"
+
+ # Worker instance TTL - must heartbeat within this time
+ # With 10s heartbeat interval, this gives 3 missed heartbeats before expiry
+ WORKER_TTL = 30
+
+ # Worker/function definition TTL - refreshed each time a worker starts
+ # Matches API registry TTL so stale entries self-clean after 30 days
+ WORKER_DEF_TTL = 2_592_000  # 30 days
+ FUNCTION_DEF_TTL = 2_592_000  # 30 days
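
A hedged sketch of the heartbeat pattern these constants imply: refresh a per-instance key with a 30 s TTL every 10 s, so three missed beats let the key expire. The key shape and payload are assumptions, and a redis-py client is assumed; the real SDK may differ in details:

```python
# Illustrative sketch (not part of this package): the heartbeat loop these
# constants imply. Key shape and payload are assumptions; assumes redis-py.
import json
import time

import redis

from shared.workers import WORKER_KEY_PREFIX, WORKER_TTL

r = redis.Redis()


def heartbeat(instance_id: str, payload: dict) -> None:
    key = f"{WORKER_KEY_PREFIX}:{instance_id}"
    # SET with EX refreshes the 30s TTL; three missed 10s beats expire the key.
    r.set(key, json.dumps(payload), ex=WORKER_TTL)


for _ in range(3):  # bounded here; a real worker loops until shutdown
    heartbeat("worker-1-abc123", {"worker_name": "worker-1", "active_jobs": 0})
    time.sleep(10)
```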