pyoco 0.3.0__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyoco/dsl/syntax.py CHANGED
@@ -1,122 +1,268 @@
-from typing import Callable, Union, List, Tuple
-from ..core.models import Task, Flow
-from ..core import engine
+from __future__ import annotations

-# Global context
-_active_flow: Flow = None
+from dataclasses import dataclass
+from typing import Callable, Iterable, List, Sequence, Tuple, Union
+
+from ..core.models import Task
+from .expressions import Expression, ensure_expression
+from .nodes import (
+    CaseNode,
+    DSLNode,
+    ForEachNode,
+    RepeatNode,
+    SubFlowNode,
+    SwitchNode,
+    TaskNode,
+    UntilNode,
+    DEFAULT_CASE_VALUE,
+)
+RESERVED_CTX_KEYS = {"params", "results", "scratch", "loop", "loops", "env", "artifacts"}
+
+
+class FlowFragment:
+    """
+    Represents a fragment of a flow (sequence of DSL nodes). Every DSL
+    operator returns a FlowFragment so sub-flows can be composed before
+    being attached to a Flow.
+    """
+
+    __slots__ = ("_nodes",)
+
+    def __init__(self, nodes: Sequence[DSLNode]):
+        if not isinstance(nodes, (list, tuple)):
+            raise TypeError("FlowFragment expects a list/tuple of nodes.")
+        self._nodes: Tuple[DSLNode, ...] = tuple(nodes)
+
+    # Sequence composition -------------------------------------------------
+    def __rshift__(self, other: Union["FlowFragment", "TaskWrapper", Task]) -> "FlowFragment":
+        right = ensure_fragment(other)
+        self._link_to(right)
+        return FlowFragment(self._nodes + right._nodes)
+
+    # Loop support ---------------------------------------------------------
+    def __getitem__(self, selector: Union[int, str, Expression]) -> "FlowFragment":
+        """
+        Implements [] operator for repeat / for-each loops.
+        """
+
+        body = self.to_subflow()
+        if isinstance(selector, int):
+            if selector < 0:
+                raise ValueError("Repeat count must be non-negative.")
+            node = RepeatNode(body=body, count=selector)
+            return FlowFragment([node])
+
+        if isinstance(selector, Expression):
+            node = RepeatNode(body=body, count=selector)
+            return FlowFragment([node])
+
+        if isinstance(selector, str):
+            source, alias = parse_foreach_selector(selector)
+            node = ForEachNode(body=body, source=ensure_expression(source), alias=alias)
+            return FlowFragment([node])
+
+        raise TypeError(f"Unsupported loop selector: {selector!r}")
+
+    def __mod__(self, value: Union[str, Expression, Tuple[Union[str, Expression], int]]) -> "FlowFragment":
+        """
+        Implements the % operator for until loops.
+        """
+
+        max_iter = None
+        expr_value: Union[str, Expression]
+
+        if isinstance(value, tuple):
+            if len(value) != 2:
+                raise ValueError("Until tuple selector must be (expression, max_iter).")
+            expr_value, max_iter = value
+        else:
+            expr_value = value
+
+        node = UntilNode(
+            body=self.to_subflow(),
+            condition=ensure_expression(expr_value),
+            max_iter=max_iter,
+        )
+        return FlowFragment([node])
+
+    # Switch/case ----------------------------------------------------------
+    def __rrshift__(self, other: Union[str, int, float, bool]) -> CaseNode:
+        """
+        Enables `"X" >> fragment` syntax by reversing the operands.
+        """
+
+        if isinstance(other, str) and other == "*":
+            value = DEFAULT_CASE_VALUE
+        else:
+            value = other
+        return CaseNode(value=value, target=self.to_subflow())
+
+    # Helpers --------------------------------------------------------------
+    def to_subflow(self) -> SubFlowNode:
+        return SubFlowNode(list(self._nodes))
+
+    def task_nodes(self) -> List[Task]:
+        tasks: List[Task] = []
+        for node in self._nodes:
+            tasks.extend(_collect_tasks(node))
+        return tasks
+
+    def _first_task(self) -> Task | None:
+        for node in self._nodes:
+            tasks = _collect_tasks(node)
+            if tasks:
+                return tasks[0]
+        return None
+
+    def _last_task(self) -> Task | None:
+        for node in reversed(self._nodes):
+            tasks = _collect_tasks(node)
+            if tasks:
+                return tasks[-1]
+        return None
+
+    def _link_to(self, other: "FlowFragment"):
+        left_task = self._last_task()
+        right_task = other._first_task()
+        if left_task and right_task and left_task is not right_task:
+            right_task.dependencies.add(left_task)
+            left_task.dependents.add(right_task)
+
+    def has_control_flow(self) -> bool:
+        return any(not isinstance(node, TaskNode) for node in self._nodes)
+
+
+class TaskWrapper(FlowFragment):
+    """
+    Wraps a Task to handle DSL operators, while exposing the underlying
+    task for legacy access (e.g., `task.task.inputs = ...`).
+    """
+
+    __slots__ = ("task",)
+
+    def __init__(self, task: Task):
+        self.task = task
+        super().__init__([TaskNode(task)])
+
+    def __call__(self, *args, **kwargs) -> "TaskWrapper":
+        return self
+
+    def __and__(self, other):
+        return Parallel([self, other])
+
+    def __or__(self, other):
+        return Branch([self, other])

-def task(func: Callable) -> Task:
-    t = Task(func=func, name=func.__name__)
-    return t

 class Branch(list):
-    """Represents a branch of tasks (OR-split/join logic placeholder)."""
+    """Represents `A | B` OR-branches (legacy)."""
+
     def __rshift__(self, other):
-        # (A | B) >> C
-        # C depends on A and B.
-        # AND C.trigger_policy = "ANY"
-
-        targets = []
-        if hasattr(other, 'task'):
-            targets = [other.task]
-        elif isinstance(other, (list, tuple)):
-            for item in other:
-                if hasattr(item, 'task'):
-                    targets.append(item.task)
-
+        targets = _collect_target_tasks(other)
         for target in targets:
             target.trigger_policy = "ANY"
             for source in self:
-                if hasattr(source, 'task'):
+                if hasattr(source, "task"):
                     target.dependencies.add(source.task)
                     source.task.dependents.add(target)
-
         return other

+
 class Parallel(list):
-    """Represents a parallel group of tasks (AND-split/join)."""
+    """Represents `A & B` parallel branches (legacy)."""
+
     def __rshift__(self, other):
-        # (A & B) >> C
-        # C depends on A AND B.
-
-        targets = []
-        if hasattr(other, 'task'):
-            targets = [other.task]
-        elif isinstance(other, (list, tuple)):
-            for item in other:
-                if hasattr(item, 'task'):
-                    targets.append(item.task)
-
+        targets = _collect_target_tasks(other)
         for target in targets:
             for source in self:
-                if hasattr(source, 'task'):
+                if hasattr(source, "task"):
                     target.dependencies.add(source.task)
                     source.task.dependents.add(target)
-
         return other

-class TaskWrapper:
-    """
-    Wraps a Task to handle DSL operators and registration.
-    """
-    def __init__(self, task: Task):
-        self.task = task
-
-    def __call__(self, *args, **kwargs):
-        # In this new spec, calling a task might not be strictly necessary for registration
-        # if we assume tasks are added to flow explicitly or via >>
-        # But let's keep the pattern: calling it returns a wrapper that can be chained
-        # We might need to store args/kwargs if we want to support them
-        return self

-    def __rshift__(self, other):
-        # self >> other
-        if isinstance(other, TaskWrapper):
-            other.task.dependencies.add(self.task)
-            self.task.dependents.add(other.task)
-            return other
-        elif isinstance(other, (list, tuple)):
-            # self >> (A & B) or self >> (A | B)
-            # If it's a Branch (from |), does it imply something different?
-            # Spec says: "Update Flow to handle Branch >> Task (set trigger_policy=ANY)"
-            # But here we are doing Task >> Branch.
-            # Task >> (A | B) means Task triggers both A and B?
-            # Usually >> means "follows".
-            # A >> (B | C) -> A triggers B and C?
-            # Or does it mean B and C depend on A? Yes.
-            # The difference between & and | is usually how they JOIN later, or how they are triggered?
-            # In Airflow, >> [A, B] means A and B depend on upstream.
-            # If we have (A | B) >> C, then C depends on A OR B.
-            # So if 'other' is a Branch, we just add dependencies as usual.
-            # The "OR" logic is relevant when 'other' connects to downstream.
-
-            for item in other:
-                if isinstance(item, TaskWrapper):
-                    item.task.dependencies.add(self.task)
-                    self.task.dependents.add(item.task)
-            return other
-        return other
+def switch(expression: Union[str, Expression]) -> "SwitchBuilder":
+    return SwitchBuilder(expression=ensure_expression(expression))

-    def __and__(self, other):
-        # self & other (Parallel)
-        return Parallel([self, other])

-    def __or__(self, other):
-        # self | other (Branch)
-        # Return a Branch object containing both
-        return Branch([self, other])
+@dataclass
+class SwitchBuilder:
+    expression: Expression
+
+    def __getitem__(self, cases: Union[CaseNode, Sequence[CaseNode]]) -> FlowFragment:
+        if isinstance(cases, CaseNode):
+            case_list = [cases]
+        elif isinstance(cases, Sequence):
+            case_list = list(cases)
+        else:
+            raise TypeError("switch()[...] expects CaseNode(s)")
+
+        if not case_list:
+            raise ValueError("switch() requires at least one case.")
+        return FlowFragment([SwitchNode(expression=self.expression, cases=case_list)])
+
+
+# Helper utilities ---------------------------------------------------------
+def ensure_fragment(value: Union[FlowFragment, TaskWrapper, Task]) -> FlowFragment:
+    if isinstance(value, FlowFragment):
+        return value
+    if isinstance(value, TaskWrapper):
+        return value
+    if hasattr(value, "task"):
+        return FlowFragment([TaskNode(value.task)])
+    if isinstance(value, Task):
+        return TaskWrapper(value)
+    raise TypeError(f"Cannot treat {value!r} as a flow fragment.")
+
+
+def parse_foreach_selector(selector: str) -> Tuple[str, Union[str, None]]:
+    token = selector.strip()
+    alias = None
+    if " as " in token:
+        expr, alias = token.split(" as ", 1)
+        token = expr.strip()
+        alias = alias.strip()
+        if not alias or not alias.isidentifier() or alias in RESERVED_CTX_KEYS:
+            raise ValueError(f"Invalid foreach alias '{alias}'.")
+
+    return token, alias
+
+
+def _collect_tasks(obj) -> List[Task]:
+    if isinstance(obj, TaskNode):
+        return [obj.task]
+    if isinstance(obj, SubFlowNode):
+        tasks: List[Task] = []
+        for step in obj.steps:
+            tasks.extend(_collect_tasks(step))
+        return tasks
+    if isinstance(obj, RepeatNode):
+        return _collect_tasks(obj.body)
+    if isinstance(obj, ForEachNode):
+        return _collect_tasks(obj.body)
+    if isinstance(obj, UntilNode):
+        return _collect_tasks(obj.body)
+    if isinstance(obj, SwitchNode):
+        tasks: List[Task] = []
+        for case in obj.cases:
+            tasks.extend(_collect_tasks(case.target))
+        return tasks
+    return []
+
+
+def _collect_target_tasks(other) -> List[Task]:
+    targets = []
+    if hasattr(other, "task"):
+        targets = [other.task]
+    elif isinstance(other, (list, tuple)):
+        for item in other:
+            if hasattr(item, "task"):
+                targets.append(item.task)
+    return targets

-# We need to adapt the DSL to match the spec:
-# @task
-# def A(ctx, x:int)->int: ...
-# flow = Flow() >> A >> (B & C)

-# So A, B, C must be usable in the expression.
-# The @task decorator should return something that supports >>, &, |
+def task(func: Callable) -> TaskWrapper:
+    return TaskWrapper(Task(func=func, name=func.__name__))

-def task_decorator(func: Callable):
-    t = Task(func=func, name=func.__name__)
-    return TaskWrapper(t)

-# Re-export as task
-task = task_decorator
+__all__ = ["task", "FlowFragment", "switch", "TaskWrapper", "Branch", "Parallel"]
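
For orientation, here is a minimal sketch of how the rewritten operators compose. The `ctx.*` expression strings ("ctx.items", "ctx.done", "ctx.mode") are illustrative assumptions, since `ensure_expression` and the expression syntax live in expressions.py, which is outside this diff; attaching the resulting fragment to a Flow is likewise not shown here.

# Sketch only: operator composition in the 0.5.0 DSL.
from pyoco.dsl.syntax import task, switch

@task
def fetch(ctx): ...

@task
def process(ctx): ...

@task
def summarize(ctx): ...

@task
def alert(ctx): ...

line = fetch >> process                # __rshift__ links the tasks, returns a FlowFragment
per_item = line["ctx.items as item"]   # [] with "expr as alias" builds a ForEachNode
bounded = per_item % ("ctx.done", 10)  # % builds an UntilNode with max_iter=10
routed = switch("ctx.mode")[           # switch()[...] wraps the cases in a SwitchNode
    "batch" >> summarize,              # "X" >> fragment becomes a CaseNode via __rrshift__
    "*" >> alert,                      # "*" is mapped to DEFAULT_CASE_VALUE
]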
pyoco/dsl/validator.py ADDED
@@ -0,0 +1,104 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import List
+
+from ..core.models import Flow
+from .nodes import (
+    CaseNode,
+    ForEachNode,
+    RepeatNode,
+    SubFlowNode,
+    SwitchNode,
+    TaskNode,
+    UntilNode,
+    DSLNode,
+    DEFAULT_CASE_VALUE,
+)
+
+
+@dataclass
+class ValidationReport:
+    warnings: List[str] = field(default_factory=list)
+    errors: List[str] = field(default_factory=list)
+
+    @property
+    def status(self) -> str:
+        if self.errors:
+            return "error"
+        if self.warnings:
+            return "warning"
+        return "ok"
+
+    def to_dict(self) -> dict:
+        return {
+            "status": self.status,
+            "warnings": list(self.warnings),
+            "errors": list(self.errors),
+        }
+
+
+class FlowValidator:
+    """
+    Traverses a Flow's SubFlow definition and produces warnings/errors for
+    problematic control-flow constructs (unbounded loops, duplicate cases, etc.).
+    """
+
+    def __init__(self, flow: Flow):
+        self.flow = flow
+        self.report = ValidationReport()
+
+    def validate(self) -> ValidationReport:
+        program = self.flow.build_program()
+        self._visit_subflow(program, "flow")
+        return self.report
+
+    # Traversal helpers --------------------------------------------------
+    def _visit_subflow(self, subflow: SubFlowNode, path: str):
+        for idx, node in enumerate(subflow.steps):
+            self._visit_node(node, f"{path}.step[{idx}]")
+
+    def _visit_node(self, node: DSLNode, path: str):
+        if isinstance(node, TaskNode):
+            return
+        if isinstance(node, RepeatNode):
+            self._visit_subflow(node.body, f"{path}.repeat")
+        elif isinstance(node, ForEachNode):
+            self._visit_subflow(node.body, f"{path}.foreach")
+        elif isinstance(node, UntilNode):
+            self._validate_until(node, path)
+            self._visit_subflow(node.body, f"{path}.until")
+        elif isinstance(node, SwitchNode):
+            self._validate_switch(node, path)
+        elif isinstance(node, SubFlowNode):
+            self._visit_subflow(node, path)
+        else:
+            self.report.errors.append(f"{path}: Unknown node type {type(node).__name__}")
+
+    # Validators ---------------------------------------------------------
+    def _validate_until(self, node: UntilNode, path: str):
+        if node.max_iter is None:
+            self.report.warnings.append(f"{path}: Until loop missing max_iter (defaults to 1000).")
+
+    def _validate_switch(self, node: SwitchNode, path: str):
+        seen_values = set()
+        default_count = 0
+        for idx, case in enumerate(node.cases):
+            case_path = f"{path}.case[{idx}]"
+            if case.value == DEFAULT_CASE_VALUE:
+                default_count += 1
+                if default_count > 1:
+                    self.report.errors.append(f"{case_path}: Multiple default (*) cases are not allowed.")
+            else:
+                try:
+                    key = case.value
+                    if key in seen_values:
+                        self.report.errors.append(f"{case_path}: Duplicate switch value '{case.value}'.")
+                    else:
+                        seen_values.add(key)
+                except TypeError:
+                    self.report.errors.append(f"{case_path}: Unhashable switch value '{case.value}'.")
+            self._visit_subflow(case.target, f"{case_path}.target")
+
+        if default_count == 0:
+            self.report.warnings.append(f"{path}: Switch has no default (*) case.")
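
A minimal usage sketch for the validator follows; `my_flow` is a hypothetical placeholder for a Flow whose `build_program()` returns the SubFlowNode tree, since Flow construction is not part of this diff.

# Sketch only: my_flow is a placeholder for a Flow built elsewhere.
from pyoco.dsl.validator import FlowValidator

report = FlowValidator(my_flow).validate()
print(report.status)     # "ok", "warning", or "error"
print(report.to_dict())  # {"status": ..., "warnings": [...], "errors": [...]}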
pyoco/server/api.py CHANGED
@@ -1,37 +1,43 @@
-from fastapi import FastAPI, HTTPException
-from typing import List, Optional
+from fastapi import FastAPI, HTTPException, Query
+from fastapi.responses import PlainTextResponse
+from typing import List, Optional, Dict, Any

 from .store import StateStore
 from .models import (
-    RunSubmitRequest, RunResponse,
+    RunSubmitRequest, RunResponse,
     WorkerPollRequest, WorkerPollResponse,
     WorkerHeartbeatRequest, WorkerHeartbeatResponse
 )
-from ..core.models import RunContext, RunStatus
+from ..core.models import RunStatus
+from .metrics import metrics, metrics_content_type

 app = FastAPI(title="Pyoco Kanban Server")
 store = StateStore()

 @app.post("/runs", response_model=RunResponse)
-def submit_run(req: RunSubmitRequest):
+async def submit_run(req: RunSubmitRequest):
     run_ctx = store.create_run(req.flow_name, req.params)
     return RunResponse(run_id=run_ctx.run_id, status=run_ctx.status)

-@app.get("/runs", response_model=List[RunContext])
-def list_runs(status: Optional[RunStatus] = None):
-    runs = store.list_runs()
-    if status:
-        runs = [r for r in runs if r.status == status]
-    return runs
+@app.get("/runs")
+async def list_runs(
+    status: Optional[str] = None,
+    flow: Optional[str] = None,
+    limit: Optional[int] = Query(default=None, ge=1, le=200),
+):
+    status_enum = _parse_status(status)
+    limit_value = limit if isinstance(limit, int) else None
+    runs = store.list_runs(status=status_enum, flow=flow, limit=limit_value)
+    return [store.export_run(r) for r in runs]

-@app.get("/runs/{run_id}", response_model=RunContext)
-def get_run(run_id: str):
+@app.get("/runs/{run_id}")
+async def get_run(run_id: str):
     run = store.get_run(run_id)
     if not run:
         raise HTTPException(status_code=404, detail="Run not found")
-    return run
+    return store.export_run(run)

 @app.post("/runs/{run_id}/cancel")
-def cancel_run(run_id: str):
+async def cancel_run(run_id: str):
     run = store.get_run(run_id)
     if not run:
         raise HTTPException(status_code=404, detail="Run not found")
@@ -39,7 +45,7 @@ def cancel_run(run_id: str):
     return {"status": "CANCELLING"}

 @app.post("/workers/poll", response_model=WorkerPollResponse)
-def poll_work(req: WorkerPollRequest):
+async def poll_work(req: WorkerPollRequest):
     # In v0.3.0, we ignore worker_id and tags for simplicity
     run = store.dequeue()
     if run:
@@ -58,14 +64,49 @@ def poll_work(req: WorkerPollRequest):
     return WorkerPollResponse()

 @app.post("/runs/{run_id}/heartbeat", response_model=WorkerHeartbeatResponse)
-def heartbeat(run_id: str, req: WorkerHeartbeatRequest):
+async def heartbeat(run_id: str, req: WorkerHeartbeatRequest):
     run = store.get_run(run_id)
     if not run:
         raise HTTPException(status_code=404, detail="Run not found")

-    store.update_run(run_id, status=req.run_status, task_states=req.task_states)
+    store.update_run(
+        run_id,
+        status=req.run_status,
+        task_states=req.task_states,
+        task_records=req.task_records,
+        logs=req.logs
+    )

     # Check if cancellation was requested
     cancel_requested = (run.status == RunStatus.CANCELLING)

     return WorkerHeartbeatResponse(cancel_requested=cancel_requested)
+
+@app.get("/runs/{run_id}/logs")
+async def get_logs(run_id: str, task: Optional[str] = None, tail: Optional[int] = None):
+    run = store.get_run(run_id)
+    if not run:
+        raise HTTPException(status_code=404, detail="Run not found")
+    logs = run.logs
+    if task:
+        logs = [entry for entry in logs if entry["task"] == task]
+    if tail:
+        logs = logs[-tail:]
+    return {"run_status": run.status, "logs": logs}
+
+
+@app.get("/metrics")
+async def prometheus_metrics():
+    payload = metrics.render_latest()
+    return PlainTextResponse(payload, media_type=metrics_content_type())
+
+
+def _parse_status(value: Optional[str]) -> Optional[RunStatus]:
+    if not value:
+        return None
+    if isinstance(value, RunStatus):
+        return value
+    try:
+        return RunStatus(value)
+    except ValueError:
+        raise HTTPException(status_code=400, detail=f"Invalid status '{value}'")
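
The new endpoints can be exercised together with a small client sketch; the base URL, flow name, and status string below are illustrative assumptions, not values defined by this diff.

# Sketch only: hitting the 0.5.0 API with requests.
import requests

BASE = "http://localhost:8000"  # assumed local server

run_id = requests.post(f"{BASE}/runs", json={"flow_name": "demo", "params": {}}).json()["run_id"]
running = requests.get(f"{BASE}/runs", params={"status": "RUNNING", "limit": 20}).json()
logs = requests.get(f"{BASE}/runs/{run_id}/logs", params={"tail": 50}).json()
metrics_text = requests.get(f"{BASE}/metrics").text  # Prometheus text exposition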
pyoco/server/metrics.py ADDED
@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from prometheus_client import (
+    CollectorRegistry,
+    CONTENT_TYPE_LATEST,
+    Counter,
+    Gauge,
+    Histogram,
+    generate_latest,
+)
+
+from ..core.models import RunStatus
+
+
+_DEFAULT_BUCKETS = (
+    0.05,
+    0.1,
+    0.25,
+    0.5,
+    1.0,
+    2.5,
+    5.0,
+    10.0,
+    30.0,
+    60.0,
+)
+
+
+class MetricsSink:
+    """
+    Small wrapper that owns a CollectorRegistry so tests can reset easily.
+    """
+
+    def __init__(self) -> None:
+        self.registry = CollectorRegistry()
+        self._init_metrics()
+
+    def _init_metrics(self) -> None:
+        self.runs_total = Counter(
+            "pyoco_runs_total",
+            "Total runs observed by status transitions.",
+            ["status"],
+            registry=self.registry,
+        )
+        self.runs_in_progress = Gauge(
+            "pyoco_runs_in_progress",
+            "Number of runs currently executing (RUNNING).",
+            registry=self.registry,
+        )
+        self.task_duration = Histogram(
+            "pyoco_task_duration_seconds",
+            "Observed task durations.",
+            ["task"],
+            buckets=_DEFAULT_BUCKETS,
+            registry=self.registry,
+        )
+        self.run_duration = Histogram(
+            "pyoco_run_duration_seconds",
+            "Observed end-to-end run durations.",
+            ["flow"],
+            buckets=_DEFAULT_BUCKETS,
+            registry=self.registry,
+        )
+
+    def reset(self) -> None:
+        self.__init__()
+
+    def record_status_transition(
+        self,
+        previous: Optional[RunStatus],
+        new_status: RunStatus,
+    ) -> None:
+        status_value = new_status.value if hasattr(new_status, "value") else str(new_status)
+        self.runs_total.labels(status=status_value).inc()
+
+        prev_value = previous.value if hasattr(previous, "value") else previous
+        if status_value == RunStatus.RUNNING.value:
+            if prev_value != RunStatus.RUNNING.value:
+                self.runs_in_progress.inc()
+        elif prev_value == RunStatus.RUNNING.value:
+            self.runs_in_progress.dec()
+
+    def record_task_duration(self, task_name: str, duration_ms: Optional[float]) -> None:
+        if duration_ms is None:
+            return
+        if duration_ms < 0:
+            return
+        self.task_duration.labels(task=task_name).observe(duration_ms / 1000.0)
+
+    def record_run_duration(
+        self,
+        flow_name: str,
+        start_time: Optional[float],
+        end_time: Optional[float],
+    ) -> None:
+        if start_time is None or end_time is None:
+            return
+        duration = end_time - start_time
+        if duration < 0:
+            return
+        self.run_duration.labels(flow=flow_name).observe(duration)
+
+    def render_latest(self) -> bytes:
+        return generate_latest(self.registry)
+
+
+metrics = MetricsSink()
+
+
+def metrics_content_type() -> str:
+    return CONTENT_TYPE_LATEST
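
Finally, a sketch of driving MetricsSink by hand (e.g., from a test); RunStatus.SUCCEEDED is an assumed member, since only RUNNING and CANCELLING appear elsewhere in this diff.

# Sketch only: exercising the module-level sink directly.
from pyoco.server.metrics import metrics
from pyoco.core.models import RunStatus

metrics.record_status_transition(previous=None, new_status=RunStatus.RUNNING)  # gauge -> 1
metrics.record_task_duration("extract", duration_ms=125.0)  # recorded as 0.125 s
metrics.record_status_transition(
    previous=RunStatus.RUNNING,
    new_status=RunStatus.SUCCEEDED,  # assumed member; gauge -> 0
)
print(metrics.render_latest().decode())  # same payload the /metrics endpoint serves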