pyoco 0.3.0__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyoco/core/engine.py CHANGED
@@ -1,9 +1,32 @@
1
1
  import time
2
+ import io
3
+ import sys
4
+ import traceback
2
5
  from typing import Dict, Any, List, Set, Optional
6
+ import contextlib
3
7
  from .models import Flow, Task, RunContext, TaskState, RunStatus
4
- from .context import Context
8
+ from .context import Context, LoopFrame
9
+ from .exceptions import UntilMaxIterationsExceeded
5
10
  from ..trace.backend import TraceBackend
6
11
  from ..trace.console import ConsoleTraceBackend
12
+ from ..dsl.nodes import TaskNode, RepeatNode, ForEachNode, UntilNode, SwitchNode, DEFAULT_CASE_VALUE
13
+ from ..dsl.expressions import Expression
14
+
15
class TeeStream:
    """Write-through stream wrapper: forwards every write to the wrapped
    stream while keeping a full in-memory copy for later retrieval."""

    def __init__(self, original):
        self.original = original
        self.buffer = io.StringIO()

    def write(self, data):
        """Write *data* to both the real stream and the capture buffer.

        Returns the number of characters written, as file-like objects do.
        """
        self.original.write(data)
        self.buffer.write(data)
        return len(data)

    def flush(self):
        """Flush only the wrapped stream; the StringIO buffer needs none."""
        self.original.flush()

    def getvalue(self):
        """Return everything captured since construction."""
        return self.buffer.getvalue()
7
30
 
8
31
  class Engine:
9
32
  """
@@ -44,16 +67,31 @@ class Engine:
44
67
  run_context = RunContext()
45
68
 
46
69
  run_ctx = run_context
70
+ run_ctx.flow_name = flow.name
71
+ run_ctx.params = params or {}
47
72
 
48
73
  # Initialize all tasks as PENDING
49
74
  for task in flow.tasks:
50
75
  run_ctx.tasks[task.name] = TaskState.PENDING
76
+ run_ctx.ensure_task_record(task.name)
51
77
 
52
78
  ctx = Context(params=params or {}, run_context=run_ctx)
53
79
  self.trace.on_flow_start(flow.name, run_id=run_ctx.run_id)
54
80
 
55
81
  # Register active run
56
82
  self.active_runs[run_ctx.run_id] = run_ctx
83
+
84
+ if flow.has_control_flow():
85
+ try:
86
+ program = flow.build_program()
87
+ self._execute_subflow(program, ctx)
88
+ run_ctx.status = RunStatus.COMPLETED
89
+ except Exception:
90
+ run_ctx.status = RunStatus.FAILED
91
+ run_ctx.end_time = time.time()
92
+ raise
93
+ run_ctx.end_time = time.time()
94
+ return ctx
57
95
 
58
96
  try:
59
97
  executed: Set[Task] = set()
@@ -264,12 +302,130 @@ class Engine:
264
302
  run_ctx.end_time = time.time()
265
303
  return ctx
266
304
 
305
+ def _execute_subflow(self, subflow, ctx: Context):
306
+ for node in subflow.steps:
307
+ self._execute_node(node, ctx)
308
+
309
+ def _execute_node(self, node, ctx: Context):
310
+ if isinstance(node, TaskNode):
311
+ self._execute_task(node.task, ctx)
312
+ elif isinstance(node, RepeatNode):
313
+ self._execute_repeat(node, ctx)
314
+ elif isinstance(node, ForEachNode):
315
+ self._execute_foreach(node, ctx)
316
+ elif isinstance(node, UntilNode):
317
+ self._execute_until(node, ctx)
318
+ elif isinstance(node, SwitchNode):
319
+ self._execute_switch(node, ctx)
320
+ else:
321
+ raise TypeError(f"Unknown node type: {type(node)}")
322
+
323
+ def _execute_repeat(self, node: RepeatNode, ctx: Context):
324
+ count_value = self._resolve_repeat_count(node.count, ctx)
325
+ for index in range(count_value):
326
+ frame = LoopFrame(name="repeat", type="repeat", index=index, iteration=index + 1, count=count_value)
327
+ ctx.push_loop(frame)
328
+ try:
329
+ self._execute_subflow(node.body, ctx)
330
+ finally:
331
+ ctx.pop_loop()
332
+
333
+ def _execute_foreach(self, node: ForEachNode, ctx: Context):
334
+ sequence = self._eval_expression(node.source, ctx)
335
+ if not isinstance(sequence, (list, tuple)):
336
+ raise TypeError("ForEach source must evaluate to a list or tuple.")
337
+
338
+ total = len(sequence)
339
+ label = node.alias or node.source.source
340
+ for index, item in enumerate(sequence):
341
+ frame = LoopFrame(
342
+ name=f"foreach:{label}",
343
+ type="foreach",
344
+ index=index,
345
+ iteration=index + 1,
346
+ count=total,
347
+ item=item,
348
+ )
349
+ ctx.push_loop(frame)
350
+ if node.alias:
351
+ ctx.set_var(node.alias, item)
352
+ try:
353
+ self._execute_subflow(node.body, ctx)
354
+ finally:
355
+ if node.alias:
356
+ ctx.clear_var(node.alias)
357
+ ctx.pop_loop()
358
+
359
+ def _execute_until(self, node: UntilNode, ctx: Context):
360
+ max_iter = node.max_iter or 1000
361
+ iteration = 0
362
+ last_condition = None
363
+ while True:
364
+ iteration += 1
365
+ frame = LoopFrame(
366
+ name="until",
367
+ type="until",
368
+ index=iteration - 1,
369
+ iteration=iteration,
370
+ condition=last_condition,
371
+ count=max_iter,
372
+ )
373
+ ctx.push_loop(frame)
374
+ try:
375
+ self._execute_subflow(node.body, ctx)
376
+ condition_result = bool(self._eval_expression(node.condition, ctx))
377
+ finally:
378
+ ctx.pop_loop()
379
+
380
+ last_condition = condition_result
381
+ if condition_result:
382
+ break
383
+ if iteration >= max_iter:
384
+ raise UntilMaxIterationsExceeded(node.condition.source, max_iter)
385
+
386
+ def _execute_switch(self, node: SwitchNode, ctx: Context):
387
+ value = self._eval_expression(node.expression, ctx)
388
+ default_case = None
389
+ for case in node.cases:
390
+ if case.value == DEFAULT_CASE_VALUE:
391
+ if default_case is None:
392
+ default_case = case
393
+ continue
394
+ if case.value == value:
395
+ self._execute_subflow(case.target, ctx)
396
+ return
397
+ if default_case:
398
+ self._execute_subflow(default_case.target, ctx)
399
+ def _resolve_repeat_count(self, count_value, ctx: Context) -> int:
400
+ if isinstance(count_value, Expression):
401
+ resolved = self._eval_expression(count_value, ctx)
402
+ else:
403
+ resolved = count_value
404
+ if not isinstance(resolved, int):
405
+ raise TypeError("Repeat count must evaluate to an integer.")
406
+ if resolved < 0:
407
+ raise ValueError("Repeat count cannot be negative.")
408
+ return resolved
409
+
410
+ def _eval_expression(self, expression, ctx: Context):
411
+ if isinstance(expression, Expression):
412
+ return expression.evaluate(ctx=ctx.expression_data(), env=ctx.env_data())
413
+ return expression
414
+
267
415
  def _execute_task(self, task: Task, ctx: Context):
268
416
  # Update state to RUNNING
269
417
  from .models import TaskState
418
+ run_ctx = ctx.run_context
270
419
  if ctx.run_context:
271
420
  ctx.run_context.tasks[task.name] = TaskState.RUNNING
272
-
421
+ record = ctx.run_context.ensure_task_record(task.name)
422
+ record.state = TaskState.RUNNING
423
+ record.started_at = time.time()
424
+ record.error = None
425
+ record.traceback = None
426
+ else:
427
+ record = None
428
+
273
429
  self.trace.on_node_start(task.name)
274
430
  start_time = time.time()
275
431
  # Retry loop
@@ -301,8 +457,17 @@ class Engine:
301
457
  elif param_name in ctx.results:
302
458
  kwargs[param_name] = ctx.results[param_name]
303
459
 
304
- result = task.func(**kwargs)
460
+ if record:
461
+ record.inputs = {k: v for k, v in kwargs.items() if k != "ctx"}
462
+
463
+ stdout_capture = TeeStream(sys.stdout)
464
+ stderr_capture = TeeStream(sys.stderr)
465
+ with contextlib.redirect_stdout(stdout_capture), contextlib.redirect_stderr(stderr_capture):
466
+ result = task.func(**kwargs)
305
467
  ctx.set_result(task.name, result)
468
+ if run_ctx:
469
+ run_ctx.append_log(task.name, "stdout", stdout_capture.getvalue())
470
+ run_ctx.append_log(task.name, "stderr", stderr_capture.getvalue())
306
471
 
307
472
  # Handle outputs saving
308
473
  for target_path in task.outputs:
@@ -333,10 +498,24 @@ class Engine:
333
498
  # Update state to SUCCEEDED
334
499
  if ctx.run_context:
335
500
  ctx.run_context.tasks[task.name] = TaskState.SUCCEEDED
501
+ if record:
502
+ record.state = TaskState.SUCCEEDED
503
+ record.ended_at = time.time()
504
+ record.duration_ms = (record.ended_at - record.started_at) * 1000
505
+ record.output = result
336
506
 
337
507
  return # Success
338
508
 
339
509
  except Exception as e:
510
+ if run_ctx:
511
+ run_ctx.append_log(task.name, "stdout", stdout_capture.getvalue() if 'stdout_capture' in locals() else "")
512
+ run_ctx.append_log(task.name, "stderr", stderr_capture.getvalue() if 'stderr_capture' in locals() else "")
513
+ if record:
514
+ record.state = TaskState.FAILED
515
+ record.ended_at = time.time()
516
+ record.duration_ms = (record.ended_at - record.started_at) * 1000
517
+ record.error = str(e)
518
+ record.traceback = traceback.format_exc()
340
519
  if retries_left > 0:
341
520
  retries_left -= 1
342
521
  # Log retry?
@@ -0,0 +1,15 @@
1
class ControlFlowError(Exception):
    """Base error for control flow execution issues."""


class UntilMaxIterationsExceeded(ControlFlowError):
    """Raised when an ``until`` loop hits its iteration cap before its
    condition ever becomes true."""

    def __init__(self, expression: str, max_iter: int):
        self.expression = expression
        self.max_iter = max_iter
        super().__init__(f"Until condition '{expression}' exceeded max_iter={max_iter}")


class SwitchNoMatch(ControlFlowError):
    """Raised when a ``switch`` expression matches no case."""

    def __init__(self, expression: str):
        self.expression = expression
        super().__init__(f"Switch expression '{expression}' did not match any case.")
pyoco/core/models.py CHANGED
@@ -1,8 +1,9 @@
1
- from typing import Any, Callable, Dict, List, Optional, Set, Union, ForwardRef
1
+ from typing import Any, Callable, Dict, List, Optional, Set, Union
2
2
  from dataclasses import dataclass, field
3
3
  from enum import Enum
4
4
  import time
5
5
  import uuid
6
+ import json
6
7
 
7
8
  @dataclass
8
9
  class Task:
@@ -56,16 +57,89 @@ class RunStatus(Enum):
56
57
  CANCELLING = "CANCELLING"
57
58
  CANCELLED = "CANCELLED"
58
59
 
60
@dataclass
class TaskRecord:
    """Detailed per-task execution record for one run.

    Complements the coarse RunContext.tasks state map with timing, failure
    details, captured inputs, and the task's output.
    """
    # Lifecycle state; starts PENDING, updated by the engine as it runs.
    state: TaskState = TaskState.PENDING
    # Wall-clock timestamps (time.time()); None until the task starts/ends.
    started_at: Optional[float] = None
    ended_at: Optional[float] = None
    # Derived (ended_at - started_at) * 1000; None until the task finishes.
    duration_ms: Optional[float] = None
    # str(exception) and traceback.format_exc() of the last failure, else None.
    error: Optional[str] = None
    traceback: Optional[str] = None
    # Keyword arguments the task function was invoked with.
    inputs: Dict[str, Any] = field(default_factory=dict)
    # Return value of the task function on success.
    output: Any = None
    # Named artifacts attached during execution.
    artifacts: Dict[str, Any] = field(default_factory=dict)
71
+
72
+
59
73
@dataclass
class RunContext:
    """
    Holds the state of a single workflow execution: identity, parameters,
    per-task states and records, an ordered log stream, and bookkeeping
    flags used by metrics/webhook reporting.
    """
    run_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    flow_name: str = "main"
    params: Dict[str, Any] = field(default_factory=dict)
    status: RunStatus = RunStatus.RUNNING
    # Coarse per-task state map (task name -> TaskState).
    tasks: Dict[str, TaskState] = field(default_factory=dict)
    # Rich per-task records (timings, errors, inputs/outputs).
    task_records: Dict[str, TaskRecord] = field(default_factory=dict)
    start_time: float = field(default_factory=time.time)
    end_time: Optional[float] = None
    metadata: Dict[str, Any] = field(default_factory=dict)
    # Full ordered log history; _pending_logs holds the not-yet-drained tail.
    logs: List[Dict[str, Any]] = field(default_factory=list)
    _pending_logs: List[Dict[str, Any]] = field(default_factory=list, repr=False)
    # Monotonic sequence number assigned to each log entry.
    _log_seq: int = field(default=0, repr=False)
    log_bytes: Dict[str, int] = field(default_factory=dict)
    # Reporting bookkeeping; consumed by metrics/webhook layers — TODO confirm.
    metrics_recorded_tasks: Set[str] = field(default_factory=set, repr=False)
    metrics_run_observed: bool = field(default=False, repr=False)
    webhook_notified_status: Optional[str] = field(default=None, repr=False)

    def ensure_task_record(self, task_name: str) -> TaskRecord:
        """Return the record for *task_name*, creating a fresh one if absent."""
        if task_name not in self.task_records:
            self.task_records[task_name] = TaskRecord()
        return self.task_records[task_name]

    def append_log(self, task_name: str, stream: str, payload: str):
        """Append a log entry for a task's stdout/stderr capture.

        Empty payloads are dropped.  Each entry gets a monotonically
        increasing sequence number and is stored both in the permanent
        history and the pending queue consumed by drain_logs().
        """
        if not payload:
            return
        entry = {
            "seq": self._log_seq,
            "task": task_name,
            "stream": stream,
            "text": payload,
            "timestamp": time.time(),
        }
        self._log_seq += 1
        self.logs.append(entry)
        self._pending_logs.append(entry)

    def drain_logs(self) -> List[Dict[str, Any]]:
        """Return and clear log entries added since the previous drain."""
        drained = list(self._pending_logs)
        self._pending_logs.clear()
        return drained

    def serialize_task_records(self) -> Dict[str, Any]:
        """Return a plain-dict snapshot of all task records.

        Enum states become their .value; inputs and output are passed
        through _safe_value so the result stays JSON-serializable.
        """
        serialized: Dict[str, Any] = {}
        for name, record in self.task_records.items():
            serialized[name] = {
                "state": record.state.value if hasattr(record.state, "value") else record.state,
                "started_at": record.started_at,
                "ended_at": record.ended_at,
                "duration_ms": record.duration_ms,
                "error": record.error,
                "traceback": record.traceback,
                "inputs": {k: self._safe_value(v) for k, v in record.inputs.items()},
                "output": self._safe_value(record.output),
                "artifacts": record.artifacts,
            }
        return serialized

    def _safe_value(self, value: Any) -> Any:
        """Return *value* if JSON-serializable, otherwise its repr()."""
        if isinstance(value, (str, int, float, bool)) or value is None:
            return value
        try:
            json.dumps(value)
            return value
        except Exception:
            return repr(value)
69
143
 
70
144
  @dataclass
71
145
  class Flow:
@@ -78,8 +152,27 @@ class Flow:
78
152
  name: str = "main"
79
153
  tasks: Set[Task] = field(default_factory=set)
80
154
  _tail: Set[Task] = field(default_factory=set)
155
+ _definition: List[Any] = field(default_factory=list, repr=False)
156
+ _has_control_flow: bool = False
81
157
 
82
158
  def __rshift__(self, other):
159
+ from ..dsl.syntax import TaskWrapper, FlowFragment, ensure_fragment
160
+
161
+ if isinstance(other, TaskWrapper):
162
+ fragment = other
163
+ self._record_fragment(fragment)
164
+ self._append_task(fragment.task)
165
+ return self
166
+
167
+ if hasattr(other, "to_subflow"):
168
+ fragment = other if isinstance(other, FlowFragment) else ensure_fragment(other)
169
+ self._record_fragment(fragment)
170
+ if not self._has_control_flow and not fragment.has_control_flow():
171
+ self._append_linear_fragment(fragment)
172
+ else:
173
+ self._has_control_flow = True
174
+ return self
175
+
83
176
  # Flow >> Task/List/Branch
84
177
  new_tasks = []
85
178
  is_branch = False
@@ -155,3 +248,39 @@ class Flow:
155
248
  def add_task(self, task: Task):
156
249
  self.tasks.add(task)
157
250
 
251
+ def has_control_flow(self) -> bool:
252
+ return self._has_control_flow
253
+
254
+ def build_program(self):
255
+ from ..dsl.nodes import SubFlowNode
256
+ return SubFlowNode(list(self._definition))
257
+
258
+ def _record_fragment(self, fragment):
259
+ from ..dsl.nodes import TaskNode
260
+ subflow = fragment.to_subflow()
261
+ self._definition.extend(subflow.steps)
262
+ for task in fragment.task_nodes():
263
+ self.add_task(task)
264
+ if any(not isinstance(step, TaskNode) for step in subflow.steps):
265
+ self._has_control_flow = True
266
+
267
+ def _append_linear_fragment(self, fragment):
268
+ subflow = fragment.to_subflow()
269
+ for step in subflow.steps:
270
+ if hasattr(step, "task"):
271
+ self._append_task(step.task)
272
+
273
+ def _append_task(self, task: Task):
274
+ self.add_task(task)
275
+ if self._has_control_flow:
276
+ self._tail = {task}
277
+ return
278
+
279
+ if not self._tail:
280
+ self._tail = {task}
281
+ return
282
+
283
+ for tail_task in self._tail:
284
+ tail_task.dependents.add(task)
285
+ task.dependencies.add(tail_task)
286
+ self._tail = {task}
pyoco/discovery/loader.py CHANGED
@@ -1,9 +1,10 @@
1
1
  import importlib
2
2
  import pkgutil
3
3
  import sys
4
- from typing import Dict, List, Any
4
+ from typing import Dict, List, Any, Set
5
5
  from ..core.models import Task
6
6
  from ..dsl.syntax import TaskWrapper
7
+ from .plugins import PluginRegistry, iter_entry_points
7
8
 
8
9
  class TaskLoader:
9
10
  def __init__(self, config: Any, strict: bool = False):
@@ -11,6 +12,7 @@ class TaskLoader:
11
12
  self.strict = strict
12
13
  self.tasks: Dict[str, Task] = {}
13
14
  self._explicit_tasks: Set[str] = set()
15
+ self.plugin_reports: List[Dict[str, Any]] = []
14
16
 
15
17
  def load(self):
16
18
  # Load explicitly defined tasks in config FIRST (Higher priority)
@@ -31,6 +33,8 @@ class TaskLoader:
31
33
  for pattern in self.config.discovery.glob_modules:
32
34
  self._load_glob_modules(pattern)
33
35
 
36
+ self._load_entry_point_plugins()
37
+
34
38
  def _register_task(self, name: str, task: Task):
35
39
  if name in self.tasks:
36
40
  if name in self._explicit_tasks:
@@ -97,6 +101,33 @@ class TaskLoader:
97
101
  module_name = rel_path.replace(os.sep, ".")[:-3] # strip .py
98
102
  self._load_module(module_name)
99
103
 
104
+ def _load_entry_point_plugins(self):
105
+ entries = iter_entry_points()
106
+ for ep in entries:
107
+ info = {
108
+ "name": ep.name,
109
+ "value": ep.value,
110
+ "module": getattr(ep, "module", ""),
111
+ "tasks": [],
112
+ "warnings": [],
113
+ }
114
+ registry = PluginRegistry(self, ep.name)
115
+ try:
116
+ hook = ep.load()
117
+ if not callable(hook):
118
+ raise TypeError("Entry point must be callable")
119
+ hook(registry)
120
+ info["tasks"] = list(registry.records)
121
+ info["warnings"] = list(registry.warnings)
122
+ if not registry.records:
123
+ info["warnings"].append("no tasks registered")
124
+ except Exception as exc:
125
+ info["error"] = str(exc)
126
+ if self.strict:
127
+ raise
128
+ print(f"Warning: Plugin '{ep.name}' failed to load: {exc}")
129
+ self.plugin_reports.append(info)
130
+
100
131
  def _scan_module(self, module: Any):
101
132
  for name, obj in vars(module).items():
102
133
  if isinstance(obj, TaskWrapper):
@@ -0,0 +1,148 @@
1
+ from __future__ import annotations
2
+
3
+ from importlib import metadata as importlib_metadata
4
+ from typing import Any, Callable, Dict, List, Optional, Type
5
+
6
+ from ..core.models import Task
7
+ from ..dsl.syntax import TaskWrapper
8
+
9
+
10
class CallablePluginTask(Task):
    """Task subclass wrapping a bare callable registered by a plugin.

    Exists only so callable-based registrations still show up as
    Task-derived objects.
    """

    def __init__(self, func: Callable, name: str):
        super().__init__(name=name, func=func)
15
+
16
+
17
def iter_entry_points(group: str = "pyoco.tasks"):
    """Return the installed entry points for *group* as a list.

    Supports both the modern ``EntryPoints.select`` API (Python 3.10+) and
    the legacy dict-of-groups return type of ``entry_points()``.
    """
    all_eps = importlib_metadata.entry_points()
    selector = getattr(all_eps, "select", None)
    if selector is not None:
        return list(selector(group=group))
    return list(all_eps.get(group, []))


def list_available_plugins() -> List[Dict[str, Any]]:
    """Describe every installed 'pyoco.tasks' entry point (name/module/value)."""
    return [
        {
            "name": ep.name,
            "module": getattr(ep, "module", ""),
            "value": ep.value,
        }
        for ep in iter_entry_points()
    ]
35
+
36
+
37
class PluginRegistry:
    """Registration facade handed to each plugin entry-point hook.

    Plugins register tasks via task()/task_class()/add(); the registry
    forwards each task to the loader and accumulates per-plugin records
    and validation warnings for the loader's plugin report.
    """

    def __init__(self, loader: Any, provider_name: str) -> None:
        self.loader = loader
        self.provider_name = provider_name
        self.registered_names: List[str] = []
        self.records: List[Dict[str, Any]] = []
        self.warnings: List[str] = []

    def task(
        self,
        func: Optional[Callable] = None,
        *,
        name: Optional[str] = None,
        inputs: Optional[Dict[str, Any]] = None,
        outputs: Optional[List[str]] = None,
    ):
        """Register a callable as a task.

        Usable both directly (``registry.task(fn)``) and as a decorator
        factory (``@registry.task(name=...)``); either way the original
        callable is returned unchanged.
        """
        if func is not None:
            self.register_callable(
                func,
                name=name,
                inputs=inputs or {},
                outputs=outputs or [],
            )
            return func

        def decorator(inner: Callable):
            self.register_callable(
                inner,
                name=name,
                inputs=inputs or {},
                outputs=outputs or [],
            )
            return inner

        return decorator

    def register_callable(
        self,
        func: Callable,
        *,
        name: Optional[str] = None,
        inputs: Optional[Dict[str, Any]] = None,
        outputs: Optional[List[str]] = None,
    ) -> Task:
        """Wrap *func* in a CallablePluginTask and register it.

        The task name falls back to the function's __name__, then to a
        provider-derived placeholder.  Returns the created task.
        """
        task_name = name or getattr(func, "__name__", f"{self.provider_name}_task")
        task = CallablePluginTask(func=func, name=task_name)
        if inputs:
            task.inputs.update(inputs)
        if outputs:
            task.outputs.extend(outputs)
        self._finalize_task(task, origin="callable")
        return task

    def task_class(
        self,
        task_cls: Type[Task],
        *args: Any,
        name: Optional[str] = None,
        **kwargs: Any,
    ) -> Task:
        """Instantiate a Task subclass and register the instance.

        Raises:
            TypeError: if *task_cls* is not a Task subclass.
        """
        if not issubclass(task_cls, Task):
            raise TypeError(f"{task_cls} is not a Task subclass")
        task = task_cls(*args, **kwargs)
        if name:
            task.name = name
        self._finalize_task(task, origin="task_class")
        return task

    def add(self, obj: Any, *, name: Optional[str] = None) -> None:
        """Register any supported task-like object.

        Accepts a TaskWrapper, a Task (instance or subclass instance), or a
        bare callable; anything else raises TypeError.
        """
        if isinstance(obj, TaskWrapper):
            task = obj.task
            if name:
                task.name = name
            self._finalize_task(task, origin="wrapper")
        elif isinstance(obj, Task):
            if name:
                obj.name = name
            # Distinguish plain Task instances from subclass instances so
            # _validate_task can warn about the former.
            origin = "task_class" if obj.__class__ is not Task else "task"
            self._finalize_task(obj, origin=origin)
        elif callable(obj):
            self.register_callable(obj, name=name)
        else:
            raise TypeError(f"Unsupported task object: {obj!r}")

    def _finalize_task(self, task: Task, origin: str) -> None:
        """Validate, hand the task to the loader, and record the registration."""
        warnings = self._validate_task(task, origin)
        self.loader._register_task(task.name, task)
        self.registered_names.append(task.name)
        self.records.append(
            {
                "name": task.name,
                "origin": origin,
                "class": task.__class__.__name__,
                "warnings": warnings,
            }
        )
        for msg in warnings:
            self.warnings.append(f"{task.name}: {msg}")

    def _validate_task(self, task: Task, origin: str) -> List[str]:
        """Return advisory warnings for a task; auto-assigns a missing name."""
        warnings: List[str] = []
        if not getattr(task, "name", None):
            generated = f"{self.provider_name}_{len(self.registered_names) + 1}"
            task.name = generated
            warnings.append(f"name missing; auto-assigned '{generated}'")
        if not callable(getattr(task, "func", None)):
            warnings.append("task.func is not callable")
        if origin == "callable":
            warnings.append("registered via callable; prefer Task subclass for extensibility")
        if task.__class__ is Task and origin not in ("callable", "wrapper"):
            warnings.append("plain Task instance detected; subclass Task for metadata support")
        return warnings