pyoco 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyoco/__init__.py CHANGED
@@ -2,10 +2,11 @@ from .core.models import Flow, Task
 from .core.engine import Engine
 from .dsl.syntax import task
 from .trace.console import ConsoleTraceBackend
+from . import support
 
 def run(flow: Flow, params: dict = None, trace: bool = True, cute: bool = True):
     backend = ConsoleTraceBackend(style="cute" if cute else "plain")
     engine = Engine(trace_backend=backend)
     return engine.run(flow, params)
 
-__all__ = ["task", "Flow", "run"]
+__all__ = ["task", "Flow", "run", "support"]
pyoco/cli/main.py CHANGED
@@ -4,13 +4,22 @@ import sys
 import os
 import signal
 import time
+from types import SimpleNamespace
 from ..schemas.config import PyocoConfig
 from ..discovery.loader import TaskLoader
 from ..core.models import Flow
 from ..core.engine import Engine
 from ..trace.console import ConsoleTraceBackend
 from ..client import Client
-from ..discovery.plugins import list_available_plugins
+from ..support.service import SupportInfoService
+from ..core.exceptions import (
+    SupportInfoError,
+    InvalidFormatError,
+    TaskNotFoundError,
+    InvalidFilterError,
+    OutputWriteError,
+    MissingTaskMetadataError,
+)
 
 def main():
     parser = argparse.ArgumentParser(description="Pyoco Workflow Engine")
@@ -88,6 +97,32 @@ def main():
     plugins_sub = plugins_parser.add_subparsers(dest="plugins_command")
     plugins_list = plugins_sub.add_parser("list", help="List discovered plug-ins")
     plugins_list.add_argument("--json", action="store_true", help="Output JSON payload")
+    plugins_lint = plugins_sub.add_parser("lint", help="Validate plug-ins for upcoming requirements")
+    plugins_lint.add_argument("--json", action="store_true", help="Output JSON payload")
+
+    support_parser = subparsers.add_parser("support", help="Generate support info")
+    support_subparsers = support_parser.add_subparsers(dest="support_command")
+
+    support_tasks = support_subparsers.add_parser("tasks", help="List tasks for LLM support")
+    support_tasks.add_argument("--config", required=True, help="Path to flow.yaml")
+    support_tasks.add_argument("--format", default="prompt", choices=["prompt", "json", "md"])
+    support_tasks.add_argument("--output", help="Write output to file")
+    support_tasks.add_argument("--name", action="append", help="Filter by task name (repeatable)")
+    support_tasks.add_argument("--origin", action="append", help="Filter by origin (repeatable)")
+    support_tasks.add_argument("--tag", action="append", help="Filter by tag (repeatable)")
+
+    support_task = support_subparsers.add_parser("task", help="Show task detail for LLM support")
+    support_task.add_argument("--config", required=True, help="Path to flow.yaml")
+    support_task.add_argument("--name", required=True, help="Task name")
+    support_task.add_argument("--format", default="prompt", choices=["prompt", "json", "md"])
+    support_task.add_argument("--output", help="Write output to file")
+    support_task.add_argument("--origin", action="append", help="Filter by origin (repeatable)")
+    support_task.add_argument("--tag", action="append", help="Filter by tag (repeatable)")
+
+    support_guide = support_subparsers.add_parser("guide", help="Show flow.yaml guide for LLM support")
+    support_guide.add_argument("--config", required=True, help="Path to flow.yaml")
+    support_guide.add_argument("--format", default="prompt", choices=["prompt", "json", "md"])
+    support_guide.add_argument("--output", help="Write output to file")
 
     args = parser.parse_args()
 
@@ -120,18 +155,72 @@ def main():
         return
 
     if args.command == "plugins":
-        infos = list_available_plugins()
+        reports = _collect_plugin_reports()
        if args.plugins_command == "list":
            if getattr(args, "json", False):
-                print(json.dumps(infos, indent=2))
+                print(json.dumps(reports, indent=2))
            else:
-                if not infos:
+                if not reports:
                    print("No plug-ins registered under group 'pyoco.tasks'.")
                else:
                    print("Discovered plug-ins:")
-                    for info in infos:
+                    for info in reports:
                        mod = info.get("module") or info.get("value")
                        print(f" - {info.get('name')} ({mod})")
+                        if info.get("error"):
+                            print(f" ⚠️ error: {info['error']}")
+                            continue
+                        for task in info.get("tasks", []):
+                            warn_msg = "; ".join(task.get("warnings", [])) or "ok"
+                            print(f" • {task['name']} [{task['origin']}] ({warn_msg})")
+                        for warn in info.get("warnings", []):
+                            print(f" ⚠️ {warn}")
+        elif args.plugins_command == "lint":
+            issues = []
+            for info in reports:
+                prefix = info["name"]
+                if info.get("error"):
+                    issues.append(f"{prefix}: {info['error']}")
+                    continue
+                for warn in info.get("warnings", []):
+                    issues.append(f"{prefix}: {warn}")
+                for task in info.get("tasks", []):
+                    for warn in task.get("warnings", []):
+                        issues.append(f"{prefix}.{task['name']}: {warn}")
+            payload = {"issues": issues, "reports": reports}
+            if getattr(args, "json", False):
+                print(json.dumps(payload, indent=2))
+            else:
+                if not issues:
+                    print("✅ All plug-ins look good.")
+                else:
+                    print("⚠️ Plug-in issues found:")
+                    for issue in issues:
+                        print(f" - {issue}")
+            if issues:
+                sys.exit(1)
+        else:
+            plugins_parser.print_help()
+        return
+
+    if args.command == "support":
+        if not args.support_command:
+            support_parser.print_help()
+            sys.exit(1)
+        filters = _build_support_filters(args)
+        try:
+            content = SupportInfoService().build(
+                kind=args.support_command,
+                config_path=args.config,
+                format=args.format,
+                filters=filters or None,
+                output_path=args.output,
+            )
+        except SupportInfoError as exc:
+            _print_support_error(exc)
+            sys.exit(1)
+        if not args.output:
+            print(content)
         return
 
     if args.command == "server":
@@ -368,6 +457,45 @@ def main():
        sys.exit(2 if args.dry_run else 1)
    return
 
+def _collect_plugin_reports():
+    dummy = SimpleNamespace(tasks={})
+    loader = TaskLoader(dummy)
+    loader.load()
+    return loader.plugin_reports
+
+
+def _build_support_filters(args):
+    filters = {}
+    if getattr(args, "name", None):
+        value = args.name
+        filters["name"] = value if isinstance(value, list) else [value]
+    if getattr(args, "origin", None):
+        filters["origin"] = args.origin
+    if getattr(args, "tag", None):
+        filters["tag"] = args.tag
+    return filters
+
+
+def _print_support_error(exc: SupportInfoError) -> None:
+    if isinstance(exc, InvalidFormatError):
+        print(f"Invalid format: {exc.format}")
+        return
+    if isinstance(exc, TaskNotFoundError):
+        print(f"Task not found: {exc.name}")
+        return
+    if isinstance(exc, OutputWriteError):
+        print(f"Failed to write output: {exc.path}")
+        return
+    if isinstance(exc, InvalidFilterError):
+        print(f"Invalid filter: {exc.filter_value}")
+        return
+    if isinstance(exc, MissingTaskMetadataError):
+        fields = ",".join(exc.fields)
+        print(f"Missing task metadata: {exc.name} fields={fields}")
+        return
+    print(f"Error: {exc}")
+
+
 def _stream_logs(client, args):
    seen_seq = -1
    follow = args.follow
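
For orientation, a minimal sketch of what the new `support` command wires together, using the same `SupportInfoService` call shown in the handler above; the flow.yaml path and the `demo_plugin` origin are placeholders, not values from the package.

```python
# Sketch only: programmatic equivalent of `pyoco support tasks --config flow.yaml --format md`.
from pyoco.support.service import SupportInfoService
from pyoco.core.exceptions import SupportInfoError

try:
    content = SupportInfoService().build(
        kind="tasks",                          # "tasks", "task", or "guide"
        config_path="flow.yaml",               # placeholder path
        format="md",                           # "prompt", "json", or "md"
        filters={"origin": ["demo_plugin"]},   # optional; keys: name / origin / tag
    )
    print(content)
except SupportInfoError as exc:
    print(f"Error: {exc}")
```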
pyoco/core/exceptions.py CHANGED
@@ -13,3 +13,39 @@ class SwitchNoMatch(ControlFlowError):
     def __init__(self, expression: str):
         super().__init__(f"Switch expression '{expression}' did not match any case.")
         self.expression = expression
+
+
+class SupportInfoError(Exception):
+    """Base error for support info generation."""
+
+
+class InvalidFormatError(SupportInfoError):
+    def __init__(self, format: str):
+        self.format = format
+        super().__init__(f"Invalid format: {format}")
+
+
+class TaskNotFoundError(SupportInfoError):
+    def __init__(self, name: str):
+        self.name = name
+        super().__init__(f"Task not found: {name}")
+
+
+class InvalidFilterError(SupportInfoError):
+    def __init__(self, filter_value: str):
+        self.filter_value = filter_value
+        super().__init__(f"Invalid filter: {filter_value}")
+
+
+class OutputWriteError(SupportInfoError):
+    def __init__(self, path: str):
+        self.path = path
+        super().__init__(f"Failed to write output: {path}")
+
+
+class MissingTaskMetadataError(SupportInfoError):
+    def __init__(self, name: str, fields: list[str]):
+        self.name = name
+        self.fields = fields
+        field_list = ",".join(fields)
+        super().__init__(f"Missing task metadata: {name} fields={field_list}")
pyoco/core/models.py CHANGED
@@ -238,11 +238,10 @@ class Flow:
             # So `flow >> (A | B)` just adds A and B.
             # Then `(A | B) >> C` is handled by Branch.
             pass
-
-        # Update tail
+
         if new_tasks:
             self._tail = set(new_tasks)
-
+
         return self
 
     def add_task(self, task: Task):
@@ -284,3 +283,45 @@ class Flow:
             tail_task.dependents.add(task)
             task.dependencies.add(tail_task)
         self._tail = {task}
+
+
+@dataclass
+class TaskIO:
+    name: str
+    type: str
+    required: bool
+    constraints: Optional[List[str]] = None
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "TaskIO":
+        return cls(
+            name=data.get("name"),
+            type=data.get("type"),
+            required=data.get("required"),
+            constraints=data.get("constraints"),
+        )
+
+
+@dataclass
+class TaskInfo:
+    name: str
+    summary: str
+    inputs: List[TaskIO]
+    outputs: List[TaskIO]
+    origin: Optional[str] = None
+    tags: Optional[List[str]] = None
+
+
+@dataclass
+class SupportFilters:
+    name: Optional[List[str]] = None
+    origin: Optional[List[str]] = None
+    tag: Optional[List[str]] = None
+
+
+@dataclass
+class SupportInfo:
+    kind: str
+    format: str
+    content: str
+    filters: SupportFilters
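
The new dataclasses are plain containers; a short sketch of assembling task metadata with them (the field values are illustrative, not from the package):

```python
# Sketch: building the metadata containers added above; values are made up.
from pyoco.core.models import TaskIO, TaskInfo, SupportFilters

info = TaskInfo(
    name="resize_images",
    summary="Resize input images to a fixed resolution.",
    inputs=[TaskIO(name="src_dir", type="str", required=True)],
    outputs=[TaskIO(name="dst_dir", type="str", required=True)],
    origin="example_plugin",
    tags=["preprocessing"],
)
filters = SupportFilters(tag=["preprocessing"])
print(info.name, filters.tag)
```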
pyoco/discovery/loader.py CHANGED
@@ -1,6 +1,5 @@
 import importlib
-import pkgutil
-import sys
+import os
 from typing import Dict, List, Any, Set
 from ..core.models import Task
 from ..dsl.syntax import TaskWrapper
@@ -11,30 +10,28 @@ class TaskLoader:
         self.config = config
         self.strict = strict
         self.tasks: Dict[str, Task] = {}
+        self.task_infos: Dict[str, Any] = {}
         self._explicit_tasks: Set[str] = set()
         self.plugin_reports: List[Dict[str, Any]] = []
 
     def load(self):
         # Load explicitly defined tasks in config FIRST (Higher priority)
         for task_name, task_conf in self.config.tasks.items():
-            if task_conf.callable:
-                self._load_explicit_task(task_name, task_conf)
+            callable_path = self._conf_get(task_conf, "callable")
+            if callable_path:
+                self._load_explicit_task(task_name, task_conf, callable_path)
                 self._explicit_tasks.add(task_name)
 
-        # Load from packages
-        for package in self.config.discovery.packages:
-            self._load_package(package)
-
-        # Load from entry points (simplified)
-        for ep in self.config.discovery.entry_points:
-            self._load_module(ep)
-
-        # Load from glob modules
-        for pattern in self.config.discovery.glob_modules:
-            self._load_glob_modules(pattern)
+        self._load_env_modules()
 
         self._load_entry_point_plugins()
 
+    def _load_env_modules(self) -> None:
+        raw = os.getenv("PYOCO_DISCOVERY_MODULES", "")
+        modules = [item.strip() for item in raw.replace(",", " ").split() if item.strip()]
+        for module_name in modules:
+            self._load_module(module_name)
+
     def _register_task(self, name: str, task: Task):
         if name in self.tasks:
             if name in self._explicit_tasks:
@@ -51,24 +48,27 @@ class TaskLoader:
         # Apply config overlay if exists
         if self.config and name in self.config.tasks:
             conf = self.config.tasks[name]
-            if not conf.callable:
-                if conf.inputs:
-                    task.inputs.update(conf.inputs)
-                if conf.outputs:
-                    task.outputs.extend(conf.outputs)
+            if not self._conf_get(conf, "callable"):
+                inputs = self._conf_get(conf, "inputs") or {}
+                outputs = self._conf_get(conf, "outputs") or []
+                if inputs:
+                    task.inputs.update(inputs)
+                if outputs:
+                    task.outputs.extend(outputs)
 
         self.tasks[name] = task
 
-    def _load_package(self, package_name: str):
-        try:
-            pkg = importlib.import_module(package_name)
-            if hasattr(pkg, '__path__'):
-                for _, name, _ in pkgutil.iter_modules(pkg.__path__, pkg.__name__ + "."):
-                    self._load_module(name)
+    def _register_task_info(self, info: Any):
+        name = getattr(info, "name", None)
+        if not name:
+            return
+        if name in self.task_infos:
+            msg = f"Task metadata '{name}' already defined."
+            if self.strict:
+                raise ValueError(f"{msg} (Strict mode enabled)")
             else:
-                self._scan_module(pkg)
-        except ImportError as e:
-            print(f"Warning: Could not import package {package_name}: {e}")
+                print(f"Warning: {msg} Overwriting.")
+        self.task_infos[name] = info
 
     def _load_module(self, module_name: str):
         try:
@@ -76,30 +76,6 @@ class TaskLoader:
             self._scan_module(mod)
         except ImportError as e:
             print(f"Warning: Could not import module {module_name}: {e}")
-
-    def _load_glob_modules(self, pattern: str):
-        import glob
-        import os
-
-        # Pattern is likely a file path glob, e.g. "jobs/*.py"
-        # We need to convert file paths to module paths
-        files = glob.glob(pattern, recursive=True)
-        for file_path in files:
-            if not file_path.endswith(".py"):
-                continue
-
-            # Convert path to module
-            # This is tricky without knowing the root.
-            # Assumption: running from root, and file path is relative to root.
-            # e.g. "myproject/tasks/foo.py" -> "myproject.tasks.foo"
-
-            rel_path = os.path.relpath(file_path)
-            if rel_path.startswith(".."):
-                # Out of tree, skip or warn
-                continue
-
-            module_name = rel_path.replace(os.sep, ".")[:-3] # strip .py
-            self._load_module(module_name)
 
     def _load_entry_point_plugins(self):
         entries = iter_entry_points()
@@ -109,6 +85,7 @@ class TaskLoader:
                 "value": ep.value,
                 "module": getattr(ep, "module", ""),
                 "tasks": [],
+                "warnings": [],
             }
             registry = PluginRegistry(self, ep.name)
            try:
@@ -116,7 +93,11 @@ class TaskLoader:
                if not callable(hook):
                    raise TypeError("Entry point must be callable")
                hook(registry)
-                info["tasks"] = list(registry.registered_names)
+                info["tasks"] = list(registry.records)
+                info["task_infos"] = list(registry.task_infos.values())
+                info["warnings"] = list(registry.warnings)
+                if not registry.records:
+                    info["warnings"].append("no tasks registered")
            except Exception as exc:
                info["error"] = str(exc)
                if self.strict:
@@ -130,13 +111,17 @@ class TaskLoader:
                 self._register_task(name, obj.task)
             elif isinstance(obj, Task):
                 self._register_task(name, obj)
-            elif callable(obj) and getattr(obj, '__pyoco_task__', False):
-                # Convert to Task if not already
-                pass
 
-    def _load_explicit_task(self, name: str, conf: Any):
+    def _conf_get(self, conf: Any, key: str):
+        if hasattr(conf, key):
+            return getattr(conf, key)
+        if isinstance(conf, dict):
+            return conf.get(key)
+        return None
+
+    def _load_explicit_task(self, name: str, conf: Any, callable_path: str):
         # Load callable
-        module_path, func_name = conf.callable.split(':')
+        module_path, func_name = callable_path.split(':')
         try:
             mod = importlib.import_module(module_path)
             obj = getattr(mod, func_name)
@@ -150,8 +135,8 @@ class TaskLoader:
 
             # Create a Task wrapper
             t = Task(func=real_func, name=name)
-            t.inputs = conf.inputs
-            t.outputs = conf.outputs
+            t.inputs = self._conf_get(conf, "inputs") or {}
+            t.outputs = self._conf_get(conf, "outputs") or []
            self.tasks[name] = t
        except (ImportError, AttributeError) as e:
            print(f"Error loading task {name}: {e}")
pyoco/discovery/plugins.py CHANGED
@@ -1,12 +1,20 @@
 from __future__ import annotations
 
 from importlib import metadata as importlib_metadata
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Dict, List, Optional, Type
 
-from ..core.models import Task
+from ..core.models import Task, TaskInfo, TaskIO
+from ..core.exceptions import MissingTaskMetadataError
 from ..dsl.syntax import TaskWrapper
 
 
+class CallablePluginTask(Task):
+    """Lightweight subclass so callable registrations still appear as Task-derived."""
+
+    def __init__(self, func: Callable, name: str):
+        super().__init__(func=func, name=name)
+
+
 def iter_entry_points(group: str = "pyoco.tasks"):
     eps = importlib_metadata.entry_points()
     if hasattr(eps, "select"):
@@ -32,6 +40,47 @@ class PluginRegistry:
         self.loader = loader
         self.provider_name = provider_name
         self.registered_names: List[str] = []
+        self.records: List[Dict[str, Any]] = []
+        self.warnings: List[str] = []
+        self.task_infos: Dict[str, TaskInfo] = {}
+
+    def task_info(
+        self,
+        *,
+        name: Optional[str] = None,
+        summary: Optional[str] = None,
+        inputs: Optional[List[Any]] = None,
+        outputs: Optional[List[Any]] = None,
+        tags: Optional[List[str]] = None,
+        origin: Optional[str] = None,
+    ) -> None:
+        missing = []
+        if not name:
+            missing.append("name")
+        if not summary:
+            missing.append("summary")
+        if inputs is None:
+            missing.append("inputs")
+        if outputs is None:
+            missing.append("outputs")
+        if missing:
+            raise MissingTaskMetadataError(name or "<unknown>", missing)
+
+        if not isinstance(inputs, list) or not isinstance(outputs, list):
+            raise MissingTaskMetadataError(name, ["inputs", "outputs"])
+        task_inputs = self._normalize_taskio_list(inputs, "inputs", name)
+        task_outputs = self._normalize_taskio_list(outputs, "outputs", name)
+        info = TaskInfo(
+            name=name,
+            summary=summary,
+            inputs=task_inputs,
+            outputs=task_outputs,
+            origin=origin or self.provider_name,
+            tags=tags or [],
+        )
+        self.task_infos[name] = info
+        if hasattr(self.loader, "_register_task_info"):
+            self.loader._register_task_info(info)
 
     def task(
         self,
@@ -70,23 +119,92 @@ class PluginRegistry:
         outputs: Optional[List[str]] = None,
     ) -> Task:
         task_name = name or getattr(func, "__name__", f"{self.provider_name}_task")
-        task = Task(func=func, name=task_name)
+        task = CallablePluginTask(func=func, name=task_name)
         if inputs:
             task.inputs.update(inputs)
         if outputs:
             task.outputs.extend(outputs)
-        self.loader._register_task(task_name, task)
-        self.registered_names.append(task_name)
+        self._finalize_task(task, origin="callable")
+        return task
+
+    def task_class(
+        self,
+        task_cls: Type[Task],
+        *args: Any,
+        name: Optional[str] = None,
+        **kwargs: Any,
+    ) -> Task:
+        if not issubclass(task_cls, Task):
+            raise TypeError(f"{task_cls} is not a Task subclass")
+        task = task_cls(*args, **kwargs)
+        if name:
+            task.name = name
+        self._finalize_task(task, origin="task_class")
         return task
 
     def add(self, obj: Any, *, name: Optional[str] = None) -> None:
         if isinstance(obj, TaskWrapper):
-            self.loader._register_task(name or obj.task.name, obj.task)
-            self.registered_names.append(name or obj.task.name)
+            task = obj.task
+            if name:
+                task.name = name
+            self._finalize_task(task, origin="wrapper")
         elif isinstance(obj, Task):
-            self.loader._register_task(name or obj.name, obj)
-            self.registered_names.append(name or obj.name)
+            if name:
+                obj.name = name
+            origin = "task_class" if obj.__class__ is not Task else "task"
+            self._finalize_task(obj, origin=origin)
         elif callable(obj):
            self.register_callable(obj, name=name)
        else:
            raise TypeError(f"Unsupported task object: {obj!r}")
+
+    def _finalize_task(self, task: Task, origin: str) -> None:
+        warnings = self._validate_task(task, origin)
+        self.loader._register_task(task.name, task)
+        self.registered_names.append(task.name)
+        self.records.append(
+            {
+                "name": task.name,
+                "origin": origin,
+                "class": task.__class__.__name__,
+                "warnings": warnings,
+            }
+        )
+        for msg in warnings:
+            self.warnings.append(f"{task.name}: {msg}")
+
+    def _validate_task(self, task: Task, origin: str) -> List[str]:
+        warnings: List[str] = []
+        if not getattr(task, "name", None):
+            generated = f"{self.provider_name}_{len(self.registered_names) + 1}"
+            task.name = generated
+            warnings.append(f"name missing; auto-assigned '{generated}'")
+        if not callable(getattr(task, "func", None)):
+            warnings.append("task.func is not callable")
+        if origin == "callable":
+            warnings.append("registered via callable; prefer Task subclass for extensibility")
+        if task.__class__ is Task and origin not in ("callable", "wrapper"):
+            warnings.append("plain Task instance detected; subclass Task for metadata support")
+        return warnings
+
+    def _normalize_taskio_list(self, items: List[Any], label: str, task_name: str) -> List[TaskIO]:
+        normalized: List[TaskIO] = []
+        for idx, item in enumerate(items):
+            if isinstance(item, TaskIO):
+                normalized.append(item)
+                continue
+            if isinstance(item, dict):
+                taskio = TaskIO.from_dict(item)
+                missing = []
+                if not taskio.name:
+                    missing.append(f"{label}[{idx}].name")
+                if not taskio.type:
+                    missing.append(f"{label}[{idx}].type")
+                if taskio.required is None:
+                    missing.append(f"{label}[{idx}].required")
+                if missing:
+                    raise MissingTaskMetadataError(task_name, missing)
+                normalized.append(taskio)
+                continue
+            raise MissingTaskMetadataError(task_name, [f"{label}[{idx}]"])
+        return normalized
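
A plug-in's entry-point hook receives a `PluginRegistry`. A sketch of a hook that registers a `Task` subclass and declares metadata through the new `task_info` API; the hook name `register`, the task class, and all field values are illustrative:

```python
# Sketch: an entry-point hook (group "pyoco.tasks") using the registry APIs added above.
from pyoco.core.models import Task
from pyoco.discovery.plugins import PluginRegistry

class ResizeTask(Task):  # Task subclass, preferred over bare callables
    def __init__(self):
        super().__init__(func=lambda ctx: None, name="resize_images")

def register(registry: PluginRegistry) -> None:  # referenced by the plug-in's entry point
    registry.task_class(ResizeTask)               # recorded with origin="task_class", no warnings
    registry.task_info(
        name="resize_images",
        summary="Resize input images to a fixed resolution.",
        inputs=[{"name": "src_dir", "type": "str", "required": True}],
        outputs=[{"name": "dst_dir", "type": "str", "required": True}],
        tags=["preprocessing"],
    )
```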
pyoco/schemas/config.py CHANGED
@@ -13,12 +13,6 @@ class FlowConfig:
     graph: str
     defaults: Dict[str, Any] = field(default_factory=dict)
 
-@dataclass
-class DiscoveryConfig:
-    entry_points: List[str] = field(default_factory=list)
-    packages: List[str] = field(default_factory=list)
-    glob_modules: List[str] = field(default_factory=list)
-
 @dataclass
 class RuntimeConfig:
     expose_env: List[str] = field(default_factory=list)
@@ -28,23 +22,27 @@ class PyocoConfig:
     version: int
     flows: Dict[str, FlowConfig]
     tasks: Dict[str, TaskConfig]
-    discovery: DiscoveryConfig = field(default_factory=DiscoveryConfig)
     runtime: RuntimeConfig = field(default_factory=RuntimeConfig)
 
     @classmethod
     def from_yaml(cls, path: str) -> 'PyocoConfig':
         with open(path, 'r') as f:
-            data = yaml.safe_load(f)
+            data = yaml.safe_load(f) or {}
 
         # Simple manual parsing/validation for MVP
         # In a real app, use pydantic or similar
 
         flows = {k: FlowConfig(**v) for k, v in data.get('flows', {}).items()}
         tasks = {k: TaskConfig(**v) for k, v in data.get('tasks', {}).items()}
-
-        disc_data = data.get('discovery', {})
-        discovery = DiscoveryConfig(**disc_data)
-
+
+        if "discovery" in data:
+            raise ValueError(
+                "Unsupported config key 'discovery'.\n"
+                "For safety, discovery scope is not configurable in flow.yaml.\n"
+                "Remove 'discovery' and use PYOCO_DISCOVERY_MODULES to import extra modules, "
+                "or define tasks explicitly via tasks.<name>.callable."
+            )
+
         run_data = data.get('runtime', {})
         runtime = RuntimeConfig(**run_data)
 
@@ -52,6 +50,5 @@ class PyocoConfig:
             version=data.get('version', 1),
             flows=flows,
             tasks=tasks,
-            discovery=discovery,
             runtime=runtime
         )
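
A sketch of the new rejection path: a `flow.yaml` that still carries a `discovery:` block now fails at parse time. The YAML content here is illustrative.

```python
# Sketch: the 'discovery' key in flow.yaml is rejected by PyocoConfig.from_yaml above.
import tempfile
from pyoco.schemas.config import PyocoConfig

legacy_yaml = """
version: 1
flows: {}
tasks: {}
discovery:
  packages: ["myapp.tasks"]
"""

with tempfile.NamedTemporaryFile("w", suffix=".yaml", delete=False) as f:
    f.write(legacy_yaml)
    path = f.name

try:
    PyocoConfig.from_yaml(path)
except ValueError as exc:
    print(exc)  # points to PYOCO_DISCOVERY_MODULES and tasks.<name>.callable instead
```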
pyoco/support/__init__.py ADDED
@@ -0,0 +1,21 @@
+from .service import SupportInfoService
+
+
+def build(
+    kind: str,
+    config_path: str,
+    format: str = "prompt",
+    filters=None,
+    output_path: str | None = None,
+):
+    service = SupportInfoService()
+    return service.build(
+        kind=kind,
+        config_path=config_path,
+        format=format,
+        filters=filters,
+        output_path=output_path,
+    )
+
+
+__all__ = ["SupportInfoService", "build"]
pyoco/support/collector.py ADDED
@@ -0,0 +1,56 @@
+from typing import List
+
+from ..core.exceptions import MissingTaskMetadataError, TaskNotFoundError
+from ..core.models import SupportFilters, TaskInfo
+from ..discovery.loader import TaskLoader
+from ..schemas.config import PyocoConfig
+from .filters import filters_label, normalize_filters, validate_filters
+
+
+class TaskInfoCollector:
+    def collect(self, config_path: str, filters: SupportFilters | None = None) -> List[TaskInfo]:
+        normalized = normalize_filters(filters)
+        validate_filters(normalized)
+
+        config = PyocoConfig.from_yaml(config_path)
+        loader = TaskLoader(config)
+        loader.load()
+
+        task_names = sorted(loader.tasks.keys())
+        if normalized.name:
+            task_names = [name for name in task_names if name in normalized.name]
+        if not task_names:
+            raise TaskNotFoundError(filters_label(normalized))
+
+        infos: List[TaskInfo] = []
+        missing: List[tuple[str, List[str]]] = []
+        for name in task_names:
+            info = loader.task_infos.get(name)
+            if not info:
+                missing.append((name, ["summary", "inputs", "outputs"]))
+                continue
+            missing_fields: List[str] = []
+            if not info.summary:
+                missing_fields.append("summary")
+            if info.inputs is None:
+                missing_fields.append("inputs")
+            if info.outputs is None:
+                missing_fields.append("outputs")
+            if missing_fields:
+                missing.append((name, missing_fields))
+                continue
+            infos.append(info)
+
+        if missing:
+            name, fields = missing[0]
+            raise MissingTaskMetadataError(name, fields)
+
+        if normalized.origin:
+            infos = [info for info in infos if info.origin in normalized.origin]
+        if normalized.tag:
+            infos = [info for info in infos if info.tags and any(tag in info.tags for tag in normalized.tag)]
+
+        if not infos:
+            raise TaskNotFoundError(filters_label(normalized))
+
+        return infos
pyoco/support/filters.py ADDED
@@ -0,0 +1,56 @@
+from typing import Any, Iterable, List
+
+from ..core.exceptions import InvalidFilterError
+from ..core.models import SupportFilters
+
+
+def normalize_filters(filters: Any) -> SupportFilters:
+    if filters is None:
+        return SupportFilters()
+    if isinstance(filters, SupportFilters):
+        return filters
+    if isinstance(filters, dict):
+        allowed = {"name", "origin", "tag"}
+        extra = set(filters.keys()) - allowed
+        if extra:
+            raise InvalidFilterError(",".join(sorted(extra)))
+        return SupportFilters(
+            name=_normalize_list(filters.get("name")),
+            origin=_normalize_list(filters.get("origin")),
+            tag=_normalize_list(filters.get("tag")),
+        )
+    raise InvalidFilterError(str(filters))
+
+
+def validate_filters(filters: SupportFilters) -> None:
+    for label, value in ("name", filters.name), ("origin", filters.origin), ("tag", filters.tag):
+        if value is None:
+            continue
+        if not isinstance(value, list) or not value:
+            raise InvalidFilterError(label)
+        for item in value:
+            if not isinstance(item, str) or not item.strip():
+                raise InvalidFilterError(label)
+
+
+def filters_label(filters: SupportFilters) -> str:
+    parts: List[str] = []
+    if filters.name:
+        parts.append(f"name={','.join(filters.name)}")
+    if filters.origin:
+        parts.append(f"origin={','.join(filters.origin)}")
+    if filters.tag:
+        parts.append(f"tag={','.join(filters.tag)}")
+    if not parts:
+        return "*"
+    return " ".join(parts)
+
+
+def _normalize_list(value: Any) -> List[str] | None:
+    if value is None:
+        return None
+    if isinstance(value, str):
+        return [value]
+    if isinstance(value, Iterable):
+        return list(value)
+    return None
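
Filter handling is strict about shape; a sketch of the accepted inputs and the label used in `TaskNotFoundError` messages:

```python
# Sketch: behavior of the filter helpers defined above.
from pyoco.support.filters import normalize_filters, filters_label
from pyoco.core.exceptions import InvalidFilterError

f = normalize_filters({"name": "train", "tag": ["vision"]})
print(f.name)            # ['train'] -- a bare string is wrapped in a list
print(filters_label(f))  # "name=train tag=vision"

try:
    normalize_filters({"owner": ["alice"]})  # unknown key
except InvalidFilterError as exc:
    print(exc.filter_value)                  # "owner"
```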
pyoco/support/renderer.py ADDED
@@ -0,0 +1,188 @@
+import json
+from typing import Dict, List, Tuple
+
+from ..core.exceptions import InvalidFormatError
+from ..core.models import TaskInfo
+
+
+class SupportInfoRenderer:
+    def render(self, kind: str, tasks: List[TaskInfo] | None, format: str) -> str:
+        if format not in ("prompt", "json", "md"):
+            raise InvalidFormatError(format)
+        if kind == "guide":
+            return self._render_guide(format)
+        if kind not in ("tasks", "task"):
+            raise ValueError(f"Unknown support kind: {kind}")
+        return self._render_tasks(kind, tasks or [], format)
+
+    def _render_tasks(self, kind: str, tasks: List[TaskInfo], format: str) -> str:
+        groups = self._group_tasks(tasks)
+        if format == "json":
+            payload = {
+                "kind": kind,
+                "groups": [
+                    {
+                        "origin": origin,
+                        "tasks": [self._task_to_dict(t) for t in items],
+                    }
+                    for origin, items in groups
+                ],
+            }
+            return json.dumps(payload, indent=2)
+        if format == "md":
+            return self._render_tasks_md(kind, groups)
+        return self._render_tasks_prompt(kind, groups)
+
+    def _render_tasks_prompt(self, kind: str, groups: List[Tuple[str, List[TaskInfo]]]) -> str:
+        lines = [f"Pyoco support ({kind})", ""]
+        for origin, items in groups:
+            lines.append(f"Origin: {origin}")
+            for task in items:
+                lines.append(f"- name: {task.name}")
+                lines.append(f" summary: {task.summary}")
+                lines.append(" inputs:")
+                for io in task.inputs:
+                    lines.append(
+                        f" - {io.name} ({io.type}, required={io.required})"
+                    )
+                lines.append(" outputs:")
+                for io in task.outputs:
+                    lines.append(
+                        f" - {io.name} ({io.type}, required={io.required})"
+                    )
+                if task.tags:
+                    lines.append(f" tags: {', '.join(task.tags)}")
+                lines.append("")
+        return "\n".join(lines).rstrip()
+
+    def _render_tasks_md(self, kind: str, groups: List[Tuple[str, List[TaskInfo]]]) -> str:
+        lines = [f"# Pyoco support ({kind})", ""]
+        for origin, items in groups:
+            lines.append(f"## Origin: {origin}")
+            for task in items:
+                lines.append(f"### {task.name}")
+                lines.append(task.summary)
+                lines.append("")
+                lines.append("**Inputs**")
+                lines.append("")
+                if task.inputs:
+                    lines.append("| name | type | required | constraints |")
+                    lines.append("|---|---|---|---|")
+                    for io in task.inputs:
+                        constraints = ", ".join(io.constraints) if io.constraints else ""
+                        lines.append(
+                            f"| {io.name} | {io.type} | {io.required} | {constraints} |"
+                        )
+                else:
+                    lines.append("none")
+                lines.append("")
+                lines.append("**Outputs**")
+                lines.append("")
+                if task.outputs:
+                    lines.append("| name | type | required | constraints |")
+                    lines.append("|---|---|---|---|")
+                    for io in task.outputs:
+                        constraints = ", ".join(io.constraints) if io.constraints else ""
+                        lines.append(
+                            f"| {io.name} | {io.type} | {io.required} | {constraints} |"
+                        )
+                else:
+                    lines.append("none")
+                if task.tags:
+                    lines.append("")
+                    lines.append(f"**Tags**: {', '.join(task.tags)}")
+                lines.append("")
+        return "\n".join(lines).rstrip()
+
+    def _task_to_dict(self, task: TaskInfo) -> Dict[str, object]:
+        return {
+            "name": task.name,
+            "summary": task.summary,
+            "inputs": [self._taskio_to_dict(io) for io in task.inputs],
+            "outputs": [self._taskio_to_dict(io) for io in task.outputs],
+            "origin": task.origin,
+            "tags": task.tags or [],
+        }
+
+    def _taskio_to_dict(self, io) -> Dict[str, object]:
+        return {
+            "name": io.name,
+            "type": io.type,
+            "required": io.required,
+            "constraints": io.constraints or [],
+        }
+
+    def _render_guide(self, format: str) -> str:
+        guide = self._guide_payload()
+        if format == "json":
+            return json.dumps({"kind": "guide", **guide}, indent=2)
+        if format == "md":
+            return self._render_guide_md(guide)
+        return self._render_guide_prompt(guide)
+
+    def _guide_payload(self) -> Dict[str, str]:
+        template = (
+            "version: 1\n"
+            "flows:\n"
+            " main:\n"
+            " graph: \"task_a >> task_b\"\n"
+            " defaults:\n"
+            " seed: \"bar\"\n"
+            "tasks:\n"
+            " task_a:\n"
+            " callable: \"pkg.module:task_a\"\n"
+            " inputs:\n"
+            " x: \"$ctx.params.seed\"\n"
+            " outputs:\n"
+            " - \"params.shared\"\n"
+            " task_b:\n"
+            " callable: \"pkg.module:task_b\"\n"
+            " inputs:\n"
+            " input_a: \"$ctx.params.shared\"\n"
+        )
+        graph = (
+            "- Use >> to define dependencies: A >> B means B depends on A.\n"
+            "- Use | to define OR branches: (A | B) >> C means C waits for any.\n"
+            "- Wrap with flow variable in graph string (exec/eval).\n"
+        )
+        inputs = (
+            "- Prefer $ctx.params.<key> to connect tasks via shared params.\n"
+            "- If values would be overwritten or you need an explicit upstream output, use $node.<task_name>.output.\n"
+            "- Use $env.<KEY> to reference allowed environment variables.\n"
+            "- Task discovery is not configured in flow.yaml. Use explicit tasks.callable, entry-point plugins (group 'pyoco.tasks'),\n"
+            " or set PYOCO_DISCOVERY_MODULES to import extra modules. The 'discovery' config key is not supported.\n"
+        )
+        return {"template": template, "graph_syntax": graph, "input_refs": inputs}
+
+    def _render_guide_prompt(self, guide: Dict[str, str]) -> str:
+        return (
+            "Pyoco flow.yaml guide\n\n"
+            "Template:\n"
+            f"{guide['template']}\n"
+            "Graph syntax:\n"
+            f"{guide['graph_syntax']}\n"
+            "Input references:\n"
+            f"{guide['input_refs']}"
+        ).rstrip()
+
+    def _render_guide_md(self, guide: Dict[str, str]) -> str:
+        return (
+            "# Pyoco flow.yaml guide\n\n"
+            "## Template\n\n"
+            "```yaml\n"
+            f"{guide['template']}"
+            "```\n\n"
+            "## Graph syntax\n\n"
+            f"{guide['graph_syntax']}\n"
+            "## Input references\n\n"
+            f"{guide['input_refs']}"
+        ).rstrip()
+
+    def _group_tasks(self, tasks: List[TaskInfo]) -> List[Tuple[str, List[TaskInfo]]]:
+        groups: Dict[str, List[TaskInfo]] = {}
+        for task in tasks:
+            origin = task.origin or "unknown"
+            groups.setdefault(origin, []).append(task)
+        for items in groups.values():
+            items.sort(key=lambda t: t.name)
+        return [(origin, groups[origin]) for origin in sorted(groups.keys())]
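
The renderer is pure string formatting over `TaskInfo` objects, so it can be exercised without a flow.yaml; the task shown here is made up:

```python
# Sketch: rendering TaskInfo objects with the renderer defined above.
from pyoco.core.models import TaskIO, TaskInfo
from pyoco.support.renderer import SupportInfoRenderer

info = TaskInfo(
    name="resize_images",
    summary="Resize input images to a fixed resolution.",
    inputs=[TaskIO(name="src_dir", type="str", required=True)],
    outputs=[TaskIO(name="dst_dir", type="str", required=True)],
    origin="example_plugin",
    tags=["preprocessing"],
)

renderer = SupportInfoRenderer()
print(renderer.render("tasks", [info], "md"))    # grouped by origin, one section per task
print(renderer.render("guide", None, "prompt"))  # flow.yaml template plus syntax notes
```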
pyoco/support/service.py ADDED
@@ -0,0 +1,42 @@
+from ..core.exceptions import InvalidFilterError
+from ..core.models import SupportFilters
+from .collector import TaskInfoCollector
+from .filters import normalize_filters, validate_filters
+from .renderer import SupportInfoRenderer
+from .writer import SupportInfoWriter
+
+
+class SupportInfoService:
+    def __init__(
+        self,
+        collector: TaskInfoCollector | None = None,
+        renderer: SupportInfoRenderer | None = None,
+        writer: SupportInfoWriter | None = None,
+    ) -> None:
+        self.collector = collector or TaskInfoCollector()
+        self.renderer = renderer or SupportInfoRenderer()
+        self.writer = writer or SupportInfoWriter()
+
+    def build(
+        self,
+        *,
+        kind: str,
+        config_path: str,
+        format: str = "prompt",
+        filters: SupportFilters | None = None,
+        output_path: str | None = None,
+    ) -> str:
+        normalized = normalize_filters(filters)
+        validate_filters(normalized)
+
+        if kind == "task" and not normalized.name:
+            raise InvalidFilterError("name")
+
+        tasks = []
+        if kind in ("tasks", "task"):
+            tasks = self.collector.collect(config_path, normalized)
+
+        content = self.renderer.render(kind, tasks, format)
+        if output_path:
+            self.writer.write(content, output_path)
+        return content
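
Collector, renderer, and writer are injected through the constructor, which keeps the service easy to stub; a sketch with a hypothetical fake collector so `build` runs without a real flow.yaml:

```python
# Sketch: injecting a stub collector into SupportInfoService; StubCollector is hypothetical.
from pyoco.core.models import TaskIO, TaskInfo
from pyoco.support.service import SupportInfoService

class StubCollector:  # duck-typed stand-in for TaskInfoCollector
    def collect(self, config_path, filters=None):
        return [
            TaskInfo(
                name="demo",
                summary="Demo task.",
                inputs=[TaskIO(name="x", type="int", required=True)],
                outputs=[],
                origin="stub",
            )
        ]

service = SupportInfoService(collector=StubCollector())
print(service.build(kind="tasks", config_path="unused.yaml", format="json"))
```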
pyoco/support/writer.py ADDED
@@ -0,0 +1,15 @@
+import os
+
+from ..core.exceptions import OutputWriteError
+
+
+class SupportInfoWriter:
+    def write(self, content: str, output_path: str) -> None:
+        try:
+            parent = os.path.dirname(output_path)
+            if parent:
+                os.makedirs(parent, exist_ok=True)
+            with open(output_path, "w", encoding="utf-8") as f:
+                f.write(content)
+        except Exception as exc:
+            raise OutputWriteError(output_path) from exc
pyoco-0.5.0.dist-info/METADATA → pyoco-0.6.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyoco
-Version: 0.5.0
+Version: 0.6.0
 Summary: A workflow engine with sugar syntax
 Requires-Python: >=3.10
 Description-Content-Type: text/markdown
@@ -133,22 +133,29 @@ Or via CLI flag:
 pyoco run --non-cute ...
 ```
 
-## 🔭 Observability Bridge (v0.5)
+## 🔭 Observability / Server (Archived)
 
-- `/metrics` exposes Prometheus counters (`pyoco_runs_total`, `pyoco_runs_in_progress`) and histograms (`pyoco_task_duration_seconds`, `pyoco_run_duration_seconds`). Point Grafana/Prometheus at it to watch pipelines without opening sockets.
-- `/runs` now accepts `status`, `flow`, `limit` query params; `/runs/{id}/logs?tail=100` fetches only the latest snippets for dashboards.
-- Webhook notifications fire when runs COMPLETE/FAIL—configure via `PYOCO_WEBHOOK_*` env vars and forward to Slack or your alerting stack.
-- Import `docs/grafana_pyoco_cute.json` for a lavender/orange starter dashboard (3 panels: in-progress count, completion trend, per-flow latency).
-- See [docs/observability.md](docs/observability.md) for detailed instructions.
+Observability and server-related docs are archived and out of scope for the current requirements.
+See `docs/archive/observability.md` and `docs/archive/roadmap.md`.
 
 ## 🧩 Plug-ins
 
-Need to share domain-specific tasks? Publish an entry point under `pyoco.tasks` and pyoco will auto-load it. See [docs/plugins.md](docs/plugins.md) for the `PluginRegistry` decorator, example `pyproject.toml`, and `pyoco plugins list` CLI helper.
+Need to share domain-specific tasks? Publish an entry point under `pyoco.tasks` and pyoco will auto-load it. We recommend **Task subclasses first** (callables still work with warnings). See [docs/plugins.md](docs/plugins.md) for examples, quickstart, and `pyoco plugins list` / `pyoco plugins lint`.
+
+**Big data note:** pass handles, not copies. For large tensors/images, stash paths or handles in `ctx.artifacts`/`ctx.scratch` and let downstream tasks materialize only when needed. For lazy pipelines (e.g., DataPipe), log the pipeline when you actually iterate (typically the training task) instead of materializing upstream.
+
+## 🧭 Task Discovery (Security)
+
+Pyoco does not allow configuring discovery scope in `flow.yaml` (the `discovery:` key is rejected) to reduce the risk of importing unexpected code.
+
+- **Entry point plug-ins**: auto-loaded from `importlib.metadata.entry_points(group="pyoco.tasks")`
+- **Extra imports (ops-controlled)**: set `PYOCO_DISCOVERY_MODULES` (comma/space-separated module names), e.g. `PYOCO_DISCOVERY_MODULES=tasks,myapp.extra_tasks`
+- **Explicit tasks**: prefer `tasks.<name>.callable` in `flow.yaml` (see tutorials)
 
 ## 📚 Documentation
 
 - [Tutorials](docs/tutorial/index.md)
-- [Roadmap](docs/roadmap.md)
+- [Roadmap (Archived)](docs/archive/roadmap.md)
 
 ## 💖 Contributing
 
pyoco-0.5.0.dist-info/RECORD → pyoco-0.6.0.dist-info/RECORD RENAMED
@@ -1,33 +1,39 @@
-pyoco/__init__.py,sha256=E2pgDGvGRSVon7dSqIM4UD55LgVpf4jiZZA-70kOcuw,409
+pyoco/__init__.py,sha256=PJIk0A3NCknNtp7hiC-Q4xpTmpVQ_AD2KbrQLcOT_1s,442
 pyoco/client.py,sha256=Y95NmMsOKTJ9AZJEg_OzHamC_w32YWmSVS653mpqHVQ,3141
 pyoco/socketless_reset.py,sha256=KsAF4I23_Kbhy9fIWFARzV5QaIOQqbl0U0yPb8a34sM,129
 pyoco/cli/entry.py,sha256=zPIG0Gx-cFO8Cf1Z3wD3Ifz_2sHaryHZ6mCRri2WEqE,93
-pyoco/cli/main.py,sha256=W3U-T4SliJHy4wZ70QoN2c9Mep--XxlEa8YkHe9DLuU,16515
+pyoco/cli/main.py,sha256=-sVxldl1fOVedf2KUs2aCbzIQNPgXuRTrr_t5Pbavyo,22126
 pyoco/core/base_task.py,sha256=z7hOFntAPv4yCADapS-fhtLe5eWqaO8k3T1r05YEEUE,2106
 pyoco/core/context.py,sha256=TeCUriOmg7qZB3nMRu8HPdPshMW6pMVx48xZLY6a-A4,6524
 pyoco/core/engine.py,sha256=iX2Id8ryFt-xeZgraqnF3uqkI6ubiZt5NBNYWX6Qv1s,24166
-pyoco/core/exceptions.py,sha256=G82KY8PCnAhp3IDDIG8--Uh3EfVa192zei3l6ihfShI,565
-pyoco/core/models.py,sha256=8faYURF43-7IebqzTIorHxpCeC4TZfoXWjGyPNaWhyI,10501
-pyoco/discovery/loader.py,sha256=vC729i1bR358u6YwiVX2uonZ80WxjFGFqJRlhX89Sf0,5942
-pyoco/discovery/plugins.py,sha256=pNMWxS03jWPuUV2tApGch2VL40EyKLOOeYT-OPBBBRQ,2806
+pyoco/core/exceptions.py,sha256=gQbaGMJIlyidqQYj_NEFj-lxQmDtlfj659d_uLEBVY8,1606
+pyoco/core/models.py,sha256=Abuu2UX5SsXXjJKopxDsbQCIAdW82GbdnptsK1N4l3A,11280
+pyoco/discovery/loader.py,sha256=SjFZ0joo4qLQf8pHPrSfn4kI0Ip30G_N9ia8BJzuDUM,5470
+pyoco/discovery/plugins.py,sha256=st95xmOiDNXtpq7fIl5_wqvvDfXIJU5fNtUeTi7vRJM,7302
 pyoco/dsl/__init__.py,sha256=xWdb60pSRL8lNFk4GHF3EJ4hon0uiWqpv264g6-4gdg,45
 pyoco/dsl/expressions.py,sha256=BtEIxPSf3BU-wPNEicIqX_TVZ4fAnlWGrzrrfc6pU1g,4875
 pyoco/dsl/nodes.py,sha256=qDiIEsAJHnD8dpuOd-Rpy6OORCW6KDW_BdYiA2BKu18,1041
 pyoco/dsl/syntax.py,sha256=kYP5uGbwxmkSd_zeSksax8iWm_7UlRW5JxE9_DoSqbk,8638
 pyoco/dsl/validator.py,sha256=HXjcc-GzjH72YByaNxAg_7YOZsVsFDFnUaenVwd5PbY,3576
-pyoco/schemas/config.py,sha256=KkGZK3GxTHoIHEGb4f4k8GE2W-aBN4iPzmc_HrwuROU,1735
+pyoco/schemas/config.py,sha256=LBCGRSPtacd4a10EXXkk7wAL4k-d-zKPam-pIaYbmJE,1701
 pyoco/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyoco/server/api.py,sha256=vu2ieDZgHbi8cysO2rS-lcxqWiSQprIcqRn6GkwTtKo,3890
 pyoco/server/metrics.py,sha256=92sHZKka_yBNBGlHZgRIteywx97aoTa-MnXh3UJ0HJY,2952
 pyoco/server/models.py,sha256=ir5AuvyXQigmaynA7bS_0RNJcJo2VtpJl0GjRZrj2rU,786
 pyoco/server/store.py,sha256=ITYAV1QlPWDnceywqjjJZW9E0CyocFlPmqqfjcoM-wA,9133
 pyoco/server/webhook.py,sha256=fBSLWTDN7sIWSK0AUVuiCSdVVBFV_AyP-XEKOcdMXmQ,3643
+pyoco/support/__init__.py,sha256=MofLlxywBorHrzOgAcs5cFTSAokNcPDVyQi2sr8WPdM,419
+pyoco/support/collector.py,sha256=jeT7A9fMhxmx0D9aqAqWiWYdaSqxkrvUIX-KgVVLJto,2074
+pyoco/support/filters.py,sha256=7TGkB8MNjfLm45kwXfD90jJ1N3GzTVt_jo6paeEEzBM,1805
+pyoco/support/renderer.py,sha256=vFrOW10SSjE-WMqgGj7gci28jv-NXHjBocB1E_luQxA,7685
+pyoco/support/service.py,sha256=qMi9zUrQ43dsuk98_Vu9_xu4E7ll8pTiSxNQp320eho,1352
+pyoco/support/writer.py,sha256=FmRo2XWlwKzCcJ3StRiAnKC4OsOSSls-raCDwKYpXlo,470
 pyoco/trace/backend.py,sha256=a1css94_lhO4SGSPHZ1f59HJqFQtZ5Sjx09Kw7v5bsk,617
 pyoco/trace/console.py,sha256=I-BcF405OGLWoacJWeke8vTT9M5JxSBpJL-NazVyxb4,1742
 pyoco/worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyoco/worker/client.py,sha256=862KccXRtfG7zd9ZSLqrpVSV6ev8zeuEHHdtAfLghiM,1557
 pyoco/worker/runner.py,sha256=hyKn5NbuIuF-109CnQbYc8laKbWmwe9ChaLrNUtsVIg,6367
-pyoco-0.5.0.dist-info/METADATA,sha256=SEow4x2y_O6SqeVuEU-_7dT12F-XA24sYVD_hMV6TQM,5251
-pyoco-0.5.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-pyoco-0.5.0.dist-info/top_level.txt,sha256=2JRVocfaWRbX1VJ3zq1c5wQaOK6fMARS6ptVFWyvRF4,6
-pyoco-0.5.0.dist-info/RECORD,,
+pyoco-0.6.0.dist-info/METADATA,sha256=qDs09R2M5X9kSVKokOQsjJ8TES_Cfzl3nGWCglgMN4o,5588
+pyoco-0.6.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+pyoco-0.6.0.dist-info/top_level.txt,sha256=2JRVocfaWRbX1VJ3zq1c5wQaOK6fMARS6ptVFWyvRF4,6
+pyoco-0.6.0.dist-info/RECORD,,
pyoco-0.5.0.dist-info/WHEEL → pyoco-0.6.0.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.9.0)
+Generator: setuptools (80.10.2)
 Root-Is-Purelib: true
 Tag: py3-none-any
 