pyoco 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyoco/server/store.py ADDED
@@ -0,0 +1,82 @@
1
+ import uuid
2
+ import time
3
+ from typing import Dict, List, Optional
4
+ from ..core.models import RunContext, RunStatus
5
+
6
class StateStore:
    """In-memory store of run state plus a FIFO queue of pending runs.

    Not persistent and not process-safe; intended for the single-server
    v0.3.0 MVP deployment.
    """

    def __init__(self):
        # run_id -> RunContext for every run ever created.
        self.runs: Dict[str, RunContext] = {}
        # FIFO queue of run_ids waiting to be picked up by a worker.
        self.queue: List[str] = []

    def create_run(self, flow_name: str, params: Dict) -> RunContext:
        """Register a new PENDING run and enqueue it for execution."""
        run_id = str(uuid.uuid4())
        run_ctx = RunContext(
            run_id=run_id,
            status=RunStatus.PENDING,
            start_time=time.time()
        )
        # RunContext does not define flow_name/params in core.models, but
        # the worker needs both to execute the flow, so attach them
        # dynamically. TODO: promote these to real RunContext fields.
        run_ctx.flow_name = flow_name
        run_ctx.params = params

        self.runs[run_id] = run_ctx
        self.queue.append(run_id)
        return run_ctx

    def get_run(self, run_id: str) -> Optional[RunContext]:
        """Return the run with *run_id*, or None if unknown."""
        return self.runs.get(run_id)

    def list_runs(self) -> List[RunContext]:
        """Return every known run, in any status."""
        return list(self.runs.values())

    def dequeue(self, tags: List[str] = None) -> Optional[RunContext]:
        """Pop and return the oldest PENDING run, or None if there is none.

        Simple FIFO; *tags* are accepted for API compatibility but ignored
        in the v0.3.0 MVP. Runs cancelled while queued are no longer
        PENDING and are skipped.
        """
        for i, run_id in enumerate(self.queue):
            run = self.runs.get(run_id)
            if run and run.status == RunStatus.PENDING:
                self.queue.pop(i)
                return run
        return None

    def update_run(self, run_id: str, status: RunStatus = None, task_states: Dict = None):
        """Apply a status and/or task-state update reported by a worker.

        Unknown run_ids are ignored. A worker-reported RUNNING never
        overwrites a server-side CANCELLING (the cancellation wins).
        """
        run = self.runs.get(run_id)
        if not run:
            return

        if status is not None:
            # Don't let a stale RUNNING heartbeat undo a cancellation.
            if not (run.status == RunStatus.CANCELLING and status == RunStatus.RUNNING):
                run.status = status

            if status in (RunStatus.COMPLETED, RunStatus.FAILED, RunStatus.CANCELLED):
                # Stamp end_time once, on the first terminal transition.
                if not run.end_time:
                    run.end_time = time.time()

        if task_states:
            run.tasks.update(task_states)

    def cancel_run(self, run_id: str):
        """Request cancellation of a run.

        A RUNNING run moves to CANCELLING; the worker observes this via
        its heartbeat response and stops. A PENDING run never started,
        so it is finalized as CANCELLED immediately (previously it was
        left in CANCELLING forever, since no worker would ever pick it
        up to finish the transition); dequeue() will skip it.
        """
        run = self.runs.get(run_id)
        if not run:
            return

        if run.status == RunStatus.PENDING:
            run.status = RunStatus.CANCELLED
            run.end_time = time.time()
        elif run.status == RunStatus.RUNNING:
            run.status = RunStatus.CANCELLING
pyoco/trace/backend.py CHANGED
@@ -3,7 +3,7 @@ from typing import Any, Optional
3
3
 
4
4
  class TraceBackend(ABC):
5
5
  @abstractmethod
6
- def on_flow_start(self, flow_name: str):
6
+ def on_flow_start(self, flow_name: str, run_id: str = None):
7
7
  pass
8
8
 
9
9
  @abstractmethod
pyoco/trace/console.py CHANGED
@@ -2,14 +2,22 @@ import time
2
2
  from .backend import TraceBackend
3
3
 
4
4
  class ConsoleTraceBackend(TraceBackend):
5
- def __init__(self, style: str = "cute"):
5
+ def __init__(self, style: str = None):
6
+ if style is None:
7
+ import os
8
+ env_cute = os.environ.get("PYOCO_CUTE", "true").lower()
9
+ if env_cute in ["0", "false", "no", "off"]:
10
+ style = "plain"
11
+ else:
12
+ style = "cute"
6
13
  self.style = style
7
14
 
8
- def on_flow_start(self, flow_name: str):
15
+ def on_flow_start(self, flow_name: str, run_id: str = None):
16
+ rid_str = f" run_id={run_id}" if run_id else ""
9
17
  if self.style == "cute":
10
- print(f"🐇 pyoco > start flow={flow_name}")
18
+ print(f"🐇 pyoco > start flow={flow_name}{rid_str}")
11
19
  else:
12
- print(f"INFO pyoco start flow={flow_name}")
20
+ print(f"INFO pyoco start flow={flow_name}{rid_str}")
13
21
 
14
22
  def on_flow_end(self, flow_name: str):
15
23
  if self.style == "cute":
File without changes
pyoco/worker/client.py ADDED
@@ -0,0 +1,43 @@
1
+ import httpx
2
+ from typing import Dict, List, Optional, Any
3
+ from ..core.models import RunStatus, TaskState
4
+
5
class WorkerClient:
    """Thin HTTP client a worker uses to talk to the pyoco server."""

    def __init__(self, server_url: str, worker_id: str):
        self.server_url = server_url.rstrip("/")
        self.worker_id = worker_id
        self.client = httpx.Client(base_url=self.server_url)

    def poll(self, tags: Optional[List[str]] = None) -> Optional[Dict[str, Any]]:
        """Ask the server for a queued job.

        Returns the job payload (contains at least "run_id"), or None if
        the queue is empty or the request failed. *tags* defaults to an
        empty list (was a mutable default argument, shared across calls).
        """
        try:
            resp = self.client.post("/workers/poll", json={
                "worker_id": self.worker_id,
                "tags": tags or []
            })
            resp.raise_for_status()
            data = resp.json()
            if data.get("run_id"):
                return data
            return None
        except Exception as e:
            # Polling is retried by the worker loop, so report and move on.
            print(f"Poll failed: {e}")
            return None

    def heartbeat(self, run_id: str, task_states: Dict[str, TaskState], run_status: RunStatus) -> bool:
        """Report task/run state to the server.

        Returns True if the server requests cancellation of *run_id*;
        False otherwise, including on transport failure.
        """
        try:
            # Enums are not JSON-serializable; send their values.
            states_json = {k: v.value for k, v in task_states.items()}
            status_value = run_status.value

            resp = self.client.post(f"/runs/{run_id}/heartbeat", json={
                "task_states": states_json,
                "run_status": status_value
            })
            resp.raise_for_status()
            return resp.json().get("cancel_requested", False)
        except Exception as e:
            # A missed heartbeat must not kill the run; report and carry on.
            print(f"Heartbeat failed: {e}")
            return False
pyoco/worker/runner.py ADDED
@@ -0,0 +1,171 @@
1
+ import time
2
+ import uuid
3
+ from typing import List, Optional
4
+ from ..core.engine import Engine
5
+ from ..core.models import RunContext, RunStatus, Flow
6
+ from ..trace.backend import TraceBackend
7
+ from ..discovery.loader import TaskLoader
8
+ from ..schemas.config import PyocoConfig
9
+ from ..client import Client
10
+
11
+ from ..trace.console import ConsoleTraceBackend
12
+
13
class RemoteTraceBackend(TraceBackend):
    """Trace backend that mirrors events to the console and reports
    run/task state to the server via throttled heartbeats.

    Heartbeats are rate-limited to one per ``heartbeat_interval`` seconds
    unless forced (flow/node boundaries and errors force one). When the
    server replies that cancellation was requested, the shared RunContext
    is flipped to CANCELLING so the engine can stop.
    """

    def __init__(self, client: Client, run_ctx: RunContext, cute: bool = True):
        self.client = client
        self.run_ctx = run_ctx
        self.last_heartbeat = 0
        self.heartbeat_interval = 1.0  # minimum seconds between unforced heartbeats
        console_style = "cute" if cute else "plain"
        self.console = ConsoleTraceBackend(style=console_style)

    def _send_heartbeat(self, force=False):
        """Send a heartbeat if forced or the throttle interval elapsed."""
        now = time.time()
        if not force and (now - self.last_heartbeat) <= self.heartbeat_interval:
            return
        cancel_requested = self.client.heartbeat(
            self.run_ctx.run_id,
            self.run_ctx.tasks,
            self.run_ctx.status
        )
        already_stopping = self.run_ctx.status in (RunStatus.CANCELLING, RunStatus.CANCELLED)
        if cancel_requested and not already_stopping:
            print(f"🛑 Cancellation requested from server for run {self.run_ctx.run_id}")
            self.run_ctx.status = RunStatus.CANCELLING
        self.last_heartbeat = now

    def on_flow_start(self, name: str, run_id: Optional[str] = None):
        self.console.on_flow_start(name, run_id)
        self._send_heartbeat(force=True)

    def on_flow_end(self, name: str):
        self.console.on_flow_end(name)
        self._send_heartbeat(force=True)

    def on_node_start(self, node_name: str):
        self.console.on_node_start(node_name)
        self._send_heartbeat()

    def on_node_end(self, node_name: str, duration: float):
        self.console.on_node_end(node_name, duration)
        self._send_heartbeat(force=True)

    def on_node_error(self, node_name: str, error: Exception):
        self.console.on_node_error(node_name, error)
        self._send_heartbeat(force=True)
53
+
54
+
55
class Worker:
    """Polls the pyoco server for jobs and executes them locally.

    Tasks and flows are loaded from the local config once at startup;
    the worker then loops: poll -> execute -> report via heartbeats.
    """

    def __init__(self, server_url: str, config: PyocoConfig, tags: Optional[List[str]] = None):
        self.server_url = server_url
        self.config = config
        # Was a mutable default argument ([]); also copy so the caller's
        # list is not aliased onto the instance.
        self.tags = list(tags) if tags else []
        self.worker_id = f"w-{uuid.uuid4().hex[:8]}"
        self.client = Client(server_url, self.worker_id)
        self.loader = TaskLoader(config)
        self.loader.load()  # Load tasks/flows once

    def start(self):
        """Run the poll loop until interrupted with Ctrl+C."""
        print(f"🐜 Worker {self.worker_id} started. Connected to {self.server_url}")
        try:
            while True:
                job = self.client.poll(self.tags)
                if job:
                    self._execute_job(job)
                else:
                    time.sleep(2.0)
        except KeyboardInterrupt:
            print("\n🛑 Worker stopping...")

    def _build_flow(self, flow_name: str):
        """Construct a Flow object from the local config.

        Returns the built Flow, or None (after printing the reason) when
        the flow is unknown or the graph expression fails to evaluate.
        """
        from ..core.models import Flow as FlowModel
        from ..dsl.syntax import TaskWrapper

        flow_def = self.config.flows.get(flow_name)
        if not flow_def:
            print(f"❌ Flow '{flow_name}' not found in local config.")
            return None

        # Expose every loaded task to the graph expression by name.
        eval_context = {name: TaskWrapper(task) for name, task in self.loader.tasks.items()}

        try:
            flow = FlowModel(name=flow_name)
            for t in self.loader.tasks.values():
                flow.add_task(t)

            # NOTE(security): the graph DSL is evaluated with exec(); the
            # config must come from a trusted source.
            exec(flow_def.graph, {}, eval_context)
            return flow
        except Exception as e:
            print(f"❌ Error building flow: {e}")
            return None

    def _execute_job(self, job):
        """Execute one job received from the server and report its outcome."""
        run_id = job["run_id"]
        flow_name = job["flow_name"]
        params = job["params"] or {}

        print(f"🚀 Received job: {run_id} (Flow: {flow_name})")

        flow = self._build_flow(flow_name)
        if flow is None:
            return

        engine = Engine()

        # Pre-create the RunContext with the server-assigned run_id so the
        # trace backend can report against it; Engine.run accepts it via
        # the run_context argument and fills in task states.
        run_ctx = RunContext(run_id=run_id, status=RunStatus.RUNNING)

        # RemoteTraceBackend mirrors events to the console itself, so it
        # simply replaces the engine's default trace backend.
        backend = RemoteTraceBackend(self.client, run_ctx)
        engine.trace = backend

        try:
            engine.run(flow, params=params, run_context=run_ctx)
            print(f"✅ Job {run_id} completed: {run_ctx.status}")
            # Final heartbeat so the server sees the terminal state.
            self.client.heartbeat(run_id, run_ctx.tasks, run_ctx.status)
        except Exception as e:
            print(f"💥 Job {run_id} failed: {e}")
            run_ctx.status = RunStatus.FAILED
            # Report the failure even though the run crashed.
            self.client.heartbeat(run_id, run_ctx.tasks, run_ctx.status)
+
@@ -0,0 +1,146 @@
1
+ Metadata-Version: 2.4
2
+ Name: pyoco
3
+ Version: 0.3.0
4
+ Summary: A workflow engine with sugar syntax
5
+ Requires-Python: >=3.10
6
+ Description-Content-Type: text/markdown
7
+ Requires-Dist: pyyaml>=6.0.3
8
+ Requires-Dist: fastapi>=0.100.0
9
+ Requires-Dist: uvicorn>=0.20.0
10
+ Requires-Dist: httpx>=0.24.0
11
+
12
+ # 🐇 Pyoco
13
+
14
+ **pyoco is a minimal, pure-Python DAG engine for defining and running simple task-based workflows.**
15
+
16
+ ## Overview
17
+
18
+ Pyoco is designed to be significantly smaller, lighter, and have fewer dependencies than full-scale workflow engines like Airflow. It is optimized for local development and single-machine execution.
19
+
20
+ You can define tasks and their dependencies entirely in Python code using decorators and a simple API. There is no need for complex configuration files or external databases.
21
+
22
+ It is ideal for small jobs, development environments, and personal projects where a full-stack workflow engine would be overkill.
23
+
24
+ ## ✨ Features
25
+
26
+ - **Pure Python**: No external services or heavy dependencies required.
27
+ - **Minimal DAG model**: Tasks and dependencies are defined directly in code.
28
+ - **Task-oriented**: Focus on "small workflows" that should be easy to read and maintain.
29
+ - **Friendly trace logs**: Runs can be traced step by step from the terminal with cute (or plain) logs.
30
+ - **Parallel Execution**: Automatically runs independent tasks in parallel.
31
+ - **Artifact Management**: Easily save and manage task outputs and files.
32
+ - **Observability**: Track execution with unique Run IDs and detailed state transitions.
33
+ - **Control**: Cancel running workflows gracefully with `Ctrl+C`.
34
+
35
+ ## 📦 Installation
36
+
37
+ ```bash
38
+ pip install pyoco
39
+ ```
40
+
41
+ ## 🚀 Usage
42
+
43
+ Here is a minimal example of a pure-Python workflow.
44
+
45
+ ```python
46
+ from pyoco import task
47
+ from pyoco.core.models import Flow
48
+ from pyoco.core.engine import Engine
49
+
50
+ @task
51
+ def fetch_data(ctx):
52
+ print("🐰 Fetching data...")
53
+ return {"id": 1, "value": "carrot"}
54
+
55
+ @task
56
+ def process_data(ctx, data):
57
+ print(f"🥕 Processing: {data['value']}")
58
+ return data['value'].upper()
59
+
60
+ @task
61
+ def save_result(ctx, result):
62
+ print(f"✨ Saved: {result}")
63
+
64
+ # Define the flow
65
+ flow = Flow(name="hello_pyoco")
66
+ flow >> fetch_data >> process_data >> save_result
67
+
68
+ # Wire inputs (explicitly for this example)
69
+ process_data.task.inputs = {"data": "$node.fetch_data.output"}
70
+ save_result.task.inputs = {"result": "$node.process_data.output"}
71
+
72
+ if __name__ == "__main__":
73
+ engine = Engine()
74
+ engine.run(flow)
75
+ ```
76
+
77
+ Run it:
78
+
79
+ ```bash
80
+ python examples/hello_pyoco.py
81
+ ```
82
+
83
+ Output:
84
+
85
+ ```
86
+ 🐇 pyoco > start flow=hello_pyoco
87
+ 🏃 start node=fetch_data
88
+ 🐰 Fetching data...
89
+ ✅ done node=fetch_data (0.30 ms)
90
+ 🏃 start node=process_data
91
+ 🥕 Processing: carrot
92
+ ✅ done node=process_data (0.23 ms)
93
+ 🏃 start node=save_result
94
+ ✨ Saved: CARROT
95
+ ✅ done node=save_result (0.30 ms)
96
+ 🥕 done flow=hello_pyoco
97
+ ```
98
+
99
+ See [examples/hello_pyoco.py](examples/hello_pyoco.py) for the full code.
100
+
101
+ ## 🏗️ Architecture
102
+
103
+ Pyoco is designed with a simple flow:
104
+
105
+ ```
106
+ +-----------+ +------------------+ +-----------------+
107
+ | User Code | ---> | pyoco.core.Flow | ---> | trace/logger |
108
+ | (Tasks) | | (Engine) | | (Console/File) |
109
+ +-----------+ +------------------+ +-----------------+
110
+ ```
111
+
112
+ 1. **User Code**: You define tasks and flows using Python decorators.
113
+ 2. **Core Engine**: The engine resolves dependencies and executes tasks (in parallel where possible).
114
+ 3. **Trace**: Execution events are sent to the trace backend for logging (cute or plain).
115
+
116
+ ## 🎭 Modes
117
+
118
+ Pyoco has two output modes:
119
+
120
+ - **Cute Mode** (Default): Uses emojis and friendly messages. Best for local development and learning.
121
+ - **Non-Cute Mode**: Plain text logs. Best for CI/CD and production monitoring.
122
+
123
+ You can switch modes using an environment variable:
124
+
125
+ ```bash
126
+ export PYOCO_CUTE=0 # Disable cute mode
127
+ ```
128
+
129
+ Or via CLI flag:
130
+
131
+ ```bash
132
+ pyoco run --non-cute ...
133
+ ```
134
+
135
+ ## 📚 Documentation
136
+
137
+ - [Tutorials](docs/tutorial/index.md)
138
+ - [Roadmap](docs/roadmap.md)
139
+
140
+ ## 💖 Contributing
141
+
142
+ We love contributions! Please feel free to submit a Pull Request.
143
+
144
+ ---
145
+
146
+ *Made with 🥕 by the Pyoco Team.*
@@ -0,0 +1,25 @@
1
+ pyoco/__init__.py,sha256=E2pgDGvGRSVon7dSqIM4UD55LgVpf4jiZZA-70kOcuw,409
2
+ pyoco/client.py,sha256=0PFC-1WaIhKHLLVMUXx9hCADsNklBjXRrQS7b9IrFnc,2498
3
+ pyoco/cli/entry.py,sha256=zPIG0Gx-cFO8Cf1Z3wD3Ifz_2sHaryHZ6mCRri2WEqE,93
4
+ pyoco/cli/main.py,sha256=kGrgxh7s4I7JBJzC1CUxxt528ZtCy_Z0PfpLvN4axpM,11315
5
+ pyoco/core/base_task.py,sha256=z7hOFntAPv4yCADapS-fhtLe5eWqaO8k3T1r05YEEUE,2106
6
+ pyoco/core/context.py,sha256=khAXRLhkwYtZVxgJIFMuQshFDyDF-wqAKi2-dLELw20,4173
7
+ pyoco/core/engine.py,sha256=OlQtpbdEESHGoaVHbe0TY0ScxvQ15KrkLHylkEyb0fo,17099
8
+ pyoco/core/models.py,sha256=SXsv13DxrfUUB-ChLyMXVIqw-CmGqacPgxVWybPcCzo,5708
9
+ pyoco/discovery/loader.py,sha256=HzTs4zf2UO1zsPQ2lU5vfGXZx5hwZ_ti50xGPVfeCuI,4931
10
+ pyoco/dsl/__init__.py,sha256=xWdb60pSRL8lNFk4GHF3EJ4hon0uiWqpv264g6-4gdg,45
11
+ pyoco/dsl/syntax.py,sha256=AkFcD5gLlbJLFN0KkMIyttpHUV3v21pjz_ZqwreZkdM,4312
12
+ pyoco/schemas/config.py,sha256=KkGZK3GxTHoIHEGb4f4k8GE2W-aBN4iPzmc_HrwuROU,1735
13
+ pyoco/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
+ pyoco/server/api.py,sha256=v66kRzFHxE874ZaAtOjFtJCFLBkQzE9EjfQpfI2aDuI,2592
15
+ pyoco/server/models.py,sha256=tsdF8ShDkW6ajVV_oq9w8rUM3SbH6nbkAT0khELyZoI,712
16
+ pyoco/server/store.py,sha256=S02LbMN5sQzSJuvAb4nN7rqqs4G1BwipAdNQGELEd50,2888
17
+ pyoco/trace/backend.py,sha256=a1css94_lhO4SGSPHZ1f59HJqFQtZ5Sjx09Kw7v5bsk,617
18
+ pyoco/trace/console.py,sha256=I-BcF405OGLWoacJWeke8vTT9M5JxSBpJL-NazVyxb4,1742
19
+ pyoco/worker/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
+ pyoco/worker/client.py,sha256=862KccXRtfG7zd9ZSLqrpVSV6ev8zeuEHHdtAfLghiM,1557
21
+ pyoco/worker/runner.py,sha256=8Z__t54cFZmbouLFtUBNmItC_w6FBCNF7Ey-OgafdTg,6538
22
+ pyoco-0.3.0.dist-info/METADATA,sha256=u5afgdDSCKgbK6LEbgZGFM1wafznJRHVOoesZ4ZR_Us,4140
23
+ pyoco-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
24
+ pyoco-0.3.0.dist-info/top_level.txt,sha256=2JRVocfaWRbX1VJ3zq1c5wQaOK6fMARS6ptVFWyvRF4,6
25
+ pyoco-0.3.0.dist-info/RECORD,,
@@ -1,7 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: pyoco
3
- Version: 0.1.0
4
- Summary: A workflow engine with sugar syntax
5
- Requires-Python: >=3.10
6
- Description-Content-Type: text/markdown
7
- Requires-Dist: pyyaml>=6.0.3
@@ -1,17 +0,0 @@
1
- pyoco/__init__.py,sha256=E2pgDGvGRSVon7dSqIM4UD55LgVpf4jiZZA-70kOcuw,409
2
- pyoco/cli/entry.py,sha256=zPIG0Gx-cFO8Cf1Z3wD3Ifz_2sHaryHZ6mCRri2WEqE,93
3
- pyoco/cli/main.py,sha256=uRc6CzUTVRYF4JbehlbrprT7GvWQ-WyBZ8k12NrSxO8,6502
4
- pyoco/core/base_task.py,sha256=z7hOFntAPv4yCADapS-fhtLe5eWqaO8k3T1r05YEEUE,2106
5
- pyoco/core/context.py,sha256=SnoTz3vRghO1A-FNOrw2NEjbx1HySDqrBnQU5-KWGbk,3696
6
- pyoco/core/engine.py,sha256=m5LrEsXcpUAran5DxULtWbvhsMNj5mv17wE6lDFkFmQ,11416
7
- pyoco/core/models.py,sha256=zTt5HTSBChwRpOuw3qY2pvjRGZVsq4OQ-ZBHE3ujMWA,4548
8
- pyoco/discovery/loader.py,sha256=XzZzOAyFYrdA8K6APuEGWgjSIyp4Bgwlr834MyJc8vk,4950
9
- pyoco/dsl/__init__.py,sha256=xWdb60pSRL8lNFk4GHF3EJ4hon0uiWqpv264g6-4gdg,45
10
- pyoco/dsl/syntax.py,sha256=AkFcD5gLlbJLFN0KkMIyttpHUV3v21pjz_ZqwreZkdM,4312
11
- pyoco/schemas/config.py,sha256=KkGZK3GxTHoIHEGb4f4k8GE2W-aBN4iPzmc_HrwuROU,1735
12
- pyoco/trace/backend.py,sha256=h7l1PU8zuCSOo_VA5T1ax4znN_Az3Xuvx-KXibg3e-U,597
13
- pyoco/trace/console.py,sha256=Kf2-vma98ojhVQZHFzCUYfD_46Lr1WfAfI56smZkSZM,1397
14
- pyoco-0.1.0.dist-info/METADATA,sha256=bA_qJXUkIiC7TIOSo8CEzJ6PXp01pLQ1Q1LoMOrIw_k,187
15
- pyoco-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
16
- pyoco-0.1.0.dist-info/top_level.txt,sha256=2JRVocfaWRbX1VJ3zq1c5wQaOK6fMARS6ptVFWyvRF4,6
17
- pyoco-0.1.0.dist-info/RECORD,,
File without changes