pactown 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pactown/__init__.py +23 -0
- pactown/cli.py +347 -0
- pactown/config.py +158 -0
- pactown/deploy/__init__.py +17 -0
- pactown/deploy/base.py +263 -0
- pactown/deploy/compose.py +359 -0
- pactown/deploy/docker.py +299 -0
- pactown/deploy/kubernetes.py +449 -0
- pactown/deploy/podman.py +400 -0
- pactown/generator.py +212 -0
- pactown/network.py +245 -0
- pactown/orchestrator.py +455 -0
- pactown/parallel.py +268 -0
- pactown/registry/__init__.py +12 -0
- pactown/registry/client.py +253 -0
- pactown/registry/models.py +150 -0
- pactown/registry/server.py +207 -0
- pactown/resolver.py +160 -0
- pactown/sandbox_manager.py +328 -0
- pactown-0.1.4.dist-info/METADATA +308 -0
- pactown-0.1.4.dist-info/RECORD +24 -0
- pactown-0.1.4.dist-info/WHEEL +4 -0
- pactown-0.1.4.dist-info/entry_points.txt +3 -0
- pactown-0.1.4.dist-info/licenses/LICENSE +201 -0
pactown/parallel.py
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
"""Parallel execution utilities for pactown."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import time
|
|
7
|
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
8
|
+
from dataclasses import dataclass, field
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Callable, Optional, Any
|
|
11
|
+
from threading import Lock
|
|
12
|
+
|
|
13
|
+
from rich.console import Console
|
|
14
|
+
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TaskProgressColumn
|
|
15
|
+
|
|
16
|
+
console = Console()
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class TaskResult:
    """Result of a parallel task.

    Produced by the runner functions in this module: ``result`` holds the
    callable's return value on success, ``error`` the stringified exception
    on failure.
    """
    name: str  # task name (the key in the input dict)
    success: bool  # True when the callable returned without raising
    duration: float  # wall-clock seconds from submission to completion
    result: Any = None  # return value of the callable (None on failure)
    error: Optional[str] = None  # str(exception) when the callable raised


def run_parallel(
    tasks: dict[str, Callable[[], Any]],
    max_workers: int = 4,
    show_progress: bool = True,
    description: str = "Running tasks",
) -> dict[str, TaskResult]:
    """
    Run multiple tasks in parallel using ThreadPoolExecutor.

    Args:
        tasks: Dict of {name: callable} to run
        max_workers: Maximum parallel workers
        show_progress: Show progress bar
        description: Progress description

    Returns:
        Dict of {name: TaskResult}
    """
    results: dict[str, TaskResult] = {}

    if not tasks:
        return results

    def _collect(future, name: str, started: float) -> TaskResult:
        # Shared result-collection logic for both the progress and
        # no-progress paths (previously duplicated verbatim).
        duration = time.time() - started
        try:
            return TaskResult(
                name=name,
                success=True,
                duration=duration,
                result=future.result(),
            )
        except Exception as e:
            return TaskResult(
                name=name,
                success=False,
                duration=duration,
                error=str(e),
            )

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {}
        start_times = {}

        for name, func in tasks.items():
            start_times[name] = time.time()
            futures[executor.submit(func)] = name

        if show_progress:
            with Progress(
                SpinnerColumn(),
                TextColumn("[progress.description]{task.description}"),
                BarColumn(),
                TaskProgressColumn(),
                console=console,
            ) as progress:
                task = progress.add_task(description, total=len(tasks))

                for future in as_completed(futures):
                    name = futures[future]
                    results[name] = _collect(future, name, start_times[name])
                    progress.advance(task)
        else:
            for future in as_completed(futures):
                name = futures[future]
                results[name] = _collect(future, name, start_times[name])

    return results
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def run_in_dependency_waves(
    tasks: dict[str, Callable[[], Any]],
    dependencies: dict[str, list[str]],
    max_workers: int = 4,
    on_complete: Optional[Callable[[str, TaskResult], None]] = None,
) -> dict[str, TaskResult]:
    """
    Execute tasks wave by wave, honouring a dependency graph.

    Each wave is the set of tasks whose dependencies have all completed
    successfully; the wave runs in parallel, and the next wave begins
    once it finishes.

    Args:
        tasks: Dict of {name: callable}
        dependencies: Dict of {name: [dependency_names]}
        max_workers: Max parallel workers per wave
        on_complete: Callback when task completes

    Returns:
        Dict of {name: TaskResult}
    """
    results: dict[str, TaskResult] = {}
    done: set[str] = set()
    pending = set(tasks)

    while pending:
        # Everything whose dependencies are all satisfied forms the next wave.
        wave = [
            name for name in pending
            if all(dep in done for dep in dependencies.get(name, []))
        ]

        if not wave:
            # Circular dependency or missing dependency
            raise ValueError(f"Cannot resolve dependencies for: {pending}")

        wave_results = run_parallel(
            {name: tasks[name] for name in wave},
            max_workers=max_workers,
            show_progress=False,
        )

        for name, outcome in wave_results.items():
            results[name] = outcome
            pending.discard(name)

            if outcome.success:
                done.add(name)

            if on_complete:
                on_complete(name, outcome)

        # A failure anywhere in the wave halts all remaining waves.
        if not all(r.success for r in wave_results.values()):
            break

    return results
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
async def run_parallel_async(
    tasks: dict[str, Callable[[], Any]],
    max_concurrent: int = 4,
) -> dict[str, TaskResult]:
    """
    Run tasks using asyncio with a semaphore for concurrency control.

    Each callable runs in the default thread-pool executor, so blocking
    or CPU-bound work does not stall the event loop.

    Args:
        tasks: Dict of {name: callable}
        max_concurrent: Maximum number of tasks in flight at once

    Returns:
        Dict of {name: TaskResult}
    """
    results: dict[str, TaskResult] = {}
    semaphore = asyncio.Semaphore(max_concurrent)

    async def run_task(name: str, func: Callable) -> TaskResult:
        async with semaphore:
            start = time.time()
            try:
                # Fix: get_running_loop() is the supported call inside a
                # coroutine; get_event_loop() here is deprecated (3.10+).
                loop = asyncio.get_running_loop()
                result = await loop.run_in_executor(None, func)
                return TaskResult(
                    name=name,
                    success=True,
                    duration=time.time() - start,
                    result=result,
                )
            except Exception as e:
                return TaskResult(
                    name=name,
                    success=False,
                    duration=time.time() - start,
                    error=str(e),
                )

    coros = [run_task(name, func) for name, func in tasks.items()]
    task_results = await asyncio.gather(*coros)

    for result in task_results:
        results[result.name] = result

    return results
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
class ParallelSandboxBuilder:
    """Build multiple sandboxes in parallel."""

    def __init__(self, max_workers: int = 4):
        self.max_workers = max_workers  # upper bound on concurrent builds
        self._lock = Lock()  # available for shared-state use by callers/subclasses

    def build_sandboxes(
        self,
        services: list[tuple[str, Path, Callable]],
        on_complete: Optional[Callable[[str, bool, float], None]] = None,
    ) -> dict[str, TaskResult]:
        """
        Build sandboxes for multiple services in parallel.

        Args:
            services: List of (name, readme_path, build_func); readme_path is
                carried for the caller's bookkeeping and is not used here.
            on_complete: Callback(name, success, duration), invoked once per
                service after all builds have finished.

        Returns:
            Dict of results
        """
        tasks = {name: build_func for name, _readme_path, build_func in services}

        results = run_parallel(
            tasks,
            max_workers=self.max_workers,
            show_progress=True,
            description="Building sandboxes",
        )

        # Bug fix: the callback used to be wrapped in a local function that
        # was never invoked, so on_complete was silently ignored.
        if on_complete:
            for name, result in results.items():
                on_complete(name, result.success, result.duration)

        return results
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def format_parallel_results(results: dict[str, TaskResult]) -> str:
    """Render a human-readable summary of parallel execution results.

    Reports the success count and the summed per-task durations, plus one
    line per failed task with its error message.
    """
    outcomes = list(results.values())
    failures = [r for r in outcomes if not r.success]
    ok_count = len(outcomes) - len(failures)
    combined = sum(r.duration for r in outcomes)

    report = [
        f"Completed: {ok_count}/{len(results)} tasks",
        f"Total time: {combined:.2f}s",
    ]

    if failures:
        report.append("\nFailed:")
        report.extend(f"  ✗ {r.name}: {r.error}" for r in failures)

    return "\n".join(report)
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"""Pactown registry - local artifact registry for markpact modules."""
|
|
2
|
+
|
|
3
|
+
from .server import create_app
|
|
4
|
+
from .client import RegistryClient
|
|
5
|
+
from .models import Artifact, ArtifactVersion
|
|
6
|
+
|
|
7
|
+
__all__ = [
|
|
8
|
+
"create_app",
|
|
9
|
+
"RegistryClient",
|
|
10
|
+
"Artifact",
|
|
11
|
+
"ArtifactVersion",
|
|
12
|
+
]
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
"""Client for pactown registry."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Optional, Any
|
|
5
|
+
import hashlib
|
|
6
|
+
|
|
7
|
+
import httpx
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class RegistryClient:
    """Client for interacting with pactown registry.

    Lookup helpers are best-effort: a missing artifact or an unreachable
    registry yields None/False/[] rather than raising. ``list_artifacts``
    is the exception and propagates request errors.
    """

    def __init__(self, base_url: str = "http://localhost:8800", timeout: float = 30.0):
        self.base_url = base_url.rstrip("/")  # normalized registry root URL
        self.timeout = timeout  # per-request timeout in seconds
        self._client = httpx.Client(timeout=timeout)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self._client.close()

    def close(self):
        """Close the underlying HTTP connection pool."""
        self._client.close()

    def _get_json(self, url: str) -> Optional[Any]:
        """GET *url* and return the decoded JSON body.

        Returns None on 404 or on any HTTP/transport error. Shared by the
        lookup helpers below, which previously duplicated this try/except
        block verbatim.
        """
        try:
            response = self._client.get(url)
            if response.status_code == 404:
                return None
            response.raise_for_status()
            return response.json()
        except httpx.HTTPError:
            # HTTPError covers both status errors and transport errors
            # (connect failures, timeouts), so lookups degrade gracefully.
            return None

    def health(self) -> bool:
        """Check if registry is healthy."""
        try:
            response = self._client.get(f"{self.base_url}/health")
            return response.status_code == 200
        except Exception:
            return False

    def list_artifacts(
        self,
        namespace: Optional[str] = None,
        search: Optional[str] = None
    ) -> list[dict]:
        """List artifacts in the registry.

        Raises:
            httpx.HTTPError: when the request fails (listing propagates
            errors to the caller, unlike the lookup helpers).
        """
        params = {}
        if namespace:
            params["namespace"] = namespace
        if search:
            params["search"] = search

        response = self._client.get(f"{self.base_url}/v1/artifacts", params=params)
        response.raise_for_status()
        return response.json()

    def get_artifact(self, name: str, namespace: str = "default") -> Optional[dict]:
        """Get artifact information, or None when absent or unreachable."""
        return self._get_json(f"{self.base_url}/v1/artifacts/{namespace}/{name}")

    def get_version(
        self,
        name: str,
        version: str = "latest",
        namespace: str = "default"
    ) -> Optional[dict]:
        """Get specific version information, or None when absent."""
        return self._get_json(
            f"{self.base_url}/v1/artifacts/{namespace}/{name}/{version}"
        )

    def get_readme(
        self,
        name: str,
        version: str = "latest",
        namespace: str = "default"
    ) -> Optional[str]:
        """Get README content for a specific version, or None when absent."""
        data = self._get_json(
            f"{self.base_url}/v1/artifacts/{namespace}/{name}/{version}/readme"
        )
        return None if data is None else data.get("content")

    def publish(
        self,
        name: str,
        version: str,
        readme_path: Optional[Path] = None,
        readme_content: Optional[str] = None,
        namespace: str = "default",
        description: str = "",
        tags: Optional[list[str]] = None,
        metadata: Optional[dict] = None,
    ) -> dict:
        """Publish an artifact to the registry.

        Either readme_path or readme_content must be supplied; the path,
        when given, takes precedence.

        Returns:
            The registry's JSON response, or {"success": False, "error": ...}
            when the request fails.

        Raises:
            ValueError: if neither readme source is provided.
        """
        if readme_path:
            readme_content = Path(readme_path).read_text()

        if not readme_content:
            raise ValueError("Either readme_path or readme_content must be provided")

        payload = {
            "name": name,
            "version": version,
            "readme_content": readme_content,
            "namespace": namespace,
            "description": description,
            "tags": tags or [],
            "metadata": metadata or {},
        }

        try:
            response = self._client.post(f"{self.base_url}/v1/publish", json=payload)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPError as e:
            # Broadened from HTTPStatusError so an unreachable registry
            # reports failure instead of raising out of publish().
            return {"success": False, "error": str(e)}

    def pull(
        self,
        name: str,
        version: str = "latest",
        namespace: str = "default",
        output_path: Optional[Path] = None,
    ) -> Optional[str]:
        """Pull an artifact's README, optionally writing it to output_path."""
        readme = self.get_readme(name, version, namespace)

        if readme and output_path:
            output_path = Path(output_path)
            output_path.parent.mkdir(parents=True, exist_ok=True)
            output_path.write_text(readme)

        return readme

    def delete(self, name: str, namespace: str = "default") -> bool:
        """Delete an artifact from the registry; True on HTTP 200.

        Bug fix: the previous handler caught httpx.HTTPStatusError, which
        can never be raised here (no raise_for_status() in the try), so
        transport errors escaped. Catching httpx.HTTPError makes any
        request failure return False as intended.
        """
        try:
            response = self._client.delete(
                f"{self.base_url}/v1/artifacts/{namespace}/{name}"
            )
            return response.status_code == 200
        except httpx.HTTPError:
            return False

    def list_namespaces(self) -> list[str]:
        """List all namespaces; empty list on any request failure."""
        try:
            response = self._client.get(f"{self.base_url}/v1/namespaces")
            response.raise_for_status()
            return response.json().get("namespaces", [])
        except httpx.HTTPError:
            return []
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
class AsyncRegistryClient:
    """Asynchronous counterpart of RegistryClient for the pactown registry."""

    def __init__(self, base_url: str = "http://localhost:8800", timeout: float = 30.0):
        self.base_url = base_url.rstrip("/")  # normalized registry root URL
        self.timeout = timeout  # per-request timeout in seconds
        self._client = httpx.AsyncClient(timeout=timeout)

    async def __aenter__(self):
        return self

    async def __aexit__(self, *args):
        await self._client.aclose()

    async def close(self):
        """Close the underlying HTTP connection pool."""
        await self._client.aclose()

    async def health(self) -> bool:
        """Return True when the registry answers /health with HTTP 200."""
        try:
            resp = await self._client.get(f"{self.base_url}/health")
        except Exception:
            return False
        return resp.status_code == 200

    async def list_artifacts(
        self,
        namespace: Optional[str] = None,
        search: Optional[str] = None,
    ) -> list[dict]:
        """List registry artifacts, optionally filtered by namespace/search."""
        params = {
            key: value
            for key, value in (("namespace", namespace), ("search", search))
            if value
        }

        resp = await self._client.get(f"{self.base_url}/v1/artifacts", params=params)
        resp.raise_for_status()
        return resp.json()

    async def get_readme(
        self,
        name: str,
        version: str = "latest",
        namespace: str = "default"
    ) -> Optional[str]:
        """Fetch README content for one version; None when it is missing."""
        url = f"{self.base_url}/v1/artifacts/{namespace}/{name}/{version}/readme"
        try:
            resp = await self._client.get(url)
            if resp.status_code == 404:
                return None
            resp.raise_for_status()
        except httpx.HTTPStatusError:
            return None
        return resp.json().get("content")

    async def publish(
        self,
        name: str,
        version: str,
        readme_content: str,
        namespace: str = "default",
        description: str = "",
        tags: Optional[list[str]] = None,
        metadata: Optional[dict] = None,
    ) -> dict:
        """Publish one artifact version; error dict on HTTP status failure."""
        payload = {
            "name": name,
            "version": version,
            "readme_content": readme_content,
            "namespace": namespace,
            "description": description,
            "tags": tags or [],
            "metadata": metadata or {},
        }

        try:
            resp = await self._client.post(f"{self.base_url}/v1/publish", json=payload)
            resp.raise_for_status()
        except httpx.HTTPStatusError as e:
            return {"success": False, "error": str(e)}
        return resp.json()
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Data models for pactown registry."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from typing import Optional, Any
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
import json
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@dataclass
class ArtifactVersion:
    """A specific version of an artifact."""
    version: str  # version string used as the lookup key
    readme_content: str  # full README markdown for this version
    checksum: str  # content checksum supplied by the publisher
    published_at: datetime = field(default_factory=datetime.utcnow)
    metadata: dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> dict:
        """Serialize to a JSON-compatible dict (datetime as ISO string)."""
        payload = dict(
            version=self.version,
            readme_content=self.readme_content,
            checksum=self.checksum,
            published_at=self.published_at.isoformat(),
        )
        payload["metadata"] = self.metadata
        return payload

    @classmethod
    def from_dict(cls, data: dict) -> "ArtifactVersion":
        """Inverse of to_dict(); parses the ISO timestamp back to datetime."""
        required = {k: data[k] for k in ("version", "readme_content", "checksum")}
        return cls(
            published_at=datetime.fromisoformat(data["published_at"]),
            metadata=data.get("metadata", {}),
            **required,
        )
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class Artifact:
    """An artifact in the registry (a markpact module)."""
    name: str  # artifact name, unique within its namespace
    namespace: str = "default"
    description: str = ""
    versions: dict[str, ArtifactVersion] = field(default_factory=dict)
    latest_version: Optional[str] = None  # key of the most recent publish
    created_at: datetime = field(default_factory=datetime.utcnow)
    updated_at: datetime = field(default_factory=datetime.utcnow)
    tags: list[str] = field(default_factory=list)

    @property
    def full_name(self) -> str:
        """Namespace-qualified identifier, e.g. 'default/mymodule'."""
        return f"{self.namespace}/{self.name}"

    def add_version(self, version: ArtifactVersion) -> None:
        """Register *version* and mark it as the latest publish."""
        self.versions[version.version] = version
        self.latest_version = version.version
        self.updated_at = datetime.utcnow()

    def get_version(self, version: str = "latest") -> Optional[ArtifactVersion]:
        """Look up one version; 'latest' and '*' resolve to the newest publish."""
        target = self.latest_version if version in ("latest", "*") else version
        return self.versions.get(target)

    def to_dict(self) -> dict:
        """Serialize to a JSON-compatible dict, recursing into versions."""
        return {
            "name": self.name,
            "namespace": self.namespace,
            "description": self.description,
            "versions": {tag: ver.to_dict() for tag, ver in self.versions.items()},
            "latest_version": self.latest_version,
            "created_at": self.created_at.isoformat(),
            "updated_at": self.updated_at.isoformat(),
            "tags": self.tags,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "Artifact":
        """Inverse of to_dict(); rebuilds nested ArtifactVersion objects."""
        return cls(
            name=data["name"],
            namespace=data.get("namespace", "default"),
            description=data.get("description", ""),
            versions={
                tag: ArtifactVersion.from_dict(raw)
                for tag, raw in data.get("versions", {}).items()
            },
            latest_version=data.get("latest_version"),
            created_at=datetime.fromisoformat(data["created_at"]),
            updated_at=datetime.fromisoformat(data["updated_at"]),
            tags=data.get("tags", []),
        )
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class RegistryStorage:
    """File-based storage for registry artifacts.

    The whole registry index is persisted as one JSON file (index.json)
    inside *storage_path* and mirrored in memory as ``self._artifacts``.
    """

    def __init__(self, storage_path: Path):
        self.storage_path = Path(storage_path)
        self.storage_path.mkdir(parents=True, exist_ok=True)
        self._index_path = self.storage_path / "index.json"
        self._artifacts: dict[str, Artifact] = {}  # keyed by "namespace/name"
        self._load()

    def _load(self) -> None:
        """Populate the in-memory index from index.json, if it exists."""
        if self._index_path.exists():
            with open(self._index_path) as f:
                data = json.load(f)
            for full_name, artifact_data in data.get("artifacts", {}).items():
                self._artifacts[full_name] = Artifact.from_dict(artifact_data)

    def _save(self) -> None:
        """Persist the in-memory index atomically.

        Bug fix: the index used to be rewritten in place, so a crash
        mid-write could leave a truncated/corrupt index.json. Writing to a
        temp file and renaming it over the index makes the update atomic.
        """
        data = {
            "artifacts": {k: v.to_dict() for k, v in self._artifacts.items()},
            "updated_at": datetime.utcnow().isoformat(),
        }
        tmp_path = self._index_path.with_name(self._index_path.name + ".tmp")
        with open(tmp_path, "w") as f:
            json.dump(data, f, indent=2)
        # Path.replace is an atomic rename within the same directory.
        tmp_path.replace(self._index_path)

    def get(self, namespace: str, name: str) -> Optional[Artifact]:
        """Return the artifact stored under namespace/name, or None."""
        return self._artifacts.get(f"{namespace}/{name}")

    def list(self, namespace: Optional[str] = None) -> list[Artifact]:
        """All artifacts, optionally restricted to a single namespace."""
        if namespace:
            return [a for a in self._artifacts.values() if a.namespace == namespace]
        return list(self._artifacts.values())

    def save_artifact(self, artifact: Artifact) -> None:
        """Insert or replace *artifact* and persist the index immediately."""
        self._artifacts[artifact.full_name] = artifact
        self._save()

    def delete(self, namespace: str, name: str) -> bool:
        """Remove one artifact; True (and persist) only if it existed."""
        key = f"{namespace}/{name}"
        if key in self._artifacts:
            del self._artifacts[key]
            self._save()
            return True
        return False

    def search(self, query: str) -> list[Artifact]:
        """Case-insensitive substring match on name, description, and tags."""
        query = query.lower()
        results = []
        for artifact in self._artifacts.values():
            if (query in artifact.name.lower() or
                    query in artifact.description.lower() or
                    any(query in tag.lower() for tag in artifact.tags)):
                results.append(artifact)
        return results
|