pactown 0.1.4__py3-none-any.whl → 0.1.47__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pactown/parallel.py CHANGED
@@ -5,13 +5,13 @@ from __future__ import annotations
  import asyncio
  import time
  from concurrent.futures import ThreadPoolExecutor, as_completed
- from dataclasses import dataclass, field
+ from dataclasses import dataclass
  from pathlib import Path
- from typing import Callable, Optional, Any
  from threading import Lock
+ from typing import Any, Callable, Optional

  from rich.console import Console
- from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TaskProgressColumn
+ from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn

  console = Console()

@@ -34,29 +34,29 @@ def run_parallel(
  ) -> dict[str, TaskResult]:
      """
      Run multiple tasks in parallel using ThreadPoolExecutor.
-
+
      Args:
          tasks: Dict of {name: callable} to run
          max_workers: Maximum parallel workers
          show_progress: Show progress bar
          description: Progress description
-
+
      Returns:
          Dict of {name: TaskResult}
      """
      results: dict[str, TaskResult] = {}
-
+
      if not tasks:
          return results
-
+
      with ThreadPoolExecutor(max_workers=max_workers) as executor:
          futures = {}
          start_times = {}
-
+
          for name, func in tasks.items():
              start_times[name] = time.time()
              futures[executor.submit(func)] = name
-
+
          if show_progress:
              with Progress(
                  SpinnerColumn(),
@@ -66,11 +66,11 @@ def run_parallel(
                  console=console,
              ) as progress:
                  task = progress.add_task(description, total=len(tasks))
-
+
                  for future in as_completed(futures):
                      name = futures[future]
                      duration = time.time() - start_times[name]
-
+
                      try:
                          result = future.result()
                          results[name] = TaskResult(
@@ -86,13 +86,13 @@ def run_parallel(
                              duration=duration,
                              error=str(e),
                          )
-
+
                      progress.advance(task)
          else:
              for future in as_completed(futures):
                  name = futures[future]
                  duration = time.time() - start_times[name]
-
+
                  try:
                      result = future.result()
                      results[name] = TaskResult(
@@ -108,7 +108,7 @@ def run_parallel(
                          duration=duration,
                          error=str(e),
                      )
-
+
      return results


@@ -120,23 +120,23 @@ def run_in_dependency_waves(
  ) -> dict[str, TaskResult]:
      """
      Run tasks in waves based on dependencies.
-
+
      Services with no unmet dependencies run in parallel.
      When a wave completes, next wave starts.
-
+
      Args:
          tasks: Dict of {name: callable}
          dependencies: Dict of {name: [dependency_names]}
          max_workers: Max parallel workers per wave
          on_complete: Callback when task completes
-
+
      Returns:
          Dict of {name: TaskResult}
      """
      results: dict[str, TaskResult] = {}
      completed = set()
      remaining = set(tasks.keys())
-
+
      while remaining:
          # Find tasks with all dependencies satisfied
          ready = []
@@ -144,29 +144,29 @@ def run_in_dependency_waves(
              deps = dependencies.get(name, [])
              if all(d in completed for d in deps):
                  ready.append(name)
-
+
          if not ready:
              # Circular dependency or missing dependency
              raise ValueError(f"Cannot resolve dependencies for: {remaining}")
-
+
          # Run ready tasks in parallel
          wave_tasks = {name: tasks[name] for name in ready}
          wave_results = run_parallel(wave_tasks, max_workers=max_workers, show_progress=False)
-
+
          for name, result in wave_results.items():
              results[name] = result
              remaining.remove(name)
-
+
              if result.success:
                  completed.add(name)
-
+
              if on_complete:
                  on_complete(name, result)
-
+
          # If any task in wave failed, stop
          if any(not r.success for r in wave_results.values()):
              break
-
+
      return results


@@ -176,12 +176,12 @@ async def run_parallel_async(
  ) -> dict[str, TaskResult]:
      """
      Run tasks using asyncio with a semaphore for concurrency control.
-
+
      Uses run_in_executor for CPU-bound tasks.
      """
      results: dict[str, TaskResult] = {}
      semaphore = asyncio.Semaphore(max_concurrent)
-
+
      async def run_task(name: str, func: Callable) -> TaskResult:
          async with semaphore:
              start = time.time()
@@ -201,23 +201,23 @@ async def run_parallel_async(
                      duration=time.time() - start,
                      error=str(e),
                  )
-
+
      coros = [run_task(name, func) for name, func in tasks.items()]
      task_results = await asyncio.gather(*coros)
-
+
      for result in task_results:
          results[result.name] = result
-
+
      return results


  class ParallelSandboxBuilder:
      """Build multiple sandboxes in parallel."""
-
+
      def __init__(self, max_workers: int = 4):
          self.max_workers = max_workers
          self._lock = Lock()
-
+
      def build_sandboxes(
          self,
          services: list[tuple[str, Path, Callable]],
@@ -225,22 +225,22 @@ class ParallelSandboxBuilder:
      ) -> dict[str, TaskResult]:
          """
          Build sandboxes for multiple services in parallel.
-
+
          Args:
              services: List of (name, readme_path, build_func)
              on_complete: Callback(name, success, duration)
-
+
          Returns:
              Dict of results
          """
          tasks = {}
          for name, readme_path, build_func in services:
              tasks[name] = build_func
-
+
          def callback(name: str, result: TaskResult):
              if on_complete:
                  on_complete(name, result.success, result.duration)
-
+
          return run_parallel(
              tasks,
              max_workers=self.max_workers,
@@ -253,16 +253,16 @@ def format_parallel_results(results: dict[str, TaskResult]) -> str:
      """Format parallel execution results for display."""
      lines = []
      total_time = sum(r.duration for r in results.values())
-
+
      successful = [r for r in results.values() if r.success]
      failed = [r for r in results.values() if not r.success]
-
+
      lines.append(f"Completed: {len(successful)}/{len(results)} tasks")
      lines.append(f"Total time: {total_time:.2f}s")
-
+
      if failed:
          lines.append("\nFailed:")
          for r in failed:
              lines.append(f" ✗ {r.name}: {r.error}")
-
+
      return "\n".join(lines)
pactown/platform.py ADDED
@@ -0,0 +1,146 @@
+ from __future__ import annotations
+
+ import re
+ from typing import Literal, Optional
+
+ from pydantic import BaseModel, Field, field_validator
+
+ SubdomainSeparator = Literal["-", "."]
+
+
+ def coerce_subdomain_separator(value: Optional[str]) -> SubdomainSeparator:
+     return "." if value == "." else "-"
+
+
+ def normalize_host(value: str) -> str:
+     v = (value or "").strip().lower()
+     if v.startswith("http://"):
+         v = v[len("http://") :]
+     elif v.startswith("https://"):
+         v = v[len("https://") :]
+     v = v.split("/", 1)[0]
+     v = v.split(":", 1)[0]
+     v = v.strip(".")
+     return v
+
+
+ def normalize_domain(value: str) -> str:
+     v = normalize_host(value)
+     if v.startswith("www."):
+         v = v[len("www.") :]
+     return v
+
+
+ def is_local_domain(domain: str) -> bool:
+     d = normalize_domain(domain)
+     return d in {"localhost", "127.0.0.1", "0.0.0.0"}
+
+
+ def build_origin(*, scheme: Literal["http", "https"], host: str, port: Optional[int] = None) -> str:
+     h = normalize_host(host)
+     if port is None:
+         return f"{scheme}://{h}"
+     return f"{scheme}://{h}:{int(port)}"
+
+
+ def web_base_url(domain: str, web_host_port: int) -> str:
+     if is_local_domain(domain):
+         return build_origin(scheme="http", host="localhost", port=int(web_host_port))
+     return build_origin(scheme="https", host=normalize_domain(domain))
+
+
+ def api_base_url(domain: str, api_host_port: int) -> str:
+     if is_local_domain(domain):
+         return build_origin(scheme="http", host="localhost", port=int(api_host_port))
+     return build_origin(scheme="https", host=f"api.{normalize_domain(domain)}")
+
+
+ def to_dns_label(value: str, *, max_len: int = 63, fallback: str = "x") -> str:
+     v = (value or "").lower().strip()
+     v = re.sub(r"[^a-z0-9]+", "-", v)
+     v = re.sub(r"-+", "-", v).strip("-")
+     if not v:
+         v = fallback
+     return v[:max_len]
+
+
+ class DomainConfig(BaseModel):
+     domain: str = Field(default="localhost")
+     subdomain_separator: SubdomainSeparator = Field(default="-")
+
+     @field_validator("domain")
+     @classmethod
+     def _normalize_domain(cls, v: str) -> str:
+         out = normalize_domain(v)
+         if not out:
+             return "localhost"
+         return out
+
+     @field_validator("subdomain_separator")
+     @classmethod
+     def _normalize_separator(cls, v: str) -> SubdomainSeparator:
+         return coerce_subdomain_separator(v)
+
+
+ class ProjectHostParts(BaseModel):
+     project_id: int
+     username: str
+
+
+ _PROJECT_SUBDOMAIN_RE = re.compile(r"^(?P<project_id>\d+)(?:-|\.)(?P<username>[a-z0-9-]+)$")
+
+
+ def parse_project_subdomain(subdomain: str) -> Optional[ProjectHostParts]:
+     s = (subdomain or "").strip().lower()
+     m = _PROJECT_SUBDOMAIN_RE.match(s)
+     if not m:
+         return None
+     try:
+         project_id = int(m.group("project_id"))
+     except Exception:
+         return None
+     username = m.group("username")
+     if not username:
+         return None
+     return ProjectHostParts(project_id=project_id, username=username)
+
+
+ def build_project_subdomain(project_id: int, username: str, *, separator: SubdomainSeparator) -> str:
+     pid = int(project_id)
+     sep = coerce_subdomain_separator(separator)
+
+     prefix = f"{pid}{sep}"
+     max_username_len = max(1, 63 - len(prefix))
+     uname = to_dns_label(username, max_len=max_username_len, fallback="user")
+
+     return f"{pid}{sep}{uname}"
+
+
+ def build_project_host(project_id: int, username: str, *, domain: str, separator: SubdomainSeparator) -> str:
+     base_domain = normalize_domain(domain)
+     sub = build_project_subdomain(project_id, username, separator=separator)
+     return f"{sub}.{base_domain}"
+
+
+ def parse_project_host(host: str, *, domain: str) -> Optional[ProjectHostParts]:
+     h = normalize_host(host)
+     base_domain = normalize_domain(domain)
+     suffix = f".{base_domain}" if base_domain else ""
+
+     if not suffix or not h.endswith(suffix):
+         return None
+
+     subdomain = h[: -len(suffix)]
+     return parse_project_subdomain(subdomain)
+
+
+ def build_service_subdomain(service_name: str, username: str, *, separator: SubdomainSeparator) -> str:
+     sep = coerce_subdomain_separator(separator)
+     service_label = to_dns_label(service_name, max_len=20, fallback="app")
+     tenant_label = to_dns_label(username, max_len=30, fallback="user")
+
+     if sep == ".":
+         return f"{service_label}.{tenant_label}".strip(".")
+
+     label = f"{service_label}-{tenant_label}".strip("-")
+     return label[:63].strip("-")
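
A quick sketch of the new host helpers in platform.py; the domain, project id, and username values below are made up for illustration.

from pactown.platform import (
    DomainConfig,
    build_project_host,
    parse_project_host,
    web_base_url,
)

# Normalization strips the scheme, "www." prefix, path, and port.
cfg = DomainConfig(domain="https://www.Example.com/", subdomain_separator="-")
print(cfg.domain)  # -> "example.com"

# 42 + "Alice Smith" -> "42-alice-smith.example.com" (username coerced to a DNS label).
host = build_project_host(42, "Alice Smith", domain=cfg.domain, separator=cfg.subdomain_separator)
print(host)

# Round-trip back to the structured parts.
parts = parse_project_host(host, domain=cfg.domain)
if parts is not None:
    print(parts.project_id, parts.username)  # -> 42 alice-smith

# Local domains stay on plain http with the mapped port; everything else gets https.
print(web_base_url("localhost", 3000))   # -> http://localhost:3000
print(web_base_url(cfg.domain, 3000))    # -> https://example.com
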
@@ -1,8 +1,8 @@
  """Pactown registry - local artifact registry for markpact modules."""

- from .server import create_app
  from .client import RegistryClient
  from .models import Artifact, ArtifactVersion
+ from .server import create_app

  __all__ = [
      "create_app",
@@ -1,29 +1,28 @@
  """Client for pactown registry."""

  from pathlib import Path
- from typing import Optional, Any
- import hashlib
+ from typing import Optional

  import httpx


  class RegistryClient:
      """Client for interacting with pactown registry."""
-
+
      def __init__(self, base_url: str = "http://localhost:8800", timeout: float = 30.0):
          self.base_url = base_url.rstrip("/")
          self.timeout = timeout
          self._client = httpx.Client(timeout=timeout)
-
+
      def __enter__(self):
          return self
-
+
      def __exit__(self, *args):
          self._client.close()
-
+
      def close(self):
          self._client.close()
-
+
      def health(self) -> bool:
          """Check if registry is healthy."""
          try:
@@ -31,10 +30,10 @@ class RegistryClient:
              return response.status_code == 200
          except Exception:
              return False
-
+
      def list_artifacts(
-         self,
-         namespace: Optional[str] = None,
+         self,
+         namespace: Optional[str] = None,
          search: Optional[str] = None
      ) -> list[dict]:
          """List artifacts in the registry."""
@@ -43,11 +42,11 @@ class RegistryClient:
              params["namespace"] = namespace
          if search:
              params["search"] = search
-
+
          response = self._client.get(f"{self.base_url}/v1/artifacts", params=params)
          response.raise_for_status()
          return response.json()
-
+
      def get_artifact(self, name: str, namespace: str = "default") -> Optional[dict]:
          """Get artifact information."""
          try:
@@ -60,11 +59,11 @@ class RegistryClient:
              return response.json()
          except httpx.HTTPStatusError:
              return None
-
+
      def get_version(
-         self,
-         name: str,
-         version: str = "latest",
+         self,
+         name: str,
+         version: str = "latest",
          namespace: str = "default"
      ) -> Optional[dict]:
          """Get specific version information."""
@@ -78,11 +77,11 @@ class RegistryClient:
              return response.json()
          except httpx.HTTPStatusError:
              return None
-
+
      def get_readme(
-         self,
-         name: str,
-         version: str = "latest",
+         self,
+         name: str,
+         version: str = "latest",
          namespace: str = "default"
      ) -> Optional[str]:
          """Get README content for a specific version."""
@@ -96,7 +95,7 @@ class RegistryClient:
              return response.json().get("content")
          except httpx.HTTPStatusError:
              return None
-
+
      def publish(
          self,
          name: str,
@@ -111,10 +110,10 @@ class RegistryClient:
          """Publish an artifact to the registry."""
          if readme_path:
              readme_content = Path(readme_path).read_text()
-
+
          if not readme_content:
              raise ValueError("Either readme_path or readme_content must be provided")
-
+
          payload = {
              "name": name,
              "version": version,
@@ -124,31 +123,31 @@ class RegistryClient:
              "tags": tags or [],
              "metadata": metadata or {},
          }
-
+
          try:
              response = self._client.post(f"{self.base_url}/v1/publish", json=payload)
              response.raise_for_status()
              return response.json()
          except httpx.HTTPStatusError as e:
              return {"success": False, "error": str(e)}
-
+
      def pull(
-         self,
-         name: str,
-         version: str = "latest",
+         self,
+         name: str,
+         version: str = "latest",
          namespace: str = "default",
          output_path: Optional[Path] = None,
      ) -> Optional[str]:
          """Pull an artifact from the registry."""
          readme = self.get_readme(name, version, namespace)
-
+
          if readme and output_path:
              output_path = Path(output_path)
              output_path.parent.mkdir(parents=True, exist_ok=True)
              output_path.write_text(readme)
-
+
          return readme
-
+
      def delete(self, name: str, namespace: str = "default") -> bool:
          """Delete an artifact from the registry."""
          try:
@@ -158,7 +157,7 @@ class RegistryClient:
              return response.status_code == 200
          except httpx.HTTPStatusError:
              return False
-
+
      def list_namespaces(self) -> list[str]:
          """List all namespaces."""
          try:
@@ -171,30 +170,30 @@

  class AsyncRegistryClient:
      """Async client for pactown registry."""
-
+
      def __init__(self, base_url: str = "http://localhost:8800", timeout: float = 30.0):
          self.base_url = base_url.rstrip("/")
          self.timeout = timeout
          self._client = httpx.AsyncClient(timeout=timeout)
-
+
      async def __aenter__(self):
          return self
-
+
      async def __aexit__(self, *args):
          await self._client.aclose()
-
+
      async def close(self):
          await self._client.aclose()
-
+
      async def health(self) -> bool:
          try:
              response = await self._client.get(f"{self.base_url}/health")
              return response.status_code == 200
          except Exception:
              return False
-
+
      async def list_artifacts(
-         self,
+         self,
          namespace: Optional[str] = None,
          search: Optional[str] = None,
      ) -> list[dict]:
@@ -203,15 +202,15 @@ class AsyncRegistryClient:
              params["namespace"] = namespace
          if search:
              params["search"] = search
-
+
          response = await self._client.get(f"{self.base_url}/v1/artifacts", params=params)
          response.raise_for_status()
          return response.json()
-
+
      async def get_readme(
-         self,
-         name: str,
-         version: str = "latest",
+         self,
+         name: str,
+         version: str = "latest",
          namespace: str = "default"
      ) -> Optional[str]:
          try:
@@ -224,7 +223,7 @@ class AsyncRegistryClient:
              return response.json().get("content")
          except httpx.HTTPStatusError:
              return None
-
+
      async def publish(
          self,
          name: str,
@@ -244,7 +243,7 @@ class AsyncRegistryClient:
              "tags": tags or [],
              "metadata": metadata or {},
          }
-
+
          try:
              response = await self._client.post(f"{self.base_url}/v1/publish", json=payload)
              response.raise_for_status()
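
Finally, a small sketch of how the synchronous registry client diffed above could be used. The import path is not shown in this diff; pactown.registry below is an assumption based on the registry package docstring, and the artifact name is hypothetical.

from pathlib import Path

# Import path assumed from the registry package __init__ above; adjust if the
# module actually lives elsewhere.
from pactown.registry import RegistryClient

with RegistryClient(base_url="http://localhost:8800") as client:
    if not client.health():
        raise SystemExit("registry is not reachable")

    # Optionally filter by namespace or search term.
    for artifact in client.list_artifacts(search="markpact"):
        print(artifact)

    # Fetch a README and write it to disk in one call ("hello-service" is hypothetical).
    readme = client.pull("hello-service", version="latest", output_path=Path("hello-service.md"))
    if readme is None:
        print("artifact not found")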