unitysvc_services-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unitysvc_services/__init__.py +4 -0
- unitysvc_services/cli.py +21 -0
- unitysvc_services/format_data.py +145 -0
- unitysvc_services/list.py +245 -0
- unitysvc_services/models/__init__.py +6 -0
- unitysvc_services/models/base.py +352 -0
- unitysvc_services/models/listing_v1.py +72 -0
- unitysvc_services/models/provider_v1.py +53 -0
- unitysvc_services/models/seller_v1.py +110 -0
- unitysvc_services/models/service_v1.py +80 -0
- unitysvc_services/populate.py +186 -0
- unitysvc_services/publisher.py +925 -0
- unitysvc_services/query.py +471 -0
- unitysvc_services/scaffold.py +1039 -0
- unitysvc_services/update.py +293 -0
- unitysvc_services/utils.py +240 -0
- unitysvc_services/validator.py +515 -0
- unitysvc_services-0.1.0.dist-info/METADATA +172 -0
- unitysvc_services-0.1.0.dist-info/RECORD +23 -0
- unitysvc_services-0.1.0.dist-info/WHEEL +5 -0
- unitysvc_services-0.1.0.dist-info/entry_points.txt +2 -0
- unitysvc_services-0.1.0.dist-info/licenses/LICENSE +21 -0
- unitysvc_services-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,925 @@
"""Data publisher module for posting service data to UnitySVC backend."""

import json
import tomllib as toml
from pathlib import Path
from typing import Any

import httpx
import typer
from rich.console import Console


class ServiceDataPublisher:
    """Publishes service data to UnitySVC backend endpoints."""

    def __init__(self, base_url: str, api_key: str):
        self.base_url = base_url.rstrip("/")
        self.api_key = api_key
        self.client = httpx.Client(
            headers={
                "X-API-Key": api_key,
                "Content-Type": "application/json",
            },
            timeout=30.0,
        )

    def load_data_file(self, file_path: Path) -> dict[str, Any]:
        """Load data from JSON or TOML file."""
        if file_path.suffix == ".toml":
            with open(file_path, "rb") as f:
                return toml.load(f)
        elif file_path.suffix == ".json":
            with open(file_path, encoding="utf-8") as f:
                return json.load(f)
        else:
            raise ValueError(f"Unsupported file format: {file_path.suffix}")

    def load_file_content(self, file_path: Path, base_path: Path) -> str:
        """Load content from a file (text or binary)."""
        full_path = base_path / file_path if not file_path.is_absolute() else file_path

        if not full_path.exists():
            raise FileNotFoundError(f"File not found: {full_path}")

        # Try to read as text first
        try:
            with open(full_path, encoding="utf-8") as f:
                return f.read()
        except UnicodeDecodeError:
            # If it fails, read as binary and encode as base64
            import base64

            with open(full_path, "rb") as f:
                return base64.b64encode(f.read()).decode("ascii")

    def resolve_file_references(self, data: dict[str, Any], base_path: Path) -> dict[str, Any]:
        """Recursively resolve file references and include content in data."""
        result: dict[str, Any] = {}

        for key, value in data.items():
            if isinstance(value, dict):
                # Recursively process nested dictionaries
                result[key] = self.resolve_file_references(value, base_path)
            elif isinstance(value, list):
                # Process lists
                result[key] = [
                    (self.resolve_file_references(item, base_path) if isinstance(item, dict) else item)
                    for item in value
                ]
            elif key == "file_path" and isinstance(value, str):
                # This is a file reference - load the content
                # Store both the original path and the content
                result[key] = value
                # Add file_content field if not already present (for DocumentCreate compatibility)
                if "file_content" not in data:
                    try:
                        content = self.load_file_content(Path(value), base_path)
                        result["file_content"] = content
                    except Exception as e:
                        raise ValueError(f"Failed to load file content from '{value}': {e}")
            else:
                result[key] = value

        return result

    def post_service_offering(self, data_file: Path) -> dict[str, Any]:
        """Post service offering data to the backend.

        Extracts provider_name from the directory structure.
        Expected path: .../{provider_name}/services/{service_name}/...
        """
        # Load the data file
        data = self.load_data_file(data_file)

        # Resolve file references and include content
        base_path = data_file.parent
        data_with_content = self.resolve_file_references(data, base_path)

        # Extract provider_name from directory structure
        # Find the 'services' directory and use its parent as provider_name
        parts = data_file.parts
        try:
            services_idx = parts.index("services")
            provider_name = parts[services_idx - 1]
            data_with_content["provider_name"] = provider_name
        except (ValueError, IndexError):
            raise ValueError(
                f"Cannot extract provider_name from path: {data_file}. "
                f"Expected path to contain .../{{provider_name}}/services/..."
            )

        # Post to the endpoint
        response = self.client.post(
            f"{self.base_url}/publish/service_offering",
            json=data_with_content,
        )
        response.raise_for_status()
        return response.json()

    def post_service_listing(self, data_file: Path) -> dict[str, Any]:
        """Post service listing data to the backend.

        Extracts provider_name from directory structure and service info from service.json.
        Expected path: .../{provider_name}/services/{service_name}/svcreseller.json
        """
        # Load the listing data file
        data = self.load_data_file(data_file)

        # Resolve file references and include content
        base_path = data_file.parent
        data_with_content = self.resolve_file_references(data, base_path)

        # Extract provider_name from directory structure
        parts = data_file.parts
        try:
            services_idx = parts.index("services")
            provider_name = parts[services_idx - 1]
            data_with_content["provider_name"] = provider_name
        except (ValueError, IndexError):
            raise ValueError(
                f"Cannot extract provider_name from path: {data_file}. "
                f"Expected path to contain .../{{provider_name}}/services/..."
            )

        # If service_name is not in listing data, find it from service files in the same directory
        if "service_name" not in data_with_content or not data_with_content["service_name"]:
            # Find all service files in the same directory
            service_files = []
            for pattern in ["*.json", "*.toml"]:
                for file_path in data_file.parent.glob(pattern):
                    try:
                        file_data = self.load_data_file(file_path)
                        if file_data.get("schema") == "service_v1":
                            service_files.append((file_path, file_data))
                    except Exception:
                        continue

            if len(service_files) == 0:
                raise ValueError(
                    f"Cannot find any service_v1 files in {data_file.parent}. "
                    f"Listing files must be in the same directory as a service definition."
                )
            elif len(service_files) > 1:
                service_names = [data.get("name", "unknown") for _, data in service_files]
                raise ValueError(
                    f"Multiple services found in {data_file.parent}: {', '.join(service_names)}. "
                    f"Please add 'service_name' field to {data_file.name} to specify which "
                    f"service this listing belongs to."
                )
            else:
                # Exactly one service found - use it
                service_file, service_data = service_files[0]
                data_with_content["service_name"] = service_data.get("name")
                data_with_content["service_version"] = service_data.get("version")
        else:
            # service_name is provided in listing data, find the matching service to get version
            service_name = data_with_content["service_name"]
            service_found = False

            for pattern in ["*.json", "*.toml"]:
                for file_path in data_file.parent.glob(pattern):
                    try:
                        file_data = self.load_data_file(file_path)
                        if file_data.get("schema") == "service_v1" and file_data.get("name") == service_name:
                            data_with_content["service_version"] = file_data.get("version")
                            service_found = True
                            break
                    except Exception:
                        continue
                if service_found:
                    break

            if not service_found:
                raise ValueError(
                    f"Service '{service_name}' specified in {data_file.name} not found in {data_file.parent}."
                )

        # Find seller_name from seller definition in the data directory
        # Navigate up to find the data directory and look for seller file
        data_dir = data_file.parent
        while data_dir.name != "data" and data_dir.parent != data_dir:
            data_dir = data_dir.parent

        if data_dir.name != "data":
            raise ValueError(
                f"Cannot find 'data' directory in path: {data_file}. "
                f"Expected path structure includes a 'data' directory."
            )

        # Look for seller file in the data directory
        seller_file = None
        for pattern in ["seller.json", "seller.toml"]:
            potential_seller = data_dir / pattern
            if potential_seller.exists():
                seller_file = potential_seller
                break

        if not seller_file:
            raise ValueError(
                f"Cannot find seller.json or seller.toml in {data_dir}. "
                f"A seller definition is required in the data directory."
            )

        # Load seller data and extract name
        seller_data = self.load_data_file(seller_file)
        if seller_data.get("schema") != "seller_v1":
            raise ValueError(f"Seller file {seller_file} does not have schema='seller_v1'")

        seller_name = seller_data.get("name")
        if not seller_name:
            raise ValueError(f"Seller file {seller_file} missing 'name' field")

        data_with_content["seller_name"] = seller_name

        # Map listing_status to status if present
        if "listing_status" in data_with_content:
            data_with_content["status"] = data_with_content.pop("listing_status")

        # Post to the endpoint
        response = self.client.post(
            f"{self.base_url}/publish/service_listing",
            json=data_with_content,
        )
        response.raise_for_status()
        return response.json()

    def post_provider(self, data_file: Path) -> dict[str, Any]:
        """Post provider data to the backend."""
        # Load the data file
        data = self.load_data_file(data_file)

        # Resolve file references and include content
        base_path = data_file.parent
        data_with_content = self.resolve_file_references(data, base_path)

        # Post to the endpoint
        response = self.client.post(
            f"{self.base_url}/providers/",
            json=data_with_content,
        )
        response.raise_for_status()
        return response.json()

    def post_seller(self, data_file: Path) -> dict[str, Any]:
        """Post seller data to the backend."""
        # Load the data file
        data = self.load_data_file(data_file)

        # Resolve file references and include content
        base_path = data_file.parent
        data_with_content = self.resolve_file_references(data, base_path)

        # Post to the endpoint
        response = self.client.post(
            f"{self.base_url}/sellers/",
            json=data_with_content,
        )
        response.raise_for_status()
        return response.json()

    def list_service_offerings(self) -> list[dict[str, Any]]:
        """List all service offerings from the backend.

        Note: This endpoint doesn't exist yet in the backend.
        TODO: Add GET /publish/service_offering endpoint.
        """
        response = self.client.get(f"{self.base_url}/publish/service_offering")
        response.raise_for_status()
        result = response.json()
        # Backend returns {"data": [...], "count": N}
        return result.get("data", result) if isinstance(result, dict) else result

    def list_service_listings(self) -> list[dict[str, Any]]:
        """List all service listings from the backend."""
        response = self.client.get(f"{self.base_url}/services/")
        response.raise_for_status()
        result = response.json()
        # Backend returns {"data": [...], "count": N}
        return result.get("data", result) if isinstance(result, dict) else result

    def list_providers(self) -> list[dict[str, Any]]:
        """List all providers from the backend."""
        response = self.client.get(f"{self.base_url}/providers/")
        response.raise_for_status()
        result = response.json()
        # Backend returns {"data": [...], "count": N}
        return result.get("data", result) if isinstance(result, dict) else result

    def list_sellers(self) -> list[dict[str, Any]]:
        """List all sellers from the backend."""
        response = self.client.get(f"{self.base_url}/sellers/")
        response.raise_for_status()
        result = response.json()
        # Backend returns {"data": [...], "count": N}
        return result.get("data", result) if isinstance(result, dict) else result

    def update_service_offering_status(self, offering_id: int | str, status: str) -> dict[str, Any]:
        """
        Update the status of a service offering.

        Allowed statuses (UpstreamStatusEnum):
        - uploading: Service is being uploaded (not ready)
        - ready: Service is ready to be used
        - deprecated: Service is deprecated from upstream
        """
        response = self.client.patch(
            f"{self.base_url}/service_offering/{offering_id}/",
            json={"upstream_status": status},
        )
        response.raise_for_status()
        return response.json()

    def update_service_listing_status(self, listing_id: int | str, status: str) -> dict[str, Any]:
        """
        Update the status of a service listing.

        Allowed statuses (ListingStatusEnum):
        - unknown: Not yet determined
        - upstream_ready: Upstream is ready to be used
        - downstream_ready: Downstream is ready with proper routing, logging, and billing
        - ready: Operationally ready (with docs, metrics, and pricing)
        - in_service: Service is in service
        - upstream_deprecated: Service is deprecated from upstream
        - deprecated: Service is no longer offered to users
        """
        response = self.client.patch(
            f"{self.base_url}/service_listing/{listing_id}/",
            json={"listing_status": status},
        )
        response.raise_for_status()
        return response.json()

    def find_offering_files(self, data_dir: Path) -> list[Path]:
        """
        Find all service offering files in a directory tree.

        Searches all JSON and TOML files and checks for schema="service_v1".
        """
        offerings = []
        for pattern in ["*.json", "*.toml"]:
            for file_path in data_dir.rglob(pattern):
                try:
                    data = self.load_data_file(file_path)
                    if data.get("schema") == "service_v1":
                        offerings.append(file_path)
                except Exception:
                    # Skip files that can't be loaded or don't have schema field
                    pass
        return sorted(offerings)

    def find_listing_files(self, data_dir: Path) -> list[Path]:
        """
        Find all service listing files in a directory tree.

        Searches all JSON and TOML files and checks for schema="listing_v1".
        """
        listings = []
        for pattern in ["*.json", "*.toml"]:
            for file_path in data_dir.rglob(pattern):
                try:
                    data = self.load_data_file(file_path)
                    if data.get("schema") == "listing_v1":
                        listings.append(file_path)
                except Exception:
                    # Skip files that can't be loaded or don't have schema field
                    pass
        return sorted(listings)

    def find_provider_files(self, data_dir: Path) -> list[Path]:
        """
        Find all provider files in a directory tree.

        Searches all JSON and TOML files and checks for schema="provider_v1".
        """
        providers = []
        for pattern in ["*.json", "*.toml"]:
            for file_path in data_dir.rglob(pattern):
                try:
                    data = self.load_data_file(file_path)
                    if data.get("schema") == "provider_v1":
                        providers.append(file_path)
                except Exception:
                    # Skip files that can't be loaded or don't have schema field
                    pass
        return sorted(providers)

    def find_seller_files(self, data_dir: Path) -> list[Path]:
        """
        Find all seller files in a directory tree.

        Searches all JSON and TOML files and checks for schema="seller_v1".
        """
        sellers = []
        for pattern in ["*.json", "*.toml"]:
            for file_path in data_dir.rglob(pattern):
                try:
                    data = self.load_data_file(file_path)
                    if data.get("schema") == "seller_v1":
                        sellers.append(file_path)
                except Exception:
                    # Skip files that can't be loaded or don't have schema field
                    pass
        return sorted(sellers)

    def publish_all_offerings(self, data_dir: Path) -> dict[str, Any]:
        """
        Publish all service offerings found in a directory tree.

        Validates data consistency before publishing.
        Returns a summary of successes and failures.
        """
        from .validator import DataValidator

        # Validate all service directories first
        validator = DataValidator(data_dir, data_dir.parent / "schema")
        validation_errors = validator.validate_all_service_directories(data_dir)
        if validation_errors:
            return {
                "total": 0,
                "success": 0,
                "failed": 0,
                "errors": [{"file": "validation", "error": error} for error in validation_errors],
            }

        offering_files = self.find_offering_files(data_dir)
        results: dict[str, Any] = {
            "total": len(offering_files),
            "success": 0,
            "failed": 0,
            "errors": [],
        }

        for offering_file in offering_files:
            try:
                self.post_service_offering(offering_file)
                results["success"] += 1
            except Exception as e:
                results["failed"] += 1
                results["errors"].append({"file": str(offering_file), "error": str(e)})

        return results

    def publish_all_listings(self, data_dir: Path) -> dict[str, Any]:
        """
        Publish all service listings found in a directory tree.

        Validates data consistency before publishing.
        Returns a summary of successes and failures.
        """
        from .validator import DataValidator

        # Validate all service directories first
        validator = DataValidator(data_dir, data_dir.parent / "schema")
        validation_errors = validator.validate_all_service_directories(data_dir)
        if validation_errors:
            return {
                "total": 0,
                "success": 0,
                "failed": 0,
                "errors": [{"file": "validation", "error": error} for error in validation_errors],
            }

        listing_files = self.find_listing_files(data_dir)
        results: dict[str, Any] = {"total": len(listing_files), "success": 0, "failed": 0, "errors": []}

        for listing_file in listing_files:
            try:
                self.post_service_listing(listing_file)
                results["success"] += 1
            except Exception as e:
                results["failed"] += 1
                results["errors"].append({"file": str(listing_file), "error": str(e)})

        return results

    def publish_all_providers(self, data_dir: Path) -> dict[str, Any]:
        """
        Publish all providers found in a directory tree.

        Returns a summary of successes and failures.
        """
        provider_files = self.find_provider_files(data_dir)
        results: dict[str, Any] = {
            "total": len(provider_files),
            "success": 0,
            "failed": 0,
            "errors": [],
        }

        for provider_file in provider_files:
            try:
                self.post_provider(provider_file)
                results["success"] += 1
            except Exception as e:
                results["failed"] += 1
                results["errors"].append({"file": str(provider_file), "error": str(e)})

        return results

    def publish_all_sellers(self, data_dir: Path) -> dict[str, Any]:
        """
        Publish all sellers found in a directory tree.

        Returns a summary of successes and failures.
        """
        seller_files = self.find_seller_files(data_dir)
        results: dict[str, Any] = {
            "total": len(seller_files),
            "success": 0,
            "failed": 0,
            "errors": [],
        }

        for seller_file in seller_files:
            try:
                self.post_seller(seller_file)
                results["success"] += 1
            except Exception as e:
                results["failed"] += 1
                results["errors"].append({"file": str(seller_file), "error": str(e)})

        return results

    def close(self):
        """Close the HTTP client."""
        self.client.close()

    def __enter__(self):
        """Context manager entry."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit."""
        self.close()


# CLI commands for publishing
app = typer.Typer(help="Publish data to backend")
console = Console()


@app.command("providers")
def publish_providers(
    data_path: Path | None = typer.Argument(
        None,
        help="Path to provider file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
    ),
    backend_url: str | None = typer.Option(
        None,
        "--backend-url",
        "-u",
        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
    ),
    api_key: str | None = typer.Option(
        None,
        "--api-key",
        "-k",
        help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
    ),
):
    """Publish provider(s) from a file or directory."""
    import os

    # Set data path
    if data_path is None:
        data_path_str = os.getenv("UNITYSVC_DATA_DIR")
        if data_path_str:
            data_path = Path(data_path_str)
        else:
            data_path = Path.cwd() / "data"

    if not data_path.is_absolute():
        data_path = Path.cwd() / data_path

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Get backend URL from argument or environment
    backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL")
    if not backend_url:
        console.print(
            "[red]✗[/red] Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    # Get API key from argument or environment
    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
    if not api_key:
        console.print(
            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    try:
        with ServiceDataPublisher(backend_url, api_key) as publisher:
            # Handle single file
            if data_path.is_file():
                console.print(f"[blue]Publishing provider:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                result = publisher.post_provider(data_path)
                console.print("[green]✓[/green] Provider published successfully!")
                console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
            # Handle directory
            else:
                console.print(f"[blue]Scanning for providers in:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                results = publisher.publish_all_providers(data_path)

                # Display summary
                console.print("\n[bold]Publishing Summary:[/bold]")
                console.print(f" Total found: {results['total']}")
                console.print(f" [green]✓ Success:[/green] {results['success']}")
                console.print(f" [red]✗ Failed:[/red] {results['failed']}")

                # Display errors if any
                if results["errors"]:
                    console.print("\n[bold red]Errors:[/bold red]")
                    for error in results["errors"]:
                        console.print(f" [red]✗[/red] {error['file']}")
                        console.print(f" {error['error']}")

                if results["failed"] > 0:
                    raise typer.Exit(code=1)

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish providers: {e}", style="bold red")
        raise typer.Exit(code=1)


@app.command("sellers")
def publish_sellers(
    data_path: Path | None = typer.Argument(
        None,
        help="Path to seller file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
    ),
    backend_url: str | None = typer.Option(
        None,
        "--backend-url",
        "-u",
        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
    ),
    api_key: str | None = typer.Option(
        None,
        "--api-key",
        "-k",
        help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
    ),
):
    """Publish seller(s) from a file or directory."""
    import os

    # Set data path
    if data_path is None:
        data_path_str = os.getenv("UNITYSVC_DATA_DIR")
        if data_path_str:
            data_path = Path(data_path_str)
        else:
            data_path = Path.cwd() / "data"

    if not data_path.is_absolute():
        data_path = Path.cwd() / data_path

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Get backend URL
    backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL")
    if not backend_url:
        console.print(
            "[red]✗[/red] Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    # Get API key
    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
    if not api_key:
        console.print(
            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    try:
        with ServiceDataPublisher(backend_url, api_key) as publisher:
            # Handle single file
            if data_path.is_file():
                console.print(f"[blue]Publishing seller:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                result = publisher.post_seller(data_path)
                console.print("[green]✓[/green] Seller published successfully!")
                console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
            # Handle directory
            else:
                console.print(f"[blue]Scanning for sellers in:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                results = publisher.publish_all_sellers(data_path)

                console.print("\n[bold]Publishing Summary:[/bold]")
                console.print(f" Total found: {results['total']}")
                console.print(f" [green]✓ Success: {results['success']}[/green]")
                console.print(f" [red]✗ Failed: {results['failed']}[/red]")

                if results["errors"]:
                    console.print("\n[bold red]Errors:[/bold red]")
                    for error in results["errors"]:
                        console.print(f" [red]✗[/red] {error['file']}")
                        console.print(f" {error['error']}")
                    raise typer.Exit(code=1)
                else:
                    console.print("\n[green]✓[/green] All sellers published successfully!")

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish sellers: {e}", style="bold red")
        raise typer.Exit(code=1)


@app.command("offerings")
def publish_offerings(
    data_path: Path | None = typer.Argument(
        None,
        help="Path to service offering file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
    ),
    backend_url: str | None = typer.Option(
        None,
        "--backend-url",
        "-u",
        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
    ),
    api_key: str | None = typer.Option(
        None,
        "--api-key",
        "-k",
        help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
    ),
):
    """Publish service offering(s) from a file or directory."""
    import os

    # Set data path
    if data_path is None:
        data_path_str = os.getenv("UNITYSVC_DATA_DIR")
        if data_path_str:
            data_path = Path(data_path_str)
        else:
            data_path = Path.cwd() / "data"

    if not data_path.is_absolute():
        data_path = Path.cwd() / data_path

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Get backend URL from argument or environment
    backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL")
    if not backend_url:
        console.print(
            "[red]✗[/red] Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    # Get API key from argument or environment
    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
    if not api_key:
        console.print(
            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    try:
        with ServiceDataPublisher(backend_url, api_key) as publisher:
            # Handle single file
            if data_path.is_file():
                console.print(f"[blue]Publishing service offering:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                result = publisher.post_service_offering(data_path)
                console.print("[green]✓[/green] Service offering published successfully!")
                console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
            # Handle directory
            else:
                console.print(f"[blue]Scanning for service offerings in:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                results = publisher.publish_all_offerings(data_path)

                console.print("\n[bold]Publishing Summary:[/bold]")
                console.print(f" Total found: {results['total']}")
                console.print(f" [green]✓ Success: {results['success']}[/green]")
                console.print(f" [red]✗ Failed: {results['failed']}[/red]")

                if results["errors"]:
                    console.print("\n[bold red]Errors:[/bold red]")
                    for error in results["errors"]:
                        console.print(f" [red]✗[/red] {error['file']}")
                        console.print(f" {error['error']}")
                    raise typer.Exit(code=1)
                else:
                    console.print("\n[green]✓[/green] All service offerings published successfully!")

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red")
        raise typer.Exit(code=1)


@app.command("listings")
def publish_listings(
    data_path: Path | None = typer.Argument(
        None,
        help="Path to service listing file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
    ),
    backend_url: str | None = typer.Option(
        None,
        "--backend-url",
        "-u",
        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
    ),
    api_key: str | None = typer.Option(
        None,
        "--api-key",
        "-k",
        help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
    ),
):
    """Publish service listing(s) from a file or directory."""
    import os

    # Set data path
    if data_path is None:
        data_path_str = os.getenv("UNITYSVC_DATA_DIR")
        if data_path_str:
            data_path = Path(data_path_str)
        else:
            data_path = Path.cwd() / "data"

    if not data_path.is_absolute():
        data_path = Path.cwd() / data_path

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Get backend URL from argument or environment
    backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL")
    if not backend_url:
        console.print(
            "[red]✗[/red] Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    # Get API key from argument or environment
    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
    if not api_key:
        console.print(
            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
            style="bold red",
        )
        raise typer.Exit(code=1)

    try:
        with ServiceDataPublisher(backend_url, api_key) as publisher:
            # Handle single file
            if data_path.is_file():
                console.print(f"[blue]Publishing service listing:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                result = publisher.post_service_listing(data_path)
                console.print("[green]✓[/green] Service listing published successfully!")
                console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
            # Handle directory
            else:
                console.print(f"[blue]Scanning for service listings in:[/blue] {data_path}")
                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
                results = publisher.publish_all_listings(data_path)

                console.print("\n[bold]Publishing Summary:[/bold]")
                console.print(f" Total found: {results['total']}")
                console.print(f" [green]✓ Success: {results['success']}[/green]")
                console.print(f" [red]✗ Failed: {results['failed']}[/red]")

                if results["errors"]:
                    console.print("\n[bold red]Errors:[/bold red]")
                    for error in results["errors"]:
                        console.print(f" [red]✗[/red] {error['file']}")
                        console.print(f" {error['error']}")
                    raise typer.Exit(code=1)
                else:
                    console.print("\n[green]✓[/green] All service listings published successfully!")

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red")
        raise typer.Exit(code=1)
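
For orientation, here is a minimal usage sketch of the ServiceDataPublisher class defined above; the backend URL, API key, and data directory are placeholder values, not part of the package:

from pathlib import Path

from unitysvc_services.publisher import ServiceDataPublisher

# Placeholder values for illustration only
backend_url = "https://backend.example.com/api/v1"
api_key = "example-api-key"

with ServiceDataPublisher(backend_url, api_key) as publisher:
    data_dir = Path("data")  # directory containing provider/seller/service definition files
    for publish in (
        publisher.publish_all_providers,
        publisher.publish_all_sellers,
        publisher.publish_all_offerings,
        publisher.publish_all_listings,
    ):
        summary = publish(data_dir)
        print(f"{publish.__name__}: {summary['success']} succeeded, {summary['failed']} failed")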