unitysvc-services 0.1.0-py3-none-any.whl → 0.1.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unitysvc_services/models/base.py +118 -34
- unitysvc_services/models/listing_v1.py +8 -4
- unitysvc_services/models/provider_v1.py +13 -3
- unitysvc_services/models/seller_v1.py +11 -7
- unitysvc_services/publisher.py +169 -209
- unitysvc_services/query.py +31 -13
- unitysvc_services/update.py +6 -1
- unitysvc_services/utils.py +110 -8
- unitysvc_services/validator.py +183 -18
- {unitysvc_services-0.1.0.dist-info → unitysvc_services-0.1.1.dist-info}/METADATA +2 -1
- unitysvc_services-0.1.1.dist-info/RECORD +23 -0
- unitysvc_services-0.1.0.dist-info/RECORD +0 -23
- {unitysvc_services-0.1.0.dist-info → unitysvc_services-0.1.1.dist-info}/WHEEL +0 -0
- {unitysvc_services-0.1.0.dist-info → unitysvc_services-0.1.1.dist-info}/entry_points.txt +0 -0
- {unitysvc_services-0.1.0.dist-info → unitysvc_services-0.1.1.dist-info}/licenses/LICENSE +0 -0
- {unitysvc_services-0.1.0.dist-info → unitysvc_services-0.1.1.dist-info}/top_level.txt +0 -0
unitysvc_services/publisher.py
CHANGED
@@ -1,6 +1,8 @@
 """Data publisher module for posting service data to UnitySVC backend."""
 
+import base64
 import json
+import os
 import tomllib as toml
 from pathlib import Path
 from typing import Any
@@ -9,6 +11,10 @@ import httpx
 import typer
 from rich.console import Console
 
+from .models.base import ProviderStatusEnum, SellerStatusEnum
+from .utils import convert_convenience_fields_to_documents, find_files_by_schema
+from .validator import DataValidator
+
 
 class ServiceDataPublisher:
     """Publishes service data to UnitySVC backend endpoints."""
@@ -48,12 +54,12 @@ class ServiceDataPublisher:
             return f.read()
         except UnicodeDecodeError:
             # If it fails, read as binary and encode as base64
-            import base64
-
             with open(full_path, "rb") as f:
                 return base64.b64encode(f.read()).decode("ascii")
 
-    def resolve_file_references(self, data: dict[str, Any], base_path: Path) -> dict[str, Any]:
+    def resolve_file_references(
+        self, data: dict[str, Any], base_path: Path
+    ) -> dict[str, Any]:
         """Recursively resolve file references and include content in data."""
         result: dict[str, Any] = {}
 
@@ -64,7 +70,11 @@ class ServiceDataPublisher:
             elif isinstance(value, list):
                 # Process lists
                 result[key] = [
-                    (self.resolve_file_references(item, base_path) if isinstance(item, dict) else item)
+                    (
+                        self.resolve_file_references(item, base_path)
+                        if isinstance(item, dict)
+                        else item
+                    )
                     for item in value
                 ]
             elif key == "file_path" and isinstance(value, str):
@@ -77,7 +87,9 @@ class ServiceDataPublisher:
                     content = self.load_file_content(Path(value), base_path)
                     result["file_content"] = content
                 except Exception as e:
-                    raise ValueError(f"Failed to load file content from '{value}': {e}")
+                    raise ValueError(
+                        f"Failed to load file content from '{value}': {e}"
+                    )
             else:
                 result[key] = value
 
@@ -89,6 +101,7 @@ class ServiceDataPublisher:
         Extracts provider_name from the directory structure.
         Expected path: .../{provider_name}/services/{service_name}/...
         """
+
         # Load the data file
         data = self.load_data_file(data_file)
 
@@ -103,12 +116,28 @@ class ServiceDataPublisher:
             services_idx = parts.index("services")
             provider_name = parts[services_idx - 1]
             data_with_content["provider_name"] = provider_name
+
+            # Find provider directory to check status
+            provider_dir = Path(*parts[:services_idx])
         except (ValueError, IndexError):
             raise ValueError(
                 f"Cannot extract provider_name from path: {data_file}. "
                 f"Expected path to contain .../{{provider_name}}/services/..."
             )
 
+        # Check provider status - skip if incomplete
+        provider_files = find_files_by_schema(provider_dir, "provider_v1")
+        if provider_files:
+            # Should only be one provider file in the directory
+            _provider_file, _format, provider_data = provider_files[0]
+            provider_status = provider_data.get("status", ProviderStatusEnum.active)
+            if provider_status == ProviderStatusEnum.incomplete:
+                return {
+                    "skipped": True,
+                    "reason": f"Provider status is '{provider_status}' - not publishing offering to backend",
+                    "name": data.get("name", "unknown"),
+                }
+
         # Post to the endpoint
         response = self.client.post(
             f"{self.base_url}/publish/service_offering",
@@ -143,17 +172,12 @@ class ServiceDataPublisher:
         )
 
         # If service_name is not in listing data, find it from service files in the same directory
-        if "service_name" not in data_with_content or not data_with_content["service_name"]:
+        if (
+            "service_name" not in data_with_content
+            or not data_with_content["service_name"]
+        ):
             # Find all service files in the same directory
-            service_files = []
-            for pattern in ["*.json", "*.toml"]:
-                for file_path in data_file.parent.glob(pattern):
-                    try:
-                        file_data = self.load_data_file(file_path)
-                        if file_data.get("schema") == "service_v1":
-                            service_files.append((file_path, file_data))
-                    except Exception:
-                        continue
+            service_files = find_files_by_schema(data_file.parent, "service_v1")
 
             if len(service_files) == 0:
                 raise ValueError(
@@ -161,7 +185,9 @@ class ServiceDataPublisher:
                     f"Listing files must be in the same directory as a service definition."
                 )
             elif len(service_files) > 1:
-                service_names = [
+                service_names = [
+                    data.get("name", "unknown") for _, _, data in service_files
+                ]
                 raise ValueError(
                     f"Multiple services found in {data_file.parent}: {', '.join(service_names)}. "
                     f"Please add 'service_name' field to {data_file.name} to specify which "
@@ -169,32 +195,25 @@ class ServiceDataPublisher:
                 )
             else:
                 # Exactly one service found - use it
-
+                _service_file, _format, service_data = service_files[0]
                 data_with_content["service_name"] = service_data.get("name")
                 data_with_content["service_version"] = service_data.get("version")
         else:
             # service_name is provided in listing data, find the matching service to get version
             service_name = data_with_content["service_name"]
-
+            service_files = find_files_by_schema(
+                data_file.parent, "service_v1", field_filter=(("name", service_name),)
+            )
 
-
-                for file_path in data_file.parent.glob(pattern):
-                    try:
-                        file_data = self.load_data_file(file_path)
-                        if file_data.get("schema") == "service_v1" and file_data.get("name") == service_name:
-                            data_with_content["service_version"] = file_data.get("version")
-                            service_found = True
-                            break
-                    except Exception:
-                        continue
-                if service_found:
-                    break
-
-            if not service_found:
+            if not service_files:
                 raise ValueError(
                     f"Service '{service_name}' specified in {data_file.name} not found in {data_file.parent}."
                 )
 
+            # Get version from the found service
+            _service_file, _format, service_data = service_files[0]
+            data_with_content["service_version"] = service_data.get("version")
+
         # Find seller_name from seller definition in the data directory
         # Navigate up to find the data directory and look for seller file
         data_dir = data_file.parent
@@ -207,28 +226,29 @@ class ServiceDataPublisher:
                 f"Expected path structure includes a 'data' directory."
             )
 
-        # Look for seller file in the data directory
-
-        for pattern in ["seller.json", "seller.toml"]:
-            potential_seller = data_dir / pattern
-            if potential_seller.exists():
-                seller_file = potential_seller
-                break
+        # Look for seller file in the data directory by checking schema field
+        seller_files = find_files_by_schema(data_dir, "seller_v1")
 
-        if not
+        if not seller_files:
             raise ValueError(
-                f"Cannot find
-                f"A seller definition is required in the data directory."
+                f"Cannot find seller_v1 file in {data_dir}. A seller definition is required in the data directory."
             )
 
-        #
-        seller_data =
-
-
+        # Should only be one seller file in the data directory
+        _seller_file, _format, seller_data = seller_files[0]
+
+        # Check seller status - skip if incomplete
+        seller_status = seller_data.get("status", SellerStatusEnum.active)
+        if seller_status == SellerStatusEnum.incomplete:
+            return {
+                "skipped": True,
+                "reason": f"Seller status is '{seller_status}' - not publishing listing to backend",
+                "name": data.get("name", "unknown"),
+            }
 
         seller_name = seller_data.get("name")
         if not seller_name:
-            raise ValueError(
+            raise ValueError("Seller data missing 'name' field")
 
         data_with_content["seller_name"] = seller_name
 
@@ -246,16 +266,37 @@ class ServiceDataPublisher:
 
     def post_provider(self, data_file: Path) -> dict[str, Any]:
         """Post provider data to the backend."""
+
         # Load the data file
        data = self.load_data_file(data_file)
 
-        #
+        # Check provider status - skip if incomplete
+        provider_status = data.get("status", ProviderStatusEnum.active)
+        if provider_status == ProviderStatusEnum.incomplete:
+            # Return success without publishing - provider is incomplete
+            return {
+                "skipped": True,
+                "reason": f"Provider status is '{provider_status}' - not publishing to backend",
+                "name": data.get("name", "unknown"),
+            }
+
+        # Convert convenience fields (logo, terms_of_service) to documents
         base_path = data_file.parent
+        data = convert_convenience_fields_to_documents(
+            data, base_path, logo_field="logo", terms_field="terms_of_service"
+        )
+
+        # Resolve file references and include content
         data_with_content = self.resolve_file_references(data, base_path)
 
+        # Remove status field before sending to backend (backend uses is_active)
+        status = data_with_content.pop("status", ProviderStatusEnum.active)
+        # Map status to is_active: active and disabled -> True (published), incomplete -> False (not published)
+        data_with_content["is_active"] = status != ProviderStatusEnum.disabled
+
         # Post to the endpoint
         response = self.client.post(
-            f"{self.base_url}/
+            f"{self.base_url}/publish/provider",
             json=data_with_content,
         )
         response.raise_for_status()
@@ -263,164 +304,61 @@ class ServiceDataPublisher:
 
     def post_seller(self, data_file: Path) -> dict[str, Any]:
         """Post seller data to the backend."""
+
         # Load the data file
         data = self.load_data_file(data_file)
 
-        #
+        # Check seller status - skip if incomplete
+        seller_status = data.get("status", SellerStatusEnum.active)
+        if seller_status == SellerStatusEnum.incomplete:
+            # Return success without publishing - seller is incomplete
+            return {
+                "skipped": True,
+                "reason": f"Seller status is '{seller_status}' - not publishing to backend",
+                "name": data.get("name", "unknown"),
+            }
+
+        # Convert convenience fields (logo only for sellers, no terms_of_service)
         base_path = data_file.parent
+        data = convert_convenience_fields_to_documents(
+            data, base_path, logo_field="logo", terms_field=None
+        )
+
+        # Resolve file references and include content
         data_with_content = self.resolve_file_references(data, base_path)
 
+        # Remove status field before sending to backend (backend uses is_active)
+        status = data_with_content.pop("status", SellerStatusEnum.active)
+        # Map status to is_active: active and disabled -> True (published), incomplete -> False (not published)
+        data_with_content["is_active"] = status != SellerStatusEnum.disabled
+
         # Post to the endpoint
         response = self.client.post(
-            f"{self.base_url}/
+            f"{self.base_url}/publish/seller",
             json=data_with_content,
         )
         response.raise_for_status()
         return response.json()
 
-    def list_service_offerings(self) -> list[dict[str, Any]]:
-        """List all service offerings from the backend.
-
-        Note: This endpoint doesn't exist yet in the backend.
-        TODO: Add GET /publish/service_offering endpoint.
-        """
-        response = self.client.get(f"{self.base_url}/publish/service_offering")
-        response.raise_for_status()
-        result = response.json()
-        # Backend returns {"data": [...], "count": N}
-        return result.get("data", result) if isinstance(result, dict) else result
-
-    def list_service_listings(self) -> list[dict[str, Any]]:
-        """List all service listings from the backend."""
-        response = self.client.get(f"{self.base_url}/services/")
-        response.raise_for_status()
-        result = response.json()
-        # Backend returns {"data": [...], "count": N}
-        return result.get("data", result) if isinstance(result, dict) else result
-
-    def list_providers(self) -> list[dict[str, Any]]:
-        """List all providers from the backend."""
-        response = self.client.get(f"{self.base_url}/providers/")
-        response.raise_for_status()
-        result = response.json()
-        # Backend returns {"data": [...], "count": N}
-        return result.get("data", result) if isinstance(result, dict) else result
-
-    def list_sellers(self) -> list[dict[str, Any]]:
-        """List all sellers from the backend."""
-        response = self.client.get(f"{self.base_url}/sellers/")
-        response.raise_for_status()
-        result = response.json()
-        # Backend returns {"data": [...], "count": N}
-        return result.get("data", result) if isinstance(result, dict) else result
-
-    def update_service_offering_status(self, offering_id: int | str, status: str) -> dict[str, Any]:
-        """
-        Update the status of a service offering.
-
-        Allowed statuses (UpstreamStatusEnum):
-        - uploading: Service is being uploaded (not ready)
-        - ready: Service is ready to be used
-        - deprecated: Service is deprecated from upstream
-        """
-        response = self.client.patch(
-            f"{self.base_url}/service_offering/{offering_id}/",
-            json={"upstream_status": status},
-        )
-        response.raise_for_status()
-        return response.json()
-
-    def update_service_listing_status(self, listing_id: int | str, status: str) -> dict[str, Any]:
-        """
-        Update the status of a service listing.
-
-        Allowed statuses (ListingStatusEnum):
-        - unknown: Not yet determined
-        - upstream_ready: Upstream is ready to be used
-        - downstream_ready: Downstream is ready with proper routing, logging, and billing
-        - ready: Operationally ready (with docs, metrics, and pricing)
-        - in_service: Service is in service
-        - upstream_deprecated: Service is deprecated from upstream
-        - deprecated: Service is no longer offered to users
-        """
-        response = self.client.patch(
-            f"{self.base_url}/service_listing/{listing_id}/",
-            json={"listing_status": status},
-        )
-        response.raise_for_status()
-        return response.json()
-
     def find_offering_files(self, data_dir: Path) -> list[Path]:
-        """
-
-
-        Searches all JSON and TOML files and checks for schema="service_v1".
-        """
-        offerings = []
-        for pattern in ["*.json", "*.toml"]:
-            for file_path in data_dir.rglob(pattern):
-                try:
-                    data = self.load_data_file(file_path)
-                    if data.get("schema") == "service_v1":
-                        offerings.append(file_path)
-                except Exception:
-                    # Skip files that can't be loaded or don't have schema field
-                    pass
-        return sorted(offerings)
+        """Find all service offering files in a directory tree."""
+        files = find_files_by_schema(data_dir, "service_v1")
+        return sorted([f[0] for f in files])
 
     def find_listing_files(self, data_dir: Path) -> list[Path]:
-        """
-
-
-        Searches all JSON and TOML files and checks for schema="listing_v1".
-        """
-        listings = []
-        for pattern in ["*.json", "*.toml"]:
-            for file_path in data_dir.rglob(pattern):
-                try:
-                    data = self.load_data_file(file_path)
-                    if data.get("schema") == "listing_v1":
-                        listings.append(file_path)
-                except Exception:
-                    # Skip files that can't be loaded or don't have schema field
-                    pass
-        return sorted(listings)
+        """Find all service listing files in a directory tree."""
+        files = find_files_by_schema(data_dir, "listing_v1")
+        return sorted([f[0] for f in files])
 
     def find_provider_files(self, data_dir: Path) -> list[Path]:
-        """
-
-
-        Searches all JSON and TOML files and checks for schema="provider_v1".
-        """
-        providers = []
-        for pattern in ["*.json", "*.toml"]:
-            for file_path in data_dir.rglob(pattern):
-                try:
-                    data = self.load_data_file(file_path)
-                    if data.get("schema") == "provider_v1":
-                        providers.append(file_path)
-                except Exception:
-                    # Skip files that can't be loaded or don't have schema field
-                    pass
-        return sorted(providers)
+        """Find all provider files in a directory tree."""
+        files = find_files_by_schema(data_dir, "provider_v1")
+        return sorted([f[0] for f in files])
 
     def find_seller_files(self, data_dir: Path) -> list[Path]:
-        """
-
-
-        Searches all JSON and TOML files and checks for schema="seller_v1".
-        """
-        sellers = []
-        for pattern in ["*.json", "*.toml"]:
-            for file_path in data_dir.rglob(pattern):
-                try:
-                    data = self.load_data_file(file_path)
-                    if data.get("schema") == "seller_v1":
-                        sellers.append(file_path)
-                except Exception:
-                    # Skip files that can't be loaded or don't have schema field
-                    pass
-        return sorted(sellers)
+        """Find all seller files in a directory tree."""
+        files = find_files_by_schema(data_dir, "seller_v1")
+        return sorted([f[0] for f in files])
 
     def publish_all_offerings(self, data_dir: Path) -> dict[str, Any]:
         """
@@ -429,7 +367,6 @@ class ServiceDataPublisher:
         Validates data consistency before publishing.
         Returns a summary of successes and failures.
         """
-        from .validator import DataValidator
 
         # Validate all service directories first
         validator = DataValidator(data_dir, data_dir.parent / "schema")
@@ -439,7 +376,10 @@ class ServiceDataPublisher:
                 "total": 0,
                 "success": 0,
                 "failed": 0,
-                "errors": [{"file": "validation", "error": error} for error in validation_errors],
+                "errors": [
+                    {"file": "validation", "error": error}
+                    for error in validation_errors
+                ],
             }
 
         offering_files = self.find_offering_files(data_dir)
@@ -467,8 +407,6 @@ class ServiceDataPublisher:
         Validates data consistency before publishing.
         Returns a summary of successes and failures.
         """
-        from .validator import DataValidator
-
         # Validate all service directories first
         validator = DataValidator(data_dir, data_dir.parent / "schema")
         validation_errors = validator.validate_all_service_directories(data_dir)
@@ -477,11 +415,19 @@ class ServiceDataPublisher:
                 "total": 0,
                 "success": 0,
                 "failed": 0,
-                "errors": [{"file": "validation", "error": error} for error in validation_errors],
+                "errors": [
+                    {"file": "validation", "error": error}
+                    for error in validation_errors
+                ],
             }
 
         listing_files = self.find_listing_files(data_dir)
-        results: dict[str, Any] = {
+        results: dict[str, Any] = {
+            "total": len(listing_files),
+            "success": 0,
+            "failed": 0,
+            "errors": [],
+        }
 
         for listing_file in listing_files:
             try:
@@ -579,7 +525,6 @@ def publish_providers(
     ),
 ):
     """Publish provider(s) from a file or directory."""
-    import os
 
     # Set data path
     if data_path is None:
@@ -648,7 +593,9 @@ def publish_providers(
    except typer.Exit:
         raise
     except Exception as e:
-        console.print(f"[red]✗[/red] Failed to publish providers: {e}", style="bold red")
+        console.print(
+            f"[red]✗[/red] Failed to publish providers: {e}", style="bold red"
+        )
         raise typer.Exit(code=1)
 
 
@@ -672,8 +619,6 @@ def publish_sellers(
     ),
 ):
     """Publish seller(s) from a file or directory."""
-    import os
-
     # Set data path
     if data_path is None:
         data_path_str = os.getenv("UNITYSVC_DATA_DIR")
@@ -734,7 +679,9 @@ def publish_sellers(
             console.print(f" {error['error']}")
             raise typer.Exit(code=1)
         else:
-            console.print("\n[green]✓[/green] All sellers published successfully!")
+            console.print(
+                "\n[green]✓[/green] All sellers published successfully!"
+            )
 
     except typer.Exit:
         raise
@@ -763,8 +710,6 @@ def publish_offerings(
     ),
 ):
     """Publish service offering(s) from a file or directory."""
-    import os
-
     # Set data path
     if data_path is None:
         data_path_str = os.getenv("UNITYSVC_DATA_DIR")
@@ -805,11 +750,15 @@ def publish_offerings(
         console.print(f"[blue]Publishing service offering:[/blue] {data_path}")
         console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
         result = publisher.post_service_offering(data_path)
-        console.print("[green]✓[/green] Service offering published successfully!")
+        console.print(
+            "[green]✓[/green] Service offering published successfully!"
+        )
         console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
     # Handle directory
     else:
-        console.print(f"[blue]Scanning for service offerings in:[/blue] {data_path}")
+        console.print(
+            f"[blue]Scanning for service offerings in:[/blue] {data_path}"
+        )
         console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
         results = publisher.publish_all_offerings(data_path)
 
@@ -825,12 +774,16 @@ def publish_offerings(
             console.print(f" {error['error']}")
             raise typer.Exit(code=1)
         else:
-            console.print("\n[green]✓[/green] All service offerings published successfully!")
+            console.print(
+                "\n[green]✓[/green] All service offerings published successfully!"
+            )
 
     except typer.Exit:
         raise
     except Exception as e:
-        console.print(f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red")
+        console.print(
+            f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red"
+        )
         raise typer.Exit(code=1)
 
 
@@ -854,7 +807,6 @@ def publish_listings(
     ),
 ):
     """Publish service listing(s) from a file or directory."""
-    import os
 
     # Set data path
     if data_path is None:
@@ -896,11 +848,15 @@ def publish_listings(
         console.print(f"[blue]Publishing service listing:[/blue] {data_path}")
         console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
         result = publisher.post_service_listing(data_path)
-        console.print("[green]✓[/green] Service listing published successfully!")
+        console.print(
+            "[green]✓[/green] Service listing published successfully!"
+        )
         console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
     # Handle directory
     else:
-        console.print(f"[blue]Scanning for service listings in:[/blue] {data_path}")
+        console.print(
+            f"[blue]Scanning for service listings in:[/blue] {data_path}"
+        )
         console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
         results = publisher.publish_all_listings(data_path)
 
@@ -916,10 +872,14 @@ def publish_listings(
             console.print(f" {error['error']}")
             raise typer.Exit(code=1)
         else:
-            console.print("\n[green]✓[/green] All service listings published successfully!")
+            console.print(
+                "\n[green]✓[/green] All service listings published successfully!"
+            )
 
     except typer.Exit:
         raise
     except Exception as e:
-        console.print(f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red")
+        console.print(
+            f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red"
+        )
         raise typer.Exit(code=1)
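
Note on the refactor: most of the removed blocks above were per-method "glob *.json/*.toml, parse, check the schema field" loops; 0.1.1 replaces them with a shared find_files_by_schema helper imported from unitysvc_services.utils, and adds status gating so providers and sellers marked incomplete are skipped instead of published. The helper's source is not part of this page; the sketch below is a rough approximation inferred only from its call sites in publisher.py (it appears to return (path, format, data) tuples and accept an optional field_filter), not the packaged implementation.

# Hypothetical approximation of find_files_by_schema; the real code lives in
# unitysvc_services/utils.py and is not shown in this diff. Return shape and
# the field_filter keyword are inferred from publisher.py call sites.
import json
import tomllib
from pathlib import Path
from typing import Any


def find_files_by_schema(
    data_dir: Path,
    schema_name: str,
    field_filter: tuple[tuple[str, Any], ...] = (),
) -> list[tuple[Path, str, dict[str, Any]]]:
    """Return (path, format, data) for JSON/TOML files whose "schema" field matches."""
    matches: list[tuple[Path, str, dict[str, Any]]] = []
    for pattern, fmt in (("*.json", "json"), ("*.toml", "toml")):
        for file_path in sorted(data_dir.rglob(pattern)):
            try:
                text = file_path.read_text(encoding="utf-8")
                data = json.loads(text) if fmt == "json" else tomllib.loads(text)
            except Exception:
                continue  # skip files that cannot be parsed
            if not isinstance(data, dict) or data.get("schema") != schema_name:
                continue
            # Optional exact-match filter, e.g. field_filter=(("name", service_name),)
            if all(data.get(key) == value for key, value in field_filter):
                matches.append((file_path, fmt, data))
    return matches

Centralizing discovery this way keeps the JSON/TOML parsing and schema filtering in one place rather than duplicated across the find_* methods and the listing/seller lookups above.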