unitysvc-services 0.1.1__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unitysvc_services/api.py +278 -0
- unitysvc_services/format_data.py +2 -7
- unitysvc_services/list.py +14 -43
- unitysvc_services/models/base.py +157 -102
- unitysvc_services/models/listing_v1.py +25 -9
- unitysvc_services/models/provider_v1.py +19 -8
- unitysvc_services/models/seller_v1.py +10 -8
- unitysvc_services/models/service_v1.py +8 -1
- unitysvc_services/populate.py +2 -6
- unitysvc_services/publisher.py +676 -371
- unitysvc_services/py.typed +0 -0
- unitysvc_services/query.py +522 -337
- unitysvc_services/update.py +4 -13
- unitysvc_services/utils.py +2 -6
- unitysvc_services/validator.py +98 -79
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.4.dist-info}/METADATA +41 -39
- unitysvc_services-0.1.4.dist-info/RECORD +25 -0
- unitysvc_services-0.1.1.dist-info/RECORD +0 -23
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.4.dist-info}/WHEEL +0 -0
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.4.dist-info}/entry_points.txt +0 -0
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.4.dist-info}/licenses/LICENSE +0 -0
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.4.dist-info}/top_level.txt +0 -0
unitysvc_services/publisher.py
CHANGED
@@ -1,5 +1,6 @@
 """Data publisher module for posting service data to UnitySVC backend."""

+import asyncio
 import base64
 import json
 import os
@@ -11,24 +12,25 @@ import httpx
 import typer
 from rich.console import Console

+from .api import UnitySvcAPI
 from .models.base import ProviderStatusEnum, SellerStatusEnum
 from .utils import convert_convenience_fields_to_documents, find_files_by_schema
 from .validator import DataValidator


-class ServiceDataPublisher:
-    """Publishes service data to UnitySVC backend endpoints.
+class ServiceDataPublisher(UnitySvcAPI):
+    """Publishes service data to UnitySVC backend endpoints.

-
-
-
-
-
-
-
-
-
-
+    Inherits base HTTP client with curl fallback from UnitySvcAPI.
+    Extends with async operations for concurrent publishing.
+    """
+
+    def __init__(self) -> None:
+        # Initialize base class (provides self.client as AsyncClient with curl fallback)
+        super().__init__()
+
+        # Semaphore to limit concurrent requests and prevent connection pool exhaustion
+        self.max_concurrent_requests = 15

     def load_data_file(self, file_path: Path) -> dict[str, Any]:
         """Load data from JSON or TOML file."""
@@ -57,9 +59,7 @@ class ServiceDataPublisher:
         with open(full_path, "rb") as f:
             return base64.b64encode(f.read()).decode("ascii")

-    def resolve_file_references(
-        self, data: dict[str, Any], base_path: Path
-    ) -> dict[str, Any]:
+    def resolve_file_references(self, data: dict[str, Any], base_path: Path) -> dict[str, Any]:
         """Recursively resolve file references and include content in data."""
         result: dict[str, Any] = {}

@@ -70,11 +70,7 @@ class ServiceDataPublisher:
             elif isinstance(value, list):
                 # Process lists
                 result[key] = [
-                    (
-                        self.resolve_file_references(item, base_path)
-                        if isinstance(item, dict)
-                        else item
-                    )
+                    (self.resolve_file_references(item, base_path) if isinstance(item, dict) else item)
                     for item in value
                 ]
             elif key == "file_path" and isinstance(value, str):
@@ -87,110 +83,196 @@ class ServiceDataPublisher:
                     content = self.load_file_content(Path(value), base_path)
                     result["file_content"] = content
                 except Exception as e:
-                    raise ValueError(
-                        f"Failed to load file content from '{value}': {e}"
-                    )
+                    raise ValueError(f"Failed to load file content from '{value}': {e}")
             else:
                 result[key] = value

         return result

-    def
-
+    async def post(  # type: ignore[override]
+        self, endpoint: str, data: dict[str, Any], check_status: bool = True
+    ) -> tuple[dict[str, Any], int]:
+        """Make a POST request to the backend API with automatic curl fallback.

-
-
-        """
+        Override of base class post() that returns both JSON and status code.
+        Uses base class client with automatic curl fallback.

-
-
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/seller")
+            data: JSON data to post
+            check_status: Whether to raise on non-2xx status codes (default: True)

-
-
-        data_with_content = self.resolve_file_references(data, base_path)
+        Returns:
+            Tuple of (JSON response, HTTP status code)

-
-
-
-
-
-
-
+        Raises:
+            RuntimeError: If both httpx and curl fail
+        """
+        # Use base class client (self.client from UnitySvcQuery) with automatic curl fallback
+        # If we already know curl is needed, use it directly
+        if self.use_curl_fallback:
+            # Use base class curl fallback method
+            response_json = await super().post(endpoint, json_data=data)
+            # Curl POST doesn't return status code separately, assume 2xx if no exception
+            status_code = 200
+        else:
+            try:
+                response = await self.client.post(f"{self.base_url}{endpoint}", json=data)
+                status_code = response.status_code
+
+                if check_status:
+                    response.raise_for_status()
+
+                response_json = response.json()
+            except (httpx.ConnectError, OSError):
+                # Connection failed - switch to curl fallback and retry
+                self.use_curl_fallback = True
+                response_json = await super().post(endpoint, json_data=data)
+                status_code = 200  # Assume success if curl didn't raise
+
+        return (response_json, status_code)
+
+    async def _post_with_retry(
+        self,
+        endpoint: str,
+        data: dict[str, Any],
+        entity_type: str,
+        entity_name: str,
+        context_info: str = "",
+        max_retries: int = 3,
+    ) -> dict[str, Any]:
+        """
+        Generic retry wrapper for posting data to backend API with task polling.
+
+        The backend now returns HTTP 202 with a task_id. This method:
+        1. Submits the publish request
+        2. Gets the task_id from the response
+        3. Polls /tasks/{task_id} until completion
+        4. Returns the final result
+
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/listing")
+            data: JSON data to post
+            entity_type: Type of entity being published (for error messages)
+            entity_name: Name of the entity being published (for error messages)
+            context_info: Additional context for error messages (e.g., provider, service info)
+            max_retries: Maximum number of retry attempts
+
+        Returns:
+            Response JSON from successful API call
+
+        Raises:
+            ValueError: On client errors (4xx) or after exhausting retries
+        """
+        last_exception = None
+        for attempt in range(max_retries):
+            try:
+                # Use the public post() method with automatic curl fallback
+                response_json, status_code = await self.post(endpoint, data, check_status=False)

-
-
-
-
-                f"Cannot extract provider_name from path: {data_file}. "
-                f"Expected path to contain .../{{provider_name}}/services/..."
-            )
+                # Handle task-based response (HTTP 202)
+                if status_code == 202:
+                    # Backend returns task_id - poll for completion
+                    task_id = response_json.get("task_id")

-
-
-
-            # Should only be one provider file in the directory
-            _provider_file, _format, provider_data = provider_files[0]
-            provider_status = provider_data.get("status", ProviderStatusEnum.active)
-            if provider_status == ProviderStatusEnum.incomplete:
-                return {
-                    "skipped": True,
-                    "reason": f"Provider status is '{provider_status}' - not publishing offering to backend",
-                    "name": data.get("name", "unknown"),
-                }
+                    if not task_id:
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(f"No task_id in response for {entity_type} '{entity_name}'{context_msg}")

-
-
-
-
-
-
-
+                    # Poll task status until completion using check_task utility
+                    try:
+                        result = await self.check_task(task_id)
+                        return result
+                    except ValueError as e:
+                        # Add context to task errors
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(f"Task failed for {entity_type} '{entity_name}'{context_msg}: {e}")
+
+                # Check for errors
+                if status_code >= 400:
+                    # Don't retry on 4xx errors (client errors) - they won't succeed on retry
+                    if 400 <= status_code < 500:
+                        error_detail = response_json.get("detail", str(response_json))
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(
+                            f"Failed to publish {entity_type} '{entity_name}'{context_msg}: {error_detail}"
+                        )

-
-
+                    # 5xx errors - retry with exponential backoff
+                    if attempt < max_retries - 1:
+                        wait_time = 2**attempt  # Exponential backoff: 1s, 2s, 4s
+                        await asyncio.sleep(wait_time)
+                        continue
+                    else:
+                        # Last attempt failed
+                        error_detail = response_json.get("detail", str(response_json))
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(
+                            f"Failed to publish {entity_type} after {max_retries} attempts: "
+                            f"'{entity_name}'{context_msg}: {error_detail}"
+                        )

-
-
-
+                # Success response (2xx)
+                return response_json
+
+            except (httpx.NetworkError, httpx.TimeoutException, RuntimeError) as e:
+                # Network/connection errors - the post() method should have tried curl fallback
+                # If we're here, both httpx and curl failed
+                last_exception = e
+                if attempt < max_retries - 1:
+                    wait_time = 2**attempt  # Exponential backoff: 1s, 2s, 4s
+                    await asyncio.sleep(wait_time)
+                    continue
+                else:
+                    raise ValueError(
+                        f"Network error after {max_retries} attempts for {entity_type} '{entity_name}': {str(e)}"
+                    )
+
+        # Should never reach here, but just in case
+        if last_exception:
+            raise last_exception
+        raise ValueError("Unexpected error in retry logic")
+
+    async def post_service_listing_async(self, listing_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_service_listing for concurrent publishing with retry logic."""
         # Load the listing data file
-        data = self.load_data_file(
+        data = self.load_data_file(listing_file)
+
+        # If name is not provided, use filename (without extension)
+        if "name" not in data or not data.get("name"):
+            data["name"] = listing_file.stem

         # Resolve file references and include content
-        base_path =
+        base_path = listing_file.parent
         data_with_content = self.resolve_file_references(data, base_path)

         # Extract provider_name from directory structure
-        parts =
+        parts = listing_file.parts
         try:
             services_idx = parts.index("services")
             provider_name = parts[services_idx - 1]
             data_with_content["provider_name"] = provider_name
         except (ValueError, IndexError):
             raise ValueError(
-                f"Cannot extract provider_name from path: {
+                f"Cannot extract provider_name from path: {listing_file}. "
                 f"Expected path to contain .../{{provider_name}}/services/..."
             )

         # If service_name is not in listing data, find it from service files in the same directory
-        if
-            "service_name" not in data_with_content
-            or not data_with_content["service_name"]
-        ):
+        if "service_name" not in data_with_content or not data_with_content["service_name"]:
             # Find all service files in the same directory
-            service_files = find_files_by_schema(
+            service_files = find_files_by_schema(listing_file.parent, "service_v1")

             if len(service_files) == 0:
                 raise ValueError(
-                    f"Cannot find any service_v1 files in {
+                    f"Cannot find any service_v1 files in {listing_file.parent}. "
                     f"Listing files must be in the same directory as a service definition."
                 )
             elif len(service_files) > 1:
-                service_names = [
-                    data.get("name", "unknown") for _, _, data in service_files
-                ]
+                service_names = [data.get("name", "unknown") for _, _, data in service_files]
                 raise ValueError(
-                    f"Multiple services found in {
-                    f"Please add 'service_name' field to {
+                    f"Multiple services found in {listing_file.parent}: {', '.join(service_names)}. "
+                    f"Please add 'service_name' field to {listing_file.name} to specify which "
                     f"service this listing belongs to."
                 )
             else:
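Note (not part of the diff): the `_post_with_retry` docstring above describes a submit-then-poll flow against `/tasks/{task_id}` when the backend answers 202. A minimal standalone sketch of that flow is given below; the base URL, the Authorization header scheme, the terminal status values, and the payload are placeholder assumptions, while the 202/task_id handling and the polling endpoint follow the docstring.

import asyncio
import httpx

async def submit_and_poll(base_url: str, api_key: str, payload: dict) -> dict:
    # Submit a publish request; if the backend accepts it as a background task (202),
    # poll the task endpoint until it reports a terminal state.
    headers = {"Authorization": f"Bearer {api_key}"}  # auth scheme assumed
    async with httpx.AsyncClient(base_url=base_url, headers=headers) as client:
        resp = await client.post("/publish/listing", json=payload)
        if resp.status_code != 202:
            resp.raise_for_status()
            return resp.json()
        task_id = resp.json()["task_id"]
        while True:
            task = (await client.get(f"/tasks/{task_id}")).json()
            if task.get("status") in ("completed", "failed"):  # status values assumed
                return task
            await asyncio.sleep(1)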
@@ -202,12 +284,12 @@ class ServiceDataPublisher:
             # service_name is provided in listing data, find the matching service to get version
             service_name = data_with_content["service_name"]
             service_files = find_files_by_schema(
-
+                listing_file.parent, "service_v1", field_filter=(("name", service_name),)
             )

             if not service_files:
                 raise ValueError(
-                    f"Service '{service_name}' specified in {
+                    f"Service '{service_name}' specified in {listing_file.name} not found in {listing_file.parent}."
                 )

             # Get version from the found service
@@ -216,13 +298,13 @@ class ServiceDataPublisher:

         # Find seller_name from seller definition in the data directory
         # Navigate up to find the data directory and look for seller file
-        data_dir =
+        data_dir = listing_file.parent
         while data_dir.name != "data" and data_dir.parent != data_dir:
             data_dir = data_dir.parent

         if data_dir.name != "data":
             raise ValueError(
-                f"Cannot find 'data' directory in path: {
+                f"Cannot find 'data' directory in path: {listing_file}. "
                 f"Expected path structure includes a 'data' directory."
             )

@@ -256,17 +338,77 @@ class ServiceDataPublisher:
         if "listing_status" in data_with_content:
             data_with_content["status"] = data_with_content.pop("listing_status")

-        # Post to the endpoint
-
-            f"{
-
+        # Post to the endpoint using retry helper
+        context_info = (
+            f"service: {data_with_content.get('service_name')}, "
+            f"provider: {data_with_content.get('provider_name')}, "
+            f"seller: {data_with_content.get('seller_name')}"
+        )
+        return await self._post_with_retry(
+            endpoint="/publish/listing",
+            data=data_with_content,
+            entity_type="listing",
+            entity_name=data.get("name", "unknown"),
+            context_info=context_info,
+            max_retries=max_retries,
+        )
+
+    async def post_service_offering_async(self, data_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_service_offering for concurrent publishing with retry logic."""
+        # Load the data file
+        data = self.load_data_file(data_file)
+
+        # Resolve file references and include content
+        base_path = data_file.parent
+        data = convert_convenience_fields_to_documents(
+            data, base_path, logo_field="logo", terms_field="terms_of_service"
         )
-        response.raise_for_status()
-        return response.json()

-
-
+        # Resolve file references and include content
+        data_with_content = self.resolve_file_references(data, base_path)

+        # Extract provider_name from directory structure
+        # Find the 'services' directory and use its parent as provider_name
+        parts = data_file.parts
+        try:
+            services_idx = parts.index("services")
+            provider_name = parts[services_idx - 1]
+            data_with_content["provider_name"] = provider_name
+
+            # Find provider directory to check status
+            provider_dir = Path(*parts[:services_idx])
+        except (ValueError, IndexError):
+            raise ValueError(
+                f"Cannot extract provider_name from path: {data_file}. "
+                f"Expected path to contain .../{{provider_name}}/services/..."
+            )
+
+        # Check provider status - skip if incomplete
+        provider_files = find_files_by_schema(provider_dir, "provider_v1")
+        if provider_files:
+            # Should only be one provider file in the directory
+            _provider_file, _format, provider_data = provider_files[0]
+            provider_status = provider_data.get("status", ProviderStatusEnum.active)
+            if provider_status == ProviderStatusEnum.incomplete:
+                return {
+                    "skipped": True,
+                    "reason": f"Provider status is '{provider_status}' - not publishing offering to backend",
+                    "name": data.get("name", "unknown"),
+                }
+
+        # Post to the endpoint using retry helper
+        context_info = f"provider: {data_with_content.get('provider_name')}"
+        return await self._post_with_retry(
+            endpoint="/publish/offering",
+            data=data_with_content,
+            entity_type="offering",
+            entity_name=data.get("name", "unknown"),
+            context_info=context_info,
+            max_retries=max_retries,
+        )
+
+    async def post_provider_async(self, data_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_provider for concurrent publishing with retry logic."""
         # Load the data file
         data = self.load_data_file(data_file)

@@ -289,22 +431,17 @@ class ServiceDataPublisher:
         # Resolve file references and include content
         data_with_content = self.resolve_file_references(data, base_path)

-        #
-
-
-
-
-
-
-            f"{self.base_url}/publish/provider",
-            json=data_with_content,
+        # Post to the endpoint using retry helper
+        return await self._post_with_retry(
+            endpoint="/publish/provider",
+            data=data_with_content,
+            entity_type="provider",
+            entity_name=data.get("name", "unknown"),
+            max_retries=max_retries,
         )
-        response.raise_for_status()
-        return response.json()
-
-    def post_seller(self, data_file: Path) -> dict[str, Any]:
-        """Post seller data to the backend."""

+    async def post_seller_async(self, data_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_seller for concurrent publishing with retry logic."""
         # Load the data file
         data = self.load_data_file(data_file)

@@ -320,25 +457,19 @@ class ServiceDataPublisher:

         # Convert convenience fields (logo only for sellers, no terms_of_service)
         base_path = data_file.parent
-        data = convert_convenience_fields_to_documents(
-            data, base_path, logo_field="logo", terms_field=None
-        )
+        data = convert_convenience_fields_to_documents(data, base_path, logo_field="logo", terms_field=None)

         # Resolve file references and include content
         data_with_content = self.resolve_file_references(data, base_path)

-        #
-
-
-
-
-
-
-            f"{self.base_url}/publish/seller",
-            json=data_with_content,
+        # Post to the endpoint using retry helper
+        return await self._post_with_retry(
+            endpoint="/publish/seller",
+            data=data_with_content,
+            entity_type="seller",
+            entity_name=data.get("name", "unknown"),
+            max_retries=max_retries,
         )
-        response.raise_for_status()
-        return response.json()

     def find_offering_files(self, data_dir: Path) -> list[Path]:
         """Find all service offering files in a directory tree."""
@@ -360,14 +491,48 @@ class ServiceDataPublisher:
         files = find_files_by_schema(data_dir, "seller_v1")
         return sorted([f[0] for f in files])

-    def
+    async def _publish_offering_task(
+        self, offering_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
+        """
+        Async task to publish a single offering with concurrency control.
+
+        Returns tuple of (offering_file, result_or_exception).
         """
-
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load offering data to get the name
+                data = self.load_data_file(offering_file)
+                offering_name = data.get("name", offering_file.stem)
+
+                # Publish the offering
+                result = await self.post_service_offering_async(offering_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped offering: [cyan]{offering_name}[/cyan] - {reason}")
+                else:
+                    provider_name = result.get("provider_name")
+                    console.print(
+                        f" [green]✓[/green] Published offering: [cyan]{offering_name}[/cyan] "
+                        f"(provider: {provider_name})"
+                    )
+
+                return (offering_file, result)
+            except Exception as e:
+                data = self.load_data_file(offering_file)
+                offering_name = data.get("name", offering_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish offering: [cyan]{offering_name}[/cyan] - {str(e)}")
+                return (offering_file, e)
+
+    async def publish_all_offerings(self, data_dir: Path) -> dict[str, Any]:
+        """
+        Publish all service offerings found in a directory tree concurrently.

         Validates data consistency before publishing.
         Returns a summary of successes and failures.
         """
-
         # Validate all service directories first
         validator = DataValidator(data_dir, data_dir.parent / "schema")
         validation_errors = validator.validate_all_service_directories(data_dir)
@@ -376,10 +541,7 @@ class ServiceDataPublisher:
                 "total": 0,
                 "success": 0,
                 "failed": 0,
-                "errors": [
-                    {"file": "validation", "error": error}
-                    for error in validation_errors
-                ],
+                "errors": [{"file": "validation", "error": error} for error in validation_errors],
             }

         offering_files = self.find_offering_files(data_dir)
@@ -390,19 +552,66 @@ class ServiceDataPublisher:
             "errors": [],
         }

-
-
-
-
-
+        if not offering_files:
+            return results
+
+        console = Console()
+
+        # Run all offering publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_offering_task(offering_file, console, semaphore) for offering_file in offering_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for offering_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(offering_file), "error": str(
+                results["errors"].append({"file": str(offering_file), "error": str(result)})
+            else:
+                results["success"] += 1

         return results

-    def
+    async def _publish_listing_task(
+        self, listing_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
+        """
+        Async task to publish a single listing with concurrency control.
+
+        Returns tuple of (listing_file, result_or_exception).
+        """
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load listing data to get the name
+                data = self.load_data_file(listing_file)
+                listing_name = data.get("name", listing_file.stem)
+
+                # Publish the listing
+                result = await self.post_service_listing_async(listing_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped listing: [cyan]{listing_name}[/cyan] - {reason}")
+                else:
+                    service_name = result.get("service_name")
+                    provider_name = result.get("provider_name")
+                    console.print(
+                        f" [green]✓[/green] Published listing: [cyan]{listing_name}[/cyan] "
+                        f"(service: {service_name}, provider: {provider_name})"
+                    )
+
+                return (listing_file, result)
+            except Exception as e:
+                data = self.load_data_file(listing_file)
+                listing_name = data.get("name", listing_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish listing: [cyan]{listing_file}[/cyan] - {str(e)}")
+                return (listing_file, e)
+
+    async def publish_all_listings(self, data_dir: Path) -> dict[str, Any]:
         """
-        Publish all service listings found in a directory tree.
+        Publish all service listings found in a directory tree concurrently.

         Validates data consistency before publishing.
         Returns a summary of successes and failures.
@@ -415,10 +624,7 @@ class ServiceDataPublisher:
                 "total": 0,
                 "success": 0,
                 "failed": 0,
-                "errors": [
-                    {"file": "validation", "error": error}
-                    for error in validation_errors
-                ],
+                "errors": [{"file": "validation", "error": error} for error in validation_errors],
             }

         listing_files = self.find_listing_files(data_dir)
@@ -429,19 +635,61 @@ class ServiceDataPublisher:
             "errors": [],
         }

-
-
-
-
-
+        if not listing_files:
+            return results
+
+        console = Console()
+
+        # Run all listing publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_listing_task(listing_file, console, semaphore) for listing_file in listing_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for listing_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(listing_file), "error": str(
+                results["errors"].append({"file": str(listing_file), "error": str(result)})
+            else:
+                results["success"] += 1

         return results

-    def
+    async def _publish_provider_task(
+        self, provider_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
+        """
+        Async task to publish a single provider with concurrency control.
+
+        Returns tuple of (provider_file, result_or_exception).
         """
-
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load provider data to get the name
+                data = self.load_data_file(provider_file)
+                provider_name = data.get("name", provider_file.stem)
+
+                # Publish the provider
+                result = await self.post_provider_async(provider_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped provider: [cyan]{provider_name}[/cyan] - {reason}")
+                else:
+                    console.print(f" [green]✓[/green] Published provider: [cyan]{provider_name}[/cyan]")
+
+                return (provider_file, result)
+            except Exception as e:
+                data = self.load_data_file(provider_file)
+                provider_name = data.get("name", provider_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish provider: [cyan]{provider_name}[/cyan] - {str(e)}")
+                return (provider_file, e)
+
+    async def publish_all_providers(self, data_dir: Path) -> dict[str, Any]:
+        """
+        Publish all providers found in a directory tree concurrently.

         Returns a summary of successes and failures.
         """
@@ -453,19 +701,61 @@ class ServiceDataPublisher:
             "errors": [],
         }

-
-
-
-
-
+        if not provider_files:
+            return results
+
+        console = Console()
+
+        # Run all provider publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_provider_task(provider_file, console, semaphore) for provider_file in provider_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for provider_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(provider_file), "error": str(
+                results["errors"].append({"file": str(provider_file), "error": str(result)})
+            else:
+                results["success"] += 1

         return results

-    def
+    async def _publish_seller_task(
+        self, seller_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
         """
-
+        Async task to publish a single seller with concurrency control.
+
+        Returns tuple of (seller_file, result_or_exception).
+        """
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load seller data to get the name
+                data = self.load_data_file(seller_file)
+                seller_name = data.get("name", seller_file.stem)
+
+                # Publish the seller
+                result = await self.post_seller_async(seller_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped seller: [cyan]{seller_name}[/cyan] - {reason}")
+                else:
+                    console.print(f" [green]✓[/green] Published seller: [cyan]{seller_name}[/cyan]")
+
+                return (seller_file, result)
+            except Exception as e:
+                data = self.load_data_file(seller_file)
+                seller_name = data.get("name", seller_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish seller: [cyan]{seller_name}[/cyan] - {str(e)}")
+                return (seller_file, e)
+
+    async def publish_all_sellers(self, data_dir: Path) -> dict[str, Any]:
+        """
+        Publish all sellers found in a directory tree concurrently.

         Returns a summary of successes and failures.
         """
@@ -477,27 +767,83 @@ class ServiceDataPublisher:
             "errors": [],
         }

-
-
-
-
-
+        if not seller_files:
+            return results
+
+        console = Console()
+
+        # Run all seller publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_seller_task(seller_file, console, semaphore) for seller_file in seller_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for seller_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(seller_file), "error": str(
+                results["errors"].append({"file": str(seller_file), "error": str(result)})
+            else:
+                results["success"] += 1

         return results

-    def
-    """
-
+    async def publish_all_models(self, data_dir: Path) -> dict[str, Any]:
+        """
+        Publish all data types in the correct order.
+
+        Publishing order:
+        1. Sellers - Must exist before listings
+        2. Providers - Must exist before offerings
+        3. Service Offerings - Must exist before listings
+        4. Service Listings - Depends on sellers, providers, and offerings
+
+        Returns a dict with results for each data type and overall summary.
+        """
+        all_results: dict[str, Any] = {
+            "sellers": {},
+            "providers": {},
+            "offerings": {},
+            "listings": {},
+            "total_success": 0,
+            "total_failed": 0,
+            "total_found": 0,
+        }
+
+        # Publish in order: sellers -> providers -> offerings -> listings
+        publish_order = [
+            ("sellers", self.publish_all_sellers),
+            ("providers", self.publish_all_providers),
+            ("offerings", self.publish_all_offerings),
+            ("listings", self.publish_all_listings),
+        ]
+
+        for data_type, publish_method in publish_order:
+            try:
+                results = await publish_method(data_dir)
+                all_results[data_type] = results
+                all_results["total_success"] += results["success"]
+                all_results["total_failed"] += results["failed"]
+                all_results["total_found"] += results["total"]
+            except Exception as e:
+                # If a publish method fails catastrophically, record the error
+                all_results[data_type] = {
+                    "total": 0,
+                    "success": 0,
+                    "failed": 1,
+                    "errors": [{"file": "N/A", "error": str(e)}],
+                }
+                all_results["total_failed"] += 1
+
+        return all_results

     def __enter__(self):
-        """
+        """Sync context manager entry for CLI usage."""
         return self

     def __exit__(self, exc_type, exc_val, exc_tb):
-        """
-        self.
+        """Sync context manager exit for CLI usage."""
+        asyncio.run(self.aclose())


 # CLI commands for publishing
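Note (not part of the diff): given the new `publish_all_models` method and the sync context manager above, programmatic use presumably looks roughly like the sketch below. The import path is inferred from the wheel layout, the `data` directory is an example, and `UNITYSVC_BASE_URL`/`UNITYSVC_API_KEY` must be set in the environment per the CLI callback added further down.

import asyncio
from pathlib import Path

from unitysvc_services.publisher import ServiceDataPublisher  # module path assumed

# Publish sellers, providers, offerings, and listings in dependency order.
with ServiceDataPublisher() as publisher:  # __exit__ closes the async HTTP client
    summary = asyncio.run(publisher.publish_all_models(Path("data")))

print(summary["total_found"], summary["total_success"], summary["total_failed"])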
@@ -505,34 +851,40 @@ app = typer.Typer(help="Publish data to backend")
 console = Console()


-@app.
-def
-
-
-        help="Path to provider file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
-    ),
-    backend_url: str | None = typer.Option(
-        None,
-        "--backend-url",
-        "-u",
-        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
-    ),
-    api_key: str | None = typer.Option(
+@app.callback(invoke_without_command=True)
+def publish_callback(
+    ctx: typer.Context,
+    data_path: Path | None = typer.Option(
         None,
-        "--
-        "-
-        help="
+        "--data-path",
+        "-d",
+        help="Path to data directory (default: current directory)",
     ),
 ):
-    """
-
+    """
+    Publish data to backend.
+
+    When called without a subcommand, publishes all data types in order:
+    sellers → providers → offerings → listings.
+
+    Use subcommands to publish specific data types:
+    - providers: Publish only providers
+    - sellers: Publish only sellers
+    - offerings: Publish only service offerings
+    - listings: Publish only service listings
+
+    Required environment variables:
+    - UNITYSVC_BASE_URL: Backend API URL
+    - UNITYSVC_API_KEY: API key for authentication
+    """
+    # If a subcommand was invoked, skip this callback logic
+    if ctx.invoked_subcommand is not None:
+        return
+
+    # No subcommand - publish all
     # Set data path
     if data_path is None:
-
-        if data_path_str:
-            data_path = Path(data_path_str)
-        else:
-            data_path = Path.cwd() / "data"
+        data_path = Path.cwd()

     if not data_path.is_absolute():
         data_path = Path.cwd() / data_path
@@ -541,38 +893,107 @@ def publish_providers(
         console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
         raise typer.Exit(code=1)

-
-
-
-
-
-
-
+    console.print(f"[bold blue]Publishing all data from:[/bold blue] {data_path}")
+    console.print(f"[bold blue]Backend URL:[/bold blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+
+    try:
+        with ServiceDataPublisher() as publisher:
+            # Call the publish_all_models method (now async)
+            all_results = asyncio.run(publisher.publish_all_models(data_path))
+
+            # Display results for each data type
+            data_type_display_names = {
+                "sellers": "Sellers",
+                "providers": "Providers",
+                "offerings": "Service Offerings",
+                "listings": "Service Listings",
+            }
+
+            for data_type in ["sellers", "providers", "offerings", "listings"]:
+                display_name = data_type_display_names[data_type]
+                results = all_results[data_type]
+
+                console.print(f"\n[bold cyan]{'=' * 60}[/bold cyan]")
+                console.print(f"[bold cyan]{display_name}[/bold cyan]")
+                console.print(f"[bold cyan]{'=' * 60}[/bold cyan]\n")
+
+                console.print(f" Total found: {results['total']}")
+                console.print(f" [green]✓ Success:[/green] {results['success']}")
+                console.print(f" [red]✗ Failed:[/red] {results['failed']}")
+
+                # Display errors if any
+                if results.get("errors"):
+                    console.print(f"\n[bold red]Errors in {display_name}:[/bold red]")
+                    for error in results["errors"]:
+                        # Check if this is a skipped item
+                        if isinstance(error, dict) and error.get("error", "").startswith("skipped"):
+                            continue
+                        console.print(f" [red]✗[/red] {error.get('file', 'unknown')}")
+                        console.print(f" {error.get('error', 'unknown error')}")
+
+            # Final summary
+            console.print(f"\n[bold cyan]{'=' * 60}[/bold cyan]")
+            console.print("[bold]Final Publishing Summary[/bold]")
+            console.print(f"[bold cyan]{'=' * 60}[/bold cyan]\n")
+            console.print(f" Total found: {all_results['total_found']}")
+            console.print(f" [green]✓ Success:[/green] {all_results['total_success']}")
+            console.print(f" [red]✗ Failed:[/red] {all_results['total_failed']}")
+
+            if all_results["total_failed"] > 0:
+                console.print(
+                    f"\n[yellow]⚠[/yellow] Completed with {all_results['total_failed']} failure(s)",
+                    style="bold yellow",
+                )
+                raise typer.Exit(code=1)
+            else:
+                console.print(
+                    "\n[green]✓[/green] All data published successfully!",
+                    style="bold green",
+                )
+
+    except typer.Exit:
+        raise
+    except Exception as e:
+        console.print(f"[red]✗[/red] Failed to publish all data: {e}", style="bold red")
         raise typer.Exit(code=1)

-
-
-
-
-
-
-
+
+@app.command("providers")
+def publish_providers(
+    data_path: Path | None = typer.Option(
+        None,
+        "--data-path",
+        "-d",
+        help="Path to provider file or directory (default: current directory)",
+    ),
+):
+    """Publish provider(s) from a file or directory."""
+
+    # Set data path
+    if data_path is None:
+        data_path = Path.cwd()
+
+    if not data_path.is_absolute():
+        data_path = Path.cwd() / data_path
+
+    if not data_path.exists():
+        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
         raise typer.Exit(code=1)

     try:
-        with ServiceDataPublisher(
+        with ServiceDataPublisher() as publisher:
             # Handle single file
             if data_path.is_file():
                 console.print(f"[blue]Publishing provider:[/blue] {data_path}")
-                console.print(f"[blue]Backend URL:[/blue] {
-                result = publisher.
+                console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                result = asyncio.run(publisher.post_provider_async(data_path))
                 console.print("[green]✓[/green] Provider published successfully!")
                 console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
             # Handle directory
             else:
                 console.print(f"[blue]Scanning for providers in:[/blue] {data_path}")
-                console.print(f"[blue]Backend URL:[/blue] {
-                results = publisher.publish_all_providers(data_path)
+                console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                results = asyncio.run(publisher.publish_all_providers(data_path))

             # Display summary
             console.print("\n[bold]Publishing Summary:[/bold]")
@@ -593,39 +1014,23 @@ def publish_providers(
     except typer.Exit:
         raise
     except Exception as e:
-        console.print(
-            f"[red]✗[/red] Failed to publish providers: {e}", style="bold red"
-        )
+        console.print(f"[red]✗[/red] Failed to publish providers: {e}", style="bold red")
         raise typer.Exit(code=1)


 @app.command("sellers")
 def publish_sellers(
-    data_path: Path | None = typer.
-        None,
-        help="Path to seller file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
-    ),
-    backend_url: str | None = typer.Option(
-        None,
-        "--backend-url",
-        "-u",
-        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
-    ),
-    api_key: str | None = typer.Option(
+    data_path: Path | None = typer.Option(
         None,
-        "--
-        "-
-        help="
+        "--data-path",
+        "-d",
+        help="Path to seller file or directory (default: current directory)",
     ),
 ):
     """Publish seller(s) from a file or directory."""
     # Set data path
     if data_path is None:
-
-        if data_path_str:
-            data_path = Path(data_path_str)
-        else:
-            data_path = Path.cwd() / "data"
+        data_path = Path.cwd()

     if not data_path.is_absolute():
         data_path = Path.cwd() / data_path
@@ -634,38 +1039,20 @@ def publish_sellers(
         console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
         raise typer.Exit(code=1)

-    # Get backend URL
-    backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL")
-    if not backend_url:
-        console.print(
-            "[red]✗[/red] Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
-
-    # Get API key
-    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
-    if not api_key:
-        console.print(
-            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
-
     try:
-        with ServiceDataPublisher(
+        with ServiceDataPublisher() as publisher:
             # Handle single file
             if data_path.is_file():
                 console.print(f"[blue]Publishing seller:[/blue] {data_path}")
-                console.print(f"[blue]Backend URL:[/blue] {
-                result = publisher.
+                console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                result = asyncio.run(publisher.post_seller_async(data_path))
                 console.print("[green]✓[/green] Seller published successfully!")
                 console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
             # Handle directory
             else:
                 console.print(f"[blue]Scanning for sellers in:[/blue] {data_path}")
-                console.print(f"[blue]Backend URL:[/blue] {
-                results = publisher.publish_all_sellers(data_path)
+                console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                results = asyncio.run(publisher.publish_all_sellers(data_path))

             console.print("\n[bold]Publishing Summary:[/bold]")
             console.print(f" Total found: {results['total']}")
@@ -679,9 +1066,7 @@ def publish_sellers(
                     console.print(f" {error['error']}")
                 raise typer.Exit(code=1)
             else:
-                console.print(
-                    "\n[green]✓[/green] All sellers published successfully!"
-                )
+                console.print("\n[green]✓[/green] All sellers published successfully!")

     except typer.Exit:
         raise
@@ -692,31 +1077,17 @@ def publish_sellers(

 @app.command("offerings")
 def publish_offerings(
-    data_path: Path | None = typer.
-        None,
-        help="Path to service offering file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
-    ),
-    backend_url: str | None = typer.Option(
-        None,
-        "--backend-url",
-        "-u",
-        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
-    ),
-    api_key: str | None = typer.Option(
+    data_path: Path | None = typer.Option(
         None,
-        "--
-        "-
-        help="
+        "--data-path",
+        "-d",
+        help="Path to service offering file or directory (default: current directory)",
     ),
 ):
     """Publish service offering(s) from a file or directory."""
     # Set data path
     if data_path is None:
-
-        if data_path_str:
-            data_path = Path(data_path_str)
-        else:
-            data_path = Path.cwd() / "data"
+        data_path = Path.cwd()

     if not data_path.is_absolute():
         data_path = Path.cwd() / data_path
@@ -725,42 +1096,20 @@ def publish_offerings(
         console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
         raise typer.Exit(code=1)

-    # Get backend URL from argument or environment
-    backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL")
-    if not backend_url:
-        console.print(
-            "[red]✗[/red] Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
-
-    # Get API key from argument or environment
-    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
-    if not api_key:
-        console.print(
-            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
-
     try:
-        with ServiceDataPublisher(
+        with ServiceDataPublisher() as publisher:
             # Handle single file
             if data_path.is_file():
                 console.print(f"[blue]Publishing service offering:[/blue] {data_path}")
-                console.print(f"[blue]Backend URL:[/blue] {
-                result = publisher.
-                console.print(
-                    "[green]✓[/green] Service offering published successfully!"
-                )
+                console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                result = asyncio.run(publisher.post_service_offering_async(data_path))
+                console.print("[green]✓[/green] Service offering published successfully!")
                 console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
             # Handle directory
             else:
-                console.print(
-
-                )
-                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
-                results = publisher.publish_all_offerings(data_path)
+                console.print(f"[blue]Scanning for service offerings in:[/blue] {data_path}")
+                console.print(f"[blue]Backend URL:[/bold blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                results = asyncio.run(publisher.publish_all_offerings(data_path))

             console.print("\n[bold]Publishing Summary:[/bold]")
             console.print(f" Total found: {results['total']}")
@@ -774,47 +1123,29 @@ def publish_offerings(
                     console.print(f" {error['error']}")
                 raise typer.Exit(code=1)
             else:
-                console.print(
-                    "\n[green]✓[/green] All service offerings published successfully!"
-                )
+                console.print("\n[green]✓[/green] All service offerings published successfully!")

     except typer.Exit:
         raise
     except Exception as e:
-        console.print(
-            f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red"
-        )
+        console.print(f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red")
         raise typer.Exit(code=1)


 @app.command("listings")
 def publish_listings(
-    data_path: Path | None = typer.
+    data_path: Path | None = typer.Option(
         None,
-
-
-
-        None,
-        "--backend-url",
-        "-u",
-        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
-    ),
-    api_key: str | None = typer.Option(
-        None,
-        "--api-key",
-        "-k",
-        help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
+        "--data-path",
+        "-d",
+        help="Path to service listing file or directory (default: current directory)",
     ),
 ):
     """Publish service listing(s) from a file or directory."""

     # Set data path
     if data_path is None:
-
-        if data_path_str:
-            data_path = Path(data_path_str)
-        else:
-            data_path = Path.cwd() / "data"
+        data_path = Path.cwd()

     if not data_path.is_absolute():
         data_path = Path.cwd() / data_path
@@ -823,42 +1154,20 @@ def publish_listings(
         console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
         raise typer.Exit(code=1)

-    # Get backend URL from argument or environment
-    backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL")
-    if not backend_url:
-        console.print(
-            "[red]✗[/red] Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
-
-    # Get API key from argument or environment
-    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
-    if not api_key:
-        console.print(
-            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
-
     try:
-        with ServiceDataPublisher(
+        with ServiceDataPublisher() as publisher:
             # Handle single file
             if data_path.is_file():
                 console.print(f"[blue]Publishing service listing:[/blue] {data_path}")
-                console.print(f"[blue]Backend URL:[/blue] {
-                result = publisher.
-                console.print(
-                    "[green]✓[/green] Service listing published successfully!"
-                )
+                console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                result = asyncio.run(publisher.post_service_listing_async(data_path))
+                console.print("[green]✓[/green] Service listing published successfully!")
                 console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
             # Handle directory
             else:
-                console.print(
-
-                )
-                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
-                results = publisher.publish_all_listings(data_path)
+                console.print(f"[blue]Scanning for service listings in:[/blue] {data_path}")
+                console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+                results = asyncio.run(publisher.publish_all_listings(data_path))

             console.print("\n[bold]Publishing Summary:[/bold]")
             console.print(f" Total found: {results['total']}")
@@ -872,14 +1181,10 @@ def publish_listings(
                     console.print(f" {error['error']}")
                 raise typer.Exit(code=1)
             else:
-                console.print(
-                    "\n[green]✓[/green] All service listings published successfully!"
-                )
+                console.print("\n[green]✓[/green] All service listings published successfully!")

     except typer.Exit:
         raise
     except Exception as e:
-        console.print(
-            f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red"
-        )
+        console.print(f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red")
         raise typer.Exit(code=1)