unitysvc-services 0.1.1__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unitysvc_services/api.py +321 -0
- unitysvc_services/cli.py +2 -1
- unitysvc_services/format_data.py +2 -7
- unitysvc_services/list.py +14 -43
- unitysvc_services/models/base.py +169 -102
- unitysvc_services/models/listing_v1.py +25 -9
- unitysvc_services/models/provider_v1.py +19 -8
- unitysvc_services/models/seller_v1.py +10 -8
- unitysvc_services/models/service_v1.py +8 -1
- unitysvc_services/populate.py +20 -6
- unitysvc_services/publisher.py +897 -462
- unitysvc_services/py.typed +0 -0
- unitysvc_services/query.py +577 -384
- unitysvc_services/test.py +769 -0
- unitysvc_services/update.py +4 -13
- unitysvc_services/utils.py +55 -6
- unitysvc_services/validator.py +117 -86
- unitysvc_services-0.1.5.dist-info/METADATA +182 -0
- unitysvc_services-0.1.5.dist-info/RECORD +26 -0
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.5.dist-info}/entry_points.txt +1 -0
- unitysvc_services-0.1.1.dist-info/METADATA +0 -173
- unitysvc_services-0.1.1.dist-info/RECORD +0 -23
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.5.dist-info}/WHEEL +0 -0
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {unitysvc_services-0.1.1.dist-info → unitysvc_services-0.1.5.dist-info}/top_level.txt +0 -0
unitysvc_services/publisher.py
CHANGED
@@ -1,5 +1,6 @@
 """Data publisher module for posting service data to UnitySVC backend."""
 
+import asyncio
 import base64
 import json
 import os
@@ -11,24 +12,25 @@ import httpx
 import typer
 from rich.console import Console
 
+from .api import UnitySvcAPI
 from .models.base import ProviderStatusEnum, SellerStatusEnum
-from .utils import convert_convenience_fields_to_documents, find_files_by_schema
+from .utils import convert_convenience_fields_to_documents, find_files_by_schema, render_template_file
 from .validator import DataValidator
 
 
-class ServiceDataPublisher:
-    """Publishes service data to UnitySVC backend endpoints.
+class ServiceDataPublisher(UnitySvcAPI):
+    """Publishes service data to UnitySVC backend endpoints.
 
-
-
-
-
-
-
-
-
-
-
+    Inherits base HTTP client with curl fallback from UnitySvcAPI.
+    Extends with async operations for concurrent publishing.
+    """
+
+    def __init__(self) -> None:
+        # Initialize base class (provides self.client as AsyncClient with curl fallback)
+        super().__init__()
+
+        # Semaphore to limit concurrent requests and prevent connection pool exhaustion
+        self.max_concurrent_requests = 15
 
     def load_data_file(self, file_path: Path) -> dict[str, Any]:
         """Load data from JSON or TOML file."""
@@ -58,171 +60,305 @@ class ServiceDataPublisher:
         return base64.b64encode(f.read()).decode("ascii")
 
     def resolve_file_references(
-        self,
+        self,
+        data: dict[str, Any],
+        base_path: Path,
+        listing: dict[str, Any] | None = None,
+        offering: dict[str, Any] | None = None,
+        provider: dict[str, Any] | None = None,
+        seller: dict[str, Any] | None = None,
     ) -> dict[str, Any]:
-        """Recursively resolve file references and include content in data.
+        """Recursively resolve file references and include content in data.
+
+        For Jinja2 template files (.j2), renders the template with provided context
+        and strips the .j2 extension from file_path.
+
+        Args:
+            data: Data dictionary potentially containing file_path references
+            base_path: Base path for resolving relative file paths
+            listing: Listing data for template rendering (optional)
+            offering: Offering data for template rendering (optional)
+            provider: Provider data for template rendering (optional)
+            seller: Seller data for template rendering (optional)
+
+        Returns:
+            Data with file references resolved and content loaded
+        """
         result: dict[str, Any] = {}
 
         for key, value in data.items():
             if isinstance(value, dict):
                 # Recursively process nested dictionaries
-                result[key] = self.resolve_file_references(
+                result[key] = self.resolve_file_references(
+                    value, base_path, listing=listing, offering=offering, provider=provider, seller=seller
+                )
             elif isinstance(value, list):
                 # Process lists
                 result[key] = [
                     (
-                        self.resolve_file_references(
+                        self.resolve_file_references(
+                            item, base_path, listing=listing, offering=offering, provider=provider, seller=seller
+                        )
                         if isinstance(item, dict)
                         else item
                     )
                     for item in value
                 ]
             elif key == "file_path" and isinstance(value, str):
-                # This is a file reference - load the content
-
-
-
-
-
-
-
-
-
-
-
+                # This is a file reference - load the content and render if template
+                full_path = base_path / value if not Path(value).is_absolute() else Path(value)
+
+                if not full_path.exists():
+                    raise FileNotFoundError(f"File not found: {full_path}")
+
+                # Render template if applicable
+                try:
+                    content, actual_filename = render_template_file(
+                        full_path,
+                        listing=listing,
+                        offering=offering,
+                        provider=provider,
+                        seller=seller,
+                    )
+                    result["file_content"] = content
+
+                    # Update file_path to remove .j2 extension if it was a template
+                    if full_path.name.endswith(".j2"):
+                        # Strip .j2 from the path
+                        new_path = str(value)[:-3]  # Remove last 3 characters (.j2)
+                        result[key] = new_path
+                    else:
+                        result[key] = value
+
+                except Exception as e:
+                    raise ValueError(f"Failed to load/render file content from '{value}': {e}")
             else:
                 result[key] = value
 
         return result
 
-    def
-
+    async def post(  # type: ignore[override]
+        self, endpoint: str, data: dict[str, Any], check_status: bool = True
+    ) -> tuple[dict[str, Any], int]:
+        """Make a POST request to the backend API with automatic curl fallback.
 
-
-
-        """
+        Override of base class post() that returns both JSON and status code.
+        Uses base class client with automatic curl fallback.
 
-
-
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/seller")
+            data: JSON data to post
+            check_status: Whether to raise on non-2xx status codes (default: True)
 
-
-
-        data_with_content = self.resolve_file_references(data, base_path)
+        Returns:
+            Tuple of (JSON response, HTTP status code)
 
-
-
-
-
-
-
-
+        Raises:
+            RuntimeError: If both httpx and curl fail
+        """
+        # Use base class client (self.client from UnitySvcQuery) with automatic curl fallback
+        # If we already know curl is needed, use it directly
+        if self.use_curl_fallback:
+            # Use base class curl fallback method
+            response_json = await super().post(endpoint, json_data=data)
+            # Curl POST doesn't return status code separately, assume 2xx if no exception
+            status_code = 200
+        else:
+            try:
+                response = await self.client.post(f"{self.base_url}{endpoint}", json=data)
+                status_code = response.status_code
+
+                if check_status:
+                    response.raise_for_status()
+
+                response_json = response.json()
+            except (httpx.ConnectError, OSError):
+                # Connection failed - switch to curl fallback and retry
+                self.use_curl_fallback = True
+                response_json = await super().post(endpoint, json_data=data)
+                status_code = 200  # Assume success if curl didn't raise
+
+        return (response_json, status_code)
+
+    async def _post_with_retry(
+        self,
+        endpoint: str,
+        data: dict[str, Any],
+        entity_type: str,
+        entity_name: str,
+        context_info: str = "",
+        max_retries: int = 3,
+    ) -> dict[str, Any]:
+        """
+        Generic retry wrapper for posting data to backend API with task polling.
+
+        The backend now returns HTTP 202 with a task_id. This method:
+        1. Submits the publish request
+        2. Gets the task_id from the response
+        3. Polls /tasks/{task_id} until completion
+        4. Returns the final result
+
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/listing")
+            data: JSON data to post
+            entity_type: Type of entity being published (for error messages)
+            entity_name: Name of the entity being published (for error messages)
+            context_info: Additional context for error messages (e.g., provider, service info)
+            max_retries: Maximum number of retry attempts
+
+        Returns:
+            Response JSON from successful API call
+
+        Raises:
+            ValueError: On client errors (4xx) or after exhausting retries
+        """
+        last_exception = None
+        for attempt in range(max_retries):
+            try:
+                # Use the public post() method with automatic curl fallback
+                response_json, status_code = await self.post(endpoint, data, check_status=False)
 
-
-
-
-
-                        f"Cannot extract provider_name from path: {data_file}. "
-                        f"Expected path to contain .../{{provider_name}}/services/..."
-                    )
+                # Handle task-based response (HTTP 202)
+                if status_code == 202:
+                    # Backend returns task_id - poll for completion
+                    task_id = response_json.get("task_id")
 
-
-
-
-            # Should only be one provider file in the directory
-            _provider_file, _format, provider_data = provider_files[0]
-            provider_status = provider_data.get("status", ProviderStatusEnum.active)
-            if provider_status == ProviderStatusEnum.incomplete:
-                return {
-                    "skipped": True,
-                    "reason": f"Provider status is '{provider_status}' - not publishing offering to backend",
-                    "name": data.get("name", "unknown"),
-                }
+                    if not task_id:
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(f"No task_id in response for {entity_type} '{entity_name}'{context_msg}")
 
-
-
-
-
-
-
-
+                    # Poll task status until completion using check_task utility
+                    try:
+                        result = await self.check_task(task_id)
+                        return result
+                    except ValueError as e:
+                        # Add context to task errors
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(f"Task failed for {entity_type} '{entity_name}'{context_msg}: {e}")
+
+                # Check for errors
+                if status_code >= 400:
+                    # Don't retry on 4xx errors (client errors) - they won't succeed on retry
+                    if 400 <= status_code < 500:
+                        error_detail = response_json.get("detail", str(response_json))
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(
+                            f"Failed to publish {entity_type} '{entity_name}'{context_msg}: {error_detail}"
+                        )
+
+                    # 5xx errors - retry with exponential backoff
+                    if attempt < max_retries - 1:
+                        wait_time = 2**attempt  # Exponential backoff: 1s, 2s, 4s
+                        await asyncio.sleep(wait_time)
+                        continue
+                    else:
+                        # Last attempt failed
+                        error_detail = response_json.get("detail", str(response_json))
+                        context_msg = f" ({context_info})" if context_info else ""
+                        raise ValueError(
+                            f"Failed to publish {entity_type} after {max_retries} attempts: "
+                            f"'{entity_name}'{context_msg}: {error_detail}"
+                        )
 
-
-
+                # Success response (2xx)
+                return response_json
+
+            except (httpx.NetworkError, httpx.TimeoutException, RuntimeError) as e:
+                # Network/connection errors - the post() method should have tried curl fallback
+                # If we're here, both httpx and curl failed
+                last_exception = e
+                if attempt < max_retries - 1:
+                    wait_time = 2**attempt  # Exponential backoff: 1s, 2s, 4s
+                    await asyncio.sleep(wait_time)
+                    continue
+                else:
+                    raise ValueError(
+                        f"Network error after {max_retries} attempts for {entity_type} '{entity_name}': {str(e)}"
+                    )
 
-
-
-
+        # Should never reach here, but just in case
+        if last_exception:
+            raise last_exception
+        raise ValueError("Unexpected error in retry logic")
+
+    async def post_service_listing_async(self, listing_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_service_listing for concurrent publishing with retry logic."""
         # Load the listing data file
-        data = self.load_data_file(
+        data = self.load_data_file(listing_file)
 
-        #
-
-
+        # If name is not provided, use filename (without extension)
+        if "name" not in data or not data.get("name"):
+            data["name"] = listing_file.stem
 
-        # Extract provider_name from directory structure
-        parts =
+        # Extract provider_name from directory structure (needed before loading provider data)
+        parts = listing_file.parts
         try:
             services_idx = parts.index("services")
             provider_name = parts[services_idx - 1]
-
+            data["provider_name"] = provider_name
+
+            # Find provider directory to load provider data
+            provider_dir = Path(*parts[:services_idx])
         except (ValueError, IndexError):
             raise ValueError(
-                f"Cannot extract provider_name from path: {
+                f"Cannot extract provider_name from path: {listing_file}. "
                 f"Expected path to contain .../{{provider_name}}/services/..."
             )
 
         # If service_name is not in listing data, find it from service files in the same directory
-        if
-            "service_name" not in data_with_content
-            or not data_with_content["service_name"]
-        ):
+        if "service_name" not in data or not data["service_name"]:
             # Find all service files in the same directory
-            service_files = find_files_by_schema(
+            service_files = find_files_by_schema(listing_file.parent, "service_v1")
 
             if len(service_files) == 0:
                 raise ValueError(
-                    f"Cannot find any service_v1 files in {
+                    f"Cannot find any service_v1 files in {listing_file.parent}. "
                     f"Listing files must be in the same directory as a service definition."
                 )
             elif len(service_files) > 1:
-                service_names = [
-                    data.get("name", "unknown") for _, _, data in service_files
-                ]
+                service_names = [svc_data.get("name", "unknown") for _, _, svc_data in service_files]
                 raise ValueError(
-                    f"Multiple services found in {
-                    f"Please add 'service_name' field to {
+                    f"Multiple services found in {listing_file.parent}: {', '.join(service_names)}. "
+                    f"Please add 'service_name' field to {listing_file.name} to specify which "
                    f"service this listing belongs to."
                 )
             else:
                 # Exactly one service found - use it
                 _service_file, _format, service_data = service_files[0]
-
-
+                data["service_name"] = service_data.get("name")
+                data["service_version"] = service_data.get("version")
         else:
             # service_name is provided in listing data, find the matching service to get version
-            service_name =
+            service_name = data["service_name"]
             service_files = find_files_by_schema(
-
+                listing_file.parent, "service_v1", field_filter=(("name", service_name),)
             )
 
             if not service_files:
                 raise ValueError(
-                    f"Service '{service_name}' specified in {
+                    f"Service '{service_name}' specified in {listing_file.name} not found in {listing_file.parent}."
                 )
 
             # Get version from the found service
             _service_file, _format, service_data = service_files[0]
-
+            data["service_version"] = service_data.get("version")
+
+        # Load provider data for template rendering
+        provider_files = find_files_by_schema(provider_dir, "provider_v1")
+        if provider_files:
+            _provider_file, _format, provider_data = provider_files[0]
+        else:
+            provider_data = {}
 
         # Find seller_name from seller definition in the data directory
         # Navigate up to find the data directory and look for seller file
-        data_dir =
+        data_dir = listing_file.parent
         while data_dir.name != "data" and data_dir.parent != data_dir:
             data_dir = data_dir.parent
 
         if data_dir.name != "data":
             raise ValueError(
-                f"Cannot find 'data' directory in path: {
+                f"Cannot find 'data' directory in path: {listing_file}. "
                 f"Expected path structure includes a 'data' directory."
             )
 
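Note: the hunk above only shows the call site of the new `.j2` handling; `render_template_file` itself lives in `unitysvc_services/utils.py` and is not part of this diff. A minimal sketch of the behaviour the publisher appears to rely on — render a Jinja2 template with the listing/offering/provider/seller context and return the content plus the de-suffixed filename. The function name, signature, and the use of the `jinja2` package here are assumptions for illustration, not the package's actual implementation.

```python
# Hypothetical sketch only; the real render_template_file() is defined in
# unitysvc_services.utils and may differ.
from pathlib import Path

from jinja2 import Template  # assumes Jinja2 is available


def render_template_file_sketch(
    path: Path,
    listing: dict | None = None,
    offering: dict | None = None,
    provider: dict | None = None,
    seller: dict | None = None,
) -> tuple[str, str]:
    """Return (rendered content, filename with any trailing .j2 stripped)."""
    text = path.read_text(encoding="utf-8")
    if path.name.endswith(".j2"):
        # Templates see the surrounding entities as template variables.
        text = Template(text).render(
            listing=listing or {},
            offering=offering or {},
            provider=provider or {},
            seller=seller or {},
        )
        return text, path.name[:-3]
    # Non-template files are passed through unchanged.
    return text, path.name
```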
@@ -250,23 +386,115 @@ class ServiceDataPublisher:
         if not seller_name:
             raise ValueError("Seller data missing 'name' field")
 
-
+        data["seller_name"] = seller_name
 
         # Map listing_status to status if present
-        if "listing_status" in
-
+        if "listing_status" in data:
+            data["status"] = data.pop("listing_status")
+
+        # NOW resolve file references with all context (listing, offering, provider, seller)
+        base_path = listing_file.parent
+        data_with_content = self.resolve_file_references(
+            data,
+            base_path,
+            listing=data,
+            offering=service_data,
+            provider=provider_data,
+            seller=seller_data,
+        )
 
-        # Post to the endpoint
-
-            f"{
-
+        # Post to the endpoint using retry helper
+        context_info = (
+            f"service: {data_with_content.get('service_name')}, "
+            f"provider: {data_with_content.get('provider_name')}, "
+            f"seller: {data_with_content.get('seller_name')}"
+        )
+        result = await self._post_with_retry(
+            endpoint="/publish/listing",
+            data=data_with_content,
+            entity_type="listing",
+            entity_name=data.get("name", "unknown"),
+            context_info=context_info,
+            max_retries=max_retries,
         )
-        response.raise_for_status()
-        return response.json()
 
-
-        ""
+        # Add local metadata to result for display purposes
+        result["service_name"] = data_with_content.get("service_name")
+        result["provider_name"] = data_with_content.get("provider_name")
+        result["seller_name"] = data_with_content.get("seller_name")
 
+        return result
+
+    async def post_service_offering_async(self, data_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_service_offering for concurrent publishing with retry logic."""
+        # Load the data file
+        data = self.load_data_file(data_file)
+
+        # Convert convenience fields first
+        base_path = data_file.parent
+        data = convert_convenience_fields_to_documents(
+            data, base_path, logo_field="logo", terms_field="terms_of_service"
+        )
+
+        # Extract provider_name from directory structure
+        # Find the 'services' directory and use its parent as provider_name
+        parts = data_file.parts
+        try:
+            services_idx = parts.index("services")
+            provider_name = parts[services_idx - 1]
+            data["provider_name"] = provider_name
+
+            # Find provider directory to check status and load data
+            provider_dir = Path(*parts[:services_idx])
+        except (ValueError, IndexError):
+            raise ValueError(
+                f"Cannot extract provider_name from path: {data_file}. "
+                f"Expected path to contain .../{{provider_name}}/services/..."
+            )
+
+        # Load provider data for status check and template rendering
+        provider_files = find_files_by_schema(provider_dir, "provider_v1")
+        if provider_files:
+            # Should only be one provider file in the directory
+            _provider_file, _format, provider_data = provider_files[0]
+            provider_status = provider_data.get("status", ProviderStatusEnum.active)
+            if provider_status == ProviderStatusEnum.incomplete:
+                return {
+                    "skipped": True,
+                    "reason": f"Provider status is '{provider_status}' - not publishing offering to backend",
+                    "name": data.get("name", "unknown"),
+                }
+        else:
+            provider_data = {}
+
+        # NOW resolve file references with all context (offering, provider)
+        data_with_content = self.resolve_file_references(
+            data,
+            base_path,
+            listing=None,
+            offering=data,
+            provider=provider_data,
+            seller=None,
+        )
+
+        # Post to the endpoint using retry helper
+        context_info = f"provider: {data_with_content.get('provider_name')}"
+        result = await self._post_with_retry(
+            endpoint="/publish/offering",
+            data=data_with_content,
+            entity_type="offering",
+            entity_name=data.get("name", "unknown"),
+            context_info=context_info,
+            max_retries=max_retries,
+        )
+
+        # Add local metadata to result for display purposes
+        result["provider_name"] = data_with_content.get("provider_name")
+
+        return result
+
+    async def post_provider_async(self, data_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_provider for concurrent publishing with retry logic."""
         # Load the data file
         data = self.load_data_file(data_file)
 
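The `_post_with_retry` helper added above combines two patterns: exponential backoff for 5xx/network failures and polling of a task endpoint when the backend answers HTTP 202 with a `task_id`. A stripped-down, stand-alone sketch of the same control flow follows; the endpoint paths and the `poll_task` helper are illustrative placeholders, not the package's API.

```python
import asyncio

import httpx


async def post_with_retry_sketch(
    client: httpx.AsyncClient, url: str, payload: dict, max_retries: int = 3
) -> dict:
    """Illustrative only: 4xx fails fast, 5xx/network errors back off 1s/2s/4s,
    and a 202 response is resolved by polling a /tasks/{id} endpoint."""
    for attempt in range(max_retries):
        try:
            response = await client.post(url, json=payload)
        except (httpx.NetworkError, httpx.TimeoutException):
            if attempt == max_retries - 1:
                raise
            await asyncio.sleep(2**attempt)  # 1s, 2s, 4s
            continue

        if response.status_code == 202:
            task_id = response.json()["task_id"]
            return await poll_task(client, task_id)
        if 400 <= response.status_code < 500:
            raise ValueError(f"client error: {response.text}")  # never retried
        if response.status_code >= 500:
            if attempt == max_retries - 1:
                raise ValueError(f"server error after {max_retries} attempts")
            await asyncio.sleep(2**attempt)
            continue
        return response.json()
    raise RuntimeError("unreachable")


async def poll_task(client: httpx.AsyncClient, task_id: str, interval: float = 1.0) -> dict:
    """Hypothetical stand-in for the check_task() utility: poll until the task settles."""
    while True:
        status = (await client.get(f"/tasks/{task_id}")).json()
        if status.get("status") in ("completed", "failed"):
            return status
        await asyncio.sleep(interval)
```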
@@ -286,25 +514,27 @@ class ServiceDataPublisher:
             data, base_path, logo_field="logo", terms_field="terms_of_service"
         )
 
-        # Resolve file references and include content
-        data_with_content = self.resolve_file_references(
-
-
-
-
-
-
-        # Post to the endpoint
-        response = self.client.post(
-            f"{self.base_url}/publish/provider",
-            json=data_with_content,
+        # Resolve file references and include content with provider context
+        data_with_content = self.resolve_file_references(
+            data,
+            base_path,
+            listing=None,
+            offering=None,
+            provider=data,
+            seller=None,
         )
-        response.raise_for_status()
-        return response.json()
 
-
-
+        # Post to the endpoint using retry helper
+        return await self._post_with_retry(
+            endpoint="/publish/provider",
+            data=data_with_content,
+            entity_type="provider",
+            entity_name=data.get("name", "unknown"),
+            max_retries=max_retries,
+        )
 
+    async def post_seller_async(self, data_file: Path, max_retries: int = 3) -> dict[str, Any]:
+        """Async version of post_seller for concurrent publishing with retry logic."""
         # Load the data file
         data = self.load_data_file(data_file)
 
@@ -320,25 +550,26 @@ class ServiceDataPublisher:
 
         # Convert convenience fields (logo only for sellers, no terms_of_service)
         base_path = data_file.parent
-        data = convert_convenience_fields_to_documents(
-
+        data = convert_convenience_fields_to_documents(data, base_path, logo_field="logo", terms_field=None)
+
+        # Resolve file references and include content with seller context
+        data_with_content = self.resolve_file_references(
+            data,
+            base_path,
+            listing=None,
+            offering=None,
+            provider=None,
+            seller=data,
         )
 
-        #
-
-
-
-
-
-
-
-        # Post to the endpoint
-        response = self.client.post(
-            f"{self.base_url}/publish/seller",
-            json=data_with_content,
+        # Post to the endpoint using retry helper
+        return await self._post_with_retry(
+            endpoint="/publish/seller",
+            data=data_with_content,
+            entity_type="seller",
+            entity_name=data.get("name", "unknown"),
+            max_retries=max_retries,
        )
-        response.raise_for_status()
-        return response.json()
 
     def find_offering_files(self, data_dir: Path) -> list[Path]:
         """Find all service offering files in a directory tree."""
@@ -360,14 +591,48 @@ class ServiceDataPublisher:
         files = find_files_by_schema(data_dir, "seller_v1")
         return sorted([f[0] for f in files])
 
-    def
+    async def _publish_offering_task(
+        self, offering_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
+        """
+        Async task to publish a single offering with concurrency control.
+
+        Returns tuple of (offering_file, result_or_exception).
+        """
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load offering data to get the name
+                data = self.load_data_file(offering_file)
+                offering_name = data.get("name", offering_file.stem)
+
+                # Publish the offering
+                result = await self.post_service_offering_async(offering_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped offering: [cyan]{offering_name}[/cyan] - {reason}")
+                else:
+                    provider_name = result.get("provider_name")
+                    console.print(
+                        f" [green]✓[/green] Published offering: [cyan]{offering_name}[/cyan] "
+                        f"(provider: {provider_name})"
+                    )
+
+                return (offering_file, result)
+            except Exception as e:
+                data = self.load_data_file(offering_file)
+                offering_name = data.get("name", offering_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish offering: [cyan]{offering_name}[/cyan] - {str(e)}")
+                return (offering_file, e)
+
+    async def publish_all_offerings(self, data_dir: Path) -> dict[str, Any]:
         """
-        Publish all service offerings found in a directory tree.
+        Publish all service offerings found in a directory tree concurrently.
 
         Validates data consistency before publishing.
         Returns a summary of successes and failures.
         """
-
         # Validate all service directories first
         validator = DataValidator(data_dir, data_dir.parent / "schema")
         validation_errors = validator.validate_all_service_directories(data_dir)
@@ -376,10 +641,7 @@ class ServiceDataPublisher:
                 "total": 0,
                 "success": 0,
                 "failed": 0,
-                "errors": [
-                    {"file": "validation", "error": error}
-                    for error in validation_errors
-                ],
+                "errors": [{"file": "validation", "error": error} for error in validation_errors],
             }
 
         offering_files = self.find_offering_files(data_dir)
@@ -390,19 +652,66 @@ class ServiceDataPublisher:
             "errors": [],
         }
 
-
-
-
-
-
+        if not offering_files:
+            return results
+
+        console = Console()
+
+        # Run all offering publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_offering_task(offering_file, console, semaphore) for offering_file in offering_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for offering_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(offering_file), "error": str(
+                results["errors"].append({"file": str(offering_file), "error": str(result)})
+            else:
+                results["success"] += 1
 
         return results
 
-    def
+    async def _publish_listing_task(
+        self, listing_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
         """
-
+        Async task to publish a single listing with concurrency control.
+
+        Returns tuple of (listing_file, result_or_exception).
+        """
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load listing data to get the name
+                data = self.load_data_file(listing_file)
+                listing_name = data.get("name", listing_file.stem)
+
+                # Publish the listing
+                result = await self.post_service_listing_async(listing_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped listing: [cyan]{listing_name}[/cyan] - {reason}")
+                else:
+                    service_name = result.get("service_name")
+                    provider_name = result.get("provider_name")
+                    console.print(
+                        f" [green]✓[/green] Published listing: [cyan]{listing_name}[/cyan] "
+                        f"(service: {service_name}, provider: {provider_name})"
+                    )
+
+                return (listing_file, result)
+            except Exception as e:
+                data = self.load_data_file(listing_file)
+                listing_name = data.get("name", listing_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish listing: [cyan]{listing_file}[/cyan] - {str(e)}")
+                return (listing_file, e)
+
+    async def publish_all_listings(self, data_dir: Path) -> dict[str, Any]:
+        """
+        Publish all service listings found in a directory tree concurrently.
 
         Validates data consistency before publishing.
         Returns a summary of successes and failures.
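The `publish_all_*` methods in these hunks all follow the same fan-out shape: wrap each file in a task that acquires an `asyncio.Semaphore` sized by `max_concurrent_requests`, run the tasks with `asyncio.gather`, and fold per-file exceptions into the summary instead of letting one failure abort the batch. A generic sketch of that pattern, with `publish_one` standing in for any of the per-file publishers (the helper name and summary shape here are illustrative):

```python
import asyncio
from pathlib import Path
from typing import Any, Awaitable, Callable


async def publish_concurrently(
    files: list[Path],
    publish_one: Callable[[Path], Awaitable[dict[str, Any]]],
    max_concurrent: int = 15,
) -> dict[str, Any]:
    """Bounded fan-out: at most max_concurrent publishes in flight at once."""
    semaphore = asyncio.Semaphore(max_concurrent)
    results: dict[str, Any] = {"total": len(files), "success": 0, "failed": 0, "errors": []}

    async def task(file: Path) -> tuple[Path, dict[str, Any] | Exception]:
        async with semaphore:  # limit concurrent requests
            try:
                return file, await publish_one(file)
            except Exception as e:  # collected, not raised, so other files still publish
                return file, e

    for file, outcome in await asyncio.gather(*(task(f) for f in files)):
        if isinstance(outcome, Exception):
            results["failed"] += 1
            results["errors"].append({"file": str(file), "error": str(outcome)})
        else:
            results["success"] += 1
    return results
```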
@@ -415,10 +724,7 @@ class ServiceDataPublisher:
                 "total": 0,
                 "success": 0,
                 "failed": 0,
-                "errors": [
-                    {"file": "validation", "error": error}
-                    for error in validation_errors
-                ],
+                "errors": [{"file": "validation", "error": error} for error in validation_errors],
             }
 
         listing_files = self.find_listing_files(data_dir)
@@ -429,19 +735,61 @@ class ServiceDataPublisher:
             "errors": [],
         }
 
-
-
-
-
-
+        if not listing_files:
+            return results
+
+        console = Console()
+
+        # Run all listing publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_listing_task(listing_file, console, semaphore) for listing_file in listing_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for listing_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(listing_file), "error": str(
+                results["errors"].append({"file": str(listing_file), "error": str(result)})
+            else:
+                results["success"] += 1
 
         return results
 
-    def
+    async def _publish_provider_task(
+        self, provider_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
+        """
+        Async task to publish a single provider with concurrency control.
+
+        Returns tuple of (provider_file, result_or_exception).
+        """
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load provider data to get the name
+                data = self.load_data_file(provider_file)
+                provider_name = data.get("name", provider_file.stem)
+
+                # Publish the provider
+                result = await self.post_provider_async(provider_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped provider: [cyan]{provider_name}[/cyan] - {reason}")
+                else:
+                    console.print(f" [green]✓[/green] Published provider: [cyan]{provider_name}[/cyan]")
+
+                return (provider_file, result)
+            except Exception as e:
+                data = self.load_data_file(provider_file)
+                provider_name = data.get("name", provider_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish provider: [cyan]{provider_name}[/cyan] - {str(e)}")
+                return (provider_file, e)
+
+    async def publish_all_providers(self, data_dir: Path) -> dict[str, Any]:
         """
-        Publish all providers found in a directory tree.
+        Publish all providers found in a directory tree concurrently.
 
         Returns a summary of successes and failures.
         """
@@ -453,19 +801,61 @@ class ServiceDataPublisher:
             "errors": [],
         }
 
-
-
-
-
-
+        if not provider_files:
+            return results
+
+        console = Console()
+
+        # Run all provider publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_provider_task(provider_file, console, semaphore) for provider_file in provider_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for provider_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(provider_file), "error": str(
+                results["errors"].append({"file": str(provider_file), "error": str(result)})
+            else:
+                results["success"] += 1
 
         return results
 
-    def
+    async def _publish_seller_task(
+        self, seller_file: Path, console: Console, semaphore: asyncio.Semaphore
+    ) -> tuple[Path, dict[str, Any] | Exception]:
+        """
+        Async task to publish a single seller with concurrency control.
+
+        Returns tuple of (seller_file, result_or_exception).
+        """
+        async with semaphore:  # Limit concurrent requests
+            try:
+                # Load seller data to get the name
+                data = self.load_data_file(seller_file)
+                seller_name = data.get("name", seller_file.stem)
+
+                # Publish the seller
+                result = await self.post_seller_async(seller_file)
+
+                # Print complete statement after publication
+                if result.get("skipped"):
+                    reason = result.get("reason", "unknown")
+                    console.print(f" [yellow]⊘[/yellow] Skipped seller: [cyan]{seller_name}[/cyan] - {reason}")
+                else:
+                    console.print(f" [green]✓[/green] Published seller: [cyan]{seller_name}[/cyan]")
+
+                return (seller_file, result)
+            except Exception as e:
+                data = self.load_data_file(seller_file)
+                seller_name = data.get("name", seller_file.stem)
+                console.print(f" [red]✗[/red] Failed to publish seller: [cyan]{seller_name}[/cyan] - {str(e)}")
+                return (seller_file, e)
+
+    async def publish_all_sellers(self, data_dir: Path) -> dict[str, Any]:
         """
-        Publish all sellers found in a directory tree.
+        Publish all sellers found in a directory tree concurrently.
 
         Returns a summary of successes and failures.
         """
@@ -477,27 +867,75 @@ class ServiceDataPublisher:
             "errors": [],
         }
 
-
-
-
-
-
+        if not seller_files:
+            return results
+
+        console = Console()
+
+        # Run all seller publications concurrently with rate limiting
+        # Create semaphore to limit concurrent requests
+        semaphore = asyncio.Semaphore(self.max_concurrent_requests)
+        tasks = [self._publish_seller_task(seller_file, console, semaphore) for seller_file in seller_files]
+        task_results = await asyncio.gather(*tasks)
+
+        # Process results
+        for seller_file, result in task_results:
+            if isinstance(result, Exception):
                 results["failed"] += 1
-                results["errors"].append({"file": str(seller_file), "error": str(
+                results["errors"].append({"file": str(seller_file), "error": str(result)})
+            else:
+                results["success"] += 1
 
         return results
 
-    def
-        """
-
+    async def publish_all_models(self, data_dir: Path) -> dict[str, Any]:
+        """
+        Publish all data types in the correct order.
+
+        Publishing order:
+        1. Sellers - Must exist before listings
+        2. Providers - Must exist before offerings
+        3. Service Offerings - Must exist before listings
+        4. Service Listings - Depends on sellers, providers, and offerings
 
-
-        """
-
+        Returns a dict with results for each data type and overall summary.
+        """
+        all_results: dict[str, Any] = {
+            "sellers": {},
+            "providers": {},
+            "offerings": {},
+            "listings": {},
+            "total_success": 0,
+            "total_failed": 0,
+            "total_found": 0,
+        }
+
+        # Publish in order: sellers -> providers -> offerings -> listings
+        publish_order = [
+            ("sellers", self.publish_all_sellers),
+            ("providers", self.publish_all_providers),
+            ("offerings", self.publish_all_offerings),
+            ("listings", self.publish_all_listings),
+        ]
+
+        for data_type, publish_method in publish_order:
+            try:
+                results = await publish_method(data_dir)
+                all_results[data_type] = results
+                all_results["total_success"] += results["success"]
+                all_results["total_failed"] += results["failed"]
+                all_results["total_found"] += results["total"]
+            except Exception as e:
+                # If a publish method fails catastrophically, record the error
+                all_results[data_type] = {
+                    "total": 0,
+                    "success": 0,
+                    "failed": 1,
+                    "errors": [{"file": "N/A", "error": str(e)}],
+                }
+                all_results["total_failed"] += 1
 
-
-        """Context manager exit."""
-        self.close()
+        return all_results
 
 
 # CLI commands for publishing
@@ -505,34 +943,129 @@ app = typer.Typer(help="Publish data to backend")
|
|
505
943
|
console = Console()
|
506
944
|
|
507
945
|
|
508
|
-
@app.
|
509
|
-
def
|
510
|
-
|
511
|
-
|
512
|
-
help="Path to provider file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
|
513
|
-
),
|
514
|
-
backend_url: str | None = typer.Option(
|
946
|
+
@app.callback(invoke_without_command=True)
|
947
|
+
def publish_callback(
|
948
|
+
ctx: typer.Context,
|
949
|
+
data_path: Path | None = typer.Option(
|
515
950
|
None,
|
516
|
-
"--
|
517
|
-
"-
|
518
|
-
help="
|
951
|
+
"--data-path",
|
952
|
+
"-d",
|
953
|
+
help="Path to data directory (default: current directory)",
|
519
954
|
),
|
520
|
-
|
955
|
+
):
|
956
|
+
"""
|
957
|
+
Publish data to backend.
|
958
|
+
|
959
|
+
When called without a subcommand, publishes all data types in order:
|
960
|
+
sellers → providers → offerings → listings.
|
961
|
+
|
962
|
+
Use subcommands to publish specific data types:
|
963
|
+
- providers: Publish only providers
|
964
|
+
- sellers: Publish only sellers
|
965
|
+
- offerings: Publish only service offerings
|
966
|
+
- listings: Publish only service listings
|
967
|
+
|
968
|
+
Required environment variables:
|
969
|
+
- UNITYSVC_BASE_URL: Backend API URL
|
970
|
+
- UNITYSVC_API_KEY: API key for authentication
|
971
|
+
"""
|
972
|
+
# If a subcommand was invoked, skip this callback logic
|
973
|
+
if ctx.invoked_subcommand is not None:
|
974
|
+
return
|
975
|
+
|
976
|
+
# No subcommand - publish all
|
977
|
+
# Set data path
|
978
|
+
if data_path is None:
|
979
|
+
data_path = Path.cwd()
|
980
|
+
|
981
|
+
if not data_path.is_absolute():
|
982
|
+
data_path = Path.cwd() / data_path
|
983
|
+
|
984
|
+
if not data_path.exists():
|
985
|
+
console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
|
986
|
+
raise typer.Exit(code=1)
|
987
|
+
|
988
|
+
console.print(f"[bold blue]Publishing all data from:[/bold blue] {data_path}")
|
989
|
+
console.print(f"[bold blue]Backend URL:[/bold blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
|
990
|
+
|
991
|
+
async def _publish_all_async():
|
992
|
+
async with ServiceDataPublisher() as publisher:
|
993
|
+
return await publisher.publish_all_models(data_path)
|
994
|
+
|
995
|
+
try:
|
996
|
+
all_results = asyncio.run(_publish_all_async())
|
997
|
+
|
998
|
+
# Display results for each data type
|
999
|
+
data_type_display_names = {
|
1000
|
+
"sellers": "Sellers",
|
1001
|
+
"providers": "Providers",
|
1002
|
+
"offerings": "Service Offerings",
|
1003
|
+
"listings": "Service Listings",
|
1004
|
+
}
|
1005
|
+
|
1006
|
+
for data_type in ["sellers", "providers", "offerings", "listings"]:
|
1007
|
+
display_name = data_type_display_names[data_type]
|
1008
|
+
results = all_results[data_type]
|
1009
|
+
|
1010
|
+
console.print(f"\n[bold cyan]{'=' * 60}[/bold cyan]")
|
1011
|
+
console.print(f"[bold cyan]{display_name}[/bold cyan]")
|
1012
|
+
console.print(f"[bold cyan]{'=' * 60}[/bold cyan]\n")
|
1013
|
+
|
1014
|
+
console.print(f" Total found: {results['total']}")
|
1015
|
+
console.print(f" [green]✓ Success:[/green] {results['success']}")
|
1016
|
+
console.print(f" [red]✗ Failed:[/red] {results['failed']}")
|
1017
|
+
|
1018
|
+
# Display errors if any
|
1019
|
+
if results.get("errors"):
|
1020
|
+
console.print(f"\n[bold red]Errors in {display_name}:[/bold red]")
|
1021
|
+
for error in results["errors"]:
|
1022
|
+
# Check if this is a skipped item
|
1023
|
+
if isinstance(error, dict) and error.get("error", "").startswith("skipped"):
|
1024
|
+
continue
|
1025
|
+
console.print(f" [red]✗[/red] {error.get('file', 'unknown')}")
|
1026
|
+
console.print(f" {error.get('error', 'unknown error')}")
|
1027
|
+
|
1028
|
+
# Final summary
|
1029
|
+
console.print(f"\n[bold cyan]{'=' * 60}[/bold cyan]")
|
1030
|
+
console.print("[bold]Final Publishing Summary[/bold]")
|
1031
|
+
console.print(f"[bold cyan]{'=' * 60}[/bold cyan]\n")
|
1032
|
+
console.print(f" Total found: {all_results['total_found']}")
|
1033
|
+
console.print(f" [green]✓ Success:[/green] {all_results['total_success']}")
|
1034
|
+
console.print(f" [red]✗ Failed:[/red] {all_results['total_failed']}")
|
1035
|
+
|
1036
|
+
if all_results["total_failed"] > 0:
|
1037
|
+
console.print(
|
1038
|
+
f"\n[yellow]⚠[/yellow] Completed with {all_results['total_failed']} failure(s)",
|
1039
|
+
style="bold yellow",
|
1040
|
+
)
|
1041
|
+
raise typer.Exit(code=1)
|
1042
|
+
else:
|
1043
|
+
console.print(
|
1044
|
+
"\n[green]✓[/green] All data published successfully!",
|
1045
|
+
style="bold green",
|
1046
|
+
)
|
1047
|
+
|
1048
|
+
except typer.Exit:
|
1049
|
+
raise
|
1050
|
+
except Exception as e:
|
1051
|
+
console.print(f"[red]✗[/red] Failed to publish all data: {e}", style="bold red")
|
1052
|
+
raise typer.Exit(code=1)
|
1053
|
+
|
1054
|
+
|
1055
|
+
@app.command("providers")
|
1056
|
+
def publish_providers(
|
1057
|
+
data_path: Path | None = typer.Option(
|
521
1058
|
None,
|
522
|
-
"--
|
523
|
-
"-
|
524
|
-
help="
|
1059
|
+
"--data-path",
|
1060
|
+
"-d",
|
1061
|
+
help="Path to provider file or directory (default: current directory)",
|
525
1062
|
),
|
526
1063
|
):
|
527
1064
|
"""Publish provider(s) from a file or directory."""
|
528
1065
|
|
529
1066
|
# Set data path
|
530
1067
|
if data_path is None:
|
531
|
-
|
532
|
-
if data_path_str:
|
533
|
-
data_path = Path(data_path_str)
|
534
|
-
else:
|
535
|
-
data_path = Path.cwd() / "data"
|
1068
|
+
data_path = Path.cwd()
|
536
1069
|
|
537
1070
|
if not data_path.is_absolute():
|
538
1071
|
data_path = Path.cwd() / data_path
|
@@ -541,91 +1074,66 @@ def publish_providers(
         console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
         raise typer.Exit(code=1)
 
-    #
-
-
-        console.print(
-
-
-        )
-        raise typer.Exit(code=1)
-
-    # Get API key from argument or environment
-    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
-    if not api_key:
-        console.print(
-            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
+    # Handle single file
+    if data_path.is_file():
+        console.print(f"[blue]Publishing provider:[/blue] {data_path}")
+        console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+    else:
+        console.print(f"[blue]Scanning for providers in:[/blue] {data_path}")
+        console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
 
-
-    with ServiceDataPublisher(
+    async def _publish_providers_async():
+        async with ServiceDataPublisher() as publisher:
             # Handle single file
             if data_path.is_file():
-
-                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
-                result = publisher.post_provider(data_path)
-                console.print("[green]✓[/green] Provider published successfully!")
-                console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
+                return await publisher.post_provider_async(data_path), True
             # Handle directory
             else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                return await publisher.publish_all_providers(data_path), False
+
+    try:
+        result, is_single = asyncio.run(_publish_providers_async())
+
+        if is_single:
+            console.print("[green]✓[/green] Provider published successfully!")
+            console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
+        else:
+            # Display summary
+            console.print("\n[bold]Publishing Summary:[/bold]")
+            console.print(f" Total found: {result['total']}")
+            console.print(f" [green]✓ Success:[/green] {result['success']}")
+            console.print(f" [red]✗ Failed:[/red] {result['failed']}")
+
+            # Display errors if any
+            if result["errors"]:
+                console.print("\n[bold red]Errors:[/bold red]")
+                for error in result["errors"]:
+                    console.print(f" [red]✗[/red] {error['file']}")
+                    console.print(f" {error['error']}")
+
+            if result["failed"] > 0:
+                raise typer.Exit(code=1)
 
     except typer.Exit:
         raise
     except Exception as e:
-        console.print(
-            f"[red]✗[/red] Failed to publish providers: {e}", style="bold red"
-        )
+        console.print(f"[red]✗[/red] Failed to publish providers: {e}", style="bold red")
         raise typer.Exit(code=1)
 
 
 @app.command("sellers")
 def publish_sellers(
-    data_path: Path | None = typer.
-        None,
-        help="Path to seller file or directory (default: ./data or UNITYSVC_DATA_DIR env var)",
-    ),
-    backend_url: str | None = typer.Option(
-        None,
-        "--backend-url",
-        "-u",
-        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
-    ),
-    api_key: str | None = typer.Option(
+    data_path: Path | None = typer.Option(
         None,
-        "--
-        "-
-        help="
+        "--data-path",
+        "-d",
+        help="Path to seller file or directory (default: current directory)",
     ),
 ):
     """Publish seller(s) from a file or directory."""
     # Set data path
     if data_path is None:
-
-        if data_path_str:
-            data_path = Path(data_path_str)
-        else:
-            data_path = Path.cwd() / "data"
+        data_path = Path.cwd()
 
     if not data_path.is_absolute():
         data_path = Path.cwd() / data_path
@@ -634,54 +1142,43 @@ def publish_sellers(
         console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
         raise typer.Exit(code=1)
 
-    #
-
-
-        console.print(
-
-
-        )
-        raise typer.Exit(code=1)
-
-    # Get API key
-    api_key = api_key or os.getenv("UNITYSVC_API_KEY")
-    if not api_key:
-        console.print(
-            "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
-            style="bold red",
-        )
-        raise typer.Exit(code=1)
+    # Handle single file
+    if data_path.is_file():
+        console.print(f"[blue]Publishing seller:[/blue] {data_path}")
+        console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
+    else:
+        console.print(f"[blue]Scanning for sellers in:[/blue] {data_path}")
+        console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
 
-
-    with ServiceDataPublisher(
+    async def _publish_sellers_async():
+        async with ServiceDataPublisher() as publisher:
             # Handle single file
             if data_path.is_file():
-
-                console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
-                result = publisher.post_seller(data_path)
-                console.print("[green]✓[/green] Seller published successfully!")
-                console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
+                return await publisher.post_seller_async(data_path), True
             # Handle directory
             else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                )
+                return await publisher.publish_all_sellers(data_path), False
+
+    try:
+        result, is_single = asyncio.run(_publish_sellers_async())
+
+        if is_single:
+            console.print("[green]✓[/green] Seller published successfully!")
+            console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
+        else:
+            console.print("\n[bold]Publishing Summary:[/bold]")
+            console.print(f" Total found: {result['total']}")
+            console.print(f" [green]✓ Success: {result['success']}[/green]")
+            console.print(f" [red]✗ Failed: {result['failed']}[/red]")
+
+            if result["errors"]:
+                console.print("\n[bold red]Errors:[/bold red]")
+                for error in result["errors"]:
+                    console.print(f" [red]✗[/red] {error['file']}")
+                    console.print(f" {error['error']}")
+                raise typer.Exit(code=1)
+            else:
+                console.print("\n[green]✓[/green] All sellers published successfully!")
 
     except typer.Exit:
         raise
@@ -692,31 +1189,17 @@ def publish_sellers(
 
 @app.command("offerings")
 def publish_offerings(
-    data_path: Path | None = typer.
+    data_path: Path | None = typer.Option(
         None,
-
-
-
-        None,
-        "--backend-url",
-        "-u",
-        help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
-    ),
-    api_key: str | None = typer.Option(
-        None,
-        "--api-key",
-        "-k",
-        help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
+        "--data-path",
+        "-d",
+        help="Path to service offering file or directory (default: current directory)",
     ),
 ):
     """Publish service offering(s) from a file or directory."""
     # Set data path
     if data_path is None:
-
-        if data_path_str:
-            data_path = Path(data_path_str)
-        else:
-            data_path = Path.cwd() / "data"
+        data_path = Path.cwd()
 
     if not data_path.is_absolute():
         data_path = Path.cwd() / data_path
@@ -725,96 +1208,65 @@ def publish_offerings(
 725 1208 |   console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
 726 1209 |   raise typer.Exit(code=1)
 727 1210 |
 728      | - #
 729      | -
 730      | -
 731      | - console.print(
 732      | -
 733      | -
 734      | - )
 735      | - raise typer.Exit(code=1)
 736      | -
 737      | - # Get API key from argument or environment
 738      | - api_key = api_key or os.getenv("UNITYSVC_API_KEY")
 739      | - if not api_key:
 740      | - console.print(
 741      | - "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
 742      | - style="bold red",
 743      | - )
 744      | - raise typer.Exit(code=1)
     1211 | + # Handle single file
     1212 | + if data_path.is_file():
     1213 | + console.print(f"[blue]Publishing service offering:[/blue] {data_path}")
     1214 | + console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
     1215 | + else:
     1216 | + console.print(f"[blue]Scanning for service offerings in:[/blue] {data_path}")
     1217 | + console.print(f"[blue]Backend URL:[/bold blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
 745 1218 |
 746      | -
 747      | - with ServiceDataPublisher(
     1219 | + async def _publish_offerings_async():
     1220 | + async with ServiceDataPublisher() as publisher:
 748 1221 |   # Handle single file
 749 1222 |   if data_path.is_file():
 750      | -
 751      | - console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
 752      | - result = publisher.post_service_offering(data_path)
 753      | - console.print(
 754      | - "[green]✓[/green] Service offering published successfully!"
 755      | - )
 756      | - console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
     1223 | + return await publisher.post_service_offering_async(data_path), True
 757 1224 |   # Handle directory
 758 1225 |   else:
 759      | -
 760      | -
 761      | -
 762      | -
 763      | -
 764      | -
 765      | -
 766      | -
 767      | -
 768      | -
 769      | -
 770      | -
 771      | -
 772      | -
 773      | -
 774      | -
 775      | -
 776      | -
 777      | - console.print(
 778      | -
 779      | -
     1226 | + return await publisher.publish_all_offerings(data_path), False
     1227 | +
     1228 | + try:
     1229 | + result, is_single = asyncio.run(_publish_offerings_async())
     1230 | +
     1231 | + if is_single:
     1232 | + console.print("[green]✓[/green] Service offering published successfully!")
     1233 | + console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
     1234 | + else:
     1235 | + console.print("\n[bold]Publishing Summary:[/bold]")
     1236 | + console.print(f" Total found: {result['total']}")
     1237 | + console.print(f" [green]✓ Success: {result['success']}[/green]")
     1238 | + console.print(f" [red]✗ Failed: {result['failed']}[/red]")
     1239 | +
     1240 | + if result["errors"]:
     1241 | + console.print("\n[bold red]Errors:[/bold red]")
     1242 | + for error in result["errors"]:
     1243 | + console.print(f" [red]✗[/red] {error['file']}")
     1244 | + console.print(f" {error['error']}")
     1245 | + raise typer.Exit(code=1)
     1246 | + else:
     1247 | + console.print("\n[green]✓[/green] All service offerings published successfully!")
 780 1248 |
 781 1249 |   except typer.Exit:
 782 1250 |   raise
 783 1251 |   except Exception as e:
 784      | - console.print(
 785      | - f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red"
 786      | - )
     1252 | + console.print(f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red")
 787 1253 |   raise typer.Exit(code=1)
 788 1254 |
 789 1255 |
 790 1256 |   @app.command("listings")
 791 1257 |   def publish_listings(
 792      | - data_path: Path | None = typer.
     1258 | + data_path: Path | None = typer.Option(
 793 1259 |   None,
 794      | -
 795      | -
 796      | -
 797      | - None,
 798      | - "--backend-url",
 799      | - "-u",
 800      | - help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
 801      | - ),
 802      | - api_key: str | None = typer.Option(
 803      | - None,
 804      | - "--api-key",
 805      | - "-k",
 806      | - help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
     1260 | + "--data-path",
     1261 | + "-d",
     1262 | + help="Path to service listing file or directory (default: current directory)",
 807 1263 |   ),
 808 1264 |   ):
 809 1265 |   """Publish service listing(s) from a file or directory."""
 810 1266 |
 811 1267 |   # Set data path
 812 1268 |   if data_path is None:
 813      | -
 814      | - if data_path_str:
 815      | - data_path = Path(data_path_str)
 816      | - else:
 817      | - data_path = Path.cwd() / "data"
     1269 | + data_path = Path.cwd()
 818 1270 |
 819 1271 |   if not data_path.is_absolute():
 820 1272 |   data_path = Path.cwd() / data_path
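Both commands keep the same two-stage exception handling shown above: typer.Exit is re-raised untouched so deliberate exit codes (for example, after printing a failure summary) pass through unchanged, while any other exception is reported and converted to exit code 1. A compact sketch of that pattern; do_publish is a hypothetical placeholder for the real publishing call:

import typer


def do_publish() -> None:
    # Placeholder for the real call; it may raise typer.Exit or any other exception.
    raise RuntimeError("backend unreachable")


def main() -> None:
    try:
        do_publish()
    except typer.Exit:
        raise  # preserve the intended exit code
    except Exception as e:
        typer.echo(f"Failed to publish: {e}", err=True)
        raise typer.Exit(code=1)

Because typer.Exit is itself an Exception subclass, it has to be caught and re-raised before the generic handler, which is exactly the ordering the new commands use.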
@@ -823,63 +1275,46 @@ def publish_listings(
 823 1275 |   console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
 824 1276 |   raise typer.Exit(code=1)
 825 1277 |
 826      | - #
 827      | -
 828      | -
 829      | - console.print(
 830      | -
 831      | -
 832      | - )
 833      | - raise typer.Exit(code=1)
 834      | -
 835      | - # Get API key from argument or environment
 836      | - api_key = api_key or os.getenv("UNITYSVC_API_KEY")
 837      | - if not api_key:
 838      | - console.print(
 839      | - "[red]✗[/red] API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.",
 840      | - style="bold red",
 841      | - )
 842      | - raise typer.Exit(code=1)
     1278 | + # Handle single file
     1279 | + if data_path.is_file():
     1280 | + console.print(f"[blue]Publishing service listing:[/blue] {data_path}")
     1281 | + console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
     1282 | + else:
     1283 | + console.print(f"[blue]Scanning for service listings in:[/blue] {data_path}")
     1284 | + console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
 843 1285 |
 844      | -
 845      | - with ServiceDataPublisher(
     1286 | + async def _publish_listings_async():
     1287 | + async with ServiceDataPublisher() as publisher:
 846 1288 |   # Handle single file
 847 1289 |   if data_path.is_file():
 848      | -
 849      | - console.print(f"[blue]Backend URL:[/blue] {backend_url}\n")
 850      | - result = publisher.post_service_listing(data_path)
 851      | - console.print(
 852      | - "[green]✓[/green] Service listing published successfully!"
 853      | - )
 854      | - console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
     1290 | + return await publisher.post_service_listing_async(data_path), True
 855 1291 |   # Handle directory
 856 1292 |   else:
 857      | -
 858      | -
 859      | -
 860      | -
 861      | -
 862      | -
 863      | -
 864      | -
 865      | -
 866      | -
 867      | -
 868      | -
 869      | -
 870      | -
 871      | -
 872      | -
 873      | -
 874      | -
 875      | - console.print(
 876      | -
 877      | -
     1293 | + return await publisher.publish_all_listings(data_path), False
     1294 | +
     1295 | + try:
     1296 | + result, is_single = asyncio.run(_publish_listings_async())
     1297 | +
     1298 | + if is_single:
     1299 | + console.print("[green]✓[/green] Service listing published successfully!")
     1300 | + console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
     1301 | + else:
     1302 | + console.print("\n[bold]Publishing Summary:[/bold]")
     1303 | + console.print(f" Total found: {result['total']}")
     1304 | + console.print(f" [green]✓ Success: {result['success']}[/green]")
     1305 | + console.print(f" [red]✗ Failed: {result['failed']}[/red]")
     1306 | +
     1307 | + if result["errors"]:
     1308 | + console.print("\n[bold red]Errors:[/bold red]")
     1309 | + for error in result["errors"]:
     1310 | + console.print(f" [red]✗[/red] {error['file']}")
     1311 | + console.print(f" {error['error']}")
     1312 | + raise typer.Exit(code=1)
     1313 | + else:
     1314 | + console.print("\n[green]✓[/green] All service listings published successfully!")
 878 1315 |
 879 1316 |   except typer.Exit:
 880 1317 |   raise
 881 1318 |   except Exception as e:
 882      | - console.print(
 883      | - f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red"
 884      | - )
     1319 | + console.print(f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red")
 885 1320 |   raise typer.Exit(code=1)