unitysvc-services 0.1.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unitysvc_services/__init__.py +4 -0
- unitysvc_services/api.py +421 -0
- unitysvc_services/cli.py +23 -0
- unitysvc_services/format_data.py +140 -0
- unitysvc_services/interactive_prompt.py +1132 -0
- unitysvc_services/list.py +216 -0
- unitysvc_services/models/__init__.py +71 -0
- unitysvc_services/models/base.py +1375 -0
- unitysvc_services/models/listing_data.py +118 -0
- unitysvc_services/models/listing_v1.py +56 -0
- unitysvc_services/models/provider_data.py +79 -0
- unitysvc_services/models/provider_v1.py +54 -0
- unitysvc_services/models/seller_data.py +120 -0
- unitysvc_services/models/seller_v1.py +42 -0
- unitysvc_services/models/service_data.py +114 -0
- unitysvc_services/models/service_v1.py +81 -0
- unitysvc_services/populate.py +207 -0
- unitysvc_services/publisher.py +1628 -0
- unitysvc_services/py.typed +0 -0
- unitysvc_services/query.py +688 -0
- unitysvc_services/scaffold.py +1103 -0
- unitysvc_services/schema/base.json +777 -0
- unitysvc_services/schema/listing_v1.json +1286 -0
- unitysvc_services/schema/provider_v1.json +952 -0
- unitysvc_services/schema/seller_v1.json +379 -0
- unitysvc_services/schema/service_v1.json +1306 -0
- unitysvc_services/test.py +965 -0
- unitysvc_services/unpublisher.py +505 -0
- unitysvc_services/update.py +287 -0
- unitysvc_services/utils.py +533 -0
- unitysvc_services/validator.py +731 -0
- unitysvc_services-0.1.24.dist-info/METADATA +184 -0
- unitysvc_services-0.1.24.dist-info/RECORD +37 -0
- unitysvc_services-0.1.24.dist-info/WHEEL +5 -0
- unitysvc_services-0.1.24.dist-info/entry_points.txt +3 -0
- unitysvc_services-0.1.24.dist-info/licenses/LICENSE +21 -0
- unitysvc_services-0.1.24.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1628 @@
|
|
|
1
|
+
"""Data publisher module for posting service data to UnitySVC backend."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import base64
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
import httpx
|
|
11
|
+
import typer
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
from rich.table import Table
|
|
14
|
+
|
|
15
|
+
import unitysvc_services
|
|
16
|
+
|
|
17
|
+
from .api import UnitySvcAPI
|
|
18
|
+
from .models.base import ListingStatusEnum, ProviderStatusEnum, SellerStatusEnum
|
|
19
|
+
from .utils import convert_convenience_fields_to_documents, find_files_by_schema, load_data_file, render_template_file
|
|
20
|
+
from .validator import DataValidator
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class ServiceDataPublisher(UnitySvcAPI):
|
|
24
|
+
"""Publishes service data to UnitySVC backend endpoints.
|
|
25
|
+
|
|
26
|
+
Inherits base HTTP client with curl fallback from UnitySvcAPI.
|
|
27
|
+
Extends with async operations for concurrent publishing.
|
|
28
|
+
"""
|
|
29
|
+
|
|
30
|
+
def __init__(self) -> None:
|
|
31
|
+
# Initialize base class (provides self.client as AsyncClient with curl fallback)
|
|
32
|
+
super().__init__()
|
|
33
|
+
|
|
34
|
+
# Semaphore to limit concurrent requests and prevent connection pool exhaustion
|
|
35
|
+
self.max_concurrent_requests = 15
|
|
36
|
+
|
|
37
|
+
def load_file_content(self, file_path: Path, base_path: Path) -> str:
|
|
38
|
+
"""Load content from a file (text or binary)."""
|
|
39
|
+
full_path = base_path / file_path if not file_path.is_absolute() else file_path
|
|
40
|
+
|
|
41
|
+
if not full_path.exists():
|
|
42
|
+
raise FileNotFoundError(f"File not found: {full_path}")
|
|
43
|
+
|
|
44
|
+
# Try to read as text first
|
|
45
|
+
try:
|
|
46
|
+
with open(full_path, encoding="utf-8") as f:
|
|
47
|
+
return f.read()
|
|
48
|
+
except UnicodeDecodeError:
|
|
49
|
+
# If it fails, read as binary and encode as base64
|
|
50
|
+
with open(full_path, "rb") as f:
|
|
51
|
+
return base64.b64encode(f.read()).decode("ascii")
|
|
52
|
+
|
|
53
|
+
def resolve_file_references(
|
|
54
|
+
self,
|
|
55
|
+
data: dict[str, Any],
|
|
56
|
+
base_path: Path,
|
|
57
|
+
listing: dict[str, Any] | None = None,
|
|
58
|
+
offering: dict[str, Any] | None = None,
|
|
59
|
+
provider: dict[str, Any] | None = None,
|
|
60
|
+
seller: dict[str, Any] | None = None,
|
|
61
|
+
listing_filename: str | None = None,
|
|
62
|
+
interface: dict[str, Any] | None = None,
|
|
63
|
+
) -> dict[str, Any]:
|
|
64
|
+
"""Recursively resolve file references and include content in data.
|
|
65
|
+
|
|
66
|
+
For Jinja2 template files (.j2), renders the template with provided context
|
|
67
|
+
and strips the .j2 extension from file_path.
|
|
68
|
+
|
|
69
|
+
Args:
|
|
70
|
+
data: Data dictionary potentially containing file_path references
|
|
71
|
+
base_path: Base path for resolving relative file paths
|
|
72
|
+
listing: Listing data for template rendering (optional)
|
|
73
|
+
offering: Offering data for template rendering (optional)
|
|
74
|
+
provider: Provider data for template rendering (optional)
|
|
75
|
+
seller: Seller data for template rendering (optional)
|
|
76
|
+
listing_filename: Listing filename for constructing output filenames (optional)
|
|
77
|
+
interface: AccessInterface data for template rendering (optional, for interface documents)
|
|
78
|
+
|
|
79
|
+
Returns:
|
|
80
|
+
Data with file references resolved and content loaded
|
|
81
|
+
"""
|
|
82
|
+
result: dict[str, Any] = {}
|
|
83
|
+
|
|
84
|
+
# Check if this dict looks like an AccessInterface (has base_url or interface_type)
|
|
85
|
+
# If so, use it as the interface context for nested documents
|
|
86
|
+
current_interface = interface
|
|
87
|
+
if "base_url" in data or "interface_type" in data:
|
|
88
|
+
current_interface = data
|
|
89
|
+
|
|
90
|
+
for key, value in data.items():
|
|
91
|
+
if isinstance(value, dict):
|
|
92
|
+
# Recursively process nested dictionaries
|
|
93
|
+
result[key] = self.resolve_file_references(
|
|
94
|
+
value,
|
|
95
|
+
base_path,
|
|
96
|
+
listing=listing,
|
|
97
|
+
offering=offering,
|
|
98
|
+
provider=provider,
|
|
99
|
+
seller=seller,
|
|
100
|
+
listing_filename=listing_filename,
|
|
101
|
+
interface=current_interface,
|
|
102
|
+
)
|
|
103
|
+
elif isinstance(value, list):
|
|
104
|
+
# Process lists
|
|
105
|
+
result[key] = [
|
|
106
|
+
(
|
|
107
|
+
self.resolve_file_references(
|
|
108
|
+
item,
|
|
109
|
+
base_path,
|
|
110
|
+
listing=listing,
|
|
111
|
+
offering=offering,
|
|
112
|
+
provider=provider,
|
|
113
|
+
seller=seller,
|
|
114
|
+
listing_filename=listing_filename,
|
|
115
|
+
interface=current_interface,
|
|
116
|
+
)
|
|
117
|
+
if isinstance(item, dict)
|
|
118
|
+
else item
|
|
119
|
+
)
|
|
120
|
+
for item in value
|
|
121
|
+
]
|
|
122
|
+
elif key == "file_path" and isinstance(value, str):
|
|
123
|
+
# This is a file reference - load the content and render if template
|
|
124
|
+
full_path = base_path / value if not Path(value).is_absolute() else Path(value)
|
|
125
|
+
|
|
126
|
+
if not full_path.exists():
|
|
127
|
+
raise FileNotFoundError(f"File not found: {full_path}")
|
|
128
|
+
|
|
129
|
+
# Render template if applicable
|
|
130
|
+
try:
|
|
131
|
+
content, actual_filename = render_template_file(
|
|
132
|
+
full_path,
|
|
133
|
+
listing=listing,
|
|
134
|
+
offering=offering,
|
|
135
|
+
provider=provider,
|
|
136
|
+
seller=seller,
|
|
137
|
+
interface=current_interface,
|
|
138
|
+
)
|
|
139
|
+
result["file_content"] = content
|
|
140
|
+
|
|
141
|
+
# Update file_path to remove .j2 extension if it was a template
|
|
142
|
+
if full_path.name.endswith(".j2"):
|
|
143
|
+
# Strip .j2 from the path
|
|
144
|
+
new_path = str(value)[:-3] # Remove last 3 characters (.j2)
|
|
145
|
+
result[key] = new_path
|
|
146
|
+
else:
|
|
147
|
+
result[key] = value
|
|
148
|
+
|
|
149
|
+
except Exception as e:
|
|
150
|
+
raise ValueError(f"Failed to load/render file content from '{value}': {e}")
|
|
151
|
+
else:
|
|
152
|
+
result[key] = value
|
|
153
|
+
|
|
154
|
+
# After processing all fields, check if this is a code_examples document
|
|
155
|
+
# If so, try to load corresponding .out file and add to meta.output
|
|
156
|
+
if result.get("category") == "code_examples" and result.get("file_content") and listing_filename:
|
|
157
|
+
# Get the actual filename (after .j2 stripping if applicable)
|
|
158
|
+
# If file_path was updated (e.g., from "test.py.j2" to "test.py"), use that
|
|
159
|
+
# Otherwise, extract basename from original file_path
|
|
160
|
+
output_base_filename: str | None = None
|
|
161
|
+
|
|
162
|
+
# Check if file_path was modified (original might have had .j2)
|
|
163
|
+
file_path_value = result.get("file_path", "")
|
|
164
|
+
if file_path_value:
|
|
165
|
+
output_base_filename = Path(file_path_value).name
|
|
166
|
+
|
|
167
|
+
if output_base_filename:
|
|
168
|
+
# Construct output filename: {listing_stem}_{output_base_filename}.out
|
|
169
|
+
# e.g., "svclisting_test.py.out" for svclisting.json and test.py
|
|
170
|
+
listing_stem = Path(listing_filename).stem
|
|
171
|
+
output_filename = f"{listing_stem}_{output_base_filename}.out"
|
|
172
|
+
|
|
173
|
+
# Try to find the .out file in base_path (listing directory)
|
|
174
|
+
output_path = base_path / output_filename
|
|
175
|
+
|
|
176
|
+
if output_path.exists():
|
|
177
|
+
try:
|
|
178
|
+
with open(output_path, encoding="utf-8") as f:
|
|
179
|
+
output_content = f.read()
|
|
180
|
+
|
|
181
|
+
# Add output to meta field
|
|
182
|
+
if "meta" not in result or result["meta"] is None:
|
|
183
|
+
result["meta"] = {}
|
|
184
|
+
result["meta"]["output"] = output_content
|
|
185
|
+
except Exception:
|
|
186
|
+
# Don't fail if output file can't be read, just skip it
|
|
187
|
+
pass
|
|
188
|
+
|
|
189
|
+
return result
|
|
190
|
+
|
|
191
|
+
    async def post(  # type: ignore[override]
        self, endpoint: str, data: dict[str, Any], check_status: bool = True, dryrun: bool = False
    ) -> tuple[dict[str, Any], int]:
        """Make a POST request to the backend API with automatic curl fallback.

        Override of the base-class post() that returns both the JSON body and
        the HTTP status code (the base class returns only JSON).

        Args:
            endpoint: API endpoint path (e.g., "/publish/seller")
            data: JSON data to post
            check_status: Whether to raise on non-2xx status codes (default: True)
            dryrun: If True, adds dryrun=true as query parameter

        Returns:
            Tuple of (JSON response, HTTP status code)

        Raises:
            RuntimeError: If both httpx and curl fail
        """
        # dryrun is communicated to the backend as a query parameter, not in the body
        params = {"dryrun": "true"} if dryrun else None

        # If a previous request already discovered that direct httpx
        # connections fail, go straight to the curl fallback inherited
        # from the base class (UnitySvcAPI).
        if self.use_curl_fallback:
            response_json = await super().post(endpoint, json_data=data, params=params)
            # The curl path does not expose the HTTP status code separately;
            # if it returned without raising we assume a 2xx.
            status_code = 200
        else:
            try:
                response = await self.client.post(f"{self.base_url}{endpoint}", json=data, params=params)
                status_code = response.status_code

                # NOTE: with check_status=True, httpx.HTTPStatusError from
                # raise_for_status() propagates to the caller untouched.
                if check_status:
                    response.raise_for_status()

                response_json = response.json()
            except (httpx.ConnectError, OSError):
                # Connection-level failure: remember to use curl from now on
                # and retry this same request through the fallback immediately.
                self.use_curl_fallback = True
                response_json = await super().post(endpoint, json_data=data, params=params)
                status_code = 200  # Assume success if curl didn't raise
|
238
|
+
    async def _post_with_retry(
        self,
        endpoint: str,
        data: dict[str, Any],
        entity_type: str,
        entity_name: str,
        context_info: str = "",
        max_retries: int = 3,
        dryrun: bool = False,
    ) -> dict[str, Any]:
        """
        Generic retry wrapper for posting data to backend API with task polling.

        The backend now returns HTTP 202 with a task_id. This method:
        1. Submits the publish request
        2. Gets the task_id from the response
        3. Polls /tasks/{task_id} until completion (via self.check_task)
        4. Returns the final result

        Retry policy: 5xx responses and network-level failures are retried up
        to ``max_retries`` times with exponential backoff (1s, 2s, 4s); 4xx
        responses and task failures are raised immediately without retrying.

        Args:
            endpoint: API endpoint path (e.g., "/publish/listing")
            data: JSON data to post
            entity_type: Type of entity being published (for error messages)
            entity_name: Name of the entity being published (for error messages)
            context_info: Additional context for error messages (e.g., provider, service info)
            max_retries: Maximum number of retry attempts
            dryrun: If True, runs in dry run mode (no actual changes)

        Returns:
            Response JSON from successful API call

        Raises:
            ValueError: On client errors (4xx), task failures, or after exhausting retries
        """
        last_exception: Exception | None = None
        for attempt in range(max_retries):
            try:
                # Use the public post() method with automatic curl fallback;
                # check_status=False so we can branch on the code ourselves.
                response_json, status_code = await self.post(endpoint, data, check_status=False, dryrun=dryrun)

                # Handle task-based response (HTTP 202)
                if status_code == 202:
                    # Backend returns task_id - poll for completion
                    task_id = response_json.get("task_id")

                    if not task_id:
                        context_msg = f" ({context_info})" if context_info else ""
                        raise ValueError(f"No task_id in response for {entity_type} '{entity_name}'{context_msg}")

                    # Poll task status until completion using check_task utility
                    try:
                        result = await self.check_task(task_id)
                        return result
                    except ValueError as e:
                        # Add entity context to task errors before re-raising;
                        # deliberately NOT retried - the task already ran.
                        context_msg = f" ({context_info})" if context_info else ""
                        raise ValueError(f"Task failed for {entity_type} '{entity_name}'{context_msg}: {e}")

                # Check for errors
                if status_code >= 400:
                    # Don't retry on 4xx errors (client errors) - they won't succeed on retry
                    if 400 <= status_code < 500:
                        error_detail = response_json.get("detail", str(response_json))
                        context_msg = f" ({context_info})" if context_info else ""
                        raise ValueError(
                            f"Failed to publish {entity_type} '{entity_name}'{context_msg}: {error_detail}"
                        )

                    # 5xx errors - retry with exponential backoff
                    if attempt < max_retries - 1:
                        wait_time = 2**attempt  # Exponential backoff: 1s, 2s, 4s
                        await asyncio.sleep(wait_time)
                        continue
                    else:
                        # Last attempt failed - surface the backend detail
                        error_detail = response_json.get("detail", str(response_json))
                        context_msg = f" ({context_info})" if context_info else ""
                        raise ValueError(
                            f"Failed to publish {entity_type} after {max_retries} attempts: "
                            f"'{entity_name}'{context_msg}: {error_detail}"
                        )

                # Success response (2xx)
                return response_json

            except (httpx.NetworkError, httpx.TimeoutException, RuntimeError) as e:
                # Network/connection errors - the post() method should have tried curl fallback.
                # If we're here, both httpx and curl failed. (ValueError raised
                # above is NOT caught here, so 4xx/task errors propagate at once.)
                last_exception = e
                if attempt < max_retries - 1:
                    wait_time = 2**attempt  # Exponential backoff: 1s, 2s, 4s
                    await asyncio.sleep(wait_time)
                    continue
                else:
                    raise ValueError(
                        f"Network error after {max_retries} attempts for {entity_type} '{entity_name}': {str(e)}"
                    )

        # Should never reach here, but just in case
        if last_exception:
            raise last_exception
        raise ValueError("Unexpected error in retry logic")
|
341
|
+
    async def post_service_listing_async(
        self, listing_file: Path, max_retries: int = 3, dryrun: bool = False
    ) -> dict[str, Any]:
        """Async version of post_service_listing for concurrent publishing with retry logic.

        Loads the listing file, derives provider/service/seller context from
        the surrounding directory layout, resolves file references, and posts
        to /publish/listing. Draft sellers and draft listings are skipped
        (a {"skipped": True, ...} dict is returned instead of publishing).

        Args:
            listing_file: Path to the listing data file; expected to live at
                .../data/.../{provider_name}/services/.../<listing file>
            max_retries: Maximum retry attempts for the backend call
            dryrun: If True, the backend performs a dry run only

        Returns:
            Backend response dict, augmented with service_name/provider_name/
            seller_name for display, or a "skipped" marker dict.

        Raises:
            ValueError: If the directory layout or referenced definitions are invalid
        """
        # Load the listing data file
        data, _ = load_data_file(listing_file)

        # If name is not provided, use filename (without extension)
        if "name" not in data or not data.get("name"):
            data["name"] = listing_file.stem

        # Extract provider_name from directory structure (needed before loading provider data):
        # the path component immediately before "services" is the provider name.
        parts = listing_file.parts
        try:
            services_idx = parts.index("services")
            provider_name = parts[services_idx - 1]
            data["provider_name"] = provider_name

            # Find provider directory to load provider data
            provider_dir = Path(*parts[:services_idx])
        except (ValueError, IndexError):
            raise ValueError(
                f"Cannot extract provider_name from path: {listing_file}. "
                f"Expected path to contain .../{{provider_name}}/services/..."
            )

        # If service_name is not in listing data, find it from service files in the same directory
        if "service_name" not in data or not data["service_name"]:
            # Find all service files in the same directory
            service_files = find_files_by_schema(listing_file.parent, "service_v1")

            if len(service_files) == 0:
                raise ValueError(
                    f"Cannot find any service_v1 files in {listing_file.parent}. "
                    f"Listing files must be in the same directory as a service definition."
                )
            elif len(service_files) > 1:
                # Ambiguous: refuse to guess which service the listing belongs to
                service_names = [svc_data.get("name", "unknown") for _, _, svc_data in service_files]
                raise ValueError(
                    f"Multiple services found in {listing_file.parent}: {', '.join(service_names)}. "
                    f"Please add 'service_name' field to {listing_file.name} to specify which "
                    f"service this listing belongs to."
                )
            else:
                # Exactly one service found - use it
                _service_file, _format, service_data = service_files[0]
                data["service_name"] = service_data.get("name")
                data["service_version"] = service_data.get("version")
        else:
            # service_name is provided in listing data, find the matching service to get version
            service_name = data["service_name"]
            service_files = find_files_by_schema(
                listing_file.parent, "service_v1", field_filter=(("name", service_name),)
            )

            if not service_files:
                raise ValueError(
                    f"Service '{service_name}' specified in {listing_file.name} not found in {listing_file.parent}."
                )

            # Get version from the found service
            _service_file, _format, service_data = service_files[0]
            data["service_version"] = service_data.get("version")

        # Load provider data for template rendering
        provider_files = find_files_by_schema(provider_dir, "provider_v1")
        if provider_files:
            _provider_file, _format, provider_data = provider_files[0]
        else:
            provider_data = {}

        # Find seller_name from seller definition in the data directory:
        # navigate up until we hit a directory literally named "data"
        # (data_dir.parent != data_dir guards against walking past filesystem root).
        data_dir = listing_file.parent
        while data_dir.name != "data" and data_dir.parent != data_dir:
            data_dir = data_dir.parent

        if data_dir.name != "data":
            raise ValueError(
                f"Cannot find 'data' directory in path: {listing_file}. "
                f"Expected path structure includes a 'data' directory."
            )

        # Look for seller file in the data directory by checking schema field
        seller_files = find_files_by_schema(data_dir, "seller_v1")

        if not seller_files:
            raise ValueError(
                f"Cannot find seller_v1 file in {data_dir}. A seller definition is required in the data directory."
            )

        # Should only be one seller file in the data directory
        _seller_file, _format, seller_data = seller_files[0]

        # Check seller status - skip if draft
        seller_status = seller_data.get("status", SellerStatusEnum.active)
        if seller_status == SellerStatusEnum.draft:
            return {
                "skipped": True,
                "reason": f"Seller status is '{seller_status}' - not publishing listing to backend",
                "name": data.get("name", "unknown"),
            }

        seller_name = seller_data.get("name")
        if not seller_name:
            raise ValueError("Seller data missing 'name' field")

        data["seller_name"] = seller_name

        # Map listing_status (file-level field name) to status (backend field name) if present
        if "listing_status" in data:
            data["status"] = data.pop("listing_status")

        # Check listing status - skip if draft
        listing_status = data.get("status", ListingStatusEnum.ready)
        if listing_status == ListingStatusEnum.draft:
            return {
                "skipped": True,
                "reason": f"Listing status is '{listing_status}' - not publishing to backend (still in draft)",
                "name": data.get("name", "unknown"),
            }

        # NOW resolve file references with all context (listing, offering, provider, seller)
        base_path = listing_file.parent
        data_with_content = self.resolve_file_references(
            data,
            base_path,
            listing=data,
            offering=service_data,
            provider=provider_data,
            seller=seller_data,
            listing_filename=listing_file.name,
        )

        # Post to the endpoint using retry helper
        context_info = (
            f"service: {data_with_content.get('service_name')}, "
            f"provider: {data_with_content.get('provider_name')}, "
            f"seller: {data_with_content.get('seller_name')}"
        )
        result = await self._post_with_retry(
            endpoint="/publish/listing",
            data=data_with_content,
            entity_type="listing",
            entity_name=data.get("name", "unknown"),
            context_info=context_info,
            max_retries=max_retries,
            dryrun=dryrun,
        )

        # Add local metadata to result for display purposes
        result["service_name"] = data_with_content.get("service_name")
        result["provider_name"] = data_with_content.get("provider_name")
        result["seller_name"] = data_with_content.get("seller_name")

        return result
|
498
|
+
    async def post_service_offering_async(
        self, data_file: Path, max_retries: int = 3, dryrun: bool = False
    ) -> dict[str, Any]:
        """Async version of post_service_offering for concurrent publishing with retry logic.

        Loads the offering file, derives the provider from the directory
        layout, resolves file references, and posts to /publish/offering.
        Offerings whose provider is in draft status are skipped (a
        {"skipped": True, ...} dict is returned instead of publishing).

        Args:
            data_file: Path to the offering data file; expected to live at
                .../{provider_name}/services/.../<offering file>
            max_retries: Maximum retry attempts for the backend call
            dryrun: If True, the backend performs a dry run only

        Returns:
            Backend response dict, augmented with provider_name for display,
            or a "skipped" marker dict.

        Raises:
            ValueError: If the provider name cannot be derived from the path
        """
        # Load the data file
        data, _ = load_data_file(data_file)

        # Convert convenience fields (logo, terms_of_service) first
        base_path = data_file.parent
        data = convert_convenience_fields_to_documents(
            data, base_path, logo_field="logo", terms_field="terms_of_service"
        )

        # Extract provider_name from directory structure:
        # find the 'services' directory and use its parent as provider_name
        parts = data_file.parts
        try:
            services_idx = parts.index("services")
            provider_name = parts[services_idx - 1]
            data["provider_name"] = provider_name

            # Find provider directory to check status and load data
            provider_dir = Path(*parts[:services_idx])
        except (ValueError, IndexError):
            raise ValueError(
                f"Cannot extract provider_name from path: {data_file}. "
                f"Expected path to contain .../{{provider_name}}/services/..."
            )

        # Load provider data for status check and template rendering
        provider_files = find_files_by_schema(provider_dir, "provider_v1")
        if provider_files:
            # Should only be one provider file in the directory
            _provider_file, _format, provider_data = provider_files[0]
            provider_status = provider_data.get("status", ProviderStatusEnum.active)
            if provider_status == ProviderStatusEnum.draft:
                # Draft provider: do not publish its offerings
                return {
                    "skipped": True,
                    "reason": f"Provider status is '{provider_status}' - not publishing offering to backend",
                    "name": data.get("name", "unknown"),
                }
        else:
            provider_data = {}

        # NOW resolve file references with all context (offering, provider)
        data_with_content = self.resolve_file_references(
            data,
            base_path,
            listing=None,
            offering=data,
            provider=provider_data,
            seller=None,
        )

        # Post to the endpoint using retry helper
        context_info = f"provider: {data_with_content.get('provider_name')}"
        result = await self._post_with_retry(
            endpoint="/publish/offering",
            data=data_with_content,
            entity_type="offering",
            entity_name=data.get("name", "unknown"),
            context_info=context_info,
            max_retries=max_retries,
            dryrun=dryrun,
        )

        # Add local metadata to result for display purposes
        result["provider_name"] = data_with_content.get("provider_name")

        return result
|
|
569
|
+
async def post_provider_async(self, data_file: Path, max_retries: int = 3, dryrun: bool = False) -> dict[str, Any]:
|
|
570
|
+
"""Async version of post_provider for concurrent publishing with retry logic."""
|
|
571
|
+
# Load the data file
|
|
572
|
+
data, _ = load_data_file(data_file)
|
|
573
|
+
|
|
574
|
+
# Check provider status - skip if draft
|
|
575
|
+
provider_status = data.get("status", ProviderStatusEnum.active)
|
|
576
|
+
if provider_status == ProviderStatusEnum.draft:
|
|
577
|
+
# Return success without publishing - provider is incomplete
|
|
578
|
+
return {
|
|
579
|
+
"skipped": True,
|
|
580
|
+
"reason": f"Provider status is '{provider_status}' - not publishing to backend",
|
|
581
|
+
"name": data.get("name", "unknown"),
|
|
582
|
+
}
|
|
583
|
+
|
|
584
|
+
# Convert convenience fields (logo, terms_of_service) to documents
|
|
585
|
+
base_path = data_file.parent
|
|
586
|
+
data = convert_convenience_fields_to_documents(
|
|
587
|
+
data, base_path, logo_field="logo", terms_field="terms_of_service"
|
|
588
|
+
)
|
|
589
|
+
|
|
590
|
+
# Resolve file references and include content with provider context
|
|
591
|
+
data_with_content = self.resolve_file_references(
|
|
592
|
+
data,
|
|
593
|
+
base_path,
|
|
594
|
+
listing=None,
|
|
595
|
+
offering=None,
|
|
596
|
+
provider=data,
|
|
597
|
+
seller=None,
|
|
598
|
+
)
|
|
599
|
+
|
|
600
|
+
# Post to the endpoint using retry helper
|
|
601
|
+
return await self._post_with_retry(
|
|
602
|
+
endpoint="/publish/provider",
|
|
603
|
+
data=data_with_content,
|
|
604
|
+
entity_type="provider",
|
|
605
|
+
entity_name=data.get("name", "unknown"),
|
|
606
|
+
max_retries=max_retries,
|
|
607
|
+
dryrun=dryrun,
|
|
608
|
+
)
|
|
609
|
+
|
|
610
|
+
async def post_seller_async(self, data_file: Path, max_retries: int = 3, dryrun: bool = False) -> dict[str, Any]:
|
|
611
|
+
"""Async version of post_seller for concurrent publishing with retry logic."""
|
|
612
|
+
# Load the data file
|
|
613
|
+
data, _ = load_data_file(data_file)
|
|
614
|
+
|
|
615
|
+
# Check seller status - skip if draft
|
|
616
|
+
seller_status = data.get("status", SellerStatusEnum.active)
|
|
617
|
+
if seller_status == SellerStatusEnum.draft:
|
|
618
|
+
# Return success without publishing - seller is incomplete
|
|
619
|
+
return {
|
|
620
|
+
"skipped": True,
|
|
621
|
+
"reason": f"Seller status is '{seller_status}' - not publishing to backend",
|
|
622
|
+
"name": data.get("name", "unknown"),
|
|
623
|
+
}
|
|
624
|
+
|
|
625
|
+
# Convert convenience fields (logo only for sellers, no terms_of_service)
|
|
626
|
+
base_path = data_file.parent
|
|
627
|
+
data = convert_convenience_fields_to_documents(data, base_path, logo_field="logo", terms_field=None)
|
|
628
|
+
|
|
629
|
+
# Resolve file references and include content with seller context
|
|
630
|
+
data_with_content = self.resolve_file_references(
|
|
631
|
+
data,
|
|
632
|
+
base_path,
|
|
633
|
+
listing=None,
|
|
634
|
+
offering=None,
|
|
635
|
+
provider=None,
|
|
636
|
+
seller=data,
|
|
637
|
+
)
|
|
638
|
+
|
|
639
|
+
# Post to the endpoint using retry helper
|
|
640
|
+
return await self._post_with_retry(
|
|
641
|
+
endpoint="/publish/seller",
|
|
642
|
+
data=data_with_content,
|
|
643
|
+
entity_type="seller",
|
|
644
|
+
entity_name=data.get("name", "unknown"),
|
|
645
|
+
max_retries=max_retries,
|
|
646
|
+
dryrun=dryrun,
|
|
647
|
+
)
|
|
648
|
+
|
|
649
|
+
def find_offering_files(self, data_dir: Path) -> list[Path]:
|
|
650
|
+
"""Find all service offering files in a directory tree."""
|
|
651
|
+
files = find_files_by_schema(data_dir, "service_v1")
|
|
652
|
+
return sorted([f[0] for f in files])
|
|
653
|
+
|
|
654
|
+
def find_listing_files(self, data_dir: Path) -> list[Path]:
|
|
655
|
+
"""Find all service listing files in a directory tree."""
|
|
656
|
+
files = find_files_by_schema(data_dir, "listing_v1")
|
|
657
|
+
return sorted([f[0] for f in files])
|
|
658
|
+
|
|
659
|
+
def find_provider_files(self, data_dir: Path) -> list[Path]:
|
|
660
|
+
"""Find all provider files in a directory tree."""
|
|
661
|
+
files = find_files_by_schema(data_dir, "provider_v1")
|
|
662
|
+
return sorted([f[0] for f in files])
|
|
663
|
+
|
|
664
|
+
def find_seller_files(self, data_dir: Path) -> list[Path]:
|
|
665
|
+
"""Find all seller files in a directory tree."""
|
|
666
|
+
files = find_files_by_schema(data_dir, "seller_v1")
|
|
667
|
+
return sorted([f[0] for f in files])
|
|
668
|
+
|
|
669
|
+
@staticmethod
|
|
670
|
+
def _get_status_display(status: str) -> tuple[str, str]:
|
|
671
|
+
"""Get color and symbol for status display."""
|
|
672
|
+
status_map = {
|
|
673
|
+
"created": ("[green]+[/green]", "green"),
|
|
674
|
+
"updated": ("[blue]~[/blue]", "blue"),
|
|
675
|
+
"unchanged": ("[dim]=[/dim]", "dim"),
|
|
676
|
+
"create": ("[yellow]?[/yellow]", "yellow"), # Dryrun: would be created
|
|
677
|
+
"update": ("[cyan]?[/cyan]", "cyan"), # Dryrun: would be updated
|
|
678
|
+
}
|
|
679
|
+
return status_map.get(status, ("[green]✓[/green]", "green"))
|
|
680
|
+
|
|
681
|
+
async def _publish_offering_task(
    self, offering_file: Path, console: Console, semaphore: asyncio.Semaphore, dryrun: bool = False
) -> tuple[Path, dict[str, Any] | Exception]:
    """
    Async task to publish a single offering with concurrency control.

    Args:
        offering_file: Path to the service offering data file.
        console: Rich console used for per-offering progress output.
        semaphore: Caps the number of concurrent publish requests.
        dryrun: If True, no actual changes are made on the backend.

    Returns tuple of (offering_file, result_or_exception).
    """
    # Fallback name in case the data file itself cannot be loaded.  This
    # guarantees the except handler below never raises while reporting;
    # previously it re-called load_data_file(), so a load failure would
    # raise again and escape the asyncio.gather() in publish_all_offerings.
    offering_name = offering_file.stem
    async with semaphore:  # Limit concurrent requests
        try:
            # Load offering data to get the name
            data, _ = load_data_file(offering_file)
            offering_name = data.get("name", offering_file.stem)

            # Publish the offering
            result = await self.post_service_offering_async(offering_file, dryrun=dryrun)

            # Print complete statement after publication
            if result.get("skipped"):
                reason = result.get("reason", "unknown")
                console.print(f" [yellow]⊘[/yellow] Skipped offering: [cyan]{offering_name}[/cyan] - {reason}")
            else:
                provider_name = result.get("provider_name")
                status = result.get("status", "created")
                symbol, color = self._get_status_display(status)
                console.print(
                    f" {symbol} [{color}]{status.capitalize()}[/{color}] offering: [cyan]{offering_name}[/cyan] "
                    f"(provider: {provider_name})"
                )

            return (offering_file, result)
        except Exception as e:
            # Do NOT re-load the data file here: if load_data_file() was the
            # source of the exception, reloading would raise a second time.
            console.print(f" [red]✗[/red] Failed to publish offering: [cyan]{offering_name}[/cyan] - {str(e)}")
            return (offering_file, e)
|
|
717
|
+
|
|
718
|
+
async def publish_all_offerings(self, data_dir: Path, dryrun: bool = False) -> dict[str, Any]:
    """
    Publish every service offering found under *data_dir* concurrently.

    All service directories are validated for consistency first; if any
    validation error is found, nothing is published and the errors are
    returned in the summary.

    Args:
        data_dir: Directory to search for offering files.
        dryrun: If True, runs in dry run mode (no actual changes).

    Returns:
        Summary dict with total/success/failed counts, per-status
        created/updated/unchanged counts, and a list of error records.
    """
    # Up-front consistency check: refuse to publish from an invalid tree.
    schema_dir = Path(unitysvc_services.__file__).parent / "schema"
    validator = DataValidator(data_dir, schema_dir)
    validation_errors = validator.validate_all_service_directories(data_dir)
    if validation_errors:
        return {
            "total": 0,
            "success": 0,
            "failed": 0,
            "errors": [{"file": "validation", "error": err} for err in validation_errors],
        }

    offering_files = self.find_offering_files(data_dir)
    summary: dict[str, Any] = {
        "total": len(offering_files),
        "success": 0,
        "failed": 0,
        "created": 0,
        "updated": 0,
        "unchanged": 0,
        "errors": [],
    }
    if not offering_files:
        return summary

    console = Console()

    # Fan out concurrently; the semaphore caps in-flight requests.
    gate = asyncio.Semaphore(self.max_concurrent_requests)
    outcomes = await asyncio.gather(
        *(self._publish_offering_task(path, console, gate, dryrun=dryrun) for path in offering_files)
    )

    # Dryrun statuses ("create"/"update") fold into the same counters as
    # their real counterparts.
    bucket_for = {
        "created": "created",
        "create": "created",
        "updated": "updated",
        "update": "updated",
        "unchanged": "unchanged",
    }
    for path, outcome in outcomes:
        if isinstance(outcome, Exception):
            summary["failed"] += 1
            summary["errors"].append({"file": str(path), "error": str(outcome)})
            continue
        summary["success"] += 1
        bucket = bucket_for.get(outcome.get("status", "created"))
        if bucket is not None:
            summary[bucket] += 1

    return summary
|
|
783
|
+
|
|
784
|
+
async def _publish_listing_task(
    self, listing_file: Path, console: Console, semaphore: asyncio.Semaphore, dryrun: bool = False
) -> tuple[Path, dict[str, Any] | Exception]:
    """
    Async task to publish a single listing with concurrency control.

    Args:
        listing_file: Path to the service listing data file.
        console: Rich console used for per-listing progress output.
        semaphore: Caps the number of concurrent publish requests.
        dryrun: If True, no actual changes are made on the backend.

    Returns tuple of (listing_file, result_or_exception).
    """
    # Fallback name in case the data file itself cannot be loaded.  This
    # guarantees the except handler below never raises while reporting;
    # previously it re-called load_data_file(), so a load failure would
    # raise again and escape the asyncio.gather() in publish_all_listings.
    listing_name = listing_file.stem
    async with semaphore:  # Limit concurrent requests
        try:
            # Load listing data to get the name
            data, _ = load_data_file(listing_file)
            listing_name = data.get("name", listing_file.stem)

            # Publish the listing
            result = await self.post_service_listing_async(listing_file, dryrun=dryrun)

            # Print complete statement after publication
            if result.get("skipped"):
                reason = result.get("reason", "unknown")
                console.print(f" [yellow]⊘[/yellow] Skipped listing: [cyan]{listing_name}[/cyan] - {reason}")
            else:
                service_name = result.get("service_name")
                provider_name = result.get("provider_name")
                status = result.get("status", "created")
                symbol, color = self._get_status_display(status)
                console.print(
                    f" {symbol} [{color}]{status.capitalize()}[/{color}] listing: [cyan]{listing_name}[/cyan] "
                    f"(service: {service_name}, provider: {provider_name})"
                )

            return (listing_file, result)
        except Exception as e:
            # Report the listing *name* (was the raw file path — inconsistent
            # with the offering/provider/seller tasks), and do not re-load the
            # data file since load_data_file() may be what raised.
            console.print(f" [red]✗[/red] Failed to publish listing: [cyan]{listing_name}[/cyan] - {str(e)}")
            return (listing_file, e)
|
|
821
|
+
|
|
822
|
+
async def publish_all_listings(self, data_dir: Path, dryrun: bool = False) -> dict[str, Any]:
    """
    Publish every service listing found under *data_dir* concurrently.

    All service directories are validated for consistency first; if any
    validation error is found, nothing is published and the errors are
    returned in the summary.

    Args:
        data_dir: Directory to search for listing files.
        dryrun: If True, runs in dry run mode (no actual changes).

    Returns:
        Summary dict with total/success/failed counts, per-status
        created/updated/unchanged counts, and a list of error records.
    """
    # Up-front consistency check: refuse to publish from an invalid tree.
    schema_dir = Path(unitysvc_services.__file__).parent / "schema"
    validator = DataValidator(data_dir, schema_dir)
    validation_errors = validator.validate_all_service_directories(data_dir)
    if validation_errors:
        return {
            "total": 0,
            "success": 0,
            "failed": 0,
            "errors": [{"file": "validation", "error": err} for err in validation_errors],
        }

    listing_files = self.find_listing_files(data_dir)
    summary: dict[str, Any] = {
        "total": len(listing_files),
        "success": 0,
        "failed": 0,
        "created": 0,
        "updated": 0,
        "unchanged": 0,
        "errors": [],
    }
    if not listing_files:
        return summary

    console = Console()

    # Fan out concurrently; the semaphore caps in-flight requests.
    gate = asyncio.Semaphore(self.max_concurrent_requests)
    outcomes = await asyncio.gather(
        *(self._publish_listing_task(path, console, gate, dryrun=dryrun) for path in listing_files)
    )

    # Dryrun statuses ("create"/"update") fold into the same counters as
    # their real counterparts.
    bucket_for = {
        "created": "created",
        "create": "created",
        "updated": "updated",
        "update": "updated",
        "unchanged": "unchanged",
    }
    for path, outcome in outcomes:
        if isinstance(outcome, Exception):
            summary["failed"] += 1
            summary["errors"].append({"file": str(path), "error": str(outcome)})
            continue
        summary["success"] += 1
        bucket = bucket_for.get(outcome.get("status", "created"))
        if bucket is not None:
            summary[bucket] += 1

    return summary
|
|
883
|
+
|
|
884
|
+
async def _publish_provider_task(
    self, provider_file: Path, console: Console, semaphore: asyncio.Semaphore, dryrun: bool = False
) -> tuple[Path, dict[str, Any] | Exception]:
    """
    Async task to publish a single provider with concurrency control.

    Args:
        provider_file: Path to the provider data file.
        console: Rich console used for per-provider progress output.
        semaphore: Caps the number of concurrent publish requests.
        dryrun: If True, no actual changes are made on the backend.

    Returns tuple of (provider_file, result_or_exception).
    """
    # Fallback name in case the data file itself cannot be loaded.  This
    # guarantees the except handler below never raises while reporting;
    # previously it re-called load_data_file(), so a load failure would
    # raise again and escape the asyncio.gather() in publish_all_providers.
    provider_name = provider_file.stem
    async with semaphore:  # Limit concurrent requests
        try:
            # Load provider data to get the name
            data, _ = load_data_file(provider_file)
            provider_name = data.get("name", provider_file.stem)

            # Publish the provider
            result = await self.post_provider_async(provider_file, dryrun=dryrun)

            # Print complete statement after publication
            if result.get("skipped"):
                reason = result.get("reason", "unknown")
                console.print(f" [yellow]⊘[/yellow] Skipped provider: [cyan]{provider_name}[/cyan] - {reason}")
            else:
                status = result.get("status", "created")
                symbol, color = self._get_status_display(status)
                console.print(
                    f" {symbol} [{color}]{status.capitalize()}[/{color}] provider: [cyan]{provider_name}[/cyan]"
                )

            return (provider_file, result)
        except Exception as e:
            # Do NOT re-load the data file here: if load_data_file() was the
            # source of the exception, reloading would raise a second time.
            console.print(f" [red]✗[/red] Failed to publish provider: [cyan]{provider_name}[/cyan] - {str(e)}")
            return (provider_file, e)
|
|
918
|
+
|
|
919
|
+
async def publish_all_providers(self, data_dir: Path, dryrun: bool = False) -> dict[str, Any]:
    """
    Publish every provider found under *data_dir* concurrently.

    Args:
        data_dir: Directory to search for provider files.
        dryrun: If True, runs in dry run mode (no actual changes).

    Returns:
        Summary dict with total/success/failed counts, per-status
        created/updated/unchanged counts, and a list of error records.
    """
    provider_files = self.find_provider_files(data_dir)
    summary: dict[str, Any] = {
        "total": len(provider_files),
        "success": 0,
        "failed": 0,
        "created": 0,
        "updated": 0,
        "unchanged": 0,
        "errors": [],
    }
    if not provider_files:
        return summary

    console = Console()

    # Fan out concurrently; the semaphore caps in-flight requests.
    gate = asyncio.Semaphore(self.max_concurrent_requests)
    outcomes = await asyncio.gather(
        *(self._publish_provider_task(path, console, gate, dryrun=dryrun) for path in provider_files)
    )

    # Dryrun statuses ("create"/"update") fold into the same counters as
    # their real counterparts.
    bucket_for = {
        "created": "created",
        "create": "created",
        "updated": "updated",
        "update": "updated",
        "unchanged": "unchanged",
    }
    for path, outcome in outcomes:
        if isinstance(outcome, Exception):
            summary["failed"] += 1
            summary["errors"].append({"file": str(path), "error": str(outcome)})
            continue
        summary["success"] += 1
        bucket = bucket_for.get(outcome.get("status", "created"))
        if bucket is not None:
            summary[bucket] += 1

    return summary
|
|
967
|
+
|
|
968
|
+
async def _publish_seller_task(
    self, seller_file: Path, console: Console, semaphore: asyncio.Semaphore, dryrun: bool = False
) -> tuple[Path, dict[str, Any] | Exception]:
    """
    Async task to publish a single seller with concurrency control.

    Args:
        seller_file: Path to the seller data file.
        console: Rich console used for per-seller progress output.
        semaphore: Caps the number of concurrent publish requests.
        dryrun: If True, no actual changes are made on the backend.

    Returns tuple of (seller_file, result_or_exception).
    """
    # Fallback name in case the data file itself cannot be loaded.  This
    # guarantees the except handler below never raises while reporting;
    # previously it re-called load_data_file(), so a load failure would
    # raise again and escape the asyncio.gather() in publish_all_sellers.
    seller_name = seller_file.stem
    async with semaphore:  # Limit concurrent requests
        try:
            # Load seller data to get the name
            data, _ = load_data_file(seller_file)
            seller_name = data.get("name", seller_file.stem)

            # Publish the seller
            result = await self.post_seller_async(seller_file, dryrun=dryrun)

            # Print complete statement after publication
            if result.get("skipped"):
                reason = result.get("reason", "unknown")
                console.print(f" [yellow]⊘[/yellow] Skipped seller: [cyan]{seller_name}[/cyan] - {reason}")
            else:
                status = result.get("status", "created")
                symbol, color = self._get_status_display(status)
                console.print(
                    f" {symbol} [{color}]{status.capitalize()}[/{color}] seller: [cyan]{seller_name}[/cyan]"
                )

            return (seller_file, result)
        except Exception as e:
            # Do NOT re-load the data file here: if load_data_file() was the
            # source of the exception, reloading would raise a second time.
            console.print(f" [red]✗[/red] Failed to publish seller: [cyan]{seller_name}[/cyan] - {str(e)}")
            return (seller_file, e)
|
|
1002
|
+
|
|
1003
|
+
async def publish_all_sellers(self, data_dir: Path, dryrun: bool = False) -> dict[str, Any]:
    """
    Publish every seller found under *data_dir* concurrently.

    Args:
        data_dir: Directory to search for seller files.
        dryrun: If True, runs in dry run mode (no actual changes).

    Returns:
        Summary dict with total/success/failed counts, per-status
        created/updated/unchanged counts, and a list of error records.
    """
    seller_files = self.find_seller_files(data_dir)
    summary: dict[str, Any] = {
        "total": len(seller_files),
        "success": 0,
        "failed": 0,
        "created": 0,
        "updated": 0,
        "unchanged": 0,
        "errors": [],
    }
    if not seller_files:
        return summary

    console = Console()

    # Fan out concurrently; the semaphore caps in-flight requests.
    gate = asyncio.Semaphore(self.max_concurrent_requests)
    outcomes = await asyncio.gather(
        *(self._publish_seller_task(path, console, gate, dryrun=dryrun) for path in seller_files)
    )

    # Dryrun statuses ("create"/"update") fold into the same counters as
    # their real counterparts.
    bucket_for = {
        "created": "created",
        "create": "created",
        "updated": "updated",
        "update": "updated",
        "unchanged": "unchanged",
    }
    for path, outcome in outcomes:
        if isinstance(outcome, Exception):
            summary["failed"] += 1
            summary["errors"].append({"file": str(path), "error": str(outcome)})
            continue
        summary["success"] += 1
        bucket = bucket_for.get(outcome.get("status", "created"))
        if bucket is not None:
            summary[bucket] += 1

    return summary
|
|
1050
|
+
|
|
1051
|
+
async def publish_all_models(self, data_dir: Path, dryrun: bool = False) -> dict[str, Any]:
    """
    Publish every data type under *data_dir* in dependency order.

    Publishing order:
      1. Sellers   - must exist before listings
      2. Providers - must exist before offerings
      3. Offerings - must exist before listings
      4. Listings  - depend on sellers, providers, and offerings

    Returns:
        Dict with a per-type result summary under "sellers"/"providers"/
        "offerings"/"listings" plus overall total_* counters.
    """
    combined: dict[str, Any] = {
        "sellers": {},
        "providers": {},
        "offerings": {},
        "listings": {},
        "total_success": 0,
        "total_failed": 0,
        "total_found": 0,
        "total_created": 0,
        "total_updated": 0,
        "total_unchanged": 0,
    }

    # Dependency-ordered phases: sellers -> providers -> offerings -> listings.
    phases = (
        ("sellers", self.publish_all_sellers),
        ("providers", self.publish_all_providers),
        ("offerings", self.publish_all_offerings),
        ("listings", self.publish_all_listings),
    )

    for kind, publish in phases:
        try:
            outcome = await publish(data_dir, dryrun=dryrun)
        except Exception as exc:
            # A catastrophic failure of a whole phase is recorded as one error.
            combined[kind] = {
                "total": 0,
                "success": 0,
                "failed": 1,
                "errors": [{"file": "N/A", "error": str(exc)}],
            }
            combined["total_failed"] += 1
            continue

        combined[kind] = outcome
        combined["total_success"] += outcome["success"]
        combined["total_failed"] += outcome["failed"]
        combined["total_found"] += outcome["total"]
        for counter in ("created", "updated", "unchanged"):
            combined[f"total_{counter}"] += outcome.get(counter, 0)

    return combined
|
|
1105
|
+
|
|
1106
|
+
|
|
1107
|
+
# CLI commands for publishing
# Typer sub-application exposing the `publish` command group; mounted by the
# package-level CLI (see cli.py).
app = typer.Typer(help="Publish data to backend")
# Shared Rich console used by all CLI commands in this module.
console = Console()
|
|
1110
|
+
|
|
1111
|
+
|
|
1112
|
+
@app.callback(invoke_without_command=True)
def publish_callback(
    ctx: typer.Context,
    data_path: Path | None = typer.Option(
        None,
        "--data-path",
        "-d",
        help="Path to data directory (default: current directory)",
    ),
    dryrun: bool = typer.Option(
        False,
        "--dryrun",
        help="Run in dry run mode (no actual changes)",
    ),
) -> None:
    """
    Publish data to backend.

    When called without a subcommand, publishes all data types in order:
    sellers → providers → offerings → listings.

    Use subcommands to publish specific data types:
    - providers: Publish only providers
    - sellers: Publish only sellers
    - offerings: Publish only service offerings
    - listings: Publish only service listings

    Required environment variables:
    - UNITYSVC_BASE_URL: Backend API URL
    - UNITYSVC_API_KEY: API key for authentication
    """
    # If a subcommand was invoked, skip this callback logic
    if ctx.invoked_subcommand is not None:
        return

    # No subcommand - publish all
    # Set data path; relative paths are resolved against the current directory.
    if data_path is None:
        data_path = Path.cwd()

    if not data_path.is_absolute():
        data_path = Path.cwd() / data_path

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    console.print(f"[bold blue]Publishing all data from:[/bold blue] {data_path}")
    console.print(f"[bold blue]Backend URL:[/bold blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")

    # Bridge the async publisher into this synchronous Typer command.
    async def _publish_all_async():
        async with ServiceDataPublisher() as publisher:
            return await publisher.publish_all_models(data_path, dryrun=dryrun)

    try:
        all_results = asyncio.run(_publish_all_async())

        # Create summary table
        console.print("\n[bold cyan]Publishing Summary[/bold cyan]")

        table = Table(show_header=True, header_style="bold cyan", border_style="cyan")
        table.add_column("Type", style="cyan", no_wrap=True)
        table.add_column("Found", justify="right")
        table.add_column("Success", justify="right", style="green")
        table.add_column("Failed", justify="right", style="red")
        table.add_column("Created", justify="right", style="green")
        table.add_column("Updated", justify="right", style="blue")
        table.add_column("Unchanged", justify="right", style="dim")

        data_type_display_names = {
            "sellers": "Sellers",
            "providers": "Providers",
            "offerings": "Offerings",
            "listings": "Listings",
        }

        # Add rows for each data type (zero counts render as blank cells).
        for data_type in ["sellers", "providers", "offerings", "listings"]:
            display_name = data_type_display_names[data_type]
            results = all_results[data_type]

            table.add_row(
                display_name,
                str(results["total"]),
                str(results["success"]),
                str(results["failed"]) if results["failed"] > 0 else "",
                str(results.get("created", 0)) if results.get("created", 0) > 0 else "",
                str(results.get("updated", 0)) if results.get("updated", 0) > 0 else "",
                str(results.get("unchanged", 0)) if results.get("unchanged", 0) > 0 else "",
            )

        # Add separator and total row
        table.add_section()
        table.add_row(
            "[bold]Total[/bold]",
            f"[bold]{all_results['total_found']}[/bold]",
            f"[bold green]{all_results['total_success']}[/bold green]",
            f"[bold red]{all_results['total_failed']}[/bold red]" if all_results["total_failed"] > 0 else "",
            f"[bold green]{all_results['total_created']}[/bold green]" if all_results["total_created"] > 0 else "",
            f"[bold blue]{all_results['total_updated']}[/bold blue]" if all_results["total_updated"] > 0 else "",
            f"[bold]{all_results['total_unchanged']}[/bold]" if all_results["total_unchanged"] > 0 else "",
        )

        console.print(table)

        # Display errors if any
        has_errors = False
        for data_type in ["sellers", "providers", "offerings", "listings"]:
            display_name = data_type_display_names[data_type]
            results = all_results[data_type]

            if results.get("errors"):
                if not has_errors:
                    # Print the section header only once, before the first group.
                    console.print("\n[bold red]Errors:[/bold red]")
                    has_errors = True

                console.print(f"\n [bold red]{display_name}:[/bold red]")
                for error in results["errors"]:
                    # Check if this is a skipped item (not a real failure)
                    if isinstance(error, dict) and error.get("error", "").startswith("skipped"):
                        continue
                    console.print(f" [red]✗[/red] {error.get('file', 'unknown')}")
                    console.print(f" {error.get('error', 'unknown error')}")

        # Non-zero failures produce a non-zero process exit code.
        if all_results["total_failed"] > 0:
            console.print(
                f"\n[yellow]⚠[/yellow] Completed with {all_results['total_failed']} failure(s)",
                style="bold yellow",
            )
            raise typer.Exit(code=1)
        else:
            if dryrun:
                console.print(
                    "\n[green]✓[/green] Dry run completed successfully - no changes made!",
                    style="bold green",
                )
            else:
                console.print(
                    "\n[green]✓[/green] All data published successfully!",
                    style="bold green",
                )

    except typer.Exit:
        # Re-raise our own exit codes untouched.
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish all data: {e}", style="bold red")
        raise typer.Exit(code=1)
|
|
1259
|
+
|
|
1260
|
+
|
|
1261
|
+
@app.command("providers")
def publish_providers(
    data_path: Path | None = typer.Option(
        None,
        "--data-path",
        "-d",
        help="Path to provider file or directory (default: current directory)",
    ),
    dryrun: bool = typer.Option(
        False,
        "--dryrun",
        help="Run in dry run mode (no actual changes)",
    ),
) -> None:
    """Publish provider(s) from a file or directory.

    A file path publishes that single provider and prints the raw response;
    a directory path publishes every provider found under it and prints a
    summary table. Exits non-zero on any failure.
    """

    # Set data path; relative paths are resolved against the current directory.
    if data_path is None:
        data_path = Path.cwd()

    if not data_path.is_absolute():
        data_path = Path.cwd() / data_path

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Handle single file
    if data_path.is_file():
        console.print(f"[blue]Publishing provider:[/blue] {data_path}")
        console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")
    else:
        console.print(f"[blue]Scanning for providers in:[/blue] {data_path}")
        console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")

    # Bridge the async publisher into this synchronous Typer command.
    # Returns (result, is_single_file).
    async def _publish_providers_async():
        async with ServiceDataPublisher() as publisher:
            # Handle single file
            if data_path.is_file():
                return await publisher.post_provider_async(data_path, dryrun=dryrun), True
            # Handle directory
            else:
                return await publisher.publish_all_providers(data_path, dryrun=dryrun), False

    try:
        result, is_single = asyncio.run(_publish_providers_async())

        if is_single:
            console.print("[green]✓[/green] Provider published successfully!")
            console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
        else:
            # Display summary (zero counts render as blank cells)
            console.print("\n[bold cyan]Publishing Summary[/bold cyan]")
            table = Table(show_header=True, header_style="bold cyan", border_style="cyan")
            table.add_column("Type", style="cyan")
            table.add_column("Found", justify="right")
            table.add_column("Success", justify="right")
            table.add_column("Failed", justify="right")
            table.add_column("Created", justify="right")
            table.add_column("Updated", justify="right")
            table.add_column("Unchanged", justify="right")

            table.add_row(
                "Providers",
                str(result["total"]),
                f"[green]{result['success']}[/green]",
                f"[red]{result['failed']}[/red]" if result["failed"] > 0 else "",
                f"[green]{result['created']}[/green]" if result["created"] > 0 else "",
                f"[blue]{result['updated']}[/blue]" if result["updated"] > 0 else "",
                f"[dim]{result['unchanged']}[/dim]" if result["unchanged"] > 0 else "",
            )

            console.print(table)

            # Display errors if any
            if result["errors"]:
                console.print("\n[bold red]Errors:[/bold red]")
                for error in result["errors"]:
                    console.print(f" [red]✗[/red] {error['file']}")
                    console.print(f" {error['error']}")

            # Non-zero failures produce a non-zero process exit code.
            if result["failed"] > 0:
                raise typer.Exit(code=1)
            else:
                if dryrun:
                    console.print("\n[green]✓[/green] Dry run completed successfully - no changes made!")
                else:
                    console.print("\n[green]✓[/green] All providers published successfully!")

    except typer.Exit:
        # Re-raise our own exit codes untouched.
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish providers: {e}", style="bold red")
        raise typer.Exit(code=1)
|
|
1355
|
+
|
|
1356
|
+
|
|
1357
|
+
@app.command("sellers")
def publish_sellers(
    data_path: Path | None = typer.Option(
        None,
        "--data-path",
        "-d",
        help="Path to seller file or directory (default: current directory)",
    ),
    dryrun: bool = typer.Option(
        False,
        "--dryrun",
        help="Run in dry run mode (no actual changes)",
    ),
):
    """Publish seller(s) from a file or directory."""
    # Resolve the target: default to CWD, then anchor any relative path there.
    resolved = Path.cwd() if data_path is None else data_path
    if not resolved.is_absolute():
        resolved = Path.cwd() / resolved
    data_path = resolved

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Decide once whether we are in single-file or directory-scan mode.
    single_file = data_path.is_file()
    if single_file:
        console.print(f"[blue]Publishing seller:[/blue] {data_path}")
    else:
        console.print(f"[blue]Scanning for sellers in:[/blue] {data_path}")
    console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")

    async def _run():
        # Returns (result, is_single) so the sync caller can pick a report format.
        async with ServiceDataPublisher() as publisher:
            if single_file:
                return await publisher.post_seller_async(data_path, dryrun=dryrun), True
            return await publisher.publish_all_sellers(data_path, dryrun=dryrun), False

    try:
        result, is_single = asyncio.run(_run())

        if is_single:
            console.print("[green]✓[/green] Seller published successfully!")
            console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
        else:
            # Directory mode: render an aggregate summary table.
            console.print("\n[bold cyan]Publishing Summary[/bold cyan]")
            summary = Table(show_header=True, header_style="bold cyan", border_style="cyan")
            summary.add_column("Type", style="cyan")
            for heading in ("Found", "Success", "Failed", "Created", "Updated", "Unchanged"):
                summary.add_column(heading, justify="right")

            summary.add_row(
                "Sellers",
                str(result["total"]),
                f"[green]{result['success']}[/green]",
                f"[red]{result['failed']}[/red]" if result["failed"] > 0 else "",
                f"[green]{result['created']}[/green]" if result["created"] > 0 else "",
                f"[blue]{result['updated']}[/blue]" if result["updated"] > 0 else "",
                f"[dim]{result['unchanged']}[/dim]" if result["unchanged"] > 0 else "",
            )
            console.print(summary)

            if result["errors"]:
                console.print("\n[bold red]Errors:[/bold red]")
                for failure in result["errors"]:
                    console.print(f"  [red]✗[/red] {failure['file']}")
                    console.print(f"    {failure['error']}")
                raise typer.Exit(code=1)
            if dryrun:
                console.print("\n[green]✓[/green] Dry run completed successfully - no changes made!")
            else:
                console.print("\n[green]✓[/green] All sellers published successfully!")

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish sellers: {e}", style="bold red")
        raise typer.Exit(code=1)
|
|
1448
|
+
@app.command("offerings")
def publish_offerings(
    data_path: Path | None = typer.Option(
        None,
        "--data-path",
        "-d",
        help="Path to service offering file or directory (default: current directory)",
    ),
    dryrun: bool = typer.Option(
        False,
        "--dryrun",
        help="Run in dry run mode (no actual changes)",
    ),
):
    """Publish service offering(s) from a file or directory."""
    # Resolve the target: default to CWD, then anchor any relative path there.
    resolved = Path.cwd() if data_path is None else data_path
    if not resolved.is_absolute():
        resolved = Path.cwd() / resolved
    data_path = resolved

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Decide once whether we are in single-file or directory-scan mode.
    single_file = data_path.is_file()
    if single_file:
        console.print(f"[blue]Publishing service offering:[/blue] {data_path}")
    else:
        console.print(f"[blue]Scanning for service offerings in:[/blue] {data_path}")
    console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")

    async def _run():
        # Returns (result, is_single) so the sync caller can pick a report format.
        async with ServiceDataPublisher() as publisher:
            if single_file:
                return await publisher.post_service_offering_async(data_path, dryrun=dryrun), True
            return await publisher.publish_all_offerings(data_path, dryrun=dryrun), False

    try:
        result, is_single = asyncio.run(_run())

        if is_single:
            console.print("[green]✓[/green] Service offering published successfully!")
            console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
        else:
            # Directory mode: render an aggregate summary table.
            console.print("\n[bold cyan]Publishing Summary[/bold cyan]")
            summary = Table(show_header=True, header_style="bold cyan", border_style="cyan")
            summary.add_column("Type", style="cyan")
            for heading in ("Found", "Success", "Failed", "Created", "Updated", "Unchanged"):
                summary.add_column(heading, justify="right")

            summary.add_row(
                "Offerings",
                str(result["total"]),
                f"[green]{result['success']}[/green]",
                f"[red]{result['failed']}[/red]" if result["failed"] > 0 else "",
                f"[green]{result['created']}[/green]" if result["created"] > 0 else "",
                f"[blue]{result['updated']}[/blue]" if result["updated"] > 0 else "",
                f"[dim]{result['unchanged']}[/dim]" if result["unchanged"] > 0 else "",
            )
            console.print(summary)

            if result["errors"]:
                console.print("\n[bold red]Errors:[/bold red]")
                for failure in result["errors"]:
                    console.print(f"  [red]✗[/red] {failure['file']}")
                    console.print(f"    {failure['error']}")
                raise typer.Exit(code=1)
            if dryrun:
                console.print("\n[green]✓[/green] Dry run completed successfully - no changes made!")
            else:
                console.print("\n[green]✓[/green] All service offerings published successfully!")

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish service offerings: {e}", style="bold red")
        raise typer.Exit(code=1)
|
|
1539
|
+
@app.command("listings")
def publish_listings(
    data_path: Path | None = typer.Option(
        None,
        "--data-path",
        "-d",
        help="Path to service listing file or directory (default: current directory)",
    ),
    dryrun: bool = typer.Option(
        False,
        "--dryrun",
        help="Run in dry run mode (no actual changes)",
    ),
):
    """Publish service listing(s) from a file or directory."""
    # Resolve the target: default to CWD, then anchor any relative path there.
    resolved = Path.cwd() if data_path is None else data_path
    if not resolved.is_absolute():
        resolved = Path.cwd() / resolved
    data_path = resolved

    if not data_path.exists():
        console.print(f"[red]✗[/red] Path not found: {data_path}", style="bold red")
        raise typer.Exit(code=1)

    # Decide once whether we are in single-file or directory-scan mode.
    single_file = data_path.is_file()
    if single_file:
        console.print(f"[blue]Publishing service listing:[/blue] {data_path}")
    else:
        console.print(f"[blue]Scanning for service listings in:[/blue] {data_path}")
    console.print(f"[blue]Backend URL:[/blue] {os.getenv('UNITYSVC_BASE_URL', 'N/A')}\n")

    async def _run():
        # Returns (result, is_single) so the sync caller can pick a report format.
        async with ServiceDataPublisher() as publisher:
            if single_file:
                return await publisher.post_service_listing_async(data_path, dryrun=dryrun), True
            return await publisher.publish_all_listings(data_path, dryrun=dryrun), False

    try:
        result, is_single = asyncio.run(_run())

        if is_single:
            console.print("[green]✓[/green] Service listing published successfully!")
            console.print(f"[cyan]Response:[/cyan] {json.dumps(result, indent=2)}")
        else:
            # Directory mode: render an aggregate summary table.
            console.print("\n[bold cyan]Publishing Summary[/bold cyan]")
            summary = Table(show_header=True, header_style="bold cyan", border_style="cyan")
            summary.add_column("Type", style="cyan")
            for heading in ("Found", "Success", "Failed", "Created", "Updated", "Unchanged"):
                summary.add_column(heading, justify="right")

            summary.add_row(
                "Listings",
                str(result["total"]),
                f"[green]{result['success']}[/green]",
                f"[red]{result['failed']}[/red]" if result["failed"] > 0 else "",
                f"[green]{result['created']}[/green]" if result["created"] > 0 else "",
                f"[blue]{result['updated']}[/blue]" if result["updated"] > 0 else "",
                f"[dim]{result['unchanged']}[/dim]" if result["unchanged"] > 0 else "",
            )
            console.print(summary)

            if result["errors"]:
                console.print("\n[bold red]Errors:[/bold red]")
                for failure in result["errors"]:
                    console.print(f"  [red]✗[/red] {failure['file']}")
                    console.print(f"    {failure['error']}")
                raise typer.Exit(code=1)
            if dryrun:
                console.print("\n[green]✓[/green] Dry run completed successfully - no changes made!")
            else:
                console.print("\n[green]✓[/green] All service listings published successfully!")

    except typer.Exit:
        raise
    except Exception as e:
        console.print(f"[red]✗[/red] Failed to publish service listings: {e}", style="bold red")
        raise typer.Exit(code=1)
|