unitysvc_services-0.1.24-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unitysvc_services/__init__.py +4 -0
- unitysvc_services/api.py +421 -0
- unitysvc_services/cli.py +23 -0
- unitysvc_services/format_data.py +140 -0
- unitysvc_services/interactive_prompt.py +1132 -0
- unitysvc_services/list.py +216 -0
- unitysvc_services/models/__init__.py +71 -0
- unitysvc_services/models/base.py +1375 -0
- unitysvc_services/models/listing_data.py +118 -0
- unitysvc_services/models/listing_v1.py +56 -0
- unitysvc_services/models/provider_data.py +79 -0
- unitysvc_services/models/provider_v1.py +54 -0
- unitysvc_services/models/seller_data.py +120 -0
- unitysvc_services/models/seller_v1.py +42 -0
- unitysvc_services/models/service_data.py +114 -0
- unitysvc_services/models/service_v1.py +81 -0
- unitysvc_services/populate.py +207 -0
- unitysvc_services/publisher.py +1628 -0
- unitysvc_services/py.typed +0 -0
- unitysvc_services/query.py +688 -0
- unitysvc_services/scaffold.py +1103 -0
- unitysvc_services/schema/base.json +777 -0
- unitysvc_services/schema/listing_v1.json +1286 -0
- unitysvc_services/schema/provider_v1.json +952 -0
- unitysvc_services/schema/seller_v1.json +379 -0
- unitysvc_services/schema/service_v1.json +1306 -0
- unitysvc_services/test.py +965 -0
- unitysvc_services/unpublisher.py +505 -0
- unitysvc_services/update.py +287 -0
- unitysvc_services/utils.py +533 -0
- unitysvc_services/validator.py +731 -0
- unitysvc_services-0.1.24.dist-info/METADATA +184 -0
- unitysvc_services-0.1.24.dist-info/RECORD +37 -0
- unitysvc_services-0.1.24.dist-info/WHEEL +5 -0
- unitysvc_services-0.1.24.dist-info/entry_points.txt +3 -0
- unitysvc_services-0.1.24.dist-info/licenses/LICENSE +21 -0
- unitysvc_services-0.1.24.dist-info/top_level.txt +1 -0
unitysvc_services/api.py
ADDED
@@ -0,0 +1,421 @@
+"""Base API client for UnitySVC with automatic curl fallback.
+
+This module provides the base class for all UnitySVC API clients with
+automatic network fallback from httpx to curl for systems with network
+restrictions (e.g., macOS with conda Python).
+"""
+
+import asyncio
+import json
+import os
+from typing import Any
+from urllib.parse import urlencode
+
+import httpx
+
+
+class UnitySvcAPI:
+    """Base class for UnitySVC API clients with automatic curl fallback.
+
+    Provides async HTTP GET/POST methods that try httpx first for performance,
+    then automatically fall back to curl if network restrictions are detected
+    (e.g., macOS with conda Python).
+
+    This base class can be used by:
+    - ServiceDataQuery (query/read operations)
+    - ServiceDataPublisher (publish/write operations)
+    - AdminQuery (administrative operations)
+    """
+
+    def __init__(self) -> None:
+        """Initialize API client from environment variables.
+
+        Raises:
+            ValueError: If required environment variables are not set
+        """
+        self.base_url = os.environ.get("UNITYSVC_BASE_URL")
+        if not self.base_url:
+            raise ValueError("UNITYSVC_BASE_URL environment variable not set")
+
+        self.api_key = os.environ.get("UNITYSVC_API_KEY")
+        if not self.api_key:
+            raise ValueError("UNITYSVC_API_KEY environment variable not set")
+
+        self.base_url = self.base_url.rstrip("/")
+        self.use_curl_fallback = False
+        self.client = httpx.AsyncClient(
+            headers={
+                "Authorization": f"Bearer {self.api_key}",
+                "Content-Type": "application/json",
+            },
+            timeout=30.0,
+        )
+
+    async def _make_request_curl(self, endpoint: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
+        """Make HTTP GET request using curl fallback (async).
+
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/sellers")
+            params: Query parameters
+
+        Returns:
+            JSON response as dictionary
+
+        Raises:
+            httpx.HTTPStatusError: If HTTP status code indicates error (with response details)
+            RuntimeError: If curl command fails or times out
+        """
+        url = f"{self.base_url}{endpoint}"
+        if params:
+            url = f"{url}?{urlencode(params)}"
+
+        cmd = [
+            "curl",
+            "-s",  # Silent mode
+            "-w",
+            "\n%{http_code}",  # Write status code on new line
+            "-H",
+            f"Authorization: Bearer {self.api_key}",
+            "-H",
+            "Accept: application/json",
+            url,
+        ]
+
+        try:
+            proc = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+            stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=30.0)
+
+            if proc.returncode != 0:
+                error_msg = stderr.decode().strip() if stderr else "curl command failed"
+                raise RuntimeError(f"Curl error: {error_msg}")
+
+            # Parse response: last line is status code, rest is body
+            output = stdout.decode().strip()
+            lines = output.split("\n")
+            status_code = int(lines[-1])
+            body = "\n".join(lines[:-1])
+
+            # Parse JSON response
+            try:
+                response_data = json.loads(body) if body else {}
+            except json.JSONDecodeError:
+                response_data = {"error": body}
+
+            # Raise exception for non-2xx status codes (mimics httpx behavior)
+            if status_code < 200 or status_code >= 300:
+                # Create a mock response object to raise HTTPStatusError
+                mock_request = httpx.Request("GET", url)
+                mock_response = httpx.Response(status_code=status_code, content=body.encode(), request=mock_request)
+                raise httpx.HTTPStatusError(f"HTTP {status_code}", request=mock_request, response=mock_response)
+
+            return response_data
+        except TimeoutError:
+            raise RuntimeError("Request timed out after 30 seconds")
+        except httpx.HTTPStatusError:
+            # Re-raise HTTP errors as-is
+            raise
+
+    async def _make_post_request_curl(
+        self, endpoint: str, json_data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
+    ) -> dict[str, Any]:
+        """Make HTTP POST request using curl fallback (async).
+
+        Args:
+            endpoint: API endpoint path (e.g., "/admin/subscriptions")
+            json_data: JSON body data
+            params: Query parameters
+
+        Returns:
+            JSON response as dictionary
+
+        Raises:
+            httpx.HTTPStatusError: If HTTP status code indicates error (with response details)
+            RuntimeError: If curl command fails or times out
+        """
+        url = f"{self.base_url}{endpoint}"
+        if params:
+            url = f"{url}?{urlencode(params)}"
+
+        cmd = [
+            "curl",
+            "-s",  # Silent mode
+            "-w",
+            "\n%{http_code}",  # Write status code on new line
+            "-X",
+            "POST",
+            "-H",
+            f"Authorization: Bearer {self.api_key}",
+            "-H",
+            "Content-Type: application/json",
+            "-H",
+            "Accept: application/json",
+        ]
+
+        if json_data:
+            cmd.extend(["-d", json.dumps(json_data)])
+
+        cmd.append(url)
+
+        try:
+            proc = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+            stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=30.0)
+
+            if proc.returncode != 0:
+                error_msg = stderr.decode().strip() if stderr else "curl command failed"
+                raise RuntimeError(f"Curl error: {error_msg}")
+
+            # Parse response: last line is status code, rest is body
+            output = stdout.decode().strip()
+            lines = output.split("\n")
+            status_code = int(lines[-1])
+            body = "\n".join(lines[:-1])
+
+            # Parse JSON response
+            try:
+                response_data = json.loads(body) if body else {}
+            except json.JSONDecodeError:
+                response_data = {"error": body}
+
+            # Raise exception for non-2xx status codes (mimics httpx behavior)
+            if status_code < 200 or status_code >= 300:
+                # Create a mock response object to raise HTTPStatusError
+                mock_request = httpx.Request("POST", url)
+                mock_response = httpx.Response(status_code=status_code, content=body.encode(), request=mock_request)
+                raise httpx.HTTPStatusError(f"HTTP {status_code}", request=mock_request, response=mock_response)
+
+            return response_data
+        except TimeoutError:
+            raise RuntimeError("Request timed out after 30 seconds")
+        except httpx.HTTPStatusError:
+            # Re-raise HTTP errors as-is
+            raise
+
+    async def get(self, endpoint: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
+        """Make a GET request to the backend API with automatic curl fallback.
+
+        Public async utility method for making GET requests. Tries httpx first for performance,
+        automatically falls back to curl if network restrictions are detected (e.g., macOS
+        with conda Python).
+
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/sellers", "/admin/documents")
+            params: Query parameters
+
+        Returns:
+            JSON response as dictionary
+
+        Raises:
+            RuntimeError: If both httpx and curl fail
+        """
+        # If we already know curl is needed, use it directly
+        if self.use_curl_fallback:
+            return await self._make_request_curl(endpoint, params)
+
+        # Try httpx first
+        try:
+            response = await self.client.get(f"{self.base_url}{endpoint}", params=params)
+            response.raise_for_status()
+            return response.json()
+        except (httpx.ConnectError, OSError):
+            # Connection failed - likely network restrictions
+            # Fall back to curl and remember this for future requests
+            self.use_curl_fallback = True
+            return await self._make_request_curl(endpoint, params)
+
+    async def post(
+        self, endpoint: str, json_data: dict[str, Any] | None = None, params: dict[str, Any] | None = None
+    ) -> dict[str, Any]:
+        """Make a POST request to the backend API with automatic curl fallback.
+
+        Public async utility method for making POST requests. Tries httpx first for performance,
+        automatically falls back to curl if network restrictions are detected (e.g., macOS
+        with conda Python).
+
+        Args:
+            endpoint: API endpoint path (e.g., "/admin/subscriptions")
+            json_data: JSON body data
+            params: Query parameters
+
+        Returns:
+            JSON response as dictionary
+
+        Raises:
+            RuntimeError: If both httpx and curl fail
+        """
+        # If we already know curl is needed, use it directly
+        if self.use_curl_fallback:
+            return await self._make_post_request_curl(endpoint, json_data, params)
+
+        # Try httpx first
+        try:
+            response = await self.client.post(f"{self.base_url}{endpoint}", json=json_data, params=params)
+            response.raise_for_status()
+            return response.json()
+        except (httpx.ConnectError, OSError):
+            # Connection failed - likely network restrictions
+            # Fall back to curl and remember this for future requests
+            self.use_curl_fallback = True
+            return await self._make_post_request_curl(endpoint, json_data, params)
+
+    async def _make_delete_request_curl(self, endpoint: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
+        """Make HTTP DELETE request using curl fallback (async).
+
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/offering/123")
+            params: Query parameters
+
+        Returns:
+            JSON response as dictionary
+
+        Raises:
+            httpx.HTTPStatusError: If HTTP status code indicates error (with response details)
+            RuntimeError: If curl command fails or times out
+        """
+        url = f"{self.base_url}{endpoint}"
+        if params:
+            url = f"{url}?{urlencode(params)}"
+
+        cmd = [
+            "curl",
+            "-s",  # Silent mode
+            "-w",
+            "\n%{http_code}",  # Write status code on new line
+            "-X",
+            "DELETE",
+            "-H",
+            f"Authorization: Bearer {self.api_key}",
+            "-H",
+            "Accept: application/json",
+            url,
+        ]
+
+        try:
+            proc = await asyncio.create_subprocess_exec(
+                *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+            )
+            stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=30.0)
+
+            if proc.returncode != 0:
+                error_msg = stderr.decode().strip() if stderr else "curl command failed"
+                raise RuntimeError(f"Curl error: {error_msg}")
+
+            # Parse response: last line is status code, rest is body
+            output = stdout.decode().strip()
+            lines = output.split("\n")
+            status_code = int(lines[-1])
+            body = "\n".join(lines[:-1])
+
+            # Parse JSON response
+            try:
+                response_data = json.loads(body) if body else {}
+            except json.JSONDecodeError:
+                response_data = {"error": body}
+
+            # Raise exception for non-2xx status codes (mimics httpx behavior)
+            if status_code < 200 or status_code >= 300:
+                # Create a mock response object to raise HTTPStatusError
+                mock_request = httpx.Request("DELETE", url)
+                mock_response = httpx.Response(status_code=status_code, content=body.encode(), request=mock_request)
+                raise httpx.HTTPStatusError(f"HTTP {status_code}", request=mock_request, response=mock_response)
+
+            return response_data
+        except TimeoutError:
+            raise RuntimeError("Request timed out after 30 seconds")
+        except httpx.HTTPStatusError:
+            # Re-raise HTTP errors as-is
+            raise
+
+    async def delete(self, endpoint: str, params: dict[str, Any] | None = None) -> dict[str, Any]:
+        """Make a DELETE request to the backend API with automatic curl fallback.
+
+        Public async utility method for making DELETE requests. Tries httpx first for performance,
+        automatically falls back to curl if network restrictions are detected (e.g., macOS
+        with conda Python).
+
+        Args:
+            endpoint: API endpoint path (e.g., "/publish/offering/123")
+            params: Query parameters
+
+        Returns:
+            JSON response as dictionary
+
+        Raises:
+            RuntimeError: If both httpx and curl fail
+        """
+        # If we already know curl is needed, use it directly
+        if self.use_curl_fallback:
+            return await self._make_delete_request_curl(endpoint, params)
+
+        # Try httpx first
+        try:
+            response = await self.client.delete(f"{self.base_url}{endpoint}", params=params)
+            response.raise_for_status()
+            return response.json()
+        except (httpx.ConnectError, OSError):
+            # Connection failed - likely network restrictions
+            # Fall back to curl and remember this for future requests
+            self.use_curl_fallback = True
+            return await self._make_delete_request_curl(endpoint, params)
+
+    async def check_task(self, task_id: str, poll_interval: float = 2.0, timeout: float = 300.0) -> dict[str, Any]:
+        """Check and wait for task completion (async version).
+
+        Utility function to poll a Celery task until it completes or times out.
+        Uses the async HTTP client with curl fallback.
+
+        Args:
+            task_id: Celery task ID to poll
+            poll_interval: Seconds between status checks (default: 2.0)
+            timeout: Maximum seconds to wait (default: 300.0)
+
+        Returns:
+            Task result dictionary
+
+        Raises:
+            ValueError: If task fails or times out
+        """
+        import time
+
+        start_time = time.time()
+
+        while True:
+            elapsed = time.time() - start_time
+            if elapsed > timeout:
+                raise ValueError(f"Task {task_id} timed out after {timeout}s")
+
+            # Check task status using get() with automatic curl fallback
+            # Use UnitySvcAPI.get to ensure we call the async version, not sync wrapper
+            try:
+                status = await UnitySvcAPI.get(self, f"/tasks/{task_id}")
+            except Exception:
+                # Network error while checking status - retry
+                await asyncio.sleep(poll_interval)
+                continue
+
+            state = status.get("state", "PENDING")
+
+            # Check if task is complete
+            if status.get("status") == "completed" or state == "SUCCESS":
+                return status.get("result", {})
+            elif status.get("status") == "failed" or state == "FAILURE":
+                error = status.get("error", "Unknown error")
+                raise ValueError(f"Task {task_id} failed: {error}")
+
+            # Still processing - wait and retry
+            await asyncio.sleep(poll_interval)
+
+    async def aclose(self):
+        """Close the HTTP client."""
+        await self.client.aclose()
+
+    async def __aenter__(self):
+        """Async context manager entry."""
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit."""
+        await self.aclose()
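For orientation, a minimal usage sketch of the client above (not part of the package diff; the endpoint path is taken from the docstring examples and the environment variables from __init__):

    import asyncio

    from unitysvc_services.api import UnitySvcAPI

    async def main() -> None:
        # UNITYSVC_BASE_URL and UNITYSVC_API_KEY must be set in the environment.
        async with UnitySvcAPI() as api:
            # GET with automatic httpx-to-curl fallback on connection errors.
            sellers = await api.get("/publish/sellers")
            print(sellers)

    asyncio.run(main())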
unitysvc_services/cli.py
ADDED
@@ -0,0 +1,23 @@
+"""Console script for unitysvc_services."""
+
+import typer
+
+from . import format_data, populate, publisher, query, scaffold, test, unpublisher, update, validator
+from . import list as list_cmd
+
+app = typer.Typer()
+
+# Register command groups
+# Init commands are defined in scaffold.py alongside their implementation
+app.add_typer(scaffold.app, name="init")
+app.add_typer(list_cmd.app, name="list")
+app.add_typer(query.app, name="query")
+app.add_typer(publisher.app, name="publish")
+app.add_typer(unpublisher.app, name="unpublish")
+app.add_typer(update.app, name="update")
+app.add_typer(test.app, name="test")
+
+# Register standalone commands at root level
+app.command("format")(format_data.format_data)
+app.command("validate")(validator.validate)
+app.command("populate")(populate.populate)
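A short sketch of exercising the assembled Typer app programmatically, e.g. from a test (the installed console-script name comes from entry_points.txt, whose contents are not shown in this diff, so the app object is invoked directly):

    from typer.testing import CliRunner

    from unitysvc_services.cli import app

    runner = CliRunner()
    # Lists the registered sub-commands: init, list, query, publish, unpublish,
    # update, test, plus the root-level format, validate, and populate commands.
    result = runner.invoke(app, ["--help"])
    assert result.exit_code == 0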
unitysvc_services/format_data.py
ADDED
@@ -0,0 +1,140 @@
+"""Format command - format data files."""
+
+from pathlib import Path
+
+import typer
+from rich.console import Console
+
+app = typer.Typer(help="Format data files")
+console = Console()
+
+
+@app.command()
+def format_data(
+    data_dir: Path | None = typer.Argument(
+        None,
+        help="Directory containing data files to format (default: current directory)",
+    ),
+    check_only: bool = typer.Option(
+        False,
+        "--check",
+        help="Check if files are formatted without modifying them",
+    ),
+):
+    """
+    Format data files (JSON, TOML, MD) to match pre-commit requirements.
+
+    This command:
+    - Formats JSON files with 2-space indentation
+    - Removes trailing whitespace
+    - Ensures files end with a newline
+    - Validates TOML syntax
+    """
+    import json as json_lib
+
+    # Set data directory
+    if data_dir is None:
+        data_dir = Path.cwd()
+
+    if not data_dir.is_absolute():
+        data_dir = Path.cwd() / data_dir
+
+    if not data_dir.exists():
+        console.print(f"[red]✗[/red] Data directory not found: {data_dir}", style="bold red")
+        raise typer.Exit(code=1)
+
+    console.print(f"[blue]{'Checking' if check_only else 'Formatting'} files in:[/blue] {data_dir}\n")
+
+    # Find all JSON, TOML, and MD files
+    all_files: list[Path] = []
+    for ext in ["json", "toml", "md"]:
+        all_files.extend(data_dir.rglob(f"*.{ext}"))
+
+    if not all_files:
+        console.print("[yellow]No files found to format.[/yellow]")
+        return
+
+    console.print(f"[cyan]Found {len(all_files)} file(s) to process[/cyan]\n")
+
+    files_formatted = 0
+    files_with_issues = []
+    files_failed = []
+
+    for file_path in sorted(all_files):
+        try:
+            # Read file content
+            with open(file_path, encoding="utf-8") as f:
+                original_content = f.read()
+
+            modified_content = original_content
+            changes = []
+
+            # Format based on file type
+            if file_path.suffix == ".json":
+                # Parse and reformat JSON
+                try:
+                    data = json_lib.loads(original_content)
+                    formatted_json = json_lib.dumps(data, indent=2, sort_keys=True, separators=(",", ": "))
+                    modified_content = formatted_json
+                    if modified_content != original_content.rstrip("\n"):
+                        changes.append("reformatted JSON")
+                except json_lib.JSONDecodeError as e:
+                    console.print(f"[red]✗[/red] Invalid JSON in {file_path}: {e}")
+                    files_failed.append(str(file_path.relative_to(data_dir)))
+                    continue
+
+            # Remove trailing whitespace from each line
+            lines = modified_content.split("\n")
+            stripped_lines = [line.rstrip() for line in lines]
+            modified_content = "\n".join(stripped_lines)
+            if "\n".join([line.rstrip() for line in original_content.split("\n")]) != modified_content:
+                if "reformatted JSON" not in changes:
+                    changes.append("removed trailing whitespace")
+
+            # Ensure file ends with single newline
+            if not modified_content.endswith("\n"):
+                modified_content += "\n"
+                changes.append("added end-of-file newline")
+            elif modified_content.endswith("\n\n"):
+                # Remove extra newlines at end
+                modified_content = modified_content.rstrip("\n") + "\n"
+                changes.append("fixed multiple end-of-file newlines")
+
+            # Check if file was modified
+            if modified_content != original_content:
+                files_with_issues.append(str(file_path.relative_to(data_dir)))
+
+                if check_only:
+                    console.print(f"[yellow]✗ Would format:[/yellow] {file_path.relative_to(data_dir)}")
+                    if changes:
+                        console.print(f" [dim]Changes: {', '.join(changes)}[/dim]")
+                else:
+                    # Write formatted content
+                    with open(file_path, "w", encoding="utf-8") as f:
+                        f.write(modified_content)
+                    console.print(f"[green]✓ Formatted:[/green] {file_path.relative_to(data_dir)}")
+                    if changes:
+                        console.print(f" [dim]Changes: {', '.join(changes)}[/dim]")
+                    files_formatted += 1
+            else:
+                if not check_only:
+                    console.print(f"[dim]✓ Already formatted:[/dim] {file_path.relative_to(data_dir)}")
+
+        except Exception as e:
+            console.print(f"[red]✗ Error processing {file_path.relative_to(data_dir)}: {e}[/red]")
+            files_failed.append(str(file_path.relative_to(data_dir)))
+
+    # Print summary
+    console.print("\n" + "=" * 50)
+    console.print("[bold]Format Summary:[/bold]")
+    console.print(f" Total files: {len(all_files)}")
+    if check_only:
+        console.print(f" [yellow]Files needing formatting: {len(files_with_issues)}[/yellow]")
+    else:
+        console.print(f" [green]✓ Files formatted: {files_formatted}[/green]")
+        console.print(f" [dim]Already formatted: {len(all_files) - files_formatted - len(files_failed)}[/dim]")
+    if files_failed:
+        console.print(f" [red]✗ Failed: {len(files_failed)}[/red]")
+
+    if files_failed or (check_only and files_with_issues):
+        raise typer.Exit(code=1)
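For reference, a hedged sketch of running the format command registered above in check-only mode; the directory name is a placeholder. A non-zero exit code corresponds to the typer.Exit(code=1) raised when files need formatting or fail to parse:

    from typer.testing import CliRunner

    from unitysvc_services.cli import app

    # "data" is a hypothetical directory; --check reports issues without modifying files.
    result = CliRunner().invoke(app, ["format", "data", "--check"])
    if result.exit_code != 0:
        print("Some files would be reformatted (or failed to parse).")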