unitysvc-services 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,515 @@
1
+ """Data validation module for unitysvc_services."""
2
+
3
+ import json
4
+ import os
5
+ import re
6
+ import tomllib as toml
7
+ from pathlib import Path
8
+ from typing import Any
9
+ from urllib.parse import urlparse
10
+
11
+ import typer
12
+ from jinja2 import Environment, TemplateSyntaxError
13
+ from jsonschema.validators import Draft7Validator
14
+ from rich.console import Console
15
+
16
+
17
class DataValidationError(Exception):
    """Raised when a data-consistency check on local data files fails."""
21
+
22
+
23
class DataValidator:
    """Validates data files against JSON schemas.

    Schemas are loaded eagerly from ``schema_dir`` at construction time;
    files under ``data_dir`` are validated on demand by the other methods.
    """

    def __init__(self, data_dir: Path, schema_dir: Path):
        # Root of the data tree to validate, and the directory of *.json schemas.
        self.data_dir = data_dir
        self.schema_dir = schema_dir
        # Mapping of schema file stem -> parsed schema document.
        self.schemas: dict[str, dict[str, Any]] = {}
        self.load_schemas()

    def load_schemas(self) -> None:
        """Load every ``*.json`` schema file from the schema directory."""
        for schema_path in self.schema_dir.glob("*.json"):
            try:
                with open(schema_path, encoding="utf-8") as fh:
                    self.schemas[schema_path.stem] = json.load(fh)
            except Exception as exc:
                # Best effort: a broken schema file is reported but does not
                # abort loading of the remaining schemas.
                print(f"Error loading schema {schema_path}: {exc}")
42
+
43
def is_url(self, value: str) -> bool:
    """Return True if *value* parses as a URL with both a scheme and a host."""
    try:
        parsed = urlparse(value)
    except Exception:
        # urlparse can raise ValueError on malformed input (e.g. bad ports).
        return False
    return bool(parsed.scheme) and bool(parsed.netloc)
50
+
51
+ def find_union_fields(self, schema: dict[str, Any]) -> set[str]:
52
+ """Find fields that are Union[str, HttpUrl] types in the schema."""
53
+ union_fields: set[str] = set()
54
+
55
+ def traverse_schema(obj: Any, path: str = "") -> None:
56
+ if isinstance(obj, dict):
57
+ # Check for Union type with string and URL format
58
+ if "anyOf" in obj:
59
+ any_of = obj["anyOf"]
60
+ # Count non-null items for the check
61
+ non_null_items = [item for item in any_of if item.get("type") != "null"]
62
+ has_plain_string = any(
63
+ item.get("type") == "string" and "format" not in item for item in non_null_items
64
+ )
65
+ has_uri_string = any(
66
+ item.get("type") == "string" and item.get("format") == "uri" for item in non_null_items
67
+ )
68
+
69
+ # Check for Union[str, HttpUrl] or Union[str, HttpUrl, None]
70
+ if len(non_null_items) == 2 and has_plain_string and has_uri_string:
71
+ union_fields.add(path)
72
+
73
+ # Recursively check properties
74
+ if "properties" in obj:
75
+ for prop_name, prop_schema in obj["properties"].items():
76
+ new_path = f"{path}.{prop_name}" if path else prop_name
77
+ traverse_schema(prop_schema, new_path)
78
+
79
+ # Check other schema structures
80
+ for key, value in obj.items():
81
+ if key not in ["properties", "anyOf"] and isinstance(value, dict | list):
82
+ traverse_schema(value, path)
83
+
84
+ elif isinstance(obj, list):
85
+ for item in obj:
86
+ traverse_schema(item, path)
87
+
88
+ traverse_schema(schema)
89
+ return union_fields
90
+
91
+ def validate_file_references(self, data: dict[str, Any], file_path: Path, union_fields: set[str]) -> list[str]:
92
+ """
93
+ Validate that file references in Union[str, HttpUrl] fields exist.
94
+
95
+ Also validates that all file_path fields use relative paths.
96
+ """
97
+ errors: list[str] = []
98
+
99
+ def check_field(obj: Any, field_path: str, current_path: str = "") -> None:
100
+ if isinstance(obj, dict):
101
+ for key, value in obj.items():
102
+ new_path = f"{current_path}.{key}" if current_path else key
103
+
104
+ # Check if this field is a Union[str, HttpUrl] field
105
+ if (
106
+ new_path in union_fields
107
+ and value is not None
108
+ and isinstance(value, str)
109
+ and not self.is_url(value)
110
+ ):
111
+ # Empty string is not a valid file reference
112
+ if value == "":
113
+ errors.append(f"Empty string in field '{new_path}' is not a valid file reference or URL")
114
+ # It's a file reference, must be relative path
115
+ elif Path(value).is_absolute():
116
+ errors.append(
117
+ f"File reference '{value}' in field '{new_path}' "
118
+ f"must be a relative path, not an absolute path"
119
+ )
120
+ else:
121
+ referenced_file = file_path.parent / value
122
+ if not referenced_file.exists():
123
+ errors.append(
124
+ f"File reference '{value}' in field '{new_path}' "
125
+ f"does not exist at {referenced_file}"
126
+ )
127
+
128
+ # Check if this is a file_path field (regardless of schema type)
129
+ if key == "file_path" and isinstance(value, str):
130
+ # file_path fields must not be URLs (use external_url instead)
131
+ if self.is_url(value):
132
+ errors.append(
133
+ f"File path '{value}' in field '{new_path}' "
134
+ f"must not be a URL. Use 'external_url' field for URLs instead."
135
+ )
136
+ # All file_path fields must use relative paths
137
+ elif Path(value).is_absolute():
138
+ errors.append(
139
+ f"File path '{value}' in field '{new_path}' "
140
+ f"must be a relative path, not an absolute path"
141
+ )
142
+
143
+ # Recurse into nested objects
144
+ if isinstance(value, dict | list):
145
+ check_field(value, field_path, new_path)
146
+
147
+ elif isinstance(obj, list):
148
+ for i, item in enumerate(obj):
149
+ if isinstance(item, dict | list):
150
+ check_field(item, field_path, f"{current_path}[{i}]")
151
+
152
+ check_field(data, str(file_path))
153
+ return errors
154
+
155
def validate_name_consistency(self, data: dict[str, Any], file_path: Path, schema_name: str) -> list[str]:
    """Check that the 'name' field agrees with the containing directory name.

    Applies only to provider and service data files; files without a string
    'name' field are accepted unchanged. Note that *schema_name* is
    currently unused — the file name determines which rule applies.
    """
    issues: list[str] = []

    name = data.get("name")
    if not isinstance(name, str):
        return issues

    parent_dir = file_path.parent.name

    if file_path.name in ("provider.json", "provider.toml"):
        # The provider directory must match the normalized provider name.
        if self._normalize_name(name) != self._normalize_name(parent_dir):
            issues.append(
                f"Provider name '{name}' does not match directory name '{parent_dir}'. "
                f"Expected directory name to match normalized provider name: '{self._normalize_name(name)}'"
            )
    elif file_path.name in ("service.json", "service.toml"):
        # The service directory must match the normalized service name.
        if self._normalize_name(name) != self._normalize_name(parent_dir):
            normalized = self._normalize_name(name)
            issues.append(
                f"Service name '{name}' does not match "
                f"service directory name '{parent_dir}'. "
                f"Expected service directory name to match "
                f"normalized service name: '{normalized}'"
            )

    return issues
190
+
191
+ def _normalize_name(self, name: str) -> str:
192
+ """Normalize a name for directory comparison."""
193
+ # Convert to lowercase and replace spaces/special chars with hyphens
194
+ normalized = re.sub(r"[^a-zA-Z0-9]+", "-", name.lower())
195
+ # Remove leading/trailing hyphens
196
+ normalized = normalized.strip("-")
197
+ return normalized
198
+
199
+ def load_data_file(self, file_path: Path) -> tuple[dict[str, Any] | None, list[str]]:
200
+ """Load data from JSON or TOML file."""
201
+ errors: list[str] = []
202
+
203
+ try:
204
+ if file_path.suffix == ".toml":
205
+ with open(file_path, "rb") as f:
206
+ data = toml.load(f)
207
+ elif file_path.suffix == ".json":
208
+ with open(file_path, encoding="utf-8") as f:
209
+ data = json.load(f)
210
+ else:
211
+ return None, [f"Unsupported file format: {file_path.suffix}"]
212
+ return data, errors
213
+ except Exception as e:
214
+ format_name = {".json": "JSON", ".toml": "TOML"}.get(file_path.suffix, "data")
215
+ return None, [f"Failed to parse {format_name}: {e}"]
216
+
217
def validate_data_file(self, file_path: Path) -> tuple[bool, list[str]]:
    """Validate one JSON/TOML data file against its declared schema.

    The file must carry a 'schema' field naming one of the loaded schemas.
    The data is checked against that schema (with format checking), its file
    references are resolved, and its 'name' field is compared with the
    containing directory name. Returns ``(is_valid, errors)``.
    """
    data, load_errors = self.load_data_file(file_path)
    if load_errors:
        return False, load_errors
    if data is None:
        # Defensive: loader reported no errors but produced no data.
        return False, ["Failed to load data file"]

    if "schema" not in data:
        return False, ["Missing 'schema' field in data file"]
    schema_name = data["schema"]
    if schema_name not in self.schemas:
        return False, [f"Schema '{schema_name}' not found in schema directory"]
    schema = self.schemas[schema_name]

    errors: list[str] = []
    try:
        # Format checking (uri, email, ...) is enabled explicitly.
        validator = Draft7Validator(schema, format_checker=Draft7Validator.FORMAT_CHECKER)
        validator.check_schema(schema)  # the schema itself must be well-formed
        for issue in validator.iter_errors(data):
            errors.append(f"Schema validation error: {issue.message}")
            if issue.absolute_path:
                errors.append(f"  Path: {'.'.join(str(p) for p in issue.absolute_path)}")
    except Exception as exc:
        errors.append(f"Validation error: {exc}")

    # Cross-field checks beyond what JSON Schema itself can express.
    errors.extend(self.validate_file_references(data, file_path, self.find_union_fields(schema)))
    errors.extend(self.validate_name_consistency(data, file_path, schema_name))

    return not errors, errors
263
+
264
def validate_md_file(self, file_path: Path) -> tuple[bool, list[str]]:
    """Validate a markdown file.

    The file must be readable; non-empty content must additionally parse as
    a valid Jinja2 template. Returns ``(is_valid, errors)``.
    """
    try:
        with open(file_path, encoding="utf-8") as fh:
            content = fh.read()
    except Exception as exc:
        return False, [f"Failed to read markdown file: {exc}"]

    # Empty or whitespace-only markdown is accepted as-is.
    if not content.strip():
        return True, []

    errors: list[str] = []
    try:
        Environment().parse(content)
    except TemplateSyntaxError as exc:
        errors.append(f"Jinja2 syntax error: {exc.message} at line {exc.lineno}")
    except Exception as exc:
        errors.append(f"Jinja2 validation error: {exc}")
    return not errors, errors
287
+
288
def validate_seller_uniqueness(self) -> tuple[bool, list[str]]:
    """Ensure the data tree contains exactly one 'seller_v1' data file.

    Each repository must declare one and only one seller; zero or multiple
    seller files are reported as errors. Returns ``(is_valid, errors)``.
    """
    if not self.data_dir.exists():
        return True, []

    sellers: list[Path] = []
    for candidate in self.data_dir.rglob("*"):
        if not candidate.is_file() or candidate.suffix not in (".json", ".toml"):
            continue
        try:
            data, _ = self.load_data_file(candidate)
        except Exception:
            # Unreadable files are reported by the per-file validation.
            continue
        if data and data.get("schema") == "seller_v1":
            sellers.append(candidate.relative_to(self.data_dir))

    errors: list[str] = []
    if not sellers:
        errors.append(
            "No seller file found. Each repository must have exactly one data file using the 'seller_v1' schema."
        )
    elif len(sellers) > 1:
        errors.append(f"Found {len(sellers)} seller files, but only one is allowed per repository:")
        errors.extend(f"  - {seller}" for seller in sellers)

    return not errors, errors
322
+
323
def validate_all(self) -> dict[str, tuple[bool, list[str]]]:
    """Validate every data/markdown file under the data directory.

    Returns a mapping of path (relative to the data directory) to
    ``(is_valid, errors)``. The synthetic key '_seller_uniqueness' carries
    the repository-level seller check when that check fails.
    """
    results: dict[str, tuple[bool, list[str]]] = {}
    if not self.data_dir.exists():
        return results

    # Repository-level invariant first: exactly one seller file.
    seller_ok, seller_errors = self.validate_seller_uniqueness()
    if not seller_ok:
        results["_seller_uniqueness"] = (False, seller_errors)

    for candidate in self.data_dir.rglob("*"):
        if not candidate.is_file():
            continue
        if candidate.suffix in (".json", ".toml"):
            verdict = self.validate_data_file(candidate)
        elif candidate.suffix == ".md":
            verdict = self.validate_md_file(candidate)
        else:
            continue
        results[str(candidate.relative_to(self.data_dir))] = verdict

    return results
350
+
351
def validate_directory_data(self, directory: Path) -> None:
    """Validate consistency of service/listing files within one directory.

    Rules:
      1. 'service_v1' files in the same directory must have unique names.
      2. A 'listing_v1' file with an explicit 'service_name' must reference a
         service defined in the same directory.
      3. A listing without 'service_name' is only allowed when the directory
         contains exactly one service.

    Args:
        directory: Directory whose data files are validated (non-recursive).

    Raises:
        DataValidationError: If any rule is violated.
    """
    data_files: list[Path] = []
    for pattern in ("*.json", "*.toml"):
        data_files.extend(directory.glob(pattern))

    services: dict[str, Path] = {}  # service name -> defining file
    listings: list[tuple[Path, dict[str, Any]]] = []

    for file_path in data_files:
        try:
            data, load_errors = self.load_data_file(file_path)
            if load_errors or data is None:
                continue

            schema = data.get("schema")
            if schema == "service_v1":
                service_name = data.get("name")
                if not service_name:
                    raise DataValidationError(f"Service file {file_path} missing 'name' field")
                if service_name in services:
                    raise DataValidationError(
                        f"Duplicate service name '{service_name}' found in directory {directory}:\n"
                        f"  - {services[service_name]}\n"
                        f"  - {file_path}"
                    )
                services[service_name] = file_path
            elif schema == "listing_v1":
                listings.append((file_path, data))
        # Targeted re-raise instead of the original isinstance() check inside
        # a broad except — validation failures propagate, everything else is
        # treated as a file that other validators will report.
        except DataValidationError:
            raise
        except Exception:
            continue

    # Every listing must resolve unambiguously to a service in this directory.
    for listing_file, listing_data in listings:
        service_name = listing_data.get("service_name")
        if service_name:
            if service_name not in services:
                available_services = ", ".join(services.keys()) if services else "none"
                raise DataValidationError(
                    f"Listing file {listing_file} references service_name '{service_name}' "
                    f"which does not exist in the same directory.\n"
                    f"Available services: {available_services}"
                )
        elif len(services) == 0:
            raise DataValidationError(
                f"Listing file {listing_file} does not specify 'service_name' "
                f"and no service files found in the same directory."
            )
        elif len(services) > 1:
            available_services = ", ".join(services.keys())
            raise DataValidationError(
                f"Listing file {listing_file} does not specify 'service_name' "
                f"but multiple services exist in the same directory: {available_services}. "
                f"Please add 'service_name' field to the listing to specify which service it belongs to."
            )
433
+
434
def validate_all_service_directories(self, data_dir: Path) -> list[str]:
    """Run directory-level consistency checks across a whole data tree.

    Every directory containing at least one 'service_v1' or 'listing_v1'
    file is validated with ``validate_directory_data``. Returns the
    collected error messages (empty when everything is consistent).
    """
    # Collect the set of directories that hold service/listing files.
    target_dirs: set[Path] = set()
    for pattern in ("*.json", "*.toml"):
        for file_path in data_dir.rglob(pattern):
            try:
                data, load_errors = self.load_data_file(file_path)
            except Exception:
                continue
            if load_errors or data is None:
                continue
            if data.get("schema") in ("service_v1", "listing_v1"):
                target_dirs.add(file_path.parent)

    # Validate each directory independently, collecting failures.
    failures: list[str] = []
    for directory in sorted(target_dirs):
        try:
            self.validate_directory_data(directory)
        except DataValidationError as exc:
            failures.append(str(exc))
    return failures
466
+
467
+
468
# CLI command
app = typer.Typer(help="Validate data files")  # Typer sub-application exposing the `validate` command
console = Console()  # shared Rich console used for colored CLI output
471
+
472
+
473
@app.command()
def validate(
    data_dir: Path | None = typer.Argument(
        None,
        help="Directory containing data files to validate (default: ./data or UNITYSVC_DATA_DIR env var)",
    ),
):
    """
    Validate data consistency in service and listing files.

    Checks:
    1. Service names are unique within each directory
    2. Listing files reference valid service names
    3. Multiple services in a directory require explicit service_name in listings
    """
    # Resolve the data directory: CLI argument, then env var, then ./data.
    if data_dir is None:
        env_dir = os.environ.get("UNITYSVC_DATA_DIR")
        data_dir = Path(env_dir) if env_dir else Path.cwd() / "data"

    if not data_dir.exists():
        console.print(f"[red]✗[/red] Data directory not found: {data_dir}")
        raise typer.Exit(1)

    console.print(f"[cyan]Validating data files in:[/cyan] {data_dir}")
    console.print()

    # Schemas are expected in a sibling 'schema' directory next to the data tree.
    validator = DataValidator(data_dir, data_dir.parent / "schema")
    validation_errors = validator.validate_all_service_directories(data_dir)

    if not validation_errors:
        console.print("[green]✓ All data files are valid![/green]")
        return

    console.print(f"[red]✗ Validation failed with {len(validation_errors)} error(s):[/red]")
    console.print()
    for index, message in enumerate(validation_errors, 1):
        console.print(f"[red]{index}.[/red] {message}")
    console.print()
    raise typer.Exit(1)
@@ -0,0 +1,172 @@
1
+ Metadata-Version: 2.4
2
+ Name: unitysvc-services
3
+ Version: 0.1.0
4
+ Summary: SDK for digital service providers on UnitySVC
5
+ Author-email: Bo Peng <bo.peng@unitysvc.com>
6
+ Maintainer-email: Bo Peng <bo.peng@unitysvc.com>
7
+ License: MIT
8
+ Project-URL: bugs, https://github.com/unitysvc/unitysvc-services/issues
9
+ Project-URL: changelog, https://github.com/unitysvc/unitysvc-services/blob/master/changelog.md
10
+ Project-URL: homepage, https://github.com/unitysvc/unitysvc-services
11
+ Requires-Python: >=3.11
12
+ Description-Content-Type: text/markdown
13
+ License-File: LICENSE
14
+ Requires-Dist: typer
15
+ Requires-Dist: pydantic
16
+ Requires-Dist: jsonschema
17
+ Requires-Dist: jinja2
18
+ Requires-Dist: rich
19
+ Requires-Dist: httpx
20
+ Requires-Dist: tomli-w
21
+ Provides-Extra: test
22
+ Requires-Dist: coverage; extra == "test"
23
+ Requires-Dist: pytest; extra == "test"
24
+ Requires-Dist: ruff; extra == "test"
25
+ Requires-Dist: mypy; extra == "test"
26
+ Requires-Dist: ipdb; extra == "test"
27
+ Provides-Extra: dev
28
+ Requires-Dist: coverage; extra == "dev"
29
+ Requires-Dist: pytest; extra == "dev"
30
+ Requires-Dist: ruff; extra == "dev"
31
+ Requires-Dist: ty; extra == "dev"
32
+ Requires-Dist: ipdb; extra == "dev"
33
+ Requires-Dist: mkdocs; extra == "dev"
34
+ Requires-Dist: mkdocs-material; extra == "dev"
35
+ Requires-Dist: mkdocs-autorefs; extra == "dev"
36
+ Provides-Extra: docs
37
+ Requires-Dist: mkdocs; extra == "docs"
38
+ Requires-Dist: mkdocs-material; extra == "docs"
39
+ Requires-Dist: mkdocs-autorefs; extra == "docs"
40
+ Dynamic: license-file
41
+
42
+ # UnitySVC Provider SDK
43
+
44
+ ![PyPI version](https://img.shields.io/pypi/v/unitysvc-services.svg)
45
+ [![Documentation Status](https://readthedocs.org/projects/unitysvc-services/badge/?version=latest)](https://unitysvc-services.readthedocs.io/en/latest/?version=latest)
46
+
47
+ Client library and CLI tools for digital service providers to interact with the UnitySVC platform.
48
+
49
+ **📚 [Full Documentation](https://unitysvc-services.readthedocs.io)** | **🚀 [Getting Started](https://unitysvc-services.readthedocs.io/en/latest/getting-started/)** | **📖 [CLI Reference](https://unitysvc-services.readthedocs.io/en/latest/cli-reference/)**
50
+
51
+ ## Overview
52
+
53
+ UnitySVC Provider SDK enables digital service providers to manage their service offerings through a **local-first, version-controlled workflow**:
54
+
55
+ - **Define** service data using schema-validated files (JSON/TOML)
56
+ - **Manage** everything locally in git-controlled directories
57
+ - **Validate** data against schemas before publishing
58
+ - **Publish** to UnitySVC platform when ready
59
+ - **Automate** with populate scripts for dynamic catalogs
60
+
61
+ ## Installation
62
+
63
+ ```bash
64
+ pip install unitysvc-services
65
+ ```
66
+
67
+ Requires Python 3.11+
68
+
69
+ ## Quick Example
70
+
71
+ ```bash
72
+ # Initialize provider and service
73
+ unitysvc_services init provider my-provider
74
+ unitysvc_services init offering my-service
75
+ unitysvc_services init seller my-marketplace
76
+
77
+ # Validate and format
78
+ unitysvc_services validate
79
+ unitysvc_services format
80
+
81
+ # Publish to platform
82
+ export UNITYSVC_BACKEND_URL="https://api.unitysvc.com/api/v1"
83
+ export UNITYSVC_API_KEY="your-api-key"
84
+
85
+ unitysvc_services publish providers
86
+ unitysvc_services publish sellers
87
+ unitysvc_services publish offerings
88
+ unitysvc_services publish listings
89
+
90
+ # Verify
91
+ unitysvc_services query offerings
92
+ ```
93
+
94
+ ## Key Features
95
+
96
+ - 📋 **Pydantic Models** - Type-safe data models for all entities
97
+ - ✅ **Data Validation** - Comprehensive schema validation
98
+ - 🔄 **Local-First** - Work offline, commit to git, publish when ready
99
+ - 🚀 **CLI Tools** - Complete command-line interface
100
+ - 🤖 **Automation** - Script-based service generation
101
+ - 📝 **Multiple Formats** - Support for JSON and TOML
102
+
103
+ ## Workflows
104
+
105
+ ### Manual Workflow (small catalogs)
106
+
107
+ ```bash
108
+ init → edit files → validate → format → publish → verify
109
+ ```
110
+
111
+ ### Automated Workflow (large/dynamic catalogs)
112
+
113
+ ```bash
114
+ init provider → configure populate script → populate → validate → publish
115
+ ```
116
+
117
+ See [Workflows Documentation](https://unitysvc-services.readthedocs.io/en/latest/workflows/) for details.
118
+
119
+ ## Data Structure
120
+
121
+ ```
122
+ data/
123
+ ├── seller.json # One seller per repo
124
+ ├── ${provider_name}/
125
+ │ ├── provider.json # Provider metadata
126
+ │ ├── docs/ # Shared documentation
127
+ │ └── services/
128
+ │ └── ${service_name}/
129
+ │ ├── service.json # Service offering
130
+ │ └── listing-*.json # Service listing(s)
131
+ ```
132
+
133
+ See [Data Structure Documentation](https://unitysvc-services.readthedocs.io/en/latest/data-structure/) for complete details.
134
+
135
+ ## CLI Commands
136
+
137
+ | Command | Description |
138
+ |---------|-------------|
139
+ | `init` | Initialize new data files from schemas |
140
+ | `list` | List local data files |
141
+ | `query` | Query backend API for published data |
142
+ | `publish` | Publish data to backend |
143
+ | `update` | Update local file fields |
144
+ | `validate` | Validate data consistency |
145
+ | `format` | Format data files |
146
+ | `populate` | Execute provider populate scripts |
147
+
148
+ Run `unitysvc_services --help` or see [CLI Reference](https://unitysvc-services.readthedocs.io/en/latest/cli-reference/) for complete documentation.
149
+
150
+ ## Documentation
151
+
152
+ - **[Getting Started](https://unitysvc-services.readthedocs.io/en/latest/getting-started/)** - Installation and first steps
153
+ - **[Data Structure](https://unitysvc-services.readthedocs.io/en/latest/data-structure/)** - File organization rules
154
+ - **[Workflows](https://unitysvc-services.readthedocs.io/en/latest/workflows/)** - Manual and automated patterns
155
+ - **[CLI Reference](https://unitysvc-services.readthedocs.io/en/latest/cli-reference/)** - All commands and options
156
+ - **[File Schemas](https://unitysvc-services.readthedocs.io/en/latest/file-schemas/)** - Schema specifications
157
+ - **[Python API](https://unitysvc-services.readthedocs.io/en/latest/api-reference/)** - Programmatic usage
158
+
159
+ ## Links
160
+
161
+ - **PyPI**: https://pypi.org/project/unitysvc-services/
162
+ - **Documentation**: https://unitysvc-services.readthedocs.io
163
+ - **Source Code**: https://github.com/unitysvc/unitysvc-services
164
+ - **Issue Tracker**: https://github.com/unitysvc/unitysvc-services/issues
165
+
166
+ ## License
167
+
168
+ MIT License - see [LICENSE](LICENSE) file for details.
169
+
170
+ ## Contributing
171
+
172
+ Contributions welcome! See [Contributing Guide](https://unitysvc-services.readthedocs.io/en/latest/contributing/) for details.