unitysvc-services 0.1.1 → 0.1.4 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,5 @@
  """Update command group - update local data files."""

- import os
  from pathlib import Path
  from typing import Any

@@ -46,7 +45,7 @@ def update_offering(
  None,
  "--data-dir",
  "-d",
- help="Directory containing data files (default: ./data or UNITYSVC_DATA_DIR env var)",
+ help="Directory containing data files (default: current directory)",
  ),
  ):
  """
@@ -83,11 +82,7 @@ def update_offering(

  # Set data directory
  if data_dir is None:
- data_dir_str = os.getenv("UNITYSVC_DATA_DIR")
- if data_dir_str:
- data_dir = Path(data_dir_str)
- else:
- data_dir = Path.cwd() / "data"
+ data_dir = Path.cwd()

  if not data_dir.is_absolute():
  data_dir = Path.cwd() / data_dir
@@ -181,7 +176,7 @@ def update_listing(
  None,
  "--data-dir",
  "-d",
- help="Directory containing data files (default: ./data or UNITYSVC_DATA_DIR env var)",
+ help="Directory containing data files (default: current directory)",
  ),
  ):
  """
@@ -227,11 +222,7 @@ def update_listing(

  # Set data directory
  if data_dir is None:
- data_dir_str = os.getenv("UNITYSVC_DATA_DIR")
- if data_dir_str:
- data_dir = Path(data_dir_str)
- else:
- data_dir = Path.cwd() / "data"
+ data_dir = Path.cwd()

  if not data_dir.is_absolute():
  data_dir = Path.cwd() / data_dir
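Both `update offering` and `update listing` hunks above drop the 0.1.1 fallback chain (`UNITYSVC_DATA_DIR`, then `./data`) in favor of the current working directory. A minimal sketch of the resulting default resolution; `resolve_data_dir` is a hypothetical helper name, not part of the package:

```python
from pathlib import Path


def resolve_data_dir(data_dir: Path | None) -> Path:
    """Resolve the data directory the way the 0.1.4 commands above do."""
    if data_dir is None:
        data_dir = Path.cwd()  # 0.1.1 consulted UNITYSVC_DATA_DIR, then ./data
    if not data_dir.is_absolute():
        data_dir = Path.cwd() / data_dir  # relative paths stay anchored at the CWD
    return data_dir
```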
@@ -56,9 +56,7 @@ def write_data_file(file_path: Path, data: dict[str, Any], format: str) -> None:


  @lru_cache(maxsize=128)
- def find_data_files(
- data_dir: Path, extensions: tuple[str, ...] | None = None
- ) -> list[Path]:
+ def find_data_files(data_dir: Path, extensions: tuple[str, ...] | None = None) -> list[Path]:
  """
  Find all data files in a directory with specified extensions.

@@ -208,9 +206,7 @@ def resolve_provider_name(file_path: Path) -> str | None:
  return None


- def resolve_service_name_for_listing(
- listing_file: Path, listing_data: dict[str, Any]
- ) -> str | None:
+ def resolve_service_name_for_listing(listing_file: Path, listing_data: dict[str, Any]) -> str | None:
  """
  Resolve the service name for a listing file.

@@ -1,7 +1,6 @@
  """Data validation module for unitysvc_services."""

  import json
- import os
  import re
  import tomllib as toml
  from pathlib import Path
@@ -60,16 +59,12 @@ class DataValidator:
  if "anyOf" in obj:
  any_of = obj["anyOf"]
  # Count non-null items for the check
- non_null_items = [
- item for item in any_of if item.get("type") != "null"
- ]
+ non_null_items = [item for item in any_of if item.get("type") != "null"]
  has_plain_string = any(
- item.get("type") == "string" and "format" not in item
- for item in non_null_items
+ item.get("type") == "string" and "format" not in item for item in non_null_items
  )
  has_uri_string = any(
- item.get("type") == "string" and item.get("format") == "uri"
- for item in non_null_items
+ item.get("type") == "string" and item.get("format") == "uri" for item in non_null_items
  )

  # Check for Union[str, HttpUrl] or Union[str, HttpUrl, None]
@@ -84,9 +79,7 @@ class DataValidator:

  # Check other schema structures
  for key, value in obj.items():
- if key not in ["properties", "anyOf"] and isinstance(
- value, dict | list
- ):
+ if key not in ["properties", "anyOf"] and isinstance(value, dict | list):
  traverse_schema(value, path)

  elif isinstance(obj, list):
@@ -96,9 +89,7 @@ class DataValidator:
  traverse_schema(schema)
  return union_fields

- def validate_file_references(
- self, data: dict[str, Any], file_path: Path, union_fields: set[str]
- ) -> list[str]:
+ def validate_file_references(self, data: dict[str, Any], file_path: Path, union_fields: set[str]) -> list[str]:
  """
  Validate that file references in Union[str, HttpUrl] fields exist.

@@ -120,9 +111,7 @@ class DataValidator:
  ):
  # Empty string is not a valid file reference
  if value == "":
- errors.append(
- f"Empty string in field '{new_path}' is not a valid file reference or URL"
- )
+ errors.append(f"Empty string in field '{new_path}' is not a valid file reference or URL")
  # It's a file reference, must be relative path
  elif Path(value).is_absolute():
  errors.append(
@@ -172,9 +161,7 @@ class DataValidator:
  check_field(data, str(file_path))
  return errors

- def validate_name_consistency(
- self, data: dict[str, Any], file_path: Path, schema_name: str
- ) -> list[str]:
+ def validate_name_consistency(self, data: dict[str, Any], file_path: Path, schema_name: str) -> list[str]:
  """Validate that the name field matches the directory name."""
  errors: list[str] = []

@@ -199,9 +186,7 @@ class DataValidator:
  elif file_path.name in ["service.json", "service.toml"]:
  # For service.json, the service directory should match the service name
  service_directory_name = file_path.parent.name
- if self._normalize_name(name_value) != self._normalize_name(
- service_directory_name
- ):
+ if self._normalize_name(name_value) != self._normalize_name(service_directory_name):
  normalized_name = self._normalize_name(name_value)
  errors.append(
  f"Service name '{name_value}' does not match "
@@ -220,9 +205,55 @@ class DataValidator:
  normalized = normalized.strip("-")
  return normalized

- def load_data_file(
- self, file_path: Path
- ) -> tuple[dict[str, Any] | None, list[str]]:
+ def validate_with_pydantic_model(self, data: dict[str, Any], schema_name: str) -> list[str]:
+ """
+ Validate data using Pydantic models for additional validation rules.
+
+ This complements JSON schema validation with Pydantic field validators
+ like name format validation.
+
+ Args:
+ data: The data to validate
+ schema_name: The schema name (e.g., 'provider_v1', 'seller_v1')
+
+ Returns:
+ List of validation error messages
+ """
+ from pydantic import BaseModel
+
+ from unitysvc_services.models import ListingV1, ProviderV1, SellerV1, ServiceV1
+
+ errors: list[str] = []
+
+ # Map schema names to Pydantic model classes
+ model_map: dict[str, type[BaseModel]] = {
+ "provider_v1": ProviderV1,
+ "seller_v1": SellerV1,
+ "service_v1": ServiceV1,
+ "listing_v1": ListingV1,
+ }
+
+ if schema_name not in model_map:
+ return errors # No Pydantic model for this schema
+
+ model_class = model_map[schema_name]
+
+ try:
+ # Validate using the Pydantic model
+ model_class.model_validate(data)
+
+ except Exception as e:
+ # Extract meaningful error message from Pydantic ValidationError
+ error_msg = str(e)
+ # Pydantic errors can be verbose, try to extract just the relevant part
+ if "validation error" in error_msg.lower():
+ errors.append(f"Pydantic validation error: {error_msg}")
+ else:
+ errors.append(error_msg)
+
+ return errors
+
+ def load_data_file(self, file_path: Path) -> tuple[dict[str, Any] | None, list[str]]:
  """Load data from JSON or TOML file."""
  errors: list[str] = []

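The `validate_with_pydantic_model` method added above maps each schema name to its Pydantic model and converts `model_validate` failures into plain error strings. A rough standalone sketch of that pattern, using a hypothetical stand-in model rather than the real classes from `unitysvc_services.models`:

```python
from pydantic import BaseModel, ValidationError


class ProviderV1(BaseModel):
    """Stand-in model; the real ProviderV1 lives in unitysvc_services.models."""

    name: str


def pydantic_errors(data: dict, schema_name: str) -> list[str]:
    # Mirror the schema-name -> model lookup used by validate_with_pydantic_model.
    model_map: dict[str, type[BaseModel]] = {"provider_v1": ProviderV1}
    model = model_map.get(schema_name)
    if model is None:
        return []  # no Pydantic model registered for this schema
    try:
        model.model_validate(data)  # field validators run on top of JSON-schema checks
        return []
    except ValidationError as exc:
        return [f"Pydantic validation error: {exc}"]


print(pydantic_errors({}, "provider_v1"))  # missing 'name' -> one error message
```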
@@ -237,9 +268,7 @@ class DataValidator:
  return None, [f"Unsupported file format: {file_path.suffix}"]
  return data, errors
  except Exception as e:
- format_name = {".json": "JSON", ".toml": "TOML"}.get(
- file_path.suffix, "data"
- )
+ format_name = {".json": "JSON", ".toml": "TOML"}.get(file_path.suffix, "data")
  return None, [f"Failed to parse {format_name}: {e}"]

  def validate_data_file(self, file_path: Path) -> tuple[bool, list[str]]:
@@ -268,20 +297,20 @@ class DataValidator:

  # Validate against schema with format checking enabled
  try:
- validator = Draft7Validator(
- schema, format_checker=Draft7Validator.FORMAT_CHECKER
- )
+ validator = Draft7Validator(schema, format_checker=Draft7Validator.FORMAT_CHECKER)
  validator.check_schema(schema) # Validate the schema itself
  validation_errors = list(validator.iter_errors(data))
  for error in validation_errors:
  errors.append(f"Schema validation error: {error.message}")
  if error.absolute_path:
- errors.append(
- f" Path: {'.'.join(str(p) for p in error.absolute_path)}"
- )
+ errors.append(f" Path: {'.'.join(str(p) for p in error.absolute_path)}")
  except Exception as e:
  errors.append(f"Validation error: {e}")

+ # Also validate using Pydantic models for additional validation rules
+ pydantic_errors = self.validate_with_pydantic_model(data, schema_name)
+ errors.extend(pydantic_errors)
+
  # Find Union[str, HttpUrl] fields and validate file references
  union_fields = self.find_union_fields(schema)
  file_ref_errors = self.validate_file_references(data, file_path, union_fields)
@@ -331,6 +360,10 @@ class DataValidator:

  # Find all data files with seller_v1 schema
  for file_path in self.data_dir.rglob("*"):
+ # Skip hidden directories (those starting with .)
+ if any(part.startswith(".") for part in file_path.parts):
+ continue
+
  if file_path.is_file() and file_path.suffix in [".json", ".toml"]:
  try:
  data, load_errors = self.load_data_file(file_path)
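The dot-prefix guard added above recurs at several traversal sites in this release (the provider and seller scans and the directory-structure walk further down). In isolation the filter is just a check over `Path.parts`; `is_hidden` is our name for it:

```python
from pathlib import Path


def is_hidden(path: Path) -> bool:
    """True if any path component starts with '.', e.g. data/.git/config."""
    return any(part.startswith(".") for part in path.parts)


data_dir = Path("data")  # hypothetical search root
visible = [p for p in data_dir.rglob("*") if p.is_file() and not is_hidden(p)]
```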
@@ -346,9 +379,7 @@ class DataValidator:
  "No seller file found. Each repository must have exactly one data file using the 'seller_v1' schema."
  )
  elif len(seller_files) > 1:
- errors.append(
- f"Found {len(seller_files)} seller files, but only one is allowed per repository:"
- )
+ errors.append(f"Found {len(seller_files)} seller files, but only one is allowed per repository:")
  for seller_file in seller_files:
  errors.append(f" - {seller_file}")

@@ -366,20 +397,17 @@ class DataValidator:

  warnings: list[str] = []

- # Find all provider files
- provider_files = list(self.data_dir.glob("*/provider.*"))
+ # Find all provider files (skip hidden directories)
+ provider_files = [
+ f for f in self.data_dir.glob("*/provider.*") if not any(part.startswith(".") for part in f.parts)
+ ]

  for provider_file in provider_files:
  try:
- # Load provider data
- data = {}
- if provider_file.suffix == ".json":
- with open(provider_file, encoding="utf-8") as f:
- data = json.load(f)
- elif provider_file.suffix == ".toml":
- with open(provider_file, "rb") as f:
- data = toml.load(f)
- else:
+ # Load provider data using existing helper method
+ data, load_errors = self.load_data_file(provider_file)
+ if load_errors or data is None:
+ warnings.append(f"Failed to load provider file {provider_file}: {load_errors}")
  continue

  # Parse as ProviderV1
@@ -400,9 +428,7 @@ class DataValidator:
  )

  except Exception as e:
- warnings.append(
- f"Error checking provider status in {provider_file}: {e}"
- )
+ warnings.append(f"Error checking provider status in {provider_file}: {e}")

  # Return True (valid) but with warnings
  return True, warnings
@@ -418,20 +444,15 @@ class DataValidator:

  warnings: list[str] = []

- # Find all seller files
- seller_files = list(self.data_dir.glob("seller.*"))
+ # Find all seller files (skip hidden files)
+ seller_files = [f for f in self.data_dir.glob("seller.*") if not f.name.startswith(".")]

  for seller_file in seller_files:
  try:
- # Load seller data
- data = {}
- if seller_file.suffix == ".json":
- with open(seller_file, encoding="utf-8") as f:
- data = json.load(f)
- elif seller_file.suffix == ".toml":
- with open(seller_file, "rb") as f:
- data = toml.load(f)
- else:
+ # Load seller data using existing helper method
+ data, load_errors = self.load_data_file(seller_file)
+ if load_errors or data is None:
+ warnings.append(f"Failed to load seller file {seller_file}: {load_errors}")
  continue

  # Parse as SellerV1
@@ -475,8 +496,12 @@ class DataValidator:
  provider_warnings,
  ) # Warnings, not errors

- # Find all data and MD files recursively
+ # Find all data and MD files recursively, skipping hidden directories
  for file_path in self.data_dir.rglob("*"):
+ # Skip hidden directories (those starting with .)
+ if any(part.startswith(".") for part in file_path.parts):
+ continue
+
  if file_path.is_file() and file_path.suffix in [".json", ".toml", ".md"]:
  relative_path = file_path.relative_to(self.data_dir)

@@ -525,9 +550,7 @@ class DataValidator:
  if schema == "service_v1":
  service_name = data.get("name")
  if not service_name:
- raise DataValidationError(
- f"Service file {file_path} missing 'name' field"
- )
+ raise DataValidationError(f"Service file {file_path} missing 'name' field")

  # Check for duplicate service names in same directory
  if service_name in services:
@@ -555,9 +578,7 @@ class DataValidator:
  if service_name:
  # If service_name is explicitly defined, it must match a service in the directory
  if service_name not in services:
- available_services = (
- ", ".join(services.keys()) if services else "none"
- )
+ available_services = ", ".join(services.keys()) if services else "none"
  raise DataValidationError(
  f"Listing file {listing_file} references service_name '{service_name}' "
  f"which does not exist in the same directory.\n"
@@ -591,6 +612,10 @@ class DataValidator:

  for pattern in ["*.json", "*.toml"]:
  for file_path in data_dir.rglob(pattern):
+ # Skip hidden directories (those starting with .)
+ if any(part.startswith(".") for part in file_path.parts):
+ continue
+
  try:
  data, load_errors = self.load_data_file(file_path)
  if load_errors or data is None:
@@ -621,7 +646,7 @@ console = Console()
  def validate(
  data_dir: Path | None = typer.Argument(
  None,
- help="Directory containing data files to validate (default: ./data or UNITYSVC_DATA_DIR env var)",
+ help="Directory containing data files to validate (default: current directory)",
  ),
  ):
  """
@@ -634,11 +659,7 @@ def validate(
  """
  # Determine data directory
  if data_dir is None:
- data_dir_str = os.environ.get("UNITYSVC_DATA_DIR")
- if data_dir_str:
- data_dir = Path(data_dir_str)
- else:
- data_dir = Path.cwd() / "data"
+ data_dir = Path.cwd()

  if not data_dir.exists():
  console.print(f"[red]✗[/red] Data directory not found: {data_dir}")
@@ -668,9 +689,7 @@ def validate(
  validation_errors.extend(directory_errors)

  if validation_errors:
- console.print(
- f"[red]✗ Validation failed with {len(validation_errors)} error(s):[/red]"
- )
+ console.print(f"[red]✗ Validation failed with {len(validation_errors)} error(s):[/red]")
  console.print()
  for i, error in enumerate(validation_errors, 1):
  console.print(f"[red]{i}.[/red] {error}")
@@ -1,10 +1,10 @@
  Metadata-Version: 2.4
  Name: unitysvc-services
- Version: 0.1.1
+ Version: 0.1.4
  Summary: SDK for digital service providers on UnitySVC
  Author-email: Bo Peng <bo.peng@unitysvc.com>
  Maintainer-email: Bo Peng <bo.peng@unitysvc.com>
- License: MIT
+ License-Expression: MIT
  Project-URL: bugs, https://github.com/unitysvc/unitysvc-services/issues
  Project-URL: changelog, https://github.com/unitysvc/unitysvc-services/blob/master/changelog.md
  Project-URL: homepage, https://github.com/unitysvc/unitysvc-services
@@ -53,11 +53,11 @@ Client library and CLI tools for digital service providers to interact with the

  UnitySVC Provider SDK enables digital service providers to manage their service offerings through a **local-first, version-controlled workflow**:

- - **Define** service data using schema-validated files (JSON/TOML)
- - **Manage** everything locally in git-controlled directories
- - **Validate** data against schemas before publishing
- - **Publish** to UnitySVC platform when ready
- - **Automate** with populate scripts for dynamic catalogs
+ - **Define** service data using schema-validated files (JSON/TOML)
+ - **Manage** everything locally in git-controlled directories
+ - **Validate** data against schemas before publishing
+ - **Publish** to UnitySVC platform when ready
+ - **Automate** with populate scripts for dynamic catalogs

  ## Installation

@@ -79,27 +79,29 @@ unitysvc_services init seller my-marketplace
  unitysvc_services validate
  unitysvc_services format

- # Publish to platform
- export UNITYSVC_BACKEND_URL="https://api.unitysvc.com/api/v1"
+ # Publish to platform (publishes all: sellers, providers, offerings, listings)
+ export UNITYSVC_BASE_URL="https://api.unitysvc.com/api/v1"
  export UNITYSVC_API_KEY="your-api-key"
+ unitysvc_services publish

+ # Or publish specific types only
  unitysvc_services publish providers
- unitysvc_services publish sellers
- unitysvc_services publish offerings
- unitysvc_services publish listings

- # Verify
+ # Verify with default fields
  unitysvc_services query offerings
+
+ # Query with custom fields
+ unitysvc_services query providers --fields id,name,contact_email
  ```

  ## Key Features

- - 📋 **Pydantic Models** - Type-safe data models for all entities
- - ✅ **Data Validation** - Comprehensive schema validation
- - 🔄 **Local-First** - Work offline, commit to git, publish when ready
- - 🚀 **CLI Tools** - Complete command-line interface
- - 🤖 **Automation** - Script-based service generation
- - 📝 **Multiple Formats** - Support for JSON and TOML
+ - 📋 **Pydantic Models** - Type-safe data models for all entities
+ - ✅ **Data Validation** - Comprehensive schema validation
+ - 🔄 **Local-First** - Work offline, commit to git, publish when ready
+ - 🚀 **CLI Tools** - Complete command-line interface
+ - 🤖 **Automation** - Script-based service generation
+ - 📝 **Multiple Formats** - Support for JSON and TOML

  ## Workflows

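Two CLI-facing details from the quick start above: a bare `unitysvc_services publish` now pushes every entity type, and the documented base-URL variable is `UNITYSVC_BASE_URL` (0.1.1 documented `UNITYSVC_BACKEND_URL`). A minimal sketch of reading that configuration from the environment; the default URL is simply the value shown in the quick start:

```python
import os

# Environment variable names as documented in the 0.1.4 quick start.
base_url = os.environ.get("UNITYSVC_BASE_URL", "https://api.unitysvc.com/api/v1")
api_key = os.environ.get("UNITYSVC_API_KEY")  # set before running the publish commands
```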
@@ -135,34 +137,34 @@ See [Data Structure Documentation](https://unitysvc-services.readthedocs.io/en/l

  ## CLI Commands

- | Command | Description |
- |---------|-------------|
- | `init` | Initialize new data files from schemas |
- | `list` | List local data files |
- | `query` | Query backend API for published data |
- | `publish` | Publish data to backend |
- | `update` | Update local file fields |
- | `validate` | Validate data consistency |
- | `format` | Format data files |
- | `populate` | Execute provider populate scripts |
+ | Command | Description |
+ | ---------- | -------------------------------------- |
+ | `init` | Initialize new data files from schemas |
+ | `list` | List local data files |
+ | `query` | Query backend API for published data |
+ | `publish` | Publish data to backend |
+ | `update` | Update local file fields |
+ | `validate` | Validate data consistency |
+ | `format` | Format data files |
+ | `populate` | Execute provider populate scripts |

  Run `unitysvc_services --help` or see [CLI Reference](https://unitysvc-services.readthedocs.io/en/latest/cli-reference/) for complete documentation.

  ## Documentation

- - **[Getting Started](https://unitysvc-services.readthedocs.io/en/latest/getting-started/)** - Installation and first steps
- - **[Data Structure](https://unitysvc-services.readthedocs.io/en/latest/data-structure/)** - File organization rules
- - **[Workflows](https://unitysvc-services.readthedocs.io/en/latest/workflows/)** - Manual and automated patterns
- - **[CLI Reference](https://unitysvc-services.readthedocs.io/en/latest/cli-reference/)** - All commands and options
- - **[File Schemas](https://unitysvc-services.readthedocs.io/en/latest/file-schemas/)** - Schema specifications
- - **[Python API](https://unitysvc-services.readthedocs.io/en/latest/api-reference/)** - Programmatic usage
+ - **[Getting Started](https://unitysvc-services.readthedocs.io/en/latest/getting-started/)** - Installation and first steps
+ - **[Data Structure](https://unitysvc-services.readthedocs.io/en/latest/data-structure/)** - File organization rules
+ - **[Workflows](https://unitysvc-services.readthedocs.io/en/latest/workflows/)** - Manual and automated patterns
+ - **[CLI Reference](https://unitysvc-services.readthedocs.io/en/latest/cli-reference/)** - All commands and options
+ - **[File Schemas](https://unitysvc-services.readthedocs.io/en/latest/file-schemas/)** - Schema specifications
+ - **[Python API](https://unitysvc-services.readthedocs.io/en/latest/api-reference/)** - Programmatic usage

  ## Links

- - **PyPI**: https://pypi.org/project/unitysvc-services/
- - **Documentation**: https://unitysvc-services.readthedocs.io
- - **Source Code**: https://github.com/unitysvc/unitysvc-services
- - **Issue Tracker**: https://github.com/unitysvc/unitysvc-services/issues
+ - **PyPI**: https://pypi.org/project/unitysvc-services/
+ - **Documentation**: https://unitysvc-services.readthedocs.io
+ - **Source Code**: https://github.com/unitysvc/unitysvc-services
+ - **Issue Tracker**: https://github.com/unitysvc/unitysvc-services/issues

  ## License

@@ -0,0 +1,25 @@
+ unitysvc_services/__init__.py,sha256=J6F3RlZCJUVjhZoprfbrYCxe3l9ynQQbGO7pf7FyqlM,110
+ unitysvc_services/api.py,sha256=FKIid1gUJcEcN_4P9d5-SgmJfW73WHBg5wXsVHeqNHQ,9888
+ unitysvc_services/cli.py,sha256=OK0IZyAckxP15jRWU_W49hl3t7XcNRtd8BoDMyRKqNM,682
+ unitysvc_services/format_data.py,sha256=Jl9Vj3fRX852fHSUa5DzO-oiFQwuQHC3WMCDNIlo1Lc,5460
+ unitysvc_services/list.py,sha256=QDp9BByaoeFeJxXJN9RQ-jU99mH9Guq9ampfXCbpZmI,7033
+ unitysvc_services/populate.py,sha256=zkcjIy8BWuQSO7JwiRNHKgGoxQvc3ujluUQdYixdBvY,6626
+ unitysvc_services/publisher.py,sha256=dkufYcuBJ0dDoTQJm7BMOug_Pr2RyXMVI8nvZjy-zdM,50420
+ unitysvc_services/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ unitysvc_services/query.py,sha256=2Rn6gioAN3W6KIumVzCpSJXzhJLJUDLkDla_GVwNx9I,24793
+ unitysvc_services/scaffold.py,sha256=Y73IX8vskImxSvxDgR0mvEFuAMYnBKfttn3bjcz3jmQ,40331
+ unitysvc_services/update.py,sha256=K9swocTUnqqiSgARo6GmuzTzUySSpyqqPPW4xF7ZU-g,9659
+ unitysvc_services/utils.py,sha256=GN0gkVTU8fOx2G0EbqnWmx8w9eFsoPfRprPjwCyPYkE,11371
+ unitysvc_services/validator.py,sha256=VAII5mu_Jdyr96v4nwXzihsoAj7DJiXN6LjhL8lGGUo,29054
+ unitysvc_services/models/__init__.py,sha256=hJCc2KSZmIHlKWKE6GpLGdeVB6LIpyVUKiOKnwmKvCs,200
+ unitysvc_services/models/base.py,sha256=3FdlR-_tBOFC2JbVNFNQA4-D1Lhlo5UZQh1QDgKnS_I,18293
+ unitysvc_services/models/listing_v1.py,sha256=PPb9hIdWQp80AWKLxFXYBDcWXzNcDrO4v6rqt5_i2qo,3083
+ unitysvc_services/models/provider_v1.py,sha256=76EK1i0hVtdx_awb00-ZMtSj4Oc9Zp4xZ-DeXmG3iTY,2701
+ unitysvc_services/models/seller_v1.py,sha256=oll2ZZBPBDX8wslHrbsCKf_jIqHNte2VEj5RJ9bawR4,3520
+ unitysvc_services/models/service_v1.py,sha256=Xpk-K-95M1LRqYM8nNJcll8t-lsW9Xdi2_bVbYNs8-M,3019
+ unitysvc_services-0.1.4.dist-info/licenses/LICENSE,sha256=_p8V6A8OMPu2HIztn3O01v0-urZFwk0Dd3Yk_PTIlL8,1065
+ unitysvc_services-0.1.4.dist-info/METADATA,sha256=7LiJhVwEw0fL72bOWwX7kU5KjQ-PDmt23CD4FgEI8YU,6628
+ unitysvc_services-0.1.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ unitysvc_services-0.1.4.dist-info/entry_points.txt,sha256=-vodnbPmo7QQmFu8jdG6sCyGRVM727w9Nhwp4Vwau_k,64
+ unitysvc_services-0.1.4.dist-info/top_level.txt,sha256=GIotQj-Ro2ruR7eupM1r58PWqIHTAq647ORL7E2kneo,18
+ unitysvc_services-0.1.4.dist-info/RECORD,,
@@ -1,23 +0,0 @@
- unitysvc_services/__init__.py,sha256=J6F3RlZCJUVjhZoprfbrYCxe3l9ynQQbGO7pf7FyqlM,110
- unitysvc_services/cli.py,sha256=OK0IZyAckxP15jRWU_W49hl3t7XcNRtd8BoDMyRKqNM,682
- unitysvc_services/format_data.py,sha256=kwY8BlEY8rC5bd9M15Xo9KgecCik56v7JCPusQDupbE,5636
- unitysvc_services/list.py,sha256=jbiDfz__pmWTGIFMDXqcIMkc1zVNHufa9Fy3TtURQ6c,7823
- unitysvc_services/populate.py,sha256=yBzYxccurSMAssKp9aMDhVE9jt3bFuA-KKiHtG8vaCM,6792
- unitysvc_services/publisher.py,sha256=o1ozRuJ9u7ZgJy4tPMID5TI8_1zlNmxE0A_prznfZ30,34230
- unitysvc_services/query.py,sha256=JIRtPsKkG_ZEkD7wfbyRSQ_l5Yr4E6wpMV3M5RqKbak,18159
- unitysvc_services/scaffold.py,sha256=Y73IX8vskImxSvxDgR0mvEFuAMYnBKfttn3bjcz3jmQ,40331
- unitysvc_services/update.py,sha256=8oktOTreN_3tQ8P2La_rwoBbK71ZHgnPkbY2nQIQTk4,10001
- unitysvc_services/utils.py,sha256=0VwhfWjmJi0J6i5l8VUJz1p3cEOqUciJ0PSlK1Xk1Z8,11383
- unitysvc_services/validator.py,sha256=02W8KfDuKqVkGAwG3Rm9HQ3Uzp3i4bjwzdaMLKojQEY,27543
- unitysvc_services/models/__init__.py,sha256=hJCc2KSZmIHlKWKE6GpLGdeVB6LIpyVUKiOKnwmKvCs,200
- unitysvc_services/models/base.py,sha256=ZIfZYp8-tcauzawixy21vZzkzgC4LEtZDhulS4cT66k,13649
- unitysvc_services/models/listing_v1.py,sha256=xLnwmqbGHawzxPrKN_idAHXmnaFRzpD4ZZ5qLNHHGeM,2347
- unitysvc_services/models/provider_v1.py,sha256=mOC0zs_X7DNamfGNuSSCDR-iwIbAGNpq8kkDcqFgq38,2078
- unitysvc_services/models/seller_v1.py,sha256=mcVQBD_HwX2nOe51m_WscmlN0j748GWq0oo-xQXo5o0,3291
- unitysvc_services/models/service_v1.py,sha256=u16zqM3khrJoTw_v0d45tMcKXjko5k_v3w8xwUtZ6nM,2720
- unitysvc_services-0.1.1.dist-info/licenses/LICENSE,sha256=_p8V6A8OMPu2HIztn3O01v0-urZFwk0Dd3Yk_PTIlL8,1065
- unitysvc_services-0.1.1.dist-info/METADATA,sha256=Gi2GHMfG7Gesnu4o8WKsoxBisqbhbHYU-qJ5Ty3x4QA,6291
- unitysvc_services-0.1.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- unitysvc_services-0.1.1.dist-info/entry_points.txt,sha256=-vodnbPmo7QQmFu8jdG6sCyGRVM727w9Nhwp4Vwau_k,64
- unitysvc_services-0.1.1.dist-info/top_level.txt,sha256=GIotQj-Ro2ruR7eupM1r58PWqIHTAq647ORL7E2kneo,18
- unitysvc_services-0.1.1.dist-info/RECORD,,