unitysvc-services 0.1.0__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -31,7 +31,9 @@ class ServiceDataQuery:
                 "Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var."
             )
         if not api_key:
-            raise ValueError("API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.")
+            raise ValueError(
+                "API key not provided. Use --api-key or set UNITYSVC_API_KEY env var."
+            )
 
         self.base_url = base_url.rstrip("/")
         self.api_key = api_key
@@ -45,28 +47,28 @@ class ServiceDataQuery:
 
     def list_service_offerings(self) -> list[dict[str, Any]]:
         """List all service offerings from the backend."""
-        response = self.client.get(f"{self.base_url}/publish/service_offering")
+        response = self.client.get(f"{self.base_url}/publish/service_offerings")
         response.raise_for_status()
         result = response.json()
         return result.get("data", result) if isinstance(result, dict) else result
 
     def list_service_listings(self) -> list[dict[str, Any]]:
         """List all service listings from the backend."""
-        response = self.client.get(f"{self.base_url}/services/")
+        response = self.client.get(f"{self.base_url}/publish/services")
         response.raise_for_status()
         result = response.json()
         return result.get("data", result) if isinstance(result, dict) else result
 
     def list_providers(self) -> list[dict[str, Any]]:
         """List all providers from the backend."""
-        response = self.client.get(f"{self.base_url}/providers/")
+        response = self.client.get(f"{self.base_url}/publish/providers")
         response.raise_for_status()
         result = response.json()
         return result.get("data", result) if isinstance(result, dict) else result
 
     def list_sellers(self) -> list[dict[str, Any]]:
         """List all sellers from the backend."""
-        response = self.client.get(f"{self.base_url}/sellers/")
+        response = self.client.get(f"{self.base_url}/publish/sellers")
         response.raise_for_status()
         result = response.json()
         return result.get("data", result) if isinstance(result, dict) else result
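
Note: all four list methods share one response-unwrapping idiom, `result.get("data", result) if isinstance(result, dict) else result`, which accepts either a bare JSON array or an envelope object with a `data` key. A minimal sketch of that idiom in isolation (the function name `unwrap_response` is ours, for illustration only):

    from typing import Any

    def unwrap_response(result: Any) -> Any:
        # Accept either {"data": [...]} or a bare [...] payload.
        if isinstance(result, dict):
            return result.get("data", result)
        return result

    assert unwrap_response({"data": [{"id": 1}]}) == [{"id": 1}]
    assert unwrap_response([{"id": 2}]) == [{"id": 2}]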
@@ -281,12 +283,16 @@ def query_offerings(
             )
 
         console.print(table)
-        console.print(f"\n[green]Total:[/green] {len(offerings)} service offering(s)")
+        console.print(
+            f"\n[green]Total:[/green] {len(offerings)} service offering(s)"
+        )
     except ValueError as e:
         console.print(f"[red]✗[/red] {e}", style="bold red")
         raise typer.Exit(code=1)
     except Exception as e:
-        console.print(f"[red]✗[/red] Failed to query service offerings: {e}", style="bold red")
+        console.print(
+            f"[red]✗[/red] Failed to query service offerings: {e}", style="bold red"
+        )
         raise typer.Exit(code=1)
 
 
@@ -330,7 +336,9 @@ def query_listings(
         table.add_column("Interfaces")
 
         for listing in listings:
-            interfaces_count = len(listing.get("user_access_interfaces", []))
+            interfaces_count = len(
+                listing.get("user_access_interfaces", [])
+            )
             table.add_row(
                 str(listing.get("id", "N/A")),
                 str(listing.get("service_id", "N/A")),
@@ -340,12 +348,16 @@ def query_listings(
             )
 
         console.print(table)
-        console.print(f"\n[green]Total:[/green] {len(listings)} service listing(s)")
+        console.print(
+            f"\n[green]Total:[/green] {len(listings)} service listing(s)"
+        )
     except ValueError as e:
         console.print(f"[red]✗[/red] {e}", style="bold red")
         raise typer.Exit(code=1)
     except Exception as e:
-        console.print(f"[red]✗[/red] Failed to query service listings: {e}", style="bold red")
+        console.print(
+            f"[red]✗[/red] Failed to query service listings: {e}", style="bold red"
+        )
         raise typer.Exit(code=1)
 
 
@@ -401,12 +413,16 @@ def query_interfaces(
             )
 
         console.print(table)
-        console.print(f"\n[green]Total:[/green] {data.get('count', 0)} access interface(s)")
+        console.print(
+            f"\n[green]Total:[/green] {data.get('count', 0)} access interface(s)"
+        )
     except ValueError as e:
         console.print(f"[red]✗[/red] {e}", style="bold red")
         raise typer.Exit(code=1)
     except Exception as e:
-        console.print(f"[red]✗[/red] Failed to query access interfaces: {e}", style="bold red")
+        console.print(
+            f"[red]✗[/red] Failed to query access interfaces: {e}", style="bold red"
+        )
         raise typer.Exit(code=1)
 
 
@@ -462,7 +478,9 @@ def query_documents(
             )
 
         console.print(table)
-        console.print(f"\n[green]Total:[/green] {data.get('count', 0)} document(s)")
+        console.print(
+            f"\n[green]Total:[/green] {data.get('count', 0)} document(s)"
+        )
     except ValueError as e:
         console.print(f"[red]✗[/red] {e}", style="bold red")
         raise typer.Exit(code=1)
@@ -251,8 +251,13 @@ def update_listing(
     if seller_name:
         field_filter["seller_name"] = seller_name
 
+    # Convert field_filter dict to tuple for caching
+    field_filter_tuple = tuple(sorted(field_filter.items())) if field_filter else None
+
     # Find listing files matching criteria
-    listing_files = find_files_by_schema(data_dir, "listing_v1", path_filter=service_name, field_filter=field_filter)
+    listing_files = find_files_by_schema(
+        data_dir, "listing_v1", path_filter=service_name, field_filter=field_filter_tuple
+    )
 
     if not listing_files:
         console.print(
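
Note: the new `field_filter_tuple` exists because `find_files_by_schema` is now wrapped in `@lru_cache` (see the hunks below). Cache keys must be hashable, and a `dict` is not; sorting before converting also makes the key independent of insertion order. A small sketch of the round-trip (the filter values are illustrative):

    field_filter = {"seller_name": "acme", "service_name": "svc"}

    # Dicts are unhashable and cannot be lru_cache arguments; a sorted
    # tuple of (key, value) pairs is hashable and order-independent.
    field_filter_tuple = tuple(sorted(field_filter.items()))

    # The cached function converts the tuple back to a dict internally.
    assert dict(field_filter_tuple) == field_filter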
@@ -2,6 +2,7 @@
 
 import json
 import tomllib
+from functools import lru_cache
 from pathlib import Path
 from typing import Any
 
@@ -54,23 +55,27 @@ def write_data_file(file_path: Path, data: dict[str, Any], format: str) -> None:
         raise ValueError(f"Unsupported format: {format}")
 
 
-def find_data_files(data_dir: Path, extensions: list[str] | None = None) -> list[Path]:
+@lru_cache(maxsize=128)
+def find_data_files(
+    data_dir: Path, extensions: tuple[str, ...] | None = None
+) -> list[Path]:
     """
     Find all data files in a directory with specified extensions.
 
     Args:
         data_dir: Directory to search
-        extensions: List of extensions to search for (default: ["json", "toml"])
+        extensions: Tuple of extensions to search for (default: ("json", "toml"))
 
     Returns:
         List of Path objects for matching files
     """
     if extensions is None:
-        extensions = ["json", "toml"]
+        extensions = ("json", "toml")
 
     data_files: list[Path] = []
     for ext in extensions:
         data_files.extend(data_dir.rglob(f"*.{ext}"))
+
     return data_files
 
 
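Note: the `@lru_cache` decorator is also why the `extensions` default changed from a list to a tuple: `lru_cache` keys each call by its arguments, and an unhashable argument such as a list raises `TypeError` at call time. A toy demonstration of that constraint (not code from the package):

    from functools import lru_cache

    @lru_cache(maxsize=2)
    def scan(extensions: tuple[str, ...]) -> list[str]:
        return [f"*.{ext}" for ext in extensions]

    scan(("json", "toml"))    # fine: the tuple is a valid cache key
    # scan(["json", "toml"])  # TypeError: unhashable type: 'list'

One side effect worth noting (our observation, not from the package): the cached `list[Path]` is returned by reference, so within a single process a repeated call will not pick up files created after the first scan.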
@@ -103,11 +108,12 @@ def find_file_by_schema_and_name(
     return None
 
 
+@lru_cache(maxsize=256)
 def find_files_by_schema(
     data_dir: Path,
     schema: str,
     path_filter: str | None = None,
-    field_filter: dict[str, Any] | None = None,
+    field_filter: tuple[tuple[str, Any], ...] | None = None,
 ) -> list[tuple[Path, str, dict[str, Any]]]:
     """
     Find all data files matching a schema with optional filters.
@@ -116,7 +122,7 @@ def find_files_by_schema(
         data_dir: Directory to search
         schema: Schema identifier (e.g., "service_v1", "listing_v1")
         path_filter: Optional string that must be in the file path
-        field_filter: Optional dict of field:value pairs to filter by
+        field_filter: Optional tuple of (key, value) pairs to filter by
 
     Returns:
         List of tuples (file_path, format, data) for matching files
@@ -124,6 +130,9 @@ def find_files_by_schema(
     data_files = find_data_files(data_dir)
     matching_files: list[tuple[Path, str, dict[str, Any]]] = []
 
+    # Convert field_filter tuple back to dict for filtering
+    field_filter_dict = dict(field_filter) if field_filter else None
+
     for data_file in data_files:
         try:
             # Apply path filter
@@ -137,8 +146,8 @@
                 continue
 
             # Apply field filters
-            if field_filter:
-                if not all(data.get(k) == v for k, v in field_filter.items()):
+            if field_filter_dict:
+                if not all(data.get(k) == v for k, v in field_filter_dict.items()):
                     continue
 
             matching_files.append((data_file, file_format, data))
@@ -199,7 +208,9 @@ def resolve_provider_name(file_path: Path) -> str | None:
     return None
 
 
-def resolve_service_name_for_listing(listing_file: Path, listing_data: dict[str, Any]) -> str | None:
+def resolve_service_name_for_listing(
+    listing_file: Path, listing_data: dict[str, Any]
+) -> str | None:
     """
     Resolve the service name for a listing file.
 
@@ -238,3 +249,94 @@ def resolve_service_name_for_listing(listing_file: Path, listing_data: dict[str,
 
     # Otherwise, return None (either no service files or multiple service files)
     return None
+
+
+def convert_convenience_fields_to_documents(
+    data: dict[str, Any],
+    base_path: Path,
+    *,
+    logo_field: str = "logo",
+    terms_field: str | None = "terms_of_service",
+) -> dict[str, Any]:
+    """
+    Convert convenience fields (logo, terms_of_service) to Document objects.
+
+    This utility function converts file paths or URLs in convenience fields
+    to proper Document structures that can be stored in the backend.
+
+    Args:
+        data: Data dictionary containing potential convenience fields
+        base_path: Base path for resolving relative file paths
+        logo_field: Name of the logo field (default: "logo")
+        terms_field: Name of the terms of service field (default: "terms_of_service", None to skip)
+
+    Returns:
+        Updated data dictionary with convenience fields converted to documents list
+
+    Example:
+        >>> data = {"logo": "assets/logo.png", "documents": []}
+        >>> result = convert_convenience_fields_to_documents(data, Path("/data/provider"))
+        >>> # Result will have logo removed and added to documents list
+    """
+    # Initialize documents list if not present
+    if "documents" not in data or data["documents"] is None:
+        data["documents"] = []
+
+    # Helper to determine MIME type from file path/URL
+    def get_mime_type(path_or_url: str) -> str:
+        path_lower = path_or_url.lower()
+        if path_lower.endswith((".png", ".jpg", ".jpeg")):
+            return "png" if ".png" in path_lower else "jpeg"
+        elif path_lower.endswith(".svg"):
+            return "svg"
+        elif path_lower.endswith(".pdf"):
+            return "pdf"
+        elif path_lower.endswith(".md"):
+            return "markdown"
+        else:
+            # Default to URL if it looks like a URL, otherwise markdown
+            return "url" if path_or_url.startswith("http") else "markdown"
+
+    # Convert logo field
+    if logo_field in data and data[logo_field]:
+        logo_value = data[logo_field]
+        logo_doc: dict[str, Any] = {
+            "title": "Company Logo",
+            "category": "logo",
+            "mime_type": get_mime_type(str(logo_value)),
+            "is_public": True,
+        }
+
+        # Check if it's a URL or file path
+        if str(logo_value).startswith("http"):
+            logo_doc["external_url"] = str(logo_value)
+        else:
+            # It's a file path - will be resolved by resolve_file_references
+            logo_doc["file_path"] = str(logo_value)
+
+        data["documents"].append(logo_doc)
+        # Remove the convenience field
+        del data[logo_field]
+
+    # Convert terms_of_service field if specified
+    if terms_field and terms_field in data and data[terms_field]:
+        terms_value = data[terms_field]
+        terms_doc: dict[str, Any] = {
+            "title": "Terms of Service",
+            "category": "terms_of_service",
+            "mime_type": get_mime_type(str(terms_value)),
+            "is_public": True,
+        }
+
+        # Check if it's a URL or file path
+        if str(terms_value).startswith("http"):
+            terms_doc["external_url"] = str(terms_value)
+        else:
+            # It's a file path - will be resolved by resolve_file_references
+            terms_doc["file_path"] = str(terms_value)
+
+        data["documents"].append(terms_doc)
+        # Remove the convenience field
+        del data[terms_field]
+
+    return data
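
Note: a hedged usage sketch of the new `convert_convenience_fields_to_documents`, following the docstring's own example (input values are illustrative; `base_path` is accepted but, in the code shown above, relative paths are left for `resolve_file_references` to resolve):

    from pathlib import Path

    data = {
        "logo": "assets/logo.png",
        "terms_of_service": "https://example.com/terms",
    }
    result = convert_convenience_fields_to_documents(data, Path("/data/provider"))

    # The convenience fields are removed and replaced by document entries:
    # a local path becomes "file_path", a URL becomes "external_url".
    assert "logo" not in result and "terms_of_service" not in result
    assert result["documents"][0]["file_path"] == "assets/logo.png"
    assert result["documents"][1]["external_url"] == "https://example.com/terms"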
@@ -13,6 +13,8 @@ from jinja2 import Environment, TemplateSyntaxError
 from jsonschema.validators import Draft7Validator
 from rich.console import Console
 
+import unitysvc_services
+
 
 class DataValidationError(Exception):
     """Exception raised when data validation fails."""
@@ -58,12 +60,16 @@ class DataValidator:
                 if "anyOf" in obj:
                     any_of = obj["anyOf"]
                     # Count non-null items for the check
-                    non_null_items = [item for item in any_of if item.get("type") != "null"]
+                    non_null_items = [
+                        item for item in any_of if item.get("type") != "null"
+                    ]
                     has_plain_string = any(
-                        item.get("type") == "string" and "format" not in item for item in non_null_items
+                        item.get("type") == "string" and "format" not in item
+                        for item in non_null_items
                     )
                     has_uri_string = any(
-                        item.get("type") == "string" and item.get("format") == "uri" for item in non_null_items
+                        item.get("type") == "string" and item.get("format") == "uri"
+                        for item in non_null_items
                     )
 
                     # Check for Union[str, HttpUrl] or Union[str, HttpUrl, None]
@@ -78,7 +84,9 @@
 
                 # Check other schema structures
                 for key, value in obj.items():
-                    if key not in ["properties", "anyOf"] and isinstance(value, dict | list):
+                    if key not in ["properties", "anyOf"] and isinstance(
+                        value, dict | list
+                    ):
                         traverse_schema(value, path)
 
             elif isinstance(obj, list):
@@ -88,7 +96,9 @@
         traverse_schema(schema)
         return union_fields
 
-    def validate_file_references(self, data: dict[str, Any], file_path: Path, union_fields: set[str]) -> list[str]:
+    def validate_file_references(
+        self, data: dict[str, Any], file_path: Path, union_fields: set[str]
+    ) -> list[str]:
         """
         Validate that file references in Union[str, HttpUrl] fields exist.
 
@@ -110,7 +120,9 @@
             ):
                 # Empty string is not a valid file reference
                 if value == "":
-                    errors.append(f"Empty string in field '{new_path}' is not a valid file reference or URL")
+                    errors.append(
+                        f"Empty string in field '{new_path}' is not a valid file reference or URL"
+                    )
                 # It's a file reference, must be relative path
                 elif Path(value).is_absolute():
                     errors.append(
@@ -139,6 +151,14 @@
                         f"File path '{value}' in field '{new_path}' "
                         f"must be a relative path, not an absolute path"
                     )
+                # Check that the file exists
+                else:
+                    referenced_file = file_path.parent / value
+                    if not referenced_file.exists():
+                        errors.append(
+                            f"File reference '{value}' in field '{new_path}' "
+                            f"does not exist at {referenced_file}"
+                        )
 
             # Recurse into nested objects
             if isinstance(value, dict | list):
@@ -152,7 +172,9 @@
         check_field(data, str(file_path))
         return errors
 
-    def validate_name_consistency(self, data: dict[str, Any], file_path: Path, schema_name: str) -> list[str]:
+    def validate_name_consistency(
+        self, data: dict[str, Any], file_path: Path, schema_name: str
+    ) -> list[str]:
         """Validate that the name field matches the directory name."""
         errors: list[str] = []
 
@@ -177,7 +199,9 @@
         elif file_path.name in ["service.json", "service.toml"]:
             # For service.json, the service directory should match the service name
             service_directory_name = file_path.parent.name
-            if self._normalize_name(name_value) != self._normalize_name(service_directory_name):
+            if self._normalize_name(name_value) != self._normalize_name(
+                service_directory_name
+            ):
                 normalized_name = self._normalize_name(name_value)
                 errors.append(
                     f"Service name '{name_value}' does not match "
@@ -196,7 +220,9 @@
         normalized = normalized.strip("-")
         return normalized
 
-    def load_data_file(self, file_path: Path) -> tuple[dict[str, Any] | None, list[str]]:
+    def load_data_file(
+        self, file_path: Path
+    ) -> tuple[dict[str, Any] | None, list[str]]:
         """Load data from JSON or TOML file."""
         errors: list[str] = []
 
@@ -211,7 +237,9 @@
                 return None, [f"Unsupported file format: {file_path.suffix}"]
             return data, errors
         except Exception as e:
-            format_name = {".json": "JSON", ".toml": "TOML"}.get(file_path.suffix, "data")
+            format_name = {".json": "JSON", ".toml": "TOML"}.get(
+                file_path.suffix, "data"
+            )
             return None, [f"Failed to parse {format_name}: {e}"]
 
     def validate_data_file(self, file_path: Path) -> tuple[bool, list[str]]:
@@ -240,13 +268,17 @@
 
         # Validate against schema with format checking enabled
         try:
-            validator = Draft7Validator(schema, format_checker=Draft7Validator.FORMAT_CHECKER)
+            validator = Draft7Validator(
+                schema, format_checker=Draft7Validator.FORMAT_CHECKER
+            )
             validator.check_schema(schema)  # Validate the schema itself
             validation_errors = list(validator.iter_errors(data))
             for error in validation_errors:
                 errors.append(f"Schema validation error: {error.message}")
                 if error.absolute_path:
-                    errors.append(f" Path: {'.'.join(str(p) for p in error.absolute_path)}")
+                    errors.append(
+                        f" Path: {'.'.join(str(p) for p in error.absolute_path)}"
+                    )
         except Exception as e:
             errors.append(f"Validation error: {e}")
 
@@ -314,12 +346,110 @@ class DataValidator:
                 "No seller file found. Each repository must have exactly one data file using the 'seller_v1' schema."
             )
         elif len(seller_files) > 1:
-            errors.append(f"Found {len(seller_files)} seller files, but only one is allowed per repository:")
+            errors.append(
+                f"Found {len(seller_files)} seller files, but only one is allowed per repository:"
+            )
             for seller_file in seller_files:
                 errors.append(f" - {seller_file}")
 
         return len(errors) == 0, errors
 
+    def validate_provider_status(self) -> tuple[bool, list[str]]:
+        """
+        Validate provider status and warn about services under disabled/incomplete providers.
+
+        Returns tuple of (is_valid, warnings) where warnings indicate services
+        that will be affected by provider status.
+        """
+        from unitysvc_services.models.base import ProviderStatusEnum
+        from unitysvc_services.models.provider_v1 import ProviderV1
+
+        warnings: list[str] = []
+
+        # Find all provider files
+        provider_files = list(self.data_dir.glob("*/provider.*"))
+
+        for provider_file in provider_files:
+            try:
+                # Load provider data
+                data = {}
+                if provider_file.suffix == ".json":
+                    with open(provider_file, encoding="utf-8") as f:
+                        data = json.load(f)
+                elif provider_file.suffix == ".toml":
+                    with open(provider_file, "rb") as f:
+                        data = toml.load(f)
+                else:
+                    continue
+
+                # Parse as ProviderV1
+                provider = ProviderV1.model_validate(data)
+                provider_dir = provider_file.parent
+                provider_name = provider.name
+
+                # Check if provider is not active
+                if provider.status != ProviderStatusEnum.active:
+                    # Find all services under this provider
+                    services_dir = provider_dir / "services"
+                    if services_dir.exists():
+                        service_count = len(list(services_dir.iterdir()))
+                        if service_count > 0:
+                            warnings.append(
+                                f"Provider '{provider_name}' has status '{provider.status}' but has {service_count} "
+                                f"service(s). All services under this provider will be affected."
+                            )
+
+            except Exception as e:
+                warnings.append(
+                    f"Error checking provider status in {provider_file}: {e}"
+                )
+
+        # Return True (valid) but with warnings
+        return True, warnings
+
+    def validate_seller_status(self) -> tuple[bool, list[str]]:
+        """
+        Validate seller status and warn if seller is disabled/incomplete.
+
+        Returns tuple of (is_valid, warnings) where warnings indicate seller issues.
+        """
+        from unitysvc_services.models.base import SellerStatusEnum
+        from unitysvc_services.models.seller_v1 import SellerV1
+
+        warnings: list[str] = []
+
+        # Find all seller files
+        seller_files = list(self.data_dir.glob("seller.*"))
+
+        for seller_file in seller_files:
+            try:
+                # Load seller data
+                data = {}
+                if seller_file.suffix == ".json":
+                    with open(seller_file, encoding="utf-8") as f:
+                        data = json.load(f)
+                elif seller_file.suffix == ".toml":
+                    with open(seller_file, "rb") as f:
+                        data = toml.load(f)
+                else:
+                    continue
+
+                # Parse as SellerV1
+                seller = SellerV1.model_validate(data)
+                seller_name = seller.name
+
+                # Check if seller is not active
+                if seller.status != SellerStatusEnum.active:
+                    warnings.append(
+                        f"Seller '{seller_name}' has status '{seller.status}'. Seller will not be published to backend."
+                    )
+
+            except Exception as e:
+                warnings.append(f"Error checking seller status in {seller_file}: {e}")
+
+        # Return True (valid) but with warnings
+        return True, warnings
+
     def validate_all(self) -> dict[str, tuple[bool, list[str]]]:
         """Validate all files in the data directory."""
         results: dict[str, tuple[bool, list[str]]] = {}
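
Note: both new status checks deliberately return `(True, warnings)`, so a non-active provider or seller is surfaced as an advisory rather than a validation failure; the next hunk wires these warnings into `validate_all()`. A sketch of how a caller distinguishes the two (the handling code is ours):

    _, provider_warnings = validator.validate_provider_status()
    _, seller_warnings = validator.validate_seller_status()

    # The boolean is always True here; the messages are advisory only.
    for message in provider_warnings + seller_warnings:
        print(f"warning: {message}")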
@@ -332,6 +462,19 @@
         if not seller_valid:
             results["_seller_uniqueness"] = (False, seller_errors)
 
+        # Validate seller status
+        seller_status_valid, seller_warnings = self.validate_seller_status()
+        if seller_warnings:
+            results["_seller_status"] = (True, seller_warnings)  # Warnings, not errors
+
+        # Validate provider status and check for affected services
+        provider_status_valid, provider_warnings = self.validate_provider_status()
+        if provider_warnings:
+            results["_provider_status"] = (
+                True,
+                provider_warnings,
+            )  # Warnings, not errors
+
         # Find all data and MD files recursively
         for file_path in self.data_dir.rglob("*"):
             if file_path.is_file() and file_path.suffix in [".json", ".toml", ".md"]:
@@ -382,7 +525,9 @@
             if schema == "service_v1":
                 service_name = data.get("name")
                 if not service_name:
-                    raise DataValidationError(f"Service file {file_path} missing 'name' field")
+                    raise DataValidationError(
+                        f"Service file {file_path} missing 'name' field"
+                    )
 
                 # Check for duplicate service names in same directory
                 if service_name in services:
@@ -410,7 +555,9 @@
             if service_name:
                 # If service_name is explicitly defined, it must match a service in the directory
                 if service_name not in services:
-                    available_services = ", ".join(services.keys()) if services else "none"
+                    available_services = (
+                        ", ".join(services.keys()) if services else "none"
+                    )
                     raise DataValidationError(
                         f"Listing file {listing_file} references service_name '{service_name}' "
                         f"which does not exist in the same directory.\n"
@@ -500,12 +647,30 @@ def validate(
     console.print(f"[cyan]Validating data files in:[/cyan] {data_dir}")
     console.print()
 
+    # Get schema directory from installed package
+    schema_dir = Path(unitysvc_services.__file__).parent / "schema"
+
     # Create validator and run validation
-    validator = DataValidator(data_dir, data_dir.parent / "schema")
-    validation_errors = validator.validate_all_service_directories(data_dir)
+    validator = DataValidator(data_dir, schema_dir)
+
+    # Run comprehensive validation (schema, file references, etc.)
+    all_results = validator.validate_all()
+    validation_errors = []
+
+    # Collect all errors from validate_all()
+    for file_path, (is_valid, errors) in all_results.items():
+        if not is_valid and errors:
+            for error in errors:
+                validation_errors.append(f"{file_path}: {error}")
+
+    # Also run service directory validation (service/listing relationships)
+    directory_errors = validator.validate_all_service_directories(data_dir)
+    validation_errors.extend(directory_errors)
 
     if validation_errors:
-        console.print(f"[red]✗ Validation failed with {len(validation_errors)} error(s):[/red]")
+        console.print(
+            f"[red]✗ Validation failed with {len(validation_errors)} error(s):[/red]"
+        )
         console.print()
         for i, error in enumerate(validation_errors, 1):
             console.print(f"[red]{i}.[/red] {error}")
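
Note: the `validate` command previously resolved schemas relative to the data directory (`data_dir.parent / "schema"`); it now resolves them from the installed package itself, so validation no longer depends on the layout of the data repository. A sketch of the same resolution, with `importlib.resources` noted as a more conventional alternative (our suggestion, not what the package uses):

    from pathlib import Path

    import unitysvc_services

    # What 0.1.1 does: derive the schema directory from the module's location.
    schema_dir = Path(unitysvc_services.__file__).parent / "schema"

    # A packaging-safe equivalent (assumption: schema files ship inside the wheel):
    # from importlib.resources import files
    # schema_dir = files("unitysvc_services") / "schema"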
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: unitysvc-services
-Version: 0.1.0
+Version: 0.1.1
 Summary: SDK for digital service providers on UnitySVC
 Author-email: Bo Peng <bo.peng@unitysvc.com>
 Maintainer-email: Bo Peng <bo.peng@unitysvc.com>
@@ -13,6 +13,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: typer
 Requires-Dist: pydantic
+Requires-Dist: email-validator
 Requires-Dist: jsonschema
 Requires-Dist: jinja2
 Requires-Dist: rich