unitysvc-services 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -27,11 +27,9 @@ class ServiceDataQuery:
  ValueError: If base_url or api_key is not provided
  """
  if not base_url:
- raise ValueError(
- "Backend URL not provided. Use --backend-url or set UNITYSVC_BACKEND_URL env var."
- )
+ raise ValueError("UNITYSVC_BASE_URL environment variable not set.")
  if not api_key:
- raise ValueError("API key not provided. Use --api-key or set UNITYSVC_API_KEY env var.")
+ raise ValueError("UNITYSVC_API_KEY environment variable not set.")

  self.base_url = base_url.rstrip("/")
  self.api_key = api_key
@@ -45,28 +43,28 @@ class ServiceDataQuery:

  def list_service_offerings(self) -> list[dict[str, Any]]:
  """List all service offerings from the backend."""
- response = self.client.get(f"{self.base_url}/publish/service_offering")
+ response = self.client.get(f"{self.base_url}/publish/service_offerings")
  response.raise_for_status()
  result = response.json()
  return result.get("data", result) if isinstance(result, dict) else result

  def list_service_listings(self) -> list[dict[str, Any]]:
  """List all service listings from the backend."""
- response = self.client.get(f"{self.base_url}/services/")
+ response = self.client.get(f"{self.base_url}/publish/services")
  response.raise_for_status()
  result = response.json()
  return result.get("data", result) if isinstance(result, dict) else result

  def list_providers(self) -> list[dict[str, Any]]:
  """List all providers from the backend."""
- response = self.client.get(f"{self.base_url}/providers/")
+ response = self.client.get(f"{self.base_url}/publish/providers")
  response.raise_for_status()
  result = response.json()
  return result.get("data", result) if isinstance(result, dict) else result

  def list_sellers(self) -> list[dict[str, Any]]:
  """List all sellers from the backend."""
- response = self.client.get(f"{self.base_url}/sellers/")
+ response = self.client.get(f"{self.base_url}/publish/sellers")
  response.raise_for_status()
  result = response.json()
  return result.get("data", result) if isinstance(result, dict) else result
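Note on the hunk above: all four list_* helpers now call /publish/... paths and share the same response-unwrapping line, which accepts either a bare JSON array or a {"data": [...]} envelope. A minimal sketch of that pattern in isolation (the base URL, bearer-token header, and endpoint path below are placeholders, not confirmed API details):

```python
# Sketch only: the GET + unwrap pattern used by the list_* methods above.
# Base URL, auth header scheme, and path are illustrative assumptions.
from typing import Any

import httpx


def list_resources(base_url: str, api_key: str, path: str) -> list[dict[str, Any]]:
    with httpx.Client(headers={"Authorization": f"Bearer {api_key}"}) as client:
        response = client.get(f"{base_url.rstrip('/')}/{path}")
        response.raise_for_status()
        result = response.json()
        # Tolerate both {"data": [...]} envelopes and bare lists.
        return result.get("data", result) if isinstance(result, dict) else result


# Example (placeholder values):
# list_resources("https://api.example.com", "my-api-key", "publish/providers")
```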
@@ -96,40 +94,22 @@ class ServiceDataQuery:
  self.close()

  @staticmethod
- def from_env(
- backend_url: str | None = None, api_key: str | None = None
- ) -> "ServiceDataQuery":
- """Create ServiceDataQuery from environment variables or arguments.
-
- Args:
- backend_url: Optional backend URL (falls back to UNITYSVC_BACKEND_URL env var)
- api_key: Optional API key (falls back to UNITYSVC_API_KEY env var)
+ def from_env() -> "ServiceDataQuery":
+ """Create ServiceDataQuery from environment variables.

  Returns:
  ServiceDataQuery instance

  Raises:
- ValueError: If required credentials are not provided
+ ValueError: If required environment variables are not set
  """
- resolved_backend_url = backend_url or os.getenv("UNITYSVC_BACKEND_URL") or ""
- resolved_api_key = api_key or os.getenv("UNITYSVC_API_KEY") or ""
- return ServiceDataQuery(base_url=resolved_backend_url, api_key=resolved_api_key)
+ backend_url = os.getenv("UNITYSVC_BASE_URL") or ""
+ api_key = os.getenv("UNITYSVC_API_KEY") or ""
+ return ServiceDataQuery(base_url=backend_url, api_key=api_key)


  @app.command("sellers")
  def query_sellers(
- backend_url: str | None = typer.Option(
- None,
- "--backend-url",
- "-u",
- help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
- ),
- api_key: str | None = typer.Option(
- None,
- "--api-key",
- "-k",
- help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
- ),
  format: str = typer.Option(
  "table",
  "--format",
@@ -139,7 +119,7 @@ def query_sellers(
  ):
  """Query all sellers from the backend."""
  try:
- with ServiceDataQuery.from_env(backend_url, api_key) as query:
+ with ServiceDataQuery.from_env() as query:
  sellers = query.list_sellers()

  if format == "json":
@@ -177,18 +157,6 @@ def query_sellers(

  @app.command("providers")
  def query_providers(
- backend_url: str | None = typer.Option(
- None,
- "--backend-url",
- "-u",
- help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
- ),
- api_key: str | None = typer.Option(
- None,
- "--api-key",
- "-k",
- help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
- ),
  format: str = typer.Option(
  "table",
  "--format",
@@ -198,7 +166,7 @@ def query_providers(
  ):
  """Query all providers from the backend."""
  try:
- with ServiceDataQuery.from_env(backend_url, api_key) as query:
+ with ServiceDataQuery.from_env() as query:
  providers = query.list_providers()

  if format == "json":
@@ -232,18 +200,6 @@ def query_providers(

  @app.command("offerings")
  def query_offerings(
- backend_url: str | None = typer.Option(
- None,
- "--backend-url",
- "-u",
- help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
- ),
- api_key: str | None = typer.Option(
- None,
- "--api-key",
- "-k",
- help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
- ),
  format: str = typer.Option(
  "table",
  "--format",
@@ -253,7 +209,7 @@ def query_offerings(
  ):
  """Query all service offerings from UnitySVC backend."""
  try:
- with ServiceDataQuery.from_env(backend_url, api_key) as query:
+ with ServiceDataQuery.from_env() as query:
  offerings = query.list_service_offerings()

  if format == "json":
@@ -292,18 +248,6 @@ def query_offerings(

  @app.command("listings")
  def query_listings(
- backend_url: str | None = typer.Option(
- None,
- "--backend-url",
- "-u",
- help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
- ),
- api_key: str | None = typer.Option(
- None,
- "--api-key",
- "-k",
- help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
- ),
  format: str = typer.Option(
  "table",
  "--format",
@@ -313,7 +257,7 @@ def query_listings(
  ):
  """Query all service listings from UnitySVC backend."""
  try:
- with ServiceDataQuery.from_env(backend_url, api_key) as query:
+ with ServiceDataQuery.from_env() as query:
  listings = query.list_service_listings()

  if format == "json":
@@ -351,18 +295,6 @@ def query_listings(

  @app.command("interfaces")
  def query_interfaces(
- backend_url: str | None = typer.Option(
- None,
- "--backend-url",
- "-u",
- help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
- ),
- api_key: str | None = typer.Option(
- None,
- "--api-key",
- "-k",
- help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
- ),
  format: str = typer.Option(
  "table",
  "--format",
@@ -372,7 +304,7 @@ def query_interfaces(
  ):
  """Query all access interfaces from UnitySVC backend (private endpoint)."""
  try:
- with ServiceDataQuery.from_env(backend_url, api_key) as query:
+ with ServiceDataQuery.from_env() as query:
  data = query.list_access_interfaces()

  if format == "json":
@@ -412,18 +344,6 @@ def query_interfaces(

  @app.command("documents")
  def query_documents(
- backend_url: str | None = typer.Option(
- None,
- "--backend-url",
- "-u",
- help="UnitySVC backend URL (default: from UNITYSVC_BACKEND_URL env var)",
- ),
- api_key: str | None = typer.Option(
- None,
- "--api-key",
- "-k",
- help="API key for authentication (default: from UNITYSVC_API_KEY env var)",
- ),
  format: str = typer.Option(
  "table",
  "--format",
@@ -433,7 +353,7 @@ def query_documents(
  ):
  """Query all documents from UnitySVC backend (private endpoint)."""
  try:
- with ServiceDataQuery.from_env(backend_url, api_key) as query:
+ with ServiceDataQuery.from_env() as query:
  data = query.list_documents()

  if format == "json":
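Taken together, the hunks above remove the --backend-url/--api-key options from every query subcommand; configuration now comes only from the UNITYSVC_BASE_URL and UNITYSVC_API_KEY environment variables via ServiceDataQuery.from_env(). A hedged usage sketch (the import path and values are assumptions for illustration):

```python
# Sketch: the env-var-only configuration flow introduced in 0.2.0.
# Module path and values are illustrative; from_env() raises ValueError
# if either variable resolves to an empty string.
import os

from unitysvc_services.query import ServiceDataQuery  # assumed import path

os.environ["UNITYSVC_BASE_URL"] = "https://api.example.com"
os.environ["UNITYSVC_API_KEY"] = "my-api-key"

with ServiceDataQuery.from_env() as query:
    for seller in query.list_sellers():
        print(seller.get("name"))
```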
@@ -1,6 +1,5 @@
  """Update command group - update local data files."""

- import os
  from pathlib import Path
  from typing import Any

@@ -46,7 +45,7 @@ def update_offering(
  None,
  "--data-dir",
  "-d",
- help="Directory containing data files (default: ./data or UNITYSVC_DATA_DIR env var)",
+ help="Directory containing data files (default: current directory)",
  ),
  ):
  """
@@ -83,11 +82,7 @@ def update_offering(

  # Set data directory
  if data_dir is None:
- data_dir_str = os.getenv("UNITYSVC_DATA_DIR")
- if data_dir_str:
- data_dir = Path(data_dir_str)
- else:
- data_dir = Path.cwd() / "data"
+ data_dir = Path.cwd()

  if not data_dir.is_absolute():
  data_dir = Path.cwd() / data_dir
@@ -181,7 +176,7 @@ def update_listing(
  None,
  "--data-dir",
  "-d",
- help="Directory containing data files (default: ./data or UNITYSVC_DATA_DIR env var)",
+ help="Directory containing data files (default: current directory)",
  ),
  ):
  """
@@ -227,11 +222,7 @@ def update_listing(

  # Set data directory
  if data_dir is None:
- data_dir_str = os.getenv("UNITYSVC_DATA_DIR")
- if data_dir_str:
- data_dir = Path(data_dir_str)
- else:
- data_dir = Path.cwd() / "data"
+ data_dir = Path.cwd()

  if not data_dir.is_absolute():
  data_dir = Path.cwd() / data_dir
@@ -251,8 +242,13 @@ def update_listing(
  if seller_name:
  field_filter["seller_name"] = seller_name

+ # Convert field_filter dict to tuple for caching
+ field_filter_tuple = tuple(sorted(field_filter.items())) if field_filter else None
+
  # Find listing files matching criteria
- listing_files = find_files_by_schema(data_dir, "listing_v1", path_filter=service_name, field_filter=field_filter)
+ listing_files = find_files_by_schema(
+ data_dir, "listing_v1", path_filter=service_name, field_filter=field_filter_tuple
+ )

  if not listing_files:
  console.print(
@@ -2,6 +2,7 @@

  import json
  import tomllib
+ from functools import lru_cache
  from pathlib import Path
  from typing import Any

@@ -54,23 +55,25 @@ def write_data_file(file_path: Path, data: dict[str, Any], format: str) -> None:
  raise ValueError(f"Unsupported format: {format}")


- def find_data_files(data_dir: Path, extensions: list[str] | None = None) -> list[Path]:
+ @lru_cache(maxsize=128)
+ def find_data_files(data_dir: Path, extensions: tuple[str, ...] | None = None) -> list[Path]:
  """
  Find all data files in a directory with specified extensions.

  Args:
  data_dir: Directory to search
- extensions: List of extensions to search for (default: ["json", "toml"])
+ extensions: Tuple of extensions to search for (default: ("json", "toml"))

  Returns:
  List of Path objects for matching files
  """
  if extensions is None:
- extensions = ["json", "toml"]
+ extensions = ("json", "toml")

  data_files: list[Path] = []
  for ext in extensions:
  data_files.extend(data_dir.rglob(f"*.{ext}"))
+
  return data_files


@@ -103,11 +106,12 @@ def find_file_by_schema_and_name(
  return None


+ @lru_cache(maxsize=256)
  def find_files_by_schema(
  data_dir: Path,
  schema: str,
  path_filter: str | None = None,
- field_filter: dict[str, Any] | None = None,
+ field_filter: tuple[tuple[str, Any], ...] | None = None,
  ) -> list[tuple[Path, str, dict[str, Any]]]:
  """
  Find all data files matching a schema with optional filters.
@@ -116,7 +120,7 @@ def find_files_by_schema(
  data_dir: Directory to search
  schema: Schema identifier (e.g., "service_v1", "listing_v1")
  path_filter: Optional string that must be in the file path
- field_filter: Optional dict of field:value pairs to filter by
+ field_filter: Optional tuple of (key, value) pairs to filter by

  Returns:
  List of tuples (file_path, format, data) for matching files
@@ -124,6 +128,9 @@
  data_files = find_data_files(data_dir)
  matching_files: list[tuple[Path, str, dict[str, Any]]] = []

+ # Convert field_filter tuple back to dict for filtering
+ field_filter_dict = dict(field_filter) if field_filter else None
+
  for data_file in data_files:
  try:
  # Apply path filter
@@ -137,8 +144,8 @@ def find_files_by_schema(
  continue

  # Apply field filters
- if field_filter:
- if not all(data.get(k) == v for k, v in field_filter.items()):
+ if field_filter_dict:
+ if not all(data.get(k) == v for k, v in field_filter_dict.items()):
  continue

  matching_files.append((data_file, file_format, data))
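The dict-to-tuple round-trip above (and the matching caller-side conversion in update_listing) exists because functools.lru_cache builds its cache key by hashing the call arguments: a dict is unhashable, while a sorted tuple of (key, value) pairs is, as long as the values themselves are hashable. A standalone sketch of the pattern with made-up names:

```python
# Sketch of the hashable-arguments pattern used above; names are illustrative.
from functools import lru_cache


@lru_cache(maxsize=256)
def find_matching(schema: str, field_filter: tuple[tuple[str, str], ...] | None = None) -> tuple[str, ...]:
    # Rebuild the dict inside the cached function, as find_files_by_schema does.
    filters = dict(field_filter) if field_filter else {}
    return tuple(f"{schema}:{k}={v}" for k, v in filters.items())


criteria = {"seller_name": "acme", "provider_name": "example"}
# Callers sort and tuple-ize the dict so identical filters produce identical cache keys.
key = tuple(sorted(criteria.items()))
print(find_matching("listing_v1", key))  # computed on first call
print(find_matching("listing_v1", key))  # served from the cache on repeat calls
```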
@@ -238,3 +245,94 @@ def resolve_service_name_for_listing(listing_file: Path, listing_data: dict[str,

  # Otherwise, return None (either no service files or multiple service files)
  return None
+
+
+ def convert_convenience_fields_to_documents(
+ data: dict[str, Any],
+ base_path: Path,
+ *,
+ logo_field: str = "logo",
+ terms_field: str | None = "terms_of_service",
+ ) -> dict[str, Any]:
+ """
+ Convert convenience fields (logo, terms_of_service) to Document objects.
+
+ This utility function converts file paths or URLs in convenience fields
+ to proper Document structures that can be stored in the backend.
+
+ Args:
+ data: Data dictionary containing potential convenience fields
+ base_path: Base path for resolving relative file paths
+ logo_field: Name of the logo field (default: "logo")
+ terms_field: Name of the terms of service field (default: "terms_of_service", None to skip)
+
+ Returns:
+ Updated data dictionary with convenience fields converted to documents list
+
+ Example:
+ >>> data = {"logo": "assets/logo.png", "documents": []}
+ >>> result = convert_convenience_fields_to_documents(data, Path("/data/provider"))
+ >>> # Result will have logo removed and added to documents list
+ """
+ # Initialize documents list if not present
+ if "documents" not in data or data["documents"] is None:
+ data["documents"] = []
+
+ # Helper to determine MIME type from file path/URL
+ def get_mime_type(path_or_url: str) -> str:
+ path_lower = path_or_url.lower()
+ if path_lower.endswith((".png", ".jpg", ".jpeg")):
+ return "png" if ".png" in path_lower else "jpeg"
+ elif path_lower.endswith(".svg"):
+ return "svg"
+ elif path_lower.endswith(".pdf"):
+ return "pdf"
+ elif path_lower.endswith(".md"):
+ return "markdown"
+ else:
+ # Default to URL if it looks like a URL, otherwise markdown
+ return "url" if path_or_url.startswith("http") else "markdown"
+
+ # Convert logo field
+ if logo_field in data and data[logo_field]:
+ logo_value = data[logo_field]
+ logo_doc: dict[str, Any] = {
+ "title": "Company Logo",
+ "category": "logo",
+ "mime_type": get_mime_type(str(logo_value)),
+ "is_public": True,
+ }
+
+ # Check if it's a URL or file path
+ if str(logo_value).startswith("http"):
+ logo_doc["external_url"] = str(logo_value)
+ else:
+ # It's a file path - will be resolved by resolve_file_references
+ logo_doc["file_path"] = str(logo_value)
+
+ data["documents"].append(logo_doc)
+ # Remove the convenience field
+ del data[logo_field]
+
+ # Convert terms_of_service field if specified
+ if terms_field and terms_field in data and data[terms_field]:
+ terms_value = data[terms_field]
+ terms_doc: dict[str, Any] = {
+ "title": "Terms of Service",
+ "category": "terms_of_service",
+ "mime_type": get_mime_type(str(terms_value)),
+ "is_public": True,
+ }
+
+ # Check if it's a URL or file path
+ if str(terms_value).startswith("http"):
+ terms_doc["external_url"] = str(terms_value)
+ else:
+ # It's a file path - will be resolved by resolve_file_references
+ terms_doc["file_path"] = str(terms_value)
+
+ data["documents"].append(terms_doc)
+ # Remove the convenience field
+ del data[terms_field]
+
+ return data
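A runnable expansion of the docstring's example for the new helper (the input dict, base path, and import path are illustrative):

```python
# Sketch: converting the "logo" and "terms_of_service" convenience fields
# into documents entries. Import path and data are illustrative.
from pathlib import Path

from unitysvc_services.utils import convert_convenience_fields_to_documents  # assumed location

provider_data = {
    "name": "example-provider",
    "logo": "assets/logo.png",
    "terms_of_service": "https://example.com/terms",
}

result = convert_convenience_fields_to_documents(provider_data, Path("/data/provider"))

# The convenience fields are removed; result["documents"] now holds two entries:
# a "logo" document with file_path="assets/logo.png" and mime_type="png", and a
# "terms_of_service" document with external_url set and mime_type="url".
print(result["documents"])
```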
@@ -1,7 +1,6 @@
  """Data validation module for unitysvc_services."""

  import json
- import os
  import re
  import tomllib as toml
  from pathlib import Path
@@ -13,6 +12,8 @@ from jinja2 import Environment, TemplateSyntaxError
  from jsonschema.validators import Draft7Validator
  from rich.console import Console

+ import unitysvc_services
+

  class DataValidationError(Exception):
  """Exception raised when data validation fails."""
@@ -139,6 +140,14 @@ class DataValidator:
  f"File path '{value}' in field '{new_path}' "
  f"must be a relative path, not an absolute path"
  )
+ # Check that the file exists
+ else:
+ referenced_file = file_path.parent / value
+ if not referenced_file.exists():
+ errors.append(
+ f"File reference '{value}' in field '{new_path}' "
+ f"does not exist at {referenced_file}"
+ )

  # Recurse into nested objects
  if isinstance(value, dict | list):
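The new else branch resolves each relative reference against the directory of the data file that contains it, so a logo = "assets/logo.png" entry in providers/acme/provider.toml must exist at providers/acme/assets/logo.png. A small sketch of that resolution (paths are made up):

```python
# Sketch: how a relative file reference is checked against the data file's directory.
# Paths are illustrative.
from pathlib import Path

data_file = Path("providers/acme/provider.toml")
value = "assets/logo.png"  # field value read from the data file

referenced_file = data_file.parent / value
if not referenced_file.exists():
    print(f"File reference '{value}' does not exist at {referenced_file}")
```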
@@ -320,6 +329,100 @@ class DataValidator:

  return len(errors) == 0, errors

+ def validate_provider_status(self) -> tuple[bool, list[str]]:
+ """
+ Validate provider status and warn about services under disabled/incomplete providers.
+
+ Returns tuple of (is_valid, warnings) where warnings indicate services
+ that will be affected by provider status.
+ """
+ from unitysvc_services.models.base import ProviderStatusEnum
+ from unitysvc_services.models.provider_v1 import ProviderV1
+
+ warnings: list[str] = []
+
+ # Find all provider files
+ provider_files = list(self.data_dir.glob("*/provider.*"))
+
+ for provider_file in provider_files:
+ try:
+ # Load provider data
+ data = {}
+ if provider_file.suffix == ".json":
+ with open(provider_file, encoding="utf-8") as f:
+ data = json.load(f)
+ elif provider_file.suffix == ".toml":
+ with open(provider_file, "rb") as f:
+ data = toml.load(f)
+ else:
+ continue
+
+ # Parse as ProviderV1
+ provider = ProviderV1.model_validate(data)
+ provider_dir = provider_file.parent
+ provider_name = provider.name
+
+ # Check if provider is not active
+ if provider.status != ProviderStatusEnum.active:
+ # Find all services under this provider
+ services_dir = provider_dir / "services"
+ if services_dir.exists():
+ service_count = len(list(services_dir.iterdir()))
+ if service_count > 0:
+ warnings.append(
+ f"Provider '{provider_name}' has status '{provider.status}' but has {service_count} "
+ f"service(s). All services under this provider will be affected."
+ )
+
+ except Exception as e:
+ warnings.append(f"Error checking provider status in {provider_file}: {e}")
+
+ # Return True (valid) but with warnings
+ return True, warnings
+
+ def validate_seller_status(self) -> tuple[bool, list[str]]:
+ """
+ Validate seller status and warn if seller is disabled/incomplete.
+
+ Returns tuple of (is_valid, warnings) where warnings indicate seller issues.
+ """
+ from unitysvc_services.models.base import SellerStatusEnum
+ from unitysvc_services.models.seller_v1 import SellerV1
+
+ warnings: list[str] = []
+
+ # Find all seller files
+ seller_files = list(self.data_dir.glob("seller.*"))
+
+ for seller_file in seller_files:
+ try:
+ # Load seller data
+ data = {}
+ if seller_file.suffix == ".json":
+ with open(seller_file, encoding="utf-8") as f:
+ data = json.load(f)
+ elif seller_file.suffix == ".toml":
+ with open(seller_file, "rb") as f:
+ data = toml.load(f)
+ else:
+ continue
+
+ # Parse as SellerV1
+ seller = SellerV1.model_validate(data)
+ seller_name = seller.name
+
+ # Check if seller is not active
+ if seller.status != SellerStatusEnum.active:
+ warnings.append(
+ f"Seller '{seller_name}' has status '{seller.status}'. Seller will not be published to backend."
+ )
+
+ except Exception as e:
+ warnings.append(f"Error checking seller status in {seller_file}: {e}")
+
+ # Return True (valid) but with warnings
+ return True, warnings
+
  def validate_all(self) -> dict[str, tuple[bool, list[str]]]:
  """Validate all files in the data directory."""
  results: dict[str, tuple[bool, list[str]]] = {}
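Both new status validators intentionally return (True, warnings): an inactive provider or seller is not treated as a validation failure, it only flags data that will be skipped or affected at publish time, and validate_all files those messages under the _seller_status / _provider_status keys. A sketch of how a caller might split such results into hard errors and warnings (the entries below are illustrative):

```python
# Sketch: separating errors from warnings in a validate_all()-style result dict.
# The entries are illustrative, shaped like {name: (is_valid, messages)}.
results: dict[str, tuple[bool, list[str]]] = {
    "_provider_status": (True, ["Provider 'acme' has status 'disabled' but has 3 service(s)."]),
    "providers/acme/provider.toml": (False, ["schema validation failed"]),
}

errors: list[str] = []
warnings: list[str] = []
for name, (is_valid, messages) in results.items():
    bucket = warnings if is_valid else errors
    bucket.extend(f"{name}: {message}" for message in messages)

print("warnings:", warnings)
print("errors:", errors)
```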
@@ -332,6 +435,19 @@ class DataValidator:
  if not seller_valid:
  results["_seller_uniqueness"] = (False, seller_errors)

+ # Validate seller status
+ seller_status_valid, seller_warnings = self.validate_seller_status()
+ if seller_warnings:
+ results["_seller_status"] = (True, seller_warnings) # Warnings, not errors
+
+ # Validate provider status and check for affected services
+ provider_status_valid, provider_warnings = self.validate_provider_status()
+ if provider_warnings:
+ results["_provider_status"] = (
+ True,
+ provider_warnings,
+ ) # Warnings, not errors
+
  # Find all data and MD files recursively
  for file_path in self.data_dir.rglob("*"):
  if file_path.is_file() and file_path.suffix in [".json", ".toml", ".md"]:
@@ -474,7 +590,7 @@ console = Console()
  def validate(
  data_dir: Path | None = typer.Argument(
  None,
- help="Directory containing data files to validate (default: ./data or UNITYSVC_DATA_DIR env var)",
+ help="Directory containing data files to validate (default: current directory)",
  ),
  ):
  """
@@ -487,11 +603,7 @@ def validate(
  """
  # Determine data directory
  if data_dir is None:
- data_dir_str = os.environ.get("UNITYSVC_DATA_DIR")
- if data_dir_str:
- data_dir = Path(data_dir_str)
- else:
- data_dir = Path.cwd() / "data"
+ data_dir = Path.cwd()

  if not data_dir.exists():
  console.print(f"[red]✗[/red] Data directory not found: {data_dir}")
@@ -500,9 +612,25 @@ def validate(
  console.print(f"[cyan]Validating data files in:[/cyan] {data_dir}")
  console.print()

+ # Get schema directory from installed package
+ schema_dir = Path(unitysvc_services.__file__).parent / "schema"
+
  # Create validator and run validation
- validator = DataValidator(data_dir, data_dir.parent / "schema")
- validation_errors = validator.validate_all_service_directories(data_dir)
+ validator = DataValidator(data_dir, schema_dir)
+
+ # Run comprehensive validation (schema, file references, etc.)
+ all_results = validator.validate_all()
+ validation_errors = []
+
+ # Collect all errors from validate_all()
+ for file_path, (is_valid, errors) in all_results.items():
+ if not is_valid and errors:
+ for error in errors:
+ validation_errors.append(f"{file_path}: {error}")
+
+ # Also run service directory validation (service/listing relationships)
+ directory_errors = validator.validate_all_service_directories(data_dir)
+ validation_errors.extend(directory_errors)

  if validation_errors:
  console.print(f"[red]✗ Validation failed with {len(validation_errors)} error(s):[/red]")
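The last hunk also changes where schemas come from: instead of expecting a schema/ directory next to the data, the CLI now resolves it inside the installed package and runs validate_all() on top of the existing per-directory checks. A sketch of the same flow used programmatically (the DataValidator import path is an assumption; the constructor arguments follow the diff above):

```python
# Sketch: programmatic validation with the packaged schema directory.
# The validator module path is assumed; DataValidator(data_dir, schema_dir)
# follows the call shown in the diff.
from pathlib import Path

import unitysvc_services
from unitysvc_services.validator import DataValidator  # assumed import path

data_dir = Path.cwd()
schema_dir = Path(unitysvc_services.__file__).parent / "schema"

validator = DataValidator(data_dir, schema_dir)
for name, (is_valid, messages) in validator.validate_all().items():
    status = "ok" if is_valid else "FAIL"
    print(f"[{status}] {name}: {len(messages)} message(s)")
```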