unitysvc-services 0.1.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. unitysvc_services/__init__.py +4 -0
  2. unitysvc_services/api.py +421 -0
  3. unitysvc_services/cli.py +23 -0
  4. unitysvc_services/format_data.py +140 -0
  5. unitysvc_services/interactive_prompt.py +1132 -0
  6. unitysvc_services/list.py +216 -0
  7. unitysvc_services/models/__init__.py +71 -0
  8. unitysvc_services/models/base.py +1375 -0
  9. unitysvc_services/models/listing_data.py +118 -0
  10. unitysvc_services/models/listing_v1.py +56 -0
  11. unitysvc_services/models/provider_data.py +79 -0
  12. unitysvc_services/models/provider_v1.py +54 -0
  13. unitysvc_services/models/seller_data.py +120 -0
  14. unitysvc_services/models/seller_v1.py +42 -0
  15. unitysvc_services/models/service_data.py +114 -0
  16. unitysvc_services/models/service_v1.py +81 -0
  17. unitysvc_services/populate.py +207 -0
  18. unitysvc_services/publisher.py +1628 -0
  19. unitysvc_services/py.typed +0 -0
  20. unitysvc_services/query.py +688 -0
  21. unitysvc_services/scaffold.py +1103 -0
  22. unitysvc_services/schema/base.json +777 -0
  23. unitysvc_services/schema/listing_v1.json +1286 -0
  24. unitysvc_services/schema/provider_v1.json +952 -0
  25. unitysvc_services/schema/seller_v1.json +379 -0
  26. unitysvc_services/schema/service_v1.json +1306 -0
  27. unitysvc_services/test.py +965 -0
  28. unitysvc_services/unpublisher.py +505 -0
  29. unitysvc_services/update.py +287 -0
  30. unitysvc_services/utils.py +533 -0
  31. unitysvc_services/validator.py +731 -0
  32. unitysvc_services-0.1.24.dist-info/METADATA +184 -0
  33. unitysvc_services-0.1.24.dist-info/RECORD +37 -0
  34. unitysvc_services-0.1.24.dist-info/WHEEL +5 -0
  35. unitysvc_services-0.1.24.dist-info/entry_points.txt +3 -0
  36. unitysvc_services-0.1.24.dist-info/licenses/LICENSE +21 -0
  37. unitysvc_services-0.1.24.dist-info/top_level.txt +1 -0
@@ -0,0 +1,533 @@
1
+ """Utility functions for file handling and data operations."""
2
+
3
+ import json
4
+ import tomllib
5
+ from functools import lru_cache
6
+ from pathlib import Path
7
+ from typing import Any
8
+
9
+ import tomli_w
10
+ from jinja2 import Template
11
+
12
+
13
def deep_merge_dicts(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
    """
    Deep merge two dictionaries, with override values taking precedence.

    Nested dictionaries are merged recursively; any other value type
    (lists, primitives, etc.) in ``override`` replaces the corresponding
    base value wholesale.

    Args:
        base: Base dictionary
        override: Override dictionary (values take precedence)

    Returns:
        Merged dictionary; neither input is modified.
    """
    merged = dict(base)
    for key, incoming in override.items():
        existing = merged.get(key)
        if isinstance(existing, dict) and isinstance(incoming, dict):
            # Both sides are dicts: merge them recursively.
            merged[key] = deep_merge_dicts(existing, incoming)
        else:
            # Everything else: the override value wins outright.
            merged[key] = incoming
    return merged
38
+
39
+
40
+ def load_data_file(file_path: Path) -> tuple[dict[str, Any], str]:
41
+ """
42
+ Load a data file (JSON or TOML) and return (data, format).
43
+
44
+ Automatically checks for and merges override files with the pattern:
45
+ <base_name>.override.<extension>
46
+
47
+ For example:
48
+ - service.json -> service.override.json
49
+ - provider.toml -> provider.override.toml
50
+
51
+ If an override file exists, it will be deep-merged with the base file,
52
+ with override values taking precedence.
53
+
54
+ Args:
55
+ file_path: Path to the data file
56
+
57
+ Returns:
58
+ Tuple of (data dict, format string "json" or "toml")
59
+
60
+ Raises:
61
+ ValueError: If file format is not supported
62
+ """
63
+ # Load the base file
64
+ if file_path.suffix == ".json":
65
+ with open(file_path, encoding="utf-8") as f:
66
+ data = json.load(f)
67
+ file_format = "json"
68
+ elif file_path.suffix == ".toml":
69
+ with open(file_path, "rb") as f:
70
+ data = tomllib.load(f)
71
+ file_format = "toml"
72
+ else:
73
+ raise ValueError(f"Unsupported file format: {file_path.suffix}")
74
+
75
+ # Check for override file
76
+ # Pattern: <stem>.override.<suffix>
77
+ # Example: service.json -> service.override.json
78
+ override_path = file_path.with_stem(f"{file_path.stem}.override")
79
+
80
+ if override_path.exists():
81
+ # Load the override file (same format as base file)
82
+ if override_path.suffix == ".json":
83
+ with open(override_path, encoding="utf-8") as f:
84
+ override_data = json.load(f)
85
+ elif override_path.suffix == ".toml":
86
+ with open(override_path, "rb") as f:
87
+ override_data = tomllib.load(f)
88
+ else:
89
+ # This shouldn't happen since we're using the same suffix as base
90
+ # But handle it gracefully
91
+ override_data = {}
92
+
93
+ # Deep merge the override data into the base data
94
+ data = deep_merge_dicts(data, override_data)
95
+
96
+ return data, file_format
97
+
98
+
99
def write_data_file(file_path: Path, data: dict[str, Any], format: str) -> None:
    """
    Write data back to file in the specified format.

    JSON output is pretty-printed (indent 2), key-sorted, and ends with a
    trailing newline; TOML output is produced by tomli_w.

    Args:
        file_path: Path to the data file
        data: Data dictionary to write
        format: Format string ("json" or "toml")

    Raises:
        ValueError: If format is not supported
    """
    if format == "json":
        serialized = json.dumps(data, indent=2, sort_keys=True) + "\n"
        file_path.write_text(serialized, encoding="utf-8")
    elif format == "toml":
        # tomli_w writes to a binary stream.
        with open(file_path, "wb") as stream:
            tomli_w.dump(data, stream)
    else:
        raise ValueError(f"Unsupported format: {format}")
120
+
121
+
122
+ @lru_cache(maxsize=128)
123
+ def find_data_files(data_dir: Path, extensions: tuple[str, ...] | None = None) -> list[Path]:
124
+ """
125
+ Find all data files in a directory with specified extensions.
126
+
127
+ Args:
128
+ data_dir: Directory to search
129
+ extensions: Tuple of extensions to search for (default: ("json", "toml"))
130
+
131
+ Returns:
132
+ List of Path objects for matching files
133
+ """
134
+ if extensions is None:
135
+ extensions = ("json", "toml")
136
+
137
+ data_files: list[Path] = []
138
+ for ext in extensions:
139
+ data_files.extend(data_dir.rglob(f"*.{ext}"))
140
+
141
+ return data_files
142
+
143
+
144
def find_file_by_schema_and_name(
    data_dir: Path, schema: str, name_field: str, name_value: str
) -> tuple[Path, str, dict[str, Any]] | None:
    """
    Find a data file by schema type and name field value.

    Args:
        data_dir: Directory to search
        schema: Schema identifier (e.g., "service_v1", "listing_v1")
        name_field: Field name to match (e.g., "name", "seller_name")
        name_value: Value to match in the name field

    Returns:
        Tuple of (file_path, format, data) for the first match, None otherwise
    """
    for candidate in find_data_files(data_dir):
        try:
            data, file_format = load_data_file(candidate)
        except Exception:
            # Unreadable or invalid files are simply skipped.
            continue
        if data.get("schema") == schema and data.get(name_field) == name_value:
            return candidate, file_format, data
    return None
171
+
172
+
173
@lru_cache(maxsize=256)
def find_files_by_schema(
    data_dir: Path,
    schema: str,
    path_filter: str | None = None,
    field_filter: tuple[tuple[str, Any], ...] | None = None,
) -> list[tuple[Path, str, dict[str, Any]]]:
    """
    Find all data files matching a schema with optional filters.

    Results are cached per argument combination, which is why
    ``field_filter`` is a hashable tuple of pairs rather than a dict.

    Args:
        data_dir: Directory to search
        schema: Schema identifier (e.g., "service_v1", "listing_v1")
        path_filter: Optional substring that must appear in the file path
        field_filter: Optional tuple of (key, value) pairs to filter by

    Returns:
        List of tuples (file_path, format, data) for matching files
    """
    # Rehydrate the hashable filter tuple into a dict for lookups.
    wanted_fields = dict(field_filter) if field_filter else {}
    matches: list[tuple[Path, str, dict[str, Any]]] = []

    for candidate in find_data_files(data_dir):
        # Cheap path filter first, before any file I/O.
        if path_filter and path_filter not in str(candidate):
            continue

        try:
            data, file_format = load_data_file(candidate)
        except Exception:
            # Skip files that can't be loaded.
            continue

        if data.get("schema") != schema:
            continue

        # All requested field values must match exactly.
        if any(data.get(key) != value for key, value in wanted_fields.items()):
            continue

        matches.append((candidate, file_format, data))

    return matches
222
+
223
+
224
def resolve_provider_name(file_path: Path) -> str | None:
    """
    Resolve the provider name from the file path.

    The provider name is determined by the directory structure:
    - For service offerings: <provider_name>/services/<service_name>/service.{json,toml}
    - For service listings: <provider_name>/services/<service_name>/listing-*.{json,toml}

    The provider directory is the path component just before the first
    "services" component. If a provider_v1 data file sits directly in that
    directory, its "name" field wins; otherwise the directory name is used.

    Args:
        file_path: Path to the service offering or listing file

    Returns:
        Provider name if found in directory structure, None otherwise
    """
    parts = file_path.parts

    try:
        services_idx = parts.index("services")
    except ValueError:
        # No "services" component in the path.
        return None

    # A "services" component at the very start has no provider above it.
    if services_idx <= 0:
        return None

    provider_dir = parts[services_idx - 1]
    provider_path = Path(*parts[:services_idx])

    # Prefer the name declared in a provider_v1 file located directly in
    # the provider directory (not in subdirectories).
    for candidate in find_data_files(provider_path):
        if candidate.parent != provider_path:
            continue
        try:
            data, _fmt = load_data_file(candidate)
        except Exception:
            continue
        if data.get("schema") == "provider_v1":
            return data.get("name")

    # Fall back to the directory name when no provider file is found.
    return provider_dir
271
+
272
+
273
def resolve_service_name_for_listing(listing_file: Path, listing_data: dict[str, Any]) -> str | None:
    """
    Resolve the service name for a listing file.

    Rules:
    1. If service_name is defined in listing_data, return it
    2. Otherwise, return the name of the single service offering found
       alongside the listing; zero or multiple offerings yield None.

    Args:
        listing_file: Path to the listing file
        listing_data: Listing data dictionary

    Returns:
        Service name if found, None otherwise
    """
    # Rule 1: an explicit, non-empty service_name wins.
    explicit = listing_data.get("service_name")
    if explicit:
        return explicit

    # Rule 2: collect service_v1 offerings near the listing file.
    # NOTE(review): find_data_files scans recursively, so offerings in
    # subdirectories are counted too — confirm that is intended.
    offerings: list[dict[str, Any]] = []
    for candidate in find_data_files(listing_file.parent):
        try:
            data, _fmt = load_data_file(candidate)
        except Exception:
            continue
        if data.get("schema") == "service_v1":
            offerings.append(data)

    # Unambiguous only when exactly one offering exists.
    if len(offerings) == 1:
        return offerings[0].get("name")
    return None
312
+
313
+
314
+ def convert_convenience_fields_to_documents(
315
+ data: dict[str, Any],
316
+ base_path: Path,
317
+ *,
318
+ logo_field: str = "logo",
319
+ terms_field: str | None = "terms_of_service",
320
+ ) -> dict[str, Any]:
321
+ """
322
+ Convert convenience fields (logo, terms_of_service) to Document objects.
323
+
324
+ This utility function converts file paths or URLs in convenience fields
325
+ to proper Document structures that can be stored in the backend.
326
+
327
+ Args:
328
+ data: Data dictionary containing potential convenience fields
329
+ base_path: Base path for resolving relative file paths
330
+ logo_field: Name of the logo field (default: "logo")
331
+ terms_field: Name of the terms of service field (default: "terms_of_service", None to skip)
332
+
333
+ Returns:
334
+ Updated data dictionary with convenience fields converted to documents list
335
+
336
+ Example:
337
+ >>> data = {"logo": "assets/logo.png", "documents": []}
338
+ >>> result = convert_convenience_fields_to_documents(data, Path("/data/provider"))
339
+ >>> # Result will have logo removed and added to documents list
340
+ """
341
+ # Initialize documents list if not present
342
+ if "documents" not in data or data["documents"] is None:
343
+ data["documents"] = []
344
+
345
+ # Helper to determine MIME type from file path/URL
346
+ def get_mime_type(path_or_url: str) -> str:
347
+ path_lower = path_or_url.lower()
348
+ if path_lower.endswith((".png", ".jpg", ".jpeg")):
349
+ return "png" if ".png" in path_lower else "jpeg"
350
+ elif path_lower.endswith(".svg"):
351
+ return "svg"
352
+ elif path_lower.endswith(".pdf"):
353
+ return "pdf"
354
+ elif path_lower.endswith(".md"):
355
+ return "markdown"
356
+ else:
357
+ # Default to URL if it looks like a URL, otherwise markdown
358
+ return "url" if path_or_url.startswith("http") else "markdown"
359
+
360
+ # Convert logo field
361
+ if logo_field in data and data[logo_field]:
362
+ logo_value = data[logo_field]
363
+ logo_doc: dict[str, Any] = {
364
+ "title": "Company Logo",
365
+ "category": "logo",
366
+ "mime_type": get_mime_type(str(logo_value)),
367
+ "is_public": True,
368
+ }
369
+
370
+ # Check if it's a URL or file path
371
+ if str(logo_value).startswith("http"):
372
+ logo_doc["external_url"] = str(logo_value)
373
+ else:
374
+ # It's a file path - will be resolved by resolve_file_references
375
+ logo_doc["file_path"] = str(logo_value)
376
+
377
+ data["documents"].append(logo_doc)
378
+ # Remove the convenience field
379
+ del data[logo_field]
380
+
381
+ # Convert terms_of_service field if specified
382
+ if terms_field and terms_field in data and data[terms_field]:
383
+ terms_value = data[terms_field]
384
+ terms_doc: dict[str, Any] = {
385
+ "title": "Terms of Service",
386
+ "category": "terms_of_service",
387
+ "mime_type": get_mime_type(str(terms_value)),
388
+ "is_public": True,
389
+ }
390
+
391
+ # Check if it's a URL or file path
392
+ if str(terms_value).startswith("http"):
393
+ terms_doc["external_url"] = str(terms_value)
394
+ else:
395
+ # It's a file path - will be resolved by resolve_file_references
396
+ terms_doc["file_path"] = str(terms_value)
397
+
398
+ data["documents"].append(terms_doc)
399
+ # Remove the convenience field
400
+ del data[terms_field]
401
+
402
+ return data
403
+
404
+
405
+ def render_template_file(
406
+ file_path: Path,
407
+ listing: dict[str, Any] | None = None,
408
+ offering: dict[str, Any] | None = None,
409
+ provider: dict[str, Any] | None = None,
410
+ seller: dict[str, Any] | None = None,
411
+ interface: dict[str, Any] | None = None,
412
+ ) -> tuple[str, str]:
413
+ """Render a Jinja2 template file and return content and new filename.
414
+
415
+ If the file is not a template (.j2 extension), returns the file content as-is
416
+ and the original filename.
417
+
418
+ Args:
419
+ file_path: Path to the file (may or may not be a .j2 template)
420
+ listing: Listing data for template rendering (optional)
421
+ offering: Offering data for template rendering (optional)
422
+ provider: Provider data for template rendering (optional)
423
+ seller: Seller data for template rendering (optional)
424
+ interface: AccessInterface data for template rendering (optional, contains base_url, routing_key, etc.)
425
+
426
+ Returns:
427
+ Tuple of (rendered_content, new_filename_without_j2)
428
+
429
+ Raises:
430
+ Exception: If template rendering fails
431
+ """
432
+ # Read file content
433
+ with open(file_path, encoding="utf-8") as f:
434
+ file_content = f.read()
435
+
436
+ # Check if this is a Jinja2 template
437
+ is_template = file_path.name.endswith(".j2")
438
+
439
+ if is_template:
440
+ # Render the template
441
+ template = Template(file_content)
442
+ rendered_content = template.render(
443
+ listing=listing or {},
444
+ offering=offering or {},
445
+ provider=provider or {},
446
+ seller=seller or {},
447
+ interface=interface or {},
448
+ )
449
+
450
+ # Strip .j2 from filename
451
+ # Example: test.py.j2 -> test.py
452
+ new_filename = file_path.name[:-3] # Remove last 3 characters (.j2)
453
+
454
+ return rendered_content, new_filename
455
+ else:
456
+ # Not a template - return as-is
457
+ return file_content, file_path.name
458
+
459
+
460
+ def determine_interpreter(file_content: str, file_suffix: str) -> tuple[str | None, str | None]:
461
+ """
462
+ Determine the interpreter command for executing a script file.
463
+
464
+ Checks for shebang line first, then falls back to file extension-based detection.
465
+
466
+ Args:
467
+ file_content: The content of the script file
468
+ file_suffix: The file extension (e.g., ".py", ".js", ".sh")
469
+
470
+ Returns:
471
+ Tuple of (interpreter_cmd, error_message). If successful, returns (interpreter_cmd, None).
472
+ If failed, returns (None, error_message).
473
+
474
+ Examples:
475
+ >>> determine_interpreter("#!/usr/bin/env python3\\nprint('hello')", ".py")
476
+ ('python3', None)
477
+ >>> determine_interpreter("console.log('hello')", ".js")
478
+ ('node', None)
479
+ >>> determine_interpreter("curl http://example.com", ".sh")
480
+ ('bash', None)
481
+ """
482
+ import shutil
483
+
484
+ # Parse shebang to get interpreter
485
+ lines = file_content.split("\n")
486
+ interpreter_cmd = None
487
+
488
+ # First, try to parse shebang
489
+ if lines and lines[0].startswith("#!"):
490
+ shebang = lines[0][2:].strip()
491
+ if "/env " in shebang:
492
+ # e.g., #!/usr/bin/env python3
493
+ interpreter_cmd = shebang.split("/env ", 1)[1].strip().split()[0]
494
+ else:
495
+ # e.g., #!/usr/bin/python3
496
+ interpreter_cmd = shebang.split("/")[-1].split()[0]
497
+
498
+ # If no shebang found, determine interpreter based on file extension
499
+ if not interpreter_cmd:
500
+ if file_suffix == ".py":
501
+ # Try python3 first, fallback to python
502
+ if shutil.which("python3"):
503
+ interpreter_cmd = "python3"
504
+ elif shutil.which("python"):
505
+ interpreter_cmd = "python"
506
+ else:
507
+ return None, "Neither 'python3' nor 'python' found. Please install Python to run this test."
508
+ elif file_suffix == ".js":
509
+ # JavaScript files need Node.js
510
+ if shutil.which("node"):
511
+ interpreter_cmd = "node"
512
+ else:
513
+ return None, "'node' not found. Please install Node.js to run JavaScript tests."
514
+ elif file_suffix == ".sh":
515
+ # Shell scripts use bash
516
+ if shutil.which("bash"):
517
+ interpreter_cmd = "bash"
518
+ else:
519
+ return None, "'bash' not found. Please install bash to run shell script tests."
520
+ else:
521
+ # Unknown file type - try python3/python as fallback
522
+ if shutil.which("python3"):
523
+ interpreter_cmd = "python3"
524
+ elif shutil.which("python"):
525
+ interpreter_cmd = "python"
526
+ else:
527
+ return None, f"Unknown file type '{file_suffix}' and no Python interpreter found."
528
+ else:
529
+ # Shebang was found - verify the interpreter exists
530
+ if not shutil.which(interpreter_cmd):
531
+ return None, f"Interpreter '{interpreter_cmd}' from shebang not found. Please install it to run this test."
532
+
533
+ return interpreter_cmd, None