unitysvc-services 0.1.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. unitysvc_services/__init__.py +4 -0
  2. unitysvc_services/api.py +421 -0
  3. unitysvc_services/cli.py +23 -0
  4. unitysvc_services/format_data.py +140 -0
  5. unitysvc_services/interactive_prompt.py +1132 -0
  6. unitysvc_services/list.py +216 -0
  7. unitysvc_services/models/__init__.py +71 -0
  8. unitysvc_services/models/base.py +1375 -0
  9. unitysvc_services/models/listing_data.py +118 -0
  10. unitysvc_services/models/listing_v1.py +56 -0
  11. unitysvc_services/models/provider_data.py +79 -0
  12. unitysvc_services/models/provider_v1.py +54 -0
  13. unitysvc_services/models/seller_data.py +120 -0
  14. unitysvc_services/models/seller_v1.py +42 -0
  15. unitysvc_services/models/service_data.py +114 -0
  16. unitysvc_services/models/service_v1.py +81 -0
  17. unitysvc_services/populate.py +207 -0
  18. unitysvc_services/publisher.py +1628 -0
  19. unitysvc_services/py.typed +0 -0
  20. unitysvc_services/query.py +688 -0
  21. unitysvc_services/scaffold.py +1103 -0
  22. unitysvc_services/schema/base.json +777 -0
  23. unitysvc_services/schema/listing_v1.json +1286 -0
  24. unitysvc_services/schema/provider_v1.json +952 -0
  25. unitysvc_services/schema/seller_v1.json +379 -0
  26. unitysvc_services/schema/service_v1.json +1306 -0
  27. unitysvc_services/test.py +965 -0
  28. unitysvc_services/unpublisher.py +505 -0
  29. unitysvc_services/update.py +287 -0
  30. unitysvc_services/utils.py +533 -0
  31. unitysvc_services/validator.py +731 -0
  32. unitysvc_services-0.1.24.dist-info/METADATA +184 -0
  33. unitysvc_services-0.1.24.dist-info/RECORD +37 -0
  34. unitysvc_services-0.1.24.dist-info/WHEEL +5 -0
  35. unitysvc_services-0.1.24.dist-info/entry_points.txt +3 -0
  36. unitysvc_services-0.1.24.dist-info/licenses/LICENSE +21 -0
  37. unitysvc_services-0.1.24.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1103 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Initialize new provider or service data structure.
4
+
5
+ This module provides functions to create new directory structures for providers or services
6
+ by copying from existing examples or data directories and updating the name fields.
7
+ """
8
+
9
+ import json
10
+ import shutil
11
+ import sys
12
+ import tomllib # Built-in since Python 3.11
13
+ from datetime import datetime
14
+ from pathlib import Path
15
+ from typing import Any
16
+
17
+ import typer
18
+ from rich.console import Console
19
+
20
+ from .interactive_prompt import (
21
+ LISTING_GROUPS,
22
+ OFFERING_GROUPS,
23
+ PROVIDER_GROUPS,
24
+ SELLER_GROUPS,
25
+ PromptEngine,
26
+ create_listing_data,
27
+ create_offering_data,
28
+ create_provider_data,
29
+ create_seller_data,
30
+ )
31
+ from .utils import load_data_file
32
+
33
+ try:
34
+ import tomli_w
35
+
36
+ TOML_WRITE_AVAILABLE = True
37
+ except ImportError:
38
+ TOML_WRITE_AVAILABLE = False
39
+
40
+ TOML_AVAILABLE = TOML_WRITE_AVAILABLE # For backward compatibility
41
+
42
+ # YAML support has been removed
43
+ YAML_AVAILABLE = False
44
+
45
+
46
+ # Constants
47
+ DATA_FILE_EXTENSIONS = [".json", ".toml"]
48
+ DEFAULT_FORMAT = "toml"
49
+
50
+
51
+ def find_source_directory(source_name: str, base_dirs: list[Path]) -> Path | None:
52
+ """Find the source directory in the given base directories."""
53
+ # Handle absolute paths (starting with /)
54
+ if source_name.startswith("/"):
55
+ # Remove leading slash and treat as relative path from base directories
56
+ relative_path = source_name.lstrip("/")
57
+ for base_dir in base_dirs:
58
+ if not base_dir.exists():
59
+ continue
60
+ source_path = base_dir / relative_path
61
+ if source_path.exists() and source_path.is_dir():
62
+ return source_path
63
+ return None
64
+
65
+ # Handle relative paths (existing behavior)
66
+ for base_dir in base_dirs:
67
+ if not base_dir.exists():
68
+ continue
69
+
70
+ # Look for exact match first
71
+ source_path = base_dir / source_name
72
+ if source_path.exists() and source_path.is_dir():
73
+ return source_path
74
+
75
+ # Look for nested directories (e.g., provider1/service1)
76
+ for provider_dir in base_dir.iterdir():
77
+ if provider_dir.is_dir() and provider_dir.name != "README.md":
78
+ nested_path = provider_dir / source_name
79
+ if nested_path.exists() and nested_path.is_dir():
80
+ return nested_path
81
+
82
+ return None
83
+
84
+
85
def save_data_file(file_path: Path, data: dict[str, Any]) -> None:
    """Serialize ``data`` to ``file_path`` as JSON or TOML, chosen by extension.

    Raises ImportError when TOML output is requested but tomli_w is missing,
    and ValueError for any other extension.
    """
    extension = file_path.suffix.lower()

    if extension == ".json":
        with open(file_path, "w", encoding="utf-8") as handle:
            json.dump(data, handle, indent=2)
            handle.write("\n")
    elif extension == ".toml":
        if not TOML_WRITE_AVAILABLE:
            raise ImportError("tomli_w is required to write TOML files. Install with: pip install tomli-w")
        # tomli_w writes bytes, hence the binary mode.
        with open(file_path, "wb") as handle:
            tomli_w.dump(data, handle)
    else:
        raise ValueError(f"Unsupported file format: {extension}")
100
+
101
+
102
def detect_source_format(source_dir: Path) -> str:
    """Return "json" or "toml" depending on which format dominates ``source_dir``.

    All files are counted recursively; when no data files are found the
    default is "toml".  Ties resolve to "json" (first key in the counter).
    """
    counts = {"json": 0, "toml": 0}

    for entry in source_dir.rglob("*"):
        if not entry.is_file():
            continue
        ext = entry.suffix.lower()
        if ext == ".json":
            counts["json"] += 1
        elif ext == ".toml":
            counts["toml"] += 1

    if not any(counts.values()):
        return "toml"

    # max() keeps insertion order on ties, so "json" wins a draw.
    return max(counts, key=counts.get)
120
+
121
+
122
def normalize_name(name: str) -> str:
    """Return *name* in directory form: every underscore becomes a hyphen."""
    return "-".join(name.split("_"))
125
+
126
+
127
def discover_schemas(schema_dir: Path) -> dict[str, dict[str, Any]]:
    """Load every ``*.json`` schema in ``schema_dir``, keyed by file stem.

    A missing directory yields an empty mapping; unreadable schema files are
    skipped with a warning on stderr.
    """
    found: dict[str, dict[str, Any]] = {}
    if not schema_dir.exists():
        return found

    for path in schema_dir.glob("*.json"):
        try:
            with open(path, encoding="utf-8") as handle:
                found[path.stem] = json.load(handle)
        except Exception as exc:
            print(f"Warning: Could not load schema {path}: {exc}", file=sys.stderr)

    return found
143
+
144
+
145
def generate_example_value(property_def: dict, property_name: str, schema_name: str) -> Any:
    """Generate a placeholder value for one JSON-schema property.

    Resolution order: explicit ``default`` -> first non-null ``anyOf``
    alternative (recursively) -> ``$ref`` heuristics -> by declared ``type``.
    Returns ``None`` when no sensible example can be produced.

    Args:
        property_def: The JSON-schema definition of a single property.
        property_name: Name of the property (drives name-based heuristics).
        schema_name: Name of the enclosing schema (used in description text).
    """
    # Explicit defaults always win.
    if "default" in property_def:
        return property_def["default"]

    # Union types: recurse into the first non-null alternative.
    if "anyOf" in property_def:
        for option in property_def["anyOf"]:
            if option.get("type") != "null":
                return generate_example_value(option, property_name, schema_name)
        return None

    # $ref to local definitions: the full schema context is not available
    # here, so only the known CategoryEnum case is resolved heuristically.
    if "$ref" in property_def and property_def["$ref"].startswith("#/$defs/"):
        if "Category" in property_def["$ref"]:
            return "AI"
        return "reference_value"

    prop_type = property_def.get("type", "string")

    if prop_type == "string":
        format_type = property_def.get("format")
        if format_type == "email":
            return "contact@example.com"
        elif format_type == "uri":
            return "https://example.com"
        elif format_type == "date-time":
            # BUGFIX: previously returned naive *local* time with a "Z"
            # suffix, falsely claiming UTC.  Emit a real UTC timestamp.
            from datetime import timezone  # file-level import only brings in datetime

            return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
        elif property_name in [
            "terms_of_service",
            "documentation",
            "api_documentation",
            "code_example",
        ]:
            # These are likely file references or URLs
            file_mappings = {
                "terms_of_service": "terms-of-service.md",
                "code_example": "code-example.md",
                "api_documentation": "api-docs.md",
            }
            return file_mappings.get(property_name, "https://docs.example.com")
        else:
            # Generate meaningful example based on property name
            if property_name == "name":
                return "placeholder_name"  # Will be replaced with actual name
            elif property_name == "description":
                return f"Description for {schema_name.replace('_', ' ')}"
            elif "email" in property_name.lower():
                return "contact@example.com"
            elif "homepage" in property_name.lower():
                return "https://example.com"
            else:
                return f"Example {property_name}"

    elif prop_type == "object":
        additional_props = property_def.get("additionalProperties")
        if additional_props is True:
            # Open-ended object - provide a shaped example for known names.
            if property_name == "access_method":
                return {
                    "type": "REST_API",
                    "authentication": "API_KEY",
                    "endpoint": "https://api.example.com",
                }
            else:
                return {"example_key": "example_value"}
        elif isinstance(additional_props, dict) and additional_props.get("type") == "string":
            # String-valued map - show a couple of key/value pairs.
            return {
                "feature1": "Feature description 1",
                "feature2": "Feature description 2",
            }
        return {}

    elif prop_type == "array":
        items_def = property_def.get("items", {})
        if items_def.get("type") == "object":
            # Generate an example array holding one object built recursively
            # from the item schema's properties.
            example_obj = {}
            if "properties" in items_def:
                for item_prop, item_def in items_def["properties"].items():
                    example_obj[item_prop] = generate_example_value(item_def, item_prop, schema_name)
            return [example_obj]
        return []

    elif prop_type == "number" or prop_type == "integer":
        return 1

    elif prop_type == "boolean":
        return True

    return None
243
+
244
+
245
def generate_data_from_schema(schema_def: dict, schema_name: str, dir_name: str) -> dict[str, Any]:
    """Build a minimal example payload for ``schema_def``.

    The "name" field is derived from ``dir_name`` (normalized for service
    schemas), "schema" is pinned to ``schema_name``, and ``None`` example
    values are dropped unless the property is required, in which case a
    placeholder string stands in.
    """
    required_fields = schema_def.get("required", [])
    payload: dict[str, Any] = {}

    for prop_name, prop_def in schema_def.get("properties", {}).items():
        example = generate_example_value(prop_def, prop_name, schema_name)

        if prop_name == "name":
            # Service names must match their (normalized) directory name.
            payload[prop_name] = normalize_name(dir_name) if "service" in schema_name else dir_name
        elif prop_name == "schema":
            payload[prop_name] = schema_name
        elif example is not None:
            # None values are omitted to avoid TOML serialization issues...
            payload[prop_name] = example
        elif prop_name in required_fields:
            # ...unless the field is required, then substitute a placeholder.
            payload[prop_name] = f"placeholder_{prop_name}"

    return payload
272
+
273
+
274
def get_data_filename_from_schema(schema_name: str, format_type: str) -> str:
    """Map a schema name to its data filename in ``format_type``.

    Provider/service schemas use fixed filenames; any other schema derives
    its filename from the schema name with the version suffix removed and
    underscores hyphenated.
    """
    import re  # local import: module does not import re at file level

    if "provider" in schema_name:
        return f"provider.{format_type}"
    elif "service" in schema_name:
        return f"service.{format_type}"
    else:
        # BUGFIX: the old code used schema_name.replace("_v", ""), which
        # turned "listing_v1" into "listing1".  Strip a trailing version
        # suffix (e.g. "_v1") instead, then hyphenate remaining underscores.
        base_name = re.sub(r"_v\d+$", "", schema_name).replace("_", "-")
        return f"{base_name}.{format_type}"
284
+
285
+
286
def create_additional_files_from_schema(dest_dir: Path, schema_def: dict, schema_name: str, dir_name: str) -> list[str]:
    """Create placeholder markdown files referenced by the schema.

    Scans string-typed properties named terms_of_service / code_example /
    api_documentation; when their example value is a local ``.md`` reference
    (not a URL) a placeholder file is written under ``dest_dir``.  Returns
    the list of created filenames.
    """
    doc_properties = [
        "terms_of_service",
        "code_example",
        "api_documentation",
    ]
    created_files: list[str] = []

    for prop_name, prop_def in schema_def.get("properties", {}).items():
        # A property counts as string-typed when declared directly or when
        # any of its anyOf alternatives is a string.
        is_string_type = prop_def.get("type") == "string" or any(
            option.get("type") == "string" for option in prop_def.get("anyOf", [])
        )
        if not is_string_type or prop_name not in doc_properties:
            continue

        filename = generate_example_value(prop_def, prop_name, schema_name)

        # Only materialize local .md references, never URLs.
        if not (filename and ".md" in str(filename) and not filename.startswith("http")):
            continue

        file_path = dest_dir / filename
        if prop_name == "terms_of_service":
            content = f"# Terms of Service for {dir_name}\n\nPlaceholder terms of service document.\n"
        elif prop_name == "code_example":
            content = f"# Code Example for {dir_name}\n\nPlaceholder code example.\n"
        elif prop_name == "api_documentation":
            content = f"# API Documentation for {dir_name}\n\nPlaceholder API documentation.\n"
        else:
            # Kept for parity with the generic fallback, though the filter
            # above currently restricts prop_name to the three cases.
            content = f"# {prop_name.replace('_', ' ').title()} for {dir_name}\n\nPlaceholder content.\n"

        file_path.write_text(content, encoding="utf-8")
        created_files.append(filename)

    return created_files
329
+
330
+
331
def handle_destination_directory(dest_dir: Path, force: bool = False) -> None:
    """Ensure ``dest_dir`` exists; with ``force`` an existing tree is wiped first.

    Without ``force`` an existing directory is left untouched: a notice is
    printed and the function returns without modifying anything.
    """
    if dest_dir.exists():
        if not force:
            print(f"Skipping existing directory: {dest_dir}")
            return
        print(f"Removing existing directory: {dest_dir}")
        shutil.rmtree(dest_dir)

    dest_dir.mkdir(parents=True, exist_ok=True)
342
+
343
+
344
def update_string_references(obj, old_values: set[str], new_values: dict[str, str], context: str = "") -> bool:
    """Rewrite matching strings in-place throughout a nested dict/list tree.

    Args:
        obj: The object to update (dict or list)
        old_values: Set of old values to look for
        new_values: Dict mapping old values to new values
        context: Context for logging (optional)

    Returns:
        True if any updates were made
    """
    changed = False

    # Normalize dicts and lists to (key, value) pairs; anything else is a
    # leaf that cannot contain references.
    if isinstance(obj, dict):
        entries = list(obj.items())
    elif isinstance(obj, list):
        entries = list(enumerate(obj))
    else:
        return False

    for key, value in entries:
        if isinstance(value, str) and value in old_values:
            replacement = new_values[value]
            print(f" Converting{context}: '{value}' -> '{replacement}'")
            obj[key] = replacement
            changed = True
        elif update_string_references(value, old_values, new_values, context):
            changed = True

    return changed
380
+
381
+
382
def create_schema_based_structure(
    dest_dir: Path,
    dir_name: str,
    schema_name: str,
    format_type: str = DEFAULT_FORMAT,
    force: bool = False,
) -> None:
    """Create a directory populated with minimal valid data for ``schema_name``.

    Looks up the schema JSON shipped with this package, generates example
    data plus any referenced placeholder files, and writes the data file in
    ``format_type``.  Exits the process (code 1) on an unknown schema or a
    generation failure.
    """
    # Check if directory already exists before processing
    if dest_dir.exists() and not force:
        print(f"Skipping existing directory: {dest_dir}")
        return

    handle_destination_directory(dest_dir, force)

    # BUGFIX: the schemas ship inside this package (unitysvc_services/schema),
    # i.e. alongside this module.  The old Path(__file__).parent.parent
    # pointed one level above the package and never found them when installed.
    schema_dir = Path(__file__).parent / "schema"
    available_schemas = discover_schemas(schema_dir)

    if schema_name not in available_schemas:
        schema_list = ", ".join(available_schemas.keys()) if available_schemas else "none"
        print(
            f"Error: Unknown schema '{schema_name}'. Available schemas: {schema_list}",
            file=sys.stderr,
        )
        sys.exit(1)

    schema_def = available_schemas[schema_name]

    # Generate data based on schema definition
    try:
        data = generate_data_from_schema(schema_def, schema_name, dir_name)

        # Create additional files based on schema requirements
        created_files = create_additional_files_from_schema(dest_dir, schema_def, schema_name, dir_name)

        # Save the data file
        data_filename = get_data_filename_from_schema(schema_name, format_type)
        data_path = dest_dir / data_filename
        save_data_file(data_path, data)

        # Print summary
        print(f"Created {schema_name} dataset: {dest_dir}")
        for created_file in created_files:
            print(f" Added: {created_file}")
        print(f" Added: {data_path.name}")

    except Exception as e:
        print(f"Error generating data from schema: {e}", file=sys.stderr)
        sys.exit(1)
433
+
434
+
435
def create_empty_structure(
    dest_dir: Path,
    dir_name: str,
    format_type: str = DEFAULT_FORMAT,
    force: bool = False,
) -> None:
    """Create a bare dataset directory holding a single stub data file.

    The stub contains only a "name" field (set to ``dir_name``) and a
    placeholder "schema" entry, serialized as ``data.<format_type>``.
    """
    # Leave an existing directory alone unless force is requested.
    if dest_dir.exists() and not force:
        print(f"Skipping existing directory: {dest_dir}")
        return

    handle_destination_directory(dest_dir, force)

    # Write the stub payload in the requested format.
    stub_path = dest_dir / f"data.{format_type}"
    save_data_file(stub_path, {"name": dir_name, "schema": "scheme"})

    print(f"Created empty directory: {dest_dir}")
    print(f" Added: {stub_path.name}")
458
+
459
+
460
def copy_and_update_structure(
    source_dir: Path,
    dest_dir: Path,
    new_name: str,
    copy_data: bool = True,
    project_root: Path | None = None,
    format_type: str = DEFAULT_FORMAT,
    force: bool = False,
) -> None:
    """Copy source directory to destination and update names.

    Args:
        source_dir: Existing dataset directory to copy from.
        dest_dir: Destination directory to create.
        new_name: Name written into copied data files (normalized for
            service files) and used for service-directory renaming.
        copy_data: When False, .md files are NOT copied and data-file
            references to them are rewritten as absolute paths into the
            source tree.
        project_root: Root used to compute those absolute reference paths;
            falls back to the bare source directory name when not given.
        format_type: Output data-file format ("json" or "toml").
        force: Overwrite an existing destination directory.
    """
    # Check if directory already exists before processing
    if dest_dir.exists() and not force:
        print(f"Skipping existing directory: {dest_dir}")
        return

    handle_destination_directory(dest_dir, force)

    print(f"Copying from: {source_dir}")
    print(f"Creating: {dest_dir}")

    def process_directory(source_path: Path, dest_path: Path, relative_path: str = "") -> None:
        """Recursively process directory contents.

        Closes over new_name/copy_data/format_type from the outer call.
        """
        dest_path.mkdir(parents=True, exist_ok=True)

        # Collect .md files in current source directory for reference conversion
        md_files_in_dir = {f.name for f in source_path.iterdir() if f.is_file() and f.suffix == ".md"}

        for item in source_path.iterdir():
            source_file = source_path / item.name
            dest_file = dest_path / item.name

            if source_file.is_dir():
                # Recursively process subdirectory
                new_relative = f"{relative_path}/{item.name}" if relative_path else item.name
                process_directory(source_file, dest_file, new_relative)
            elif source_file.is_file():
                # Handle files based on type
                if source_file.suffix == ".md":
                    # 1. Copy .md files only if copy_data is True
                    if copy_data:
                        shutil.copy2(source_file, dest_file)
                elif source_file.suffix.lower() in DATA_FILE_EXTENSIONS:
                    # 2. Process data files
                    try:
                        data, _ = load_data_file(source_file)

                        # Update name field to match directory name
                        if "name" in data:
                            if source_file.name.startswith("service."):
                                # Service file - use normalized name (matches the directory it will be in)
                                data["name"] = normalize_name(new_name)
                            else:
                                # Provider file - use the new_name
                                data["name"] = new_name

                        # Convert file references to absolute paths if not copying data
                        if not copy_data:
                            # Create mapping of file references to absolute paths
                            # Use source directory path, not destination path
                            # Calculate the path relative to the data directory
                            if project_root and "example_data" in str(source_dir):
                                # Source is in example_data, get relative path from example_data
                                source_relative_to_base = source_dir.relative_to(project_root / "example_data")
                            elif project_root and "data" in str(source_dir):
                                # Source is in data directory, get relative path from data
                                source_relative_to_base = source_dir.relative_to(project_root / "data")
                            else:
                                # Fallback: use the source directory name
                                source_relative_to_base = Path(source_dir.name)

                            if relative_path:
                                # For nested directories, append the relative path
                                source_path_with_relative = source_relative_to_base / relative_path
                            else:
                                # For root level, use just the source path
                                source_path_with_relative = source_relative_to_base

                            path_prefix = f"/{source_path_with_relative}"
                            new_values = {md_file: f"{path_prefix}/{md_file}" for md_file in md_files_in_dir}

                            update_string_references(data, md_files_in_dir, new_values, " file reference")

                        # Save the updated data file in the specified format
                        # Determine the new file path with the correct extension.
                        # NOTE(review): this condition rewrites the extension in
                        # every case except json-source -> json-target.
                        if format_type != "json" or dest_file.suffix.lower() != ".json":
                            # Change extension to match the format
                            dest_file_with_format = dest_file.parent / f"{dest_file.stem}.{format_type}"
                            print(f" Converting format: {dest_file.name} -> {dest_file_with_format.name}")
                        else:
                            dest_file_with_format = dest_file

                        save_data_file(dest_file_with_format, data)

                    except Exception as e:
                        print(
                            f" Warning: Could not process {source_file}: {e}",
                            file=sys.stderr,
                        )
                        # Copy the file as-is if we can't process it
                        shutil.copy2(source_file, dest_file)
                else:
                    # Copy other files as-is
                    shutil.copy2(source_file, dest_file)

    # Process the entire directory structure
    process_directory(source_dir, dest_dir)

    # Rename service directories to match normalized names and update any absolute paths
    normalized_name = normalize_name(new_name)
    for item in dest_dir.iterdir():
        # A "service directory" is any subdirectory holding a service.<ext> file.
        if (
            item.is_dir()
            and any((item / f"service{ext}").exists() for ext in DATA_FILE_EXTENSIONS)
            and item.name != normalized_name
        ):
            old_name = item.name
            new_path = dest_dir / normalized_name
            print(f" Renaming service directory: {old_name} -> {normalized_name}")
            item.rename(new_path)

            # Update the name field in the service data file to match the new directory name
            for ext_with_dot in DATA_FILE_EXTENSIONS:
                ext = ext_with_dot.lstrip(".")
                service_file = new_path / f"service.{ext}"
                if service_file.exists():
                    try:
                        data, _ = load_data_file(service_file)
                        if "name" in data:
                            print(
                                f" Updating service name to match directory: '{data['name']}' -> '{normalized_name}'"
                            )
                            data["name"] = normalized_name
                            save_data_file(service_file, data)
                    except Exception as e:
                        print(
                            f" Warning: Could not update service name in {service_file}: {e}",
                            file=sys.stderr,
                        )

            # Update any absolute paths that reference the old directory name
            if not copy_data:

                def fix_renamed_paths_in_files(old_dir_name: str, new_dir_name: str) -> None:
                    # Rewrite "/<old_dir_name>/..." references in every data
                    # file under dest_dir to use the renamed directory.
                    data_files = [file for ext in DATA_FILE_EXTENSIONS for file in dest_dir.glob(f"**/*{ext}")]
                    for data_file in data_files:
                        try:
                            data, _ = load_data_file(data_file)

                            # Find all strings that start with the old directory path
                            def collect_old_paths(obj, old_paths, new_path_mappings):
                                if isinstance(obj, dict):
                                    for value in obj.values():
                                        if isinstance(value, str) and value.startswith(f"/{old_dir_name}/"):
                                            old_paths.add(value)
                                            new_path_mappings[value] = value.replace(
                                                f"/{old_dir_name}/",
                                                f"/{new_dir_name}/",
                                                1,
                                            )
                                        else:
                                            collect_old_paths(value, old_paths, new_path_mappings)
                                elif isinstance(obj, list):
                                    for item in obj:
                                        if isinstance(item, str) and item.startswith(f"/{old_dir_name}/"):
                                            old_paths.add(item)
                                            new_path_mappings[item] = item.replace(
                                                f"/{old_dir_name}/",
                                                f"/{new_dir_name}/",
                                                1,
                                            )
                                        else:
                                            collect_old_paths(item, old_paths, new_path_mappings)

                            old_paths: set[str] = set()
                            new_path_mappings: dict[str, str] = {}
                            collect_old_paths(data, old_paths, new_path_mappings)

                            if old_paths:
                                updated = update_string_references(
                                    data,
                                    old_paths,
                                    new_path_mappings,
                                    " path after rename",
                                )
                                if updated:
                                    save_data_file(data_file, data)

                        except Exception as e:
                            print(
                                f" Warning: Could not update paths in {data_file}: {e}",
                                file=sys.stderr,
                            )

                fix_renamed_paths_in_files(old_name, normalized_name)

    print(f"✓ Successfully created '{dest_dir}' from '{source_dir}'")
656
+
657
+
658
# Typer CLI app for init commands (mounted as a sub-command group by cli.py)
app = typer.Typer(help="Initialize new data files from schemas")
# Shared Rich console used for all command output in this module.
console = Console()
661
+
662
+
663
@app.command("offering")
def init_offering(
    name: str = typer.Argument(..., help="Name for the new service offering"),
    output_dir: Path = typer.Option(
        Path.cwd() / "data",
        "--output-dir",
        "-o",
        help="Output directory (default: ./data)",
    ),
    format: str = typer.Option(
        "json",
        "--format",
        "-f",
        help="Output format: json or toml",
    ),
    source: str | None = typer.Option(
        None,
        "--source",
        "-s",
        help="Copy from existing service offering directory",
    ),
):
    """Create a new service offering skeleton.

    With --source an existing offering directory is copied and renamed;
    otherwise all fields are gathered interactively via PromptEngine.
    """
    if source:
        # Copy mode: clone an existing offering directory under the new name.
        base_dirs = [Path.cwd() / "data", Path.cwd()]
        source_dir = find_source_directory(source, base_dirs)
        if not source_dir:
            console.print(
                f"[red]✗[/red] Source directory not found: {source}",
                style="bold red",
            )
            raise typer.Exit(code=1)

        console.print(f"[blue]Copying from:[/blue] {source_dir}")
        console.print(f"[blue]Creating:[/blue] {name}")
        console.print(f"[blue]Format:[/blue] {format}\n")

        try:
            copy_and_update_structure(
                source_dir=source_dir,
                dest_dir=output_dir / name,
                new_name=name,
                copy_data=False,
                project_root=None,
                format_type=format,
                force=False,
            )
            console.print(f"[green]✓[/green] Service offering created: {output_dir / name}")
        except Exception as e:
            console.print(
                f"[red]✗[/red] Failed to create service offering: {e}",
                style="bold red",
            )
            raise typer.Exit(code=1)
    else:
        # Interactive mode - prompt for values
        console.print("[bold cyan]Creating service offering interactively[/bold cyan]")
        console.print(f"[dim]Output directory:[/dim] {output_dir / name}")
        console.print(f"[dim]Format:[/dim] {format}\n")

        try:
            # Create directory structure
            offering_dir = output_dir / name
            offering_dir.mkdir(parents=True, exist_ok=True)

            # Create prompt engine
            engine = PromptEngine(OFFERING_GROUPS)

            # Prompt for all fields (pass name if provided via CLI)
            user_input = engine.prompt_all(context={"name": name})

            # Create offering data structure (pass offering_dir for document file validation)
            offering_data = create_offering_data(user_input, offering_dir=offering_dir)

            # Write service file
            service_file = offering_dir / f"service.{format}"
            if format == "json":
                with open(service_file, "w") as f:
                    json.dump(offering_data, f, indent=2)
                    f.write("\n")
            else:  # toml
                if not TOML_WRITE_AVAILABLE:
                    console.print(
                        "[red]✗[/red] TOML write support not available. Install tomli_w.",
                        style="bold red",
                    )
                    raise typer.Exit(code=1)
                with open(service_file, "wb") as f:
                    tomli_w.dump(offering_data, f)

            console.print(f"\n[green]✓[/green] Service offering created: {offering_dir}")
            console.print(f" Added: {service_file.name}")
        except typer.Abort:
            console.print("[yellow]Service offering creation cancelled[/yellow]")
            raise typer.Exit(code=1)
        except typer.Exit:
            # BUGFIX: typer.Exit is an Exception subclass, so the deliberate
            # exit raised above (missing tomli_w) was previously swallowed by
            # the generic handler and re-reported as an unexpected failure.
            raise
        except Exception as e:
            console.print(
                f"[red]✗[/red] Failed to create service offering: {e}",
                style="bold red",
            )
            raise typer.Exit(code=1)
766
+
767
+
768
@app.command("listing")
def init_listing(
    name: str = typer.Argument(..., help="Name for the new service listing"),
    output_dir: Path = typer.Option(
        Path.cwd() / "data",
        "--output-dir",
        "-o",
        help="Output directory (default: ./data)",
    ),
    format: str = typer.Option(
        "json",
        "--format",
        "-f",
        help="Output format: json or toml",
    ),
    source: str | None = typer.Option(
        None,
        "--source",
        "-s",
        help="Copy from existing service listing directory",
    ),
):
    """Create a new service listing skeleton.

    With --source an existing listing directory is copied and renamed;
    otherwise all fields are gathered interactively via PromptEngine.
    """
    if source:
        # Copy mode: clone an existing listing directory under the new name.
        base_dirs = [Path.cwd() / "data", Path.cwd()]
        source_dir = find_source_directory(source, base_dirs)
        if not source_dir:
            console.print(
                f"[red]✗[/red] Source directory not found: {source}",
                style="bold red",
            )
            raise typer.Exit(code=1)

        console.print(f"[blue]Copying from:[/blue] {source_dir}")
        console.print(f"[blue]Creating:[/blue] {name}")
        console.print(f"[blue]Format:[/blue] {format}\n")

        try:
            copy_and_update_structure(
                source_dir=source_dir,
                dest_dir=output_dir / name,
                new_name=name,
                copy_data=False,
                project_root=None,
                format_type=format,
                force=False,
            )
            console.print(f"[green]✓[/green] Service listing created: {output_dir / name}")
        except Exception as e:
            console.print(
                f"[red]✗[/red] Failed to create service listing: {e}",
                style="bold red",
            )
            raise typer.Exit(code=1)
    else:
        # Interactive mode - prompt for values
        console.print("[bold cyan]Creating service listing interactively[/bold cyan]")
        console.print(f"[dim]Output directory:[/dim] {output_dir / name}")
        console.print(f"[dim]Format:[/dim] {format}\n")

        try:
            # Create directory structure
            listing_dir = output_dir / name
            listing_dir.mkdir(parents=True, exist_ok=True)

            # Create prompt engine
            engine = PromptEngine(LISTING_GROUPS)

            # Prompt for all fields (pass name if provided via CLI)
            user_input = engine.prompt_all(context={"name": name})

            # Create listing data structure (pass listing_dir for document file validation)
            listing_data = create_listing_data(user_input, listing_dir=listing_dir)

            # Write listing file
            listing_file = listing_dir / f"listing.{format}"
            if format == "json":
                with open(listing_file, "w") as f:
                    json.dump(listing_data, f, indent=2)
                    f.write("\n")
            else:  # toml
                if not TOML_WRITE_AVAILABLE:
                    console.print(
                        "[red]✗[/red] TOML write support not available. Install tomli_w.",
                        style="bold red",
                    )
                    raise typer.Exit(code=1)
                with open(listing_file, "wb") as f:
                    tomli_w.dump(listing_data, f)

            console.print(f"\n[green]✓[/green] Service listing created: {listing_dir}")
            console.print(f" Added: {listing_file.name}")
        except typer.Abort:
            console.print("[yellow]Service listing creation cancelled[/yellow]")
            raise typer.Exit(code=1)
        except typer.Exit:
            # BUGFIX: typer.Exit is an Exception subclass, so the deliberate
            # exit raised above (missing tomli_w) was previously swallowed by
            # the generic handler and re-reported as an unexpected failure.
            raise
        except Exception as e:
            console.print(
                f"[red]✗[/red] Failed to create service listing: {e}",
                style="bold red",
            )
            raise typer.Exit(code=1)
871
+
872
+
873
@app.command("provider")
def init_provider(
    name: str = typer.Argument(..., help="Name for the new provider"),
    output_dir: Path = typer.Option(
        Path.cwd() / "data",
        "--output-dir",
        "-o",
        help="Output directory (default: ./data)",
    ),
    format: str = typer.Option(
        "json",
        "--format",
        "-f",
        help="Output format: json or toml",
    ),
    source: str | None = typer.Option(
        None,
        "--source",
        "-s",
        help="Copy from existing provider directory",
    ),
):
    """Create a new provider skeleton.

    Two modes:

    - Copy mode (``--source``): duplicate an existing provider directory
      into ``output_dir / name`` with the new name substituted.
    - Interactive mode (default): prompt for every field and write a single
      ``provider.{format}`` file under ``output_dir / name``.

    Exits with code 1 on any failure (source not found, TOML writer missing,
    prompt aborted, or an unexpected error while writing).
    """
    # Reject unsupported formats up front; previously any value other than
    # "json" silently fell through to the TOML writer branch.
    if format not in ("json", "toml"):
        console.print(f"[red]✗[/red] Unsupported format: {format}", style="bold red")
        raise typer.Exit(code=1)

    if source:
        # Copy mode: resolve the source directory relative to ./data, then cwd.
        base_dirs = [Path.cwd() / "data", Path.cwd()]
        source_dir = find_source_directory(source, base_dirs)
        if not source_dir:
            console.print(
                f"[red]✗[/red] Source directory not found: {source}",
                style="bold red",
            )
            raise typer.Exit(code=1)

        console.print(f"[blue]Copying from:[/blue] {source_dir}")
        console.print(f"[blue]Creating:[/blue] {name}")
        console.print(f"[blue]Format:[/blue] {format}\n")

        try:
            copy_and_update_structure(
                source_dir=source_dir,
                dest_dir=output_dir / name,
                new_name=name,
                copy_data=False,
                project_root=None,
                format_type=format,
                force=False,
            )
            console.print(f"[green]✓[/green] Provider created: {output_dir / name}")
        except Exception as e:
            console.print(f"[red]✗[/red] Failed to create provider: {e}", style="bold red")
            raise typer.Exit(code=1)
    else:
        # Interactive mode: prompt for all fields, then write the data file.
        console.print("[bold cyan]Creating provider interactively[/bold cyan]")
        console.print(f"[dim]Output directory:[/dim] {output_dir / name}")
        console.print(f"[dim]Format:[/dim] {format}\n")

        try:
            engine = PromptEngine(PROVIDER_GROUPS)

            # Pre-seed the prompt context with the name supplied on the CLI.
            user_input = engine.prompt_all(context={"name": name})

            provider_data = create_provider_data(user_input)

            provider_dir = output_dir / name
            provider_dir.mkdir(parents=True, exist_ok=True)

            provider_file = provider_dir / f"provider.{format}"
            if format == "json":
                with open(provider_file, "w") as f:
                    json.dump(provider_data, f, indent=2)
                    f.write("\n")
            else:  # toml
                if not TOML_WRITE_AVAILABLE:
                    console.print(
                        "[red]✗[/red] TOML write support not available. Install tomli_w.",
                        style="bold red",
                    )
                    raise typer.Exit(code=1)
                with open(provider_file, "wb") as f:
                    tomli_w.dump(provider_data, f)

            console.print(f"\n[green]✓[/green] Provider created: {provider_dir}")
            console.print(f"  Added: {provider_file.name}")
        except typer.Exit:
            # typer.Exit is an Exception subclass (via click's Exit/RuntimeError);
            # re-raise so the generic handler below does not swallow the exit
            # raised for the missing-TOML-writer case and print a bogus
            # "Failed to create provider" message on top of the real one.
            raise
        except typer.Abort:
            console.print("[yellow]Provider creation cancelled[/yellow]")
            raise typer.Exit(code=1)
        except Exception as e:
            console.print(f"[red]✗[/red] Failed to create provider: {e}", style="bold red")
            raise typer.Exit(code=1)
970
+
971
+
972
@app.command("seller")
def init_seller(
    name: str = typer.Argument(..., help="Name for the new seller"),
    output_dir: Path = typer.Option(
        Path.cwd() / "data",
        "--output-dir",
        "-o",
        help="Output directory (default: ./data)",
    ),
    format: str = typer.Option(
        "json",
        "--format",
        "-f",
        help="Output format: json or toml",
    ),
    source: str | None = typer.Option(
        None,
        "--source",
        "-s",
        help="Copy from existing seller file",
    ),
):
    """Create a new seller skeleton.

    Two modes:

    - Copy mode (``--source``): load an existing seller file (the source may
      name the file directly or a directory containing ``seller.json`` /
      ``seller.toml``), replace its ``name``, and write it to
      ``output_dir / seller.{format}``.
    - Interactive mode (default): prompt for every field and write
      ``output_dir / seller.{format}``.

    Exits with code 1 on any failure (source not found, TOML writer missing,
    prompt aborted, or an unexpected error while writing).
    """
    # Reject unsupported formats up front; previously any value other than
    # "json" silently fell through to the TOML writer branch.
    if format not in ("json", "toml"):
        console.print(f"[red]✗[/red] Unsupported format: {format}", style="bold red")
        raise typer.Exit(code=1)

    if source:
        # Copy mode - unlike provider/listing, a seller source is a FILE.
        base_dirs = [Path.cwd() / "data", Path.cwd()]
        source_path = None

        # Search each base directory: first treat `source` as a file path,
        # then as a directory holding a conventionally named seller file.
        for base_dir in base_dirs:
            candidate = base_dir / source
            if candidate.is_file():
                source_path = candidate
                break
            for filename in ("seller.json", "seller.toml"):
                candidate_file = base_dir / source / filename
                if candidate_file.exists():
                    source_path = candidate_file
                    break
            if source_path:
                break

        if not source_path:
            console.print(
                f"[red]✗[/red] Source seller file not found: {source}",
                style="bold red",
            )
            raise typer.Exit(code=1)

        console.print(f"[blue]Copying from:[/blue] {source_path}")
        console.print(f"[blue]Creating:[/blue] seller.{format}")
        console.print(f"[blue]Output directory:[/blue] {output_dir}\n")

        try:
            # Load the source file in whichever format it is stored.
            if source_path.suffix == ".json":
                with open(source_path) as f:
                    data = json.load(f)
            else:  # .toml
                with open(source_path, "rb") as f:
                    data = tomllib.load(f)

            # Substitute the new seller name.
            data["name"] = name

            output_dir.mkdir(parents=True, exist_ok=True)

            # Write in the requested output format (may differ from source).
            output_file = output_dir / f"seller.{format}"
            if format == "json":
                with open(output_file, "w") as f:
                    json.dump(data, f, indent=2)
                    f.write("\n")
            else:  # toml
                if not TOML_WRITE_AVAILABLE:
                    console.print(
                        "[red]✗[/red] TOML write support not available. Install tomli_w.",
                        style="bold red",
                    )
                    raise typer.Exit(code=1)
                with open(output_file, "wb") as f:
                    tomli_w.dump(data, f)

            console.print(f"[green]✓[/green] Seller created: {output_file}")
        except typer.Exit:
            # typer.Exit is an Exception subclass (via click's Exit/RuntimeError);
            # re-raise so the generic handler below does not swallow the exit
            # raised for the missing-TOML-writer case and print a bogus
            # "Failed to create seller" message on top of the real one.
            raise
        except Exception as e:
            console.print(f"[red]✗[/red] Failed to create seller: {e}", style="bold red")
            raise typer.Exit(code=1)
    else:
        # Interactive mode: prompt for all fields, then write the data file.
        console.print("[bold cyan]Creating seller interactively[/bold cyan]")
        console.print(f"[dim]Output directory:[/dim] {output_dir}")
        console.print(f"[dim]Format:[/dim] {format}\n")

        try:
            engine = PromptEngine(SELLER_GROUPS)

            # Pre-seed the prompt context with the name supplied on the CLI.
            user_input = engine.prompt_all(context={"name": name})

            seller_data = create_seller_data(user_input)

            output_dir.mkdir(parents=True, exist_ok=True)

            output_file = output_dir / f"seller.{format}"
            if format == "json":
                with open(output_file, "w") as f:
                    json.dump(seller_data, f, indent=2)
                    f.write("\n")
            else:  # toml
                if not TOML_WRITE_AVAILABLE:
                    console.print(
                        "[red]✗[/red] TOML write support not available. Install tomli_w.",
                        style="bold red",
                    )
                    raise typer.Exit(code=1)
                with open(output_file, "wb") as f:
                    tomli_w.dump(seller_data, f)

            console.print(f"\n[green]✓[/green] Seller created: {output_file}")
        except typer.Exit:
            # Same rationale as above: keep the intended exit from being
            # re-wrapped by the generic Exception handler.
            raise
        except typer.Abort:
            console.print("[yellow]Seller creation cancelled[/yellow]")
            raise typer.Exit(code=1)
        except Exception as e:
            console.print(f"[red]✗[/red] Failed to create seller: {e}", style="bold red")
            raise typer.Exit(code=1)