unitysvc_services-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1039 @@
+ #!/usr/bin/env python3
+ """
+ Initialize new provider or service data structure.
+ 
+ This module provides functions to create new directory structures for providers or services
+ by copying from existing examples or data directories and updating the name fields.
+ """
8
+
9
+ import json
10
+ import shutil
11
+ import sys
12
+ import tomllib # Built-in since Python 3.11
13
+ from datetime import datetime
14
+ from pathlib import Path
15
+ from typing import Any
16
+
17
+ import typer
18
+ from rich.console import Console
19
+
20
+ try:
21
+ import tomli_w
22
+
23
+ TOML_WRITE_AVAILABLE = True
24
+ except ImportError:
25
+ TOML_WRITE_AVAILABLE = False
26
+
27
+ TOML_AVAILABLE = TOML_WRITE_AVAILABLE # For backward compatibility
28
+
29
+ # YAML support has been removed
30
+ YAML_AVAILABLE = False
31
+
32
+
33
+ # Constants
34
+ DATA_FILE_EXTENSIONS = [".json", ".toml"]
35
+ DEFAULT_FORMAT = "toml"
+ 
+ 
+ def find_source_directory(source_name: str, base_dirs: list[Path]) -> Path | None:
+     """Find the source directory in the given base directories."""
+     # Handle absolute paths (starting with /)
+     if source_name.startswith("/"):
+         # Remove leading slash and treat as relative path from base directories
+         relative_path = source_name.lstrip("/")
+         for base_dir in base_dirs:
+             if not base_dir.exists():
+                 continue
+             source_path = base_dir / relative_path
+             if source_path.exists() and source_path.is_dir():
+                 return source_path
+         return None
+ 
+     # Handle relative paths (existing behavior)
+     for base_dir in base_dirs:
+         if not base_dir.exists():
+             continue
+ 
+         # Look for exact match first
+         source_path = base_dir / source_name
+         if source_path.exists() and source_path.is_dir():
+             return source_path
+ 
+         # Look for nested directories (e.g., provider1/service1)
+         for provider_dir in base_dir.iterdir():
+             if provider_dir.is_dir() and provider_dir.name != "README.md":
+                 nested_path = provider_dir / source_name
+                 if nested_path.exists() and nested_path.is_dir():
+                     return nested_path
+ 
+     return None
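+ 
+ 
+ # A rough sketch of the lookup order (directory names are hypothetical):
+ #
+ #     find_source_directory("provider1", [Path("data")])
+ #         -> data/provider1 (exact match)
+ #     find_source_directory("service1", [Path("data")])
+ #         -> data/provider1/service1 (found by scanning nested directories)
+ #     find_source_directory("/provider1/service1", [Path("data")])
+ #         -> data/provider1/service1 (leading slash resolved against each base)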
+ 
+ 
+ def load_data_file(file_path: Path) -> dict[str, Any]:
+     """Load data from JSON or TOML file."""
+     suffix = file_path.suffix.lower()
+ 
+     if suffix == ".json":
+         with open(file_path, encoding="utf-8") as f:
+             return json.load(f)
+     elif suffix == ".toml":
+         with open(file_path, "rb") as f:
+             return tomllib.load(f)
+     else:
+         raise ValueError(f"Unsupported file format: {suffix}")
+ 
+ 
+ def save_data_file(file_path: Path, data: dict[str, Any]) -> None:
+     """Save data to JSON or TOML file."""
+     suffix = file_path.suffix.lower()
+ 
+     if suffix == ".json":
+         with open(file_path, "w", encoding="utf-8") as f:
+             json.dump(data, f, indent=2)
+             f.write("\n")
+     elif suffix == ".toml":
+         if not TOML_WRITE_AVAILABLE:
+             raise ImportError("tomli_w is required to write TOML files. Install with: pip install tomli-w")
+         with open(file_path, "wb") as f:
+             tomli_w.dump(data, f)
+     else:
+         raise ValueError(f"Unsupported file format: {suffix}")
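+ 
+ 
+ # A minimal round-trip sketch (the path is hypothetical):
+ #
+ #     path = Path("data/provider1/provider.toml")
+ #     data = load_data_file(path)
+ #     data["display_name"] = "Provider One"
+ #     save_data_file(path.with_suffix(".json"), data)
+ #
+ # JSON output is written with a trailing newline; TOML output requires the
+ # optional tomli_w dependency and raises ImportError without it.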
101
+
102
+
103
+ def detect_source_format(source_dir: Path) -> str:
104
+ """Detect the primary format used in the source directory."""
105
+ # Look for data files and determine the most common format
106
+ format_counts = {"json": 0, "toml": 0}
107
+
108
+ for file_path in source_dir.rglob("*"):
109
+ if file_path.is_file():
110
+ suffix = file_path.suffix.lower()
111
+ if suffix == ".json":
112
+ format_counts["json"] += 1
113
+ elif suffix == ".toml":
114
+ format_counts["toml"] += 1
115
+
116
+ # Return the format with the highest count, default to toml if no data files found
117
+ if max(format_counts.values()) == 0:
118
+ return "toml"
119
+
120
+ return max(format_counts, key=lambda x: format_counts[x])
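+ 
+ 
+ # For example, a tree containing two .toml files and one .json file yields
+ # "toml"; a tie with at least one data file yields "json" (the first key in
+ # format_counts); and a tree with no data files at all falls back to "toml".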
121
+
122
+
123
+ def normalize_name(name: str) -> str:
124
+ """Normalize a name to match the expected directory format (replace underscores with hyphens)."""
125
+ return name.replace("_", "-")
126
+
127
+
128
+ def discover_schemas(schema_dir: Path) -> dict[str, dict[str, Any]]:
129
+ """Discover available schemas by scanning the schema directory."""
130
+ schemas: dict[str, dict[str, Any]] = {}
131
+ if not schema_dir.exists():
132
+ return schemas
133
+
134
+ for schema_file in schema_dir.glob("*.json"):
135
+ schema_name = schema_file.stem
136
+ try:
137
+ with open(schema_file, encoding="utf-8") as f:
138
+ schema_data = json.load(f)
139
+ schemas[schema_name] = schema_data
140
+ except Exception as e:
141
+ print(f"Warning: Could not load schema {schema_file}: {e}", file=sys.stderr)
142
+
143
+ return schemas
+ 
+ 
+ def generate_example_value(property_def: dict, property_name: str, schema_name: str) -> Any:
+     """Generate an example value based on JSON schema property definition."""
+     # Handle default values first
+     if "default" in property_def:
+         return property_def["default"]
+ 
+     # Handle anyOf (union types)
+     if "anyOf" in property_def:
+         # Find the first non-null type
+         for option in property_def["anyOf"]:
+             if option.get("type") != "null":
+                 return generate_example_value(option, property_name, schema_name)
+         return None
+ 
+     # Handle $ref (references to definitions)
+     if "$ref" in property_def and property_def["$ref"].startswith("#/$defs/"):
+         # For now, handle simple enum references
+         # This would need the full schema context to resolve properly
+         # For CategoryEnum, return "AI" as default
+         if "Category" in property_def["$ref"]:
+             return "AI"
+         return "reference_value"
+ 
+     prop_type = property_def.get("type", "string")
+ 
+     if prop_type == "string":
+         format_type = property_def.get("format")
+         if format_type == "email":
+             return "contact@example.com"
+         elif format_type == "uri":
+             return "https://example.com"
+         elif format_type == "date-time":
+             # Use an actual UTC timestamp so the trailing "Z" designator is accurate
+             return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+         elif property_name in [
+             "terms_of_service",
+             "documentation",
+             "api_documentation",
+             "code_example",
+         ]:
+             # These are likely file references or URLs
+             file_mappings = {
+                 "terms_of_service": "terms-of-service.md",
+                 "code_example": "code-example.md",
+                 "api_documentation": "api-docs.md",
+             }
+             return file_mappings.get(property_name, "https://docs.example.com")
+         else:
+             # Generate meaningful example based on property name
+             if property_name == "name":
+                 return "placeholder_name"  # Will be replaced with actual name
+             elif property_name == "description":
+                 return f"Description for {schema_name.replace('_', ' ')}"
+             elif "email" in property_name.lower():
+                 return "contact@example.com"
+             elif "homepage" in property_name.lower():
+                 return "https://example.com"
+             else:
+                 return f"Example {property_name}"
+ 
+     elif prop_type == "object":
+         # Handle object properties
+         additional_props = property_def.get("additionalProperties")
+         if additional_props is True:
+             # additionalProperties: true - create example object based on property name
+             if property_name == "access_method":
+                 return {
+                     "type": "REST_API",
+                     "authentication": "API_KEY",
+                     "endpoint": "https://api.example.com",
+                 }
+             else:
+                 return {"example_key": "example_value"}
+         elif isinstance(additional_props, dict) and additional_props.get("type") == "string":
+             # additionalProperties with string type - create example key-value pairs
+             return {
+                 "feature1": "Feature description 1",
+                 "feature2": "Feature description 2",
+             }
+         return {}
+ 
+     elif prop_type == "array":
+         items_def = property_def.get("items", {})
+         if items_def.get("type") == "object":
+             # Generate example array with one object
+             example_obj = {}
+             if "properties" in items_def:
+                 for item_prop, item_def in items_def["properties"].items():
+                     example_obj[item_prop] = generate_example_value(item_def, item_prop, schema_name)
+             return [example_obj]
+         return []
+ 
+     elif prop_type == "number" or prop_type == "integer":
+         return 1
+ 
+     elif prop_type == "boolean":
+         return True
+ 
+     return None
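+ 
+ 
+ # A few concrete resolutions (property names here are illustrative):
+ #
+ #     generate_example_value({"type": "string", "format": "email"}, "contact_email", "provider_v1")
+ #         -> "contact@example.com"
+ #     generate_example_value({"anyOf": [{"type": "null"}, {"type": "integer"}]}, "max_tokens", "service_v1")
+ #         -> 1 (the first non-null anyOf option wins)
+ #     generate_example_value({"default": "active"}, "status", "provider_v1")
+ #         -> "active" (an explicit default short-circuits everything else)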
+ 
+ 
+ def generate_data_from_schema(schema_def: dict, schema_name: str, dir_name: str) -> dict[str, Any]:
+     """Generate example data based on JSON schema definition."""
+     data = {}
+ 
+     properties = schema_def.get("properties", {})
+     required = schema_def.get("required", [])
+ 
+     for prop_name, prop_def in properties.items():
+         # Generate value for this property
+         value = generate_example_value(prop_def, prop_name, schema_name)
+ 
+         # Special handling for certain fields
+         if prop_name == "name":
+             if "service" in schema_name:
+                 data[prop_name] = normalize_name(dir_name)
+             else:
+                 data[prop_name] = dir_name
+         elif prop_name == "schema":
+             data[prop_name] = schema_name
+         elif value is not None:  # Only add non-None values to avoid TOML serialization issues
+             data[prop_name] = value
+         # Skip None values unless they're required
+         elif prop_name in required:
+             # For required fields that would be None, provide a placeholder
+             data[prop_name] = f"placeholder_{prop_name}"
+ 
+     return data
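+ 
+ 
+ # Sketch: given a hypothetical schema
+ #
+ #     {"properties": {"name": {"type": "string"},
+ #                     "schema": {"type": "string"},
+ #                     "contact_email": {"type": "string"}},
+ #      "required": ["name"]}
+ #
+ # generate_data_from_schema(schema, "provider_v1", "acme") returns
+ #
+ #     {"name": "acme", "schema": "provider_v1", "contact_email": "contact@example.com"}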
+ 
+ 
+ def get_data_filename_from_schema(schema_name: str, format_type: str) -> str:
+     """Get the appropriate filename based on schema name and format."""
+     if "provider" in schema_name:
+         return f"provider.{format_type}"
+     elif "service" in schema_name:
+         return f"service.{format_type}"
+     else:
+         # For unknown schemas, use the schema name as filename
+         base_name = schema_name.replace("_v", "").replace("_", "-")
+         return f"{base_name}.{format_type}"
+ 
+ 
+ def create_additional_files_from_schema(dest_dir: Path, schema_def: dict, schema_name: str, dir_name: str) -> list[str]:
+     """Create additional files based on schema requirements (like terms-of-service.md)."""
+     created_files = []
+ 
+     properties = schema_def.get("properties", {})
+ 
+     for prop_name, prop_def in properties.items():
+         # Look for properties that reference markdown files
+         # Check both direct type and anyOf types for string properties
+         is_string_type = False
+         if prop_def.get("type") == "string":
+             is_string_type = True
+         elif "anyOf" in prop_def:
+             # Check if any of the anyOf options is a string type
+             for option in prop_def["anyOf"]:
+                 if option.get("type") == "string":
+                     is_string_type = True
+                     break
+ 
+         if is_string_type and prop_name in [
+             "terms_of_service",
+             "code_example",
+             "api_documentation",
+         ]:
+             filename = generate_example_value(prop_def, prop_name, schema_name)
+ 
+             # Only create file if it's a .md reference (not a URL)
+             if filename and ".md" in str(filename) and not filename.startswith("http"):
+                 file_path = dest_dir / filename
+ 
+                 if prop_name == "terms_of_service":
+                     content = f"# Terms of Service for {dir_name}\n\nPlaceholder terms of service document.\n"
+                 elif prop_name == "code_example":
+                     content = f"# Code Example for {dir_name}\n\nPlaceholder code example.\n"
+                 elif prop_name == "api_documentation":
+                     content = f"# API Documentation for {dir_name}\n\nPlaceholder API documentation.\n"
+                 else:
+                     content = f"# {prop_name.replace('_', ' ').title()} for {dir_name}\n\nPlaceholder content.\n"
+ 
+                 file_path.write_text(content, encoding="utf-8")
+                 created_files.append(filename)
+ 
+     return created_files
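+ 
+ 
+ # For instance, a schema whose "terms_of_service" property is a plain string
+ # causes terms-of-service.md to be written into dest_dir with placeholder
+ # content, and the returned list then includes "terms-of-service.md".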
+ 
+ 
+ def handle_destination_directory(dest_dir: Path, force: bool = False) -> None:
+     """Create the destination directory; if it already exists, remove and recreate it when force is True, otherwise leave it untouched."""
+     if dest_dir.exists():
+         if force:
+             print(f"Removing existing directory: {dest_dir}")
+             shutil.rmtree(dest_dir)
+         else:
+             print(f"Skipping existing directory: {dest_dir}")
+             return
+ 
+     dest_dir.mkdir(parents=True, exist_ok=True)
+ 
+ 
+ def update_string_references(obj, old_values: set[str], new_values: dict[str, str], context: str = "") -> bool:
+     """Recursively update string references in nested data structures.
+ 
+     Args:
+         obj: The object to update (dict or list)
+         old_values: Set of old values to look for
+         new_values: Dict mapping old values to new values
+         context: Context for logging (optional)
+ 
+     Returns:
+         True if any updates were made
+     """
+     updated = False
+ 
+     if isinstance(obj, dict):
+         for key, value in obj.items():
+             if isinstance(value, str) and value in old_values:
+                 new_value = new_values[value]
+                 print(f" Converting{context}: '{value}' -> '{new_value}'")
+                 obj[key] = new_value
+                 updated = True
+             else:
+                 if update_string_references(value, old_values, new_values, context):
+                     updated = True
+     elif isinstance(obj, list):
+         for i, item in enumerate(obj):
+             if isinstance(item, str) and item in old_values:
+                 new_value = new_values[item]
+                 print(f" Converting{context}: '{item}' -> '{new_value}'")
+                 obj[i] = new_value
+                 updated = True
+             else:
+                 if update_string_references(item, old_values, new_values, context):
+                     updated = True
+ 
+     return updated
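+ 
+ 
+ # Sketch of an in-place rewrite (values are illustrative):
+ #
+ #     data = {"details": {"terms_of_service": "terms-of-service.md"}}
+ #     update_string_references(
+ #         data,
+ #         {"terms-of-service.md"},
+ #         {"terms-of-service.md": "/provider1/terms-of-service.md"},
+ #     )
+ #     # data["details"]["terms_of_service"] is now "/provider1/terms-of-service.md"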
+ 
+ 
+ def create_schema_based_structure(
+     dest_dir: Path,
+     dir_name: str,
+     schema_name: str,
+     format_type: str = DEFAULT_FORMAT,
+     force: bool = False,
+ ) -> None:
+     """Create a directory structure with minimal valid data based on the specified schema."""
+     # Check if directory already exists before processing
+     if dest_dir.exists() and not force:
+         print(f"Skipping existing directory: {dest_dir}")
+         return
+ 
+     handle_destination_directory(dest_dir, force)
+ 
+     # Discover available schemas
+     project_root = Path(__file__).parent.parent
+     schema_dir = project_root / "schema"
+     available_schemas = discover_schemas(schema_dir)
+ 
+     if schema_name not in available_schemas:
+         schema_list = ", ".join(available_schemas.keys()) if available_schemas else "none"
+         print(
+             f"Error: Unknown schema '{schema_name}'. Available schemas: {schema_list}",
+             file=sys.stderr,
+         )
+         sys.exit(1)
+ 
+     schema_def = available_schemas[schema_name]
+ 
+     # Generate data based on schema definition
+     try:
+         data = generate_data_from_schema(schema_def, schema_name, dir_name)
+ 
+         # Create additional files based on schema requirements
+         created_files = create_additional_files_from_schema(dest_dir, schema_def, schema_name, dir_name)
+ 
+         # Save the data file
+         data_filename = get_data_filename_from_schema(schema_name, format_type)
+         data_path = dest_dir / data_filename
+         save_data_file(data_path, data)
+ 
+         # Print summary
+         print(f"Created {schema_name} dataset: {dest_dir}")
+         for created_file in created_files:
+             print(f" Added: {created_file}")
+         print(f" Added: {data_path.name}")
+ 
+     except Exception as e:
+         print(f"Error generating data from schema: {e}", file=sys.stderr)
+         sys.exit(1)
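+ 
+ 
+ # Typical call (assumes a provider_v1 schema file is discoverable under the
+ # schema directory resolved above):
+ #
+ #     create_schema_based_structure(Path("data/acme"), "acme", "provider_v1", "toml")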
+ 
+ 
+ def create_empty_structure(
+     dest_dir: Path,
+     dir_name: str,
+     format_type: str = DEFAULT_FORMAT,
+     force: bool = False,
+ ) -> None:
+     """Create an empty directory structure containing a placeholder data file in the specified format."""
+     # Check if directory already exists before processing
+     if dest_dir.exists() and not force:
+         print(f"Skipping existing directory: {dest_dir}")
+         return
+ 
+     handle_destination_directory(dest_dir, force)
+ 
+     # Create data file with name and schema fields in the specified format
+     data_path = dest_dir / f"data.{format_type}"
+     data_content = {"name": dir_name, "schema": "scheme"}
+ 
+     # Save using the appropriate format
+     save_data_file(data_path, data_content)
+ 
+     print(f"Created empty directory: {dest_dir}")
+     print(f" Added: {data_path.name}")
+ 
+ 
+ def copy_and_update_structure(
+     source_dir: Path,
+     dest_dir: Path,
+     new_name: str,
+     copy_data: bool = True,
+     project_root: Path | None = None,
+     format_type: str = DEFAULT_FORMAT,
+     force: bool = False,
+ ) -> None:
+     """Copy source directory to destination and update names."""
+     # Check if directory already exists before processing
+     if dest_dir.exists() and not force:
+         print(f"Skipping existing directory: {dest_dir}")
+         return
+ 
+     handle_destination_directory(dest_dir, force)
+ 
+     print(f"Copying from: {source_dir}")
+     print(f"Creating: {dest_dir}")
+ 
+     def process_directory(source_path: Path, dest_path: Path, relative_path: str = ""):
+         """Recursively process directory contents."""
+         dest_path.mkdir(parents=True, exist_ok=True)
+ 
+         # Collect .md files in current source directory for reference conversion
+         md_files_in_dir = {f.name for f in source_path.iterdir() if f.is_file() and f.suffix == ".md"}
+ 
+         for item in source_path.iterdir():
+             source_file = source_path / item.name
+             dest_file = dest_path / item.name
+ 
+             if source_file.is_dir():
+                 # Recursively process subdirectory
+                 new_relative = f"{relative_path}/{item.name}" if relative_path else item.name
+                 process_directory(source_file, dest_file, new_relative)
+             elif source_file.is_file():
+                 # Handle files based on type
+                 if source_file.suffix == ".md":
+                     # 1. Copy .md files only if copy_data is True
+                     if copy_data:
+                         shutil.copy2(source_file, dest_file)
+                 elif source_file.suffix.lower() in DATA_FILE_EXTENSIONS:
+                     # 2. Process data files
+                     try:
+                         data = load_data_file(source_file)
+ 
+                         # Update name field to match directory name
+                         if "name" in data:
+                             if source_file.name.startswith("service."):
+                                 # Service file - use normalized name (matches the directory it will be in)
+                                 data["name"] = normalize_name(new_name)
+                             else:
+                                 # Provider file - use the new_name
+                                 data["name"] = new_name
+ 
+                         # Convert file references to absolute paths if not copying data
+                         if not copy_data:
+                             # Create mapping of file references to absolute paths
+                             # Use source directory path, not destination path
+                             # Calculate the path relative to the data directory
+                             if project_root and "example_data" in str(source_dir):
+                                 # Source is in example_data, get relative path from example_data
+                                 source_relative_to_base = source_dir.relative_to(project_root / "example_data")
+                             elif project_root and "data" in str(source_dir):
+                                 # Source is in data directory, get relative path from data
+                                 source_relative_to_base = source_dir.relative_to(project_root / "data")
+                             else:
+                                 # Fallback: use the source directory name
+                                 source_relative_to_base = Path(source_dir.name)
+ 
+                             if relative_path:
+                                 # For nested directories, append the relative path
+                                 source_path_with_relative = source_relative_to_base / relative_path
+                             else:
+                                 # For root level, use just the source path
+                                 source_path_with_relative = source_relative_to_base
+ 
+                             path_prefix = f"/{source_path_with_relative}"
+                             new_values = {md_file: f"{path_prefix}/{md_file}" for md_file in md_files_in_dir}
+ 
+                             update_string_references(data, md_files_in_dir, new_values, " file reference")
+ 
+                         # Save the updated data file in the specified format
+                         # Determine the new file path with the correct extension
+                         if dest_file.suffix.lower() != f".{format_type}":
+                             # Change extension to match the format
+                             dest_file_with_format = dest_file.parent / f"{dest_file.stem}.{format_type}"
+                             print(f" Converting format: {dest_file.name} -> {dest_file_with_format.name}")
+                         else:
+                             dest_file_with_format = dest_file
+ 
+                         save_data_file(dest_file_with_format, data)
+ 
+                     except Exception as e:
+                         print(
+                             f" Warning: Could not process {source_file}: {e}",
+                             file=sys.stderr,
+                         )
+                         # Copy the file as-is if we can't process it
+                         shutil.copy2(source_file, dest_file)
+                 else:
+                     # Copy other files as-is
+                     shutil.copy2(source_file, dest_file)
+ 
+     # Process the entire directory structure
+     process_directory(source_dir, dest_dir)
+ 
+     # Rename service directories to match normalized names and update any absolute paths
+     normalized_name = normalize_name(new_name)
+     for item in dest_dir.iterdir():
+         if (
+             item.is_dir()
+             and any((item / f"service{ext}").exists() for ext in DATA_FILE_EXTENSIONS)
+             and item.name != normalized_name
+         ):
+             old_name = item.name
+             new_path = dest_dir / normalized_name
+             print(f" Renaming service directory: {old_name} -> {normalized_name}")
+             item.rename(new_path)
+ 
+             # Update the name field in the service data file to match the new directory name
+             for ext_with_dot in DATA_FILE_EXTENSIONS:
+                 ext = ext_with_dot.lstrip(".")
+                 service_file = new_path / f"service.{ext}"
+                 if service_file.exists():
+                     try:
+                         data = load_data_file(service_file)
+                         if "name" in data:
+                             print(
+                                 f" Updating service name to match directory: '{data['name']}' -> '{normalized_name}'"
+                             )
+                             data["name"] = normalized_name
+                             save_data_file(service_file, data)
+                     except Exception as e:
+                         print(
+                             f" Warning: Could not update service name in {service_file}: {e}",
+                             file=sys.stderr,
+                         )
+ 
+             # Update any absolute paths that reference the old directory name
+             if not copy_data:
+ 
+                 def fix_renamed_paths_in_files(old_dir_name: str, new_dir_name: str):
+                     data_files = [file for ext in DATA_FILE_EXTENSIONS for file in dest_dir.glob(f"**/*{ext}")]
+                     for data_file in data_files:
+                         try:
+                             data = load_data_file(data_file)
+ 
+                             # Find all strings that start with the old directory path
+                             def collect_old_paths(obj, old_paths, new_path_mappings):
+                                 if isinstance(obj, dict):
+                                     for value in obj.values():
+                                         if isinstance(value, str) and value.startswith(f"/{old_dir_name}/"):
+                                             old_paths.add(value)
+                                             new_path_mappings[value] = value.replace(
+                                                 f"/{old_dir_name}/",
+                                                 f"/{new_dir_name}/",
+                                                 1,
+                                             )
+                                         else:
+                                             collect_old_paths(value, old_paths, new_path_mappings)
+                                 elif isinstance(obj, list):
+                                     for item in obj:
+                                         if isinstance(item, str) and item.startswith(f"/{old_dir_name}/"):
+                                             old_paths.add(item)
+                                             new_path_mappings[item] = item.replace(
+                                                 f"/{old_dir_name}/",
+                                                 f"/{new_dir_name}/",
+                                                 1,
+                                             )
+                                         else:
+                                             collect_old_paths(item, old_paths, new_path_mappings)
+ 
+                             old_paths: set[str] = set()
+                             new_path_mappings: dict[str, str] = {}
+                             collect_old_paths(data, old_paths, new_path_mappings)
+ 
+                             if old_paths:
+                                 updated = update_string_references(
+                                     data,
+                                     old_paths,
+                                     new_path_mappings,
+                                     " path after rename",
+                                 )
+                                 if updated:
+                                     save_data_file(data_file, data)
+ 
+                         except Exception as e:
+                             print(
+                                 f" Warning: Could not update paths in {data_file}: {e}",
+                                 file=sys.stderr,
+                             )
+ 
+                 fix_renamed_paths_in_files(old_name, normalized_name)
+ 
+     print(f"✓ Successfully created '{dest_dir}' from '{source_dir}'")
+ 
+ 
+ # Typer CLI app for init commands
+ app = typer.Typer(help="Initialize new data files from schemas")
+ console = Console()
+ 
+ 
+ @app.command("offering")
+ def init_offering(
+     name: str = typer.Argument(..., help="Name for the new service offering"),
+     output_dir: Path = typer.Option(
+         Path.cwd() / "data",
+         "--output-dir",
+         "-o",
+         help="Output directory (default: ./data)",
+     ),
+     format: str = typer.Option(
+         "json",
+         "--format",
+         "-f",
+         help="Output format: json or toml",
+     ),
+     source: str | None = typer.Option(
+         None,
+         "--source",
+         "-s",
+         help="Copy from existing service offering directory",
+     ),
+ ):
+     """Create a new service offering skeleton."""
+     # Prepare arguments for scaffold
+     if source:
+         # Copy mode
+         base_dirs = [Path.cwd() / "data", Path.cwd()]
+         source_dir = find_source_directory(source, base_dirs)
+         if not source_dir:
+             console.print(
+                 f"[red]✗[/red] Source directory not found: {source}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+ 
+         console.print(f"[blue]Copying from:[/blue] {source_dir}")
+         console.print(f"[blue]Creating:[/blue] {name}")
+         console.print(f"[blue]Format:[/blue] {format}\n")
+ 
+         try:
+             copy_and_update_structure(
+                 source_dir=source_dir,
+                 dest_dir=output_dir / name,
+                 new_name=name,
+                 copy_data=False,
+                 project_root=None,
+                 format_type=format,
+                 force=False,
+             )
+             console.print(f"[green]✓[/green] Service offering created: {output_dir / name}")
+         except Exception as e:
+             console.print(
+                 f"[red]✗[/red] Failed to create service offering: {e}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+     else:
+         # Generate from schema
+         console.print(f"[blue]Creating service offering:[/blue] {name}")
+         console.print(f"[blue]Output directory:[/blue] {output_dir}")
+         console.print(f"[blue]Format:[/blue] {format}\n")
+ 
+         try:
+             create_schema_based_structure(
+                 dest_dir=output_dir / name,
+                 dir_name=name,
+                 schema_name="service_v1",
+                 format_type=format,
+                 force=False,
+             )
+             console.print(f"[green]✓[/green] Service offering created: {output_dir / name}")
+         except Exception as e:
+             console.print(
+                 f"[red]✗[/red] Failed to create service offering: {e}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+ 
+ 
+ @app.command("listing")
+ def init_listing(
+     name: str = typer.Argument(..., help="Name for the new service listing"),
+     output_dir: Path = typer.Option(
+         Path.cwd() / "data",
+         "--output-dir",
+         "-o",
+         help="Output directory (default: ./data)",
+     ),
+     format: str = typer.Option(
+         "json",
+         "--format",
+         "-f",
+         help="Output format: json or toml",
+     ),
+     source: str | None = typer.Option(
+         None,
+         "--source",
+         "-s",
+         help="Copy from existing service listing directory",
+     ),
+ ):
+     """Create a new service listing skeleton."""
+     # Prepare arguments for scaffold
+     if source:
+         # Copy mode
+         base_dirs = [Path.cwd() / "data", Path.cwd()]
+         source_dir = find_source_directory(source, base_dirs)
+         if not source_dir:
+             console.print(
+                 f"[red]✗[/red] Source directory not found: {source}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+ 
+         console.print(f"[blue]Copying from:[/blue] {source_dir}")
+         console.print(f"[blue]Creating:[/blue] {name}")
+         console.print(f"[blue]Format:[/blue] {format}\n")
+ 
+         try:
+             copy_and_update_structure(
+                 source_dir=source_dir,
+                 dest_dir=output_dir / name,
+                 new_name=name,
+                 copy_data=False,
+                 project_root=None,
+                 format_type=format,
+                 force=False,
+             )
+             console.print(f"[green]✓[/green] Service listing created: {output_dir / name}")
+         except Exception as e:
+             console.print(
+                 f"[red]✗[/red] Failed to create service listing: {e}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+     else:
+         # Generate from schema
+         console.print(f"[blue]Creating service listing:[/blue] {name}")
+         console.print(f"[blue]Output directory:[/blue] {output_dir}")
+         console.print(f"[blue]Format:[/blue] {format}\n")
+ 
+         try:
+             create_schema_based_structure(
+                 dest_dir=output_dir / name,
+                 dir_name=name,
+                 schema_name="listing_v1",
+                 format_type=format,
+                 force=False,
+             )
+             console.print(f"[green]✓[/green] Service listing created: {output_dir / name}")
+         except Exception as e:
+             console.print(
+                 f"[red]✗[/red] Failed to create service listing: {e}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+ 
+ 
+ @app.command("provider")
+ def init_provider(
+     name: str = typer.Argument(..., help="Name for the new provider"),
+     output_dir: Path = typer.Option(
+         Path.cwd() / "data",
+         "--output-dir",
+         "-o",
+         help="Output directory (default: ./data)",
+     ),
+     format: str = typer.Option(
+         "json",
+         "--format",
+         "-f",
+         help="Output format: json or toml",
+     ),
+     source: str | None = typer.Option(
+         None,
+         "--source",
+         "-s",
+         help="Copy from existing provider directory",
+     ),
+ ):
+     """Create a new provider skeleton."""
+     # Prepare arguments for scaffold
+     if source:
+         # Copy mode
+         base_dirs = [Path.cwd() / "data", Path.cwd()]
+         source_dir = find_source_directory(source, base_dirs)
+         if not source_dir:
+             console.print(
+                 f"[red]✗[/red] Source directory not found: {source}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+ 
+         console.print(f"[blue]Copying from:[/blue] {source_dir}")
+         console.print(f"[blue]Creating:[/blue] {name}")
+         console.print(f"[blue]Format:[/blue] {format}\n")
+ 
+         try:
+             copy_and_update_structure(
+                 source_dir=source_dir,
+                 dest_dir=output_dir / name,
+                 new_name=name,
+                 copy_data=False,
+                 project_root=None,
+                 format_type=format,
+                 force=False,
+             )
+             console.print(f"[green]✓[/green] Provider created: {output_dir / name}")
+         except Exception as e:
+             console.print(f"[red]✗[/red] Failed to create provider: {e}", style="bold red")
+             raise typer.Exit(code=1)
+     else:
+         # Generate from schema
+         console.print(f"[blue]Creating provider:[/blue] {name}")
+         console.print(f"[blue]Output directory:[/blue] {output_dir}")
+         console.print(f"[blue]Format:[/blue] {format}\n")
+ 
+         try:
+             create_schema_based_structure(
+                 dest_dir=output_dir / name,
+                 dir_name=name,
+                 schema_name="provider_v1",
+                 format_type=format,
+                 force=False,
+             )
+             console.print(f"[green]✓[/green] Provider created: {output_dir / name}")
+         except Exception as e:
+             console.print(f"[red]✗[/red] Failed to create provider: {e}", style="bold red")
+             raise typer.Exit(code=1)
+ 
+ 
+ @app.command("seller")
+ def init_seller(
+     name: str = typer.Argument(..., help="Name for the new seller"),
+     output_dir: Path = typer.Option(
+         Path.cwd() / "data",
+         "--output-dir",
+         "-o",
+         help="Output directory (default: ./data)",
+     ),
+     format: str = typer.Option(
+         "json",
+         "--format",
+         "-f",
+         help="Output format: json or toml",
+     ),
+     source: str | None = typer.Option(
+         None,
+         "--source",
+         "-s",
+         help="Copy from existing seller file",
+     ),
+ ):
+     """Create a new seller skeleton."""
+     # Prepare arguments for scaffold
+     if source:
+         # Copy mode - for seller, source is a file not a directory
+         base_dirs = [Path.cwd() / "data", Path.cwd()]
+         source_path = None
+ 
+         # Try to find the source file
+         for base_dir in base_dirs:
+             potential_path = base_dir / source
+             if potential_path.exists() and potential_path.is_file():
+                 source_path = potential_path
+                 break
+             # Also try with common seller filenames
+             for filename in ["seller.json", "seller.toml"]:
+                 potential_file = base_dir / source / filename
+                 if potential_file.exists():
+                     source_path = potential_file
+                     break
+             if source_path:
+                 break
+ 
+         if not source_path:
+             console.print(
+                 f"[red]✗[/red] Source seller file not found: {source}",
+                 style="bold red",
+             )
+             raise typer.Exit(code=1)
+ 
+         console.print(f"[blue]Copying from:[/blue] {source_path}")
+         console.print(f"[blue]Creating:[/blue] seller.{format}")
+         console.print(f"[blue]Output directory:[/blue] {output_dir}\n")
+ 
+         try:
+             # Load source file
+             if source_path.suffix == ".json":
+                 with open(source_path) as f:
+                     data = json.load(f)
+             else:  # .toml
+                 with open(source_path, "rb") as f:
+                     data = tomllib.load(f)
+ 
+             # Update name
+             data["name"] = name
+ 
+             # Ensure output directory exists
+             output_dir.mkdir(parents=True, exist_ok=True)
+ 
+             # Write to output format
+             output_file = output_dir / f"seller.{format}"
+             if format == "json":
+                 with open(output_file, "w") as f:
+                     json.dump(data, f, indent=2)
+                     f.write("\n")
+             else:  # toml
+                 if not TOML_WRITE_AVAILABLE:
+                     console.print(
+                         "[red]✗[/red] TOML write support not available. Install tomli_w.",
+                         style="bold red",
+                     )
+                     raise typer.Exit(code=1)
+                 with open(output_file, "wb") as f:
+                     tomli_w.dump(data, f)
+ 
+             console.print(f"[green]✓[/green] Seller created: {output_file}")
+         except typer.Exit:
+             # Re-raise deliberate exits without the generic failure message
+             raise
+         except Exception as e:
+             console.print(f"[red]✗[/red] Failed to create seller: {e}", style="bold red")
+             raise typer.Exit(code=1)
+     else:
+         # Generate from schema
+         console.print(f"[blue]Creating seller:[/blue] {name}")
+         console.print(f"[blue]Output directory:[/blue] {output_dir}")
+         console.print(f"[blue]Format:[/blue] {format}\n")
+ 
+         try:
+             # Ensure output directory exists
+             output_dir.mkdir(parents=True, exist_ok=True)
+ 
+             # Get schema directory
+             pkg_path = Path(__file__).parent
+             schema_dir = pkg_path / "schema"
+ 
+             # Parse the schema file to confirm it exists and is valid JSON;
+             # the seller data below is constructed by hand
+             schema_file = schema_dir / "seller_v1.json"
+             if not schema_file.exists():
+                 raise FileNotFoundError(f"Schema file not found: {schema_file}")
+ 
+             with open(schema_file) as f:
+                 json.load(f)
+ 
+             # Create basic seller data from schema
+             seller_data = {
+                 "schema": "seller_v1",
+                 "time_created": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
+                 "name": name,
+                 "display_name": name.replace("-", " ").replace("_", " ").title(),
+                 "seller_type": "individual",
+                 "contact_email": "contact@example.com",
+                 "description": f"{name} seller",
+                 "is_active": True,
+                 "is_verified": False,
+             }
+ 
+             # Write to file
+             output_file = output_dir / f"seller.{format}"
+             if format == "json":
+                 with open(output_file, "w") as f:
+                     json.dump(seller_data, f, indent=2)
+                     f.write("\n")
+             else:  # toml
+                 if not TOML_WRITE_AVAILABLE:
+                     console.print(
+                         "[red]✗[/red] TOML write support not available. Install tomli_w.",
+                         style="bold red",
+                     )
+                     raise typer.Exit(code=1)
+                 with open(output_file, "wb") as f:
+                     tomli_w.dump(seller_data, f)
+ 
+             console.print(f"[green]✓[/green] Seller created: {output_file}")
+         except typer.Exit:
+             # Re-raise deliberate exits without the generic failure message
+             raise
+         except Exception as e:
+             console.print(f"[red]✗[/red] Failed to create seller: {e}", style="bold red")
+             raise typer.Exit(code=1)
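+ 
+ 
+ # Minimal sketch of direct execution. The installed console-script name is
+ # not defined in this file, so the module path below is a placeholder; only
+ # the subcommand names (offering, listing, provider, seller) come from the
+ # commands above:
+ #
+ #     python -m <package>.init provider my-provider --format toml
+ #     python -m <package>.init seller my-seller -o ./data
+ #
+ if __name__ == "__main__":
+     app()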