appwrite-utils-cli 1.8.2 → 1.8.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/CHANGELOG.md +6 -1
  2. package/README.md +42 -13
  3. package/dist/adapters/TablesDBAdapter.js +1 -1
  4. package/dist/cli/commands/functionCommands.js +30 -3
  5. package/dist/cli/commands/schemaCommands.js +39 -4
  6. package/dist/cli/commands/storageCommands.d.ts +5 -0
  7. package/dist/cli/commands/storageCommands.js +143 -0
  8. package/dist/collections/attributes.js +7 -7
  9. package/dist/collections/methods.js +1 -1
  10. package/dist/collections/tableOperations.js +2 -2
  11. package/dist/interactiveCLI.d.ts +1 -0
  12. package/dist/interactiveCLI.js +30 -0
  13. package/dist/main.js +17 -0
  14. package/dist/migrations/appwriteToX.js +1 -1
  15. package/dist/migrations/yaml/generateImportSchemas.js +2 -2
  16. package/dist/setupCommands.js +6 -0
  17. package/dist/shared/attributeMapper.js +2 -2
  18. package/dist/shared/jsonSchemaGenerator.js +3 -1
  19. package/dist/shared/pydanticModelGenerator.d.ts +17 -0
  20. package/dist/shared/pydanticModelGenerator.js +615 -0
  21. package/dist/shared/schemaGenerator.d.ts +3 -2
  22. package/dist/shared/schemaGenerator.js +22 -9
  23. package/dist/storage/methods.js +50 -7
  24. package/dist/utils/configDiscovery.js +2 -3
  25. package/dist/utils/constantsGenerator.d.ts +20 -8
  26. package/dist/utils/constantsGenerator.js +37 -25
  27. package/dist/utils/projectConfig.js +1 -1
  28. package/dist/utils/yamlConverter.d.ts +2 -2
  29. package/dist/utils/yamlConverter.js +2 -2
  30. package/package.json +1 -1
  31. package/src/adapters/TablesDBAdapter.ts +1 -1
  32. package/src/cli/commands/functionCommands.ts +28 -3
  33. package/src/cli/commands/schemaCommands.ts +59 -22
  34. package/src/cli/commands/storageCommands.ts +152 -0
  35. package/src/collections/attributes.ts +7 -7
  36. package/src/collections/methods.ts +7 -7
  37. package/src/collections/tableOperations.ts +2 -2
  38. package/src/interactiveCLI.ts +42 -12
  39. package/src/main.ts +32 -9
  40. package/src/migrations/appwriteToX.ts +1 -1
  41. package/src/migrations/yaml/generateImportSchemas.ts +7 -7
  42. package/src/setupCommands.ts +6 -0
  43. package/src/shared/attributeMapper.ts +2 -2
  44. package/src/shared/jsonSchemaGenerator.ts +4 -2
  45. package/src/shared/pydanticModelGenerator.ts +618 -0
  46. package/src/shared/schemaGenerator.ts +38 -25
  47. package/src/storage/methods.ts +67 -23
  48. package/src/utils/configDiscovery.ts +40 -41
  49. package/src/utils/constantsGenerator.ts +43 -26
  50. package/src/utils/projectConfig.ts +11 -11
  51. package/src/utils/yamlConverter.ts +40 -40
@@ -0,0 +1,618 @@
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+ import { MessageFormatter } from './messageFormatter.js';
4
+ import type { AppwriteConfig, Attribute } from 'appwrite-utils';
5
+
6
+ // Embedded template for base Pydantic model (always written as base.py)
7
+ const BASE_PYDANTIC_TEMPLATE = `"""
8
+ Appwrite-compatible Pydantic base models for SmartScraper.
9
+
10
+ Provides clean base classes for all Appwrite document models without SQLAlchemy dependencies.
11
+ """
12
+
13
+ import json
14
+ from datetime import datetime
15
+ from typing import Any, ClassVar
16
+
17
+ from pydantic import BaseModel, Field, field_validator
18
+
19
+
20
+ class BaseAppwriteModel(BaseModel):
21
+ """
22
+ Base Appwrite-compatible Pydantic model with field aliases for Appwrite's $ prefixed fields.
23
+
24
+ Handles the mapping between Python-compatible field names and Appwrite's $ prefixed fields:
25
+ - rid -> $id
26
+ - created_at -> $createdAt
27
+ - updated_at -> $updatedAt
28
+ - permissions -> $permissions
29
+ - database_id -> $databaseId
30
+ - collection_id -> $collectionId
31
+ - sequence -> $sequence
32
+ """
33
+
34
+ # Optional class-level defaults for database/collection identifiers
35
+ databaseId: ClassVar[str | None] = None
36
+ collectionId: ClassVar[str | None] = None
37
+
38
+ rid: str = Field(..., alias="$id", description="Appwrite document ID")
39
+ created_at: datetime = Field(..., alias="$createdAt", description="Document creation timestamp")
40
+ updated_at: datetime = Field(
41
+ ..., alias="$updatedAt", description="Document last update timestamp"
42
+ )
43
+ permissions: list[str] = Field(
44
+ default_factory=list, alias="$permissions", description="Document permissions"
45
+ )
46
+ database_id: str = Field(..., alias="$databaseId", description="Appwrite database ID")
47
+ collection_id: str = Field(..., alias="$collectionId", description="Appwrite collection ID")
48
+ sequence: int | None = Field(None, alias="$sequence", description="Document sequence number")
49
+
50
+ class Config:
51
+ """Pydantic configuration for Appwrite compatibility"""
52
+
53
+ from_attributes = True
54
+ populate_by_name = True # Allow both field name and alias
55
+ extra = "allow" # Allow additional fields from Appwrite
56
+ json_encoders = {datetime: lambda v: v.isoformat() if v else None}
57
+
58
+ @field_validator("created_at", "updated_at", mode="before")
59
+ @classmethod
60
+ def parse_datetime(cls, v: str | datetime) -> datetime:
61
+ """Parse datetime from string or return datetime object"""
62
+ if isinstance(v, str):
63
+ # Handle ISO format with or without microseconds
64
+ try:
65
+ return datetime.fromisoformat(v.replace("Z", "+00:00"))
66
+ except ValueError:
67
+ # Fallback for other formats
68
+ return datetime.fromisoformat(v)
69
+ return v
70
+
71
+ def to_appwrite_dict(self) -> dict[str, Any]:
72
+ """Convert model to dictionary with Appwrite field names ($ prefixed)"""
73
+ return self.model_dump(by_alias=True, exclude_unset=True)
74
+
75
+ def to_python_dict(self) -> dict[str, Any]:
76
+ """Convert model to dictionary with Python field names (no $ prefix)"""
77
+ return self.model_dump(by_alias=False, exclude_unset=True)
78
+
79
+ @classmethod
80
+ def from_appwrite_document(cls, document: dict[str, Any]):
81
+ """Create model instance from Appwrite document with $ prefixed fields"""
82
+ return cls.model_validate(document)
83
+
84
+ def to_update_payload(self, exclude_unset: bool = True) -> dict[str, Any]:
85
+ """Convert model to update payload excluding system fields and None values"""
86
+ data = self.model_dump(by_alias=False, exclude_unset=exclude_unset)
87
+ return strip_appwrite_keys(data)
88
+
89
+
90
+ class CreateBase(BaseModel):
91
+ """
92
+ Base model for creating documents in Appwrite.
93
+ Makes all Appwrite system fields optional since they're auto-generated.
94
+ """
95
+
96
+ rid: str | None = Field(None, alias="$id", description="Optional custom document ID")
97
+ created_at: datetime | None = Field(
98
+ None, alias="$createdAt", description="Auto-generated creation timestamp"
99
+ )
100
+ updated_at: datetime | None = Field(
101
+ None, alias="$updatedAt", description="Auto-generated update timestamp"
102
+ )
103
+ permissions: list[str] | None = Field(
104
+ None, alias="$permissions", description="Optional document permissions"
105
+ )
106
+ database_id: str | None = Field(
107
+ None, alias="$databaseId", description="Auto-set database ID"
108
+ )
109
+ collection_id: str | None = Field(
110
+ None, alias="$collectionId", description="Auto-set collection ID"
111
+ )
112
+ sequence: int | None = Field(
113
+ None, alias="$sequence", description="Auto-generated sequence number"
114
+ )
115
+
116
+ class Config:
117
+ """Pydantic configuration for creation payloads"""
118
+
119
+ from_attributes = True
120
+ populate_by_name = True
121
+ extra = "allow"
122
+ json_encoders = {datetime: lambda v: v.isoformat() if v else None}
123
+
124
+ @field_validator("created_at", "updated_at", mode="before")
125
+ @classmethod
126
+ def parse_datetime(cls, v: str | datetime | None) -> datetime | None:
127
+ """Parse datetime from string or return datetime object"""
128
+ if v is None:
129
+ return None
130
+ if isinstance(v, str):
131
+ try:
132
+ return datetime.fromisoformat(v.replace("Z", "+00:00"))
133
+ except ValueError:
134
+ return datetime.fromisoformat(v)
135
+ return v
136
+
137
+ def strip_appwrite_fields(self) -> dict[str, Any]:
138
+ """
139
+ Remove Appwrite system fields and return clean data for creation.
140
+ Useful when preparing data for Appwrite document creation.
141
+ """
142
+ excluded_fields = {
143
+ "rid",
144
+ "$id",
145
+ "created_at",
146
+ "$createdAt",
147
+ "updated_at",
148
+ "$updatedAt",
149
+ "permissions",
150
+ "$permissions",
151
+ "database_id",
152
+ "$databaseId",
153
+ "collection_id",
154
+ "$collectionId",
155
+ "sequence",
156
+ "$sequence",
157
+ }
158
+
159
+ data = self.model_dump(by_alias=False, exclude_unset=True)
160
+ return {k: v for k, v in data.items() if k not in excluded_fields}
161
+
162
+
163
+ class UpdateBase(BaseModel):
164
+ """
165
+ Generic base model for partial updates.
166
+ Makes all fields optional for PATCH operations.
167
+ """
168
+
169
+ class Config:
170
+ """Pydantic configuration for update payloads"""
171
+
172
+ from_attributes = True
173
+ extra = "allow"
174
+ json_encoders = {datetime: lambda v: v.isoformat() if v else None}
175
+
176
+ def get_update_data(self, exclude_unset: bool = True) -> dict[str, Any]:
177
+ """
178
+ Get update data excluding None values and optionally unset fields.
179
+ Perfect for PATCH operations where only changed fields should be sent.
180
+ """
181
+ data = self.model_dump(exclude_unset=exclude_unset)
182
+ return {k: v for k, v in data.items() if v is not None}
183
+
184
+ def get_creation_data(self) -> dict[str, Any]:
185
+ """Get clean data for Appwrite document creation"""
186
+ return convert_to_create_payload(self)
187
+
188
+
189
+ # ============================================================================
190
+ # UTILITY FUNCTIONS
191
+ # ============================================================================
192
+
193
+
194
+ def strip_appwrite_keys(data: dict[str, Any]) -> dict[str, Any]:
195
+ """
196
+ Remove Appwrite system fields ($ prefixed) from a dictionary.
197
+
198
+ Args:
199
+ data: Dictionary that may contain Appwrite system fields
200
+
201
+ Returns:
202
+ Dictionary with Appwrite system fields removed
203
+
204
+ Example:
205
+ >>> data = {"name": "John", "$id": "123", "$createdAt": "2023-01-01"}
206
+ >>> strip_appwrite_keys(data)
207
+ {"name": "John"}
208
+ """
209
+ excluded_keys = {
210
+ "$id",
211
+ "$createdAt",
212
+ "$updatedAt",
213
+ "$permissions",
214
+ "$databaseId",
215
+ "$collectionId",
216
+ "$sequence",
217
+ }
218
+ return {k: v for k, v in data.items() if k not in excluded_keys}
219
+
220
+
221
+ def convert_to_create_payload(model_instance: BaseModel) -> dict[str, Any]:
222
+ """
223
+ Convert any Pydantic model instance to a clean creation payload.
224
+ Removes Appwrite system fields and None values.
225
+
226
+ Args:
227
+ model_instance: Pydantic model instance
228
+
229
+ Returns:
230
+ Dictionary suitable for Appwrite document creation
231
+
232
+ Example:
233
+ >>> user = UserModel(name="John", rid="123", created_at=datetime.now())
234
+ >>> convert_to_create_payload(user)
235
+ {"name": "John"}
236
+ """
237
+ data = model_instance.model_dump(exclude_unset=True)
238
+ # Remove Appwrite system fields and None values
239
+ clean_data = strip_appwrite_keys(data)
240
+ return {k: v for k, v in clean_data.items() if v is not None}
241
+
242
+
243
+ def convert_to_update_payload(
244
+ model_instance: BaseModel, exclude_unset: bool = True
245
+ ) -> dict[str, Any]:
246
+ """
247
+ Convert any Pydantic model instance to a clean update payload.
248
+ Removes None values and optionally unset fields.
249
+
250
+ Args:
251
+ model_instance: Pydantic model instance
252
+ exclude_unset: Whether to exclude fields that weren't explicitly set
253
+
254
+ Returns:
255
+ Dictionary suitable for Appwrite document updates
256
+
257
+ Example:
258
+ >>> user_update = UserUpdateModel(name="Jane")
259
+ >>> convert_to_update_payload(user_update)
260
+ {"name": "Jane"}
261
+ """
262
+ data = model_instance.model_dump(exclude_unset=exclude_unset)
263
+ return {k: v for k, v in data.items() if v is not None}
264
+
265
+
266
+ # ============================================================================
267
+ # JSON FIELD HELPER MIXINS
268
+ # ============================================================================
269
+
270
+
271
+ class JSONFieldMixin:
272
+ """
273
+ Mixin providing standardized JSON field helper methods.
274
+ Use this to add consistent JSON encode/decode patterns to models.
275
+ """
276
+
277
+ def _encode_json_field(self, data: Any) -> str | None:
278
+ """Safely encode data to JSON string"""
279
+ if data is None:
280
+ return None
281
+ try:
282
+ return json.dumps(data)
283
+ except (TypeError, ValueError):
284
+ return None
285
+
286
+ def _decode_json_field(self, json_str: str | None, default: Any = None) -> Any:
287
+ """Safely decode JSON string to data"""
288
+ if not json_str:
289
+ return default
290
+ try:
291
+ return json.loads(json_str)
292
+ except (json.JSONDecodeError, TypeError):
293
+ return default
294
+
295
+ def _decode_json_list(self, json_str: str | None) -> list[Any]:
296
+ """Safely decode JSON string to list"""
297
+ return self._decode_json_field(json_str, [])
298
+
299
+ def _decode_json_dict(self, json_str: str | None) -> dict[str, Any]:
300
+ """Safely decode JSON string to dictionary"""
301
+ return self._decode_json_field(json_str, {})
302
+
303
+
304
+ class TimestampMixin:
305
+ """
306
+ Mixin providing standardized timestamp handling for business timestamps.
307
+ Use this for models that need to handle ISO timestamp strings.
308
+ """
309
+
310
+ def _set_timestamp(self, date: datetime | None) -> str | None:
311
+ """Convert datetime to ISO string"""
312
+ return date.isoformat() if date else None
313
+
314
+ def _get_timestamp(self, timestamp_str: str | None) -> datetime | None:
315
+ """Convert ISO string to datetime"""
316
+ if not timestamp_str:
317
+ return None
318
+ try:
319
+ return datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
320
+ except ValueError:
321
+ return None
322
+
323
+
324
+ class StringArrayMixin:
325
+ """
326
+ Mixin providing standardized string array handling for many-to-many relationships.
327
+ Use this for models that manage arrays of IDs for relationships.
328
+ """
329
+
330
+ def _add_to_array(self, array: list[str], item: str) -> None:
331
+ """Add item to array if not already present"""
332
+ if item not in array:
333
+ array.append(item)
334
+
335
+ def _remove_from_array(self, array: list[str], item: str) -> None:
336
+ """Remove item from array if present"""
337
+ if item in array:
338
+ array.remove(item)
339
+
340
+ def _ensure_array_field(self, field_value: list[str] | None) -> list[str]:
341
+ """Ensure field is a list, return empty list if None"""
342
+ return field_value or []
343
+
344
+
345
+ # ============================================================================
346
+ # ENHANCED UTILITY FUNCTIONS
347
+ # ============================================================================
348
+
349
+
350
+ def safe_json_encode(data: Any) -> str | None:
351
+ """
352
+ Safely encode any data to JSON string.
353
+
354
+ Args:
355
+ data: Data to encode
356
+
357
+ Returns:
358
+ JSON string or None if encoding fails
359
+
360
+ Example:
361
+ >>> safe_json_encode({"key": "value"})
362
+ '{"key": "value"}'
363
+ >>> safe_json_encode(None)
364
+ None
365
+ """
366
+ if data is None:
367
+ return None
368
+ try:
369
+ return json.dumps(data)
370
+ except (TypeError, ValueError):
371
+ return None
372
+
373
+
374
+ def safe_json_decode(json_str: str | None, default: Any = None) -> Any:
375
+ """
376
+ Safely decode JSON string to data.
377
+
378
+ Args:
379
+ json_str: JSON string to decode
380
+ default: Default value if decoding fails
381
+
382
+ Returns:
383
+ Decoded data or default value
384
+
385
+ Example:
386
+ >>> safe_json_decode('{"key": "value"}')
387
+ {'key': 'value'}
388
+ >>> safe_json_decode('invalid', {})
389
+ {}
390
+ """
391
+ if not json_str:
392
+ return default
393
+ try:
394
+ return json.loads(json_str)
395
+ except (json.JSONDecodeError, TypeError):
396
+ return default
397
+
398
+
399
+ def create_json_field_helpers(field_name: str, default_type: type[Any] = dict):
400
+ """
401
+ Create getter/setter methods for JSON fields.
402
+ Useful for dynamically adding JSON field helpers to models.
403
+
404
+ Args:
405
+ field_name: Name of the JSON field
406
+ default_type: Default type for the field (dict or list)
407
+
408
+ Returns:
409
+ Tuple of (getter, setter) functions
410
+
411
+ Example:
412
+ >>> get_metadata, set_metadata = create_json_field_helpers('metadata')
413
+ >>> # Then add to model class
414
+ """
415
+ def getter(self) -> Any:
416
+ json_str = getattr(self, field_name, None)
417
+ default = default_type() if callable(default_type) else default_type
418
+ return safe_json_decode(json_str, default)
419
+
420
+ def setter(self, value: Any) -> None:
421
+ setattr(self, field_name, safe_json_encode(value))
422
+
423
+ return getter, setter
424
+
425
+
426
+ def validate_appwrite_document(document: dict[str, Any]) -> bool:
427
+ """
428
+ Validate that a dictionary contains required Appwrite document fields.
429
+
430
+ Args:
431
+ document: Dictionary to validate
432
+
433
+ Returns:
434
+ True if valid Appwrite document format
435
+
436
+ Example:
437
+ >>> doc = {"$id": "123", "$createdAt": "2023-01-01T00:00:00Z", "name": "test"}
438
+ >>> validate_appwrite_document(doc)
439
+ True
440
+ """
441
+ required_fields = {"$id", "$createdAt", "$updatedAt"}
442
+ return all(field in document for field in required_fields)
443
+
444
+
445
+ def batch_prepare_documents(
446
+ models: list[BaseModel], batch_size: int = 100
447
+ ) -> list[list[dict[str, Any]]]:
448
+ """
449
+ Prepare model instances for batch creation in Appwrite.
450
+ Splits into batches and removes system fields.
451
+
452
+ Args:
453
+ models: List of Pydantic model instances
454
+ batch_size: Maximum documents per batch
455
+
456
+ Returns:
457
+ List of batches, each containing clean document data
458
+
459
+ Example:
460
+ >>> users = [CreateUser(name="John"), CreateUser(name="Jane")]
461
+ >>> batches = batch_prepare_documents(users, batch_size=1)
462
+ >>> len(batches)
463
+ 2
464
+ """
465
+ clean_docs = [convert_to_create_payload(model) for model in models]
466
+
467
+ batches = []
468
+ for i in range(0, len(clean_docs), batch_size):
469
+ batch = clean_docs[i:i + batch_size]
470
+ batches.append(batch)
471
+
472
+ return batches
473
+ `;
474
+
475
+ export class PydanticModelGenerator {
476
+ constructor(private config: AppwriteConfig, private appwriteFolderPath: string) {}
477
+
478
+ generatePydanticModels(options: { baseOutputDirectory: string; verbose?: boolean }) {
479
+ const { baseOutputDirectory, verbose = false } = options;
480
+ const pyDir = baseOutputDirectory;
481
+ if (!fs.existsSync(pyDir)) fs.mkdirSync(pyDir, { recursive: true });
482
+
483
+ this.writeBase(pyDir, verbose);
484
+
485
+ const collections = this.config.collections || [];
486
+ for (const coll of collections) {
487
+ const fileName = `${this.toSnake(coll.name)}.py`;
488
+ const filePath = path.join(pyDir, fileName);
489
+ const code = this.generateModel(coll.name, coll.attributes || []);
490
+ fs.writeFileSync(filePath, code, { encoding: 'utf-8' });
491
+ if (verbose) MessageFormatter.success(`Pydantic model written to ${filePath}`, { prefix: 'Schema' });
492
+ }
493
+
494
+ // __init__.py to ease imports
495
+ const initPath = path.join(pyDir, '__init__.py');
496
+ try {
497
+ const exports = (this.config.collections || []).map(c => `from .${this.toSnake(c.name)} import ${this.toPascal(c.name)}`).join('\n');
498
+ fs.writeFileSync(initPath, `${exports}\n`, { encoding: 'utf-8' });
499
+ } catch {}
500
+ }
501
+
502
+ private writeBase(pyDir: string, verbose: boolean) {
503
+ const basePath = path.join(pyDir, 'base.py');
504
+ // Always write embedded template content
505
+ fs.writeFileSync(basePath, BASE_PYDANTIC_TEMPLATE, { encoding: 'utf-8' });
506
+ if (verbose) MessageFormatter.success(`Base Pydantic model written to ${basePath}`, { prefix: 'Schema' });
507
+ }
508
+
509
+ private generateModel(name: string, attributes: Attribute[]): string {
510
+ const pascal = this.toPascal(name);
511
+ const imports = new Set<string>();
512
+ imports.add("from .base import BaseAppwriteModel");
513
+ const typeImports = new Set<string>();
514
+ typeImports.add('from pydantic import Field');
515
+ const typingImports = new Set<string>();
516
+
517
+ const fields: string[] = [];
518
+ for (const attr of attributes) {
519
+ if (!attr || !attr.key) continue;
520
+ const ann = this.mapAttributeToPythonType(attr, typingImports);
521
+ const required = !!(attr as any).required;
522
+ const isArray = !!(attr as any).array;
523
+ const defaultInitializer = this.defaultInitializer(attr, required, isArray);
524
+ fields.push(` ${attr.key}: ${ann}${defaultInitializer}`);
525
+ }
526
+
527
+ const header = this.composeHeader(imports, typeImports, typingImports);
528
+ return `${header}\n\nclass ${pascal}(BaseAppwriteModel):\n${fields.join('\n')}\n`;
529
+ }
530
+
531
+ private composeHeader(imports: Set<string>, typeImports: Set<string>, typingImports: Set<string>): string {
532
+ const lines: string[] = ["from __future__ import annotations"];
533
+ lines.push(...Array.from(typeImports));
534
+ if (typingImports.size > 0) {
535
+ lines.push(`from typing import ${Array.from(typingImports).sort().join(', ')}`);
536
+ }
537
+ // datetime import if referenced; include by default as safe
538
+ lines.push('from datetime import datetime');
539
+ lines.push(...Array.from(imports));
540
+ return lines.join('\n');
541
+ }
542
+
543
+ private defaultInitializer(attr: Attribute, required: boolean, isArray: boolean): string {
544
+ if (required) return '';
545
+ // Optional fields default to None; arrays can be None to distinguish missing vs empty
546
+ return ' = None';
547
+ }
548
+
549
+ private mapAttributeToPythonType(attr: Attribute, typingImports: Set<string>): string {
550
+ const t = String((attr as any).type || '').toLowerCase();
551
+ const isArray = !!(attr as any).array;
552
+ let base: string;
553
+ switch (t) {
554
+ case 'string':
555
+ case 'email':
556
+ case 'ip':
557
+ case 'url':
558
+ base = 'str';
559
+ break;
560
+ case 'integer':
561
+ base = 'int';
562
+ break;
563
+ case 'double':
564
+ case 'float':
565
+ base = 'float';
566
+ break;
567
+ case 'boolean':
568
+ base = 'bool';
569
+ break;
570
+ case 'datetime':
571
+ base = 'datetime';
572
+ break;
573
+ case 'enum': {
574
+ const els = Array.isArray((attr as any).elements) ? (attr as any).elements : [];
575
+ if (els.length > 0) {
576
+ typingImports.add('Literal');
577
+ base = `Literal[${els.map((e: string) => `'${e.replace(/'/g, "\\'")}'`).join(', ')}]`;
578
+ } else {
579
+ base = 'str';
580
+ }
581
+ break;
582
+ }
583
+ case 'relationship': {
584
+ const relType = (attr as any).relationType || '';
585
+ base = (relType === 'oneToMany' || relType === 'manyToMany') ? 'list[str]' : 'str';
586
+ break;
587
+ }
588
+ default:
589
+ base = 'str';
590
+ break;
591
+ }
592
+ if (isArray && t !== 'relationship') {
593
+ base = `list[${base}]`;
594
+ }
595
+ const required = !!(attr as any).required;
596
+ if (!required) {
597
+ base = `${base} | None`;
598
+ }
599
+ return base;
600
+ }
601
+
602
+ private toSnake(s: string): string {
603
+ return s
604
+ .replace(/([a-z0-9])([A-Z])/g, '$1_$2')
605
+ .replace(/[^a-zA-Z0-9]+/g, '_')
606
+ .replace(/_+/g, '_')
607
+ .replace(/^_|_$/g, '')
608
+ .toLowerCase();
609
+ }
610
+ private toPascal(s: string): string {
611
+ return s
612
+ .replace(/[^a-zA-Z0-9]+/g, ' ')
613
+ .split(' ')
614
+ .filter(Boolean)
615
+ .map(w => w.charAt(0).toUpperCase() + w.slice(1))
616
+ .join('');
617
+ }
618
+ }