appwrite-utils-cli 1.8.2 → 1.8.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -1
- package/README.md +42 -13
- package/dist/adapters/TablesDBAdapter.js +1 -1
- package/dist/cli/commands/functionCommands.js +30 -3
- package/dist/cli/commands/schemaCommands.js +39 -4
- package/dist/cli/commands/storageCommands.d.ts +5 -0
- package/dist/cli/commands/storageCommands.js +143 -0
- package/dist/collections/attributes.js +7 -7
- package/dist/collections/methods.js +1 -1
- package/dist/collections/tableOperations.js +2 -2
- package/dist/interactiveCLI.d.ts +1 -0
- package/dist/interactiveCLI.js +30 -0
- package/dist/main.js +17 -0
- package/dist/migrations/appwriteToX.js +1 -1
- package/dist/migrations/yaml/generateImportSchemas.js +2 -2
- package/dist/setupCommands.js +6 -0
- package/dist/shared/attributeMapper.js +2 -2
- package/dist/shared/jsonSchemaGenerator.js +3 -1
- package/dist/shared/pydanticModelGenerator.d.ts +17 -0
- package/dist/shared/pydanticModelGenerator.js +615 -0
- package/dist/shared/schemaGenerator.d.ts +3 -2
- package/dist/shared/schemaGenerator.js +22 -9
- package/dist/storage/methods.js +50 -7
- package/dist/utils/configDiscovery.js +2 -3
- package/dist/utils/constantsGenerator.d.ts +20 -8
- package/dist/utils/constantsGenerator.js +37 -25
- package/dist/utils/projectConfig.js +1 -1
- package/dist/utils/yamlConverter.d.ts +2 -2
- package/dist/utils/yamlConverter.js +2 -2
- package/package.json +1 -1
- package/src/adapters/TablesDBAdapter.ts +1 -1
- package/src/cli/commands/functionCommands.ts +28 -3
- package/src/cli/commands/schemaCommands.ts +59 -22
- package/src/cli/commands/storageCommands.ts +152 -0
- package/src/collections/attributes.ts +7 -7
- package/src/collections/methods.ts +7 -7
- package/src/collections/tableOperations.ts +2 -2
- package/src/interactiveCLI.ts +42 -12
- package/src/main.ts +32 -9
- package/src/migrations/appwriteToX.ts +1 -1
- package/src/migrations/yaml/generateImportSchemas.ts +7 -7
- package/src/setupCommands.ts +6 -0
- package/src/shared/attributeMapper.ts +2 -2
- package/src/shared/jsonSchemaGenerator.ts +4 -2
- package/src/shared/pydanticModelGenerator.ts +618 -0
- package/src/shared/schemaGenerator.ts +38 -25
- package/src/storage/methods.ts +67 -23
- package/src/utils/configDiscovery.ts +40 -41
- package/src/utils/constantsGenerator.ts +43 -26
- package/src/utils/projectConfig.ts +11 -11
- package/src/utils/yamlConverter.ts +40 -40
package/dist/shared/pydanticModelGenerator.js
@@ -0,0 +1,615 @@
+import fs from 'fs';
+import path from 'path';
+import { MessageFormatter } from './messageFormatter.js';
+// Embedded template for base Pydantic model (always written as base.py)
+const BASE_PYDANTIC_TEMPLATE = `"""
+Appwrite-compatible Pydantic base models for SmartScraper.
+
+Provides clean base classes for all Appwrite document models without SQLAlchemy dependencies.
+"""
+
+import json
+from datetime import datetime
+from typing import Any, ClassVar
+
+from pydantic import BaseModel, Field, field_validator
+
+
+class BaseAppwriteModel(BaseModel):
+    """
+    Base Appwrite-compatible Pydantic model with field aliases for Appwrite's $ prefixed fields.
+
+    Handles the mapping between Python-compatible field names and Appwrite's $ prefixed fields:
+    - rid -> $id
+    - created_at -> $createdAt
+    - updated_at -> $updatedAt
+    - permissions -> $permissions
+    - database_id -> $databaseId
+    - collection_id -> $collectionId
+    - sequence -> $sequence
+    """
+
+    # Optional class-level defaults for database/collection identifiers
+    databaseId: ClassVar[str | None] = None
+    collectionId: ClassVar[str | None] = None
+
+    rid: str = Field(..., alias="$id", description="Appwrite document ID")
+    created_at: datetime = Field(..., alias="$createdAt", description="Document creation timestamp")
+    updated_at: datetime = Field(
+        ..., alias="$updatedAt", description="Document last update timestamp"
+    )
+    permissions: list[str] = Field(
+        default_factory=list, alias="$permissions", description="Document permissions"
+    )
+    database_id: str = Field(..., alias="$databaseId", description="Appwrite database ID")
+    collection_id: str = Field(..., alias="$collectionId", description="Appwrite collection ID")
+    sequence: int | None = Field(None, alias="$sequence", description="Document sequence number")
+
+    class Config:
+        """Pydantic configuration for Appwrite compatibility"""
+
+        from_attributes = True
+        populate_by_name = True  # Allow both field name and alias
+        extra = "allow"  # Allow additional fields from Appwrite
+        json_encoders = {datetime: lambda v: v.isoformat() if v else None}
+
+    @field_validator("created_at", "updated_at", mode="before")
+    @classmethod
+    def parse_datetime(cls, v: str | datetime) -> datetime:
+        """Parse datetime from string or return datetime object"""
+        if isinstance(v, str):
+            # Handle ISO format with or without microseconds
+            try:
+                return datetime.fromisoformat(v.replace("Z", "+00:00"))
+            except ValueError:
+                # Fallback for other formats
+                return datetime.fromisoformat(v)
+        return v
+
+    def to_appwrite_dict(self) -> dict[str, Any]:
+        """Convert model to dictionary with Appwrite field names ($ prefixed)"""
+        return self.model_dump(by_alias=True, exclude_unset=True)
+
+    def to_python_dict(self) -> dict[str, Any]:
+        """Convert model to dictionary with Python field names (no $ prefix)"""
+        return self.model_dump(by_alias=False, exclude_unset=True)
+
+    @classmethod
+    def from_appwrite_document(cls, document: dict[str, Any]):
+        """Create model instance from Appwrite document with $ prefixed fields"""
+        return cls.model_validate(document)
+
+    def to_update_payload(self, exclude_unset: bool = True) -> dict[str, Any]:
+        """Convert model to update payload excluding system fields and None values"""
+        data = self.model_dump(by_alias=False, exclude_unset=exclude_unset)
+        return strip_appwrite_keys(data)
+
+
+class CreateBase(BaseModel):
+    """
+    Base model for creating documents in Appwrite.
+    Makes all Appwrite system fields optional since they're auto-generated.
+    """
+
+    rid: str | None = Field(None, alias="$id", description="Optional custom document ID")
+    created_at: datetime | None = Field(
+        None, alias="$createdAt", description="Auto-generated creation timestamp"
+    )
+    updated_at: datetime | None = Field(
+        None, alias="$updatedAt", description="Auto-generated update timestamp"
+    )
+    permissions: list[str] | None = Field(
+        None, alias="$permissions", description="Optional document permissions"
+    )
+    database_id: str | None = Field(
+        None, alias="$databaseId", description="Auto-set database ID"
+    )
+    collection_id: str | None = Field(
+        None, alias="$collectionId", description="Auto-set collection ID"
+    )
+    sequence: int | None = Field(
+        None, alias="$sequence", description="Auto-generated sequence number"
+    )
+
+    class Config:
+        """Pydantic configuration for creation payloads"""
+
+        from_attributes = True
+        populate_by_name = True
+        extra = "allow"
+        json_encoders = {datetime: lambda v: v.isoformat() if v else None}
+
+    @field_validator("created_at", "updated_at", mode="before")
+    @classmethod
+    def parse_datetime(cls, v: str | datetime | None) -> datetime | None:
+        """Parse datetime from string or return datetime object"""
+        if v is None:
+            return None
+        if isinstance(v, str):
+            try:
+                return datetime.fromisoformat(v.replace("Z", "+00:00"))
+            except ValueError:
+                return datetime.fromisoformat(v)
+        return v
+
+    def strip_appwrite_fields(self) -> dict[str, Any]:
+        """
+        Remove Appwrite system fields and return clean data for creation.
+        Useful when preparing data for Appwrite document creation.
+        """
+        excluded_fields = {
+            "rid",
+            "$id",
+            "created_at",
+            "$createdAt",
+            "updated_at",
+            "$updatedAt",
+            "permissions",
+            "$permissions",
+            "database_id",
+            "$databaseId",
+            "collection_id",
+            "$collectionId",
+            "sequence",
+            "$sequence",
+        }
+
+        data = self.model_dump(by_alias=False, exclude_unset=True)
+        return {k: v for k, v in data.items() if k not in excluded_fields}
+
+
+class UpdateBase(BaseModel):
+    """
+    Generic base model for partial updates.
+    Makes all fields optional for PATCH operations.
+    """
+
+    class Config:
+        """Pydantic configuration for update payloads"""
+
+        from_attributes = True
+        extra = "allow"
+        json_encoders = {datetime: lambda v: v.isoformat() if v else None}
+
+    def get_update_data(self, exclude_unset: bool = True) -> dict[str, Any]:
+        """
+        Get update data excluding None values and optionally unset fields.
+        Perfect for PATCH operations where only changed fields should be sent.
+        """
+        data = self.model_dump(exclude_unset=exclude_unset)
+        return {k: v for k, v in data.items() if v is not None}
+
+    def get_creation_data(self) -> dict[str, Any]:
+        """Get clean data for Appwrite document creation"""
+        return convert_to_create_payload(self)
+
+
+# ============================================================================
+# UTILITY FUNCTIONS
+# ============================================================================
+
+
+def strip_appwrite_keys(data: dict[str, Any]) -> dict[str, Any]:
+    """
+    Remove Appwrite system fields ($ prefixed) from a dictionary.
+
+    Args:
+        data: Dictionary that may contain Appwrite system fields
+
+    Returns:
+        Dictionary with Appwrite system fields removed
+
+    Example:
+        >>> data = {"name": "John", "$id": "123", "$createdAt": "2023-01-01"}
+        >>> strip_appwrite_keys(data)
+        {"name": "John"}
+    """
+    excluded_keys = {
+        "$id",
+        "$createdAt",
+        "$updatedAt",
+        "$permissions",
+        "$databaseId",
+        "$collectionId",
+        "$sequence",
+    }
+    return {k: v for k, v in data.items() if k not in excluded_keys}
+
+
+def convert_to_create_payload(model_instance: BaseModel) -> dict[str, Any]:
+    """
+    Convert any Pydantic model instance to a clean creation payload.
+    Removes Appwrite system fields and None values.
+
+    Args:
+        model_instance: Pydantic model instance
+
+    Returns:
+        Dictionary suitable for Appwrite document creation
+
+    Example:
+        >>> user = UserModel(name="John", rid="123", created_at=datetime.now())
+        >>> convert_to_create_payload(user)
+        {"name": "John"}
+    """
+    data = model_instance.model_dump(exclude_unset=True)
+    # Remove Appwrite system fields and None values
+    clean_data = strip_appwrite_keys(data)
+    return {k: v for k, v in clean_data.items() if v is not None}
+
+
+def convert_to_update_payload(
+    model_instance: BaseModel, exclude_unset: bool = True
+) -> dict[str, Any]:
+    """
+    Convert any Pydantic model instance to a clean update payload.
+    Removes None values and optionally unset fields.
+
+    Args:
+        model_instance: Pydantic model instance
+        exclude_unset: Whether to exclude fields that weren't explicitly set
+
+    Returns:
+        Dictionary suitable for Appwrite document updates
+
+    Example:
+        >>> user_update = UserUpdateModel(name="Jane")
+        >>> convert_to_update_payload(user_update)
+        {"name": "Jane"}
+    """
+    data = model_instance.model_dump(exclude_unset=exclude_unset)
+    return {k: v for k, v in data.items() if v is not None}
+
+
+# ============================================================================
+# JSON FIELD HELPER MIXINS
+# ============================================================================
+
+
+class JSONFieldMixin:
+    """
+    Mixin providing standardized JSON field helper methods.
+    Use this to add consistent JSON encode/decode patterns to models.
+    """
+
+    def _encode_json_field(self, data: Any) -> str | None:
+        """Safely encode data to JSON string"""
+        if data is None:
+            return None
+        try:
+            return json.dumps(data)
+        except (TypeError, ValueError):
+            return None
+
+    def _decode_json_field(self, json_str: str | None, default: Any = None) -> Any:
+        """Safely decode JSON string to data"""
+        if not json_str:
+            return default
+        try:
+            return json.loads(json_str)
+        except (json.JSONDecodeError, TypeError):
+            return default
+
+    def _decode_json_list(self, json_str: str | None) -> list[Any]:
+        """Safely decode JSON string to list"""
+        return self._decode_json_field(json_str, [])
+
+    def _decode_json_dict(self, json_str: str | None) -> dict[str, Any]:
+        """Safely decode JSON string to dictionary"""
+        return self._decode_json_field(json_str, {})
+
+
+class TimestampMixin:
+    """
+    Mixin providing standardized timestamp handling for business timestamps.
+    Use this for models that need to handle ISO timestamp strings.
+    """
+
+    def _set_timestamp(self, date: datetime | None) -> str | None:
+        """Convert datetime to ISO string"""
+        return date.isoformat() if date else None
+
+    def _get_timestamp(self, timestamp_str: str | None) -> datetime | None:
+        """Convert ISO string to datetime"""
+        if not timestamp_str:
+            return None
+        try:
+            return datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
+        except ValueError:
+            return None
+
+
+class StringArrayMixin:
+    """
+    Mixin providing standardized string array handling for many-to-many relationships.
+    Use this for models that manage arrays of IDs for relationships.
+    """
+
+    def _add_to_array(self, array: list[str], item: str) -> None:
+        """Add item to array if not already present"""
+        if item not in array:
+            array.append(item)
+
+    def _remove_from_array(self, array: list[str], item: str) -> None:
+        """Remove item from array if present"""
+        if item in array:
+            array.remove(item)
+
+    def _ensure_array_field(self, field_value: list[str] | None) -> list[str]:
+        """Ensure field is a list, return empty list if None"""
+        return field_value or []
+
+
+# ============================================================================
+# ENHANCED UTILITY FUNCTIONS
+# ============================================================================
+
+
+def safe_json_encode(data: Any) -> str | None:
+    """
+    Safely encode any data to JSON string.
+
+    Args:
+        data: Data to encode
+
+    Returns:
+        JSON string or None if encoding fails
+
+    Example:
+        >>> safe_json_encode({"key": "value"})
+        '{"key": "value"}'
+        >>> safe_json_encode(None)
+        None
+    """
+    if data is None:
+        return None
+    try:
+        return json.dumps(data)
+    except (TypeError, ValueError):
+        return None
+
+
+def safe_json_decode(json_str: str | None, default: Any = None) -> Any:
+    """
+    Safely decode JSON string to data.
+
+    Args:
+        json_str: JSON string to decode
+        default: Default value if decoding fails
+
+    Returns:
+        Decoded data or default value
+
+    Example:
+        >>> safe_json_decode('{"key": "value"}')
+        {'key': 'value'}
+        >>> safe_json_decode('invalid', {})
+        {}
+    """
+    if not json_str:
+        return default
+    try:
+        return json.loads(json_str)
+    except (json.JSONDecodeError, TypeError):
+        return default
+
+
+def create_json_field_helpers(field_name: str, default_type: type[Any] = dict):
+    """
+    Create getter/setter methods for JSON fields.
+    Useful for dynamically adding JSON field helpers to models.
+
+    Args:
+        field_name: Name of the JSON field
+        default_type: Default type for the field (dict or list)
+
+    Returns:
+        Tuple of (getter, setter) functions
+
+    Example:
+        >>> get_metadata, set_metadata = create_json_field_helpers('metadata')
+        >>> # Then add to model class
+    """
+    def getter(self) -> Any:
+        json_str = getattr(self, field_name, None)
+        default = default_type() if callable(default_type) else default_type
+        return safe_json_decode(json_str, default)
+
+    def setter(self, value: Any) -> None:
+        setattr(self, field_name, safe_json_encode(value))
+
+    return getter, setter
+
+
+def validate_appwrite_document(document: dict[str, Any]) -> bool:
+    """
+    Validate that a dictionary contains required Appwrite document fields.
+
+    Args:
+        document: Dictionary to validate
+
+    Returns:
+        True if valid Appwrite document format
+
+    Example:
+        >>> doc = {"$id": "123", "$createdAt": "2023-01-01T00:00:00Z", "name": "test"}
+        >>> validate_appwrite_document(doc)
+        True
+    """
+    required_fields = {"$id", "$createdAt", "$updatedAt"}
+    return all(field in document for field in required_fields)
+
+
+def batch_prepare_documents(
+    models: list[BaseModel], batch_size: int = 100
+) -> list[list[dict[str, Any]]]:
+    """
+    Prepare model instances for batch creation in Appwrite.
+    Splits into batches and removes system fields.
+
+    Args:
+        models: List of Pydantic model instances
+        batch_size: Maximum documents per batch
+
+    Returns:
+        List of batches, each containing clean document data
+
+    Example:
+        >>> users = [CreateUser(name="John"), CreateUser(name="Jane")]
+        >>> batches = batch_prepare_documents(users, batch_size=1)
+        >>> len(batches)
+        2
+    """
+    clean_docs = [convert_to_create_payload(model) for model in models]
+
+    batches = []
+    for i in range(0, len(clean_docs), batch_size):
+        batch = clean_docs[i:i + batch_size]
+        batches.append(batch)
+
+    return batches
+`;
+export class PydanticModelGenerator {
+    config;
+    appwriteFolderPath;
+    constructor(config, appwriteFolderPath) {
+        this.config = config;
+        this.appwriteFolderPath = appwriteFolderPath;
+    }
+    generatePydanticModels(options) {
+        const { baseOutputDirectory, verbose = false } = options;
+        const pyDir = baseOutputDirectory;
+        if (!fs.existsSync(pyDir))
+            fs.mkdirSync(pyDir, { recursive: true });
+        this.writeBase(pyDir, verbose);
+        const collections = this.config.collections || [];
+        for (const coll of collections) {
+            const fileName = `${this.toSnake(coll.name)}.py`;
+            const filePath = path.join(pyDir, fileName);
+            const code = this.generateModel(coll.name, coll.attributes || []);
+            fs.writeFileSync(filePath, code, { encoding: 'utf-8' });
+            if (verbose)
+                MessageFormatter.success(`Pydantic model written to ${filePath}`, { prefix: 'Schema' });
+        }
+        // __init__.py to ease imports
+        const initPath = path.join(pyDir, '__init__.py');
+        try {
+            const exports = (this.config.collections || []).map(c => `from .${this.toSnake(c.name)} import ${this.toPascal(c.name)}`).join('\n');
+            fs.writeFileSync(initPath, `${exports}\n`, { encoding: 'utf-8' });
+        }
+        catch { }
+    }
+    writeBase(pyDir, verbose) {
+        const basePath = path.join(pyDir, 'base.py');
+        // Always write embedded template content
+        fs.writeFileSync(basePath, BASE_PYDANTIC_TEMPLATE, { encoding: 'utf-8' });
+        if (verbose)
+            MessageFormatter.success(`Base Pydantic model written to ${basePath}`, { prefix: 'Schema' });
+    }
+    generateModel(name, attributes) {
+        const pascal = this.toPascal(name);
+        const imports = new Set();
+        imports.add("from .base import BaseAppwriteModel");
+        const typeImports = new Set();
+        typeImports.add('from pydantic import Field');
+        const typingImports = new Set();
+        const fields = [];
+        for (const attr of attributes) {
+            if (!attr || !attr.key)
+                continue;
+            const ann = this.mapAttributeToPythonType(attr, typingImports);
+            const required = !!attr.required;
+            const isArray = !!attr.array;
+            const defaultInitializer = this.defaultInitializer(attr, required, isArray);
+            fields.push(`    ${attr.key}: ${ann}${defaultInitializer}`);
+        }
+        const header = this.composeHeader(imports, typeImports, typingImports);
+        return `${header}\n\nclass ${pascal}(BaseAppwriteModel):\n${fields.join('\n')}\n`;
+    }
+    composeHeader(imports, typeImports, typingImports) {
+        const lines = ["from __future__ import annotations"];
+        lines.push(...Array.from(typeImports));
+        if (typingImports.size > 0) {
+            lines.push(`from typing import ${Array.from(typingImports).sort().join(', ')}`);
+        }
+        // datetime import if referenced; include by default as safe
+        lines.push('from datetime import datetime');
+        lines.push(...Array.from(imports));
+        return lines.join('\n');
+    }
+    defaultInitializer(attr, required, isArray) {
+        if (required)
+            return '';
+        // Optional fields default to None; arrays can be None to distinguish missing vs empty
+        return ' = None';
+    }
+    mapAttributeToPythonType(attr, typingImports) {
+        const t = String(attr.type || '').toLowerCase();
+        const isArray = !!attr.array;
+        let base;
+        switch (t) {
+            case 'string':
+            case 'email':
+            case 'ip':
+            case 'url':
+                base = 'str';
+                break;
+            case 'integer':
+                base = 'int';
+                break;
+            case 'double':
+            case 'float':
+                base = 'float';
+                break;
+            case 'boolean':
+                base = 'bool';
+                break;
+            case 'datetime':
+                base = 'datetime';
+                break;
+            case 'enum': {
+                const els = Array.isArray(attr.elements) ? attr.elements : [];
+                if (els.length > 0) {
+                    typingImports.add('Literal');
+                    base = `Literal[${els.map((e) => `'${e.replace(/'/g, "\\'")}'`).join(', ')}]`;
+                }
+                else {
+                    base = 'str';
+                }
+                break;
+            }
+            case 'relationship': {
+                const relType = attr.relationType || '';
+                base = (relType === 'oneToMany' || relType === 'manyToMany') ? 'list[str]' : 'str';
+                break;
+            }
+            default:
+                base = 'str';
+                break;
+        }
+        if (isArray && t !== 'relationship') {
+            base = `list[${base}]`;
+        }
+        const required = !!attr.required;
+        if (!required) {
+            base = `${base} | None`;
+        }
+        return base;
+    }
+    toSnake(s) {
+        return s
+            .replace(/([a-z0-9])([A-Z])/g, '$1_$2')
+            .replace(/[^a-zA-Z0-9]+/g, '_')
+            .replace(/_+/g, '_')
+            .replace(/^_|_$/g, '')
+            .toLowerCase();
+    }
+    toPascal(s) {
+        return s
+            .replace(/[^a-zA-Z0-9]+/g, ' ')
+            .split(' ')
+            .filter(Boolean)
+            .map(w => w.charAt(0).toUpperCase() + w.slice(1))
+            .join('');
+    }
+}
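For illustration, given the mapping rules in `generateModel`, `composeHeader`, and `mapAttributeToPythonType` above, a hypothetical collection named `userProfiles` with a required string attribute, an optional enum, and an optional oneToMany relationship would come out roughly as the module below (the collection and attribute names are invented for this sketch):

```python
# Sketch of the module generateModel would emit for a hypothetical
# "userProfiles" collection; the attribute names are invented.
from __future__ import annotations
from pydantic import Field
from typing import Literal
from datetime import datetime
from .base import BaseAppwriteModel

class UserProfiles(BaseAppwriteModel):
    displayName: str                                   # required string stays non-optional
    status: Literal['active', 'banned'] | None = None  # enum elements become a Literal
    posts: list[str] | None = None                     # oneToMany relationship maps to a list of IDs
```

Since `writeBase` always rewrites `base.py` from the embedded template, each generated module only needs the relative `from .base import BaseAppwriteModel` import to be usable.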
package/dist/shared/schemaGenerator.d.ts
@@ -31,9 +31,10 @@ export declare class SchemaGenerator {
     private updateTypeScriptConfig;
     private extractRelationships;
     generateSchemas(options?: {
-        format?: "zod" | "json" | "both";
+        format?: "zod" | "json" | "pydantic" | "both" | "all";
         verbose?: boolean;
-    }): void;
+        outputDir?: string;
+    }): Promise<void>;
     createSchemaStringV4: (name: string, attributes: Attribute[]) => string;
     typeToZod: (attribute: Attribute) => string;
 }
package/dist/shared/schemaGenerator.js
@@ -289,21 +289,28 @@ export default appwriteConfig;
     extractRelationships() {
         this.relationshipMap = extractTwoWayRelationships(this.config);
     }
-    generateSchemas(options = {}) {
-        const { format = "both", verbose = false } = options;
+    async generateSchemas(options = {}) {
+        const { format = "both", verbose = false, outputDir } = options;
         if (!this.config.collections) {
             return;
         }
         // Create schemas directory using config setting
-        const
-
-
-
+        const configuredDir = outputDir || this.config.schemaConfig?.outputDirectory || "schemas";
+        let schemasPath;
+        if (path.isAbsolute(configuredDir)) {
+            schemasPath = configuredDir;
+        }
+        else if (configuredDir === "schemas") {
+            schemasPath = resolveSchemaDir(this.appwriteFolderPath);
+        }
+        else {
+            schemasPath = path.join(this.appwriteFolderPath, configuredDir);
+        }
         if (!fs.existsSync(schemasPath)) {
             fs.mkdirSync(schemasPath, { recursive: true });
         }
         // Generate Zod schemas (TypeScript)
-        if (format === "zod" || format === "both") {
+        if (format === "zod" || format === "both" || format === "all") {
             this.config.collections.forEach((collection) => {
                 const schemaString = this.createSchemaStringV4(collection.name, collection.attributes || []);
                 const camelCaseName = toCamelCase(collection.name);
@@ -315,14 +322,20 @@ export default appwriteConfig;
             });
         }
         // Generate JSON schemas (all at once)
-        if (format === "json" || format === "both") {
+        if (format === "json" || format === "both" || format === "all") {
             const jsonSchemaGenerator = new JsonSchemaGenerator(this.config, this.appwriteFolderPath);
             jsonSchemaGenerator.generateJsonSchemas({
                 outputFormat: format === "json" ? "json" : "both",
-                outputDirectory:
+                outputDirectory: configuredDir,
                 verbose: verbose
             });
         }
+        // Generate Python Pydantic models
+        if (format === "pydantic" || format === "all") {
+            const mod = await import("./pydanticModelGenerator.js");
+            const pgen = new mod.PydanticModelGenerator(this.config, this.appwriteFolderPath);
+            pgen.generatePydanticModels({ baseOutputDirectory: schemasPath, verbose });
+        }
         if (verbose) {
             MessageFormatter.success(`Schema generation completed (format: ${format})`, { prefix: "Schema" });
         }