appwrite-utils-cli 1.7.7 → 1.7.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,15 +5,51 @@ import { fetchAllCollections } from "../collections/methods.js";
  import { fetchAllDatabases } from "../databases/methods.js";
  import { CollectionSchema, attributeSchema, AppwriteConfigSchema, permissionsSchema, attributesSchema, indexesSchema, parseAttribute, } from "appwrite-utils";
  import { getDatabaseFromConfig } from "./afterImportActions.js";
+ import { getAdapterFromConfig } from "../utils/getClientFromConfig.js";
  import { listBuckets } from "../storage/methods.js";
  import { listFunctions, listFunctionDeployments } from "../functions/methods.js";
  import { MessageFormatter } from "../shared/messageFormatter.js";
+ import { isLegacyDatabases } from "../utils/typeGuards.js";
+ /**
+ * Convert between collection and table terminology based on data structure
+ */
+ function normalizeCollectionOrTable(collection) {
+ // Check if this is a table (has columns) or collection (has attributes)
+ const isTable = collection.columns && Array.isArray(collection.columns);
+ if (isTable) {
+ // Table structure - convert columns to attributes
+ MessageFormatter.debug(`Detected table structure: ${collection.name || collection.tableName}`, { prefix: "Migration" });
+ return {
+ ...collection,
+ attributes: collection.columns || [],
+ permissions: collection.$permissions || collection.permissions || [],
+ name: collection.name || collection.tableName,
+ $id: collection.$id || collection.tableId,
+ enabled: collection.enabled ?? true
+ };
+ }
+ else {
+ // Collection structure - use as-is with fallbacks
+ MessageFormatter.debug(`Detected collection structure: ${collection.name}`, { prefix: "Migration" });
+ return {
+ ...collection,
+ attributes: collection.attributes || [],
+ permissions: collection.$permissions || collection.permissions || [],
+ name: collection.name,
+ $id: collection.$id,
+ enabled: collection.enabled ?? true
+ };
+ }
+ }
  export class AppwriteToX {
  config;
  storage;
  updatedConfig;
  collToAttributeMap = new Map();
  appwriteFolderPath;
+ adapter;
+ apiMode;
+ databaseApiModes = new Map();
  constructor(config, appwriteFolderPath, storage) {
  this.config = config;
  this.updatedConfig = config;
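
For context only (this example is not part of the published diff), here is a minimal sketch of the two input shapes the new normalizeCollectionOrTable helper distinguishes; all field values are hypothetical:

    // Hypothetical TablesDB-style input: `columns` is present, so the helper copies
    // columns into `attributes` and falls back to tableName/tableId for name/$id.
    const tableShaped = {
        tableId: "posts",
        tableName: "Posts",
        columns: [{ key: "title", type: "string", size: 255, required: true }],
        $permissions: ['read("any")'],
    };
    // Hypothetical legacy-style input: `attributes` is already present, so the object
    // is passed through, with fallbacks only for permissions and enabled.
    const collectionShaped = {
        $id: "posts",
        name: "Posts",
        attributes: [{ key: "title", type: "string", size: 255, required: true }],
        $permissions: ['read("any")'],
    };
    // Both normalize to the same shape:
    // { $id: "posts", name: "Posts", attributes: [...], permissions: [...], enabled: true }
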
@@ -21,6 +57,24 @@ export class AppwriteToX {
  this.appwriteFolderPath = appwriteFolderPath;
  this.ensureClientInitialized();
  }
+ /**
+ * Initialize adapter for database operations with API mode detection
+ */
+ async initializeAdapter() {
+ if (!this.adapter) {
+ try {
+ const { adapter, apiMode } = await getAdapterFromConfig(this.config);
+ this.adapter = adapter;
+ this.apiMode = apiMode;
+ MessageFormatter.info(`Initialized database adapter with API mode: ${apiMode}`, { prefix: "Migration" });
+ }
+ catch (error) {
+ MessageFormatter.warning(`Failed to initialize adapter, falling back to legacy client: ${error instanceof Error ? error.message : 'Unknown error'}`, { prefix: "Migration" });
+ // Fallback to legacy client initialization
+ this.ensureClientInitialized();
+ }
+ }
+ }
  ensureClientInitialized() {
  if (!this.config.appwriteClient) {
  const client = new Client();
@@ -52,15 +106,103 @@ export class AppwriteToX {
  return parsedPermissions ?? [];
  };
  updateCollectionConfigAttributes = (collection) => {
- for (const attribute of collection.attributes) {
- const attributeMap = this.collToAttributeMap.get(collection.name);
+ // Normalize collection/table structure to handle both TablesDB and Legacy formats
+ const normalizedCollection = normalizeCollectionOrTable(collection);
+ for (const attribute of normalizedCollection.attributes) {
+ if (!attribute) {
+ MessageFormatter.warning("Skipping null/undefined attribute in updateCollectionConfigAttributes", { prefix: "Migration" });
+ continue;
+ }
  const attributeParsed = attributeSchema.parse(attribute);
  this.collToAttributeMap
- .get(collection.name)
+ .get(normalizedCollection.name)
  ?.push(attributeParsed);
  }
  };
- async appwriteSync(config, databases) {
+ /**
+ * Fetch collections/tables using the appropriate adapter or legacy client
+ */
+ async fetchCollectionsOrTables(databaseId, db) {
+ // Try to use adapter first
+ if (this.adapter) {
+ try {
+ const result = await this.adapter.listTables({ databaseId });
+ const items = result.tables || result.collections || [];
+ MessageFormatter.info(`Fetched ${items.length} items using ${this.apiMode} adapter`, { prefix: "Migration" });
+ return items;
+ }
+ catch (error) {
+ MessageFormatter.warning(`Adapter fetch failed, falling back to legacy: ${error instanceof Error ? error.message : 'Unknown error'}`, { prefix: "Migration" });
+ }
+ }
+ // Fallback to legacy method
+ try {
+ const collections = await fetchAllCollections(databaseId, db);
+ MessageFormatter.info(`Fetched ${collections.length} collections using legacy client`, { prefix: "Migration" });
+ return collections;
+ }
+ catch (error) {
+ MessageFormatter.error("Failed to fetch collections with both adapter and legacy methods", error instanceof Error ? error : new Error(String(error)), { prefix: "Migration" });
+ throw error;
+ }
+ }
+ /**
+ * Get collection/table using the appropriate adapter or legacy client
+ */
+ async getCollectionOrTable(databaseId, collectionId) {
+ // Try to use adapter first
+ if (this.adapter) {
+ try {
+ const result = await this.adapter.getTable({ databaseId, tableId: collectionId });
+ return result;
+ }
+ catch (error) {
+ MessageFormatter.warning(`Adapter get failed, falling back to legacy: ${error instanceof Error ? error.message : 'Unknown error'}`, { prefix: "Migration" });
+ }
+ }
+ // Fallback to legacy method
+ const db = getDatabaseFromConfig(this.config);
+ return await db.getCollection(databaseId, collectionId);
+ }
+ /**
+ * Detect API mode for a specific database by testing adapter capabilities
+ */
+ async detectDatabaseApiMode(databaseId) {
+ // If we already detected this database, return cached result
+ if (this.databaseApiModes.has(databaseId)) {
+ return this.databaseApiModes.get(databaseId);
+ }
+ // If we have a global adapter, use its API mode as default
+ if (this.apiMode) {
+ this.databaseApiModes.set(databaseId, this.apiMode);
+ MessageFormatter.debug(`Using global API mode for database ${databaseId}: ${this.apiMode}`, { prefix: "Migration" });
+ return this.apiMode;
+ }
+ // Default to legacy if no adapter available
+ const defaultMode = 'legacy';
+ this.databaseApiModes.set(databaseId, defaultMode);
+ MessageFormatter.debug(`Defaulting to legacy mode for database ${databaseId}`, { prefix: "Migration" });
+ return defaultMode;
+ }
+ /**
+ * Get API mode context for schema generation
+ */
+ getSchemaGeneratorApiContext() {
+ const databaseModes = {};
+ // Get API mode for each database
+ for (const db of this.updatedConfig.databases || []) {
+ const apiMode = this.databaseApiModes.get(db.$id) || this.apiMode || 'legacy';
+ databaseModes[db.$id] = apiMode;
+ }
+ return {
+ apiMode: this.apiMode || 'legacy',
+ databaseApiModes: databaseModes,
+ adapterMetadata: this.adapter?.getMetadata()
+ };
+ }
+ async appwriteSync(config, databases, databaseSelections, bucketSelections) {
+ // Initialize adapter for proper API mode detection and usage
+ await this.initializeAdapter();
  const db = getDatabaseFromConfig(config);
  if (!databases) {
  try {
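
For illustration (the shapes below are inferred from how the new parameters are read in this diff, not from a published type), the selection arguments threaded through appwriteSync and toSchemas look roughly like this:

    // Hypothetical values; field names follow their usage in the code above
    // (selection.databaseId, selection.tableIds, selection.bucketId).
    const databaseSelections = [
        { databaseId: "main", tableIds: ["posts", "users"] },
        { databaseId: "analytics", tableIds: [] }, // empty tableIds: keep every table
    ];
    const bucketSelections = [{ bucketId: "main-files" }];
    // const sync = new AppwriteToX(config, "./appwrite", storage);
    // await sync.toSchemas(undefined, databaseSelections, bucketSelections);
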
@@ -73,16 +215,22 @@ export class AppwriteToX {
  throw new Error(`Database fetch failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
  }
  }
+ // Filter databases based on selection if provided
+ let databasesToProcess = databases;
+ if (databaseSelections && databaseSelections.length > 0) {
+ databasesToProcess = databases?.filter(db => databaseSelections.some(selection => selection.databaseId === db.$id)) || [];
+ MessageFormatter.info(`Filtered to ${databasesToProcess.length} selected databases`, { prefix: "Migration" });
+ }
  let updatedConfig = { ...config };
  // Initialize databases array if it doesn't exist
  if (!updatedConfig.databases) {
  updatedConfig.databases = [];
  }
  // Sync remote databases to local config - add missing ones
- MessageFormatter.info(`Syncing ${databases.length} remote databases with local config...`, { prefix: "Migration" });
+ MessageFormatter.info(`Syncing ${databasesToProcess.length} remote databases with local config...`, { prefix: "Migration" });
  let addedCount = 0;
  let updatedCount = 0;
- for (const remoteDb of databases) {
+ for (const remoteDb of databasesToProcess) {
  // Check if this database already exists in the config
  const existingDbIndex = updatedConfig.databases.findIndex((localDb) => localDb.$id === remoteDb.$id);
  if (existingDbIndex === -1) {
@@ -106,10 +254,19 @@ export class AppwriteToX {
  MessageFormatter.success(`Database sync summary: ${addedCount} added, ${updatedCount} updated, ${updatedConfig.databases.length} total`, { prefix: "Migration" });
  // Fetch all buckets
  const allBuckets = await listBuckets(this.storage);
+ // Filter buckets based on selection if provided
+ let matchedBuckets = allBuckets.buckets;
+ if (bucketSelections && bucketSelections.length > 0) {
+ matchedBuckets = allBuckets.buckets.filter(bucket => bucketSelections.some(selection => selection.bucketId === bucket.$id));
+ MessageFormatter.info(`Filtered to ${matchedBuckets.length} selected buckets`, { prefix: "Migration" });
+ }
  // Loop through each database
- for (const database of databases) {
- // Match bucket to database
- const matchedBucket = allBuckets.buckets.find((bucket) => bucket.$id.toLowerCase().includes(database.$id.toLowerCase()));
+ for (const database of databasesToProcess) {
+ // Detect API mode for this specific database
+ const dbApiMode = await this.detectDatabaseApiMode(database.$id);
+ MessageFormatter.info(`Processing database '${database.name}' with API mode: ${dbApiMode}`, { prefix: "Migration" });
+ // Match bucket to database (from filtered buckets if selections provided)
+ const matchedBucket = matchedBuckets.find((bucket) => bucket.$id.toLowerCase().includes(database.$id.toLowerCase()));
  if (matchedBucket) {
  const dbConfig = updatedConfig.databases.find((db) => db.$id === database.$id);
  if (dbConfig) {
@@ -125,73 +282,131 @@ export class AppwriteToX {
  };
  }
  }
- const collections = await fetchAllCollections(database.$id, db);
+ // Use adapter-aware collection/table fetching with proper API mode detection
+ const collections = await this.fetchCollectionsOrTables(database.$id, db);
+ // Filter collections based on table selection if provided
+ let collectionsToProcess = collections;
+ if (databaseSelections && databaseSelections.length > 0) {
+ const dbSelection = databaseSelections.find(selection => selection.databaseId === database.$id);
+ if (dbSelection && dbSelection.tableIds.length > 0) {
+ collectionsToProcess = collections.filter(collection => dbSelection.tableIds.includes(collection.$id));
+ MessageFormatter.info(`Filtered to ${collectionsToProcess.length} selected tables for database '${database.name}'`, { prefix: "Migration" });
+ }
+ }
  // Loop through each collection in the current database
  if (!updatedConfig.collections) {
  updatedConfig.collections = [];
  }
- for (const collection of collections) {
- MessageFormatter.processing(`Processing collection: ${collection.name}`, { prefix: "Migration" });
- const existingCollectionIndex = updatedConfig.collections.findIndex((c) => c.name === collection.name);
- // Parse the collection permissions and attributes
- const collPermissions = this.parsePermissionsArray(collection.$permissions);
- const collAttributes = collection.attributes
- .map((attr) => {
- return parseAttribute(attr);
- })
- .filter((attribute) => attribute.type === "relationship"
- ? attribute.side !== "child"
- : true);
- for (const attribute of collAttributes) {
- if (attribute.type === "relationship" &&
- attribute.relatedCollection) {
- MessageFormatter.info(`Fetching related collection for ID: ${attribute.relatedCollection}`, { prefix: "Migration" });
- try {
- const relatedCollectionPulled = await db.getCollection(database.$id, attribute.relatedCollection);
- MessageFormatter.info(`Fetched Collection Name: ${relatedCollectionPulled.name}`, { prefix: "Migration" });
- attribute.relatedCollection = relatedCollectionPulled.name;
- MessageFormatter.info(`Updated attribute.relatedCollection to: ${attribute.relatedCollection}`, { prefix: "Migration" });
- }
- catch (error) {
- MessageFormatter.error("Error fetching related collection", error instanceof Error ? error : new Error(String(error)), { prefix: "Migration" });
+ MessageFormatter.info(`Processing ${collectionsToProcess.length} collections/tables in database '${database.name}'`, { prefix: "Migration" });
+ let processedCount = 0;
+ let errorCount = 0;
+ for (const collection of collectionsToProcess) {
+ try {
+ if (!collection) {
+ MessageFormatter.warning("Skipping null/undefined collection", { prefix: "Migration" });
+ errorCount++;
+ continue;
+ }
+ // Normalize collection/table structure to handle both TablesDB and Legacy formats
+ const normalizedCollection = normalizeCollectionOrTable(collection);
+ MessageFormatter.processing(`Processing ${normalizedCollection.name} (${normalizedCollection.$id})`, { prefix: "Migration" });
+ const existingCollectionIndex = updatedConfig.collections.findIndex((c) => c.name === normalizedCollection.name);
+ // Parse the collection permissions and attributes using normalized structure
+ const collPermissions = this.parsePermissionsArray(normalizedCollection.permissions);
+ // Process attributes with proper error handling
+ let collAttributes = [];
+ try {
+ collAttributes = normalizedCollection.attributes
+ .map((attr) => {
+ if (!attr) {
+ MessageFormatter.warning("Skipping null/undefined attribute", { prefix: "Migration" });
+ return null;
+ }
+ return parseAttribute(attr);
+ })
+ .filter((attribute) => attribute !== null &&
+ (attribute.type !== "relationship" ? true : attribute.side !== "child"));
+ }
+ catch (error) {
+ MessageFormatter.error(`Error processing attributes for ${normalizedCollection.name}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Migration" });
+ // Continue with empty attributes array
+ collAttributes = [];
+ }
+ for (const attribute of collAttributes) {
+ if (attribute.type === "relationship" &&
+ attribute.relatedCollection) {
+ MessageFormatter.info(`Fetching related collection for ID: ${attribute.relatedCollection}`, { prefix: "Migration" });
+ try {
+ const relatedCollectionPulled = await this.getCollectionOrTable(database.$id, attribute.relatedCollection);
+ MessageFormatter.info(`Fetched Collection Name: ${relatedCollectionPulled.name}`, { prefix: "Migration" });
+ attribute.relatedCollection = relatedCollectionPulled.name;
+ MessageFormatter.info(`Updated attribute.relatedCollection to: ${attribute.relatedCollection}`, { prefix: "Migration" });
+ }
+ catch (error) {
+ MessageFormatter.error("Error fetching related collection", error instanceof Error ? error : new Error(String(error)), { prefix: "Migration" });
+ }
  }
  }
+ this.collToAttributeMap.set(normalizedCollection.name, collAttributes);
+ // Process indexes with proper error handling using normalized collection
+ let collIndexes = [];
+ try {
+ const finalIndexes = (normalizedCollection.indexes || collection.indexes || []).map((index) => {
+ if (!index) {
+ MessageFormatter.warning("Skipping null/undefined index", { prefix: "Migration" });
+ return null;
+ }
+ return {
+ ...index,
+ // Convert TablesDB 'columns' to expected 'attributes' for schema validation
+ attributes: index.attributes || index.columns || [],
+ orders: index.orders?.filter((order) => {
+ return order !== null && order;
+ }),
+ };
+ }).filter((index) => index !== null);
+ collIndexes = indexesSchema.parse(finalIndexes) ?? [];
+ }
+ catch (error) {
+ MessageFormatter.error(`Error processing indexes for ${normalizedCollection.name}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Migration" });
+ // Continue with empty indexes array
+ collIndexes = [];
+ }
+ // Prepare the collection object to be added or updated using normalized data
+ const collToPush = CollectionSchema.parse({
+ $id: normalizedCollection.$id,
+ name: normalizedCollection.name,
+ enabled: normalizedCollection.enabled,
+ documentSecurity: collection.documentSecurity, // Use original collection for this field
+ $createdAt: collection.$createdAt, // Use original collection for timestamps
+ $updatedAt: collection.$updatedAt,
+ $permissions: collPermissions.length > 0 ? collPermissions : undefined,
+ indexes: collIndexes.length > 0 ? collIndexes : undefined,
+ attributes: collAttributes.length > 0 ? collAttributes : undefined,
+ });
+ if (existingCollectionIndex !== -1) {
+ // Update existing collection
+ updatedConfig.collections[existingCollectionIndex] = collToPush;
+ MessageFormatter.debug(`Updated existing collection: ${normalizedCollection.name}`, { prefix: "Migration" });
+ }
+ else {
+ // Add new collection
+ updatedConfig.collections.push(collToPush);
+ MessageFormatter.debug(`Added new collection: ${normalizedCollection.name}`, { prefix: "Migration" });
+ }
+ processedCount++;
  }
- this.collToAttributeMap.set(collection.name, collAttributes);
- const finalIndexes = collection.indexes.map((index) => {
- return {
- ...index,
- orders: index.orders?.filter((order) => {
- return order !== null && order;
- }),
- };
- });
- const collIndexes = indexesSchema.parse(finalIndexes) ?? [];
- // Prepare the collection object to be added or updated
- const collToPush = CollectionSchema.parse({
- $id: collection.$id,
- name: collection.name,
- enabled: collection.enabled,
- documentSecurity: collection.documentSecurity,
- $createdAt: collection.$createdAt,
- $updatedAt: collection.$updatedAt,
- $permissions: collPermissions.length > 0 ? collPermissions : undefined,
- indexes: collIndexes.length > 0 ? collIndexes : undefined,
- attributes: collAttributes.length > 0 ? collAttributes : undefined,
- });
- if (existingCollectionIndex !== -1) {
- // Update existing collection
- updatedConfig.collections[existingCollectionIndex] = collToPush;
- }
- else {
- // Add new collection
- updatedConfig.collections.push(collToPush);
+ catch (error) {
+ MessageFormatter.error(`Error processing collection: ${collection?.name || 'unknown'}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Migration" });
+ errorCount++;
  }
  }
- MessageFormatter.success(`Processed ${collections.length} collections in ${database.name}`, { prefix: "Migration" });
+ MessageFormatter.success(`Database '${database.name}' processing complete: ${processedCount} collections processed, ${errorCount} errors`, { prefix: "Migration" });
  }
  // Add unmatched buckets as global buckets
- const globalBuckets = allBuckets.buckets.filter((bucket) => !updatedConfig.databases.some((db) => db.bucket && db.bucket.$id === bucket.$id));
+ // Use filtered buckets if selections provided, otherwise use all buckets
+ const sourceBuckets = bucketSelections && bucketSelections.length > 0 ? matchedBuckets : allBuckets.buckets;
+ const globalBuckets = sourceBuckets.filter((bucket) => !updatedConfig.databases.some((db) => db.bucket && db.bucket.$id === bucket.$id));
  updatedConfig.buckets = globalBuckets.map((bucket) => ({
  $id: bucket.$id,
  name: bucket.name,
@@ -225,23 +440,32 @@ export class AppwriteToX {
  this.updatedConfig = updatedConfig;
  MessageFormatter.success(`Sync completed - ${updatedConfig.databases.length} databases, ${updatedConfig.collections?.length || 0} collections, ${updatedConfig.buckets?.length || 0} buckets, ${updatedConfig.functions?.length || 0} functions`, { prefix: "Migration" });
  }
- async toSchemas(databases) {
+ async toSchemas(databases, databaseSelections, bucketSelections) {
  try {
  MessageFormatter.info("Starting sync-from-Appwrite process...", { prefix: "Migration" });
- await this.appwriteSync(this.config, databases);
+ await this.appwriteSync(this.config, databases, databaseSelections, bucketSelections);
  const generator = new SchemaGenerator(this.updatedConfig, this.appwriteFolderPath);
+ // Pass API mode context to the schema generator
+ const apiContext = this.getSchemaGeneratorApiContext();
+ // Extend the config with API mode information for schema generation
+ const configWithApiContext = {
+ ...this.updatedConfig,
+ apiMode: apiContext.apiMode,
+ databaseApiModes: apiContext.databaseApiModes,
+ adapterMetadata: apiContext.adapterMetadata
+ };
  // Check if this is a YAML-based project
  const yamlConfigPath = findYamlConfig(this.appwriteFolderPath);
  const isYamlProject = !!yamlConfigPath;
  if (isYamlProject) {
  MessageFormatter.info("Detected YAML configuration - generating YAML collection definitions", { prefix: "Migration" });
  generator.updateYamlCollections();
- await generator.updateConfig(this.updatedConfig, true);
+ await generator.updateConfig(configWithApiContext, true);
  }
  else {
  MessageFormatter.info("Generating TypeScript collection definitions", { prefix: "Migration" });
  generator.updateTsSchemas();
- await generator.updateConfig(this.updatedConfig, false);
+ await generator.updateConfig(configWithApiContext, false);
  }
  MessageFormatter.info("Generating Zod schemas from synced collections...", { prefix: "Migration" });
  generator.generateSchemas();
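
For context (values are hypothetical), the API-mode context that getSchemaGeneratorApiContext merges into the config before schema generation has this shape:

    // Sketch of the object spread into configWithApiContext above.
    const apiContext = {
        apiMode: "legacy", // 'legacy' is the fallback; an initialized adapter supplies its own mode string
        databaseApiModes: { main: "legacy", analytics: "legacy" },
        adapterMetadata: undefined, // whatever this.adapter?.getMetadata() returns, if an adapter was initialized
    };
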
@@ -6,8 +6,8 @@ export declare const YamlImportConfigSchema: z.ZodObject<{
  basePath: z.ZodOptional<z.ZodString>;
  type: z.ZodDefault<z.ZodEnum<{
  json: "json";
- yaml: "yaml";
  csv: "csv";
+ yaml: "yaml";
  }>>;
  }, z.core.$strip>;
  target: z.ZodObject<{
@@ -518,6 +518,13 @@ export function generateTableSchema() {
  tableSchema.$id = "https://appwrite-utils.dev/schemas/table.schema.json";
  tableSchema.title = "Appwrite Table Definition";
  tableSchema.description = "YAML configuration for Appwrite table definitions (new TablesDB API)";
+ // Replace 'documentSecurity' with 'rowSecurity'
+ delete tableSchema.properties.documentSecurity;
+ tableSchema.properties.rowSecurity = {
+ "type": "boolean",
+ "description": "Enable row-level security",
+ "default": false
+ };
  // Replace 'attributes' with 'columns'
  delete tableSchema.properties.attributes;
  tableSchema.properties.columns = {
@@ -528,26 +535,34 @@ export function generateTableSchema() {
  },
  "default": []
  };
- // Update index definition to support both attributes and columns
+ // Update index definition to use columns instead of attributes
+ delete tableSchema.$defs.index.properties.attributes;
  tableSchema.$defs.index.properties.columns = {
  "type": "array",
  "items": { "type": "string" },
  "description": "Column names to include in the index",
  "minItems": 1
  };
- // Make index support either attributes or columns
- tableSchema.$defs.index.oneOf = [
- { "required": ["key", "type", "attributes"] },
- { "required": ["key", "type", "columns"] }
- ];
- delete tableSchema.$defs.index.required;
+ // Update index required fields to use columns
+ const requiredIndex = tableSchema.$defs.index.required;
+ if (requiredIndex && requiredIndex.includes("attributes")) {
+ const attributesIndex = requiredIndex.indexOf("attributes");
+ requiredIndex[attributesIndex] = "columns";
+ }
  // Add column definition (similar to attribute but with table terminology)
  tableSchema.$defs.column = JSON.parse(JSON.stringify(tableSchema.$defs.attribute));
+ // Add encrypted property (table-specific feature)
+ tableSchema.$defs.column.properties.encrypted = {
+ "type": "boolean",
+ "description": "Whether the column should be encrypted",
+ "default": false
+ };
+ // Replace relatedCollection with relatedTable for table terminology
+ delete tableSchema.$defs.column.properties.relatedCollection;
  tableSchema.$defs.column.properties.relatedTable = {
  "type": "string",
  "description": "Related table for relationship columns"
  };
- delete tableSchema.$defs.column.properties.relatedCollection;
  return tableSchema;
  }
  /**
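
To make the table-schema changes above concrete, here is a hand-assembled excerpt (limited to the fields this hunk touches, not generated output) of the JSON Schema that generateTableSchema now returns:

    // Hypothetical excerpt; only the properties changed in this version are shown.
    const tableSchemaExcerpt = {
        properties: {
            // documentSecurity is removed in favor of rowSecurity
            rowSecurity: { type: "boolean", description: "Enable row-level security", default: false },
        },
        $defs: {
            index: {
                properties: {
                    // index.properties.attributes is removed; "columns" replaces "attributes" in `required`
                    columns: { type: "array", items: { type: "string" }, description: "Column names to include in the index", minItems: 1 },
                },
            },
            column: {
                properties: {
                    // relatedCollection is replaced by relatedTable, and encrypted is added
                    encrypted: { type: "boolean", description: "Whether the column should be encrypted", default: false },
                    relatedTable: { type: "string", description: "Related table for relationship columns" },
                },
            },
        },
    };
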
@@ -111,13 +111,16 @@ export class SchemaGenerator {
  collections?.forEach((collection) => {
  const { databaseId, ...collectionWithoutDbId } = collection; // Destructure to exclude databaseId
  const collectionFilePath = path.join(collectionsFolderPath, `${collection.name}.ts`);
+ // Determine if we're in tables mode for terminology
+ const isTablesMode = outputDir === "tables";
+ const securityField = isTablesMode ? "rowSecurity" : "documentSecurity";
  const collectionContent = `import { type CollectionCreate } from "appwrite-utils";
-
+
  const ${collection.name}Config: Partial<CollectionCreate> = {
  name: "${collection.name}",
  $id: "${collection.$id}",
  enabled: ${collection.enabled},
- documentSecurity: ${collection.documentSecurity},
+ ${securityField}: ${collection.documentSecurity},
  $permissions: [
  ${collection.$permissions
  .map((permission) => `{ permission: "${permission.permission}", target: "${permission.target}" }`)
@@ -128,25 +131,34 @@ export class SchemaGenerator {
  .map((attr) => {
  return `{ ${Object.entries(attr)
  .map(([key, value]) => {
+ // Handle table vs collection terminology for related fields
+ let outputKey = key;
+ let outputValue = value;
+ if (isTablesMode) {
+ // Convert collection terminology to table terminology
+ if (key === "relatedCollection") {
+ outputKey = "relatedTable";
+ }
+ }
  // Check the type of the value and format it accordingly
- if (typeof value === "string") {
+ if (typeof outputValue === "string") {
  // If the value is a string, wrap it in quotes
- return `${key}: "${value.replace(/"/g, '\\"')}"`; // Escape existing quotes in the string
+ return `${outputKey}: "${outputValue.replace(/"/g, '\\"')}"`; // Escape existing quotes in the string
  }
- else if (Array.isArray(value)) {
+ else if (Array.isArray(outputValue)) {
  // If the value is an array, join it with commas
- if (value.length > 0) {
- return `${key}: [${value
+ if (outputValue.length > 0) {
+ return `${outputKey}: [${outputValue
  .map((item) => `"${item}"`)
  .join(", ")}]`;
  }
  else {
- return `${key}: []`;
+ return `${outputKey}: []`;
  }
  }
  else {
  // If the value is not a string (e.g., boolean or number), output it directly
- return `${key}: ${value}`;
+ return `${outputKey}: ${outputValue}`;
  }
  })
  .join(", ")} }`;
@@ -155,16 +167,17 @@ export class SchemaGenerator {
  ],
  indexes: [
  ${(collection.indexes?.map((index) => {
- // Map each attribute to ensure it is properly quoted
+ // Use appropriate terminology for index attributes/columns
+ const indexField = isTablesMode ? "columns" : "attributes";
  const formattedAttributes = index.attributes.map((attr) => `"${attr}"`).join(", ") ?? "";
- return `{ key: "${index.key}", type: "${index.type}", attributes: [${formattedAttributes}], orders: [${index.orders
+ return `{ key: "${index.key}", type: "${index.type}", ${indexField}: [${formattedAttributes}], orders: [${index.orders
  ?.filter((order) => order !== null)
  .map((order) => `"${order}"`)
  .join(", ") ?? ""}] }`;
  }) ?? []).join(",\n ")}
  ]
  };
-
+
  export default ${collection.name}Config;
  `;
  fs.writeFileSync(collectionFilePath, collectionContent, {