appwrite-utils-cli 1.7.7 → 1.7.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -157,6 +157,53 @@ const YamlCollectionSchema = z.object({
 
 type YamlCollection = z.infer<typeof YamlCollectionSchema>;
 
+// YAML Table Schema - Supports table-specific terminology
+const YamlTableSchema = z.object({
+  name: z.string(),
+  id: z.string().optional(),
+  rowSecurity: z.boolean().default(false), // Tables use rowSecurity
+  enabled: z.boolean().default(true),
+  permissions: z.array(
+    z.object({
+      permission: z.string(),
+      target: z.string()
+    })
+  ).optional().default([]),
+  columns: z.array( // Tables use columns terminology
+    z.object({
+      key: z.string(),
+      type: z.string(),
+      size: z.number().optional(),
+      required: z.boolean().default(false),
+      array: z.boolean().optional(),
+      encrypted: z.boolean().optional(), // Tables support encrypted property
+      default: z.any().optional(),
+      min: z.number().optional(),
+      max: z.number().optional(),
+      elements: z.array(z.string()).optional(),
+      relatedTable: z.string().optional(), // Tables use relatedTable
+      relationType: z.string().optional(),
+      twoWay: z.boolean().optional(),
+      twoWayKey: z.string().optional(),
+      onDelete: z.string().optional(),
+      side: z.string().optional(),
+      encrypt: z.boolean().optional(),
+      format: z.string().optional()
+    })
+  ).optional().default([]),
+  indexes: z.array(
+    z.object({
+      key: z.string(),
+      type: z.string(),
+      columns: z.array(z.string()), // Tables use columns in indexes
+      orders: z.array(z.string()).optional()
+    })
+  ).optional().default([]),
+  importDefs: z.array(z.any()).optional().default([])
+});
+
+type YamlTable = z.infer<typeof YamlTableSchema>;
+
 /**
  * Loads a YAML collection file and converts it to CollectionCreate format
  * @param filePath Path to the YAML collection file
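The new YamlTableSchema lets table YAML files be validated directly with table terminology (rowSecurity, columns, relatedTable) instead of being squeezed through the collection schema. A minimal sketch of such a file and how it would parse, assuming js-yaml is imported as in this module and that YamlTableSchema from the hunk above is in scope (it is module-private in the package):

```ts
import * as yaml from "js-yaml";

// Hypothetical table definition written with table terminology.
const exampleTableYaml = `
name: Orders
rowSecurity: true
columns:
  - key: total
    type: double
    required: true
  - key: customer
    type: relationship
    relatedTable: customers
    relationType: manyToOne
indexes:
  - key: idx_total
    type: key
    columns: [total]
`;

// Zod applies the declared defaults: enabled -> true, permissions/importDefs -> [].
const parsed = YamlTableSchema.parse(yaml.load(exampleTableYaml));
console.log(parsed.rowSecurity, parsed.columns.length); // true 2
```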
@@ -214,57 +261,54 @@ export const loadYamlCollection = (filePath: string): CollectionCreate | null =>
 };
 
 /**
- * Loads a YAML table file and converts it to table format
+ * Loads a YAML table file and converts it to CollectionCreate format
  * @param filePath Path to the YAML table file
- * @returns Table object or null if loading fails
+ * @returns CollectionCreate object or null if loading fails
  */
-export const loadYamlTable = (filePath: string): any | null => {
+export const loadYamlTable = (filePath: string): CollectionCreate | null => {
   try {
     const fileContent = fs.readFileSync(filePath, "utf8");
     const yamlData = yaml.load(fileContent) as unknown;
 
-    // For now, use the collection schema as base and adapt for tables
-    const parsedTable = YamlCollectionSchema.parse(yamlData);
+    // Use the new table-specific schema
+    const parsedTable = YamlTableSchema.parse(yamlData);
 
-    // Convert YAML table to TableCreate format
-    const table: any = {
+    // Convert YAML table to CollectionCreate format (internal representation)
+    const table: CollectionCreate = {
       name: parsedTable.name,
-      tableId: (yamlData as any).tableId || parsedTable.id || parsedTable.name.toLowerCase().replace(/\s+/g, '_'),
-      documentSecurity: parsedTable.documentSecurity,
+      $id: (yamlData as any).tableId || parsedTable.id || parsedTable.name.toLowerCase().replace(/\s+/g, '_'),
+      documentSecurity: parsedTable.rowSecurity, // Convert rowSecurity to documentSecurity
       enabled: parsedTable.enabled,
       $permissions: parsedTable.permissions.map(p => ({
        permission: p.permission as any,
        target: p.target
      })),
-      attributes: parsedTable.attributes.map(attr => ({
-        key: attr.key,
-        type: attr.type as any,
-        size: attr.size,
-        required: attr.required,
-        array: attr.array,
-        xdefault: attr.default,
-        min: attr.min,
-        max: attr.max,
-        elements: attr.elements,
-        relatedCollection: attr.relatedCollection,
-        relationType: attr.relationType as any,
-        twoWay: attr.twoWay,
-        twoWayKey: attr.twoWayKey,
-        onDelete: attr.onDelete as any,
-        side: attr.side as any,
-        encrypted: (attr as any).encrypt,
-        format: (attr as any).format
+      attributes: parsedTable.columns.map(col => ({ // Convert columns to attributes
+        key: col.key,
+        type: col.type as any,
+        size: col.size,
+        required: col.required,
+        array: col.array,
+        xdefault: col.default,
+        min: col.min,
+        max: col.max,
+        elements: col.elements,
+        relatedCollection: col.relatedTable, // Convert relatedTable to relatedCollection
+        relationType: col.relationType as any,
+        twoWay: col.twoWay,
+        twoWayKey: col.twoWayKey,
+        onDelete: col.onDelete as any,
+        side: col.side as any,
+        encrypted: col.encrypted || col.encrypt, // Support both encrypted and encrypt
+        format: col.format
       })),
       indexes: parsedTable.indexes.map(idx => ({
         key: idx.key,
         type: idx.type as any,
-        attributes: idx.attributes,
+        attributes: idx.columns, // Convert columns to attributes
         orders: idx.orders as any
       })),
-      importDefs: parsedTable.importDefs,
-      databaseId: (yamlData as any).databaseId,
-      // Add backward compatibility field
-      $id: (yamlData as any).$id || parsedTable.id
+      importDefs: parsedTable.importDefs || []
     };
 
     return table;
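loadYamlTable now validates against YamlTableSchema and maps the table vocabulary onto the internal CollectionCreate shape: rowSecurity becomes documentSecurity, columns become attributes, relatedTable becomes relatedCollection, and tableId/id (or a slugified name) becomes $id. Roughly, for the example file above (file path and values illustrative only):

```ts
// Illustrative result shape for the "Orders" example, assuming it is saved as tables/orders.yaml.
const table = loadYamlTable("tables/orders.yaml");
// {
//   name: "Orders",
//   $id: "orders",              // no tableId/id given, so the name is slugified
//   documentSecurity: true,     // mapped from rowSecurity
//   enabled: true,
//   $permissions: [],
//   attributes: [
//     { key: "total", type: "double", required: true, ... },
//     { key: "customer", type: "relationship",
//       relatedCollection: "customers",   // mapped from relatedTable
//       relationType: "manyToOne", ... }
//   ],
//   indexes: [{ key: "idx_total", type: "key", attributes: ["total"] }], // columns -> attributes
//   importDefs: []
// }
```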
@@ -346,7 +390,7 @@ export const discoverCollections = async (collectionsDir: string): Promise<Colle
  * Result of discovering tables from a directory
  */
 export interface TableDiscoveryResult {
-  tables: any[];
+  tables: CollectionCreate[];
   loadedNames: Set<string>;
   conflicts: Array<{ name: string; source1: string; source2: string }>;
 }
@@ -361,7 +405,7 @@ export const discoverTables = async (
   tablesDir: string,
   existingNames: Set<string> = new Set()
 ): Promise<TableDiscoveryResult> => {
-  const tables: any[] = [];
+  const tables: CollectionCreate[] = [];
   const loadedNames = new Set<string>();
   const conflicts: Array<{ name: string; source1: string; source2: string }> = [];
 
@@ -380,7 +424,7 @@ export const discoverTables = async (
       continue;
     }
     const filePath = path.join(tablesDir, file);
-    let table: any | null = null;
+    let table: CollectionCreate | null = null;
 
     // Handle YAML tables
     if (file.endsWith('.yaml') || file.endsWith('.yml')) {
@@ -391,7 +435,7 @@ export const discoverTables = async (
     else if (file.endsWith('.ts')) {
       const fileUrl = pathToFileURL(filePath).href;
       const tableModule = (await import(fileUrl));
-      const importedTable: any = tableModule.default?.default || tableModule.default || tableModule;
+      const importedTable: CollectionCreate = tableModule.default?.default || tableModule.default || tableModule;
       if (importedTable) {
         table = importedTable;
         // Ensure importDefs are properly loaded
@@ -402,7 +446,7 @@ export const discoverTables = async (
     }
 
     if (table) {
-      const tableName = table.name || table.tableId || table.$id || file;
+      const tableName = table.name || (table as any).tableId || table.$id || file;
 
       // Check for naming conflicts with existing collections
       if (existingNames.has(tableName)) {
@@ -415,7 +459,7 @@ export const discoverTables = async (
       } else {
         loadedNames.add(tableName);
         // Mark as coming from tables directory
-        table._isFromTablesDir = true;
+        (table as any)._isFromTablesDir = true;
         tables.push(table);
       }
     }
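discoverTables now returns strongly typed CollectionCreate[] while still reporting name clashes against already-discovered collections. A usage sketch; it assumes discoverCollections returns an analogous result object with collections and loadedNames fields, which is not shown in this hunk:

```ts
// Sketch: discover collections first, then tables, feeding the collection names in
// so cross-directory duplicates surface as conflicts instead of being loaded twice.
const collectionResult = await discoverCollections("collections");
const tableResult = await discoverTables("tables", collectionResult.loadedNames);

for (const conflict of tableResult.conflicts) {
  console.warn(`Name conflict: ${conflict.name} (${conflict.source1} vs ${conflict.source2})`);
}

// Both lists now share the CollectionCreate shape and can be processed together.
const everything = [...collectionResult.collections, ...tableResult.tables];
```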
@@ -467,7 +511,7 @@ export const discoverLegacyDirectory = async (
         ...collection,
         _isFromTablesDir: true,
         tableId: collection.$id || collection.name.toLowerCase().replace(/\s+/g, '_')
-      };
+      } as CollectionCreate;
       items.push(table);
     } else {
       items.push(collection);
@@ -8,6 +8,7 @@ export interface YamlCollectionData {
   name: string;
   id?: string;
   documentSecurity?: boolean;
+  rowSecurity?: boolean;
   enabled?: boolean;
   permissions?: Array<{
     permission: string;
@@ -86,10 +87,16 @@ export function collectionToYaml(
   const yamlData: YamlCollectionData = {
     name: collection.name,
     id: collection.$id,
-    documentSecurity: collection.documentSecurity,
     enabled: collection.enabled,
   };
 
+  // Use appropriate security field based on terminology
+  if (config.useTableTerminology) {
+    yamlData.rowSecurity = collection.documentSecurity;
+  } else {
+    yamlData.documentSecurity = collection.documentSecurity;
+  }
+
   // Convert permissions
   if (collection.$permissions && collection.$permissions.length > 0) {
     yamlData.permissions = collection.$permissions.map(p => ({
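collectionToYaml now writes the security flag under whichever key matches the project's terminology setting. A sketch of the branch's effect, assuming a collection with documentSecurity set to true:

```ts
// Sketch of the terminology switch above (collection.documentSecurity === true assumed).
const yamlData: YamlCollectionData = { name: "Orders", id: "orders", enabled: true };

if (config.useTableTerminology) {
  yamlData.rowSecurity = true;       // serialized as `rowSecurity: true`
} else {
  yamlData.documentSecurity = true;  // serialized as `documentSecurity: true`
}
```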
@@ -240,6 +247,12 @@ export function normalizeYamlData(yamlData: YamlCollectionData): YamlCollectionD
     }));
   }
 
+  // Normalize security fields - prefer documentSecurity for consistency
+  if (yamlData.rowSecurity !== undefined && yamlData.documentSecurity === undefined) {
+    normalized.documentSecurity = yamlData.rowSecurity;
+    delete normalized.rowSecurity;
+  }
+
   return normalized;
 }
 
@@ -248,7 +261,8 @@ export function normalizeYamlData(yamlData: YamlCollectionData): YamlCollectionD
  */
 export function usesTableTerminology(yamlData: YamlCollectionData): boolean {
   return !!(yamlData.columns && yamlData.columns.length > 0) ||
-    !!(yamlData.indexes?.some(idx => !!(idx as any).columns));
+    !!(yamlData.indexes?.some(idx => !!(idx as any).columns)) ||
+    yamlData.rowSecurity !== undefined;
 }
 
 /**
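A YAML file is now treated as table-flavoured if it uses columns, column-based indexes, or a rowSecurity flag, and normalizeYamlData (previous hunk) folds a lone rowSecurity back into documentSecurity so downstream code only has to read one field. A sketch of both behaviours, with literals trimmed to the properties these checks actually read:

```ts
const tableStyle = { name: "Orders", rowSecurity: true } as YamlCollectionData;
const collectionStyle = { name: "Orders", documentSecurity: true } as YamlCollectionData;

usesTableTerminology(tableStyle);      // true  - rowSecurity present (new in 1.7.8)
usesTableTerminology(collectionStyle); // false - collection terminology only

const normalized = normalizeYamlData(tableStyle);
// normalized.documentSecurity === true and rowSecurity has been removed;
// if documentSecurity were already set, rowSecurity would be left untouched.
```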
@@ -279,6 +293,12 @@ export function convertTerminology(
       }));
     }
 
+    // Convert security field
+    if (yamlData.documentSecurity !== undefined && yamlData.rowSecurity === undefined) {
+      converted.rowSecurity = yamlData.documentSecurity;
+      delete converted.documentSecurity;
+    }
+
     return converted;
   } else {
     // Convert columns to attributes (normalize)
@@ -367,7 +387,6 @@ export function generateYamlTemplate(
   const template: YamlCollectionData = {
     name: entityName,
     id: entityName.toLowerCase().replace(/\s+/g, '_'),
-    documentSecurity: false,
     enabled: true,
     permissions: [
       {
@@ -390,6 +409,13 @@ export function generateYamlTemplate(
     importDefs: []
   };
 
+  // Use appropriate security field based on terminology
+  if (config.useTableTerminology) {
+    template.rowSecurity = false;
+  } else {
+    template.documentSecurity = false;
+  }
+
   // Assign fields with correct property name
   (template as any)[fieldsKey] = fieldsArray;
   template.indexes = indexesArray as any;
@@ -72,6 +72,7 @@ import { configureLogging, updateLogger, logger } from "./shared/logging.js";
 import { MessageFormatter, Messages } from "./shared/messageFormatter.js";
 import { SchemaGenerator } from "./shared/schemaGenerator.js";
 import { findYamlConfig } from "./config/yamlConfig.js";
+import { createImportSchemas } from "./migrations/yaml/generateImportSchemas.js";
 import {
   validateCollectionsTablesConfig,
   reportValidationResults,
@@ -80,6 +81,7 @@ import {
 } from "./config/configValidation.js";
 import { ConfigManager } from "./config/ConfigManager.js";
 import { ClientFactory } from "./utils/ClientFactory.js";
+import type { DatabaseSelection, BucketSelection } from "./shared/selectionDialogs.js";
 
 export interface SetupOptions {
   databases?: Models.Database[];
@@ -341,6 +343,26 @@ export class UtilsController {
     return dbs.databases;
   }
 
+  async fetchAllBuckets(): Promise<{ buckets: Models.Bucket[] }> {
+    await this.init();
+    if (!this.storage) {
+      MessageFormatter.warning("Storage not initialized - buckets will be empty", { prefix: "Controller" });
+      return { buckets: [] };
+    }
+
+    try {
+      const result = await this.storage.listBuckets([
+        Query.limit(1000) // Increase limit to get all buckets
+      ]);
+
+      MessageFormatter.success(`Found ${result.buckets.length} buckets`, { prefix: "Controller" });
+      return result;
+    } catch (error: any) {
+      MessageFormatter.error(`Failed to fetch buckets: ${error.message || error}`, error instanceof Error ? error : undefined, { prefix: "Controller" });
+      return { buckets: [] };
+    }
+  }
+
   async wipeOtherDatabases(databasesToKeep: Models.Database[]) {
     await this.init();
     if (!this.database) {
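The new fetchAllBuckets wraps storage.listBuckets with a high page limit and defensive error handling, so callers always receive an array. A usage sketch, assuming an already-constructed UtilsController (constructor options omitted):

```ts
// Sketch: an empty array means storage was unavailable or the request failed;
// both cases are already logged by the method itself.
const { buckets } = await controller.fetchAllBuckets();

for (const bucket of buckets) {
  console.log(`${bucket.name} (${bucket.$id})`);
}
```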
@@ -636,7 +658,9 @@
 
   async synchronizeConfigurations(
     databases?: Models.Database[],
-    config?: AppwriteConfig
+    config?: AppwriteConfig,
+    databaseSelections?: DatabaseSelection[],
+    bucketSelections?: BucketSelection[]
   ) {
     await this.init();
     if (!this.storage) {
@@ -652,21 +676,109 @@
       MessageFormatter.error("Failed to get appwriteFolderPath", undefined, { prefix: "Controller" });
       return;
     }
+
+    // If selections are provided, filter the databases accordingly
+    let filteredDatabases = databases;
+    if (databaseSelections && databaseSelections.length > 0) {
+      // Convert selections to Models.Database format
+      filteredDatabases = [];
+      const allDatabases = databases ? databases : await fetchAllDatabases(this.database!);
+
+      for (const selection of databaseSelections) {
+        const database = allDatabases.find(db => db.$id === selection.databaseId);
+        if (database) {
+          filteredDatabases.push(database);
+        } else {
+          MessageFormatter.warning(`Database with ID ${selection.databaseId} not found`, { prefix: "Controller" });
+        }
+      }
+
+      MessageFormatter.info(`Syncing ${filteredDatabases.length} selected databases out of ${allDatabases.length} available`, { prefix: "Controller" });
+    }
+
     const appwriteToX = new AppwriteToX(
       configToUse,
       this.appwriteFolderPath,
       this.storage
     );
-    await appwriteToX.toSchemas(databases);
-
+    await appwriteToX.toSchemas(filteredDatabases);
+
     // Update the controller's config with the synchronized collections
     this.config = appwriteToX.updatedConfig;
-
+
     // Write the updated config back to disk
     const generator = new SchemaGenerator(this.config, this.appwriteFolderPath);
     const yamlConfigPath = findYamlConfig(this.appwriteFolderPath);
     const isYamlProject = !!yamlConfigPath;
     await generator.updateConfig(this.config, isYamlProject);
+
+    // Regenerate JSON schemas to reflect any table terminology fixes
+    try {
+      MessageFormatter.progress("Regenerating JSON schemas...", { prefix: "Sync" });
+      await createImportSchemas(this.appwriteFolderPath);
+      MessageFormatter.success("JSON schemas regenerated successfully", { prefix: "Sync" });
+    } catch (error) {
+      // Log error but don't fail the sync process
+      const errorMessage = error instanceof Error ? error.message : String(error);
+      MessageFormatter.warning(
+        `Failed to regenerate JSON schemas, but sync completed: ${errorMessage}`,
+        { prefix: "Sync" }
+      );
+      logger.warn("Schema regeneration failed during sync:", error);
+    }
+  }
+
+  async selectiveSync(
+    databaseSelections: DatabaseSelection[],
+    bucketSelections: BucketSelection[]
+  ): Promise<void> {
+    await this.init();
+    if (!this.database) {
+      MessageFormatter.error("Database not initialized", undefined, { prefix: "Controller" });
+      return;
+    }
+
+    MessageFormatter.progress("Starting selective sync...", { prefix: "Controller" });
+
+    // Convert database selections to Models.Database format
+    const selectedDatabases: Models.Database[] = [];
+
+    for (const dbSelection of databaseSelections) {
+      // Get the full database object from the controller
+      const databases = await fetchAllDatabases(this.database);
+      const database = databases.find(db => db.$id === dbSelection.databaseId);
+
+      if (database) {
+        selectedDatabases.push(database);
+        MessageFormatter.info(`Selected database: ${database.name} (${database.$id})`, { prefix: "Controller" });
+
+        // Log selected tables for this database
+        if (dbSelection.tableIds && dbSelection.tableIds.length > 0) {
+          MessageFormatter.info(`  Tables: ${dbSelection.tableIds.join(', ')}`, { prefix: "Controller" });
+        }
+      } else {
+        MessageFormatter.warning(`Database with ID ${dbSelection.databaseId} not found`, { prefix: "Controller" });
+      }
+    }
+
+    if (selectedDatabases.length === 0) {
+      MessageFormatter.warning("No valid databases selected for sync", { prefix: "Controller" });
+      return;
+    }
+
+    // Log bucket selections if provided
+    if (bucketSelections && bucketSelections.length > 0) {
+      MessageFormatter.info(`Selected ${bucketSelections.length} buckets:`, { prefix: "Controller" });
+      for (const bucketSelection of bucketSelections) {
+        const dbInfo = bucketSelection.databaseId ? ` (DB: ${bucketSelection.databaseId})` : '';
+        MessageFormatter.info(`  - ${bucketSelection.bucketName} (${bucketSelection.bucketId})${dbInfo}`, { prefix: "Controller" });
+      }
+    }
+
+    // Perform selective sync using the enhanced synchronizeConfigurations method
+    await this.synchronizeConfigurations(selectedDatabases, this.config, databaseSelections, bucketSelections);
+
+    MessageFormatter.success("Selective sync completed successfully!", { prefix: "Controller" });
   }
 
   async syncDb(
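selectiveSync resolves each DatabaseSelection to a full Models.Database, logs the chosen tables and buckets, and then delegates to the extended synchronizeConfigurations with both selection lists. A usage sketch; the selection object shapes below are inferred from how their fields are read in this hunk (databaseId/tableIds and bucketId/bucketName/databaseId), not from the full type definitions in selectionDialogs.js:

```ts
const databaseSelections: DatabaseSelection[] = [
  { databaseId: "main", tableIds: ["orders", "customers"] }, // tableIds appears to be optional
];

const bucketSelections: BucketSelection[] = [
  { bucketId: "invoices", bucketName: "Invoices", databaseId: "main" },
];

// Only the selected databases are synced; bucket selections are logged and
// forwarded to synchronizeConfigurations for downstream handling.
await controller.selectiveSync(databaseSelections, bucketSelections);
```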