appwrite-utils-cli 0.0.262 → 0.0.264

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as published.
@@ -2,6 +2,8 @@ import { toCamelCase, toPascalCase } from "../utils/index.js";
  import { z } from "zod";
  import fs from "fs";
  import path from "path";
+ import { dump } from "js-yaml";
+ import { getDatabaseFromConfig } from "./afterImportActions.js";
  export class SchemaGenerator {
  relationshipMap = new Map();
  config;
@@ -11,6 +13,15 @@ export class SchemaGenerator {
  this.appwriteFolderPath = appwriteFolderPath;
  this.extractRelationships();
  }
+ updateYamlSchemas() {
+ // Output this.config to a YAML file at appwriteFolderPath/appwriteConfig.yaml
+ let finalConfig = this.config;
+ finalConfig.appwriteClient = null;
+ const yamlConfig = finalConfig;
+ const yamlPath = path.join(this.appwriteFolderPath, "appwriteConfig.yaml");
+ fs.writeFileSync(yamlPath, dump(yamlConfig), { encoding: "utf-8" });
+ console.log(`YAML written to ${yamlPath}`);
+ }
  extractRelationships() {
  this.config.collections.forEach((collection) => {
  collection.attributes.forEach((attr) => {
@@ -39,6 +50,7 @@ export class SchemaGenerator {
  break;
  }
  this.addRelationship(collection.name, relationshipAttr.relatedCollection, attr.key, relationshipAttr.twoWayKey, isArrayParent, isArrayChild);
+ console.log(`Extracted relationship: ${attr.key}\n\t${collection.name} -> ${relationshipAttr.relatedCollection}, databaseId: ${collection.databaseId}`);
  }
  });
  });
@@ -79,7 +91,14 @@ export class SchemaGenerator {
  let imports = `import { z } from "zod";\n`;
  // Use the relationshipMap to find related collections
  const relationshipDetails = this.relationshipMap.get(name) || [];
- const relatedCollections = relationshipDetails.map((detail) => {
+ const relatedCollections = relationshipDetails
+ .filter((detail, index, self) => {
+ const uniqueKey = `${detail.parentCollection}-${detail.childCollection}-${detail.parentKey}-${detail.childKey}`;
+ return (index ===
+ self.findIndex((obj) => `${obj.parentCollection}-${obj.childCollection}-${obj.parentKey}-${obj.childKey}` ===
+ uniqueKey));
+ })
+ .map((detail) => {
  const relatedCollectionName = detail.isChild
  ? detail.parentCollection
  : detail.childCollection;
@@ -92,8 +111,9 @@ export class SchemaGenerator {
  let curNum = 0;
  let maxNum = relatedCollections.length;
  relatedCollections.forEach((relatedCollection) => {
- const relatedPascalName = toPascalCase(relatedCollection[0]);
- const relatedCamelName = toCamelCase(relatedCollection[0]);
+ console.log(relatedCollection);
+ let relatedPascalName = toPascalCase(relatedCollection[0]);
+ let relatedCamelName = toCamelCase(relatedCollection[0]);
  curNum++;
  let endNameTypes = relatedPascalName;
  let endNameLazy = `${relatedPascalName}Schema`;
@@ -162,10 +182,20 @@ export class SchemaGenerator {
  case "integer":
  baseSchemaCode = "z.number().int()";
  if (attribute.min !== undefined) {
- baseSchemaCode += `.min(${attribute.min}, "Minimum value of ${attribute.min} not met")`;
+ if (BigInt(attribute.min) === BigInt(-9223372036854776000)) {
+ delete attribute.min;
+ }
+ else {
+ baseSchemaCode += `.min(${attribute.min}, "Minimum value of ${attribute.min} not met")`;
+ }
  }
  if (attribute.max !== undefined) {
- baseSchemaCode += `.max(${attribute.max}, "Maximum value of ${attribute.max} exceeded")`;
+ if (BigInt(attribute.max) === BigInt(9223372036854776000)) {
+ delete attribute.max;
+ }
+ else {
+ baseSchemaCode += `.max(${attribute.max}, "Maximum value of ${attribute.max} exceeded")`;
+ }
  }
  if (attribute.xdefault !== undefined) {
  baseSchemaCode += `.default(${attribute.xdefault})`;
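Note on the magic numbers above: ±9223372036854776000 is what the signed 64-bit integer bounds become after IEEE-754 rounding, presumably the defaults Appwrite reports for integer attributes with no explicit min/max, so the BigInt comparison strips those defaults instead of emitting meaningless .min()/.max() clauses. A rough TypeScript check of that assumption (not part of the package):

// INT64 bounds; the sentinel literals in the diff are these values after
// round-tripping through a JavaScript number.
const INT64_MIN = -(2n ** 63n);   // -9223372036854775808n
const INT64_MAX = 2n ** 63n - 1n; //  9223372036854775807n
console.log(BigInt(-9223372036854776000) === BigInt(Number(INT64_MIN))); // true
console.log(BigInt(9223372036854776000) === BigInt(Number(INT64_MAX)));  // true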
package/dist/setup.js CHANGED
File without changes
@@ -2,6 +2,7 @@ import { type ConverterFunctions } from "./migrations/converters.js";
  import { type AfterImportActions } from "./migrations/afterImportActions.js";
  import { type ValidationRules } from "./migrations/validationRules.js";
  export interface SetupOptions {
+ sync: boolean;
  runProd: boolean;
  runStaging: boolean;
  runDev: boolean;
@@ -11,6 +11,7 @@ import { afterImportActions, } from "./migrations/afterImportActions.js";
  import { validationRules, } from "./migrations/validationRules.js";
  import { ImportController } from "./migrations/importController.js";
  import _ from "lodash";
+ import { AppwriteToX } from "./migrations/appwriteToX.js";
  async function loadConfig(configPath) {
  if (!fs.existsSync(configPath)) {
  throw new Error(`Configuration file not found at ${configPath}`);
@@ -84,6 +85,12 @@ export class UtilsController {
  if (!this.database || !this.storage || !this.config) {
  throw new Error("Database or storage not initialized");
  }
+ if (options.sync) {
+ console.log("Starting synchronization with server...");
+ const appwriteToX = new AppwriteToX(this.config, this.appwriteFolderPath);
+ await appwriteToX.toSchemas();
+ console.log("Synchronization complete, YAML and Schemas updated");
+ }
  // Start the setup
  console.log("Starting setup, this step sets up migrations, runs backup, wipes databases, and updates schemas (depending on your options)...");
  await startSetup(this.database, this.storage, this.config, options, this.appwriteFolderPath);
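For readers skimming the diff, the new sync branch ties together pieces added elsewhere in this release; a minimal sketch of the flow (module paths and the AppwriteConfig type are assumed from the other hunks, not quoted from setup.js):

import { AppwriteToX } from "./migrations/appwriteToX.js";
import type { AppwriteConfig } from "./migrations/schema.js"; // path assumed

// Pull databases/collections from the Appwrite server into the local config,
// then rewrite appwriteConfig.yaml and regenerate the Zod schemas.
export async function syncFromServer(
  config: AppwriteConfig,
  appwriteFolderPath: string
): Promise<void> {
  const appwriteToX = new AppwriteToX(config, appwriteFolderPath);
  // toSchemas() runs appwriteSync(), then SchemaGenerator.updateYamlSchemas()
  // and generateSchemas() on the updated config (see the new AppwriteToX file below).
  await appwriteToX.toSchemas();
}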
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "appwrite-utils-cli",
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "0.0.262",
+ "version": "0.0.264",
  "main": "src/main.ts",
  "type": "module",
  "repository": {
package/src/main.ts CHANGED
@@ -7,6 +7,7 @@ async function main() {
  const controller = new UtilsController();
  await controller.init();

+ let sync = false;
  let runProd = false;
  let runStaging = false;
  let runDev = false;
@@ -17,6 +18,9 @@ async function main() {
  let importData = false;
  let wipeDocuments = false;
  let shouldWriteFile = false;
+ if (args.includes("--sync")) {
+ sync = true;
+ }
  if (args.includes("--prod")) {
  runProd = true;
  }
@@ -49,6 +53,7 @@ async function main() {
  }
  if (args.includes("--init")) {
  await controller.run({
+ sync: sync,
  runProd: runProd,
  runStaging: runStaging,
  runDev: runDev,
@@ -64,6 +69,7 @@ async function main() {
  });
  } else {
  await controller.run({
+ sync: sync,
  runProd: runProd,
  runStaging: runStaging,
  runDev: runDev,
@@ -13,7 +13,7 @@ import fs from "fs";
  import os from "os";
  import { logger } from "./logging.js";

- const getDatabaseFromConfig = (config: AppwriteConfig) => {
+ export const getDatabaseFromConfig = (config: AppwriteConfig) => {
  if (!config.appwriteClient) {
  config.appwriteClient = new Client()
  .setEndpoint(config.appwriteEndpoint)
@@ -23,7 +23,7 @@ const getDatabaseFromConfig = (config: AppwriteConfig) => {
  return new Databases(config.appwriteClient!);
  };

- const getStorageFromConfig = (config: AppwriteConfig) => {
+ export const getStorageFromConfig = (config: AppwriteConfig) => {
  if (!config.appwriteClient) {
  config.appwriteClient = new Client()
  .setEndpoint(config.appwriteEndpoint)
@@ -0,0 +1,156 @@
+ import { SchemaGenerator } from "./schemaStrings.js";
+ import { Databases, Query, type Models, type Permission } from "node-appwrite";
+ import { fetchAllCollections } from "./collections.js";
+ import { fetchAllDatabases } from "./databases.js";
+ import {
+ collectionSchema,
+ attributeSchema,
+ type AppwriteConfig,
+ AppwriteConfigSchema,
+ type ConfigDatabases,
+ type Attribute,
+ permissionsSchema,
+ attributesSchema,
+ indexesSchema,
+ } from "./schema.js";
+ import { getDatabaseFromConfig } from "./afterImportActions.js";
+
+ export class AppwriteToX {
+ config: AppwriteConfig;
+ updatedConfig: AppwriteConfig;
+ collToAttributeMap = new Map<string, Attribute[]>();
+ appwriteFolderPath: string;
+
+ constructor(config: AppwriteConfig, appwriteFolderPath: string) {
+ this.config = config;
+ this.updatedConfig = config;
+ this.appwriteFolderPath = appwriteFolderPath;
+ }
+
+ // Function to parse a single permission string
+ parsePermissionString = (permissionString: string) => {
+ const match = permissionString.match(/^(\w+)\('([^']+)'\)$/);
+ if (!match) {
+ throw new Error(`Invalid permission format: ${permissionString}`);
+ }
+ return {
+ permission: match[1],
+ target: match[2],
+ };
+ };
+
+ // Function to parse an array of permission strings
+ parsePermissionsArray = (permissions: string[]) => {
+ const parsedPermissions = permissionsSchema.parse(permissions);
+ // Validate the parsed permissions using Zod
+ return parsedPermissions ?? [];
+ };
+
+ updateCollectionConfigAttributes = (collection: Models.Collection) => {
+ for (const attribute of collection.attributes) {
+ const attributeMap = this.collToAttributeMap.get(
+ collection.name as string
+ );
+ const attributeParsed = attributeSchema.parse(attribute);
+ this.collToAttributeMap
+ .get(collection.name as string)
+ ?.push(attributeParsed);
+ }
+ };
+
+ async appwriteSync(config: AppwriteConfig) {
+ const db = getDatabaseFromConfig(config);
+ const databases = await fetchAllDatabases(db);
+ let updatedConfig: AppwriteConfig = { ...config };
+
+ // Loop through each database
+ for (const database of databases) {
+ if (database.name.toLowerCase() === "migrations") {
+ continue;
+ }
+ const collections = await fetchAllCollections(database.$id, db);
+
+ // Loop through each collection in the current database
+ for (const collection of collections) {
+ const existingCollectionIndex = updatedConfig.collections.findIndex(
+ (c) => c.name === collection.name
+ );
+
+ // Parse the collection permissions and attributes
+ const collPermissions = this.parsePermissionsArray(
+ collection.$permissions
+ );
+ const collAttributes = attributesSchema
+ .parse(collection.attributes)
+ .filter((attribute) =>
+ attribute.type === "relationship"
+ ? attribute.side !== "child"
+ : true
+ );
+ for (const attribute of collAttributes) {
+ if (
+ attribute.type === "relationship" &&
+ attribute.relatedCollection
+ ) {
+ console.log(
+ `Fetching related collection for ID: ${attribute.relatedCollection}`
+ );
+ try {
+ const relatedCollectionPulled = await db.getCollection(
+ database.$id,
+ attribute.relatedCollection
+ );
+ console.log(
+ `Fetched Collection Name: ${relatedCollectionPulled.name}`
+ );
+ attribute.relatedCollection = relatedCollectionPulled.name;
+ console.log(
+ `Updated attribute.relatedCollection to: ${attribute.relatedCollection}`
+ );
+ } catch (error) {
+ console.log("Error fetching related collection:", error);
+ }
+ }
+ }
+ this.collToAttributeMap.set(collection.name, collAttributes);
+ const collIndexes = indexesSchema.parse(collection.indexes);
+
+ // Prepare the collection object to be added or updated
+ const collToPush = collectionSchema.parse({
+ name: collection.name,
+ enabled: collection.enabled,
+ documentSecurity: collection.documentSecurity,
+ $createdAt: collection.$createdAt,
+ $updatedAt: collection.$updatedAt,
+ $permissions:
+ collPermissions.length > 0 ? collPermissions : undefined,
+ indexes: collIndexes.length > 0 ? collIndexes : undefined,
+ attributes: collAttributes.length > 0 ? collAttributes : undefined,
+ });
+
+ if (existingCollectionIndex !== -1) {
+ // Update existing collection
+ updatedConfig.collections[existingCollectionIndex] = collToPush;
+ } else {
+ // Add new collection
+ updatedConfig.collections.push(collToPush);
+ }
+ }
+
+ console.log(
+ `Processed ${collections.length} collections in ${database.name}`
+ );
+ }
+ this.updatedConfig = updatedConfig;
+ }
+
+ async toSchemas() {
+ await this.appwriteSync(this.config);
+ const generator = new SchemaGenerator(
+ this.updatedConfig,
+ this.appwriteFolderPath
+ );
+ generator.updateYamlSchemas();
+ generator.generateSchemas();
+ }
+ }
@@ -133,7 +133,7 @@ export class DataLoader {
  }

  // Method to load data from a file specified in the import definition
- async loadData(importDef: ImportDef): Promise<any[]> {
+ loadData(importDef: ImportDef): any[] {
  // Resolve the file path and check if the file exists
  const filePath = path.resolve(this.appwriteFolderPath, importDef.filePath);
  if (!fs.existsSync(filePath)) {
@@ -194,10 +194,7 @@ export class DataLoader {
  * @param attributeMappings - The mappings that define how each attribute should be transformed.
  * @returns The transformed item.
  */
- async transformData(
- item: any,
- attributeMappings: AttributeMappings
- ): Promise<any> {
+ transformData(item: any, attributeMappings: AttributeMappings): any {
  // Convert the item using the attribute mappings provided
  const convertedItem = convertObjectByAttributeMappings(
  item,
@@ -345,107 +342,111 @@ export class DataLoader {
  async updateReferencesInRelatedCollections() {
  // Iterate over each collection configuration
  for (const collectionConfig of this.config.collections) {
- const collectionKey = this.getCollectionKey(collectionConfig.name);
- const collectionData = this.importMap.get(collectionKey);
+ const collectionKey = this.getCollectionKey(collectionConfig.name);
+ const collectionData = this.importMap.get(collectionKey);

- if (!collectionData || !collectionData.data) continue;
+ if (!collectionData || !collectionData.data) continue;

- console.log(
- `Updating references for collection: ${collectionConfig.name}`
- );
+ console.log(`Updating references for collection: ${collectionConfig.name}`);

- // Iterate over each data item in the current collection
- for (const item of collectionData.data) {
- let needsUpdate = false;
+ // Iterate over each data item in the current collection
+ for (const item of collectionData.data) {
+ let needsUpdate = false;

- // Check if the current collection has import definitions with idMappings
- if (collectionConfig.importDefs) {
- for (const importDef of collectionConfig.importDefs) {
- if (importDef.idMappings) {
- // Iterate over each idMapping defined for the current import definition
- for (const idMapping of importDef.idMappings) {
- const oldIds = Array.isArray(
- item.context[idMapping.sourceField]
- )
- ? item.context[idMapping.sourceField]
- : [item.context[idMapping.sourceField]];
+ // Check if the current collection has import definitions with idMappings
+ if (collectionConfig.importDefs) {
+ for (const importDef of collectionConfig.importDefs) {
+ if (importDef.idMappings) {
+ // Iterate over each idMapping defined for the current import definition
+ for (const idMapping of importDef.idMappings) {
+ const oldIds = Array.isArray(item.context[idMapping.sourceField])
+ ? item.context[idMapping.sourceField]
+ : [item.context[idMapping.sourceField]];
+ const resolvedNewIds: string[] = [];

- oldIds.forEach((oldId: any) => {
- let newIdForOldId;
+ oldIds.forEach((oldId: any) => {
+ // Attempt to find a new ID for the old ID
+ let newIdForOldId = this.findNewIdForOldId(
+ oldId,
+ idMapping,
+ importDef
+ );

- // Handling users merged into a new ID
- newIdForOldId = this.findNewIdForOldId(oldId, idMapping);
+ if (newIdForOldId && !resolvedNewIds.includes(newIdForOldId)) {
+ resolvedNewIds.push(newIdForOldId);
+ } else {
+ logger.error(`No new ID found for old ID ${oldId} in collection ${collectionConfig.name}`);
+ }
+ });

- if (newIdForOldId) {
- const targetField =
- idMapping.fieldToSet || idMapping.targetField;
- const isArray = collectionConfig.attributes.some(
- (attribute) =>
- attribute.key === targetField && attribute.array
- );
+ if (resolvedNewIds.length) {
+ const targetField = idMapping.fieldToSet || idMapping.targetField;
+ const isArray = collectionConfig.attributes.some(
+ attribute => attribute.key === targetField && attribute.array
+ );

- // Properly update the target field based on whether it should be an array
- if (isArray) {
- if (!Array.isArray(item.finalData[targetField])) {
- item.finalData[targetField] = [newIdForOldId];
- } else if (
- !item.finalData[targetField].includes(newIdForOldId)
- ) {
- item.finalData[targetField].push(newIdForOldId);
- }
- } else {
- item.finalData[targetField] = newIdForOldId;
+ // Set the target field based on whether it's an array or single value
+ item.finalData[targetField] = isArray ? resolvedNewIds : resolvedNewIds[0];
+ needsUpdate = true;
+ }
+ }
  }
- needsUpdate = true;
- }
- });
- }
+ }
  }
- }
- }

- // Update the importMap if changes were made to the item
- if (needsUpdate) {
- this.importMap.set(collectionKey, collectionData);
- logger.info(
- `Updated item: ${JSON.stringify(item.finalData, undefined, 2)}`
- );
+ // Update the importMap if changes were made to the item
+ if (needsUpdate) {
+ this.importMap.set(collectionKey, collectionData);
+ logger.info(`Updated item: ${JSON.stringify(item.finalData, undefined, 2)}`);
+ }
  }
- }
  }
+ }
+
+ findNewIdForOldId(oldId: string, idMapping: IdMapping, importDef: ImportDef) {
+ // First, check if this ID mapping is related to the users collection.
+ const targetCollectionKey = this.getCollectionKey(idMapping.targetCollection);
+ const isUsersCollection = targetCollectionKey === this.getCollectionKey(this.config.usersCollectionName);
+
+ // If handling users, check the mergedUserMap for any existing new ID.
+ if (isUsersCollection) {
+ for (const [newUserId, oldIds] of this.mergedUserMap.entries()) {
+ if (oldIds.includes(oldId)) {
+ return newUserId;
+ }
+ }
  }

- findNewIdForOldId(oldId: string, idMapping: IdMapping) {
- // Check merged users first for any corresponding new ID
- let newIdForOldId;
- for (const [newUserId, oldIds] of this.mergedUserMap.entries()) {
- if (oldIds.includes(oldId)) {
- newIdForOldId = newUserId;
- break;
+ // If not a user or no merged ID found, check the regular ID mapping from old to new.
+ const targetCollectionData = this.importMap.get(targetCollectionKey);
+ if (targetCollectionData) {
+ const foundEntry = targetCollectionData.data.find(entry => entry.context[importDef.primaryKeyField] === oldId);
+ if (foundEntry) {
+ return foundEntry.context.docId; // Assuming `docId` stores the new ID after import
  }
- }
+ }

- // If no new ID found in merged users, check the old-to-new ID map for the target collection
- if (!newIdForOldId) {
- const targetCollectionKey = this.getCollectionKey(
- idMapping.targetCollection
- );
- const targetOldIdToNewIdMap =
- this.oldIdToNewIdPerCollectionMap.get(targetCollectionKey);
+ logger.error(`No corresponding new ID found for ${oldId} in ${targetCollectionKey}`);
+ return null; // Return null if no new ID is found
+ }

- if (targetOldIdToNewIdMap && targetOldIdToNewIdMap.has(oldId)) {
- newIdForOldId = targetOldIdToNewIdMap.get(oldId);
- }
- }

- return newIdForOldId;
- }

  private writeMapsToJsonFile() {
  const outputDir = path.resolve(process.cwd());
  const outputFile = path.join(outputDir, "dataLoaderOutput.json");

  const dataToWrite = {
+ // Convert Maps to arrays of entries for serialization
+ oldIdToNewIdPerCollectionMap: Array.from(
+ this.oldIdToNewIdPerCollectionMap.entries()
+ ).map(([key, value]) => {
+ return {
+ collection: key,
+ data: Array.from(value.entries()),
+ };
+ }),
+ mergedUserMap: Array.from(this.mergedUserMap.entries()),
  dataFromCollections: Array.from(this.importMap.entries()).map(
  ([key, value]) => {
  return {
@@ -454,8 +455,6 @@ export class DataLoader {
  };
  }
  ),
- // Convert Maps to arrays of entries for serialization
- mergedUserMap: Array.from(this.mergedUserMap.entries()),
  // emailToUserIdMap: Array.from(this.emailToUserIdMap.entries()),
  // phoneToUserIdMap: Array.from(this.phoneToUserIdMap.entries()),
  };
@@ -497,7 +496,7 @@ export class DataLoader {
  newId: string
  ): Promise<any> {
  // Transform the item data based on the attribute mappings
- let transformedItem = await this.transformData(item, attributeMappings);
+ let transformedItem = this.transformData(item, attributeMappings);
  const userData = AuthUserCreateSchema.safeParse(transformedItem);
  if (!userData.success) {
  logger.error(
@@ -577,7 +576,7 @@ export class DataLoader {
  importDef: ImportDef
  ): Promise<void> {
  // Load the raw data based on the import definition
- const rawData = await this.loadData(importDef);
+ const rawData = this.loadData(importDef);
  const operationId = this.collectionImportOperations.get(
  this.getCollectionKey(collection.name)
  );
@@ -762,7 +761,7 @@ export class DataLoader {
  importDef: ImportDef
  ): Promise<void> {
  // Load the raw data based on the import definition
- const rawData = await this.loadData(importDef);
+ const rawData = this.loadData(importDef);
  const operationId = this.collectionImportOperations.get(
  this.getCollectionKey(collection.name)
  );
@@ -801,7 +800,7 @@ export class DataLoader {
  // Create a context object for the item, including the new ID
  let context = this.createContext(db, collection, item, itemIdNew);
  // Transform the item data based on the attribute mappings
- const transformedData = await this.transformData(
+ const transformedData = this.transformData(
  item,
  importDef.attributeMappings
  );
@@ -892,7 +891,7 @@ export class DataLoader {
  return;
  }
  // Load the raw data based on the import definition
- const rawData = await this.loadData(importDef);
+ const rawData = this.loadData(importDef);
  const operationId = this.collectionImportOperations.get(
  this.getCollectionKey(collection.name)
  );
@@ -903,7 +902,7 @@ export class DataLoader {
  }
  for (const item of rawData) {
  // Transform the item data based on the attribute mappings
- let transformedData = await this.transformData(
+ let transformedData = this.transformData(
  item,
  importDef.attributeMappings
  );
@@ -0,0 +1,25 @@
+ import { Databases, Query, type Models } from "node-appwrite";
+
+ export const fetchAllDatabases = async (
+ database: Databases
+ ): Promise<Models.Database[]> => {
+ const databases = await database.list([Query.limit(25)]);
+ const allDatabases = databases.databases;
+ let lastDatabaseId = allDatabases[allDatabases.length - 1].$id;
+ if (databases.databases.length < 25) {
+ return allDatabases;
+ } else {
+ while (lastDatabaseId) {
+ const databases = await database.list([
+ Query.limit(25),
+ Query.cursorAfter(lastDatabaseId),
+ ]);
+ allDatabases.push(...databases.databases);
+ if (databases.databases.length < 25) {
+ break;
+ }
+ lastDatabaseId = databases.databases[databases.databases.length - 1].$id;
+ }
+ }
+ return allDatabases;
+ };
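The helper above pages through Databases.list() in batches of 25 using a cursor; note that it reads the last element of the first page before checking whether that page was short. A hedged variant of the same pattern with an empty-result guard (fetchAllDatabasesSafe is a hypothetical name, not part of the package):

import { Databases, Query, type Models } from "node-appwrite";

export const fetchAllDatabasesSafe = async (
  database: Databases
): Promise<Models.Database[]> => {
  const all: Models.Database[] = [];
  let cursor: string | undefined;
  while (true) {
    // Request the next page of up to 25 databases, resuming after the cursor.
    const queries = [Query.limit(25)];
    if (cursor) queries.push(Query.cursorAfter(cursor));
    const page = await database.list(queries);
    all.push(...page.databases);
    if (page.databases.length < 25) break; // short page: nothing left to fetch
    cursor = page.databases[page.databases.length - 1].$id;
  }
  return all;
};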
@@ -14,6 +14,14 @@ export const createOrUpdateIndex = async (
  if (existingIndex.total > 0) {
  await db.deleteIndex(dbId, collectionId, existingIndex.indexes[0].key);
  }
+ if (index.type === "fulltext" && index.attributes.length > 1) {
+ throw new Error(
+ `Fulltext index can only be created on a single attribute. Index: ${index.key}`
+ );
+ } else if (index.type === "fulltext") {
+ // @ts-ignore
+ index.attributes = index.attributes[0];
+ }
  const newIndex = await db.createIndex(
  dbId,
  collectionId,