appwrite-utils-cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +80 -0
- package/dist/main.d.ts +2 -0
- package/dist/main.js +74 -0
- package/dist/migrations/afterImportActions.d.ts +12 -0
- package/dist/migrations/afterImportActions.js +196 -0
- package/dist/migrations/attributes.d.ts +4 -0
- package/dist/migrations/attributes.js +158 -0
- package/dist/migrations/backup.d.ts +621 -0
- package/dist/migrations/backup.js +159 -0
- package/dist/migrations/collections.d.ts +16 -0
- package/dist/migrations/collections.js +207 -0
- package/dist/migrations/converters.d.ts +179 -0
- package/dist/migrations/converters.js +575 -0
- package/dist/migrations/dbHelpers.d.ts +5 -0
- package/dist/migrations/dbHelpers.js +54 -0
- package/dist/migrations/importController.d.ts +44 -0
- package/dist/migrations/importController.js +312 -0
- package/dist/migrations/importDataActions.d.ts +44 -0
- package/dist/migrations/importDataActions.js +219 -0
- package/dist/migrations/indexes.d.ts +4 -0
- package/dist/migrations/indexes.js +18 -0
- package/dist/migrations/logging.d.ts +2 -0
- package/dist/migrations/logging.js +14 -0
- package/dist/migrations/migrationHelper.d.ts +18 -0
- package/dist/migrations/migrationHelper.js +66 -0
- package/dist/migrations/queue.d.ts +13 -0
- package/dist/migrations/queue.js +79 -0
- package/dist/migrations/relationships.d.ts +90 -0
- package/dist/migrations/relationships.js +209 -0
- package/dist/migrations/schema.d.ts +3142 -0
- package/dist/migrations/schema.js +485 -0
- package/dist/migrations/schemaStrings.d.ts +12 -0
- package/dist/migrations/schemaStrings.js +261 -0
- package/dist/migrations/setupDatabase.d.ts +7 -0
- package/dist/migrations/setupDatabase.js +151 -0
- package/dist/migrations/storage.d.ts +8 -0
- package/dist/migrations/storage.js +241 -0
- package/dist/migrations/users.d.ts +11 -0
- package/dist/migrations/users.js +114 -0
- package/dist/migrations/validationRules.d.ts +43 -0
- package/dist/migrations/validationRules.js +42 -0
- package/dist/schemas/authUser.d.ts +62 -0
- package/dist/schemas/authUser.js +17 -0
- package/dist/setup.d.ts +2 -0
- package/dist/setup.js +5 -0
- package/dist/types.d.ts +9 -0
- package/dist/types.js +5 -0
- package/dist/utils/configSchema.json +742 -0
- package/dist/utils/helperFunctions.d.ts +34 -0
- package/dist/utils/helperFunctions.js +72 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.js +2 -0
- package/dist/utils/setupFiles.d.ts +2 -0
- package/dist/utils/setupFiles.js +276 -0
- package/dist/utilsController.d.ts +30 -0
- package/dist/utilsController.js +106 -0
- package/package.json +34 -0
- package/src/main.ts +77 -0
- package/src/migrations/afterImportActions.ts +300 -0
- package/src/migrations/attributes.ts +315 -0
- package/src/migrations/backup.ts +189 -0
- package/src/migrations/collections.ts +303 -0
- package/src/migrations/converters.ts +628 -0
- package/src/migrations/dbHelpers.ts +89 -0
- package/src/migrations/importController.ts +509 -0
- package/src/migrations/importDataActions.ts +313 -0
- package/src/migrations/indexes.ts +37 -0
- package/src/migrations/logging.ts +15 -0
- package/src/migrations/migrationHelper.ts +100 -0
- package/src/migrations/queue.ts +119 -0
- package/src/migrations/relationships.ts +336 -0
- package/src/migrations/schema.ts +590 -0
- package/src/migrations/schemaStrings.ts +310 -0
- package/src/migrations/setupDatabase.ts +219 -0
- package/src/migrations/storage.ts +351 -0
- package/src/migrations/users.ts +148 -0
- package/src/migrations/validationRules.ts +63 -0
- package/src/schemas/authUser.ts +23 -0
- package/src/setup.ts +8 -0
- package/src/types.ts +14 -0
- package/src/utils/configSchema.json +742 -0
- package/src/utils/helperFunctions.ts +111 -0
- package/src/utils/index.ts +2 -0
- package/src/utils/setupFiles.ts +295 -0
- package/src/utilsController.ts +173 -0
- package/tsconfig.json +37 -0

package/src/migrations/importDataActions.ts
@@ -0,0 +1,313 @@
import {
  ID,
  InputFile,
  Query,
  type Databases,
  type Storage,
} from "node-appwrite";
import type { AppwriteConfig } from "./schema.js";
import { validationRules, type ValidationRules } from "./validationRules.js";
import {
  converterFunctions,
  convertObjectBySchema,
  type ConverterFunctions,
} from "./converters.js";
import {
  afterImportActions,
  type AfterImportActions,
} from "./afterImportActions.js";
import { logger } from "./logging.js";

type AttributeMappings =
  AppwriteConfig["collections"][number]["importDefs"][number]["attributeMappings"];

export class ImportDataActions {
  private db: Databases;
  private storage: Storage;
  private config: AppwriteConfig;
  private converterDefinitions: ConverterFunctions;
  private validityRuleDefinitions: ValidationRules;
  private afterImportActionsDefinitions: AfterImportActions;

  constructor(
    db: Databases,
    storage: Storage,
    config: AppwriteConfig,
    converterDefinitions: ConverterFunctions,
    validityRuleDefinitions: ValidationRules,
    afterImportActionsDefinitions: AfterImportActions
  ) {
    this.db = db;
    this.storage = storage;
    this.config = config;
    this.converterDefinitions = converterDefinitions;
    this.validityRuleDefinitions = validityRuleDefinitions;
    this.afterImportActionsDefinitions = afterImportActionsDefinitions;
  }

  async runConverterFunctions(item: any, attributeMappings: AttributeMappings) {
    const conversionSchema = attributeMappings.reduce((schema, mapping) => {
      schema[mapping.targetKey] = (originalValue: any) => {
        return mapping.converters.reduce((value, converterName) => {
          let shouldProcessAsArray = false;
          if (
            (converterName.includes("[Arr]") ||
              converterName.includes("[arr]")) &&
            Array.isArray(value)
          ) {
            shouldProcessAsArray = true;
            converterName = converterName
              .replace("[Arr]", "")
              .replace("[arr]", "");
          } else if (
            (!Array.isArray(value) && converterName.includes("[Arr]")) ||
            converterName.includes("[arr]")
          ) {
            converterName = converterName
              .replace("[Arr]", "")
              .replace("[arr]", "");
          }
          const converterFunction =
            converterFunctions[
              converterName as keyof typeof converterFunctions
            ];
          if (converterFunction) {
            if (Array.isArray(value) && !shouldProcessAsArray) {
              return value.map((item) => converterFunction(item));
            } else {
              return converterFunction(value);
            }
          } else {
            logger.warn(
              `Converter function '${converterName}' is not defined.`
            );
            return value;
          }
        }, originalValue);
      };
      return schema;
    }, {} as Record<string, (value: any) => any>);

    // Convert the item using the constructed schema
    const convertedItem = convertObjectBySchema(item, conversionSchema);
    // Merge the converted item back into the original item object
    Object.assign(item, convertedItem);
    console.log("Converted item:", item);
    return item;
  }

  /**
   * Validates a single data item based on defined validation rules.
   * @param item The data item to validate.
   * @param context The context for resolving templated parameters in validation rules.
   * @returns A promise that resolves to true if the item is valid, false otherwise.
   */
  async validateItem(
    item: any,
    attributeMap: AttributeMappings,
    context: { [key: string]: any }
  ): Promise<boolean> {
    for (const mapping of attributeMap) {
      const { validationActions } = mapping;
      if (
        !validationActions ||
        !Array.isArray(validationActions) ||
        !validationActions.length
      ) {
        console.warn(
          "No validation actions defined for the item, assuming true"
        );
        return true; // Assume items without validation actions as valid.
      }
      for (const ruleDef of validationActions) {
        const { action, params } = ruleDef;
        const validationRule =
          validationRules[action as keyof typeof validationRules];

        if (!validationRule) {
          logger.warn(`Validation rule '${action}' is not defined.`);
          continue; // Optionally, consider undefined rules as a validation failure.
        }

        // Resolve templated parameters
        const resolvedParams = params.map((param: any) =>
          this.resolveTemplate(param, context, item)
        );

        // Apply the validation rule
        let isValid = false;
        if (Array.isArray(item)) {
          isValid = item.every((item) =>
            (validationRule as any)(item, ...resolvedParams)
          );
        } else {
          isValid = (validationRule as any)(item, ...resolvedParams);
        }
        if (!isValid) {
          logger.error(
            `Validation failed for rule '${action}' with params ${params.join(
              ", "
            )}`
          );
          return false; // Stop validation on first failure
        }
      }
    }

    return true; // The item passed all validations
  }

  async executeAfterImportActions(
    item: any,
    attributeMap: AttributeMappings,
    context: { [key: string]: any }
  ): Promise<void> {
    for (const mapping of attributeMap) {
      const { postImportActions } = mapping;
      if (!postImportActions || !Array.isArray(postImportActions)) {
        console.warn(
          `No post-import actions defined for attribute: ${mapping.targetKey}`,
          postImportActions
        );
        continue; // Skip to the next attribute if no actions are defined
      }
      for (const actionDef of postImportActions) {
        const { action, params } = actionDef;
        console.log(
          `Executing post-import action '${action}' for attribute '${
            mapping.targetKey
          }' with params ${params.join(", ")}...`
        );
        logger.info(
          `Executing post-import action '${action}' for attribute '${
            mapping.targetKey
          }' with params ${params.join(", ")}...`
        );
        try {
          await this.executeAction(action, params, context, item);
        } catch (error) {
          logger.error(
            `Failed to execute post-import action '${action}' for attribute '${mapping.targetKey}':`,
            error
          );
        }
      }
    }
  }

  async executeAction(
    actionName: string,
    params: any[], // Accepts any type, including objects
    context: { [key: string]: any },
    item: any
  ): Promise<void> {
    const actionMethod =
      afterImportActions[actionName as keyof typeof afterImportActions];
    if (typeof actionMethod === "function") {
      try {
        // Resolve parameters, handling both strings and objects
        const resolvedParams = params.map((param) => {
          // Directly resolve each param, whether it's an object or a string
          return this.resolveTemplate(param, context, item);
        });

        // Execute the action with resolved parameters
        // Parameters are passed as-is, with objects treated as single parameters
        logger.info(
          `Executing action '${actionName}' from context: ${JSON.stringify(
            context,
            null,
            2
          )} with params:`,
          resolvedParams
        );
        await (actionMethod as any)(this.config, ...resolvedParams);
      } catch (error: any) {
        logger.error(`Error executing action '${actionName}':`, error);
        throw new Error(
          `Execution failed for action '${actionName}': ${error.message}`
        );
      }
    } else {
      logger.warn(`Action '${actionName}' is not defined.`);
      throw new Error(`Action '${actionName}' is not defined.`);
    }
  }

  /**
   * Resolves a templated string or object using the provided context and current data item.
   * If the template is a string that starts and ends with "{}", it replaces it with the corresponding value from item or context.
   * If the template is an object, it recursively resolves its properties.
   * @param template The templated string or object.
   * @param context The context for resolving the template.
   * @param item The current data item being processed.
   */
  resolveTemplate(
    template: any,
    context: { [key: string]: any },
    item: any
  ): any {
    // Function to recursively resolve paths, including handling [any] notation
    const resolvePath = (path: string, currentContext: any): any => {
      const anyKeyRegex = /\[any\]/g;
      let pathParts = path.split(".").filter(Boolean);

      return pathParts.reduce((acc, part, index) => {
        // Handle [any] part by iterating over all elements if it's an object or an array
        if (part === "[any]") {
          if (Array.isArray(acc)) {
            return acc
              .map((item) => item[pathParts[index + 1]])
              .filter((item) => item !== undefined);
          } else if (typeof acc === "object") {
            return Object.values(acc)
              .map((item: any) => item[pathParts[index + 1]])
              .filter((item) => item !== undefined);
          }
        } else {
          return acc?.[part];
        }
      }, currentContext);
    };

    if (typeof template === "string") {
      // Matches placeholders in the template
      const regex = /\{([^}]+)\}/g;
      let match;
      let resolvedString = template;
      while ((match = regex.exec(template)) !== null) {
        const path = match[1];
        // Resolve the path, handling [any] notation and arrays/objects
        const resolvedValue = resolvePath(path, { ...context, ...item });
        if (resolvedValue !== undefined) {
          // If it's an array (from [any] notation), join the values; adjust as needed
          const value = Array.isArray(resolvedValue)
            ? resolvedValue.join(", ")
            : resolvedValue;
          resolvedString = resolvedString.replace(match[0], value);
        } else {
          logger.warn(`Failed to resolve ${template} in context: `, context);
        }
      }
      // console.log(`Resolved string: ${resolvedString}`);
      return resolvedString;
    } else if (typeof template === "object" && template !== null) {
      // Recursively resolve templates for each property in the object
      const resolvedObject: any = Array.isArray(template) ? [] : {};
      for (const key in template) {
        const resolvedValue = this.resolveTemplate(
          template[key],
          context,
          item
        );
        if (resolvedValue !== undefined) {
          // Only assign if resolvedValue is not undefined
          resolvedObject[key] = resolvedValue;
        }
      }
      return resolvedObject;
    }
    // console.log(`Template is not a string or object: ${template}`);
    return template;
  }
}
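
For orientation, the sketch below shows one way this class could be wired up by a caller. It is not part of the published package: the endpoint and IDs are placeholders, the converter name ("trim") and validation rule name ("isString") are assumed names (the real ones live in converters.ts and validationRules.ts), and the config object is assumed to be loaded elsewhere. Top-level await assumes an ES-module context.

// Hypothetical usage sketch (not part of the package).
import { Client, Databases, Storage } from "node-appwrite";
import { ImportDataActions } from "./importDataActions.js";
import { converterFunctions } from "./converters.js";
import { validationRules } from "./validationRules.js";
import { afterImportActions } from "./afterImportActions.js";
import type { AppwriteConfig } from "./schema.js";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1") // placeholder endpoint
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");

const db = new Databases(client);
const storage = new Storage(client);
const config = {} as AppwriteConfig; // assumed: loaded from the project's appwriteConfig elsewhere

const importActions = new ImportDataActions(
  db,
  storage,
  config,
  converterFunctions,
  validationRules,
  afterImportActions
);

// Illustrative attribute mapping: run a converter, then a validation rule.
// "trim" and "isString" are assumed names; cast because the exact type comes from schema.ts.
const attributeMappings = [
  {
    targetKey: "name",
    converters: ["trim"],
    validationActions: [{ action: "isString", params: ["{name}"] }],
    postImportActions: [],
  },
] as any;

const item = { name: "  Example  " };
const converted = await importActions.runConverterFunctions(item, attributeMappings);
const isValid = await importActions.validateItem(converted, attributeMappings, {
  dbId: "<DB_ID>",
});
console.log("valid:", isValid);
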
package/src/migrations/indexes.ts
@@ -0,0 +1,37 @@
import { indexSchema, type Index } from "./schema.js";
import { Databases, Query, type Models } from "node-appwrite";
// import {}

export const createOrUpdateIndex = async (
  dbId: string,
  db: Databases,
  collectionId: string,
  index: Index
) => {
  const existingIndex = await db.listIndexes(dbId, collectionId, [
    Query.equal("key", index.key),
  ]);
  if (existingIndex.total > 0) {
    await db.deleteIndex(dbId, collectionId, existingIndex.indexes[0].key);
  }
  const newIndex = await db.createIndex(
    dbId,
    collectionId,
    index.key,
    index.type,
    index.attributes,
    index.orders
  );
  return newIndex;
};

export const createOrUpdateIndexes = async (
  dbId: string,
  db: Databases,
  collectionId: string,
  indexes: Index[]
) => {
  for (const index of indexes) {
    await createOrUpdateIndex(dbId, db, collectionId, index);
  }
};
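
A hedged usage sketch for the helpers above; the index literal mirrors the fields the function actually reads (key, type, attributes, orders), while the IDs are placeholders and the cast sidesteps the exact Index type defined in schema.ts.

// Hypothetical usage sketch (not part of the package).
import { Client, Databases } from "node-appwrite";
import { createOrUpdateIndexes } from "./indexes.js";
import type { Index } from "./schema.js";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1")
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");
const db = new Databases(client);

const indexes = [
  { key: "byEmail", type: "unique", attributes: ["email"], orders: ["ASC"] },
] as Index[]; // cast: the zod-derived Index type lives in schema.ts

await createOrUpdateIndexes("<DB_ID>", db, "<COLLECTION_ID>", indexes);
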
package/src/migrations/logging.ts
@@ -0,0 +1,15 @@
import winston from "winston";

export const logger = winston.createLogger({
  level: "info",
  format: winston.format.prettyPrint(),
  defaultMeta: { service: "appwrite-utils-cli" },
  transports: [
    //
    // - Write all logs with importance level of `error` or less to `error.log`
    // - Write all logs with importance level of `info` or less to `combined.log`
    //
    new winston.transports.File({ filename: "error.log", level: "error" }),
    new winston.transports.File({ filename: "combined.log" }),
  ],
});
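
A minimal sketch of how other modules in the package consume this shared winston logger; the messages and metadata are illustrative only.

// Hypothetical usage sketch (not part of the package).
import { logger } from "./logging.js";

logger.info("Import started", { collection: "members" }); // goes to combined.log
logger.error("Import failed", new Error("example failure")); // also goes to error.log
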
package/src/migrations/migrationHelper.ts
@@ -0,0 +1,100 @@
import { ID, Query, type Databases } from "node-appwrite";
import { OperationSchema } from "./backup.js";

export const findOrCreateOperation = async (
  database: Databases,
  collectionId: string,
  operationType: string
) => {
  // Here you would query your database for an existing operation
  // If it doesn't exist, create a new one
  // This is a simplified example
  const operations = await database.listDocuments(
    "migrations",
    "currentOperations",
    [
      Query.equal("collectionId", collectionId),
      Query.equal("operationType", operationType),
      Query.equal("status", "in_progress"),
    ]
  );

  if (operations.documents.length > 0) {
    return OperationSchema.parse(operations.documents[0]); // Assuming the first document is the operation we want
  } else {
    // Create a new operation document
    const op = await database.createDocument(
      "migrations",
      "currentOperations",
      ID.unique(),
      {
        operationType,
        collectionId,
        status: "in_progress",
        batches: [],
        progress: 0,
        total: 0,
        error: "",
      }
    );

    return OperationSchema.parse(op);
  }
};

export const updateOperation = async (
  database: Databases,
  operationId: string,
  updateFields: any
) => {
  await database.updateDocument(
    "migrations",
    "currentOperations",
    operationId,
    updateFields
  );
};

// Actual max 1073741824
export const maxDataLength = 1073741820;
export const maxBatchItems = 100;

export const splitIntoBatches = (data: any[]): any[][] => {
  let batches = [];
  let currentBatch: any[] = [];
  let currentBatchLength = 0;
  let currentBatchItemCount = 0;

  data.forEach((item, index) => {
    const itemLength = JSON.stringify(item).length;
    if (itemLength > maxDataLength) {
      console.log(
        item,
        `Large item found at index ${index} with length ${itemLength}:`
      );
    }
    // Check if adding the current item would exceed the max length or max items per batch
    if (
      currentBatchLength + itemLength >= maxDataLength ||
      currentBatchItemCount >= maxBatchItems
    ) {
      // If so, start a new batch
      batches.push(currentBatch);
      currentBatch = [item];
      currentBatchLength = itemLength;
      currentBatchItemCount = 1; // Reset item count for the new batch
    } else {
      // Otherwise, add the item to the current batch
      currentBatch.push(item);
      currentBatchLength += itemLength;
      currentBatchItemCount++;
    }
  });

  // Don't forget to add the last batch if it's not empty
  if (currentBatch.length > 0) {
    batches.push(currentBatch);
  }

  return batches;
};
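
A usage sketch, assuming the "migrations" database and "currentOperations" collection referenced above already exist; the IDs and the operation type string are placeholders, and the $id access is an assumption about what OperationSchema preserves from the Appwrite document.

// Hypothetical usage sketch (not part of the package).
import { Client, Databases } from "node-appwrite";
import {
  findOrCreateOperation,
  updateOperation,
  splitIntoBatches,
} from "./migrationHelper.js";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1")
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");
const db = new Databases(client);

const rows = [{ name: "a" }, { name: "b" }];
const batches = splitIntoBatches(rows); // respects maxDataLength and maxBatchItems

const op = await findOrCreateOperation(db, "<COLLECTION_ID>", "importData"); // operation type is illustrative
// Assumes the parsed operation keeps the Appwrite document $id; see OperationSchema in backup.ts.
await updateOperation(db, (op as any).$id, { progress: 0, total: rows.length, batches: batches.length });
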
package/src/migrations/queue.ts
@@ -0,0 +1,119 @@
import { Query, type Databases, type Models } from "node-appwrite";
import type { Attribute } from "./schema.js";
import { createOrUpdateAttribute } from "./attributes.js";
import _ from "lodash";
import { fetchAndCacheCollectionByName } from "./collections.js";

export interface QueuedOperation {
  type: "attribute";
  collectionId?: string;
  attribute?: Attribute;
  collection?: Models.Collection;
  dependencies?: string[];
}
export const queuedOperations: QueuedOperation[] = [];
export const nameToIdMapping: Map<string, string> = new Map();

export const enqueueOperation = (operation: QueuedOperation) => {
  queuedOperations.push(operation);
};

export const processQueue = async (db: Databases, dbId: string) => {
  console.log("---------------------------------");
  console.log(`Starting Queue processing of ${dbId}`);
  console.log("---------------------------------");
  let progress = true;

  while (progress) {
    progress = false;
    console.log("Processing queued operations:");
    for (let i = 0; i < queuedOperations.length; i++) {
      const operation = queuedOperations[i];
      let collectionFound: Models.Collection | undefined;

      // Handle relationship attribute operations
      if (operation.attribute?.type === "relationship") {
        // Attempt to resolve the collection directly if collectionId is specified
        if (operation.collectionId) {
          console.log(`\tFetching collection by ID: ${operation.collectionId}`);
          try {
            collectionFound = await db.getCollection(
              dbId,
              operation.collectionId
            );
          } catch (e) {
            console.log(
              `\tCollection not found by ID: ${operation.collectionId}`
            );
          }
        }
        // Attempt to resolve related collection if specified and not already found
        if (!collectionFound && operation.attribute?.relatedCollection) {
          collectionFound = await fetchAndCacheCollectionByName(
            db,
            dbId,
            operation.attribute.relatedCollection
          );
        }
        // Handle dependencies if collection still not found
        if (!collectionFound) {
          for (const dep of operation.dependencies || []) {
            collectionFound = await fetchAndCacheCollectionByName(
              db,
              dbId,
              dep
            );
            if (collectionFound) break; // Break early if collection is found
          }
        }
      } else if (operation.collectionId) {
        // Handle non-relationship operations with a specified collectionId
        console.log(`\tFetching collection by ID: ${operation.collectionId}`);
        try {
          collectionFound = await db.getCollection(
            dbId,
            operation.collectionId
          );
        } catch (e) {
          console.log(
            `\tCollection not found by ID: ${operation.collectionId}`
          );
        }
      }

      // Process the operation if the collection is found
      if (collectionFound && operation.attribute) {
        console.log(
          `\tProcessing attribute: ${operation.attribute.key} for collection ID: ${collectionFound.$id}`
        );
        await createOrUpdateAttribute(
          db,
          dbId,
          collectionFound,
          operation.attribute
        );
        queuedOperations.splice(i, 1);
        i--; // Adjust index since we're modifying the array
        progress = true;
      } else {
        console.error(
          `\tCollection not found for operation, removing from queue: ${JSON.stringify(
            operation
          )}`
        );
        queuedOperations.splice(i, 1);
        i--; // Adjust index since we're modifying the array
      }
    }
    console.log(`\tFinished processing queued operations`);
  }

  if (queuedOperations.length > 0) {
    console.error("Unresolved operations remain due to unmet dependencies.");
    console.log(queuedOperations);
  }

  console.log("---------------------------------");
  console.log(`Queue processing complete for ${dbId}`);
  console.log("---------------------------------");
};
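
A usage sketch for the queue: enqueue a relationship attribute whose target collection may not exist yet, then drain the queue once collections have been created. The attribute literal is illustrative and is cast because the full Attribute type lives in schema.ts; IDs and names are placeholders.

// Hypothetical usage sketch (not part of the package).
import { Client, Databases } from "node-appwrite";
import { enqueueOperation, processQueue } from "./queue.js";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1")
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");
const db = new Databases(client);

enqueueOperation({
  type: "attribute",
  collectionId: "<MEMBERS_COLLECTION_ID>",
  dependencies: ["Teams"], // resolved later via fetchAndCacheCollectionByName
  attribute: {
    key: "team",
    type: "relationship",
    relatedCollection: "Teams",
  } as any, // cast: the full Attribute shape is defined in schema.ts
});

await processQueue(db, "<DB_ID>");
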