appwrite-utils-cli 0.0.261 → 0.0.263
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -5
- package/dist/main.js +6 -0
- package/dist/migrations/afterImportActions.d.ts +3 -1
- package/dist/migrations/afterImportActions.js +2 -2
- package/dist/migrations/appwriteToX.d.ts +109 -0
- package/dist/migrations/appwriteToX.js +88 -0
- package/dist/migrations/backup.d.ts +4 -4
- package/dist/migrations/converters.js +1 -1
- package/dist/migrations/dataLoader.d.ts +19 -18
- package/dist/migrations/dataLoader.js +65 -84
- package/dist/migrations/databases.d.ts +2 -0
- package/dist/migrations/databases.js +23 -0
- package/dist/migrations/importController.js +3 -0
- package/dist/migrations/importDataActions.d.ts +1 -1
- package/dist/migrations/importDataActions.js +1 -1
- package/dist/migrations/schema.d.ts +363 -32
- package/dist/migrations/schema.js +48 -28
- package/dist/migrations/schemaStrings.d.ts +1 -0
- package/dist/migrations/schemaStrings.js +10 -0
- package/dist/setup.js +0 -0
- package/dist/utils/helperFunctions.d.ts +3 -0
- package/dist/utils/helperFunctions.js +8 -0
- package/dist/utilsController.d.ts +1 -0
- package/dist/utilsController.js +7 -0
- package/package.json +1 -1
- package/src/main.ts +6 -0
- package/src/migrations/afterImportActions.ts +2 -2
- package/src/migrations/appwriteToX.ts +122 -0
- package/src/migrations/converters.ts +2 -2
- package/src/migrations/dataLoader.ts +87 -95
- package/src/migrations/databases.ts +25 -0
- package/src/migrations/importController.ts +3 -0
- package/src/migrations/importDataActions.ts +1 -1
- package/src/migrations/schema.ts +65 -36
- package/src/migrations/schemaStrings.ts +11 -0
- package/src/utils/helperFunctions.ts +17 -0
- package/src/utilsController.ts +9 -0
@@ -1,4 +1,4 @@
-import { ID, IndexType } from "node-appwrite";
+import { ID, IndexType, Permission } from "node-appwrite";
 import { z } from "zod";
 const stringAttributeSchema = z.object({
   key: z.string().describe("The key of the attribute"),
@@ -324,6 +324,7 @@ export const indexSchema = z.object({
   attributes: z.array(z.string()),
   orders: z.array(z.string()).optional(),
 });
+export const indexesSchema = z.array(indexSchema);
 export const AttributeMappingsSchema = z.array(z.object({
   oldKey: z
     .string()
@@ -366,6 +367,19 @@ export const AttributeMappingsSchema = z.array(z.object({
     .describe("The after import actions and parameter placeholders (they'll be replaced with the actual data) to use for the import")
     .default([]),
 }));
+export const idMappingSchema = z.array(z.object({
+  sourceField: z
+    .string()
+    .describe("The key of the data in the import data to match in the current data"),
+  fieldToSet: z
+    .string()
+    .optional()
+    .describe("The field to set in the target collection, if different from sourceField"),
+  targetField: z
+    .string()
+    .describe("The field in the target collection to match with sourceField that will then be updated"),
+  targetCollection: z.string().describe("The collection to search"),
+}));
 export const importDefSchema = z
   .object({
     type: z
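Note: `idMappingSchema` simply lifts out the shape that `importDefSchema.idMappings` previously declared inline (see the next hunk). As a rough sketch, assuming it is imported from the schema module, a value satisfying it might look like this; the field and collection names are made up for illustration:

```ts
import { z } from "zod";
import { idMappingSchema } from "./schema.js";

// Hypothetical mapping: match the import file's "authorId" against "oldId"
// in the "Authors" collection, and write the resolved ID into "author".
const exampleIdMappings: z.infer<typeof idMappingSchema> = [
  {
    sourceField: "authorId",
    fieldToSet: "author",
    targetField: "oldId",
    targetCollection: "Authors",
  },
];

idMappingSchema.parse(exampleIdMappings); // throws if the shape is wrong
```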
@@ -382,20 +396,7 @@ export const importDefSchema = z
       .string()
       .default("id")
       .describe("The field in the import data representing the primary key for this import data (if any)"),
-    idMappings:
-      .array(z.object({
-        sourceField: z
-          .string()
-          .describe("The key of the data in the import data to match in the current data"),
-        fieldToSet: z
-          .string()
-          .optional()
-          .describe("The field to set in the target collection, if different from sourceField"),
-        targetField: z
-          .string()
-          .describe("The field in the target collection to match with sourceField that will then be updated"),
-        targetCollection: z.string().describe("The collection to search"),
-      }))
+    idMappings: idMappingSchema
       .optional()
       .describe("The id mappings for the attribute to map ID's to"),
     updateMapping: z
@@ -416,12 +417,40 @@ export const importDefSchemas = z
   .array(importDefSchema)
   .default([])
   .describe("The import definitions for the database");
+export const permissionSchema = z
+  .object({
+    permission: z.string(),
+    target: z.string(),
+  })
+  .or(z.string().transform((val) => {
+    const trimmedVal = val.trim();
+    // Adjusted regex to match double quotes
+    const match = trimmedVal.match(/^(\w+)\("([^"]+)"\)$/);
+    if (!match) {
+      throw new Error(`Invalid permission format: ${trimmedVal}`);
+    }
+    return {
+      permission: match[1],
+      target: match[2],
+    };
+  }));
+export const permissionsSchema = z.array(permissionSchema).optional();
+export const attributesSchema = z.array(attributeSchema).default([]);
 export const collectionSchema = z.object({
+  name: z.string().describe("The name of the collection"),
   $id: z
     .string()
     .optional()
     .default(ID.unique())
     .describe("The ID of the collection, auto generated if not provided"),
+  enabled: z
+    .boolean()
+    .default(true)
+    .describe("Whether the collection is enabled or not"),
+  documentSecurity: z
+    .boolean()
+    .default(false)
+    .describe("Whether document security is enabled or not"),
   $createdAt: z.string(),
   $updatedAt: z.string(),
   $permissions: z
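Note: the string branch of `permissionSchema` expects Appwrite-style permission strings with double-quoted targets and transforms them into the object form. A small sketch of the expected behavior (illustrative, not taken from the package's tests):

```ts
import { permissionSchema, permissionsSchema } from "./schema.js";

// String form is normalized by the /^(\w+)\("([^"]+)"\)$/ transform:
const fromString = permissionSchema.parse('read("any")');
// -> { permission: "read", target: "any" }

// Object form passes through validation unchanged:
const fromObject = permissionSchema.parse({ permission: "update", target: "team:abc" });

// permissionsSchema is the optional array wrapper:
const parsed = permissionsSchema.parse(['read("any")', 'delete("users")']);
```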
@@ -431,19 +460,6 @@ export const collectionSchema = z.object({
     }))
     .default([])
     .describe("The permissions of the collection"),
-  databaseId: z
-    .string()
-    .optional()
-    .describe("The ID of the database the collection belongs to"),
-  name: z.string().describe("The name of the collection"),
-  enabled: z
-    .boolean()
-    .default(true)
-    .describe("Whether the collection is enabled or not"),
-  documentSecurity: z
-    .boolean()
-    .default(false)
-    .describe("Whether document security is enabled or not"),
   attributes: z
     .array(attributeSchema)
     .default([])
@@ -453,6 +469,10 @@ export const collectionSchema = z.object({
     .default([])
     .describe("The indexes of the collection"),
   importDefs: importDefSchemas,
+  databaseId: z
+    .string()
+    .optional()
+    .describe("The ID of the database the collection belongs to"),
 });
 export const CollectionCreateSchema = collectionSchema.omit({
   $createdAt: true,
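Note: with `name` now listed first and `databaseId` moved to the end, the required inputs to `collectionSchema` are effectively just `name`, `$createdAt`, and `$updatedAt`; everything else shown in this diff carries a default or is optional. A minimal sketch under that assumption:

```ts
import { collectionSchema } from "./schema.js";

// $id, enabled, documentSecurity, $permissions, attributes, indexes and
// importDefs all fall back to their defaults; databaseId stays optional.
const parsed = collectionSchema.parse({
  name: "Authors",
  $createdAt: new Date().toISOString(),
  $updatedAt: new Date().toISOString(),
});
```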
@@ -2,6 +2,7 @@ import { toCamelCase, toPascalCase } from "../utils/index.js";
 import { z } from "zod";
 import fs from "fs";
 import path from "path";
+import { dump } from "js-yaml";
 export class SchemaGenerator {
     relationshipMap = new Map();
     config;
@@ -11,6 +12,15 @@ export class SchemaGenerator {
         this.appwriteFolderPath = appwriteFolderPath;
         this.extractRelationships();
     }
+    updateYamlSchemas() {
+        // Output this.config to a YAML file at appwriteFolderPath/appwriteConfig.yaml
+        let finalConfig = this.config;
+        finalConfig.appwriteClient = null;
+        const yamlConfig = finalConfig;
+        const yamlPath = path.join(this.appwriteFolderPath, "appwriteConfig.yaml");
+        fs.writeFileSync(yamlPath, dump(yamlConfig), { encoding: "utf-8" });
+        console.log(`YAML written to ${yamlPath}`);
+    }
     extractRelationships() {
         this.config.collections.forEach((collection) => {
             collection.attributes.forEach((attr) => {
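Note: `updateYamlSchemas` nulls out `appwriteClient` before serializing, since a live client instance is not meaningful in a YAML dump. A standalone sketch of the same `js-yaml` call it relies on, using a made-up, trimmed-down config object:

```ts
import { dump } from "js-yaml";
import fs from "fs";
import path from "path";

// Hypothetical, trimmed-down config; the real shape is AppwriteConfig.
const config = {
  appwriteEndpoint: "https://cloud.appwrite.io/v1",
  appwriteClient: null, // cleared so the YAML stays plain data
  collections: [{ name: "Authors", enabled: true }],
};

const yamlPath = path.join(process.cwd(), "appwriteConfig.yaml");
fs.writeFileSync(yamlPath, dump(config), { encoding: "utf-8" });
```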
package/dist/setup.js
CHANGED
File without changes
@@ -1,4 +1,6 @@
 import type { Models } from "node-appwrite";
+import type { CollectionImportData } from "src/migrations/dataLoader.js";
+import type { ConfigCollection } from "src/migrations/schema.js";
 export declare const toPascalCase: (str: string) => string;
 export declare const toCamelCase: (str: string) => string;
 export declare const ensureDirectoryExistence: (filePath: string) => true | undefined;
@@ -32,3 +34,4 @@ export declare const getFileViewUrl: (endpoint: string, projectId: string, bucke
  * @return {string} The complete download URL for the file.
  */
 export declare const getFileDownloadUrl: (endpoint: string, projectId: string, bucketId: string, fileId: string, jwt?: Models.Jwt) => string;
+export declare const finalizeByAttributeMap: (appwriteFolderPath: string, collection: ConfigCollection, item: CollectionImportData["data"][number]) => Promise<any>;
@@ -70,3 +70,11 @@ export const getFileViewUrl = (endpoint, projectId, bucketId, fileId, jwt) => {
 export const getFileDownloadUrl = (endpoint, projectId, bucketId, fileId, jwt) => {
     return `${endpoint}/storage/buckets/${bucketId}/files/${fileId}/download?project=${projectId}${jwt ? `&jwt=${jwt.jwt}` : ""}`;
 };
+export const finalizeByAttributeMap = async (appwriteFolderPath, collection, item) => {
+    const schemaFolderPath = path.join(appwriteFolderPath, "schemas");
+    const zodSchema = await import(`${schemaFolderPath}/${toCamelCase(collection.name)}.ts`);
+    return zodSchema.parse({
+        ...item.context,
+        ...item.finalData,
+    });
+};
@@ -2,6 +2,7 @@ import { type ConverterFunctions } from "./migrations/converters.js";
 import { type AfterImportActions } from "./migrations/afterImportActions.js";
 import { type ValidationRules } from "./migrations/validationRules.js";
 export interface SetupOptions {
+    sync: boolean;
     runProd: boolean;
     runStaging: boolean;
     runDev: boolean;
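Note: `sync` is declared as a required member of `SetupOptions`, so existing call sites have to pass it explicitly (as `main.ts` does below). A partial sketch, with the remaining flags elided because the full interface isn't shown in this diff:

```ts
import type { SetupOptions } from "./utilsController.js";

// Only the flags visible in this diff are listed; the rest of SetupOptions
// is omitted here.
const options = {
  sync: true,
  runProd: false,
  runStaging: false,
  runDev: true,
} satisfies Partial<SetupOptions>;
```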
package/dist/utilsController.js
CHANGED
@@ -11,6 +11,7 @@ import { afterImportActions, } from "./migrations/afterImportActions.js";
 import { validationRules, } from "./migrations/validationRules.js";
 import { ImportController } from "./migrations/importController.js";
 import _ from "lodash";
+import { AppwriteToX } from "./migrations/appwriteToX.js";
 async function loadConfig(configPath) {
     if (!fs.existsSync(configPath)) {
         throw new Error(`Configuration file not found at ${configPath}`);
@@ -84,6 +85,12 @@ export class UtilsController {
         if (!this.database || !this.storage || !this.config) {
             throw new Error("Database or storage not initialized");
         }
+        if (options.sync) {
+            console.log("Starting synchronization with server...");
+            const appwriteToX = new AppwriteToX(this.config, this.appwriteFolderPath);
+            await appwriteToX.toSchemas();
+            console.log("Synchronization complete, YAML and Schemas updated");
+        }
         // Start the setup
         console.log("Starting setup, this step sets up migrations, runs backup, wipes databases, and updates schemas (depending on your options)...");
         await startSetup(this.database, this.storage, this.config, options, this.appwriteFolderPath);
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "appwrite-utils-cli",
   "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-  "version": "0.0.261",
+  "version": "0.0.263",
   "main": "src/main.ts",
   "type": "module",
   "repository": {
package/src/main.ts
CHANGED
@@ -7,6 +7,7 @@ async function main() {
   const controller = new UtilsController();
   await controller.init();
 
+  let sync = false;
   let runProd = false;
   let runStaging = false;
   let runDev = false;
@@ -17,6 +18,9 @@ async function main() {
   let importData = false;
   let wipeDocuments = false;
   let shouldWriteFile = false;
+  if (args.includes("--sync")) {
+    sync = true;
+  }
   if (args.includes("--prod")) {
     runProd = true;
   }
@@ -49,6 +53,7 @@ async function main() {
   }
   if (args.includes("--init")) {
     await controller.run({
+      sync: sync,
       runProd: runProd,
       runStaging: runStaging,
       runDev: runDev,
@@ -64,6 +69,7 @@ async function main() {
     });
   } else {
     await controller.run({
+      sync: sync,
      runProd: runProd,
      runStaging: runStaging,
      runDev: runDev,
@@ -13,7 +13,7 @@ import fs from "fs";
|
|
13
13
|
import os from "os";
|
14
14
|
import { logger } from "./logging.js";
|
15
15
|
|
16
|
-
const getDatabaseFromConfig = (config: AppwriteConfig) => {
|
16
|
+
export const getDatabaseFromConfig = (config: AppwriteConfig) => {
|
17
17
|
if (!config.appwriteClient) {
|
18
18
|
config.appwriteClient = new Client()
|
19
19
|
.setEndpoint(config.appwriteEndpoint)
|
@@ -23,7 +23,7 @@ const getDatabaseFromConfig = (config: AppwriteConfig) => {
   return new Databases(config.appwriteClient!);
 };
 
-const getStorageFromConfig = (config: AppwriteConfig) => {
+export const getStorageFromConfig = (config: AppwriteConfig) => {
   if (!config.appwriteClient) {
     config.appwriteClient = new Client()
       .setEndpoint(config.appwriteEndpoint)
@@ -0,0 +1,122 @@
+import { SchemaGenerator } from "./schemaStrings.js";
+import { Databases, Query, type Models, type Permission } from "node-appwrite";
+import { fetchAllCollections } from "./collections.js";
+import { fetchAllDatabases } from "./databases.js";
+import {
+  collectionSchema,
+  attributeSchema,
+  type AppwriteConfig,
+  AppwriteConfigSchema,
+  type ConfigDatabases,
+  type Attribute,
+  permissionsSchema,
+  attributesSchema,
+  indexesSchema,
+} from "./schema.js";
+import { getDatabaseFromConfig } from "./afterImportActions.js";
+
+export class AppwriteToX {
+  config: AppwriteConfig;
+  updatedConfig: AppwriteConfig;
+  collToAttributeMap = new Map<string, Attribute[]>();
+  appwriteFolderPath: string;
+
+  constructor(config: AppwriteConfig, appwriteFolderPath: string) {
+    this.config = config;
+    this.updatedConfig = config;
+    this.appwriteFolderPath = appwriteFolderPath;
+  }
+
+  // Function to parse a single permission string
+  parsePermissionString = (permissionString: string) => {
+    const match = permissionString.match(/^(\w+)\('([^']+)'\)$/);
+    if (!match) {
+      throw new Error(`Invalid permission format: ${permissionString}`);
+    }
+    return {
+      permission: match[1],
+      target: match[2],
+    };
+  };
+
+  // Function to parse an array of permission strings
+  parsePermissionsArray = (permissions: string[]) => {
+    const parsedPermissions = permissionsSchema.parse(permissions);
+    // Validate the parsed permissions using Zod
+    return parsedPermissions ?? [];
+  };
+
+  updateCollectionConfigAttributes = (collection: Models.Collection) => {
+    for (const attribute of collection.attributes) {
+      const attributeMap = this.collToAttributeMap.get(
+        collection.name as string
+      );
+      const attributeParsed = attributeSchema.parse(attribute);
+      this.collToAttributeMap
+        .get(collection.name as string)
+        ?.push(attributeParsed);
+    }
+  };
+
+  async appwriteSync(config: AppwriteConfig) {
+    const db = getDatabaseFromConfig(config);
+    const databases = await fetchAllDatabases(db);
+    let updatedConfig: AppwriteConfig = { ...config };
+
+    // Loop through each database
+    for (const database of databases) {
+      const collections = await fetchAllCollections(database.$id, db);
+
+      // Loop through each collection in the current database
+      for (const collection of collections) {
+        const existingCollectionIndex = updatedConfig.collections.findIndex(
+          (c) => c.name === collection.name
+        );
+
+        // Parse the collection permissions and attributes
+        const collPermissions = this.parsePermissionsArray(
+          collection.$permissions
+        );
+        const collAttributes = attributesSchema.parse(collection.attributes);
+        this.collToAttributeMap.set(collection.name, collAttributes);
+        const collIndexes = indexesSchema.parse(collection.indexes);
+
+        // Prepare the collection object to be added or updated
+        const collToPush = collectionSchema.parse({
+          name: collection.name,
+          enabled: collection.enabled,
+          documentSecurity: collection.documentSecurity,
+          $createdAt: collection.$createdAt,
+          $updatedAt: collection.$updatedAt,
+          $permissions:
+            collPermissions.length > 0 ? collPermissions : undefined,
+          indexes: collIndexes.length > 0 ? collIndexes : undefined,
+          attributes: collAttributes.length > 0 ? collAttributes : undefined,
+        });
+
+        if (existingCollectionIndex !== -1) {
+          // Update existing collection
+          updatedConfig.collections[existingCollectionIndex] = collToPush;
+        } else {
+          // Add new collection
+          updatedConfig.collections.push(collToPush);
+        }
+      }
+
+      console.log(
+        `Processed ${collections.length} collections in ${database.name}`
+      );
+    }
+    this.updatedConfig = updatedConfig;
+  }
+
+  async toSchemas() {
+    await this.appwriteSync(this.config);
+    const generator = new SchemaGenerator(
+      this.updatedConfig,
+      this.appwriteFolderPath
+    );
+    generator.updateYamlSchemas();
+    generator.generateSchemas();
+  }
+}
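Note: `AppwriteToX.parsePermissionString` matches single-quoted targets (`read('any')`), whereas the string branch of `permissionSchema` earlier in this diff matches double quotes (`read("any")`). A quick sketch of the single-quote pattern in isolation:

```ts
// Same regex as parsePermissionString; expects single-quoted targets.
const permissionPattern = /^(\w+)\('([^']+)'\)$/;

const match = "read('any')".match(permissionPattern);
if (match) {
  const parsed = { permission: match[1], target: match[2] };
  // -> { permission: "read", target: "any" }
}
```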
@@ -1,6 +1,6 @@
 import { DateTime } from "luxon";
 import _ from "lodash";
-import type { AppwriteConfig } from "./schema.js";
+import type { AppwriteConfig, ConfigCollection } from "./schema.js";
 
 const { cloneDeep, isObject } = _;
 
@@ -16,7 +16,7 @@ export const converterFunctions = {
   */
  anyToString(value: any): string | null {
    if (value == null) return null;
-    return typeof value === "string" ? value :
+    return typeof value === "string" ? value : `${value}`;
  },

  /**
@@ -7,6 +7,7 @@ import {
   type AttributeMappings,
   type ConfigCollection,
   type ConfigDatabase,
+  type IdMapping,
   type ImportDef,
   type ImportDefs,
   type RelationshipAttribute,
@@ -22,6 +23,7 @@ import { findOrCreateOperation, updateOperation } from "./migrationHelper.js";
 import { AuthUserCreateSchema } from "../schemas/authUser.js";
 import _ from "lodash";
 import { UsersController } from "./users.js";
+import { finalizeByAttributeMap } from "../utils/helperFunctions.js";
 // Define a schema for the structure of collection import data using Zod for validation
 export const CollectionImportDataSchema = z.object({
   // Optional collection creation schema
@@ -96,22 +98,38 @@ export class DataLoader {
   * @param target - The target object with values to update the source object.
   * @returns The updated source object.
   */
-  mergeObjects(
-
-
-
-    Object.keys(
-      const
-
-
-
-
+  mergeObjects(source: any, update: any): any {
+    // Create a new object to hold the merged result
+    const result = { ...source };
+
+    Object.keys(update).forEach((key) => {
+      const sourceValue = source[key];
+      const updateValue = update[key];
+
+      // If the update value is an array, concatenate and remove duplicates
+      if (Array.isArray(updateValue)) {
+        const sourceArray = Array.isArray(sourceValue) ? sourceValue : [];
+        result[key] = [...new Set([...sourceArray, ...updateValue])];
+      }
+      // If the update value is an object, recursively merge
+      else if (
+        updateValue !== null &&
+        typeof updateValue === "object" &&
+        !(updateValue instanceof Date)
       ) {
-
+        result[key] = this.mergeObjects(sourceValue, updateValue);
+      }
+      // If the update value is not nullish, overwrite the source value
+      else if (updateValue !== null && updateValue !== undefined) {
+        result[key] = updateValue;
+      }
+      // If the update value is nullish, keep the original value unless it doesn't exist
+      else if (sourceValue === undefined) {
+        result[key] = updateValue;
      }
    });
-
+
+    return result;
  }
 
  // Method to load data from a file specified in the import definition
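Note: the rewritten `mergeObjects` unions arrays (dropping duplicates), recurses into plain objects, and only lets a nullish update value through when the source has no value at all. A standalone re-statement of those rules with a worked example (illustrative, not an import of the class):

```ts
function mergeObjects(source: any, update: any): any {
  const result = { ...source };
  Object.keys(update).forEach((key) => {
    const sourceValue = source?.[key];
    const updateValue = update[key];
    if (Array.isArray(updateValue)) {
      // Arrays: concatenate and de-duplicate
      const sourceArray = Array.isArray(sourceValue) ? sourceValue : [];
      result[key] = [...new Set([...sourceArray, ...updateValue])];
    } else if (updateValue !== null && typeof updateValue === "object" && !(updateValue instanceof Date)) {
      // Plain objects: merge recursively
      result[key] = mergeObjects(sourceValue, updateValue);
    } else if (updateValue !== null && updateValue !== undefined) {
      // Non-nullish scalars overwrite
      result[key] = updateValue;
    } else if (sourceValue === undefined) {
      // Nullish only wins when the source had nothing
      result[key] = updateValue;
    }
  });
  return result;
}

mergeObjects(
  { tags: ["a"], meta: { x: 1 }, name: "old" },
  { tags: ["a", "b"], meta: { y: 2 }, name: null }
);
// -> { tags: ["a", "b"], meta: { x: 1, y: 2 }, name: "old" }
```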
@@ -332,6 +350,10 @@ export class DataLoader {
 
       if (!collectionData || !collectionData.data) continue;
 
+      console.log(
+        `Updating references for collection: ${collectionConfig.name}`
+      );
+
       // Iterate over each data item in the current collection
       for (const item of collectionData.data) {
         let needsUpdate = false;
@@ -342,102 +364,74 @@ export class DataLoader {
           if (importDef.idMappings) {
             // Iterate over each idMapping defined for the current import definition
             for (const idMapping of importDef.idMappings) {
-              const
-
-
-
+              const oldIds = Array.isArray(
+                item.context[idMapping.sourceField]
+              )
+                ? item.context[idMapping.sourceField]
+                : [item.context[idMapping.sourceField]];
+              const resolvedNewIds: string[] = [];
+
+              oldIds.forEach((oldId: any) => {
+                // Attempt to find a new ID for the old ID
+                let newIdForOldId = this.findNewIdForOldId(oldId, idMapping);
+
+                // Check if a new ID was found and it's not already included
+                if (
+                  newIdForOldId &&
+                  !resolvedNewIds.includes(newIdForOldId)
+                ) {
+                  resolvedNewIds.push(newIdForOldId);
+                }
+              });
+              if (resolvedNewIds.length) {
+                const targetField =
+                  idMapping.fieldToSet || idMapping.targetField;
+                const isArray = collectionConfig.attributes.some(
+                  (attribute) =>
+                    attribute.key === targetField && attribute.array
+                );
 
-
-
-
-
-
-
-                item.finalData[fieldToUpdate].push(newIdForOldId);
-              } else {
-                // Otherwise, directly set the new ID
-                item.finalData[fieldToUpdate] = newIdForOldId;
-              }
-              console.log(`Updated ${oldId} to ${newIdForOldId}`);
-              needsUpdate = true;
+                // Set the target field based on whether it's an array or single value
+                if (isArray) {
+                  item.finalData[targetField] = resolvedNewIds;
+                } else {
+                  // In case of a single value, use the first resolved ID
+                  item.finalData[targetField] = resolvedNewIds[0];
                 }
+                needsUpdate = true;
              }
            }
          }
        }
      }
 
+        // Update the importMap if changes were made to the item
        if (needsUpdate) {
-          // Re-transform the item's finalData using its attribute mappings
-          const importDef = item.importDef; // Assuming importDef is available in the item
-          if (importDef && importDef.attributeMappings) {
-            item.finalData = await this.transformData(
-              item.finalData,
-              importDef.attributeMappings
-            );
-          }
          this.importMap.set(collectionKey, collectionData);
+          logger.info(
+            `Updated item: ${JSON.stringify(item.finalData, undefined, 2)}`
+          );
        }
      }
    }
  }
 
-
-
-
-
-
-
-
-  // // Load data for this particular import definition
-  // const collectionData = this.importMap.get(
-  //   this.getCollectionKey(collection.name)
-  // )?.data;
-  // if (!collectionData) continue;
-
-  // // Iterate over each item in the collection data
-  // for (const item of collectionData) {
-  //   let needsUpdate = false;
-
-  //   // Go through each idMapping defined for the collection
-  //   for (const mapping of importDef.idMappings) {
-  //     const oldReferenceId =
-  //       item[mapping.targetField as keyof typeof item]; // Get the current reference ID from the item
-  //     const referenceCollectionMap =
-  //       this.oldIdToNewIdPerCollectionMap.get(
-  //         this.getCollectionKey(mapping.targetCollection)
-  //       );
-
-  //     if (
-  //       referenceCollectionMap &&
-  //       referenceCollectionMap.has(oldReferenceId)
-  //     ) {
-  //       // Update the target field with the new reference ID from the mapped collection
-  //       item[mapping.sourceField as keyof typeof item] =
-  //         referenceCollectionMap.get(oldReferenceId);
-  //       needsUpdate = true;
-  //       console.log(
-  //         `Updated item with ${mapping.sourceField} = ${JSON.stringify(
-  //           item,
-  //           null,
-  //           undefined
-  //         )}.`
-  //       );
-  //     }
-  //   }
+  findNewIdForOldId(oldId: string, idMapping: IdMapping) {
+    // First, check if the old ID has been merged into a new one
+    for (const [newUserId, oldIds] of this.mergedUserMap.entries()) {
+      if (oldIds.includes(oldId)) {
+        return newUserId;
+      }
+    }
 
-
-
-
-
-
-
-
-
-  // }
-  // }
-  // }
-  // }
+    // If not merged, look for a direct mapping from old to new ID
+    const targetCollectionKey = this.getCollectionKey(
+      idMapping.targetCollection
+    );
+    const targetOldIdToNewIdMap =
+      this.oldIdToNewIdPerCollectionMap.get(targetCollectionKey);
+    return targetOldIdToNewIdMap?.get(oldId);
+  }
 
  private writeMapsToJsonFile() {
    const outputDir = path.resolve(process.cwd());
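Note: `findNewIdForOldId` resolves an old ID in two steps: merged user IDs (from `mergedUserMap`) win first, then the per-collection old-to-new map is consulted. A data-level sketch of that lookup order, with hypothetical map contents:

```ts
// Hypothetical maps mirroring the two lookups in findNewIdForOldId.
const mergedUserMap = new Map<string, string[]>([
  ["user_new_1", ["user_old_7", "user_old_9"]],
]);
const oldIdToNewIdPerCollectionMap = new Map<string, Map<string, string>>([
  ["authors", new Map([["author_old_3", "author_new_3"]])],
]);

function findNewIdForOldId(oldId: string, targetCollectionKey: string) {
  // 1) An old user ID folded into a merged user resolves to the surviving ID
  for (const [newUserId, oldIds] of mergedUserMap.entries()) {
    if (oldIds.includes(oldId)) return newUserId;
  }
  // 2) Otherwise fall back to the per-collection old -> new mapping
  return oldIdToNewIdPerCollectionMap.get(targetCollectionKey)?.get(oldId);
}

findNewIdForOldId("user_old_9", "users");     // -> "user_new_1"
findNewIdForOldId("author_old_3", "authors"); // -> "author_new_3"
```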
@@ -514,7 +508,6 @@ export class DataLoader {
       // Check for duplicate email and add to emailToUserIdMap if not found
       if (email && email.length > 0) {
         if (this.emailToUserIdMap.has(email)) {
-          logger.error(`Duplicate email found or user exists already: ${email}`);
           existingId = this.emailToUserIdMap.get(email);
         } else {
           this.emailToUserIdMap.set(email, newId);
@@ -524,7 +517,6 @@ export class DataLoader {
       // Check for duplicate phone and add to phoneToUserIdMap if not found
       if (phone && phone.length > 0) {
         if (this.phoneToUserIdMap.has(phone)) {
-          logger.error(`Duplicate phone found: ${phone}`);
           existingId = this.phoneToUserIdMap.get(phone);
         } else {
           this.phoneToUserIdMap.set(phone, newId);