appwrite-utils-cli 0.0.262 → 0.0.263
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -5
- package/dist/main.js +6 -0
- package/dist/migrations/afterImportActions.d.ts +3 -1
- package/dist/migrations/afterImportActions.js +2 -2
- package/dist/migrations/appwriteToX.d.ts +109 -0
- package/dist/migrations/appwriteToX.js +88 -0
- package/dist/migrations/backup.d.ts +4 -4
- package/dist/migrations/dataLoader.d.ts +17 -24
- package/dist/migrations/dataLoader.js +26 -32
- package/dist/migrations/databases.d.ts +2 -0
- package/dist/migrations/databases.js +23 -0
- package/dist/migrations/schema.d.ts +345 -32
- package/dist/migrations/schema.js +34 -14
- package/dist/migrations/schemaStrings.d.ts +1 -0
- package/dist/migrations/schemaStrings.js +10 -0
- package/dist/utilsController.d.ts +1 -0
- package/dist/utilsController.js +7 -0
- package/package.json +1 -1
- package/src/main.ts +6 -0
- package/src/migrations/afterImportActions.ts +2 -2
- package/src/migrations/appwriteToX.ts +122 -0
- package/src/migrations/dataLoader.ts +35 -43
- package/src/migrations/databases.ts +25 -0
- package/src/migrations/schema.ts +40 -14
- package/src/migrations/schemaStrings.ts +11 -0
- package/src/utilsController.ts +9 -0
package/dist/utilsController.js
CHANGED
@@ -11,6 +11,7 @@ import { afterImportActions, } from "./migrations/afterImportActions.js";
 import { validationRules, } from "./migrations/validationRules.js";
 import { ImportController } from "./migrations/importController.js";
 import _ from "lodash";
+import { AppwriteToX } from "./migrations/appwriteToX.js";
 async function loadConfig(configPath) {
     if (!fs.existsSync(configPath)) {
         throw new Error(`Configuration file not found at ${configPath}`);
@@ -84,6 +85,12 @@ export class UtilsController {
        if (!this.database || !this.storage || !this.config) {
            throw new Error("Database or storage not initialized");
        }
+        if (options.sync) {
+            console.log("Starting synchronization with server...");
+            const appwriteToX = new AppwriteToX(this.config, this.appwriteFolderPath);
+            await appwriteToX.toSchemas();
+            console.log("Synchronization complete, YAML and Schemas updated");
+        }
        // Start the setup
        console.log("Starting setup, this step sets up migrations, runs backup, wipes databases, and updates schemas (depending on your options)...");
        await startSetup(this.database, this.storage, this.config, options, this.appwriteFolderPath);
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "appwrite-utils-cli",
   "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-  "version": "0.0.262",
+  "version": "0.0.263",
   "main": "src/main.ts",
   "type": "module",
   "repository": {
package/src/main.ts
CHANGED
@@ -7,6 +7,7 @@ async function main() {
   const controller = new UtilsController();
   await controller.init();
 
+  let sync = false;
   let runProd = false;
   let runStaging = false;
   let runDev = false;
@@ -17,6 +18,9 @@ async function main() {
   let importData = false;
   let wipeDocuments = false;
   let shouldWriteFile = false;
+  if (args.includes("--sync")) {
+    sync = true;
+  }
   if (args.includes("--prod")) {
     runProd = true;
   }
@@ -49,6 +53,7 @@ async function main() {
   }
   if (args.includes("--init")) {
     await controller.run({
+      sync: sync,
      runProd: runProd,
      runStaging: runStaging,
      runDev: runDev,
@@ -64,6 +69,7 @@ async function main() {
    });
  } else {
    await controller.run({
+      sync: sync,
      runProd: runProd,
      runStaging: runStaging,
      runDev: runDev,
package/src/migrations/afterImportActions.ts
CHANGED
@@ -13,7 +13,7 @@ import fs from "fs";
 import os from "os";
 import { logger } from "./logging.js";
 
-const getDatabaseFromConfig = (config: AppwriteConfig) => {
+export const getDatabaseFromConfig = (config: AppwriteConfig) => {
   if (!config.appwriteClient) {
     config.appwriteClient = new Client()
       .setEndpoint(config.appwriteEndpoint)
@@ -23,7 +23,7 @@ const getDatabaseFromConfig = (config: AppwriteConfig) => {
   return new Databases(config.appwriteClient!);
 };
 
-const getStorageFromConfig = (config: AppwriteConfig) => {
+export const getStorageFromConfig = (config: AppwriteConfig) => {
   if (!config.appwriteClient) {
     config.appwriteClient = new Client()
       .setEndpoint(config.appwriteEndpoint)
package/src/migrations/appwriteToX.ts
ADDED
@@ -0,0 +1,122 @@
+import { SchemaGenerator } from "./schemaStrings.js";
+import { Databases, Query, type Models, type Permission } from "node-appwrite";
+import { fetchAllCollections } from "./collections.js";
+import { fetchAllDatabases } from "./databases.js";
+import {
+  collectionSchema,
+  attributeSchema,
+  type AppwriteConfig,
+  AppwriteConfigSchema,
+  type ConfigDatabases,
+  type Attribute,
+  permissionsSchema,
+  attributesSchema,
+  indexesSchema,
+} from "./schema.js";
+import { getDatabaseFromConfig } from "./afterImportActions.js";
+
+export class AppwriteToX {
+  config: AppwriteConfig;
+  updatedConfig: AppwriteConfig;
+  collToAttributeMap = new Map<string, Attribute[]>();
+  appwriteFolderPath: string;
+
+  constructor(config: AppwriteConfig, appwriteFolderPath: string) {
+    this.config = config;
+    this.updatedConfig = config;
+    this.appwriteFolderPath = appwriteFolderPath;
+  }
+
+  // Function to parse a single permission string
+  parsePermissionString = (permissionString: string) => {
+    const match = permissionString.match(/^(\w+)\('([^']+)'\)$/);
+    if (!match) {
+      throw new Error(`Invalid permission format: ${permissionString}`);
+    }
+    return {
+      permission: match[1],
+      target: match[2],
+    };
+  };
+
+  // Function to parse an array of permission strings
+  parsePermissionsArray = (permissions: string[]) => {
+    const parsedPermissions = permissionsSchema.parse(permissions);
+    // Validate the parsed permissions using Zod
+    return parsedPermissions ?? [];
+  };
+
+  updateCollectionConfigAttributes = (collection: Models.Collection) => {
+    for (const attribute of collection.attributes) {
+      const attributeMap = this.collToAttributeMap.get(
+        collection.name as string
+      );
+      const attributeParsed = attributeSchema.parse(attribute);
+      this.collToAttributeMap
+        .get(collection.name as string)
+        ?.push(attributeParsed);
+    }
+  };
+
+  async appwriteSync(config: AppwriteConfig) {
+    const db = getDatabaseFromConfig(config);
+    const databases = await fetchAllDatabases(db);
+    let updatedConfig: AppwriteConfig = { ...config };
+
+    // Loop through each database
+    for (const database of databases) {
+      const collections = await fetchAllCollections(database.$id, db);
+
+      // Loop through each collection in the current database
+      for (const collection of collections) {
+        const existingCollectionIndex = updatedConfig.collections.findIndex(
+          (c) => c.name === collection.name
+        );
+
+        // Parse the collection permissions and attributes
+        const collPermissions = this.parsePermissionsArray(
+          collection.$permissions
+        );
+        const collAttributes = attributesSchema.parse(collection.attributes);
+        this.collToAttributeMap.set(collection.name, collAttributes);
+        const collIndexes = indexesSchema.parse(collection.indexes);
+
+        // Prepare the collection object to be added or updated
+        const collToPush = collectionSchema.parse({
+          name: collection.name,
+          enabled: collection.enabled,
+          documentSecurity: collection.documentSecurity,
+          $createdAt: collection.$createdAt,
+          $updatedAt: collection.$updatedAt,
+          $permissions:
+            collPermissions.length > 0 ? collPermissions : undefined,
+          indexes: collIndexes.length > 0 ? collIndexes : undefined,
+          attributes: collAttributes.length > 0 ? collAttributes : undefined,
+        });
+
+        if (existingCollectionIndex !== -1) {
+          // Update existing collection
+          updatedConfig.collections[existingCollectionIndex] = collToPush;
+        } else {
+          // Add new collection
+          updatedConfig.collections.push(collToPush);
+        }
+      }
+
+      console.log(
+        `Processed ${collections.length} collections in ${database.name}`
+      );
+    }
+    this.updatedConfig = updatedConfig;
+  }
+
+  async toSchemas() {
+    await this.appwriteSync(this.config);
+    const generator = new SchemaGenerator(
+      this.updatedConfig,
+      this.appwriteFolderPath
+    );
+    generator.updateYamlSchemas();
+    generator.generateSchemas();
+  }
+}
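For context, a minimal sketch of how this new class is driven, mirroring the wiring added to `UtilsController.run` elsewhere in this release; the standalone `syncFromAppwrite` helper name and the relative import paths are assumptions for illustration:

```typescript
// Hypothetical driver, assuming a loaded AppwriteConfig and the project's appwrite folder path.
import { AppwriteToX } from "./migrations/appwriteToX.js";
import type { AppwriteConfig } from "./migrations/schema.js";

async function syncFromAppwrite(config: AppwriteConfig, appwriteFolderPath: string) {
  const appwriteToX = new AppwriteToX(config, appwriteFolderPath);
  // Pulls databases and collections from the server, merges them into the config,
  // then rewrites appwriteConfig.yaml and regenerates the schema files.
  await appwriteToX.toSchemas();
}
```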
package/src/migrations/dataLoader.ts
CHANGED
@@ -369,36 +369,37 @@ export class DataLoader {
              )
                ? item.context[idMapping.sourceField]
                : [item.context[idMapping.sourceField]];
+            const resolvedNewIds: string[] = [];
 
            oldIds.forEach((oldId: any) => {
-
+              // Attempt to find a new ID for the old ID
+              let newIdForOldId = this.findNewIdForOldId(oldId, idMapping);
 
-              //
-
-
-
-
-
-              const isArray = collectionConfig.attributes.some(
-                (attribute) =>
-                  attribute.key === targetField && attribute.array
-              );
-
-              // Properly update the target field based on whether it should be an array
-              if (isArray) {
-                if (!Array.isArray(item.finalData[targetField])) {
-                  item.finalData[targetField] = [newIdForOldId];
-                } else if (
-                  !item.finalData[targetField].includes(newIdForOldId)
-                ) {
-                  item.finalData[targetField].push(newIdForOldId);
-                }
-              } else {
-                item.finalData[targetField] = newIdForOldId;
-              }
-              needsUpdate = true;
+              // Check if a new ID was found and it's not already included
+              if (
+                newIdForOldId &&
+                !resolvedNewIds.includes(newIdForOldId)
+              ) {
+                resolvedNewIds.push(newIdForOldId);
              }
            });
+            if (resolvedNewIds.length) {
+              const targetField =
+                idMapping.fieldToSet || idMapping.targetField;
+              const isArray = collectionConfig.attributes.some(
+                (attribute) =>
+                  attribute.key === targetField && attribute.array
+              );
+
+              // Set the target field based on whether it's an array or single value
+              if (isArray) {
+                item.finalData[targetField] = resolvedNewIds;
+              } else {
+                // In case of a single value, use the first resolved ID
+                item.finalData[targetField] = resolvedNewIds[0];
+              }
+              needsUpdate = true;
+            }
          }
        }
      }
@@ -416,29 +417,20 @@ export class DataLoader {
   }
 
   findNewIdForOldId(oldId: string, idMapping: IdMapping) {
-    //
-    let newIdForOldId;
+    // First, check if the old ID has been merged into a new one
    for (const [newUserId, oldIds] of this.mergedUserMap.entries()) {
      if (oldIds.includes(oldId)) {
-
-        break;
-      }
-    }
-
-    // If no new ID found in merged users, check the old-to-new ID map for the target collection
-    if (!newIdForOldId) {
-      const targetCollectionKey = this.getCollectionKey(
-        idMapping.targetCollection
-      );
-      const targetOldIdToNewIdMap =
-        this.oldIdToNewIdPerCollectionMap.get(targetCollectionKey);
-
-      if (targetOldIdToNewIdMap && targetOldIdToNewIdMap.has(oldId)) {
-        newIdForOldId = targetOldIdToNewIdMap.get(oldId);
+        return newUserId;
      }
    }
 
-
+    // If not merged, look for a direct mapping from old to new ID
+    const targetCollectionKey = this.getCollectionKey(
+      idMapping.targetCollection
+    );
+    const targetOldIdToNewIdMap =
+      this.oldIdToNewIdPerCollectionMap.get(targetCollectionKey);
+    return targetOldIdToNewIdMap?.get(oldId);
  }
 
  private writeMapsToJsonFile() {
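The rewritten `findNewIdForOldId` now returns directly instead of tracking a local variable. A standalone sketch of the same two-step lookup (merged-user map first, then the per-collection old-to-new map), using local stand-in maps rather than the DataLoader's internals:

```typescript
// Illustrative only: the map names and sample IDs below are local to this sketch.
const mergedUserMap = new Map<string, string[]>([["new_1", ["old_a", "old_b"]]]);
const oldIdToNewIdPerCollectionMap = new Map<string, Map<string, string>>([
  ["users", new Map([["old_c", "new_2"]])],
]);

function findNewIdForOldId(oldId: string, targetCollectionKey: string): string | undefined {
  // Merged users take precedence over direct per-collection mappings.
  for (const [newUserId, oldIds] of mergedUserMap.entries()) {
    if (oldIds.includes(oldId)) return newUserId;
  }
  return oldIdToNewIdPerCollectionMap.get(targetCollectionKey)?.get(oldId);
}

console.log(findNewIdForOldId("old_b", "users")); // "new_1"
console.log(findNewIdForOldId("old_c", "users")); // "new_2"
```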
package/src/migrations/databases.ts
ADDED
@@ -0,0 +1,25 @@
+import { Databases, Query, type Models } from "node-appwrite";
+
+export const fetchAllDatabases = async (
+  database: Databases
+): Promise<Models.Database[]> => {
+  const databases = await database.list([Query.limit(25)]);
+  const allDatabases = databases.databases;
+  let lastDatabaseId = allDatabases[allDatabases.length - 1].$id;
+  if (databases.databases.length < 25) {
+    return allDatabases;
+  } else {
+    while (lastDatabaseId) {
+      const databases = await database.list([
+        Query.limit(25),
+        Query.cursorAfter(lastDatabaseId),
+      ]);
+      allDatabases.push(...databases.databases);
+      if (databases.databases.length < 25) {
+        break;
+      }
+      lastDatabaseId = databases.databases[databases.databases.length - 1].$id;
+    }
+  }
+  return allDatabases;
+};
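A hedged usage sketch for the new `fetchAllDatabases` helper: the client setup and the relative import path are assumptions, and the endpoint, project, and key values are placeholders.

```typescript
import { Client, Databases } from "node-appwrite";
import { fetchAllDatabases } from "./migrations/databases.js";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1") // placeholder endpoint
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");

// Pages through databases 25 at a time, following Query.cursorAfter until a short page is returned.
const allDatabases = await fetchAllDatabases(new Databases(client));
console.log(`Fetched ${allDatabases.length} databases`);
```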
package/src/migrations/schema.ts
CHANGED
@@ -1,4 +1,4 @@
-import { ID, IndexType } from "node-appwrite";
+import { ID, IndexType, Permission } from "node-appwrite";
 import { z } from "zod";
 
 const stringAttributeSchema = z.object({
@@ -376,6 +376,8 @@ export const indexSchema = z.object({
   orders: z.array(z.string()).optional(),
 });
 
+export const indexesSchema = z.array(indexSchema);
+
 export type Index = z.infer<typeof indexSchema>;
 
 export const AttributeMappingsSchema = z.array(
@@ -510,12 +512,45 @@ export const importDefSchemas = z
   .default([])
   .describe("The import definitions for the database");
 
+export const permissionSchema = z
+  .object({
+    permission: z.string(),
+    target: z.string(),
+  })
+  .or(
+    z.string().transform((val) => {
+      const trimmedVal = val.trim();
+      // Adjusted regex to match double quotes
+      const match = trimmedVal.match(/^(\w+)\("([^"]+)"\)$/);
+      if (!match) {
+        throw new Error(`Invalid permission format: ${trimmedVal}`);
+      }
+      return {
+        permission: match[1],
+        target: match[2],
+      };
+    })
+  );
+
+export const permissionsSchema = z.array(permissionSchema).optional();
+
+export const attributesSchema = z.array(attributeSchema).default([]);
+
 export const collectionSchema = z.object({
+  name: z.string().describe("The name of the collection"),
   $id: z
     .string()
     .optional()
     .default(ID.unique())
     .describe("The ID of the collection, auto generated if not provided"),
+  enabled: z
+    .boolean()
+    .default(true)
+    .describe("Whether the collection is enabled or not"),
+  documentSecurity: z
+    .boolean()
+    .default(false)
+    .describe("Whether document security is enabled or not"),
   $createdAt: z.string(),
   $updatedAt: z.string(),
   $permissions: z
@@ -527,19 +562,6 @@ export const collectionSchema = z.object({
     )
     .default([])
     .describe("The permissions of the collection"),
-  databaseId: z
-    .string()
-    .optional()
-    .describe("The ID of the database the collection belongs to"),
-  name: z.string().describe("The name of the collection"),
-  enabled: z
-    .boolean()
-    .default(true)
-    .describe("Whether the collection is enabled or not"),
-  documentSecurity: z
-    .boolean()
-    .default(false)
-    .describe("Whether document security is enabled or not"),
   attributes: z
     .array(attributeSchema)
     .default([])
@@ -549,6 +571,10 @@ export const collectionSchema = z.object({
     .default([])
     .describe("The indexes of the collection"),
   importDefs: importDefSchemas,
+  databaseId: z
+    .string()
+    .optional()
+    .describe("The ID of the database the collection belongs to"),
 });
 
 export const CollectionCreateSchema = collectionSchema.omit({
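To illustrate the new permission schema above, a small sketch of the two input shapes it accepts; the values and the relative import path are illustrative:

```typescript
import { permissionSchema, permissionsSchema } from "./migrations/schema.js";

// String form: the transform parses `rule("target")` (double quotes) into an object.
permissionSchema.parse('read("any")');
// => { permission: "read", target: "any" }

// Object form passes through the first branch unchanged.
permissionSchema.parse({ permission: "update", target: "users" });

// permissionsSchema is the optional array wrapper used when parsing collection $permissions.
permissionsSchema.parse(['read("any")', 'create("users")']);
```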
package/src/migrations/schemaStrings.ts
CHANGED
@@ -7,6 +7,7 @@ import type {
 import { z } from "zod";
 import fs from "fs";
 import path from "path";
+import { dump } from "js-yaml";
 
 interface RelationshipDetail {
   parentCollection: string;
@@ -28,6 +29,16 @@ export class SchemaGenerator {
     this.extractRelationships();
   }
 
+  public updateYamlSchemas(): void {
+    // Output this.config to a YAML file at appwriteFolderPath/appwriteConfig.yaml
+    let finalConfig = this.config;
+    finalConfig.appwriteClient = null;
+    const yamlConfig = finalConfig;
+    const yamlPath = path.join(this.appwriteFolderPath, "appwriteConfig.yaml");
+    fs.writeFileSync(yamlPath, dump(yamlConfig), { encoding: "utf-8" });
+    console.log(`YAML written to ${yamlPath}`);
+  }
+
   private extractRelationships(): void {
     this.config.collections.forEach((collection) => {
       collection.attributes.forEach((attr) => {
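A brief sketch of the new YAML step, assuming a config loaded elsewhere and an `./appwrite` output folder (both placeholders); `generateSchemas()` is the generator's existing output step, invoked alongside it in `AppwriteToX.toSchemas`:

```typescript
import { SchemaGenerator } from "./migrations/schemaStrings.js";
import type { AppwriteConfig } from "./migrations/schema.js";

declare const config: AppwriteConfig; // assumption: a config loaded elsewhere

const generator = new SchemaGenerator(config, "./appwrite");
// Nulls out config.appwriteClient, then dumps the config to ./appwrite/appwriteConfig.yaml.
generator.updateYamlSchemas();
// Regenerates the per-collection schema files as before.
generator.generateSchemas();
```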
package/src/utilsController.ts
CHANGED
@@ -23,6 +23,7 @@ import {
 } from "./migrations/validationRules.js";
 import { ImportController } from "./migrations/importController.js";
 import _ from "lodash";
+import { AppwriteToX } from "./migrations/appwriteToX.js";
 
 async function loadConfig(configPath: string) {
   if (!fs.existsSync(configPath)) {
@@ -35,6 +36,7 @@ async function loadConfig(configPath: string) {
 }
 
 export interface SetupOptions {
+  sync: boolean;
   runProd: boolean;
   runStaging: boolean;
   runDev: boolean;
@@ -127,6 +129,13 @@ export class UtilsController {
      throw new Error("Database or storage not initialized");
    }
 
+    if (options.sync) {
+      console.log("Starting synchronization with server...");
+      const appwriteToX = new AppwriteToX(this.config, this.appwriteFolderPath);
+      await appwriteToX.toSchemas();
+      console.log("Synchronization complete, YAML and Schemas updated");
+    }
+
    // Start the setup
    console.log(
      "Starting setup, this step sets up migrations, runs backup, wipes databases, and updates schemas (depending on your options)..."