@mono-labs/cli 0.0.165 → 0.0.170
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/app.js +49 -49
- package/lib/commands/build-process/cliFactory.js +7 -2
- package/lib/commands/build-process/runHasteCommand.js +20 -6
- package/lib/commands/build-process/testflag.js +1 -0
- package/lib/commands/generate/generateSeed.js +223 -223
- package/lib/commands/generate/index.js +30 -30
- package/lib/commands/prune/index.js +12 -12
- package/lib/commands/prune/prune.js +50 -50
- package/lib/commands/seed/import.js +30 -30
- package/lib/commands/seed/index.js +12 -12
- package/lib/commands/submit/index.js +38 -38
- package/lib/commands/update/eas.js +39 -39
- package/lib/commands/update/index.js +87 -87
- package/lib/config.js +4 -4
- package/lib/index.js +57 -57
- package/package.json +2 -3
package/lib/app.js
CHANGED
@@ -1,49 +1,49 @@
All 49 lines are removed and re-added with identical content; the change is formatting-only. The file reads:

import { Command } from 'commander';

import { STAGING_URL } from './config.js';

import fs from 'node:fs';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const pkgPath = join(__dirname, '../', 'package.json');
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));

const version = pkg.version || '0.0.1';
export const program = new Command();

const getBinFromPackageJSON = () => {
  const keyList = Object.keys(pkg.bin);
  if (keyList.length === 0) {
    throw new Error('No bin field found in package.json');
  }
  return keyList[0];
};

const programName = getBinFromPackageJSON();

program
  .name(programName)
  .description(pkg.description || '')
  .version(version);
const NEXT_PUBLIC_API_URL =
  (process.env.NEXT_PUBLIC_API_URL &&
    process.env.NEXT_PUBLIC_API_URL.length > 0) ||
  STAGING_URL;

export const generateEnvValues = (
  forceProd = false,
  ngrokUrl = 'localhost:3000',
  useAtlas = false
) => {
  return {
    ...process.env,
    NEXT_PUBLIC_API_URL,
    NEXT_FORCE_PROD: forceProd,
    EXPO_PRIVATE_API_URL: ngrokUrl,
    EXPO_UNSTABLE_ATLAS: useAtlas,
  };
};
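
Two things worth flagging in the shipped app.js. First, the NEXT_PUBLIC_API_URL expression evaluates to the boolean true whenever the environment variable is set and non-empty (only the fallback branch yields a string), so consumers receive true rather than a URL. Second, getBinFromPackageJSON calls Object.keys(pkg.bin) before any check, so a package.json with no bin field throws a raw TypeError instead of the intended 'No bin field found' error. A minimal sketch of the presumably intended URL selection (STAGING_URL is stood in with a hypothetical value; this is not the package's code):

// Sketch only: assumes the intent is "env var when non-empty, else staging".
const STAGING_URL = 'https://staging.example.com'; // hypothetical stand-in

const NEXT_PUBLIC_API_URL =
  process.env.NEXT_PUBLIC_API_URL && process.env.NEXT_PUBLIC_API_URL.length > 0
    ? process.env.NEXT_PUBLIC_API_URL // the URL itself, not the boolean `true`
    : STAGING_URL;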

package/lib/commands/build-process/cliFactory.js
CHANGED

@@ -5,6 +5,7 @@ import { verifyOptionValue } from './validators.js';
 import { mergeData, setData } from './dataLayer.js';
 import { getHasteConfig } from '../loadFromRoot.js';
 import { pruneRepo } from '../prune/prune.js';
+import { testFlag } from './testflag.js';
 /**
  * Register commander commands for each haste file definition.
  * Handles argument, options, validation, and action wiring.

@@ -49,7 +50,7 @@ export function buildCommands(files) {

   let current = program
     .command(commandName)
-    .description(configObject.description || '
+    .description(configObject.description || 'Mono command');
   const argInfo = configObject.argument;
   // Argument
   if (argInfo) {

@@ -58,7 +59,11 @@ export function buildCommands(files) {
     const argSpec = required ? `<${type}>` : `[${type}]`;
     current = current.argument(argSpec, argInfo.description || '');
   }
-
+  if (testFlag)
+    console.log(
+      'firstConfigObject:',
+      JSON.stringify(configObject, null, 2)
+    );

   // Options
   Object.entries(optionsData).forEach(([optionKey, meta]) => {
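
For orientation, buildCommands turns each haste config object into a commander command. A minimal sketch of that wiring under the shapes visible in the hunks above (the spec object and its values are hypothetical; only the description and argument handling mirrors the diff):

import { Command } from 'commander';

const program = new Command();

// Hypothetical config object in the shape the diff suggests.
const spec = {
  name: 'deploy',
  description: 'Mono command',
  argument: { type: 'target', required: false, description: 'deploy target' },
};

let current = program
  .command(spec.name)
  .description(spec.description || 'Mono command');

if (spec.argument) {
  // Required arguments render as <type>, optional ones as [type].
  const argSpec = spec.argument.required
    ? `<${spec.argument.type}>`
    : `[${spec.argument.type}]`;
  current = current.argument(argSpec, spec.argument.description || '');
}

current.action((target) => console.log('would run with target:', target));
program.parse(['node', 'cli', 'deploy', 'staging']);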

package/lib/commands/build-process/runHasteCommand.js
CHANGED

@@ -4,6 +4,19 @@ import { killAllBackground } from './runners/processManager.js';
 import { getHasteConfig } from '../loadFromRoot.js';
 import { parseEnvFile } from './readEnv.js';
 import path from 'node:path';
+import { testFlag } from './testflag.js';
+
+export function getAllowAllKeys(cfg) {
+  const decls = cfg.options ?? {};
+  return Object.entries(decls)
+    .filter(
+      ([, v]) =>
+        Boolean(v?.allowAll) &&
+        Array.isArray(v?.options) &&
+        v.options.length > 0
+    )
+    .map(([k]) => k);
+}

 /**
  * Orchestrate execution of a single haste command definition.

@@ -53,7 +66,7 @@ export async function runHasteCommand(configObject, options = {}) {
   const actions = configObject.actions ?? [];

   console.log(
-    `→ Executing
+    `→ Executing mono command: ${configObject.name || 'Unnamed Command'}`
   );
   console.log(`→ Using AWS profile: ${awsProfile}`);
   console.log(`→ Using environment: ${options.stage ? 'stage' : 'dev'}`);

@@ -74,11 +87,12 @@ export async function runHasteCommand(configObject, options = {}) {
     console.log(`→ background action: ${cmd}`);
     runBackground(cmd, envObj, options);
   }
-
-
-
-
-
+  if (testFlag) {
+    console.log(`→ foreground action (attached): ${fg}`);
+    console.log('options:', options);
+    console.log('secondConfigObject:', JSON.stringify(configObject, null, 2));
+    console.log('config', JSON.stringify(configObject.options || {}, null, 2));
+  }

   try {
     await runBackground(fg, envObj, options, true);
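
The new getAllowAllKeys helper collects the names of declared options that set allowAll and carry a non-empty options list. A quick usage sketch against a hypothetical config (only the fields the helper reads are meaningful; the values are invented):

const cfg = {
  options: {
    region:  { allowAll: true,  options: ['us-east-1', 'us-east-2'] },
    stage:   { allowAll: true,  options: [] },           // empty list: excluded
    verbose: { allowAll: false, options: ['on', 'off'] } // allowAll off: excluded
  },
};

console.log(getAllowAllKeys(cfg)); // ['region']
console.log(getAllowAllKeys({})); // [] (cfg.options defaults to {})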

package/lib/commands/build-process/testflag.js
ADDED

@@ -0,0 +1 @@
+export const testFlag = false;

package/lib/commands/generate/generateSeed.js
CHANGED

@@ -1,223 +1,223 @@
All 223 lines are removed and re-added with identical content; as with app.js, the change is formatting-only. The file reads:

import fs from 'fs';
// Initialize the DynamoDB client

import { readFileSync } from 'fs';
import path from 'path';

import { DynamoDBClient, ScanCommand } from '@aws-sdk/client-dynamodb';
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';

import { join } from 'node:path';
const packageJSON = JSON.parse(
  readFileSync(join(process.cwd(), 'package.json'), 'utf8')
);

const awsObject = packageJSON['aws'] || {};
const projectName = packageJSON['name'] || 'project';

const awsProfile = awsObject['profile'] || 'default';

// TODO: Fix Copy Issues
const dirPath = './docker/seed'; // Folder path to delete files from

// Function to delete all files in the specified directory (ignores directories)
function deleteFilesInDir(dir) {
  // Read all files and directories inside the directory
  const files = fs.readdirSync(dir);

  // Loop through each file and directory
  files.forEach((file) => {
    const filePath = path.join(dir, file); // Get full path of the file or directory

    // Check if it's a file (not a directory)
    const stats = fs.statSync(filePath);

    if (stats.isFile()) {
      // If it's a file, delete it
      fs.unlinkSync(filePath);
    }
  });
}

// Function to scan the DynamoDB table and generate the desired JSON format
async function generateTableExport(tablename, client, profilesOnly = false) {
  let params = {
    TableName: tablename,
  };

  // This will hold all the data retrieved from DynamoDB
  let allItems = [];
  let lastEvaluatedKey = null;

  // If there are more items (pagination in case of large tables)
  do {
    if (lastEvaluatedKey) {
      params.ExclusiveStartKey = lastEvaluatedKey;
    }

    try {
      // Perform the scan operation

      const data = await client.send(new ScanCommand(params));
      allItems = allItems.concat(data.Items);
      lastEvaluatedKey = data.LastEvaluatedKey; // Set the last evaluated key for pagination
    } catch (error) {
      console.error('Error scanning DynamoDB table:', error);
      return;
    }
  } while (lastEvaluatedKey); // Continue scanning if there are more pages of results

  // Format the data into the desired JSON structure
  //
  const formattedData = {
    [tablename]: allItems
      .filter(
        (item) =>
          !profilesOnly ||
          !tablename.includes('Database') ||
          unmarshall(item)['SK'].includes('PROFILE')
      )
      .map((item) => {
        const formattedItem = unmarshall(item); // Unmarshall DynamoDB format to JS object
        // Ensure the correct format: PutRequest -> Item
        //if (tablename.includes('Database') && !formattedItem['SK'].includes('USER')) return undefined;
        return {
          PutRequest: {
            Item: marshall(formattedItem), // Marshall JS object back to DynamoDB format
          },
        };
      }),
  };
  return formattedData;
}
async function exportDynamoTable(
  tables,
  client,
  dbRewrites,
  profilesOnly = false,
  strOut = './docker/seed'
) {
  deleteFilesInDir(dirPath);
  let output = await Promise.all(
    tables.map(
      async (tableName) =>
        await generateTableExport(tableName, client, profilesOnly)
    )
  );
  const fileName = `${strOut}/dynamodb-seed`;

  const outputRes = {};
  output.map((item) => {
    const keys = Object.keys(item);

    return keys.map((key) => {
      const value = item[key].filter((item) => item !== undefined);
      outputRes[key] = value;
      return { value };
    });
  });

  output = outputRes;

  const fileObject = {};
  const dbObject = {};
  Object.keys(output).forEach((key) => {
    const value = output[key];

    if (value.length > 0) {
      const dbKey = dbRewrites[key] || key;

      dbObject[dbKey] = value;
    }
  });

  let countTotal = 0;

  Object.keys(dbObject).forEach((key) => {
    let currentPosition = 0;
    const numOfItems = 20;
    const putItems = dbObject[key];
    while (currentPosition < putItems.length) {
      if (dbObject[key].length > numOfItems) {
        const result = putItems.slice(
          currentPosition,
          currentPosition + numOfItems
        );
        fileObject[`${fileName}-${countTotal}`] = { [key]: result };
        currentPosition += numOfItems;
        countTotal += 1;
      } else {
        const result = putItems.slice(currentPosition, putItems.length);
        fileObject[`${fileName}-${countTotal}`] = { [key]: result };
        currentPosition += numOfItems;
        countTotal += 1;
      }
    }
  });

  Object.keys(fileObject).forEach((key) => {
    fs.writeFileSync(`${key}.json`, JSON.stringify(fileObject[key], null, 2));
  });
}
export function createDirIfNotExists(dirname) {
  if (!fs.existsSync(dirname)) {
    fs.mkdirSync(dirname);
  }
}

// Run the function

export function exportTable(
  newTables,
  owner,
  altOwner = 'dev',
  rewriteDb,
  live = false,
  region = 'us-east-2',
  profilesOnly = false
) {
  createDirIfNotExists(dirPath);
  const tables = live ? ['MainDatabase'] : ['MainDB'];
  const dbRewrites = {};
  const dbOg = {};
  tables.map((table, index) => (dbOg[table] = newTables[index] || ''));
  tables.map((table, index) => {
    const rewriteDbIndex = rewriteDb[index];
    if (rewriteDbIndex === 'MainDB') {
      dbRewrites[`${projectName}-infra-${table}-${owner}`] =
        `${rewriteDbIndex || table}`;
    } else {
      const newTable = tables[index].replace(
        tables[index],
        newTables[index] || tables[index]
      );
      dbRewrites[`${projectName}-infra-${table}-${owner}`] =
        `${projectName}-infra-${newTable || table}-${altOwner || owner}`;
    }
  });

  let dbTables = ['MainDB'];

  if (live) {
    dbTables = tables.map((table) => {
      return `${projectName}-infra-${table}-${owner}`;
    });
  }

  let client = undefined;
  if (live) {
    client = new DynamoDBClient({
      region: region, // Replace with your AWS region
    });
  } else {
    client = new DynamoDBClient({
      region: region, // Replace with your AWS region
      endpoint: 'http://localhost:8000', // The default local DynamoDB endpoint
      credentials: {
        accessKeyId: 'fakeAccessKeyId', // Use fake credentials for local DynamoDB
        secretAccessKey: 'fakeSecretAccessKey',
      },
    });
  }
  exportDynamoTable(dbTables, client, dbRewrites, profilesOnly);
}
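
Two notes on the re-emitted generateSeed.js. The batching loop caps each seed file at 20 PutRequest items, presumably to stay under DynamoDB's 25-request ceiling for a single BatchWriteItem call. Its two branches are also equivalent, since Array.prototype.slice clamps an end index past the array length; the whole loop reduces to a plain chunking pass (sketch, not the package's code):

// Equivalent chunking: slice() clamps `end`, so no length check is needed.
function chunkPutItems(putItems, size = 20) {
  const chunks = [];
  for (let i = 0; i < putItems.length; i += size) {
    chunks.push(putItems.slice(i, i + size));
  }
  return chunks;
}

console.log(chunkPutItems([1, 2, 3, 4, 5], 2)); // [[1, 2], [3, 4], [5]]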