@mono-labs/cli 0.0.167 → 0.0.170
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/app.js +49 -49
- package/lib/commands/build-process/cliFactory.js +1 -1
- package/lib/commands/build-process/runHasteCommand.js +1 -1
- package/lib/commands/generate/generateSeed.js +223 -223
- package/lib/commands/generate/index.js +30 -30
- package/lib/commands/prune/index.js +12 -12
- package/lib/commands/prune/prune.js +50 -50
- package/lib/commands/seed/import.js +30 -30
- package/lib/commands/seed/index.js +12 -12
- package/lib/commands/submit/index.js +38 -38
- package/lib/commands/update/eas.js +39 -39
- package/lib/commands/update/index.js +87 -87
- package/lib/config.js +4 -4
- package/lib/index.js +57 -57
- package/package.json +2 -3
package/lib/app.js
CHANGED
|
@@ -1,49 +1,49 @@
|
|
|
1
|
-
import { Command } from 'commander';
|
|
2
|
-
|
|
3
|
-
import { STAGING_URL } from './config.js';
|
|
4
|
-
|
|
5
|
-
import fs from 'node:fs';
|
|
6
|
-
import { fileURLToPath } from 'node:url';
|
|
7
|
-
import { dirname, join } from 'node:path';
|
|
8
|
-
|
|
9
|
-
const __filename = fileURLToPath(import.meta.url);
|
|
10
|
-
const __dirname = dirname(__filename);
|
|
11
|
-
|
|
12
|
-
const pkgPath = join(__dirname, '../', 'package.json');
|
|
13
|
-
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
|
|
14
|
-
|
|
15
|
-
const version = pkg.version || '0.0.1';
|
|
16
|
-
export const program = new Command();
|
|
17
|
-
|
|
18
|
-
const getBinFromPackageJSON = () => {
|
|
19
|
-
const keyList = Object.keys(pkg.bin);
|
|
20
|
-
if (keyList.length === 0) {
|
|
21
|
-
throw new Error('No bin field found in package.json');
|
|
22
|
-
}
|
|
23
|
-
return keyList[0];
|
|
24
|
-
};
|
|
25
|
-
|
|
26
|
-
const programName = getBinFromPackageJSON();
|
|
27
|
-
|
|
28
|
-
program
|
|
29
|
-
.name(programName)
|
|
30
|
-
.description(pkg.description || '')
|
|
31
|
-
.version(version);
|
|
32
|
-
const NEXT_PUBLIC_API_URL =
|
|
33
|
-
(process.env.NEXT_PUBLIC_API_URL &&
|
|
34
|
-
process.env.NEXT_PUBLIC_API_URL.length > 0) ||
|
|
35
|
-
STAGING_URL;
|
|
36
|
-
|
|
37
|
-
export const generateEnvValues = (
|
|
38
|
-
forceProd = false,
|
|
39
|
-
ngrokUrl = 'localhost:3000',
|
|
40
|
-
useAtlas = false
|
|
41
|
-
) => {
|
|
42
|
-
return {
|
|
43
|
-
...process.env,
|
|
44
|
-
NEXT_PUBLIC_API_URL,
|
|
45
|
-
NEXT_FORCE_PROD: forceProd,
|
|
46
|
-
EXPO_PRIVATE_API_URL: ngrokUrl,
|
|
47
|
-
EXPO_UNSTABLE_ATLAS: useAtlas,
|
|
48
|
-
};
|
|
49
|
-
};
|
|
1
|
+
import { Command } from 'commander';
|
|
2
|
+
|
|
3
|
+
import { STAGING_URL } from './config.js';
|
|
4
|
+
|
|
5
|
+
import fs from 'node:fs';
|
|
6
|
+
import { fileURLToPath } from 'node:url';
|
|
7
|
+
import { dirname, join } from 'node:path';
|
|
8
|
+
|
|
9
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
10
|
+
const __dirname = dirname(__filename);
|
|
11
|
+
|
|
12
|
+
const pkgPath = join(__dirname, '../', 'package.json');
|
|
13
|
+
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
|
|
14
|
+
|
|
15
|
+
const version = pkg.version || '0.0.1';
|
|
16
|
+
export const program = new Command();
|
|
17
|
+
|
|
18
|
+
const getBinFromPackageJSON = () => {
|
|
19
|
+
const keyList = Object.keys(pkg.bin);
|
|
20
|
+
if (keyList.length === 0) {
|
|
21
|
+
throw new Error('No bin field found in package.json');
|
|
22
|
+
}
|
|
23
|
+
return keyList[0];
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
const programName = getBinFromPackageJSON();
|
|
27
|
+
|
|
28
|
+
program
|
|
29
|
+
.name(programName)
|
|
30
|
+
.description(pkg.description || '')
|
|
31
|
+
.version(version);
|
|
32
|
+
const NEXT_PUBLIC_API_URL =
|
|
33
|
+
(process.env.NEXT_PUBLIC_API_URL &&
|
|
34
|
+
process.env.NEXT_PUBLIC_API_URL.length > 0) ||
|
|
35
|
+
STAGING_URL;
|
|
36
|
+
|
|
37
|
+
export const generateEnvValues = (
|
|
38
|
+
forceProd = false,
|
|
39
|
+
ngrokUrl = 'localhost:3000',
|
|
40
|
+
useAtlas = false
|
|
41
|
+
) => {
|
|
42
|
+
return {
|
|
43
|
+
...process.env,
|
|
44
|
+
NEXT_PUBLIC_API_URL,
|
|
45
|
+
NEXT_FORCE_PROD: forceProd,
|
|
46
|
+
EXPO_PRIVATE_API_URL: ngrokUrl,
|
|
47
|
+
EXPO_UNSTABLE_ATLAS: useAtlas,
|
|
48
|
+
};
|
|
49
|
+
};
|
|
@@ -50,7 +50,7 @@ export function buildCommands(files) {
|
|
|
50
50
|
|
|
51
51
|
let current = program
|
|
52
52
|
.command(commandName)
|
|
53
|
-
.description(configObject.description || '
|
|
53
|
+
.description(configObject.description || 'Mono command');
|
|
54
54
|
const argInfo = configObject.argument;
|
|
55
55
|
// Argument
|
|
56
56
|
if (argInfo) {
|
|
@@ -66,7 +66,7 @@ export async function runHasteCommand(configObject, options = {}) {
|
|
|
66
66
|
const actions = configObject.actions ?? [];
|
|
67
67
|
|
|
68
68
|
console.log(
|
|
69
|
-
`→ Executing
|
|
69
|
+
`→ Executing mono command: ${configObject.name || 'Unnamed Command'}`
|
|
70
70
|
);
|
|
71
71
|
console.log(`→ Using AWS profile: ${awsProfile}`);
|
|
72
72
|
console.log(`→ Using environment: ${options.stage ? 'stage' : 'dev'}`);
|
|
@@ -1,223 +1,223 @@
|
|
|
1
|
-
import fs from 'fs';
|
|
2
|
-
// Initialize the DynamoDB client
|
|
3
|
-
|
|
4
|
-
import { readFileSync } from 'fs';
|
|
5
|
-
import path from 'path';
|
|
6
|
-
|
|
7
|
-
import { DynamoDBClient, ScanCommand } from '@aws-sdk/client-dynamodb';
|
|
8
|
-
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
|
|
9
|
-
|
|
10
|
-
import { join } from 'node:path';
|
|
11
|
-
const packageJSON = JSON.parse(
|
|
12
|
-
readFileSync(join(process.cwd(), 'package.json'), 'utf8')
|
|
13
|
-
);
|
|
14
|
-
|
|
15
|
-
const awsObject = packageJSON['aws'] || {};
|
|
16
|
-
const projectName = packageJSON['name'] || 'project';
|
|
17
|
-
|
|
18
|
-
const awsProfile = awsObject['profile'] || 'default';
|
|
19
|
-
|
|
20
|
-
// TODO: Fix Copy Issues
|
|
21
|
-
const dirPath = './docker/seed'; // Folder path to delete files from
|
|
22
|
-
|
|
23
|
-
// Function to delete all files in the specified directory (ignores directories)
|
|
24
|
-
function deleteFilesInDir(dir) {
|
|
25
|
-
// Read all files and directories inside the directory
|
|
26
|
-
const files = fs.readdirSync(dir);
|
|
27
|
-
|
|
28
|
-
// Loop through each file and directory
|
|
29
|
-
files.forEach((file) => {
|
|
30
|
-
const filePath = path.join(dir, file); // Get full path of the file or directory
|
|
31
|
-
|
|
32
|
-
// Check if it's a file (not a directory)
|
|
33
|
-
const stats = fs.statSync(filePath);
|
|
34
|
-
|
|
35
|
-
if (stats.isFile()) {
|
|
36
|
-
// If it's a file, delete it
|
|
37
|
-
fs.unlinkSync(filePath);
|
|
38
|
-
}
|
|
39
|
-
});
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
// Function to scan the DynamoDB table and generate the desired JSON format
|
|
43
|
-
async function generateTableExport(tablename, client, profilesOnly = false) {
|
|
44
|
-
let params = {
|
|
45
|
-
TableName: tablename,
|
|
46
|
-
};
|
|
47
|
-
|
|
48
|
-
// This will hold all the data retrieved from DynamoDB
|
|
49
|
-
let allItems = [];
|
|
50
|
-
let lastEvaluatedKey = null;
|
|
51
|
-
|
|
52
|
-
// If there are more items (pagination in case of large tables)
|
|
53
|
-
do {
|
|
54
|
-
if (lastEvaluatedKey) {
|
|
55
|
-
params.ExclusiveStartKey = lastEvaluatedKey;
|
|
56
|
-
}
|
|
57
|
-
|
|
58
|
-
try {
|
|
59
|
-
// Perform the scan operation
|
|
60
|
-
|
|
61
|
-
const data = await client.send(new ScanCommand(params));
|
|
62
|
-
allItems = allItems.concat(data.Items);
|
|
63
|
-
lastEvaluatedKey = data.LastEvaluatedKey; // Set the last evaluated key for pagination
|
|
64
|
-
} catch (error) {
|
|
65
|
-
console.error('Error scanning DynamoDB table:', error);
|
|
66
|
-
return;
|
|
67
|
-
}
|
|
68
|
-
} while (lastEvaluatedKey); // Continue scanning if there are more pages of results
|
|
69
|
-
|
|
70
|
-
// Format the data into the desired JSON structure
|
|
71
|
-
//
|
|
72
|
-
const formattedData = {
|
|
73
|
-
[tablename]: allItems
|
|
74
|
-
.filter(
|
|
75
|
-
(item) =>
|
|
76
|
-
!profilesOnly ||
|
|
77
|
-
!tablename.includes('Database') ||
|
|
78
|
-
unmarshall(item)['SK'].includes('PROFILE')
|
|
79
|
-
)
|
|
80
|
-
.map((item) => {
|
|
81
|
-
const formattedItem = unmarshall(item); // Unmarshall DynamoDB format to JS object
|
|
82
|
-
// Ensure the correct format: PutRequest -> Item
|
|
83
|
-
//if (tablename.includes('Database') && !formattedItem['SK'].includes('USER')) return undefined;
|
|
84
|
-
return {
|
|
85
|
-
PutRequest: {
|
|
86
|
-
Item: marshall(formattedItem), // Marshall JS object back to DynamoDB format
|
|
87
|
-
},
|
|
88
|
-
};
|
|
89
|
-
}),
|
|
90
|
-
};
|
|
91
|
-
return formattedData;
|
|
92
|
-
}
|
|
93
|
-
async function exportDynamoTable(
|
|
94
|
-
tables,
|
|
95
|
-
client,
|
|
96
|
-
dbRewrites,
|
|
97
|
-
profilesOnly = false,
|
|
98
|
-
strOut = './docker/seed'
|
|
99
|
-
) {
|
|
100
|
-
deleteFilesInDir(dirPath);
|
|
101
|
-
let output = await Promise.all(
|
|
102
|
-
tables.map(
|
|
103
|
-
async (tableName) =>
|
|
104
|
-
await generateTableExport(tableName, client, profilesOnly)
|
|
105
|
-
)
|
|
106
|
-
);
|
|
107
|
-
const fileName = `${strOut}/dynamodb-seed`;
|
|
108
|
-
|
|
109
|
-
const outputRes = {};
|
|
110
|
-
output.map((item) => {
|
|
111
|
-
const keys = Object.keys(item);
|
|
112
|
-
|
|
113
|
-
return keys.map((key) => {
|
|
114
|
-
const value = item[key].filter((item) => item !== undefined);
|
|
115
|
-
outputRes[key] = value;
|
|
116
|
-
return { value };
|
|
117
|
-
});
|
|
118
|
-
});
|
|
119
|
-
|
|
120
|
-
output = outputRes;
|
|
121
|
-
|
|
122
|
-
const fileObject = {};
|
|
123
|
-
const dbObject = {};
|
|
124
|
-
Object.keys(output).forEach((key) => {
|
|
125
|
-
const value = output[key];
|
|
126
|
-
|
|
127
|
-
if (value.length > 0) {
|
|
128
|
-
const dbKey = dbRewrites[key] || key;
|
|
129
|
-
|
|
130
|
-
dbObject[dbKey] = value;
|
|
131
|
-
}
|
|
132
|
-
});
|
|
133
|
-
|
|
134
|
-
let countTotal = 0;
|
|
135
|
-
|
|
136
|
-
Object.keys(dbObject).forEach((key) => {
|
|
137
|
-
let currentPosition = 0;
|
|
138
|
-
const numOfItems = 20;
|
|
139
|
-
const putItems = dbObject[key];
|
|
140
|
-
while (currentPosition < putItems.length) {
|
|
141
|
-
if (dbObject[key].length > numOfItems) {
|
|
142
|
-
const result = putItems.slice(
|
|
143
|
-
currentPosition,
|
|
144
|
-
currentPosition + numOfItems
|
|
145
|
-
);
|
|
146
|
-
fileObject[`${fileName}-${countTotal}`] = { [key]: result };
|
|
147
|
-
currentPosition += numOfItems;
|
|
148
|
-
countTotal += 1;
|
|
149
|
-
} else {
|
|
150
|
-
const result = putItems.slice(currentPosition, putItems.length);
|
|
151
|
-
fileObject[`${fileName}-${countTotal}`] = { [key]: result };
|
|
152
|
-
currentPosition += numOfItems;
|
|
153
|
-
countTotal += 1;
|
|
154
|
-
}
|
|
155
|
-
}
|
|
156
|
-
});
|
|
157
|
-
|
|
158
|
-
Object.keys(fileObject).forEach((key) => {
|
|
159
|
-
fs.writeFileSync(`${key}.json`, JSON.stringify(fileObject[key], null, 2));
|
|
160
|
-
});
|
|
161
|
-
}
|
|
162
|
-
export function createDirIfNotExists(dirname) {
|
|
163
|
-
if (!fs.existsSync(dirname)) {
|
|
164
|
-
fs.mkdirSync(dirname);
|
|
165
|
-
}
|
|
166
|
-
}
|
|
167
|
-
|
|
168
|
-
// Run the function
|
|
169
|
-
|
|
170
|
-
export function exportTable(
|
|
171
|
-
newTables,
|
|
172
|
-
owner,
|
|
173
|
-
altOwner = 'dev',
|
|
174
|
-
rewriteDb,
|
|
175
|
-
live = false,
|
|
176
|
-
region = 'us-east-2',
|
|
177
|
-
profilesOnly = false
|
|
178
|
-
) {
|
|
179
|
-
createDirIfNotExists(dirPath);
|
|
180
|
-
const tables = live ? ['MainDatabase'] : ['MainDB'];
|
|
181
|
-
const dbRewrites = {};
|
|
182
|
-
const dbOg = {};
|
|
183
|
-
tables.map((table, index) => (dbOg[table] = newTables[index] || ''));
|
|
184
|
-
tables.map((table, index) => {
|
|
185
|
-
const rewriteDbIndex = rewriteDb[index];
|
|
186
|
-
if (rewriteDbIndex === 'MainDB') {
|
|
187
|
-
dbRewrites[`${projectName}-infra-${table}-${owner}`] =
|
|
188
|
-
`${rewriteDbIndex || table}`;
|
|
189
|
-
} else {
|
|
190
|
-
const newTable = tables[index].replace(
|
|
191
|
-
tables[index],
|
|
192
|
-
newTables[index] || tables[index]
|
|
193
|
-
);
|
|
194
|
-
dbRewrites[`${projectName}-infra-${table}-${owner}`] =
|
|
195
|
-
`${projectName}-infra-${newTable || table}-${altOwner || owner}`;
|
|
196
|
-
}
|
|
197
|
-
});
|
|
198
|
-
|
|
199
|
-
let dbTables = ['MainDB'];
|
|
200
|
-
|
|
201
|
-
if (live) {
|
|
202
|
-
dbTables = tables.map((table) => {
|
|
203
|
-
return `${projectName}-infra-${table}-${owner}`;
|
|
204
|
-
});
|
|
205
|
-
}
|
|
206
|
-
|
|
207
|
-
let client = undefined;
|
|
208
|
-
if (live) {
|
|
209
|
-
client = new DynamoDBClient({
|
|
210
|
-
region: region, // Replace with your AWS region
|
|
211
|
-
});
|
|
212
|
-
} else {
|
|
213
|
-
client = new DynamoDBClient({
|
|
214
|
-
region: region, // Replace with your AWS region
|
|
215
|
-
endpoint: 'http://localhost:8000', // The default local DynamoDB endpoint
|
|
216
|
-
credentials: {
|
|
217
|
-
accessKeyId: 'fakeAccessKeyId', // Use fake credentials for local DynamoDB
|
|
218
|
-
secretAccessKey: 'fakeSecretAccessKey',
|
|
219
|
-
},
|
|
220
|
-
});
|
|
221
|
-
}
|
|
222
|
-
exportDynamoTable(dbTables, client, dbRewrites, profilesOnly);
|
|
223
|
-
}
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
// Initialize the DynamoDB client
|
|
3
|
+
|
|
4
|
+
import { readFileSync } from 'fs';
|
|
5
|
+
import path from 'path';
|
|
6
|
+
|
|
7
|
+
import { DynamoDBClient, ScanCommand } from '@aws-sdk/client-dynamodb';
|
|
8
|
+
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
|
|
9
|
+
|
|
10
|
+
import { join } from 'node:path';
|
|
11
|
+
const packageJSON = JSON.parse(
|
|
12
|
+
readFileSync(join(process.cwd(), 'package.json'), 'utf8')
|
|
13
|
+
);
|
|
14
|
+
|
|
15
|
+
const awsObject = packageJSON['aws'] || {};
|
|
16
|
+
const projectName = packageJSON['name'] || 'project';
|
|
17
|
+
|
|
18
|
+
const awsProfile = awsObject['profile'] || 'default';
|
|
19
|
+
|
|
20
|
+
// TODO: Fix Copy Issues
|
|
21
|
+
const dirPath = './docker/seed'; // Folder path to delete files from
|
|
22
|
+
|
|
23
|
+
// Function to delete all files in the specified directory (ignores directories)
|
|
24
|
+
function deleteFilesInDir(dir) {
|
|
25
|
+
// Read all files and directories inside the directory
|
|
26
|
+
const files = fs.readdirSync(dir);
|
|
27
|
+
|
|
28
|
+
// Loop through each file and directory
|
|
29
|
+
files.forEach((file) => {
|
|
30
|
+
const filePath = path.join(dir, file); // Get full path of the file or directory
|
|
31
|
+
|
|
32
|
+
// Check if it's a file (not a directory)
|
|
33
|
+
const stats = fs.statSync(filePath);
|
|
34
|
+
|
|
35
|
+
if (stats.isFile()) {
|
|
36
|
+
// If it's a file, delete it
|
|
37
|
+
fs.unlinkSync(filePath);
|
|
38
|
+
}
|
|
39
|
+
});
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
// Function to scan the DynamoDB table and generate the desired JSON format
|
|
43
|
+
async function generateTableExport(tablename, client, profilesOnly = false) {
|
|
44
|
+
let params = {
|
|
45
|
+
TableName: tablename,
|
|
46
|
+
};
|
|
47
|
+
|
|
48
|
+
// This will hold all the data retrieved from DynamoDB
|
|
49
|
+
let allItems = [];
|
|
50
|
+
let lastEvaluatedKey = null;
|
|
51
|
+
|
|
52
|
+
// If there are more items (pagination in case of large tables)
|
|
53
|
+
do {
|
|
54
|
+
if (lastEvaluatedKey) {
|
|
55
|
+
params.ExclusiveStartKey = lastEvaluatedKey;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
try {
|
|
59
|
+
// Perform the scan operation
|
|
60
|
+
|
|
61
|
+
const data = await client.send(new ScanCommand(params));
|
|
62
|
+
allItems = allItems.concat(data.Items);
|
|
63
|
+
lastEvaluatedKey = data.LastEvaluatedKey; // Set the last evaluated key for pagination
|
|
64
|
+
} catch (error) {
|
|
65
|
+
console.error('Error scanning DynamoDB table:', error);
|
|
66
|
+
return;
|
|
67
|
+
}
|
|
68
|
+
} while (lastEvaluatedKey); // Continue scanning if there are more pages of results
|
|
69
|
+
|
|
70
|
+
// Format the data into the desired JSON structure
|
|
71
|
+
//
|
|
72
|
+
const formattedData = {
|
|
73
|
+
[tablename]: allItems
|
|
74
|
+
.filter(
|
|
75
|
+
(item) =>
|
|
76
|
+
!profilesOnly ||
|
|
77
|
+
!tablename.includes('Database') ||
|
|
78
|
+
unmarshall(item)['SK'].includes('PROFILE')
|
|
79
|
+
)
|
|
80
|
+
.map((item) => {
|
|
81
|
+
const formattedItem = unmarshall(item); // Unmarshall DynamoDB format to JS object
|
|
82
|
+
// Ensure the correct format: PutRequest -> Item
|
|
83
|
+
//if (tablename.includes('Database') && !formattedItem['SK'].includes('USER')) return undefined;
|
|
84
|
+
return {
|
|
85
|
+
PutRequest: {
|
|
86
|
+
Item: marshall(formattedItem), // Marshall JS object back to DynamoDB format
|
|
87
|
+
},
|
|
88
|
+
};
|
|
89
|
+
}),
|
|
90
|
+
};
|
|
91
|
+
return formattedData;
|
|
92
|
+
}
|
|
93
|
+
async function exportDynamoTable(
|
|
94
|
+
tables,
|
|
95
|
+
client,
|
|
96
|
+
dbRewrites,
|
|
97
|
+
profilesOnly = false,
|
|
98
|
+
strOut = './docker/seed'
|
|
99
|
+
) {
|
|
100
|
+
deleteFilesInDir(dirPath);
|
|
101
|
+
let output = await Promise.all(
|
|
102
|
+
tables.map(
|
|
103
|
+
async (tableName) =>
|
|
104
|
+
await generateTableExport(tableName, client, profilesOnly)
|
|
105
|
+
)
|
|
106
|
+
);
|
|
107
|
+
const fileName = `${strOut}/dynamodb-seed`;
|
|
108
|
+
|
|
109
|
+
const outputRes = {};
|
|
110
|
+
output.map((item) => {
|
|
111
|
+
const keys = Object.keys(item);
|
|
112
|
+
|
|
113
|
+
return keys.map((key) => {
|
|
114
|
+
const value = item[key].filter((item) => item !== undefined);
|
|
115
|
+
outputRes[key] = value;
|
|
116
|
+
return { value };
|
|
117
|
+
});
|
|
118
|
+
});
|
|
119
|
+
|
|
120
|
+
output = outputRes;
|
|
121
|
+
|
|
122
|
+
const fileObject = {};
|
|
123
|
+
const dbObject = {};
|
|
124
|
+
Object.keys(output).forEach((key) => {
|
|
125
|
+
const value = output[key];
|
|
126
|
+
|
|
127
|
+
if (value.length > 0) {
|
|
128
|
+
const dbKey = dbRewrites[key] || key;
|
|
129
|
+
|
|
130
|
+
dbObject[dbKey] = value;
|
|
131
|
+
}
|
|
132
|
+
});
|
|
133
|
+
|
|
134
|
+
let countTotal = 0;
|
|
135
|
+
|
|
136
|
+
Object.keys(dbObject).forEach((key) => {
|
|
137
|
+
let currentPosition = 0;
|
|
138
|
+
const numOfItems = 20;
|
|
139
|
+
const putItems = dbObject[key];
|
|
140
|
+
while (currentPosition < putItems.length) {
|
|
141
|
+
if (dbObject[key].length > numOfItems) {
|
|
142
|
+
const result = putItems.slice(
|
|
143
|
+
currentPosition,
|
|
144
|
+
currentPosition + numOfItems
|
|
145
|
+
);
|
|
146
|
+
fileObject[`${fileName}-${countTotal}`] = { [key]: result };
|
|
147
|
+
currentPosition += numOfItems;
|
|
148
|
+
countTotal += 1;
|
|
149
|
+
} else {
|
|
150
|
+
const result = putItems.slice(currentPosition, putItems.length);
|
|
151
|
+
fileObject[`${fileName}-${countTotal}`] = { [key]: result };
|
|
152
|
+
currentPosition += numOfItems;
|
|
153
|
+
countTotal += 1;
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
Object.keys(fileObject).forEach((key) => {
|
|
159
|
+
fs.writeFileSync(`${key}.json`, JSON.stringify(fileObject[key], null, 2));
|
|
160
|
+
});
|
|
161
|
+
}
|
|
162
|
+
export function createDirIfNotExists(dirname) {
|
|
163
|
+
if (!fs.existsSync(dirname)) {
|
|
164
|
+
fs.mkdirSync(dirname);
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
// Run the function
|
|
169
|
+
|
|
170
|
+
export function exportTable(
|
|
171
|
+
newTables,
|
|
172
|
+
owner,
|
|
173
|
+
altOwner = 'dev',
|
|
174
|
+
rewriteDb,
|
|
175
|
+
live = false,
|
|
176
|
+
region = 'us-east-2',
|
|
177
|
+
profilesOnly = false
|
|
178
|
+
) {
|
|
179
|
+
createDirIfNotExists(dirPath);
|
|
180
|
+
const tables = live ? ['MainDatabase'] : ['MainDB'];
|
|
181
|
+
const dbRewrites = {};
|
|
182
|
+
const dbOg = {};
|
|
183
|
+
tables.map((table, index) => (dbOg[table] = newTables[index] || ''));
|
|
184
|
+
tables.map((table, index) => {
|
|
185
|
+
const rewriteDbIndex = rewriteDb[index];
|
|
186
|
+
if (rewriteDbIndex === 'MainDB') {
|
|
187
|
+
dbRewrites[`${projectName}-infra-${table}-${owner}`] =
|
|
188
|
+
`${rewriteDbIndex || table}`;
|
|
189
|
+
} else {
|
|
190
|
+
const newTable = tables[index].replace(
|
|
191
|
+
tables[index],
|
|
192
|
+
newTables[index] || tables[index]
|
|
193
|
+
);
|
|
194
|
+
dbRewrites[`${projectName}-infra-${table}-${owner}`] =
|
|
195
|
+
`${projectName}-infra-${newTable || table}-${altOwner || owner}`;
|
|
196
|
+
}
|
|
197
|
+
});
|
|
198
|
+
|
|
199
|
+
let dbTables = ['MainDB'];
|
|
200
|
+
|
|
201
|
+
if (live) {
|
|
202
|
+
dbTables = tables.map((table) => {
|
|
203
|
+
return `${projectName}-infra-${table}-${owner}`;
|
|
204
|
+
});
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
let client = undefined;
|
|
208
|
+
if (live) {
|
|
209
|
+
client = new DynamoDBClient({
|
|
210
|
+
region: region, // Replace with your AWS region
|
|
211
|
+
});
|
|
212
|
+
} else {
|
|
213
|
+
client = new DynamoDBClient({
|
|
214
|
+
region: region, // Replace with your AWS region
|
|
215
|
+
endpoint: 'http://localhost:8000', // The default local DynamoDB endpoint
|
|
216
|
+
credentials: {
|
|
217
|
+
accessKeyId: 'fakeAccessKeyId', // Use fake credentials for local DynamoDB
|
|
218
|
+
secretAccessKey: 'fakeSecretAccessKey',
|
|
219
|
+
},
|
|
220
|
+
});
|
|
221
|
+
}
|
|
222
|
+
exportDynamoTable(dbTables, client, dbRewrites, profilesOnly);
|
|
223
|
+
}
|
|
@@ -1,30 +1,30 @@
|
|
|
1
|
-
import { program } from '../../app.js'
|
|
2
|
-
import { exportTable } from './generateSeed.js'
|
|
3
|
-
|
|
4
|
-
program
|
|
5
|
-
.command('generate')
|
|
6
|
-
.description('Generate seed data in ./docker/seed')
|
|
7
|
-
.argument('[<string>]', 'Tables to generateFrom')
|
|
8
|
-
.option('-o, --owner <owner>', 'Owner of the tables')
|
|
9
|
-
.option('-n, --newowner <newowner>', 'New owner of the tables')
|
|
10
|
-
.option('-p, --live', 'Pull from live')
|
|
11
|
-
.option('-r, --region <region>', 'Region to deploy to')
|
|
12
|
-
.option('-d, --db <db>', 'Databases to rewrite to')
|
|
13
|
-
.option('--profiles', 'Profiles only seed generation')
|
|
14
|
-
.action(async (str, options) => {
|
|
15
|
-
const owner = options.owner || 'dev'
|
|
16
|
-
const profilesOnly = options.profiles || false
|
|
17
|
-
const tables = (str || '').split(',')
|
|
18
|
-
let nameRedirect = []
|
|
19
|
-
if (options.db) nameRedirect = options.db.split(',')
|
|
20
|
-
|
|
21
|
-
exportTable(
|
|
22
|
-
tables,
|
|
23
|
-
owner,
|
|
24
|
-
options.newowner,
|
|
25
|
-
nameRedirect,
|
|
26
|
-
options.live,
|
|
27
|
-
options.region,
|
|
28
|
-
profilesOnly,
|
|
29
|
-
)
|
|
30
|
-
})
|
|
1
|
+
import { program } from '../../app.js'
|
|
2
|
+
import { exportTable } from './generateSeed.js'
|
|
3
|
+
|
|
4
|
+
program
|
|
5
|
+
.command('generate')
|
|
6
|
+
.description('Generate seed data in ./docker/seed')
|
|
7
|
+
.argument('[<string>]', 'Tables to generateFrom')
|
|
8
|
+
.option('-o, --owner <owner>', 'Owner of the tables')
|
|
9
|
+
.option('-n, --newowner <newowner>', 'New owner of the tables')
|
|
10
|
+
.option('-p, --live', 'Pull from live')
|
|
11
|
+
.option('-r, --region <region>', 'Region to deploy to')
|
|
12
|
+
.option('-d, --db <db>', 'Databases to rewrite to')
|
|
13
|
+
.option('--profiles', 'Profiles only seed generation')
|
|
14
|
+
.action(async (str, options) => {
|
|
15
|
+
const owner = options.owner || 'dev'
|
|
16
|
+
const profilesOnly = options.profiles || false
|
|
17
|
+
const tables = (str || '').split(',')
|
|
18
|
+
let nameRedirect = []
|
|
19
|
+
if (options.db) nameRedirect = options.db.split(',')
|
|
20
|
+
|
|
21
|
+
exportTable(
|
|
22
|
+
tables,
|
|
23
|
+
owner,
|
|
24
|
+
options.newowner,
|
|
25
|
+
nameRedirect,
|
|
26
|
+
options.live,
|
|
27
|
+
options.region,
|
|
28
|
+
profilesOnly,
|
|
29
|
+
)
|
|
30
|
+
})
|
|
@@ -1,12 +1,12 @@
|
|
|
1
|
-
import { spawn } from 'child_process';
|
|
2
|
-
|
|
3
|
-
import { program } from '../../app.js';
|
|
4
|
-
import { pruneRepo } from './prune.js';
|
|
5
|
-
|
|
6
|
-
program
|
|
7
|
-
.command('prune2')
|
|
8
|
-
.description('Prune local branches that are not on origin')
|
|
9
|
-
|
|
10
|
-
.action(() => {
|
|
11
|
-
pruneRepo();
|
|
12
|
-
});
|
|
1
|
+
import { spawn } from 'child_process';
|
|
2
|
+
|
|
3
|
+
import { program } from '../../app.js';
|
|
4
|
+
import { pruneRepo } from './prune.js';
|
|
5
|
+
|
|
6
|
+
program
|
|
7
|
+
.command('prune2')
|
|
8
|
+
.description('Prune local branches that are not on origin')
|
|
9
|
+
|
|
10
|
+
.action(() => {
|
|
11
|
+
pruneRepo();
|
|
12
|
+
});
|
|
@@ -1,50 +1,50 @@
|
|
|
1
|
-
import { execSync } from 'child_process';
|
|
2
|
-
|
|
3
|
-
const log = (...args) => console.log(...args);
|
|
4
|
-
const err = (...args) => console.error(...args);
|
|
5
|
-
export function pruneRepo() {
|
|
6
|
-
try {
|
|
7
|
-
// Fetch and prune remote branches
|
|
8
|
-
log('Fetching latest branch data from origin...');
|
|
9
|
-
execSync('git fetch --prune', { stdio: 'inherit' });
|
|
10
|
-
|
|
11
|
-
// Get local branches (trim whitespace)
|
|
12
|
-
const localBranches = execSync("git branch --format '%(refname:short)'")
|
|
13
|
-
.toString()
|
|
14
|
-
.trim()
|
|
15
|
-
.split('\n')
|
|
16
|
-
.map((branch) => branch.trim().replaceAll("'", ''));
|
|
17
|
-
|
|
18
|
-
// Get remote branches (remove "origin/" prefix)
|
|
19
|
-
const remoteBranches = execSync('git branch -r')
|
|
20
|
-
.toString()
|
|
21
|
-
.trim()
|
|
22
|
-
.split('\n')
|
|
23
|
-
.map((branch) => branch.replace(/^\s*origin\//, '').trim());
|
|
24
|
-
|
|
25
|
-
// Find local branches that are NOT in remote branches
|
|
26
|
-
const branchesToDelete = localBranches.filter(
|
|
27
|
-
(branch) => !remoteBranches.includes(branch)
|
|
28
|
-
);
|
|
29
|
-
if (branchesToDelete.length === 0) {
|
|
30
|
-
log('No local branches to delete.');
|
|
31
|
-
process.exit(0);
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
// Delete untracked local branches
|
|
35
|
-
log('Deleting local branches that are not on origin...');
|
|
36
|
-
branchesToDelete.forEach((branch) => {
|
|
37
|
-
log(`Attempting to delete: ${branch}`);
|
|
38
|
-
try {
|
|
39
|
-
execSync(`git branch -D ${branch}`, { stdio: 'inherit' });
|
|
40
|
-
log(`Deleted: ${branch}`);
|
|
41
|
-
} catch (error) {
|
|
42
|
-
error(`Failed to delete branch ${branch}:`, error.message);
|
|
43
|
-
}
|
|
44
|
-
});
|
|
45
|
-
|
|
46
|
-
log('Cleanup complete!');
|
|
47
|
-
} catch (error) {
|
|
48
|
-
err('An error occurred:', error.message);
|
|
49
|
-
}
|
|
50
|
-
}
|
|
1
|
+
import { execSync } from 'child_process';
|
|
2
|
+
|
|
3
|
+
const log = (...args) => console.log(...args);
|
|
4
|
+
const err = (...args) => console.error(...args);
|
|
5
|
+
export function pruneRepo() {
|
|
6
|
+
try {
|
|
7
|
+
// Fetch and prune remote branches
|
|
8
|
+
log('Fetching latest branch data from origin...');
|
|
9
|
+
execSync('git fetch --prune', { stdio: 'inherit' });
|
|
10
|
+
|
|
11
|
+
// Get local branches (trim whitespace)
|
|
12
|
+
const localBranches = execSync("git branch --format '%(refname:short)'")
|
|
13
|
+
.toString()
|
|
14
|
+
.trim()
|
|
15
|
+
.split('\n')
|
|
16
|
+
.map((branch) => branch.trim().replaceAll("'", ''));
|
|
17
|
+
|
|
18
|
+
// Get remote branches (remove "origin/" prefix)
|
|
19
|
+
const remoteBranches = execSync('git branch -r')
|
|
20
|
+
.toString()
|
|
21
|
+
.trim()
|
|
22
|
+
.split('\n')
|
|
23
|
+
.map((branch) => branch.replace(/^\s*origin\//, '').trim());
|
|
24
|
+
|
|
25
|
+
// Find local branches that are NOT in remote branches
|
|
26
|
+
const branchesToDelete = localBranches.filter(
|
|
27
|
+
(branch) => !remoteBranches.includes(branch)
|
|
28
|
+
);
|
|
29
|
+
if (branchesToDelete.length === 0) {
|
|
30
|
+
log('No local branches to delete.');
|
|
31
|
+
process.exit(0);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
// Delete untracked local branches
|
|
35
|
+
log('Deleting local branches that are not on origin...');
|
|
36
|
+
branchesToDelete.forEach((branch) => {
|
|
37
|
+
log(`Attempting to delete: ${branch}`);
|
|
38
|
+
try {
|
|
39
|
+
execSync(`git branch -D ${branch}`, { stdio: 'inherit' });
|
|
40
|
+
log(`Deleted: ${branch}`);
|
|
41
|
+
} catch (error) {
|
|
42
|
+
error(`Failed to delete branch ${branch}:`, error.message);
|
|
43
|
+
}
|
|
44
|
+
});
|
|
45
|
+
|
|
46
|
+
log('Cleanup complete!');
|
|
47
|
+
} catch (error) {
|
|
48
|
+
err('An error occurred:', error.message);
|
|
49
|
+
}
|
|
50
|
+
}
|
|
@@ -1,30 +1,30 @@
|
|
|
1
|
-
import { execSync } from 'child_process';
|
|
2
|
-
import fs from 'fs';
|
|
3
|
-
import path from 'path';
|
|
4
|
-
|
|
5
|
-
export function importAllDynamoBatches(folderPath, useRemote = false) {
|
|
6
|
-
const files = fs
|
|
7
|
-
.readdirSync(folderPath)
|
|
8
|
-
.filter(
|
|
9
|
-
(file) => file.startsWith('dynamodb-seed-') && file.endsWith('.json')
|
|
10
|
-
);
|
|
11
|
-
|
|
12
|
-
files.sort(); // Optional: ensures files run in order
|
|
13
|
-
|
|
14
|
-
for (const file of files) {
|
|
15
|
-
const fullPath = path.resolve(path.join(folderPath, file));
|
|
16
|
-
|
|
17
|
-
const baseCommand =
|
|
18
|
-
useRemote ?
|
|
19
|
-
`aws dynamodb batch-write-item --request-items file://${fullPath}`
|
|
20
|
-
: `aws dynamodb batch-write-item --endpoint-url http://localhost:8000 --request-items file://${fullPath}`;
|
|
21
|
-
try {
|
|
22
|
-
execSync(baseCommand, {
|
|
23
|
-
stdio: 'inherit',
|
|
24
|
-
});
|
|
25
|
-
} catch (err) {
|
|
26
|
-
console.error(`❌ Error with ${file}:`, err.message);
|
|
27
|
-
break; // or continue if you want to skip failed files
|
|
28
|
-
}
|
|
29
|
-
}
|
|
30
|
-
}
|
|
1
|
+
import { execSync } from 'child_process';
|
|
2
|
+
import fs from 'fs';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
|
|
5
|
+
export function importAllDynamoBatches(folderPath, useRemote = false) {
|
|
6
|
+
const files = fs
|
|
7
|
+
.readdirSync(folderPath)
|
|
8
|
+
.filter(
|
|
9
|
+
(file) => file.startsWith('dynamodb-seed-') && file.endsWith('.json')
|
|
10
|
+
);
|
|
11
|
+
|
|
12
|
+
files.sort(); // Optional: ensures files run in order
|
|
13
|
+
|
|
14
|
+
for (const file of files) {
|
|
15
|
+
const fullPath = path.resolve(path.join(folderPath, file));
|
|
16
|
+
|
|
17
|
+
const baseCommand =
|
|
18
|
+
useRemote ?
|
|
19
|
+
`aws dynamodb batch-write-item --request-items file://${fullPath}`
|
|
20
|
+
: `aws dynamodb batch-write-item --endpoint-url http://localhost:8000 --request-items file://${fullPath}`;
|
|
21
|
+
try {
|
|
22
|
+
execSync(baseCommand, {
|
|
23
|
+
stdio: 'inherit',
|
|
24
|
+
});
|
|
25
|
+
} catch (err) {
|
|
26
|
+
console.error(`❌ Error with ${file}:`, err.message);
|
|
27
|
+
break; // or continue if you want to skip failed files
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
}
|
|
@@ -1,12 +1,12 @@
|
|
|
1
|
-
import { spawn } from 'child_process'
|
|
2
|
-
|
|
3
|
-
import { program } from '../../app.js'
|
|
4
|
-
import { importAllDynamoBatches } from './import.js'
|
|
5
|
-
|
|
6
|
-
program
|
|
7
|
-
.command('seed')
|
|
8
|
-
.description('Execute eas build command')
|
|
9
|
-
.option('-p, --live', 'Pull from live')
|
|
10
|
-
.action(async (options) => {
|
|
11
|
-
importAllDynamoBatches('./docker/seed', options.live)
|
|
12
|
-
})
|
|
1
|
+
import { spawn } from 'child_process'
|
|
2
|
+
|
|
3
|
+
import { program } from '../../app.js'
|
|
4
|
+
import { importAllDynamoBatches } from './import.js'
|
|
5
|
+
|
|
6
|
+
program
|
|
7
|
+
.command('seed')
|
|
8
|
+
.description('Execute eas build command')
|
|
9
|
+
.option('-p, --live', 'Pull from live')
|
|
10
|
+
.action(async (options) => {
|
|
11
|
+
importAllDynamoBatches('./docker/seed', options.live)
|
|
12
|
+
})
|
|
@@ -1,38 +1,38 @@
|
|
|
1
|
-
import { spawn } from 'child_process';
|
|
2
|
-
|
|
3
|
-
import { program } from '../../app.js';
|
|
4
|
-
import { generateEnvValues } from '../../app.js';
|
|
5
|
-
import { STAGING_URL } from '../../config.js';
|
|
6
|
-
import os from 'node:os';
|
|
7
|
-
import path from 'node:path';
|
|
8
|
-
const homeBin = path.join(os.homedir(), 'bin');
|
|
9
|
-
const PATH = [homeBin, process.env.PATH].filter(Boolean).join(path.delimiter);
|
|
10
|
-
program
|
|
11
|
-
.command('submit')
|
|
12
|
-
.description('Execute eas build command')
|
|
13
|
-
.option('--android', 'Build to target preview profile')
|
|
14
|
-
.option('--ios', 'Build to target production profile')
|
|
15
|
-
.action((str, options) => {
|
|
16
|
-
//
|
|
17
|
-
|
|
18
|
-
let envObj = generateEnvValues(true, '', false);
|
|
19
|
-
|
|
20
|
-
envObj.NEXT_PUBLIC_API_URL = `${STAGING_URL}`;
|
|
21
|
-
envObj.NEXT_FORCE_PROD = 'true';
|
|
22
|
-
envObj.EAS_BUILD_PROFILE = 'production';
|
|
23
|
-
|
|
24
|
-
const command = `workspace app eas submit ${str.android ? `--platform android` : `--platform ios`}`;
|
|
25
|
-
|
|
26
|
-
const child = spawn('yarn', [command], {
|
|
27
|
-
stdio: 'inherit',
|
|
28
|
-
shell: true, // required if using shell-style commands or cross-platform support
|
|
29
|
-
env: {
|
|
30
|
-
...envObj,
|
|
31
|
-
PATH,
|
|
32
|
-
},
|
|
33
|
-
});
|
|
34
|
-
|
|
35
|
-
child.on('exit', (code) => {
|
|
36
|
-
process.exit(code ?? 0);
|
|
37
|
-
});
|
|
38
|
-
});
|
|
1
|
+
import { spawn } from 'child_process';
|
|
2
|
+
|
|
3
|
+
import { program } from '../../app.js';
|
|
4
|
+
import { generateEnvValues } from '../../app.js';
|
|
5
|
+
import { STAGING_URL } from '../../config.js';
|
|
6
|
+
import os from 'node:os';
|
|
7
|
+
import path from 'node:path';
|
|
8
|
+
const homeBin = path.join(os.homedir(), 'bin');
|
|
9
|
+
const PATH = [homeBin, process.env.PATH].filter(Boolean).join(path.delimiter);
|
|
10
|
+
program
|
|
11
|
+
.command('submit')
|
|
12
|
+
.description('Execute eas build command')
|
|
13
|
+
.option('--android', 'Build to target preview profile')
|
|
14
|
+
.option('--ios', 'Build to target production profile')
|
|
15
|
+
.action((str, options) => {
|
|
16
|
+
//
|
|
17
|
+
|
|
18
|
+
let envObj = generateEnvValues(true, '', false);
|
|
19
|
+
|
|
20
|
+
envObj.NEXT_PUBLIC_API_URL = `${STAGING_URL}`;
|
|
21
|
+
envObj.NEXT_FORCE_PROD = 'true';
|
|
22
|
+
envObj.EAS_BUILD_PROFILE = 'production';
|
|
23
|
+
|
|
24
|
+
const command = `workspace app eas submit ${str.android ? `--platform android` : `--platform ios`}`;
|
|
25
|
+
|
|
26
|
+
const child = spawn('yarn', [command], {
|
|
27
|
+
stdio: 'inherit',
|
|
28
|
+
shell: true, // required if using shell-style commands or cross-platform support
|
|
29
|
+
env: {
|
|
30
|
+
...envObj,
|
|
31
|
+
PATH,
|
|
32
|
+
},
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
child.on('exit', (code) => {
|
|
36
|
+
process.exit(code ?? 0);
|
|
37
|
+
});
|
|
38
|
+
});
|
|
@@ -1,39 +1,39 @@
|
|
|
1
|
-
import { execSync } from 'child_process'
|
|
2
|
-
|
|
3
|
-
export function getEASChannels() {
|
|
4
|
-
const channelsData = execSync(
|
|
5
|
-
'yarn eas channel:list --non-interactive --json',
|
|
6
|
-
{ stdio: ['pipe', 'pipe', 'ignore'] }, // Ignore stderr
|
|
7
|
-
).toString()
|
|
8
|
-
|
|
9
|
-
// Extract valid JSON from any extra noise
|
|
10
|
-
const jsonStart = channelsData.indexOf('[')
|
|
11
|
-
const jsonEnd = channelsData.lastIndexOf(']') + 1
|
|
12
|
-
|
|
13
|
-
if (jsonStart === -1 || jsonEnd === -1) {
|
|
14
|
-
throw new Error('JSON output not found in command output')
|
|
15
|
-
}
|
|
16
|
-
|
|
17
|
-
const jsonSlice = channelsData.slice(jsonStart, jsonEnd)
|
|
18
|
-
const channels = JSON.parse(jsonSlice)
|
|
19
|
-
return channels
|
|
20
|
-
}
|
|
21
|
-
|
|
22
|
-
export function getEASBranches() {
|
|
23
|
-
const channelsData = execSync(
|
|
24
|
-
'yarn eas branch:list --non-interactive --json',
|
|
25
|
-
{ stdio: ['pipe', 'pipe', 'ignore'] }, // Ignore stderr
|
|
26
|
-
).toString()
|
|
27
|
-
|
|
28
|
-
// Extract valid JSON from any extra noise
|
|
29
|
-
const jsonStart = channelsData.indexOf('[')
|
|
30
|
-
const jsonEnd = channelsData.lastIndexOf(']') + 1
|
|
31
|
-
|
|
32
|
-
if (jsonStart === -1 || jsonEnd === -1) {
|
|
33
|
-
throw new Error('JSON output not found in command output')
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
const jsonSlice = channelsData.slice(jsonStart, jsonEnd)
|
|
37
|
-
const channels = JSON.parse(jsonSlice)
|
|
38
|
-
return channels
|
|
39
|
-
}
|
|
1
|
+
import { execSync } from 'child_process'
|
|
2
|
+
|
|
3
|
+
export function getEASChannels() {
|
|
4
|
+
const channelsData = execSync(
|
|
5
|
+
'yarn eas channel:list --non-interactive --json',
|
|
6
|
+
{ stdio: ['pipe', 'pipe', 'ignore'] }, // Ignore stderr
|
|
7
|
+
).toString()
|
|
8
|
+
|
|
9
|
+
// Extract valid JSON from any extra noise
|
|
10
|
+
const jsonStart = channelsData.indexOf('[')
|
|
11
|
+
const jsonEnd = channelsData.lastIndexOf(']') + 1
|
|
12
|
+
|
|
13
|
+
if (jsonStart === -1 || jsonEnd === -1) {
|
|
14
|
+
throw new Error('JSON output not found in command output')
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
const jsonSlice = channelsData.slice(jsonStart, jsonEnd)
|
|
18
|
+
const channels = JSON.parse(jsonSlice)
|
|
19
|
+
return channels
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export function getEASBranches() {
|
|
23
|
+
const channelsData = execSync(
|
|
24
|
+
'yarn eas branch:list --non-interactive --json',
|
|
25
|
+
{ stdio: ['pipe', 'pipe', 'ignore'] }, // Ignore stderr
|
|
26
|
+
).toString()
|
|
27
|
+
|
|
28
|
+
// Extract valid JSON from any extra noise
|
|
29
|
+
const jsonStart = channelsData.indexOf('[')
|
|
30
|
+
const jsonEnd = channelsData.lastIndexOf(']') + 1
|
|
31
|
+
|
|
32
|
+
if (jsonStart === -1 || jsonEnd === -1) {
|
|
33
|
+
throw new Error('JSON output not found in command output')
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
const jsonSlice = channelsData.slice(jsonStart, jsonEnd)
|
|
37
|
+
const channels = JSON.parse(jsonSlice)
|
|
38
|
+
return channels
|
|
39
|
+
}
|
|
@@ -1,87 +1,87 @@
|
|
|
1
|
-
import { spawn } from 'child_process';
|
|
2
|
-
import inquirer from 'inquirer';
|
|
3
|
-
|
|
4
|
-
import { program } from '../../app.js';
|
|
5
|
-
import { STAGING_URL } from '../../config.js';
|
|
6
|
-
import { getEASBranches } from './eas.js';
|
|
7
|
-
|
|
8
|
-
const NEXT_PUBLIC_API_URL = STAGING_URL;
|
|
9
|
-
const NEXT_FORCE_PROD = true;
|
|
10
|
-
const EXPO_BUILD_PROFILE = 'production';
|
|
11
|
-
|
|
12
|
-
program
|
|
13
|
-
.command('update')
|
|
14
|
-
.description('Prune local branches that are not on origin')
|
|
15
|
-
.option('--auto', 'Auto run')
|
|
16
|
-
.action(async (str) => {
|
|
17
|
-
//
|
|
18
|
-
|
|
19
|
-
const { auto } = str;
|
|
20
|
-
|
|
21
|
-
if (auto) {
|
|
22
|
-
const fastChild = spawn(`yarn eas update --auto`, {
|
|
23
|
-
stdio: ['inherit', 'pipe', 'pipe'], // Read from terminal, but capture output
|
|
24
|
-
shell: true,
|
|
25
|
-
env: {
|
|
26
|
-
...process.env,
|
|
27
|
-
NEXT_FORCE_PROD,
|
|
28
|
-
NEXT_PUBLIC_API_URL,
|
|
29
|
-
EXPO_BUILD_PROFILE,
|
|
30
|
-
},
|
|
31
|
-
});
|
|
32
|
-
fastChild.stdout.on('data', (data) => {
|
|
33
|
-
process.stdout.write(data); // pipe to main stdout
|
|
34
|
-
});
|
|
35
|
-
|
|
36
|
-
fastChild.stderr.on('data', (data) => {
|
|
37
|
-
process.stderr.write(data); // pipe errors
|
|
38
|
-
});
|
|
39
|
-
fastChild.on('message', (data) => {});
|
|
40
|
-
return;
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
const branches = getEASBranches().map((branch) => branch.name);
|
|
44
|
-
|
|
45
|
-
const { branch } = await inquirer.prompt([
|
|
46
|
-
{
|
|
47
|
-
type: 'list',
|
|
48
|
-
name: 'branch',
|
|
49
|
-
message: 'Select branch to update',
|
|
50
|
-
choices: Object.keys(branches).map((key) => ({
|
|
51
|
-
name: branches[key],
|
|
52
|
-
value: branches[key],
|
|
53
|
-
})),
|
|
54
|
-
default: Object.keys(branches).map((key) => branches[key]),
|
|
55
|
-
},
|
|
56
|
-
]);
|
|
57
|
-
|
|
58
|
-
const { message } = await inquirer.prompt([
|
|
59
|
-
{
|
|
60
|
-
type: 'input',
|
|
61
|
-
name: 'message',
|
|
62
|
-
message: 'Enter a message for the update:',
|
|
63
|
-
default: 'No message provided', // Optional default
|
|
64
|
-
validate: (input) => input.trim() !== '' || 'Message cannot be empty.',
|
|
65
|
-
},
|
|
66
|
-
]);
|
|
67
|
-
const command = `yarn eas update --branch ${branch} --message "${message}"`;
|
|
68
|
-
const child = spawn(`${command} --non-interactive`, {
|
|
69
|
-
stdio: ['inherit', 'pipe', 'pipe'], // Read from terminal, but capture output
|
|
70
|
-
shell: true,
|
|
71
|
-
env: {
|
|
72
|
-
...process.env,
|
|
73
|
-
NEXT_FORCE_PROD,
|
|
74
|
-
NEXT_PUBLIC_API_URL,
|
|
75
|
-
EXPO_BUILD_PROFILE,
|
|
76
|
-
},
|
|
77
|
-
});
|
|
78
|
-
|
|
79
|
-
child.stdout.on('data', (data) => {
|
|
80
|
-
process.stdout.write(data); // pipe to main stdout
|
|
81
|
-
});
|
|
82
|
-
|
|
83
|
-
child.stderr.on('data', (data) => {
|
|
84
|
-
process.stderr.write(data); // pipe errors
|
|
85
|
-
});
|
|
86
|
-
child.on('message', (data) => {});
|
|
87
|
-
});
|
|
1
|
+
import { spawn } from 'child_process';
import inquirer from 'inquirer';

import { program } from '../../app.js';
import { STAGING_URL } from '../../config.js';
import { getEASBranches } from './eas.js';

// Environment overrides forced onto every `eas update` invocation.
const NEXT_PUBLIC_API_URL = STAGING_URL;
const NEXT_FORCE_PROD = true;
const EXPO_BUILD_PROFILE = 'production';

/**
 * Spawn a shell command with the EAS env overrides, forwarding its stdout
 * and stderr to the parent process. Shared by the --auto and interactive
 * paths (they previously duplicated this plumbing).
 *
 * @param {string} command - full shell command line to run
 * @returns {import('child_process').ChildProcess}
 */
const runEASUpdate = (command) => {
  const child = spawn(command, {
    stdio: ['inherit', 'pipe', 'pipe'], // Read from terminal, but capture output
    shell: true,
    env: {
      ...process.env,
      NEXT_FORCE_PROD,
      NEXT_PUBLIC_API_URL,
      EXPO_BUILD_PROFILE,
    },
  });
  child.stdout.on('data', (data) => {
    process.stdout.write(data); // pipe to main stdout
  });
  child.stderr.on('data', (data) => {
    process.stderr.write(data); // pipe errors
  });
  // NOTE: the previous no-op `child.on('message', ...)` handler was removed;
  // spawn() without an IPC channel never emits 'message'.
  return child;
};

/**
 * `update` command: publishes an EAS update, either automatically (--auto)
 * or to an interactively selected branch with a user-supplied message.
 *
 * FIX: description previously read "Prune local branches that are not on
 * origin", copy-pasted from the prune command.
 */
program
  .command('update')
  .description('Publish an EAS update to a selected branch')
  .option('--auto', 'Auto run')
  .action(async (str) => {
    const { auto } = str;

    if (auto) {
      runEASUpdate('yarn eas update --auto');
      return;
    }

    const branches = getEASBranches().map((branch) => branch.name);

    const { branch } = await inquirer.prompt([
      {
        type: 'list',
        name: 'branch',
        message: 'Select branch to update',
        // FIX: `branches` is already an array of names; the previous
        // Object.keys() indirection and array-valued `default` were
        // invalid for a single-select list prompt.
        choices: branches.map((name) => ({ name, value: name })),
        default: branches[0],
      },
    ]);

    const { message } = await inquirer.prompt([
      {
        type: 'input',
        name: 'message',
        message: 'Enter a message for the update:',
        default: 'No message provided', // Optional default
        validate: (input) => input.trim() !== '' || 'Message cannot be empty.',
      },
    ]);

    // Escape double quotes so the message cannot break out of the quoted
    // shell argument below.
    const safeMessage = message.replace(/"/g, '\\"');
    const command = `yarn eas update --branch ${branch} --message "${safeMessage}"`;
    runEASUpdate(`${command} --non-interactive`);
  });
|
package/lib/config.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
//export const STAGE_URL = 'https://77df2h2jif6xrvk6eexwrlkeke0xibqc.lambda-url.us-east-2.on.aws/'
|
|
2
|
-
export const STAGE_URL = 'https://p2po52rai264bk62bec2cmtzdi0rqhnw.lambda-url.us-east-2.on.aws/'
|
|
3
|
-
|
|
4
|
-
export const STAGING_URL = process.env.STAGE_URL || STAGE_URL
|
|
1
|
+
// Previous staging endpoint, kept for reference:
//   https://77df2h2jif6xrvk6eexwrlkeke0xibqc.lambda-url.us-east-2.on.aws/

// Default staging API endpoint (AWS Lambda function URL).
export const STAGE_URL = 'https://p2po52rai264bk62bec2cmtzdi0rqhnw.lambda-url.us-east-2.on.aws/'

// Effective staging URL: a non-empty STAGE_URL environment variable
// overrides the baked-in default.
export const STAGING_URL = process.env.STAGE_URL || STAGE_URL
|
package/lib/index.js
CHANGED
|
@@ -1,57 +1,57 @@
|
|
|
1
|
-
import 'dotenv/config';
|
|
2
|
-
import spawn from 'cross-spawn';
|
|
3
|
-
|
|
4
|
-
import { program } from './app.js';
|
|
5
|
-
import './commands/generate/index.js';
|
|
6
|
-
import './commands/prune/index.js';
|
|
7
|
-
import './commands/seed/index.js';
|
|
8
|
-
import './commands/submit/index.js';
|
|
9
|
-
import './commands/update/index.js';
|
|
10
|
-
import './commands/build-process/index.js';
|
|
11
|
-
import { getHasteConfig } from './commands/loadFromRoot.js';
|
|
12
|
-
import { executeCommandsIfWorkspaceAction } from './commands/build-process/test.js';
|
|
13
|
-
import os from 'node:os';
|
|
14
|
-
import path from 'node:path';
|
|
15
|
-
const homeBin = path.join(os.homedir(), 'bin');
|
|
16
|
-
const PATH = [homeBin, process.env.PATH].filter(Boolean).join(path.delimiter);
|
|
17
|
-
|
|
18
|
-
const { config } = getHasteConfig();
|
|
19
|
-
|
|
20
|
-
const workspacemap = config.workspace?.packageMaps || {};
|
|
21
|
-
const preactions = config.workspace?.preactions || [];
|
|
22
|
-
const envMapList = config.envMap ?? ['FAILURE'];
|
|
23
|
-
|
|
24
|
-
program.on('command:*', (operands) => {
|
|
25
|
-
const [cmd] = operands; // e.g. "destroy3"
|
|
26
|
-
const raw = program.rawArgs.slice(2); // after `node script.js`
|
|
27
|
-
const i = raw.indexOf(cmd);
|
|
28
|
-
const tokens = i >= 0 ? raw.slice(i) : operands;
|
|
29
|
-
|
|
30
|
-
const workspace = workspacemap[tokens[0]] || tokens[0];
|
|
31
|
-
let rest = tokens.slice(1);
|
|
32
|
-
|
|
33
|
-
const envKeys = Object.keys(process.env).filter((k) => k.startsWith('MONO_'));
|
|
34
|
-
|
|
35
|
-
let envObj = {};
|
|
36
|
-
|
|
37
|
-
envKeys.map((k) => {
|
|
38
|
-
envMapList.map((item) => {
|
|
39
|
-
envObj[k.replace('MONO', item)] = process.env[k];
|
|
40
|
-
});
|
|
41
|
-
});
|
|
42
|
-
|
|
43
|
-
const args = ['workspace', workspace, ...rest];
|
|
44
|
-
|
|
45
|
-
console.error(`Unknown command. Falling back to: yarn ${args.join(' ')}`);
|
|
46
|
-
executeCommandsIfWorkspaceAction(args, preactions, envObj);
|
|
47
|
-
const child = spawn('yarn', args, {
|
|
48
|
-
stdio: 'inherit',
|
|
49
|
-
shell: process.platform === 'win32',
|
|
50
|
-
env: { ...process.env, ...envObj, PATH },
|
|
51
|
-
});
|
|
52
|
-
child.on('exit', (code) => {
|
|
53
|
-
console.log('Child process exited with code:', code);
|
|
54
|
-
process.exitCode = code ?? 1;
|
|
55
|
-
});
|
|
56
|
-
});
|
|
57
|
-
program.parse();
|
|
1
|
+
import 'dotenv/config';
|
|
2
|
+
import spawn from 'cross-spawn';
|
|
3
|
+
|
|
4
|
+
import { program } from './app.js';
|
|
5
|
+
import './commands/generate/index.js';
|
|
6
|
+
import './commands/prune/index.js';
|
|
7
|
+
import './commands/seed/index.js';
|
|
8
|
+
import './commands/submit/index.js';
|
|
9
|
+
import './commands/update/index.js';
|
|
10
|
+
import './commands/build-process/index.js';
|
|
11
|
+
import { getHasteConfig } from './commands/loadFromRoot.js';
|
|
12
|
+
import { executeCommandsIfWorkspaceAction } from './commands/build-process/test.js';
|
|
13
|
+
import os from 'node:os';
|
|
14
|
+
import path from 'node:path';
|
|
15
|
+
const homeBin = path.join(os.homedir(), 'bin');
|
|
16
|
+
const PATH = [homeBin, process.env.PATH].filter(Boolean).join(path.delimiter);
|
|
17
|
+
|
|
18
|
+
const { config } = getHasteConfig();
|
|
19
|
+
|
|
20
|
+
const workspacemap = config.workspace?.packageMaps || {};
|
|
21
|
+
const preactions = config.workspace?.preactions || [];
|
|
22
|
+
const envMapList = config.envMap ?? ['FAILURE'];
|
|
23
|
+
|
|
24
|
+
program.on('command:*', (operands) => {
|
|
25
|
+
const [cmd] = operands; // e.g. "destroy3"
|
|
26
|
+
const raw = program.rawArgs.slice(2); // after `node script.js`
|
|
27
|
+
const i = raw.indexOf(cmd);
|
|
28
|
+
const tokens = i >= 0 ? raw.slice(i) : operands;
|
|
29
|
+
|
|
30
|
+
const workspace = workspacemap[tokens[0]] || tokens[0];
|
|
31
|
+
let rest = tokens.slice(1);
|
|
32
|
+
|
|
33
|
+
const envKeys = Object.keys(process.env).filter((k) => k.startsWith('MONO_'));
|
|
34
|
+
|
|
35
|
+
let envObj = {};
|
|
36
|
+
|
|
37
|
+
envKeys.map((k) => {
|
|
38
|
+
envMapList.map((item) => {
|
|
39
|
+
envObj[k.replace('MONO', item)] = process.env[k];
|
|
40
|
+
});
|
|
41
|
+
});
|
|
42
|
+
|
|
43
|
+
const args = ['workspace', workspace, ...rest];
|
|
44
|
+
|
|
45
|
+
console.error(`Unknown command. Falling back to: yarn ${args.join(' ')}`);
|
|
46
|
+
executeCommandsIfWorkspaceAction(args, preactions, envObj);
|
|
47
|
+
const child = spawn('yarn', args, {
|
|
48
|
+
stdio: 'inherit',
|
|
49
|
+
shell: process.platform === 'win32',
|
|
50
|
+
env: { ...process.env, ...envObj, PATH },
|
|
51
|
+
});
|
|
52
|
+
child.on('exit', (code) => {
|
|
53
|
+
console.log('Child process exited with code:', code);
|
|
54
|
+
process.exitCode = code ?? 1;
|
|
55
|
+
});
|
|
56
|
+
});
|
|
57
|
+
program.parse();
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@mono-labs/cli",
|
|
3
|
-
"version": "0.0.
|
|
3
|
+
"version": "0.0.170",
|
|
4
4
|
"description": "A CLI tool for building and deploying projects",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "index.js",
|
|
@@ -55,6 +55,5 @@
|
|
|
55
55
|
"lib/",
|
|
56
56
|
"types.d.ts",
|
|
57
57
|
"README.md"
|
|
58
|
-
]
|
|
59
|
-
"packageManager": "yarn@4.5.0"
|
|
58
|
+
]
|
|
60
59
|
}
|