@mono-labs/cli 0.0.167 → 0.0.172

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/app.js CHANGED
@@ -1,49 +1,49 @@
- import { Command } from 'commander';
-
- import { STAGING_URL } from './config.js';
-
- import fs from 'node:fs';
- import { fileURLToPath } from 'node:url';
- import { dirname, join } from 'node:path';
-
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = dirname(__filename);
-
- const pkgPath = join(__dirname, '../', 'package.json');
- const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
-
- const version = pkg.version || '0.0.1';
- export const program = new Command();
-
- const getBinFromPackageJSON = () => {
-   const keyList = Object.keys(pkg.bin);
-   if (keyList.length === 0) {
-     throw new Error('No bin field found in package.json');
-   }
-   return keyList[0];
- };
-
- const programName = getBinFromPackageJSON();
-
- program
-   .name(programName)
-   .description(pkg.description || '')
-   .version(version);
- const NEXT_PUBLIC_API_URL =
-   (process.env.NEXT_PUBLIC_API_URL &&
-     process.env.NEXT_PUBLIC_API_URL.length > 0) ||
-   STAGING_URL;
-
- export const generateEnvValues = (
-   forceProd = false,
-   ngrokUrl = 'localhost:3000',
-   useAtlas = false
- ) => {
-   return {
-     ...process.env,
-     NEXT_PUBLIC_API_URL,
-     NEXT_FORCE_PROD: forceProd,
-     EXPO_PRIVATE_API_URL: ngrokUrl,
-     EXPO_UNSTABLE_ATLAS: useAtlas,
-   };
- };
+ import { Command } from 'commander';
+
+ import { STAGING_URL } from './config.js';
+
+ import fs from 'node:fs';
+ import { fileURLToPath } from 'node:url';
+ import { dirname, join } from 'node:path';
+
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = dirname(__filename);
+
+ const pkgPath = join(__dirname, '../', 'package.json');
+ const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
+
+ const version = pkg.version || '0.0.1';
+ export const program = new Command();
+
+ const getBinFromPackageJSON = () => {
+   const keyList = Object.keys(pkg.bin);
+   if (keyList.length === 0) {
+     throw new Error('No bin field found in package.json');
+   }
+   return keyList[0];
+ };
+
+ const programName = getBinFromPackageJSON();
+
+ program
+   .name(programName)
+   .description(pkg.description || '')
+   .version(version);
+ const NEXT_PUBLIC_API_URL =
+   (process.env.NEXT_PUBLIC_API_URL &&
+     process.env.NEXT_PUBLIC_API_URL.length > 0) ||
+   STAGING_URL;
+
+ export const generateEnvValues = (
+   forceProd = false,
+   ngrokUrl = 'localhost:3000',
+   useAtlas = false
+ ) => {
+   return {
+     ...process.env,
+     NEXT_PUBLIC_API_URL,
+     NEXT_FORCE_PROD: forceProd,
+     EXPO_PRIVATE_API_URL: ngrokUrl,
+     EXPO_UNSTABLE_ATLAS: useAtlas,
+   };
+ };
@@ -50,7 +50,7 @@ export function buildCommands(files) {

    let current = program
      .command(commandName)
-     .description(configObject.description || 'Haste command');
+     .description(configObject.description || 'Mono command');
    const argInfo = configObject.argument;
    // Argument
    if (argInfo) {
@@ -66,7 +66,7 @@ export async function runHasteCommand(configObject, options = {}) {
    const actions = configObject.actions ?? [];

    console.log(
-     `→ Executing haste command: ${configObject.name || 'Unnamed Command'}`
+     `→ Executing mono command: ${configObject.name || 'Unnamed Command'}`
    );
    console.log(`→ Using AWS profile: ${awsProfile}`);
    console.log(`→ Using environment: ${options.stage ? 'stage' : 'dev'}`);
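The first hunk removes and re-adds lines 1–49 of app.js with identical content as rendered; the visible changes in this file are the 'Haste'/'haste' → 'Mono'/'mono' string renames. One detail worth seeing in action: NEXT_PUBLIC_API_URL is computed with an && / || chain, so when the environment variable is set and non-empty the expression collapses to the boolean true rather than the URL string, and generateEnvValues then passes that boolean through to the child environment. A minimal sketch of the evaluation, with STAGING_URL stubbed since ./config.js is not included in this diff:

// Sketch only — not part of the package. STAGING_URL is a placeholder here;
// the real value is exported from ./config.js.
const STAGING_URL = 'https://staging.example.com';

// Same shape as the expression in app.js, wrapped in a function so both
// branches are visible.
const resolveApiUrl = (envValue) =>
  (envValue && envValue.length > 0) || STAGING_URL;

console.log(resolveApiUrl(undefined)); // 'https://staging.example.com'
console.log(resolveApiUrl('https://api.example.com')); // true (a boolean, not the URL)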
@@ -1,223 +1,223 @@
- import fs from 'fs';
- // Initialize the DynamoDB client
-
- import { readFileSync } from 'fs';
- import path from 'path';
-
- import { DynamoDBClient, ScanCommand } from '@aws-sdk/client-dynamodb';
- import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
-
- import { join } from 'node:path';
- const packageJSON = JSON.parse(
-   readFileSync(join(process.cwd(), 'package.json'), 'utf8')
- );
-
- const awsObject = packageJSON['aws'] || {};
- const projectName = packageJSON['name'] || 'project';
-
- const awsProfile = awsObject['profile'] || 'default';
-
- // TODO: Fix Copy Issues
- const dirPath = './docker/seed'; // Folder path to delete files from
-
- // Function to delete all files in the specified directory (ignores directories)
- function deleteFilesInDir(dir) {
-   // Read all files and directories inside the directory
-   const files = fs.readdirSync(dir);
-
-   // Loop through each file and directory
-   files.forEach((file) => {
-     const filePath = path.join(dir, file); // Get full path of the file or directory
-
-     // Check if it's a file (not a directory)
-     const stats = fs.statSync(filePath);
-
-     if (stats.isFile()) {
-       // If it's a file, delete it
-       fs.unlinkSync(filePath);
-     }
-   });
- }
-
- // Function to scan the DynamoDB table and generate the desired JSON format
- async function generateTableExport(tablename, client, profilesOnly = false) {
-   let params = {
-     TableName: tablename,
-   };
-
-   // This will hold all the data retrieved from DynamoDB
-   let allItems = [];
-   let lastEvaluatedKey = null;
-
-   // If there are more items (pagination in case of large tables)
-   do {
-     if (lastEvaluatedKey) {
-       params.ExclusiveStartKey = lastEvaluatedKey;
-     }
-
-     try {
-       // Perform the scan operation
-
-       const data = await client.send(new ScanCommand(params));
-       allItems = allItems.concat(data.Items);
-       lastEvaluatedKey = data.LastEvaluatedKey; // Set the last evaluated key for pagination
-     } catch (error) {
-       console.error('Error scanning DynamoDB table:', error);
-       return;
-     }
-   } while (lastEvaluatedKey); // Continue scanning if there are more pages of results
-
-   // Format the data into the desired JSON structure
-   //
-   const formattedData = {
-     [tablename]: allItems
-       .filter(
-         (item) =>
-           !profilesOnly ||
-           !tablename.includes('Database') ||
-           unmarshall(item)['SK'].includes('PROFILE')
-       )
-       .map((item) => {
-         const formattedItem = unmarshall(item); // Unmarshall DynamoDB format to JS object
-         // Ensure the correct format: PutRequest -> Item
-         //if (tablename.includes('Database') && !formattedItem['SK'].includes('USER')) return undefined;
-         return {
-           PutRequest: {
-             Item: marshall(formattedItem), // Marshall JS object back to DynamoDB format
-           },
-         };
-       }),
-   };
-   return formattedData;
- }
- async function exportDynamoTable(
-   tables,
-   client,
-   dbRewrites,
-   profilesOnly = false,
-   strOut = './docker/seed'
- ) {
-   deleteFilesInDir(dirPath);
-   let output = await Promise.all(
-     tables.map(
-       async (tableName) =>
-         await generateTableExport(tableName, client, profilesOnly)
-     )
-   );
-   const fileName = `${strOut}/dynamodb-seed`;
-
-   const outputRes = {};
-   output.map((item) => {
-     const keys = Object.keys(item);
-
-     return keys.map((key) => {
-       const value = item[key].filter((item) => item !== undefined);
-       outputRes[key] = value;
-       return { value };
-     });
-   });
-
-   output = outputRes;
-
-   const fileObject = {};
-   const dbObject = {};
-   Object.keys(output).forEach((key) => {
-     const value = output[key];
-
-     if (value.length > 0) {
-       const dbKey = dbRewrites[key] || key;
-
-       dbObject[dbKey] = value;
-     }
-   });
-
-   let countTotal = 0;
-
-   Object.keys(dbObject).forEach((key) => {
-     let currentPosition = 0;
-     const numOfItems = 20;
-     const putItems = dbObject[key];
-     while (currentPosition < putItems.length) {
-       if (dbObject[key].length > numOfItems) {
-         const result = putItems.slice(
-           currentPosition,
-           currentPosition + numOfItems
-         );
-         fileObject[`${fileName}-${countTotal}`] = { [key]: result };
-         currentPosition += numOfItems;
-         countTotal += 1;
-       } else {
-         const result = putItems.slice(currentPosition, putItems.length);
-         fileObject[`${fileName}-${countTotal}`] = { [key]: result };
-         currentPosition += numOfItems;
-         countTotal += 1;
-       }
-     }
-   });
-
-   Object.keys(fileObject).forEach((key) => {
-     fs.writeFileSync(`${key}.json`, JSON.stringify(fileObject[key], null, 2));
-   });
- }
- export function createDirIfNotExists(dirname) {
-   if (!fs.existsSync(dirname)) {
-     fs.mkdirSync(dirname);
-   }
- }
-
- // Run the function
-
- export function exportTable(
-   newTables,
-   owner,
-   altOwner = 'dev',
-   rewriteDb,
-   live = false,
-   region = 'us-east-2',
-   profilesOnly = false
- ) {
-   createDirIfNotExists(dirPath);
-   const tables = live ? ['MainDatabase'] : ['MainDB'];
-   const dbRewrites = {};
-   const dbOg = {};
-   tables.map((table, index) => (dbOg[table] = newTables[index] || ''));
-   tables.map((table, index) => {
-     const rewriteDbIndex = rewriteDb[index];
-     if (rewriteDbIndex === 'MainDB') {
-       dbRewrites[`${projectName}-infra-${table}-${owner}`] =
-         `${rewriteDbIndex || table}`;
-     } else {
-       const newTable = tables[index].replace(
-         tables[index],
-         newTables[index] || tables[index]
-       );
-       dbRewrites[`${projectName}-infra-${table}-${owner}`] =
-         `${projectName}-infra-${newTable || table}-${altOwner || owner}`;
-     }
-   });
-
-   let dbTables = ['MainDB'];
-
-   if (live) {
-     dbTables = tables.map((table) => {
-       return `${projectName}-infra-${table}-${owner}`;
-     });
-   }
-
-   let client = undefined;
-   if (live) {
-     client = new DynamoDBClient({
-       region: region, // Replace with your AWS region
-     });
-   } else {
-     client = new DynamoDBClient({
-       region: region, // Replace with your AWS region
-       endpoint: 'http://localhost:8000', // The default local DynamoDB endpoint
-       credentials: {
-         accessKeyId: 'fakeAccessKeyId', // Use fake credentials for local DynamoDB
-         secretAccessKey: 'fakeSecretAccessKey',
-       },
-     });
-   }
-   exportDynamoTable(dbTables, client, dbRewrites, profilesOnly);
- }
+ import fs from 'fs';
+ // Initialize the DynamoDB client
+
+ import { readFileSync } from 'fs';
+ import path from 'path';
+
+ import { DynamoDBClient, ScanCommand } from '@aws-sdk/client-dynamodb';
+ import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
+
+ import { join } from 'node:path';
+ const packageJSON = JSON.parse(
+   readFileSync(join(process.cwd(), 'package.json'), 'utf8')
+ );
+
+ const awsObject = packageJSON['aws'] || {};
+ const projectName = packageJSON['name'] || 'project';
+
+ const awsProfile = awsObject['profile'] || 'default';
+
+ // TODO: Fix Copy Issues
+ const dirPath = './docker/seed'; // Folder path to delete files from
+
+ // Function to delete all files in the specified directory (ignores directories)
+ function deleteFilesInDir(dir) {
+   // Read all files and directories inside the directory
+   const files = fs.readdirSync(dir);
+
+   // Loop through each file and directory
+   files.forEach((file) => {
+     const filePath = path.join(dir, file); // Get full path of the file or directory
+
+     // Check if it's a file (not a directory)
+     const stats = fs.statSync(filePath);
+
+     if (stats.isFile()) {
+       // If it's a file, delete it
+       fs.unlinkSync(filePath);
+     }
+   });
+ }
+
+ // Function to scan the DynamoDB table and generate the desired JSON format
+ async function generateTableExport(tablename, client, profilesOnly = false) {
+   let params = {
+     TableName: tablename,
+   };
+
+   // This will hold all the data retrieved from DynamoDB
+   let allItems = [];
+   let lastEvaluatedKey = null;
+
+   // If there are more items (pagination in case of large tables)
+   do {
+     if (lastEvaluatedKey) {
+       params.ExclusiveStartKey = lastEvaluatedKey;
+     }
+
+     try {
+       // Perform the scan operation
+
+       const data = await client.send(new ScanCommand(params));
+       allItems = allItems.concat(data.Items);
+       lastEvaluatedKey = data.LastEvaluatedKey; // Set the last evaluated key for pagination
+     } catch (error) {
+       console.error('Error scanning DynamoDB table:', error);
+       return;
+     }
+   } while (lastEvaluatedKey); // Continue scanning if there are more pages of results
+
+   // Format the data into the desired JSON structure
+   //
+   const formattedData = {
+     [tablename]: allItems
+       .filter(
+         (item) =>
+           !profilesOnly ||
+           !tablename.includes('Database') ||
+           unmarshall(item)['SK'].includes('PROFILE')
+       )
+       .map((item) => {
+         const formattedItem = unmarshall(item); // Unmarshall DynamoDB format to JS object
+         // Ensure the correct format: PutRequest -> Item
+         //if (tablename.includes('Database') && !formattedItem['SK'].includes('USER')) return undefined;
+         return {
+           PutRequest: {
+             Item: marshall(formattedItem), // Marshall JS object back to DynamoDB format
+           },
+         };
+       }),
+   };
+   return formattedData;
+ }
+ async function exportDynamoTable(
+   tables,
+   client,
+   dbRewrites,
+   profilesOnly = false,
+   strOut = './docker/seed'
+ ) {
+   deleteFilesInDir(dirPath);
+   let output = await Promise.all(
+     tables.map(
+       async (tableName) =>
+         await generateTableExport(tableName, client, profilesOnly)
+     )
+   );
+   const fileName = `${strOut}/dynamodb-seed`;
+
+   const outputRes = {};
+   output.map((item) => {
+     const keys = Object.keys(item);
+
+     return keys.map((key) => {
+       const value = item[key].filter((item) => item !== undefined);
+       outputRes[key] = value;
+       return { value };
+     });
+   });
+
+   output = outputRes;
+
+   const fileObject = {};
+   const dbObject = {};
+   Object.keys(output).forEach((key) => {
+     const value = output[key];
+
+     if (value.length > 0) {
+       const dbKey = dbRewrites[key] || key;
+
+       dbObject[dbKey] = value;
+     }
+   });
+
+   let countTotal = 0;
+
+   Object.keys(dbObject).forEach((key) => {
+     let currentPosition = 0;
+     const numOfItems = 20;
+     const putItems = dbObject[key];
+     while (currentPosition < putItems.length) {
+       if (dbObject[key].length > numOfItems) {
+         const result = putItems.slice(
+           currentPosition,
+           currentPosition + numOfItems
+         );
+         fileObject[`${fileName}-${countTotal}`] = { [key]: result };
+         currentPosition += numOfItems;
+         countTotal += 1;
+       } else {
+         const result = putItems.slice(currentPosition, putItems.length);
+         fileObject[`${fileName}-${countTotal}`] = { [key]: result };
+         currentPosition += numOfItems;
+         countTotal += 1;
+       }
+     }
+   });
+
+   Object.keys(fileObject).forEach((key) => {
+     fs.writeFileSync(`${key}.json`, JSON.stringify(fileObject[key], null, 2));
+   });
+ }
+ export function createDirIfNotExists(dirname) {
+   if (!fs.existsSync(dirname)) {
+     fs.mkdirSync(dirname);
+   }
+ }
+
+ // Run the function
+
+ export function exportTable(
+   newTables,
+   owner,
+   altOwner = 'dev',
+   rewriteDb,
+   live = false,
+   region = 'us-east-2',
+   profilesOnly = false
+ ) {
+   createDirIfNotExists(dirPath);
+   const tables = live ? ['MainDatabase'] : ['MainDB'];
+   const dbRewrites = {};
+   const dbOg = {};
+   tables.map((table, index) => (dbOg[table] = newTables[index] || ''));
+   tables.map((table, index) => {
+     const rewriteDbIndex = rewriteDb[index];
+     if (rewriteDbIndex === 'MainDB') {
+       dbRewrites[`${projectName}-infra-${table}-${owner}`] =
+         `${rewriteDbIndex || table}`;
+     } else {
+       const newTable = tables[index].replace(
+         tables[index],
+         newTables[index] || tables[index]
+       );
+       dbRewrites[`${projectName}-infra-${table}-${owner}`] =
+         `${projectName}-infra-${newTable || table}-${altOwner || owner}`;
+     }
+   });
+
+   let dbTables = ['MainDB'];
+
+   if (live) {
+     dbTables = tables.map((table) => {
+       return `${projectName}-infra-${table}-${owner}`;
+     });
+   }
+
+   let client = undefined;
+   if (live) {
+     client = new DynamoDBClient({
+       region: region, // Replace with your AWS region
+     });
+   } else {
+     client = new DynamoDBClient({
+       region: region, // Replace with your AWS region
+       endpoint: 'http://localhost:8000', // The default local DynamoDB endpoint
+       credentials: {
+         accessKeyId: 'fakeAccessKeyId', // Use fake credentials for local DynamoDB
+         secretAccessKey: 'fakeSecretAccessKey',
+       },
+     });
+   }
+   exportDynamoTable(dbTables, client, dbRewrites, profilesOnly);
+ }
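This hunk is the seed generator that the generate command below imports as ./generateSeed.js; as in app.js, the old and new sides are identical as rendered. A hedged usage sketch (not shipped in the package) of the exported exportTable on its live = false path, which targets the DynamoDB Local client hard-coded above — all argument values here are illustrative:

// Export the local MainDB table into ./docker/seed as chunked seed files.
// With live = false the client hits DynamoDB Local on localhost:8000 with
// the fake credentials shown above.
import { exportTable } from './generateSeed.js';

exportTable(
  ['MainDB'], // newTables: replacement names, matched by position to tables
  'dev', // owner: suffix for the `${projectName}-infra-${table}-${owner}` keys
  'dev', // altOwner: owner suffix used in rewritten table names
  ['MainDB'], // rewriteDb: rename input; on the local path the rewrite map's
  // keys never match the scanned table name, so it is effectively unused
  false, // live: false → scan DynamoDB Local rather than AWS
  'us-east-2', // region
  false // profilesOnly: only filters tables whose name contains 'Database'
);
// Clears any existing files in ./docker/seed, then writes
// ./docker/seed/dynamodb-seed-0.json, dynamodb-seed-1.json, … with at most
// 20 PutRequests per file (numOfItems = 20, under DynamoDB's 25-item
// BatchWriteItem cap).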
@@ -1,30 +1,30 @@
- import { program } from '../../app.js'
- import { exportTable } from './generateSeed.js'
-
- program
-   .command('generate')
-   .description('Generate seed data in ./docker/seed')
-   .argument('[<string>]', 'Tables to generateFrom')
-   .option('-o, --owner <owner>', 'Owner of the tables')
-   .option('-n, --newowner <newowner>', 'New owner of the tables')
-   .option('-p, --live', 'Pull from live')
-   .option('-r, --region <region>', 'Region to deploy to')
-   .option('-d, --db <db>', 'Databases to rewrite to')
-   .option('--profiles', 'Profiles only seed generation')
-   .action(async (str, options) => {
-     const owner = options.owner || 'dev'
-     const profilesOnly = options.profiles || false
-     const tables = (str || '').split(',')
-     let nameRedirect = []
-     if (options.db) nameRedirect = options.db.split(',')
-
-     exportTable(
-       tables,
-       owner,
-       options.newowner,
-       nameRedirect,
-       options.live,
-       options.region,
-       profilesOnly,
-     )
-   })
+ import { program } from '../../app.js'
+ import { exportTable } from './generateSeed.js'
+
+ program
+   .command('generate')
+   .description('Generate seed data in ./docker/seed')
+   .argument('[<string>]', 'Tables to generateFrom')
+   .option('-o, --owner <owner>', 'Owner of the tables')
+   .option('-n, --newowner <newowner>', 'New owner of the tables')
+   .option('-p, --live', 'Pull from live')
+   .option('-r, --region <region>', 'Region to deploy to')
+   .option('-d, --db <db>', 'Databases to rewrite to')
+   .option('--profiles', 'Profiles only seed generation')
+   .action(async (str, options) => {
+     const owner = options.owner || 'dev'
+     const profilesOnly = options.profiles || false
+     const tables = (str || '').split(',')
+     let nameRedirect = []
+     if (options.db) nameRedirect = options.db.split(',')
+
+     exportTable(
+       tables,
+       owner,
+       options.newowner,
+       nameRedirect,
+       options.live,
+       options.region,
+       profilesOnly,
+     )
+   })
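This hunk, again an unchanged removal/re-add, is the generate command that wires the CLI options into exportTable. A hedged sketch of how one invocation maps onto that call — the bin name is whatever package.json's bin field declares (see getBinFromPackageJSON in app.js), so it appears here as a placeholder:

// Hypothetical invocation (bin name and all values are placeholders):
//
//   <cli> generate MainDB --owner alice --newowner bob --live --db MainDB
//
// commander parses the positional string and the flags, and the action
// above effectively calls:
exportTable(
  ['MainDB'], // the comma-split positional argument
  'alice', // options.owner ('dev' when --owner is omitted)
  'bob', // options.newowner
  ['MainDB'], // the comma-split --db value
  true, // options.live
  undefined, // options.region omitted → exportTable's 'us-east-2' default applies
  false // profilesOnly (--profiles not passed)
);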
@@ -1,12 +1,12 @@
- import { spawn } from 'child_process';
-
- import { program } from '../../app.js';
- import { pruneRepo } from './prune.js';
-
- program
-   .command('prune2')
-   .description('Prune local branches that are not on origin')
-
-   .action(() => {
-     pruneRepo();
-   });
+ import { spawn } from 'child_process';
+
+ import { program } from '../../app.js';
+ import { pruneRepo } from './prune.js';
+
+ program
+   .command('prune2')
+   .description('Prune local branches that are not on origin')
+
+   .action(() => {
+     pruneRepo();
+   });
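The prune2 module above imports spawn without using it and delegates all work to pruneRepo from ./prune.js, which this diff does not include. Purely as a plausible sketch of what "prune local branches that are not on origin" could mean, assuming it shells out to git — none of the names or flags below come from the package:

import { execSync } from 'child_process';

// Hypothetical pruneRepo: drop stale remote-tracking refs, then delete
// local branches whose upstream is marked "gone".
export function pruneRepo() {
  execSync('git fetch --prune', { stdio: 'inherit' });

  // List local branches with their upstream tracking status, e.g.
  // "feature/x [gone]" for branches whose remote branch was deleted.
  const branches = execSync(
    'git for-each-ref --format="%(refname:short) %(upstream:track)" refs/heads',
    { encoding: 'utf8' }
  );

  branches
    .split('\n')
    .filter((line) => line.includes('[gone]'))
    .map((line) => line.split(' ')[0])
    .forEach((branch) => {
      // -D force-deletes; a stricter version would use -d and surface errors.
      execSync(`git branch -D ${branch}`, { stdio: 'inherit' });
    });
}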