@magentrix-corp/magentrix-cli 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/LICENSE +25 -0
  2. package/README.md +471 -0
  3. package/actions/autopublish.js +283 -0
  4. package/actions/autopublish.old.js +293 -0
  5. package/actions/autopublish.v2.js +447 -0
  6. package/actions/create.js +329 -0
  7. package/actions/help.js +165 -0
  8. package/actions/main.js +81 -0
  9. package/actions/publish.js +567 -0
  10. package/actions/pull.js +139 -0
  11. package/actions/setup.js +61 -0
  12. package/actions/status.js +17 -0
  13. package/bin/magentrix.js +159 -0
  14. package/package.json +61 -0
  15. package/utils/cacher.js +112 -0
  16. package/utils/cli/checkInstanceUrl.js +29 -0
  17. package/utils/cli/helpers/compare.js +281 -0
  18. package/utils/cli/helpers/ensureApiKey.js +57 -0
  19. package/utils/cli/helpers/ensureCredentials.js +60 -0
  20. package/utils/cli/helpers/ensureInstanceUrl.js +63 -0
  21. package/utils/cli/writeRecords.js +223 -0
  22. package/utils/compare.js +135 -0
  23. package/utils/compress.js +18 -0
  24. package/utils/config.js +451 -0
  25. package/utils/diff.js +49 -0
  26. package/utils/downloadAssets.js +75 -0
  27. package/utils/filetag.js +115 -0
  28. package/utils/hash.js +14 -0
  29. package/utils/magentrix/api/assets.js +145 -0
  30. package/utils/magentrix/api/auth.js +56 -0
  31. package/utils/magentrix/api/createEntity.js +61 -0
  32. package/utils/magentrix/api/deleteEntity.js +55 -0
  33. package/utils/magentrix/api/meqlQuery.js +31 -0
  34. package/utils/magentrix/api/retrieveEntity.js +32 -0
  35. package/utils/magentrix/api/updateEntity.js +66 -0
  36. package/utils/magentrix/fetch.js +154 -0
  37. package/utils/merge.js +22 -0
  38. package/utils/preferences.js +40 -0
  39. package/utils/spinner.js +43 -0
  40. package/utils/template.js +52 -0
  41. package/utils/updateFileBase.js +103 -0
  42. package/vars/config.js +1 -0
  43. package/vars/global.js +33 -0
@@ -0,0 +1,283 @@
1
+ import chokidar from 'chokidar';
2
+ import { ensureValidCredentials } from '../utils/cli/helpers/ensureCredentials.js';
3
+ import { withSpinner } from '../utils/spinner.js';
4
+ import { EXPORT_ROOT } from '../vars/global.js';
5
+ import fs from 'fs';
6
+ import path from 'path';
7
+ import chalk from 'chalk';
8
+ import { runPublish } from './publish.js';
9
+
10
// Lock file that guards against two autopublish instances watching at once.
const LOCK_FILE = path.join(process.cwd(), '.magentrix', 'autopublish.lock');

// Credentials obtained once at startup (see autoPublish) and reused for every publish.
let credentials = {};
// True while a publish batch is in flight; queueChange consults this.
let isProcessing = false;
let pendingFiles = new Set(); // Track files that have changed while processing
15
+
16
/**
 * Creates a lock file to prevent multiple autopublish instances.
 *
 * A lock younger than one hour is assumed to belong to a live process; an
 * older one is treated as stale (e.g. a crashed run) and overwritten.
 * NOTE: existsSync + writeFileSync is not atomic, so two processes started
 * at the exact same instant could both acquire the lock — acceptable for a
 * single-developer CLI.
 *
 * @returns {boolean} True if lock was acquired, false if already locked.
 */
const acquireLock = () => {
  try {
    if (fs.existsSync(LOCK_FILE)) {
      let lockData = null;
      try {
        lockData = JSON.parse(fs.readFileSync(LOCK_FILE, 'utf-8'));
      } catch {
        // BUG FIX: a corrupt/unreadable lock file used to bubble out of the
        // outer try and return false, permanently blocking autopublish until
        // the file was deleted by hand. Treat it as stale instead.
      }

      if (lockData && typeof lockData.timestamp === 'number') {
        const lockAge = Date.now() - lockData.timestamp;

        // Lock is fresh (< 1 hour old): another instance is presumably running.
        if (lockAge < 3600000) {
          return false;
        }
      }
    }

    // Create (or overwrite a stale) lock file.
    fs.mkdirSync(path.dirname(LOCK_FILE), { recursive: true });
    fs.writeFileSync(LOCK_FILE, JSON.stringify({
      pid: process.pid,
      timestamp: Date.now()
    }));

    return true;
  } catch (err) {
    console.error(chalk.red(`Failed to acquire lock: ${err.message}`));
    return false;
  }
};
46
+
47
/**
 * Removes the autopublish lock file, if one exists.
 * Failures are reported but never thrown — releasing the lock is
 * best-effort cleanup on shutdown.
 */
const releaseLock = () => {
  try {
    if (!fs.existsSync(LOCK_FILE)) {
      return;
    }
    fs.unlinkSync(LOCK_FILE);
  } catch (err) {
    console.error(chalk.red(`Failed to release lock: ${err.message}`));
  }
};
59
+
60
/**
 * Debounce timer for batching changes: each incoming file event resets it,
 * so a publish only fires once edits have paused for DEBOUNCE_DELAY ms.
 */
let debounceTimer = null;
const DEBOUNCE_DELAY = 1000; // 1 second
65
+
66
/**
 * Repaints the queue counter on line 4 of the terminal (the line reserved
 * by processPendingChanges) without disturbing the current cursor position.
 * Does nothing when the queue is empty.
 */
const showQueueStatus = () => {
  if (pendingFiles.size === 0) {
    return;
  }
  const out = process.stdout;
  out.write('\x1b[s');    // save cursor position
  out.write('\x1b[4;1H'); // jump to line 4, column 1
  out.write('\x1b[2K');   // clear that entire line
  out.write(chalk.yellow(`Queued for next publish: ${pendingFiles.size} file(s)`));
  out.write('\x1b[u');    // restore cursor position
};
79
+
80
/**
 * Processes pending changes by running the publish command.
 *
 * Snapshots and clears `pendingFiles`, runs a publish with the cached
 * credentials, then reports success/failure. Files that change while the
 * publish is in flight accumulate in `pendingFiles` again and trigger a
 * fresh debounced run at the end. Re-entry is guarded by `isProcessing`.
 */
const processPendingChanges = async () => {
  if (isProcessing) {
    return;
  }

  isProcessing = true;

  // Clear the pending files set and capture what we're processing
  const filesToProcess = new Set(pendingFiles);
  pendingFiles.clear();

  // Clear console and show header
  process.stdout.write('\x1Bc');
  console.log(chalk.cyan.bold('🔄 Auto-Publishing Changes...'));
  console.log(chalk.gray('─'.repeat(48)));

  if (filesToProcess.size > 0) {
    console.log(chalk.yellow(`Processing ${filesToProcess.size} file change(s):`));
    for (const file of filesToProcess) {
      console.log(chalk.gray(` ${file}`));
    }
  }

  // Reserve a line for the queue status that will be updated during processing
  // (showQueueStatus rewrites terminal line 4 while the publish runs).
  console.log(); // Empty line that will show queue status when files are added
  console.log(); // Extra spacing

  try {
    // Run the publish logic with our pre-authenticated credentials
    const result = await runPublish({
      silent: false,
      skipAuth: true,
      credentials
    });

    // Show completion message
    console.log();
    console.log(chalk.gray('─'.repeat(48)));
    if (result.hasChanges) {
      console.log(chalk.green('✓ Changes published successfully'));
    } else {
      console.log(chalk.green('✓ No changes detected'));
    }
    console.log(chalk.gray('─'.repeat(48)));

    // Show queued changes if any came in during processing
    if (pendingFiles.size > 0) {
      console.log(chalk.yellow(`⏳ ${pendingFiles.size} file change(s) queued for next publish:`));
      for (const file of pendingFiles) {
        console.log(chalk.gray(` ${file}`));
      }
    } else {
      console.log(chalk.cyan('Watching for file changes...'));
    }
    console.log(chalk.gray('Press Ctrl+C to stop'));
    console.log();
  } catch (error) {
    console.log();
    console.log(chalk.bgRed.bold.white(' ✖ Auto-Publish Failed '));
    console.log(chalk.redBright('─'.repeat(48)));
    console.log(chalk.red(error.message));
    console.log(chalk.redBright('─'.repeat(48)));

    // Show queued changes if any
    if (pendingFiles.size > 0) {
      console.log(chalk.yellow(`⏳ ${pendingFiles.size} file change(s) queued for next publish`));
    } else {
      console.log(chalk.cyan('Watching for file changes...'));
    }
    console.log(chalk.gray('Press Ctrl+C to stop'));
    console.log();
  }

  isProcessing = false;

  // If changes came in while processing, start a fresh debounce timer
  // This gives the user time to finish their edits before we publish again
  if (pendingFiles.size > 0) {
    // Clear any existing timer and start fresh
    if (debounceTimer) {
      clearTimeout(debounceTimer);
    }

    debounceTimer = setTimeout(() => {
      processPendingChanges();
    }, DEBOUNCE_DELAY);
  }
};
171
+
172
/**
 * Records a file-system event for the next publish batch.
 *
 * While a publish is in flight the entry is only reflected in the on-screen
 * queue counter (the debounce timer is restarted after that batch ends by
 * processPendingChanges). Otherwise each call resets the debounce timer so
 * rapid consecutive edits collapse into a single publish.
 *
 * @param {string} filePath - Absolute path of the changed file.
 * @param {string} operation - Display marker: '[+]', '[~]' or '[-]'.
 */
const queueChange = (filePath, operation) => {
  const entry = `${operation} ${path.relative(process.cwd(), filePath)}`;
  pendingFiles.add(entry);

  if (isProcessing) {
    // A batch is running: just refresh the visible queue count and let the
    // post-batch logic schedule the next publish.
    showQueueStatus();
    return;
  }

  // Not processing — (re)arm the debounce timer so we wait for editing to stop.
  if (debounceTimer) {
    clearTimeout(debounceTimer);
  }
  debounceTimer = setTimeout(() => processPendingChanges(), DEBOUNCE_DELAY);
};
199
+
200
/**
 * File watcher event handlers — each tags the event with its display
 * marker and forwards it to the shared queue.
 */
const onAdd = (filePath) => queueChange(filePath, '[+]');

const onChange = (filePath) => queueChange(filePath, '[~]');

const onUnlink = (filePath) => queueChange(filePath, '[-]');
214
+
215
/**
 * Starts the chokidar watcher over the export root, wires the add/change/
 * unlink handlers, and registers SIGINT/SIGTERM cleanup that closes the
 * watcher and releases the lock file.
 */
const startWatcher = () => {
  const watchPath = path.join(process.cwd(), EXPORT_ROOT);

  console.log(chalk.cyan.bold('🔄 Auto-Publish Mode Activated'));
  console.log(chalk.gray('─'.repeat(48)));
  console.log(chalk.green('Watching for file changes in:'));
  console.log(chalk.cyan(` ${watchPath}`));
  console.log(chalk.gray('Press Ctrl+C to stop'));
  console.log();

  const watcher = chokidar.watch(watchPath, {
    ignored: /(^|[\/\\])\../, // Ignore dotfiles
    persistent: true,
    ignoreInitial: true, // Don't fire events for existing files
    awaitWriteFinish: {
      // Wait for writes to settle so we don't publish half-saved files.
      stabilityThreshold: 300,
      pollInterval: 100
    }
  });

  watcher.on('add', onAdd);
  watcher.on('change', onChange);
  watcher.on('unlink', onUnlink);

  // Handle cleanup on exit: stop watching, drop the lock, then exit.
  const cleanup = () => {
    console.log(chalk.yellow('\n\nStopping auto-publish...'));
    watcher.close();
    releaseLock();
    process.exit(0);
  };

  for (const signal of ['SIGINT', 'SIGTERM']) {
    process.on(signal, cleanup);
  }
};
254
+
255
/**
 * Main autopublish function.
 *
 * Acquires the single-instance lock, authenticates once, then starts the
 * file watcher. Exits with code 1 if another instance already holds the lock.
 */
export const autoPublish = async () => {
  process.stdout.write('\x1Bc');

  // Check for existing lock
  if (!acquireLock()) {
    console.log(chalk.bgRed.bold.white(' ✖ Already Running '));
    console.log(chalk.redBright('─'.repeat(48)));
    console.log(chalk.red('Another instance of autopublish is already running.'));
    console.log(chalk.gray('Only one autopublish instance can run at a time.'));
    console.log(chalk.redBright('─'.repeat(48)));
    console.log();
    console.log(chalk.yellow('If you believe this is an error, delete the lock file:'));
    console.log(chalk.cyan(LOCK_FILE));
    process.exit(1);
  }

  // Authenticate
  try {
    credentials = await withSpinner('Authenticating...', async () => {
      return await ensureValidCredentials();
    });
  } catch (err) {
    // BUG FIX: a failed login used to leave the freshly-created lock file
    // behind, blocking every subsequent autopublish run for up to an hour.
    releaseLock();
    throw err;
  }

  console.log();

  // Start watching
  startWatcher();
};
@@ -0,0 +1,293 @@
1
+ import chokidar from 'chokidar';
2
+ import { ensureValidCredentials } from '../utils/cli/helpers/ensureCredentials.js';
3
+ import { withSpinner } from '../utils/spinner.js';
4
+ import { ENTITY_FIELD_MAP, ENTITY_TYPE_MAP, TYPE_DIR_MAP } from '../vars/global.js';
5
+ import fs from 'fs';
6
+ import Config from '../utils/config.js';
7
+ import { updateEntity } from '../utils/magentrix/api/updateEntity.js';
8
+ import chalk from 'chalk';
9
+ import { deleteEntity } from '../utils/magentrix/api/deleteEntity.js';
10
+ import { decompressString } from '../utils/compress.js';
11
+
12
// Config store used to resolve watched paths against the base.json index.
const config = new Config();

// Credentials captured once at startup by autoPublish().
let credentials = {};
// Truthy while a remote update is in flight; overlapping change events are skipped.
let isUpdating = null;
// Ensures the "currently compiling" warning prints only once per busy period.
let alreadyWarned = false;
17
+
18
/**
 * Handler for file creation events.
 * Triggered when a new file is added to the watched directory.
 *
 * Builds the API payload for the new entity based on which known type
 * directory (Classes/Triggers/Controllers/Templates/Pages) the file sits in.
 * Creation via the API is currently disabled (payload is only logged).
 *
 * @async
 * @function onAdd
 * @param {string} path - The path of the newly added file.
 */
const onAdd = async (path) => {
  // TODO: Ensure it doesn't try and create a file that has already been created using CLI
  // TODO: Handle file addition logic (e.g., queue for remote sync)

  // BUG FIX: splitting on "\\" only worked on Windows; split on both
  // separators so POSIX paths are handled too.
  const pathParts = path.split(/[\\/]/);
  const acceptedTypes = Object.keys(TYPE_DIR_MAP).map(key => TYPE_DIR_MAP[key].directory);
  const fileName = pathParts[pathParts.length - 1];
  // Name is everything before the first dot of the file name.
  const name = fileName.split(".")[0];

  const fileContent = fs.readFileSync(path, 'utf-8');

  // Deepest path segment that matches a known type directory wins.
  let pathType = null;
  for (const part of pathParts) {
    if (acceptedTypes.includes(part)) {
      pathType = part;
    }
  }

  let formattedData;

  // BUG FIX: the original checked 'Trigger' (singular) while the directory
  // and type map use 'Triggers', so trigger files never produced a payload.
  if (['Classes', 'Triggers', 'Controllers'].includes(pathType)) {
    formattedData = {
      Name: name,
      Body: fileContent,
      Description: "",
      Type: ({
        "Classes": "Class",
        "Triggers": "Trigger",
        "Controllers": "Controller"
      })[pathType]
    };
  } else if (['Templates', 'Pages'].includes(pathType)) {
    formattedData = {
      Name: name,
      Content: fileContent,
      Description: "",
      Type: "Active Page"
    };
  }

  console.log(formattedData)
  // Uncomment to perform creation via API:
  // const creationResponse = await withSpinner('Creating file...', async () => {
  //     return await createEntity(credentials.instanceUrl, credentials.token.value, entityType, formattedData);
  // });
};
74
+
75
/**
 * Handler for file modification events.
 * Triggered when an existing file is changed.
 *
 * Looks the file up in the base.json index; unindexed files are ignored.
 * Pushes the new contents to the remote entity via updateEntity, then
 * renders either a detailed API-error report or a success summary.
 * Overlapping change events are dropped while an update is in flight.
 *
 * @async
 * @function onChange
 * @param {string} path - The path of the modified file.
 */
const onChange = async (path) => {
  if (isUpdating) {
    if (!alreadyWarned) {
      process.stdout.write(chalk.yellow('\r⚠ File is currently compiling. Please wait...\n'));
      alreadyWarned = true;
    }
    return;
  }

  alreadyWarned = false;

  const fileDataQuery = config.searchObject({ filePath: path }, { global: false, filename: 'base.json' });
  const fileData = fileDataQuery?.[0];
  if (!fileData) {
    // No file found in the base index, no action needed here
    return;
  }

  process.stdout.write('\x1Bc'); // Clear the console
  console.log('🔄 Watching for file changes... Any edits will be detected and queued for remote update.');
  console.log();

  const recordId = fileData.value.recordId;
  const fileType = fileData.value.type;
  const fileContents = fs.readFileSync(path, 'utf-8');

  // Map the local file type onto the remote entity and its content field.
  const updateField = ENTITY_FIELD_MAP[fileType];
  const entityName = ENTITY_TYPE_MAP[fileType];

  const updateBody = {
    Id: recordId,
    [updateField]: fileContents
  }

  let response;
  isUpdating = true;
  try {
    response = await withSpinner(chalk.gray(`Compiling ${path}...`), async () => {
      return await updateEntity(
        credentials.instanceUrl,
        credentials.token.value,
        entityName,
        recordId,
        updateBody
      ).catch(err => {
        return { ...err, hasErrors: true }
      });
    });
  } finally {
    // BUG FIX: if withSpinner itself threw, isUpdating previously stayed
    // truthy forever and every later change event was silently dropped.
    isUpdating = false;
  }

  if (response?.hasErrors) {
    const err = response;

    // Clear line and provide an error heading
    console.log();
    console.log(chalk.bgRed.bold.white(' ✖ Magentrix API Error '));
    console.log(chalk.redBright('─'.repeat(48)));

    if (err.response && Array.isArray(err.response.errors)) {
      const errors = err.response.errors;
      console.log(
        chalk.red.bold(
          `Found ${errors.length} error${errors.length !== 1 ? 's' : ''}:`
        )
      );
      errors.forEach((error, idx) => {
        // Show error status (if available), code, and message, all prettified
        const code = error.code ? chalk.gray(`[${error.code}] `) : '';
        const status = error.status ? chalk.yellow(`[${error.status}] `) : '';
        const msg = chalk.whiteBright(error.message);
        console.log(
          `${chalk.redBright(' •')} ${status}${code}${msg}`
        );
      });
    } else if (err.response && err.response.message) {
      // Single message fallback
      console.log(chalk.red(' • ') + chalk.whiteBright(err.response.message));
    } else if (err.message) {
      // Any unexpected error
      console.log(chalk.red(' • ') + chalk.whiteBright(err.message));
    } else {
      console.log(chalk.red(' • ') + chalk.whiteBright('An unexpected error has occurred.'));
    }

    console.log(chalk.redBright('─'.repeat(48)));
  } else {
    // Clean, celebratory message!
    console.log();
    console.log(
      chalk.bgGreen.bold.white(' ✔ File Compiled & Saved! ')
    );
    console.log(chalk.greenBright('─'.repeat(48)));
    console.log(
      chalk.greenBright.bold('Your file was compiled and saved on the remote Magentrix server.')
    );
    // Optionally: include file info or extra details if available
    if (response.fileName) {
      console.log(chalk.whiteBright(`File: ${chalk.cyan(response.fileName)}`));
    }
    if (response.lastModified) {
      console.log(
        chalk.gray(`Last Modified: ${new Date(response.lastModified).toLocaleString()}`)
      );
    }
    if (response.recordId) {
      console.log(chalk.gray(`Record ID: ${chalk.yellow(response.recordId)}`));
    }
    // Add spacing and line
    console.log(chalk.greenBright('─'.repeat(48)));
    console.log();
  }
};
193
+
194
/**
 * Handler for file deletion events.
 * Triggered when a file is removed from the watched directory.
 *
 * Looks the file up in the base.json index and deletes the corresponding
 * remote entity. If the remote deletion fails, the local file is restored
 * from the last synced (compressed) content so the workspace does not drift
 * from the server.
 *
 * @async
 * @function onUnlink
 * @param {string} path - The path of the deleted file.
 */
const onUnlink = async (path) => {
  process.stdout.write('\x1Bc'); // Clear the console
  console.log('🔄 Watching for file changes... Any edits will be detected and queued for remote update.');
  console.log();

  const fileDataQuery = config.searchObject({ filePath: path }, { global: false, filename: 'base.json' });
  const fileData = fileDataQuery?.[0];
  if (!fileData) {
    console.log(chalk.gray(`⚠️ File removed, but not found in index: ${path}`));
    return;
  }

  const recordId = fileData.value.recordId;
  const fileType = fileData.value.type;
  const entityName = ENTITY_TYPE_MAP[fileType];

  // Rejections are converted into a { hasErrors: true } result so the
  // reporting below can treat success and failure uniformly.
  const response = await withSpinner(`🗑️ Deleting remote ${entityName} for: ${path}`, async () => {
    return await deleteEntity(
      credentials.instanceUrl,
      credentials.token.value,
      entityName,
      recordId
    ).catch(err => {
      return { ...err?.response, hasErrors: true };
    });
  });

  if (response?.hasErrors) {
    try {
      // Put the file back with the last synced content
      const lastContent = decompressString(fileData.value.compressedContent);
      fs.writeFileSync(path, lastContent);
    } catch (err) {
      console.warn(`⚠️ Failed to restore deleted file at "${path}". Reason: ${err.message}`);
    }
  }

  if (response?.success) {
    console.log(chalk.green.bold(`✔ Successfully deleted remote ${entityName} for:`), chalk.whiteBright(path));
  } else if (response?.hasErrors || response?.errors?.length > 0) {
    console.log();
    console.log(chalk.bgRed.bold.white(' ✖ Deletion Failed '));
    console.log(chalk.redBright('─'.repeat(48)));
    const errors = response.errors || [];
    if (errors.length > 0) {
      errors.forEach((err, i) => {
        const code = err.code ? chalk.gray(`[${err.code}] `) : '';
        const status = err.status ? chalk.yellow(`[${err.status}] `) : '';
        const msg = chalk.whiteBright(err.message);
        console.log(`${chalk.redBright(' •')} ${status}${code}${msg}`);
      });
    } else {
      console.log(chalk.red('An unknown error occurred during deletion.'));
    }
  } else {
    console.log(chalk.yellow(`⚠️ Unexpected response while deleting: ${path}`));
  }
};
260
+
261
/**
 * Starts a chokidar watcher over the current working directory and wires
 * the add/change/unlink handlers for remote sync.
 */
const startWatcher = () => {
  console.log('🔄 Watching for file changes... Any edits will be detected and queued for remote update.');
  console.log();

  const watcher = chokidar.watch('.', {
    ignored: /(^|[\/\\])\../, // Ignore dotfiles and .git etc.
    persistent: true,
    ignoreInitial: true // Don't fire events for files already present
  });

  watcher.on('add', onAdd);
  watcher.on('change', onChange);
  watcher.on('unlink', onUnlink);
}
276
+
277
/**
 * Initializes and starts the file watcher for Magentrix CLI.
 * Authenticates once, caches the credentials in module state, then watches
 * the current directory for file changes (add, change, unlink) and triggers
 * remote sync as needed.
 *
 * @function autoPublish
 * @returns {Promise<void>}
 */
export const autoPublish = async () => {
  process.stdout.write('\x1Bc'); // Clear the console

  credentials = await withSpinner('Authenticating...', async () => {
    return await ensureValidCredentials();
  });

  startWatcher();
};