@underpostnet/underpost 2.99.6 → 2.99.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/.github/workflows/ghpkg.ci.yml +10 -25
  2. package/.github/workflows/npmpkg.ci.yml +14 -2
  3. package/CHANGELOG.md +520 -0
  4. package/Dockerfile +15 -15
  5. package/README.md +9 -16
  6. package/bin/deploy.js +13 -3
  7. package/bin/file.js +4 -19
  8. package/cli.md +46 -28
  9. package/examples/static-page/ssr-components/CustomPage.js +1 -1
  10. package/jsdoc.json +26 -5
  11. package/manifests/cronjobs/dd-cron/dd-cron-backup.yaml +9 -2
  12. package/manifests/cronjobs/dd-cron/dd-cron-dns.yaml +9 -2
  13. package/manifests/deployment/dd-default-development/deployment.yaml +2 -2
  14. package/manifests/deployment/dd-test-development/deployment.yaml +2 -2
  15. package/package.json +1 -2
  16. package/src/cli/baremetal.js +8 -322
  17. package/src/cli/cloud-init.js +2 -2
  18. package/src/cli/index.js +12 -1
  19. package/src/cli/repository.js +166 -13
  20. package/src/cli/run.js +16 -42
  21. package/src/cli/ssh.js +1 -1
  22. package/src/cli/system.js +332 -0
  23. package/src/db/DataBaseProvider.js +3 -3
  24. package/src/db/mariadb/MariaDB.js +5 -5
  25. package/src/db/mongo/MongooseDB.js +3 -3
  26. package/src/index.js +17 -5
  27. package/src/mailer/EmailRender.js +3 -3
  28. package/src/mailer/MailerProvider.js +4 -4
  29. package/src/runtime/express/Dockerfile +15 -15
  30. package/src/runtime/lampp/Dockerfile +15 -15
  31. package/src/server/backup.js +15 -4
  32. package/src/server/client-build-docs.js +28 -2
  33. package/src/server/conf.js +8 -24
  34. package/src/server/cron.js +48 -38
  35. package/src/server/dns.js +0 -8
  36. package/bin/cron.js +0 -47
  37. package/bin/db.js +0 -199
  38. package/bin/hwt.js +0 -49
  39. package/bin/util.js +0 -63
@@ -1,23 +1,23 @@
1
1
  FROM rockylinux:9
2
2
 
3
- # --- Update and install required packages
3
+ # Update and install required packages
4
4
  RUN dnf -y update && \
5
5
  dnf -y install epel-release && \
6
6
  dnf -y install --allowerasing \
7
- bzip2 \
8
- sudo \
9
- curl \
10
- net-tools \
11
- openssh-server \
12
- nano \
13
- vim-enhanced \
14
- less \
15
- openssl-devel \
16
- wget \
17
- git \
18
- gnupg2 \
19
- libnsl \
20
- perl && \
7
+ bzip2 \
8
+ sudo \
9
+ curl \
10
+ net-tools \
11
+ openssh-server \
12
+ nano \
13
+ vim-enhanced \
14
+ less \
15
+ openssl-devel \
16
+ wget \
17
+ git \
18
+ gnupg2 \
19
+ libnsl \
20
+ perl && \
21
21
  dnf clean all
22
22
 
23
23
 
@@ -1,23 +1,23 @@
1
1
  FROM rockylinux:9
2
2
 
3
- # --- Update and install required packages
3
+ # Update and install required packages
4
4
  RUN dnf -y update && \
5
5
  dnf -y install epel-release && \
6
6
  dnf -y install --allowerasing \
7
- bzip2 \
8
- sudo \
9
- curl \
10
- net-tools \
11
- openssh-server \
12
- nano \
13
- vim-enhanced \
14
- less \
15
- openssl-devel \
16
- wget \
17
- git \
18
- gnupg2 \
19
- libnsl \
20
- perl && \
7
+ bzip2 \
8
+ sudo \
9
+ curl \
10
+ net-tools \
11
+ openssh-server \
12
+ nano \
13
+ vim-enhanced \
14
+ less \
15
+ openssl-devel \
16
+ wget \
17
+ git \
18
+ gnupg2 \
19
+ libnsl \
20
+ perl && \
21
21
  dnf clean all
22
22
 
23
23
  # --- Download and install XAMPP
@@ -8,6 +8,7 @@ import fs from 'fs-extra';
8
8
  import { loggerFactory } from './logger.js';
9
9
  import { shellExec } from './process.js';
10
10
  import dotenv from 'dotenv';
11
+ import Underpost from '../index.js';
11
12
 
12
13
  dotenv.config();
13
14
 
@@ -28,6 +29,7 @@ class BackUp {
28
29
  * @param {boolean} [options.k3s] - Use k3s cluster context.
29
30
  * @param {boolean} [options.kind] - Use kind cluster context.
30
31
  * @param {boolean} [options.kubeadm] - Use kubeadm cluster context.
32
+ * @param {boolean} [options.ssh] - Execute backup commands via SSH on the remote node.
31
33
  * @memberof UnderpostBakcUp
32
34
  */
33
35
  static callback = async function (deployList, options = { git: false }) {
@@ -43,10 +45,19 @@ class BackUp {
43
45
  const deployId = _deployId.trim();
44
46
  if (!deployId) continue;
45
47
 
46
- logger.info('Executing database export for', deployId);
47
- shellExec(
48
- `node bin db ${options.git ? '--git --force-clone ' : ''}--export --primary-pod${clusterFlag} ${deployId}`,
49
- );
48
+ const command = `node bin db ${options.git ? '--git --force-clone ' : ''}--export --primary-pod${clusterFlag} ${deployId}`;
49
+
50
+ if (options.ssh) {
51
+ logger.info('Executing database export via SSH for', deployId);
52
+ await Underpost.ssh.sshRemoteRunner(command, {
53
+ remote: true,
54
+ useSudo: true,
55
+ cd: '/home/dd/engine',
56
+ });
57
+ } else {
58
+ logger.info('Executing database export for', deployId);
59
+ shellExec(command);
60
+ }
50
61
  }
51
62
  };
52
63
  }
@@ -1,5 +1,11 @@
1
1
  'use strict';
2
2
 
3
+ /**
4
+ * Module for building project documentation (JSDoc, Swagger, Coverage).
5
+ * @module src/server/client-build-docs.js
6
+ * @namespace clientBuildDocs
7
+ */
8
+
3
9
  import fs from 'fs-extra';
4
10
  import swaggerAutoGen from 'swagger-autogen';
5
11
  import { shellExec } from './process.js';
@@ -8,6 +14,8 @@ import { JSONweb } from './client-formatted.js';
8
14
 
9
15
  /**
10
16
  * Builds API documentation using Swagger
17
+ * @function buildApiDocs
18
+ * @memberof clientBuildDocs
11
19
  * @param {Object} options - Documentation build options
12
20
  * @param {string} options.host - The hostname for the API
13
21
  * @param {string} options.path - The base path for the API
@@ -126,12 +134,15 @@ const buildApiDocs = async ({
126
134
 
127
135
  /**
128
136
  * Builds JSDoc documentation
137
+ * @function buildJsDocs
138
+ * @memberof clientBuildDocs
129
139
  * @param {Object} options - JSDoc build options
130
140
  * @param {string} options.host - The hostname for the documentation
131
141
  * @param {string} options.path - The base path for the documentation
132
142
  * @param {Object} options.metadata - Metadata for the documentation
143
+ * @param {string} options.publicClientId - Client ID used to resolve the tutorials/references directory
133
144
  */
134
- const buildJsDocs = async ({ host, path, metadata = {} }) => {
145
+ const buildJsDocs = async ({ host, path, metadata = {}, publicClientId }) => {
135
146
  const logger = loggerFactory(import.meta);
136
147
  const jsDocsConfig = JSON.parse(fs.readFileSync(`./jsdoc.json`, 'utf8'));
137
148
 
@@ -139,6 +150,17 @@ const buildJsDocs = async ({ host, path, metadata = {} }) => {
139
150
  jsDocsConfig.opts.theme_opts.title = metadata?.title ? metadata.title : undefined;
140
151
  jsDocsConfig.opts.theme_opts.favicon = `./public/${host}${path === '/' ? '/' : `${path}/`}favicon.ico`;
141
152
 
153
+ const tutorialsPath = `./src/client/public/${publicClientId}/docs/references`;
154
+ if (fs.existsSync(tutorialsPath)) {
155
+ jsDocsConfig.opts.tutorials = tutorialsPath;
156
+ if (jsDocsConfig.opts.theme_opts.sections && !jsDocsConfig.opts.theme_opts.sections.includes('Tutorials')) {
157
+ jsDocsConfig.opts.theme_opts.sections.push('Tutorials');
158
+ }
159
+ logger.info('build jsdoc tutorials', tutorialsPath);
160
+ } else {
161
+ delete jsDocsConfig.opts.tutorials;
162
+ }
163
+
142
164
  fs.writeFileSync(`./jsdoc.json`, JSON.stringify(jsDocsConfig, null, 4), 'utf8');
143
165
  logger.warn('build jsdoc view', jsDocsConfig.opts.destination);
144
166
 
@@ -147,6 +169,8 @@ const buildJsDocs = async ({ host, path, metadata = {} }) => {
147
169
 
148
170
  /**
149
171
  * Builds test coverage documentation
172
+ * @function buildCoverage
173
+ * @memberof clientBuildDocs
150
174
  * @param {Object} options - Coverage build options
151
175
  * @param {string} options.host - The hostname for the coverage
152
176
  * @param {string} options.path - The base path for the coverage
@@ -168,6 +192,8 @@ const buildCoverage = async ({ host, path }) => {
168
192
 
169
193
  /**
170
194
  * Main function to build all documentation
195
+ * @function buildDocs
196
+ * @memberof clientBuildDocs
171
197
  * @param {Object} options - Documentation build options
172
198
  * @param {string} options.host - The hostname
173
199
  * @param {string} options.path - The base path
@@ -188,7 +214,7 @@ const buildDocs = async ({
188
214
  rootClientPath,
189
215
  packageData,
190
216
  }) => {
191
- await buildJsDocs({ host, path, metadata });
217
+ await buildJsDocs({ host, path, metadata, publicClientId });
192
218
  await buildCoverage({ host, path });
193
219
  await buildApiDocs({
194
220
  host,
@@ -1167,9 +1167,10 @@ const getPathsSSR = (conf) => {
1167
1167
  };
1168
1168
 
1169
1169
  /**
1170
- * @method Cmd
1170
+ * @method CmdUnderpost
1171
1171
  * @description The command factory.
1172
1172
  * @memberof ServerConfBuilder
1173
+ * @namespace CmdUnderpost
1173
1174
  */
1174
1175
  const Cmd = {
1175
1176
  /**
@@ -1184,7 +1185,7 @@ const Cmd = {
1184
1185
  * @description Builds the deploy.
1185
1186
  * @param {string} deployId - The deploy ID.
1186
1187
  * @returns {string} - The build command.
1187
- * @memberof Cmd
1188
+ * @memberof CmdUnderpost
1188
1189
  */
1189
1190
  build: (deployId) => `node bin/deploy build-full-client ${deployId}`,
1190
1191
  /**
@@ -1193,7 +1194,7 @@ const Cmd = {
1193
1194
  * @param {string} deployId - The deploy ID.
1194
1195
  * @param {string} env - The environment.
1195
1196
  * @returns {string} - The conf command.
1196
- * @memberof Cmd
1197
+ * @memberof CmdUnderpost
1197
1198
  */
1198
1199
  conf: (deployId, env) => `node bin/deploy conf ${deployId} ${env ? env : 'production'}`,
1199
1200
  /**
@@ -1203,14 +1204,14 @@ const Cmd = {
1203
1204
  * @param {string} host - The host.
1204
1205
  * @param {string} path - The path.
1205
1206
  * @returns {string} - The replica command.
1206
- * @memberof Cmd
1207
+ * @memberof CmdUnderpost
1207
1208
  */
1208
1209
  replica: (deployId, host, path) => `node bin/deploy build-single-replica ${deployId} ${host} ${path}`,
1209
1210
  /**
1210
1211
  * @method syncPorts
1211
1212
  * @description Syncs the ports.
1212
1213
  * @returns {string} - The sync ports command.
1213
- * @memberof Cmd
1214
+ * @memberof CmdUnderpost
1214
1215
  */
1215
1216
  syncPorts: () => `node bin/deploy sync-env-port`,
1216
1217
  };
@@ -1335,25 +1336,8 @@ const buildCliDoc = (program, oldVersion, newVersion) => {
1335
1336
  md = md.replaceAll(oldVersion, newVersion);
1336
1337
  fs.writeFileSync(`./src/client/public/nexodev/docs/references/Command Line Interface.md`, md, 'utf8');
1337
1338
  fs.writeFileSync(`./cli.md`, md, 'utf8');
1338
- const readmeSplit = `pwa-microservices-template</a>`;
1339
- const readme = fs.readFileSync(`./README.md`, 'utf8').split(readmeSplit);
1340
- fs.writeFileSync(
1341
- './README.md',
1342
- (
1343
- readme[0] +
1344
- readmeSplit +
1345
- `
1346
-
1347
- ` +
1348
- baseOptions +
1349
- `
1350
-
1351
- <a target="_top" href="https://github.com/${process.env.GITHUB_USERNAME}/pwa-microservices-template/blob/master/cli.md">See complete CLI Docs here.</a>
1352
-
1353
- `
1354
- ).replaceAll(oldVersion, newVersion),
1355
- 'utf8',
1356
- );
1339
+ const readme = fs.readFileSync(`./README.md`, 'utf8');
1340
+ fs.writeFileSync('./README.md', readme.replaceAll(oldVersion, newVersion), 'utf8');
1357
1341
  };
1358
1342
 
1359
1343
  /**
@@ -8,12 +8,15 @@ import { loggerFactory } from './logger.js';
8
8
  import { shellExec } from './process.js';
9
9
  import fs from 'fs-extra';
10
10
  import Underpost from '../index.js';
11
+ import { getUnderpostRootPath } from './conf.js';
11
12
 
12
13
  const logger = loggerFactory(import.meta);
13
14
 
14
15
  const volumeHostPath = '/home/dd';
15
16
  const enginePath = '/home/dd/engine';
16
17
  const cronVolumeName = 'underpost-cron-container-volume';
18
+ const shareEnvVolumeName = 'underpost-share-env';
19
+ const underpostContainerEnvPath = '/usr/lib/node_modules/underpost/.env';
17
20
 
18
21
  /**
19
22
  * Generates a Kubernetes CronJob YAML manifest string.
@@ -30,6 +33,7 @@ const cronVolumeName = 'underpost-cron-container-volume';
30
33
  * @param {string} [params.cmd] - Optional pre-script commands to run before cron execution
31
34
  * @param {boolean} [params.suspend=false] - Whether the CronJob is suspended
32
35
  * @param {boolean} [params.dryRun=false] - Pass --dry-run flag to the cron command inside the container
36
+ * @param {boolean} [params.ssh=false] - Execute backup commands via SSH on the remote node
33
37
  * @returns {string} Kubernetes CronJob YAML manifest
34
38
  * @memberof UnderpostCron
35
39
  */
@@ -45,6 +49,7 @@ const cronJobYamlFactory = ({
45
49
  cmd,
46
50
  suspend = false,
47
51
  dryRun = false,
52
+ ssh = false,
48
53
  }) => {
49
54
  const containerImage = image || `underpost/underpost-engine:${Underpost.version}`;
50
55
 
@@ -57,7 +62,7 @@ const cronJobYamlFactory = ({
57
62
 
58
63
  const cmdPart = cmd ? `${cmd} && ` : '';
59
64
  const cronBin = dev ? 'node bin' : 'underpost';
60
- const flags = `${git ? '--git ' : ''}${dev ? '--dev ' : ''}${dryRun ? '--dry-run ' : ''}`;
65
+ const flags = `${git ? '--git ' : ''}${dev ? '--dev ' : ''}${dryRun ? '--dry-run ' : ''}${ssh ? '--ssh ' : ''}`;
61
66
  const cronCommand = `${cmdPart}${cronBin} cron ${flags}${deployList} ${jobList}`;
62
67
 
63
68
  return `apiVersion: batch/v1
@@ -94,11 +99,18 @@ spec:
94
99
  volumeMounts:
95
100
  - mountPath: ${enginePath}
96
101
  name: ${cronVolumeName}
102
+ - mountPath: ${underpostContainerEnvPath}
103
+ name: ${shareEnvVolumeName}
104
+ subPath: .env
97
105
  volumes:
98
106
  - hostPath:
99
107
  path: ${enginePath}
100
108
  type: Directory
101
109
  name: ${cronVolumeName}
110
+ - hostPath:
111
+ path: ${getUnderpostRootPath()}
112
+ type: DirectoryOrCreate
113
+ name: ${shareEnvVolumeName}
102
114
  restartPolicy: OnFailure
103
115
  `;
104
116
  };
@@ -171,6 +183,7 @@ class UnderpostCron {
171
183
  * @param {boolean} [options.kubeadm] - Use kubeadm cluster context (apply directly on host)
172
184
  * @param {boolean} [options.dryRun] - Preview cron jobs without executing them
173
185
  * @param {boolean} [options.createJobNow] - After applying, immediately create a Job from each CronJob (requires --apply)
186
+ * @param {boolean} [options.ssh] - Execute backup commands via SSH on the remote node
174
187
  * @memberof UnderpostCron
175
188
  */
176
189
  callback: async function (
@@ -178,15 +191,9 @@ class UnderpostCron {
178
191
  jobList = Object.keys(Underpost.cron.JOB).join(','),
179
192
  options = {},
180
193
  ) {
181
- if (options.setupStart) {
182
- await Underpost.cron.setupDeployStart(options.setupStart, options);
183
- return;
184
- }
194
+ if (options.setupStart) return await Underpost.cron.setupDeployStart(options.setupStart, options);
185
195
 
186
- if (options.generateK8sCronjobs) {
187
- await Underpost.cron.generateK8sCronJobs(options);
188
- return;
189
- }
196
+ if (options.generateK8sCronjobs) return await Underpost.cron.generateK8sCronJobs(options);
190
197
 
191
198
  for (const _jobId of jobList.split(',')) {
192
199
  const jobId = _jobId.trim();
@@ -219,6 +226,7 @@ class UnderpostCron {
219
226
  * @param {boolean} [options.k3s] - k3s cluster context (apply directly on host)
220
227
  * @param {boolean} [options.kind] - kind cluster context (apply via kind-worker container)
221
228
  * @param {boolean} [options.kubeadm] - kubeadm cluster context (apply directly on host)
229
+ * @param {boolean} [options.ssh] - Execute backup commands via SSH on the remote node
222
230
  * @memberof UnderpostCron
223
231
  */
224
232
  setupDeployStart: async function (deployId, options = {}) {
@@ -263,17 +271,18 @@ class UnderpostCron {
263
271
  // Generate and apply cron job manifests for this deploy-id
264
272
  await Underpost.cron.generateK8sCronJobs({
265
273
  deployId,
266
- apply: options.apply,
267
- git: !!options.git,
268
- dev: !!options.dev,
269
- cmd: options.cmd,
270
274
  namespace: options.namespace,
271
275
  image: options.image,
272
- k3s: !!options.k3s,
273
- kind: !!options.kind,
274
- kubeadm: !!options.kubeadm,
275
- createJobNow: !!options.createJobNow,
276
- dryRun: !!options.dryRun,
276
+ apply: options.apply,
277
+ createJobNow: options.createJobNow,
278
+ git: true,
279
+ dev: true,
280
+ kubeadm: true,
281
+ ssh: true,
282
+ cmd: ` cd ${enginePath} && node bin env ${deployId} production`,
283
+ k3s: false,
284
+ kind: false,
285
+ dryRun: false,
277
286
  });
278
287
  },
279
288
 
@@ -295,6 +304,7 @@ class UnderpostCron {
295
304
  * @param {boolean} [options.kubeadm=false] - kubeadm cluster context (apply directly on host)
296
305
  * @param {boolean} [options.createJobNow=false] - After applying, create a Job from each CronJob immediately
297
306
  * @param {boolean} [options.dryRun=false] - Pass --dry-run=client to kubectl commands
307
+ * @param {boolean} [options.ssh=false] - Execute backup commands via SSH on the remote node
298
308
  * @memberof UnderpostCron
299
309
  */
300
310
  generateK8sCronJobs: async function (options = {}) {
@@ -351,6 +361,7 @@ class UnderpostCron {
351
361
  cmd: options.cmd,
352
362
  suspend: false,
353
363
  dryRun: !!options.dryRun,
364
+ ssh: !!options.ssh,
354
365
  });
355
366
 
356
367
  const yamlFilePath = `${outputDir}/${cronJobName}.yaml`;
@@ -389,29 +400,28 @@ class UnderpostCron {
389
400
  shellExec(`kubectl apply -f ${yamlFile}`);
390
401
  }
391
402
  logger.info('All CronJob manifests applied');
392
-
393
- // Create an immediate Job from each CronJob if requested
394
- if (options.createJobNow) {
395
- for (const job of Object.keys(confCronConfig.jobs)) {
396
- const jobConfig = confCronConfig.jobs[job];
397
- if (jobConfig.enabled === false) continue;
398
-
399
- const cronJobName = `${jobDeployId}-${job}`
400
- .toLowerCase()
401
- .replace(/[^a-z0-9-]/g, '-')
402
- .replace(/--+/g, '-')
403
- .replace(/^-|-$/g, '')
404
- .substring(0, 52);
405
-
406
- const immediateJobName = `${cronJobName}-now-${Date.now()}`.substring(0, 63);
407
- logger.info(`Creating immediate Job from CronJob: ${cronJobName}`, { jobName: immediateJobName });
408
- shellExec(`kubectl create job ${immediateJobName} --from=cronjob/${cronJobName} -n ${namespace}`);
409
- }
410
- logger.info('All immediate Jobs created');
411
- }
412
403
  } else {
413
404
  logger.info(`Manifests generated in ${outputDir}. Use --apply to deploy to the cluster.`);
414
405
  }
406
+ // Create an immediate Job from each CronJob if requested
407
+ if (options.createJobNow) {
408
+ for (const job of Object.keys(confCronConfig.jobs)) {
409
+ const jobConfig = confCronConfig.jobs[job];
410
+ if (jobConfig.enabled === false) continue;
411
+
412
+ const cronJobName = `${jobDeployId}-${job}`
413
+ .toLowerCase()
414
+ .replace(/[^a-z0-9-]/g, '-')
415
+ .replace(/--+/g, '-')
416
+ .replace(/^-|-$/g, '')
417
+ .substring(0, 52);
418
+
419
+ const immediateJobName = `${cronJobName}-now-${Date.now()}`.substring(0, 63);
420
+ logger.info(`Creating immediate Job from CronJob: ${cronJobName}`, { jobName: immediateJobName });
421
+ shellExec(`kubectl create job ${immediateJobName} --from=cronjob/${cronJobName} -n ${namespace}`);
422
+ }
423
+ logger.info('All immediate Jobs created');
424
+ }
415
425
  },
416
426
 
417
427
  /**
package/src/server/dns.js CHANGED
@@ -330,14 +330,6 @@ class Dns {
330
330
  logger.info('IP updated successfully and verified', testIp);
331
331
  Underpost.env.set('ip', testIp);
332
332
  Underpost.env.delete('monitor-input');
333
- {
334
- const deployId = resolveDeployId();
335
- const envs = dotenv.parse(
336
- fs.readFileSync(`./engine-private/conf/${deployId}/.env.${process.env.NODE_ENV}`, 'utf8'),
337
- );
338
- envs.ip = testIp;
339
- writeEnv(`./engine-private/conf/${deployId}/.env.${process.env.NODE_ENV}`, envs);
340
- }
341
333
  } else {
342
334
  logger.error('IP not updated or verification failed', { expected: testIp, received: verifyIp });
343
335
  }
package/bin/cron.js DELETED
@@ -1,47 +0,0 @@
1
- import { BackUpManagement } from '../src/server/backup.js';
2
- import { Cmd } from '../src/server/conf.js';
3
- import { Dns } from '../src/server/dns.js';
4
- import { loggerFactory } from '../src/server/logger.js';
5
- import { netWorkCron, saveRuntimeCron } from '../src/server/network.js';
6
- import { shellExec } from '../src/server/process.js';
7
- import fs from 'fs-extra';
8
-
9
- const logger = loggerFactory(import.meta);
10
-
11
- await logger.setUpInfo();
12
-
13
- switch (process.argv[2]) {
14
- case 'backups':
15
- {
16
- await BackUpManagement.Init({ deployId: process.argv[3] });
17
- }
18
- break;
19
- case 'dns':
20
- {
21
- await Dns.InitIpDaemon({ deployId: process.argv[3] });
22
- }
23
- break;
24
-
25
- case 'run': {
26
- const confCronConfig = JSON.parse(fs.readFileSync(`./engine-private/conf/${process.argv[3]}/conf.cron.json`));
27
- if (confCronConfig.jobs && Object.keys(confCronConfig.jobs).length > 0) {
28
- shellExec(`node bin/deploy conf ${process.argv[3]} production`);
29
- for (const job of Object.keys(confCronConfig.jobs)) {
30
- if (confCronConfig.jobs[job].enabled) {
31
- shellExec(Cmd.cron(process.argv[3], job, confCronConfig.jobs[job].expression));
32
- netWorkCron.push({
33
- deployId: process.argv[3],
34
- jobId: job,
35
- expression: confCronConfig.jobs[job].expression,
36
- });
37
- }
38
- }
39
- }
40
- await saveRuntimeCron();
41
- if (fs.existsSync(`./tmp/await-deploy`)) fs.remove(`./tmp/await-deploy`);
42
- break;
43
- }
44
-
45
- default:
46
- break;
47
- }