underpost 2.6.2 → 2.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.nycrc +2 -2
- package/AUTHORS.md +10 -0
- package/CHANGELOG.md +91 -0
- package/Dockerfile +5 -3
- package/README.md +48 -3
- package/bin/deploy.js +88 -27
- package/bin/index.js +45 -21
- package/bin/ssl.js +3 -0
- package/conf.js +7 -2
- package/docker-compose.yml +1 -1
- package/package.json +133 -128
- package/src/client/components/core/BtnIcon.js +4 -1
- package/src/client/components/core/CommonJs.js +26 -2
- package/src/client/components/core/CssCore.js +4 -0
- package/src/client/components/core/Docs.js +107 -8
- package/src/client/components/core/VanillaJs.js +10 -7
- package/src/client/ssr/body-components/CacheControl.js +1 -1
- package/src/index.js +29 -0
- package/src/server/auth.js +1 -1
- package/src/server/backup.js +17 -6
- package/src/server/client-build.js +16 -5
- package/src/server/dns.js +12 -1
- package/src/server/logger.js +54 -9
- package/src/server/process.js +4 -4
- package/src/server/prompt-optimizer.js +28 -0
- package/startup.js +1 -1
- package/bin/help.js +0 -110
package/src/server/backup.js
CHANGED
@@ -2,15 +2,25 @@ import fs from 'fs-extra';
 import { loggerFactory } from './logger.js';
 import { shellExec } from './process.js';
 import { getDataDeploy } from './conf.js';
+import cron from 'node-cron';

 const logger = loggerFactory(import.meta);

 const BackUpManagement = {
   Init: async function () {
     await this.Callback();
-
-
-
+
+    // Schedule the sending process to run every day at 1 am
+    cron.schedule(
+      '0 1 * * *',
+      async () => {
+        await this.Callback();
+      },
+      {
+        scheduled: true,
+        timezone: process.env.TIME_ZONE || 'America/New_York',
+      },
+    );
   },
   Callback: async function () {
     const privateCronConfPath = `./engine-private/conf/${process.argv[2]}/conf.cron.json`;
@@ -26,7 +36,8 @@ const BackUpManagement = {
     if (!fs.existsSync('./engine-private/cron-backups'))
       fs.mkdirSync('./engine-private/cron-backups', { recursive: true });

-    for (const
+    for (const deployGroupData of backups) {
+      const { deployGroupId } = deployGroupData;
       const dataDeploy = getDataDeploy({ deployGroupId });

       for (const deployObj of dataDeploy) {
@@ -41,9 +52,9 @@ const BackUpManagement = {
         for (const host of Object.keys(confServer))
           for (const path of Object.keys(confServer[host])) {
             // retention policy
-            let { db, backupFrequency, maxBackupRetention } = confServer[host][path];
+            let { db, backupFrequency, maxBackupRetention, singleReplica } = confServer[host][path];

-            if (!db) continue;
+            if (!db || singleReplica) continue;

             if (!backupFrequency) backupFrequency = 'daily';
             if (!maxBackupRetention) maxBackupRetention = 5;
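
Note on the scheduling added above: backup.js (daily at 01:00) and dns.js further down (every minute) both use node-cron's `cron.schedule(expression, task, options)` API with a `timezone` option. A minimal standalone sketch of the same pattern, assuming node-cron is installed; `runBackup` is a hypothetical task used only for illustration:

import cron from 'node-cron';

// Hypothetical task standing in for BackUpManagement.Callback().
const runBackup = async () => console.log('backup run at', new Date().toISOString());

// '0 1 * * *' fires once a day at 01:00 in the given timezone;
// dns.js uses '* * * * *' to re-run its callback every minute.
cron.schedule('0 1 * * *', runBackup, {
  scheduled: true,
  timezone: process.env.TIME_ZONE || 'America/New_York',
});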
package/src/server/client-build.js
CHANGED

@@ -104,6 +104,7 @@ const buildClient = async (options = { liveClientBuildPaths: [], instances: [] }
   const confClient = JSON.parse(fs.readFileSync(`./conf/conf.client.json`, 'utf8'));
   const confServer = JSON.parse(fs.readFileSync(`./conf/conf.server.json`, 'utf8'));
   const confSSR = JSON.parse(fs.readFileSync(`./conf/conf.ssr.json`, 'utf8'));
+  const packageData = JSON.parse(fs.readFileSync(`./package.json`, 'utf8'));
   const acmeChallengePath = `/.well-known/acme-challenge`;
   const publicPath = `./public`;

@@ -133,7 +134,6 @@ const buildClient = async (options = { liveClientBuildPaths: [], instances: [] }
     apis,
     iconsBuild,
     docsBuild,
-    swaggerApiVersion,
     apiBaseProxyPath,
     apiBaseHost,
     ttiLoadTimeLimit,
@@ -150,7 +150,7 @@ const buildClient = async (options = { liveClientBuildPaths: [], instances: [] }
     const rootClientPath = directory ? directory : `${publicPath}/${host}${path}`;
     const port = newInstance(currentPort);
     const publicClientId = publicRef ? publicRef : client;
-    const fullBuildEnabled = !process.argv.includes('l') && !confServer[host][path].
+    const fullBuildEnabled = !process.argv.includes('l') && !confServer[host][path].liteBuild && !enableLiveRebuild;
     // const baseHost = process.env.NODE_ENV === 'production' ? `https://${host}` : `http://localhost:${port}`;
     const baseHost = process.env.NODE_ENV === 'production' ? `https://${host}` : ``;
     // ''; // process.env.NODE_ENV === 'production' ? `https://${host}` : ``;
@@ -171,7 +171,7 @@ const buildClient = async (options = { liveClientBuildPaths: [], instances: [] }
     }

     if (fullBuildEnabled)
-      // !(confServer[host]['/'] && confServer[host]['/'].
+      // !(confServer[host]['/'] && confServer[host]['/'].liteBuild)
       await fullBuild({
         path,
         logger,
@@ -540,7 +540,7 @@ Sitemap: https://${host}${path === '/' ? '' : path}/sitemap.xml`,
     );
   }

-  if (!enableLiveRebuild && !process.argv.includes('l') && docsBuild) {
+  if (!enableLiveRebuild && !process.argv.includes('l') && !process.argv.includes('deploy') && docsBuild) {
    // fullBuildEnabled || process.argv.includes('docs')

    // https://www.pullrequest.com/blog/leveraging-jsdoc-for-better-code-documentation-in-javascript/
@@ -559,13 +559,24 @@ Sitemap: https://${host}${path === '/' ? '' : path}/sitemap.xml`,
    logger.warn('build jsdoc view', jsDocsConfig.opts.destination);
    shellExec(`npm run docs`, { silent: true });

+    // coverage
+    if (!fs.existsSync(`./coverage`)) {
+      shellExec(`npm test`);
+    }
+    const coverageBuildPath = `${jsDocsConfig.opts.destination}/coverage`;
+    fs.mkdirSync(coverageBuildPath, { recursive: true });
+    fs.copySync(`./coverage`, coverageBuildPath);
+
+    // uml
+    shellExec(`node bin/deploy uml ${host} ${path}`);
+
    // https://swagger-autogen.github.io/docs/

    const basePath = path === '/' ? `${process.env.BASE_API}` : `/${process.env.BASE_API}`;

    const doc = {
      info: {
-        version:
+        version: packageData.version, // by default: '1.0.0'
        title: metadata?.title ? `${metadata.title}` : 'REST API', // by default: 'REST API'
        description: metadata?.description ? metadata.description : '', // by default: ''
      },
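
With `swaggerApiVersion` removed, the swagger-autogen `doc.info.version` above is taken directly from the project's package.json via `packageData`. A rough standalone sketch of that idea, assuming a package.json exists in the working directory (the `doc` object mirrors the info block shown in the hunk above):

import fs from 'fs';

const packageData = JSON.parse(fs.readFileSync('./package.json', 'utf8'));

// OpenAPI/Swagger info block driven by package metadata; title and
// description fall back to defaults when no site metadata is configured.
const doc = {
  info: {
    version: packageData.version, // e.g. '2.7.1' for this release
    title: 'REST API',
    description: '',
  },
};

console.log(doc.info);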
package/src/server/dns.js
CHANGED
@@ -1,6 +1,7 @@
 import axios from 'axios';
 import dotenv from 'dotenv';
 import fs from 'fs';
+import cron from 'node-cron';

 import { ip } from './network.js';
 import { loggerFactory } from './logger.js';
@@ -58,7 +59,17 @@ const Dns = {
       }
     };
     await callback();
-
+    // every minute
+    cron.schedule(
+      '* * * * *',
+      async () => {
+        await callback();
+      },
+      {
+        scheduled: true,
+        timezone: process.env.TIME_ZONE || 'America/New_York',
+      },
+    );
   },
   services: {
     updateIp: {
package/src/server/logger.js
CHANGED
@@ -1,3 +1,9 @@
+/**
+ * Module for managing logger control and configuration
+ * @module src/server/logger.js
+ * @namespace Logger
+ */
+
 'use strict';

 import dotenv from 'dotenv';
@@ -68,7 +74,38 @@ const format = (meta) =>
   }),
 );

-
+/**
+ * Logs information about the current process environment to the console.
+ *
+ * This function is used to log details about
+ * the execution context, such as command-line arguments,
+ * environment variables, the process's administrative privileges,
+ * and the maximum available heap space size.
+ *
+ * @param {winston.Logger} logger - A pre-configured Winston logger object.
+ * @memberof Logger
+ */
+const setUpInfo = async (logger = new winston.Logger()) => {
+  logger.info('argv', process.argv);
+  logger.info('env', process.env.NODE_ENV);
+  logger.info('admin', await isAdmin());
+  logger.info('--max-old-space-size', {
+    total_available_size: formatBytes(v8.getHeapStatistics().total_available_size),
+  });
+};
+
+/**
+ * The function `loggerFactory` creates a logger instance with specified transports for printing out
+ * messages.
+ * @param meta - The `meta` parameter in the `loggerFactory` function is used to extract the last part
+ * of a URL and use it to create log files in a specific directory.
+ * @returns {winston.Logger} The `loggerFactory` function returns a logger instance created using Winston logger
+ * library. The logger instance is configured with various transports for printing out messages to
+ * different destinations such as the terminal, error.log file, and all.log file. The logger instance
+ * also has a method `setUpInfo` attached to it for setting up additional information.
+ * @memberof Logger
+ */
+const loggerFactory = (meta = { url: '' }) => {
   meta = meta.url.split('/').pop();
   // Define which transports the logger must use to print out messages.
   // In this example, we are using three different transports
@@ -98,17 +135,25 @@ const loggerFactory = (meta) => {
     // exitOnError: false,
   });
   logger.setUpInfo = async () => {
-    logger
-    logger.info('env', process.env.NODE_ENV);
-    logger.info('admin', await isAdmin());
-    logger.info('--max-old-space-size', {
-      total_available_size: formatBytes(v8.getHeapStatistics().total_available_size),
-    });
+    await setUpInfo(logger);
   };
   return logger;
 };

-
+/**
+ * The `loggerMiddleware` function creates a middleware for logging HTTP requests using Morgan with
+ * custom message format and options.
+ * @param meta - The `meta` parameter in the `loggerMiddleware` function is an object that contains
+ * information about the request URL. It has a default value of an empty object `{ url: '' }`. This
+ * object is used to provide additional metadata for logging purposes.
+ * @returns {Handler<any, any>} The `loggerMiddleware` function returns a middleware function that uses the Morgan library
+ * to log HTTP request information. The middleware function formats the log message using predefined
+ * tokens provided by Morgan and custom tokens like `:host` to include specific request details. The
+ * log message format includes information such as remote address, HTTP method, host, URL, status code,
+ * content length, and response time in milliseconds. The middleware
+ * @memberof Logger
+ */
+const loggerMiddleware = (meta = { url: '' }) => {
   const stream = {
     // Use the http severity
     write: (message) => loggerFactory(meta).http(message),
@@ -132,4 +177,4 @@ const loggerMiddleware = (meta) => {
   );
 };

-export { loggerFactory, loggerMiddleware };
+export { loggerFactory, loggerMiddleware, setUpInfo };
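
Since `setUpInfo` is now exported alongside `loggerFactory` and `loggerMiddleware`, callers can log the startup environment summary without going through the per-instance `logger.setUpInfo()` method. A usage sketch, assuming an ESM entry point inside this package (the import path is illustrative):

import { loggerFactory, setUpInfo } from './src/server/logger.js';

const logger = loggerFactory(import.meta);

// Logs process.argv, NODE_ENV, admin status and the available heap size.
await setUpInfo(logger);
logger.info('server starting');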
package/src/server/process.js
CHANGED
@@ -53,13 +53,13 @@ const ProcessController = {
   },
 };

-const shellExec = (cmd, options = { silent: false, async: false, stdout: false }) => {
-  logger.info(`cmd`, cmd);
+const shellExec = (cmd, options = { silent: false, async: false, stdout: false, disableLog: true }) => {
+  if (!options.disableLog) logger.info(`cmd`, cmd);
   return options.stdout ? shell.exec(cmd, options).stdout : shell.exec(cmd, options);
 };

-const shellCd = (cd) => {
-  logger.info(`cd`, cd);
+const shellCd = (cd, options = { disableLog: true }) => {
+  if (options.disableLog) logger.info(`cd`, cd);
   return shell.cd(cd);
 };

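
Note that `disableLog` defaults to true in the new `shellExec` signature, so commands are no longer echoed through the logger unless a caller opts in. A usage sketch, assuming the shelljs-backed helpers are imported from this module (the commands and import path are illustrative):

import { shellExec, shellCd } from './src/server/process.js';

// With the default options object, disableLog is true and the command line is not logged.
shellExec('git status');

// Passing a custom options object replaces the defaults entirely,
// so set disableLog explicitly when you want the command logged.
shellExec('npm run build', { disableLog: false, silent: false, stdout: false });

// Changes the shelljs working directory.
shellCd('./public');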
package/src/server/prompt-optimizer.js
ADDED

@@ -0,0 +1,28 @@
+// https://github.com/xenova/transformers.js/blob/f43d3dd348fd7b293008802590bb3a1afa218dc7/src/models.js#L10
+
+import { AutoModelForSeq2SeqLM, AutoTokenizer } from '@xenova/transformers';
+import { loggerFactory } from './logger.js';
+import dotenv from 'dotenv';
+
+dotenv.config();
+
+const logger = loggerFactory(import.meta);
+
+const tokenizer = await AutoTokenizer.from_pretrained('Xenova/t5-small');
+
+const model = await AutoModelForSeq2SeqLM.from_pretrained('Xenova/t5-small');
+
+const prompt = 'translate English to German: I love transformers!';
+
+logger.info('input', { prompt });
+
+const tokenizerData = await tokenizer(prompt);
+
+const { input_ids } = tokenizerData;
+
+const outputs = await model.generate(input_ids);
+
+for (const output of outputs) {
+  const decoded = tokenizer.decode(output, { skip_special_tokens: true });
+  logger.info('decoded', { decoded });
+}
package/startup.js
CHANGED
package/bin/help.js
DELETED
@@ -1,110 +0,0 @@
-import { loggerFactory } from '../src/server/logger.js';
-import colors from 'colors';
-
-colors.enable();
-
-const logger = loggerFactory(import.meta);
-
-// backup: `node bin/db <host><path> export <deploy-id>`
-// restore: `node bin/db <host><path> import <deploy-id>`
-// new-api-src: `node bin/deploy build-nodejs-src-api <api-id>`
-// text-to-image: `node bin/util text-to-image 's4()' white black 100x100`
-// sync-packages: `node bin/deploy update-package`
-// ssl: `npm run ssl <os> <deploy-id> <host>`
-// clean empty folder: `node bin/util delete-empty-folder`
-// sync env port: `node bin/deploy sync-env-port <deployId>`
-// node bin/vs import
-// node bin/vs export
-// build macro replica: `node bin/deploy build-macro-replica dd`
-// node bin/deploy update-version 2.5.2
-
-const data = {
-  help: `
----------------------------------------------------------------
-${`Help`.white}
----------------------------------------------------------------
-
-Arguments:
-
-> [optional] section: help | install | ssl
-> [optional] sections: section,section,...
-
-Command Line:
-
-> ${`node bin/help <section/s>`.yellow}
-`,
-  install: `
----------------------------------------------------------------
-${`Programs installer`.white}
----------------------------------------------------------------
-
-Arguments:
-
-> [required] os: windows
-> [required] program: certbot | xampp | docker | wordpress
-> [required] host/path: example.com | example.com/path | www.example.com
-
-Command Line:
-
-> ${`node bin/install <os> <program> <host/path>`.yellow}
-`,
-  ssl: `
----------------------------------------------------------------
-${`SSL management`.white}
----------------------------------------------------------------
-
-Arguments:
-
-> [required] os: windows
-> [required] hosts: example.com,www.example.com
-
-Command Line:
-
-> ${`node bin/ssl <os> <hosts>`.yellow}
-`,
-  mariadb: `
----------------------------------------------------------------
-${`DataBase management`.white}
----------------------------------------------------------------
-
-Arguments:
-
-> [required] operator: show | create | delete | import | export
-> [required] host/path: example.com | example.com/path | www.example.com
-
-Command Line:
-
-> ${`node bin/db <host/path> <operator>`.yellow}
-`,
-  shortcut: `
----------------------------------------------------------------
-${`Shortcut Generator`.white}
----------------------------------------------------------------
-
-Arguments:
-
-> [required] os: windows | linux
-> [required] env: development | production | test
-
-Command Line:
-
-> ${`node bin/shortcut <os> <env>`.yellow}
-`,
-  end: '---------------------------------------------------------------',
-};
-
-logger.info('argv', process.argv);
-
-const [exe, dir, sections] = process.argv;
-
-try {
-  let out = '';
-  if (!sections) Object.keys(data).map((section) => (out += data[section]));
-  else {
-    for (const section of sections.split(',')) out += data[section];
-    out += data['end'];
-  }
-  logger.info(out);
-} catch (error) {
-  logger.error(error, error.stack);
-}