esa-cli 0.0.2-beta.19 → 0.0.2-beta.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/commit/index.js +5 -0
- package/dist/commands/common/utils.js +84 -87
- package/dist/commands/deploy/index.js +1 -0
- package/dist/commands/deployments/delete.js +2 -2
- package/dist/commands/dev/ew2/devPack.js +9 -9
- package/dist/commands/dev/ew2/mock/kv.js +1 -1
- package/dist/commands/dev/ew2/server.js +1 -1
- package/dist/commands/dev/index.js +1 -1
- package/dist/commands/dev/mockWorker/devPack.js +3 -3
- package/dist/commands/init/helper.js +6 -7
- package/dist/commands/route/add.js +0 -1
- package/dist/commands/route/helper.js +9 -10
- package/dist/commands/utils.js +4 -4
- package/dist/components/filterSelector.js +1 -1
- package/dist/i18n/locales.json +2 -2
- package/dist/libs/apiService.js +3 -4
- package/dist/libs/logger.js +4 -4
- package/dist/utils/checkIsRoutineCreated.js +1 -1
- package/dist/utils/compress.js +5 -5
- package/dist/utils/download.js +2 -2
- package/dist/utils/fileUtils/index.js +33 -9
- package/package.json +1 -1
@@ -69,6 +69,11 @@ export function handleCommit(argv) {
        }
        logger.startSubStep('Generating code version');
        const res = yield generateCodeVersion(projectName, description, argv === null || argv === void 0 ? void 0 : argv.entry, argv === null || argv === void 0 ? void 0 : argv.assets, argv === null || argv === void 0 ? void 0 : argv.minify);
+        const { isSuccess } = res || {};
+        if (!isSuccess) {
+            logger.endSubStep('Generate version failed');
+            exit(1);
+        }
        const codeVersion = (_b = (_a = res === null || res === void 0 ? void 0 : res.res) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.CodeVersion;
        if (!codeVersion) {
            logger.endSubStep('Missing CodeVersion in response');
@@ -120,100 +120,97 @@ export function generateCodeVersion(projectName_1, description_1, entry_1, asset
    return __awaiter(this, arguments, void 0, function* (projectName, description, entry, assets, minify = false, projectPath) {
        var _a;
        const { zip, sourceList, dynamicSources } = yield compress(entry, assets, minify, projectPath);
-
-
-        const
-
-
-
-
-
-
-
-
-                        node.children.set(part, { children: new Map(), isFile: false });
-                    }
-                    const child = node.children.get(part);
-                    if (i === parts.length - 1)
-                        child.isFile = true;
-                    node = child;
+        // Pretty print upload directory tree
+        const buildTree = (paths, decorateTopLevel) => {
+            const root = { children: new Map(), isFile: false };
+            const sorted = [...paths].sort((a, b) => a.localeCompare(b));
+            for (const p of sorted) {
+                const parts = p.split('/').filter(Boolean);
+                let node = root;
+                for (let i = 0; i < parts.length; i++) {
+                    const part = parts[i];
+                    if (!node.children.has(part)) {
+                        node.children.set(part, { children: new Map(), isFile: false });
                    }
+                    const child = node.children.get(part);
+                    if (i === parts.length - 1)
+                        child.isFile = true;
+                    node = child;
                }
-            const lines = [];
-            const render = (node, prefix, depth) => {
-                const entries = [...node.children.entries()];
-                entries.forEach(([_name, _child], idx) => {
-                    const isLast = idx === entries.length - 1;
-                    const connector = isLast ? '└ ' : '├ ';
-                    const nextPrefix = prefix + (isLast ? ' ' : '│ ');
-                    const displayName = depth === 0 ? decorateTopLevel(_name) : _name;
-                    lines.push(prefix + connector + displayName);
-                    render(_child, nextPrefix, depth + 1);
-                });
-            };
-            render(root, '', 0);
-            return lines.length ? lines : ['-'];
-        };
-        const header = chalk.hex('#22c55e')('UPLOAD') + ' Files to be uploaded (source paths)';
-        logger.block();
-        logger.log(header);
-        const dynamicSet = new Set(dynamicSources);
-        const LIMIT = 300;
-        const staticPaths = sourceList
-            .filter((p) => !dynamicSet.has(p))
-            .sort((a, b) => a.localeCompare(b));
-        const dynamicPaths = sourceList
-            .filter((p) => dynamicSet.has(p))
-            .sort((a, b) => a.localeCompare(b));
-        let omitted = 0;
-        let shownStatic = staticPaths;
-        if (staticPaths.length > LIMIT) {
-            shownStatic = staticPaths.slice(0, LIMIT);
-            omitted = staticPaths.length - LIMIT;
            }
-
-        const
-
-
-
-
-
-
-
-
-
-                stat.hasStatic = true;
-            topLevelStats.set(top, stat);
+            const lines = [];
+            const render = (node, prefix, depth) => {
+                const entries = [...node.children.entries()];
+                entries.forEach(([_name, _child], idx) => {
+                    const isLast = idx === entries.length - 1;
+                    const connector = isLast ? '└ ' : '├ ';
+                    const nextPrefix = prefix + (isLast ? ' ' : '│ ');
+                    const displayName = depth === 0 ? decorateTopLevel(_name) : _name;
+                    lines.push(prefix + connector + displayName);
+                    render(_child, nextPrefix, depth + 1);
+                });
            };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            render(root, '', 0);
+            return lines.length ? lines : ['-'];
+        };
+        const header = chalk.hex('#22c55e')('UPLOAD') + ' Files to be uploaded (source paths)';
+        logger.block();
+        logger.log(header);
+        const dynamicSet = new Set(dynamicSources);
+        const LIMIT = 300;
+        const staticPaths = sourceList
+            .filter((p) => !dynamicSet.has(p))
+            .sort((a, b) => a.localeCompare(b));
+        const dynamicPaths = sourceList
+            .filter((p) => dynamicSet.has(p))
+            .sort((a, b) => a.localeCompare(b));
+        let omitted = 0;
+        let shownStatic = staticPaths;
+        if (staticPaths.length > LIMIT) {
+            shownStatic = staticPaths.slice(0, LIMIT);
+            omitted = staticPaths.length - LIMIT;
+        }
+        // Compute top-level markers based on whether a top-level bucket contains dynamic/static files
+        const topLevelStats = new Map();
+        const addStat = (p, isDynamic) => {
+            const top = p.split('/')[0] || p;
+            const stat = topLevelStats.get(top) || {
+                hasDynamic: false,
+                hasStatic: false
            };
-
-
-
-
-
-
-
-
+            if (isDynamic)
+                stat.hasDynamic = true;
+            else
+                stat.hasStatic = true;
+            topLevelStats.set(top, stat);
+        };
+        dynamicPaths.forEach((p) => addStat(p, true));
+        shownStatic.forEach((p) => addStat(p, false));
+        const dynamicMarker = chalk.bold.yellowBright(' (dynamic)');
+        const staticMarker = chalk.bold.greenBright(' (static)');
+        const decorateTopLevel = (name) => {
+            const stat = topLevelStats.get(name);
+            if (!stat)
+                return name;
+            if (stat.hasDynamic && stat.hasStatic) {
+                return `${name}${dynamicMarker}${staticMarker}`;
            }
-
+            if (stat.hasDynamic)
+                return `${name}${dynamicMarker}`;
+            if (stat.hasStatic)
+                return `${name}${staticMarker}`;
+            return name;
+        };
+        const combined = [...dynamicPaths, ...shownStatic];
+        const treeLines = buildTree(combined, decorateTopLevel);
+        for (const line of treeLines) {
+            logger.log(line);
        }
-
+        if (omitted > 0) {
+            const note = chalk.gray(`Only show the first ${LIMIT} static files, omitted ${omitted} files`);
+            logger.log(note);
+        }
+        logger.block();
        const projectConfig = getProjectConfig(projectPath);
        const notFoundStrategy = normalizeNotFoundStrategy((_a = projectConfig === null || projectConfig === void 0 ? void 0 : projectConfig.assets) === null || _a === void 0 ? void 0 : _a.notFoundStrategy);
        logger.startSubStep('Generating code version');
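Note: the block added above builds a small trie from the upload paths and renders it with box-drawing connectors, decorating only top-level entries. A rough, self-contained illustration of that rendering (the sample paths, identity decorator, and spacing are assumptions for illustration, not part of the package):

    // Sketch of the tree rendering introduced in this hunk (sample inputs are hypothetical).
    const buildTree = (paths, decorateTopLevel) => {
        const root = { children: new Map(), isFile: false };
        for (const p of [...paths].sort((a, b) => a.localeCompare(b))) {
            const parts = p.split('/').filter(Boolean);
            let node = root;
            parts.forEach((part, i) => {
                if (!node.children.has(part))
                    node.children.set(part, { children: new Map(), isFile: false });
                node = node.children.get(part);
                if (i === parts.length - 1)
                    node.isFile = true;
            });
        }
        const lines = [];
        const render = (node, prefix, depth) => {
            [...node.children.entries()].forEach(([name, child], idx, entries) => {
                const isLast = idx === entries.length - 1;
                lines.push(prefix + (isLast ? '└ ' : '├ ') + (depth === 0 ? decorateTopLevel(name) : name));
                render(child, prefix + (isLast ? '  ' : '│ '), depth + 1);
            });
        };
        render(root, '', 0);
        return lines.length ? lines : ['-'];
    };
    console.log(buildTree(['assets/index.html', 'assets/js/app.js', 'src/index.ts'], (n) => n).join('\n'));
    // ├ assets
    // │ ├ index.html
    // │ └ js
    // │   └ app.js
    // └ src
    //   └ index.ts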
@@ -52,7 +52,7 @@ export function handleDeleteDeployments(argv) {
        const isInteractive = argv.i;
        if (isInteractive) {
            const { allVersions, stagingVersions, productionVersions } = yield getRoutineCodeVersions(projectConfig.name);
-            //
+            // Show information about versions being deployed
            if (stagingVersions.length > 0 || productionVersions.length > 0) {
                logger.log(chalk.yellow('⚠️ Currently deploying versions:'));
                if (stagingVersions.length > 0) {
@@ -64,7 +64,7 @@ export function handleDeleteDeployments(argv) {
                    logger.log('');
                }
            logger.log(t('delete_deployments_table_title').d(' Version ID Description'));
-            //
+            // Filter out versions being deployed
            const selectList = allVersions
                .filter((item) => {
                var _a, _b;
@@ -16,7 +16,7 @@ import { getRoot, getDirName } from '../../../utils/fileUtils/base.js';
import { getDevConf } from '../../../utils/fileUtils/index.js';
import { EW2Path } from '../../../utils/installEw2.js';
import devBuild from '../build.js';
-//
+// Generate available Ew2 port
const generateEw2Port = () => __awaiter(void 0, void 0, void 0, function* () {
    let ew2port = 3322;
    let portAvailable = yield checkPort(ew2port);
@@ -60,7 +60,7 @@ conf_path = "${erConfPath}"
        fs.promises.writeFile(erConfPath, erConf)
    ]);
};
-//
+// Generate entry file
const generateEntry = (id, projectEntry, userRoot, port) => __awaiter(void 0, void 0, void 0, function* () {
    const __dirname = getDirName(import.meta.url);
    const devDir = path.resolve(userRoot, '.dev');
@@ -85,20 +85,20 @@ const generateEntry = (id, projectEntry, userRoot, port) => __awaiter(void 0, vo
        .replace(/'\$userPath'/g, `'${projectEntry.replace(/\\/g, '/')}'`)
        .replace(/\$userPort/g, `${port}`));
});
-//
+// Preliminary preparation
const prepare = (configPath, entry, port, localUpstream, userRoot) => __awaiter(void 0, void 0, void 0, function* () {
    const options = {};
    const currentOptions = { entry, port, localUpstream };
-    //
+    // Support running multiple workers simultaneously
    const id = new Date().getTime().toString();
    // @ts-ignore
    global.id = id;
-    //
+    // Generate entry file
    yield generateEntry(id, entry, userRoot, port);
-    //
+    // Generate Ew2 configuration
    const ew2port = yield generateEw2Port();
    yield writeEw2config(id, ew2port, userRoot);
-    //
+    // Configuration items for each dev session, distinguished by id in one file
    if (fs.existsSync(configPath)) {
        const currentConfig = fs
            .readFileSync(configPath, 'utf-8')
@@ -106,7 +106,7 @@ const prepare = (configPath, entry, port, localUpstream, userRoot) => __awaiter(
        const currentConfigObj = JSON.parse(currentConfig);
        const currentIds = Object.keys(currentConfigObj);
        if (currentIds[0] && /^\d+$/.test(currentIds[0])) {
-            //
+            // Remove unused entries
            for (let currentId of currentIds) {
                const unused = yield checkPort(currentConfigObj[currentId].port);
                if (unused) {
@@ -155,7 +155,7 @@ const devPack = () => __awaiter(void 0, void 0, void 0, function* () {
    }
    else {
        logger.notInProject();
-        process.exit(
+        process.exit(1);
    }
    return prepare(path.resolve(userRoot, '.dev/devConfig.js'), projectEntry, port, localUpstream, userRoot)
        .then(() => {
@@ -187,7 +187,7 @@ class Ew2Server {
            agent: new HttpProxyAgent(`http://127.0.0.1:${ew2Port}`)
        });
        const workerHeaders = Object.fromEntries(workerRes.headers.entries());
-        //
+        // Solve gzip compatibility issue, prevent net::ERR_CONTENT_DECODING_FAILED
        workerHeaders['content-encoding'] = 'identity';
        if (workerRes.body) {
            res.writeHead(workerRes.status, workerHeaders);
@@ -40,12 +40,12 @@ const generateEntry = (id, projectEntry, userRoot) => __awaiter(void 0, void 0,
const prepare = (configPath, entry, port, localUpstream, userRoot) => __awaiter(void 0, void 0, void 0, function* () {
    const options = {};
    const currentOptions = { entry, port, localUpstream };
-    //
+    // Support running multiple deno instances simultaneously
    const id = new Date().getTime().toString();
    // @ts-ignore
    global.id = id;
    yield generateEntry(id, entry, userRoot);
-    //
+    // Configuration items for each dev session, distinguished by id in one file
    if (fs.existsSync(configPath)) {
        const currentConfig = fs
            .readFileSync(configPath, 'utf-8')
@@ -101,7 +101,7 @@ const devPack = () => __awaiter(void 0, void 0, void 0, function* () {
    }
    else {
        logger.notInProject();
-        process.exit(
+        process.exit(1);
    }
    return prepare(path.resolve(userRoot, '.dev/devConfig.js'), projectEntry, port, localUpstream, userRoot)
        .then(() => {
@@ -100,7 +100,7 @@ export function checkAndUpdatePackage(packageName) {
        const spinner = logger.ora;
        spinner.text = t('checking_template_update').d('Checking esa-template updates...');
        spinner.start();
-        //
+        // Get currently installed version
        const __dirname = getDirName(import.meta.url);
        const packageJsonPath = path.join(__dirname, '../../../');
        let versionInfo;
@@ -124,7 +124,7 @@ export function checkAndUpdatePackage(packageName) {
        }
        const match = versionInfo.match(new RegExp(`(${packageName})@([0-9.]+)`));
        const currentVersion = match ? match[2] : '';
-        //
+        // Get latest version
        const latestVersion = execSync(`npm view ${packageName} version`, {
            cwd: packageJsonPath
        })
@@ -173,7 +173,7 @@ export function checkAndUpdatePackage(packageName) {
    });
}
export const getFrameworkConfig = (framework) => {
-    //
+    // Read template.jsonc from init directory
    const templatePath = path.join(getDirName(import.meta.url), 'template.jsonc');
    const jsonc = fs.readFileSync(templatePath, 'utf-8');
    const json = JSON.parse(jsonc);
@@ -184,7 +184,7 @@ export const getFrameworkConfig = (framework) => {
 * @returns 框架全部配置
 */
export const getAllFrameworkConfig = () => {
-    //
+    // Read template.jsonc from init directory
    const templatePath = path.join(getDirName(import.meta.url), 'template.jsonc');
    const jsonc = fs.readFileSync(templatePath, 'utf-8');
    const json = JSON.parse(jsonc);
@@ -231,7 +231,7 @@ export function getInitParamsFromArgv(argv) {
    params.deploy = Boolean(a.deploy);
    return params;
}
-//
+// Configure project name
export const configProjectName = (initParams) => __awaiter(void 0, void 0, void 0, function* () {
    if (initParams.name) {
        log.step(`Project name configured ${initParams.name}`);
@@ -608,7 +608,7 @@ export function getGitVersion() {
}
export function isGitInstalled() {
    return __awaiter(this, void 0, void 0, function* () {
-        return (yield getGitVersion()) !== '';
+        return (yield getGitVersion()) !== '' && (yield getGitVersion()) !== null;
    });
}
/**
@@ -684,7 +684,6 @@ function ensureGitignore(projectRoot, assetsDirectory) {
            ? `${existingContent.replace(/\n$/, '')}\n${toAppend.join('\n')}\n`
            : `${toAppend.join('\n')}\n`;
        fs.writeFileSync(gitignorePath, newContent, 'utf-8');
-        logger.log('Updated .gitignore');
    }
    catch (_a) {
        // Do not fail init due to .gitignore issues
@@ -72,8 +72,7 @@ export const transferRouteToRuleString = (routePath) => {
export const transferRuleStringToRoute = (ruleStr) => {
    if (!ruleStr) {
        return '';
-    }
-    // 去掉外层括号并按 " and " 分割
+    } // Remove outer brackets and split by " and "
    const cleanedRule = ruleStr.replace(/^\(|\)$/g, '');
    const parts = cleanedRule.split(' and ');
    if (parts.length !== 2) {
@@ -81,36 +80,36 @@ export const transferRuleStringToRoute = (ruleStr) => {
    }
    let host = '';
    let uriPath = '';
-    //
+    // Process host part
    const hostPart = parts[0].trim();
    if (hostPart.startsWith(`${RuleMatchOperatorEndsWith}(${RuleMatchTypeHost},`)) {
-        // host
+        // Logic when host matches eq
        // ends_with(http.host, "value")
        const match = hostPart.match(/ends_with\(http\.host,\s*"([^"]+)"\)/);
        if (match) {
-            host = `*${match[1]}`; //
+            host = `*${match[1]}`; // Add prefix *
        }
    }
    else if (hostPart.startsWith(`${RuleMatchTypeHost} ${RuleMatchOperatorEq}`)) {
-        // host
+        // Logic when host matches eq
        // http.host eq "value"
        const match = hostPart.match(/http\.host eq "([^"]+)"/);
        if (match) {
            host = match[1];
        }
    }
-    //
+    // Process uriPath part
    const uriPathPart = parts[1].trim();
    if (uriPathPart.startsWith(`${RuleMatchOperatorStartsWith}(${RuleMatchTypeUriPath},`)) {
-        // uriPath
+        // Logic when uriPath matches startsWith
        // starts_with(http.request.uri.path, "value")
        const match = uriPathPart.match(/starts_with\(http\.request\.uri\.path,\s*"([^"]+)"\)/);
        if (match) {
-            uriPath = `${match[1]}*`; //
+            uriPath = `${match[1]}*`; // Add suffix *
        }
    }
    else if (uriPathPart.startsWith(`${RuleMatchTypeUriPath} ${RuleMatchOperatorEq}`)) {
-        // uriPath
+        // Logic when uriPath matches eq
        // http.request.uri.path eq "value"
        const match = uriPathPart.match(/http\.request\.uri\.path eq "([^"]+)"/);
        if (match) {
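The two branches above invert transferRouteToRuleString: wildcard hosts come back from ends_with(...) and wildcard paths from starts_with(...). A minimal sketch of that extraction with a hypothetical rule string (how the function finally combines host and uriPath into a route is outside this hunk):

    // Hypothetical rule string; only the two regexes shown in this hunk are exercised.
    const ruleStr = '(ends_with(http.host, ".example.com") and starts_with(http.request.uri.path, "/api/"))';
    const cleanedRule = ruleStr.replace(/^\(|\)$/g, '');
    const [hostPart, uriPathPart] = cleanedRule.split(' and ').map((s) => s.trim());
    const hostMatch = hostPart.match(/ends_with\(http\.host,\s*"([^"]+)"\)/);
    const host = hostMatch ? `*${hostMatch[1]}` : '';    // "*.example.com"
    const pathMatch = uriPathPart.match(/starts_with\(http\.request\.uri\.path,\s*"([^"]+)"\)/);
    const uriPath = pathMatch ? `${pathMatch[1]}*` : ''; // "/api/*"
    console.log(host, uriPath);                          // *.example.com /api/*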
package/dist/commands/utils.js
CHANGED
@@ -71,7 +71,7 @@ export const bindRoutineWithDomain = (name, domain) => __awaiter(void 0, void 0,
export function validName(name) {
    return /^[a-zA-Z0-9-_]+$/.test(name);
}
-//
+// Validate if domain is valid
export function validDomain(domain) {
    return /^(?:[a-z0-9-](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}$/.test(domain);
}
@@ -131,12 +131,12 @@ export function validateLoginCredentials(accessKeyId_1, accessKeySecret_1, endpo
    });
}
export function isValidRouteForDomain(route, domain) {
-    //
-    //
+    // Build a regex that allows subdomains and arbitrary paths
+    // For example, match URLs like *.example.com/*
    return route.includes(domain);
}
export function escapeRegExp(string) {
-    return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $&
+    return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& represents the entire matched string
}
export const getAllSites = () => __awaiter(void 0, void 0, void 0, function* () {
    var _a;
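In the escapeRegExp line above, the `$&` replacement pattern re-inserts the matched character, so each regex metacharacter is prefixed with a backslash and treated literally. A tiny illustration (input is hypothetical):

    const escapeRegExp = (string) => string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    console.log(escapeRegExp('*.example.com'));                               // \*\.example\.com
    console.log(new RegExp(escapeRegExp('*.example.com')).test('*.example.com')); // true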
@@ -51,7 +51,7 @@ export const FilterSelector = ({ data, onSubmit, hideCount = 20 }) => {
    else if (tabPressCount === 1) {
        const filteredDataInner = data.filter((site) => site.label.includes(inputValue));
        setFilteredData(filteredDataInner);
-        //
+        // Enter selection mode when match results >= 1
        if ((filteredDataInner.length >= 1 &&
            showAll &&
            filteredDataInner.length > hideCount) ||
package/dist/i18n/locales.json
CHANGED
@@ -204,8 +204,8 @@
    "zh_CN": "读取 CLI 配置错误"
  },
  "dev_error_no_project_config": {
-    "en": "Error reading project configuration esa.toml file.",
-    "zh_CN": "读取项目配置 esa.toml 文件错误"
+    "en": "Error reading project configuration esa.jsonc (recommended) or esa.toml file.",
+    "zh_CN": "读取项目配置 esa.jsonc (推荐) 或 esa.toml 文件错误"
  },
  "dev_build_start": {
    "en": "Starting build process",
package/dist/libs/apiService.js
CHANGED
@@ -90,7 +90,6 @@ export class ApiService {
                }
            }
            catch (error) {
-                console.log('error', error);
                return {
                    success: false,
                    message: t('login_failed').d('An error occurred while trying to log in.')
@@ -101,15 +100,15 @@ export class ApiService {
    quickDeployRoutine(edgeRoutine) {
        return __awaiter(this, void 0, void 0, function* () {
            try {
-                //
+                // Upload code to unstable version
                const stagingRes = yield this.getRoutineStagingCodeUploadInfo(edgeRoutine);
                if (stagingRes) {
-                    //
+                    // Production version
                    const commitRes = yield this.commitRoutineStagingCode({
                        Name: edgeRoutine.name,
                        CodeDescription: edgeRoutine.description
                    });
-                    //
+                    // Deploy to production environment
                    if (commitRes) {
                        const deployRes = yield this.publishRoutineCodeVersion({
                            Name: edgeRoutine.name,
package/dist/libs/logger.js
CHANGED
@@ -180,7 +180,7 @@ class Logger {
        this.block();
        this.log('If there is code to deploy, you can either:');
        this.subLog(`- Specify an entry-point to your Routine via the command line (ex: ${chalk.green('esa deploy src/index.ts')})`);
-        this.subLog('- Or
+        this.subLog('- Or create an "esa.jsonc" file (recommended):');
        console.log('```jsonc\n' +
            '{\n' +
            '  "name": "my-routine",\n' +
@@ -188,7 +188,7 @@ class Logger {
            '  "dev": { "port": 18080 }\n' +
            '}\n' +
            '```');
-        this.subLog('- Or, if you prefer TOML,
+        this.subLog('- Or, if you prefer TOML, create an "esa.toml" file:');
        console.log('```toml\n' +
            'name = "my-routine"\n' +
            'entry = "src/index.ts"\n' +
@@ -197,7 +197,7 @@ class Logger {
            'port = 18080\n' +
            '```\n');
        this.log('If you are deploying a directory of static assets, you can either:');
-        this.subLog(`-
+        this.subLog(`- Create an "esa.jsonc" file (recommended) and run ${chalk.green('esa deploy -a ./dist')}`);
        console.log('```jsonc\n' +
            '{\n' +
            '  "name": "my-routine",\n' +
@@ -206,7 +206,7 @@ class Logger {
            '  }\n' +
            '}\n' +
            '```');
-        this.subLog(`- Or
+        this.subLog(`- Or create an "esa.toml" file and run ${chalk.green('esa deploy -a ./dist')}`);
        console.log('```toml\n' +
            'name = "my-routine"\n' +
            '\n' +
@@ -26,7 +26,7 @@ export function validRoutine(name) {
        const isCreatedRoutine = yield isRoutineExist(name);
        if (!isCreatedRoutine) {
            logger.warn(`${t('routine_not_exist').d('Routine does not exist, please create a new one. Run command:')} ${chalk.greenBright('esa deploy')}`);
-            exit(
+            exit(1);
        }
    });
}
package/dist/utils/compress.js
CHANGED
@@ -43,10 +43,10 @@ const compress = (scriptEntry_1, assetsDir_1, ...args_1) => __awaiter(void 0, [s
    let assetsDirectory = assetsDir || ((_a = projectConfig === null || projectConfig === void 0 ? void 0 : projectConfig.assets) === null || _a === void 0 ? void 0 : _a.directory);
    const routineType = checkEdgeRoutineType(scriptEntry, assetsDir, projectPath);
    if (!projectConfig && !scriptEntry && !assetsDir) {
-        logger.error('esa.jsonc or esa.toml is not found and script entry or assets directory is not provided by command line');
-        exit(
+        logger.error('esa.jsonc (recommended) or esa.toml is not found and script entry or assets directory is not provided by command line');
+        exit(1);
    }
-    //
+    // Parameter priority: use parameters if available, otherwise use values from config file
    const entry = scriptEntry || (projectConfig === null || projectConfig === void 0 ? void 0 : projectConfig.entry);
    if (routineType === EDGE_ROUTINE_TYPE.NOT_EXIST) {
        const errorMessage = [
@@ -61,7 +61,7 @@ const compress = (scriptEntry_1, assetsDir_1, ...args_1) => __awaiter(void 0, [s
            chalk.cyan('🔍 Possible issue causes:'),
            chalk.white(' 1. Entry file path is incorrect or file does not exist'),
            chalk.white(' 2. Assets directory path is incorrect or directory does not exist'),
-            chalk.white(` 3. Project configuration file ${chalk.yellow('esa.jsonc')} or ${chalk.yellow('esa.toml')} format error`),
+            chalk.white(` 3. Project configuration file ${chalk.yellow('esa.jsonc')} (recommended) or ${chalk.yellow('esa.toml')} format error`),
            chalk.white(` 4. Relative path format error, please use ${chalk.yellow('./xxx')} format`),
            '',
            chalk.yellow.bold(`📍 Please check if the following ${chalk.red('absolute paths')} are correct:`),
@@ -83,7 +83,7 @@ const compress = (scriptEntry_1, assetsDir_1, ...args_1) => __awaiter(void 0, [s
            ''
        ].join('\n');
        logger.error(errorMessage);
-        exit(
+        exit(1);
    }
    if (routineType === EDGE_ROUTINE_TYPE.JS_ONLY ||
        routineType === EDGE_ROUTINE_TYPE.JS_AND_ASSETS) {
package/dist/utils/download.js
CHANGED
@@ -92,8 +92,8 @@ function isBinDirInPath(binDir) {
 */
function addBinDirToPath(binDir) {
    return __awaiter(this, void 0, void 0, function* () {
-        //
-        // setx
+        // Use setx to add to PATH
+        // setx has a 2047 character limit for PATH
        const command = `setx Path "%Path%;${binDir}"`;
        try {
            yield execAsync(command);
@@ -17,7 +17,19 @@ import { getDirName, getRoot } from './base.js';
const projectConfigFile = 'esa.toml';
const __dirname = getDirName(import.meta.url);
const root = getRoot();
-
+// Function to get the actual project config file path (supports both .jsonc and .toml)
+export const getProjectConfigPath = (filePath = root) => {
+    const configFormats = ['esa.jsonc', 'esa.toml'];
+    for (const format of configFormats) {
+        const configPath = path.join(filePath, format);
+        if (fs.existsSync(configPath)) {
+            return configPath;
+        }
+    }
+    // Default to .jsonc if no config file exists
+    return path.join(filePath, 'esa.jsonc');
+};
+export const projectConfigPath = getProjectConfigPath();
export const cliConfigPath = path.join(os.homedir(), '.esa/config/default.toml');
// Function to get the actual config file path (supports both .toml and .jsonc)
export const getCliConfigPath = () => {
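The new getProjectConfigPath probes for esa.jsonc first and falls back to esa.toml, defaulting to an esa.jsonc path when neither file exists yet. A short usage sketch under that assumption (the directory layouts below are hypothetical):

    import path from 'path';
    import fs from 'fs';
    // Same lookup order as the helper added above: esa.jsonc wins over esa.toml.
    const getProjectConfigPath = (filePath) => {
        for (const name of ['esa.jsonc', 'esa.toml']) {
            const candidate = path.join(filePath, name);
            if (fs.existsSync(candidate))
                return candidate;
        }
        return path.join(filePath, 'esa.jsonc'); // default when no config file exists
    };
    // Hypothetical layouts:
    //   my-app/ contains only esa.toml          -> my-app/esa.toml
    //   my-app/ contains esa.jsonc and esa.toml -> my-app/esa.jsonc (jsonc preferred)
    //   my-app/ contains neither                -> my-app/esa.jsonc (to be created)
    console.log(getProjectConfigPath('./my-app'));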
@@ -39,7 +51,7 @@ export const generateHiddenConfigDir = () => {
export const generateToml = (path) => {
    if (!fs.existsSync(path)) {
        fs.writeFileSync(path, '', 'utf-8');
-        //
+        // Add default endpoint
        const defaultConfig = {
            endpoint: 'esa.cn-hangzhou.aliyuncs.com'
        };
@@ -53,16 +65,29 @@ export const generateDefaultConfig = () => {
};
export function updateProjectConfigFile(configUpdate_1) {
    return __awaiter(this, arguments, void 0, function* (configUpdate, filePath = root) {
-        const configPath =
+        const configPath = getProjectConfigPath(filePath);
        try {
            let configFileContent = yield fsPromises.readFile(configPath, 'utf8');
-            let config
-
-
+            let config;
+            let updatedConfigString;
+            // Detect file format based on file extension
+            if (configPath.endsWith('.jsonc') || configPath.endsWith('.json')) {
+                // Handle JSONC format
+                const jsonContent = configFileContent.replace(/\/\*[\s\S]*?\*\/|\/\/.*$/gm, '');
+                config = JSON.parse(jsonContent);
+                config = Object.assign(Object.assign({}, config), configUpdate);
+                updatedConfigString = JSON.stringify(config, null, 2) + '\n';
+            }
+            else {
+                // Handle TOML format (default)
+                config = toml.parse(configFileContent);
+                config = Object.assign(Object.assign({}, config), configUpdate);
+                updatedConfigString = toml.stringify(config);
+            }
            yield fsPromises.writeFile(configPath, updatedConfigString);
        }
        catch (error) {
-            logger.error(`Error updating
+            logger.error(`Error updating config file: ${error}`);
            logger.pathEacces(__dirname);
        }
    });
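updateProjectConfigFile now branches on the file extension: .jsonc/.json content has // and /* */ comments stripped before JSON.parse and is rewritten as plain JSON, while anything else still goes through toml.parse/toml.stringify. A minimal sketch of the JSONC branch in isolation (the sample config text is hypothetical; note that the stripped comments are not preserved when the file is rewritten):

    // JSONC branch in isolation: strip comments, merge the update, re-serialize as plain JSON.
    const configFileContent = [
        '{',
        '  // project name',
        '  "name": "my-routine",',
        '  "entry": "src/index.ts"',
        '}'
    ].join('\n');
    const configUpdate = { name: 'renamed-routine' };
    const jsonContent = configFileContent.replace(/\/\*[\s\S]*?\*\/|\/\/.*$/gm, '');
    const config = Object.assign({}, JSON.parse(jsonContent), configUpdate);
    const updatedConfigString = JSON.stringify(config, null, 2) + '\n';
    console.log(updatedConfigString);
    // {
    //   "name": "renamed-routine",
    //   "entry": "src/index.ts"
    // }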
@@ -114,7 +139,6 @@ export function readConfigFile(configPath) {
        }
    }
    catch (error) {
-        console.log(error);
        logger.error(`Error parsing config file: ${error}`);
        return null;
    }
@@ -160,7 +184,7 @@ export function getConfigurations() {
    }
}
export function generateConfigFile(projectName_1, initConfigs_1, targetDir_1) {
-    return __awaiter(this, arguments, void 0, function* (projectName, initConfigs, targetDir, configFormat = '
+    return __awaiter(this, arguments, void 0, function* (projectName, initConfigs, targetDir, configFormat = 'jsonc', notFoundStrategy) {
    var _a, _b;
    const outputDir = targetDir !== null && targetDir !== void 0 ? targetDir : process.cwd();
    const currentDirName = path.basename(outputDir);