@dainprotocol/cli 1.2.28 → 1.2.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/__tests__/build.test.js +289 -0
- package/dist/__tests__/deploy.test.js +10 -27
- package/dist/__tests__/dev.test.js +189 -0
- package/dist/__tests__/init.test.js +290 -0
- package/dist/__tests__/integration.test.js +5 -22
- package/dist/__tests__/testchat.test.js +214 -0
- package/dist/__tests__/utils.test.js +324 -0
- package/dist/commands/build.js +28 -61
- package/dist/commands/deploy.js +91 -156
- package/dist/commands/dev.js +22 -83
- package/dist/commands/init.js +2 -9
- package/dist/commands/logs.js +46 -111
- package/dist/commands/start.js +15 -4
- package/dist/commands/status.js +25 -62
- package/dist/commands/undeploy.js +23 -64
- package/dist/index.js +0 -7
- package/dist/templates/default/dain.json +1 -1
- package/dist/utils.js +112 -37
- package/package.json +1 -1
package/dist/commands/deploy.js
CHANGED
@@ -1,15 +1,4 @@
 "use strict";
-var __assign = (this && this.__assign) || function () {
-__assign = Object.assign || function(t) {
-for (var s, i = 1, n = arguments.length; i < n; i++) {
-s = arguments[i];
-for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
-t[p] = s[p];
-}
-return t;
-};
-return __assign.apply(this, arguments);
-};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
 function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
 return new (P || (P = Promise))(function (resolve, reject) {
@@ -58,31 +47,7 @@ var path_1 = __importDefault(require("path"));
 var archiver_1 = __importDefault(require("archiver"));
 var build_1 = __importDefault(require("./build"));
 var START_DEPLOY_URI = "/api/app/data/deployments/start-deploy";
-var
-function fetchWithTimeout(url_1, options_1) {
-return __awaiter(this, arguments, void 0, function (url, options, timeoutMs) {
-var controller, timeoutId, response;
-if (timeoutMs === void 0) { timeoutMs = FETCH_TIMEOUT_MS; }
-return __generator(this, function (_a) {
-switch (_a.label) {
-case 0:
-controller = new AbortController();
-timeoutId = setTimeout(function () { return controller.abort(); }, timeoutMs);
-_a.label = 1;
-case 1:
-_a.trys.push([1, , 3, 4]);
-return [4 /*yield*/, fetch(url, __assign(__assign({}, options), { signal: controller.signal }))];
-case 2:
-response = _a.sent();
-return [2 /*return*/, response];
-case 3:
-clearTimeout(timeoutId);
-return [7 /*endfinally*/];
-case 4: return [2 /*return*/];
-}
-});
-});
-}
+var DEPLOY_TIMEOUT_MS = 120000;
 function deploy(options) {
 return __awaiter(this, void 0, void 0, function () {
 var config, spinner, basePath, deployPath, isProduction, environmentName, _a, repoName, branchName, projectId, _b, result, response, envArray, _c, error_1;
@@ -91,8 +56,8 @@ function deploy(options) {
 case 0:
 config = (0, utils_1.getDainConfig)(options.config);
 spinner = (0, ora_1.default)("Deploying project...").start();
-basePath = config["api-base-url"] ||
-deployPath =
+basePath = config["api-base-url"] || utils_1.DEFAULT_API_BASE_URL;
+deployPath = (0, utils_1.joinUrl)(basePath, START_DEPLOY_URI);
 isProduction = config["environment"] === "production";
 environmentName = config["environment"] || "production";
 _a = checkGitConfig(), repoName = _a.repoName, branchName = _a.branchName;
@@ -114,28 +79,21 @@ function deploy(options) {
 case 4:
 _d.trys.push([4, 18, , 19]);
 if (!(projectId && repoName && branchName)) return [3 /*break*/, 7];
-// Deploy project using github repository
 spinner.info("Deploying project using github repository...").start();
-return [4 /*yield*/, fetchWithTimeout(
+return [4 /*yield*/, (0, utils_1.fetchWithTimeout)((0, utils_1.joinUrl)(basePath, "/api/app/codegen/data/projects/".concat(projectId, "/start-deploy")), {
 method: "POST",
 headers: { "Content-Type": "application/json" },
-body: JSON.stringify({
-
-isProduction: isProduction,
-branch: branchName,
-}),
-})];
+body: JSON.stringify({ environment: environmentName, isProduction: isProduction, branch: branchName }),
+}, DEPLOY_TIMEOUT_MS)];
 case 5:
 response = _d.sent();
-if (!response.ok)
+if (!response.ok)
 throw new Error("Deploy failed: ".concat(response.status, " ").concat(response.statusText));
-}
 return [4 /*yield*/, response.json()];
 case 6:
 result = _d.sent();
 return [3 /*break*/, 14];
 case 7:
-// Deploy project using local files
 spinner.info("Deploying project using local files...").start();
 return [4 /*yield*/, loadAndValidateEnvVariables()];
 case 8:
@@ -149,7 +107,7 @@ function deploy(options) {
 _d.label = 11;
 case 11:
 _d.trys.push([11, 13, , 14]);
-return [4 /*yield*/, fetchWithTimeout(result.service.url, { method: "GET" }, 10000)];
+return [4 /*yield*/, (0, utils_1.fetchWithTimeout)(result.service.url, { method: "GET" }, 10000)];
 case 12:
 _d.sent();
 return [3 /*break*/, 14];
@@ -180,7 +138,6 @@ function deploy(options) {
 });
 });
 }
-// Loads and validates environment variables
 function loadAndValidateEnvVariables() {
 return __awaiter(this, void 0, void 0, function () {
 var envArray;
@@ -202,7 +159,6 @@ function loadAndValidateEnvVariables() {
 });
 });
 }
-// Deploys and pushes files to the platform
 function deployAndPushFiles(startDeployUrl, envArray, isProduction, environmentName) {
 return __awaiter(this, void 0, void 0, function () {
 var projectZip, projectZipBuffer, formData, response, errorText;
@@ -211,151 +167,130 @@ function deployAndPushFiles(startDeployUrl, envArray, isProduction, environmentN
 case 0: return [4 /*yield*/, zipDirectory("./", "project.zip")];
 case 1:
 projectZip = _a.sent();
-
+_a.label = 2;
 case 2:
+_a.trys.push([2, , 7, 9]);
+return [4 /*yield*/, fs_extra_1.default.readFile(projectZip)];
+case 3:
 projectZipBuffer = _a.sent();
 formData = new FormData();
 formData.append("array", JSON.stringify(envArray));
 formData.append("file", new Blob([new Uint8Array(projectZipBuffer)]), "project.zip");
 formData.append("isProduction", isProduction.toString());
 formData.append("environment", environmentName);
-return [4 /*yield*/, fetchWithTimeout(startDeployUrl, {
-
-body: formData,
-})];
-case 3:
+return [4 /*yield*/, (0, utils_1.fetchWithTimeout)(startDeployUrl, { method: "POST", body: formData }, DEPLOY_TIMEOUT_MS)];
+case 4:
 response = _a.sent();
-if (!!response.ok) return [3 /*break*/,
+if (!!response.ok) return [3 /*break*/, 6];
 return [4 /*yield*/, response.text()];
-case
+case 5:
 errorText = _a.sent();
 throw new Error("Deployment failed: ".concat(response.status, " ").concat(response.statusText, ". Response: ").concat(errorText));
-case
+case 6: return [2 /*return*/, response.json()];
+case 7: return [4 /*yield*/, fs_extra_1.default.remove(projectZip).catch(function () { return undefined; })];
+case 8:
+_a.sent();
+return [7 /*endfinally*/];
+case 9: return [2 /*return*/];
 }
 });
 });
 }
-// Reads the .env file and returns an array of key-value pairs
 function loadEnvVariables() {
 return __awaiter(this, void 0, void 0, function () {
-var files, envFile,
-return __generator(this, function (
-switch (
+var files, envFile, _a;
+return __generator(this, function (_b) {
+switch (_b.label) {
 case 0: return [4 /*yield*/, fs_extra_1.default.readdir("./")];
 case 1:
-files =
+files = _b.sent();
 envFile = files.find(function (file) { return file === ".env"; });
 if (!envFile) {
 (0, utils_1.logError)("Environment file not found. Please create a .env file in the build directory.");
 process.exit(1);
 }
+_a = utils_1.parseEnvContent;
 return [4 /*yield*/, fs_extra_1.default.readFile(path_1.default.join("./", envFile), "utf-8")];
-case 2:
-envContent = _a.sent();
-return [2 /*return*/, envContent
-.split("\n")
-.filter(function (line) { return line.trim() && !line.trim().startsWith("#"); })
-.map(function (line) {
-var equalsIndex = line.indexOf("=");
-if (equalsIndex === -1)
-return null;
-var name = line.substring(0, equalsIndex).trim();
-var value = line.substring(equalsIndex + 1).trim();
-// Remove surrounding quotes if present
-var unquotedValue = value.replace(/^["']|["']$/g, "");
-return { name: name, value: unquotedValue };
-})
-.filter(function (env) {
-return env !== null && env.name !== "" && env.value !== "";
-})];
+case 2: return [2 /*return*/, _a.apply(void 0, [_b.sent()])];
 }
 });
 });
 }
-// Zips the current directory and returns the path to the zip file
 function zipDirectory(sourceDir, outputZip) {
-return
-
-
-
-
-
-
-
-reject(err);
-});
-archive.pipe(output);
-archive.glob("**/*", { ignore: ["node_modules/**", "project.zip"] });
-archive.finalize();
-})];
-});
+return new Promise(function (resolve, reject) {
+var output = fs_extra_1.default.createWriteStream(outputZip);
+var archive = (0, archiver_1.default)("zip", { zlib: { level: 9 } });
+output.on("close", function () { return resolve(outputZip); });
+archive.on("error", function (err) { (0, utils_1.logError)("Error during zipping: ", err); reject(err); });
+archive.pipe(output);
+archive.glob("**/*", { ignore: ["node_modules/**", "project.zip", ".git/**", ".env*", ".dain/**"] });
+archive.finalize();
 });
 }
-
+function checkGitConfig() {
+var _a, _b, _c;
 try {
-// Read git config and get repo name
 var gitConfig = fs_extra_1.default.readFileSync(".git/config", "utf-8");
-var remote = gitConfig.match(/\[remote "origin"\]\
-var
-
+var remote = gitConfig.match(/\[remote "origin"\][\s\S]*?url = ([^\n]+)/);
+var remoteUrl = remote ? remote[1].trim() : null;
+var repoName = ((_a = remoteUrl === null || remoteUrl === void 0 ? void 0 : remoteUrl.match(/git@github.com:(.+)\.git/)) === null || _a === void 0 ? void 0 : _a[1])
+|| ((_b = remoteUrl === null || remoteUrl === void 0 ? void 0 : remoteUrl.match(/https?:\/\/github.com\/(.+)\.git/)) === null || _b === void 0 ? void 0 : _b[1])
+|| null;
 var gitHead = fs_extra_1.default.readFileSync(".git/HEAD", "utf-8");
-var
-var branchName = branch ? branch[1] : null;
+var branchName = ((_c = gitHead.match(/ref: refs\/heads\/(.*)/)) === null || _c === void 0 ? void 0 : _c[1]) || null;
 return { repoName: repoName, branchName: branchName };
 }
-catch (
+catch (_d) {
 return { repoName: null, branchName: null };
 }
-}
-
-
-
-
-
-
-
-
-
-
-
+}
+function getProjectId(_a) {
+return __awaiter(this, arguments, void 0, function (_b) {
+var response, _c;
+var basePath = _b.basePath, repoName = _b.repoName;
+return __generator(this, function (_d) {
+switch (_d.label) {
+case 0:
+_d.trys.push([0, 2, , 3]);
+return [4 /*yield*/, (0, utils_1.fetchWithTimeout)((0, utils_1.joinUrl)(basePath, "/api/app/codegen/data/projects/get-by-repo?repo=".concat(encodeURIComponent(repoName))), { method: "GET" })];
+case 1:
+response = _d.sent();
+return [2 /*return*/, response.ok ? response.json() : null];
+case 2:
+_c = _d.sent();
 return [2 /*return*/, null];
-return [2 /*return
-
-
-return [2 /*return*/, null];
-case 3: return [2 /*return*/];
-}
+case 3: return [2 /*return*/];
+}
+});
 });
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-case 5: return [2 /*return*/];
-}
+}
+function updateDainJson(result) {
+return __awaiter(this, void 0, void 0, function () {
+var dainJson, _a, _b, error_2;
+return __generator(this, function (_c) {
+switch (_c.label) {
+case 0:
+_c.trys.push([0, 3, , 4]);
+_b = (_a = JSON).parse;
+return [4 /*yield*/, fs_extra_1.default.readFile("./dain.json", "utf-8")];
+case 1:
+dainJson = _b.apply(_a, [_c.sent()]);
+dainJson["deployment-id"] = result.deploymentId;
+dainJson["service-id"] = result.serviceId;
+return [4 /*yield*/, fs_extra_1.default.writeFile("./dain.json", JSON.stringify(dainJson, null, 2))];
+case 2:
+_c.sent();
+return [3 /*break*/, 4];
+case 3:
+error_2 = _c.sent();
+(0, utils_1.logError)("Failed to update dain.json", error_2);
+return [3 /*break*/, 4];
+case 4: return [2 /*return*/];
+}
+});
 });
-}
-
+}
+function printDeploymentResult(result, spinner) {
 console.log("\n-----------------------");
 spinner.succeed("Deployment URL: ".concat(result.service.url));
 spinner.succeed("Deployment ID: ".concat(result.deploymentId));
@@ -363,4 +298,4 @@ var printDeploymentResult = function (result, spinner) {
 spinner.succeed("Deployment completed successfully.");
 spinner.info("You can access logs using `dain logs -w` command.");
 console.log("-----------------------");
-}
+}
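
The deploy.js changes replace the file-local fetchWithTimeout helper and the inline .env parsing with shared helpers imported from utils.js (utils_1.fetchWithTimeout, utils_1.joinUrl, utils_1.parseEnvContent, utils_1.DEFAULT_API_BASE_URL), apply a 120-second DEPLOY_TIMEOUT_MS to deploy requests, remove the temporary project.zip in a finally step, and extend the zip ignore list with .git/**, .env* and .dain/**. The utils.js diff is not included in this excerpt, so the TypeScript sketch below only illustrates plausible shapes for those shared helpers, inferred from the inline code removed above and from the call sites; the published implementations may differ.

    // Sketch only: shapes inferred from the removed inline code in deploy.js.
    export async function fetchWithTimeout(
      url: string,
      options: RequestInit = {},
      timeoutMs = 30000, // assumed default; deploy.js passes DEPLOY_TIMEOUT_MS or 10000 explicitly
    ): Promise<Response> {
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
      try {
        // Abort the request once the timeout elapses.
        return await fetch(url, { ...options, signal: controller.signal });
      } finally {
        clearTimeout(timeoutId);
      }
    }

    export function joinUrl(base: string, path: string): string {
      // Assumed behaviour: join segments without doubling the slash between them.
      return `${base.replace(/\/+$/, "")}/${path.replace(/^\/+/, "")}`;
    }

    export function parseEnvContent(content: string): Array<{ name: string; value: string }> {
      // Mirrors the .env parsing previously inlined in loadEnvVariables().
      return content
        .split("\n")
        .filter((line) => line.trim() && !line.trim().startsWith("#"))
        .map((line) => {
          const equalsIndex = line.indexOf("=");
          if (equalsIndex === -1) return null;
          const name = line.substring(0, equalsIndex).trim();
          // Strip surrounding quotes if present.
          const value = line.substring(equalsIndex + 1).trim().replace(/^["']|["']$/g, "");
          return { name, value };
        })
        .filter((env): env is { name: string; value: string } =>
          env !== null && env.name !== "" && env.value !== "");
    }
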
package/dist/commands/dev.js
CHANGED
@@ -60,7 +60,6 @@ var miniflare_1 = require("miniflare");
 var build_1 = __importDefault(require("./build"));
 var fs_extra_1 = __importDefault(require("fs-extra"));
 var net_1 = require("net");
-// Module state
 var childProcess = null;
 var watcher = null;
 var mf = null;
@@ -71,13 +70,8 @@ var isCleaningUp = false;
 function isPortAvailable(port) {
 return new Promise(function (resolve) {
 var server = (0, net_1.createServer)()
-.listen(port, function () {
-
-resolve(true);
-})
-.on('error', function () {
-resolve(false);
-});
+.listen(port, function () { server.close(); resolve(true); })
+.on('error', function () { return resolve(false); });
 });
 }
 function cleanup() {
@@ -134,14 +128,11 @@ function startProcess(mainFile, envVars, isRestart) {
 }
 var spinner = (0, ora_1.default)(isRestart ? 'Restarting development server...' : 'Starting development server...').start();
 var hasStarted = false;
-// Use spawn with args array to prevent command injection
 var tsNodePath = path_1.default.join(process.cwd(), 'node_modules', '.bin', 'ts-node');
-// Check if ts-node exists
 if (!fs_extra_1.default.existsSync(tsNodePath)) {
 spinner.fail('ts-node not found. Run: npm install ts-node typescript');
 return;
 }
-// Check if main file exists
 if (!fs_extra_1.default.existsSync(mainFile)) {
 spinner.fail("Main file not found: ".concat(mainFile));
 return;
@@ -149,7 +140,7 @@ function startProcess(mainFile, envVars, isRestart) {
 childProcess = (0, child_process_1.spawn)(tsNodePath, [mainFile], {
 env: __assign(__assign({}, process.env), envVars),
 stdio: ['inherit', 'pipe', 'pipe'],
-shell: false,
+shell: false,
 });
 var markStarted = function (success) {
 if (hasStarted)
@@ -166,58 +157,32 @@ function startProcess(mainFile, envVars, isRestart) {
 spinner.fail('Development server error.');
 }
 };
-(_a = childProcess.stdout) === null || _a === void 0 ? void 0 : _a.on('data', function (data) {
-markStarted(true);
-process.stdout.write(data.toString());
-});
+(_a = childProcess.stdout) === null || _a === void 0 ? void 0 : _a.on('data', function (data) { markStarted(true); process.stdout.write(data.toString()); });
 (_b = childProcess.stderr) === null || _b === void 0 ? void 0 : _b.on('data', function (data) {
 var output = data.toString();
-
-if (output.includes('Error:') || output.includes('error:')) {
-markStarted(false);
-}
-else {
-markStarted(true);
-}
+markStarted(output.includes('Error:') || output.includes('error:') ? false : true);
 process.stderr.write(output);
 });
 childProcess.on('close', function (code) {
-if (code !== 0 && code !== null && !hasStarted)
+if (code !== 0 && code !== null && !hasStarted)
 spinner.fail("Development server exited with code ".concat(code));
-}
-childProcess = null;
-});
-childProcess.on('error', function (error) {
-spinner.fail("Failed to start: ".concat(error.message));
 childProcess = null;
 });
+childProcess.on('error', function (error) { spinner.fail("Failed to start: ".concat(error.message)); childProcess = null; });
 }
 function dev(options) {
 return __awaiter(this, void 0, void 0, function () {
-var config, port,
+var config, port, portNumber, runtime, mainFile, resolvedMain, envVars, proxySetup, watchPaths, dainDir, outFile, MFconfig_1, debounceTimer_1, error_1;
 return __generator(this, function (_a) {
 switch (_a.label) {
 case 0:
 config = (0, utils_1.getDainConfig)(options.config);
-
-port = process.env.PORT;
-portSource = '.env file';
-}
-else if (options.port) {
-port = options.port;
-portSource = 'command line argument';
-}
-else {
-port = '2022';
-portSource = 'default value';
-}
+port = process.env.PORT || options.port || '2022';
 portNumber = parseInt(port, 10);
 if (isNaN(portNumber) || portNumber < 1 || portNumber > 65535) {
 (0, utils_1.logError)("Invalid port: ".concat(port, ". Must be 1-65535. Using default port 2022"));
 port = '2022';
-portSource = 'default value (after invalid port)';
 }
-(0, utils_1.logInfo)("Using port ".concat(port, " (from ").concat(portSource, ")"));
 runtime = options.runtime || config.runtime || 'node';
 mainFile = config['main-file'];
 resolvedMain = path_1.default.resolve(process.cwd(), mainFile);
@@ -232,32 +197,23 @@ function dev(options) {
 DAIN_ENVIRONMENT: config['environment'],
 DAIN_OUT_DIR: config['out-dir'],
 };
-// Register signal handlers once
 process.once('SIGINT', function () { return gracefulShutdown(0); });
 process.once('SIGTERM', function () { return gracefulShutdown(0); });
-process.once('uncaughtException', function (error) {
-
-gracefulShutdown(1);
-});
-process.once('unhandledRejection', function (reason) {
-(0, utils_1.logError)('Unhandled Rejection:', reason);
-gracefulShutdown(1);
-});
+process.once('uncaughtException', function (error) { (0, utils_1.logError)('Uncaught Exception:', error); gracefulShutdown(1); });
+process.once('unhandledRejection', function (reason) { (0, utils_1.logError)('Unhandled Rejection:', reason); gracefulShutdown(1); });
 _a.label = 1;
 case 1:
 _a.trys.push([1, 9, , 10]);
-
-return [4 /*yield*/, isPortAvailable(parsedPort)];
+return [4 /*yield*/, isPortAvailable(portNumber)];
 case 2:
 if (!(_a.sent())) {
-(0, utils_1.logError)("Port ".concat(
+(0, utils_1.logError)("Port ".concat(portNumber, " is already in use. Use --port to specify a different port."));
 process.exit(1);
 }
 process.env.PORT = port;
 if (!!options.noproxy) return [3 /*break*/, 4];
-if (!config['api-key'])
+if (!config['api-key'])
 throw new Error("'api-key' is required when using development proxy");
-}
 return [4 /*yield*/, (0, utils_1.setupProxy)(port, config['api-key'], config)];
 case 3:
 proxySetup = _a.sent();
@@ -269,19 +225,10 @@ function dev(options) {
 startProcess(mainFile, envVars);
 watchPaths = [
 path_1.default.dirname(mainFile),
-config['static-dir']
-? path_1.default.join(process.cwd(), config['static-dir'])
-: (0, utils_1.getStaticFilesPath)(),
+config['static-dir'] ? path_1.default.join(process.cwd(), config['static-dir']) : (0, utils_1.getStaticFilesPath)(),
 ].filter(function (p) { return fs_extra_1.default.existsSync(p); });
-watcher = chokidar_1.default.watch(watchPaths, {
-
-persistent: true,
-ignoreInitial: true,
-});
-watcher.on('change', function (changedPath) {
-(0, utils_1.logInfo)("File ".concat(changedPath, " changed. Restarting..."));
-startProcess(mainFile, envVars, true);
-});
+watcher = chokidar_1.default.watch(watchPaths, { ignored: /(^|[\/\\])\./, persistent: true, ignoreInitial: true });
+watcher.on('change', function (changedPath) { (0, utils_1.logInfo)("File ".concat(changedPath, " changed. Restarting...")); startProcess(mainFile, envVars, true); });
 (0, utils_1.logInfo)('Watching for file changes...');
 return [3 /*break*/, 8];
 case 5:
@@ -291,25 +238,17 @@ function dev(options) {
 return [4 /*yield*/, (0, build_1.default)({ config: options.config, runtime: 'workers', watch: true })];
 case 6:
 _a.sent();
-MFconfig_1 = {
-scriptPath: outFile,
-modules: true,
-port: parseInt(port, 10),
-log: new miniflare_1.Log(miniflare_1.LogLevel.DEBUG),
-liveReload: true,
-};
+MFconfig_1 = { scriptPath: outFile, modules: true, port: parseInt(port, 10), log: new miniflare_1.Log(miniflare_1.LogLevel.DEBUG), liveReload: true };
 mf = new miniflare_1.Miniflare(MFconfig_1);
 (0, utils_1.logSuccess)("Miniflare server started on port ".concat(port));
 debounceTimer_1 = null;
 fs_extra_1.default.watch(dainDir, { recursive: true }, function (_eventType, filename) {
 if (debounceTimer_1)
 clearTimeout(debounceTimer_1);
-debounceTimer_1 = setTimeout(function () {
-
-
-
-}
-}, 300);
+debounceTimer_1 = setTimeout(function () { if (mf) {
+mf.setOptions(MFconfig_1);
+(0, utils_1.logInfo)("Build updated (".concat(filename, ")"));
+} }, 300);
 });
 (0, utils_1.logInfo)('Watching for file changes in source and build directories...');
 return [3 /*break*/, 8];
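
In dev.js, the port selection collapses the previous if/else ladder (and its portSource bookkeeping) into a single precedence chain, and the port-availability probe now closes its temporary server before resolving. A minimal TypeScript sketch of the same logic, with cliPort standing in for options.port (hypothetical names, mirroring the compiled output above):

    import { createServer } from "net";

    // Precedence from the diff: PORT env var, then --port, then the default '2022'.
    function resolvePort(cliPort?: string): string {
      let port = process.env.PORT || cliPort || "2022";
      const portNumber = parseInt(port, 10);
      if (isNaN(portNumber) || portNumber < 1 || portNumber > 65535) {
        console.error(`Invalid port: ${port}. Must be 1-65535. Using default port 2022`);
        port = "2022";
      }
      return port;
    }

    // The probe binds briefly, then closes the listener so the dev server can take the port.
    function isPortAvailable(port: number): Promise<boolean> {
      return new Promise((resolve) => {
        const server = createServer()
          .listen(port, () => { server.close(); resolve(true); })
          .on("error", () => resolve(false));
      });
    }
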
package/dist/commands/init.js
CHANGED
@@ -13,25 +13,19 @@ function init(projectName) {
 var projectDir = path_1.default.join(process.cwd(), projectName);
 var templateDir = path_1.default.join(__dirname, '..', '..', 'templates', 'default');
 try {
-
-
-var versionNum = Number(nodeVersion.slice(1).split('.')[0]);
-if (versionNum < 20) {
+var nodeVersion = Number(process.version.slice(1).split('.')[0]);
+if (nodeVersion < 20) {
 spinner.fail('Node.js version 20.7 or higher is required');
 (0, utils_1.logError)('Please upgrade Node.js to version 20.7 or higher');
 (0, utils_1.logInfo)('You can download it from: https://nodejs.org/');
 process.exit(1);
 }
-// Check if directory already exists
 if (fs_extra_1.default.existsSync(projectDir)) {
 spinner.fail("Directory ".concat(projectName, " already exists"));
 process.exit(1);
 }
-// Create project directory
 fs_extra_1.default.ensureDirSync(projectDir);
-// Copy template files
 fs_extra_1.default.copySync(templateDir, projectDir);
-// Modify package.json
 var packageJsonPath = path_1.default.join(projectDir, 'package.json');
 var packageJson = fs_extra_1.default.readJsonSync(packageJsonPath);
 packageJson.name = projectName;
@@ -42,7 +36,6 @@ function init(projectName) {
 (0, utils_1.logInfo)(" cd ".concat(projectName));
 (0, utils_1.logInfo)(' npm install');
 (0, utils_1.logInfo)(' npm run dev');
-// Explicitly exit with success
 process.exit(0);
 }
 catch (error) {