@lark-apaas/fullstack-cli 1.1.16-beta.5 → 1.1.16-beta.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +525 -55
- package/package.json +1 -1
- package/templates/.spark_project +16 -0
- package/templates/nest-cli.json +1 -1
- package/templates/scripts/build.sh +7 -2
- package/templates/scripts/dev.js +275 -0
- package/templates/scripts/dev.sh +1 -244
package/dist/index.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
// src/index.ts
|
|
2
|
-
import
|
|
3
|
-
import
|
|
2
|
+
import fs26 from "fs";
|
|
3
|
+
import path22 from "path";
|
|
4
4
|
import { fileURLToPath as fileURLToPath5 } from "url";
|
|
5
5
|
import { config as dotenvConfig } from "dotenv";
|
|
6
6
|
|
|
@@ -1041,6 +1041,8 @@ async function fetchColumnComments(connectionString, options = {}) {
|
|
|
1041
1041
|
const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS;
|
|
1042
1042
|
const url = new URL(connectionString);
|
|
1043
1043
|
const schemaName = url.searchParams.get("schema") ?? "public";
|
|
1044
|
+
const start = Date.now();
|
|
1045
|
+
console.log(`[fetchColumnComments] \u2192 Querying pg_description for schema=${schemaName} (timeout=${timeoutMs}ms)`);
|
|
1044
1046
|
const sql = postgres(connectionString, {
|
|
1045
1047
|
connect_timeout: Math.ceil(timeoutMs / 1e3),
|
|
1046
1048
|
idle_timeout: Math.ceil(timeoutMs / 1e3)
|
|
@@ -1069,6 +1071,7 @@ async function fetchColumnComments(connectionString, options = {}) {
|
|
|
1069
1071
|
const key = `${row.tableName}.${row.columnName}`;
|
|
1070
1072
|
commentMap.set(key, row.comment);
|
|
1071
1073
|
}
|
|
1074
|
+
console.log(`[fetchColumnComments] \u2190 Fetched ${commentMap.size} column comments (${Date.now() - start}ms)`);
|
|
1072
1075
|
return commentMap;
|
|
1073
1076
|
} finally {
|
|
1074
1077
|
await sql.end().catch(() => {
|
|
@@ -1167,6 +1170,79 @@ function addJsonbTypeComments(source, columnComments) {
|
|
|
1167
1170
|
return { text: result.join("\n"), added };
|
|
1168
1171
|
}
|
|
1169
1172
|
|
|
1173
|
+
// src/commands/db/gen-dbschema/transforms/text/synced-table-comments.ts
|
|
1174
|
+
var TABLE_COMMENT = "Synced table: data is auto-synced from external source. Do not rename or delete this table.";
|
|
1175
|
+
var FIELD_COMMENT = "Synced field: auto-synced, do not modify or delete";
|
|
1176
|
+
var TABLE_DEF_REGEX = /^(export const\s+\w+\s*=\s*(?:pgTable|pgView|pgMaterializedView)\(\s*["'`])([^"'`]+)(["'`])/;
|
|
1177
|
+
var FIELD_WITH_NAME_REGEX = /^\s*[\w"']+\s*:\s*\w+\(\s*["'`]([^"'`]+)["'`]/;
|
|
1178
|
+
var FIELD_PROP_NAME_REGEX = /^\s*([\w]+)\s*:/;
|
|
1179
|
+
function addSyncedTableComments(source, syncedTableMap) {
|
|
1180
|
+
if (!syncedTableMap || syncedTableMap.size === 0) {
|
|
1181
|
+
return { text: source, added: 0 };
|
|
1182
|
+
}
|
|
1183
|
+
const lines = source.split("\n");
|
|
1184
|
+
const result = [];
|
|
1185
|
+
let added = 0;
|
|
1186
|
+
let currentSyncedFields = null;
|
|
1187
|
+
let insideTableBody = false;
|
|
1188
|
+
let braceDepth = 0;
|
|
1189
|
+
for (let i = 0; i < lines.length; i++) {
|
|
1190
|
+
const line = lines[i];
|
|
1191
|
+
const tableMatch = line.match(TABLE_DEF_REGEX);
|
|
1192
|
+
if (tableMatch) {
|
|
1193
|
+
const tableName = tableMatch[2];
|
|
1194
|
+
const syncedFields = syncedTableMap.get(tableName);
|
|
1195
|
+
if (syncedFields) {
|
|
1196
|
+
currentSyncedFields = syncedFields;
|
|
1197
|
+
insideTableBody = true;
|
|
1198
|
+
braceDepth = 0;
|
|
1199
|
+
const prevLine = result[result.length - 1]?.trim() ?? "";
|
|
1200
|
+
if (!prevLine.includes("Synced table")) {
|
|
1201
|
+
const indentMatch = line.match(/^\s*/);
|
|
1202
|
+
const indent = indentMatch ? indentMatch[0] : "";
|
|
1203
|
+
result.push(`${indent}// ${TABLE_COMMENT}`);
|
|
1204
|
+
added++;
|
|
1205
|
+
}
|
|
1206
|
+
}
|
|
1207
|
+
}
|
|
1208
|
+
if (insideTableBody) {
|
|
1209
|
+
for (const ch of line) {
|
|
1210
|
+
if (ch === "{") braceDepth++;
|
|
1211
|
+
if (ch === "}") braceDepth--;
|
|
1212
|
+
}
|
|
1213
|
+
if (braceDepth <= 0) {
|
|
1214
|
+
insideTableBody = false;
|
|
1215
|
+
currentSyncedFields = null;
|
|
1216
|
+
}
|
|
1217
|
+
if (currentSyncedFields && braceDepth >= 1 && !tableMatch) {
|
|
1218
|
+
const columnName = extractColumnName2(line);
|
|
1219
|
+
if (columnName && currentSyncedFields.has(columnName)) {
|
|
1220
|
+
const prevLine = result[result.length - 1]?.trim() ?? "";
|
|
1221
|
+
if (!prevLine.includes("Synced field")) {
|
|
1222
|
+
const indentMatch = line.match(/^\s*/);
|
|
1223
|
+
const indent = indentMatch ? indentMatch[0] : "";
|
|
1224
|
+
result.push(`${indent}// ${FIELD_COMMENT}`);
|
|
1225
|
+
added++;
|
|
1226
|
+
}
|
|
1227
|
+
}
|
|
1228
|
+
}
|
|
1229
|
+
}
|
|
1230
|
+
result.push(line);
|
|
1231
|
+
}
|
|
1232
|
+
return { text: result.join("\n"), added };
|
|
1233
|
+
}
|
|
1234
|
+
function extractColumnName2(line) {
|
|
1235
|
+
const withNameMatch = line.match(FIELD_WITH_NAME_REGEX);
|
|
1236
|
+
if (withNameMatch) {
|
|
1237
|
+
return withNameMatch[1];
|
|
1238
|
+
}
|
|
1239
|
+
const propMatch = line.match(FIELD_PROP_NAME_REGEX);
|
|
1240
|
+
if (propMatch) {
|
|
1241
|
+
return propMatch[1];
|
|
1242
|
+
}
|
|
1243
|
+
return null;
|
|
1244
|
+
}
|
|
1245
|
+
|
|
1170
1246
|
// src/commands/db/gen-dbschema/transforms/text/table-aliases.ts
|
|
1171
1247
|
var TABLE_ALIAS_MARKER = "// table aliases";
|
|
1172
1248
|
function generateTableAliases(source) {
|
|
@@ -1215,13 +1291,16 @@ function postprocessSchema(rawSource, options = {}) {
|
|
|
1215
1291
|
source = inlineCustomTypes(source);
|
|
1216
1292
|
const jsonbCommentsResult = addJsonbTypeComments(source, options.columnComments);
|
|
1217
1293
|
source = jsonbCommentsResult.text;
|
|
1294
|
+
const syncedCommentsResult = addSyncedTableComments(source, options.syncedTableMap);
|
|
1295
|
+
source = syncedCommentsResult.text;
|
|
1218
1296
|
source = generateTableAliases(source);
|
|
1219
1297
|
source = formatSource(source);
|
|
1220
1298
|
return {
|
|
1221
1299
|
source,
|
|
1222
1300
|
astStats,
|
|
1223
1301
|
patchedDefects: patchResult.fixed,
|
|
1224
|
-
addedJsonbComments: jsonbCommentsResult.added
|
|
1302
|
+
addedJsonbComments: jsonbCommentsResult.added,
|
|
1303
|
+
addedSyncedComments: syncedCommentsResult.added
|
|
1225
1304
|
};
|
|
1226
1305
|
}
|
|
1227
1306
|
function logStats(result, prefix = "[postprocess]") {
|
|
@@ -1271,6 +1350,9 @@ function logStats(result, prefix = "[postprocess]") {
|
|
|
1271
1350
|
if (result.addedJsonbComments > 0) {
|
|
1272
1351
|
console.info(`${prefix} Added ${result.addedJsonbComments} JSDoc comments for jsonb fields`);
|
|
1273
1352
|
}
|
|
1353
|
+
if (result.addedSyncedComments > 0) {
|
|
1354
|
+
console.info(`${prefix} Added ${result.addedSyncedComments} comments for synced tables/fields`);
|
|
1355
|
+
}
|
|
1274
1356
|
}
|
|
1275
1357
|
|
|
1276
1358
|
// src/commands/db/gen-dbschema/index.ts
|
|
@@ -1281,7 +1363,10 @@ async function postprocessDrizzleSchema(targetPath, options = {}) {
|
|
|
1281
1363
|
return void 0;
|
|
1282
1364
|
}
|
|
1283
1365
|
const rawSource = fs3.readFileSync(resolvedPath, "utf8");
|
|
1284
|
-
const result = postprocessSchema(rawSource, {
|
|
1366
|
+
const result = postprocessSchema(rawSource, {
|
|
1367
|
+
columnComments: options.columnComments,
|
|
1368
|
+
syncedTableMap: options.syncedTableMap
|
|
1369
|
+
});
|
|
1285
1370
|
fs3.writeFileSync(resolvedPath, result.source, "utf8");
|
|
1286
1371
|
logStats(result, "[postprocess-drizzle-schema]");
|
|
1287
1372
|
return {
|
|
@@ -1291,10 +1376,112 @@ async function postprocessDrizzleSchema(targetPath, options = {}) {
|
|
|
1291
1376
|
patchedDefects: result.patchedDefects,
|
|
1292
1377
|
replacedTimestamps: result.astStats.replacedTimestamp,
|
|
1293
1378
|
replacedDefaultNow: result.astStats.replacedDefaultNow,
|
|
1294
|
-
addedJsonbComments: result.addedJsonbComments
|
|
1379
|
+
addedJsonbComments: result.addedJsonbComments,
|
|
1380
|
+
addedSyncedComments: result.addedSyncedComments
|
|
1295
1381
|
};
|
|
1296
1382
|
}
|
|
1297
1383
|
|
|
1384
|
+
// src/utils/http-client.ts
|
|
1385
|
+
import { HttpClient } from "@lark-apaas/http-client";
|
|
1386
|
+
var clientInstance = null;
|
|
1387
|
+
function getHttpClient() {
|
|
1388
|
+
if (!clientInstance) {
|
|
1389
|
+
clientInstance = new HttpClient({
|
|
1390
|
+
timeout: 3e4,
|
|
1391
|
+
platform: {
|
|
1392
|
+
enabled: true
|
|
1393
|
+
}
|
|
1394
|
+
});
|
|
1395
|
+
const canaryEnv = process.env.FORCE_FRAMEWORK_CLI_CANARY_ENV;
|
|
1396
|
+
if (canaryEnv) {
|
|
1397
|
+
clientInstance.interceptors.request.use((req) => {
|
|
1398
|
+
req.headers["x-tt-env"] = canaryEnv;
|
|
1399
|
+
return req;
|
|
1400
|
+
});
|
|
1401
|
+
}
|
|
1402
|
+
}
|
|
1403
|
+
return clientInstance;
|
|
1404
|
+
}
|
|
1405
|
+
|
|
1406
|
+
// src/commands/db/gen-dbschema/utils/fetch-synced-tables.ts
|
|
1407
|
+
var DEFAULT_TIMEOUT_MS2 = 1e4;
|
|
1408
|
+
async function fetchSyncedTables(appId, workspace) {
|
|
1409
|
+
try {
|
|
1410
|
+
const client = getHttpClient();
|
|
1411
|
+
const timeoutPromise = new Promise((_, reject) => {
|
|
1412
|
+
setTimeout(
|
|
1413
|
+
() => reject(new Error(`Timeout after ${DEFAULT_TIMEOUT_MS2}ms`)),
|
|
1414
|
+
DEFAULT_TIMEOUT_MS2
|
|
1415
|
+
);
|
|
1416
|
+
});
|
|
1417
|
+
const start = Date.now();
|
|
1418
|
+
console.log(
|
|
1419
|
+
`[fetchSyncedTables] \u2192 GET listTableView (dbBranch=main) appId=${appId ? "set" : "unset"} workspace=${workspace ? "set" : "unset"}`
|
|
1420
|
+
);
|
|
1421
|
+
const response = await Promise.race([
|
|
1422
|
+
client.get(
|
|
1423
|
+
`/api/v1/dataloom/inner/app/${appId}/workspaces/${workspace}/listTableView`,
|
|
1424
|
+
{ params: { dbBranch: "main" }, headers: { "x-supaas-bizsource": "miaoda" } }
|
|
1425
|
+
),
|
|
1426
|
+
timeoutPromise
|
|
1427
|
+
]);
|
|
1428
|
+
console.log(
|
|
1429
|
+
`[fetchSyncedTables] \u2190 listTableView response: ${response.status} ${response.statusText} (${Date.now() - start}ms) with logId=${response.headers.get("x-tt-logid")}`
|
|
1430
|
+
);
|
|
1431
|
+
if (!response.ok) {
|
|
1432
|
+
throw new Error(
|
|
1433
|
+
`listTableView API failed: ${response.status} ${response.statusText}`
|
|
1434
|
+
);
|
|
1435
|
+
}
|
|
1436
|
+
let json;
|
|
1437
|
+
try {
|
|
1438
|
+
json = await response.json();
|
|
1439
|
+
} catch (error) {
|
|
1440
|
+
console.warn(
|
|
1441
|
+
"[fetchSyncedTables] \u26A0 Failed to parse listTableView response JSON, returning empty map:",
|
|
1442
|
+
error instanceof Error ? error.message : String(error)
|
|
1443
|
+
);
|
|
1444
|
+
return /* @__PURE__ */ new Map();
|
|
1445
|
+
}
|
|
1446
|
+
const tableView = json?.data?.data;
|
|
1447
|
+
if (!tableView) {
|
|
1448
|
+
console.warn(
|
|
1449
|
+
"[fetchSyncedTables] \u26A0 listTableView response missing data.data, returning empty map"
|
|
1450
|
+
);
|
|
1451
|
+
return /* @__PURE__ */ new Map();
|
|
1452
|
+
}
|
|
1453
|
+
const syncedMap = extractSyncedTableMap(tableView);
|
|
1454
|
+
const totalCount = (tableView.table?.data?.length ?? 0) + (tableView.view?.data?.length ?? 0) + (tableView.materializedView?.data?.length ?? 0);
|
|
1455
|
+
console.log(
|
|
1456
|
+
`[fetchSyncedTables] \u2713 Extracted synced tables: ${syncedMap.size}/${totalCount} (elapsed ${Date.now() - start}ms)`
|
|
1457
|
+
);
|
|
1458
|
+
return syncedMap;
|
|
1459
|
+
} catch (error) {
|
|
1460
|
+
console.error(
|
|
1461
|
+
"[fetchSyncedTables] \u274C Error fetching synced tables:",
|
|
1462
|
+
error
|
|
1463
|
+
);
|
|
1464
|
+
return /* @__PURE__ */ new Map();
|
|
1465
|
+
}
|
|
1466
|
+
}
|
|
1467
|
+
function extractSyncedTableMap(tableView) {
|
|
1468
|
+
const syncedMap = /* @__PURE__ */ new Map();
|
|
1469
|
+
const allTables = [
|
|
1470
|
+
...tableView.table?.data ?? [],
|
|
1471
|
+
...tableView.view?.data ?? [],
|
|
1472
|
+
...tableView.materializedView?.data ?? []
|
|
1473
|
+
];
|
|
1474
|
+
for (const table of allTables) {
|
|
1475
|
+
if (table.bitableSyncTask && table.bitableSyncTask.fieldApiNameList?.length > 0) {
|
|
1476
|
+
syncedMap.set(
|
|
1477
|
+
table.tableName,
|
|
1478
|
+
new Set(table.bitableSyncTask.fieldApiNameList)
|
|
1479
|
+
);
|
|
1480
|
+
}
|
|
1481
|
+
}
|
|
1482
|
+
return syncedMap;
|
|
1483
|
+
}
|
|
1484
|
+
|
|
1298
1485
|
// src/commands/db/gen-nest-resource/generator.ts
|
|
1299
1486
|
import { pluralize } from "inflection";
|
|
1300
1487
|
|
|
@@ -1973,7 +2160,9 @@ async function run(options = {}) {
|
|
|
1973
2160
|
}
|
|
1974
2161
|
const databaseUrl = process.env.SUDA_DATABASE_URL;
|
|
1975
2162
|
if (!databaseUrl) {
|
|
1976
|
-
console.error(
|
|
2163
|
+
console.error(
|
|
2164
|
+
"[gen-db-schema] Error: SUDA_DATABASE_URL environment variable is required"
|
|
2165
|
+
);
|
|
1977
2166
|
process.exit(1);
|
|
1978
2167
|
}
|
|
1979
2168
|
const outputPath = options.output || process.env.DB_SCHEMA_OUTPUT || "server/database/schema.ts";
|
|
@@ -1988,9 +2177,14 @@ async function run(options = {}) {
|
|
|
1988
2177
|
path2.resolve(__dirname2, "../../../dist/config/drizzle.config.js")
|
|
1989
2178
|
];
|
|
1990
2179
|
const configPath = configPathCandidates.find((p) => fs4.existsSync(p));
|
|
1991
|
-
console.log(
|
|
2180
|
+
console.log(
|
|
2181
|
+
"[gen-db-schema] Using drizzle config from:",
|
|
2182
|
+
configPath ?? "(not found)"
|
|
2183
|
+
);
|
|
1992
2184
|
if (!configPath) {
|
|
1993
|
-
console.error(
|
|
2185
|
+
console.error(
|
|
2186
|
+
"[gen-db-schema] Error: drizzle config not found in CLI package"
|
|
2187
|
+
);
|
|
1994
2188
|
process.exit(1);
|
|
1995
2189
|
}
|
|
1996
2190
|
const resolveDrizzleKitBin = () => {
|
|
@@ -2006,7 +2200,9 @@ async function run(options = {}) {
|
|
|
2006
2200
|
const binField = pkgJson.bin;
|
|
2007
2201
|
const binRelPath = typeof binField === "string" ? binField : binField?.["drizzle-kit"];
|
|
2008
2202
|
if (!binRelPath) {
|
|
2009
|
-
throw new Error(
|
|
2203
|
+
throw new Error(
|
|
2204
|
+
"Unable to resolve drizzle-kit binary from package.json"
|
|
2205
|
+
);
|
|
2010
2206
|
}
|
|
2011
2207
|
return path2.resolve(currentDir, binRelPath);
|
|
2012
2208
|
}
|
|
@@ -2017,14 +2213,65 @@ async function run(options = {}) {
|
|
|
2017
2213
|
throw new Error("Unable to locate drizzle-kit package root");
|
|
2018
2214
|
};
|
|
2019
2215
|
let columnComments;
|
|
2020
|
-
|
|
2021
|
-
|
|
2022
|
-
|
|
2023
|
-
|
|
2216
|
+
let syncedTableMap;
|
|
2217
|
+
const appId = process.env.app_id;
|
|
2218
|
+
const workspace = process.env.suda_workspace_id;
|
|
2219
|
+
console.log(
|
|
2220
|
+
`[gen-db-schema] Pre-fetch info: columnComments=enabled, syncedTables=${appId && workspace ? "enabled" : "skipped"} (app_id=${appId ? "set" : "unset"}, suda_workspace_id=${workspace ? "set" : "unset"})`
|
|
2221
|
+
);
|
|
2222
|
+
const columnCommentsTask = (async () => {
|
|
2223
|
+
const start = Date.now();
|
|
2224
|
+
console.log("[gen-db-schema] \u2192 Fetching column comments...");
|
|
2225
|
+
const res = await fetchColumnComments(databaseUrl, { timeoutMs: 1e4 });
|
|
2226
|
+
console.log(
|
|
2227
|
+
`[gen-db-schema] \u2190 Fetched column comments: ${res.size} items (${Date.now() - start}ms)`
|
|
2228
|
+
);
|
|
2229
|
+
return res;
|
|
2230
|
+
})();
|
|
2231
|
+
const syncedTablesTask = appId && workspace ? (async () => {
|
|
2232
|
+
const start = Date.now();
|
|
2233
|
+
console.log(
|
|
2234
|
+
"[gen-db-schema] \u2192 Fetching synced tables from listTableView..."
|
|
2235
|
+
);
|
|
2236
|
+
const res = await fetchSyncedTables(appId, workspace);
|
|
2237
|
+
console.log(
|
|
2238
|
+
`[gen-db-schema] \u2190 Fetched synced tables: ${res.size} tables (${Date.now() - start}ms)`
|
|
2239
|
+
);
|
|
2240
|
+
return res;
|
|
2241
|
+
})() : void 0;
|
|
2242
|
+
const fetchTasks = await Promise.allSettled([
|
|
2243
|
+
columnCommentsTask,
|
|
2244
|
+
...syncedTablesTask ? [syncedTablesTask] : []
|
|
2245
|
+
]);
|
|
2246
|
+
if (fetchTasks[0].status === "fulfilled") {
|
|
2247
|
+
columnComments = fetchTasks[0].value;
|
|
2248
|
+
console.log(
|
|
2249
|
+
`[gen-db-schema] \u2713 Column comments ready: ${columnComments.size}`
|
|
2250
|
+
);
|
|
2251
|
+
} else {
|
|
2024
2252
|
console.warn(
|
|
2025
2253
|
"[gen-db-schema] \u26A0 Failed to fetch column comments (skipping):",
|
|
2026
|
-
|
|
2254
|
+
fetchTasks[0].reason instanceof Error ? fetchTasks[0].reason.message : String(fetchTasks[0].reason)
|
|
2255
|
+
);
|
|
2256
|
+
}
|
|
2257
|
+
if (appId && workspace) {
|
|
2258
|
+
if (fetchTasks[1]?.status === "fulfilled") {
|
|
2259
|
+
syncedTableMap = fetchTasks[1].value;
|
|
2260
|
+
console.log(
|
|
2261
|
+
`[gen-db-schema] \u2713 Synced tables ready: ${syncedTableMap.size}`
|
|
2262
|
+
);
|
|
2263
|
+
} else if (fetchTasks[1]?.status === "rejected") {
|
|
2264
|
+
console.warn(
|
|
2265
|
+
"[gen-db-schema] \u26A0 Failed to fetch synced tables (skipping):",
|
|
2266
|
+
fetchTasks[1].reason instanceof Error ? fetchTasks[1].reason.message : String(fetchTasks[1].reason)
|
|
2267
|
+
);
|
|
2268
|
+
syncedTableMap = /* @__PURE__ */ new Map();
|
|
2269
|
+
}
|
|
2270
|
+
} else {
|
|
2271
|
+
console.info(
|
|
2272
|
+
"[gen-db-schema] \u2139 Skipping synced table detection (app_id or suda_workspace_id not set)"
|
|
2027
2273
|
);
|
|
2274
|
+
syncedTableMap = /* @__PURE__ */ new Map();
|
|
2028
2275
|
}
|
|
2029
2276
|
try {
|
|
2030
2277
|
const env = {
|
|
@@ -2036,13 +2283,19 @@ async function run(options = {}) {
|
|
|
2036
2283
|
};
|
|
2037
2284
|
const drizzleKitBin = resolveDrizzleKitBin();
|
|
2038
2285
|
const spawnArgs = [drizzleKitBin, "introspect", "--config", configPath];
|
|
2039
|
-
const result = spawnSync(process.execPath, spawnArgs, {
|
|
2286
|
+
const result = spawnSync(process.execPath, spawnArgs, {
|
|
2287
|
+
stdio: "inherit",
|
|
2288
|
+
env,
|
|
2289
|
+
cwd: process.cwd()
|
|
2290
|
+
});
|
|
2040
2291
|
if (result.error) {
|
|
2041
2292
|
console.error("[gen-db-schema] Execution failed:", result.error);
|
|
2042
2293
|
throw result.error;
|
|
2043
2294
|
}
|
|
2044
2295
|
if ((result.status ?? 0) !== 0) {
|
|
2045
|
-
throw new Error(
|
|
2296
|
+
throw new Error(
|
|
2297
|
+
`drizzle-kit introspect failed with status ${result.status}`
|
|
2298
|
+
);
|
|
2046
2299
|
}
|
|
2047
2300
|
const generatedSchema = path2.join(OUT_DIR, "schema.ts");
|
|
2048
2301
|
if (!fs4.existsSync(generatedSchema)) {
|
|
@@ -2050,10 +2303,14 @@ async function run(options = {}) {
|
|
|
2050
2303
|
throw new Error("drizzle-kit introspect failed to generate schema.ts");
|
|
2051
2304
|
}
|
|
2052
2305
|
const stats = await postprocessDrizzleSchema(generatedSchema, {
|
|
2053
|
-
columnComments
|
|
2306
|
+
columnComments,
|
|
2307
|
+
syncedTableMap
|
|
2054
2308
|
});
|
|
2055
2309
|
if (stats?.unmatchedUnknown?.length) {
|
|
2056
|
-
console.warn(
|
|
2310
|
+
console.warn(
|
|
2311
|
+
"[gen-db-schema] Unmatched custom types detected:",
|
|
2312
|
+
stats.unmatchedUnknown
|
|
2313
|
+
);
|
|
2057
2314
|
}
|
|
2058
2315
|
console.log("[gen-db-schema] \u2713 Postprocessed schema");
|
|
2059
2316
|
fs4.mkdirSync(path2.dirname(SCHEMA_FILE), { recursive: true });
|
|
@@ -2068,14 +2325,22 @@ async function run(options = {}) {
|
|
|
2068
2325
|
schemaFilePath,
|
|
2069
2326
|
moduleOutputDir: path2.resolve(process.cwd(), "server/modules")
|
|
2070
2327
|
});
|
|
2071
|
-
console.log(
|
|
2328
|
+
console.log(
|
|
2329
|
+
"[gen-db-schema] \u2713 Generate NestJS Module Boilerplate Successfully"
|
|
2330
|
+
);
|
|
2072
2331
|
}
|
|
2073
2332
|
} catch (error) {
|
|
2074
|
-
console.warn(
|
|
2333
|
+
console.warn(
|
|
2334
|
+
"[gen-db-schema] Generate NestJS Module Boilerplate failed:",
|
|
2335
|
+
error instanceof Error ? error.message : String(error)
|
|
2336
|
+
);
|
|
2075
2337
|
}
|
|
2076
2338
|
console.log("[gen-db-schema] \u2713 Complete");
|
|
2077
2339
|
} catch (err) {
|
|
2078
|
-
console.error(
|
|
2340
|
+
console.error(
|
|
2341
|
+
"[gen-db-schema] Failed:",
|
|
2342
|
+
err instanceof Error ? err.message : String(err)
|
|
2343
|
+
);
|
|
2079
2344
|
exitCode = 1;
|
|
2080
2345
|
} finally {
|
|
2081
2346
|
if (fs4.existsSync(OUT_DIR)) {
|
|
@@ -2162,6 +2427,13 @@ var syncConfig = {
|
|
|
2162
2427
|
type: "add-line",
|
|
2163
2428
|
to: ".gitignore",
|
|
2164
2429
|
line: ".agent/"
|
|
2430
|
+
},
|
|
2431
|
+
// 8. 同步 .spark_project 配置文件(总是覆盖)
|
|
2432
|
+
{
|
|
2433
|
+
from: "templates/.spark_project",
|
|
2434
|
+
to: ".spark_project",
|
|
2435
|
+
type: "file",
|
|
2436
|
+
overwrite: true
|
|
2165
2437
|
}
|
|
2166
2438
|
],
|
|
2167
2439
|
// 文件权限设置
|
|
@@ -2419,28 +2691,6 @@ var syncCommand = {
|
|
|
2419
2691
|
}
|
|
2420
2692
|
};
|
|
2421
2693
|
|
|
2422
|
-
// src/utils/http-client.ts
|
|
2423
|
-
import { HttpClient } from "@lark-apaas/http-client";
|
|
2424
|
-
var clientInstance = null;
|
|
2425
|
-
function getHttpClient() {
|
|
2426
|
-
if (!clientInstance) {
|
|
2427
|
-
clientInstance = new HttpClient({
|
|
2428
|
-
timeout: 3e4,
|
|
2429
|
-
platform: {
|
|
2430
|
-
enabled: true
|
|
2431
|
-
}
|
|
2432
|
-
});
|
|
2433
|
-
const canaryEnv = process.env.FORCE_FRAMEWORK_CLI_CANARY_ENV;
|
|
2434
|
-
if (canaryEnv) {
|
|
2435
|
-
clientInstance.interceptors.request.use((req) => {
|
|
2436
|
-
req.headers["x-tt-env"] = canaryEnv;
|
|
2437
|
-
return req;
|
|
2438
|
-
});
|
|
2439
|
-
}
|
|
2440
|
-
}
|
|
2441
|
-
return clientInstance;
|
|
2442
|
-
}
|
|
2443
|
-
|
|
2444
2694
|
// src/utils/telemetry.ts
|
|
2445
2695
|
async function reportEvents(events) {
|
|
2446
2696
|
if (events.length === 0) {
|
|
@@ -2595,7 +2845,7 @@ function autoCommitUpgradeChanges(version, cwd, filesToStage, commitMessage) {
|
|
|
2595
2845
|
- Cleanup package.json config
|
|
2596
2846
|
- Upgrade @lark-apaas dependencies (if any)
|
|
2597
2847
|
|
|
2598
|
-
Auto-committed by fullstack-cli
|
|
2848
|
+
Auto-committed by fullstack-cli`;
|
|
2599
2849
|
gitCommit(message, cwd);
|
|
2600
2850
|
console.log(`[fullstack-cli] \u2713 Auto-committed ${changedFiles.length} changed file(s)`);
|
|
2601
2851
|
return true;
|
|
@@ -4042,7 +4292,7 @@ var PROMPT_PATTERNS = [
|
|
|
4042
4292
|
{ pattern: /proceed\?/i, answer: "y\n" }
|
|
4043
4293
|
];
|
|
4044
4294
|
async function executeShadcnAdd(registryItemPath) {
|
|
4045
|
-
return new Promise((
|
|
4295
|
+
return new Promise((resolve2) => {
|
|
4046
4296
|
let output = "";
|
|
4047
4297
|
const args = ["--yes", "shadcn@3.8.2", "add", registryItemPath];
|
|
4048
4298
|
const ptyProcess = pty.spawn("npx", args, {
|
|
@@ -4068,7 +4318,7 @@ async function executeShadcnAdd(registryItemPath) {
|
|
|
4068
4318
|
});
|
|
4069
4319
|
const timeoutId = setTimeout(() => {
|
|
4070
4320
|
ptyProcess.kill();
|
|
4071
|
-
|
|
4321
|
+
resolve2({
|
|
4072
4322
|
success: false,
|
|
4073
4323
|
files: [],
|
|
4074
4324
|
error: "\u6267\u884C\u8D85\u65F6"
|
|
@@ -4079,7 +4329,7 @@ async function executeShadcnAdd(registryItemPath) {
|
|
|
4079
4329
|
const success = exitCode === 0;
|
|
4080
4330
|
const filePaths = parseOutput(output);
|
|
4081
4331
|
const files = filePaths.map(toFileInfo);
|
|
4082
|
-
|
|
4332
|
+
resolve2({
|
|
4083
4333
|
success,
|
|
4084
4334
|
files,
|
|
4085
4335
|
error: success ? void 0 : output || `Process exited with code ${exitCode}`
|
|
@@ -4090,12 +4340,12 @@ async function executeShadcnAdd(registryItemPath) {
|
|
|
4090
4340
|
|
|
4091
4341
|
// src/commands/component/add.handler.ts
|
|
4092
4342
|
function runActionPluginInit() {
|
|
4093
|
-
return new Promise((
|
|
4343
|
+
return new Promise((resolve2) => {
|
|
4094
4344
|
execFile("fullstack-cli", ["action-plugin", "init"], { cwd: process.cwd(), stdio: "ignore" }, (error) => {
|
|
4095
4345
|
if (error) {
|
|
4096
4346
|
debug("action-plugin init \u5931\u8D25: %s", error.message);
|
|
4097
4347
|
}
|
|
4098
|
-
|
|
4348
|
+
resolve2();
|
|
4099
4349
|
});
|
|
4100
4350
|
});
|
|
4101
4351
|
}
|
|
@@ -6931,7 +7181,45 @@ async function genArtifactUploadCredential(appId, body) {
|
|
|
6931
7181
|
const response = await client.post(url, body);
|
|
6932
7182
|
if (!response.ok || response.status !== 200) {
|
|
6933
7183
|
throw new Error(
|
|
6934
|
-
`
|
|
7184
|
+
`gen_artifact_upload_credential \u8BF7\u6C42\u5931\u8D25: ${response.status} ${response.statusText}`
|
|
7185
|
+
);
|
|
7186
|
+
}
|
|
7187
|
+
return response.json();
|
|
7188
|
+
}
|
|
7189
|
+
async function getDefaultBucketId(appId) {
|
|
7190
|
+
const client = getHttpClient();
|
|
7191
|
+
const url = `/b/${appId}/get_published_v2`;
|
|
7192
|
+
const response = await client.get(url);
|
|
7193
|
+
if (!response.ok || response.status !== 200) {
|
|
7194
|
+
throw new Error(
|
|
7195
|
+
`get_published_v2 \u8BF7\u6C42\u5931\u8D25: ${response.status} ${response.statusText}`
|
|
7196
|
+
);
|
|
7197
|
+
}
|
|
7198
|
+
const data = await response.json();
|
|
7199
|
+
const bucketId = data?.data?.app_runtime_extra?.bucket?.default_bucket_id;
|
|
7200
|
+
if (!bucketId) {
|
|
7201
|
+
throw new Error(`\u672A\u627E\u5230\u5E94\u7528 ${appId} \u7684\u9ED8\u8BA4\u5B58\u50A8\u6876`);
|
|
7202
|
+
}
|
|
7203
|
+
return bucketId;
|
|
7204
|
+
}
|
|
7205
|
+
async function preUploadStaticAttachment(appId, bucketId) {
|
|
7206
|
+
const client = getHttpClient();
|
|
7207
|
+
const url = `/v1/app/${appId}/storage/bucket/${bucketId}/preUploadStatic`;
|
|
7208
|
+
const response = await client.post(url, {});
|
|
7209
|
+
if (!response.ok || response.status !== 200) {
|
|
7210
|
+
throw new Error(
|
|
7211
|
+
`preUploadStatic \u8BF7\u6C42\u5931\u8D25: ${response.status} ${response.statusText}`
|
|
7212
|
+
);
|
|
7213
|
+
}
|
|
7214
|
+
return response.json();
|
|
7215
|
+
}
|
|
7216
|
+
async function uploadStaticAttachmentCallback(appId, bucketId, body) {
|
|
7217
|
+
const client = getHttpClient();
|
|
7218
|
+
const url = `/v1/app/${appId}/storage/bucket/${bucketId}/object/callbackStatic`;
|
|
7219
|
+
const response = await client.post(url, body);
|
|
7220
|
+
if (!response.ok || response.status !== 200) {
|
|
7221
|
+
throw new Error(
|
|
7222
|
+
`callbackStatic \u8BF7\u6C42\u5931\u8D25: ${response.status} ${response.statusText}`
|
|
6935
7223
|
);
|
|
6936
7224
|
}
|
|
6937
7225
|
return response.json();
|
|
@@ -6969,6 +7257,170 @@ function camelToKebab(str) {
|
|
|
6969
7257
|
return str.replace(/([a-z])([A-Z])/g, "$1-$2").toLowerCase();
|
|
6970
7258
|
}
|
|
6971
7259
|
|
|
7260
|
+
// src/commands/build/upload-static.handler.ts
|
|
7261
|
+
import * as fs25 from "fs";
|
|
7262
|
+
import * as path21 from "path";
|
|
7263
|
+
import { execFileSync } from "child_process";
|
|
7264
|
+
function readCredentialsFromEnv() {
|
|
7265
|
+
const uploadPrefix = process.env.STATIC_UPLOAD_PREFIX;
|
|
7266
|
+
const uploadID = process.env.STATIC_UPLOAD_ID;
|
|
7267
|
+
const bucketId = process.env.STATIC_UPLOAD_BUCKET_ID;
|
|
7268
|
+
const accessKeyID = process.env.STATIC_UPLOAD_AK;
|
|
7269
|
+
const secretAccessKey = process.env.STATIC_UPLOAD_SK;
|
|
7270
|
+
const sessionToken = process.env.STATIC_UPLOAD_TOKEN;
|
|
7271
|
+
if (!uploadPrefix || !uploadID || !bucketId || !accessKeyID || !secretAccessKey || !sessionToken) {
|
|
7272
|
+
return null;
|
|
7273
|
+
}
|
|
7274
|
+
return { uploadPrefix, uploadID, bucketId, accessKeyID, secretAccessKey, sessionToken };
|
|
7275
|
+
}
|
|
7276
|
+
var LOG_PREFIX = "[upload-static]";
|
|
7277
|
+
async function uploadStatic(options) {
|
|
7278
|
+
try {
|
|
7279
|
+
const {
|
|
7280
|
+
appId,
|
|
7281
|
+
staticDir = "shared/static",
|
|
7282
|
+
tosutilPath = "/workspace/tosutil",
|
|
7283
|
+
endpoint = "tos-cn-beijing.volces.com",
|
|
7284
|
+
region = "cn-beijing"
|
|
7285
|
+
} = options;
|
|
7286
|
+
const resolvedStaticDir = path21.resolve(staticDir);
|
|
7287
|
+
if (!fs25.existsSync(resolvedStaticDir)) {
|
|
7288
|
+
console.error(`${LOG_PREFIX} \u76EE\u5F55\u4E0D\u5B58\u5728: ${resolvedStaticDir}\uFF0C\u8DF3\u8FC7\u4E0A\u4F20`);
|
|
7289
|
+
return;
|
|
7290
|
+
}
|
|
7291
|
+
if (isDirEmpty(resolvedStaticDir)) {
|
|
7292
|
+
console.error(`${LOG_PREFIX} \u76EE\u5F55\u4E3A\u7A7A: ${resolvedStaticDir}\uFF0C\u8DF3\u8FC7\u4E0A\u4F20`);
|
|
7293
|
+
return;
|
|
7294
|
+
}
|
|
7295
|
+
if (!fs25.existsSync(tosutilPath)) {
|
|
7296
|
+
throw new Error(
|
|
7297
|
+
`tosutil \u4E0D\u5B58\u5728: ${tosutilPath}\u3002\u8BF7\u786E\u4FDD\u6D41\u6C34\u7EBF\u5DF2\u5728"\u4EA7\u7269\u6253\u5305\u4E0A\u4F20"\u6B65\u9AA4\u4E2D\u4E0B\u8F7D tosutil\u3002`
|
|
7298
|
+
);
|
|
7299
|
+
}
|
|
7300
|
+
let uploadPrefix;
|
|
7301
|
+
let uploadID;
|
|
7302
|
+
let bucketId;
|
|
7303
|
+
let accessKeyID;
|
|
7304
|
+
let secretAccessKey;
|
|
7305
|
+
let sessionToken;
|
|
7306
|
+
const envCredentials = readCredentialsFromEnv();
|
|
7307
|
+
if (envCredentials) {
|
|
7308
|
+
console.error(`${LOG_PREFIX} \u4F7F\u7528\u73AF\u5883\u53D8\u91CF\u4E2D\u7684\u4E0A\u4F20\u51ED\u8BC1`);
|
|
7309
|
+
({ uploadPrefix, uploadID, bucketId, accessKeyID, secretAccessKey, sessionToken } = envCredentials);
|
|
7310
|
+
} else {
|
|
7311
|
+
console.error(`${LOG_PREFIX} \u73AF\u5883\u53D8\u91CF\u672A\u8BBE\u7F6E\uFF0C\u8C03\u7528 preUploadStatic...`);
|
|
7312
|
+
bucketId = await resolveBucketId(appId);
|
|
7313
|
+
const preUploadResp = await fetchPreUpload(appId, bucketId);
|
|
7314
|
+
const { uploadCredential } = preUploadResp.data;
|
|
7315
|
+
({ uploadPrefix, uploadID } = preUploadResp.data);
|
|
7316
|
+
({ AccessKeyID: accessKeyID, SecretAccessKey: secretAccessKey, SessionToken: sessionToken } = uploadCredential);
|
|
7317
|
+
}
|
|
7318
|
+
console.error(`${LOG_PREFIX} \u4E0A\u4F20\u76EE\u6807: ${uploadPrefix}`);
|
|
7319
|
+
console.error(`${LOG_PREFIX} \u914D\u7F6E tosutil...`);
|
|
7320
|
+
configureTosutil(tosutilPath, {
|
|
7321
|
+
endpoint,
|
|
7322
|
+
region,
|
|
7323
|
+
accessKeyID,
|
|
7324
|
+
secretAccessKey,
|
|
7325
|
+
sessionToken
|
|
7326
|
+
});
|
|
7327
|
+
console.error(`${LOG_PREFIX} \u4E0A\u4F20 ${resolvedStaticDir} -> ${uploadPrefix}`);
|
|
7328
|
+
uploadToTos(tosutilPath, resolvedStaticDir, uploadPrefix);
|
|
7329
|
+
console.error(`${LOG_PREFIX} tosutil \u4E0A\u4F20\u5B8C\u6210`);
|
|
7330
|
+
console.error(`${LOG_PREFIX} \u8C03\u7528 callbackStatic (uploadID: ${uploadID})...`);
|
|
7331
|
+
const callbackResp = await uploadStaticAttachmentCallback(appId, bucketId, { uploadID });
|
|
7332
|
+
if (callbackResp.status_code !== "0") {
|
|
7333
|
+
throw new Error(`callbackStatic \u8FD4\u56DE\u5F02\u5E38, status_code: ${callbackResp.status_code}`);
|
|
7334
|
+
}
|
|
7335
|
+
const attachments = callbackResp.data?.attachments || [];
|
|
7336
|
+
console.error(`${LOG_PREFIX} \u4E0A\u4F20\u5B8C\u6210\uFF0C\u5171 ${attachments.length} \u4E2A\u6587\u4EF6`);
|
|
7337
|
+
console.log(JSON.stringify(callbackResp));
|
|
7338
|
+
} catch (error) {
|
|
7339
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
7340
|
+
console.error(`${LOG_PREFIX} Error: ${message}`);
|
|
7341
|
+
process.exit(1);
|
|
7342
|
+
}
|
|
7343
|
+
}
|
|
7344
|
+
async function fetchPreUpload(appId, bucketId) {
|
|
7345
|
+
const response = await preUploadStaticAttachment(appId, bucketId);
|
|
7346
|
+
if (response.status_code !== "0") {
|
|
7347
|
+
throw new Error(`preUploadStatic \u8FD4\u56DE\u5F02\u5E38, status_code: ${response.status_code}`);
|
|
7348
|
+
}
|
|
7349
|
+
const { uploadPrefix, uploadID, uploadCredential } = response.data || {};
|
|
7350
|
+
if (!uploadPrefix || !uploadID) {
|
|
7351
|
+
throw new Error("preUploadStatic \u8FD4\u56DE\u6570\u636E\u4E0D\u5B8C\u6574\uFF0C\u7F3A\u5C11 uploadPrefix \u6216 uploadID");
|
|
7352
|
+
}
|
|
7353
|
+
if (!uploadCredential?.AccessKeyID || !uploadCredential?.SecretAccessKey || !uploadCredential?.SessionToken) {
|
|
7354
|
+
throw new Error("preUploadStatic \u8FD4\u56DE\u7684\u51ED\u8BC1\u5B57\u6BB5\u4E0D\u5B8C\u6574");
|
|
7355
|
+
}
|
|
7356
|
+
return response;
|
|
7357
|
+
}
|
|
7358
|
+
/**
 * Configure the tosutil binary with endpoint, region and STS credentials.
 * Runs `tosutil config ...` synchronously; output is captured (not shown)
 * so the temporary credentials do not leak to the terminal.
 * @param {string} tosutilPath - Path to the tosutil executable.
 * @param {{endpoint:string,region:string,accessKeyID:string,secretAccessKey:string,sessionToken:string}} config
 */
function configureTosutil(tosutilPath, config) {
  const { endpoint, region, accessKeyID, secretAccessKey, sessionToken } = config;
  const configArgs = [
    "config",
    "-e", endpoint,
    "-i", accessKeyID,
    "-k", secretAccessKey,
    "-t", sessionToken,
    "-re", region,
  ];
  execFileSync(tosutilPath, configArgs, { stdio: "pipe" });
}
|
|
7366
|
+
/**
 * Recursively upload a directory to TOS via tosutil, streaming its output
 * to the current terminal.
 * Flags: -r recursive, -flat drop source dir prefix, -j 5 concurrent jobs,
 * -p 3 parallel parts, -ps 10485760 (10 MiB part size), -f overwrite.
 * @param {string} tosutilPath - Path to the tosutil executable.
 * @param {string} sourceDir - Local directory to upload.
 * @param {string} destUrl - Destination TOS URL/prefix.
 */
function uploadToTos(tosutilPath, sourceDir, destUrl) {
  const cpArgs = [
    "cp",
    sourceDir,
    destUrl,
    "-r",
    "-flat",
    "-j", "5",
    "-p", "3",
    "-ps", "10485760",
    "-f",
  ];
  execFileSync(tosutilPath, cpArgs, { stdio: "inherit" });
}
|
|
7373
|
+
/**
 * Resolve the application's default storage bucket ID.
 * Progress is logged to stderr (stdout is reserved for machine-readable
 * output elsewhere in these handlers).
 * @param {string} appId - Application ID.
 * @returns {Promise<string>} The default bucket ID.
 */
async function resolveBucketId(appId) {
  console.error(`${LOG_PREFIX} \u83B7\u53D6\u9ED8\u8BA4\u5B58\u50A8\u6876...`);
  const bucketId = await getDefaultBucketId(appId);
  console.error(`${LOG_PREFIX} \u9ED8\u8BA4\u5B58\u50A8\u6876: ${bucketId}`);
  return bucketId;
}
|
|
7379
|
+
/**
 * Report whether a directory contains no entries.
 * @param {string} dirPath - Directory to inspect.
 * @returns {boolean} True when the directory is empty.
 */
function isDirEmpty(dirPath) {
  return fs25.readdirSync(dirPath).length === 0;
}
|
|
7383
|
+
|
|
7384
|
+
// src/commands/build/pre-upload-static.handler.ts
|
|
7385
|
+
var LOG_PREFIX2 = "[pre-upload-static]";
|
|
7386
|
+
/**
 * Backslash-escape the characters that remain special inside a
 * double-quoted POSIX shell string: `"`, `\`, backtick and `$`.
 * Used when emitting `export VAR="..."` lines meant to be eval'd.
 * @param {string} value - Raw value to embed in double quotes.
 * @returns {string} Escaped value safe for double-quoted interpolation.
 */
function shellEscape(value) {
  const special = '"\\`$';
  let escaped = "";
  for (const ch of value) {
    escaped += special.includes(ch) ? `\\${ch}` : ch;
  }
  return escaped;
}
|
|
7389
|
+
/**
 * Handler for `build pre-upload-static`: fetch TOS pre-upload info and
 * print it as `export VAR="..."` lines on stdout so build.sh can `eval`
 * the output. All diagnostics go to stderr to keep stdout eval-safe.
 * Failures are logged as warnings and the function returns normally
 * (best-effort: the build continues without static upload info).
 * @param {{appId: string}} options - Parsed CLI options.
 */
async function preUploadStatic(options) {
  try {
    const { appId } = options;
    console.error(`${LOG_PREFIX2} \u83B7\u53D6\u9ED8\u8BA4\u5B58\u50A8\u6876...`);
    const bucketId = await getDefaultBucketId(appId);
    console.error(`${LOG_PREFIX2} \u9ED8\u8BA4\u5B58\u50A8\u6876: ${bucketId}`);
    console.error(`${LOG_PREFIX2} \u8C03\u7528 preUploadStatic...`);
    const response = await preUploadStaticAttachment(appId, bucketId);
    if (response.status_code !== "0") {
      console.error(`${LOG_PREFIX2} preUploadStatic \u8FD4\u56DE\u5F02\u5E38, status_code: ${response.status_code}`);
      return;
    }
    const { downloadURLPrefix, uploadPrefix, uploadID, uploadCredential } = response.data || {};
    // All three of these are required to build the export lines below.
    if (!downloadURLPrefix || !uploadPrefix || !uploadID) {
      console.error(`${LOG_PREFIX2} preUploadStatic \u8FD4\u56DE\u6570\u636E\u4E0D\u5B8C\u6574`);
      return;
    }
    // The STS credential must be complete for tosutil to authenticate.
    if (!uploadCredential?.AccessKeyID || !uploadCredential?.SecretAccessKey || !uploadCredential?.SessionToken) {
      console.error(`${LOG_PREFIX2} preUploadStatic \u8FD4\u56DE\u7684\u51ED\u8BC1\u5B57\u6BB5\u4E0D\u5B8C\u6574`);
      return;
    }
    // stdout: eval-able export lines; values escaped for double quotes.
    console.log(`export STATIC_ASSETS_BASE_URL="${shellEscape(downloadURLPrefix)}"`);
    console.log(`export STATIC_UPLOAD_PREFIX="${shellEscape(uploadPrefix)}"`);
    console.log(`export STATIC_UPLOAD_ID="${shellEscape(uploadID)}"`);
    console.log(`export STATIC_UPLOAD_AK="${shellEscape(uploadCredential.AccessKeyID)}"`);
    console.log(`export STATIC_UPLOAD_SK="${shellEscape(uploadCredential.SecretAccessKey)}"`);
    console.log(`export STATIC_UPLOAD_TOKEN="${shellEscape(uploadCredential.SessionToken)}"`);
    console.log(`export STATIC_UPLOAD_BUCKET_ID="${shellEscape(bucketId)}"`);
    console.error(`${LOG_PREFIX2} \u73AF\u5883\u53D8\u91CF\u5DF2\u8F93\u51FA`);
  } catch (error) {
    // Best-effort: warn and return (no process.exit) so the caller can proceed.
    const message = error instanceof Error ? error.message : String(error);
    console.error(`${LOG_PREFIX2} Warning: ${message}`);
  }
}
|
|
7423
|
+
|
|
6972
7424
|
// src/commands/build/index.ts
|
|
6973
7425
|
var getTokenCommand = {
|
|
6974
7426
|
name: "get-token",
|
|
@@ -6979,10 +7431,28 @@ var getTokenCommand = {
|
|
|
6979
7431
|
});
|
|
6980
7432
|
}
|
|
6981
7433
|
};
|
|
7434
|
+
// CLI command: upload shared/static files to TOS via tosutil.
var uploadStaticCommand = {
  name: "upload-static",
  description: "Upload shared/static files to TOS",
  register(program) {
    const cmd = program
      .command(this.name)
      .description(this.description)
      .requiredOption("--app-id <id>", "Application ID")
      .option("--static-dir <dir>", "Static files directory", "shared/static")
      .option("--tosutil-path <path>", "Path to tosutil binary", "/workspace/tosutil")
      .option("--endpoint <endpoint>", "TOS endpoint", "tos-cn-beijing.volces.com")
      .option("--region <region>", "TOS region", "cn-beijing");
    cmd.action(async (options) => {
      await uploadStatic(options);
    });
  }
};
|
|
7443
|
+
// CLI command: print TOS upload info as export lines for build.sh to eval.
var preUploadStaticCommand = {
  name: "pre-upload-static",
  description: "Get TOS upload info and output as env vars for build.sh eval",
  register(program) {
    const cmd = program
      .command(this.name)
      .description(this.description)
      .requiredOption("--app-id <id>", "Application ID");
    cmd.action(async (options) => {
      await preUploadStatic(options);
    });
  }
};
|
|
6982
7452
|
// Top-level `build` command group registered with the CLI.
var buildCommandGroup = {
  name: "build",
  description: "Build related commands",
  // Subcommands: get-token, upload-static, pre-upload-static.
  commands: [getTokenCommand, uploadStaticCommand, preUploadStaticCommand]
};
|
|
6987
7457
|
|
|
6988
7458
|
// src/commands/index.ts
|
|
@@ -6999,12 +7469,12 @@ var commands = [
|
|
|
6999
7469
|
];
|
|
7000
7470
|
|
|
7001
7471
|
// src/index.ts
|
|
7002
|
-
var envPath =
|
|
7003
|
-
if (
|
|
7472
|
+
var envPath = path22.join(process.cwd(), ".env");
|
|
7473
|
+
if (fs26.existsSync(envPath)) {
|
|
7004
7474
|
dotenvConfig({ path: envPath });
|
|
7005
7475
|
}
|
|
7006
|
-
var __dirname =
|
|
7007
|
-
var pkg = JSON.parse(
|
|
7476
|
+
var __dirname = path22.dirname(fileURLToPath5(import.meta.url));
|
|
7477
|
+
var pkg = JSON.parse(fs26.readFileSync(path22.join(__dirname, "../package.json"), "utf-8"));
|
|
7008
7478
|
var cli = new FullstackCLI(pkg.version);
|
|
7009
7479
|
cli.useAll(commands);
|
|
7010
7480
|
cli.run();
|
package/package.json
CHANGED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
run = ["npm", "run", "dev"] # 默认 spark-cli dev
|
|
2
|
+
hidden = [".config", ".git", "scripts", "node_modules", "dist", ".spark", ".agent", ".agents", "tmp", ".spark_project", ".playwright-cli"]
|
|
3
|
+
lint = ["npm", "run", "lint"]
|
|
4
|
+
test = ["npm", "run", "test"]
|
|
5
|
+
genDbSchema = ["npm", "run", "gen:db-schema"]
|
|
6
|
+
genOpenApiClient = ["npm", "run", "gen:openapi"]
|
|
7
|
+
|
|
8
|
+
[deployment]
|
|
9
|
+
build = ["npm", "run", "build"]
|
|
10
|
+
run = ["npm", "run", "start"]
|
|
11
|
+
|
|
12
|
+
[files]
|
|
13
|
+
[files.restrict]
|
|
14
|
+
pathPatterns = ["client/src/api/gen", "package.json", ".spark_project", ".gitignore"]
|
|
15
|
+
[files.hidden]
|
|
16
|
+
pathPatterns = [".config", ".git", "scripts", "node_modules", "dist", ".spark", ".agent", ".agents", "tmp", ".spark_project", ".playwright-cli"]
|
package/templates/nest-cli.json
CHANGED
|
@@ -83,12 +83,17 @@ STEP_START=$(node -e "console.log(Date.now())")
|
|
|
83
83
|
|
|
84
84
|
mkdir -p "$OUT_DIR/dist/client"
|
|
85
85
|
|
|
86
|
-
#
|
|
87
|
-
|
|
86
|
+
# 移动 HTML(从 dist/client 移走,避免残留)
|
|
87
|
+
mv "$ROOT_DIR/dist/client/"*.html "$OUT_DIR/dist/client/" || true
|
|
88
88
|
|
|
89
89
|
# 拷贝 run.sh 文件
|
|
90
90
|
cp "$ROOT_DIR/scripts/run.sh" "$OUT_DIR/"
|
|
91
91
|
|
|
92
|
+
# 拷贝 .env 文件(如果存在)
|
|
93
|
+
if [ -f "$ROOT_DIR/.env" ]; then
|
|
94
|
+
cp "$ROOT_DIR/.env" "$OUT_DIR/"
|
|
95
|
+
fi
|
|
96
|
+
|
|
92
97
|
# 清理无用文件
|
|
93
98
|
rm -rf "$ROOT_DIR/dist/scripts"
|
|
94
99
|
rm -rf "$ROOT_DIR/dist/tsconfig.node.tsbuildinfo"
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
'use strict';
|
|
3
|
+
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const { spawn, execSync } = require('child_process');
|
|
7
|
+
const readline = require('readline');
|
|
8
|
+
|
|
9
|
+
// ── Project root ──────────────────────────────────────────────────────────────
|
|
10
|
+
const PROJECT_ROOT = path.resolve(__dirname, '..');
|
|
11
|
+
process.chdir(PROJECT_ROOT);
|
|
12
|
+
|
|
13
|
+
// ── Load .env ─────────────────────────────────────────────────────────────────
/**
 * Minimal .env loader (avoids pulling in dotenv for the dev script).
 * Skips blank lines and `#` comments; never overrides variables already
 * present in process.env. Strips one pair of matching surrounding quotes
 * from values (`KEY="a b"` → `a b`), matching dotenv's behavior — the
 * bundled CLI loads the same .env via dotenv, so both must agree.
 * @param {string} [envPath] - File to load; defaults to <PROJECT_ROOT>/.env.
 */
function loadEnv(envPath = path.join(PROJECT_ROOT, '.env')) {
  if (!fs.existsSync(envPath)) return;
  const lines = fs.readFileSync(envPath, 'utf8').split('\n');
  for (const line of lines) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith('#')) continue;
    const eqIdx = trimmed.indexOf('=');
    if (eqIdx === -1) continue;
    const key = trimmed.slice(0, eqIdx).trim();
    let value = trimmed.slice(eqIdx + 1).trim();
    // Strip matching surrounding single or double quotes (dotenv convention).
    if (
      value.length >= 2 &&
      (value[0] === '"' || value[0] === "'") &&
      value[value.length - 1] === value[0]
    ) {
      value = value.slice(1, -1);
    }
    if (!(key in process.env)) {
      process.env[key] = value;
    }
  }
}
|
|
30
|
+
loadEnv();
|
|
31
|
+
|
|
32
|
+
// ── Configuration ─────────────────────────────────────────────────────────────
|
|
33
|
+
const LOG_DIR = process.env.LOG_DIR || 'logs';
|
|
34
|
+
const MAX_RESTART_COUNT = parseInt(process.env.MAX_RESTART_COUNT, 10) || 10;
|
|
35
|
+
const RESTART_DELAY = parseInt(process.env.RESTART_DELAY, 10) || 2;
|
|
36
|
+
const MAX_DELAY = 60;
|
|
37
|
+
const SERVER_PORT = process.env.SERVER_PORT || '3000';
|
|
38
|
+
const CLIENT_DEV_PORT = process.env.CLIENT_DEV_PORT || '8080';
|
|
39
|
+
|
|
40
|
+
fs.mkdirSync(LOG_DIR, { recursive: true });
|
|
41
|
+
|
|
42
|
+
// ── Logging infrastructure ────────────────────────────────────────────────────
|
|
43
|
+
const devStdLogPath = path.join(LOG_DIR, 'dev.std.log');
|
|
44
|
+
const devLogPath = path.join(LOG_DIR, 'dev.log');
|
|
45
|
+
const devStdLogFd = fs.openSync(devStdLogPath, 'a');
|
|
46
|
+
const devLogFd = fs.openSync(devLogPath, 'a');
|
|
47
|
+
|
|
48
|
+
/**
 * Format the current local time as `YYYY-MM-DD HH:MM:SS` for log lines.
 * @returns {string} Zero-padded local timestamp.
 */
function timestamp() {
  const pad2 = (v) => String(v).padStart(2, '0');
  const d = new Date();
  const date = `${d.getFullYear()}-${pad2(d.getMonth() + 1)}-${pad2(d.getDate())}`;
  const time = `${pad2(d.getHours())}:${pad2(d.getMinutes())}:${pad2(d.getSeconds())}`;
  return `${date} ${time}`;
}
|
|
59
|
+
|
|
60
|
+
/** Write to terminal + dev.std.log */
function writeOutput(msg) {
  // Both sinks are best-effort: stdout may be closed mid-shutdown and the
  // log fd may already be released — never let logging crash the supervisor.
  try {
    process.stdout.write(msg);
  } catch {
    // terminal gone — ignore
  }
  try {
    fs.writeSync(devStdLogFd, msg);
  } catch {
    // log fd closed — ignore
  }
}
|
|
65
|
+
|
|
66
|
+
/** Structured event log → terminal + dev.std.log + dev.log */
function logEvent(level, name, message) {
  const line = `[${timestamp()}] [${level}] [${name}] ${message}\n`;
  // writeOutput covers terminal + dev.std.log; dev.log gets its own copy.
  writeOutput(line);
  try {
    fs.writeSync(devLogFd, line);
  } catch {
    // best-effort logging — ignore write failures
  }
}
|
|
72
|
+
|
|
73
|
+
// ── Process group management ──────────────────────────────────────────────────
|
|
74
|
+
/**
 * Send a signal to an entire process group (best-effort).
 * A negative PID makes process.kill target the group rather than a single
 * process; errors (group already gone, no permission) are swallowed.
 * @param {number} pid - Process group leader's PID.
 * @param {string} signal - Signal name, e.g. 'SIGTERM'.
 */
function killProcessGroup(pid, signal) {
  const groupTarget = -pid;
  try {
    process.kill(groupTarget, signal);
  } catch {
    // group no longer exists — nothing to do
  }
}
|
|
79
|
+
|
|
80
|
+
/**
 * SIGKILL any processes still listening on a port (via lsof) — a fallback
 * for children that escaped the process-group kill.
 * Silently returns [] when lsof is unavailable, times out, or finds nothing
 * (lsof exits non-zero with no matches, which lands in the catch).
 * @param {number|string} port - TCP port to sweep.
 * @returns {string[]} PIDs (as strings) that were targeted.
 */
function killOrphansByPort(port) {
  try {
    const output = execSync(`lsof -ti :${port}`, { encoding: 'utf8', timeout: 5000 }).trim();
    if (!output) return [];
    const pidList = output.split('\n').filter(Boolean);
    for (const pidStr of pidList) {
      try {
        process.kill(parseInt(pidStr, 10), 'SIGKILL');
      } catch {
        // already exited — ignore
      }
    }
    return pidList;
  } catch {
    // lsof failed or matched nothing
    return [];
  }
}
|
|
93
|
+
|
|
94
|
+
// ── Process supervision ───────────────────────────────────────────────────────
|
|
95
|
+
let stopping = false;
|
|
96
|
+
const managedProcesses = []; // { name, pid, child }
|
|
97
|
+
|
|
98
|
+
/**
 * Promise-based delay.
 * @param {number} ms - Milliseconds to wait.
 * @returns {Promise<void>} Resolves after the delay.
 */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
101
|
+
|
|
102
|
+
/**
 * Start and supervise a process with auto-restart and log piping.
 * Returns a promise that resolves when the process loop ends.
 *
 * The child is spawned `detached: true`, so it leads its own process group
 * and the group can be killed via PGID (= child.pid) on teardown. Restarts
 * use exponential backoff (RESTART_DELAY * 2^(n-1), capped at MAX_DELAY)
 * and give up after MAX_RESTART_COUNT attempts.
 *
 * @param {{name: string, command: string, args: string[], cleanupPort?: (number|string)}} spec
 *   name        - Label for logs and the per-process log file.
 *   command     - Executable run through a shell (shell: true).
 *   args        - Arguments appended to the command.
 *   cleanupPort - Optional port swept for orphans after each exit.
 * @returns {Promise<void>} Resolves when supervision stops.
 */
function startProcess({ name, command, args, cleanupPort }) {
  const logFilePath = path.join(LOG_DIR, `${name}.std.log`);
  const logFd = fs.openSync(logFilePath, 'a');

  // Registered globally so cleanup() can kill every live group on shutdown.
  const entry = { name, pid: null, child: null };
  managedProcesses.push(entry);

  const run = async () => {
    let restartCount = 0;

    while (!stopping) {
      const child = spawn(command, args, {
        detached: true,           // new process group → killable as a unit
        stdio: ['ignore', 'pipe', 'pipe'],
        shell: true,
        cwd: PROJECT_ROOT,
        env: { ...process.env },
      });

      entry.pid = child.pid;
      entry.child = child;

      logEvent('INFO', name, `Process started (PGID: ${child.pid}): ${command} ${args.join(' ')}`);

      // Pipe stdout and stderr through readline for timestamped logging
      const pipeLines = (stream) => {
        const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });
        rl.on('line', (line) => {
          const msg = `[${timestamp()}] [${name}] ${line}\n`;
          try { fs.writeSync(logFd, msg); } catch {}
          writeOutput(msg);
        });
      };
      if (child.stdout) pipeLines(child.stdout);
      if (child.stderr) pipeLines(child.stderr);

      // Wait for the direct child to exit.
      // NOTE: must use 'exit', not 'close'. With shell:true, grandchild processes
      // (e.g. nest's server) inherit stdout pipes. 'close' won't fire until ALL
      // pipe holders exit, causing dev.js to hang when npm/nest dies but server survives.
      const exitCode = await new Promise((resolve) => {
        child.on('exit', (code) => resolve(code ?? 1));
        child.on('error', () => resolve(1));
      });

      // Kill the entire process group (SIGTERM, 2s grace, then SIGKILL)
      if (entry.pid) {
        killProcessGroup(entry.pid, 'SIGTERM');
        await sleep(2000);
        killProcessGroup(entry.pid, 'SIGKILL');
      }
      entry.pid = null;
      entry.child = null;

      // Port cleanup fallback for survivors that left the process group
      if (cleanupPort) {
        const orphans = killOrphansByPort(cleanupPort);
        if (orphans.length > 0) {
          logEvent('WARN', name, `Killed orphan processes on port ${cleanupPort}: ${orphans.join(' ')}`);
          await sleep(500);
        }
      }

      if (stopping) break;

      restartCount++;
      if (restartCount >= MAX_RESTART_COUNT) {
        logEvent('ERROR', name, `Max restart count (${MAX_RESTART_COUNT}) reached, giving up`);
        break;
      }

      // Exponential backoff before the next restart attempt.
      const delay = Math.min(RESTART_DELAY * (1 << (restartCount - 1)), MAX_DELAY);
      logEvent('WARN', name, `Process exited with code ${exitCode}, restarting (${restartCount}/${MAX_RESTART_COUNT}) in ${delay}s...`);
      await sleep(delay * 1000);
    }

    try { fs.closeSync(logFd); } catch {}
  };

  return run();
}
|
|
187
|
+
|
|
188
|
+
// ── Cleanup ───────────────────────────────────────────────────────────────────
|
|
189
|
+
let cleanupDone = false;
|
|
190
|
+
|
|
191
|
+
/**
 * Tear down all supervised process groups and exit.
 * Idempotent (guarded by cleanupDone) — installed as a SIGTERM/SIGINT/SIGHUP
 * handler and also called from main() after the supervisors finish.
 * Sequence: SIGTERM every group → 2s grace → SIGKILL stragglers →
 * port sweep → close log fds → process.exit(0).
 */
async function cleanup() {
  if (cleanupDone) return;
  cleanupDone = true;
  stopping = true;  // stops startProcess restart loops from respawning

  logEvent('INFO', 'main', 'Shutting down all processes...');

  // Kill all managed process groups
  for (const entry of managedProcesses) {
    if (entry.pid) {
      logEvent('INFO', 'main', `Stopping process group (PGID: ${entry.pid})`);
      killProcessGroup(entry.pid, 'SIGTERM');
    }
  }

  // Wait for graceful shutdown
  await sleep(2000);

  // Force kill any remaining
  for (const entry of managedProcesses) {
    if (entry.pid) {
      logEvent('WARN', 'main', `Force killing process group (PGID: ${entry.pid})`);
      killProcessGroup(entry.pid, 'SIGKILL');
    }
  }

  // Port cleanup fallback for anything that escaped its process group
  killOrphansByPort(SERVER_PORT);
  killOrphansByPort(CLIENT_DEV_PORT);

  logEvent('INFO', 'main', 'All processes stopped');

  try { fs.closeSync(devStdLogFd); } catch {}
  try { fs.closeSync(devLogFd); } catch {}

  // Explicit exit: readline interfaces / pipes may otherwise keep the loop alive.
  process.exit(0);
}
|
|
228
|
+
|
|
229
|
+
process.on('SIGTERM', cleanup);
|
|
230
|
+
process.on('SIGINT', cleanup);
|
|
231
|
+
process.on('SIGHUP', cleanup);
|
|
232
|
+
|
|
233
|
+
// ── Main ──────────────────────────────────────────────────────────────────────
|
|
234
|
+
/**
 * Dev entry point: best-effort plugin init, then supervise the server and
 * client dev processes until they stop (signal or max restarts), then clean up.
 */
async function main() {
  logEvent('INFO', 'main', '========== Dev session started ==========');

  // Initialize action plugins — failure is non-fatal, dev servers start anyway.
  writeOutput('\n🔌 Initializing action plugins...\n');
  try {
    execSync('fullstack-cli action-plugin init', { cwd: PROJECT_ROOT, stdio: 'inherit' });
    writeOutput('✅ Action plugins initialized\n\n');
  } catch {
    writeOutput('⚠️ Action plugin initialization failed, continuing anyway...\n\n');
  }

  // Launch both supervised dev processes.
  const supervisors = [
    { name: 'server', command: 'npm', args: ['run', 'dev:server'], cleanupPort: SERVER_PORT },
    { name: 'client', command: 'npm', args: ['run', 'dev:client'], cleanupPort: CLIENT_DEV_PORT },
  ].map((spec) => startProcess(spec));

  writeOutput(`📋 Dev processes running. Press Ctrl+C to stop.\n`);
  writeOutput(`📄 Logs: ${devStdLogPath}\n\n`);

  // Each supervisor loops until stopping is set or max restarts is hit.
  await Promise.all(supervisors);

  if (!cleanupDone) {
    await cleanup();
  }
}
|
|
271
|
+
|
|
272
|
+
// Top-level launch: any unhandled rejection is fatal for the dev session.
main().catch((fatal) => {
  console.error('Fatal error:', fatal);
  process.exit(1);
});
|
package/templates/scripts/dev.sh
CHANGED
|
@@ -1,245 +1,2 @@
|
|
|
1
1
|
#!/usr/bin/env bash
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
set -uo pipefail
|
|
5
|
-
|
|
6
|
-
# Ensure the script always runs from the project root
|
|
7
|
-
cd "$(dirname "${BASH_SOURCE[0]}")/.."
|
|
8
|
-
|
|
9
|
-
# Configuration
|
|
10
|
-
LOG_DIR=${LOG_DIR:-logs}
|
|
11
|
-
DEV_LOG="${LOG_DIR}/dev.log"
|
|
12
|
-
MAX_RESTART_COUNT=${MAX_RESTART_COUNT:-10}
|
|
13
|
-
RESTART_DELAY=${RESTART_DELAY:-2}
|
|
14
|
-
|
|
15
|
-
# Process tracking
|
|
16
|
-
SERVER_PID=""
|
|
17
|
-
CLIENT_PID=""
|
|
18
|
-
PARENT_PID=$$
|
|
19
|
-
STOP_FLAG_FILE="/tmp/dev_sh_stop_$$"
|
|
20
|
-
CLEANUP_DONE=false
|
|
21
|
-
|
|
22
|
-
mkdir -p "${LOG_DIR}"
|
|
23
|
-
|
|
24
|
-
# Redirect all stdout/stderr to both terminal and log file
|
|
25
|
-
DEV_STD_LOG="${LOG_DIR}/dev.std.log"
|
|
26
|
-
exec > >(tee -a "$DEV_STD_LOG") 2>&1
|
|
27
|
-
|
|
28
|
-
# Log event to dev.log with timestamp
|
|
29
|
-
log_event() {
|
|
30
|
-
local level=$1
|
|
31
|
-
local process_name=$2
|
|
32
|
-
local message=$3
|
|
33
|
-
local msg="[$(date '+%Y-%m-%d %H:%M:%S')] [${level}] [${process_name}] ${message}"
|
|
34
|
-
echo "$msg"
|
|
35
|
-
echo "$msg" >> "${DEV_LOG}"
|
|
36
|
-
}
|
|
37
|
-
|
|
38
|
-
# Check if PID is valid (positive integer and process exists)
|
|
39
|
-
is_valid_pid() {
|
|
40
|
-
local pid=$1
|
|
41
|
-
[[ -n "$pid" ]] && [[ "$pid" =~ ^[0-9]+$ ]] && [[ "$pid" -gt 0 ]] && kill -0 "$pid" 2>/dev/null
|
|
42
|
-
}
|
|
43
|
-
|
|
44
|
-
# Check if parent process is still alive
|
|
45
|
-
is_parent_alive() {
|
|
46
|
-
kill -0 "$PARENT_PID" 2>/dev/null
|
|
47
|
-
}
|
|
48
|
-
|
|
49
|
-
# Check if should stop (parent exited or stop flag exists)
|
|
50
|
-
should_stop() {
|
|
51
|
-
[[ -f "$STOP_FLAG_FILE" ]] || ! is_parent_alive
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
# Kill entire process tree (process and all descendants)
|
|
55
|
-
kill_tree() {
|
|
56
|
-
local pid=$1
|
|
57
|
-
local signal=${2:-TERM}
|
|
58
|
-
|
|
59
|
-
# Get all descendant PIDs
|
|
60
|
-
local children
|
|
61
|
-
children=$(pgrep -P "$pid" 2>/dev/null) || true
|
|
62
|
-
|
|
63
|
-
# Recursively kill children first
|
|
64
|
-
for child in $children; do
|
|
65
|
-
kill_tree "$child" "$signal"
|
|
66
|
-
done
|
|
67
|
-
|
|
68
|
-
# Kill the process itself
|
|
69
|
-
if kill -0 "$pid" 2>/dev/null; then
|
|
70
|
-
kill -"$signal" "$pid" 2>/dev/null || true
|
|
71
|
-
fi
|
|
72
|
-
}
|
|
73
|
-
|
|
74
|
-
# Start a process with supervision
|
|
75
|
-
# $1: name
|
|
76
|
-
# $2: command
|
|
77
|
-
# $3: cleanup port for orphan processes (optional)
|
|
78
|
-
start_supervised_process() {
|
|
79
|
-
local name=$1
|
|
80
|
-
local cmd=$2
|
|
81
|
-
local cleanup_port=${3:-""}
|
|
82
|
-
local log_file="${LOG_DIR}/${name}.std.log"
|
|
83
|
-
|
|
84
|
-
(
|
|
85
|
-
local restart_count=0
|
|
86
|
-
local child_pid=""
|
|
87
|
-
local max_delay=60 # Maximum delay in seconds
|
|
88
|
-
|
|
89
|
-
# Handle signals to kill child process tree
|
|
90
|
-
trap 'if [[ -n "$child_pid" ]]; then kill_tree "$child_pid" TERM; fi' TERM INT
|
|
91
|
-
|
|
92
|
-
while true; do
|
|
93
|
-
# Check if we should stop (parent exited or stop flag)
|
|
94
|
-
if should_stop; then
|
|
95
|
-
log_event "INFO" "$name" "Process stopped (parent exited or user requested)"
|
|
96
|
-
break
|
|
97
|
-
fi
|
|
98
|
-
|
|
99
|
-
# Start command in background and capture output with timestamps
|
|
100
|
-
eval "$cmd" > >(
|
|
101
|
-
while IFS= read -r line; do
|
|
102
|
-
local msg="[$(date '+%Y-%m-%d %H:%M:%S')] [${name}] ${line}"
|
|
103
|
-
echo "$msg"
|
|
104
|
-
echo "$msg" >> "$log_file"
|
|
105
|
-
done
|
|
106
|
-
) 2>&1 &
|
|
107
|
-
child_pid=$!
|
|
108
|
-
|
|
109
|
-
log_event "INFO" "$name" "Process started (PID: ${child_pid}): ${cmd}"
|
|
110
|
-
|
|
111
|
-
# Wait for child to exit
|
|
112
|
-
set +e
|
|
113
|
-
wait "$child_pid"
|
|
114
|
-
exit_code=$?
|
|
115
|
-
set -e
|
|
116
|
-
|
|
117
|
-
# Kill entire process tree to avoid orphans
|
|
118
|
-
if [[ -n "$child_pid" ]]; then
|
|
119
|
-
kill_tree "$child_pid" TERM
|
|
120
|
-
sleep 0.5
|
|
121
|
-
kill_tree "$child_pid" KILL
|
|
122
|
-
fi
|
|
123
|
-
child_pid=""
|
|
124
|
-
|
|
125
|
-
# Cleanup orphan processes by port (for processes that escaped kill_tree)
|
|
126
|
-
if [[ -n "$cleanup_port" ]]; then
|
|
127
|
-
local orphan_pids
|
|
128
|
-
orphan_pids=$(lsof -ti ":${cleanup_port}" 2>/dev/null) || true
|
|
129
|
-
if [[ -n "$orphan_pids" ]]; then
|
|
130
|
-
log_event "WARN" "$name" "Killing orphan processes on port ${cleanup_port}: $(echo $orphan_pids | tr '\n' ' ')"
|
|
131
|
-
echo "$orphan_pids" | xargs kill -9 2>/dev/null || true
|
|
132
|
-
sleep 0.5
|
|
133
|
-
fi
|
|
134
|
-
fi
|
|
135
|
-
|
|
136
|
-
# Check if we should stop (parent exited or stop flag)
|
|
137
|
-
if should_stop; then
|
|
138
|
-
log_event "INFO" "$name" "Process stopped (parent exited or user requested)"
|
|
139
|
-
break
|
|
140
|
-
fi
|
|
141
|
-
|
|
142
|
-
# Process exited unexpectedly, restart
|
|
143
|
-
restart_count=$((restart_count + 1))
|
|
144
|
-
|
|
145
|
-
if [[ $restart_count -ge $MAX_RESTART_COUNT ]]; then
|
|
146
|
-
log_event "ERROR" "$name" "Max restart count (${MAX_RESTART_COUNT}) reached, giving up"
|
|
147
|
-
break
|
|
148
|
-
fi
|
|
149
|
-
|
|
150
|
-
# Exponential backoff: delay = RESTART_DELAY * 2^(restart_count-1), capped at max_delay
|
|
151
|
-
local delay=$((RESTART_DELAY * (1 << (restart_count - 1))))
|
|
152
|
-
if [[ $delay -gt $max_delay ]]; then
|
|
153
|
-
delay=$max_delay
|
|
154
|
-
fi
|
|
155
|
-
|
|
156
|
-
log_event "WARN" "$name" "Process exited with code ${exit_code}, restarting (${restart_count}/${MAX_RESTART_COUNT}) in ${delay}s..."
|
|
157
|
-
sleep "$delay"
|
|
158
|
-
done
|
|
159
|
-
) &
|
|
160
|
-
}
|
|
161
|
-
|
|
162
|
-
# Cleanup function
|
|
163
|
-
cleanup() {
|
|
164
|
-
# Prevent multiple cleanup calls
|
|
165
|
-
if [[ "$CLEANUP_DONE" == "true" ]]; then
|
|
166
|
-
return
|
|
167
|
-
fi
|
|
168
|
-
CLEANUP_DONE=true
|
|
169
|
-
|
|
170
|
-
log_event "INFO" "main" "Shutting down all processes..."
|
|
171
|
-
|
|
172
|
-
# Create stop flag to signal child processes
|
|
173
|
-
touch "$STOP_FLAG_FILE"
|
|
174
|
-
|
|
175
|
-
# Kill entire process trees (TERM first)
|
|
176
|
-
for pid in $SERVER_PID $CLIENT_PID; do
|
|
177
|
-
if is_valid_pid "$pid"; then
|
|
178
|
-
log_event "INFO" "main" "Stopping process tree (PID: ${pid})"
|
|
179
|
-
kill_tree "$pid" TERM
|
|
180
|
-
fi
|
|
181
|
-
done
|
|
182
|
-
|
|
183
|
-
# Kill any remaining background jobs
|
|
184
|
-
local bg_pids
|
|
185
|
-
bg_pids=$(jobs -p 2>/dev/null) || true
|
|
186
|
-
if [[ -n "$bg_pids" ]]; then
|
|
187
|
-
for pid in $bg_pids; do
|
|
188
|
-
kill_tree "$pid" TERM
|
|
189
|
-
done
|
|
190
|
-
fi
|
|
191
|
-
|
|
192
|
-
# Wait for processes to terminate
|
|
193
|
-
sleep 1
|
|
194
|
-
|
|
195
|
-
# Force kill if still running
|
|
196
|
-
for pid in $SERVER_PID $CLIENT_PID; do
|
|
197
|
-
if is_valid_pid "$pid"; then
|
|
198
|
-
log_event "WARN" "main" "Force killing process tree (PID: ${pid})"
|
|
199
|
-
kill_tree "$pid" KILL
|
|
200
|
-
fi
|
|
201
|
-
done
|
|
202
|
-
|
|
203
|
-
# Cleanup stop flag
|
|
204
|
-
rm -f "$STOP_FLAG_FILE"
|
|
205
|
-
|
|
206
|
-
log_event "INFO" "main" "All processes stopped"
|
|
207
|
-
}
|
|
208
|
-
|
|
209
|
-
# Set up signal handlers
|
|
210
|
-
trap cleanup EXIT INT TERM HUP
|
|
211
|
-
|
|
212
|
-
# Remove any stale stop flag
|
|
213
|
-
rm -f "$STOP_FLAG_FILE"
|
|
214
|
-
|
|
215
|
-
# Initialize dev.log
|
|
216
|
-
echo "" >> "${DEV_LOG}"
|
|
217
|
-
log_event "INFO" "main" "========== Dev session started =========="
|
|
218
|
-
|
|
219
|
-
# Initialize action plugins before starting dev servers
|
|
220
|
-
echo "🔌 Initializing action plugins..."
|
|
221
|
-
if fullstack-cli action-plugin init; then
|
|
222
|
-
echo "✅ Action plugins initialized"
|
|
223
|
-
else
|
|
224
|
-
echo "⚠️ Action plugin initialization failed, continuing anyway..."
|
|
225
|
-
fi
|
|
226
|
-
echo ""
|
|
227
|
-
|
|
228
|
-
# Start server (cleanup orphan processes on SERVER_PORT)
|
|
229
|
-
start_supervised_process "server" "npm run dev:server" "${SERVER_PORT:-3000}"
|
|
230
|
-
SERVER_PID=$!
|
|
231
|
-
log_event "INFO" "server" "Supervisor started with PID ${SERVER_PID}"
|
|
232
|
-
|
|
233
|
-
# Start client (cleanup orphan processes on CLIENT_DEV_PORT)
|
|
234
|
-
start_supervised_process "client" "npm run dev:client" "${CLIENT_DEV_PORT:-8080}"
|
|
235
|
-
CLIENT_PID=$!
|
|
236
|
-
log_event "INFO" "client" "Supervisor started with PID ${CLIENT_PID}"
|
|
237
|
-
|
|
238
|
-
log_event "INFO" "main" "All processes started, monitoring..."
|
|
239
|
-
echo ""
|
|
240
|
-
echo "📋 Dev processes running. Press Ctrl+C to stop."
|
|
241
|
-
echo "📄 Logs: ${DEV_STD_LOG}"
|
|
242
|
-
echo ""
|
|
243
|
-
|
|
244
|
-
# Wait for all background processes
|
|
245
|
-
wait
|
|
2
|
+
exec node "$(dirname "$0")/dev.js" "$@"
|