@lark-apaas/fullstack-cli 1.1.9-alpha.1 → 1.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +158 -2
- package/dist/config/drizzle.config.d.ts +3 -1
- package/dist/config/drizzle.config.js +20 -16
- package/dist/index.d.ts +2 -1
- package/dist/index.js +3876 -49
- package/package.json +15 -7
- package/templates/nest-cli.json +25 -0
- package/templates/scripts/build.sh +16 -9
- package/templates/scripts/prune-smart.js +41 -7
- package/dist/commands/gen-db-schema.d.ts +0 -20
- package/dist/commands/gen-db-schema.js +0 -115
- package/dist/commands/read-logs/client-std.d.ts +0 -2
- package/dist/commands/read-logs/client-std.js +0 -98
- package/dist/commands/read-logs/json-lines.d.ts +0 -3
- package/dist/commands/read-logs/json-lines.js +0 -217
- package/dist/commands/read-logs/server-std.d.ts +0 -1
- package/dist/commands/read-logs/server-std.js +0 -25
- package/dist/commands/read-logs/std-utils.d.ts +0 -5
- package/dist/commands/read-logs/std-utils.js +0 -61
- package/dist/commands/read-logs/tail.d.ts +0 -2
- package/dist/commands/read-logs/tail.js +0 -47
- package/dist/commands/read-logs.d.ts +0 -16
- package/dist/commands/read-logs.js +0 -153
- package/dist/commands/read-logs.test.d.ts +0 -1
- package/dist/commands/read-logs.test.js +0 -199
- package/dist/commands/sync.d.ts +0 -6
- package/dist/commands/sync.js +0 -211
- package/dist/config/sync.d.ts +0 -52
- package/dist/config/sync.js +0 -54
- package/dist/utils/file-ops.d.ts +0 -17
- package/dist/utils/file-ops.js +0 -40
- package/dist/utils/file-ops.test.d.ts +0 -1
- package/dist/utils/file-ops.test.js +0 -104
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@lark-apaas/fullstack-cli",
-  "version": "1.1.9-alpha.1",
+  "version": "1.1.10",
   "description": "CLI tool for fullstack template management",
   "type": "module",
   "main": "dist/index.js",
@@ -12,7 +12,7 @@
     "bin"
   ],
   "scripts": {
-    "build": "…
+    "build": "tsup",
     "test": "vitest run",
     "test:watch": "vitest",
     "prepublishOnly": "npm run build"
@@ -31,15 +31,23 @@
     "access": "public"
   },
   "dependencies": {
-    "@lark-apaas/devtool-kits": "1.2.…
+    "@lark-apaas/devtool-kits": "^1.2.13",
+    "@lark-apaas/http-client": "0.1.2",
     "@vercel/nft": "^0.30.3",
-    "…
+    "commander": "^13.0.0",
     "dotenv": "^16.0.0",
-    "drizzle-kit": "0.31.5"
+    "drizzle-kit": "0.31.5",
+    "ts-morph": "^27.0.0",
+    "zod-to-json-schema": "^3.24.1"
   },
   "devDependencies": {
     "@types/node": "^22.0.0",
+    "tsup": "^8.3.5",
     "typescript": "^5.9.2",
-    "vitest": "^…
-  }
+    "vitest": "^2.1.8"
+  },
+  "peerDependencies": {
+    "typescript": "^5.9.2"
+  },
+  "migrationVersion": 1
 }
```

package/templates/nest-cli.json
ADDED
```diff
@@ -0,0 +1,25 @@
+{
+  "$schema": "https://json.schemastore.org/nest-cli",
+  "collection": "@nestjs/schematics",
+  "sourceRoot": "server",
+  "compilerOptions": {
+    "deleteOutDir": true,
+    "tsConfigPath": "tsconfig.node.json",
+    "assets": [
+      {
+        "include": "capabilities/**/*.json",
+        "outDir": "dist/server",
+        "watchAssets": true
+      }
+    ],
+    "plugins": [
+      {
+        "name": "@nestjs/swagger",
+        "options": {
+          "introspectComments": true,
+          "classValidatorShim": true
+        }
+      }
+    ]
+  }
+}
```

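The added nest-cli.json template copies `capabilities/**/*.json` assets into `dist/server` and enables the @nestjs/swagger CLI plugin with `introspectComments` and `classValidatorShim`. As an illustration only (the DTO below is hypothetical and not shipped in this package), this is the kind of comment-annotated class that the `introspectComments` option is designed to pick up, so JSDoc comments become OpenAPI property descriptions without explicit `@ApiProperty` decorators:

```ts
// Hypothetical DTO, for illustration; not part of this package.
// With "introspectComments": true, the @nestjs/swagger CLI plugin reads
// these comments at build time and emits the equivalent @ApiProperty metadata.
export class CreateTaskDto {
  /**
   * Human-readable task title
   * @example "Ship v1.1.10"
   */
  title: string;

  /**
   * Optional longer description shown in the task detail view
   */
  description?: string;
}
```
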
package/templates/scripts/build.sh
CHANGED
```diff
@@ -19,21 +19,28 @@ print_time() {
 }
 
 # ==================== 步骤 0 ====================
-echo "…
+echo "🗑️ [0/5] 安装插件"
 STEP_START=$(node -e "console.log(Date.now())")
-…
+npx fullstack-cli action-plugin init
 print_time $STEP_START
 echo ""
 
 # ==================== 步骤 1 ====================
-echo "…
+echo "📝 [1/5] 更新 openapi 代码"
 STEP_START=$(node -e "console.log(Date.now())")
-…
+npm run gen:openapi
 print_time $STEP_START
 echo ""
 
 # ==================== 步骤 2 ====================
-echo "…
+echo "🗑️ [2/5] 清理 dist 目录"
+STEP_START=$(node -e "console.log(Date.now())")
+rm -rf "$ROOT_DIR/dist"
+print_time $STEP_START
+echo ""
+
+# ==================== 步骤 3 ====================
+echo "🔨 [3/5] 并行构建 server 和 client"
 STEP_START=$(node -e "console.log(Date.now())")
 
 # 并行构建
@@ -70,8 +77,8 @@ echo " ✅ Client 构建完成"
 print_time $STEP_START
 echo ""
 
-# ==================== 步骤 …
-echo "📦 […
+# ==================== 步骤 4 ====================
+echo "📦 [4/5] 准备 server 依赖产物"
 STEP_START=$(node -e "console.log(Date.now())")
 
 mkdir -p "$OUT_DIR/dist/client"
@@ -89,8 +96,8 @@ rm -rf "$ROOT_DIR/dist/tsconfig.node.tsbuildinfo"
 print_time $STEP_START
 echo ""
 
-# ==================== 步骤 …
-echo "✂️ […
+# ==================== 步骤 5 ====================
+echo "✂️ [5/5] 智能依赖裁剪"
 STEP_START=$(node -e "console.log(Date.now())")
 
 # 分析实际依赖、复制并裁剪 node_modules、生成精简的 package.json
```

package/templates/scripts/prune-smart.js
CHANGED
```diff
@@ -178,9 +178,41 @@ async function smartPrune() {
     }
   }
 
-  console.log(`📦 …
+  console.log(`📦 静态分析需要 ${requiredPackages.size} 个 npm 包`);
 
-  // 4. …
+  // 4. 处理 actionPlugins(动态加载的插件,无法被静态分析追踪)
+  const originalPackage = JSON.parse(fs.readFileSync(ROOT_PACKAGE_JSON, 'utf8'));
+  const actionPlugins = originalPackage.actionPlugins || {};
+  const actionPluginNames = Object.keys(actionPlugins);
+
+  if (actionPluginNames.length > 0) {
+    console.log(`🔌 发现 ${actionPluginNames.length} 个 Action 插件,添加到依赖列表...`);
+
+    for (const pluginName of actionPluginNames) {
+      requiredPackages.add(pluginName);
+
+      // 检查插件的 peerDependencies,也添加进去
+      const pluginPkgPath = path.join(ROOT_NODE_MODULES, pluginName, 'package.json');
+      if (fs.existsSync(pluginPkgPath)) {
+        try {
+          const pluginPkg = JSON.parse(fs.readFileSync(pluginPkgPath, 'utf8'));
+          if (pluginPkg.peerDependencies) {
+            for (const peerDep of Object.keys(pluginPkg.peerDependencies)) {
+              requiredPackages.add(peerDep);
+            }
+          }
+        } catch {
+          // 忽略解析错误
+        }
+      }
+    }
+
+    console.log(`  ✅ 插件: ${actionPluginNames.join(', ')}`);
+  }
+
+  console.log(`📦 总共需要 ${requiredPackages.size} 个 npm 包\n`);
+
+  // 5. 选择性复制包(只复制需要的)
   console.log('📋 选择性复制 node_modules(仅复制需要的包)...');
   const copyStart = Date.now();
 
@@ -192,16 +224,15 @@ async function smartPrune() {
   console.log(`  成功: ${copiedCount.success} 个包,失败: ${copiedCount.failed} 个`);
   console.log(`  硬链接: ${fileStats.hardLinks} 个文件,复制: ${fileStats.copies} 个文件\n`);
 
-  // …
-  const originalPackage = JSON.parse(fs.readFileSync(ROOT_PACKAGE_JSON, 'utf8'));
-
+  // 6. 生成精简版 package.json
   // 优化:直接构建 dependencies,避免多次对象展开
   const prunedDependencies = {};
   const allDeps = originalPackage.dependencies || {};
   const allDevDeps = originalPackage.devDependencies || {};
 
   for (const pkg of requiredPackages) {
-    …
+    // 优先从 dependencies/devDependencies 获取版本,其次从 actionPlugins 获取
+    const version = allDeps[pkg] || allDevDeps[pkg] || actionPlugins[pkg];
     if (version) {
       prunedDependencies[pkg] = version;
     }
@@ -222,11 +253,14 @@ async function smartPrune() {
 
   const totalElapsed = Date.now() - totalStartTime;
 
-  // …
+  // 7. 输出统计信息
   console.log('='.repeat(60));
   console.log('📊 智能裁剪统计:');
   console.log('='.repeat(60));
   console.log(`  需要的包数量: ${requiredPackages.size}`);
+  if (actionPluginNames.length > 0) {
+    console.log(`  Action 插件: ${actionPluginNames.length} 个`);
+  }
   console.log(`  成功复制: ${copiedCount.success} 个包`);
   console.log(`  失败: ${copiedCount.failed} 个包`);
   console.log(`  硬链接文件: ${fileStats.hardLinks} 个`);
```

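For readers skimming the hunks above: the new prune-smart.js logic adds every `actionPlugins` entry from the project package.json (plus each plugin's declared `peerDependencies`) to the set of packages kept after static analysis, then resolves each kept package's version from `dependencies`, `devDependencies`, or, failing that, the `actionPlugins` map itself. The condensed TypeScript sketch below restates that flow with English comments; it is an illustration, not the shipped script, and the parameter names are placeholders for the script's `ROOT_PACKAGE_JSON` and `ROOT_NODE_MODULES` constants.

```ts
import fs from 'node:fs';
import path from 'node:path';

// Sketch of the dependency-collection step added in prune-smart.js:
// action plugins are loaded dynamically, so static analysis (@vercel/nft)
// cannot see them; they and their peerDependencies are added by hand.
function addActionPlugins(
  requiredPackages: Set<string>,
  rootPackageJsonPath: string,
  rootNodeModules: string,
): string[] {
  const rootPkg = JSON.parse(fs.readFileSync(rootPackageJsonPath, 'utf8'));
  const actionPlugins: Record<string, string> = rootPkg.actionPlugins ?? {};
  const names = Object.keys(actionPlugins);

  for (const name of names) {
    requiredPackages.add(name);

    // Also pull in each plugin's declared peerDependencies.
    const pluginPkgPath = path.join(rootNodeModules, name, 'package.json');
    if (!fs.existsSync(pluginPkgPath)) continue;
    try {
      const pluginPkg = JSON.parse(fs.readFileSync(pluginPkgPath, 'utf8'));
      for (const peer of Object.keys(pluginPkg.peerDependencies ?? {})) {
        requiredPackages.add(peer);
      }
    } catch {
      // Ignore unparsable plugin manifests, as the original script does.
    }
  }
  return names;
}
```
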
package/dist/commands/gen-db-schema.d.ts
REMOVED
```diff
@@ -1,20 +0,0 @@
-/**
- * 生成数据库 schema
- *
- * 命令行选项:
- * - --output <path>: schema 输出路径,默认 'server/database/schema.ts'
- * - --schema-filter <schemas>: schema 过滤器,逗号分隔
- * - --tables-filter <tables>: 表过滤器,逗号分隔,默认 '*'
- *
- * 环境变量配置:
- * - SUDA_DATABASE_URL: 数据库连接 URL(必需)
- * - DB_SCHEMA_OUTPUT: schema 输出路径(命令行选项优先)
- * - DRIZZLE_SCHEMA_FILTER: schema 过滤器(命令行选项优先)
- * - DRIZZLE_TABLES_FILTER: 表过滤器(命令行选项优先)
- */
-export declare function run(options?: {
-    enableNestModuleGenerate?: boolean;
-    output?: string;
-    schemaFilter?: string;
-    tablesFilter?: string;
-}): Promise<void>;
```

package/dist/commands/gen-db-schema.js
REMOVED
```diff
@@ -1,115 +0,0 @@
-import path from 'node:path';
-import fs from 'node:fs';
-import { fileURLToPath } from 'node:url';
-import { spawnSync } from 'node:child_process';
-import { createRequire } from 'node:module';
-// 加载 .env 配置
-import { config as loadEnv } from 'dotenv';
-// 创建 require 函数来加载 CommonJS 模块
-const require = createRequire(import.meta.url);
-/**
- * 生成数据库 schema
- *
- * 命令行选项:
- * - --output <path>: schema 输出路径,默认 'server/database/schema.ts'
- * - --schema-filter <schemas>: schema 过滤器,逗号分隔
- * - --tables-filter <tables>: 表过滤器,逗号分隔,默认 '*'
- *
- * 环境变量配置:
- * - SUDA_DATABASE_URL: 数据库连接 URL(必需)
- * - DB_SCHEMA_OUTPUT: schema 输出路径(命令行选项优先)
- * - DRIZZLE_SCHEMA_FILTER: schema 过滤器(命令行选项优先)
- * - DRIZZLE_TABLES_FILTER: 表过滤器(命令行选项优先)
- */
-export async function run(options = {}) {
-    // 加载用户项目的 .env 文件
-    let exitCode = 0;
-    const envPath = path.resolve(process.cwd(), '.env');
-    if (fs.existsSync(envPath)) {
-        loadEnv({ path: envPath });
-        console.log('[gen-db-schema] ✓ Loaded .env file');
-    }
-    const databaseUrl = process.env.SUDA_DATABASE_URL;
-    if (!databaseUrl) {
-        console.error('[gen-db-schema] Error: SUDA_DATABASE_URL environment variable is required');
-        process.exit(1);
-    }
-    // 命令行选项优先于环境变量
-    const outputPath = options.output || process.env.DB_SCHEMA_OUTPUT || 'server/database/schema.ts';
-    const OUT_DIR = path.resolve(process.cwd(), 'server/database/.introspect');
-    const SCHEMA_FILE = path.resolve(process.cwd(), outputPath);
-    console.log('[gen-db-schema] Starting...');
-    // 获取当前文件所在目录(ESM 方式)
-    const __filename = fileURLToPath(import.meta.url);
-    const __dirname = path.dirname(__filename);
-    // 使用 CLI 内部的 drizzle 配置
-    const cliRoot = path.resolve(__dirname, '../');
-    const configPath = path.join(cliRoot, 'config', 'drizzle.config.js');
-    if (!fs.existsSync(configPath)) {
-        console.error('[gen-db-schema] Error: drizzle config not found in CLI package');
-        process.exit(1);
-    }
-    try {
-        // 通过环境变量传递绝对路径给配置文件
-        const env = {
-            ...process.env,
-            __DRIZZLE_OUT_DIR__: OUT_DIR,
-            __DRIZZLE_SCHEMA_PATH__: SCHEMA_FILE,
-        };
-        // 执行 drizzle-kit introspect
-        const args = process.argv.slice(3).filter((arg) => !arg.startsWith('--enable-nest-module-generate'));
-        const spawnArgs = ['--yes', 'drizzle-kit', 'introspect', '--config', configPath, ...args];
-        const result = spawnSync('npx', spawnArgs, { stdio: 'inherit', env });
-        if (result.error) {
-            console.error('[gen-db-schema] Execution failed:', result.error);
-            throw result.error;
-        }
-        if ((result.status ?? 0) !== 0) {
-            throw new Error(`drizzle-kit introspect failed with status ${result.status}`);
-        }
-        // 复制生成的 schema
-        const generatedSchema = path.join(OUT_DIR, 'schema.ts');
-        if (!fs.existsSync(generatedSchema)) {
-            console.error('[gen-db-schema] schema.ts not generated');
-            throw new Error('drizzle-kit introspect failed to generate schema.ts');
-        }
-        // 后处理 schema(使用 CommonJS require 方式加载)
-        const { postprocessDrizzleSchema } = require('@lark-apaas/devtool-kits');
-        const stats = postprocessDrizzleSchema(generatedSchema);
-        if (stats?.unmatchedUnknown?.length) {
-            console.warn('[gen-db-schema] Unmatched custom types detected:', stats.unmatchedUnknown);
-        }
-        console.log('[gen-db-schema] ✓ Postprocessed schema');
-        fs.mkdirSync(path.dirname(SCHEMA_FILE), { recursive: true });
-        fs.copyFileSync(generatedSchema, SCHEMA_FILE);
-        console.log(`[gen-db-schema] ✓ Copied to ${outputPath}`);
-        try {
-            if (options.enableNestModuleGenerate) {
-                const { parseAndGenerateNestResourceTemplate } = require('@lark-apaas/devtool-kits');
-                const tsConfigFilePath = path.resolve(process.cwd(), 'tsconfig.json');
-                const schemaFilePath = SCHEMA_FILE;
-                await parseAndGenerateNestResourceTemplate({
-                    tsConfigFilePath,
-                    schemaFilePath,
-                    moduleOutputDir: path.resolve(process.cwd(), 'server/modules'),
-                });
-                console.log('[gen-db-schema] ✓ Generate NestJS Module Boilerplate Successfully');
-            }
-        }
-        catch (error) {
-            console.warn('[gen-db-schema] Generate NestJS Module Boilerplate failed:', error instanceof Error ? error.message : String(error));
-        }
-        console.log('[gen-db-schema] ✓ Complete');
-    }
-    catch (err) {
-        console.error('[gen-db-schema] Failed:', err instanceof Error ? err.message : String(err));
-        exitCode = 1;
-    }
-    finally {
-        // 清理临时文件
-        if (fs.existsSync(OUT_DIR)) {
-            fs.rmSync(OUT_DIR, { recursive: true, force: true });
-        }
-        process.exit(exitCode);
-    }
-}
```

package/dist/commands/read-logs/client-std.js
REMOVED
```diff
@@ -1,98 +0,0 @@
-import { readFileTailLines } from './tail.js';
-import { stripPrefixFromStdLine } from './std-utils.js';
-export function readClientStdSegment(filePath, maxLines) {
-    const lines = readFileTailLines(filePath, Math.max(maxLines * 5, 2000));
-    return extractClientStdSegment(lines, maxLines);
-}
-export function extractClientStdSegment(lines, maxLines) {
-    const bodyLines = lines.map(stripPrefixFromStdLine);
-    const hotStartMarkers = [
-        /file change detected\..*incremental compilation/i,
-        /starting incremental compilation/i,
-        /starting compilation/i,
-        /\bcompiling\b/i,
-        /\brecompil/i,
-    ];
-    const hotEndMarkers = [
-        /file change detected\..*incremental compilation/i,
-        /\bwebpack compiled\b/i,
-        /compiled successfully/i,
-        /compiled with warnings/i,
-        /compiled with errors/i,
-        /failed to compile/i,
-        /fast refresh/i,
-        /\bhmr\b/i,
-        /hot update/i,
-        /\bhot reload\b/i,
-        /\bhmr update\b/i,
-    ];
-    const compiledMarkers = [
-        /\bwebpack compiled\b/i,
-        /compiled successfully/i,
-        /compiled with warnings/i,
-        /compiled with errors/i,
-        /failed to compile/i,
-    ];
-    let startIndex = -1;
-    for (let i = bodyLines.length - 1; i >= 0; i -= 1) {
-        const line = bodyLines[i];
-        if (!line)
-            continue;
-        if (hotStartMarkers.some((re) => re.test(line))) {
-            startIndex = i;
-            break;
-        }
-    }
-    if (startIndex === -1) {
-        let pivotIndex = -1;
-        for (let i = bodyLines.length - 1; i >= 0; i -= 1) {
-            const line = bodyLines[i];
-            if (!line)
-                continue;
-            if (hotEndMarkers.some((re) => re.test(line))) {
-                pivotIndex = i;
-                break;
-            }
-        }
-        if (pivotIndex !== -1) {
-            if (compiledMarkers.some((re) => re.test(bodyLines[pivotIndex] ?? ''))) {
-                startIndex = pivotIndex;
-            }
-            else {
-                const searchLimit = 80;
-                const from = Math.max(0, pivotIndex - searchLimit);
-                for (let i = pivotIndex; i >= from; i -= 1) {
-                    const line = bodyLines[i];
-                    if (!line)
-                        continue;
-                    if (compiledMarkers.some((re) => re.test(line))) {
-                        startIndex = i;
-                        break;
-                    }
-                }
-                if (startIndex === -1) {
-                    startIndex = pivotIndex;
-                }
-            }
-        }
-    }
-    if (startIndex === -1) {
-        for (let i = bodyLines.length - 1; i >= 0; i -= 1) {
-            const line = bodyLines[i];
-            if (!line)
-                continue;
-            if (/\bdev:client\b/.test(line)) {
-                startIndex = i;
-                break;
-            }
-        }
-    }
-    const segment = startIndex === -1 ? bodyLines : bodyLines.slice(startIndex);
-    if (segment.length === 0) {
-        return [];
-    }
-    if (segment.length <= maxLines) {
-        return segment;
-    }
-    return segment.slice(segment.length - maxLines);
-}
```

package/dist/commands/read-logs/json-lines.d.ts
REMOVED
```diff
@@ -1,3 +0,0 @@
-export declare function readJsonLinesLastPid(filePath: string, maxLines: number): string[];
-export declare function readJsonLinesByTraceId(filePath: string, traceId: string, maxLines: number): string[];
-export declare function readJsonLinesTail(lines: string[], maxLines: number): string[];
```

package/dist/commands/read-logs/json-lines.js
REMOVED
```diff
@@ -1,217 +0,0 @@
-import fs from 'node:fs';
-function normalizePid(value) {
-    if (typeof value === 'number') {
-        return String(value);
-    }
-    if (typeof value === 'string' && value.length > 0) {
-        return value;
-    }
-    return 'unknown';
-}
-export function readJsonLinesLastPid(filePath, maxLines) {
-    const stat = fs.statSync(filePath);
-    if (stat.size === 0) {
-        return [];
-    }
-    const fd = fs.openSync(filePath, 'r');
-    const chunkSize = 64 * 1024;
-    let position = stat.size;
-    let remainder = '';
-    let targetPid = null;
-    let finished = false;
-    const collected = [];
-    try {
-        while (position > 0 && !finished) {
-            const length = Math.min(chunkSize, position);
-            position -= length;
-            const buffer = Buffer.alloc(length);
-            fs.readSync(fd, buffer, 0, length, position);
-            let chunk = buffer.toString('utf8');
-            if (remainder) {
-                chunk += remainder;
-                remainder = '';
-            }
-            const parts = chunk.split('\n');
-            remainder = parts.shift() ?? '';
-            for (let i = parts.length - 1; i >= 0; i -= 1) {
-                const line = parts[i].trim();
-                if (!line)
-                    continue;
-                let parsed = null;
-                try {
-                    parsed = JSON.parse(line);
-                }
-                catch {
-                    continue;
-                }
-                const pid = normalizePid(parsed?.pid);
-                if (targetPid === null) {
-                    targetPid = pid;
-                }
-                if (pid !== targetPid) {
-                    finished = true;
-                    break;
-                }
-                collected.push(line);
-                if (collected.length >= maxLines * 5) {
-                    finished = true;
-                    break;
-                }
-            }
-        }
-        if (!finished && remainder) {
-            const line = remainder.trim();
-            if (line) {
-                try {
-                    const parsed = JSON.parse(line);
-                    const pid = normalizePid(parsed?.pid);
-                    if (targetPid === null) {
-                        targetPid = pid;
-                    }
-                    if (pid === targetPid) {
-                        collected.push(line);
-                    }
-                }
-                catch {
-                    return [];
-                }
-            }
-        }
-    }
-    finally {
-        fs.closeSync(fd);
-    }
-    const reversed = collected.reverse();
-    if (reversed.length <= maxLines) {
-        return reversed;
-    }
-    return reversed.slice(reversed.length - maxLines);
-}
-function normalizeTraceId(value) {
-    if (typeof value === 'string') {
-        const trimmed = value.trim();
-        return trimmed ? trimmed : null;
-    }
-    if (typeof value === 'number' && Number.isFinite(value)) {
-        return String(value);
-    }
-    return null;
-}
-function extractTraceId(obj) {
-    if (!obj || typeof obj !== 'object')
-        return null;
-    const record = obj;
-    const directKeys = ['trace_id', 'traceId', 'traceID', 'traceid'];
-    for (const key of directKeys) {
-        const value = normalizeTraceId(record[key]);
-        if (value)
-            return value;
-    }
-    const meta = record['meta'];
-    if (meta && typeof meta === 'object') {
-        const metaRecord = meta;
-        for (const key of directKeys) {
-            const value = normalizeTraceId(metaRecord[key]);
-            if (value)
-                return value;
-        }
-    }
-    const attributes = record['attributes'];
-    if (attributes && typeof attributes === 'object') {
-        const attrRecord = attributes;
-        for (const key of ['traceID', 'trace_id', 'traceId', 'traceid']) {
-            const value = normalizeTraceId(attrRecord[key]);
-            if (value)
-                return value;
-        }
-    }
-    return null;
-}
-export function readJsonLinesByTraceId(filePath, traceId, maxLines) {
-    const wanted = traceId.trim();
-    if (!wanted)
-        return [];
-    const stat = fs.statSync(filePath);
-    if (stat.size === 0) {
-        return [];
-    }
-    const fd = fs.openSync(filePath, 'r');
-    const chunkSize = 64 * 1024;
-    let position = stat.size;
-    let remainder = '';
-    let processedNonEmpty = 0;
-    let finished = false;
-    const collected = [];
-    const maxProcessed = Math.max(maxLines * 200, 5000);
-    try {
-        while (position > 0 && !finished) {
-            const length = Math.min(chunkSize, position);
-            position -= length;
-            const buffer = Buffer.alloc(length);
-            fs.readSync(fd, buffer, 0, length, position);
-            let chunk = buffer.toString('utf8');
-            if (remainder) {
-                chunk += remainder;
-                remainder = '';
-            }
-            const parts = chunk.split('\n');
-            remainder = parts.shift() ?? '';
-            for (let i = parts.length - 1; i >= 0; i -= 1) {
-                const line = parts[i].trim();
-                if (!line)
-                    continue;
-                processedNonEmpty += 1;
-                try {
-                    const parsed = JSON.parse(line);
-                    const lineTraceId = extractTraceId(parsed);
-                    if (lineTraceId === wanted) {
-                        collected.push(line);
-                        if (collected.length >= maxLines) {
-                            finished = true;
-                            break;
-                        }
-                    }
-                }
-                catch {
-                    continue;
-                }
-                if (processedNonEmpty >= maxProcessed && collected.length > 0) {
-                    finished = true;
-                    break;
-                }
-            }
-        }
-        if (!finished && remainder) {
-            const line = remainder.trim();
-            if (line) {
-                try {
-                    const parsed = JSON.parse(line);
-                    const lineTraceId = extractTraceId(parsed);
-                    if (lineTraceId === wanted) {
-                        collected.push(line);
-                    }
-                }
-                catch {
-                    return [];
-                }
-            }
-        }
-    }
-    finally {
-        fs.closeSync(fd);
-    }
-    return collected.reverse();
-}
-export function readJsonLinesTail(lines, maxLines) {
-    const result = [];
-    for (let i = lines.length - 1; i >= 0; i -= 1) {
-        const line = lines[i].trim();
-        if (!line)
-            continue;
-        result.push(line);
-        if (result.length >= maxLines) {
-            break;
-        }
-    }
-    return result.reverse();
-}
```

package/dist/commands/read-logs/server-std.d.ts
REMOVED
```diff
@@ -1 +0,0 @@
-export declare function readServerStdSegment(filePath: string, maxLines: number): string[];
```