@lark-apaas/fullstack-cli 1.1.8 → 1.1.9-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/gen-db-schema.js +4 -4
- package/dist/commands/read-logs/client-std.d.ts +2 -0
- package/dist/commands/read-logs/client-std.js +98 -0
- package/dist/commands/read-logs/json-lines.d.ts +3 -0
- package/dist/commands/read-logs/json-lines.js +217 -0
- package/dist/commands/read-logs/server-std.d.ts +1 -0
- package/dist/commands/read-logs/server-std.js +25 -0
- package/dist/commands/read-logs/std-utils.d.ts +5 -0
- package/dist/commands/read-logs/std-utils.js +61 -0
- package/dist/commands/read-logs/tail.d.ts +2 -0
- package/dist/commands/read-logs/tail.js +47 -0
- package/dist/commands/read-logs.d.ts +8 -1
- package/dist/commands/read-logs.js +112 -327
- package/dist/commands/read-logs.test.js +131 -1
- package/package.json +2 -2
- package/templates/scripts/dev.sh +231 -31
|
@@ -73,16 +73,16 @@ export async function run(options = {}) {
|
|
|
73
73
|
console.error('[gen-db-schema] schema.ts not generated');
|
|
74
74
|
throw new Error('drizzle-kit introspect failed to generate schema.ts');
|
|
75
75
|
}
|
|
76
|
-
fs.mkdirSync(path.dirname(SCHEMA_FILE), { recursive: true });
|
|
77
|
-
fs.copyFileSync(generatedSchema, SCHEMA_FILE);
|
|
78
|
-
console.log(`[gen-db-schema] ✓ Copied to ${outputPath}`);
|
|
79
76
|
// 后处理 schema(使用 CommonJS require 方式加载)
|
|
80
77
|
const { postprocessDrizzleSchema } = require('@lark-apaas/devtool-kits');
|
|
81
|
-
const stats = postprocessDrizzleSchema(
|
|
78
|
+
const stats = postprocessDrizzleSchema(generatedSchema);
|
|
82
79
|
if (stats?.unmatchedUnknown?.length) {
|
|
83
80
|
console.warn('[gen-db-schema] Unmatched custom types detected:', stats.unmatchedUnknown);
|
|
84
81
|
}
|
|
85
82
|
console.log('[gen-db-schema] ✓ Postprocessed schema');
|
|
83
|
+
fs.mkdirSync(path.dirname(SCHEMA_FILE), { recursive: true });
|
|
84
|
+
fs.copyFileSync(generatedSchema, SCHEMA_FILE);
|
|
85
|
+
console.log(`[gen-db-schema] ✓ Copied to ${outputPath}`);
|
|
86
86
|
try {
|
|
87
87
|
if (options.enableNestModuleGenerate) {
|
|
88
88
|
const { parseAndGenerateNestResourceTemplate } = require('@lark-apaas/devtool-kits');
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
import { readFileTailLines } from './tail.js';
|
|
2
|
+
import { stripPrefixFromStdLine } from './std-utils.js';
|
|
3
|
+
/**
 * Read the tail of the client std log and return only the segment that
 * belongs to the latest client run / compile cycle, capped at maxLines.
 */
export function readClientStdSegment(filePath, maxLines) {
    // Over-read (5x the requested size, at least 2000 lines) so the
    // segment-start marker is likely to fall inside the scanned window
    // before extractClientStdSegment trims back down to maxLines.
    const windowSize = Math.max(maxLines * 5, 2000);
    const tailLines = readFileTailLines(filePath, windowSize);
    return extractClientStdSegment(tailLines, maxLines);
}
|
|
7
|
+
/**
 * Cut the most recent client compile segment out of std log lines (their
 * "[timestamp] [client]" prefixes are stripped first) and return at most
 * maxLines of it.
 *
 * Start-of-segment detection, in priority order:
 *   1. the newest hot-rebuild start marker (tsc/webpack "compiling" lines);
 *   2. otherwise the newest hot-rebuild end marker (compiled/HMR lines),
 *      backed up at most 80 lines to a nearby "compiled ..." line when the
 *      end marker itself is not a compile-result line;
 *   3. otherwise the newest "dev:client" line.
 * With no marker at all, the whole tail is used.
 */
export function extractClientStdSegment(lines, maxLines) {
    const bodyLines = lines.map((raw) => stripPrefixFromStdLine(raw));
    const hotStartMarkers = [
        /file change detected\..*incremental compilation/i,
        /starting incremental compilation/i,
        /starting compilation/i,
        /\bcompiling\b/i,
        /\brecompil/i,
    ];
    const hotEndMarkers = [
        /file change detected\..*incremental compilation/i,
        /\bwebpack compiled\b/i,
        /compiled successfully/i,
        /compiled with warnings/i,
        /compiled with errors/i,
        /failed to compile/i,
        /fast refresh/i,
        /\bhmr\b/i,
        /hot update/i,
        /\bhot reload\b/i,
        /\bhmr update\b/i,
    ];
    const compiledMarkers = [
        /\bwebpack compiled\b/i,
        /compiled successfully/i,
        /compiled with warnings/i,
        /compiled with errors/i,
        /failed to compile/i,
    ];
    // Newest index in [toIndex, fromIndex] whose non-empty line satisfies the
    // predicate, or -1 when none does.
    const lastIndexWhere = (predicate, fromIndex = bodyLines.length - 1, toIndex = 0) => {
        for (let i = fromIndex; i >= toIndex; i -= 1) {
            const line = bodyLines[i];
            if (line && predicate(line)) {
                return i;
            }
        }
        return -1;
    };
    const matchesAny = (patterns) => (line) => patterns.some((re) => re.test(line));
    let startIndex = lastIndexWhere(matchesAny(hotStartMarkers));
    if (startIndex === -1) {
        const pivotIndex = lastIndexWhere(matchesAny(hotEndMarkers));
        if (pivotIndex !== -1) {
            if (matchesAny(compiledMarkers)(bodyLines[pivotIndex] ?? '')) {
                startIndex = pivotIndex;
            }
            else {
                // Back up a bounded distance looking for the compile-result
                // line that this end marker belongs to.
                const searchLimit = 80;
                const lowerBound = Math.max(0, pivotIndex - searchLimit);
                const compiledIndex = lastIndexWhere(matchesAny(compiledMarkers), pivotIndex, lowerBound);
                startIndex = compiledIndex === -1 ? pivotIndex : compiledIndex;
            }
        }
    }
    if (startIndex === -1) {
        startIndex = lastIndexWhere((line) => /\bdev:client\b/.test(line));
    }
    const segment = startIndex === -1 ? bodyLines : bodyLines.slice(startIndex);
    if (segment.length === 0) {
        return [];
    }
    return segment.length <= maxLines ? segment : segment.slice(segment.length - maxLines);
}
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
/**
 * Tail a JSON-lines log file and return the lines written by the most recent
 * process (grouped by each record's `pid` field), oldest first, capped at
 * `maxLines`.
 */
export declare function readJsonLinesLastPid(filePath: string, maxLines: number): string[];
/**
 * Return up to `maxLines` lines from the file whose trace id (top-level,
 * `meta.*`, or `attributes.*` key variants) matches `traceId`, in
 * chronological order.
 */
export declare function readJsonLinesByTraceId(filePath: string, traceId: string, maxLines: number): string[];
/** Return the last `maxLines` non-blank (trimmed) entries of `lines`. */
export declare function readJsonLinesTail(lines: string[], maxLines: number): string[];
|
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
/**
 * Normalize a `pid` field from a parsed log record into a comparable string.
 * Numbers are stringified, non-empty strings pass through, and anything else
 * (missing, null, empty string) maps to the sentinel 'unknown'.
 */
function normalizePid(value) {
    if (typeof value === 'number') {
        return String(value);
    }
    if (typeof value === 'string' && value.length > 0) {
        return value;
    }
    return 'unknown';
}
/**
 * Read the tail of a JSON-lines log file and return the lines written by the
 * most recent process, oldest first.
 *
 * The file is scanned backwards in 64 KiB chunks. The pid of the last
 * parseable line defines the "current" process; scanning stops at the first
 * line with a different pid, or after maxLines * 5 collected lines.
 * Unparseable lines are skipped. At most `maxLines` lines are returned.
 *
 * @param filePath JSON-lines log file to read.
 * @param maxLines upper bound on returned lines.
 * @returns raw JSON lines in chronological order.
 */
export function readJsonLinesLastPid(filePath, maxLines) {
    const stat = fs.statSync(filePath);
    if (stat.size === 0) {
        return [];
    }
    const fd = fs.openSync(filePath, 'r');
    const chunkSize = 64 * 1024;
    let position = stat.size;
    // Bytes of the (possibly incomplete) earliest line seen so far. Kept as a
    // Buffer — not a decoded string — so a multi-byte UTF-8 character that
    // straddles a chunk boundary is decoded intact rather than as U+FFFD.
    let carry = Buffer.alloc(0);
    let targetPid = null;
    let finished = false;
    const collected = [];
    // Process one trimmed line; returns true when scanning should stop.
    const consume = (line) => {
        if (!line) {
            return false;
        }
        let parsed = null;
        try {
            parsed = JSON.parse(line);
        }
        catch {
            // Malformed line: skip it. (Previously a malformed *first* line
            // of the file returned [] and discarded everything collected.)
            return false;
        }
        const pid = normalizePid(parsed?.pid);
        if (targetPid === null) {
            targetPid = pid;
        }
        if (pid !== targetPid) {
            return true; // reached the previous process's output
        }
        collected.push(line);
        // Collect generously (5x) so the final trim still has slack.
        return collected.length >= maxLines * 5;
    };
    try {
        while (position > 0 && !finished) {
            const length = Math.min(chunkSize, position);
            position -= length;
            const buffer = Buffer.alloc(length);
            fs.readSync(fd, buffer, 0, length, position);
            const merged = carry.length ? Buffer.concat([buffer, carry]) : buffer;
            const firstNewline = merged.indexOf(0x0a);
            if (firstNewline === -1) {
                // No complete line in view yet; keep accumulating bytes.
                carry = merged;
                continue;
            }
            carry = merged.subarray(0, firstNewline);
            const parts = merged.subarray(firstNewline + 1).toString('utf8').split('\n');
            for (let i = parts.length - 1; i >= 0; i -= 1) {
                if (consume(parts[i].trim())) {
                    finished = true;
                    break;
                }
            }
        }
        if (!finished && carry.length) {
            // The file's very first line (it has no preceding newline).
            consume(carry.toString('utf8').trim());
        }
    }
    finally {
        fs.closeSync(fd);
    }
    const ordered = collected.reverse();
    if (ordered.length <= maxLines) {
        return ordered;
    }
    return ordered.slice(ordered.length - maxLines);
}
|
|
90
|
+
/**
 * Normalize a candidate trace-id value to a non-empty string, or null when
 * the value is absent, blank, or not a finite number.
 */
function normalizeTraceId(value) {
    if (typeof value === 'string') {
        const trimmed = value.trim();
        return trimmed ? trimmed : null;
    }
    if (typeof value === 'number' && Number.isFinite(value)) {
        return String(value);
    }
    return null;
}
/**
 * Extract a trace id from a parsed log record: top-level keys first, then
 * `meta.*`, then `attributes.*` (several spelling variants each).
 */
function extractTraceId(obj) {
    if (!obj || typeof obj !== 'object')
        return null;
    const record = obj;
    const directKeys = ['trace_id', 'traceId', 'traceID', 'traceid'];
    for (const key of directKeys) {
        const value = normalizeTraceId(record[key]);
        if (value)
            return value;
    }
    const meta = record['meta'];
    if (meta && typeof meta === 'object') {
        for (const key of directKeys) {
            const value = normalizeTraceId(meta[key]);
            if (value)
                return value;
        }
    }
    const attributes = record['attributes'];
    if (attributes && typeof attributes === 'object') {
        for (const key of ['traceID', 'trace_id', 'traceId', 'traceid']) {
            const value = normalizeTraceId(attributes[key]);
            if (value)
                return value;
        }
    }
    return null;
}
/**
 * Scan a JSON-lines log file backwards and return up to `maxLines` lines
 * whose trace id equals `traceId`, in chronological order. Once at least one
 * match exists, scanning is also bounded by max(maxLines * 200, 5000)
 * non-empty lines. Malformed JSON lines are skipped.
 */
export function readJsonLinesByTraceId(filePath, traceId, maxLines) {
    const wanted = traceId.trim();
    if (!wanted)
        return [];
    const stat = fs.statSync(filePath);
    if (stat.size === 0) {
        return [];
    }
    const fd = fs.openSync(filePath, 'r');
    const chunkSize = 64 * 1024;
    let position = stat.size;
    // Bytes of the (possibly incomplete) earliest line seen so far, kept as a
    // Buffer so multi-byte UTF-8 characters straddling a chunk boundary are
    // decoded correctly instead of as replacement characters.
    let carry = Buffer.alloc(0);
    let processedNonEmpty = 0;
    let finished = false;
    const collected = [];
    const maxProcessed = Math.max(maxLines * 200, 5000);
    // Process one trimmed line; returns true when scanning should stop.
    const consume = (line) => {
        if (!line)
            return false;
        processedNonEmpty += 1;
        try {
            const parsed = JSON.parse(line);
            if (extractTraceId(parsed) === wanted) {
                collected.push(line);
                if (collected.length >= maxLines)
                    return true;
            }
        }
        catch {
            // Malformed line: skip it; the scan-budget stop check below is
            // bypassed for malformed lines. (Previously a malformed *first*
            // line of the file returned [] and discarded all matches.)
            return false;
        }
        return processedNonEmpty >= maxProcessed && collected.length > 0;
    };
    try {
        while (position > 0 && !finished) {
            const length = Math.min(chunkSize, position);
            position -= length;
            const buffer = Buffer.alloc(length);
            fs.readSync(fd, buffer, 0, length, position);
            const merged = carry.length ? Buffer.concat([buffer, carry]) : buffer;
            const firstNewline = merged.indexOf(0x0a);
            if (firstNewline === -1) {
                // No complete line in view yet; keep accumulating bytes.
                carry = merged;
                continue;
            }
            carry = merged.subarray(0, firstNewline);
            const parts = merged.subarray(firstNewline + 1).toString('utf8').split('\n');
            for (let i = parts.length - 1; i >= 0; i -= 1) {
                if (consume(parts[i].trim())) {
                    finished = true;
                    break;
                }
            }
        }
        if (!finished && carry.length) {
            // The file's very first line (no preceding newline).
            consume(carry.toString('utf8').trim());
        }
    }
    finally {
        fs.closeSync(fd);
    }
    return collected.reverse();
}
|
|
205
|
+
/**
 * Return the last `maxLines` non-blank entries of `lines`, trimmed, in their
 * original order. Whitespace-only entries are dropped.
 */
export function readJsonLinesTail(lines, maxLines) {
    const tail = [];
    let index = lines.length;
    while (index > 0) {
        index -= 1;
        const text = lines[index].trim();
        if (!text) {
            continue;
        }
        // Build front-to-back while walking backwards, so no final reverse.
        tail.unshift(text);
        if (tail.length >= maxLines) {
            break;
        }
    }
    return tail;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Tail the server std log, returning at most `maxLines` prefix-stripped lines
 * starting at the latest server start/compile marker (falls back to a plain
 * tail when no marker is found).
 */
export declare function readServerStdSegment(filePath: string, maxLines: number): string[];
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { readFileTailLines } from './tail.js';
|
|
2
|
+
import { readStdLinesTailFromLastMarker, stripPrefixFromStdLine } from './std-utils.js';
|
|
3
|
+
/**
 * Tail the server std log, returning at most maxLines prefix-stripped lines
 * starting at the latest server start/compile marker. When no marker is
 * present in the scanned window, falls back to a plain tail of the file.
 */
export function readServerStdSegment(filePath, maxLines) {
    // Substrings that mark the start of a server run / rebuild cycle.
    const markerSubstrings = [
        'Starting compilation in watch mode',
        'File change detected. Starting incremental compilation',
        'Starting Nest application',
        'Nest application successfully started',
    ];
    const isMarker = (line) => {
        if (!line) {
            return false;
        }
        if (/\bdev:server\b/.test(line)) {
            return true;
        }
        return markerSubstrings.some((text) => line.includes(text));
    };
    const scanned = readStdLinesTailFromLastMarker(filePath, maxLines, isMarker);
    if (scanned.markerFound) {
        return scanned.lines;
    }
    return readFileTailLines(filePath, maxLines).map(stripPrefixFromStdLine);
}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
/**
 * Strip the "[YYYY-MM-DD hh:mm:ss] [server|client] " prefix from a std log
 * line. Lines without the prefix are returned unchanged.
 */
export function stripPrefixFromStdLine(line) {
    const match = line.match(/^(\[(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\] \[(server|client)\] )(.*)$/);
    if (!match) {
        return line;
    }
    return match[4] || '';
}
/**
 * Scan a std log file backwards and return up to `maxLines` of the newest
 * lines (prefix-stripped, chronological order), stopping at the most recent
 * line for which `isMarker` returns true; the marker line is included.
 * `markerFound` reports whether such a line was seen.
 *
 * The file is read backwards in 64 KiB chunks; the (possibly incomplete)
 * first line of the scanned region is carried as raw bytes so multi-byte
 * UTF-8 characters straddling a chunk boundary are decoded intact instead of
 * as replacement characters.
 */
export function readStdLinesTailFromLastMarker(filePath, maxLines, isMarker) {
    const stat = fs.statSync(filePath);
    if (stat.size === 0) {
        return { lines: [], markerFound: false };
    }
    const fd = fs.openSync(filePath, 'r');
    const chunkSize = 64 * 1024;
    let position = stat.size;
    let carry = Buffer.alloc(0);
    let markerFound = false;
    let finished = false;
    const collected = [];
    // Record one prefix-stripped line (empty lines included, matching the
    // original split behavior) and report whether it is the marker.
    const consume = (line) => {
        if (collected.length < maxLines) {
            collected.push(line);
        }
        return isMarker(line);
    };
    try {
        while (position > 0 && !finished) {
            const length = Math.min(chunkSize, position);
            position -= length;
            const buffer = Buffer.alloc(length);
            fs.readSync(fd, buffer, 0, length, position);
            const merged = carry.length ? Buffer.concat([buffer, carry]) : buffer;
            const firstNewline = merged.indexOf(0x0a);
            if (firstNewline === -1) {
                // No complete line in view yet; keep accumulating bytes.
                carry = merged;
                continue;
            }
            carry = merged.subarray(0, firstNewline);
            const parts = merged.subarray(firstNewline + 1).toString('utf8').split('\n');
            for (let i = parts.length - 1; i >= 0; i -= 1) {
                if (consume(stripPrefixFromStdLine(parts[i]))) {
                    markerFound = true;
                    finished = true;
                    break;
                }
            }
        }
        if (!finished && carry.length) {
            // The file's very first line (it has no preceding newline).
            if (consume(stripPrefixFromStdLine(carry.toString('utf8')))) {
                markerFound = true;
            }
        }
    }
    finally {
        fs.closeSync(fd);
    }
    return { lines: collected.reverse(), markerFound };
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
/**
 * True when `filePath` exists and is readable by the current process.
 * Never throws.
 */
export function fileExists(filePath) {
    let readable = false;
    try {
        fs.accessSync(filePath, fs.constants.F_OK | fs.constants.R_OK);
        readable = true;
    }
    catch {
        // Missing or unreadable — report as absent.
    }
    return readable;
}
|
|
11
|
+
/**
 * Return the last `maxLines` lines of a file.
 *
 * The file is read backwards in 64 KiB chunks until enough newlines have been
 * seen, then the collected bytes are decoded once. Decoding after reassembly
 * (instead of per chunk, as before) keeps multi-byte UTF-8 characters that
 * straddle a chunk boundary intact, and avoids decoding every chunk twice.
 * Counting 0x0A bytes is exact because that byte never occurs inside a
 * multi-byte UTF-8 sequence. A trailing newline does not produce an empty
 * final line.
 */
export function readFileTailLines(filePath, maxLines) {
    const stat = fs.statSync(filePath);
    if (stat.size === 0) {
        return [];
    }
    const fd = fs.openSync(filePath, 'r');
    const chunkSize = 64 * 1024;
    const chunks = [];
    let position = stat.size;
    let newlineCount = 0;
    try {
        while (position > 0 && newlineCount <= maxLines) {
            const length = Math.min(chunkSize, position);
            position -= length;
            const buffer = Buffer.alloc(length);
            fs.readSync(fd, buffer, 0, length, position);
            chunks.unshift(buffer);
            for (let i = 0; i < buffer.length; i += 1) {
                if (buffer[i] === 0x0a) {
                    newlineCount += 1;
                }
            }
        }
    }
    finally {
        fs.closeSync(fd);
    }
    // split('\n') always yields at least one element, so no empty-array check
    // is needed here.
    const allLines = Buffer.concat(chunks).toString('utf8').split('\n');
    if (allLines[allLines.length - 1] === '') {
        allLines.pop();
    }
    if (allLines.length <= maxLines) {
        return allLines;
    }
    return allLines.slice(allLines.length - maxLines);
}
|
|
@@ -1,9 +1,16 @@
|
|
|
1
|
-
/** Log categories understood by the read-logs command. */
type LogType = 'server' | 'trace' | 'server-std' | 'client-std' | 'browser';
/** JSON-shaped result returned by {@link readLogsJsonResult}. */
export interface ReadLogsJsonResult {
    hasError: boolean;
    message: string;
    logs?: unknown[];
}
interface ReadLogsOptions {
    logDir: string;
    type: LogType;
    maxLines?: number;
    // Optional trace-id filter; presumably applies to type 'trace' —
    // TODO(review): confirm against the implementation.
    traceId?: string;
}
export declare function readLatestLogLines(options: ReadLogsOptions): Promise<string[]>;
export declare function readLogsJsonResult(options: ReadLogsOptions): Promise<ReadLogsJsonResult>;
export declare function run(options: ReadLogsOptions): Promise<void>;
export {};
|