@kodrunhq/opencode-autopilot 1.16.0 → 1.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/inspect.ts +2 -2
- package/package.json +1 -1
- package/src/config/index.ts +29 -0
- package/src/config/migrations.ts +196 -0
- package/src/config/v7.ts +45 -0
- package/src/config.ts +3 -3
- package/src/health/checks.ts +97 -0
- package/src/health/types.ts +1 -1
- package/src/index.ts +25 -2
- package/src/kernel/transaction.ts +48 -0
- package/src/kernel/types.ts +1 -2
- package/src/logging/domains.ts +39 -0
- package/src/logging/forensic-writer.ts +177 -0
- package/src/logging/index.ts +4 -0
- package/src/logging/logger.ts +44 -0
- package/src/logging/performance.ts +59 -0
- package/src/logging/rotation.ts +261 -0
- package/src/logging/types.ts +33 -0
- package/src/memory/capture-utils.ts +149 -0
- package/src/memory/capture.ts +16 -197
- package/src/memory/decay.ts +11 -2
- package/src/memory/injector.ts +4 -1
- package/src/memory/lessons.ts +85 -0
- package/src/memory/observations.ts +177 -0
- package/src/memory/preferences.ts +718 -0
- package/src/memory/projects.ts +83 -0
- package/src/memory/repository.ts +46 -1001
- package/src/memory/retrieval.ts +5 -1
- package/src/observability/context-display.ts +8 -0
- package/src/observability/event-handlers.ts +44 -6
- package/src/observability/forensic-log.ts +10 -2
- package/src/observability/forensic-schemas.ts +9 -1
- package/src/observability/log-reader.ts +20 -1
- package/src/orchestrator/error-context.ts +24 -0
- package/src/orchestrator/handlers/build-utils.ts +118 -0
- package/src/orchestrator/handlers/build.ts +13 -148
- package/src/orchestrator/handlers/retrospective.ts +0 -1
- package/src/orchestrator/lesson-memory.ts +7 -2
- package/src/orchestrator/orchestration-logger.ts +46 -31
- package/src/orchestrator/progress.ts +63 -0
- package/src/review/memory.ts +11 -3
- package/src/review/parse-findings.ts +116 -0
- package/src/review/pipeline.ts +3 -107
- package/src/review/selection.ts +38 -4
- package/src/scoring/time-provider.ts +23 -0
- package/src/tools/doctor.ts +2 -2
- package/src/tools/logs.ts +32 -6
- package/src/tools/orchestrate.ts +11 -9
- package/src/tools/replay.ts +42 -0
- package/src/tools/review.ts +8 -2
- package/src/tools/summary.ts +43 -0
- package/src/utils/random.ts +33 -0
- package/src/ux/session-summary.ts +56 -0
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
import {
|
|
2
|
+
appendForensicEvent,
|
|
3
|
+
appendForensicEventForArtifactDir,
|
|
4
|
+
} from "../observability/forensic-log";
|
|
5
|
+
import type { ForensicEventDomain, ForensicEventType } from "../observability/forensic-types";
|
|
6
|
+
import type { LogEntry, LogSink } from "./types";
|
|
7
|
+
|
|
8
|
+
export function createForensicSinkForArtifactDir(artifactDir: string): LogSink {
|
|
9
|
+
return {
|
|
10
|
+
write(entry: LogEntry): void {
|
|
11
|
+
const {
|
|
12
|
+
domain,
|
|
13
|
+
operation,
|
|
14
|
+
runId,
|
|
15
|
+
sessionId,
|
|
16
|
+
parentSessionId,
|
|
17
|
+
phase,
|
|
18
|
+
dispatchId,
|
|
19
|
+
taskId,
|
|
20
|
+
agent,
|
|
21
|
+
code,
|
|
22
|
+
subsystem,
|
|
23
|
+
...payload
|
|
24
|
+
} = entry.metadata;
|
|
25
|
+
|
|
26
|
+
let forensicDomain: ForensicEventDomain = "system";
|
|
27
|
+
if (
|
|
28
|
+
domain === "session" ||
|
|
29
|
+
domain === "orchestrator" ||
|
|
30
|
+
domain === "contract" ||
|
|
31
|
+
domain === "system" ||
|
|
32
|
+
domain === "review"
|
|
33
|
+
) {
|
|
34
|
+
forensicDomain = domain;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
let forensicType: ForensicEventType = "info";
|
|
38
|
+
|
|
39
|
+
if (operation && isValidForensicType(operation as string)) {
|
|
40
|
+
forensicType = operation as ForensicEventType;
|
|
41
|
+
} else {
|
|
42
|
+
switch (entry.level) {
|
|
43
|
+
case "ERROR":
|
|
44
|
+
forensicType = "error";
|
|
45
|
+
break;
|
|
46
|
+
case "WARN":
|
|
47
|
+
forensicType = "warning";
|
|
48
|
+
break;
|
|
49
|
+
case "INFO":
|
|
50
|
+
forensicType = "info";
|
|
51
|
+
break;
|
|
52
|
+
case "DEBUG":
|
|
53
|
+
forensicType = "debug";
|
|
54
|
+
break;
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
appendForensicEventForArtifactDir(artifactDir, {
|
|
59
|
+
timestamp: entry.timestamp,
|
|
60
|
+
domain: forensicDomain,
|
|
61
|
+
runId: (runId as string) ?? null,
|
|
62
|
+
sessionId: (sessionId as string) ?? null,
|
|
63
|
+
parentSessionId: (parentSessionId as string) ?? null,
|
|
64
|
+
phase: (phase as string) ?? null,
|
|
65
|
+
dispatchId: (dispatchId as string) ?? null,
|
|
66
|
+
taskId: (taskId as number) ?? null,
|
|
67
|
+
agent: (agent as string) ?? null,
|
|
68
|
+
type: forensicType,
|
|
69
|
+
code: (code as string) ?? null,
|
|
70
|
+
message: entry.message,
|
|
71
|
+
payload: {
|
|
72
|
+
...payload,
|
|
73
|
+
...(subsystem ? { subsystem } : {}),
|
|
74
|
+
} as Record<string, string | number | boolean | object | readonly unknown[] | null>,
|
|
75
|
+
});
|
|
76
|
+
},
|
|
77
|
+
};
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
export function createForensicSink(projectRoot: string): LogSink {
|
|
81
|
+
return {
|
|
82
|
+
write(entry: LogEntry): void {
|
|
83
|
+
const {
|
|
84
|
+
domain,
|
|
85
|
+
operation,
|
|
86
|
+
runId,
|
|
87
|
+
sessionId,
|
|
88
|
+
parentSessionId,
|
|
89
|
+
phase,
|
|
90
|
+
dispatchId,
|
|
91
|
+
taskId,
|
|
92
|
+
agent,
|
|
93
|
+
code,
|
|
94
|
+
subsystem,
|
|
95
|
+
...payload
|
|
96
|
+
} = entry.metadata;
|
|
97
|
+
|
|
98
|
+
let forensicDomain: ForensicEventDomain = "system";
|
|
99
|
+
if (
|
|
100
|
+
domain === "session" ||
|
|
101
|
+
domain === "orchestrator" ||
|
|
102
|
+
domain === "contract" ||
|
|
103
|
+
domain === "system" ||
|
|
104
|
+
domain === "review"
|
|
105
|
+
) {
|
|
106
|
+
forensicDomain = domain;
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
let forensicType: ForensicEventType = "info";
|
|
110
|
+
|
|
111
|
+
if (operation && isValidForensicType(operation as string)) {
|
|
112
|
+
forensicType = operation as ForensicEventType;
|
|
113
|
+
} else {
|
|
114
|
+
switch (entry.level) {
|
|
115
|
+
case "ERROR":
|
|
116
|
+
forensicType = "error";
|
|
117
|
+
break;
|
|
118
|
+
case "WARN":
|
|
119
|
+
forensicType = "warning";
|
|
120
|
+
break;
|
|
121
|
+
case "INFO":
|
|
122
|
+
forensicType = "info";
|
|
123
|
+
break;
|
|
124
|
+
case "DEBUG":
|
|
125
|
+
forensicType = "debug";
|
|
126
|
+
break;
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
appendForensicEvent(projectRoot, {
|
|
131
|
+
timestamp: entry.timestamp,
|
|
132
|
+
projectRoot,
|
|
133
|
+
domain: forensicDomain,
|
|
134
|
+
runId: (runId as string) ?? null,
|
|
135
|
+
sessionId: (sessionId as string) ?? null,
|
|
136
|
+
parentSessionId: (parentSessionId as string) ?? null,
|
|
137
|
+
phase: (phase as string) ?? null,
|
|
138
|
+
dispatchId: (dispatchId as string) ?? null,
|
|
139
|
+
taskId: (taskId as number) ?? null,
|
|
140
|
+
agent: (agent as string) ?? null,
|
|
141
|
+
type: forensicType,
|
|
142
|
+
code: (code as string) ?? null,
|
|
143
|
+
message: entry.message,
|
|
144
|
+
payload: {
|
|
145
|
+
...payload,
|
|
146
|
+
...(subsystem ? { subsystem } : {}),
|
|
147
|
+
} as Record<string, string | number | boolean | object | readonly unknown[] | null>,
|
|
148
|
+
});
|
|
149
|
+
},
|
|
150
|
+
};
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
function isValidForensicType(type: string): boolean {
|
|
154
|
+
const validTypes = [
|
|
155
|
+
"run_started",
|
|
156
|
+
"dispatch",
|
|
157
|
+
"dispatch_multi",
|
|
158
|
+
"result_applied",
|
|
159
|
+
"phase_transition",
|
|
160
|
+
"complete",
|
|
161
|
+
"decision",
|
|
162
|
+
"error",
|
|
163
|
+
"loop_detected",
|
|
164
|
+
"failure_recorded",
|
|
165
|
+
"warning",
|
|
166
|
+
"session_start",
|
|
167
|
+
"session_end",
|
|
168
|
+
"fallback",
|
|
169
|
+
"model_switch",
|
|
170
|
+
"context_warning",
|
|
171
|
+
"tool_complete",
|
|
172
|
+
"compacted",
|
|
173
|
+
"info",
|
|
174
|
+
"debug",
|
|
175
|
+
];
|
|
176
|
+
return validTypes.includes(type);
|
|
177
|
+
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import type { LogEntry, Logger, LogLevel, LogMetadata, LogSink } from "./types";
|
|
2
|
+
|
|
3
|
+
export class BaseLogger implements Logger {
|
|
4
|
+
constructor(
|
|
5
|
+
private readonly sink: LogSink,
|
|
6
|
+
private readonly baseMetadata: LogMetadata,
|
|
7
|
+
) {}
|
|
8
|
+
|
|
9
|
+
debug(message: string, metadata?: Partial<LogMetadata>): void {
|
|
10
|
+
this.log("DEBUG", message, metadata);
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
info(message: string, metadata?: Partial<LogMetadata>): void {
|
|
14
|
+
this.log("INFO", message, metadata);
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
warn(message: string, metadata?: Partial<LogMetadata>): void {
|
|
18
|
+
this.log("WARN", message, metadata);
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
error(message: string, metadata?: Partial<LogMetadata>): void {
|
|
22
|
+
this.log("ERROR", message, metadata);
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
child(metadata: Partial<LogMetadata>): Logger {
|
|
26
|
+
return new BaseLogger(this.sink, {
|
|
27
|
+
...this.baseMetadata,
|
|
28
|
+
...metadata,
|
|
29
|
+
});
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
private log(level: LogLevel, message: string, metadata?: Partial<LogMetadata>): void {
|
|
33
|
+
const entry: LogEntry = {
|
|
34
|
+
timestamp: new Date().toISOString(),
|
|
35
|
+
level,
|
|
36
|
+
message,
|
|
37
|
+
metadata: {
|
|
38
|
+
...this.baseMetadata,
|
|
39
|
+
...metadata,
|
|
40
|
+
},
|
|
41
|
+
};
|
|
42
|
+
this.sink.write(Object.freeze(entry));
|
|
43
|
+
}
|
|
44
|
+
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { getLogger } from "./domains";
|
|
2
|
+
|
|
3
|
+
function log() {
|
|
4
|
+
return getLogger("system", "performance");
|
|
5
|
+
}
|
|
6
|
+
|
|
7
|
+
/** Point-in-time copy of the process memory counters, in bytes (mirrors `process.memoryUsage()`). */
export interface MemorySnapshot {
  /** Resident set size of the process. */
  readonly rss: number;
  /** Total size of the V8 heap. */
  readonly heapTotal: number;
  /** Portion of the V8 heap currently in use. */
  readonly heapUsed: number;
  /** Memory used outside the V8 heap (per `process.memoryUsage().external`). */
  readonly external: number;
  /** Memory allocated for ArrayBuffers (per `process.memoryUsage().arrayBuffers`). */
  readonly arrayBuffers: number;
}

/** Handle returned by `startTimer`; call `stop()` once to log the elapsed duration. */
export interface TimerHandle {
  stop(metadata?: Record<string, unknown>): void;
}
|
|
18
|
+
|
|
19
|
+
export function recordMemoryUsage(): void {
|
|
20
|
+
const mem = process.memoryUsage();
|
|
21
|
+
|
|
22
|
+
const snapshot: MemorySnapshot = {
|
|
23
|
+
rss: mem.rss,
|
|
24
|
+
heapTotal: mem.heapTotal,
|
|
25
|
+
heapUsed: mem.heapUsed,
|
|
26
|
+
external: mem.external,
|
|
27
|
+
arrayBuffers: mem.arrayBuffers,
|
|
28
|
+
};
|
|
29
|
+
|
|
30
|
+
log().info("memory usage", {
|
|
31
|
+
operation: "memory_snapshot",
|
|
32
|
+
...snapshot,
|
|
33
|
+
});
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export function startTimer(operation: string): TimerHandle {
|
|
37
|
+
// performance.now() is monotonic and unaffected by system-clock adjustments
|
|
38
|
+
const startMs = performance.now();
|
|
39
|
+
|
|
40
|
+
return {
|
|
41
|
+
stop(metadata?: Record<string, unknown>): void {
|
|
42
|
+
const durationMs = performance.now() - startMs;
|
|
43
|
+
|
|
44
|
+
log().info("operation completed", {
|
|
45
|
+
operation,
|
|
46
|
+
durationMs,
|
|
47
|
+
...metadata,
|
|
48
|
+
});
|
|
49
|
+
},
|
|
50
|
+
};
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
export function recordAgentResponseTime(agent: string, durationMs: number): void {
|
|
54
|
+
log().info("agent response time", {
|
|
55
|
+
operation: "agent_response_time",
|
|
56
|
+
agent,
|
|
57
|
+
durationMs,
|
|
58
|
+
});
|
|
59
|
+
}
|
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Log rotation and retention for the OpenCode Autopilot plugin.
|
|
3
|
+
*
|
|
4
|
+
* Handles:
|
|
5
|
+
* - Max file-count enforcement (oldest files pruned first)
|
|
6
|
+
* - Time-based expiry (files older than `maxAgeDays` are removed)
|
|
7
|
+
* - Gzip compression of rotated `.log` / `.jsonl` files
|
|
8
|
+
*
|
|
9
|
+
* All filesystem operations use `node:fs/promises` for portability.
|
|
10
|
+
*
|
|
11
|
+
* @module
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { createReadStream, createWriteStream } from "node:fs";
|
|
15
|
+
import { readdir, rename, stat, unlink } from "node:fs/promises";
|
|
16
|
+
import { join } from "node:path";
|
|
17
|
+
import { createGzip } from "node:zlib";
|
|
18
|
+
import { isEnoentError } from "../utils/fs-helpers";
|
|
19
|
+
|
|
20
|
+
/** Extensions eligible for gzip compression during rotation. */
const COMPRESSIBLE_EXTENSIONS = new Set([".log", ".jsonl"]);

/** Default maximum number of log files to keep (excluding compressed archives). */
const DEFAULT_MAX_FILES = 10;

/** Default maximum log file size in bytes (10 MiB). */
const DEFAULT_MAX_SIZE_BYTES = 10 * 1024 * 1024;

/** Default maximum age in days before a file is deleted. */
const DEFAULT_MAX_AGE_DAYS = 30;

/** Tuning knobs for the `rotateLogs` retention policy. */
export interface RotationOptions {
  /**
   * Maximum number of log files to retain (oldest are pruned first).
   * Does not count `.gz` archives.
   * @default 10
   */
  readonly maxFiles?: number;

  /**
   * Maximum individual file size in bytes. Files exceeding this limit are
   * compressed and renamed with a `.gz` extension before the next write.
   * @default 10_485_760 (10 MiB)
   */
  readonly maxSize?: number;

  /**
   * Maximum age in days. Files (and archives) older than this are deleted.
   * @default 30
   */
  readonly maxAgeDays?: number;
}

/** Summary of what one rotation run changed on disk. */
export interface RotationResult {
  /** Number of files compressed into `.gz` archives. */
  readonly compressed: number;
  /** Number of files deleted (age or count limit exceeded). */
  readonly deleted: number;
}

/** Snapshot of one regular file found while scanning the log directory. */
interface FileEntry {
  /** Base name of the file (no directory component). */
  readonly name: string;
  /** Full path (log directory joined with name). */
  readonly path: string;
  /** Last-modified time in milliseconds since the Unix epoch. */
  readonly mtimeMs: number;
  /** Size in bytes at the time of the scan. */
  readonly size: number;
}
|
|
67
|
+
|
|
68
|
+
function isCompressible(name: string): boolean {
|
|
69
|
+
const dot = name.lastIndexOf(".");
|
|
70
|
+
if (dot === -1) return false;
|
|
71
|
+
const ext = name.slice(dot);
|
|
72
|
+
return COMPRESSIBLE_EXTENSIONS.has(ext);
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
function isArchive(name: string): boolean {
|
|
76
|
+
return name.endsWith(".gz");
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Compresses `sourcePath` to `sourcePath + ".gz"` then removes the original.
|
|
81
|
+
* Returns `true` on success, `false` if the source vanished mid-flight.
|
|
82
|
+
*/
|
|
83
|
+
async function gzipFile(sourcePath: string): Promise<boolean> {
|
|
84
|
+
const archivePath = `${sourcePath}.gz`;
|
|
85
|
+
await new Promise<void>((resolve, reject) => {
|
|
86
|
+
const readStream = createReadStream(sourcePath);
|
|
87
|
+
const writeStream = createWriteStream(archivePath);
|
|
88
|
+
const gzip = createGzip();
|
|
89
|
+
|
|
90
|
+
readStream.on("error", reject);
|
|
91
|
+
writeStream.on("error", reject);
|
|
92
|
+
writeStream.on("finish", resolve);
|
|
93
|
+
|
|
94
|
+
readStream.pipe(gzip).pipe(writeStream);
|
|
95
|
+
});
|
|
96
|
+
// Only remove the original after the archive is fully written.
|
|
97
|
+
await unlink(sourcePath);
|
|
98
|
+
return true;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
/**
|
|
102
|
+
* Reads all entries in `logDir`, resolving `stat` for each.
|
|
103
|
+
* Silently skips entries that disappear between readdir and stat.
|
|
104
|
+
*/
|
|
105
|
+
async function listEntries(logDir: string): Promise<readonly FileEntry[]> {
|
|
106
|
+
let names: string[];
|
|
107
|
+
try {
|
|
108
|
+
names = await readdir(logDir);
|
|
109
|
+
} catch (error: unknown) {
|
|
110
|
+
if (isEnoentError(error)) return [];
|
|
111
|
+
throw error;
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
const entries: FileEntry[] = [];
|
|
115
|
+
for (const name of names) {
|
|
116
|
+
const filePath = join(logDir, name);
|
|
117
|
+
try {
|
|
118
|
+
const fileStat = await stat(filePath);
|
|
119
|
+
if (!fileStat.isFile()) continue;
|
|
120
|
+
entries.push({
|
|
121
|
+
name,
|
|
122
|
+
path: filePath,
|
|
123
|
+
mtimeMs: fileStat.mtimeMs,
|
|
124
|
+
size: fileStat.size,
|
|
125
|
+
});
|
|
126
|
+
} catch (error: unknown) {
|
|
127
|
+
if (!isEnoentError(error)) throw error;
|
|
128
|
+
// File disappeared between readdir and stat — skip it.
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
return entries;
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
/**
|
|
136
|
+
* Checks whether a single file exceeds the given `maxSize` threshold.
|
|
137
|
+
*
|
|
138
|
+
* Intended for use by writers that want to rotate before the next append.
|
|
139
|
+
*
|
|
140
|
+
* @param filePath - Absolute path to the log file.
|
|
141
|
+
* @param maxSize - Size limit in bytes.
|
|
142
|
+
* @returns `true` when the file exists and its size exceeds `maxSize`.
|
|
143
|
+
*/
|
|
144
|
+
export async function exceedsMaxSize(filePath: string, maxSize: number): Promise<boolean> {
|
|
145
|
+
try {
|
|
146
|
+
const fileStat = await stat(filePath);
|
|
147
|
+
return fileStat.isFile() && fileStat.size > maxSize;
|
|
148
|
+
} catch (error: unknown) {
|
|
149
|
+
if (isEnoentError(error)) return false;
|
|
150
|
+
throw error;
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
/**
|
|
155
|
+
* Rotates a single active log file by compressing it to `<filePath>.gz`.
|
|
156
|
+
*
|
|
157
|
+
* The caller is responsible for opening a fresh log file afterwards.
|
|
158
|
+
* Returns `true` when the file was successfully rotated, `false` when the
|
|
159
|
+
* file did not exist (nothing to rotate).
|
|
160
|
+
*
|
|
161
|
+
* @param filePath - Absolute path to the log file to rotate.
|
|
162
|
+
*/
|
|
163
|
+
export async function rotateFile(filePath: string): Promise<boolean> {
|
|
164
|
+
try {
|
|
165
|
+
const fileStat = await stat(filePath);
|
|
166
|
+
if (!fileStat.isFile()) return false;
|
|
167
|
+
} catch (error: unknown) {
|
|
168
|
+
if (isEnoentError(error)) return false;
|
|
169
|
+
throw error;
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
if (!isCompressible(filePath)) {
|
|
173
|
+
// Non-compressible files are renamed with a timestamp suffix.
|
|
174
|
+
const rotatedPath = `${filePath}.${Date.now()}.bak`;
|
|
175
|
+
await rename(filePath, rotatedPath);
|
|
176
|
+
return true;
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
return gzipFile(filePath);
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
/**
|
|
183
|
+
* Runs the full rotation and retention policy for all log files in `logDir`.
|
|
184
|
+
*
|
|
185
|
+
* **What this does (in order):**
|
|
186
|
+
* 1. Compress oversized `.log` / `.jsonl` files into `.gz` archives.
|
|
187
|
+
* 2. Delete files (any extension) older than `maxAgeDays`.
|
|
188
|
+
* 3. Prune oldest plain log files when their count exceeds `maxFiles`.
|
|
189
|
+
*
|
|
190
|
+
* @param logDir - Directory containing log files.
|
|
191
|
+
* @param options - Rotation and retention options.
|
|
192
|
+
* @returns Counts of compressed and deleted files.
|
|
193
|
+
*/
|
|
194
|
+
export async function rotateLogs(
|
|
195
|
+
logDir: string,
|
|
196
|
+
options?: RotationOptions,
|
|
197
|
+
): Promise<RotationResult> {
|
|
198
|
+
const maxFiles = options?.maxFiles ?? DEFAULT_MAX_FILES;
|
|
199
|
+
const maxSize = options?.maxSize ?? DEFAULT_MAX_SIZE_BYTES;
|
|
200
|
+
const maxAgeDays = options?.maxAgeDays ?? DEFAULT_MAX_AGE_DAYS;
|
|
201
|
+
const ageThresholdMs = Date.now() - maxAgeDays * 24 * 60 * 60 * 1000;
|
|
202
|
+
|
|
203
|
+
let compressed = 0;
|
|
204
|
+
let deleted = 0;
|
|
205
|
+
|
|
206
|
+
// --- Pass 1: Compress oversized plain log files ---
|
|
207
|
+
{
|
|
208
|
+
const entries = await listEntries(logDir);
|
|
209
|
+
for (const entry of entries) {
|
|
210
|
+
if (isArchive(entry.name)) continue;
|
|
211
|
+
if (!isCompressible(entry.name)) continue;
|
|
212
|
+
if (entry.size <= maxSize) continue;
|
|
213
|
+
|
|
214
|
+
try {
|
|
215
|
+
const rotated = await gzipFile(entry.path);
|
|
216
|
+
if (rotated) compressed++;
|
|
217
|
+
} catch (error: unknown) {
|
|
218
|
+
if (!isEnoentError(error)) throw error;
|
|
219
|
+
// File disappeared — not an error.
|
|
220
|
+
}
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
// --- Pass 2: Delete files older than maxAgeDays (any extension) ---
|
|
225
|
+
{
|
|
226
|
+
const entries = await listEntries(logDir);
|
|
227
|
+
for (const entry of entries) {
|
|
228
|
+
if (entry.mtimeMs >= ageThresholdMs) continue;
|
|
229
|
+
|
|
230
|
+
try {
|
|
231
|
+
await unlink(entry.path);
|
|
232
|
+
deleted++;
|
|
233
|
+
} catch (error: unknown) {
|
|
234
|
+
if (!isEnoentError(error)) throw error;
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
// --- Pass 3: Prune oldest plain log files that exceed maxFiles count ---
|
|
240
|
+
{
|
|
241
|
+
const entries = await listEntries(logDir);
|
|
242
|
+
const plainLogs = entries
|
|
243
|
+
.filter((e) => !isArchive(e.name) && isCompressible(e.name))
|
|
244
|
+
.sort((a, b) => a.mtimeMs - b.mtimeMs); // oldest first
|
|
245
|
+
|
|
246
|
+
const overflow = plainLogs.length - maxFiles;
|
|
247
|
+
if (overflow > 0) {
|
|
248
|
+
const toDelete = plainLogs.slice(0, overflow);
|
|
249
|
+
for (const entry of toDelete) {
|
|
250
|
+
try {
|
|
251
|
+
await unlink(entry.path);
|
|
252
|
+
deleted++;
|
|
253
|
+
} catch (error: unknown) {
|
|
254
|
+
if (!isEnoentError(error)) throw error;
|
|
255
|
+
}
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
return { compressed, deleted };
|
|
261
|
+
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
/**
 * Logging type definitions for the OpenCode Autopilot plugin.
 *
 * @module
 */

export type LogLevel = "DEBUG" | "INFO" | "WARN" | "ERROR";

/**
 * Metadata attached to every log entry. `domain` is mandatory; `subsystem`
 * and `operation` are optional refinements, and arbitrary extra keys are
 * allowed (sinks receive them unchanged).
 */
export interface LogMetadata {
  readonly domain: string;
  readonly subsystem?: string;
  readonly operation?: string;
  readonly [key: string]: unknown;
}

/** A single immutable log record handed to sinks. */
export interface LogEntry {
  // Produced via new Date().toISOString() by BaseLogger (ISO-8601).
  readonly timestamp: string;
  readonly level: LogLevel;
  readonly message: string;
  readonly metadata: LogMetadata;
}

/** Destination for log entries (e.g. the forensic event log). */
export interface LogSink {
  write(entry: LogEntry): void;
}

/**
 * Leveled logger contract. `child` produces a logger whose metadata is
 * merged on top of this logger's base metadata.
 */
export interface Logger {
  debug(message: string, metadata?: Partial<LogMetadata>): void;
  info(message: string, metadata?: Partial<LogMetadata>): void;
  warn(message: string, metadata?: Partial<LogMetadata>): void;
  error(message: string, metadata?: Partial<LogMetadata>): void;
  child(metadata: Partial<LogMetadata>): Logger;
}