claws-code 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/claws-auto.md +90 -0
- package/.claude/commands/claws-bin.md +28 -0
- package/.claude/commands/claws-cleanup.md +28 -0
- package/.claude/commands/claws-do.md +82 -0
- package/.claude/commands/claws-fix.md +40 -0
- package/.claude/commands/claws-goal.md +111 -0
- package/.claude/commands/claws-help.md +54 -0
- package/.claude/commands/claws-plan.md +103 -0
- package/.claude/commands/claws-report.md +29 -0
- package/.claude/commands/claws-status.md +37 -0
- package/.claude/commands/claws-update.md +32 -0
- package/.claude/commands/claws.md +64 -0
- package/.claude/rules/claws-default-behavior.md +76 -0
- package/.claude/settings.json +112 -0
- package/.claude/settings.local.json +19 -0
- package/.claude/skills/claws-auto-engine/SKILL.md +97 -0
- package/.claude/skills/claws-goal-tracker/SKILL.md +106 -0
- package/.claude/skills/claws-prompt-templates/SKILL.md +203 -0
- package/.claude/skills/claws-wave-lead/SKILL.md +126 -0
- package/.claude/skills/claws-wave-subworker/SKILL.md +60 -0
- package/CHANGELOG.md +1949 -0
- package/LICENSE +21 -0
- package/README.md +420 -0
- package/bin/cli.js +84 -0
- package/cli.js +223 -0
- package/docs/ARCHITECTURE.md +511 -0
- package/docs/event-protocol.md +588 -0
- package/docs/features.md +562 -0
- package/docs/guide.md +891 -0
- package/docs/index.html +716 -0
- package/docs/protocol.md +323 -0
- package/extension/.vscodeignore +15 -0
- package/extension/CHANGELOG.md +1906 -0
- package/extension/LICENSE +21 -0
- package/extension/README.md +137 -0
- package/extension/docs/features.md +424 -0
- package/extension/docs/protocol.md +197 -0
- package/extension/esbuild.mjs +25 -0
- package/extension/icon.png +0 -0
- package/extension/native/.metadata.json +10 -0
- package/extension/native/node-pty/LICENSE +69 -0
- package/extension/native/node-pty/README.md +165 -0
- package/extension/native/node-pty/lib/conpty_console_list_agent.js +16 -0
- package/extension/native/node-pty/lib/conpty_console_list_agent.js.map +1 -0
- package/extension/native/node-pty/lib/eventEmitter2.js +47 -0
- package/extension/native/node-pty/lib/eventEmitter2.js.map +1 -0
- package/extension/native/node-pty/lib/index.js +52 -0
- package/extension/native/node-pty/lib/index.js.map +1 -0
- package/extension/native/node-pty/lib/interfaces.js +7 -0
- package/extension/native/node-pty/lib/interfaces.js.map +1 -0
- package/extension/native/node-pty/lib/shared/conout.js +11 -0
- package/extension/native/node-pty/lib/shared/conout.js.map +1 -0
- package/extension/native/node-pty/lib/terminal.js +190 -0
- package/extension/native/node-pty/lib/terminal.js.map +1 -0
- package/extension/native/node-pty/lib/types.js +7 -0
- package/extension/native/node-pty/lib/types.js.map +1 -0
- package/extension/native/node-pty/lib/unixTerminal.js +346 -0
- package/extension/native/node-pty/lib/unixTerminal.js.map +1 -0
- package/extension/native/node-pty/lib/utils.js +39 -0
- package/extension/native/node-pty/lib/utils.js.map +1 -0
- package/extension/native/node-pty/lib/windowsConoutConnection.js +125 -0
- package/extension/native/node-pty/lib/windowsConoutConnection.js.map +1 -0
- package/extension/native/node-pty/lib/windowsPtyAgent.js +320 -0
- package/extension/native/node-pty/lib/windowsPtyAgent.js.map +1 -0
- package/extension/native/node-pty/lib/windowsTerminal.js +199 -0
- package/extension/native/node-pty/lib/windowsTerminal.js.map +1 -0
- package/extension/native/node-pty/lib/worker/conoutSocketWorker.js +22 -0
- package/extension/native/node-pty/lib/worker/conoutSocketWorker.js.map +1 -0
- package/extension/native/node-pty/package.json +64 -0
- package/extension/native/node-pty/prebuilds/darwin-arm64/pty.node +0 -0
- package/extension/native/node-pty/prebuilds/darwin-arm64/spawn-helper +0 -0
- package/extension/native/node-pty/prebuilds/darwin-x64/pty.node +0 -0
- package/extension/native/node-pty/prebuilds/darwin-x64/spawn-helper +0 -0
- package/extension/native/node-pty/prebuilds/win32-arm64/conpty/OpenConsole.exe +0 -0
- package/extension/native/node-pty/prebuilds/win32-arm64/conpty/conpty.dll +0 -0
- package/extension/native/node-pty/prebuilds/win32-arm64/conpty.node +0 -0
- package/extension/native/node-pty/prebuilds/win32-arm64/conpty_console_list.node +0 -0
- package/extension/native/node-pty/prebuilds/win32-arm64/pty.node +0 -0
- package/extension/native/node-pty/prebuilds/win32-arm64/winpty-agent.exe +0 -0
- package/extension/native/node-pty/prebuilds/win32-arm64/winpty.dll +0 -0
- package/extension/native/node-pty/prebuilds/win32-x64/conpty/OpenConsole.exe +0 -0
- package/extension/native/node-pty/prebuilds/win32-x64/conpty/conpty.dll +0 -0
- package/extension/native/node-pty/prebuilds/win32-x64/conpty.node +0 -0
- package/extension/native/node-pty/prebuilds/win32-x64/conpty_console_list.node +0 -0
- package/extension/native/node-pty/prebuilds/win32-x64/pty.node +0 -0
- package/extension/native/node-pty/prebuilds/win32-x64/winpty-agent.exe +0 -0
- package/extension/native/node-pty/prebuilds/win32-x64/winpty.dll +0 -0
- package/extension/package-lock.json +605 -0
- package/extension/package.json +343 -0
- package/extension/scripts/bundle-native.mjs +104 -0
- package/extension/scripts/deploy-dev.mjs +60 -0
- package/extension/src/ansi-strip.ts +52 -0
- package/extension/src/backends/vscode/claws-pty.ts +483 -0
- package/extension/src/backends/vscode/status-bar.ts +99 -0
- package/extension/src/backends/vscode/vscode-backend.ts +282 -0
- package/extension/src/capture-store.ts +125 -0
- package/extension/src/event-log.ts +629 -0
- package/extension/src/event-schemas.ts +478 -0
- package/extension/src/extension.js +492 -0
- package/extension/src/extension.ts +873 -0
- package/extension/src/lifecycle-engine.ts +60 -0
- package/extension/src/lifecycle-rules.ts +171 -0
- package/extension/src/lifecycle-store.ts +506 -0
- package/extension/src/peer-registry.ts +176 -0
- package/extension/src/pipeline-registry.ts +82 -0
- package/extension/src/platform.ts +64 -0
- package/extension/src/protocol.ts +532 -0
- package/extension/src/server-config.ts +98 -0
- package/extension/src/server.ts +2210 -0
- package/extension/src/task-registry.ts +51 -0
- package/extension/src/terminal-backend.ts +211 -0
- package/extension/src/terminal-manager.ts +395 -0
- package/extension/src/topic-registry.ts +70 -0
- package/extension/src/topic-utils.ts +46 -0
- package/extension/src/transport.ts +45 -0
- package/extension/src/uninstall-cleanup.ts +232 -0
- package/extension/src/wave-registry.ts +314 -0
- package/extension/src/websocket-transport.ts +153 -0
- package/extension/tsconfig.json +23 -0
- package/lib/capabilities.js +145 -0
- package/lib/dry-run.js +43 -0
- package/lib/install.js +1018 -0
- package/lib/mcp-setup.js +92 -0
- package/lib/platform.js +240 -0
- package/lib/preflight.js +152 -0
- package/lib/shell-hook.js +343 -0
- package/lib/uninstall.js +162 -0
- package/lib/verify.js +166 -0
- package/mcp_server.js +3529 -0
- package/package.json +48 -0
- package/rules/claws-default-behavior.md +72 -0
- package/scripts/_helpers/atomic-file.mjs +137 -0
- package/scripts/_helpers/fix-repair.js +64 -0
- package/scripts/_helpers/json-safe.mjs +218 -0
- package/scripts/bump-version.sh +84 -0
- package/scripts/codegen/gen-docs.mjs +61 -0
- package/scripts/codegen/gen-json-schema.mjs +62 -0
- package/scripts/codegen/gen-mcp-tools.mjs +358 -0
- package/scripts/codegen/gen-types.mjs +172 -0
- package/scripts/codegen/index.mjs +42 -0
- package/scripts/dev-hooks/check-extension-dirs.js +77 -0
- package/scripts/dev-hooks/check-open-claws-terminals.js +70 -0
- package/scripts/dev-hooks/check-stale-main.js +55 -0
- package/scripts/dev-hooks/check-tag-pushed.js +51 -0
- package/scripts/dev-hooks/check-tag-vs-main.js +56 -0
- package/scripts/dev-vsix-install.sh +60 -0
- package/scripts/fix.sh +702 -0
- package/scripts/gen-client-types.mjs +81 -0
- package/scripts/git-hooks/pre-commit +31 -0
- package/scripts/hooks/lifecycle-state.js +61 -0
- package/scripts/hooks/package.json +4 -0
- package/scripts/hooks/post-tool-use-claws.js +292 -0
- package/scripts/hooks/pre-bash-no-verify-block.js +72 -0
- package/scripts/hooks/pre-tool-use-claws.js +206 -0
- package/scripts/hooks/session-start-claws.js +97 -0
- package/scripts/hooks/stop-claws.js +88 -0
- package/scripts/inject-claude-md.js +205 -0
- package/scripts/inject-dev-hooks.js +96 -0
- package/scripts/inject-global-claude-md.js +140 -0
- package/scripts/inject-settings-hooks.js +370 -0
- package/scripts/install.ps1 +146 -0
- package/scripts/install.sh +1729 -0
- package/scripts/monitor-arm-watch.js +155 -0
- package/scripts/rebuild-node-pty.sh +245 -0
- package/scripts/report.sh +232 -0
- package/scripts/shell-hook.fish +164 -0
- package/scripts/shell-hook.ps1 +33 -0
- package/scripts/shell-hook.sh +232 -0
- package/scripts/stream-events.js +399 -0
- package/scripts/terminal-wrapper.sh +36 -0
- package/scripts/test-enforcement.sh +132 -0
- package/scripts/test-install.sh +174 -0
- package/scripts/test-installer-parity.sh +135 -0
- package/scripts/test-template-enforcement.sh +76 -0
- package/scripts/uninstall.sh +143 -0
- package/scripts/update.sh +337 -0
- package/scripts/verify-release.sh +323 -0
- package/scripts/verify-wrapped.sh +194 -0
- package/templates/CLAUDE.global.md +135 -0
- package/templates/CLAUDE.project.md +37 -0
|
@@ -0,0 +1,629 @@
|
|
|
1
|
+
import * as fs from 'fs';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
import { matchTopic } from './topic-utils';
|
|
4
|
+
|
|
5
|
+
// Rotate the active segment once it reaches this many bytes.
const SEGMENT_SIZE_THRESHOLD = 10 * 1024 * 1024; // 10 MB
// Rotate the active segment once it has been open this long, regardless of size.
const SEGMENT_AGE_THRESHOLD_MS = 3600_000; // 1 hour
// Persist manifest.json every N successful appends (also written on rotate/close).
const MANIFEST_FLUSH_INTERVAL = 100; // write manifest every N appends
|
|
8
|
+
|
|
9
|
+
// ── Cross-cutting cursor contract ─────────────────────────────────────────────
|
|
10
|
+
// Cursor format: "<4-digit-segment-id>:<decimal-byte-offset>", e.g. "0002:1428".
|
|
11
|
+
// Segment ID is zero-padded to 4 digits. Byte offset is a decimal integer.
|
|
12
|
+
// Consumers seek to exactly this offset via fs.createReadStream(path, {start}).
|
|
13
|
+
// This contract is shared with w3 (reader) and w4 (retention/observability).
|
|
14
|
+
|
|
15
|
+
export function formatCursor(segmentId: number, offset: number): string {
|
|
16
|
+
return `${String(segmentId).padStart(4, '0')}:${offset}`;
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
export function parseCursor(cursor: string): { segmentId: number; offset: number } | null {
|
|
20
|
+
const m = cursor.match(/^(\d{4}):(\d+)$/);
|
|
21
|
+
if (!m) return null;
|
|
22
|
+
return { segmentId: parseInt(m[1], 10), offset: parseInt(m[2], 10) };
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/** Outcome of a single append: where the record landed and its sequence. */
export interface AppendResult {
  /** Cursor at the START of the appended record; '' when the append failed. */
  cursor: string;
  /** Monotonic per-stream sequence number; -1 when the append failed. */
  sequence: number;
}
|
|
29
|
+
|
|
30
|
+
/**
 * A single event-log record as stored (one JSON line per record).
 * Arbitrary extra keys are preserved verbatim via the index signature.
 */
export interface LogRecord {
  /** Topic string used for per-segment .idx lookup; '' when absent. */
  topic?: string;
  /** Originator identifier — presumably a peer/client ID; verify against publisher. */
  from?: string;
  /** Server-side ISO timestamp; stamped by the writer if not already set. */
  ts_server?: string;
  /** Stamped by the writer on append; overwrites any incoming value. */
  sequence?: number;
  /** Opaque event payload. */
  payload?: unknown;
  [key: string]: unknown;
}
|
|
38
|
+
|
|
39
|
+
/** Manifest entry describing one on-disk segment file. */
interface SegmentEntry {
  /** 4-digit zero-padded segment ID, e.g. "0002". */
  id: string;
  /** File name relative to the stream directory, e.g. "0002-2026-04-28T18.jsonl". */
  path: string;
  /** Size in bytes as of the last manifest update (may lag the real file). */
  size: number;
  /** ts_server of the first record, or null while the segment is empty. */
  first_ts: string | null;
  /** ts_server of the most recent record, or null while the segment is empty. */
  last_ts: string | null;
}
|
|
46
|
+
|
|
47
|
+
/** Shape of manifest.json — the durable index of the stream's segments. */
interface Manifest {
  /** Stream name; always 'default' in v1 (single stream per workspace). */
  stream: string;
  /** All known segments, including the active one. */
  segments: SegmentEntry[];
  /** ID of the segment currently being appended to. */
  current_segment: string;
  /** Byte offset within the current segment (may be stale; file size wins on recovery). */
  current_offset: number;
  /** Last issued sequence; optional for manifests written before this field existed. */
  sequence_counter?: number;
}
|
|
54
|
+
|
|
55
|
+
/** Tuning knobs for EventLogWriter; mainly used by tests to force rotation. */
export interface EventLogWriterOptions {
  /** Override the size rotation threshold (bytes). Default: 10 MB. */
  sizeThreshold?: number;
  /** Override the age rotation threshold (ms). Default: 1 hour. */
  ageThresholdMs?: number;
}
|
|
61
|
+
|
|
62
|
+
/**
|
|
63
|
+
* Append-only writer for the persistent event log.
|
|
64
|
+
*
|
|
65
|
+
* Stream identification: a single "default" stream per workspace, covering all
|
|
66
|
+
* topics. Per-topic partitioning is deferred to v2 if throughput demands it.
|
|
67
|
+
*
|
|
68
|
+
* Cursor format: "<4-digit-segment-id>:<decimal-byte-offset>", e.g. "0002:1428".
|
|
69
|
+
* Byte offsets allow efficient seeking via fs.createReadStream(path, {start}).
|
|
70
|
+
*
|
|
71
|
+
* Writes are synchronous (fs.writeSync) so segment files are immediately visible
|
|
72
|
+
* on disk after each append, which simplifies crash recovery and testing.
|
|
73
|
+
*/
|
|
74
|
+
// compact() merges non-current segments smaller than this many bytes.
const COMPACT_SIZE_THRESHOLD = 1024; // segments < 1 KB are candidates for merging
|
|
75
|
+
|
|
76
|
+
/**
 * Append-only writer for the persistent event log (single 'default' stream).
 *
 * Lifecycle: open() → append()* → close(). Rotation by size/age; crash
 * recovery via manifest.json; sequence numbers are monotonic per stream.
 * All disk writes are synchronous; ordering of appends is serialised through
 * an internal promise queue so concurrent publishers never interleave.
 *
 * NOTE(review): the large JSDoc block above COMPACT_SIZE_THRESHOLD in this
 * file syntactically attaches to that const, not to this class — consider
 * moving it down here.
 */
export class EventLogWriter {
  /** Absolute path of the stream directory; '' until open() succeeds. */
  protected streamDir = '';
  /** Numeric ID of the active segment (rendered 4-digit in names/cursors). */
  protected segmentId = 0;
  /** Byte offset where the next record will be written in the active segment. */
  protected currentOffset = 0;
  /** Absolute path of the active segment file. */
  protected currentSegmentPath = '';
  /** Date.now() when the active segment was opened — drives age rotation. */
  protected openedAt = 0;
  /** Open file descriptor of the active segment, or null. */
  protected fd: number | null = null;
  protected fdDeferred = false; // true when segment path is set but fs.openSync not yet called
  /** Once true, every append fails fast with { cursor: '', sequence: -1 }. */
  protected degraded = false;
  /** In-memory mirror of the manifest's segment list (last entry = active). */
  protected segments: SegmentEntry[] = [];
  /** Appends since construction; drives the periodic manifest flush. */
  private appendCount = 0;
  // Per-stream sequence counter. Monotonically increasing across rotations.
  // Persisted in manifest.json; on recovery, restored to last_value+1 (one gap
  // per restart, detectable, never re-issues a sequence).
  // Safe up to Number.MAX_SAFE_INTEGER ≈ 285 years at 1 000 events/s.
  private sequenceCounter = 0;
  // Serialised append queue — guarantees ordering under concurrent publishes.
  private appendQueue: Promise<void> = Promise.resolve();
  private readonly sizeThreshold: number;
  private readonly ageThresholdMs: number;
  // Per-segment topic index: accumulated in memory, flushed to .idx on close/rotate.
  private idxEntries: Array<{ topic: string; offset: number }> = [];

  /** Thresholds default to 10 MB / 1 hour; overridable for tests. */
  constructor(opts?: EventLogWriterOptions) {
    this.sizeThreshold = opts?.sizeThreshold ?? SEGMENT_SIZE_THRESHOLD;
    this.ageThresholdMs = opts?.ageThresholdMs ?? SEGMENT_AGE_THRESHOLD_MS;
  }

  /**
   * Initialise the writer under <workspaceRoot>/.claws/events/default.
   * Falls into degraded mode (no throw) if the directory cannot be created.
   * Returns an already-resolved promise — all work is synchronous.
   */
  open(workspaceRoot: string): Promise<void> {
    this.streamDir = path.join(workspaceRoot, '.claws', 'events', 'default');
    try {
      fs.mkdirSync(this.streamDir, { recursive: true });
    } catch {
      this.degraded = true;
      return Promise.resolve();
    }
    // Crash recovery: try manifest first; fall back to directory scan.
    if (!this.tryRecoverFromManifest()) {
      const maxId = this.scanMaxSegmentId();
      this.segmentId = maxId + 1;
      this.openFreshSegment();
    }
    return Promise.resolve();
  }

  // Attempts to recover writer state from an existing manifest.json.
  // Returns true on success (fd opened, state restored), false otherwise.
  // Recovery rule: trust actual file size over the manifest's current_offset —
  // the manifest may be stale if the process crashed between appends and a flush.
  protected tryRecoverFromManifest(): boolean {
    const manifestPath = path.join(this.streamDir, 'manifest.json');
    try {
      const raw = fs.readFileSync(manifestPath, 'utf8');
      const m = JSON.parse(raw) as Partial<Manifest>;
      // Structural validation — reject anything missing the core fields.
      if (
        typeof m.current_segment !== 'string' ||
        typeof m.current_offset !== 'number' ||
        !Array.isArray(m.segments)
      ) return false;
      const segEntry = m.segments.find(s => s.id === m.current_segment);
      if (!segEntry) return false;
      const segPath = path.join(this.streamDir, segEntry.path);
      const segId = parseInt(m.current_segment, 10);
      if (isNaN(segId) || segId < 1) return false;

      this.segmentId = segId;
      this.currentSegmentPath = segPath;
      this.segments = m.segments.map(s => ({ ...s }));
      this.openedAt = Date.now();

      // File may not exist yet when the segment was opened lazily and no events
      // arrived before the process restarted. Treat it as a deferred segment.
      let statSize = 0;
      let fileExists = true;
      try {
        statSize = fs.statSync(segPath).size;
      } catch {
        fileExists = false;
      }

      if (fileExists) {
        this.currentOffset = statSize; // trust file, not stale manifest offset
        this.fd = fs.openSync(segPath, 'a');
        this.fdDeferred = false;
      } else {
        this.currentOffset = 0;
        this.fd = null;
        this.fdDeferred = true;
      }
      // Restore sequence counter with +1 so the last issued sequence before crash
      // is never re-issued. Cost: one detectable gap per restart — acceptable.
      if (typeof m.sequence_counter === 'number' && m.sequence_counter >= 0) {
        this.sequenceCounter = m.sequence_counter + 1;
      }
      return true;
    } catch {
      // Any parse/IO failure → caller falls back to a directory scan.
      return false;
    }
  }

  /**
   * Atomically persist the manifest (write tmp → fsync → rename).
   * Failures are swallowed: losing a manifest update is recoverable via
   * directory scan; crashing the writer is not.
   */
  protected writeManifest(): void {
    if (this.degraded || !this.streamDir) return;
    const manifest: Manifest = {
      stream: 'default',
      segments: this.segments.map(s => ({ ...s })),
      current_segment: this.segmentIdStr(),
      current_offset: this.currentOffset,
      sequence_counter: this.sequenceCounter,
    };
    const manifestPath = path.join(this.streamDir, 'manifest.json');
    const tmpPath = `${manifestPath}.tmp`;
    try {
      // F7: fsync before rename — mirrors M-29/M-43 pattern; manifest survives
      // power-cut or SIGKILL after write but before kernel page-cache flush.
      const fd = fs.openSync(tmpPath, 'w');
      try {
        fs.writeSync(fd, JSON.stringify(manifest, null, 2) + '\n');
        fs.fsyncSync(fd);
      } finally { fs.closeSync(fd); }
      fs.renameSync(tmpPath, manifestPath);
    } catch {
      // Non-fatal: manifest write failure must not crash the writer.
    }
  }

  // Scans the stream directory for the highest 4-digit segment ID prefix.
  // Returns 0 if the directory is empty or contains no matching files.
  protected scanMaxSegmentId(): number {
    try {
      let max = 0;
      for (const entry of fs.readdirSync(this.streamDir)) {
        const m = entry.match(/^(\d{4})-/);
        if (m) {
          const n = parseInt(m[1], 10);
          if (n > max) max = n;
        }
      }
      return max;
    } catch {
      // Unreadable directory → behave like an empty one.
      return 0;
    }
  }

  /** Build a segment file name: "<0000-padded id>-<YYYY-MM-DDTHH>.jsonl". */
  protected makeSegmentName(id: number): string {
    const pad = String(id).padStart(4, '0');
    const iso = new Date().toISOString().slice(0, 13); // "2026-04-28T18"
    return `${pad}-${iso}.jsonl`;
  }

  /**
   * Register a new active segment WITHOUT creating the file on disk.
   * The file is materialised lazily on first append (fdDeferred = true), so
   * mere activation never litters the directory with empty .jsonl files.
   */
  protected openFreshSegment(): void {
    const name = this.makeSegmentName(this.segmentId);
    this.currentSegmentPath = path.join(this.streamDir, name);
    // Defer fs.openSync until the first doAppend call — file only created when
    // an event actually arrives, so activation produces no empty .jsonl files.
    this.fd = null;
    this.fdDeferred = true;
    this.currentOffset = 0;
    this.openedAt = Date.now();
    this.segments.push({
      id: this.segmentIdStr(),
      path: name,
      size: 0,
      first_ts: null,
      last_ts: null,
    });
  }

  /** True when the active segment exceeds either the size or age threshold. */
  protected needsRotation(): boolean {
    return (
      this.currentOffset >= this.sizeThreshold ||
      Date.now() - this.openedAt >= this.ageThresholdMs
    );
  }

  /** Path of the topic-index file paired with the active segment. */
  private idxPath(): string {
    return this.currentSegmentPath.replace(/\.jsonl$/, '.idx');
  }

  /**
   * Write the accumulated topic index to disk atomically (tmp → rename).
   * Format: one "<topic>\t<byte-offset>" line per record. Loss is tolerable —
   * the index can be rebuilt from the segment itself.
   */
  private flushIdx(): void {
    if (!this.idxEntries.length || !this.currentSegmentPath) return;
    const content = this.idxEntries.map(e => `${e.topic}\t${e.offset}`).join('\n') + '\n';
    const tmpPath = this.idxPath() + '.tmp';
    try {
      fs.writeFileSync(tmpPath, content, 'utf8');
      fs.renameSync(tmpPath, this.idxPath());
    } catch { /* non-fatal: idx loss is recoverable */ }
  }

  /**
   * Close the active segment (final size + idx flush) and open the next one.
   * Unlike startup, the new segment is opened EAGERLY; on open failure the
   * writer degrades. Always rewrites the manifest at the end.
   */
  protected rotate(): void {
    // Update the closing segment's final size before moving on.
    const closing = this.segments[this.segments.length - 1];
    if (closing) closing.size = this.currentOffset;
    // Flush idx for the closing segment before releasing the fd.
    this.flushIdx();
    this.idxEntries = [];
    if (this.fd !== null) {
      try { fs.closeSync(this.fd); } catch { /* ignore */ }
      this.fd = null;
    }
    this.fdDeferred = false;
    this.segmentId++;
    this.currentOffset = 0;
    this.openFreshSegment(); // sets fdDeferred = true for the new segment
    // Eager open: rotation only fires inside doAppend (never at startup), so the
    // lazy guarantee (no empty .jsonl at activation) does not apply here.
    try {
      this.fd = fs.openSync(this.currentSegmentPath, 'a');
      this.currentOffset = fs.fstatSync(this.fd).size;
      this.fdDeferred = false;
    } catch {
      this.degraded = true;
    }
    this.writeManifest();
  }

  /** Active segment ID as the canonical 4-digit zero-padded string. */
  segmentIdStr(): string {
    return String(this.segmentId).padStart(4, '0');
  }

  /** Cursor pointing at the tail of the stream (next write position). */
  currentCursor(): string {
    return formatCursor(this.segmentId, this.currentOffset);
  }

  /** True once an unrecoverable IO failure has disabled the writer. */
  get isDegraded(): boolean { return this.degraded; }

  /** Last successfully appended sequence number. Returns 0 before any append. */
  get lastSequence(): number { return Math.max(0, this.sequenceCounter - 1); }

  /**
   * Append one record. Resolves with its cursor + sequence, or
   * { cursor: '', sequence: -1 } if the writer is degraded/unopened.
   * Appends are chained through appendQueue so order is preserved even when
   * callers do not await each other.
   */
  append(record: LogRecord): Promise<AppendResult> {
    if (this.degraded || (this.fd === null && !this.fdDeferred)) {
      return Promise.resolve({ cursor: '', sequence: -1 });
    }
    const result = this.appendQueue.then(() => this.doAppend(record));
    // Failures must not poison the queue for subsequent appends.
    this.appendQueue = result.then(() => undefined).catch(() => undefined);
    return result;
  }

  /**
   * Synchronous core of append: lazy-open → rotate-if-needed → enrich →
   * writeSync → bookkeeping. Throws only on a failed writeSync (after
   * flipping degraded), so the caller's promise rejects with the IO error.
   */
  protected doAppend(record: LogRecord): AppendResult {
    // Lazy open: materialise the segment file on first write.
    if (this.fd === null && !this.degraded && this.fdDeferred) {
      try {
        this.fd = fs.openSync(this.currentSegmentPath, 'a');
        this.currentOffset = fs.fstatSync(this.fd).size;
        this.fdDeferred = false;
      } catch {
        this.degraded = true;
        return { cursor: '', sequence: -1 };
      }
    }

    if (this.fd === null) return { cursor: '', sequence: -1 };

    // Rotate BEFORE writing so the record lands in the new segment.
    if (this.needsRotation()) {
      this.rotate();
    }

    // After rotation, check if we're in degraded mode (rotate can set it).
    if (this.degraded || this.fd === null) return { cursor: '', sequence: -1 };

    // Stamp sequence and ts_server onto the stored record (immutable enrichment).
    const seq = this.sequenceCounter++;
    const ts = (record.ts_server as string | undefined) ?? new Date().toISOString();
    const enriched: LogRecord = { ...record, ts_server: ts, sequence: seq };

    const line = JSON.stringify(enriched) + '\n';
    const buf = Buffer.from(line, 'utf8');
    const cursor = formatCursor(this.segmentId, this.currentOffset);
    const lineOffset = this.currentOffset; // byte position of this record's start

    try {
      fs.writeSync(this.fd, buf);
    } catch (err) {
      this.degraded = true;
      throw err;
    }
    this.currentOffset += buf.length;

    // Track topic + offset for the per-segment .idx file.
    const idxTopic = typeof enriched.topic === 'string' ? enriched.topic : '';
    this.idxEntries.push({ topic: idxTopic, offset: lineOffset });

    // Update current segment metadata for manifest accuracy.
    const lastSeg = this.segments[this.segments.length - 1];
    if (lastSeg) {
      if (!lastSeg.first_ts) lastSeg.first_ts = ts;
      lastSeg.last_ts = ts;
      lastSeg.size = this.currentOffset;
    }

    this.appendCount++;
    if (this.appendCount % MANIFEST_FLUSH_INTERVAL === 0) {
      this.writeManifest();
    }

    return { cursor, sequence: seq };
  }

  /**
   * Flush idx + manifest and release the fd. Safe to call repeatedly.
   * NOTE(review): close() does not drain appendQueue first — an in-flight
   * append scheduled but not yet run could execute after close; confirm
   * callers always await pending appends before closing.
   */
  close(): Promise<void> {
    this.flushIdx();
    this.idxEntries = [];
    this.writeManifest();
    if (this.fd !== null) {
      try { fs.closeSync(this.fd); } catch { /* ignore */ }
      this.fd = null;
    }
    return Promise.resolve();
  }

  /**
   * Delete segments whose file mtime is older than `retentionDays` days.
   * Closes the open fd if the active segment is among those deleted, then
   * re-opens a fresh (deferred) segment so the writer remains usable.
   * Serialised through the append queue to avoid races with concurrent appends.
   */
  runRetention(retentionDays: number): Promise<void> {
    const p = this.appendQueue.then(() => this._doRetention(retentionDays));
    this.appendQueue = p.then(() => undefined).catch(() => undefined);
    return p;
  }

  /** Synchronous retention pass; only ever invoked via the append queue. */
  private _doRetention(retentionDays: number): void {
    if (this.degraded || !this.streamDir) return;
    const cutoffMs = Date.now() - retentionDays * 86_400_000;
    const toDelete: Set<string> = new Set();

    for (const seg of this.segments) {
      const filePath = path.join(this.streamDir, seg.path);
      let mtimeMs: number;
      try {
        mtimeMs = fs.statSync(filePath).mtimeMs;
      } catch {
        // Missing file → drop the manifest entry.
        // NOTE(review): if this is the ACTIVE segment, this path skips the
        // fd-close below; the fd could stay open on an unlinked file — verify.
        toDelete.add(seg.id);
        continue;
      }
      if (mtimeMs < cutoffMs) {
        // If this is the active segment, close the fd before unlinking.
        if (seg.id === this.segmentIdStr() && this.fd !== null) {
          this.flushIdx();
          this.idxEntries = [];
          try { fs.closeSync(this.fd); } catch { /* ignore */ }
          this.fd = null;
          this.fdDeferred = false;
        }
        try { fs.unlinkSync(filePath); } catch { /* ignore */ }
        const idxFilePath = filePath.replace(/\.jsonl$/, '.idx');
        try { fs.unlinkSync(idxFilePath); } catch { /* ignore */ }
        toDelete.add(seg.id);
      }
    }

    if (toDelete.size === 0) return;

    const deletedCurrent = toDelete.has(this.segmentIdStr());
    this.segments = this.segments.filter(s => !toDelete.has(s.id));

    if (deletedCurrent) {
      // Re-initialise writer with a fresh deferred segment so appends can resume.
      this.segmentId++;
      this.openFreshSegment();
    }
    this.writeManifest();
  }

  /**
   * Merge all non-current segments smaller than 1 KB into a single segment.
   * If the active segment is also small, it is included in the merge.
   * Preserves event sequence ordering. Writes an .idx file for the merged segment.
   * Serialised through the append queue.
   */
  compact(): Promise<void> {
    const p = this.appendQueue.then(() => this._doCompact());
    this.appendQueue = p.then(() => undefined).catch(() => undefined);
    return p;
  }

  /** Synchronous compaction pass; only ever invoked via the append queue. */
  private _doCompact(): void {
    if (this.degraded || !this.streamDir) return;

    const smallSegs = this.segments
      .filter(s => s.size < COMPACT_SIZE_THRESHOLD)
      .sort((a, b) => parseInt(a.id, 10) - parseInt(b.id, 10));

    if (smallSegs.length < 2) return;

    const chunks: Buffer[] = [];
    for (const seg of smallSegs) {
      const filePath = path.join(this.streamDir, seg.path);
      try { chunks.push(fs.readFileSync(filePath)); } catch { /* skip missing */ }
    }
    if (chunks.length < 2) return;

    const merged = Buffer.concat(chunks);
    const firstSeg = smallSegs[0];
    const mergedPath = path.join(this.streamDir, firstSeg.path);

    // Close active fd if the current segment is part of the merge.
    const currentInMerge = smallSegs.some(s => s.id === this.segmentIdStr());
    if (currentInMerge && this.fd !== null) {
      this.flushIdx();
      this.idxEntries = [];
      try { fs.closeSync(this.fd); } catch { /* ignore */ }
      this.fd = null;
      this.fdDeferred = false;
    }

    // Write merged content atomically.
    const tmpPath = mergedPath + '.tmp';
    try {
      fs.writeFileSync(tmpPath, merged);
      fs.renameSync(tmpPath, mergedPath);
    } catch {
      return;
    }

    // Remove source files (all except firstSeg).
    const smallIds = new Set(smallSegs.map(s => s.id));
    for (const seg of smallSegs.slice(1)) {
      const filePath = path.join(this.streamDir, seg.path);
      try { fs.unlinkSync(filePath); } catch { /* ignore */ }
      const idxFilePath = filePath.replace(/\.jsonl$/, '.idx');
      try { fs.unlinkSync(idxFilePath); } catch { /* ignore */ }
    }
    // Remove old idx for firstSeg (will be rebuilt from merged content).
    try { fs.unlinkSync(mergedPath.replace(/\.jsonl$/, '.idx')); } catch { /* ignore */ }

    // Update manifest.
    const mergedEntry: SegmentEntry = {
      id: firstSeg.id,
      path: firstSeg.path,
      size: merged.length,
      first_ts: firstSeg.first_ts,
      last_ts: smallSegs[smallSegs.length - 1].last_ts,
    };
    this.segments = this.segments
      .filter(s => !smallIds.has(s.id))
      .concat(mergedEntry)
      .sort((a, b) => parseInt(a.id, 10) - parseInt(b.id, 10));

    // Point the writer at the merged segment.
    // NOTE(review): this can move segmentId BACKWARDS (e.g. current=0003 merged
    // into 0001 while a large 0002 survives); the next rotate() (segmentId++)
    // would then collide with the surviving segment's ID — confirm/fix.
    this.segmentId = parseInt(firstSeg.id, 10);
    this.currentSegmentPath = mergedPath;
    this.currentOffset = merged.length;
    try {
      this.fd = fs.openSync(mergedPath, 'a');
      this.fdDeferred = false;
    } catch {
      this.degraded = true;
    }

    // Rebuild .idx from merged content.
    this._rebuildIdxForPath(mergedPath);

    this.writeManifest();
  }

  /**
   * Reconstruct the "<topic>\t<offset>" index by re-reading a segment file
   * line by line. Malformed JSON lines are skipped but still advance the
   * byte offset so subsequent entries stay correct. Failures are non-fatal.
   */
  private _rebuildIdxForPath(segPath: string): void {
    try {
      const buf = fs.readFileSync(segPath);
      const lines = buf.toString('utf8').split('\n');
      let byteOffset = 0;
      const entries: string[] = [];
      for (const line of lines) {
        if (line.trim()) {
          try {
            const rec = JSON.parse(line) as LogRecord;
            const topic = typeof rec.topic === 'string' ? rec.topic : '';
            entries.push(`${topic}\t${byteOffset}`);
          } catch { /* skip malformed */ }
        }
        byteOffset += Buffer.byteLength(line, 'utf8') + 1; // +1 for \n
      }
      if (entries.length === 0) return;
      const idxContent = entries.join('\n') + '\n';
      const idxFilePath = segPath.replace(/\.jsonl$/, '.idx');
      const tmpPath = idxFilePath + '.tmp';
      fs.writeFileSync(tmpPath, idxContent, 'utf8');
      fs.renameSync(tmpPath, idxFilePath);
    } catch { /* non-fatal */ }
  }
}
|
|
557
|
+
|
|
558
|
+
export class EventLogReader {
|
|
559
|
+
private readonly streamDir: string;
|
|
560
|
+
|
|
561
|
+
constructor(workspaceRoot: string) {
|
|
562
|
+
this.streamDir = path.join(workspaceRoot, '.claws', 'events', 'default');
|
|
563
|
+
}
|
|
564
|
+
|
|
565
|
+
async *scanFrom(cursor: string, topicPattern: string): AsyncIterable<LogRecord> {
|
|
566
|
+
const parsed = parseCursor(cursor);
|
|
567
|
+
if (!parsed) return;
|
|
568
|
+
const { segmentId, offset } = parsed;
|
|
569
|
+
const segments = this.listSegments();
|
|
570
|
+
const relevant = segments
|
|
571
|
+
.filter(s => s.id >= segmentId)
|
|
572
|
+
.sort((a, b) => a.id - b.id);
|
|
573
|
+
for (const seg of relevant) {
|
|
574
|
+
const startOffset = seg.id === segmentId ? offset : 0;
|
|
575
|
+
yield* this.readSegmentFrom(seg.filePath, startOffset, topicPattern);
|
|
576
|
+
}
|
|
577
|
+
}
|
|
578
|
+
|
|
579
|
+
private listSegments(): Array<{ id: number; filePath: string }> {
|
|
580
|
+
try {
|
|
581
|
+
const raw = fs.readFileSync(path.join(this.streamDir, 'manifest.json'), 'utf8');
|
|
582
|
+
const m = JSON.parse(raw) as { segments?: Array<{ id: string; path: string }> };
|
|
583
|
+
if (Array.isArray(m.segments)) {
|
|
584
|
+
return m.segments
|
|
585
|
+
.map(s => ({ id: parseInt(s.id, 10), filePath: path.join(this.streamDir, s.path) }))
|
|
586
|
+
.filter(s => !isNaN(s.id));
|
|
587
|
+
}
|
|
588
|
+
} catch { /* fall through */ }
|
|
589
|
+
try {
|
|
590
|
+
return fs.readdirSync(this.streamDir)
|
|
591
|
+
.filter(n => /^\d{4}-.*\.jsonl$/.test(n))
|
|
592
|
+
.map(n => ({ id: parseInt(n.slice(0, 4), 10), filePath: path.join(this.streamDir, n) }))
|
|
593
|
+
.filter(s => !isNaN(s.id));
|
|
594
|
+
} catch {
|
|
595
|
+
return [];
|
|
596
|
+
}
|
|
597
|
+
}
|
|
598
|
+
|
|
599
|
+
private async *readSegmentFrom(
|
|
600
|
+
filePath: string,
|
|
601
|
+
startOffset: number,
|
|
602
|
+
topicPattern: string,
|
|
603
|
+
): AsyncGenerator<LogRecord> {
|
|
604
|
+
let data: Buffer;
|
|
605
|
+
try {
|
|
606
|
+
const stat = fs.statSync(filePath);
|
|
607
|
+
const size = stat.size - startOffset;
|
|
608
|
+
if (size <= 0) return;
|
|
609
|
+
const fd = fs.openSync(filePath, 'r');
|
|
610
|
+
try {
|
|
611
|
+
data = Buffer.alloc(size);
|
|
612
|
+
fs.readSync(fd, data, 0, size, startOffset);
|
|
613
|
+
} finally {
|
|
614
|
+
fs.closeSync(fd);
|
|
615
|
+
}
|
|
616
|
+
} catch {
|
|
617
|
+
return;
|
|
618
|
+
}
|
|
619
|
+
for (const line of data.toString('utf8').split('\n')) {
|
|
620
|
+
if (!line.trim()) continue;
|
|
621
|
+
try {
|
|
622
|
+
const record = JSON.parse(line) as LogRecord;
|
|
623
|
+
if (typeof record.topic === 'string' && matchTopic(record.topic, topicPattern)) {
|
|
624
|
+
yield record;
|
|
625
|
+
}
|
|
626
|
+
} catch { /* skip malformed lines */ }
|
|
627
|
+
}
|
|
628
|
+
}
|
|
629
|
+
}
|