@loreai/core 0.17.1 → 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bun/agents-file.d.ts +4 -0
- package/dist/bun/agents-file.d.ts.map +1 -1
- package/dist/bun/config.d.ts +2 -0
- package/dist/bun/config.d.ts.map +1 -1
- package/dist/bun/curator.d.ts +45 -0
- package/dist/bun/curator.d.ts.map +1 -1
- package/dist/bun/data-dir.d.ts +18 -0
- package/dist/bun/data-dir.d.ts.map +1 -0
- package/dist/bun/db.d.ts +85 -0
- package/dist/bun/db.d.ts.map +1 -1
- package/dist/bun/distillation.d.ts +2 -13
- package/dist/bun/distillation.d.ts.map +1 -1
- package/dist/bun/embedding-vendor.d.ts +22 -38
- package/dist/bun/embedding-vendor.d.ts.map +1 -1
- package/dist/bun/embedding-worker-types.d.ts +17 -12
- package/dist/bun/embedding-worker-types.d.ts.map +1 -1
- package/dist/bun/embedding-worker.d.ts +9 -2
- package/dist/bun/embedding-worker.d.ts.map +1 -1
- package/dist/bun/embedding-worker.js +38864 -33
- package/dist/bun/embedding-worker.js.map +4 -4
- package/dist/bun/embedding.d.ts +35 -23
- package/dist/bun/embedding.d.ts.map +1 -1
- package/dist/bun/gradient.d.ts +17 -1
- package/dist/bun/gradient.d.ts.map +1 -1
- package/dist/bun/import/detect.d.ts +14 -0
- package/dist/bun/import/detect.d.ts.map +1 -0
- package/dist/bun/import/extract.d.ts +43 -0
- package/dist/bun/import/extract.d.ts.map +1 -0
- package/dist/bun/import/history.d.ts +40 -0
- package/dist/bun/import/history.d.ts.map +1 -0
- package/dist/bun/import/index.d.ts +17 -0
- package/dist/bun/import/index.d.ts.map +1 -0
- package/dist/bun/import/providers/aider.d.ts +2 -0
- package/dist/bun/import/providers/aider.d.ts.map +1 -0
- package/dist/bun/import/providers/claude-code.d.ts +2 -0
- package/dist/bun/import/providers/claude-code.d.ts.map +1 -0
- package/dist/bun/import/providers/cline.d.ts +2 -0
- package/dist/bun/import/providers/cline.d.ts.map +1 -0
- package/dist/bun/import/providers/codex.d.ts +2 -0
- package/dist/bun/import/providers/codex.d.ts.map +1 -0
- package/dist/bun/import/providers/continue.d.ts +2 -0
- package/dist/bun/import/providers/continue.d.ts.map +1 -0
- package/dist/bun/import/providers/index.d.ts +19 -0
- package/dist/bun/import/providers/index.d.ts.map +1 -0
- package/dist/bun/import/providers/opencode.d.ts +2 -0
- package/dist/bun/import/providers/opencode.d.ts.map +1 -0
- package/dist/bun/import/providers/pi.d.ts +2 -0
- package/dist/bun/import/providers/pi.d.ts.map +1 -0
- package/dist/bun/import/types.d.ts +82 -0
- package/dist/bun/import/types.d.ts.map +1 -0
- package/dist/bun/index.d.ts +5 -2
- package/dist/bun/index.d.ts.map +1 -1
- package/dist/bun/index.js +3150 -439
- package/dist/bun/index.js.map +4 -4
- package/dist/bun/instruction-detect.d.ts +66 -0
- package/dist/bun/instruction-detect.d.ts.map +1 -0
- package/dist/bun/log.d.ts +9 -0
- package/dist/bun/log.d.ts.map +1 -1
- package/dist/bun/ltm.d.ts +139 -5
- package/dist/bun/ltm.d.ts.map +1 -1
- package/dist/bun/pattern-extract.d.ts +7 -0
- package/dist/bun/pattern-extract.d.ts.map +1 -1
- package/dist/bun/prompt.d.ts +1 -1
- package/dist/bun/prompt.d.ts.map +1 -1
- package/dist/bun/recall.d.ts.map +1 -1
- package/dist/bun/search.d.ts +5 -3
- package/dist/bun/search.d.ts.map +1 -1
- package/dist/bun/session-limiter.d.ts +26 -0
- package/dist/bun/session-limiter.d.ts.map +1 -0
- package/dist/bun/temporal.d.ts +2 -0
- package/dist/bun/temporal.d.ts.map +1 -1
- package/dist/bun/types.d.ts +1 -1
- package/dist/node/agents-file.d.ts +4 -0
- package/dist/node/agents-file.d.ts.map +1 -1
- package/dist/node/config.d.ts +2 -0
- package/dist/node/config.d.ts.map +1 -1
- package/dist/node/curator.d.ts +45 -0
- package/dist/node/curator.d.ts.map +1 -1
- package/dist/node/data-dir.d.ts +18 -0
- package/dist/node/data-dir.d.ts.map +1 -0
- package/dist/node/db.d.ts +85 -0
- package/dist/node/db.d.ts.map +1 -1
- package/dist/node/distillation.d.ts +2 -13
- package/dist/node/distillation.d.ts.map +1 -1
- package/dist/node/embedding-vendor.d.ts +22 -38
- package/dist/node/embedding-vendor.d.ts.map +1 -1
- package/dist/node/embedding-worker-types.d.ts +17 -12
- package/dist/node/embedding-worker-types.d.ts.map +1 -1
- package/dist/node/embedding-worker.d.ts +9 -2
- package/dist/node/embedding-worker.d.ts.map +1 -1
- package/dist/node/embedding-worker.js +38864 -33
- package/dist/node/embedding-worker.js.map +4 -4
- package/dist/node/embedding.d.ts +35 -23
- package/dist/node/embedding.d.ts.map +1 -1
- package/dist/node/gradient.d.ts +17 -1
- package/dist/node/gradient.d.ts.map +1 -1
- package/dist/node/import/detect.d.ts +14 -0
- package/dist/node/import/detect.d.ts.map +1 -0
- package/dist/node/import/extract.d.ts +43 -0
- package/dist/node/import/extract.d.ts.map +1 -0
- package/dist/node/import/history.d.ts +40 -0
- package/dist/node/import/history.d.ts.map +1 -0
- package/dist/node/import/index.d.ts +17 -0
- package/dist/node/import/index.d.ts.map +1 -0
- package/dist/node/import/providers/aider.d.ts +2 -0
- package/dist/node/import/providers/aider.d.ts.map +1 -0
- package/dist/node/import/providers/claude-code.d.ts +2 -0
- package/dist/node/import/providers/claude-code.d.ts.map +1 -0
- package/dist/node/import/providers/cline.d.ts +2 -0
- package/dist/node/import/providers/cline.d.ts.map +1 -0
- package/dist/node/import/providers/codex.d.ts +2 -0
- package/dist/node/import/providers/codex.d.ts.map +1 -0
- package/dist/node/import/providers/continue.d.ts +2 -0
- package/dist/node/import/providers/continue.d.ts.map +1 -0
- package/dist/node/import/providers/index.d.ts +19 -0
- package/dist/node/import/providers/index.d.ts.map +1 -0
- package/dist/node/import/providers/opencode.d.ts +2 -0
- package/dist/node/import/providers/opencode.d.ts.map +1 -0
- package/dist/node/import/providers/pi.d.ts +2 -0
- package/dist/node/import/providers/pi.d.ts.map +1 -0
- package/dist/node/import/types.d.ts +82 -0
- package/dist/node/import/types.d.ts.map +1 -0
- package/dist/node/index.d.ts +5 -2
- package/dist/node/index.d.ts.map +1 -1
- package/dist/node/index.js +3150 -439
- package/dist/node/index.js.map +4 -4
- package/dist/node/instruction-detect.d.ts +66 -0
- package/dist/node/instruction-detect.d.ts.map +1 -0
- package/dist/node/log.d.ts +9 -0
- package/dist/node/log.d.ts.map +1 -1
- package/dist/node/ltm.d.ts +139 -5
- package/dist/node/ltm.d.ts.map +1 -1
- package/dist/node/pattern-extract.d.ts +7 -0
- package/dist/node/pattern-extract.d.ts.map +1 -1
- package/dist/node/prompt.d.ts +1 -1
- package/dist/node/prompt.d.ts.map +1 -1
- package/dist/node/recall.d.ts.map +1 -1
- package/dist/node/search.d.ts +5 -3
- package/dist/node/search.d.ts.map +1 -1
- package/dist/node/session-limiter.d.ts +26 -0
- package/dist/node/session-limiter.d.ts.map +1 -0
- package/dist/node/temporal.d.ts +2 -0
- package/dist/node/temporal.d.ts.map +1 -1
- package/dist/node/types.d.ts +1 -1
- package/dist/types/agents-file.d.ts +4 -0
- package/dist/types/agents-file.d.ts.map +1 -1
- package/dist/types/config.d.ts +2 -0
- package/dist/types/config.d.ts.map +1 -1
- package/dist/types/curator.d.ts +45 -0
- package/dist/types/curator.d.ts.map +1 -1
- package/dist/types/data-dir.d.ts +18 -0
- package/dist/types/data-dir.d.ts.map +1 -0
- package/dist/types/db.d.ts +85 -0
- package/dist/types/db.d.ts.map +1 -1
- package/dist/types/distillation.d.ts +2 -13
- package/dist/types/distillation.d.ts.map +1 -1
- package/dist/types/embedding-vendor.d.ts +22 -38
- package/dist/types/embedding-vendor.d.ts.map +1 -1
- package/dist/types/embedding-worker-types.d.ts +17 -12
- package/dist/types/embedding-worker-types.d.ts.map +1 -1
- package/dist/types/embedding-worker.d.ts +9 -2
- package/dist/types/embedding-worker.d.ts.map +1 -1
- package/dist/types/embedding.d.ts +35 -23
- package/dist/types/embedding.d.ts.map +1 -1
- package/dist/types/gradient.d.ts +17 -1
- package/dist/types/gradient.d.ts.map +1 -1
- package/dist/types/import/detect.d.ts +14 -0
- package/dist/types/import/detect.d.ts.map +1 -0
- package/dist/types/import/extract.d.ts +43 -0
- package/dist/types/import/extract.d.ts.map +1 -0
- package/dist/types/import/history.d.ts +40 -0
- package/dist/types/import/history.d.ts.map +1 -0
- package/dist/types/import/index.d.ts +17 -0
- package/dist/types/import/index.d.ts.map +1 -0
- package/dist/types/import/providers/aider.d.ts +2 -0
- package/dist/types/import/providers/aider.d.ts.map +1 -0
- package/dist/types/import/providers/claude-code.d.ts +2 -0
- package/dist/types/import/providers/claude-code.d.ts.map +1 -0
- package/dist/types/import/providers/cline.d.ts +2 -0
- package/dist/types/import/providers/cline.d.ts.map +1 -0
- package/dist/types/import/providers/codex.d.ts +2 -0
- package/dist/types/import/providers/codex.d.ts.map +1 -0
- package/dist/types/import/providers/continue.d.ts +2 -0
- package/dist/types/import/providers/continue.d.ts.map +1 -0
- package/dist/types/import/providers/index.d.ts +19 -0
- package/dist/types/import/providers/index.d.ts.map +1 -0
- package/dist/types/import/providers/opencode.d.ts +2 -0
- package/dist/types/import/providers/opencode.d.ts.map +1 -0
- package/dist/types/import/providers/pi.d.ts +2 -0
- package/dist/types/import/providers/pi.d.ts.map +1 -0
- package/dist/types/import/types.d.ts +82 -0
- package/dist/types/import/types.d.ts.map +1 -0
- package/dist/types/index.d.ts +5 -2
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/instruction-detect.d.ts +66 -0
- package/dist/types/instruction-detect.d.ts.map +1 -0
- package/dist/types/log.d.ts +9 -0
- package/dist/types/log.d.ts.map +1 -1
- package/dist/types/ltm.d.ts +139 -5
- package/dist/types/ltm.d.ts.map +1 -1
- package/dist/types/pattern-extract.d.ts +7 -0
- package/dist/types/pattern-extract.d.ts.map +1 -1
- package/dist/types/prompt.d.ts +1 -1
- package/dist/types/prompt.d.ts.map +1 -1
- package/dist/types/recall.d.ts.map +1 -1
- package/dist/types/search.d.ts +5 -3
- package/dist/types/search.d.ts.map +1 -1
- package/dist/types/session-limiter.d.ts +26 -0
- package/dist/types/session-limiter.d.ts.map +1 -0
- package/dist/types/temporal.d.ts +2 -0
- package/dist/types/temporal.d.ts.map +1 -1
- package/dist/types/types.d.ts +1 -1
- package/package.json +3 -4
- package/src/agents-file.ts +41 -13
- package/src/config.ts +31 -18
- package/src/curator.ts +163 -75
- package/src/data-dir.ts +76 -0
- package/src/db.ts +457 -11
- package/src/distillation.ts +65 -16
- package/src/embedding-vendor.ts +23 -40
- package/src/embedding-worker-types.ts +19 -11
- package/src/embedding-worker.ts +111 -47
- package/src/embedding.ts +224 -174
- package/src/gradient.ts +192 -75
- package/src/import/detect.ts +37 -0
- package/src/import/extract.ts +137 -0
- package/src/import/history.ts +99 -0
- package/src/import/index.ts +45 -0
- package/src/import/providers/aider.ts +207 -0
- package/src/import/providers/claude-code.ts +339 -0
- package/src/import/providers/cline.ts +324 -0
- package/src/import/providers/codex.ts +369 -0
- package/src/import/providers/continue.ts +304 -0
- package/src/import/providers/index.ts +32 -0
- package/src/import/providers/opencode.ts +272 -0
- package/src/import/providers/pi.ts +332 -0
- package/src/import/types.ts +91 -0
- package/src/index.ts +13 -0
- package/src/instruction-detect.ts +275 -0
- package/src/log.ts +91 -3
- package/src/ltm.ts +789 -41
- package/src/pattern-extract.ts +41 -0
- package/src/prompt.ts +7 -1
- package/src/recall.ts +43 -5
- package/src/search.ts +7 -5
- package/src/session-limiter.ts +47 -0
- package/src/temporal.ts +18 -6
- package/src/types.ts +1 -1
|
@@ -0,0 +1,324 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cline (VS Code extension) conversation history provider.
|
|
3
|
+
*
|
|
4
|
+
* Reads JSON task files from VS Code's globalStorage for the Cline extension:
|
|
5
|
+
* ~/.vscode/data/User/globalStorage/saoudrizwan.claude-dev/tasks/<taskId>/
|
|
6
|
+
*
|
|
7
|
+
* Each task directory contains:
|
|
8
|
+
* - api_conversation_history.json — Anthropic MessageParam[] format
|
|
9
|
+
* - task_metadata.json — optional metadata
|
|
10
|
+
*
|
|
11
|
+
* The task history index at:
|
|
12
|
+
* globalStorage/saoudrizwan.claude-dev/state/taskHistory.json
|
|
13
|
+
* maps tasks to their CWD (cwdOnTaskInitialization).
|
|
14
|
+
*/
|
|
15
|
+
import { readdirSync, readFileSync, existsSync, statSync } from "fs";
|
|
16
|
+
import { join } from "path";
|
|
17
|
+
import { homedir } from "os";
|
|
18
|
+
import type { AgentHistoryProvider, ConversationChunk, DetectedSession } from "../types";
|
|
19
|
+
import { registerProvider } from "./index";
|
|
20
|
+
|
|
21
|
+
// ---------------------------------------------------------------------------
|
|
22
|
+
// Constants
|
|
23
|
+
// ---------------------------------------------------------------------------
|
|
24
|
+
|
|
25
|
+
// Cap on serialized tool input/output text included in rendered messages.
const MAX_TOOL_OUTPUT_CHARS = 500;
// Default per-chunk token budget for readChunks when the caller passes none.
const DEFAULT_MAX_TOKENS = 12288;
|
|
27
|
+
|
|
28
|
+
// Extension IDs — Cline has been published under multiple IDs.
// Every discovered globalStorage directory is probed for each of these.
const EXTENSION_IDS = [
  "saoudrizwan.claude-dev",
  "cline.cline",
];
|
|
33
|
+
|
|
34
|
+
// ---------------------------------------------------------------------------
|
|
35
|
+
// Types (Cline's Anthropic-compatible format)
|
|
36
|
+
// ---------------------------------------------------------------------------
|
|
37
|
+
|
|
38
|
+
/**
 * Subset of Anthropic's content-block union that this provider understands.
 * The trailing `{ type: string }` arm keeps parsing tolerant of unknown
 * block kinds — blockToText renders those (and images) as null.
 */
type ContentBlock =
  | { type: "text"; text: string }
  | { type: "tool_use"; id: string; name: string; input: Record<string, unknown> }
  | { type: "tool_result"; tool_use_id: string; content: string | ContentBlock[] }
  | { type: "image"; source?: unknown }
  | { type: string };
|
|
44
|
+
|
|
45
|
+
/** One message from api_conversation_history.json (Anthropic MessageParam shape). */
type ClineMessage = {
  role: "user" | "assistant";
  // Either a plain string or a list of structured content blocks.
  content: string | ContentBlock[];
};
|
|
49
|
+
|
|
50
|
+
/** One entry in Cline's taskHistory.json index. */
type TaskHistoryItem = {
  id: string; // directory name under <globalStorage>/tasks/
  ts: number; // epoch milliseconds; used for sorting and date labels
  task: string; // user's task prompt; used in the session label
  tokensIn?: number;
  tokensOut?: number;
  totalCost?: number;
  cwdOnTaskInitialization?: string; // project path the task was started in; matched against projectPath
  modelId?: string;
};
|
|
60
|
+
|
|
61
|
+
// ---------------------------------------------------------------------------
|
|
62
|
+
// Helpers
|
|
63
|
+
// ---------------------------------------------------------------------------
|
|
64
|
+
|
|
65
|
+
function estimateTokens(text: string): number {
|
|
66
|
+
return Math.ceil(text.length / 3);
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
function truncate(text: string, max: number): string {
|
|
70
|
+
if (text.length <= max) return text;
|
|
71
|
+
return text.slice(0, max) + "...";
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
/**
|
|
75
|
+
* Find the VS Code globalStorage directories to search.
|
|
76
|
+
* Checks multiple VS Code variants (stable, insiders, OSS) and extension IDs.
|
|
77
|
+
*/
|
|
78
|
+
function findGlobalStorageDirs(): string[] {
|
|
79
|
+
const home = homedir();
|
|
80
|
+
const dirs: string[] = [];
|
|
81
|
+
|
|
82
|
+
// VS Code storage paths by platform
|
|
83
|
+
const basePaths: string[] = [];
|
|
84
|
+
const platform = process.platform;
|
|
85
|
+
|
|
86
|
+
if (platform === "darwin") {
|
|
87
|
+
basePaths.push(
|
|
88
|
+
join(home, "Library", "Application Support", "Code", "User", "globalStorage"),
|
|
89
|
+
join(home, "Library", "Application Support", "Code - Insiders", "User", "globalStorage"),
|
|
90
|
+
join(home, "Library", "Application Support", "VSCodium", "User", "globalStorage"),
|
|
91
|
+
);
|
|
92
|
+
} else if (platform === "win32") {
|
|
93
|
+
const appdata = process.env.APPDATA || join(home, "AppData", "Roaming");
|
|
94
|
+
basePaths.push(
|
|
95
|
+
join(appdata, "Code", "User", "globalStorage"),
|
|
96
|
+
join(appdata, "Code - Insiders", "User", "globalStorage"),
|
|
97
|
+
join(appdata, "VSCodium", "User", "globalStorage"),
|
|
98
|
+
);
|
|
99
|
+
} else {
|
|
100
|
+
// Linux
|
|
101
|
+
const configHome = process.env.XDG_CONFIG_HOME || join(home, ".config");
|
|
102
|
+
basePaths.push(
|
|
103
|
+
join(configHome, "Code", "User", "globalStorage"),
|
|
104
|
+
join(configHome, "Code - Insiders", "User", "globalStorage"),
|
|
105
|
+
join(configHome, "VSCodium", "User", "globalStorage"),
|
|
106
|
+
);
|
|
107
|
+
// Also check the older data path
|
|
108
|
+
basePaths.push(
|
|
109
|
+
join(home, ".vscode", "data", "User", "globalStorage"),
|
|
110
|
+
join(home, ".vscode-insiders", "data", "User", "globalStorage"),
|
|
111
|
+
);
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
for (const base of basePaths) {
|
|
115
|
+
for (const extId of EXTENSION_IDS) {
|
|
116
|
+
const dir = join(base, extId);
|
|
117
|
+
if (existsSync(dir)) dirs.push(dir);
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
return dirs;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
/** Load the task history index and filter by project CWD. */
|
|
125
|
+
function loadTaskHistory(
|
|
126
|
+
storageDir: string,
|
|
127
|
+
projectPath: string,
|
|
128
|
+
): TaskHistoryItem[] {
|
|
129
|
+
// Try both known locations for the history file
|
|
130
|
+
const paths = [
|
|
131
|
+
join(storageDir, "state", "taskHistory.json"),
|
|
132
|
+
join(storageDir, "taskHistory.json"),
|
|
133
|
+
];
|
|
134
|
+
|
|
135
|
+
for (const historyPath of paths) {
|
|
136
|
+
if (!existsSync(historyPath)) continue;
|
|
137
|
+
|
|
138
|
+
try {
|
|
139
|
+
const raw = readFileSync(historyPath, "utf-8");
|
|
140
|
+
const items = JSON.parse(raw) as TaskHistoryItem[];
|
|
141
|
+
if (!Array.isArray(items)) continue;
|
|
142
|
+
|
|
143
|
+
return items.filter(
|
|
144
|
+
(item) => item.cwdOnTaskInitialization === projectPath,
|
|
145
|
+
);
|
|
146
|
+
} catch {
|
|
147
|
+
continue;
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
return [];
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
/** Read the API conversation history for a task. */
|
|
155
|
+
function readConversation(taskDir: string): ClineMessage[] {
|
|
156
|
+
const filePath = join(taskDir, "api_conversation_history.json");
|
|
157
|
+
if (!existsSync(filePath)) return [];
|
|
158
|
+
|
|
159
|
+
try {
|
|
160
|
+
const raw = readFileSync(filePath, "utf-8");
|
|
161
|
+
const messages = JSON.parse(raw) as ClineMessage[];
|
|
162
|
+
return Array.isArray(messages) ? messages : [];
|
|
163
|
+
} catch {
|
|
164
|
+
return [];
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
/** Convert a content block to text. */
|
|
169
|
+
function blockToText(block: ContentBlock): string | null {
|
|
170
|
+
switch (block.type) {
|
|
171
|
+
case "text":
|
|
172
|
+
return (block as { type: "text"; text: string }).text;
|
|
173
|
+
case "tool_use": {
|
|
174
|
+
const tu = block as { type: "tool_use"; name: string; input: Record<string, unknown> };
|
|
175
|
+
return `[tool: ${tu.name}] ${truncate(JSON.stringify(tu.input), MAX_TOOL_OUTPUT_CHARS)}`;
|
|
176
|
+
}
|
|
177
|
+
case "tool_result": {
|
|
178
|
+
const tr = block as { type: "tool_result"; content: string | ContentBlock[] };
|
|
179
|
+
let content: string;
|
|
180
|
+
if (typeof tr.content === "string") {
|
|
181
|
+
content = tr.content;
|
|
182
|
+
} else if (Array.isArray(tr.content)) {
|
|
183
|
+
content = tr.content
|
|
184
|
+
.filter((b): b is { type: "text"; text: string } => b.type === "text")
|
|
185
|
+
.map((b) => b.text)
|
|
186
|
+
.join("\n");
|
|
187
|
+
} else {
|
|
188
|
+
content = "";
|
|
189
|
+
}
|
|
190
|
+
return content ? `[tool_result] ${truncate(content, MAX_TOOL_OUTPUT_CHARS)}` : null;
|
|
191
|
+
}
|
|
192
|
+
default:
|
|
193
|
+
return null;
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
/** Convert a ClineMessage to text. */
|
|
198
|
+
function messageToText(msg: ClineMessage): string | null {
|
|
199
|
+
if (typeof msg.content === "string") {
|
|
200
|
+
return msg.content ? `[${msg.role}] ${msg.content}` : null;
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
const parts = (msg.content as ContentBlock[])
|
|
204
|
+
.map(blockToText)
|
|
205
|
+
.filter(Boolean) as string[];
|
|
206
|
+
return parts.length > 0 ? `[${msg.role}] ${parts.join("\n")}` : null;
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
// ---------------------------------------------------------------------------
|
|
210
|
+
// Provider implementation
|
|
211
|
+
// ---------------------------------------------------------------------------
|
|
212
|
+
|
|
213
|
+
/**
 * Agent-history provider for the Cline VS Code extension.
 *
 * detect() scans every discovered globalStorage directory, filters the task
 * index by project CWD, and returns one DetectedSession per qualifying task.
 * The task directory path doubles as the session id, which readChunks()
 * later uses to re-read the conversation and pack it into token-bounded
 * text chunks.
 */
const clineProvider: AgentHistoryProvider = {
  name: "cline",
  displayName: "Cline",

  /** List sessions for `projectPath`, newest activity first. */
  detect(projectPath: string): DetectedSession[] {
    const sessions: DetectedSession[] = [];
    const storageDirs = findGlobalStorageDirs();

    for (const storageDir of storageDirs) {
      const tasks = loadTaskHistory(storageDir, projectPath);

      for (const task of tasks) {
        const taskDir = join(storageDir, "tasks", task.id);
        if (!existsSync(taskDir)) continue;

        // Quick count of messages; also filters out trivially short tasks.
        const messages = readConversation(taskDir);
        if (messages.length < 3) continue;

        const dateStr = new Date(task.ts).toISOString().slice(0, 10);
        const label = task.task
          ? `${dateStr} - ${truncate(task.task, 60)} (${messages.length} messages)`
          : `${dateStr} (${messages.length} messages)`;

        // Estimate tokens from file size (heuristic: ~5 bytes of raw JSON
        // per token); fall back to a per-message estimate if stat fails.
        const historyFile = join(taskDir, "api_conversation_history.json");
        let estimatedTokens = messages.length * 500;
        try {
          const stat = statSync(historyFile);
          estimatedTokens = Math.ceil(stat.size / 5);
        } catch {
          // Use the message-count-based estimate
        }

        sessions.push({
          id: taskDir, // the directory path — consumed by readChunks below
          label,
          startedAt: task.ts,
          lastActivityAt: task.ts, // index only records the start timestamp
          estimatedTokens,
          messageCount: messages.length,
        });
      }
    }

    return sessions.sort((a, b) => b.lastActivityAt - a.lastActivityAt);
  },

  /**
   * Re-read each selected task's conversation and split it into chunks,
   * each at most `maxTokens` (estimated) long. A single oversized message
   * still becomes its own chunk rather than being dropped.
   */
  readChunks(
    _projectPath: string,
    sessionIds: string[],
    maxTokens: number = DEFAULT_MAX_TOKENS,
  ): ConversationChunk[] {
    const chunks: ConversationChunk[] = [];

    for (const taskDir of sessionIds) {
      const messages = readConversation(taskDir);
      if (messages.length === 0) continue;

      // Get timestamp from directory stat; fall back to "now" on failure.
      let sessionTimestamp: number;
      try {
        sessionTimestamp = statSync(taskDir).mtimeMs;
      } catch {
        sessionTimestamp = Date.now();
      }

      const textMessages: { text: string }[] = [];
      for (const msg of messages) {
        const text = messageToText(msg);
        if (text) textMessages.push({ text });
      }

      if (textMessages.length === 0) continue;

      // Build chunks respecting maxTokens boundaries
      let currentTexts: string[] = [];
      let currentTokens = 0;
      let chunkIndex = 0;

      // Emit the accumulated messages as one chunk and reset the buffer.
      const flushChunk = () => {
        if (currentTexts.length === 0) return;
        chunkIndex++;
        const text = currentTexts.join("\n\n");
        chunks.push({
          label: `Cline ${new Date(sessionTimestamp).toISOString().slice(0, 10)} (${chunkIndex})`,
          text,
          estimatedTokens: estimateTokens(text),
          timestamp: sessionTimestamp,
        });
        currentTexts = [];
        currentTokens = 0;
      };

      for (const msg of textMessages) {
        const msgTokens = estimateTokens(msg.text);
        // Flush before adding a message that would overflow the budget
        // (unless the buffer is empty — then the message goes in regardless).
        if (currentTokens > 0 && currentTokens + msgTokens > maxTokens) {
          flushChunk();
        }
        currentTexts.push(msg.text);
        currentTokens += msgTokens;
      }

      flushChunk();
    }

    return chunks;
  },
};
|
|
322
|
+
|
|
323
|
+
// Auto-register with the provider registry as a side effect of importing
// this module.
registerProvider(clineProvider);
|
|
@@ -0,0 +1,369 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Codex (OpenAI) conversation history provider.
|
|
3
|
+
*
|
|
4
|
+
* Reads JSONL session files from ~/.codex/sessions/YYYY/MM/DD/<rollout>.jsonl
|
|
5
|
+
* and archived sessions from ~/.codex/archived_sessions/*.jsonl
|
|
6
|
+
*
|
|
7
|
+
* Each JSONL file starts with a session_meta line containing { id, cwd, timestamp, ... }
|
|
8
|
+
* followed by response_item, event_msg, compacted, and turn_context lines.
|
|
9
|
+
*/
|
|
10
|
+
import { readdirSync, readFileSync, statSync, existsSync } from "fs";
|
|
11
|
+
import { join } from "path";
|
|
12
|
+
import { homedir } from "os";
|
|
13
|
+
import type { AgentHistoryProvider, ConversationChunk, DetectedSession } from "../types";
|
|
14
|
+
import { registerProvider } from "./index";
|
|
15
|
+
|
|
16
|
+
// ---------------------------------------------------------------------------
|
|
17
|
+
// Constants
|
|
18
|
+
// ---------------------------------------------------------------------------
|
|
19
|
+
|
|
20
|
+
// Root of the Codex CLI's on-disk state.
const CODEX_DIR = join(homedir(), ".codex");
// Active sessions, organized as sessions/YYYY/MM/DD/<rollout>.jsonl.
const SESSIONS_DIR = join(CODEX_DIR, "sessions");
// Sessions the user has archived (flat *.jsonl files).
const ARCHIVED_DIR = join(CODEX_DIR, "archived_sessions");
// Cap on tool-call argument/output text included in rendered messages.
const MAX_TOOL_OUTPUT_CHARS = 500;
// Default per-chunk token budget when the caller does not pass maxTokens.
const DEFAULT_MAX_TOKENS = 12288;
|
|
25
|
+
|
|
26
|
+
// ---------------------------------------------------------------------------
|
|
27
|
+
// JSONL types (only the fields we read)
|
|
28
|
+
// ---------------------------------------------------------------------------
|
|
29
|
+
|
|
30
|
+
/**
 * One parsed line of a Codex rollout JSONL file. Only the fields this
 * provider reads are modeled; the final arm keeps parsing tolerant of
 * unknown line types (e.g. turn_context).
 */
type CodexLine =
  | {
      // Session header — expected as the first line of each rollout file.
      type: "session_meta";
      payload: {
        meta: {
          id: string;
          timestamp: string;
          cwd: string; // project directory the session was started in
          source?: string;
          model_provider?: string;
          cli_version?: string;
        };
      };
    }
  | {
      // A model request/response entry.
      type: "response_item";
      payload: ResponseItem;
    }
  | {
      type: "event_msg";
      payload: {
        type?: string;
        output?: string;
        truncated?: boolean;
      };
    }
  | {
      // History was compacted; replacement items stand in for older lines.
      type: "compacted";
      payload: {
        replacement_history?: ResponseItem[];
      };
    }
  | { type: string; payload?: unknown };
|
|
63
|
+
|
|
64
|
+
/** A response_item payload; which fields are present depends on `type`. */
type ResponseItem = {
  type?: string; // "message", "function_call", "function_call_output", ...
  role?: string; // present on "message" items
  content?: string | ContentPart[]; // message body
  name?: string; // tool name on "function_call" items
  arguments?: string; // tool arguments on "function_call" items
  output?: string; // tool output on "function_call_output" items
  status?: string;
};
|
|
73
|
+
|
|
74
|
+
/**
 * Message content parts. extractContent treats any part with a string
 * `text` field as text, so the catch-all arm is also handled.
 */
type ContentPart =
  | { type: "input_text"; text: string }
  | { type: "output_text"; text: string }
  | { type: "text"; text: string }
  | { type: string; [key: string]: unknown };
|
|
79
|
+
|
|
80
|
+
// ---------------------------------------------------------------------------
|
|
81
|
+
// Helpers
|
|
82
|
+
// ---------------------------------------------------------------------------
|
|
83
|
+
|
|
84
|
+
function estimateTokens(text: string): number {
|
|
85
|
+
return Math.ceil(text.length / 3);
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
function truncate(text: string, max: number): string {
|
|
89
|
+
if (text.length <= max) return text;
|
|
90
|
+
return text.slice(0, max) + "...";
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
/** Recursively find all .jsonl files under a directory. */
|
|
94
|
+
function findJsonlFiles(dir: string): string[] {
|
|
95
|
+
const results: string[] = [];
|
|
96
|
+
if (!existsSync(dir)) return results;
|
|
97
|
+
|
|
98
|
+
const walk = (d: string) => {
|
|
99
|
+
let entries: string[];
|
|
100
|
+
try {
|
|
101
|
+
entries = readdirSync(d);
|
|
102
|
+
} catch {
|
|
103
|
+
return;
|
|
104
|
+
}
|
|
105
|
+
for (const entry of entries) {
|
|
106
|
+
const full = join(d, entry);
|
|
107
|
+
try {
|
|
108
|
+
const stat = statSync(full);
|
|
109
|
+
if (stat.isDirectory()) walk(full);
|
|
110
|
+
else if (stat.isFile() && entry.endsWith(".jsonl")) results.push(full);
|
|
111
|
+
} catch {
|
|
112
|
+
// Skip inaccessible entries
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
};
|
|
116
|
+
|
|
117
|
+
walk(dir);
|
|
118
|
+
return results;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
/** Extract text content from a ResponseItem. */
|
|
122
|
+
function responseItemToText(item: ResponseItem): string | null {
|
|
123
|
+
if (!item) return null;
|
|
124
|
+
|
|
125
|
+
// Message items (user/assistant text)
|
|
126
|
+
if (item.type === "message" && item.role && item.content) {
|
|
127
|
+
const text = extractContent(item.content);
|
|
128
|
+
if (text) return `[${item.role}] ${text}`;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// Function/tool call items
|
|
132
|
+
if (item.type === "function_call" && item.name) {
|
|
133
|
+
const args = item.arguments
|
|
134
|
+
? truncate(item.arguments, MAX_TOOL_OUTPUT_CHARS)
|
|
135
|
+
: "";
|
|
136
|
+
return `[tool: ${item.name}] ${args}`;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
// Function/tool output items
|
|
140
|
+
if (item.type === "function_call_output" && item.output) {
|
|
141
|
+
return `[tool_result] ${truncate(item.output, MAX_TOOL_OUTPUT_CHARS)}`;
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
return null;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
/** Extract text from content (string or array of content parts). */
|
|
148
|
+
function extractContent(content: string | ContentPart[]): string | null {
|
|
149
|
+
if (typeof content === "string") return content;
|
|
150
|
+
if (!Array.isArray(content)) return null;
|
|
151
|
+
|
|
152
|
+
const parts: string[] = [];
|
|
153
|
+
for (const part of content) {
|
|
154
|
+
if ("text" in part && typeof part.text === "string") {
|
|
155
|
+
parts.push(part.text);
|
|
156
|
+
}
|
|
157
|
+
}
|
|
158
|
+
return parts.length > 0 ? parts.join("\n") : null;
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
/** Parse a JSONL file, returning typed lines. */
|
|
162
|
+
function parseJSONL(filePath: string): CodexLine[] {
|
|
163
|
+
let raw: string;
|
|
164
|
+
try {
|
|
165
|
+
raw = readFileSync(filePath, "utf-8");
|
|
166
|
+
} catch {
|
|
167
|
+
return [];
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
const lines: CodexLine[] = [];
|
|
171
|
+
for (const line of raw.split("\n")) {
|
|
172
|
+
if (!line.trim()) continue;
|
|
173
|
+
try {
|
|
174
|
+
lines.push(JSON.parse(line) as CodexLine);
|
|
175
|
+
} catch {
|
|
176
|
+
// Skip malformed
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
return lines;
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
/** Get session metadata from the first line of a JSONL file. */
|
|
183
|
+
function getSessionMeta(
|
|
184
|
+
filePath: string,
|
|
185
|
+
): {
|
|
186
|
+
id: string;
|
|
187
|
+
cwd: string;
|
|
188
|
+
timestamp: string;
|
|
189
|
+
messageCount: number;
|
|
190
|
+
fileSize: number;
|
|
191
|
+
} | null {
|
|
192
|
+
let raw: string;
|
|
193
|
+
try {
|
|
194
|
+
raw = readFileSync(filePath, "utf-8");
|
|
195
|
+
} catch {
|
|
196
|
+
return null;
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
const lines = raw.split("\n").filter((l) => l.trim());
|
|
200
|
+
if (lines.length === 0) return null;
|
|
201
|
+
|
|
202
|
+
// First line should be session_meta
|
|
203
|
+
let meta: CodexLine;
|
|
204
|
+
try {
|
|
205
|
+
meta = JSON.parse(lines[0]) as CodexLine;
|
|
206
|
+
} catch {
|
|
207
|
+
return null;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
if (meta.type !== "session_meta") return null;
|
|
211
|
+
|
|
212
|
+
const payload = meta.payload as {
|
|
213
|
+
meta: { id: string; cwd: string; timestamp: string };
|
|
214
|
+
};
|
|
215
|
+
|
|
216
|
+
// Count message-like lines
|
|
217
|
+
let messageCount = 0;
|
|
218
|
+
for (const line of lines) {
|
|
219
|
+
try {
|
|
220
|
+
const parsed = JSON.parse(line) as CodexLine;
|
|
221
|
+
if (parsed.type === "response_item" || parsed.type === "event_msg") {
|
|
222
|
+
messageCount++;
|
|
223
|
+
}
|
|
224
|
+
} catch {
|
|
225
|
+
// Skip
|
|
226
|
+
}
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
return {
|
|
230
|
+
id: payload.meta.id,
|
|
231
|
+
cwd: payload.meta.cwd,
|
|
232
|
+
timestamp: payload.meta.timestamp,
|
|
233
|
+
messageCount,
|
|
234
|
+
fileSize: raw.length,
|
|
235
|
+
};
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
// ---------------------------------------------------------------------------
|
|
239
|
+
// Provider implementation
|
|
240
|
+
// ---------------------------------------------------------------------------
|
|
241
|
+
|
|
242
|
+
const codexProvider: AgentHistoryProvider = {
|
|
243
|
+
name: "codex",
|
|
244
|
+
displayName: "Codex",
|
|
245
|
+
|
|
246
|
+
detect(projectPath: string): DetectedSession[] {
|
|
247
|
+
const sessions: DetectedSession[] = [];
|
|
248
|
+
|
|
249
|
+
// Scan both active and archived sessions
|
|
250
|
+
const allFiles = [
|
|
251
|
+
...findJsonlFiles(SESSIONS_DIR),
|
|
252
|
+
...findJsonlFiles(ARCHIVED_DIR),
|
|
253
|
+
];
|
|
254
|
+
|
|
255
|
+
for (const filePath of allFiles) {
|
|
256
|
+
const meta = getSessionMeta(filePath);
|
|
257
|
+
if (!meta) continue;
|
|
258
|
+
|
|
259
|
+
// Match by CWD — the session must have been started in this project
|
|
260
|
+
if (meta.cwd !== projectPath) continue;
|
|
261
|
+
|
|
262
|
+
// Skip trivially small sessions
|
|
263
|
+
if (meta.messageCount < 3) continue;
|
|
264
|
+
|
|
265
|
+
const ts = new Date(meta.timestamp).getTime();
|
|
266
|
+
const estimatedTokens = Math.ceil(meta.fileSize / 5);
|
|
267
|
+
const dateStr = new Date(ts).toISOString().slice(0, 10);
|
|
268
|
+
|
|
269
|
+
sessions.push({
|
|
270
|
+
id: filePath,
|
|
271
|
+
label: `${dateStr} (${meta.messageCount} messages)`,
|
|
272
|
+
startedAt: ts,
|
|
273
|
+
lastActivityAt: ts, // Best approximation without reading all lines
|
|
274
|
+
estimatedTokens,
|
|
275
|
+
messageCount: meta.messageCount,
|
|
276
|
+
});
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
return sessions.sort((a, b) => b.lastActivityAt - a.lastActivityAt);
|
|
280
|
+
},
|
|
281
|
+
|
|
282
|
+
readChunks(
|
|
283
|
+
_projectPath: string,
|
|
284
|
+
sessionIds: string[],
|
|
285
|
+
maxTokens: number = DEFAULT_MAX_TOKENS,
|
|
286
|
+
): ConversationChunk[] {
|
|
287
|
+
const chunks: ConversationChunk[] = [];
|
|
288
|
+
|
|
289
|
+
for (const filePath of sessionIds) {
|
|
290
|
+
const lines = parseJSONL(filePath);
|
|
291
|
+
const messages: { text: string; timestamp: number }[] = [];
|
|
292
|
+
|
|
293
|
+
// Find session timestamp for labeling
|
|
294
|
+
let sessionTimestamp = Date.now();
|
|
295
|
+
const firstLine = lines[0];
|
|
296
|
+
if (firstLine?.type === "session_meta") {
|
|
297
|
+
const meta = firstLine as Extract<CodexLine, { type: "session_meta" }>;
|
|
298
|
+
const ts = new Date(meta.payload.meta.timestamp).getTime();
|
|
299
|
+
if (!Number.isNaN(ts)) sessionTimestamp = ts;
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
for (const line of lines) {
|
|
303
|
+
if (line.type === "response_item") {
|
|
304
|
+
const ri = line as Extract<CodexLine, { type: "response_item" }>;
|
|
305
|
+
const text = responseItemToText(ri.payload);
|
|
306
|
+
if (text) {
|
|
307
|
+
messages.push({ text, timestamp: sessionTimestamp });
|
|
308
|
+
}
|
|
309
|
+
} else if (line.type === "event_msg") {
|
|
310
|
+
const ev = line as Extract<CodexLine, { type: "event_msg" }>;
|
|
311
|
+
if (ev.payload.output) {
|
|
312
|
+
messages.push({
|
|
313
|
+
text: `[exec] ${truncate(ev.payload.output, MAX_TOOL_OUTPUT_CHARS)}`,
|
|
314
|
+
timestamp: sessionTimestamp,
|
|
315
|
+
});
|
|
316
|
+
}
|
|
317
|
+
} else if (line.type === "compacted") {
|
|
318
|
+
const comp = line as Extract<CodexLine, { type: "compacted" }>;
|
|
319
|
+
if (comp.payload.replacement_history) {
|
|
320
|
+
// After compaction, the replacement_history is the compressed conversation
|
|
321
|
+
for (const item of comp.payload.replacement_history) {
|
|
322
|
+
const text = responseItemToText(item);
|
|
323
|
+
if (text) {
|
|
324
|
+
messages.push({ text, timestamp: sessionTimestamp });
|
|
325
|
+
}
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
}
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
if (messages.length === 0) continue;
|
|
332
|
+
|
|
333
|
+
// Build chunks respecting maxTokens boundaries
|
|
334
|
+
let currentTexts: string[] = [];
|
|
335
|
+
let currentTokens = 0;
|
|
336
|
+
let chunkIndex = 0;
|
|
337
|
+
|
|
338
|
+
const flushChunk = () => {
|
|
339
|
+
if (currentTexts.length === 0) return;
|
|
340
|
+
chunkIndex++;
|
|
341
|
+
const text = currentTexts.join("\n\n");
|
|
342
|
+
chunks.push({
|
|
343
|
+
label: `Codex ${new Date(sessionTimestamp).toISOString().slice(0, 10)} (${chunkIndex})`,
|
|
344
|
+
text,
|
|
345
|
+
estimatedTokens: estimateTokens(text),
|
|
346
|
+
timestamp: sessionTimestamp,
|
|
347
|
+
});
|
|
348
|
+
currentTexts = [];
|
|
349
|
+
currentTokens = 0;
|
|
350
|
+
};
|
|
351
|
+
|
|
352
|
+
for (const msg of messages) {
|
|
353
|
+
const msgTokens = estimateTokens(msg.text);
|
|
354
|
+
if (currentTokens > 0 && currentTokens + msgTokens > maxTokens) {
|
|
355
|
+
flushChunk();
|
|
356
|
+
}
|
|
357
|
+
currentTexts.push(msg.text);
|
|
358
|
+
currentTokens += msgTokens;
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
flushChunk();
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
return chunks;
|
|
365
|
+
},
|
|
366
|
+
};
|
|
367
|
+
|
|
368
|
+
// Auto-register on import: merely importing this module makes the "codex"
// provider available via the shared provider registry (module-level side effect).
registerProvider(codexProvider);
|