opencode-lcm 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +83 -0
- package/LICENSE +21 -0
- package/README.md +207 -0
- package/dist/archive-transform.d.ts +45 -0
- package/dist/archive-transform.js +81 -0
- package/dist/constants.d.ts +12 -0
- package/dist/constants.js +16 -0
- package/dist/doctor.d.ts +22 -0
- package/dist/doctor.js +44 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +306 -0
- package/dist/logging.d.ts +14 -0
- package/dist/logging.js +28 -0
- package/dist/options.d.ts +3 -0
- package/dist/options.js +217 -0
- package/dist/preview-providers.d.ts +20 -0
- package/dist/preview-providers.js +246 -0
- package/dist/privacy.d.ts +16 -0
- package/dist/privacy.js +92 -0
- package/dist/search-ranking.d.ts +12 -0
- package/dist/search-ranking.js +98 -0
- package/dist/sql-utils.d.ts +31 -0
- package/dist/sql-utils.js +80 -0
- package/dist/store-artifacts.d.ts +50 -0
- package/dist/store-artifacts.js +374 -0
- package/dist/store-retention.d.ts +39 -0
- package/dist/store-retention.js +90 -0
- package/dist/store-search.d.ts +37 -0
- package/dist/store-search.js +298 -0
- package/dist/store-snapshot.d.ts +133 -0
- package/dist/store-snapshot.js +325 -0
- package/dist/store-types.d.ts +14 -0
- package/dist/store-types.js +5 -0
- package/dist/store.d.ts +316 -0
- package/dist/store.js +3673 -0
- package/dist/types.d.ts +117 -0
- package/dist/types.js +1 -0
- package/dist/utils.d.ts +35 -0
- package/dist/utils.js +414 -0
- package/dist/workspace-path.d.ts +1 -0
- package/dist/workspace-path.js +15 -0
- package/dist/worktree-key.d.ts +1 -0
- package/dist/worktree-key.js +6 -0
- package/package.json +61 -0
- package/src/archive-transform.ts +147 -0
- package/src/bun-sqlite.d.ts +18 -0
- package/src/constants.ts +20 -0
- package/src/doctor.ts +83 -0
- package/src/index.ts +330 -0
- package/src/logging.ts +41 -0
- package/src/options.ts +297 -0
- package/src/preview-providers.ts +298 -0
- package/src/privacy.ts +122 -0
- package/src/search-ranking.ts +145 -0
- package/src/sql-utils.ts +107 -0
- package/src/store-artifacts.ts +666 -0
- package/src/store-retention.ts +152 -0
- package/src/store-search.ts +440 -0
- package/src/store-snapshot.ts +582 -0
- package/src/store-types.ts +16 -0
- package/src/store.ts +4926 -0
- package/src/types.ts +132 -0
- package/src/utils.ts +444 -0
- package/src/workspace-path.ts +20 -0
- package/src/worktree-key.ts +5 -0
|
@@ -0,0 +1,298 @@
|
|
|
1
|
+
import { getLogger } from './logging.js';
|
|
2
|
+
import { rankSearchCandidates } from './search-ranking.js';
|
|
3
|
+
import { buildSnippet, sanitizeFtsTokens, tokenizeQuery } from './utils.js';
|
|
4
|
+
function deleteScopedFtsRows(db, table, sessionIDs) {
|
|
5
|
+
if (!sessionIDs) {
|
|
6
|
+
db.prepare(`DELETE FROM ${table}`).run();
|
|
7
|
+
return;
|
|
8
|
+
}
|
|
9
|
+
if (sessionIDs.length === 0)
|
|
10
|
+
return;
|
|
11
|
+
db.prepare(`DELETE FROM ${table} WHERE session_id IN (${sessionIDs.map(() => '?').join(', ')})`).run(...sessionIDs);
|
|
12
|
+
}
|
|
13
|
+
/**
 * Translate a free-form user query into an FTS5 MATCH expression.
 * Double-quoted spans become exact phrases; the remaining words become
 * prefix tokens (`word*`). All surviving terms are joined with AND.
 * Returns undefined when sanitization leaves no usable terms.
 */
export function buildFtsQuery(query) {
    const parts = [];
    for (const match of query.matchAll(/"([^"]+)"/g)) {
        const phrase = sanitizeFtsTokens(tokenizeQuery(match[1])).join(' ');
        if (phrase)
            parts.push(`"${phrase}"`);
    }
    // Strip the quoted spans so their words are not also prefix-matched.
    const unquoted = query.replace(/"[^"]+"/g, ' ');
    for (const token of sanitizeFtsTokens(tokenizeQuery(unquoted))) {
        parts.push(`${token}*`);
    }
    return parts.length > 0 ? parts.join(' AND ') : undefined;
}
|
|
25
|
+
/**
|
|
26
|
+
* Compute TF-IDF weights for candidate query tokens against the FTS5 corpus.
|
|
27
|
+
* Returns tokens sorted by descending IDF score (most informative first).
|
|
28
|
+
* Tokens that appear in >80% of documents are dropped as corpus-common noise.
|
|
29
|
+
*
|
|
30
|
+
* Uses a single FTS5 query per token to get document frequency, which is
|
|
31
|
+
* acceptable since automatic retrieval works with ≤10 candidate tokens.
|
|
32
|
+
*/
|
|
33
|
+
function getTotalDocCount(db) {
|
|
34
|
+
const messageCount = db.prepare('SELECT COUNT(*) AS count FROM message_fts').get() ?? { count: 0 };
|
|
35
|
+
const summaryCount = db.prepare('SELECT COUNT(*) AS count FROM summary_fts').get() ?? { count: 0 };
|
|
36
|
+
const artifactCount = db.prepare('SELECT COUNT(*) AS count FROM artifact_fts').get() ?? { count: 0 };
|
|
37
|
+
return Math.max(1, messageCount.count + summaryCount.count + artifactCount.count);
|
|
38
|
+
}
|
|
39
|
+
export function computeTfidfWeights(db, candidateTokens) {
|
|
40
|
+
if (candidateTokens.length === 0)
|
|
41
|
+
return [];
|
|
42
|
+
const totalDocs = getTotalDocCount(db);
|
|
43
|
+
const results = [];
|
|
44
|
+
for (const token of candidateTokens) {
|
|
45
|
+
// Query document frequency across all FTS tables
|
|
46
|
+
// FTS5 MATCH 'token*' finds all documents containing terms with this prefix
|
|
47
|
+
const query = `${token}*`;
|
|
48
|
+
let docFreq = 0;
|
|
49
|
+
try {
|
|
50
|
+
const msgFreq = db
|
|
51
|
+
.prepare('SELECT COUNT(*) AS count FROM message_fts WHERE message_fts MATCH ?')
|
|
52
|
+
.get(query);
|
|
53
|
+
docFreq += msgFreq?.count ?? 0;
|
|
54
|
+
}
|
|
55
|
+
catch (error) {
|
|
56
|
+
getLogger().debug('TF-IDF message_fts query failed for token', { token, error });
|
|
57
|
+
}
|
|
58
|
+
try {
|
|
59
|
+
const sumFreq = db
|
|
60
|
+
.prepare('SELECT COUNT(*) AS count FROM summary_fts WHERE summary_fts MATCH ?')
|
|
61
|
+
.get(query);
|
|
62
|
+
docFreq += sumFreq?.count ?? 0;
|
|
63
|
+
}
|
|
64
|
+
catch (error) {
|
|
65
|
+
getLogger().debug('TF-IDF summary_fts query failed for token', { token, error });
|
|
66
|
+
}
|
|
67
|
+
try {
|
|
68
|
+
const artFreq = db
|
|
69
|
+
.prepare('SELECT COUNT(*) AS count FROM artifact_fts WHERE artifact_fts MATCH ?')
|
|
70
|
+
.get(query);
|
|
71
|
+
docFreq += artFreq?.count ?? 0;
|
|
72
|
+
}
|
|
73
|
+
catch (error) {
|
|
74
|
+
getLogger().debug('TF-IDF artifact_fts query failed for token', { token, error });
|
|
75
|
+
}
|
|
76
|
+
// Smoothed IDF: log(N / (df + 1)) + 1
|
|
77
|
+
// Smoothing prevents division by zero and ensures non-zero weights
|
|
78
|
+
const idf = Math.log(totalDocs / (docFreq + 1)) + 1;
|
|
79
|
+
results.push({ token, idf, docFreq });
|
|
80
|
+
}
|
|
81
|
+
// Sort by descending IDF — most informative tokens first
|
|
82
|
+
results.sort((a, b) => b.idf - a.idf);
|
|
83
|
+
return results;
|
|
84
|
+
}
|
|
85
|
+
/**
 * Filter candidate tokens using TF-IDF weights.
 * Drops tokens whose IDF is below the median (corpus-common terms)
 * and tokens that appear in more than `maxCommonRatio` of documents.
 * Always retains at least `minTokens` tokens (the highest-IDF ones).
 * Returns tokens sorted by descending IDF.
 */
export function filterTokensByTfidf(db, candidateTokens, options) {
    const { maxCommonRatio = 0.8, minTokens = 1 } = options ?? {};
    const weights = computeTfidfWeights(db, candidateTokens);
    if (weights.length === 0)
        return candidateTokens;
    // Needed for the common-ratio threshold (computeTfidfWeights does not expose it).
    const totalDocs = getTotalDocCount(db);
    // Median IDF over all candidates.
    const idfs = weights.map((w) => w.idf).sort((a, b) => a - b);
    const mid = idfs.length >> 1;
    const medianIdf = idfs.length % 2 === 0
        ? (idfs[mid - 1] + idfs[mid]) / 2
        : idfs[mid];
    // Keep tokens at/above the median that are not corpus-common noise.
    const kept = weights.filter((w) => w.idf >= medianIdf && w.docFreq / totalDocs <= maxCommonRatio);
    const chosen = kept.length >= minTokens ? kept : weights.slice(0, minTokens);
    return chosen.map((w) => w.token);
}
|
|
115
|
+
/**
 * Full-text search across the message, summary, and artifact FTS5 tables.
 *
 * @param deps       store bindings providing getDb()
 * @param query      raw user query; converted to an FTS5 expression via buildFtsQuery
 * @param sessionIDs optional scope — undefined searches all sessions, [] returns no results
 * @param limit      maximum number of ranked results to return
 * @returns ranked candidates, or [] when the query is unusable or any FTS query fails
 */
export function searchWithFts(deps, query, sessionIDs, limit = 5) {
    const ftsQuery = buildFtsQuery(query);
    if (!ftsQuery)
        return [];
    // An explicit empty scope means "search nothing", not "search everything".
    if (sessionIDs && sessionIDs.length === 0)
        return [];
    try {
        const db = deps.getDb();
        // Over-fetch from each source so the cross-source re-ranking below
        // has enough candidates to choose the final `limit` from.
        const fetchLimit = Math.max(limit * 8, 12);
        // Builds the optional "session_id IN (...) AND " prefix for each query.
        const buildScopeClause = (ids) => {
            if (!ids)
                return { clause: '', params: [] };
            return {
                clause: `session_id IN (${ids.map(() => '?').join(', ')}) AND `,
                params: ids,
            };
        };
        const scope = buildScopeClause(sessionIDs);
        // snippet(...) column indexes differ per table because each FTS table
        // stores its content in a different column position.
        const messageRows = db
            .prepare(`SELECT message_id, session_id, role, created_at, content, snippet(message_fts, 4, '[', ']', '...', 12) AS snippet, bm25(message_fts) AS rank
FROM message_fts
WHERE ${scope.clause}message_fts MATCH ?
ORDER BY rank, created_at DESC
LIMIT ?`)
            .all(...scope.params, ftsQuery, fetchLimit);
        const summaryRows = db
            .prepare(`SELECT node_id, session_id, created_at, content, snippet(summary_fts, 4, '[', ']', '...', 14) AS snippet, bm25(summary_fts) AS rank
FROM summary_fts
WHERE ${scope.clause}summary_fts MATCH ?
ORDER BY rank, created_at DESC
LIMIT ?`)
            .all(...scope.params, ftsQuery, fetchLimit);
        const artifactRows = db
            .prepare(`SELECT artifact_id, session_id, artifact_kind, created_at, content, snippet(artifact_fts, 6, '[', ']', '...', 14) AS snippet, bm25(artifact_fts) AS rank
FROM artifact_fts
WHERE ${scope.clause}artifact_fts MATCH ?
ORDER BY rank, created_at DESC
LIMIT ?`)
            .all(...scope.params, ftsQuery, fetchLimit);
        // Normalize all three row shapes into one candidate shape; fall back
        // to a locally-built snippet when FTS5's snippet() returned nothing.
        const candidates = [
            ...messageRows.map((row, index) => ({
                id: row.message_id,
                type: row.role,
                sessionID: row.session_id,
                timestamp: Number(row.created_at),
                snippet: row.snippet || buildSnippet(row.content, query),
                content: row.content,
                sourceKind: 'message',
                sourceOrder: index,
            })),
            ...summaryRows.map((row, index) => ({
                id: row.node_id,
                type: 'summary',
                sessionID: row.session_id,
                timestamp: Number(row.created_at),
                snippet: row.snippet || buildSnippet(row.content, query),
                content: row.content,
                sourceKind: 'summary',
                sourceOrder: index,
            })),
            ...artifactRows.map((row, index) => ({
                id: row.artifact_id,
                type: `artifact:${row.artifact_kind}`,
                sessionID: row.session_id,
                timestamp: Number(row.created_at),
                snippet: row.snippet || buildSnippet(row.content, query),
                content: row.content,
                sourceKind: 'artifact',
                sourceOrder: index,
            })),
        ];
        // Merge the three sources and let the shared ranker pick the top `limit`.
        return rankSearchCandidates(candidates, query, limit);
    }
    catch (error) {
        // FTS tables may be absent or the MATCH expression rejected; fail soft.
        getLogger().debug('FTS search failed, returning empty results', { query, error });
        return [];
    }
}
|
|
193
|
+
/**
 * Fallback substring search over raw session data (no FTS involvement).
 * Matching is a lowercase `includes` check over messages, summaries, and
 * artifacts; results are merged and ranked the same way as FTS results.
 * NOTE(review): the haystacks are lowercased but `query` is not — this
 * presumably relies on callers passing an already-lowercased query; confirm.
 */
export function searchByScan(deps, query, sessionIDs, limit = 5) {
    const candidates = [];
    // Messages: scan every message in every in-scope session.
    for (const session of deps.readScopedSessionsSync(sessionIDs)) {
        session.messages.forEach((message, index) => {
            const blob = deps.guessMessageText(message, deps.ignoreToolPrefixes);
            if (!blob.toLowerCase().includes(query))
                return;
            candidates.push({
                id: message.info.id,
                type: message.info.role,
                sessionID: session.sessionID,
                timestamp: message.info.time.created,
                snippet: buildSnippet(blob, query),
                content: blob,
                sourceKind: 'message',
                sourceOrder: index,
            });
        });
    }
    // Summaries.
    for (const [index, row] of deps.readScopedSummaryRowsSync(sessionIDs).entries()) {
        if (!row.summary_text.toLowerCase().includes(query))
            continue;
        candidates.push({
            id: row.node_id,
            type: 'summary',
            sessionID: row.session_id,
            timestamp: row.created_at,
            snippet: buildSnippet(row.summary_text, query),
            content: row.summary_text,
            sourceKind: 'summary',
            sourceOrder: index,
        });
    }
    // Artifacts: match against preview + full content together.
    deps.readScopedArtifactRowsSync(sessionIDs).forEach((row, index) => {
        const haystack = `${row.preview_text}\n${row.content_text}`;
        if (!haystack.toLowerCase().includes(query))
            return;
        candidates.push({
            id: row.artifact_id,
            type: `artifact:${row.artifact_kind}`,
            sessionID: row.session_id,
            timestamp: row.created_at,
            snippet: buildSnippet(haystack, query),
            content: row.content_text,
            sourceKind: 'artifact',
            sourceOrder: index,
        });
    });
    return rankSearchCandidates(candidates, query, limit);
}
|
|
246
|
+
/** Rebuild the message FTS rows for one session: delete its rows, then re-insert. */
export function replaceMessageSearchRowsSync(deps, session) {
    deps.getDb()
        .prepare('DELETE FROM message_fts WHERE session_id = ?')
        .run(session.sessionID);
    insertMessageSearchRowsSync(deps, session);
}
|
|
251
|
+
function insertMessageSearchRowsSync(deps, session) {
|
|
252
|
+
const db = deps.getDb();
|
|
253
|
+
const insert = db.prepare('INSERT INTO message_fts (session_id, message_id, role, created_at, content) VALUES (?, ?, ?, ?, ?)');
|
|
254
|
+
for (const message of session.messages) {
|
|
255
|
+
const content = deps.guessMessageText(message, deps.ignoreToolPrefixes);
|
|
256
|
+
if (!content)
|
|
257
|
+
continue;
|
|
258
|
+
insert.run(session.sessionID, message.info.id, message.info.role, String(message.info.time.created), content);
|
|
259
|
+
}
|
|
260
|
+
}
|
|
261
|
+
export function replaceMessageSearchRowSync(deps, sessionID, message) {
|
|
262
|
+
const db = deps.getDb();
|
|
263
|
+
db.prepare('DELETE FROM message_fts WHERE message_id = ?').run(message.info.id);
|
|
264
|
+
const content = deps.guessMessageText(message, deps.ignoreToolPrefixes);
|
|
265
|
+
if (!content)
|
|
266
|
+
return;
|
|
267
|
+
db.prepare('INSERT INTO message_fts (session_id, message_id, role, created_at, content) VALUES (?, ?, ?, ?, ?)').run(sessionID, message.info.id, message.info.role, String(message.info.time.created), content);
|
|
268
|
+
}
|
|
269
|
+
/** Rebuild summary FTS rows for the given scope (all sessions when undefined). */
export function replaceSummarySearchRowsSync(deps, sessionIDs) {
    const db = deps.getDb();
    deleteScopedFtsRows(db, 'summary_fts', sessionIDs);
    const statement = db.prepare('INSERT INTO summary_fts (session_id, node_id, level, created_at, content) VALUES (?, ?, ?, ?, ?)');
    deps.readScopedSummaryRowsSync(sessionIDs).forEach((row) => {
        statement.run(row.session_id, row.node_id, String(row.level), String(row.created_at), row.summary_text);
    });
}
|
|
278
|
+
/** Rebuild artifact FTS rows for the given scope (all sessions when undefined). */
export function replaceArtifactSearchRowsSync(deps, sessionIDs) {
    const db = deps.getDb();
    deleteScopedFtsRows(db, 'artifact_fts', sessionIDs);
    const statement = db.prepare('INSERT INTO artifact_fts (session_id, artifact_id, message_id, part_id, artifact_kind, created_at, content) VALUES (?, ?, ?, ?, ?, ?, ?)');
    deps.readScopedArtifactRowsSync(sessionIDs).forEach((row) => {
        statement.run(row.session_id, row.artifact_id, row.message_id, row.part_id, row.artifact_kind, String(row.created_at), deps.buildArtifactSearchContent(row));
    });
}
|
|
287
|
+
/**
 * Refresh all three FTS indexes (messages, summaries, artifacts) for the
 * given session scope; undefined refreshes everything.
 */
export function refreshSearchIndexesSync(deps, sessionIDs) {
    deleteScopedFtsRows(deps.getDb(), 'message_fts', sessionIDs);
    for (const session of deps.readScopedSessionsSync(sessionIDs)) {
        insertMessageSearchRowsSync(deps, session);
    }
    replaceSummarySearchRowsSync(deps, sessionIDs);
    replaceArtifactSearchRowsSync(deps, sessionIDs);
}
|
|
296
|
+
/** Full rebuild: refresh every search index with no session scoping. */
export function rebuildSearchIndexesSync(deps) {
    refreshSearchIndexesSync(deps, undefined);
}
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import type { SqlDatabaseLike } from './store-types.js';
/** Which sessions a snapshot export covers. */
export type SnapshotScope = 'session' | 'root' | 'worktree' | 'all';
/** How worktree keys are treated when importing a snapshot. */
export type SnapshotWorktreeMode = 'auto' | 'preserve' | 'current';
/** One session record as stored in a snapshot payload. */
export type SessionRow = {
    session_id: string;
    title: string | null;
    session_directory: string | null;
    worktree_key: string | null;
    parent_session_id: string | null;
    root_session_id: string | null;
    lineage_depth: number | null;
    // Numeric flags — presumably SQLite-style 0/1 booleans; confirm against the writer.
    pinned: number | null;
    pin_reason: string | null;
    updated_at: number;
    compacted_at: number | null;
    deleted: number;
    event_count: number;
};
/** One message record; `info_json` is an opaque serialized payload. */
export type MessageRow = {
    message_id: string;
    session_id: string;
    created_at: number;
    info_json: string;
};
/** One message part; `sort_key` orders parts within a message. */
export type PartRow = {
    part_id: string;
    session_id: string;
    message_id: string;
    sort_key: number;
    part_json: string;
};
/**
 * One node of a session's summary tree.
 * `start_index`/`end_index` and `message_ids_json` record which messages
 * the summary covers; `level` is the node's depth tier.
 */
export type SummaryNodeRow = {
    node_id: string;
    session_id: string;
    level: number;
    node_kind: string;
    start_index: number;
    end_index: number;
    message_ids_json: string;
    summary_text: string;
    created_at: number;
};
/** Parent/child link between summary nodes; `child_position` orders siblings. */
export type SummaryEdgeRow = {
    session_id: string;
    parent_id: string;
    child_id: string;
    child_position: number;
};
/** Per-session summarization bookkeeping state. */
export type SummaryStateRow = {
    session_id: string;
    archived_count: number;
    latest_message_created: number;
    archived_signature: string | null;
    root_node_ids_json: string;
    updated_at: number;
};
/**
 * One extracted artifact. `content_hash` is nullable and, when present,
 * presumably keys into ArtifactBlobRow for deduplicated content — confirm.
 */
export type ArtifactRow = {
    artifact_id: string;
    session_id: string;
    message_id: string;
    part_id: string;
    artifact_kind: string;
    field_name: string;
    preview_text: string;
    content_text: string;
    content_hash: string | null;
    metadata_json: string;
    char_count: number;
    created_at: number;
};
/** Content-addressed artifact blob, keyed by `content_hash`. */
export type ArtifactBlobRow = {
    content_hash: string;
    content_text: string;
    char_count: number;
    created_at: number;
};
|
|
77
|
+
/** On-disk JSON payload produced by a snapshot export (format version 1). */
export type SnapshotPayload = {
    version: 1;
    exportedAt: number;
    scope: string;
    sessions: SessionRow[];
    messages: MessageRow[];
    parts: PartRow[];
    // Per-session resume notes.
    resumes: Array<{
        session_id: string;
        note: string;
        updated_at: number;
    }>;
    artifacts: ArtifactRow[];
    artifact_blobs: ArtifactBlobRow[];
    summary_nodes: SummaryNodeRow[];
    summary_edges: SummaryEdgeRow[];
    summary_state: SummaryStateRow[];
};
/** Arguments for exporting a snapshot to `filePath`; scope/session narrow the export. */
export type ExportSnapshotInput = {
    filePath: string;
    sessionID?: string;
    scope?: string;
};
/** Arguments for importing a snapshot from `filePath`. */
export type ImportSnapshotInput = {
    filePath: string;
    // 'replace' vs 'merge' semantics are implemented in store-snapshot.js.
    mode?: 'replace' | 'merge';
    worktreeMode?: SnapshotWorktreeMode;
};
|
|
105
|
+
/** Read-side store operations the snapshot exporter depends on. */
export type SnapshotExportBindings = {
    workspaceDirectory: string;
    normalizeScope(scope?: string): SnapshotScope | undefined;
    resolveScopeSessionIDs(scope?: string, sessionID?: string): string[] | undefined;
    // All readers take an optional session scope; undefined means "everything".
    readScopedSessionRowsSync(sessionIDs?: string[]): SessionRow[];
    readScopedMessageRowsSync(sessionIDs?: string[]): MessageRow[];
    readScopedPartRowsSync(sessionIDs?: string[]): PartRow[];
    readScopedResumeRowsSync(sessionIDs?: string[]): Array<{
        session_id: string;
        note: string;
        updated_at: number;
    }>;
    readScopedArtifactRowsSync(sessionIDs?: string[]): ArtifactRow[];
    readScopedArtifactBlobRowsSync(sessionIDs?: string[]): ArtifactBlobRow[];
    readScopedSummaryRowsSync(sessionIDs?: string[]): SummaryNodeRow[];
    readScopedSummaryEdgeRowsSync(sessionIDs?: string[]): SummaryEdgeRow[];
    readScopedSummaryStateRowsSync(sessionIDs?: string[]): SummaryStateRow[];
};
/** Write-side store operations the snapshot importer depends on. */
export type SnapshotImportBindings = {
    workspaceDirectory: string;
    getDb(): SqlDatabaseLike;
    clearSessionDataSync(sessionID: string): void;
    backfillArtifactBlobsSync(): void;
    refreshAllLineageSync(): void;
    syncAllDerivedSessionStateSync(force: boolean): void;
    refreshSearchIndexesSync(sessionIDs?: string[]): void;
};
/** Writes a snapshot of the store to `input.filePath`; resolves to a result message/path. */
export declare function exportStoreSnapshot(bindings: SnapshotExportBindings, input: ExportSnapshotInput): Promise<string>;
/** Loads a snapshot from `input.filePath` into the store; resolves to a result message/path. */
export declare function importStoreSnapshot(bindings: SnapshotImportBindings, input: ImportSnapshotInput): Promise<string>;
|