@vellumai/assistant 0.5.3 → 0.5.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/docs/architecture/memory.md +105 -0
- package/package.json +1 -1
- package/src/__tests__/archive-recall.test.ts +560 -0
- package/src/__tests__/conversation-clear-safety.test.ts +259 -0
- package/src/__tests__/conversation-switch-memory-reduction.test.ts +474 -0
- package/src/__tests__/db-schedule-syntax-migration.test.ts +3 -0
- package/src/__tests__/memory-reducer-job.test.ts +538 -0
- package/src/__tests__/memory-reducer-scheduling.test.ts +473 -0
- package/src/__tests__/memory-reducer-types.test.ts +12 -4
- package/src/__tests__/memory-reducer.test.ts +7 -1
- package/src/__tests__/memory-regressions.test.ts +24 -4
- package/src/__tests__/memory-simplified-config.test.ts +4 -4
- package/src/__tests__/simplified-memory-e2e.test.ts +666 -0
- package/src/__tests__/simplified-memory-runtime.test.ts +616 -0
- package/src/cli/commands/conversations.ts +18 -0
- package/src/config/bundled-skills/schedule/TOOLS.json +8 -0
- package/src/config/loader.ts +0 -1
- package/src/config/schemas/memory-simplified.ts +1 -1
- package/src/daemon/conversation-memory.ts +117 -0
- package/src/daemon/conversation-runtime-assembly.ts +1 -0
- package/src/daemon/handlers/conversations.ts +11 -0
- package/src/daemon/lifecycle.ts +44 -1
- package/src/memory/archive-recall.ts +516 -0
- package/src/memory/brief-time.ts +5 -4
- package/src/memory/conversation-crud.ts +210 -0
- package/src/memory/conversation-key-store.ts +33 -4
- package/src/memory/db-init.ts +4 -0
- package/src/memory/job-handlers/backfill-simplified-memory.ts +462 -0
- package/src/memory/job-handlers/conversation-starters.ts +9 -3
- package/src/memory/job-handlers/reduce-conversation-memory.ts +229 -0
- package/src/memory/jobs-store.ts +2 -0
- package/src/memory/jobs-worker.ts +8 -0
- package/src/memory/migrations/036-normalize-phone-identities.ts +49 -14
- package/src/memory/migrations/135-backfill-contact-interaction-stats.ts +9 -1
- package/src/memory/migrations/141-rename-verification-table.ts +8 -0
- package/src/memory/migrations/142-rename-verification-session-id-column.ts +7 -2
- package/src/memory/migrations/174-rename-thread-starters-table.ts +8 -0
- package/src/memory/migrations/188-schedule-quiet-flag.ts +13 -0
- package/src/memory/migrations/index.ts +1 -0
- package/src/memory/reducer-scheduler.ts +242 -0
- package/src/memory/reducer-types.ts +9 -2
- package/src/memory/reducer.ts +25 -11
- package/src/memory/schema/infrastructure.ts +1 -0
- package/src/runtime/auth/route-policy.ts +10 -1
- package/src/runtime/routes/conversation-management-routes.ts +88 -2
- package/src/runtime/routes/guardian-bootstrap-routes.ts +19 -7
- package/src/runtime/routes/secret-routes.ts +1 -0
- package/src/schedule/schedule-store.ts +7 -0
- package/src/schedule/scheduler.ts +6 -2
- package/src/telemetry/usage-telemetry-reporter.ts +1 -1
- package/src/tools/filesystem/edit.ts +6 -1
- package/src/tools/filesystem/read.ts +6 -1
- package/src/tools/filesystem/write.ts +6 -1
- package/src/tools/memory/handlers.ts +129 -1
- package/src/tools/schedule/create.ts +3 -0
- package/src/tools/schedule/list.ts +5 -1
- package/src/tools/schedule/update.ts +6 -0
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Job handler for `reduce_conversation_memory`.
|
|
3
|
+
*
|
|
4
|
+
* Ties together the reducer service ({@link runReducer}) and the transactional
|
|
5
|
+
* store ({@link applyReducerResult}) to process unreduced conversation turns
|
|
6
|
+
* as a background job.
|
|
7
|
+
*
|
|
8
|
+
* The handler:
|
|
9
|
+
* 1. Loads the conversation and validates the dirty tail marker.
|
|
10
|
+
* 2. Loads the unreduced message span (messages since the dirty tail).
|
|
11
|
+
* 3. Loads active time contexts and open loops for the conversation's scope.
|
|
12
|
+
* 4. Includes the current `contextSummary` when present (prepended as a
|
|
13
|
+
* synthetic system message so the reducer has compacted context).
|
|
14
|
+
* 5. Calls `runReducer` with the assembled input.
|
|
15
|
+
* 6. Applies the result transactionally via `applyReducerResult`.
|
|
16
|
+
*
|
|
17
|
+
* If the reducer fails or returns the {@link EMPTY_REDUCER_RESULT} sentinel
|
|
18
|
+
* (unparseable output), the checkpoint is NOT advanced — the dirty tail stays
|
|
19
|
+
* in place so the next run retries. A valid-but-empty model response (e.g.
|
|
20
|
+
* `{}`) returns a normal empty result that advances the checkpoint normally.
|
|
21
|
+
*/
|
|
22
|
+
|
|
23
|
+
import { and, asc, eq, gte } from "drizzle-orm";
|
|
24
|
+
|
|
25
|
+
import { getLogger } from "../../util/logger.js";
|
|
26
|
+
import { type ConversationRow, getConversation } from "../conversation-crud.js";
|
|
27
|
+
import { getDb } from "../db.js";
|
|
28
|
+
import { asString } from "../job-utils.js";
|
|
29
|
+
import type { MemoryJob } from "../jobs-store.js";
|
|
30
|
+
import { type ReducerPromptInput, runReducer } from "../reducer.js";
|
|
31
|
+
import {
|
|
32
|
+
applyReducerResult,
|
|
33
|
+
getActiveOpenLoops,
|
|
34
|
+
getActiveTimeContexts,
|
|
35
|
+
} from "../reducer-store.js";
|
|
36
|
+
import { EMPTY_REDUCER_RESULT } from "../reducer-types.js";
|
|
37
|
+
import { messages } from "../schema.js";
|
|
38
|
+
|
|
39
|
+
// Scoped logger for this job handler.
const log = getLogger("reduce-conversation-memory-job");

/** Payload carried by a `reduce_conversation_memory` job. */
export interface ReduceConversationMemoryPayload {
  // Target conversation whose unreduced message tail should be processed.
  conversationId: string;
}
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* Process a `reduce_conversation_memory` job.
|
|
47
|
+
*
|
|
48
|
+
* @throws Re-throws reducer errors so the job worker can classify and retry.
|
|
49
|
+
*/
|
|
50
|
+
export async function reduceConversationMemoryJob(
|
|
51
|
+
job: MemoryJob,
|
|
52
|
+
): Promise<void> {
|
|
53
|
+
const conversationId = asString(job.payload.conversationId);
|
|
54
|
+
if (!conversationId) {
|
|
55
|
+
log.warn({ jobId: job.id }, "Missing conversationId in job payload");
|
|
56
|
+
return;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
// ── 1. Load conversation and validate dirty tail ────────────────
|
|
60
|
+
const conversation = getConversation(conversationId);
|
|
61
|
+
if (!conversation) {
|
|
62
|
+
log.warn(
|
|
63
|
+
{ jobId: job.id, conversationId },
|
|
64
|
+
"Conversation not found, skipping reduction",
|
|
65
|
+
);
|
|
66
|
+
return;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
const dirtyTailMessageId = conversation.memoryDirtyTailSinceMessageId;
|
|
70
|
+
if (!dirtyTailMessageId) {
|
|
71
|
+
log.debug(
|
|
72
|
+
{ jobId: job.id, conversationId },
|
|
73
|
+
"No dirty tail marker — conversation is already fully reduced",
|
|
74
|
+
);
|
|
75
|
+
return;
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
// ── 2. Load unreduced message span ──────────────────────────────
|
|
79
|
+
const unreducedMessages = loadUnreducedMessages(
|
|
80
|
+
conversationId,
|
|
81
|
+
dirtyTailMessageId,
|
|
82
|
+
);
|
|
83
|
+
|
|
84
|
+
if (unreducedMessages.length === 0) {
|
|
85
|
+
log.debug(
|
|
86
|
+
{ jobId: job.id, conversationId, dirtyTailMessageId },
|
|
87
|
+
"No messages found from dirty tail — nothing to reduce",
|
|
88
|
+
);
|
|
89
|
+
return;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// ── 3. Load active brief-state context ──────────────────────────
|
|
93
|
+
const scopeId = conversation.memoryScopeId;
|
|
94
|
+
const now = Date.now();
|
|
95
|
+
|
|
96
|
+
const existingTimeContexts = getActiveTimeContexts(scopeId, now);
|
|
97
|
+
const existingOpenLoops = getActiveOpenLoops(scopeId);
|
|
98
|
+
|
|
99
|
+
// ── 4. Build reducer input ──────────────────────────────────────
|
|
100
|
+
const newMessages = buildNewMessages(conversation, unreducedMessages);
|
|
101
|
+
|
|
102
|
+
const reducerInput: ReducerPromptInput = {
|
|
103
|
+
conversationId,
|
|
104
|
+
newMessages,
|
|
105
|
+
existingTimeContexts: existingTimeContexts.map((tc) => ({
|
|
106
|
+
id: tc.id,
|
|
107
|
+
summary: tc.summary,
|
|
108
|
+
})),
|
|
109
|
+
existingOpenLoops: existingOpenLoops.map((ol) => ({
|
|
110
|
+
id: ol.id,
|
|
111
|
+
summary: ol.summary,
|
|
112
|
+
status: ol.status,
|
|
113
|
+
})),
|
|
114
|
+
nowMs: now,
|
|
115
|
+
scopeId,
|
|
116
|
+
};
|
|
117
|
+
|
|
118
|
+
// ── 5. Run the reducer ──────────────────────────────────────────
|
|
119
|
+
const result = await runReducer(reducerInput);
|
|
120
|
+
|
|
121
|
+
// If the reducer returns the empty sentinel, skip applying — the dirty
|
|
122
|
+
// tail stays in place so a future run can retry.
|
|
123
|
+
if (result === EMPTY_REDUCER_RESULT) {
|
|
124
|
+
log.warn(
|
|
125
|
+
{ jobId: job.id, conversationId },
|
|
126
|
+
"Reducer returned empty result — not advancing checkpoint",
|
|
127
|
+
);
|
|
128
|
+
return;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// ── 6. Apply result transactionally ─────────────────────────────
|
|
132
|
+
const lastMessage = unreducedMessages[unreducedMessages.length - 1];
|
|
133
|
+
applyReducerResult({
|
|
134
|
+
result,
|
|
135
|
+
conversationId,
|
|
136
|
+
scopeId,
|
|
137
|
+
reducedThroughMessageId: lastMessage.id,
|
|
138
|
+
now,
|
|
139
|
+
});
|
|
140
|
+
|
|
141
|
+
log.info(
|
|
142
|
+
{
|
|
143
|
+
jobId: job.id,
|
|
144
|
+
conversationId,
|
|
145
|
+
reducedThroughMessageId: lastMessage.id,
|
|
146
|
+
messageCount: unreducedMessages.length,
|
|
147
|
+
timeContextOps: result.timeContexts.length,
|
|
148
|
+
openLoopOps: result.openLoops.length,
|
|
149
|
+
},
|
|
150
|
+
"Conversation memory reduction completed",
|
|
151
|
+
);
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
// ── Internal helpers ────────────────────────────────────────────────

/** Shape of a message row as selected by {@link loadUnreducedMessages}. */
interface MessageRow {
  // Message primary key.
  id: string;
  // Conversation role, stored as a plain string (not a narrowed union here).
  role: string;
  // Raw message text forwarded to the reducer.
  content: string;
  // Creation timestamp used for ordering. NOTE(review): assumed to be
  // epoch milliseconds to match Date.now() elsewhere in this file — confirm.
  createdAt: number;
}
|
|
162
|
+
|
|
163
|
+
/**
|
|
164
|
+
* Load messages from `dirtyTailMessageId` onward (inclusive), ordered by
|
|
165
|
+
* createdAt ascending. Uses the message's createdAt as the boundary since
|
|
166
|
+
* message ordering is timestamp-based.
|
|
167
|
+
*/
|
|
168
|
+
function loadUnreducedMessages(
|
|
169
|
+
conversationId: string,
|
|
170
|
+
dirtyTailMessageId: string,
|
|
171
|
+
): MessageRow[] {
|
|
172
|
+
const db = getDb();
|
|
173
|
+
|
|
174
|
+
// First, find the createdAt of the dirty tail message
|
|
175
|
+
const tailMessage = db
|
|
176
|
+
.select({ createdAt: messages.createdAt })
|
|
177
|
+
.from(messages)
|
|
178
|
+
.where(eq(messages.id, dirtyTailMessageId))
|
|
179
|
+
.get();
|
|
180
|
+
|
|
181
|
+
if (!tailMessage) {
|
|
182
|
+
return [];
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
// Load all messages from that timestamp onward
|
|
186
|
+
return db
|
|
187
|
+
.select({
|
|
188
|
+
id: messages.id,
|
|
189
|
+
role: messages.role,
|
|
190
|
+
content: messages.content,
|
|
191
|
+
createdAt: messages.createdAt,
|
|
192
|
+
})
|
|
193
|
+
.from(messages)
|
|
194
|
+
.where(
|
|
195
|
+
and(
|
|
196
|
+
eq(messages.conversationId, conversationId),
|
|
197
|
+
gte(messages.createdAt, tailMessage.createdAt),
|
|
198
|
+
),
|
|
199
|
+
)
|
|
200
|
+
.orderBy(asc(messages.createdAt))
|
|
201
|
+
.all();
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
/**
|
|
205
|
+
* Build the `newMessages` array for the reducer input.
|
|
206
|
+
*
|
|
207
|
+
* When the conversation has a `contextSummary` (from context window
|
|
208
|
+
* compaction), it is prepended as a synthetic `system` message so the
|
|
209
|
+
* reducer has access to prior compacted context.
|
|
210
|
+
*/
|
|
211
|
+
function buildNewMessages(
|
|
212
|
+
conversation: ConversationRow,
|
|
213
|
+
unreducedMessages: MessageRow[],
|
|
214
|
+
): Array<{ role: string; content: string }> {
|
|
215
|
+
const result: Array<{ role: string; content: string }> = [];
|
|
216
|
+
|
|
217
|
+
if (conversation.contextSummary) {
|
|
218
|
+
result.push({
|
|
219
|
+
role: "system",
|
|
220
|
+
content: `[Prior context summary] ${conversation.contextSummary}`,
|
|
221
|
+
});
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
for (const msg of unreducedMessages) {
|
|
225
|
+
result.push({ role: msg.role, content: msg.content });
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
return result;
|
|
229
|
+
}
|
package/src/memory/jobs-store.ts
CHANGED
|
@@ -30,6 +30,8 @@ export type MemoryJobType =
|
|
|
30
30
|
| "embed_media"
|
|
31
31
|
| "embed_attachment"
|
|
32
32
|
| "generate_conversation_starters"
|
|
33
|
+
| "reduce_conversation_memory"
|
|
34
|
+
| "backfill_simplified_memory"
|
|
33
35
|
| "generate_capability_cards" // legacy compat — silently dropped by worker (capability cards removed)
|
|
34
36
|
| "generate_thread_starters"; // legacy compat — silently dropped by worker (renamed to generate_conversation_starters)
|
|
35
37
|
|
|
@@ -3,6 +3,7 @@ import type { AssistantConfig } from "../config/types.js";
|
|
|
3
3
|
import { getLogger } from "../util/logger.js";
|
|
4
4
|
import { rawRun } from "./db.js";
|
|
5
5
|
import { backfillJob } from "./job-handlers/backfill.js";
|
|
6
|
+
import { backfillSimplifiedMemoryJob } from "./job-handlers/backfill-simplified-memory.js";
|
|
6
7
|
import {
|
|
7
8
|
cleanupStaleSupersededItemsJob,
|
|
8
9
|
pruneOldConversationsJob,
|
|
@@ -25,6 +26,7 @@ import {
|
|
|
25
26
|
rebuildIndexJob,
|
|
26
27
|
} from "./job-handlers/index-maintenance.js";
|
|
27
28
|
import { mediaProcessingJob } from "./job-handlers/media-processing.js";
|
|
29
|
+
import { reduceConversationMemoryJob } from "./job-handlers/reduce-conversation-memory.js";
|
|
28
30
|
import { buildConversationSummaryJob } from "./job-handlers/summarization.js";
|
|
29
31
|
import {
|
|
30
32
|
BackendUnavailableError,
|
|
@@ -319,6 +321,12 @@ async function processJob(
|
|
|
319
321
|
case "embed_attachment":
|
|
320
322
|
await embedAttachmentJob(job, config);
|
|
321
323
|
return;
|
|
324
|
+
case "reduce_conversation_memory":
|
|
325
|
+
await reduceConversationMemoryJob(job);
|
|
326
|
+
return;
|
|
327
|
+
case "backfill_simplified_memory":
|
|
328
|
+
await backfillSimplifiedMemoryJob(job);
|
|
329
|
+
return;
|
|
322
330
|
case "generate_conversation_starters":
|
|
323
331
|
await generateConversationStartersJob(job);
|
|
324
332
|
return;
|
|
@@ -79,9 +79,27 @@ export function migrateNormalizePhoneIdentities(database: DrizzleDb): void {
|
|
|
79
79
|
.get(table);
|
|
80
80
|
const orderBy = hasUpdatedAt ? "updated_at DESC, rowid DESC" : "rowid DESC";
|
|
81
81
|
|
|
82
|
-
|
|
82
|
+
// Filter uniqueKeyScope to only include peer columns that actually exist in the table.
|
|
83
|
+
// If a peer column is missing, its unique index can't exist either, so no collision risk.
|
|
84
|
+
let effectiveScope = uniqueKeyScope;
|
|
83
85
|
if (uniqueKeyScope) {
|
|
84
|
-
|
|
86
|
+
const validPeers = uniqueKeyScope.peerColumns.filter(
|
|
87
|
+
(col) =>
|
|
88
|
+
!!raw
|
|
89
|
+
.query(`SELECT 1 FROM pragma_table_info(?) WHERE name = ?`)
|
|
90
|
+
.get(table, col),
|
|
91
|
+
);
|
|
92
|
+
effectiveScope =
|
|
93
|
+
validPeers.length === uniqueKeyScope.peerColumns.length
|
|
94
|
+
? uniqueKeyScope
|
|
95
|
+
: validPeers.length > 0
|
|
96
|
+
? { ...uniqueKeyScope, peerColumns: validPeers }
|
|
97
|
+
: undefined;
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
const selectColumns = [`id`, column];
|
|
101
|
+
if (effectiveScope) {
|
|
102
|
+
for (const peer of effectiveScope.peerColumns) {
|
|
85
103
|
if (!selectColumns.includes(peer)) selectColumns.push(peer);
|
|
86
104
|
}
|
|
87
105
|
}
|
|
@@ -104,14 +122,14 @@ export function migrateNormalizePhoneIdentities(database: DrizzleDb): void {
|
|
|
104
122
|
if (!original) continue;
|
|
105
123
|
const normalized = normalizePhoneNumber(original);
|
|
106
124
|
if (normalized && normalized !== original) {
|
|
107
|
-
if (
|
|
125
|
+
if (effectiveScope) {
|
|
108
126
|
// Check if another row already has the normalized value within the same unique-key scope
|
|
109
|
-
const peerConditions =
|
|
127
|
+
const peerConditions = effectiveScope.peerColumns
|
|
110
128
|
.map((col) => `${col} = ?`)
|
|
111
129
|
.join(" AND ");
|
|
112
|
-
const peerValues =
|
|
113
|
-
const whereExtra =
|
|
114
|
-
? ` AND (${
|
|
130
|
+
const peerValues = effectiveScope.peerColumns.map((col) => row[col]);
|
|
131
|
+
const whereExtra = effectiveScope.whereClause
|
|
132
|
+
? ` AND (${effectiveScope.whereClause})`
|
|
115
133
|
: "";
|
|
116
134
|
const existing = raw
|
|
117
135
|
.query(
|
|
@@ -154,9 +172,26 @@ export function migrateNormalizePhoneIdentities(database: DrizzleDb): void {
|
|
|
154
172
|
.get(table);
|
|
155
173
|
const orderBy = hasUpdatedAt ? "updated_at DESC, rowid DESC" : "rowid DESC";
|
|
156
174
|
|
|
157
|
-
|
|
175
|
+
// Filter uniqueKeyScope to only include peer columns that actually exist in the table.
|
|
176
|
+
let effectiveScope = uniqueKeyScope;
|
|
158
177
|
if (uniqueKeyScope) {
|
|
159
|
-
|
|
178
|
+
const validPeers = uniqueKeyScope.peerColumns.filter(
|
|
179
|
+
(col) =>
|
|
180
|
+
!!raw
|
|
181
|
+
.query(`SELECT 1 FROM pragma_table_info(?) WHERE name = ?`)
|
|
182
|
+
.get(table, col),
|
|
183
|
+
);
|
|
184
|
+
effectiveScope =
|
|
185
|
+
validPeers.length === uniqueKeyScope.peerColumns.length
|
|
186
|
+
? uniqueKeyScope
|
|
187
|
+
: validPeers.length > 0
|
|
188
|
+
? { ...uniqueKeyScope, peerColumns: validPeers }
|
|
189
|
+
: undefined;
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
const selectColumns = [`id`, column];
|
|
193
|
+
if (effectiveScope) {
|
|
194
|
+
for (const peer of effectiveScope.peerColumns) {
|
|
160
195
|
if (!selectColumns.includes(peer)) selectColumns.push(peer);
|
|
161
196
|
}
|
|
162
197
|
}
|
|
@@ -179,13 +214,13 @@ export function migrateNormalizePhoneIdentities(database: DrizzleDb): void {
|
|
|
179
214
|
if (!original) continue;
|
|
180
215
|
const normalized = normalizePhoneNumber(original);
|
|
181
216
|
if (normalized && normalized !== original) {
|
|
182
|
-
if (
|
|
183
|
-
const peerConditions =
|
|
217
|
+
if (effectiveScope) {
|
|
218
|
+
const peerConditions = effectiveScope.peerColumns
|
|
184
219
|
.map((col) => `${col} = ?`)
|
|
185
220
|
.join(" AND ");
|
|
186
|
-
const peerValues =
|
|
187
|
-
const whereExtra =
|
|
188
|
-
? ` AND (${
|
|
221
|
+
const peerValues = effectiveScope.peerColumns.map((col) => row[col]);
|
|
222
|
+
const whereExtra = effectiveScope.whereClause
|
|
223
|
+
? ` AND (${effectiveScope.whereClause})`
|
|
189
224
|
: "";
|
|
190
225
|
const existing = raw
|
|
191
226
|
.query(
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import type
|
|
1
|
+
import { type DrizzleDb, getSqliteFrom } from "../db-connection.js";
|
|
2
2
|
import { withCrashRecovery } from "./validate-migration-state.js";
|
|
3
3
|
|
|
4
4
|
/**
|
|
@@ -7,6 +7,14 @@ import { withCrashRecovery } from "./validate-migration-state.js";
|
|
|
7
7
|
* existing data, so it stays at 0 and accumulates going forward.
|
|
8
8
|
*/
|
|
9
9
|
export function migrateBackfillContactInteractionStats(db: DrizzleDb): void {
|
|
10
|
+
const raw = getSqliteFrom(db);
|
|
11
|
+
const colExists = raw
|
|
12
|
+
.query(
|
|
13
|
+
`SELECT 1 FROM pragma_table_info('contacts') WHERE name = 'last_interaction'`,
|
|
14
|
+
)
|
|
15
|
+
.get();
|
|
16
|
+
if (!colExists) return;
|
|
17
|
+
|
|
10
18
|
withCrashRecovery(db, "backfill_contact_interaction_stats", () => {
|
|
11
19
|
db.run(/*sql*/ `
|
|
12
20
|
UPDATE contacts
|
|
@@ -18,6 +18,14 @@ export function migrateRenameVerificationTable(database: DrizzleDb): void {
|
|
|
18
18
|
.get();
|
|
19
19
|
if (!oldTableExists) return;
|
|
20
20
|
|
|
21
|
+
// If the new table already exists, the rename would collide — skip
|
|
22
|
+
const newTableExists = raw
|
|
23
|
+
.query(
|
|
24
|
+
`SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'channel_verification_sessions'`,
|
|
25
|
+
)
|
|
26
|
+
.get();
|
|
27
|
+
if (newTableExists) return;
|
|
28
|
+
|
|
21
29
|
// Rename the physical table
|
|
22
30
|
raw.exec(
|
|
23
31
|
/*sql*/ `ALTER TABLE channel_guardian_verification_challenges RENAME TO channel_verification_sessions`,
|
|
@@ -15,14 +15,19 @@ export function migrateRenameVerificationSessionIdColumn(
|
|
|
15
15
|
() => {
|
|
16
16
|
const raw = getSqliteFrom(database);
|
|
17
17
|
|
|
18
|
-
// Check the old column exists before attempting the rename
|
|
18
|
+
// Check the old column exists and the new column doesn't before attempting the rename.
|
|
19
|
+
// Both checks are needed for crash recovery: if the rename succeeded but the checkpoint
|
|
20
|
+
// didn't commit, the old column is gone and the new one already exists.
|
|
19
21
|
const columns = raw
|
|
20
22
|
.query(`PRAGMA table_info(call_sessions)`)
|
|
21
23
|
.all() as Array<{ name: string }>;
|
|
22
24
|
const hasOldColumn = columns.some(
|
|
23
25
|
(c) => c.name === "guardian_verification_session_id",
|
|
24
26
|
);
|
|
25
|
-
|
|
27
|
+
const hasNewColumn = columns.some(
|
|
28
|
+
(c) => c.name === "verification_session_id",
|
|
29
|
+
);
|
|
30
|
+
if (!hasOldColumn || hasNewColumn) return;
|
|
26
31
|
|
|
27
32
|
raw.exec(
|
|
28
33
|
/*sql*/ `ALTER TABLE call_sessions RENAME COLUMN guardian_verification_session_id TO verification_session_id`,
|
|
@@ -21,6 +21,14 @@ export function migrateRenameThreadStartersTable(database: DrizzleDb): void {
|
|
|
21
21
|
.get();
|
|
22
22
|
if (!oldTableExists) return;
|
|
23
23
|
|
|
24
|
+
// If the new table already exists (crash recovery), skip the rename
|
|
25
|
+
const newTableExists = raw
|
|
26
|
+
.query(
|
|
27
|
+
`SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'conversation_starters'`,
|
|
28
|
+
)
|
|
29
|
+
.get();
|
|
30
|
+
if (newTableExists) return;
|
|
31
|
+
|
|
24
32
|
// Rename the physical table
|
|
25
33
|
raw.exec(
|
|
26
34
|
/*sql*/ `ALTER TABLE thread_starters RENAME TO conversation_starters`,
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { DrizzleDb } from "../db-connection.js";
|
|
2
|
+
import { getSqliteFrom } from "../db-connection.js";
|
|
3
|
+
|
|
4
|
+
/**
 * Migration 188: add the `quiet` flag column (INTEGER, default 0) to
 * `cron_jobs`.
 *
 * The ALTER is attempted blindly and the error is swallowed, which makes
 * the migration idempotent when the column is already present.
 * NOTE(review): the bare catch also swallows unrelated failures (e.g. a
 * missing `cron_jobs` table) — confirm best-effort is intended here.
 */
export function migrateScheduleQuietFlag(database: DrizzleDb): void {
  const raw = getSqliteFrom(database);
  try {
    raw.exec(
      `ALTER TABLE cron_jobs ADD COLUMN quiet INTEGER NOT NULL DEFAULT 0`,
    );
  } catch {
    // Column already exists — nothing to do.
  }
}
|
|
@@ -129,6 +129,7 @@ export { migrateLlmRequestLogProvider } from "./184-llm-request-log-provider.js"
|
|
|
129
129
|
export { migrateMemoryBriefState } from "./185-memory-brief-state.js";
|
|
130
130
|
export { migrateMemoryArchiveTables } from "./186-memory-archive.js";
|
|
131
131
|
export { migrateMemoryReducerCheckpoints } from "./187-memory-reducer-checkpoints.js";
|
|
132
|
+
export { migrateScheduleQuietFlag } from "./188-schedule-quiet-flag.js";
|
|
132
133
|
export {
|
|
133
134
|
MIGRATION_REGISTRY,
|
|
134
135
|
type MigrationRegistryEntry,
|