family-ai-agent 1.0.6 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.letta/settings.local.json +3 -0
- package/dist/database/adapters/base-adapter.d.ts +81 -0
- package/dist/database/adapters/base-adapter.d.ts.map +1 -0
- package/dist/database/adapters/base-adapter.js +105 -0
- package/dist/database/adapters/base-adapter.js.map +1 -0
- package/dist/database/adapters/index.d.ts +49 -0
- package/dist/database/adapters/index.d.ts.map +1 -0
- package/dist/database/adapters/index.js +200 -0
- package/dist/database/adapters/index.js.map +1 -0
- package/dist/database/adapters/postgres-adapter.d.ts +75 -0
- package/dist/database/adapters/postgres-adapter.d.ts.map +1 -0
- package/dist/database/adapters/postgres-adapter.js +225 -0
- package/dist/database/adapters/postgres-adapter.js.map +1 -0
- package/dist/database/adapters/sqlite-adapter.d.ts +72 -0
- package/dist/database/adapters/sqlite-adapter.d.ts.map +1 -0
- package/dist/database/adapters/sqlite-adapter.js +368 -0
- package/dist/database/adapters/sqlite-adapter.js.map +1 -0
- package/dist/database/cache/cache-keys.d.ts +180 -0
- package/dist/database/cache/cache-keys.d.ts.map +1 -0
- package/dist/database/cache/cache-keys.js +107 -0
- package/dist/database/cache/cache-keys.js.map +1 -0
- package/dist/database/cache/index.d.ts +24 -0
- package/dist/database/cache/index.d.ts.map +1 -0
- package/dist/database/cache/index.js +34 -0
- package/dist/database/cache/index.js.map +1 -0
- package/dist/database/cache/query-cache.d.ts +67 -0
- package/dist/database/cache/query-cache.d.ts.map +1 -0
- package/dist/database/cache/query-cache.js +177 -0
- package/dist/database/cache/query-cache.js.map +1 -0
- package/dist/database/client.d.ts +63 -4
- package/dist/database/client.d.ts.map +1 -1
- package/dist/database/client.js +147 -59
- package/dist/database/client.js.map +1 -1
- package/dist/database/db-config.d.ts +104 -0
- package/dist/database/db-config.d.ts.map +1 -0
- package/dist/database/db-config.js +167 -0
- package/dist/database/db-config.js.map +1 -0
- package/dist/database/drizzle/index.d.ts +42 -0
- package/dist/database/drizzle/index.d.ts.map +1 -0
- package/dist/database/drizzle/index.js +48 -0
- package/dist/database/drizzle/index.js.map +1 -0
- package/dist/database/drizzle/schema/audit.d.ts +533 -0
- package/dist/database/drizzle/schema/audit.d.ts.map +1 -0
- package/dist/database/drizzle/schema/audit.js +71 -0
- package/dist/database/drizzle/schema/audit.js.map +1 -0
- package/dist/database/drizzle/schema/checkpoints.d.ts +665 -0
- package/dist/database/drizzle/schema/checkpoints.d.ts.map +1 -0
- package/dist/database/drizzle/schema/checkpoints.js +110 -0
- package/dist/database/drizzle/schema/checkpoints.js.map +1 -0
- package/dist/database/drizzle/schema/conversations.d.ts +449 -0
- package/dist/database/drizzle/schema/conversations.d.ts.map +1 -0
- package/dist/database/drizzle/schema/conversations.js +91 -0
- package/dist/database/drizzle/schema/conversations.js.map +1 -0
- package/dist/database/drizzle/schema/documents.d.ts +600 -0
- package/dist/database/drizzle/schema/documents.d.ts.map +1 -0
- package/dist/database/drizzle/schema/documents.js +100 -0
- package/dist/database/drizzle/schema/documents.js.map +1 -0
- package/dist/database/drizzle/schema/index.d.ts +3084 -0
- package/dist/database/drizzle/schema/index.d.ts.map +1 -0
- package/dist/database/drizzle/schema/index.js +46 -0
- package/dist/database/drizzle/schema/index.js.map +1 -0
- package/dist/database/drizzle/schema/memories.d.ts +435 -0
- package/dist/database/drizzle/schema/memories.d.ts.map +1 -0
- package/dist/database/drizzle/schema/memories.js +73 -0
- package/dist/database/drizzle/schema/memories.js.map +1 -0
- package/dist/database/drizzle/schema/tasks.d.ts +565 -0
- package/dist/database/drizzle/schema/tasks.d.ts.map +1 -0
- package/dist/database/drizzle/schema/tasks.js +84 -0
- package/dist/database/drizzle/schema/tasks.js.map +1 -0
- package/dist/database/health/circuit-breaker.d.ts +81 -0
- package/dist/database/health/circuit-breaker.d.ts.map +1 -0
- package/dist/database/health/circuit-breaker.js +184 -0
- package/dist/database/health/circuit-breaker.js.map +1 -0
- package/dist/database/health/health-monitor.d.ts +69 -0
- package/dist/database/health/health-monitor.d.ts.map +1 -0
- package/dist/database/health/health-monitor.js +174 -0
- package/dist/database/health/health-monitor.js.map +1 -0
- package/dist/database/health/index.d.ts +27 -0
- package/dist/database/health/index.d.ts.map +1 -0
- package/dist/database/health/index.js +23 -0
- package/dist/database/health/index.js.map +1 -0
- package/dist/database/index.d.ts +16 -0
- package/dist/database/index.d.ts.map +1 -0
- package/dist/database/index.js +41 -0
- package/dist/database/index.js.map +1 -0
- package/dist/database/migrations/index.d.ts +34 -0
- package/dist/database/migrations/index.d.ts.map +1 -0
- package/dist/database/migrations/index.js +45 -0
- package/dist/database/migrations/index.js.map +1 -0
- package/dist/database/migrations/migrator.d.ts +77 -0
- package/dist/database/migrations/migrator.d.ts.map +1 -0
- package/dist/database/migrations/migrator.js +258 -0
- package/dist/database/migrations/migrator.js.map +1 -0
- package/dist/database/migrations/versions/001_initial.d.ts +9 -0
- package/dist/database/migrations/versions/001_initial.d.ts.map +1 -0
- package/dist/database/migrations/versions/001_initial.js +183 -0
- package/dist/database/migrations/versions/001_initial.js.map +1 -0
- package/dist/database/types.d.ts +255 -0
- package/dist/database/types.d.ts.map +1 -0
- package/dist/database/types.js +8 -0
- package/dist/database/types.js.map +1 -0
- package/dist/database/vector/embedding-cache.d.ts +92 -0
- package/dist/database/vector/embedding-cache.d.ts.map +1 -0
- package/dist/database/vector/embedding-cache.js +185 -0
- package/dist/database/vector/embedding-cache.js.map +1 -0
- package/dist/database/vector/hnsw-index.d.ts +111 -0
- package/dist/database/vector/hnsw-index.d.ts.map +1 -0
- package/dist/database/vector/hnsw-index.js +337 -0
- package/dist/database/vector/hnsw-index.js.map +1 -0
- package/dist/database/vector/index.d.ts +75 -0
- package/dist/database/vector/index.d.ts.map +1 -0
- package/dist/database/vector/index.js +213 -0
- package/dist/database/vector/index.js.map +1 -0
- package/dist/database/vector/similarity.d.ts +67 -0
- package/dist/database/vector/similarity.d.ts.map +1 -0
- package/dist/database/vector/similarity.js +176 -0
- package/dist/database/vector/similarity.js.map +1 -0
- package/package.json +6 -3
- package/src/database/adapters/base-adapter.ts +171 -0
- package/src/database/adapters/index.ts +224 -0
- package/src/database/adapters/postgres-adapter.ts +285 -0
- package/src/database/adapters/sqlite-adapter.ts +420 -0
- package/src/database/cache/cache-keys.ts +150 -0
- package/src/database/cache/index.ts +44 -0
- package/src/database/cache/query-cache.ts +213 -0
- package/src/database/client.ts +166 -64
- package/src/database/db-config.ts +194 -0
- package/src/database/drizzle/index.ts +66 -0
- package/src/database/drizzle/schema/audit.ts +127 -0
- package/src/database/drizzle/schema/checkpoints.ts +164 -0
- package/src/database/drizzle/schema/conversations.ts +138 -0
- package/src/database/drizzle/schema/documents.ts +157 -0
- package/src/database/drizzle/schema/index.ts +139 -0
- package/src/database/drizzle/schema/memories.ts +127 -0
- package/src/database/drizzle/schema/tasks.ts +129 -0
- package/src/database/health/circuit-breaker.ts +214 -0
- package/src/database/health/health-monitor.ts +224 -0
- package/src/database/health/index.ts +41 -0
- package/src/database/index.ts +157 -0
- package/src/database/migrations/index.ts +52 -0
- package/src/database/migrations/migrator.ts +325 -0
- package/src/database/migrations/versions/001_initial.ts +198 -0
- package/src/database/types.ts +324 -0
- package/src/database/vector/embedding-cache.ts +234 -0
- package/src/database/vector/hnsw-index.ts +452 -0
- package/src/database/vector/index.ts +292 -0
- package/src/database/vector/similarity.ts +198 -0
package/src/database/migrations/migrator.ts
@@ -0,0 +1,325 @@
/**
 * Database Migrator
 *
 * Handles automatic schema migrations with version tracking.
 * Supports both PostgreSQL and SQLite.
 */

import type { DatabaseAdapter, Migration, MigrationResult } from '../types.js';
import { createLogger } from '../../utils/logger.js';
import { MemoryError } from '../../utils/errors.js';

const logger = createLogger('Migrator');

/**
 * Schema version record
 */
interface SchemaVersion {
  version: number;
  name: string;
  applied_at: number | Date;
  checksum: string | null;
}

/**
 * Database migrator for schema version management
 */
export class Migrator {
  private migrations: Migration[] = [];
  private adapter: DatabaseAdapter;

  constructor(adapter: DatabaseAdapter) {
    this.adapter = adapter;
  }

  /**
   * Register a migration
   */
  register(migration: Migration): void {
    // Check for duplicate versions
    if (this.migrations.some((m) => m.version === migration.version)) {
      throw new MemoryError(`Duplicate migration version: ${migration.version}`);
    }

    this.migrations.push(migration);
    this.migrations.sort((a, b) => a.version - b.version);
  }

  /**
   * Register multiple migrations
   */
  registerAll(migrations: Migration[]): void {
    for (const migration of migrations) {
      this.register(migration);
    }
  }

  /**
   * Ensure schema_versions table exists
   */
  private async ensureVersionTable(): Promise<void> {
    const sql = `
      CREATE TABLE IF NOT EXISTS schema_versions (
        version INTEGER PRIMARY KEY,
        name TEXT NOT NULL,
        applied_at ${this.adapter.type === 'postgresql' ? 'TIMESTAMP WITH TIME ZONE' : 'INTEGER'} NOT NULL,
        checksum TEXT
      )
    `;
    await this.adapter.query(sql);
  }

  /**
   * Get current schema version
   */
  async getCurrentVersion(): Promise<number> {
    try {
      await this.ensureVersionTable();
      const result = await this.adapter.query<{ max_version: number | null }>(
        'SELECT MAX(version) as max_version FROM schema_versions'
      );
      return result.rows[0]?.max_version ?? 0;
    } catch (error) {
      logger.debug('Could not get current version, assuming 0', { error });
      return 0;
    }
  }

  /**
   * Get all applied migrations
   */
  async getAppliedMigrations(): Promise<SchemaVersion[]> {
    await this.ensureVersionTable();
    const result = await this.adapter.query<SchemaVersion>(
      'SELECT version, name, applied_at, checksum FROM schema_versions ORDER BY version ASC'
    );
    return result.rows;
  }

  /**
   * Get pending migrations
   */
  async getPendingMigrations(): Promise<Migration[]> {
    const currentVersion = await this.getCurrentVersion();
    return this.migrations.filter((m) => m.version > currentVersion);
  }

  /**
   * Run all pending migrations
   */
  async migrate(): Promise<MigrationResult[]> {
    const pending = await this.getPendingMigrations();
    const results: MigrationResult[] = [];

    if (pending.length === 0) {
      logger.debug('No pending migrations');
      return results;
    }

    logger.info('Running migrations', { count: pending.length });

    for (const migration of pending) {
      const result = await this.runMigration(migration);
      results.push(result);

      if (!result.success) {
        logger.error('Migration failed, stopping', { version: migration.version });
        break;
      }
    }

    return results;
  }

  /**
   * Run a single migration
   */
  private async runMigration(migration: Migration): Promise<MigrationResult> {
    const start = Date.now();

    logger.info('Running migration', {
      version: migration.version,
      name: migration.name,
    });

    try {
      await this.adapter.transaction(async (client) => {
        // Run the migration
        await migration.up(this.adapter);

        // Record the migration
        const now = this.adapter.type === 'postgresql'
          ? 'NOW()'
          : String(Date.now());

        await this.adapter.query(
          `INSERT INTO schema_versions (version, name, applied_at, checksum) VALUES ($1, $2, ${
            this.adapter.type === 'postgresql' ? 'NOW()' : '$3'
          }, $${this.adapter.type === 'postgresql' ? '3' : '4'})`,
          this.adapter.type === 'postgresql'
            ? [migration.version, migration.name, this.computeChecksum(migration)]
            : [migration.version, migration.name, Date.now(), this.computeChecksum(migration)]
        );
      });

      const durationMs = Date.now() - start;
      logger.info('Migration completed', {
        version: migration.version,
        durationMs,
      });

      return {
        version: migration.version,
        name: migration.name,
        success: true,
        durationMs,
      };
    } catch (error) {
      const durationMs = Date.now() - start;
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';

      logger.error('Migration failed', {
        version: migration.version,
        error: errorMessage,
      });

      return {
        version: migration.version,
        name: migration.name,
        success: false,
        error: errorMessage,
        durationMs,
      };
    }
  }

  /**
   * Rollback to a specific version
   */
  async rollback(targetVersion: number): Promise<MigrationResult[]> {
    const currentVersion = await this.getCurrentVersion();
    const results: MigrationResult[] = [];

    if (targetVersion >= currentVersion) {
      logger.debug('Nothing to rollback');
      return results;
    }

    // Get migrations to rollback (in reverse order)
    const toRollback = this.migrations
      .filter((m) => m.version > targetVersion && m.version <= currentVersion)
      .reverse();

    logger.info('Rolling back migrations', { count: toRollback.length });

    for (const migration of toRollback) {
      const result = await this.runRollback(migration);
      results.push(result);

      if (!result.success) {
        logger.error('Rollback failed, stopping', { version: migration.version });
        break;
      }
    }

    return results;
  }

  /**
   * Run a single rollback
   */
  private async runRollback(migration: Migration): Promise<MigrationResult> {
    const start = Date.now();

    logger.info('Rolling back migration', {
      version: migration.version,
      name: migration.name,
    });

    try {
      await this.adapter.transaction(async () => {
        // Run the down migration
        await migration.down(this.adapter);

        // Remove the migration record
        await this.adapter.query(
          'DELETE FROM schema_versions WHERE version = $1',
          [migration.version]
        );
      });

      const durationMs = Date.now() - start;
      logger.info('Rollback completed', {
        version: migration.version,
        durationMs,
      });

      return {
        version: migration.version,
        name: migration.name,
        success: true,
        durationMs,
      };
    } catch (error) {
      const durationMs = Date.now() - start;
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';

      logger.error('Rollback failed', {
        version: migration.version,
        error: errorMessage,
      });

      return {
        version: migration.version,
        name: migration.name,
        success: false,
        error: errorMessage,
        durationMs,
      };
    }
  }

  /**
   * Compute checksum for a migration
   */
  private computeChecksum(migration: Migration): string {
    // Simple checksum based on function string representation
    const str = migration.up.toString() + migration.down.toString();
    let hash = 0;
    for (let i = 0; i < str.length; i++) {
      const char = str.charCodeAt(i);
      hash = ((hash << 5) - hash) + char;
      hash = hash & hash; // Convert to 32bit integer
    }
    return hash.toString(16);
  }

  /**
   * Verify migration integrity
   */
  async verify(): Promise<{ valid: boolean; issues: string[] }> {
    const applied = await this.getAppliedMigrations();
    const issues: string[] = [];

    for (const record of applied) {
      const migration = this.migrations.find((m) => m.version === record.version);

      if (!migration) {
        issues.push(`Applied migration v${record.version} not found in registered migrations`);
        continue;
      }

      const currentChecksum = this.computeChecksum(migration);
      if (record.checksum && record.checksum !== currentChecksum) {
        issues.push(`Migration v${record.version} checksum mismatch (code may have changed)`);
      }
    }

    return {
      valid: issues.length === 0,
      issues,
    };
  }
}

export default Migrator;
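
For orientation, a minimal usage sketch of the Migrator API shown above. It is illustrative only: the runMigrations helper, the way the adapter is obtained, and the import paths (assumed relative to src/database/migrations/) are assumptions, while Migrator, registerAll, migrate, verify, and migration001Initial come directly from this diff.

import { Migrator } from './migrator.js';
import { migration001Initial } from './versions/001_initial.js';
import type { DatabaseAdapter } from '../types.js';

// Hypothetical helper: apply all registered migrations against whatever
// DatabaseAdapter implementation the application has constructed.
async function runMigrations(adapter: DatabaseAdapter): Promise<void> {
  const migrator = new Migrator(adapter);
  migrator.registerAll([migration001Initial]);

  // migrate() applies everything newer than the recorded schema version,
  // stopping at the first failure; each result carries success and timing info.
  const results = await migrator.migrate();
  for (const r of results) {
    console.log(`v${r.version} ${r.name}: ${r.success ? 'ok' : r.error ?? 'failed'} (${r.durationMs}ms)`);
  }

  // verify() recomputes checksums to flag applied migrations whose code changed.
  const { valid, issues } = await migrator.verify();
  if (!valid) {
    console.warn('Migration integrity issues:', issues);
  }
}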
package/src/database/migrations/versions/001_initial.ts
@@ -0,0 +1,198 @@
/**
 * Migration 001: Initial Schema
 *
 * Creates all core tables for the Family AI Agent system.
 */

import type { DatabaseAdapter, Migration } from '../../types.js';

export const migration001Initial: Migration = {
  version: 1,
  name: 'initial_schema',

  async up(adapter: DatabaseAdapter): Promise<void> {
    if (adapter.type === 'postgresql') {
      // Enable pgvector extension
      await adapter.query('CREATE EXTENSION IF NOT EXISTS vector');
    }

    // Note: For SQLite, tables are created in sqlite-adapter.ts initialize()
    // This migration is primarily for PostgreSQL to ensure pgvector is enabled

    if (adapter.type === 'postgresql') {
      // Create conversations table
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS conversations (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          thread_id TEXT NOT NULL,
          user_id TEXT,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
          updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL
        )
      `);
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_conversations_thread ON conversations(thread_id)');
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_conversations_user ON conversations(user_id)');

      // Create messages table
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS messages (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          conversation_id UUID NOT NULL REFERENCES conversations(id) ON DELETE CASCADE,
          role TEXT NOT NULL,
          content TEXT NOT NULL,
          metadata JSONB DEFAULT '{}' NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL
        )
      `);
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(conversation_id)');

      // Create long_term_memories table with pgvector
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS long_term_memories (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          user_id TEXT,
          memory_type TEXT NOT NULL,
          content TEXT NOT NULL,
          embedding vector(1536) NOT NULL,
          importance REAL DEFAULT 0.5 NOT NULL,
          access_count INTEGER DEFAULT 0 NOT NULL,
          last_accessed TIMESTAMP WITH TIME ZONE,
          metadata JSONB DEFAULT '{}' NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
          updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL
        )
      `);
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_memories_user ON long_term_memories(user_id)');
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_memories_type ON long_term_memories(memory_type)');
      await adapter.query(`
        CREATE INDEX IF NOT EXISTS idx_memories_embedding
        ON long_term_memories USING hnsw (embedding vector_cosine_ops)
      `);

      // Create documents table
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS documents (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          user_id TEXT,
          filename TEXT NOT NULL,
          file_type TEXT,
          file_size INTEGER,
          content TEXT,
          metadata JSONB DEFAULT '{}' NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL
        )
      `);
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_documents_user ON documents(user_id)');

      // Create document_chunks table with pgvector
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS document_chunks (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          document_id UUID NOT NULL REFERENCES documents(id) ON DELETE CASCADE,
          chunk_index INTEGER NOT NULL,
          content TEXT NOT NULL,
          embedding vector(1536) NOT NULL,
          metadata JSONB DEFAULT '{}' NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL
        )
      `);
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_chunks_document ON document_chunks(document_id)');
      await adapter.query(`
        CREATE INDEX IF NOT EXISTS idx_chunks_embedding
        ON document_chunks USING hnsw (embedding vector_cosine_ops)
      `);

      // Create checkpoints table
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS checkpoints (
          thread_id TEXT NOT NULL,
          checkpoint_ns TEXT NOT NULL DEFAULT '',
          checkpoint_id TEXT NOT NULL,
          parent_checkpoint_id TEXT,
          type TEXT,
          checkpoint JSONB NOT NULL,
          metadata JSONB DEFAULT '{}' NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
          PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id)
        )
      `);

      // Create checkpoint_writes table
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS checkpoint_writes (
          thread_id TEXT NOT NULL,
          checkpoint_ns TEXT NOT NULL DEFAULT '',
          checkpoint_id TEXT NOT NULL,
          task_id TEXT NOT NULL,
          idx INTEGER NOT NULL,
          channel TEXT NOT NULL,
          type TEXT,
          value JSONB,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
          PRIMARY KEY (thread_id, checkpoint_ns, checkpoint_id, task_id, idx)
        )
      `);

      // Create audit_logs table
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS audit_logs (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          user_id TEXT,
          agent_id TEXT,
          action_type TEXT NOT NULL,
          action_details JSONB DEFAULT '{}' NOT NULL,
          input_hash TEXT,
          output_hash TEXT,
          status TEXT DEFAULT 'success' NOT NULL,
          error_message TEXT,
          execution_time_ms INTEGER,
          ip_address TEXT,
          user_agent TEXT,
          success BOOLEAN DEFAULT true NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL
        )
      `);
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_audit_user ON audit_logs(user_id)');
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_audit_created ON audit_logs(created_at)');

      // Create tasks table
      await adapter.query(`
        CREATE TABLE IF NOT EXISTS tasks (
          id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
          user_id TEXT,
          task_type TEXT NOT NULL,
          priority INTEGER DEFAULT 0 NOT NULL,
          status TEXT DEFAULT 'pending' NOT NULL,
          payload JSONB DEFAULT '{}' NOT NULL,
          result JSONB,
          error_message TEXT,
          scheduled_at TIMESTAMP WITH TIME ZONE,
          started_at TIMESTAMP WITH TIME ZONE,
          completed_at TIMESTAMP WITH TIME ZONE,
          retry_count INTEGER DEFAULT 0 NOT NULL,
          max_retries INTEGER DEFAULT 3 NOT NULL,
          created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL,
          updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() NOT NULL
        )
      `);
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_tasks_status ON tasks(status)');
      await adapter.query('CREATE INDEX IF NOT EXISTS idx_tasks_user ON tasks(user_id)');
    }
  },

  async down(adapter: DatabaseAdapter): Promise<void> {
    if (adapter.type === 'postgresql') {
      await adapter.query('DROP TABLE IF EXISTS checkpoint_writes CASCADE');
      await adapter.query('DROP TABLE IF EXISTS checkpoints CASCADE');
      await adapter.query('DROP TABLE IF EXISTS document_chunks CASCADE');
      await adapter.query('DROP TABLE IF EXISTS documents CASCADE');
      await adapter.query('DROP TABLE IF EXISTS long_term_memories CASCADE');
      await adapter.query('DROP TABLE IF EXISTS messages CASCADE');
      await adapter.query('DROP TABLE IF EXISTS conversations CASCADE');
      await adapter.query('DROP TABLE IF EXISTS audit_logs CASCADE');
      await adapter.query('DROP TABLE IF EXISTS tasks CASCADE');
    }
  },
};

export default migration001Initial;
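
As a sketch of how a later schema change could plug into this system: the Migration shape used above is just a numeric version, a name, and up/down functions that receive the adapter and may branch on adapter.type for engine-specific SQL. The migration below (migration002AddTags, adding a tags column to long_term_memories) is entirely hypothetical and not part of this release; it only illustrates the pattern under those assumptions.

import type { DatabaseAdapter, Migration } from '../../types.js';

// Hypothetical example, not shipped in 1.0.7: adds a tags column to
// long_term_memories, stored as JSONB on PostgreSQL and as JSON text on SQLite.
export const migration002AddTags: Migration = {
  version: 2,
  name: 'add_memory_tags',

  async up(adapter: DatabaseAdapter): Promise<void> {
    const jsonType = adapter.type === 'postgresql' ? 'JSONB' : 'TEXT';
    await adapter.query(
      `ALTER TABLE long_term_memories ADD COLUMN tags ${jsonType} DEFAULT '[]' NOT NULL`
    );
  },

  async down(adapter: DatabaseAdapter): Promise<void> {
    // DROP COLUMN requires SQLite 3.35+; older builds would need a table rebuild.
    await adapter.query('ALTER TABLE long_term_memories DROP COLUMN tags');
  },
};

export default migration002AddTags;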