@elizaos/plugin-sql 1.6.5-alpha.2 → 1.6.5-alpha.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/index.browser.js +1040 -379
- package/dist/browser/index.browser.js.map +24 -21
- package/dist/browser/tsconfig.build.tsbuildinfo +1 -1
- package/dist/node/index.node.js +1132 -528
- package/dist/node/index.node.js.map +27 -25
- package/dist/node/tsconfig.build.node.tsbuildinfo +1 -1
- package/package.json +6 -4
@@ -35,6 +35,44 @@ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require
 throw Error('Dynamic require of "' + x + '" is not supported');
 });
 
+// src/schema/agent.ts
+import { sql } from "drizzle-orm";
+import { boolean, jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
+var agentTable;
+var init_agent = __esm(() => {
+agentTable = pgTable("agents", {
+id: uuid("id").primaryKey().defaultRandom(),
+enabled: boolean("enabled").default(true).notNull(),
+server_id: uuid("server_id"),
+createdAt: timestamp("created_at", { withTimezone: true }).default(sql`now()`).notNull(),
+updatedAt: timestamp("updated_at", { withTimezone: true }).default(sql`now()`).notNull(),
+name: text("name").notNull(),
+username: text("username"),
+system: text("system").default(""),
+bio: jsonb("bio").$type().default(sql`'[]'::jsonb`),
+messageExamples: jsonb("message_examples").$type().default(sql`'[]'::jsonb`).notNull(),
+postExamples: jsonb("post_examples").$type().default(sql`'[]'::jsonb`).notNull(),
+topics: jsonb("topics").$type().default(sql`'[]'::jsonb`).notNull(),
+adjectives: jsonb("adjectives").$type().default(sql`'[]'::jsonb`).notNull(),
+knowledge: jsonb("knowledge").$type().default(sql`'[]'::jsonb`).notNull(),
+plugins: jsonb("plugins").$type().default(sql`'[]'::jsonb`).notNull(),
+settings: jsonb("settings").$type().default(sql`'{}'::jsonb`).notNull(),
+style: jsonb("style").$type().default(sql`'{}'::jsonb`).notNull()
+});
+});
+
+// src/schema/server.ts
+import { sql as sql10 } from "drizzle-orm";
+import { pgTable as pgTable10, timestamp as timestamp10, uuid as uuid10 } from "drizzle-orm/pg-core";
+var serverTable;
+var init_server = __esm(() => {
+serverTable = pgTable10("servers", {
+id: uuid10("id").primaryKey(),
+createdAt: timestamp10("created_at", { withTimezone: true }).default(sql10`now()`).notNull(),
+updatedAt: timestamp10("updated_at", { withTimezone: true }).default(sql10`now()`).notNull()
+});
+});
+
 // src/runtime-migrator/storage/migration-tracker.ts
 import { sql as sql17 } from "drizzle-orm";
 
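The hunk above adds the bundled `agents` and `servers` table definitions. As a minimal sketch only, the un-bundled source probably declares them with Drizzle's `pgTable` builder roughly as below; the `$type<...>()` generics and the export names are assumptions, since the bundler erases types and wraps the declarations in `__esm` lazy initializers.

```ts
// Sketch of an un-bundled Drizzle schema in the same shape as agentTable above.
// Column names and defaults mirror the diff; the generics are assumed.
import { sql } from "drizzle-orm";
import { boolean, jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";

export const agentTable = pgTable("agents", {
  id: uuid("id").primaryKey().defaultRandom(),
  enabled: boolean("enabled").default(true).notNull(),
  server_id: uuid("server_id"),
  createdAt: timestamp("created_at", { withTimezone: true }).default(sql`now()`).notNull(),
  updatedAt: timestamp("updated_at", { withTimezone: true }).default(sql`now()`).notNull(),
  name: text("name").notNull(),
  bio: jsonb("bio").$type<string[]>().default(sql`'[]'::jsonb`),
  settings: jsonb("settings").$type<Record<string, unknown>>().default(sql`'{}'::jsonb`).notNull(),
});
```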
@@ -213,14 +251,14 @@ class ExtensionManager {
 for (const extension of extensions) {
 try {
 if (!/^[a-zA-Z0-9_-]+$/.test(extension)) {
-logger.warn(
+logger.warn({ src: "plugin:sql", extension }, "Invalid extension name - contains invalid characters");
 continue;
 }
 await this.db.execute(sql20`CREATE EXTENSION IF NOT EXISTS ${sql20.identifier(extension)}`);
-logger.debug(
+logger.debug({ src: "plugin:sql", extension }, "Extension installed");
 } catch (error) {
 const errorMessage = error instanceof Error ? error.message : String(error);
-logger.warn(
+logger.warn({ src: "plugin:sql", extension, error: errorMessage }, "Could not install extension");
 }
 }
 }
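The recurring pattern in this release is visible here: free-form log strings are replaced with a context object followed by a short message. A sketch of that calling convention, using only the signature the diff itself demonstrates (pino-style "object, message"):

```ts
// Sketch of the structured-logging convention adopted in this release.
// The field names (src, extension, error) mirror what the diff shows.
import { logger } from "@elizaos/core";

function reportExtensionFailure(extension: string, error: unknown): void {
  const message = error instanceof Error ? error.message : String(error);
  logger.warn({ src: "plugin:sql", extension, error: message }, "Could not install extension");
}
```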
@@ -18340,10 +18378,7 @@ async function generateMigrationSQL(previousSnapshot, currentSnapshot, diff) {
 }
 const dataLossCheck = checkForDataLoss(diff);
 if (dataLossCheck.warnings.length > 0) {
-logger2.warn("
-for (const warning of dataLossCheck.warnings) {
-logger2.warn(` - ${warning}`);
-}
+logger2.warn({ src: "plugin:sql", warnings: dataLossCheck.warnings }, "Schema changes may cause data loss");
 }
 const schemasToCreate = new Set;
 for (const tableName of diff.tables.created) {
@@ -18768,7 +18803,7 @@ class DatabaseIntrospector {
 this.db = db;
 }
 async introspectSchema(schemaName = "public") {
-logger4.info(
+logger4.info({ src: "plugin:sql", schemaName }, "Starting database introspection");
 const tables = {};
 const schemas = {};
 const enums = {};
@@ -18776,7 +18811,7 @@ class DatabaseIntrospector {
 for (const tableInfo of allTables) {
 const tableName = tableInfo.table_name;
 const tableSchema = tableInfo.table_schema || "public";
-logger4.debug(
+logger4.debug({ src: "plugin:sql", tableSchema, tableName }, "Introspecting table");
 const columns = await this.getColumns(tableSchema, tableName);
 const columnsObject = {};
 const uniqueConstraintObject = {};
@@ -18793,15 +18828,17 @@ class DatabaseIntrospector {
 const indexesObject = {};
 for (const idx of indexes) {
 if (!idx.is_primary && !idx.is_unique_constraint) {
-
-
-
-
-
-
-
-
-
+if (idx.columns && Array.isArray(idx.columns) && idx.columns.length > 0) {
+indexesObject[idx.name] = {
+name: idx.name,
+columns: idx.columns.map((col) => ({
+expression: col,
+isExpression: false
+})),
+isUnique: idx.is_unique,
+method: idx.method || "btree"
+};
+}
 }
 }
 const foreignKeys = await this.getForeignKeys(tableSchema, tableName);
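The new introspection code above only records an index when it actually has columns, guarding against malformed catalog rows. Inferred from that hunk (this is not an exported type of the package), the entries it builds look roughly like:

```ts
// Assumed shape of the index entries the introspector now builds,
// inferred from the diff above.
interface IntrospectedIndex {
  name: string;
  columns: Array<{ expression: string; isExpression: boolean }>;
  isUnique: boolean;
  method: string; // falls back to "btree" when no method is reported
}
```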
@@ -18869,7 +18906,7 @@ class DatabaseIntrospector {
 }
 enums[key].values.push(enumInfo.value);
 }
-logger4.info(
+logger4.info({ src: "plugin:sql", tableCount: Object.keys(tables).length }, "Database introspection complete");
 return {
 version: "7",
 dialect: "postgresql",
@@ -19129,7 +19166,7 @@ class RuntimeMigrator {
 }
 }
 for (const schemaName of schemasToCreate) {
-logger5.debug(
+logger5.debug({ src: "plugin:sql", schemaName }, "Ensuring schema exists");
 await this.db.execute(sql22.raw(`CREATE SCHEMA IF NOT EXISTS "${schemaName}"`));
 }
 }
@@ -19140,10 +19177,10 @@ class RuntimeMigrator {
 const tableData = table;
 const actualSchema = tableData.schema || "public";
 if (!isCorePLugin && actualSchema === "public") {
-logger5.warn(
+logger5.warn({ src: "plugin:sql", pluginName, tableName: tableData.name, expectedSchema }, "Plugin table is using public schema - consider using pgSchema for better isolation");
 }
 if (isCorePLugin && actualSchema !== "public") {
-logger5.warn(
+logger5.warn({ src: "plugin:sql", pluginName: "@elizaos/plugin-sql", tableName: tableData.name, actualSchema }, "Core plugin table should use public schema");
 }
 }
 }
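The warning above nudges non-core plugins toward `pgSchema` so their tables do not land in `public`. A minimal sketch of what that looks like with Drizzle; the schema and table names are illustrative, not part of the package:

```ts
// Sketch of the isolation the warning recommends: a non-core plugin keeps
// its tables in its own Postgres schema instead of "public".
import { pgSchema, text, uuid } from "drizzle-orm/pg-core";

const myPluginSchema = pgSchema("my_plugin");

export const notesTable = myPluginSchema.table("notes", {
  id: uuid("id").primaryKey().defaultRandom(),
  body: text("body").notNull(),
});
```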
@@ -19354,13 +19391,13 @@ class RuntimeMigrator {
 }
 }
 }
-logger5.debug(
+logger5.debug({ src: "plugin:sql", urlPreview: url.substring(0, 50) }, "Connection string did not match any PostgreSQL patterns");
 return false;
 }
 async initialize() {
-logger5.info("
+logger5.info({ src: "plugin:sql" }, "Initializing migration system");
 await this.migrationTracker.ensureTables();
-logger5.info("
+logger5.info({ src: "plugin:sql" }, "Migration system initialized");
 }
 async migrate(pluginName, schema, options = {}) {
 const lockId = this.getAdvisoryLockId(pluginName);
@@ -19369,46 +19406,46 @@ class RuntimeMigrator {
 }
 let lockAcquired = false;
 try {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Starting migration for plugin");
 await this.initialize();
 const postgresUrl = process.env.POSTGRES_URL || process.env.DATABASE_URL || "";
 const isRealPostgres = this.isRealPostgresDatabase(postgresUrl);
 if (isRealPostgres) {
 try {
-logger5.debug(
+logger5.debug({ src: "plugin:sql", pluginName }, "Using PostgreSQL advisory locks");
 const lockIdStr = lockId.toString();
 const lockResult = await this.db.execute(sql22`SELECT pg_try_advisory_lock(CAST(${lockIdStr} AS bigint)) as acquired`);
 lockAcquired = lockResult.rows[0]?.acquired === true;
 if (!lockAcquired) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Migration already in progress, waiting for lock");
 await this.db.execute(sql22`SELECT pg_advisory_lock(CAST(${lockIdStr} AS bigint))`);
 lockAcquired = true;
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Lock acquired");
 } else {
-logger5.debug(
+logger5.debug({ src: "plugin:sql", pluginName, lockId: lockIdStr }, "Advisory lock acquired");
 }
 } catch (lockError) {
-logger5.warn(
+logger5.warn({ src: "plugin:sql", pluginName, error: lockError instanceof Error ? lockError.message : String(lockError) }, "Failed to acquire advisory lock, continuing without lock");
 lockAcquired = false;
 }
 } else {
-logger5.debug(
+logger5.debug({ src: "plugin:sql" }, "Development database detected, skipping advisory locks");
 }
-await this.extensionManager.installRequiredExtensions(["vector", "fuzzystrmatch"]);
+await this.extensionManager.installRequiredExtensions(["vector", "fuzzystrmatch", "pgcrypto"]);
 const currentSnapshot = await generateSnapshot(schema);
 await this.ensureSchemasExist(currentSnapshot);
 this.validateSchemaUsage(pluginName, currentSnapshot);
 const currentHash = hashSnapshot(currentSnapshot);
 const lastMigration = await this.migrationTracker.getLastMigration(pluginName);
 if (lastMigration && lastMigration.hash === currentHash) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName, hash: currentHash }, "No changes detected, skipping migration");
 return;
 }
 let previousSnapshot = await this.snapshotStorage.getLatestSnapshot(pluginName);
 if (!previousSnapshot && Object.keys(currentSnapshot.tables).length > 0) {
 const hasExistingTables = await this.introspector.hasExistingTables(pluginName);
 if (hasExistingTables) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "No snapshot found but tables exist in database, introspecting");
 const schemaName = this.getExpectedSchemaName(pluginName);
 const introspectedSnapshot = await this.introspector.introspectSchema(schemaName);
 if (Object.keys(introspectedSnapshot.tables).length > 0) {
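The migrate path above serializes concurrent migrators with Postgres advisory locks: try the lock, fall back to a blocking wait, and always unlock afterwards. A minimal sketch of that dance under the same assumptions (the lock id derivation is the caller's concern; `db` stands in for the adapter's Drizzle instance):

```ts
// Sketch of the advisory-lock pattern used by the migrator above.
import { sql } from "drizzle-orm";

type Db = { execute: (q: unknown) => Promise<{ rows: Array<Record<string, unknown>> }> };

async function withAdvisoryLock(db: Db, lockId: string, fn: () => Promise<void>): Promise<void> {
  const tryResult = await db.execute(sql`SELECT pg_try_advisory_lock(CAST(${lockId} AS bigint)) AS acquired`);
  if (tryResult.rows[0]?.acquired !== true) {
    // Another process holds the lock: block until it is released.
    await db.execute(sql`SELECT pg_advisory_lock(CAST(${lockId} AS bigint))`);
  }
  try {
    await fn();
  } finally {
    await db.execute(sql`SELECT pg_advisory_unlock(CAST(${lockId} AS bigint))`);
  }
}
```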
@@ -19416,15 +19453,15 @@ class RuntimeMigrator {
 await this.journalStorage.updateJournal(pluginName, 0, `introspected_${Date.now()}`, true);
 const introspectedHash = hashSnapshot(introspectedSnapshot);
 await this.migrationTracker.recordMigration(pluginName, introspectedHash, Date.now());
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Created initial snapshot from existing database");
 previousSnapshot = introspectedSnapshot;
 }
 }
 }
 if (!hasChanges(previousSnapshot, currentSnapshot)) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "No schema changes");
 if (!previousSnapshot && Object.keys(currentSnapshot.tables).length === 0) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Recording empty schema");
 await this.migrationTracker.recordMigration(pluginName, currentHash, Date.now());
 const idx = await this.journalStorage.getNextIdx(pluginName);
 const tag = this.generateMigrationTag(idx, pluginName);
@@ -19435,7 +19472,7 @@ class RuntimeMigrator {
 }
 const diff = await calculateDiff(previousSnapshot, currentSnapshot);
 if (!hasDiffChanges(diff)) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "No actionable changes");
 return;
 }
 const dataLossCheck = checkForDataLoss(diff);
@@ -19443,55 +19480,34 @@ class RuntimeMigrator {
 const isProduction = false;
 const allowDestructive = options.force || options.allowDataLoss || process.env.ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS === "true";
 if (!allowDestructive) {
-logger5.error("
-logger5.error(`[RuntimeMigrator] Plugin: ${pluginName}`);
-logger5.error(`[RuntimeMigrator] Environment: ${isProduction ? "PRODUCTION" : "DEVELOPMENT"}`);
-logger5.error("[RuntimeMigrator] Destructive operations detected:");
-for (const warning of dataLossCheck.warnings) {
-logger5.error(`[RuntimeMigrator] - ${warning}`);
-}
-logger5.error("[RuntimeMigrator] To proceed with destructive migrations:");
-logger5.error("[RuntimeMigrator] 1. Set environment variable: export ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true");
-logger5.error("[RuntimeMigrator] 2. Or use option: migrate(plugin, schema, { force: true })");
-if (isProduction) {
-logger5.error("[RuntimeMigrator] 3. For production, consider using drizzle-kit for manual migration");
-}
+logger5.error({ src: "plugin:sql", pluginName, environment: isProduction ? "PRODUCTION" : "DEVELOPMENT", warnings: dataLossCheck.warnings }, "Destructive migration blocked - set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true or use force option");
 const errorMessage = isProduction ? `Destructive migration blocked in production for ${pluginName}. Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true or use drizzle-kit.` : `Destructive migration blocked for ${pluginName}. Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true to proceed.`;
 throw new Error(errorMessage);
 }
 if (dataLossCheck.requiresConfirmation) {
-logger5.warn("
-logger5.warn(`[RuntimeMigrator] Plugin: ${pluginName}`);
-logger5.warn("[RuntimeMigrator] The following operations will be performed:");
-for (const warning of dataLossCheck.warnings) {
-logger5.warn(`[RuntimeMigrator] ⚠️ ${warning}`);
-}
+logger5.warn({ src: "plugin:sql", pluginName, warnings: dataLossCheck.warnings }, "Proceeding with destructive migration");
 }
 }
 const sqlStatements = await generateMigrationSQL(previousSnapshot, currentSnapshot, diff);
 if (sqlStatements.length === 0) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "No SQL statements to execute");
 return;
 }
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName, statementCount: sqlStatements.length }, "Executing SQL statements");
 if (options.verbose) {
 sqlStatements.forEach((stmt, i2) => {
-logger5.debug(
+logger5.debug({ src: "plugin:sql", statementIndex: i2 + 1, statement: stmt }, "SQL statement");
 });
 }
 if (options.dryRun) {
-logger5.info("
-logger5.info("[RuntimeMigrator] Would execute:");
-sqlStatements.forEach((stmt, i2) => {
-logger5.info(` ${i2 + 1}. ${stmt}`);
-});
+logger5.info({ src: "plugin:sql", pluginName, statements: sqlStatements }, "DRY RUN mode - not executing statements");
 return;
 }
 await this.executeMigration(pluginName, currentSnapshot, currentHash, sqlStatements);
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Migration completed successfully");
 return;
 } catch (error) {
-logger5.error(
+logger5.error({ src: "plugin:sql", pluginName, error: error instanceof Error ? error.message : String(error) }, "Migration failed");
 throw error;
 } finally {
 const postgresUrl = process.env.POSTGRES_URL || process.env.DATABASE_URL || "";
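The gate above blocks data-losing migrations unless they are explicitly allowed, either through the environment flag or a per-call option, and `dryRun` only logs the generated SQL. A usage sketch under those assumptions (the plugin name and the loose `migrator` typing are illustrative):

```ts
// Usage sketch for the destructive-migration gate shown above.
type MigrateOptions = { force?: boolean; allowDataLoss?: boolean; dryRun?: boolean; verbose?: boolean };

async function runDestructiveMigration(
  migrator: { migrate: (plugin: string, schema: unknown, opts?: MigrateOptions) => Promise<void> },
  schema: unknown
): Promise<void> {
  // Option 1: process-wide flag read by the migrator.
  process.env.ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS = "true";
  // Option 2: per-call override; dryRun/verbose only affect logging.
  await migrator.migrate("@acme/plugin-example", schema, { force: true, dryRun: false, verbose: true });
}
```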
@@ -19500,9 +19516,9 @@ class RuntimeMigrator {
 try {
 const lockIdStr = lockId.toString();
 await this.db.execute(sql22`SELECT pg_advisory_unlock(CAST(${lockIdStr} AS bigint))`);
-logger5.debug(
+logger5.debug({ src: "plugin:sql", pluginName }, "Advisory lock released");
 } catch (unlockError) {
-logger5.warn(
+logger5.warn({ src: "plugin:sql", pluginName, error: unlockError instanceof Error ? unlockError.message : String(unlockError) }, "Failed to release advisory lock");
 }
 }
 }
@@ -19513,7 +19529,7 @@ class RuntimeMigrator {
 await this.db.execute(sql22`BEGIN`);
 transactionStarted = true;
 for (const stmt of sqlStatements) {
-logger5.debug(
+logger5.debug({ src: "plugin:sql", statement: stmt }, "Executing SQL statement");
 await this.db.execute(sql22.raw(stmt));
 }
 const idx = await this.journalStorage.getNextIdx(pluginName);
@@ -19522,14 +19538,14 @@ class RuntimeMigrator {
 await this.journalStorage.updateJournal(pluginName, idx, tag, true);
 await this.snapshotStorage.saveSnapshot(pluginName, idx, snapshot);
 await this.db.execute(sql22`COMMIT`);
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName, tag }, "Recorded migration");
 } catch (error) {
 if (transactionStarted) {
 try {
 await this.db.execute(sql22`ROLLBACK`);
-logger5.error("
+logger5.error({ src: "plugin:sql", error: error instanceof Error ? error.message : String(error) }, "Migration failed, rolled back");
 } catch (rollbackError) {
-logger5.error("
+logger5.error({ src: "plugin:sql", error: rollbackError instanceof Error ? rollbackError.message : String(rollbackError) }, "Failed to rollback transaction");
 }
 }
 throw error;
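Execution above follows a classic explicit-transaction shape: run every generated statement between BEGIN and COMMIT, and roll back on any failure before rethrowing. Reduced to a sketch (journal and snapshot bookkeeping omitted):

```ts
// The execute/rollback pattern used above, as a minimal sketch.
import { sql } from "drizzle-orm";

async function applyStatements(db: { execute: (q: unknown) => Promise<unknown> }, statements: string[]): Promise<void> {
  await db.execute(sql`BEGIN`);
  try {
    for (const stmt of statements) {
      await db.execute(sql.raw(stmt)); // each generated DDL statement, in order
    }
    await db.execute(sql`COMMIT`);
  } catch (error) {
    await db.execute(sql`ROLLBACK`);
    throw error;
  }
}
```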
@@ -19552,31 +19568,31 @@ class RuntimeMigrator {
 };
 }
 async reset(pluginName) {
-logger5.warn(
+logger5.warn({ src: "plugin:sql", pluginName }, "Resetting migrations");
 await this.db.execute(sql22`DELETE FROM migrations._migrations WHERE plugin_name = ${pluginName}`);
 await this.db.execute(sql22`DELETE FROM migrations._journal WHERE plugin_name = ${pluginName}`);
 await this.db.execute(sql22`DELETE FROM migrations._snapshots WHERE plugin_name = ${pluginName}`);
-logger5.warn(
+logger5.warn({ src: "plugin:sql", pluginName }, "Reset complete");
 }
 async checkMigration(pluginName, schema) {
 try {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Checking migration");
 const currentSnapshot = await generateSnapshot(schema);
 const previousSnapshot = await this.snapshotStorage.getLatestSnapshot(pluginName);
 if (!hasChanges(previousSnapshot, currentSnapshot)) {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "No changes detected");
 return null;
 }
 const diff = await calculateDiff(previousSnapshot, currentSnapshot);
 const dataLossCheck = checkForDataLoss(diff);
 if (dataLossCheck.hasDataLoss) {
-logger5.warn(
+logger5.warn({ src: "plugin:sql", pluginName }, "Migration would cause data loss");
 } else {
-logger5.info(
+logger5.info({ src: "plugin:sql", pluginName }, "Migration is safe (no data loss)");
 }
 return dataLossCheck;
 } catch (error) {
-logger5.error(
+logger5.error({ src: "plugin:sql", pluginName, error: error instanceof Error ? error.message : String(error) }, "Failed to check migration");
 throw error;
 }
 }
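As the hunk above shows, `checkMigration` resolves to `null` when nothing changed and otherwise to the data-loss check result. A usage sketch; the result field names (`hasDataLoss`, `warnings`) mirror the diff and should be treated as assumptions about the exact shape:

```ts
// Preflight sketch around checkMigration, based on the fields visible in the diff.
async function preflight(
  migrator: { checkMigration: (plugin: string, schema: unknown) => Promise<{ hasDataLoss: boolean; warnings: string[] } | null> },
  schema: unknown
): Promise<void> {
  const result = await migrator.checkMigration("@acme/plugin-example", schema);
  if (result === null) {
    console.log("No schema changes pending");
  } else if (result.hasDataLoss) {
    console.warn("Pending migration would lose data:", result.warnings);
  }
}
```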
@@ -19598,12 +19614,660 @@ var init_runtime_migrator2 = __esm(() => {
 init_runtime_migrator();
 });
 
+// src/migrations.ts
+import { logger as logger6 } from "@elizaos/core";
+import { sql as sql23 } from "drizzle-orm";
+async function migrateToEntityRLS(adapter) {
+const db = adapter.db;
+try {
+await db.execute(sql23`SELECT 1 FROM pg_tables LIMIT 1`);
+} catch {
+logger6.debug("[Migration] ⊘ Not PostgreSQL, skipping PostgreSQL-specific migrations");
+return;
+}
+logger6.info("[Migration] Starting develop → feat/entity-rls migration...");
+try {
+logger6.debug("[Migration] → Clearing RuntimeMigrator snapshot cache...");
+try {
+await db.execute(sql23`DELETE FROM migrations._snapshots WHERE plugin_name = '@elizaos/plugin-sql'`);
+logger6.debug("[Migration] ✓ Snapshot cache cleared");
+} catch (error) {
+logger6.debug("[Migration] ⊘ No snapshot cache to clear (migrations schema not yet created)");
+}
+logger6.debug("[Migration] → Disabling Row Level Security on all tables...");
+try {
+const tablesResult = await db.execute(sql23`
+SELECT tablename
+FROM pg_tables
+WHERE schemaname = 'public'
+ORDER BY tablename
+`);
+for (const row of tablesResult.rows || []) {
+const tableName = row.tablename;
+try {
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" DISABLE ROW LEVEL SECURITY`));
+logger6.debug(`[Migration] ✓ Disabled RLS on ${tableName}`);
+} catch (error) {
+logger6.debug(`[Migration] ⊘ Could not disable RLS on ${tableName}`);
+}
+}
+} catch (error) {
+logger6.debug("[Migration] ⊘ Could not disable RLS (may not have permissions)");
+}
+logger6.debug("[Migration] → Handling server_id → message_server_id migrations...");
+const tablesToMigrate = ["channels", "worlds", "rooms"];
+for (const tableName of tablesToMigrate) {
+try {
+const columnsResult = await db.execute(sql23`
+SELECT column_name, data_type, is_nullable
+FROM information_schema.columns
+WHERE table_schema = 'public'
+AND table_name = ${tableName}
+AND column_name IN ('server_id', 'message_server_id')
+ORDER BY column_name
+`);
+const columns = columnsResult.rows || [];
+const serverId = columns.find((c) => c.column_name === "server_id");
+const messageServerId = columns.find((c) => c.column_name === "message_server_id");
+if (serverId && !messageServerId) {
+logger6.debug(`[Migration] → Renaming ${tableName}.server_id to message_server_id...`);
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" RENAME COLUMN "server_id" TO "message_server_id"`));
+logger6.debug(`[Migration] ✓ Renamed ${tableName}.server_id → message_server_id`);
+if (serverId.data_type === "text") {
+try {
+logger6.debug(`[Migration] → Dropping DEFAULT constraint on ${tableName}.message_server_id...`);
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" DROP DEFAULT`));
+logger6.debug(`[Migration] ✓ Dropped DEFAULT constraint`);
+logger6.debug(`[Migration] → Converting ${tableName}.message_server_id from text to uuid...`);
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" TYPE uuid USING "message_server_id"::uuid`));
+logger6.debug(`[Migration] ✓ Converted ${tableName}.message_server_id to uuid`);
+} catch (convertError) {
+logger6.warn(`[Migration] ⚠️ Could not convert ${tableName}.message_server_id to uuid - data may not be valid UUIDs`);
+logger6.debug(`[Migration] → Setting invalid UUIDs to NULL in ${tableName}.message_server_id...`);
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" TYPE uuid USING CASE WHEN "message_server_id" ~ '^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$' THEN "message_server_id"::uuid ELSE NULL END`));
+}
+}
+if (tableName === "channels") {
+const nullCountResult = await db.execute(sql23.raw(`SELECT COUNT(*) as count FROM "${tableName}" WHERE "message_server_id" IS NULL`));
+const nullCount = nullCountResult.rows?.[0]?.count;
+if (nullCount && parseInt(nullCount) > 0) {
+logger6.warn(`[Migration] ⚠️ ${tableName} has ${nullCount} rows with NULL message_server_id - these will be deleted`);
+await db.execute(sql23.raw(`DELETE FROM "${tableName}" WHERE "message_server_id" IS NULL`));
+logger6.debug(`[Migration] ✓ Deleted ${nullCount} rows with NULL message_server_id from ${tableName}`);
+}
+logger6.debug(`[Migration] → Making ${tableName}.message_server_id NOT NULL...`);
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" SET NOT NULL`));
+logger6.debug(`[Migration] ✓ Set ${tableName}.message_server_id NOT NULL`);
+}
+} else if (serverId && messageServerId) {
+logger6.debug(`[Migration] → ${tableName} has both columns, dropping server_id...`);
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" DROP COLUMN "server_id" CASCADE`));
+logger6.debug(`[Migration] ✓ Dropped ${tableName}.server_id (will be re-added by RuntimeMigrator for RLS)`);
+} else if (!serverId && messageServerId) {
+if (messageServerId.data_type === "text") {
+logger6.debug(`[Migration] → ${tableName}.message_server_id exists but is TEXT, needs UUID conversion...`);
+logger6.debug(`[Migration] → Dropping DEFAULT constraint on ${tableName}.message_server_id...`);
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" DROP DEFAULT`));
+logger6.debug(`[Migration] ✓ Dropped DEFAULT constraint`);
+logger6.debug(`[Migration] → Converting ${tableName}.message_server_id from text to uuid (generating UUIDs from text)...`);
+await db.execute(sql23.raw(`
+ALTER TABLE "${tableName}"
+ALTER COLUMN "message_server_id" TYPE uuid
+USING CASE
+WHEN "message_server_id" ~ '^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$'
+THEN "message_server_id"::uuid
+ELSE md5("message_server_id")::uuid
+END
+`));
+logger6.debug(`[Migration] ✓ Converted ${tableName}.message_server_id to uuid`);
+} else {
+logger6.debug(`[Migration] ⊘ ${tableName}.message_server_id already UUID, skipping`);
+}
+} else {
+logger6.debug(`[Migration] ⊘ ${tableName} already migrated, skipping`);
+}
+} catch (error) {
+logger6.warn(`[Migration] ⚠️ Error migrating ${tableName}.server_id: ${error}`);
+}
+}
+logger6.debug("[Migration] → Dropping all remaining RLS-managed server_id columns...");
+try {
+const serverIdColumnsResult = await db.execute(sql23`
+SELECT table_name
+FROM information_schema.columns
+WHERE table_schema = 'public'
+AND column_name = 'server_id'
+AND table_name NOT IN (
+'servers', -- server_id is the primary key
+'agents', -- server_id is in the schema (for RLS)
+'channels', -- already handled above
+'worlds', -- already handled above
+'rooms', -- already handled above
+'server_agents', -- server_id is part of composite key
+'drizzle_migrations',
+'__drizzle_migrations'
+)
+ORDER BY table_name
+`);
+const tablesToClean = serverIdColumnsResult.rows || [];
+logger6.debug(`[Migration] → Found ${tablesToClean.length} tables with server_id columns`);
+for (const row of tablesToClean) {
+const tableName = row.table_name;
+try {
+await db.execute(sql23.raw(`ALTER TABLE "${tableName}" DROP COLUMN IF EXISTS server_id CASCADE`));
+logger6.debug(`[Migration] ✓ Dropped server_id from ${tableName}`);
+} catch (error) {
+logger6.debug(`[Migration] ⊘ Could not drop server_id from ${tableName}`);
+}
+}
+} catch (error) {
+logger6.debug("[Migration] ⊘ Could not drop server_id columns (may not have permissions)");
+}
+logger6.debug("[Migration] → Checking server_agents table rename...");
+try {
+const tablesResult = await db.execute(sql23`
+SELECT table_name
+FROM information_schema.tables
+WHERE table_schema = 'public'
+AND table_name IN ('server_agents', 'message_server_agents')
+ORDER BY table_name
+`);
+const tables = tablesResult.rows || [];
+const hasServerAgents = tables.some((t) => t.table_name === "server_agents");
+const hasMessageServerAgents = tables.some((t) => t.table_name === "message_server_agents");
+if (hasServerAgents && !hasMessageServerAgents) {
+logger6.debug("[Migration] → Renaming server_agents to message_server_agents...");
+await db.execute(sql23.raw(`ALTER TABLE "server_agents" RENAME TO "message_server_agents"`));
+logger6.debug("[Migration] ✓ Renamed server_agents → message_server_agents");
+logger6.debug("[Migration] → Renaming message_server_agents.server_id to message_server_id...");
+await db.execute(sql23.raw(`ALTER TABLE "message_server_agents" RENAME COLUMN "server_id" TO "message_server_id"`));
+logger6.debug("[Migration] ✓ Renamed message_server_agents.server_id → message_server_id");
+} else if (!hasServerAgents && !hasMessageServerAgents) {
+logger6.debug("[Migration] ⊘ No server_agents table to migrate");
+} else if (hasMessageServerAgents) {
+logger6.debug("[Migration] → Checking message_server_agents columns...");
+const columnsResult = await db.execute(sql23`
+SELECT column_name
+FROM information_schema.columns
+WHERE table_schema = 'public'
+AND table_name = 'message_server_agents'
+AND column_name IN ('server_id', 'message_server_id')
+ORDER BY column_name
+`);
+const columns = columnsResult.rows || [];
+const hasServerId = columns.some((c) => c.column_name === "server_id");
+const hasMessageServerId = columns.some((c) => c.column_name === "message_server_id");
+if (hasServerId && !hasMessageServerId) {
+logger6.debug("[Migration] → Renaming message_server_agents.server_id to message_server_id...");
+await db.execute(sql23.raw(`ALTER TABLE "message_server_agents" RENAME COLUMN "server_id" TO "message_server_id"`));
+logger6.debug("[Migration] ✓ Renamed message_server_agents.server_id → message_server_id");
+} else if (!hasServerId && !hasMessageServerId) {
+logger6.debug("[Migration] → message_server_agents exists without required columns, truncating...");
+await db.execute(sql23`TRUNCATE TABLE message_server_agents CASCADE`);
+logger6.debug("[Migration] ✓ Truncated message_server_agents");
+} else {
+logger6.debug("[Migration] ⊘ message_server_agents already has correct schema");
+}
+}
+} catch (error) {
+logger6.debug("[Migration] ⊘ Could not check/migrate server_agents table");
+}
+logger6.debug("[Migration] → Checking channel_participants table...");
+try {
+const columnsResult = await db.execute(sql23`
+SELECT column_name
+FROM information_schema.columns
+WHERE table_schema = 'public'
+AND table_name = 'channel_participants'
+AND column_name IN ('user_id', 'entity_id')
+ORDER BY column_name
+`);
+const columns = columnsResult.rows || [];
+const hasUserId = columns.some((c) => c.column_name === "user_id");
+const hasEntityId = columns.some((c) => c.column_name === "entity_id");
+if (hasUserId && !hasEntityId) {
+logger6.debug("[Migration] → Renaming channel_participants.user_id to entity_id...");
+await db.execute(sql23.raw(`ALTER TABLE "channel_participants" RENAME COLUMN "user_id" TO "entity_id"`));
+logger6.debug("[Migration] ✓ Renamed channel_participants.user_id → entity_id");
+} else if (!hasUserId && !hasEntityId) {
+logger6.debug("[Migration] → channel_participants exists without entity_id or user_id, truncating...");
+await db.execute(sql23`TRUNCATE TABLE channel_participants CASCADE`);
+logger6.debug("[Migration] ✓ Truncated channel_participants");
+} else {
+logger6.debug("[Migration] ⊘ channel_participants already has entity_id column");
+}
+} catch (error) {
+logger6.debug("[Migration] ⊘ Could not check/migrate channel_participants");
+}
+logger6.debug("[Migration] → Discovering and dropping all regular indexes...");
+try {
+const indexesResult = await db.execute(sql23`
+SELECT i.relname AS index_name
+FROM pg_index idx
+JOIN pg_class i ON i.oid = idx.indexrelid
+JOIN pg_class c ON c.oid = idx.indrelid
+JOIN pg_namespace n ON n.oid = c.relnamespace
+LEFT JOIN pg_constraint con ON con.conindid = idx.indexrelid
+WHERE n.nspname = 'public'
+AND NOT idx.indisprimary -- Not a primary key
+AND con.contype IS NULL -- Not a constraint (unique, etc)
+ORDER BY i.relname
+`);
+const indexesToDrop = indexesResult.rows || [];
+logger6.debug(`[Migration] → Found ${indexesToDrop.length} indexes to drop`);
+for (const row of indexesToDrop) {
+const indexName = row.index_name;
+try {
+await db.execute(sql23.raw(`DROP INDEX IF EXISTS "${indexName}"`));
+logger6.debug(`[Migration] ✓ Dropped index ${indexName}`);
+} catch (error) {
+logger6.debug(`[Migration] ⊘ Could not drop index ${indexName}`);
+}
+}
+} catch (error) {
+logger6.debug("[Migration] ⊘ Could not drop indexes (may not have permissions)");
+}
+logger6.info("[Migration] ✓ Migration complete - develop to feat/entity-rls migration finished");
+} catch (error) {
+logger6.error("[Migration] Migration failed:", String(error));
+throw error;
+}
+}
+var init_migrations = () => {};
+
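The new `migrateToEntityRLS` function above applies the same guarded pattern repeatedly: query `information_schema` first, then rename or convert only when the legacy column is actually present. A minimal sketch of that pattern, using the table and column names from the diff:

```ts
// Sketch of the guarded server_id → message_server_id rename used above.
import { sql } from "drizzle-orm";

async function renameServerIdColumn(
  db: { execute: (q: unknown) => Promise<{ rows: Array<{ column_name: string }> }> },
  table: string
): Promise<void> {
  const cols = await db.execute(sql`
    SELECT column_name FROM information_schema.columns
    WHERE table_schema = 'public' AND table_name = ${table}
      AND column_name IN ('server_id', 'message_server_id')
  `);
  const names = (cols.rows ?? []).map((r) => r.column_name);
  if (names.includes("server_id") && !names.includes("message_server_id")) {
    await db.execute(sql.raw(`ALTER TABLE "${table}" RENAME COLUMN "server_id" TO "message_server_id"`));
  }
}
```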
// src/rls.ts
|
|
19879
|
+
import { logger as logger7, validateUuid } from "@elizaos/core";
|
|
19880
|
+
import { sql as sql24, eq } from "drizzle-orm";
|
|
19881
|
+
async function installRLSFunctions(adapter) {
|
|
19882
|
+
const db = adapter.db;
|
|
19883
|
+
await db.execute(sql24`
|
|
19884
|
+
CREATE TABLE IF NOT EXISTS servers (
|
|
19885
|
+
id UUID PRIMARY KEY,
|
|
19886
|
+
created_at TIMESTAMPTZ DEFAULT NOW() NOT NULL,
|
|
19887
|
+
updated_at TIMESTAMPTZ DEFAULT NOW() NOT NULL
|
|
19888
|
+
)
|
|
19889
|
+
`);
|
|
19890
|
+
await db.execute(sql24`
|
|
19891
|
+
CREATE OR REPLACE FUNCTION current_server_id() RETURNS UUID AS $$
|
|
19892
|
+
DECLARE
|
|
19893
|
+
app_name TEXT;
|
|
19894
|
+
BEGIN
|
|
19895
|
+
app_name := NULLIF(current_setting('application_name', TRUE), '');
|
|
19896
|
+
|
|
19897
|
+
-- Return NULL if application_name is not set or not a valid UUID
|
|
19898
|
+
-- This allows admin queries to work without RLS restrictions
|
|
19899
|
+
BEGIN
|
|
19900
|
+
RETURN app_name::UUID;
|
|
19901
|
+
EXCEPTION WHEN OTHERS THEN
|
|
19902
|
+
RETURN NULL;
|
|
19903
|
+
END;
|
|
19904
|
+
END;
|
|
19905
|
+
$$ LANGUAGE plpgsql STABLE;
|
|
19906
|
+
`);
|
|
19907
|
+
await db.execute(sql24`
|
|
19908
|
+
CREATE OR REPLACE FUNCTION add_server_isolation(
|
|
19909
|
+
schema_name text,
|
|
19910
|
+
table_name text
|
|
19911
|
+
) RETURNS void AS $$
|
|
19912
|
+
DECLARE
|
|
19913
|
+
full_table_name text;
|
|
19914
|
+
column_exists boolean;
|
|
19915
|
+
orphaned_count bigint;
|
|
19916
|
+
BEGIN
|
|
19917
|
+
full_table_name := schema_name || '.' || table_name;
|
|
19918
|
+
|
|
19919
|
+
-- Check if server_id column already exists
|
|
19920
|
+
SELECT EXISTS (
|
|
19921
|
+
SELECT 1 FROM information_schema.columns
|
|
19922
|
+
WHERE information_schema.columns.table_schema = schema_name
|
|
19923
|
+
AND information_schema.columns.table_name = add_server_isolation.table_name
|
|
19924
|
+
AND information_schema.columns.column_name = 'server_id'
|
|
19925
|
+
) INTO column_exists;
|
|
19926
|
+
|
|
19927
|
+
-- Add server_id column if missing (DEFAULT populates it automatically for new rows)
|
|
19928
|
+
IF NOT column_exists THEN
|
|
19929
|
+
EXECUTE format('ALTER TABLE %I.%I ADD COLUMN server_id UUID DEFAULT current_server_id()', schema_name, table_name);
|
|
19930
|
+
|
|
19931
|
+
-- Backfill existing rows with current server_id
|
|
19932
|
+
-- This ensures all existing data belongs to the server instance that is enabling RLS
|
|
19933
|
+
EXECUTE format('UPDATE %I.%I SET server_id = current_server_id() WHERE server_id IS NULL', schema_name, table_name);
|
|
19934
|
+
ELSE
|
|
19935
|
+
-- Column already exists (RLS was previously enabled then disabled)
|
|
19936
|
+
-- Restore the DEFAULT clause (may have been removed during uninstallRLS)
|
|
19937
|
+
EXECUTE format('ALTER TABLE %I.%I ALTER COLUMN server_id SET DEFAULT current_server_id()', schema_name, table_name);
|
|
19938
|
+
|
|
19939
|
+
-- Only backfill NULL server_id rows, do NOT steal data from other servers
|
|
19940
|
+
EXECUTE format('SELECT COUNT(*) FROM %I.%I WHERE server_id IS NULL', schema_name, table_name) INTO orphaned_count;
|
|
19941
|
+
|
|
19942
|
+
IF orphaned_count > 0 THEN
|
|
19943
|
+
RAISE NOTICE 'Backfilling % rows with NULL server_id in %.%', orphaned_count, schema_name, table_name;
|
|
19944
|
+
EXECUTE format('UPDATE %I.%I SET server_id = current_server_id() WHERE server_id IS NULL', schema_name, table_name);
|
|
19945
|
+
END IF;
|
|
19946
|
+
END IF;
|
|
19947
|
+
|
|
19948
|
+
-- Create index for efficient server_id filtering
|
|
19949
|
+
EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%I_server_id ON %I.%I(server_id)', table_name, schema_name, table_name);
|
|
19950
|
+
|
|
19951
|
+
-- Enable RLS on the table
|
|
19952
|
+
EXECUTE format('ALTER TABLE %I.%I ENABLE ROW LEVEL SECURITY', schema_name, table_name);
|
|
19953
|
+
|
|
19954
|
+
-- FORCE RLS even for table owners (critical for security)
|
|
19955
|
+
EXECUTE format('ALTER TABLE %I.%I FORCE ROW LEVEL SECURITY', schema_name, table_name);
|
|
19956
|
+
|
|
19957
|
+
-- Drop existing policy if present
|
|
19958
|
+
EXECUTE format('DROP POLICY IF EXISTS server_isolation_policy ON %I.%I', schema_name, table_name);
|
|
19959
|
+
|
|
19960
|
+
-- Create isolation policy: users can only see/modify rows where server_id matches current server instance
|
|
19961
|
+
-- No NULL clause - all rows must have a valid server_id (backfilled during column addition)
|
|
19962
|
+
EXECUTE format('
|
|
19963
|
+
CREATE POLICY server_isolation_policy ON %I.%I
|
|
19964
|
+
USING (server_id = current_server_id())
|
|
19965
|
+
WITH CHECK (server_id = current_server_id())
|
|
19966
|
+
', schema_name, table_name);
|
|
19967
|
+
END;
|
|
19968
|
+
$$ LANGUAGE plpgsql;
|
|
19969
|
+
`);
|
|
19970
|
+
await db.execute(sql24`
|
|
19971
|
+
CREATE OR REPLACE FUNCTION apply_rls_to_all_tables() RETURNS void AS $$
|
|
19972
|
+
DECLARE
|
|
19973
|
+
tbl record;
|
|
19974
|
+
BEGIN
|
|
19975
|
+
FOR tbl IN
|
|
19976
|
+
SELECT schemaname, tablename
|
|
19977
|
+
FROM pg_tables
|
|
19978
|
+
WHERE schemaname = 'public'
|
|
19979
|
+
AND tablename NOT IN (
|
|
19980
|
+
'servers',
|
|
19981
|
+
'drizzle_migrations',
|
|
19982
|
+
'__drizzle_migrations'
|
|
19983
|
+
)
|
|
19984
|
+
LOOP
|
|
19985
|
+
BEGIN
|
|
19986
|
+
PERFORM add_server_isolation(tbl.schemaname, tbl.tablename);
|
|
19987
|
+
EXCEPTION WHEN OTHERS THEN
|
|
19988
|
+
RAISE WARNING 'Failed to apply RLS to %.%: %', tbl.schemaname, tbl.tablename, SQLERRM;
|
|
19989
|
+
END;
|
|
19990
|
+
END LOOP;
|
|
19991
|
+
END;
|
|
19992
|
+
$$ LANGUAGE plpgsql;
|
|
19993
|
+
`);
|
|
19994
|
+
logger7.info({ src: "plugin:sql" }, "RLS PostgreSQL functions installed");
|
|
19995
|
+
await installEntityRLS(adapter);
|
|
19996
|
+
}
|
|
19997
|
+
async function applyRLSToNewTables(adapter) {
|
|
19998
|
+
const db = adapter.db;
|
|
19999
|
+
try {
|
|
20000
|
+
await db.execute(sql24`SELECT apply_rls_to_all_tables()`);
|
|
20001
|
+
logger7.info({ src: "plugin:sql" }, "RLS applied to all tables");
|
|
20002
|
+
} catch (error) {
|
|
20003
|
+
logger7.warn({ src: "plugin:sql", error: String(error) }, "Failed to apply RLS to some tables");
|
|
20004
|
+
}
|
|
20005
|
+
}
|
|
20006
|
+
async function installEntityRLS(adapter) {
|
|
20007
|
+
const db = adapter.db;
|
|
20008
|
+
logger7.info("[Entity RLS] Installing entity RLS functions and policies...");
|
|
20009
|
+
await db.execute(sql24`
|
|
20010
|
+
CREATE OR REPLACE FUNCTION current_entity_id()
|
|
20011
|
+
RETURNS UUID AS $$
|
|
20012
|
+
DECLARE
|
|
20013
|
+
entity_id_text TEXT;
|
|
20014
|
+
BEGIN
|
|
20015
|
+
-- Read from transaction-local variable
|
|
20016
|
+
entity_id_text := NULLIF(current_setting('app.entity_id', TRUE), '');
|
|
20017
|
+
|
|
20018
|
+
IF entity_id_text IS NULL OR entity_id_text = '' THEN
|
|
20019
|
+
RETURN NULL;
|
|
20020
|
+
END IF;
|
|
20021
|
+
|
|
20022
|
+
BEGIN
|
|
20023
|
+
RETURN entity_id_text::UUID;
|
|
20024
|
+
EXCEPTION WHEN OTHERS THEN
|
|
20025
|
+
RETURN NULL;
|
|
20026
|
+
END;
|
|
20027
|
+
END;
|
|
20028
|
+
$$ LANGUAGE plpgsql STABLE;
|
|
20029
|
+
`);
|
|
20030
|
+
logger7.info("[Entity RLS] Created current_entity_id() function");
|
|
20031
|
+
await db.execute(sql24`
|
|
20032
|
+
CREATE OR REPLACE FUNCTION add_entity_isolation(
|
|
20033
|
+
schema_name text,
|
|
20034
|
+
table_name text,
|
|
20035
|
+
require_entity boolean DEFAULT false
|
|
20036
|
+
) RETURNS void AS $$
|
|
20037
|
+
DECLARE
|
|
20038
|
+
full_table_name text;
|
|
20039
|
+
has_entity_id boolean;
|
|
20040
|
+
has_author_id boolean;
|
|
20041
|
+
has_channel_id boolean;
|
|
20042
|
+
has_room_id boolean;
|
|
20043
|
+
entity_column_name text;
|
|
20044
|
+
room_column_name text;
|
|
20045
|
+
BEGIN
|
|
20046
|
+
full_table_name := schema_name || '.' || table_name;
|
|
20047
|
+
|
|
20048
|
+
-- Check which columns exist (using camelCase as per schema definition)
|
|
20049
|
+
SELECT EXISTS (
|
|
20050
|
+
SELECT 1 FROM information_schema.columns
|
|
20051
|
+
WHERE information_schema.columns.table_schema = schema_name
|
|
20052
|
+
AND information_schema.columns.table_name = add_entity_isolation.table_name
|
|
20053
|
+
AND information_schema.columns.column_name = 'entityId'
|
|
20054
|
+
) INTO has_entity_id;
|
|
20055
|
+
|
|
20056
|
+
SELECT EXISTS (
|
|
20057
|
+
SELECT 1 FROM information_schema.columns
|
|
20058
|
+
WHERE information_schema.columns.table_schema = schema_name
|
|
20059
|
+
AND information_schema.columns.table_name = add_entity_isolation.table_name
|
|
20060
|
+
AND information_schema.columns.column_name = 'authorId'
|
|
20061
|
+
) INTO has_author_id;
|
|
20062
|
+
|
|
20063
|
+
SELECT EXISTS (
|
|
20064
|
+
SELECT 1 FROM information_schema.columns
|
|
20065
|
+
WHERE information_schema.columns.table_schema = schema_name
|
|
20066
|
+
AND information_schema.columns.table_name = add_entity_isolation.table_name
|
|
20067
|
+
AND information_schema.columns.column_name = 'roomId'
|
|
20068
|
+
) INTO has_room_id;
|
|
20069
|
+
|
|
20070
|
+
-- Skip if no entity-related columns
|
|
20071
|
+
IF NOT (has_entity_id OR has_author_id OR has_room_id) THEN
|
|
20072
|
+
RAISE NOTICE '[Entity RLS] Skipping %.%: no entity columns found', schema_name, table_name;
|
|
20073
|
+
RETURN;
|
|
20074
|
+
END IF;
|
|
20075
|
+
|
|
20076
|
+
-- Determine which column to use for entity filtering
|
|
20077
|
+
-- Priority: roomId (shared access via participants) > entityId/authorId (direct access)
|
|
20078
|
+
--
|
|
20079
|
+
-- SPECIAL CASE: participants table must use direct entityId to avoid infinite recursion
|
|
20080
|
+
IF table_name = 'participants' AND has_entity_id THEN
|
|
20081
|
+
entity_column_name := 'entityId';
|
|
20082
|
+
room_column_name := NULL;
|
|
20083
|
+
ELSIF has_room_id THEN
|
|
20084
|
+
room_column_name := 'roomId';
|
|
20085
|
+
entity_column_name := NULL;
|
|
20086
|
+
ELSIF has_entity_id THEN
|
|
20087
|
+
entity_column_name := 'entityId';
|
|
20088
|
+
room_column_name := NULL;
|
|
20089
|
+
ELSIF has_author_id THEN
|
|
20090
|
+
entity_column_name := 'authorId';
|
|
20091
|
+
room_column_name := NULL;
|
|
20092
|
+
ELSE
|
|
20093
|
+
entity_column_name := NULL;
|
|
20094
|
+
room_column_name := NULL;
|
|
20095
|
+
END IF;
|
|
20096
|
+
|
|
20097
|
+
-- Enable RLS on the table
|
|
20098
|
+
EXECUTE format('ALTER TABLE %I.%I ENABLE ROW LEVEL SECURITY', schema_name, table_name);
|
|
20099
|
+
EXECUTE format('ALTER TABLE %I.%I FORCE ROW LEVEL SECURITY', schema_name, table_name);
|
|
20100
|
+
|
|
20101
|
+
-- Drop existing entity policies if present
|
|
20102
|
+
EXECUTE format('DROP POLICY IF EXISTS entity_isolation_policy ON %I.%I', schema_name, table_name);
|
|
20103
|
+
|
|
20104
|
+
-- CASE 1: Table has roomId or channelId (shared access via participants)
|
|
20105
|
+
IF room_column_name IS NOT NULL THEN
|
|
20106
|
+
-- Determine the corresponding column name in participants table
|
|
20107
|
+
-- If the table has roomId, look for roomId in participants.roomId
|
|
20108
|
+
-- participants table uses: entityId (for participant), roomId (for room)
|
|
20109
|
+
-- RESTRICTIVE: Must pass BOTH server RLS AND entity RLS (combined with AND)
|
|
20110
|
+
|
|
20111
|
+
-- Build policy with or without NULL check based on require_entity parameter
|
|
20112
|
+
IF require_entity THEN
|
|
20113
|
+
-- STRICT MODE: Entity context is REQUIRED (blocks NULL entity_id)
|
|
20114
|
+
EXECUTE format('
|
|
20115
|
+
CREATE POLICY entity_isolation_policy ON %I.%I
|
|
20116
|
+
AS RESTRICTIVE
|
|
20117
|
+
USING (
|
|
20118
|
+
current_entity_id() IS NOT NULL
|
|
20119
|
+
AND %I IN (
|
|
20120
|
+
SELECT "roomId"
|
|
20121
|
+
FROM participants
|
|
20122
|
+
WHERE "entityId" = current_entity_id()
|
|
20123
|
+
)
|
|
20124
|
+
)
|
|
20125
|
+
WITH CHECK (
|
|
20126
|
+
current_entity_id() IS NOT NULL
|
|
20127
|
+
AND %I IN (
|
|
20128
|
+
SELECT "roomId"
|
|
20129
|
+
FROM participants
|
|
20130
|
+
WHERE "entityId" = current_entity_id()
|
|
20131
|
+
)
|
|
20132
|
+
)
|
|
20133
|
+
', schema_name, table_name, room_column_name, room_column_name);
|
|
20134
|
+
RAISE NOTICE '[Entity RLS] Applied STRICT RESTRICTIVE to %.% (via % → participants.roomId, entity REQUIRED)', schema_name, table_name, room_column_name;
|
|
20135
|
+
ELSE
|
|
20136
|
+
-- PERMISSIVE MODE: NULL entity_id allows system/admin access
|
|
20137
|
+
EXECUTE format('
|
|
20138
|
+
CREATE POLICY entity_isolation_policy ON %I.%I
|
|
20139
|
+
AS RESTRICTIVE
|
|
20140
|
+
USING (
|
|
20141
|
+
current_entity_id() IS NULL
|
|
20142
|
+
OR %I IN (
|
|
20143
|
+
SELECT "roomId"
|
|
20144
|
+
FROM participants
|
|
20145
|
+
WHERE "entityId" = current_entity_id()
|
|
20146
|
+
)
|
|
20147
|
+
)
|
|
20148
|
+
WITH CHECK (
|
|
20149
|
+
current_entity_id() IS NULL
|
|
20150
|
+
OR %I IN (
|
|
20151
|
+
SELECT "roomId"
|
|
20152
|
+
FROM participants
|
|
20153
|
+
WHERE "entityId" = current_entity_id()
|
|
20154
|
+
)
|
|
20155
|
+
)
|
|
20156
|
+
', schema_name, table_name, room_column_name, room_column_name);
|
|
20157
|
+
RAISE NOTICE '[Entity RLS] Applied PERMISSIVE RESTRICTIVE to %.% (via % → participants.roomId, NULL allowed)', schema_name, table_name, room_column_name;
|
|
20158
|
+
END IF;
|
|
20159
|
+
|
|
20160
|
+
-- CASE 2: Table has direct entity_id or author_id column
|
|
20161
|
+
ELSIF entity_column_name IS NOT NULL THEN
|
|
20162
|
+
-- RESTRICTIVE: Must pass BOTH server RLS AND entity RLS (combined with AND)
|
|
20163
|
+
|
|
20164
|
+
IF require_entity THEN
|
|
20165
|
+
-- STRICT MODE: Entity context is REQUIRED
|
|
20166
|
+
EXECUTE format('
|
|
20167
|
+
CREATE POLICY entity_isolation_policy ON %I.%I
|
|
20168
|
+
AS RESTRICTIVE
|
|
20169
|
+
USING (
|
|
20170
|
+
current_entity_id() IS NOT NULL
|
|
20171
|
+
AND %I = current_entity_id()
|
|
20172
|
+
)
|
|
20173
|
+
WITH CHECK (
|
|
20174
|
+
current_entity_id() IS NOT NULL
|
|
20175
|
+
AND %I = current_entity_id()
|
|
20176
|
+
)
|
|
20177
|
+
', schema_name, table_name, entity_column_name, entity_column_name);
|
|
20178
|
+
+ RAISE NOTICE '[Entity RLS] Applied STRICT RESTRICTIVE to %.% (direct column: %, entity REQUIRED)', schema_name, table_name, entity_column_name;
+ ELSE
+ -- PERMISSIVE MODE: NULL entity_id allows system/admin access
+ EXECUTE format('
+ CREATE POLICY entity_isolation_policy ON %I.%I
+ AS RESTRICTIVE
+ USING (
+ current_entity_id() IS NULL
+ OR %I = current_entity_id()
+ )
+ WITH CHECK (
+ current_entity_id() IS NULL
+ OR %I = current_entity_id()
+ )
+ ', schema_name, table_name, entity_column_name, entity_column_name);
+ RAISE NOTICE '[Entity RLS] Applied PERMISSIVE RESTRICTIVE to %.% (direct column: %, NULL allowed)', schema_name, table_name, entity_column_name;
+ END IF;
+ END IF;
+
+ -- Create indexes for efficient entity filtering
+ IF room_column_name IS NOT NULL THEN
+ EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%I_room ON %I.%I(%I)',
+ table_name, schema_name, table_name, room_column_name);
+ END IF;
+
+ IF entity_column_name IS NOT NULL THEN
+ EXECUTE format('CREATE INDEX IF NOT EXISTS idx_%I_entity ON %I.%I(%I)',
+ table_name, schema_name, table_name, entity_column_name);
+ END IF;
+ END;
+ $$ LANGUAGE plpgsql;
+ `);
+ logger7.info("[Entity RLS] Created add_entity_isolation() function");
+ await db.execute(sql24`
+ CREATE OR REPLACE FUNCTION apply_entity_rls_to_all_tables() RETURNS void AS $$
+ DECLARE
+ tbl record;
+ require_entity_for_table boolean;
+ BEGIN
+ FOR tbl IN
+ SELECT schemaname, tablename
+ FROM pg_tables
+ WHERE schemaname = 'public'
+ AND tablename NOT IN (
+ 'servers', -- Server RLS table
+ 'users', -- Authentication table (no entity isolation needed)
+ 'entity_mappings', -- Mapping table (no entity isolation needed)
+ 'drizzle_migrations', -- Migration tracking
+ '__drizzle_migrations' -- Migration tracking
+ )
+ LOOP
+ BEGIN
+ -- Apply STRICT mode (require_entity=true) to sensitive user-facing tables
+ -- These tables MUST have entity context set to access data
+ -- STRICT tables: memories, logs, components, tasks (user data requiring isolation)
+ -- NOTE: Excluded tables:
+ -- - 'participants': Adding participants is a privileged operation during initialization
+ IF tbl.tablename IN ('memories', 'logs', 'components', 'tasks') THEN
+ require_entity_for_table := true;
+ ELSE
+ -- PERMISSIVE mode (require_entity=false) for system/privileged tables
+ -- This includes: participants, rooms, channels, entities, etc.
+ require_entity_for_table := false;
+ END IF;
+
+ PERFORM add_entity_isolation(tbl.schemaname, tbl.tablename, require_entity_for_table);
+ EXCEPTION WHEN OTHERS THEN
+ RAISE WARNING '[Entity RLS] Failed to apply to %.%: %', tbl.schemaname, tbl.tablename, SQLERRM;
+ END;
+ END LOOP;
+ END;
+ $$ LANGUAGE plpgsql;
+ `);
+ logger7.info("[Entity RLS] Created apply_entity_rls_to_all_tables() function");
+ logger7.info("[Entity RLS] Entity RLS functions installed successfully");
+ }
+ async function applyEntityRLSToAllTables(adapter) {
+ const db = adapter.db;
+ try {
+ await db.execute(sql24`SELECT apply_entity_rls_to_all_tables()`);
+ logger7.info("[Entity RLS] Applied entity RLS to all eligible tables");
+ } catch (error) {
+ logger7.warn("[Entity RLS] Failed to apply entity RLS to some tables:", String(error));
+ }
+ }
+ var init_rls = () => {};
+
  // src/migration-service.ts
  var exports_migration_service = {};
  __export(exports_migration_service, {
  DatabaseMigrationService: () => DatabaseMigrationService
  });
- import { logger as
+ import { logger as logger8 } from "@elizaos/core";

  class DatabaseMigrationService {
  db = null;
@@ -19612,22 +20276,22 @@ class DatabaseMigrationService {
  constructor() {}
  async initializeWithDatabase(db) {
  this.db = db;
+ await migrateToEntityRLS({ db });
  this.migrator = new RuntimeMigrator(db);
  await this.migrator.initialize();
-
+ logger8.info({ src: "plugin:sql" }, "DatabaseMigrationService initialized");
  }
  discoverAndRegisterPluginSchemas(plugins) {
  for (const plugin of plugins) {
  if (plugin.schema) {
  this.registeredSchemas.set(plugin.name, plugin.schema);
- logger6.info(`Registered schema for plugin: ${plugin.name}`);
  }
  }
-
+ logger8.info({ src: "plugin:sql", schemasDiscovered: this.registeredSchemas.size, totalPlugins: plugins.length }, "Plugin schemas discovered");
  }
  registerSchema(pluginName, schema) {
  this.registeredSchemas.set(pluginName, schema);
-
+ logger8.debug({ src: "plugin:sql", pluginName }, "Schema registered");
  }
  async runAllPluginMigrations(options) {
  if (!this.db || !this.migrator) {
@@ -19639,12 +20303,7 @@ class DatabaseMigrationService {
  force: options?.force ?? false,
  dryRun: options?.dryRun ?? false
  };
-
- logger6.info(`[DatabaseMigrationService] Environment: ${isProduction ? "PRODUCTION" : "DEVELOPMENT"}`);
- logger6.info(`[DatabaseMigrationService] Plugins to migrate: ${this.registeredSchemas.size}`);
- if (migrationOptions.dryRun) {
- logger6.info("[DatabaseMigrationService] DRY RUN mode - no changes will be applied");
- }
+ logger8.info({ src: "plugin:sql", environment: isProduction ? "PRODUCTION" : "DEVELOPMENT", pluginCount: this.registeredSchemas.size, dryRun: migrationOptions.dryRun }, "Starting migrations");
  let successCount = 0;
  let failureCount = 0;
  const errors = [];
@@ -19652,27 +20311,37 @@ class DatabaseMigrationService {
  try {
  await this.migrator.migrate(pluginName, schema, migrationOptions);
  successCount++;
-
+ logger8.info({ src: "plugin:sql", pluginName }, "Migration completed");
  } catch (error) {
  failureCount++;
  const errorMessage = error.message;
  errors.push({ pluginName, error });
  if (errorMessage.includes("Destructive migration blocked")) {
-
- if (!migrationOptions.force && process.env.ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS !== "true") {
- logger6.error("[DatabaseMigrationService] To allow destructive migrations:");
- logger6.error("[DatabaseMigrationService] - Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true");
- logger6.error("[DatabaseMigrationService] - Or pass { force: true } to this method");
- }
+ logger8.error({ src: "plugin:sql", pluginName }, "Migration blocked - destructive changes detected. Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true or use force option");
  } else {
-
+ logger8.error({ src: "plugin:sql", pluginName, error: errorMessage }, "Migration failed");
  }
  }
  }
  if (failureCount === 0) {
-
+ logger8.info({ src: "plugin:sql", successCount }, "All migrations completed successfully");
+ const dataIsolationEnabled = process.env.ENABLE_DATA_ISOLATION === "true";
+ if (dataIsolationEnabled) {
+ try {
+ logger8.info({ src: "plugin:sql" }, "Re-applying Row Level Security...");
+ await installRLSFunctions({ db: this.db });
+ await applyRLSToNewTables({ db: this.db });
+ await applyEntityRLSToAllTables({ db: this.db });
+ logger8.info({ src: "plugin:sql" }, "RLS re-applied successfully");
+ } catch (rlsError) {
+ const errorMsg = rlsError instanceof Error ? rlsError.message : String(rlsError);
+ logger8.warn({ src: "plugin:sql", error: errorMsg }, "Failed to re-apply RLS (this is OK if server_id columns are not yet in schemas)");
+ }
+ } else {
+ logger8.info({ src: "plugin:sql" }, "Skipping RLS re-application (ENABLE_DATA_ISOLATION is not true)");
+ }
  } else {
-
+ logger8.error({ src: "plugin:sql", failureCount, successCount }, "Some migrations failed");
  const errorSummary = errors.map((e) => `${e.pluginName}: ${e.error.message}`).join(`
  `);
  throw new Error(`${failureCount} migration(s) failed:
@@ -19685,35 +20354,37 @@ class DatabaseMigrationService {
  }
  var init_migration_service = __esm(() => {
  init_runtime_migrator2();
+ init_migrations();
+ init_rls();
  });

  // src/index.browser.ts
  import {
- logger as
+ logger as logger11
  } from "@elizaos/core/browser";

  // src/pglite/adapter.ts
- import { logger as
+ import { logger as logger10 } from "@elizaos/core";
  import { drizzle } from "drizzle-orm/pglite";

  // src/base.ts
  import {
  ChannelType,
  DatabaseAdapter,
- logger as
+ logger as logger9
  } from "@elizaos/core";
  import {
  and,
  cosineDistance,
  count,
  desc,
- eq,
+ eq as eq2,
  gte,
  inArray,
  lt,
  lte,
  or,
- sql as
+ sql as sql25
  } from "drizzle-orm";

  // node_modules/uuid/dist/esm-browser/stringify.js
@@ -19773,6 +20444,7 @@ import { check as check2, foreignKey as foreignKey2, index as index2, pgTable as
  import { VECTOR_DIMS } from "@elizaos/core";

  // src/schema/memory.ts
+ init_agent();
  import { relations, sql as sql4 } from "drizzle-orm";
  import {
  boolean as boolean2,
@@ -19786,30 +20458,8 @@ import {
  uuid as uuid4
  } from "drizzle-orm/pg-core";

- // src/schema/agent.ts
- import { sql } from "drizzle-orm";
- import { boolean, jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";
- var agentTable = pgTable("agents", {
- id: uuid("id").primaryKey().defaultRandom(),
- enabled: boolean("enabled").default(true).notNull(),
- owner_id: uuid("owner_id"),
- createdAt: timestamp("created_at", { withTimezone: true }).default(sql`now()`).notNull(),
- updatedAt: timestamp("updated_at", { withTimezone: true }).default(sql`now()`).notNull(),
- name: text("name").notNull(),
- username: text("username"),
- system: text("system").default(""),
- bio: jsonb("bio").$type().default(sql`'[]'::jsonb`),
- messageExamples: jsonb("message_examples").$type().default(sql`'[]'::jsonb`).notNull(),
- postExamples: jsonb("post_examples").$type().default(sql`'[]'::jsonb`).notNull(),
- topics: jsonb("topics").$type().default(sql`'[]'::jsonb`).notNull(),
- adjectives: jsonb("adjectives").$type().default(sql`'[]'::jsonb`).notNull(),
- knowledge: jsonb("knowledge").$type().default(sql`'[]'::jsonb`).notNull(),
- plugins: jsonb("plugins").$type().default(sql`'[]'::jsonb`).notNull(),
- settings: jsonb("settings").$type().default(sql`'{}'::jsonb`).notNull(),
- style: jsonb("style").$type().default(sql`'{}'::jsonb`).notNull()
- });
-
  // src/schema/entity.ts
+ init_agent();
  import { sql as sql2 } from "drizzle-orm";
  import { jsonb as jsonb2, pgTable as pgTable2, text as text2, timestamp as timestamp2, unique, uuid as uuid2 } from "drizzle-orm/pg-core";
  var entityTable = pgTable2("entities", {
@@ -19827,6 +20477,7 @@ var entityTable = pgTable2("entities", {
  });

  // src/schema/room.ts
+ init_agent();
  import { sql as sql3 } from "drizzle-orm";
  import { jsonb as jsonb3, pgTable as pgTable3, text as text3, timestamp as timestamp3, uuid as uuid3 } from "drizzle-orm/pg-core";
  var roomTable = pgTable3("rooms", {
@@ -19836,12 +20487,12 @@ var roomTable = pgTable3("rooms", {
  }),
  source: text3("source").notNull(),
  type: text3("type").notNull(),
-
+ messageServerId: uuid3("message_server_id"),
  worldId: uuid3("worldId"),
  name: text3("name"),
  metadata: jsonb3("metadata"),
- channelId: text3("
- createdAt: timestamp3("
+ channelId: text3("channel_id"),
+ createdAt: timestamp3("created_at").default(sql3`now()`).notNull()
  });

  // src/schema/memory.ts
@@ -19933,17 +20584,18 @@ var embeddingTable = pgTable5("embeddings", {
  ]);

  // src/schema/index.ts
+ init_agent();
  var exports_schema = {};
  __export(exports_schema, {
  worldTable: () => worldTable,
  taskTable: () => taskTable,
-
+ serverTable: () => serverTable,
  roomTable: () => roomTable,
  relationshipTable: () => relationshipTable,
  participantTable: () => participantTable,
- ownersTable: () => ownersTable,
  messageTable: () => messageTable,
  messageServerTable: () => messageServerTable,
+ messageServerAgentsTable: () => messageServerAgentsTable,
  memoryTable: () => memoryTable,
  logTable: () => logTable,
  entityTable: () => entityTable,
@@ -19956,6 +20608,7 @@ __export(exports_schema, {
  });

  // src/schema/cache.ts
+ init_agent();
  import { sql as sql6 } from "drizzle-orm";
  import { jsonb as jsonb5, pgTable as pgTable6, text as text5, primaryKey, timestamp as timestamp6, uuid as uuid6 } from "drizzle-orm/pg-core";
  var cacheTable = pgTable6("cache", {
@@ -19964,14 +20617,16 @@ var cacheTable = pgTable6("cache", {
  value: jsonb5("value").notNull(),
  createdAt: timestamp6("created_at", { withTimezone: true }).default(sql6`now()`).notNull(),
  expiresAt: timestamp6("expires_at", { withTimezone: true })
- }, (table) =>
-
-
+ }, (table) => [
+ primaryKey({ columns: [table.key, table.agentId] })
+ ]);
  // src/schema/component.ts
+ init_agent();
  import { sql as sql8 } from "drizzle-orm";
  import { jsonb as jsonb7, pgTable as pgTable8, text as text7, timestamp as timestamp8, uuid as uuid8 } from "drizzle-orm/pg-core";

  // src/schema/world.ts
+ init_agent();
  import { sql as sql7 } from "drizzle-orm";
  import { jsonb as jsonb6, pgTable as pgTable7, text as text6, timestamp as timestamp7, uuid as uuid7 } from "drizzle-orm/pg-core";
  var worldTable = pgTable7("worlds", {
@@ -19979,8 +20634,8 @@ var worldTable = pgTable7("worlds", {
  agentId: uuid7("agentId").notNull().references(() => agentTable.id, { onDelete: "cascade" }),
  name: text6("name").notNull(),
  metadata: jsonb6("metadata"),
-
- createdAt: timestamp7("
+ messageServerId: uuid7("message_server_id"),
+ createdAt: timestamp7("created_at").default(sql7`now()`).notNull()
  });

  // src/schema/component.ts
@@ -20017,15 +20672,12 @@ var logTable = pgTable9("logs", {
  foreignColumns: [entityTable.id]
  }).onDelete("cascade")
  ]);
-
-
-
-
- id: uuid10("id").primaryKey(),
- createdAt: timestamp10("created_at", { withTimezone: true }).default(sql10`now()`).notNull(),
- updatedAt: timestamp10("updated_at", { withTimezone: true }).default(sql10`now()`).notNull()
- });
+
+ // src/schema/index.ts
+ init_server();
+
  // src/schema/participant.ts
+ init_agent();
  import { sql as sql11 } from "drizzle-orm";
  import { foreignKey as foreignKey4, index as index3, pgTable as pgTable11, text as text9, timestamp as timestamp11, uuid as uuid11 } from "drizzle-orm/pg-core";
  var participantTable = pgTable11("participants", {
@@ -20056,6 +20708,7 @@ var participantTable = pgTable11("participants", {
  }).onDelete("cascade")
  ]);
  // src/schema/relationship.ts
+ init_agent();
  import { sql as sql12 } from "drizzle-orm";
  import {
  foreignKey as foreignKey5,
@@ -20090,6 +20743,7 @@ var relationshipTable = pgTable12("relationships", {
  }).onDelete("cascade")
  ]);
  // src/schema/tasks.ts
+ init_agent();
  import { jsonb as jsonb10, pgTable as pgTable13, text as text11, timestamp as timestamp13, uuid as uuid13 } from "drizzle-orm/pg-core";
  import { sql as sql13 } from "drizzle-orm";
  var taskTable = pgTable13("tasks", {
@@ -20099,7 +20753,7 @@ var taskTable = pgTable13("tasks", {
  roomId: uuid13("roomId"),
  worldId: uuid13("worldId"),
  entityId: uuid13("entityId"),
- agentId: uuid13("
+ agentId: uuid13("agentId").notNull().references(() => agentTable.id, { onDelete: "cascade" }),
  tags: text11("tags").array().default(sql13`'{}'::text[]`),
  metadata: jsonb10("metadata").default(sql13`'{}'::jsonb`),
  createdAt: timestamp13("created_at", { withTimezone: true }).defaultNow(),
@@ -20122,7 +20776,7 @@ import { pgTable as pgTable15, text as text13, jsonb as jsonb12, timestamp as ti
  import { sql as sql15 } from "drizzle-orm";
  var channelTable = pgTable15("channels", {
  id: text13("id").primaryKey(),
- messageServerId: uuid15("
+ messageServerId: uuid15("message_server_id").notNull().references(() => messageServerTable.id, { onDelete: "cascade" }),
  name: text13("name").notNull(),
  type: text13("type").notNull(),
  sourceType: text13("source_type"),
@@ -20154,18 +20808,19 @@ var messageTable = pgTable16("central_messages", {
  import { pgTable as pgTable17, text as text15, primaryKey as primaryKey2 } from "drizzle-orm/pg-core";
  var channelParticipantsTable = pgTable17("channel_participants", {
  channelId: text15("channel_id").notNull().references(() => channelTable.id, { onDelete: "cascade" }),
-
- }, (table) =>
-
-
- // src/schema/
+ entityId: text15("entity_id").notNull()
+ }, (table) => [
+ primaryKey2({ columns: [table.channelId, table.entityId] })
+ ]);
+ // src/schema/messageServerAgent.ts
  import { pgTable as pgTable18, uuid as uuid16, primaryKey as primaryKey3 } from "drizzle-orm/pg-core";
-
-
+ init_agent();
+ var messageServerAgentsTable = pgTable18("message_server_agents", {
+ messageServerId: uuid16("message_server_id").notNull().references(() => messageServerTable.id, { onDelete: "cascade" }),
  agentId: uuid16("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" })
- }, (table) =>
-
-
+ }, (table) => [
+ primaryKey3({ columns: [table.messageServerId, table.agentId] })
+ ]);
  // src/base.ts
  class BaseDrizzleAdapter extends DatabaseAdapter {
  maxRetries = 3;
@@ -20227,10 +20882,10 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const backoffDelay = Math.min(this.baseDelay * 2 ** (attempt - 1), this.maxDelay);
  const jitter = Math.random() * this.jitterMax;
  const delay = backoffDelay + jitter;
-
+ logger9.warn({ src: "plugin:sql", attempt, maxRetries: this.maxRetries, error: error instanceof Error ? error.message : String(error) }, "Database operation failed, retrying");
  await new Promise((resolve) => setTimeout(resolve, delay));
  } else {
-
+ logger9.error({ src: "plugin:sql", totalAttempts: attempt, error: error instanceof Error ? error.message : String(error) }, "Max retry attempts reached");
  throw error instanceof Error ? error : new Error(String(error));
  }
  }
@@ -20239,7 +20894,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  async ensureEmbeddingDimension(dimension) {
  return this.withDatabase(async () => {
- const existingMemory = await this.db.select().from(memoryTable).innerJoin(embeddingTable,
+ const existingMemory = await this.db.select().from(memoryTable).innerJoin(embeddingTable, eq2(embeddingTable.memoryId, memoryTable.id)).where(eq2(memoryTable.agentId, this.agentId)).limit(1);
  if (existingMemory.length > 0) {
  Object.entries(DIMENSION_MAP).find(([_, colName]) => existingMemory[0].embeddings[colName] !== null);
  }
@@ -20248,7 +20903,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  async getAgent(agentId) {
  return this.withDatabase(async () => {
- const rows = await this.db.select().from(agentTable).where(
+ const rows = await this.db.select().from(agentTable).where(eq2(agentTable.id, agentId)).limit(1);
  if (rows.length === 0)
  return null;
  const row = rows[0];
@@ -20282,9 +20937,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return this.withDatabase(async () => {
  try {
  if (agent.id) {
- const existing = await this.db.select({ id: agentTable.id }).from(agentTable).where(
+ const existing = await this.db.select({ id: agentTable.id }).from(agentTable).where(eq2(agentTable.id, agent.id)).limit(1);
  if (existing.length > 0) {
-
+ logger9.warn({ src: "plugin:sql", agentId: agent.id }, "Attempted to create agent with duplicate ID");
  return false;
  }
  }
@@ -20295,10 +20950,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  updatedAt: new Date(agent.updatedAt || Date.now())
  });
  });
- logger7.debug(`Agent created successfully: ${agent.id}`);
  return true;
  } catch (error) {
-
+ logger9.error({ src: "plugin:sql", agentId: agent.id, error: error instanceof Error ? error.message : String(error) }, "Failed to create agent");
  return false;
  }
  });
@@ -20330,18 +20984,17 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  } else {
  updateData.updatedAt = new Date;
  }
- await tx.update(agentTable).set(updateData).where(
+ await tx.update(agentTable).set(updateData).where(eq2(agentTable.id, agentId));
  });
- logger7.debug(`Agent updated successfully: ${agentId}`);
  return true;
  } catch (error) {
-
+ logger9.error({ src: "plugin:sql", agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to update agent");
  return false;
  }
  });
  }
  async mergeAgentSettings(tx, agentId, updatedSettings) {
- const currentAgent = await tx.select({ settings: agentTable.settings }).from(agentTable).where(
+ const currentAgent = await tx.select({ settings: agentTable.settings }).from(agentTable).where(eq2(agentTable.id, agentId)).limit(1);
  const currentSettings = currentAgent.length > 0 && currentAgent[0].settings ? currentAgent[0].settings : {};
  const deepMerge = (target, source) => {
  if (source === null) {
@@ -20377,22 +21030,16 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return finalSettings === undefined ? {} : finalSettings;
  }
  async deleteAgent(agentId) {
- logger7.debug(`[DB] Deleting agent with ID: ${agentId}`);
  return this.withDatabase(async () => {
  try {
- const result = await this.db.delete(agentTable).where(
+ const result = await this.db.delete(agentTable).where(eq2(agentTable.id, agentId)).returning();
  if (result.length === 0) {
-
+ logger9.warn({ src: "plugin:sql", agentId }, "Agent not found for deletion");
  return false;
  }
- logger7.success(`[DB] Agent ${agentId} and all related data successfully deleted via cascade`);
  return true;
  } catch (error) {
-
- if (error instanceof Error) {
- logger7.error(`[DB] Error details: ${error.name} - ${error.message}`);
- logger7.error(`[DB] Stack trace: ${error.stack}`);
- }
+ logger9.error({ src: "plugin:sql", agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to delete agent");
  throw error;
  }
  });
@@ -20403,7 +21050,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const result = await this.db.select({ count: count() }).from(agentTable);
  return result[0]?.count || 0;
  } catch (error) {
-
+ logger9.error({ src: "plugin:sql", error: error instanceof Error ? error.message : String(error) }, "Failed to count agents");
  return 0;
  }
  });
@@ -20412,9 +21059,8 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return this.withDatabase(async () => {
  try {
  await this.db.delete(agentTable);
- logger7.success("Successfully cleaned up agent table");
  } catch (error) {
-
+ logger9.error({ src: "plugin:sql", error: error instanceof Error ? error.message : String(error) }, "Failed to clean up agent table");
  throw error;
  }
  });
@@ -20424,7 +21070,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const result = await this.db.select({
  entity: entityTable,
  components: componentTable
- }).from(entityTable).leftJoin(componentTable,
+ }).from(entityTable).leftJoin(componentTable, eq2(componentTable.entityId, entityTable.id)).where(inArray(entityTable.id, entityIds));
  if (result.length === 0)
  return [];
  const entities = {};
@@ -20450,11 +21096,11 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const query = this.db.select({
  entity: entityTable,
  ...includeComponents && { components: componentTable }
- }).from(participantTable).leftJoin(entityTable, and(
+ }).from(participantTable).leftJoin(entityTable, and(eq2(participantTable.entityId, entityTable.id), eq2(entityTable.agentId, this.agentId)));
  if (includeComponents) {
- query.leftJoin(componentTable,
+ query.leftJoin(componentTable, eq2(componentTable.entityId, entityTable.id));
  }
- const result = await query.where(
+ const result = await query.where(eq2(participantTable.roomId, roomId));
  const entitiesByIdMap = new Map;
  for (const row of result) {
  if (!row.entity)
@@ -20493,21 +21139,17 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  metadata: entity.metadata || {}
  }));
  await tx.insert(entityTable).values(normalizedEntities);
- logger7.debug(`${entities.length} Entities created successfully`);
  return true;
  });
  } catch (error) {
-
- if (error instanceof Error && error.stack) {
- logger7.trace("Stack trace:", error.stack);
- }
+ logger9.error({ src: "plugin:sql", entityId: entities[0]?.id, error: error instanceof Error ? error.message : String(error) }, "Failed to create entities");
  return false;
  }
  });
  }
  async ensureEntityExists(entity) {
  if (!entity.id) {
-
+ logger9.error({ src: "plugin:sql" }, "Entity ID is required for ensureEntityExists");
  return false;
  }
  try {
@@ -20517,7 +21159,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  return true;
  } catch (error) {
-
+ logger9.error({ src: "plugin:sql", entityId: entity.id, error: error instanceof Error ? error.message : String(error) }, "Failed to ensure entity exists");
  return false;
  }
  }
@@ -20531,25 +21173,25 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  names: this.normalizeEntityNames(entity.names),
  metadata: entity.metadata || {}
  };
- await this.db.update(entityTable).set(normalizedEntity).where(
+ await this.db.update(entityTable).set(normalizedEntity).where(eq2(entityTable.id, entity.id));
  });
  }
  async deleteEntity(entityId) {
  return this.withDatabase(async () => {
  await this.db.transaction(async (tx) => {
- await tx.delete(componentTable).where(or(
- await tx.delete(entityTable).where(
+ await tx.delete(componentTable).where(or(eq2(componentTable.entityId, entityId), eq2(componentTable.sourceEntityId, entityId)));
+ await tx.delete(entityTable).where(eq2(entityTable.id, entityId));
  });
  });
  }
  async getEntitiesByNames(params) {
  return this.withDatabase(async () => {
  const { names, agentId } = params;
- const nameConditions = names.map((name) =>
- const query =
+ const nameConditions = names.map((name) => sql25`${name} = ANY(${entityTable.names})`);
+ const query = sql25`
  SELECT * FROM ${entityTable}
  WHERE ${entityTable.agentId} = ${agentId}
- AND (${
+ AND (${sql25.join(nameConditions, sql25` OR `)})
  `;
  const result = await this.db.execute(query);
  return result.rows.map((row) => ({
@@ -20564,7 +21206,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return this.withDatabase(async () => {
  const { query, agentId, limit = 10 } = params;
  if (!query || query.trim() === "") {
- const result2 = await this.db.select().from(entityTable).where(
+ const result2 = await this.db.select().from(entityTable).where(eq2(entityTable.agentId, agentId)).limit(limit);
  return result2.map((row) => ({
  id: row.id,
  agentId: row.agentId,
@@ -20572,7 +21214,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  metadata: row.metadata || {}
  }));
  }
- const searchQuery =
+ const searchQuery = sql25`
  SELECT * FROM ${entityTable}
  WHERE ${entityTable.agentId} = ${agentId}
  AND EXISTS (
@@ -20592,12 +21234,12 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  async getComponent(entityId, type, worldId, sourceEntityId) {
  return this.withDatabase(async () => {
- const conditions = [
+ const conditions = [eq2(componentTable.entityId, entityId), eq2(componentTable.type, type)];
  if (worldId) {
- conditions.push(
+ conditions.push(eq2(componentTable.worldId, worldId));
  }
  if (sourceEntityId) {
- conditions.push(
+ conditions.push(eq2(componentTable.sourceEntityId, sourceEntityId));
  }
  const result = await this.db.select().from(componentTable).where(and(...conditions));
  if (result.length === 0)
@@ -20618,12 +21260,12 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  async getComponents(entityId, worldId, sourceEntityId) {
  return this.withDatabase(async () => {
- const conditions = [
+ const conditions = [eq2(componentTable.entityId, entityId)];
  if (worldId) {
- conditions.push(
+ conditions.push(eq2(componentTable.worldId, worldId));
  }
  if (sourceEntityId) {
- conditions.push(
+ conditions.push(eq2(componentTable.sourceEntityId, sourceEntityId));
  }
  const result = await this.db.select({
  id: componentTable.id,
@@ -20667,7 +21309,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  await this.db.update(componentTable).set({
  ...component,
  updatedAt: new Date
- }).where(
+ }).where(eq2(componentTable.id, component.id));
  } catch (e) {
  console.error("updateComponent error", e);
  }
@@ -20675,7 +21317,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  async deleteComponent(componentId) {
  return this.withDatabase(async () => {
- await this.db.delete(componentTable).where(
+ await this.db.delete(componentTable).where(eq2(componentTable.id, componentId));
  });
  }
  async getMemories(params) {
@@ -20685,30 +21327,27 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  if (offset !== undefined && offset < 0) {
  throw new Error("offset must be a non-negative number");
  }
- return this.
- const conditions = [
+ return this.withEntityContext(entityId ?? null, async (tx) => {
+ const conditions = [eq2(memoryTable.type, tableName)];
  if (start) {
  conditions.push(gte(memoryTable.createdAt, new Date(start)));
  }
- if (entityId) {
- conditions.push(eq(memoryTable.entityId, entityId));
- }
  if (roomId) {
- conditions.push(
+ conditions.push(eq2(memoryTable.roomId, roomId));
  }
  if (worldId) {
- conditions.push(
+ conditions.push(eq2(memoryTable.worldId, worldId));
  }
  if (end) {
  conditions.push(lte(memoryTable.createdAt, new Date(end)));
  }
  if (unique3) {
- conditions.push(
+ conditions.push(eq2(memoryTable.unique, true));
  }
  if (agentId) {
- conditions.push(
+ conditions.push(eq2(memoryTable.agentId, agentId));
  }
- const baseQuery =
+ const baseQuery = tx.select({
  memory: {
  id: memoryTable.id,
  type: memoryTable.type,
@@ -20721,7 +21360,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  metadata: memoryTable.metadata
  },
  embedding: embeddingTable[this.embeddingDimension]
- }).from(memoryTable).leftJoin(embeddingTable,
+ }).from(memoryTable).leftJoin(embeddingTable, eq2(embeddingTable.memoryId, memoryTable.id)).where(and(...conditions)).orderBy(desc(memoryTable.createdAt));
  const rows = await (async () => {
  if (params.count && offset !== undefined && offset > 0) {
  return baseQuery.limit(params.count).offset(offset);
@@ -20752,10 +21391,10 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  if (params.roomIds.length === 0)
  return [];
  const conditions = [
-
+ eq2(memoryTable.type, params.tableName),
  inArray(memoryTable.roomId, params.roomIds)
  ];
- conditions.push(
+ conditions.push(eq2(memoryTable.agentId, this.agentId));
  const query = this.db.select({
  id: memoryTable.id,
  type: memoryTable.type,
@@ -20785,7 +21424,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const result = await this.db.select({
  memory: memoryTable,
  embedding: embeddingTable[this.embeddingDimension]
- }).from(memoryTable).leftJoin(embeddingTable,
+ }).from(memoryTable).leftJoin(embeddingTable, eq2(memoryTable.id, embeddingTable.memoryId)).where(eq2(memoryTable.id, id)).limit(1);
  if (result.length === 0)
  return null;
  const row = result[0];
@@ -20808,12 +21447,12 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return [];
  const conditions = [inArray(memoryTable.id, memoryIds)];
  if (tableName) {
- conditions.push(
+ conditions.push(eq2(memoryTable.type, tableName));
  }
  const rows = await this.db.select({
  memory: memoryTable,
  embedding: embeddingTable[this.embeddingDimension]
- }).from(memoryTable).leftJoin(embeddingTable,
+ }).from(memoryTable).leftJoin(embeddingTable, eq2(embeddingTable.memoryId, memoryTable.id)).where(and(...conditions)).orderBy(desc(memoryTable.createdAt));
  return rows.map((row) => ({
  id: row.memory.id,
  createdAt: row.memory.createdAt.getTime(),
@@ -20830,7 +21469,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  async getCachedEmbeddings(opts) {
  return this.withDatabase(async () => {
  try {
- const results = await this.db.execute(
+ const results = await this.db.execute(sql25`
  WITH content_text AS (
  SELECT
  m.id,
@@ -20870,7 +21509,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  levenshtein_score: Number(row.levenshtein_score)
  })).filter((row) => Array.isArray(row.embedding));
  } catch (error) {
-
+ logger9.error({ src: "plugin:sql", tableName: opts.query_table_name, fieldName: opts.query_field_name, error: error instanceof Error ? error.message : String(error) }, "Failed to get cached embeddings");
  if (error instanceof Error && error.message === "levenshtein argument exceeds maximum length of 255 characters") {
  return [];
  }
@@ -20883,16 +21522,16 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  try {
  const sanitizedBody = this.sanitizeJsonObject(params.body);
  const jsonString = JSON.stringify(sanitizedBody);
- await this.
+ await this.withEntityContext(params.entityId, async (tx) => {
  await tx.insert(logTable).values({
- body:
+ body: sql25`${jsonString}::jsonb`,
  entityId: params.entityId,
  roomId: params.roomId,
  type: params.type
  });
  });
  } catch (error) {
-
+ logger9.error({ src: "plugin:sql", type: params.type, roomId: params.roomId, entityId: params.entityId, error: error instanceof Error ? error.message : String(error) }, "Failed to create log entry");
  throw error;
  }
  });
@@ -20925,8 +21564,8 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  async getLogs(params) {
  const { entityId, roomId, type, count: count2, offset } = params;
- return this.
- const result = await
+ return this.withEntityContext(entityId ?? null, async (tx) => {
+ const result = await tx.select().from(logTable).where(and(roomId ? eq2(logTable.roomId, roomId) : undefined, type ? eq2(logTable.type, type) : undefined)).orderBy(desc(logTable.createdAt)).limit(count2 ?? 10).offset(offset ?? 0);
  const logs = result.map((log2) => ({
  ...log2,
  id: log2.id,
@@ -20944,15 +21583,15 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const limit = Math.min(Math.max(params.limit ?? 20, 1), 100);
  const fromDate = typeof params.from === "number" ? new Date(params.from) : undefined;
  const toDate = typeof params.to === "number" ? new Date(params.to) : undefined;
- return this.
+ return this.withEntityContext(params.entityId ?? null, async (tx) => {
  const runMap = new Map;
  const conditions = [
-
-
-
+ eq2(logTable.type, "run_event"),
+ sql25`${logTable.body} ? 'runId'`,
+ eq2(roomTable.agentId, this.agentId)
  ];
  if (params.roomId) {
- conditions.push(
+ conditions.push(eq2(logTable.roomId, params.roomId));
  }
  if (fromDate) {
  conditions.push(gte(logTable.createdAt, fromDate));
@@ -20962,15 +21601,15 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  const whereClause = and(...conditions);
  const eventLimit = Math.max(limit * 20, 200);
- const runEventRows = await
- runId:
- status:
- messageId:
+ const runEventRows = await tx.select({
+ runId: sql25`(${logTable.body} ->> 'runId')`,
+ status: sql25`(${logTable.body} ->> 'status')`,
+ messageId: sql25`(${logTable.body} ->> 'messageId')`,
  rawBody: logTable.body,
  createdAt: logTable.createdAt,
  roomId: logTable.roomId,
  entityId: logTable.entityId
- }).from(logTable).innerJoin(roomTable,
+ }).from(logTable).innerJoin(roomTable, eq2(roomTable.id, logTable.roomId)).where(whereClause).orderBy(desc(logTable.createdAt)).limit(eventLimit);
  for (const row of runEventRows) {
  const runId = row.runId;
  if (!runId)
@@ -21039,8 +21678,8 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  const runIds = limitedRuns.map((run) => run.runId).filter(Boolean);
  if (runIds.length > 0) {
- const runIdArray =
- const actionSummary = await this.db.execute(
+ const runIdArray = sql25`array[${sql25.join(runIds.map((id) => sql25`${id}`), sql25`, `)}]::text[]`;
+ const actionSummary = await this.db.execute(sql25`
  SELECT
  body->>'runId' as "runId",
  COUNT(*)::int as "actions",
@@ -21060,7 +21699,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  counts.errors += Number(row.errors ?? 0);
  counts.modelCalls += Number(row.modelCalls ?? 0);
  }
- const evaluatorSummary = await this.db.execute(
+ const evaluatorSummary = await this.db.execute(sql25`
  SELECT
  body->>'runId' as "runId",
  COUNT(*)::int as "evaluators"
@@ -21076,7 +21715,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  continue;
  counts.evaluators += Number(row.evaluators ?? 0);
  }
- const genericSummary = await this.db.execute(
+ const genericSummary = await this.db.execute(sql25`
  SELECT
  body->>'runId' as "runId",
  COUNT(*) FILTER (WHERE type LIKE 'useModel:%')::int as "modelLogs",
@@ -21112,7 +21751,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  async deleteLog(logId) {
  return this.withDatabase(async () => {
- await this.db.delete(logTable).where(
+ await this.db.delete(logTable).where(eq2(logTable.id, logId));
  });
  }
  async searchMemories(params) {
@@ -21129,20 +21768,20 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  async searchMemoriesByEmbedding(embedding, params) {
  return this.withDatabase(async () => {
  const cleanVector = embedding.map((n) => Number.isFinite(n) ? Number(n.toFixed(6)) : 0);
- const similarity =
- const conditions = [
+ const similarity = sql25`1 - (${cosineDistance(embeddingTable[this.embeddingDimension], cleanVector)})`;
+ const conditions = [eq2(memoryTable.type, params.tableName)];
  if (params.unique) {
- conditions.push(
+ conditions.push(eq2(memoryTable.unique, true));
  }
- conditions.push(
+ conditions.push(eq2(memoryTable.agentId, this.agentId));
  if (params.roomId) {
- conditions.push(
+ conditions.push(eq2(memoryTable.roomId, params.roomId));
  }
  if (params.worldId) {
- conditions.push(
+ conditions.push(eq2(memoryTable.worldId, params.worldId));
  }
  if (params.entityId) {
- conditions.push(
+ conditions.push(eq2(memoryTable.entityId, params.entityId));
  }
  if (params.match_threshold) {
  conditions.push(gte(similarity, params.match_threshold));
@@ -21151,7 +21790,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  memory: memoryTable,
  similarity,
  embedding: embeddingTable[this.embeddingDimension]
- }).from(embeddingTable).innerJoin(memoryTable,
+ }).from(embeddingTable).innerJoin(memoryTable, eq2(memoryTable.id, embeddingTable.memoryId)).where(and(...conditions)).orderBy(desc(similarity)).limit(params.count ?? 10);
  return results.map((row) => ({
  id: row.memory.id,
  type: row.memory.type,
@@ -21169,11 +21808,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  });
  }
  async createMemory(memory, tableName) {
- logger7.debug(`DrizzleAdapter createMemory: memoryId: ${memory.id}, embeddingLength: ${memory.embedding?.length}, contentLength: ${memory.content?.text?.length}`);
  const memoryId = memory.id ?? v4_default();
  const existing = await this.getMemoryById(memoryId);
  if (existing) {
- logger7.debug(`Memory already exists, skipping creation: ${memoryId}`);
  return memoryId;
  }
  if (memory.unique === undefined) {
@@ -21192,13 +21829,13 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  const contentToInsert = typeof memory.content === "string" ? memory.content : JSON.stringify(memory.content ?? {});
  const metadataToInsert = typeof memory.metadata === "string" ? memory.metadata : JSON.stringify(memory.metadata ?? {});
- await this.
+ await this.withEntityContext(memory.entityId, async (tx) => {
  await tx.insert(memoryTable).values([
  {
  id: memoryId,
  type: tableName,
- content:
- metadata:
+ content: sql25`${contentToInsert}::jsonb`,
+ metadata: sql25`${metadataToInsert}::jsonb`,
  entityId: memory.entityId,
  roomId: memory.roomId,
  worldId: memory.worldId,
@@ -21223,28 +21860,27 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  async updateMemory(memory) {
  return this.withDatabase(async () => {
  try {
- logger7.debug(`Updating memory: memoryId: ${memory.id}, hasEmbedding: ${!!memory.embedding}`);
  await this.db.transaction(async (tx) => {
  if (memory.content) {
  const contentToUpdate = typeof memory.content === "string" ? memory.content : JSON.stringify(memory.content ?? {});
  const metadataToUpdate = typeof memory.metadata === "string" ? memory.metadata : JSON.stringify(memory.metadata ?? {});
  await tx.update(memoryTable).set({
- content:
- ...memory.metadata && { metadata:
- }).where(
+ content: sql25`${contentToUpdate}::jsonb`,
+ ...memory.metadata && { metadata: sql25`${metadataToUpdate}::jsonb` }
+ }).where(eq2(memoryTable.id, memory.id));
  } else if (memory.metadata) {
  const metadataToUpdate = typeof memory.metadata === "string" ? memory.metadata : JSON.stringify(memory.metadata ?? {});
  await tx.update(memoryTable).set({
- metadata:
- }).where(
+ metadata: sql25`${metadataToUpdate}::jsonb`
+ }).where(eq2(memoryTable.id, memory.id));
  }
  if (memory.embedding && Array.isArray(memory.embedding)) {
  const cleanVector = memory.embedding.map((n) => Number.isFinite(n) ? Number(n.toFixed(6)) : 0);
- const existingEmbedding = await tx.select({ id: embeddingTable.id }).from(embeddingTable).where(
+ const existingEmbedding = await tx.select({ id: embeddingTable.id }).from(embeddingTable).where(eq2(embeddingTable.memoryId, memory.id)).limit(1);
  if (existingEmbedding.length > 0) {
  const updateValues = {};
  updateValues[this.embeddingDimension] = cleanVector;
- await tx.update(embeddingTable).set(updateValues).where(
+ await tx.update(embeddingTable).set(updateValues).where(eq2(embeddingTable.memoryId, memory.id));
  } else {
  const embeddingValues = {
  id: v4_default(),
|
|
|
21255
21891
|
}
|
|
21256
21892
|
}
|
|
21257
21893
|
});
|
|
21258
|
-
logger7.debug(`Memory updated successfully: ${memory.id}`);
|
|
21259
21894
|
return true;
|
|
21260
21895
|
} catch (error) {
|
|
21261
|
-
|
|
21896
|
+
logger9.error({ src: "plugin:sql", memoryId: memory.id, error: error instanceof Error ? error.message : String(error) }, "Failed to update memory");
|
|
21262
21897
|
return false;
|
|
21263
21898
|
}
|
|
21264
21899
|
});
|
|
@@ -21267,10 +21902,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21267
21902
|
return this.withDatabase(async () => {
|
|
21268
21903
|
await this.db.transaction(async (tx) => {
|
|
21269
21904
|
await this.deleteMemoryFragments(tx, memoryId);
|
|
21270
|
-
await tx.delete(embeddingTable).where(
|
|
21271
|
-
await tx.delete(memoryTable).where(
|
|
21905
|
+
await tx.delete(embeddingTable).where(eq2(embeddingTable.memoryId, memoryId));
|
|
21906
|
+
await tx.delete(memoryTable).where(eq2(memoryTable.id, memoryId));
|
|
21272
21907
|
});
|
|
21273
|
-
logger7.debug(`Memory and related fragments removed successfully: ${memoryId}`);
|
|
21274
21908
|
});
|
|
21275
21909
|
}
|
|
21276
21910
|
async deleteManyMemories(memoryIds) {
|
|
@@ -21289,7 +21923,6 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21289
21923
|
await tx.delete(memoryTable).where(inArray(memoryTable.id, batch));
|
|
21290
21924
|
}
|
|
21291
21925
|
});
|
|
21292
|
-
logger7.debug(`Batch memory deletion completed successfully: ${memoryIds.length}`);
|
|
21293
21926
|
});
|
|
21294
21927
|
}
|
|
21295
21928
|
async deleteMemoryFragments(tx, documentId) {
|
|
@@ -21298,40 +21931,38 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21298
21931
|
const fragmentIds = fragmentsToDelete.map((f) => f.id);
|
|
21299
21932
|
await tx.delete(embeddingTable).where(inArray(embeddingTable.memoryId, fragmentIds));
|
|
21300
21933
|
await tx.delete(memoryTable).where(inArray(memoryTable.id, fragmentIds));
|
|
21301
|
-
logger7.debug(`Deleted related fragments: documentId: ${documentId}, fragmentCount: ${fragmentsToDelete.length}`);
|
|
21302
21934
|
}
|
|
21303
21935
|
}
|
|
21304
21936
|
async getMemoryFragments(tx, documentId) {
|
|
21305
|
-
const fragments = await tx.select({ id: memoryTable.id }).from(memoryTable).where(and(
|
|
21937
|
+
const fragments = await tx.select({ id: memoryTable.id }).from(memoryTable).where(and(eq2(memoryTable.agentId, this.agentId), sql25`${memoryTable.metadata}->>'documentId' = ${documentId}`));
|
|
21306
21938
|
return fragments.map((f) => ({ id: f.id }));
|
|
21307
21939
|
}
|
|
21308
21940
|
async deleteAllMemories(roomId, tableName) {
|
|
21309
21941
|
return this.withDatabase(async () => {
|
|
21310
21942
|
await this.db.transaction(async (tx) => {
|
|
21311
|
-
const rows = await tx.select({ id: memoryTable.id }).from(memoryTable).where(and(
|
|
21943
|
+
const rows = await tx.select({ id: memoryTable.id }).from(memoryTable).where(and(eq2(memoryTable.roomId, roomId), eq2(memoryTable.type, tableName)));
|
|
21312
21944
|
const ids = rows.map((r) => r.id);
|
|
21313
|
-
|
|
21945
|
+
logger9.debug({ src: "plugin:sql", roomId, tableName, memoryCount: ids.length }, "Deleting all memories");
|
|
21314
21946
|
if (ids.length === 0) {
|
|
21315
21947
|
return;
|
|
21316
21948
|
}
|
|
21317
21949
|
await Promise.all(ids.map(async (memoryId) => {
|
|
21318
21950
|
await this.deleteMemoryFragments(tx, memoryId);
|
|
21319
|
-
await tx.delete(embeddingTable).where(
|
|
21951
|
+
await tx.delete(embeddingTable).where(eq2(embeddingTable.memoryId, memoryId));
|
|
21320
21952
|
}));
|
|
21321
|
-
await tx.delete(memoryTable).where(and(
|
|
21953
|
+
await tx.delete(memoryTable).where(and(eq2(memoryTable.roomId, roomId), eq2(memoryTable.type, tableName)));
|
|
21322
21954
|
});
|
|
21323
|
-
logger7.debug(`All memories removed successfully: roomId: ${roomId}, tableName: ${tableName}`);
|
|
21324
21955
|
});
|
|
21325
21956
|
}
|
|
21326
21957
|
async countMemories(roomId, unique3 = true, tableName = "") {
|
|
21327
21958
|
if (!tableName)
|
|
21328
21959
|
throw new Error("tableName is required");
|
|
21329
21960
|
return this.withDatabase(async () => {
|
|
21330
|
-
const conditions = [
|
|
21961
|
+
const conditions = [eq2(memoryTable.roomId, roomId), eq2(memoryTable.type, tableName)];
|
|
21331
21962
|
if (unique3) {
|
|
21332
|
-
conditions.push(
|
|
21963
|
+
conditions.push(eq2(memoryTable.unique, true));
|
|
21333
21964
|
}
|
|
21334
|
-
const result = await this.db.select({ count:
|
|
21965
|
+
const result = await this.db.select({ count: sql25`count(*)` }).from(memoryTable).where(and(...conditions));
|
|
21335
21966
|
return Number(result[0]?.count ?? 0);
|
|
21336
21967
|
});
|
|
21337
21968
|
}
|
|
@@ -21342,18 +21973,19 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21342
21973
|
name: roomTable.name,
|
|
21343
21974
|
channelId: roomTable.channelId,
|
|
21344
21975
|
agentId: roomTable.agentId,
|
|
21345
|
-
|
|
21976
|
+
messageServerId: roomTable.messageServerId,
|
|
21346
21977
|
worldId: roomTable.worldId,
|
|
21347
21978
|
type: roomTable.type,
|
|
21348
21979
|
source: roomTable.source,
|
|
21349
21980
|
metadata: roomTable.metadata
|
|
21350
|
-
}).from(roomTable).where(and(inArray(roomTable.id, roomIds),
|
|
21981
|
+
}).from(roomTable).where(and(inArray(roomTable.id, roomIds), eq2(roomTable.agentId, this.agentId)));
|
|
21351
21982
|
const rooms = result.map((room) => ({
|
|
21352
21983
|
...room,
|
|
21353
21984
|
id: room.id,
|
|
21354
21985
|
name: room.name ?? undefined,
|
|
21355
21986
|
agentId: room.agentId,
|
|
21356
|
-
|
|
21987
|
+
messageServerId: room.messageServerId,
|
|
21988
|
+
serverId: room.messageServerId,
|
|
21357
21989
|
worldId: room.worldId,
|
|
21358
21990
|
channelId: room.channelId,
|
|
21359
21991
|
type: room.type,
|
|
@@ -21364,13 +21996,14 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21364
21996
|
}
|
|
21365
21997
|
async getRoomsByWorld(worldId) {
|
|
21366
21998
|
return this.withDatabase(async () => {
|
|
21367
|
-
const result = await this.db.select().from(roomTable).where(
|
|
21999
|
+
const result = await this.db.select().from(roomTable).where(eq2(roomTable.worldId, worldId));
|
|
21368
22000
|
const rooms = result.map((room) => ({
|
|
21369
22001
|
...room,
|
|
21370
22002
|
id: room.id,
|
|
21371
22003
|
name: room.name ?? undefined,
|
|
21372
22004
|
agentId: room.agentId,
|
|
21373
|
-
|
|
22005
|
+
messageServerId: room.messageServerId,
|
|
22006
|
+
serverId: room.messageServerId,
|
|
21374
22007
|
worldId: room.worldId,
|
|
21375
22008
|
channelId: room.channelId,
|
|
21376
22009
|
type: room.type,
|
|
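In the room mappers above, the selected messageServerId column is also copied onto a serverId field; this reads as a compatibility alias for callers that still use the older field name (an inference from the diff, not a documented guarantee). In sketch form:

  // Hypothetical row shape, for illustration only.
  type RoomRow = { id: string; messageServerId: string | null };
  const toRoom = (row: RoomRow) => ({ ...row, serverId: row.messageServerId });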
@@ -21381,7 +22014,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21381
22014
|
}
|
|
21382
22015
|
async updateRoom(room) {
|
|
21383
22016
|
return this.withDatabase(async () => {
|
|
21384
|
-
await this.db.update(roomTable).set({ ...room, agentId: this.agentId }).where(
|
|
22017
|
+
await this.db.update(roomTable).set({ ...room, agentId: this.agentId }).where(eq2(roomTable.id, room.id));
|
|
21385
22018
|
});
|
|
21386
22019
|
}
|
|
21387
22020
|
async createRooms(rooms) {
|
|
@@ -21401,19 +22034,19 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21401
22034
|
throw new Error("Room ID is required");
|
|
21402
22035
|
return this.withDatabase(async () => {
|
|
21403
22036
|
await this.db.transaction(async (tx) => {
|
|
21404
|
-
await tx.delete(roomTable).where(
|
|
22037
|
+
await tx.delete(roomTable).where(eq2(roomTable.id, roomId));
|
|
21405
22038
|
});
|
|
21406
22039
|
});
|
|
21407
22040
|
}
|
|
21408
22041
|
async getRoomsForParticipant(entityId) {
|
|
21409
22042
|
return this.withDatabase(async () => {
|
|
21410
|
-
const result = await this.db.select({ roomId: participantTable.roomId }).from(participantTable).innerJoin(roomTable,
|
|
22043
|
+
const result = await this.db.select({ roomId: participantTable.roomId }).from(participantTable).innerJoin(roomTable, eq2(participantTable.roomId, roomTable.id)).where(and(eq2(participantTable.entityId, entityId), eq2(roomTable.agentId, this.agentId)));
|
|
21411
22044
|
return result.map((row) => row.roomId);
|
|
21412
22045
|
});
|
|
21413
22046
|
}
|
|
21414
22047
|
async getRoomsForParticipants(entityIds) {
|
|
21415
22048
|
return this.withDatabase(async () => {
|
|
21416
|
-
const result = await this.db.selectDistinct({ roomId: participantTable.roomId }).from(participantTable).innerJoin(roomTable,
|
|
22049
|
+
const result = await this.db.selectDistinct({ roomId: participantTable.roomId }).from(participantTable).innerJoin(roomTable, eq2(participantTable.roomId, roomTable.id)).where(and(inArray(participantTable.entityId, entityIds), eq2(roomTable.agentId, this.agentId)));
|
|
21417
22050
|
return result.map((row) => row.roomId);
|
|
21418
22051
|
});
|
|
21419
22052
|
}
|
|
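getRoomsForParticipant and getRoomsForParticipants above pair an inner join with an agent-scoped filter, so an entity's rooms are only returned when they belong to this adapter's agentId. A sketch of that join; the db instance comes from the first sketch above and the two tables are again illustrative, not the package's exports.

  import { and, eq } from "drizzle-orm";
  import { pgTable, uuid } from "drizzle-orm/pg-core";

  const rooms = pgTable("rooms", {
    id: uuid("id").primaryKey(),
    agentId: uuid("agent_id").notNull(),
  });
  const participants = pgTable("participants", {
    id: uuid("id").primaryKey(),
    entityId: uuid("entity_id").notNull(),
    roomId: uuid("room_id").notNull(),
  });

  // Rooms a single entity participates in, limited to one agent's rooms.
  async function roomsForEntity(entityId: string, agentId: string) {
    const rows = await db
      .select({ roomId: participants.roomId })
      .from(participants)
      .innerJoin(rooms, eq(participants.roomId, rooms.id))
      .where(and(eq(participants.entityId, entityId), eq(rooms.agentId, agentId)));
    return rows.map((r) => r.roomId);
  }

The plural variant in the diff swaps eq(participants.entityId, ...) for inArray(...) and uses selectDistinct so rooms shared by several entities are not repeated.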
@@ -21427,7 +22060,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21427
22060
|
}).onConflictDoNothing();
|
|
21428
22061
|
return true;
|
|
21429
22062
|
} catch (error) {
|
|
21430
|
-
|
|
22063
|
+
logger9.error({ src: "plugin:sql", entityId, roomId, agentId: this.agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to add participant to room");
|
|
21431
22064
|
return false;
|
|
21432
22065
|
}
|
|
21433
22066
|
});
|
|
@@ -21441,10 +22074,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21441
22074
|
agentId: this.agentId
|
|
21442
22075
|
}));
|
|
21443
22076
|
await this.db.insert(participantTable).values(values).onConflictDoNothing().execute();
|
|
21444
|
-
logger7.debug(`${entityIds.length} Entities linked successfully`);
|
|
21445
22077
|
return true;
|
|
21446
22078
|
} catch (error) {
|
|
21447
|
-
|
|
22079
|
+
logger9.error({ src: "plugin:sql", roomId, agentId: this.agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to add participants to room");
|
|
21448
22080
|
return false;
|
|
21449
22081
|
}
|
|
21450
22082
|
});
|
|
@@ -21453,13 +22085,12 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21453
22085
|
return this.withDatabase(async () => {
|
|
21454
22086
|
try {
|
|
21455
22087
|
const result = await this.db.transaction(async (tx) => {
|
|
21456
|
-
return await tx.delete(participantTable).where(and(
|
|
22088
|
+
return await tx.delete(participantTable).where(and(eq2(participantTable.entityId, entityId), eq2(participantTable.roomId, roomId))).returning();
|
|
21457
22089
|
});
|
|
21458
22090
|
const removed = result.length > 0;
|
|
21459
|
-
logger7.debug(`Participant ${removed ? "removed" : "not found"}: entityId: ${entityId}, roomId: ${roomId}, removed: ${removed}`);
|
|
21460
22091
|
return removed;
|
|
21461
22092
|
} catch (error) {
|
|
21462
|
-
|
|
22093
|
+
logger9.error({ src: "plugin:sql", entityId, roomId, error: error instanceof Error ? error.message : String(error) }, "Failed to remove participant from room");
|
|
21463
22094
|
return false;
|
|
21464
22095
|
}
|
|
21465
22096
|
});
|
|
@@ -21470,7 +22101,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21470
22101
|
id: participantTable.id,
|
|
21471
22102
|
entityId: participantTable.entityId,
|
|
21472
22103
|
roomId: participantTable.roomId
|
|
21473
|
-
}).from(participantTable).where(
|
|
22104
|
+
}).from(participantTable).where(eq2(participantTable.entityId, entityId));
|
|
21474
22105
|
const entities = await this.getEntitiesByIds([entityId]);
|
|
21475
22106
|
if (!entities || !entities.length) {
|
|
21476
22107
|
return [];
|
|
@@ -21483,13 +22114,19 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21483
22114
|
}
|
|
21484
22115
|
async getParticipantsForRoom(roomId) {
|
|
21485
22116
|
return this.withDatabase(async () => {
|
|
21486
|
-
const result = await this.db.select({ entityId: participantTable.entityId }).from(participantTable).where(
|
|
22117
|
+
const result = await this.db.select({ entityId: participantTable.entityId }).from(participantTable).where(eq2(participantTable.roomId, roomId));
|
|
21487
22118
|
return result.map((row) => row.entityId);
|
|
21488
22119
|
});
|
|
21489
22120
|
}
|
|
22121
|
+
async isRoomParticipant(roomId, entityId) {
|
|
22122
|
+
return this.withDatabase(async () => {
|
|
22123
|
+
const result = await this.db.select().from(participantTable).where(and(eq2(participantTable.roomId, roomId), eq2(participantTable.entityId, entityId))).limit(1);
|
|
22124
|
+
return result.length > 0;
|
|
22125
|
+
});
|
|
22126
|
+
}
|
|
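isRoomParticipant (added above) and isChannelParticipant (added further down) both use the same existence-check idiom: select by the two key columns, limit(1), and return result.length > 0 instead of running a count. A compact sketch, reusing the illustrative db and participants table from the earlier sketches:

  import { and, eq } from "drizzle-orm";

  async function isParticipant(roomId: string, entityId: string): Promise<boolean> {
    const hit = await db
      .select({ id: participants.id })
      .from(participants)
      .where(and(eq(participants.roomId, roomId), eq(participants.entityId, entityId)))
      .limit(1);
    return hit.length > 0;
  }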
21490
22127
|
async getParticipantUserState(roomId, entityId) {
|
|
21491
22128
|
return this.withDatabase(async () => {
|
|
21492
|
-
const result = await this.db.select({ roomState: participantTable.roomState }).from(participantTable).where(and(
|
|
22129
|
+
const result = await this.db.select({ roomState: participantTable.roomState }).from(participantTable).where(and(eq2(participantTable.roomId, roomId), eq2(participantTable.entityId, entityId), eq2(participantTable.agentId, this.agentId))).limit(1);
|
|
21493
22130
|
return result[0]?.roomState ?? null;
|
|
21494
22131
|
});
|
|
21495
22132
|
}
|
|
@@ -21497,10 +22134,10 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21497
22134
|
return this.withDatabase(async () => {
|
|
21498
22135
|
try {
|
|
21499
22136
|
await this.db.transaction(async (tx) => {
|
|
21500
|
-
await tx.update(participantTable).set({ roomState: state }).where(and(
|
|
22137
|
+
await tx.update(participantTable).set({ roomState: state }).where(and(eq2(participantTable.roomId, roomId), eq2(participantTable.entityId, entityId), eq2(participantTable.agentId, this.agentId)));
|
|
21501
22138
|
});
|
|
21502
22139
|
} catch (error) {
|
|
21503
|
-
|
|
22140
|
+
logger9.error({ src: "plugin:sql", roomId, entityId, state, error: error instanceof Error ? error.message : String(error) }, "Failed to set participant follow state");
|
|
21504
22141
|
throw error;
|
|
21505
22142
|
}
|
|
21506
22143
|
});
|
|
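Throughout these hunks the old template-string logs (logger7.debug(`...`)) give way to structured calls: an object of fields first, a constant message second, with unknown errors normalized to a string. A self-contained sketch of that shape; StructuredLogger here is only a stand-in interface matching the call signature used in the diff, not the runtime's actual logger type.

  type LogFields = Record<string, unknown>;
  interface StructuredLogger {
    error(fields: LogFields, message: string): void;
  }

  // Normalizes unknown errors the way the new code does, then logs stable fields
  // (src, ids) alongside a constant message string that is easy to search for.
  function logDbFailure(log: StructuredLogger, roomId: string, entityId: string, error: unknown): void {
    log.error(
      { src: "plugin:sql", roomId, entityId, error: error instanceof Error ? error.message : String(error) },
      "Failed to remove participant from room"
    );
  }

Keeping the message constant and moving the variable parts into fields is what makes these entries filterable by src: "plugin:sql" downstream.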
@@ -21520,7 +22157,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21520
22157
|
await this.db.insert(relationshipTable).values(saveParams);
|
|
21521
22158
|
return true;
|
|
21522
22159
|
} catch (error) {
|
|
21523
|
-
|
|
22160
|
+
logger9.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), saveParams }, "Error creating relationship");
|
|
21524
22161
|
return false;
|
|
21525
22162
|
}
|
|
21526
22163
|
});
|
|
@@ -21531,9 +22168,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21531
22168
|
await this.db.update(relationshipTable).set({
|
|
21532
22169
|
tags: relationship.tags || [],
|
|
21533
22170
|
metadata: relationship.metadata || {}
|
|
21534
|
-
}).where(
|
|
22171
|
+
}).where(eq2(relationshipTable.id, relationship.id));
|
|
21535
22172
|
} catch (error) {
|
|
21536
|
-
|
|
22173
|
+
logger9.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), relationshipId: relationship.id }, "Error updating relationship");
|
|
21537
22174
|
throw error;
|
|
21538
22175
|
}
|
|
21539
22176
|
});
|
|
@@ -21541,7 +22178,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21541
22178
|
async getRelationship(params) {
|
|
21542
22179
|
return this.withDatabase(async () => {
|
|
21543
22180
|
const { sourceEntityId, targetEntityId } = params;
|
|
21544
|
-
const result = await this.db.select().from(relationshipTable).where(and(
|
|
22181
|
+
const result = await this.db.select().from(relationshipTable).where(and(eq2(relationshipTable.sourceEntityId, sourceEntityId), eq2(relationshipTable.targetEntityId, targetEntityId)));
|
|
21545
22182
|
if (result.length === 0)
|
|
21546
22183
|
return null;
|
|
21547
22184
|
const relationship = result[0];
|
|
@@ -21562,13 +22199,13 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21562
22199
|
const { entityId, tags } = params;
|
|
21563
22200
|
let query;
|
|
21564
22201
|
if (tags && tags.length > 0) {
|
|
21565
|
-
query =
|
|
22202
|
+
query = sql25`
|
|
21566
22203
|
SELECT * FROM ${relationshipTable}
|
|
21567
22204
|
WHERE (${relationshipTable.sourceEntityId} = ${entityId} OR ${relationshipTable.targetEntityId} = ${entityId})
|
|
21568
|
-
AND ${relationshipTable.tags} && CAST(ARRAY[${
|
|
22205
|
+
AND ${relationshipTable.tags} && CAST(ARRAY[${sql25.join(tags, sql25`, `)}] AS text[])
|
|
21569
22206
|
`;
|
|
21570
22207
|
} else {
|
|
21571
|
-
query =
|
|
22208
|
+
query = sql25`
|
|
21572
22209
|
SELECT * FROM ${relationshipTable}
|
|
21573
22210
|
WHERE ${relationshipTable.sourceEntityId} = ${entityId} OR ${relationshipTable.targetEntityId} = ${entityId}
|
|
21574
22211
|
`;
|
|
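The getRelationships hunk above builds its query with drizzle's sql template tag; when tags are supplied, the bundled code passes the string array straight to sql25.join so the values are spliced between commas and then cast to text[] for the && (array overlap) operator. A sketch of the same construction; the table definition is illustrative, and the tags are mapped through sql`${t}` here so each one is explicitly a bound parameter.

  import { sql } from "drizzle-orm";
  import { pgTable, text, uuid } from "drizzle-orm/pg-core";

  const relationships = pgTable("relationships", {
    id: uuid("id").primaryKey(),
    sourceEntityId: uuid("source_entity_id").notNull(),
    targetEntityId: uuid("target_entity_id").notNull(),
    tags: text("tags").array(),
  });

  // && matches rows whose tags share at least one element with the supplied list.
  function tagOverlapQuery(entityId: string, tags: string[]) {
    return sql`
      SELECT * FROM ${relationships}
      WHERE (${relationships.sourceEntityId} = ${entityId} OR ${relationships.targetEntityId} = ${entityId})
        AND ${relationships.tags} && CAST(ARRAY[${sql.join(tags.map((t) => sql`${t}`), sql`, `)}] AS text[])
    `;
  }

The query is then run with db.execute(query), which is why the row mapping a few hunks down has to cope with raw column names.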
@@ -21577,25 +22214,25 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21577
22214
|
return result.rows.map((relationship) => ({
|
|
21578
22215
|
...relationship,
|
|
21579
22216
|
id: relationship.id,
|
|
21580
|
-
sourceEntityId: relationship.sourceEntityId,
|
|
21581
|
-
targetEntityId: relationship.targetEntityId,
|
|
21582
|
-
agentId: relationship.agentId,
|
|
22217
|
+
sourceEntityId: relationship.source_entity_id || relationship.sourceEntityId,
|
|
22218
|
+
targetEntityId: relationship.target_entity_id || relationship.targetEntityId,
|
|
22219
|
+
agentId: relationship.agent_id || relationship.agentId,
|
|
21583
22220
|
tags: relationship.tags ?? [],
|
|
21584
22221
|
metadata: relationship.metadata ?? {},
|
|
21585
|
-
createdAt: relationship.createdAt ? relationship.createdAt instanceof Date ? relationship.createdAt.toISOString() : new Date(relationship.createdAt).toISOString() : new Date().toISOString()
|
|
22222
|
+
createdAt: relationship.created_at || relationship.createdAt ? (relationship.created_at || relationship.createdAt) instanceof Date ? (relationship.created_at || relationship.createdAt).toISOString() : new Date(relationship.created_at || relationship.createdAt).toISOString() : new Date().toISOString()
|
|
21586
22223
|
}));
|
|
21587
22224
|
});
|
|
21588
22225
|
}
|
|
21589
22226
|
async getCache(key) {
|
|
21590
22227
|
return this.withDatabase(async () => {
|
|
21591
22228
|
try {
|
|
21592
|
-
const result = await this.db.select({ value: cacheTable.value }).from(cacheTable).where(and(
|
|
22229
|
+
const result = await this.db.select({ value: cacheTable.value }).from(cacheTable).where(and(eq2(cacheTable.agentId, this.agentId), eq2(cacheTable.key, key))).limit(1);
|
|
21593
22230
|
if (result && result.length > 0 && result[0]) {
|
|
21594
22231
|
return result[0].value;
|
|
21595
22232
|
}
|
|
21596
22233
|
return;
|
|
21597
22234
|
} catch (error) {
|
|
21598
|
-
|
|
22235
|
+
logger9.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), key }, "Error fetching cache");
|
|
21599
22236
|
return;
|
|
21600
22237
|
}
|
|
21601
22238
|
});
|
|
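Because the relationship query goes through db.execute with raw SQL, the returned rows can carry snake_case column names rather than the camelCase keys the query builder would produce, so the mapper above accepts either spelling (source_entity_id || sourceEntityId, created_at || createdAt) and normalizes createdAt to an ISO string. A reduced sketch of that defensive mapping; the row type is hypothetical and trimmed to the fields needed for illustration.

  type RawRelationshipRow = {
    id: string;
    source_entity_id?: string;
    sourceEntityId?: string;
    created_at?: string | Date;
    createdAt?: string | Date;
  };

  function normalizeRelationship(row: RawRelationshipRow) {
    const createdAt = row.created_at ?? row.createdAt;
    return {
      id: row.id,
      sourceEntityId: row.source_entity_id ?? row.sourceEntityId,
      createdAt: createdAt
        ? (createdAt instanceof Date ? createdAt : new Date(createdAt)).toISOString()
        : new Date().toISOString(),
    };
  }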
@@ -21615,7 +22252,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21615
22252
|
});
|
|
21616
22253
|
return true;
|
|
21617
22254
|
} catch (error) {
|
|
21618
|
-
|
|
22255
|
+
logger9.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), key }, "Error setting cache");
|
|
21619
22256
|
return false;
|
|
21620
22257
|
}
|
|
21621
22258
|
});
|
|
@@ -21624,11 +22261,11 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21624
22261
|
return this.withDatabase(async () => {
|
|
21625
22262
|
try {
|
|
21626
22263
|
await this.db.transaction(async (tx) => {
|
|
21627
|
-
await tx.delete(cacheTable).where(and(
|
|
22264
|
+
await tx.delete(cacheTable).where(and(eq2(cacheTable.agentId, this.agentId), eq2(cacheTable.key, key)));
|
|
21628
22265
|
});
|
|
21629
22266
|
return true;
|
|
21630
22267
|
} catch (error) {
|
|
21631
|
-
|
|
22268
|
+
logger9.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), key }, "Error deleting cache");
|
|
21632
22269
|
return false;
|
|
21633
22270
|
}
|
|
21634
22271
|
});
|
|
@@ -21646,24 +22283,24 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21646
22283
|
}
|
|
21647
22284
|
async getWorld(id) {
|
|
21648
22285
|
return this.withDatabase(async () => {
|
|
21649
|
-
const result = await this.db.select().from(worldTable).where(
|
|
22286
|
+
const result = await this.db.select().from(worldTable).where(eq2(worldTable.id, id));
|
|
21650
22287
|
return result.length > 0 ? result[0] : null;
|
|
21651
22288
|
});
|
|
21652
22289
|
}
|
|
21653
22290
|
async getAllWorlds() {
|
|
21654
22291
|
return this.withDatabase(async () => {
|
|
21655
|
-
const result = await this.db.select().from(worldTable).where(
|
|
22292
|
+
const result = await this.db.select().from(worldTable).where(eq2(worldTable.agentId, this.agentId));
|
|
21656
22293
|
return result;
|
|
21657
22294
|
});
|
|
21658
22295
|
}
|
|
21659
22296
|
async updateWorld(world) {
|
|
21660
22297
|
return this.withDatabase(async () => {
|
|
21661
|
-
await this.db.update(worldTable).set(world).where(
|
|
22298
|
+
await this.db.update(worldTable).set(world).where(eq2(worldTable.id, world.id));
|
|
21662
22299
|
});
|
|
21663
22300
|
}
|
|
21664
22301
|
async removeWorld(id) {
|
|
21665
22302
|
return this.withDatabase(async () => {
|
|
21666
|
-
await this.db.delete(worldTable).where(
|
|
22303
|
+
await this.db.delete(worldTable).where(eq2(worldTable.id, id));
|
|
21667
22304
|
});
|
|
21668
22305
|
}
|
|
21669
22306
|
async createTask(task) {
|
|
@@ -21694,8 +22331,8 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21694
22331
|
async getTasks(params) {
|
|
21695
22332
|
return this.withRetry(async () => {
|
|
21696
22333
|
return this.withDatabase(async () => {
|
|
21697
|
-
const result = await this.db.select().from(taskTable).where(and(
|
|
21698
|
-
|
|
22334
|
+
const result = await this.db.select().from(taskTable).where(and(eq2(taskTable.agentId, this.agentId), ...params.roomId ? [eq2(taskTable.roomId, params.roomId)] : [], ...params.tags && params.tags.length > 0 ? [
|
|
22335
|
+
sql25`${taskTable.tags} @> ARRAY[${sql25.raw(params.tags.map((t) => `'${t.replace(/'/g, "''")}'`).join(", "))}]::text[]`
|
|
21699
22336
|
] : []));
|
|
21700
22337
|
return result.map((row) => ({
|
|
21701
22338
|
id: row.id,
|
|
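getTasks above filters tags with the Postgres containment operator @> (the row's tags must include every requested tag), and because the values go through sql25.raw it first escapes single quotes by doubling them. A sketch of that condition builder; the column parameter is typed loosely here, and a fully parameterized variant (as in the relationship sketch earlier) would avoid raw interpolation altogether.

  import { sql } from "drizzle-orm";
  import type { AnyPgColumn } from "drizzle-orm/pg-core";

  // Builds: <tags column> @> ARRAY['a', 'b']::text[]
  function tagsContainAll(tagsColumn: AnyPgColumn, tags: string[]) {
    const literals = tags.map((t) => `'${t.replace(/'/g, "''")}'`).join(", ");
    return sql`${tagsColumn} @> ARRAY[${sql.raw(literals)}]::text[]`;
  }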
@@ -21712,7 +22349,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21712
22349
|
async getTasksByName(name) {
|
|
21713
22350
|
return this.withRetry(async () => {
|
|
21714
22351
|
return this.withDatabase(async () => {
|
|
21715
|
-
const result = await this.db.select().from(taskTable).where(and(
|
|
22352
|
+
const result = await this.db.select().from(taskTable).where(and(eq2(taskTable.name, name), eq2(taskTable.agentId, this.agentId)));
|
|
21716
22353
|
return result.map((row) => ({
|
|
21717
22354
|
id: row.id,
|
|
21718
22355
|
name: row.name,
|
|
@@ -21728,7 +22365,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21728
22365
|
async getTask(id) {
|
|
21729
22366
|
return this.withRetry(async () => {
|
|
21730
22367
|
return this.withDatabase(async () => {
|
|
21731
|
-
const result = await this.db.select().from(taskTable).where(and(
|
|
22368
|
+
const result = await this.db.select().from(taskTable).where(and(eq2(taskTable.id, id), eq2(taskTable.agentId, this.agentId))).limit(1);
|
|
21732
22369
|
if (result.length === 0) {
|
|
21733
22370
|
return null;
|
|
21734
22371
|
}
|
|
@@ -21763,18 +22400,18 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21763
22400
|
if (task.metadata !== undefined) {
|
|
21764
22401
|
updateValues.metadata = task.metadata;
|
|
21765
22402
|
}
|
|
21766
|
-
await this.db.update(taskTable).set(updateValues).where(and(
|
|
22403
|
+
await this.db.update(taskTable).set(updateValues).where(and(eq2(taskTable.id, id), eq2(taskTable.agentId, this.agentId)));
|
|
21767
22404
|
});
|
|
21768
22405
|
});
|
|
21769
22406
|
}
|
|
21770
22407
|
async deleteTask(id) {
|
|
21771
22408
|
return this.withDatabase(async () => {
|
|
21772
|
-
await this.db.delete(taskTable).where(
|
|
22409
|
+
await this.db.delete(taskTable).where(eq2(taskTable.id, id));
|
|
21773
22410
|
});
|
|
21774
22411
|
}
|
|
21775
22412
|
async getMemoriesByWorldId(params) {
|
|
21776
22413
|
return this.withDatabase(async () => {
|
|
21777
|
-
const rooms = await this.db.select({ id: roomTable.id }).from(roomTable).where(and(
|
|
22414
|
+
const rooms = await this.db.select({ id: roomTable.id }).from(roomTable).where(and(eq2(roomTable.worldId, params.worldId), eq2(roomTable.agentId, this.agentId)));
|
|
21778
22415
|
if (rooms.length === 0) {
|
|
21779
22416
|
return [];
|
|
21780
22417
|
}
|
|
@@ -21789,27 +22426,22 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21789
22426
|
}
|
|
21790
22427
|
async deleteRoomsByWorldId(worldId) {
|
|
21791
22428
|
return this.withDatabase(async () => {
|
|
21792
|
-
const rooms = await this.db.select({ id: roomTable.id }).from(roomTable).where(and(
|
|
22429
|
+
const rooms = await this.db.select({ id: roomTable.id }).from(roomTable).where(and(eq2(roomTable.worldId, worldId), eq2(roomTable.agentId, this.agentId)));
|
|
21793
22430
|
if (rooms.length === 0) {
|
|
21794
|
-
logger7.debug(`No rooms found for worldId ${worldId} and agentId ${this.agentId} to delete.`);
|
|
21795
22431
|
return;
|
|
21796
22432
|
}
|
|
21797
22433
|
const roomIds = rooms.map((room) => room.id);
|
|
21798
22434
|
if (roomIds.length > 0) {
|
|
21799
22435
|
await this.db.delete(logTable).where(inArray(logTable.roomId, roomIds));
|
|
21800
|
-
logger7.debug(`Deleted logs for ${roomIds.length} rooms in world ${worldId}.`);
|
|
21801
22436
|
await this.db.delete(participantTable).where(inArray(participantTable.roomId, roomIds));
|
|
21802
|
-
logger7.debug(`Deleted participants for ${roomIds.length} rooms in world ${worldId}.`);
|
|
21803
22437
|
const memoriesInRooms = await this.db.select({ id: memoryTable.id }).from(memoryTable).where(inArray(memoryTable.roomId, roomIds));
|
|
21804
22438
|
const memoryIdsInRooms = memoriesInRooms.map((m) => m.id);
|
|
21805
22439
|
if (memoryIdsInRooms.length > 0) {
|
|
21806
22440
|
await this.db.delete(embeddingTable).where(inArray(embeddingTable.memoryId, memoryIdsInRooms));
|
|
21807
|
-
logger7.debug(`Deleted embeddings for ${memoryIdsInRooms.length} memories in world ${worldId}.`);
|
|
21808
22441
|
await this.db.delete(memoryTable).where(inArray(memoryTable.id, memoryIdsInRooms));
|
|
21809
|
-
logger7.debug(`Deleted ${memoryIdsInRooms.length} memories in world ${worldId}.`);
|
|
21810
22442
|
}
|
|
21811
22443
|
await this.db.delete(roomTable).where(inArray(roomTable.id, roomIds));
|
|
21812
|
-
|
|
22444
|
+
logger9.debug({ src: "plugin:sql", worldId, roomsDeleted: roomIds.length, memoriesDeleted: memoryIdsInRooms.length }, "World cleanup completed");
|
|
21813
22445
|
}
|
|
21814
22446
|
});
|
|
21815
22447
|
}
|
|
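deleteRoomsByWorldId above now deletes dependents in a fixed child-before-parent order (logs, participants, embeddings, memories, then the rooms themselves) and replaces the old per-step debug lines with a single summary entry at the end. The ordering, reduced to a sketch; the table objects are illustrative and assumed to exist with the referenced columns, and db comes from the first sketch.

  import { inArray } from "drizzle-orm";

  async function deleteRoomsCascade(roomIds: string[], memoryIds: string[]) {
    await db.delete(logs).where(inArray(logs.roomId, roomIds));                   // room activity logs
    await db.delete(participants).where(inArray(participants.roomId, roomIds));   // memberships
    if (memoryIds.length > 0) {
      await db.delete(embeddings).where(inArray(embeddings.memoryId, memoryIds)); // vectors first
      await db.delete(memories).where(inArray(memories.id, memoryIds));           // then their memories
    }
    await db.delete(rooms).where(inArray(rooms.id, roomIds));                     // finally the rooms
  }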
@@ -21828,7 +22460,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21828
22460
|
};
|
|
21829
22461
|
await this.db.insert(messageServerTable).values(serverToInsert).onConflictDoNothing();
|
|
21830
22462
|
if (data.id) {
|
|
21831
|
-
const existing = await this.db.select().from(messageServerTable).where(
|
|
22463
|
+
const existing = await this.db.select().from(messageServerTable).where(eq2(messageServerTable.id, data.id)).limit(1);
|
|
21832
22464
|
if (existing.length > 0) {
|
|
21833
22465
|
return {
|
|
21834
22466
|
id: existing[0].id,
|
|
@@ -21861,7 +22493,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21861
22493
|
}
|
|
21862
22494
|
async getMessageServerById(serverId) {
|
|
21863
22495
|
return this.withDatabase(async () => {
|
|
21864
|
-
const results = await this.db.select().from(messageServerTable).where(
|
|
22496
|
+
const results = await this.db.select().from(messageServerTable).where(eq2(messageServerTable.id, serverId)).limit(1);
|
|
21865
22497
|
return results.length > 0 ? {
|
|
21866
22498
|
id: results[0].id,
|
|
21867
22499
|
name: results[0].name,
|
|
@@ -21873,6 +22505,26 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21873
22505
|
} : null;
|
|
21874
22506
|
});
|
|
21875
22507
|
}
|
|
22508
|
+
async getMessageServerByRlsServerId(rlsServerId) {
|
|
22509
|
+
return this.withDatabase(async () => {
|
|
22510
|
+
const results = await this.db.execute(sql25`
|
|
22511
|
+
SELECT id, name, source_type, source_id, metadata, created_at, updated_at
|
|
22512
|
+
FROM message_servers
|
|
22513
|
+
WHERE server_id = ${rlsServerId}
|
|
22514
|
+
LIMIT 1
|
|
22515
|
+
`);
|
|
22516
|
+
const rows = results.rows || results;
|
|
22517
|
+
return rows.length > 0 ? {
|
|
22518
|
+
id: rows[0].id,
|
|
22519
|
+
name: rows[0].name,
|
|
22520
|
+
sourceType: rows[0].source_type,
|
|
22521
|
+
sourceId: rows[0].source_id || undefined,
|
|
22522
|
+
metadata: rows[0].metadata || undefined,
|
|
22523
|
+
createdAt: new Date(rows[0].created_at),
|
|
22524
|
+
updatedAt: new Date(rows[0].updated_at)
|
|
22525
|
+
} : null;
|
|
22526
|
+
});
|
|
22527
|
+
}
|
|
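getMessageServerByRlsServerId (added above) is the one lookup in this file that bypasses the query builder: it selects snake_case columns from message_servers by a server_id value through db.execute, copes with drivers that return the row array directly rather than under .rows, and maps the result by hand. A trimmed sketch of the same flow; the table and column names are taken from the query shown above, not from any documented schema, so treat them as whatever the bundle's migrations actually create.

  import { sql } from "drizzle-orm";

  async function findByRlsServerId(rlsServerId: string) {
    const result: any = await db.execute(sql`
      SELECT id, name, source_type, created_at
      FROM message_servers
      WHERE server_id = ${rlsServerId}
      LIMIT 1
    `);
    const rows = result.rows ?? result; // node-postgres wraps rows, some drivers return the array itself
    if (!rows.length) return null;
    return {
      id: rows[0].id,
      name: rows[0].name,
      sourceType: rows[0].source_type,
      createdAt: new Date(rows[0].created_at),
    };
  }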
21876
22528
|
async createChannel(data, participantIds) {
|
|
21877
22529
|
return this.withDatabase(async () => {
|
|
21878
22530
|
const newId = data.id || v4_default();
|
|
@@ -21892,9 +22544,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21892
22544
|
await this.db.transaction(async (tx) => {
|
|
21893
22545
|
await tx.insert(channelTable).values(channelToInsert);
|
|
21894
22546
|
if (participantIds && participantIds.length > 0) {
|
|
21895
|
-
const participantValues = participantIds.map((
|
|
22547
|
+
const participantValues = participantIds.map((entityId) => ({
|
|
21896
22548
|
channelId: newId,
|
|
21897
|
-
|
|
22549
|
+
entityId
|
|
21898
22550
|
}));
|
|
21899
22551
|
await tx.insert(channelParticipantsTable).values(participantValues).onConflictDoNothing();
|
|
21900
22552
|
}
|
|
@@ -21902,9 +22554,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21902
22554
|
return channelToInsert;
|
|
21903
22555
|
});
|
|
21904
22556
|
}
|
|
21905
|
-
async
|
|
22557
|
+
async getChannelsForMessageServer(messageServerId) {
|
|
21906
22558
|
return this.withDatabase(async () => {
|
|
21907
|
-
const results = await this.db.select().from(channelTable).where(
|
|
22559
|
+
const results = await this.db.select().from(channelTable).where(eq2(channelTable.messageServerId, messageServerId));
|
|
21908
22560
|
return results.map((r) => ({
|
|
21909
22561
|
id: r.id,
|
|
21910
22562
|
messageServerId: r.messageServerId,
|
|
@@ -21921,7 +22573,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21921
22573
|
}
|
|
21922
22574
|
async getChannelDetails(channelId) {
|
|
21923
22575
|
return this.withDatabase(async () => {
|
|
21924
|
-
const results = await this.db.select().from(channelTable).where(
|
|
22576
|
+
const results = await this.db.select().from(channelTable).where(eq2(channelTable.id, channelId)).limit(1);
|
|
21925
22577
|
return results.length > 0 ? {
|
|
21926
22578
|
id: results[0].id,
|
|
21927
22579
|
messageServerId: results[0].messageServerId,
|
|
@@ -21959,7 +22611,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21959
22611
|
}
|
|
21960
22612
|
async getMessageById(id) {
|
|
21961
22613
|
return this.withDatabase(async () => {
|
|
21962
|
-
const rows = await this.db.select().from(messageTable).where(
|
|
22614
|
+
const rows = await this.db.select().from(messageTable).where(eq2(messageTable.id, id)).limit(1);
|
|
21963
22615
|
return rows?.[0] ?? null;
|
|
21964
22616
|
});
|
|
21965
22617
|
}
|
|
@@ -21978,7 +22630,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21978
22630
|
inReplyToRootMessageId: patch.inReplyToRootMessageId ?? existing.inReplyToRootMessageId,
|
|
21979
22631
|
updatedAt
|
|
21980
22632
|
};
|
|
21981
|
-
await this.db.update(messageTable).set(next).where(
|
|
22633
|
+
await this.db.update(messageTable).set(next).where(eq2(messageTable.id, id));
|
|
21982
22634
|
return {
|
|
21983
22635
|
...existing,
|
|
21984
22636
|
...next
|
|
@@ -21987,7 +22639,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
21987
22639
|
}
|
|
21988
22640
|
async getMessagesForChannel(channelId, limit = 50, beforeTimestamp) {
|
|
21989
22641
|
return this.withDatabase(async () => {
|
|
21990
|
-
const conditions = [
|
|
22642
|
+
const conditions = [eq2(messageTable.channelId, channelId)];
|
|
21991
22643
|
if (beforeTimestamp) {
|
|
21992
22644
|
conditions.push(lt(messageTable.createdAt, beforeTimestamp));
|
|
21993
22645
|
}
|
|
@@ -22010,7 +22662,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
22010
22662
|
}
|
|
22011
22663
|
async deleteMessage(messageId) {
|
|
22012
22664
|
return this.withDatabase(async () => {
|
|
22013
|
-
await this.db.delete(messageTable).where(
|
|
22665
|
+
await this.db.delete(messageTable).where(eq2(messageTable.id, messageId));
|
|
22014
22666
|
});
|
|
22015
22667
|
}
|
|
22016
22668
|
async updateChannel(channelId, updates) {
|
|
@@ -22022,13 +22674,13 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
22022
22674
|
updateData.name = updates.name;
|
|
22023
22675
|
if (updates.metadata !== undefined)
|
|
22024
22676
|
updateData.metadata = updates.metadata;
|
|
22025
|
-
await tx.update(channelTable).set(updateData).where(
|
|
22677
|
+
await tx.update(channelTable).set(updateData).where(eq2(channelTable.id, channelId));
|
|
22026
22678
|
if (updates.participantCentralUserIds !== undefined) {
|
|
22027
|
-
await tx.delete(channelParticipantsTable).where(
|
|
22679
|
+
await tx.delete(channelParticipantsTable).where(eq2(channelParticipantsTable.channelId, channelId));
|
|
22028
22680
|
if (updates.participantCentralUserIds.length > 0) {
|
|
22029
|
-
const participantValues = updates.participantCentralUserIds.map((
|
|
22681
|
+
const participantValues = updates.participantCentralUserIds.map((entityId) => ({
|
|
22030
22682
|
channelId,
|
|
22031
|
-
|
|
22683
|
+
entityId
|
|
22032
22684
|
}));
|
|
22033
22685
|
await tx.insert(channelParticipantsTable).values(participantValues).onConflictDoNothing();
|
|
22034
22686
|
}
|
|
@@ -22044,53 +22696,59 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
|
|
|
22044
22696
|
async deleteChannel(channelId) {
|
|
22045
22697
|
return this.withDatabase(async () => {
|
|
22046
22698
|
await this.db.transaction(async (tx) => {
|
|
22047
|
-
await tx.delete(messageTable).where(
|
|
22048
|
-
await tx.delete(channelParticipantsTable).where(
|
|
22049
|
-
await tx.delete(channelTable).where(
|
|
22699
|
+
await tx.delete(messageTable).where(eq2(messageTable.channelId, channelId));
|
|
22700
|
+
await tx.delete(channelParticipantsTable).where(eq2(channelParticipantsTable.channelId, channelId));
|
|
22701
|
+
await tx.delete(channelTable).where(eq2(channelTable.id, channelId));
|
|
22050
22702
|
});
|
|
22051
22703
|
});
|
|
22052
22704
|
}
|
|
22053
|
-
async addChannelParticipants(channelId,
|
|
22705
|
+
async addChannelParticipants(channelId, entityIds) {
|
|
22054
22706
|
return this.withDatabase(async () => {
|
|
22055
|
-
if (!
|
|
22707
|
+
if (!entityIds || entityIds.length === 0)
|
|
22056
22708
|
return;
|
|
22057
|
-
const participantValues =
|
|
22709
|
+
const participantValues = entityIds.map((entityId) => ({
|
|
22058
22710
|
channelId,
|
|
22059
|
-
|
|
22711
|
+
entityId
|
|
22060
22712
|
}));
|
|
22061
22713
|
await this.db.insert(channelParticipantsTable).values(participantValues).onConflictDoNothing();
|
|
22062
22714
|
});
|
|
22063
22715
|
}
|
|
22064
22716
|
async getChannelParticipants(channelId) {
|
|
22065
22717
|
return this.withDatabase(async () => {
|
|
22066
|
-
const results = await this.db.select({
|
|
22067
|
-
return results.map((r) => r.
|
|
22718
|
+
const results = await this.db.select({ entityId: channelParticipantsTable.entityId }).from(channelParticipantsTable).where(eq2(channelParticipantsTable.channelId, channelId));
|
|
22719
|
+
return results.map((r) => r.entityId);
|
|
22068
22720
|
});
|
|
22069
22721
|
}
|
|
22070
|
-
async
|
|
22722
|
+
async isChannelParticipant(channelId, entityId) {
|
|
22071
22723
|
return this.withDatabase(async () => {
|
|
22072
|
-
await this.db.
|
|
22073
|
-
|
|
22724
|
+
const result = await this.db.select().from(channelParticipantsTable).where(and(eq2(channelParticipantsTable.channelId, channelId), eq2(channelParticipantsTable.entityId, entityId))).limit(1);
|
|
22725
|
+
return result.length > 0;
|
|
22726
|
+
});
|
|
22727
|
+
}
|
|
22728
|
+
async addAgentToMessageServer(messageServerId, agentId) {
|
|
22729
|
+
return this.withDatabase(async () => {
|
|
22730
|
+
await this.db.insert(messageServerAgentsTable).values({
|
|
22731
|
+
messageServerId,
|
|
22074
22732
|
agentId
|
|
22075
22733
|
}).onConflictDoNothing();
|
|
22076
22734
|
});
|
|
22077
22735
|
}
|
|
22078
|
-
async
|
|
22736
|
+
async getAgentsForMessageServer(messageServerId) {
|
|
22079
22737
|
return this.withDatabase(async () => {
|
|
22080
|
-
const results = await this.db.select({ agentId:
|
|
22738
|
+
const results = await this.db.select({ agentId: messageServerAgentsTable.agentId }).from(messageServerAgentsTable).where(eq2(messageServerAgentsTable.messageServerId, messageServerId));
|
|
22081
22739
|
return results.map((r) => r.agentId);
|
|
22082
22740
|
});
|
|
22083
22741
|
}
|
|
22084
|
-
async
|
|
22742
|
+
async removeAgentFromMessageServer(messageServerId, agentId) {
|
|
22085
22743
|
return this.withDatabase(async () => {
|
|
22086
|
-
await this.db.delete(
|
|
22744
|
+
await this.db.delete(messageServerAgentsTable).where(and(eq2(messageServerAgentsTable.messageServerId, messageServerId), eq2(messageServerAgentsTable.agentId, agentId)));
|
|
22087
22745
|
});
|
|
22088
22746
|
}
|
|
22089
22747
|
async findOrCreateDmChannel(user1Id, user2Id, messageServerId) {
|
|
22090
22748
|
return this.withDatabase(async () => {
|
|
22091
22749
|
const ids = [user1Id, user2Id].sort();
|
|
22092
22750
|
const dmChannelName = `DM-${ids[0]}-${ids[1]}`;
|
|
22093
|
-
const existingChannels = await this.db.select().from(channelTable).where(and(
|
|
22751
|
+
const existingChannels = await this.db.select().from(channelTable).where(and(eq2(channelTable.type, ChannelType.DM), eq2(channelTable.name, dmChannelName), eq2(channelTable.messageServerId, messageServerId))).limit(1);
|
|
22094
22752
|
if (existingChannels.length > 0) {
|
|
22095
22753
|
return {
|
|
22096
22754
|
id: existingChannels[0].id,
|
|
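The hunk above fills in the previously truncated channel-participant and message-server-agent methods: agent membership in a message server is a plain join table keyed by messageServerId and agentId, written with onConflictDoNothing so repeated adds stay idempotent. A sketch of the three membership operations with an illustrative join table (db from the first sketch):

  import { and, eq } from "drizzle-orm";
  import { pgTable, uuid } from "drizzle-orm/pg-core";

  const serverAgents = pgTable("message_server_agents", {
    messageServerId: uuid("message_server_id").notNull(),
    agentId: uuid("agent_id").notNull(),
  });

  const addAgent = (messageServerId: string, agentId: string) =>
    db.insert(serverAgents).values({ messageServerId, agentId }).onConflictDoNothing();

  const listAgents = async (messageServerId: string) =>
    (
      await db
        .select({ agentId: serverAgents.agentId })
        .from(serverAgents)
        .where(eq(serverAgents.messageServerId, messageServerId))
    ).map((r) => r.agentId);

  const removeAgent = (messageServerId: string, agentId: string) =>
    db
      .delete(serverAgents)
      .where(and(eq(serverAgents.messageServerId, messageServerId), eq(serverAgents.agentId, agentId)));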
@@ -22124,11 +22782,14 @@ class PgliteDatabaseAdapter extends BaseDrizzleAdapter {
|
|
|
22124
22782
|
this.manager = manager;
|
|
22125
22783
|
this.db = drizzle(this.manager.getConnection());
|
|
22126
22784
|
}
|
|
22785
|
+
async withEntityContext(_entityId, callback) {
|
|
22786
|
+
return this.db.transaction(callback);
|
|
22787
|
+
}
|
|
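The PGlite adapter's new withEntityContext ignores its entity argument and simply opens a transaction; presumably the entity-scoped behaviour (for example per-session row-level-security settings on a real Postgres connection) lives in a server-side adapter outside this browser bundle. That reading is an inference from the signature, not something the diff states. As added above, the whole method is:

  // Pass-through: same callback contract as an entity-scoped call, no per-entity setup.
  async withEntityContext(_entityId, callback) {
    return this.db.transaction(callback);
  }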
22127
22788
|
async getEntityByIds(entityIds) {
|
|
22128
22789
|
return this.getEntitiesByIds(entityIds);
|
|
22129
22790
|
}
|
|
22130
22791
|
async getMemoriesByServerId(_params) {
|
|
22131
|
-
|
|
22792
|
+
logger10.warn({ src: "plugin:sql" }, "getMemoriesByServerId called but not implemented");
|
|
22132
22793
|
return [];
|
|
22133
22794
|
}
|
|
22134
22795
|
async ensureAgentExists(agent) {
|
|
@@ -22153,13 +22814,13 @@ class PgliteDatabaseAdapter extends BaseDrizzleAdapter {
|
|
|
22153
22814
|
}
|
|
22154
22815
|
async withDatabase(operation) {
|
|
22155
22816
|
if (this.manager.isShuttingDown()) {
|
|
22156
|
-
|
|
22817
|
+
logger10.warn({ src: "plugin:sql" }, "Database is shutting down");
|
|
22157
22818
|
return null;
|
|
22158
22819
|
}
|
|
22159
22820
|
return operation();
|
|
22160
22821
|
}
|
|
22161
22822
|
async init() {
|
|
22162
|
-
|
|
22823
|
+
logger10.debug({ src: "plugin:sql" }, "PGliteDatabaseAdapter initialized");
|
|
22163
22824
|
}
|
|
22164
22825
|
async isReady() {
|
|
22165
22826
|
return !this.manager.isShuttingDown();
|
|
@@ -22223,17 +22884,17 @@ var plugin = {
|
|
|
22223
22884
|
priority: 0,
|
|
22224
22885
|
schema: exports_schema,
|
|
22225
22886
|
init: async (_config, runtime) => {
|
|
22226
|
-
|
|
22887
|
+
logger11.info({ src: "plugin:sql" }, "plugin-sql (browser) init starting");
|
|
22227
22888
|
try {
|
|
22228
22889
|
const isReady = await runtime.isReady();
|
|
22229
22890
|
if (isReady) {
|
|
22230
|
-
|
|
22891
|
+
logger11.info({ src: "plugin:sql" }, "Database adapter already registered, skipping creation");
|
|
22231
22892
|
return;
|
|
22232
22893
|
}
|
|
22233
22894
|
} catch (error) {}
|
|
22234
22895
|
const dbAdapter = createDatabaseAdapter({}, runtime.agentId);
|
|
22235
22896
|
runtime.registerDatabaseAdapter(dbAdapter);
|
|
22236
|
-
|
|
22897
|
+
logger11.info({ src: "plugin:sql" }, "Browser database adapter (PGlite) created and registered");
|
|
22237
22898
|
}
|
|
22238
22899
|
};
|
|
22239
22900
|
var index_browser_default = plugin;
|
|
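The browser plugin's init (above) short-circuits when runtime.isReady() already reports a working adapter and otherwise builds and registers a fresh PGlite-backed adapter, logging each phase with the same src: "plugin:sql" fields. Reduced to its control flow; the runtime shape below is only the slice of the interface these calls need, and createDatabaseAdapter refers to the factory exported by this bundle.

  declare function createDatabaseAdapter(config: object, agentId: string): unknown; // provided by this bundle

  interface RuntimeSlice {
    agentId: string;
    isReady(): Promise<boolean>;
    registerDatabaseAdapter(adapter: unknown): void;
  }

  async function initSqlPlugin(runtime: RuntimeSlice) {
    try {
      if (await runtime.isReady()) return; // an adapter is already registered elsewhere
    } catch {
      // isReady throwing is treated like "not ready": fall through and register one.
    }
    const adapter = createDatabaseAdapter({}, runtime.agentId);
    runtime.registerDatabaseAdapter(adapter);
  }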
@@ -22244,5 +22905,5 @@ export {
|
|
|
22244
22905
|
DatabaseMigrationService
|
|
22245
22906
|
};
|
|
22246
22907
|
|
|
22247
|
-
//# debugId=
|
|
22908
|
+
//# debugId=7F64AB0B1EC2677C64756E2164756E21
|
|
22248
22909
|
//# sourceMappingURL=index.browser.js.map
|