@elizaos/plugin-sql 1.6.5-alpha.15 → 1.6.5-alpha.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/index.browser.js +96 -160
- package/dist/browser/index.browser.js.map +12 -12
- package/dist/browser/tsconfig.build.tsbuildinfo +1 -1
- package/dist/node/index.node.js +120 -185
- package/dist/node/index.node.js.map +14 -14
- package/dist/node/tsconfig.build.node.tsbuildinfo +1 -1
- package/package.json +6 -4
package/dist/node/index.node.js
CHANGED
@@ -5909,14 +5909,14 @@ class ExtensionManager {
  for (const extension of extensions) {
  try {
  if (!/^[a-zA-Z0-9_-]+$/.test(extension)) {
- logger2.warn(
+ logger2.warn({ src: "plugin:sql", extension }, "Invalid extension name - contains invalid characters");
  continue;
  }
  await this.db.execute(sql`CREATE EXTENSION IF NOT EXISTS ${sql.identifier(extension)}`);
- logger2.debug(
+ logger2.debug({ src: "plugin:sql", extension }, "Extension installed");
  } catch (error) {
  const errorMessage = error instanceof Error ? error.message : String(error);
- logger2.warn(
+ logger2.warn({ src: "plugin:sql", extension, error: errorMessage }, "Could not install extension");
  }
  }
  }
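
Note: the change repeated in this hunk (and throughout the rest of the file) is a move from string-interpolated log messages to structured, object-first logging: contextual fields go in a leading object tagged with src: "plugin:sql", and the message string stays constant. A minimal TypeScript sketch of the call shape, assuming a pino-style logger (the object-first signature matches pino; whether @elizaos/core's logger wraps pino is an assumption here, not something this diff confirms):

import pino from "pino";

const logger = pino({ level: "debug" });

// Before: interpolation baked into the message string.
// logger.warn(`Invalid extension name: ${extension}`);

// After: structured fields in a leading object, constant message text.
const extension = "vector";
logger.warn(
  { src: "plugin:sql", extension },
  "Invalid extension name - contains invalid characters"
);

Keeping the message text constant makes the logs greppable, while the structured fields can be filtered or indexed downstream.
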
@@ -6628,10 +6628,7 @@ async function generateMigrationSQL(previousSnapshot, currentSnapshot, diff) {
  }
  const dataLossCheck = checkForDataLoss(diff);
  if (dataLossCheck.warnings.length > 0) {
- logger3.warn("
- for (const warning of dataLossCheck.warnings) {
- logger3.warn(` - ${warning}`);
- }
+ logger3.warn({ src: "plugin:sql", warnings: dataLossCheck.warnings }, "Schema changes may cause data loss");
  }
  const schemasToCreate = new Set;
  for (const tableName of diff.tables.created) {
@@ -7053,7 +7050,7 @@ class DatabaseIntrospector {
  this.db = db2;
  }
  async introspectSchema(schemaName = "public") {
- logger5.info(
+ logger5.info({ src: "plugin:sql", schemaName }, "Starting database introspection");
  const tables = {};
  const schemas = {};
  const enums = {};
@@ -7061,7 +7058,7 @@ class DatabaseIntrospector {
  for (const tableInfo of allTables) {
  const tableName = tableInfo.table_name;
  const tableSchema = tableInfo.table_schema || "public";
- logger5.debug(
+ logger5.debug({ src: "plugin:sql", tableSchema, tableName }, "Introspecting table");
  const columns2 = await this.getColumns(tableSchema, tableName);
  const columnsObject = {};
  const uniqueConstraintObject = {};
@@ -7156,7 +7153,7 @@ class DatabaseIntrospector {
  }
  enums[key].values.push(enumInfo.value);
  }
- logger5.info(
+ logger5.info({ src: "plugin:sql", tableCount: Object.keys(tables).length }, "Database introspection complete");
  return {
  version: "7",
  dialect: "postgresql",
@@ -7418,7 +7415,7 @@ class RuntimeMigrator {
  }
  }
  for (const schemaName of schemasToCreate) {
- logger6.debug(
+ logger6.debug({ src: "plugin:sql", schemaName }, "Ensuring schema exists");
  await this.db.execute(sql.raw(`CREATE SCHEMA IF NOT EXISTS "${schemaName}"`));
  }
  }
@@ -7429,10 +7426,10 @@ class RuntimeMigrator {
  const tableData = table3;
  const actualSchema = tableData.schema || "public";
  if (!isCorePLugin && actualSchema === "public") {
- logger6.warn(
+ logger6.warn({ src: "plugin:sql", pluginName, tableName: tableData.name, expectedSchema }, "Plugin table is using public schema - consider using pgSchema for better isolation");
  }
  if (isCorePLugin && actualSchema !== "public") {
- logger6.warn(
+ logger6.warn({ src: "plugin:sql", pluginName: "@elizaos/plugin-sql", tableName: tableData.name, actualSchema }, "Core plugin table should use public schema");
  }
  }
  }
@@ -7643,13 +7640,13 @@ class RuntimeMigrator {
  }
  }
  }
- logger6.debug(
+ logger6.debug({ src: "plugin:sql", urlPreview: url.substring(0, 50) }, "Connection string did not match any PostgreSQL patterns");
  return false;
  }
  async initialize() {
- logger6.info("
+ logger6.info({ src: "plugin:sql" }, "Initializing migration system");
  await this.migrationTracker.ensureTables();
- logger6.info("
+ logger6.info({ src: "plugin:sql" }, "Migration system initialized");
  }
  async migrate(pluginName, schema2, options = {}) {
  const lockId = this.getAdvisoryLockId(pluginName);
@@ -7658,30 +7655,30 @@ class RuntimeMigrator {
  }
  let lockAcquired = false;
  try {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Starting migration for plugin");
  await this.initialize();
  const postgresUrl = process.env.POSTGRES_URL || process.env.DATABASE_URL || "";
  const isRealPostgres = this.isRealPostgresDatabase(postgresUrl);
  if (isRealPostgres) {
  try {
- logger6.debug(
+ logger6.debug({ src: "plugin:sql", pluginName }, "Using PostgreSQL advisory locks");
  const lockIdStr = lockId.toString();
  const lockResult = await this.db.execute(sql`SELECT pg_try_advisory_lock(CAST(${lockIdStr} AS bigint)) as acquired`);
  lockAcquired = lockResult.rows[0]?.acquired === true;
  if (!lockAcquired) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Migration already in progress, waiting for lock");
  await this.db.execute(sql`SELECT pg_advisory_lock(CAST(${lockIdStr} AS bigint))`);
  lockAcquired = true;
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Lock acquired");
  } else {
- logger6.debug(
+ logger6.debug({ src: "plugin:sql", pluginName, lockId: lockIdStr }, "Advisory lock acquired");
  }
  } catch (lockError) {
- logger6.warn(
+ logger6.warn({ src: "plugin:sql", pluginName, error: lockError instanceof Error ? lockError.message : String(lockError) }, "Failed to acquire advisory lock, continuing without lock");
  lockAcquired = false;
  }
  } else {
- logger6.debug(
+ logger6.debug({ src: "plugin:sql" }, "Development database detected, skipping advisory locks");
  }
  await this.extensionManager.installRequiredExtensions(["vector", "fuzzystrmatch", "pgcrypto"]);
  const currentSnapshot = await generateSnapshot(schema2);
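
Note: besides the logging changes, this hunk shows how migrations are serialized: for a real PostgreSQL connection the migrator takes a per-plugin advisory lock (a non-blocking pg_try_advisory_lock first, then a blocking pg_advisory_lock if another migration is in flight), and releases it in the finally block further down in the file. A standalone sketch of that pattern using node-postgres, assuming a numeric lock id has already been derived from the plugin name; the helper name here is illustrative, not the plugin's API:

import { Pool } from "pg";

// lockId would come from something like getAdvisoryLockId(pluginName); here it is a parameter.
async function withAdvisoryLock(pool: Pool, lockId: bigint, fn: () => Promise<void>): Promise<void> {
  const id = lockId.toString();
  const tryRes = await pool.query("SELECT pg_try_advisory_lock(CAST($1 AS bigint)) AS acquired", [id]);
  if (tryRes.rows[0]?.acquired !== true) {
    // Another process holds the lock: block until it is released.
    await pool.query("SELECT pg_advisory_lock(CAST($1 AS bigint))", [id]);
  }
  try {
    await fn(); // run the migration while holding the lock
  } finally {
    await pool.query("SELECT pg_advisory_unlock(CAST($1 AS bigint))", [id]);
  }
}
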
@@ -7690,14 +7687,14 @@ class RuntimeMigrator {
  const currentHash = hashSnapshot(currentSnapshot);
  const lastMigration = await this.migrationTracker.getLastMigration(pluginName);
  if (lastMigration && lastMigration.hash === currentHash) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName, hash: currentHash }, "No changes detected, skipping migration");
  return;
  }
  let previousSnapshot = await this.snapshotStorage.getLatestSnapshot(pluginName);
  if (!previousSnapshot && Object.keys(currentSnapshot.tables).length > 0) {
  const hasExistingTables = await this.introspector.hasExistingTables(pluginName);
  if (hasExistingTables) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "No snapshot found but tables exist in database, introspecting");
  const schemaName = this.getExpectedSchemaName(pluginName);
  const introspectedSnapshot = await this.introspector.introspectSchema(schemaName);
  if (Object.keys(introspectedSnapshot.tables).length > 0) {
@@ -7705,15 +7702,15 @@ class RuntimeMigrator {
  await this.journalStorage.updateJournal(pluginName, 0, `introspected_${Date.now()}`, true);
  const introspectedHash = hashSnapshot(introspectedSnapshot);
  await this.migrationTracker.recordMigration(pluginName, introspectedHash, Date.now());
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Created initial snapshot from existing database");
  previousSnapshot = introspectedSnapshot;
  }
  }
  }
  if (!hasChanges(previousSnapshot, currentSnapshot)) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "No schema changes");
  if (!previousSnapshot && Object.keys(currentSnapshot.tables).length === 0) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Recording empty schema");
  await this.migrationTracker.recordMigration(pluginName, currentHash, Date.now());
  const idx = await this.journalStorage.getNextIdx(pluginName);
  const tag = this.generateMigrationTag(idx, pluginName);
@@ -7724,7 +7721,7 @@ class RuntimeMigrator {
  }
  const diff = await calculateDiff(previousSnapshot, currentSnapshot);
  if (!hasDiffChanges(diff)) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "No actionable changes");
  return;
  }
  const dataLossCheck = checkForDataLoss(diff);
@@ -7732,55 +7729,34 @@ class RuntimeMigrator {
  const isProduction = false;
  const allowDestructive = options.force || options.allowDataLoss || process.env.ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS === "true";
  if (!allowDestructive) {
- logger6.error("
- logger6.error(`[RuntimeMigrator] Plugin: ${pluginName}`);
- logger6.error(`[RuntimeMigrator] Environment: ${isProduction ? "PRODUCTION" : "DEVELOPMENT"}`);
- logger6.error("[RuntimeMigrator] Destructive operations detected:");
- for (const warning of dataLossCheck.warnings) {
- logger6.error(`[RuntimeMigrator] - ${warning}`);
- }
- logger6.error("[RuntimeMigrator] To proceed with destructive migrations:");
- logger6.error("[RuntimeMigrator] 1. Set environment variable: export ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true");
- logger6.error("[RuntimeMigrator] 2. Or use option: migrate(plugin, schema, { force: true })");
- if (isProduction) {
- logger6.error("[RuntimeMigrator] 3. For production, consider using drizzle-kit for manual migration");
- }
+ logger6.error({ src: "plugin:sql", pluginName, environment: isProduction ? "PRODUCTION" : "DEVELOPMENT", warnings: dataLossCheck.warnings }, "Destructive migration blocked - set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true or use force option");
  const errorMessage = isProduction ? `Destructive migration blocked in production for ${pluginName}. Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true or use drizzle-kit.` : `Destructive migration blocked for ${pluginName}. Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true to proceed.`;
  throw new Error(errorMessage);
  }
  if (dataLossCheck.requiresConfirmation) {
- logger6.warn("
- logger6.warn(`[RuntimeMigrator] Plugin: ${pluginName}`);
- logger6.warn("[RuntimeMigrator] The following operations will be performed:");
- for (const warning of dataLossCheck.warnings) {
- logger6.warn(`[RuntimeMigrator] ⚠️ ${warning}`);
- }
+ logger6.warn({ src: "plugin:sql", pluginName, warnings: dataLossCheck.warnings }, "Proceeding with destructive migration");
  }
  }
  const sqlStatements = await generateMigrationSQL(previousSnapshot, currentSnapshot, diff);
  if (sqlStatements.length === 0) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "No SQL statements to execute");
  return;
  }
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName, statementCount: sqlStatements.length }, "Executing SQL statements");
  if (options.verbose) {
  sqlStatements.forEach((stmt, i) => {
- logger6.debug(
+ logger6.debug({ src: "plugin:sql", statementIndex: i + 1, statement: stmt }, "SQL statement");
  });
  }
  if (options.dryRun) {
- logger6.info("
- logger6.info("[RuntimeMigrator] Would execute:");
- sqlStatements.forEach((stmt, i) => {
- logger6.info(` ${i + 1}. ${stmt}`);
- });
+ logger6.info({ src: "plugin:sql", pluginName, statements: sqlStatements }, "DRY RUN mode - not executing statements");
  return;
  }
  await this.executeMigration(pluginName, currentSnapshot, currentHash, sqlStatements);
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Migration completed successfully");
  return;
  } catch (error) {
- logger6.error(
+ logger6.error({ src: "plugin:sql", pluginName, error: error instanceof Error ? error.message : String(error) }, "Migration failed");
  throw error;
  } finally {
  const postgresUrl = process.env.POSTGRES_URL || process.env.DATABASE_URL || "";
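
Note: the large removal above collapses a multi-line error banner into a single structured log line while preserving the guard itself: destructive schema changes are refused unless ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true is set or the force / allowDataLoss option is passed. A condensed sketch of that guard, with hypothetical type shapes added purely for illustration:

// Condensed sketch of the guard the hunk above keeps; the interfaces are assumptions.
interface MigrateOptions { force?: boolean; allowDataLoss?: boolean; }
interface DataLossCheck { hasDataLoss: boolean; requiresConfirmation: boolean; warnings: string[]; }

function assertDestructiveAllowed(pluginName: string, check: DataLossCheck, options: MigrateOptions): void {
  if (!check.hasDataLoss) return;
  const allowDestructive =
    options.force ||
    options.allowDataLoss ||
    process.env.ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS === "true";
  if (!allowDestructive) {
    throw new Error(
      `Destructive migration blocked for ${pluginName}. Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true to proceed.`
    );
  }
}
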
@@ -7789,9 +7765,9 @@ class RuntimeMigrator {
  try {
  const lockIdStr = lockId.toString();
  await this.db.execute(sql`SELECT pg_advisory_unlock(CAST(${lockIdStr} AS bigint))`);
- logger6.debug(
+ logger6.debug({ src: "plugin:sql", pluginName }, "Advisory lock released");
  } catch (unlockError) {
- logger6.warn(
+ logger6.warn({ src: "plugin:sql", pluginName, error: unlockError instanceof Error ? unlockError.message : String(unlockError) }, "Failed to release advisory lock");
  }
  }
  }
@@ -7802,7 +7778,7 @@ class RuntimeMigrator {
  await this.db.execute(sql`BEGIN`);
  transactionStarted = true;
  for (const stmt of sqlStatements) {
- logger6.debug(
+ logger6.debug({ src: "plugin:sql", statement: stmt }, "Executing SQL statement");
  await this.db.execute(sql.raw(stmt));
  }
  const idx = await this.journalStorage.getNextIdx(pluginName);
@@ -7811,14 +7787,14 @@ class RuntimeMigrator {
  await this.journalStorage.updateJournal(pluginName, idx, tag, true);
  await this.snapshotStorage.saveSnapshot(pluginName, idx, snapshot);
  await this.db.execute(sql`COMMIT`);
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName, tag }, "Recorded migration");
  } catch (error) {
  if (transactionStarted) {
  try {
  await this.db.execute(sql`ROLLBACK`);
- logger6.error("
+ logger6.error({ src: "plugin:sql", error: error instanceof Error ? error.message : String(error) }, "Migration failed, rolled back");
  } catch (rollbackError) {
- logger6.error("
+ logger6.error({ src: "plugin:sql", error: rollbackError instanceof Error ? rollbackError.message : String(rollbackError) }, "Failed to rollback transaction");
  }
  }
  throw error;
@@ -7841,31 +7817,31 @@ class RuntimeMigrator {
  };
  }
  async reset(pluginName) {
- logger6.warn(
+ logger6.warn({ src: "plugin:sql", pluginName }, "Resetting migrations");
  await this.db.execute(sql`DELETE FROM migrations._migrations WHERE plugin_name = ${pluginName}`);
  await this.db.execute(sql`DELETE FROM migrations._journal WHERE plugin_name = ${pluginName}`);
  await this.db.execute(sql`DELETE FROM migrations._snapshots WHERE plugin_name = ${pluginName}`);
- logger6.warn(
+ logger6.warn({ src: "plugin:sql", pluginName }, "Reset complete");
  }
  async checkMigration(pluginName, schema2) {
  try {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Checking migration");
  const currentSnapshot = await generateSnapshot(schema2);
  const previousSnapshot = await this.snapshotStorage.getLatestSnapshot(pluginName);
  if (!hasChanges(previousSnapshot, currentSnapshot)) {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "No changes detected");
  return null;
  }
  const diff = await calculateDiff(previousSnapshot, currentSnapshot);
  const dataLossCheck = checkForDataLoss(diff);
  if (dataLossCheck.hasDataLoss) {
- logger6.warn(
+ logger6.warn({ src: "plugin:sql", pluginName }, "Migration would cause data loss");
  } else {
- logger6.info(
+ logger6.info({ src: "plugin:sql", pluginName }, "Migration is safe (no data loss)");
  }
  return dataLossCheck;
  } catch (error) {
- logger6.error(
+ logger6.error({ src: "plugin:sql", pluginName, error: error instanceof Error ? error.message : String(error) }, "Failed to check migration");
  throw error;
  }
  }
@@ -8264,7 +8240,7 @@ async function installRLSFunctions(adapter) {
  END;
  $$ LANGUAGE plpgsql;
  `);
- logger8.info("
+ logger8.info({ src: "plugin:sql" }, "RLS PostgreSQL functions installed");
  await installEntityRLS(adapter);
  }
  async function getOrCreateRlsServer(adapter, serverId) {
@@ -8272,7 +8248,7 @@ async function getOrCreateRlsServer(adapter, serverId) {
  await db2.insert(serverTable).values({
  id: serverId
  }).onConflictDoNothing();
- logger8.info(
+ logger8.info({ src: "plugin:sql", serverId: serverId.slice(0, 8) }, "RLS server registered");
  return serverId;
  }
  async function setServerContext(adapter, serverId) {
@@ -8284,8 +8260,7 @@ async function setServerContext(adapter, serverId) {
  if (servers.length === 0) {
  throw new Error(`Server ${serverId} does not exist`);
  }
- logger8.info(
- logger8.info("[Data Isolation] Context configured successfully (using application_name)");
+ logger8.info({ src: "plugin:sql", serverId: serverId.slice(0, 8) }, "RLS context configured");
  }
  async function assignAgentToServer(adapter, agentId, serverId) {
  if (!agentId || !serverId) {
@@ -8298,26 +8273,26 @@ async function assignAgentToServer(adapter, agentId, serverId) {
  const agent = agents[0];
  const currentServerId = agent.server_id;
  if (currentServerId === serverId) {
- logger8.debug(
+ logger8.debug({ src: "plugin:sql", agentName: agent.name }, "Agent already assigned to correct server");
  } else {
  await db2.update(agentTable).set({ server_id: serverId }).where(eq(agentTable.id, agentId));
  if (currentServerId === null) {
- logger8.info(
+ logger8.info({ src: "plugin:sql", agentName: agent.name }, "Agent assigned to server");
  } else {
- logger8.warn(
+ logger8.warn({ src: "plugin:sql", agentName: agent.name }, "Agent server changed");
  }
  }
  } else {
- logger8.debug(
+ logger8.debug({ src: "plugin:sql", agentId }, "Agent does not exist yet");
  }
  }
  async function applyRLSToNewTables(adapter) {
  const db2 = adapter.db;
  try {
  await db2.execute(sql`SELECT apply_rls_to_all_tables()`);
- logger8.info("
+ logger8.info({ src: "plugin:sql" }, "RLS applied to all tables");
  } catch (error) {
- logger8.warn("
+ logger8.warn({ src: "plugin:sql", error: String(error) }, "Failed to apply RLS to some tables");
  }
  }
  async function uninstallRLS(adapter) {
@@ -8331,14 +8306,14 @@ async function uninstallRLS(adapter) {
  `);
  const rlsEnabled = checkResult.rows?.[0]?.rls_enabled;
  if (!rlsEnabled) {
- logger8.debug("
+ logger8.debug({ src: "plugin:sql" }, "RLS not installed, skipping cleanup");
  return;
  }
- logger8.info("
+ logger8.info({ src: "plugin:sql" }, "Disabling RLS globally (keeping server_id columns for schema compatibility)...");
  try {
  await uninstallEntityRLS(adapter);
  } catch (entityRlsError) {
- logger8.debug("
+ logger8.debug({ src: "plugin:sql" }, "Entity RLS cleanup skipped (not installed or already cleaned)");
  }
  await db2.execute(sql`
  CREATE OR REPLACE FUNCTION _temp_disable_rls_on_table(
@@ -8375,22 +8350,22 @@ async function uninstallRLS(adapter) {
  const tableName = row.tablename;
  try {
  await db2.execute(sql`SELECT _temp_disable_rls_on_table(${schemaName}, ${tableName})`);
- logger8.debug(
+ logger8.debug({ src: "plugin:sql", schemaName, tableName }, "Disabled RLS on table");
  } catch (error) {
- logger8.warn(
+ logger8.warn({ src: "plugin:sql", schemaName, tableName, error: String(error) }, "Failed to disable RLS on table");
  }
  }
  await db2.execute(sql`DROP FUNCTION IF EXISTS _temp_disable_rls_on_table(text, text)`);
- logger8.info("
- logger8.info("
+ logger8.info({ src: "plugin:sql" }, "Keeping server_id values intact (prevents data theft on re-enable)");
+ logger8.info({ src: "plugin:sql" }, "Clearing servers table...");
  await db2.execute(sql`TRUNCATE TABLE servers`);
  await db2.execute(sql`DROP FUNCTION IF EXISTS apply_rls_to_all_tables() CASCADE`);
  await db2.execute(sql`DROP FUNCTION IF EXISTS add_server_isolation(text, text) CASCADE`);
  await db2.execute(sql`DROP FUNCTION IF EXISTS current_server_id() CASCADE`);
- logger8.info("
- logger8.
+ logger8.info({ src: "plugin:sql" }, "Dropped all RLS functions");
+ logger8.info({ src: "plugin:sql" }, "RLS disabled successfully (server_id columns preserved)");
  } catch (error) {
- logger8.error("
+ logger8.error({ src: "plugin:sql", error: String(error) }, "Failed to disable RLS");
  throw error;
  }
  }
@@ -8703,20 +8678,19 @@ class DatabaseMigrationService {
  await migrateToEntityRLS({ db: db2 });
  this.migrator = new RuntimeMigrator(db2);
  await this.migrator.initialize();
- logger9.info(
+ logger9.info({ src: "plugin:sql" }, "DatabaseMigrationService initialized");
  }
  discoverAndRegisterPluginSchemas(plugins) {
  for (const plugin of plugins) {
  if (plugin.schema) {
  this.registeredSchemas.set(plugin.name, plugin.schema);
- logger9.info(`Registered schema for plugin: ${plugin.name}`);
  }
  }
- logger9.info(
+ logger9.info({ src: "plugin:sql", schemasDiscovered: this.registeredSchemas.size, totalPlugins: plugins.length }, "Plugin schemas discovered");
  }
  registerSchema(pluginName, schema2) {
  this.registeredSchemas.set(pluginName, schema2);
- logger9.
+ logger9.debug({ src: "plugin:sql", pluginName }, "Schema registered");
  }
  async runAllPluginMigrations(options) {
  if (!this.db || !this.migrator) {
@@ -8728,12 +8702,7 @@ class DatabaseMigrationService {
  force: options?.force ?? false,
  dryRun: options?.dryRun ?? false
  };
- logger9.info("
- logger9.info(`[DatabaseMigrationService] Environment: ${isProduction ? "PRODUCTION" : "DEVELOPMENT"}`);
- logger9.info(`[DatabaseMigrationService] Plugins to migrate: ${this.registeredSchemas.size}`);
- if (migrationOptions.dryRun) {
- logger9.info("[DatabaseMigrationService] DRY RUN mode - no changes will be applied");
- }
+ logger9.info({ src: "plugin:sql", environment: isProduction ? "PRODUCTION" : "DEVELOPMENT", pluginCount: this.registeredSchemas.size, dryRun: migrationOptions.dryRun }, "Starting migrations");
  let successCount = 0;
  let failureCount = 0;
  const errors2 = [];
@@ -8741,43 +8710,37 @@ class DatabaseMigrationService {
  try {
  await this.migrator.migrate(pluginName, schema2, migrationOptions);
  successCount++;
- logger9.info(
+ logger9.info({ src: "plugin:sql", pluginName }, "Migration completed");
  } catch (error) {
  failureCount++;
  const errorMessage = error.message;
  errors2.push({ pluginName, error });
  if (errorMessage.includes("Destructive migration blocked")) {
- logger9.error(
- if (!migrationOptions.force && process.env.ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS !== "true") {
- logger9.error("[DatabaseMigrationService] To allow destructive migrations:");
- logger9.error("[DatabaseMigrationService] - Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true");
- logger9.error("[DatabaseMigrationService] - Or pass { force: true } to this method");
- }
+ logger9.error({ src: "plugin:sql", pluginName }, "Migration blocked - destructive changes detected. Set ELIZA_ALLOW_DESTRUCTIVE_MIGRATIONS=true or use force option");
  } else {
- logger9.error(
+ logger9.error({ src: "plugin:sql", pluginName, error: errorMessage }, "Migration failed");
  }
  }
  }
  if (failureCount === 0) {
- logger9.info(
+ logger9.info({ src: "plugin:sql", successCount }, "All migrations completed successfully");
  const dataIsolationEnabled = process.env.ENABLE_DATA_ISOLATION === "true";
  if (dataIsolationEnabled) {
  try {
- logger9.info("
+ logger9.info({ src: "plugin:sql" }, "Re-applying Row Level Security...");
  await installRLSFunctions({ db: this.db });
  await applyRLSToNewTables({ db: this.db });
  await applyEntityRLSToAllTables({ db: this.db });
- logger9.info("
+ logger9.info({ src: "plugin:sql" }, "RLS re-applied successfully");
  } catch (rlsError) {
  const errorMsg = rlsError instanceof Error ? rlsError.message : String(rlsError);
- logger9.warn("
- logger9.warn("[DatabaseMigrationService] This is OK if server_id columns are not yet in schemas");
+ logger9.warn({ src: "plugin:sql", error: errorMsg }, "Failed to re-apply RLS (this is OK if server_id columns are not yet in schemas)");
  }
  } else {
- logger9.info("
+ logger9.info({ src: "plugin:sql" }, "Skipping RLS re-application (ENABLE_DATA_ISOLATION is not true)");
  }
  } else {
- logger9.error(
+ logger9.error({ src: "plugin:sql", failureCount, successCount }, "Some migrations failed");
  const errorSummary = errors2.map((e) => `${e.pluginName}: ${e.error.message}`).join(`
  `);
  throw new Error(`${failureCount} migration(s) failed:
@@ -9441,10 +9404,10 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const backoffDelay = Math.min(this.baseDelay * 2 ** (attempt - 1), this.maxDelay);
  const jitter = Math.random() * this.jitterMax;
  const delay = backoffDelay + jitter;
- logger10.warn(
+ logger10.warn({ src: "plugin:sql", attempt, maxRetries: this.maxRetries, error: error instanceof Error ? error.message : String(error) }, "Database operation failed, retrying");
  await new Promise((resolve) => setTimeout(resolve, delay));
  } else {
- logger10.error(
+ logger10.error({ src: "plugin:sql", totalAttempts: attempt, error: error instanceof Error ? error.message : String(error) }, "Max retry attempts reached");
  throw error instanceof Error ? error : new Error(String(error));
  }
  }
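
Note: this hunk sits inside the adapter's retry wrapper: failed database operations are retried with exponential backoff capped at maxDelay, plus uniform random jitter, and the final failure is rethrown. A generic sketch of the same scheme; the parameter defaults are assumptions, not the plugin's actual values:

// Exponential backoff with a cap, plus jitter to de-synchronize concurrent retries.
async function withRetry<T>(
  op: () => Promise<T>,
  maxRetries = 3,
  baseDelay = 500,
  maxDelay = 10_000,
  jitterMax = 250
): Promise<T> {
  for (let attempt = 1; ; attempt++) {
    try {
      return await op();
    } catch (error) {
      if (attempt >= maxRetries) throw error; // give up after the last attempt
      const backoff = Math.min(baseDelay * 2 ** (attempt - 1), maxDelay);
      const jitter = Math.random() * jitterMax;
      await new Promise((resolve) => setTimeout(resolve, backoff + jitter));
    }
  }
}
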
@@ -9498,7 +9461,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  if (agent.id) {
  const existing = await this.db.select({ id: agentTable.id }).from(agentTable).where(eq(agentTable.id, agent.id)).limit(1);
  if (existing.length > 0) {
- logger10.warn(
+ logger10.warn({ src: "plugin:sql", agentId: agent.id }, "Attempted to create agent with duplicate ID");
  return false;
  }
  }
@@ -9509,10 +9472,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  updatedAt: new Date(agent.updatedAt || Date.now())
  });
  });
- logger10.debug(`Agent created successfully: ${agent.id}`);
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", agentId: agent.id, error: error instanceof Error ? error.message : String(error) }, "Failed to create agent");
  return false;
  }
  });
@@ -9546,10 +9508,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  await tx.update(agentTable).set(updateData).where(eq(agentTable.id, agentId));
  });
- logger10.debug(`Agent updated successfully: ${agentId}`);
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to update agent");
  return false;
  }
  });
@@ -9591,22 +9552,16 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return finalSettings === undefined ? {} : finalSettings;
  }
  async deleteAgent(agentId) {
- logger10.debug(`[DB] Deleting agent with ID: ${agentId}`);
  return this.withDatabase(async () => {
  try {
  const result = await this.db.delete(agentTable).where(eq(agentTable.id, agentId)).returning();
  if (result.length === 0) {
- logger10.warn(
+ logger10.warn({ src: "plugin:sql", agentId }, "Agent not found for deletion");
  return false;
  }
- logger10.success(`[DB] Agent ${agentId} and all related data successfully deleted via cascade`);
  return true;
  } catch (error) {
- logger10.error(
- if (error instanceof Error) {
- logger10.error(`[DB] Error details: ${error.name} - ${error.message}`);
- logger10.error(`[DB] Stack trace: ${error.stack}`);
- }
+ logger10.error({ src: "plugin:sql", agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to delete agent");
  throw error;
  }
  });
@@ -9617,7 +9572,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const result = await this.db.select({ count: count() }).from(agentTable);
  return result[0]?.count || 0;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", error: error instanceof Error ? error.message : String(error) }, "Failed to count agents");
  return 0;
  }
  });
@@ -9626,9 +9581,8 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return this.withDatabase(async () => {
  try {
  await this.db.delete(agentTable);
- logger10.success("Successfully cleaned up agent table");
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", error: error instanceof Error ? error.message : String(error) }, "Failed to clean up agent table");
  throw error;
  }
  });
@@ -9707,21 +9661,17 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  metadata: entity2.metadata || {}
  }));
  await tx.insert(entityTable).values(normalizedEntities);
- logger10.debug(`${entities.length} Entities created successfully`);
  return true;
  });
  } catch (error) {
- logger10.error(
- if (error instanceof Error && error.stack) {
- logger10.trace("Stack trace:", error.stack);
- }
+ logger10.error({ src: "plugin:sql", entityId: entities[0]?.id, error: error instanceof Error ? error.message : String(error) }, "Failed to create entities");
  return false;
  }
  });
  }
  async ensureEntityExists(entity2) {
  if (!entity2.id) {
- logger10.error("Entity ID is required for ensureEntityExists");
+ logger10.error({ src: "plugin:sql" }, "Entity ID is required for ensureEntityExists");
  return false;
  }
  try {
@@ -9731,7 +9681,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", entityId: entity2.id, error: error instanceof Error ? error.message : String(error) }, "Failed to ensure entity exists");
  return false;
  }
  }
@@ -10081,7 +10031,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  levenshtein_score: Number(row.levenshtein_score)
  })).filter((row) => Array.isArray(row.embedding));
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", tableName: opts.query_table_name, fieldName: opts.query_field_name, error: error instanceof Error ? error.message : String(error) }, "Failed to get cached embeddings");
  if (error instanceof Error && error.message === "levenshtein argument exceeds maximum length of 255 characters") {
  return [];
  }
@@ -10103,7 +10053,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  });
  });
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", type: params.type, roomId: params.roomId, entityId: params.entityId, error: error instanceof Error ? error.message : String(error) }, "Failed to create log entry");
  throw error;
  }
  });
@@ -10380,11 +10330,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  });
  }
  async createMemory(memory, tableName) {
- logger10.debug(`DrizzleAdapter createMemory: memoryId: ${memory.id}, embeddingLength: ${memory.embedding?.length}, contentLength: ${memory.content?.text?.length}`);
  const memoryId = memory.id ?? v4();
  const existing = await this.getMemoryById(memoryId);
  if (existing) {
- logger10.debug(`Memory already exists, skipping creation: ${memoryId}`);
  return memoryId;
  }
  if (memory.unique === undefined) {
@@ -10434,7 +10382,6 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  async updateMemory(memory) {
  return this.withDatabase(async () => {
  try {
- logger10.debug(`Updating memory: memoryId: ${memory.id}, hasEmbedding: ${!!memory.embedding}`);
  await this.db.transaction(async (tx) => {
  if (memory.content) {
  const contentToUpdate = typeof memory.content === "string" ? memory.content : JSON.stringify(memory.content ?? {});
@@ -10466,10 +10413,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  }
  });
- logger10.debug(`Memory updated successfully: ${memory.id}`);
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", memoryId: memory.id, error: error instanceof Error ? error.message : String(error) }, "Failed to update memory");
  return false;
  }
  });
@@ -10481,7 +10427,6 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  await tx.delete(embeddingTable).where(eq(embeddingTable.memoryId, memoryId));
  await tx.delete(memoryTable).where(eq(memoryTable.id, memoryId));
  });
- logger10.debug(`Memory and related fragments removed successfully: ${memoryId}`);
  });
  }
  async deleteManyMemories(memoryIds) {
@@ -10500,7 +10445,6 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  await tx.delete(memoryTable).where(inArray(memoryTable.id, batch));
  }
  });
- logger10.debug(`Batch memory deletion completed successfully: ${memoryIds.length}`);
  });
  }
  async deleteMemoryFragments(tx, documentId) {
@@ -10509,7 +10453,6 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  const fragmentIds = fragmentsToDelete.map((f) => f.id);
  await tx.delete(embeddingTable).where(inArray(embeddingTable.memoryId, fragmentIds));
  await tx.delete(memoryTable).where(inArray(memoryTable.id, fragmentIds));
- logger10.debug(`Deleted related fragments: documentId: ${documentId}, fragmentCount: ${fragmentsToDelete.length}`);
  }
  }
  async getMemoryFragments(tx, documentId) {
@@ -10521,7 +10464,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  await this.db.transaction(async (tx) => {
  const rows = await tx.select({ id: memoryTable.id }).from(memoryTable).where(and(eq(memoryTable.roomId, roomId), eq(memoryTable.type, tableName)));
  const ids = rows.map((r) => r.id);
- logger10.debug(
+ logger10.debug({ src: "plugin:sql", roomId, tableName, memoryCount: ids.length }, "Deleting all memories");
  if (ids.length === 0) {
  return;
  }
@@ -10531,7 +10474,6 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }));
  await tx.delete(memoryTable).where(and(eq(memoryTable.roomId, roomId), eq(memoryTable.type, tableName)));
  });
- logger10.debug(`All memories removed successfully: roomId: ${roomId}, tableName: ${tableName}`);
  });
  }
  async countMemories(roomId, unique2 = true, tableName = "") {
@@ -10640,7 +10582,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }).onConflictDoNothing();
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", entityId, roomId, agentId: this.agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to add participant to room");
  return false;
  }
  });
@@ -10654,10 +10596,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  agentId: this.agentId
  }));
  await this.db.insert(participantTable).values(values).onConflictDoNothing().execute();
- logger10.debug(`${entityIds.length} Entities linked successfully`);
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", roomId, agentId: this.agentId, error: error instanceof Error ? error.message : String(error) }, "Failed to add participants to room");
  return false;
  }
  });
@@ -10669,10 +10610,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return await tx.delete(participantTable).where(and(eq(participantTable.entityId, entityId), eq(participantTable.roomId, roomId))).returning();
  });
  const removed = result.length > 0;
- logger10.debug(`Participant ${removed ? "removed" : "not found"}: entityId: ${entityId}, roomId: ${roomId}, removed: ${removed}`);
  return removed;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", entityId, roomId, error: error instanceof Error ? error.message : String(error) }, "Failed to remove participant from room");
  return false;
  }
  });
@@ -10719,7 +10659,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  await tx.update(participantTable).set({ roomState: state }).where(and(eq(participantTable.roomId, roomId), eq(participantTable.entityId, entityId), eq(participantTable.agentId, this.agentId)));
  });
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", roomId, entityId, state, error: error instanceof Error ? error.message : String(error) }, "Failed to set participant follow state");
  throw error;
  }
  });
@@ -10739,7 +10679,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  await this.db.insert(relationshipTable).values(saveParams);
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), saveParams }, "Error creating relationship");
  return false;
  }
  });
@@ -10752,7 +10692,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  metadata: relationship.metadata || {}
  }).where(eq(relationshipTable.id, relationship.id));
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), relationshipId: relationship.id }, "Error updating relationship");
  throw error;
  }
  });
@@ -10814,7 +10754,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  }
  return;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), key }, "Error fetching cache");
  return;
  }
  });
@@ -10834,7 +10774,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  });
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), key }, "Error setting cache");
  return false;
  }
  });
@@ -10847,7 +10787,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  });
  return true;
  } catch (error) {
- logger10.error(
+ logger10.error({ src: "plugin:sql", agentId: this.agentId, error: error instanceof Error ? error.message : String(error), key }, "Error deleting cache");
  return false;
  }
  });
@@ -11010,25 +10950,20 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
  return this.withDatabase(async () => {
  const rooms = await this.db.select({ id: roomTable.id }).from(roomTable).where(and(eq(roomTable.worldId, worldId), eq(roomTable.agentId, this.agentId)));
  if (rooms.length === 0) {
- logger10.debug(`No rooms found for worldId ${worldId} and agentId ${this.agentId} to delete.`);
  return;
  }
  const roomIds = rooms.map((room) => room.id);
  if (roomIds.length > 0) {
  await this.db.delete(logTable).where(inArray(logTable.roomId, roomIds));
- logger10.debug(`Deleted logs for ${roomIds.length} rooms in world ${worldId}.`);
  await this.db.delete(participantTable).where(inArray(participantTable.roomId, roomIds));
- logger10.debug(`Deleted participants for ${roomIds.length} rooms in world ${worldId}.`);
  const memoriesInRooms = await this.db.select({ id: memoryTable.id }).from(memoryTable).where(inArray(memoryTable.roomId, roomIds));
  const memoryIdsInRooms = memoriesInRooms.map((m) => m.id);
  if (memoryIdsInRooms.length > 0) {
  await this.db.delete(embeddingTable).where(inArray(embeddingTable.memoryId, memoryIdsInRooms));
- logger10.debug(`Deleted embeddings for ${memoryIdsInRooms.length} memories in world ${worldId}.`);
  await this.db.delete(memoryTable).where(inArray(memoryTable.id, memoryIdsInRooms));
- logger10.debug(`Deleted ${memoryIdsInRooms.length} memories in world ${worldId}.`);
  }
  await this.db.delete(roomTable).where(inArray(roomTable.id, roomIds));
- logger10.debug(
+ logger10.debug({ src: "plugin:sql", worldId, roomsDeleted: roomIds.length, memoriesDeleted: memoryIdsInRooms.length }, "World cleanup completed");
  }
  });
  }
@@ -11376,7 +11311,7 @@ class PgliteDatabaseAdapter extends BaseDrizzleAdapter {
  return this.getEntitiesByIds(entityIds);
  }
  async getMemoriesByServerId(_params) {
- logger11.warn("getMemoriesByServerId called but not implemented
+ logger11.warn({ src: "plugin:sql" }, "getMemoriesByServerId called but not implemented");
  return [];
  }
  async ensureAgentExists(agent) {
@@ -11401,13 +11336,13 @@ class PgliteDatabaseAdapter extends BaseDrizzleAdapter {
  }
  async withDatabase(operation) {
  if (this.manager.isShuttingDown()) {
- logger11.warn("Database is shutting down");
+ logger11.warn({ src: "plugin:sql" }, "Database is shutting down");
  return null;
  }
  return operation();
  }
  async init() {
- logger11.debug("
+ logger11.debug({ src: "plugin:sql" }, "PGliteDatabaseAdapter initialized");
  }
  async isReady() {
  return !this.manager.isShuttingDown();
@@ -11759,7 +11694,7 @@ class PgDatabaseAdapter extends BaseDrizzleAdapter {
  return this.getEntitiesByIds(entityIds);
  }
  async getMemoriesByServerId(_params) {
- logger12.warn("getMemoriesByServerId called but not implemented
+ logger12.warn({ src: "plugin:sql" }, "getMemoriesByServerId called but not implemented");
  return [];
  }
  async ensureAgentExists(agent) {
@@ -11795,7 +11730,7 @@ class PgDatabaseAdapter extends BaseDrizzleAdapter {
  });
  }
  async init() {
- logger12.debug("
+ logger12.debug({ src: "plugin:sql" }, "PgDatabaseAdapter initialized");
  }
  async isReady() {
  return this.manager.testConnection();
@@ -11868,7 +11803,7 @@ class PostgresConnectionManager {
  const poolConfig = { connectionString };
  if (rlsServerId) {
  poolConfig.application_name = rlsServerId;
- logger13.debug(
+ logger13.debug({ src: "plugin:sql", rlsServerId: rlsServerId.substring(0, 8) }, "Pool configured with RLS server");
  }
  this.pool = new Pool2(poolConfig);
  this.db = drizzle2(this.pool, { casing: "snake_case" });
@@ -11889,7 +11824,7 @@ class PostgresConnectionManager {
  await client.query("SELECT 1");
  return true;
  } catch (error) {
- logger13.error(
+ logger13.error({ src: "plugin:sql", error: error instanceof Error ? error.message : String(error) }, "Failed to connect to the database");
  return false;
  } finally {
  if (client) {
@@ -11994,14 +11929,14 @@ function createDatabaseAdapter(config, agentId) {
  }
  rlsServerId = stringToUuid(rlsServerIdString);
  managerKey = rlsServerId;
- logger14.debug(
+ logger14.debug({ src: "plugin:sql", rlsServerId: rlsServerId.slice(0, 8), serverIdString: rlsServerIdString }, "Using connection pool for RLS server");
  }
  if (!globalSingletons.postgresConnectionManagers) {
  globalSingletons.postgresConnectionManagers = new Map;
  }
  let manager = globalSingletons.postgresConnectionManagers.get(managerKey);
  if (!manager) {
- logger14.debug(
+ logger14.debug({ src: "plugin:sql", managerKey: managerKey.slice(0, 8) }, "Creating new connection pool");
  manager = new PostgresConnectionManager(config.postgresUrl, rlsServerId);
  globalSingletons.postgresConnectionManagers.set(managerKey, manager);
  }
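
Note: createDatabaseAdapter above reuses PostgresConnectionManager instances from a global map keyed by the RLS server id, so adapters that share an RLS context share one connection pool. A simplified sketch of that keyed-singleton pattern; the stub class and the "default" key are stand-ins for the plugin's real implementation:

// Stub standing in for the plugin's PostgresConnectionManager.
class PostgresConnectionManager {
  constructor(
    public readonly connectionString: string,
    public readonly rlsServerId?: string
  ) {}
}

const connectionManagers = new Map<string, PostgresConnectionManager>();

function getConnectionManager(postgresUrl: string, rlsServerId?: string): PostgresConnectionManager {
  const key = rlsServerId ?? "default"; // the fallback key is an assumption for the non-RLS case
  let manager = connectionManagers.get(key);
  if (!manager) {
    manager = new PostgresConnectionManager(postgresUrl, rlsServerId);
    connectionManagers.set(key, manager);
  }
  return manager;
}
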
@@ -12019,18 +11954,18 @@ var plugin = {
  priority: 0,
  schema: exports_schema,
  init: async (_config, runtime) => {
-
+ runtime.logger.info({ src: "plugin:sql", agentId: runtime.agentId }, "plugin-sql (node) init starting");
  const adapterRegistered = await runtime.isReady().then(() => true).catch((error) => {
  const message = error instanceof Error ? error.message : String(error);
  if (message.includes("Database adapter not registered")) {
-
+ runtime.logger.info({ src: "plugin:sql", agentId: runtime.agentId }, "No pre-registered database adapter detected; registering adapter");
  } else {
-
+ runtime.logger.warn({ src: "plugin:sql", agentId: runtime.agentId, error: message }, "Database adapter readiness check error; proceeding to register adapter");
  }
  return false;
  });
  if (adapterRegistered) {
-
+ runtime.logger.info({ src: "plugin:sql", agentId: runtime.agentId }, "Database adapter already registered, skipping creation");
  return;
  }
  const postgresUrl = runtime.getSetting("POSTGRES_URL");
@@ -12040,7 +11975,7 @@ var plugin = {
  postgresUrl
  }, runtime.agentId);
  runtime.registerDatabaseAdapter(dbAdapter);
-
+ runtime.logger.info({ src: "plugin:sql", agentId: runtime.agentId }, "Database adapter created and registered");
  }
  };
  var index_node_default = plugin;
@@ -12057,5 +11992,5 @@ export {
  DatabaseMigrationService
  };

- //# debugId=
+ //# debugId=A90408A8FE4E3FF564756E2164756E21
  //# sourceMappingURL=index.node.js.map