@elizaos/plugin-sql 2.0.0-alpha.2 → 2.0.0-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -37,14 +37,14 @@ var init_agent = __esm(() => {
  });

  // schema/server.ts
- import { sql as sql15 } from "drizzle-orm";
- import { pgTable as pgTable17, timestamp as timestamp15, uuid as uuid15 } from "drizzle-orm/pg-core";
+ import { sql as sql17 } from "drizzle-orm";
+ import { pgTable as pgTable19, timestamp as timestamp17, uuid as uuid17 } from "drizzle-orm/pg-core";
  var serverTable;
  var init_server = __esm(() => {
- serverTable = pgTable17("servers", {
- id: uuid15("id").primaryKey(),
- createdAt: timestamp15("created_at", { withTimezone: true }).default(sql15`now()`).notNull(),
- updatedAt: timestamp15("updated_at", { withTimezone: true }).default(sql15`now()`).notNull()
+ serverTable = pgTable19("servers", {
+ id: uuid17("id").primaryKey(),
+ createdAt: timestamp17("created_at", { withTimezone: true }).default(sql17`now()`).notNull(),
+ updatedAt: timestamp17("updated_at", { withTimezone: true }).default(sql17`now()`).notNull()
  });
  });
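Both sides of this hunk define the same three-column servers table; only the bundler's collision suffixes move (sql15/pgTable17 → sql17/pgTable19), shifted by the two new schema modules added later in this diff. A minimal usage sketch of the same table shape written against drizzle-orm directly; the node-postgres setup and connection string are assumptions, not part of this package:

import { sql } from "drizzle-orm";
import { pgTable, timestamp, uuid } from "drizzle-orm/pg-core";
import { drizzle } from "drizzle-orm/node-postgres";

// Same shape as the bundled serverTable, without the generated suffixes.
const serverTable = pgTable("servers", {
  id: uuid("id").primaryKey(),
  createdAt: timestamp("created_at", { withTimezone: true }).default(sql`now()`).notNull(),
  updatedAt: timestamp("updated_at", { withTimezone: true }).default(sql`now()`).notNull(),
});

const db = drizzle(process.env.DATABASE_URL!); // hypothetical connection string
const servers = await db.select().from(serverTable); // [{ id, createdAt, updatedAt }, ...]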

@@ -52,27 +52,27 @@ var init_server = __esm(() => {
  function getDb(adapter) {
  return adapter.db;
  }
- function getRow(result, index5 = 0) {
- return result.rows[index5];
+ function getRow(result, index7 = 0) {
+ return result.rows[index7];
  }

  // migrations.ts
  import { logger } from "@elizaos/core";
- import { sql as sql17 } from "drizzle-orm";
+ import { sql as sql19 } from "drizzle-orm";
  function getRows(result) {
  return result.rows;
  }
  async function migrateToEntityRLS(adapter) {
  const db = getDb(adapter);
  try {
- await db.execute(sql17`SELECT 1 FROM pg_tables LIMIT 1`);
+ await db.execute(sql19`SELECT 1 FROM pg_tables LIMIT 1`);
  } catch {
  logger.debug("[Migration] ⊘ Not PostgreSQL, skipping PostgreSQL-specific migrations");
  return;
  }
  let schemaAlreadyMigrated = false;
  try {
- const migrationCheck = await db.execute(sql17`
+ const migrationCheck = await db.execute(sql19`
  SELECT column_name FROM information_schema.columns
  WHERE table_schema = 'public'
  AND table_name = 'rooms'
@@ -94,7 +94,7 @@ async function migrateToEntityRLS(adapter) {
  }
  logger.debug("[Migration] → Schema migrated but RLS disabled, cleaning up...");
  try {
- const tablesWithRls = await db.execute(sql17`
+ const tablesWithRls = await db.execute(sql19`
  SELECT c.relname as tablename
  FROM pg_class c
  JOIN pg_namespace n ON n.oid = c.relnamespace
@@ -107,7 +107,7 @@ async function migrateToEntityRLS(adapter) {
  for (const row of tablesWithRls.rows) {
  const tableName = row.tablename;
  try {
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" DISABLE ROW LEVEL SECURITY`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" DISABLE ROW LEVEL SECURITY`));
  } catch {}
  }
  logger.debug(`[Migration] ✓ RLS cleanup completed (${tablesWithRls.rows.length} tables)`);
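Throughout these hunks the table names interpolated into sql.raw come from pg_class itself, not user input, which is why raw interpolation is acceptable here. Where a name is less trusted, drizzle's sql.identifier (used later in this bundle by ExtensionManager) quotes it safely; a small sketch, with the db parameter type left as a stand-in:

import { sql } from "drizzle-orm";

type Db = { execute: (query: unknown) => Promise<unknown> }; // stand-in adapter type

// Identifier-quoting alternative to the sql.raw(`ALTER TABLE "${tableName}" ...`) calls above.
async function disableRls(db: Db, tableName: string) {
  await db.execute(sql`ALTER TABLE ${sql.identifier(tableName)} DISABLE ROW LEVEL SECURITY`);
}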
@@ -123,14 +123,14 @@ async function migrateToEntityRLS(adapter) {
  try {
  logger.debug("[Migration] → Clearing RuntimeMigrator snapshot cache...");
  try {
- await db.execute(sql17`DELETE FROM migrations._snapshots WHERE plugin_name = '@elizaos/plugin-sql'`);
+ await db.execute(sql19`DELETE FROM migrations._snapshots WHERE plugin_name = '@elizaos/plugin-sql'`);
  logger.debug("[Migration] ✓ Snapshot cache cleared");
  } catch (_error) {
  logger.debug("[Migration] ⊘ No snapshot cache to clear (migrations schema not yet created)");
  }
  logger.debug("[Migration] → Checking for Row Level Security to disable...");
  try {
- const tablesWithRls = await db.execute(sql17`
+ const tablesWithRls = await db.execute(sql19`
  SELECT c.relname as tablename
  FROM pg_class c
  JOIN pg_namespace n ON n.oid = c.relnamespace
@@ -143,7 +143,7 @@ async function migrateToEntityRLS(adapter) {
  for (const row of tablesWithRls.rows) {
  const tableName = row.tablename;
  try {
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" DISABLE ROW LEVEL SECURITY`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" DISABLE ROW LEVEL SECURITY`));
  logger.debug(`[Migration] ✓ Disabled RLS on ${tableName}`);
  } catch (_error) {
  logger.debug(`[Migration] ⊘ Could not disable RLS on ${tableName}`);
@@ -159,7 +159,7 @@ async function migrateToEntityRLS(adapter) {
  const tablesToMigrate = ["channels", "worlds", "rooms"];
  for (const tableName of tablesToMigrate) {
  try {
- const columnsResult = await db.execute(sql17`
+ const columnsResult = await db.execute(sql19`
  SELECT column_name, data_type, is_nullable
  FROM information_schema.columns
  WHERE table_schema = 'public'
@@ -175,19 +175,19 @@ async function migrateToEntityRLS(adapter) {
  const oldColumnName = serverIdSnake ? "server_id" : "serverId";
  if (serverId && !messageServerId) {
  logger.debug(`[Migration] → Renaming ${tableName}.${oldColumnName} to message_server_id...`);
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" RENAME COLUMN "${oldColumnName}" TO "message_server_id"`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" RENAME COLUMN "${oldColumnName}" TO "message_server_id"`));
  logger.debug(`[Migration] ✓ Renamed ${tableName}.${oldColumnName} → message_server_id`);
  if (serverId.data_type === "text") {
  try {
  logger.debug(`[Migration] → Dropping DEFAULT constraint on ${tableName}.message_server_id...`);
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" DROP DEFAULT`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" DROP DEFAULT`));
  logger.debug(`[Migration] ✓ Dropped DEFAULT constraint`);
  } catch {
  logger.debug(`[Migration] ⊘ No DEFAULT constraint to drop on ${tableName}.message_server_id`);
  }
  try {
  logger.debug(`[Migration] → Converting ${tableName}.message_server_id from text to uuid...`);
- await db.execute(sql17.raw(`
+ await db.execute(sql19.raw(`
  ALTER TABLE "${tableName}"
  ALTER COLUMN "message_server_id" TYPE uuid
  USING CASE
@@ -204,29 +204,29 @@ async function migrateToEntityRLS(adapter) {
  }
  }
  if (tableName === "channels") {
- const nullCountResult = await db.execute(sql17.raw(`SELECT COUNT(*) as count FROM "${tableName}" WHERE "message_server_id" IS NULL`));
+ const nullCountResult = await db.execute(sql19.raw(`SELECT COUNT(*) as count FROM "${tableName}" WHERE "message_server_id" IS NULL`));
  const nullCount = nullCountResult.rows?.[0]?.count;
  if (nullCount && parseInt(nullCount, 10) > 0) {
  logger.warn(`[Migration] ⚠️ ${tableName} has ${nullCount} rows with NULL message_server_id - these will be deleted`);
- await db.execute(sql17.raw(`DELETE FROM "${tableName}" WHERE "message_server_id" IS NULL`));
+ await db.execute(sql19.raw(`DELETE FROM "${tableName}" WHERE "message_server_id" IS NULL`));
  logger.debug(`[Migration] ✓ Deleted ${nullCount} rows with NULL message_server_id from ${tableName}`);
  }
  logger.debug(`[Migration] → Making ${tableName}.message_server_id NOT NULL...`);
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" SET NOT NULL`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" SET NOT NULL`));
  logger.debug(`[Migration] ✓ Set ${tableName}.message_server_id NOT NULL`);
  }
  } else if (serverId && messageServerId) {
  logger.debug(`[Migration] → ${tableName} has both columns, dropping ${oldColumnName}...`);
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" DROP COLUMN "${oldColumnName}" CASCADE`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" DROP COLUMN "${oldColumnName}" CASCADE`));
  logger.debug(`[Migration] ✓ Dropped ${tableName}.${oldColumnName}`);
  } else if (!serverId && messageServerId) {
  if (messageServerId.data_type === "text") {
  logger.debug(`[Migration] → ${tableName}.message_server_id exists but is TEXT, needs UUID conversion...`);
  logger.debug(`[Migration] → Dropping DEFAULT constraint on ${tableName}.message_server_id...`);
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" DROP DEFAULT`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" ALTER COLUMN "message_server_id" DROP DEFAULT`));
  logger.debug(`[Migration] ✓ Dropped DEFAULT constraint`);
  logger.debug(`[Migration] → Converting ${tableName}.message_server_id from text to uuid (generating UUIDs from text)...`);
- await db.execute(sql17.raw(`
+ await db.execute(sql19.raw(`
  ALTER TABLE "${tableName}"
  ALTER COLUMN "message_server_id" TYPE uuid
  USING CASE
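Both USING CASE expressions are cut off at hunk boundaries, so the exact branches are not visible in this diff. The usual shape of such a conversion keeps values that already parse as UUIDs and maps the rest; the regex-gated version below illustrates that pattern and is not the package's shipped logic:

import { sql } from "drizzle-orm";

// Hypothetical reconstruction of the truncated conversion: cast well-formed
// UUID strings, null out everything else. The shipped CASE branches may differ.
const convertMessageServerId = (tableName: string) =>
  sql.raw(`
    ALTER TABLE "${tableName}"
    ALTER COLUMN "message_server_id" TYPE uuid
    USING CASE
      WHEN "message_server_id" ~ '^[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}$'
        THEN "message_server_id"::uuid
      ELSE NULL
    END
  `);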
@@ -248,7 +248,7 @@ async function migrateToEntityRLS(adapter) {
  }
  logger.debug("[Migration] → Dropping all remaining RLS-managed server_id columns...");
  try {
- const serverIdColumnsResult = await db.execute(sql17`
+ const serverIdColumnsResult = await db.execute(sql19`
  SELECT table_name
  FROM information_schema.columns
  WHERE table_schema = 'public'
@@ -270,7 +270,7 @@ async function migrateToEntityRLS(adapter) {
  for (const row of tablesToClean) {
  const tableName = row.table_name;
  try {
- await db.execute(sql17.raw(`ALTER TABLE "${tableName}" DROP COLUMN IF EXISTS server_id CASCADE`));
+ await db.execute(sql19.raw(`ALTER TABLE "${tableName}" DROP COLUMN IF EXISTS server_id CASCADE`));
  logger.debug(`[Migration] ✓ Dropped server_id from ${tableName}`);
  } catch (_error) {
  logger.debug(`[Migration] ⊘ Could not drop server_id from ${tableName}`);
@@ -281,7 +281,7 @@ async function migrateToEntityRLS(adapter) {
  }
  logger.debug("[Migration] → Checking agents.owner_id → server_id rename...");
  try {
- const agentsColumnsResult = await db.execute(sql17`
+ const agentsColumnsResult = await db.execute(sql19`
  SELECT column_name
  FROM information_schema.columns
  WHERE table_schema = 'public'
@@ -294,11 +294,11 @@ async function migrateToEntityRLS(adapter) {
  const hasServerId = agentsColumns.some((c) => c.column_name === "server_id");
  if (hasOwnerId && !hasServerId) {
  logger.debug("[Migration] → Renaming agents.owner_id to server_id...");
- await db.execute(sql17.raw(`ALTER TABLE "agents" RENAME COLUMN "owner_id" TO "server_id"`));
+ await db.execute(sql19.raw(`ALTER TABLE "agents" RENAME COLUMN "owner_id" TO "server_id"`));
  logger.debug("[Migration] ✓ Renamed agents.owner_id → server_id");
  } else if (hasOwnerId && hasServerId) {
  logger.debug("[Migration] → Both owner_id and server_id exist, dropping owner_id...");
- await db.execute(sql17.raw(`ALTER TABLE "agents" DROP COLUMN "owner_id" CASCADE`));
+ await db.execute(sql19.raw(`ALTER TABLE "agents" DROP COLUMN "owner_id" CASCADE`));
  logger.debug("[Migration] ✓ Dropped agents.owner_id");
  } else {
  logger.debug("[Migration] ⊘ agents table already has server_id (or no owner_id), skipping");
@@ -308,7 +308,7 @@ async function migrateToEntityRLS(adapter) {
  }
  logger.debug("[Migration] → Checking for owners → servers data migration...");
  try {
- const ownersTableResult = await db.execute(sql17`
+ const ownersTableResult = await db.execute(sql19`
  SELECT table_name
  FROM information_schema.tables
  WHERE table_schema = 'public'
@@ -316,7 +316,7 @@ async function migrateToEntityRLS(adapter) {
  `);
  if (ownersTableResult.rows && ownersTableResult.rows.length > 0) {
  logger.debug("[Migration] → Ensuring servers table exists...");
- await db.execute(sql17.raw(`
+ await db.execute(sql19.raw(`
  CREATE TABLE IF NOT EXISTS "servers" (
  "id" uuid PRIMARY KEY,
  "created_at" timestamp with time zone DEFAULT now() NOT NULL,
@@ -324,7 +324,7 @@ async function migrateToEntityRLS(adapter) {
  )
  `));
  logger.debug("[Migration] → Migrating owners data to servers...");
- await db.execute(sql17.raw(`
+ await db.execute(sql19.raw(`
  INSERT INTO "servers" ("id", "created_at", "updated_at")
  SELECT "id", COALESCE("created_at", now()), COALESCE("updated_at", now())
  FROM "owners"
@@ -332,7 +332,7 @@ async function migrateToEntityRLS(adapter) {
  `));
  logger.debug("[Migration] ✓ Migrated owners data to servers");
  logger.debug("[Migration] → Dropping obsolete owners table...");
- await db.execute(sql17.raw(`DROP TABLE IF EXISTS "owners" CASCADE`));
+ await db.execute(sql19.raw(`DROP TABLE IF EXISTS "owners" CASCADE`));
  logger.debug("[Migration] ✓ Dropped obsolete owners table");
  } else {
  logger.debug("[Migration] ⊘ owners table not found, skipping");
@@ -342,7 +342,7 @@ async function migrateToEntityRLS(adapter) {
  }
  logger.debug("[Migration] → Checking server_agents table rename...");
  try {
- const tablesResult = await db.execute(sql17`
+ const tablesResult = await db.execute(sql19`
  SELECT table_name
  FROM information_schema.tables
  WHERE table_schema = 'public'
@@ -354,16 +354,16 @@ async function migrateToEntityRLS(adapter) {
  const hasMessageServerAgents = tables.some((t) => t.table_name === "message_server_agents");
  if (hasServerAgents && !hasMessageServerAgents) {
  logger.debug("[Migration] → Renaming server_agents to message_server_agents...");
- await db.execute(sql17.raw(`ALTER TABLE "server_agents" RENAME TO "message_server_agents"`));
+ await db.execute(sql19.raw(`ALTER TABLE "server_agents" RENAME TO "message_server_agents"`));
  logger.debug("[Migration] ✓ Renamed server_agents → message_server_agents");
  logger.debug("[Migration] → Renaming message_server_agents.server_id to message_server_id...");
- await db.execute(sql17.raw(`ALTER TABLE "message_server_agents" RENAME COLUMN "server_id" TO "message_server_id"`));
+ await db.execute(sql19.raw(`ALTER TABLE "message_server_agents" RENAME COLUMN "server_id" TO "message_server_id"`));
  logger.debug("[Migration] ✓ Renamed message_server_agents.server_id → message_server_id");
  } else if (!hasServerAgents && !hasMessageServerAgents) {
  logger.debug("[Migration] ⊘ No server_agents table to migrate");
  } else if (hasMessageServerAgents) {
  logger.debug("[Migration] → Checking message_server_agents columns...");
- const columnsResult = await db.execute(sql17`
+ const columnsResult = await db.execute(sql19`
  SELECT column_name
  FROM information_schema.columns
  WHERE table_schema = 'public'
@@ -376,11 +376,11 @@ async function migrateToEntityRLS(adapter) {
  const hasMessageServerId = columns.some((c) => c.column_name === "message_server_id");
  if (hasServerId && !hasMessageServerId) {
  logger.debug("[Migration] → Renaming message_server_agents.server_id to message_server_id...");
- await db.execute(sql17.raw(`ALTER TABLE "message_server_agents" RENAME COLUMN "server_id" TO "message_server_id"`));
+ await db.execute(sql19.raw(`ALTER TABLE "message_server_agents" RENAME COLUMN "server_id" TO "message_server_id"`));
  logger.debug("[Migration] ✓ Renamed message_server_agents.server_id → message_server_id");
  } else if (!hasServerId && !hasMessageServerId) {
  logger.debug("[Migration] → message_server_agents exists without required columns, truncating...");
- await db.execute(sql17`TRUNCATE TABLE message_server_agents CASCADE`);
+ await db.execute(sql19`TRUNCATE TABLE message_server_agents CASCADE`);
  logger.debug("[Migration] ✓ Truncated message_server_agents");
  } else {
  logger.debug("[Migration] ⊘ message_server_agents already has correct schema");
@@ -391,7 +391,7 @@ async function migrateToEntityRLS(adapter) {
  }
  logger.debug("[Migration] → Checking channel_participants table...");
  try {
- const columnsResult = await db.execute(sql17`
+ const columnsResult = await db.execute(sql19`
  SELECT column_name
  FROM information_schema.columns
  WHERE table_schema = 'public'
@@ -404,11 +404,11 @@ async function migrateToEntityRLS(adapter) {
  const hasEntityId = columns.some((c) => c.column_name === "entity_id");
  if (hasUserId && !hasEntityId) {
  logger.debug("[Migration] → Renaming channel_participants.user_id to entity_id...");
- await db.execute(sql17.raw(`ALTER TABLE "channel_participants" RENAME COLUMN "user_id" TO "entity_id"`));
+ await db.execute(sql19.raw(`ALTER TABLE "channel_participants" RENAME COLUMN "user_id" TO "entity_id"`));
  logger.debug("[Migration] ✓ Renamed channel_participants.user_id → entity_id");
  } else if (!hasUserId && !hasEntityId) {
  logger.debug("[Migration] → channel_participants exists without entity_id or user_id, truncating...");
- await db.execute(sql17`TRUNCATE TABLE channel_participants CASCADE`);
+ await db.execute(sql19`TRUNCATE TABLE channel_participants CASCADE`);
  logger.debug("[Migration] ✓ Truncated channel_participants");
  } else {
  logger.debug("[Migration] ⊘ channel_participants already has entity_id column");
@@ -418,7 +418,7 @@ async function migrateToEntityRLS(adapter) {
  }
  logger.debug("[Migration] → Discovering and dropping all regular indexes...");
  try {
- const indexesResult = await db.execute(sql17`
+ const indexesResult = await db.execute(sql19`
  SELECT i.relname AS index_name
  FROM pg_index idx
  JOIN pg_class i ON i.oid = idx.indexrelid
@@ -435,7 +435,7 @@ async function migrateToEntityRLS(adapter) {
  for (const row of indexesToDrop) {
  const indexName = row.index_name;
  try {
- await db.execute(sql17.raw(`DROP INDEX IF EXISTS "${indexName}"`));
+ await db.execute(sql19.raw(`DROP INDEX IF EXISTS "${indexName}"`));
  logger.debug(`[Migration] ✓ Dropped index ${indexName}`);
  } catch (_error) {
  logger.debug(`[Migration] ⊘ Could not drop index ${indexName}`);
@@ -500,14 +500,14 @@ async function migrateToEntityRLS(adapter) {
  ];
  for (const rename of columnRenames) {
  try {
- const tableExistsResult = await db.execute(sql17`
+ const tableExistsResult = await db.execute(sql19`
  SELECT 1 FROM information_schema.tables
  WHERE table_schema = 'public' AND table_name = ${rename.table}
  `);
  if (!tableExistsResult.rows || tableExistsResult.rows.length === 0) {
  continue;
  }
- const columnsResult = await db.execute(sql17`
+ const columnsResult = await db.execute(sql19`
  SELECT column_name
  FROM information_schema.columns
  WHERE table_schema = 'public'
@@ -520,11 +520,11 @@ async function migrateToEntityRLS(adapter) {
  const hasNewColumn = columns.some((c) => c.column_name === rename.to);
  if (hasOldColumn && !hasNewColumn) {
  logger.debug(`[Migration] → Renaming ${rename.table}.${rename.from} to ${rename.to}...`);
- await db.execute(sql17.raw(`ALTER TABLE "${rename.table}" RENAME COLUMN "${rename.from}" TO "${rename.to}"`));
+ await db.execute(sql19.raw(`ALTER TABLE "${rename.table}" RENAME COLUMN "${rename.from}" TO "${rename.to}"`));
  logger.debug(`[Migration] ✓ Renamed ${rename.table}.${rename.from} → ${rename.to}`);
  } else if (hasOldColumn && hasNewColumn) {
  logger.debug(`[Migration] → Both columns exist, dropping ${rename.table}.${rename.from}...`);
- await db.execute(sql17.raw(`ALTER TABLE "${rename.table}" DROP COLUMN "${rename.from}" CASCADE`));
+ await db.execute(sql19.raw(`ALTER TABLE "${rename.table}" DROP COLUMN "${rename.from}" CASCADE`));
  logger.debug(`[Migration] ✓ Dropped ${rename.table}.${rename.from}`);
  }
  } catch (error) {
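The columnRenames entries themselves sit above this hunk and are not shown; the loop consumes { table, from, to } records and either renames or drops. The decision logic, isolated (the sample entry is illustrative, not taken from the package):

import { sql } from "drizzle-orm";

type Db = { execute: (query: unknown) => Promise<unknown> }; // stand-in adapter type
type ColumnRename = { table: string; from: string; to: string };

// Hypothetical entry; the real list is outside this diff's hunks.
const columnRenames: ColumnRename[] = [{ table: "memories", from: "user_id", to: "entity_id" }];

async function applyRename(db: Db, r: ColumnRename, hasOld: boolean, hasNew: boolean) {
  if (hasOld && !hasNew) {
    await db.execute(sql.raw(`ALTER TABLE "${r.table}" RENAME COLUMN "${r.from}" TO "${r.to}"`));
  } else if (hasOld && hasNew) {
    // The new column already exists, so the stale one is dropped instead.
    await db.execute(sql.raw(`ALTER TABLE "${r.table}" DROP COLUMN "${r.from}" CASCADE`));
  }
}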
@@ -542,17 +542,17 @@ var init_migrations = () => {};

  // rls.ts
  import { logger as logger2, validateUuid } from "@elizaos/core";
- import { eq, sql as sql18 } from "drizzle-orm";
+ import { eq, sql as sql20 } from "drizzle-orm";
  async function installRLSFunctions(adapter) {
  const db = getDb(adapter);
- await db.execute(sql18`
+ await db.execute(sql20`
  CREATE TABLE IF NOT EXISTS servers (
  id UUID PRIMARY KEY,
  created_at TIMESTAMPTZ DEFAULT NOW() NOT NULL,
  updated_at TIMESTAMPTZ DEFAULT NOW() NOT NULL
  )
  `);
- await db.execute(sql18`
+ await db.execute(sql20`
  CREATE OR REPLACE FUNCTION current_server_id() RETURNS UUID AS $$
  DECLARE
  app_name TEXT;
@@ -567,7 +567,7 @@ async function installRLSFunctions(adapter) {
  END;
  $$ LANGUAGE plpgsql STABLE;
  `);
- await db.execute(sql18`
+ await db.execute(sql20`
  CREATE OR REPLACE FUNCTION add_server_isolation(
  schema_name text,
  table_name text
@@ -611,7 +611,7 @@ async function installRLSFunctions(adapter) {
  END;
  $$ LANGUAGE plpgsql;
  `);
- await db.execute(sql18`
+ await db.execute(sql20`
  CREATE OR REPLACE FUNCTION apply_rls_to_all_tables() RETURNS void AS $$
  DECLARE
  tbl record;
@@ -641,7 +641,7 @@ async function installRLSFunctions(adapter) {
  async function applyRLSToNewTables(adapter) {
  const db = getDb(adapter);
  try {
- await db.execute(sql18`SELECT apply_rls_to_all_tables()`);
+ await db.execute(sql20`SELECT apply_rls_to_all_tables()`);
  logger2.info({ src: "plugin:sql" }, "RLS applied to all tables");
  } catch (error) {
  logger2.warn({ src: "plugin:sql", error: String(error) }, "Failed to apply RLS to some tables");
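current_server_id()'s body is truncated right after it declares app_name, which suggests the server id rides on the session's application_name. Assuming that mechanism, a caller would pin a connection to one server before touching RLS-protected tables; set_config with is_local = false applies for the rest of the session:

import { sql } from "drizzle-orm";

type Db = { execute: (query: unknown) => Promise<unknown> }; // stand-in adapter type

// Assumption: current_server_id() parses the server UUID out of application_name.
async function scopeSessionToServer(db: Db, serverId: string) {
  await db.execute(sql`SELECT set_config('application_name', ${serverId}, false)`);
}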
@@ -650,7 +650,7 @@ async function applyRLSToNewTables(adapter) {
  async function installEntityRLS(adapter) {
  const db = getDb(adapter);
  logger2.info("[Entity RLS] Installing entity RLS functions and policies...");
- await db.execute(sql18`
+ await db.execute(sql20`
  CREATE OR REPLACE FUNCTION current_entity_id()
  RETURNS UUID AS $$
  DECLARE
@@ -672,7 +672,7 @@ async function installEntityRLS(adapter) {
  $$ LANGUAGE plpgsql STABLE;
  `);
  logger2.info("[Entity RLS] Created current_entity_id() function");
- await db.execute(sql18`
+ await db.execute(sql20`
  CREATE OR REPLACE FUNCTION add_entity_isolation(
  schema_name text,
  table_name text,
@@ -829,7 +829,7 @@ async function installEntityRLS(adapter) {
  $$ LANGUAGE plpgsql;
  `);
  logger2.info("[Entity RLS] Created add_entity_isolation() function");
- await db.execute(sql18`
+ await db.execute(sql20`
  CREATE OR REPLACE FUNCTION apply_entity_rls_to_all_tables() RETURNS void AS $$
  DECLARE
  tbl record;
@@ -873,7 +873,7 @@ async function installEntityRLS(adapter) {
  async function applyEntityRLSToAllTables(adapter) {
  const db = getDb(adapter);
  try {
- await db.execute(sql18`SELECT apply_entity_rls_to_all_tables()`);
+ await db.execute(sql20`SELECT apply_entity_rls_to_all_tables()`);
  logger2.info("[Entity RLS] Applied entity RLS to all eligible tables");
  } catch (error) {
  logger2.warn("[Entity RLS] Failed to apply entity RLS to some tables:", String(error));
@@ -1466,7 +1466,7 @@ function hasChanges(previousSnapshot, currentSnapshot) {
  const currHash = hashSnapshot(currentSnapshot);
  return prevHash !== currHash;
  }
- var sqlToStr = (sql19, _casing) => {
+ var sqlToStr = (sql21, _casing) => {
  const config = {
  escapeName: () => {
  throw new Error("we don't support params for `sql` default values");
@@ -1479,7 +1479,7 @@ var sqlToStr = (sql19, _casing) => {
  },
  casing: undefined
  };
- return sql19.toQuery(config).sql;
+ return sql21.toQuery(config).sql;
  };
  var init_snapshot_generator = () => {};
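sqlToStr renders a parameterless sql`` fragment (a column DEFAULT, typically) into plain text for snapshot hashing; the throwing escapeName is what rejects parameterized defaults. Expected behavior, following the toQuery call shape shown above (inputs chosen for illustration):

// sqlToStr(sql`now()`)         -> "now()"
// sqlToStr(sql`'{}'::jsonb`)   -> "'{}'::jsonb"
// sqlToStr(sql`${someValue}`)  -> throws "we don't support params for `sql` default values"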

@@ -1669,14 +1669,14 @@ async function generateMigrationSQL(previousSnapshot, currentSnapshot, diff) {
  const alterStatements = generateAlterColumnSQL(modified.table, modified.column, modified.changes);
  statements.push(...alterStatements);
  }
- for (const index5 of diff.indexes.deleted) {
- statements.push(generateDropIndexSQL(index5));
+ for (const index7 of diff.indexes.deleted) {
+ statements.push(generateDropIndexSQL(index7));
  }
  for (const alteredIndex of diff.indexes.altered) {
  statements.push(generateDropIndexSQL(alteredIndex.old));
  }
- for (const index5 of diff.indexes.created) {
- statements.push(generateCreateIndexSQL(index5));
+ for (const index7 of diff.indexes.created) {
+ statements.push(generateCreateIndexSQL(index7));
  }
  for (const alteredIndex of diff.indexes.altered) {
  statements.push(generateCreateIndexSQL(alteredIndex.new));
@@ -1773,18 +1773,18 @@ function generateCreateTableSQL(fullTableName, table) {
  return { tableSQL, fkSQLs };
  }
  function generateColumnDefinition(name, def) {
- let sql19 = `"${name}" ${def.type}`;
+ let sql21 = `"${name}" ${def.type}`;
  if (def.primaryKey && !def.type.includes("SERIAL")) {
- sql19 += " PRIMARY KEY";
+ sql21 += " PRIMARY KEY";
  }
  if (def.notNull) {
- sql19 += " NOT NULL";
+ sql21 += " NOT NULL";
  }
  if (def.default !== undefined) {
  const defaultValue = formatDefaultValue(def.default, def.type);
- sql19 += ` DEFAULT ${defaultValue}`;
+ sql21 += ` DEFAULT ${defaultValue}`;
  }
- return sql19;
+ return sql21;
  }
  function generateAddColumnSQL(table, column, definition) {
  const [schema, tableName] = table.includes(".") ? table.split(".") : ["public", table];
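A worked example of generateColumnDefinition, assuming formatDefaultValue passes SQL expressions through unquoted (its body sits outside this diff):

// generateColumnDefinition("id", {
//   type: "uuid", primaryKey: true, notNull: true, default: "gen_random_uuid()",
// })
// -> '"id" uuid PRIMARY KEY NOT NULL DEFAULT gen_random_uuid()'
// The includes("SERIAL") guard skips the explicit PRIMARY KEY clause for SERIAL types.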
@@ -1920,18 +1920,18 @@ function formatDefaultValue(value, type) {
  }
  return String(value);
  }
- function generateCreateIndexSQL(index5) {
- const unique3 = index5.isUnique ? "UNIQUE " : "";
- const method = index5.method || "btree";
- const columns = index5.columns.map((c) => {
+ function generateCreateIndexSQL(index7) {
+ const unique3 = index7.isUnique ? "UNIQUE " : "";
+ const method = index7.method || "btree";
+ const columns = index7.columns.map((c) => {
  if (c.isExpression) {
  return c.expression;
  }
  return `"${c.expression}"${c.asc === false ? " DESC" : ""}`;
  }).join(", ");
- const indexName = index5.name.includes(".") ? index5.name.split(".")[1] : index5.name;
+ const indexName = index7.name.includes(".") ? index7.name.split(".")[1] : index7.name;
  let tableRef;
- const indexTable = index5.table;
+ const indexTable = index7.table;
  if (indexTable?.includes(".")) {
  const [schema, table] = indexTable.split(".");
  tableRef = `"${schema}"."${table}"`;
@@ -1940,8 +1940,8 @@ function generateCreateIndexSQL(index5) {
  }
  return `CREATE ${unique3}INDEX "${indexName}" ON ${tableRef} USING ${method} (${columns});`;
  }
- function generateDropIndexSQL(index5) {
- const indexNameFull = typeof index5 === "string" ? index5 : index5.name;
+ function generateDropIndexSQL(index7) {
+ const indexNameFull = typeof index7 === "string" ? index7 : index7.name;
  const indexName = indexNameFull.includes(".") ? indexNameFull.split(".")[1] : indexNameFull;
  return `DROP INDEX IF EXISTS "${indexName}";`;
  }
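Both index generators are fully visible here, so their behavior can be pinned down with a concrete input (the index and table names are examples):

// generateCreateIndexSQL({
//   name: "public.idx_participants_room",
//   table: "public.participants",
//   isUnique: false,
//   columns: [{ expression: "room_id", isExpression: false, asc: true }],
// })
// -> 'CREATE INDEX "idx_participants_room" ON "public"."participants" USING btree ("room_id");'
//
// generateDropIndexSQL("public.idx_participants_room")
// -> 'DROP INDEX IF EXISTS "idx_participants_room";'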
@@ -1951,14 +1951,14 @@ function generateCreateForeignKeySQL(fk) {
  const tableFrom = fk.tableFrom;
  const columnsFrom = fk.columnsFrom.map((c) => `"${c}"`).join(", ");
  const columnsTo = fk.columnsTo.map((c) => `"${c}"`).join(", ");
- let sql19 = `ALTER TABLE "${schemaFrom}"."${tableFrom}" ADD CONSTRAINT "${fk.name}" FOREIGN KEY (${columnsFrom}) REFERENCES "${schemaTo}"."${fk.tableTo}" (${columnsTo})`;
+ let sql21 = `ALTER TABLE "${schemaFrom}"."${tableFrom}" ADD CONSTRAINT "${fk.name}" FOREIGN KEY (${columnsFrom}) REFERENCES "${schemaTo}"."${fk.tableTo}" (${columnsTo})`;
  if (fk.onDelete) {
- sql19 += ` ON DELETE ${fk.onDelete}`;
+ sql21 += ` ON DELETE ${fk.onDelete}`;
  }
  if (fk.onUpdate) {
- sql19 += ` ON UPDATE ${fk.onUpdate}`;
+ sql21 += ` ON UPDATE ${fk.onUpdate}`;
  }
- return `${sql19};`;
+ return `${sql21};`;
  }
  function generateDropForeignKeySQL(fk) {
  const [schema, tableName] = fk.tableFrom ? fk.tableFrom.includes(".") ? fk.tableFrom.split(".") : ["public", fk.tableFrom] : ["public", ""];
@@ -1969,12 +1969,12 @@ function generateCreateUniqueConstraintSQL(constraint) {
  const [schema, tableName] = table.includes(".") ? table.split(".") : ["public", table];
  const name = constraint.name;
  const columns = constraint.columns.map((c) => `"${c}"`).join(", ");
- let sql19 = `ALTER TABLE "${schema}"."${tableName}" ADD CONSTRAINT "${name}" UNIQUE`;
+ let sql21 = `ALTER TABLE "${schema}"."${tableName}" ADD CONSTRAINT "${name}" UNIQUE`;
  if (constraint.nullsNotDistinct) {
- sql19 += ` NULLS NOT DISTINCT`;
+ sql21 += ` NULLS NOT DISTINCT`;
  }
- sql19 += ` (${columns});`;
- return sql19;
+ sql21 += ` (${columns});`;
+ return sql21;
  }
  function generateDropUniqueConstraintSQL(constraint) {
  const table = constraint.table || "";
@@ -1997,7 +1997,7 @@ var init_sql_generator = () => {};

  // runtime-migrator/drizzle-adapters/database-introspector.ts
  import { logger as logger4 } from "@elizaos/core";
- import { sql as sql19 } from "drizzle-orm";
+ import { sql as sql21 } from "drizzle-orm";
  function getRows2(result) {
  return result.rows;
  }
@@ -2126,7 +2126,7 @@ class DatabaseIntrospector {
  };
  }
  async getTables(schemaName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  table_schema,
  table_name
  FROM information_schema.tables
@@ -2136,7 +2136,7 @@ class DatabaseIntrospector {
  return getRows2(result);
  }
  async getColumns(schemaName, tableName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  a.attname AS column_name,
  CASE
  WHEN a.attnotnull THEN 'NO'
@@ -2179,7 +2179,7 @@ class DatabaseIntrospector {
  return getRows2(result);
  }
  async getIndexes(schemaName, tableName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  i.relname AS name,
  idx.indisunique AS is_unique,
  idx.indisprimary AS is_primary,
@@ -2203,7 +2203,7 @@ class DatabaseIntrospector {
  return getRows2(result);
  }
  async getForeignKeys(schemaName, tableName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  con.conname AS name,
  att.attname AS column_name,
  fnsp.nspname AS foreign_table_schema,
@@ -2238,7 +2238,7 @@ class DatabaseIntrospector {
  return getRows2(result);
  }
  async getPrimaryKeys(schemaName, tableName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  con.conname AS name,
  ARRAY(
  SELECT a.attname
@@ -2256,7 +2256,7 @@ class DatabaseIntrospector {
  return getRows2(result);
  }
  async getUniqueConstraints(schemaName, tableName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  con.conname AS name,
  ARRAY(
  SELECT a.attname
@@ -2274,7 +2274,7 @@ class DatabaseIntrospector {
  return getRows2(result);
  }
  async getCheckConstraints(schemaName, tableName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  con.conname AS name,
  pg_get_constraintdef(con.oid) AS definition
  FROM pg_constraint con
@@ -2286,7 +2286,7 @@ class DatabaseIntrospector {
  return getRows2(result);
  }
  async getEnums(schemaName) {
- const result = await this.db.execute(sql19`SELECT
+ const result = await this.db.execute(sql21`SELECT
  n.nspname AS schema,
  t.typname AS name,
  e.enumlabel AS value,
@@ -2318,7 +2318,7 @@ class DatabaseIntrospector {
  }
  async hasExistingTables(pluginName) {
  const schemaName = pluginName === "@elizaos/plugin-sql" ? "public" : this.deriveSchemaName(pluginName);
- const result = await this.db.execute(sql19`SELECT COUNT(*) AS count
+ const result = await this.db.execute(sql21`SELECT COUNT(*) AS count
  FROM information_schema.tables
  WHERE table_schema = ${schemaName}
  AND table_type = 'BASE TABLE'`);
@@ -2334,7 +2334,7 @@ var init_database_introspector = () => {};

  // runtime-migrator/extension-manager.ts
  import { logger as logger5 } from "@elizaos/core";
- import { sql as sql20 } from "drizzle-orm";
+ import { sql as sql22 } from "drizzle-orm";

  class ExtensionManager {
  db;
@@ -2348,7 +2348,7 @@ class ExtensionManager {
  logger5.warn({ src: "plugin:sql", extension }, "Invalid extension name - contains invalid characters");
  continue;
  }
- await this.db.execute(sql20`CREATE EXTENSION IF NOT EXISTS ${sql20.identifier(extension)}`);
+ await this.db.execute(sql22`CREATE EXTENSION IF NOT EXISTS ${sql22.identifier(extension)}`);
  logger5.debug({ src: "plugin:sql", extension }, "Extension installed");
  } catch (error) {
  const errorMessage = error instanceof Error ? error.message : String(error);
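ExtensionManager validates the extension name and then identifier-quotes it with sql.identifier, so the statement cannot be bent by a hostile name. The same pattern standalone; "vector" is an arbitrary example extension:

import { sql } from "drizzle-orm";

type Db = { execute: (query: unknown) => Promise<unknown> }; // stand-in adapter type

async function installExtension(db: Db, extension: string) {
  // sql.identifier renders as a quoted identifier, never as a bind parameter.
  await db.execute(sql`CREATE EXTENSION IF NOT EXISTS ${sql.identifier(extension)}`);
}
// await installExtension(db, "vector");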
@@ -2404,7 +2404,7 @@ function normalizeSchemaName(input) {
  var init_schema_transformer = () => {};

  // runtime-migrator/storage/journal-storage.ts
- import { sql as sql21 } from "drizzle-orm";
+ import { sql as sql23 } from "drizzle-orm";

  class JournalStorage {
  db;
@@ -2412,7 +2412,7 @@ class JournalStorage {
  this.db = db;
  }
  async loadJournal(pluginName) {
- const result = await this.db.execute(sql21`SELECT version, dialect, entries
+ const result = await this.db.execute(sql23`SELECT version, dialect, entries
  FROM migrations._journal
  WHERE plugin_name = ${pluginName}`);
  if (result.rows.length === 0) {
@@ -2429,7 +2429,7 @@ class JournalStorage {
  };
  }
  async saveJournal(pluginName, journal) {
- await this.db.execute(sql21`INSERT INTO migrations._journal (plugin_name, version, dialect, entries)
+ await this.db.execute(sql23`INSERT INTO migrations._journal (plugin_name, version, dialect, entries)
  VALUES (${pluginName}, ${journal.version}, ${journal.dialect}, ${JSON.stringify(journal.entries)}::jsonb)
  ON CONFLICT (plugin_name)
  DO UPDATE SET
@@ -2471,7 +2471,7 @@ class JournalStorage {
  var init_journal_storage = () => {};

  // runtime-migrator/storage/migration-tracker.ts
- import { sql as sql22 } from "drizzle-orm";
+ import { sql as sql24 } from "drizzle-orm";

  class MigrationTracker {
  db;
@@ -2479,11 +2479,11 @@ class MigrationTracker {
  this.db = db;
  }
  async ensureSchema() {
- await this.db.execute(sql22`CREATE SCHEMA IF NOT EXISTS migrations`);
+ await this.db.execute(sql24`CREATE SCHEMA IF NOT EXISTS migrations`);
  }
  async ensureTables() {
  await this.ensureSchema();
- await this.db.execute(sql22`
+ await this.db.execute(sql24`
  CREATE TABLE IF NOT EXISTS migrations._migrations (
  id SERIAL PRIMARY KEY,
  plugin_name TEXT NOT NULL,
@@ -2491,7 +2491,7 @@ class MigrationTracker {
  created_at BIGINT NOT NULL
  )
  `);
- await this.db.execute(sql22`
+ await this.db.execute(sql24`
  CREATE TABLE IF NOT EXISTS migrations._journal (
  plugin_name TEXT PRIMARY KEY,
  version TEXT NOT NULL,
@@ -2499,7 +2499,7 @@ class MigrationTracker {
  entries JSONB NOT NULL DEFAULT '[]'
  )
  `);
- await this.db.execute(sql22`
+ await this.db.execute(sql24`
  CREATE TABLE IF NOT EXISTS migrations._snapshots (
  id SERIAL PRIMARY KEY,
  plugin_name TEXT NOT NULL,
@@ -2511,7 +2511,7 @@ class MigrationTracker {
  `);
  }
  async getLastMigration(pluginName) {
- const result = await this.db.execute(sql22`SELECT id, hash, created_at
+ const result = await this.db.execute(sql24`SELECT id, hash, created_at
  FROM migrations._migrations
  WHERE plugin_name = ${pluginName}
  ORDER BY created_at DESC
@@ -2519,14 +2519,14 @@ class MigrationTracker {
  return getRow(result) || null;
  }
  async recordMigration(pluginName, hash, createdAt) {
- await this.db.execute(sql22`INSERT INTO migrations._migrations (plugin_name, hash, created_at)
+ await this.db.execute(sql24`INSERT INTO migrations._migrations (plugin_name, hash, created_at)
  VALUES (${pluginName}, ${hash}, ${createdAt})`);
  }
  }
  var init_migration_tracker = () => {};

  // runtime-migrator/storage/snapshot-storage.ts
- import { sql as sql23 } from "drizzle-orm";
+ import { sql as sql25 } from "drizzle-orm";

  class SnapshotStorage {
  db;
@@ -2534,7 +2534,7 @@ class SnapshotStorage {
  this.db = db;
  }
  async saveSnapshot(pluginName, idx, snapshot) {
- await this.db.execute(sql23`INSERT INTO migrations._snapshots (plugin_name, idx, snapshot)
+ await this.db.execute(sql25`INSERT INTO migrations._snapshots (plugin_name, idx, snapshot)
  VALUES (${pluginName}, ${idx}, ${JSON.stringify(snapshot)}::jsonb)
  ON CONFLICT (plugin_name, idx)
  DO UPDATE SET
@@ -2542,7 +2542,7 @@ class SnapshotStorage {
  created_at = NOW()`);
  }
  async loadSnapshot(pluginName, idx) {
- const result = await this.db.execute(sql23`SELECT snapshot
+ const result = await this.db.execute(sql25`SELECT snapshot
  FROM migrations._snapshots
  WHERE plugin_name = ${pluginName} AND idx = ${idx}`);
  if (result.rows.length === 0) {
@@ -2551,7 +2551,7 @@ class SnapshotStorage {
  return result.rows[0].snapshot;
  }
  async getLatestSnapshot(pluginName) {
- const result = await this.db.execute(sql23`SELECT snapshot
+ const result = await this.db.execute(sql25`SELECT snapshot
  FROM migrations._snapshots
  WHERE plugin_name = ${pluginName}
  ORDER BY idx DESC
@@ -2562,7 +2562,7 @@ class SnapshotStorage {
  return result.rows[0].snapshot;
  }
  async getAllSnapshots(pluginName) {
- const result = await this.db.execute(sql23`SELECT snapshot
+ const result = await this.db.execute(sql25`SELECT snapshot
  FROM migrations._snapshots
  WHERE plugin_name = ${pluginName}
  ORDER BY idx ASC`);
@@ -2573,7 +2573,7 @@ var init_snapshot_storage = () => {};

  // runtime-migrator/runtime-migrator.ts
  import { logger as logger7 } from "@elizaos/core";
- import { sql as sql24 } from "drizzle-orm";
+ import { sql as sql26 } from "drizzle-orm";

  class RuntimeMigrator {
  db;
@@ -2611,7 +2611,7 @@ class RuntimeMigrator {
  }
  for (const schemaName of schemasToCreate) {
  logger7.debug({ src: "plugin:sql", schemaName }, "Ensuring schema exists");
- await this.db.execute(sql24.raw(`CREATE SCHEMA IF NOT EXISTS "${schemaName}"`));
+ await this.db.execute(sql26.raw(`CREATE SCHEMA IF NOT EXISTS "${schemaName}"`));
  }
  }
  validateSchemaUsage(pluginName, snapshot) {
@@ -2765,11 +2765,11 @@ class RuntimeMigrator {
  try {
  logger7.debug({ src: "plugin:sql", pluginName }, "Using PostgreSQL advisory locks");
  const lockIdStr = lockId.toString();
- const lockResult = await this.db.execute(sql24`SELECT pg_try_advisory_lock(CAST(${lockIdStr} AS bigint)) as acquired`);
+ const lockResult = await this.db.execute(sql26`SELECT pg_try_advisory_lock(CAST(${lockIdStr} AS bigint)) as acquired`);
  lockAcquired = getRow(lockResult)?.acquired === true;
  if (!lockAcquired) {
  logger7.info({ src: "plugin:sql", pluginName }, "Migration already in progress, waiting for lock");
- await this.db.execute(sql24`SELECT pg_advisory_lock(CAST(${lockIdStr} AS bigint))`);
+ await this.db.execute(sql26`SELECT pg_advisory_lock(CAST(${lockIdStr} AS bigint))`);
  lockAcquired = true;
  logger7.info({ src: "plugin:sql", pluginName }, "Lock acquired");
  } else {
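The lock sequence above tries pg_try_advisory_lock first so contention can be logged, then falls back to the blocking pg_advisory_lock. The same pattern as a self-contained helper; the Db type and bigint key are stand-ins:

import { sql } from "drizzle-orm";

type Db = { execute: (query: unknown) => Promise<{ rows: Array<Record<string, unknown>> }> }; // stand-in

// Non-blocking attempt first, blocking fall-back, release in finally.
async function withAdvisoryLock(db: Db, lockId: bigint, fn: () => Promise<void>): Promise<void> {
  const lockIdStr = lockId.toString();
  const tryResult = await db.execute(sql`SELECT pg_try_advisory_lock(CAST(${lockIdStr} AS bigint)) AS acquired`);
  if (tryResult.rows[0]?.acquired !== true) {
    await db.execute(sql`SELECT pg_advisory_lock(CAST(${lockIdStr} AS bigint))`); // blocks until the holder releases
  }
  try {
    await fn();
  } finally {
    await db.execute(sql`SELECT pg_advisory_unlock(CAST(${lockIdStr} AS bigint))`);
  }
}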
@@ -2900,7 +2900,7 @@ class RuntimeMigrator {
  if (lockAcquired && isRealPostgres) {
  try {
  const lockIdStr = lockId.toString();
- await this.db.execute(sql24`SELECT pg_advisory_unlock(CAST(${lockIdStr} AS bigint))`);
+ await this.db.execute(sql26`SELECT pg_advisory_unlock(CAST(${lockIdStr} AS bigint))`);
  logger7.debug({ src: "plugin:sql", pluginName }, "Advisory lock released");
  } catch (unlockError) {
  logger7.warn({
@@ -2915,23 +2915,23 @@ class RuntimeMigrator {
  async executeMigration(pluginName, snapshot, hash, sqlStatements) {
  let transactionStarted = false;
  try {
- await this.db.execute(sql24`BEGIN`);
+ await this.db.execute(sql26`BEGIN`);
  transactionStarted = true;
  for (const stmt of sqlStatements) {
  logger7.debug({ src: "plugin:sql", statement: stmt }, "Executing SQL statement");
- await this.db.execute(sql24.raw(stmt));
+ await this.db.execute(sql26.raw(stmt));
  }
  const idx = await this.journalStorage.getNextIdx(pluginName);
  await this.migrationTracker.recordMigration(pluginName, hash, Date.now());
  const tag = this.generateMigrationTag(idx, pluginName);
  await this.journalStorage.updateJournal(pluginName, idx, tag, true);
  await this.snapshotStorage.saveSnapshot(pluginName, idx, snapshot);
- await this.db.execute(sql24`COMMIT`);
+ await this.db.execute(sql26`COMMIT`);
  logger7.info({ src: "plugin:sql", pluginName, tag }, "Recorded migration");
  } catch (error) {
  if (transactionStarted) {
  try {
- await this.db.execute(sql24`ROLLBACK`);
+ await this.db.execute(sql26`ROLLBACK`);
  logger7.error({
  src: "plugin:sql",
  error: error instanceof Error ? error.message : String(error)
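executeMigration leans on PostgreSQL's transactional DDL: the generated statements and the journal/snapshot bookkeeping either all commit or all roll back. Condensed to its control flow, with a stand-in Db type:

import { sql } from "drizzle-orm";

type Db = { execute: (query: unknown) => Promise<unknown> }; // stand-in adapter type

// All-or-nothing: works because PostgreSQL DDL is transactional.
async function runAtomically(db: Db, statements: string[]): Promise<void> {
  await db.execute(sql`BEGIN`);
  try {
    for (const stmt of statements) {
      await db.execute(sql.raw(stmt));
    }
    await db.execute(sql`COMMIT`);
  } catch (error) {
    await db.execute(sql`ROLLBACK`);
    throw error;
  }
}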
@@ -2948,8 +2948,8 @@ class RuntimeMigrator {
  }
  generateMigrationTag(idx, pluginName) {
  const prefix = idx.toString().padStart(4, "0");
- const timestamp17 = Date.now().toString(36);
- return `${prefix}_${pluginName}_${timestamp17}`;
+ const timestamp19 = Date.now().toString(36);
+ return `${prefix}_${pluginName}_${timestamp19}`;
  }
  async getStatus(pluginName) {
  const lastMigration = await this.migrationTracker.getLastMigration(pluginName);
@@ -2964,9 +2964,9 @@ class RuntimeMigrator {
  }
  async reset(pluginName) {
  logger7.warn({ src: "plugin:sql", pluginName }, "Resetting migrations");
- await this.db.execute(sql24`DELETE FROM migrations._migrations WHERE plugin_name = ${pluginName}`);
- await this.db.execute(sql24`DELETE FROM migrations._journal WHERE plugin_name = ${pluginName}`);
- await this.db.execute(sql24`DELETE FROM migrations._snapshots WHERE plugin_name = ${pluginName}`);
+ await this.db.execute(sql26`DELETE FROM migrations._migrations WHERE plugin_name = ${pluginName}`);
+ await this.db.execute(sql26`DELETE FROM migrations._journal WHERE plugin_name = ${pluginName}`);
+ await this.db.execute(sql26`DELETE FROM migrations._snapshots WHERE plugin_name = ${pluginName}`);
  logger7.warn({ src: "plugin:sql", pluginName }, "Reset complete");
  }
  async checkMigration(pluginName, schema) {
@@ -3147,10 +3147,10 @@ import {
  lt,
  lte,
  or,
- sql as sql25
+ sql as sql27
  } from "drizzle-orm";

- // ../../../node_modules/uuid/dist/stringify.js
+ // ../../../node_modules/.bun/uuid@13.0.0/node_modules/uuid/dist/stringify.js
  var byteToHex = [];
  for (let i = 0;i < 256; ++i) {
  byteToHex.push((i + 256).toString(16).slice(1));
@@ -3159,7 +3159,7 @@ function unsafeStringify(arr, offset = 0) {
  return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase();
  }

- // ../../../node_modules/uuid/dist/rng.js
+ // ../../../node_modules/.bun/uuid@13.0.0/node_modules/uuid/dist/rng.js
  var getRandomValues;
  var rnds8 = new Uint8Array(16);
  function rng() {
@@ -3172,11 +3172,11 @@ function rng() {
  return getRandomValues(rnds8);
  }

- // ../../../node_modules/uuid/dist/native.js
+ // ../../../node_modules/.bun/uuid@13.0.0/node_modules/uuid/dist/native.js
  var randomUUID = typeof crypto !== "undefined" && crypto.randomUUID && crypto.randomUUID.bind(crypto);
  var native_default = { randomUUID };

- // ../../../node_modules/uuid/dist/v4.js
+ // ../../../node_modules/.bun/uuid@13.0.0/node_modules/uuid/dist/v4.js
  function _v4(options, buf, offset) {
  options = options || {};
  const rnds = options.random ?? options.rng?.() ?? rng();
@@ -3361,6 +3361,8 @@ __export(exports_schema, {
  roomTable: () => roomTable,
  relationshipTable: () => relationshipTable,
  participantTable: () => participantTable,
+ pairingRequestTable: () => pairingRequestTable,
+ pairingAllowlistTable: () => pairingAllowlistTable,
  messageTable: () => messageTable,
  messageServerTable: () => messageServerTable,
  messageServerAgentsTable: () => messageServerAgentsTable,
@@ -3503,26 +3505,59 @@ var messageServerAgentsTable = pgTable14("message_server_agents", {
  messageServerId: uuid12("message_server_id").notNull().references(() => messageServerTable.id, { onDelete: "cascade" }),
  agentId: uuid12("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" })
  }, (table) => [primaryKey3({ columns: [table.messageServerId, table.agentId] })]);
- // schema/participant.ts
+ // schema/pairingAllowlist.ts
  init_agent();
  import { sql as sql13 } from "drizzle-orm";
- import { foreignKey as foreignKey4, index as index3, pgTable as pgTable15, text as text13, timestamp as timestamp13, uuid as uuid13 } from "drizzle-orm/pg-core";
- var participantTable = pgTable15("participants", {
- id: uuid13("id").notNull().primaryKey().default(sql13`gen_random_uuid()`),
+ import { index as index3, jsonb as jsonb12, pgTable as pgTable15, text as text13, timestamp as timestamp13, uniqueIndex, uuid as uuid13 } from "drizzle-orm/pg-core";
+ var pairingAllowlistTable = pgTable15("pairing_allowlist", {
+ id: uuid13("id").primaryKey().defaultRandom(),
+ channel: text13("channel").notNull(),
+ senderId: text13("sender_id").notNull(),
  createdAt: timestamp13("created_at", { withTimezone: true }).default(sql13`now()`).notNull(),
- entityId: uuid13("entity_id").references(() => entityTable.id, {
+ metadata: jsonb12("metadata").default(sql13`'{}'::jsonb`),
+ agentId: uuid13("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" })
+ }, (table) => [
+ index3("pairing_allowlist_channel_agent_idx").on(table.channel, table.agentId),
+ uniqueIndex("pairing_allowlist_sender_channel_agent_idx").on(table.senderId, table.channel, table.agentId)
+ ]);
+ // schema/pairingRequest.ts
+ init_agent();
+ import { sql as sql14 } from "drizzle-orm";
+ import { index as index4, jsonb as jsonb13, pgTable as pgTable16, text as text14, timestamp as timestamp14, uniqueIndex as uniqueIndex2, uuid as uuid14 } from "drizzle-orm/pg-core";
+ var pairingRequestTable = pgTable16("pairing_requests", {
+ id: uuid14("id").primaryKey().defaultRandom(),
+ channel: text14("channel").notNull(),
+ senderId: text14("sender_id").notNull(),
+ code: text14("code").notNull(),
+ createdAt: timestamp14("created_at", { withTimezone: true }).default(sql14`now()`).notNull(),
+ lastSeenAt: timestamp14("last_seen_at", { withTimezone: true }).default(sql14`now()`).notNull(),
+ metadata: jsonb13("metadata").default(sql14`'{}'::jsonb`),
+ agentId: uuid14("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" })
+ }, (table) => [
+ index4("pairing_requests_channel_agent_idx").on(table.channel, table.agentId),
+ uniqueIndex2("pairing_requests_code_channel_agent_idx").on(table.code, table.channel, table.agentId),
+ uniqueIndex2("pairing_requests_sender_channel_agent_idx").on(table.senderId, table.channel, table.agentId)
+ ]);
+ // schema/participant.ts
+ init_agent();
+ import { sql as sql15 } from "drizzle-orm";
+ import { foreignKey as foreignKey4, index as index5, pgTable as pgTable17, text as text15, timestamp as timestamp15, uuid as uuid15 } from "drizzle-orm/pg-core";
+ var participantTable = pgTable17("participants", {
+ id: uuid15("id").notNull().primaryKey().default(sql15`gen_random_uuid()`),
+ createdAt: timestamp15("created_at", { withTimezone: true }).default(sql15`now()`).notNull(),
+ entityId: uuid15("entity_id").references(() => entityTable.id, {
  onDelete: "cascade"
  }),
- roomId: uuid13("room_id").references(() => roomTable.id, {
+ roomId: uuid15("room_id").references(() => roomTable.id, {
  onDelete: "cascade"
  }),
- agentId: uuid13("agent_id").references(() => agentTable.id, {
+ agentId: uuid15("agent_id").references(() => agentTable.id, {
  onDelete: "cascade"
  }),
- roomState: text13("room_state")
+ roomState: text15("room_state")
  }, (table) => [
- index3("idx_participants_user").on(table.entityId),
- index3("idx_participants_room").on(table.roomId),
+ index5("idx_participants_user").on(table.entityId),
+ index5("idx_participants_room").on(table.roomId),
  foreignKey4({
  name: "fk_room",
  columns: [table.roomId],
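These two tables are the substantive schema addition in this release: pairing_requests holds outstanding codes, keyed uniquely per (code, channel, agent) and per (sender, channel, agent), while pairing_allowlist records approved senders. The diff does not show the service code that drives them, so the drizzle calls below are a hypothetical usage sketch (the channel, sender, and code values are invented):

import { and, eq } from "drizzle-orm";

// Stand-ins: db is the adapter's drizzle instance, agentId the agent's UUID,
// and the two tables are the ones defined in the hunk above.
declare const db: any;
declare const agentId: string;
declare const pairingRequestTable: any, pairingAllowlistTable: any;

// Record a pairing request; the unique index on (sender_id, channel, agent_id)
// makes a repeat request from the same sender a no-op here.
await db
  .insert(pairingRequestTable)
  .values({
    channel: "telegram", // example channel name
    senderId: "user-123", // example external sender id
    code: "483920", // example pairing code
    agentId,
  })
  .onConflictDoNothing();

// Later: has this sender been approved?
const allowed = await db
  .select()
  .from(pairingAllowlistTable)
  .where(
    and(
      eq(pairingAllowlistTable.senderId, "user-123"),
      eq(pairingAllowlistTable.channel, "telegram"),
      eq(pairingAllowlistTable.agentId, agentId)
    )
  );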
@@ -3536,27 +3571,27 @@ var participantTable = pgTable15("participants", {
3536
3571
  ]);
3537
3572
  // schema/relationship.ts
3538
3573
  init_agent();
3539
- import { sql as sql14 } from "drizzle-orm";
3574
+ import { sql as sql16 } from "drizzle-orm";
3540
3575
  import {
3541
3576
  foreignKey as foreignKey5,
3542
- index as index4,
3543
- jsonb as jsonb12,
3544
- pgTable as pgTable16,
3545
- text as text14,
3546
- timestamp as timestamp14,
3577
+ index as index6,
3578
+ jsonb as jsonb14,
3579
+ pgTable as pgTable18,
3580
+ text as text16,
3581
+ timestamp as timestamp16,
3547
3582
  unique as unique2,
3548
- uuid as uuid14
3583
+ uuid as uuid16
3549
3584
  } from "drizzle-orm/pg-core";
3550
- var relationshipTable = pgTable16("relationships", {
3551
- id: uuid14("id").notNull().primaryKey().default(sql14`gen_random_uuid()`),
3552
- createdAt: timestamp14("created_at", { withTimezone: true }).default(sql14`now()`).notNull(),
3553
- sourceEntityId: uuid14("source_entity_id").notNull().references(() => entityTable.id, { onDelete: "cascade" }),
3554
- targetEntityId: uuid14("target_entity_id").notNull().references(() => entityTable.id, { onDelete: "cascade" }),
3555
- agentId: uuid14("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" }),
3556
- tags: text14("tags").array(),
3557
- metadata: jsonb12("metadata")
3585
+ var relationshipTable = pgTable18("relationships", {
3586
+ id: uuid16("id").notNull().primaryKey().default(sql16`gen_random_uuid()`),
3587
+ createdAt: timestamp16("created_at", { withTimezone: true }).default(sql16`now()`).notNull(),
3588
+ sourceEntityId: uuid16("source_entity_id").notNull().references(() => entityTable.id, { onDelete: "cascade" }),
3589
+ targetEntityId: uuid16("target_entity_id").notNull().references(() => entityTable.id, { onDelete: "cascade" }),
3590
+ agentId: uuid16("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" }),
3591
+ tags: text16("tags").array(),
3592
+ metadata: jsonb14("metadata")
3558
3593
  }, (table) => [
3559
- index4("idx_relationships_users").on(table.sourceEntityId, table.targetEntityId),
3594
+ index6("idx_relationships_users").on(table.sourceEntityId, table.targetEntityId),
3560
3595
  unique2("unique_relationship").on(table.sourceEntityId, table.targetEntityId, table.agentId),
3561
3596
  foreignKey5({
3562
3597
  name: "fk_user_a",
@@ -3575,20 +3610,20 @@ init_server();
3575
3610
 
3576
3611
  // schema/tasks.ts
3577
3612
  init_agent();
3578
- import { sql as sql16 } from "drizzle-orm";
3579
- import { jsonb as jsonb13, pgTable as pgTable18, text as text15, timestamp as timestamp16, uuid as uuid16 } from "drizzle-orm/pg-core";
3580
- var taskTable = pgTable18("tasks", {
3581
- id: uuid16("id").primaryKey().defaultRandom(),
3582
- name: text15("name").notNull(),
3583
- description: text15("description"),
3584
- roomId: uuid16("room_id"),
3585
- worldId: uuid16("world_id"),
3586
- entityId: uuid16("entity_id"),
3587
- agentId: uuid16("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" }),
3588
- tags: text15("tags").array().default(sql16`'{}'::text[]`),
3589
- metadata: jsonb13("metadata").default(sql16`'{}'::jsonb`),
3590
- createdAt: timestamp16("created_at", { withTimezone: true }).defaultNow(),
3591
- updatedAt: timestamp16("updated_at", { withTimezone: true }).defaultNow()
3613
+ import { sql as sql18 } from "drizzle-orm";
3614
+ import { jsonb as jsonb15, pgTable as pgTable20, text as text17, timestamp as timestamp18, uuid as uuid18 } from "drizzle-orm/pg-core";
3615
+ var taskTable = pgTable20("tasks", {
3616
+ id: uuid18("id").primaryKey().defaultRandom(),
3617
+ name: text17("name").notNull(),
3618
+ description: text17("description"),
3619
+ roomId: uuid18("room_id"),
3620
+ worldId: uuid18("world_id"),
3621
+ entityId: uuid18("entity_id"),
3622
+ agentId: uuid18("agent_id").notNull().references(() => agentTable.id, { onDelete: "cascade" }),
3623
+ tags: text17("tags").array().default(sql18`'{}'::text[]`),
3624
+ metadata: jsonb15("metadata").default(sql18`'{}'::jsonb`),
3625
+ createdAt: timestamp18("created_at", { withTimezone: true }).defaultNow(),
3626
+ updatedAt: timestamp18("updated_at", { withTimezone: true }).defaultNow()
3592
3627
  });
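The tasks schema itself is unchanged in this hunk; only the bundler's import aliases move (sql16 → sql18, uuid16 → uuid18, and so on), consistent with the new pairing tables that appear later in this diff entering the bundle. For reference, a minimal standalone sketch of the same default-value pattern, using a hypothetical table name and assumed drizzle-orm imports:

```ts
import { sql } from "drizzle-orm";
import { jsonb, pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";

// Hypothetical table mirroring the defaults used by `tasks` above:
// sql`'{}'::text[]` and sql`'{}'::jsonb` emit literal SQL defaults, so rows
// that omit these columns get an empty array / empty object instead of NULL.
export const demoTasks = pgTable("demo_tasks", {
  id: uuid("id").primaryKey().defaultRandom(),            // gen_random_uuid()
  tags: text("tags").array().default(sql`'{}'::text[]`),  // empty text[]
  metadata: jsonb("metadata").default(sql`'{}'::jsonb`),  // empty jsonb
  createdAt: timestamp("created_at", { withTimezone: true }).defaultNow(),
});
```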
3593
3628
  // base.ts
3594
3629
  class BaseDrizzleAdapter extends DatabaseAdapter {
@@ -3998,11 +4033,11 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
3998
4033
  async getEntitiesByNames(params) {
3999
4034
  return this.withDatabase(async () => {
4000
4035
  const { names, agentId } = params;
4001
- const nameConditions = names.map((name) => sql25`${name} = ANY(${entityTable.names})`);
4002
- const query = sql25`
4036
+ const nameConditions = names.map((name) => sql27`${name} = ANY(${entityTable.names})`);
4037
+ const query = sql27`
4003
4038
  SELECT * FROM ${entityTable}
4004
4039
  WHERE ${entityTable.agentId} = ${agentId}
4005
- AND (${sql25.join(nameConditions, sql25` OR `)})
4040
+ AND (${sql27.join(nameConditions, sql27` OR `)})
4006
4041
  `;
4007
4042
  const result = await this.db.execute(query);
4008
4043
  return result.rows.map((row) => ({
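Only the sql alias changes here (sql25 → sql27). As context, a minimal sketch of the `= ANY(array_column)` filter this method builds; `namesColumn` is a stand-in for the bundle's entityTable.names text[] column:

```ts
import { sql, type SQL } from "drizzle-orm";
import type { AnyPgColumn } from "drizzle-orm/pg-core";

// Each name becomes a bound `$n = ANY(names)` condition; sql.join OR-combines
// them, so an entity matches if any stored name equals any requested name.
function buildNameFilter(names: string[], namesColumn: AnyPgColumn): SQL {
  const nameConditions = names.map((name) => sql`${name} = ANY(${namesColumn})`);
  return sql`(${sql.join(nameConditions, sql` OR `)})`;
}
```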
@@ -4025,7 +4060,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4025
4060
  metadata: row.metadata || {}
4026
4061
  }));
4027
4062
  }
4028
- const searchQuery = sql25`
4063
+ const searchQuery = sql27`
4029
4064
  SELECT * FROM ${entityTable}
4030
4065
  WHERE ${entityTable.agentId} = ${agentId}
4031
4066
  AND EXISTS (
@@ -4281,7 +4316,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4281
4316
  async getCachedEmbeddings(opts) {
4282
4317
  return this.withDatabase(async () => {
4283
4318
  try {
4284
- const results = await this.db.execute(sql25`
4319
+ const results = await this.db.execute(sql27`
4285
4320
  WITH content_text AS (
4286
4321
  SELECT
4287
4322
  m.id,
@@ -4341,7 +4376,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4341
4376
  const jsonString = JSON.stringify(sanitizedBody);
4342
4377
  await this.withEntityContext(params.entityId, async (tx) => {
4343
4378
  await tx.insert(logTable).values({
4344
- body: sql25`${jsonString}::jsonb`,
4379
+ body: sql27`${jsonString}::jsonb`,
4345
4380
  entityId: params.entityId,
4346
4381
  roomId: params.roomId,
4347
4382
  type: params.type
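Same alias-only change around the `::jsonb` insert. A minimal sketch of the pattern, with db and logTable treated as assumed handles: the serialized string is bound as an ordinary parameter and cast to jsonb inside Postgres.

```ts
import { sql } from "drizzle-orm";

// `db` and `logTable` stand in for the bundle's own adapter objects.
// Serializing once and casting server-side avoids driver-specific JSON encoding.
async function insertRunEvent(db: any, logTable: any, entityId: string, roomId: string) {
  const jsonString = JSON.stringify({ runId: "run-1", status: "started" });
  await db.insert(logTable).values({
    body: sql`${jsonString}::jsonb`, // bound as text, cast to jsonb in Postgres
    entityId,
    roomId,
    type: "run_event",
  });
}
```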
@@ -4415,7 +4450,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4415
4450
  const runMap = new Map;
4416
4451
  const conditions = [
4417
4452
  eq2(logTable.type, "run_event"),
4418
- sql25`${logTable.body} ? 'runId'`,
4453
+ sql27`${logTable.body} ? 'runId'`,
4419
4454
  eq2(roomTable.agentId, this.agentId)
4420
4455
  ];
4421
4456
  if (params.roomId) {
@@ -4430,9 +4465,9 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4430
4465
  const whereClause = and(...conditions);
4431
4466
  const eventLimit = Math.max(limit * 20, 200);
4432
4467
  const runEventRows = await tx.select({
4433
- runId: sql25`(${logTable.body} ->> 'runId')`,
4434
- status: sql25`(${logTable.body} ->> 'status')`,
4435
- messageId: sql25`(${logTable.body} ->> 'messageId')`,
4468
+ runId: sql27`(${logTable.body} ->> 'runId')`,
4469
+ status: sql27`(${logTable.body} ->> 'status')`,
4470
+ messageId: sql27`(${logTable.body} ->> 'messageId')`,
4436
4471
  rawBody: logTable.body,
4437
4472
  createdAt: logTable.createdAt,
4438
4473
  roomId: logTable.roomId,
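The run-summary query leans on two Postgres jsonb operators: `body ? 'runId'` (key existence) and `body ->> 'runId'` (extract a field as text). A minimal sketch under the same assumed handles:

```ts
import { and, eq, sql } from "drizzle-orm";

// `?` keeps only rows whose jsonb body contains a runId key; `->>` then
// projects that field out as a text column for aggregation.
async function selectRunEvents(db: any, logTable: any) {
  return db
    .select({
      runId: sql<string>`(${logTable.body} ->> 'runId')`,
      status: sql<string>`(${logTable.body} ->> 'status')`,
    })
    .from(logTable)
    .where(and(eq(logTable.type, "run_event"), sql`${logTable.body} ? 'runId'`));
}
```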
@@ -4479,18 +4514,18 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4479
4514
  }
4480
4515
  }
4481
4516
  const createdAt = row.createdAt instanceof Date ? row.createdAt : new Date(row.createdAt);
4482
- const timestamp17 = createdAt.getTime();
4517
+ const timestamp19 = createdAt.getTime();
4483
4518
  const bodyStatus = body?.status;
4484
4519
  const eventStatus = row.status ?? bodyStatus;
4485
4520
  if (eventStatus === "started") {
4486
4521
  const currentStartedAt = summary.startedAt === null ? null : typeof summary.startedAt === "bigint" ? Number(summary.startedAt) : summary.startedAt;
4487
- summary.startedAt = currentStartedAt === null ? timestamp17 : Math.min(currentStartedAt, timestamp17);
4522
+ summary.startedAt = currentStartedAt === null ? timestamp19 : Math.min(currentStartedAt, timestamp19);
4488
4523
  } else if (eventStatus === "completed" || eventStatus === "timeout" || eventStatus === "error") {
4489
4524
  summary.status = eventStatus;
4490
- summary.endedAt = timestamp17;
4525
+ summary.endedAt = timestamp19;
4491
4526
  if (summary.startedAt !== null) {
4492
4527
  const startedAtNum = typeof summary.startedAt === "bigint" ? Number(summary.startedAt) : summary.startedAt;
4493
- summary.durationMs = Math.max(timestamp17 - startedAtNum, 0);
4528
+ summary.durationMs = Math.max(timestamp19 - startedAtNum, 0);
4494
4529
  }
4495
4530
  }
4496
4531
  runMap.set(runId, summary);
@@ -4518,8 +4553,8 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4518
4553
  }
4519
4554
  const runIds = limitedRuns.map((run) => run.runId).filter(Boolean);
4520
4555
  if (runIds.length > 0) {
4521
- const runIdArray = sql25`array[${sql25.join(runIds.map((id) => sql25`${id}`), sql25`, `)}]::text[]`;
4522
- const actionSummary = await this.db.execute(sql25`
4556
+ const runIdArray = sql27`array[${sql27.join(runIds.map((id) => sql27`${id}`), sql27`, `)}]::text[]`;
4557
+ const actionSummary = await this.db.execute(sql27`
4523
4558
  SELECT
4524
4559
  body->>'runId' as "runId",
4525
4560
  COUNT(*)::int as "actions",
@@ -4539,7 +4574,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4539
4574
  counts.errors += Number(row.errors ?? 0);
4540
4575
  counts.modelCalls += Number(row.modelCalls ?? 0);
4541
4576
  }
4542
- const evaluatorSummary = await this.db.execute(sql25`
4577
+ const evaluatorSummary = await this.db.execute(sql27`
4543
4578
  SELECT
4544
4579
  body->>'runId' as "runId",
4545
4580
  COUNT(*)::int as "evaluators"
@@ -4555,7 +4590,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4555
4590
  continue;
4556
4591
  counts.evaluators += Number(row.evaluators ?? 0);
4557
4592
  }
4558
- const genericSummary = await this.db.execute(sql25`
4593
+ const genericSummary = await this.db.execute(sql27`
4559
4594
  SELECT
4560
4595
  body->>'runId' as "runId",
4561
4596
  COUNT(*) FILTER (WHERE type LIKE 'useModel:%')::int as "modelLogs",
@@ -4609,7 +4644,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4609
4644
  async searchMemoriesByEmbedding(embedding, params) {
4610
4645
  return this.withDatabase(async () => {
4611
4646
  const cleanVector = embedding.map((n) => Number.isFinite(n) ? Number(n.toFixed(6)) : 0);
4612
- const similarity = sql25`1 - (${cosineDistance(embeddingTable[this.embeddingDimension], cleanVector)})`;
4647
+ const similarity = sql27`1 - (${cosineDistance(embeddingTable[this.embeddingDimension], cleanVector)})`;
4613
4648
  const conditions = [eq2(memoryTable.type, params.tableName)];
4614
4649
  if (params.unique) {
4615
4650
  conditions.push(eq2(memoryTable.unique, true));
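The similarity expression converts pgvector's cosine distance into a similarity score. A minimal sketch, assuming the cosineDistance helper this bundle already imports and an `embeddingColumn` standing in for embeddingTable[this.embeddingDimension]:

```ts
import { cosineDistance, sql } from "drizzle-orm";

// Non-finite inputs are zeroed and values rounded to six decimals before the
// vector is bound; cosine similarity is then 1 minus cosine distance.
function buildSimilarity(embedding: number[], embeddingColumn: any) {
  const cleanVector = embedding.map((n) => (Number.isFinite(n) ? Number(n.toFixed(6)) : 0));
  return sql<number>`1 - (${cosineDistance(embeddingColumn, cleanVector)})`;
}
```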
@@ -4675,8 +4710,8 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4675
4710
  {
4676
4711
  id: memoryId,
4677
4712
  type: tableName,
4678
- content: sql25`${contentToInsert}::jsonb`,
4679
- metadata: sql25`${metadataToInsert}::jsonb`,
4713
+ content: sql27`${contentToInsert}::jsonb`,
4714
+ metadata: sql27`${metadataToInsert}::jsonb`,
4680
4715
  entityId: memory.entityId,
4681
4716
  roomId: memory.roomId,
4682
4717
  worldId: memory.worldId,
@@ -4706,15 +4741,15 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4706
4741
  const contentToUpdate = typeof memory.content === "string" ? memory.content : JSON.stringify(memory.content ?? {});
4707
4742
  const metadataToUpdate = typeof memory.metadata === "string" ? memory.metadata : JSON.stringify(memory.metadata ?? {});
4708
4743
  await tx.update(memoryTable).set({
4709
- content: sql25`${contentToUpdate}::jsonb`,
4744
+ content: sql27`${contentToUpdate}::jsonb`,
4710
4745
  ...memory.metadata && {
4711
- metadata: sql25`${metadataToUpdate}::jsonb`
4746
+ metadata: sql27`${metadataToUpdate}::jsonb`
4712
4747
  }
4713
4748
  }).where(eq2(memoryTable.id, memory.id));
4714
4749
  } else if (memory.metadata) {
4715
4750
  const metadataToUpdate = typeof memory.metadata === "string" ? memory.metadata : JSON.stringify(memory.metadata ?? {});
4716
4751
  await tx.update(memoryTable).set({
4717
- metadata: sql25`${metadataToUpdate}::jsonb`
4752
+ metadata: sql27`${metadataToUpdate}::jsonb`
4718
4753
  }).where(eq2(memoryTable.id, memory.id));
4719
4754
  }
4720
4755
  if (memory.embedding && Array.isArray(memory.embedding)) {
@@ -4781,7 +4816,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4781
4816
  }
4782
4817
  }
4783
4818
  async getMemoryFragments(tx, documentId) {
4784
- const fragments = await tx.select({ id: memoryTable.id }).from(memoryTable).where(and(eq2(memoryTable.agentId, this.agentId), sql25`${memoryTable.metadata}->>'documentId' = ${documentId}`));
4819
+ const fragments = await tx.select({ id: memoryTable.id }).from(memoryTable).where(and(eq2(memoryTable.agentId, this.agentId), sql27`${memoryTable.metadata}->>'documentId' = ${documentId}`));
4785
4820
  return fragments.map((f) => ({ id: f.id }));
4786
4821
  }
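Fragments are matched to their parent document through a jsonb text extraction. A minimal sketch with assumed handles:

```ts
import { and, eq, sql } from "drizzle-orm";

// ->> extracts metadata.documentId as text so fragments can be matched to a
// parent document with a plain equality against a bound parameter.
async function findFragmentIds(tx: any, memoryTable: any, agentId: string, documentId: string) {
  const fragments = await tx
    .select({ id: memoryTable.id })
    .from(memoryTable)
    .where(and(eq(memoryTable.agentId, agentId), sql`${memoryTable.metadata}->>'documentId' = ${documentId}`));
  return fragments.map((f: { id: string }) => ({ id: f.id }));
}
```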
4787
4822
  async deleteAllMemories(roomId, tableName) {
@@ -4809,7 +4844,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
4809
4844
  if (unique3) {
4810
4845
  conditions.push(eq2(memoryTable.unique, true));
4811
4846
  }
4812
- const result = await this.db.select({ count: sql25`count(*)` }).from(memoryTable).where(and(...conditions));
4847
+ const result = await this.db.select({ count: sql27`count(*)` }).from(memoryTable).where(and(...conditions));
4813
4848
  const result0 = result[0];
4814
4849
  return Number(result0?.count ?? 0);
4815
4850
  });
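Worth noting why the count is wrapped in Number(): Postgres returns count(*) as a bigint, which drivers commonly surface as a string. A minimal sketch with assumed handles:

```ts
import { sql } from "drizzle-orm";

// count(*) often arrives as a string at the driver level, so it is coerced
// with Number() before being returned to callers.
async function countRows(db: any, memoryTable: any): Promise<number> {
  const result = await db.select({ count: sql<number>`count(*)` }).from(memoryTable);
  return Number(result[0]?.count ?? 0);
}
```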
@@ -5080,13 +5115,13 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
5080
5115
  const { entityId, tags } = params;
5081
5116
  let query;
5082
5117
  if (tags && tags.length > 0) {
5083
- query = sql25`
5118
+ query = sql27`
5084
5119
  SELECT * FROM ${relationshipTable}
5085
5120
  WHERE (${relationshipTable.sourceEntityId} = ${entityId} OR ${relationshipTable.targetEntityId} = ${entityId})
5086
- AND ${relationshipTable.tags} && CAST(ARRAY[${sql25.join(tags, sql25`, `)}] AS text[])
5121
+ AND ${relationshipTable.tags} && CAST(ARRAY[${sql27.join(tags, sql27`, `)}] AS text[])
5087
5122
  `;
5088
5123
  } else {
5089
- query = sql25`
5124
+ query = sql27`
5090
5125
  SELECT * FROM ${relationshipTable}
5091
5126
  WHERE ${relationshipTable.sourceEntityId} = ${entityId} OR ${relationshipTable.targetEntityId} = ${entityId}
5092
5127
  `;
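The tag filter uses `&&`, the Postgres array-overlap operator: a relationship matches if it shares at least one tag with the input. A minimal sketch with assumed handles; each tag is wrapped as a bound parameter before joining:

```ts
import { sql } from "drizzle-orm";

// ARRAY[$1, $2, ...] is cast to text[] and compared with && (overlap):
// true when the row's tags array and the input share any element.
function buildOverlapQuery(relationshipTable: any, entityId: string, tags: string[]) {
  const tagList = sql.join(tags.map((t) => sql`${t}`), sql`, `);
  return sql`
    SELECT * FROM ${relationshipTable}
    WHERE (${relationshipTable.sourceEntityId} = ${entityId} OR ${relationshipTable.targetEntityId} = ${entityId})
      AND ${relationshipTable.tags} && CAST(ARRAY[${tagList}] AS text[])
  `;
}
```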
@@ -5228,7 +5263,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
5228
5263
  return this.withRetry(async () => {
5229
5264
  return this.withDatabase(async () => {
5230
5265
  const result = await this.db.select().from(taskTable).where(and(eq2(taskTable.agentId, this.agentId), ...params.roomId ? [eq2(taskTable.roomId, params.roomId)] : [], ...params.tags && params.tags.length > 0 ? [
5231
- sql25`${taskTable.tags} @> ARRAY[${sql25.join(params.tags.map((t) => sql25`${t}`), sql25`, `)}]::text[]`
5266
+ sql27`${taskTable.tags} @> ARRAY[${sql27.join(params.tags.map((t) => sql27`${t}`), sql27`, `)}]::text[]`
5232
5267
  ] : []));
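Tasks filter with `@>` (array containment) instead, so a task must carry every requested tag, a stricter condition than the `&&` overlap used for relationships. A minimal sketch with assumed handles:

```ts
import { sql } from "drizzle-orm";

// tags @> ARRAY[...]::text[] is true only when the column contains all of
// the requested tags (containment), not merely one of them (overlap).
function buildContainsFilter(taskTable: any, tags: string[]) {
  return sql`${taskTable.tags} @> ARRAY[${sql.join(tags.map((t) => sql`${t}`), sql`, `)}]::text[]`;
}
```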
5233
5268
  return result.map((row) => ({
5234
5269
  id: row.id,
@@ -5418,7 +5453,7 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
5418
5453
  }
5419
5454
  async getMessageServerByRlsServerId(rlsServerId) {
5420
5455
  return this.withDatabase(async () => {
5421
- const results = await this.db.execute(sql25`
5456
+ const results = await this.db.execute(sql27`
5422
5457
  SELECT id, name, source_type, source_id, metadata, created_at, updated_at
5423
5458
  FROM message_servers
5424
5459
  WHERE server_id = ${rlsServerId}
@@ -5697,6 +5732,82 @@ class BaseDrizzleAdapter extends DatabaseAdapter {
5697
5732
  }, ids);
5698
5733
  });
5699
5734
  }
5735
+ async getPairingRequests(channel, agentId) {
5736
+ return this.withDatabase(async () => {
5737
+ const results = await this.db.select().from(pairingRequestTable).where(and(eq2(pairingRequestTable.channel, channel), eq2(pairingRequestTable.agentId, agentId))).orderBy(pairingRequestTable.createdAt);
5738
+ return results.map((row) => ({
5739
+ id: row.id,
5740
+ channel: row.channel,
5741
+ senderId: row.senderId,
5742
+ code: row.code,
5743
+ createdAt: row.createdAt,
5744
+ lastSeenAt: row.lastSeenAt,
5745
+ metadata: row.metadata || undefined,
5746
+ agentId: row.agentId
5747
+ }));
5748
+ });
5749
+ }
5750
+ async createPairingRequest(request) {
5751
+ return this.withDatabase(async () => {
5752
+ const id = request.id || v4_default();
5753
+ await this.db.insert(pairingRequestTable).values({
5754
+ id,
5755
+ channel: request.channel,
5756
+ senderId: request.senderId,
5757
+ code: request.code,
5758
+ createdAt: request.createdAt,
5759
+ lastSeenAt: request.lastSeenAt,
5760
+ metadata: request.metadata || {},
5761
+ agentId: request.agentId
5762
+ });
5763
+ return id;
5764
+ });
5765
+ }
5766
+ async updatePairingRequest(request) {
5767
+ return this.withDatabase(async () => {
5768
+ await this.db.update(pairingRequestTable).set({
5769
+ lastSeenAt: request.lastSeenAt,
5770
+ metadata: request.metadata || {}
5771
+ }).where(eq2(pairingRequestTable.id, request.id));
5772
+ });
5773
+ }
5774
+ async deletePairingRequest(id) {
5775
+ return this.withDatabase(async () => {
5776
+ await this.db.delete(pairingRequestTable).where(eq2(pairingRequestTable.id, id));
5777
+ });
5778
+ }
5779
+ async getPairingAllowlist(channel, agentId) {
5780
+ return this.withDatabase(async () => {
5781
+ const results = await this.db.select().from(pairingAllowlistTable).where(and(eq2(pairingAllowlistTable.channel, channel), eq2(pairingAllowlistTable.agentId, agentId))).orderBy(pairingAllowlistTable.createdAt);
5782
+ return results.map((row) => ({
5783
+ id: row.id,
5784
+ channel: row.channel,
5785
+ senderId: row.senderId,
5786
+ createdAt: row.createdAt,
5787
+ metadata: row.metadata || undefined,
5788
+ agentId: row.agentId
5789
+ }));
5790
+ });
5791
+ }
5792
+ async createPairingAllowlistEntry(entry) {
5793
+ return this.withDatabase(async () => {
5794
+ const id = entry.id || v4_default();
5795
+ await this.db.insert(pairingAllowlistTable).values({
5796
+ id,
5797
+ channel: entry.channel,
5798
+ senderId: entry.senderId,
5799
+ createdAt: entry.createdAt,
5800
+ metadata: entry.metadata || {},
5801
+ agentId: entry.agentId
5802
+ }).onConflictDoNothing();
5803
+ return id;
5804
+ });
5805
+ }
5806
+ async deletePairingAllowlistEntry(id) {
5807
+ return this.withDatabase(async () => {
5808
+ await this.db.delete(pairingAllowlistTable).where(eq2(pairingAllowlistTable.id, id));
5809
+ });
5810
+ }
5700
5811
  }
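The pairing-request and pairing-allowlist methods are the substantive addition in this release. A hypothetical usage sketch (channel, sender, and code values are invented; adapter and agentId are assumed, and Date timestamps are assumed to match the column types):

```ts
// Hypothetical flow: record an inbound pairing attempt, list pending requests,
// approve the sender onto the allowlist, then drop the request.
async function approvePairing(adapter: any, agentId: string) {
  const requestId = await adapter.createPairingRequest({
    channel: "telegram",     // hypothetical channel name
    senderId: "sender-123",  // hypothetical sender id
    code: "482913",          // hypothetical pairing code
    createdAt: new Date(),
    lastSeenAt: new Date(),
    agentId,
  });

  const pending = await adapter.getPairingRequests("telegram", agentId);
  console.log(`${pending.length} pending request(s)`);

  await adapter.createPairingAllowlistEntry({
    channel: "telegram",
    senderId: "sender-123",
    createdAt: new Date(),
    agentId,
  }); // the insert uses onConflictDoNothing(), so re-approval is idempotent

  await adapter.deletePairingRequest(requestId);
}
```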
5701
5812
 
5702
5813
  // pglite/adapter.ts
@@ -5840,5 +5951,5 @@ export {
5840
5951
  DatabaseMigrationService
5841
5952
  };
5842
5953
 
5843
- //# debugId=33D355E5927A96C264756E2164756E21
5954
+ //# debugId=6C9E79385428E37A64756E2164756E21
5844
5955
  //# sourceMappingURL=index.browser.js.map