@mastra/duckdb 1.1.3-alpha.0 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,7 @@
  'use strict';

  var chunkS2AWBPTS_cjs = require('./chunk-S2AWBPTS.cjs');
+ var error = require('@mastra/core/error');
  var storage = require('@mastra/core/storage');
  var observability = require('@mastra/core/observability');
  var utils = require('@mastra/core/utils');
@@ -59,6 +60,7 @@ CREATE TABLE IF NOT EXISTS metric_events (
  timestamp TIMESTAMP NOT NULL,

  -- IDs
+ metricId VARCHAR NOT NULL PRIMARY KEY,
  traceId VARCHAR,
  spanId VARCHAR,
  experimentId VARCHAR,
@@ -110,6 +112,7 @@ CREATE TABLE IF NOT EXISTS log_events (
  timestamp TIMESTAMP NOT NULL,

  -- IDs
+ logId VARCHAR NOT NULL PRIMARY KEY,
  traceId VARCHAR,
  spanId VARCHAR,
  experimentId VARCHAR,
@@ -156,6 +159,7 @@ CREATE TABLE IF NOT EXISTS score_events (
  timestamp TIMESTAMP NOT NULL,

  -- IDs
+ scoreId VARCHAR NOT NULL PRIMARY KEY,
  traceId VARCHAR,
  spanId VARCHAR,
  experimentId VARCHAR,
@@ -206,6 +210,7 @@ CREATE TABLE IF NOT EXISTS feedback_events (
  timestamp TIMESTAMP NOT NULL,

  -- IDs
+ feedbackId VARCHAR NOT NULL PRIMARY KEY,
  traceId VARCHAR,
  spanId VARCHAR,
  experimentId VARCHAR,
@@ -719,6 +724,7 @@ function rowToFeedbackRecord(row) {
  const numValue = Number(rawValue);
  if (!isNaN(numValue)) value = numValue;
  return {
+ feedbackId: row.feedbackId,
  timestamp: toDate(row.timestamp),
  traceId: row.traceId ?? null,
  spanId: row.spanId ?? null,
@@ -790,12 +796,13 @@ async function createFeedback(db, args) {
  const feedbackUserId = f.feedbackUserId ?? f.userId ?? null;
  await db.execute(
  `INSERT INTO feedback_events (
- timestamp, traceId, spanId, experimentId,
+ feedbackId, timestamp, traceId, spanId, experimentId,
  entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
  userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
  feedbackUserId, sourceId, feedbackSource, feedbackType, value, comment, tags, metadata, scope
  )
  VALUES (${[
+ v(f.feedbackId),
  v(f.timestamp),
  v(f.traceId),
  v(f.spanId ?? null),
@@ -831,7 +838,8 @@ async function createFeedback(db, args) {
  jsonV(f.tags ?? null),
  jsonV(f.metadata),
  jsonV(f.scope ?? null)
- ].join(", ")})`
+ ].join(", ")})
+ ON CONFLICT DO NOTHING`
  );
  }
  async function batchCreateFeedback(db, args) {
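
Note: every signal writer in this file now lists the new id column first and appends ON CONFLICT DO NOTHING, so replaying a signal with the same id becomes a no-op instead of a duplicate row (the same change appears below for logs, metrics, and scores). A minimal sketch of the effect, written against the same db.execute helper this module uses; the demo_events table and values are illustrative only and are not part of the package:

  // Illustrative only: a VARCHAR primary key plus ON CONFLICT DO NOTHING
  // turns a repeated insert of the same key into a silent no-op in DuckDB.
  await db.execute(
    `CREATE TABLE IF NOT EXISTS demo_events (eventId VARCHAR NOT NULL PRIMARY KEY, payload VARCHAR)`
  );
  await db.execute(`INSERT INTO demo_events VALUES ('e-1', 'first') ON CONFLICT DO NOTHING`);
  // A second insert with the same eventId inserts nothing; the 'first' row is kept.
  await db.execute(`INSERT INTO demo_events VALUES ('e-1', 'replay') ON CONFLICT DO NOTHING`);
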
@@ -841,6 +849,7 @@ async function batchCreateFeedback(db, args) {
  const feedbackSource = legacyFeedback.feedbackSource ?? legacyFeedback.source ?? "";
  const feedbackUserId = legacyFeedback.feedbackUserId ?? legacyFeedback.userId ?? null;
  return `(${[
+ v(legacyFeedback.feedbackId),
  v(legacyFeedback.timestamp),
  v(legacyFeedback.traceId),
  v(legacyFeedback.spanId ?? null),
@@ -880,12 +889,13 @@ async function batchCreateFeedback(db, args) {
  });
  await db.execute(
  `INSERT INTO feedback_events (
- timestamp, traceId, spanId, experimentId,
+ feedbackId, timestamp, traceId, spanId, experimentId,
  entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
  userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
  feedbackUserId, sourceId, feedbackSource, feedbackType, value, comment, tags, metadata, scope
  )
- VALUES ${tuples.join(",\n ")}`
+ VALUES ${tuples.join(",\n ")}
+ ON CONFLICT DO NOTHING`
  );
  }
  async function listFeedback(db, args) {
@@ -1042,6 +1052,7 @@ async function getFeedbackPercentiles(db, args) {
 
  // src/storage/domains/observability/logs.ts
  var COLUMNS = [
+ "logId",
  "timestamp",
  "level",
  "message",
@@ -1078,6 +1089,7 @@ var COLUMNS = [
  var COLUMNS_SQL = COLUMNS.join(", ");
  function rowToLogRecord(row) {
  return {
+ logId: row.logId,
  timestamp: toDate(row.timestamp),
  level: row.level,
  message: row.message,
@@ -1116,6 +1128,7 @@ async function batchCreateLogs(db, args) {
  if (args.logs.length === 0) return;
  const tuples = args.logs.map((log) => {
  return `(${[
+ v(log.logId),
  v(log.timestamp),
  v(log.level),
  v(log.message),
@@ -1150,7 +1163,7 @@ async function batchCreateLogs(db, args) {
  jsonV(log.scope)
  ].join(", ")})`;
  });
- await db.execute(`INSERT INTO log_events (${COLUMNS_SQL}) VALUES ${tuples.join(",\n")}`);
+ await db.execute(`INSERT INTO log_events (${COLUMNS_SQL}) VALUES ${tuples.join(",\n")} ON CONFLICT DO NOTHING`);
  }
  async function listLogs(db, args) {
  const filters = args.filters ?? {};
@@ -1217,6 +1230,7 @@ function buildMetricNameFilter(name) {
  return { clause: `name = ?`, params: [name] };
  }
  var METRIC_COLUMNS = [
+ "metricId",
  "timestamp",
  "name",
  "value",
@@ -1320,6 +1334,7 @@ function resolveGroupBy(groupBy) {
  }
  function rowToMetricRecord(row) {
  return {
+ metricId: row.metricId,
  timestamp: toDate(row.timestamp),
  name: row.name,
  value: Number(row.value),
@@ -1363,6 +1378,7 @@ async function batchCreateMetrics(db, args) {
  if (args.metrics.length === 0) return;
  const tuples = args.metrics.map((m) => {
  return `(${[
+ v(m.metricId),
  v(m.timestamp),
  v(m.name),
  v(m.value),
@@ -1402,7 +1418,9 @@ async function batchCreateMetrics(db, args) {
  jsonV(m.scope ?? null)
  ].join(", ")})`;
  });
- await db.execute(`INSERT INTO metric_events (${METRIC_COLUMNS_SQL}) VALUES ${tuples.join(",\n")}`);
+ await db.execute(
+ `INSERT INTO metric_events (${METRIC_COLUMNS_SQL}) VALUES ${tuples.join(",\n")} ON CONFLICT DO NOTHING`
+ );
  }
  async function listMetrics(db, args) {
  const filters = args.filters ?? {};
@@ -1701,6 +1719,122 @@ async function getMetricLabelValues(db, args) {
  );
  return { values: rows.map((r) => r.val) };
  }
+ var SIGNAL_MIGRATIONS = [
+ { table: "metric_events", createDDL: METRIC_EVENTS_DDL, idColumn: "metricId" },
+ { table: "log_events", createDDL: LOG_EVENTS_DDL, idColumn: "logId" },
+ { table: "score_events", createDDL: SCORE_EVENTS_DDL, idColumn: "scoreId" },
+ { table: "feedback_events", createDDL: FEEDBACK_EVENTS_DDL, idColumn: "feedbackId" }
+ ];
+ async function tableExists(db, table) {
+ const rows = await db.query(
+ `SELECT table_name FROM information_schema.tables WHERE table_name = ?`,
+ [table]
+ );
+ return rows.length > 0;
+ }
+ async function hasPrimaryKey(db, table) {
+ const rows = await db.query(
+ `SELECT constraint_type FROM information_schema.table_constraints
+ WHERE table_name = ? AND constraint_type = 'PRIMARY KEY'`,
+ [table]
+ );
+ return rows.length > 0;
+ }
+ async function getColumns(db, table) {
+ const rows = await db.query(
+ `SELECT column_name FROM information_schema.columns WHERE table_name = ?`,
+ [table]
+ );
+ return rows.map((r) => r.column_name);
+ }
+ function buildTemporaryTableDDL(createDDL, table, tempTable) {
+ return createDDL.replace(`CREATE TABLE IF NOT EXISTS ${table}`, `CREATE TABLE ${tempTable}`);
+ }
+ async function dropTableIfExists(db, table) {
+ if (await tableExists(db, table)) {
+ await db.execute(`DROP TABLE ${table}`);
+ }
+ }
+ function createMigrationError(args, error$1) {
+ return new error.MastraError(
+ {
+ id: storage.createStorageErrorId("DUCKDB", "MIGRATE_SIGNAL_TABLES", "FAILED"),
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: args
+ },
+ error$1
+ );
+ }
+ async function checkSignalTablesMigrationStatus(db) {
+ const tables = [];
+ for (const { table, idColumn } of SIGNAL_MIGRATIONS) {
+ if (!await tableExists(db, table)) {
+ continue;
+ }
+ if (await hasPrimaryKey(db, table)) {
+ continue;
+ }
+ tables.push({ table, idColumn });
+ }
+ return {
+ needsMigration: tables.length > 0,
+ tables
+ };
+ }
+ async function migrateSignalTables(db, logger) {
+ for (const { table, createDDL, idColumn } of SIGNAL_MIGRATIONS) {
+ if (!await tableExists(db, table)) continue;
+ if (await hasPrimaryKey(db, table)) continue;
+ logger?.info?.(`Migrating ${table} to schema with ${idColumn} PRIMARY KEY`);
+ const temp = `${table}_migrating_${Date.now()}`;
+ const backup = `${table}_backup_${Date.now()}`;
+ let originalRenamed = false;
+ let swapCompleted = false;
+ try {
+ await db.execute(buildTemporaryTableDDL(createDDL, table, temp));
+ const newColumns = await getColumns(db, temp);
+ const currentColumns = new Set(await getColumns(db, table));
+ const columnList = newColumns.map((c) => `"${c}"`).join(", ");
+ const selectExprs = newColumns.map((c) => {
+ if (c === idColumn) {
+ return currentColumns.has(c) ? `COALESCE(NULLIF("${c}", ''), CAST(uuid() AS VARCHAR)) AS "${c}"` : `CAST(uuid() AS VARCHAR) AS "${c}"`;
+ }
+ return currentColumns.has(c) ? `"${c}"` : `NULL AS "${c}"`;
+ }).join(", ");
+ await db.execute(`INSERT INTO ${temp} (${columnList}) SELECT ${selectExprs} FROM ${table}`);
+ await db.execute(`ALTER TABLE ${table} RENAME TO ${backup}`);
+ originalRenamed = true;
+ await db.execute(`ALTER TABLE ${temp} RENAME TO ${table}`);
+ swapCompleted = true;
+ try {
+ await db.execute(`DROP TABLE ${backup}`);
+ } catch (cleanupError) {
+ logger?.warn?.(
+ `Migration of ${table} completed, but failed to drop backup ${backup}: ${cleanupError.message}`
+ );
+ }
+ logger?.info?.(`Successfully migrated ${table}`);
+ } catch (error) {
+ logger?.error?.(`Migration of ${table} failed: ${error.message}`);
+ try {
+ await dropTableIfExists(db, temp);
+ } catch (restoreError) {
+ logger?.error?.(`Failed to clean up temporary table ${temp}: ${restoreError.message}`);
+ }
+ if (originalRenamed && !swapCompleted) {
+ try {
+ await db.execute(`ALTER TABLE ${backup} RENAME TO ${table}`);
+ } catch (restoreError) {
+ logger?.error?.(
+ `Failed to restore original table ${table} from backup ${backup}: ${restoreError.message}`
+ );
+ }
+ }
+ throw createMigrationError({ table, idColumn }, error);
+ }
+ }
+ }
  var SCORE_GROUP_BY_COLUMNS = /* @__PURE__ */ new Set([
  "timestamp",
  "traceId",
@@ -1816,6 +1950,7 @@ function toSeriesName2(values) {
  }
  function rowToScoreRecord(row) {
  return {
+ scoreId: row.scoreId,
  timestamp: toDate(row.timestamp),
  traceId: row.traceId ?? null,
  spanId: row.spanId ?? null,
@@ -1886,12 +2021,13 @@ async function createScore(db, args) {
  const scoreSource = s.scoreSource ?? s.source ?? null;
  await db.execute(
  `INSERT INTO score_events (
- timestamp, traceId, spanId, experimentId, scoreTraceId,
+ scoreId, timestamp, traceId, spanId, experimentId, scoreTraceId,
  entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
  userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
  scorerId, scorerVersion, scoreSource, score, reason, tags, metadata, scope
  )
  VALUES (${[
+ v(s.scoreId),
  v(s.timestamp),
  v(s.traceId),
  v(s.spanId ?? null),
@@ -1927,7 +2063,8 @@ async function createScore(db, args) {
  jsonV(s.tags ?? null),
  jsonV(s.metadata),
  jsonV(s.scope ?? null)
- ].join(", ")})`
+ ].join(", ")})
+ ON CONFLICT DO NOTHING`
  );
  }
  async function batchCreateScores(db, args) {
@@ -1936,6 +2073,7 @@ async function batchCreateScores(db, args) {
  const legacyScore = s;
  const scoreSource = legacyScore.scoreSource ?? legacyScore.source ?? null;
  return `(${[
+ v(legacyScore.scoreId),
  v(legacyScore.timestamp),
  v(legacyScore.traceId),
  v(legacyScore.spanId ?? null),
@@ -1975,12 +2113,13 @@ async function batchCreateScores(db, args) {
  });
  await db.execute(
  `INSERT INTO score_events (
- timestamp, traceId, spanId, experimentId, scoreTraceId,
+ scoreId, timestamp, traceId, spanId, experimentId, scoreTraceId,
  entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
  userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
  scorerId, scorerVersion, scoreSource, score, reason, tags, metadata, scope
  )
- VALUES ${tuples.join(",\n ")}`
+ VALUES ${tuples.join(",\n ")}
+ ON CONFLICT DO NOTHING`
  );
  }
  async function listScores(db, args) {
@@ -2204,6 +2343,40 @@ var SPAN_RECONSTRUCT_SELECT = `
  ${argMaxNonNull("requestContext")}
  FROM span_events
  `;
+ var SPAN_RECONSTRUCT_SELECT_LIGHT = `
+ SELECT
+ traceId, spanId,
+ ${argMaxNonNull("name")},
+ ${argMaxNonNull("spanType")},
+ ${argMaxNonNull("parentSpanId")},
+ ${argMaxNonNull("isEvent")},
+ coalesce(min(timestamp) FILTER (WHERE eventType = 'start'), min(timestamp)) as startedAt,
+ ${argMaxNonNull("endedAt")},
+ ${argMaxNonNull("entityType")},
+ ${argMaxNonNull("entityId")},
+ ${argMaxNonNull("entityName")},
+ ${argMaxNonNull("error")}
+ FROM span_events
+ `;
+ function rowToLightSpanRecord(row) {
+ return {
+ traceId: row.traceId,
+ spanId: row.spanId,
+ name: row.name,
+ spanType: row.spanType,
+ parentSpanId: row.parentSpanId ?? null,
+ isEvent: row.isEvent,
+ startedAt: toDate(row.startedAt),
+ endedAt: toDateOrNull(row.endedAt),
+ entityType: row.entityType ?? null,
+ entityId: row.entityId ?? null,
+ entityName: row.entityName ?? null,
+ error: parseJson(row.error),
+ createdAt: toDate(row.startedAt),
+ // DuckDB event-sourced — use startedAt as proxy
+ updatedAt: toDateOrNull(row.endedAt)
+ };
+ }
  function rowToSpanRecord(row) {
  return {
  traceId: row.traceId,
@@ -2410,6 +2583,16 @@ async function getTrace(db, args) {
  spans: rows.map((row) => rowToSpanRecord(row))
  };
  }
+ async function getTraceLight(db, args) {
+ const rows = await db.query(`${SPAN_RECONSTRUCT_SELECT_LIGHT} WHERE traceId = ? GROUP BY traceId, spanId`, [
+ args.traceId
+ ]);
+ if (rows.length === 0) return null;
+ return {
+ traceId: args.traceId,
+ spans: rows.map((row) => rowToLightSpanRecord(row))
+ };
+ }
  async function listTraces(db, args) {
  const filters = args.filters ?? {};
  const page = Number(args.pagination?.page ?? 0);
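
Note: getTraceLight reconstructs a trace from span_events using the reduced column set in SPAN_RECONSTRUCT_SELECT_LIGHT above (name, type, parent, timing, entity info, error) rather than the full span payload. A minimal usage sketch, assuming a db handle like the one passed to the other domain functions in this file; the trace id is illustrative:

  // Sketch: returns null when no span events exist for the trace id,
  // otherwise { traceId, spans } built from rowToLightSpanRecord above.
  const trace = await getTraceLight(db, { traceId: "trace-123" });
  if (trace) {
    for (const span of trace.spans) {
      console.log(span.spanId, span.name, span.startedAt, span.endedAt);
    }
  }
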
@@ -2458,6 +2641,32 @@ async function listTraces(db, args) {
  }
 
  // src/storage/domains/observability/index.ts
+ function buildSignalMigrationRequiredMessage(args) {
+ const tableList = args.tables.map((table) => ` - ${table.table}`).join("\n");
+ return `
+ ===========================================================================
+ MIGRATION REQUIRED: DuckDB observability signal tables need signal IDs
+ ===========================================================================
+
+ The following signal tables still use the legacy schema and must be migrated
+ before observability storage can initialize:
+
+ ${tableList}
+
+ To fix this, run the manual migration command:
+
+ npx mastra migrate
+
+ This command will:
+ 1. Create replacement signal tables with signal-ID primary keys
+ 2. Backfill missing signal IDs for legacy rows
+ 3. Swap the migrated tables into place
+
+ WARNING: This migration recreates the signal tables and may take significant
+ time for large databases. Please ensure you have a backup before proceeding.
+ ===========================================================================
+ `;
+ }
  var ObservabilityStorageDuckDB = class extends storage.ObservabilityStorage {
  db;
  constructor(config) {
@@ -2466,6 +2675,17 @@ var ObservabilityStorageDuckDB = class extends storage.ObservabilityStorage {
  }
  /** Create all observability tables if they don't exist. */
  async init() {
+ const migrationStatus = await checkSignalTablesMigrationStatus(this.db);
+ if (migrationStatus.needsMigration) {
+ throw new error.MastraError({
+ id: storage.createStorageErrorId("DUCKDB", "MIGRATION_REQUIRED", "SIGNAL_TABLES"),
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ text: buildSignalMigrationRequiredMessage({
+ tables: migrationStatus.tables.map(({ table }) => ({ table }))
+ })
+ });
+ }
  for (const ddl of ALL_DDL) {
  await this.db.execute(ddl);
  }
@@ -2473,6 +2693,29 @@ var ObservabilityStorageDuckDB = class extends storage.ObservabilityStorage {
  await this.db.execute(migration);
  }
  }
+ /**
+ * Manually migrate legacy signal tables to the signal-ID primary-key schema.
+ * The public method name is historical; the CLI still calls `migrateSpans()`
+ * for observability migrations even though this now also migrates signal tables.
+ */
+ async migrateSpans() {
+ const migrationStatus = await checkSignalTablesMigrationStatus(this.db);
+ if (!migrationStatus.needsMigration) {
+ return {
+ success: true,
+ alreadyMigrated: true,
+ duplicatesRemoved: 0,
+ message: "Migration already complete. Signal tables already use signal-ID primary keys."
+ };
+ }
+ await migrateSignalTables(this.db, this.logger);
+ return {
+ success: true,
+ alreadyMigrated: false,
+ duplicatesRemoved: 0,
+ message: `Migration complete. Migrated signal tables: ${migrationStatus.tables.map((t) => t.table).join(", ")}.`
+ };
+ }
  /** Delete all rows from every observability table. Use with caution. */
  async dangerouslyClearAll() {
  for (const table of ["span_events", "metric_events", "log_events", "score_events", "feedback_events"]) {
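
Note: init() now refuses to start against legacy signal tables (the MIGRATION_REQUIRED error above), and the migration itself is exposed through migrateSpans(), which the mastra CLI drives via npx mastra migrate. A minimal programmatic sketch, assuming an already-constructed ObservabilityStorageDuckDB instance named store (construction details are not shown in this diff); the result shape matches the return values in the method above:

  // Sketch: run the signal-table migration by hand and inspect the result.
  const result = await store.migrateSpans();
  console.log(result.success, result.alreadyMigrated, result.message);
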
@@ -2504,6 +2747,9 @@ var ObservabilityStorageDuckDB = class extends storage.ObservabilityStorage {
  async getTrace(args) {
  return getTrace(this.db, args);
  }
+ async getTraceLight(args) {
+ return getTraceLight(this.db, args);
+ }
  async listTraces(args) {
  return listTraces(this.db, args);
  }
@@ -2606,5 +2852,5 @@ var ObservabilityStorageDuckDB = class extends storage.ObservabilityStorage {
  };
 
  exports.ObservabilityStorageDuckDB = ObservabilityStorageDuckDB;
- //# sourceMappingURL=observability-4TAPGTC4.cjs.map
- //# sourceMappingURL=observability-4TAPGTC4.cjs.map
+ //# sourceMappingURL=observability-AILZGFQT.cjs.map
+ //# sourceMappingURL=observability-AILZGFQT.cjs.map