@mastra/duckdb 1.1.2 → 1.2.0-alpha.1
This diff shows the changes between publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +24 -0
- package/dist/docs/SKILL.md +1 -1
- package/dist/docs/assets/SOURCE_MAP.json +1 -1
- package/dist/index.cjs +9 -1
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +9 -1
- package/dist/index.js.map +1 -1
- package/dist/{observability-4TAPGTC4.cjs → observability-AILZGFQT.cjs} +258 -12
- package/dist/observability-AILZGFQT.cjs.map +1 -0
- package/dist/{observability-EMLEGDF7.js → observability-YJBOVLPV.js} +259 -13
- package/dist/observability-YJBOVLPV.js.map +1 -0
- package/dist/storage/domains/observability/ddl.d.ts +4 -4
- package/dist/storage/domains/observability/ddl.d.ts.map +1 -1
- package/dist/storage/domains/observability/feedback.d.ts.map +1 -1
- package/dist/storage/domains/observability/index.d.ts +13 -1
- package/dist/storage/domains/observability/index.d.ts.map +1 -1
- package/dist/storage/domains/observability/logs.d.ts.map +1 -1
- package/dist/storage/domains/observability/metrics.d.ts.map +1 -1
- package/dist/storage/domains/observability/migration.d.ts +19 -0
- package/dist/storage/domains/observability/migration.d.ts.map +1 -0
- package/dist/storage/domains/observability/scores.d.ts.map +1 -1
- package/dist/storage/domains/observability/tracing.d.ts +3 -1
- package/dist/storage/domains/observability/tracing.d.ts.map +1 -1
- package/dist/storage/index.d.ts +2 -0
- package/dist/storage/index.d.ts.map +1 -1
- package/package.json +8 -8
- package/dist/observability-4TAPGTC4.cjs.map +0 -1
- package/dist/observability-EMLEGDF7.js.map +0 -1
--- package/dist/observability-EMLEGDF7.js
+++ package/dist/observability-YJBOVLPV.js
@@ -1,5 +1,6 @@
 import { DuckDBConnection } from './chunk-37GBWD4M.js';
-import {
+import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
+import { ObservabilityStorage, createStorageErrorId, toTraceSpans } from '@mastra/core/storage';
 import { EntityType } from '@mastra/core/observability';
 import { parseFieldKey } from '@mastra/core/utils';
 
@@ -57,6 +58,7 @@ CREATE TABLE IF NOT EXISTS metric_events (
 timestamp TIMESTAMP NOT NULL,
 
 -- IDs
+metricId VARCHAR NOT NULL PRIMARY KEY,
 traceId VARCHAR,
 spanId VARCHAR,
 experimentId VARCHAR,
@@ -108,6 +110,7 @@ CREATE TABLE IF NOT EXISTS log_events (
 timestamp TIMESTAMP NOT NULL,
 
 -- IDs
+logId VARCHAR NOT NULL PRIMARY KEY,
 traceId VARCHAR,
 spanId VARCHAR,
 experimentId VARCHAR,
@@ -154,6 +157,7 @@ CREATE TABLE IF NOT EXISTS score_events (
 timestamp TIMESTAMP NOT NULL,
 
 -- IDs
+scoreId VARCHAR NOT NULL PRIMARY KEY,
 traceId VARCHAR,
 spanId VARCHAR,
 experimentId VARCHAR,
@@ -204,6 +208,7 @@ CREATE TABLE IF NOT EXISTS feedback_events (
 timestamp TIMESTAMP NOT NULL,
 
 -- IDs
+feedbackId VARCHAR NOT NULL PRIMARY KEY,
 traceId VARCHAR,
 spanId VARCHAR,
 experimentId VARCHAR,
@@ -717,6 +722,7 @@ function rowToFeedbackRecord(row) {
 const numValue = Number(rawValue);
 if (!isNaN(numValue)) value = numValue;
 return {
+feedbackId: row.feedbackId,
 timestamp: toDate(row.timestamp),
 traceId: row.traceId ?? null,
 spanId: row.spanId ?? null,
@@ -788,12 +794,13 @@ async function createFeedback(db, args) {
 const feedbackUserId = f.feedbackUserId ?? f.userId ?? null;
 await db.execute(
 `INSERT INTO feedback_events (
-timestamp, traceId, spanId, experimentId,
+feedbackId, timestamp, traceId, spanId, experimentId,
 entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
 userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
 feedbackUserId, sourceId, feedbackSource, feedbackType, value, comment, tags, metadata, scope
 )
 VALUES (${[
+v(f.feedbackId),
 v(f.timestamp),
 v(f.traceId),
 v(f.spanId ?? null),
@@ -829,7 +836,8 @@ async function createFeedback(db, args) {
 jsonV(f.tags ?? null),
 jsonV(f.metadata),
 jsonV(f.scope ?? null)
-].join(", ")})`
+].join(", ")})
+ON CONFLICT DO NOTHING`
 );
 }
 async function batchCreateFeedback(db, args) {
@@ -839,6 +847,7 @@ async function batchCreateFeedback(db, args) {
 const feedbackSource = legacyFeedback.feedbackSource ?? legacyFeedback.source ?? "";
 const feedbackUserId = legacyFeedback.feedbackUserId ?? legacyFeedback.userId ?? null;
 return `(${[
+v(legacyFeedback.feedbackId),
 v(legacyFeedback.timestamp),
 v(legacyFeedback.traceId),
 v(legacyFeedback.spanId ?? null),
@@ -878,12 +887,13 @@ async function batchCreateFeedback(db, args) {
 });
 await db.execute(
 `INSERT INTO feedback_events (
-timestamp, traceId, spanId, experimentId,
+feedbackId, timestamp, traceId, spanId, experimentId,
 entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
 userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
 feedbackUserId, sourceId, feedbackSource, feedbackType, value, comment, tags, metadata, scope
 )
-VALUES ${tuples.join(",\n ")}`
+VALUES ${tuples.join(",\n ")}
+ON CONFLICT DO NOTHING`
 );
 }
 async function listFeedback(db, args) {
@@ -1040,6 +1050,7 @@ async function getFeedbackPercentiles(db, args) {
 
 // src/storage/domains/observability/logs.ts
 var COLUMNS = [
+"logId",
 "timestamp",
 "level",
 "message",
@@ -1076,6 +1087,7 @@ var COLUMNS = [
 var COLUMNS_SQL = COLUMNS.join(", ");
 function rowToLogRecord(row) {
 return {
+logId: row.logId,
 timestamp: toDate(row.timestamp),
 level: row.level,
 message: row.message,
@@ -1114,6 +1126,7 @@ async function batchCreateLogs(db, args) {
 if (args.logs.length === 0) return;
 const tuples = args.logs.map((log) => {
 return `(${[
+v(log.logId),
 v(log.timestamp),
 v(log.level),
 v(log.message),
@@ -1148,7 +1161,7 @@ async function batchCreateLogs(db, args) {
 jsonV(log.scope)
 ].join(", ")})`;
 });
-await db.execute(`INSERT INTO log_events (${COLUMNS_SQL}) VALUES ${tuples.join(",\n")}`);
+await db.execute(`INSERT INTO log_events (${COLUMNS_SQL}) VALUES ${tuples.join(",\n")} ON CONFLICT DO NOTHING`);
 }
 async function listLogs(db, args) {
 const filters = args.filters ?? {};
@@ -1215,6 +1228,7 @@ function buildMetricNameFilter(name) {
 return { clause: `name = ?`, params: [name] };
 }
 var METRIC_COLUMNS = [
+"metricId",
 "timestamp",
 "name",
 "value",
@@ -1318,6 +1332,7 @@ function resolveGroupBy(groupBy) {
 }
 function rowToMetricRecord(row) {
 return {
+metricId: row.metricId,
 timestamp: toDate(row.timestamp),
 name: row.name,
 value: Number(row.value),
@@ -1361,6 +1376,7 @@ async function batchCreateMetrics(db, args) {
 if (args.metrics.length === 0) return;
 const tuples = args.metrics.map((m) => {
 return `(${[
+v(m.metricId),
 v(m.timestamp),
 v(m.name),
 v(m.value),
@@ -1400,7 +1416,9 @@ async function batchCreateMetrics(db, args) {
 jsonV(m.scope ?? null)
 ].join(", ")})`;
 });
-await db.execute(
+await db.execute(
+`INSERT INTO metric_events (${METRIC_COLUMNS_SQL}) VALUES ${tuples.join(",\n")} ON CONFLICT DO NOTHING`
+);
 }
 async function listMetrics(db, args) {
 const filters = args.filters ?? {};
@@ -1699,6 +1717,122 @@ async function getMetricLabelValues(db, args) {
 );
 return { values: rows.map((r) => r.val) };
 }
+var SIGNAL_MIGRATIONS = [
+{ table: "metric_events", createDDL: METRIC_EVENTS_DDL, idColumn: "metricId" },
+{ table: "log_events", createDDL: LOG_EVENTS_DDL, idColumn: "logId" },
+{ table: "score_events", createDDL: SCORE_EVENTS_DDL, idColumn: "scoreId" },
+{ table: "feedback_events", createDDL: FEEDBACK_EVENTS_DDL, idColumn: "feedbackId" }
+];
+async function tableExists(db, table) {
+const rows = await db.query(
+`SELECT table_name FROM information_schema.tables WHERE table_name = ?`,
+[table]
+);
+return rows.length > 0;
+}
+async function hasPrimaryKey(db, table) {
+const rows = await db.query(
+`SELECT constraint_type FROM information_schema.table_constraints
+WHERE table_name = ? AND constraint_type = 'PRIMARY KEY'`,
+[table]
+);
+return rows.length > 0;
+}
+async function getColumns(db, table) {
+const rows = await db.query(
+`SELECT column_name FROM information_schema.columns WHERE table_name = ?`,
+[table]
+);
+return rows.map((r) => r.column_name);
+}
+function buildTemporaryTableDDL(createDDL, table, tempTable) {
+return createDDL.replace(`CREATE TABLE IF NOT EXISTS ${table}`, `CREATE TABLE ${tempTable}`);
+}
+async function dropTableIfExists(db, table) {
+if (await tableExists(db, table)) {
+await db.execute(`DROP TABLE ${table}`);
+}
+}
+function createMigrationError(args, error) {
+return new MastraError(
+{
+id: createStorageErrorId("DUCKDB", "MIGRATE_SIGNAL_TABLES", "FAILED"),
+domain: ErrorDomain.STORAGE,
+category: ErrorCategory.THIRD_PARTY,
+details: args
+},
+error
+);
+}
+async function checkSignalTablesMigrationStatus(db) {
+const tables = [];
+for (const { table, idColumn } of SIGNAL_MIGRATIONS) {
+if (!await tableExists(db, table)) {
+continue;
+}
+if (await hasPrimaryKey(db, table)) {
+continue;
+}
+tables.push({ table, idColumn });
+}
+return {
+needsMigration: tables.length > 0,
+tables
+};
+}
+async function migrateSignalTables(db, logger) {
+for (const { table, createDDL, idColumn } of SIGNAL_MIGRATIONS) {
+if (!await tableExists(db, table)) continue;
+if (await hasPrimaryKey(db, table)) continue;
+logger?.info?.(`Migrating ${table} to schema with ${idColumn} PRIMARY KEY`);
+const temp = `${table}_migrating_${Date.now()}`;
+const backup = `${table}_backup_${Date.now()}`;
+let originalRenamed = false;
+let swapCompleted = false;
+try {
+await db.execute(buildTemporaryTableDDL(createDDL, table, temp));
+const newColumns = await getColumns(db, temp);
+const currentColumns = new Set(await getColumns(db, table));
+const columnList = newColumns.map((c) => `"${c}"`).join(", ");
+const selectExprs = newColumns.map((c) => {
+if (c === idColumn) {
+return currentColumns.has(c) ? `COALESCE(NULLIF("${c}", ''), CAST(uuid() AS VARCHAR)) AS "${c}"` : `CAST(uuid() AS VARCHAR) AS "${c}"`;
+}
+return currentColumns.has(c) ? `"${c}"` : `NULL AS "${c}"`;
+}).join(", ");
+await db.execute(`INSERT INTO ${temp} (${columnList}) SELECT ${selectExprs} FROM ${table}`);
+await db.execute(`ALTER TABLE ${table} RENAME TO ${backup}`);
+originalRenamed = true;
+await db.execute(`ALTER TABLE ${temp} RENAME TO ${table}`);
+swapCompleted = true;
+try {
+await db.execute(`DROP TABLE ${backup}`);
+} catch (cleanupError) {
+logger?.warn?.(
+`Migration of ${table} completed, but failed to drop backup ${backup}: ${cleanupError.message}`
+);
+}
+logger?.info?.(`Successfully migrated ${table}`);
+} catch (error) {
+logger?.error?.(`Migration of ${table} failed: ${error.message}`);
+try {
+await dropTableIfExists(db, temp);
+} catch (restoreError) {
+logger?.error?.(`Failed to clean up temporary table ${temp}: ${restoreError.message}`);
+}
+if (originalRenamed && !swapCompleted) {
+try {
+await db.execute(`ALTER TABLE ${backup} RENAME TO ${table}`);
+} catch (restoreError) {
+logger?.error?.(
+`Failed to restore original table ${table} from backup ${backup}: ${restoreError.message}`
+);
+}
+}
+throw createMigrationError({ table, idColumn }, error);
+}
+}
+}
 var SCORE_GROUP_BY_COLUMNS = /* @__PURE__ */ new Set([
 "timestamp",
 "traceId",
@@ -1814,6 +1948,7 @@ function toSeriesName2(values) {
 }
 function rowToScoreRecord(row) {
 return {
+scoreId: row.scoreId,
 timestamp: toDate(row.timestamp),
 traceId: row.traceId ?? null,
 spanId: row.spanId ?? null,
@@ -1884,12 +2019,13 @@ async function createScore(db, args) {
 const scoreSource = s.scoreSource ?? s.source ?? null;
 await db.execute(
 `INSERT INTO score_events (
-timestamp, traceId, spanId, experimentId, scoreTraceId,
+scoreId, timestamp, traceId, spanId, experimentId, scoreTraceId,
 entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
 userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
 scorerId, scorerVersion, scoreSource, score, reason, tags, metadata, scope
 )
 VALUES (${[
+v(s.scoreId),
 v(s.timestamp),
 v(s.traceId),
 v(s.spanId ?? null),
@@ -1925,7 +2061,8 @@ async function createScore(db, args) {
 jsonV(s.tags ?? null),
 jsonV(s.metadata),
 jsonV(s.scope ?? null)
-].join(", ")})`
+].join(", ")})
+ON CONFLICT DO NOTHING`
 );
 }
 async function batchCreateScores(db, args) {
@@ -1934,6 +2071,7 @@ async function batchCreateScores(db, args) {
 const legacyScore = s;
 const scoreSource = legacyScore.scoreSource ?? legacyScore.source ?? null;
 return `(${[
+v(legacyScore.scoreId),
 v(legacyScore.timestamp),
 v(legacyScore.traceId),
 v(legacyScore.spanId ?? null),
@@ -1973,12 +2111,13 @@ async function batchCreateScores(db, args) {
 });
 await db.execute(
 `INSERT INTO score_events (
-timestamp, traceId, spanId, experimentId, scoreTraceId,
+scoreId, timestamp, traceId, spanId, experimentId, scoreTraceId,
 entityType, entityId, entityName, entityVersionId, parentEntityVersionId, parentEntityType, parentEntityId, parentEntityName, rootEntityVersionId, rootEntityType, rootEntityId, rootEntityName,
 userId, organizationId, resourceId, runId, sessionId, threadId, requestId, environment, executionSource, serviceName,
 scorerId, scorerVersion, scoreSource, score, reason, tags, metadata, scope
 )
-VALUES ${tuples.join(",\n ")}`
+VALUES ${tuples.join(",\n ")}
+ON CONFLICT DO NOTHING`
 );
 }
 async function listScores(db, args) {
@@ -2202,6 +2341,40 @@ var SPAN_RECONSTRUCT_SELECT = `
 ${argMaxNonNull("requestContext")}
 FROM span_events
 `;
+var SPAN_RECONSTRUCT_SELECT_LIGHT = `
+SELECT
+traceId, spanId,
+${argMaxNonNull("name")},
+${argMaxNonNull("spanType")},
+${argMaxNonNull("parentSpanId")},
+${argMaxNonNull("isEvent")},
+coalesce(min(timestamp) FILTER (WHERE eventType = 'start'), min(timestamp)) as startedAt,
+${argMaxNonNull("endedAt")},
+${argMaxNonNull("entityType")},
+${argMaxNonNull("entityId")},
+${argMaxNonNull("entityName")},
+${argMaxNonNull("error")}
+FROM span_events
+`;
+function rowToLightSpanRecord(row) {
+return {
+traceId: row.traceId,
+spanId: row.spanId,
+name: row.name,
+spanType: row.spanType,
+parentSpanId: row.parentSpanId ?? null,
+isEvent: row.isEvent,
+startedAt: toDate(row.startedAt),
+endedAt: toDateOrNull(row.endedAt),
+entityType: row.entityType ?? null,
+entityId: row.entityId ?? null,
+entityName: row.entityName ?? null,
+error: parseJson(row.error),
+createdAt: toDate(row.startedAt),
+// DuckDB event-sourced — use startedAt as proxy
+updatedAt: toDateOrNull(row.endedAt)
+};
+}
 function rowToSpanRecord(row) {
 return {
 traceId: row.traceId,
@@ -2408,6 +2581,16 @@ async function getTrace(db, args) {
 spans: rows.map((row) => rowToSpanRecord(row))
 };
 }
+async function getTraceLight(db, args) {
+const rows = await db.query(`${SPAN_RECONSTRUCT_SELECT_LIGHT} WHERE traceId = ? GROUP BY traceId, spanId`, [
+args.traceId
+]);
+if (rows.length === 0) return null;
+return {
+traceId: args.traceId,
+spans: rows.map((row) => rowToLightSpanRecord(row))
+};
+}
 async function listTraces(db, args) {
 const filters = args.filters ?? {};
 const page = Number(args.pagination?.page ?? 0);
@@ -2456,6 +2639,32 @@ async function listTraces(db, args) {
 }
 
 // src/storage/domains/observability/index.ts
+function buildSignalMigrationRequiredMessage(args) {
+const tableList = args.tables.map((table) => ` - ${table.table}`).join("\n");
+return `
+===========================================================================
+MIGRATION REQUIRED: DuckDB observability signal tables need signal IDs
+===========================================================================
+
+The following signal tables still use the legacy schema and must be migrated
+before observability storage can initialize:
+
+${tableList}
+
+To fix this, run the manual migration command:
+
+npx mastra migrate
+
+This command will:
+1. Create replacement signal tables with signal-ID primary keys
+2. Backfill missing signal IDs for legacy rows
+3. Swap the migrated tables into place
+
+WARNING: This migration recreates the signal tables and may take significant
+time for large databases. Please ensure you have a backup before proceeding.
+===========================================================================
+`;
+}
 var ObservabilityStorageDuckDB = class extends ObservabilityStorage {
 db;
 constructor(config) {
@@ -2464,6 +2673,17 @@ var ObservabilityStorageDuckDB = class extends ObservabilityStorage {
 }
 /** Create all observability tables if they don't exist. */
 async init() {
+const migrationStatus = await checkSignalTablesMigrationStatus(this.db);
+if (migrationStatus.needsMigration) {
+throw new MastraError({
+id: createStorageErrorId("DUCKDB", "MIGRATION_REQUIRED", "SIGNAL_TABLES"),
+domain: ErrorDomain.STORAGE,
+category: ErrorCategory.USER,
+text: buildSignalMigrationRequiredMessage({
+tables: migrationStatus.tables.map(({ table }) => ({ table }))
+})
+});
+}
 for (const ddl of ALL_DDL) {
 await this.db.execute(ddl);
 }
@@ -2471,6 +2691,29 @@ var ObservabilityStorageDuckDB = class extends ObservabilityStorage {
 await this.db.execute(migration);
 }
 }
+/**
+* Manually migrate legacy signal tables to the signal-ID primary-key schema.
+* The public method name is historical; the CLI still calls `migrateSpans()`
+* for observability migrations even though this now also migrates signal tables.
+*/
+async migrateSpans() {
+const migrationStatus = await checkSignalTablesMigrationStatus(this.db);
+if (!migrationStatus.needsMigration) {
+return {
+success: true,
+alreadyMigrated: true,
+duplicatesRemoved: 0,
+message: "Migration already complete. Signal tables already use signal-ID primary keys."
+};
+}
+await migrateSignalTables(this.db, this.logger);
+return {
+success: true,
+alreadyMigrated: false,
+duplicatesRemoved: 0,
+message: `Migration complete. Migrated signal tables: ${migrationStatus.tables.map((t) => t.table).join(", ")}.`
+};
+}
 /** Delete all rows from every observability table. Use with caution. */
 async dangerouslyClearAll() {
 for (const table of ["span_events", "metric_events", "log_events", "score_events", "feedback_events"]) {
@@ -2502,6 +2745,9 @@ var ObservabilityStorageDuckDB = class extends ObservabilityStorage {
 async getTrace(args) {
 return getTrace(this.db, args);
 }
+async getTraceLight(args) {
+return getTraceLight(this.db, args);
+}
 async listTraces(args) {
 return listTraces(this.db, args);
 }
@@ -2604,5 +2850,5 @@ var ObservabilityStorageDuckDB = class extends ObservabilityStorage {
 };
 
 export { ObservabilityStorageDuckDB };
-//# sourceMappingURL=observability-
-//# sourceMappingURL=observability-
+//# sourceMappingURL=observability-YJBOVLPV.js.map
+//# sourceMappingURL=observability-YJBOVLPV.js.map