@mastra/upstash 1.0.0-beta.2 → 1.0.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/dist/index.cjs +138 -80
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +139 -81
- package/dist/index.js.map +1 -1
- package/dist/storage/domains/memory/index.d.ts +4 -0
- package/dist/storage/domains/memory/index.d.ts.map +1 -1
- package/dist/storage/domains/scores/index.d.ts.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,30 @@
 # @mastra/upstash
 
+## 1.0.0-beta.3
+
+### Minor Changes
+
+- feat(storage): support querying messages from multiple threads ([#10663](https://github.com/mastra-ai/mastra/pull/10663))
+  - Fixed TypeScript errors where `threadId: string | string[]` was being passed to places expecting `Scalar` type
+  - Added proper multi-thread support for `listMessages` across all adapters when `threadId` is an array
+  - Updated `_getIncludedMessages` to look up message threadId by ID (since message IDs are globally unique)
+  - **upstash**: Added `msg-idx:{messageId}` index for O(1) message lookups (backwards compatible with fallback to scan for old messages, with automatic backfill)
+
+### Patch Changes
+
+- Unify transformScoreRow functions across storage adapters ([#10648](https://github.com/mastra-ai/mastra/pull/10648))
+
+  Added a unified `transformScoreRow` function in `@mastra/core/storage` that provides schema-driven row transformation for score data. This eliminates code duplication across 10 storage adapters while maintaining store-specific behavior through configurable options:
+
+  - `preferredTimestampFields`: Preferred source fields for timestamps (PostgreSQL, Cloudflare D1)
+  - `convertTimestamps`: Convert timestamp strings to Date objects (MSSQL, MongoDB, ClickHouse)
+  - `nullValuePattern`: Skip values matching pattern (ClickHouse's `'_null_'`)
+  - `fieldMappings`: Map source column names to schema fields (LibSQL's `additionalLLMContext`)
+
+  Each store adapter now uses the unified function with appropriate options, reducing ~200 lines of duplicate transformation logic while ensuring consistent behavior across all storage backends.
+
+- Updated dependencies [[`ac0d2f4`](https://github.com/mastra-ai/mastra/commit/ac0d2f4ff8831f72c1c66c2be809706d17f65789), [`1a0d3fc`](https://github.com/mastra-ai/mastra/commit/1a0d3fc811482c9c376cdf79ee615c23bae9b2d6), [`85a628b`](https://github.com/mastra-ai/mastra/commit/85a628b1224a8f64cd82ea7f033774bf22df7a7e), [`c237233`](https://github.com/mastra-ai/mastra/commit/c23723399ccedf7f5744b3f40997b79246bfbe64), [`15f9e21`](https://github.com/mastra-ai/mastra/commit/15f9e216177201ea6e3f6d0bfb063fcc0953444f), [`ff94dea`](https://github.com/mastra-ai/mastra/commit/ff94dea935f4e34545c63bcb6c29804732698809), [`5b2ff46`](https://github.com/mastra-ai/mastra/commit/5b2ff4651df70c146523a7fca773f8eb0a2272f8), [`db41688`](https://github.com/mastra-ai/mastra/commit/db4168806d007417e2e60b4f68656dca4e5f40c9), [`5ca599d`](https://github.com/mastra-ai/mastra/commit/5ca599d0bb59a1595f19f58473fcd67cc71cef58), [`bff1145`](https://github.com/mastra-ai/mastra/commit/bff114556b3cbadad9b2768488708f8ad0e91475), [`5c8ca24`](https://github.com/mastra-ai/mastra/commit/5c8ca247094e0cc2cdbd7137822fb47241f86e77), [`e191844`](https://github.com/mastra-ai/mastra/commit/e1918444ca3f80e82feef1dad506cd4ec6e2875f), [`22553f1`](https://github.com/mastra-ai/mastra/commit/22553f11c63ee5e966a9c034a349822249584691), [`7237163`](https://github.com/mastra-ai/mastra/commit/72371635dbf96a87df4b073cc48fc655afbdce3d), [`2500740`](https://github.com/mastra-ai/mastra/commit/2500740ea23da067d6e50ec71c625ab3ce275e64), [`873ecbb`](https://github.com/mastra-ai/mastra/commit/873ecbb517586aa17d2f1e99283755b3ebb2863f), [`4f9bbe5`](https://github.com/mastra-ai/mastra/commit/4f9bbe5968f42c86f4930b8193de3c3c17e5bd36), [`02e51fe`](https://github.com/mastra-ai/mastra/commit/02e51feddb3d4155cfbcc42624fd0d0970d032c0), [`8f3fa3a`](https://github.com/mastra-ai/mastra/commit/8f3fa3a652bb77da092f913ec51ae46e3a7e27dc), [`cd29ad2`](https://github.com/mastra-ai/mastra/commit/cd29ad23a255534e8191f249593849ed29160886), [`bdf4d8c`](https://github.com/mastra-ai/mastra/commit/bdf4d8cdc656d8a2c21d81834bfa3bfa70f56c16), [`854e3da`](https://github.com/mastra-ai/mastra/commit/854e3dad5daac17a91a20986399d3a51f54bf68b), [`ce18d38`](https://github.com/mastra-ai/mastra/commit/ce18d38678c65870350d123955014a8432075fd9), [`cccf9c8`](https://github.com/mastra-ai/mastra/commit/cccf9c8b2d2dfc1a5e63919395b83d78c89682a0), [`61a5705`](https://github.com/mastra-ai/mastra/commit/61a570551278b6743e64243b3ce7d73de915ca8a), [`db70a48`](https://github.com/mastra-ai/mastra/commit/db70a48aeeeeb8e5f92007e8ede52c364ce15287), [`f0fdc14`](https://github.com/mastra-ai/mastra/commit/f0fdc14ee233d619266b3d2bbdeea7d25cfc6d13), [`db18bc9`](https://github.com/mastra-ai/mastra/commit/db18bc9c3825e2c1a0ad9a183cc9935f6691bfa1), [`9b37b56`](https://github.com/mastra-ai/mastra/commit/9b37b565e1f2a76c24f728945cc740c2b09be9da), [`41a23c3`](https://github.com/mastra-ai/mastra/commit/41a23c32f9877d71810f37e24930515df2ff7a0f), [`5d171ad`](https://github.com/mastra-ai/mastra/commit/5d171ad9ef340387276b77c2bb3e83e83332d729), [`f03ae60`](https://github.com/mastra-ai/mastra/commit/f03ae60500fe350c9d828621006cdafe1975fdd8), [`d1e74a0`](https://github.com/mastra-ai/mastra/commit/d1e74a0a293866dece31022047f5dbab65a304d0), [`39e7869`](https://github.com/mastra-ai/mastra/commit/39e7869bc7d0ee391077ce291474d8a84eedccff), [`5761926`](https://github.com/mastra-ai/mastra/commit/57619260c4a2cdd598763abbacd90de594c6bc76), [`c900fdd`](https://github.com/mastra-ai/mastra/commit/c900fdd504c41348efdffb205cfe80d48c38fa33), [`604a79f`](https://github.com/mastra-ai/mastra/commit/604a79fecf276e26a54a3fe01bb94e65315d2e0e), [`887f0b4`](https://github.com/mastra-ai/mastra/commit/887f0b4746cdbd7cb7d6b17ac9f82aeb58037ea5), [`2562143`](https://github.com/mastra-ai/mastra/commit/256214336b4faa78646c9c1776612393790d8784), [`ef11a61`](https://github.com/mastra-ai/mastra/commit/ef11a61920fa0ed08a5b7ceedd192875af119749)]:
+  - @mastra/core@1.0.0-beta.6
+
 ## 1.0.0-beta.2
 
 ### Patch Changes
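The Minor Changes entry above lands in the adapter code diffed below. As a quick orientation, here is a hedged usage sketch of the new `threadId: string | string[]` parameter to `listMessages`; the `memoryStore` variable and its declared shape are placeholders typed only from the fields visible in this diff, not the package's full public API.

```ts
// Sketch only: `memoryStore` stands in for the Upstash-backed memory storage
// domain (the StoreMemoryUpstash class changed below); how you obtain it from
// your Mastra setup is outside this diff, so it is declared here as a stub.
declare const memoryStore: {
  listMessages(args: {
    threadId: string | string[]; // beta.3: a single id or an array of ids
    resourceId?: string;
    perPage?: number;
    page?: number;
  }): Promise<{ messages: unknown[]; total: number }>;
};

// beta.2 behavior: one thread per call.
await memoryStore.listMessages({ threadId: "thread-1", perPage: 40 });

// beta.3: pass several thread ids; the adapter collects message ids from each
// thread's sorted set and returns one merged, paginated result.
await memoryStore.listMessages({
  threadId: ["thread-1", "thread-2"],
  page: 0,
  perPage: 40,
});
```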
package/dist/index.cjs
CHANGED
@@ -54,6 +54,9 @@ function getMessageKey(threadId, messageId) {
   const key = getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
   return key;
 }
+function getMessageIndexKey(messageId) {
+  return `msg-idx:${messageId}`;
+}
 var StoreMemoryUpstash = class extends storage.MemoryStorage {
   client;
   operations;
@@ -320,6 +323,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
         }
       }
       pipeline.set(key, message);
+      pipeline.set(getMessageIndexKey(message.id), message.threadId);
       pipeline.zadd(getThreadMessagesKey(message.threadId), {
         score,
         member: message.id
@@ -350,43 +354,60 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
       );
     }
   }
-
-
+  /**
+   * Lookup threadId for a message - tries index first (O(1)), falls back to scan (backwards compatible)
+   */
+  async _getThreadIdForMessage(messageId) {
+    const indexedThreadId = await this.client.get(getMessageIndexKey(messageId));
+    if (indexedThreadId) {
+      return indexedThreadId;
+    }
+    const existingKeyPattern = getMessageKey("*", messageId);
+    const keys = await this.operations.scanKeys(existingKeyPattern);
+    if (keys.length === 0) return null;
+    const messageData = await this.client.get(keys[0]);
+    if (!messageData) return null;
+    if (messageData.threadId) {
+      await this.client.set(getMessageIndexKey(messageId), messageData.threadId);
+    }
+    return messageData.threadId || null;
+  }
+  async _getIncludedMessages(include) {
+    if (!include?.length) return [];
     const messageIds = /* @__PURE__ */ new Set();
     const messageIdToThreadIds = {};
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  }
+    for (const item of include) {
+      const itemThreadId = await this._getThreadIdForMessage(item.id);
+      if (!itemThreadId) continue;
+      messageIds.add(item.id);
+      messageIdToThreadIds[item.id] = itemThreadId;
+      const itemThreadMessagesKey = getThreadMessagesKey(itemThreadId);
+      const rank = await this.client.zrank(itemThreadMessagesKey, item.id);
+      if (rank === null) continue;
+      if (item.withPreviousMessages) {
+        const start = Math.max(0, rank - item.withPreviousMessages);
+        const prevIds = rank === 0 ? [] : await this.client.zrange(itemThreadMessagesKey, start, rank - 1);
+        prevIds.forEach((id) => {
+          messageIds.add(id);
+          messageIdToThreadIds[id] = itemThreadId;
+        });
+      }
+      if (item.withNextMessages) {
+        const nextIds = await this.client.zrange(itemThreadMessagesKey, rank + 1, rank + item.withNextMessages);
+        nextIds.forEach((id) => {
+          messageIds.add(id);
+          messageIdToThreadIds[id] = itemThreadId;
+        });
     }
-    const pipeline = this.client.pipeline();
-    Array.from(messageIds).forEach((id) => {
-      const tId = messageIdToThreadIds[id] || threadId;
-      pipeline.get(getMessageKey(tId, id));
-    });
-    const results = await pipeline.exec();
-    return results.filter((result) => result !== null);
   }
-  return [];
+    if (messageIds.size === 0) return [];
+    const pipeline = this.client.pipeline();
+    Array.from(messageIds).forEach((id) => {
+      const tId = messageIdToThreadIds[id];
+      pipeline.get(getMessageKey(tId, id));
+    });
+    const results = await pipeline.exec();
+    return results.filter((result) => result !== null);
   }
   parseStoredMessage(storedMessage) {
     const defaultMessageContent = { format: 2, parts: [{ type: "text", text: "" }] };
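The `_getThreadIdForMessage` helper added above implements the `msg-idx:{messageId}` index described in the changelog: try an O(1) index read first, fall back to a key scan for messages written before beta.3, and backfill the index on a successful fallback. Below is a standalone sketch of the same pattern against a plain `@upstash/redis` client; the `messageKeyPattern` format is an assumption for illustration, not the adapter's actual `getKey`/`scanKeys` helpers.

```ts
import { Redis } from "@upstash/redis";

const redis = new Redis({
  url: process.env.UPSTASH_REDIS_REST_URL!,
  token: process.env.UPSTASH_REDIS_REST_TOKEN!,
});

// `msg-idx:{messageId}` matches the index key added in this release; the message
// key pattern below is a simplified stand-in for the adapter's real key scheme.
const messageIndexKey = (messageId: string) => `msg-idx:${messageId}`;
const messageKeyPattern = (messageId: string) => `messages:*:${messageId}`;

async function getThreadIdForMessage(messageId: string): Promise<string | null> {
  // Fast path: O(1) lookup via the index written for every message saved since beta.3.
  const indexed = await redis.get<string>(messageIndexKey(messageId));
  if (indexed) return indexed;

  // Fallback for pre-beta.3 messages: scan for the message key by pattern.
  const keys = await redis.keys(messageKeyPattern(messageId));
  if (keys.length === 0) return null;

  const message = await redis.get<{ threadId?: string }>(keys[0]!);
  if (!message?.threadId) return null;

  // Backfill the index so the next lookup for this message takes the fast path.
  await redis.set(messageIndexKey(messageId), message.threadId);
  return message.threadId;
}
```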
@@ -400,17 +421,49 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
   async listMessagesById({ messageIds }) {
     if (messageIds.length === 0) return { messages: [] };
     try {
-      const
-      const
-
-
-
-
-
-
-
-
-
+      const rawMessages = [];
+      const indexPipeline = this.client.pipeline();
+      messageIds.forEach((id) => indexPipeline.get(getMessageIndexKey(id)));
+      const indexResults = await indexPipeline.exec();
+      const indexedIds = [];
+      const unindexedIds = [];
+      messageIds.forEach((id, i) => {
+        const threadId = indexResults[i];
+        if (threadId) {
+          indexedIds.push({ messageId: id, threadId });
+        } else {
+          unindexedIds.push(id);
+        }
+      });
+      if (indexedIds.length > 0) {
+        const messagePipeline = this.client.pipeline();
+        indexedIds.forEach(({ messageId, threadId }) => messagePipeline.get(getMessageKey(threadId, messageId)));
+        const messageResults = await messagePipeline.exec();
+        rawMessages.push(...messageResults.filter((msg) => msg !== null));
+      }
+      if (unindexedIds.length > 0) {
+        const threadKeys = await this.client.keys("thread:*");
+        const result = await Promise.all(
+          threadKeys.map((threadKey) => {
+            const threadId = threadKey.split(":")[1];
+            if (!threadId) throw new Error(`Failed to parse thread ID from thread key "${threadKey}"`);
+            return this.client.mget(
+              unindexedIds.map((id) => getMessageKey(threadId, id))
+            );
+          })
+        );
+        const foundMessages = result.flat(1).filter((msg) => !!msg);
+        rawMessages.push(...foundMessages);
+        if (foundMessages.length > 0) {
+          const backfillPipeline = this.client.pipeline();
+          foundMessages.forEach((msg) => {
+            if (msg.threadId) {
+              backfillPipeline.set(getMessageIndexKey(msg.id), msg.threadId);
+            }
+          });
+          await backfillPipeline.exec();
+        }
+      }
       const list = new agent.MessageList().add(rawMessages.map(this.parseStoredMessage), "memory");
       return { messages: list.get.all.db() };
     } catch (error$1) {
@@ -429,18 +482,18 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
   }
   async listMessages(args) {
     const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
-
+    const threadIds = Array.isArray(threadId) ? threadId : [threadId];
+    if (threadIds.length === 0 || threadIds.some((id) => !id.trim())) {
       throw new error.MastraError(
         {
           id: "STORAGE_UPSTASH_LIST_MESSAGES_INVALID_THREAD_ID",
           domain: error.ErrorDomain.STORAGE,
           category: error.ErrorCategory.THIRD_PARTY,
-          details: { threadId }
+          details: { threadId: Array.isArray(threadId) ? threadId.join(",") : threadId }
         },
-        new Error("threadId must be a non-empty string")
+        new Error("threadId must be a non-empty string or array of non-empty strings")
       );
     }
-    const threadMessagesKey = getThreadMessagesKey(threadId);
     const perPage = storage.normalizePerPage(perPageInput, 40);
     const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
     try {
@@ -457,11 +510,18 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
       }
       let includedMessages = [];
       if (include && include.length > 0) {
-        const included = await this._getIncludedMessages(
+        const included = await this._getIncludedMessages(include);
         includedMessages = included.map(this.parseStoredMessage);
       }
-      const
-
+      const allMessageIdsWithThreads = [];
+      for (const tid of threadIds) {
+        const threadMessagesKey = getThreadMessagesKey(tid);
+        const messageIds2 = await this.client.zrange(threadMessagesKey, 0, -1);
+        for (const mid of messageIds2) {
+          allMessageIdsWithThreads.push({ threadId: tid, messageId: mid });
+        }
+      }
+      if (allMessageIdsWithThreads.length === 0) {
        return {
          messages: [],
          total: 0,
@@ -471,7 +531,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
        };
      }
      const pipeline = this.client.pipeline();
-
+      allMessageIdsWithThreads.forEach(({ threadId: tid, messageId }) => pipeline.get(getMessageKey(tid, messageId)));
      const results = await pipeline.exec();
      let messagesData = results.filter((msg) => msg !== null).map(this.parseStoredMessage);
      if (resourceId) {
@@ -547,7 +607,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
           domain: error.ErrorDomain.STORAGE,
           category: error.ErrorCategory.THIRD_PARTY,
           details: {
-            threadId,
+            threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
             resourceId: resourceId ?? ""
           }
         },
@@ -752,13 +812,33 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
     try {
       const threadIds = /* @__PURE__ */ new Set();
       const messageKeys = [];
-
+      const foundMessageIds = [];
+      const indexPipeline = this.client.pipeline();
+      messageIds.forEach((id) => indexPipeline.get(getMessageIndexKey(id)));
+      const indexResults = await indexPipeline.exec();
+      const indexedMessages = [];
+      const unindexedMessageIds = [];
+      messageIds.forEach((id, i) => {
+        const threadId = indexResults[i];
+        if (threadId) {
+          indexedMessages.push({ messageId: id, threadId });
+        } else {
+          unindexedMessageIds.push(id);
+        }
+      });
+      for (const { messageId, threadId } of indexedMessages) {
+        messageKeys.push(getMessageKey(threadId, messageId));
+        foundMessageIds.push(messageId);
+        threadIds.add(threadId);
+      }
+      for (const messageId of unindexedMessageIds) {
        const pattern = getMessageKey("*", messageId);
        const keys = await this.operations.scanKeys(pattern);
        for (const key of keys) {
          const message = await this.client.get(key);
          if (message && message.id === messageId) {
            messageKeys.push(key);
+            foundMessageIds.push(messageId);
            if (message.threadId) {
              threadIds.add(message.threadId);
            }
@@ -773,6 +853,9 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
      for (const key of messageKeys) {
        pipeline.del(key);
      }
+      for (const messageId of foundMessageIds) {
+        pipeline.del(getMessageIndexKey(messageId));
+      }
      if (threadIds.size > 0) {
        for (const threadId of threadIds) {
          const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
@@ -948,32 +1031,7 @@ var StoreOperationsUpstash = class extends storage.StoreOperations {
   }
 };
 function transformScoreRow(row) {
-
-    if (typeof v === "string") {
-      try {
-        return JSON.parse(v);
-      } catch {
-        return v;
-      }
-    }
-    return v;
-  };
-  return {
-    ...row,
-    scorer: parseField(row.scorer),
-    preprocessStepResult: parseField(row.preprocessStepResult),
-    generateScorePrompt: row.generateScorePrompt,
-    generateReasonPrompt: row.generateReasonPrompt,
-    analyzeStepResult: parseField(row.analyzeStepResult),
-    metadata: parseField(row.metadata),
-    input: parseField(row.input),
-    output: parseField(row.output),
-    additionalContext: parseField(row.additionalContext),
-    requestContext: parseField(row.requestContext),
-    entity: parseField(row.entity),
-    createdAt: row.createdAt,
-    updatedAt: row.updatedAt
-  };
+  return storage.transformScoreRow(row);
 }
 var ScoresUpstash = class extends storage.ScoresStorage {
   client;