@mastra/pg 0.11.1-alpha.1 → 0.12.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +27 -0
- package/dist/_tsup-dts-rollup.d.cts +39 -6
- package/dist/_tsup-dts-rollup.d.ts +39 -6
- package/dist/index.cjs +94 -8
- package/dist/index.js +95 -9
- package/package.json +5 -5
- package/src/storage/index.test.ts +355 -1
- package/src/storage/index.ts +119 -5
- package/src/vector/filter.test.ts +12 -12
- package/src/vector/filter.ts +36 -7
- package/src/vector/index.test.ts +2 -2
- package/src/vector/index.ts +4 -4
- package/src/vector/sql-builder.ts +2 -1
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mastra/pg",
-  "version": "0.11.1-alpha.1",
+  "version": "0.12.0-alpha.3",
   "description": "Postgres provider for Mastra - includes both vector and db storage capabilities",
   "type": "module",
   "main": "dist/index.js",
@@ -29,16 +29,16 @@
     "@microsoft/api-extractor": "^7.52.8",
     "@types/node": "^20.19.0",
     "@types/pg": "^8.15.4",
-    "eslint": "^9.
+    "eslint": "^9.29.0",
     "tsup": "^8.5.0",
     "typescript": "^5.8.3",
    "vitest": "^3.2.3",
-    "@internal/storage-test-utils": "0.0.9",
     "@internal/lint": "0.0.13",
-    "@
+    "@internal/storage-test-utils": "0.0.9",
+    "@mastra/core": "0.10.7-alpha.3"
   },
   "peerDependencies": {
-    "@mastra/core": ">=0.10.
+    "@mastra/core": ">=0.10.7-0 <0.11.0-0"
   },
   "scripts": {
     "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting",
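A quick read on the new peer range, ">=0.10.7-0 <0.11.0-0": it accepts @mastra/core 0.10.x builds from the 0.10.7 prereleases onward and rejects 0.11.0 and its prereleases. Illustration only, using the node-semver package (not part of this diff):

// Sketch: how the new peerDependencies range behaves under node-semver rules.
import semver from 'semver';

const range = '>=0.10.7-0 <0.11.0-0';
const opts = { includePrerelease: true };

semver.satisfies('0.10.7-alpha.3', range, opts); // true  - matches the pinned dev dependency
semver.satisfies('0.10.6', range, opts);         // false - below the lower bound
semver.satisfies('0.11.0-alpha.1', range, opts); // false - excluded by the -0 upper bound
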
package/src/storage/index.test.ts
CHANGED
@@ -591,6 +591,81 @@ describe('PostgresStore', () => {
       expect(thread2Messages).toHaveLength(1);
       expect(thread2Messages[0].id).toBe(message.id);
     });
+    it('should upsert messages: duplicate id+threadId results in update, not duplicate row', async () => {
+      const thread = await createSampleThread();
+      await store.saveThread({ thread });
+      const baseMessage = createSampleMessageV2({
+        threadId: thread.id,
+        createdAt: new Date(),
+        content: { content: 'Original' },
+        resourceId: thread.resourceId,
+      });
+
+      // Insert the message for the first time
+      await store.saveMessages({ messages: [baseMessage], format: 'v2' });
+
+      // Insert again with the same id and threadId but different content
+      const updatedMessage = {
+        ...createSampleMessageV2({
+          threadId: thread.id,
+          createdAt: new Date(),
+          content: { content: 'Updated' },
+          resourceId: thread.resourceId,
+        }),
+        id: baseMessage.id,
+      };
+
+      await store.saveMessages({ messages: [updatedMessage], format: 'v2' });
+
+      // Retrieve messages for the thread
+      const retrievedMessages = await store.getMessages({ threadId: thread.id, format: 'v2' });
+
+      // Only one message should exist for that id+threadId
+      expect(retrievedMessages.filter(m => m.id === baseMessage.id)).toHaveLength(1);
+
+      // The content should be the updated one
+      expect(retrievedMessages.find(m => m.id === baseMessage.id)?.content.content).toBe('Updated');
+    });
+
+    it('should upsert messages: duplicate id and different threadid', async () => {
+      const thread1 = await createSampleThread();
+      const thread2 = await createSampleThread();
+      await store.saveThread({ thread: thread1 });
+      await store.saveThread({ thread: thread2 });
+
+      const message = createSampleMessageV2({
+        threadId: thread1.id,
+        createdAt: new Date(),
+        content: { content: 'Thread1 Content' },
+        resourceId: thread1.resourceId,
+      });
+
+      // Insert message into thread1
+      await store.saveMessages({ messages: [message], format: 'v2' });
+
+      // Attempt to insert a message with the same id but different threadId
+      const conflictingMessage = {
+        ...createSampleMessageV2({
+          threadId: thread2.id, // different thread
+          content: { content: 'Thread2 Content' },
+          resourceId: thread2.resourceId,
+        }),
+        id: message.id,
+      };
+
+      // Save should move the message to the new thread
+      await store.saveMessages({ messages: [conflictingMessage], format: 'v2' });
+
+      // Retrieve messages for both threads
+      const thread1Messages = await store.getMessages({ threadId: thread1.id, format: 'v2' });
+      const thread2Messages = await store.getMessages({ threadId: thread2.id, format: 'v2' });
+
+      // Thread 1 should NOT have the message with that id
+      expect(thread1Messages.find(m => m.id === message.id)).toBeUndefined();
+
+      // Thread 2 should have the message with that id
+      expect(thread2Messages.find(m => m.id === message.id)?.content.content).toBe('Thread2 Content');
+    });
   });

   describe('Edge Cases and Error Handling', () => {
@@ -1615,7 +1690,6 @@ describe('PostgresStore', () => {
         selectBy: { pagination: { page: 0, perPage: 5 } },
         format: 'v2',
       });
-      console.log(page1);
       expect(page1.messages).toHaveLength(5);
       expect(page1.total).toBe(15);
       expect(page1.page).toBe(0);
@@ -1686,6 +1760,286 @@ describe('PostgresStore', () => {
         );
       }
     });
+
+    it('should save and retrieve messages', async () => {
+      const thread = createSampleThread();
+      await store.saveThread({ thread });
+
+      const messages = [
+        createSampleMessageV1({ threadId: thread.id }),
+        createSampleMessageV1({ threadId: thread.id }),
+      ];
+
+      // Save messages
+      const savedMessages = await store.saveMessages({ messages });
+      expect(savedMessages).toEqual(messages);
+
+      // Retrieve messages
+      const retrievedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v1' });
+      expect(retrievedMessages.messages).toHaveLength(2);
+      const checkMessages = messages.map(m => {
+        const { resourceId, ...rest } = m;
+        return rest;
+      });
+      expect(retrievedMessages.messages).toEqual(expect.arrayContaining(checkMessages));
+    });
+
+    it('should maintain message order', async () => {
+      const thread = createSampleThread();
+      await store.saveThread({ thread });
+
+      const messageContent = ['First', 'Second', 'Third'];
+
+      const messages = messageContent.map(content =>
+        createSampleMessageV2({
+          threadId: thread.id,
+          content: { content, parts: [{ type: 'text', text: content }] },
+        }),
+      );
+
+      await store.saveMessages({ messages, format: 'v2' });
+
+      const retrievedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v2' });
+      expect(retrievedMessages.messages).toHaveLength(3);
+
+      // Verify order is maintained
+      retrievedMessages.messages.forEach((msg, idx) => {
+        expect((msg.content.parts[0] as any).text).toEqual(messageContent[idx]);
+      });
+    });
+
+    it('should rollback on error during message save', async () => {
+      const thread = createSampleThread();
+      await store.saveThread({ thread });
+
+      const messages = [
+        createSampleMessageV1({ threadId: thread.id }),
+        { ...createSampleMessageV1({ threadId: thread.id }), id: null } as any, // This will cause an error
+      ];
+
+      await expect(store.saveMessages({ messages })).rejects.toThrow();
+
+      // Verify no messages were saved
+      const savedMessages = await store.getMessagesPaginated({ threadId: thread.id, format: 'v2' });
+      expect(savedMessages.messages).toHaveLength(0);
+    });
+
+    it('should retrieve messages w/ next/prev messages by message id + resource id', async () => {
+      const thread = createSampleThread({ id: 'thread-one' });
+      await store.saveThread({ thread });
+
+      const thread2 = createSampleThread({ id: 'thread-two' });
+      await store.saveThread({ thread: thread2 });
+
+      const thread3 = createSampleThread({ id: 'thread-three' });
+      await store.saveThread({ thread: thread3 });
+
+      const messages: MastraMessageV2[] = [
+        createSampleMessageV2({
+          threadId: 'thread-one',
+          content: { content: 'First' },
+          resourceId: 'cross-thread-resource',
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-one',
+          content: { content: 'Second' },
+          resourceId: 'cross-thread-resource',
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-one',
+          content: { content: 'Third' },
+          resourceId: 'cross-thread-resource',
+        }),
+
+        createSampleMessageV2({
+          threadId: 'thread-two',
+          content: { content: 'Fourth' },
+          resourceId: 'cross-thread-resource',
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-two',
+          content: { content: 'Fifth' },
+          resourceId: 'cross-thread-resource',
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-two',
+          content: { content: 'Sixth' },
+          resourceId: 'cross-thread-resource',
+        }),
+
+        createSampleMessageV2({
+          threadId: 'thread-three',
+          content: { content: 'Seventh' },
+          resourceId: 'other-resource',
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-three',
+          content: { content: 'Eighth' },
+          resourceId: 'other-resource',
+        }),
+      ];
+
+      await store.saveMessages({ messages: messages, format: 'v2' });
+
+      const retrievedMessages = await store.getMessagesPaginated({ threadId: 'thread-one', format: 'v2' });
+      expect(retrievedMessages.messages).toHaveLength(3);
+      expect(retrievedMessages.messages.map((m: any) => m.content.parts[0].text)).toEqual([
+        'First',
+        'Second',
+        'Third',
+      ]);
+
+      const retrievedMessages2 = await store.getMessagesPaginated({ threadId: 'thread-two', format: 'v2' });
+      expect(retrievedMessages2.messages).toHaveLength(3);
+      expect(retrievedMessages2.messages.map((m: any) => m.content.parts[0].text)).toEqual([
+        'Fourth',
+        'Fifth',
+        'Sixth',
+      ]);
+
+      const retrievedMessages3 = await store.getMessagesPaginated({ threadId: 'thread-three', format: 'v2' });
+      expect(retrievedMessages3.messages).toHaveLength(2);
+      expect(retrievedMessages3.messages.map((m: any) => m.content.parts[0].text)).toEqual(['Seventh', 'Eighth']);
+
+      const { messages: crossThreadMessages } = await store.getMessagesPaginated({
+        threadId: 'thread-doesnt-exist',
+        format: 'v2',
+        selectBy: {
+          last: 0,
+          include: [
+            {
+              id: messages[1].id,
+              threadId: 'thread-one',
+              withNextMessages: 2,
+              withPreviousMessages: 2,
+            },
+            {
+              id: messages[4].id,
+              threadId: 'thread-two',
+              withPreviousMessages: 2,
+              withNextMessages: 2,
+            },
+          ],
+        },
+      });
+
+      expect(crossThreadMessages).toHaveLength(6);
+      expect(crossThreadMessages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
+      expect(crossThreadMessages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
+
+      const crossThreadMessages2 = await store.getMessagesPaginated({
+        threadId: 'thread-one',
+        format: 'v2',
+        selectBy: {
+          last: 0,
+          include: [
+            {
+              id: messages[4].id,
+              threadId: 'thread-two',
+              withPreviousMessages: 1,
+              withNextMessages: 1,
+            },
+          ],
+        },
+      });
+
+      expect(crossThreadMessages2.messages).toHaveLength(3);
+      expect(crossThreadMessages2.messages.filter(m => m.threadId === `thread-one`)).toHaveLength(0);
+      expect(crossThreadMessages2.messages.filter(m => m.threadId === `thread-two`)).toHaveLength(3);
+
+      const crossThreadMessages3 = await store.getMessagesPaginated({
+        threadId: 'thread-two',
+        format: 'v2',
+        selectBy: {
+          last: 0,
+          include: [
+            {
+              id: messages[1].id,
+              threadId: 'thread-one',
+              withNextMessages: 1,
+              withPreviousMessages: 1,
+            },
+          ],
+        },
+      });
+
+      expect(crossThreadMessages3.messages).toHaveLength(3);
+      expect(crossThreadMessages3.messages.filter(m => m.threadId === `thread-one`)).toHaveLength(3);
+      expect(crossThreadMessages3.messages.filter(m => m.threadId === `thread-two`)).toHaveLength(0);
+    });
+
+    it('should return messages using both last and include (cross-thread, deduped)', async () => {
+      const thread = createSampleThread({ id: 'thread-one' });
+      await store.saveThread({ thread });
+
+      const thread2 = createSampleThread({ id: 'thread-two' });
+      await store.saveThread({ thread: thread2 });
+
+      const now = new Date();
+
+      // Setup: create messages in two threads
+      const messages = [
+        createSampleMessageV2({
+          threadId: 'thread-one',
+          content: { content: 'A' },
+          createdAt: new Date(now.getTime()),
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-one',
+          content: { content: 'B' },
+          createdAt: new Date(now.getTime() + 1000),
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-one',
+          content: { content: 'C' },
+          createdAt: new Date(now.getTime() + 2000),
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-two',
+          content: { content: 'D' },
+          createdAt: new Date(now.getTime() + 3000),
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-two',
+          content: { content: 'E' },
+          createdAt: new Date(now.getTime() + 4000),
+        }),
+        createSampleMessageV2({
+          threadId: 'thread-two',
+          content: { content: 'F' },
+          createdAt: new Date(now.getTime() + 5000),
+        }),
+      ];
+      await store.saveMessages({ messages, format: 'v2' });
+
+      // Use last: 2 and include a message from another thread with context
+      const { messages: result } = await store.getMessagesPaginated({
+        threadId: 'thread-one',
+        format: 'v2',
+        selectBy: {
+          last: 2,
+          include: [
+            {
+              id: messages[4].id, // 'E' from thread-bar
+              threadId: 'thread-two',
+              withPreviousMessages: 1,
+              withNextMessages: 1,
+            },
+          ],
+        },
+      });
+
+      // Should include last 2 from thread-one and 3 from thread-two (D, E, F)
+      expect(result.map(m => m.content.content).sort()).toEqual(['B', 'C', 'D', 'E', 'F']);
+      // Should include 2 from thread-one
+      expect(result.filter(m => m.threadId === 'thread-one').map((m: any) => m.content.content)).toEqual(['B', 'C']);
+      // Should include 3 from thread-two
+      expect(result.filter(m => m.threadId === 'thread-two').map((m: any) => m.content.content)).toEqual([
+        'D',
+        'E',
+        'F',
+      ]);
+    });
   });

   describe('getThreadsByResourceId with pagination', () => {
package/src/storage/index.ts
CHANGED
@@ -8,6 +8,7 @@ import {
   TABLE_MESSAGES,
   TABLE_THREADS,
   TABLE_TRACES,
+  TABLE_RESOURCES,
   TABLE_WORKFLOW_SNAPSHOT,
   TABLE_EVALS,
 } from '@mastra/core/storage';
@@ -16,6 +17,7 @@ import type {
   PaginationInfo,
   StorageColumn,
   StorageGetMessagesArg,
+  StorageResourceType,
   TABLE_NAMES,
   WorkflowRun,
   WorkflowRuns,
@@ -101,9 +103,11 @@ export class PostgresStore extends MastraStorage {

   public get supports(): {
     selectByIncludeResourceScope: boolean;
+    resourceWorkingMemory: boolean;
   } {
     return {
       selectByIncludeResourceScope: true,
+      resourceWorkingMemory: true,
     };
   }

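The supports getter now also advertises resourceWorkingMemory. A minimal feature-detection sketch; the constructor options shown here are an assumption, since the constructor is not part of this diff:

// Sketch only: check the capability before relying on resource working memory.
import { PostgresStore } from '@mastra/pg';

const store = new PostgresStore({ connectionString: process.env.DATABASE_URL! }); // options assumed

if (store.supports.resourceWorkingMemory) {
  // Safe to call getResourceById / saveResource / updateResource.
}
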
@@ -1024,7 +1028,10 @@ export class PostgresStore extends MastraStorage {
     }

     try {
-      const perPage =
+      const perPage =
+        perPageInput !== undefined
+          ? perPageInput
+          : this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
       const currentOffset = page * perPage;

       const conditions: string[] = [`thread_id = $1`];
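When no explicit perPage is passed, the page size now comes from resolveMessageLimit. A standalone sketch of that resolution order, assuming resolveMessageLimit (defined in @mastra/core, not shown in this diff) simply prefers selectBy.last over the 40-message default:

// Hypothetical helper mirroring the fallback above: explicit perPage wins,
// then selectBy.last, then the default of 40.
function resolvePerPage(perPageInput?: number, last?: number | false): number {
  if (perPageInput !== undefined) return perPageInput;
  if (typeof last === 'number') return last;
  return 40;
}

// resolvePerPage(undefined, 10) -> 10; resolvePerPage(undefined) -> 40
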
@@ -1045,7 +1052,7 @@
       const countResult = await this.db.one(countQuery, queryParams);
       const total = parseInt(countResult.count, 10);

-      if (total === 0) {
+      if (total === 0 && messages.length === 0) {
         return {
           messages: [],
           total: 0,
@@ -1055,10 +1062,14 @@
         };
       }

+      const excludeIds = messages.map(m => m.id);
+      const excludeIdsParam = excludeIds.map((_, idx) => `$${idx + paramIndex}`).join(', ');
+      paramIndex += excludeIds.length;
+
       const dataQuery = `${selectStatement} FROM ${this.getTableName(
         TABLE_MESSAGES,
-      )} ${whereClause} ${orderByStatement} LIMIT $${paramIndex++} OFFSET $${paramIndex++}`;
-      const rows = await this.db.manyOrNone(dataQuery, [...queryParams, perPage, currentOffset]);
+      )} ${whereClause} ${excludeIds.length ? `AND id NOT IN (${excludeIdsParam})` : ''}${orderByStatement} LIMIT $${paramIndex++} OFFSET $${paramIndex++}`;
+      const rows = await this.db.manyOrNone(dataQuery, [...queryParams, ...excludeIds, perPage, currentOffset]);
       messages.push(...(rows || []));

       const list = new MessageList().add(messages, 'memory');
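The new excludeIds handling keeps messages already pulled in via selectBy.include from being returned a second time by the paginated query: their ids are appended as extra positional parameters and filtered out with NOT IN. A standalone sketch of the placeholder construction (not the store's actual helper):

// Builds the exclusion clause the same way as above: one $n placeholder per
// already-included message id, starting at the current parameter index.
function buildExclusion(excludeIds: string[], startIndex: number) {
  const placeholders = excludeIds.map((_, idx) => `$${startIndex + idx}`).join(', ');
  return {
    clause: excludeIds.length ? `AND id NOT IN (${placeholders})` : '',
    nextIndex: startIndex + excludeIds.length,
  };
}

// buildExclusion(['m1', 'm2'], 4) -> { clause: 'AND id NOT IN ($4, $5)', nextIndex: 6 }
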
@@ -1140,7 +1151,13 @@
           }
           return t.none(
             `INSERT INTO ${this.getTableName(TABLE_MESSAGES)} (id, thread_id, content, "createdAt", role, type, "resourceId")
-            VALUES ($1, $2, $3, $4, $5, $6, $7)
+            VALUES ($1, $2, $3, $4, $5, $6, $7)
+            ON CONFLICT (id) DO UPDATE SET
+              thread_id = EXCLUDED.thread_id,
+              content = EXCLUDED.content,
+              role = EXCLUDED.role,
+              type = EXCLUDED.type,
+              "resourceId" = EXCLUDED."resourceId"`,
             [
               message.id,
               message.threadId,
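This is the change the new upsert tests exercise: saving a message whose id already exists now updates the row in place (including moving it to a different thread_id) instead of failing on the unique constraint, and "createdAt" is deliberately absent from the update list so the original timestamp is kept. A minimal pg-promise sketch of the same statement outside the store's transaction wrapper; the mastra_messages table name is an assumption, since the real code resolves it via getTableName(TABLE_MESSAGES):

// Sketch only: the per-message upsert issued by saveMessages, lifted out of
// the pg-promise transaction for illustration. Table name is assumed.
import pgPromise from 'pg-promise';

const db = pgPromise()(process.env.DATABASE_URL!);

async function upsertMessage(m: {
  id: string;
  threadId: string;
  content: string;
  createdAt: Date;
  role: string;
  type: string;
  resourceId: string;
}) {
  await db.none(
    `INSERT INTO mastra_messages (id, thread_id, content, "createdAt", role, type, "resourceId")
     VALUES ($1, $2, $3, $4, $5, $6, $7)
     ON CONFLICT (id) DO UPDATE SET
       thread_id = EXCLUDED.thread_id,
       content = EXCLUDED.content,
       role = EXCLUDED.role,
       type = EXCLUDED.type,
       "resourceId" = EXCLUDED."resourceId"`,
    [m.id, m.threadId, m.content, m.createdAt, m.role, m.type, m.resourceId],
  );
}
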
@@ -1658,4 +1675,101 @@
       return message;
     });
   }
+
+  async getResourceById({ resourceId }: { resourceId: string }): Promise<StorageResourceType | null> {
+    const tableName = this.getTableName(TABLE_RESOURCES);
+    const result = await this.db.oneOrNone<StorageResourceType>(`SELECT * FROM ${tableName} WHERE id = $1`, [
+      resourceId,
+    ]);
+
+    if (!result) {
+      return null;
+    }
+
+    return {
+      ...result,
+      // Ensure workingMemory is always returned as a string, regardless of automatic parsing
+      workingMemory:
+        typeof result.workingMemory === 'object' ? JSON.stringify(result.workingMemory) : result.workingMemory,
+      metadata: typeof result.metadata === 'string' ? JSON.parse(result.metadata) : result.metadata,
+    };
+  }
+
+  async saveResource({ resource }: { resource: StorageResourceType }): Promise<StorageResourceType> {
+    const tableName = this.getTableName(TABLE_RESOURCES);
+    await this.db.none(
+      `INSERT INTO ${tableName} (id, "workingMemory", metadata, "createdAt", "updatedAt")
+      VALUES ($1, $2, $3, $4, $5)`,
+      [
+        resource.id,
+        resource.workingMemory,
+        JSON.stringify(resource.metadata),
+        resource.createdAt.toISOString(),
+        resource.updatedAt.toISOString(),
+      ],
+    );
+
+    return resource;
+  }
+
+  async updateResource({
+    resourceId,
+    workingMemory,
+    metadata,
+  }: {
+    resourceId: string;
+    workingMemory?: string;
+    metadata?: Record<string, unknown>;
+  }): Promise<StorageResourceType> {
+    const existingResource = await this.getResourceById({ resourceId });
+
+    if (!existingResource) {
+      // Create new resource if it doesn't exist
+      const newResource: StorageResourceType = {
+        id: resourceId,
+        workingMemory,
+        metadata: metadata || {},
+        createdAt: new Date(),
+        updatedAt: new Date(),
+      };
+      return this.saveResource({ resource: newResource });
+    }
+
+    const updatedResource = {
+      ...existingResource,
+      workingMemory: workingMemory !== undefined ? workingMemory : existingResource.workingMemory,
+      metadata: {
+        ...existingResource.metadata,
+        ...metadata,
+      },
+      updatedAt: new Date(),
+    };
+
+    const tableName = this.getTableName(TABLE_RESOURCES);
+    const updates: string[] = [];
+    const values: any[] = [];
+    let paramIndex = 1;
+
+    if (workingMemory !== undefined) {
+      updates.push(`"workingMemory" = $${paramIndex}`);
+      values.push(workingMemory);
+      paramIndex++;
+    }
+
+    if (metadata) {
+      updates.push(`metadata = $${paramIndex}`);
+      values.push(JSON.stringify(updatedResource.metadata));
+      paramIndex++;
+    }
+
+    updates.push(`"updatedAt" = $${paramIndex}`);
+    values.push(updatedResource.updatedAt.toISOString());
+    paramIndex++;
+
+    values.push(resourceId);
+
+    await this.db.none(`UPDATE ${tableName} SET ${updates.join(', ')} WHERE id = $${paramIndex}`, values);
+
+    return updatedResource;
+  }
 }
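Together, getResourceById, saveResource, and updateResource back the resourceWorkingMemory capability advertised above. A hedged usage sketch; the constructor options are an assumption, while the create-if-missing behavior of updateResource follows directly from the code above:

// Sketch only: exercising the new resource working-memory methods.
import { PostgresStore } from '@mastra/pg';

const store = new PostgresStore({ connectionString: process.env.DATABASE_URL! }); // options assumed

async function demo() {
  // Creates the row if it does not exist yet, otherwise patches only the
  // provided fields and merges metadata.
  const resource = await store.updateResource({
    resourceId: 'user-123',
    workingMemory: '# Preferences\n- prefers metric units',
    metadata: { plan: 'pro' },
  });

  // workingMemory is normalized back to a string, metadata to an object.
  const reloaded = await store.getResourceById({ resourceId: resource.id });
  console.log(reloaded?.workingMemory, reloaded?.metadata);
}

demo().catch(console.error);
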
package/src/vector/filter.test.ts
CHANGED
@@ -54,7 +54,7 @@ describe('PGFilterTranslator', () => {
         translator.translate({
           nested: {
             field: 'value',
-          },
+          } as any,
         }),
       ).toEqual({
         'nested.field': { $eq: 'value' },
@@ -634,7 +634,7 @@ describe('PGFilterTranslator', () => {

     it('throws error for $not if not an object', () => {
       expect(() => translator.translate({ $not: 'value' })).toThrow();
-      expect(() => translator.translate({ $not: [{ field: 'value' }] })).toThrow();
+      expect(() => translator.translate({ $not: [{ field: 'value' }] } as any)).toThrow();
     });
     it('throws error for $not if empty', () => {
       expect(() => translator.translate({ $not: {} })).toThrow();
@@ -645,21 +645,21 @@ describe('PGFilterTranslator', () => {
       // $and cannot be used in field conditions
       expect(() =>
         translator.translate({
-          field: { $and: [{ $eq: 'value1' }, { $eq: 'value2' }] },
+          field: { $and: [{ $eq: 'value1' }, { $eq: 'value2' }] } as any,
         }),
       ).toThrow();

       // $or cannot be used in field conditions
       expect(() =>
         translator.translate({
-          field: { $or: [{ $eq: 'value1' }, { $eq: 'value2' }] },
+          field: { $or: [{ $eq: 'value1' }, { $eq: 'value2' }] } as any,
         }),
       ).toThrow();

       // $nor cannot be used in field conditions
       expect(() =>
         translator.translate({
-          field: { $nor: [{ $eq: 'value1' }, { $eq: 'value2' }] },
+          field: { $nor: [{ $eq: 'value1' }, { $eq: 'value2' }] } as any,
         }),
       ).toThrow();
     });
@@ -710,7 +710,7 @@ describe('PGFilterTranslator', () => {
           $gt: {
             $or: [{ subfield: 'value1' }, { subfield: 'value2' }],
           },
-        },
+        } as any,
       }),
     ).toThrow();

@@ -721,7 +721,7 @@ describe('PGFilterTranslator', () => {
         $in: [
           {
             $and: [{ subfield: 'value1' }, { subfield: 'value2' }],
-          },
+          } as any,
         ],
       },
     }),
@@ -730,7 +730,7 @@ describe('PGFilterTranslator', () => {

     it('validates $not operator structure', () => {
       // $not must be an object
-      expect(() => translator.translate({ field: { $not: 'value' } })).toThrow();
+      expect(() => translator.translate({ field: { $not: 'value' } } as any)).toThrow();
       expect(() => translator.translate({ field: { $not: ['value'] } })).toThrow();
       expect(() => translator.translate({ $not: 'value' })).toThrow();
       expect(() => translator.translate({ $not: ['value'] })).toThrow();
@@ -790,7 +790,7 @@ describe('PGFilterTranslator', () => {
     });

     it('throws error for non-logical operators at top level', () => {
-      const invalidFilters = [{ $gt: 100 }, { $in: ['value1', 'value2'] }, { $eq: true }];
+      const invalidFilters: any = [{ $gt: 100 }, { $in: ['value1', 'value2'] }, { $eq: true }];

       invalidFilters.forEach(filter => {
         expect(() => translator.translate(filter)).toThrow(/Invalid top-level operator/);
@@ -808,13 +808,13 @@ describe('PGFilterTranslator', () => {
       // Should throw for non-object values
       expect(() =>
         translator.translate({
-          field: { $elemMatch: 'value' },
+          field: { $elemMatch: 'value' } as any,
         }),
       ).toThrow('$elemMatch requires an object with conditions');

       expect(() =>
         translator.translate({
-          field: { $elemMatch: ['value'] },
+          field: { $elemMatch: ['value'] } as any,
         }),
       ).toThrow('$elemMatch requires an object with conditions');
     });
@@ -917,7 +917,7 @@ describe('PGFilterTranslator', () => {
       translator.translate({
         nested: {
           field: { $regex: 'pattern', $options: 'i' },
-        },
+        } as any,
       }),
     ).toEqual({
       'nested.field': { $regex: '(?i)pattern' },