@mastra/pg 0.12.3 → 0.12.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +55 -0
- package/dist/_tsup-dts-rollup.d.cts +346 -63
- package/dist/_tsup-dts-rollup.d.ts +346 -63
- package/dist/index.cjs +1610 -1117
- package/dist/index.js +1611 -1118
- package/package.json +4 -4
- package/src/storage/domains/legacy-evals/index.ts +151 -0
- package/src/storage/domains/memory/index.ts +900 -0
- package/src/storage/domains/operations/index.ts +368 -0
- package/src/storage/domains/scores/index.ts +231 -0
- package/src/storage/domains/traces/index.ts +160 -0
- package/src/storage/domains/utils.ts +12 -0
- package/src/storage/domains/workflows/index.ts +253 -0
- package/src/storage/index.test.ts +5 -2389
- package/src/storage/index.ts +157 -1545
- package/src/storage/test-utils.ts +368 -0
- package/src/vector/index.test.ts +89 -0
- package/src/vector/index.ts +3 -0
package/src/storage/test-utils.ts
ADDED
@@ -0,0 +1,368 @@
+import { createSampleThread } from '@internal/storage-test-utils';
+import type { StorageColumn, TABLE_NAMES } from '@mastra/core/storage';
+import pgPromise from 'pg-promise';
+import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it } from 'vitest';
+import { PostgresStore } from '.';
+import type { PostgresConfig } from '.';
+
+export const TEST_CONFIG: PostgresConfig = {
+  host: process.env.POSTGRES_HOST || 'localhost',
+  port: Number(process.env.POSTGRES_PORT) || 5434,
+  database: process.env.POSTGRES_DB || 'postgres',
+  user: process.env.POSTGRES_USER || 'postgres',
+  password: process.env.POSTGRES_PASSWORD || 'postgres',
+};
+
+export const connectionString = `postgresql://${TEST_CONFIG.user}:${TEST_CONFIG.password}@${TEST_CONFIG.host}:${TEST_CONFIG.port}/${TEST_CONFIG.database}`;
+
+export function pgTests() {
+  let store: PostgresStore;
+
+  describe('PG specific tests', () => {
+    beforeAll(async () => {
+      store = new PostgresStore(TEST_CONFIG);
+    });
+    afterAll(async () => {
+      try {
+        await store.close();
+      } catch {}
+    });
+
+    describe('Public Fields Access', () => {
+      it('should expose db field as public', () => {
+        expect(store.db).toBeDefined();
+        expect(typeof store.db).toBe('object');
+        expect(store.db.query).toBeDefined();
+        expect(typeof store.db.query).toBe('function');
+      });
+
+      it('should expose pgp field as public', () => {
+        expect(store.pgp).toBeDefined();
+        expect(typeof store.pgp).toBe('function');
+        expect(store.pgp.end).toBeDefined();
+        expect(typeof store.pgp.end).toBe('function');
+      });
+
+      it('should allow direct database queries via public db field', async () => {
+        const result = await store.db.one('SELECT 1 as test');
+        expect(result.test).toBe(1);
+      });
+
+      it('should allow access to pgp utilities via public pgp field', () => {
+        const helpers = store.pgp.helpers;
+        expect(helpers).toBeDefined();
+        expect(helpers.insert).toBeDefined();
+        expect(helpers.update).toBeDefined();
+      });
+
+      it('should maintain connection state through public db field', async () => {
+        // Test multiple queries to ensure connection state
+        const result1 = await store.db.one('SELECT NOW() as timestamp1');
+        const result2 = await store.db.one('SELECT NOW() as timestamp2');
+
+        expect(result1.timestamp1).toBeDefined();
+        expect(result2.timestamp2).toBeDefined();
+        expect(new Date(result2.timestamp2).getTime()).toBeGreaterThanOrEqual(new Date(result1.timestamp1).getTime());
+      });
+
+      it('should throw error when pool is used after disconnect', async () => {
+        await store.close();
+        await expect(store.db.connect()).rejects.toThrow();
+        store = new PostgresStore(TEST_CONFIG);
+        await store.init();
+      });
+    });
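
What this group pins down for consumers: PostgresStore now exposes its underlying pg-promise handles (db and pgp) as public fields, so application code can drop down to raw SQL. A hedged usage sketch — only db, pgp, init, and close are taken from the tests above; the query and table name are illustrative:

import { PostgresStore } from '@mastra/pg';

const store = new PostgresStore({ connectionString: process.env.DATABASE_URL! });
await store.init();

// store.db is the pg-promise database handle; raw queries work directly.
const { now } = await store.db.one('SELECT NOW() as now');

// store.pgp is the pg-promise root object, so its helpers are available too,
// e.g. generating an INSERT statement ('my_table' is a made-up name).
const insertSql = store.pgp.helpers.insert({ id: '1', name: 'Alice' }, null, 'my_table');

await store.close();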
+
+    describe('PgStorage Table Name Quoting', () => {
+      const camelCaseTable = 'TestCamelCaseTable';
+      const snakeCaseTable = 'test_snake_case_table';
+      const BASE_SCHEMA = {
+        id: { type: 'integer', primaryKey: true, nullable: false },
+        name: { type: 'text', nullable: true },
+      } as Record<string, StorageColumn>;
+
+      beforeEach(async () => {
+        // Only clear tables if store is initialized
+        try {
+          // Clear tables before each test
+          await store.clearTable({ tableName: camelCaseTable as TABLE_NAMES });
+          await store.clearTable({ tableName: snakeCaseTable as TABLE_NAMES });
+        } catch (error) {
+          // Ignore errors during table clearing
+          console.warn('Error clearing tables:', error);
+        }
+      });
+
+      afterEach(async () => {
+        // Only clear tables if store is initialized
+        try {
+          // Clear tables before each test
+          await store.clearTable({ tableName: camelCaseTable as TABLE_NAMES });
+          await store.clearTable({ tableName: snakeCaseTable as TABLE_NAMES });
+        } catch (error) {
+          // Ignore errors during table clearing
+          console.warn('Error clearing tables:', error);
+        }
+      });
+
+      it('should create and upsert to a camelCase table without quoting errors', async () => {
+        await expect(
+          store.createTable({
+            tableName: camelCaseTable as TABLE_NAMES,
+            schema: BASE_SCHEMA,
+          }),
+        ).resolves.not.toThrow();
+
+        await store.insert({
+          tableName: camelCaseTable as TABLE_NAMES,
+          record: { id: '1', name: 'Alice' },
+        });
+
+        const row: any = await store.load({
+          tableName: camelCaseTable as TABLE_NAMES,
+          keys: { id: '1' },
+        });
+        expect(row?.name).toBe('Alice');
+      });
+
+      it('should create and upsert to a snake_case table without quoting errors', async () => {
+        await expect(
+          store.createTable({
+            tableName: snakeCaseTable as TABLE_NAMES,
+            schema: BASE_SCHEMA,
+          }),
+        ).resolves.not.toThrow();
+
+        await store.insert({
+          tableName: snakeCaseTable as TABLE_NAMES,
+          record: { id: '2', name: 'Bob' },
+        });
+
+        const row: any = await store.load({
+          tableName: snakeCaseTable as TABLE_NAMES,
+          keys: { id: '2' },
+        });
+        expect(row?.name).toBe('Bob');
+      });
+    });
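
Background on why identifier quoting needs dedicated tests: PostgreSQL folds unquoted identifiers to lowercase, so a camelCase table is only reachable when every statement double-quotes its name. A minimal illustration against a pg-promise handle such as store.db (the CREATE/SELECT statements are illustrative, not the store's internals):

// Created with quotes, the mixed-case name is preserved:
await store.db.none('CREATE TABLE "TestCamelCaseTable" (id integer PRIMARY KEY, name text)');

// Unquoted, PostgreSQL folds the identifier to lowercase and the lookup fails:
//   SELECT * FROM TestCamelCaseTable
//   → ERROR: relation "testcamelcasetable" does not exist

// Quoted access works as expected:
const row = await store.db.one('SELECT name FROM "TestCamelCaseTable" WHERE id = $1', [1]);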
+
+    describe('Permission Handling', () => {
+      const schemaRestrictedUser = 'mastra_schema_restricted_storage';
+      const restrictedPassword = 'test123';
+      const testSchema = 'testSchema';
+      let adminDb: pgPromise.IDatabase<{}>;
+      let pgpAdmin: pgPromise.IMain;
+
+      beforeAll(async () => {
+        // Re-initialize the main store for subsequent tests
+
+        await store.init();
+
+        // Create a separate pg-promise instance for admin operations
+        pgpAdmin = pgPromise();
+        adminDb = pgpAdmin(connectionString);
+        try {
+          await adminDb.tx(async t => {
+            // Drop the test schema if it exists from previous runs
+            await t.none(`DROP SCHEMA IF EXISTS ${testSchema} CASCADE`);
+
+            // Create schema restricted user with minimal permissions
+            await t.none(`
+              DO $$
+              BEGIN
+                IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = '${schemaRestrictedUser}') THEN
+                  CREATE USER ${schemaRestrictedUser} WITH PASSWORD '${restrictedPassword}' NOCREATEDB;
+                END IF;
+              END
+            $$;`);
+
+            // Grant only connect and usage to schema restricted user
+            await t.none(`
+              REVOKE ALL ON DATABASE ${(TEST_CONFIG as any).database} FROM ${schemaRestrictedUser};
+              GRANT CONNECT ON DATABASE ${(TEST_CONFIG as any).database} TO ${schemaRestrictedUser};
+              REVOKE ALL ON SCHEMA public FROM ${schemaRestrictedUser};
+              GRANT USAGE ON SCHEMA public TO ${schemaRestrictedUser};
+            `);
+          });
+        } catch (error) {
+          // Clean up the database connection on error
+          pgpAdmin.end();
+          throw error;
+        }
+      });
+
+      afterAll(async () => {
+        try {
+          // Then clean up test user in admin connection
+          await adminDb.tx(async t => {
+            await t.none(`
+              REASSIGN OWNED BY ${schemaRestrictedUser} TO postgres;
+              DROP OWNED BY ${schemaRestrictedUser};
+              DROP USER IF EXISTS ${schemaRestrictedUser};
+            `);
+          });
+
+          // Finally clean up admin connection
+          if (pgpAdmin) {
+            pgpAdmin.end();
+          }
+        } catch (error) {
+          console.error('Error cleaning up test user:', error);
+          if (pgpAdmin) pgpAdmin.end();
+        }
+      });
+
+      describe('Schema Creation', () => {
+        beforeEach(async () => {
+          // Create a fresh connection for each test
+          const tempPgp = pgPromise();
+          const tempDb = tempPgp(connectionString);
+
+          try {
+            // Ensure schema doesn't exist before each test
+            await tempDb.none(`DROP SCHEMA IF EXISTS ${testSchema} CASCADE`);
+
+            // Ensure no active connections from restricted user
+            await tempDb.none(`
+              SELECT pg_terminate_backend(pid)
+              FROM pg_stat_activity
+              WHERE usename = '${schemaRestrictedUser}'
+            `);
+          } finally {
+            tempPgp.end(); // Always clean up the connection
+          }
+        });
+
+        afterEach(async () => {
+          // Create a fresh connection for cleanup
+          const tempPgp = pgPromise();
+          const tempDb = tempPgp(connectionString);
+
+          try {
+            // Clean up any connections from the restricted user and drop schema
+            await tempDb.none(`
+              DO $$
+              BEGIN
+                -- Terminate connections
+                PERFORM pg_terminate_backend(pid)
+                FROM pg_stat_activity
+                WHERE usename = '${schemaRestrictedUser}';
+
+                -- Drop schema
+                DROP SCHEMA IF EXISTS ${testSchema} CASCADE;
+              END $$;
+            `);
+          } catch (error) {
+            console.error('Error in afterEach cleanup:', error);
+          } finally {
+            tempPgp.end(); // Always clean up the connection
+          }
+        });
+
+        it('should fail when user lacks CREATE privilege', async () => {
+          const restrictedDB = new PostgresStore({
+            ...TEST_CONFIG,
+            user: schemaRestrictedUser,
+            password: restrictedPassword,
+            schemaName: testSchema,
+          });
+
+          // Create a fresh connection for verification
+          const tempPgp = pgPromise();
+          const tempDb = tempPgp(connectionString);
+
+          try {
+            // Test schema creation by initializing the store
+            await expect(async () => {
+              await restrictedDB.init();
+            }).rejects.toThrow(
+              `Unable to create schema "${testSchema}". This requires CREATE privilege on the database.`,
+            );
+
+            // Verify schema was not created
+            const exists = await tempDb.oneOrNone(
+              `SELECT EXISTS (SELECT 1 FROM information_schema.schemata WHERE schema_name = $1)`,
+              [testSchema],
+            );
+            expect(exists?.exists).toBe(false);
+          } finally {
+            await restrictedDB.close();
+            tempPgp.end(); // Clean up the verification connection
+          }
+        });
+
+        it('should fail with schema creation error when saving thread', async () => {
+          const restrictedDB = new PostgresStore({
+            ...TEST_CONFIG,
+            user: schemaRestrictedUser,
+            password: restrictedPassword,
+            schemaName: testSchema,
+          });
+
+          // Create a fresh connection for verification
+          const tempPgp = pgPromise();
+          const tempDb = tempPgp(connectionString);
+
+          try {
+            await expect(async () => {
+              await restrictedDB.init();
+              const thread = createSampleThread();
+              await restrictedDB.saveThread({ thread });
+            }).rejects.toThrow(
+              `Unable to create schema "${testSchema}". This requires CREATE privilege on the database.`,
+            );
+
+            // Verify schema was not created
+            const exists = await tempDb.oneOrNone(
+              `SELECT EXISTS (SELECT 1 FROM information_schema.schemata WHERE schema_name = $1)`,
+              [testSchema],
+            );
+            expect(exists?.exists).toBe(false);
+          } finally {
+            await restrictedDB.close();
+            tempPgp.end(); // Clean up the verification connection
+          }
+        });
+      });
+    });
+
+    describe('Validation', () => {
+      const validConfig = TEST_CONFIG as any;
+      it('throws if connectionString is empty', () => {
+        expect(() => new PostgresStore({ connectionString: '' })).toThrow();
+        expect(() => new PostgresStore({ ...validConfig, connectionString: '' })).toThrow();
+      });
+      it('throws if host is missing or empty', () => {
+        expect(() => new PostgresStore({ ...validConfig, host: '' })).toThrow();
+        const { host, ...rest } = validConfig;
+        expect(() => new PostgresStore(rest as any)).toThrow();
+      });
+      it('throws if connectionString is empty', () => {
+        expect(() => new PostgresStore({ connectionString: '' })).toThrow();
+        const { database, ...rest } = validConfig;
+        expect(() => new PostgresStore(rest as any)).toThrow();
+      });
+      it('throws if user is missing or empty', () => {
+        expect(() => new PostgresStore({ ...validConfig, user: '' })).toThrow();
+        const { user, ...rest } = validConfig;
+        expect(() => new PostgresStore(rest as any)).toThrow();
+      });
+      it('throws if database is missing or empty', () => {
+        expect(() => new PostgresStore({ ...validConfig, database: '' })).toThrow();
+        const { database, ...rest } = validConfig;
+        expect(() => new PostgresStore(rest as any)).toThrow();
+      });
+      it('throws if password is missing or empty', () => {
+        expect(() => new PostgresStore({ ...validConfig, password: '' })).toThrow();
+        const { password, ...rest } = validConfig;
+        expect(() => new PostgresStore(rest as any)).toThrow();
+      });
+      it('does not throw on valid config (host-based)', () => {
+        expect(() => new PostgresStore(validConfig)).not.toThrow();
+      });
+      it('does not throw on non-empty connection string', () => {
+        expect(() => new PostgresStore({ connectionString })).not.toThrow();
+      });
+    });
+  });
+}
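
With the PG-specific suite extracted into this helper, the main test file shrank from roughly 2,400 lines to 5 (see package/src/storage/index.test.ts +5 -2389 in the file list above), which suggests it now simply invokes the shared export. A hypothetical reconstruction of such an entry point — the actual contents of the released index.test.ts are not shown in this diff:

import { pgTests } from './test-utils';

pgTests();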
package/src/vector/index.test.ts
CHANGED
@@ -1988,6 +1988,95 @@ describe('PgVector', () => {
       expect(results[0]?.score).toBeCloseTo(1, 5);
       expect(results[1]?.score).toBeGreaterThan(0.9);
     });
+
+    // NEW TEST: Reproduce the SET LOCAL bug
+    it('should verify that ef_search parameter is actually being set (reproduces SET LOCAL bug)', async () => {
+      const client = await vectorDB.pool.connect();
+      try {
+        // Test current behavior: SET LOCAL without transaction should have no effect
+        await client.query('SET LOCAL hnsw.ef_search = 500');
+
+        // Check if the parameter was actually set
+        const result = await client.query('SHOW hnsw.ef_search');
+        const currentValue = result.rows[0]['hnsw.ef_search'];
+
+        // The value should still be the default (not 500)
+        expect(parseInt(currentValue)).not.toBe(500);
+
+        // Now test with proper transaction
+        await client.query('BEGIN');
+        await client.query('SET LOCAL hnsw.ef_search = 500');
+
+        const resultInTransaction = await client.query('SHOW hnsw.ef_search');
+        const valueInTransaction = resultInTransaction.rows[0]['hnsw.ef_search'];
+
+        // This should work because we're in a transaction
+        expect(parseInt(valueInTransaction)).toBe(500);
+
+        await client.query('ROLLBACK');
+
+        // After rollback, should return to default
+        const resultAfterRollback = await client.query('SHOW hnsw.ef_search');
+        const valueAfterRollback = resultAfterRollback.rows[0]['hnsw.ef_search'];
+        expect(parseInt(valueAfterRollback)).not.toBe(500);
+      } finally {
+        client.release();
+      }
+    });
+
+    // Verify the fix works - ef parameter is properly applied in query method
+    it('should properly apply ef parameter using transactions (verifies fix)', async () => {
+      const client = await vectorDB.pool.connect();
+      const queryCommands: string[] = [];
+
+      // Spy on the client query method to capture all SQL commands
+      const originalClientQuery = client.query;
+      const clientQuerySpy = vi.fn().mockImplementation((query, ...args) => {
+        if (typeof query === 'string') {
+          queryCommands.push(query);
+        }
+        return originalClientQuery.call(client, query, ...args);
+      });
+      client.query = clientQuerySpy;
+
+      try {
+        // Manually release the client so query() can get a fresh one
+        client.release();
+
+        await vectorDB.query({
+          indexName,
+          queryVector: [1, 0, 0],
+          topK: 2,
+          ef: 128,
+        });
+
+        const testClient = await vectorDB.pool.connect();
+        try {
+          // Test that SET LOCAL works within a transaction
+          await testClient.query('BEGIN');
+          await testClient.query('SET LOCAL hnsw.ef_search = 256');
+
+          const result = await testClient.query('SHOW hnsw.ef_search');
+          const value = result.rows[0]['hnsw.ef_search'];
+          expect(parseInt(value)).toBe(256);
+
+          await testClient.query('ROLLBACK');
+
+          // After rollback, should revert
+          const resultAfter = await testClient.query('SHOW hnsw.ef_search');
+          const valueAfter = resultAfter.rows[0]['hnsw.ef_search'];
+          expect(parseInt(valueAfter)).not.toBe(256);
+        } finally {
+          testClient.release();
+        }
+      } finally {
+        // Restore original function if client is still connected
+        if (client.query === clientQuerySpy) {
+          client.query = originalClientQuery;
+        }
+        clientQuerySpy.mockRestore();
+      }
+    });
   });

   describe('IVF Parameters', () => {
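
For context on what these two tests pin down: PostgreSQL ignores SET LOCAL issued outside an explicit transaction block (the server only emits a warning), so the session keeps the default ef_search value — exactly the bug the first test reproduces and the transaction wrapping in index.ts below fixes. A minimal standalone demonstration with the pg driver, assuming a reachable server with the pgvector extension so the hnsw.ef_search GUC exists (connection details are placeholders):

import { Pool } from 'pg';

const pool = new Pool({ connectionString: 'postgresql://postgres:postgres@localhost:5434/postgres' });

async function demo() {
  const client = await pool.connect();
  try {
    // Outside a transaction block: no effect (PostgreSQL warns and keeps the default).
    await client.query('SET LOCAL hnsw.ef_search = 500');
    console.log((await client.query('SHOW hnsw.ef_search')).rows[0]); // default, e.g. { 'hnsw.ef_search': '40' }

    // Inside a transaction block: takes effect until COMMIT/ROLLBACK.
    await client.query('BEGIN');
    await client.query('SET LOCAL hnsw.ef_search = 500');
    console.log((await client.query('SHOW hnsw.ef_search')).rows[0]); // { 'hnsw.ef_search': '500' }
    await client.query('ROLLBACK');
  } finally {
    client.release();
    await pool.end();
  }
}

demo().catch(console.error);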
package/src/vector/index.ts
CHANGED
@@ -200,6 +200,7 @@ export class PgVector extends MastraVector<PGVectorFilter> {

     const client = await this.pool.connect();
     try {
+      await client.query('BEGIN');
       const vectorStr = `[${queryVector.join(',')}]`;
       const translatedFilter = this.transformFilter(filter);
       const { sql: filterQuery, values: filterValues } = buildFilterQuery(translatedFilter, minScore, topK);
@@ -237,6 +238,7 @@ export class PgVector extends MastraVector<PGVectorFilter> {
         ORDER BY score DESC
         LIMIT $2`;
       const result = await client.query(query, filterValues);
+      await client.query('COMMIT');

       return result.rows.map(({ id, score, metadata, embedding }) => ({
         id,
@@ -245,6 +247,7 @@ export class PgVector extends MastraVector<PGVectorFilter> {
         ...(includeVector && embedding && { vector: JSON.parse(embedding) }),
       }));
     } catch (error) {
+      await client.query('ROLLBACK');
       const mastraError = new MastraError(
         {
           id: 'MASTRA_STORAGE_PG_VECTOR_QUERY_FAILED',