@mastra/upstash 1.0.0-beta.10 → 1.0.0-beta.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +72 -0
- package/dist/docs/README.md +1 -1
- package/dist/docs/SKILL.md +1 -1
- package/dist/docs/SOURCE_MAP.json +1 -1
- package/dist/docs/memory/01-working-memory.md +10 -6
- package/dist/docs/rag/01-vector-databases.md +10 -5
- package/dist/docs/rag/02-retrieval.md +5 -6
- package/dist/index.cjs +21 -21
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +21 -21
- package/dist/index.js.map +1 -1
- package/dist/storage/domains/workflows/index.d.ts +1 -0
- package/dist/storage/domains/workflows/index.d.ts.map +1 -1
- package/package.json +4 -4
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,77 @@
 # @mastra/upstash
 
+## 1.0.0-beta.11
+
+### Patch Changes
+
+- Aligned vector store configuration with underlying library APIs, giving you access to all library options directly. ([#11742](https://github.com/mastra-ai/mastra/pull/11742))
+
+  **Why this change?**
+
+  Previously, each vector store defined its own configuration types that only exposed a subset of the underlying library's options. This meant users couldn't access advanced features like authentication, SSL, compression, or custom headers without creating their own client instances. Now, the configuration types extend the library types directly, so all options are available.
+
+  **@mastra/libsql** (Breaking)
+
+  Renamed `connectionUrl` to `url` to match the `@libsql/client` API and align with LibSQLStorage.
+
+  ```typescript
+  // Before
+  new LibSQLVector({ id: 'my-vector', connectionUrl: 'file:./db.sqlite' });
+
+  // After
+  new LibSQLVector({ id: 'my-vector', url: 'file:./db.sqlite' });
+  ```
+
+  **@mastra/opensearch** (Breaking)
+
+  Renamed `url` to `node` and added support for all OpenSearch `ClientOptions` including authentication, SSL, and compression.
+
+  ```typescript
+  // Before
+  new OpenSearchVector({ id: 'my-vector', url: 'http://localhost:9200' });
+
+  // After
+  new OpenSearchVector({ id: 'my-vector', node: 'http://localhost:9200' });
+
+  // With authentication (now possible)
+  new OpenSearchVector({
+    id: 'my-vector',
+    node: 'https://localhost:9200',
+    auth: { username: 'admin', password: 'admin' },
+    ssl: { rejectUnauthorized: false },
+  });
+  ```
+
+  **@mastra/pinecone** (Breaking)
+
+  Removed `environment` parameter. Use `controllerHostUrl` instead (the actual Pinecone SDK field name). Added support for all `PineconeConfiguration` options.
+
+  ```typescript
+  // Before
+  new PineconeVector({ id: 'my-vector', apiKey: '...', environment: '...' });
+
+  // After
+  new PineconeVector({ id: 'my-vector', apiKey: '...' });
+
+  // With custom controller host (if needed)
+  new PineconeVector({ id: 'my-vector', apiKey: '...', controllerHostUrl: '...' });
+  ```
+
+  **@mastra/clickhouse**
+
+  Added support for all `ClickHouseClientConfigOptions` like `request_timeout`, `compression`, `keep_alive`, and `database`. Existing configurations continue to work unchanged.
+
+  **@mastra/cloudflare, @mastra/cloudflare-d1, @mastra/lance, @mastra/libsql, @mastra/mongodb, @mastra/pg, @mastra/upstash**
+
+  Improved logging by replacing `console.warn` with structured logger in workflow storage domains.
+
+  **@mastra/deployer-cloud**
+
+  Updated internal LibSQLVector configuration for compatibility with the new API.
+
+- Updated dependencies [[`ebae12a`](https://github.com/mastra-ai/mastra/commit/ebae12a2dd0212e75478981053b148a2c246962d), [`c61a0a5`](https://github.com/mastra-ai/mastra/commit/c61a0a5de4904c88fd8b3718bc26d1be1c2ec6e7), [`69136e7`](https://github.com/mastra-ai/mastra/commit/69136e748e32f57297728a4e0f9a75988462f1a7), [`449aed2`](https://github.com/mastra-ai/mastra/commit/449aed2ba9d507b75bf93d427646ea94f734dfd1), [`eb648a2`](https://github.com/mastra-ai/mastra/commit/eb648a2cc1728f7678768dd70cd77619b448dab9), [`0131105`](https://github.com/mastra-ai/mastra/commit/0131105532e83bdcbb73352fc7d0879eebf140dc), [`9d5059e`](https://github.com/mastra-ai/mastra/commit/9d5059eae810829935fb08e81a9bb7ecd5b144a7), [`ef756c6`](https://github.com/mastra-ai/mastra/commit/ef756c65f82d16531c43f49a27290a416611e526), [`b00ccd3`](https://github.com/mastra-ai/mastra/commit/b00ccd325ebd5d9e37e34dd0a105caae67eb568f), [`3bdfa75`](https://github.com/mastra-ai/mastra/commit/3bdfa7507a91db66f176ba8221aa28dd546e464a), [`e770de9`](https://github.com/mastra-ai/mastra/commit/e770de941a287a49b1964d44db5a5763d19890a6), [`52e2716`](https://github.com/mastra-ai/mastra/commit/52e2716b42df6eff443de72360ae83e86ec23993), [`27b4040`](https://github.com/mastra-ai/mastra/commit/27b4040bfa1a95d92546f420a02a626b1419a1d6), [`610a70b`](https://github.com/mastra-ai/mastra/commit/610a70bdad282079f0c630e0d7bb284578f20151), [`8dc7f55`](https://github.com/mastra-ai/mastra/commit/8dc7f55900395771da851dc7d78d53ae84fe34ec), [`8379099`](https://github.com/mastra-ai/mastra/commit/8379099fc467af6bef54dd7f80c9bd75bf8bbddf), [`8c0ec25`](https://github.com/mastra-ai/mastra/commit/8c0ec25646c8a7df253ed1e5ff4863a0d3f1316c), [`ff4d9a6`](https://github.com/mastra-ai/mastra/commit/ff4d9a6704fc87b31a380a76ed22736fdedbba5a), [`69821ef`](https://github.com/mastra-ai/mastra/commit/69821ef806482e2c44e2197ac0b050c3fe3a5285), [`1ed5716`](https://github.com/mastra-ai/mastra/commit/1ed5716830867b3774c4a1b43cc0d82935f32b96), [`4186bdd`](https://github.com/mastra-ai/mastra/commit/4186bdd00731305726fa06adba0b076a1d50b49f), [`7aaf973`](https://github.com/mastra-ai/mastra/commit/7aaf973f83fbbe9521f1f9e7a4fd99b8de464617)]:
+  - @mastra/core@1.0.0-beta.22
+
 ## 1.0.0-beta.10
 
 ### Patch Changes
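
The @mastra/clickhouse entry above lists the newly forwarded client options without showing them in use. A minimal sketch, assuming the package's store class accepts a flat config that extends `ClickHouseClientConfigOptions`; the `ClickhouseStore` name and exact field spellings are assumptions drawn from `@clickhouse/client`, not from this diff:

```typescript
import { ClickhouseStore } from "@mastra/clickhouse";

// Hypothetical usage of the newly supported client options; verify the class
// name and option names against the released typings before relying on this.
const store = new ClickhouseStore({
  url: process.env.CLICKHOUSE_URL ?? "http://localhost:8123",
  username: process.env.CLICKHOUSE_USER ?? "default",
  password: process.env.CLICKHOUSE_PASSWORD ?? "",
  database: "mastra",                               // now forwarded to the client
  request_timeout: 30_000,                          // milliseconds
  compression: { request: false, response: true },
  keep_alive: { enabled: true },
});
```
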
package/dist/docs/README.md
CHANGED
package/dist/docs/SKILL.md
CHANGED

package/dist/docs/memory/01-working-memory.md
CHANGED

@@ -80,13 +80,15 @@ const memory = new Memory({
 
 ### Usage with Agents
 
-When using resource-scoped memory, make sure to pass the `
+When using resource-scoped memory, make sure to pass the `resource` parameter in the memory options:
 
 ```typescript
-// Resource-scoped memory requires
+// Resource-scoped memory requires resource
 const response = await agent.generate("Hello!", {
-
-
+  memory: {
+    thread: "conversation-123",
+    resource: "user-alice-456", // Same user across different threads
+  },
 });
 ```
 

@@ -339,8 +341,10 @@ const thread = await memory.createThread({
 
 // The agent will now have access to this information in all messages
 await agent.generate("What's my blood type?", {
-
-
+  memory: {
+    thread: thread.id,
+    resource: "user-456",
+  },
 });
 // Response: "Your blood type is O+."
 ```
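
The hunks above show only the agent-side calls. For context, a minimal sketch of the memory-side setup they assume; the `workingMemory.scope: "resource"` option follows Mastra's working-memory docs rather than this diff, so treat the exact option names as assumptions:

```typescript
import { Memory } from "@mastra/memory";

// Sketch: resource-scoped working memory, shared across every thread that
// belongs to the same resource (user). Option names may differ by version.
const memory = new Memory({
  options: {
    workingMemory: {
      enabled: true,
      scope: "resource",
    },
  },
});

// Each call then passes both identifiers, as in the diff above:
// await agent.generate("Hello!", {
//   memory: { thread: "conversation-123", resource: "user-alice-456" },
// });
```
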

package/dist/docs/rag/01-vector-databases.md
CHANGED

@@ -12,6 +12,7 @@ After generating embeddings, you need to store them in a database that supports
 import { MongoDBVector } from "@mastra/mongodb";
 
 const store = new MongoDBVector({
+  id: 'mongodb-vector',
   uri: process.env.MONGODB_URI,
   dbName: process.env.MONGODB_DATABASE,
 });

@@ -144,6 +145,7 @@ await store.upsert({
 import { AstraVector } from "@mastra/astra";
 
 const store = new AstraVector({
+  id: 'astra-vector',
   token: process.env.ASTRA_DB_TOKEN,
   endpoint: process.env.ASTRA_DB_ENDPOINT,
   keyspace: process.env.ASTRA_DB_KEYSPACE,

@@ -170,7 +172,7 @@ import { LibSQLVector } from "@mastra/core/vector/libsql";
 
 const store = new LibSQLVector({
   id: 'libsql-vector',
-
+  url: process.env.DATABASE_URL,
   authToken: process.env.DATABASE_AUTH_TOKEN, // Optional: for Turso cloud databases
 });
 

@@ -217,6 +219,7 @@ await store.upsert({
 import { CloudflareVector } from "@mastra/vectorize";
 
 const store = new CloudflareVector({
+  id: 'cloudflare-vector',
   accountId: process.env.CF_ACCOUNT_ID,
   apiToken: process.env.CF_API_TOKEN,
 });

@@ -238,7 +241,7 @@ await store.upsert({
 ```ts title="vector-store.ts"
 import { OpenSearchVector } from "@mastra/opensearch";
 
-const store = new OpenSearchVector({
+const store = new OpenSearchVector({ id: "opensearch", node: process.env.OPENSEARCH_URL });
 
 await store.createIndex({
   indexName: "my-collection",

@@ -259,7 +262,7 @@ await store.upsert({
 ```ts title="vector-store.ts"
 import { ElasticSearchVector } from "@mastra/elasticsearch";
 
-const store = new ElasticSearchVector({ url: process.env.ELASTICSEARCH_URL });
+const store = new ElasticSearchVector({ id: 'elasticsearch-vector', url: process.env.ELASTICSEARCH_URL });
 
 await store.createIndex({
   indexName: "my-collection",

@@ -280,6 +283,7 @@ await store.upsert({
 import { CouchbaseVector } from "@mastra/couchbase";
 
 const store = new CouchbaseVector({
+  id: 'couchbase-vector',
   connectionString: process.env.COUCHBASE_CONNECTION_STRING,
   username: process.env.COUCHBASE_USERNAME,
   password: process.env.COUCHBASE_PASSWORD,

@@ -331,6 +335,7 @@ For detailed setup instructions and best practices, see the [official LanceDB do
 import { S3Vectors } from "@mastra/s3vectors";
 
 const store = new S3Vectors({
+  id: 's3-vectors',
   vectorBucketName: "my-vector-bucket",
   clientConfig: {
     region: "us-east-1",

@@ -373,7 +378,7 @@ The dimension size must match the output dimension of your chosen embedding mode
 - Cohere embed-multilingual-v3: 1024 dimensions
 - Google text-embedding-004: 768 dimensions (or custom)
 
-
+> **Note:**
 Index dimensions cannot be changed after creation. To use a different model, delete and recreate the index with the new dimension size.
 
 ### Naming Rules for Databases

@@ -537,7 +542,7 @@ The upsert operation:
 
 Vector stores support rich metadata (any JSON-serializable fields) for filtering and organization. Since metadata is stored with no fixed schema, use consistent field naming to avoid unexpected query results.
 
-
+> **Note:**
 Metadata is crucial for vector storage - without it, you'd only have numerical embeddings with no way to return the original text or filter results. Always store at least the source text as metadata.
 
 ```ts
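
The two notes added above (index dimensions are fixed at creation; always keep the source text in metadata) are easier to follow with the store calls side by side. A minimal sketch reusing the LibSQL store from this doc; the `dimension`, `vectors`, and `metadata` field names follow the calls visible elsewhere in the file and should be checked against your installed version:

```typescript
import { LibSQLVector } from "@mastra/core/vector/libsql";

const store = new LibSQLVector({ id: "libsql-vector", url: "file:./vectors.db" });

// Dimension is fixed at creation and must match the embedding model
// (e.g. 1536 for OpenAI text-embedding-3-small); switching models later
// means deleting and recreating the index.
await store.createIndex({ indexName: "my-collection", dimension: 1536 });

// Keep the original chunk text in metadata so query hits can be mapped back
// to something readable.
const vector = Array.from({ length: 1536 }, () => 0); // placeholder embedding
await store.upsert({
  indexName: "my-collection",
  vectors: [vector],
  metadata: [{ text: "original chunk text", source: "docs/intro.md" }],
});
```
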

package/dist/docs/rag/02-retrieval.md
CHANGED

@@ -171,7 +171,7 @@ The Vector Query Tool supports database-specific configurations that enable you
 > **Note:**
 These configurations are for **query-time options** like namespaces, performance tuning, and filtering—not for database connection setup.
 
-Connection credentials (URLs, auth tokens) are configured when you instantiate the vector store class (e.g., `new LibSQLVector({
+Connection credentials (URLs, auth tokens) are configured when you instantiate the vector store class (e.g., `new LibSQLVector({ url: '...' })`).
 
 ```ts
 import { createVectorQueryTool } from "@mastra/rag";

@@ -258,11 +258,10 @@ requestContext.set("databaseConfig", {
   },
 });
 
-await pineconeQueryTool.execute(
-
-  mastra,
-
-});
+await pineconeQueryTool.execute(
+  { queryText: "search query" },
+  { mastra, requestContext }
+);
 ```
 
 For detailed configuration options and advanced usage, see the [Vector Query Tool Reference](https://mastra.ai/reference/v1/tools/vector-query-tool).
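
The hunk above corrects the `execute()` call, but the tool construction sits outside the diff context. A hedged sketch of how such a `pineconeQueryTool` is typically assembled; `vectorStoreName`, `indexName`, `model`, and the `databaseConfig` shape follow Mastra's RAG docs rather than this diff, and `mastra`/`requestContext` are assumed to already be in scope from the surrounding example:

```typescript
import { openai } from "@ai-sdk/openai";
import { createVectorQueryTool } from "@mastra/rag";

// Assumed construction; check option names against your installed version.
const pineconeQueryTool = createVectorQueryTool({
  vectorStoreName: "pinecone",
  indexName: "embeddings",
  model: openai.embedding("text-embedding-3-small"),
});

// Query-time, database-specific options travel via requestContext, as in the
// hunk above; connection credentials stay on the vector store instance.
requestContext.set("databaseConfig", {
  pinecone: { namespace: "production" }, // illustrative namespace
});

// The corrected signature: tool input first, then the execution context.
const results = await pineconeQueryTool.execute(
  { queryText: "search query" },
  { mastra, requestContext },
);
```
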
package/dist/index.cjs
CHANGED

@@ -1381,24 +1381,6 @@ var ScoresUpstash = class extends storage.ScoresStorage {
     };
   }
 };
-function parseWorkflowRun(row) {
-  let parsedSnapshot = row.snapshot;
-  if (typeof parsedSnapshot === "string") {
-    try {
-      parsedSnapshot = JSON.parse(row.snapshot);
-    } catch (e) {
-      console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-    }
-  }
-  return {
-    workflowName: row.workflow_name,
-    runId: row.run_id,
-    snapshot: parsedSnapshot,
-    createdAt: storage.ensureDate(row.createdAt),
-    updatedAt: storage.ensureDate(row.updatedAt),
-    resourceId: row.resourceId
-  };
-}
 var WorkflowsUpstash = class extends storage.WorkflowsStorage {
   client;
   #db;

@@ -1408,6 +1390,24 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
     this.client = client;
     this.#db = new UpstashDB({ client });
   }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        this.logger.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: storage.ensureDate(row.createdAt),
+      updatedAt: storage.ensureDate(row.updatedAt),
+      resourceId: row.resourceId
+    };
+  }
   async dangerouslyClearAll() {
     await this.#db.deleteData({ tableName: storage.TABLE_WORKFLOW_SNAPSHOT });
   }

@@ -1573,7 +1573,7 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
       );
       const data = workflows.find((w) => w?.run_id === runId && w?.workflow_name === workflowName);
       if (!data) return null;
-      return parseWorkflowRun(data);
+      return this.parseWorkflowRun(data);
     } catch (error$1) {
       throw new error.MastraError(
         {

@@ -1658,7 +1658,7 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
      const results = await pipeline.exec();
      let runs = results.map((result) => result).filter(
        (record) => record !== null && record !== void 0 && typeof record === "object" && "workflow_name" in record
-      ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => parseWorkflowRun(w)).filter((w) => {
+      ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => this.parseWorkflowRun(w)).filter((w) => {
        if (fromDate && w.createdAt < fromDate) return false;
        if (toDate && w.createdAt > toDate) return false;
        if (status) {

@@ -1667,7 +1667,7 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
          try {
            snapshot = JSON.parse(snapshot);
          } catch (e) {
-
+            this.logger.warn(`Failed to parse snapshot for workflow ${w.workflowName}: ${e}`);
            return false;
          }
        }