@mastra/pg 1.8.5 → 1.8.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/dist/docs/SKILL.md +1 -1
- package/dist/docs/assets/SOURCE_MAP.json +1 -1
- package/dist/index.cjs +24 -11
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +24 -11
- package/dist/index.js.map +1 -1
- package/dist/storage/domains/datasets/index.d.ts.map +1 -1
- package/dist/storage/domains/memory/index.d.ts.map +1 -1
- package/package.json +5 -5
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,23 @@
  # @mastra/pg

+ ## 1.8.6
+
+ ### Patch Changes
+
+ - Added expectedTrajectory support to dataset items across all storage backends and API layer. Dataset items can now store trajectory expectations that define expected agent execution steps, ordering, and constraints for trajectory-based evaluation scoring. ([#14902](https://github.com/mastra-ai/mastra/pull/14902))
+
+ - Updated dependencies [[`cb15509`](https://github.com/mastra-ai/mastra/commit/cb15509b58f6a83e11b765c945082afc027db972), [`81e4259`](https://github.com/mastra-ai/mastra/commit/81e425939b4ceeb4f586e9b6d89c3b1c1f2d2fe7), [`951b8a1`](https://github.com/mastra-ai/mastra/commit/951b8a1b5ef7e1474c59dc4f2b9fc1a8b1e508b6), [`80c5668`](https://github.com/mastra-ai/mastra/commit/80c5668e365470d3a96d3e953868fd7a643ff67c), [`3d478c1`](https://github.com/mastra-ai/mastra/commit/3d478c1e13f17b80f330ac49d7aa42ef929b93ff), [`2b4ea10`](https://github.com/mastra-ai/mastra/commit/2b4ea10b053e4ea1ab232d536933a4a3c4cba999), [`a0544f0`](https://github.com/mastra-ai/mastra/commit/a0544f0a1e6bd52ac12676228967c1938e43648d), [`6039f17`](https://github.com/mastra-ai/mastra/commit/6039f176f9c457304825ff1df8c83b8e457376c0), [`06b928d`](https://github.com/mastra-ai/mastra/commit/06b928dfc2f5630d023467476cc5919dfa858d0a), [`6a8d984`](https://github.com/mastra-ai/mastra/commit/6a8d9841f2933456ee1598099f488d742b600054), [`c8c86aa`](https://github.com/mastra-ai/mastra/commit/c8c86aa1458017fbd1c0776fdc0c520d129df8a6)]:
+   - @mastra/core@1.22.0
+
+ ## 1.8.6-alpha.0
+
+ ### Patch Changes
+
+ - Added expectedTrajectory support to dataset items across all storage backends and API layer. Dataset items can now store trajectory expectations that define expected agent execution steps, ordering, and constraints for trajectory-based evaluation scoring. ([#14902](https://github.com/mastra-ai/mastra/pull/14902))
+
+ - Updated dependencies [[`cb15509`](https://github.com/mastra-ai/mastra/commit/cb15509b58f6a83e11b765c945082afc027db972), [`80c5668`](https://github.com/mastra-ai/mastra/commit/80c5668e365470d3a96d3e953868fd7a643ff67c), [`3d478c1`](https://github.com/mastra-ai/mastra/commit/3d478c1e13f17b80f330ac49d7aa42ef929b93ff), [`6039f17`](https://github.com/mastra-ai/mastra/commit/6039f176f9c457304825ff1df8c83b8e457376c0), [`06b928d`](https://github.com/mastra-ai/mastra/commit/06b928dfc2f5630d023467476cc5919dfa858d0a), [`6a8d984`](https://github.com/mastra-ai/mastra/commit/6a8d9841f2933456ee1598099f488d742b600054)]:
+   - @mastra/core@1.22.0-alpha.2
+
  ## 1.8.5

  ### Patch Changes
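The changelog describes the new field only at a high level. As a rough illustration of what a dataset item can now carry (the exact expectedTrajectory schema lives in @mastra/core and is not part of this diff, so the nested structure below is an assumption), the top-level field names mirror the columns touched in the code changes further down:

```ts
// Illustrative only. Top-level field names match the columns added/used in
// this diff; the inner shape of expectedTrajectory (steps, ordering,
// constraints) is a guess at the kind of data the changelog describes.
const datasetItem = {
  input: { messages: [{ role: "user", content: "Find the cheapest flight to Berlin and book it" }] },
  groundTruth: { outcome: "flight booked" },
  expectedTrajectory: {
    steps: [
      { type: "tool-call", name: "searchFlights" },
      { type: "tool-call", name: "bookFlight" },
    ],
    ordering: "strict",
  },
  requestContext: { userId: "user-123" },
  metadata: { suite: "booking-evals" },
  source: { type: "manual" },
};
```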
package/dist/docs/SKILL.md
CHANGED
package/dist/index.cjs
CHANGED
@@ -4358,6 +4358,7 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      await this.#addColumnIfNotExists(storage.TABLE_DATASETS, "scorerIds", "JSONB");
      await this.#addColumnIfNotExists(storage.TABLE_DATASET_ITEMS, "requestContext", "JSONB");
      await this.#addColumnIfNotExists(storage.TABLE_DATASET_ITEMS, "source", "JSONB");
+     await this.#addColumnIfNotExists(storage.TABLE_DATASET_ITEMS, "expectedTrajectory", "JSONB");
      await this.createDefaultIndexes();
      await this.createCustomIndexes();
    }
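The schema side of the change is the single migration line above. Assuming the class's #addColumnIfNotExists helper maps onto a standard Postgres statement (its internals are outside this diff), the effect is roughly:

```ts
// Sketch of the assumed equivalent statement: the column is added as nullable
// JSONB, so rows written before 1.8.6 simply read back NULL for it.
await db.none(
  `ALTER TABLE ${storage.TABLE_DATASET_ITEMS} ADD COLUMN IF NOT EXISTS "expectedTrajectory" JSONB`
);
```

That nullable column is what the row mappers below rely on when they fall back to undefined for older items.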
@@ -4440,6 +4441,7 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      datasetVersion: row.datasetVersion,
      input: storage.safelyParseJSON(row.input),
      groundTruth: row.groundTruth ? storage.safelyParseJSON(row.groundTruth) : void 0,
+     expectedTrajectory: row.expectedTrajectory ? storage.safelyParseJSON(row.expectedTrajectory) : void 0,
      requestContext: row.requestContext ? storage.safelyParseJSON(row.requestContext) : void 0,
      metadata: row.metadata ? storage.safelyParseJSON(row.metadata) : void 0,
      source: row.source ? storage.safelyParseJSON(row.source) : void 0,
@@ -4456,6 +4458,7 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      isDeleted: Boolean(row.isDeleted),
      input: storage.safelyParseJSON(row.input),
      groundTruth: row.groundTruth ? storage.safelyParseJSON(row.groundTruth) : void 0,
+     expectedTrajectory: row.expectedTrajectory ? storage.safelyParseJSON(row.expectedTrajectory) : void 0,
      requestContext: row.requestContext ? storage.safelyParseJSON(row.requestContext) : void 0,
      metadata: row.metadata ? storage.safelyParseJSON(row.metadata) : void 0,
      source: row.source ? storage.safelyParseJSON(row.source) : void 0,
@@ -4725,13 +4728,14 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      );
      newVersion = row.version;
      await t.none(
-       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,false,$4,$5,$6,$7,$8,$9,$10,$11,$12)`,
+       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","expectedTrajectory","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,false,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)`,
        [
          id,
          args.datasetId,
          newVersion,
          JSON.stringify(args.input),
          jsonbArg(args.groundTruth),
+         jsonbArg(args.expectedTrajectory),
          jsonbArg(args.requestContext),
          jsonbArg(args.metadata),
          jsonbArg(args.source),
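Every INSERT site in this diff changes in the same way: one extra quoted column, one extra $n placeholder, and one extra jsonbArg(...) entry at the matching position in the parameter array. jsonbArg itself is defined outside this diff; a hedged guess at its behavior, only to make those parameter lists easier to read:

```ts
// Hypothetical sketch, not the package's actual implementation: serialize a
// provided value so it can bind to a JSONB parameter, and pass NULL when the
// field was not supplied.
function jsonbArg(value: unknown): string | null {
  return value === undefined ? null : JSON.stringify(value);
}
```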
@@ -4752,6 +4756,7 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      datasetVersion: newVersion,
      input: args.input,
      groundTruth: args.groundTruth,
+     expectedTrajectory: args.expectedTrajectory,
      requestContext: args.requestContext,
      metadata: args.metadata,
      source: args.source,
@@ -4798,11 +4803,12 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      const versionId = crypto.randomUUID();
      const now = /* @__PURE__ */ new Date();
      const nowIso = now.toISOString();
-     const mergedInput = args.input !== void 0 ? args.input : existing.input;
-     const mergedGroundTruth = args.groundTruth !== void 0 ? args.groundTruth : existing.groundTruth;
-     const mergedRequestContext = args.requestContext !== void 0 ? args.requestContext : existing.requestContext;
-     const mergedMetadata = args.metadata !== void 0 ? args.metadata : existing.metadata;
-     const mergedSource = args.source !== void 0 ? args.source : existing.source;
+     const mergedInput = args.input !== void 0 ? args.input : existing.input;
+     const mergedGroundTruth = args.groundTruth !== void 0 ? args.groundTruth : existing.groundTruth;
+     const mergedExpectedTrajectory = args.expectedTrajectory !== void 0 ? args.expectedTrajectory : existing.expectedTrajectory;
+     const mergedRequestContext = args.requestContext !== void 0 ? args.requestContext : existing.requestContext;
+     const mergedMetadata = args.metadata !== void 0 ? args.metadata : existing.metadata;
+     const mergedSource = args.source !== void 0 ? args.source : existing.source;
      let newVersion;
      await this.#db.client.tx(async (t) => {
        const row = await t.one(
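The update path merges the incoming arguments field by field: anything the caller leaves undefined keeps the value from the existing item version, while anything explicitly provided replaces it, and expectedTrajectory now participates in that merge. A small self-contained sketch of the behavior (the surrounding method's public signature is not shown in this diff):

```ts
// Names follow the diff; the wrapper function and values are illustrative.
type ItemFields = { expectedTrajectory?: unknown; metadata?: unknown };

function mergeItem(existing: ItemFields, args: ItemFields): ItemFields {
  return {
    expectedTrajectory:
      args.expectedTrajectory !== undefined ? args.expectedTrajectory : existing.expectedTrajectory,
    metadata: args.metadata !== undefined ? args.metadata : existing.metadata,
  };
}

// mergeItem(existing, { metadata: { run: 2 } })
//   -> expectedTrajectory carried over unchanged from the stored version
// mergeItem(existing, { expectedTrajectory: { steps: [] } })
//   -> trajectory replaced, metadata carried over
```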
@@ -4815,13 +4821,14 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
        [newVersion, args.id]
      );
      await t.none(
-       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,false,$4,$5,$6,$7,$8,$9,$10,$11,$12)`,
+       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","expectedTrajectory","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,false,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)`,
        [
          args.id,
          args.datasetId,
          newVersion,
          JSON.stringify(mergedInput),
          jsonbArg(mergedGroundTruth),
+         jsonbArg(mergedExpectedTrajectory),
          jsonbArg(mergedRequestContext),
          jsonbArg(mergedMetadata),
          jsonbArg(mergedSource),
@@ -4841,6 +4848,7 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      datasetVersion: newVersion,
      input: mergedInput,
      groundTruth: mergedGroundTruth,
+     expectedTrajectory: mergedExpectedTrajectory,
      requestContext: mergedRequestContext,
      metadata: mergedMetadata,
      source: mergedSource,
@@ -4889,13 +4897,14 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
        [newVersion, id]
      );
      await t.none(
-       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,true,$4,$5,$6,$7,$8,$9,$10,$11,$12)`,
+       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","expectedTrajectory","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,true,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)`,
        [
          id,
          datasetId,
          newVersion,
          JSON.stringify(existing.input),
          jsonbArg(existing.groundTruth),
+         jsonbArg(existing.expectedTrajectory),
          jsonbArg(existing.requestContext),
          jsonbArg(existing.metadata),
          jsonbArg(existing.source),
@@ -4952,13 +4961,14 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      newVersion = row.version;
      for (const { id, input: itemInput } of itemsWithIds) {
        await t.none(
-         `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,false,$4,$5,$6,$7,$8,$9,$10,$11,$12)`,
+         `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","expectedTrajectory","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,false,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)`,
          [
            id,
            input.datasetId,
            newVersion,
            JSON.stringify(itemInput.input),
            jsonbArg(itemInput.groundTruth),
+           jsonbArg(itemInput.expectedTrajectory),
            jsonbArg(itemInput.requestContext),
            jsonbArg(itemInput.metadata),
            jsonbArg(itemInput.source),
@@ -4980,6 +4990,7 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
      datasetVersion: newVersion,
      input: itemInput.input,
      groundTruth: itemInput.groundTruth,
+     expectedTrajectory: itemInput.expectedTrajectory,
      requestContext: itemInput.requestContext,
      metadata: itemInput.metadata,
      source: itemInput.source,
@@ -5037,13 +5048,14 @@ var DatasetsPG = class _DatasetsPG extends storage.DatasetsStorage {
        [newVersion, item.id]
      );
      await t.none(
-       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,true,$4,$5,$6,$7,$8,$9,$10,$11,$12)`,
+       `INSERT INTO ${itemsTable} ("id","datasetId","datasetVersion","validTo","isDeleted","input","groundTruth","expectedTrajectory","requestContext","metadata","source","createdAt","createdAtZ","updatedAt","updatedAtZ") VALUES ($1,$2,$3,NULL,true,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)`,
        [
          item.id,
          input.datasetId,
          newVersion,
          JSON.stringify(item.input),
          jsonbArg(item.groundTruth),
+         jsonbArg(item.expectedTrajectory),
          jsonbArg(item.requestContext),
          jsonbArg(item.metadata),
          jsonbArg(item.source),
@@ -9059,7 +9071,8 @@ var MemoryPG = class _MemoryPG extends storage.MemoryStorage {
      lastObservedAt: input.chunk.lastObservedAt,
      createdAt: /* @__PURE__ */ new Date(),
      suggestedContinuation: input.chunk.suggestedContinuation,
-     currentTask: input.chunk.currentTask
+     currentTask: input.chunk.currentTask,
+     threadTitle: input.chunk.threadTitle
    };
    const lastBufferedAtTime = input.lastBufferedAtTime ? input.lastBufferedAtTime.toISOString() : null;
    const result = await this.#db.client.query(
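Separate from the datasets work, the memory domain's buffered chunk now also persists the thread title. A minimal sketch of the record assembled in the hunk above, with values purely illustrative and the query that writes it outside this diff:

```ts
// Field names mirror the object literal above; values are made up.
const chunkRecord = {
  lastObservedAt: new Date("2024-05-01T12:00:00Z"),
  createdAt: new Date(),
  suggestedContinuation: "Summarize the remaining action items",
  currentTask: "triage open support tickets",
  threadTitle: "Support triage", // newly carried through and stored in 1.8.6
};
```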
|