@mastra/memory 1.13.1 → 1.14.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. package/CHANGELOG.md +47 -0
  2. package/dist/{chunk-SWCWXIDJ.js → chunk-FQGF36BE.js} +60 -35
  3. package/dist/chunk-FQGF36BE.js.map +1 -0
  4. package/dist/{chunk-HNVSYZRA.cjs → chunk-X7E3WPF2.cjs} +60 -35
  5. package/dist/chunk-X7E3WPF2.cjs.map +1 -0
  6. package/dist/docs/SKILL.md +1 -1
  7. package/dist/docs/assets/SOURCE_MAP.json +39 -39
  8. package/dist/index.cjs +12 -12
  9. package/dist/index.js +4 -4
  10. package/dist/{observational-memory-4VT6QEB7.cjs → observational-memory-22RZ4253.cjs} +26 -26
  11. package/dist/{observational-memory-4VT6QEB7.cjs.map → observational-memory-22RZ4253.cjs.map} +1 -1
  12. package/dist/{observational-memory-74TRS2R6.js → observational-memory-JQ34KLFS.js} +3 -3
  13. package/dist/{observational-memory-74TRS2R6.js.map → observational-memory-JQ34KLFS.js.map} +1 -1
  14. package/dist/processors/index.cjs +24 -24
  15. package/dist/processors/index.js +1 -1
  16. package/dist/processors/observational-memory/index.d.ts +1 -1
  17. package/dist/processors/observational-memory/index.d.ts.map +1 -1
  18. package/dist/processors/observational-memory/observation-strategies/base.d.ts +3 -3
  19. package/dist/processors/observational-memory/observation-strategies/base.d.ts.map +1 -1
  20. package/dist/processors/observational-memory/observation-strategies/types.d.ts +9 -0
  21. package/dist/processors/observational-memory/observation-strategies/types.d.ts.map +1 -1
  22. package/dist/processors/observational-memory/observation-turn/step.d.ts.map +1 -1
  23. package/dist/processors/observational-memory/observational-memory.d.ts +4 -3
  24. package/dist/processors/observational-memory/observational-memory.d.ts.map +1 -1
  25. package/dist/processors/observational-memory/processor.d.ts.map +1 -1
  26. package/dist/processors/observational-memory/reflector-runner.d.ts.map +1 -1
  27. package/dist/processors/observational-memory/types.d.ts +13 -2
  28. package/dist/processors/observational-memory/types.d.ts.map +1 -1
  29. package/package.json +4 -4
  30. package/dist/chunk-HNVSYZRA.cjs.map +0 -1
  31. package/dist/chunk-SWCWXIDJ.js.map +0 -1
package/CHANGELOG.md CHANGED
@@ -1,5 +1,52 @@
1
1
  # @mastra/memory
2
2
 
3
+ ## 1.14.0-alpha.1
4
+
5
+ ### Minor Changes
6
+
7
+ - Added usage data to ObserveHooks callbacks and standalone reflect() return. ([#15047](https://github.com/mastra-ai/mastra/pull/15047))
8
+
9
+ **ObserveHooks:** `onObservationEnd` and `onReflectionEnd` now receive a result object containing token usage from the underlying LLM call. This enables reliable usage tracking across all observation and reflection paths (sync, async buffered, and resource-scoped).
10
+
11
+ **Standalone reflect():** `reflect()` now returns `{ reflected, record, usage? }` so callers can capture token usage without hooks.
12
+
13
+ **Examples**
14
+
15
+ ```ts
16
+ // Via hooks
17
+ await memory.observe({
18
+ threadId,
19
+ messages,
20
+ hooks: {
21
+ onObservationEnd: ({ usage }) => {
22
+ // usage: { inputTokens, outputTokens, totalTokens }
23
+ },
24
+ onReflectionEnd: ({ usage }) => {
25
+ // usage: { inputTokens, outputTokens, totalTokens }
26
+ },
27
+ },
28
+ });
29
+
30
+ // Via standalone reflect()
31
+ const { reflected, usage } = await memory.reflect(threadId, resourceId);
32
+ ```
33
+
34
+ Existing callbacks that accept no arguments continue to work without changes.
35
+
36
+ ### Patch Changes
37
+
38
+ - Updated dependencies [[`fff91cf`](https://github.com/mastra-ai/mastra/commit/fff91cf914de0e731578aacebffdeebef82f0440)]:
39
+ - @mastra/core@1.23.0-alpha.4
40
+
41
+ ## 1.13.2-alpha.0
42
+
43
+ ### Patch Changes
44
+
45
+ - Fixed observational memory buffering so sealed assistant chunks stay split instead of being merged back into one persisted message during long tool runs. ([#14995](https://github.com/mastra-ai/mastra/pull/14995))
46
+
47
+ - Updated dependencies [[`ed425d7`](https://github.com/mastra-ai/mastra/commit/ed425d78e7c66cbda8209fee910856f98c6c6b82), [`ba6f7e9`](https://github.com/mastra-ai/mastra/commit/ba6f7e9086d8281393f2acae60fda61de3bff1f9), [`7eb2596`](https://github.com/mastra-ai/mastra/commit/7eb25960d607e07468c9a10c5437abd2deaf1e9a)]:
48
+ - @mastra/core@1.23.0-alpha.0
49
+
3
50
  ## 1.13.1
4
51
 
5
52
  ### Patch Changes
@@ -945,7 +945,7 @@ var ObservationStrategy = class _ObservationStrategy {
945
945
  static create;
946
946
  /**
947
947
  * Run the full observation lifecycle.
948
- * @returns `true` if a full observation cycle completed; `false` if skipped (stale lock) or async-buffer failure was swallowed.
948
+ * @returns Result with `observed` flag and optional `usage` from the observer LLM call.
949
949
  * @throws On sync/resource-scoped observer failure after failed markers (same as pre–Option-A contract).
950
950
  */
951
951
  async run() {
@@ -955,7 +955,7 @@ var ObservationStrategy = class _ObservationStrategy {
955
955
  if (this.needsLock) {
956
956
  const fresh = await this.storage.getObservationalMemory(record.threadId, record.resourceId);
957
957
  if (fresh?.lastObservedAt && record.lastObservedAt && fresh.lastObservedAt > record.lastObservedAt) {
958
- return false;
958
+ return { observed: false };
959
959
  }
960
960
  }
961
961
  const { messages, existingObservations } = await this.prepare();
@@ -976,7 +976,7 @@ var ObservationStrategy = class _ObservationStrategy {
976
976
  observabilityContext: this.opts.observabilityContext
977
977
  });
978
978
  }
979
- return true;
979
+ return { observed: true, usage: output.usage };
980
980
  } catch (error) {
981
981
  await this.emitFailedMarkers(cycleId, error);
982
982
  if (!this.rethrowOnFailure) {
@@ -995,7 +995,7 @@ var ObservationStrategy = class _ObservationStrategy {
995
995
  });
996
996
  if (abortSignal?.aborted) throw error;
997
997
  omError("[OM] Observation failed", error);
998
- return false;
998
+ return { observed: false };
999
999
  }
1000
1000
  omError("[OM] Observation failed", error);
1001
1001
  throw error;
@@ -2023,6 +2023,26 @@ var ObservationStep = class {
2023
2023
  if (statusSnapshot.shouldBuffer && !hasIncompleteToolCalls) {
2024
2024
  const allMessages = messageList.get.all.db();
2025
2025
  const unobservedMessages = om.getUnobservedMessages(allMessages, statusSnapshot.record);
2026
+ const candidates = om.getUnobservedMessages(unobservedMessages, statusSnapshot.record, {
2027
+ excludeBuffered: true
2028
+ });
2029
+ if (candidates.length > 0) {
2030
+ om.sealMessagesForBuffering(candidates);
2031
+ try {
2032
+ await this.turn.hooks?.onBufferChunkSealed?.();
2033
+ } catch (error) {
2034
+ omDebug(
2035
+ `[OM:buffer] onBufferChunkSealed hook failed: ${error instanceof Error ? error.message : String(error)}`
2036
+ );
2037
+ }
2038
+ if (this.turn.memory) {
2039
+ await this.turn.memory.persistMessages(candidates);
2040
+ }
2041
+ messageList.removeByIds(candidates.map((msg) => msg.id));
2042
+ for (const msg of candidates) {
2043
+ messageList.add(msg, "memory");
2044
+ }
2045
+ }
2026
2046
  void om.buffer({
2027
2047
  threadId,
2028
2048
  resourceId,
@@ -2031,23 +2051,7 @@ var ObservationStep = class {
2031
2051
  record: statusSnapshot.record,
2032
2052
  writer: this.turn.writer,
2033
2053
  requestContext: this.turn.requestContext,
2034
- observabilityContext: this.turn.observabilityContext,
2035
- beforeBuffer: async (candidates) => {
2036
- if (candidates.length === 0) {
2037
- return;
2038
- }
2039
- om.sealMessagesForBuffering(candidates);
2040
- try {
2041
- await this.turn.hooks?.onBufferChunkSealed?.();
2042
- } catch (error) {
2043
- omDebug(
2044
- `[OM:buffer] onBufferChunkSealed hook failed: ${error instanceof Error ? error.message : String(error)}`
2045
- );
2046
- }
2047
- if (this.turn.memory) {
2048
- await this.turn.memory.persistMessages(candidates);
2049
- }
2050
- }
2054
+ observabilityContext: this.turn.observabilityContext
2051
2055
  }).catch((err) => {
2052
2056
  omDebug(`[OM:buffer] fire-and-forget buffer failed: ${err?.message}`);
2053
2057
  });
@@ -4189,7 +4193,7 @@ var ReflectorRunner = class {
4189
4193
  /**
4190
4194
  * Start an async buffered reflection in the background.
4191
4195
  */
4192
- startAsyncBufferedReflection(record, observationTokens, lockKey, writer, requestContext, observabilityContext) {
4196
+ startAsyncBufferedReflection(record, observationTokens, lockKey, writer, requestContext, observabilityContext, reflectionHooks) {
4193
4197
  const bufferKey = this.buffering.getReflectionBufferKey(lockKey);
4194
4198
  if (this.buffering.isAsyncBufferingInProgress(bufferKey)) {
4195
4199
  return;
@@ -4199,7 +4203,10 @@ var ReflectorRunner = class {
4199
4203
  this.storage.setBufferingReflectionFlag(record.id, true).catch((err) => {
4200
4204
  omError("[OM] Failed to set buffering reflection flag", err);
4201
4205
  });
4202
- const asyncOp = this.doAsyncBufferedReflection(record, bufferKey, writer, requestContext, observabilityContext).catch(async (error) => {
4206
+ reflectionHooks?.onReflectionStart?.();
4207
+ const asyncOp = this.doAsyncBufferedReflection(record, bufferKey, writer, requestContext, observabilityContext).then((usage) => {
4208
+ reflectionHooks?.onReflectionEnd?.({ usage });
4209
+ }).catch(async (error) => {
4203
4210
  if (writer) {
4204
4211
  const failedMarker = createBufferingFailedMarker({
4205
4212
  cycleId: `reflect-buf-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`,
@@ -4215,6 +4222,10 @@ var ReflectorRunner = class {
4215
4222
  await this.persistMarkerToStorage(failedMarker, record.threadId ?? "", record.resourceId ?? void 0);
4216
4223
  }
4217
4224
  omError("[OM] Async buffered reflection failed", error);
4225
+ reflectionHooks?.onReflectionEnd?.({
4226
+ usage: void 0,
4227
+ error: error instanceof Error ? error : new Error(String(error))
4228
+ });
4218
4229
  BufferingCoordinator.lastBufferedBoundary.delete(bufferKey);
4219
4230
  }).finally(() => {
4220
4231
  BufferingCoordinator.asyncBufferingOps.delete(bufferKey);
@@ -4308,6 +4319,7 @@ var ReflectorRunner = class {
4308
4319
  });
4309
4320
  await this.persistMarkerToStorage(endMarker, currentRecord.threadId ?? "", currentRecord.resourceId ?? void 0);
4310
4321
  }
4322
+ return reflectResult.usage;
4311
4323
  }
4312
4324
  /**
4313
4325
  * Try to activate buffered reflection when threshold is reached.
@@ -4428,7 +4440,8 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4428
4440
  lockKey,
4429
4441
  writer,
4430
4442
  requestContext,
4431
- observabilityContext
4443
+ observabilityContext,
4444
+ reflectionHooks
4432
4445
  );
4433
4446
  }
4434
4447
  }
@@ -4462,7 +4475,8 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4462
4475
  lockKey,
4463
4476
  writer,
4464
4477
  requestContext,
4465
- observabilityContext
4478
+ observabilityContext,
4479
+ reflectionHooks
4466
4480
  );
4467
4481
  return;
4468
4482
  }
@@ -4501,6 +4515,8 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4501
4515
  recordId: record.id,
4502
4516
  threadId
4503
4517
  } : void 0;
4518
+ let reflectionUsage;
4519
+ let reflectionError;
4504
4520
  try {
4505
4521
  const compressionStartLevel = await this.getCompressionStartLevel(requestContext);
4506
4522
  const reflectResult = await this.call(
@@ -4514,6 +4530,7 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4514
4530
  requestContext,
4515
4531
  observabilityContext
4516
4532
  );
4533
+ reflectionUsage = reflectResult.usage;
4517
4534
  const reflectionTokenCount = this.tokenCounter.countObservations(reflectResult.observations);
4518
4535
  await this.storage.createReflectionGeneration({
4519
4536
  currentRecord: record,
@@ -4558,13 +4575,14 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4558
4575
  await writer.custom(failedMarker).catch(() => {
4559
4576
  });
4560
4577
  }
4578
+ reflectionError = error instanceof Error ? error : new Error(String(error));
4561
4579
  if (abortSignal?.aborted) {
4562
4580
  throw error;
4563
4581
  }
4564
4582
  omError("[OM] Reflection failed", error);
4565
4583
  } finally {
4566
4584
  await this.storage.setReflectingFlag(record.id, false);
4567
- reflectionHooks?.onReflectionEnd?.();
4585
+ reflectionHooks?.onReflectionEnd?.({ usage: reflectionUsage, error: reflectionError });
4568
4586
  unregisterOp(record.id, "reflecting");
4569
4587
  }
4570
4588
  }
@@ -7909,6 +7927,7 @@ ${grouped}` : grouped;
7909
7927
  const lockKey = this.buffering.getLockKey(threadId, resourceId);
7910
7928
  const reflectionHooks = hooks ? { onReflectionStart: hooks.onReflectionStart, onReflectionEnd: hooks.onReflectionEnd } : void 0;
7911
7929
  let observed = false;
7930
+ let observationUsage;
7912
7931
  let generationBefore = -1;
7913
7932
  await this.withLock(lockKey, async () => {
7914
7933
  const freshRecord = await this.getOrCreateRecord(threadId, resourceId);
@@ -7925,8 +7944,9 @@ ${grouped}` : grouped;
7925
7944
  return;
7926
7945
  }
7927
7946
  hooks?.onObservationStart?.();
7947
+ let observationError;
7928
7948
  try {
7929
- observed = await ObservationStrategy.create(this, {
7949
+ const result = await ObservationStrategy.create(this, {
7930
7950
  record: freshRecord,
7931
7951
  threadId,
7932
7952
  resourceId,
@@ -7936,8 +7956,13 @@ ${grouped}` : grouped;
7936
7956
  writer: opts.writer,
7937
7957
  observabilityContext: opts.observabilityContext
7938
7958
  }).run();
7959
+ observed = result.observed;
7960
+ observationUsage = result.usage;
7961
+ } catch (error) {
7962
+ observationError = error instanceof Error ? error : new Error(String(error));
7963
+ throw error;
7939
7964
  } finally {
7940
- hooks?.onObservationEnd?.();
7965
+ hooks?.onObservationEnd?.({ usage: observationUsage, error: observationError });
7941
7966
  }
7942
7967
  });
7943
7968
  const record = await this.getOrCreateRecord(threadId, resourceId);
@@ -7958,7 +7983,7 @@ ${grouped}` : grouped;
7958
7983
  async reflect(threadId, resourceId, prompt, requestContext, observabilityContext) {
7959
7984
  const record = await this.getOrCreateRecord(threadId, resourceId);
7960
7985
  if (!record.activeObservations) {
7961
- return { reflected: false, record };
7986
+ return { reflected: false, record, usage: void 0 };
7962
7987
  }
7963
7988
  await this.storage.setReflectingFlag(record.id, true);
7964
7989
  registerOp(record.id, "reflecting");
@@ -7983,11 +8008,11 @@ ${grouped}` : grouped;
7983
8008
  tokenCount: reflectionTokenCount
7984
8009
  });
7985
8010
  const updatedRecord = await this.getOrCreateRecord(threadId, resourceId);
7986
- return { reflected: true, record: updatedRecord };
8011
+ return { reflected: true, record: updatedRecord, usage: reflectResult.usage };
7987
8012
  } catch (error) {
7988
8013
  omError("[OM] reflect() failed", error);
7989
8014
  const latestRecord = await this.getOrCreateRecord(threadId, resourceId);
7990
- return { reflected: false, record: latestRecord };
8015
+ return { reflected: false, record: latestRecord, usage: void 0 };
7991
8016
  } finally {
7992
8017
  await this.storage.setReflectingFlag(record.id, false);
7993
8018
  unregisterOp(record.id, "reflecting");
@@ -8011,9 +8036,9 @@ ${grouped}` : grouped;
8011
8036
  /**
8012
8037
  * Get observation history (previous generations)
8013
8038
  */
8014
- async getHistory(threadId, resourceId, limit) {
8039
+ async getHistory(threadId, resourceId, limit, options) {
8015
8040
  const ids = this.getStorageIds(threadId, resourceId);
8016
- return this.storage.getObservationalMemoryHistory(ids.threadId, ids.resourceId, limit);
8041
+ return this.storage.getObservationalMemoryHistory(ids.threadId, ids.resourceId, limit, options);
8017
8042
  }
8018
8043
  /**
8019
8044
  * Clear all memory for a specific thread/resource
@@ -8559,5 +8584,5 @@ function getObservationsAsOf(activeObservations, asOf) {
8559
8584
  }
8560
8585
 
8561
8586
  export { ModelByInputTokens, OBSERVER_SYSTEM_PROMPT, ObservationalMemory, ObservationalMemoryProcessor, TokenCounter, buildObserverPrompt, buildObserverSystemPrompt, combineObservationGroupRanges, deriveObservationGroupProvenance, extractCurrentTask, formatMessagesForObserver, formatToolResultForObserver, getObservationsAsOf, hasCurrentTaskSection, injectAnchorIds, optimizeObservationsForContext, parseAnchorId, parseObservationGroups, parseObserverOutput, reconcileObservationGroupsFromReflection, renderObservationGroupsForReflection, resolveToolResultValue, stripEphemeralAnchorIds, stripObservationGroups, truncateStringByTokens, wrapInObservationGroup };
8562
- //# sourceMappingURL=chunk-SWCWXIDJ.js.map
8563
- //# sourceMappingURL=chunk-SWCWXIDJ.js.map
8587
+ //# sourceMappingURL=chunk-FQGF36BE.js.map
8588
+ //# sourceMappingURL=chunk-FQGF36BE.js.map