@mastra/memory 1.2.0 → 1.3.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/CHANGELOG.md +47 -0
  2. package/dist/{chunk-5YW6JV6Y.js → chunk-F5P5HTMC.js} +135 -67
  3. package/dist/chunk-F5P5HTMC.js.map +1 -0
  4. package/dist/{chunk-7SCXX4S7.cjs → chunk-LXATBJ2L.cjs} +137 -66
  5. package/dist/chunk-LXATBJ2L.cjs.map +1 -0
  6. package/dist/docs/SKILL.md +1 -1
  7. package/dist/docs/assets/SOURCE_MAP.json +26 -14
  8. package/dist/docs/references/docs-memory-observational-memory.md +86 -11
  9. package/dist/docs/references/reference-memory-observational-memory.md +318 -9
  10. package/dist/index.cjs +22 -1
  11. package/dist/index.cjs.map +1 -1
  12. package/dist/index.d.ts.map +1 -1
  13. package/dist/index.js +22 -1
  14. package/dist/index.js.map +1 -1
  15. package/dist/observational-memory-3DA7KJIH.js +3 -0
  16. package/dist/{observational-memory-LI6QFTRE.js.map → observational-memory-3DA7KJIH.js.map} +1 -1
  17. package/dist/observational-memory-SA5RITIG.cjs +64 -0
  18. package/dist/{observational-memory-G3HACXHE.cjs.map → observational-memory-SA5RITIG.cjs.map} +1 -1
  19. package/dist/processors/index.cjs +24 -12
  20. package/dist/processors/index.js +1 -1
  21. package/dist/processors/observational-memory/index.d.ts +1 -1
  22. package/dist/processors/observational-memory/index.d.ts.map +1 -1
  23. package/dist/processors/observational-memory/observational-memory.d.ts +41 -4
  24. package/dist/processors/observational-memory/observational-memory.d.ts.map +1 -1
  25. package/package.json +3 -3
  26. package/dist/chunk-5YW6JV6Y.js.map +0 -1
  27. package/dist/chunk-7SCXX4S7.cjs.map +0 -1
  28. package/dist/observational-memory-G3HACXHE.cjs +0 -52
  29. package/dist/observational-memory-LI6QFTRE.js +0 -3
package/CHANGELOG.md CHANGED
@@ -1,5 +1,52 @@
1
1
  # @mastra/memory
2
2
 
3
+ ## 1.3.0-alpha.0
4
+
5
+ ### Minor Changes
6
+
7
+ - @mastra/opencode: Add opencode plugin for Observational Memory integration ([#12925](https://github.com/mastra-ai/mastra/pull/12925))
8
+
9
+ Added standalone `observe()` API that accepts external messages directly, so integrations can trigger observation without duplicating messages into Mastra's storage.
10
+
11
+ **New exports:**
12
+ - `ObserveHooks` — lifecycle callbacks (`onObservationStart`, `onObservationEnd`, `onReflectionStart`, `onReflectionEnd`) for hooking into observation/reflection cycles
13
+ - `OBSERVATION_CONTEXT_PROMPT` — preamble that introduces the observations block
14
+ - `OBSERVATION_CONTEXT_INSTRUCTIONS` — rules for interpreting observations (placed after the `<observations>` block)
15
+ - `OBSERVATION_CONTINUATION_HINT` — behavioral guidance that prevents models from awkwardly acknowledging the memory system
16
+ - `getOrCreateRecord()` — now public, allows eager record initialization before the first observation cycle
17
+
18
+ ```ts
19
+ import { ObservationalMemory } from '@mastra/memory/processors';
20
+
21
+ const om = new ObservationalMemory({ storage, model: 'google/gemini-2.5-flash' });
22
+
23
+ // Eagerly initialize a record
24
+ await om.getOrCreateRecord(threadId);
25
+
26
+ // Pass messages directly with lifecycle hooks
27
+ await om.observe({
28
+ threadId,
29
+ messages: myMessages,
30
+ hooks: {
31
+ onObservationStart: () => console.log('Observing...'),
32
+ onObservationEnd: () => console.log('Done!'),
33
+ onReflectionStart: () => console.log('Reflecting...'),
34
+ onReflectionEnd: () => console.log('Reflected!'),
35
+ },
36
+ });
37
+ ```
38
+
39
+ **Breaking:** `observe()` now takes an object param instead of positional args. Update calls from `observe(threadId, resourceId)` to `observe({ threadId, resourceId })`.
40
+
41
+ ### Patch Changes
42
+
43
+ - Fixed observational memory writing non-integer token counts to PostgreSQL, which caused `invalid input syntax for type integer` errors. Token counts are now correctly rounded to integers before all database writes. ([#12976](https://github.com/mastra-ai/mastra/pull/12976))
44
+
45
+ - Fixed cloneThread not copying working memory to the cloned thread. Thread-scoped working memory is now properly carried over when cloning, and resource-scoped working memory is copied when the clone uses a different resourceId. ([#12833](https://github.com/mastra-ai/mastra/pull/12833))
46
+
47
+ - Updated dependencies [[`7ef618f`](https://github.com/mastra-ai/mastra/commit/7ef618f3c49c27e2f6b27d7f564c557c0734325b), [`b373564`](https://github.com/mastra-ai/mastra/commit/b37356491d43b4d53067f10cb669abaf2502f218), [`927c2af`](https://github.com/mastra-ai/mastra/commit/927c2af9792286c122e04409efce0f3c804f777f), [`b896b41`](https://github.com/mastra-ai/mastra/commit/b896b41343de7fcc14442fb40fe82d189e65bbe2), [`6415277`](https://github.com/mastra-ai/mastra/commit/6415277a438faa00db2af850ead5dee25f40c428), [`0831bbb`](https://github.com/mastra-ai/mastra/commit/0831bbb5bc750c18e9b22b45f18687c964b70828), [`63f7eda`](https://github.com/mastra-ai/mastra/commit/63f7eda605eb3e0c8c35ee3912ffe7c999c69f69), [`a5b67a3`](https://github.com/mastra-ai/mastra/commit/a5b67a3589a74415feb663a55d1858324a2afde9), [`877b02c`](https://github.com/mastra-ai/mastra/commit/877b02cdbb15e199184c7f2b8f217be8d3ebada7), [`7567222`](https://github.com/mastra-ai/mastra/commit/7567222b1366f0d39980594792dd9d5060bfe2ab), [`af71458`](https://github.com/mastra-ai/mastra/commit/af71458e3b566f09c11d0e5a0a836dc818e7a24a), [`eb36bd8`](https://github.com/mastra-ai/mastra/commit/eb36bd8c52fcd6ec9674ac3b7a6412405b5983e1), [`3cbf121`](https://github.com/mastra-ai/mastra/commit/3cbf121f55418141924754a83102aade89835947)]:
48
+ - @mastra/core@1.4.0-alpha.0
49
+
3
50
  ## 1.2.0
4
51
 
5
52
  ### Minor Changes
@@ -1026,7 +1026,7 @@ var TokenCounter = class _TokenCounter {
1026
1026
  if (toolResultCount > 0) {
1027
1027
  overhead += toolResultCount * _TokenCounter.TOKENS_PER_MESSAGE;
1028
1028
  }
1029
- return this.encoder.encode(tokenString, "all").length + overhead;
1029
+ return Math.round(this.encoder.encode(tokenString, "all").length + overhead);
1030
1030
  }
1031
1031
  /**
1032
1032
  * Count tokens in an array of messages
@@ -1280,6 +1280,17 @@ var OBSERVATIONAL_MEMORY_DEFAULTS = {
1280
1280
  // Start buffering at 50% of observationTokens
1281
1281
  }
1282
1282
  };
1283
+ var OBSERVATION_CONTINUATION_HINT = `This message is not from the user, the conversation history grew too long and wouldn't fit in context! Thankfully the entire conversation is stored in your memory observations. Please continue from where the observations left off. Do not refer to your "memory observations" directly, the user doesn't know about them, they are your memories! Just respond naturally as if you're remembering the conversation (you are!). Do not say "Hi there!" or "based on our previous conversation" as if the conversation is just starting, this is not a new conversation. This is an ongoing conversation, keep continuity by responding based on your memory. For example do not say "I understand. I've reviewed my memory observations", or "I remember [...]". Answer naturally following the suggestion from your memory. Note that your memory may contain a suggested first response, which you should follow.
1284
+
1285
+ IMPORTANT: this system reminder is NOT from the user. The system placed it here as part of your memory system. This message is part of you remembering your conversation with the user.
1286
+
1287
+ NOTE: Any messages following this system reminder are newer than your memories.`;
1288
+ var OBSERVATION_CONTEXT_PROMPT = `The following observations block contains your memory of past conversations with this user.`;
1289
+ var OBSERVATION_CONTEXT_INSTRUCTIONS = `IMPORTANT: When responding, reference specific details from these observations. Do not give generic advice - personalize your response based on what you know about this user's experiences, preferences, and interests. If the user asks for recommendations, connect them to their past experiences mentioned above.
1290
+
1291
+ KNOWLEDGE UPDATES: When asked about current state (e.g., "where do I currently...", "what is my current..."), always prefer the MOST RECENT information. Observations include dates - if you see conflicting information, the newer observation supersedes the older one. Look for phrases like "will start", "is switching", "changed to", "moved to" as indicators that previous information has been updated.
1292
+
1293
+ PLANNED ACTIONS: If the user stated they planned to do something (e.g., "I'm going to...", "I'm looking forward to...", "I will...") and the date they planned to do it is now in the past (check the relative time like "3 weeks ago"), assume they completed the action unless there's evidence they didn't. For example, if someone said "I'll start my new diet on Monday" and that was 2 weeks ago, assume they started the diet.`;
1283
1294
  var ObservationalMemory = class _ObservationalMemory {
1284
1295
  id = "observational-memory";
1285
1296
  name = "Observational Memory";
@@ -1867,6 +1878,16 @@ Async buffering is enabled by default \u2014 this opt-out is only needed when us
1867
1878
  const effectiveThreshold = Math.max(totalBudget - currentObservationTokens, baseThreshold);
1868
1879
  return Math.round(effectiveThreshold);
1869
1880
  }
1881
+ /**
1882
+ * Check whether the unobserved message tokens meet the observation threshold.
1883
+ */
1884
+ meetsObservationThreshold(opts) {
1885
+ const { record, unobservedTokens, extraTokens = 0 } = opts;
1886
+ const pendingTokens = (record.pendingMessageTokens ?? 0) + unobservedTokens + extraTokens;
1887
+ const currentObservationTokens = record.observationTokenCount ?? 0;
1888
+ const threshold = this.calculateDynamicThreshold(this.observationConfig.messageTokens, currentObservationTokens);
1889
+ return pendingTokens >= threshold;
1890
+ }
1870
1891
  /**
1871
1892
  * Get or create the Observer agent
1872
1893
  */
@@ -1913,7 +1934,8 @@ Async buffering is enabled by default \u2014 this opt-out is only needed when us
1913
1934
  };
1914
1935
  }
1915
1936
  /**
1916
- * Get or create the observational memory record
1937
+ * Get or create the observational memory record.
1938
+ * Returns the existing record if one exists, otherwise initializes a new one.
1917
1939
  */
1918
1940
  async getOrCreateRecord(threadId, resourceId) {
1919
1941
  const ids = this.getStorageIds(threadId, resourceId);
@@ -2582,17 +2604,13 @@ Async buffering is enabled by default \u2014 this opt-out is only needed when us
2582
2604
  optimized = addRelativeTimeToObservations(optimized, currentDate);
2583
2605
  }
2584
2606
  let content = `
2585
- The following observations block contains your memory of past conversations with this user.
2607
+ ${OBSERVATION_CONTEXT_PROMPT}
2586
2608
 
2587
2609
  <observations>
2588
2610
  ${optimized}
2589
2611
  </observations>
2590
2612
 
2591
- IMPORTANT: When responding, reference specific details from these observations. Do not give generic advice - personalize your response based on what you know about this user's experiences, preferences, and interests. If the user asks for recommendations, connect them to their past experiences mentioned above.
2592
-
2593
- KNOWLEDGE UPDATES: When asked about current state (e.g., "where do I currently...", "what is my current..."), always prefer the MOST RECENT information. Observations include dates - if you see conflicting information, the newer observation supersedes the older one. Look for phrases like "will start", "is switching", "changed to", "moved to" as indicators that previous information has been updated.
2594
-
2595
- PLANNED ACTIONS: If the user stated they planned to do something (e.g., "I'm going to...", "I'm looking forward to...", "I will...") and the date they planned to do it is now in the past (check the relative time like "3 weeks ago"), assume they completed the action unless there's evidence they didn't. For example, if someone said "I'll start my new diet on Monday" and that was 2 weeks ago, assume they started the diet.`;
2613
+ ${OBSERVATION_CONTEXT_INSTRUCTIONS}`;
2596
2614
  if (unobservedContextBlocks) {
2597
2615
  content += `
2598
2616
 
@@ -2856,16 +2874,22 @@ ${suggestedResponse}
2856
2874
  if (freshUnobservedMessages.length > 0) {
2857
2875
  try {
2858
2876
  if (this.scope === "resource" && resourceId) {
2859
- await this.doResourceScopedObservation(
2860
- freshRecord,
2861
- threadId,
2877
+ await this.doResourceScopedObservation({
2878
+ record: freshRecord,
2879
+ currentThreadId: threadId,
2862
2880
  resourceId,
2863
- freshUnobservedMessages,
2881
+ currentThreadMessages: freshUnobservedMessages,
2864
2882
  writer,
2865
2883
  abortSignal
2866
- );
2884
+ });
2867
2885
  } else {
2868
- await this.doSynchronousObservation(freshRecord, threadId, freshUnobservedMessages, writer, abortSignal);
2886
+ await this.doSynchronousObservation({
2887
+ record: freshRecord,
2888
+ threadId,
2889
+ unobservedMessages: freshUnobservedMessages,
2890
+ writer,
2891
+ abortSignal
2892
+ });
2869
2893
  }
2870
2894
  updatedRecord = await this.getOrCreateRecord(threadId, resourceId);
2871
2895
  const updatedTime = updatedRecord.lastObservedAt?.getTime() ?? 0;
@@ -3005,12 +3029,7 @@ ${suggestedResponse}
3005
3029
  parts: [
3006
3030
  {
3007
3031
  type: "text",
3008
- text: `<system-reminder>This message is not from the user, the conversation history grew too long and wouldn't fit in context! Thankfully the entire conversation is stored in your memory observations. Please continue from where the observations left off. Do not refer to your "memory observations" directly, the user doesn't know about them, they are your memories! Just respond naturally as if you're remembering the conversation (you are!). Do not say "Hi there!" or "based on our previous conversation" as if the conversation is just starting, this is not a new conversation. This is an ongoing conversation, keep continuity by responding based on your memory. For example do not say "I understand. I've reviewed my memory observations", or "I remember [...]". Answer naturally following the suggestion from your memory. Note that your memory may contain a suggested first response, which you should follow.
3009
-
3010
- IMPORTANT: this system reminder is NOT from the user. The system placed it here as part of your memory system. This message is part of you remembering your conversation with the user.
3011
-
3012
- NOTE: Any messages following this system reminder are newer than your memories.
3013
- </system-reminder>`
3032
+ text: `<system-reminder>${OBSERVATION_CONTINUATION_HINT}</system-reminder>`
3014
3033
  }
3015
3034
  ]
3016
3035
  },
@@ -3167,14 +3186,13 @@ NOTE: Any messages following this system reminder are newer than your memories.
3167
3186
  _ObservationalMemory.lastBufferedBoundary.set(bufKey, 0);
3168
3187
  this.storage.setBufferingObservationFlag(record.id, false, 0).catch(() => {
3169
3188
  });
3170
- await this.maybeReflect(
3189
+ await this.maybeReflect({
3171
3190
  record,
3172
- record.observationTokenCount ?? 0,
3191
+ observationTokens: record.observationTokenCount ?? 0,
3173
3192
  threadId,
3174
3193
  writer,
3175
- void 0,
3176
3194
  messageList
3177
- );
3195
+ });
3178
3196
  record = await this.getOrCreateRecord(threadId, resourceId);
3179
3197
  }
3180
3198
  }
@@ -3184,7 +3202,7 @@ NOTE: Any messages following this system reminder are newer than your memories.
3184
3202
  const obsTokens = record.observationTokenCount ?? 0;
3185
3203
  if (this.shouldReflect(obsTokens)) {
3186
3204
  omDebug(`[OM:step0-reflect] obsTokens=${obsTokens} over reflectThreshold, triggering reflection`);
3187
- await this.maybeReflect(record, obsTokens, threadId, writer, void 0, messageList);
3205
+ await this.maybeReflect({ record, observationTokens: obsTokens, threadId, writer, messageList });
3188
3206
  record = await this.getOrCreateRecord(threadId, resourceId);
3189
3207
  } else if (this.isAsyncReflectionEnabled()) {
3190
3208
  const lockKey = this.getLockKey(threadId, resourceId);
@@ -3524,21 +3542,35 @@ ${newThreadSection}`;
3524
3542
  }
3525
3543
  const newThreadId = threadIdMatch[1];
3526
3544
  const newDate = dateMatch[1];
3527
- const existingPattern = new RegExp(
3528
- `<thread id="${newThreadId}">\\s*Date:\\s*${newDate.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}([\\s\\S]*?)</thread>`
3529
- );
3530
- const existingMatch = existingObservations.match(existingPattern);
3531
- if (existingMatch) {
3532
- const newObsMatch = newThreadSection.match(/<thread id="[^"]+">[\s\S]*?Date:[^\n]*\n([\s\S]*?)\n<\/thread>/);
3533
- if (newObsMatch && newObsMatch[1]) {
3534
- const newObsContent = newObsMatch[1].trim();
3535
- const mergedSection = existingObservations.replace(existingPattern, (match) => {
3536
- const withoutClose = match.replace(/<\/thread>$/, "").trimEnd();
3537
- return `${withoutClose}
3545
+ const threadOpen = `<thread id="${newThreadId}">`;
3546
+ const threadClose = "</thread>";
3547
+ const startIdx = existingObservations.indexOf(threadOpen);
3548
+ let existingSection = null;
3549
+ let existingSectionStart = -1;
3550
+ let existingSectionEnd = -1;
3551
+ if (startIdx !== -1) {
3552
+ const closeIdx = existingObservations.indexOf(threadClose, startIdx);
3553
+ if (closeIdx !== -1) {
3554
+ existingSectionEnd = closeIdx + threadClose.length;
3555
+ existingSectionStart = startIdx;
3556
+ const section = existingObservations.slice(startIdx, existingSectionEnd);
3557
+ if (section.includes(`Date: ${newDate}`) || section.includes(`Date:${newDate}`)) {
3558
+ existingSection = section;
3559
+ }
3560
+ }
3561
+ }
3562
+ if (existingSection) {
3563
+ const dateLineEnd = newThreadSection.indexOf("\n", newThreadSection.indexOf("Date:"));
3564
+ const newCloseIdx = newThreadSection.lastIndexOf(threadClose);
3565
+ if (dateLineEnd !== -1 && newCloseIdx !== -1) {
3566
+ const newObsContent = newThreadSection.slice(dateLineEnd + 1, newCloseIdx).trim();
3567
+ if (newObsContent) {
3568
+ const withoutClose = existingSection.slice(0, existingSection.length - threadClose.length).trimEnd();
3569
+ const merged = `${withoutClose}
3538
3570
  ${newObsContent}
3539
- </thread>`;
3540
- });
3541
- return mergedSection;
3571
+ ${threadClose}`;
3572
+ return existingObservations.slice(0, existingSectionStart) + merged + existingObservations.slice(existingSectionEnd);
3573
+ }
3542
3574
  }
3543
3575
  }
3544
3576
  return `${existingObservations}
@@ -3562,7 +3594,8 @@ ${newThreadSection}`;
3562
3594
  /**
3563
3595
  * Do synchronous observation (fallback when no buffering)
3564
3596
  */
3565
- async doSynchronousObservation(record, threadId, unobservedMessages, writer, abortSignal) {
3597
+ async doSynchronousObservation(opts) {
3598
+ const { record, threadId, unobservedMessages, writer, abortSignal, reflectionHooks } = opts;
3566
3599
  this.emitDebugEvent({
3567
3600
  type: "observation_triggered",
3568
3601
  timestamp: /* @__PURE__ */ new Date(),
@@ -3687,13 +3720,14 @@ ${result.observations}` : result.observations;
3687
3720
  })),
3688
3721
  usage: result.usage
3689
3722
  });
3690
- await this.maybeReflect(
3691
- { ...record, activeObservations: newObservations },
3692
- totalTokenCount,
3723
+ await this.maybeReflect({
3724
+ record: { ...record, activeObservations: newObservations },
3725
+ observationTokens: totalTokenCount,
3693
3726
  threadId,
3694
3727
  writer,
3695
- abortSignal
3696
- );
3728
+ abortSignal,
3729
+ reflectionHooks
3730
+ });
3697
3731
  } catch (error) {
3698
3732
  if (lastMessage?.id) {
3699
3733
  const failedMarker = this.createObservationFailedMarker({
@@ -4236,7 +4270,8 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4236
4270
  * 3. Only updates lastObservedAt AFTER all threads are observed
4237
4271
  * 4. Only triggers reflection AFTER all threads are observed
4238
4272
  */
4239
- async doResourceScopedObservation(record, currentThreadId, resourceId, currentThreadMessages, writer, abortSignal) {
4273
+ async doResourceScopedObservation(opts) {
4274
+ const { record, currentThreadId, resourceId, currentThreadMessages, writer, abortSignal, reflectionHooks } = opts;
4240
4275
  const { threads: allThreads } = await this.storage.listThreads({ filter: { resourceId } });
4241
4276
  const threadMetadataMap = /* @__PURE__ */ new Map();
4242
4277
  for (const thread of allThreads) {
@@ -4498,13 +4533,14 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4498
4533
  }
4499
4534
  }
4500
4535
  }
4501
- await this.maybeReflect(
4502
- { ...record, activeObservations: currentObservations },
4503
- totalTokenCount,
4504
- currentThreadId,
4536
+ await this.maybeReflect({
4537
+ record: { ...record, activeObservations: currentObservations },
4538
+ observationTokens: totalTokenCount,
4539
+ threadId: currentThreadId,
4505
4540
  writer,
4506
- abortSignal
4507
- );
4541
+ abortSignal,
4542
+ reflectionHooks
4543
+ });
4508
4544
  } catch (error) {
4509
4545
  for (const [threadId, msgs] of threadsWithMessages) {
4510
4546
  const lastMessage = msgs[msgs.length - 1];
@@ -4575,7 +4611,8 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4575
4611
  * When async buffering is enabled via `bufferTokens`, reflection is triggered
4576
4612
  * in the background at intervals, and activated when the threshold is reached.
4577
4613
  */
4578
- async maybeReflect(record, observationTokens, _threadId, writer, abortSignal, messageList) {
4614
+ async maybeReflect(opts) {
4615
+ const { record, observationTokens, writer, abortSignal, messageList, reflectionHooks } = opts;
4579
4616
  const lockKey = this.getLockKey(record.threadId, record.resourceId);
4580
4617
  const reflectThreshold = this.getMaxThreshold(this.reflectionConfig.observationTokens);
4581
4618
  if (this.isAsyncReflectionEnabled() && observationTokens < reflectThreshold) {
@@ -4611,11 +4648,12 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4611
4648
  return;
4612
4649
  }
4613
4650
  }
4651
+ reflectionHooks?.onReflectionStart?.();
4614
4652
  await this.storage.setReflectingFlag(record.id, true);
4615
4653
  registerOp(record.id, "reflecting");
4616
4654
  const cycleId = crypto.randomUUID();
4617
4655
  const startedAt = (/* @__PURE__ */ new Date()).toISOString();
4618
- const threadId = _threadId ?? "unknown";
4656
+ const threadId = opts.threadId ?? "unknown";
4619
4657
  if (writer) {
4620
4658
  const startMarker = this.createObservationStartMarker({
4621
4659
  cycleId,
@@ -4701,26 +4739,45 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4701
4739
  omError("[OM] Reflection failed", error);
4702
4740
  } finally {
4703
4741
  await this.storage.setReflectingFlag(record.id, false);
4742
+ reflectionHooks?.onReflectionEnd?.();
4704
4743
  unregisterOp(record.id, "reflecting");
4705
4744
  }
4706
4745
  }
4707
4746
  /**
4708
4747
  * Manually trigger observation.
4748
+ *
4749
+ * When `messages` is provided, those are used directly (filtered for unobserved)
4750
+ * instead of reading from storage. This allows external systems (e.g., opencode)
4751
+ * to pass conversation messages without duplicating them into Mastra's DB.
4709
4752
  */
4710
- async observe(threadId, resourceId, _prompt) {
4753
+ async observe(opts) {
4754
+ const { threadId, resourceId, messages, hooks } = opts;
4711
4755
  const lockKey = this.getLockKey(threadId, resourceId);
4756
+ const reflectionHooks = hooks ? { onReflectionStart: hooks.onReflectionStart, onReflectionEnd: hooks.onReflectionEnd } : void 0;
4712
4757
  await this.withLock(lockKey, async () => {
4713
4758
  const freshRecord = await this.getOrCreateRecord(threadId, resourceId);
4714
4759
  if (this.scope === "resource" && resourceId) {
4715
- await this.doResourceScopedObservation(
4716
- freshRecord,
4717
- threadId,
4718
- resourceId,
4719
- []
4720
- // no in-flight messages — everything is already in the DB
4721
- );
4760
+ const currentMessages = messages ?? [];
4761
+ if (!this.meetsObservationThreshold({
4762
+ record: freshRecord,
4763
+ unobservedTokens: this.tokenCounter.countMessages(currentMessages)
4764
+ })) {
4765
+ return;
4766
+ }
4767
+ hooks?.onObservationStart?.();
4768
+ try {
4769
+ await this.doResourceScopedObservation({
4770
+ record: freshRecord,
4771
+ currentThreadId: threadId,
4772
+ resourceId,
4773
+ currentThreadMessages: currentMessages,
4774
+ reflectionHooks
4775
+ });
4776
+ } finally {
4777
+ hooks?.onObservationEnd?.();
4778
+ }
4722
4779
  } else {
4723
- const unobservedMessages = await this.loadUnobservedMessages(
4780
+ const unobservedMessages = messages ? this.getUnobservedMessages(messages, freshRecord) : await this.loadUnobservedMessages(
4724
4781
  threadId,
4725
4782
  resourceId,
4726
4783
  freshRecord.lastObservedAt ? new Date(freshRecord.lastObservedAt) : void 0
@@ -4728,7 +4785,18 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4728
4785
  if (unobservedMessages.length === 0) {
4729
4786
  return;
4730
4787
  }
4731
- await this.doSynchronousObservation(freshRecord, threadId, unobservedMessages);
4788
+ if (!this.meetsObservationThreshold({
4789
+ record: freshRecord,
4790
+ unobservedTokens: this.tokenCounter.countMessages(unobservedMessages)
4791
+ })) {
4792
+ return;
4793
+ }
4794
+ hooks?.onObservationStart?.();
4795
+ try {
4796
+ await this.doSynchronousObservation({ record: freshRecord, threadId, unobservedMessages, reflectionHooks });
4797
+ } finally {
4798
+ hooks?.onObservationEnd?.();
4799
+ }
4732
4800
  }
4733
4801
  });
4734
4802
  }
@@ -4820,6 +4888,6 @@ ${unreflectedContent}` : freshRecord.bufferedReflection;
4820
4888
  }
4821
4889
  };
4822
4890
 
4823
- export { OBSERVATIONAL_MEMORY_DEFAULTS, OBSERVER_SYSTEM_PROMPT, ObservationalMemory, TokenCounter, buildObserverPrompt, buildObserverSystemPrompt, extractCurrentTask, formatMessagesForObserver, hasCurrentTaskSection, optimizeObservationsForContext, parseObserverOutput };
4824
- //# sourceMappingURL=chunk-5YW6JV6Y.js.map
4825
- //# sourceMappingURL=chunk-5YW6JV6Y.js.map
4891
+ export { OBSERVATIONAL_MEMORY_DEFAULTS, OBSERVATION_CONTEXT_INSTRUCTIONS, OBSERVATION_CONTEXT_PROMPT, OBSERVATION_CONTINUATION_HINT, OBSERVER_SYSTEM_PROMPT, ObservationalMemory, TokenCounter, buildObserverPrompt, buildObserverSystemPrompt, extractCurrentTask, formatMessagesForObserver, hasCurrentTaskSection, optimizeObservationsForContext, parseObserverOutput };
4892
+ //# sourceMappingURL=chunk-F5P5HTMC.js.map
4893
+ //# sourceMappingURL=chunk-F5P5HTMC.js.map