@mastra/opencode 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,87 @@
+ # @mastra/opencode
+
+ ## 0.0.2
+
+ ### Patch Changes
+
+ - @mastra/opencode: Add opencode plugin for Observational Memory integration ([#12925](https://github.com/mastra-ai/mastra/pull/12925))
+
+ Added standalone `observe()` API that accepts external messages directly, so integrations can trigger observation without duplicating messages into Mastra's storage.
+
+ **New exports:**
+ - `ObserveHooks` — lifecycle callbacks (`onObservationStart`, `onObservationEnd`, `onReflectionStart`, `onReflectionEnd`) for hooking into observation/reflection cycles
+ - `OBSERVATION_CONTEXT_PROMPT` — preamble that introduces the observations block
+ - `OBSERVATION_CONTEXT_INSTRUCTIONS` — rules for interpreting observations (placed after the `<observations>` block)
+ - `OBSERVATION_CONTINUATION_HINT` — behavioral guidance that prevents models from awkwardly acknowledging the memory system
+ - `getOrCreateRecord()` — now public, allows eager record initialization before the first observation cycle
+
+ ```ts
+ import { ObservationalMemory } from '@mastra/memory/processors';
+
+ const om = new ObservationalMemory({ storage, model: 'google/gemini-2.5-flash' });
+
+ // Eagerly initialize a record
+ await om.getOrCreateRecord(threadId);
+
+ // Pass messages directly with lifecycle hooks
+ await om.observe({
+ threadId,
+ messages: myMessages,
+ hooks: {
+ onObservationStart: () => console.log('Observing...'),
+ onObservationEnd: () => console.log('Done!'),
+ onReflectionStart: () => console.log('Reflecting...'),
+ onReflectionEnd: () => console.log('Reflected!'),
+ },
+ });
+ ```
+
+ **Breaking:** `observe()` now takes an object param instead of positional args. Update calls from `observe(threadId, resourceId)` to `observe({ threadId, resourceId })`.
+
+ - Updated dependencies [[`7ef618f`](https://github.com/mastra-ai/mastra/commit/7ef618f3c49c27e2f6b27d7f564c557c0734325b), [`b373564`](https://github.com/mastra-ai/mastra/commit/b37356491d43b4d53067f10cb669abaf2502f218), [`927c2af`](https://github.com/mastra-ai/mastra/commit/927c2af9792286c122e04409efce0f3c804f777f), [`927c2af`](https://github.com/mastra-ai/mastra/commit/927c2af9792286c122e04409efce0f3c804f777f), [`b896b41`](https://github.com/mastra-ai/mastra/commit/b896b41343de7fcc14442fb40fe82d189e65bbe2), [`6415277`](https://github.com/mastra-ai/mastra/commit/6415277a438faa00db2af850ead5dee25f40c428), [`191bc3a`](https://github.com/mastra-ai/mastra/commit/191bc3adfdbe4b262dbc93b7d9c3d6c6a3c8ef92), [`0831bbb`](https://github.com/mastra-ai/mastra/commit/0831bbb5bc750c18e9b22b45f18687c964b70828), [`74fb394`](https://github.com/mastra-ai/mastra/commit/74fb3944f51f55e1fc1ca65eede4254d8fe72aa3), [`63f7eda`](https://github.com/mastra-ai/mastra/commit/63f7eda605eb3e0c8c35ee3912ffe7c999c69f69), [`a5b67a3`](https://github.com/mastra-ai/mastra/commit/a5b67a3589a74415feb663a55d1858324a2afde9), [`877b02c`](https://github.com/mastra-ai/mastra/commit/877b02cdbb15e199184c7f2b8f217be8d3ebada7), [`cb8c38e`](https://github.com/mastra-ai/mastra/commit/cb8c38e6f855ad190383a7112ba95abef072d490), [`7567222`](https://github.com/mastra-ai/mastra/commit/7567222b1366f0d39980594792dd9d5060bfe2ab), [`af71458`](https://github.com/mastra-ai/mastra/commit/af71458e3b566f09c11d0e5a0a836dc818e7a24a), [`eb36bd8`](https://github.com/mastra-ai/mastra/commit/eb36bd8c52fcd6ec9674ac3b7a6412405b5983e1), [`3cbf121`](https://github.com/mastra-ai/mastra/commit/3cbf121f55418141924754a83102aade89835947)]:
+ - @mastra/core@1.4.0
+ - @mastra/libsql@1.4.0
+ - @mastra/memory@1.3.0
+
+ ## 0.0.2-alpha.0
+
+ ### Patch Changes
+
+ - @mastra/opencode: Add opencode plugin for Observational Memory integration ([#12925](https://github.com/mastra-ai/mastra/pull/12925))
+
+ Added standalone `observe()` API that accepts external messages directly, so integrations can trigger observation without duplicating messages into Mastra's storage.
+
+ **New exports:**
+ - `ObserveHooks` — lifecycle callbacks (`onObservationStart`, `onObservationEnd`, `onReflectionStart`, `onReflectionEnd`) for hooking into observation/reflection cycles
+ - `OBSERVATION_CONTEXT_PROMPT` — preamble that introduces the observations block
+ - `OBSERVATION_CONTEXT_INSTRUCTIONS` — rules for interpreting observations (placed after the `<observations>` block)
+ - `OBSERVATION_CONTINUATION_HINT` — behavioral guidance that prevents models from awkwardly acknowledging the memory system
+ - `getOrCreateRecord()` — now public, allows eager record initialization before the first observation cycle
+
+ ```ts
+ import { ObservationalMemory } from '@mastra/memory/processors';
+
+ const om = new ObservationalMemory({ storage, model: 'google/gemini-2.5-flash' });
+
+ // Eagerly initialize a record
+ await om.getOrCreateRecord(threadId);
+
+ // Pass messages directly with lifecycle hooks
+ await om.observe({
+ threadId,
+ messages: myMessages,
+ hooks: {
+ onObservationStart: () => console.log('Observing...'),
+ onObservationEnd: () => console.log('Done!'),
+ onReflectionStart: () => console.log('Reflecting...'),
+ onReflectionEnd: () => console.log('Reflected!'),
+ },
+ });
+ ```
+
+ **Breaking:** `observe()` now takes an object param instead of positional args. Update calls from `observe(threadId, resourceId)` to `observe({ threadId, resourceId })`.
+
+ - Updated dependencies [[`7ef618f`](https://github.com/mastra-ai/mastra/commit/7ef618f3c49c27e2f6b27d7f564c557c0734325b), [`b373564`](https://github.com/mastra-ai/mastra/commit/b37356491d43b4d53067f10cb669abaf2502f218), [`927c2af`](https://github.com/mastra-ai/mastra/commit/927c2af9792286c122e04409efce0f3c804f777f), [`927c2af`](https://github.com/mastra-ai/mastra/commit/927c2af9792286c122e04409efce0f3c804f777f), [`b896b41`](https://github.com/mastra-ai/mastra/commit/b896b41343de7fcc14442fb40fe82d189e65bbe2), [`6415277`](https://github.com/mastra-ai/mastra/commit/6415277a438faa00db2af850ead5dee25f40c428), [`191bc3a`](https://github.com/mastra-ai/mastra/commit/191bc3adfdbe4b262dbc93b7d9c3d6c6a3c8ef92), [`0831bbb`](https://github.com/mastra-ai/mastra/commit/0831bbb5bc750c18e9b22b45f18687c964b70828), [`74fb394`](https://github.com/mastra-ai/mastra/commit/74fb3944f51f55e1fc1ca65eede4254d8fe72aa3), [`63f7eda`](https://github.com/mastra-ai/mastra/commit/63f7eda605eb3e0c8c35ee3912ffe7c999c69f69), [`a5b67a3`](https://github.com/mastra-ai/mastra/commit/a5b67a3589a74415feb663a55d1858324a2afde9), [`877b02c`](https://github.com/mastra-ai/mastra/commit/877b02cdbb15e199184c7f2b8f217be8d3ebada7), [`cb8c38e`](https://github.com/mastra-ai/mastra/commit/cb8c38e6f855ad190383a7112ba95abef072d490), [`7567222`](https://github.com/mastra-ai/mastra/commit/7567222b1366f0d39980594792dd9d5060bfe2ab), [`af71458`](https://github.com/mastra-ai/mastra/commit/af71458e3b566f09c11d0e5a0a836dc818e7a24a), [`eb36bd8`](https://github.com/mastra-ai/mastra/commit/eb36bd8c52fcd6ec9674ac3b7a6412405b5983e1), [`3cbf121`](https://github.com/mastra-ai/mastra/commit/3cbf121f55418141924754a83102aade89835947)]:
+ - @mastra/core@1.4.0-alpha.0
+ - @mastra/libsql@1.4.0-alpha.0
+ - @mastra/memory@1.3.0-alpha.0
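The changelog example above shows `observe()`, but not how the three exported prompt constants are meant to fit together. The sketch below composes them around an `<observations>` block in the same order the plugin's own `experimental.chat.system.transform` hook uses in the removed `dist/index.js` later in this diff; the `om`, `sessionId`, and `buildObservationPrompt` names are illustrative only.

```ts
import {
  ObservationalMemory,
  optimizeObservationsForContext,
  OBSERVATION_CONTEXT_PROMPT,
  OBSERVATION_CONTEXT_INSTRUCTIONS,
  OBSERVATION_CONTINUATION_HINT,
} from '@mastra/memory/processors';

// Assumed to exist in the caller's scope: an ObservationalMemory instance and a session/thread id.
declare const om: ObservationalMemory;
declare const sessionId: string;

async function buildObservationPrompt(): Promise<string | undefined> {
  const observations = await om.getObservations(sessionId);
  if (!observations) return undefined; // nothing observed yet

  // Prepare the stored observations for context injection, as the plugin does.
  const optimized = optimizeObservationsForContext(observations);

  // Preamble, then the <observations> block, then the interpretation rules and continuation hint.
  return [
    OBSERVATION_CONTEXT_PROMPT,
    `<observations>\n${optimized}\n</observations>`,
    OBSERVATION_CONTEXT_INSTRUCTIONS,
    OBSERVATION_CONTINUATION_HINT,
  ].join('\n\n');
}
```

In the plugin source below, the equivalent composed string is pushed onto `output.system`, so the model receives the preamble, the observations, the rules, and the hint in that order.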
package/LICENSE.md ADDED
@@ -0,0 +1,15 @@
+ # Apache License 2.0
+
+ Copyright (c) 2025 Kepler Software, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@mastra/opencode",
- "version": "0.0.1",
+ "version": "0.0.2",
  "description": "OpenCode plugin for Mastra Observational Memory",
  "type": "module",
  "main": "./dist/index.js",
@@ -22,11 +22,6 @@
  },
  "./package.json": "./package.json"
  },
- "scripts": {
- "build:lib": "tsup --silent --config tsup.config.ts",
- "build:watch": "pnpm build:lib --watch",
- "lint": "eslint ."
- },
  "keywords": [
  "mastra",
  "opencode",
@@ -47,20 +42,25 @@
  "node": ">=22.13.0"
  },
  "dependencies": {
- "@mastra/memory": "workspace:*",
- "@mastra/core": "workspace:*",
- "@mastra/libsql": "workspace:*"
+ "@mastra/memory": "1.3.0",
+ "@mastra/core": "1.4.0",
+ "@mastra/libsql": "1.4.0"
  },
  "peerDependencies": {
  "@opencode-ai/plugin": "*"
  },
  "devDependencies": {
- "@internal/lint": "workspace:*",
- "@internal/types-builder": "workspace:*",
  "@opencode-ai/plugin": "*",
  "@opencode-ai/sdk": "*",
  "tsup": "^8.5.1",
- "typescript": "catalog:",
- "vitest": "catalog:"
+ "typescript": "^5.9.3",
+ "vitest": "4.0.16",
+ "@internal/lint": "0.0.59",
+ "@internal/types-builder": "0.0.34"
+ },
+ "scripts": {
+ "build:lib": "tsup --silent --config tsup.config.ts",
+ "build:watch": "pnpm build:lib --watch",
+ "lint": "eslint ."
  }
- }
+ }
package/dist/index.cjs DELETED
@@ -1,320 +0,0 @@
1
- 'use strict';
2
-
3
- var promises = require('fs/promises');
4
- var path = require('path');
5
- var libsql = require('@mastra/libsql');
6
- var processors = require('@mastra/memory/processors');
7
- var plugin = require('@opencode-ai/plugin');
8
-
9
- // src/index.ts
10
- var CONFIG_FILE = ".opencode/mastra.json";
11
- var DEFAULT_STORAGE_PATH = ".opencode/memory/observations.db";
12
- async function loadConfig(directory) {
13
- try {
14
- const configPath = path.join(directory, CONFIG_FILE);
15
- const raw = await promises.readFile(configPath, "utf-8");
16
- return JSON.parse(raw);
17
- } catch {
18
- return {};
19
- }
20
- }
21
- function convertMessages(messages, sessionId) {
22
- return messages.map(({ info, parts }) => {
23
- const convertedParts = parts.map((part) => {
24
- const p = part;
25
- const type = p.type;
26
- if (type === "text" && p.text) {
27
- return { type: "text", text: p.text };
28
- }
29
- if (type === "tool-invocation") {
30
- return {
31
- type: "tool-invocation",
32
- toolInvocation: {
33
- toolCallId: p.toolCallId,
34
- toolName: p.toolName,
35
- args: p.args,
36
- result: p.result,
37
- state: p.state
38
- }
39
- };
40
- }
41
- if (type === "file") {
42
- return {
43
- type: "file",
44
- url: p.url,
45
- mediaType: p.mediaType
46
- };
47
- }
48
- if (type === "image") {
49
- return {
50
- type: "image",
51
- image: p.image
52
- };
53
- }
54
- if (type === "reasoning" && p.reasoning) {
55
- return { type: "reasoning", reasoning: p.reasoning };
56
- }
57
- if (type?.startsWith("data-om-")) {
58
- return null;
59
- }
60
- return null;
61
- }).filter((p) => p !== null);
62
- if (convertedParts.length === 0) return null;
63
- if (info.role !== "user" && info.role !== "assistant") return null;
64
- return {
65
- id: info.id,
66
- role: info.role,
67
- // opencode timestamps are already in milliseconds (JavaScript Date)
68
- createdAt: new Date(info.time.created),
69
- threadId: sessionId,
70
- resourceId: sessionId,
71
- content: {
72
- format: 2,
73
- parts: convertedParts
74
- }
75
- };
76
- }).filter((m) => m !== null);
77
- }
78
- function progressBar(current, total, width = 20) {
79
- const pct = total > 0 ? Math.min(current / total, 1) : 0;
80
- const filled = Math.round(pct * width);
81
- return `[${"\u2588".repeat(filled)}${"\u2591".repeat(width - filled)}] ${(pct * 100).toFixed(1)}%`;
82
- }
83
- function formatTokens(n) {
84
- return n >= 1e3 ? `${(n / 1e3).toFixed(1)}k` : String(n);
85
- }
86
- function resolveThreshold(t) {
87
- return typeof t === "number" ? t : t.max;
88
- }
89
- var MastraPlugin = async (ctx) => {
90
- const config = await loadConfig(ctx.directory);
91
- let credentialsReady = false;
92
- const resolveCredentials = async () => {
93
- if (credentialsReady) return;
94
- try {
95
- const providersResponse = await ctx.client.config.providers();
96
- if (providersResponse.data) {
97
- for (const provider of providersResponse.data.providers) {
98
- if (provider.key && provider.env) {
99
- for (const envVar of provider.env) {
100
- if (!process.env[envVar]) {
101
- process.env[envVar] = provider.key;
102
- }
103
- }
104
- }
105
- }
106
- }
107
- } catch {
108
- }
109
- credentialsReady = true;
110
- };
111
- const dbRelativePath = config.storagePath ?? DEFAULT_STORAGE_PATH;
112
- const dbAbsolutePath = path.join(ctx.directory, dbRelativePath);
113
- await promises.mkdir(path.dirname(dbAbsolutePath), { recursive: true });
114
- const storagePath = `file:${dbAbsolutePath}`;
115
- const store = new libsql.LibSQLStore({ id: "mastra-om", url: storagePath });
116
- await store.init();
117
- const storage = await store.getStore("memory");
118
- if (!storage) {
119
- throw new Error(`@mastra/opencode: failed to initialize memory storage from ${storagePath}`);
120
- }
121
- const om = new processors.ObservationalMemory({
122
- storage,
123
- model: config.model,
124
- observation: config.observation,
125
- reflection: config.reflection,
126
- scope: config.scope,
127
- shareTokenBudget: config.shareTokenBudget
128
- });
129
- setTimeout(() => {
130
- void ctx.client.tui.showToast({
131
- body: {
132
- title: "Mastra",
133
- message: "Observational Memory activated",
134
- variant: "success",
135
- duration: 3e3
136
- }
137
- });
138
- }, 500);
139
- return {
140
- // Hook: Eagerly initialize OM record on session creation
141
- // so diagnostic tools work immediately (before first observation cycle).
142
- event: async ({ event }) => {
143
- if (event.type === "session.created") {
144
- const sessionId = event.properties.info.id;
145
- try {
146
- await om.getOrCreateRecord(sessionId);
147
- } catch (err) {
148
- void ctx.client.tui.showToast({
149
- body: {
150
- title: "Mastra",
151
- message: `Failed to initialize Observational Memory: ${err instanceof Error ? err.message : String(err)}`,
152
- variant: "error",
153
- duration: 5e3
154
- }
155
- });
156
- }
157
- }
158
- },
159
- // Hook: Transform messages before they reach the model.
160
- // This is the core integration point — observe and shape context in one pass:
161
- // 1. Convert opencode messages → MastraDBMessage format
162
- // 2. Run observation if threshold is met (with toast notifications)
163
- // 3. Inject observation summary and filter out already-observed messages
164
- "experimental.chat.messages.transform": async (_input, output) => {
165
- const sessionId = output.messages[0]?.info.sessionID;
166
- if (!sessionId) return;
167
- await resolveCredentials();
168
- try {
169
- const mastraMessages = convertMessages(output.messages, sessionId);
170
- if (mastraMessages.length > 0) {
171
- await om.observe({
172
- threadId: sessionId,
173
- messages: mastraMessages,
174
- hooks: {
175
- onObservationStart: () => {
176
- void ctx.client.tui.showToast({
177
- body: {
178
- title: "Mastra",
179
- message: "Observing conversation...",
180
- variant: "info",
181
- duration: 1e4
182
- }
183
- });
184
- },
185
- onObservationEnd: () => {
186
- void ctx.client.tui.showToast({
187
- body: {
188
- title: "Mastra",
189
- message: "Observation complete",
190
- variant: "success",
191
- duration: 3e3
192
- }
193
- });
194
- },
195
- onReflectionStart: () => {
196
- void ctx.client.tui.showToast({
197
- body: {
198
- title: "Mastra",
199
- message: "Reflecting on observations...",
200
- variant: "info",
201
- duration: 1e4
202
- }
203
- });
204
- },
205
- onReflectionEnd: () => {
206
- void ctx.client.tui.showToast({
207
- body: {
208
- title: "Mastra",
209
- message: "Reflection complete",
210
- variant: "success",
211
- duration: 3e3
212
- }
213
- });
214
- }
215
- }
216
- });
217
- }
218
- const record = await om.getRecord(sessionId);
219
- if (record?.lastObservedAt) {
220
- const lastObservedAt = new Date(record.lastObservedAt);
221
- output.messages = output.messages.filter(({ info }) => {
222
- const msgTime = new Date(info.time.created);
223
- return msgTime > lastObservedAt;
224
- });
225
- }
226
- } catch (err) {
227
- void ctx.client.tui.showToast({
228
- body: {
229
- title: "Mastra",
230
- message: `Observational Memory error: ${err instanceof Error ? err.message : String(err)}`,
231
- variant: "error",
232
- duration: 5e3
233
- }
234
- });
235
- }
236
- },
237
- // Hook: Inject observations into the system prompt so the model has compressed context.
238
- "experimental.chat.system.transform": async (input, output) => {
239
- const sessionId = input.sessionID;
240
- if (!sessionId) return;
241
- try {
242
- const observations = await om.getObservations(sessionId);
243
- if (!observations) return;
244
- const optimized = processors.optimizeObservationsForContext(observations);
245
- output.system.push(
246
- `${processors.OBSERVATION_CONTEXT_PROMPT}
247
-
248
- <observations>
249
- ${optimized}
250
- </observations>
251
-
252
- ${processors.OBSERVATION_CONTEXT_INSTRUCTIONS}
253
-
254
- ${processors.OBSERVATION_CONTINUATION_HINT}`
255
- );
256
- } catch {
257
- }
258
- },
259
- // Diagnostic tools for inspecting OM state
260
- tool: {
261
- memory_status: plugin.tool({
262
- description: "Show Observational Memory progress \u2014 how close the session is to the next observation and reflection cycle.",
263
- args: {},
264
- async execute(_args, context) {
265
- const threadId = context.sessionID;
266
- const record = await om.getRecord(threadId);
267
- if (!record) {
268
- return "No Observational Memory record found for this session.";
269
- }
270
- const omConfig = om.config;
271
- const obsThreshold = resolveThreshold(omConfig.observation.messageTokens);
272
- const refThreshold = resolveThreshold(omConfig.reflection.observationTokens);
273
- const obsTokens = record.observationTokenCount ?? 0;
274
- const tokenCounter = new processors.TokenCounter();
275
- let unobservedTokens = 0;
276
- try {
277
- const resp = await ctx.client.session.messages({ path: { id: threadId } });
278
- if (resp.data) {
279
- const allMastra = convertMessages(resp.data, threadId);
280
- const unobserved = record.lastObservedAt ? allMastra.filter((m) => m.createdAt > new Date(record.lastObservedAt)) : allMastra;
281
- unobservedTokens = tokenCounter.countMessages(unobserved);
282
- }
283
- } catch {
284
- unobservedTokens = record.pendingMessageTokens ?? 0;
285
- }
286
- const lines = [
287
- `Observational Memory`,
288
- `Scope: ${record.scope} | Generations: ${record.generationCount ?? 0}`,
289
- ``,
290
- `\u2500\u2500 Observation \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`,
291
- `Unobserved: ${formatTokens(unobservedTokens)} / ${formatTokens(obsThreshold)} tokens`,
292
- progressBar(unobservedTokens, obsThreshold),
293
- ``,
294
- `\u2500\u2500 Reflection \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`,
295
- `Observations: ${formatTokens(obsTokens)} / ${formatTokens(refThreshold)} tokens`,
296
- progressBar(obsTokens, refThreshold),
297
- ``,
298
- `\u2500\u2500 Status \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`,
299
- `Last observed: ${record.lastObservedAt ?? "never"}`,
300
- `Observing: ${record.isObserving ? "yes" : "no"} | Reflecting: ${record.isReflecting ? "yes" : "no"}`
301
- ];
302
- return lines.join("\n");
303
- }
304
- }),
305
- memory_observations: plugin.tool({
306
- description: "Show the current active observations stored in Observational Memory.",
307
- args: {},
308
- async execute(_args, context) {
309
- const threadId = context.sessionID;
310
- const observations = await om.getObservations(threadId);
311
- return observations ?? "No observations stored yet.";
312
- }
313
- })
314
- }
315
- };
316
- };
317
-
318
- exports.MastraPlugin = MastraPlugin;
319
- //# sourceMappingURL=index.cjs.map
320
- //# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/index.ts"],"names":["join","readFile","mkdir","dirname","LibSQLStore","ObservationalMemory","optimizeObservationsForContext","OBSERVATION_CONTEXT_PROMPT","OBSERVATION_CONTEXT_INSTRUCTIONS","OBSERVATION_CONTINUATION_HINT","tool","TokenCounter"],"mappings":";;;;;;;;;AAwDA,IAAM,WAAA,GAAc,uBAAA;AACpB,IAAM,oBAAA,GAAuB,kCAAA;AAE7B,eAAe,WAAW,SAAA,EAAkD;AAC1E,EAAA,IAAI;AACF,IAAA,MAAM,UAAA,GAAaA,SAAA,CAAK,SAAA,EAAW,WAAW,CAAA;AAC9C,IAAA,MAAM,GAAA,GAAM,MAAMC,iBAAA,CAAS,UAAA,EAAY,OAAO,CAAA;AAC9C,IAAA,OAAO,IAAA,CAAK,MAAM,GAAG,CAAA;AAAA,EACvB,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,EAAC;AAAA,EACV;AACF;AAKA,SAAS,eAAA,CAAgB,UAA8C,SAAA,EAAmB;AACxF,EAAA,OAAO,SACJ,GAAA,CAAI,CAAC,EAAE,IAAA,EAAM,OAAM,KAAM;AAGxB,IAAA,MAAM,cAAA,GAAiB,KAAA,CACpB,GAAA,CAAI,CAAC,IAAA,KAAc;AAClB,MAAA,MAAM,CAAA,GAAI,IAAA;AACV,MAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AAEf,MAAA,IAAI,IAAA,KAAS,MAAA,IAAU,CAAA,CAAE,IAAA,EAAM;AAC7B,QAAA,OAAO,EAAE,IAAA,EAAM,MAAA,EAAQ,IAAA,EAAM,EAAE,IAAA,EAAK;AAAA,MACtC;AAEA,MAAA,IAAI,SAAS,iBAAA,EAAmB;AAC9B,QAAA,OAAO;AAAA,UACL,IAAA,EAAM,iBAAA;AAAA,UACN,cAAA,EAAgB;AAAA,YACd,YAAY,CAAA,CAAE,UAAA;AAAA,YACd,UAAU,CAAA,CAAE,QAAA;AAAA,YACZ,MAAM,CAAA,CAAE,IAAA;AAAA,YACR,QAAQ,CAAA,CAAE,MAAA;AAAA,YACV,OAAO,CAAA,CAAE;AAAA;AACX,SACF;AAAA,MACF;AAEA,MAAA,IAAI,SAAS,MAAA,EAAQ;AACnB,QAAA,OAAO;AAAA,UACL,IAAA,EAAM,MAAA;AAAA,UACN,KAAK,CAAA,CAAE,GAAA;AAAA,UACP,WAAW,CAAA,CAAE;AAAA,SACf;AAAA,MACF;AAEA,MAAA,IAAI,SAAS,OAAA,EAAS;AACpB,QAAA,OAAO;AAAA,UACL,IAAA,EAAM,OAAA;AAAA,UACN,OAAO,CAAA,CAAE;AAAA,SACX;AAAA,MACF;AAEA,MAAA,IAAI,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,SAAA,EAAW;AACvC,QAAA,OAAO,EAAE,IAAA,EAAM,WAAA,EAAa,SAAA,EAAW,EAAE,SAAA,EAAU;AAAA,MACrD;AAGA,MAAA,IAAI,IAAA,EAAM,UAAA,CAAW,UAAU,CAAA,EAAG;AAChC,QAAA,OAAO,IAAA;AAAA,MACT;AAEA,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA,CACA,MAAA,CAAO,CAAC,CAAA,KAAkC,MAAM,IAAI,CAAA;AAEvD,IAAA,IAAI,cAAA,CAAe,MAAA,KAAW,CAAA,EAAG,OAAO,IAAA;AACxC,IAAA,IAAI,KAAK,IAAA,KAAS,MAAA,IAAU,IAAA,CAAK,IAAA,KAAS,aAAa,OAAO,IAAA;AAE9D,IAAA,OAAO;AAAA,MACL,IAAI,IAAA,CAAK,EAAA;AAAA,MACT,MAAM,IAAA,CAAK,IAAA;AAAA;AAAA,MAEX,SAAA,EAAW,IAAI,IAAA,CAAK,IAAA,CAAK,KAAK,OAAO,CAAA;AAAA,MACrC,QAAA,EAAU,SAAA;AAAA,MACV,UAAA,EAAY,SAAA;AAAA,MACZ,OAAA,EAAS;AAAA,QACP,MAAA,EAAQ,CAAA;AAAA,QACR,KAAA,EAAO;AAAA;AACT,KACF;AAAA,EACF,CAAC,CAAA,CACA,MAAA,CAAO,CAAC,CAAA,KAAkC,MAAM,IAAI,CAAA;AACzD;AAEA,SAAS,WAAA,CAAY,OAAA,EAAiB,KAAA,EAAe,KAAA,GAAQ,EAAA,EAAY;AACvE,EAAA,MAAM,GAAA,GAAM,QAAQ,CAAA,GAAI,IAAA,CAAK,IAAI,OAAA,GAAU,KAAA,EAAO,CAAC,CAAA,GAAI,CAAA;AACvD,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,GAAA,GAAM,KAAK,CAAA;AACrC,EAAA,OAAO,IAAI,QAAA,CAAI,MAAA,CAAO,MAAM,CAAC,GAAG,QAAA,CAAI,MAAA,CAAO,KAAA,GAAQ,MAAM,CAAC,CAAA,EAAA,EAAA,CAAM,GAAA,GAAM,GAAA,EAAK,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAA,CAAA;AACvF;AAEA,SAAS,aAAa,CAAA,EAAmB;AACvC,EAAA,OAAO,CAAA,IAAK,GAAA,GAAO,CAAA,EAAA,CAAI,CAAA,GAAI,GAAA,EAAM,QAAQ,CAAC,CAAC,CAAA,CAAA,CAAA,GAAM,MAAA,CAAO,CAAC,CAAA;AAC3D;AAEA,SAAS,iBAAiB,CAAA,EAAkD;AAC1E,EAAA,OAAO,OAAO,CAAA,KAAM,QAAA,GAAW,CAAA,GAAI,CAAA,CAAE,GAAA;AACvC;AAEO,IAAM,YAAA,GAAuB,OAAM,GAAA,KAAO;AAE/C,EAAA,MAAM,MAAA,GAAS,MAAM,UAAA,CAAW,GAAA,CAAI,SAAS,CAAA;AAI7C,EAAA,IAAI,gBAAA,GAAmB,KAAA;AACvB,EAAA,MAAM,qBAAqB,YAAY;AACrC,IAAA,IAAI,gBAAA,EAAkB;AACtB,IAAA,IAAI;AACF,MAAA,MAAM,iBAAA,GAAoB,MAAM,GAAA,CAAI,MAAA,CAAO,OAAO,SAAA,EAAU;AAC5D,MAAA,IAAI,kBAAkB,IAAA,EAAM;AAC1B,QAAA,KAAA,MAAW,QAAA,IAAY,iBAAA,CAAkB,IAAA,CAAK,SAAA,EAAW;AACvD,UAAA,IAAI,QAAA,CAAS,GAAA,IAAO,QAAA,CAAS,GAAA,EAAK;AAChC,YAAA,KAAA,MAAW,MAAA,IAAU,SAAS,GAAA,EAAK;AACjC,cAAA,IAAI,CAAC,OAAA,CAAQ,GAAA,CAAI,MAAM,CAAA,EAAG;AACxB,gBAAA,OAAA,CAAQ,GAAA,CAAI,MAAM,CAAA,GAAI,QAAA,CAAS,GAAA;AAAA,cACjC;AAAA,YACF;AAAA,UACF;
AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAA,CAAA,MAAQ;AAAA,IAER;AACA,IAAA,gBAAA,GAAmB,IAAA;AAAA,EACrB,CAAA;AAGA,EAAA,MAAM,cAAA,GAAiB,OAAO,WAAA,IAAe,oBAAA;AAC7C,EAAA,MAAM,cAAA,GAAiBD,SAAA,CAAK,GAAA,CAAI,SAAA,EAAW,cAAc,CAAA;AACzD,EAAA,MAAME,eAAMC,YAAA,CAAQ,cAAc,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AACxD,EAAA,MAAM,WAAA,GAAc,QAAQ,cAAc,CAAA,CAAA;AAC1C,EAAA,MAAM,KAAA,GAAQ,IAAIC,kBAAA,CAAY,EAAE,IAAI,WAAA,EAAa,GAAA,EAAK,aAAa,CAAA;AACnE,EAAA,MAAM,MAAM,IAAA,EAAK;AACjB,EAAA,MAAM,OAAA,GAAU,MAAM,KAAA,CAAM,QAAA,CAAS,QAAQ,CAAA;AAC7C,EAAA,IAAI,CAAC,OAAA,EAAS;AACZ,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,2DAAA,EAA8D,WAAW,CAAA,CAAE,CAAA;AAAA,EAC7F;AAIA,EAAA,MAAM,EAAA,GAAK,IAAIC,8BAAA,CAAoB;AAAA,IACjC,OAAA;AAAA,IACA,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,aAAa,MAAA,CAAO,WAAA;AAAA,IACpB,YAAY,MAAA,CAAO,UAAA;AAAA,IACnB,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,kBAAkB,MAAA,CAAO;AAAA,GAC1B,CAAA;AAGD,EAAA,UAAA,CAAW,MAAM;AACf,IAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,MAC5B,IAAA,EAAM;AAAA,QACJ,KAAA,EAAO,QAAA;AAAA,QACP,OAAA,EAAS,gCAAA;AAAA,QACT,OAAA,EAAS,SAAA;AAAA,QACT,QAAA,EAAU;AAAA;AACZ,KACD,CAAA;AAAA,EACH,GAAG,GAAG,CAAA;AAEN,EAAA,OAAO;AAAA;AAAA;AAAA,IAGL,KAAA,EAAO,OAAO,EAAE,KAAA,EAAM,KAAM;AAC1B,MAAA,IAAI,KAAA,CAAM,SAAS,iBAAA,EAAmB;AACpC,QAAA,MAAM,SAAA,GAAY,KAAA,CAAM,UAAA,CAAW,IAAA,CAAK,EAAA;AACxC,QAAA,IAAI;AACF,UAAA,MAAM,EAAA,CAAG,kBAAkB,SAAS,CAAA;AAAA,QACtC,SAAS,GAAA,EAAK;AACZ,UAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,YAC5B,IAAA,EAAM;AAAA,cACJ,KAAA,EAAO,QAAA;AAAA,cACP,OAAA,EAAS,8CAA8C,GAAA,YAAe,KAAA,GAAQ,IAAI,OAAA,GAAU,MAAA,CAAO,GAAG,CAAC,CAAA,CAAA;AAAA,cACvG,OAAA,EAAS,OAAA;AAAA,cACT,QAAA,EAAU;AAAA;AACZ,WACD,CAAA;AAAA,QACH;AAAA,MACF;AAAA,IACF,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOA,sCAAA,EAAwC,OAAO,MAAA,EAAQ,MAAA,KAAW;AAChE,MAAA,MAAM,SAAA,GAAY,MAAA,CAAO,QAAA,CAAS,CAAC,GAAG,IAAA,CAAK,SAAA;AAC3C,MAAA,IAAI,CAAC,SAAA,EAAW;AAGhB,MAAA,MAAM,kBAAA,EAAmB;AAEzB,MAAA,IAAI;AACF,QAAA,MAAM,cAAA,GAAiB,eAAA,CAAgB,MAAA,CAAO,QAAA,EAAU,SAAS,CAAA;AAGjE,QAAA,IAAI,cAAA,CAAe,SAAS,CAAA,EAAG;AAC7B,UAAA,MAAM,GAAG,OAAA,CAAQ;AAAA,YACf,QAAA,EAAU,SAAA;AAAA,YACV,QAAA,EAAU,cAAA;AAAA,YACV,KAAA,EAAO;AAAA,cACL,oBAAoB,MAAM;AACxB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,2BAAA;AAAA,oBACT,OAAA,EAAS,MAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH,CAAA;AAAA,cACA,kBAAkB,MAAM;AACtB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,sBAAA;AAAA,oBACT,OAAA,EAAS,SAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH,CAAA;AAAA,cACA,mBAAmB,MAAM;AACvB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,+BAAA;AAAA,oBACT,OAAA,EAAS,MAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH,CAAA;AAAA,cACA,iBAAiB,MAAM;AACrB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,qBAAA;AAAA,oBACT,OAAA,EAAS,SAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH;AAAA;AACF,WACD,CAAA;AAAA,QACH;AAGA,QAAA,MAAM,MAAA,GAAS,MAAM,EAAA,CAAG,SAAA,CAAU,SAAS,CAAA;AAC3C,QAAA,IAAI,QAAQ,cAAA,EAAgB;AAC1B,UAAA,MAAM,cAAA,GAAiB,IAAI,IAAA,CAAK,MAAA,CAAO,cAAc,CAAA;AACrD,UAAA,MAAA,CAAO,WAAW,MAAA,CAAO,QAAA,CAAS,OAAO,CAAC,EAAE,MAAK,KAAM;AAErD,YAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,IAAA,CAAK,KAAK,OAAO,CAAA;AAC1C,YAAA,OAAO,OAAA,GAAU,cAAA;AAAA,UACnB,CAAC,CAAA;AAAA,QACH;AAAA,MACF,SAAS,GAAA,EAAK;AACZ,QAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,UAC5B,IAAA,EAAM;AAAA,YACJ,KAAA,EAAO,QAAA;AAAA,YACP,OAAA,EAAS,+BAA+B,GAAA,YAAe,KAAA,GAAQ,
IAAI,OAAA,GAAU,MAAA,CAAO,GAAG,CAAC,CAAA,CAAA;AAAA,YACxF,OAAA,EAAS,OAAA;AAAA,YACT,QAAA,EAAU;AAAA;AACZ,SACD,CAAA;AAAA,MACH;AAAA,IACF,CAAA;AAAA;AAAA,IAGA,oCAAA,EAAsC,OAAO,KAAA,EAAO,MAAA,KAAW;AAC7D,MAAA,MAAM,YAAY,KAAA,CAAM,SAAA;AACxB,MAAA,IAAI,CAAC,SAAA,EAAW;AAEhB,MAAA,IAAI;AACF,QAAA,MAAM,YAAA,GAAe,MAAM,EAAA,CAAG,eAAA,CAAgB,SAAS,CAAA;AACvD,QAAA,IAAI,CAAC,YAAA,EAAc;AAEnB,QAAA,MAAM,SAAA,GAAYC,0CAA+B,YAAY,CAAA;AAC7D,QAAA,MAAA,CAAO,MAAA,CAAO,IAAA;AAAA,UACZ,GAAGC,qCAA0B;;AAAA;AAAA,EAAuB,SAAS;AAAA;;AAAA,EAAwBC,2CAAgC;;AAAA,EAAOC,wCAA6B,CAAA;AAAA,SAC3J;AAAA,MACF,CAAA,CAAA,MAAQ;AAAA,MAER;AAAA,IACF,CAAA;AAAA;AAAA,IAGA,IAAA,EAAM;AAAA,MACJ,eAAeC,WAAA,CAAK;AAAA,QAClB,WAAA,EAAa,kHAAA;AAAA,QACb,MAAM,EAAC;AAAA,QACP,MAAM,OAAA,CAAQ,KAAA,EAAO,OAAA,EAAS;AAC5B,UAAA,MAAM,WAAW,OAAA,CAAQ,SAAA;AACzB,UAAA,MAAM,MAAA,GAAS,MAAM,EAAA,CAAG,SAAA,CAAU,QAAQ,CAAA;AAC1C,UAAA,IAAI,CAAC,MAAA,EAAQ;AACX,YAAA,OAAO,wDAAA;AAAA,UACT;AAEA,UAAA,MAAM,WAAW,EAAA,CAAG,MAAA;AACpB,UAAA,MAAM,YAAA,GAAe,gBAAA,CAAiB,QAAA,CAAS,WAAA,CAAY,aAAa,CAAA;AACxE,UAAA,MAAM,YAAA,GAAe,gBAAA,CAAiB,QAAA,CAAS,UAAA,CAAW,iBAAiB,CAAA;AAC3E,UAAA,MAAM,SAAA,GAAY,OAAO,qBAAA,IAAyB,CAAA;AAGlD,UAAA,MAAM,YAAA,GAAe,IAAIC,uBAAA,EAAa;AACtC,UAAA,IAAI,gBAAA,GAAmB,CAAA;AACvB,UAAA,IAAI;AACF,YAAA,MAAM,IAAA,GAAO,MAAM,GAAA,CAAI,MAAA,CAAO,OAAA,CAAQ,QAAA,CAAS,EAAE,IAAA,EAAM,EAAE,EAAA,EAAI,QAAA,EAAS,EAAG,CAAA;AACzE,YAAA,IAAI,KAAK,IAAA,EAAM;AACb,cAAA,MAAM,SAAA,GAAY,eAAA,CAAgB,IAAA,CAAK,IAAA,EAAM,QAAQ,CAAA;AACrD,cAAA,MAAM,UAAA,GAAa,MAAA,CAAO,cAAA,GACtB,SAAA,CAAU,MAAA,CAAO,CAAA,CAAA,KAAK,CAAA,CAAE,SAAA,GAAY,IAAI,IAAA,CAAK,MAAA,CAAO,cAAe,CAAC,CAAA,GACpE,SAAA;AACJ,cAAA,gBAAA,GAAmB,YAAA,CAAa,cAAc,UAAU,CAAA;AAAA,YAC1D;AAAA,UACF,CAAA,CAAA,MAAQ;AAEN,YAAA,gBAAA,GAAmB,OAAO,oBAAA,IAAwB,CAAA;AAAA,UACpD;AAEA,UAAA,MAAM,KAAA,GAAQ;AAAA,YACZ,CAAA,oBAAA,CAAA;AAAA,YACA,UAAU,MAAA,CAAO,KAAK,CAAA,kBAAA,EAAqB,MAAA,CAAO,mBAAmB,CAAC,CAAA,CAAA;AAAA,YACtE,CAAA,CAAA;AAAA,YACA,CAAA,6MAAA,CAAA;AAAA,YACA,eAAe,YAAA,CAAa,gBAAgB,CAAC,CAAA,GAAA,EAAM,YAAA,CAAa,YAAY,CAAC,CAAA,OAAA,CAAA;AAAA,YAC7E,WAAA,CAAY,kBAAkB,YAAY,CAAA;AAAA,YAC1C,CAAA,CAAA;AAAA,YACA,CAAA,4MAAA,CAAA;AAAA,YACA,iBAAiB,YAAA,CAAa,SAAS,CAAC,CAAA,GAAA,EAAM,YAAA,CAAa,YAAY,CAAC,CAAA,OAAA,CAAA;AAAA,YACxE,WAAA,CAAY,WAAW,YAAY,CAAA;AAAA,YACnC,CAAA,CAAA;AAAA,YACA,CAAA,gOAAA,CAAA;AAAA,YACA,CAAA,eAAA,EAAkB,MAAA,CAAO,cAAA,IAAkB,OAAO,CAAA,CAAA;AAAA,YAClD,CAAA,WAAA,EAAc,OAAO,WAAA,GAAc,KAAA,GAAQ,IAAI,CAAA,iBAAA,EAAoB,MAAA,CAAO,YAAA,GAAe,KAAA,GAAQ,IAAI,CAAA;AAAA,WACvG;AAEA,UAAA,OAAO,KAAA,CAAM,KAAK,IAAI,CAAA;AAAA,QACxB;AAAA,OACD,CAAA;AAAA,MAED,qBAAqBD,WAAA,CAAK;AAAA,QACxB,WAAA,EAAa,sEAAA;AAAA,QACb,MAAM,EAAC;AAAA,QACP,MAAM,OAAA,CAAQ,KAAA,EAAO,OAAA,EAAS;AAC5B,UAAA,MAAM,WAAW,OAAA,CAAQ,SAAA;AACzB,UAAA,MAAM,YAAA,GAAe,MAAM,EAAA,CAAG,eAAA,CAAgB,QAAQ,CAAA;AACtD,UAAA,OAAO,YAAA,IAAgB,6BAAA;AAAA,QACzB;AAAA,OACD;AAAA;AACH,GACF;AACF","file":"index.cjs","sourcesContent":["/**\n * @mastra/opencode\n *\n * OpenCode plugin that brings Mastra Observational Memory into opencode sessions.\n *\n * Mastra OM compresses long conversation history into structured observations\n * using an Observer (extract) and Reflector (condense) architecture.\n *\n * Configuration is read from .opencode/mastra.json in the project root.\n *\n * @example .opencode/mastra.json\n * ```json\n * {\n * \"model\": \"google/gemini-2.5-flash\",\n * \"observation\": { \"messageTokens\": 20000 },\n * \"reflection\": { \"observationTokens\": 90000 },\n * \"storagePath\": \".opencode/memory/observations.db\"\n * }\n * ```\n */\n\nimport { readFile, mkdir } from 'node:fs/promises';\nimport { join, dirname } from 
'node:path';\nimport type { ObservationalMemoryOptions } from '@mastra/core/memory';\nimport { LibSQLStore } from '@mastra/libsql';\nimport {\n ObservationalMemory,\n TokenCounter,\n optimizeObservationsForContext,\n OBSERVATION_CONTINUATION_HINT,\n OBSERVATION_CONTEXT_PROMPT,\n OBSERVATION_CONTEXT_INSTRUCTIONS,\n} from '@mastra/memory/processors';\nimport type { Plugin } from '@opencode-ai/plugin';\nimport { tool } from '@opencode-ai/plugin';\nimport type { Message, Part } from '@opencode-ai/sdk';\n\nexport type { ObservationalMemoryOptions };\n\n/**\n * Plugin config read from .opencode/mastra.json.\n * Extends Mastra's ObservationalMemoryOptions with opencode-specific fields.\n *\n * In the opencode plugin context, pass string model IDs\n * (e.g., 'google/gemini-2.5-flash') — Mastra's provider registry resolves them.\n */\nexport interface MastraOMPluginConfig extends ObservationalMemoryOptions {\n /**\n * Path to the SQLite database file for observation storage.\n * Relative to the project root.\n *\n * @default '.opencode/memory/observations.db'\n */\n storagePath?: string;\n}\n\nconst CONFIG_FILE = '.opencode/mastra.json';\nconst DEFAULT_STORAGE_PATH = '.opencode/memory/observations.db';\n\nasync function loadConfig(directory: string): Promise<MastraOMPluginConfig> {\n try {\n const configPath = join(directory, CONFIG_FILE);\n const raw = await readFile(configPath, 'utf-8');\n return JSON.parse(raw) as MastraOMPluginConfig;\n } catch {\n // No config file or invalid JSON — use defaults\n return {};\n }\n}\n\n/** Convert opencode messages to MastraDBMessage format.\n * Preserves all part types including tool invocations, files, images, and reasoning.\n */\nfunction convertMessages(messages: { info: Message; parts: Part[] }[], sessionId: string) {\n return messages\n .map(({ info, parts }) => {\n // Convert ALL part types, not just text\n // Use type assertions since Part union type is restrictive\n const convertedParts = parts\n .map((part): any => {\n const p = part as any;\n const type = p.type as string;\n\n if (type === 'text' && p.text) {\n return { type: 'text', text: p.text };\n }\n\n if (type === 'tool-invocation') {\n return {\n type: 'tool-invocation',\n toolInvocation: {\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n args: p.args,\n result: p.result,\n state: p.state,\n },\n };\n }\n\n if (type === 'file') {\n return {\n type: 'file',\n url: p.url,\n mediaType: p.mediaType,\n };\n }\n\n if (type === 'image') {\n return {\n type: 'image',\n image: p.image,\n };\n }\n\n if (type === 'reasoning' && p.reasoning) {\n return { type: 'reasoning', reasoning: p.reasoning };\n }\n\n // Skip unknown or internal part types\n if (type?.startsWith('data-om-')) {\n return null;\n }\n\n return null;\n })\n .filter((p): p is NonNullable<typeof p> => p !== null);\n\n if (convertedParts.length === 0) return null;\n if (info.role !== 'user' && info.role !== 'assistant') return null;\n\n return {\n id: info.id,\n role: info.role,\n // opencode timestamps are already in milliseconds (JavaScript Date)\n createdAt: new Date(info.time.created),\n threadId: sessionId,\n resourceId: sessionId,\n content: {\n format: 2 as const,\n parts: convertedParts,\n },\n };\n })\n .filter((m): m is NonNullable<typeof m> => m !== null);\n}\n\nfunction progressBar(current: number, total: number, width = 20): string {\n const pct = total > 0 ? 
Math.min(current / total, 1) : 0;\n const filled = Math.round(pct * width);\n return `[${'█'.repeat(filled)}${'░'.repeat(width - filled)}] ${(pct * 100).toFixed(1)}%`;\n}\n\nfunction formatTokens(n: number): string {\n return n >= 1000 ? `${(n / 1000).toFixed(1)}k` : String(n);\n}\n\nfunction resolveThreshold(t: number | { min: number; max: number }): number {\n return typeof t === 'number' ? t : t.max;\n}\n\nexport const MastraPlugin: Plugin = async ctx => {\n // Load config from .opencode/mastra.json\n const config = await loadConfig(ctx.directory);\n\n // Resolve API keys from opencode's provider store (deferred so it doesn't block plugin init).\n // .env takes priority — opencode keys only fill in gaps.\n let credentialsReady = false;\n const resolveCredentials = async () => {\n if (credentialsReady) return;\n try {\n const providersResponse = await ctx.client.config.providers();\n if (providersResponse.data) {\n for (const provider of providersResponse.data.providers) {\n if (provider.key && provider.env) {\n for (const envVar of provider.env) {\n if (!process.env[envVar]) {\n process.env[envVar] = provider.key;\n }\n }\n }\n }\n }\n } catch {\n // Credentials not available from opencode — rely on .env\n }\n credentialsReady = true;\n };\n\n // Storage: SQLite via Mastra's LibSQLStore\n const dbRelativePath = config.storagePath ?? DEFAULT_STORAGE_PATH;\n const dbAbsolutePath = join(ctx.directory, dbRelativePath);\n await mkdir(dirname(dbAbsolutePath), { recursive: true });\n const storagePath = `file:${dbAbsolutePath}`;\n const store = new LibSQLStore({ id: 'mastra-om', url: storagePath });\n await store.init();\n const storage = await store.getStore('memory');\n if (!storage) {\n throw new Error(`@mastra/opencode: failed to initialize memory storage from ${storagePath}`);\n }\n\n // Observational Memory: uses Mastra's full OM class\n // Model string IDs (e.g., 'google/gemini-2.5-flash') are resolved by Mastra's provider registry.\n const om = new ObservationalMemory({\n storage,\n model: config.model,\n observation: config.observation,\n reflection: config.reflection,\n scope: config.scope,\n shareTokenBudget: config.shareTokenBudget,\n });\n\n // Notify user that OM is active (delayed to let TUI initialize)\n setTimeout(() => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Observational Memory activated',\n variant: 'success',\n duration: 3000,\n },\n });\n }, 500);\n\n return {\n // Hook: Eagerly initialize OM record on session creation\n // so diagnostic tools work immediately (before first observation cycle).\n event: async ({ event }) => {\n if (event.type === 'session.created') {\n const sessionId = event.properties.info.id;\n try {\n await om.getOrCreateRecord(sessionId);\n } catch (err) {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: `Failed to initialize Observational Memory: ${err instanceof Error ? err.message : String(err)}`,\n variant: 'error',\n duration: 5000,\n },\n });\n }\n }\n },\n\n // Hook: Transform messages before they reach the model.\n // This is the core integration point — observe and shape context in one pass:\n // 1. Convert opencode messages → MastraDBMessage format\n // 2. Run observation if threshold is met (with toast notifications)\n // 3. 
Inject observation summary and filter out already-observed messages\n 'experimental.chat.messages.transform': async (_input, output) => {\n const sessionId = output.messages[0]?.info.sessionID;\n if (!sessionId) return;\n\n // Ensure API keys are resolved before observation needs a model\n await resolveCredentials();\n\n try {\n const mastraMessages = convertMessages(output.messages, sessionId);\n\n // Run observation — OM filters for unobserved messages and checks thresholds\n if (mastraMessages.length > 0) {\n await om.observe({\n threadId: sessionId,\n messages: mastraMessages,\n hooks: {\n onObservationStart: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Observing conversation...',\n variant: 'info',\n duration: 10000,\n },\n });\n },\n onObservationEnd: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Observation complete',\n variant: 'success',\n duration: 3000,\n },\n });\n },\n onReflectionStart: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Reflecting on observations...',\n variant: 'info',\n duration: 10000,\n },\n });\n },\n onReflectionEnd: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Reflection complete',\n variant: 'success',\n duration: 3000,\n },\n });\n },\n },\n });\n }\n\n // Discard already-observed messages — observations replace them\n const record = await om.getRecord(sessionId);\n if (record?.lastObservedAt) {\n const lastObservedAt = new Date(record.lastObservedAt);\n output.messages = output.messages.filter(({ info }) => {\n // opencode timestamps are already in milliseconds\n const msgTime = new Date(info.time.created);\n return msgTime > lastObservedAt;\n });\n }\n } catch (err) {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: `Observational Memory error: ${err instanceof Error ? err.message : String(err)}`,\n variant: 'error',\n duration: 5000,\n },\n });\n }\n },\n\n // Hook: Inject observations into the system prompt so the model has compressed context.\n 'experimental.chat.system.transform': async (input, output) => {\n const sessionId = input.sessionID;\n if (!sessionId) return;\n\n try {\n const observations = await om.getObservations(sessionId);\n if (!observations) return;\n\n const optimized = optimizeObservationsForContext(observations);\n output.system.push(\n `${OBSERVATION_CONTEXT_PROMPT}\\n\\n<observations>\\n${optimized}\\n</observations>\\n\\n${OBSERVATION_CONTEXT_INSTRUCTIONS}\\n\\n${OBSERVATION_CONTINUATION_HINT}`,\n );\n } catch {\n // Non-fatal — model proceeds without observations\n }\n },\n\n // Diagnostic tools for inspecting OM state\n tool: {\n memory_status: tool({\n description: 'Show Observational Memory progress — how close the session is to the next observation and reflection cycle.',\n args: {},\n async execute(_args, context) {\n const threadId = context.sessionID;\n const record = await om.getRecord(threadId);\n if (!record) {\n return 'No Observational Memory record found for this session.';\n }\n\n const omConfig = om.config;\n const obsThreshold = resolveThreshold(omConfig.observation.messageTokens);\n const refThreshold = resolveThreshold(omConfig.reflection.observationTokens);\n const obsTokens = record.observationTokenCount ?? 
0;\n\n // Fetch live messages to compute unobserved token count\n const tokenCounter = new TokenCounter();\n let unobservedTokens = 0;\n try {\n const resp = await ctx.client.session.messages({ path: { id: threadId } });\n if (resp.data) {\n const allMastra = convertMessages(resp.data, threadId);\n const unobserved = record.lastObservedAt\n ? allMastra.filter(m => m.createdAt > new Date(record.lastObservedAt!))\n : allMastra;\n unobservedTokens = tokenCounter.countMessages(unobserved);\n }\n } catch {\n // Fall back to record's pending count\n unobservedTokens = record.pendingMessageTokens ?? 0;\n }\n\n const lines = [\n `Observational Memory`,\n `Scope: ${record.scope} | Generations: ${record.generationCount ?? 0}`,\n ``,\n `── Observation ──────────────────────────────`,\n `Unobserved: ${formatTokens(unobservedTokens)} / ${formatTokens(obsThreshold)} tokens`,\n progressBar(unobservedTokens, obsThreshold),\n ``,\n `── Reflection ──────────────────────────────`,\n `Observations: ${formatTokens(obsTokens)} / ${formatTokens(refThreshold)} tokens`,\n progressBar(obsTokens, refThreshold),\n ``,\n `── Status ──────────────────────────────────`,\n `Last observed: ${record.lastObservedAt ?? 'never'}`,\n `Observing: ${record.isObserving ? 'yes' : 'no'} | Reflecting: ${record.isReflecting ? 'yes' : 'no'}`,\n ];\n\n return lines.join('\\n');\n },\n }),\n\n memory_observations: tool({\n description: 'Show the current active observations stored in Observational Memory.',\n args: {},\n async execute(_args, context) {\n const threadId = context.sessionID;\n const observations = await om.getObservations(threadId);\n return observations ?? 'No observations stored yet.';\n },\n }),\n },\n };\n};\n"]}
package/dist/index.d.ts DELETED
@@ -1,41 +0,0 @@
1
- /**
2
- * @mastra/opencode
3
- *
4
- * OpenCode plugin that brings Mastra Observational Memory into opencode sessions.
5
- *
6
- * Mastra OM compresses long conversation history into structured observations
7
- * using an Observer (extract) and Reflector (condense) architecture.
8
- *
9
- * Configuration is read from .opencode/mastra.json in the project root.
10
- *
11
- * @example .opencode/mastra.json
12
- * ```json
13
- * {
14
- * "model": "google/gemini-2.5-flash",
15
- * "observation": { "messageTokens": 20000 },
16
- * "reflection": { "observationTokens": 90000 },
17
- * "storagePath": ".opencode/memory/observations.db"
18
- * }
19
- * ```
20
- */
21
- import type { ObservationalMemoryOptions } from '@mastra/core/memory';
22
- import type { Plugin } from '@opencode-ai/plugin';
23
- export type { ObservationalMemoryOptions };
24
- /**
25
- * Plugin config read from .opencode/mastra.json.
26
- * Extends Mastra's ObservationalMemoryOptions with opencode-specific fields.
27
- *
28
- * In the opencode plugin context, pass string model IDs
29
- * (e.g., 'google/gemini-2.5-flash') — Mastra's provider registry resolves them.
30
- */
31
- export interface MastraOMPluginConfig extends ObservationalMemoryOptions {
32
- /**
33
- * Path to the SQLite database file for observation storage.
34
- * Relative to the project root.
35
- *
36
- * @default '.opencode/memory/observations.db'
37
- */
38
- storagePath?: string;
39
- }
40
- export declare const MastraPlugin: Plugin;
41
- //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;GAmBG;AAIH,OAAO,KAAK,EAAE,0BAA0B,EAAE,MAAM,qBAAqB,CAAC;AAUtE,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAIlD,YAAY,EAAE,0BAA0B,EAAE,CAAC;AAE3C;;;;;;GAMG;AACH,MAAM,WAAW,oBAAqB,SAAQ,0BAA0B;IACtE;;;;;OAKG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AA2GD,eAAO,MAAM,YAAY,EAAE,MA+P1B,CAAC"}
package/dist/index.js DELETED
@@ -1,318 +0,0 @@
1
- import { mkdir, readFile } from 'fs/promises';
2
- import { join, dirname } from 'path';
3
- import { LibSQLStore } from '@mastra/libsql';
4
- import { ObservationalMemory, TokenCounter, optimizeObservationsForContext, OBSERVATION_CONTEXT_PROMPT, OBSERVATION_CONTEXT_INSTRUCTIONS, OBSERVATION_CONTINUATION_HINT } from '@mastra/memory/processors';
5
- import { tool } from '@opencode-ai/plugin';
6
-
7
- // src/index.ts
8
- var CONFIG_FILE = ".opencode/mastra.json";
9
- var DEFAULT_STORAGE_PATH = ".opencode/memory/observations.db";
10
- async function loadConfig(directory) {
11
- try {
12
- const configPath = join(directory, CONFIG_FILE);
13
- const raw = await readFile(configPath, "utf-8");
14
- return JSON.parse(raw);
15
- } catch {
16
- return {};
17
- }
18
- }
19
- function convertMessages(messages, sessionId) {
20
- return messages.map(({ info, parts }) => {
21
- const convertedParts = parts.map((part) => {
22
- const p = part;
23
- const type = p.type;
24
- if (type === "text" && p.text) {
25
- return { type: "text", text: p.text };
26
- }
27
- if (type === "tool-invocation") {
28
- return {
29
- type: "tool-invocation",
30
- toolInvocation: {
31
- toolCallId: p.toolCallId,
32
- toolName: p.toolName,
33
- args: p.args,
34
- result: p.result,
35
- state: p.state
36
- }
37
- };
38
- }
39
- if (type === "file") {
40
- return {
41
- type: "file",
42
- url: p.url,
43
- mediaType: p.mediaType
44
- };
45
- }
46
- if (type === "image") {
47
- return {
48
- type: "image",
49
- image: p.image
50
- };
51
- }
52
- if (type === "reasoning" && p.reasoning) {
53
- return { type: "reasoning", reasoning: p.reasoning };
54
- }
55
- if (type?.startsWith("data-om-")) {
56
- return null;
57
- }
58
- return null;
59
- }).filter((p) => p !== null);
60
- if (convertedParts.length === 0) return null;
61
- if (info.role !== "user" && info.role !== "assistant") return null;
62
- return {
63
- id: info.id,
64
- role: info.role,
65
- // opencode timestamps are already in milliseconds (JavaScript Date)
66
- createdAt: new Date(info.time.created),
67
- threadId: sessionId,
68
- resourceId: sessionId,
69
- content: {
70
- format: 2,
71
- parts: convertedParts
72
- }
73
- };
74
- }).filter((m) => m !== null);
75
- }
76
- function progressBar(current, total, width = 20) {
77
- const pct = total > 0 ? Math.min(current / total, 1) : 0;
78
- const filled = Math.round(pct * width);
79
- return `[${"\u2588".repeat(filled)}${"\u2591".repeat(width - filled)}] ${(pct * 100).toFixed(1)}%`;
80
- }
81
- function formatTokens(n) {
82
- return n >= 1e3 ? `${(n / 1e3).toFixed(1)}k` : String(n);
83
- }
84
- function resolveThreshold(t) {
85
- return typeof t === "number" ? t : t.max;
86
- }
87
- var MastraPlugin = async (ctx) => {
88
- const config = await loadConfig(ctx.directory);
89
- let credentialsReady = false;
90
- const resolveCredentials = async () => {
91
- if (credentialsReady) return;
92
- try {
93
- const providersResponse = await ctx.client.config.providers();
94
- if (providersResponse.data) {
95
- for (const provider of providersResponse.data.providers) {
96
- if (provider.key && provider.env) {
97
- for (const envVar of provider.env) {
98
- if (!process.env[envVar]) {
99
- process.env[envVar] = provider.key;
100
- }
101
- }
102
- }
103
- }
104
- }
105
- } catch {
106
- }
107
- credentialsReady = true;
108
- };
109
- const dbRelativePath = config.storagePath ?? DEFAULT_STORAGE_PATH;
110
- const dbAbsolutePath = join(ctx.directory, dbRelativePath);
111
- await mkdir(dirname(dbAbsolutePath), { recursive: true });
112
- const storagePath = `file:${dbAbsolutePath}`;
113
- const store = new LibSQLStore({ id: "mastra-om", url: storagePath });
114
- await store.init();
115
- const storage = await store.getStore("memory");
116
- if (!storage) {
117
- throw new Error(`@mastra/opencode: failed to initialize memory storage from ${storagePath}`);
118
- }
119
- const om = new ObservationalMemory({
120
- storage,
121
- model: config.model,
122
- observation: config.observation,
123
- reflection: config.reflection,
124
- scope: config.scope,
125
- shareTokenBudget: config.shareTokenBudget
126
- });
127
- setTimeout(() => {
128
- void ctx.client.tui.showToast({
129
- body: {
130
- title: "Mastra",
131
- message: "Observational Memory activated",
132
- variant: "success",
133
- duration: 3e3
134
- }
135
- });
136
- }, 500);
137
- return {
138
- // Hook: Eagerly initialize OM record on session creation
139
- // so diagnostic tools work immediately (before first observation cycle).
140
- event: async ({ event }) => {
141
- if (event.type === "session.created") {
142
- const sessionId = event.properties.info.id;
143
- try {
144
- await om.getOrCreateRecord(sessionId);
145
- } catch (err) {
146
- void ctx.client.tui.showToast({
147
- body: {
148
- title: "Mastra",
149
- message: `Failed to initialize Observational Memory: ${err instanceof Error ? err.message : String(err)}`,
150
- variant: "error",
151
- duration: 5e3
152
- }
153
- });
154
- }
155
- }
156
- },
157
- // Hook: Transform messages before they reach the model.
158
- // This is the core integration point — observe and shape context in one pass:
159
- // 1. Convert opencode messages → MastraDBMessage format
160
- // 2. Run observation if threshold is met (with toast notifications)
161
- // 3. Inject observation summary and filter out already-observed messages
162
- "experimental.chat.messages.transform": async (_input, output) => {
163
- const sessionId = output.messages[0]?.info.sessionID;
164
- if (!sessionId) return;
165
- await resolveCredentials();
166
- try {
167
- const mastraMessages = convertMessages(output.messages, sessionId);
168
- if (mastraMessages.length > 0) {
169
- await om.observe({
170
- threadId: sessionId,
171
- messages: mastraMessages,
172
- hooks: {
173
- onObservationStart: () => {
174
- void ctx.client.tui.showToast({
175
- body: {
176
- title: "Mastra",
177
- message: "Observing conversation...",
178
- variant: "info",
179
- duration: 1e4
180
- }
181
- });
182
- },
183
- onObservationEnd: () => {
184
- void ctx.client.tui.showToast({
185
- body: {
186
- title: "Mastra",
187
- message: "Observation complete",
188
- variant: "success",
189
- duration: 3e3
190
- }
191
- });
192
- },
193
- onReflectionStart: () => {
194
- void ctx.client.tui.showToast({
195
- body: {
196
- title: "Mastra",
197
- message: "Reflecting on observations...",
198
- variant: "info",
199
- duration: 1e4
200
- }
201
- });
202
- },
203
- onReflectionEnd: () => {
204
- void ctx.client.tui.showToast({
205
- body: {
206
- title: "Mastra",
207
- message: "Reflection complete",
208
- variant: "success",
209
- duration: 3e3
210
- }
211
- });
212
- }
213
- }
214
- });
215
- }
216
- const record = await om.getRecord(sessionId);
217
- if (record?.lastObservedAt) {
218
- const lastObservedAt = new Date(record.lastObservedAt);
219
- output.messages = output.messages.filter(({ info }) => {
220
- const msgTime = new Date(info.time.created);
221
- return msgTime > lastObservedAt;
222
- });
223
- }
224
- } catch (err) {
225
- void ctx.client.tui.showToast({
226
- body: {
227
- title: "Mastra",
228
- message: `Observational Memory error: ${err instanceof Error ? err.message : String(err)}`,
229
- variant: "error",
230
- duration: 5e3
231
- }
232
- });
233
- }
234
- },
235
- // Hook: Inject observations into the system prompt so the model has compressed context.
236
- "experimental.chat.system.transform": async (input, output) => {
237
- const sessionId = input.sessionID;
238
- if (!sessionId) return;
239
- try {
240
- const observations = await om.getObservations(sessionId);
241
- if (!observations) return;
242
- const optimized = optimizeObservationsForContext(observations);
243
- output.system.push(
244
- `${OBSERVATION_CONTEXT_PROMPT}
245
-
246
- <observations>
247
- ${optimized}
248
- </observations>
249
-
250
- ${OBSERVATION_CONTEXT_INSTRUCTIONS}
251
-
252
- ${OBSERVATION_CONTINUATION_HINT}`
253
- );
254
- } catch {
255
- }
256
- },
257
- // Diagnostic tools for inspecting OM state
258
- tool: {
259
- memory_status: tool({
260
- description: "Show Observational Memory progress \u2014 how close the session is to the next observation and reflection cycle.",
261
- args: {},
262
- async execute(_args, context) {
263
- const threadId = context.sessionID;
264
- const record = await om.getRecord(threadId);
265
- if (!record) {
266
- return "No Observational Memory record found for this session.";
267
- }
268
- const omConfig = om.config;
269
- const obsThreshold = resolveThreshold(omConfig.observation.messageTokens);
270
- const refThreshold = resolveThreshold(omConfig.reflection.observationTokens);
271
- const obsTokens = record.observationTokenCount ?? 0;
272
- const tokenCounter = new TokenCounter();
273
- let unobservedTokens = 0;
274
- try {
275
- const resp = await ctx.client.session.messages({ path: { id: threadId } });
276
- if (resp.data) {
277
- const allMastra = convertMessages(resp.data, threadId);
278
- const unobserved = record.lastObservedAt ? allMastra.filter((m) => m.createdAt > new Date(record.lastObservedAt)) : allMastra;
279
- unobservedTokens = tokenCounter.countMessages(unobserved);
280
- }
281
- } catch {
282
- unobservedTokens = record.pendingMessageTokens ?? 0;
283
- }
284
- const lines = [
285
- `Observational Memory`,
286
- `Scope: ${record.scope} | Generations: ${record.generationCount ?? 0}`,
287
- ``,
288
- `\u2500\u2500 Observation \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`,
289
- `Unobserved: ${formatTokens(unobservedTokens)} / ${formatTokens(obsThreshold)} tokens`,
290
- progressBar(unobservedTokens, obsThreshold),
291
- ``,
292
- `\u2500\u2500 Reflection \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`,
293
- `Observations: ${formatTokens(obsTokens)} / ${formatTokens(refThreshold)} tokens`,
294
- progressBar(obsTokens, refThreshold),
295
- ``,
296
- `\u2500\u2500 Status \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500`,
297
- `Last observed: ${record.lastObservedAt ?? "never"}`,
298
- `Observing: ${record.isObserving ? "yes" : "no"} | Reflecting: ${record.isReflecting ? "yes" : "no"}`
299
- ];
300
- return lines.join("\n");
301
- }
302
- }),
303
- memory_observations: tool({
304
- description: "Show the current active observations stored in Observational Memory.",
305
- args: {},
306
- async execute(_args, context) {
307
- const threadId = context.sessionID;
308
- const observations = await om.getObservations(threadId);
309
- return observations ?? "No observations stored yet.";
310
- }
311
- })
312
- }
313
- };
314
- };
315
-
316
- export { MastraPlugin };
317
- //# sourceMappingURL=index.js.map
318
- //# sourceMappingURL=index.js.map
package/dist/index.js.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;AAwDA,IAAM,WAAA,GAAc,uBAAA;AACpB,IAAM,oBAAA,GAAuB,kCAAA;AAE7B,eAAe,WAAW,SAAA,EAAkD;AAC1E,EAAA,IAAI;AACF,IAAA,MAAM,UAAA,GAAa,IAAA,CAAK,SAAA,EAAW,WAAW,CAAA;AAC9C,IAAA,MAAM,GAAA,GAAM,MAAM,QAAA,CAAS,UAAA,EAAY,OAAO,CAAA;AAC9C,IAAA,OAAO,IAAA,CAAK,MAAM,GAAG,CAAA;AAAA,EACvB,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,EAAC;AAAA,EACV;AACF;AAKA,SAAS,eAAA,CAAgB,UAA8C,SAAA,EAAmB;AACxF,EAAA,OAAO,SACJ,GAAA,CAAI,CAAC,EAAE,IAAA,EAAM,OAAM,KAAM;AAGxB,IAAA,MAAM,cAAA,GAAiB,KAAA,CACpB,GAAA,CAAI,CAAC,IAAA,KAAc;AAClB,MAAA,MAAM,CAAA,GAAI,IAAA;AACV,MAAA,MAAM,OAAO,CAAA,CAAE,IAAA;AAEf,MAAA,IAAI,IAAA,KAAS,MAAA,IAAU,CAAA,CAAE,IAAA,EAAM;AAC7B,QAAA,OAAO,EAAE,IAAA,EAAM,MAAA,EAAQ,IAAA,EAAM,EAAE,IAAA,EAAK;AAAA,MACtC;AAEA,MAAA,IAAI,SAAS,iBAAA,EAAmB;AAC9B,QAAA,OAAO;AAAA,UACL,IAAA,EAAM,iBAAA;AAAA,UACN,cAAA,EAAgB;AAAA,YACd,YAAY,CAAA,CAAE,UAAA;AAAA,YACd,UAAU,CAAA,CAAE,QAAA;AAAA,YACZ,MAAM,CAAA,CAAE,IAAA;AAAA,YACR,QAAQ,CAAA,CAAE,MAAA;AAAA,YACV,OAAO,CAAA,CAAE;AAAA;AACX,SACF;AAAA,MACF;AAEA,MAAA,IAAI,SAAS,MAAA,EAAQ;AACnB,QAAA,OAAO;AAAA,UACL,IAAA,EAAM,MAAA;AAAA,UACN,KAAK,CAAA,CAAE,GAAA;AAAA,UACP,WAAW,CAAA,CAAE;AAAA,SACf;AAAA,MACF;AAEA,MAAA,IAAI,SAAS,OAAA,EAAS;AACpB,QAAA,OAAO;AAAA,UACL,IAAA,EAAM,OAAA;AAAA,UACN,OAAO,CAAA,CAAE;AAAA,SACX;AAAA,MACF;AAEA,MAAA,IAAI,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,SAAA,EAAW;AACvC,QAAA,OAAO,EAAE,IAAA,EAAM,WAAA,EAAa,SAAA,EAAW,EAAE,SAAA,EAAU;AAAA,MACrD;AAGA,MAAA,IAAI,IAAA,EAAM,UAAA,CAAW,UAAU,CAAA,EAAG;AAChC,QAAA,OAAO,IAAA;AAAA,MACT;AAEA,MAAA,OAAO,IAAA;AAAA,IACT,CAAC,CAAA,CACA,MAAA,CAAO,CAAC,CAAA,KAAkC,MAAM,IAAI,CAAA;AAEvD,IAAA,IAAI,cAAA,CAAe,MAAA,KAAW,CAAA,EAAG,OAAO,IAAA;AACxC,IAAA,IAAI,KAAK,IAAA,KAAS,MAAA,IAAU,IAAA,CAAK,IAAA,KAAS,aAAa,OAAO,IAAA;AAE9D,IAAA,OAAO;AAAA,MACL,IAAI,IAAA,CAAK,EAAA;AAAA,MACT,MAAM,IAAA,CAAK,IAAA;AAAA;AAAA,MAEX,SAAA,EAAW,IAAI,IAAA,CAAK,IAAA,CAAK,KAAK,OAAO,CAAA;AAAA,MACrC,QAAA,EAAU,SAAA;AAAA,MACV,UAAA,EAAY,SAAA;AAAA,MACZ,OAAA,EAAS;AAAA,QACP,MAAA,EAAQ,CAAA;AAAA,QACR,KAAA,EAAO;AAAA;AACT,KACF;AAAA,EACF,CAAC,CAAA,CACA,MAAA,CAAO,CAAC,CAAA,KAAkC,MAAM,IAAI,CAAA;AACzD;AAEA,SAAS,WAAA,CAAY,OAAA,EAAiB,KAAA,EAAe,KAAA,GAAQ,EAAA,EAAY;AACvE,EAAA,MAAM,GAAA,GAAM,QAAQ,CAAA,GAAI,IAAA,CAAK,IAAI,OAAA,GAAU,KAAA,EAAO,CAAC,CAAA,GAAI,CAAA;AACvD,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,GAAA,GAAM,KAAK,CAAA;AACrC,EAAA,OAAO,IAAI,QAAA,CAAI,MAAA,CAAO,MAAM,CAAC,GAAG,QAAA,CAAI,MAAA,CAAO,KAAA,GAAQ,MAAM,CAAC,CAAA,EAAA,EAAA,CAAM,GAAA,GAAM,GAAA,EAAK,OAAA,CAAQ,CAAC,CAAC,CAAA,CAAA,CAAA;AACvF;AAEA,SAAS,aAAa,CAAA,EAAmB;AACvC,EAAA,OAAO,CAAA,IAAK,GAAA,GAAO,CAAA,EAAA,CAAI,CAAA,GAAI,GAAA,EAAM,QAAQ,CAAC,CAAC,CAAA,CAAA,CAAA,GAAM,MAAA,CAAO,CAAC,CAAA;AAC3D;AAEA,SAAS,iBAAiB,CAAA,EAAkD;AAC1E,EAAA,OAAO,OAAO,CAAA,KAAM,QAAA,GAAW,CAAA,GAAI,CAAA,CAAE,GAAA;AACvC;AAEO,IAAM,YAAA,GAAuB,OAAM,GAAA,KAAO;AAE/C,EAAA,MAAM,MAAA,GAAS,MAAM,UAAA,CAAW,GAAA,CAAI,SAAS,CAAA;AAI7C,EAAA,IAAI,gBAAA,GAAmB,KAAA;AACvB,EAAA,MAAM,qBAAqB,YAAY;AACrC,IAAA,IAAI,gBAAA,EAAkB;AACtB,IAAA,IAAI;AACF,MAAA,MAAM,iBAAA,GAAoB,MAAM,GAAA,CAAI,MAAA,CAAO,OAAO,SAAA,EAAU;AAC5D,MAAA,IAAI,kBAAkB,IAAA,EAAM;AAC1B,QAAA,KAAA,MAAW,QAAA,IAAY,iBAAA,CAAkB,IAAA,CAAK,SAAA,EAAW;AACvD,UAAA,IAAI,QAAA,CAAS,GAAA,IAAO,QAAA,CAAS,GAAA,EAAK;AAChC,YAAA,KAAA,MAAW,MAAA,IAAU,SAAS,GAAA,EAAK;AACjC,cAAA,IAAI,CAAC,OAAA,CAAQ,GAAA,CAAI,MAAM,CAAA,EAAG;AACxB,gBAAA,OAAA,CAAQ,GAAA,CAAI,MAAM,CAAA,GAAI,QAAA,CAAS,GAAA;AAAA,cACjC;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAA,CAAA,MAAQ;AAAA,IAER;AACA,IAAA,gBAAA,GAAmB,IAAA;AAAA,EACrB,CAAA;AAGA,EAAA,MAAM,cAAA,GAAiB,OAAO,WAAA,IAAe,oBAAA;AAC7C,EAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,GAAA,CAAI,SAAA,EAAW,cAAc,CAAA;AACzD,EAAA,MAAM
,MAAM,OAAA,CAAQ,cAAc,GAAG,EAAE,SAAA,EAAW,MAAM,CAAA;AACxD,EAAA,MAAM,WAAA,GAAc,QAAQ,cAAc,CAAA,CAAA;AAC1C,EAAA,MAAM,KAAA,GAAQ,IAAI,WAAA,CAAY,EAAE,IAAI,WAAA,EAAa,GAAA,EAAK,aAAa,CAAA;AACnE,EAAA,MAAM,MAAM,IAAA,EAAK;AACjB,EAAA,MAAM,OAAA,GAAU,MAAM,KAAA,CAAM,QAAA,CAAS,QAAQ,CAAA;AAC7C,EAAA,IAAI,CAAC,OAAA,EAAS;AACZ,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,2DAAA,EAA8D,WAAW,CAAA,CAAE,CAAA;AAAA,EAC7F;AAIA,EAAA,MAAM,EAAA,GAAK,IAAI,mBAAA,CAAoB;AAAA,IACjC,OAAA;AAAA,IACA,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,aAAa,MAAA,CAAO,WAAA;AAAA,IACpB,YAAY,MAAA,CAAO,UAAA;AAAA,IACnB,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,kBAAkB,MAAA,CAAO;AAAA,GAC1B,CAAA;AAGD,EAAA,UAAA,CAAW,MAAM;AACf,IAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,MAC5B,IAAA,EAAM;AAAA,QACJ,KAAA,EAAO,QAAA;AAAA,QACP,OAAA,EAAS,gCAAA;AAAA,QACT,OAAA,EAAS,SAAA;AAAA,QACT,QAAA,EAAU;AAAA;AACZ,KACD,CAAA;AAAA,EACH,GAAG,GAAG,CAAA;AAEN,EAAA,OAAO;AAAA;AAAA;AAAA,IAGL,KAAA,EAAO,OAAO,EAAE,KAAA,EAAM,KAAM;AAC1B,MAAA,IAAI,KAAA,CAAM,SAAS,iBAAA,EAAmB;AACpC,QAAA,MAAM,SAAA,GAAY,KAAA,CAAM,UAAA,CAAW,IAAA,CAAK,EAAA;AACxC,QAAA,IAAI;AACF,UAAA,MAAM,EAAA,CAAG,kBAAkB,SAAS,CAAA;AAAA,QACtC,SAAS,GAAA,EAAK;AACZ,UAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,YAC5B,IAAA,EAAM;AAAA,cACJ,KAAA,EAAO,QAAA;AAAA,cACP,OAAA,EAAS,8CAA8C,GAAA,YAAe,KAAA,GAAQ,IAAI,OAAA,GAAU,MAAA,CAAO,GAAG,CAAC,CAAA,CAAA;AAAA,cACvG,OAAA,EAAS,OAAA;AAAA,cACT,QAAA,EAAU;AAAA;AACZ,WACD,CAAA;AAAA,QACH;AAAA,MACF;AAAA,IACF,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAOA,sCAAA,EAAwC,OAAO,MAAA,EAAQ,MAAA,KAAW;AAChE,MAAA,MAAM,SAAA,GAAY,MAAA,CAAO,QAAA,CAAS,CAAC,GAAG,IAAA,CAAK,SAAA;AAC3C,MAAA,IAAI,CAAC,SAAA,EAAW;AAGhB,MAAA,MAAM,kBAAA,EAAmB;AAEzB,MAAA,IAAI;AACF,QAAA,MAAM,cAAA,GAAiB,eAAA,CAAgB,MAAA,CAAO,QAAA,EAAU,SAAS,CAAA;AAGjE,QAAA,IAAI,cAAA,CAAe,SAAS,CAAA,EAAG;AAC7B,UAAA,MAAM,GAAG,OAAA,CAAQ;AAAA,YACf,QAAA,EAAU,SAAA;AAAA,YACV,QAAA,EAAU,cAAA;AAAA,YACV,KAAA,EAAO;AAAA,cACL,oBAAoB,MAAM;AACxB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,2BAAA;AAAA,oBACT,OAAA,EAAS,MAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH,CAAA;AAAA,cACA,kBAAkB,MAAM;AACtB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,sBAAA;AAAA,oBACT,OAAA,EAAS,SAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH,CAAA;AAAA,cACA,mBAAmB,MAAM;AACvB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,+BAAA;AAAA,oBACT,OAAA,EAAS,MAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH,CAAA;AAAA,cACA,iBAAiB,MAAM;AACrB,gBAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,kBAC5B,IAAA,EAAM;AAAA,oBACJ,KAAA,EAAO,QAAA;AAAA,oBACP,OAAA,EAAS,qBAAA;AAAA,oBACT,OAAA,EAAS,SAAA;AAAA,oBACT,QAAA,EAAU;AAAA;AACZ,iBACD,CAAA;AAAA,cACH;AAAA;AACF,WACD,CAAA;AAAA,QACH;AAGA,QAAA,MAAM,MAAA,GAAS,MAAM,EAAA,CAAG,SAAA,CAAU,SAAS,CAAA;AAC3C,QAAA,IAAI,QAAQ,cAAA,EAAgB;AAC1B,UAAA,MAAM,cAAA,GAAiB,IAAI,IAAA,CAAK,MAAA,CAAO,cAAc,CAAA;AACrD,UAAA,MAAA,CAAO,WAAW,MAAA,CAAO,QAAA,CAAS,OAAO,CAAC,EAAE,MAAK,KAAM;AAErD,YAAA,MAAM,OAAA,GAAU,IAAI,IAAA,CAAK,IAAA,CAAK,KAAK,OAAO,CAAA;AAC1C,YAAA,OAAO,OAAA,GAAU,cAAA;AAAA,UACnB,CAAC,CAAA;AAAA,QACH;AAAA,MACF,SAAS,GAAA,EAAK;AACZ,QAAA,KAAK,GAAA,CAAI,MAAA,CAAO,GAAA,CAAI,SAAA,CAAU;AAAA,UAC5B,IAAA,EAAM;AAAA,YACJ,KAAA,EAAO,QAAA;AAAA,YACP,OAAA,EAAS,+BAA+B,GAAA,YAAe,KAAA,GAAQ,IAAI,OAAA,GAAU,MAAA,CAAO,GAAG,CAAC,CAAA,CAAA;AAAA,YACxF,OAAA,EAAS,OAAA;AAAA,YACT,QAAA,EAAU;AAAA;AACZ,SACD,CAAA;AAAA,MACH;AAAA,IACF,CAAA;AAAA;AAAA,IAGA,oCAAA,EAAsC,OAAO,KAAA,EAAO,MAAA,KAAW;AAC7D,MAAA,MAAM,YAAY,KAAA,CAAM,SAAA;AACxB,MAA
A,IAAI,CAAC,SAAA,EAAW;AAEhB,MAAA,IAAI;AACF,QAAA,MAAM,YAAA,GAAe,MAAM,EAAA,CAAG,eAAA,CAAgB,SAAS,CAAA;AACvD,QAAA,IAAI,CAAC,YAAA,EAAc;AAEnB,QAAA,MAAM,SAAA,GAAY,+BAA+B,YAAY,CAAA;AAC7D,QAAA,MAAA,CAAO,MAAA,CAAO,IAAA;AAAA,UACZ,GAAG,0BAA0B;;AAAA;AAAA,EAAuB,SAAS;AAAA;;AAAA,EAAwB,gCAAgC;;AAAA,EAAO,6BAA6B,CAAA;AAAA,SAC3J;AAAA,MACF,CAAA,CAAA,MAAQ;AAAA,MAER;AAAA,IACF,CAAA;AAAA;AAAA,IAGA,IAAA,EAAM;AAAA,MACJ,eAAe,IAAA,CAAK;AAAA,QAClB,WAAA,EAAa,kHAAA;AAAA,QACb,MAAM,EAAC;AAAA,QACP,MAAM,OAAA,CAAQ,KAAA,EAAO,OAAA,EAAS;AAC5B,UAAA,MAAM,WAAW,OAAA,CAAQ,SAAA;AACzB,UAAA,MAAM,MAAA,GAAS,MAAM,EAAA,CAAG,SAAA,CAAU,QAAQ,CAAA;AAC1C,UAAA,IAAI,CAAC,MAAA,EAAQ;AACX,YAAA,OAAO,wDAAA;AAAA,UACT;AAEA,UAAA,MAAM,WAAW,EAAA,CAAG,MAAA;AACpB,UAAA,MAAM,YAAA,GAAe,gBAAA,CAAiB,QAAA,CAAS,WAAA,CAAY,aAAa,CAAA;AACxE,UAAA,MAAM,YAAA,GAAe,gBAAA,CAAiB,QAAA,CAAS,UAAA,CAAW,iBAAiB,CAAA;AAC3E,UAAA,MAAM,SAAA,GAAY,OAAO,qBAAA,IAAyB,CAAA;AAGlD,UAAA,MAAM,YAAA,GAAe,IAAI,YAAA,EAAa;AACtC,UAAA,IAAI,gBAAA,GAAmB,CAAA;AACvB,UAAA,IAAI;AACF,YAAA,MAAM,IAAA,GAAO,MAAM,GAAA,CAAI,MAAA,CAAO,OAAA,CAAQ,QAAA,CAAS,EAAE,IAAA,EAAM,EAAE,EAAA,EAAI,QAAA,EAAS,EAAG,CAAA;AACzE,YAAA,IAAI,KAAK,IAAA,EAAM;AACb,cAAA,MAAM,SAAA,GAAY,eAAA,CAAgB,IAAA,CAAK,IAAA,EAAM,QAAQ,CAAA;AACrD,cAAA,MAAM,UAAA,GAAa,MAAA,CAAO,cAAA,GACtB,SAAA,CAAU,MAAA,CAAO,CAAA,CAAA,KAAK,CAAA,CAAE,SAAA,GAAY,IAAI,IAAA,CAAK,MAAA,CAAO,cAAe,CAAC,CAAA,GACpE,SAAA;AACJ,cAAA,gBAAA,GAAmB,YAAA,CAAa,cAAc,UAAU,CAAA;AAAA,YAC1D;AAAA,UACF,CAAA,CAAA,MAAQ;AAEN,YAAA,gBAAA,GAAmB,OAAO,oBAAA,IAAwB,CAAA;AAAA,UACpD;AAEA,UAAA,MAAM,KAAA,GAAQ;AAAA,YACZ,CAAA,oBAAA,CAAA;AAAA,YACA,UAAU,MAAA,CAAO,KAAK,CAAA,kBAAA,EAAqB,MAAA,CAAO,mBAAmB,CAAC,CAAA,CAAA;AAAA,YACtE,CAAA,CAAA;AAAA,YACA,CAAA,6MAAA,CAAA;AAAA,YACA,eAAe,YAAA,CAAa,gBAAgB,CAAC,CAAA,GAAA,EAAM,YAAA,CAAa,YAAY,CAAC,CAAA,OAAA,CAAA;AAAA,YAC7E,WAAA,CAAY,kBAAkB,YAAY,CAAA;AAAA,YAC1C,CAAA,CAAA;AAAA,YACA,CAAA,4MAAA,CAAA;AAAA,YACA,iBAAiB,YAAA,CAAa,SAAS,CAAC,CAAA,GAAA,EAAM,YAAA,CAAa,YAAY,CAAC,CAAA,OAAA,CAAA;AAAA,YACxE,WAAA,CAAY,WAAW,YAAY,CAAA;AAAA,YACnC,CAAA,CAAA;AAAA,YACA,CAAA,gOAAA,CAAA;AAAA,YACA,CAAA,eAAA,EAAkB,MAAA,CAAO,cAAA,IAAkB,OAAO,CAAA,CAAA;AAAA,YAClD,CAAA,WAAA,EAAc,OAAO,WAAA,GAAc,KAAA,GAAQ,IAAI,CAAA,iBAAA,EAAoB,MAAA,CAAO,YAAA,GAAe,KAAA,GAAQ,IAAI,CAAA;AAAA,WACvG;AAEA,UAAA,OAAO,KAAA,CAAM,KAAK,IAAI,CAAA;AAAA,QACxB;AAAA,OACD,CAAA;AAAA,MAED,qBAAqB,IAAA,CAAK;AAAA,QACxB,WAAA,EAAa,sEAAA;AAAA,QACb,MAAM,EAAC;AAAA,QACP,MAAM,OAAA,CAAQ,KAAA,EAAO,OAAA,EAAS;AAC5B,UAAA,MAAM,WAAW,OAAA,CAAQ,SAAA;AACzB,UAAA,MAAM,YAAA,GAAe,MAAM,EAAA,CAAG,eAAA,CAAgB,QAAQ,CAAA;AACtD,UAAA,OAAO,YAAA,IAAgB,6BAAA;AAAA,QACzB;AAAA,OACD;AAAA;AACH,GACF;AACF","file":"index.js","sourcesContent":["/**\n * @mastra/opencode\n *\n * OpenCode plugin that brings Mastra Observational Memory into opencode sessions.\n *\n * Mastra OM compresses long conversation history into structured observations\n * using an Observer (extract) and Reflector (condense) architecture.\n *\n * Configuration is read from .opencode/mastra.json in the project root.\n *\n * @example .opencode/mastra.json\n * ```json\n * {\n * \"model\": \"google/gemini-2.5-flash\",\n * \"observation\": { \"messageTokens\": 20000 },\n * \"reflection\": { \"observationTokens\": 90000 },\n * \"storagePath\": \".opencode/memory/observations.db\"\n * }\n * ```\n */\n\nimport { readFile, mkdir } from 'node:fs/promises';\nimport { join, dirname } from 'node:path';\nimport type { ObservationalMemoryOptions } from '@mastra/core/memory';\nimport { LibSQLStore } from '@mastra/libsql';\nimport {\n ObservationalMemory,\n TokenCounter,\n optimizeObservationsForContext,\n OBSERVATION_CONTINUATION_HINT,\n 
OBSERVATION_CONTEXT_PROMPT,\n OBSERVATION_CONTEXT_INSTRUCTIONS,\n} from '@mastra/memory/processors';\nimport type { Plugin } from '@opencode-ai/plugin';\nimport { tool } from '@opencode-ai/plugin';\nimport type { Message, Part } from '@opencode-ai/sdk';\n\nexport type { ObservationalMemoryOptions };\n\n/**\n * Plugin config read from .opencode/mastra.json.\n * Extends Mastra's ObservationalMemoryOptions with opencode-specific fields.\n *\n * In the opencode plugin context, pass string model IDs\n * (e.g., 'google/gemini-2.5-flash') — Mastra's provider registry resolves them.\n */\nexport interface MastraOMPluginConfig extends ObservationalMemoryOptions {\n /**\n * Path to the SQLite database file for observation storage.\n * Relative to the project root.\n *\n * @default '.opencode/memory/observations.db'\n */\n storagePath?: string;\n}\n\nconst CONFIG_FILE = '.opencode/mastra.json';\nconst DEFAULT_STORAGE_PATH = '.opencode/memory/observations.db';\n\nasync function loadConfig(directory: string): Promise<MastraOMPluginConfig> {\n try {\n const configPath = join(directory, CONFIG_FILE);\n const raw = await readFile(configPath, 'utf-8');\n return JSON.parse(raw) as MastraOMPluginConfig;\n } catch {\n // No config file or invalid JSON — use defaults\n return {};\n }\n}\n\n/** Convert opencode messages to MastraDBMessage format.\n * Preserves all part types including tool invocations, files, images, and reasoning.\n */\nfunction convertMessages(messages: { info: Message; parts: Part[] }[], sessionId: string) {\n return messages\n .map(({ info, parts }) => {\n // Convert ALL part types, not just text\n // Use type assertions since Part union type is restrictive\n const convertedParts = parts\n .map((part): any => {\n const p = part as any;\n const type = p.type as string;\n\n if (type === 'text' && p.text) {\n return { type: 'text', text: p.text };\n }\n\n if (type === 'tool-invocation') {\n return {\n type: 'tool-invocation',\n toolInvocation: {\n toolCallId: p.toolCallId,\n toolName: p.toolName,\n args: p.args,\n result: p.result,\n state: p.state,\n },\n };\n }\n\n if (type === 'file') {\n return {\n type: 'file',\n url: p.url,\n mediaType: p.mediaType,\n };\n }\n\n if (type === 'image') {\n return {\n type: 'image',\n image: p.image,\n };\n }\n\n if (type === 'reasoning' && p.reasoning) {\n return { type: 'reasoning', reasoning: p.reasoning };\n }\n\n // Skip unknown or internal part types\n if (type?.startsWith('data-om-')) {\n return null;\n }\n\n return null;\n })\n .filter((p): p is NonNullable<typeof p> => p !== null);\n\n if (convertedParts.length === 0) return null;\n if (info.role !== 'user' && info.role !== 'assistant') return null;\n\n return {\n id: info.id,\n role: info.role,\n // opencode timestamps are already in milliseconds (JavaScript Date)\n createdAt: new Date(info.time.created),\n threadId: sessionId,\n resourceId: sessionId,\n content: {\n format: 2 as const,\n parts: convertedParts,\n },\n };\n })\n .filter((m): m is NonNullable<typeof m> => m !== null);\n}\n\nfunction progressBar(current: number, total: number, width = 20): string {\n const pct = total > 0 ? Math.min(current / total, 1) : 0;\n const filled = Math.round(pct * width);\n return `[${'█'.repeat(filled)}${'░'.repeat(width - filled)}] ${(pct * 100).toFixed(1)}%`;\n}\n\nfunction formatTokens(n: number): string {\n return n >= 1000 ? `${(n / 1000).toFixed(1)}k` : String(n);\n}\n\nfunction resolveThreshold(t: number | { min: number; max: number }): number {\n return typeof t === 'number' ? 
t : t.max;\n}\n\nexport const MastraPlugin: Plugin = async ctx => {\n // Load config from .opencode/mastra.json\n const config = await loadConfig(ctx.directory);\n\n // Resolve API keys from opencode's provider store (deferred so it doesn't block plugin init).\n // .env takes priority — opencode keys only fill in gaps.\n let credentialsReady = false;\n const resolveCredentials = async () => {\n if (credentialsReady) return;\n try {\n const providersResponse = await ctx.client.config.providers();\n if (providersResponse.data) {\n for (const provider of providersResponse.data.providers) {\n if (provider.key && provider.env) {\n for (const envVar of provider.env) {\n if (!process.env[envVar]) {\n process.env[envVar] = provider.key;\n }\n }\n }\n }\n }\n } catch {\n // Credentials not available from opencode — rely on .env\n }\n credentialsReady = true;\n };\n\n // Storage: SQLite via Mastra's LibSQLStore\n const dbRelativePath = config.storagePath ?? DEFAULT_STORAGE_PATH;\n const dbAbsolutePath = join(ctx.directory, dbRelativePath);\n await mkdir(dirname(dbAbsolutePath), { recursive: true });\n const storagePath = `file:${dbAbsolutePath}`;\n const store = new LibSQLStore({ id: 'mastra-om', url: storagePath });\n await store.init();\n const storage = await store.getStore('memory');\n if (!storage) {\n throw new Error(`@mastra/opencode: failed to initialize memory storage from ${storagePath}`);\n }\n\n // Observational Memory: uses Mastra's full OM class\n // Model string IDs (e.g., 'google/gemini-2.5-flash') are resolved by Mastra's provider registry.\n const om = new ObservationalMemory({\n storage,\n model: config.model,\n observation: config.observation,\n reflection: config.reflection,\n scope: config.scope,\n shareTokenBudget: config.shareTokenBudget,\n });\n\n // Notify user that OM is active (delayed to let TUI initialize)\n setTimeout(() => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Observational Memory activated',\n variant: 'success',\n duration: 3000,\n },\n });\n }, 500);\n\n return {\n // Hook: Eagerly initialize OM record on session creation\n // so diagnostic tools work immediately (before first observation cycle).\n event: async ({ event }) => {\n if (event.type === 'session.created') {\n const sessionId = event.properties.info.id;\n try {\n await om.getOrCreateRecord(sessionId);\n } catch (err) {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: `Failed to initialize Observational Memory: ${err instanceof Error ? err.message : String(err)}`,\n variant: 'error',\n duration: 5000,\n },\n });\n }\n }\n },\n\n // Hook: Transform messages before they reach the model.\n // This is the core integration point — observe and shape context in one pass:\n // 1. Convert opencode messages → MastraDBMessage format\n // 2. Run observation if threshold is met (with toast notifications)\n // 3. 
Inject observation summary and filter out already-observed messages\n 'experimental.chat.messages.transform': async (_input, output) => {\n const sessionId = output.messages[0]?.info.sessionID;\n if (!sessionId) return;\n\n // Ensure API keys are resolved before observation needs a model\n await resolveCredentials();\n\n try {\n const mastraMessages = convertMessages(output.messages, sessionId);\n\n // Run observation — OM filters for unobserved messages and checks thresholds\n if (mastraMessages.length > 0) {\n await om.observe({\n threadId: sessionId,\n messages: mastraMessages,\n hooks: {\n onObservationStart: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Observing conversation...',\n variant: 'info',\n duration: 10000,\n },\n });\n },\n onObservationEnd: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Observation complete',\n variant: 'success',\n duration: 3000,\n },\n });\n },\n onReflectionStart: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Reflecting on observations...',\n variant: 'info',\n duration: 10000,\n },\n });\n },\n onReflectionEnd: () => {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: 'Reflection complete',\n variant: 'success',\n duration: 3000,\n },\n });\n },\n },\n });\n }\n\n // Discard already-observed messages — observations replace them\n const record = await om.getRecord(sessionId);\n if (record?.lastObservedAt) {\n const lastObservedAt = new Date(record.lastObservedAt);\n output.messages = output.messages.filter(({ info }) => {\n // opencode timestamps are already in milliseconds\n const msgTime = new Date(info.time.created);\n return msgTime > lastObservedAt;\n });\n }\n } catch (err) {\n void ctx.client.tui.showToast({\n body: {\n title: 'Mastra',\n message: `Observational Memory error: ${err instanceof Error ? err.message : String(err)}`,\n variant: 'error',\n duration: 5000,\n },\n });\n }\n },\n\n // Hook: Inject observations into the system prompt so the model has compressed context.\n 'experimental.chat.system.transform': async (input, output) => {\n const sessionId = input.sessionID;\n if (!sessionId) return;\n\n try {\n const observations = await om.getObservations(sessionId);\n if (!observations) return;\n\n const optimized = optimizeObservationsForContext(observations);\n output.system.push(\n `${OBSERVATION_CONTEXT_PROMPT}\\n\\n<observations>\\n${optimized}\\n</observations>\\n\\n${OBSERVATION_CONTEXT_INSTRUCTIONS}\\n\\n${OBSERVATION_CONTINUATION_HINT}`,\n );\n } catch {\n // Non-fatal — model proceeds without observations\n }\n },\n\n // Diagnostic tools for inspecting OM state\n tool: {\n memory_status: tool({\n description: 'Show Observational Memory progress — how close the session is to the next observation and reflection cycle.',\n args: {},\n async execute(_args, context) {\n const threadId = context.sessionID;\n const record = await om.getRecord(threadId);\n if (!record) {\n return 'No Observational Memory record found for this session.';\n }\n\n const omConfig = om.config;\n const obsThreshold = resolveThreshold(omConfig.observation.messageTokens);\n const refThreshold = resolveThreshold(omConfig.reflection.observationTokens);\n const obsTokens = record.observationTokenCount ?? 
0;\n\n // Fetch live messages to compute unobserved token count\n const tokenCounter = new TokenCounter();\n let unobservedTokens = 0;\n try {\n const resp = await ctx.client.session.messages({ path: { id: threadId } });\n if (resp.data) {\n const allMastra = convertMessages(resp.data, threadId);\n const unobserved = record.lastObservedAt\n ? allMastra.filter(m => m.createdAt > new Date(record.lastObservedAt!))\n : allMastra;\n unobservedTokens = tokenCounter.countMessages(unobserved);\n }\n } catch {\n // Fall back to record's pending count\n unobservedTokens = record.pendingMessageTokens ?? 0;\n }\n\n const lines = [\n `Observational Memory`,\n `Scope: ${record.scope} | Generations: ${record.generationCount ?? 0}`,\n ``,\n `── Observation ──────────────────────────────`,\n `Unobserved: ${formatTokens(unobservedTokens)} / ${formatTokens(obsThreshold)} tokens`,\n progressBar(unobservedTokens, obsThreshold),\n ``,\n `── Reflection ──────────────────────────────`,\n `Observations: ${formatTokens(obsTokens)} / ${formatTokens(refThreshold)} tokens`,\n progressBar(obsTokens, refThreshold),\n ``,\n `── Status ──────────────────────────────────`,\n `Last observed: ${record.lastObservedAt ?? 'never'}`,\n `Observing: ${record.isObserving ? 'yes' : 'no'} | Reflecting: ${record.isReflecting ? 'yes' : 'no'}`,\n ];\n\n return lines.join('\\n');\n },\n }),\n\n memory_observations: tool({\n description: 'Show the current active observations stored in Observational Memory.',\n args: {},\n async execute(_args, context) {\n const threadId = context.sessionID;\n const observations = await om.getObservations(threadId);\n return observations ?? 'No observations stored yet.';\n },\n }),\n },\n };\n};\n"]}