opencode-mastra-om 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +44 -0
- package/dist/index.js +405 -0
- package/package.json +57 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Rick Ross
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
# opencode-mastra-om
|
|
2
|
+
|
|
3
|
+
Enhanced Mastra Observational Memory plugin for OpenCode.
|
|
4
|
+
|
|
5
|
+
## Improvements over `@mastra/opencode@0.0.20`
|
|
6
|
+
|
|
7
|
+
- `apiKey` in `mastra.json` bypasses env var resolution entirely
|
|
8
|
+
- `storageUrl` supports PostgreSQL in addition to SQLite
|
|
9
|
+
- `observationModel` / `reflectionModel` for separate model selection
|
|
10
|
+
- `logPath` for debug logging without `OM_DEBUG` env var
|
|
11
|
+
- Smarter credential resolution — handles multi-env-var providers (Google)
|
|
12
|
+
- Manual trigger tools: `om_observe`, `om_reflect`, `om_prune`
|
|
13
|
+
- `om_status`, `om_observations`, `om_config` diagnostic tools
|
|
14
|
+
|
|
15
|
+
## Installation
|
|
16
|
+
|
|
17
|
+
The plugin file lives at `src/mastra-om.ts` and is symlinked to `~/.config/opencode/plugin/mastra-om.ts`.
|
|
18
|
+
|
|
19
|
+
OpenCode loads it via `opencode.json`:
|
|
20
|
+
```json
|
|
21
|
+
{
|
|
22
|
+
"plugin": {
|
|
23
|
+
"mastra-om": {
|
|
24
|
+
"path": "~/.config/opencode/plugin/mastra-om.ts"
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
## Config (`<agent-dir>/.opencode/mastra.json`)
|
|
31
|
+
|
|
32
|
+
```json
|
|
33
|
+
{
|
|
34
|
+
"model": "google/gemini-2.5-flash",
|
|
35
|
+
"apiKey": "AIza...",
|
|
36
|
+
"observation": { "messageTokens": 10000 },
|
|
37
|
+
"reflection": { "observationTokens": 60000 },
|
|
38
|
+
"storagePath": ".opencode/memory/observations.db"
|
|
39
|
+
}
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
## Known Issues
|
|
43
|
+
|
|
44
|
+
- Reflection infinite retry loop when observations can't compress below threshold — tracked at [mastra-ai/mastra#14110](https://github.com/mastra-ai/mastra/issues/14110). Workaround: raise `reflection.observationTokens` threshold.
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,405 @@
|
|
|
1
|
+
// src/index.ts
|
|
2
|
+
import { appendFileSync, mkdirSync } from "node:fs";
|
|
3
|
+
import { readFile, mkdir } from "node:fs/promises";
|
|
4
|
+
import { join, dirname } from "node:path";
|
|
5
|
+
import { LibSQLStore } from "@mastra/libsql";
|
|
6
|
+
import {
|
|
7
|
+
ObservationalMemory,
|
|
8
|
+
TokenCounter,
|
|
9
|
+
optimizeObservationsForContext,
|
|
10
|
+
OBSERVATION_CONTINUATION_HINT,
|
|
11
|
+
OBSERVATION_CONTEXT_PROMPT,
|
|
12
|
+
OBSERVATION_CONTEXT_INSTRUCTIONS
|
|
13
|
+
} from "@mastra/memory/processors";
|
|
14
|
+
import { tool } from "@opencode-ai/plugin";
|
|
15
|
+
// Relative path (under the agent directory) of the plugin's JSON config file.
const CONFIG_FILE = ".opencode/mastra.json";
// Default on-disk location of the LibSQL observation database, relative to the
// agent directory (used when neither storageUrl nor storagePath is configured).
const DEFAULT_STORAGE_PATH = ".opencode/memory/observations.db";
// Env var name(s) each provider SDK reads its API key from. Providers that
// accept several variables (e.g. google) list all of them so a configured
// apiKey can populate every one.
const PROVIDER_ENV_VARS = {
  google: ["GOOGLE_GENERATIVE_AI_API_KEY", "GEMINI_API_KEY"],
  anthropic: ["ANTHROPIC_API_KEY"],
  openai: ["OPENAI_API_KEY"],
  xai: ["XAI_API_KEY"],
  groq: ["GROQ_API_KEY"],
  mistral: ["MISTRAL_API_KEY"],
  deepseek: ["DEEPSEEK_API_KEY"],
  openrouter: ["OPENROUTER_API_KEY"],
  fireworks: ["FIREWORKS_API_KEY"]
};
|
|
28
|
+
// Read and parse `<directory>/.opencode/mastra.json`. Any failure — missing
// file, unreadable path, malformed JSON — yields an empty config object so the
// plugin can start with defaults.
async function loadConfig(directory) {
  try {
    const contents = await readFile(join(directory, CONFIG_FILE), "utf-8");
    return JSON.parse(contents);
  } catch {
    // Deliberately best-effort: a bad or absent config means "no overrides".
    return {};
  }
}
|
|
37
|
+
// Convert OpenCode session messages into Mastra's AI-message shape.
// Messages with no convertible parts, or whose role is neither "user" nor
// "assistant", are dropped. `sessionId` doubles as both threadId and
// resourceId on the converted messages.
function convertMessages(messages, sessionId) {
  // Translate one OpenCode part to a Mastra part, or null when there is no
  // Mastra equivalent (empty text/reasoning, plugin-internal "data-om-*"
  // parts, and any unknown part type are all dropped).
  const toMastraPart = (part) => {
    switch (part.type) {
      case "text":
        return part.text ? { type: "text", text: part.text } : null;
      case "tool-invocation":
        return {
          type: "tool-invocation",
          toolInvocation: {
            toolCallId: part.toolCallId,
            toolName: part.toolName,
            args: part.args,
            result: part.result,
            state: part.state
          }
        };
      case "file":
        return { type: "file", url: part.url, mediaType: part.mediaType };
      case "image":
        return { type: "image", image: part.image };
      case "reasoning":
        return part.reasoning ? { type: "reasoning", reasoning: part.reasoning } : null;
      default:
        return null;
    }
  };
  const result = [];
  for (const { info, parts } of messages) {
    const mastraParts = [];
    for (const part of parts) {
      const converted = toMastraPart(part);
      if (converted !== null)
        mastraParts.push(converted);
    }
    if (mastraParts.length === 0)
      continue;
    if (info.role !== "user" && info.role !== "assistant")
      continue;
    result.push({
      id: info.id,
      role: info.role,
      createdAt: new Date(info.time.created),
      threadId: sessionId,
      resourceId: sessionId,
      // format 2 marks the modern parts-based Mastra message content shape.
      content: { format: 2, parts: mastraParts }
    });
  }
  return result;
}
|
|
70
|
+
// Render a textual progress bar like "[██████░░░░░░░░░░░░░░] 30.0%".
// `current/total` is capped at 100%; a non-positive total renders as 0%.
function progressBar(current, total, width = 20) {
  let ratio = 0;
  if (total > 0) {
    ratio = current / total;
    if (ratio > 1)
      ratio = 1;
  }
  const solid = Math.round(ratio * width);
  const bar = "█".repeat(solid) + "░".repeat(width - solid);
  return `[${bar}] ${(ratio * 100).toFixed(1)}%`;
}
|
|
75
|
+
// Format a token count for display: values of 1000+ become "<n/1000>.<d>k"
// (one decimal place), smaller values are shown verbatim.
function formatTokens(n) {
  if (n >= 1000) {
    return `${(n / 1000).toFixed(1)}k`;
  }
  return String(n);
}
|
|
78
|
+
// A threshold may be configured as a plain number or as an object with a
// `max` field; normalize either form to a number.
function resolveThreshold(t) {
  if (typeof t === "number") {
    return t;
  }
  return t.max;
}
|
|
81
|
+
// OpenCode plugin entry point. Wires Mastra Observational Memory (OM) into a
// session: observes chat messages as they flow through the message transform,
// injects condensed observations into the system prompt, and exposes manual
// om_* tools for triggering/inspecting observation and reflection cycles.
//
// Fix vs. previous build: om_observe / om_reflect result strings referred to
// "memory_status" / "memory_observations", tools this plugin does not
// register; they now name the actual om_status / om_observations tools.
var MastraPlugin = async (ctx) => {
  const config = await loadConfig(ctx.directory);
  // Debug log destination: config.logPath wins; the OM_DEBUG env var falls
  // back to a default location; otherwise logging is a no-op.
  let logFile = null;
  if (config.logPath) {
    logFile = join(ctx.directory, config.logPath);
    mkdirSync(dirname(logFile), { recursive: true });
  } else if (process.env.OM_DEBUG) {
    logFile = join(ctx.directory, ".opencode/memory/om.log");
    mkdirSync(dirname(logFile), { recursive: true });
  }
  // Append a timestamped line to the log file; silently ignores write errors
  // so logging can never break the plugin.
  const omLog = (msg) => {
    const line = `[${new Date().toISOString()}] ${msg}\n`;
    if (logFile) {
      try {
        appendFileSync(logFile, line);
      } catch {
        // best-effort logging only
      }
    }
  };
  omLog(`[init] mastra-om plugin starting, pid=${process.pid}`);
  let lastError = null;
  let credentialsReady = false;
  // Populate provider API-key env vars once, from config.apiKey and/or the
  // OpenCode provider store. Subsequent calls return immediately.
  const resolveCredentials = async () => {
    if (credentialsReady)
      return;
    if (config.apiKey) {
      const modelProvider = config.model?.split("/")[0];
      if (modelProvider) {
        // A provider may read several env vars (e.g. google); set them all.
        // Unknown providers fall back to <PROVIDER>_API_KEY.
        const envVars = PROVIDER_ENV_VARS[modelProvider] ?? [`${modelProvider.toUpperCase()}_API_KEY`];
        for (const envVar of envVars) {
          process.env[envVar] = config.apiKey;
        }
        omLog(`[credentials] set ${envVars.join(", ")} from config.apiKey`);
      }
    }
    try {
      const providersResponse = await ctx.client.config.providers();
      if (providersResponse.data) {
        for (const provider of providersResponse.data.providers) {
          const key = provider.key ?? provider.apiKey ?? provider.token;
          if (key && provider.env) {
            for (const envVar of provider.env) {
              // Anything already set (including from config.apiKey above)
              // takes precedence over the provider store.
              if (!process.env[envVar]) {
                process.env[envVar] = key;
                omLog(`[credentials] set ${envVar} from provider store`);
              }
            }
          }
        }
      }
    } catch (e) {
      omLog(`[credentials] provider store unavailable: ${e}`);
    }
    credentialsReady = true;
    omLog(`[credentials] resolved. GOOGLE_GENERATIVE_AI_API_KEY=${process.env.GOOGLE_GENERATIVE_AI_API_KEY ? "set" : "missing"}`);
  };
  // Storage backend: PostgreSQL when storageUrl looks like a postgres URL,
  // otherwise LibSQL (a local SQLite file by default).
  let store;
  if (config.storageUrl && (config.storageUrl.startsWith("postgresql://") || config.storageUrl.startsWith("postgres://"))) {
    omLog(`[init] using PostgreSQL storage: ${config.storageUrl.replace(/:\/\/[^@]+@/, "://<redacted>@")}`);
    // Dynamic import hidden from the bundler so @mastra/pg is only required
    // when PostgreSQL is actually configured.
    const pgMod = await new Function('return import("@mastra/pg")')();
    const PostgresStore = pgMod.PostgresStore;
    store = new PostgresStore({ connectionString: config.storageUrl });
    await store.init();
  } else {
    const url = config.storageUrl ?? `file:${join(ctx.directory, config.storagePath ?? DEFAULT_STORAGE_PATH)}`;
    if (!config.storageUrl) {
      // Ensure the parent directory exists before LibSQL opens the file.
      const dbAbsolutePath = join(ctx.directory, config.storagePath ?? DEFAULT_STORAGE_PATH);
      await mkdir(dirname(dbAbsolutePath), { recursive: true });
    }
    omLog(`[init] using SQLite/LibSQL storage: ${url}`);
    store = new LibSQLStore({ id: "mastra-om", url });
    await store.init();
  }
  const storage = await store.getStore("memory");
  if (!storage)
    throw new Error(`mastra-om: failed to initialize storage`);
  // Per-phase models (observationModel/reflectionModel) override the shared
  // config.model; the shared model is only applied globally when neither
  // per-phase model is configured.
  const omOptions = {
    storage,
    scope: config.scope,
    shareTokenBudget: config.shareTokenBudget,
    observation: {
      ...config.observation,
      ...config.observationModel ? { model: config.observationModel } : {}
    },
    reflection: {
      ...config.reflection,
      ...config.reflectionModel ? { model: config.reflectionModel } : {}
    }
  };
  if (config.model && !config.observationModel && !config.reflectionModel) {
    omOptions.model = config.model;
  }
  const om = new ObservationalMemory(omOptions);
  omLog(`[init] ObservationalMemory created, model=${config.model ?? "default"}`);
  // Delay the startup toast slightly so the TUI is ready to show it.
  setTimeout(() => {
    ctx.client.tui.showToast({
      body: { title: "Mastra OM", message: "Observational Memory active", variant: "success", duration: 3000 }
    });
  }, 500);
  // Run an observation cycle for a session, surfacing progress as TUI toasts
  // and log lines via the OM lifecycle hooks.
  const runObserve = async (sessionId, messages) => {
    await om.observe({
      threadId: sessionId,
      messages,
      hooks: {
        onObservationStart: () => {
          omLog(`[observe] starting observation`);
          ctx.client.tui.showToast({ body: { title: "Mastra OM", message: "Observing...", variant: "info", duration: 1e4 } });
        },
        onObservationEnd: () => {
          omLog(`[observe] observation complete`);
          ctx.client.tui.showToast({ body: { title: "Mastra OM", message: "Observation complete", variant: "success", duration: 3000 } });
        },
        onReflectionStart: () => {
          omLog(`[reflect] starting reflection`);
          ctx.client.tui.showToast({ body: { title: "Mastra OM", message: "Reflecting...", variant: "info", duration: 1e4 } });
        },
        onReflectionEnd: () => {
          omLog(`[reflect] reflection complete`);
          ctx.client.tui.showToast({ body: { title: "Mastra OM", message: "Reflection complete", variant: "success", duration: 3000 } });
        }
      }
    });
  };
  return {
    // Create the OM record eagerly when a session starts so later lookups
    // (status tools, transforms) find one.
    event: async ({ event }) => {
      if (event.type === "session.created") {
        const sessionId = event.properties.info.id;
        try {
          await om.getOrCreateRecord(sessionId);
          omLog(`[session] initialized record for ${sessionId}`);
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err);
          omLog(`[session] failed to init record: ${msg}`);
        }
      }
    },
    // Observe outgoing chat messages, then drop already-observed messages
    // from the prompt (their content now lives in the observations).
    "experimental.chat.messages.transform": async (_input, output) => {
      const sessionId = output.messages[0]?.info.sessionID;
      if (!sessionId)
        return;
      await resolveCredentials();
      try {
        const mastraMessages = convertMessages(output.messages, sessionId);
        if (mastraMessages.length > 0) {
          await runObserve(sessionId, mastraMessages);
        }
        const record = await om.getRecord(sessionId);
        if (record?.lastObservedAt) {
          const lastObservedAt = new Date(record.lastObservedAt);
          output.messages = output.messages.filter(({ info }) => {
            return new Date(info.time.created) > lastObservedAt;
          });
        }
        lastError = null;
      } catch (err) {
        lastError = err instanceof Error ? err.message : String(err);
        omLog(`[error] transform failed: ${lastError}`);
        ctx.client.tui.showToast({
          body: { title: "Mastra OM", message: `Error: ${lastError}`, variant: "error", duration: 5000 }
        });
      }
    },
    // Inject the stored observations (plus Mastra's standard framing prompts)
    // into the system prompt. Failures are silent so a broken OM store never
    // blocks the chat itself.
    "experimental.chat.system.transform": async (input, output) => {
      const sessionId = input.sessionID;
      if (!sessionId)
        return;
      try {
        const observations = await om.getObservations(sessionId);
        if (!observations)
          return;
        const optimized = optimizeObservationsForContext(observations);
        output.system.push(`${OBSERVATION_CONTEXT_PROMPT}

<observations>
${optimized}
</observations>

${OBSERVATION_CONTEXT_INSTRUCTIONS}

${OBSERVATION_CONTINUATION_HINT}`);
      } catch {
        // best-effort: missing observations simply add nothing
      }
    },
    tool: {
      om_status: tool({
        description: "Show Observational Memory progress — how close the session is to the next observation and reflection cycle.",
        args: {},
        async execute(_args, context) {
          const threadId = context.sessionID;
          const record = await om.getRecord(threadId);
          if (!record)
            return "No Observational Memory record found for this session.";
          const omConfig = om.config;
          const obsThreshold = resolveThreshold(omConfig.observation.messageTokens);
          const refThreshold = resolveThreshold(omConfig.reflection.observationTokens);
          const obsTokens = record.observationTokenCount ?? 0;
          const tokenCounter = new TokenCounter();
          // Count tokens of messages newer than the last observation; fall
          // back to the record's own pending counter if the session API
          // is unavailable.
          let unobservedTokens = 0;
          try {
            const resp = await ctx.client.session.messages({ path: { id: threadId } });
            if (resp.data) {
              const allMastra = convertMessages(resp.data, threadId);
              const unobserved = record.lastObservedAt ? allMastra.filter((m) => m.createdAt > new Date(record.lastObservedAt)) : allMastra;
              unobservedTokens = tokenCounter.countMessages(unobserved);
            }
          } catch {
            unobservedTokens = record.pendingMessageTokens ?? 0;
          }
          const modelStr = config.observationModel ? `obs=${config.observationModel} ref=${config.reflectionModel ?? config.model ?? "default"}` : config.model ?? "default";
          const lines = [
            `Observational Memory`,
            `Scope: ${record.scope} | Generations: ${record.generationCount ?? 0} | Model: ${modelStr}`,
            ``,
            `── Observation ──────────────────────────────`,
            `Unobserved: ${formatTokens(unobservedTokens)} / ${formatTokens(obsThreshold)} tokens`,
            progressBar(unobservedTokens, obsThreshold),
            ``,
            `── Reflection ──────────────────────────────`,
            `Observations: ${formatTokens(obsTokens)} / ${formatTokens(refThreshold)} tokens`,
            progressBar(obsTokens, refThreshold),
            ``,
            `── Status ──────────────────────────────────`,
            `Last observed: ${record.lastObservedAt ?? "never"}`,
            `Observing: ${record.isObserving ? "yes" : "no"} | Reflecting: ${record.isReflecting ? "yes" : "no"}`,
            `Credentials: ${credentialsReady ? "ready" : "pending"}`,
            ...lastError ? [`Last error: ${lastError}`] : []
          ];
          return lines.join("\n");
        }
      }),
      om_observations: tool({
        description: "Show the current active observations stored in Observational Memory.",
        args: {},
        async execute(_args, context) {
          const threadId = context.sessionID;
          const observations = await om.getObservations(threadId);
          return observations ?? "No observations stored yet.";
        }
      }),
      om_observe: tool({
        description: "Manually trigger an observation cycle right now, without waiting for the token threshold.",
        args: {},
        async execute(_args, context) {
          const threadId = context.sessionID;
          await resolveCredentials();
          try {
            const resp = await ctx.client.session.messages({ path: { id: threadId } });
            if (!resp.data || resp.data.length === 0)
              return "No messages to observe.";
            const mastraMessages = convertMessages(resp.data, threadId);
            await runObserve(threadId, mastraMessages);
            // Fixed: previously referenced a nonexistent "memory_status" tool.
            return "Observation cycle triggered. Check om_status for results.";
          } catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            lastError = msg;
            return `Observation failed: ${msg}`;
          }
        }
      }),
      om_reflect: tool({
        description: "Manually trigger a reflection cycle to condense accumulated observations.",
        args: {},
        async execute(_args, context) {
          const threadId = context.sessionID;
          await resolveCredentials();
          try {
            await om.reflect(threadId);
            // Fixed: previously referenced a nonexistent "memory_observations" tool.
            return "Reflection cycle triggered. Check om_observations for results.";
          } catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            lastError = msg;
            return `Reflection failed: ${msg}`;
          }
        }
      }),
      om_prune: tool({
        description: "Prune already-observed messages from storage to free space.",
        args: {},
        async execute(_args, context) {
          const threadId = context.sessionID;
          try {
            const resp = await ctx.client.session.messages({ path: { id: threadId } });
            if (!resp.data)
              return "Could not load messages.";
            const mastraMessages = convertMessages(resp.data, threadId);
            const remaining = await om.pruneObserved({ threadId, messages: mastraMessages });
            return `Pruned ${mastraMessages.length - remaining.length} observed messages, ${remaining.length} remaining.`;
          } catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            return `Prune failed: ${msg}`;
          }
        }
      }),
      om_config: tool({
        description: "Show the current Mastra Observational Memory configuration.",
        args: {},
        async execute() {
          const omConfig = om.config;
          // Redact secrets before echoing the config back into the chat.
          const redactedConfig = {
            ...config,
            apiKey: config.apiKey ? `${config.apiKey.slice(0, 8)}...` : undefined,
            storageUrl: config.storageUrl ? config.storageUrl.replace(/:\/\/[^@]+@/, "://<redacted>@") : undefined
          };
          const lines = [
            `── Config (mastra.json) ─────────────────────`,
            JSON.stringify(redactedConfig, null, 2),
            ``,
            `── Resolved OM Settings ─────────────────────`,
            `Observation threshold: ${JSON.stringify(omConfig.observation.messageTokens)} tokens`,
            `Reflection threshold: ${JSON.stringify(omConfig.reflection.observationTokens)} tokens`,
            `Scope: ${omConfig.scope ?? "thread"}`,
            `Storage: ${config.storageUrl ? config.storageUrl.replace(/:\/\/[^@]+@/, "://<redacted>@") : `file:${join(ctx.directory, config.storagePath ?? DEFAULT_STORAGE_PATH)}`}`,
            `Credentials: ${credentialsReady ? "ready" : "pending"}`,
            ...lastError ? [`Last error: ${lastError}`] : []
          ];
          return lines.join("\n");
        }
      })
    }
  };
};
|
|
403
|
+
export {
|
|
404
|
+
MastraPlugin
|
|
405
|
+
};
|
package/package.json
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$schema": "https://json.schemastore.org/package.json",
|
|
3
|
+
"name": "opencode-mastra-om",
|
|
4
|
+
"version": "0.1.0",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"description": "Enhanced Mastra Observational Memory plugin for OpenCode — persistent cross-session memory with observation, reflection, and manual trigger tools",
|
|
8
|
+
"keywords": [
|
|
9
|
+
"opencode",
|
|
10
|
+
"plugin",
|
|
11
|
+
"memory",
|
|
12
|
+
"mastra",
|
|
13
|
+
"observational-memory"
|
|
14
|
+
],
|
|
15
|
+
"author": "Rick Ross",
|
|
16
|
+
"repository": {
|
|
17
|
+
"type": "git",
|
|
18
|
+
"url": "https://github.com/activated-intelligence/opencode-mastra-om"
|
|
19
|
+
},
|
|
20
|
+
"main": "dist/index.js",
|
|
21
|
+
"exports": {
|
|
22
|
+
".": "./dist/index.js",
|
|
23
|
+
"./server": "./dist/index.js"
|
|
24
|
+
},
|
|
25
|
+
"files": [
|
|
26
|
+
"dist",
|
|
27
|
+
"README.md",
|
|
28
|
+
"LICENSE"
|
|
29
|
+
],
|
|
30
|
+
"opencode": {
|
|
31
|
+
"type": "plugin",
|
|
32
|
+
"hooks": [
|
|
33
|
+
"tool",
|
|
34
|
+
"experimental.chat.messages.transform",
|
|
35
|
+
"experimental.chat.system.transform",
|
|
36
|
+
"event"
|
|
37
|
+
]
|
|
38
|
+
},
|
|
39
|
+
"dependencies": {
|
|
40
|
+
"@mastra/core": "^1.21.0",
|
|
41
|
+
"@mastra/libsql": "^1.7.3",
|
|
42
|
+
"@mastra/memory": "^1.13.0",
|
|
43
|
+
"@opencode-ai/plugin": "^1.3.11",
|
|
44
|
+
"@opencode-ai/sdk": "^1.3.11"
|
|
45
|
+
},
|
|
46
|
+
"devDependencies": {
|
|
47
|
+
"@tsconfig/node22": "^22.0.0",
|
|
48
|
+
"@types/bun": "latest",
|
|
49
|
+
"@types/node": "^22.0.0",
|
|
50
|
+
"typescript": "^5.0.0"
|
|
51
|
+
},
|
|
52
|
+
"scripts": {
|
|
53
|
+
"build": "bun build ./src/index.ts --outdir ./dist --target node --external @opencode-ai/plugin --external @opencode-ai/sdk --external @mastra/core --external @mastra/libsql --external @mastra/memory",
|
|
54
|
+
"typecheck": "tsc --noEmit",
|
|
55
|
+
"prepublishOnly": "bun run build"
|
|
56
|
+
}
|
|
57
|
+
}
|