opencode-mem 2.4.0 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -19
- package/dist/config.d.ts +4 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +32 -2
- package/dist/index.js +2 -2
- package/dist/services/auto-capture.d.ts.map +1 -1
- package/dist/services/auto-capture.js +34 -41
- package/dist/services/language-detector.d.ts +3 -0
- package/dist/services/language-detector.d.ts.map +1 -0
- package/dist/services/language-detector.js +16 -0
- package/dist/services/secret-resolver.d.ts +2 -0
- package/dist/services/secret-resolver.d.ts.map +1 -0
- package/dist/services/secret-resolver.js +55 -0
- package/dist/services/user-memory-learning.d.ts.map +1 -1
- package/dist/services/user-memory-learning.js +25 -21
- package/package.json +3 -1
package/README.md
CHANGED
@@ -55,35 +55,31 @@ Configure at `~/.config/opencode/opencode-mem.jsonc`:
   "embeddingModel": "Xenova/nomic-embed-text-v1",
   "webServerEnabled": true,
   "webServerPort": 4747,
+
   "autoCaptureEnabled": true,
+  "autoCaptureLanguage": "auto",
   "memoryProvider": "openai-chat",
-  "memoryModel": "gpt-
+  "memoryModel": "gpt-4o-mini",
+  "memoryApiUrl": "https://api.openai.com/v1",
+  "memoryApiKey": "sk-...",
+
+  "showAutoCaptureToasts": true,
+  "showUserProfileToasts": true,
+  "showErrorToasts": true,
+
   "userProfileAnalysisInterval": 10,
   "maxMemories": 10
 }
 ```

-
-
-## Important: v2.3 Breaking Changes
-
-User-scoped memories removed. All memories now project-scoped. Update configuration:
-
+**API Key Formats:**
 ```jsonc
-
-
-
-  "maxProjectMemories": 10
-}
-
-// NEW: Use only
-{
-  "userProfileAnalysisInterval": 10,
-  "maxMemories": 10
-}
+"memoryApiKey": "sk-..."
+"memoryApiKey": "file://~/.config/opencode/api-key.txt"
+"memoryApiKey": "env://OPENAI_API_KEY"
 ```

-
+Full documentation available in our [Configuration Guide](https://github.com/tickernelz/opencode-mem/wiki/Configuration-Guide).

 ## Documentation

package/dist/config.d.ts
CHANGED
@@ -13,6 +13,7 @@ export declare const CONFIG: {
     autoCaptureEnabled: boolean;
     autoCaptureMaxIterations: number;
     autoCaptureIterationTimeout: number;
+    autoCaptureLanguage: string | undefined;
     memoryProvider: "openai-chat" | "openai-responses" | "anthropic";
     memoryModel: string | undefined;
     memoryApiUrl: string | undefined;
@@ -32,6 +33,9 @@ export declare const CONFIG: {
     userProfileMaxWorkflows: number;
     userProfileConfidenceDecayDays: number;
     userProfileChangelogRetentionCount: number;
+    showAutoCaptureToasts: boolean;
+    showUserProfileToasts: boolean;
+    showErrorToasts: boolean;
 };
 export declare function isConfigured(): boolean;
 //# sourceMappingURL=config.d.ts.map
package/dist/config.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"
+{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAuXA,eAAO,MAAM,MAAM;;;;;;;;;;;;;;;;oBAyBb,aAAa,GACb,kBAAkB,GAClB,WAAW;;;;;;;;;;;;;;;;;;;;;;CA4BhB,CAAC;AAEF,wBAAgB,YAAY,IAAI,OAAO,CAEtC"}
package/dist/config.js
CHANGED
@@ -2,6 +2,7 @@ import { existsSync, readFileSync, mkdirSync, writeFileSync } from "node:fs";
 import { join } from "node:path";
 import { homedir } from "node:os";
 import { stripJsoncComments } from "./services/jsonc.js";
+import { resolveSecretValue } from "./services/secret-resolver.js";
 const CONFIG_DIR = join(homedir(), ".config", "opencode");
 const DATA_DIR = join(homedir(), ".opencode-mem");
 const CONFIG_FILES = [
@@ -41,6 +42,9 @@ const DEFAULTS = {
     userProfileMaxWorkflows: 10,
     userProfileConfidenceDecayDays: 30,
     userProfileChangelogRetentionCount: 5,
+    showAutoCaptureToasts: true,
+    showUserProfileToasts: true,
+    showErrorToasts: true,
 };
 function expandPath(path) {
     if (path.startsWith("~/")) {
@@ -158,6 +162,11 @@ const CONFIG_TEMPLATE = `{
   "memoryModel": "gpt-4o-mini",
   "memoryApiUrl": "https://api.openai.com/v1",
   "memoryApiKey": "sk-...",
+
+  // API Key Formats:
+  // Direct value: "sk-..."
+  // From file: "file://~/.config/litellm-key.txt"
+  // From env variable: "env://LITELLM_API_KEY"

   // Examples for different providers:
   // OpenAI Chat Completion (default, backward compatible):
@@ -193,6 +202,23 @@ const CONFIG_TEMPLATE = `{
   // Days to keep AI session history before cleanup
   "aiSessionRetentionDays": 7,

+  // Language for auto-capture summaries (default: "auto" for auto-detection)
+  // Options: "auto", "en", "id", "zh", "ja", "es", "fr", "de", "ru", "pt", "ar", "ko"
+  // "autoCaptureLanguage": "auto",
+
+  // ============================================
+  // Toast Notifications
+  // ============================================
+
+  // Show toast when memory is auto-captured
+  "showAutoCaptureToasts": true,
+
+  // Show toast when user profile is updated
+  "showUserProfileToasts": true,
+
+  // Show toast for error messages
+  "showErrorToasts": true,
+
   // ============================================
   // User Profile System
   // ============================================
@@ -285,7 +311,7 @@ export const CONFIG = {
         getEmbeddingDimensions(fileConfig.embeddingModel ?? DEFAULTS.embeddingModel),
     embeddingApiUrl: fileConfig.embeddingApiUrl,
     embeddingApiKey: fileConfig.embeddingApiUrl
-        ? (fileConfig.embeddingApiKey ?? process.env.OPENAI_API_KEY)
+        ? resolveSecretValue(fileConfig.embeddingApiKey ?? process.env.OPENAI_API_KEY)
         : undefined,
     similarityThreshold: fileConfig.similarityThreshold ?? DEFAULTS.similarityThreshold,
     maxMemories: fileConfig.maxMemories ?? DEFAULTS.maxMemories,
@@ -295,10 +321,11 @@ export const CONFIG = {
     autoCaptureEnabled: fileConfig.autoCaptureEnabled ?? DEFAULTS.autoCaptureEnabled,
     autoCaptureMaxIterations: fileConfig.autoCaptureMaxIterations ?? DEFAULTS.autoCaptureMaxIterations,
     autoCaptureIterationTimeout: fileConfig.autoCaptureIterationTimeout ?? DEFAULTS.autoCaptureIterationTimeout,
+    autoCaptureLanguage: fileConfig.autoCaptureLanguage,
     memoryProvider: (fileConfig.memoryProvider ?? "openai-chat"),
     memoryModel: fileConfig.memoryModel,
     memoryApiUrl: fileConfig.memoryApiUrl,
-    memoryApiKey: fileConfig.memoryApiKey,
+    memoryApiKey: resolveSecretValue(fileConfig.memoryApiKey),
     aiSessionRetentionDays: fileConfig.aiSessionRetentionDays ?? DEFAULTS.aiSessionRetentionDays,
     webServerEnabled: fileConfig.webServerEnabled ?? DEFAULTS.webServerEnabled,
     webServerPort: fileConfig.webServerPort ?? DEFAULTS.webServerPort,
@@ -314,6 +341,9 @@ export const CONFIG = {
     userProfileMaxWorkflows: fileConfig.userProfileMaxWorkflows ?? DEFAULTS.userProfileMaxWorkflows,
     userProfileConfidenceDecayDays: fileConfig.userProfileConfidenceDecayDays ?? DEFAULTS.userProfileConfidenceDecayDays,
     userProfileChangelogRetentionCount: fileConfig.userProfileChangelogRetentionCount ?? DEFAULTS.userProfileChangelogRetentionCount,
+    showAutoCaptureToasts: fileConfig.showAutoCaptureToasts ?? DEFAULTS.showAutoCaptureToasts,
+    showUserProfileToasts: fileConfig.showUserProfileToasts ?? DEFAULTS.showUserProfileToasts,
+    showErrorToasts: fileConfig.showErrorToasts ?? DEFAULTS.showErrorToasts,
 };
 export function isConfigured() {
     return true;
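Taken together, the config.js changes route every configured API key through the new secret resolver and give the three toast flags defaults of `true`. A minimal sketch of that merge step (TypeScript; `mergeConfig` and the trimmed `FileConfig` shape are illustrative, only the field names and the `??` fallbacks come from the diff above):

```ts
// Sketch only: mirrors how dist/config.js builds CONFIG in 2.5.0.
import { resolveSecretValue } from "./services/secret-resolver.js";

interface FileConfig {
  memoryApiKey?: string;          // "sk-...", "file://..." or "env://..."
  autoCaptureLanguage?: string;   // "auto" or an ISO 639-1 code
  showAutoCaptureToasts?: boolean;
  showUserProfileToasts?: boolean;
  showErrorToasts?: boolean;
}

const DEFAULTS = {
  showAutoCaptureToasts: true,
  showUserProfileToasts: true,
  showErrorToasts: true,
};

export function mergeConfig(fileConfig: FileConfig) {
  return {
    // Every key format goes through the resolver; plain "sk-..." values come back unchanged.
    memoryApiKey: resolveSecretValue(fileConfig.memoryApiKey),
    // No default here: an unset value behaves like "auto" in auto-capture.
    autoCaptureLanguage: fileConfig.autoCaptureLanguage,
    showAutoCaptureToasts: fileConfig.showAutoCaptureToasts ?? DEFAULTS.showAutoCaptureToasts,
    showUserProfileToasts: fileConfig.showUserProfileToasts ?? DEFAULTS.showUserProfileToasts,
    showErrorToasts: fileConfig.showErrorToasts ?? DEFAULTS.showErrorToasts,
  };
}
```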
package/dist/index.js
CHANGED
@@ -148,7 +148,7 @@ export const OpenCodeMemPlugin = async (ctx) => {
         }
         catch (error) {
             log("chat.message: ERROR", { error: String(error) });
-            if (ctx.client?.tui) {
+            if (ctx.client?.tui && CONFIG.showErrorToasts) {
                 await ctx.client.tui
                     .showToast({
                     body: {
@@ -392,7 +392,7 @@ export const OpenCodeMemPlugin = async (ctx) => {
             })
                 .catch((err) => {
                 log("Auto-cleanup failed", { error: String(err) });
-                if (ctx.client?.tui) {
+                if (ctx.client?.tui && CONFIG.showErrorToasts) {
                     ctx.client.tui
                         .showToast({
                         body: {
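Both index.js hunks apply the same pattern: a toast is only shown when the TUI client exists and the relevant flag is enabled, and a failed toast is never allowed to break the plugin. If you were wiring similar behaviour into your own plugin, the guard could be factored out roughly as follows (illustrative helper, not part of opencode-mem; the `showToast` body shape is taken from the calls in this diff):

```ts
// Illustrative sketch: centralizes the "TUI present and flag enabled" guard
// that 2.5.0 wraps around every showToast call.
interface ToastBody {
  title: string;
  message: string;
  variant: "success" | "error";
  duration: number;
}

interface PluginCtx {
  client?: { tui?: { showToast(args: { body: ToastBody }): Promise<unknown> } };
}

export async function showGuardedToast(ctx: PluginCtx, enabled: boolean, body: ToastBody): Promise<void> {
  if (!ctx.client?.tui || !enabled) return;
  // Matches the .catch(() => { }) in the dist code: toast failures are swallowed.
  await ctx.client.tui.showToast({ body }).catch(() => {});
}
```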
package/dist/services/auto-capture.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"auto-capture.d.ts","sourceRoot":"","sources":["../../src/services/auto-capture.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAcvD,wBAAsB,kBAAkB,CACtC,GAAG,EAAE,WAAW,EAChB,SAAS,EAAE,MAAM,EACjB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,
+{"version":3,"file":"auto-capture.d.ts","sourceRoot":"","sources":["../../src/services/auto-capture.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAcvD,wBAAsB,kBAAkB,CACtC,GAAG,EAAE,WAAW,EAChB,SAAS,EAAE,MAAM,EACjB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,CAoGf"}
package/dist/services/auto-capture.js
CHANGED
@@ -37,7 +37,7 @@ export async function performAutoCapture(ctx, sessionID, directory) {
         const tags = getTags(directory);
         const latestMemory = await getLatestProjectMemory(tags.project.tag);
         const context = buildMarkdownContext(prompt.content, textResponses, toolCalls, latestMemory);
-        const summaryResult = await generateSummary(context, sessionID);
+        const summaryResult = await generateSummary(context, sessionID, prompt.content);
         if (!summaryResult || summaryResult.type === "skip") {
             log("Auto-capture: skipped non-technical conversation", { sessionID });
             userPromptManager.deletePrompt(prompt.id);
@@ -59,31 +59,35 @@ export async function performAutoCapture(ctx, sessionID, directory) {
         if (result.success) {
             userPromptManager.linkMemoryToPrompt(prompt.id, result.id);
             userPromptManager.markAsCaptured(prompt.id);
+            if (CONFIG.showAutoCaptureToasts) {
+                await ctx.client?.tui
+                    .showToast({
+                    body: {
+                        title: "Memory Captured",
+                        message: "Project memory saved from conversation",
+                        variant: "success",
+                        duration: 3000,
+                    },
+                })
+                    .catch(() => { });
+            }
+        }
+    }
+    catch (error) {
+        log("Auto-capture error", { sessionID, error: String(error) });
+        if (CONFIG.showErrorToasts) {
             await ctx.client?.tui
                 .showToast({
                 body: {
-                    title: "
-                    message:
-                    variant: "
-                    duration:
+                    title: "Auto-Capture Failed",
+                    message: String(error),
+                    variant: "error",
+                    duration: 5000,
                 },
             })
                 .catch(() => { });
         }
     }
-    catch (error) {
-        log("Auto-capture error", { sessionID, error: String(error) });
-        await ctx.client?.tui
-            .showToast({
-            body: {
-                title: "Auto-Capture Failed",
-                message: String(error),
-                variant: "error",
-                duration: 5000,
-            },
-        })
-            .catch(() => { });
-    }
 }
 function extractAIContent(messages) {
     const textResponses = [];
@@ -180,11 +184,12 @@ function buildMarkdownContext(userPrompt, textResponses, toolCalls, latestMemory
     }
     return sections.join("\n");
 }
-async function generateSummary(context, sessionID) {
+async function generateSummary(context, sessionID, userPrompt) {
     if (!CONFIG.memoryModel || !CONFIG.memoryApiUrl) {
         throw new Error("External API not configured for auto-capture");
     }
     const { AIProviderFactory } = await import("./ai/ai-provider-factory.js");
+    const { detectLanguage, getLanguageName } = await import("./language-detector.js");
     const providerConfig = {
         model: CONFIG.memoryModel,
         apiUrl: CONFIG.memoryApiUrl,
@@ -193,6 +198,10 @@ async function generateSummary(context, sessionID) {
         iterationTimeout: CONFIG.autoCaptureIterationTimeout,
     };
     const provider = AIProviderFactory.createProvider(CONFIG.memoryProvider, providerConfig);
+    const targetLang = CONFIG.autoCaptureLanguage === "auto" || !CONFIG.autoCaptureLanguage
+        ? detectLanguage(userPrompt)
+        : CONFIG.autoCaptureLanguage;
+    const langName = getLanguageName(targetLang);
     const systemPrompt = `You are a technical memory recorder for a software development project.

 RULES:
@@ -200,34 +209,18 @@
 2. SKIP non-technical by returning type="skip"
 3. NO meta-commentary or behavior analysis
 4. Include specific file names, functions, technical details
-5.
+5. You MUST write the summary in ${langName}.

 FORMAT:
 ## Request
-[1-2 sentences: what was requested, in
+[1-2 sentences: what was requested, in ${langName}]

 ## Outcome
-[1-2 sentences: what was done, include files/functions, in
+[1-2 sentences: what was done, include files/functions, in ${langName}]

 SKIP if: greetings, casual chat, no code/decisions made
-CAPTURE if: code changed, bug fixed, feature added, decision made
-
-EXAMPLES:
-Technical (English) → type="feature":
-## Request
-Fix function returning null.
-## Outcome
-Changed searchMemories() to listMemories() in auto-capture.ts:166.
-
-Technical (Indonesian) → type="feature":
-## Request
-Perbaiki fungsi yang mengembalikan null.
-## Outcome
-Mengubah searchMemories() menjadi listMemories() di auto-capture.ts:166.
-
-Non-technical → type="skip", summary="":
-User greeted, AI introduced capabilities.`;
-    const userPrompt = `${context}
+CAPTURE if: code changed, bug fixed, feature added, decision made`;
+    const aiPrompt = `${context}

 Analyze this conversation. If it contains technical work (code, bugs, features, decisions), create a concise summary. If it's non-technical (greetings, casual chat, incomplete requests), return type="skip" with empty summary.`;
     const toolSchema = {
@@ -251,7 +244,7 @@ Analyze this conversation. If it contains technical work (code, bugs, features,
             },
         },
     };
-    const result = await provider.executeToolCall(systemPrompt,
+    const result = await provider.executeToolCall(systemPrompt, aiPrompt, toolSchema, sessionID);
     if (!result.success || !result.data) {
         throw new Error(result.error || "Failed to generate summary");
     }
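The net effect of the auto-capture changes: the user's prompt is now threaded into `generateSummary`, and the summary language is either taken from `autoCaptureLanguage` or detected from that prompt. A condensed sketch of the selection step (TypeScript; `pickSummaryLanguage` is an illustrative wrapper around the two functions added in language-detector.js below):

```ts
import { detectLanguage, getLanguageName } from "./language-detector.js";

// "auto" (or an unset value) falls back to detecting the language of the user's prompt.
export function pickSummaryLanguage(configured: string | undefined, userPrompt: string): string {
  const targetLang = configured === "auto" || !configured ? detectLanguage(userPrompt) : configured;
  return getLanguageName(targetLang); // e.g. "English", "Indonesian"
}

// The returned name is interpolated into the system prompt, e.g.
// "5. You MUST write the summary in ${langName}." and the two FORMAT placeholders.
```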
package/dist/services/language-detector.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"language-detector.d.ts","sourceRoot":"","sources":["../../src/services/language-detector.ts"],"names":[],"mappings":"AAGA,wBAAgB,cAAc,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAYnD;AAED,wBAAgB,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAGpD"}
package/dist/services/language-detector.js
ADDED
@@ -0,0 +1,16 @@
+import { franc } from "franc-min";
+import { iso6393, iso6393To1 } from "iso-639-3";
+export function detectLanguage(text) {
+    if (!text || text.trim().length === 0) {
+        return "en";
+    }
+    const detected = franc(text, { minLength: 10 });
+    if (detected === "und") {
+        return "en";
+    }
+    return iso6393To1[detected] || "en";
+}
+export function getLanguageName(code) {
+    const lang = iso6393.find((l) => l.iso6391 === code);
+    return lang?.name || "English";
+}
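A short usage sketch for the new detector (expected values only; actual results depend on franc-min's statistical model, and text below the `minLength: 10` threshold falls back to English):

```ts
import { detectLanguage, getLanguageName } from "./language-detector.js";

const code = detectLanguage("Perbaiki fungsi yang mengembalikan null pada auto-capture.");
console.log(code);                  // expected: "id"
console.log(getLanguageName(code)); // expected: "Indonesian"

console.log(detectLanguage("hi"));  // below franc's minLength -> "und" -> "en"
```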
package/dist/services/secret-resolver.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"secret-resolver.d.ts","sourceRoot":"","sources":["../../src/services/secret-resolver.ts"],"names":[],"mappings":"AAiCA,wBAAgB,kBAAkB,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,MAAM,GAAG,SAAS,CAkChF"}
package/dist/services/secret-resolver.js
ADDED
@@ -0,0 +1,55 @@
+import { existsSync, readFileSync, statSync } from "node:fs";
+import { join } from "node:path";
+import { homedir, platform } from "node:os";
+function expandPath(path) {
+    if (path.startsWith("~/")) {
+        return join(homedir(), path.slice(2));
+    }
+    if (path === "~") {
+        return homedir();
+    }
+    return path;
+}
+function checkFilePermissions(filePath) {
+    if (platform() === "win32") {
+        return;
+    }
+    try {
+        const stats = statSync(filePath);
+        const mode = stats.mode & 0o777;
+        if (mode > 0o600) {
+            console.warn(`Warning: Secret file ${filePath} has permissive permissions (${mode.toString(8)}). Recommend chmod 600.`);
+        }
+    }
+    catch (error) {
+        console.warn(`Warning: Could not check file permissions for ${filePath}`);
+    }
+}
+export function resolveSecretValue(value) {
+    if (!value) {
+        return undefined;
+    }
+    if (value.startsWith("file://")) {
+        const filePath = expandPath(value.slice(7));
+        if (!existsSync(filePath)) {
+            throw new Error(`Secret file not found: ${filePath}`);
+        }
+        try {
+            checkFilePermissions(filePath);
+            const content = readFileSync(filePath, "utf-8");
+            return content.trim();
+        }
+        catch (error) {
+            throw new Error(`Failed to read secret file ${filePath}: ${error}`);
+        }
+    }
+    if (value.startsWith("env://")) {
+        const envVar = value.slice(6);
+        const envValue = process.env[envVar];
+        if (!envValue) {
+            throw new Error(`Environment variable not found: ${envVar}`);
+        }
+        return envValue;
+    }
+    return value;
+}
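Usage sketch for the resolver (the paths and variable names are illustrative; as the code above shows, a missing file or unset variable throws, while an overly permissive key file on POSIX systems only triggers a warning):

```ts
import { resolveSecretValue } from "./secret-resolver.js";

resolveSecretValue("sk-abc123");                              // passed through unchanged
resolveSecretValue("env://OPENAI_API_KEY");                   // value of $OPENAI_API_KEY, or throws if unset
resolveSecretValue("file://~/.config/opencode/api-key.txt");  // trimmed file contents; warns unless chmod 600
resolveSecretValue(undefined);                                // undefined
```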
package/dist/services/user-memory-learning.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"user-memory-learning.d.ts","sourceRoot":"","sources":["../../src/services/user-memory-learning.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AASvD,wBAAsB,0BAA0B,CAC9C,GAAG,EAAE,WAAW,EAChB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,
+{"version":3,"file":"user-memory-learning.d.ts","sourceRoot":"","sources":["../../src/services/user-memory-learning.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AASvD,wBAAsB,0BAA0B,CAC9C,GAAG,EAAE,WAAW,EAChB,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,CAuFf"}
package/dist/services/user-memory-learning.js
CHANGED
@@ -32,16 +32,18 @@ export async function performUserProfileLearning(ctx, directory) {
             userProfileManager.createProfile(userId, tags.user.displayName || "Unknown", tags.user.userName || "unknown", tags.user.userEmail || "unknown", updatedProfileData, prompts.length);
         }
         userPromptManager.markMultipleAsUserLearningCaptured(prompts.map((p) => p.id));
-
-
-
-
-
-
-
-
-
-
+        if (CONFIG.showUserProfileToasts) {
+            await ctx.client?.tui
+                .showToast({
+                body: {
+                    title: "User Profile Updated",
+                    message: `Analyzed ${prompts.length} prompts and updated your profile`,
+                    variant: "success",
+                    duration: 3000,
+                },
+            })
+                .catch(() => { });
+        }
     }
     catch (error) {
         const errorStack = error instanceof Error ? error.stack : undefined;
@@ -50,16 +52,18 @@ export async function performUserProfileLearning(ctx, directory) {
             stack: errorStack,
             errorType: error instanceof Error ? error.constructor.name : typeof error,
         });
-
-
-
-
-
-
-
-
-
-
+        if (CONFIG.showErrorToasts) {
+            await ctx.client?.tui
+                .showToast({
+                body: {
+                    title: "User Profile Update Failed",
+                    message: String(error),
+                    variant: "error",
+                    duration: 5000,
+                },
+            })
+                .catch(() => { });
+        }
     }
 }
 function generateChangeSummary(oldProfile, newProfile) {
@@ -136,7 +140,7 @@ async function analyzeUserProfile(context, existingProfile) {

 Your task is to analyze user prompts and ${existingProfile ? "update" : "create"} a comprehensive user profile.

-CRITICAL: Detect the language used by the user. You MUST output all descriptions and text in the SAME language as the user's prompts.
+CRITICAL: Detect the language used by the user in their prompts. You MUST output all descriptions, categories, and text in the SAME language as the user's prompts.

 Use the update_user_profile tool to save the ${existingProfile ? "updated" : "new"} profile.`;
     const toolSchema = {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-mem",
-  "version": "2.4.0",
+  "version": "2.5.0",
   "description": "OpenCode plugin that gives coding agents persistent memory using local vector database",
   "type": "module",
   "main": "dist/plugin.js",
@@ -34,6 +34,8 @@
   "dependencies": {
     "@opencode-ai/plugin": "^1.0.162",
     "@xenova/transformers": "^2.17.2",
+    "franc-min": "^6.2.0",
+    "iso-639-3": "^3.0.1",
     "sqlite-vec": "^0.1.7-alpha.2"
   },
   "devDependencies": {