@syntesseraai/opencode-feature-factory 0.3.5 → 0.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -42,6 +42,7 @@ The plugin now includes a local MCP daemon binary: `ff-local-recall-mcp`.
42
42
  - `local_recall.index.status`
43
43
  - `local_recall.index.stop`
44
44
  - `local_recall.index.rebuild`
45
+ - In headless and TUI sessions, relevant memories are automatically injected into model context per user prompt
45
46
 
46
47
  ### Environment Variables
47
48
 
@@ -56,6 +57,19 @@ The plugin now includes a local MCP daemon binary: `ff-local-recall-mcp`.
56
57
  - `FF_LOCAL_RECALL_OPENAI_MODEL` - OpenAI embedding model (default: `text-embedding-3-small`)
57
58
  - `OPENAI_API_KEY` - Required when `FF_LOCAL_RECALL_EMBEDDING_PROVIDER=openai`
58
59
 
60
+ ### Prompt Injection Controls
61
+
62
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_ENABLED` - Enable automatic per-prompt memory injection (`true` by default)
63
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_MIN_PROMPT_CHARS` - Minimum user prompt length before searching (default: `20`)
64
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_MAX_QUERY_CHARS` - Maximum characters from prompt used for retrieval (default: `2000`)
65
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_SEARCH_LIMIT` - Max memories fetched before ranking/filtering (default: `8`)
66
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_MAX_RESULTS` - Max memories injected into model context (default: `5`)
67
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_MIN_RELEVANCE` - Minimum relevance score for inclusion (default: `0.2`)
68
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_MIN_IMPORTANCE` - Minimum memory importance used in search (default: `0.2`)
69
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_MAX_TOKENS` - Approx token budget for injected memory block (default: `400`)
70
+ - `FF_LOCAL_RECALL_PROMPT_INJECTION_SESSION_ONLY` - Restrict retrieval to current session memories only (`false` by default)
71
+ - `FF_LOCAL_RECALL_PROMPT_INTERNAL_MARKERS` - Comma-separated markers that, when present in the prompt, cause injection to be skipped (default: `[LOCAL_RECALL_INTERNAL]`)
72
+
59
73
  ## Agents Provided
60
74
 
61
75
  ### Primary Agents
package/dist/index.js CHANGED
@@ -6,6 +6,7 @@ import { createFFAgentsCurrentTool } from './plugins/ff-agents-current-plugin.js
6
6
  import { createFFAgentsShowTool } from './plugins/ff-agents-show-plugin.js';
7
7
  import { createFFAgentsClearTool } from './plugins/ff-agents-clear-plugin.js';
8
8
  import { createLearningStoreTool, createLearningSearchTool, createLearningGetTool, createLearningIndexStartTool, createLearningIndexStatusTool, createLearningIndexStopTool, createLearningIndexRebuildTool, initLocalRecall, } from './local-recall/index.js';
9
+ import { createLocalRecallPromptHooks } from './local-recall/prompt-injection.js';
9
10
  import { createFFPlanCreateTool } from './plugins/ff-plan-create-plugin.js';
10
11
  import { createFFPlanUpdateTool } from './plugins/ff-plan-update-plugin.js';
11
12
  import { createFFAgentContextCreateTool } from './plugins/ff-agent-context-create-plugin.js';
@@ -49,6 +50,8 @@ export const FeatureFactoryPlugin = async (input) => {
49
50
  }
50
51
  // Load hooks from the quality gate plugin
51
52
  const qualityGateHooks = await StopQualityGateHooksPlugin(input).catch(() => ({}));
53
+ // Load local-recall prompt injection hooks (headless-safe)
54
+ const localRecallPromptHooks = createLocalRecallPromptHooks(directory);
52
55
  // Create all tools
53
56
  const tools = {
54
57
  // Agent management tools
@@ -89,6 +92,7 @@ export const FeatureFactoryPlugin = async (input) => {
89
92
  // Return combined hooks and tools
90
93
  return {
91
94
  ...qualityGateHooks,
95
+ ...localRecallPromptHooks,
92
96
  tool: tools,
93
97
  };
94
98
  };
@@ -0,0 +1,2 @@
1
+ import type { Hooks } from '@opencode-ai/plugin';
2
+ export declare function createLocalRecallPromptHooks(directory: string): Partial<Hooks>;
@@ -0,0 +1,194 @@
1
+ import { searchLearningMemories } from './mcp-server.js';
2
// Heading and usage note that lead the injected memory block.
const MEMORY_CONTEXT_HEADER = '## Local Recall: Relevant Memories';
const MEMORY_CONTEXT_GUIDANCE = 'Use these prior project learnings only when they directly improve the current response.';
// Marker(s) identifying internal plumbing prompts that must not trigger injection.
const DEFAULT_INTERNAL_MARKERS = ['[LOCAL_RECALL_INTERNAL]'];
// Rough characters-per-token ratio used by the token-budget estimate.
const APPROX_CHARS_PER_TOKEN = 4;
// sessionID -> formatted memory context staged for the next system-prompt transform.
const pendingContextBySession = new Map();
7
/**
 * Read a boolean environment variable.
 * Accepts 1/true/yes/on and 0/false/no/off (case-insensitive, trimmed);
 * unset, empty, or unrecognized values yield `fallback`.
 */
function parseBooleanEnv(name, fallback) {
    const raw = process.env[name];
    if (!raw) {
        return fallback;
    }
    switch (raw.trim().toLowerCase()) {
        case '1':
        case 'true':
        case 'yes':
        case 'on':
            return true;
        case '0':
        case 'false':
        case 'no':
        case 'off':
            return false;
        default:
            return fallback;
    }
}
21
/**
 * Read a numeric environment variable and clamp it to
 * [options.min, options.max] (each side unbounded by default).
 * With `options.integer` the parsed value is truncated toward zero
 * before clamping. Unset, blank, or non-numeric values yield `fallback`.
 */
function parseNumberEnv(name, fallback, options) {
    const raw = process.env[name];
    if (raw === undefined) {
        return fallback;
    }
    const trimmed = raw.trim();
    // Guard against blank values: Number('') and Number('   ') evaluate to 0,
    // which would otherwise be clamped and returned instead of the fallback.
    if (trimmed === '') {
        return fallback;
    }
    const parsed = Number(trimmed);
    if (!Number.isFinite(parsed)) {
        return fallback;
    }
    const value = options?.integer ? Math.trunc(parsed) : parsed;
    const min = options?.min ?? Number.NEGATIVE_INFINITY;
    const max = options?.max ?? Number.POSITIVE_INFINITY;
    return Math.min(max, Math.max(min, value));
}
35
/**
 * Parse FF_LOCAL_RECALL_PROMPT_INTERNAL_MARKERS as a comma-separated list.
 * Falls back to DEFAULT_INTERNAL_MARKERS when the variable is unset or
 * contains no non-empty entries after trimming.
 */
function parseInternalMarkers() {
    const raw = process.env.FF_LOCAL_RECALL_PROMPT_INTERNAL_MARKERS;
    if (!raw) {
        return DEFAULT_INTERNAL_MARKERS;
    }
    const markers = [];
    for (const piece of raw.split(',')) {
        const trimmed = piece.trim();
        if (trimmed.length > 0) {
            markers.push(trimmed);
        }
    }
    return markers.length > 0 ? markers : DEFAULT_INTERNAL_MARKERS;
}
46
/**
 * Snapshot the prompt-injection configuration from the environment.
 * Every knob has a safe default and is clamped to a sane range by the
 * env parsers; callers receive a plain immutable-by-convention object.
 */
function getPromptInjectionConfig() {
    return {
        // Master switch for per-prompt memory injection.
        enabled: parseBooleanEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_ENABLED', true),
        // Prompts shorter than this are not searched at all.
        minPromptChars: parseNumberEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_MIN_PROMPT_CHARS', 20, { min: 1, max: 2000, integer: true }),
        // Cap on how much of the prompt becomes the retrieval query.
        maxQueryChars: parseNumberEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_MAX_QUERY_CHARS', 2000, { min: 64, max: 20000, integer: true }),
        // Memories fetched from the store before ranking/filtering.
        searchLimit: parseNumberEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_SEARCH_LIMIT', 8, { min: 1, max: 50, integer: true }),
        // Memories actually injected into model context.
        maxResults: parseNumberEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_MAX_RESULTS', 5, { min: 1, max: 20, integer: true }),
        // Relevance floor applied after search.
        minRelevance: parseNumberEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_MIN_RELEVANCE', 0.2, { min: 0, max: 1 }),
        // Importance floor passed to the search itself.
        minImportance: parseNumberEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_MIN_IMPORTANCE', 0.2, { min: 0, max: 1 }),
        // Approximate token budget for the injected block.
        maxTokens: parseNumberEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_MAX_TOKENS', 400, { min: 50, max: 4000, integer: true }),
        // Restrict retrieval to the current session's memories.
        sessionOnly: parseBooleanEnv('FF_LOCAL_RECALL_PROMPT_INJECTION_SESSION_ONLY', false),
        // Prompt markers that suppress injection entirely.
        internalMarkers: parseInternalMarkers(),
    };
}
86
// Cheap token estimate: character count divided by the assumed
// chars-per-token ratio, rounded up so budgets err on the safe side.
function estimateTokens(text) {
    const { length } = text;
    return Math.ceil(length / APPROX_CHARS_PER_TOKEN);
}
89
// Collapse every whitespace run to a single space and strip the ends,
// yielding a single-line rendering of arbitrary text.
function normalizeInlineText(value) {
    return value.trim().replace(/\s+/g, ' ');
}
92
/**
 * Concatenate the user-authored text of a message's parts, skipping
 * non-text parts and synthetic (tool-generated) text parts, then trim.
 */
function extractUserPromptText(parts) {
    const chunks = [];
    for (const part of parts) {
        if (part.type === 'text' && !part.synthetic) {
            chunks.push(part.text);
        }
    }
    return chunks.join('\n').trim();
}
101
// True when the prompt contains any non-empty internal marker.
function isInternalPrompt(prompt, markers) {
    for (const marker of markers) {
        if (marker.length > 0 && prompt.includes(marker)) {
            return true;
        }
    }
    return false;
}
104
// Blend retrieval relevance (75%) with stored importance (25%).
function scoreMemory(memory) {
    const { relevance, importance } = memory;
    return relevance * 0.75 + importance * 0.25;
}
107
/**
 * Drop memories below the relevance floor, then order by blended score
 * (75% relevance / 25% importance, descending), breaking ties by
 * recency (newest createdAt first). The input array is not mutated.
 */
function rankMemories(memories, config) {
    // Same blend as scoreMemory, inlined into the comparator.
    const blended = (m) => m.relevance * 0.75 + m.importance * 0.25;
    const kept = memories.filter((m) => m.relevance >= config.minRelevance);
    return kept.sort((a, b) => {
        const delta = blended(b) - blended(a);
        return delta !== 0 ? delta : b.createdAt - a.createdAt;
    });
}
118
/**
 * Render one memory as a numbered summary line:
 * "<n>. <title> (category: …, relevance: …, importance: …[ | tags: …])".
 * Shows at most five tags; scores are fixed to two decimals.
 */
function formatMemoryLine(index, memory) {
    // Inline whitespace normalization (same behavior as normalizeInlineText).
    const title = memory.title.replace(/\s+/g, ' ').trim();
    const tagSuffix = memory.tags.length > 0 ? ` | tags: ${memory.tags.slice(0, 5).join(', ')}` : '';
    const relevance = memory.relevance.toFixed(2);
    const importance = memory.importance.toFixed(2);
    return `${index}. ${title} (category: ${memory.category}, relevance: ${relevance}, importance: ${importance}${tagSuffix})`;
}
123
/**
 * Assemble the injected context block from ranked memories, honoring
 * both the max-results cap and an approximate token budget (which also
 * accounts for the header and guidance lines). The first memory line is
 * always admitted even if it alone exceeds the budget; returns null
 * when no memory line makes it in.
 */
function buildMemoryContext(memories, config) {
    let budgetUsed = estimateTokens(MEMORY_CONTEXT_HEADER) + estimateTokens(MEMORY_CONTEXT_GUIDANCE);
    const entries = [];
    for (const memory of memories) {
        if (entries.length >= config.maxResults) {
            break;
        }
        const line = formatMemoryLine(entries.length + 1, memory);
        const cost = estimateTokens(line);
        // Always keep at least one entry; afterwards stop once over budget.
        if (entries.length > 0 && budgetUsed + cost > config.maxTokens) {
            break;
        }
        entries.push(line);
        budgetUsed += cost;
    }
    if (entries.length === 0) {
        return null;
    }
    const bullets = entries.map((entry) => `- ${entry}`);
    return [MEMORY_CONTEXT_HEADER, MEMORY_CONTEXT_GUIDANCE, ...bullets].join('\n');
}
143
/**
 * Build opencode hooks that inject relevant local-recall memories into
 * the model's system prompt, once per user prompt.
 *
 * Flow: on each user chat message, search stored memories for the
 * prompt text and stage a formatted context block keyed by session;
 * the system-prompt transform hook then consumes (and clears) the
 * staged block. Returns {} when injection is disabled via env.
 *
 * Note: the configuration is captured once at plugin load; later env
 * changes are ignored for the life of the process.
 */
export function createLocalRecallPromptHooks(directory) {
    const config = getPromptInjectionConfig();
    if (!config.enabled) {
        return {};
    }
    const clearPending = (sessionID) => {
        pendingContextBySession.delete(sessionID);
    };
    // Search memories for this prompt and format them; null when nothing fits.
    const stageContextForPrompt = async (sessionID, promptText) => {
        const criteria = {
            query: promptText.slice(0, config.maxQueryChars),
            limit: config.searchLimit,
            minImportance: config.minImportance,
        };
        if (config.sessionOnly) {
            criteria.sessionID = sessionID;
        }
        const searchOutput = await searchLearningMemories(directory, criteria);
        const ranked = rankMemories(searchOutput.results, config);
        return buildMemoryContext(ranked, config);
    };
    return {
        'chat.message': async (input, output) => {
            // NOTE(review): assumes input.sessionID is always present on this
            // hook (the transform hook below guards it) — confirm upstream.
            const sessionID = input.sessionID;
            const promptText = extractUserPromptText(output.parts);
            const tooShort = promptText.length < config.minPromptChars;
            if (tooShort || isInternalPrompt(promptText, config.internalMarkers)) {
                clearPending(sessionID);
                return;
            }
            try {
                const context = await stageContextForPrompt(sessionID, promptText);
                if (context) {
                    pendingContextBySession.set(sessionID, context);
                }
                else {
                    clearPending(sessionID);
                }
            }
            catch {
                // Retrieval is best-effort; never let it break the chat flow.
                clearPending(sessionID);
            }
        },
        'experimental.chat.system.transform': async (input, output) => {
            const sessionID = input.sessionID;
            if (!sessionID) {
                return;
            }
            const pending = pendingContextBySession.get(sessionID);
            if (!pending) {
                return;
            }
            output.system.push(pending);
            // Consume once so a stale block never leaks into later prompts.
            clearPending(sessionID);
        },
    };
}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "$schema": "https://json.schemastore.org/package.json",
3
3
  "name": "@syntesseraai/opencode-feature-factory",
4
- "version": "0.3.5",
4
+ "version": "0.3.6",
5
5
  "type": "module",
6
6
  "description": "OpenCode plugin for Feature Factory agents - provides sub-agents and skills for validation, review, security, and architecture assessment",
7
7
  "license": "MIT",