@syntesseraai/opencode-feature-factory 0.3.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/README.md +27 -0
  2. package/agents/building.md +0 -1
  3. package/agents/ff-acceptance.md +0 -2
  4. package/agents/ff-research.md +0 -1
  5. package/agents/ff-review.md +0 -2
  6. package/agents/ff-security.md +0 -2
  7. package/agents/ff-validate.md +0 -2
  8. package/agents/ff-well-architected.md +0 -2
  9. package/agents/planning.md +0 -1
  10. package/agents/reviewing.md +0 -1
  11. package/bin/ff-deploy.js +5 -0
  12. package/bin/ff-local-recall-mcp.js +9 -0
  13. package/dist/index.js +16 -1
  14. package/dist/local-recall/daemon-controller.d.ts +51 -0
  15. package/dist/local-recall/daemon-controller.js +166 -0
  16. package/dist/local-recall/index-state.d.ts +14 -0
  17. package/dist/local-recall/index-state.js +76 -0
  18. package/dist/local-recall/index.d.ts +8 -2
  19. package/dist/local-recall/index.js +9 -2
  20. package/dist/local-recall/mcp-server.d.ts +29 -33
  21. package/dist/local-recall/mcp-server.js +172 -53
  22. package/dist/local-recall/mcp-stdio-server.d.ts +4 -0
  23. package/dist/local-recall/mcp-stdio-server.js +225 -0
  24. package/dist/local-recall/mcp-tools.d.ts +24 -11
  25. package/dist/local-recall/mcp-tools.js +112 -87
  26. package/dist/local-recall/vector/embedding-provider.d.ts +37 -0
  27. package/dist/local-recall/vector/embedding-provider.js +184 -0
  28. package/dist/local-recall/vector/orama-index.d.ts +37 -0
  29. package/dist/local-recall/vector/orama-index.js +379 -0
  30. package/dist/local-recall/vector/types.d.ts +33 -0
  31. package/dist/local-recall/vector/types.js +1 -0
  32. package/dist/mcp-config.d.ts +63 -0
  33. package/dist/mcp-config.js +121 -0
  34. package/package.json +5 -2
@@ -1,28 +1,24 @@
1
- /**
2
- * mcp-tools.ts — OpenCode plugin tool definitions for local-recall.
3
- *
4
- * Replaces the legacy ff-learning-{store,search,get} plugins with
5
- * MCP-backed tools that read from OpenCode's session storage and
6
- * expose the local-recall memory system.
7
- *
8
- * Public contract uses the native MemoryCategory taxonomy
9
- * (pattern, decision, debugging, preference, context, procedure).
10
- * The public contract exposes only category-based MCP tools.
11
- */
12
1
  import { tool } from '@opencode-ai/plugin/tool';
13
- import { searchMemories, getMemory } from './memory-service.js';
14
- import { runExtraction } from './daemon.js';
15
- // ────────────────────────────────────────────────────────────
16
- // ff-learning-store → triggers extraction + explicit store
17
- // ────────────────────────────────────────────────────────────
2
+ import { getIndexingStatus, getLearningMemory, rebuildIndex, searchLearningMemories, startIndexingDaemon, stopIndexingDaemon, storeLearningMemory, } from './mcp-server.js';
3
+ function errorResponse(error) {
4
+ return JSON.stringify({
5
+ success: false,
6
+ error: error instanceof Error ? error.message : String(error),
7
+ });
8
+ }
18
9
  export const createLearningStoreTool = () => tool({
19
- description: 'Store a new learning/memory. Creates a JSON memory file in .feature-factory/local-recall/memories/. ' +
20
- 'Also triggers extraction from OpenCode session history so that ' +
21
- 'new memories are automatically captured from recent conversations.',
10
+ description: 'Store a memory in local-recall and enqueue asynchronous daemon indexing. Returns memory ID and daemon status.',
22
11
  args: {
23
12
  title: tool.schema.string('Title for the memory'),
24
13
  description: tool.schema.string('Brief description of the learning'),
25
- category: tool.schema.enum(['pattern', 'decision', 'debugging', 'preference', 'context', 'procedure'], 'Category of memory: pattern (reusable code), decision (arch/design), debugging (error resolution), preference (user pref), context (domain knowledge), procedure (how-to)'),
14
+ category: tool.schema.enum([
15
+ 'pattern',
16
+ 'decision',
17
+ 'debugging',
18
+ 'preference',
19
+ 'context',
20
+ 'procedure',
21
+ ]),
26
22
  tags: tool.schema.array(tool.schema.string(), 'Tags for categorization'),
27
23
  importance: tool.schema.number('Importance score from 0 to 1').optional(),
28
24
  content: tool.schema.string('Full content/body of the memory').optional(),
@@ -38,46 +34,28 @@ export const createLearningStoreTool = () => tool({
38
34
  },
39
35
  async execute(args, toolCtx) {
40
36
  try {
41
- // Run extraction to ensure latest session data is captured
42
- const stats = await runExtraction(toolCtx.directory);
43
- // Also store the explicit memory from the agent
44
- const { storeMemory } = await import('./memory-service.js');
45
- const memory = {
46
- id: crypto.randomUUID(),
47
- sessionID: 'agent-explicit',
48
- messageID: 'agent-explicit',
49
- category: args.category,
37
+ const result = await storeLearningMemory(toolCtx.directory, {
50
38
  title: args.title,
51
- body: args.content ?? args.description,
39
+ description: args.description,
40
+ category: args.category,
52
41
  tags: args.tags,
53
- importance: args.importance ?? 0.7,
54
- createdAt: Date.now(),
55
- extractedBy: 'agent-explicit',
56
- };
57
- const result = await storeMemory(toolCtx.directory, memory);
42
+ importance: args.importance,
43
+ content: args.content,
44
+ });
58
45
  return JSON.stringify({
59
46
  success: true,
60
- memoryId: result.id,
61
- extractionStats: {
62
- sessionsScanned: stats.sessionsScanned,
63
- newMemories: stats.newMemories,
64
- },
47
+ memoryId: result.memoryId,
48
+ queued: true,
49
+ daemon: result.daemon,
65
50
  });
66
51
  }
67
- catch (err) {
68
- return JSON.stringify({
69
- success: false,
70
- error: err instanceof Error ? err.message : String(err),
71
- });
52
+ catch (error) {
53
+ return errorResponse(error);
72
54
  }
73
55
  },
74
56
  });
75
- // ────────────────────────────────────────────────────────────
76
- // ff-learning-search → searches local-recall memory store
77
- // ────────────────────────────────────────────────────────────
78
57
  export const createLearningSearchTool = () => tool({
79
- description: 'Search for memories by query, tags, or category. Returns matching memory metadata sorted by relevance and importance. ' +
80
- 'Searches the local-recall memory store which is automatically populated from OpenCode session history.',
58
+ description: 'Search local-recall memories. Uses vector index when available and falls back to lexical search on provider failure.',
81
59
  args: {
82
60
  query: tool.schema.string('Search query to match against memory content'),
83
61
  tags: tool.schema.array(tool.schema.string(), 'Filter by tags').optional(),
@@ -89,7 +67,7 @@ export const createLearningSearchTool = () => tool({
89
67
  },
90
68
  async execute(args, toolCtx) {
91
69
  try {
92
- const results = await searchMemories(toolCtx.directory, {
70
+ const result = await searchLearningMemories(toolCtx.directory, {
93
71
  query: args.query,
94
72
  tags: args.tags,
95
73
  category: args.category,
@@ -97,39 +75,34 @@ export const createLearningSearchTool = () => tool({
97
75
  limit: Math.min(Math.max(args.limit ?? 10, 1), 50),
98
76
  });
99
77
  return JSON.stringify({
100
- count: results.length,
101
- memories: results.map((r) => ({
102
- id: r.id,
103
- title: r.title,
104
- category: r.category,
105
- tags: r.tags,
106
- importance: r.importance,
107
- relevance: Math.round(r.relevance * 100) / 100,
108
- createdAt: r.createdAt,
109
- sessionID: r.sessionID,
78
+ backend: result.backend,
79
+ fallbackReason: result.fallbackReason,
80
+ count: result.results.length,
81
+ memories: result.results.map((memory) => ({
82
+ id: memory.id,
83
+ title: memory.title,
84
+ category: memory.category,
85
+ tags: memory.tags,
86
+ importance: memory.importance,
87
+ relevance: Math.round(memory.relevance * 100) / 100,
88
+ createdAt: memory.createdAt,
89
+ sessionID: memory.sessionID,
110
90
  })),
111
91
  });
112
92
  }
113
- catch (err) {
114
- return JSON.stringify({
115
- success: false,
116
- error: err instanceof Error ? err.message : String(err),
117
- });
93
+ catch (error) {
94
+ return errorResponse(error);
118
95
  }
119
96
  },
120
97
  });
121
- // ────────────────────────────────────────────────────────────
122
- // ff-learning-get → retrieves a specific memory by ID
123
- // ────────────────────────────────────────────────────────────
124
98
  export const createLearningGetTool = () => tool({
125
- description: 'Retrieve the full content of a specific memory by its unique ID. ' +
126
- 'Returns the complete memory including body text from the local-recall store.',
99
+ description: 'Retrieve the full content of a specific memory by ID from local-recall.',
127
100
  args: {
128
101
  memoryId: tool.schema.string('Unique memory ID to retrieve'),
129
102
  },
130
103
  async execute(args, toolCtx) {
131
104
  try {
132
- const memory = await getMemory(toolCtx.directory, args.memoryId);
105
+ const memory = await getLearningMemory(toolCtx.directory, args.memoryId);
133
106
  if (!memory) {
134
107
  return JSON.stringify({
135
108
  success: false,
@@ -138,25 +111,77 @@ export const createLearningGetTool = () => tool({
138
111
  }
139
112
  return JSON.stringify({
140
113
  success: true,
141
- memory: {
142
- id: memory.id,
143
- sessionID: memory.sessionID,
144
- messageID: memory.messageID,
145
- category: memory.category,
146
- title: memory.title,
147
- body: memory.body,
148
- tags: memory.tags,
149
- importance: memory.importance,
150
- createdAt: memory.createdAt,
151
- extractedBy: memory.extractedBy,
152
- },
114
+ memory,
115
+ });
116
+ }
117
+ catch (error) {
118
+ return errorResponse(error);
119
+ }
120
+ },
121
+ });
122
+ export const createLearningIndexStartTool = () => tool({
123
+ description: 'Start local-recall indexing daemon and return status.',
124
+ args: {
125
+ intervalMs: tool.schema.number('Optional daemon interval in milliseconds').optional(),
126
+ },
127
+ async execute(args, toolCtx) {
128
+ try {
129
+ const status = await startIndexingDaemon(toolCtx.directory, args.intervalMs);
130
+ return JSON.stringify({
131
+ success: true,
132
+ ...status,
153
133
  });
154
134
  }
155
- catch (err) {
135
+ catch (error) {
136
+ return errorResponse(error);
137
+ }
138
+ },
139
+ });
140
+ export const createLearningIndexStatusTool = () => tool({
141
+ description: 'Get current local-recall indexing daemon and index status.',
142
+ args: {},
143
+ async execute(_args, toolCtx) {
144
+ try {
145
+ const status = await getIndexingStatus(toolCtx.directory);
156
146
  return JSON.stringify({
157
- success: false,
158
- error: err instanceof Error ? err.message : String(err),
147
+ success: true,
148
+ ...status,
159
149
  });
160
150
  }
151
+ catch (error) {
152
+ return errorResponse(error);
153
+ }
154
+ },
155
+ });
156
+ export const createLearningIndexStopTool = () => tool({
157
+ description: 'Stop local-recall indexing daemon and return status.',
158
+ args: {},
159
+ async execute(_args, toolCtx) {
160
+ try {
161
+ const status = await stopIndexingDaemon(toolCtx.directory);
162
+ return JSON.stringify({
163
+ success: true,
164
+ ...status,
165
+ });
166
+ }
167
+ catch (error) {
168
+ return errorResponse(error);
169
+ }
170
+ },
171
+ });
172
+ export const createLearningIndexRebuildTool = () => tool({
173
+ description: 'Trigger a full local-recall index rebuild and return status.',
174
+ args: {},
175
+ async execute(_args, toolCtx) {
176
+ try {
177
+ const status = await rebuildIndex(toolCtx.directory);
178
+ return JSON.stringify({
179
+ success: true,
180
+ ...status,
181
+ });
182
+ }
183
+ catch (error) {
184
+ return errorResponse(error);
185
+ }
161
186
  },
162
187
  });
@@ -0,0 +1,37 @@
1
+ import type { EmbeddingProvider, EmbeddingProviderName } from './types.js';
2
+ interface EmbeddingProviderEnv extends NodeJS.ProcessEnv {
3
+ FF_LOCAL_RECALL_EMBEDDING_PROVIDER?: string;
4
+ FF_LOCAL_RECALL_OLLAMA_URL?: string;
5
+ FF_LOCAL_RECALL_OLLAMA_MODEL?: string;
6
+ FF_LOCAL_RECALL_OPENAI_URL?: string;
7
+ FF_LOCAL_RECALL_OPENAI_MODEL?: string;
8
+ OPENAI_API_KEY?: string;
9
+ }
10
+ export declare class OllamaEmbeddingProvider implements EmbeddingProvider {
11
+ readonly name: EmbeddingProviderName;
12
+ readonly model: string;
13
+ readonly baseURL: string;
14
+ dimensions?: number;
15
+ constructor(options?: {
16
+ model?: string;
17
+ baseURL?: string;
18
+ });
19
+ embed(input: string[]): Promise<number[][]>;
20
+ private embedBatch;
21
+ private embedLegacy;
22
+ }
23
+ export declare class OpenAIEmbeddingProvider implements EmbeddingProvider {
24
+ readonly name: EmbeddingProviderName;
25
+ readonly model: string;
26
+ readonly baseURL: string;
27
+ readonly apiKey: string;
28
+ dimensions?: number;
29
+ constructor(options: {
30
+ apiKey: string;
31
+ model?: string;
32
+ baseURL?: string;
33
+ });
34
+ embed(input: string[]): Promise<number[][]>;
35
+ }
36
+ export declare function createEmbeddingProvider(env?: EmbeddingProviderEnv): EmbeddingProvider;
37
+ export {};
@@ -0,0 +1,184 @@
1
+ const DEFAULT_OLLAMA_URL = 'http://127.0.0.1:11434';
2
+ const DEFAULT_OLLAMA_MODEL = 'nomic-embed-text';
3
+ const DEFAULT_OPENAI_URL = 'https://api.openai.com/v1';
4
+ const DEFAULT_OPENAI_MODEL = 'text-embedding-3-small';
5
+ function normalizeBaseURL(url) {
6
+ return url.endsWith('/') ? url.slice(0, -1) : url;
7
+ }
8
+ function toErrorMessage(error) {
9
+ return error instanceof Error ? error.message : String(error);
10
+ }
11
+ function assertEmbeddingShape(vectors) {
12
+ if (!Array.isArray(vectors)) {
13
+ throw new Error('Embedding response was not an array');
14
+ }
15
+ for (const vector of vectors) {
16
+ if (!Array.isArray(vector) || !vector.every((value) => typeof value === 'number')) {
17
+ throw new Error('Embedding response contained invalid vectors');
18
+ }
19
+ }
20
+ return vectors;
21
+ }
22
+ function validateDimensions(vectors, currentDimensions) {
23
+ const first = vectors[0];
24
+ if (!first) {
25
+ throw new Error('Embedding provider returned no vectors');
26
+ }
27
+ const nextDimensions = first.length;
28
+ if (nextDimensions === 0) {
29
+ throw new Error('Embedding provider returned empty vectors');
30
+ }
31
+ for (const vector of vectors) {
32
+ if (vector.length !== nextDimensions) {
33
+ throw new Error('Embedding provider returned mixed vector dimensions');
34
+ }
35
+ }
36
+ if (currentDimensions && currentDimensions !== nextDimensions) {
37
+ throw new Error(`Embedding dimensions changed from ${currentDimensions} to ${nextDimensions}`);
38
+ }
39
+ return nextDimensions;
40
+ }
41
+ async function parseError(response) {
42
+ try {
43
+ const text = await response.text();
44
+ if (!text) {
45
+ return `${response.status} ${response.statusText}`;
46
+ }
47
+ return `${response.status} ${response.statusText}: ${text}`;
48
+ }
49
+ catch {
50
+ return `${response.status} ${response.statusText}`;
51
+ }
52
+ }
53
+ export class OllamaEmbeddingProvider {
54
+ name = 'ollama';
55
+ model;
56
+ baseURL;
57
+ dimensions;
58
+ constructor(options) {
59
+ this.model = options?.model ?? DEFAULT_OLLAMA_MODEL;
60
+ this.baseURL = normalizeBaseURL(options?.baseURL ?? DEFAULT_OLLAMA_URL);
61
+ }
62
+ async embed(input) {
63
+ if (input.length === 0) {
64
+ return [];
65
+ }
66
+ let vectors = null;
67
+ let primaryError = null;
68
+ try {
69
+ vectors = await this.embedBatch(input);
70
+ }
71
+ catch (error) {
72
+ primaryError = toErrorMessage(error);
73
+ }
74
+ if (!vectors) {
75
+ try {
76
+ const legacyVectors = await Promise.all(input.map((value) => this.embedLegacy(value)));
77
+ vectors = legacyVectors;
78
+ }
79
+ catch (error) {
80
+ const fallbackError = toErrorMessage(error);
81
+ throw new Error(`Failed to fetch embeddings from Ollama. batch=${primaryError ?? 'n/a'} fallback=${fallbackError}`);
82
+ }
83
+ }
84
+ const dimensions = validateDimensions(vectors, this.dimensions);
85
+ this.dimensions = dimensions;
86
+ return vectors;
87
+ }
88
+ async embedBatch(input) {
89
+ const response = await fetch(`${this.baseURL}/api/embed`, {
90
+ method: 'POST',
91
+ headers: {
92
+ 'Content-Type': 'application/json',
93
+ },
94
+ body: JSON.stringify({
95
+ model: this.model,
96
+ input,
97
+ }),
98
+ });
99
+ if (!response.ok) {
100
+ throw new Error(await parseError(response));
101
+ }
102
+ const payload = (await response.json());
103
+ return assertEmbeddingShape(payload.embeddings);
104
+ }
105
+ async embedLegacy(input) {
106
+ const response = await fetch(`${this.baseURL}/api/embeddings`, {
107
+ method: 'POST',
108
+ headers: {
109
+ 'Content-Type': 'application/json',
110
+ },
111
+ body: JSON.stringify({
112
+ model: this.model,
113
+ prompt: input,
114
+ }),
115
+ });
116
+ if (!response.ok) {
117
+ throw new Error(await parseError(response));
118
+ }
119
+ const payload = (await response.json());
120
+ const [embedding] = assertEmbeddingShape([payload.embedding]);
121
+ return embedding;
122
+ }
123
+ }
124
+ export class OpenAIEmbeddingProvider {
125
+ name = 'openai';
126
+ model;
127
+ baseURL;
128
+ apiKey;
129
+ dimensions;
130
+ constructor(options) {
131
+ this.apiKey = options.apiKey;
132
+ this.model = options.model ?? DEFAULT_OPENAI_MODEL;
133
+ this.baseURL = normalizeBaseURL(options.baseURL ?? DEFAULT_OPENAI_URL);
134
+ }
135
+ async embed(input) {
136
+ if (input.length === 0) {
137
+ return [];
138
+ }
139
+ const response = await fetch(`${this.baseURL}/embeddings`, {
140
+ method: 'POST',
141
+ headers: {
142
+ 'Content-Type': 'application/json',
143
+ Authorization: `Bearer ${this.apiKey}`,
144
+ },
145
+ body: JSON.stringify({
146
+ model: this.model,
147
+ input,
148
+ }),
149
+ });
150
+ if (!response.ok) {
151
+ throw new Error(await parseError(response));
152
+ }
153
+ const payload = (await response.json());
154
+ const embeddings = assertEmbeddingShape(payload.data?.map((entry) => entry.embedding) ?? []);
155
+ const dimensions = validateDimensions(embeddings, this.dimensions);
156
+ this.dimensions = dimensions;
157
+ return embeddings;
158
+ }
159
+ }
160
+ function getRequestedProvider(env) {
161
+ const value = (env.FF_LOCAL_RECALL_EMBEDDING_PROVIDER ?? 'ollama').trim().toLowerCase();
162
+ if (value === 'openai') {
163
+ return 'openai';
164
+ }
165
+ return 'ollama';
166
+ }
167
+ export function createEmbeddingProvider(env = process.env) {
168
+ const provider = getRequestedProvider(env);
169
+ if (provider === 'openai') {
170
+ const apiKey = env.OPENAI_API_KEY;
171
+ if (!apiKey) {
172
+ throw new Error('OPENAI_API_KEY is required when FF_LOCAL_RECALL_EMBEDDING_PROVIDER=openai');
173
+ }
174
+ return new OpenAIEmbeddingProvider({
175
+ apiKey,
176
+ model: env.FF_LOCAL_RECALL_OPENAI_MODEL,
177
+ baseURL: env.FF_LOCAL_RECALL_OPENAI_URL,
178
+ });
179
+ }
180
+ return new OllamaEmbeddingProvider({
181
+ model: env.FF_LOCAL_RECALL_OLLAMA_MODEL,
182
+ baseURL: env.FF_LOCAL_RECALL_OLLAMA_URL,
183
+ });
184
+ }
@@ -0,0 +1,37 @@
1
+ import type { Memory, MemorySearchResult, SearchCriteria } from '../types.js';
2
+ import type { EmbeddingProvider } from './types.js';
3
+ export declare class OramaMemoryIndex {
4
+ private readonly directory;
5
+ private readonly provider;
6
+ private db;
7
+ private readonly documents;
8
+ private dimensions;
9
+ private updatedAt;
10
+ constructor(directory: string, provider: EmbeddingProvider);
11
+ initialize(): Promise<void>;
12
+ getStatus(): {
13
+ documents: number;
14
+ dimensions: number | null;
15
+ provider: string;
16
+ model: string;
17
+ updatedAt: string | null;
18
+ };
19
+ search(criteria: SearchCriteria): Promise<MemorySearchResult[]>;
20
+ upsertMemories(memories: Memory[]): Promise<number>;
21
+ removeMemories(memoryIDs: string[]): Promise<number>;
22
+ rebuild(memories: Memory[]): Promise<number>;
23
+ private get indexDir();
24
+ private get manifestPath();
25
+ private get documentsPath();
26
+ private buildSchema;
27
+ private ensureDB;
28
+ private rebuildDatabaseFromDocuments;
29
+ private hydrateDB;
30
+ private loadSnapshot;
31
+ private persistSnapshot;
32
+ private writeAtomic;
33
+ private memoryToEmbeddingInput;
34
+ private toDocument;
35
+ private assertEmbeddingDimensions;
36
+ private embedBatch;
37
+ }