@syntesseraai/opencode-feature-factory 0.3.2 → 0.3.4

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
@@ -20,6 +20,55 @@ import { extractFromMessage } from './session-extractor.js';
20
20
  import { extractThinkingFromMessage } from './thinking-extractor.js';
21
21
  import { readProcessedLog, getProcessedMessageIDs, getProcessedHashes, markProcessed, contentHash, } from './processed-log.js';
22
22
  import { getMemoriesDir, storeMemories } from './memory-service.js';
23
+ function getErrorMessage(error) {
24
+ return error instanceof Error ? error.message : String(error);
25
+ }
26
+ function toProcessedFailureEntry(failure, failureMessage) {
27
+ switch (failure.scope) {
28
+ case 'project':
29
+ return {
30
+ kind: 'failure',
31
+ scope: 'project',
32
+ processedAt: Date.now(),
33
+ failure: failureMessage,
34
+ directory: failure.directory,
35
+ };
36
+ case 'session':
37
+ return {
38
+ kind: 'failure',
39
+ scope: 'session',
40
+ processedAt: Date.now(),
41
+ failure: failureMessage,
42
+ sessionID: failure.sessionID,
43
+ };
44
+ case 'extraction':
45
+ return {
46
+ kind: 'failure',
47
+ scope: 'extraction',
48
+ processedAt: Date.now(),
49
+ failure: failureMessage,
50
+ };
51
+ }
52
+ }
53
+ function recordFailure(stats, failure) {
54
+ let message;
55
+ switch (failure.scope) {
56
+ case 'project':
57
+ message = `No OpenCode project found for directory: ${failure.directory}`;
58
+ break;
59
+ case 'message':
60
+ message = `Error processing message ${failure.messageID}: ${getErrorMessage(failure.error)}`;
61
+ break;
62
+ case 'session':
63
+ message = `Error processing session ${failure.sessionID}: ${getErrorMessage(failure.error)}`;
64
+ break;
65
+ case 'extraction':
66
+ message = `Extraction failed: ${getErrorMessage(failure.error)}`;
67
+ break;
68
+ }
69
+ stats.errors.push(message);
70
+ return message;
71
+ }
23
72
  // ────────────────────────────────────────────────────────────
24
73
  // Helpers
25
74
  // ────────────────────────────────────────────────────────────
@@ -69,7 +118,14 @@ export async function runExtraction(directory) {
69
118
  // Find project for this directory
70
119
  const project = await findProject(directory);
71
120
  if (!project) {
72
- stats.errors.push(`No OpenCode project found for directory: ${directory}`);
121
+ const failure = { scope: 'project', directory };
122
+ const failureMessage = recordFailure(stats, failure);
123
+ try {
124
+ await markProcessed(directory, [toProcessedFailureEntry(failure, failureMessage)]);
125
+ }
126
+ catch (persistErr) {
127
+ stats.errors.push(`Failed to persist processing failure: ${getErrorMessage(persistErr)}`);
128
+ }
73
129
  return stats;
74
130
  }
75
131
  // Ensure local-recall directories exist
@@ -147,6 +203,7 @@ export async function runExtraction(directory) {
147
203
  processedHashes.add(msgHash);
148
204
  // Mark as processed with content hash
149
205
  newProcessedEntries.push({
206
+ status: 'success',
150
207
  messageID: message.id,
151
208
  contentHash: msgHash,
152
209
  processedAt: Date.now(),
@@ -154,19 +211,31 @@ export async function runExtraction(directory) {
154
211
  });
155
212
  }
156
213
  catch (err) {
157
- stats.errors.push(`Error processing message ${message.id}: ${err instanceof Error ? err.message : String(err)}`);
214
+ recordFailure(stats, {
215
+ scope: 'message',
216
+ messageID: message.id,
217
+ error: err,
218
+ });
158
219
  // Still mark as processed to avoid re-trying broken messages
159
220
  newProcessedEntries.push({
221
+ status: 'failed',
160
222
  messageID: message.id,
161
223
  contentHash: msgHash,
162
224
  processedAt: Date.now(),
163
225
  memoriesCreated: 0,
226
+ failure: getErrorMessage(err),
164
227
  });
165
228
  }
166
229
  }
167
230
  }
168
231
  catch (err) {
169
- stats.errors.push(`Error processing session ${session.id}: ${err instanceof Error ? err.message : String(err)}`);
232
+ const failure = {
233
+ scope: 'session',
234
+ sessionID: session.id,
235
+ error: err,
236
+ };
237
+ const failureMessage = recordFailure(stats, failure);
238
+ newProcessedEntries.push(toProcessedFailureEntry(failure, failureMessage));
170
239
  }
171
240
  }
172
241
  // Batch store all new memories
@@ -180,7 +249,14 @@ export async function runExtraction(directory) {
180
249
  }
181
250
  }
182
251
  catch (err) {
183
- stats.errors.push(`Extraction failed: ${err instanceof Error ? err.message : String(err)}`);
252
+ const failure = { scope: 'extraction', error: err };
253
+ const failureMessage = recordFailure(stats, failure);
254
+ try {
255
+ await markProcessed(directory, [toProcessedFailureEntry(failure, failureMessage)]);
256
+ }
257
+ catch (persistErr) {
258
+ stats.errors.push(`Failed to persist processing failure: ${getErrorMessage(persistErr)}`);
259
+ }
184
260
  }
185
261
  return stats;
186
262
  }
@@ -11,6 +11,9 @@
11
11
  import { createHash } from 'node:crypto';
12
12
  import { readFile, writeFile, mkdir } from 'node:fs/promises';
13
13
  import { join, dirname } from 'node:path';
14
+ function isProcessedMessageEntry(entry) {
15
+ return 'messageID' in entry && 'contentHash' in entry;
16
+ }
14
17
  function getLogPath(directory) {
15
18
  return join(directory, 'ff-memories', 'processed.json');
16
19
  }
@@ -48,7 +51,7 @@ export async function readProcessedLog(directory) {
48
51
  */
49
52
  export async function isProcessed(directory, messageID) {
50
53
  const log = await readProcessedLog(directory);
51
- return log.some((entry) => entry.messageID === messageID);
54
+ return log.some((entry) => isProcessedMessageEntry(entry) && entry.messageID === messageID);
52
55
  }
53
56
  /**
54
57
  * Check if a content hash has already been processed.
@@ -56,7 +59,7 @@ export async function isProcessed(directory, messageID) {
56
59
  */
57
60
  export async function isContentProcessed(directory, hash) {
58
61
  const log = await readProcessedLog(directory);
59
- return log.some((entry) => entry.contentHash === hash);
62
+ return log.some((entry) => isProcessedMessageEntry(entry) && entry.contentHash === hash);
60
63
  }
61
64
  /**
62
65
  * Mark messages as processed by appending entries to the log.
@@ -72,11 +75,11 @@ export async function markProcessed(directory, entries) {
72
75
  * Get the set of already-processed message IDs for fast lookup.
73
76
  */
74
77
  export function getProcessedMessageIDs(log) {
75
- return new Set(log.map((e) => e.messageID));
78
+ return new Set(log.filter(isProcessedMessageEntry).map((e) => e.messageID));
76
79
  }
77
80
  /**
78
81
  * Get the set of already-processed content hashes for fast lookup.
79
82
  */
80
83
  export function getProcessedHashes(log) {
81
- return new Set(log.map((e) => e.contentHash));
84
+ return new Set(log.filter(isProcessedMessageEntry).map((e) => e.contentHash));
82
85
  }
@@ -45,6 +45,10 @@ async function dirExists(dirPath) {
45
45
  return false;
46
46
  }
47
47
  }
48
+ function getPartStartTime(part) {
49
+ const start = part.time?.start;
50
+ return typeof start === 'number' ? start : Number.POSITIVE_INFINITY;
51
+ }
48
52
  // ── Project Reader ──────────────────────────────────────────────
49
53
  /**
50
54
  * Find the project record whose worktree matches `directory`.
@@ -131,7 +135,13 @@ export async function listParts(messageID) {
131
135
  if (!(await dirExists(partDir)))
132
136
  return [];
133
137
  const parts = await readAllJsonInDir(partDir);
134
- return parts.sort((a, b) => a.time.start - b.time.start);
138
+ return parts.sort((a, b) => {
139
+ const timeDelta = getPartStartTime(a) - getPartStartTime(b);
140
+ if (timeDelta !== 0) {
141
+ return timeDelta;
142
+ }
143
+ return a.id.localeCompare(b.id);
144
+ });
135
145
  }
136
146
  /**
137
147
  * Get a single part by ID.
@@ -54,10 +54,10 @@ export interface OCPart {
54
54
  messageID: string;
55
55
  type: string;
56
56
  text?: string;
57
- synthetic: boolean;
58
- time: {
59
- start: number;
60
- end: number;
57
+ synthetic?: boolean;
58
+ time?: {
59
+ start?: number;
60
+ end?: number;
61
61
  };
62
62
  }
63
63
  /** A memory extracted from a conversation turn */
@@ -119,11 +119,34 @@ export interface ExtractionResult {
119
119
  /** Where the extraction came from — used to generate logical IDs */
120
120
  source: 'session' | 'thinking';
121
121
  }
122
- /** Tracks which messages have already been processed */
123
- export interface ProcessedEntry {
122
+ interface ProcessedMessageEntryBase {
124
123
  messageID: string;
125
124
  processedAt: number;
126
- memoriesCreated: number;
127
125
  /** SHA-256 hex hash of the concatenated extracted bodies for content-level idempotency */
128
126
  contentHash: string;
129
127
  }
128
+ /** Tracks which messages have already been processed */
129
+ export type ProcessedMessageEntry = (ProcessedMessageEntryBase & {
130
+ status: 'success';
131
+ memoriesCreated: number;
132
+ failure?: undefined;
133
+ }) | (ProcessedMessageEntryBase & {
134
+ status: 'failed';
135
+ memoriesCreated: 0;
136
+ failure: string;
137
+ }) | (ProcessedMessageEntryBase & {
138
+ status?: undefined;
139
+ memoriesCreated: number;
140
+ failure?: undefined;
141
+ });
142
+ /** Persistent daemon-level failures that are not tied to a specific message record. */
143
+ export type ProcessedFailureEntry = {
144
+ kind: 'failure';
145
+ scope: 'project' | 'session' | 'extraction';
146
+ processedAt: number;
147
+ failure: string;
148
+ directory?: string;
149
+ sessionID?: string;
150
+ };
151
+ export type ProcessedEntry = ProcessedMessageEntry | ProcessedFailureEntry;
152
+ export {};
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "$schema": "https://json.schemastore.org/package.json",
3
3
  "name": "@syntesseraai/opencode-feature-factory",
4
- "version": "0.3.2",
4
+ "version": "0.3.4",
5
5
  "type": "module",
6
6
  "description": "OpenCode plugin for Feature Factory agents - provides sub-agents and skills for validation, review, security, and architecture assessment",
7
7
  "license": "MIT",