@sudocode-ai/cli 0.1.7 → 0.1.8

This diff compares the published contents of two versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (43)
  1. package/dist/cli/feedback-commands.d.ts.map +1 -1
  2. package/dist/cli/feedback-commands.js +7 -0
  3. package/dist/cli/feedback-commands.js.map +1 -1
  4. package/dist/cli/init-commands.d.ts.map +1 -1
  5. package/dist/cli/init-commands.js +2 -1
  6. package/dist/cli/init-commands.js.map +1 -1
  7. package/dist/cli/issue-commands.d.ts.map +1 -1
  8. package/dist/cli/issue-commands.js +18 -1
  9. package/dist/cli/issue-commands.js.map +1 -1
  10. package/dist/cli/server-commands.d.ts.map +1 -1
  11. package/dist/cli/server-commands.js +9 -7
  12. package/dist/cli/server-commands.js.map +1 -1
  13. package/dist/cli/spec-commands.js +1 -1
  14. package/dist/cli/spec-commands.js.map +1 -1
  15. package/dist/cli/sync-commands.d.ts.map +1 -1
  16. package/dist/cli/sync-commands.js +11 -7
  17. package/dist/cli/sync-commands.js.map +1 -1
  18. package/dist/cli/update-commands.d.ts.map +1 -1
  19. package/dist/cli/update-commands.js +72 -10
  20. package/dist/cli/update-commands.js.map +1 -1
  21. package/dist/filename-generator.d.ts +10 -9
  22. package/dist/filename-generator.d.ts.map +1 -1
  23. package/dist/filename-generator.js +55 -45
  24. package/dist/filename-generator.js.map +1 -1
  25. package/dist/import.d.ts +6 -1
  26. package/dist/import.d.ts.map +1 -1
  27. package/dist/import.js +7 -6
  28. package/dist/import.js.map +1 -1
  29. package/dist/operations/feedback.d.ts.map +1 -1
  30. package/dist/operations/feedback.js +17 -22
  31. package/dist/operations/feedback.js.map +1 -1
  32. package/dist/sync.d.ts.map +1 -1
  33. package/dist/sync.js +12 -1
  34. package/dist/sync.js.map +1 -1
  35. package/dist/validation.d.ts +5 -1
  36. package/dist/validation.d.ts.map +1 -1
  37. package/dist/validation.js +32 -0
  38. package/dist/validation.js.map +1 -1
  39. package/dist/watcher.d.ts +11 -5
  40. package/dist/watcher.d.ts.map +1 -1
  41. package/dist/watcher.js +344 -148
  42. package/dist/watcher.js.map +1 -1
  43. package/package.json +2 -2
package/dist/watcher.js CHANGED
@@ -13,20 +13,83 @@ import { listIssues, getIssue } from "./operations/issues.js";
 import { parseMarkdownFile } from "./markdown.js";
 import { listFeedback } from "./operations/feedback.js";
 import { getTags } from "./operations/tags.js";
+import { findExistingEntityFile, generateUniqueFilename, } from "./filename-generator.js";
+import { getOutgoingRelationships } from "./operations/relationships.js";
+import * as crypto from "crypto";
+// Guard against processing our own file writes (oscillation prevention)
+// Track files currently being processed to prevent same-file oscillation
+const filesBeingProcessed = new Set();
+// Content hash cache for detecting actual content changes (oscillation prevention)
+const contentHashCache = new Map();
+/**
+ * Compute SHA256 hash of file content for change detection
+ */
+function computeContentHash(filePath) {
+    try {
+        const content = fs.readFileSync(filePath, "utf8");
+        return crypto.createHash("sha256").update(content).digest("hex");
+    }
+    catch (error) {
+        // File doesn't exist or can't be read
+        return "";
+    }
+}
+/**
+ * Check if file content has actually changed since last processing
+ * Returns true if content changed, false if unchanged
+ */
+function hasContentChanged(filePath) {
+    const currentHash = computeContentHash(filePath);
+    const cachedHash = contentHashCache.get(filePath);
+    if (cachedHash && cachedHash === currentHash) {
+        // Content unchanged - skip processing
+        return false;
+    }
+    // Update cache with new hash
+    contentHashCache.set(filePath, currentHash);
+    return true;
+}
 /**
  * Start watching files for changes
  * Returns a control object to stop the watcher
  */
 export function startWatcher(options) {
-    const { db, baseDir, debounceDelay = 2000, onLog = console.log, onError = console.error, ignoreInitial = true, syncJSONLToMarkdown: enableReverseSync = false, } = options;
+    const { db, baseDir, onLog = console.log, onError = console.error, ignoreInitial = true, syncJSONLToMarkdown: enableReverseSync = false, onEntitySync, onFileChange, } = options;
     const stats = {
         filesWatched: 0,
-        changesPending: 0,
         changesProcessed: 0,
         errors: 0,
     };
-    // Map of file paths to pending timeout IDs
-    const pendingChanges = new Map();
+    // Cache of previous JSONL state (entity ID -> timestamp)
+    // This allows us to detect changes by comparing new JSONL against cached state
+    const jsonlStateCache = new Map(); // jsonlPath -> (entityId -> content_hash)
+    /**
+     * Compute a canonical content hash for an entity that's invariant to key ordering
+     * This ensures that {"id":"x","title":"y"} and {"title":"y","id":"x"} produce the same hash
+     */
+    function computeCanonicalHash(entity) {
+        // Sort keys recursively to ensure consistent ordering
+        const sortKeys = (obj) => {
+            if (obj === null || typeof obj !== "object") {
+                return obj;
+            }
+            if (Array.isArray(obj)) {
+                return obj.map(sortKeys);
+            }
+            const sorted = {};
+            Object.keys(obj)
+                .sort()
+                .forEach((key) => {
+                sorted[key] = sortKeys(obj[key]);
+            });
+            return sorted;
+        };
+        const canonical = sortKeys(entity);
+        return crypto
+            .createHash("sha256")
+            .update(JSON.stringify(canonical))
+            .digest("hex");
+    }
     /**
      * Check if markdown file content matches database content
      * Returns true if they match (no sync needed)
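The canonical hash introduced in this hunk underpins all of the JSONL change detection below. A quick standalone check of the key-ordering claim in its doc comment (this re-implements the diff's `sortKeys` logic with a `for...of` loop in place of `forEach`; only Node's built-in `crypto` module is assumed):

```js
import * as crypto from "crypto";

function computeCanonicalHash(entity) {
    // Recursively sort object keys so serialization is order-independent
    const sortKeys = (obj) => {
        if (obj === null || typeof obj !== "object") return obj;
        if (Array.isArray(obj)) return obj.map(sortKeys);
        const sorted = {};
        for (const key of Object.keys(obj).sort()) {
            sorted[key] = sortKeys(obj[key]);
        }
        return sorted;
    };
    return crypto
        .createHash("sha256")
        .update(JSON.stringify(sortKeys(entity)))
        .digest("hex");
}

// Same fields, different key order: the digests match.
const a = JSON.parse('{"id":"i-1","title":"Fix watcher","tags":["sync","cli"]}');
const b = JSON.parse('{"title":"Fix watcher","tags":["sync","cli"],"id":"i-1"}');
console.log(computeCanonicalHash(a) === computeCanonicalHash(b)); // true
```

Hashing a canonicalized serialization rather than the raw JSONL line means a tool that rewrites the file with reordered keys (but identical data) does not register as a change.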
@@ -162,8 +225,7 @@ export function startWatcher(options) {
             const dbTagsSet = new Set(dbTags);
             if (jsonlTags.some((tag) => !dbTagsSet.has(tag)))
                 return true;
-            // Compare relationships
-            const { getOutgoingRelationships } = require("./operations/relationships.js");
+            // Compare relationships;
             const dbRels = getOutgoingRelationships(db, entityId, entityType);
             const jsonlRels = jsonlEntity.relationships || [];
             if (jsonlRels.length !== dbRels.length)
@@ -211,172 +273,275 @@ export function startWatcher(options) {
      */
     async function processChange(filePath, event) {
         try {
-            const ext = path.extname(filePath);
-            const basename = path.basename(filePath);
-            if (ext === ".md") {
-                // Markdown file changed - sync to database and JSONL
-                onLog(`[watch] ${event} ${path.relative(baseDir, filePath)}`);
-                if (event === "unlink") {
-                    // File was deleted - remove from database and JSONL
-                    // Calculate relative file path
-                    const relPath = path.relative(baseDir, filePath);
-                    // Look up spec by file path
-                    const spec = getSpecByFilePath(db, relPath);
-                    if (spec) {
-                        // Delete from database
-                        const deleted = deleteSpec(db, spec.id);
-                        if (deleted) {
-                            onLog(`[watch] Deleted spec ${spec.id} (file removed)`);
-                            // Export to JSONL to reflect deletion
-                            await exportToJSONL(db, { outputDir: baseDir });
+            // Set re-entry guard for this specific file to prevent oscillation
+            filesBeingProcessed.add(filePath);
+            try {
+                const ext = path.extname(filePath);
+                const basename = path.basename(filePath);
+                if (ext === ".md") {
+                    // Markdown file changed - sync to database and JSONL
+                    onLog(`[watch] ${event} ${path.relative(baseDir, filePath)}`);
+                    if (event === "unlink") {
+                        // File was deleted - remove from database and JSONL
+                        // Calculate relative file path
+                        const relPath = path.relative(baseDir, filePath);
+                        // Look up spec by file path
+                        const spec = getSpecByFilePath(db, relPath);
+                        if (spec) {
+                            // Delete from database
+                            const deleted = deleteSpec(db, spec.id);
+                            if (deleted) {
+                                onLog(`[watch] Deleted spec ${spec.id} (file removed)`);
+                                // Export to JSONL to reflect deletion
+                                await exportToJSONL(db, { outputDir: baseDir });
+                            }
+                        }
+                        else {
+                            onLog(`[watch] File deleted but no spec found: ${relPath}`);
                         }
                     }
                     else {
-                        onLog(`[watch] File deleted but no spec found: ${relPath}`);
+                        // Parse markdown to get entity info
+                        try {
+                            const parsed = parseMarkdownFile(filePath, db, baseDir);
+                            const { data: frontmatter } = parsed;
+                            const entityId = frontmatter.id;
+                            // Determine entity type based on file location
+                            const relPath = path.relative(baseDir, filePath);
+                            const entityType = relPath.startsWith("specs/") || relPath.startsWith("specs\\")
+                                ? "spec"
+                                : "issue";
+                            // Skip if content already matches (prevents oscillation)
+                            if (entityId && contentMatches(filePath, entityId, entityType)) {
+                                return;
+                            }
+                            // Check timestamps to determine sync direction
+                            if (entityId) {
+                                const dbEntity = entityType === "spec"
+                                    ? getSpec(db, entityId)
+                                    : getIssue(db, entityId);
+                                if (dbEntity) {
+                                    // Get file modification time
+                                    const fileStat = fs.statSync(filePath);
+                                    const fileTime = fileStat.mtimeMs;
+                                    // Get database updated_at time
+                                    const dbTime = new Date(dbEntity.updated_at).getTime();
+                                    // If database is newer than file, skip markdown → database sync
+                                    if (dbTime > fileTime) {
+                                        onLog(`[watch] Skipping sync for ${entityType} ${entityId} (database is newer)`);
+                                        return;
+                                    }
+                                }
+                            }
+                        }
+                        catch (error) {
+                            // If parsing fails, continue with sync (might be a new file)
+                        }
+                        // Sync markdown to database
+                        const result = await syncMarkdownToJSONL(db, filePath, {
+                            outputDir: baseDir,
+                            autoExport: true,
+                            autoInitialize: true,
+                            writeBackFrontmatter: true,
+                        });
+                        if (result.success) {
+                            onLog(`[watch] Synced ${result.entityType} ${result.entityId} (${result.action})`);
+                            // Emit typed callback event for markdown sync
+                            if (onEntitySync) {
+                                // Get full entity data to include in event
+                                const entity = result.entityType === "spec"
+                                    ? getSpec(db, result.entityId)
+                                    : getIssue(db, result.entityId);
+                                await onEntitySync({
+                                    entityType: result.entityType,
+                                    entityId: result.entityId,
+                                    action: result.action,
+                                    filePath,
+                                    baseDir,
+                                    source: "markdown",
+                                    timestamp: new Date(),
+                                    entity: entity ?? undefined,
+                                    version: 1,
+                                });
+                            }
+                        }
+                        else {
+                            onError(new Error(`Failed to sync ${filePath}: ${result.error}`));
+                            stats.errors++;
+                        }
                     }
                 }
-                else {
-                    // Parse markdown to get entity info
-                    try {
-                        const parsed = parseMarkdownFile(filePath, db, baseDir);
-                        const { data: frontmatter } = parsed;
-                        const entityId = frontmatter.id;
-                        // Determine entity type based on file location
-                        const relPath = path.relative(baseDir, filePath);
-                        const entityType = relPath.startsWith("specs/") || relPath.startsWith("specs\\")
-                            ? "spec"
-                            : "issue";
-                        // Skip if content already matches (prevents oscillation)
-                        if (entityId && contentMatches(filePath, entityId, entityType)) {
-                            return;
+                else if (basename === "specs.jsonl" || basename === "issues.jsonl") {
+                    // JSONL file changed (e.g., from CLI update or git pull)
+                    onLog(`[watch] ${event} ${path.relative(baseDir, filePath)}`);
+                    if (event !== "unlink") {
+                        const entityType = basename === "specs.jsonl" ? "spec" : "issue";
+                        // Read JSONL file
+                        const jsonlContent = fs.readFileSync(filePath, "utf8");
+                        const jsonlLines = jsonlContent
+                            .trim()
+                            .split("\n")
+                            .filter((line) => line.trim());
+                        // Parse JSONL entities and build new state map
+                        const jsonlEntities = jsonlLines.map((line) => JSON.parse(line));
+                        const newStateMap = new Map();
+                        for (const entity of jsonlEntities) {
+                            // Use canonical content hash to detect any content changes
+                            // Canonical hash is invariant to JSON key ordering
+                            const contentHash = computeCanonicalHash(entity);
+                            newStateMap.set(entity.id, contentHash);
+                        }
+                        // Get cached state (previous JSONL state)
+                        const cachedStateMap = jsonlStateCache.get(filePath) || new Map();
+                        // Detect changed entities by comparing new state with cached state
+                        const changedEntities = [];
+                        for (const jsonlEntity of jsonlEntities) {
+                            const entityId = jsonlEntity.id;
+                            const newHash = newStateMap.get(entityId);
+                            const cachedHash = cachedStateMap.get(entityId);
+                            if (!cachedHash) {
+                                // Entity not in cache = created
+                                changedEntities.push({ entityId, action: "created" });
+                            }
+                            else if (newHash !== cachedHash) {
+                                // Content hash differs = entity changed
+                                changedEntities.push({ entityId, action: "updated" });
+                            }
                         }
-                        // Check timestamps to determine sync direction
-                        if (entityId) {
-                            const dbEntity = entityType === "spec"
-                                ? getSpec(db, entityId)
-                                : getIssue(db, entityId);
-                            if (dbEntity) {
-                                // Get file modification time
-                                const fileStat = fs.statSync(filePath);
-                                const fileTime = fileStat.mtimeMs;
-                                // Get database updated_at time
-                                const dbTime = new Date(dbEntity.updated_at).getTime();
-                                // If database is newer than file, skip markdown → database sync
-                                if (dbTime > fileTime) {
-                                    onLog(`[watch] Skipping sync for ${entityType} ${entityId} (database is newer)`);
-                                    return;
+                        // Update cache with new state
+                        jsonlStateCache.set(filePath, newStateMap);
+                        if (changedEntities.length > 0) {
+                            onLog(`[watch] Detected ${changedEntities.length} changed ${entityType}(s) in JSONL`);
+                            // Import from JSONL to sync database
+                            // Pass changed entity IDs to force update even if timestamp hasn't changed
+                            // (user may have manually edited JSONL content without updating timestamp)
+                            const changedIds = changedEntities.map((e) => e.entityId);
+                            await importFromJSONL(db, {
+                                inputDir: baseDir,
+                                forceUpdateIds: changedIds,
+                            });
+                            onLog(`[watch] Imported JSONL changes to database`);
+                            // Emit events for changed entities (after import, so we have fresh data)
+                            for (const { entityId, action } of changedEntities) {
+                                onLog(`[watch] Synced ${entityType} ${entityId} (${action})`);
+                                if (onEntitySync) {
+                                    // Get fresh entity data from database (after import)
+                                    const entity = entityType === "spec"
+                                        ? getSpec(db, entityId)
+                                        : getIssue(db, entityId);
+                                    // Find markdown file path
+                                    let entityFilePath;
+                                    if (entityType === "spec" && entity && "file_path" in entity) {
+                                        entityFilePath = path.join(baseDir, entity.file_path);
+                                    }
+                                    else if (entityType === "issue" &&
+                                        entity &&
+                                        "file_path" in entity) {
+                                        entityFilePath = path.join(baseDir, entity.file_path);
+                                    }
+                                    else {
+                                        // Fallback to default path
+                                        entityFilePath = path.join(baseDir, entityType === "spec" ? "specs" : "issues", `${entityId}.md`);
+                                    }
+                                    await onEntitySync({
+                                        entityType,
+                                        entityId: entityId,
+                                        action,
+                                        filePath: entityFilePath,
+                                        baseDir,
+                                        source: "jsonl",
+                                        timestamp: new Date(),
+                                        entity: entity ?? undefined,
+                                        version: 1,
+                                    });
                                 }
                             }
                         }
-                    }
-                    catch (error) {
-                        // If parsing fails, continue with sync (might be a new file)
-                    }
-                    // Sync markdown to database
-                    const result = await syncMarkdownToJSONL(db, filePath, {
-                        outputDir: baseDir,
-                        autoExport: true,
-                        autoInitialize: true,
-                        writeBackFrontmatter: true,
-                    });
-                    if (result.success) {
-                        onLog(`[watch] Synced ${result.entityType} ${result.entityId} (${result.action})`);
-                    }
-                    else {
-                        onError(new Error(`Failed to sync ${filePath}: ${result.error}`));
-                        stats.errors++;
-                    }
-                }
-            }
-        }
-        else if (basename === "specs.jsonl" || basename === "issues.jsonl") {
-            // JSONL file changed (e.g., from git pull) - check if import is needed
-            onLog(`[watch] ${event} ${path.relative(baseDir, filePath)}`);
-            if (event !== "unlink") {
-                // Check if JSONL actually differs from database before importing
-                if (jsonlNeedsImport(filePath)) {
-                    await importFromJSONL(db, {
-                        inputDir: baseDir,
-                    });
-                    onLog(`[watch] Imported JSONL changes to database`);
-                }
-                // Optionally sync database changes back to markdown files
-                // Only sync entities where content actually differs (contentMatches check)
-                if (enableReverseSync) {
-                    onLog(`[watch] Checking for entities that need markdown updates...`);
-                    let syncedCount = 0;
-                    // Get all specs and sync to markdown
-                    const specs = listSpecs(db);
-                    for (const spec of specs) {
-                        if (spec.file_path) {
-                            const mdPath = path.join(baseDir, spec.file_path);
-                            // Skip if content already matches (prevents oscillation)
-                            if (contentMatches(mdPath, spec.id, "spec")) {
+                        else {
+                            onLog(`[watch] No entity changes detected in ${basename}`);
+                        }
+                        // Optionally sync database changes back to markdown files
+                        // Only sync entities where content actually differs (contentMatches check)
+                        if (enableReverseSync) {
+                            onLog(`[watch] Checking for entities that need markdown updates...`);
+                            let syncedCount = 0;
+                            // Get all specs and sync to markdown
+                            const specs = listSpecs(db);
+                            for (const spec of specs) {
+                                if (spec.file_path) {
+                                    const mdPath = path.join(baseDir, spec.file_path);
+                                    // Skip if content already matches (prevents oscillation)
+                                    if (contentMatches(mdPath, spec.id, "spec")) {
+                                        continue;
+                                    }
+                                    const result = await syncJSONLToMarkdown(db, spec.id, "spec", mdPath);
+                                    if (result.success) {
+                                        syncedCount++;
+                                        onLog(`[watch] Synced spec ${spec.id} to ${spec.file_path} (${result.action})`);
+                                    }
+                                    else if (result.error) {
+                                        onError(new Error(`Failed to sync spec ${spec.id}: ${result.error}`));
+                                    }
+                                }
+                            }
+                            // Get all issues and check if any need syncing
+                            const issues = listIssues(db);
+                            const issuesDir = path.join(baseDir, "issues");
+                            for (const issue of issues) {
+                                // Find existing file or generate new filename using unified scheme
+                                let mdPath = findExistingEntityFile(issue.id, issuesDir, issue.title);
+                                if (!mdPath) {
+                                    // File doesn't exist, generate new filename
+                                    const fileName = generateUniqueFilename(issue.title, issue.id);
+                                    mdPath = path.join(issuesDir, fileName);
+                                }
+                                // Skip if content already matches (prevents unnecessary writes and oscillation)
+                                if (contentMatches(mdPath, issue.id, "issue")) {
                                     continue;
                                 }
-                                const result = await syncJSONLToMarkdown(db, spec.id, "spec", mdPath);
+                                const result = await syncJSONLToMarkdown(db, issue.id, "issue", mdPath);
                                 if (result.success) {
                                     syncedCount++;
-                                    onLog(`[watch] Synced spec ${spec.id} to ${spec.file_path} (${result.action})`);
+                                    onLog(`[watch] Synced issue ${issue.id} to markdown (${result.action})`);
                                 }
                                 else if (result.error) {
-                                    onError(new Error(`Failed to sync spec ${spec.id}: ${result.error}`));
+                                    onError(new Error(`Failed to sync issue ${issue.id}: ${result.error}`));
                                 }
                             }
-                    // Get all issues and check if any need syncing
-                    const issues = listIssues(db);
-                    const issuesDir = path.join(baseDir, "issues");
-                    for (const issue of issues) {
-                        const fileName = `${issue.id}.md`;
-                        const mdPath = path.join(issuesDir, fileName);
-                        // Skip if content already matches (prevents unnecessary writes and oscillation)
-                        if (contentMatches(mdPath, issue.id, "issue")) {
-                            continue;
-                        }
-                        const result = await syncJSONLToMarkdown(db, issue.id, "issue", mdPath);
-                        if (result.success) {
-                            syncedCount++;
-                            onLog(`[watch] Synced issue ${issue.id} to markdown (${result.action})`);
+                            if (syncedCount > 0) {
+                                onLog(`[watch] Synced ${syncedCount} entities to markdown`);
                             }
-                        else if (result.error) {
-                            onError(new Error(`Failed to sync issue ${issue.id}: ${result.error}`));
+                            else {
+                                onLog(`[watch] All markdown files are up to date`);
                             }
                         }
                     }
-                    if (syncedCount > 0) {
-                        onLog(`[watch] Synced ${syncedCount} entities to markdown`);
-                    }
-                    else {
-                        onLog(`[watch] All markdown files are up to date`);
-                    }
                 }
+                stats.changesProcessed++;
+            }
+            catch (error) {
+                const message = error instanceof Error ? error.message : String(error);
+                onError(new Error(`Error processing ${filePath}: ${message}`));
+                stats.errors++;
             }
         }
-        stats.changesProcessed++;
-        }
-        catch (error) {
-            const message = error instanceof Error ? error.message : String(error);
-            onError(new Error(`Error processing ${filePath}: ${message}`));
-            stats.errors++;
+        finally {
+            // Always clear re-entry guard for this file, even on errors
+            filesBeingProcessed.delete(filePath);
         }
     }
     /**
-     * Debounced file change handler
+     * File change handler with oscillation guards
+     * Processes changes immediately (no debounce)
      */
     function handleFileChange(filePath, event) {
-        // Cancel pending change for this file
-        const existingTimeout = pendingChanges.get(filePath);
-        if (existingTimeout) {
-            clearTimeout(existingTimeout);
-            stats.changesPending--;
+        // Guard: Skip if we're currently processing this specific file (prevents oscillation)
+        // This is the primary defense against the oscillation loop
+        if (filesBeingProcessed.has(filePath)) {
+            return;
         }
-        // Schedule new change
-        stats.changesPending++;
-        const timeout = setTimeout(() => {
-            pendingChanges.delete(filePath);
-            stats.changesPending--;
-            processChange(filePath, event);
-        }, debounceDelay);
-        pendingChanges.set(filePath, timeout);
+        // Process change immediately
+        processChange(filePath, event);
     }
     // Set up event handlers
     watcher.on("add", (filePath) => handleFileChange(filePath, "add"));
@@ -386,6 +551,43 @@ export function startWatcher(options) {
         const watched = watcher.getWatched();
         stats.filesWatched = Object.keys(watched).reduce((total, dir) => total + watched[dir].length, 0);
         onLog(`[watch] Watching ${stats.filesWatched} files in ${baseDir}`);
+        // Initialize JSONL state cache on startup to avoid broadcasting all entities on first change
+        try {
+            // Initialize specs.jsonl cache
+            const specsJsonlPath = path.join(baseDir, "specs.jsonl");
+            if (fs.existsSync(specsJsonlPath)) {
+                const content = fs.readFileSync(specsJsonlPath, "utf8");
+                const lines = content.trim().split("\n").filter((line) => line.trim());
+                const stateMap = new Map();
+                for (const line of lines) {
+                    const entity = JSON.parse(line);
+                    // Use canonical content hash to match the change detection logic
+                    const contentHash = computeCanonicalHash(entity);
+                    stateMap.set(entity.id, contentHash);
+                }
+                jsonlStateCache.set(specsJsonlPath, stateMap);
+                onLog(`[watch] Initialized cache for specs.jsonl (${stateMap.size} entities)`);
+            }
+            // Initialize issues.jsonl cache
+            const issuesJsonlPath = path.join(baseDir, "issues.jsonl");
+            if (fs.existsSync(issuesJsonlPath)) {
+                const content = fs.readFileSync(issuesJsonlPath, "utf8");
+                const lines = content.trim().split("\n").filter((line) => line.trim());
+                const stateMap = new Map();
+                for (const line of lines) {
+                    const entity = JSON.parse(line);
+                    // Use canonical content hash to match the change detection logic
+                    const contentHash = computeCanonicalHash(entity);
+                    stateMap.set(entity.id, contentHash);
+                }
+                jsonlStateCache.set(issuesJsonlPath, stateMap);
+                onLog(`[watch] Initialized cache for issues.jsonl (${stateMap.size} entities)`);
+            }
+        }
+        catch (error) {
+            onLog(`[watch] Warning: Failed to initialize JSONL cache: ${error instanceof Error ? error.message : String(error)}`);
+            // Continue anyway - cache will be populated on first change
+        }
     });
     watcher.on("error", (error) => {
         onError(error);
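This warm-up matters because the diff logic treats a cache miss as "created": without seeding, the first JSONL change after startup would compare against an empty cache and re-broadcast every entity. The two seeding blocks differ only in the filename; a factored helper (hypothetical, not in the package) shows the shared shape:

```js
import * as fs from "fs";

// Hypothetical refactor of the startup seeding above: hash every entity in
// one JSONL file and store the per-entity map in the shared cache.
function seedJsonlCache(jsonlStateCache, jsonlPath) {
    if (!fs.existsSync(jsonlPath)) return 0;
    const lines = fs.readFileSync(jsonlPath, "utf8")
        .trim()
        .split("\n")
        .filter((line) => line.trim());
    const stateMap = new Map();
    for (const line of lines) {
        const entity = JSON.parse(line);
        stateMap.set(entity.id, computeCanonicalHash(entity));
    }
    jsonlStateCache.set(jsonlPath, stateMap);
    return stateMap.size; // number of entities seeded
}
```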
@@ -395,12 +597,6 @@ export function startWatcher(options) {
     return {
         stop: async () => {
             onLog("[watch] Stopping watcher...");
-            // Cancel all pending changes
-            for (const timeout of pendingChanges.values()) {
-                clearTimeout(timeout);
-            }
-            pendingChanges.clear();
-            stats.changesPending = 0;
             // Close watcher
             await watcher.close();
             onLog("[watch] Watcher stopped");