claude_memory 0.9.1 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. checksums.yaml +4 -4
  2. data/.claude/memory.sqlite3 +0 -0
  3. data/.claude/skills/dashboard/SKILL.md +42 -0
  4. data/.claude-plugin/marketplace.json +1 -1
  5. data/.claude-plugin/plugin.json +1 -1
  6. data/CHANGELOG.md +130 -0
  7. data/CLAUDE.md +30 -6
  8. data/README.md +66 -2
  9. data/db/migrations/015_add_activity_events.rb +26 -0
  10. data/db/migrations/016_add_moment_feedback.rb +22 -0
  11. data/db/migrations/017_add_last_recalled_at.rb +15 -0
  12. data/docs/1_0_punchlist.md +371 -0
  13. data/docs/EXAMPLES.md +41 -2
  14. data/docs/GETTING_STARTED.md +33 -4
  15. data/docs/architecture.md +22 -7
  16. data/docs/audit-queries.md +131 -0
  17. data/docs/dashboard.md +192 -0
  18. data/docs/improvements.md +650 -9
  19. data/docs/influence/cq.md +187 -0
  20. data/docs/plugin.md +13 -6
  21. data/docs/quality_review.md +524 -172
  22. data/docs/reflection_memory_as_accumulating_judgment.md +67 -0
  23. data/lib/claude_memory/activity_log.rb +86 -0
  24. data/lib/claude_memory/commands/census_command.rb +210 -0
  25. data/lib/claude_memory/commands/completion_command.rb +3 -0
  26. data/lib/claude_memory/commands/dashboard_command.rb +54 -0
  27. data/lib/claude_memory/commands/dedupe_conflicts_command.rb +55 -0
  28. data/lib/claude_memory/commands/digest_command.rb +273 -0
  29. data/lib/claude_memory/commands/hook_command.rb +61 -2
  30. data/lib/claude_memory/commands/initializers/hooks_configurator.rb +7 -4
  31. data/lib/claude_memory/commands/reclassify_references_command.rb +56 -0
  32. data/lib/claude_memory/commands/registry.rb +7 -1
  33. data/lib/claude_memory/commands/show_command.rb +90 -0
  34. data/lib/claude_memory/commands/skills/distill-transcripts.md +13 -1
  35. data/lib/claude_memory/commands/stats_command.rb +131 -2
  36. data/lib/claude_memory/commands/sweep_command.rb +2 -0
  37. data/lib/claude_memory/configuration.rb +16 -0
  38. data/lib/claude_memory/core/relative_time.rb +9 -0
  39. data/lib/claude_memory/dashboard/api.rb +610 -0
  40. data/lib/claude_memory/dashboard/conflicts.rb +279 -0
  41. data/lib/claude_memory/dashboard/efficacy.rb +127 -0
  42. data/lib/claude_memory/dashboard/fact_presenter.rb +109 -0
  43. data/lib/claude_memory/dashboard/health.rb +175 -0
  44. data/lib/claude_memory/dashboard/index.html +2707 -0
  45. data/lib/claude_memory/dashboard/knowledge.rb +136 -0
  46. data/lib/claude_memory/dashboard/moments.rb +244 -0
  47. data/lib/claude_memory/dashboard/reuse.rb +97 -0
  48. data/lib/claude_memory/dashboard/scoped_fact_resolver.rb +95 -0
  49. data/lib/claude_memory/dashboard/server.rb +211 -0
  50. data/lib/claude_memory/dashboard/timeline.rb +68 -0
  51. data/lib/claude_memory/dashboard/trust.rb +454 -0
  52. data/lib/claude_memory/distill/bare_conclusion_detector.rb +71 -0
  53. data/lib/claude_memory/distill/reference_material_detector.rb +78 -0
  54. data/lib/claude_memory/hook/auto_memory_mirror.rb +112 -0
  55. data/lib/claude_memory/hook/context_injector.rb +97 -3
  56. data/lib/claude_memory/hook/handler.rb +191 -3
  57. data/lib/claude_memory/mcp/handlers/management_handlers.rb +8 -0
  58. data/lib/claude_memory/mcp/query_guide.rb +11 -0
  59. data/lib/claude_memory/mcp/text_summary.rb +29 -0
  60. data/lib/claude_memory/mcp/tool_definitions.rb +13 -0
  61. data/lib/claude_memory/mcp/tools.rb +148 -0
  62. data/lib/claude_memory/publish.rb +13 -21
  63. data/lib/claude_memory/recall/stale_detector.rb +67 -0
  64. data/lib/claude_memory/resolve/predicate_policy.rb +2 -0
  65. data/lib/claude_memory/resolve/resolver.rb +41 -11
  66. data/lib/claude_memory/store/llm_cache.rb +68 -0
  67. data/lib/claude_memory/store/metrics_aggregator.rb +96 -0
  68. data/lib/claude_memory/store/schema_manager.rb +1 -1
  69. data/lib/claude_memory/store/sqlite_store.rb +47 -143
  70. data/lib/claude_memory/store/store_manager.rb +29 -0
  71. data/lib/claude_memory/sweep/maintenance.rb +216 -0
  72. data/lib/claude_memory/sweep/recall_timestamp_refresher.rb +83 -0
  73. data/lib/claude_memory/sweep/sweeper.rb +2 -0
  74. data/lib/claude_memory/templates/hooks.example.json +5 -0
  75. data/lib/claude_memory/version.rb +1 -1
  76. data/lib/claude_memory.rb +24 -0
  77. metadata +51 -1
@@ -8,6 +8,8 @@ require "extralite"
8
8
  require "sequel/adapters/extralite"
9
9
  require_relative "retry_handler"
10
10
  require_relative "schema_manager"
11
+ require_relative "llm_cache"
12
+ require_relative "metrics_aggregator"
11
13
 
12
14
  module ClaudeMemory
13
15
  module Store
@@ -19,6 +21,8 @@ module ClaudeMemory
19
21
  class SQLiteStore
20
22
  include RetryHandler
21
23
  include SchemaManager
24
+ include LLMCache
25
+ include MetricsAggregator
22
26
 
23
27
  # @return [Sequel::Database] the underlying Sequel database connection
24
28
  attr_reader :db
@@ -101,6 +105,49 @@ module ClaudeMemory
101
105
  # @return [Sequel::Dataset]
102
106
  def mcp_tool_calls = @db[:mcp_tool_calls]
103
107
 
108
+ # @return [Sequel::Dataset]
109
+ def activity_events = @db[:activity_events]
110
+
111
+ # @return [Sequel::Dataset]
112
+ def moment_feedback = @db[:moment_feedback]
113
+
114
# Upsert a thumbs-up/down verdict for a moment. One row per event_id
# (unique constraint on the column) — repeat clicks overwrite. Returns
# the persisted row.
#
# @param event_id [Integer] activity_events row id
# @param verdict [String] "up" or "down"
# @param note [String, nil] optional freeform note
# @param recorded_at [String, nil] ISO 8601 timestamp (defaults to now UTC)
# @return [Hash] row after upsert
# @raise [ArgumentError] when verdict is not "up" or "down"
def upsert_moment_feedback(event_id:, verdict:, note: nil, recorded_at: nil)
  unless %w[up down].include?(verdict)
    raise ArgumentError, "verdict must be 'up' or 'down'"
  end

  timestamp = recorded_at || Time.now.utc.iso8601
  attributes = {verdict: verdict, note: note, recorded_at: timestamp}

  with_retry do
    @db.transaction do
      current = moment_feedback.where(event_id: event_id).first
      row_id =
        if current
          # Existing verdict: overwrite in place, keep the original row id.
          moment_feedback.where(id: current[:id]).update(attributes)
          current[:id]
        else
          moment_feedback.insert(attributes.merge(event_id: event_id))
        end
      moment_feedback.where(id: row_id).first
    end
  end
end
144
+
145
# Remove the verdict for a moment, if any.
# @param event_id [Integer] activity_events row id
# @return [Integer] number of rows deleted (0 or 1)
def clear_moment_feedback(event_id)
  with_retry do
    moment_feedback.where(event_id: event_id).delete
  end
end
150
+
104
151
  # Record a single MCP tool invocation for telemetry.
105
152
  # Inserts synchronously; callers wrap in with_retry at the call site
106
153
  # if needed.
@@ -497,149 +544,6 @@ module ClaudeMemory
497
544
  .all
498
545
  end
499
546
 
500
- # Count content items that have not yet been distilled.
501
- # @param min_length [Integer] minimum byte_len threshold
502
- # @return [Integer]
503
- def count_undistilled(min_length: 200)
504
- content_items
505
- .left_join(:ingestion_metrics, content_item_id: :id)
506
- .where(Sequel[:ingestion_metrics][:id] => nil)
507
- .where { byte_len >= min_length }
508
- .count
509
- end
510
-
511
- # Record token usage and extraction counts for a distillation run.
512
- # @param content_item_id [Integer] content item that was distilled
513
- # @param input_tokens [Integer] LLM input tokens consumed
514
- # @param output_tokens [Integer] LLM output tokens consumed
515
- # @param facts_extracted [Integer] number of facts extracted
516
- # @return [Integer] inserted row id
517
- def record_ingestion_metrics(content_item_id:, input_tokens:, output_tokens:, facts_extracted:)
518
- ingestion_metrics.insert(
519
- content_item_id: content_item_id,
520
- input_tokens: input_tokens,
521
- output_tokens: output_tokens,
522
- facts_extracted: facts_extracted,
523
- created_at: Time.now.utc.iso8601
524
- )
525
- end
526
-
527
- # Compute aggregate ingestion metrics across all distillation runs.
528
- # @return [Hash, nil] totals and efficiency ratio, or nil if no data
529
- def aggregate_ingestion_metrics
530
- # standard:disable Performance/Detect (Sequel DSL requires .select{}.first)
531
- result = ingestion_metrics
532
- .select {
533
- [
534
- sum(:input_tokens).as(:total_input),
535
- sum(:output_tokens).as(:total_output),
536
- sum(:facts_extracted).as(:total_facts),
537
- count(:id).as(:total_ops)
538
- ]
539
- }
540
- .first
541
- # standard:enable Performance/Detect
542
-
543
- return nil if result.nil? || result[:total_ops].to_i.zero?
544
-
545
- total_input = result[:total_input].to_i
546
- total_output = result[:total_output].to_i
547
- total_facts = result[:total_facts].to_i
548
- total_ops = result[:total_ops].to_i
549
-
550
- efficiency = total_input.zero? ? 0.0 : (total_facts.to_f / total_input * 1000).round(2)
551
-
552
- {
553
- total_input_tokens: total_input,
554
- total_output_tokens: total_output,
555
- total_facts_extracted: total_facts,
556
- total_operations: total_ops,
557
- avg_facts_per_1k_input_tokens: efficiency
558
- }
559
- end
560
-
561
- # Mark all undistilled content items as distilled with zero token counts.
562
- # Used for backfilling legacy content that predates the metrics table.
563
- # @return [Integer] number of items backfilled
564
- def backfill_distillation_metrics!
565
- undistilled_ids = content_items
566
- .left_join(:ingestion_metrics, content_item_id: :id)
567
- .where(Sequel[:ingestion_metrics][:id] => nil)
568
- .select_map(Sequel[:content_items][:id])
569
-
570
- return 0 if undistilled_ids.empty?
571
-
572
- now = Time.now.utc.iso8601
573
- undistilled_ids.each do |cid|
574
- ingestion_metrics.insert(
575
- content_item_id: cid,
576
- input_tokens: 0,
577
- output_tokens: 0,
578
- facts_extracted: 0,
579
- created_at: now
580
- )
581
- end
582
-
583
- undistilled_ids.size
584
- end
585
-
586
- # --- LLM cache ---
587
-
588
- # Look up a cached LLM result by its cache key.
589
- # @param cache_key [String] SHA-256 hex cache key
590
- # @return [Hash, nil]
591
- def llm_cache_lookup(cache_key)
592
- llm_cache.where(cache_key: cache_key).first
593
- end
594
-
595
- # Store or update a cached LLM result. Uses upsert on the cache_key.
596
- # @param operation [String] operation name (e.g. "distill", "embed")
597
- # @param model [String] model identifier
598
- # @param input_hash [String] SHA-256 hex digest of the input
599
- # @param result_json [String] JSON-serialized result
600
- # @param input_tokens [Integer, nil] input tokens consumed
601
- # @param output_tokens [Integer, nil] output tokens consumed
602
- # @return [void]
603
- def llm_cache_store(operation:, model:, input_hash:, result_json:, input_tokens: nil, output_tokens: nil)
604
- cache_key = Digest::SHA256.hexdigest("#{operation}:#{model}:#{input_hash}")
605
-
606
- llm_cache
607
- .insert_conflict(target: :cache_key, update: {
608
- result_json: result_json,
609
- input_tokens: input_tokens,
610
- output_tokens: output_tokens,
611
- created_at: Time.now.utc.iso8601
612
- })
613
- .insert(
614
- cache_key: cache_key,
615
- operation: operation,
616
- model: model,
617
- input_hash: input_hash,
618
- result_json: result_json,
619
- input_tokens: input_tokens,
620
- output_tokens: output_tokens,
621
- created_at: Time.now.utc.iso8601
622
- )
623
- end
624
-
625
- # Compute the cache key for an LLM operation.
626
- # @param operation [String] operation name
627
- # @param model [String] model identifier
628
- # @param input [String] raw input text
629
- # @return [String] SHA-256 hex cache key
630
- def llm_cache_key(operation, model, input)
631
- input_hash = Digest::SHA256.hexdigest(input)
632
- Digest::SHA256.hexdigest("#{operation}:#{model}:#{input_hash}")
633
- end
634
-
635
- # Delete LLM cache entries older than the given age.
636
- # @param max_age_seconds [Integer] maximum age in seconds (default: 7 days)
637
- # @return [Integer] number of rows deleted
638
- def llm_cache_prune(max_age_seconds: 604_800)
639
- cutoff = (Time.now - max_age_seconds).utc.iso8601
640
- llm_cache.where { created_at < cutoff }.delete
641
- end
642
-
643
547
  # --- Meta ---
644
548
 
645
549
  # Set a key-value pair in the meta table (upsert).
@@ -118,6 +118,35 @@ module ClaudeMemory
118
118
  end
119
119
  end
120
120
 
121
# Return the store for an explicit scope only if its database file
# already exists on disk. Never creates a new DB. Useful for
# read-only surfaces that want to avoid accidental initialization.
#
# Accepts either a String or a Symbol (normalized via #to_s), so
# callers passing :project / :global no longer silently get nil.
#
# @param scope [String, Symbol] "global" or "project"
# @return [SQLiteStore, nil] nil when the DB file is missing or the
#   scope is unrecognized
def store_if_exists(scope)
  case scope.to_s
  when "project"
    return nil unless project_exists?
    ensure_project!
  when "global"
    return nil unless global_exists?
    ensure_global!
  end
end
136
+
137
# Return whichever store is available, preferring the requested scope.
# Falls back to the other scope if the preferred DB doesn't exist on
# disk yet. Returns nil only when both DBs are missing. Intended for
# "best-effort" surfaces like activity logging and default dashboard
# reads where the caller just needs some store to talk to.
# @param prefer [Symbol] :project (default) or :global
# @return [SQLiteStore, nil]
def default_store(prefer: :project)
  search_order = (prefer == :global) ? %w[global project] : %w[project global]
  search_order.each do |scope|
    found = store_if_exists(scope)
    return found if found
  end
  nil
end
149
+
121
150
  # Copy a project-scoped fact (with its entities and provenance) into the
122
151
  # global store, making it available across all projects. Runs the global
123
152
  # writes in a single transaction for atomicity.
@@ -47,6 +47,74 @@ module ClaudeMemory
47
47
  .update(status: "expired")
48
48
  end
49
49
 
50
# Collapse duplicate multi-value facts. Before the resolver-level
# dedup fix (2026-04-17), multi-value predicates like uses_language
# and uses_framework accumulated identical rows every ingest cycle.
# For each (subject_entity_id, predicate, object_literal, scope) group
# with more than one active fact, keep the oldest row, copy the
# duplicates' provenance onto the keeper (so we retain source
# signal), and mark the duplicates superseded.
#
# @return [Integer] count of fact rows merged into their keeper
def dedupe_multi_value_facts
  merged_count = 0
  @store.db.transaction do
    # Pull every active fact with a literal object and group in Ruby.
    # Facts tables stay small (< 10k typical); Sequel's HAVING COUNT(*)
    # path hits adapter quoting bugs on some Extralite versions.
    candidates = @store.facts
      .where(status: "active")
      .exclude(subject_entity_id: nil)
      .exclude(object_literal: nil)
      .order(:id)
      .all

    duplicate_key = ->(fact) {
      [fact[:subject_entity_id], fact[:predicate], fact[:object_literal]&.downcase, fact[:scope]]
    }

    candidates.group_by(&duplicate_key).each_value do |group|
      next if group.size < 2

      keeper, *losers = group # ordered by :id, so the keeper is the oldest
      losers.each do |loser|
        # Re-point the duplicate's provenance at the keeper before
        # retiring it, so the source signal is preserved.
        @store.provenance.where(fact_id: loser[:id]).update(fact_id: keeper[:id])
        @store.facts.where(id: loser[:id]).update(
          status: "superseded",
          valid_to: Time.now.utc.iso8601
        )
        @store.insert_fact_link(from_fact_id: keeper[:id], to_fact_id: loser[:id], link_type: "supersedes")
        merged_count += 1
      end
    end
  end
  merged_count
end
92
+
93
# Fix scope leakage: facts whose `scope` column disagrees with the
# store they live in. Pre-2026-04-20, the resolver treated
# scope_hint from the distiller as a scope override — so when the
# NullDistiller detected global-scope language ("always", "my
# preference"), it stamped scope: "global" on facts that still
# ended up written to the project DB. The result was invisible
# orphaned rows: not in the global DB so global recall never saw
# them, but labeled global inside the project DB.
#
# This pass detects those rows by comparing `scope` to the
# expected value derived from which DB this Maintenance instance
# is running against, and rewrites scope + project_path to match.
# Does not move facts between DBs — users can `claude-memory
# promote <id>` to do a proper cross-store copy.
#
# NOTE(review): `exclude(scope: ...)` emits `scope != ?`, which does
# not match rows whose scope is NULL — confirm NULL scopes can't
# occur here, or they will be left untouched.
#
# @return [Integer] count of facts whose scope was corrected
def fix_scope_leakage
  target_scope = expected_scope_for_store
  return 0 unless target_scope

  corrected_project_path = (target_scope == "global") ? nil : detect_project_path
  @store.facts
    .exclude(scope: target_scope)
    .update(scope: target_scope, project_path: corrected_project_path)
end
117
+
50
118
  # Delete provenance records referencing non-existent facts.
51
119
  # Returns: Integer count of deleted provenance rows
52
120
  def prune_orphaned_provenance
@@ -188,6 +256,114 @@ module ClaudeMemory
188
256
  true
189
257
  end
190
258
 
259
# Deduplicate open conflicts that describe the same contradiction.
# Before the Resolver#apply_conflict dedupe fix (2026-04-24), each
# re-extraction of the losing value in a single-value slot produced
# a new disputed fact + conflict row — production DBs accumulated 11
# open conflicts for "sqlite vs postgresql" referencing 11 different
# disputed facts. This pass keeps the earliest conflict per logical
# pair and marks the rest resolved, reinforcing the keeper's
# provenance chain with the duplicates' provenance.
#
# Pair key: (subject_entity_id, predicate, normalized(object_a), normalized(object_b))
# with object order sorted so A-vs-B == B-vs-A.
#
# @param dry_run [Boolean] when true, decide but don't write
# @return [Hash] {inspected:, resolved:, decisions: [{conflict_id:, action:, keeper_id:}]}
def dedupe_open_conflicts(dry_run: false)
  summary = {inspected: 0, resolved: 0, decisions: []}

  open_conflicts = @store.conflicts.where(status: "open").order(:id).all
  return summary if open_conflicts.empty?

  referenced_ids = open_conflicts.flat_map { |c| [c[:fact_a_id], c[:fact_b_id]] }.uniq
  facts_by_id = @store.facts
    .where(id: referenced_ids)
    .select(:id, :subject_entity_id, :predicate, :object_literal, :status)
    .all
    .to_h { |fact| [fact[:id], fact] }

  @store.db.transaction do
    open_conflicts
      .group_by { |conflict| pair_key(conflict, facts_by_id) }
      .each do |key, group|
        next if key.nil? # missing-fact conflicts are skipped entirely

        summary[:inspected] += group.size
        next if group.size < 2

        keeper, *dupes = group # ordered by :id, so keeper is the earliest
        dupes.each do |dup|
          summary[:decisions] << {
            conflict_id: dup[:id],
            action: :resolve_duplicate,
            keeper_id: keeper[:id],
            duplicate_fact_id: dup[:fact_b_id]
          }
          # Counted whether or not we actually write, so dry-run output
          # matches real-run output and callers can compare plans.
          summary[:resolved] += 1
          next if dry_run

          # fact_b_id is always the newer inserted-as-disputed fact per
          # Resolver convention: reject it and shift its provenance onto
          # the keeper's fact_b so the evidence isn't lost.
          if dup[:fact_b_id] != keeper[:fact_b_id]
            @store.provenance.where(fact_id: dup[:fact_b_id]).update(fact_id: keeper[:fact_b_id])
            @store.facts.where(id: dup[:fact_b_id]).update(
              status: "rejected",
              valid_to: Time.now.utc.iso8601
            )
          end
          @store.conflicts.where(id: dup[:id]).update(
            status: "resolved",
            notes: "Deduplicated into conflict ##{keeper[:id]}"
          )
        end
      end
  end

  summary
end
331
+
332
# Reclassify active facts currently labeled `convention` whose object
# text matches the ReferenceMaterialDetector heuristics. Fixes the
# historical data tail from before the detector was wired into
# `store_extraction` on 2026-04-24. Current writes can't create this
# pattern — this pass only cleans up what already exists.
#
# @param dry_run [Boolean] when true, decide but don't write
# @return [Hash] {inspected:, reclassified:, decisions: [{fact_id:, object:}]}
def reclassify_references(dry_run: false)
  detector = ClaudeMemory::Distill::ReferenceMaterialDetector.new
  summary = {inspected: 0, reclassified: 0, decisions: []}

  rows = @store.facts
    .where(status: "active", predicate: "convention")
    .select(:id, :object_literal)
    .all

  @store.db.transaction do
    rows.each do |row|
      summary[:inspected] += 1
      candidate = {predicate: "convention", object: row[:object_literal]}
      next unless detector.reference_material?(candidate)

      summary[:decisions] << {fact_id: row[:id], object: row[:object_literal]}
      summary[:reclassified] += 1
      @store.facts.where(id: row[:id]).update(predicate: "reference") unless dry_run
    end
  end

  summary
end
366
+
191
367
  # Run SQLite VACUUM to reclaim space.
192
368
  # Returns: true
193
369
  def vacuum
@@ -197,6 +373,20 @@ module ClaudeMemory
197
373
 
198
374
  private
199
375
 
376
# Canonical key for grouping open conflicts. Two conflicts are the
# "same" when they involve the same subject, predicate, and set of
# objects (A-vs-B == B-vs-A). Missing-fact conflicts (either side
# deleted) get a nil key and are skipped by the caller.
#
# @param conflict_row [Hash] conflict row with :fact_a_id / :fact_b_id
# @param facts_by_id [Hash{Integer=>Hash}] fact rows keyed by id
# @return [Array, nil] [subject_entity_id, predicate, sorted_objects] or nil
def pair_key(conflict_row, facts_by_id)
  side_a = facts_by_id[conflict_row[:fact_a_id]]
  side_b = facts_by_id[conflict_row[:fact_b_id]]
  return nil if side_a.nil? || side_b.nil?
  return nil if side_a[:subject_entity_id] != side_b[:subject_entity_id] ||
                side_a[:predicate] != side_b[:predicate]

  # Normalize + sort the two object literals so ordering is symmetric.
  normalized = [side_a, side_b].map { |fact| fact[:object_literal].to_s.downcase.strip }.sort
  [side_a[:subject_entity_id], side_a[:predicate], normalized]
end
389
+
200
390
  def restore_tokenize(text)
201
391
  return Set.new if text.nil?
202
392
  text.downcase
@@ -230,6 +420,32 @@ module ClaudeMemory
230
420
  (Time.now - days * 86400).utc.iso8601
231
421
  end
232
422
 
423
# Infer the scope each store is supposed to carry by comparing its
# DB path to the canonical Configuration paths. Returns "global" for
# the user-wide DB, "project" for the per-project DB, or nil when
# the path doesn't match either (custom test paths, etc. — in which
# case fix_scope_leakage is a no-op).
#
# @return [String, nil] "global", "project", or nil
def expected_scope_for_store
  db_path = @store.db.opts[:database].to_s
  return nil if db_path.empty?

  resolved = File.expand_path(db_path)
  config = ClaudeMemory::Configuration.new
  return "global" if resolved == File.expand_path(config.global_db_path)

  # Project DB lives at <project>/.claude/memory.sqlite3 — any DB
  # sitting directly inside a .claude directory is project-scoped,
  # regardless of which project it belongs to.
  # NOTE(review): hard-codes a "/" separator — assumes POSIX paths.
  File.dirname(resolved).end_with?("/.claude") ? "project" : nil
end
439
+
440
# Derive the project root from this store's DB path. The canonical
# project DB lives at <project>/.claude/memory.sqlite3, so the
# project root is two directory levels above the DB file.
# @return [String, nil] existing project directory, or nil when unknown
def detect_project_path
  db_path = @store.db.opts[:database].to_s
  return nil if db_path.empty?

  candidate = File.dirname(File.expand_path(db_path), 2)
  Dir.exist?(candidate) ? candidate : nil
end
448
+
233
449
  def with_vec_index
234
450
  vec_index = @store.vector_index
235
451
  return unless vec_index.available?
@@ -0,0 +1,83 @@
1
# frozen_string_literal: true

require "json"

module ClaudeMemory
  module Sweep
    # Path B for #35 access-based staleness — sweep-derived rather than
    # per-recall written. Scans activity_events from both stores, projects
    # the most recent recall/context-injection touch per (scope, fact_id),
    # and bulk-updates facts.last_recalled_at across both DBs.
    #
    # Cross-DB by design: project DBs record activity_events for both
    # project and global facts (a recall fired from a project context that
    # returns global facts is logged in the project DB), so a per-store
    # refresh would silently miss global facts entirely.
    #
    # Lookback bounds keep the scan O(window), not O(history).
    class RecallTimestampRefresher
      DEFAULT_LOOKBACK_DAYS = 90
      RECALL_EVENT_TYPES = %w[recall hook_context].freeze

      # @param manager [StoreManager] store lookup used for both scopes
      # @param lookback_days [Integer] event-scan window in days
      def initialize(manager, lookback_days: DEFAULT_LOOKBACK_DAYS)
        @manager = manager
        @lookback_days = lookback_days
      end

      # @return [Hash] {project: Integer, global: Integer} — count of facts updated per scope.
      def refresh!
        window_start = (Time.now.utc - @lookback_days * 86_400).iso8601
        apply_to_stores(collect_latest_per_fact(window_start))
      end

      private

      # Scans every activity_events table available to the manager and
      # returns {[scope, fact_id] => latest_occurred_at}.
      def collect_latest_per_fact(cutoff)
        newest = {}
        %w[project global].each do |source|
          event_store = @manager.store_if_exists(source)
          next unless event_store

          recall_events = event_store.activity_events
            .where(event_type: RECALL_EVENT_TYPES)
            .where { occurred_at >= cutoff }
            .select(:occurred_at, :detail_json)
            .all

          recall_events.each do |event|
            details = parse_details(event[:detail_json])
            Dashboard::ScopedFactResolver.scoped_ids_from_details(details).each do |scope, fact_ids|
              fact_ids.each do |fact_id|
                key = [scope.to_s, fact_id]
                seen_at = newest[key]
                # Keep only the most recent touch per (scope, fact_id).
                newest[key] = event[:occurred_at] if seen_at.nil? || event[:occurred_at] > seen_at
              end
            end
          end
        end
        newest
      end

      # Best-effort JSON parse; blank or malformed payloads become {}.
      def parse_details(detail_json)
        return {} if detail_json.nil? || detail_json.empty?
        JSON.parse(detail_json, symbolize_names: true)
      rescue JSON::ParserError
        {}
      end

      # Writes the projected timestamps into each scope's facts table.
      # @return [Hash] per-scope counts of updated fact rows
      def apply_to_stores(newest_by_key)
        counts = {project: 0, global: 0}
        newest_by_key.group_by { |(scope, _fact_id), _ts| scope }.each do |scope, entries|
          target_store = @manager.store_if_exists(scope)
          next unless target_store

          entries.each do |((_scope, fact_id), touched_at)|
            counts[scope.to_sym] += target_store.facts.where(id: fact_id).update(last_recalled_at: touched_at)
          end
        end
        counts
      end
    end
  end
end
@@ -38,6 +38,8 @@ module ClaudeMemory
38
38
 
39
39
  run_if_within_budget { @stats[:proposed_facts_expired] = maintenance.expire_proposed_facts }
40
40
  run_if_within_budget { @stats[:disputed_facts_expired] = maintenance.expire_disputed_facts }
41
+ run_if_within_budget { @stats[:multi_value_facts_merged] = maintenance.dedupe_multi_value_facts }
42
+ run_if_within_budget { @stats[:scope_leakage_fixed] = maintenance.fix_scope_leakage }
41
43
  run_if_within_budget { @stats[:orphaned_provenance_deleted] = maintenance.prune_orphaned_provenance }
42
44
  run_if_within_budget { @stats[:old_content_pruned] = maintenance.prune_old_content }
43
45
  run_if_within_budget { @stats[:mcp_tool_calls_pruned] = maintenance.prune_old_mcp_tool_calls }
@@ -68,6 +68,11 @@
68
68
  "command": "claude-memory hook sweep",
69
69
  "timeout": 30,
70
70
  "statusMessage": "Sweeping memory..."
71
+ },
72
+ {
73
+ "type": "command",
74
+ "command": "claude-memory hook nudge",
75
+ "timeout": 5
71
76
  }
72
77
  ]
73
78
  }
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module ClaudeMemory
4
- VERSION = "0.9.1"
4
+ VERSION = "0.11.0"
5
5
  end
data/lib/claude_memory.rb CHANGED
@@ -73,12 +73,32 @@ require_relative "claude_memory/commands/completion_command"
73
73
  require_relative "claude_memory/commands/embeddings_command"
74
74
  require_relative "claude_memory/commands/reject_command"
75
75
  require_relative "claude_memory/commands/restore_command"
76
+ require_relative "claude_memory/commands/dedupe_conflicts_command"
77
+ require_relative "claude_memory/commands/reclassify_references_command"
78
+ require_relative "claude_memory/commands/census_command"
79
+ require_relative "claude_memory/commands/dashboard_command"
80
+ require_relative "claude_memory/dashboard/fact_presenter"
81
+ require_relative "claude_memory/dashboard/scoped_fact_resolver"
82
+ require_relative "claude_memory/dashboard/conflicts"
83
+ require_relative "claude_memory/dashboard/efficacy"
84
+ require_relative "claude_memory/dashboard/moments"
85
+ require_relative "claude_memory/dashboard/trust"
86
+ require_relative "claude_memory/dashboard/knowledge"
87
+ require_relative "claude_memory/dashboard/reuse"
88
+ require_relative "claude_memory/dashboard/timeline"
89
+ require_relative "claude_memory/dashboard/health"
90
+ require_relative "claude_memory/dashboard/api"
91
+ require_relative "claude_memory/dashboard/server"
92
+ require_relative "claude_memory/commands/digest_command"
93
+ require_relative "claude_memory/commands/show_command"
76
94
  require_relative "claude_memory/commands/registry"
77
95
  require_relative "claude_memory/cli"
78
96
  require_relative "claude_memory/configuration"
79
97
  require_relative "claude_memory/distill/distiller"
80
98
  require_relative "claude_memory/distill/extraction"
81
99
  require_relative "claude_memory/distill/null_distiller"
100
+ require_relative "claude_memory/distill/reference_material_detector"
101
+ require_relative "claude_memory/distill/bare_conclusion_detector"
82
102
  require_relative "claude_memory/domain/fact"
83
103
  require_relative "claude_memory/domain/entity"
84
104
  require_relative "claude_memory/domain/provenance"
@@ -91,6 +111,7 @@ require_relative "claude_memory/embeddings/api_adapter"
91
111
  require_relative "claude_memory/embeddings/dimension_check"
92
112
  require_relative "claude_memory/embeddings/resolver"
93
113
  require_relative "claude_memory/embeddings/similarity"
114
+ require_relative "claude_memory/hook/auto_memory_mirror"
94
115
  require_relative "claude_memory/hook/context_injector"
95
116
  require_relative "claude_memory/hook/distillation_runner"
96
117
  require_relative "claude_memory/hook/exit_codes"
@@ -109,6 +130,7 @@ require_relative "claude_memory/ingest/tool_extractor"
109
130
  require_relative "claude_memory/ingest/tool_filter"
110
131
  require_relative "claude_memory/ingest/ingester"
111
132
  require_relative "claude_memory/ingest/transcript_reader"
133
+ require_relative "claude_memory/activity_log"
112
134
  require_relative "claude_memory/logging/logger"
113
135
  require_relative "claude_memory/infrastructure/file_system"
114
136
  require_relative "claude_memory/infrastructure/in_memory_file_system"
@@ -126,6 +148,7 @@ require_relative "claude_memory/recall/expansion_detector"
126
148
  require_relative "claude_memory/recall/query_core"
127
149
  require_relative "claude_memory/recall/legacy_engine"
128
150
  require_relative "claude_memory/recall/dual_engine"
151
+ require_relative "claude_memory/recall/stale_detector"
129
152
  require_relative "claude_memory/recall"
130
153
  require_relative "claude_memory/shortcuts"
131
154
  require_relative "claude_memory/resolve/predicate_policy"
@@ -134,6 +157,7 @@ require_relative "claude_memory/store/sqlite_store"
134
157
  require_relative "claude_memory/store/store_manager"
135
158
  require_relative "claude_memory/sweep/maintenance"
136
159
  require_relative "claude_memory/sweep/sweeper"
160
+ require_relative "claude_memory/sweep/recall_timestamp_refresher"
137
161
  require_relative "claude_memory/version"
138
162
 
139
163
  module ClaudeMemory