claude_memory 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.claude/CLAUDE.md +3 -0
- data/.claude/memory.sqlite3 +0 -0
- data/.claude/output-styles/memory-aware.md +21 -0
- data/.claude/rules/claude_memory.generated.md +21 -0
- data/.claude/settings.json +62 -0
- data/.claude/settings.local.json +21 -0
- data/.claude-plugin/marketplace.json +13 -0
- data/.claude-plugin/plugin.json +10 -0
- data/.mcp.json +11 -0
- data/CHANGELOG.md +36 -0
- data/CLAUDE.md +224 -0
- data/CODE_OF_CONDUCT.md +10 -0
- data/LICENSE.txt +21 -0
- data/README.md +212 -0
- data/Rakefile +10 -0
- data/commands/analyze.md +29 -0
- data/commands/recall.md +17 -0
- data/commands/remember.md +26 -0
- data/docs/demo.md +126 -0
- data/docs/organizational_memory_playbook.md +291 -0
- data/docs/plan.md +411 -0
- data/docs/plugin.md +202 -0
- data/docs/updated_plan.md +453 -0
- data/exe/claude-memory +8 -0
- data/hooks/hooks.json +59 -0
- data/lib/claude_memory/cli.rb +869 -0
- data/lib/claude_memory/distill/distiller.rb +11 -0
- data/lib/claude_memory/distill/extraction.rb +29 -0
- data/lib/claude_memory/distill/json_schema.md +78 -0
- data/lib/claude_memory/distill/null_distiller.rb +123 -0
- data/lib/claude_memory/hook/handler.rb +49 -0
- data/lib/claude_memory/index/lexical_fts.rb +58 -0
- data/lib/claude_memory/ingest/ingester.rb +46 -0
- data/lib/claude_memory/ingest/transcript_reader.rb +21 -0
- data/lib/claude_memory/mcp/server.rb +127 -0
- data/lib/claude_memory/mcp/tools.rb +409 -0
- data/lib/claude_memory/publish.rb +201 -0
- data/lib/claude_memory/recall.rb +360 -0
- data/lib/claude_memory/resolve/predicate_policy.rb +30 -0
- data/lib/claude_memory/resolve/resolver.rb +152 -0
- data/lib/claude_memory/store/sqlite_store.rb +340 -0
- data/lib/claude_memory/store/store_manager.rb +139 -0
- data/lib/claude_memory/sweep/sweeper.rb +80 -0
- data/lib/claude_memory/templates/hooks.example.json +74 -0
- data/lib/claude_memory/templates/output-styles/memory-aware.md +21 -0
- data/lib/claude_memory/version.rb +5 -0
- data/lib/claude_memory.rb +36 -0
- data/sig/claude_memory.rbs +4 -0
- data/skills/analyze/SKILL.md +126 -0
- data/skills/memory/SKILL.md +82 -0
- metadata +123 -0
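
The largest additions are two Ruby sources, data/lib/claude_memory/mcp/tools.rb (+409) and data/lib/claude_memory/publish.rb (+201), reproduced in the hunks below. For orientation only: the package is published as claude_memory 0.1.0 and ships a claude-memory executable (data/exe/claude-memory); a hypothetical Gemfile entry for pulling in this release is sketched here and is not itself part of the diff.

# Hypothetical Gemfile entry for this release; not taken from the diff itself.
gem "claude_memory", "0.1.0"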
@@ -0,0 +1,409 @@
# frozen_string_literal: true

require "json"
require "digest"

module ClaudeMemory
  module MCP
    class Tools
      def initialize(store_or_manager)
        @recall = Recall.new(store_or_manager)

        if store_or_manager.is_a?(Store::StoreManager)
          @manager = store_or_manager
        else
          @legacy_store = store_or_manager
        end
      end

      def definitions
        [
          {
            name: "memory.recall",
            description: "Recall facts matching a query. Searches both global and project databases.",
            inputSchema: {
              type: "object",
              properties: {
                query: {type: "string", description: "Search query"},
                limit: {type: "integer", description: "Max results", default: 10},
                scope: {type: "string", enum: ["all", "global", "project"], description: "Filter by scope: 'all' (default), 'global', or 'project'", default: "all"}
              },
              required: ["query"]
            }
          },
          {
            name: "memory.explain",
            description: "Get detailed explanation of a fact with provenance",
            inputSchema: {
              type: "object",
              properties: {
                fact_id: {type: "integer", description: "Fact ID to explain"},
                scope: {type: "string", enum: ["global", "project"], description: "Which database to look in", default: "project"}
              },
              required: ["fact_id"]
            }
          },
          {
            name: "memory.changes",
            description: "List recent fact changes from both databases",
            inputSchema: {
              type: "object",
              properties: {
                since: {type: "string", description: "ISO timestamp"},
                limit: {type: "integer", default: 20},
                scope: {type: "string", enum: ["all", "global", "project"], default: "all"}
              }
            }
          },
          {
            name: "memory.conflicts",
            description: "List open conflicts from both databases",
            inputSchema: {
              type: "object",
              properties: {
                scope: {type: "string", enum: ["all", "global", "project"], default: "all"}
              }
            }
          },
          {
            name: "memory.sweep_now",
            description: "Run maintenance sweep on a database",
            inputSchema: {
              type: "object",
              properties: {
                budget_seconds: {type: "integer", default: 5},
                scope: {type: "string", enum: ["global", "project"], default: "project"}
              }
            }
          },
          {
            name: "memory.status",
            description: "Get memory system status for both databases",
            inputSchema: {
              type: "object",
              properties: {}
            }
          },
          {
            name: "memory.promote",
            description: "Promote a project fact to global memory. Use when user says a preference should apply everywhere.",
            inputSchema: {
              type: "object",
              properties: {
                fact_id: {type: "integer", description: "Project fact ID to promote to global"}
              },
              required: ["fact_id"]
            }
          },
          {
            name: "memory.store_extraction",
            description: "Store extracted facts, entities, and decisions from a conversation. Call this to persist knowledge you've learned during the session.",
            inputSchema: {
              type: "object",
              properties: {
                entities: {
                  type: "array",
                  description: "Entities mentioned (databases, frameworks, services, etc.)",
                  items: {
                    type: "object",
                    properties: {
                      type: {type: "string", description: "Entity type: database, framework, language, platform, repo, module, person, service"},
                      name: {type: "string", description: "Canonical name"},
                      confidence: {type: "number", description: "0.0-1.0 extraction confidence"}
                    },
                    required: ["type", "name"]
                  }
                },
                facts: {
                  type: "array",
                  description: "Facts learned during the session",
                  items: {
                    type: "object",
                    properties: {
                      subject: {type: "string", description: "Entity name or 'repo' for project-level facts"},
                      predicate: {type: "string", description: "Relationship type: uses_database, uses_framework, convention, decision, auth_method, deployment_platform"},
                      object: {type: "string", description: "The value or target entity"},
                      confidence: {type: "number", description: "0.0-1.0 how confident"},
                      quote: {type: "string", description: "Source text excerpt (max 200 chars)"},
                      strength: {type: "string", enum: ["stated", "inferred"], description: "Was this explicitly stated or inferred?"},
                      scope_hint: {type: "string", enum: ["project", "global"], description: "Should this apply to just this project or globally?"}
                    },
                    required: ["subject", "predicate", "object"]
                  }
                },
                decisions: {
                  type: "array",
                  description: "Decisions made during the session",
                  items: {
                    type: "object",
                    properties: {
                      title: {type: "string", description: "Short summary (max 100 chars)"},
                      summary: {type: "string", description: "Full description"},
                      status_hint: {type: "string", enum: ["accepted", "proposed", "rejected"]}
                    },
                    required: ["title", "summary"]
                  }
                },
                scope: {type: "string", enum: ["global", "project"], description: "Default scope for facts", default: "project"}
              },
              required: ["facts"]
            }
          }
        ]
      end

      def call(name, arguments)
        case name
        when "memory.recall"
          recall(arguments)
        when "memory.explain"
          explain(arguments)
        when "memory.changes"
          changes(arguments)
        when "memory.conflicts"
          conflicts(arguments)
        when "memory.sweep_now"
          sweep_now(arguments)
        when "memory.status"
          status
        when "memory.promote"
          promote(arguments)
        when "memory.store_extraction"
          store_extraction(arguments)
        else
          {error: "Unknown tool: #{name}"}
        end
      end

      private

      def recall(args)
        scope = args["scope"] || "all"
        results = @recall.query(args["query"], limit: args["limit"] || 10, scope: scope)
        {
          facts: results.map do |r|
            {
              id: r[:fact][:id],
              subject: r[:fact][:subject_name],
              predicate: r[:fact][:predicate],
              object: r[:fact][:object_literal],
              status: r[:fact][:status],
              source: r[:source],
              receipts: r[:receipts].map { |p| {quote: p[:quote], strength: p[:strength]} }
            }
          end
        }
      end

      def explain(args)
        scope = args["scope"] || "project"
        explanation = @recall.explain(args["fact_id"], scope: scope)
        return {error: "Fact not found in #{scope} database"} unless explanation

        {
          fact: {
            id: explanation[:fact][:id],
            subject: explanation[:fact][:subject_name],
            predicate: explanation[:fact][:predicate],
            object: explanation[:fact][:object_literal],
            status: explanation[:fact][:status],
            valid_from: explanation[:fact][:valid_from],
            valid_to: explanation[:fact][:valid_to]
          },
          source: scope,
          receipts: explanation[:receipts].map { |p| {quote: p[:quote], strength: p[:strength]} },
          supersedes: explanation[:supersedes],
          superseded_by: explanation[:superseded_by],
          conflicts: explanation[:conflicts].map { |c| c[:id] }
        }
      end

      def changes(args)
        since = args["since"] || (Time.now - 86400 * 7).utc.iso8601
        scope = args["scope"] || "all"
        list = @recall.changes(since: since, limit: args["limit"] || 20, scope: scope)
        {
          since: since,
          changes: list.map do |c|
            {
              id: c[:id],
              predicate: c[:predicate],
              object: c[:object_literal],
              status: c[:status],
              created_at: c[:created_at],
              source: c[:source]
            }
          end
        }
      end

      def conflicts(args)
        scope = args["scope"] || "all"
        list = @recall.conflicts(scope: scope)
        {
          count: list.size,
          conflicts: list.map do |c|
            {
              id: c[:id],
              fact_a: c[:fact_a_id],
              fact_b: c[:fact_b_id],
              status: c[:status],
              source: c[:source]
            }
          end
        }
      end

      def sweep_now(args)
        scope = args["scope"] || "project"
        store = get_store_for_scope(scope)
        return {error: "Database not available"} unless store

        sweeper = Sweep::Sweeper.new(store)
        stats = sweeper.run!(budget_seconds: args["budget_seconds"] || 5)
        {
          scope: scope,
          proposed_expired: stats[:proposed_facts_expired],
          disputed_expired: stats[:disputed_facts_expired],
          orphaned_deleted: stats[:orphaned_provenance_deleted],
          content_pruned: stats[:old_content_pruned],
          elapsed_seconds: stats[:elapsed_seconds].round(3)
        }
      end

      def status
        result = {databases: {}}

        if @manager
          if @manager.global_exists?
            @manager.ensure_global!
            result[:databases][:global] = db_stats(@manager.global_store)
          else
            result[:databases][:global] = {exists: false}
          end

          if @manager.project_exists?
            @manager.ensure_project!
            result[:databases][:project] = db_stats(@manager.project_store)
          else
            result[:databases][:project] = {exists: false}
          end
        else
          result[:databases][:legacy] = db_stats(@legacy_store)
        end

        result
      end

      def promote(args)
        return {error: "Promote requires StoreManager"} unless @manager

        fact_id = args["fact_id"]
        global_fact_id = @manager.promote_fact(fact_id)

        if global_fact_id
          {
            success: true,
            project_fact_id: fact_id,
            global_fact_id: global_fact_id,
            message: "Fact promoted to global memory"
          }
        else
          {error: "Fact #{fact_id} not found in project database"}
        end
      end

      def store_extraction(args)
        scope = args["scope"] || "project"
        store = get_store_for_scope(scope)
        return {error: "Database not available"} unless store

        entities = (args["entities"] || []).map { |e| symbolize_keys(e) }
        facts = (args["facts"] || []).map { |f| symbolize_keys(f) }
        decisions = (args["decisions"] || []).map { |d| symbolize_keys(d) }

        project_path = ENV["CLAUDE_PROJECT_DIR"] || Dir.pwd
        occurred_at = Time.now.utc.iso8601

        searchable_text = build_searchable_text(entities, facts, decisions)
        content_item_id = create_synthetic_content_item(store, searchable_text, project_path, occurred_at)
        index_content_item(store, content_item_id, searchable_text)

        extraction = Distill::Extraction.new(
          entities: entities,
          facts: facts,
          decisions: decisions,
          signals: []
        )

        resolver = Resolve::Resolver.new(store)
        result = resolver.apply(
          extraction,
          content_item_id: content_item_id,
          occurred_at: occurred_at,
          project_path: project_path,
          scope: scope
        )

        {
          success: true,
          scope: scope,
          entities_created: result[:entities_created],
          facts_created: result[:facts_created],
          facts_superseded: result[:facts_superseded],
          conflicts_created: result[:conflicts_created]
        }
      end

      def build_searchable_text(entities, facts, decisions)
        parts = []
        entities.each { |e| parts << "#{e[:type]}: #{e[:name]}" }
        facts.each { |f| parts << "#{f[:subject]} #{f[:predicate]} #{f[:object]} #{f[:quote]}" }
        decisions.each { |d| parts << "#{d[:title]} #{d[:summary]}" }
        parts.join(" ").strip
      end

      def create_synthetic_content_item(store, text, project_path, occurred_at)
        text_hash = Digest::SHA256.hexdigest(text)
        store.upsert_content_item(
          source: "mcp_extraction",
          session_id: "mcp-#{Time.now.to_i}",
          transcript_path: nil,
          project_path: project_path,
          text_hash: text_hash,
          byte_len: text.bytesize,
          raw_text: text,
          occurred_at: occurred_at
        )
      end

      def index_content_item(store, content_item_id, text)
        fts = Index::LexicalFTS.new(store)
        fts.index_content_item(content_item_id, text)
      end

      def symbolize_keys(hash)
        hash.transform_keys(&:to_sym)
      end

      def get_store_for_scope(scope)
        if @manager
          @manager.store_for_scope(scope)
        else
          @legacy_store
        end
      end

      def db_stats(store)
        {
          exists: true,
          facts_total: store.facts.count,
          facts_active: store.facts.where(status: "active").count,
          content_items: store.content_items.count,
          open_conflicts: store.conflicts.where(status: "open").count,
          schema_version: store.schema_version
        }
      end
    end
  end
end
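
The hunk above matches the +409 entry for data/lib/claude_memory/mcp/tools.rb: an MCP tool surface over the memory store. A minimal usage sketch follows, assuming the gem's entry point (data/lib/claude_memory.rb) has been loaded and that `manager` stands in for a ClaudeMemory::Store::StoreManager built elsewhere in the gem; its constructor is not part of this hunk. Argument hashes use string keys, mirroring how Tools#call reads them.

require "claude_memory"  # assumed entry point that loads MCP::Tools and the stores

# `manager` is a placeholder for a ClaudeMemory::Store::StoreManager instance.
tools = ClaudeMemory::MCP::Tools.new(manager)

# Tool names advertised to an MCP client via tools/list.
puts tools.definitions.map { |d| d[:name] }

# Recall facts scoped to the current project.
result = tools.call("memory.recall", {"query" => "database", "limit" => 5, "scope" => "project"})

# Persist an extraction shaped like the memory.store_extraction inputSchema above.
tools.call("memory.store_extraction", {
  "scope" => "project",
  "entities" => [{"type" => "database", "name" => "PostgreSQL", "confidence" => 0.9}],
  "facts" => [{
    "subject" => "repo",
    "predicate" => "uses_database",
    "object" => "PostgreSQL",
    "strength" => "stated",
    "quote" => "we keep everything in Postgres"
  }]
})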
@@ -0,0 +1,201 @@
# frozen_string_literal: true

require "digest"
require "fileutils"

module ClaudeMemory
  class Publish
    RULES_DIR = ".claude/rules"
    GENERATED_FILE = "claude_memory.generated.md"

    def initialize(store)
      @store = store
    end

    def generate_snapshot(since: nil)
      facts = fetch_active_facts
      conflicts = @store.open_conflicts
      recent_supersessions = fetch_recent_supersessions(since)

      sections = []
      sections << generate_decisions_section(facts)
      sections << generate_conventions_section(facts)
      sections << generate_constraints_section(facts)
      sections << generate_conflicts_section(conflicts) if conflicts.any?
      sections << generate_supersessions_section(recent_supersessions) if recent_supersessions.any?

      header = <<~HEADER
        <!--
        This file is auto-generated by claude-memory.
        Do not edit manually - changes will be overwritten.
        Generated: #{Time.now.utc.iso8601}
        -->

        # Project Memory

      HEADER

      header + sections.compact.reject(&:empty?).join("\n")
    end

    def publish!(mode: :shared, granularity: :repo, since: nil)
      content = generate_snapshot(since: since)
      path = output_path(mode)

      FileUtils.mkdir_p(File.dirname(path))

      if should_write?(path, content)
        File.write(path, content)
        ensure_import_exists(mode, path)
        {status: :updated, path: path}
      else
        {status: :unchanged, path: path}
      end
    end

    private

    def output_path(mode)
      case mode
      when :shared
        File.join(RULES_DIR, GENERATED_FILE)
      when :local
        ".claude_memory.local.md"
      when :home
        project_name = File.basename(Dir.pwd)
        File.join(Dir.home, ".claude", "claude_memory", "#{project_name}.md")
      else
        File.join(RULES_DIR, GENERATED_FILE)
      end
    end

    def fetch_active_facts
      @store.facts
        .left_join(:entities, id: :subject_entity_id)
        .select(
          Sequel[:facts][:id],
          Sequel[:facts][:predicate],
          Sequel[:facts][:object_literal],
          Sequel[:facts][:status],
          Sequel[:facts][:confidence],
          Sequel[:facts][:created_at],
          Sequel[:entities][:canonical_name].as(:subject_name)
        )
        .where(Sequel[:facts][:status] => "active")
        .order(Sequel.desc(Sequel[:facts][:created_at]))
        .limit(100)
        .all
    end

    def fetch_recent_supersessions(since)
      return [] unless since

      @store.facts
        .left_join(:entities, id: :subject_entity_id)
        .select(
          Sequel[:facts][:id],
          Sequel[:facts][:predicate],
          Sequel[:facts][:object_literal],
          Sequel[:facts][:valid_to],
          Sequel[:entities][:canonical_name].as(:subject_name)
        )
        .where(Sequel[:facts][:status] => "superseded")
        .where { Sequel[:facts][:valid_to] >= since }
        .order(Sequel.desc(Sequel[:facts][:valid_to]))
        .limit(20)
        .all
    end

    def generate_decisions_section(facts)
      decisions = facts.select { |f| f[:predicate] == "decision" || f[:predicate]&.start_with?("decided_") }
      return "" if decisions.empty?

      lines = ["## Current Decisions\n"]
      decisions.each do |d|
        lines << "- #{d[:object_literal]}"
      end
      lines.join("\n") + "\n"
    end

    def generate_conventions_section(facts)
      conventions = facts.select { |f| f[:predicate] == "convention" || f[:predicate]&.include?("_convention") }
      return "" if conventions.empty?

      lines = ["## Conventions\n"]
      conventions.each do |c|
        lines << "- #{c[:object_literal]}"
      end
      lines.join("\n") + "\n"
    end

    def generate_constraints_section(facts)
      constraints = facts.select do |f|
        %w[uses_database uses_framework deployment_platform auth_method].include?(f[:predicate])
      end
      return "" if constraints.empty?

      lines = ["## Technical Constraints\n"]
      constraints.each do |c|
        lines << "- **#{humanize(c[:predicate])}**: #{c[:object_literal]}"
      end
      lines.join("\n") + "\n"
    end

    def generate_conflicts_section(conflicts)
      return "" if conflicts.empty?

      lines = ["## Open Conflicts\n"]
      lines << "The following facts are in conflict and need resolution:\n"
      conflicts.each do |c|
        lines << "- Conflict ##{c[:id]}: Fact #{c[:fact_a_id]} vs Fact #{c[:fact_b_id]}"
      end
      lines.join("\n") + "\n"
    end

    def generate_supersessions_section(supersessions)
      return "" if supersessions.empty?

      lines = ["## Recent Changes\n"]
      supersessions.each do |s|
        lines << "- [Superseded] #{s[:subject_name]}.#{s[:predicate]}: #{s[:object_literal]} (until #{s[:valid_to]})"
      end
      lines.join("\n") + "\n"
    end

    def should_write?(path, content)
      return true unless File.exist?(path)

      existing_hash = Digest::SHA256.file(path).hexdigest
      new_hash = Digest::SHA256.hexdigest(content)
      existing_hash != new_hash
    end

    def ensure_import_exists(mode, path)
      return if mode == :local

      claude_md = ".claude/CLAUDE.md"
      import_line = case mode
      when :shared
        "@#{RULES_DIR}/#{GENERATED_FILE}"
      when :home
        "@~/#{path.sub(Dir.home + "/", "")}"
      else
        "@#{path}"
      end

      if File.exist?(claude_md)
        content = File.read(claude_md)
        return if content.include?(import_line)

        File.write(claude_md, content + "\n#{import_line}\n")
      else
        FileUtils.mkdir_p(".claude")
        File.write(claude_md, "# Project Memory\n\n#{import_line}\n")
      end
    end

    def humanize(predicate)
      predicate.tr("_", " ").capitalize
    end
  end
end
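
This hunk matches the +201 entry for data/lib/claude_memory/publish.rb, which renders active facts into a generated markdown rules file and wires an @import line into .claude/CLAUDE.md. A small sketch of that flow, assuming `store` is a placeholder for one of the gem's stores (something exposing #facts and #open_conflicts; its construction is not shown in this hunk):

require "time"           # for Time#iso8601 used below
require "claude_memory"  # assumed entry point that loads ClaudeMemory::Publish

# `store` is a placeholder for a store exposing #facts and #open_conflicts,
# e.g. the gem's SQLite-backed store; its setup is outside this diff.
publisher = ClaudeMemory::Publish.new(store)

# Render the snapshot in memory, including supersessions from the last week.
puts publisher.generate_snapshot(since: (Time.now - 86400 * 7).utc.iso8601)

# Write .claude/rules/claude_memory.generated.md (shared mode) and append the
# @import line to .claude/CLAUDE.md if it is not already present.
result = publisher.publish!(mode: :shared)
puts result[:status]  # => :updated or :unchanged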