llmemory 0.1.1
This diff shows the content of package versions publicly released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- checksums.yaml +7 -0
- data/LICENSE.txt +21 -0
- data/README.md +193 -0
- data/lib/generators/llmemory/install/install_generator.rb +24 -0
- data/lib/generators/llmemory/install/templates/create_llmemory_tables.rb +73 -0
- data/lib/llmemory/configuration.rb +51 -0
- data/lib/llmemory/extractors/entity_relation_extractor.rb +74 -0
- data/lib/llmemory/extractors/fact_extractor.rb +74 -0
- data/lib/llmemory/extractors.rb +9 -0
- data/lib/llmemory/llm/anthropic.rb +48 -0
- data/lib/llmemory/llm/base.rb +17 -0
- data/lib/llmemory/llm/openai.rb +46 -0
- data/lib/llmemory/llm.rb +18 -0
- data/lib/llmemory/long_term/file_based/category.rb +22 -0
- data/lib/llmemory/long_term/file_based/item.rb +31 -0
- data/lib/llmemory/long_term/file_based/memory.rb +83 -0
- data/lib/llmemory/long_term/file_based/resource.rb +22 -0
- data/lib/llmemory/long_term/file_based/retrieval.rb +90 -0
- data/lib/llmemory/long_term/file_based/storage.rb +35 -0
- data/lib/llmemory/long_term/file_based/storages/active_record_models.rb +26 -0
- data/lib/llmemory/long_term/file_based/storages/active_record_storage.rb +144 -0
- data/lib/llmemory/long_term/file_based/storages/base.rb +71 -0
- data/lib/llmemory/long_term/file_based/storages/database_storage.rb +231 -0
- data/lib/llmemory/long_term/file_based/storages/file_storage.rb +180 -0
- data/lib/llmemory/long_term/file_based/storages/memory_storage.rb +100 -0
- data/lib/llmemory/long_term/file_based.rb +15 -0
- data/lib/llmemory/long_term/graph_based/conflict_resolver.rb +33 -0
- data/lib/llmemory/long_term/graph_based/edge.rb +49 -0
- data/lib/llmemory/long_term/graph_based/knowledge_graph.rb +114 -0
- data/lib/llmemory/long_term/graph_based/memory.rb +143 -0
- data/lib/llmemory/long_term/graph_based/node.rb +42 -0
- data/lib/llmemory/long_term/graph_based/storage.rb +24 -0
- data/lib/llmemory/long_term/graph_based/storages/active_record_models.rb +23 -0
- data/lib/llmemory/long_term/graph_based/storages/active_record_storage.rb +132 -0
- data/lib/llmemory/long_term/graph_based/storages/base.rb +39 -0
- data/lib/llmemory/long_term/graph_based/storages/memory_storage.rb +106 -0
- data/lib/llmemory/long_term/graph_based.rb +15 -0
- data/lib/llmemory/long_term.rb +9 -0
- data/lib/llmemory/maintenance/consolidator.rb +55 -0
- data/lib/llmemory/maintenance/reindexer.rb +27 -0
- data/lib/llmemory/maintenance/runner.rb +34 -0
- data/lib/llmemory/maintenance/summarizer.rb +57 -0
- data/lib/llmemory/maintenance.rb +8 -0
- data/lib/llmemory/memory.rb +96 -0
- data/lib/llmemory/retrieval/context_assembler.rb +53 -0
- data/lib/llmemory/retrieval/engine.rb +74 -0
- data/lib/llmemory/retrieval/temporal_ranker.rb +23 -0
- data/lib/llmemory/retrieval.rb +10 -0
- data/lib/llmemory/short_term/checkpoint.rb +47 -0
- data/lib/llmemory/short_term/stores/active_record_checkpoint.rb +14 -0
- data/lib/llmemory/short_term/stores/active_record_store.rb +58 -0
- data/lib/llmemory/short_term/stores/base.rb +21 -0
- data/lib/llmemory/short_term/stores/memory_store.rb +37 -0
- data/lib/llmemory/short_term/stores/postgres_store.rb +80 -0
- data/lib/llmemory/short_term/stores/redis_store.rb +54 -0
- data/lib/llmemory/short_term.rb +8 -0
- data/lib/llmemory/vector_store/base.rb +19 -0
- data/lib/llmemory/vector_store/memory_store.rb +53 -0
- data/lib/llmemory/vector_store/openai_embeddings.rb +49 -0
- data/lib/llmemory/vector_store.rb +10 -0
- data/lib/llmemory/version.rb +5 -0
- data/lib/llmemory.rb +19 -0
- metadata +163 -0

data/lib/llmemory/long_term/file_based/storages/database_storage.rb
@@ -0,0 +1,231 @@
# frozen_string_literal: true

require "json"
require "securerandom"
require_relative "base"

module Llmemory
  module LongTerm
    module FileBased
      module Storages
        class DatabaseStorage < Base
          def initialize(database_url: nil)
            @database_url = database_url || Llmemory.configuration.database_url
            @connection = nil
          end

          def save_resource(user_id, text)
            ensure_tables!
            id = "res_#{SecureRandom.hex(8)}"
            conn.exec_params(
              "INSERT INTO llmemory_resources (id, user_id, text, created_at) VALUES ($1, $2, $3, $4)",
              [id, user_id, text, Time.now.utc.iso8601]
            )
            id
          end

          def save_item(user_id, category:, content:, source_resource_id:)
            ensure_tables!
            id = "item_#{SecureRandom.hex(8)}"
            conn.exec_params(
              "INSERT INTO llmemory_items (id, user_id, category, content, source_resource_id, created_at) VALUES ($1, $2, $3, $4, $5, $6)",
              [id, user_id, category, content, source_resource_id, Time.now.utc.iso8601]
            )
            id
          end

          def load_category(user_id, category_name)
            ensure_tables!
            result = conn.exec_params(
              "SELECT content FROM llmemory_categories WHERE user_id = $1 AND category_name = $2",
              [user_id, category_name]
            )
            result.any? ? result.first["content"].to_s : ""
          end

          def save_category(user_id, category_name, content)
            ensure_tables!
            conn.exec_params(
              <<~SQL,
                INSERT INTO llmemory_categories (user_id, category_name, content, updated_at)
                VALUES ($1, $2, $3, $4)
                ON CONFLICT (user_id, category_name)
                DO UPDATE SET content = $3, updated_at = $4
              SQL
              [user_id, category_name, content, Time.now.utc.iso8601]
            )
            true
          end

          def list_categories(user_id)
            ensure_tables!
            conn.exec_params("SELECT category_name FROM llmemory_categories WHERE user_id = $1", [user_id])
                .map { |r| r["category_name"] }
          end

          def search_items(user_id, query)
            ensure_tables!
            pattern = "%#{conn.escape_string(query.to_s.downcase)}%"
            rows = conn.exec_params(
              "SELECT id, category, content, source_resource_id, created_at FROM llmemory_items WHERE user_id = $1 AND LOWER(content) LIKE $2",
              [user_id, pattern]
            )
            rows_to_items(rows)
          end

          def search_resources(user_id, query)
            ensure_tables!
            pattern = "%#{conn.escape_string(query.to_s.downcase)}%"
            rows = conn.exec_params(
              "SELECT id, text, created_at FROM llmemory_resources WHERE user_id = $1 AND LOWER(text) LIKE $2",
              [user_id, pattern]
            )
            rows_to_resources(rows)
          end

          def get_resources_since(user_id, hours:)
            ensure_tables!
            cutoff = (Time.now - (hours * 3600)).utc.iso8601
            rows = conn.exec_params(
              "SELECT id, text, created_at FROM llmemory_resources WHERE user_id = $1 AND created_at >= $2 ORDER BY created_at",
              [user_id, cutoff]
            )
            rows_to_resources(rows)
          end

          def get_items_older_than(user_id, days:)
            ensure_tables!
            cutoff = (Time.now - (days * 86400)).utc.iso8601
            rows = conn.exec_params(
              "SELECT id, category, content, source_resource_id, created_at FROM llmemory_items WHERE user_id = $1 AND created_at < $2 ORDER BY created_at",
              [user_id, cutoff]
            )
            rows_to_items(rows)
          end

          def get_all_items(user_id)
            ensure_tables!
            rows = conn.exec_params(
              "SELECT id, category, content, source_resource_id, created_at FROM llmemory_items WHERE user_id = $1 ORDER BY created_at",
              [user_id]
            )
            rows_to_items(rows)
          end

          def get_all_resources(user_id)
            ensure_tables!
            rows = conn.exec_params(
              "SELECT id, text, created_at FROM llmemory_resources WHERE user_id = $1 ORDER BY created_at",
              [user_id]
            )
            rows_to_resources(rows)
          end

          def get_items_since(user_id, hours:)
            ensure_tables!
            cutoff = (Time.now - (hours * 3600)).utc.iso8601
            rows = conn.exec_params(
              "SELECT id, category, content, source_resource_id, created_at FROM llmemory_items WHERE user_id = $1 AND created_at >= $2 ORDER BY created_at",
              [user_id, cutoff]
            )
            rows_to_items(rows)
          end

          def replace_items(user_id, ids_to_remove, merged_item)
            ensure_tables!
            ids_to_remove.each do |id|
              conn.exec_params("DELETE FROM llmemory_items WHERE user_id = $1 AND id = $2", [user_id, id])
            end
            created_at = merged_item[:created_at] || Time.now
            created_at = created_at.utc.iso8601 if created_at.respond_to?(:utc)
            id = "item_#{SecureRandom.hex(8)}"
            conn.exec_params(
              "INSERT INTO llmemory_items (id, user_id, category, content, source_resource_id, created_at) VALUES ($1, $2, $3, $4, $5, $6)",
              [
                id,
                user_id,
                merged_item[:category],
                merged_item[:content],
                merged_item[:source_resource_id],
                created_at
              ]
            )
          end

          def archive_items(user_id, item_ids)
            ensure_tables!
            item_ids.each { |id| conn.exec_params("DELETE FROM llmemory_items WHERE user_id = $1 AND id = $2", [user_id, id]) }
          end

          def archive_resources(user_id, resource_ids)
            ensure_tables!
            resource_ids.each { |id| conn.exec_params("DELETE FROM llmemory_resources WHERE user_id = $1 AND id = $2", [user_id, id]) }
          end

          private

          def conn
            @connection ||= begin
              require "pg"
              PG.connect(@database_url)
            end
          end

          def ensure_tables!
            conn.exec(<<~SQL)
              CREATE TABLE IF NOT EXISTS llmemory_resources (
                id TEXT NOT NULL PRIMARY KEY,
                user_id TEXT NOT NULL,
                text TEXT NOT NULL,
                created_at TIMESTAMPTZ NOT NULL
              );
              CREATE INDEX IF NOT EXISTS idx_llmemory_resources_user_id ON llmemory_resources(user_id);
            SQL
            conn.exec(<<~SQL)
              CREATE TABLE IF NOT EXISTS llmemory_items (
                id TEXT NOT NULL PRIMARY KEY,
                user_id TEXT NOT NULL,
                category TEXT NOT NULL,
                content TEXT NOT NULL,
                source_resource_id TEXT,
                created_at TIMESTAMPTZ NOT NULL
              );
              CREATE INDEX IF NOT EXISTS idx_llmemory_items_user_id ON llmemory_items(user_id);
            SQL
            conn.exec(<<~SQL)
              CREATE TABLE IF NOT EXISTS llmemory_categories (
                user_id TEXT NOT NULL,
                category_name TEXT NOT NULL,
                content TEXT NOT NULL,
                updated_at TIMESTAMPTZ NOT NULL,
                PRIMARY KEY (user_id, category_name)
              );
            SQL
          end

          def rows_to_items(rows)
            rows.map do |r|
              {
                id: r["id"],
                category: r["category"],
                content: r["content"],
                source_resource_id: r["source_resource_id"],
                created_at: Time.parse(r["created_at"])
              }
            end
          end

          def rows_to_resources(rows)
            rows.map do |r|
              {
                id: r["id"],
                text: r["text"],
                created_at: Time.parse(r["created_at"])
              }
            end
          end
        end
      end
    end
  end
end
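
The class above is the Postgres-backed storage behind the file-based long-term memory. The snippet below is not part of the package; it is a minimal usage sketch, assuming a reachable Postgres instance, the `pg` gem, and that requiring `llmemory` (see data/lib/llmemory.rb in the file list) makes the storage classes available. The connection URL and the user/content values are placeholders.

```ruby
require "llmemory" # assumed entry point

storage = Llmemory::LongTerm::FileBased::Storages::DatabaseStorage.new(
  database_url: "postgres://localhost/llmemory_dev" # placeholder URL
)

# The first call creates the llmemory_* tables via ensure_tables!, then inserts a row.
res_id = storage.save_resource("user_1", "Met Alice at the Berlin meetup.")
storage.save_item("user_1", category: "contacts",
                            content: "Alice - met at Berlin meetup",
                            source_resource_id: res_id)

storage.search_items("user_1", "alice")          # LIKE search on lower-cased content
storage.get_resources_since("user_1", hours: 24) # ISO 8601 string comparison on created_at
```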

data/lib/llmemory/long_term/file_based/storages/file_storage.rb
@@ -0,0 +1,180 @@
# frozen_string_literal: true

require "fileutils"
require "json"
require_relative "base"

module Llmemory
  module LongTerm
    module FileBased
      module Storages
        class FileStorage < Base
          def initialize(base_path: nil)
            @base_path = base_path || Llmemory.configuration.long_term_storage_path || "./llmemory_data"
            @base_path = File.expand_path(@base_path)
          end

          def save_resource(user_id, text)
            ensure_user_dir(user_id)
            seq = next_seq(user_id, "resource_id_seq")
            id = "res_#{seq}"
            path = resource_path(user_id, id)
            data = { text: text, created_at: Time.now.iso8601 }
            File.write(path, JSON.generate(data))
            id
          end

          def save_item(user_id, category:, content:, source_resource_id:)
            ensure_user_dir(user_id)
            seq = next_seq(user_id, "item_id_seq")
            id = "item_#{seq}"
            path = item_path(user_id, id)
            data = {
              id: id,
              category: category,
              content: content,
              source_resource_id: source_resource_id,
              created_at: Time.now.iso8601
            }
            File.write(path, JSON.generate(data))
            id
          end

          def load_category(user_id, category_name)
            path = category_path(user_id, category_name)
            return "" unless File.file?(path)
            File.read(path)
          end

          def save_category(user_id, category_name, content)
            ensure_user_dir(user_id, "categories")
            path = category_path(user_id, category_name)
            File.write(path, content)
            true
          end

          def list_categories(user_id)
            dir = user_path(user_id, "categories")
            return [] unless Dir.exist?(dir)
            Dir.children(dir).select { |f| f.end_with?(".md") }.map { |f| File.basename(f, ".md") }
          end

          def search_items(user_id, query)
            query_lower = query.downcase
            get_all_items(user_id).select { |i| (i[:content] || i["content"]).to_s.downcase.include?(query_lower) }
          end

          def search_resources(user_id, query)
            query_lower = query.downcase
            get_all_resources(user_id).select { |r| (r[:text] || r["text"]).to_s.downcase.include?(query_lower) }
          end

          def get_resources_since(user_id, hours:)
            cutoff = Time.now - (hours * 3600)
            get_all_resources(user_id).select { |r| parse_time(r[:created_at] || r["created_at"]) >= cutoff }
          end

          def get_items_older_than(user_id, days:)
            cutoff = Time.now - (days * 86400)
            get_all_items(user_id).select { |i| parse_time(i[:created_at] || i["created_at"]) < cutoff }
          end

          def get_all_items(user_id)
            dir = user_path(user_id, "items")
            return [] unless Dir.exist?(dir)
            Dir.children(dir).select { |f| f.end_with?(".json") }.map do |f|
              data = JSON.parse(File.read(File.join(dir, f)), symbolize_names: true)
              data[:created_at] = parse_time(data[:created_at])
              data
            end.sort_by { |i| i[:created_at] }
          end

          def get_all_resources(user_id)
            dir = user_path(user_id, "resources")
            return [] unless Dir.exist?(dir)
            Dir.children(dir).select { |f| f.end_with?(".json") }.map do |f|
              data = JSON.parse(File.read(File.join(dir, f)), symbolize_names: true)
              id = File.basename(f, ".json")
              data[:id] = id
              data[:created_at] = parse_time(data[:created_at])
              data
            end.sort_by { |r| r[:created_at] }
          end

          def get_items_since(user_id, hours:)
            cutoff = Time.now - (hours * 3600)
            get_all_items(user_id).select { |i| parse_time(i[:created_at]) >= cutoff }
          end

          def replace_items(user_id, ids_to_remove, merged_item)
            ids_to_remove.each { |id| File.delete(item_path(user_id, id)) if File.file?(item_path(user_id, id)) }
            merged_item = merged_item.merge(created_at: Time.now) unless merged_item.key?(:created_at)
            seq = next_seq(user_id, "item_id_seq")
            id = "item_#{seq}"
            path = item_path(user_id, id)
            data = merged_item.merge(id: id).transform_values { |v| v.respond_to?(:iso8601) ? v.iso8601 : v }
            File.write(path, JSON.generate(data))
          end

          def archive_items(user_id, item_ids)
            item_ids.each { |id| File.delete(item_path(user_id, id)) if File.file?(item_path(user_id, id)) }
          end

          def archive_resources(user_id, resource_ids)
            resource_ids.each { |id| File.delete(resource_path(user_id, id)) if File.file?(resource_path(user_id, id)) }
          end

          private

          def user_path(user_id, *parts)
            safe = user_id.to_s.gsub(%r{[^\w\-.]}, "_")
            File.join(@base_path, safe, *parts)
          end

          def resource_path(user_id, id)
            ensure_user_dir(user_id, "resources")
            File.join(user_path(user_id, "resources"), "#{id}.json")
          end

          def item_path(user_id, id)
            ensure_user_dir(user_id, "items")
            File.join(user_path(user_id, "items"), "#{id}.json")
          end

          def category_path(user_id, category_name)
            safe = category_name.to_s.gsub(%r{[^\w\-.]}, "_")
            File.join(user_path(user_id, "categories"), "#{safe}.md")
          end

          def ensure_user_dir(user_id, *subdirs)
            dir = user_path(user_id, *subdirs)
            FileUtils.mkdir_p(dir)
          end

          def meta_path(user_id)
            File.join(user_path(user_id), "meta.json")
          end

          def next_seq(user_id, key)
            ensure_user_dir(user_id)
            path = meta_path(user_id)
            meta = if File.file?(path)
              JSON.parse(File.read(path))
            else
              {}
            end
            meta[key] = (meta[key] || 0) + 1
            File.write(path, JSON.generate(meta))
            meta[key]
          end

          def parse_time(val)
            return val if val.is_a?(Time)
            return Time.parse(val.to_s) if val
            Time.now
          end
        end
      end
    end
  end
end
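
FileStorage is the on-disk counterpart of the same storage interface. The snippet below is not part of the package; it sketches the directory layout the class writes, inferred from user_path, resource_path, category_path and meta_path above. The base path and values are placeholders, and the requires are assumptions.

```ruby
require "llmemory" # assumed entry point
require "time"     # Time#iso8601, used by save_resource/save_item, comes from the stdlib "time" extension

storage = Llmemory::LongTerm::FileBased::Storages::FileStorage.new(base_path: "./llmemory_data")
storage.save_resource("user_1", "Prefers dark mode.")
storage.save_category("user_1", "preferences", "- dark mode\n")
storage.list_categories("user_1") # => ["preferences"]

# Resulting layout (one directory per sanitised user id):
#
#   llmemory_data/
#     user_1/
#       meta.json                  # sequence counters, e.g. {"resource_id_seq": 1}
#       resources/res_1.json       # {"text": "...", "created_at": "..."}
#       categories/preferences.md  # raw category content
#
# items/item_<n>.json files appear alongside these once save_item is called.
```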

data/lib/llmemory/long_term/file_based/storages/memory_storage.rb
@@ -0,0 +1,100 @@
# frozen_string_literal: true

require_relative "base"

module Llmemory
  module LongTerm
    module FileBased
      module Storages
        class MemoryStorage < Base
          def initialize
            @resources = Hash.new { |h, k| h[k] = [] }
            @items = Hash.new { |h, k| h[k] = [] }
            @categories = Hash.new { |h, k| h[k] = {} }
            @resource_id_seq = 0
            @item_id_seq = 0
          end

          def save_resource(user_id, text)
            @resource_id_seq += 1
            id = "res_#{@resource_id_seq}"
            @resources[user_id] << { id: id, text: text, created_at: Time.now }
            id
          end

          def save_item(user_id, category:, content:, source_resource_id:)
            @item_id_seq += 1
            id = "item_#{@item_id_seq}"
            @items[user_id] << {
              id: id,
              category: category,
              content: content,
              source_resource_id: source_resource_id,
              created_at: Time.now
            }
            id
          end

          def load_category(user_id, category_name)
            @categories[user_id][category_name].to_s
          end

          def save_category(user_id, category_name, content)
            @categories[user_id][category_name] = content
            true
          end

          def list_categories(user_id)
            @categories[user_id].keys
          end

          def search_items(user_id, query)
            query_lower = query.downcase
            @items[user_id].select { |i| i[:content].to_s.downcase.include?(query_lower) }
          end

          def search_resources(user_id, query)
            query_lower = query.downcase
            @resources[user_id].select { |r| r[:text].to_s.downcase.include?(query_lower) }
          end

          def get_resources_since(user_id, hours:)
            cutoff = Time.now - (hours * 3600)
            @resources[user_id].select { |r| r[:created_at] >= cutoff }
          end

          def get_items_older_than(user_id, days:)
            cutoff = Time.now - (days * 86400)
            @items[user_id].select { |i| i[:created_at] < cutoff }
          end

          def get_all_items(user_id)
            @items[user_id].dup
          end

          def get_all_resources(user_id)
            @resources[user_id].dup
          end

          def get_items_since(user_id, hours:)
            cutoff = Time.now - (hours * 3600)
            @items[user_id].select { |i| i[:created_at] >= cutoff }
          end

          def replace_items(user_id, ids_to_remove, merged_item)
            @items[user_id].reject! { |i| ids_to_remove.include?(i[:id]) }
            @items[user_id] << merged_item.merge(created_at: Time.now)
          end

          def archive_items(user_id, item_ids)
            @items[user_id].reject! { |i| item_ids.include?(i[:id]) }
          end

          def archive_resources(user_id, resource_ids)
            @resources[user_id].reject! { |r| resource_ids.include?(r[:id]) }
          end
        end
      end
    end
  end
end
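
MemoryStorage completes the trio of backends behind the file-based memory; all three expose the same interface (save_resource, save_item, search_items, archive_items, and so on), so they are meant to be interchangeable. The snippet below is not part of the package; it is a small sketch using the in-memory backend, which needs neither Postgres nor a filesystem and is the natural choice for tests, assuming the gem is loaded as in the earlier sketches.

```ruby
require "llmemory" # assumed entry point

storage = Llmemory::LongTerm::FileBased::Storages::MemoryStorage.new
id = storage.save_item("user_1", category: "facts",
                                 content: "Speaks German",
                                 source_resource_id: nil)

storage.get_all_items("user_1").map { |i| i[:content] } # => ["Speaks German"]
storage.archive_items("user_1", [id])                   # in this backend, "archive" simply removes
storage.get_all_items("user_1")                         # => []
```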

data/lib/llmemory/long_term/file_based.rb
@@ -0,0 +1,15 @@
# frozen_string_literal: true

require_relative "file_based/resource"
require_relative "file_based/item"
require_relative "file_based/category"
require_relative "file_based/storage"
require_relative "file_based/memory"
require_relative "file_based/retrieval"

module Llmemory
  module LongTerm
    module FileBased
    end
  end
end

data/lib/llmemory/long_term/graph_based/conflict_resolver.rb
@@ -0,0 +1,33 @@
# frozen_string_literal: true

require_relative "edge"

module Llmemory
  module LongTerm
    module GraphBased
      class ConflictResolver
        EXCLUSIVE_PREDICATES = %w[works_at lives_in current_job current_city employer residence].freeze

        def initialize(knowledge_graph)
          @graph = knowledge_graph
        end

        def resolve(new_edge)
          return [] unless exclusive_predicate?(new_edge.predicate)

          subject_id = new_edge.subject_id
          existing = @graph.find_edges(subject: subject_id, predicate: new_edge.predicate, include_archived: false)
          to_archive = existing.reject { |e| e.object_id == new_edge.object_id }
          to_archive.each do |e|
            @graph.archive_edge(e.id, reason: "replaced by #{new_edge.object_id}")
          end
          to_archive.map(&:id)
        end

        def exclusive_predicate?(predicate)
          EXCLUSIVE_PREDICATES.include?(predicate.to_s.downcase)
        end
      end
    end
  end
end
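
ConflictResolver enforces single-valued ("exclusive") predicates such as works_at or lives_in: when a new edge with one of those predicates arrives, any existing non-archived edge with the same subject and predicate but a different object is archived. The snippet below is not part of the package; it sketches that behaviour with a throwaway stand-in for the real KnowledgeGraph (knowledge_graph.rb is listed above but not shown in this excerpt). Only find_edges and archive_edge are stubbed, with the signatures the resolver actually calls.

```ruby
require "llmemory" # assumed entry point

Edge = Llmemory::LongTerm::GraphBased::Edge

# Hypothetical, minimal stand-in for KnowledgeGraph: just enough surface for ConflictResolver.
class StubGraph
  def initialize(edges)
    @edges = edges
  end

  def find_edges(subject:, predicate:, include_archived: false)
    @edges.select do |e|
      e.subject_id == subject && e.predicate == predicate && (include_archived || !e.archived?)
    end
  end

  def archive_edge(id, reason:)
    edge = @edges.find { |e| e.id == id }
    edge.archived_at = Time.now if edge
  end
end

old_edge = Edge.new(id: "edge_1", subject_id: "person:alice",
                    predicate: "works_at", object_id: "org:acme")
resolver = Llmemory::LongTerm::GraphBased::ConflictResolver.new(StubGraph.new([old_edge]))

new_edge = Edge.new(id: "edge_2", subject_id: "person:alice",
                    predicate: "works_at", object_id: "org:initech")

resolver.resolve(new_edge)              # => ["edge_1"]  (the old employer edge is archived)
resolver.exclusive_predicate?("likes")  # => false       (non-exclusive predicates are left alone)
```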

data/lib/llmemory/long_term/graph_based/edge.rb
@@ -0,0 +1,49 @@
# frozen_string_literal: true

module Llmemory
  module LongTerm
    module GraphBased
      Edge = Struct.new(
        :id,
        :user_id,
        :subject_id,
        :predicate,
        :object_id,
        :properties,
        :created_at,
        :archived_at,
        keyword_init: true
      ) do
        def self.from_h(hash)
          new(
            id: hash[:id] || hash["id"],
            user_id: hash[:user_id] || hash["user_id"],
            subject_id: hash[:subject_id] || hash["subject_id"],
            predicate: (hash[:predicate] || hash["predicate"]).to_s,
            object_id: hash[:object_id] || hash["object_id"],
            properties: hash[:properties] || hash["properties"] || {},
            created_at: hash[:created_at] || hash["created_at"],
            archived_at: hash[:archived_at] || hash["archived_at"]
          )
        end

        def archived?
          !archived_at.nil?
        end

        def to_h
          {
            id: id,
            user_id: user_id,
            subject_id: subject_id,
            predicate: predicate,
            object_id: object_id,
            properties: properties || {},
            created_at: created_at,
            archived_at: archived_at
          }
        end
      end
    end
  end
end
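
The snippet below is not part of the package; it is a short sketch of the Edge struct's conveniences, assuming the gem is loaded as in the earlier sketches. from_h accepts either symbol or string keys, which matters when edges are rehydrated from storage rows.

```ruby
require "llmemory" # assumed entry point

edge = Llmemory::LongTerm::GraphBased::Edge.from_h(
  {
    "id" => "edge_1", "user_id" => "user_1",
    "subject_id" => "person:alice", "predicate" => :lives_in, "object_id" => "city:berlin"
  }
)

edge.predicate          # => "lives_in"  (from_h normalises the predicate to a String)
edge.archived?          # => false       (archived_at was never set)
edge.to_h[:properties]  # => {}          (defaults to an empty hash when not supplied)
```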