powermem 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- powermem/__init__.py +103 -0
- powermem/agent/__init__.py +35 -0
- powermem/agent/abstract/__init__.py +22 -0
- powermem/agent/abstract/collaboration.py +259 -0
- powermem/agent/abstract/context.py +187 -0
- powermem/agent/abstract/manager.py +232 -0
- powermem/agent/abstract/permission.py +217 -0
- powermem/agent/abstract/privacy.py +267 -0
- powermem/agent/abstract/scope.py +199 -0
- powermem/agent/agent.py +791 -0
- powermem/agent/components/__init__.py +18 -0
- powermem/agent/components/collaboration_coordinator.py +645 -0
- powermem/agent/components/permission_controller.py +586 -0
- powermem/agent/components/privacy_protector.py +767 -0
- powermem/agent/components/scope_controller.py +685 -0
- powermem/agent/factories/__init__.py +16 -0
- powermem/agent/factories/agent_factory.py +266 -0
- powermem/agent/factories/config_factory.py +308 -0
- powermem/agent/factories/memory_factory.py +229 -0
- powermem/agent/implementations/__init__.py +16 -0
- powermem/agent/implementations/hybrid.py +728 -0
- powermem/agent/implementations/multi_agent.py +1040 -0
- powermem/agent/implementations/multi_user.py +1020 -0
- powermem/agent/types.py +53 -0
- powermem/agent/wrappers/__init__.py +14 -0
- powermem/agent/wrappers/agent_memory_wrapper.py +427 -0
- powermem/agent/wrappers/compatibility_wrapper.py +520 -0
- powermem/config_loader.py +318 -0
- powermem/configs.py +249 -0
- powermem/core/__init__.py +19 -0
- powermem/core/async_memory.py +1493 -0
- powermem/core/audit.py +258 -0
- powermem/core/base.py +165 -0
- powermem/core/memory.py +1567 -0
- powermem/core/setup.py +162 -0
- powermem/core/telemetry.py +215 -0
- powermem/integrations/__init__.py +17 -0
- powermem/integrations/embeddings/__init__.py +13 -0
- powermem/integrations/embeddings/aws_bedrock.py +100 -0
- powermem/integrations/embeddings/azure_openai.py +55 -0
- powermem/integrations/embeddings/base.py +31 -0
- powermem/integrations/embeddings/config/base.py +132 -0
- powermem/integrations/embeddings/configs.py +31 -0
- powermem/integrations/embeddings/factory.py +48 -0
- powermem/integrations/embeddings/gemini.py +39 -0
- powermem/integrations/embeddings/huggingface.py +41 -0
- powermem/integrations/embeddings/langchain.py +35 -0
- powermem/integrations/embeddings/lmstudio.py +29 -0
- powermem/integrations/embeddings/mock.py +11 -0
- powermem/integrations/embeddings/ollama.py +53 -0
- powermem/integrations/embeddings/openai.py +49 -0
- powermem/integrations/embeddings/qwen.py +102 -0
- powermem/integrations/embeddings/together.py +31 -0
- powermem/integrations/embeddings/vertexai.py +54 -0
- powermem/integrations/llm/__init__.py +18 -0
- powermem/integrations/llm/anthropic.py +87 -0
- powermem/integrations/llm/base.py +132 -0
- powermem/integrations/llm/config/anthropic.py +56 -0
- powermem/integrations/llm/config/azure.py +56 -0
- powermem/integrations/llm/config/base.py +62 -0
- powermem/integrations/llm/config/deepseek.py +56 -0
- powermem/integrations/llm/config/ollama.py +56 -0
- powermem/integrations/llm/config/openai.py +79 -0
- powermem/integrations/llm/config/qwen.py +68 -0
- powermem/integrations/llm/config/qwen_asr.py +46 -0
- powermem/integrations/llm/config/vllm.py +56 -0
- powermem/integrations/llm/configs.py +26 -0
- powermem/integrations/llm/deepseek.py +106 -0
- powermem/integrations/llm/factory.py +118 -0
- powermem/integrations/llm/gemini.py +201 -0
- powermem/integrations/llm/langchain.py +65 -0
- powermem/integrations/llm/ollama.py +106 -0
- powermem/integrations/llm/openai.py +166 -0
- powermem/integrations/llm/openai_structured.py +80 -0
- powermem/integrations/llm/qwen.py +207 -0
- powermem/integrations/llm/qwen_asr.py +171 -0
- powermem/integrations/llm/vllm.py +106 -0
- powermem/integrations/rerank/__init__.py +20 -0
- powermem/integrations/rerank/base.py +43 -0
- powermem/integrations/rerank/config/__init__.py +7 -0
- powermem/integrations/rerank/config/base.py +27 -0
- powermem/integrations/rerank/configs.py +23 -0
- powermem/integrations/rerank/factory.py +68 -0
- powermem/integrations/rerank/qwen.py +159 -0
- powermem/intelligence/__init__.py +17 -0
- powermem/intelligence/ebbinghaus_algorithm.py +354 -0
- powermem/intelligence/importance_evaluator.py +361 -0
- powermem/intelligence/intelligent_memory_manager.py +284 -0
- powermem/intelligence/manager.py +148 -0
- powermem/intelligence/plugin.py +229 -0
- powermem/prompts/__init__.py +29 -0
- powermem/prompts/graph/graph_prompts.py +217 -0
- powermem/prompts/graph/graph_tools_prompts.py +469 -0
- powermem/prompts/importance_evaluation.py +246 -0
- powermem/prompts/intelligent_memory_prompts.py +163 -0
- powermem/prompts/templates.py +193 -0
- powermem/storage/__init__.py +14 -0
- powermem/storage/adapter.py +896 -0
- powermem/storage/base.py +109 -0
- powermem/storage/config/base.py +13 -0
- powermem/storage/config/oceanbase.py +58 -0
- powermem/storage/config/pgvector.py +52 -0
- powermem/storage/config/sqlite.py +27 -0
- powermem/storage/configs.py +159 -0
- powermem/storage/factory.py +59 -0
- powermem/storage/migration_manager.py +438 -0
- powermem/storage/oceanbase/__init__.py +8 -0
- powermem/storage/oceanbase/constants.py +162 -0
- powermem/storage/oceanbase/oceanbase.py +1384 -0
- powermem/storage/oceanbase/oceanbase_graph.py +1441 -0
- powermem/storage/pgvector/__init__.py +7 -0
- powermem/storage/pgvector/pgvector.py +420 -0
- powermem/storage/sqlite/__init__.py +0 -0
- powermem/storage/sqlite/sqlite.py +218 -0
- powermem/storage/sqlite/sqlite_vector_store.py +311 -0
- powermem/utils/__init__.py +35 -0
- powermem/utils/utils.py +605 -0
- powermem/version.py +23 -0
- powermem-0.1.0.dist-info/METADATA +187 -0
- powermem-0.1.0.dist-info/RECORD +123 -0
- powermem-0.1.0.dist-info/WHEEL +5 -0
- powermem-0.1.0.dist-info/licenses/LICENSE +206 -0
- powermem-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,354 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Ebbinghaus forgetting curve algorithm
|
|
3
|
+
|
|
4
|
+
This module implements the Ebbinghaus forgetting curve for memory management.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
import math
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(__name__)


class EbbinghausAlgorithm:
    """
    Implements Ebbinghaus forgetting curve algorithm for memory management.

    Computes retention decay (R = e^(-t/S)), builds spaced-repetition review
    schedules, and makes lifecycle decisions (promote / forget / archive)
    from importance scores, access counts, and memory age.
    """

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize Ebbinghaus algorithm.

        Args:
            config: Algorithm configuration. Recognized keys (defaults in
                parentheses): initial_retention (1.0), decay_rate (0.1),
                reinforcement_factor (0.3), working_threshold (0.3),
                short_term_threshold (0.6), long_term_threshold (0.8),
                review_intervals ([1, 6, 24, 72, 168], in hours).
        """
        self.config = config

        # Ebbinghaus curve parameters
        self.initial_retention = config.get("initial_retention", 1.0)
        self.decay_rate = config.get("decay_rate", 0.1)
        self.reinforcement_factor = config.get("reinforcement_factor", 0.3)

        # Memory type thresholds
        self.working_threshold = config.get("working_threshold", 0.3)
        self.short_term_threshold = config.get("short_term_threshold", 0.6)
        self.long_term_threshold = config.get("long_term_threshold", 0.8)

        # Time intervals (in hours)
        self.review_intervals = config.get("review_intervals", [1, 6, 24, 72, 168])

        logger.info("EbbinghausAlgorithm initialized")

    @staticmethod
    def _coerce_datetime(value) -> datetime:
        """
        Normalize a creation timestamp into a naive UTC datetime.

        Accepts a datetime (naive or timezone-aware), an ISO-8601 string
        (with or without a trailing 'Z' / UTC offset), or a falsy value
        (None, ""), which maps to the current UTC time.

        Bug fix: the original code parsed "...Z" strings into *aware*
        datetimes and then subtracted them from the naive datetime.utcnow(),
        raising TypeError; the broad except clauses swallowed it, so e.g.
        calculate_decay silently returned its 0.5 fallback for every
        timezone-aware timestamp.
        """
        if not value:
            return datetime.utcnow()
        if isinstance(value, str):
            value = datetime.fromisoformat(value.replace('Z', '+00:00'))
        if value.tzinfo is not None:
            # Convert aware -> naive UTC so arithmetic with utcnow() works.
            value = value.astimezone(timezone.utc).replace(tzinfo=None)
        return value

    def process_memory_metadata(
        self,
        content: str,
        importance_score: float,
        memory_type: str
    ) -> Dict[str, Any]:
        """
        Process memory using Ebbinghaus algorithm and return metadata.

        Args:
            content: Memory content (currently unused; kept for interface
                stability with callers).
            importance_score: Importance score, expected in [0, 1].
            memory_type: Type of memory ("working", "short_term", "long_term").

        Returns:
            Dictionary with "intelligence", "memory_management", and
            timestamp fields; on failure, a reduced dict carrying "error".
        """
        try:
            current_time = datetime.utcnow()

            # Initial retention scales linearly with importance.
            initial_retention = self.initial_retention * importance_score

            # Decay rate depends on the memory tier.
            decay_rate = self._get_decay_rate_for_type(memory_type)

            # Spaced-repetition schedule, tighter for important memories.
            review_schedule = self._generate_review_schedule(importance_score, current_time)

            # First review time; fall back to one hour out if schedule is empty.
            next_review = review_schedule[0] if review_schedule else current_time + timedelta(hours=1)

            intelligence_metadata = {
                # Ebbinghaus algorithm data
                "intelligence": {
                    "importance_score": importance_score,
                    "memory_type": memory_type,
                    "initial_retention": initial_retention,
                    "decay_rate": decay_rate,
                    "current_retention": initial_retention,
                    "next_review": next_review.isoformat(),
                    "review_schedule": [rt.isoformat() for rt in review_schedule],
                    "last_reviewed": current_time.isoformat(),
                    "review_count": 0,
                    "access_count": 0,
                    "reinforcement_factor": self.reinforcement_factor,
                },
                # Memory management flags
                "memory_management": {
                    "should_promote": False,
                    "should_forget": False,
                    "should_archive": False,
                    "is_active": True,
                },
                # Timestamps
                "created_at": current_time.isoformat(),
                "updated_at": current_time.isoformat(),
            }

            logger.debug(f"Generated intelligence metadata for type: {memory_type}, importance: {importance_score}")

            return intelligence_metadata

        except Exception as e:
            logger.error(f"Failed to process memory metadata: {e}")
            return {
                "intelligence": {
                    "importance_score": importance_score,
                    "memory_type": memory_type,
                    "error": str(e)
                }
            }

    def calculate_decay(self, created_at) -> float:
        """
        Calculate decay factor based on time elapsed.

        Args:
            created_at: When the memory was created (datetime object,
                ISO string, or None/"" meaning "now").

        Returns:
            Decay factor between 0 and 1 (0.5 fallback on unexpected errors).
        """
        try:
            created_at = self._coerce_datetime(created_at)

            hours_elapsed = (datetime.utcnow() - created_at).total_seconds() / 3600

            # Ebbinghaus forgetting curve: R = e^(-t/S),
            # where R is retention, t is time, S is strength
            # (here S = 24 * decay_rate, in hours).
            decay_factor = math.exp(-hours_elapsed / (24 * self.decay_rate))

            return max(decay_factor, 0.0)

        except Exception as e:
            logger.error(f"Failed to calculate decay: {e}")
            return 0.5

    def calculate_relevance(self, memory: Dict[str, Any], query: str) -> float:
        """
        Calculate relevance score for a memory given a query.

        Uses simple case-insensitive whole-word overlap: the fraction of
        query words that appear in the memory content.

        Args:
            memory: Memory data (reads the "content" key).
            query: Search query.

        Returns:
            Relevance score between 0 and 1 (0.0 for an empty query or on error).
        """
        try:
            content = memory.get("content", "").lower()
            query_lower = query.lower()

            # Simple keyword matching
            query_words = query_lower.split()
            content_words = content.split()

            matches = 0
            for word in query_words:
                if word in content_words:
                    matches += 1

            relevance_score = matches / len(query_words) if query_words else 0.0

            return min(relevance_score, 1.0)

        except Exception as e:
            logger.error(f"Failed to calculate relevance: {e}")
            return 0.0

    def should_promote(self, memory: Dict[str, Any]) -> bool:
        """
        Determine if a memory should be promoted to a higher tier.

        A memory is promoted if it was accessed at least 3 times, has
        survived for more than 24 hours, or its importance reaches the
        short-term threshold.

        Args:
            memory: Memory data (reads "access_count", "created_at",
                "importance_score").

        Returns:
            True if memory should be promoted (False on error).
        """
        try:
            # Frequently accessed memories get promoted.
            access_count = memory.get("access_count", 0)
            if access_count >= 3:
                return True

            # Memories that have survived for more than a day get promoted.
            # (The original comment said "check recency", but the logic
            # promotes *older* memories.)
            created_at = memory.get("created_at")
            if created_at:
                if datetime.utcnow() - self._coerce_datetime(created_at) > timedelta(hours=24):
                    return True

            # Sufficiently important memories get promoted regardless.
            importance = memory.get("importance_score", 0.5)
            if importance >= self.short_term_threshold:
                return True

            return False

        except Exception as e:
            logger.error(f"Failed to check promotion: {e}")
            return False

    def should_forget(self, memory: Dict[str, Any]) -> bool:
        """
        Determine if a memory should be forgotten.

        A memory is forgotten if its retention has decayed below the working
        threshold, or if it was never accessed and is older than a week.

        Args:
            memory: Memory data (reads "created_at", "access_count").

        Returns:
            True if memory should be forgotten (False on error).
        """
        try:
            created_at = memory.get("created_at")

            # Forget memories whose retention decayed below the working threshold.
            if created_at:
                if self.calculate_decay(created_at) < self.working_threshold:
                    return True

            # Forget never-accessed memories that are older than a week.
            access_count = memory.get("access_count", 0)
            if access_count == 0 and created_at:
                if datetime.utcnow() - self._coerce_datetime(created_at) > timedelta(days=7):
                    return True

            return False

        except Exception as e:
            logger.error(f"Failed to check forgetting: {e}")
            return False

    def should_archive(self, memory: Dict[str, Any]) -> bool:
        """
        Determine if a memory should be archived.

        A memory is archived if it is older than 30 days or its importance
        falls below the working threshold.

        Args:
            memory: Memory data (reads "created_at", "importance_score").

        Returns:
            True if memory should be archived (False on error).
        """
        try:
            # Archive memories older than 30 days.
            created_at = memory.get("created_at")
            if created_at:
                if datetime.utcnow() - self._coerce_datetime(created_at) > timedelta(days=30):
                    return True

            # Archive low-importance memories.
            importance = memory.get("importance_score", 0.5)
            if importance < self.working_threshold:
                return True

            return False

        except Exception as e:
            logger.error(f"Failed to check archiving: {e}")
            return False

    def get_review_schedule(self, memory: Dict[str, Any]) -> list:
        """
        Get review schedule for a memory based on Ebbinghaus curve.

        Args:
            memory: Memory data (reads "created_at", "importance_score").

        Returns:
            List of review datetimes ([] on error).
        """
        try:
            created_at = self._coerce_datetime(memory.get("created_at"))
            importance = memory.get("importance_score", 0.5)

            # Higher importance = shorter intervals (more frequent reviews).
            # NOTE(review): uses a 0.5 importance factor and no minimum clamp,
            # unlike _generate_review_schedule (0.3 factor, 0.5h floor) —
            # confirm whether the two were meant to match.
            adjusted_intervals = [
                interval * (1 - importance * 0.5) for interval in self.review_intervals
            ]

            return [created_at + timedelta(hours=interval) for interval in adjusted_intervals]

        except Exception as e:
            logger.error(f"Failed to get review schedule: {e}")
            return []

    def _get_decay_rate_for_type(self, memory_type: str) -> float:
        """Get decay rate based on memory type (unknown types use the base rate)."""
        decay_rates = {
            "working": self.decay_rate * 2.0,      # Faster decay for working memory
            "short_term": self.decay_rate * 1.5,   # Medium decay for short-term
            "long_term": self.decay_rate,          # Standard decay for long-term
        }
        return decay_rates.get(memory_type, self.decay_rate)

    def _generate_review_schedule(self, importance_score: float, created_at: datetime) -> List[datetime]:
        """Generate review schedule based on importance and Ebbinghaus curve."""
        try:
            # Higher importance = shorter intervals (more frequent reviews),
            # floored at 0.5 hours.
            adjusted_intervals = [
                max(interval * (1 - importance_score * 0.3), 0.5)
                for interval in self.review_intervals
            ]

            return [created_at + timedelta(hours=interval) for interval in adjusted_intervals]

        except Exception as e:
            logger.error(f"Failed to generate review schedule: {e}")
            return []
|