mem-llm 1.0.0__py3-none-any.whl → 1.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of mem-llm might be problematic.
- {memory_llm → mem_llm}/__init__.py +3 -2
- mem_llm/config_from_docs.py +180 -0
- {memory_llm → mem_llm}/mem_agent.py +12 -4
- {memory_llm → mem_llm}/memory_db.py +5 -0
- {memory_llm → mem_llm}/memory_manager.py +5 -0
- {mem_llm-1.0.0.dist-info → mem_llm-1.0.2.dist-info}/METADATA +5 -5
- mem_llm-1.0.2.dist-info/RECORD +15 -0
- mem_llm-1.0.2.dist-info/top_level.txt +1 -0
- mem_llm-1.0.0.dist-info/RECORD +0 -14
- mem_llm-1.0.0.dist-info/top_level.txt +0 -1
- {memory_llm → mem_llm}/config.yaml.example +0 -0
- {memory_llm → mem_llm}/config_manager.py +0 -0
- {memory_llm → mem_llm}/knowledge_loader.py +0 -0
- {memory_llm → mem_llm}/llm_client.py +0 -0
- {memory_llm → mem_llm}/memory_tools.py +0 -0
- {memory_llm → mem_llm}/prompt_templates.py +0 -0
- {mem_llm-1.0.0.dist-info → mem_llm-1.0.2.dist-info}/WHEEL +0 -0
{memory_llm → mem_llm}/__init__.py
@@ -19,11 +19,12 @@ try:
     from .memory_db import SQLMemoryManager
     from .prompt_templates import prompt_manager
     from .config_manager import get_config
-
+    from .config_from_docs import create_config_from_document
+    __all_pro__ = ["SQLMemoryManager", "prompt_manager", "get_config", "create_config_from_document"]
 except ImportError:
     __all_pro__ = []

-__version__ = "1.0.
+__version__ = "1.0.2"
 __author__ = "C. Emre Karataş"

 __all__ = [
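Net effect of this hunk: the version string is bumped to 1.0.2 and the new document-to-config helper is re-exported from the package root, guarded by the same try/except as the other optional imports. A minimal sketch of what that exposes, assuming mem-llm 1.0.2 is installed and the guarded imports succeed:

```python
# Minimal sketch, assuming mem-llm 1.0.2 is installed and the guarded imports above succeeded
import mem_llm

print(mem_llm.__version__)                            # "1.0.2"
print(callable(mem_llm.create_config_from_document))  # True when config_from_docs imported cleanly
```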
mem_llm/config_from_docs.py
@@ -0,0 +1,180 @@
+"""
+Config Generator from Documents (PDF, DOCX, TXT)
+Automatically creates config.yaml from business documents
+"""
+
+import os
+from typing import Optional, Dict, Any
+import yaml
+
+
+def extract_text_from_file(file_path: str) -> str:
+    """
+    Extract text from PDF, DOCX, or TXT files
+
+    Args:
+        file_path: Path to document
+
+    Returns:
+        Extracted text
+    """
+    file_ext = os.path.splitext(file_path)[1].lower()
+
+    if file_ext == '.txt':
+        with open(file_path, 'r', encoding='utf-8') as f:
+            return f.read()
+
+    elif file_ext == '.pdf':
+        try:
+            import PyPDF2
+            text = []
+            with open(file_path, 'rb') as f:
+                reader = PyPDF2.PdfReader(f)
+                for page in reader.pages:
+                    text.append(page.extract_text())
+            return '\n'.join(text)
+        except ImportError:
+            return "⚠️ PyPDF2 not installed. Run: pip install PyPDF2"
+
+    elif file_ext in ['.docx', '.doc']:
+        try:
+            import docx
+            doc = docx.Document(file_path)
+            text = []
+            for paragraph in doc.paragraphs:
+                text.append(paragraph.text)
+            return '\n'.join(text)
+        except ImportError:
+            return "⚠️ python-docx not installed. Run: pip install python-docx"
+
+    else:
+        return f"⚠️ Unsupported file format: {file_ext}"
+
+
+def generate_config_from_text(text: str, company_name: Optional[str] = None) -> Dict[str, Any]:
+    """
+    Generate config.yaml structure from text
+
+    Args:
+        text: Extracted text from document
+        company_name: Company name (optional)
+
+    Returns:
+        Config dictionary
+    """
+    # Simple config template
+    config = {
+        "usage_mode": "business",  # or "personal"
+
+        "llm": {
+            "model": "granite4:tiny-h",
+            "temperature": 0.3,
+            "max_tokens": 300,
+            "ollama_url": "http://localhost:11434"
+        },
+
+        "memory": {
+            "use_sql": True,
+            "db_path": "memories.db",
+            "json_dir": "memories"
+        },
+
+        "response": {
+            "use_knowledge_base": True,
+            "recent_conversations_limit": 5
+        },
+
+        "business": {
+            "company_name": company_name or "Your Company",
+            "industry": "Technology",
+            "founded_year": "2024"
+        },
+
+        "knowledge_base": {
+            "auto_load": True,
+            "search_limit": 5
+        },
+
+        "logging": {
+            "level": "INFO",
+            "file": "mem_agent.log"
+        }
+    }
+
+    # Try to extract company name from text if not provided
+    if not company_name:
+        lines = text.split('\n')[:10]  # First 10 lines
+        for line in lines:
+            if any(keyword in line.lower() for keyword in ['company', 'corp', 'inc', 'ltd']):
+                config["business"]["company_name"] = line.strip()[:50]
+                break
+
+    return config
+
+
+def create_config_from_document(
+    doc_path: str,
+    output_path: str = "config.yaml",
+    company_name: Optional[str] = None
+) -> str:
+    """
+    Create config.yaml from a business document
+
+    Args:
+        doc_path: Path to PDF/DOCX/TXT document
+        output_path: Output config.yaml path
+        company_name: Company name (optional)
+
+    Returns:
+        Success message
+    """
+    if not os.path.exists(doc_path):
+        return f"❌ File not found: {doc_path}"
+
+    # Extract text
+    print(f"📄 Reading document: {doc_path}")
+    text = extract_text_from_file(doc_path)
+
+    if text.startswith("⚠️"):
+        return text  # Error message
+
+    print(f"✅ Extracted {len(text)} characters")
+
+    # Generate config
+    config = generate_config_from_text(text, company_name)
+
+    # Save to YAML
+    with open(output_path, 'w', encoding='utf-8') as f:
+        yaml.dump(config, f, default_flow_style=False, allow_unicode=True)
+
+    print(f"✅ Config created: {output_path}")
+    print(f"📌 Company: {config['business']['company_name']}")
+
+    return f"✅ Config successfully created at {output_path}"
+
+
+# Simple CLI
+if __name__ == "__main__":
+    import sys
+
+    if len(sys.argv) < 2:
+        print("""
+🔧 Config Generator from Documents
+
+Usage:
+    python -m mem_llm.config_from_docs <document_path> [output_path] [company_name]
+
+Examples:
+    python -m mem_llm.config_from_docs company_info.pdf
+    python -m mem_llm.config_from_docs business.docx my_config.yaml "Acme Corp"
+    python -m mem_llm.config_from_docs info.txt
+        """)
+        sys.exit(1)
+
+    doc_path = sys.argv[1]
+    output_path = sys.argv[2] if len(sys.argv) > 2 else "config.yaml"
+    company_name = sys.argv[3] if len(sys.argv) > 3 else None
+
+    result = create_config_from_document(doc_path, output_path, company_name)
+    print(result)
+
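The new module can be driven either through the CLI at the bottom of the file or programmatically. A minimal sketch of the programmatic path, assuming a plain-text company_info.txt exists in the working directory (PDF and DOCX inputs additionally require PyPDF2 or python-docx); the file name and company name are illustrative only:

```python
# Minimal usage sketch; the file name and company name below are illustrative only
from mem_llm.config_from_docs import create_config_from_document

result = create_config_from_document(
    doc_path="company_info.txt",   # TXT needs no extra dependency; PDF/DOCX need PyPDF2/python-docx
    output_path="config.yaml",
    company_name="Acme Corp",      # optional; otherwise guessed from the document's first 10 lines
)
print(result)  # "✅ Config successfully created at config.yaml" on success
```

Note that generate_config_from_text only uses the extracted text to guess business.company_name; the rest of the written config.yaml is the fixed template shown above, serialized with yaml.dump.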
{memory_llm → mem_llm}/mem_agent.py
@@ -111,6 +111,8 @@ class MemAgent:
         self.logger.info(f"JSON memory system active: {json_dir}")

         # LLM client
+        self.model = model  # Store model name
+        self.use_sql = use_sql  # Store SQL usage flag
         self.llm = OllamaClient(model, ollama_url)
         self.logger.info(f"LLM client ready: {model}")

@@ -217,7 +219,12 @@ class MemAgent:
             self.logger.info(f"Prompt template loaded: {template_name} (Mode: {self.usage_mode})")
         except Exception as e:
             self.logger.error(f"Prompt template loading error: {e}")
-
+            # Simple, short and effective default prompt
+            self.current_system_prompt = """You are a helpful AI assistant. Be concise and direct.
+- Give short, clear answers (2-3 sentences max)
+- Only use information from conversation history
+- If you don't know something, say so
+- Don't make assumptions or hallucinate"""

     def check_setup(self) -> Dict[str, Any]:
         """Check system setup"""
@@ -330,7 +337,8 @@ class MemAgent:
             recent_limit = self.config.get("response.recent_conversations_limit", 5) if hasattr(self, 'config') and self.config else 5
             recent_convs = self.memory.get_recent_conversations(user_id, recent_limit)

-
+            # Add conversations in chronological order (oldest first)
+            for conv in recent_convs:
                 messages.append({"role": "user", "content": conv.get('user_message', '')})
                 messages.append({"role": "assistant", "content": conv.get('bot_response', '')})
         except Exception as e:
@@ -350,8 +358,8 @@ class MemAgent:
         try:
             response = self.llm.chat(
                 messages=messages,
-                temperature=self.config.get("llm.temperature", 0.
-                max_tokens=self.config.get("llm.max_tokens",
+                temperature=self.config.get("llm.temperature", 0.3) if hasattr(self, 'config') and self.config else 0.3,  # Lower = more focused
+                max_tokens=self.config.get("llm.max_tokens", 300) if hasattr(self, 'config') and self.config else 300  # Shorter responses
             )
         except Exception as e:
             self.logger.error(f"LLM response error: {e}")
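The last two hunks rely on the same guarded-config pattern: use the configured value when self.config exists and is truthy, otherwise fall back to a hard default. A standalone sketch of that pattern, with hypothetical names and a plain dict standing in for the real config object, to make the precedence explicit:

```python
# Standalone sketch of the fallback pattern used above (class and names here are hypothetical)
class Example:
    def __init__(self, config=None):
        self.config = config  # a plain dict stands in for the real config object

    def temperature(self) -> float:
        # configured value wins when a config is present and truthy; otherwise the hard default 0.3
        return self.config.get("llm.temperature", 0.3) if hasattr(self, 'config') and self.config else 0.3


print(Example().temperature())                           # 0.3 (no config)
print(Example({"llm.temperature": 0.7}).temperature())   # 0.7 (value from the config mapping)
```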
{memory_llm → mem_llm}/memory_db.py
@@ -164,6 +164,11 @@ class SQLMemoryManager:
         self.conn.commit()
         return interaction_id

+    # Alias for compatibility
+    def add_conversation(self, user_id: str, user_message: str, bot_response: str, metadata: Optional[Dict] = None) -> int:
+        """Alias for add_interaction"""
+        return self.add_interaction(user_id, user_message, bot_response, metadata)
+
     def get_recent_conversations(self, user_id: str, limit: int = 10) -> List[Dict]:
         """
         Fetches the user's most recent conversations
{memory_llm → mem_llm}/memory_manager.py
@@ -101,6 +101,11 @@ class MemoryManager:
         self.conversations[user_id].append(interaction)
         self.save_memory(user_id)

+    # Alias for compatibility
+    def add_conversation(self, user_id: str, user_message: str, bot_response: str, metadata: Optional[Dict] = None) -> None:
+        """Alias for add_interaction"""
+        return self.add_interaction(user_id, user_message, bot_response, metadata)
+
     def update_profile(self, user_id: str, updates: Dict) -> None:
         """
         Update user profile
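Both memory backends gain the same add_conversation alias, so caller code can use either name against the SQL or JSON manager. A minimal sketch against the JSON backend, whose constructor appears in the README excerpt below (the message strings are illustrative):

```python
# Minimal sketch of the new alias (JSON backend; constructor as shown in the README excerpt below)
from mem_llm import MemoryManager

memory = MemoryManager(memory_dir="memories")
memory.add_conversation(          # new in 1.0.2: thin wrapper that forwards to add_interaction
    user_id="user_1",
    user_message="Where is my order?",
    bot_response="It ships tomorrow.",
)
```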
{mem_llm-1.0.0.dist-info → mem_llm-1.0.2.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mem-llm
-Version: 1.0.
+Version: 1.0.2
 Summary: Memory-enabled AI assistant with local LLM support
 Home-page: https://github.com/emredeveloper/Mem-LLM
 Author: C. Emre Karataş
@@ -86,7 +86,7 @@ ollama pull granite4:tiny-h
 ### 3. Use Mem-Agent

 ```python
-from
+from mem_llm import MemAgent

 # Create agent
 agent = MemAgent(model="granite4:tiny-h")
@@ -150,7 +150,7 @@ Memory LLM/
 ### MemAgent Class

 ```python
-from
+from mem_llm import MemAgent

 agent = MemAgent(
     model="granite4:tiny-h",  # Ollama model name
@@ -197,7 +197,7 @@ agent.clear_user_memory("user_id", confirm=True)
 ### MemoryManager Class

 ```python
-from
+from mem_llm import MemoryManager

 memory = MemoryManager(memory_dir="memories")

@@ -222,7 +222,7 @@ results = memory.search_memory("user_id", "order")
 ### OllamaClient Class

 ```python
-from
+from mem_llm import OllamaClient

 client = OllamaClient(model="granite4:tiny-h")

mem_llm-1.0.2.dist-info/RECORD
@@ -0,0 +1,15 @@
+mem_llm/__init__.py,sha256=l9ynmAWNyC_CPZcb5q-pkJ_oVdJZpFN4hwVHRNqCkg8,920
+mem_llm/config.yaml.example,sha256=lgmfaU5pxnIm4zYxwgCcgLSohNx1Jw6oh3Qk0Xoe2DE,917
+mem_llm/config_from_docs.py,sha256=YFhq1SWyK63C-TNMS73ncNHg8sJ-XGOf2idWVCjxFco,4974
+mem_llm/config_manager.py,sha256=8PIHs21jZWlI-eG9DgekjOvNxU3-U4xH7SbT8Gr-Z6M,7075
+mem_llm/knowledge_loader.py,sha256=oSNhfYYcx7DlZLVogxnbSwaIydq_Q3__RDJFeZR2XVw,2699
+mem_llm/llm_client.py,sha256=tLNulVEV_tWdktvcQUokdhd0gTkIISUHipglRt17IWk,5255
+mem_llm/mem_agent.py,sha256=BIEMHpbss4QPstS-aEoZwmKBBc_fg87tf8Jj7MTIV8g,20357
+mem_llm/memory_db.py,sha256=KyNIcChYihSavd2ot5KMBlVB9lq8rexoBQ0lA5bCJNI,12611
+mem_llm/memory_manager.py,sha256=iXnf5YEJXmQ75jgJ2LEx9zCHxIpZTcLtHlp2eWgFjRg,8335
+mem_llm/memory_tools.py,sha256=ARANFqu_bmL56SlV1RzTjfQsJj-Qe2QvqY0pF92hDxU,8678
+mem_llm/prompt_templates.py,sha256=tCiQJw3QQKIaH8NsxEKOIaIVxw4XT43PwdmyfCINzzM,6536
+mem_llm-1.0.2.dist-info/METADATA,sha256=yJxAbApli62T27XkKE0SIHxgD50PPbhXldmBXfdyfhE,9347
+mem_llm-1.0.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+mem_llm-1.0.2.dist-info/top_level.txt,sha256=_fU1ML-0JwkaxWdhqpwtmTNaJEOvDMQeJdA8d5WqDn8,8
+mem_llm-1.0.2.dist-info/RECORD,,
mem_llm-1.0.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+mem_llm
mem_llm-1.0.0.dist-info/RECORD DELETED
@@ -1,14 +0,0 @@
-memory_llm/__init__.py,sha256=74hTFnqEMUtTnTLUtZllFo-8NM-JghqZgPH9SDgQj0g,827
-memory_llm/config.yaml.example,sha256=lgmfaU5pxnIm4zYxwgCcgLSohNx1Jw6oh3Qk0Xoe2DE,917
-memory_llm/config_manager.py,sha256=8PIHs21jZWlI-eG9DgekjOvNxU3-U4xH7SbT8Gr-Z6M,7075
-memory_llm/knowledge_loader.py,sha256=oSNhfYYcx7DlZLVogxnbSwaIydq_Q3__RDJFeZR2XVw,2699
-memory_llm/llm_client.py,sha256=tLNulVEV_tWdktvcQUokdhd0gTkIISUHipglRt17IWk,5255
-memory_llm/mem_agent.py,sha256=AMw8X5cFdHoyphyHf9B4eBXDFGTLEv9nkDBXnO_fGL4,19907
-memory_llm/memory_db.py,sha256=OGWTIHBHh1qETGvmrlZWfmv9szSaFuSCzJGMZg6HBww,12329
-memory_llm/memory_manager.py,sha256=-JM0Qb5dYm1Rj4jd3FQfDpZSaya-ly9rcgEjyvnyDzk,8052
-memory_llm/memory_tools.py,sha256=ARANFqu_bmL56SlV1RzTjfQsJj-Qe2QvqY0pF92hDxU,8678
-memory_llm/prompt_templates.py,sha256=tCiQJw3QQKIaH8NsxEKOIaIVxw4XT43PwdmyfCINzzM,6536
-mem_llm-1.0.0.dist-info/METADATA,sha256=Pdiho_vUo-vCZgKde5WYCqfabFtXoubMJA97u_qLjaY,9359
-mem_llm-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-mem_llm-1.0.0.dist-info/top_level.txt,sha256=7I8wePWMtiZ-viJGXLYAiHpxiwpwPbFhNn1cyufySok,11
-mem_llm-1.0.0.dist-info/RECORD,,
mem_llm-1.0.0.dist-info/top_level.txt DELETED
@@ -1 +0,0 @@
-memory_llm