mem-llm 1.0.7__py3-none-any.whl → 1.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mem-llm might be problematic.

@@ -0,0 +1,17 @@
+ mem_llm/__init__.py,sha256=PSNNshT0pmAU3qTtuPiWv_PDyjokHthNwgCO4B7aReY,1052
+ mem_llm/cli.py,sha256=DiqQyBZknN8pVagY5jXH85_LZ6odVGopfpa-7DILNNE,8666
+ mem_llm/config.yaml.example,sha256=lgmfaU5pxnIm4zYxwgCcgLSohNx1Jw6oh3Qk0Xoe2DE,917
+ mem_llm/config_from_docs.py,sha256=YFhq1SWyK63C-TNMS73ncNHg8sJ-XGOf2idWVCjxFco,4974
+ mem_llm/config_manager.py,sha256=8PIHs21jZWlI-eG9DgekjOvNxU3-U4xH7SbT8Gr-Z6M,7075
+ mem_llm/dynamic_prompt.py,sha256=8H99QVDRJSVtGb_o4sdEPnG1cJWuer3KiD-nuL1srTA,10244
+ mem_llm/knowledge_loader.py,sha256=oSNhfYYcx7DlZLVogxnbSwaIydq_Q3__RDJFeZR2XVw,2699
+ mem_llm/llm_client.py,sha256=aIC0si_TKe_pLWHPbqjH_HrwIk83b1YLBW3U-405YV0,7768
+ mem_llm/mem_agent.py,sha256=ln6G5J-o1_tCe0tU956u59euii7f7LQt-DM0uhd27rM,29927
+ mem_llm/memory_db.py,sha256=UzkMOw_p7svg6d4ZgpBWdPKoILWrJ2hAQSPHvAG_f4M,13563
+ mem_llm/memory_manager.py,sha256=CZI3A8pFboHQIgeiXB1h2gZK7mgfbVSU3IxuqE-zXtc,9978
+ mem_llm/memory_tools.py,sha256=ARANFqu_bmL56SlV1RzTjfQsJj-Qe2QvqY0pF92hDxU,8678
+ mem_llm-1.0.10.dist-info/METADATA,sha256=Ym81G7c3Ck9usIZQWkoWzoVygVY2otl_H56NdCjuqa4,27031
+ mem_llm-1.0.10.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
+ mem_llm-1.0.10.dist-info/entry_points.txt,sha256=z9bg6xgNroIobvCMtnSXeFPc-vI1nMen8gejHCdnl0U,45
+ mem_llm-1.0.10.dist-info/top_level.txt,sha256=_fU1ML-0JwkaxWdhqpwtmTNaJEOvDMQeJdA8d5WqDn8,8
+ mem_llm-1.0.10.dist-info/RECORD,,
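For orientation only: each RECORD row above follows the standard wheel format `path,sha256=<urlsafe-base64 digest without padding>,<size in bytes>`. A minimal sketch of reading one such row (the row is copied from the hunk; the helper name `digest_matches` is illustrative, not part of mem-llm):

```python
# Parse one wheel RECORD row and show how its digest field is encoded.
import base64
import csv
import hashlib
import io

record_row = "mem_llm/memory_db.py,sha256=UzkMOw_p7svg6d4ZgpBWdPKoILWrJ2hAQSPHvAG_f4M,13563"
path, hash_spec, size = next(csv.reader(io.StringIO(record_row)))
algo, _, expected = hash_spec.partition("=")


def digest_matches(data: bytes) -> bool:
    """Re-compute the digest the way wheel RECORD files encode it."""
    raw = hashlib.new(algo, data).digest()
    return base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii") == expected


print(path, algo, expected, int(size))
```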
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (76.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
@@ -0,0 +1,2 @@
+ [console_scripts]
+ mem-llm = mem_llm.cli:main
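The new entry_points.txt maps the `mem-llm` command to `mem_llm.cli:main`. As a rough sketch only (assuming `main` follows the usual console-scripts convention of taking no arguments and parsing `sys.argv` itself), running the installed command is equivalent to:

```python
# Rough equivalent of the generated `mem-llm` console script.
# Assumes mem_llm.cli.main takes no arguments and reads sys.argv itself,
# which is the usual convention for console_scripts entry points.
import sys

from mem_llm.cli import main

if __name__ == "__main__":
    sys.exit(main())
```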
@@ -1,244 +0,0 @@
- """
- System Prompt Templates and Management
- Customizable prompt templates for different scenarios
- """
-
- from typing import Dict, List, Optional
- from datetime import datetime
-
-
- class PromptTemplate:
-     """System prompt template"""
-
-     def __init__(self, name: str, base_prompt: str,
-                  variables: Optional[Dict[str, str]] = None):
-         """
-         Args:
-             name: Template name
-             base_prompt: Base prompt text (can contain variables in {variable} format)
-             variables: Default variable values
-         """
-         self.name = name
-         self.base_prompt = base_prompt
-         self.variables = variables or {}
-
-     def render(self, **kwargs) -> str:
-         """
-         Fill template with variables
-
-         Args:
-             **kwargs: Variable values
-
-         Returns:
-             Generated prompt
-         """
-         merged_vars = {**self.variables, **kwargs}
-         return self.base_prompt.format(**merged_vars)
-
-
- class PromptManager:
-     """Manages prompt templates"""
-
-     def __init__(self):
-         self.templates: Dict[str, PromptTemplate] = {}
-         self._load_default_templates()
-
-     def _load_default_templates(self) -> None:
-         """Load default templates"""
-
-         # 1. Customer Service
-         self.add_template(
-             name="customer_service",
-             base_prompt="""You are a professional customer service assistant for {company_name} company.
-
- Your task:
- Approach customers kindly and helpfully
- Remember past interactions and create context
- Solve problems quickly and effectively
- Redirect to human representative when necessary
-
- Communication Style:
- Use {tone} tone
- Give short and clear answers
- Show empathy
- Be professional
-
- Important Rules:
- Never lie
- Don't speculate on topics you don't know
- Keep customer satisfaction in the foreground
- Ask if there's any other help at the end of each response
-
- You are currently working on {current_date}.
- """,
-             variables={
-                 "company_name": "Our Company",
-                 "tone": "friendly and professional",
-                 "current_date": datetime.now().strftime("%Y-%m-%d")
-             }
-         )
-
-         # 2. Technical Support
-         self.add_template(
-             name="tech_support",
-             base_prompt="""You are a technical support expert for {product_name}.
-
- Your Expertise Areas:
- Problem diagnosis and resolution
- Step-by-step guidance
- Technical documentation
- Debugging
-
- Approach:
- First understand the problem completely
- Start with simple solutions
- Explain step by step
- Explain technical terms when necessary
-
- User Level: {user_level}
-
- Response Format:
- 1. Summarize the problem
- 2. List possible causes
- 3. Provide solution steps
- 4. Check results
-
- Log level: {log_level}
- """,
-             variables={
-                 "product_name": "Our Product",
-                 "user_level": "intermediate level",
-                 "log_level": "detailed"
-             }
-         )
-
-         # 3. Personal Assistant
-         self.add_template(
-             name="personal_assistant",
-             base_prompt="""You are a personal digital assistant for {user_name}.
-
- Your tasks:
- Help with daily planning
- Reminders
- Gathering and summarizing information
- Suggestions and recommendations
-
- Personalization:
- Learn the user's preferences
- Remember their habits
- Make proactive suggestions
- Rank by priority
-
- Working Hours: {work_hours}
- Time Zone: {timezone}
- Preferred Language: {language}
-
- Approach:
- Efficiency-focused
- Minimal and clear
- Proactive
- Flexible
-
- Data Privacy: {privacy_level}
- """,
-             variables={
-                 "user_name": "User",
-                 "work_hours": "09:00-18:00",
-                 "timezone": "Europe/Istanbul",
-                 "language": "Turkish",
-                 "privacy_level": "high"
-             }
-         )
-
-         # 4. Business Customer Service
-         self.add_template(
-             name="business_customer_service",
-             base_prompt="""You are the corporate customer service assistant for {company_name}.
-
- Corporate Customer Approach:
- Professional and solution-oriented
- Fast responses in line with SLAs
- In-depth support for technical issues
- Multi-channel integration
-
- Company Information:
- Year Founded: {founded_year}
- Number of Employees: {employee_count}
- Industry: {industry}
-
- Priority Level: {priority_level}
- SLA Time: {sla_hours} hours
- """,
-             variables={
-                 "company_name": "Corporate Company",
-                 "founded_year": "2010",
-                 "employee_count": "500+",
-                 "industry": "Technology",
-                 "priority_level": "high",
-                 "sla_hours": "4"
-             }
-         )
-
-     def add_template(self, name: str, base_prompt: str,
-                      variables: Optional[Dict[str, str]] = None) -> None:
-         """
-         Add new template
-
-         Args:
-             name: Template name
-             base_prompt: Prompt text
-             variables: Default variables
-         """
-         self.templates[name] = PromptTemplate(name, base_prompt, variables)
-
-     def get_template(self, name: str) -> Optional[PromptTemplate]:
-         """
-         Get template
-
-         Args:
-             name: Template name
-
-         Returns:
-             PromptTemplate or None
-         """
-         return self.templates.get(name)
-
-     def render_prompt(self, template_name: str, **kwargs) -> str:
-         """
-         Render template
-
-         Args:
-             template_name: Template name
-             **kwargs: Variable values
-
-         Returns:
-             Generated prompt
-         """
-         template = self.get_template(template_name)
-         if template:
-             return template.render(**kwargs)
-         raise ValueError(f"Template '{template_name}' not found")
-
-     def list_templates(self) -> List[str]:
-         """List available templates"""
-         return list(self.templates.keys())
-
-     def get_template_variables(self, template_name: str) -> Dict[str, str]:
-         """
-         Return template variables
-
-         Args:
-             template_name: Template name
-
-         Returns:
-             Variables dictionary
-         """
-         template = self.get_template(template_name)
-         if template:
-             return template.variables.copy()
-         return {}
-
-
- # Global instance for ready use
- prompt_manager = PromptManager()
-
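For context, the module deleted above exposed a module-level `prompt_manager` instance. A minimal usage sketch of that 1.0.7-era API, based only on the code shown in this hunk (template names and variables are the removed defaults):

```python
# Usage sketch for the mem_llm.prompt_templates API removed above
# (shipped in 1.0.7, absent from 1.0.10). All names come from the deleted code.
from mem_llm.prompt_templates import prompt_manager

# Templates registered by _load_default_templates():
# customer_service, tech_support, personal_assistant, business_customer_service
print(prompt_manager.list_templates())

# Read a template's default variables, then render it with overrides.
print(prompt_manager.get_template_variables("customer_service"))
prompt = prompt_manager.render_prompt(
    "customer_service",
    company_name="Acme Corp",
    tone="formal",
)
print(prompt)
```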
@@ -1,304 +0,0 @@
- Metadata-Version: 2.4
- Name: mem-llm
- Version: 1.0.7
- Summary: Memory-enabled AI assistant with local LLM support
- Home-page: https://github.com/emredeveloper/Mem-LLM
- Author: C. Emre Karataş
- Author-email: karatasqemre@gmail.com
- Project-URL: Bug Reports, https://github.com/emredeveloper/Mem-LLM/issues
- Project-URL: Source, https://github.com/emredeveloper/Mem-LLM
- Keywords: llm ai memory agent chatbot ollama local
- Classifier: Development Status :: 4 - Beta
- Classifier: Intended Audience :: Developers
- Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Requires-Python: >=3.8
- Description-Content-Type: text/markdown
- Requires-Dist: requests>=2.31.0
- Requires-Dist: pyyaml>=6.0.1
- Provides-Extra: dev
- Requires-Dist: pytest>=7.4.0; extra == "dev"
- Requires-Dist: black>=23.7.0; extra == "dev"
- Requires-Dist: flake8>=6.1.0; extra == "dev"
- Dynamic: author
- Dynamic: author-email
- Dynamic: classifier
- Dynamic: description
- Dynamic: description-content-type
- Dynamic: home-page
- Dynamic: keywords
- Dynamic: project-url
- Dynamic: provides-extra
- Dynamic: requires-dist
- Dynamic: requires-python
- Dynamic: summary
-
- # 🧠 mem-llm
-
- **Memory-enabled AI assistant that remembers conversations using local LLMs**
-
- [![Python](https://img.shields.io/badge/Python-3.8%2B-blue.svg)](https://www.python.org/downloads/)
- [![PyPI](https://img.shields.io/pypi/v/mem-llm.svg)](https://pypi.org/project/mem-llm/)
- [![License](https://img.shields.io/badge/License-MIT-green.svg)](LICENSE)
-
- ---
-
- ## 🎯 What is it?
-
- A lightweight Python library that adds **persistent memory** to local LLM chatbots. Each user gets their own conversation history that the AI remembers across sessions.
-
- **Perfect for:**
- 💬 Customer service chatbots
- 🤖 Personal AI assistants
- 📝 Context-aware applications
- 🏢 Business automation
-
- ---
-
- ## ⚡ Quick Start
-
- ### 1. Install
-
- ```bash
- pip install mem-llm
- ```
-
- ### 2. Setup Ollama (one-time)
-
- ```bash
- # Install: https://ollama.ai/download
- ollama serve
-
- # Download model (only 2.5GB)
- ollama pull granite4:tiny-h
- ```
-
- ### 3. Use
-
- ```python
- from mem_llm import MemAgent
-
- # Create agent (one line!)
- agent = MemAgent()
-
- # Set user
- agent.set_user("john")
-
- # Chat - it remembers!
- agent.chat("My name is John")
- agent.chat("What's my name?") # → "Your name is John"
- ```
-
- ---
-
- ## 💡 Features
-
- | Feature | Description |
- |---------|-------------|
- | 🧠 **Memory** | Remembers each user's conversation history |
- | 👥 **Multi-user** | Separate memory for each user |
- | 🔒 **Privacy** | 100% local, no cloud/API needed |
- | ⚡ **Fast** | Lightweight SQLite/JSON storage |
- | 🎯 **Simple** | 3 lines of code to get started |
-
- ---
-
- ## 📖 Usage Examples
-
- ### Basic Chat
-
- ```python
- from mem_llm import MemAgent
-
- agent = MemAgent()
- agent.set_user("alice")
-
- # First conversation
- agent.chat("I love pizza")
-
- # Later...
- agent.chat("What's my favorite food?")
- # → "Your favorite food is pizza"
- ```
-
- ### Customer Service Bot
-
- ```python
- agent = MemAgent()
-
- # Customer 1
- agent.set_user("customer_001")
- agent.chat("My order #12345 is delayed")
-
- # Customer 2 (different memory!)
- agent.set_user("customer_002")
- agent.chat("I want to return item #67890")
- ```
-
- ### Check User Profile
-
- ```python
- # Get automatically extracted user info
- profile = agent.get_user_profile()
- # {'name': 'Alice', 'favorite_food': 'pizza', 'location': 'NYC'}
- ```
-
- ---
-
- ## 🔧 Configuration
-
- ### JSON Memory (default - simple)
-
- ```python
- agent = MemAgent(
-     model="granite4:tiny-h",
-     use_sql=False, # Use JSON files
-     memory_dir="memories"
- )
- ```
-
- ### SQL Memory (advanced - faster)
-
- ```python
- agent = MemAgent(
-     model="granite4:tiny-h",
-     use_sql=True, # Use SQLite
-     memory_dir="memories.db"
- )
- ```
-
- ### Custom Settings
-
- ```python
- agent = MemAgent(
-     model="llama2", # Any Ollama model
-     ollama_url="http://localhost:11434"
- )
- ```
-
- ---
-
- ## 📚 API Reference
-
- ### MemAgent
-
- ```python
- # Initialize
- agent = MemAgent(model="granite4:tiny-h", use_sql=False)
-
- # Set active user
- agent.set_user(user_id: str, name: Optional[str] = None)
-
- # Chat
- response = agent.chat(message: str, metadata: Optional[Dict] = None) -> str
-
- # Get profile
- profile = agent.get_user_profile(user_id: Optional[str] = None) -> Dict
-
- # System check
- status = agent.check_setup() -> Dict
- ```
-
- ---
-
- ## 🎨 Advanced: PDF/DOCX Config
-
- Generate config from business documents:
-
- ```python
- from mem_llm import create_config_from_document
-
- # Create config.yaml from PDF
- create_config_from_document(
-     doc_path="company_info.pdf",
-     output_path="config.yaml",
-     company_name="Acme Corp"
- )
-
- # Use config
- agent = MemAgent(config_file="config.yaml")
- ```
-
- ---
-
- ## 🔥 Models
-
- Works with any [Ollama](https://ollama.ai/) model:
-
- | Model | Size | Speed | Quality |
- |-------|------|-------|---------|
- | `granite4:tiny-h` | 2.5GB | ⚡⚡⚡ | ⭐⭐ |
- | `llama2` | 4GB | ⚡⚡ | ⭐⭐⭐ |
- | `mistral` | 4GB | ⚡⚡ | ⭐⭐⭐⭐ |
- | `llama3` | 5GB | ⚡ | ⭐⭐⭐⭐⭐ |
-
- ```bash
- ollama pull <model-name>
- ```
-
- ---
-
- ## 📦 Requirements
-
- Python 3.8+
- Ollama (for LLM)
- 4GB RAM minimum
- 5GB disk space
-
- **Dependencies** (auto-installed):
- `requests >= 2.31.0`
- `pyyaml >= 6.0.1`
-
- ---
-
- ## 🐛 Troubleshooting
-
- ### Ollama not running?
-
- ```bash
- ollama serve
- ```
-
- ### Model not found?
-
- ```bash
- ollama pull granite4:tiny-h
- ```
-
- ### Import error?
-
- ```bash
- pip install mem-llm --upgrade
- ```
-
- ---
-
- ## 📄 License
-
- MIT License - feel free to use in personal and commercial projects!
-
- ---
-
- ## 🔗 Links
-
- **PyPI:** https://pypi.org/project/mem-llm/
- **GitHub:** https://github.com/emredeveloper/Mem-LLM
- **Ollama:** https://ollama.ai/
-
- ---
-
- ## 🌟 Star us on GitHub!
-
- If you find this useful, give us a ⭐ on [GitHub](https://github.com/emredeveloper/Mem-LLM)!
-
- ---
-
- <div align="center">
- Made with ❤️ by <a href="https://github.com/emredeveloper">C. Emre Karataş</a>
- </div>
@@ -1,15 +0,0 @@
- mem_llm/__init__.py,sha256=K-nIQ-BtTJzDhgseulljOKMPXuQZS563ffED4brYFm4,920
- mem_llm/config.yaml.example,sha256=lgmfaU5pxnIm4zYxwgCcgLSohNx1Jw6oh3Qk0Xoe2DE,917
- mem_llm/config_from_docs.py,sha256=YFhq1SWyK63C-TNMS73ncNHg8sJ-XGOf2idWVCjxFco,4974
- mem_llm/config_manager.py,sha256=8PIHs21jZWlI-eG9DgekjOvNxU3-U4xH7SbT8Gr-Z6M,7075
- mem_llm/knowledge_loader.py,sha256=oSNhfYYcx7DlZLVogxnbSwaIydq_Q3__RDJFeZR2XVw,2699
- mem_llm/llm_client.py,sha256=XW-LALYV4C-Nj2R8XYT2iT2YnTeH6_tUIldMKooY2uY,5461
- mem_llm/mem_agent.py,sha256=kJwlZYRIE6OI06ZG-mtQKzz1_zkjs1ss_DYr4DiaHUw,27140
- mem_llm/memory_db.py,sha256=UzkMOw_p7svg6d4ZgpBWdPKoILWrJ2hAQSPHvAG_f4M,13563
- mem_llm/memory_manager.py,sha256=CZI3A8pFboHQIgeiXB1h2gZK7mgfbVSU3IxuqE-zXtc,9978
- mem_llm/memory_tools.py,sha256=ARANFqu_bmL56SlV1RzTjfQsJj-Qe2QvqY0pF92hDxU,8678
- mem_llm/prompt_templates.py,sha256=tCiQJw3QQKIaH8NsxEKOIaIVxw4XT43PwdmyfCINzzM,6536
- mem_llm-1.0.7.dist-info/METADATA,sha256=K2paX5fYmv3U7BNrBbqYI3D6gwfvszaS1HyJJuIJo88,6505
- mem_llm-1.0.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- mem_llm-1.0.7.dist-info/top_level.txt,sha256=_fU1ML-0JwkaxWdhqpwtmTNaJEOvDMQeJdA8d5WqDn8,8
- mem_llm-1.0.7.dist-info/RECORD,,