superlocalmemory 2.6.0 → 2.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +167 -1803
- package/README.md +212 -397
- package/bin/slm +179 -3
- package/bin/superlocalmemoryv2:learning +4 -0
- package/bin/superlocalmemoryv2:patterns +4 -0
- package/docs/ACCESSIBILITY.md +291 -0
- package/docs/ARCHITECTURE.md +12 -6
- package/docs/FRAMEWORK-INTEGRATIONS.md +300 -0
- package/docs/MCP-MANUAL-SETUP.md +14 -4
- package/install.sh +99 -3
- package/mcp_server.py +291 -1
- package/package.json +2 -1
- package/requirements-learning.txt +12 -0
- package/scripts/verify-v27.sh +233 -0
- package/skills/slm-show-patterns/SKILL.md +224 -0
- package/src/learning/__init__.py +201 -0
- package/src/learning/adaptive_ranker.py +826 -0
- package/src/learning/cross_project_aggregator.py +866 -0
- package/src/learning/engagement_tracker.py +638 -0
- package/src/learning/feature_extractor.py +461 -0
- package/src/learning/feedback_collector.py +690 -0
- package/src/learning/learning_db.py +842 -0
- package/src/learning/project_context_manager.py +582 -0
- package/src/learning/source_quality_scorer.py +685 -0
- package/src/learning/synthetic_bootstrap.py +1047 -0
- package/src/learning/tests/__init__.py +0 -0
- package/src/learning/tests/test_adaptive_ranker.py +328 -0
- package/src/learning/tests/test_aggregator.py +309 -0
- package/src/learning/tests/test_feedback_collector.py +295 -0
- package/src/learning/tests/test_learning_db.py +606 -0
- package/src/learning/tests/test_project_context.py +296 -0
- package/src/learning/tests/test_source_quality.py +355 -0
- package/src/learning/tests/test_synthetic_bootstrap.py +433 -0
- package/src/learning/tests/test_workflow_miner.py +322 -0
- package/src/learning/workflow_pattern_miner.py +665 -0
- package/ui/index.html +346 -13
- package/ui/js/clusters.js +90 -1
- package/ui/js/graph-core.js +445 -0
- package/ui/js/graph-cytoscape-monolithic-backup.js +1168 -0
- package/ui/js/graph-cytoscape.js +1168 -0
- package/ui/js/graph-d3-backup.js +32 -0
- package/ui/js/graph-filters.js +220 -0
- package/ui/js/graph-interactions.js +354 -0
- package/ui/js/graph-ui.js +214 -0
- package/ui/js/memories.js +52 -0
- package/ui/js/modal.js +104 -1
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
# Framework Integrations
|
|
2
|
+
|
|
3
|
+
SuperLocalMemory V2 integrates with popular AI frameworks as a memory backend — 100% local, zero cloud dependencies.
|
|
4
|
+
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
## LangChain Integration
|
|
8
|
+
|
|
9
|
+
Use SuperLocalMemory as a chat message history store in LangChain applications.
|
|
10
|
+
|
|
11
|
+
### Installation
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
pip install langchain-superlocalmemory
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
### Basic Usage
|
|
18
|
+
|
|
19
|
+
```python
|
|
20
|
+
from langchain_superlocalmemory import SuperLocalMemoryChatMessageHistory
|
|
21
|
+
from langchain_core.runnables.history import RunnableWithMessageHistory
|
|
22
|
+
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
|
|
23
|
+
from langchain_openai import ChatOpenAI
|
|
24
|
+
|
|
25
|
+
# Create chat history with session-based memory
|
|
26
|
+
history = SuperLocalMemoryChatMessageHistory(session_id="my-session")
|
|
27
|
+
|
|
28
|
+
# Build a conversational chain
|
|
29
|
+
prompt = ChatPromptTemplate.from_messages([
|
|
30
|
+
("system", "You are a helpful assistant."),
|
|
31
|
+
MessagesPlaceholder(variable_name="history"),
|
|
32
|
+
("human", "{input}"),
|
|
33
|
+
])
|
|
34
|
+
|
|
35
|
+
chain = prompt | ChatOpenAI()
|
|
36
|
+
|
|
37
|
+
# Wrap with message history
|
|
38
|
+
chain_with_history = RunnableWithMessageHistory(
|
|
39
|
+
chain,
|
|
40
|
+
lambda session_id: SuperLocalMemoryChatMessageHistory(session_id=session_id),
|
|
41
|
+
input_messages_key="input",
|
|
42
|
+
history_messages_key="history",
|
|
43
|
+
)
|
|
44
|
+
|
|
45
|
+
# Use the chain
|
|
46
|
+
response = chain_with_history.invoke(
|
|
47
|
+
{"input": "What is AI?"},
|
|
48
|
+
config={"configurable": {"session_id": "my-session"}}
|
|
49
|
+
)
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
### Advanced Features
|
|
53
|
+
|
|
54
|
+
**Session Isolation:**
|
|
55
|
+
```python
|
|
56
|
+
# Different sessions have isolated message histories
|
|
57
|
+
history_user1 = SuperLocalMemoryChatMessageHistory(session_id="user-1")
|
|
58
|
+
history_user2 = SuperLocalMemoryChatMessageHistory(session_id="user-2")
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
**Profile Support:**
|
|
62
|
+
```python
|
|
63
|
+
# Use different memory profiles for different contexts
|
|
64
|
+
history_work = SuperLocalMemoryChatMessageHistory(
|
|
65
|
+
session_id="work-chat",
|
|
66
|
+
profile="work"
|
|
67
|
+
)
|
|
68
|
+
history_personal = SuperLocalMemoryChatMessageHistory(
|
|
69
|
+
session_id="personal-chat",
|
|
70
|
+
profile="personal"
|
|
71
|
+
)
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
**Message Filtering:**
|
|
75
|
+
```python
|
|
76
|
+
# Retrieve messages with limits
|
|
77
|
+
recent_messages = history.get_messages(limit=10)
|
|
78
|
+
|
|
79
|
+
# Clear session history
|
|
80
|
+
history.clear()
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
### Storage Details
|
|
84
|
+
|
|
85
|
+
- Messages persist in `~/.claude-memory/memory.db`
|
|
86
|
+
- Each message stored as a memory with tags: `langchain`, `chat`, `session:<session_id>`
|
|
87
|
+
- Supports all LangChain message types (HumanMessage, AIMessage, SystemMessage)
|
|
88
|
+
- Automatic timestamp and metadata tracking
|
|
89
|
+
|
|
90
|
+
---
|
|
91
|
+
|
|
92
|
+
## LlamaIndex Integration
|
|
93
|
+
|
|
94
|
+
Use SuperLocalMemory as a chat store for LlamaIndex's memory system.
|
|
95
|
+
|
|
96
|
+
### Installation
|
|
97
|
+
|
|
98
|
+
```bash
|
|
99
|
+
pip install llama-index-storage-chat-store-superlocalmemory
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
### Basic Usage
|
|
103
|
+
|
|
104
|
+
```python
|
|
105
|
+
from llama_index.storage.chat_store.superlocalmemory import SuperLocalMemoryChatStore
|
|
106
|
+
from llama_index.core.memory import ChatMemoryBuffer
|
|
107
|
+
from llama_index.core.chat_engine import SimpleChatEngine
|
|
108
|
+
from llama_index.llms.openai import OpenAI
|
|
109
|
+
|
|
110
|
+
# Create chat store
|
|
111
|
+
chat_store = SuperLocalMemoryChatStore()
|
|
112
|
+
|
|
113
|
+
# Create memory with chat store
|
|
114
|
+
memory = ChatMemoryBuffer.from_defaults(
|
|
115
|
+
chat_store=chat_store,
|
|
116
|
+
chat_store_key="user-1"
|
|
117
|
+
)
|
|
118
|
+
|
|
119
|
+
# Use with a chat engine
|
|
120
|
+
llm = OpenAI(model="gpt-4")
|
|
121
|
+
chat_engine = SimpleChatEngine.from_defaults(
|
|
122
|
+
llm=llm,
|
|
123
|
+
memory=memory
|
|
124
|
+
)
|
|
125
|
+
|
|
126
|
+
# Chat
|
|
127
|
+
response = chat_engine.chat("What is machine learning?")
|
|
128
|
+
print(response)
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
### Advanced Features
|
|
132
|
+
|
|
133
|
+
**Multiple Users:**
|
|
134
|
+
```python
|
|
135
|
+
# Separate memory for each user
|
|
136
|
+
memory_user1 = ChatMemoryBuffer.from_defaults(
|
|
137
|
+
chat_store=chat_store,
|
|
138
|
+
chat_store_key="user-1"
|
|
139
|
+
)
|
|
140
|
+
memory_user2 = ChatMemoryBuffer.from_defaults(
|
|
141
|
+
chat_store=chat_store,
|
|
142
|
+
chat_store_key="user-2"
|
|
143
|
+
)
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
**Profile Support:**
|
|
147
|
+
```python
|
|
148
|
+
# Use different profiles for different contexts
|
|
149
|
+
chat_store_work = SuperLocalMemoryChatStore(profile="work")
|
|
150
|
+
chat_store_personal = SuperLocalMemoryChatStore(profile="personal")
|
|
151
|
+
|
|
152
|
+
memory_work = ChatMemoryBuffer.from_defaults(
|
|
153
|
+
chat_store=chat_store_work,
|
|
154
|
+
chat_store_key="project-x"
|
|
155
|
+
)
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
**Message Management:**
|
|
159
|
+
```python
|
|
160
|
+
# Get messages for a specific chat
|
|
161
|
+
messages = chat_store.get_messages("user-1")
|
|
162
|
+
|
|
163
|
+
# Set messages
|
|
164
|
+
from llama_index.core.base.llms.types import ChatMessage
|
|
165
|
+
chat_store.set_messages(
|
|
166
|
+
"user-1",
|
|
167
|
+
[ChatMessage(role="user", content="Hello")]
|
|
168
|
+
)
|
|
169
|
+
|
|
170
|
+
# Delete messages
|
|
171
|
+
chat_store.delete_messages("user-1")
|
|
172
|
+
|
|
173
|
+
# List all chat keys
|
|
174
|
+
all_chats = chat_store.get_keys()
|
|
175
|
+
```
|
|
176
|
+
|
|
177
|
+
### Storage Details
|
|
178
|
+
|
|
179
|
+
- Messages persist in `~/.claude-memory/memory.db`
|
|
180
|
+
- Each message stored as a memory with tags: `llamaindex`, `chat`, `key:<chat_store_key>`
|
|
181
|
+
- Supports all LlamaIndex ChatMessage roles (user, assistant, system)
|
|
182
|
+
- Automatic timestamp tracking
|
|
183
|
+
- Full profile isolation support
|
|
184
|
+
|
|
185
|
+
---
|
|
186
|
+
|
|
187
|
+
## Why Use SuperLocalMemory with Frameworks?
|
|
188
|
+
|
|
189
|
+
| Benefit | Description |
|
|
190
|
+
|---------|-------------|
|
|
191
|
+
| **100% Local** | No cloud dependencies, all data stays on your machine |
|
|
192
|
+
| **Zero Configuration** | Works with default settings, no API keys needed |
|
|
193
|
+
| **Cross-Framework** | Same local database used by all frameworks and tools |
|
|
194
|
+
| **Profile Isolation** | Separate memories for work, personal, clients |
|
|
195
|
+
| **Persistent** | Memories survive across sessions and reboots |
|
|
196
|
+
| **Free Forever** | No usage limits, no subscriptions |
|
|
197
|
+
|
|
198
|
+
---
|
|
199
|
+
|
|
200
|
+
## Common Patterns
|
|
201
|
+
|
|
202
|
+
### Multi-Context Applications
|
|
203
|
+
|
|
204
|
+
```python
|
|
205
|
+
# LangChain for customer support
|
|
206
|
+
support_history = SuperLocalMemoryChatMessageHistory(
|
|
207
|
+
session_id="customer-123",
|
|
208
|
+
profile="customer-support"
|
|
209
|
+
)
|
|
210
|
+
|
|
211
|
+
# LlamaIndex for internal documentation
|
|
212
|
+
docs_store = SuperLocalMemoryChatStore(profile="internal-docs")
|
|
213
|
+
docs_memory = ChatMemoryBuffer.from_defaults(
|
|
214
|
+
chat_store=docs_store,
|
|
215
|
+
chat_store_key="team-wiki"
|
|
216
|
+
)
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
### Session Management
|
|
220
|
+
|
|
221
|
+
```python
|
|
222
|
+
# Create sessions with metadata
|
|
223
|
+
from langchain_core.messages import HumanMessage, AIMessage
|
|
224
|
+
|
|
225
|
+
history = SuperLocalMemoryChatMessageHistory(session_id="session-123")
|
|
226
|
+
history.add_user_message("What is Python?")
|
|
227
|
+
history.add_ai_message("Python is a high-level programming language...")
|
|
228
|
+
|
|
229
|
+
# Later, retrieve full conversation
|
|
230
|
+
messages = history.get_messages()
|
|
231
|
+
```
|
|
232
|
+
|
|
233
|
+
### Memory Cleanup
|
|
234
|
+
|
|
235
|
+
```python
|
|
236
|
+
# LangChain: Clear specific session
|
|
237
|
+
history.clear()
|
|
238
|
+
|
|
239
|
+
# LlamaIndex: Delete specific chat
|
|
240
|
+
chat_store.delete_messages("user-1")
|
|
241
|
+
|
|
242
|
+
# CLI: Reset entire profile
|
|
243
|
+
# superlocalmemoryv2:reset soft --profile customer-support
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
---
|
|
247
|
+
|
|
248
|
+
## Troubleshooting
|
|
249
|
+
|
|
250
|
+
### Import Errors
|
|
251
|
+
|
|
252
|
+
If you get import errors, ensure packages are installed:
|
|
253
|
+
|
|
254
|
+
```bash
|
|
255
|
+
# For LangChain
|
|
256
|
+
pip install langchain-superlocalmemory langchain-core
|
|
257
|
+
|
|
258
|
+
# For LlamaIndex
|
|
259
|
+
pip install llama-index-storage-chat-store-superlocalmemory llama-index-core
|
|
260
|
+
```
|
|
261
|
+
|
|
262
|
+
### Database Locked
|
|
263
|
+
|
|
264
|
+
If you see "database is locked" errors:
|
|
265
|
+
|
|
266
|
+
```bash
|
|
267
|
+
# Check if SuperLocalMemory is running correctly
|
|
268
|
+
superlocalmemoryv2:status
|
|
269
|
+
|
|
270
|
+
# Restart any MCP servers
|
|
271
|
+
# (Close and reopen Cursor/Windsurf)
|
|
272
|
+
```
|
|
273
|
+
|
|
274
|
+
### Profile Not Found
|
|
275
|
+
|
|
276
|
+
If a profile doesn't exist:
|
|
277
|
+
|
|
278
|
+
```bash
|
|
279
|
+
# List available profiles
|
|
280
|
+
superlocalmemoryv2:profile list
|
|
281
|
+
|
|
282
|
+
# Create the profile
|
|
283
|
+
superlocalmemoryv2:profile create work
|
|
284
|
+
```
|
|
285
|
+
|
|
286
|
+
---
|
|
287
|
+
|
|
288
|
+
## Learn More
|
|
289
|
+
|
|
290
|
+
- **[LangChain Wiki Guide](https://github.com/varun369/SuperLocalMemoryV2/wiki/LangChain-Integration)** — Full integration tutorial
|
|
291
|
+
- **[LlamaIndex Wiki Guide](https://github.com/varun369/SuperLocalMemoryV2/wiki/LlamaIndex-Integration)** — Complete setup guide
|
|
292
|
+
- **[API Reference](API-REFERENCE.md)** — Python API documentation
|
|
293
|
+
- **[Profiles Guide](PROFILES-GUIDE.md)** — Multi-context management
|
|
294
|
+
|
|
295
|
+
---
|
|
296
|
+
|
|
297
|
+
<p align="center">
|
|
298
|
+
<strong>Built by <a href="https://github.com/varun369">Varun Pratap Bhardwaj</a></strong><br/>
|
|
299
|
+
MIT License • <a href="https://superlocalmemory.com">superlocalmemory.com</a>
|
|
300
|
+
</p>
|
package/docs/MCP-MANUAL-SETUP.md
CHANGED
|
@@ -683,7 +683,7 @@ python3 ~/.claude-memory/mcp_server.py
|
|
|
683
683
|
```
|
|
684
684
|
============================================================
|
|
685
685
|
SuperLocalMemory V2 - MCP Server
|
|
686
|
-
Version: 2.
|
|
686
|
+
Version: 2.7.0
|
|
687
687
|
============================================================
|
|
688
688
|
|
|
689
689
|
Transport: stdio
|
|
@@ -692,10 +692,16 @@ Database: /Users/yourusername/.claude-memory/memory.db
|
|
|
692
692
|
MCP Tools Available:
|
|
693
693
|
- remember(content, tags, project, importance)
|
|
694
694
|
- recall(query, limit, min_score)
|
|
695
|
+
- search(query) [ChatGPT Connector]
|
|
696
|
+
- fetch(id) [ChatGPT Connector]
|
|
695
697
|
- list_recent(limit)
|
|
696
698
|
- get_status()
|
|
697
699
|
- build_graph()
|
|
698
700
|
- switch_profile(name)
|
|
701
|
+
- backup_status() [Auto-Backup]
|
|
702
|
+
- memory_used(...) [v2.7 Learning]
|
|
703
|
+
- get_learned_patterns(...) [v2.7 Learning]
|
|
704
|
+
- correct_pattern(...) [v2.7 Learning]
|
|
699
705
|
|
|
700
706
|
...
|
|
701
707
|
```
|
|
@@ -731,7 +737,7 @@ In your IDE/app, check:
|
|
|
731
737
|
|
|
732
738
|
## Available MCP Tools
|
|
733
739
|
|
|
734
|
-
Once configured, these
|
|
740
|
+
Once configured, these 12 tools are available:
|
|
735
741
|
|
|
736
742
|
| Tool | Purpose | Example Usage |
|
|
737
743
|
|------|---------|---------------|
|
|
@@ -743,10 +749,14 @@ Once configured, these 8 tools are available:
|
|
|
743
749
|
| `switch_profile()` | Change profile | "Switch to work profile" |
|
|
744
750
|
| `search()` | Search memories (OpenAI MCP spec) | Used by ChatGPT Connectors and Deep Research |
|
|
745
751
|
| `fetch()` | Fetch memory by ID (OpenAI MCP spec) | Used by ChatGPT Connectors and Deep Research |
|
|
752
|
+
| `backup_status()` | Auto-backup status | "What's the backup status?" |
|
|
753
|
+
| `memory_used()` | Feedback for learning (v2.7) | Implicit — called when a recalled memory is used |
|
|
754
|
+
| `get_learned_patterns()` | Retrieve learned patterns (v2.7) | "What patterns have you learned about me?" |
|
|
755
|
+
| `correct_pattern()` | Correct a learned pattern (v2.7) | "I actually prefer Vue, not React" |
|
|
746
756
|
|
|
747
|
-
**Note:** `search()` and `fetch()` are required by OpenAI's MCP specification for ChatGPT Connectors. They are available in all transports but primarily used by ChatGPT.
|
|
757
|
+
**Note:** `search()` and `fetch()` are required by OpenAI's MCP specification for ChatGPT Connectors. They are available in all transports but primarily used by ChatGPT. The 3 learning tools (`memory_used`, `get_learned_patterns`, `correct_pattern`) require v2.7's optional learning dependencies.
|
|
748
758
|
|
|
749
|
-
Plus **2 MCP prompts** and **
|
|
759
|
+
Plus **2 MCP prompts** and **6 MCP resources** for advanced use.
|
|
750
760
|
|
|
751
761
|
---
|
|
752
762
|
|
package/install.sh
CHANGED
|
@@ -57,18 +57,87 @@ if [ "$NON_INTERACTIVE" = true ]; then
|
|
|
57
57
|
echo ""
|
|
58
58
|
fi
|
|
59
59
|
|
|
60
|
-
# Check Python version
|
|
60
|
+
# Check Python version — install automatically if missing (friendly for non-technical users)
|
|
61
61
|
echo "Checking Python version..."
|
|
62
|
+
|
|
63
|
+
install_python() {
|
|
64
|
+
echo ""
|
|
65
|
+
echo "Python 3 not found. Attempting automatic installation..."
|
|
66
|
+
if [ "$(uname)" = "Darwin" ]; then
|
|
67
|
+
# macOS: try Homebrew first, then Xcode CLI tools
|
|
68
|
+
if command -v brew &> /dev/null; then
|
|
69
|
+
echo "Installing Python via Homebrew..."
|
|
70
|
+
brew install python3 && return 0
|
|
71
|
+
fi
|
|
72
|
+
# Try installing Xcode Command Line Tools (includes Python 3)
|
|
73
|
+
echo "Installing Xcode Command Line Tools (includes Python 3)..."
|
|
74
|
+
echo "A system dialog may appear — click 'Install' to continue."
|
|
75
|
+
xcode-select --install 2>/dev/null
|
|
76
|
+
# Wait for user to complete the install dialog
|
|
77
|
+
echo "Waiting for Xcode CLI tools installation to complete..."
|
|
78
|
+
echo "Press Enter after the installation finishes."
|
|
79
|
+
if [ "$NON_INTERACTIVE" = false ]; then
|
|
80
|
+
read -r
|
|
81
|
+
else
|
|
82
|
+
# In non-interactive mode, wait and retry
|
|
83
|
+
sleep 30
|
|
84
|
+
fi
|
|
85
|
+
if command -v python3 &> /dev/null; then
|
|
86
|
+
return 0
|
|
87
|
+
fi
|
|
88
|
+
# Last resort: direct Python.org installer
|
|
89
|
+
echo ""
|
|
90
|
+
echo "Automatic installation could not complete."
|
|
91
|
+
echo "Please install Python 3.8+ from: https://www.python.org/downloads/"
|
|
92
|
+
echo "Then re-run this installer."
|
|
93
|
+
return 1
|
|
94
|
+
elif [ -f /etc/debian_version ]; then
|
|
95
|
+
# Debian/Ubuntu
|
|
96
|
+
echo "Installing Python via apt..."
|
|
97
|
+
sudo apt-get update -qq && sudo apt-get install -y python3 python3-pip && return 0
|
|
98
|
+
elif [ -f /etc/redhat-release ]; then
|
|
99
|
+
# RHEL/CentOS/Fedora
|
|
100
|
+
echo "Installing Python via dnf..."
|
|
101
|
+
sudo dnf install -y python3 python3-pip && return 0
|
|
102
|
+
elif [ -f /etc/arch-release ]; then
|
|
103
|
+
# Arch Linux
|
|
104
|
+
sudo pacman -S --noconfirm python python-pip && return 0
|
|
105
|
+
fi
|
|
106
|
+
echo "Could not auto-install Python. Please install Python 3.8+ manually."
|
|
107
|
+
echo " macOS: brew install python3"
|
|
108
|
+
echo " Ubuntu: sudo apt install python3 python3-pip"
|
|
109
|
+
echo " Fedora: sudo dnf install python3 python3-pip"
|
|
110
|
+
return 1
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
if ! command -v python3 &> /dev/null; then
|
|
114
|
+
install_python || exit 1
|
|
115
|
+
fi
|
|
116
|
+
|
|
62
117
|
PYTHON_VERSION=$(python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')
|
|
63
118
|
PYTHON_MAJOR=$(python3 -c 'import sys; print(sys.version_info.major)')
|
|
64
119
|
PYTHON_MINOR=$(python3 -c 'import sys; print(sys.version_info.minor)')
|
|
65
120
|
|
|
66
121
|
if [ "$PYTHON_MAJOR" -lt 3 ] || ([ "$PYTHON_MAJOR" -eq 3 ] && [ "$PYTHON_MINOR" -lt 8 ]); then
|
|
67
|
-
echo "
|
|
68
|
-
exit 1
|
|
122
|
+
echo "Python $PYTHON_VERSION found but 3.8+ required."
|
|
123
|
+
install_python || exit 1
|
|
124
|
+
# Re-check after install
|
|
125
|
+
PYTHON_VERSION=$(python3 -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')
|
|
126
|
+
PYTHON_MAJOR=$(python3 -c 'import sys; print(sys.version_info.major)')
|
|
127
|
+
PYTHON_MINOR=$(python3 -c 'import sys; print(sys.version_info.minor)')
|
|
128
|
+
if [ "$PYTHON_MAJOR" -lt 3 ] || ([ "$PYTHON_MAJOR" -eq 3 ] && [ "$PYTHON_MINOR" -lt 8 ]); then
|
|
129
|
+
echo "✗ Error: Python 3.8+ still not available after install attempt"
|
|
130
|
+
exit 1
|
|
131
|
+
fi
|
|
69
132
|
fi
|
|
70
133
|
echo "✓ Python $PYTHON_VERSION"
|
|
71
134
|
|
|
135
|
+
# Ensure pip3 is available
|
|
136
|
+
if ! command -v pip3 &> /dev/null; then
|
|
137
|
+
echo "Installing pip..."
|
|
138
|
+
python3 -m ensurepip --upgrade 2>/dev/null || python3 -c "import urllib.request; urllib.request.urlretrieve('https://bootstrap.pypa.io/get-pip.py', '/tmp/get-pip.py')" && python3 /tmp/get-pip.py 2>/dev/null || true
|
|
139
|
+
fi
|
|
140
|
+
|
|
72
141
|
# Create installation directory
|
|
73
142
|
echo ""
|
|
74
143
|
echo "Creating installation directory..."
|
|
@@ -88,6 +157,13 @@ echo "Copying source files..."
|
|
|
88
157
|
cp -r "${REPO_DIR}/src/"* "${INSTALL_DIR}/"
|
|
89
158
|
echo "✓ Source files copied"
|
|
90
159
|
|
|
160
|
+
# Copy learning modules explicitly (v2.7+ — ensures nested dir is handled)
|
|
161
|
+
if [ -d "${REPO_DIR}/src/learning" ]; then
|
|
162
|
+
mkdir -p "${INSTALL_DIR}/learning"
|
|
163
|
+
cp -r "${REPO_DIR}/src/learning/"* "${INSTALL_DIR}/learning/"
|
|
164
|
+
echo "✓ Learning modules copied"
|
|
165
|
+
fi
|
|
166
|
+
|
|
91
167
|
# Copy hooks
|
|
92
168
|
echo "Copying hooks..."
|
|
93
169
|
mkdir -p "${INSTALL_DIR}/hooks"
|
|
@@ -205,6 +281,22 @@ else
|
|
|
205
281
|
echo "⚠️ requirements-core.txt not found, skipping dependency installation"
|
|
206
282
|
fi
|
|
207
283
|
|
|
284
|
+
# Install learning dependencies (v2.7+)
|
|
285
|
+
echo ""
|
|
286
|
+
echo "Installing learning dependencies..."
|
|
287
|
+
echo " Enables intelligent pattern learning and personalized recall"
|
|
288
|
+
|
|
289
|
+
if [ -f "${REPO_DIR}/requirements-learning.txt" ]; then
|
|
290
|
+
if pip3 install $PIP_FLAGS -q -r "${REPO_DIR}/requirements-learning.txt" 2>/dev/null; then
|
|
291
|
+
echo "✓ Learning dependencies installed (personalized ranking enabled)"
|
|
292
|
+
else
|
|
293
|
+
echo "○ Learning dependencies skipped (core features unaffected)"
|
|
294
|
+
echo " To install later: pip3 install lightgbm scipy"
|
|
295
|
+
fi
|
|
296
|
+
else
|
|
297
|
+
echo "○ requirements-learning.txt not found (learning features will use rule-based ranking)"
|
|
298
|
+
fi
|
|
299
|
+
|
|
208
300
|
# Initialize knowledge graph and pattern learning
|
|
209
301
|
echo ""
|
|
210
302
|
echo "Initializing advanced features..."
|
|
@@ -677,6 +769,10 @@ echo " slm status"
|
|
|
677
769
|
echo " slm remember 'My first memory'"
|
|
678
770
|
echo " slm recall 'first'"
|
|
679
771
|
echo ""
|
|
772
|
+
echo "Learning System (v2.7+):"
|
|
773
|
+
echo " slm learning status - Check learning system"
|
|
774
|
+
echo " slm engagement - View engagement metrics"
|
|
775
|
+
echo ""
|
|
680
776
|
# Optional: Offer to install optional features
|
|
681
777
|
if [ "$NON_INTERACTIVE" = true ]; then
|
|
682
778
|
INSTALL_CHOICE="N"
|