claude-self-reflect 2.4.13 → 2.4.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/agents/open-source-maintainer.md +94 -8
- package/Dockerfile.watcher +7 -0
- package/README.md +19 -0
- package/docker-compose.yaml +8 -7
- package/installer/setup-wizard-docker.js +38 -0
- package/mcp-server/src/server.py +4 -2
- package/mcp-server/src/utils.py +53 -0
- package/package.json +1 -1
- package/scripts/import-conversations-unified.py +11 -4
- package/scripts/import-watcher.py +75 -20
package/.claude/agents/open-source-maintainer.md
CHANGED

@@ -175,20 +175,76 @@ safety check -r mcp-server/requirements.txt
 # For Node: npm test
 ```

-#### 5. Release
+#### 4.5. Create Professional Release Notes
 ```bash
+# Create release notes file
+VERSION=$(node -p "require('./package.json').version")
+cat > docs/RELEASE_NOTES_v${VERSION}.md << 'EOF'
+# Release Notes - v${VERSION}
+
+## Summary
+Brief description of what this release addresses and why it matters.
+
+## Changes
+
+### Bug Fixes
+- Fixed global npm installation failing due to Docker build context issues (#13)
+- Modified Dockerfile.importer to embed Python dependencies directly
+- Removed dependency on external requirements.txt file during build
+- Ensures compatibility with both local development and global npm installations
+
+### Technical Details
+- Files modified:
+  - `Dockerfile.importer`: Embedded Python dependencies inline
+  - Removed COPY instruction for scripts that are volume-mounted at runtime
+
+### Verification
+- Docker builds tested successfully in isolation
+- Import process verified to skip already imported files
+- Both local and global npm installation paths validated
+
+## Installation
+```bash
+npm install -g claude-self-reflect@${VERSION}
+```
+
+## Contributors
+Thank you to everyone who reported issues and helped test this release:
+- @mattias012 - Reported npm global installation issue
+- @vbp1 - Confirmed Docker setup problems
+
+## Related Issues
+- Resolves #13: Global npm installation Docker build failures
+EOF
+```
+
+#### 5. Version Bump & Release Creation
+```bash
+# Update package.json version BEFORE creating tag
+# Determine version bump type based on changes:
+# - patch: bug fixes, minor updates (2.4.10 -> 2.4.11)
+# - minor: new features, non-breaking changes (2.4.10 -> 2.5.0)
+# - major: breaking changes (2.4.10 -> 3.0.0)
+npm version patch --no-git-tag-version # Updates package.json and package-lock.json
+
+# Commit version bump
+VERSION=$(node -p "require('./package.json').version")
+git add package.json package-lock.json
+git commit -m "chore: bump version to ${VERSION} for release"
+git push origin main
+
 # Create and push tag
-git tag -a
-git push origin
+git tag -a v${VERSION} -m "Release v${VERSION} - Brief description"
+git push origin v${VERSION}

 # Create GitHub release
-gh release create
-  --title "
-  --notes-file docs/
+gh release create v${VERSION} \
+  --title "v${VERSION} - Release Title" \
+  --notes-file docs/RELEASE_NOTES_v${VERSION}.md \
   --target main

 # Monitor the release workflow
-echo "
+echo "Release created! Monitoring automated publishing..."
 gh run list --workflow "CI/CD Pipeline" --limit 1
 gh run watch
 ```

@@ -207,7 +263,7 @@ echo "⏳ Waiting for automated npm publish..."
 # Monitor the release workflow until npm publish completes
 ```

-#### 7. Post-Release Verification
+#### 7. Post-Release Verification & Issue Management
 ```bash
 # Verify GitHub release
 gh release view vX.Y.Z

@@ -217,6 +273,36 @@ npm view claude-self-reflect version

 # Check that related PRs are closed
 gh pr list --state closed --limit 10
+
+# Handle related issues professionally
+# For each issue addressed in this release:
+ISSUE_NUMBER=13 # Example
+VERSION=$(node -p "require('./package.json').version")
+
+# Determine if issue should be closed or kept open
+# Close if: bug fixed, feature implemented, question answered
+# Keep open if: partial fix, needs more work, ongoing discussion
+
+# Professional comment template (no emojis, clear references)
+gh issue comment $ISSUE_NUMBER --body "Thank you for reporting this issue. The global npm installation problem has been addressed in release v${VERSION}.
+
+The fix involved modifying the Docker build process to embed dependencies directly:
+- Modified: Dockerfile.importer - Embedded Python dependencies to avoid file path issues
+- Verified: Docker builds work correctly without requiring scripts directory in build context
+- Tested: Import process correctly skips already imported files
+
+You can update to the latest version with:
+\`\`\`bash
+npm install -g claude-self-reflect@${VERSION}
+\`\`\`
+
+Please let us know if you encounter any issues with the new version."
+
+# Close the issue if fully resolved
+gh issue close $ISSUE_NUMBER --comment "Closing as resolved in v${VERSION}. Feel free to reopen if you encounter any related issues."
+
+# Or keep open with status update if partially resolved
+# gh issue comment $ISSUE_NUMBER --body "Partial fix implemented in v${VERSION}. Keeping this issue open to track remaining work on [specific aspect]."
 ```

 #### 8. Rollback Procedures
package/Dockerfile.watcher
CHANGED
@@ -20,12 +20,19 @@ RUN pip install --no-cache-dir \
 # Create non-root user
 RUN useradd -m -u 1000 watcher

+# Pre-download FastEmbed model to avoid runtime downloads
+RUN mkdir -p /home/watcher/.cache && \
+    FASTEMBED_CACHE_PATH=/home/watcher/.cache/fastembed python -c "from fastembed import TextEmbedding; import os; os.environ['FASTEMBED_CACHE_PATH']='/home/watcher/.cache/fastembed'; TextEmbedding('sentence-transformers/all-MiniLM-L6-v2')" && \
+    chown -R watcher:watcher /home/watcher/.cache
+
 # Create scripts directory and copy required files
 RUN mkdir -p /scripts

 # Copy all necessary scripts
 COPY scripts/import-conversations-unified.py /scripts/
 COPY scripts/import-watcher.py /scripts/
+COPY scripts/utils.py /scripts/
+COPY scripts/trigger-import.py /scripts/

 RUN chmod +x /scripts/*.py

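The pre-download step above bakes the embedding model into the image cache so containers never download it at startup. Below is a minimal sketch of the load path the import scripts would rely on, assuming the cache location and model name used in the `RUN` step; it is an illustration, not an excerpt from the package.

```python
# Sketch: load the pre-baked FastEmbed model from the image cache instead of downloading it.
# Assumes the cache path and model name from the Dockerfile.watcher RUN step above.
import os

os.environ.setdefault("FASTEMBED_CACHE_PATH", "/home/watcher/.cache/fastembed")

from fastembed import TextEmbedding

model = TextEmbedding("sentence-transformers/all-MiniLM-L6-v2")
vectors = list(model.embed(["claude-self-reflect test sentence"]))  # 384-dimensional vectors
```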
package/README.md
CHANGED
@@ -203,6 +203,10 @@ Recent conversations matter more. Old ones fade. Like your brain, but reliable.

 Works perfectly out of the box. [Configure if you're particular](docs/memory-decay.md).

+## Theoretical Foundation
+
+Claude Self-Reflect addresses the "reality gap" in AI memory systems - the distance between perfect recall expectations and practical utility. Our approach aligns with the SPAR Framework (Sense, Plan, Act, Reflect) for agentic AI systems. [Learn more about our design philosophy](docs/architecture/SPAR-alignment.md).
+
 ## For the Skeptics

 **"Just use grep"** - Sure, enjoy your 10,000 matches for "database"

@@ -372,4 +376,19 @@ Special thanks to our contributors and security researchers:
 - **[@akamalov](https://github.com/akamalov)** - Highlighted Ubuntu WSL bug and helped educate about filesystem nuances
 - **[@kylesnowschwartz](https://github.com/kylesnowschwartz)** - Comprehensive security review leading to v2.3.3+ security improvements (#6)

+## Windows Configuration
+
+### Recommended: Use WSL
+For the best experience on Windows, we recommend using WSL (Windows Subsystem for Linux) which provides native Linux compatibility for Docker operations.
+
+### Alternative: Native Windows
+If using Docker Desktop on native Windows, you need to adjust the CONFIG_PATH in your `.env` file to use Docker-compatible paths:
+
+```bash
+# Replace USERNAME with your Windows username
+CONFIG_PATH=/c/Users/USERNAME/.claude-self-reflect/config
+```
+
+This ensures Docker can properly mount the config directory. The setup wizard creates the directory, but Windows users need to update the path format for Docker compatibility.
+
 MIT License. Built with ❤️ for the Claude community.
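The `/c/Users/...` form in the new Windows section is the drive-letter convention Docker Desktop accepts for bind mounts. As an illustration only (not part of the package), a small helper that derives that form from a native Windows path:

```python
# Illustration only: convert a native Windows path to the /c/Users/... form
# shown for CONFIG_PATH in the README section above.
from pathlib import PureWindowsPath

def to_docker_path(win_path: str) -> str:
    p = PureWindowsPath(win_path)
    drive = p.drive.rstrip(":").lower()           # "C:" -> "c"
    return "/" + "/".join((drive, *p.parts[1:]))  # re-join the remaining components

print(to_docker_path(r"C:\Users\USERNAME\.claude-self-reflect\config"))
# -> /c/Users/USERNAME/.claude-self-reflect/config
```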
package/docker-compose.yaml
CHANGED
@@ -7,7 +7,7 @@ services:
 image: alpine
 command: chown -R 1000:1000 /config
 volumes:
--
+- ${CONFIG_PATH:-~/.claude-self-reflect/config}:/config
 profiles: ["watch", "mcp", "import"]

 # Qdrant vector database - the heart of semantic search

@@ -22,8 +22,8 @@ services:
 - QDRANT__LOG_LEVEL=INFO
 - QDRANT__SERVICE__HTTP_PORT=6333
 restart: unless-stopped
-mem_limit: ${QDRANT_MEMORY:-
-memswap_limit: ${QDRANT_MEMORY:-
+mem_limit: ${QDRANT_MEMORY:-2g}
+memswap_limit: ${QDRANT_MEMORY:-2g}

 # One-time import service (runs once then exits)
 importer:

@@ -36,7 +36,7 @@ services:
 - qdrant
 volumes:
 - ${CLAUDE_LOGS_PATH:-~/.claude/projects}:/logs:ro
--
+- ${CONFIG_PATH:-~/.claude-self-reflect/config}:/config
 - ./scripts:/scripts:ro
 environment:
 - QDRANT_URL=http://qdrant:6333

@@ -64,8 +64,9 @@ services:
 - qdrant
 volumes:
 - ${CLAUDE_LOGS_PATH:-~/.claude/projects}:/logs:ro
--
+- ${CONFIG_PATH:-~/.claude-self-reflect/config}:/config
 - ./scripts:/scripts:ro
+- /tmp:/tmp
 environment:
 - QDRANT_URL=http://qdrant:6333
 - STATE_FILE=/config/imported-files.json

@@ -78,8 +79,8 @@ services:
 - PYTHONUNBUFFERED=1
 restart: unless-stopped
 profiles: ["watch"]
-mem_limit:
-memswap_limit:
+mem_limit: 1g
+memswap_limit: 1g

 # MCP server for Claude integration
 mcp-server:
package/installer/setup-wizard-docker.js
CHANGED

@@ -7,6 +7,7 @@ import fs from 'fs/promises';
 import fsSync from 'fs';
 import readline from 'readline';
 import path from 'path';
+import os from 'os';

 const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);

@@ -94,6 +95,40 @@ async function checkDocker() {
 async function configureEnvironment() {
   console.log('\n🔐 Configuring environment...');

+  // Setup config directory in user's home directory for global npm installs
+  const userConfigDir = join(os.homedir(), '.claude-self-reflect', 'config');
+
+  try {
+    await fs.mkdir(userConfigDir, { recursive: true });
+    console.log(`📁 Using config directory: ${userConfigDir}`);
+
+    // Migrate existing config from project directory if it exists
+    const oldConfigDir = join(projectRoot, 'config');
+    try {
+      await fs.access(oldConfigDir);
+      const files = await fs.readdir(oldConfigDir);
+      if (files.length > 0) {
+        console.log('🔄 Migrating existing config data...');
+        for (const file of files) {
+          const sourcePath = join(oldConfigDir, file);
+          const targetPath = join(userConfigDir, file);
+          try {
+            await fs.copyFile(sourcePath, targetPath);
+          } catch (err) {
+            // Ignore copy errors, file might already exist
+          }
+        }
+        console.log('✅ Config migration completed');
+      }
+    } catch {
+      // No old config directory, nothing to migrate
+    }
+  } catch (error) {
+    console.log(`❌ Could not create config directory: ${error.message}`);
+    console.log(' This may cause Docker mount issues. Please check permissions.');
+    throw error;
+  }
+
   const envPath = join(projectRoot, '.env');
   let envContent = '';
   let hasValidApiKey = false;

@@ -153,6 +188,9 @@ async function configureEnvironment() {
   if (!envContent.includes('PREFER_LOCAL_EMBEDDINGS=')) {
     envContent += `PREFER_LOCAL_EMBEDDINGS=${localMode ? 'true' : 'false'}\n`;
   }
+  if (!envContent.includes('CONFIG_PATH=')) {
+    envContent += `CONFIG_PATH=${userConfigDir}\n`;
+  }

   await fs.writeFile(envPath, envContent.trim() + '\n');
   console.log('✅ Environment configured');
package/mcp-server/src/server.py
CHANGED
@@ -11,6 +11,7 @@ import hashlib
 import time

 from fastmcp import FastMCP, Context
+from .utils import normalize_project_name
 from pydantic import BaseModel, Field
 from qdrant_client import AsyncQdrantClient, models
 from qdrant_client.models import (

@@ -233,8 +234,9 @@ async def reflect_on_past(
     # Filter collections by project if not searching all
     project_collections = [] # Define at this scope for later use
     if target_project != 'all':
-        # Generate the collection name pattern for this project
-
+        # Generate the collection name pattern for this project using normalized name
+        normalized_name = normalize_project_name(target_project)
+        project_hash = hashlib.md5(normalized_name.encode()).hexdigest()[:8]
         project_collections = [
             c for c in all_collections
             if c.startswith(f"conv_{project_hash}_")
package/mcp-server/src/utils.py
ADDED

@@ -0,0 +1,53 @@
+"""Shared utilities for claude-self-reflect MCP server and scripts."""
+
+from pathlib import Path
+
+
+def normalize_project_name(project_path: str) -> str:
+    """
+    Normalize project name for consistent hashing across import/search.
+
+    Handles various path formats:
+    - Claude logs format: -Users-kyle-Code-claude-self-reflect -> claude-self-reflect
+    - Regular paths: /path/to/project -> project
+    - Already normalized: project -> project
+
+    Args:
+        project_path: Project path or name in any format
+
+    Returns:
+        Normalized project name suitable for consistent hashing
+    """
+    if not project_path:
+        return ""
+
+    # Remove trailing slashes
+    project_path = project_path.rstrip('/')
+
+    # Handle Claude logs format (starts with dash)
+    if project_path.startswith('-'):
+        # For paths like -Users-kyle-Code-claude-self-reflect
+        # We want to extract the actual project name which may contain dashes
+        # Strategy: Find common parent directories and extract what comes after
+
+        # Remove leading dash and convert back to path-like format
+        path_str = project_path[1:].replace('-', '/')
+        path_parts = Path(path_str).parts
+
+        # Look for common project parent directories
+        project_parents = {'projects', 'code', 'Code', 'repos', 'repositories',
+                           'dev', 'Development', 'work', 'src', 'github'}
+
+        # Find the project name after a known parent directory
+        for i, part in enumerate(path_parts):
+            if part.lower() in project_parents and i + 1 < len(path_parts):
+                # Everything after the parent directory is the project name
+                # Join remaining parts with dash if project name has multiple components
+                remaining = path_parts[i + 1:]
+                return '-'.join(remaining)
+
+        # Fallback: just use the last component
+        return path_parts[-1] if path_parts else project_path
+
+    # Handle regular paths - use basename
+    return Path(project_path).name
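With both the importer and the MCP server hashing this normalized name (see the server.py change above and the import script change below), the collection a project lands in can be reproduced from its Claude logs directory name. A sketch of that round trip, reusing the function added above; the `_local` suffix is only an illustrative stand-in for the importer's collection_suffix:

```python
# Sketch: how a Claude logs directory name maps to a Qdrant collection name.
# Mirrors the hashing in server.py and import-conversations-unified.py; the
# "_local" suffix here is an example value, not necessarily the real suffix.
import hashlib

from utils import normalize_project_name

project_dir = "-Users-kyle-Code-claude-self-reflect"
normalized = normalize_project_name(project_dir)            # -> "claude-self-reflect"
project_hash = hashlib.md5(normalized.encode()).hexdigest()[:8]
collection_name = f"conv_{project_hash}_local"
print(normalized, collection_name)
```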
package/package.json
CHANGED

package/scripts/import-conversations-unified.py
CHANGED

@@ -14,6 +14,10 @@ from typing import List, Dict, Any
 import logging
 from pathlib import Path

+# Add the mcp-server/src directory to the Python path
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'mcp-server', 'src'))
+from utils import normalize_project_name
+
 from qdrant_client import QdrantClient
 from qdrant_client.models import (
     VectorParams, Distance, PointStruct,

@@ -29,7 +33,9 @@ from tenacity import (
 # Configuration
 QDRANT_URL = os.getenv("QDRANT_URL", "http://localhost:6333")
 LOGS_DIR = os.getenv("LOGS_DIR", "/logs")
-
+# Default to project config directory for state file
+default_state_file = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "config", "imported-files.json")
+STATE_FILE = os.getenv("STATE_FILE", default_state_file)
 BATCH_SIZE = int(os.getenv("BATCH_SIZE", "10")) # Reduced from 100 to prevent OOM
 PREFER_LOCAL_EMBEDDINGS = os.getenv("PREFER_LOCAL_EMBEDDINGS", "false").lower() == "true"
 VOYAGE_API_KEY = os.getenv("VOYAGE_KEY")

@@ -343,10 +349,11 @@ def main():
     # Import each project
     total_imported = 0
     for project_dir in project_dirs:
-        # Create collection name from project
-
+        # Create collection name from normalized project name
+        normalized_name = normalize_project_name(project_dir.name)
+        collection_name = f"conv_{hashlib.md5(normalized_name.encode()).hexdigest()[:8]}{collection_suffix}"

-        logger.info(f"Importing project: {project_dir.name} -> {collection_name}")
+        logger.info(f"Importing project: {project_dir.name} (normalized: {normalized_name}) -> {collection_name}")
         chunks = import_project(project_dir, collection_name, state)
         total_imported += chunks
         logger.info(f"Imported {chunks} chunks from {project_dir.name}")
package/scripts/import-watcher.py
CHANGED

@@ -1,33 +1,88 @@
 #!/usr/bin/env python3
-"""
+"""Enhanced watcher that runs import periodically and supports manual triggers."""

 import time
 import subprocess
 import os
 import sys
 from datetime import datetime
+from pathlib import Path

 WATCH_INTERVAL = int(os.getenv('WATCH_INTERVAL', '60'))
+SIGNAL_FILE = Path("/tmp/claude-self-reflect-import-current")
+CHECK_INTERVAL = 1 # Check for signal file every second

-print(f"[Watcher] Starting import watcher with {WATCH_INTERVAL}s interval", flush=True)
+print(f"[Watcher] Starting enhanced import watcher with {WATCH_INTERVAL}s interval", flush=True)
+print(f"[Watcher] Monitoring signal file: {SIGNAL_FILE}", flush=True)
+
+last_import = 0

 while True:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    current_time = time.time()
+
+    # Check for manual trigger signal
+    if SIGNAL_FILE.exists():
+        print(f"[Watcher] Signal detected! Running immediate import...", flush=True)
+        try:
+            # Read conversation ID if provided
+            conversation_id = None
+            try:
+                conversation_id = SIGNAL_FILE.read_text().strip()
+            except:
+                pass
+
+            # Remove signal file to prevent re-triggering
+            SIGNAL_FILE.unlink()
+
+            # Run import with special flag for current conversation only
+            cmd = [sys.executable, "/scripts/import-conversations-unified.py"]
+            if conversation_id:
+                cmd.extend(["--conversation-id", conversation_id])
+            else:
+                # Import only today's conversations for manual trigger
+                cmd.extend(["--days", "1"])
+
+            # Write progress indicator
+            progress_file = Path("/tmp/claude-self-reflect-import-progress")
+            progress_file.write_text("🔄 Starting import...")
+
+            print(f"[Watcher] Running command: {' '.join(cmd)}", flush=True)
+            result = subprocess.run(cmd, capture_output=True, text=True)
+
+            if result.returncode == 0:
+                print(f"[Watcher] Manual import completed successfully", flush=True)
+                # Create completion signal
+                Path("/tmp/claude-self-reflect-import-complete").touch()
+            else:
+                print(f"[Watcher] Manual import failed with code {result.returncode}", flush=True)
+                if result.stderr:
+                    print(f"[Watcher] Error: {result.stderr}", flush=True)
+
+            last_import = current_time
+
+        except Exception as e:
+            print(f"[Watcher] Error during manual import: {e}", flush=True)
+
+    # Regular scheduled import
+    elif current_time - last_import >= WATCH_INTERVAL:
+        try:
+            print(f"[Watcher] Running scheduled import at {datetime.now().isoformat()}", flush=True)
+            result = subprocess.run([
+                sys.executable,
+                "/scripts/import-conversations-unified.py"
+            ], capture_output=True, text=True)
+
+            if result.returncode == 0:
+                print(f"[Watcher] Scheduled import completed successfully", flush=True)
+            else:
+                print(f"[Watcher] Scheduled import failed with code {result.returncode}", flush=True)
+                if result.stderr:
+                    print(f"[Watcher] Error: {result.stderr}", flush=True)
+
+            last_import = current_time
+
+        except Exception as e:
+            print(f"[Watcher] Error during scheduled import: {e}", flush=True)

-
-time.sleep(
+    # Short sleep to check for signals frequently
+    time.sleep(CHECK_INTERVAL)
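The signal-file handling above amounts to a small protocol: write `/tmp/claude-self-reflect-import-current` (optionally containing a conversation ID), and the watcher runs an immediate import, then touches `/tmp/claude-self-reflect-import-complete`. A minimal client sketch, assuming the host shares `/tmp` with the watcher container as in the docker-compose change above:

```python
# Sketch: trigger an immediate import and wait for the watcher to report completion.
# Assumes /tmp is shared with the watcher container (docker-compose mounts /tmp:/tmp).
import time
from pathlib import Path

SIGNAL = Path("/tmp/claude-self-reflect-import-current")
COMPLETE = Path("/tmp/claude-self-reflect-import-complete")

COMPLETE.unlink(missing_ok=True)   # clear any stale completion marker
SIGNAL.write_text("")              # empty body -> the watcher imports today's conversations

while not COMPLETE.exists():       # the watcher touches this file after a successful run
    time.sleep(1)
print("Import finished")
```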