@stackmemoryai/stackmemory 0.3.21 → 0.3.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/commands/linear-unified.js +2 -3
- package/dist/cli/commands/linear-unified.js.map +2 -2
- package/dist/cli/commands/ralph.js +294 -0
- package/dist/cli/commands/ralph.js.map +7 -0
- package/dist/cli/commands/tasks.js +1 -1
- package/dist/cli/commands/tasks.js.map +2 -2
- package/dist/cli/index.js +2 -0
- package/dist/cli/index.js.map +2 -2
- package/dist/integrations/mcp/handlers/code-execution-handlers.js +262 -0
- package/dist/integrations/mcp/handlers/code-execution-handlers.js.map +7 -0
- package/dist/integrations/mcp/tool-definitions-code.js +121 -0
- package/dist/integrations/mcp/tool-definitions-code.js.map +7 -0
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js +586 -0
- package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js.map +7 -0
- package/dist/integrations/ralph/context/context-budget-manager.js +297 -0
- package/dist/integrations/ralph/context/context-budget-manager.js.map +7 -0
- package/dist/integrations/ralph/context/stackmemory-context-loader.js +356 -0
- package/dist/integrations/ralph/context/stackmemory-context-loader.js.map +7 -0
- package/dist/integrations/ralph/index.js +14 -0
- package/dist/integrations/ralph/index.js.map +7 -0
- package/dist/integrations/ralph/learning/pattern-learner.js +397 -0
- package/dist/integrations/ralph/learning/pattern-learner.js.map +7 -0
- package/dist/integrations/ralph/lifecycle/iteration-lifecycle.js +444 -0
- package/dist/integrations/ralph/lifecycle/iteration-lifecycle.js.map +7 -0
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js +459 -0
- package/dist/integrations/ralph/orchestration/multi-loop-orchestrator.js.map +7 -0
- package/dist/integrations/ralph/performance/performance-optimizer.js +354 -0
- package/dist/integrations/ralph/performance/performance-optimizer.js.map +7 -0
- package/dist/integrations/ralph/ralph-integration-demo.js +178 -0
- package/dist/integrations/ralph/ralph-integration-demo.js.map +7 -0
- package/dist/integrations/ralph/state/state-reconciler.js +400 -0
- package/dist/integrations/ralph/state/state-reconciler.js.map +7 -0
- package/dist/integrations/ralph/swarm/swarm-coordinator.js +487 -0
- package/dist/integrations/ralph/swarm/swarm-coordinator.js.map +7 -0
- package/dist/integrations/ralph/types.js +1 -0
- package/dist/integrations/ralph/types.js.map +7 -0
- package/dist/integrations/ralph/visualization/ralph-debugger.js +581 -0
- package/dist/integrations/ralph/visualization/ralph-debugger.js.map +7 -0
- package/dist/servers/railway/index.js +98 -92
- package/dist/servers/railway/index.js.map +3 -3
- package/package.json +1 -2
- package/scripts/claude-sm-autostart.js +1 -1
- package/scripts/clean-linear-backlog.js +2 -2
- package/scripts/debug-linear-update.js +1 -1
- package/scripts/debug-railway-build.js +87 -0
- package/scripts/delete-linear-tasks.js +2 -2
- package/scripts/deploy-ralph-swarm.sh +365 -0
- package/scripts/install-code-execution-hooks.sh +96 -0
- package/scripts/linear-task-review.js +1 -1
- package/scripts/ralph-integration-test.js +274 -0
- package/scripts/ralph-loop-implementation.js +404 -0
- package/scripts/swarm-monitor.js +509 -0
- package/scripts/sync-and-clean-tasks.js +1 -1
- package/scripts/sync-linear-graphql.js +3 -3
- package/scripts/sync-linear-tasks.js +1 -1
- package/scripts/test-code-execution.js +143 -0
- package/scripts/test-parallel-swarms.js +443 -0
- package/scripts/testing/ralph-cli-test.js +88 -0
- package/scripts/testing/ralph-integration-validation.js +727 -0
- package/scripts/testing/ralph-swarm-test-scenarios.js +613 -0
- package/scripts/update-linear-tasks-fixed.js +1 -1
- package/scripts/validate-railway-deployment.js +137 -0
- package/templates/claude-hooks/hook-config.json +59 -0
- package/templates/claude-hooks/pre-tool-use +189 -0
- package/dist/servers/railway/minimal.js +0 -91
- package/dist/servers/railway/minimal.js.map +0 -7
package/scripts/debug-railway-build.js
@@ -0,0 +1,87 @@
+#!/usr/bin/env node
+
+/**
+ * Debug Railway build issues
+ */
+
+import fs from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+console.log('🔍 Railway Build Debugger');
+console.log('========================\n');
+
+// Check for server files
+const serverDir = path.join(__dirname, '..', 'dist', 'servers', 'railway');
+const srcDir = path.join(__dirname, '..', 'src', 'servers', 'railway');
+
+console.log('📁 Checking dist/servers/railway:');
+if (fs.existsSync(serverDir)) {
+  const files = fs.readdirSync(serverDir);
+  files.forEach(file => {
+    const stats = fs.statSync(path.join(serverDir, file));
+    console.log(`  - ${file} (${stats.size} bytes, modified: ${stats.mtime.toISOString()})`);
+
+    // Check for minimal server references
+    if (file === 'index.js') {
+      const content = fs.readFileSync(path.join(serverDir, file), 'utf-8');
+      if (content.includes('Minimal')) {
+        console.log(`    ⚠️ Contains "Minimal" references`);
+      }
+      if (content.includes('/auth/signup')) {
+        console.log(`    ✅ Contains auth endpoints`);
+      }
+    }
+  });
+} else {
+  console.log('  ❌ Directory does not exist');
+}
+
+console.log('\n📁 Checking src/servers/railway:');
+if (fs.existsSync(srcDir)) {
+  const files = fs.readdirSync(srcDir);
+  files.forEach(file => {
+    const stats = fs.statSync(path.join(srcDir, file));
+    console.log(`  - ${file} (${stats.size} bytes)`);
+  });
+} else {
+  console.log('  ❌ Directory does not exist');
+}
+
+// Check package.json scripts
+console.log('\n📦 Package.json start scripts:');
+const packageJson = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf-8'));
+Object.entries(packageJson.scripts).forEach(([key, value]) => {
+  if (key.includes('start')) {
+    console.log(`  ${key}: ${value}`);
+  }
+});
+
+// Check Dockerfile
+console.log('\n🐳 Dockerfile CMD:');
+const dockerfile = fs.readFileSync(path.join(__dirname, '..', 'Dockerfile'), 'utf-8');
+const cmdMatch = dockerfile.match(/CMD\s+\[.*\]/g);
+if (cmdMatch) {
+  cmdMatch.forEach(cmd => {
+    console.log(`  ${cmd}`);
+  });
+}
+
+// Check Railway config
+console.log('\n🚂 Railway.json:');
+const railwayConfig = path.join(__dirname, '..', 'railway.json');
+if (fs.existsSync(railwayConfig)) {
+  const config = JSON.parse(fs.readFileSync(railwayConfig, 'utf-8'));
+  console.log(JSON.stringify(config, null, 2));
+} else {
+  console.log('  ❌ railway.json not found');
+}
+
+console.log('\n💡 Recommendations:');
+console.log('1. Railway may be using a cached build layer');
+console.log('2. Try changing the base image in Dockerfile to force rebuild');
+console.log('3. Check Railway dashboard for any override settings');
+console.log('4. Consider contacting Railway support about cache issues');
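One detail worth noting about the debugger above: the Dockerfile check uses `/CMD\s+\[.*\]/g`, which only matches the exec-form `CMD ["..."]`; a shell-form `CMD node ...` would be silently skipped. A minimal standalone sketch of a more permissive check, assuming a `Dockerfile` in the working directory (this snippet and its broader regex are illustrative additions, not part of the 0.3.24 release):

```js
// sketch-dockerfile-cmd-check.mjs — hypothetical helper, not shipped in the package
import fs from 'fs';

const dockerfile = fs.readFileSync('Dockerfile', 'utf-8');

// Matches exec-form (CMD ["node", "..."]) and shell-form (CMD node ...) alike,
// unlike the exec-form-only /CMD\s+\[.*\]/ used in debug-railway-build.js.
const cmdLines = dockerfile.match(/^CMD\s+.*$/gm);
if (cmdLines) {
  cmdLines.forEach(cmd => console.log(`  ${cmd}`));
} else {
  console.log('  ❌ No CMD instruction found in Dockerfile');
}
```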
package/scripts/delete-linear-tasks.js
@@ -8,7 +8,7 @@ import 'dotenv/config';
 import fs from 'fs';
 import readline from 'readline';
 
-const API_KEY = process.env.LINEAR_API_KEY;
+const API_KEY = process.env.STACKMEMORY_LINEAR_API_KEY || process.env.LINEAR_API_KEY;
 if (!API_KEY) {
   console.error('❌ LINEAR_API_KEY environment variable not set');
   console.log('Please set LINEAR_API_KEY in your .env file or export it in your shell');
@@ -83,7 +83,7 @@ async function deleteLinearTasks() {
   const response = await fetch('https://api.linear.app/graphql', {
     method: 'POST',
     headers: {
-      'Authorization': API_KEY,
+      'Authorization': `Bearer ${API_KEY}`,
       'Content-Type': 'application/json'
     },
     body: JSON.stringify({
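The Linear-related hunks in this release converge on one pattern: read `STACKMEMORY_LINEAR_API_KEY` first, fall back to `LINEAR_API_KEY`, and (in this script) send the key as a `Bearer` token, while the `queryLinear` hunk further down still passes the raw key. A minimal sketch of that request shape, reusing the `queryLinear` name visible in the later hunk; the `viewer` query is only an example and not something these scripts send:

```js
// Minimal sketch of the auth pattern the hunks converge on (not the package's code).
const API_KEY = process.env.STACKMEMORY_LINEAR_API_KEY || process.env.LINEAR_API_KEY;
if (!API_KEY) {
  console.error('❌ LINEAR_API_KEY environment variable not set');
  process.exit(1);
}

async function queryLinear(query, variables = {}) {
  const response = await fetch('https://api.linear.app/graphql', {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${API_KEY}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ query, variables })
  });
  return response.json();
}

// Example usage: fetch the authenticated user's name.
const result = await queryLinear('{ viewer { name } }');
console.log(result.data?.viewer?.name);
```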
package/scripts/deploy-ralph-swarm.sh
@@ -0,0 +1,365 @@
+#!/bin/bash
+
+# Ralph Swarm Deployment Script
+# Ensures parallel execution and monitoring capabilities
+
+set -e
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
+SWARM_DIR="$PROJECT_ROOT/.swarm"
+SWARM_LOGS="$SWARM_DIR/logs"
+SWARM_PIDS="$SWARM_DIR/pids"
+SWARM_STATUS="$SWARM_DIR/status"
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m' # No Color
+
+# Ensure directories exist
+mkdir -p "$SWARM_LOGS" "$SWARM_PIDS" "$SWARM_STATUS"
+
+# Function to print colored output
+print_status() {
+    echo -e "${GREEN}[SWARM]${NC} $1"
+}
+
+print_error() {
+    echo -e "${RED}[ERROR]${NC} $1"
+}
+
+print_warning() {
+    echo -e "${YELLOW}[WARNING]${NC} $1"
+}
+
+print_info() {
+    echo -e "${BLUE}[INFO]${NC} $1"
+}
+
+# Function to check prerequisites
+check_prerequisites() {
+    print_status "Checking prerequisites..."
+
+    # Check Node.js
+    if ! command -v node &> /dev/null; then
+        print_error "Node.js is not installed"
+        exit 1
+    fi
+
+    # Check if project is built
+    if [ ! -d "$PROJECT_ROOT/dist" ]; then
+        print_warning "Project not built. Building now..."
+        cd "$PROJECT_ROOT"
+        npm run build
+    fi
+
+    # Check database connection
+    if [ -z "$DATABASE_URL" ] && [ ! -f "$HOME/.stackmemory/projects.db" ]; then
+        print_warning "No database configured. Using SQLite fallback."
+        export DATABASE_URL="sqlite://$HOME/.stackmemory/projects.db"
+    fi
+
+    print_status "Prerequisites check complete"
+}
+
+# Function to initialize swarm environment
+initialize_swarm() {
+    print_status "Initializing swarm environment..."
+
+    # Create swarm configuration
+    cat > "$SWARM_DIR/config.json" <<EOF
+{
+  "maxAgents": 10,
+  "coordinationInterval": 30000,
+  "driftDetectionThreshold": 5,
+  "freshStartInterval": 3600000,
+  "conflictResolutionStrategy": "expertise",
+  "enableDynamicPlanning": true,
+  "pathologicalBehaviorDetection": true,
+  "parallelExecution": true,
+  "monitoring": {
+    "enabled": true,
+    "port": 3456,
+    "metricsInterval": 5000
+  }
+}
+EOF
+
+    print_status "Swarm configuration created"
+}
+
+# Function to launch swarm with project
+launch_swarm() {
+    local project="$1"
+    local agents="${2:-architect,developer,tester,reviewer}"
+    local max_agents="${3:-5}"
+
+    print_status "Launching swarm for project: $project"
+    print_info "Agents: $agents"
+    print_info "Max agents: $max_agents"
+
+    # Generate swarm ID
+    local swarm_id="swarm-$(date +%Y%m%d-%H%M%S)-$$"
+    local log_file="$SWARM_LOGS/$swarm_id.log"
+    local pid_file="$SWARM_PIDS/$swarm_id.pid"
+
+    # Launch swarm in background
+    nohup node "$PROJECT_ROOT/dist/cli/index.js" ralph swarm \
+        "$project" \
+        --agents "$agents" \
+        --max-agents "$max_agents" \
+        > "$log_file" 2>&1 &
+
+    local pid=$!
+    echo $pid > "$pid_file"
+
+    # Store swarm metadata
+    cat > "$SWARM_STATUS/$swarm_id.json" <<EOF
+{
+  "id": "$swarm_id",
+  "pid": $pid,
+  "project": "$project",
+  "agents": "$agents",
+  "maxAgents": $max_agents,
+  "startTime": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")",
+  "status": "running",
+  "logFile": "$log_file"
+}
+EOF
+
+    print_status "Swarm launched with ID: $swarm_id (PID: $pid)"
+    echo "$swarm_id"
+}
+
+# Function to launch parallel swarms
+launch_parallel_swarms() {
+    print_status "Launching parallel swarms..."
+
+    local swarm_ids=()
+
+    # Swarm 1: Architecture and design
+    swarm_ids+=("$(launch_swarm "Design system architecture" "architect,developer" 3)")
+    sleep 2
+
+    # Swarm 2: Core implementation
+    swarm_ids+=("$(launch_swarm "Implement core features" "developer,tester" 4)")
+    sleep 2
+
+    # Swarm 3: Testing and quality
+    swarm_ids+=("$(launch_swarm "Create comprehensive test suite" "tester,reviewer" 3)")
+    sleep 2
+
+    # Swarm 4: Documentation
+    swarm_ids+=("$(launch_swarm "Generate documentation" "documenter,developer" 2)")
+
+    print_status "Launched ${#swarm_ids[@]} parallel swarms"
+
+    # Return swarm IDs for monitoring
+    echo "${swarm_ids[@]}"
+}
+
+# Function to monitor swarm status
+monitor_swarm() {
+    local swarm_id="$1"
+
+    if [ ! -f "$SWARM_STATUS/$swarm_id.json" ]; then
+        print_error "Swarm $swarm_id not found"
+        return 1
+    fi
+
+    local pid=$(jq -r '.pid' "$SWARM_STATUS/$swarm_id.json")
+    local project=$(jq -r '.project' "$SWARM_STATUS/$swarm_id.json")
+    local log_file=$(jq -r '.logFile' "$SWARM_STATUS/$swarm_id.json")
+
+    if ps -p $pid > /dev/null 2>&1; then
+        print_status "Swarm $swarm_id (PID: $pid) is RUNNING"
+        print_info "Project: $project"
+        print_info "Recent logs:"
+        tail -n 5 "$log_file" | sed 's/^/  /'
+    else
+        print_warning "Swarm $swarm_id (PID: $pid) has STOPPED"
+        # Update status
+        jq '.status = "stopped"' "$SWARM_STATUS/$swarm_id.json" > "$SWARM_STATUS/$swarm_id.json.tmp"
+        mv "$SWARM_STATUS/$swarm_id.json.tmp" "$SWARM_STATUS/$swarm_id.json"
+    fi
+}
+
+# Function to monitor all active swarms
+monitor_all_swarms() {
+    print_status "Monitoring all active swarms..."
+
+    for status_file in "$SWARM_STATUS"/*.json; do
+        if [ -f "$status_file" ]; then
+            local swarm_id=$(basename "$status_file" .json)
+            monitor_swarm "$swarm_id"
+            echo ""
+        fi
+    done
+}
+
+# Function to stop swarm
+stop_swarm() {
+    local swarm_id="$1"
+
+    if [ ! -f "$SWARM_PIDS/$swarm_id.pid" ]; then
+        print_error "Swarm $swarm_id not found"
+        return 1
+    fi
+
+    local pid=$(cat "$SWARM_PIDS/$swarm_id.pid")
+
+    if ps -p $pid > /dev/null 2>&1; then
+        print_status "Stopping swarm $swarm_id (PID: $pid)..."
+        kill -TERM $pid
+        sleep 2
+
+        # Force kill if still running
+        if ps -p $pid > /dev/null 2>&1; then
+            print_warning "Force stopping swarm $swarm_id..."
+            kill -KILL $pid
+        fi
+
+        print_status "Swarm $swarm_id stopped"
+    else
+        print_info "Swarm $swarm_id is not running"
+    fi
+
+    # Update status
+    if [ -f "$SWARM_STATUS/$swarm_id.json" ]; then
+        jq '.status = "stopped"' "$SWARM_STATUS/$swarm_id.json" > "$SWARM_STATUS/$swarm_id.json.tmp"
+        mv "$SWARM_STATUS/$swarm_id.json.tmp" "$SWARM_STATUS/$swarm_id.json"
+    fi
+}
+
+# Function to clean up old swarms
+cleanup_swarms() {
+    print_status "Cleaning up old swarm data..."
+
+    local count=0
+    for status_file in "$SWARM_STATUS"/*.json; do
+        if [ -f "$status_file" ]; then
+            local status=$(jq -r '.status' "$status_file")
+            if [ "$status" = "stopped" ]; then
+                local swarm_id=$(basename "$status_file" .json)
+                rm -f "$status_file"
+                rm -f "$SWARM_PIDS/$swarm_id.pid"
+                count=$((count + 1))
+            fi
+        fi
+    done
+
+    print_status "Cleaned up $count stopped swarms"
+}
+
+# Function to show swarm dashboard
+show_dashboard() {
+    clear
+    echo "========================================"
+    echo "        RALPH SWARM DASHBOARD"
+    echo "========================================"
+    echo ""
+
+    local running=0
+    local stopped=0
+    local total=0
+
+    for status_file in "$SWARM_STATUS"/*.json; do
+        if [ -f "$status_file" ]; then
+            total=$((total + 1))
+            local status=$(jq -r '.status' "$status_file")
+            if [ "$status" = "running" ]; then
+                running=$((running + 1))
+            else
+                stopped=$((stopped + 1))
+            fi
+        fi
+    done
+
+    echo "Total Swarms: $total"
+    echo "Running: $running"
+    echo "Stopped: $stopped"
+    echo ""
+    echo "----------------------------------------"
+    echo "Active Swarms:"
+    echo "----------------------------------------"
+
+    for status_file in "$SWARM_STATUS"/*.json; do
+        if [ -f "$status_file" ]; then
+            local status=$(jq -r '.status' "$status_file")
+            if [ "$status" = "running" ]; then
+                local swarm_id=$(basename "$status_file" .json)
+                local project=$(jq -r '.project' "$status_file")
+                local agents=$(jq -r '.agents' "$status_file")
+                echo ""
+                echo "ID: $swarm_id"
+                echo "Project: $project"
+                echo "Agents: $agents"
+                echo "----------------------------------------"
+            fi
+        fi
+    done
+}
+
+# Main command handler
+case "${1:-}" in
+    "start")
+        check_prerequisites
+        initialize_swarm
+        shift
+        launch_swarm "$@"
+        ;;
+    "parallel")
+        check_prerequisites
+        initialize_swarm
+        launch_parallel_swarms
+        ;;
+    "monitor")
+        if [ -n "${2:-}" ]; then
+            monitor_swarm "$2"
+        else
+            monitor_all_swarms
+        fi
+        ;;
+    "stop")
+        if [ -z "${2:-}" ]; then
+            print_error "Please provide swarm ID"
+            exit 1
+        fi
+        stop_swarm "$2"
+        ;;
+    "cleanup")
+        cleanup_swarms
+        ;;
+    "dashboard")
+        show_dashboard
+        ;;
+    "help"|"--help"|"-h"|"")
+        echo "Ralph Swarm Deployment Script"
+        echo ""
+        echo "Usage: $0 [command] [options]"
+        echo ""
+        echo "Commands:"
+        echo "  start <project> [agents] [max]   Launch a swarm for a project"
+        echo "  parallel                         Launch multiple parallel swarms"
+        echo "  monitor [swarm_id]               Monitor swarm(s) status"
+        echo "  stop <swarm_id>                  Stop a specific swarm"
+        echo "  cleanup                          Clean up stopped swarms"
+        echo "  dashboard                        Show swarm dashboard"
+        echo "  help                             Show this help message"
+        echo ""
+        echo "Examples:"
+        echo "  $0 start \"Build a todo app\" \"developer,tester\" 4"
+        echo "  $0 parallel"
+        echo "  $0 monitor swarm-20240120-123456-1234"
+        echo "  $0 stop swarm-20240120-123456-1234"
+        echo "  $0 dashboard"
+        ;;
+    *)
+        print_error "Unknown command: $1"
+        echo "Run '$0 help' for usage information"
+        exit 1
+        ;;
+esac
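The deploy script tracks each swarm through a small JSON record under `.swarm/status/`, which is what its `monitor` and `dashboard` commands query with `jq`. For Node-side consumers, a minimal sketch of reading those same records; the field names come from `launch_swarm` above, while the script name and output format are assumptions rather than the package's `swarm-monitor.js`:

```js
// sketch-swarm-status.mjs — hypothetical reader for .swarm/status/*.json
import fs from 'fs';
import path from 'path';

const statusDir = path.join(process.cwd(), '.swarm', 'status');
const records = fs.existsSync(statusDir)
  ? fs.readdirSync(statusDir)
      .filter(f => f.endsWith('.json'))
      .map(f => JSON.parse(fs.readFileSync(path.join(statusDir, f), 'utf-8')))
  : [];

// Same fields launch_swarm writes: id, pid, project, agents, maxAgents, startTime, status, logFile.
for (const swarm of records) {
  const alive = swarm.status === 'running';
  console.log(`${alive ? '🟢' : '🔴'} ${swarm.id}  pid=${swarm.pid}  ${swarm.project}`);
}
console.log(`${records.filter(s => s.status === 'running').length}/${records.length} running`);
```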
package/scripts/install-code-execution-hooks.sh
@@ -0,0 +1,96 @@
+#!/bin/bash
+
+# Install Code Execution and Pre-Tool-Use Hooks for StackMemory
+
+set -e
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
+CLAUDE_HOOKS_DIR="$HOME/.claude/hooks"
+
+echo "🔧 Installing StackMemory Code Execution Hooks"
+echo "============================================"
+
+# Create hooks directory if it doesn't exist
+if [ ! -d "$CLAUDE_HOOKS_DIR" ]; then
+    echo "Creating Claude hooks directory..."
+    mkdir -p "$CLAUDE_HOOKS_DIR"
+fi
+
+# Build the project first to ensure handlers are compiled
+echo ""
+echo "📦 Building project..."
+cd "$PROJECT_ROOT"
+npm run build
+
+# Install pre-tool-use hook
+echo ""
+echo "🔗 Installing pre-tool-use hook..."
+if [ -f "$PROJECT_ROOT/templates/claude-hooks/pre-tool-use" ]; then
+    # Backup existing hook if present
+    if [ -f "$CLAUDE_HOOKS_DIR/pre-tool-use" ]; then
+        echo "  Backing up existing pre-tool-use hook..."
+        cp "$CLAUDE_HOOKS_DIR/pre-tool-use" "$CLAUDE_HOOKS_DIR/pre-tool-use.backup.$(date +%Y%m%d_%H%M%S)"
+    fi
+
+    # Copy new hook
+    cp "$PROJECT_ROOT/templates/claude-hooks/pre-tool-use" "$CLAUDE_HOOKS_DIR/"
+    chmod +x "$CLAUDE_HOOKS_DIR/pre-tool-use"
+    echo "  ✅ pre-tool-use hook installed"
+else
+    echo "  ❌ pre-tool-use hook not found"
+fi
+
+# Create configuration file
+echo ""
+echo "⚙️ Setting up configuration..."
+STACKMEMORY_CONFIG_DIR="$HOME/.stackmemory"
+mkdir -p "$STACKMEMORY_CONFIG_DIR"
+
+# Create mode configuration
+cat > "$STACKMEMORY_CONFIG_DIR/tool-mode.conf" << EOF
+# StackMemory Tool Mode Configuration
+# Options: permissive (default), restrictive, code_only
+
+STACKMEMORY_TOOL_MODE=permissive
+EOF
+
+echo "  ✅ Configuration created at $STACKMEMORY_CONFIG_DIR/tool-mode.conf"
+
+# Test code execution handler
+echo ""
+echo "🧪 Testing code execution handler..."
+node "$PROJECT_ROOT/scripts/test-code-execution.js" 2>/dev/null || {
+    echo "  ⚠️ Code execution test failed - handler may need dependencies"
+    echo "  Run: node scripts/test-code-execution.js for details"
+}
+
+# Display usage information
+echo ""
+echo "🎉 Installation Complete!"
+echo ""
+echo "Usage:"
+echo "------"
+echo "1. Set tool mode (optional):"
+echo "   export STACKMEMORY_TOOL_MODE=permissive   # Default - all tools allowed"
+echo "   export STACKMEMORY_TOOL_MODE=restrictive  # Block dangerous tools"
+echo "   export STACKMEMORY_TOOL_MODE=code_only    # Only code execution allowed"
+echo ""
+echo "2. Or edit: ~/.stackmemory/tool-mode.conf"
+echo ""
+echo "3. View tool usage logs:"
+echo "   tail -f ~/.stackmemory/tool-use.log"
+echo ""
+echo "4. Test code execution:"
+echo "   node $PROJECT_ROOT/scripts/test-code-execution.js"
+echo ""
+echo "Modes:"
+echo "------"
+echo "• permissive: All tools allowed, dangerous ones logged"
+echo "• restrictive: Blocks Bash, Write, Edit, Delete, WebFetch"
+echo "• code_only: Only Python/JavaScript execution (pure computation)"
+echo ""
+echo "The code_only mode creates a restricted environment similar to"
+echo "execute_code_py, where Claude can only perform computations."
+echo ""
+echo "✨ Ready to use with Claude Code!"
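The installer only describes the three modes; the actual decision logic lives in `templates/claude-hooks/pre-tool-use` (+189 lines, not shown in this diff). Purely as an illustration of what a mode check consistent with the text above could look like, a minimal sketch; the tool lists and the exit-code convention are assumptions, not the shipped hook:

```js
// sketch-tool-mode-gate.mjs — hypothetical gate, NOT the shipped pre-tool-use hook
const mode = process.env.STACKMEMORY_TOOL_MODE || 'permissive';
const tool = process.argv[2] || '';

// Tool names taken from the installer's description of restrictive mode.
const DANGEROUS = ['Bash', 'Write', 'Edit', 'Delete', 'WebFetch'];
const CODE_ONLY = ['execute_code_py', 'execute_code_js']; // assumed identifiers

let allowed = true;
if (mode === 'restrictive') allowed = !DANGEROUS.includes(tool);
if (mode === 'code_only') allowed = CODE_ONLY.includes(tool);

console.log(`${allowed ? 'allow' : 'block'} ${tool} (mode=${mode})`);
process.exit(allowed ? 0 : 1); // non-zero exit = block, by assumption
```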
@@ -17,7 +17,7 @@ async function queryLinear(query, variables = {}) {
     method: 'POST',
     headers: {
       'Content-Type': 'application/json',
-      'Authorization': process.env.LINEAR_API_KEY
+      'Authorization': process.env.STACKMEMORY_LINEAR_API_KEY || process.env.LINEAR_API_KEY
     },
     body: JSON.stringify({ query, variables })
   });