@stackmemoryai/stackmemory 0.5.25 → 0.5.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@stackmemoryai/stackmemory",
- "version": "0.5.25",
+ "version": "0.5.26",
  "description": "Lossless memory runtime for AI coding tools - organizes context as a call stack instead of linear chat logs, with team collaboration and infinite retention",
  "engines": {
  "node": ">=20.0.0",
@@ -44,8 +44,8 @@
  "persistence"
  ],
  "scripts": {
- "start": "node dist/servers/railway/index.js",
- "start:full": "node dist/servers/railway/index.js",
+ "start": "node dist/integrations/mcp/server.js",
+ "start:full": "node dist/integrations/mcp/server.js",
  "setup": "npm install && npm run build && npm run init",
  "postinstall": "node scripts/install-claude-hooks-auto.js || true",
  "init": "node dist/scripts/initialize.js",
@@ -68,11 +68,6 @@
  "status": "node dist/scripts/status.js",
  "linear:sync": "node scripts/sync-linear-graphql.js",
  "linear:mirror": "node scripts/sync-linear-graphql.js --mirror",
- "railway:setup": "./scripts/setup-railway-deployment.sh",
- "railway:deploy": "railway up --detach",
- "railway:migrate": "tsx src/cli/commands/migrate.ts",
- "railway:schema:verify": "tsx scripts/verify-railway-schema.ts",
- "railway:logs": "railway logs",
  "claude:setup": "node scripts/setup-claude-integration.js",
  "daemons:start": "node scripts/claude-sm-autostart.js",
  "daemons:status": "node scripts/claude-sm-autostart.js status",
@@ -1,87 +0,0 @@
- #!/usr/bin/env node
-
- /**
- * Debug Railway build issues
- */
-
- import fs from 'fs';
- import path from 'path';
- import { fileURLToPath } from 'url';
-
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = path.dirname(__filename);
-
- console.log('šŸ” Railway Build Debugger');
- console.log('========================\n');
-
- // Check for server files
- const serverDir = path.join(__dirname, '..', 'dist', 'servers', 'railway');
- const srcDir = path.join(__dirname, '..', 'src', 'servers', 'railway');
-
- console.log('šŸ“ Checking dist/servers/railway:');
- if (fs.existsSync(serverDir)) {
- const files = fs.readdirSync(serverDir);
- files.forEach(file => {
- const stats = fs.statSync(path.join(serverDir, file));
- console.log(` - ${file} (${stats.size} bytes, modified: ${stats.mtime.toISOString()})`);
-
- // Check for minimal server references
- if (file === 'index.js') {
- const content = fs.readFileSync(path.join(serverDir, file), 'utf-8');
- if (content.includes('Minimal')) {
- console.log(` āš ļø Contains "Minimal" references`);
- }
- if (content.includes('/auth/signup')) {
- console.log(` āœ… Contains auth endpoints`);
- }
- }
- });
- } else {
- console.log(' āŒ Directory does not exist');
- }
-
- console.log('\nšŸ“ Checking src/servers/railway:');
- if (fs.existsSync(srcDir)) {
- const files = fs.readdirSync(srcDir);
- files.forEach(file => {
- const stats = fs.statSync(path.join(srcDir, file));
- console.log(` - ${file} (${stats.size} bytes)`);
- });
- } else {
- console.log(' āŒ Directory does not exist');
- }
-
- // Check package.json scripts
- console.log('\nšŸ“¦ Package.json start scripts:');
- const packageJson = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf-8'));
- Object.entries(packageJson.scripts).forEach(([key, value]) => {
- if (key.includes('start')) {
- console.log(` ${key}: ${value}`);
- }
- });
-
- // Check Dockerfile
- console.log('\n🐳 Dockerfile CMD:');
- const dockerfile = fs.readFileSync(path.join(__dirname, '..', 'Dockerfile'), 'utf-8');
- const cmdMatch = dockerfile.match(/CMD\s+\[.*\]/g);
- if (cmdMatch) {
- cmdMatch.forEach(cmd => {
- console.log(` ${cmd}`);
- });
- }
-
- // Check Railway config
- console.log('\nšŸš‚ Railway.json:');
- const railwayConfig = path.join(__dirname, '..', 'railway.json');
- if (fs.existsSync(railwayConfig)) {
- const config = JSON.parse(fs.readFileSync(railwayConfig, 'utf-8'));
- console.log(JSON.stringify(config, null, 2));
- } else {
- console.log(' āŒ railway.json not found');
- }
-
- console.log('\nšŸ’” Recommendations:');
- console.log('1. Railway may be using a cached build layer');
- console.log('2. Try changing the base image in Dockerfile to force rebuild');
- console.log('3. Check Railway dashboard for any override settings');
- console.log('4. Consider contacting Railway support about cache issues');
@@ -1,352 +0,0 @@
- #!/bin/bash
-
- # Runway Deployment Script for StackMemory MCP Server
- # Production deployment with comprehensive checks
-
- set -euo pipefail
-
- # Color codes for output
- RED='\033[0;31m'
- GREEN='\033[0;32m'
- YELLOW='\033[1;33m'
- BLUE='\033[0;34m'
- NC='\033[0m' # No Color
-
- # Configuration
- ENVIRONMENT="${1:-production}"
- PROJECT_NAME="stackmemory-mcp"
- REQUIRED_ENV_VARS=(
- "AUTH0_DOMAIN"
- "AUTH0_AUDIENCE"
- "AUTH0_CLIENT_ID"
- "AUTH0_CLIENT_SECRET"
- "DATABASE_URL"
- "REDIS_URL"
- "JWT_SECRET"
- "DATADOG_API_KEY"
- "SENTRY_DSN"
- )
-
- # Logging functions
- log_info() {
- echo -e "${BLUE}[INFO]${NC} $1"
- }
-
- log_success() {
- echo -e "${GREEN}[SUCCESS]${NC} $1"
- }
-
- log_warning() {
- echo -e "${YELLOW}[WARNING]${NC} $1"
- }
-
- log_error() {
- echo -e "${RED}[ERROR]${NC} $1"
- exit 1
- }
-
- # Check prerequisites
- check_prerequisites() {
- log_info "Checking prerequisites..."
-
- # Check for required tools
- for tool in docker node npm runway pg_isready redis-cli; do
- if ! command -v $tool &> /dev/null; then
- log_error "$tool is not installed"
- fi
- done
-
- # Check Node version
- NODE_VERSION=$(node -v | cut -d'v' -f2 | cut -d'.' -f1)
- if [ "$NODE_VERSION" -lt 20 ]; then
- log_error "Node.js 20 or higher is required (current: v$NODE_VERSION)"
- fi
-
- # Check environment variables
- for var in "${REQUIRED_ENV_VARS[@]}"; do
- if [ -z "${!var:-}" ]; then
- log_error "Environment variable $var is not set"
- fi
- done
-
- log_success "All prerequisites met"
- }
-
- # Run tests
- run_tests() {
- log_info "Running tests..."
-
- # Unit tests
- npm test || log_error "Unit tests failed"
-
- # Integration tests
- npm run test:integration || log_error "Integration tests failed"
-
- # Security scan
- npm audit --production || log_warning "Security vulnerabilities found"
-
- log_success "All tests passed"
- }
-
- # Build application
- build_application() {
- log_info "Building application..."
-
- # Clean previous builds
- rm -rf dist
-
- # Install dependencies
- npm ci --production=false
-
- # Build TypeScript
- npm run build
-
- # Verify build
- if [ ! -f "dist/src/runway/index.js" ]; then
- log_error "Build failed: main entry point not found"
- fi
-
- log_success "Application built successfully"
- }
-
- # Build Docker image
- build_docker_image() {
- log_info "Building Docker image..."
-
- # Generate build tag
- VERSION=$(node -p "require('./package.json').version")
- BUILD_TAG="${PROJECT_NAME}:${VERSION}-${ENVIRONMENT}"
- LATEST_TAG="${PROJECT_NAME}:latest-${ENVIRONMENT}"
-
- # Build image
- docker build \
- --file Dockerfile.runway \
- --tag "$BUILD_TAG" \
- --tag "$LATEST_TAG" \
- --build-arg NODE_ENV="$ENVIRONMENT" \
- --platform linux/amd64 \
- .
-
- # Tag for registry
- REGISTRY_URL="${RUNWAY_REGISTRY:-registry.runway.app}"
- docker tag "$BUILD_TAG" "$REGISTRY_URL/$BUILD_TAG"
- docker tag "$LATEST_TAG" "$REGISTRY_URL/$LATEST_TAG"
-
- log_success "Docker image built: $BUILD_TAG"
- }
-
- # Database migrations
- run_migrations() {
- log_info "Running database migrations..."
-
- # Check database connection
- if ! pg_isready -d "$DATABASE_URL"; then
- log_error "Cannot connect to database"
- fi
-
- # Run migrations using docker
- docker run --rm \
- -e DATABASE_URL="$DATABASE_URL" \
- "${PROJECT_NAME}:latest-${ENVIRONMENT}" \
- node dist/src/runway/database/migrate.js
-
- log_success "Database migrations completed"
- }
-
- # Health checks
- perform_health_checks() {
- log_info "Performing health checks..."
-
- # Start services locally for testing
- docker-compose -f docker-compose.runway.yml up -d
-
- # Wait for services to be ready
- sleep 10
-
- # Check application health
- HEALTH_RESPONSE=$(curl -s http://localhost:8080/health)
- if ! echo "$HEALTH_RESPONSE" | grep -q '"healthy":true'; then
- log_error "Health check failed: $HEALTH_RESPONSE"
- fi
-
- # Check database
- docker exec stackmemory-postgres pg_isready -U stackmemory || log_error "Database not ready"
-
- # Check Redis
- docker exec stackmemory-redis redis-cli ping || log_error "Redis not ready"
-
- # Stop services
- docker-compose -f docker-compose.runway.yml down
-
- log_success "All health checks passed"
- }
-
- # Deploy to Runway
- deploy_to_runway() {
- log_info "Deploying to Runway ($ENVIRONMENT)..."
-
- # Login to Runway
- runway login || log_error "Failed to login to Runway"
-
- # Validate configuration
- runway validate || log_error "Runway configuration validation failed"
-
- # Push Docker image
- docker push "$REGISTRY_URL/${PROJECT_NAME}:${VERSION}-${ENVIRONMENT}"
-
- # Deploy with canary strategy
- runway deploy \
- --environment "$ENVIRONMENT" \
- --strategy canary \
- --canary-percentage 10 \
- --canary-duration 30m \
- --wait \
- --timeout 600
-
- # Verify deployment
- runway status "$PROJECT_NAME" --environment "$ENVIRONMENT"
-
- log_success "Deployment successful"
- }
-
- # Smoke tests
- run_smoke_tests() {
- log_info "Running smoke tests..."
-
- # Get deployment URL
- DEPLOYMENT_URL=$(runway url "$PROJECT_NAME" --environment "$ENVIRONMENT")
-
- # Test health endpoint
- curl -f "$DEPLOYMENT_URL/health" || log_error "Health endpoint failed"
-
- # Test metrics endpoint
- curl -f "$DEPLOYMENT_URL/metrics" || log_error "Metrics endpoint failed"
-
- # Test authentication
- TEST_TOKEN=$(./scripts/get-test-token.sh)
- curl -f -H "Authorization: Bearer $TEST_TOKEN" \
- "$DEPLOYMENT_URL/api/v1/projects" || log_error "Authentication test failed"
-
- log_success "Smoke tests passed"
- }
-
- # Monitor deployment
- monitor_deployment() {
- log_info "Monitoring deployment for 5 minutes..."
-
- START_TIME=$(date +%s)
- MONITOR_DURATION=300 # 5 minutes
-
- while true; do
- CURRENT_TIME=$(date +%s)
- ELAPSED=$((CURRENT_TIME - START_TIME))
-
- if [ $ELAPSED -gt $MONITOR_DURATION ]; then
- break
- fi
-
- # Check error rate
- ERROR_RATE=$(runway metrics "$PROJECT_NAME" --metric error_rate --duration 1m)
- if (( $(echo "$ERROR_RATE > 0.05" | bc -l) )); then
- log_warning "High error rate detected: $ERROR_RATE"
- fi
-
- # Check latency
- LATENCY=$(runway metrics "$PROJECT_NAME" --metric p95_latency --duration 1m)
- if (( $(echo "$LATENCY > 2000" | bc -l) )); then
- log_warning "High latency detected: ${LATENCY}ms"
- fi
-
- sleep 30
- done
-
- log_success "Monitoring complete"
- }
-
- # Rollback if needed
- rollback_deployment() {
- log_error "Deployment failed, rolling back..."
-
- runway rollback "$PROJECT_NAME" \
- --environment "$ENVIRONMENT" \
- --to-previous \
- --wait
-
- log_info "Rollback completed"
- exit 1
- }
-
- # Notification
- send_notification() {
- local STATUS=$1
- local MESSAGE=$2
-
- # Slack notification
- if [ -n "${SLACK_WEBHOOK_URL:-}" ]; then
- curl -X POST "$SLACK_WEBHOOK_URL" \
- -H "Content-Type: application/json" \
- -d "{
- \"text\": \"Deployment $STATUS\",
- \"attachments\": [{
- \"color\": \"$([ "$STATUS" == "SUCCESS" ] && echo "good" || echo "danger")\",
- \"fields\": [{
- \"title\": \"Project\",
- \"value\": \"$PROJECT_NAME\",
- \"short\": true
- }, {
- \"title\": \"Environment\",
- \"value\": \"$ENVIRONMENT\",
- \"short\": true
- }, {
- \"title\": \"Message\",
- \"value\": \"$MESSAGE\"
- }]
- }]
- }"
- fi
-
- # Email notification
- if [ -n "${NOTIFICATION_EMAIL:-}" ]; then
- echo "$MESSAGE" | mail -s "Deployment $STATUS: $PROJECT_NAME" "$NOTIFICATION_EMAIL"
- fi
- }
-
- # Main deployment flow
- main() {
- log_info "Starting deployment for $PROJECT_NAME to $ENVIRONMENT"
- log_info "================================================"
-
- # Set error trap
- trap 'rollback_deployment' ERR
-
- # Pre-deployment checks
- check_prerequisites
-
- # Build and test
- run_tests
- build_application
- build_docker_image
-
- # Local verification
- perform_health_checks
-
- # Database setup
- run_migrations
-
- # Deploy
- deploy_to_runway
-
- # Post-deployment verification
- run_smoke_tests
- monitor_deployment
-
- # Success notification
- send_notification "SUCCESS" "Deployment completed successfully"
-
- log_success "================================================"
- log_success "Deployment completed successfully!"
- log_success "URL: $(runway url "$PROJECT_NAME" --environment "$ENVIRONMENT")"
- }
-
- # Run main function
- main "$@"
@@ -1,39 +0,0 @@
- #!/bin/bash
-
- echo "šŸš‚ Railway Environment Variables Setup"
- echo "======================================"
- echo ""
- echo "You need to add these environment variables in the Railway dashboard:"
- echo "https://railway.app/project/90d5083a-4adf-49b8-b2ff-95adfbb610f2/service/b2a145c3-065e-4225-8c84-aa0d8f49d243/settings"
- echo ""
- echo "Copy and paste these into Railway's environment variables section:"
- echo ""
-
- # Load .env file
- if [ -f .env ]; then
- source .env
-
- echo "REDIS_URL=$REDIS_URL"
- echo "LINEAR_API_KEY=$LINEAR_API_KEY"
- echo "CHROMADB_API_KEY=$CHROMADB_API_KEY"
- echo "CHROMADB_TENANT=$CHROMADB_TENANT"
- echo "CHROMADB_DATABASE=$CHROMADB_DATABASE"
- echo "LINEAR_TEAM_ID=$LINEAR_TEAM_ID"
- echo "LINEAR_ORGANIZATION=$LINEAR_ORGANIZATION"
- echo ""
- echo "Optional (for cold storage tier):"
- echo "GCS_BUCKET_NAME=stackmemory-cold-storage"
- echo "GCS_PROJECT_ID=your-gcp-project-id"
- echo "GCS_CLIENT_EMAIL=your-service-account@project.iam.gserviceaccount.com"
- echo "GCS_PRIVATE_KEY=your-gcs-private-key"
- else
- echo "āŒ .env file not found. Please create one first."
- exit 1
- fi
-
- echo ""
- echo "After adding these variables:"
- echo "1. Click 'Deploy' in Railway dashboard"
- echo "2. Or run: railway up"
- echo "3. Check deployment: railway logs"
- echo "4. Visit: https://stackmemory-production.up.railway.app/health"
@@ -1,37 +0,0 @@
- #!/usr/bin/env bash
- set -euo pipefail
-
- echo "[railway-setup] Starting setup..."
-
- # Ensure tsx is available
- if ! command -v npx >/dev/null 2>&1; then
- echo "npx not found; please run via npm scripts (npm run railway:setup)" >&2
- exit 1
- fi
-
- # Default SQLite path if DATABASE_URL not provided
- if [[ -z "${DATABASE_URL:-}" ]]; then
- mkdir -p .stackmemory
- export DATABASE_URL="$(pwd)/.stackmemory/railway.db"
- echo "[railway-setup] DATABASE_URL not set; using ${DATABASE_URL}"
- fi
-
- echo "[railway-setup] Applying migrations to ${DATABASE_URL}"
- npx tsx src/cli/commands/migrate.ts apply --to latest
-
- echo "[railway-setup] Verifying schema version"
- if npx tsx scripts/verify-railway-schema.ts; then
- echo "[railway-setup] Schema verified"
- else
- code=$?
- if [[ "$code" == "2" ]]; then
- echo "[railway-setup] Schema below latest; retrying apply"
- npx tsx src/cli/commands/migrate.ts apply --to latest
- else
- echo "[railway-setup] Verification failed with code $code" >&2
- exit $code
- fi
- fi
-
- echo "[railway-setup] Done."
-
@@ -1,228 +0,0 @@
- #!/usr/bin/env node
- import 'dotenv/config';
- import pg from 'pg';
- import Database from 'better-sqlite3';
- import { mkdirSync } from 'fs';
- import { dirname, join } from 'path';
- import { homedir } from 'os';
-
- const { Client } = pg;
-
- // Railway PostgreSQL connection
- // Use environment variable or fallback to internal URL
- const RAILWAY_DATABASE_URL = process.env.RAILWAY_DATABASE_URL ||
- process.env.DATABASE_URL ||
- 'postgresql://postgres:YTSFXqPzFhghOcefgwPvJyWOBTYHbYxd@postgres.railway.internal:5432/railway';
-
- // Local SQLite database path
- const dbPath = join(homedir(), '.stackmemory', 'context.db');
- mkdirSync(dirname(dbPath), { recursive: true });
-
- async function syncFramesFromRailway() {
- console.log('šŸ”„ Starting sync from Railway database...\n');
-
- // Connect to Railway PostgreSQL
- const pgClient = new Client({
- connectionString: RAILWAY_DATABASE_URL,
- });
-
- try {
- await pgClient.connect();
- console.log('āœ… Connected to Railway PostgreSQL database');
-
- // Connect to local SQLite
- const sqliteDb = new Database(dbPath);
- console.log('āœ… Connected to local SQLite database\n');
-
- // Check if frames table exists in PostgreSQL
- const tableCheckQuery = `
- SELECT EXISTS (
- SELECT FROM information_schema.tables
- WHERE table_name = 'frames'
- );
- `;
-
- const tableExists = await pgClient.query(tableCheckQuery);
-
- if (!tableExists.rows[0].exists) {
- console.log('āš ļø No frames table found in Railway database');
- console.log(' The Railway deployment may not have created frames yet.\n');
-
- // Check for other relevant tables
- const tablesQuery = `
- SELECT table_name
- FROM information_schema.tables
- WHERE table_schema = 'public'
- ORDER BY table_name;
- `;
-
- const tables = await pgClient.query(tablesQuery);
- console.log('šŸ“Š Available tables in Railway database:');
- tables.rows.forEach(row => {
- console.log(` - ${row.table_name}`);
- });
-
- return;
- }
-
- // Fetch frames from Railway
- const framesQuery = `
- SELECT
- frame_id,
- run_id,
- project_id,
- parent_frame_id,
- depth,
- type,
- name,
- state,
- inputs,
- outputs,
- digest_text,
- digest_json,
- created_at,
- closed_at
- FROM frames
- ORDER BY created_at DESC
- LIMIT 1000;
- `;
-
- const framesResult = await pgClient.query(framesQuery);
- console.log(`šŸ“„ Found ${framesResult.rows.length} frames in Railway database\n`);
-
- if (framesResult.rows.length === 0) {
- console.log('ā„¹ļø No frames to sync. The Railway database is empty.');
- return;
- }
-
- // Prepare SQLite insert statement
- const insertStmt = sqliteDb.prepare(`
- INSERT OR REPLACE INTO frames (
- frame_id, run_id, project_id, parent_frame_id, depth,
- type, name, state, inputs, outputs, digest_text, digest_json,
- created_at, closed_at
- ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
- `);
-
- // Begin transaction for bulk insert
- const insertMany = sqliteDb.transaction((frames) => {
- for (const frame of frames) {
- insertStmt.run(
- frame.frame_id,
- frame.run_id,
- frame.project_id,
- frame.parent_frame_id,
- frame.depth,
- frame.type,
- frame.name,
- frame.state,
- typeof frame.inputs === 'object' ? JSON.stringify(frame.inputs) : frame.inputs,
- typeof frame.outputs === 'object' ? JSON.stringify(frame.outputs) : frame.outputs,
- frame.digest_text,
- typeof frame.digest_json === 'object' ? JSON.stringify(frame.digest_json) : frame.digest_json,
- frame.created_at ? new Date(frame.created_at).getTime() : Date.now(),
- frame.closed_at ? new Date(frame.closed_at).getTime() : null
- );
- }
- });
-
- // Execute bulk insert
- insertMany(framesResult.rows);
- console.log(`āœ… Synced ${framesResult.rows.length} frames to local database\n`);
-
- // Also sync events if they exist
- const eventsCheckQuery = `
- SELECT EXISTS (
- SELECT FROM information_schema.tables
- WHERE table_name = 'events'
- );
- `;
-
- const eventsExist = await pgClient.query(eventsCheckQuery);
-
- if (eventsExist.rows[0].exists) {
- const eventsQuery = `
- SELECT
- event_id,
- run_id,
- frame_id,
- seq,
- event_type,
- payload,
- ts
- FROM events
- ORDER BY ts DESC
- LIMIT 5000;
- `;
-
- const eventsResult = await pgClient.query(eventsQuery);
-
- if (eventsResult.rows.length > 0) {
- console.log(`šŸ“„ Found ${eventsResult.rows.length} events in Railway database`);
-
- const eventInsertStmt = sqliteDb.prepare(`
- INSERT OR REPLACE INTO events (
- event_id, run_id, frame_id, seq, event_type, payload, ts
- ) VALUES (?, ?, ?, ?, ?, ?, ?)
- `);
-
- const insertEvents = sqliteDb.transaction((events) => {
- for (const event of events) {
- eventInsertStmt.run(
- event.event_id,
- event.run_id,
- event.frame_id,
- event.seq,
- event.event_type,
- typeof event.payload === 'object' ? JSON.stringify(event.payload) : event.payload,
- event.ts ? new Date(event.ts).getTime() : Date.now()
- );
- }
- });
-
- insertEvents(eventsResult.rows);
- console.log(`āœ… Synced ${eventsResult.rows.length} events to local database\n`);
- }
- }
-
- // Verify the sync
- const frameCount = sqliteDb.prepare('SELECT COUNT(*) as count FROM frames').get();
- const eventCount = sqliteDb.prepare('SELECT COUNT(*) as count FROM events').get();
-
- console.log('šŸ“Š Local database statistics:');
- console.log(` - Frames: ${frameCount.count}`);
- console.log(` - Events: ${eventCount.count}`);
-
- // Show recent frames
- const recentFrames = sqliteDb.prepare(`
- SELECT frame_id, name, type, state, datetime(created_at/1000, 'unixepoch') as created
- FROM frames
- ORDER BY created_at DESC
- LIMIT 5
- `).all();
-
- if (recentFrames.length > 0) {
- console.log('\nšŸ• Recent frames:');
- recentFrames.forEach(frame => {
- console.log(` - ${frame.name} (${frame.type}) - ${frame.state} - ${frame.created}`);
- });
- }
-
- sqliteDb.close();
-
- } catch (error) {
- console.error('āŒ Error syncing frames:', error.message);
-
- // If connection failed due to internal network, try external URL
- if (error.message.includes('ENOTFOUND') || error.message.includes('postgres.railway.internal')) {
- console.log('\nšŸ”„ Retrying with external Railway database URL...');
- console.log(' Note: You may need to get the external DATABASE_URL from Railway dashboard.');
- console.log(' Run: railway variables --json | jq -r .DATABASE_URL');
- }
- } finally {
- await pgClient.end();
- }
- }
-
- // Run the sync
- syncFramesFromRailway().catch(console.error);
@@ -1,222 +0,0 @@
- #!/usr/bin/env node
- import 'dotenv/config';
- import pg from 'pg';
- import { createClient } from 'redis';
-
- const { Client } = pg;
-
- // Railway PostgreSQL URL from environment
- const DATABASE_URL = process.env.DATABASE_URL ||
- 'postgresql://postgres:YTSFXqPzFhghOcefgwPvJyWOBTYHbYxd@postgres.railway.internal:5432/railway';
-
- async function testPostgreSQL() {
- console.log('🐘 Testing PostgreSQL Connection...\n');
-
- const pgClient = new Client({
- connectionString: DATABASE_URL
- });
-
- try {
- console.log('šŸ“” Connecting to PostgreSQL...');
- await pgClient.connect();
- console.log('āœ… Connected to PostgreSQL!\n');
-
- // Test basic query
- const timeResult = await pgClient.query('SELECT NOW() as current_time');
- console.log('ā° Database time:', timeResult.rows[0].current_time);
-
- // Create frames table if it doesn't exist
- console.log('\nšŸ“Š Creating frames table...');
- await pgClient.query(`
- CREATE TABLE IF NOT EXISTS frames (
- frame_id TEXT PRIMARY KEY,
- run_id TEXT NOT NULL,
- project_id TEXT NOT NULL,
- parent_frame_id TEXT,
- depth INTEGER DEFAULT 0,
- type TEXT NOT NULL,
- name TEXT NOT NULL,
- state TEXT DEFAULT 'active',
- inputs JSONB DEFAULT '{}',
- outputs JSONB DEFAULT '{}',
- digest_text TEXT,
- digest_json JSONB DEFAULT '{}',
- created_at TIMESTAMP DEFAULT NOW(),
- closed_at TIMESTAMP
- );
-
- CREATE INDEX IF NOT EXISTS idx_frames_project ON frames(project_id);
- CREATE INDEX IF NOT EXISTS idx_frames_state ON frames(state);
- CREATE INDEX IF NOT EXISTS idx_frames_created ON frames(created_at);
- `);
- console.log('āœ… Frames table ready!\n');
-
- // Check existing frames
- const countResult = await pgClient.query('SELECT COUNT(*) as count FROM frames');
- console.log('šŸ“ˆ Existing frames:', countResult.rows[0].count);
-
- // Insert a test frame
- const testFrameId = `test-frame-${Date.now()}`;
- console.log('\nšŸ”§ Inserting test frame...');
- await pgClient.query(`
- INSERT INTO frames (
- frame_id, run_id, project_id, type, name, state,
- inputs, outputs, digest_text
- ) VALUES (
- $1, $2, $3, $4, $5, $6, $7, $8, $9
- )
- `, [
- testFrameId,
- 'test-run-001',
- 'stackmemory-test',
- 'test',
- 'Database Connection Test',
- 'active',
- JSON.stringify({ test: true, timestamp: new Date().toISOString() }),
- JSON.stringify({ success: true }),
- 'Test frame for Railway PostgreSQL connection'
- ]);
- console.log('āœ… Test frame inserted:', testFrameId);
-
- // Retrieve the test frame
- console.log('\nšŸ” Retrieving test frame...');
- const frameResult = await pgClient.query(
- 'SELECT * FROM frames WHERE frame_id = $1',
- [testFrameId]
- );
-
- if (frameResult.rows.length > 0) {
- const frame = frameResult.rows[0];
- console.log('āœ… Frame retrieved successfully!');
- console.log(' - Name:', frame.name);
- console.log(' - Type:', frame.type);
- console.log(' - State:', frame.state);
- console.log(' - Created:', frame.created_at);
- }
-
- // Get recent frames
- console.log('\nšŸ“‹ Recent frames:');
- const recentFrames = await pgClient.query(`
- SELECT frame_id, name, type, state, created_at
- FROM frames
- ORDER BY created_at DESC
- LIMIT 5
- `);
-
- if (recentFrames.rows.length > 0) {
- recentFrames.rows.forEach((frame, index) => {
- console.log(` ${index + 1}. ${frame.name} (${frame.type}) - ${frame.state}`);
- });
- } else {
- console.log(' No frames found');
- }
-
- // Check table information
- console.log('\nšŸ“Š Database tables:');
- const tables = await pgClient.query(`
- SELECT table_name
- FROM information_schema.tables
- WHERE table_schema = 'public'
- ORDER BY table_name
- `);
-
- tables.rows.forEach(row => {
- console.log(` - ${row.table_name}`);
- });
-
- } catch (error) {
- console.error('āŒ PostgreSQL Error:', error.message);
- if (error.message.includes('ENOTFOUND')) {
- console.log('\nšŸ’” Note: postgres.railway.internal only works from within Railway');
- console.log(' For local testing, you need the external DATABASE_URL');
- }
- } finally {
- await pgClient.end();
- console.log('\nšŸ”Œ PostgreSQL connection closed');
- }
- }
-
- async function testRedis() {
- console.log('\n\nšŸ”“ Testing Redis Connection...\n');
-
- // Try to build Redis URL from environment
- const REDIS_URL = process.env.REDIS_URL ||
- process.env.REDISHOST ? `redis://${process.env.REDISHOST}:${process.env.REDISPORT || 6379}` : null;
-
- if (!REDIS_URL) {
- console.log('āš ļø No Redis configuration found in environment');
- console.log(' Add REDIS_URL or REDISHOST to Railway variables');
- return;
- }
-
- const redisClient = createClient({ url: REDIS_URL });
-
- try {
- console.log('šŸ“” Connecting to Redis...');
- await redisClient.connect();
- console.log('āœ… Connected to Redis!\n');
-
- // Test basic operations
- console.log('šŸ”§ Testing Redis operations...');
-
- // Set a test key
- const testKey = `test:connection:${Date.now()}`;
- await redisClient.set(testKey, JSON.stringify({
- test: true,
- timestamp: new Date().toISOString(),
- message: 'Railway Redis connection test'
- }), { EX: 60 }); // Expire after 60 seconds
- console.log('āœ… Set test key:', testKey);
-
- // Get the test key
- const value = await redisClient.get(testKey);
- const parsed = JSON.parse(value);
- console.log('āœ… Retrieved value:', parsed);
-
- // Test Redis info
- const info = await redisClient.info('server');
- const version = info.match(/redis_version:(.+)/)?.[1];
- console.log('\nšŸ“Š Redis Server Info:');
- console.log(' - Version:', version);
-
- // Check memory usage
- const memoryInfo = await redisClient.info('memory');
- const usedMemory = memoryInfo.match(/used_memory_human:(.+)/)?.[1];
- console.log(' - Memory used:', usedMemory);
-
- // List all keys (be careful in production!)
- const keys = await redisClient.keys('*');
- console.log(' - Total keys:', keys.length);
-
- if (keys.length > 0 && keys.length <= 10) {
- console.log(' - Keys:', keys);
- }
-
- } catch (error) {
- console.error('āŒ Redis Error:', error.message);
- if (error.message.includes('ENOTFOUND')) {
- console.log('\nšŸ’” Note: Redis host not found');
- console.log(' Make sure Redis variables are configured in Railway');
- }
- } finally {
- await redisClient.disconnect();
- console.log('\nšŸ”Œ Redis connection closed');
- }
- }
-
- // Run tests
- async function runTests() {
- console.log('šŸš€ Railway Database Connection Tests\n');
- console.log('=' .repeat(50));
-
- await testPostgreSQL();
- await testRedis();
-
- console.log('\n' + '=' .repeat(50));
- console.log('āœ… Tests complete!\n');
- }
-
- runTests().catch(error => {
- console.error('Fatal error:', error);
- process.exit(1);
- });
@@ -1,137 +0,0 @@
- #!/usr/bin/env node
-
- /**
- * Validate Railway deployment and check all services
- */
-
- async function checkEndpoint(url, expectedKeys = []) {
- try {
- console.log(`\nšŸ“ Checking: ${url}`);
- const response = await fetch(url);
- const data = await response.json();
-
- console.log(` Status: ${response.status}`);
- console.log(` Response:`, JSON.stringify(data, null, 2));
-
- // Check for expected keys
- for (const key of expectedKeys) {
- if (key in data) {
- console.log(` āœ… Found key: ${key}`);
- } else {
- console.log(` āŒ Missing key: ${key}`);
- }
- }
-
- return { success: response.ok, data };
- } catch (error) {
- console.log(` āŒ Error: ${error.message}`);
- return { success: false, error: error.message };
- }
- }
-
- async function testAuth(baseUrl) {
- console.log('\nšŸ” Testing Authentication Endpoints:');
-
- // Test signup
- const signupData = {
- email: `test${Date.now()}@example.com`,
- password: 'TestPass123!',
- name: 'Test User'
- };
-
- console.log('\nšŸ“ Testing POST /auth/signup');
- try {
- const response = await fetch(`${baseUrl}/auth/signup`, {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify(signupData)
- });
-
- const data = await response.json();
- console.log(` Status: ${response.status}`);
-
- if (response.ok) {
- console.log(' āœ… Signup endpoint works!');
- console.log(` Response:`, JSON.stringify(data, null, 2));
- } else {
- console.log(' āŒ Signup failed:', data.error || data.message);
- }
- } catch (error) {
- console.log(' āŒ Signup endpoint not available:', error.message);
- }
-
- // Test login
- console.log('\nšŸ”‘ Testing POST /auth/login');
- try {
- const response = await fetch(`${baseUrl}/auth/login`, {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({
- email: signupData.email,
- password: signupData.password
- })
- });
-
- const data = await response.json();
- console.log(` Status: ${response.status}`);
-
- if (response.ok) {
- console.log(' āœ… Login endpoint works!');
- console.log(` Token received:`, data.token ? 'Yes' : 'No');
- console.log(` API Key:`, data.apiKey ? 'Yes' : 'No');
- } else {
- console.log(' āŒ Login failed:', data.error || data.message);
- }
- } catch (error) {
- console.log(' āŒ Login endpoint not available:', error.message);
- }
- }
-
- async function main() {
- const baseUrl = process.argv[2] || 'https://stackmemory-production.up.railway.app';
-
- console.log('šŸš€ StackMemory Railway Deployment Validator');
- console.log('==========================================');
- console.log(`Base URL: ${baseUrl}`);
- console.log(`Timestamp: ${new Date().toISOString()}`);
-
- // Check root endpoint
- const root = await checkEndpoint(baseUrl, ['message', 'version', 'endpoints']);
-
- // Detect server type
- if (root.data?.message?.includes('Minimal')) {
- console.log('\nāš ļø WARNING: Minimal server is running!');
- console.log(' The full server with auth endpoints is not deployed.');
- } else if (root.data?.message?.includes('API Server')) {
- console.log('\nāœ… Full API server is running!');
- }
-
- // Check health
- await checkEndpoint(`${baseUrl}/health`, ['status']);
-
- // Check database connections
- const dbTest = await checkEndpoint(`${baseUrl}/test-db`, ['postgresql', 'redis']);
-
- if (dbTest.data) {
- console.log('\nšŸ“Š Database Status:');
- if (dbTest.data.postgresql?.status === 'connected') {
- console.log(' āœ… PostgreSQL: Connected');
- } else {
- console.log(' āŒ PostgreSQL:', dbTest.data.postgresql?.status || 'Not configured');
- }
-
- if (dbTest.data.redis?.status === 'connected') {
- console.log(' āœ… Redis: Connected');
- } else {
- console.log(' āŒ Redis:', dbTest.data.redis?.status || 'Not configured');
- }
- }
-
- // Test auth endpoints
- await testAuth(baseUrl);
-
- console.log('\n==========================================');
- console.log('Validation complete!');
- }
-
- main().catch(console.error);
@@ -1,35 +0,0 @@
- #!/usr/bin/env node
- /**
- * Verify Railway schema version; exits non-zero if below latest
- */
- import { Pool } from 'pg';
- import Database from 'better-sqlite3';
-
- function isPg(url: string): boolean {
- return url.startsWith('postgres://') || url.startsWith('postgresql://');
- }
-
- async function main() {
- const url = process.env.DATABASE_URL || '.stackmemory/railway.db';
- const latest = 3; // keep in sync with CLI
-
- if (isPg(url)) {
- const pool = new Pool({ connectionString: url });
- await pool.query('CREATE TABLE IF NOT EXISTS railway_schema_version (version INTEGER PRIMARY KEY, applied_at TIMESTAMPTZ DEFAULT NOW(), description TEXT)');
- const r = await pool.query('SELECT COALESCE(MAX(version), 0) AS v FROM railway_schema_version');
- const current = Number(r.rows[0]?.v || 0);
- console.log(JSON.stringify({ database: 'postgres', current, latest }));
- await pool.end();
- if (current < latest) process.exit(2);
- } else {
- const db = new Database(url);
- db.exec('CREATE TABLE IF NOT EXISTS railway_schema_version (version INTEGER PRIMARY KEY, applied_at DATETIME DEFAULT CURRENT_TIMESTAMP, description TEXT)');
- const row = db.prepare('SELECT COALESCE(MAX(version), 0) AS v FROM railway_schema_version').get() as any;
- const current = Number(row?.v || 0);
- console.log(JSON.stringify({ database: 'sqlite', current, latest }));
- if (current < latest) process.exit(2);
- }
- }
-
- main().catch((e) => { console.error(e); process.exit(1); });
-