@twelvehart/supermemory-runtime 1.0.0-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +57 -0
- package/README.md +374 -0
- package/dist/index.js +189 -0
- package/dist/mcp/index.js +1132 -0
- package/docker-compose.prod.yml +91 -0
- package/docker-compose.yml +358 -0
- package/drizzle/0000_dapper_the_professor.sql +159 -0
- package/drizzle/0001_api_keys.sql +51 -0
- package/drizzle/meta/0000_snapshot.json +1532 -0
- package/drizzle/meta/_journal.json +13 -0
- package/drizzle.config.ts +20 -0
- package/package.json +114 -0
- package/scripts/add-extraction-job.ts +122 -0
- package/scripts/benchmark-pgvector.ts +122 -0
- package/scripts/bootstrap.sh +209 -0
- package/scripts/check-runtime-pack.ts +111 -0
- package/scripts/claude-mcp-config.ts +336 -0
- package/scripts/docker-entrypoint.sh +183 -0
- package/scripts/doctor.ts +377 -0
- package/scripts/init-db.sql +33 -0
- package/scripts/install.sh +1110 -0
- package/scripts/mcp-setup.ts +271 -0
- package/scripts/migrations/001_create_pgvector_extension.sql +31 -0
- package/scripts/migrations/002_create_memory_embeddings_table.sql +75 -0
- package/scripts/migrations/003_create_hnsw_index.sql +94 -0
- package/scripts/migrations/004_create_memory_embeddings_standalone.sql +70 -0
- package/scripts/migrations/005_create_chunks_table.sql +95 -0
- package/scripts/migrations/006_create_processing_queue.sql +45 -0
- package/scripts/migrations/generate_test_data.sql +42 -0
- package/scripts/migrations/phase1_comprehensive_test.sql +204 -0
- package/scripts/migrations/run_migrations.sh +286 -0
- package/scripts/migrations/test_hnsw_index.sql +255 -0
- package/scripts/pre-commit-secrets +282 -0
- package/scripts/run-extraction-worker.ts +46 -0
- package/scripts/run-phase1-tests.sh +291 -0
- package/scripts/setup.ts +222 -0
- package/scripts/smoke-install.sh +12 -0
- package/scripts/test-health-endpoint.sh +328 -0
- package/src/api/index.ts +2 -0
- package/src/api/middleware/auth.ts +80 -0
- package/src/api/middleware/csrf.ts +308 -0
- package/src/api/middleware/errorHandler.ts +166 -0
- package/src/api/middleware/rateLimit.ts +360 -0
- package/src/api/middleware/validation.ts +514 -0
- package/src/api/routes/documents.ts +286 -0
- package/src/api/routes/profiles.ts +237 -0
- package/src/api/routes/search.ts +71 -0
- package/src/api/stores/index.ts +58 -0
- package/src/config/bootstrap-env.ts +3 -0
- package/src/config/env.ts +71 -0
- package/src/config/feature-flags.ts +25 -0
- package/src/config/index.ts +140 -0
- package/src/config/secrets.config.ts +291 -0
- package/src/db/client.ts +92 -0
- package/src/db/index.ts +73 -0
- package/src/db/postgres.ts +72 -0
- package/src/db/schema/chunks.schema.ts +31 -0
- package/src/db/schema/containers.schema.ts +46 -0
- package/src/db/schema/documents.schema.ts +49 -0
- package/src/db/schema/embeddings.schema.ts +32 -0
- package/src/db/schema/index.ts +11 -0
- package/src/db/schema/memories.schema.ts +72 -0
- package/src/db/schema/profiles.schema.ts +34 -0
- package/src/db/schema/queue.schema.ts +59 -0
- package/src/db/schema/relationships.schema.ts +42 -0
- package/src/db/schema.ts +223 -0
- package/src/db/worker-connection.ts +47 -0
- package/src/index.ts +235 -0
- package/src/mcp/CLAUDE.md +1 -0
- package/src/mcp/index.ts +1380 -0
- package/src/mcp/legacyState.ts +22 -0
- package/src/mcp/rateLimit.ts +358 -0
- package/src/mcp/resources.ts +309 -0
- package/src/mcp/results.ts +104 -0
- package/src/mcp/tools.ts +401 -0
- package/src/queues/config.ts +119 -0
- package/src/queues/index.ts +289 -0
- package/src/sdk/client.ts +225 -0
- package/src/sdk/errors.ts +266 -0
- package/src/sdk/http.ts +560 -0
- package/src/sdk/index.ts +244 -0
- package/src/sdk/resources/base.ts +65 -0
- package/src/sdk/resources/connections.ts +204 -0
- package/src/sdk/resources/documents.ts +163 -0
- package/src/sdk/resources/index.ts +10 -0
- package/src/sdk/resources/memories.ts +150 -0
- package/src/sdk/resources/search.ts +60 -0
- package/src/sdk/resources/settings.ts +36 -0
- package/src/sdk/types.ts +674 -0
- package/src/services/chunking/index.ts +451 -0
- package/src/services/chunking.service.ts +650 -0
- package/src/services/csrf.service.ts +252 -0
- package/src/services/documents.repository.ts +219 -0
- package/src/services/documents.service.ts +191 -0
- package/src/services/embedding.service.ts +404 -0
- package/src/services/extraction.service.ts +300 -0
- package/src/services/extractors/code.extractor.ts +451 -0
- package/src/services/extractors/index.ts +9 -0
- package/src/services/extractors/markdown.extractor.ts +461 -0
- package/src/services/extractors/pdf.extractor.ts +315 -0
- package/src/services/extractors/text.extractor.ts +118 -0
- package/src/services/extractors/url.extractor.ts +243 -0
- package/src/services/index.ts +235 -0
- package/src/services/ingestion.service.ts +177 -0
- package/src/services/llm/anthropic.ts +400 -0
- package/src/services/llm/base.ts +460 -0
- package/src/services/llm/contradiction-detector.service.ts +526 -0
- package/src/services/llm/heuristics.ts +148 -0
- package/src/services/llm/index.ts +309 -0
- package/src/services/llm/memory-classifier.service.ts +383 -0
- package/src/services/llm/memory-extension-detector.service.ts +523 -0
- package/src/services/llm/mock.ts +470 -0
- package/src/services/llm/openai.ts +398 -0
- package/src/services/llm/prompts.ts +438 -0
- package/src/services/llm/types.ts +373 -0
- package/src/services/memory.repository.ts +1769 -0
- package/src/services/memory.service.ts +1338 -0
- package/src/services/memory.types.ts +234 -0
- package/src/services/persistence/index.ts +295 -0
- package/src/services/pipeline.service.ts +509 -0
- package/src/services/profile.repository.ts +436 -0
- package/src/services/profile.service.ts +560 -0
- package/src/services/profile.types.ts +270 -0
- package/src/services/relationships/detector.ts +1128 -0
- package/src/services/relationships/index.ts +268 -0
- package/src/services/relationships/memory-integration.ts +459 -0
- package/src/services/relationships/strategies.ts +132 -0
- package/src/services/relationships/types.ts +370 -0
- package/src/services/search.service.ts +761 -0
- package/src/services/search.types.ts +220 -0
- package/src/services/secrets.service.ts +384 -0
- package/src/services/vectorstore/base.ts +327 -0
- package/src/services/vectorstore/index.ts +444 -0
- package/src/services/vectorstore/memory.ts +286 -0
- package/src/services/vectorstore/migration.ts +295 -0
- package/src/services/vectorstore/mock.ts +403 -0
- package/src/services/vectorstore/pgvector.ts +695 -0
- package/src/services/vectorstore/types.ts +247 -0
- package/src/startup.ts +389 -0
- package/src/types/api.types.ts +193 -0
- package/src/types/document.types.ts +103 -0
- package/src/types/index.ts +241 -0
- package/src/types/profile.base.ts +133 -0
- package/src/utils/errors.ts +447 -0
- package/src/utils/id.ts +15 -0
- package/src/utils/index.ts +101 -0
- package/src/utils/logger.ts +313 -0
- package/src/utils/sanitization.ts +501 -0
- package/src/utils/secret-validation.ts +273 -0
- package/src/utils/synonyms.ts +188 -0
- package/src/utils/validation.ts +581 -0
- package/src/workers/chunking.worker.ts +242 -0
- package/src/workers/embedding.worker.ts +358 -0
- package/src/workers/extraction.worker.ts +346 -0
- package/src/workers/indexing.worker.ts +505 -0
- package/tsconfig.json +38 -0
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
#!/bin/bash
# Phase 1 Database Triggers & Functions Test Runner
# TASK-003 Test Execution Script
# Created: 2026-02-02
#
# Creates a throwaway Postgres database, applies migrations, runs the
# Phase 1 SQL test suite against it, then summarizes the results.

set -e # Exit on error

# Color codes for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Configuration — each DB_* value is overridable via the environment.
DB_NAME="${DB_NAME:-supermemory_test}"
DB_USER="${DB_USER:-postgres}"
DB_HOST="${DB_HOST:-localhost}"
DB_PORT="${DB_PORT:-5432}"
TEST_FILE="tests/database/phase1-triggers-functions.test.sql"
LOG_FILE="test_output.log"
|
|
22
|
+
|
|
23
|
+
# Colored console output helpers. All take a single message argument.
print_header() {
    local bar="${BLUE}========================================${NC}"
    echo -e "$bar"
    echo -e "${BLUE}$1${NC}"
    echo -e "$bar"
}

print_success() { echo -e "${GREEN}✅ $1${NC}"; }

print_error() { echo -e "${RED}❌ $1${NC}"; }

print_warning() { echo -e "${YELLOW}⚠️  $1${NC}"; }

print_info() { echo -e "${BLUE}ℹ️  $1${NC}"; }
|
|
45
|
+
|
|
46
|
+
# Verify the psql client is installed and the server accepts connections.
# Exits 1 on failure. Variables are quoted so hosts/users containing
# whitespace or glob characters do not word-split.
check_postgres() {
    print_header "Checking PostgreSQL Connection"

    if ! command -v psql &> /dev/null; then
        print_error "psql not found. Please install PostgreSQL client."
        exit 1
    fi

    # `-l` lists databases: a cheap connectivity/credentials probe.
    if ! psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -lqt &> /dev/null; then
        print_error "Cannot connect to PostgreSQL at $DB_HOST:$DB_PORT"
        print_info "Make sure PostgreSQL is running and credentials are correct"
        exit 1
    fi

    print_success "PostgreSQL connection verified"
}
|
|
63
|
+
|
|
64
|
+
# Verify the pgvector extension is installed on the server (available to
# CREATE EXTENSION). Exits 1 if missing. Expansions quoted to avoid
# word-splitting on unusual host/user values.
check_pgvector() {
    print_header "Checking pgvector Extension"

    # Query the catalog of installable extensions on the `postgres` DB.
    if ! psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d postgres -tAc "SELECT 1 FROM pg_available_extensions WHERE name='vector'" | grep -q 1; then
        print_error "pgvector extension not available"
        print_info "Install pgvector: https://github.com/pgvector/pgvector"
        exit 1
    fi

    print_success "pgvector extension available"
}
|
|
77
|
+
|
|
78
|
+
# Drop any stale copy of the test database, create a fresh one, and enable
# the pgvector extension in it. All expansions quoted (DB_NAME could
# otherwise word-split or glob).
create_test_db() {
    print_header "Setting Up Test Database"

    # Drop existing test database if it exists (exact-word match on name).
    if psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -lqt | cut -d \| -f 1 | grep -qw "$DB_NAME"; then
        print_warning "Test database '$DB_NAME' already exists. Dropping it..."
        dropdb -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" "$DB_NAME"
    fi

    # Create fresh test database
    print_info "Creating test database '$DB_NAME'..."
    createdb -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" "$DB_NAME"

    # Enable pgvector extension
    print_info "Enabling pgvector extension..."
    psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -c "CREATE EXTENSION vector;" > /dev/null

    print_success "Test database created and configured"
}
|
|
98
|
+
|
|
99
|
+
# Apply every .sql migration (lexicographic order, skipping test_* fixtures)
# to the test database. Individual migration failures are reported as
# warnings, not fatal — a migration may already be applied.
run_migrations() {
    print_header "Running Database Migrations"

    MIGRATION_DIR="scripts/migrations"

    if [ ! -d "$MIGRATION_DIR" ]; then
        print_warning "Migration directory not found. Skipping migrations."
        return
    fi

    # Glob already expands in sorted order; this avoids the fragile
    # `$(ls ...)` parsing the original used (breaks on spaces in names).
    for migration in "$MIGRATION_DIR"/*.sql; do
        [ -e "$migration" ] || continue   # no matches: glob stays literal

        # Skip test fixture files
        if [[ $migration == *"test_"* ]]; then
            continue
        fi

        print_info "Running migration: $(basename "$migration")"
        psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -f "$migration" > /dev/null 2>&1 || {
            print_warning "Migration $(basename "$migration") failed or already applied"
        }
    done

    print_success "Migrations completed"
}
|
|
125
|
+
|
|
126
|
+
# Execute the SQL test suite, redirecting all psql output into $LOG_FILE.
# Returns 1 when psql reports a failure; exits 1 if the suite file is absent.
run_tests() {
    print_header "Running Phase 1 Test Suite"

    [ -f "$TEST_FILE" ] || {
        print_error "Test file not found: $TEST_FILE"
        exit 1
    }

    print_info "Executing tests from $TEST_FILE"
    print_info "Output will be saved to $LOG_FILE"

    # Run tests and capture output
    if psql -h $DB_HOST -p $DB_PORT -U $DB_USER -d $DB_NAME -f "$TEST_FILE" > "$LOG_FILE" 2>&1; then
        print_success "Test execution completed"
    else
        print_error "Test execution failed"
        return 1
    fi
}
|
|
146
|
+
|
|
147
|
+
# Parse $LOG_FILE and print a pass/fail/warning summary plus performance
# lines. Returns non-zero when any test failed.
analyze_results() {
    print_header "Analyzing Test Results"

    if [ ! -f "$LOG_FILE" ]; then
        print_error "Log file not found: $LOG_FILE"
        exit 1
    fi

    # BUG FIX: `grep -c` prints "0" itself when nothing matches (while
    # exiting non-zero), so the old `|| echo 0` fallback emitted a SECOND
    # "0" line, corrupting the counters and breaking the -gt tests below.
    # `|| true` keeps `set -e` happy without duplicating output.
    PASSED=$(grep -c "TEST PASSED" "$LOG_FILE" 2>/dev/null || true)
    FAILED=$(grep -c "TEST FAILED" "$LOG_FILE" 2>/dev/null || true)
    WARNINGS=$(grep -c "PERFORMANCE WARNING" "$LOG_FILE" 2>/dev/null || true)
    # Guard against an empty value if grep itself errored (unreadable file).
    PASSED=${PASSED:-0}
    FAILED=${FAILED:-0}
    WARNINGS=${WARNINGS:-0}

    echo ""
    echo "Test Summary:"
    echo "============="
    print_success "Passed: $PASSED"

    if [ "$FAILED" -gt 0 ]; then
        print_error "Failed: $FAILED"
    else
        print_info "Failed: $FAILED"
    fi

    if [ "$WARNINGS" -gt 0 ]; then
        print_warning "Warnings: $WARNINGS"
    fi

    # Show performance metrics
    echo ""
    echo "Performance Metrics:"
    echo "==================="
    grep "PERFORMANCE:" "$LOG_FILE" | while read -r line; do
        print_info "$line"
    done

    # Show failures if any
    if [ "$FAILED" -gt 0 ]; then
        echo ""
        print_error "Failed Tests:"
        grep "TEST FAILED" "$LOG_FILE"
        return 1
    fi

    echo ""
    print_success "All tests passed! 🎉"
    return 0
}
|
|
196
|
+
|
|
197
|
+
# Function to show detailed logs
# Dumps the full raw psql output captured in $LOG_FILE to stdout
# (used when --verbose is passed).
show_logs() {
    print_header "Detailed Test Output"
    cat "$LOG_FILE"
}
|
|
202
|
+
|
|
203
|
+
# Drop the test database unless KEEP_DB=true, and point the user at the
# saved log. dropdb errors are ignored (DB may already be gone).
# Expansions quoted so odd DB names/hosts do not word-split.
cleanup() {
    print_header "Cleanup"

    if [ "$KEEP_DB" = "true" ]; then
        print_info "Keeping test database (KEEP_DB=true)"
        print_info "To clean up later: dropdb $DB_NAME"
    else
        print_info "Dropping test database..."
        dropdb -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" "$DB_NAME" 2>/dev/null || true
        print_success "Test database dropped"
    fi

    if [ -f "$LOG_FILE" ]; then
        print_info "Test log saved to: $LOG_FILE"
    fi
}
|
|
220
|
+
|
|
221
|
+
# Main execution: probe environment, build the DB, run and analyze the
# suite, then always clean up before exiting with the analysis result.
main() {
    print_header "Phase 1 Database Test Suite"
    echo "Test Database: $DB_NAME"
    echo "PostgreSQL: $DB_USER@$DB_HOST:$DB_PORT"
    echo ""

    # Run all steps
    check_postgres
    check_pgvector
    create_test_db
    run_migrations
    # BUG FIX: under `set -e` a bare failing `run_tests` aborted the whole
    # script here, skipping analyze_results AND cleanup — leaking the test
    # database. Tolerate the failure; analyze_results reports it from the
    # log and drives the exit code.
    run_tests || true

    # Analyze results
    if analyze_results; then
        EXIT_CODE=0
    else
        EXIT_CODE=1
    fi

    # Optional: show detailed logs
    if [ "$VERBOSE" = "true" ]; then
        show_logs
    fi

    # Cleanup
    cleanup

    exit $EXIT_CODE
}
|
|
252
|
+
|
|
253
|
+
# ---- Command-line parsing -------------------------------------------------
# Recognized flags: --keep-db, --verbose, --help. Anything else is an error.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --keep-db)
      KEEP_DB=true
      shift
      ;;
    --verbose)
      VERBOSE=true
      shift
      ;;
    --help)
      echo "Usage: $0 [OPTIONS]"
      echo ""
      echo "Options:"
      echo " --keep-db Keep test database after tests complete"
      echo " --verbose Show detailed test output"
      echo " --help Show this help message"
      echo ""
      echo "Environment Variables:"
      echo " DB_NAME Test database name (default: supermemory_test)"
      echo " DB_USER PostgreSQL user (default: postgres)"
      echo " DB_HOST PostgreSQL host (default: localhost)"
      echo " DB_PORT PostgreSQL port (default: 5432)"
      echo ""
      echo "Example:"
      echo " DB_NAME=mytest $0 --keep-db --verbose"
      exit 0
      ;;
    *)
      print_error "Unknown option: $1"
      echo "Use --help for usage information"
      exit 1
      ;;
  esac
done

# Hand off to the main driver.
main
|
package/scripts/setup.ts
ADDED
|
@@ -0,0 +1,222 @@
|
|
|
1
|
+
#!/usr/bin/env tsx
|
|
2
|
+
import { createInterface } from 'node:readline/promises';
|
|
3
|
+
import { randomBytes } from 'node:crypto';
|
|
4
|
+
import { existsSync } from 'node:fs';
|
|
5
|
+
import { readFile, rename, writeFile } from 'node:fs/promises';
|
|
6
|
+
import { dirname } from 'node:path';
|
|
7
|
+
import { mkdir } from 'node:fs/promises';
|
|
8
|
+
import { resolveEnvFile } from '../src/config/env.js';
|
|
9
|
+
|
|
10
|
+
// Configuration collected from the interactive prompts below and rendered
// into the generated .env file.
type Answers = {
  databaseUrl: string;
  authEnabled: boolean;
  authToken: string;          // empty when auth is disabled
  redisUrl: string;           // empty when Redis workers are declined
  llmProvider: '' | 'openai' | 'anthropic'; // '' means "none"
  openaiApiKey: string;
  anthropicApiKey: string;
};

// Defaults use ports 15432/16379 — matching the POSTGRES_HOST_PORT /
// REDIS_HOST_PORT values written into the generated env file below
// (presumably the ports published by the bundled docker-compose — confirm).
const DEFAULT_DATABASE_URL = 'postgresql://supermemory:supermemory_secret@localhost:15432/supermemory';
const DEFAULT_REDIS_URL = 'redis://localhost:16379';
|
|
22
|
+
|
|
23
|
+
function parseEnv(raw: string): Record<string, string> {
|
|
24
|
+
const env: Record<string, string> = {};
|
|
25
|
+
for (const line of raw.split('\n')) {
|
|
26
|
+
const trimmed = line.trim();
|
|
27
|
+
if (!trimmed || trimmed.startsWith('#')) continue;
|
|
28
|
+
const idx = trimmed.indexOf('=');
|
|
29
|
+
if (idx <= 0) continue;
|
|
30
|
+
const key = trimmed.slice(0, idx).trim();
|
|
31
|
+
const value = trimmed.slice(idx + 1).trim();
|
|
32
|
+
env[key] = value;
|
|
33
|
+
}
|
|
34
|
+
return env;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
function parseArgs(): { envFile?: string } {
|
|
38
|
+
const args = process.argv.slice(2);
|
|
39
|
+
let envFile: string | undefined;
|
|
40
|
+
|
|
41
|
+
for (let index = 0; index < args.length; index += 1) {
|
|
42
|
+
const arg = args[index];
|
|
43
|
+
|
|
44
|
+
if (arg === '--env-file') {
|
|
45
|
+
const value = args[index + 1];
|
|
46
|
+
if (!value) {
|
|
47
|
+
throw new Error('--env-file requires a value');
|
|
48
|
+
}
|
|
49
|
+
envFile = value;
|
|
50
|
+
index += 1;
|
|
51
|
+
continue;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
if (arg.startsWith('--env-file=')) {
|
|
55
|
+
envFile = arg.slice('--env-file='.length);
|
|
56
|
+
continue;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
throw new Error(`Unknown argument: ${arg}`);
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return { envFile };
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
async function questionWithDefault(
|
|
66
|
+
rl: ReturnType<typeof createInterface>,
|
|
67
|
+
prompt: string,
|
|
68
|
+
fallback: string
|
|
69
|
+
): Promise<string> {
|
|
70
|
+
const answer = (await rl.question(`${prompt} [${fallback}]: `)).trim();
|
|
71
|
+
return answer || fallback;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async function askYesNo(
|
|
75
|
+
rl: ReturnType<typeof createInterface>,
|
|
76
|
+
prompt: string,
|
|
77
|
+
fallback: boolean
|
|
78
|
+
): Promise<boolean> {
|
|
79
|
+
const label = fallback ? 'Y/n' : 'y/N';
|
|
80
|
+
const answer = (await rl.question(`${prompt} (${label}): `)).trim().toLowerCase();
|
|
81
|
+
if (!answer) return fallback;
|
|
82
|
+
return answer === 'y' || answer === 'yes';
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/**
 * Interactive configurator: loads any existing env file, walks the user
 * through database/auth/Redis/LLM questions (existing values become the
 * defaults), backs up the old file, and writes a fresh env file with the
 * answers plus fixed defaults. Prompt order is significant — do not
 * reorder without checking the defaults each question pulls from
 * `existingEnv`.
 */
async function run(): Promise<void> {
  const { envFile } = parseArgs();
  const envResolution = resolveEnvFile({ cliEnvFile: envFile });
  // Existing values seed the prompt defaults so re-running is idempotent.
  const existingEnv = envResolution.exists ? parseEnv(await readFile(envResolution.path, 'utf-8')) : {};

  const rl = createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  console.log('\nSupermemory setup configurator\n');
  console.log('Press Enter to accept defaults.\n');

  const useDefaultDb = await askYesNo(
    rl,
    'Use local Docker/Postgres default connection string?',
    true
  );
  const databaseUrl = useDefaultDb
    ? DEFAULT_DATABASE_URL
    : await questionWithDefault(
        rl,
        'DATABASE_URL',
        existingEnv.DATABASE_URL || DEFAULT_DATABASE_URL
      );

  const authEnabled = await askYesNo(
    rl,
    'Enable bearer-token auth for REST API?',
    (existingEnv.AUTH_ENABLED || 'false') === 'true'
  );
  // Token prompt only when auth is on; a fresh random token is offered
  // as the default if none exists yet.
  const authToken = authEnabled
    ? await questionWithDefault(
        rl,
        'AUTH_TOKEN',
        existingEnv.AUTH_TOKEN || randomBytes(24).toString('base64url')
      )
    : '';

  const useRedis = await askYesNo(
    rl,
    'Use Redis queue workers (recommended for async extraction)?',
    true
  );
  const redisUrl = useRedis
    ? await questionWithDefault(rl, 'REDIS_URL', existingEnv.REDIS_URL || DEFAULT_REDIS_URL)
    : '';

  const llmChoice = await questionWithDefault(
    rl,
    'LLM provider (none/openai/anthropic)',
    existingEnv.LLM_PROVIDER || 'none'
  );
  // Any answer other than the two known providers (including 'none')
  // collapses to '' = no provider.
  const llmProvider: '' | 'openai' | 'anthropic' =
    llmChoice === 'openai' || llmChoice === 'anthropic' ? llmChoice : '';

  // Only prompt for the selected provider's key; the other provider's
  // existing key (if any) is carried through unchanged.
  const openaiApiKey =
    llmProvider === 'openai'
      ? await questionWithDefault(rl, 'OPENAI_API_KEY', existingEnv.OPENAI_API_KEY || '')
      : existingEnv.OPENAI_API_KEY || '';
  const anthropicApiKey =
    llmProvider === 'anthropic'
      ? await questionWithDefault(rl, 'ANTHROPIC_API_KEY', existingEnv.ANTHROPIC_API_KEY || '')
      : existingEnv.ANTHROPIC_API_KEY || '';

  rl.close();

  const answers: Answers = {
    databaseUrl,
    authEnabled,
    authToken,
    redisUrl,
    llmProvider,
    openaiApiKey,
    anthropicApiKey,
  };

  // Preserve the previous file under a timestamped .backup-* name before
  // overwriting — the rewrite below is destructive.
  if (envResolution.exists) {
    const backupPath = `${envResolution.path}.backup-${Date.now()}`;
    await rename(envResolution.path, backupPath);
    console.log(`Backed up existing env file to ${backupPath}`);
  }

  // NOTE: CSRF_SECRET is regenerated on every run; unanswered settings
  // below are fixed defaults, not prompted.
  const envText = `# Generated by scripts/setup.ts
DATABASE_URL=${answers.databaseUrl}
API_HOST=localhost
API_PORT=13000
API_HOST_PORT=13000

# Optional REST API auth
AUTH_ENABLED=${answers.authEnabled ? 'true' : 'false'}
AUTH_TOKEN=${answers.authToken}
CSRF_SECRET=${randomBytes(48).toString('base64url')}

# Optional queue workers (if unset, API falls back to inline processing)
REDIS_URL=${answers.redisUrl}
POSTGRES_HOST_PORT=15432
REDIS_HOST_PORT=16379
BULLMQ_CONCURRENCY_EXTRACTION=5
BULLMQ_CONCURRENCY_CHUNKING=3
BULLMQ_CONCURRENCY_EMBEDDING=2
BULLMQ_CONCURRENCY_INDEXING=1

# Optional LLM extraction provider
LLM_PROVIDER=${answers.llmProvider}
OPENAI_API_KEY=${answers.openaiApiKey}
ANTHROPIC_API_KEY=${answers.anthropicApiKey}

LLM_MODEL=
LLM_MAX_TOKENS=2000
LLM_TEMPERATURE=0.1
LLM_TIMEOUT_MS=30000
LLM_MAX_RETRIES=3
LLM_CACHE_ENABLED=true
LLM_CACHE_TTL_MS=900000

EMBEDDING_MODEL=text-embedding-3-small
EMBEDDING_DIMENSIONS=1536
RATE_LIMIT_REQUESTS=100
RATE_LIMIT_WINDOW_MS=60000
LOG_LEVEL=info
`;

  await mkdir(dirname(envResolution.path), { recursive: true });
  await writeFile(envResolution.path, envText, 'utf-8');

  console.log(`\nWrote env file successfully: ${envResolution.path}\n`);
  console.log('Next steps:');
  console.log('1) Start dependencies: docker compose up -d postgres redis');
  console.log('2) Run migrations: ./scripts/migrations/run_migrations.sh');
  console.log(`3) Validate config: npm run doctor -- --env-file ${envResolution.path}`);
  console.log('4) Start API: npm run dev');
}
|
|
218
|
+
|
|
219
|
+
run().catch((error) => {
|
|
220
|
+
console.error('Setup failed:', error);
|
|
221
|
+
process.exit(1);
|
|
222
|
+
});
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# Smoke test: exercise the published @twelvehart/supermemory installer
# end-to-end via npx, writing into a scratch directory.
set -euo pipefail

# Install target (first positional arg) and versions to test; both
# versions are overridable via environment variables for RC testing.
TARGET_DIR="${1:-./supermemory-smoke}"
INSTALLER_VERSION="${SUPERMEMORY_INSTALLER_VERSION:-latest}"
RUNTIME_VERSION="${SUPERMEMORY_RUNTIME_VERSION:-latest}"

# npx -y skips the "install package?" prompt; --skip-api-keys presumably
# avoids interactive key prompts — confirm against the installer's docs.
npx -y "@twelvehart/supermemory@${INSTALLER_VERSION}" full \
  --dir "$TARGET_DIR" \
  --mcp project \
  --runtime-version "$RUNTIME_VERSION" \
  --skip-api-keys
|