@stackmemoryai/stackmemory 0.3.18 → 0.3.20

This diff shows the changes between publicly released versions of this package as they appear in the supported public registries. It is provided for informational purposes only.
@@ -0,0 +1,146 @@
+ import inquirer from "inquirer";
+ import chalk from "chalk";
+ import { homedir } from "os";
+ import { join } from "path";
+ import { existsSync, mkdirSync, writeFileSync, readFileSync } from "fs";
+ import open from "open";
+ function registerLoginCommand(program) {
+   program.command("login").description("Login to hosted StackMemory service").option("--api-url <url>", "Custom API URL", "https://api.stackmemory.ai").option("--email <email>", "Email address for login").option("--password <password>", "Password (not recommended in CLI)").action(async (options) => {
+     const cfgDir = join(homedir(), ".stackmemory");
+     if (!existsSync(cfgDir)) mkdirSync(cfgDir, { recursive: true });
+     console.log(chalk.cyan("\u{1F510} StackMemory Hosted Service Login\n"));
+     const credentials = await inquirer.prompt([
+       {
+         type: "input",
+         name: "email",
+         message: "Email:",
+         default: options.email,
+         validate: (input) => {
+           const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
+           return emailRegex.test(input) ? true : "Please enter a valid email";
+         }
+       },
+       {
+         type: "password",
+         name: "password",
+         message: "Password:",
+         default: options.password,
+         mask: "*",
+         validate: (input) => input.length >= 6 ? true : "Password must be at least 6 characters"
+       }
+     ]);
+     console.log(chalk.gray("\nAuthenticating with StackMemory API..."));
+     try {
+       const apiUrl = options.apiUrl || process.env.STACKMEMORY_API_URL || "https://api.stackmemory.ai";
+       const response = await fetch(`${apiUrl}/auth/login`, {
+         method: "POST",
+         headers: {
+           "Content-Type": "application/json",
+           "User-Agent": "StackMemory-CLI/0.3.19"
+         },
+         body: JSON.stringify({
+           email: credentials.email,
+           password: credentials.password
+         })
+       });
+       const data = await response.json();
+       if (!response.ok || !data.success) {
+         if (response.status === 404) {
+           console.log(chalk.yellow("\n\u26A0\uFE0F Hosted API not available. Would you like to:"));
+           const { choice } = await inquirer.prompt([
+             {
+               type: "list",
+               name: "choice",
+               message: "Select an option:",
+               choices: [
+                 { name: "Open signup page in browser", value: "signup" },
+                 { name: "Configure database URL manually", value: "manual" },
+                 { name: "Use local database", value: "local" },
+                 { name: "Cancel", value: "cancel" }
+               ]
+             }
+           ]);
+           if (choice === "signup") {
+             await open("https://stackmemory.ai/signup");
+             console.log(chalk.cyan("Opening signup page in browser..."));
+             return;
+           } else if (choice === "manual") {
+             const { databaseUrl } = await inquirer.prompt([
+               {
+                 type: "password",
+                 name: "databaseUrl",
+                 message: "Enter your DATABASE_URL (postgres://...):",
+                 validate: (input) => input.startsWith("postgres://") || input.startsWith("postgresql://") ? true : "Must start with postgres:// or postgresql://"
+               }
+             ]);
+             const cfgPath2 = join(cfgDir, "config.json");
+             let cfg2 = {};
+             try {
+               if (existsSync(cfgPath2)) cfg2 = JSON.parse(readFileSync(cfgPath2, "utf-8"));
+             } catch {
+             }
+             cfg2.database = { mode: "hosted", url: databaseUrl };
+             cfg2.auth = { email: credentials.email };
+             writeFileSync(cfgPath2, JSON.stringify(cfg2, null, 2));
+             console.log(chalk.green("\u2713 Database configured successfully"));
+             return;
+           } else if (choice === "local") {
+             const cfgPath2 = join(cfgDir, "config.json");
+             let cfg2 = {};
+             try {
+               if (existsSync(cfgPath2)) cfg2 = JSON.parse(readFileSync(cfgPath2, "utf-8"));
+             } catch {
+             }
+             cfg2.database = { mode: "local" };
+             writeFileSync(cfgPath2, JSON.stringify(cfg2, null, 2));
+             console.log(chalk.green("\u2713 Switched to local database mode"));
+             return;
+           } else {
+             console.log(chalk.gray("Login cancelled"));
+             return;
+           }
+         }
+         throw new Error(data.error || "Authentication failed");
+       }
+       const cfgPath = join(cfgDir, "config.json");
+       let cfg = {};
+       try {
+         if (existsSync(cfgPath)) cfg = JSON.parse(readFileSync(cfgPath, "utf-8"));
+       } catch {
+       }
+       cfg.auth = {
+         apiKey: data.apiKey,
+         apiUrl,
+         email: credentials.email
+       };
+       if (data.databaseUrl) {
+         cfg.database = {
+           mode: "hosted",
+           url: data.databaseUrl
+         };
+       }
+       writeFileSync(cfgPath, JSON.stringify(cfg, null, 2));
+       const envFile = join(cfgDir, "stackmemory.env");
+       const envContent = `# StackMemory Authentication
+ STACKMEMORY_API_KEY=${data.apiKey}
+ STACKMEMORY_API_URL=${apiUrl}
+ ${data.databaseUrl ? `DATABASE_URL=${data.databaseUrl}` : ""}
+ `;
+       writeFileSync(envFile, envContent);
+       console.log(chalk.green("\n\u2705 Successfully logged in to StackMemory"));
+       console.log(chalk.green(`\u2713 Configuration saved to ~/.stackmemory/config.json`));
+       console.log(chalk.gray("\nYou can now use:"));
+       console.log(chalk.cyan(" stackmemory sync ") + chalk.gray("- Sync your context to the cloud"));
+       console.log(chalk.cyan(" stackmemory db status") + chalk.gray("- Check database connection"));
+       console.log(chalk.cyan(" stackmemory context ") + chalk.gray("- Manage your contexts"));
+     } catch (error) {
+       console.error(chalk.red("\n\u274C Login failed:"), error.message);
+       console.log(chalk.yellow("\nTip: Visit https://stackmemory.ai/signup to create an account"));
+       process.exit(1);
+     }
+   });
+ }
+ export {
+   registerLoginCommand
+ };
+ //# sourceMappingURL=login.js.map
@@ -0,0 +1,178 @@
+ #!/usr/bin/env node
+ import { Command } from "commander";
+ import { Pool } from "pg";
+ import Database from "better-sqlite3";
+ const MIGRATIONS = [
+   {
+     version: 1,
+     description: "base schema",
+     statements: [
+       // contexts
+       `CREATE TABLE IF NOT EXISTS contexts (id ${isPg() ? "BIGSERIAL" : "INTEGER PRIMARY KEY AUTOINCREMENT"} PRIMARY KEY, project_id TEXT NOT NULL, content TEXT NOT NULL, type TEXT DEFAULT 'general', ${isPg() ? "metadata JSONB DEFAULT '{}'::jsonb" : "metadata TEXT DEFAULT '{}'"}, created_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"}, updated_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"})`,
+       // api_keys
+       `CREATE TABLE IF NOT EXISTS api_keys (id ${isPg() ? "BIGSERIAL" : "INTEGER PRIMARY KEY AUTOINCREMENT"} PRIMARY KEY, key_hash TEXT UNIQUE NOT NULL, user_id TEXT NOT NULL, name TEXT, created_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"}, ${isPg() ? "last_used TIMESTAMPTZ" : "last_used DATETIME"}, revoked ${isPg() ? "BOOLEAN" : "BOOLEAN"} DEFAULT ${isPg() ? "false" : "0"})`,
+       // users with role
+       `CREATE TABLE IF NOT EXISTS users (id TEXT PRIMARY KEY, email TEXT, name TEXT, tier TEXT DEFAULT 'free', role TEXT DEFAULT 'user', created_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"}, updated_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"})`,
+       // projects
+       `CREATE TABLE IF NOT EXISTS projects (id TEXT PRIMARY KEY, name TEXT, is_public ${isPg() ? "BOOLEAN" : "BOOLEAN"} DEFAULT ${isPg() ? "false" : "0"}, created_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"}, updated_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"})`,
+       // project members
+       `CREATE TABLE IF NOT EXISTS project_members (project_id TEXT NOT NULL, user_id TEXT NOT NULL, role TEXT NOT NULL ${isPg() ? "" : "CHECK (role IN ('admin','owner','editor','viewer'))"}, created_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"}, PRIMARY KEY (project_id, user_id))`,
+       // indexes
+       `CREATE INDEX IF NOT EXISTS idx_contexts_project ON contexts(project_id)`,
+       `CREATE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(key_hash)`,
+       `CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)`,
+       `CREATE INDEX IF NOT EXISTS idx_project_members_user ON project_members(user_id)`
+     ]
+   },
+   {
+     version: 2,
+     description: "admin sessions",
+     statements: [
+       `CREATE TABLE IF NOT EXISTS admin_sessions (id TEXT PRIMARY KEY, user_id TEXT NOT NULL, created_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} DEFAULT ${isPg() ? "NOW()" : "CURRENT_TIMESTAMP"}, expires_at ${isPg() ? "TIMESTAMPTZ" : "DATETIME"} NOT NULL, user_agent TEXT, ip TEXT)`,
+       `CREATE INDEX IF NOT EXISTS idx_admin_sessions_user ON admin_sessions(user_id)`
+     ]
+   },
+   {
+     version: 3,
+     description: "role enums & checks",
+     statements: [
+       // PG enum upgrades; for SQLite CHECK already present
+       `CREATE TYPE user_role AS ENUM ('admin','user')`,
+       `CREATE TYPE member_role AS ENUM ('admin','owner','editor','viewer')`,
+       `ALTER TABLE users ALTER COLUMN role TYPE user_role USING role::user_role`,
+       `ALTER TABLE project_members ALTER COLUMN role TYPE member_role USING role::member_role`,
+       `ALTER TABLE project_members ADD CONSTRAINT project_members_role_check CHECK (role IN ('admin','owner','editor','viewer'))`,
+       `ALTER TABLE users ADD CONSTRAINT users_role_check CHECK (role IN ('admin','user'))`
+     ]
+   }
+ ];
+ function isPg() {
+   const url = process.env.DATABASE_URL || "";
+   return url.startsWith("postgres://") || url.startsWith("postgresql://");
+ }
+ async function connect() {
+   if (isPg()) {
+     const pool = new Pool({ connectionString: process.env.DATABASE_URL });
+     await pool.query(`CREATE TABLE IF NOT EXISTS railway_schema_version (version INTEGER PRIMARY KEY, applied_at TIMESTAMPTZ DEFAULT NOW(), description TEXT)`);
+     return { kind: "pg", pg: pool };
+   } else {
+     const path = process.env.DATABASE_URL || ".stackmemory/railway.db";
+     const db = new Database(path);
+     db.exec(`CREATE TABLE IF NOT EXISTS railway_schema_version (version INTEGER PRIMARY KEY, applied_at DATETIME DEFAULT CURRENT_TIMESTAMP, description TEXT)`);
+     return { kind: "sqlite", sqlite: db };
+   }
+ }
+ async function getCurrentVersion(m) {
+   if (m.kind === "pg") {
+     const r = await m.pg.query("SELECT COALESCE(MAX(version), 0) AS v FROM railway_schema_version");
+     return Number(r.rows[0]?.v || 0);
+   }
+   const row = m.sqlite.prepare("SELECT COALESCE(MAX(version), 0) AS v FROM railway_schema_version").get();
+   return Number(row?.v || 0);
+ }
+ async function listApplied(m) {
+   if (m.kind === "pg") {
+     const r = await m.pg.query("SELECT version, description, applied_at FROM railway_schema_version ORDER BY version ASC");
+     return r.rows.map((row) => ({ version: Number(row.version), description: row.description }));
+   }
+   const rows = m.sqlite.prepare("SELECT version, description, applied_at FROM railway_schema_version ORDER BY version ASC").all();
+   return rows.map((row) => ({ version: Number(row.version), description: row.description }));
+ }
+ async function applyTo(m, target) {
+   const current = await getCurrentVersion(m);
+   const pending = MIGRATIONS.filter((mig) => mig.version > current && mig.version <= target);
+   for (const mig of pending) {
+     if (m.kind === "pg") {
+       for (const s of mig.statements) {
+         try {
+           await m.pg.query(s);
+         } catch {
+         }
+       }
+       await m.pg.query("INSERT INTO railway_schema_version (version, description) VALUES ($1, $2) ON CONFLICT (version) DO NOTHING", [mig.version, mig.description]);
+     } else {
+       m.sqlite.exec("BEGIN");
+       try {
+         for (const s of mig.statements) {
+           try {
+             m.sqlite.exec(s);
+           } catch {
+           }
+         }
+         m.sqlite.prepare("INSERT OR IGNORE INTO railway_schema_version (version, description) VALUES (?, ?)").run(mig.version, mig.description);
+         m.sqlite.exec("COMMIT");
+       } catch {
+         m.sqlite.exec("ROLLBACK");
+         throw new Error(`Migration ${mig.version} failed`);
+       }
+     }
+     console.log(`Applied migration v${mig.version}: ${mig.description}`);
+   }
+ }
+ async function rollbackTo(m, target) {
+   const current = await getCurrentVersion(m);
+   if (target >= current) {
+     console.log("Nothing to rollback");
+     return;
+   }
+   if (m.kind === "pg") {
+     await m.pg.query("DELETE FROM railway_schema_version WHERE version > $1", [target]);
+   } else {
+     m.sqlite.prepare("DELETE FROM railway_schema_version WHERE version > ?").run(target);
+   }
+   console.log(`Rolled back schema version pointer from ${current} to ${target}`);
+ }
+ async function main() {
+   const program = new Command();
+   program.name("railway-migrate").description("Manage Railway server schema migrations").option("-d, --database <url>", "DATABASE_URL override");
+   program.command("list").description("List applied migrations").action(async () => {
+     if (program.opts().database) process.env.DATABASE_URL = program.opts().database;
+     const m = await connect();
+     const applied = await listApplied(m);
+     const current = await getCurrentVersion(m);
+     console.log("Current version:", current);
+     if (applied.length === 0) console.log("(no migrations applied)");
+     applied.forEach((a) => console.log(`v${a.version} - ${a.description}`));
+     process.exit(0);
+   });
+   program.command("status").description("Show current version and pending migrations").action(async () => {
+     if (program.opts().database) process.env.DATABASE_URL = program.opts().database;
+     const m = await connect();
+     const current = await getCurrentVersion(m);
+     const latest = Math.max(...MIGRATIONS.map((m2) => m2.version));
+     const pending = MIGRATIONS.filter((mig) => mig.version > current);
+     console.log("Current version:", current);
+     console.log("Latest available:", latest);
+     if (pending.length === 0) console.log("No pending migrations.");
+     else {
+       console.log("Pending:");
+       pending.forEach((p) => console.log(`- v${p.version} ${p.description}`));
+     }
+     process.exit(0);
+   });
+   program.command("apply").description("Apply migrations up to a target").option("--to <version|latest>", 'Target version (number or "latest")', "latest").action(async (cmd) => {
+     if (program.opts().database) process.env.DATABASE_URL = program.opts().database;
+     const m = await connect();
+     const latest = Math.max(...MIGRATIONS.map((m2) => m2.version));
+     const target = cmd.to === "latest" ? latest : parseInt(cmd.to, 10);
+     if (!Number.isFinite(target)) throw new Error("Invalid target");
+     await applyTo(m, target);
+     console.log("Done.");
+     process.exit(0);
+   });
+   program.command("rollback").description("Rollback schema version pointer (non-destructive)").option("--to <version>", "Target version number", "0").action(async (cmd) => {
+     if (program.opts().database) process.env.DATABASE_URL = program.opts().database;
+     const m = await connect();
+     const target = parseInt(cmd.to, 10);
+     if (!Number.isFinite(target)) throw new Error("Invalid target");
+     await rollbackTo(m, target);
+     console.log("Done.");
+     process.exit(0);
+   });
+   await program.parseAsync(process.argv);
+ }
+ main().catch((e) => {
+   console.error(e);
+   process.exit(1);
+ });
+ //# sourceMappingURL=migrate.js.map
@@ -64,8 +64,51 @@ async function runOnboarding() {
      scanProjects: false,
      enableLinear: false,
      enableAnalytics: true,
-     defaultContextPath: join(homedir(), ".stackmemory")
+     defaultContextPath: join(homedir(), ".stackmemory"),
+     storageMode: "local"
    };
+   const { storageMode } = await inquirer.prompt([
+     {
+       type: "list",
+       name: "storageMode",
+       message: "Where should StackMemory store data?",
+       choices: [
+         { name: "Local (free, SQLite in ~/.stackmemory)", value: "local" },
+         { name: "Hosted (paid, managed Postgres)", value: "hosted" }
+       ],
+       default: "local"
+     }
+   ]);
+   config.storageMode = storageMode;
+   if (storageMode === "hosted") {
+     const { hasAccount } = await inquirer.prompt([
+       {
+         type: "confirm",
+         name: "hasAccount",
+         message: "Do you already have a hosted account & connection string?",
+         default: false
+       }
+     ]);
+     if (!hasAccount) {
+       try {
+         const signupUrl = "https://stackmemory.ai/hosted";
+         console.log(chalk.gray(`Opening signup page: ${signupUrl}`));
+         const mod = await import("open");
+         await mod.default(signupUrl);
+       } catch (e) {
+         console.log(chalk.yellow("Could not open browser automatically. Please sign up and obtain your DATABASE_URL."));
+       }
+     }
+     const { databaseUrl } = await inquirer.prompt([
+       {
+         type: "password",
+         name: "databaseUrl",
+         message: "Paste your DATABASE_URL (postgres://...)",
+         validate: (input) => input.startsWith("postgres://") || input.startsWith("postgresql://") ? true : "Must start with postgres:// or postgresql://"
+       }
+     ]);
+     config.databaseUrl = databaseUrl;
+   }
    if (setupType === "basic") {
      const basicAnswers = await inquirer.prompt([
        {
@@ -299,12 +342,28 @@ async function applyConfiguration(config) {
      },
      paths: {
        default: config.defaultContextPath
+     },
+     database: {
+       mode: config.storageMode,
+       ...config.databaseUrl ? { url: config.databaseUrl } : {}
      }
    };
    writeFileSync(
      join(configPath, "config.json"),
      JSON.stringify(mainConfig, null, 2)
    );
+   if (config.storageMode === "hosted" && config.databaseUrl) {
+     try {
+       const envFile = join(configPath, "railway.env");
+       writeFileSync(envFile, `# StackMemory hosted DB
+ DATABASE_URL=${config.databaseUrl}
+ `);
+       console.log(chalk.green(" \u2713 Saved hosted DB settings to ~/.stackmemory/railway.env"));
+       console.log(chalk.gray(" Tip: export DATABASE_URL from this file in your shell profile."));
+     } catch (e) {
+       console.log(chalk.yellow(" \u26A0 Could not write hosted DB env file"));
+     }
+   }
    const binPath = "/usr/local/bin/claude-sm";
    const sourcePath = join(configPath, "bin", "stackmemory");
    try {
@@ -337,6 +396,101 @@ exec stackmemory "$@"
        chalk.yellow(" \u26A0 Could not create claude-sm symlink (may need sudo)")
      );
    }
+   const codexBinPath = "/usr/local/bin/codex-sm";
+   const codexSourcePath = join(configPath, "bin", "codex-sm");
+   try {
+     const codexWrapper = `#!/bin/bash
+ # Codex CLI wrapper with StackMemory integration
+ # Usage: codex-sm [--auto-sync] [--sync-interval=MINUTES] [args...]
+
+ # Flags
+ AUTO_SYNC=false
+ SYNC_INTERVAL=5
+ for arg in "$@"; do
+   case $arg in
+     --auto-sync)
+       AUTO_SYNC=true
+       shift
+       ;;
+     --sync-interval=*)
+       SYNC_INTERVAL="\${arg#*=}"
+       shift
+       ;;
+   esac
+ done
+
+ # Auto-initialize StackMemory if in git repo without it
+ if [ -d ".git" ] && [ ! -d ".stackmemory" ]; then
+   echo "\u{1F4E6} Initializing StackMemory for this project..."
+   stackmemory init --silent 2>/dev/null || true
+ fi
+
+ # Load existing context if available
+ if [ -d ".stackmemory" ]; then
+   echo "\u{1F9E0} Loading StackMemory context..."
+   stackmemory status --brief 2>/dev/null || true
+ fi
+
+ # Start Linear auto-sync in background if requested
+ SYNC_PID=""
+ if [ "$AUTO_SYNC" = true ] && [ -n "$LINEAR_API_KEY" ]; then
+   echo "\u{1F504} Starting Linear auto-sync (${SYNC_INTERVAL}min intervals)..."
+   (
+     while true; do
+       sleep $((SYNC_INTERVAL * 60))
+       if [ -d ".stackmemory" ]; then
+         stackmemory linear sync --quiet 2>/dev/null || true
+       fi
+     done
+   ) &
+   SYNC_PID=$!
+ fi
+
+ cleanup() {
+   echo ""
+   echo "\u{1F4DD} Saving StackMemory context..."
+
+   # Kill auto-sync if running
+   if [ -n "$SYNC_PID" ] && kill -0 $SYNC_PID 2>/dev/null; then
+     echo "\u{1F6D1} Stopping auto-sync..."
+     kill $SYNC_PID 2>/dev/null || true
+   fi
+
+   # Save project status and final sync
+   if [ -d ".stackmemory" ]; then
+     stackmemory status 2>/dev/null
+     if [ -n "$LINEAR_API_KEY" ]; then
+       echo "\u{1F504} Final Linear sync..."
+       stackmemory linear sync 2>/dev/null
+     fi
+     echo "\u2705 StackMemory context saved"
+   fi
+ }
+
+ trap cleanup EXIT INT TERM
+
+ # Run Codex CLI
+ if command -v codex &> /dev/null; then
+   codex "$@"
+ elif command -v codex-cli &> /dev/null; then
+   codex-cli "$@"
+ else
+   echo "\u274C Codex CLI not found. Please install it first."
+   echo " See: https://github.com/openai/codex-cli"
+   exit 1
+ fi
+ `;
+     writeFileSync(codexSourcePath, codexWrapper);
+     execFileSync("chmod", ["+x", codexSourcePath]);
+     if (!existsSync(codexBinPath)) {
+       execFileSync("ln", ["-s", codexSourcePath, codexBinPath]);
+       console.log(chalk.green(" \u2713 Created codex-sm command"));
+     }
+   } catch (error) {
+     console.log(
+       chalk.yellow(" \u26A0 Could not create codex-sm symlink (may need sudo)")
+     );
+   }
  }
  function showNextSteps(config) {
    console.log(chalk.cyan("\u{1F389} Next Steps:\n"));
@@ -359,8 +513,10 @@ function showNextSteps(config) {
    }
    console.log("3. Use with Claude:");
    console.log(chalk.gray(" claude-sm # Or use stackmemory directly\n"));
+   console.log("4. Use with Codex:");
+   console.log(chalk.gray(" codex-sm # Codex + StackMemory integration\n"));
    if (config.enableLinear) {
-     console.log("4. Sync with Linear:");
+     console.log("5. Sync with Linear:");
      console.log(chalk.gray(" stackmemory linear sync\n"));
    }
    console.log("For more help:");