@prajwolkc/stk 0.1.1 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +65 -5
- package/dist/commands/doctor.d.ts +2 -0
- package/dist/commands/doctor.js +220 -0
- package/dist/commands/health.js +2 -1
- package/dist/commands/logs.js +202 -97
- package/dist/index.js +3 -1
- package/dist/lib/plugins.d.ts +40 -0
- package/dist/lib/plugins.js +65 -0
- package/dist/services/aws.js +39 -10
- package/dist/services/database.js +25 -6
- package/dist/services/registry.d.ts +1 -0
- package/dist/services/registry.js +16 -2
- package/package.json +5 -2
package/README.md
CHANGED
|
@@ -4,10 +4,13 @@ One CLI to deploy, monitor, and debug your entire stack.
|
|
|
4
4
|
|
|
5
5
|
Stop opening 5 dashboards. `stk` checks your services, watches your deploys, syncs your env vars, tails your logs, and manages your issues — all from one command.
|
|
6
6
|
|
|
7
|
+
<!-- TODO: Add demo GIF here -->
|
|
8
|
+
<!--  -->
|
|
9
|
+
|
|
7
10
|
## Install
|
|
8
11
|
|
|
9
12
|
```bash
|
|
10
|
-
npm install -g stk
|
|
13
|
+
npm install -g @prajwolkc/stk
|
|
11
14
|
```
|
|
12
15
|
|
|
13
16
|
## Quick Start
|
|
@@ -16,6 +19,7 @@ npm install -g stk-cli
|
|
|
16
19
|
cd my-project
|
|
17
20
|
stk init # auto-detect your services
|
|
18
21
|
stk init --template saas # or use a starter template
|
|
22
|
+
stk doctor # diagnose any misconfig
|
|
19
23
|
stk health # check everything
|
|
20
24
|
stk status # one-line summary of your whole stack
|
|
21
25
|
```
|
|
@@ -27,10 +31,12 @@ stk status # one-line summary of your whole stack
|
|
|
27
31
|
| `stk init` | Initialize config (auto-detect or `--template saas\|api\|fullstack\|static\|fly\|aws`) |
|
|
28
32
|
| `stk status` | One-line summary: git, services, deploys, issues |
|
|
29
33
|
| `stk health` | Health check all configured services |
|
|
30
|
-
| `stk
|
|
34
|
+
| `stk doctor` | Diagnose misconfig, missing env vars, and suggest fixes |
|
|
35
|
+
| `stk deploy` | Git push + watch deploy providers |
|
|
31
36
|
| `stk env pull` | Pull env vars from Vercel + Railway into `.env.pulled` |
|
|
32
37
|
| `stk env diff` | Show what's in your local `.env` |
|
|
33
|
-
| `stk logs` | Tail Railway
|
|
38
|
+
| `stk logs` | Tail logs from Railway, Vercel, Fly, or Render |
|
|
39
|
+
| `stk logs -p vercel` | Logs from a specific provider |
|
|
34
40
|
| `stk todo ls` | List open GitHub issues |
|
|
35
41
|
| `stk todo add "title"` | Create a GitHub issue |
|
|
36
42
|
| `stk todo close 42` | Close an issue |
|
|
@@ -40,6 +46,7 @@ stk status # one-line summary of your whole stack
|
|
|
40
46
|
**Deploy providers:** Railway, Vercel, Fly.io, Render, AWS
|
|
41
47
|
**Databases:** PostgreSQL, MongoDB, Redis, Supabase
|
|
42
48
|
**Storage & billing:** Cloudflare R2, Stripe
|
|
49
|
+
**Custom:** Add your own via plugins
|
|
43
50
|
|
|
44
51
|
## Configuration
|
|
45
52
|
|
|
@@ -82,15 +89,57 @@ stk init --list-templates
|
|
|
82
89
|
| `fly` | Fly.io + PostgreSQL + Redis |
|
|
83
90
|
| `aws` | AWS + PostgreSQL + Redis |
|
|
84
91
|
|
|
85
|
-
##
|
|
92
|
+
## Doctor
|
|
93
|
+
|
|
94
|
+
`stk doctor` scans your config and environment to catch issues before they bite:
|
|
95
|
+
|
|
96
|
+
```
|
|
97
|
+
$ stk doctor
|
|
98
|
+
|
|
99
|
+
my-saas — Doctor
|
|
100
|
+
─────────────────────────────────────────
|
|
101
|
+
✓ railway Configured correctly
|
|
102
|
+
✗ vercel Missing required: VERCEL_TOKEN
|
|
103
|
+
See https://vercel.com/account/tokens
|
|
104
|
+
! database Missing optional: RAILWAY_PROJECT_ID
|
|
105
|
+
Some features need these for full functionality
|
|
106
|
+
✓ stripe Configured correctly
|
|
107
|
+
```
|
|
86
108
|
|
|
87
|
-
|
|
109
|
+
## Plugins
|
|
110
|
+
|
|
111
|
+
Add custom services without forking. Create `.stk/plugins/my-service.mjs`:
|
|
112
|
+
|
|
113
|
+
```js
|
|
114
|
+
export default {
|
|
115
|
+
name: "my-plugin",
|
|
116
|
+
services: {
|
|
117
|
+
myservice: {
|
|
118
|
+
name: "My Service",
|
|
119
|
+
envVars: ["MY_SERVICE_TOKEN"],
|
|
120
|
+
healthCheck: async () => {
|
|
121
|
+
const token = process.env.MY_SERVICE_TOKEN;
|
|
122
|
+
if (!token) {
|
|
123
|
+
return { name: "My Service", status: "skipped", detail: "MY_SERVICE_TOKEN not set" };
|
|
124
|
+
}
|
|
125
|
+
// Your check logic here
|
|
126
|
+
return { name: "My Service", status: "healthy", detail: "connected" };
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
};
|
|
131
|
+
```
|
|
132
|
+
|
|
133
|
+
Plugins are automatically loaded by `stk health` and `stk health --all`.
|
|
134
|
+
|
|
135
|
+
## Environment Variables
|
|
88
136
|
|
|
89
137
|
```bash
|
|
90
138
|
# Deploy providers
|
|
91
139
|
RAILWAY_API_TOKEN=
|
|
92
140
|
VERCEL_TOKEN=
|
|
93
141
|
FLY_API_TOKEN=
|
|
142
|
+
FLY_APP_NAME= # needed for stk logs -p fly
|
|
94
143
|
RENDER_API_KEY=
|
|
95
144
|
AWS_ACCESS_KEY_ID= / AWS_SECRET_ACCESS_KEY=
|
|
96
145
|
|
|
@@ -109,6 +158,17 @@ GITHUB_TOKEN=
|
|
|
109
158
|
GITHUB_REPO=owner/repo # or auto-detected from git remote
|
|
110
159
|
```
|
|
111
160
|
|
|
161
|
+
## Development
|
|
162
|
+
|
|
163
|
+
```bash
|
|
164
|
+
git clone https://github.com/Harden43/stk.git
|
|
165
|
+
cd stk
|
|
166
|
+
npm install
|
|
167
|
+
npm run dev -- health --all # run in dev mode
|
|
168
|
+
npm test # run tests
|
|
169
|
+
npm run build # compile TypeScript
|
|
170
|
+
```
|
|
171
|
+
|
|
112
172
|
## License
|
|
113
173
|
|
|
114
174
|
MIT
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
import { Command } from "commander";
|
|
2
|
+
import chalk from "chalk";
|
|
3
|
+
import { existsSync } from "fs";
|
|
4
|
+
import { loadConfig, enabledServices, CONFIG_FILE } from "../lib/config.js";
|
|
5
|
+
const ENV_REQUIREMENTS = {
|
|
6
|
+
railway: {
|
|
7
|
+
required: ["RAILWAY_API_TOKEN"],
|
|
8
|
+
optional: ["RAILWAY_PROJECT_ID", "RAILWAY_ENVIRONMENT_ID", "RAILWAY_SERVICE_ID"],
|
|
9
|
+
docs: "https://docs.railway.com/guides/public-api",
|
|
10
|
+
},
|
|
11
|
+
vercel: {
|
|
12
|
+
required: ["VERCEL_TOKEN"],
|
|
13
|
+
optional: ["VERCEL_PROJECT_ID"],
|
|
14
|
+
docs: "https://vercel.com/account/tokens",
|
|
15
|
+
},
|
|
16
|
+
fly: {
|
|
17
|
+
required: ["FLY_API_TOKEN"],
|
|
18
|
+
optional: ["FLY_APP_NAME"],
|
|
19
|
+
docs: "https://fly.io/docs/flyctl/tokens/",
|
|
20
|
+
},
|
|
21
|
+
render: {
|
|
22
|
+
required: ["RENDER_API_KEY"],
|
|
23
|
+
optional: [],
|
|
24
|
+
docs: "https://render.com/docs/api",
|
|
25
|
+
},
|
|
26
|
+
aws: {
|
|
27
|
+
required: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"],
|
|
28
|
+
optional: ["AWS_REGION"],
|
|
29
|
+
},
|
|
30
|
+
database: {
|
|
31
|
+
required: ["DATABASE_URL"],
|
|
32
|
+
optional: [],
|
|
33
|
+
},
|
|
34
|
+
mongodb: {
|
|
35
|
+
required: ["MONGODB_URL"],
|
|
36
|
+
optional: [],
|
|
37
|
+
},
|
|
38
|
+
redis: {
|
|
39
|
+
required: ["REDIS_URL"],
|
|
40
|
+
optional: [],
|
|
41
|
+
},
|
|
42
|
+
supabase: {
|
|
43
|
+
required: ["SUPABASE_URL"],
|
|
44
|
+
optional: ["SUPABASE_SERVICE_KEY", "SUPABASE_ANON_KEY"],
|
|
45
|
+
docs: "https://supabase.com/docs/guides/api",
|
|
46
|
+
},
|
|
47
|
+
r2: {
|
|
48
|
+
required: ["CLOUDFLARE_ACCOUNT_ID", "CLOUDFLARE_API_TOKEN"],
|
|
49
|
+
optional: [],
|
|
50
|
+
docs: "https://developers.cloudflare.com/r2/api/",
|
|
51
|
+
},
|
|
52
|
+
stripe: {
|
|
53
|
+
required: ["STRIPE_SECRET_KEY"],
|
|
54
|
+
optional: [],
|
|
55
|
+
docs: "https://dashboard.stripe.com/apikeys",
|
|
56
|
+
},
|
|
57
|
+
};
|
|
58
|
+
export const doctorCommand = new Command("doctor")
|
|
59
|
+
.description("Diagnose configuration issues and suggest fixes")
|
|
60
|
+
.action(async () => {
|
|
61
|
+
const issues = [];
|
|
62
|
+
// 1. Check config file
|
|
63
|
+
const hasConfig = existsSync(CONFIG_FILE);
|
|
64
|
+
if (!hasConfig) {
|
|
65
|
+
issues.push({
|
|
66
|
+
level: "warn",
|
|
67
|
+
service: "config",
|
|
68
|
+
message: `No ${CONFIG_FILE} found — using auto-detection`,
|
|
69
|
+
fix: `Run ${chalk.white("stk init")} to create one`,
|
|
70
|
+
});
|
|
71
|
+
}
|
|
72
|
+
const config = loadConfig();
|
|
73
|
+
const enabled = enabledServices(config);
|
|
74
|
+
// 2. Check each enabled service for env vars
|
|
75
|
+
for (const svc of enabled) {
|
|
76
|
+
const reqs = ENV_REQUIREMENTS[svc];
|
|
77
|
+
if (!reqs)
|
|
78
|
+
continue;
|
|
79
|
+
const missingRequired = reqs.required.filter((v) => !process.env[v]);
|
|
80
|
+
const missingOptional = reqs.optional.filter((v) => !process.env[v]);
|
|
81
|
+
if (missingRequired.length > 0) {
|
|
82
|
+
issues.push({
|
|
83
|
+
level: "error",
|
|
84
|
+
service: svc,
|
|
85
|
+
message: `Missing required: ${missingRequired.join(", ")}`,
|
|
86
|
+
fix: reqs.docs ? `See ${reqs.docs}` : `Set ${missingRequired[0]} in your .env`,
|
|
87
|
+
});
|
|
88
|
+
}
|
|
89
|
+
if (missingOptional.length > 0) {
|
|
90
|
+
issues.push({
|
|
91
|
+
level: "warn",
|
|
92
|
+
service: svc,
|
|
93
|
+
message: `Missing optional: ${missingOptional.join(", ")}`,
|
|
94
|
+
fix: `Some features (logs, env sync, deploy) need these for full functionality`,
|
|
95
|
+
});
|
|
96
|
+
}
|
|
97
|
+
if (missingRequired.length === 0) {
|
|
98
|
+
issues.push({
|
|
99
|
+
level: "info",
|
|
100
|
+
service: svc,
|
|
101
|
+
message: "Configured correctly",
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
// 3. Check for partial config (env vars set but service not enabled)
|
|
106
|
+
for (const [svc, reqs] of Object.entries(ENV_REQUIREMENTS)) {
|
|
107
|
+
if (enabled.includes(svc))
|
|
108
|
+
continue;
|
|
109
|
+
const hasAny = reqs.required.some((v) => process.env[v]);
|
|
110
|
+
if (hasAny) {
|
|
111
|
+
issues.push({
|
|
112
|
+
level: "warn",
|
|
113
|
+
service: svc,
|
|
114
|
+
message: `Env vars found but service not enabled in config`,
|
|
115
|
+
fix: `Add "${svc}": true to services in ${CONFIG_FILE}`,
|
|
116
|
+
});
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
// 4. Check for common misconfigurations
|
|
120
|
+
if (process.env.DATABASE_URL) {
|
|
121
|
+
try {
|
|
122
|
+
new URL(process.env.DATABASE_URL);
|
|
123
|
+
}
|
|
124
|
+
catch {
|
|
125
|
+
issues.push({
|
|
126
|
+
level: "error",
|
|
127
|
+
service: "database",
|
|
128
|
+
message: "DATABASE_URL is not a valid URL",
|
|
129
|
+
fix: "Format: postgresql://user:pass@host:5432/dbname",
|
|
130
|
+
});
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
if (process.env.REDIS_URL) {
|
|
134
|
+
try {
|
|
135
|
+
new URL(process.env.REDIS_URL);
|
|
136
|
+
}
|
|
137
|
+
catch {
|
|
138
|
+
issues.push({
|
|
139
|
+
level: "error",
|
|
140
|
+
service: "redis",
|
|
141
|
+
message: "REDIS_URL is not a valid URL",
|
|
142
|
+
fix: "Format: redis://default:pass@host:6379",
|
|
143
|
+
});
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
if (process.env.STRIPE_SECRET_KEY && !process.env.STRIPE_SECRET_KEY.startsWith("sk_")) {
|
|
147
|
+
issues.push({
|
|
148
|
+
level: "error",
|
|
149
|
+
service: "stripe",
|
|
150
|
+
message: "STRIPE_SECRET_KEY should start with sk_test_ or sk_live_",
|
|
151
|
+
});
|
|
152
|
+
}
|
|
153
|
+
// 5. Check deploy config
|
|
154
|
+
if (hasConfig && config.deploy?.providers) {
|
|
155
|
+
for (const p of config.deploy.providers) {
|
|
156
|
+
if (!enabled.includes(p)) {
|
|
157
|
+
issues.push({
|
|
158
|
+
level: "error",
|
|
159
|
+
service: "deploy",
|
|
160
|
+
message: `Deploy provider "${p}" is not enabled in services`,
|
|
161
|
+
fix: `Add "${p}": true to services, or remove from deploy.providers`,
|
|
162
|
+
});
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
// 6. Check GitHub config for todo
|
|
167
|
+
if (!process.env.GITHUB_TOKEN && !config.github?.repo) {
|
|
168
|
+
issues.push({
|
|
169
|
+
level: "info",
|
|
170
|
+
service: "github",
|
|
171
|
+
message: "GITHUB_TOKEN not set — stk todo will have limited access",
|
|
172
|
+
fix: "Set GITHUB_TOKEN for creating/closing issues",
|
|
173
|
+
});
|
|
174
|
+
}
|
|
175
|
+
// Print results
|
|
176
|
+
const ICONS = {
|
|
177
|
+
error: chalk.red("✗"),
|
|
178
|
+
warn: chalk.yellow("!"),
|
|
179
|
+
info: chalk.green("✓"),
|
|
180
|
+
};
|
|
181
|
+
const COLORS = {
|
|
182
|
+
error: chalk.red,
|
|
183
|
+
warn: chalk.yellow,
|
|
184
|
+
info: chalk.green,
|
|
185
|
+
};
|
|
186
|
+
console.log();
|
|
187
|
+
console.log(chalk.bold(` ${config.name} — Doctor`));
|
|
188
|
+
console.log(chalk.dim(" ─────────────────────────────────────────"));
|
|
189
|
+
const grouped = new Map();
|
|
190
|
+
for (const issue of issues) {
|
|
191
|
+
const list = grouped.get(issue.service) ?? [];
|
|
192
|
+
list.push(issue);
|
|
193
|
+
grouped.set(issue.service, list);
|
|
194
|
+
}
|
|
195
|
+
for (const [service, serviceIssues] of grouped) {
|
|
196
|
+
for (const issue of serviceIssues) {
|
|
197
|
+
const icon = ICONS[issue.level];
|
|
198
|
+
const svc = COLORS[issue.level](service.padEnd(12));
|
|
199
|
+
console.log(` ${icon} ${svc} ${issue.message}`);
|
|
200
|
+
if (issue.fix) {
|
|
201
|
+
console.log(` ${" ".repeat(12)} ${chalk.dim(issue.fix)}`);
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
const errors = issues.filter((i) => i.level === "error");
|
|
206
|
+
const warns = issues.filter((i) => i.level === "warn");
|
|
207
|
+
const ok = issues.filter((i) => i.level === "info");
|
|
208
|
+
console.log();
|
|
209
|
+
if (errors.length > 0) {
|
|
210
|
+
console.log(chalk.red(` ${errors.length} error${errors.length > 1 ? "s" : ""} found`));
|
|
211
|
+
process.exitCode = 1;
|
|
212
|
+
}
|
|
213
|
+
else if (warns.length > 0) {
|
|
214
|
+
console.log(chalk.yellow(` ${warns.length} warning${warns.length > 1 ? "s" : ""}, ${ok.length} ok`));
|
|
215
|
+
}
|
|
216
|
+
else {
|
|
217
|
+
console.log(chalk.green(` All ${ok.length} checks passed`));
|
|
218
|
+
}
|
|
219
|
+
console.log();
|
|
220
|
+
});
|
package/dist/commands/health.js
CHANGED
|
@@ -2,7 +2,7 @@ import { Command } from "commander";
|
|
|
2
2
|
import chalk from "chalk";
|
|
3
3
|
import ora from "ora";
|
|
4
4
|
import { loadConfig, enabledServices } from "../lib/config.js";
|
|
5
|
-
import { getChecker, allCheckerNames } from "../services/registry.js";
|
|
5
|
+
import { getChecker, allCheckerNames, loadPluginCheckers } from "../services/registry.js";
|
|
6
6
|
const STATUS_ICON = {
|
|
7
7
|
healthy: chalk.green("✓"),
|
|
8
8
|
degraded: chalk.yellow("~"),
|
|
@@ -22,6 +22,7 @@ export const healthCommand = new Command("health")
|
|
|
22
22
|
.action(async (opts) => {
|
|
23
23
|
const config = loadConfig();
|
|
24
24
|
const spinner = ora("Checking services...").start();
|
|
25
|
+
await loadPluginCheckers();
|
|
25
26
|
const serviceList = opts.all
|
|
26
27
|
? allCheckerNames()
|
|
27
28
|
: enabledServices(config);
|
package/dist/commands/logs.js
CHANGED
|
@@ -1,142 +1,246 @@
|
|
|
1
1
|
import { Command } from "commander";
|
|
2
2
|
import chalk from "chalk";
|
|
3
3
|
import ora from "ora";
|
|
4
|
+
import { loadConfig, enabledServices } from "../lib/config.js";
|
|
4
5
|
export const logsCommand = new Command("logs")
|
|
5
|
-
.description("Tail
|
|
6
|
+
.description("Tail logs from your deploy providers")
|
|
6
7
|
.option("-n, --lines <count>", "number of recent lines to show", "50")
|
|
7
8
|
.option("-f, --follow", "keep streaming new logs")
|
|
9
|
+
.option("-p, --provider <name>", "specific provider (railway, vercel, fly, render)")
|
|
8
10
|
.action(async (opts) => {
|
|
9
|
-
const
|
|
10
|
-
|
|
11
|
-
|
|
11
|
+
const config = loadConfig();
|
|
12
|
+
const enabled = enabledServices(config);
|
|
13
|
+
const limit = parseInt(opts.lines, 10);
|
|
14
|
+
// Build list of available log providers
|
|
15
|
+
const providers = [];
|
|
16
|
+
if ((opts.provider === "railway" || (!opts.provider && enabled.includes("railway"))) &&
|
|
17
|
+
process.env.RAILWAY_API_TOKEN) {
|
|
18
|
+
providers.push(railwayProvider());
|
|
19
|
+
}
|
|
20
|
+
if ((opts.provider === "vercel" || (!opts.provider && enabled.includes("vercel"))) &&
|
|
21
|
+
process.env.VERCEL_TOKEN) {
|
|
22
|
+
providers.push(vercelProvider());
|
|
23
|
+
}
|
|
24
|
+
if ((opts.provider === "fly" || (!opts.provider && enabled.includes("fly"))) &&
|
|
25
|
+
process.env.FLY_API_TOKEN) {
|
|
26
|
+
providers.push(flyProvider());
|
|
27
|
+
}
|
|
28
|
+
if ((opts.provider === "render" || (!opts.provider && enabled.includes("render"))) &&
|
|
29
|
+
process.env.RENDER_API_KEY) {
|
|
30
|
+
providers.push(renderProvider());
|
|
31
|
+
}
|
|
32
|
+
if (providers.length === 0) {
|
|
33
|
+
console.log(chalk.red("\n No log providers available."));
|
|
34
|
+
console.log(chalk.dim(" Configure railway, vercel, fly, or render in stk.config.json and set their tokens.\n"));
|
|
12
35
|
process.exitCode = 1;
|
|
13
36
|
return;
|
|
14
37
|
}
|
|
15
|
-
|
|
16
|
-
const
|
|
17
|
-
|
|
18
|
-
if (!deploymentId) {
|
|
19
|
-
// Try to get latest deployment
|
|
20
|
-
const spinner = ora("Fetching latest deployment...").start();
|
|
38
|
+
// Fetch logs from all providers
|
|
39
|
+
for (const provider of providers) {
|
|
40
|
+
const spinner = ora(`Fetching ${provider.name} logs...`).start();
|
|
21
41
|
try {
|
|
22
|
-
const
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
await followLogs(token, latestDeploymentId);
|
|
28
|
-
}
|
|
42
|
+
const logs = await provider.fetch(limit);
|
|
43
|
+
spinner.stop();
|
|
44
|
+
if (logs.length === 0) {
|
|
45
|
+
console.log(chalk.dim(`\n ${provider.name}: no logs found\n`));
|
|
46
|
+
continue;
|
|
29
47
|
}
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
48
|
+
console.log();
|
|
49
|
+
console.log(chalk.bold(` ${provider.name} Logs`));
|
|
50
|
+
console.log(chalk.dim(" ─────────────────────────────────────────"));
|
|
51
|
+
for (const log of logs) {
|
|
52
|
+
printLogLine(log, provider.name);
|
|
33
53
|
}
|
|
34
54
|
}
|
|
35
55
|
catch (err) {
|
|
36
|
-
spinner.fail(err.message);
|
|
56
|
+
spinner.fail(`${provider.name}: ${err.message}`);
|
|
37
57
|
}
|
|
38
|
-
return;
|
|
39
58
|
}
|
|
40
|
-
|
|
41
|
-
if (opts.follow) {
|
|
42
|
-
|
|
59
|
+
// Follow mode — stream from first available provider
|
|
60
|
+
if (opts.follow && providers[0]) {
|
|
61
|
+
console.log(chalk.dim("\n --- streaming (Ctrl+C to stop) ---\n"));
|
|
62
|
+
const provider = providers[0];
|
|
63
|
+
if (provider.follow) {
|
|
64
|
+
await provider.follow((log) => printLogLine(log, provider.name));
|
|
65
|
+
}
|
|
66
|
+
else {
|
|
67
|
+
console.log(chalk.dim(` ${provider.name} does not support streaming`));
|
|
68
|
+
}
|
|
43
69
|
}
|
|
70
|
+
console.log();
|
|
44
71
|
});
|
|
45
|
-
|
|
72
|
+
// --- Railway ---
|
|
73
|
+
function railwayProvider() {
|
|
74
|
+
const token = process.env.RAILWAY_API_TOKEN;
|
|
75
|
+
const projectId = process.env.RAILWAY_PROJECT_ID;
|
|
76
|
+
const serviceId = process.env.RAILWAY_SERVICE_ID;
|
|
77
|
+
return {
|
|
78
|
+
name: "Railway",
|
|
79
|
+
async fetch(limit) {
|
|
80
|
+
const deploymentId = await getLatestRailwayDeployment(token, projectId, serviceId);
|
|
81
|
+
if (!deploymentId)
|
|
82
|
+
return [];
|
|
83
|
+
const res = await fetch("https://backboard.railway.com/graphql/v2", {
|
|
84
|
+
method: "POST",
|
|
85
|
+
headers: {
|
|
86
|
+
Authorization: `Bearer ${token}`,
|
|
87
|
+
"Content-Type": "application/json",
|
|
88
|
+
},
|
|
89
|
+
body: JSON.stringify({
|
|
90
|
+
query: `{ deploymentLogs(deploymentId: "${deploymentId}", limit: ${limit}) { timestamp message severity } }`,
|
|
91
|
+
}),
|
|
92
|
+
});
|
|
93
|
+
const data = (await res.json());
|
|
94
|
+
return data.data?.deploymentLogs ?? [];
|
|
95
|
+
},
|
|
96
|
+
async follow(onLog) {
|
|
97
|
+
const deploymentId = await getLatestRailwayDeployment(token, projectId, serviceId);
|
|
98
|
+
if (!deploymentId)
|
|
99
|
+
return;
|
|
100
|
+
let lastTimestamp = new Date().toISOString();
|
|
101
|
+
while (true) {
|
|
102
|
+
await sleep(3000);
|
|
103
|
+
try {
|
|
104
|
+
const res = await fetch("https://backboard.railway.com/graphql/v2", {
|
|
105
|
+
method: "POST",
|
|
106
|
+
headers: {
|
|
107
|
+
Authorization: `Bearer ${token}`,
|
|
108
|
+
"Content-Type": "application/json",
|
|
109
|
+
},
|
|
110
|
+
body: JSON.stringify({
|
|
111
|
+
query: `{ deploymentLogs(deploymentId: "${deploymentId}", limit: 20) { timestamp message severity } }`,
|
|
112
|
+
}),
|
|
113
|
+
});
|
|
114
|
+
const data = (await res.json());
|
|
115
|
+
for (const log of data.data?.deploymentLogs ?? []) {
|
|
116
|
+
if (log.timestamp > lastTimestamp) {
|
|
117
|
+
onLog(log);
|
|
118
|
+
lastTimestamp = log.timestamp;
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
catch { /* retry */ }
|
|
123
|
+
}
|
|
124
|
+
},
|
|
125
|
+
};
|
|
126
|
+
}
|
|
127
|
+
async function getLatestRailwayDeployment(token, projectId, serviceId) {
|
|
46
128
|
if (!projectId)
|
|
47
129
|
return null;
|
|
48
|
-
const serviceFilter = serviceId
|
|
49
|
-
? `serviceId: "${serviceId}",`
|
|
50
|
-
: "";
|
|
130
|
+
const serviceFilter = serviceId ? `serviceId: "${serviceId}",` : "";
|
|
51
131
|
const res = await fetch("https://backboard.railway.com/graphql/v2", {
|
|
52
132
|
method: "POST",
|
|
53
|
-
headers: {
|
|
54
|
-
Authorization: `Bearer ${token}`,
|
|
55
|
-
"Content-Type": "application/json",
|
|
56
|
-
},
|
|
133
|
+
headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
|
|
57
134
|
body: JSON.stringify({
|
|
58
|
-
query: `{
|
|
59
|
-
deployments(
|
|
60
|
-
first: 1,
|
|
61
|
-
input: {
|
|
62
|
-
projectId: "${projectId}",
|
|
63
|
-
${serviceFilter}
|
|
64
|
-
}
|
|
65
|
-
) {
|
|
66
|
-
edges {
|
|
67
|
-
node { id status }
|
|
68
|
-
}
|
|
69
|
-
}
|
|
70
|
-
}`,
|
|
135
|
+
query: `{ deployments(first: 1, input: { projectId: "${projectId}", ${serviceFilter} }) { edges { node { id } } } }`,
|
|
71
136
|
}),
|
|
72
137
|
});
|
|
73
138
|
const data = (await res.json());
|
|
74
139
|
return data.data?.deployments?.edges?.[0]?.node?.id ?? null;
|
|
75
140
|
}
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
141
|
+
// --- Vercel ---
|
|
142
|
+
function vercelProvider() {
|
|
143
|
+
const token = process.env.VERCEL_TOKEN;
|
|
144
|
+
return {
|
|
145
|
+
name: "Vercel",
|
|
146
|
+
async fetch(limit) {
|
|
147
|
+
// Get latest deployment, then its build logs
|
|
148
|
+
const depRes = await fetch("https://api.vercel.com/v6/deployments?limit=1", {
|
|
149
|
+
headers: { Authorization: `Bearer ${token}` },
|
|
150
|
+
});
|
|
151
|
+
const depData = (await depRes.json());
|
|
152
|
+
const dep = depData.deployments?.[0];
|
|
153
|
+
if (!dep)
|
|
154
|
+
return [];
|
|
155
|
+
const logRes = await fetch(`https://api.vercel.com/v2/deployments/${dep.uid}/events`, { headers: { Authorization: `Bearer ${token}` } });
|
|
156
|
+
const events = (await logRes.json());
|
|
157
|
+
if (!Array.isArray(events))
|
|
158
|
+
return [];
|
|
159
|
+
return events
|
|
160
|
+
.filter((e) => e.type === "stdout" || e.type === "stderr")
|
|
161
|
+
.slice(-limit)
|
|
162
|
+
.map((e) => ({
|
|
163
|
+
timestamp: new Date(e.created).toISOString(),
|
|
164
|
+
message: e.payload?.text ?? e.text ?? "",
|
|
165
|
+
severity: e.type === "stderr" ? "ERROR" : "INFO",
|
|
166
|
+
}));
|
|
82
167
|
},
|
|
83
|
-
|
|
84
|
-
query: `{
|
|
85
|
-
deploymentLogs(deploymentId: "${deploymentId}", limit: ${limit}) {
|
|
86
|
-
timestamp
|
|
87
|
-
message
|
|
88
|
-
severity
|
|
89
|
-
}
|
|
90
|
-
}`,
|
|
91
|
-
}),
|
|
92
|
-
});
|
|
93
|
-
const data = (await res.json());
|
|
94
|
-
const logs = data.data?.deploymentLogs ?? [];
|
|
95
|
-
if (logs.length === 0) {
|
|
96
|
-
console.log(chalk.dim(" No logs found"));
|
|
97
|
-
return;
|
|
98
|
-
}
|
|
99
|
-
for (const log of logs) {
|
|
100
|
-
printLogLine(log);
|
|
101
|
-
}
|
|
168
|
+
};
|
|
102
169
|
}
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
170
|
+
// --- Fly.io ---
|
|
171
|
+
function flyProvider() {
|
|
172
|
+
const token = process.env.FLY_API_TOKEN;
|
|
173
|
+
const app = process.env.FLY_APP_NAME;
|
|
174
|
+
return {
|
|
175
|
+
name: "Fly.io",
|
|
176
|
+
async fetch(limit) {
|
|
177
|
+
if (!app) {
|
|
178
|
+
throw new Error("FLY_APP_NAME not set");
|
|
179
|
+
}
|
|
180
|
+
const res = await fetch("https://api.fly.io/graphql", {
|
|
110
181
|
method: "POST",
|
|
111
182
|
headers: {
|
|
112
183
|
Authorization: `Bearer ${token}`,
|
|
113
184
|
"Content-Type": "application/json",
|
|
114
185
|
},
|
|
115
186
|
body: JSON.stringify({
|
|
116
|
-
query: `{
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
message
|
|
120
|
-
severity
|
|
187
|
+
query: `query {
|
|
188
|
+
app(name: "${app}") {
|
|
189
|
+
currentRelease { status createdAt }
|
|
121
190
|
}
|
|
122
191
|
}`,
|
|
123
192
|
}),
|
|
124
193
|
});
|
|
125
194
|
const data = (await res.json());
|
|
126
|
-
const
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
195
|
+
const release = data.data?.app?.currentRelease;
|
|
196
|
+
if (!release)
|
|
197
|
+
return [];
|
|
198
|
+
// Fly doesn't have a simple log fetch API via GraphQL
|
|
199
|
+
// Return release info as a log line
|
|
200
|
+
return [
|
|
201
|
+
{
|
|
202
|
+
timestamp: release.createdAt,
|
|
203
|
+
message: `Current release: ${release.status}`,
|
|
204
|
+
severity: "INFO",
|
|
205
|
+
},
|
|
206
|
+
];
|
|
207
|
+
},
|
|
208
|
+
};
|
|
209
|
+
}
|
|
210
|
+
// --- Render ---
|
|
211
|
+
function renderProvider() {
|
|
212
|
+
const token = process.env.RENDER_API_KEY;
|
|
213
|
+
return {
|
|
214
|
+
name: "Render",
|
|
215
|
+
async fetch(logLimit) {
|
|
216
|
+
// Get first service
|
|
217
|
+
const svcRes = await fetch("https://api.render.com/v1/services?limit=1", {
|
|
218
|
+
headers: { Authorization: `Bearer ${token}` },
|
|
219
|
+
});
|
|
220
|
+
const services = (await svcRes.json());
|
|
221
|
+
const svc = services[0]?.service;
|
|
222
|
+
if (!svc)
|
|
223
|
+
return [];
|
|
224
|
+
// Get deploys for the service
|
|
225
|
+
const depRes = await fetch(`https://api.render.com/v1/services/${svc.id}/deploys?limit=1`, { headers: { Authorization: `Bearer ${token}` } });
|
|
226
|
+
const deploys = (await depRes.json());
|
|
227
|
+
const deploy = deploys[0]?.deploy;
|
|
228
|
+
if (!deploy)
|
|
229
|
+
return [];
|
|
230
|
+
// Get logs for the deploy
|
|
231
|
+
const logRes = await fetch(`https://api.render.com/v1/services/${svc.id}/deploys/${deploy.id}/logs`, { headers: { Authorization: `Bearer ${token}` } });
|
|
232
|
+
const logs = (await logRes.json());
|
|
233
|
+
if (!Array.isArray(logs))
|
|
234
|
+
return [];
|
|
235
|
+
return logs.slice(-logLimit).map((l) => ({
|
|
236
|
+
timestamp: l.timestamp ?? new Date().toISOString(),
|
|
237
|
+
message: l.message ?? l.text ?? String(l),
|
|
238
|
+
severity: "INFO",
|
|
239
|
+
}));
|
|
240
|
+
},
|
|
241
|
+
};
|
|
138
242
|
}
|
|
139
|
-
function printLogLine(log) {
|
|
243
|
+
function printLogLine(log, provider) {
|
|
140
244
|
const time = chalk.dim(new Date(log.timestamp).toLocaleTimeString());
|
|
141
245
|
const severity = log.severity?.toUpperCase() ?? "INFO";
|
|
142
246
|
const sevColor = severity === "ERROR"
|
|
@@ -144,7 +248,8 @@ function printLogLine(log) {
|
|
|
144
248
|
: severity === "WARN"
|
|
145
249
|
? chalk.yellow
|
|
146
250
|
: chalk.dim;
|
|
147
|
-
|
|
251
|
+
const tag = chalk.dim(`[${provider.toLowerCase()}]`);
|
|
252
|
+
console.log(` ${time} ${tag} ${sevColor(severity.padEnd(5))} ${log.message}`);
|
|
148
253
|
}
|
|
149
254
|
function sleep(ms) {
|
|
150
255
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
package/dist/index.js
CHANGED
|
@@ -7,11 +7,12 @@ import { deployCommand } from "./commands/deploy.js";
|
|
|
7
7
|
import { envCommand } from "./commands/env.js";
|
|
8
8
|
import { logsCommand } from "./commands/logs.js";
|
|
9
9
|
import { todoCommand } from "./commands/todo.js";
|
|
10
|
+
import { doctorCommand } from "./commands/doctor.js";
|
|
10
11
|
const program = new Command();
|
|
11
12
|
program
|
|
12
13
|
.name("stk")
|
|
13
14
|
.description("One CLI to deploy, monitor, and debug your entire stack.")
|
|
14
|
-
.version("0.
|
|
15
|
+
.version("0.2.0");
|
|
15
16
|
program.addCommand(initCommand);
|
|
16
17
|
program.addCommand(statusCommand);
|
|
17
18
|
program.addCommand(healthCommand);
|
|
@@ -19,4 +20,5 @@ program.addCommand(deployCommand);
|
|
|
19
20
|
program.addCommand(envCommand);
|
|
20
21
|
program.addCommand(logsCommand);
|
|
21
22
|
program.addCommand(todoCommand);
|
|
23
|
+
program.addCommand(doctorCommand);
|
|
22
24
|
program.parse();
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import type { CheckResult } from "../services/checker.js";
|
|
2
|
+
export interface StkPlugin {
|
|
3
|
+
name: string;
|
|
4
|
+
version?: string;
|
|
5
|
+
services?: Record<string, PluginService>;
|
|
6
|
+
}
|
|
7
|
+
export interface PluginService {
|
|
8
|
+
name: string;
|
|
9
|
+
envVars: string[];
|
|
10
|
+
healthCheck: () => Promise<CheckResult>;
|
|
11
|
+
}
|
|
12
|
+
/**
|
|
13
|
+
* Load plugins from .stk/plugins/ directory.
|
|
14
|
+
*
|
|
15
|
+
* Each plugin is a .js or .mjs file that exports a StkPlugin:
|
|
16
|
+
*
|
|
17
|
+
* ```js
|
|
18
|
+
* // .stk/plugins/my-service.mjs
|
|
19
|
+
* export default {
|
|
20
|
+
* name: "my-plugin",
|
|
21
|
+
* services: {
|
|
22
|
+
* myservice: {
|
|
23
|
+
* name: "My Service",
|
|
24
|
+
* envVars: ["MY_SERVICE_TOKEN"],
|
|
25
|
+
* healthCheck: async () => {
|
|
26
|
+
* const token = process.env.MY_SERVICE_TOKEN;
|
|
27
|
+
* if (!token) return { name: "My Service", status: "skipped", detail: "MY_SERVICE_TOKEN not set" };
|
|
28
|
+
* // ... check logic
|
|
29
|
+
* return { name: "My Service", status: "healthy", detail: "connected" };
|
|
30
|
+
* }
|
|
31
|
+
* }
|
|
32
|
+
* }
|
|
33
|
+
* };
|
|
34
|
+
* ```
|
|
35
|
+
*/
|
|
36
|
+
export declare function loadPlugins(): Promise<StkPlugin[]>;
|
|
37
|
+
/**
|
|
38
|
+
* Collect all plugin health checkers into a flat record.
|
|
39
|
+
*/
|
|
40
|
+
export declare function getPluginCheckers(): Promise<Record<string, () => Promise<CheckResult>>>;
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import { existsSync, readdirSync } from "fs";
|
|
2
|
+
import { resolve, join } from "path";
|
|
3
|
+
import { pathToFileURL } from "url";
|
|
4
|
+
const PLUGIN_DIR = ".stk/plugins";
/**
 * Load plugins from the .stk/plugins/ directory.
 *
 * Each plugin is a .js or .mjs file whose default export is a StkPlugin:
 *
 * ```js
 * // .stk/plugins/my-service.mjs
 * export default {
 *   name: "my-plugin",
 *   services: {
 *     myservice: {
 *       name: "My Service",
 *       envVars: ["MY_SERVICE_TOKEN"],
 *       healthCheck: async () => {
 *         const token = process.env.MY_SERVICE_TOKEN;
 *         if (!token) return { name: "My Service", status: "skipped", detail: "MY_SERVICE_TOKEN not set" };
 *         // ... check logic
 *         return { name: "My Service", status: "healthy", detail: "connected" };
 *       }
 *     }
 *   }
 * };
 * ```
 *
 * Files are loaded in sorted filename order so results are deterministic
 * across platforms (readdirSync order is filesystem-dependent).
 * Files that fail to import or do not export a valid plugin are skipped
 * silently — plugins are strictly optional.
 *
 * @returns the successfully loaded plugin objects (possibly empty)
 */
export async function loadPlugins() {
    const pluginDir = resolve(process.cwd(), PLUGIN_DIR);
    if (!existsSync(pluginDir))
        return [];
    const plugins = [];
    // Sort for a deterministic load order; readdirSync order varies by platform.
    const files = readdirSync(pluginDir)
        .filter((f) => f.endsWith(".js") || f.endsWith(".mjs"))
        .sort();
    for (const file of files) {
        try {
            const fileUrl = pathToFileURL(join(pluginDir, file)).href;
            const mod = await import(fileUrl);
            const plugin = mod.default ?? mod;
            // Minimal shape validation: a string name and an object services map.
            if (plugin &&
                typeof plugin.name === "string" &&
                typeof plugin.services === "object" &&
                plugin.services !== null) {
                plugins.push(plugin);
            }
        }
        catch {
            // Skip plugins that fail to import or evaluate.
        }
    }
    return plugins;
}
|
|
51
|
+
/**
 * Collect all plugin health checkers into a flat record keyed by service id.
 *
 * Later plugins win on duplicate service keys. Entries whose healthCheck is
 * not callable are skipped, so every checker in the returned record can be
 * invoked safely by the registry.
 */
export async function getPluginCheckers() {
    const checkers = {};
    for (const plugin of await loadPlugins()) {
        if (!plugin.services)
            continue;
        for (const [key, svc] of Object.entries(plugin.services)) {
            // Only expose callable checkers; a malformed plugin entry must not
            // surface a non-function that callers would try to invoke.
            if (typeof svc.healthCheck === "function") {
                checkers[key] = svc.healthCheck;
            }
        }
    }
    return checkers;
}
|
package/dist/services/aws.js
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { runCheck } from "./checker.js";
|
|
2
|
+
import { createHmac, createHash } from "crypto";
|
|
2
3
|
export async function checkAWS() {
|
|
3
4
|
const accessKey = process.env.AWS_ACCESS_KEY_ID;
|
|
4
5
|
const secretKey = process.env.AWS_SECRET_ACCESS_KEY;
|
|
@@ -11,22 +12,50 @@ export async function checkAWS() {
|
|
|
11
12
|
};
|
|
12
13
|
}
|
|
13
14
|
return runCheck("AWS", async () => {
|
|
14
|
-
//
|
|
15
|
+
// AWS STS GetCallerIdentity with Signature V4
|
|
16
|
+
const service = "sts";
|
|
15
17
|
const host = `sts.${region}.amazonaws.com`;
|
|
16
18
|
const body = "Action=GetCallerIdentity&Version=2011-06-15";
|
|
17
19
|
const now = new Date();
|
|
18
|
-
|
|
19
|
-
|
|
20
|
+
const amzDate = now.toISOString().replace(/[:-]|\.\d{3}/g, "");
|
|
21
|
+
const dateStamp = amzDate.slice(0, 8);
|
|
22
|
+
const bodyHash = sha256(body);
|
|
23
|
+
const canonicalHeaders = `content-type:application/x-www-form-urlencoded\nhost:${host}\nx-amz-date:${amzDate}\n`;
|
|
24
|
+
const signedHeaders = "content-type;host;x-amz-date";
|
|
25
|
+
const canonicalRequest = `POST\n/\n\n${canonicalHeaders}\n${signedHeaders}\n${bodyHash}`;
|
|
26
|
+
const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`;
|
|
27
|
+
const stringToSign = `AWS4-HMAC-SHA256\n${amzDate}\n${credentialScope}\n${sha256(canonicalRequest)}`;
|
|
28
|
+
const signingKey = getSignatureKey(secretKey, dateStamp, region, service);
|
|
29
|
+
const signature = hmac(signingKey, stringToSign).toString("hex");
|
|
30
|
+
const authHeader = `AWS4-HMAC-SHA256 Credential=${accessKey}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
|
|
20
31
|
const res = await fetch(`https://${host}/`, {
|
|
21
32
|
method: "POST",
|
|
22
|
-
headers: {
|
|
33
|
+
headers: {
|
|
34
|
+
"Content-Type": "application/x-www-form-urlencoded",
|
|
35
|
+
"X-Amz-Date": amzDate,
|
|
36
|
+
Authorization: authHeader,
|
|
37
|
+
},
|
|
23
38
|
body,
|
|
24
39
|
});
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
40
|
+
if (!res.ok)
|
|
41
|
+
throw new Error(`HTTP ${res.status} — invalid credentials`);
|
|
42
|
+
const text = await res.text();
|
|
43
|
+
const accountMatch = text.match(/<Account>(.+?)<\/Account>/);
|
|
44
|
+
const detail = accountMatch
|
|
45
|
+
? `account ${accountMatch[1]} (${region})`
|
|
46
|
+
: `${region} — authenticated`;
|
|
47
|
+
return { detail };
|
|
31
48
|
});
|
|
32
49
|
}
|
|
50
|
+
/** Hex-encoded SHA-256 digest of a string. */
function sha256(data) {
    const digest = createHash("sha256");
    digest.update(data);
    return digest.digest("hex");
}
/** Raw HMAC-SHA256 of `data` under `key`, returned as a Buffer. */
function hmac(key, data) {
    const mac = createHmac("sha256", key);
    mac.update(data);
    return mac.digest();
}
/**
 * Derive the AWS Signature V4 signing key by chaining HMACs over the
 * date stamp, region, service, and the fixed "aws4_request" terminator.
 */
function getSignatureKey(key, date, region, service) {
    let derived = hmac(`AWS4${key}`, date);
    for (const part of [region, service, "aws4_request"]) {
        derived = hmac(derived, part);
    }
    return derived;
}
|
|
@@ -5,16 +5,35 @@ export async function checkDatabase() {
|
|
|
5
5
|
return { name: "PostgreSQL", status: "skipped", detail: "DATABASE_URL not set" };
|
|
6
6
|
}
|
|
7
7
|
return runCheck("PostgreSQL", async () => {
|
|
8
|
-
// Parse host and port from DATABASE_URL to do a basic TCP connect check
|
|
9
|
-
// Full query check would require pg client — keeping deps minimal for now
|
|
10
8
|
const parsed = new URL(url);
|
|
11
9
|
const host = parsed.hostname;
|
|
12
10
|
const port = parseInt(parsed.port || "5432", 10);
|
|
11
|
+
const dbName = parsed.pathname.replace("/", "") || "unknown";
|
|
13
12
|
const { createConnection } = await import("net");
|
|
14
|
-
|
|
13
|
+
// Attempt a real PostgreSQL startup message handshake
|
|
14
|
+
const version = await new Promise((resolve, reject) => {
|
|
15
15
|
const socket = createConnection({ host, port, timeout: 5000 }, () => {
|
|
16
|
-
|
|
17
|
-
|
|
16
|
+
// Send PostgreSQL startup message (protocol v3.0)
|
|
17
|
+
const user = parsed.username || "postgres";
|
|
18
|
+
const params = `user\0${user}\0database\0${dbName}\0\0`;
|
|
19
|
+
const len = 4 + 4 + params.length;
|
|
20
|
+
const buf = Buffer.alloc(len);
|
|
21
|
+
buf.writeInt32BE(len, 0);
|
|
22
|
+
buf.writeInt32BE(196608, 4); // protocol 3.0
|
|
23
|
+
buf.write(params, 8);
|
|
24
|
+
socket.write(buf);
|
|
25
|
+
socket.once("data", (data) => {
|
|
26
|
+
socket.destroy();
|
|
27
|
+
const tag = String.fromCharCode(data[0]);
|
|
28
|
+
// 'R' = AuthenticationRequest (server recognized us as postgres)
|
|
29
|
+
// 'E' = Error (but server is responding — it's alive)
|
|
30
|
+
if (tag === "R" || tag === "E") {
|
|
31
|
+
resolve("protocol ok");
|
|
32
|
+
}
|
|
33
|
+
else {
|
|
34
|
+
resolve("reachable");
|
|
35
|
+
}
|
|
36
|
+
});
|
|
18
37
|
});
|
|
19
38
|
socket.on("error", reject);
|
|
20
39
|
socket.on("timeout", () => {
|
|
@@ -22,6 +41,6 @@ export async function checkDatabase() {
|
|
|
22
41
|
reject(new Error("connection timeout"));
|
|
23
42
|
});
|
|
24
43
|
});
|
|
25
|
-
return { detail: `${host}:${port}
|
|
44
|
+
return { detail: `${host}:${port}/${dbName} — ${version}` };
|
|
26
45
|
});
|
|
27
46
|
}
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import type { CheckResult } from "./checker.js";
/** A zero-argument probe resolving to one service's health result. */
export type HealthChecker = () => Promise<CheckResult>;
/** Load checkers contributed by .stk/plugins into the registry (idempotent; runs once per process). */
export declare function loadPluginCheckers(): Promise<void>;
/** Look up a checker by service id; null when no built-in or plugin checker matches. */
export declare function getChecker(service: string): HealthChecker | null;
/** Ids of all registered checkers: built-ins plus any loaded plugin checkers. */
export declare function allCheckerNames(): string[];
|
@@ -29,9 +29,23 @@ const registry = {
|
|
|
29
29
|
r2: checkR2,
|
|
30
30
|
stripe: checkStripe,
|
|
31
31
|
};
|
|
32
|
+
// Checkers contributed by user plugins, merged in by loadPluginCheckers().
let pluginCheckers = {};
// Guard so plugin discovery runs at most once per process.
let pluginsLoaded = false;
/**
 * Discover plugin-provided health checkers and cache them module-wide.
 * Safe to call repeatedly; only the first call does any work. A failure to
 * load the plugin module is swallowed — plugins are strictly optional.
 */
export async function loadPluginCheckers() {
    if (pluginsLoaded)
        return;
    try {
        const { getPluginCheckers } = await import("../lib/plugins.js");
        pluginCheckers = await getPluginCheckers();
    }
    catch {
        // Plugin support unavailable — continue with built-in checkers only.
    }
    pluginsLoaded = true;
}
|
|
32
46
|
/**
 * Resolve a health checker by service id. Built-in checkers take precedence
 * over plugin-provided ones; unknown ids yield null.
 */
export function getChecker(service) {
    const builtin = registry[service];
    if (builtin != null)
        return builtin;
    return pluginCheckers[service] ?? null;
}
|
|
35
49
|
export function allCheckerNames() {
|
|
36
|
-
return Object.keys(registry);
|
|
50
|
+
return [...Object.keys(registry), ...Object.keys(pluginCheckers)];
|
|
37
51
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@prajwolkc/stk",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.2.0",
|
|
4
4
|
"description": "One CLI to deploy, monitor, and debug your entire stack. Health checks, deploy watching, env sync, logs, and GitHub issues — all from one command.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"license": "MIT",
|
|
@@ -40,6 +40,8 @@
|
|
|
40
40
|
"build": "tsc",
|
|
41
41
|
"dev": "tsx src/index.ts",
|
|
42
42
|
"start": "node dist/index.js",
|
|
43
|
+
"test": "vitest run",
|
|
44
|
+
"test:watch": "vitest",
|
|
43
45
|
"prepublishOnly": "npm run build"
|
|
44
46
|
},
|
|
45
47
|
"dependencies": {
|
|
@@ -50,6 +52,7 @@
|
|
|
50
52
|
"devDependencies": {
|
|
51
53
|
"@types/node": "^22.13.0",
|
|
52
54
|
"tsx": "^4.19.0",
|
|
53
|
-
"typescript": "^5.7.0"
|
|
55
|
+
"typescript": "^5.7.0",
|
|
56
|
+
"vitest": "^4.1.0"
|
|
54
57
|
}
|
|
55
58
|
}
|