@aexol/spectral 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +106 -0
- package/LICENSE +21 -0
- package/README.md +213 -0
- package/dist/cli.js +206 -0
- package/dist/commands/bind.js +96 -0
- package/dist/commands/login.js +109 -0
- package/dist/commands/logout.js +24 -0
- package/dist/commands/serve.js +374 -0
- package/dist/commands/unbind.js +36 -0
- package/dist/config.js +92 -0
- package/dist/extensions/aexol-mcp.js +117 -0
- package/dist/mcp-client.js +116 -0
- package/dist/preflight.js +36 -0
- package/dist/relay/client.js +240 -0
- package/dist/relay/dispatcher.js +504 -0
- package/dist/relay/machine-store.js +116 -0
- package/dist/relay/models-fetch.js +108 -0
- package/dist/relay/registration.js +135 -0
- package/dist/server/handlers/errors.js +34 -0
- package/dist/server/handlers/projects.js +86 -0
- package/dist/server/handlers/sessions.js +42 -0
- package/dist/server/paths.js +78 -0
- package/dist/server/pi-bridge.js +572 -0
- package/dist/server/session-stream.js +579 -0
- package/dist/server/shutdown.js +180 -0
- package/dist/server/storage.js +491 -0
- package/dist/server/title-generator.js +196 -0
- package/dist/server/wire.js +12 -0
- package/dist/studio-binding.js +97 -0
- package/package.json +67 -0
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* `spectral login` — interactive authentication against the Aexol MCP backend.
|
|
3
|
+
*
|
|
4
|
+
* Flow:
|
|
5
|
+
* 1. Prompt for API URL (default = SPECTRAL_MCP_URL env, else hard default).
|
|
6
|
+
* 2. Prompt for team API key (masked input). Verify the sk-aexol-team- prefix
|
|
7
|
+
* locally so we don't send obviously-wrong credentials.
|
|
8
|
+
* 3. Verify reachability + auth by calling tools/list against the URL.
|
|
9
|
+
* 4. On success: persist to ~/.spectral/config.json (mode 0600).
|
|
10
|
+
*
|
|
11
|
+
* Exits non-zero on any failure so shell scripts can detect it.
|
|
12
|
+
*
|
|
13
|
+
* NOTE: pure logic lives in `performLogin` — the interactive `runLogin`
|
|
14
|
+
* wrapper delegates to it after collecting prompts. Tests target
|
|
15
|
+
* `performLogin` directly so we don't have to drive `@inquirer/prompts`.
|
|
16
|
+
*/
|
|
17
|
+
import { input, password } from "@inquirer/prompts";
|
|
18
|
+
import pc from "picocolors";
|
|
19
|
+
import { DEFAULT_API_URL, TEAM_API_KEY_PREFIX, getConfigFile, validateTeamApiKey, writeConfig, } from "../config.js";
|
|
20
|
+
import { AexolMcpClient, AexolMcpError } from "../mcp-client.js";
|
|
21
|
+
/**
|
|
22
|
+
* Pure login logic. Throws `Error` with one of these stable message prefixes:
|
|
23
|
+
* - "Invalid API key prefix" — local validation failed
|
|
24
|
+
* - "Invalid token" — server returned 401/403
|
|
25
|
+
* - "Authentication failed" — server rejected the request (other 4xx/5xx)
|
|
26
|
+
* - "Could not reach" — network error (DNS, refused connection, timeout)
|
|
27
|
+
*
|
|
28
|
+
* Tests assert on these substrings.
|
|
29
|
+
*/
|
|
30
|
+
export async function performLogin(opts) {
    // Normalize inputs first so all downstream checks see trimmed strings.
    const apiUrl = (opts.apiUrl ?? "").trim();
    const teamApiKey = (opts.teamApiKey ?? "").trim();
    if (apiUrl === "") {
        throw new Error("API URL is required");
    }
    // Local prefix check — avoids a round-trip for obviously-wrong keys.
    if (!validateTeamApiKey(teamApiKey)) {
        throw new Error(`Invalid API key prefix: token must start with "${TEAM_API_KEY_PREFIX}"`);
    }
    const client = new AexolMcpClient(apiUrl, teamApiKey);
    let toolCount;
    try {
        toolCount = (await client.listTools()).length;
    }
    catch (err) {
        // Non-protocol failures (DNS, refused connection, timeout) map to
        // the "Could not reach" prefix that tests assert on.
        if (!(err instanceof AexolMcpError)) {
            const msg = err instanceof Error ? err.message : String(err);
            throw new Error(`Could not reach ${apiUrl}: ${msg}`);
        }
        if (err.status === 401 || err.status === 403) {
            throw new Error(`Invalid token: Authentication failed (HTTP ${err.status}). Check that the key is correct and active.`);
        }
        if (err.status !== undefined) {
            throw new Error(`Authentication failed: ${err.message}`);
        }
        // AexolMcpError without a status means the transport itself failed.
        throw new Error(`Could not reach ${apiUrl}: ${err.message}`);
    }
    // Only persist once the credentials have actually been verified.
    await writeConfig({ apiUrl, teamApiKey });
    return { toolCount };
}
|
|
62
|
+
export async function runLogin() {
    process.stdout.write(pc.bold("Spectral login\n"));
    process.stdout.write(pc.dim("Authenticate against the Aexol MCP backend. Credentials are stored at ~/.spectral/config.json (chmod 600).\n\n"));
    const defaultUrl = process.env.SPECTRAL_MCP_URL ?? DEFAULT_API_URL;
    // Prompt validators, hoisted as named closures for readability.
    const validateUrlInput = (v) => (v.trim().length > 0 ? true : "URL is required");
    const validateKeyInput = (v) => {
        const candidate = v.trim();
        if (candidate.length === 0)
            return "Token is required";
        return validateTeamApiKey(candidate)
            ? true
            : `Token must start with "${TEAM_API_KEY_PREFIX}"`;
    };
    let apiUrl;
    let teamApiKey;
    try {
        const urlAnswer = await input({
            message: "Aexol MCP URL",
            default: defaultUrl,
            validate: validateUrlInput,
        });
        apiUrl = urlAnswer.trim();
        const keyAnswer = await password({
            message: `Team API key (starts with "${TEAM_API_KEY_PREFIX}")`,
            mask: "*",
            validate: validateKeyInput,
        });
        teamApiKey = keyAnswer.trim();
    }
    catch (err) {
        // @inquirer/prompts throws ExitPromptError on Ctrl+C; treat as cancellation.
        if (err?.name === "ExitPromptError") {
            process.stderr.write(pc.yellow("\nLogin cancelled.\n"));
            process.exit(130);
        }
        throw err;
    }
    process.stdout.write(pc.dim(`\nVerifying credentials against ${apiUrl} ...\n`));
    let result;
    try {
        result = await performLogin({ apiUrl, teamApiKey });
    }
    catch (err) {
        // performLogin throws with stable message prefixes; surface them as-is.
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(pc.red(`✗ ${msg}\n`));
        process.exit(1);
    }
    process.stdout.write(pc.green(`✓ Authenticated. Found ${result.toolCount} tools.\n`));
    process.stdout.write(pc.dim(`Saved credentials to ${getConfigFile()}\n`));
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* `spectral logout` — remove the persisted Aexol credentials.
|
|
3
|
+
*
|
|
4
|
+
* Idempotent: missing config is not an error. Always exits 0 unless the
|
|
5
|
+
* filesystem itself misbehaves (in which case we surface the OS error).
|
|
6
|
+
*/
|
|
7
|
+
import pc from "picocolors";
|
|
8
|
+
import { CONFIG_FILE, deleteConfig } from "../config.js";
|
|
9
|
+
export async function runLogout() {
    try {
        const removed = await deleteConfig();
        // Idempotent: a missing config file is reported, not treated as an error.
        const note = removed
            ? pc.green(`✓ Logged out. Removed ${CONFIG_FILE}\n`)
            : pc.dim("Already logged out.\n");
        process.stdout.write(note);
    }
    catch (err) {
        // Only genuine filesystem failures reach here (deleteConfig maps ENOENT to false).
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(pc.red(`✗ Failed to remove ${CONFIG_FILE}: ${msg}\n`));
        process.exit(1);
    }
}
|
|
@@ -0,0 +1,374 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* `spectral serve` — relay orchestration entry point.
|
|
3
|
+
*
|
|
4
|
+
* Architecture (post-Batch 2 cutover):
|
|
5
|
+
* 1. Pre-flight: `requireLogin()` — same UX as the main spawn path.
|
|
6
|
+
* 2. Pre-flight: `preflightSqlite()` — fail fast on a broken native module.
|
|
7
|
+
* 3. Open the SQLite store and construct a `SessionStreamManager`.
|
|
8
|
+
* 4. Register this machine with the backend (`ensureMachineRegistered`),
|
|
9
|
+
* which either reuses a fresh `machine.json` or POSTs to
|
|
10
|
+
* `/api/machines/register` and persists the issued JWT.
|
|
11
|
+
* 5. Open a single long-lived `RelayClient` to `/agent-connection` carrying
|
|
12
|
+
* the machine JWT. Reconnects forever with backoff.
|
|
13
|
+
* 6. Until Batch 3 ships, every inbound non-pong frame is acked back as
|
|
14
|
+
* `{kind:"ack", echo:<frame>}`. This proves the WS is bidirectional
|
|
15
|
+
* end-to-end and gives operators a smoke check before envelope routing.
|
|
16
|
+
* 7. SIGINT/SIGTERM trigger graceful shutdown: dispose the relay (close 1000),
|
|
17
|
+
* dispose the manager (tear down pi streams), close the store.
|
|
18
|
+
*
|
|
19
|
+
* What this module is NOT (yet):
|
|
20
|
+
* - It does NOT translate relay envelopes into REST handler calls or WS
|
|
21
|
+
* event broadcasts. That dispatcher is Batch 3.
|
|
22
|
+
* - It does NOT bind any local port. The local HTTP server is gone.
|
|
23
|
+
*
|
|
24
|
+
* Flag surface:
|
|
25
|
+
* - `--machine-name <name>` — display name used at registration time.
|
|
26
|
+
* - `--port <n>` / `-p <n>` / `SPECTRAL_PORT` — accepted but warned-and-ignored
|
|
27
|
+
* so users updating from the pre-relay version don't get a hard error.
|
|
28
|
+
*
|
|
29
|
+
* The exported `runServe()` returns a handle so tests can drive the relay
|
|
30
|
+
* loop in-process without spawning a subprocess. Tests inject
|
|
31
|
+
* `relayUrlOverride` + `fetchImpl` + `webSocketImpl` to point at an
|
|
32
|
+
* in-process fake backend.
|
|
33
|
+
*/
|
|
34
|
+
import { readFileSync } from "node:fs";
|
|
35
|
+
import { homedir, hostname } from "node:os";
|
|
36
|
+
import { dirname, join, resolve } from "node:path";
|
|
37
|
+
import { fileURLToPath } from "node:url";
|
|
38
|
+
import { getConfigDir } from "../config.js";
|
|
39
|
+
import { requireLogin } from "../preflight.js";
|
|
40
|
+
import { RelayClient } from "../relay/client.js";
|
|
41
|
+
import { detachAllSubscribers, handleClientMessage, handleRestRequest, handleSubscribe, } from "../relay/dispatcher.js";
|
|
42
|
+
import { ensureMachineRegistered } from "../relay/registration.js";
|
|
43
|
+
import { SessionStreamManager } from "../server/session-stream.js";
|
|
44
|
+
import { gracefulShutdown } from "../server/shutdown.js";
|
|
45
|
+
import { preflightSqlite, SessionStore } from "../server/storage.js";
|
|
46
|
+
// ES modules have no `__dirname`; reconstruct it from `import.meta.url`
// so `readPackageVersion` can resolve paths relative to this file.
const __dirname = dirname(fileURLToPath(import.meta.url));
/** Default backend base URL when neither env nor override is set. */
const DEFAULT_BACKEND_URL = "http://localhost:4008";
|
|
49
|
+
/**
 * Read this package's version out of its own package.json.
 * Works from both layouts because the relative hop is the same:
 *   dist/commands/serve.js → ../../package.json
 *   src/commands/serve.ts  → ../../package.json (tsx / vitest)
 */
function readPackageVersion() {
    const manifestPath = resolve(__dirname, "..", "..", "package.json");
    const manifest = JSON.parse(readFileSync(manifestPath, "utf8"));
    return manifest.version;
}
|
|
56
|
+
/**
|
|
57
|
+
* Translate a backend HTTP(S) base URL to the relay WS(S) URL.
|
|
58
|
+
* `http://x:y` → `ws://x:y/agent-connection`
|
|
59
|
+
* `https://x` → `wss://x/agent-connection`
|
|
60
|
+
*/
|
|
61
|
+
export function deriveRelayUrl(backendUrl) {
    // Drop a single trailing slash so we never emit "//agent-connection".
    const base = backendUrl.replace(/\/$/, "");
    // HTTP(S) schemes are rewritten to their WS(S) counterparts.
    const schemeMap = [
        ["https://", "wss://"],
        ["http://", "ws://"],
    ];
    for (const [httpScheme, wsScheme] of schemeMap) {
        if (base.startsWith(httpScheme)) {
            return `${wsScheme}${base.slice(httpScheme.length)}/agent-connection`;
        }
    }
    // Already a ws[s] URL — assume the caller knows what they're doing,
    // appending the path only when it is missing.
    if (base.startsWith("ws://") || base.startsWith("wss://")) {
        return base.endsWith("/agent-connection") ? base : `${base}/agent-connection`;
    }
    throw new Error(`Cannot derive relay URL from "${backendUrl}"`);
}
|
|
75
|
+
/**
|
|
76
|
+
* Parse the `serve` subcommand's flag surface. Manual parser, same style as
|
|
77
|
+
* `login`/`logout` (no CLI framework).
|
|
78
|
+
*
|
|
79
|
+
* Accepted:
|
|
80
|
+
* --machine-name <name>
|
|
81
|
+
* --machine-name=<name>
|
|
82
|
+
* Deprecated (warn, don't error):
|
|
83
|
+
* --port <n> / -p <n> / --port=<n>
|
|
84
|
+
*
|
|
85
|
+
* Anything else throws so users notice typos.
|
|
86
|
+
*/
|
|
87
|
+
export function parseServeArgs(args) {
    const parsed = { deprecatedPort: false };
    let i = 0;
    while (i < args.length) {
        const token = args[i];
        // --machine-name <value>
        if (token === "--machine-name") {
            const value = args[i + 1];
            if (!value)
                throw new Error(`--machine-name requires a value`);
            parsed.machineName = value;
            i += 2;
            continue;
        }
        // --machine-name=<value>
        if (token.startsWith("--machine-name=")) {
            parsed.machineName = token.slice("--machine-name=".length);
            i += 1;
            continue;
        }
        // Deprecated port flag — warn-and-ignore so existing scripts don't crash.
        if (token === "--port" || token === "-p") {
            parsed.deprecatedPort = true;
            // Swallow the value if present so it doesn't trip the unknown-arg branch.
            const value = args[i + 1];
            i += value && !value.startsWith("-") ? 2 : 1;
            continue;
        }
        if (token.startsWith("--port=")) {
            parsed.deprecatedPort = true;
            i += 1;
            continue;
        }
        // Anything unrecognized is an error so typos are noticed.
        throw new Error(`Unknown argument to "serve": ${token}`);
    }
    return parsed;
}
|
|
119
|
+
/**
 * Start the relay daemon: open the SQLite session store, register this
 * machine with the backend, connect the long-lived relay WebSocket, and
 * wire envelope dispatch. Resolves to a handle
 * `{ store, manager, relay, machineId, close }` so tests can drive the
 * loop in-process and shut it down deterministically via `close()`.
 *
 * NOTE: pre-flight or registration failure calls `process.exit(1)` (after
 * best-effort cleanup) rather than throwing — this is a CLI entry point.
 */
export async function runServe(opts = {}) {
    // NOTE(review): requireLogin() presumably exits or throws when no stored
    // credentials exist — confirm against ../preflight.js.
    const cfg = await requireLogin();
    const cwd = opts.cwd ?? homedir();
    const version = readPackageVersion();
    const installSignalHandlers = opts.installSignalHandlers ?? true;
    const silent = opts.silent ?? false;
    const dbPath = opts.dbPath ?? join(getConfigDir(), "sessions.db");
    // Pre-flight: native sqlite. Fail fast with a friendly hint BEFORE we
    // start opening sockets — keeps the error close to the cause.
    const preflight = preflightSqlite(dbPath);
    if (!preflight.ok) {
        process.stderr.write(`✗ ${preflight.error}\n`);
        process.exit(1);
    }
    const store = new SessionStore(dbPath);
    // Mutable holder for the meta-event publisher. Bound below once we
    // have both `relay` (the send target) and `registration.record.machineId`
    // (the canonical machineId the backend uses to fan out). The manager
    // and dispatcher both close over a thin wrapper that defers to this
    // ref, so a race where the title pipeline fires before the relay is
    // up just no-ops (the deferred lookup sees `undefined`) instead of
    // throwing or losing the rename.
    let publishMetaEventRef;
    const deferredPublishMetaEvent = (event) => {
        publishMetaEventRef?.(event);
    };
    // Resolve URLs. Precedence: explicit option > env > default.
    const backendUrl = opts.backendUrlOverride ?? process.env.SPECTRAL_BACKEND_URL ?? DEFAULT_BACKEND_URL;
    const relayUrl = opts.relayUrlOverride ?? process.env.SPECTRAL_RELAY_URL ?? deriveRelayUrl(backendUrl);
    // Register BEFORE constructing the SessionStreamManager. The manager
    // (and every PiBridge it spawns) needs the machine JWT to authenticate
    // backend-proxied inference calls, so registration must succeed first.
    // Fail-fast on error — a clear message beats a hang.
    let registration;
    try {
        registration = await ensureMachineRegistered({
            backendUrl,
            apiKey: cfg.teamApiKey,
            machineNameOverride: opts.machineName,
            version,
            fetchImpl: opts.fetchImpl,
        });
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(`✗ Could not register machine: ${msg}\n`);
        // Tear down what we've already opened so tests don't see leaks.
        try {
            store.close();
        }
        catch {
            // ignore
        }
        process.exit(1);
    }
    // Stream manager owns per-session pi processes. Batch 3 will subscribe
    // it to relay frames; for now it's just here so the handles match the
    // shape Batch 3 expects.
    const manager = new SessionStreamManager({
        store,
        cwd,
        backendUrl,
        machineJwt: registration.record.machineJwt,
        bridgeFactory: opts.bridgeFactory,
        titleLlmCall: opts.titleLlmCall,
        disableAutoTitle: opts.disableAutoTitle,
        publishMetaEvent: deferredPublishMetaEvent,
    });
    if (!silent) {
        const action = registration.reused ? "Reusing" : "Registered";
        process.stdout.write(`${action} machine "${registration.record.machineName}" (${registration.record.machineId})\n`);
        process.stdout.write(`Connecting to relay: ${relayUrl} (cwd: ${cwd}, host: ${hostname()})\n`);
    }
    const relay = new RelayClient({
        relayUrl,
        machineJwt: registration.record.machineJwt,
        webSocketImpl: opts.webSocketImpl,
        logger: silent ? { log: () => { }, warn: () => { }, error: () => { } } : console,
    });
    // Wire the meta publisher now that we have both the relay socket and
    // the canonical machineId. The backend ignores the body's machineId
    // field on `meta_event` (it stamps it from the connection identity),
    // but we still send the right value for local debuggability and to
    // avoid relying on backend behaviour that may evolve.
    publishMetaEventRef = (event) => {
        relay.send({
            kind: "meta_event",
            machineId: registration.record.machineId,
            event,
        });
    };
    // Per-session subscriber registry. The dispatcher attaches at most
    // one subscriber per (sessionId) on the first `client_message`; we
    // hand the same map back to it for every subsequent frame so attach
    // is idempotent. On relay disconnect we detach all of them so the
    // manager doesn't keep an unreachable subscriber pinned, but we
    // KEEP the underlying streams alive — a browser reconnect should
    // resume mid-turn.
    const subscribers = new Map();
    // Envelope dispatcher. We only act on `rest_request` and
    // `client_message`; everything else (welcome, pong, error, etc.) is
    // either handled by RelayClient itself or is informational.
    //
    // Frames arrive typed loosely from `RelayClient` (which is protocol-
    // agnostic — it just forwards JSON). We re-narrow via the `kind`
    // discriminator and cast to the strict envelope types from
    // `@aexol/relay-protocol`. The protocol package's own `parseFrame()`
    // already validated `kind` membership in RelayClient itself; here we
    // additionally trust that the inner shape matches its discriminator
    // (the backend is the only producer and is exercised by tests).
    relay.on("frame", (frame) => {
        if (frame.kind === "rest_request") {
            const response = handleRestRequest(frame, {
                store,
                manager,
                publishMetaEvent: deferredPublishMetaEvent,
            });
            relay.send(response);
            return;
        }
        if (frame.kind === "client_message") {
            handleClientMessage(frame, {
                manager,
                relay,
                subscribers,
            });
            return;
        }
        if (frame.kind === "subscribe") {
            handleSubscribe(frame, {
                manager,
                relay,
                subscribers,
            });
            return;
        }
        // Other frames (error, machine_disconnected addressed to us, etc.)
        // are ignored at this layer. Future batches may surface them in
        // structured logs.
    });
    // On every relay close, drop the subscriber registry. The manager
    // keeps streams alive (so `dispose` is the only place that tears
    // them down), but the previous subscribers are pointing at a stale
    // socket — let the next `client_message` attach a fresh one.
    relay.on("close", () => {
        detachAllSubscribers(manager, subscribers);
    });
    if (!silent) {
        relay.on("welcome", () => process.stdout.write("✓ Relay connected\n"));
        relay.on("close", ({ code, reason }) => {
            process.stdout.write(`Relay closed (${code}${reason ? `: ${reason}` : ""}); will reconnect…\n`);
        });
        relay.on("reconnect-scheduled", ({ delayMs, attempt }) => {
            process.stdout.write(`Reconnect attempt ${attempt} in ${delayMs}ms\n`);
        });
        relay.on("error", (err) => {
            const msg = err instanceof Error ? err.message : String(err);
            process.stderr.write(`Relay error: ${msg}\n`);
        });
    }
    relay.connect();
    // Idempotent teardown used by both tests (via the returned handle)
    // and non-signal shutdown paths: relay → manager → store.
    let closed = false;
    const close = async () => {
        if (closed)
            return;
        closed = true;
        relay.dispose();
        manager.dispose();
        try {
            store.close();
        }
        catch {
            // ignore — best-effort
        }
        if (!silent)
            process.stdout.write("Spectral relay stopped\n");
    };
    if (installSignalHandlers) {
        // Top-level error nets. A daemon must NOT die from a transient
        // unhandled rejection (e.g. a fetch promise that lost its `.catch`
        // when the relay flapped) or a stray throw from a third-party
        // library. Log loudly so we notice in operator logs, but keep the
        // process alive — the relay client owns its own reconnect loop and
        // the manager owns its own per-session error handling.
        process.on("unhandledRejection", (reason) => {
            const msg = reason instanceof Error
                ? `${reason.message}\n${reason.stack ?? ""}`
                : String(reason);
            try {
                process.stderr.write(`[serve] unhandledRejection (kept alive): ${msg}\n`);
            }
            catch {
                // ignore — best-effort
            }
        });
        process.on("uncaughtException", (err) => {
            try {
                process.stderr.write(`[serve] uncaughtException (kept alive): ${err.message}\n${err.stack ?? ""}\n`);
            }
            catch {
                // ignore — best-effort
            }
        });
        // Two-stage signal handling. The first SIGINT/SIGTERM kicks off the
        // graceful sequence (flag the dispatcher, drain in-flight turns up
        // to 5 s, close relay→manager→store, exit 0). A second signal during
        // the grace window forces an immediate exit(1) — operators must always
        // be able to kill the process with Ctrl-C twice.
        //
        // We use `on` (not `once`) so the second signal hits `gracefulShutdown`
        // again, where the entry-counter handles the force path.
        const onSignal = (_sig) => {
            void gracefulShutdown({
                inFlightCount: () => manager.activeTurnCount(),
                closeRelay: () => {
                    // Mirror our own `close()`: dispose the relay (sends close 1000).
                    // We don't await here because RelayClient.dispose is sync and
                    // the shutdown helper expects a void|Promise return.
                    relay.dispose();
                },
                disposeManager: () => {
                    manager.dispose();
                },
                closeStore: () => {
                    store.close();
                },
                logger: silent
                    ? { log: () => { }, warn: () => { }, error: () => { } }
                    : console,
            });
        };
        process.on("SIGINT", onSignal);
        process.on("SIGTERM", onSignal);
    }
    return {
        store,
        manager,
        relay,
        machineId: registration.record.machineId,
        close,
    };
}
|
|
361
|
+
/**
|
|
362
|
+
* CLI entry point used by `cli.ts`. Surfaces the deprecation warning for
|
|
363
|
+
* `--port`/`SPECTRAL_PORT` once, then defers to `runServe`.
|
|
364
|
+
*/
|
|
365
|
+
export async function runServeCli(rawArgs) {
    const { machineName, deprecatedPort } = parseServeArgs(rawArgs);
    // Warn once when legacy port plumbing is detected; never hard-error.
    if (deprecatedPort || process.env.SPECTRAL_PORT) {
        process.stderr.write(`warning: --port / SPECTRAL_PORT are no longer used. ` +
            `spectral serve now connects outbound to the Aexol relay; there is no local port to bind.\n`);
    }
    return runServe({
        machineName,
    });
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* `spectral unbind` — remove the local Studio project binding.
|
|
3
|
+
*
|
|
4
|
+
* Reads `.aexol/aexol.jsonc`, reports what project was unbound, and deletes
|
|
5
|
+
* the file. Idempotent: missing binding is not an error.
|
|
6
|
+
*/
|
|
7
|
+
import pc from "picocolors";
|
|
8
|
+
import { deleteStudioBinding, readStudioBinding, } from "../studio-binding.js";
|
|
9
|
+
/**
 * `spectral unbind` — remove the local Studio project binding.
 *
 * Reads the binding, reports which project is being unbound, and deletes the
 * file. Idempotent: a missing binding prints a dim notice and exits 0.
 * Exits 1 only when the filesystem read or delete itself fails.
 *
 * Fix: removed the unused `displayName` local (computed but never referenced).
 */
export async function runUnbind() {
    // ---- Read existing -------------------------------------------------------
    let existing;
    try {
        existing = await readStudioBinding();
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(pc.red(`Failed to read binding: ${msg}\n`));
        process.exit(1);
    }
    if (!existing) {
        process.stdout.write(pc.dim("No Studio binding found in this directory.\n"));
        return;
    }
    // NOTE(review): status line goes to stderr (not stdout) — presumably to
    // keep stdout clean for scripting; confirm this is intentional.
    process.stderr.write(`Unbinding from project: ${existing.name ?? "(unnamed)"} (${existing.projectId})\n`);
    // ---- Delete --------------------------------------------------------------
    try {
        await deleteStudioBinding();
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        process.stderr.write(pc.red(`Failed to remove binding: ${msg}\n`));
        process.exit(1);
    }
    process.stdout.write(pc.green(`✓ Unbound.\n`));
}
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Spectral configuration: persisted Aexol MCP credentials.
|
|
3
|
+
*
|
|
4
|
+
* Stored at ~/.spectral/config.json with mode 0600 (user read/write only).
|
|
5
|
+
* The directory is created with mode 0700.
|
|
6
|
+
*
|
|
7
|
+
* Keep this module zero-side-effect so it can be imported from both the
|
|
8
|
+
* CLI entry (pre-flight check) and the pi extension.
|
|
9
|
+
*
|
|
10
|
+
* `getConfigDir()` honors `SPECTRAL_CONFIG_DIR` so tests can redirect to a
|
|
11
|
+
* temporary directory without touching the real `~/.spectral`.
|
|
12
|
+
*/
|
|
13
|
+
import { mkdir, readFile, rm, writeFile } from "node:fs/promises";
|
|
14
|
+
import { homedir } from "node:os";
|
|
15
|
+
import { dirname, join } from "node:path";
|
|
16
|
+
/** Default MCP endpoint used when neither env nor stored config provides one. */
export const DEFAULT_API_URL = "https://api.aexol.ai/mcp";
/** Required prefix for team API keys; checked locally before any network call. */
export const TEAM_API_KEY_PREFIX = "sk-aexol-team-";
|
|
18
|
+
/**
|
|
19
|
+
* Resolve the directory used to store Spectral configuration.
|
|
20
|
+
*
|
|
21
|
+
* Honors `SPECTRAL_CONFIG_DIR` first (used by tests and CI), then falls back
|
|
22
|
+
* to `<HOMEDIR>/.spectral`. Computed lazily on every call so test-time env
|
|
23
|
+
* mutation works without re-importing the module.
|
|
24
|
+
*/
|
|
25
|
+
/**
 * Resolve the directory used to store Spectral configuration.
 * `SPECTRAL_CONFIG_DIR` wins (tests/CI); otherwise `<HOMEDIR>/.spectral`.
 * Evaluated on every call so env mutation takes effect without re-import.
 */
export function getConfigDir() {
    const override = process.env.SPECTRAL_CONFIG_DIR;
    if (override != null)
        return override;
    return join(homedir(), ".spectral");
}
|
|
28
|
+
/** Path to the config JSON file. Computed lazily — see `getConfigDir`. */
|
|
29
|
+
/** Path to the config JSON file; resolved lazily via `getConfigDir()`. */
export function getConfigFile() {
    const dir = getConfigDir();
    return join(dir, "config.json");
}
|
|
32
|
+
/**
|
|
33
|
+
* Backwards-compatible aliases for code that imported the old top-level
|
|
34
|
+
* constants. They eagerly resolve at import time, so they reflect the env
|
|
35
|
+
* state at startup. New code should call `getConfigDir()` / `getConfigFile()`
|
|
36
|
+
* directly so test-time env overrides take effect.
|
|
37
|
+
*/
|
|
38
|
+
/** Eager import-time snapshot of the config dir (legacy alias; prefer getConfigDir()). */
export const CONFIG_DIR = getConfigDir();
/** Eager import-time snapshot of the config file path (legacy alias; prefer getConfigFile()). */
export const CONFIG_FILE = getConfigFile();
|
|
40
|
+
/** Resolve the effective API URL: env wins, then stored value, then default. */
|
|
41
|
+
/** Resolve the effective API URL: env wins, then stored value, then default. */
export function getApiUrl(stored) {
    const envUrl = process.env.SPECTRAL_MCP_URL;
    if (envUrl != null)
        return envUrl;
    if (stored != null)
        return stored;
    return DEFAULT_API_URL;
}
|
|
44
|
+
/** Validate the team API key prefix. The full token check happens server-side. */
|
|
45
|
+
/** Validate the team API key prefix. The full token check happens server-side. */
export function validateTeamApiKey(key) {
    if (typeof key !== "string")
        return false;
    return key.startsWith(TEAM_API_KEY_PREFIX);
}
|
|
48
|
+
/** Read config; returns null if missing or malformed. Never throws. */
|
|
49
|
+
/** Read config; returns null if missing or malformed. Never throws. */
export async function readConfig() {
    try {
        const raw = await readFile(getConfigFile(), "utf8");
        const data = JSON.parse(raw);
        // Shape check: both fields must be strings and the key non-empty.
        const wellFormed = typeof data.apiUrl === "string" &&
            typeof data.teamApiKey === "string" &&
            data.teamApiKey.length > 0;
        return wellFormed
            ? { apiUrl: data.apiUrl, teamApiKey: data.teamApiKey }
            : null;
    }
    catch {
        // Missing file, unreadable file, or invalid JSON all map to null.
        return null;
    }
}
|
|
70
|
+
/** Persist config with restrictive permissions (dir 0700, file 0600). */
|
|
71
|
+
/** Persist config with restrictive permissions (dir 0700, file 0600). */
export async function writeConfig(cfg) {
    const target = getConfigFile();
    // Directory first (0700), then the file at 0600 — Node 20's
    // fs.writeFile supports the `mode` option directly.
    await mkdir(dirname(target), { recursive: true, mode: 0o700 });
    const payload = `${JSON.stringify(cfg, null, 2)}\n`;
    await writeFile(target, payload, {
        mode: 0o600,
        encoding: "utf8",
    });
}
|
|
80
|
+
/** Delete config; returns true if a file was removed, false if there was nothing to remove. */
|
|
81
|
+
/** Delete config; returns true if a file was removed, false if there was nothing to remove. */
export async function deleteConfig() {
    try {
        // `force: false` so ENOENT surfaces and can be mapped to `false`.
        await rm(getConfigFile(), { force: false });
    }
    catch (err) {
        if (err.code === "ENOENT")
            return false;
        // Any other filesystem failure is the caller's problem to report.
        throw err;
    }
    return true;
}
|