@stackmeter/cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,5 @@
+ #!/usr/bin/env node
+
+ import { startGateway } from "../src/gateway/server.mjs";
+
+ startGateway();
package/bin/stackmeter.mjs ADDED
@@ -0,0 +1,619 @@
+ #!/usr/bin/env node
+
+ import { createInterface } from "node:readline/promises";
+ import { exec, execSync } from "node:child_process";
+ import {
+   readFileSync, writeFileSync, existsSync, mkdirSync,
+   unlinkSync, chmodSync, copyFileSync,
+ } from "node:fs";
+ import { homedir, platform } from "node:os";
+ import { join, dirname, resolve } from "node:path";
+ import { fileURLToPath } from "node:url";
+
+ const __filename = fileURLToPath(import.meta.url);
+ const __dirname = dirname(__filename);
+
+ const rl = createInterface({ input: process.stdin, output: process.stdout });
+ const args = process.argv.slice(2);
+ const command = args[0];
+ const target = args[1];
+
+ // ── Constants ────────────────────────────────────────────
+
+ const DEFAULT_BASE_URL = "https://stackmeter.app";
+ const DEFAULT_GATEWAY_PORT = 8787;
+ const LAUNCH_AGENT_LABEL = "com.stackmeter.gateway";
+ const PLIST_PATH = join(
+   homedir(), "Library", "LaunchAgents", `${LAUNCH_AGENT_LABEL}.plist`
+ );
+ const SM_CONFIG_DIR = join(homedir(), ".stackmeter");
+ const SM_CONFIG_PATH = join(SM_CONFIG_DIR, "config.json");
+ const SM_GATEWAY_PATH = join(SM_CONFIG_DIR, "gateway.mjs");
+ const SM_LAUNCHER_PATH = join(SM_CONFIG_DIR, "start-gateway.mjs");
+ const SM_GATEWAY_LOG = join(SM_CONFIG_DIR, "gateway.log");
+ const SM_GATEWAY_ERR_LOG = join(SM_CONFIG_DIR, "gateway.err.log");
+ const OC_CONFIG_PATH = join(homedir(), ".openclaw", "openclaw.json");
+ const OC_BACKUP_PATH = join(homedir(), ".openclaw", "openclaw.json.stackmeter.bak");
+
+ // ── Command routing ──────────────────────────────────────
+
+ if (command === "connect" && target === "openclaw") {
+   connectOpenClaw().catch(fatal);
+ } else if (command === "disconnect" && target === "openclaw") {
+   disconnectOpenClaw().catch(fatal);
+ } else if (command === "gateway") {
+   if (args.includes("--self-test")) {
+     selfTest().catch(fatal);
+   } else {
+     rl.close();
+     startGatewayCommand().catch(fatal);
+   }
+ } else {
+   printUsage();
+   rl.close();
+   process.exit(1);
+ }
+
+ // ── Helpers ──────────────────────────────────────────────
+
+ function printUsage() {
+   console.log("");
+   console.log(" Usage:");
+   console.log(
+     " stackmeter connect openclaw [--auto] Connect OpenClaw to StackMeter"
+   );
+   console.log(
+     " stackmeter disconnect openclaw Disconnect and restore config"
+   );
+   console.log(
+     " stackmeter gateway Start the StackMeter gateway"
+   );
+   console.log(
+     " stackmeter gateway --self-test Test the full pipeline"
+   );
+   console.log("");
+ }
+
+ function fatal(e) {
+   console.error(e);
+   rl.close();
+   process.exit(1);
+ }
+
+ function openBrowser(url) {
+   const p = platform();
+   const cmd = p === "darwin" ? "open" : p === "win32" ? "start" : "xdg-open";
+   exec(`${cmd} "${url}"`, () => {});
+ }
+
+ function redactKey(key) {
+   if (!key || key.length < 8) return "****";
+   return `...${key.slice(-4)}`;
+ }
+
+ // ── Config readers / writers ─────────────────────────────
+
+ function readOpenClawConfig() {
+   if (!existsSync(OC_CONFIG_PATH))
+     return { path: OC_CONFIG_PATH, config: null };
+   try {
+     return {
+       path: OC_CONFIG_PATH,
+       config: JSON.parse(readFileSync(OC_CONFIG_PATH, "utf-8")),
+     };
+   } catch {
+     return { path: OC_CONFIG_PATH, config: null };
+   }
+ }
+
+ function readStackMeterConfig() {
+   if (!existsSync(SM_CONFIG_PATH)) return {};
+   try {
+     return JSON.parse(readFileSync(SM_CONFIG_PATH, "utf-8"));
+   } catch {
+     return {};
+   }
+ }
+
+ function writeStackMeterConfig(data) {
+   if (!existsSync(SM_CONFIG_DIR))
+     mkdirSync(SM_CONFIG_DIR, { recursive: true });
+   writeFileSync(SM_CONFIG_PATH, JSON.stringify(data, null, 2) + "\n");
+   chmodSync(SM_CONFIG_PATH, 0o600);
+ }
+
+ // ── LaunchAgent helpers (macOS) ──────────────────────────
+
+ function installLaunchAgent(nodePath, gatewayScript) {
+   const plist = `<?xml version="1.0" encoding="UTF-8"?>
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+ <plist version="1.0">
+ <dict>
+ <key>Label</key>
+ <string>${LAUNCH_AGENT_LABEL}</string>
+ <key>ProgramArguments</key>
+ <array>
+ <string>${nodePath}</string>
+ <string>${gatewayScript}</string>
+ </array>
+ <key>RunAtLoad</key>
+ <true/>
+ <key>KeepAlive</key>
+ <true/>
+ <key>StandardOutPath</key>
+ <string>${SM_GATEWAY_LOG}</string>
+ <key>StandardErrorPath</key>
+ <string>${SM_GATEWAY_ERR_LOG}</string>
+ <key>ThrottleInterval</key>
+ <integer>10</integer>
+ </dict>
+ </plist>`;
+
+   const laDir = dirname(PLIST_PATH);
+   if (!existsSync(laDir)) mkdirSync(laDir, { recursive: true });
+   writeFileSync(PLIST_PATH, plist);
+ }
+
+ function loadLaunchAgent() {
+   try {
+     execSync(`launchctl unload "${PLIST_PATH}" 2>/dev/null`, {
+       stdio: "ignore",
+     });
+   } catch {}
+   execSync(`launchctl load "${PLIST_PATH}"`);
+ }
+
+ function unloadLaunchAgent() {
+   try {
+     execSync(`launchctl unload "${PLIST_PATH}"`, { stdio: "ignore" });
+   } catch {}
+   if (existsSync(PLIST_PATH)) unlinkSync(PLIST_PATH);
+ }
+
+ // ── connect openclaw ─────────────────────────────────────
+
+ async function connectOpenClaw() {
+   const autoFlag = args.includes("--auto");
+   const isMac = platform() === "darwin";
+
+   console.log("");
+   console.log(" \u{1F517} StackMeter \u2014 Connect OpenClaw");
+   console.log("");
+
+   // ── Step 1: Get StackMeter base URL ────────────────
+   let baseUrl = process.env.STACKMETER_URL
+     ? process.env.STACKMETER_URL.replace(/\/api\/usage-events\/?$/, "")
+     : "";
+
+   if (!baseUrl) {
+     baseUrl = await rl.question(
+       ` StackMeter URL (default: ${DEFAULT_BASE_URL}): `
+     );
+     if (!baseUrl.trim()) baseUrl = DEFAULT_BASE_URL;
+   } else {
+     console.log(` Using STACKMETER_URL: ${baseUrl}`);
+   }
+   baseUrl = baseUrl.replace(/\/+$/, "");
+   const apiUrl = `${baseUrl}/api/usage-events`;
+
+   // ── Step 2: Get or create tracking token ───────────
+   let token = process.env.STACKMETER_TOKEN || "";
+
+   if (token) {
+     console.log(
+       ` Existing STACKMETER_TOKEN found (sm_...${token.slice(-4)})`
+     );
+   } else {
+     console.log("");
+     console.log(" Opening StackMeter to generate a tracking token...");
+     openBrowser(`${baseUrl}/app/ai-usage`);
+     console.log(
+       ` If the browser didn't open, visit: ${baseUrl}/app/ai-usage`
+     );
+     console.log(
+       ' Click "Track OpenClaw usage" \u2192 "Generate tracking token" \u2192 copy the token.'
+     );
+     console.log("");
+
+     token = await rl.question(" Paste your tracking token (sm_...): ");
+     token = token.trim();
+   }
+
+   if (!token.startsWith("sm_")) {
+     console.log(" \u274C Invalid token format. Tokens start with sm_");
+     rl.close();
+     process.exit(1);
+   }
+
+   // ── Step 3: Verify token ───────────────────────────
+   console.log("");
+   console.log(" Verifying token...");
+
+   try {
+     const res = await fetch(apiUrl, {
+       method: "POST",
+       headers: {
+         Authorization: `Bearer ${token}`,
+         "Content-Type": "application/json",
+       },
+       body: JSON.stringify({
+         provider: "stackmeter",
+         model: "connection-test",
+         inputTokens: 0,
+         outputTokens: 0,
+         costCents: 0,
+         sourceType: "openclaw",
+         sourceId: "cli-connect",
+         dedupKey: `connect-test-${token.slice(-8)}`,
+       }),
+     });
+
+     if (!res.ok) {
+       const err = await res.json().catch(() => ({}));
+       const msg = err.error || res.statusText;
+
+       if (
+         /dedup_key.*(schema cache|does not exist|could not find)/i.test(msg)
+       ) {
+         console.log("");
+         console.log(
+           " \u26A0\uFE0F The 'dedup_key' column is missing from usage_events."
+         );
+         console.log(" Run this SQL in the Supabase SQL Editor:");
+         console.log("");
+         console.log(
+           " alter table usage_events add column if not exists dedup_key text;"
+         );
+         console.log(
+           " create unique index if not exists idx_usage_events_dedup"
+         );
+         console.log(
+           " on usage_events(dedup_key) where dedup_key is not null;"
+         );
+         console.log("");
+         rl.close();
+         process.exit(1);
+       }
+
+       if (res.status === 401) {
+         console.log(
+           ` \u274C Invalid or revoked token. Generate a new one at ${baseUrl}/app/ai-usage`
+         );
+       } else {
+         console.log(` \u274C Token verification failed: ${msg}`);
+       }
+       rl.close();
+       process.exit(1);
+     }
+   } catch (e) {
+     console.log(` \u274C Could not reach ${apiUrl}`);
+     console.log(` ${e.message}`);
+     console.log("");
+     console.log(
+       " Check your internet connection or override with STACKMETER_URL."
+     );
+     rl.close();
+     process.exit(1);
+   }
+
+   console.log(" \u2705 Token verified!");
+   console.log("");
+
+   // ── Step 4: Read OpenClaw config ───────────────────
+   const { config: ocConfig } = readOpenClawConfig();
+   const existingApiKey =
+     ocConfig?.models?.providers?.openai?.apiKey ||
+     process.env.OPENAI_API_KEY ||
+     "";
+
+   if (!existingApiKey) {
+     console.log(" \u274C OpenAI API key not found.");
+     console.log(
+       " Set models.providers.openai.apiKey in ~/.openclaw/openclaw.json"
+     );
+     console.log(" or: export OPENAI_API_KEY=sk-...");
+     rl.close();
+     process.exit(1);
+   }
+
+   if (autoFlag) {
+     const gatewayPort = parseInt(
+       process.env.STACKMETER_GATEWAY_PORT || String(DEFAULT_GATEWAY_PORT),
+       10
+     );
+     const gatewayBaseUrl = `http://127.0.0.1:${gatewayPort}/v1`;
+
+     // ── Backup openclaw.json (first run only) ────────
+     if (ocConfig && !existsSync(OC_BACKUP_PATH)) {
+       copyFileSync(OC_CONFIG_PATH, OC_BACKUP_PATH);
+       console.log(` \u{1F4CB} Backed up ${OC_CONFIG_PATH}`);
+       console.log(` \u2192 ${OC_BACKUP_PATH}`);
+       console.log("");
+     }
+
+     // ── Patch openclaw.json ──────────────────────────
+     if (ocConfig) {
+       if (!ocConfig.models) ocConfig.models = {};
+       if (!ocConfig.models.providers) ocConfig.models.providers = {};
+       if (!ocConfig.models.providers.openai)
+         ocConfig.models.providers.openai = {};
+       ocConfig.models.providers.openai.baseUrl = gatewayBaseUrl;
+       writeFileSync(OC_CONFIG_PATH, JSON.stringify(ocConfig, null, 2) + "\n");
+       console.log(` \u2705 Patched ${OC_CONFIG_PATH}`);
+       console.log(
+         ` models.providers.openai.baseUrl = "${gatewayBaseUrl}"`
+       );
+     } else {
+       console.log(
+         " \u274C OpenClaw config not found at ~/.openclaw/openclaw.json"
+       );
+       console.log(" Install OpenClaw first: https://openclaw.dev");
+       rl.close();
+       process.exit(1);
+     }
+     console.log("");
+
+     // ── Write ~/.stackmeter/config.json ──────────────
+     writeStackMeterConfig({
+       token,
+       url: apiUrl,
+       gatewayPort,
+     });
+     console.log(` \u2705 Saved config to ${SM_CONFIG_PATH}`);
+     console.log("");
+
+     // ── Install gateway as persistent service ────────
+     if (isMac) {
+       // Copy gateway to stable path so LaunchAgent survives npx cache eviction
+       const gatewaySource = resolve(
+         __dirname, "..", "src", "gateway", "server.mjs"
+       );
+       if (!existsSync(gatewaySource)) {
+         console.log(` \u274C Gateway script not found at ${gatewaySource}`);
+         rl.close();
+         process.exit(1);
+       }
+
+       copyFileSync(gatewaySource, SM_GATEWAY_PATH);
+       writeFileSync(
+         SM_LAUNCHER_PATH,
+         [
+           "#!/usr/bin/env node",
+           'import { startGateway } from "./gateway.mjs";',
+           "startGateway();",
+           "",
+         ].join("\n")
+       );
+
+       installLaunchAgent(process.execPath, SM_LAUNCHER_PATH);
+       loadLaunchAgent();
+
+       console.log(" \u2705 Gateway installed as background service");
+       console.log(` LaunchAgent: ${PLIST_PATH}`);
+       console.log(` Logs: ${SM_GATEWAY_LOG}`);
+     } else {
+       // Non-macOS: start gateway in foreground
+       console.log(" Starting gateway in foreground...");
+       process.env.STACKMETER_URL = apiUrl;
+       process.env.STACKMETER_TOKEN = token;
+       if (!process.env.OPENAI_API_KEY)
+         process.env.OPENAI_API_KEY = existingApiKey;
+
+       rl.close();
+       const { startGateway } = await import("../src/gateway/server.mjs");
+       await startGateway();
+       return;
+     }
+
+     console.log("");
+     console.log(" \u2705 Done! OpenClaw is connected to StackMeter.");
+     console.log("");
+     console.log(
+       " Just run OpenClaw normally \u2014 no extra flags needed."
+     );
+     console.log(` View your usage at ${baseUrl}/app/ai-usage`);
+     console.log("");
+     rl.close();
+   } else {
+     // ── No --auto: Print manual steps ────────────────
+     console.log(" \u2705 Token is valid. Follow these steps to complete setup:");
+     console.log("");
+     console.log(` 1. Edit ${OC_CONFIG_PATH}`);
+     console.log(
+       ' Set models.providers.openai.baseUrl to "http://127.0.0.1:8787/v1"'
+     );
+     console.log("");
+     console.log(" 2. Set env vars:");
+     console.log(` export STACKMETER_URL="${apiUrl}"`);
+     console.log(` export STACKMETER_TOKEN="${token}"`);
+     if (existingApiKey) {
+       console.log(
+         ` export OPENAI_API_KEY="${redactKey(existingApiKey)}"`
+       );
+     } else {
+       console.log(' export OPENAI_API_KEY="sk-..." # your OpenAI key');
+     }
+     console.log("");
+     console.log(" 3. Start the gateway:");
+     console.log(" npx @stackmeter/cli gateway");
+     console.log("");
+     console.log(
+       " 4. Start OpenClaw \u2014 traffic will flow through the gateway."
+     );
+     console.log("");
+     rl.close();
+   }
+ }
+
+ // ── disconnect openclaw ──────────────────────────────────
+
+ async function disconnectOpenClaw() {
+   rl.close();
+   const isMac = platform() === "darwin";
+
+   console.log("");
+   console.log(" \u{1F50C} StackMeter \u2014 Disconnect OpenClaw");
+   console.log("");
+
+   // ── Stop and remove LaunchAgent ────────────────────
+   if (isMac) {
+     unloadLaunchAgent();
+     console.log(" \u2705 Gateway service stopped and removed");
+   }
+
+   // ── Restore openclaw.json from backup ──────────────
+   if (existsSync(OC_BACKUP_PATH)) {
+     copyFileSync(OC_BACKUP_PATH, OC_CONFIG_PATH);
+     unlinkSync(OC_BACKUP_PATH);
+     console.log(` \u2705 Restored ${OC_CONFIG_PATH} from backup`);
+   } else {
+     // No backup — just remove the baseUrl we set
+     const { config: ocConfig } = readOpenClawConfig();
+     if (ocConfig?.models?.providers?.openai?.baseUrl?.includes("127.0.0.1")) {
+       delete ocConfig.models.providers.openai.baseUrl;
+       writeFileSync(OC_CONFIG_PATH, JSON.stringify(ocConfig, null, 2) + "\n");
+       console.log(` \u2705 Removed gateway baseUrl from ${OC_CONFIG_PATH}`);
+     }
+   }
+
+   // ── Remove StackMeter config ───────────────────────
+   if (existsSync(SM_CONFIG_PATH)) {
+     unlinkSync(SM_CONFIG_PATH);
+     console.log(` \u2705 Removed ${SM_CONFIG_PATH}`);
+   }
+
+   // ── Clean up gateway files ─────────────────────────
+   for (const f of [SM_GATEWAY_PATH, SM_LAUNCHER_PATH]) {
+     if (existsSync(f)) unlinkSync(f);
+   }
+
+   console.log("");
+   console.log(" \u2705 OpenClaw disconnected from StackMeter.");
+   console.log(" OpenClaw will now talk directly to OpenAI.");
+   console.log("");
+ }
+
+ // ── gateway ──────────────────────────────────────────────
+
+ async function startGatewayCommand() {
+   const { startGateway } = await import("../src/gateway/server.mjs");
+   await startGateway();
+ }
+
+ // ── gateway --self-test ──────────────────────────────────
+
+ async function selfTest() {
+   rl.close();
+
+   // Read config from file or env
+   const smConfig = readStackMeterConfig();
+   const openaiKey =
+     process.env.OPENAI_API_KEY ||
+     (() => {
+       const { config } = readOpenClawConfig();
+       return config?.models?.providers?.openai?.apiKey;
+     })();
+   const smUrl = process.env.STACKMETER_URL || smConfig.url;
+   const smToken = process.env.STACKMETER_TOKEN || smConfig.token;
+
+   const missing = [];
+   if (!openaiKey) missing.push("OPENAI_API_KEY");
+   if (!smUrl) missing.push("STACKMETER_URL");
+   if (!smToken) missing.push("STACKMETER_TOKEN");
+   if (missing.length) {
+     console.log("");
+     console.log(` \u274C Missing: ${missing.join(", ")}`);
+     console.log(" Run: npx @stackmeter/cli connect openclaw --auto");
+     console.log(
+       " or set them via env vars / ~/.stackmeter/config.json"
+     );
+     console.log("");
+     process.exit(1);
+   }
+
+   // Set env vars so gateway picks them up
+   if (!process.env.OPENAI_API_KEY) process.env.OPENAI_API_KEY = openaiKey;
+   if (!process.env.STACKMETER_URL) process.env.STACKMETER_URL = smUrl;
+   if (!process.env.STACKMETER_TOKEN) process.env.STACKMETER_TOKEN = smToken;
+
+   console.log("");
+   console.log(" \u{1F9EA} StackMeter Gateway \u2014 Self-test");
+   console.log("");
+
+   const { startGateway } = await import("../src/gateway/server.mjs");
+
+   let emitStatus = 0;
+   const server = await startGateway({
+     port: 0, // OS-assigned port
+     onEmit: (status) => {
+       emitStatus = status;
+     },
+   });
+
+   const addr = server.address();
+   const gwUrl = `http://127.0.0.1:${addr.port}`;
+
+   // 1/3 — Health check
+   console.log(" 1/3 Health check...");
+   const healthRes = await fetch(`${gwUrl}/health`);
+   if (!healthRes.ok) {
+     console.log(" \u274C Health check failed");
+     server.close();
+     process.exit(1);
+   }
+   console.log(" \u2705 Gateway healthy");
+
+   // 2/3 — Send minimal completion through gateway
+   console.log(
+     " 2/3 Sending test completion (gpt-4o-mini, max_tokens=1)..."
+   );
+   try {
+     const compRes = await fetch(`${gwUrl}/v1/chat/completions`, {
+       method: "POST",
+       headers: { "Content-Type": "application/json" },
+       body: JSON.stringify({
+         model: "gpt-4o-mini",
+         messages: [{ role: "user", content: "Hi" }],
+         max_tokens: 1,
+       }),
+     });
+
+     if (!compRes.ok) {
+       const err = await compRes.text().catch(() => "");
+       console.log(
+         ` \u274C OpenAI returned ${compRes.status}: ${err.slice(0, 200)}`
+       );
+       server.close();
+       process.exit(1);
+     }
+
+     const result = await compRes.json();
+     console.log(
+       ` \u2705 OpenAI response: ${result.usage?.total_tokens ?? "?"} tokens`
+     );
+   } catch (e) {
+     console.log(` \u274C Request failed: ${e.message}`);
+     server.close();
+     process.exit(1);
+   }
+
+   // 3/3 — Wait for async StackMeter emit
+   console.log(" 3/3 Waiting for StackMeter ingestion...");
+   await new Promise((r) => setTimeout(r, 2000));
+
+   if (emitStatus >= 200 && emitStatus < 300) {
+     console.log(` \u2705 StackMeter ingestion returned ${emitStatus}`);
+   } else if (emitStatus > 0) {
+     console.log(
+       ` \u26A0\uFE0F StackMeter ingestion returned ${emitStatus}`
+     );
+   } else {
+     console.log(" \u26A0\uFE0F Could not confirm StackMeter ingestion");
+   }
+
+   console.log("");
+   console.log(" Self-test complete.");
+   console.log("");
+   server.close();
+   process.exit(0);
+ }
package/package.json ADDED
@@ -0,0 +1,32 @@
+ {
+   "name": "@stackmeter/cli",
+   "version": "0.1.0",
+   "description": "Track your SaaS AI costs from the terminal. One-command OpenClaw setup.",
+   "type": "module",
+   "bin": {
+     "stackmeter": "./bin/stackmeter.mjs"
+   },
+   "files": [
+     "bin/",
+     "src/gateway/"
+   ],
+   "engines": {
+     "node": ">=18"
+   },
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/stackmeter/stackmeter"
+   },
+   "keywords": [
+     "ai",
+     "usage",
+     "tracking",
+     "openai",
+     "openclaw",
+     "saas",
+     "cost",
+     "llm"
+   ],
+   "license": "MIT",
+   "homepage": "https://stackmeter.app"
+ }
package/src/gateway/server.mjs ADDED
@@ -0,0 +1,298 @@
+ // StackMeter Gateway — OpenAI-compatible proxy with usage tracking.
+ // Does NOT store prompts or responses. Only token counts + metadata.
+
+ import { createServer } from "node:http";
+ import { request as httpsRequest } from "node:https";
+ import { readFileSync, existsSync } from "node:fs";
+ import { join } from "node:path";
+ import { homedir } from "node:os";
+
+ const OPENAI_BASE_URL = "https://api.openai.com";
+ const DEFAULT_PORT = 8787;
+
+ // Cost per 1M tokens in cents (best-effort estimates)
+ const MODEL_PRICING = {
+   "gpt-4o": { input: 250, output: 1000 },
+   "gpt-4o-mini": { input: 15, output: 60 },
+   "gpt-4-turbo": { input: 1000, output: 3000 },
+   "gpt-4": { input: 3000, output: 6000 },
+   "gpt-3.5-turbo": { input: 50, output: 150 },
+   "o1": { input: 1500, output: 6000 },
+   "o1-mini": { input: 300, output: 1200 },
+   "o3-mini": { input: 110, output: 440 },
+ };
+
+ /* ── Config file readers (fallback when env vars not set) ── */
+
+ function readStackMeterConfig() {
+   const p = join(homedir(), ".stackmeter", "config.json");
+   if (!existsSync(p)) return {};
+   try { return JSON.parse(readFileSync(p, "utf-8")); }
+   catch { return {}; }
+ }
+
+ function readOpenClawApiKey() {
+   const p = join(homedir(), ".openclaw", "openclaw.json");
+   if (!existsSync(p)) return null;
+   try {
+     const c = JSON.parse(readFileSync(p, "utf-8"));
+     return c?.models?.providers?.openai?.apiKey || null;
+   } catch { return null; }
+ }
+
+ /* ── Pricing + usage helpers ──────────────────────────────── */
+
+ function estimateCostCents(model, inputTokens, outputTokens) {
+   let pricing = null;
+   for (const [key, val] of Object.entries(MODEL_PRICING)) {
+     if (model === key || model.startsWith(key + "-")) {
+       pricing = val;
+       break;
+     }
+   }
+   if (!pricing) return 0;
+   return Math.round(
+     (inputTokens * pricing.input + outputTokens * pricing.output) / 1_000_000
+   );
+ }
+
+ async function emitUsageEvent(usage, model, smUrl, smToken) {
+   // Handle both Chat Completions (prompt_tokens) and Responses API (input_tokens)
+   const {
+     prompt_tokens = 0, completion_tokens = 0,
+     input_tokens = 0, output_tokens = 0,
+   } = usage;
+   const inTok = prompt_tokens || input_tokens;
+   const outTok = completion_tokens || output_tokens;
+   const costCents = estimateCostCents(model, inTok, outTok);
+
+   const event = {
+     provider: "openai",
+     model,
+     inputTokens: inTok,
+     outputTokens: outTok,
+     costCents,
+     sourceType: "openclaw",
+     sourceId: "openclaw",
+     ts: new Date().toISOString(),
+     dedupKey: `gw-${Date.now()}-${Math.random().toString(36).slice(2, 10)}`,
+   };
+
+   try {
+     const res = await fetch(smUrl, {
+       method: "POST",
+       headers: {
+         Authorization: `Bearer ${smToken}`,
+         "Content-Type": "application/json",
+       },
+       body: JSON.stringify(event),
+     });
+
+     if (res.ok) {
+       console.log(
+         ` [stackmeter] ${model} ${inTok}+${outTok} tokens \u2192 ${costCents}\u00A2 (${res.status})`
+       );
+     } else {
+       const err = await res.text().catch(() => "");
+       console.error(` [stackmeter] emit failed (${res.status}): ${err}`);
+     }
+     return res.status;
+   } catch (err) {
+     console.error(` [stackmeter] emit error: ${err.message}`);
+     return 0;
+   }
+ }
+
+ function extractUsageFromSSE(sseBuffer, smUrl, smToken) {
+   const lines = sseBuffer.split("\n");
+   let lastModel = "unknown";
+   let lastUsage = null;
+
+   for (const line of lines) {
+     if (!line.startsWith("data: ") || line === "data: [DONE]") continue;
+     try {
+       const data = JSON.parse(line.slice(6));
+       if (data.model) lastModel = data.model;
+       // Chat Completions format
+       if (data.usage) lastUsage = data.usage;
+       // Responses API streaming format (usage inside response object)
+       if (data.response?.usage) lastUsage = data.response.usage;
+       if (data.response?.model) lastModel = data.response.model;
+     } catch {}
+   }
+
+   if (lastUsage) {
+     return emitUsageEvent(lastUsage, lastModel, smUrl, smToken);
+   }
+   return null;
+ }
+
+ /* ── Gateway server ───────────────────────────────────────── */
+
+ export function startGateway(opts = {}) {
+   const smConfig = readStackMeterConfig();
+
+   const port =
+     opts.port ??
+     parseInt(
+       process.env.STACKMETER_GATEWAY_PORT ||
+         String(smConfig.gatewayPort || DEFAULT_PORT),
+       10
+     );
+
+   const openaiKey = process.env.OPENAI_API_KEY || readOpenClawApiKey();
+   const smUrl = process.env.STACKMETER_URL || smConfig.url;
+   const smToken = process.env.STACKMETER_TOKEN || smConfig.token;
+   const onEmit = opts.onEmit;
+
+   if (!openaiKey) {
+     console.error(" Error: OPENAI_API_KEY is required.");
+     console.error(" Set it in ~/.openclaw/openclaw.json (models.providers.openai.apiKey)");
+     console.error(" or: export OPENAI_API_KEY=sk-...");
+     process.exit(1);
+   }
+   if (!smUrl) {
+     console.error(" Error: STACKMETER_URL is required.");
+     console.error(" Run: npx @stackmeter/cli connect openclaw --auto");
+     console.error(" or set it in ~/.stackmeter/config.json");
+     process.exit(1);
+   }
+   if (!smToken) {
+     console.error(" Error: STACKMETER_TOKEN is required.");
+     console.error(" Run: npx @stackmeter/cli connect openclaw --auto");
+     console.error(" or set it in ~/.stackmeter/config.json");
+     process.exit(1);
+   }
+
+   const server = createServer(async (req, res) => {
+     // Health check
+     if (req.method === "GET" && req.url === "/health") {
+       res.writeHead(200, { "Content-Type": "application/json" });
+       return res.end('{"ok":true}');
+     }
+
+     // Only proxy /v1/* paths
+     if (!req.url?.startsWith("/v1/")) {
+       res.writeHead(404, { "Content-Type": "application/json" });
+       return res.end('{"error":"Not found"}');
+     }
+
+     const isTracked =
+       req.method === "POST" &&
+       (req.url === "/v1/chat/completions" || req.url === "/v1/responses");
+
+     // Read body for methods that have one
+     let body = Buffer.alloc(0);
+     if (req.method !== "GET" && req.method !== "HEAD") {
+       const chunks = [];
+       for await (const chunk of req) chunks.push(chunk);
+       body = Buffer.concat(chunks);
+     }
+
+     // If tracked + streaming Chat Completions, inject stream_options.include_usage
+     let isStreaming = false;
+     if (isTracked && body.length > 0) {
+       try {
+         const parsed = JSON.parse(body.toString());
+         isStreaming = parsed.stream === true;
+         if (isStreaming && req.url === "/v1/chat/completions") {
+           parsed.stream_options = {
+             ...(parsed.stream_options || {}),
+             include_usage: true,
+           };
+           body = Buffer.from(JSON.stringify(parsed));
+         }
+       } catch {}
+     }
+
+     // Build upstream request
+     const targetUrl = new URL(req.url, OPENAI_BASE_URL);
+     const upHeaders = { Authorization: `Bearer ${openaiKey}` };
+     if (body.length > 0) {
+       upHeaders["Content-Type"] =
+         req.headers["content-type"] || "application/json";
+       upHeaders["Content-Length"] = String(body.length);
+     }
+
+     const upReq = httpsRequest(
+       targetUrl,
+       { method: req.method, headers: upHeaders },
+       (upRes) => {
+         // Forward response headers (skip hop-by-hop)
+         const fwd = {};
+         for (const [k, v] of Object.entries(upRes.headers)) {
+           if (!["transfer-encoding", "connection", "keep-alive"].includes(k)) {
+             fwd[k] = v;
+           }
+         }
+         res.writeHead(upRes.statusCode, fwd);
+
+         // Non-tracked or error: pipe through
+         if (!isTracked || upRes.statusCode >= 400) {
+           upRes.pipe(res);
+           return;
+         }
+
+         if (isStreaming) {
+           // Stream through, buffer SSE to extract usage from final chunk
+           let buf = "";
+           upRes.on("data", (c) => {
+             res.write(c);
+             buf += c.toString();
+           });
+           upRes.on("end", () => {
+             res.end();
+             const p = extractUsageFromSSE(buf, smUrl, smToken);
+             if (p && onEmit) p.then(onEmit);
+           });
+         } else {
+           // Buffer full response for usage extraction
+           const parts = [];
+           upRes.on("data", (c) => {
+             parts.push(c);
+             res.write(c);
+           });
+           upRes.on("end", () => {
+             res.end();
+             try {
+               const data = JSON.parse(Buffer.concat(parts).toString());
+               // Handle both Chat Completions and Responses API formats
+               const usage = data.usage || data.response?.usage;
+               const model = data.model || data.response?.model || "unknown";
+               if (usage) {
+                 const p = emitUsageEvent(usage, model, smUrl, smToken);
+                 if (onEmit) p.then(onEmit);
+               }
+             } catch {}
+           });
+         }
+       }
+     );
+
+     upReq.on("error", (err) => {
+       if (!res.headersSent) {
+         res.writeHead(502, { "Content-Type": "application/json" });
+         res.end(
+           JSON.stringify({ error: "Gateway error", detail: err.message })
+         );
+       }
+     });
+
+     if (body.length > 0) upReq.write(body);
+     upReq.end();
+   });
+
+   return new Promise((resolve) => {
+     server.listen(port, "127.0.0.1", () => {
+       const addr = server.address();
+       console.log("");
+       console.log(" StackMeter Gateway");
+       console.log(` Listening: http://127.0.0.1:${addr.port}`);
+       console.log(` Proxying: ${OPENAI_BASE_URL}`);
+       console.log(` Reporting: ${smUrl}`);
+       console.log(` API key: ...${openaiKey.slice(-4)}`);
+       console.log("");
+       resolve(server);
+     });
+   });
+ }