storyforge 0.4.11 → 0.4.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -134,7 +134,14 @@ var BridgePoller = class {
134
134
  let lastErr = "";
135
135
  for (const m of candidates) {
136
136
  try {
137
- const args = cli === "codex" ? ["exec", "-", "--model", m] : ["-p", "--model", m, "--no-session-persistence"];
137
+ const args = cli === "codex" ? ["exec", "-", "--model", m, "--search"] : [
138
+ "-p",
139
+ "--model",
140
+ m,
141
+ "--no-session-persistence",
142
+ "--allowedTools",
143
+ "WebSearch,WebFetch"
144
+ ];
138
145
  const text = await runSpawn(cli, args, prompt, CLI_TIMEOUT_MS);
139
146
  if (text.trim()) return { text, modelUsed: `${cli}:${m}` };
140
147
  lastErr = `${cli}/${m} returned empty stdout`;
@@ -0,0 +1,208 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/cli-usage.ts
4
+ import * as fs from "fs";
5
+ import * as path from "path";
6
+ import * as os from "os";
7
// Price table: USD per million tokens, keyed by lowercase model id.
// cacheRead/cacheWrite are omitted for providers that do not bill cached
// tokens separately (treated as 0 in recompute()).
var PRICES = {
  // Anthropic — per claude.com/pricing (April 2026)
  "claude-opus-4-7": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
  "claude-opus-4-6": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
  "claude-opus-4-5": { input: 5, output: 25, cacheRead: 0.5, cacheWrite: 6.25 },
  "claude-sonnet-4-6": { input: 3, output: 15, cacheRead: 0.3, cacheWrite: 3.75 },
  "claude-sonnet-4-5": { input: 3, output: 15, cacheRead: 0.3, cacheWrite: 3.75 },
  "claude-haiku-4-5": { input: 1, output: 5, cacheRead: 0.1, cacheWrite: 1.25 },
  // OpenAI — per platform.openai.com/pricing (April 2026)
  "gpt-5.5": { input: 5, output: 30 },
  "gpt-5.4": { input: 2.5, output: 15 },
  "gpt-5.4-mini": { input: 0.75, output: 4.5 },
  "gpt-5.4-nano": { input: 0.2, output: 1.25 },
  "gpt-5": { input: 2.5, output: 15 },
  "gpt-5-codex": { input: 2.5, output: 15 },
  // alias of gpt-5
  "gpt-4o": { input: 2.5, output: 10 }
};
/**
 * Resolve the price entry for a model id (case-insensitive).
 *
 * Tries an exact key match first, then falls back to the LONGEST matching
 * key prefix. Longest-match (rather than first-match in object key order)
 * is required so a dated variant such as "gpt-5.4-mini-2026-01-01" resolves
 * to the "gpt-5.4-mini" entry instead of the shorter "gpt-5.4" one.
 *
 * @param {string} model - model id as reported in a session log
 * @returns {{input: number, output: number, cacheRead?: number, cacheWrite?: number} | null}
 *   the price entry, or null when the model is unknown
 */
function priceFor(model) {
  const lower = model.toLowerCase();
  if (PRICES[lower]) return PRICES[lower];
  let best = null;
  for (const key of Object.keys(PRICES)) {
    if (lower.startsWith(key) && (best === null || key.length > best.length)) {
      best = key;
    }
  }
  return best === null ? null : PRICES[best];
}
33
/** Fresh zeroed usage row for one model. */
function emptyModelUsage(model) {
  return {
    model,
    inputTokens: 0,
    cachedReadTokens: 0,
    cacheCreateTokens: 0,
    outputTokens: 0,
    costUsd: 0
  };
}
/**
 * Accumulate a usage delta into `target[model]`, creating the row on first
 * sight of the model. Absent/zero delta fields leave the row untouched.
 */
function addUsage(target, model, delta) {
  const row = target[model] ?? (target[model] = emptyModelUsage(model));
  for (const field of ["inputTokens", "cachedReadTokens", "cacheCreateTokens", "outputTokens"]) {
    const amount = delta[field];
    if (amount) row[field] += amount;
  }
}
43
/**
 * Return a copy of `usage` with costUsd recomputed from the price table.
 * Unknown models (no price entry) are reported with a cost of zero.
 */
function recompute(usage) {
  const price = priceFor(usage.model);
  let costUsd = 0;
  if (price) {
    // Rates are USD per million tokens; cache rates may be absent.
    const perMillion = (tokens, rate) => (rate == null ? 0 : tokens / 1e6 * rate);
    costUsd =
      perMillion(usage.inputTokens, price.input) +
      perMillion(usage.cachedReadTokens, price.cacheRead) +
      perMillion(usage.cacheCreateTokens, price.cacheWrite) +
      perMillion(usage.outputTokens, price.output);
  }
  return { ...usage, costUsd };
}
52
/**
 * Collect every *.jsonl file under `rootDir`, depth-first (iterative, so
 * arbitrarily deep trees cannot overflow the call stack). A missing root
 * yields an empty list; unreadable subdirectories are skipped silently.
 */
function listJsonlFiles(rootDir) {
  const found = [];
  if (!fs.existsSync(rootDir)) return found;
  const pending = [rootDir];
  while (pending.length > 0) {
    const dir = pending.pop();
    let entries;
    try {
      entries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      continue; // best-effort scan: permission errors just skip the directory
    }
    for (const entry of entries) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        pending.push(full);
      } else if (entry.isFile() && entry.name.endsWith(".jsonl")) {
        found.push(full);
      }
    }
  }
  return found;
}
72
/**
 * Extract per-message token usage from a Claude Code transcript (JSONL).
 * Unreadable files, blank lines, malformed JSON, messages without usage
 * or model, and unparseable timestamps all yield no entry.
 */
function parseClaudeFile(file) {
  let text;
  try {
    text = fs.readFileSync(file, "utf-8");
  } catch {
    return [];
  }
  // Convert one transcript line into a usage entry, or null if unusable.
  const toEntry = (line) => {
    let record;
    try {
      record = JSON.parse(line);
    } catch {
      return null;
    }
    const usage = record?.message?.usage;
    if (!usage) return null;
    const model = String(record?.message?.model ?? "").toLowerCase();
    if (!model) return null;
    const rawTs = record?.timestamp ?? record?.ts;
    const ts = rawTs ? new Date(rawTs).getTime() : Date.now();
    if (!Number.isFinite(ts)) return null;
    return {
      ts,
      model,
      delta: {
        inputTokens: usage.input_tokens ?? 0,
        cachedReadTokens: usage.cache_read_input_tokens ?? 0,
        cacheCreateTokens: usage.cache_creation_input_tokens ?? 0,
        outputTokens: usage.output_tokens ?? 0
      }
    };
  };
  const entries = [];
  for (const line of text.split(/\r?\n/)) {
    if (!line.trim()) continue;
    const entry = toEntry(line);
    if (entry) entries.push(entry);
  }
  return entries;
}
108
/**
 * Extract token usage from a Codex CLI session log (JSONL). Codex emits
 * CUMULATIVE counters in "token_count" events, so consecutive events are
 * differenced (clamped at 0 in case counters reset). A model hint seen on
 * any line applies to that and all later events; until one appears the
 * model defaults to "gpt-5".
 */
function parseCodexFile(file) {
  let text;
  try {
    text = fs.readFileSync(file, "utf-8");
  } catch {
    return [];
  }
  const entries = [];
  let last = { input: 0, cachedInput: 0, output: 0, total: 0 };
  let model = "gpt-5";
  for (const line of text.split(/\r?\n/)) {
    if (!line.trim()) continue;
    let record;
    try {
      record = JSON.parse(line);
    } catch {
      continue;
    }
    // Model hints can arrive on non-usage lines; update before filtering.
    const hint = record?.event_msg?.payload?.turn_context?.model ?? record?.payload?.turn_context?.model ?? record?.message?.model;
    if (typeof hint === "string" && hint) model = hint.toLowerCase();
    const payload = record?.event_msg?.payload ?? record?.payload;
    if (!payload || payload.type !== "token_count") continue;
    const counters = {
      input: payload.input_tokens ?? 0,
      cachedInput: payload.cached_input_tokens ?? 0,
      output: payload.output_tokens ?? 0,
      total: payload.total_tokens ?? 0
    };
    const delta = {
      inputTokens: Math.max(0, counters.input - last.input),
      cachedReadTokens: Math.max(0, counters.cachedInput - last.cachedInput),
      outputTokens: Math.max(0, counters.output - last.output)
    };
    // Advance the baseline even if the timestamp below turns out invalid,
    // so the next event's delta stays relative to the latest counters.
    last = counters;
    const rawTs = record?.timestamp ?? record?.event_msg?.timestamp ?? record?.event_msg?.event_ts;
    const ts = rawTs ? new Date(rawTs).getTime() : Date.now();
    if (Number.isFinite(ts)) entries.push({ ts, model, delta });
  }
  return entries;
}
151
/** Epoch ms of local-time midnight for the day containing `now`. */
function startOfTodayMs(now = Date.now()) {
  const when = new Date(now);
  // Year/month/day constructor yields local midnight of the same day.
  return new Date(when.getFullYear(), when.getMonth(), when.getDate()).getTime();
}
156
/**
 * Scan local Claude Code (~/.claude/projects) and Codex (~/.codex/sessions)
 * session logs and aggregate token usage into three windows: trailing 24h,
 * since local midnight ("today"), and lifetime.
 *
 * Returns { generatedAt, windows, totals, sources } where each window is a
 * list of per-model usage rows with estimated cost (sorted by cost, highest
 * first) and `sources.skipped` counts files that produced no usable entries
 * (unreadable, empty, or fully malformed).
 *
 * NOTE(review): declared async but performs only synchronous fs work —
 * presumably kept async for a stable caller-facing contract; confirm.
 */
async function gatherCliUsage() {
  const home = os.homedir();
  const claudeDir = path.join(home, ".claude", "projects");
  const codexDir = path.join(home, ".codex", "sessions");
  const claudeFiles = listJsonlFiles(claudeDir);
  const codexFiles = listJsonlFiles(codexDir);
  let skipped = 0;
  // Flat list of {ts, model, delta} entries from every log file.
  const lines = [];
  for (const f of claudeFiles) {
    const parsed = parseClaudeFile(f);
    if (parsed.length === 0) skipped++;
    else lines.push(...parsed);
  }
  for (const f of codexFiles) {
    const parsed = parseCodexFile(f);
    if (parsed.length === 0) skipped++;
    else lines.push(...parsed);
  }
  const now = Date.now();
  const cutoff24h = now - 24 * 60 * 60 * 1e3;
  const startToday = startOfTodayMs(now);
  // Per-window accumulators keyed by model id.
  const buckets = {
    last24h: {},
    today: {},
    lifetime: {}
  };
  for (const ln of lines) {
    addUsage(buckets.lifetime, ln.model, ln.delta);
    // Windows overlap: an entry from today also counts toward last24h.
    if (ln.ts >= cutoff24h) addUsage(buckets.last24h, ln.model, ln.delta);
    if (ln.ts >= startToday) addUsage(buckets.today, ln.model, ln.delta);
  }
  // Price each row and sort most expensive first.
  function finalise(rec) {
    return Object.values(rec).map(recompute).sort((a, b) => b.costUsd - a.costUsd);
  }
  const last24h = finalise(buckets.last24h);
  const today = finalise(buckets.today);
  const lifetime = finalise(buckets.lifetime);
  // Window-level totals: all token kinds combined, plus summed cost.
  function sum(rows) {
    return {
      tokens: rows.reduce((s, r) => s + r.inputTokens + r.cachedReadTokens + r.cacheCreateTokens + r.outputTokens, 0),
      costUsd: rows.reduce((s, r) => s + r.costUsd, 0)
    };
  }
  return {
    generatedAt: new Date(now).toISOString(),
    windows: { last24h, today, lifetime },
    totals: { last24h: sum(last24h), today: sum(today), lifetime: sum(lifetime) },
    sources: { claudeFiles: claudeFiles.length, codexFiles: codexFiles.length, skipped }
  };
}
export {
  gatherCliUsage
};
package/dist/index.js CHANGED
@@ -864,6 +864,18 @@ async function devCommand(options) {
864
864
  }
865
865
  const url = new URL(req.url || "/", `http://localhost:${port}`);
866
866
  const pathname = url.pathname;
867
+ if (pathname === "/api/cli-usage") {
868
+ try {
869
+ const { gatherCliUsage } = await import("./cli-usage-CP4SX57J.js");
870
+ const report = await gatherCliUsage();
871
+ res.writeHead(200, { ...CORS_HEADERS, "Content-Type": "application/json" });
872
+ res.end(JSON.stringify(report));
873
+ } catch (err) {
874
+ res.writeHead(500, { ...CORS_HEADERS, "Content-Type": "application/json" });
875
+ res.end(JSON.stringify({ error: err.message }));
876
+ }
877
+ return;
878
+ }
867
879
  if (pathname === "/api/health") {
868
880
  const probeCli = (binary) => {
869
881
  try {
@@ -1065,7 +1077,14 @@ async function devCommand(options) {
1065
1077
  }
1066
1078
  try {
1067
1079
  log.info(`[script-gen] Generating via ${cli} CLI \xB7 model=${model}`);
1068
- const args = cli === "codex" ? ["exec", "-", "--model", model] : ["-p", "--model", model, "--no-session-persistence"];
1080
+ const args = cli === "codex" ? ["exec", "-", "--model", model, "--search"] : [
1081
+ "-p",
1082
+ "--model",
1083
+ model,
1084
+ "--no-session-persistence",
1085
+ "--allowedTools",
1086
+ "WebSearch,WebFetch"
1087
+ ];
1069
1088
  const timeoutMs = Number(process.env.SCRIPT_GEN_TIMEOUT_MS) || 12e5;
1070
1089
  const { stdout, code, stderr } = await runCliPipingStdin(cli, args, prompt2, {
1071
1090
  timeoutMs,
@@ -1577,7 +1596,7 @@ Return ONLY the complete updated TSX. No markdown fences, no explanation.`;
1577
1596
  return "0.0.0";
1578
1597
  })();
1579
1598
  void (async () => {
1580
- const { BridgePoller } = await import("./bridge-poller-IRROEZID.js");
1599
+ const { BridgePoller } = await import("./bridge-poller-YWYLEYOD.js");
1581
1600
  const poller = new BridgePoller({ baseUrl: bridgeUrl, token: bridgeToken, clientVersion: `storyforge ${pkgVersion}` });
1582
1601
  poller.start();
1583
1602
  })();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "storyforge",
3
- "version": "0.4.11",
3
+ "version": "0.4.13",
4
4
  "description": "StoryForge — local bridge for the Forge video production web app. Zero runtime dependencies.",
5
5
  "type": "module",
6
6
  "bin": {