ai-speedometer 2.1.6 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/ai-speedometer +210 -47
  2. package/package.json +1 -1
@@ -1712,7 +1712,7 @@ var exports_benchmark = {};
1712
1712
  __export(exports_benchmark, {
1713
1713
  benchmarkSingleModelRest: () => benchmarkSingleModelRest
1714
1714
  });
1715
- async function benchmarkSingleModelRest(model) {
1715
+ async function benchmarkSingleModelRest(model, logger) {
1716
1716
  try {
1717
1717
  if (!model.providerConfig || !model.providerConfig.apiKey) {
1718
1718
  throw new Error(`Missing API key for provider ${model.providerName}`);
@@ -1729,6 +1729,7 @@ async function benchmarkSingleModelRest(model) {
1729
1729
  actualModelId = model.name;
1730
1730
  }
1731
1731
  actualModelId = actualModelId.trim();
1732
+ await logger?.logHeader(model.name, model.providerName, model.providerConfig.apiKey);
1732
1733
  const startTime = Date.now();
1733
1734
  let firstTokenTime = null;
1734
1735
  let streamedText = "";
@@ -1801,33 +1802,36 @@ async function benchmarkSingleModelRest(model) {
1801
1802
  const reader = response.body.getReader();
1802
1803
  const decoder = new TextDecoder;
1803
1804
  let buffer = "";
1804
- let isFirstChunk = true;
1805
+ let firstParsedTokenTime = null;
1805
1806
  while (true) {
1806
1807
  const { done, value } = await reader.read();
1807
1808
  if (done)
1808
1809
  break;
1809
- if (isFirstChunk && !firstTokenTime) {
1810
+ if (!firstTokenTime)
1810
1811
  firstTokenTime = Date.now();
1811
- isFirstChunk = false;
1812
- }
1813
1812
  buffer += decoder.decode(value, { stream: true });
1814
1813
  const lines = buffer.split(`
1815
1814
  `);
1816
1815
  buffer = lines.pop() || "";
1817
1816
  for (const line of lines) {
1818
1817
  const trimmedLine = line.trim();
1818
+ if (trimmedLine)
1819
+ await logger?.logRaw(trimmedLine);
1819
1820
  if (!trimmedLine)
1820
1821
  continue;
1821
1822
  try {
1822
1823
  if (model.providerType === "anthropic") {
1823
- if (trimmedLine.startsWith("data: ")) {
1824
- const jsonStr = trimmedLine.slice(6);
1824
+ const anthropicDataPrefix = trimmedLine.startsWith("data: ") ? 6 : trimmedLine.startsWith("data:") ? 5 : -1;
1825
+ if (anthropicDataPrefix !== -1) {
1826
+ const jsonStr = trimmedLine.slice(anthropicDataPrefix);
1825
1827
  if (jsonStr === "[DONE]")
1826
1828
  continue;
1827
1829
  const chunk = JSON.parse(jsonStr);
1828
1830
  const chunkTyped = chunk;
1829
- if (chunkTyped.type === "content_block_delta" && chunkTyped.delta?.text) {
1830
- streamedText += chunkTyped.delta.text;
1831
+ if (chunkTyped.type === "content_block_delta" && (chunkTyped.delta?.text || chunkTyped.delta?.thinking)) {
1832
+ if (!firstParsedTokenTime)
1833
+ firstParsedTokenTime = Date.now();
1834
+ streamedText += chunkTyped.delta?.text || chunkTyped.delta?.thinking || "";
1831
1835
  } else if (chunkTyped.type === "message_start" && chunkTyped.message?.usage) {
1832
1836
  inputTokens = chunkTyped.message.usage.input_tokens || 0;
1833
1837
  } else if (chunkTyped.type === "message_delta") {
@@ -1840,8 +1844,10 @@ async function benchmarkSingleModelRest(model) {
1840
1844
  continue;
1841
1845
  } else {
1842
1846
  const chunk = JSON.parse(trimmedLine);
1843
- if (chunk.type === "content_block_delta" && chunk.delta?.text) {
1844
- streamedText += chunk.delta.text;
1847
+ if (chunk.type === "content_block_delta" && (chunk.delta?.text || chunk.delta?.thinking)) {
1848
+ if (!firstParsedTokenTime)
1849
+ firstParsedTokenTime = Date.now();
1850
+ streamedText += chunk.delta?.text || chunk.delta?.thinking || "";
1845
1851
  } else if (chunk.type === "message_start" && chunk.message?.usage) {
1846
1852
  inputTokens = chunk.message.usage.input_tokens || 0;
1847
1853
  } else if (chunk.type === "message_delta") {
@@ -1854,6 +1860,8 @@ async function benchmarkSingleModelRest(model) {
1854
1860
  } else if (model.providerType === "google") {
1855
1861
  const chunk = JSON.parse(trimmedLine);
1856
1862
  if (chunk.candidates?.[0]?.content?.parts?.[0]?.text) {
1863
+ if (!firstParsedTokenTime)
1864
+ firstParsedTokenTime = Date.now();
1857
1865
  streamedText += chunk.candidates[0].content.parts[0].text;
1858
1866
  }
1859
1867
  if (chunk.usageMetadata?.promptTokenCount)
@@ -1861,19 +1869,42 @@ async function benchmarkSingleModelRest(model) {
1861
1869
  if (chunk.usageMetadata?.candidatesTokenCount)
1862
1870
  outputTokens = chunk.usageMetadata.candidatesTokenCount;
1863
1871
  } else {
1864
- if (trimmedLine.startsWith("data: ")) {
1865
- const jsonStr = trimmedLine.slice(6);
1866
- if (jsonStr === "[DONE]")
1867
- continue;
1868
- const chunk = JSON.parse(jsonStr);
1869
- if (chunk.choices?.[0]?.delta?.content)
1870
- streamedText += chunk.choices[0].delta.content;
1871
- else if (chunk.choices?.[0]?.delta?.reasoning)
1872
- streamedText += chunk.choices[0].delta.reasoning;
1873
- if (chunk.usage?.prompt_tokens)
1874
- inputTokens = chunk.usage.prompt_tokens;
1875
- if (chunk.usage?.completion_tokens)
1876
- outputTokens = chunk.usage.completion_tokens;
1872
+ const dataPrefix = trimmedLine.startsWith("data: ") ? 6 : trimmedLine.startsWith("data:") ? 5 : -1;
1873
+ if (dataPrefix === -1)
1874
+ continue;
1875
+ const jsonStr = trimmedLine.slice(dataPrefix);
1876
+ if (jsonStr === "[DONE]")
1877
+ continue;
1878
+ const chunk = JSON.parse(jsonStr);
1879
+ if (chunk.choices?.[0]?.delta?.content) {
1880
+ if (!firstParsedTokenTime)
1881
+ firstParsedTokenTime = Date.now();
1882
+ streamedText += chunk.choices[0].delta.content;
1883
+ } else if (chunk.choices?.[0]?.delta?.reasoning) {
1884
+ if (!firstParsedTokenTime)
1885
+ firstParsedTokenTime = Date.now();
1886
+ streamedText += chunk.choices[0].delta.reasoning;
1887
+ } else if (chunk.choices?.[0]?.delta?.reasoning_content) {
1888
+ if (!firstParsedTokenTime)
1889
+ firstParsedTokenTime = Date.now();
1890
+ streamedText += chunk.choices[0].delta.reasoning_content;
1891
+ } else if (chunk.type === "content_block_delta" && chunk.delta?.text) {
1892
+ if (!firstParsedTokenTime)
1893
+ firstParsedTokenTime = Date.now();
1894
+ streamedText += chunk.delta.text;
1895
+ } else if (chunk.type === "content_block_delta" && chunk.delta?.thinking) {
1896
+ if (!firstParsedTokenTime)
1897
+ firstParsedTokenTime = Date.now();
1898
+ streamedText += chunk.delta.thinking;
1899
+ }
1900
+ if (chunk.usage?.prompt_tokens)
1901
+ inputTokens = chunk.usage.prompt_tokens;
1902
+ if (chunk.usage?.completion_tokens)
1903
+ outputTokens = chunk.usage.completion_tokens;
1904
+ if (chunk.type === "message_start" && chunk.message?.usage?.input_tokens)
1905
+ inputTokens = chunk.message.usage.input_tokens;
1906
+ if (chunk.type === "message_delta" && chunk.usage?.output_tokens) {
1907
+ outputTokens = chunk.usage.output_tokens;
1877
1908
  }
1878
1909
  }
1879
1910
  } catch {
@@ -1881,15 +1912,18 @@ async function benchmarkSingleModelRest(model) {
1881
1912
  }
1882
1913
  }
1883
1914
  }
1915
+ await logger?.flush();
1884
1916
  const endTime = Date.now();
1885
1917
  const totalTime = endTime - startTime;
1886
- const timeToFirstToken = firstTokenTime ? firstTokenTime - startTime : totalTime;
1918
+ const effectiveFirstToken = firstParsedTokenTime ?? firstTokenTime;
1919
+ const timeToFirstToken = effectiveFirstToken ? effectiveFirstToken - startTime : totalTime;
1920
+ const generationTime = totalTime - timeToFirstToken;
1887
1921
  const usedEstimateForOutput = !outputTokens;
1888
1922
  const usedEstimateForInput = !inputTokens;
1889
1923
  const finalOutputTokens = outputTokens || Math.round(streamedText.length / 4);
1890
1924
  const finalInputTokens = inputTokens || Math.round(TEST_PROMPT.length / 4);
1891
1925
  const totalTokens = finalInputTokens + finalOutputTokens;
1892
- const tokensPerSecond = totalTime > 0 ? finalOutputTokens / totalTime * 1000 : 0;
1926
+ const tokensPerSecond = generationTime > 0 ? finalOutputTokens / generationTime * 1000 : 0;
1893
1927
  return {
1894
1928
  model: model.name,
1895
1929
  provider: model.providerName,
@@ -1904,6 +1938,7 @@ async function benchmarkSingleModelRest(model) {
1904
1938
  success: true
1905
1939
  };
1906
1940
  } catch (error) {
1941
+ await logger?.flush();
1907
1942
  return {
1908
1943
  model: model.name,
1909
1944
  provider: model.providerName,
@@ -2085,6 +2120,66 @@ var init_headless = __esm(() => {
2085
2120
  init_benchmark();
2086
2121
  });
2087
2122
 
2123
+ // ../core/src/logger.ts
2124
+ var exports_logger = {};
2125
+ __export(exports_logger, {
2126
+ getLogPath: () => getLogPath,
2127
+ createRunId: () => createRunId,
2128
+ createBenchLogger: () => createBenchLogger
2129
+ });
2130
+ import { mkdir, appendFile } from "fs/promises";
2131
+ import { homedir as homedir4 } from "os";
2132
+ import { join } from "path";
2133
+ function generateRunId() {
2134
+ const now = new Date;
2135
+ const date = now.toISOString().slice(0, 10);
2136
+ const time = now.toTimeString().slice(0, 8).replace(/:/g, "");
2137
+ const rand = Math.random().toString(16).slice(2, 6);
2138
+ return `${date}_${time}_${rand}`;
2139
+ }
2140
+ function redactSecrets(line, apiKey) {
2141
+ if (!apiKey)
2142
+ return line;
2143
+ return line.split(apiKey).join("[REDACTED]");
2144
+ }
2145
+ function createRunId() {
2146
+ return generateRunId();
2147
+ }
2148
+ function getLogPath(runId) {
2149
+ return join(homedir4(), ".local", "share", "ai-speedometer", "logs", `${runId}.log`);
2150
+ }
2151
+ async function createBenchLogger(runId) {
2152
+ const logPath = getLogPath(runId);
2153
+ const logDir = join(homedir4(), ".local", "share", "ai-speedometer", "logs");
2154
+ await mkdir(logDir, { recursive: true });
2155
+ let currentApiKey = "";
2156
+ let buffer = "";
2157
+ return {
2158
+ logPath,
2159
+ runId,
2160
+ logHeader: async (modelName, providerName, apiKey = "") => {
2161
+ currentApiKey = apiKey;
2162
+ const ts = new Date().toISOString();
2163
+ buffer = `
2164
+ === ${modelName} | ${providerName} | ${ts} ===
2165
+ `;
2166
+ },
2167
+ logRaw: async (line) => {
2168
+ const safe = redactSecrets(line, currentApiKey);
2169
+ buffer += safe + `
2170
+ `;
2171
+ },
2172
+ flush: async () => {
2173
+ buffer += `
2174
+ ` + "=".repeat(60) + `
2175
+ `;
2176
+ await appendFile(logPath, buffer, "utf8");
2177
+ buffer = "";
2178
+ }
2179
+ };
2180
+ }
2181
+ var init_logger = () => {};
2182
+
2088
2183
  // src/tui/context/AppContext.tsx
2089
2184
  import { createContext, useContext, useReducer, useEffect } from "react";
2090
2185
  import { jsxDEV } from "@opentui/react/jsx-dev-runtime";
@@ -2121,12 +2216,23 @@ function appReducer(state, action) {
2121
2216
  };
2122
2217
  case "BENCH_RESET":
2123
2218
  return { ...state, benchResults: [], selectedModels: [] };
2219
+ case "SET_LOG_INFO":
2220
+ return { ...state, logMode: action.logMode, logPath: action.logPath, runId: action.runId };
2124
2221
  default:
2125
2222
  return state;
2126
2223
  }
2127
2224
  }
2128
- function AppProvider({ children }) {
2225
+ function AppProvider({ children, logMode = false }) {
2129
2226
  const [state, dispatch] = useReducer(appReducer, initialState);
2227
+ useEffect(() => {
2228
+ if (logMode) {
2229
+ Promise.resolve().then(() => (init_logger(), exports_logger)).then(({ createRunId: createRunId2, getLogPath: getLogPath2 }) => {
2230
+ const runId = createRunId2();
2231
+ const logPath = getLogPath2(runId);
2232
+ dispatch({ type: "SET_LOG_INFO", logMode: true, logPath, runId });
2233
+ });
2234
+ }
2235
+ }, [logMode]);
2130
2236
  useEffect(() => {
2131
2237
  let cancelled = false;
2132
2238
  async function loadConfig2() {
@@ -2169,7 +2275,10 @@ var init_AppContext = __esm(() => {
2169
2275
  config: null,
2170
2276
  selectedModels: [],
2171
2277
  benchResults: [],
2172
- isLoadingConfig: true
2278
+ isLoadingConfig: true,
2279
+ logMode: false,
2280
+ logPath: null,
2281
+ runId: null
2173
2282
  };
2174
2283
  AppContext = createContext(null);
2175
2284
  });
@@ -2179,7 +2288,7 @@ var package_default;
2179
2288
  var init_package = __esm(() => {
2180
2289
  package_default = {
2181
2290
  name: "ai-speedometer",
2182
- version: "2.1.6",
2291
+ version: "2.2.0",
2183
2292
  description: "A comprehensive CLI tool for benchmarking AI models across multiple providers with parallel execution and professional metrics",
2184
2293
  bin: {
2185
2294
  "ai-speedometer": "dist/ai-speedometer",
@@ -2969,6 +3078,7 @@ var init_ResultsTable = () => {};
2969
3078
  // src/tui/screens/BenchmarkScreen.tsx
2970
3079
  import { useState as useState4, useEffect as useEffect4, useRef as useRef3, useMemo } from "react";
2971
3080
  import { useKeyboard as useKeyboard5 } from "@opentui/react";
3081
+ import { engine, Timeline } from "@opentui/core";
2972
3082
  import { jsxDEV as jsxDEV10 } from "@opentui/react/jsx-dev-runtime";
2973
3083
  function rankBadge(rank) {
2974
3084
  if (rank === 1)
@@ -2990,9 +3100,23 @@ function BenchmarkScreen() {
2990
3100
  const navigate = useNavigate();
2991
3101
  const [modelStates, setModelStates] = useState4([]);
2992
3102
  const [spinnerFrame, setSpinnerFrame] = useState4(0);
3103
+ const [shimmerPos, setShimmerPos] = useState4(0);
2993
3104
  const [allDone, setAllDone] = useState4(false);
2994
3105
  const spinnerRef = useRef3(null);
2995
3106
  const startedRef = useRef3(false);
3107
+ useEffect4(() => {
3108
+ const tl = new Timeline({ loop: true, duration: 1400 });
3109
+ const target = { pos: 0 };
3110
+ tl.add(target, { pos: 1, duration: 1400, ease: "inOutSine", onUpdate: (anim) => {
3111
+ setShimmerPos(anim.targets[0].pos);
3112
+ } }, 0);
3113
+ tl.play();
3114
+ engine.register(tl);
3115
+ return () => {
3116
+ tl.pause();
3117
+ engine.unregister(tl);
3118
+ };
3119
+ }, []);
2996
3120
  useEffect4(() => {
2997
3121
  if (startedRef.current)
2998
3122
  return;
@@ -3005,9 +3129,12 @@ function BenchmarkScreen() {
3005
3129
  setModelStates((prev) => prev.map((s) => ({ ...s, status: "running", startedAt: Date.now() })));
3006
3130
  async function runAll() {
3007
3131
  const { benchmarkSingleModelRest: benchmarkSingleModelRest2 } = await Promise.resolve().then(() => (init_benchmark(), exports_benchmark));
3132
+ const logEnabled = state.logMode && !!state.runId;
3133
+ const { createBenchLogger: createBenchLogger2 } = logEnabled ? await Promise.resolve().then(() => (init_logger(), exports_logger)) : { createBenchLogger: null };
3008
3134
  const promises = models.map(async (model) => {
3135
+ const logger = logEnabled && createBenchLogger2 ? await createBenchLogger2(state.runId) : undefined;
3009
3136
  try {
3010
- const result = await benchmarkSingleModelRest2(model);
3137
+ const result = await benchmarkSingleModelRest2(model, logger);
3011
3138
  if (!result.success) {
3012
3139
  const errMsg = result.error ?? "Request failed";
3013
3140
  setModelStates((prev) => prev.map((s) => s.model.id === model.id && s.model.providerId === model.providerId ? { ...s, status: "error", error: errMsg } : s));
@@ -3058,6 +3185,27 @@ function BenchmarkScreen() {
3058
3185
  const total = modelStates.length || 1;
3059
3186
  const filled = Math.round((done.length + errors.length) / total * BAR_W);
3060
3187
  const empty = BAR_W - filled;
3188
+ const shimmerCenter = shimmerPos * (BAR_W - 1);
3189
+ const shimmerHalf = 1.5;
3190
+ const barChars = Array.from({ length: BAR_W }, (_, i) => {
3191
+ const isFilled = i < filled;
3192
+ const dist = Math.abs(i - shimmerCenter);
3193
+ const inWindow = dist <= shimmerHalf;
3194
+ const intensity = inWindow ? 1 - dist / shimmerHalf : 0;
3195
+ if (isFilled) {
3196
+ if (intensity > 0.6)
3197
+ return { ch: "\u2588", fg: "#c8eeff" };
3198
+ if (intensity > 0)
3199
+ return { ch: "\u2593", fg: "#a0d8f0" };
3200
+ return { ch: "\u2588", fg: "#4a9abb" };
3201
+ } else {
3202
+ if (intensity > 0.6)
3203
+ return { ch: "\u2591", fg: "#2a3a52" };
3204
+ if (intensity > 0)
3205
+ return { ch: "\u2591", fg: "#222d40" };
3206
+ return { ch: "\u2591", fg: "#1a2030" };
3207
+ }
3208
+ });
3061
3209
  rows.push(/* @__PURE__ */ jsxDEV10("box", {
3062
3210
  height: 1,
3063
3211
  flexDirection: "row",
@@ -3076,14 +3224,10 @@ function BenchmarkScreen() {
3076
3224
  " "
3077
3225
  ]
3078
3226
  }, undefined, true, undefined, this),
3079
- /* @__PURE__ */ jsxDEV10("text", {
3080
- fg: "#7dcfff",
3081
- children: "\u2588".repeat(filled)
3082
- }, undefined, false, undefined, this),
3083
- /* @__PURE__ */ jsxDEV10("text", {
3084
- fg: "#292e42",
3085
- children: "\u2591".repeat(empty)
3086
- }, undefined, false, undefined, this),
3227
+ barChars.map((b, i) => /* @__PURE__ */ jsxDEV10("text", {
3228
+ fg: b.fg,
3229
+ children: b.ch
3230
+ }, i, false, undefined, this)),
3087
3231
  /* @__PURE__ */ jsxDEV10("text", {
3088
3232
  fg: "#ff9e64",
3089
3233
  children: [
@@ -3419,7 +3563,7 @@ function BenchmarkScreen() {
3419
3563
  }, "results-empty", false, undefined, this));
3420
3564
  }
3421
3565
  return rows;
3422
- }, [modelStates, allDone, tpsRanked, ttftRanked, doneResults, pendingCount, maxTps, maxTtftForBar]);
3566
+ }, [modelStates, allDone, shimmerPos, tpsRanked, ttftRanked, doneResults, pendingCount, maxTps, maxTtftForBar]);
3423
3567
  useKeyboard5((key) => {
3424
3568
  if (!allDone)
3425
3569
  return;
@@ -3428,10 +3572,22 @@ function BenchmarkScreen() {
3428
3572
  navigate("main-menu");
3429
3573
  }
3430
3574
  });
3431
- const statusLine = allDone ? /* @__PURE__ */ jsxDEV10("text", {
3432
- fg: "#9ece6a",
3433
- children: "All done! [Enter]/[q] return [\u2191\u2193/PgUp/PgDn/wheel] scroll"
3434
- }, undefined, false, undefined, this) : /* @__PURE__ */ jsxDEV10("box", {
3575
+ const statusLine = allDone ? /* @__PURE__ */ jsxDEV10("box", {
3576
+ flexDirection: "row",
3577
+ children: [
3578
+ /* @__PURE__ */ jsxDEV10("text", {
3579
+ fg: "#9ece6a",
3580
+ children: "All done! [Enter]/[Q] return [\u2191\u2193/PgUp/PgDn/wheel] scroll"
3581
+ }, undefined, false, undefined, this),
3582
+ state.logMode && state.logPath && /* @__PURE__ */ jsxDEV10("text", {
3583
+ fg: "#565f89",
3584
+ children: [
3585
+ " log: ",
3586
+ state.logPath
3587
+ ]
3588
+ }, undefined, true, undefined, this)
3589
+ ]
3590
+ }, undefined, true, undefined, this) : /* @__PURE__ */ jsxDEV10("box", {
3435
3591
  flexDirection: "row",
3436
3592
  children: [
3437
3593
  running.length > 0 && /* @__PURE__ */ jsxDEV10("text", {
@@ -5100,8 +5256,9 @@ function Shell() {
5100
5256
  ]
5101
5257
  }, undefined, true, undefined, this);
5102
5258
  }
5103
- function App() {
5259
+ function App({ logMode = false }) {
5104
5260
  return /* @__PURE__ */ jsxDEV15(AppProvider, {
5261
+ logMode,
5105
5262
  children: /* @__PURE__ */ jsxDEV15(Shell, {}, undefined, false, undefined, this)
5106
5263
  }, undefined, false, undefined, this);
5107
5264
  }
@@ -5127,7 +5284,7 @@ __export(exports_tui, {
5127
5284
  import { createCliRenderer } from "@opentui/core";
5128
5285
  import { createRoot } from "@opentui/react";
5129
5286
  import { jsxDEV as jsxDEV16 } from "@opentui/react/jsx-dev-runtime";
5130
- async function startTui() {
5287
+ async function startTui(logMode = false) {
5131
5288
  const renderer = await createCliRenderer({
5132
5289
  exitOnCtrlC: false
5133
5290
  });
@@ -5141,7 +5298,9 @@ async function startTui() {
5141
5298
  renderer.destroy();
5142
5299
  process.exit(0);
5143
5300
  });
5144
- createRoot(renderer).render(/* @__PURE__ */ jsxDEV16(App, {}, undefined, false, undefined, this));
5301
+ createRoot(renderer).render(/* @__PURE__ */ jsxDEV16(App, {
5302
+ logMode
5303
+ }, undefined, false, undefined, this));
5145
5304
  }
5146
5305
  var ENABLE_BRACKETED_PASTE = "\x1B[?2004h", DISABLE_BRACKETED_PASTE = "\x1B[?2004l";
5147
5306
  var init_tui = __esm(() => {
@@ -5153,6 +5312,7 @@ function parseCliArgs() {
5153
5312
  const args = process.argv.slice(2);
5154
5313
  const parsed = {
5155
5314
  debug: false,
5315
+ log: false,
5156
5316
  bench: null,
5157
5317
  benchCustom: null,
5158
5318
  apiKey: null,
@@ -5165,6 +5325,8 @@ function parseCliArgs() {
5165
5325
  const arg = args[i];
5166
5326
  if (arg === "--debug")
5167
5327
  parsed.debug = true;
5328
+ else if (arg === "--log")
5329
+ parsed.log = true;
5168
5330
  else if (arg === "--bench")
5169
5331
  parsed.bench = args[++i] ?? null;
5170
5332
  else if (arg === "--bench-custom")
@@ -5197,6 +5359,7 @@ function showHelp() {
5197
5359
  console.log(" --api-key <key> API key for custom provider");
5198
5360
  console.log(" --endpoint-format <format> Endpoint format (default: chat/completions)");
5199
5361
  console.log(" --formatted Format JSON output for human readability");
5362
+ console.log(" --log Log raw SSE streams to ~/.local/share/ai-speedometer/logs/");
5200
5363
  console.log(" --debug Enable debug logging");
5201
5364
  console.log(" --help, -h Show this help message");
5202
5365
  console.log("");
@@ -5214,5 +5377,5 @@ if (cliArgs.help) {
5214
5377
  await runHeadlessBenchmark2(cliArgs);
5215
5378
  } else {
5216
5379
  const { startTui: startTui2 } = await Promise.resolve().then(() => (init_tui(), exports_tui));
5217
- await startTui2();
5380
+ await startTui2(cliArgs.log);
5218
5381
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ai-speedometer",
3
- "version": "2.1.6",
3
+ "version": "2.2.0",
4
4
  "description": "A comprehensive CLI tool for benchmarking AI models across multiple providers with parallel execution and professional metrics",
5
5
  "bin": {
6
6
  "ai-speedometer": "dist/ai-speedometer",