ai-speedometer 2.1.7 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/ai-speedometer +85 -34
  2. package/package.json +1 -1
@@ -1821,16 +1821,17 @@ async function benchmarkSingleModelRest(model, logger) {
1821
1821
  continue;
1822
1822
  try {
1823
1823
  if (model.providerType === "anthropic") {
1824
- if (trimmedLine.startsWith("data: ")) {
1825
- const jsonStr = trimmedLine.slice(6);
1824
+ const anthropicDataPrefix = trimmedLine.startsWith("data: ") ? 6 : trimmedLine.startsWith("data:") ? 5 : -1;
1825
+ if (anthropicDataPrefix !== -1) {
1826
+ const jsonStr = trimmedLine.slice(anthropicDataPrefix);
1826
1827
  if (jsonStr === "[DONE]")
1827
1828
  continue;
1828
1829
  const chunk = JSON.parse(jsonStr);
1829
1830
  const chunkTyped = chunk;
1830
- if (chunkTyped.type === "content_block_delta" && chunkTyped.delta?.text) {
1831
+ if (chunkTyped.type === "content_block_delta" && (chunkTyped.delta?.text || chunkTyped.delta?.thinking)) {
1831
1832
  if (!firstParsedTokenTime)
1832
1833
  firstParsedTokenTime = Date.now();
1833
- streamedText += chunkTyped.delta.text;
1834
+ streamedText += chunkTyped.delta?.text || chunkTyped.delta?.thinking || "";
1834
1835
  } else if (chunkTyped.type === "message_start" && chunkTyped.message?.usage) {
1835
1836
  inputTokens = chunkTyped.message.usage.input_tokens || 0;
1836
1837
  } else if (chunkTyped.type === "message_delta") {
@@ -1843,10 +1844,10 @@ async function benchmarkSingleModelRest(model, logger) {
1843
1844
  continue;
1844
1845
  } else {
1845
1846
  const chunk = JSON.parse(trimmedLine);
1846
- if (chunk.type === "content_block_delta" && chunk.delta?.text) {
1847
+ if (chunk.type === "content_block_delta" && (chunk.delta?.text || chunk.delta?.thinking)) {
1847
1848
  if (!firstParsedTokenTime)
1848
1849
  firstParsedTokenTime = Date.now();
1849
- streamedText += chunk.delta.text;
1850
+ streamedText += chunk.delta?.text || chunk.delta?.thinking || "";
1850
1851
  } else if (chunk.type === "message_start" && chunk.message?.usage) {
1851
1852
  inputTokens = chunk.message.usage.input_tokens || 0;
1852
1853
  } else if (chunk.type === "message_delta") {
@@ -1868,24 +1869,42 @@ async function benchmarkSingleModelRest(model, logger) {
1868
1869
  if (chunk.usageMetadata?.candidatesTokenCount)
1869
1870
  outputTokens = chunk.usageMetadata.candidatesTokenCount;
1870
1871
  } else {
1871
- if (trimmedLine.startsWith("data: ")) {
1872
- const jsonStr = trimmedLine.slice(6);
1873
- if (jsonStr === "[DONE]")
1874
- continue;
1875
- const chunk = JSON.parse(jsonStr);
1876
- if (chunk.choices?.[0]?.delta?.content) {
1877
- if (!firstParsedTokenTime)
1878
- firstParsedTokenTime = Date.now();
1879
- streamedText += chunk.choices[0].delta.content;
1880
- } else if (chunk.choices?.[0]?.delta?.reasoning) {
1881
- if (!firstParsedTokenTime)
1882
- firstParsedTokenTime = Date.now();
1883
- streamedText += chunk.choices[0].delta.reasoning;
1884
- }
1885
- if (chunk.usage?.prompt_tokens)
1886
- inputTokens = chunk.usage.prompt_tokens;
1887
- if (chunk.usage?.completion_tokens)
1888
- outputTokens = chunk.usage.completion_tokens;
1872
+ const dataPrefix = trimmedLine.startsWith("data: ") ? 6 : trimmedLine.startsWith("data:") ? 5 : -1;
1873
+ if (dataPrefix === -1)
1874
+ continue;
1875
+ const jsonStr = trimmedLine.slice(dataPrefix);
1876
+ if (jsonStr === "[DONE]")
1877
+ continue;
1878
+ const chunk = JSON.parse(jsonStr);
1879
+ if (chunk.choices?.[0]?.delta?.content) {
1880
+ if (!firstParsedTokenTime)
1881
+ firstParsedTokenTime = Date.now();
1882
+ streamedText += chunk.choices[0].delta.content;
1883
+ } else if (chunk.choices?.[0]?.delta?.reasoning) {
1884
+ if (!firstParsedTokenTime)
1885
+ firstParsedTokenTime = Date.now();
1886
+ streamedText += chunk.choices[0].delta.reasoning;
1887
+ } else if (chunk.choices?.[0]?.delta?.reasoning_content) {
1888
+ if (!firstParsedTokenTime)
1889
+ firstParsedTokenTime = Date.now();
1890
+ streamedText += chunk.choices[0].delta.reasoning_content;
1891
+ } else if (chunk.type === "content_block_delta" && chunk.delta?.text) {
1892
+ if (!firstParsedTokenTime)
1893
+ firstParsedTokenTime = Date.now();
1894
+ streamedText += chunk.delta.text;
1895
+ } else if (chunk.type === "content_block_delta" && chunk.delta?.thinking) {
1896
+ if (!firstParsedTokenTime)
1897
+ firstParsedTokenTime = Date.now();
1898
+ streamedText += chunk.delta.thinking;
1899
+ }
1900
+ if (chunk.usage?.prompt_tokens)
1901
+ inputTokens = chunk.usage.prompt_tokens;
1902
+ if (chunk.usage?.completion_tokens)
1903
+ outputTokens = chunk.usage.completion_tokens;
1904
+ if (chunk.type === "message_start" && chunk.message?.usage?.input_tokens)
1905
+ inputTokens = chunk.message.usage.input_tokens;
1906
+ if (chunk.type === "message_delta" && chunk.usage?.output_tokens) {
1907
+ outputTokens = chunk.usage.output_tokens;
1889
1908
  }
1890
1909
  }
1891
1910
  } catch {
@@ -2269,7 +2288,7 @@ var package_default;
2269
2288
  var init_package = __esm(() => {
2270
2289
  package_default = {
2271
2290
  name: "ai-speedometer",
2272
- version: "2.1.7",
2291
+ version: "2.2.0",
2273
2292
  description: "A comprehensive CLI tool for benchmarking AI models across multiple providers with parallel execution and professional metrics",
2274
2293
  bin: {
2275
2294
  "ai-speedometer": "dist/ai-speedometer",
@@ -3059,6 +3078,7 @@ var init_ResultsTable = () => {};
3059
3078
  // src/tui/screens/BenchmarkScreen.tsx
3060
3079
  import { useState as useState4, useEffect as useEffect4, useRef as useRef3, useMemo } from "react";
3061
3080
  import { useKeyboard as useKeyboard5 } from "@opentui/react";
3081
+ import { engine, Timeline } from "@opentui/core";
3062
3082
  import { jsxDEV as jsxDEV10 } from "@opentui/react/jsx-dev-runtime";
3063
3083
  function rankBadge(rank) {
3064
3084
  if (rank === 1)
@@ -3080,9 +3100,23 @@ function BenchmarkScreen() {
3080
3100
  const navigate = useNavigate();
3081
3101
  const [modelStates, setModelStates] = useState4([]);
3082
3102
  const [spinnerFrame, setSpinnerFrame] = useState4(0);
3103
+ const [shimmerPos, setShimmerPos] = useState4(0);
3083
3104
  const [allDone, setAllDone] = useState4(false);
3084
3105
  const spinnerRef = useRef3(null);
3085
3106
  const startedRef = useRef3(false);
3107
+ useEffect4(() => {
3108
+ const tl = new Timeline({ loop: true, duration: 1400 });
3109
+ const target = { pos: 0 };
3110
+ tl.add(target, { pos: 1, duration: 1400, ease: "inOutSine", onUpdate: (anim) => {
3111
+ setShimmerPos(anim.targets[0].pos);
3112
+ } }, 0);
3113
+ tl.play();
3114
+ engine.register(tl);
3115
+ return () => {
3116
+ tl.pause();
3117
+ engine.unregister(tl);
3118
+ };
3119
+ }, []);
3086
3120
  useEffect4(() => {
3087
3121
  if (startedRef.current)
3088
3122
  return;
@@ -3151,6 +3185,27 @@ function BenchmarkScreen() {
3151
3185
  const total = modelStates.length || 1;
3152
3186
  const filled = Math.round((done.length + errors.length) / total * BAR_W);
3153
3187
  const empty = BAR_W - filled;
3188
+ const shimmerCenter = shimmerPos * (BAR_W - 1);
3189
+ const shimmerHalf = 1.5;
3190
+ const barChars = Array.from({ length: BAR_W }, (_, i) => {
3191
+ const isFilled = i < filled;
3192
+ const dist = Math.abs(i - shimmerCenter);
3193
+ const inWindow = dist <= shimmerHalf;
3194
+ const intensity = inWindow ? 1 - dist / shimmerHalf : 0;
3195
+ if (isFilled) {
3196
+ if (intensity > 0.6)
3197
+ return { ch: "\u2588", fg: "#c8eeff" };
3198
+ if (intensity > 0)
3199
+ return { ch: "\u2593", fg: "#a0d8f0" };
3200
+ return { ch: "\u2588", fg: "#4a9abb" };
3201
+ } else {
3202
+ if (intensity > 0.6)
3203
+ return { ch: "\u2591", fg: "#2a3a52" };
3204
+ if (intensity > 0)
3205
+ return { ch: "\u2591", fg: "#222d40" };
3206
+ return { ch: "\u2591", fg: "#1a2030" };
3207
+ }
3208
+ });
3154
3209
  rows.push(/* @__PURE__ */ jsxDEV10("box", {
3155
3210
  height: 1,
3156
3211
  flexDirection: "row",
@@ -3169,14 +3224,10 @@ function BenchmarkScreen() {
3169
3224
  " "
3170
3225
  ]
3171
3226
  }, undefined, true, undefined, this),
3172
- /* @__PURE__ */ jsxDEV10("text", {
3173
- fg: "#7dcfff",
3174
- children: "\u2588".repeat(filled)
3175
- }, undefined, false, undefined, this),
3176
- /* @__PURE__ */ jsxDEV10("text", {
3177
- fg: "#292e42",
3178
- children: "\u2591".repeat(empty)
3179
- }, undefined, false, undefined, this),
3227
+ barChars.map((b, i) => /* @__PURE__ */ jsxDEV10("text", {
3228
+ fg: b.fg,
3229
+ children: b.ch
3230
+ }, i, false, undefined, this)),
3180
3231
  /* @__PURE__ */ jsxDEV10("text", {
3181
3232
  fg: "#ff9e64",
3182
3233
  children: [
@@ -3512,7 +3563,7 @@ function BenchmarkScreen() {
3512
3563
  }, "results-empty", false, undefined, this));
3513
3564
  }
3514
3565
  return rows;
3515
- }, [modelStates, allDone, tpsRanked, ttftRanked, doneResults, pendingCount, maxTps, maxTtftForBar]);
3566
+ }, [modelStates, allDone, shimmerPos, tpsRanked, ttftRanked, doneResults, pendingCount, maxTps, maxTtftForBar]);
3516
3567
  useKeyboard5((key) => {
3517
3568
  if (!allDone)
3518
3569
  return;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ai-speedometer",
3
- "version": "2.1.7",
3
+ "version": "2.2.0",
4
4
  "description": "A comprehensive CLI tool for benchmarking AI models across multiple providers with parallel execution and professional metrics",
5
5
  "bin": {
6
6
  "ai-speedometer": "dist/ai-speedometer",