@chenpu17/cc-gw 0.4.1 → 0.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. package/package.json +1 -1
  2. package/src/server/dist/index.js +164 -80
  3. package/src/web/dist/assets/{About-DpeHLv4-.js → About-BWcLLdLY.js} +2 -2
  4. package/src/web/dist/assets/{ApiKeys-CTR9Ynaf.js → ApiKeys-DsYmx21U.js} +1 -1
  5. package/src/web/dist/assets/{Button-CkW8jVT-.js → Button-CZXniSHM.js} +1 -1
  6. package/src/web/dist/assets/Dashboard-H7fcVgwO.js +16 -0
  7. package/src/web/dist/assets/{FormField-CsiVhN9M.js → FormField-SZpxR702.js} +1 -1
  8. package/src/web/dist/assets/{Help-CSSGZPF_.js → Help-CgWIUFIO.js} +1 -1
  9. package/src/web/dist/assets/{Input-jZIOmpmG.js → Input-BdyQWPOU.js} +1 -1
  10. package/src/web/dist/assets/{Login-D577Po2d.js → Login-0_Y4Go8x.js} +1 -1
  11. package/src/web/dist/assets/{Logs-DNLLeIq-.js → Logs-MTopPD6L.js} +1 -1
  12. package/src/web/dist/assets/{ModelManagement-BazsfV0F.js → ModelManagement-DBVBITho.js} +1 -1
  13. package/src/web/dist/assets/{PageSection-BbmHOMiD.js → PageSection-B08EcVAN.js} +1 -1
  14. package/src/web/dist/assets/{Settings-Btmf5gPD.js → Settings-DEloCGp7.js} +1 -1
  15. package/src/web/dist/assets/{StatusBadge-DCmNAKwK.js → StatusBadge-8KAMZvYW.js} +1 -1
  16. package/src/web/dist/assets/{copy-D50F736h.js → copy-BdNskWTP.js} +1 -1
  17. package/src/web/dist/assets/{index-But5PyJC.css → index-BFd07aus.css} +1 -1
  18. package/src/web/dist/assets/{index-B-8NzzZq.js → index-BK1UNVMz.js} +5 -5
  19. package/src/web/dist/assets/{index-BuSvAxno.js → index-CyrAg0Ev.js} +1 -1
  20. package/src/web/dist/assets/{info-B7P9oOld.js → info-BTcWJb9B.js} +1 -1
  21. package/src/web/dist/assets/{useApiQuery-DZHaAgf7.js → useApiQuery-BNTE55UK.js} +1 -1
  22. package/src/web/dist/index.html +2 -2
  23. package/src/web/dist/assets/Dashboard-D1jcy7qj.js +0 -16
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@chenpu17/cc-gw",
- "version": "0.4.1",
+ "version": "0.4.3",
  "private": false,
  "type": "module",
  "scripts": {
package/src/server/dist/index.js CHANGED
@@ -11102,6 +11102,12 @@ async function migrateDailyMetricsTable(db) {
  const hasCompositePrimaryKey = primaryKeyColumns.length > 1;
  if (!hasEndpointColumn || !hasCompositePrimaryKey) {
  const endpointSelector = hasEndpointColumn ? "COALESCE(endpoint, 'anthropic')" : "'anthropic'";
+ const hasCachedTokensColumn = columns.some((column) => column.name === "total_cached_tokens");
+ const hasCacheReadColumn = columns.some((column) => column.name === "total_cache_read_tokens");
+ const hasCacheCreationColumn = columns.some((column) => column.name === "total_cache_creation_tokens");
+ const cachedTokensSelector = hasCachedTokensColumn ? "COALESCE(total_cached_tokens, 0)" : "0";
+ const cacheReadSelector = hasCacheReadColumn ? "COALESCE(total_cache_read_tokens, 0)" : "0";
+ const cacheCreationSelector = hasCacheCreationColumn ? "COALESCE(total_cache_creation_tokens, 0)" : "0";
  await exec(
  db,
  `ALTER TABLE daily_metrics RENAME TO daily_metrics_old;
@@ -11111,15 +11117,21 @@ async function migrateDailyMetricsTable(db) {
  request_count INTEGER DEFAULT 0,
  total_input_tokens INTEGER DEFAULT 0,
  total_output_tokens INTEGER DEFAULT 0,
+ total_cached_tokens INTEGER DEFAULT 0,
+ total_cache_read_tokens INTEGER DEFAULT 0,
+ total_cache_creation_tokens INTEGER DEFAULT 0,
  total_latency_ms INTEGER DEFAULT 0,
  PRIMARY KEY (date, endpoint)
  );
- INSERT INTO daily_metrics (date, endpoint, request_count, total_input_tokens, total_output_tokens, total_latency_ms)
+ INSERT INTO daily_metrics (date, endpoint, request_count, total_input_tokens, total_output_tokens, total_cached_tokens, total_cache_read_tokens, total_cache_creation_tokens, total_latency_ms)
  SELECT date,
  ${endpointSelector},
  request_count,
  total_input_tokens,
  total_output_tokens,
+ ${cachedTokensSelector},
+ ${cacheReadSelector},
+ ${cacheCreationSelector},
  total_latency_ms
  FROM daily_metrics_old;
  DROP TABLE daily_metrics_old;`
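
The hunk above rebuilds daily_metrics with SQLite's usual rename-create-copy-drop pattern, adding the three cache-token columns while preserving existing rows. A minimal standalone sketch of the same pattern is below; better-sqlite3 stands in for the gateway's exec/run helpers, and the old table's full column list (cut off above the hunk) is assumed for the demo.

    import Database from "better-sqlite3";

    // Illustrative only: rebuild daily_metrics with the new cache columns,
    // copying existing rows and defaulting the new counters to 0.
    const db = new Database(":memory:");

    db.exec(`
      CREATE TABLE daily_metrics (
        date TEXT NOT NULL,
        request_count INTEGER DEFAULT 0,
        total_input_tokens INTEGER DEFAULT 0,
        total_output_tokens INTEGER DEFAULT 0,
        total_latency_ms INTEGER DEFAULT 0
      );
      INSERT INTO daily_metrics VALUES ('2025-10-29', 3, 120, 80, 950);
    `);

    db.exec(`
      ALTER TABLE daily_metrics RENAME TO daily_metrics_old;
      CREATE TABLE daily_metrics (
        date TEXT NOT NULL,
        endpoint TEXT NOT NULL DEFAULT 'anthropic',
        request_count INTEGER DEFAULT 0,
        total_input_tokens INTEGER DEFAULT 0,
        total_output_tokens INTEGER DEFAULT 0,
        total_cached_tokens INTEGER DEFAULT 0,
        total_cache_read_tokens INTEGER DEFAULT 0,
        total_cache_creation_tokens INTEGER DEFAULT 0,
        total_latency_ms INTEGER DEFAULT 0,
        PRIMARY KEY (date, endpoint)
      );
      INSERT INTO daily_metrics (date, endpoint, request_count, total_input_tokens,
        total_output_tokens, total_cached_tokens, total_cache_read_tokens,
        total_cache_creation_tokens, total_latency_ms)
      SELECT date, 'anthropic', request_count, total_input_tokens,
        total_output_tokens, 0, 0, 0, total_latency_ms
      FROM daily_metrics_old;
      DROP TABLE daily_metrics_old;
    `);

    console.log(db.prepare("SELECT * FROM daily_metrics").all());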
@@ -11176,6 +11188,9 @@ async function ensureSchema(db) {
  request_count INTEGER DEFAULT 0,
  total_input_tokens INTEGER DEFAULT 0,
  total_output_tokens INTEGER DEFAULT 0,
+ total_cached_tokens INTEGER DEFAULT 0,
+ total_cache_read_tokens INTEGER DEFAULT 0,
+ total_cache_creation_tokens INTEGER DEFAULT 0,
  total_latency_ms INTEGER DEFAULT 0,
  PRIMARY KEY (date, endpoint)
  );
@@ -11233,6 +11248,9 @@ async function ensureSchema(db) {
  await maybeAddColumn(db, "api_keys", "total_input_tokens", "INTEGER DEFAULT 0");
  await maybeAddColumn(db, "api_keys", "total_output_tokens", "INTEGER DEFAULT 0");
  await migrateDailyMetricsTable(db);
+ await maybeAddColumn(db, "daily_metrics", "total_cached_tokens", "INTEGER DEFAULT 0");
+ await maybeAddColumn(db, "daily_metrics", "total_cache_read_tokens", "INTEGER DEFAULT 0");
+ await maybeAddColumn(db, "daily_metrics", "total_cache_creation_tokens", "INTEGER DEFAULT 0");
  await run(db, "CREATE UNIQUE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(key_hash) WHERE key_hash IS NOT NULL");
  await run(db, "UPDATE api_keys SET key_hash = '*' WHERE is_wildcard = 1 AND (key_hash IS NULL OR key_hash = '')");
  await run(db, "UPDATE api_keys SET updated_at = created_at WHERE updated_at IS NULL");
@@ -11449,23 +11467,33 @@ async function upsertLogPayload(requestId, payload) {
  );
  }
  async function updateMetrics(date, endpoint, delta) {
- await runQuery(
- `INSERT INTO daily_metrics (date, endpoint, request_count, total_input_tokens, total_output_tokens, total_latency_ms)
- VALUES (?, ?, ?, ?, ?, ?)
- ON CONFLICT(date, endpoint) DO UPDATE SET
- request_count = daily_metrics.request_count + excluded.request_count,
- total_input_tokens = daily_metrics.total_input_tokens + excluded.total_input_tokens,
- total_output_tokens = daily_metrics.total_output_tokens + excluded.total_output_tokens,
- total_latency_ms = daily_metrics.total_latency_ms + excluded.total_latency_ms`,
- [
- date,
- endpoint,
- delta.requests,
- delta.inputTokens,
- delta.outputTokens,
- delta.latencyMs
- ]
- );
+ try {
+ await runQuery(
+ `INSERT INTO daily_metrics (date, endpoint, request_count, total_input_tokens, total_output_tokens, total_cached_tokens, total_cache_read_tokens, total_cache_creation_tokens, total_latency_ms)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+ ON CONFLICT(date, endpoint) DO UPDATE SET
+ request_count = daily_metrics.request_count + excluded.request_count,
+ total_input_tokens = daily_metrics.total_input_tokens + excluded.total_input_tokens,
+ total_output_tokens = daily_metrics.total_output_tokens + excluded.total_output_tokens,
+ total_cached_tokens = daily_metrics.total_cached_tokens + excluded.total_cached_tokens,
+ total_cache_read_tokens = daily_metrics.total_cache_read_tokens + excluded.total_cache_read_tokens,
+ total_cache_creation_tokens = daily_metrics.total_cache_creation_tokens + excluded.total_cache_creation_tokens,
+ total_latency_ms = daily_metrics.total_latency_ms + excluded.total_latency_ms`,
+ [
+ date,
+ endpoint,
+ delta.requests,
+ delta.inputTokens,
+ delta.outputTokens,
+ delta.cachedTokens ?? 0,
+ delta.cacheReadTokens ?? 0,
+ delta.cacheCreationTokens ?? 0,
+ delta.latencyMs
+ ]
+ );
+ } catch (err) {
+ console.error("[updateMetrics] Failed to update metrics:", err);
+ }
  }

  // metrics/activity.ts
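
The ON CONFLICT(date, endpoint) DO UPDATE clause above is what lets each request fold its token counts into the per-day, per-endpoint row. A minimal sketch of that accumulation, using better-sqlite3 in place of the gateway's runQuery helper (which is not shown in this diff):

    import Database from "better-sqlite3";

    const db = new Database(":memory:");
    db.exec(`
      CREATE TABLE daily_metrics (
        date TEXT NOT NULL,
        endpoint TEXT NOT NULL,
        request_count INTEGER DEFAULT 0,
        total_input_tokens INTEGER DEFAULT 0,
        total_output_tokens INTEGER DEFAULT 0,
        total_cached_tokens INTEGER DEFAULT 0,
        total_cache_read_tokens INTEGER DEFAULT 0,
        total_cache_creation_tokens INTEGER DEFAULT 0,
        total_latency_ms INTEGER DEFAULT 0,
        PRIMARY KEY (date, endpoint)
      )
    `);

    const upsert = db.prepare(`
      INSERT INTO daily_metrics (date, endpoint, request_count, total_input_tokens,
        total_output_tokens, total_cached_tokens, total_cache_read_tokens,
        total_cache_creation_tokens, total_latency_ms)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
      ON CONFLICT(date, endpoint) DO UPDATE SET
        request_count = daily_metrics.request_count + excluded.request_count,
        total_input_tokens = daily_metrics.total_input_tokens + excluded.total_input_tokens,
        total_output_tokens = daily_metrics.total_output_tokens + excluded.total_output_tokens,
        total_cached_tokens = daily_metrics.total_cached_tokens + excluded.total_cached_tokens,
        total_cache_read_tokens = daily_metrics.total_cache_read_tokens + excluded.total_cache_read_tokens,
        total_cache_creation_tokens = daily_metrics.total_cache_creation_tokens + excluded.total_cache_creation_tokens,
        total_latency_ms = daily_metrics.total_latency_ms + excluded.total_latency_ms
    `);

    // Two requests on the same day and endpoint accumulate into one row.
    upsert.run("2025-10-30", "anthropic", 1, 1200, 300, 900, 800, 100, 1500);
    upsert.run("2025-10-30", "anthropic", 1, 600, 150, 0, 0, 0, 700);

    console.log(db.prepare("SELECT * FROM daily_metrics").get());
    // -> request_count 2, total_cached_tokens 900, total_latency_ms 2200, ...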
@@ -11935,23 +11963,24 @@ function computeTpot(totalLatencyMs, outputTokens, options) {
  return Number.isFinite(raw) ? roundTwoDecimals(raw) : null;
  }
  function resolveCachedTokens(usage) {
+ const result = { read: 0, creation: 0 };
  if (!usage || typeof usage !== "object") {
- return null;
+ return result;
+ }
+ if (typeof usage.cache_read_input_tokens === "number") {
+ result.read = usage.cache_read_input_tokens;
+ }
+ if (typeof usage.cache_creation_input_tokens === "number") {
+ result.creation = usage.cache_creation_input_tokens;
  }
  if (typeof usage.cached_tokens === "number") {
- return usage.cached_tokens;
+ result.read = usage.cached_tokens;
  }
  const promptDetails = usage.prompt_tokens_details;
  if (promptDetails && typeof promptDetails.cached_tokens === "number") {
- return promptDetails.cached_tokens;
- }
- if (typeof usage.cache_read_input_tokens === "number") {
- return usage.cache_read_input_tokens;
- }
- if (typeof usage.cache_creation_input_tokens === "number") {
- return usage.cache_creation_input_tokens;
+ result.read = promptDetails.cached_tokens;
  }
- return null;
+ return result;
  }
  function cloneOriginalPayload(value) {
  const structuredCloneFn = globalThis.structuredClone;
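
resolveCachedTokens (and its copies resolveCachedTokens2/3 further down) now returns a {read, creation} breakdown instead of a single nullable count. A typed standalone restatement of the same logic, with the usage shapes it handles (Anthropic's cache_read_input_tokens / cache_creation_input_tokens and OpenAI's prompt_tokens_details.cached_tokens); only the type names are additions here:

    interface CachedTokenBreakdown {
      read: number;
      creation: number;
    }

    // Field checks run in the same order as in the bundle, so when several
    // cache fields are present the last match wins for result.read.
    function resolveCachedTokens(usage: unknown): CachedTokenBreakdown {
      const result: CachedTokenBreakdown = { read: 0, creation: 0 };
      if (!usage || typeof usage !== "object") {
        return result;
      }
      const u = usage as Record<string, any>;
      if (typeof u.cache_read_input_tokens === "number") {
        result.read = u.cache_read_input_tokens;
      }
      if (typeof u.cache_creation_input_tokens === "number") {
        result.creation = u.cache_creation_input_tokens;
      }
      if (typeof u.cached_tokens === "number") {
        result.read = u.cached_tokens;
      }
      const promptDetails = u.prompt_tokens_details;
      if (promptDetails && typeof promptDetails.cached_tokens === "number") {
        result.read = promptDetails.cached_tokens;
      }
      return result;
    }

    // Anthropic-style usage: both sides are populated.
    console.log(resolveCachedTokens({ cache_read_input_tokens: 800, cache_creation_input_tokens: 100 }));
    // -> { read: 800, creation: 100 }

    // OpenAI-style usage: only a read-side figure is available.
    console.log(resolveCachedTokens({ prompt_tokens_details: { cached_tokens: 512 } }));
    // -> { read: 512, creation: 0 }

    // Callers that still need one number sum the two, as the route handlers below do.
    const cached = resolveCachedTokens({ cache_read_input_tokens: 800, cache_creation_input_tokens: 100 });
    const cachedTokens = cached.read + cached.creation; // 900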
@@ -12214,7 +12243,8 @@ async function registerMessagesRoute(app) {
  if (providerType === "anthropic") {
  let inputTokens2 = json.usage?.input_tokens ?? 0;
  let outputTokens2 = json.usage?.output_tokens ?? 0;
- const cachedTokens2 = resolveCachedTokens(json.usage);
+ const cached3 = resolveCachedTokens(json.usage);
+ const cachedTokens3 = cached3.read + cached3.creation;
  if (!inputTokens2) {
  inputTokens2 = target.tokenEstimate || estimateTokens(normalized, target.modelId);
  }
@@ -12225,13 +12255,13 @@ async function registerMessagesRoute(app) {
  logUsage("non_stream.anthropic", {
  input: inputTokens2,
  output: outputTokens2,
- cached: cachedTokens2
+ cached: cachedTokens3
  });
  const latencyMs2 = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
- cachedTokens: cachedTokens2,
+ cachedTokens: cachedTokens3,
  ttftMs: latencyMs2,
  tpotMs: computeTpot(latencyMs2, outputTokens2, { streaming: false })
  });
@@ -12240,6 +12270,9 @@ async function registerMessagesRoute(app) {
  requests: 1,
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
+ cachedTokens: cachedTokens3,
+ cacheReadTokens: cached3.read,
+ cacheCreationTokens: cached3.creation,
  latencyMs: latencyMs2
  });
  if (storeResponsePayloads) {
@@ -12260,7 +12293,8 @@ async function registerMessagesRoute(app) {
  const claudeResponse = buildClaudeResponse(json, target.modelId);
  let inputTokens = json.usage?.prompt_tokens ?? 0;
  let outputTokens = json.usage?.completion_tokens ?? 0;
- const cachedTokens = resolveCachedTokens(json.usage);
+ const cached2 = resolveCachedTokens(json.usage);
+ const cachedTokens2 = cached2.read + cached2.creation;
  if (!inputTokens) {
  inputTokens = target.tokenEstimate || estimateTokens(normalized, target.modelId);
  }
@@ -12271,13 +12305,13 @@ async function registerMessagesRoute(app) {
  logUsage("non_stream.openai", {
  input: inputTokens,
  output: outputTokens,
- cached: cachedTokens
+ cached: cachedTokens2
  });
  const latencyMs = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens,
  outputTokens,
- cachedTokens,
+ cachedTokens: cachedTokens2,
  ttftMs: latencyMs,
  tpotMs: computeTpot(latencyMs, outputTokens, { streaming: false })
  });
@@ -12286,6 +12320,9 @@ async function registerMessagesRoute(app) {
  requests: 1,
  inputTokens,
  outputTokens,
+ cachedTokens: cachedTokens2,
+ cacheReadTokens: cached2.read,
+ cacheCreationTokens: cached2.creation,
  latencyMs
  });
  if (storeResponsePayloads) {
@@ -12457,9 +12494,7 @@ async function registerMessagesRoute(app) {
  usagePrompt2 = payload2.usage.input_tokens ?? usagePrompt2;
  usageCompletion2 = payload2.usage.output_tokens ?? usageCompletion2;
  const maybeCached = resolveCachedTokens(payload2.usage);
- if (maybeCached !== null) {
- usageCached2 = maybeCached;
- }
+ usageCached2 = maybeCached.read + maybeCached.creation;
  lastUsagePayload = payload2.usage;
  }
  if (payload2?.delta) {
@@ -12486,9 +12521,7 @@ async function registerMessagesRoute(app) {
  usagePrompt2 = payload2.usage.input_tokens ?? usagePrompt2;
  usageCompletion2 = payload2.usage.output_tokens ?? usageCompletion2;
  const maybeCached = resolveCachedTokens(payload2.usage);
- if (maybeCached !== null) {
- usageCached2 = maybeCached;
- }
+ usageCached2 = maybeCached.read + maybeCached.creation;
  lastUsagePayload = payload2.usage;
  }
  if (payload2?.stop_reason) {
@@ -12540,8 +12573,9 @@ async function registerMessagesRoute(app) {
  }
  const totalLatencyMs = Date.now() - requestStart;
  const ttftMs = firstTokenAt2 ? firstTokenAt2 - requestStart : null;
+ const cached2 = resolveCachedTokens(lastUsagePayload);
  if (usageCached2 === null) {
- usageCached2 = resolveCachedTokens(lastUsagePayload);
+ usageCached2 = cached2.read + cached2.creation;
  }
  logUsage("stream.anthropic.final", {
  input: usagePrompt2,
@@ -12563,6 +12597,9 @@ async function registerMessagesRoute(app) {
  requests: 1,
  inputTokens: usagePrompt2,
  outputTokens: usageCompletion2,
+ cachedTokens: usageCached2,
+ cacheReadTokens: cached2.read,
+ cacheCreationTokens: cached2.creation,
  latencyMs: totalLatencyMs
  });
  if (storeResponsePayloads) {
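
In the streaming path above, usage fields arrive piecemeal across SSE events and the last usage payload seen drives the final cache accounting. The sketch below illustrates that bookkeeping only; the event shape follows Anthropic's message_start / message_delta events, and the accumulator is a simplified stand-in for the gateway's actual SSE parsing, not a copy of it.

    interface StreamUsage {
      input_tokens?: number;
      output_tokens?: number;
      cache_read_input_tokens?: number;
      cache_creation_input_tokens?: number;
    }

    interface UsageTotals {
      inputTokens: number;
      outputTokens: number;
      cacheReadTokens: number;
      cacheCreationTokens: number;
    }

    // Later events overwrite earlier values, mirroring "last usage payload wins".
    function accumulateUsage(events: Array<{ usage?: StreamUsage }>): UsageTotals {
      const totals: UsageTotals = { inputTokens: 0, outputTokens: 0, cacheReadTokens: 0, cacheCreationTokens: 0 };
      for (const event of events) {
        if (!event.usage) continue;
        totals.inputTokens = event.usage.input_tokens ?? totals.inputTokens;
        totals.outputTokens = event.usage.output_tokens ?? totals.outputTokens;
        totals.cacheReadTokens = event.usage.cache_read_input_tokens ?? totals.cacheReadTokens;
        totals.cacheCreationTokens = event.usage.cache_creation_input_tokens ?? totals.cacheCreationTokens;
      }
      return totals;
    }

    console.log(accumulateUsage([
      { usage: { input_tokens: 12, cache_read_input_tokens: 800, cache_creation_input_tokens: 100 } }, // message_start
      { usage: { output_tokens: 245 } },                                                               // message_delta
    ]));
    // -> { inputTokens: 12, outputTokens: 245, cacheReadTokens: 800, cacheCreationTokens: 100 }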
@@ -12733,6 +12770,9 @@ data: ${JSON.stringify(data)}
  requests: 1,
  inputTokens: finalPromptTokens,
  outputTokens: finalCompletionTokens,
+ cachedTokens: usageCached ?? 0,
+ cacheReadTokens: 0,
+ cacheCreationTokens: 0,
  latencyMs: totalLatencyMs
  });
  if (storeResponsePayloads) {
@@ -12894,6 +12934,9 @@ data: ${JSON.stringify(data)}
  requests: 1,
  inputTokens: fallbackPrompt,
  outputTokens: fallbackCompletion,
+ cachedTokens: usageCached ?? 0,
+ cacheReadTokens: 0,
+ cacheCreationTokens: 0,
  latencyMs: totalLatencyMs
  });
  if (storeResponsePayloads) {
@@ -13294,27 +13337,28 @@ function computeTpot2(totalLatencyMs, outputTokens, options) {
  return Number.isFinite(raw) ? roundTwoDecimals2(raw) : null;
  }
  function resolveCachedTokens2(usage) {
+ const result = { read: 0, creation: 0 };
  if (!usage || typeof usage !== "object") {
- return null;
+ return result;
+ }
+ if (typeof usage.cache_read_input_tokens === "number") {
+ result.read = usage.cache_read_input_tokens;
+ }
+ if (typeof usage.cache_creation_input_tokens === "number") {
+ result.creation = usage.cache_creation_input_tokens;
  }
  if (typeof usage.cached_tokens === "number") {
- return usage.cached_tokens;
+ result.read = usage.cached_tokens;
  }
  const promptDetails = usage.prompt_tokens_details;
  if (promptDetails && typeof promptDetails.cached_tokens === "number") {
- return promptDetails.cached_tokens;
+ result.read = promptDetails.cached_tokens;
  }
  const inputDetails = usage.input_tokens_details;
  if (inputDetails && typeof inputDetails.cached_tokens === "number") {
- return inputDetails.cached_tokens;
- }
- if (typeof usage.cache_read_input_tokens === "number") {
- return usage.cache_read_input_tokens;
+ result.read = inputDetails.cached_tokens;
  }
- if (typeof usage.cache_creation_input_tokens === "number") {
- return usage.cache_creation_input_tokens;
- }
- return null;
+ return result;
  }
  var generateId = (prefix) => `${prefix}_${Math.random().toString(36).slice(2, 10)}`;
  var isText = (input) => typeof input === "string" && input.length > 0;
@@ -13836,17 +13880,18 @@ async function registerOpenAiRoutes(app) {
  if (!Number.isFinite(inputTokens3) || inputTokens3 <= 0) {
  inputTokens3 = target.tokenEstimate ?? estimateTokens(normalized, target.modelId);
  }
- const cachedTokens2 = resolveCachedTokens2(usagePayload2);
+ const cached3 = resolveCachedTokens2(usagePayload2);
+ const cachedTokens3 = cached3.read + cached3.creation;
  const latencyMs3 = Date.now() - requestStart;
  const openAIResponse = buildOpenAIResponseFromClaude(parsed, target.modelId, converted, {
  inputTokens: inputTokens3,
  outputTokens: outputTokens3,
- cachedTokens: cachedTokens2
+ cachedTokens: cachedTokens3
  });
  await updateLogTokens(logId, {
  inputTokens: inputTokens3,
  outputTokens: outputTokens3,
- cachedTokens: cachedTokens2,
+ cachedTokens: cachedTokens3,
  ttftMs: latencyMs3,
  tpotMs: computeTpot2(latencyMs3, outputTokens3, { streaming: false })
  });
@@ -13855,6 +13900,7 @@ async function registerOpenAiRoutes(app) {
  requests: 1,
  inputTokens: inputTokens3,
  outputTokens: outputTokens3,
+ cachedTokens: cachedTokens3,
  latencyMs: latencyMs3
  });
  if (storeResponsePayloads) {
@@ -13889,12 +13935,13 @@ async function registerOpenAiRoutes(app) {
  return 0;
  })();
  const outputTokens2 = baseOutputTokens + reasoningTokens2;
- const cachedTokens = resolveCachedTokens2(usagePayload);
+ const cached2 = resolveCachedTokens2(usagePayload);
+ const cachedTokens2 = cached2.read + cached2.creation;
  const latencyMs2 = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
- cachedTokens,
+ cachedTokens: cachedTokens2,
  ttftMs: usagePayload?.first_token_latency_ms ?? latencyMs2,
  tpotMs: usagePayload?.tokens_per_second ? computeTpot2(latencyMs2, outputTokens2, { streaming: false, reasoningTokens: reasoningTokens2 }) : null
  });
@@ -14292,6 +14339,9 @@ async function registerOpenAiRoutes(app) {
  requests: 1,
  inputTokens: finalPromptTokens,
  outputTokens: finalCompletionTokens,
+ cachedTokens: usageCached2,
+ cacheReadTokens: cached.read,
+ cacheCreationTokens: cached.creation,
  latencyMs: totalLatencyMs
  });
  if (storeResponsePayloads && capturedResponseChunks2) {
@@ -14699,12 +14749,13 @@ async function registerOpenAiRoutes(app) {
  inputTokens: inputTokens3,
  outputTokens: outputTokens3
  });
- const cachedTokens2 = resolveCachedTokens2(usagePayload2);
+ const cached3 = resolveCachedTokens2(usagePayload2);
+ const cachedTokens3 = cached3.read + cached3.creation;
  const latencyMs3 = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens: inputTokens3,
  outputTokens: outputTokens3,
- cachedTokens: cachedTokens2,
+ cachedTokens: cachedTokens3,
  ttftMs: latencyMs3,
  tpotMs: computeTpot2(latencyMs3, outputTokens3, { streaming: false })
  });
@@ -14713,6 +14764,7 @@ async function registerOpenAiRoutes(app) {
  requests: 1,
  inputTokens: inputTokens3,
  outputTokens: outputTokens3,
+ cachedTokens: cachedTokens3,
  latencyMs: latencyMs3
  });
  if (storeResponsePayloads) {
@@ -14742,12 +14794,13 @@ async function registerOpenAiRoutes(app) {
  })(),
  target.modelId
  );
- const cachedTokens = resolveCachedTokens2(usagePayload);
+ const cached2 = resolveCachedTokens2(usagePayload);
+ const cachedTokens2 = cached2.read + cached2.creation;
  const latencyMs2 = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
- cachedTokens,
+ cachedTokens: cachedTokens2,
  ttftMs: usagePayload?.first_token_latency_ms ?? latencyMs2,
  tpotMs: usagePayload?.tokens_per_second ? computeTpot2(latencyMs2, outputTokens2, { streaming: false }) : null
  });
@@ -15188,6 +15241,9 @@ async function registerOpenAiRoutes(app) {
  requests: 1,
  inputTokens: finalPromptTokens,
  outputTokens: finalCompletionTokens,
+ cachedTokens: usageCached2,
+ cacheReadTokens: cached.read,
+ cacheCreationTokens: cached.creation,
  latencyMs: totalLatencyMs
  });
  if (storeResponsePayloads && capturedResponseChunks2) {
@@ -15597,6 +15653,9 @@ async function getDailyMetrics(days = 7, endpoint) {
  request_count AS requestCount,
  total_input_tokens AS inputTokens,
  total_output_tokens AS outputTokens,
+ total_cached_tokens AS cachedTokens,
+ total_cache_read_tokens AS cacheReadTokens,
+ total_cache_creation_tokens AS cacheCreationTokens,
  total_latency_ms AS totalLatency
  FROM daily_metrics
  ${whereClause}
@@ -15609,6 +15668,9 @@ async function getDailyMetrics(days = 7, endpoint) {
  requestCount: row.requestCount ?? 0,
  inputTokens: row.inputTokens ?? 0,
  outputTokens: row.outputTokens ?? 0,
+ cachedTokens: row.cachedTokens ?? 0,
+ cacheReadTokens: row.cacheReadTokens ?? 0,
+ cacheCreationTokens: row.cacheCreationTokens ?? 0,
  avgLatencyMs: row.requestCount ? Math.round((row.totalLatency ?? 0) / row.requestCount) : 0
  })).reverse();
  }
@@ -15619,6 +15681,9 @@ async function getMetricsOverview(endpoint) {
  COALESCE(SUM(request_count), 0) AS requests,
  COALESCE(SUM(total_input_tokens), 0) AS inputTokens,
  COALESCE(SUM(total_output_tokens), 0) AS outputTokens,
+ COALESCE(SUM(total_cached_tokens), 0) AS cachedTokens,
+ COALESCE(SUM(total_cache_read_tokens), 0) AS cacheReadTokens,
+ COALESCE(SUM(total_cache_creation_tokens), 0) AS cacheCreationTokens,
  COALESCE(SUM(total_latency_ms), 0) AS totalLatency
  FROM daily_metrics
  ${totalsWhere}`,
@@ -15629,6 +15694,9 @@ async function getMetricsOverview(endpoint) {
  `SELECT request_count AS requests,
  total_input_tokens AS inputTokens,
  total_output_tokens AS outputTokens,
+ total_cached_tokens AS cachedTokens,
+ total_cache_read_tokens AS cacheReadTokens,
+ total_cache_creation_tokens AS cacheCreationTokens,
  total_latency_ms AS totalLatency
  FROM daily_metrics
  WHERE date = ?
@@ -15645,12 +15713,18 @@ async function getMetricsOverview(endpoint) {
  requests: totalsRequests,
  inputTokens: totalsRow?.inputTokens ?? 0,
  outputTokens: totalsRow?.outputTokens ?? 0,
+ cachedTokens: totalsRow?.cachedTokens ?? 0,
+ cacheReadTokens: totalsRow?.cacheReadTokens ?? 0,
+ cacheCreationTokens: totalsRow?.cacheCreationTokens ?? 0,
  avgLatencyMs: resolveAvg(totalsLatency, totalsRequests)
  },
  today: {
  requests: todayRequests,
  inputTokens: todayRow?.inputTokens ?? 0,
  outputTokens: todayRow?.outputTokens ?? 0,
+ cachedTokens: todayRow?.cachedTokens ?? 0,
+ cacheReadTokens: todayRow?.cacheReadTokens ?? 0,
+ cacheCreationTokens: todayRow?.cacheCreationTokens ?? 0,
  avgLatencyMs: resolveAvg(todayLatency, todayRequests)
  }
  };
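
With the new columns flowing through getDailyMetrics and getMetricsOverview, the dashboard now receives per-day and aggregate cache figures. A rough TypeScript description of the row shapes, inferred from the SELECT aliases above; the interface names, the `date` field, and the `totals` key are assumptions, not identifiers taken from the package:

    interface DailyMetricsRow {
      date: string;                  // assumed; selected above the visible hunk
      requestCount: number;
      inputTokens: number;
      outputTokens: number;
      cachedTokens: number;          // per-request read + creation, accumulated daily
      cacheReadTokens: number;
      cacheCreationTokens: number;
      avgLatencyMs: number;          // Math.round(totalLatency / requestCount)
    }

    interface MetricsOverviewBucket {
      requests: number;
      inputTokens: number;
      outputTokens: number;
      cachedTokens: number;
      cacheReadTokens: number;
      cacheCreationTokens: number;
      avgLatencyMs: number;
    }

    interface MetricsOverview {
      totals: MetricsOverviewBucket; // all days, optionally filtered by endpoint
      today: MetricsOverviewBucket;  // the current date's row only
    }

    // Example: a cache hit ratio a dashboard widget might derive from a bucket.
    function cacheHitRate(bucket: MetricsOverviewBucket): number {
      const denominator = bucket.inputTokens + bucket.cacheReadTokens;
      return denominator === 0 ? 0 : bucket.cacheReadTokens / denominator;
    }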
@@ -17075,23 +17149,24 @@ function getPathsToRegister(basePath, protocol) {
  }
  }
  function resolveCachedTokens3(usage) {
+ const result = { read: 0, creation: 0 };
  if (!usage || typeof usage !== "object") {
- return null;
+ return result;
+ }
+ if (typeof usage.cache_read_input_tokens === "number") {
+ result.read = usage.cache_read_input_tokens;
+ }
+ if (typeof usage.cache_creation_input_tokens === "number") {
+ result.creation = usage.cache_creation_input_tokens;
  }
  if (typeof usage.cached_tokens === "number") {
- return usage.cached_tokens;
+ result.read = usage.cached_tokens;
  }
  const promptDetails = usage.prompt_tokens_details;
  if (promptDetails && typeof promptDetails.cached_tokens === "number") {
- return promptDetails.cached_tokens;
+ result.read = promptDetails.cached_tokens;
  }
- if (typeof usage.cache_read_input_tokens === "number") {
- return usage.cache_read_input_tokens;
- }
- if (typeof usage.cache_creation_input_tokens === "number") {
- return usage.cache_creation_input_tokens;
- }
- return null;
+ return result;
  }
  var roundTwoDecimals3 = (value) => Math.round(value * 100) / 100;
  function cloneOriginalPayload2(value) {
@@ -17448,12 +17523,13 @@ async function handleAnthropicProtocol(request, reply, endpoint, endpointId, app
  const json = await new Response(upstream.body).json();
  const inputTokens = json.usage?.input_tokens ?? estimateTokens(normalized, target.modelId);
  const outputTokens = json.usage?.output_tokens ?? 0;
- const cachedTokens = resolveCachedTokens3(json.usage);
+ const cached2 = resolveCachedTokens3(json.usage);
+ const cachedTokens2 = cached2.read + cached2.creation;
  const latencyMs = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens,
  outputTokens,
- cachedTokens,
+ cachedTokens: cachedTokens2,
  ttftMs: latencyMs,
  tpotMs: computeTpot3(latencyMs, outputTokens, { streaming: false })
  });
@@ -17462,6 +17538,7 @@ async function handleAnthropicProtocol(request, reply, endpoint, endpointId, app
  requests: 1,
  inputTokens,
  outputTokens,
+ cachedTokens: cachedTokens2,
  latencyMs
  });
  if (storeResponsePayloads) {
@@ -17517,9 +17594,9 @@ async function handleAnthropicProtocol(request, reply, endpoint, endpointId, app
  if (parsed?.usage) {
  usagePrompt = parsed.usage.input_tokens ?? usagePrompt;
  usageCompletion = parsed.usage.output_tokens ?? usageCompletion;
- const cached = resolveCachedTokens3(parsed.usage);
- if (cached !== null) {
- usageCached = cached;
+ const cached2 = resolveCachedTokens3(parsed.usage);
+ if (cached2 !== null) {
+ usageCached = cached2;
  }
  }
  if (!firstTokenAt && (parsed?.type === "content_block_delta" || parsed?.delta?.text)) {
@@ -17558,6 +17635,9 @@ async function handleAnthropicProtocol(request, reply, endpoint, endpointId, app
  requests: 1,
  inputTokens: usagePrompt,
  outputTokens: usageCompletion,
+ cachedTokens: usageCached,
+ cacheReadTokens: cached.read,
+ cacheCreationTokens: cached.creation,
  latencyMs: totalLatencyMs
  });
  if (storeResponsePayloads && capturedChunks) {
@@ -17737,12 +17817,13 @@ async function handleOpenAIChatProtocol(request, reply, endpoint, endpointId, ap
  const usagePayload = json?.usage ?? null;
  const inputTokens2 = usagePayload?.prompt_tokens ?? usagePayload?.input_tokens ?? target.tokenEstimate ?? estimateTokens(normalized, target.modelId);
  const outputTokens2 = usagePayload?.completion_tokens ?? usagePayload?.output_tokens ?? estimateTextTokens(json?.choices?.[0]?.message?.content ?? "", target.modelId);
- const cachedTokens = resolveCachedTokens3(usagePayload);
+ const cached2 = resolveCachedTokens3(usagePayload);
+ const cachedTokens2 = cached2.read + cached2.creation;
  const latencyMs2 = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
- cachedTokens,
+ cachedTokens: cachedTokens2,
  ttftMs: latencyMs2,
  tpotMs: computeTpot3(latencyMs2, outputTokens2, { streaming: false })
  });
@@ -17751,6 +17832,7 @@ async function handleOpenAIChatProtocol(request, reply, endpoint, endpointId, ap
  requests: 1,
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
+ cachedTokens: cachedTokens2,
  latencyMs: latencyMs2
  });
  if (storeResponsePayloads) {
@@ -18016,12 +18098,13 @@ async function handleOpenAIResponsesProtocol(request, reply, endpoint, endpointI
  const inputTokens2 = usagePayload?.prompt_tokens ?? usagePayload?.input_tokens ?? target.tokenEstimate ?? estimateTokens(normalized, target.modelId);
  const content = json?.response?.body?.content ?? json?.choices?.[0]?.message?.content ?? "";
  const outputTokens2 = usagePayload?.completion_tokens ?? usagePayload?.output_tokens ?? estimateTextTokens(content, target.modelId);
- const cachedTokens = resolveCachedTokens3(usagePayload);
+ const cached2 = resolveCachedTokens3(usagePayload);
+ const cachedTokens2 = cached2.read + cached2.creation;
  const latencyMs2 = Date.now() - requestStart;
  await updateLogTokens(logId, {
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
- cachedTokens,
+ cachedTokens: cachedTokens2,
  ttftMs: latencyMs2,
  tpotMs: computeTpot3(latencyMs2, outputTokens2, { streaming: false })
  });
@@ -18030,6 +18113,7 @@ async function handleOpenAIResponsesProtocol(request, reply, endpoint, endpointI
  requests: 1,
  inputTokens: inputTokens2,
  outputTokens: outputTokens2,
+ cachedTokens: cachedTokens2,
  latencyMs: latencyMs2
  });
  if (storeResponsePayloads) {
package/src/web/dist/assets/{About-DpeHLv4-.js → About-BWcLLdLY.js} CHANGED
@@ -1,4 +1,4 @@
- import{c as p,u as v,a as k,r,j as e,d as o,U as j,m as i}from"./index-B-8NzzZq.js";import{u as N}from"./useApiQuery-DZHaAgf7.js";import{P as w,a as d}from"./PageSection-BbmHOMiD.js";import"./Input-jZIOmpmG.js";import{B as b}from"./Button-CkW8jVT-.js";import{I as y}from"./info-B7P9oOld.js";/**
+ import{c as p,u as v,a as k,r,j as e,d as o,U as j,m as i}from"./index-BK1UNVMz.js";import{u as N}from"./useApiQuery-BNTE55UK.js";import{P as w,a as d}from"./PageSection-B08EcVAN.js";import"./Input-BdyQWPOU.js";import{B as b}from"./Button-CZXniSHM.js";import{I as y}from"./info-BTcWJb9B.js";/**
  * @license lucide-react v0.344.0 - ISC
  *
  * This source code is licensed under the ISC license.
@@ -8,4 +8,4 @@ import{c as p,u as v,a as k,r,j as e,d as o,U as j,m as i}from"./index-B-8NzzZq.
  *
  * This source code is licensed under the ISC license.
  * See the LICENSE file in the root directory of this source tree.
- */const E=p("Sparkles",[["path",{d:"m12 3-1.912 5.813a2 2 0 0 1-1.275 1.275L3 12l5.813 1.912a2 2 0 0 1 1.275 1.275L12 21l1.912-5.813a2 2 0 0 1 1.275-1.275L21 12l-5.813-1.912a2 2 0 0 1-1.275-1.275L12 3Z",key:"17u4zn"}],["path",{d:"M5 3v4",key:"bklmnn"}],["path",{d:"M19 17v4",key:"iiml17"}],["path",{d:"M3 5h4",key:"nem4j1"}],["path",{d:"M17 19h4",key:"lbex7p"}]]),I="0.4.1",_={version:I},L={VITE_BUILD_TIME:"2025-10-29T01:35:31.647Z",VITE_NODE_VERSION:"v22.14.0"};function m({items:t}){return t.length===0?null:e.jsx("dl",{className:"grid gap-4 sm:grid-cols-2 xl:grid-cols-2",children:t.map(s=>e.jsxs("div",{className:"rounded-2xl border border-slate-200/50 bg-white p-4 shadow-sm shadow-slate-200/30 transition-all duration-200 hover:-translate-y-0.5 hover:border-slate-200/70 hover:shadow-md hover:shadow-slate-200/40 dark:border-slate-700/50 dark:bg-slate-900/80 dark:shadow-lg dark:shadow-slate-900/30 dark:hover:border-slate-600/70",children:[e.jsx("dt",{className:"text-xs font-semibold uppercase tracking-[0.14em] text-slate-500 dark:text-slate-400",children:s.label}),e.jsx("dd",{className:"mt-2 text-base font-semibold text-slate-900 dark:text-slate-100",children:s.value}),s.hint?e.jsx("p",{className:o(i,"mt-2 text-xs leading-relaxed"),children:s.hint}):null]},s.label))})}function P(){const{t}=v(),{pushToast:s}=k(),a=N(["status","gateway"],{url:"/api/status",method:"GET"},{staleTime:6e4});r.useEffect(()=>{a.isError&&a.error&&s({title:t("about.toast.statusError.title"),description:a.error.message,variant:"error"})},[a.isError,a.error,s,t]);const n=_.version,l=r.useMemo(()=>{const u=L,f=u.VITE_BUILD_TIME,g=u.VITE_NODE_VERSION;return{buildTime:f,nodeVersion:g}},[]),h=r.useMemo(()=>[{label:t("about.app.labels.name"),value:e.jsx("span",{className:"font-mono text-sm font-semibold text-slate-900 dark:text-slate-100",children:"cc-gw"})},{label:t("about.app.labels.version"),value:e.jsxs("span",{className:"font-mono text-sm font-semibold text-blue-700 dark:text-blue-200",children:["v",n]})},{label:t("about.app.labels.buildTime"),value:l.buildTime,hint:t("about.app.hint.buildTime")},{label:t("about.app.labels.node"),value:e.jsx("span",{className:"font-mono text-sm text-slate-800 dark:text-slate-200",children:l.nodeVersion})}],[n,l.buildTime,l.nodeVersion,t]),c=r.useMemo(()=>a.data?[{label:t("about.status.labels.host"),value:a.data.host??"127.0.0.1"},{label:t("about.status.labels.port"),value:a.data.port.toLocaleString()},{label:t("about.status.labels.providers"),value:a.data.providers.toLocaleString()},{label:t("about.status.labels.active"),value:(a.data.activeRequests??0).toLocaleString(),hint:t("about.status.hint.active")}]:[],[a.data,t]),x=()=>{s({title:t("about.toast.updatesPlanned"),variant:"info"})};return e.jsxs("div",{className:"space-y-8",children:[e.jsx(w,{icon:e.jsx(y,{className:"h-6 w-6","aria-hidden":"true"}),title:t("about.title"),description:t("about.description"),badge:`v${n}`,actions:e.jsx(b,{variant:"primary",icon:e.jsx(E,{className:"h-4 w-4","aria-hidden":"true"}),onClick:x,children:t("about.support.actions.checkUpdates")})}),e.jsxs("div",{className:"grid gap-6 lg:grid-cols-2",children:[e.jsx(d,{title:t("about.app.title"),description:t("about.app.subtitle"),className:"h-full",contentClassName:"gap-4",children:e.jsx(m,{items:h})}),e.jsx(d,{title:t("about.status.title"),description:t("about.status.subtitle"),className:"h-full",contentClassName:"gap-4",actions:e.jsx(b,{variant:"subtle",size:"sm",icon:e.jsx(T,{className:"h-4 
w-4","aria-hidden":"true"}),onClick:()=>a.refetch(),loading:a.isFetching,children:a.isFetching?t("common.actions.refreshing"):t("common.actions.refresh")}),children:a.isLoading?e.jsxs("div",{className:"flex h-36 flex-col items-center justify-center gap-3 text-center",children:[e.jsx("div",{className:"h-10 w-10 animate-spin rounded-full border-[3px] border-blue-500/30 border-t-blue-600 dark:border-blue-400/20 dark:border-t-blue-300"}),e.jsx("p",{className:o(i,"text-sm"),children:t("about.status.loading")})]}):c.length>0?e.jsx(m,{items:c}):e.jsxs("div",{className:"flex h-36 flex-col items-center justify-center gap-2 rounded-2xl border border-dashed border-slate-200/60 bg-white p-6 text-center shadow-inner dark:border-slate-700/60 dark:bg-slate-900/60",children:[e.jsx("p",{className:"text-sm font-semibold text-slate-700 dark:text-slate-200",children:t("about.status.empty")}),e.jsx("p",{className:o(i,"text-xs"),children:t("common.actions.refresh")})]})})]}),e.jsx(d,{title:t("about.support.title"),description:e.jsxs("span",{className:"space-y-1",children:[e.jsx("span",{className:"block text-sm font-semibold text-blue-600 dark:text-blue-300",children:t("about.support.subtitle")}),e.jsx("span",{children:t("about.support.description")})]}),className:"relative overflow-hidden",contentClassName:"gap-6",children:e.jsxs("div",{className:"flex flex-col gap-4 rounded-3xl border border-slate-200/50 bg-white p-6 shadow-lg shadow-slate-200/30 backdrop-blur-md dark:border-slate-700/50 dark:bg-slate-900/80 dark:shadow-slate-900/40",children:[e.jsxs("div",{className:"flex flex-wrap items-start gap-4",children:[e.jsx("div",{className:"grid h-12 w-12 place-items-center rounded-2xl bg-gradient-to-br from-blue-500/20 to-indigo-500/20 text-blue-600 shadow-inner dark:text-blue-200",children:e.jsx(j,{className:"h-6 w-6","aria-hidden":"true"})}),e.jsx("p",{className:o(i,"text-sm leading-6"),children:t("about.support.tip")})]}),e.jsx("code",{className:"inline-flex items-center gap-2 self-start rounded-full border border-blue-200/50 bg-blue-50/80 px-4 py-2 text-xs font-semibold tracking-wide text-blue-700 shadow-sm dark:border-blue-500/30 dark:bg-blue-900/30 dark:text-blue-200",children:"~/.cc-gw/config.json"})]})})]})}export{P as default};
+ */const E=p("Sparkles",[["path",{d:"m12 3-1.912 5.813a2 2 0 0 1-1.275 1.275L3 12l5.813 1.912a2 2 0 0 1 1.275 1.275L12 21l1.912-5.813a2 2 0 0 1 1.275-1.275L21 12l-5.813-1.912a2 2 0 0 1-1.275-1.275L12 3Z",key:"17u4zn"}],["path",{d:"M5 3v4",key:"bklmnn"}],["path",{d:"M19 17v4",key:"iiml17"}],["path",{d:"M3 5h4",key:"nem4j1"}],["path",{d:"M17 19h4",key:"lbex7p"}]]),I="0.4.3",_={version:I},L={VITE_BUILD_TIME:"2025-10-30T03:41:36.581Z",VITE_NODE_VERSION:"v22.14.0"};function m({items:t}){return t.length===0?null:e.jsx("dl",{className:"grid gap-4 sm:grid-cols-2 xl:grid-cols-2",children:t.map(s=>e.jsxs("div",{className:"rounded-2xl border border-slate-200/50 bg-white p-4 shadow-sm shadow-slate-200/30 transition-all duration-200 hover:-translate-y-0.5 hover:border-slate-200/70 hover:shadow-md hover:shadow-slate-200/40 dark:border-slate-700/50 dark:bg-slate-900/80 dark:shadow-lg dark:shadow-slate-900/30 dark:hover:border-slate-600/70",children:[e.jsx("dt",{className:"text-xs font-semibold uppercase tracking-[0.14em] text-slate-500 dark:text-slate-400",children:s.label}),e.jsx("dd",{className:"mt-2 text-base font-semibold text-slate-900 dark:text-slate-100",children:s.value}),s.hint?e.jsx("p",{className:o(i,"mt-2 text-xs leading-relaxed"),children:s.hint}):null]},s.label))})}function P(){const{t}=v(),{pushToast:s}=k(),a=N(["status","gateway"],{url:"/api/status",method:"GET"},{staleTime:6e4});r.useEffect(()=>{a.isError&&a.error&&s({title:t("about.toast.statusError.title"),description:a.error.message,variant:"error"})},[a.isError,a.error,s,t]);const n=_.version,l=r.useMemo(()=>{const u=L,f=u.VITE_BUILD_TIME,g=u.VITE_NODE_VERSION;return{buildTime:f,nodeVersion:g}},[]),h=r.useMemo(()=>[{label:t("about.app.labels.name"),value:e.jsx("span",{className:"font-mono text-sm font-semibold text-slate-900 dark:text-slate-100",children:"cc-gw"})},{label:t("about.app.labels.version"),value:e.jsxs("span",{className:"font-mono text-sm font-semibold text-blue-700 dark:text-blue-200",children:["v",n]})},{label:t("about.app.labels.buildTime"),value:l.buildTime,hint:t("about.app.hint.buildTime")},{label:t("about.app.labels.node"),value:e.jsx("span",{className:"font-mono text-sm text-slate-800 dark:text-slate-200",children:l.nodeVersion})}],[n,l.buildTime,l.nodeVersion,t]),c=r.useMemo(()=>a.data?[{label:t("about.status.labels.host"),value:a.data.host??"127.0.0.1"},{label:t("about.status.labels.port"),value:a.data.port.toLocaleString()},{label:t("about.status.labels.providers"),value:a.data.providers.toLocaleString()},{label:t("about.status.labels.active"),value:(a.data.activeRequests??0).toLocaleString(),hint:t("about.status.hint.active")}]:[],[a.data,t]),x=()=>{s({title:t("about.toast.updatesPlanned"),variant:"info"})};return e.jsxs("div",{className:"space-y-8",children:[e.jsx(w,{icon:e.jsx(y,{className:"h-6 w-6","aria-hidden":"true"}),title:t("about.title"),description:t("about.description"),badge:`v${n}`,actions:e.jsx(b,{variant:"primary",icon:e.jsx(E,{className:"h-4 w-4","aria-hidden":"true"}),onClick:x,children:t("about.support.actions.checkUpdates")})}),e.jsxs("div",{className:"grid gap-6 lg:grid-cols-2",children:[e.jsx(d,{title:t("about.app.title"),description:t("about.app.subtitle"),className:"h-full",contentClassName:"gap-4",children:e.jsx(m,{items:h})}),e.jsx(d,{title:t("about.status.title"),description:t("about.status.subtitle"),className:"h-full",contentClassName:"gap-4",actions:e.jsx(b,{variant:"subtle",size:"sm",icon:e.jsx(T,{className:"h-4 
w-4","aria-hidden":"true"}),onClick:()=>a.refetch(),loading:a.isFetching,children:a.isFetching?t("common.actions.refreshing"):t("common.actions.refresh")}),children:a.isLoading?e.jsxs("div",{className:"flex h-36 flex-col items-center justify-center gap-3 text-center",children:[e.jsx("div",{className:"h-10 w-10 animate-spin rounded-full border-[3px] border-blue-500/30 border-t-blue-600 dark:border-blue-400/20 dark:border-t-blue-300"}),e.jsx("p",{className:o(i,"text-sm"),children:t("about.status.loading")})]}):c.length>0?e.jsx(m,{items:c}):e.jsxs("div",{className:"flex h-36 flex-col items-center justify-center gap-2 rounded-2xl border border-dashed border-slate-200/60 bg-white p-6 text-center shadow-inner dark:border-slate-700/60 dark:bg-slate-900/60",children:[e.jsx("p",{className:"text-sm font-semibold text-slate-700 dark:text-slate-200",children:t("about.status.empty")}),e.jsx("p",{className:o(i,"text-xs"),children:t("common.actions.refresh")})]})})]}),e.jsx(d,{title:t("about.support.title"),description:e.jsxs("span",{className:"space-y-1",children:[e.jsx("span",{className:"block text-sm font-semibold text-blue-600 dark:text-blue-300",children:t("about.support.subtitle")}),e.jsx("span",{children:t("about.support.description")})]}),className:"relative overflow-hidden",contentClassName:"gap-6",children:e.jsxs("div",{className:"flex flex-col gap-4 rounded-3xl border border-slate-200/50 bg-white p-6 shadow-lg shadow-slate-200/30 backdrop-blur-md dark:border-slate-700/50 dark:bg-slate-900/80 dark:shadow-slate-900/40",children:[e.jsxs("div",{className:"flex flex-wrap items-start gap-4",children:[e.jsx("div",{className:"grid h-12 w-12 place-items-center rounded-2xl bg-gradient-to-br from-blue-500/20 to-indigo-500/20 text-blue-600 shadow-inner dark:text-blue-200",children:e.jsx(j,{className:"h-6 w-6","aria-hidden":"true"})}),e.jsx("p",{className:o(i,"text-sm leading-6"),children:t("about.support.tip")})]}),e.jsx("code",{className:"inline-flex items-center gap-2 self-start rounded-full border border-blue-200/50 bg-blue-50/80 px-4 py-2 text-xs font-semibold tracking-wide text-blue-700 shadow-sm dark:border-blue-500/30 dark:bg-blue-900/30 dark:text-blue-200",children:"~/.cc-gw/config.json"})]})})]})}export{P as default};
package/src/web/dist/assets/{ApiKeys-CTR9Ynaf.js → ApiKeys-DsYmx21U.js} CHANGED
@@ -1,4 +1,4 @@
- import{c as T,u as W,a as J,r as d,j as e,L as X,N as Y,d as t,b as K,m as r,H as m,E as Z,k as ee,l as ae,f as se,h as te}from"./index-B-8NzzZq.js";import{E as le}from"./index-BuSvAxno.js";import{u as w}from"./useApiQuery-DZHaAgf7.js";import{P as ie,a as q}from"./PageSection-BbmHOMiD.js";import{F as I}from"./FormField-CsiVhN9M.js";import{I as re}from"./Input-jZIOmpmG.js";import{B as C}from"./Button-CkW8jVT-.js";import{S as ne}from"./StatusBadge-DCmNAKwK.js";import{C as ce}from"./copy-D50F736h.js";/**
+ import{c as T,u as W,a as J,r as d,j as e,L as X,N as Y,d as t,b as K,m as r,H as m,E as Z,k as ee,l as ae,f as se,h as te}from"./index-BK1UNVMz.js";import{E as le}from"./index-CyrAg0Ev.js";import{u as w}from"./useApiQuery-BNTE55UK.js";import{P as ie,a as q}from"./PageSection-B08EcVAN.js";import{F as I}from"./FormField-SZpxR702.js";import{I as re}from"./Input-BdyQWPOU.js";import{B as C}from"./Button-CZXniSHM.js";import{S as ne}from"./StatusBadge-8KAMZvYW.js";import{C as ce}from"./copy-BdNskWTP.js";/**
  * @license lucide-react v0.344.0 - ISC
  *
  * This source code is licensed under the ISC license.