@lobehub/lobehub 2.0.0-next.116 → 2.0.0-next.118

This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -250,7 +250,7 @@ export class MCPService {
     } catch (error) {
       console.error(`Failed to initialize MCP client:`, error);

-      // 保留完整的错误信息,特别是详细的 stderr 输出
+      // Preserve complete error information, especially detailed stderr output
       const errorMessage = error instanceof Error ? error.message : String(error);

       if (typeof error === 'object' && !!error && 'data' in error) {
@@ -261,7 +261,7 @@ export class MCPService {
         });
       }

-      // 记录详细的错误信息用于调试
+      // Log detailed error information for debugging
       log('Detailed initialization error: %O', {
         error: errorMessage,
         params: this.sanitizeForLogging(params),
@@ -271,7 +271,7 @@ export class MCPService {
       throw new TRPCError({
         cause: error,
         code: 'INTERNAL_SERVER_ERROR',
-        message: errorMessage, // 直接使用完整的错误信息
+        message: errorMessage, // Use complete error message directly
       });
     }
   }
@@ -307,12 +307,12 @@ export class MCPService {
   ): Promise<LobeChatPluginManifest> {
     const mcpParams = { name: identifier, type: 'http' as const, url };

-    // 如果有认证信息,添加到参数中
+    // Add authentication info to parameters if available
     if (auth) {
       (mcpParams as any).auth = auth;
     }

-    // 如果有 headers 信息,添加到参数中
+    // Add headers info to parameters if available
     if (headers) {
       (mcpParams as any).headers = headers;
     }
@@ -383,23 +383,23 @@ export class MCPService {
     log('Checking MCP plugin installation status: %O', loggableInput);
     const results = [];

-    // 检查每个部署选项
+    // Check each deployment option
     for (const option of input.deploymentOptions) {
-      // 使用系统依赖检查服务检查部署选项
+      // Use system dependency check service to check deployment option
      const result = await mcpSystemDepsCheckService.checkDeployOption(option);
      results.push(result);
    }

-    // 找出推荐的或第一个可安装的选项
+    // Find the recommended or first installable option
     const recommendedResult = results.find((r) => r.isRecommended && r.allDependenciesMet);
     const firstInstallableResult = results.find((r) => r.allDependenciesMet);

-    // 返回推荐的结果,或第一个可安装的结果,或第一个结果
+    // Return the recommended result, or the first installable result, or the first result
     const bestResult = recommendedResult || firstInstallableResult || results[0];

     log('Check completed, best result: %O', bestResult);

-    // 构造返回结果,确保包含配置检查信息
+    // Construct return result, ensure configuration check information is included
     const checkResult: CheckMcpInstallResult = {
       ...bestResult,
       allOptions: results,
@@ -407,7 +407,7 @@ export class MCPService {
       success: true,
     };

-    // 如果最佳结果需要配置,确保在顶层设置相关字段
+    // If the best result requires configuration, ensure related fields are set at the top level
     if (bestResult?.needsConfig) {
       checkResult.needsConfig = true;
       checkResult.configSchema = bestResult.configSchema;
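
The hunks above only translate inline comments, but they sit inside an error-handling path that preserves the full MCP initialization failure (including detailed stderr output) and rethrows it as a TRPCError that keeps the original error in its cause field. A minimal standalone sketch of that pattern, with hypothetical names (initializeMcpClient, params) standing in for the code outside the hunks:

import debug from 'debug';
import { TRPCError } from '@trpc/server';

const log = debug('lobe-mcp:service');

// Hypothetical wrapper illustrating the pattern shown above: keep the complete
// error message and surface it through a TRPCError whose `cause` retains the
// original error object for upstream inspection.
async function initializeMcpClient(params: { name: string; url?: string }) {
  try {
    // ...create and connect the real MCP client here (omitted)...
    throw new Error('connect ECONNREFUSED 127.0.0.1:3000'); // placeholder failure
  } catch (error) {
    console.error(`Failed to initialize MCP client:`, error);

    // Preserve complete error information, especially detailed stderr output
    const errorMessage = error instanceof Error ? error.message : String(error);

    // Log detailed error information for debugging
    log('Detailed initialization error: %O', { error: errorMessage, params });

    throw new TRPCError({
      cause: error,
      code: 'INTERNAL_SERVER_ERROR',
      message: errorMessage, // Use complete error message directly
    });
  }
}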
@@ -1,4 +1,5 @@
-import { contextCachingModels, thinkingWithToolClaudeModels } from '@/const/models';
+import { isContextCachingModel, isThinkingWithToolClaudeModel } from '@lobechat/model-runtime';
+
 import { DEFAULT_AGENT_CHAT_CONFIG, DEFAULT_AGENT_SEARCH_FC_MODEL } from '@/const/settings';
 import { AgentStoreState } from '@/store/agent/initialState';
 import { LobeAgentChatConfig } from '@/types/agent';
@@ -24,12 +25,12 @@ const enableHistoryCount = (s: AgentStoreState) => {
   // If context caching is enabled and the current model matches, do not enable history count
   const enableContextCaching = !chatConfig.disableContextCaching;

-  if (enableContextCaching && contextCachingModels.has(config.model)) return false;
+  if (enableContextCaching && isContextCachingModel(config.model)) return false;

   // When search is enabled, do not enable history count for claude 3.7 sonnet models
   const enableSearch = isAgentEnableSearch(s);

-  if (enableSearch && thinkingWithToolClaudeModels.has(config.model)) return false;
+  if (enableSearch && isThinkingWithToolClaudeModel(config.model)) return false;

   return chatConfig.enableHistoryCount;
 };
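
The selector now calls predicate helpers from @lobechat/model-runtime instead of checking membership in the Sets previously exported from @/const/models (removed in the next hunk). The helpers' real implementation is not part of this diff; a minimal sketch, assuming they simply wrap a model-ID set like the one being removed:

// Illustrative only: the actual helpers live in @lobechat/model-runtime and may
// match model IDs differently (for example by prefix or provider-qualified name).
const contextCachingModels = new Set<string>([
  'claude-sonnet-4-5-20250929',
  'claude-3-7-sonnet-latest',
  // ...remaining IDs from the removed @/const/models list
]);

export const isContextCachingModel = (model: string): boolean =>
  contextCachingModels.has(model);

// Usage, mirroring the selector above:
// if (enableContextCaching && isContextCachingModel(config.model)) return false;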
@@ -1,93 +0,0 @@
-export const systemToUserModels = new Set([
-  'o1-preview',
-  'o1-preview-2024-09-12',
-  'o1-mini',
-  'o1-mini-2024-09-12',
-]);
-
-// TODO: temporary implementation, needs to be refactored into model card display configuration
-export const disableStreamModels = new Set([
-  'o1',
-  'o1-2024-12-17',
-  'o1-pro',
-  'o1-pro-2025-03-19',
-  /*
-    Official documentation shows no support, but actual testing shows Streaming is supported, temporarily commented out
-    'o3-pro',
-    'o3-pro-2025-06-10',
-  */
-  'computer-use-preview',
-  'computer-use-preview-2025-03-11',
-]);
-
-/**
- * models use Responses API only
- */
-export const responsesAPIModels = new Set([
-  'o1-pro',
-  'o1-pro-2025-03-19',
-  'o3-deep-research',
-  'o3-deep-research-2025-06-26',
-  'o3-pro',
-  'o3-pro-2025-06-10',
-  'o4-mini-deep-research',
-  'o4-mini-deep-research-2025-06-26',
-  'codex-mini-latest',
-  'computer-use-preview',
-  'computer-use-preview-2025-03-11',
-  'gpt-5-codex',
-  'gpt-5-pro',
-  'gpt-5-pro-2025-10-06',
-  'gpt-5.1-codex',
-  'gpt-5.1-codex-mini',
-]);
-
-/**
- * models support context caching
- */
-export const contextCachingModels = new Set([
-  'claude-opus-4-5-20251101',
-  'claude-haiku-4-5-20251001',
-  'claude-sonnet-4-5-latest',
-  'claude-sonnet-4-5-20250929',
-  'anthropic/claude-sonnet-4.5',
-  'claude-opus-4-latest',
-  'claude-opus-4-20250514',
-  'claude-sonnet-4-latest',
-  'claude-sonnet-4-20250514',
-  'claude-3-7-sonnet-latest',
-  'claude-3-7-sonnet-20250219',
-  'claude-3-5-sonnet-latest',
-  'claude-3-5-sonnet-20241022',
-  'claude-3-5-sonnet-20240620',
-  'claude-3-5-haiku-latest',
-  'claude-3-5-haiku-20241022',
-  // Bedrock model IDs
-  'us.anthropic.claude-sonnet-4-5-20250929-v1:0',
-  'anthropic.claude-sonnet-4-5-20250929-v1:0',
-  'us.anthropic.claude-haiku-4-5-20251001-v1:0',
-  'anthropic.claude-haiku-4-5-20251001-v1:0',
-  'global.anthropic.claude-opus-4-5-20251101-v1:0',
-  'anthropic.claude-opus-4-5-20251101-v1:0',
-]);
-
-export const thinkingWithToolClaudeModels = new Set([
-  'claude-opus-4-5-20251101',
-  'claude-opus-4-latest',
-  'claude-opus-4-20250514',
-  'claude-sonnet-4-latest',
-  'claude-sonnet-4-20250514',
-  'claude-sonnet-4-5-latest',
-  'claude-sonnet-4-5-20250929',
-  'claude-haiku-4-5-20251001',
-  'anthropic/claude-sonnet-4.5',
-  'claude-3-7-sonnet-latest',
-  'claude-3-7-sonnet-20250219',
-  // Bedrock model IDs
-  'us.anthropic.claude-sonnet-4-5-20250929-v1:0',
-  'anthropic.claude-sonnet-4-5-20250929-v1:0',
-  'us.anthropic.claude-haiku-4-5-20251001-v1:0',
-  'anthropic.claude-haiku-4-5-20251001-v1:0',
-  'global.anthropic.claude-opus-4-5-20251101-v1:0',
-  'anthropic.claude-opus-4-5-20251101-v1:0',
-]);