@opentiny/next-sdk 0.1.12 → 0.1.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,13 +1,17 @@
  import { streamText, stepCountIs, generateText, StreamTextResult } from 'ai'
- import { experimental_createMCPClient as createMCPClient, experimental_MCPClientConfig as MCPClientConfig } from 'ai'
+ import { experimental_MCPClientConfig as MCPClientConfig } from 'ai'
  import type { ToolSet } from 'ai'
  import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'
+ import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js'
  import type { IAgentModelProviderOption, McpServerConfig } from './type'
  import { ProviderV2 } from '@ai-sdk/provider'
  import { OpenAIProvider } from '@ai-sdk/openai'
  import { createOpenAI } from '@ai-sdk/openai'
  import { createDeepSeek } from '@ai-sdk/deepseek'
  import { ExtensionClientTransport } from '../transport/ExtensionClientTransport'
+ import { MessageChannelTransport } from '@opentiny/next'
+ import { WebMcpClient } from '../WebMcpClient'
+ import { getAISDKTools } from './utils/getAISDKTools'

  export const AIProviderFactories = {
    ['openai']: createOpenAI,
@@ -40,27 +44,31 @@ export class AgentModelProvider {
    /** Cache the multi-turn conversation context from ai-sdk responses */
    messages: any[] = []

-   constructor({ llmConfig, mcpServers, llm }: IAgentModelProviderOption) {
+   constructor({ llmConfig, mcpServers }: IAgentModelProviderOption) {
+     if (!llmConfig) {
+       throw new Error('llmConfig is required to initialize AgentModelProvider')
+     }
      this.mcpServers = mcpServers || {}
      this.mcpClients = {}
      this.mcpTools = {}

-     if (llm) {
-       this.llm = llm
-     } else if (llmConfig) {
+     if (llmConfig.llm) {
+       this.llm = llmConfig.llm
+     } else if (llmConfig.providerType) {
+       const providerType = llmConfig.providerType
        let providerFn: (options: any) => ProviderV2 | OpenAIProvider

-       if (typeof llmConfig.providerType === 'string') {
-         providerFn = AIProviderFactories[llmConfig.providerType]
+       if (typeof providerType === 'string') {
+         providerFn = AIProviderFactories[providerType]
        } else {
-         providerFn = llmConfig.providerType
+         providerFn = providerType
        }
        this.llm = providerFn({
          apiKey: llmConfig.apiKey,
          baseURL: llmConfig.baseURL
        })
      } else {
-       throw new Error('Either llmConfig or llm must be provided')
+       throw new Error('Either llmConfig.llm or llmConfig.providerType must be provided')
      }
    }

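With this change both configuration styles flow through the single llmConfig option. A minimal usage sketch of the two accepted shapes — the API keys and base URLs are placeholders, and importing AgentModelProvider from the package entry is an assumption, not shown in this diff:

import { createOpenAI } from '@ai-sdk/openai'
import { AgentModelProvider } from '@opentiny/next-sdk' // assumed export path

// Shape 1: the SDK builds the provider from a factory name ('openai' | 'deepseek') or a custom factory
const byFactory = new AgentModelProvider({
  llmConfig: { providerType: 'deepseek', apiKey: 'sk-...', baseURL: 'https://api.deepseek.com' }
})

// Shape 2: pass a ready-made ai-sdk Provider instance; the constructor checks llmConfig.llm first
const byInstance = new AgentModelProvider({
  llmConfig: { llm: createOpenAI({ apiKey: 'sk-...' }) }
})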
@@ -77,7 +85,12 @@ export class AgentModelProvider {
      transport = serverConfig as MCPClientConfig['transport']
    }

-   const client = await createMCPClient({ transport: transport as MCPClientConfig['transport'] })
+   const client = new WebMcpClient(
+     { name: 'mcp-web-client', version: '1.0.0' },
+     { capabilities: { roots: { listChanged: true }, sampling: {}, elicitation: {} } }
+   )
+   await client.connect(transport)
+
    //@ts-ignore
    client['__transport__'] = transport
@@ -93,8 +106,19 @@ export class AgentModelProvider {
    /** Close a single mcpClient */
    private async _closeOneClient(client: any) {
      try {
-       await client['__transport__']?.terminateSession?.()
-       await client['__transport__']?.close?.()
+       const transport = client['__transport__']
+
+       // Paired transports (InMemoryTransport, MessageChannelTransport) must not be closed here: closing one end would affect the other (server) end
+       if (
+         (transport && transport instanceof InMemoryTransport) ||
+         (transport && transport instanceof MessageChannelTransport)
+       ) {
+         return
+       }
+
+       // Close all other transport types normally
+       await transport?.terminateSession?.()
+       await transport?.close?.()
        await client?.close?.()
      } catch (error) {}
    }
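The new guard exists because in-memory and message-channel transports come as linked pairs: closing the client end also tears down the server end. A minimal sketch using the MCP SDK's linked pair (the server wiring is elided):

import { InMemoryTransport } from '@modelcontextprotocol/sdk/inMemory.js'

// createLinkedPair returns two ends of one channel; messages written to one end are read from the other
const [clientTransport, serverTransport] = InMemoryTransport.createLinkedPair()
// serverTransport would be handed to an in-page MCP server; closing clientTransport here
// would end that server session too, which is why _closeOneClient skips paired transports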
@@ -120,7 +144,7 @@ export class AgentModelProvider {
      const tools = await Promise.all(
        clientEntries.map(async ([serverName, client]) => {
          try {
-           const result = client ? await client?.tools?.() : null
+           const result = client ? await getAISDKTools(client) : null
            return { serverName, tools: result }
          } catch (error: unknown) {
            if (this.onError) {
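getAISDKTools replaces the tools() helper that the experimental createMCPClient used to provide. Its implementation is not shown in this diff; the sketch below is only a plausible shape of the MCP-to-ai-sdk adaptation it presumably performs, assuming WebMcpClient exposes the standard MCP Client methods listTools and callTool:

import { tool, jsonSchema, type ToolSet } from 'ai'

async function getAISDKToolsSketch(client: WebMcpClient): Promise<ToolSet> {
  // Fetch the server's tool list, then wrap each tool for the ai-sdk
  const { tools } = await client.listTools()
  const toolSet: ToolSet = {}
  for (const t of tools) {
    toolSet[t.name] = tool({
      description: t.description ?? '',
      inputSchema: jsonSchema(t.inputSchema as any),
      // Forward ai-sdk tool calls to the MCP server
      execute: async (args) => client.callTool({ name: t.name, arguments: args as Record<string, unknown> })
    })
  }
  return toolSet
}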
@@ -134,7 +158,8 @@ export class AgentModelProvider {
      // Store the results in an object keyed by serverName
      this.mcpTools = {}
      tools.forEach(({ serverName, tools: toolsData }) => {
-       this.mcpTools[serverName] = toolsData
+       const normalizedTools = toolsData && typeof toolsData === 'object' ? (toolsData as Record<string, any>) : {}
+       this.mcpTools[serverName] = normalizedTools
      })
    }
    /** Close all clients */
@@ -175,12 +200,19 @@ export class AgentModelProvider {
    }

    const client = await this._createOneClient(mcpServer)
+   if (!client) {
+     // Return early when client creation fails, to avoid null dereferences downstream
+     this.onError?.(`Failed to create MCP client: ${serverName}`)
+     return null
+   }
    this.mcpClients[serverName] = client
-   this.mcpTools[serverName] = (await client?.tools?.()) as Record<string, any>
+   const tools = await getAISDKTools(client)
+   // The tool list may be null; fall back to an empty object to keep types safe
+   this.mcpTools[serverName] = tools && typeof tools === 'object' ? (tools as Record<string, any>) : {}
    this.mcpServers[serverName] = mcpServer
    this.onUpdatedTools?.()

-   return true
+   return client
  }
  /** Remove an mcpServer by server name, clearing mcpServers, mcpClients, mcpTools and ignoreToolnames */
  async removeMcpServer(serverName: string) {
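Note the breaking change at the end of this hunk: addMcpServer now resolves to the created client (or null on failure) instead of true. Callers that tested for a boolean should switch to a truthiness check; the provider instance and server config below are placeholders:

// 0.1.12: const ok = await provider.addMcpServer(serverName, config)
const client = await provider.addMcpServer('docs', { type: 'streamableHttp', url: 'https://example.com/mcp' })
if (client) {
  // the returned client can now be used directly by the caller
}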
package/agent/type.ts CHANGED
@@ -2,17 +2,31 @@ export type { experimental_MCPClient as MCPClient } from 'ai'
  import type { ProviderV2 } from '@ai-sdk/provider'
  import type { MCPTransport } from 'ai'

- /** LLM configuration object for the agent model provider */
- export interface IAgentModelProviderLlmConfig {
+ type ProviderFactory = 'openai' | 'deepseek' | ((options: any) => ProviderV2)
+
+ type LlmFactoryConfig = {
+   /** API key */
    apiKey: string
+   /** API base URL */
    baseURL: string
-   /** Supports the built-in common providers, or pass an official ai-sdk Provider factory function
-    * @example
-    * import { createOpenAI } from '@ai-sdk/openai'
-    */
-   providerType: 'openai' | 'deepseek' | ((options: any) => ProviderV2)
+   /** Built-in or custom Provider factory function */
+   providerType: ProviderFactory
+   /** Mutually exclusive: llm must not be passed on the providerType branch */
+   llm?: never
+ }
+
+ type LlmInstanceConfig = {
+   /** Custom Provider instance; takes highest priority */
+   llm: ProviderV2
+   /** Mutually exclusive: apiKey/baseURL/providerType are not needed when an llm instance is passed */
+   apiKey?: never
+   baseURL?: never
+   providerType?: never
  }

+ /** LLM configuration object for the agent model provider; an XOR union, exactly one shape applies */
+ export type IAgentModelProviderLlmConfig = LlmFactoryConfig | LlmInstanceConfig
+
  /** Configuration object for an Mcp Server */
  export type McpServerConfig =
    | { type: 'streamableHttp'; url: string }
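The optional never fields are what make this union behave as an XOR: an object matching one branch cannot also carry the other branch's keys. A quick illustration of what now compiles and what does not (keys are placeholders):

import { createOpenAI } from '@ai-sdk/openai'

const viaFactory: IAgentModelProviderLlmConfig = {
  providerType: 'openai', apiKey: 'sk-...', baseURL: 'https://api.openai.com/v1'
}
const viaInstance: IAgentModelProviderLlmConfig = { llm: createOpenAI({ apiKey: 'sk-...' }) }

// @ts-expect-error mixing llm with the factory fields fails both branches of the XOR
const mixed: IAgentModelProviderLlmConfig = { llm: createOpenAI({ apiKey: 'sk-...' }), providerType: 'openai' }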
@@ -22,13 +36,8 @@ export type McpServerConfig =
  /** */
  export interface IAgentModelProviderOption {
-   /** Official ai-sdk Provider instance; must not be passed together with llmConfig
-    * @example
-    * import { openai } from '@ai-sdk/openai'
-    */
-   llm?: ProviderV2
-   /** LLM configuration object for the agent model provider; must not be passed together with llm */
-   llmConfig?: IAgentModelProviderLlmConfig
+   /** LLM configuration object for the agent model provider */
+   llmConfig: IAgentModelProviderLlmConfig
    /** Collection of Mcp Server configuration objects, keyed by server name */
    mcpServers?: Record<string, McpServerConfig>
  }
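Putting the two files together, a complete option object under the new required llmConfig contract might look like this (the URL and server name are illustrative):

const option: IAgentModelProviderOption = {
  llmConfig: { providerType: 'openai', apiKey: 'sk-...', baseURL: 'https://api.openai.com/v1' },
  mcpServers: {
    docs: { type: 'streamableHttp', url: 'https://example.com/mcp' }
  }
}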