@smithery/sdk 0.0.18 → 0.0.20
- package/README.md +41 -33
- package/dist/config.d.ts +1 -1
- package/dist/config.js +8 -1
- package/dist/multi-client.d.ts +12 -14
- package/dist/server/builder.d.ts +19 -15
- package/package.json +2 -2
package/README.md
CHANGED

@@ -22,68 +22,75 @@ npm install @smithery/sdk
 
 ## Usage
 
-In this example, we'll connect
+In this example, we'll connect to Exa search capabilities using either OpenAI or Anthropic.
 
 ```bash
-npm install @smithery/
+npm install @smithery/sdk @modelcontextprotocol/sdk
 ```
 
-The following code sets up
+The following code sets up the client and connects to an Exa MCP server:
 
 ```typescript
 import { MultiClient } from "@smithery/sdk"
 import { OpenAIChatAdapter } from "@smithery/sdk/integrations/llm/openai"
-import
+import { AnthropicChatAdapter } from "@smithery/sdk/integrations/llm/anthropic"
+import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js"
 import { OpenAI } from "openai"
-import
+import Anthropic from "@anthropic-ai/sdk"
+import EventSource from "eventsource"
 
-
-
-  apiKey: process.env.EXA_API_KEY,
-})
+// Patch event source for Node.js environment
+global.EventSource = EventSource as any
 
-
-
+// Create a new connection
+const exaTransport = new SSEClientTransport(
+  // Replace with your deployed MCP server URL
+  new URL("https://your-mcp-server.example.com/sse")
 )
+
+// Initialize a multi-client connection
 const client = new MultiClient()
 await client.connectAll({
-  exa:
-
+  exa: exaTransport,
+  // You can add more connections here...
+})
+
+// Configure and authenticate
+await client.clients.exa.request({
+  method: "config",
+  params: {
+    config: {
+      apiKey: process.env.EXA_API_KEY,
+    },
+  },
 })
 ```
 
-Now you can
+Now you can use either OpenAI or Anthropic to interact with the tools:
 
 ```typescript
-//
-const
-const
+// Using OpenAI
+const openai = new OpenAI()
+const openaiAdapter = new OpenAIChatAdapter(client)
+const openaiResponse = await openai.chat.completions.create({
   model: "gpt-4o-mini",
-  messages: [{ role: "user", content: "
-
-  tools: await adapter.listTools(),
+  messages: [{ role: "user", content: "What AI events are happening in Singapore?" }],
+  tools: await openaiAdapter.listTools(),
 })
-
-const toolMessages = await adapter.callTool(response)
+const openaiToolMessages = await openaiAdapter.callTool(openaiResponse)
 ```
 
-
-
-However, it's often the case where your LLM needs to call a tool, see its response, and continue processing output of the tool in order to give you a final response.
-
-In this case, you have to loop your LLM call and update your messages until there are no more toolMessages to continue.
-
-Example:
+For more complex interactions where the LLM needs to process tool outputs and potentially make additional calls, you'll need to implement a conversation loop. Here's an example:
 
 ```typescript
 let messages = [
   {
     role: "user",
-    content:
-      "Deduce Obama's age in number of days. It's November 28, 2024 today. Search to ensure correctness.",
+    content: "What are some AI events happening in Singapore and how many days until the next one?",
   },
 ]
 const adapter = new OpenAIChatAdapter(client)
+let isDone = false
 
 while (!isDone) {
   const response = await openai.chat.completions.create({
@@ -91,6 +98,7 @@ while (!isDone) {
     messages,
     tools: await adapter.listTools(),
   })
+
   // Handle tool calls
   const toolMessages = await adapter.callTool(response)
 
@@ -109,14 +117,14 @@ See a full example in the [examples](./src/examples) directory.
 Error: ReferenceError: EventSource is not defined
 ```
 
-This
+This error means you're trying to use EventSource API (which is typically used in the browser) from Node. Install the following packages:
 
 ```bash
npm install eventsource
npm install -D @types/eventsource
 ```
 
-
+Then patch the global EventSource object:
 
 ```typescript
 import EventSource from "eventsource"
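The rest of the README's conversation loop falls between the hunks above and is elided from this diff. A minimal sketch of how such a loop might terminate, assuming `callTool` returns an empty array once the model makes no further tool calls (variable names mirror the README example; message typing is loosened with casts for brevity):

```typescript
// Sketch only: one plausible completion of the elided loop body.
// Assumes adapter.callTool() yields [] when the response has no tool calls.
while (!isDone) {
  const response = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages,
    tools: await adapter.listTools(),
  })

  // Handle tool calls
  const toolMessages = await adapter.callTool(response)

  // Feed the assistant turn plus any tool results back in, then loop;
  // stop once the model answers without requesting tools.
  messages.push(response.choices[0].message as any, ...(toolMessages as any))
  isDone = toolMessages.length === 0
}
```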
package/dist/config.d.ts
CHANGED

@@ -1 +1 @@
-export declare
+export declare function createSmitheryUrl(baseUrl: string, config: object): URL;
package/dist/config.js
CHANGED

@@ -1 +1,8 @@
-export
+export function createSmitheryUrl(baseUrl, config) {
+    const url = new URL(baseUrl);
+    const param = typeof window !== "undefined"
+        ? btoa(JSON.stringify(config))
+        : Buffer.from(JSON.stringify(config)).toString("base64");
+    url.searchParams.set("config", param);
+    return url;
+}
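The new `createSmitheryUrl` helper serializes a config object to base64 JSON and attaches it as a `config` query parameter, using `btoa` in browsers and `Buffer` in Node. A usage sketch; the import path and server URL are assumptions, and the config shape is whatever your MCP server expects:

```typescript
// Hypothetical import path; the helper may instead live on a subpath
// such as "@smithery/sdk/config" depending on the package's re-exports.
import { createSmitheryUrl } from "@smithery/sdk"

const url = createSmitheryUrl("https://your-mcp-server.example.com/sse", {
  apiKey: process.env.EXA_API_KEY, // example config field
})
// The config now rides along as ?config=<base64-encoded JSON>.
console.log(url.searchParams.get("config"))
```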
package/dist/multi-client.d.ts
CHANGED

@@ -37,20 +37,18 @@ export declare class MultiClient implements Pick<Client, "callTool" | "listTools
     * @returns A promise that resolves to an array of tools.
     */
    listTools(params?: ListToolsRequest["params"], options?: RequestOptions): Promise<{
-        tools:
-
-
-            inputSchema:
-
-
-
-
-
-            }
-
-
-        }, import("zod").ZodTypeAny, "passthrough">>;
-    }, import("zod").ZodTypeAny, "passthrough">[];
+        tools: {
+            [x: string]: unknown;
+            name: string;
+            inputSchema: {
+                [x: string]: unknown;
+                type: "object";
+                properties?: {
+                    [x: string]: unknown;
+                } | undefined;
+            };
+            description?: string | undefined;
+        }[];
     }>;
     callTool(params: CallToolRequest["params"], resultSchema?: typeof CallToolResultSchema | typeof CompatibilityCallToolResultSchema, options?: RequestOptions): Promise<import("zod").objectOutputType<import("zod").objectUtil.extendShape<{
         _meta: import("zod").ZodOptional<import("zod").ZodObject<{}, "passthrough", import("zod").ZodTypeAny, import("zod").objectOutputType<{}, import("zod").ZodTypeAny, "passthrough">, import("zod").objectInputType<{}, import("zod").ZodTypeAny, "passthrough">>>;
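The rewritten `listTools` declaration replaces the zod `objectOutputType` wrappers with a concrete element type, so results are directly introspectable. A small sketch of consuming it, assuming a connected `MultiClient` named `client`:

```typescript
// Sketch: iterate the now concretely-typed aggregated tool list.
const { tools } = await client.listTools()
for (const tool of tools) {
  // name is required; description and inputSchema.properties are optional.
  console.log(tool.name, tool.description ?? "(no description)")
  console.log("params:", Object.keys(tool.inputSchema.properties ?? {}))
}
```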
package/dist/server/builder.d.ts
CHANGED

@@ -15,21 +15,25 @@ export declare class ServerBuilder {
     addPrompt<const Args extends PromptArgument[]>(prompt: Prompt<Args>): this;
     build(): Server<{
         method: string;
-        params?:
-
-
-
-            progressToken
-        }
-
-        }, import("zod").ZodTypeAny, "passthrough">>>;
-        }, import("zod").ZodTypeAny, "passthrough"> | undefined;
+        params?: {
+            [x: string]: unknown;
+            _meta?: {
+                [x: string]: unknown;
+                progressToken?: string | number | undefined;
+            } | undefined;
+        } | undefined;
     }, {
         method: string;
-        params?:
-
-
-
-
-
+        params?: {
+            [x: string]: unknown;
+            _meta?: {
+                [x: string]: unknown;
+            } | undefined;
+        } | undefined;
+    }, {
+        [x: string]: unknown;
+        _meta?: {
+            [x: string]: unknown;
+        } | undefined;
+    }>;
 }
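`build()` now declares its request, notification, and result param shapes inline (including the optional `_meta.progressToken`) instead of zod passthrough types. Only `addPrompt` and `build` are visible in this hunk, so the following is a hypothetical sketch: the prompt literal follows MCP's `Prompt`/`PromptArgument` shape, and the `builder` value is assumed to exist.

```typescript
// Hypothetical usage; construction of `builder` is not shown in this diff.
declare const builder: ServerBuilder

const server = builder
  .addPrompt({
    name: "summarize",                             // hypothetical prompt
    description: "Summarize the provided text",
    arguments: [{ name: "text", required: true }], // PromptArgument[]
  })
  .build()
```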
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@smithery/sdk",
-  "version": "0.0.18",
+  "version": "0.0.20",
   "description": "Connect language models to Model Context Protocols",
   "type": "module",
   "main": "./dist/index.js",
@@ -23,7 +23,7 @@
   "dependencies": {
     "@anthropic-ai/sdk": "^0.32.1",
     "@icons-pack/react-simple-icons": "^10.2.0",
-    "@modelcontextprotocol/sdk": "^1.
+    "@modelcontextprotocol/sdk": "^1.1.1",
     "openai": "^4.0.0",
     "uuid": "^11.0.3"
   },