@matthesketh/utopia-ai 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2026 Matt Hesketh
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,67 @@
+ # @matthesketh/utopia-ai
+
+ AI adapters and MCP support for UtopiaJS. Unified interface for chat completions, streaming, embeddings, and agentic tool loops. Built-in support for OpenAI, Anthropic, Google Gemini, and Ollama. Includes a full MCP (Model Context Protocol) server and client implementation.
+
+ ## Install
+
+ ```bash
+ pnpm add @matthesketh/utopia-ai
+ ```
+
+ Install a provider SDK as needed:
+
+ ```bash
+ pnpm add openai # for OpenAI
+ pnpm add @anthropic-ai/sdk # for Anthropic
+ pnpm add @google/generative-ai # for Google Gemini
+ # Ollama requires no extra dependency
+ ```
+
+ ## Usage
+
+ ```ts
+ import { createAI } from '@matthesketh/utopia-ai';
+ import { openaiAdapter } from '@matthesketh/utopia-ai/openai';
+
+ const ai = createAI(openaiAdapter({
+   apiKey: process.env.OPENAI_API_KEY!,
+ }));
+
+ // Chat
+ const res = await ai.chat({
+   messages: [{ role: 'user', content: 'Hello!' }],
+ });
+
+ // Streaming
+ for await (const chunk of ai.stream({ messages: [{ role: 'user', content: 'Hello!' }] })) {
+   process.stdout.write(chunk.delta);
+ }
+
+ // Agentic tool loop
+ const result = await ai.run({
+   messages: [{ role: 'user', content: 'What is the weather?' }],
+   tools: [{
+     definition: { name: 'get_weather', description: '...', parameters: { type: 'object', properties: { city: { type: 'string' } }, required: ['city'] } },
+     handler: async ({ city }) => ({ city, temp: 72 }),
+   }],
+ });
+ ```
+
+ ## API
+
+ | Export | Description |
+ |--------|-------------|
+ | `createAI(adapter, options?)` | Create an AI instance with hooks and retry |
+ | `streamSSE(res, stream)` | Stream chat chunks as Server-Sent Events |
+ | `parseSSEStream(response)` | Parse SSE events from a fetch Response (browser) |
+ | `collectStream(stream)` | Collect a stream into a single string |
+
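A rough sketch of how the streaming helpers above fit together. It assumes `createAI`, `streamSSE`, and `collectStream` are all root exports, that `streamSSE` and `collectStream` accept the async iterable returned by `ai.stream()`, and that `res` is a Node `http.ServerResponse`; none of this is spelled out in the README itself.

```ts
// Sketch only: stream a completion to the browser as SSE, or buffer it server-side.
import { createServer } from 'node:http';
import { createAI, streamSSE, collectStream } from '@matthesketh/utopia-ai';
import { openaiAdapter } from '@matthesketh/utopia-ai/openai';

const ai = createAI(openaiAdapter({ apiKey: process.env.OPENAI_API_KEY! }));

createServer(async (_req, res) => {
  // Forward each chat chunk to the client as a Server-Sent Event.
  await streamSSE(res, ai.stream({ messages: [{ role: 'user', content: 'Hello!' }] }));
}).listen(3000);

// Or collect the whole completion into a single string.
const text = await collectStream(ai.stream({ messages: [{ role: 'user', content: 'Hi' }] }));
console.log(text);
```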
+ **Adapters:** `@matthesketh/utopia-ai/openai`, `@matthesketh/utopia-ai/anthropic`, `@matthesketh/utopia-ai/google`, `@matthesketh/utopia-ai/ollama`.
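Every provider is wired up the same way: construct its adapter and hand it to `createAI`. For Anthropic, the package's own type declarations (shown later in this diff) give this exact shape; the non-null assertion on the API key is the only addition here.

```ts
import { createAI } from '@matthesketh/utopia-ai';
import { anthropicAdapter } from '@matthesketh/utopia-ai/anthropic';

// The Anthropic adapter dynamically imports @anthropic-ai/sdk, so that peer
// dependency must be installed (pnpm add @anthropic-ai/sdk).
const ai = createAI(anthropicAdapter({ apiKey: process.env.ANTHROPIC_API_KEY! }));
```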
+
+ **MCP (`@matthesketh/utopia-ai/mcp`):** `createMCPServer`, `createMCPClient`, `createMCPHandler`.
+
+ See [docs/ai.md](../../docs/ai.md) for full documentation on adapters, streaming, tool calling, MCP, and type reference.
+
+ ## License
+
+ MIT
@@ -0,0 +1,253 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+   // If the importer is in node compatibility mode or this is not an ESM
+   // file that has been converted to a CommonJS file using a Babel-
+   // compatible transform (i.e. "__esModule" has not been set), then set
+   // "default" to the CommonJS "module.exports" for node compatibility.
+   isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+   mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/adapters/anthropic.ts
+ var anthropic_exports = {};
+ __export(anthropic_exports, {
+   anthropicAdapter: () => anthropicAdapter
+ });
+ module.exports = __toCommonJS(anthropic_exports);
+ function anthropicAdapter(config) {
+   let client = null;
+   async function getClient() {
+     if (client) return client;
+     let Anthropic;
+     try {
+       const mod = await import("@anthropic-ai/sdk");
+       Anthropic = mod.Anthropic ?? mod.default;
+     } catch {
+       throw new Error(
+         '@matthesketh/utopia-ai: "@anthropic-ai/sdk" package is required for the Anthropic adapter. Install it with: npm install @anthropic-ai/sdk'
+       );
+     }
+     client = new Anthropic({
+       apiKey: config.apiKey,
+       ...config.baseURL ? { baseURL: config.baseURL } : {}
+     });
+     return client;
+   }
+   return {
+     async chat(request) {
+       const anthropic = await getClient();
+       const model = request.model ?? config.defaultModel ?? "claude-sonnet-4-5-20250929";
+       const { system, messages } = toAnthropicMessages(request.messages);
+       const body = {
+         model,
+         messages,
+         max_tokens: request.maxTokens ?? 4096,
+         ...request.extra
+       };
+       if (system) body.system = system;
+       if (request.temperature !== void 0) body.temperature = request.temperature;
+       if (request.topP !== void 0) body.top_p = request.topP;
+       if (request.stop) body.stop_sequences = request.stop;
+       if (request.tools?.length) {
+         body.tools = request.tools.map(toAnthropicTool);
+         if (request.toolChoice) {
+           body.tool_choice = toAnthropicToolChoice(request.toolChoice);
+         }
+       }
+       const response = await anthropic.messages.create(body);
+       const textContent = response.content.filter((b) => b.type === "text").map((b) => b.text).join("");
+       const toolCalls = response.content.filter((b) => b.type === "tool_use").map((b) => ({
+         id: b.id,
+         name: b.name,
+         arguments: b.input ?? {}
+       }));
+       return {
+         content: textContent,
+         toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
+         finishReason: mapStopReason(response.stop_reason),
+         usage: response.usage ? {
+           promptTokens: response.usage.input_tokens,
+           completionTokens: response.usage.output_tokens,
+           totalTokens: response.usage.input_tokens + response.usage.output_tokens
+         } : void 0,
+         raw: response
+       };
+     },
+     async *stream(request) {
+       const anthropic = await getClient();
+       const model = request.model ?? config.defaultModel ?? "claude-sonnet-4-5-20250929";
+       const { system, messages } = toAnthropicMessages(request.messages);
+       const body = {
+         model,
+         messages,
+         max_tokens: request.maxTokens ?? 4096,
+         stream: true,
+         ...request.extra
+       };
+       if (system) body.system = system;
+       if (request.temperature !== void 0) body.temperature = request.temperature;
+       if (request.topP !== void 0) body.top_p = request.topP;
+       if (request.stop) body.stop_sequences = request.stop;
+       if (request.tools?.length) {
+         body.tools = request.tools.map(toAnthropicTool);
+         if (request.toolChoice) {
+           body.tool_choice = toAnthropicToolChoice(request.toolChoice);
+         }
+       }
+       const stream = anthropic.messages.stream(body);
+       let promptTokens = 0;
+       for await (const event of stream) {
+         if (event.type === "message_start" && event.message?.usage) {
+           promptTokens = event.message.usage.input_tokens ?? 0;
+         } else if (event.type === "content_block_delta") {
+           if (event.delta.type === "text_delta") {
+             yield { delta: event.delta.text };
+           } else if (event.delta.type === "input_json_delta") {
+             yield {
+               delta: "",
+               toolCallDelta: {
+                 arguments: tryParseJSON(event.delta.partial_json)
+               }
+             };
+           }
+         } else if (event.type === "content_block_start") {
+           if (event.content_block.type === "tool_use") {
+             yield {
+               delta: "",
+               toolCallDelta: {
+                 id: event.content_block.id,
+                 name: event.content_block.name,
+                 index: event.index
+               }
+             };
+           }
+         } else if (event.type === "message_delta") {
+           const outputTokens = event.usage?.output_tokens ?? 0;
+           yield {
+             delta: "",
+             finishReason: mapStopReason(event.delta.stop_reason),
+             usage: event.usage ? {
+               promptTokens,
+               completionTokens: outputTokens,
+               totalTokens: promptTokens + outputTokens
+             } : void 0
+           };
+         }
+       }
+     }
+   };
+ }
+ function toAnthropicMessages(messages) {
+   let system;
+   const out = [];
+   for (const msg of messages) {
+     if (msg.role === "system") {
+       system = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? msg.content.map((c) => typeof c === "string" ? c : "text" in c ? c.text : "").join("") : "text" in msg.content ? msg.content.text : "";
+       continue;
+     }
+     if (typeof msg.content === "string") {
+       out.push({ role: msg.role === "tool" ? "user" : msg.role, content: msg.content });
+       continue;
+     }
+     if (Array.isArray(msg.content)) {
+       const blocks = [];
+       for (const part2 of msg.content) {
+         if (typeof part2 === "string") {
+           blocks.push({ type: "text", text: part2 });
+         } else if (part2.type === "text") {
+           blocks.push({ type: "text", text: part2.text });
+         } else if (part2.type === "image") {
+           blocks.push({
+             type: "image",
+             source: {
+               type: part2.source.startsWith("data:") || part2.source.startsWith("http") ? "url" : "base64",
+               ...part2.source.startsWith("data:") || part2.source.startsWith("http") ? { url: part2.source } : { media_type: part2.mediaType ?? "image/png", data: part2.source }
+             }
+           });
+         } else if (part2.type === "tool_call") {
+           blocks.push({
+             type: "tool_use",
+             id: part2.id,
+             name: part2.name,
+             input: part2.arguments
+           });
+         } else if (part2.type === "tool_result") {
+           blocks.push({
+             type: "tool_result",
+             tool_use_id: part2.id,
+             content: part2.content,
+             is_error: part2.isError
+           });
+         }
+       }
+       const role = msg.role === "tool" ? "user" : msg.role;
+       out.push({ role, content: blocks });
+       continue;
+     }
+     const part = msg.content;
+     if (part.type === "text") {
+       out.push({ role: msg.role, content: part.text });
+     }
+   }
+   return { system, messages: out };
+ }
+ function toAnthropicTool(tool) {
+   return {
+     name: tool.name,
+     description: tool.description,
+     input_schema: tool.parameters
+   };
+ }
+ function toAnthropicToolChoice(choice) {
+   if (choice === "auto") return { type: "auto" };
+   if (choice === "none") return { type: "none" };
+   if (choice === "required") return { type: "any" };
+   if (choice && typeof choice === "object" && "name" in choice) {
+     return { type: "tool", name: choice.name };
+   }
+   return { type: "auto" };
+ }
+ function mapStopReason(reason) {
+   switch (reason) {
+     case "end_turn":
+       return "stop";
+     case "stop_sequence":
+       return "stop";
+     case "tool_use":
+       return "tool_calls";
+     case "max_tokens":
+       return "length";
+     default:
+       return "stop";
+   }
+ }
+ function tryParseJSON(str) {
+   try {
+     return JSON.parse(str);
+   } catch {
+     return void 0;
+   }
+ }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   anthropicAdapter
+ });
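For reference, the chunks yielded by the adapter's `stream()` above carry `delta`, an optional `toolCallDelta` (`id`, `name`, `index`, partial `arguments`), and a final `finishReason` plus `usage`. A minimal consumer sketch, with the field names taken from the compiled code above and the surrounding setup assumed from the README:

```ts
import { createAI } from '@matthesketh/utopia-ai';
import { anthropicAdapter } from '@matthesketh/utopia-ai/anthropic';

const ai = createAI(anthropicAdapter({ apiKey: process.env.ANTHROPIC_API_KEY! }));

// Chunk fields (delta, toolCallDelta, finishReason, usage) mirror the yields above.
for await (const chunk of ai.stream({ messages: [{ role: 'user', content: 'Hello!' }] })) {
  if (chunk.delta) process.stdout.write(chunk.delta);                     // incremental text
  if (chunk.toolCallDelta?.name) {
    console.log('\ntool call started:', chunk.toolCallDelta.name);        // from content_block_start
  }
  if (chunk.finishReason) {
    console.log('\nfinished:', chunk.finishReason, chunk.usage?.totalTokens); // from message_delta
  }
}
```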
@@ -0,0 +1,17 @@
+ import { b as AnthropicConfig, A as AIAdapter } from '../types-FSnS43LM.cjs';
+
+ /**
+  * Create an Anthropic adapter.
+  *
+  * Requires `@anthropic-ai/sdk` as a peer dependency.
+  *
+  * ```ts
+  * import { createAI } from '@matthesketh/utopia-ai';
+  * import { anthropicAdapter } from '@matthesketh/utopia-ai/anthropic';
+  *
+  * const ai = createAI(anthropicAdapter({ apiKey: process.env.ANTHROPIC_API_KEY }));
+  * ```
+  */
+ declare function anthropicAdapter(config: AnthropicConfig): AIAdapter;
+
+ export { anthropicAdapter };
@@ -0,0 +1,17 @@
+ import { b as AnthropicConfig, A as AIAdapter } from '../types-FSnS43LM.js';
+
+ /**
+  * Create an Anthropic adapter.
+  *
+  * Requires `@anthropic-ai/sdk` as a peer dependency.
+  *
+  * ```ts
+  * import { createAI } from '@matthesketh/utopia-ai';
+  * import { anthropicAdapter } from '@matthesketh/utopia-ai/anthropic';
+  *
+  * const ai = createAI(anthropicAdapter({ apiKey: process.env.ANTHROPIC_API_KEY }));
+  * ```
+  */
+ declare function anthropicAdapter(config: AnthropicConfig): AIAdapter;
+
+ export { anthropicAdapter };
@@ -0,0 +1,218 @@
+ // src/adapters/anthropic.ts
+ function anthropicAdapter(config) {
+   let client = null;
+   async function getClient() {
+     if (client) return client;
+     let Anthropic;
+     try {
+       const mod = await import("@anthropic-ai/sdk");
+       Anthropic = mod.Anthropic ?? mod.default;
+     } catch {
+       throw new Error(
+         '@matthesketh/utopia-ai: "@anthropic-ai/sdk" package is required for the Anthropic adapter. Install it with: npm install @anthropic-ai/sdk'
+       );
+     }
+     client = new Anthropic({
+       apiKey: config.apiKey,
+       ...config.baseURL ? { baseURL: config.baseURL } : {}
+     });
+     return client;
+   }
+   return {
+     async chat(request) {
+       const anthropic = await getClient();
+       const model = request.model ?? config.defaultModel ?? "claude-sonnet-4-5-20250929";
+       const { system, messages } = toAnthropicMessages(request.messages);
+       const body = {
+         model,
+         messages,
+         max_tokens: request.maxTokens ?? 4096,
+         ...request.extra
+       };
+       if (system) body.system = system;
+       if (request.temperature !== void 0) body.temperature = request.temperature;
+       if (request.topP !== void 0) body.top_p = request.topP;
+       if (request.stop) body.stop_sequences = request.stop;
+       if (request.tools?.length) {
+         body.tools = request.tools.map(toAnthropicTool);
+         if (request.toolChoice) {
+           body.tool_choice = toAnthropicToolChoice(request.toolChoice);
+         }
+       }
+       const response = await anthropic.messages.create(body);
+       const textContent = response.content.filter((b) => b.type === "text").map((b) => b.text).join("");
+       const toolCalls = response.content.filter((b) => b.type === "tool_use").map((b) => ({
+         id: b.id,
+         name: b.name,
+         arguments: b.input ?? {}
+       }));
+       return {
+         content: textContent,
+         toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
+         finishReason: mapStopReason(response.stop_reason),
+         usage: response.usage ? {
+           promptTokens: response.usage.input_tokens,
+           completionTokens: response.usage.output_tokens,
+           totalTokens: response.usage.input_tokens + response.usage.output_tokens
+         } : void 0,
+         raw: response
+       };
+     },
+     async *stream(request) {
+       const anthropic = await getClient();
+       const model = request.model ?? config.defaultModel ?? "claude-sonnet-4-5-20250929";
+       const { system, messages } = toAnthropicMessages(request.messages);
+       const body = {
+         model,
+         messages,
+         max_tokens: request.maxTokens ?? 4096,
+         stream: true,
+         ...request.extra
+       };
+       if (system) body.system = system;
+       if (request.temperature !== void 0) body.temperature = request.temperature;
+       if (request.topP !== void 0) body.top_p = request.topP;
+       if (request.stop) body.stop_sequences = request.stop;
+       if (request.tools?.length) {
+         body.tools = request.tools.map(toAnthropicTool);
+         if (request.toolChoice) {
+           body.tool_choice = toAnthropicToolChoice(request.toolChoice);
+         }
+       }
+       const stream = anthropic.messages.stream(body);
+       let promptTokens = 0;
+       for await (const event of stream) {
+         if (event.type === "message_start" && event.message?.usage) {
+           promptTokens = event.message.usage.input_tokens ?? 0;
+         } else if (event.type === "content_block_delta") {
+           if (event.delta.type === "text_delta") {
+             yield { delta: event.delta.text };
+           } else if (event.delta.type === "input_json_delta") {
+             yield {
+               delta: "",
+               toolCallDelta: {
+                 arguments: tryParseJSON(event.delta.partial_json)
+               }
+             };
+           }
+         } else if (event.type === "content_block_start") {
+           if (event.content_block.type === "tool_use") {
+             yield {
+               delta: "",
+               toolCallDelta: {
+                 id: event.content_block.id,
+                 name: event.content_block.name,
+                 index: event.index
+               }
+             };
+           }
+         } else if (event.type === "message_delta") {
+           const outputTokens = event.usage?.output_tokens ?? 0;
+           yield {
+             delta: "",
+             finishReason: mapStopReason(event.delta.stop_reason),
+             usage: event.usage ? {
+               promptTokens,
+               completionTokens: outputTokens,
+               totalTokens: promptTokens + outputTokens
+             } : void 0
+           };
+         }
+       }
+     }
+   };
+ }
+ function toAnthropicMessages(messages) {
+   let system;
+   const out = [];
+   for (const msg of messages) {
+     if (msg.role === "system") {
+       system = typeof msg.content === "string" ? msg.content : Array.isArray(msg.content) ? msg.content.map((c) => typeof c === "string" ? c : "text" in c ? c.text : "").join("") : "text" in msg.content ? msg.content.text : "";
+       continue;
+     }
+     if (typeof msg.content === "string") {
+       out.push({ role: msg.role === "tool" ? "user" : msg.role, content: msg.content });
+       continue;
+     }
+     if (Array.isArray(msg.content)) {
+       const blocks = [];
+       for (const part2 of msg.content) {
+         if (typeof part2 === "string") {
+           blocks.push({ type: "text", text: part2 });
+         } else if (part2.type === "text") {
+           blocks.push({ type: "text", text: part2.text });
+         } else if (part2.type === "image") {
+           blocks.push({
+             type: "image",
+             source: {
+               type: part2.source.startsWith("data:") || part2.source.startsWith("http") ? "url" : "base64",
+               ...part2.source.startsWith("data:") || part2.source.startsWith("http") ? { url: part2.source } : { media_type: part2.mediaType ?? "image/png", data: part2.source }
+             }
+           });
+         } else if (part2.type === "tool_call") {
+           blocks.push({
+             type: "tool_use",
+             id: part2.id,
+             name: part2.name,
+             input: part2.arguments
+           });
+         } else if (part2.type === "tool_result") {
+           blocks.push({
+             type: "tool_result",
+             tool_use_id: part2.id,
+             content: part2.content,
+             is_error: part2.isError
+           });
+         }
+       }
+       const role = msg.role === "tool" ? "user" : msg.role;
+       out.push({ role, content: blocks });
+       continue;
+     }
+     const part = msg.content;
+     if (part.type === "text") {
+       out.push({ role: msg.role, content: part.text });
+     }
+   }
+   return { system, messages: out };
+ }
+ function toAnthropicTool(tool) {
+   return {
+     name: tool.name,
+     description: tool.description,
+     input_schema: tool.parameters
+   };
+ }
+ function toAnthropicToolChoice(choice) {
+   if (choice === "auto") return { type: "auto" };
+   if (choice === "none") return { type: "none" };
+   if (choice === "required") return { type: "any" };
+   if (choice && typeof choice === "object" && "name" in choice) {
+     return { type: "tool", name: choice.name };
+   }
+   return { type: "auto" };
+ }
+ function mapStopReason(reason) {
+   switch (reason) {
+     case "end_turn":
+       return "stop";
+     case "stop_sequence":
+       return "stop";
+     case "tool_use":
+       return "tool_calls";
+     case "max_tokens":
+       return "length";
+     default:
+       return "stop";
+   }
+ }
+ function tryParseJSON(str) {
+   try {
+     return JSON.parse(str);
+   } catch {
+     return void 0;
+   }
+ }
+ export {
+   anthropicAdapter
+ };