@agentxjs/mono-driver 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,244 @@
1
+ # @agentxjs/mono-driver
2
+
3
+ Unified cross-platform LLM driver powered by Vercel AI SDK. One `Driver` interface across multiple providers -- Anthropic, OpenAI, Google, xAI, DeepSeek, Mistral, and any OpenAI-compatible API.
4
+
5
+ ## Overview
6
+
7
+ `@agentxjs/mono-driver` is the recommended default driver for AgentX. It uses direct HTTP API calls via Vercel AI SDK, making it cross-platform (Node.js, Bun, Cloudflare Workers, Edge Runtime) with no subprocess required. This is the driver that `createAgentX` uses automatically in local mode.
8
+
9
+ Compared with `@agentxjs/claude-driver`: use mono-driver for multi-provider support and cross-platform deployment. Use claude-driver only when you need Claude Code SDK-specific features (subprocess-based execution, built-in permission management).
10
+
11
+ ## Quick Start
12
+
13
+ ```typescript
14
+ import { createMonoDriver } from "@agentxjs/mono-driver";
15
+
16
+ const driver = createMonoDriver({
17
+ apiKey: process.env.ANTHROPIC_API_KEY!,
18
+ agentId: "my-agent",
19
+ systemPrompt: "You are a helpful assistant.",
20
+ options: { provider: "anthropic" },
21
+ });
22
+
23
+ await driver.initialize();
24
+
25
+ for await (const event of driver.receive({ content: "Hello!" })) {
26
+ if (event.type === "text_delta") {
27
+ process.stdout.write(event.data.text);
28
+ }
29
+ }
30
+
31
+ await driver.dispose();
32
+ ```
33
+
34
+ ## API Reference
35
+
36
+ ### `createMonoDriver(config: MonoDriverConfig): Driver`
37
+
38
+ Factory function. Returns a `Driver` conforming to `@agentxjs/core/driver`.
39
+
40
+ ### MonoDriver
41
+
42
+ ```typescript
43
+ class MonoDriver implements Driver {
44
+ readonly name: string; // "MonoDriver"
45
+ readonly sessionId: string | null; // available after initialize()
46
+ readonly state: DriverState; // "idle" | "active" | "disposed"
47
+
48
+ initialize(): Promise<void>; // connects MCP servers, generates session ID
49
+ receive(message: UserMessage): AsyncIterable<DriverStreamEvent>;
50
+ interrupt(): void; // aborts current request
51
+ dispose(): Promise<void>; // closes MCP clients, cleanup
52
+ }
53
+ ```
54
+
55
+ ### Converter Utilities (advanced)
56
+
57
+ ```typescript
58
+ import { toVercelMessage, toVercelMessages, toStopReason } from "@agentxjs/mono-driver";
59
+ ```
60
+
61
+ ### Re-exported
62
+
63
+ ```typescript
64
+ import { stepCountIs } from "@agentxjs/mono-driver"; // from Vercel AI SDK
65
+ ```
66
+
67
+ ## Configuration
68
+
69
+ ### MonoDriverConfig
70
+
71
+ `MonoDriverConfig` = `DriverConfig<MonoDriverOptions>` from `@agentxjs/core/driver`.
72
+
73
+ ```typescript
74
+ const config: MonoDriverConfig = {
75
+ // Base DriverConfig fields
76
+ apiKey: "sk-ant-xxxxx", // required
77
+ agentId: "my-agent", // required
78
+ model: "claude-sonnet-4-20250514", // optional, uses provider default
79
+ baseUrl: "https://custom.api", // optional
80
+ systemPrompt: "You are ...", // optional
81
+ cwd: "/path/to/workdir", // optional
82
+ mcpServers: { ... }, // optional
83
+ tools: [myTool], // optional
84
+ session: mySession, // optional, for history
85
+ timeout: 600000, // optional, default: 10 min
86
+
87
+ // MonoDriver-specific
88
+ options: {
89
+ provider: "anthropic", // default: "anthropic"
90
+ maxSteps: 10, // default: 10
91
+ compatibleConfig: { ... }, // required when provider = "openai-compatible"
92
+ },
93
+ };
94
+ ```
95
+
96
+ ### MonoDriverOptions
97
+
98
+ | Field | Type | Default | Description |
99
+ | ------------------ | ------------------------ | ------------- | -------------------------------------------- |
100
+ | `provider` | `MonoProvider` | `"anthropic"` | LLM provider |
101
+ | `maxSteps` | `number` | `10` | Max agentic tool-calling steps per receive() |
102
+ | `compatibleConfig` | `OpenAICompatibleConfig` | -- | Required for `"openai-compatible"` provider |
103
+
104
+ ### OpenAICompatibleConfig
105
+
106
+ ```typescript
107
+ interface OpenAICompatibleConfig {
108
+ name: string; // provider name (for logging)
109
+ baseURL: string; // API base URL
110
+ apiKey?: string; // overrides top-level apiKey
111
+ }
112
+ ```
113
+
114
+ ### Supported Providers
115
+
116
+ | Provider | Key | Default Model |
117
+ | ----------------- | --------------------- | -------------------------- |
118
+ | Anthropic | `"anthropic"` | `claude-sonnet-4-20250514` |
119
+ | OpenAI | `"openai"` | `gpt-4o` |
120
+ | Google | `"google"` | `gemini-2.0-flash` |
121
+ | xAI | `"xai"` | `grok-3` |
122
+ | DeepSeek | `"deepseek"` | `deepseek-chat` |
123
+ | Mistral | `"mistral"` | `mistral-large-latest` |
124
+ | OpenAI-Compatible | `"openai-compatible"` | `default` |
125
+
126
+ ## Provider Examples
127
+
128
+ ### Anthropic
129
+
130
+ ```typescript
131
+ createMonoDriver({
132
+ apiKey: "sk-ant-xxxxx",
133
+ agentId: "assistant",
134
+ options: { provider: "anthropic" },
135
+ });
136
+ ```
137
+
138
+ ### OpenAI
139
+
140
+ ```typescript
141
+ createMonoDriver({
142
+ apiKey: "sk-xxxxx",
143
+ agentId: "assistant",
144
+ model: "gpt-4o",
145
+ options: { provider: "openai" },
146
+ });
147
+ ```
148
+
149
+ ### DeepSeek
150
+
151
+ ```typescript
152
+ createMonoDriver({
153
+ apiKey: "sk-xxxxx",
154
+ agentId: "assistant",
155
+ model: "deepseek-chat",
156
+ options: { provider: "deepseek" },
157
+ });
158
+ ```
159
+
160
+ ### Ollama (OpenAI-Compatible)
161
+
162
+ ```typescript
163
+ createMonoDriver({
164
+ apiKey: "ollama", // Ollama doesn't require a real key
165
+ agentId: "assistant",
166
+ model: "llama3",
167
+ options: {
168
+ provider: "openai-compatible",
169
+ compatibleConfig: {
170
+ name: "ollama",
171
+ baseURL: "http://localhost:11434/v1",
172
+ },
173
+ },
174
+ });
175
+ ```
176
+
177
+ ### LM Studio (OpenAI-Compatible)
178
+
179
+ ```typescript
180
+ createMonoDriver({
181
+ apiKey: "lm-studio",
182
+ agentId: "assistant",
183
+ model: "local-model",
184
+ options: {
185
+ provider: "openai-compatible",
186
+ compatibleConfig: {
187
+ name: "lm-studio",
188
+ baseURL: "http://localhost:1234/v1",
189
+ },
190
+ },
191
+ });
192
+ ```
193
+
194
+ ### Kimi / Moonshot AI (OpenAI-Compatible)
195
+
196
+ ```typescript
197
+ createMonoDriver({
198
+ apiKey: "sk-xxxxx",
199
+ agentId: "assistant",
200
+ model: "moonshot-v1-8k",
201
+ options: {
202
+ provider: "openai-compatible",
203
+ compatibleConfig: {
204
+ name: "kimi",
205
+ baseURL: "https://api.moonshot.cn/v1",
206
+ apiKey: "sk-moonshot-xxxxx", // overrides top-level apiKey
207
+ },
208
+ },
209
+ });
210
+ ```
211
+
212
+ ## MCP Server Configuration
213
+
214
+ MonoDriver connects to MCP servers during `initialize()` and discovers tools automatically.
215
+
216
+ ```typescript
217
+ createMonoDriver({
218
+ apiKey: "sk-ant-xxxxx",
219
+ agentId: "my-agent",
220
+ mcpServers: {
221
+ // Stdio -- local subprocess
222
+ filesystem: {
223
+ command: "npx",
224
+ args: ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"],
225
+ },
226
+ // HTTP Streamable -- remote server
227
+ remote: {
228
+ type: "http",
229
+ url: "https://mcp.example.com/mcp",
230
+ headers: { Authorization: "Bearer token" },
231
+ },
232
+ },
233
+ options: { provider: "anthropic" },
234
+ });
235
+ ```
236
+
237
+ MCP tools are merged with `tools` from config. Config tools take precedence on name conflicts.
238
+
239
+ ## Important Notes
240
+
241
+ - **API key is always passed via config**, never read from environment variables.
242
+ - **`baseUrl` auto-appends `/v1`** if missing (Vercel AI SDK requirement).
243
+ - **Stateless**: reads history from `config.session` on each `receive()`. Does not maintain internal history.
244
+ - **Cross-platform**: runs on Node.js, Bun, Workers, Edge -- no subprocess needed.
@@ -0,0 +1,170 @@
1
+ import { DriverConfig, Driver, DriverState, DriverStreamEvent, StopReason } from '@agentxjs/core/driver';
2
+ import { UserMessage, Message } from '@agentxjs/core/agent';
3
+ import { ModelMessage } from 'ai';
4
+ export { stepCountIs } from 'ai';
5
+
6
+ /**
7
+ * MonoDriver Types
8
+ *
9
+ * MonoDriver = Unified cross-platform Driver
10
+ * - One interface, multiple LLM providers
11
+ * - Powered by Vercel AI SDK
12
+ */
13
+
14
+ /**
15
+ * Built-in LLM providers
16
+ */
17
+ type MonoBuiltinProvider = "anthropic" | "openai" | "google" | "xai" | "deepseek" | "mistral";
18
+ /**
19
+ * Supported LLM providers
20
+ *
21
+ * Built-in providers + "openai-compatible" for any OpenAI-compatible API
22
+ * (Kimi, GLM, Doubao (豆包), Ollama, LM Studio, etc.)
23
+ */
24
+ type MonoProvider = MonoBuiltinProvider | "openai-compatible";
25
+ /**
26
+ * OpenAI-compatible provider configuration
27
+ *
28
+ * For providers that expose an OpenAI-compatible API:
29
+ * - Kimi (Moonshot AI): baseURL = "https://api.moonshot.cn/v1"
30
+ * - GLM (Zhipu AI): baseURL = "https://open.bigmodel.cn/api/paas/v4"
31
+ * - Doubao (豆包, Volcengine): baseURL = "https://ark.cn-beijing.volces.com/api/v3"
32
+ * - Ollama: baseURL = "http://localhost:11434/v1"
33
+ * - LM Studio: baseURL = "http://localhost:1234/v1"
34
+ */
35
+ interface OpenAICompatibleConfig {
36
+ /**
37
+ * Provider name (for logging and identification)
38
+ */
39
+ name: string;
40
+ /**
41
+ * Base URL of the OpenAI-compatible API
42
+ */
43
+ baseURL: string;
44
+ /**
45
+ * API key for this provider; overrides the top-level DriverConfig apiKey when set
46
+ */
47
+ apiKey?: string;
48
+ }
49
+ /**
50
+ * MonoDriver-specific options
51
+ */
52
+ interface MonoDriverOptions {
53
+ /**
54
+ * LLM Provider
55
+ * @default 'anthropic'
56
+ */
57
+ provider?: MonoProvider;
58
+ /**
59
+ * Max agentic steps for tool calling
60
+ * @default 10
61
+ */
62
+ maxSteps?: number;
63
+ /**
64
+ * Configuration for openai-compatible provider
65
+ *
66
+ * Required when provider is "openai-compatible"
67
+ */
68
+ compatibleConfig?: OpenAICompatibleConfig;
69
+ }
70
+ /**
71
+ * MonoDriverConfig - DriverConfig with MonoDriverOptions
72
+ */
73
+ type MonoDriverConfig = DriverConfig<MonoDriverOptions>;
74
+
75
+ /**
76
+ * MonoDriver - Unified Cross-Platform Driver
77
+ *
78
+ * Implements the Driver interface using Vercel AI SDK.
79
+ * Supports multiple LLM providers (Anthropic, OpenAI, Google, xAI, DeepSeek, Mistral, and any OpenAI-compatible API).
80
+ *
81
+ * ```
82
+ * UserMessage
83
+ * │
84
+ * ▼
85
+ * ┌─────────────────┐
86
+ * │ MonoDriver │
87
+ * │ │
88
+ * │ receive() │──► AsyncIterable<DriverStreamEvent>
89
+ * │ │ │
90
+ * │ ▼ │
91
+ * │ Vercel AI SDK │
92
+ * └─────────────────┘
93
+ * │
94
+ * ▼
95
+ * LLM Provider
96
+ * (Anthropic/OpenAI/...)
97
+ * ```
98
+ */
99
+
100
+ /**
101
+ * MonoDriver - Driver implementation using Vercel AI SDK
102
+ */
103
+ declare class MonoDriver implements Driver {
104
+ readonly name = "MonoDriver";
105
+ private _sessionId; // generated during initialize(); exposed via the sessionId getter
106
+ private _state; // "idle" | "active" | "disposed" — backing field for the state getter
107
+ private abortController; // abort handle for the in-flight request (see interrupt())
108
+ private readonly config;
109
+ private readonly session?; // optional history source; read on each receive()
110
+ private readonly provider;
111
+ private readonly maxSteps;
112
+ private readonly compatibleConfig?; // required when provider is "openai-compatible"
113
+ /** MCP clients created during initialize() */
114
+ private mcpClients;
115
+ /** Tools discovered from MCP servers */
116
+ private mcpTools;
117
+ constructor(config: MonoDriverConfig);
118
+ get sessionId(): string | null; // null until initialize() has run
119
+ get state(): DriverState;
120
+ initialize(): Promise<void>; // connects MCP servers, generates session ID
121
+ dispose(): Promise<void>; // closes MCP clients, cleanup
122
+ receive(message: UserMessage): AsyncIterable<DriverStreamEvent>; // streams events for one user turn
123
+ interrupt(): void; // aborts the current request
124
+ /**
125
+ * Merge MCP tools and config tools into a single ToolSet.
126
+ * Config tools (bash etc.) take precedence over MCP tools with the same name.
127
+ */
128
+ private mergeTools;
129
+ private getModel; // NOTE(review): presumably resolves the Vercel AI SDK model for the configured provider — confirm in implementation
130
+ /**
131
+ * Get the base URL for the provider SDK.
132
+ *
133
+ * Provider SDKs expect baseURL to include the version path (e.g. /v1).
134
+ * DriverConfig.baseUrl is the API root without version path.
135
+ * This method bridges the gap.
136
+ */
137
+ private getBaseURL;
138
+ private getDefaultModel; // per-provider default model (e.g. claude-sonnet-4-20250514 for anthropic)
139
+ }
140
+ /**
141
+ * Create a MonoDriver instance (factory; typed to return the Driver interface)
142
+ */
143
+ declare function createMonoDriver(config: MonoDriverConfig): Driver;
144
+
145
+ /**
146
+ * Message and Event Converters
147
+ *
148
+ * Converts between AgentX types and Vercel AI SDK v6 types
149
+ */
150
+
151
+ /**
152
+ * Convert AgentX Message to Vercel ModelMessage; null when the message has no ModelMessage equivalent (per the return type)
153
+ */
154
+ declare function toVercelMessage(message: Message): ModelMessage | null;
155
+ /**
156
+ * Convert array of AgentX Messages to Vercel ModelMessages (presumably dropping null conversions — verify in implementation)
157
+ */
158
+ declare function toVercelMessages(messages: Message[]): ModelMessage[];
159
+ /**
160
+ * Map Vercel AI SDK v6 finish reason to AgentX StopReason
161
+ */
162
+ declare function toStopReason(finishReason: string | null | undefined): StopReason;
163
+ /**
164
+ * Create a DriverStreamEvent with timestamp
165
+ */
166
+ declare function createEvent<T extends DriverStreamEvent["type"]>(type: T, data: Extract<DriverStreamEvent, {
167
+ type: T;
168
+ }>["data"]): DriverStreamEvent;
169
+
170
+ export { type MonoBuiltinProvider, MonoDriver, type MonoDriverConfig, type MonoDriverOptions, type MonoProvider, type OpenAICompatibleConfig, createEvent, createMonoDriver, toStopReason, toVercelMessage, toVercelMessages };