adk-llm-bridge 0.1.0
- package/LICENSE +21 -0
- package/README.md +247 -0
- package/dist/AIGatewayLlm.d.ts +14 -0
- package/dist/AIGatewayLlm.d.ts.map +1 -0
- package/dist/ai-gateway-llm.d.ts +14 -0
- package/dist/ai-gateway-llm.d.ts.map +1 -0
- package/dist/ai-gateway.d.ts +6 -0
- package/dist/ai-gateway.d.ts.map +1 -0
- package/dist/auto-register.d.ts +2 -0
- package/dist/auto-register.d.ts.map +1 -0
- package/dist/auto-register.js +331 -0
- package/dist/config.d.ts +5 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/constants.d.ts +11 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/converters/index.d.ts +3 -0
- package/dist/converters/index.d.ts.map +1 -0
- package/dist/converters/request.d.ts +8 -0
- package/dist/converters/request.d.ts.map +1 -0
- package/dist/converters/response.d.ts +7 -0
- package/dist/converters/response.d.ts.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +337 -0
- package/dist/register.d.ts +6 -0
- package/dist/register.d.ts.map +1 -0
- package/dist/types.d.ts +26 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +65 -0
package/LICENSE
ADDED

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 PAI

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

package/README.md
ADDED

@@ -0,0 +1,247 @@
# adk-llm-bridge

[](https://www.npmjs.com/package/adk-llm-bridge)
[](https://opensource.org/licenses/MIT)

Connect [Google ADK](https://github.com/google/adk-typescript) to [Vercel AI Gateway](https://vercel.com/ai-gateway) and 100+ LLM models.

## Why?

Google ADK is great for building AI agents, but it only supports Gemini models natively. This package lets you use **any model** from Vercel AI Gateway (Claude, GPT-4, Llama, Mistral, etc.) while keeping all ADK features like multi-agent orchestration, tool calling, and streaming.

## Installation

```bash
bun add adk-llm-bridge @google/adk
```

```bash
pnpm add adk-llm-bridge @google/adk
```

```bash
npm install adk-llm-bridge @google/adk
```

## Quick Start

```typescript
import { LlmAgent } from '@google/adk';
import { AIGateway } from 'adk-llm-bridge';

const agent = new LlmAgent({
  name: 'assistant',
  model: AIGateway('anthropic/claude-sonnet-4'),
  instruction: 'You are a helpful assistant.',
});

const response = await agent.run('Hello!');
```

## Configuration

### Environment Variables (Recommended)

```bash
AI_GATEWAY_API_KEY=your-api-key
AI_GATEWAY_URL=https://ai-gateway.vercel.sh/v1 # optional
```

### With Options

```typescript
import { AIGateway } from 'adk-llm-bridge';

const agent = new LlmAgent({
  name: 'assistant',
  model: AIGateway('openai/gpt-4o', {
    apiKey: process.env.OPENAI_API_KEY,
    timeout: 30000,
    maxRetries: 3,
  }),
  instruction: 'You are helpful.',
});
```

### Multiple Agents with Different Configs

Each agent can have its own API key or configuration:

```typescript
import { LlmAgent } from '@google/adk';
import { AIGateway } from 'adk-llm-bridge';

// Agent with production API key
const prodAgent = new LlmAgent({
  name: 'prod-assistant',
  model: AIGateway('anthropic/claude-sonnet-4', {
    apiKey: process.env.PROD_API_KEY
  }),
  instruction: 'You are a production assistant.',
});

// Agent with development API key
const devAgent = new LlmAgent({
  name: 'dev-assistant',
  model: AIGateway('openai/gpt-4o', {
    apiKey: process.env.DEV_API_KEY
  }),
  instruction: 'You are a development assistant.',
});
```

### Using with adk-devtools

When using `adk-devtools` (CLI or web interface), you must register with `LLMRegistry` due to how the tool bundles dependencies:

```typescript
import { LlmAgent, LLMRegistry } from '@google/adk';
import { AIGatewayLlm } from 'adk-llm-bridge';

// Required for adk-devtools
LLMRegistry.register(AIGatewayLlm);

export const rootAgent = new LlmAgent({
  name: 'assistant',
  model: 'anthropic/claude-sonnet-4', // Use string, not AIGateway()
  instruction: 'You are helpful.',
});
```

### Using String Model Names (Alternative)

If you prefer string-based model names, register once at startup:

```typescript
import { LlmAgent } from '@google/adk';
import { registerAIGateway } from 'adk-llm-bridge';

// Register once (uses AI_GATEWAY_API_KEY from env)
registerAIGateway();

const agent = new LlmAgent({
  name: 'assistant',
  model: 'anthropic/claude-sonnet-4', // String works after registration
  instruction: 'You are helpful.',
});
```

## Model Format

Use the `provider/model` format supported by AI Gateway:

```typescript
AIGateway('anthropic/claude-sonnet-4')
AIGateway('openai/gpt-4o')
AIGateway('google/gemini-2.0-flash')
AIGateway('zai/glm-4.6')
AIGateway('xai/grok-2')
AIGateway('deepseek/deepseek-chat')
```

**Any model available in [Vercel AI Gateway](https://sdk.vercel.ai/docs/ai-sdk-core/ai-gateway#supported-models) will work** - no code changes needed when new providers are added.

### Popular Providers

| Provider | Examples |
|----------|----------|
| Anthropic | `anthropic/claude-sonnet-4`, `anthropic/claude-haiku` |
| OpenAI | `openai/gpt-4o`, `openai/gpt-4o-mini` |
| Google | `google/gemini-2.0-flash` |
| Meta | `meta/llama-3.1-70b-instruct` |
| Mistral | `mistral/mistral-large` |
| xAI | `xai/grok-2` |
| DeepSeek | `deepseek/deepseek-chat` |
| Zhipu AI | `zai/glm-4.6`, `zai/glm-4.5` |
| Groq | `groq/llama-3.1-70b` |
| Perplexity | `perplexity/sonar-pro` |

Browse all models at [Vercel AI Gateway Models](https://sdk.vercel.ai/docs/ai-sdk-core/ai-gateway#supported-models).

## Features

- **Text generation** - Simple prompt/response
- **Streaming** - Real-time token streaming
- **Tool calling** - Function calling with automatic conversion
- **Multi-turn** - Full conversation history support
- **Usage metadata** - Token counts for monitoring

## Tool Calling Example

```typescript
import { LlmAgent } from '@google/adk';
import { AIGateway } from 'adk-llm-bridge';

const getWeather = {
  name: 'get_weather',
  description: 'Get current weather for a city',
  parameters: {
    type: 'object',
    properties: {
      city: { type: 'string', description: 'City name' },
    },
    required: ['city'],
  },
};

const agent = new LlmAgent({
  name: 'weather-assistant',
  model: AIGateway('anthropic/claude-sonnet-4'),
  instruction: 'You help users check the weather.',
  tools: [getWeather],
});
```

## API Reference

### `AIGateway(model, options?)`

Creates an LLM instance for use with ADK agents.

```typescript
AIGateway('anthropic/claude-sonnet-4')
AIGateway('openai/gpt-4o', { apiKey: 'sk-...' })
```

| Parameter | Type | Description |
|-----------|------|-------------|
| `model` | `string` | Model identifier (e.g., `anthropic/claude-sonnet-4`) |
| `options.apiKey` | `string` | API key (default: `process.env.AI_GATEWAY_API_KEY`) |
| `options.baseURL` | `string` | Gateway URL (default: `https://ai-gateway.vercel.sh/v1`) |
| `options.timeout` | `number` | Request timeout in ms (default: `60000`) |
| `options.maxRetries` | `number` | Max retry attempts (default: `2`) |

### `registerAIGateway(options?)`

Registers AI Gateway with ADK's LLM registry for string-based model names.

```typescript
registerAIGateway()
registerAIGateway({ apiKey: 'sk-...' })
```

### `AIGatewayLlm`

Direct LLM class for advanced usage (same options as `AIGateway`).

```typescript
import { AIGatewayLlm } from 'adk-llm-bridge';

const llm = new AIGatewayLlm({
  model: 'anthropic/claude-sonnet-4',
  apiKey: 'sk-...',
});
```

## Requirements

- Node.js >= 18.0.0
- `@google/adk` >= 0.2.0

## Contributing

See [CONTRIBUTING.md](CONTRIBUTING.md) for development setup and guidelines.

## License

[MIT](LICENSE)

package/dist/AIGatewayLlm.d.ts
ADDED

@@ -0,0 +1,14 @@
import { BaseLlm } from "@google/adk";
import type { LlmRequest, LlmResponse, BaseLlmConnection } from "@google/adk";
import type { AIGatewayConfig } from "./types";
export declare class AIGatewayLlm extends BaseLlm {
    private readonly client;
    static readonly supportedModels: (string | RegExp)[];
    constructor(config: AIGatewayConfig);
    generateContentAsync(llmRequest: LlmRequest, stream?: boolean): AsyncGenerator<LlmResponse, void>;
    connect(_: LlmRequest): Promise<BaseLlmConnection>;
    private singleResponse;
    private streamResponse;
    private errorResponse;
}
//# sourceMappingURL=AIGatewayLlm.d.ts.map

package/dist/AIGatewayLlm.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"AIGatewayLlm.d.ts","sourceRoot":"","sources":["../src/AIGatewayLlm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AACtC,OAAO,KAAK,EAAE,UAAU,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAe9E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,qBAAa,YAAa,SAAQ,OAAO;IACvC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,MAAM,CAAC,QAAQ,CAAC,eAAe,sBAAkB;gBAErC,MAAM,EAAE,eAAe;IAuB5B,oBAAoB,CACzB,UAAU,EAAE,UAAU,EACtB,MAAM,UAAQ,GACb,cAAc,CAAC,WAAW,EAAE,IAAI,CAAC;IAc9B,OAAO,CAAC,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,iBAAiB,CAAC;YAI1C,cAAc;YAYb,cAAc;IAoB7B,OAAO,CAAC,aAAa;CAQtB"}

package/dist/ai-gateway-llm.d.ts
ADDED

@@ -0,0 +1,14 @@
import { BaseLlm } from "@google/adk";
import type { LlmRequest, LlmResponse, BaseLlmConnection } from "@google/adk";
import type { AIGatewayConfig } from "./types";
export declare class AIGatewayLlm extends BaseLlm {
    private readonly client;
    static readonly supportedModels: (string | RegExp)[];
    constructor(config: AIGatewayConfig);
    generateContentAsync(llmRequest: LlmRequest, stream?: boolean): AsyncGenerator<LlmResponse, void>;
    connect(_: LlmRequest): Promise<BaseLlmConnection>;
    private singleResponse;
    private streamResponse;
    private errorResponse;
}
//# sourceMappingURL=ai-gateway-llm.d.ts.map

package/dist/ai-gateway-llm.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"ai-gateway-llm.d.ts","sourceRoot":"","sources":["../src/ai-gateway-llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AACtC,OAAO,KAAK,EAAE,UAAU,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAC;AAgB9E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,qBAAa,YAAa,SAAQ,OAAO;IACvC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAS;IAChC,MAAM,CAAC,QAAQ,CAAC,eAAe,sBAAkB;gBAErC,MAAM,EAAE,eAAe;IA2B5B,oBAAoB,CACzB,UAAU,EAAE,UAAU,EACtB,MAAM,UAAQ,GACb,cAAc,CAAC,WAAW,EAAE,IAAI,CAAC;IAc9B,OAAO,CAAC,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,iBAAiB,CAAC;YAI1C,cAAc;YAYb,cAAc;IAoB7B,OAAO,CAAC,aAAa;CAQtB"}

package/dist/ai-gateway.d.ts
ADDED

@@ -0,0 +1,6 @@
import { AIGatewayLlm } from "./ai-gateway-llm";
import type { AIGatewayConfig } from "./types";
type AIGatewayOptions = Omit<AIGatewayConfig, "model">;
export declare function AIGateway(model: string, options?: AIGatewayOptions): AIGatewayLlm;
export {};
//# sourceMappingURL=ai-gateway.d.ts.map

package/dist/ai-gateway.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"ai-gateway.d.ts","sourceRoot":"","sources":["../src/ai-gateway.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,kBAAkB,CAAC;AAChD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAE/C,KAAK,gBAAgB,GAAG,IAAI,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;AAEvD,wBAAgB,SAAS,CACvB,KAAK,EAAE,MAAM,EACb,OAAO,CAAC,EAAE,gBAAgB,GACzB,YAAY,CAEd"}

package/dist/auto-register.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"auto-register.d.ts","sourceRoot":"","sources":["../src/auto-register.ts"],"names":[],"mappings":""}

package/dist/auto-register.js
ADDED

@@ -0,0 +1,331 @@
// src/register.ts
import { LLMRegistry } from "@google/adk";

// src/ai-gateway-llm.ts
import { BaseLlm } from "@google/adk";
import OpenAI from "openai";

// src/converters/request.ts
function convertRequest(llmRequest) {
  const messages = [];
  const systemContent = extractSystemInstruction(llmRequest);
  if (systemContent) {
    messages.push({ role: "system", content: systemContent });
  }
  for (const content of llmRequest.contents ?? []) {
    processContent(content, messages);
  }
  return { messages, tools: convertTools(llmRequest) };
}
function extractSystemInstruction(req) {
  const sys = req.config?.systemInstruction;
  if (!sys)
    return null;
  if (typeof sys === "string")
    return sys;
  if ("parts" in sys)
    return extractText(sys.parts ?? []);
  return null;
}
function extractText(parts) {
  return parts.map((p) => p.text).filter(Boolean).join(`
`);
}
function processContent(content, messages) {
  if (!content.parts?.length)
    return;
  const texts = [];
  const calls = [];
  const responses = [];
  for (const part of content.parts) {
    if (part.text)
      texts.push(part.text);
    if (part.functionCall) {
      calls.push({
        id: part.functionCall.id ?? `call_${Date.now()}`,
        name: part.functionCall.name ?? "",
        arguments: JSON.stringify(part.functionCall.args ?? {})
      });
    }
    if (part.functionResponse) {
      responses.push({
        id: part.functionResponse.id ?? "",
        content: JSON.stringify(part.functionResponse.response ?? {})
      });
    }
  }
  if (content.role === "user") {
    if (texts.length)
      messages.push({ role: "user", content: texts.join(`
`) });
    for (const r of responses) {
      messages.push({ role: "tool", tool_call_id: r.id, content: r.content });
    }
  } else if (content.role === "model") {
    if (texts.length || calls.length) {
      const msg = {
        role: "assistant",
        content: texts.length ? texts.join(`
`) : null
      };
      if (calls.length) {
        msg.tool_calls = calls.map((c) => ({
          id: c.id,
          type: "function",
          function: { name: c.name, arguments: c.arguments }
        }));
      }
      messages.push(msg);
    }
  }
}
function convertTools(req) {
  const adkTools = req.config?.tools;
  if (!adkTools?.length)
    return;
  const tools = [];
  for (const group of adkTools) {
    if ("functionDeclarations" in group && Array.isArray(group.functionDeclarations)) {
      for (const fn of group.functionDeclarations) {
        tools.push({
          type: "function",
          function: {
            name: fn.name ?? "",
            description: fn.description ?? "",
            parameters: normalizeSchema(fn.parameters) ?? {
              type: "object",
              properties: {}
            }
          }
        });
      }
    }
  }
  return tools.length ? tools : undefined;
}
function normalizeSchema(schema) {
  if (!schema || typeof schema !== "object")
    return;
  const result = {};
  const input = schema;
  for (const [key, value] of Object.entries(input)) {
    if (key === "type" && typeof value === "string") {
      result[key] = value.toLowerCase();
    } else if (typeof value === "object" && value !== null && !Array.isArray(value)) {
      result[key] = normalizeSchema(value);
    } else if (Array.isArray(value)) {
      result[key] = value;
    } else {
      result[key] = value;
    }
  }
  return result;
}

// src/converters/response.ts
function convertResponse(response) {
  const choice = response.choices[0];
  if (!choice) {
    return {
      errorCode: "NO_CHOICE",
      errorMessage: "No response choice",
      turnComplete: true
    };
  }
  const parts = [];
  if (choice.message.content) {
    parts.push({ text: choice.message.content });
  }
  for (const tc of choice.message.tool_calls ?? []) {
    parts.push({
      functionCall: {
        id: tc.id,
        name: tc.function.name,
        args: safeJsonParse(tc.function.arguments)
      }
    });
  }
  return {
    content: parts.length ? { role: "model", parts } : undefined,
    turnComplete: true,
    usageMetadata: response.usage ? {
      promptTokenCount: response.usage.prompt_tokens,
      candidatesTokenCount: response.usage.completion_tokens,
      totalTokenCount: response.usage.total_tokens
    } : undefined
  };
}
function convertStreamChunk(chunk, acc) {
  const choice = chunk.choices[0];
  if (!choice)
    return { isComplete: false };
  const delta = choice.delta;
  if (delta?.content) {
    acc.text += delta.content;
    return {
      response: {
        content: { role: "model", parts: [{ text: delta.content }] },
        partial: true
      },
      isComplete: false
    };
  }
  if (delta?.tool_calls) {
    for (const tc of delta.tool_calls) {
      const idx = tc.index ?? 0;
      let a = acc.toolCalls.get(idx);
      if (!a) {
        a = { id: "", name: "", arguments: "" };
        acc.toolCalls.set(idx, a);
      }
      if (tc.id)
        a.id = tc.id;
      if (tc.function?.name)
        a.name += tc.function.name;
      if (tc.function?.arguments)
        a.arguments += tc.function.arguments;
    }
  }
  if (choice.finish_reason) {
    const parts = [];
    if (acc.text)
      parts.push({ text: acc.text });
    for (const tc of Array.from(acc.toolCalls.values())) {
      if (tc.name) {
        parts.push({
          functionCall: {
            id: tc.id,
            name: tc.name,
            args: safeJsonParse(tc.arguments)
          }
        });
      }
    }
    acc.text = "";
    acc.toolCalls.clear();
    return {
      response: {
        content: parts.length ? { role: "model", parts } : undefined,
        turnComplete: true
      },
      isComplete: true
    };
  }
  return { isComplete: false };
}
function createStreamAccumulator() {
  return { text: "", toolCalls: new Map };
}
function safeJsonParse(str) {
  try {
    return JSON.parse(str);
  } catch {
    return {};
  }
}

// src/config.ts
var config = {};
function setConfig(options) {
  config = { ...options };
}
function getConfig() {
  return config;
}

// src/constants.ts
var DEFAULT_BASE_URL = "https://ai-gateway.vercel.sh/v1";
var DEFAULT_TIMEOUT = 60000;
var DEFAULT_MAX_RETRIES = 2;
var MODEL_PATTERNS = [/^.+\/.+$/];
var ENV = {
  AI_GATEWAY_URL: "AI_GATEWAY_URL",
  AI_GATEWAY_API_KEY: "AI_GATEWAY_API_KEY",
  OPENAI_BASE_URL: "OPENAI_BASE_URL",
  OPENAI_API_KEY: "OPENAI_API_KEY"
};

// src/ai-gateway-llm.ts
class AIGatewayLlm extends BaseLlm {
  client;
  static supportedModels = MODEL_PATTERNS;
  constructor(config2) {
    super({ model: config2.model });
    const globalConfig = getConfig();
    const baseURL = config2.baseURL ?? globalConfig.baseURL ?? process.env[ENV.AI_GATEWAY_URL] ?? process.env[ENV.OPENAI_BASE_URL] ?? DEFAULT_BASE_URL;
    const apiKey = config2.apiKey ?? globalConfig.apiKey ?? process.env[ENV.AI_GATEWAY_API_KEY] ?? process.env[ENV.OPENAI_API_KEY] ?? "";
    this.client = new OpenAI({
      baseURL,
      apiKey,
      timeout: config2.timeout ?? DEFAULT_TIMEOUT,
      maxRetries: config2.maxRetries ?? DEFAULT_MAX_RETRIES
    });
  }
  async* generateContentAsync(llmRequest, stream = false) {
    try {
      const { messages, tools } = convertRequest(llmRequest);
      if (stream) {
        yield* this.streamResponse(messages, tools);
      } else {
        yield await this.singleResponse(messages, tools);
      }
    } catch (error) {
      yield this.errorResponse(error);
    }
  }
  async connect(_) {
    throw new Error("AIGatewayLlm does not support bidirectional streaming");
  }
  async singleResponse(messages, tools) {
    const response = await this.client.chat.completions.create({
      model: this.model,
      messages,
      ...tools?.length ? { tools } : {}
    });
    return convertResponse(response);
  }
  async* streamResponse(messages, tools) {
    const stream = await this.client.chat.completions.create({
      model: this.model,
      messages,
      stream: true,
      ...tools?.length ? { tools } : {}
    });
    const acc = createStreamAccumulator();
    for await (const chunk of stream) {
      const { response, isComplete } = convertStreamChunk(chunk, acc);
      if (response)
        yield response;
      if (isComplete)
        break;
    }
  }
  errorResponse(error) {
    const isApiError = error instanceof OpenAI.APIError;
    return {
      errorCode: isApiError ? `API_ERROR_${error.status}` : "AI_GATEWAY_ERROR",
      errorMessage: error instanceof Error ? error.message : String(error),
      turnComplete: true
    };
  }
}

// src/register.ts
var registered = false;
function registerAIGateway(options) {
  if (registered) {
    console.warn("[adk-llm-bridge] Already registered");
    return;
  }
  if (options) {
    setConfig(options);
  }
  LLMRegistry.register(AIGatewayLlm);
  registered = true;
}
function isAIGatewayRegistered() {
  return registered;
}

// src/auto-register.ts
registerAIGateway();

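Reading note: `dist/auto-register.js` registers on import — its last line calls `registerAIGateway()` as a side effect, and the guard in the bundled `src/register.ts` makes any repeat call a warn-and-return no-op. A minimal sketch of the same flow through the public entry point (editor's illustration, not package code; the agent fields follow the README examples above):

```typescript
import { LlmAgent } from "@google/adk";
import { registerAIGateway, isAIGatewayRegistered } from "adk-llm-bridge";

// First call stores the options via setConfig and registers AIGatewayLlm.
registerAIGateway({ apiKey: process.env.AI_GATEWAY_API_KEY });
// A second call warns "[adk-llm-bridge] Already registered" and returns early.
registerAIGateway();

if (isAIGatewayRegistered()) {
  // Any "provider/model" string now matches MODEL_PATTERNS (/^.+\/.+$/),
  // so a plain string model name resolves to AIGatewayLlm via LLMRegistry.
  const agent = new LlmAgent({
    name: "assistant",
    model: "openai/gpt-4o-mini",
    instruction: "You are helpful.",
  });
}
```
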
package/dist/config.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAI/C,wBAAgB,SAAS,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI,CAExD;AAED,wBAAgB,SAAS,IAAI,QAAQ,CAAC,eAAe,CAAC,CAErD;AAED,wBAAgB,WAAW,IAAI,IAAI,CAElC"}

package/dist/constants.d.ts
ADDED

@@ -0,0 +1,11 @@
export declare const DEFAULT_BASE_URL = "https://ai-gateway.vercel.sh/v1";
export declare const DEFAULT_TIMEOUT = 60000;
export declare const DEFAULT_MAX_RETRIES = 2;
export declare const MODEL_PATTERNS: (string | RegExp)[];
export declare const ENV: {
    readonly AI_GATEWAY_URL: "AI_GATEWAY_URL";
    readonly AI_GATEWAY_API_KEY: "AI_GATEWAY_API_KEY";
    readonly OPENAI_BASE_URL: "OPENAI_BASE_URL";
    readonly OPENAI_API_KEY: "OPENAI_API_KEY";
};
//# sourceMappingURL=constants.d.ts.map

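These constants feed the constructor's fallback chain in the bundled code above: an explicit per-instance value wins, then options stored by `registerAIGateway`, then the `AI_GATEWAY_*` environment variables, then the `OPENAI_*` ones, then the defaults. A condensed sketch of that resolution order (editor's illustration, not package code):

```typescript
// How AIGatewayLlm resolves its base URL, per the bundled constructor.
const resolveBaseURL = (
  instance: { baseURL?: string },
  global: { baseURL?: string },
): string =>
  instance.baseURL ??            // per-instance config passed to AIGateway()
  global.baseURL ??              // options stored by registerAIGateway()
  process.env.AI_GATEWAY_URL ??  // gateway-specific env var
  process.env.OPENAI_BASE_URL ?? // generic OpenAI-compatible env var
  "https://ai-gateway.vercel.sh/v1"; // DEFAULT_BASE_URL
```
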
package/dist/constants.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,gBAAgB,oCAAoC,CAAC;AAClE,eAAO,MAAM,eAAe,QAAS,CAAC;AACtC,eAAO,MAAM,mBAAmB,IAAI,CAAC;AAIrC,eAAO,MAAM,cAAc,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAiB,CAAC;AAEhE,eAAO,MAAM,GAAG;;;;;CAKN,CAAC"}

package/dist/converters/index.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/converters/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EACL,eAAe,EACf,kBAAkB,EAClB,uBAAuB,GACxB,MAAM,YAAY,CAAC"}

package/dist/converters/request.d.ts
ADDED

@@ -0,0 +1,8 @@
import type { LlmRequest } from "@google/adk";
import type OpenAI from "openai";
export interface ConvertedRequest {
    messages: OpenAI.ChatCompletionMessageParam[];
    tools?: OpenAI.ChatCompletionTool[];
}
export declare function convertRequest(llmRequest: LlmRequest): ConvertedRequest;
//# sourceMappingURL=request.d.ts.map

package/dist/converters/request.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"request.d.ts","sourceRoot":"","sources":["../../src/converters/request.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAE9C,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC,MAAM,WAAW,gBAAgB;IAC/B,QAAQ,EAAE,MAAM,CAAC,0BAA0B,EAAE,CAAC;IAC9C,KAAK,CAAC,EAAE,MAAM,CAAC,kBAAkB,EAAE,CAAC;CACrC;AAED,wBAAgB,cAAc,CAAC,UAAU,EAAE,UAAU,GAAG,gBAAgB,CAavE"}

package/dist/converters/response.d.ts
ADDED

@@ -0,0 +1,7 @@
import type { LlmResponse } from "@google/adk";
import type OpenAI from "openai";
import type { StreamAccumulator, StreamChunkResult } from "../types";
export declare function convertResponse(response: OpenAI.ChatCompletion): LlmResponse;
export declare function convertStreamChunk(chunk: OpenAI.ChatCompletionChunk, acc: StreamAccumulator): StreamChunkResult;
export declare function createStreamAccumulator(): StreamAccumulator;
//# sourceMappingURL=response.d.ts.map

package/dist/converters/response.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"response.d.ts","sourceRoot":"","sources":["../../src/converters/response.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAE/C,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,UAAU,CAAC;AAErE,wBAAgB,eAAe,CAAC,QAAQ,EAAE,MAAM,CAAC,cAAc,GAAG,WAAW,CAqC5E;AAED,wBAAgB,kBAAkB,CAChC,KAAK,EAAE,MAAM,CAAC,mBAAmB,EACjC,GAAG,EAAE,iBAAiB,GACrB,iBAAiB,CA2DnB;AAED,wBAAgB,uBAAuB,IAAI,iBAAiB,CAE3D"}

package/dist/index.d.ts
ADDED

@@ -0,0 +1,6 @@
export { AIGatewayLlm } from "./ai-gateway-llm";
export { AIGateway } from "./ai-gateway";
export { registerAIGateway, isAIGatewayRegistered } from "./register";
export type { AIGatewayConfig, RegisterOptions } from "./types";
export { MODEL_PATTERNS, DEFAULT_BASE_URL } from "./constants";
//# sourceMappingURL=index.d.ts.map

package/dist/index.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,kBAAkB,CAAC;AAChD,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,YAAY,CAAC;AACtE,YAAY,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,aAAa,CAAC"}

package/dist/index.js
ADDED

@@ -0,0 +1,337 @@
// src/ai-gateway-llm.ts
import { BaseLlm } from "@google/adk";
import OpenAI from "openai";

// src/converters/request.ts
function convertRequest(llmRequest) {
  const messages = [];
  const systemContent = extractSystemInstruction(llmRequest);
  if (systemContent) {
    messages.push({ role: "system", content: systemContent });
  }
  for (const content of llmRequest.contents ?? []) {
    processContent(content, messages);
  }
  return { messages, tools: convertTools(llmRequest) };
}
function extractSystemInstruction(req) {
  const sys = req.config?.systemInstruction;
  if (!sys)
    return null;
  if (typeof sys === "string")
    return sys;
  if ("parts" in sys)
    return extractText(sys.parts ?? []);
  return null;
}
function extractText(parts) {
  return parts.map((p) => p.text).filter(Boolean).join(`
`);
}
function processContent(content, messages) {
  if (!content.parts?.length)
    return;
  const texts = [];
  const calls = [];
  const responses = [];
  for (const part of content.parts) {
    if (part.text)
      texts.push(part.text);
    if (part.functionCall) {
      calls.push({
        id: part.functionCall.id ?? `call_${Date.now()}`,
        name: part.functionCall.name ?? "",
        arguments: JSON.stringify(part.functionCall.args ?? {})
      });
    }
    if (part.functionResponse) {
      responses.push({
        id: part.functionResponse.id ?? "",
        content: JSON.stringify(part.functionResponse.response ?? {})
      });
    }
  }
  if (content.role === "user") {
    if (texts.length)
      messages.push({ role: "user", content: texts.join(`
`) });
    for (const r of responses) {
      messages.push({ role: "tool", tool_call_id: r.id, content: r.content });
    }
  } else if (content.role === "model") {
    if (texts.length || calls.length) {
      const msg = {
        role: "assistant",
        content: texts.length ? texts.join(`
`) : null
      };
      if (calls.length) {
        msg.tool_calls = calls.map((c) => ({
          id: c.id,
          type: "function",
          function: { name: c.name, arguments: c.arguments }
        }));
      }
      messages.push(msg);
    }
  }
}
function convertTools(req) {
  const adkTools = req.config?.tools;
  if (!adkTools?.length)
    return;
  const tools = [];
  for (const group of adkTools) {
    if ("functionDeclarations" in group && Array.isArray(group.functionDeclarations)) {
      for (const fn of group.functionDeclarations) {
        tools.push({
          type: "function",
          function: {
            name: fn.name ?? "",
            description: fn.description ?? "",
            parameters: normalizeSchema(fn.parameters) ?? {
              type: "object",
              properties: {}
            }
          }
        });
      }
    }
  }
  return tools.length ? tools : undefined;
}
function normalizeSchema(schema) {
  if (!schema || typeof schema !== "object")
    return;
  const result = {};
  const input = schema;
  for (const [key, value] of Object.entries(input)) {
    if (key === "type" && typeof value === "string") {
      result[key] = value.toLowerCase();
    } else if (typeof value === "object" && value !== null && !Array.isArray(value)) {
      result[key] = normalizeSchema(value);
    } else if (Array.isArray(value)) {
      result[key] = value;
    } else {
      result[key] = value;
    }
  }
  return result;
}

// src/converters/response.ts
function convertResponse(response) {
  const choice = response.choices[0];
  if (!choice) {
    return {
      errorCode: "NO_CHOICE",
      errorMessage: "No response choice",
      turnComplete: true
    };
  }
  const parts = [];
  if (choice.message.content) {
    parts.push({ text: choice.message.content });
  }
  for (const tc of choice.message.tool_calls ?? []) {
    parts.push({
      functionCall: {
        id: tc.id,
        name: tc.function.name,
        args: safeJsonParse(tc.function.arguments)
      }
    });
  }
  return {
    content: parts.length ? { role: "model", parts } : undefined,
    turnComplete: true,
    usageMetadata: response.usage ? {
      promptTokenCount: response.usage.prompt_tokens,
      candidatesTokenCount: response.usage.completion_tokens,
      totalTokenCount: response.usage.total_tokens
    } : undefined
  };
}
function convertStreamChunk(chunk, acc) {
  const choice = chunk.choices[0];
  if (!choice)
    return { isComplete: false };
  const delta = choice.delta;
  if (delta?.content) {
    acc.text += delta.content;
    return {
      response: {
        content: { role: "model", parts: [{ text: delta.content }] },
        partial: true
      },
      isComplete: false
    };
  }
  if (delta?.tool_calls) {
    for (const tc of delta.tool_calls) {
      const idx = tc.index ?? 0;
      let a = acc.toolCalls.get(idx);
      if (!a) {
        a = { id: "", name: "", arguments: "" };
        acc.toolCalls.set(idx, a);
      }
      if (tc.id)
        a.id = tc.id;
      if (tc.function?.name)
        a.name += tc.function.name;
      if (tc.function?.arguments)
        a.arguments += tc.function.arguments;
    }
  }
  if (choice.finish_reason) {
    const parts = [];
    if (acc.text)
      parts.push({ text: acc.text });
    for (const tc of Array.from(acc.toolCalls.values())) {
      if (tc.name) {
        parts.push({
          functionCall: {
            id: tc.id,
            name: tc.name,
            args: safeJsonParse(tc.arguments)
          }
        });
      }
    }
    acc.text = "";
    acc.toolCalls.clear();
    return {
      response: {
        content: parts.length ? { role: "model", parts } : undefined,
        turnComplete: true
      },
      isComplete: true
    };
  }
  return { isComplete: false };
}
function createStreamAccumulator() {
  return { text: "", toolCalls: new Map };
}
function safeJsonParse(str) {
  try {
    return JSON.parse(str);
  } catch {
    return {};
  }
}

// src/config.ts
var config = {};
function setConfig(options) {
  config = { ...options };
}
function getConfig() {
  return config;
}

// src/constants.ts
var DEFAULT_BASE_URL = "https://ai-gateway.vercel.sh/v1";
var DEFAULT_TIMEOUT = 60000;
var DEFAULT_MAX_RETRIES = 2;
var MODEL_PATTERNS = [/^.+\/.+$/];
var ENV = {
  AI_GATEWAY_URL: "AI_GATEWAY_URL",
  AI_GATEWAY_API_KEY: "AI_GATEWAY_API_KEY",
  OPENAI_BASE_URL: "OPENAI_BASE_URL",
  OPENAI_API_KEY: "OPENAI_API_KEY"
};

// src/ai-gateway-llm.ts
class AIGatewayLlm extends BaseLlm {
  client;
  static supportedModels = MODEL_PATTERNS;
  constructor(config2) {
    super({ model: config2.model });
    const globalConfig = getConfig();
    const baseURL = config2.baseURL ?? globalConfig.baseURL ?? process.env[ENV.AI_GATEWAY_URL] ?? process.env[ENV.OPENAI_BASE_URL] ?? DEFAULT_BASE_URL;
    const apiKey = config2.apiKey ?? globalConfig.apiKey ?? process.env[ENV.AI_GATEWAY_API_KEY] ?? process.env[ENV.OPENAI_API_KEY] ?? "";
    this.client = new OpenAI({
      baseURL,
      apiKey,
      timeout: config2.timeout ?? DEFAULT_TIMEOUT,
      maxRetries: config2.maxRetries ?? DEFAULT_MAX_RETRIES
    });
  }
  async* generateContentAsync(llmRequest, stream = false) {
    try {
      const { messages, tools } = convertRequest(llmRequest);
      if (stream) {
        yield* this.streamResponse(messages, tools);
      } else {
        yield await this.singleResponse(messages, tools);
      }
    } catch (error) {
      yield this.errorResponse(error);
    }
  }
  async connect(_) {
    throw new Error("AIGatewayLlm does not support bidirectional streaming");
  }
  async singleResponse(messages, tools) {
    const response = await this.client.chat.completions.create({
      model: this.model,
      messages,
      ...tools?.length ? { tools } : {}
    });
    return convertResponse(response);
  }
  async* streamResponse(messages, tools) {
    const stream = await this.client.chat.completions.create({
      model: this.model,
      messages,
      stream: true,
      ...tools?.length ? { tools } : {}
    });
    const acc = createStreamAccumulator();
    for await (const chunk of stream) {
      const { response, isComplete } = convertStreamChunk(chunk, acc);
      if (response)
        yield response;
      if (isComplete)
        break;
    }
  }
  errorResponse(error) {
    const isApiError = error instanceof OpenAI.APIError;
    return {
      errorCode: isApiError ? `API_ERROR_${error.status}` : "AI_GATEWAY_ERROR",
      errorMessage: error instanceof Error ? error.message : String(error),
      turnComplete: true
    };
  }
}
// src/ai-gateway.ts
function AIGateway(model, options) {
  return new AIGatewayLlm({ model, ...options });
}
// src/register.ts
import { LLMRegistry } from "@google/adk";
var registered = false;
function registerAIGateway(options) {
  if (registered) {
    console.warn("[adk-llm-bridge] Already registered");
    return;
  }
  if (options) {
    setConfig(options);
  }
  LLMRegistry.register(AIGatewayLlm);
  registered = true;
}
function isAIGatewayRegistered() {
  return registered;
}
export {
  registerAIGateway,
  isAIGatewayRegistered,
  MODEL_PATTERNS,
  DEFAULT_BASE_URL,
  AIGatewayLlm,
  AIGateway
};

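To make the streaming path above concrete: with `stream = true`, `generateContentAsync` yields one `partial` response per text delta and a final `turnComplete` response once `finish_reason` arrives. A hedged usage sketch (editor's illustration, not package code; the request literal only fills the fields `convertRequest` actually reads, and the cast stands in for the real `LlmRequest` type from `@google/adk`):

```typescript
import { AIGatewayLlm } from "adk-llm-bridge";
import type { LlmRequest } from "@google/adk";

const llm = new AIGatewayLlm({ model: "anthropic/claude-sonnet-4" });

// Minimal request: convertRequest reads config.systemInstruction and contents.
const llmRequest = {
  config: { systemInstruction: "You are helpful." },
  contents: [{ role: "user", parts: [{ text: "Hello!" }] }],
} as unknown as LlmRequest; // assumed shape, for illustration only

for await (const res of llm.generateContentAsync(llmRequest, true)) {
  if (res.partial) process.stdout.write(res.content?.parts?.[0]?.text ?? "");
  if (res.turnComplete) break; // final aggregated text and/or tool calls
}
```
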
package/dist/register.d.ts
ADDED

@@ -0,0 +1,6 @@
import type { RegisterOptions } from "./types";
export declare function registerAIGateway(options?: RegisterOptions): void;
export declare function isAIGatewayRegistered(): boolean;
/** @internal */
export declare function _resetRegistration(): void;
//# sourceMappingURL=register.d.ts.map

package/dist/register.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"register.d.ts","sourceRoot":"","sources":["../src/register.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAI/C,wBAAgB,iBAAiB,CAAC,OAAO,CAAC,EAAE,eAAe,GAAG,IAAI,CAYjE;AAED,wBAAgB,qBAAqB,IAAI,OAAO,CAE/C;AAED,gBAAgB;AAChB,wBAAgB,kBAAkB,IAAI,IAAI,CAGzC"}

package/dist/types.d.ts
ADDED

@@ -0,0 +1,26 @@
import type { LlmResponse } from "@google/adk";
export interface AIGatewayConfig {
    model: string;
    baseURL?: string;
    apiKey?: string;
    timeout?: number;
    maxRetries?: number;
}
export interface RegisterOptions {
    baseURL?: string;
    apiKey?: string;
}
export interface ToolCallAccumulator {
    id: string;
    name: string;
    arguments: string;
}
export interface StreamAccumulator {
    text: string;
    toolCalls: Map<number, ToolCallAccumulator>;
}
export interface StreamChunkResult {
    response?: LlmResponse;
    isComplete: boolean;
}
//# sourceMappingURL=types.d.ts.map

package/dist/types.d.ts.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAE/C,MAAM,WAAW,eAAe;IAC9B,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,eAAe;IAC9B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,mBAAmB;IAClC,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,GAAG,CAAC,MAAM,EAAE,mBAAmB,CAAC,CAAC;CAC7C;AAED,MAAM,WAAW,iBAAiB;IAChC,QAAQ,CAAC,EAAE,WAAW,CAAC;IACvB,UAAU,EAAE,OAAO,CAAC;CACrB"}

package/package.json
ADDED

@@ -0,0 +1,65 @@
{
  "name": "adk-llm-bridge",
  "version": "0.1.0",
  "description": "Connect Google ADK TypeScript to Vercel AI Gateway and 100+ LLM models",
  "keywords": [
    "adk",
    "google-adk",
    "ai-gateway",
    "vercel",
    "llm",
    "agents"
  ],
  "license": "MIT",
  "author": "PAI",
  "repository": {
    "type": "git",
    "url": "https://github.com/pailat/adk-llm-bridge"
  },
  "homepage": "https://github.com/pailat/adk-llm-bridge#readme",
  "bugs": {
    "url": "https://github.com/pailat/adk-llm-bridge/issues"
  },
  "type": "module",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    }
  },
  "files": [
    "dist"
  ],
  "engines": {
    "node": ">=18.0.0"
  },
  "scripts": {
    "build": "bun build ./src/index.ts --outdir ./dist --target node --external '@google/adk' --external '@google/genai' --external 'openai' && bun run build:types",
    "build:types": "tsc --emitDeclarationOnly --outDir dist",
    "test": "bun test",
    "typecheck": "tsc --noEmit",
    "typecheck:all": "tsc --noEmit -p tsconfig.test.json",
    "lint": "biome lint ./src ./tests",
    "lint:fix": "biome lint --write ./src ./tests",
    "format": "biome format ./src ./tests",
    "format:fix": "biome format --write ./src ./tests",
    "check": "biome check ./src ./tests",
    "check:fix": "biome check --write ./src ./tests",
    "ci": "bun run typecheck:all && bun run lint && bun test && bun run build",
    "prepublishOnly": "bun run build"
  },
  "peerDependencies": {
    "@google/adk": ">=0.2.0"
  },
  "dependencies": {
    "openai": "^4.73.0"
  },
  "devDependencies": {
    "@biomejs/biome": "^2.3.10",
    "@google/adk": "^0.2.1",
    "@types/bun": "latest",
    "typescript": "^5.3.0"
  }
}

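One consequence of this manifest worth noting: with `"type": "module"` and an `exports` map that exposes only a `types`/`import` entry for `"."`, the package is effectively ESM-only — a CommonJS `require("adk-llm-bridge")` should fail to resolve, and no `dist` subpaths are importable. Consumption sketch (editor's illustration):

```typescript
// ESM import only — "." is the single entry point the exports map exposes.
import { AIGateway, AIGatewayLlm, registerAIGateway } from "adk-llm-bridge";
```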