adk-llm-bridge 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. package/README.md +28 -30
  2. package/dist/config.d.ts +124 -8
  3. package/dist/config.d.ts.map +1 -1
  4. package/dist/constants.d.ts +104 -0
  5. package/dist/constants.d.ts.map +1 -1
  6. package/dist/converters/index.d.ts +29 -0
  7. package/dist/converters/index.d.ts.map +1 -1
  8. package/dist/converters/request.d.ts +79 -0
  9. package/dist/converters/request.d.ts.map +1 -1
  10. package/dist/converters/response.d.ts +96 -0
  11. package/dist/converters/response.d.ts.map +1 -1
  12. package/dist/core/base-provider-llm.d.ts +95 -5
  13. package/dist/core/base-provider-llm.d.ts.map +1 -1
  14. package/dist/core/index.d.ts +37 -0
  15. package/dist/core/index.d.ts.map +1 -1
  16. package/dist/core/openai-compatible-llm.d.ts +163 -5
  17. package/dist/core/openai-compatible-llm.d.ts.map +1 -1
  18. package/dist/index.d.ts +59 -0
  19. package/dist/index.d.ts.map +1 -1
  20. package/dist/index.js +6 -471
  21. package/dist/index.js.map +21 -0
  22. package/dist/providers/ai-gateway/ai-gateway-llm.d.ts +67 -1
  23. package/dist/providers/ai-gateway/ai-gateway-llm.d.ts.map +1 -1
  24. package/dist/providers/ai-gateway/factory.d.ts +42 -2
  25. package/dist/providers/ai-gateway/factory.d.ts.map +1 -1
  26. package/dist/providers/ai-gateway/index.d.ts +24 -0
  27. package/dist/providers/ai-gateway/index.d.ts.map +1 -1
  28. package/dist/providers/ai-gateway/register.d.ts +48 -4
  29. package/dist/providers/ai-gateway/register.d.ts.map +1 -1
  30. package/dist/providers/index.d.ts +24 -0
  31. package/dist/providers/index.d.ts.map +1 -1
  32. package/dist/providers/openrouter/factory.d.ts +42 -6
  33. package/dist/providers/openrouter/factory.d.ts.map +1 -1
  34. package/dist/providers/openrouter/index.d.ts +27 -0
  35. package/dist/providers/openrouter/index.d.ts.map +1 -1
  36. package/dist/providers/openrouter/openrouter-llm.d.ts +77 -3
  37. package/dist/providers/openrouter/openrouter-llm.d.ts.map +1 -1
  38. package/dist/providers/openrouter/register.d.ts +43 -4
  39. package/dist/providers/openrouter/register.d.ts.map +1 -1
  40. package/dist/types.d.ts +281 -14
  41. package/dist/types.d.ts.map +1 -1
  42. package/package.json +4 -3
  43. package/dist/AIGatewayLlm.d.ts +0 -14
  44. package/dist/AIGatewayLlm.d.ts.map +0 -1
  45. package/dist/ai-gateway-llm.d.ts +0 -2
  46. package/dist/ai-gateway-llm.d.ts.map +0 -1
  47. package/dist/ai-gateway.d.ts +0 -2
  48. package/dist/ai-gateway.d.ts.map +0 -1
  49. package/dist/auto-register.d.ts +0 -2
  50. package/dist/auto-register.d.ts.map +0 -1
  51. package/dist/auto-register.js +0 -331
  52. package/dist/register.d.ts +0 -7
  53. package/dist/register.d.ts.map +0 -1
package/README.md CHANGED
@@ -7,22 +7,20 @@ Use **any LLM** with [Google ADK TypeScript](https://github.com/google/adk-js) i
7
7
 
8
8
  ## Why?
9
9
 
10
- Google ADK TypeScript [only supports Gemini models natively](https://github.com/google/adk-js/blob/main/core/src/models/registry.ts#L113) (unlike the Python version which has LiteLLM integration). This lightweight bridge lets you use **any model** from providers like Anthropic, OpenAI, Meta, and more—while keeping all ADK features like multi-agent orchestration, tool calling, and streaming.
10
+ Google ADK TypeScript comes with built-in Gemini support. This lightweight bridge extends it to work with **any model** from providers like Anthropic, OpenAI, Meta, and more—while keeping all ADK features like multi-agent orchestration, tool calling, and streaming.
11
11
 
12
12
  ### Key Benefits
13
13
 
14
- | | |
15
- |---|---|
16
- | **Minimal** | ~13KB bundle, single dependency (`openai`) |
17
- | **Simple** | 3 lines to integrate any model |
18
- | **Secure** | No complex dependency tree, just the battle-tested OpenAI SDK |
19
- | **Compatible** | Works with any OpenAI-compatible API (AI Gateway, OpenRouter, etc.) |
14
+ - **Minimal** — ~13KB bundle, single dependency (`openai`)
15
+ - **Simple** — 3 lines to integrate any model
16
+ - **Secure** — No complex dependency tree, just the battle-tested OpenAI SDK
17
+ - **Compatible** — Works with any OpenAI-compatible API (AI Gateway, OpenRouter, etc.)
20
18
 
21
19
  ## Supported Providers
22
20
 
23
21
  | Provider | Models | Features |
24
22
  |----------|--------|----------|
25
- | **[Vercel AI Gateway](https://vercel.com/ai-gateway)** | 100+ models (Claude, GPT-4, Llama, Mistral, etc.) | Simple, fast |
23
+ | **[Vercel AI Gateway](https://vercel.com/ai-gateway)** | 100+ models (Claude, GPT, Llama, Gemini, etc.) | Simple, fast |
26
24
  | **[OpenRouter](https://openrouter.ai/)** | 100+ models | Provider routing, fallbacks, price optimization |
27
25
 
28
26
  ## How It Works
@@ -63,7 +61,7 @@ npm install adk-llm-bridge @google/adk
63
61
 
64
62
  ## Quick Start
65
63
 
66
- Just 3 lines to use Claude, GPT-4, or any model with ADK:
64
+ Just 3 lines to use Claude, GPT, Gemini, or any model with ADK:
67
65
 
68
66
  ```typescript
69
67
  import { LlmAgent, LLMRegistry } from '@google/adk';
@@ -73,7 +71,7 @@ LLMRegistry.register(AIGatewayLlm); // 2. Register
73
71
 
74
72
  const agent = new LlmAgent({ // 3. Use any model
75
73
  name: 'assistant',
76
- model: 'anthropic/claude-sonnet-4', // Claude, GPT-4, Llama, etc.
74
+ model: 'anthropic/claude-sonnet-4.5', // Claude, GPT, Llama, etc.
77
75
  instruction: 'You are a helpful assistant.',
78
76
  });
79
77
  ```
@@ -90,7 +88,7 @@ LLMRegistry.register(OpenRouterLlm);
90
88
 
91
89
  const agent = new LlmAgent({
92
90
  name: 'assistant',
93
- model: 'anthropic/claude-sonnet-4',
91
+ model: 'anthropic/claude-sonnet-4.5',
94
92
  instruction: 'You are a helpful assistant.',
95
93
  });
96
94
  ```
@@ -140,14 +138,14 @@ import { AIGateway, OpenRouter } from 'adk-llm-bridge';
140
138
  // AI Gateway
141
139
  const agent1 = new LlmAgent({
142
140
  name: 'assistant',
143
- model: AIGateway('anthropic/claude-sonnet-4', { timeout: 30000 }),
141
+ model: AIGateway('anthropic/claude-sonnet-4.5', { timeout: 30000 }),
144
142
  instruction: 'You are helpful.',
145
143
  });
146
144
 
147
145
  // OpenRouter with provider routing
148
146
  const agent2 = new LlmAgent({
149
147
  name: 'fast-assistant',
150
- model: OpenRouter('anthropic/claude-sonnet-4', {
148
+ model: OpenRouter('anthropic/claude-sonnet-4.5', {
151
149
  provider: {
152
150
  sort: 'latency',
153
151
  allow_fallbacks: true,
@@ -164,7 +162,7 @@ OpenRouter provides additional features not available in AI Gateway:
164
162
  ```typescript
165
163
  import { OpenRouter } from 'adk-llm-bridge';
166
164
 
167
- const llm = OpenRouter('anthropic/claude-sonnet-4', {
165
+ const llm = OpenRouter('anthropic/claude-sonnet-4.5', {
168
166
  // Ranking headers (improves your rate limits)
169
167
  siteUrl: 'https://your-site.com',
170
168
  appName: 'Your App',
@@ -184,26 +182,26 @@ const llm = OpenRouter('anthropic/claude-sonnet-4', {
184
182
  Use the `provider/model` format:
185
183
 
186
184
  ```
187
- anthropic/claude-sonnet-4
188
- openai/gpt-4o
189
- google/gemini-2.0-flash
190
- meta/llama-3.1-70b
191
- mistral/mistral-large
192
- xai/grok-2
193
- deepseek/deepseek-chat
185
+ anthropic/claude-opus-4.5
186
+ openai/gpt-5.2-pro
187
+ google/gemini-3-pro
188
+ meta/llama-3.3-70b-instruct
189
+ mistral/mistral-large-3
190
+ xai/grok-4.1
191
+ deepseek/deepseek-r1
194
192
  ```
195
193
 
196
194
  ### Popular Models
197
195
 
198
196
  | Provider | Models |
199
197
  |----------|--------|
200
- | Anthropic | `anthropic/claude-opus-4`, `anthropic/claude-sonnet-4` |
201
- | OpenAI | `openai/gpt-4.1`, `openai/o3`, `openai/gpt-4o` |
202
- | Google | `google/gemini-2.5-pro`, `google/gemini-2.5-flash` |
203
- | Meta | `meta/llama-4-scout`, `meta/llama-4-maverick` |
204
- | Mistral | `mistral/mistral-large-2411`, `mistral/pixtral-large` |
205
- | xAI | `xai/grok-3`, `xai/grok-3-mini` |
206
- | DeepSeek | `deepseek/deepseek-v3`, `deepseek/deepseek-r1` |
198
+ | Anthropic | `anthropic/claude-sonnet-4.5`, `anthropic/claude-opus-4.5` |
199
+ | OpenAI | `openai/gpt-5.2-pro`, `openai/gpt-4.1`, `openai/o3-mini` |
200
+ | Google | `google/gemini-3-pro`, `google/gemini-3-flash`, `google/gemini-2.5-pro` |
201
+ | Meta | `meta/llama-3.3-70b-instruct`, `meta/llama-3.1-405b-instruct` |
202
+ | Mistral | `mistral/mistral-large-3`, `mistral/mistral-large-2411` |
203
+ | xAI | `xai/grok-4.1`, `xai/grok-4-fast`, `xai/grok-3` |
204
+ | DeepSeek | `deepseek/deepseek-v3.2`, `deepseek/deepseek-r1` |
207
205
 
208
206
  Browse all models:
209
207
  - [Vercel AI Gateway Models](https://vercel.com/ai-gateway/models)
@@ -240,7 +238,7 @@ const getWeather = new FunctionTool({
240
238
 
241
239
  const agent = new LlmAgent({
242
240
  name: 'weather-assistant',
243
- model: 'anthropic/claude-sonnet-4',
241
+ model: 'anthropic/claude-sonnet-4.5',
244
242
  instruction: 'You help users check the weather.',
245
243
  tools: [getWeather],
246
244
  });
@@ -261,7 +259,7 @@ LLMRegistry.register(OpenRouterLlm);
261
259
 
262
260
  export const rootAgent = new LlmAgent({
263
261
  name: 'assistant',
264
- model: 'anthropic/claude-sonnet-4',
262
+ model: 'anthropic/claude-sonnet-4.5',
265
263
  instruction: 'You are helpful.',
266
264
  });
267
265
  ```
package/dist/config.d.ts CHANGED
@@ -1,44 +1,160 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 PAI
4
+ * SPDX-License-Identifier: MIT
5
+ */
6
+ /**
7
+ * Global configuration management for LLM providers.
8
+ *
9
+ * This module provides functions to set, get, and reset global configuration
10
+ * that applies to all instances of a provider. Configuration set here acts
11
+ * as a fallback when instance-specific configuration is not provided.
12
+ *
13
+ * Configuration priority (highest to lowest):
14
+ * 1. Instance configuration (passed to constructor or factory)
15
+ * 2. Global configuration (set via this module)
16
+ * 3. Environment variables
17
+ * 4. Default values
18
+ *
19
+ * @module config
20
+ *
21
+ * @example
22
+ * ```typescript
23
+ * import { setProviderConfig } from "adk-llm-bridge";
24
+ *
25
+ * // Set global config for AI Gateway
26
+ * setProviderConfig("ai-gateway", {
27
+ * apiKey: process.env.AI_GATEWAY_API_KEY
28
+ * });
29
+ *
30
+ * // Set global config for OpenRouter
31
+ * setProviderConfig("openrouter", {
32
+ * apiKey: process.env.OPENROUTER_API_KEY,
33
+ * siteUrl: "https://myapp.com"
34
+ * });
35
+ * ```
36
+ */
1
37
  import type { RegisterOptions, OpenRouterRegisterOptions } from "./types";
38
+ /**
39
+ * Mapping of provider identifiers to their configuration types.
40
+ *
41
+ * @internal
42
+ */
2
43
  type ProviderConfigMap = {
3
44
  "ai-gateway": RegisterOptions;
4
45
  openrouter: OpenRouterRegisterOptions;
5
46
  };
47
+ /**
48
+ * Valid provider type identifiers.
49
+ *
50
+ * @internal
51
+ */
6
52
  type ProviderType = keyof ProviderConfigMap;
7
53
  /**
8
- * Sets configuration for a specific provider.
54
+ * Sets global configuration for a specific provider.
55
+ *
56
+ * This configuration is used as a fallback when creating LLM instances
57
+ * without explicit configuration. Instance configuration always takes
58
+ * precedence over global configuration.
59
+ *
60
+ * @param provider - The provider identifier ("ai-gateway" or "openrouter")
61
+ * @param options - Configuration options for the provider
9
62
  *
10
63
  * @example
11
64
  * ```typescript
12
- * setProviderConfig("ai-gateway", { apiKey: "..." });
13
- * setProviderConfig("openrouter", { apiKey: "...", siteUrl: "https://myapp.com" });
65
+ * // Configure AI Gateway globally
66
+ * setProviderConfig("ai-gateway", {
67
+ * apiKey: "your-api-key",
68
+ * baseURL: "https://custom-gateway.example.com/v1"
69
+ * });
70
+ * ```
71
+ *
72
+ * @example
73
+ * ```typescript
74
+ * // Configure OpenRouter with site attribution
75
+ * setProviderConfig("openrouter", {
76
+ * apiKey: "your-api-key",
77
+ * siteUrl: "https://myapp.com",
78
+ * appName: "My Application"
79
+ * });
14
80
  * ```
15
81
  */
16
82
  export declare function setProviderConfig<T extends ProviderType>(provider: T, options: ProviderConfigMap[T]): void;
17
83
  /**
18
- * Gets configuration for a specific provider.
84
+ * Gets the current global configuration for a specific provider.
85
+ *
86
+ * @param provider - The provider identifier ("ai-gateway" or "openrouter")
87
+ * @returns The current configuration, or `undefined` if not set
88
+ *
89
+ * @example
90
+ * ```typescript
91
+ * const config = getProviderConfig("ai-gateway");
92
+ * if (config?.apiKey) {
93
+ * console.log("AI Gateway is configured");
94
+ * }
95
+ * ```
19
96
  */
20
97
  export declare function getProviderConfig<T extends ProviderType>(provider: T): Readonly<ProviderConfigMap[T]> | undefined;
21
98
  /**
22
- * Resets configuration for a specific provider.
99
+ * Resets the global configuration for a specific provider.
100
+ *
101
+ * After calling this, the provider will fall back to environment variables
102
+ * or default values.
103
+ *
104
+ * @param provider - The provider identifier ("ai-gateway" or "openrouter")
105
+ *
106
+ * @example
107
+ * ```typescript
108
+ * resetProviderConfig("ai-gateway");
109
+ * ```
23
110
  */
24
111
  export declare function resetProviderConfig(provider: ProviderType): void;
25
112
  /**
26
113
  * Resets all provider configurations.
114
+ *
115
+ * Useful for testing or when you need to clear all global state.
116
+ *
117
+ * @example
118
+ * ```typescript
119
+ * // In test teardown
120
+ * afterEach(() => {
121
+ * resetAllConfigs();
122
+ * });
123
+ * ```
27
124
  */
28
125
  export declare function resetAllConfigs(): void;
29
126
  /**
30
127
  * Sets global configuration for AI Gateway.
31
- * @deprecated Use `setProviderConfig("ai-gateway", options)` instead.
128
+ *
129
+ * @param options - Configuration options
130
+ *
131
+ * @deprecated Use {@link setProviderConfig | setProviderConfig("ai-gateway", options)} instead.
132
+ * This function will be removed in a future major version.
133
+ *
134
+ * @example
135
+ * ```typescript
136
+ * // Old way (deprecated)
137
+ * setConfig({ apiKey: "..." });
138
+ *
139
+ * // New way
140
+ * setProviderConfig("ai-gateway", { apiKey: "..." });
141
+ * ```
32
142
  */
33
143
  export declare function setConfig(options: RegisterOptions): void;
34
144
  /**
35
145
  * Gets global configuration for AI Gateway.
36
- * @deprecated Use `getProviderConfig("ai-gateway")` instead.
146
+ *
147
+ * @returns The current AI Gateway configuration
148
+ *
149
+ * @deprecated Use {@link getProviderConfig | getProviderConfig("ai-gateway")} instead.
150
+ * This function will be removed in a future major version.
37
151
  */
38
152
  export declare function getConfig(): Readonly<RegisterOptions>;
39
153
  /**
40
154
  * Resets global configuration for AI Gateway.
41
- * @deprecated Use `resetProviderConfig("ai-gateway")` instead.
155
+ *
156
+ * @deprecated Use {@link resetProviderConfig | resetProviderConfig("ai-gateway")} instead.
157
+ * This function will be removed in a future major version.
42
158
  */
43
159
  export declare function resetConfig(): void;
44
160
  export {};
@@ -1 +1 @@
1
- {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAE,yBAAyB,EAAE,MAAM,SAAS,CAAC;AAE1E,KAAK,iBAAiB,GAAG;IACvB,YAAY,EAAE,eAAe,CAAC;IAC9B,UAAU,EAAE,yBAAyB,CAAC;CACvC,CAAC;AAEF,KAAK,YAAY,GAAG,MAAM,iBAAiB,CAAC;AAS5C;;;;;;;;GAQG;AACH,wBAAgB,iBAAiB,CAAC,CAAC,SAAS,YAAY,EACtD,QAAQ,EAAE,CAAC,EACX,OAAO,EAAE,iBAAiB,CAAC,CAAC,CAAC,GAC5B,IAAI,CAEN;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,CAAC,SAAS,YAAY,EACtD,QAAQ,EAAE,CAAC,GACV,QAAQ,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,GAAG,SAAS,CAE5C;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,YAAY,GAAG,IAAI,CAEhE;AAED;;GAEG;AACH,wBAAgB,eAAe,IAAI,IAAI,CAItC;AAMD;;;GAGG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI,CAExD;AAED;;;GAGG;AACH,wBAAgB,SAAS,IAAI,QAAQ,CAAC,eAAe,CAAC,CAErD;AAED;;;GAGG;AACH,wBAAgB,WAAW,IAAI,IAAI,CAElC"}
1
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA8BG;AAEH,OAAO,KAAK,EAAE,eAAe,EAAE,yBAAyB,EAAE,MAAM,SAAS,CAAC;AAE1E;;;;GAIG;AACH,KAAK,iBAAiB,GAAG;IACvB,YAAY,EAAE,eAAe,CAAC;IAC9B,UAAU,EAAE,yBAAyB,CAAC;CACvC,CAAC;AAEF;;;;GAIG;AACH,KAAK,YAAY,GAAG,MAAM,iBAAiB,CAAC;AAU5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4BG;AACH,wBAAgB,iBAAiB,CAAC,CAAC,SAAS,YAAY,EACtD,QAAQ,EAAE,CAAC,EACX,OAAO,EAAE,iBAAiB,CAAC,CAAC,CAAC,GAC5B,IAAI,CAEN;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,iBAAiB,CAAC,CAAC,SAAS,YAAY,EACtD,QAAQ,EAAE,CAAC,GACV,QAAQ,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,GAAG,SAAS,CAE5C;AAED;;;;;;;;;;;;GAYG;AACH,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,YAAY,GAAG,IAAI,CAEhE;AAED;;;;;;;;;;;;GAYG;AACH,wBAAgB,eAAe,IAAI,IAAI,CAItC;AAMD;;;;;;;;;;;;;;;;GAgBG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI,CAExD;AAED;;;;;;;GAOG;AACH,wBAAgB,SAAS,IAAI,QAAQ,CAAC,eAAe,CAAC,CAErD;AAED;;;;;GAKG;AACH,wBAAgB,WAAW,IAAI,IAAI,CAElC"}
@@ -1,22 +1,126 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 PAI
4
+ * SPDX-License-Identifier: MIT
5
+ */
6
+ /**
7
+ * Constants and default values for adk-llm-bridge.
8
+ *
9
+ * This module contains all constant values including default URLs,
10
+ * timeouts, environment variable names, and model patterns.
11
+ *
12
+ * @module constants
13
+ */
14
+ /**
15
+ * Default base URL for the Vercel AI Gateway API.
16
+ *
17
+ * @constant
18
+ * @see {@link https://vercel.com/ai-gateway|Vercel AI Gateway}
19
+ */
1
20
  export declare const DEFAULT_BASE_URL = "https://ai-gateway.vercel.sh/v1";
21
+ /**
22
+ * Default request timeout in milliseconds.
23
+ *
24
+ * @constant
25
+ * @defaultValue 60000 (60 seconds)
26
+ */
2
27
  export declare const DEFAULT_TIMEOUT = 60000;
28
+ /**
29
+ * Default maximum number of retry attempts for failed requests.
30
+ *
31
+ * @constant
32
+ * @defaultValue 2
33
+ */
3
34
  export declare const DEFAULT_MAX_RETRIES = 2;
35
+ /**
36
+ * Model patterns for AI Gateway model validation.
37
+ *
38
+ * Matches any model identifier with the format "provider/model".
39
+ * AI Gateway validates actual model availability at runtime.
40
+ *
41
+ * @constant
42
+ * @example
43
+ * ```typescript
44
+ * MODEL_PATTERNS[0].test("anthropic/claude-sonnet-4"); // true
45
+ * MODEL_PATTERNS[0].test("invalid"); // false
46
+ * ```
47
+ */
4
48
  export declare const MODEL_PATTERNS: (string | RegExp)[];
49
+ /**
50
+ * Environment variable names for AI Gateway configuration.
51
+ *
52
+ * These environment variables are checked in order when resolving configuration.
53
+ *
54
+ * @constant
55
+ *
56
+ * @example
57
+ * ```bash
58
+ * # Set in your environment or .env file
59
+ * export AI_GATEWAY_API_KEY="your-api-key"
60
+ * export AI_GATEWAY_URL="https://custom-gateway.example.com/v1"
61
+ * ```
62
+ */
5
63
  export declare const ENV: {
64
+ /** Environment variable for AI Gateway URL override */
6
65
  readonly AI_GATEWAY_URL: "AI_GATEWAY_URL";
66
+ /** Environment variable for AI Gateway API key */
7
67
  readonly AI_GATEWAY_API_KEY: "AI_GATEWAY_API_KEY";
68
+ /** Fallback environment variable for base URL (OpenAI compatibility) */
8
69
  readonly OPENAI_BASE_URL: "OPENAI_BASE_URL";
70
+ /** Fallback environment variable for API key (OpenAI compatibility) */
9
71
  readonly OPENAI_API_KEY: "OPENAI_API_KEY";
10
72
  };
73
+ /**
74
+ * Default base URL for the OpenRouter API.
75
+ *
76
+ * @constant
77
+ * @see {@link https://openrouter.ai/docs|OpenRouter Documentation}
78
+ */
11
79
  export declare const OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
80
+ /**
81
+ * Model patterns for OpenRouter model validation.
82
+ *
83
+ * Uses the same "provider/model" format as AI Gateway.
84
+ *
85
+ * @constant
86
+ * @example
87
+ * ```typescript
88
+ * OPENROUTER_MODEL_PATTERNS[0].test("anthropic/claude-sonnet-4"); // true
89
+ * ```
90
+ */
12
91
  export declare const OPENROUTER_MODEL_PATTERNS: (string | RegExp)[];
92
+ /**
93
+ * Environment variable names for OpenRouter configuration.
94
+ *
95
+ * @constant
96
+ *
97
+ * @example
98
+ * ```bash
99
+ * # Set in your environment or .env file
100
+ * export OPENROUTER_API_KEY="your-api-key"
101
+ * export OPENROUTER_SITE_URL="https://myapp.com"
102
+ * export OPENROUTER_APP_NAME="My Application"
103
+ * ```
104
+ */
13
105
  export declare const OPENROUTER_ENV: {
106
+ /** Environment variable for OpenRouter API key */
14
107
  readonly API_KEY: "OPENROUTER_API_KEY";
108
+ /** Environment variable for site URL (used for rankings) */
15
109
  readonly SITE_URL: "OPENROUTER_SITE_URL";
110
+ /** Environment variable for app name (used for rankings) */
16
111
  readonly APP_NAME: "OPENROUTER_APP_NAME";
17
112
  };
113
+ /**
114
+ * Unique identifiers for each provider.
115
+ *
116
+ * Used internally for configuration management and registry operations.
117
+ *
118
+ * @constant
119
+ */
18
120
  export declare const PROVIDER_IDS: {
121
+ /** Identifier for the AI Gateway provider */
19
122
  readonly AI_GATEWAY: "ai-gateway";
123
+ /** Identifier for the OpenRouter provider */
20
124
  readonly OPENROUTER: "openrouter";
21
125
  };
22
126
  //# sourceMappingURL=constants.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,gBAAgB,oCAAoC,CAAC;AAClE,eAAO,MAAM,eAAe,QAAS,CAAC;AACtC,eAAO,MAAM,mBAAmB,IAAI,CAAC;AAIrC,eAAO,MAAM,cAAc,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAiB,CAAC;AAEhE,eAAO,MAAM,GAAG;;;;;CAKN,CAAC;AAMX,eAAO,MAAM,mBAAmB,iCAAiC,CAAC;AAGlE,eAAO,MAAM,yBAAyB,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAiB,CAAC;AAE3E,eAAO,MAAM,cAAc;;;;CAIjB,CAAC;AAMX,eAAO,MAAM,YAAY;;;CAGf,CAAC"}
1
+ {"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;;;;;;GAOG;AAMH;;;;;GAKG;AACH,eAAO,MAAM,gBAAgB,oCAAoC,CAAC;AAElE;;;;;GAKG;AACH,eAAO,MAAM,eAAe,QAAS,CAAC;AAEtC;;;;;GAKG;AACH,eAAO,MAAM,mBAAmB,IAAI,CAAC;AAErC;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,cAAc,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAiB,CAAC;AAEhE;;;;;;;;;;;;;GAaG;AACH,eAAO,MAAM,GAAG;IACd,uDAAuD;;IAGvD,kDAAkD;;IAGlD,wEAAwE;;IAGxE,uEAAuE;;CAE/D,CAAC;AAMX;;;;;GAKG;AACH,eAAO,MAAM,mBAAmB,iCAAiC,CAAC;AAElE;;;;;;;;;;GAUG;AACH,eAAO,MAAM,yBAAyB,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAiB,CAAC;AAE3E;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,cAAc;IACzB,kDAAkD;;IAGlD,4DAA4D;;IAG5D,4DAA4D;;CAEpD,CAAC;AAMX;;;;;;GAMG;AACH,eAAO,MAAM,YAAY;IACvB,6CAA6C;;IAG7C,6CAA6C;;CAErC,CAAC"}
@@ -1,3 +1,32 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 PAI
4
+ * SPDX-License-Identifier: MIT
5
+ */
6
+ /**
7
+ * Request and response converters for ADK ↔ OpenAI format.
8
+ *
9
+ * This module exports functions for converting between ADK's LlmRequest/LlmResponse
10
+ * format and OpenAI's chat completion API format.
11
+ *
12
+ * @module converters
13
+ *
14
+ * @example
15
+ * ```typescript
16
+ * import {
17
+ * convertRequest,
18
+ * convertResponse,
19
+ * convertStreamChunk,
20
+ * createStreamAccumulator
21
+ * } from "adk-llm-bridge";
22
+ *
23
+ * // Convert ADK request to OpenAI format
24
+ * const { messages, tools } = convertRequest(adkRequest);
25
+ *
26
+ * // Convert OpenAI response to ADK format
27
+ * const adkResponse = convertResponse(openaiResponse);
28
+ * ```
29
+ */
1
30
  export { convertRequest } from "./request";
2
31
  export { convertResponse, convertStreamChunk, createStreamAccumulator, } from "./response";
3
32
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/converters/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EACL,eAAe,EACf,kBAAkB,EAClB,uBAAuB,GACxB,MAAM,YAAY,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/converters/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AAEH,OAAO,EAAE,cAAc,EAAE,MAAM,WAAW,CAAC;AAC3C,OAAO,EACL,eAAe,EACf,kBAAkB,EAClB,uBAAuB,GACxB,MAAM,YAAY,CAAC"}
@@ -1,8 +1,87 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 PAI
4
+ * SPDX-License-Identifier: MIT
5
+ */
6
+ /**
7
+ * Request converter for ADK to OpenAI format.
8
+ *
9
+ * This module handles the conversion of ADK LlmRequest objects to
10
+ * OpenAI-compatible chat completion request format.
11
+ *
12
+ * @module converters/request
13
+ */
1
14
  import type { LlmRequest } from "@google/adk";
2
15
  import type OpenAI from "openai";
16
+ /**
17
+ * Result of converting an ADK LlmRequest to OpenAI format.
18
+ *
19
+ * Contains the converted messages array and optional tools array
20
+ * ready for use with the OpenAI chat completions API.
21
+ */
3
22
  export interface ConvertedRequest {
23
+ /**
24
+ * Array of OpenAI-format chat messages.
25
+ *
26
+ * Includes system, user, assistant, and tool messages
27
+ * converted from ADK Content objects.
28
+ */
4
29
  messages: OpenAI.ChatCompletionMessageParam[];
30
+ /**
31
+ * Array of OpenAI-format tool definitions.
32
+ *
33
+ * Converted from ADK function declarations with schema normalization.
34
+ */
5
35
  tools?: OpenAI.ChatCompletionTool[];
6
36
  }
37
+ /**
38
+ * Converts an ADK LlmRequest to OpenAI chat completion format.
39
+ *
40
+ * This function handles:
41
+ * - System instruction extraction
42
+ * - User and model message conversion
43
+ * - Function call and response handling
44
+ * - Tool/function declaration conversion
45
+ * - Schema normalization (Gemini UPPERCASE to OpenAI lowercase types)
46
+ *
47
+ * @param llmRequest - The ADK LlmRequest to convert
48
+ * @returns The converted request with messages and optional tools
49
+ *
50
+ * @example
51
+ * ```typescript
52
+ * import { convertRequest } from "adk-llm-bridge";
53
+ *
54
+ * const adkRequest: LlmRequest = {
55
+ * contents: [{ role: "user", parts: [{ text: "Hello!" }] }],
56
+ * config: { systemInstruction: "You are a helpful assistant." }
57
+ * };
58
+ *
59
+ * const { messages, tools } = convertRequest(adkRequest);
60
+ * // messages = [
61
+ * // { role: "system", content: "You are a helpful assistant." },
62
+ * // { role: "user", content: "Hello!" }
63
+ * // ]
64
+ * ```
65
+ *
66
+ * @example
67
+ * ```typescript
68
+ * // With tools/functions
69
+ * const adkRequest: LlmRequest = {
70
+ * contents: [...],
71
+ * config: {
72
+ * tools: [{
73
+ * functionDeclarations: [{
74
+ * name: "get_weather",
75
+ * description: "Get current weather",
76
+ * parameters: { type: "OBJECT", properties: { city: { type: "STRING" } } }
77
+ * }]
78
+ * }]
79
+ * }
80
+ * };
81
+ *
82
+ * const { messages, tools } = convertRequest(adkRequest);
83
+ * // tools[0].function.parameters.type = "object" (normalized from "OBJECT")
84
+ * ```
85
+ */
7
86
  export declare function convertRequest(llmRequest: LlmRequest): ConvertedRequest;
8
87
  //# sourceMappingURL=request.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"request.d.ts","sourceRoot":"","sources":["../../src/converters/request.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAE9C,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC,MAAM,WAAW,gBAAgB;IAC/B,QAAQ,EAAE,MAAM,CAAC,0BAA0B,EAAE,CAAC;IAC9C,KAAK,CAAC,EAAE,MAAM,CAAC,kBAAkB,EAAE,CAAC;CACrC;AAED,wBAAgB,cAAc,CAAC,UAAU,EAAE,UAAU,GAAG,gBAAgB,CAavE"}
1
+ {"version":3,"file":"request.d.ts","sourceRoot":"","sources":["../../src/converters/request.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;;;;;;GAOG;AAEH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAE9C,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC;;;;;GAKG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,QAAQ,EAAE,MAAM,CAAC,0BAA0B,EAAE,CAAC;IAE9C;;;;OAIG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC,kBAAkB,EAAE,CAAC;CACrC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgDG;AACH,wBAAgB,cAAc,CAAC,UAAU,EAAE,UAAU,GAAG,gBAAgB,CAavE"}
@@ -1,7 +1,103 @@
1
+ /**
2
+ * @license
3
+ * Copyright 2025 PAI
4
+ * SPDX-License-Identifier: MIT
5
+ */
6
+ /**
7
+ * Response converter for OpenAI to ADK format.
8
+ *
9
+ * This module handles the conversion of OpenAI API responses to
10
+ * ADK LlmResponse format, supporting both single responses and streaming.
11
+ *
12
+ * @module converters/response
13
+ */
1
14
  import type { LlmResponse } from "@google/adk";
2
15
  import type OpenAI from "openai";
3
16
  import type { StreamAccumulator, StreamChunkResult } from "../types";
17
+ /**
18
+ * Converts an OpenAI chat completion response to ADK LlmResponse format.
19
+ *
20
+ * Handles:
21
+ * - Text content extraction
22
+ * - Tool/function call conversion
23
+ * - Usage metadata mapping
24
+ *
25
+ * @param response - The OpenAI ChatCompletion response
26
+ * @returns The converted ADK LlmResponse
27
+ *
28
+ * @example
29
+ * ```typescript
30
+ * import { convertResponse } from "adk-llm-bridge";
31
+ *
32
+ * const openaiResponse = await client.chat.completions.create({...});
33
+ * const adkResponse = convertResponse(openaiResponse);
34
+ *
35
+ * if (adkResponse.content?.parts) {
36
+ * for (const part of adkResponse.content.parts) {
37
+ * if (part.text) console.log(part.text);
38
+ * if (part.functionCall) console.log("Tool call:", part.functionCall.name);
39
+ * }
40
+ * }
41
+ * ```
42
+ */
4
43
  export declare function convertResponse(response: OpenAI.ChatCompletion): LlmResponse;
44
+ /**
45
+ * Processes a streaming chunk and returns the appropriate response.
46
+ *
47
+ * This function accumulates partial data (text and tool calls) across
48
+ * multiple chunks and returns:
49
+ * - Partial responses for text content (streamed immediately)
50
+ * - Complete responses when finish_reason is received
51
+ *
52
+ * Tool calls are accumulated and only returned in the final response
53
+ * because their arguments arrive in fragments across multiple chunks.
54
+ *
55
+ * @param chunk - The OpenAI streaming chunk
56
+ * @param acc - The stream accumulator for tracking partial data
57
+ * @returns Object containing optional response and completion status
58
+ *
59
+ * @example
60
+ * ```typescript
61
+ * import { createStreamAccumulator, convertStreamChunk } from "adk-llm-bridge";
62
+ *
63
+ * const accumulator = createStreamAccumulator();
64
+ *
65
+ * for await (const chunk of stream) {
66
+ * const { response, isComplete } = convertStreamChunk(chunk, accumulator);
67
+ *
68
+ * if (response?.content?.parts?.[0]?.text) {
69
+ * // Stream text to user immediately
70
+ * process.stdout.write(response.content.parts[0].text);
71
+ * }
72
+ *
73
+ * if (isComplete) {
74
+ * // Final response with complete tool calls
75
+ * return response;
76
+ * }
77
+ * }
78
+ * ```
79
+ */
5
80
  export declare function convertStreamChunk(chunk: OpenAI.ChatCompletionChunk, acc: StreamAccumulator): StreamChunkResult;
81
+ /**
82
+ * Creates a new stream accumulator for tracking partial responses.
83
+ *
84
+ * The accumulator stores:
85
+ * - Accumulated text content
86
+ * - Partial tool call data (indexed by position)
87
+ *
88
+ * Use with {@link convertStreamChunk} to process streaming responses.
89
+ *
90
+ * @returns A fresh StreamAccumulator instance
91
+ *
92
+ * @example
93
+ * ```typescript
94
+ * const accumulator = createStreamAccumulator();
95
+ *
96
+ * for await (const chunk of stream) {
97
+ * const result = convertStreamChunk(chunk, accumulator);
98
+ * // accumulator state is updated automatically
99
+ * }
100
+ * ```
101
+ */
6
102
  export declare function createStreamAccumulator(): StreamAccumulator;
7
103
  //# sourceMappingURL=response.d.ts.map