@auto-engineer/ai-gateway 0.6.3 → 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +1 -1
- package/.turbo/turbo-test.log +17 -35
- package/.turbo/turbo-type-check.log +4 -5
- package/CHANGELOG.md +6 -0
- package/README.md +365 -0
- package/dist/config.d.ts +2 -0
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +31 -2
- package/dist/config.js.map +1 -1
- package/dist/config.specs.d.ts +2 -0
- package/dist/config.specs.d.ts.map +1 -0
- package/dist/config.specs.js +123 -0
- package/dist/config.specs.js.map +1 -0
- package/dist/constants.d.ts +20 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +15 -0
- package/dist/constants.js.map +1 -0
- package/dist/index-custom.specs.d.ts +2 -0
- package/dist/index-custom.specs.d.ts.map +1 -0
- package/dist/index-custom.specs.js +161 -0
- package/dist/index-custom.specs.js.map +1 -0
- package/dist/index.d.ts +12 -13
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +93 -59
- package/dist/index.js.map +1 -1
- package/dist/index.specs.js +152 -11
- package/dist/index.specs.js.map +1 -1
- package/dist/providers/custom.d.ts +6 -0
- package/dist/providers/custom.d.ts.map +1 -0
- package/dist/providers/custom.js +16 -0
- package/dist/providers/custom.js.map +1 -0
- package/dist/providers/custom.specs.d.ts +2 -0
- package/dist/providers/custom.specs.d.ts.map +1 -0
- package/dist/providers/custom.specs.js +129 -0
- package/dist/providers/custom.specs.js.map +1 -0
- package/package.json +5 -5
- package/src/config.specs.ts +147 -0
- package/src/config.ts +46 -2
- package/src/constants.ts +21 -0
- package/src/index-custom.specs.ts +192 -0
- package/src/index.specs.ts +199 -10
- package/src/index.ts +99 -78
- package/src/providers/custom.specs.ts +161 -0
- package/src/providers/custom.ts +24 -0
- package/tsconfig.tsbuildinfo +1 -1
- package/.turbo/turbo-format.log +0 -14
- package/.turbo/turbo-lint.log +0 -5
package/.turbo/turbo-build.log
CHANGED
@@ -1,5 +1,5 @@
 
-> @auto-engineer/ai-gateway@0.
+> @auto-engineer/ai-gateway@0.8.1 build /home/runner/work/auto-engineer/auto-engineer/packages/ai-gateway
 > tsc && tsx ../../scripts/fix-esm-imports.ts
 
 Fixed ESM imports in dist/
package/.turbo/turbo-test.log
CHANGED
@@ -1,35 +1,17 @@
-
-
->
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Test Files  0 passed (1)
-      Tests  0 passed (2)
-   Start at  23:05:11
-   Duration  328ms
- ✓ src/index.specs.ts (2 tests) 1ms
-   ✓ AI Integration > should export the correct types 1ms
-   ✓ AI Integration > should have valid AIProvider type 0ms
-stdout | cleanup (/Users/sam/WebstormProjects/top/auto-engineer/packages/ai-gateway/src/mcp-server.ts:58:11)
-Cleaning up...
-
-
- Test Files  1 passed (1)
-      Tests  2 passed (2)
-   Start at  23:05:11
-   Duration  407ms (transform 50ms, setup 0ms, collect 172ms, tests 1ms, environment 0ms, prepare 38ms)
-
-
+
+> @auto-engineer/ai-gateway@0.8.1 test /home/runner/work/auto-engineer/auto-engineer/packages/ai-gateway
+> vitest run --reporter=dot
+
+
+ RUN  v3.2.4 /home/runner/work/auto-engineer/auto-engineer/packages/ai-gateway
+
+························stdout | cleanup (/home/runner/work/auto-engineer/auto-engineer/packages/ai-gateway/src/mcp-server.ts:58:11)
+Cleaning up...
+
+···················
+
+ Test Files  4 passed (4)
+      Tests  43 passed (43)
+   Start at  00:27:31
+   Duration  3.20s (transform 1.35s, setup 0ms, collect 3.94s, tests 202ms, environment 1ms, prepare 1.78s)
+
package/.turbo/turbo-type-check.log
CHANGED
@@ -1,5 +1,4 @@
-
-
->
-
-
+
+> @auto-engineer/ai-gateway@0.8.1 type-check /home/runner/work/auto-engineer/auto-engineer/packages/ai-gateway
+> tsc --noEmit
+
package/CHANGELOG.md
CHANGED
package/README.md
ADDED
@@ -0,0 +1,365 @@
# @auto-engineer/ai-gateway

AI Gateway plugin for the Auto Engineer CLI that provides a unified interface for interacting with multiple AI providers and managing AI-driven workflows. This plugin enables seamless integration with various AI models for text generation, structured data generation, and tool execution in event-driven architectures.

## Installation

This is a plugin for the Auto Engineer CLI. Install both the CLI and this plugin:

```bash
npm install -g @auto-engineer/cli
npm install @auto-engineer/ai-gateway
```

## Configuration

Add this plugin to your `auto.config.ts`:

```typescript
export default {
  plugins: [
    '@auto-engineer/ai-gateway',
    // ... other plugins
  ],
};
```

### Environment Variables

Configure AI providers by setting environment variables in a `.env` file or your environment:

```bash
# At least one of these is required
OPENAI_API_KEY=your-openai-key
ANTHROPIC_API_KEY=your-anthropic-key
GEMINI_API_KEY=your-google-key
XAI_API_KEY=your-xai-key

# Custom Provider Configuration (optional)
# Use this to connect to any OpenAI-compatible API endpoint
CUSTOM_PROVIDER_NAME=litellm
CUSTOM_PROVIDER_BASE_URL=https://api.litellm.ai
CUSTOM_PROVIDER_API_KEY=your-custom-api-key
CUSTOM_PROVIDER_DEFAULT_MODEL=claude-3-sonnet

# Optional: Set default provider and model
DEFAULT_AI_PROVIDER=openai
DEFAULT_AI_MODEL=gpt-4o-mini
```
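
These variables are read by the gateway's `configureAIProvider()` helper (see the `dist/config.js` changes later in this diff). A quick sanity check that your environment is being picked up might look like the sketch below; whether the helper is re-exported from the package entry point is an assumption, so you may need to import it from the config module instead:

```typescript
// Sketch only: configureAIProvider() is defined in this package's config module;
// the root re-export shown here is an assumption, not confirmed by this diff.
import { configureAIProvider } from '@auto-engineer/ai-gateway';

const config = configureAIProvider(); // throws if no provider is configured
console.log('OpenAI configured:', config.openai != null);
console.log('Custom provider:', config.custom?.name ?? 'not configured');
```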

## Commands

This plugin provides the following commands:

- `ai:generate-text` - Generate text using AI models
- `ai:stream-text` - Stream text output from AI models
- `ai:generate-structured` - Generate structured data with schema validation
- `ai:stream-structured` - Stream structured data with schema validation

## What does this plugin do?

The AI Gateway plugin provides a unified interface for interacting with multiple AI providers (OpenAI, Anthropic, Google, XAI, and custom providers) and integrates with the Auto Engineer ecosystem for AI-driven code generation and tool execution. It supports text generation, structured data generation, and streaming, with robust error handling and debugging.

The plugin also includes custom provider support, enabling integration with any OpenAI-compatible API endpoint, including LiteLLM proxies, local AI models, and corporate AI services.

## Key Features

### Multi-Provider Support

- Supports OpenAI, Anthropic, Google, XAI, and custom providers
- Automatic provider selection based on environment configuration
- Fallback to available providers if the default is not configured
- Configurable default models per provider

### Custom Provider Support

The AI Gateway supports custom providers, allowing you to connect to any OpenAI-compatible API endpoint. This is particularly useful for:

- **LiteLLM Proxy**: Access 100+ AI models through a single interface
- **Local AI models**: Connect to locally hosted models (Ollama, local OpenAI-compatible servers)
- **Corporate AI endpoints**: Use company-hosted AI services
- **Custom AI proxies**: Route through custom authentication or processing layers

#### Configuration

To configure a custom provider, set all four environment variables:

```bash
CUSTOM_PROVIDER_NAME=your-provider-name
CUSTOM_PROVIDER_BASE_URL=https://your-api-endpoint.com
CUSTOM_PROVIDER_API_KEY=your-api-key
CUSTOM_PROVIDER_DEFAULT_MODEL=your-default-model
```

#### Common Use Cases

**LiteLLM Proxy:**

```bash
CUSTOM_PROVIDER_NAME=litellm
CUSTOM_PROVIDER_BASE_URL=https://api.litellm.ai
CUSTOM_PROVIDER_API_KEY=sk-litellm-your-key
CUSTOM_PROVIDER_DEFAULT_MODEL=claude-3-sonnet
```

**Local Ollama:**

```bash
CUSTOM_PROVIDER_NAME=ollama
CUSTOM_PROVIDER_BASE_URL=http://localhost:11434/v1
CUSTOM_PROVIDER_API_KEY=ollama
CUSTOM_PROVIDER_DEFAULT_MODEL=llama3.1:8b
```

**Azure OpenAI:**

```bash
CUSTOM_PROVIDER_NAME=azure
CUSTOM_PROVIDER_BASE_URL=https://your-resource.openai.azure.com/openai/deployments
CUSTOM_PROVIDER_API_KEY=your-azure-key
CUSTOM_PROVIDER_DEFAULT_MODEL=gpt-4
```

The custom provider uses the same OpenAI-compatible interface as the built-in providers, so all AI Gateway features work with it, including streaming, structured generation, and tool integration.

### Text Generation

- Generate text with customizable parameters (temperature, max tokens)
- Supports both synchronous and streaming text generation
- Integrates with registered tools for enhanced functionality
- Image-based text generation for supported providers (OpenAI, XAI)

### Structured Data Generation

- Generates structured data with Zod schema validation
- Retry logic for schema validation failures
- Enhanced error prompts for iterative refinement
- Streaming support for partial object updates

### Tool Integration

- Registers and executes custom tools via the Model Context Protocol (MCP) server
- Supports batch tool registration
- Validates tool inputs with Zod schemas
- Integrates tools with AI-driven workflows

### Debugging Support

Comprehensive debug logging with namespaces:

- `ai-gateway`: General operations
- `ai-gateway:call`: AI call operations
- `ai-gateway:provider`: Provider selection and initialization
- `ai-gateway:error`: Error handling
- `ai-gateway:stream`: Streaming operations
- `ai-gateway:result`: Result processing

Enable debugging:

```bash
DEBUG=ai-gateway:* npm run dev
```
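
Since these are standard `debug` library namespaces, you can also enable just a subset, for example only call and error logging:

```bash
# Comma-separated namespace patterns are standard `debug` syntax
DEBUG=ai-gateway:call,ai-gateway:error npm run dev
```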

See [DEBUG.md](./DEBUG.md) for detailed debugging instructions.

## Usage

### Generating Text

```typescript
import { generateTextWithAI } from '@auto-engineer/ai-gateway';

const result = await generateTextWithAI('Write a poem about the stars', {
  provider: 'openai',
  model: 'gpt-4o-mini',
  temperature: 0.7,
  maxTokens: 500,
});

console.log(result);
```

### Streaming Text

```typescript
import { generateTextStreamingWithAI } from '@auto-engineer/ai-gateway';

const result = await generateTextStreamingWithAI('Explain quantum computing', {
  provider: 'anthropic',
  model: 'claude-sonnet-4-20250514',
  streamCallback: (token) => process.stdout.write(token),
});

console.log(result); // Full collected result
```

### Generating Structured Data

```typescript
import { generateStructuredDataWithAI, z } from '@auto-engineer/ai-gateway';

const schema = z.object({
  title: z.string(),
  description: z.string(),
  completed: z.boolean(),
});

const result = await generateStructuredDataWithAI('Generate a todo item', {
  provider: 'xai',
  schema,
  schemaName: 'TodoItem',
  schemaDescription: 'A todo item with title, description, and completion status',
});

console.log(result); // { title: string, description: string, completed: boolean }
```

### Tool Registration and Execution

```typescript
import { registerTool, startServer } from '@auto-engineer/ai-gateway';
import { z } from 'zod';

registerTool(
  'greet',
  {
    title: 'Greeting Tool',
    description: 'Greets users in different languages',
    inputSchema: {
      name: z.string().min(1, 'Name is required'),
      language: z.enum(['en', 'es', 'fr', 'de']).optional().default('en'),
    },
  },
  async ({ name, language = 'en' }) => {
    const greetings = {
      en: `Hello, ${name}!`,
      es: `¡Hola, ${name}!`,
      fr: `Bonjour, ${name}!`,
      de: `Hallo, ${name}!`,
    };
    return { content: [{ type: 'text', text: greetings[language] }] };
  },
);

await startServer();
```

## Configuration Options

Customize behavior through `auto.config.ts`:

```typescript
export default {
  plugins: [
    [
      '@auto-engineer/ai-gateway',
      {
        // Default AI provider
        defaultProvider: 'openai',

        // Default model per provider
        defaultModels: {
          openai: 'gpt-4o-mini',
          anthropic: 'claude-sonnet-4-20250514',
          google: 'gemini-2.5-pro',
          xai: 'grok-4',
        },

        // Generation parameters
        temperature: 0.7,
        maxTokens: 1000,

        // Tool integration
        includeToolsByDefault: true,
      },
    ],
  ],
};
```

## Integration with Auto Engineer Ecosystem

Works with other Auto Engineer plugins:

- **@auto-engineer/server-implementer**: Uses AI Gateway for AI-driven server code implementation
- **@auto-engineer/frontend-implementer**: Powers AI-driven frontend code generation
- **@auto-engineer/flow**: Integrates with Flow specifications for AI-driven workflows
- **@auto-engineer/server-generator-apollo-emmett**: Enhances server generation with AI capabilities
- **@auto-engineer/frontend-generator-react-graphql**: Supports AI-driven frontend scaffolding

## Project Structure

```
ai-gateway/
├── src/
│   ├── config.ts          # AI provider configuration
│   ├── index.ts           # Main API and provider logic
│   ├── mcp-server.ts      # Model Context Protocol server for tool management
│   └── example-use.ts     # Example tool implementations
├── DEBUG.md               # Debugging instructions
├── CHANGELOG.md           # Version history
├── package.json
└── tsconfig.json
```

## Quality Assurance

- **Type Safety**: Full TypeScript support with Zod schema validation
- **Error Handling**: Comprehensive error detection and logging
- **Testing**: Unit tests with Vitest for core functionality
- **Linting**: ESLint and Prettier for code quality
- **Debugging**: Detailed logging with the `debug` library

## Advanced Features

### Retry Logic

- Automatic retries for schema validation failures, as sketched below
- Enhanced error prompts for better AI responses
- Configurable retry limits
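
Conceptually, the retry loop feeds each validation failure back into the prompt so the model can correct itself. A minimal sketch of the idea follows; the function and option names are illustrative, not the package's internal API:

```typescript
import { z } from 'zod';

// Hypothetical low-level call that returns unvalidated JSON from the model.
declare function rawGenerate(prompt: string): Promise<unknown>;

async function generateWithRetries<T>(prompt: string, schema: z.ZodType<T>, maxRetries = 3): Promise<T> {
  let lastError = '';
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    // On retries, append the previous validation error so the model can fix its output.
    const enhancedPrompt = attempt === 0 ? prompt : `${prompt}\n\nThe previous response was invalid: ${lastError}`;
    const candidate = await rawGenerate(enhancedPrompt);
    const parsed = schema.safeParse(candidate);
    if (parsed.success) return parsed.data;
    lastError = parsed.error.message;
  }
  throw new Error(`Schema validation failed after ${maxRetries + 1} attempts: ${lastError}`);
}
```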

### Streaming Support

- Real-time text streaming with callbacks
- Partial object streaming for structured data (see the command sketch below)
- Efficient chunk handling for large responses
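
For structured output, the `ai:stream-structured` command streams partial objects as they are generated. The flags below are assumed by analogy with the `ai:generate-*` examples shown later in this README, not confirmed options:

```bash
# Assumed flags, mirroring the generate commands in Getting Started
auto ai:stream-structured --prompt="Create a user profile" --schema=userSchema.json --provider=openai
```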

### Provider Flexibility

- Dynamic provider selection based on availability
- Environment-based configuration
- Support for provider-specific error handling

## Getting Started

1. Install the plugin (see Installation above)
2. Configure environment variables for your AI providers
3. Add the plugin to `auto.config.ts`
4. Use the provided commands or import functions for AI operations

Example workflow:

```bash
# Install dependencies
npm install @auto-engineer/ai-gateway

# Generate text
auto ai:generate-text --prompt="Write a story" --provider=openai

# Generate structured data
auto ai:generate-structured --prompt="Create a user profile" --schema=userSchema.json
```

## Debugging

Enable detailed logging for troubleshooting:

```bash
DEBUG=ai-gateway:* npm run dev
```

See [DEBUG.md](./DEBUG.md) for more details.

## Changelog

See [CHANGELOG.md](./CHANGELOG.md) for version history and updates.
package/dist/config.d.ts
CHANGED
@@ -1,3 +1,4 @@
+import { CustomProviderConfig } from './constants';
 export interface AIConfig {
   openai?: {
     apiKey: string;
@@ -11,6 +12,7 @@ export interface AIConfig {
   xai?: {
     apiKey: string;
   };
+  custom?: CustomProviderConfig;
 }
 export declare function configureAIProvider(): AIConfig;
 //# sourceMappingURL=config.d.ts.map
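
The `CustomProviderConfig` type imported above is declared in the new `dist/constants.d.ts`, whose contents are not shown here. Inferred from `buildCustomProviderConfig()` in the `dist/config.js` diff below, its shape is roughly:

```typescript
// Inferred from the diff; the authoritative declaration lives in dist/constants.d.ts
export interface CustomProviderConfig {
  name: string;
  baseUrl: string;
  apiKey: string;
  defaultModel: string;
}
```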
package/dist/config.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"
+
{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,oBAAoB,EAAE,MAAM,aAAa,CAAC;AAWnD,MAAM,WAAW,QAAQ;IACvB,MAAM,CAAC,EAAE;QACP,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IACF,SAAS,CAAC,EAAE;QACV,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IACF,MAAM,CAAC,EAAE;QACP,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IACF,GAAG,CAAC,EAAE;QACJ,MAAM,EAAE,MAAM,CAAC;KAChB,CAAC;IACF,MAAM,CAAC,EAAE,oBAAoB,CAAC;CAC/B;AAgDD,wBAAgB,mBAAmB,IAAI,QAAQ,CA0C9C"}
package/dist/config.js
CHANGED
@@ -15,6 +15,29 @@ function logProviderConfig(providerName, apiKey) {
         debug('%s provider configured with API key ending in: ...%s', providerName, apiKey.slice(-4));
     }
 }
+// Helper to build custom provider config
+function buildCustomProviderConfig() {
+    const name = process.env.CUSTOM_PROVIDER_NAME;
+    const baseUrl = process.env.CUSTOM_PROVIDER_BASE_URL;
+    const apiKey = process.env.CUSTOM_PROVIDER_API_KEY;
+    const defaultModel = process.env.CUSTOM_PROVIDER_DEFAULT_MODEL;
+    if (name != null &&
+        name.length > 0 &&
+        baseUrl != null &&
+        baseUrl.length > 0 &&
+        apiKey != null &&
+        apiKey.length > 0 &&
+        defaultModel != null &&
+        defaultModel.length > 0) {
+        return {
+            name,
+            baseUrl,
+            apiKey,
+            defaultModel,
+        };
+    }
+    return undefined;
+}
 // Helper to build provider config
 function buildProviderConfig() {
     return {
@@ -22,6 +45,7 @@ function buildProviderConfig() {
         anthropic: process.env.ANTHROPIC_API_KEY != null ? { apiKey: process.env.ANTHROPIC_API_KEY } : undefined,
         google: process.env.GEMINI_API_KEY != null ? { apiKey: process.env.GEMINI_API_KEY } : undefined,
         xai: process.env.XAI_API_KEY != null ? { apiKey: process.env.XAI_API_KEY } : undefined,
+        custom: buildCustomProviderConfig(),
     };
 }
 export function configureAIProvider() {
@@ -32,15 +56,20 @@ export function configureAIProvider() {
     debugEnv('Anthropic configured: %s', config.anthropic != null);
     debugEnv('Google configured: %s', config.google != null);
     debugEnv('XAI configured: %s', config.xai != null);
+    debugEnv('Custom configured: %s', config.custom != null);
     // Log provider configurations
     logProviderConfig('OpenAI', config.openai?.apiKey);
     logProviderConfig('Anthropic', config.anthropic?.apiKey);
     logProviderConfig('Google', config.google?.apiKey);
     logProviderConfig('XAI', config.xai?.apiKey);
-
+    if (config.custom != null) {
+        debug('Custom provider configured: %s at %s with model %s', config.custom.name, config.custom.baseUrl, config.custom.defaultModel);
+        logProviderConfig('Custom', config.custom.apiKey);
+    }
+    const configuredProviders = [config.openai, config.anthropic, config.google, config.xai, config.custom].filter((p) => p != null);
     if (configuredProviders.length === 0) {
         debug('ERROR: No AI providers configured');
-        throw new Error('At least one AI provider must be configured. Please set OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY,
+        throw new Error('At least one AI provider must be configured. Please set OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY, XAI_API_KEY environment variables, or configure a custom provider with CUSTOM_PROVIDER_NAME, CUSTOM_PROVIDER_BASE_URL, CUSTOM_PROVIDER_API_KEY, and CUSTOM_PROVIDER_DEFAULT_MODEL.');
     }
     debug('AI configuration complete - %d provider(s) available', configuredProviders.length);
     return config;
package/dist/config.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAC5B,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AACxC,OAAO,WAAW,MAAM,OAAO,CAAC;
+
{"version":3,"file":"config.js","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,MAAM,QAAQ,CAAC;AAC5B,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAC;AACpC,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AACxC,OAAO,WAAW,MAAM,OAAO,CAAC;AAGhC,MAAM,KAAK,GAAG,WAAW,CAAC,mBAAmB,CAAC,CAAC;AAC/C,MAAM,QAAQ,GAAG,WAAW,CAAC,uBAAuB,CAAC,CAAC;AAEtD,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;AACtC,MAAM,OAAO,GAAG,OAAO,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AACpD,KAAK,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;AAC/C,MAAM,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;AAkBjC,uCAAuC;AACvC,SAAS,iBAAiB,CAAC,YAAoB,EAAE,MAA0B;IACzE,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACzB,KAAK,CAAC,sDAAsD,EAAE,YAAY,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAChG,CAAC;AACH,CAAC;AAED,yCAAyC;AACzC,SAAS,yBAAyB;IAChC,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC;IAC9C,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC;IACrD,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC;IACnD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,6BAA6B,CAAC;IAE/D,IACE,IAAI,IAAI,IAAI;QACZ,IAAI,CAAC,MAAM,GAAG,CAAC;QACf,OAAO,IAAI,IAAI;QACf,OAAO,CAAC,MAAM,GAAG,CAAC;QAClB,MAAM,IAAI,IAAI;QACd,MAAM,CAAC,MAAM,GAAG,CAAC;QACjB,YAAY,IAAI,IAAI;QACpB,YAAY,CAAC,MAAM,GAAG,CAAC,EACvB,CAAC;QACD,OAAO;YACL,IAAI;YACJ,OAAO;YACP,MAAM;YACN,YAAY;SACb,CAAC;IACJ,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,kCAAkC;AAClC,SAAS,mBAAmB;IAC1B,OAAO;QACL,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC,CAAC,CAAC,SAAS;QAC/F,SAAS,EAAE,OAAO,CAAC,GAAG,CAAC,iBAAiB,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,CAAC,CAAC,CAAC,SAAS;QACxG,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,cAAc,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,CAAC,CAAC,CAAC,SAAS;QAC/F,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,WAAW,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,SAAS;QACtF,MAAM,EAAE,yBAAyB,EAAE;KACpC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,mBAAmB;IACjC,QAAQ,CAAC,iDAAiD,CAAC,CAAC;IAE5D,MAAM,MAAM,GAAG,mBAAmB,EAAE,CAAC;IAErC,6DAA6D;IAC7D,QAAQ,CAAC,uBAAuB,EAAE,MAAM,CAAC,MAAM,IAAI,IAAI,CAAC,CAAC;IACzD,QAAQ,CAAC,0BAA0B,EAAE,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,CAAC;IAC/D,QAAQ,CAAC,uBAAuB,EAAE,MAAM,CAAC,MAAM,IAAI,IAAI,CAAC,CAAC;IACzD,QAAQ,CAAC,oBAAoB,EAAE,MAAM,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC;IACnD,QAAQ,CAAC,uBAAuB,EAAE,MAAM,CAAC,MAAM,IAAI,IAAI,CAAC,CAAC;IAEzD,8BAA8B;IAC9B,iBAAiB,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACnD,iBAAiB,CAAC,WAAW,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IACzD,iBAAiB,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACnD,iBAAiB,CAAC,KAAK,EAAE,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;IAE7C,IAAI,MAAM,CAAC,MAAM,IAAI,IAAI,EAAE,CAAC;QAC1B,KAAK,CACH,oDAAoD,EACpD,MAAM,CAAC,MAAM,CAAC,IAAI,EAClB,MAAM,CAAC,MAAM,CAAC,OAAO,EACrB,MAAM,CAAC,MAAM,CAAC,YAAY,CAC3B,CAAC;QACF,iBAAiB,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IACpD,CAAC;IAED,MAAM,mBAAmB,GAAG,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,MAAM,CAC5G,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,IAAI,CACjB,CAAC;IAEF,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACrC,KAAK,CAAC,mCAAmC,CAAC,CAAC;QAC3C,MAAM,IAAI,KAAK,CACb,+RAA+R,CAChS,CAAC;IACJ,CAAC;IAED,KAAK,CAAC,sDAAsD,EAAE,mBAAmB,CAAC,MAAM,CAAC,CAAC;IAE1F,OAAO,MAAM,CAAC;AAChB,CAAC"}
package/dist/config.specs.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"config.specs.d.ts","sourceRoot":"","sources":["../src/config.specs.ts"],"names":[],"mappings":""}
package/dist/config.specs.js
ADDED
@@ -0,0 +1,123 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { configureAIProvider } from './config.js';
// Mock environment variables
const originalEnv = process.env;
describe('AI Configuration with Custom Providers', () => {
    beforeEach(() => {
        vi.resetModules();
        // Clear all AI-related environment variables for clean test state
        process.env = {};
    });
    afterEach(() => {
        process.env = originalEnv;
    });
    describe('configureAIProvider', () => {
        it('should configure custom provider when all environment variables are set', () => {
            process.env.CUSTOM_PROVIDER_NAME = 'litellm';
            process.env.CUSTOM_PROVIDER_BASE_URL = 'https://api.litellm.ai';
            process.env.CUSTOM_PROVIDER_API_KEY = 'sk-litellm-key';
            process.env.CUSTOM_PROVIDER_DEFAULT_MODEL = 'claude-3-sonnet';
            const config = configureAIProvider();
            expect(config.custom).toBeDefined();
            expect(config.custom?.name).toBe('litellm');
            expect(config.custom?.baseUrl).toBe('https://api.litellm.ai');
            expect(config.custom?.apiKey).toBe('sk-litellm-key');
            expect(config.custom?.defaultModel).toBe('claude-3-sonnet');
        });
        it('should not configure custom provider when environment variables are missing', () => {
            // Only set some environment variables
            process.env.CUSTOM_PROVIDER_NAME = 'incomplete';
            process.env.CUSTOM_PROVIDER_BASE_URL = 'https://api.example.com';
            process.env.ANTHROPIC_API_KEY = 'sk-test'; // Need at least one provider
            // Missing CUSTOM_PROVIDER_API_KEY and CUSTOM_PROVIDER_DEFAULT_MODEL
            const config = configureAIProvider();
            expect(config.custom).toBeUndefined();
        });
        it('should handle multiple providers including custom', () => {
            process.env.OPENAI_API_KEY = 'sk-openai-key';
            process.env.ANTHROPIC_API_KEY = 'sk-anthropic-key';
            process.env.CUSTOM_PROVIDER_NAME = 'custom';
            process.env.CUSTOM_PROVIDER_BASE_URL = 'https://api.custom.com';
            process.env.CUSTOM_PROVIDER_API_KEY = 'custom-key';
            process.env.CUSTOM_PROVIDER_DEFAULT_MODEL = 'custom-model';
            const config = configureAIProvider();
            expect(config.openai).toBeDefined();
            expect(config.anthropic).toBeDefined();
            expect(config.custom).toBeDefined();
        });
        it('should work with only custom provider configured', () => {
            process.env.CUSTOM_PROVIDER_NAME = 'only-custom';
            process.env.CUSTOM_PROVIDER_BASE_URL = 'https://api.only-custom.com';
            process.env.CUSTOM_PROVIDER_API_KEY = 'only-custom-key';
            process.env.CUSTOM_PROVIDER_DEFAULT_MODEL = 'only-custom-model';
            const config = configureAIProvider();
            expect(config.custom).toBeDefined();
            expect(config.openai).toBeUndefined();
            expect(config.anthropic).toBeUndefined();
            expect(config.google).toBeUndefined();
            expect(config.xai).toBeUndefined();
        });
        it('should throw error when no providers are configured', () => {
            // Clear all environment variables
            delete process.env.OPENAI_API_KEY;
            delete process.env.ANTHROPIC_API_KEY;
            delete process.env.GEMINI_API_KEY;
            delete process.env.XAI_API_KEY;
            delete process.env.CUSTOM_PROVIDER_NAME;
            delete process.env.CUSTOM_PROVIDER_BASE_URL;
            delete process.env.CUSTOM_PROVIDER_API_KEY;
            delete process.env.CUSTOM_PROVIDER_DEFAULT_MODEL;
            expect(() => configureAIProvider()).toThrow(/At least one AI provider must be configured/);
        });
        it('should handle custom provider with different configurations', () => {
            const testCases = [
                {
                    name: 'litellm-proxy',
                    baseUrl: 'https://litellm-proxy.company.com/v1',
                    model: 'gpt-4o-mini',
                },
                {
                    name: 'local-ollama',
                    baseUrl: 'http://localhost:11434/v1',
                    model: 'llama3.1:8b',
                },
                {
                    name: 'azure-openai',
                    baseUrl: 'https://company.openai.azure.com/openai/deployments',
                    model: 'gpt-4',
                },
            ];
            testCases.forEach((testCase) => {
                process.env.CUSTOM_PROVIDER_NAME = testCase.name;
                process.env.CUSTOM_PROVIDER_BASE_URL = testCase.baseUrl;
                process.env.CUSTOM_PROVIDER_API_KEY = 'test-key';
                process.env.CUSTOM_PROVIDER_DEFAULT_MODEL = testCase.model;
                const config = configureAIProvider();
                expect(config.custom?.name).toBe(testCase.name);
                expect(config.custom?.baseUrl).toBe(testCase.baseUrl);
                expect(config.custom?.defaultModel).toBe(testCase.model);
            });
        });
        it('should handle empty string environment variables as undefined', () => {
            process.env.CUSTOM_PROVIDER_NAME = '';
            process.env.CUSTOM_PROVIDER_BASE_URL = '';
            process.env.CUSTOM_PROVIDER_API_KEY = '';
            process.env.CUSTOM_PROVIDER_DEFAULT_MODEL = '';
            process.env.ANTHROPIC_API_KEY = 'sk-test'; // Need at least one provider
            const config = configureAIProvider();
            expect(config.custom).toBeUndefined();
        });
        it('should handle whitespace in environment variables', () => {
            process.env.CUSTOM_PROVIDER_NAME = ' litellm ';
            process.env.CUSTOM_PROVIDER_BASE_URL = ' https://api.litellm.ai ';
            process.env.CUSTOM_PROVIDER_API_KEY = ' sk-key ';
            process.env.CUSTOM_PROVIDER_DEFAULT_MODEL = ' claude-3-sonnet ';
            const config = configureAIProvider();
            expect(config.custom?.name).toBe(' litellm ');
            expect(config.custom?.baseUrl).toBe(' https://api.litellm.ai ');
            expect(config.custom?.apiKey).toBe(' sk-key ');
            expect(config.custom?.defaultModel).toBe(' claude-3-sonnet ');
        });
    });
});
//# sourceMappingURL=config.specs.js.map