@providerprotocol/ai 0.0.19 → 0.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +82 -9
- package/dist/anthropic/index.d.ts +184 -14
- package/dist/anthropic/index.js +214 -86
- package/dist/anthropic/index.js.map +1 -1
- package/dist/{chunk-5FEAOEXV.js → chunk-EDENPF3E.js} +57 -103
- package/dist/chunk-EDENPF3E.js.map +1 -0
- package/dist/{chunk-UMKWXGO3.js → chunk-M4BMM5IB.js} +86 -2
- package/dist/chunk-M4BMM5IB.js.map +1 -0
- package/dist/chunk-Y3GBJNA2.js +120 -0
- package/dist/chunk-Y3GBJNA2.js.map +1 -0
- package/dist/{chunk-U4JJC2YX.js → chunk-Z4ILICF5.js} +2 -2
- package/dist/chunk-Z4ILICF5.js.map +1 -0
- package/dist/google/index.d.ts +16 -19
- package/dist/google/index.js +18 -40
- package/dist/google/index.js.map +1 -1
- package/dist/http/index.d.ts +2 -2
- package/dist/http/index.js +5 -4
- package/dist/index.d.ts +101 -38
- package/dist/index.js +69 -43
- package/dist/index.js.map +1 -1
- package/dist/ollama/index.d.ts +14 -16
- package/dist/ollama/index.js +9 -11
- package/dist/ollama/index.js.map +1 -1
- package/dist/openai/index.d.ts +25 -133
- package/dist/openai/index.js +31 -85
- package/dist/openai/index.js.map +1 -1
- package/dist/openrouter/index.d.ts +28 -53
- package/dist/openrouter/index.js +24 -47
- package/dist/openrouter/index.js.map +1 -1
- package/dist/provider-DGQHYE6I.d.ts +1319 -0
- package/dist/proxy/index.d.ts +194 -12
- package/dist/proxy/index.js +37 -65
- package/dist/proxy/index.js.map +1 -1
- package/dist/{retry-DR7YRJDz.d.ts → retry-Pcs3hnbu.d.ts} +2 -2
- package/dist/{stream-DRHy6q1a.d.ts → stream-Di9acos2.d.ts} +1 -1
- package/dist/xai/index.d.ts +16 -88
- package/dist/xai/index.js +34 -62
- package/dist/xai/index.js.map +1 -1
- package/package.json +4 -1
- package/dist/chunk-5FEAOEXV.js.map +0 -1
- package/dist/chunk-DZQHVGNV.js +0 -71
- package/dist/chunk-DZQHVGNV.js.map +0 -1
- package/dist/chunk-MSR5P65T.js +0 -39
- package/dist/chunk-MSR5P65T.js.map +0 -1
- package/dist/chunk-U4JJC2YX.js.map +0 -1
- package/dist/chunk-UMKWXGO3.js.map +0 -1
- package/dist/content-DEl3z_W2.d.ts +0 -276
- package/dist/image-Dhq-Yuq4.d.ts +0 -456
- package/dist/provider-BBMBZuGn.d.ts +0 -570
package/dist/proxy/index.d.ts
CHANGED
````diff
@@ -1,6 +1,5 @@
-import {
-import { M as Message, b as MessageJSON, T as Turn, H as TurnJSON, f as StreamEvent, S as StreamResult, J as JSONSchema, k as ToolMetadata, c as Tool } from '../stream-
-import '../content-DEl3z_W2.js';
+import { g as Provider, M as ModelReference } from '../provider-DGQHYE6I.js';
+import { M as Message, b as MessageJSON, T as Turn, H as TurnJSON, f as StreamEvent, S as StreamResult, J as JSONSchema, k as ToolMetadata, c as Tool } from '../stream-Di9acos2.js';
 
 /**
  * @fileoverview Proxy provider types.
@@ -149,10 +148,11 @@ declare function sendError$2(message: string, status: number, event: H3Event): {
 /**
  * H3/Nitro/Nuxt adapter utilities.
  *
- * @example
+ * @example Basic usage
  * ```typescript
  * // Nuxt server route: server/api/ai.post.ts
- * import { llm
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
  * import { parseBody } from '@providerprotocol/ai/proxy';
  * import { h3 as h3Adapter } from '@providerprotocol/ai/proxy/server';
  *
@@ -170,6 +170,49 @@ declare function sendError$2(message: string, status: number, event: H3Event): {
  * }
  * });
  * ```
+ *
+ * @example API Gateway with authentication (Nuxt)
+ * ```typescript
+ * // server/api/ai.post.ts
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
+ * import { h3 as h3Adapter } from '@providerprotocol/ai/proxy/server';
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ *   model: anthropic('claude-sonnet-4-20250514'),
+ *   config: {
+ *     apiKey: new RoundRobinKeys([
+ *       process.env.ANTHROPIC_KEY_1!,
+ *       process.env.ANTHROPIC_KEY_2!,
+ *     ]),
+ *     retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ *   },
+ * });
+ *
+ * export default defineEventHandler(async (event) => {
+ *   // Authenticate with your platform credentials
+ *   const token = getHeader(event, 'authorization')?.replace('Bearer ', '');
+ *   const user = await validatePlatformToken(token);
+ *   if (!user) {
+ *     throw createError({ statusCode: 401, message: 'Unauthorized' });
+ *   }
+ *
+ *   // Track usage per user
+ *   // await trackUsage(user.id);
+ *
+ *   const body = await readBody(event);
+ *   const { messages, system, params } = parseBody(body);
+ *
+ *   if (params?.stream) {
+ *     return h3Adapter.streamSSE(claude.stream(messages, { system }), event);
+ *   }
+ *   const turn = await claude.generate(messages, { system });
+ *   return h3Adapter.sendJSON(turn, event);
+ * });
+ * ```
  */
 declare const h3: {
     sendJSON: typeof sendJSON$2;
@@ -236,10 +279,11 @@ declare function sendError$1(message: string, status: number, reply: FastifyRepl
 /**
  * Fastify adapter utilities.
  *
- * @example
+ * @example Basic usage
  * ```typescript
  * import Fastify from 'fastify';
- * import { llm
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
  * import { parseBody } from '@providerprotocol/ai/proxy';
  * import { fastify as fastifyAdapter } from '@providerprotocol/ai/proxy/server';
  *
@@ -257,6 +301,51 @@ declare function sendError$1(message: string, status: number, reply: FastifyRepl
  * }
  * });
  * ```
+ *
+ * @example API Gateway with authentication
+ * ```typescript
+ * import Fastify from 'fastify';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
+ * import { fastify as fastifyAdapter } from '@providerprotocol/ai/proxy/server';
+ *
+ * const app = Fastify();
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ *   model: anthropic('claude-sonnet-4-20250514'),
+ *   config: {
+ *     apiKey: new RoundRobinKeys([process.env.ANTHROPIC_KEY_1!, process.env.ANTHROPIC_KEY_2!]),
+ *     retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ *   },
+ * });
+ *
+ * // Auth hook for your platform
+ * app.addHook('preHandler', async (request, reply) => {
+ *   const token = request.headers.authorization?.replace('Bearer ', '');
+ *   const user = await validatePlatformToken(token);
+ *   if (!user) {
+ *     reply.status(401).send({ error: 'Unauthorized' });
+ *     return;
+ *   }
+ *   request.user = user;
+ * });
+ *
+ * app.post('/api/ai', async (request, reply) => {
+ *   // Track usage per user
+ *   // await trackUsage(request.user.id);
+ *
+ *   const { messages, system, params } = parseBody(request.body);
+ *
+ *   if (params?.stream) {
+ *     return fastifyAdapter.streamSSE(claude.stream(messages, { system }), reply);
+ *   }
+ *   const turn = await claude.generate(messages, { system });
+ *   return fastifyAdapter.sendJSON(turn, reply);
+ * });
+ * ```
  */
 declare const fastify: {
     sendJSON: typeof sendJSON$1;
@@ -320,11 +409,12 @@ declare function sendError(message: string, status: number, res: ExpressResponse
 /**
  * Express adapter utilities.
  *
- * @example
+ * @example Basic usage
  * ```typescript
  * import express from 'express';
- * import { llm
- * import {
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
  * import { express as expressAdapter } from '@providerprotocol/ai/proxy/server';
  *
  * const app = express();
@@ -342,6 +432,51 @@ declare function sendError(message: string, status: number, res: ExpressResponse
  * }
  * });
  * ```
+ *
+ * @example API Gateway with authentication
+ * ```typescript
+ * import express from 'express';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
+ * import { express as expressAdapter } from '@providerprotocol/ai/proxy/server';
+ *
+ * const app = express();
+ * app.use(express.json());
+ *
+ * // Your platform's auth middleware
+ * async function authMiddleware(req, res, next) {
+ *   const token = req.headers.authorization?.replace('Bearer ', '');
+ *   const user = await validatePlatformToken(token);
+ *   if (!user) return res.status(401).json({ error: 'Unauthorized' });
+ *   req.user = user;
+ *   next();
+ * }
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ *   model: anthropic('claude-sonnet-4-20250514'),
+ *   config: {
+ *     apiKey: new RoundRobinKeys([process.env.ANTHROPIC_KEY_1!, process.env.ANTHROPIC_KEY_2!]),
+ *     retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ *   },
+ * });
+ *
+ * app.post('/api/ai', authMiddleware, async (req, res) => {
+ *   // Track usage per user
+ *   // await trackUsage(req.user.id);
+ *
+ *   const { messages, system, params } = parseBody(req.body);
+ *
+ *   if (params?.stream) {
+ *     expressAdapter.streamSSE(claude.stream(messages, { system }), res);
+ *   } else {
+ *     const turn = await claude.generate(messages, { system });
+ *     expressAdapter.sendJSON(turn, res);
+ *   }
+ * });
+ * ```
  */
 declare const express: {
     sendJSON: typeof sendJSON;
@@ -458,9 +593,13 @@ declare function bindTools(schemas: ParsedRequest['tools'], implementations: Rec
  * For use with Bun, Deno, Next.js App Router, Cloudflare Workers,
  * and other frameworks that support Web API Response.
  *
- *
+ * **Security Note:** The proxy works without configuration, meaning no
+ * authentication by default. Always add your own auth layer in production.
+ *
+ * @example Basic usage
  * ```typescript
- * import { llm
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
  * import { parseBody, toJSON, toSSE } from '@providerprotocol/ai/proxy';
  *
  * // Bun.serve / Deno.serve / Next.js App Router
@@ -474,6 +613,49 @@ declare function bindTools(schemas: ParsedRequest['tools'], implementations: Rec
  *   return toJSON(await instance.generate(messages));
  * }
  * ```
+ *
+ * @example API Gateway with authentication
+ * ```typescript
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody, toJSON, toSSE, toError } from '@providerprotocol/ai/proxy';
+ *
+ * // Your platform's user validation
+ * async function validateToken(token: string): Promise<{ id: string } | null> {
+ *   // Verify JWT, check database, etc.
+ *   return token ? { id: 'user-123' } : null;
+ * }
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ *   model: anthropic('claude-sonnet-4-20250514'),
+ *   config: {
+ *     apiKey: new RoundRobinKeys([process.env.ANTHROPIC_KEY_1!, process.env.ANTHROPIC_KEY_2!]),
+ *     retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ *   },
+ * });
+ *
+ * Bun.serve({
+ *   port: 3000,
+ *   async fetch(req) {
+ *     // Authenticate with YOUR platform credentials
+ *     const token = req.headers.get('Authorization')?.replace('Bearer ', '');
+ *     const user = await validateToken(token ?? '');
+ *     if (!user) return toError('Unauthorized', 401);
+ *
+ *     // Rate limit, track usage, bill user, etc.
+ *     // await trackUsage(user.id);
+ *
+ *     const { messages, system, params } = parseBody(await req.json());
+ *
+ *     if (params?.stream) {
+ *       return toSSE(claude.stream(messages, { system }));
+ *     }
+ *     return toJSON(await claude.generate(messages, { system }));
+ *   },
+ * });
+ * ```
  */
 declare const webapi: {
     parseBody: typeof parseBody;
````
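The new "API Gateway with authentication" examples above all share the same wire contract: the route authenticates the caller with platform credentials, feeds the `{ messages, system, params }` parsed by `parseBody()` into `generate()` or `stream()`, and replies with a JSON turn or an SSE stream. As a rough complement, a client-side sketch of calling such a route follows; the URL, token, and message payload shape are illustrative assumptions, not part of the package.

```typescript
// Hypothetical client call against the /api/ai route from the examples above.
// Assumes the gateway accepts the { messages, system, params } shape that parseBody() reads.
const platformToken = 'user-platform-token';           // placeholder: your platform credential, never an AI provider key
const messages = [{ role: 'user', content: 'Hello' }]; // placeholder: the MessageJSON[] shape is assumed here

const res = await fetch('/api/ai', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${platformToken}`,
  },
  body: JSON.stringify({
    messages,
    system: 'You are a helpful assistant.',
    params: { stream: false }, // stream: true switches the route to text/event-stream
  }),
});
const turn = await res.json(); // TurnJSON on the non-streaming path
```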
package/dist/proxy/index.js
CHANGED
````diff
@@ -1,17 +1,18 @@
 import {
   emptyUsage
 } from "../chunk-SKY2JLA7.js";
-import {
-  createProvider
-} from "../chunk-MSR5P65T.js";
 import {
   AssistantMessage,
   ToolResultMessage,
-  UserMessage
-
+  UserMessage,
+  createProvider
+} from "../chunk-M4BMM5IB.js";
 import {
-  UPPError
-
+  UPPError,
+  doFetch,
+  doStreamFetch,
+  normalizeHttpError
+} from "../chunk-EDENPF3E.js";
 
 // src/providers/proxy/serialization.ts
 function serializeMessage(m) {
@@ -98,12 +99,7 @@ var PROXY_CAPABILITIES = {
   audioInput: true
 };
 function createLLMHandler(options) {
-  const {
-    endpoint,
-    headers: defaultHeaders = {},
-    fetch: customFetch = fetch,
-    timeout = 12e4
-  } = options;
+  const { endpoint, headers: defaultHeaders = {} } = options;
   let providerRef = null;
   return {
     _setProvider(provider) {
@@ -127,10 +123,9 @@ function createLLMHandler(options) {
     async complete(request) {
       const body = serializeRequest(request);
       const headers = mergeHeaders(request.config.headers, defaultHeaders);
-      const
-
-
-      const response = await customFetch(endpoint, {
+      const response = await doFetch(
+        endpoint,
+        {
           method: "POST",
           headers: {
             ...headers,
@@ -138,30 +133,18 @@ function createLLMHandler(options) {
             Accept: "application/json"
           },
           body: JSON.stringify(body),
-          signal: request.signal
-        }
-
-
-
-
-
-
-        "proxy",
-        "llm"
-      );
-      }
-      const data = await response.json();
-      return turnJSONToLLMResponse(data);
-      } catch (error) {
-        clearTimeout(timeoutId);
-        throw error;
-      }
+          signal: request.signal
+        },
+        request.config,
+        "proxy",
+        "llm"
+      );
+      const data = await response.json();
+      return turnJSONToLLMResponse(data);
     },
     stream(request) {
       const body = serializeRequest(request);
       const headers = mergeHeaders(request.config.headers, defaultHeaders);
-      const controller = new AbortController();
-      const timeoutId = setTimeout(() => controller.abort(), timeout);
       let resolveResponse;
       let rejectResponse;
       const responsePromise = new Promise((resolve, reject) => {
@@ -170,25 +153,24 @@ function createLLMHandler(options) {
       });
       const generator = async function* () {
         try {
-          const response = await
-
-
-
-
-
+          const response = await doStreamFetch(
+            endpoint,
+            {
+              method: "POST",
+              headers: {
+                ...headers,
+                "Content-Type": "application/json",
+                Accept: "text/event-stream"
+              },
+              body: JSON.stringify(body),
+              signal: request.signal
             },
-
-
-
-
+            request.config,
+            "proxy",
+            "llm"
+          );
           if (!response.ok) {
-
-            throw new UPPError(
-              text || `HTTP ${response.status}`,
-              "PROVIDER_ERROR",
-              "proxy",
-              "llm"
-            );
+            throw await normalizeHttpError(response, "proxy", "llm");
           }
           if (!response.body) {
             throw new UPPError(
@@ -274,16 +256,6 @@ function turnJSONToLLMResponse(data) {
     data: data.data
   };
 }
-function combineSignals(signal1, signal2) {
-  const controller = new AbortController();
-  const onAbort = () => controller.abort();
-  signal1.addEventListener("abort", onAbort);
-  signal2.addEventListener("abort", onAbort);
-  if (signal1.aborted || signal2.aborted) {
-    controller.abort();
-  }
-  return controller.signal;
-}
 
 // src/providers/proxy/server/express.ts
 function sendJSON(turn, res) {
@@ -535,7 +507,7 @@ function proxy(options) {
   return createProvider({
     name: "proxy",
     version: "1.0.0",
-
+    handlers: {
       llm: createLLMHandler(options)
     }
   });
````