@providerprotocol/ai 0.0.19 → 0.0.21

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (49)
  1. package/README.md +82 -9
  2. package/dist/anthropic/index.d.ts +184 -14
  3. package/dist/anthropic/index.js +214 -86
  4. package/dist/anthropic/index.js.map +1 -1
  5. package/dist/{chunk-5FEAOEXV.js → chunk-EDENPF3E.js} +57 -103
  6. package/dist/chunk-EDENPF3E.js.map +1 -0
  7. package/dist/{chunk-UMKWXGO3.js → chunk-M4BMM5IB.js} +86 -2
  8. package/dist/chunk-M4BMM5IB.js.map +1 -0
  9. package/dist/chunk-Y3GBJNA2.js +120 -0
  10. package/dist/chunk-Y3GBJNA2.js.map +1 -0
  11. package/dist/{chunk-U4JJC2YX.js → chunk-Z4ILICF5.js} +2 -2
  12. package/dist/chunk-Z4ILICF5.js.map +1 -0
  13. package/dist/google/index.d.ts +16 -19
  14. package/dist/google/index.js +18 -40
  15. package/dist/google/index.js.map +1 -1
  16. package/dist/http/index.d.ts +2 -2
  17. package/dist/http/index.js +5 -4
  18. package/dist/index.d.ts +101 -38
  19. package/dist/index.js +69 -43
  20. package/dist/index.js.map +1 -1
  21. package/dist/ollama/index.d.ts +14 -16
  22. package/dist/ollama/index.js +9 -11
  23. package/dist/ollama/index.js.map +1 -1
  24. package/dist/openai/index.d.ts +25 -133
  25. package/dist/openai/index.js +31 -85
  26. package/dist/openai/index.js.map +1 -1
  27. package/dist/openrouter/index.d.ts +28 -53
  28. package/dist/openrouter/index.js +24 -47
  29. package/dist/openrouter/index.js.map +1 -1
  30. package/dist/provider-DGQHYE6I.d.ts +1319 -0
  31. package/dist/proxy/index.d.ts +194 -12
  32. package/dist/proxy/index.js +37 -65
  33. package/dist/proxy/index.js.map +1 -1
  34. package/dist/{retry-DR7YRJDz.d.ts → retry-Pcs3hnbu.d.ts} +2 -2
  35. package/dist/{stream-DRHy6q1a.d.ts → stream-Di9acos2.d.ts} +1 -1
  36. package/dist/xai/index.d.ts +16 -88
  37. package/dist/xai/index.js +34 -62
  38. package/dist/xai/index.js.map +1 -1
  39. package/package.json +4 -1
  40. package/dist/chunk-5FEAOEXV.js.map +0 -1
  41. package/dist/chunk-DZQHVGNV.js +0 -71
  42. package/dist/chunk-DZQHVGNV.js.map +0 -1
  43. package/dist/chunk-MSR5P65T.js +0 -39
  44. package/dist/chunk-MSR5P65T.js.map +0 -1
  45. package/dist/chunk-U4JJC2YX.js.map +0 -1
  46. package/dist/chunk-UMKWXGO3.js.map +0 -1
  47. package/dist/content-DEl3z_W2.d.ts +0 -276
  48. package/dist/image-Dhq-Yuq4.d.ts +0 -456
  49. package/dist/provider-BBMBZuGn.d.ts +0 -570
package/dist/proxy/index.d.ts

@@ -1,6 +1,5 @@
- import { d as Provider, f as ModelReference } from '../provider-BBMBZuGn.js';
- import { M as Message, b as MessageJSON, T as Turn, H as TurnJSON, f as StreamEvent, S as StreamResult, J as JSONSchema, k as ToolMetadata, c as Tool } from '../stream-DRHy6q1a.js';
- import '../content-DEl3z_W2.js';
+ import { g as Provider, M as ModelReference } from '../provider-DGQHYE6I.js';
+ import { M as Message, b as MessageJSON, T as Turn, H as TurnJSON, f as StreamEvent, S as StreamResult, J as JSONSchema, k as ToolMetadata, c as Tool } from '../stream-Di9acos2.js';
 
  /**
  * @fileoverview Proxy provider types.
@@ -149,10 +148,11 @@ declare function sendError$2(message: string, status: number, event: H3Event): {
  /**
  * H3/Nitro/Nuxt adapter utilities.
  *
- * @example
+ * @example Basic usage
  * ```typescript
  * // Nuxt server route: server/api/ai.post.ts
- * import { llm, anthropic } from '@providerprotocol/ai';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
  * import { parseBody } from '@providerprotocol/ai/proxy';
  * import { h3 as h3Adapter } from '@providerprotocol/ai/proxy/server';
  *
@@ -170,6 +170,49 @@ declare function sendError$2(message: string, status: number, event: H3Event): {
  * }
  * });
  * ```
+ *
+ * @example API Gateway with authentication (Nuxt)
+ * ```typescript
+ * // server/api/ai.post.ts
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
+ * import { h3 as h3Adapter } from '@providerprotocol/ai/proxy/server';
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ * model: anthropic('claude-sonnet-4-20250514'),
+ * config: {
+ * apiKey: new RoundRobinKeys([
+ * process.env.ANTHROPIC_KEY_1!,
+ * process.env.ANTHROPIC_KEY_2!,
+ * ]),
+ * retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ * },
+ * });
+ *
+ * export default defineEventHandler(async (event) => {
+ * // Authenticate with your platform credentials
+ * const token = getHeader(event, 'authorization')?.replace('Bearer ', '');
+ * const user = await validatePlatformToken(token);
+ * if (!user) {
+ * throw createError({ statusCode: 401, message: 'Unauthorized' });
+ * }
+ *
+ * // Track usage per user
+ * // await trackUsage(user.id);
+ *
+ * const body = await readBody(event);
+ * const { messages, system, params } = parseBody(body);
+ *
+ * if (params?.stream) {
+ * return h3Adapter.streamSSE(claude.stream(messages, { system }), event);
+ * }
+ * const turn = await claude.generate(messages, { system });
+ * return h3Adapter.sendJSON(turn, event);
+ * });
+ * ```
  */
  declare const h3: {
  sendJSON: typeof sendJSON$2;
@@ -236,10 +279,11 @@ declare function sendError$1(message: string, status: number, reply: FastifyRepl
  /**
  * Fastify adapter utilities.
  *
- * @example
+ * @example Basic usage
  * ```typescript
  * import Fastify from 'fastify';
- * import { llm, anthropic } from '@providerprotocol/ai';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
  * import { parseBody } from '@providerprotocol/ai/proxy';
  * import { fastify as fastifyAdapter } from '@providerprotocol/ai/proxy/server';
  *
@@ -257,6 +301,51 @@ declare function sendError$1(message: string, status: number, reply: FastifyRepl
  * }
  * });
  * ```
+ *
+ * @example API Gateway with authentication
+ * ```typescript
+ * import Fastify from 'fastify';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
+ * import { fastify as fastifyAdapter } from '@providerprotocol/ai/proxy/server';
+ *
+ * const app = Fastify();
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ * model: anthropic('claude-sonnet-4-20250514'),
+ * config: {
+ * apiKey: new RoundRobinKeys([process.env.ANTHROPIC_KEY_1!, process.env.ANTHROPIC_KEY_2!]),
+ * retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ * },
+ * });
+ *
+ * // Auth hook for your platform
+ * app.addHook('preHandler', async (request, reply) => {
+ * const token = request.headers.authorization?.replace('Bearer ', '');
+ * const user = await validatePlatformToken(token);
+ * if (!user) {
+ * reply.status(401).send({ error: 'Unauthorized' });
+ * return;
+ * }
+ * request.user = user;
+ * });
+ *
+ * app.post('/api/ai', async (request, reply) => {
+ * // Track usage per user
+ * // await trackUsage(request.user.id);
+ *
+ * const { messages, system, params } = parseBody(request.body);
+ *
+ * if (params?.stream) {
+ * return fastifyAdapter.streamSSE(claude.stream(messages, { system }), reply);
+ * }
+ * const turn = await claude.generate(messages, { system });
+ * return fastifyAdapter.sendJSON(turn, reply);
+ * });
+ * ```
  */
  declare const fastify: {
  sendJSON: typeof sendJSON$1;
@@ -320,11 +409,12 @@ declare function sendError(message: string, status: number, res: ExpressResponse
  /**
  * Express adapter utilities.
  *
- * @example
+ * @example Basic usage
  * ```typescript
  * import express from 'express';
- * import { llm, anthropic } from '@providerprotocol/ai';
- * import { parseBody, bindTools } from '@providerprotocol/ai/proxy';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
  * import { express as expressAdapter } from '@providerprotocol/ai/proxy/server';
  *
  * const app = express();
@@ -342,6 +432,51 @@ declare function sendError(message: string, status: number, res: ExpressResponse
  * }
  * });
  * ```
+ *
+ * @example API Gateway with authentication
+ * ```typescript
+ * import express from 'express';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody } from '@providerprotocol/ai/proxy';
+ * import { express as expressAdapter } from '@providerprotocol/ai/proxy/server';
+ *
+ * const app = express();
+ * app.use(express.json());
+ *
+ * // Your platform's auth middleware
+ * async function authMiddleware(req, res, next) {
+ * const token = req.headers.authorization?.replace('Bearer ', '');
+ * const user = await validatePlatformToken(token);
+ * if (!user) return res.status(401).json({ error: 'Unauthorized' });
+ * req.user = user;
+ * next();
+ * }
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ * model: anthropic('claude-sonnet-4-20250514'),
+ * config: {
+ * apiKey: new RoundRobinKeys([process.env.ANTHROPIC_KEY_1!, process.env.ANTHROPIC_KEY_2!]),
+ * retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ * },
+ * });
+ *
+ * app.post('/api/ai', authMiddleware, async (req, res) => {
+ * // Track usage per user
+ * // await trackUsage(req.user.id);
+ *
+ * const { messages, system, params } = parseBody(req.body);
+ *
+ * if (params?.stream) {
+ * expressAdapter.streamSSE(claude.stream(messages, { system }), res);
+ * } else {
+ * const turn = await claude.generate(messages, { system });
+ * expressAdapter.sendJSON(turn, res);
+ * }
+ * });
+ * ```
  */
  declare const express: {
  sendJSON: typeof sendJSON;
@@ -458,9 +593,13 @@ declare function bindTools(schemas: ParsedRequest['tools'], implementations: Rec
  * For use with Bun, Deno, Next.js App Router, Cloudflare Workers,
  * and other frameworks that support Web API Response.
  *
- * @example
+ * **Security Note:** The proxy works without configuration, meaning no
+ * authentication by default. Always add your own auth layer in production.
+ *
+ * @example Basic usage
  * ```typescript
- * import { llm, anthropic } from '@providerprotocol/ai';
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
  * import { parseBody, toJSON, toSSE } from '@providerprotocol/ai/proxy';
  *
  * // Bun.serve / Deno.serve / Next.js App Router
@@ -474,6 +613,49 @@ declare function bindTools(schemas: ParsedRequest['tools'], implementations: Rec
  * return toJSON(await instance.generate(messages));
  * }
  * ```
+ *
+ * @example API Gateway with authentication
+ * ```typescript
+ * import { llm } from '@providerprotocol/ai';
+ * import { anthropic } from '@providerprotocol/ai/anthropic';
+ * import { ExponentialBackoff, RoundRobinKeys } from '@providerprotocol/ai/http';
+ * import { parseBody, toJSON, toSSE, toError } from '@providerprotocol/ai/proxy';
+ *
+ * // Your platform's user validation
+ * async function validateToken(token: string): Promise<{ id: string } | null> {
+ * // Verify JWT, check database, etc.
+ * return token ? { id: 'user-123' } : null;
+ * }
+ *
+ * // Server manages AI provider keys - users never see them
+ * const claude = llm({
+ * model: anthropic('claude-sonnet-4-20250514'),
+ * config: {
+ * apiKey: new RoundRobinKeys([process.env.ANTHROPIC_KEY_1!, process.env.ANTHROPIC_KEY_2!]),
+ * retryStrategy: new ExponentialBackoff({ maxAttempts: 3 }),
+ * },
+ * });
+ *
+ * Bun.serve({
+ * port: 3000,
+ * async fetch(req) {
+ * // Authenticate with YOUR platform credentials
+ * const token = req.headers.get('Authorization')?.replace('Bearer ', '');
+ * const user = await validateToken(token ?? '');
+ * if (!user) return toError('Unauthorized', 401);
+ *
+ * // Rate limit, track usage, bill user, etc.
+ * // await trackUsage(user.id);
+ *
+ * const { messages, system, params } = parseBody(await req.json());
+ *
+ * if (params?.stream) {
+ * return toSSE(claude.stream(messages, { system }));
+ * }
+ * return toJSON(await claude.generate(messages, { system }));
+ * },
+ * });
+ * ```
  */
  declare const webapi: {
  parseBody: typeof parseBody;
package/dist/proxy/index.js

@@ -1,17 +1,18 @@
  import {
  emptyUsage
  } from "../chunk-SKY2JLA7.js";
- import {
- createProvider
- } from "../chunk-MSR5P65T.js";
  import {
  AssistantMessage,
  ToolResultMessage,
- UserMessage
- } from "../chunk-UMKWXGO3.js";
+ UserMessage,
+ createProvider
+ } from "../chunk-M4BMM5IB.js";
  import {
- UPPError
- } from "../chunk-DZQHVGNV.js";
+ UPPError,
+ doFetch,
+ doStreamFetch,
+ normalizeHttpError
+ } from "../chunk-EDENPF3E.js";
 
  // src/providers/proxy/serialization.ts
  function serializeMessage(m) {
@@ -98,12 +99,7 @@ var PROXY_CAPABILITIES = {
  audioInput: true
  };
  function createLLMHandler(options) {
- const {
- endpoint,
- headers: defaultHeaders = {},
- fetch: customFetch = fetch,
- timeout = 12e4
- } = options;
+ const { endpoint, headers: defaultHeaders = {} } = options;
  let providerRef = null;
  return {
  _setProvider(provider) {
@@ -127,10 +123,9 @@ function createLLMHandler(options) {
  async complete(request) {
  const body = serializeRequest(request);
  const headers = mergeHeaders(request.config.headers, defaultHeaders);
- const controller = new AbortController();
- const timeoutId = setTimeout(() => controller.abort(), timeout);
- try {
- const response = await customFetch(endpoint, {
+ const response = await doFetch(
+ endpoint,
+ {
  method: "POST",
  headers: {
  ...headers,
@@ -138,30 +133,18 @@
  Accept: "application/json"
  },
  body: JSON.stringify(body),
- signal: request.signal ? combineSignals(request.signal, controller.signal) : controller.signal
- });
- clearTimeout(timeoutId);
- if (!response.ok) {
- const text = await response.text();
- throw new UPPError(
- text || `HTTP ${response.status}`,
- "PROVIDER_ERROR",
- "proxy",
- "llm"
- );
- }
- const data = await response.json();
- return turnJSONToLLMResponse(data);
- } catch (error) {
- clearTimeout(timeoutId);
- throw error;
- }
+ signal: request.signal
+ },
+ request.config,
+ "proxy",
+ "llm"
+ );
+ const data = await response.json();
+ return turnJSONToLLMResponse(data);
  },
  stream(request) {
  const body = serializeRequest(request);
  const headers = mergeHeaders(request.config.headers, defaultHeaders);
- const controller = new AbortController();
- const timeoutId = setTimeout(() => controller.abort(), timeout);
  let resolveResponse;
  let rejectResponse;
  const responsePromise = new Promise((resolve, reject) => {
@@ -170,25 +153,24 @@ function createLLMHandler(options) {
  });
  const generator = async function* () {
  try {
- const response = await customFetch(endpoint, {
- method: "POST",
- headers: {
- ...headers,
- "Content-Type": "application/json",
- Accept: "text/event-stream"
+ const response = await doStreamFetch(
+ endpoint,
+ {
+ method: "POST",
+ headers: {
+ ...headers,
+ "Content-Type": "application/json",
+ Accept: "text/event-stream"
+ },
+ body: JSON.stringify(body),
+ signal: request.signal
  },
- body: JSON.stringify(body),
- signal: request.signal ? combineSignals(request.signal, controller.signal) : controller.signal
- });
- clearTimeout(timeoutId);
+ request.config,
+ "proxy",
+ "llm"
+ );
  if (!response.ok) {
- const text = await response.text();
- throw new UPPError(
- text || `HTTP ${response.status}`,
- "PROVIDER_ERROR",
- "proxy",
- "llm"
- );
+ throw await normalizeHttpError(response, "proxy", "llm");
  }
  if (!response.body) {
  throw new UPPError(
@@ -274,16 +256,6 @@ function turnJSONToLLMResponse(data) {
  data: data.data
  };
  }
- function combineSignals(signal1, signal2) {
- const controller = new AbortController();
- const onAbort = () => controller.abort();
- signal1.addEventListener("abort", onAbort);
- signal2.addEventListener("abort", onAbort);
- if (signal1.aborted || signal2.aborted) {
- controller.abort();
- }
- return controller.signal;
- }
 
  // src/providers/proxy/server/express.ts
  function sendJSON(turn, res) {
@@ -535,7 +507,7 @@ function proxy(options) {
  return createProvider({
  name: "proxy",
  version: "1.0.0",
- modalities: {
+ handlers: {
  llm: createLLMHandler(options)
  }
  });