@kernl-sdk/ai 0.4.2 → 0.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. package/.turbo/turbo-build.log +1 -1
  2. package/CHANGELOG.md +24 -0
  3. package/dist/__tests__/integration.test.js +10 -11
  4. package/dist/__tests__/language-model.test.js +15 -20
  5. package/dist/convert/__tests__/response.test.js +18 -16
  6. package/dist/convert/__tests__/stream.test.js +21 -20
  7. package/dist/convert/__tests__/tools.test.js +5 -5
  8. package/dist/convert/__tests__/ui-stream.test.js +11 -5
  9. package/dist/convert/response.d.ts +4 -4
  10. package/dist/convert/response.d.ts.map +1 -1
  11. package/dist/convert/response.js +1 -20
  12. package/dist/convert/stream.d.ts.map +1 -1
  13. package/dist/convert/stream.js +1 -7
  14. package/dist/convert/tools.d.ts +2 -2
  15. package/dist/convert/tools.d.ts.map +1 -1
  16. package/dist/convert/tools.js +2 -2
  17. package/dist/embedding-model.d.ts +3 -3
  18. package/dist/embedding-model.d.ts.map +1 -1
  19. package/dist/oauth/anthropic.d.ts +8 -0
  20. package/dist/oauth/anthropic.d.ts.map +1 -0
  21. package/dist/oauth/anthropic.js +65 -0
  22. package/dist/oauth/openai.d.ts +8 -0
  23. package/dist/oauth/openai.d.ts.map +1 -0
  24. package/dist/oauth/openai.js +97 -0
  25. package/dist/oauth/types.d.ts +25 -0
  26. package/dist/oauth/types.d.ts.map +1 -0
  27. package/dist/oauth/types.js +1 -0
  28. package/dist/providers/anthropic.d.ts +46 -2
  29. package/dist/providers/anthropic.d.ts.map +1 -1
  30. package/dist/providers/anthropic.js +35 -2
  31. package/dist/providers/google.d.ts +7 -2
  32. package/dist/providers/google.d.ts.map +1 -1
  33. package/dist/providers/google.js +2 -2
  34. package/dist/providers/openai.d.ts +47 -2
  35. package/dist/providers/openai.d.ts.map +1 -1
  36. package/dist/providers/openai.js +37 -3
  37. package/package.json +9 -9
  38. package/src/__tests__/integration.test.ts +10 -11
  39. package/src/__tests__/language-model.test.ts +15 -20
  40. package/src/convert/__tests__/response.test.ts +23 -21
  41. package/src/convert/__tests__/stream.test.ts +21 -20
  42. package/src/convert/__tests__/tools.test.ts +6 -6
  43. package/src/convert/__tests__/ui-stream.test.ts +11 -5
  44. package/src/convert/response.ts +9 -30
  45. package/src/convert/stream.ts +1 -7
  46. package/src/convert/tools.ts +5 -5
  47. package/src/embedding-model.ts +3 -5
  48. package/src/oauth/anthropic.ts +87 -0
  49. package/src/oauth/openai.ts +129 -0
  50. package/src/oauth/types.ts +25 -0
  51. package/src/providers/anthropic.ts +65 -3
  52. package/src/providers/google.ts +8 -3
  53. package/src/providers/openai.ts +67 -4
@@ -0,0 +1,97 @@
1
import { appendFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";

const TOKEN_URL = "https://auth.openai.com/oauth/token";
const CLIENT_ID = "app_EMoamEEZ73f0CkXaXp7hrann";
const CODEX_ENDPOINT = "https://chatgpt.com/backend-api/codex/responses";

// Debug log lives in the platform temp directory. The previous hard-coded
// "/tmp/popcorn-debug.log" does not exist on Windows; os.tmpdir() resolves
// to /tmp on POSIX systems, so behavior there is unchanged.
// NOTE(review): unconditional debug logging in a published package writes
// request metadata to a world-readable temp file — consider gating this
// behind an environment variable before the next release.
const DEBUG_LOG_PATH = join(tmpdir(), "popcorn-debug.log");

/**
 * Append a timestamped line to the OAuth debug log.
 *
 * Best-effort: any filesystem error is deliberately swallowed so that
 * logging can never break a request.
 *
 * @param {string} msg - message to record
 */
function debug(msg) {
    try {
        appendFileSync(DEBUG_LOG_PATH, `${new Date().toISOString()} [oauth/fetch] ${msg}\n`);
    }
    catch {
        // ignore — logging must never throw
    }
}
13
/**
 * Refresh OpenAI OAuth tokens in place.
 *
 * Mutates `creds` with the new access token and expiry, then notifies
 * `creds.onRefresh` (if provided) so callers can persist the new tokens.
 * Per RFC 6749 §6 the token endpoint MAY omit a new refresh token; in that
 * case the existing one is kept (previously it was clobbered with
 * `undefined`, permanently breaking subsequent refreshes).
 *
 * @param {object} creds - mutable credential record (accessToken,
 *   refreshToken, expiresAt, optional onRefresh callback)
 * @throws {Error} when the token endpoint responds with a non-2xx status
 */
async function refresh(creds) {
    const res = await fetch(TOKEN_URL, {
        method: "POST",
        headers: { "Content-Type": "application/x-www-form-urlencoded" },
        body: new URLSearchParams({
            grant_type: "refresh_token",
            refresh_token: creds.refreshToken,
            client_id: CLIENT_ID,
        }),
    });
    if (!res.ok) {
        // Include a slice of the response body so auth failures are diagnosable.
        const detail = await res.text().catch(() => "");
        throw new Error(`Token refresh failed: ${res.status}${detail ? ` — ${detail.slice(0, 200)}` : ""}`);
    }
    const data = (await res.json());
    creds.accessToken = data.access_token;
    // A new refresh token is optional in the OAuth2 spec; keep the old one if absent.
    creds.refreshToken = data.refresh_token ?? creds.refreshToken;
    creds.expiresAt = Date.now() + data.expires_in * 1000;
    creds.onRefresh?.({
        accessToken: creds.accessToken,
        refreshToken: creds.refreshToken,
        expiresAt: creds.expiresAt,
    });
}
39
/**
 * Create a fetch wrapper for OpenAI Codex OAuth.
 *
 * Every outgoing request is rerouted to the Codex endpoint with OAuth
 * headers attached; an access token within 30 s of expiry is refreshed
 * first. JSON request bodies are rewritten to the shape Codex expects
 * (`store: false`, system/developer message hoisted into `instructions`).
 *
 * @param {object} creds - mutable OAuth credential record
 * @returns {Function} drop-in replacement for `fetch`
 */
export function createOAuthFetch(creds) {
    return async (input, init) => {
        // Refresh if expired (with 30s buffer)
        const expiresSoon = Date.now() >= creds.expiresAt - 30_000;
        if (expiresSoon) {
            await refresh(creds);
        }
        const headers = new Headers(init?.headers);
        headers.set("Authorization", `Bearer ${creds.accessToken}`);
        if (creds.accountId) {
            headers.set("ChatGPT-Account-Id", creds.accountId);
        }
        // Debug: log request
        let requestUrl;
        if (typeof input === "string") {
            requestUrl = input;
        }
        else if (input instanceof URL) {
            requestUrl = input.toString();
        }
        else {
            requestUrl = input.url;
        }
        debug(`Request to: ${requestUrl}`);
        debug(`Redirecting to: ${CODEX_ENDPOINT}`);
        // Transform request body for Codex API
        // Codex requires "instructions" field instead of developer/system role in input
        // Codex also requires "store: false"
        let body = init?.body;
        if (typeof body === "string" && body) {
            try {
                const payload = JSON.parse(body);
                // Codex requires store: false
                payload.store = false;
                // Extract developer/system message as instructions
                if (Array.isArray(payload.input)) {
                    const sysIndex = payload.input.findIndex((m) => m.role === "developer" || m.role === "system");
                    if (sysIndex !== -1) {
                        payload.instructions = payload.input[sysIndex].content;
                        payload.input.splice(sysIndex, 1);
                    }
                }
                body = JSON.stringify(payload);
                debug(`Transformed body: ${body.slice(0, 500)}`);
            }
            catch {
                debug(`Failed to transform body`);
            }
        }
        const response = await fetch(CODEX_ENDPOINT, { ...init, headers, body });
        // Debug: log response
        debug(`Response status: ${response.status}`);
        if (!response.ok) {
            const text = await response.clone().text();
            debug(`Error response: ${text.slice(0, 1000)}`);
        }
        return response;
    };
}
@@ -0,0 +1,25 @@
1
+ /**
2
+ * Base OAuth credentials.
3
+ */
4
+ export interface OAuthCredentials {
5
+ /** Current access token */
6
+ accessToken: string;
7
+ /** Refresh token for obtaining new access tokens */
8
+ refreshToken: string;
9
+ /** Expiration timestamp in milliseconds */
10
+ expiresAt: number;
11
+ /** Called when tokens are refreshed - use to persist new tokens */
12
+ onRefresh?: (tokens: {
13
+ accessToken: string;
14
+ refreshToken: string;
15
+ expiresAt: number;
16
+ }) => void;
17
+ }
18
+ /**
19
+ * OpenAI OAuth credentials (ChatGPT Plus/Pro via Codex).
20
+ */
21
+ export interface OpenAIOAuthCredentials extends OAuthCredentials {
22
+ /** Account ID for org/team subscriptions */
23
+ accountId?: string;
24
+ }
25
+ //# sourceMappingURL=types.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/oauth/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,2BAA2B;IAC3B,WAAW,EAAE,MAAM,CAAC;IACpB,oDAAoD;IACpD,YAAY,EAAE,MAAM,CAAC;IACrB,2CAA2C;IAC3C,SAAS,EAAE,MAAM,CAAC;IAClB,mEAAmE;IACnE,SAAS,CAAC,EAAE,CAAC,MAAM,EAAE;QACnB,WAAW,EAAE,MAAM,CAAC;QACpB,YAAY,EAAE,MAAM,CAAC;QACrB,SAAS,EAAE,MAAM,CAAC;KACnB,KAAK,IAAI,CAAC;CACZ;AAED;;GAEG;AACH,MAAM,WAAW,sBAAuB,SAAQ,gBAAgB;IAC9D,4CAA4C;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB"}
@@ -0,0 +1 @@
1
+ export {};
@@ -1,14 +1,58 @@
1
+ import { anthropic as _anthropic } from "@ai-sdk/anthropic";
1
2
  import { AISDKLanguageModel } from "../language-model.js";
3
+ import type { OAuthCredentials } from "../oauth/types.js";
4
+ /**
5
+ * Anthropic model IDs (derived from @ai-sdk/anthropic).
6
+ */
7
+ export type AnthropicModelId = Parameters<typeof _anthropic>[0];
8
+ /**
9
+ * Options for creating a custom Anthropic provider.
10
+ */
11
+ export interface AnthropicProviderOptions {
12
+ /** API key for standard authentication */
13
+ apiKey?: string;
14
+ /** OAuth credentials for Claude Pro/Max authentication */
15
+ oauth?: OAuthCredentials;
16
+ /** Custom base URL */
17
+ baseURL?: string;
18
+ /** Custom headers */
19
+ headers?: Record<string, string>;
20
+ }
21
+ /**
22
+ * Create a custom Anthropic provider with explicit credentials.
23
+ *
24
+ * @example API key auth
25
+ * ```ts
26
+ * const anthropic = createAnthropic({ apiKey: "sk-..." });
27
+ * const model = anthropic("claude-sonnet-4-5");
28
+ * ```
29
+ *
30
+ * @example OAuth auth (Claude Pro/Max)
31
+ * ```ts
32
+ * const anthropic = createAnthropic({
33
+ * oauth: {
34
+ * accessToken: "...",
35
+ * refreshToken: "...",
36
+ * expiresAt: Date.now() + 3600000,
37
+ * onRefresh: (tokens) => saveTokens(tokens),
38
+ * }
39
+ * });
40
+ * const model = anthropic("claude-sonnet-4-5");
41
+ * ```
42
+ */
43
+ export declare function createAnthropic(options?: AnthropicProviderOptions): (modelId: AnthropicModelId) => AISDKLanguageModel;
2
44
  /**
3
45
  * Create a kernl-compatible Anthropic language model.
46
+ * Uses ANTHROPIC_API_KEY environment variable.
4
47
  *
5
48
  * @example
6
49
  * ```ts
7
50
  * import { anthropic } from '@kernl-sdk/ai/anthropic';
8
51
  *
9
- * const claude = anthropic('claude-3-5-sonnet-20241022');
52
+ * const claude = anthropic('claude-sonnet-4-5');
10
53
  * const response = await claude.generate([...], {});
11
54
  * ```
12
55
  */
13
- export declare function anthropic(modelId: string): AISDKLanguageModel;
56
+ export declare function anthropic(modelId: AnthropicModelId): AISDKLanguageModel;
57
+ export type { OAuthCredentials } from "../oauth/types.js";
14
58
  //# sourceMappingURL=anthropic.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../../src/providers/anthropic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD;;;;;;;;;;GAUG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,MAAM,sBAGxC"}
1
+ {"version":3,"file":"anthropic.d.ts","sourceRoot":"","sources":["../../src/providers/anthropic.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,IAAI,UAAU,EAExB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,gBAAgB,CAAC;AAEvD;;GAEG;AACH,MAAM,MAAM,gBAAgB,GAAG,UAAU,CAAC,OAAO,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAEhE;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,0CAA0C;IAC1C,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,0DAA0D;IAC1D,KAAK,CAAC,EAAE,gBAAgB,CAAC;IACzB,sBAAsB;IACtB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,qBAAqB;IACrB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAgB,eAAe,CAAC,OAAO,GAAE,wBAA6B,IAQ5D,SAAS,gBAAgB,wBAElC;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,SAAS,CAAC,OAAO,EAAE,gBAAgB,sBAGlD;AAGD,YAAY,EAAE,gBAAgB,EAAE,MAAM,gBAAgB,CAAC"}
@@ -1,13 +1,46 @@
1
- import { anthropic as _anthropic } from "@ai-sdk/anthropic";
1
+ import { anthropic as _anthropic, createAnthropic as _createAnthropic, } from "@ai-sdk/anthropic";
2
2
  import { AISDKLanguageModel } from "../language-model.js";
3
+ import { createOAuthFetch } from "../oauth/anthropic.js";
4
/**
 * Create a custom Anthropic provider with explicit credentials.
 *
 * Supports either API-key or OAuth (Claude Pro/Max) authentication; when
 * OAuth credentials are supplied the API key is not forwarded and requests
 * are authenticated through the OAuth fetch wrapper instead.
 *
 * @example API key auth
 * ```ts
 * const anthropic = createAnthropic({ apiKey: "sk-..." });
 * const model = anthropic("claude-sonnet-4-5");
 * ```
 *
 * @example OAuth auth (Claude Pro/Max)
 * ```ts
 * const anthropic = createAnthropic({
 *   oauth: {
 *     accessToken: "...",
 *     refreshToken: "...",
 *     expiresAt: Date.now() + 3600000,
 *     onRefresh: (tokens) => saveTokens(tokens),
 *   }
 * });
 * const model = anthropic("claude-sonnet-4-5");
 * ```
 */
export function createAnthropic(options = {}) {
    const { apiKey, oauth, baseURL, headers } = options;
    const provider = _createAnthropic({
        // OAuth replaces the API key — never send both credentials.
        apiKey: oauth ? undefined : apiKey,
        baseURL,
        headers,
        fetch: oauth ? createOAuthFetch(oauth) : undefined,
    });
    return (modelId) => new AISDKLanguageModel(provider(modelId));
}
3
35
  /**
4
36
  * Create a kernl-compatible Anthropic language model.
37
+ * Uses ANTHROPIC_API_KEY environment variable.
5
38
  *
6
39
  * @example
7
40
  * ```ts
8
41
  * import { anthropic } from '@kernl-sdk/ai/anthropic';
9
42
  *
10
- * const claude = anthropic('claude-3-5-sonnet-20241022');
43
+ * const claude = anthropic('claude-sonnet-4-5');
11
44
  * const response = await claude.generate([...], {});
12
45
  * ```
13
46
  */
@@ -1,4 +1,9 @@
1
+ import { google as _google } from "@ai-sdk/google";
1
2
  import { AISDKLanguageModel } from "../language-model.js";
3
+ /**
4
+ * Google model IDs (derived from @ai-sdk/google).
5
+ */
6
+ export type GoogleModelId = Parameters<typeof _google>[0];
2
7
  /**
3
8
  * Create a kernl-compatible Google Generative AI language model.
4
9
  *
@@ -6,9 +11,9 @@ import { AISDKLanguageModel } from "../language-model.js";
6
11
  * ```ts
7
12
  * import { google } from '@kernl-sdk/ai/google';
8
13
  *
9
- * const gemini = google('gemini-2.0-flash-exp');
14
+ * const gemini = google('gemini-2.5-pro');
10
15
  * const response = await gemini.generate([...], {});
11
16
  * ```
12
17
  */
13
- export declare function google(modelId: string): AISDKLanguageModel;
18
+ export declare function google(modelId: GoogleModelId): AISDKLanguageModel;
14
19
  //# sourceMappingURL=google.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAIvD;;;;;;;;;;GAUG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,MAAM,sBAGrC"}
1
+ {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,OAAO,EAAE,MAAM,gBAAgB,CAAC;AAEnD,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAGvD;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG,UAAU,CAAC,OAAO,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AAE1D;;;;;;;;;;GAUG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,aAAa,sBAG5C"}
@@ -1,7 +1,7 @@
1
1
  import { google as _google } from "@ai-sdk/google";
2
+ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
2
3
  import { AISDKLanguageModel } from "../language-model.js";
3
4
  import { AISDKEmbeddingModel } from "../embedding-model.js";
4
- import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
5
5
  /**
6
6
  * Create a kernl-compatible Google Generative AI language model.
7
7
  *
@@ -9,7 +9,7 @@ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
9
9
  * ```ts
10
10
  * import { google } from '@kernl-sdk/ai/google';
11
11
  *
12
- * const gemini = google('gemini-2.0-flash-exp');
12
+ * const gemini = google('gemini-2.5-pro');
13
13
  * const response = await gemini.generate([...], {});
14
14
  * ```
15
15
  */
@@ -1,14 +1,59 @@
1
+ import { openai as _openai } from "@ai-sdk/openai";
1
2
  import { AISDKLanguageModel } from "../language-model.js";
3
+ import type { OpenAIOAuthCredentials } from "../oauth/types.js";
4
+ /**
5
+ * OpenAI model IDs (derived from @ai-sdk/openai).
6
+ */
7
+ export type OpenAIModelId = Parameters<typeof _openai>[0];
8
+ /**
9
+ * Options for creating a custom OpenAI provider.
10
+ */
11
+ export interface OpenAIProviderOptions {
12
+ /** API key for standard authentication */
13
+ apiKey?: string;
14
+ /** OAuth credentials for ChatGPT Plus/Pro (Codex) authentication */
15
+ oauth?: OpenAIOAuthCredentials;
16
+ /** Custom base URL (ignored for OAuth - uses Codex endpoint) */
17
+ baseURL?: string;
18
+ /** Custom headers */
19
+ headers?: Record<string, string>;
20
+ }
21
+ /**
22
+ * Create a custom OpenAI provider with explicit credentials.
23
+ *
24
+ * @example API key auth
25
+ * ```ts
26
+ * const openai = createOpenAI({ apiKey: "sk-..." });
27
+ * const model = openai("gpt-4o");
28
+ * ```
29
+ *
30
+ * @example OAuth auth (ChatGPT Plus/Pro via Codex)
31
+ * ```ts
32
+ * const openai = createOpenAI({
33
+ * oauth: {
34
+ * accessToken: "...",
35
+ * refreshToken: "...",
36
+ * expiresAt: Date.now() + 3600000,
37
+ * accountId: "...", // for org subscriptions
38
+ * onRefresh: (tokens) => saveTokens(tokens),
39
+ * }
40
+ * });
41
+ * const model = openai("gpt-4o");
42
+ * ```
43
+ */
44
+ export declare function createOpenAI(options?: OpenAIProviderOptions): (modelId: OpenAIModelId) => AISDKLanguageModel;
2
45
  /**
3
46
  * Create a kernl-compatible OpenAI language model.
47
+ * Uses OPENAI_API_KEY environment variable.
4
48
  *
5
49
  * @example
6
50
  * ```ts
7
51
  * import { openai } from '@kernl-sdk/ai/openai';
8
52
  *
9
- * const gpt4 = openai('gpt-4-turbo');
53
+ * const gpt4 = openai('gpt-4o');
10
54
  * const response = await gpt4.generate([...], {});
11
55
  * ```
12
56
  */
13
- export declare function openai(modelId: string): AISDKLanguageModel;
57
+ export declare function openai(modelId: OpenAIModelId): AISDKLanguageModel;
58
+ export type { OpenAIOAuthCredentials } from "../oauth/types.js";
14
59
  //# sourceMappingURL=openai.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/providers/openai.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAIvD;;;;;;;;;;GAUG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,MAAM,sBAGrC"}
1
+ {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/providers/openai.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,MAAM,IAAI,OAAO,EAElB,MAAM,gBAAgB,CAAC;AAGxB,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAGvD,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAC;AAE7D;;GAEG;AACH,MAAM,MAAM,aAAa,GAAG,UAAU,CAAC,OAAO,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC;AAE1D;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC,0CAA0C;IAC1C,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,oEAAoE;IACpE,KAAK,CAAC,EAAE,sBAAsB,CAAC;IAC/B,gEAAgE;IAChE,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,qBAAqB;IACrB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED;;;;;;;;;;;;;;;;;;;;;;GAsBG;AACH,wBAAgB,YAAY,CAAC,OAAO,GAAE,qBAA0B,IAQtD,SAAS,aAAa,wBAC/B;AAED;;;;;;;;;;;GAWG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,aAAa,sBAG5C;AAGD,YAAY,EAAE,sBAAsB,EAAE,MAAM,gBAAgB,CAAC"}
@@ -1,15 +1,49 @@
1
- import { openai as _openai } from "@ai-sdk/openai";
1
+ import { openai as _openai, createOpenAI as _createOpenAI, } from "@ai-sdk/openai";
2
+ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
2
3
  import { AISDKLanguageModel } from "../language-model.js";
3
4
  import { AISDKEmbeddingModel } from "../embedding-model.js";
4
- import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
5
+ import { createOAuthFetch } from "../oauth/openai.js";
6
/**
 * Create a custom OpenAI provider with explicit credentials.
 *
 * Supports either API-key or OAuth (ChatGPT Plus/Pro via Codex)
 * authentication. Under OAuth, both the API key and any custom base URL
 * are dropped — requests are rerouted to the Codex endpoint by the OAuth
 * fetch wrapper.
 *
 * @example API key auth
 * ```ts
 * const openai = createOpenAI({ apiKey: "sk-..." });
 * const model = openai("gpt-4o");
 * ```
 *
 * @example OAuth auth (ChatGPT Plus/Pro via Codex)
 * ```ts
 * const openai = createOpenAI({
 *   oauth: {
 *     accessToken: "...",
 *     refreshToken: "...",
 *     expiresAt: Date.now() + 3600000,
 *     accountId: "...", // for org subscriptions
 *     onRefresh: (tokens) => saveTokens(tokens),
 *   }
 * });
 * const model = openai("gpt-4o");
 * ```
 */
export function createOpenAI(options = {}) {
    const { apiKey, oauth, baseURL, headers } = options;
    const provider = _createOpenAI({
        // OAuth replaces the API key and overrides the base URL.
        apiKey: oauth ? undefined : apiKey,
        baseURL: oauth ? undefined : baseURL,
        headers,
        fetch: oauth ? createOAuthFetch(oauth) : undefined,
    });
    return (modelId) => new AISDKLanguageModel(provider(modelId));
}
5
38
  /**
6
39
  * Create a kernl-compatible OpenAI language model.
40
+ * Uses OPENAI_API_KEY environment variable.
7
41
  *
8
42
  * @example
9
43
  * ```ts
10
44
  * import { openai } from '@kernl-sdk/ai/openai';
11
45
  *
12
- * const gpt4 = openai('gpt-4-turbo');
46
+ * const gpt4 = openai('gpt-4o');
13
47
  * const response = await gpt4.generate([...], {});
14
48
  * ```
15
49
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kernl-sdk/ai",
3
- "version": "0.4.2",
3
+ "version": "0.4.4",
4
4
  "description": "Vercel AI SDK adapter for kernl",
5
5
  "keywords": [
6
6
  "kernl",
@@ -43,7 +43,7 @@
43
43
  }
44
44
  },
45
45
  "peerDependencies": {
46
- "@ai-sdk/provider": "^3.0.0-beta.15"
46
+ "@ai-sdk/provider": "^3.0.3"
47
47
  },
48
48
  "peerDependenciesMeta": {
49
49
  "@ai-sdk/anthropic": {
@@ -57,10 +57,10 @@
57
57
  }
58
58
  },
59
59
  "devDependencies": {
60
- "@ai-sdk/anthropic": "3.0.0-beta.53",
61
- "@ai-sdk/google": "3.0.0-beta.43",
62
- "@ai-sdk/openai": "3.0.0-beta.57",
63
- "@ai-sdk/provider": "3.0.0-beta.15",
60
+ "@ai-sdk/anthropic": "^3.0.14",
61
+ "@ai-sdk/google": "^3.0.9",
62
+ "@ai-sdk/openai": "^3.0.11",
63
+ "@ai-sdk/provider": "^3.0.3",
64
64
  "@types/node": "^24.10.0",
65
65
  "ai": "^5.0.93",
66
66
  "tsc-alias": "^1.8.10",
@@ -68,10 +68,10 @@
68
68
  "vitest": "^4.0.8"
69
69
  },
70
70
  "dependencies": {
71
- "@kernl-sdk/protocol": "0.5.0",
72
- "@kernl-sdk/retrieval": "0.1.9",
71
+ "@kernl-sdk/protocol": "0.5.1",
72
+ "@kernl-sdk/retrieval": "0.1.10",
73
73
  "@kernl-sdk/shared": "^0.4.0",
74
- "kernl": "0.12.2"
74
+ "kernl": "0.12.3"
75
75
  },
76
76
  "scripts": {
77
77
  "build": "tsc && tsc-alias --resolve-full-paths",
@@ -64,9 +64,8 @@ describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
64
64
  expect(response.content).toBeDefined();
65
65
  expect(response.content.length).toBeGreaterThan(0);
66
66
  expect(response.usage).toBeDefined();
67
- expect(response.usage.totalTokens).toBeGreaterThan(0);
68
- expect(response.usage.inputTokens).toBeGreaterThan(0);
69
- expect(response.usage.outputTokens).toBeGreaterThan(0);
67
+ expect(response.usage.inputTokens.total).toBeGreaterThan(0);
68
+ expect(response.usage.outputTokens.total).toBeGreaterThan(0);
70
69
 
71
70
  // Should have at least one message
72
71
  const messages = response.content.filter(
@@ -103,7 +102,7 @@ describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
103
102
  });
104
103
 
105
104
  expect(response.content).toBeDefined();
106
- expect(response.usage.totalTokens).toBeGreaterThan(0);
105
+ expect(response.usage.inputTokens.total).toBeGreaterThan(0);
107
106
  });
108
107
 
109
108
  it("should handle multi-turn conversations", async () => {
@@ -135,7 +134,7 @@ describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
135
134
  });
136
135
 
137
136
  expect(response.content).toBeDefined();
138
- expect(response.usage.totalTokens).toBeGreaterThan(0);
137
+ expect(response.usage.inputTokens.total).toBeGreaterThan(0);
139
138
 
140
139
  // Check that it remembers the name (should mention Alice)
141
140
  const assistantMessages = response.content.filter(
@@ -161,7 +160,7 @@ describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
161
160
  });
162
161
 
163
162
  expect(response.content).toBeDefined();
164
- expect(response.usage.totalTokens).toBeGreaterThan(0);
163
+ expect(response.usage.inputTokens.total).toBeGreaterThan(0);
165
164
  });
166
165
 
167
166
  it("should respect maxTokens setting", async () => {
@@ -181,8 +180,8 @@ describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
181
180
  });
182
181
 
183
182
  expect(response.content).toBeDefined();
184
- expect(response.usage.outputTokens).toBeDefined();
185
- expect(response.usage.outputTokens).toBeLessThanOrEqual(20);
183
+ expect(response.usage.outputTokens.total).toBeDefined();
184
+ expect(response.usage.outputTokens.total).toBeLessThanOrEqual(20);
186
185
  });
187
186
  });
188
187
 
@@ -216,7 +215,7 @@ describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
216
215
  // Should have usage information
217
216
  const finishEvent = finishEvents[0] as any;
218
217
  expect(finishEvent.usage).toBeDefined();
219
- expect(finishEvent.usage.totalTokens).toBeGreaterThan(0);
218
+ expect(finishEvent.usage.inputTokens.total).toBeGreaterThan(0);
220
219
  });
221
220
 
222
221
  it("should stream text deltas", async () => {
@@ -1141,7 +1140,7 @@ describe.skipIf(SKIP_GOOGLE_TESTS)("AISDKLanguageModel - Google", () => {
1141
1140
  expect(response.content).toBeDefined();
1142
1141
  expect(response.content.length).toBeGreaterThan(0);
1143
1142
  expect(response.usage).toBeDefined();
1144
- expect(response.usage.totalTokens).toBeGreaterThan(0);
1143
+ expect(response.usage.inputTokens.total).toBeGreaterThan(0);
1145
1144
 
1146
1145
  const messages = response.content.filter(
1147
1146
  (item) => item.kind === "message",
@@ -1180,7 +1179,7 @@ describe.skipIf(SKIP_GOOGLE_TESTS)("AISDKLanguageModel - Google", () => {
1180
1179
  // Should have usage information
1181
1180
  const finishEvent = finishEvents[0] as any;
1182
1181
  expect(finishEvent.usage).toBeDefined();
1183
- expect(finishEvent.usage.totalTokens).toBeGreaterThan(0);
1182
+ expect(finishEvent.usage.inputTokens.total).toBeGreaterThan(0);
1184
1183
  });
1185
1184
  });
1186
1185
 
@@ -51,11 +51,10 @@ describe("AISDKLanguageModel", () => {
51
51
  },
52
52
  {
53
53
  type: "finish",
54
- finishReason: "stop",
54
+ finishReason: { unified: "stop", raw: "stop" },
55
55
  usage: {
56
- inputTokens: 5,
57
- outputTokens: 10,
58
- totalTokens: 15,
56
+ inputTokens: { total: 5, noCache: 5, cacheRead: undefined, cacheWrite: undefined },
57
+ outputTokens: { total: 10, text: 10, reasoning: undefined },
59
58
  },
60
59
  providerMetadata: undefined,
61
60
  },
@@ -163,11 +162,10 @@ describe("AISDKLanguageModel", () => {
163
162
  },
164
163
  {
165
164
  type: "finish",
166
- finishReason: "stop",
165
+ finishReason: { unified: "stop", raw: "stop" },
167
166
  usage: {
168
- inputTokens: 5,
169
- outputTokens: 20,
170
- totalTokens: 25,
167
+ inputTokens: { total: 5, noCache: 5, cacheRead: undefined, cacheWrite: undefined },
168
+ outputTokens: { total: 20, text: 20, reasoning: undefined },
171
169
  },
172
170
  providerMetadata: undefined,
173
171
  },
@@ -272,11 +270,10 @@ describe("AISDKLanguageModel", () => {
272
270
  },
273
271
  {
274
272
  type: "finish",
275
- finishReason: "stop",
273
+ finishReason: { unified: "stop", raw: "stop" },
276
274
  usage: {
277
- inputTokens: 5,
278
- outputTokens: 10,
279
- totalTokens: 15,
275
+ inputTokens: { total: 5, noCache: 5, cacheRead: undefined, cacheWrite: undefined },
276
+ outputTokens: { total: 10, text: 10, reasoning: undefined },
280
277
  },
281
278
  providerMetadata: undefined,
282
279
  },
@@ -353,11 +350,10 @@ describe("AISDKLanguageModel", () => {
353
350
  },
354
351
  {
355
352
  type: "finish",
356
- finishReason: "stop",
353
+ finishReason: { unified: "stop", raw: "stop" },
357
354
  usage: {
358
- inputTokens: 5,
359
- outputTokens: 10,
360
- totalTokens: 15,
355
+ inputTokens: { total: 5, noCache: 5, cacheRead: undefined, cacheWrite: undefined },
356
+ outputTokens: { total: 10, text: 10, reasoning: undefined },
361
357
  },
362
358
  providerMetadata: undefined,
363
359
  },
@@ -413,11 +409,10 @@ describe("AISDKLanguageModel", () => {
413
409
  },
414
410
  {
415
411
  type: "finish",
416
- finishReason: "tool-calls",
412
+ finishReason: { unified: "tool-calls", raw: "tool_calls" },
417
413
  usage: {
418
- inputTokens: 5,
419
- outputTokens: 10,
420
- totalTokens: 15,
414
+ inputTokens: { total: 5, noCache: 5, cacheRead: undefined, cacheWrite: undefined },
415
+ outputTokens: { total: 10, text: 10, reasoning: undefined },
421
416
  },
422
417
  providerMetadata: undefined,
423
418
  },