@mariozechner/pi-ai 0.58.4 → 0.60.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. package/README.md +5 -0
  2. package/dist/index.d.ts +9 -8
  3. package/dist/index.d.ts.map +1 -1
  4. package/dist/index.js +0 -8
  5. package/dist/index.js.map +1 -1
  6. package/dist/models.generated.d.ts +194 -131
  7. package/dist/models.generated.d.ts.map +1 -1
  8. package/dist/models.generated.js +259 -206
  9. package/dist/models.generated.js.map +1 -1
  10. package/dist/providers/anthropic.d.ts +7 -0
  11. package/dist/providers/anthropic.d.ts.map +1 -1
  12. package/dist/providers/anthropic.js +22 -11
  13. package/dist/providers/anthropic.js.map +1 -1
  14. package/dist/providers/google-gemini-cli.d.ts.map +1 -1
  15. package/dist/providers/google-gemini-cli.js +3 -0
  16. package/dist/providers/google-gemini-cli.js.map +1 -1
  17. package/dist/providers/google-shared.d.ts.map +1 -1
  18. package/dist/providers/google-shared.js +20 -8
  19. package/dist/providers/google-shared.js.map +1 -1
  20. package/dist/providers/google-vertex.d.ts.map +1 -1
  21. package/dist/providers/google-vertex.js +3 -0
  22. package/dist/providers/google-vertex.js.map +1 -1
  23. package/dist/providers/google.d.ts.map +1 -1
  24. package/dist/providers/google.js +3 -0
  25. package/dist/providers/google.js.map +1 -1
  26. package/dist/providers/mistral.d.ts.map +1 -1
  27. package/dist/providers/mistral.js +3 -0
  28. package/dist/providers/mistral.js.map +1 -1
  29. package/dist/providers/openai-completions.d.ts.map +1 -1
  30. package/dist/providers/openai-completions.js +25 -12
  31. package/dist/providers/openai-completions.js.map +1 -1
  32. package/dist/providers/openai-responses-shared.d.ts.map +1 -1
  33. package/dist/providers/openai-responses-shared.js +18 -12
  34. package/dist/providers/openai-responses-shared.js.map +1 -1
  35. package/dist/providers/register-builtins.d.ts +28 -1
  36. package/dist/providers/register-builtins.d.ts.map +1 -1
  37. package/dist/providers/register-builtins.js +170 -47
  38. package/dist/providers/register-builtins.js.map +1 -1
  39. package/dist/types.d.ts +1 -0
  40. package/dist/types.d.ts.map +1 -1
  41. package/dist/types.js.map +1 -1
  42. package/dist/utils/oauth/anthropic.d.ts.map +1 -1
  43. package/dist/utils/oauth/anthropic.js +20 -28
  44. package/dist/utils/oauth/anthropic.js.map +1 -1
  45. package/dist/utils/oauth/google-antigravity.d.ts.map +1 -1
  46. package/dist/utils/oauth/google-antigravity.js +22 -19
  47. package/dist/utils/oauth/google-antigravity.js.map +1 -1
  48. package/dist/utils/oauth/google-gemini-cli.d.ts.map +1 -1
  49. package/dist/utils/oauth/google-gemini-cli.js +22 -19
  50. package/dist/utils/oauth/google-gemini-cli.js.map +1 -1
  51. package/dist/utils/oauth/oauth-page.d.ts +3 -0
  52. package/dist/utils/oauth/oauth-page.d.ts.map +1 -0
  53. package/dist/utils/oauth/oauth-page.js +105 -0
  54. package/dist/utils/oauth/oauth-page.js.map +1 -0
  55. package/dist/utils/oauth/openai-codex.d.ts.map +1 -1
  56. package/dist/utils/oauth/openai-codex.js +24 -31
  57. package/dist/utils/oauth/openai-codex.js.map +1 -1
  58. package/package.json +39 -5
  59. package/bedrock-provider.d.ts +0 -1
  60. package/bedrock-provider.js +0 -1
package/README.md CHANGED
@@ -519,6 +519,8 @@ Every `AssistantMessage` includes a `stopReason` field that indicates how the ge
519
519
  - `"error"` - An error occurred during generation
520
520
  - `"aborted"` - Request was cancelled via abort signal
521
521
 
522
+ `AssistantMessage` may also include `responseId`, a provider-specific upstream response or message identifier when the underlying API exposes one. Do not assume it is always present across providers.
523
+
522
524
  ## Error Handling
523
525
 
524
526
  When a request ends with an error (including aborts and tool call validation errors), the streaming API emits an error event:
@@ -1159,6 +1161,9 @@ Create a new provider file (for example `amazon-bedrock.ts`) that exports:
1159
1161
  #### 3. API Registry Integration (`src/providers/register-builtins.ts`)
1160
1162
 
1161
1163
  - Register the API with `registerApiProvider()`
1164
+ - Add a package subpath export in `package.json` for the provider module (`./dist/providers/<provider>.js`)
1165
+ - Add lazy loader wrappers in `src/providers/register-builtins.ts`; do not statically import provider implementation modules there
1166
+ - Add any root-level `export type` re-exports in `src/index.ts` that should remain available from `@mariozechner/pi-ai`
1162
1167
  - Add credential detection in `env-api-keys.ts` for the new provider
1163
1168
  - Ensure `streamSimple` handles auth lookup via `getEnvApiKey()` or provider-specific auth
1164
1169
 
package/dist/index.d.ts CHANGED
@@ -3,14 +3,15 @@ export { Type } from "@sinclair/typebox";
3
3
  export * from "./api-registry.js";
4
4
  export * from "./env-api-keys.js";
5
5
  export * from "./models.js";
6
- export * from "./providers/anthropic.js";
7
- export * from "./providers/azure-openai-responses.js";
8
- export * from "./providers/google.js";
9
- export * from "./providers/google-gemini-cli.js";
10
- export * from "./providers/google-vertex.js";
11
- export * from "./providers/mistral.js";
12
- export * from "./providers/openai-completions.js";
13
- export * from "./providers/openai-responses.js";
6
+ export type { AnthropicOptions } from "./providers/anthropic.js";
7
+ export type { AzureOpenAIResponsesOptions } from "./providers/azure-openai-responses.js";
8
+ export type { GoogleOptions } from "./providers/google.js";
9
+ export type { GoogleGeminiCliOptions, GoogleThinkingLevel } from "./providers/google-gemini-cli.js";
10
+ export type { GoogleVertexOptions } from "./providers/google-vertex.js";
11
+ export type { MistralOptions } from "./providers/mistral.js";
12
+ export type { OpenAICodexResponsesOptions } from "./providers/openai-codex-responses.js";
13
+ export type { OpenAICompletionsOptions } from "./providers/openai-completions.js";
14
+ export type { OpenAIResponsesOptions } from "./providers/openai-responses.js";
14
15
  export * from "./providers/register-builtins.js";
15
16
  export * from "./stream.js";
16
17
  export * from "./types.js";
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACzD,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uCAAuC,CAAC;AACtD,cAAc,uBAAuB,CAAC;AACtC,cAAc,kCAAkC,CAAC;AACjD,cAAc,8BAA8B,CAAC;AAC7C,cAAc,wBAAwB,CAAC;AACvC,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,YAAY,EACX,aAAa,EACb,gBAAgB,EAChB,mBAAmB,EACnB,WAAW,EACX,aAAa,EACb,eAAe,EACf,iBAAiB,EACjB,sBAAsB,GACtB,MAAM,wBAAwB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/azure-openai-responses.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/google-gemini-cli.js\";\nexport * from \"./providers/google-vertex.js\";\nexport * from \"./providers/mistral.js\";\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACzD,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,YAAY,EAAE,gBAAgB,EAAE,MAAM,0BAA0B,CAAC;AACjE,YAAY,EAAE,2BAA2B,EAAE,MAAM,uCAAuC,CAAC;AACzF,YAAY,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAC3D,YAAY,EAAE,sBAAsB,EAAE,mBAAmB,EAAE,MAAM,kCAAkC,CAAC;AACpG,YAAY,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACxE,YAAY,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AAC7D,YAAY,EAAE,2BAA2B,EAAE,MAAM,uCAAuC,CAAC;AACzF,YAAY,EAAE,wBAAwB,EAAE,MAAM,mCAAmC,CAAC;AAClF,YAAY,EAAE,sBAAsB,EAAE,MAAM,iCAAiC,CAAC;AAC9E,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,YAAY,EACX,aAAa,EACb,gBAAgB,EAChB,mBAAmB,EACnB,WAAW,EACX,aAAa,EACb,eAAe,EACf,iBAAiB,EACjB,sBAAsB,GACtB,MAAM,wBAAwB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport type { AnthropicOptions } from \"./providers/anthropic.js\";\nexport type { AzureOpenAIResponsesOptions } from \"./providers/azure-openai-responses.js\";\nexport type { GoogleOptions } from \"./providers/google.js\";\nexport type { GoogleGeminiCliOptions, GoogleThinkingLevel } from \"./providers/google-gemini-cli.js\";\nexport type { GoogleVertexOptions } from \"./providers/google-vertex.js\";\nexport type { MistralOptions } from \"./providers/mistral.js\";\nexport type { OpenAICodexResponsesOptions } from \"./providers/openai-codex-responses.js\";\nexport type { OpenAICompletionsOptions } from \"./providers/openai-completions.js\";\nexport type { OpenAIResponsesOptions } from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
package/dist/index.js CHANGED
@@ -2,14 +2,6 @@ export { Type } from "@sinclair/typebox";
2
2
  export * from "./api-registry.js";
3
3
  export * from "./env-api-keys.js";
4
4
  export * from "./models.js";
5
- export * from "./providers/anthropic.js";
6
- export * from "./providers/azure-openai-responses.js";
7
- export * from "./providers/google.js";
8
- export * from "./providers/google-gemini-cli.js";
9
- export * from "./providers/google-vertex.js";
10
- export * from "./providers/mistral.js";
11
- export * from "./providers/openai-completions.js";
12
- export * from "./providers/openai-responses.js";
13
5
  export * from "./providers/register-builtins.js";
14
6
  export * from "./stream.js";
15
7
  export * from "./types.js";
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uCAAuC,CAAC;AACtD,cAAc,uBAAuB,CAAC;AACtC,cAAc,kCAAkC,CAAC;AACjD,cAAc,8BAA8B,CAAC;AAC7C,cAAc,wBAAwB,CAAC;AACvC,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AAWtC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/azure-openai-responses.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/google-gemini-cli.js\";\nexport * from \"./providers/google-vertex.js\";\nexport * from \"./providers/mistral.js\";\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAU5B,cAAc,kCAAkC,CAAC;AACjD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AAWtC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export type { Static, TSchema } from \"@sinclair/typebox\";\nexport { Type } from \"@sinclair/typebox\";\n\nexport * from \"./api-registry.js\";\nexport * from \"./env-api-keys.js\";\nexport * from \"./models.js\";\nexport type { AnthropicOptions } from \"./providers/anthropic.js\";\nexport type { AzureOpenAIResponsesOptions } from \"./providers/azure-openai-responses.js\";\nexport type { GoogleOptions } from \"./providers/google.js\";\nexport type { GoogleGeminiCliOptions, GoogleThinkingLevel } from \"./providers/google-gemini-cli.js\";\nexport type { GoogleVertexOptions } from \"./providers/google-vertex.js\";\nexport type { MistralOptions } from \"./providers/mistral.js\";\nexport type { OpenAICodexResponsesOptions } from \"./providers/openai-codex-responses.js\";\nexport type { OpenAICompletionsOptions } from \"./providers/openai-completions.js\";\nexport type { OpenAIResponsesOptions } from \"./providers/openai-responses.js\";\nexport * from \"./providers/register-builtins.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport type {\n\tOAuthAuthInfo,\n\tOAuthCredentials,\n\tOAuthLoginCallbacks,\n\tOAuthPrompt,\n\tOAuthProvider,\n\tOAuthProviderId,\n\tOAuthProviderInfo,\n\tOAuthProviderInterface,\n} from \"./utils/oauth/types.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
@@ -2299,6 +2299,40 @@ export declare const MODELS: {
2299
2299
  contextWindow: number;
2300
2300
  maxTokens: number;
2301
2301
  };
2302
+ readonly "gpt-5.4-mini": {
2303
+ id: string;
2304
+ name: string;
2305
+ api: "azure-openai-responses";
2306
+ provider: string;
2307
+ baseUrl: string;
2308
+ reasoning: true;
2309
+ input: ("image" | "text")[];
2310
+ cost: {
2311
+ input: number;
2312
+ output: number;
2313
+ cacheRead: number;
2314
+ cacheWrite: number;
2315
+ };
2316
+ contextWindow: number;
2317
+ maxTokens: number;
2318
+ };
2319
+ readonly "gpt-5.4-nano": {
2320
+ id: string;
2321
+ name: string;
2322
+ api: "azure-openai-responses";
2323
+ provider: string;
2324
+ baseUrl: string;
2325
+ reasoning: true;
2326
+ input: ("image" | "text")[];
2327
+ cost: {
2328
+ input: number;
2329
+ output: number;
2330
+ cacheRead: number;
2331
+ cacheWrite: number;
2332
+ };
2333
+ contextWindow: number;
2334
+ maxTokens: number;
2335
+ };
2302
2336
  readonly "gpt-5.4-pro": {
2303
2337
  id: string;
2304
2338
  name: string;
@@ -3060,6 +3094,52 @@ export declare const MODELS: {
3060
3094
  contextWindow: number;
3061
3095
  maxTokens: number;
3062
3096
  };
3097
+ readonly "gpt-5.4-mini": {
3098
+ id: string;
3099
+ name: string;
3100
+ api: "openai-responses";
3101
+ provider: string;
3102
+ baseUrl: string;
3103
+ headers: {
3104
+ "User-Agent": string;
3105
+ "Editor-Version": string;
3106
+ "Editor-Plugin-Version": string;
3107
+ "Copilot-Integration-Id": string;
3108
+ };
3109
+ reasoning: true;
3110
+ input: ("image" | "text")[];
3111
+ cost: {
3112
+ input: number;
3113
+ output: number;
3114
+ cacheRead: number;
3115
+ cacheWrite: number;
3116
+ };
3117
+ contextWindow: number;
3118
+ maxTokens: number;
3119
+ };
3120
+ readonly "gpt-5.4-nano": {
3121
+ id: string;
3122
+ name: string;
3123
+ api: "openai-responses";
3124
+ provider: string;
3125
+ baseUrl: string;
3126
+ headers: {
3127
+ "User-Agent": string;
3128
+ "Editor-Version": string;
3129
+ "Editor-Plugin-Version": string;
3130
+ "Copilot-Integration-Id": string;
3131
+ };
3132
+ reasoning: true;
3133
+ input: ("image" | "text")[];
3134
+ cost: {
3135
+ input: number;
3136
+ output: number;
3137
+ cacheRead: number;
3138
+ cacheWrite: number;
3139
+ };
3140
+ contextWindow: number;
3141
+ maxTokens: number;
3142
+ };
3063
3143
  readonly "grok-code-fast-1": {
3064
3144
  id: string;
3065
3145
  name: string;
@@ -5680,6 +5760,40 @@ export declare const MODELS: {
5680
5760
  contextWindow: number;
5681
5761
  maxTokens: number;
5682
5762
  };
5763
+ readonly "gpt-5.4-mini": {
5764
+ id: string;
5765
+ name: string;
5766
+ api: "openai-responses";
5767
+ provider: string;
5768
+ baseUrl: string;
5769
+ reasoning: true;
5770
+ input: ("image" | "text")[];
5771
+ cost: {
5772
+ input: number;
5773
+ output: number;
5774
+ cacheRead: number;
5775
+ cacheWrite: number;
5776
+ };
5777
+ contextWindow: number;
5778
+ maxTokens: number;
5779
+ };
5780
+ readonly "gpt-5.4-nano": {
5781
+ id: string;
5782
+ name: string;
5783
+ api: "openai-responses";
5784
+ provider: string;
5785
+ baseUrl: string;
5786
+ reasoning: true;
5787
+ input: ("image" | "text")[];
5788
+ cost: {
5789
+ input: number;
5790
+ output: number;
5791
+ cacheRead: number;
5792
+ cacheWrite: number;
5793
+ };
5794
+ contextWindow: number;
5795
+ maxTokens: number;
5796
+ };
5683
5797
  readonly "gpt-5.4-pro": {
5684
5798
  id: string;
5685
5799
  name: string;
@@ -6143,23 +6257,6 @@ export declare const MODELS: {
6143
6257
  contextWindow: number;
6144
6258
  maxTokens: number;
6145
6259
  };
6146
- readonly "gemini-3-pro": {
6147
- id: string;
6148
- name: string;
6149
- api: "google-generative-ai";
6150
- provider: string;
6151
- baseUrl: string;
6152
- reasoning: true;
6153
- input: ("image" | "text")[];
6154
- cost: {
6155
- input: number;
6156
- output: number;
6157
- cacheRead: number;
6158
- cacheWrite: number;
6159
- };
6160
- contextWindow: number;
6161
- maxTokens: number;
6162
- };
6163
6260
  readonly "gemini-3.1-pro": {
6164
6261
  id: string;
6165
6262
  name: string;
@@ -6177,40 +6274,6 @@ export declare const MODELS: {
6177
6274
  contextWindow: number;
6178
6275
  maxTokens: number;
6179
6276
  };
6180
- readonly "glm-4.6": {
6181
- id: string;
6182
- name: string;
6183
- api: "openai-completions";
6184
- provider: string;
6185
- baseUrl: string;
6186
- reasoning: true;
6187
- input: "text"[];
6188
- cost: {
6189
- input: number;
6190
- output: number;
6191
- cacheRead: number;
6192
- cacheWrite: number;
6193
- };
6194
- contextWindow: number;
6195
- maxTokens: number;
6196
- };
6197
- readonly "glm-4.7": {
6198
- id: string;
6199
- name: string;
6200
- api: "openai-completions";
6201
- provider: string;
6202
- baseUrl: string;
6203
- reasoning: true;
6204
- input: "text"[];
6205
- cost: {
6206
- input: number;
6207
- output: number;
6208
- cacheRead: number;
6209
- cacheWrite: number;
6210
- };
6211
- contextWindow: number;
6212
- maxTokens: number;
6213
- };
6214
6277
  readonly "glm-5": {
6215
6278
  id: string;
6216
6279
  name: string;
@@ -6466,23 +6529,6 @@ export declare const MODELS: {
6466
6529
  contextWindow: number;
6467
6530
  maxTokens: number;
6468
6531
  };
6469
- readonly "minimax-m2.1": {
6470
- id: string;
6471
- name: string;
6472
- api: "openai-completions";
6473
- provider: string;
6474
- baseUrl: string;
6475
- reasoning: true;
6476
- input: "text"[];
6477
- cost: {
6478
- input: number;
6479
- output: number;
6480
- cacheRead: number;
6481
- cacheWrite: number;
6482
- };
6483
- contextWindow: number;
6484
- maxTokens: number;
6485
- };
6486
6532
  readonly "minimax-m2.5": {
6487
6533
  id: string;
6488
6534
  name: string;
@@ -8136,6 +8182,23 @@ export declare const MODELS: {
8136
8182
  contextWindow: number;
8137
8183
  maxTokens: number;
8138
8184
  };
8185
+ readonly "mistralai/mistral-small-2603": {
8186
+ id: string;
8187
+ name: string;
8188
+ api: "openai-completions";
8189
+ provider: string;
8190
+ baseUrl: string;
8191
+ reasoning: true;
8192
+ input: ("image" | "text")[];
8193
+ cost: {
8194
+ input: number;
8195
+ output: number;
8196
+ cacheRead: number;
8197
+ cacheWrite: number;
8198
+ };
8199
+ contextWindow: number;
8200
+ maxTokens: number;
8201
+ };
8139
8202
  readonly "mistralai/mistral-small-3.1-24b-instruct:free": {
8140
8203
  id: string;
8141
8204
  name: string;
@@ -9122,6 +9185,40 @@ export declare const MODELS: {
9122
9185
  contextWindow: number;
9123
9186
  maxTokens: number;
9124
9187
  };
9188
+ readonly "openai/gpt-5.4-mini": {
9189
+ id: string;
9190
+ name: string;
9191
+ api: "openai-completions";
9192
+ provider: string;
9193
+ baseUrl: string;
9194
+ reasoning: true;
9195
+ input: ("image" | "text")[];
9196
+ cost: {
9197
+ input: number;
9198
+ output: number;
9199
+ cacheRead: number;
9200
+ cacheWrite: number;
9201
+ };
9202
+ contextWindow: number;
9203
+ maxTokens: number;
9204
+ };
9205
+ readonly "openai/gpt-5.4-nano": {
9206
+ id: string;
9207
+ name: string;
9208
+ api: "openai-completions";
9209
+ provider: string;
9210
+ baseUrl: string;
9211
+ reasoning: true;
9212
+ input: ("image" | "text")[];
9213
+ cost: {
9214
+ input: number;
9215
+ output: number;
9216
+ cacheRead: number;
9217
+ cacheWrite: number;
9218
+ };
9219
+ contextWindow: number;
9220
+ maxTokens: number;
9221
+ };
9125
9222
  readonly "openai/gpt-5.4-pro": {
9126
9223
  id: string;
9127
9224
  name: string;
@@ -9230,7 +9327,7 @@ export declare const MODELS: {
9230
9327
  api: "openai-completions";
9231
9328
  provider: string;
9232
9329
  baseUrl: string;
9233
- reasoning: false;
9330
+ reasoning: true;
9234
9331
  input: ("image" | "text")[];
9235
9332
  cost: {
9236
9333
  input: number;
@@ -9281,7 +9378,7 @@ export declare const MODELS: {
9281
9378
  api: "openai-completions";
9282
9379
  provider: string;
9283
9380
  baseUrl: string;
9284
- reasoning: false;
9381
+ reasoning: true;
9285
9382
  input: "text"[];
9286
9383
  cost: {
9287
9384
  input: number;
@@ -9298,7 +9395,7 @@ export declare const MODELS: {
9298
9395
  api: "openai-completions";
9299
9396
  provider: string;
9300
9397
  baseUrl: string;
9301
- reasoning: false;
9398
+ reasoning: true;
9302
9399
  input: "text"[];
9303
9400
  cost: {
9304
9401
  input: number;
@@ -11419,40 +11516,6 @@ export declare const MODELS: {
11419
11516
  contextWindow: number;
11420
11517
  maxTokens: number;
11421
11518
  };
11422
- readonly "google/gemini-2.5-flash-lite-preview-09-2025": {
11423
- id: string;
11424
- name: string;
11425
- api: "anthropic-messages";
11426
- provider: string;
11427
- baseUrl: string;
11428
- reasoning: true;
11429
- input: ("image" | "text")[];
11430
- cost: {
11431
- input: number;
11432
- output: number;
11433
- cacheRead: number;
11434
- cacheWrite: number;
11435
- };
11436
- contextWindow: number;
11437
- maxTokens: number;
11438
- };
11439
- readonly "google/gemini-2.5-flash-preview-09-2025": {
11440
- id: string;
11441
- name: string;
11442
- api: "anthropic-messages";
11443
- provider: string;
11444
- baseUrl: string;
11445
- reasoning: true;
11446
- input: ("image" | "text")[];
11447
- cost: {
11448
- input: number;
11449
- output: number;
11450
- cacheRead: number;
11451
- cacheWrite: number;
11452
- };
11453
- contextWindow: number;
11454
- maxTokens: number;
11455
- };
11456
11519
  readonly "google/gemini-2.5-pro": {
11457
11520
  id: string;
11458
11521
  name: string;
@@ -12524,7 +12587,7 @@ export declare const MODELS: {
12524
12587
  contextWindow: number;
12525
12588
  maxTokens: number;
12526
12589
  };
12527
- readonly "openai/gpt-5.4-pro": {
12590
+ readonly "openai/gpt-5.4-mini": {
12528
12591
  id: string;
12529
12592
  name: string;
12530
12593
  api: "anthropic-messages";
@@ -12541,14 +12604,14 @@ export declare const MODELS: {
12541
12604
  contextWindow: number;
12542
12605
  maxTokens: number;
12543
12606
  };
12544
- readonly "openai/gpt-oss-20b": {
12607
+ readonly "openai/gpt-5.4-nano": {
12545
12608
  id: string;
12546
12609
  name: string;
12547
12610
  api: "anthropic-messages";
12548
12611
  provider: string;
12549
12612
  baseUrl: string;
12550
12613
  reasoning: true;
12551
- input: "text"[];
12614
+ input: ("image" | "text")[];
12552
12615
  cost: {
12553
12616
  input: number;
12554
12617
  output: number;
@@ -12558,14 +12621,14 @@ export declare const MODELS: {
12558
12621
  contextWindow: number;
12559
12622
  maxTokens: number;
12560
12623
  };
12561
- readonly "openai/gpt-oss-safeguard-20b": {
12624
+ readonly "openai/gpt-5.4-pro": {
12562
12625
  id: string;
12563
12626
  name: string;
12564
12627
  api: "anthropic-messages";
12565
12628
  provider: string;
12566
12629
  baseUrl: string;
12567
12630
  reasoning: true;
12568
- input: "text"[];
12631
+ input: ("image" | "text")[];
12569
12632
  cost: {
12570
12633
  input: number;
12571
12634
  output: number;
@@ -12575,14 +12638,14 @@ export declare const MODELS: {
12575
12638
  contextWindow: number;
12576
12639
  maxTokens: number;
12577
12640
  };
12578
- readonly "openai/o1": {
12641
+ readonly "openai/gpt-oss-20b": {
12579
12642
  id: string;
12580
12643
  name: string;
12581
12644
  api: "anthropic-messages";
12582
12645
  provider: string;
12583
12646
  baseUrl: string;
12584
12647
  reasoning: true;
12585
- input: ("image" | "text")[];
12648
+ input: "text"[];
12586
12649
  cost: {
12587
12650
  input: number;
12588
12651
  output: number;
@@ -12592,14 +12655,14 @@ export declare const MODELS: {
12592
12655
  contextWindow: number;
12593
12656
  maxTokens: number;
12594
12657
  };
12595
- readonly "openai/o3": {
12658
+ readonly "openai/gpt-oss-safeguard-20b": {
12596
12659
  id: string;
12597
12660
  name: string;
12598
12661
  api: "anthropic-messages";
12599
12662
  provider: string;
12600
12663
  baseUrl: string;
12601
12664
  reasoning: true;
12602
- input: ("image" | "text")[];
12665
+ input: "text"[];
12603
12666
  cost: {
12604
12667
  input: number;
12605
12668
  output: number;
@@ -12609,7 +12672,7 @@ export declare const MODELS: {
12609
12672
  contextWindow: number;
12610
12673
  maxTokens: number;
12611
12674
  };
12612
- readonly "openai/o3-deep-research": {
12675
+ readonly "openai/o1": {
12613
12676
  id: string;
12614
12677
  name: string;
12615
12678
  api: "anthropic-messages";
@@ -12626,14 +12689,14 @@ export declare const MODELS: {
12626
12689
  contextWindow: number;
12627
12690
  maxTokens: number;
12628
12691
  };
12629
- readonly "openai/o3-mini": {
12692
+ readonly "openai/o3": {
12630
12693
  id: string;
12631
12694
  name: string;
12632
12695
  api: "anthropic-messages";
12633
12696
  provider: string;
12634
12697
  baseUrl: string;
12635
12698
  reasoning: true;
12636
- input: "text"[];
12699
+ input: ("image" | "text")[];
12637
12700
  cost: {
12638
12701
  input: number;
12639
12702
  output: number;
@@ -12643,7 +12706,7 @@ export declare const MODELS: {
12643
12706
  contextWindow: number;
12644
12707
  maxTokens: number;
12645
12708
  };
12646
- readonly "openai/o3-pro": {
12709
+ readonly "openai/o3-deep-research": {
12647
12710
  id: string;
12648
12711
  name: string;
12649
12712
  api: "anthropic-messages";
@@ -12660,14 +12723,14 @@ export declare const MODELS: {
12660
12723
  contextWindow: number;
12661
12724
  maxTokens: number;
12662
12725
  };
12663
- readonly "openai/o4-mini": {
12726
+ readonly "openai/o3-mini": {
12664
12727
  id: string;
12665
12728
  name: string;
12666
12729
  api: "anthropic-messages";
12667
12730
  provider: string;
12668
12731
  baseUrl: string;
12669
12732
  reasoning: true;
12670
- input: ("image" | "text")[];
12733
+ input: "text"[];
12671
12734
  cost: {
12672
12735
  input: number;
12673
12736
  output: number;
@@ -12677,13 +12740,13 @@ export declare const MODELS: {
12677
12740
  contextWindow: number;
12678
12741
  maxTokens: number;
12679
12742
  };
12680
- readonly "perplexity/sonar": {
12743
+ readonly "openai/o3-pro": {
12681
12744
  id: string;
12682
12745
  name: string;
12683
12746
  api: "anthropic-messages";
12684
12747
  provider: string;
12685
12748
  baseUrl: string;
12686
- reasoning: false;
12749
+ reasoning: true;
12687
12750
  input: ("image" | "text")[];
12688
12751
  cost: {
12689
12752
  input: number;
@@ -12694,13 +12757,13 @@ export declare const MODELS: {
12694
12757
  contextWindow: number;
12695
12758
  maxTokens: number;
12696
12759
  };
12697
- readonly "perplexity/sonar-pro": {
12760
+ readonly "openai/o4-mini": {
12698
12761
  id: string;
12699
12762
  name: string;
12700
12763
  api: "anthropic-messages";
12701
12764
  provider: string;
12702
12765
  baseUrl: string;
12703
- reasoning: false;
12766
+ reasoning: true;
12704
12767
  input: ("image" | "text")[];
12705
12768
  cost: {
12706
12769
  input: number;
@@ -12711,14 +12774,14 @@ export declare const MODELS: {
12711
12774
  contextWindow: number;
12712
12775
  maxTokens: number;
12713
12776
  };
12714
- readonly "prime-intellect/intellect-3": {
12777
+ readonly "perplexity/sonar": {
12715
12778
  id: string;
12716
12779
  name: string;
12717
12780
  api: "anthropic-messages";
12718
12781
  provider: string;
12719
12782
  baseUrl: string;
12720
- reasoning: true;
12721
- input: "text"[];
12783
+ reasoning: false;
12784
+ input: ("image" | "text")[];
12722
12785
  cost: {
12723
12786
  input: number;
12724
12787
  output: number;
@@ -12728,7 +12791,7 @@ export declare const MODELS: {
12728
12791
  contextWindow: number;
12729
12792
  maxTokens: number;
12730
12793
  };
12731
- readonly "vercel/v0-1.0-md": {
12794
+ readonly "perplexity/sonar-pro": {
12732
12795
  id: string;
12733
12796
  name: string;
12734
12797
  api: "anthropic-messages";
@@ -12745,14 +12808,14 @@ export declare const MODELS: {
12745
12808
  contextWindow: number;
12746
12809
  maxTokens: number;
12747
12810
  };
12748
- readonly "vercel/v0-1.5-md": {
12811
+ readonly "prime-intellect/intellect-3": {
12749
12812
  id: string;
12750
12813
  name: string;
12751
12814
  api: "anthropic-messages";
12752
12815
  provider: string;
12753
12816
  baseUrl: string;
12754
- reasoning: false;
12755
- input: ("image" | "text")[];
12817
+ reasoning: true;
12818
+ input: "text"[];
12756
12819
  cost: {
12757
12820
  input: number;
12758
12821
  output: number;