@mariozechner/pi-ai 0.45.7 → 0.47.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/constants.d.ts +6 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +14 -0
- package/dist/constants.js.map +1 -0
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/dist/models.generated.d.ts +134 -302
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +165 -333
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/amazon-bedrock.d.ts.map +1 -1
- package/dist/providers/amazon-bedrock.js +27 -5
- package/dist/providers/amazon-bedrock.js.map +1 -1
- package/dist/providers/google-shared.d.ts.map +1 -1
- package/dist/providers/google-shared.js +22 -10
- package/dist/providers/google-shared.js.map +1 -1
- package/dist/providers/openai-codex-responses.d.ts +0 -2
- package/dist/providers/openai-codex-responses.d.ts.map +1 -1
- package/dist/providers/openai-codex-responses.js +476 -489
- package/dist/providers/openai-codex-responses.js.map +1 -1
- package/dist/providers/openai-completions.d.ts.map +1 -1
- package/dist/providers/openai-completions.js +2 -1
- package/dist/providers/openai-completions.js.map +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +1 -0
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/package.json +1 -1
- package/dist/providers/openai-codex/constants.d.ts +0 -21
- package/dist/providers/openai-codex/constants.d.ts.map +0 -1
- package/dist/providers/openai-codex/constants.js +0 -21
- package/dist/providers/openai-codex/constants.js.map +0 -1
- package/dist/providers/openai-codex/index.d.ts +0 -7
- package/dist/providers/openai-codex/index.d.ts.map +0 -1
- package/dist/providers/openai-codex/index.js +0 -7
- package/dist/providers/openai-codex/index.js.map +0 -1
- package/dist/providers/openai-codex/prompts/codex.d.ts +0 -3
- package/dist/providers/openai-codex/prompts/codex.d.ts.map +0 -1
- package/dist/providers/openai-codex/prompts/codex.js +0 -323
- package/dist/providers/openai-codex/prompts/codex.js.map +0 -1
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts +0 -7
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts.map +0 -1
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.js +0 -50
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.js.map +0 -1
- package/dist/providers/openai-codex/prompts/system-prompt.d.ts +0 -10
- package/dist/providers/openai-codex/prompts/system-prompt.d.ts.map +0 -1
- package/dist/providers/openai-codex/prompts/system-prompt.js +0 -15
- package/dist/providers/openai-codex/prompts/system-prompt.js.map +0 -1
- package/dist/providers/openai-codex/request-transformer.d.ts +0 -44
- package/dist/providers/openai-codex/request-transformer.d.ts.map +0 -1
- package/dist/providers/openai-codex/request-transformer.js +0 -99
- package/dist/providers/openai-codex/request-transformer.js.map +0 -1
- package/dist/providers/openai-codex/response-handler.d.ts +0 -19
- package/dist/providers/openai-codex/response-handler.d.ts.map +0 -1
- package/dist/providers/openai-codex/response-handler.js +0 -107
- package/dist/providers/openai-codex/response-handler.js.map +0 -1

package/dist/constants.d.ts ADDED
@@ -0,0 +1,6 @@
+/**
+ * Static Pi instructions for OpenAI Codex.
+ * This string is whitelisted by OpenAI and must not change.
+ */
+export declare const PI_STATIC_INSTRUCTIONS = "You are pi, an expert coding assistant. You help users with coding tasks by reading files, executing commands, editing code, and writing new files.\n\nPi specific Documentation:\n- Main documentation: pi-internal://README.md\n- Additional docs: pi-internal://docs\n- Examples: pi-internal://examples (extensions, custom tools, SDK)\n- When asked to create: custom models/providers (README.md), extensions (docs/extensions.md, examples/extensions/), themes (docs/theme.md), skills (docs/skills.md), TUI components (docs/tui.md - has copy-paste patterns)\n- Always read the doc, examples, AND follow .md cross-references before implementing\n";
+//# sourceMappingURL=constants.d.ts.map

package/dist/constants.d.ts.map ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,sBAAsB,qoBAQlC,CAAC","sourcesContent":["/**\n * Static Pi instructions for OpenAI Codex.\n * This string is whitelisted by OpenAI and must not change.\n */\nexport const PI_STATIC_INSTRUCTIONS = \`You are pi, an expert coding assistant. You help users with coding tasks by reading files, executing commands, editing code, and writing new files.\n\nPi specific Documentation:\n- Main documentation: pi-internal://README.md\n- Additional docs: pi-internal://docs\n- Examples: pi-internal://examples (extensions, custom tools, SDK)\n- When asked to create: custom models/providers (README.md), extensions (docs/extensions.md, examples/extensions/), themes (docs/theme.md), skills (docs/skills.md), TUI components (docs/tui.md - has copy-paste patterns)\n- Always read the doc, examples, AND follow .md cross-references before implementing\n\`;\n"]}

package/dist/constants.js ADDED
@@ -0,0 +1,14 @@
+/**
+ * Static Pi instructions for OpenAI Codex.
+ * This string is whitelisted by OpenAI and must not change.
+ */
+export const PI_STATIC_INSTRUCTIONS = `You are pi, an expert coding assistant. You help users with coding tasks by reading files, executing commands, editing code, and writing new files.
+
+Pi specific Documentation:
+- Main documentation: pi-internal://README.md
+- Additional docs: pi-internal://docs
+- Examples: pi-internal://examples (extensions, custom tools, SDK)
+- When asked to create: custom models/providers (README.md), extensions (docs/extensions.md, examples/extensions/), themes (docs/theme.md), skills (docs/skills.md), TUI components (docs/tui.md - has copy-paste patterns)
+- Always read the doc, examples, AND follow .md cross-references before implementing
+`;
+//# sourceMappingURL=constants.js.map

package/dist/constants.js.map ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"constants.js","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,CAAC,MAAM,sBAAsB,GAAG;;;;;;;;CAQrC,CAAC","sourcesContent":["/**\n * Static Pi instructions for OpenAI Codex.\n * This string is whitelisted by OpenAI and must not change.\n */\nexport const PI_STATIC_INSTRUCTIONS = \`You are pi, an expert coding assistant. You help users with coding tasks by reading files, executing commands, editing code, and writing new files.\n\nPi specific Documentation:\n- Main documentation: pi-internal://README.md\n- Additional docs: pi-internal://docs\n- Examples: pi-internal://examples (extensions, custom tools, SDK)\n- When asked to create: custom models/providers (README.md), extensions (docs/extensions.md, examples/extensions/), themes (docs/theme.md), skills (docs/skills.md), TUI components (docs/tui.md - has copy-paste patterns)\n- Always read the doc, examples, AND follow .md cross-references before implementing\n\`;\n"]}

package/dist/index.d.ts CHANGED
@@ -1,9 +1,9 @@
+export * from "./constants.js";
 export * from "./models.js";
 export * from "./providers/anthropic.js";
 export * from "./providers/google.js";
 export * from "./providers/google-gemini-cli.js";
 export * from "./providers/google-vertex.js";
-export * from "./providers/openai-codex/index.js";
 export * from "./providers/openai-completions.js";
 export * from "./providers/openai-responses.js";
 export * from "./stream.js";

package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,kCAAkC,CAAC;AACjD,cAAc,8BAA8B,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,kCAAkC,CAAC;AACjD,cAAc,8BAA8B,CAAC;AAE7C,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,cAAc,wBAAwB,CAAC;AACvC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export * from \"./constants.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/google-gemini-cli.js\";\nexport * from \"./providers/google-vertex.js\";\n\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport * from \"./utils/oauth/index.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}

package/dist/index.js CHANGED
@@ -1,9 +1,9 @@
+export * from "./constants.js";
 export * from "./models.js";
 export * from "./providers/anthropic.js";
 export * from "./providers/google.js";
 export * from "./providers/google-gemini-cli.js";
 export * from "./providers/google-vertex.js";
-export * from "./providers/openai-codex/index.js";
 export * from "./providers/openai-completions.js";
 export * from "./providers/openai-responses.js";
 export * from "./stream.js";

package/dist/index.js.map CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,kCAAkC,CAAC;AACjD,cAAc,8BAA8B,CAAC;
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,aAAa,CAAC;AAC5B,cAAc,0BAA0B,CAAC;AACzC,cAAc,uBAAuB,CAAC;AACtC,cAAc,kCAAkC,CAAC;AACjD,cAAc,8BAA8B,CAAC;AAE7C,cAAc,mCAAmC,CAAC;AAClD,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,YAAY,CAAC;AAC3B,cAAc,yBAAyB,CAAC;AACxC,cAAc,uBAAuB,CAAC;AACtC,cAAc,wBAAwB,CAAC;AACvC,cAAc,qBAAqB,CAAC;AACpC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,uBAAuB,CAAC","sourcesContent":["export * from \"./constants.js\";\nexport * from \"./models.js\";\nexport * from \"./providers/anthropic.js\";\nexport * from \"./providers/google.js\";\nexport * from \"./providers/google-gemini-cli.js\";\nexport * from \"./providers/google-vertex.js\";\n\nexport * from \"./providers/openai-completions.js\";\nexport * from \"./providers/openai-responses.js\";\nexport * from \"./stream.js\";\nexport * from \"./types.js\";\nexport * from \"./utils/event-stream.js\";\nexport * from \"./utils/json-parse.js\";\nexport * from \"./utils/oauth/index.js\";\nexport * from \"./utils/overflow.js\";\nexport * from \"./utils/typebox-helpers.js\";\nexport * from \"./utils/validation.js\";\n"]}
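
With dist/index.js now re-exporting the new ./constants.js module, the whitelisted prompt shown above becomes reachable from the package entry point. A minimal sketch, assuming the published package.json still resolves the root import to dist/index.js (the export map itself is not part of this diff):

```typescript
// Illustrative only: the root barrel now contains `export * from "./constants.js"`,
// so the static Codex prompt should be importable directly from the package.
import { PI_STATIC_INSTRUCTIONS } from "@mariozechner/pi-ai";

console.log(PI_STATIC_INSTRUCTIONS.startsWith("You are pi")); // true
```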

package/dist/models.generated.d.ts CHANGED
@@ -1802,10 +1802,10 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "
+        readonly "gpt-5.2-codex": {
             id: string;
             name: string;
-            api: "openai-
+            api: "openai-responses";
             provider: string;
             baseUrl: string;
             headers: {
@@ -1814,13 +1814,8 @@ export declare const MODELS: {
                 "Editor-Plugin-Version": string;
                 "Copilot-Integration-Id": string;
             };
-            compat: {
-                supportsStore: false;
-                supportsDeveloperRole: false;
-                supportsReasoningEffort: false;
-            };
             reasoning: true;
-            input: "text"[];
+            input: ("image" | "text")[];
             cost: {
                 input: number;
                 output: number;
@@ -1830,10 +1825,10 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "
+        readonly "grok-code-fast-1": {
             id: string;
             name: string;
-            api: "openai-
+            api: "openai-completions";
             provider: string;
             baseUrl: string;
             headers: {
@@ -1842,8 +1837,13 @@ export declare const MODELS: {
                 "Editor-Plugin-Version": string;
                 "Copilot-Integration-Id": string;
             };
+            compat: {
+                supportsStore: false;
+                supportsDeveloperRole: false;
+                supportsReasoningEffort: false;
+            };
             reasoning: true;
-            input:
+            input: "text"[];
             cost: {
                 input: number;
                 output: number;
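
In the four hunks above, the entry at this position becomes gpt-5.2-codex (openai-responses, image and text input, no compat block), and the following one becomes grok-code-fast-1 (openai-completions, text-only, carrying the compat flags supportsStore, supportsDeveloperRole and supportsReasoningEffort). Since the flags are declared on some generated entries but not others, a caller has to narrow before reading them; a minimal sketch, with an illustrative type alias and helper that are not part of the package:

```typescript
// Illustrative sketch: only some MODELS entries declare `compat`, so narrow first.
// Field names follow the .d.ts hunks above; widening the literal `false` types
// to boolean is my simplification.
type CompatFlags = {
    supportsStore: boolean;
    supportsDeveloperRole: boolean;
    supportsReasoningEffort: boolean;
};

function getCompat(entry: object): CompatFlags | undefined {
    return "compat" in entry ? (entry as { compat: CompatFlags }).compat : undefined;
}
```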

@@ -2903,6 +2903,42 @@ export declare const MODELS: {
             maxTokens: number;
         };
     };
+    readonly "minimax-cn": {
+        readonly "MiniMax-M2": {
+            id: string;
+            name: string;
+            api: "anthropic-messages";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+        readonly "MiniMax-M2.1": {
+            id: string;
+            name: string;
+            api: "anthropic-messages";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
+    };
     readonly mistral: {
         readonly "codestral-latest": {
             id: string;
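
The hunk above introduces a minimax-cn provider with MiniMax-M2 and MiniMax-M2.1 entries served over the Anthropic Messages API. A minimal sketch of reading the new entry, assuming MODELS is re-exported from the package root via ./models.js:

```typescript
// Illustrative sketch: field names follow the generated declaration above.
import { MODELS } from "@mariozechner/pi-ai";

const m2 = MODELS["minimax-cn"]["MiniMax-M2"];
console.log(m2.api);                           // typed as "anthropic-messages"
console.log(m2.contextWindow, m2.maxTokens);   // context window and output cap
console.log(m2.cost.input, m2.cost.cacheRead); // per-token pricing fields
```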

@@ -3739,6 +3775,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "gpt-5.2-codex": {
+            id: string;
+            name: string;
+            api: "openai-responses";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "gpt-5.2-pro": {
             id: string;
             name: string;
@@ -4338,6 +4391,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "gpt-5.2-codex": {
+            id: string;
+            name: string;
+            api: "openai-responses";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "grok-code": {
             id: string;
             name: string;
@@ -4476,23 +4546,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "allenai/olmo-3-7b-instruct": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "allenai/olmo-3.1-32b-instruct": {
             id: string;
             name: string;
@@ -4629,23 +4682,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "anthropic/claude-3.5-haiku-20241022": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: ("image" | "text")[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "anthropic/claude-3.5-sonnet": {
             id: string;
             name: string;
@@ -5462,23 +5498,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3-70b-instruct": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "meta-llama/llama-3-8b-instruct": {
             id: string;
             name: string;
@@ -5547,23 +5566,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "meta-llama/llama-3.2-3b-instruct": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "meta-llama/llama-3.3-70b-instruct": {
             id: string;
             name: string;
@@ -5853,40 +5855,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistralai/mistral-7b-instruct": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
-        readonly "mistralai/mistral-7b-instruct:free": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "mistralai/mistral-large": {
             id: string;
             name: string;
@@ -6448,23 +6416,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "openai/codex-mini": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: true;
-            input: ("image" | "text")[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "openai/gpt-3.5-turbo": {
             id: string;
             name: string;
@@ -7026,6 +6977,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "openai/gpt-5.2-codex": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "openai/gpt-5.2-pro": {
             id: string;
             name: string;
@@ -7366,108 +7334,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "qwen/qwen-max": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
-        readonly "qwen/qwen-plus": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
-        readonly "qwen/qwen-plus-2025-07-28": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
-        readonly "qwen/qwen-plus-2025-07-28:thinking": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: true;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
-        readonly "qwen/qwen-turbo": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
-        readonly "qwen/qwen-vl-max": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: ("image" | "text")[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "qwen/qwen3-14b": {
             id: string;
             name: string;
@@ -7672,40 +7538,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "qwen/qwen3-coder-flash": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
-        readonly "qwen/qwen3-coder-plus": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "qwen/qwen3-coder:exacto": {
             id: string;
             name: string;
@@ -7740,7 +7572,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "qwen/qwen3-
+        readonly "qwen/qwen3-next-80b-a3b-instruct": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -7757,7 +7589,7 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "qwen/qwen3-next-80b-a3b-instruct": {
+        readonly "qwen/qwen3-next-80b-a3b-instruct:free": {
             id: string;
             name: string;
             api: "openai-completions";
@@ -7876,23 +7708,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "qwen/qwen3-vl-8b-thinking": {
-            id: string;
-            name: string;
-            api: "openai-completions";
-            provider: string;
-            baseUrl: string;
-            reasoning: true;
-            input: ("image" | "text")[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "qwen/qwq-32b": {
             id: string;
             name: string;
@@ -8199,6 +8014,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "xiaomi/mimo-v2-flash": {
+            id: string;
+            name: string;
+            api: "openai-completions";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: "text"[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "xiaomi/mimo-v2-flash:free": {
             id: string;
             name: string;
@@ -9357,23 +9189,6 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
-        readonly "mistral/mistral-nemo": {
-            id: string;
-            name: string;
-            api: "anthropic-messages";
-            provider: string;
-            baseUrl: string;
-            reasoning: false;
-            input: "text"[];
-            cost: {
-                input: number;
-                output: number;
-                cacheRead: number;
-                cacheWrite: number;
-            };
-            contextWindow: number;
-            maxTokens: number;
-        };
         readonly "mistral/mistral-small": {
             id: string;
             name: string;
@@ -9867,6 +9682,23 @@ export declare const MODELS: {
             contextWindow: number;
             maxTokens: number;
         };
+        readonly "openai/gpt-5.2-codex": {
+            id: string;
+            name: string;
+            api: "anthropic-messages";
+            provider: string;
+            baseUrl: string;
+            reasoning: true;
+            input: ("image" | "text")[];
+            cost: {
+                input: number;
+                output: number;
+                cacheRead: number;
+                cacheWrite: number;
+            };
+            contextWindow: number;
+            maxTokens: number;
+        };
         readonly "openai/gpt-5.2-pro": {
             id: string;
             name: string;
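
Taken together, the remaining hunks add gpt-5.2-codex under several providers (openai-responses, openai-completions and anthropic-messages variants) plus xiaomi/mimo-v2-flash, and drop a batch of older entries. A sketch of walking the regenerated catalog, again assuming the root re-export of MODELS; the filter is only an example:

```typescript
// Illustrative sketch: list every provider/model pair that is served over the
// Anthropic Messages API in the regenerated catalog.
import { MODELS } from "@mariozechner/pi-ai";

for (const [provider, models] of Object.entries(MODELS)) {
    for (const [id, entry] of Object.entries(models)) {
        if ((entry as { api: string }).api === "anthropic-messages") {
            console.log(`${provider}/${id}`);
        }
    }
}
```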