@mariozechner/pi-ai 0.35.0 → 0.37.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects changes between versions as they appear in their respective public registries.
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +17 -1
- package/dist/cli.js.map +1 -1
- package/dist/models.d.ts +1 -1
- package/dist/models.d.ts.map +1 -1
- package/dist/models.generated.d.ts +231 -42
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +269 -80
- package/dist/models.generated.js.map +1 -1
- package/dist/models.js +3 -2
- package/dist/models.js.map +1 -1
- package/dist/providers/openai-codex/constants.d.ts +21 -0
- package/dist/providers/openai-codex/constants.d.ts.map +1 -0
- package/dist/providers/openai-codex/constants.js +21 -0
- package/dist/providers/openai-codex/constants.js.map +1 -0
- package/dist/providers/openai-codex/prompts/codex-instructions.md +105 -0
- package/dist/providers/openai-codex/prompts/codex.d.ts +11 -0
- package/dist/providers/openai-codex/prompts/codex.d.ts.map +1 -0
- package/dist/providers/openai-codex/prompts/codex.js +184 -0
- package/dist/providers/openai-codex/prompts/codex.js.map +1 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts +6 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts.map +1 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.js +48 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.js.map +1 -0
- package/dist/providers/openai-codex/request-transformer.d.ts +41 -0
- package/dist/providers/openai-codex/request-transformer.d.ts.map +1 -0
- package/dist/providers/openai-codex/request-transformer.js +247 -0
- package/dist/providers/openai-codex/request-transformer.js.map +1 -0
- package/dist/providers/openai-codex/response-handler.d.ts +19 -0
- package/dist/providers/openai-codex/response-handler.d.ts.map +1 -0
- package/dist/providers/openai-codex/response-handler.js +107 -0
- package/dist/providers/openai-codex/response-handler.js.map +1 -0
- package/dist/providers/openai-codex-responses.d.ts +10 -0
- package/dist/providers/openai-codex-responses.d.ts.map +1 -0
- package/dist/providers/openai-codex-responses.js +530 -0
- package/dist/providers/openai-codex-responses.js.map +1 -0
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +27 -1
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +6 -4
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/oauth/github-copilot.d.ts +2 -0
- package/dist/utils/oauth/github-copilot.d.ts.map +1 -1
- package/dist/utils/oauth/github-copilot.js +28 -5
- package/dist/utils/oauth/github-copilot.js.map +1 -1
- package/dist/utils/oauth/google-antigravity.d.ts +3 -1
- package/dist/utils/oauth/google-antigravity.d.ts.map +1 -1
- package/dist/utils/oauth/google-antigravity.js +100 -19
- package/dist/utils/oauth/google-antigravity.js.map +1 -1
- package/dist/utils/oauth/google-gemini-cli.d.ts +3 -1
- package/dist/utils/oauth/google-gemini-cli.d.ts.map +1 -1
- package/dist/utils/oauth/google-gemini-cli.js +100 -19
- package/dist/utils/oauth/google-gemini-cli.js.map +1 -1
- package/dist/utils/oauth/index.d.ts +1 -0
- package/dist/utils/oauth/index.d.ts.map +1 -1
- package/dist/utils/oauth/index.js +11 -0
- package/dist/utils/oauth/index.js.map +1 -1
- package/dist/utils/oauth/openai-codex.d.ts +28 -0
- package/dist/utils/oauth/openai-codex.d.ts.map +1 -0
- package/dist/utils/oauth/openai-codex.js +342 -0
- package/dist/utils/oauth/openai-codex.js.map +1 -0
- package/dist/utils/oauth/types.d.ts +2 -1
- package/dist/utils/oauth/types.d.ts.map +1 -1
- package/dist/utils/oauth/types.js.map +1 -1
- package/package.json +2 -2
package/dist/models.generated.js
CHANGED
@@ -2770,6 +2770,229 @@ export const MODELS = {
       maxTokens: 100000,
     },
   },
+  "openai-codex": {
+    "codex-mini-latest": {
+      id: "codex-mini-latest",
+      name: "Codex Mini Latest",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5": {
+      id: "gpt-5",
+      name: "gpt-5",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5-codex": {
+      id: "gpt-5-codex",
+      name: "gpt-5-codex",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5-codex-mini": {
+      id: "gpt-5-codex-mini",
+      name: "gpt-5-codex-mini",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5-mini": {
+      id: "gpt-5-mini",
+      name: "gpt-5-mini",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5-nano": {
+      id: "gpt-5-nano",
+      name: "gpt-5-nano",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5.1": {
+      id: "gpt-5.1",
+      name: "GPT-5.1",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5.1-chat-latest": {
+      id: "gpt-5.1-chat-latest",
+      name: "gpt-5.1-chat-latest",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5.1-codex": {
+      id: "gpt-5.1-codex",
+      name: "GPT-5.1 Codex",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5.1-codex-max": {
+      id: "gpt-5.1-codex-max",
+      name: "GPT-5.1 Codex Max",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5.1-codex-mini": {
+      id: "gpt-5.1-codex-mini",
+      name: "GPT-5.1 Codex Mini",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5.2": {
+      id: "gpt-5.2",
+      name: "GPT-5.2",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+    "gpt-5.2-codex": {
+      id: "gpt-5.2-codex",
+      name: "GPT-5.2 Codex",
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      baseUrl: "https://chatgpt.com/backend-api",
+      reasoning: true,
+      input: ["text", "image"],
+      cost: {
+        input: 0,
+        output: 0,
+        cacheRead: 0,
+        cacheWrite: 0,
+      },
+      contextWindow: 400000,
+      maxTokens: 128000,
+    },
+  },
   "openrouter": {
     "ai21/jamba-large-1.7": {
       id: "ai21/jamba-large-1.7",
@@ -2822,23 +3045,6 @@ export const MODELS = {
       contextWindow: 131072,
       maxTokens: 131072,
     },
-    "alibaba/tongyi-deepresearch-30b-a3b:free": {
-      id: "alibaba/tongyi-deepresearch-30b-a3b:free",
-      name: "Tongyi DeepResearch 30B A3B (free)",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0,
-        output: 0,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 131072,
-      maxTokens: 131072,
-    },
     "allenai/olmo-3-7b-instruct": {
       id: "allenai/olmo-3-7b-instruct",
       name: "AllenAI: Olmo 3 7B Instruct",
@@ -2958,23 +3164,6 @@ export const MODELS = {
       contextWindow: 200000,
       maxTokens: 4096,
     },
-    "anthropic/claude-3-opus": {
-      id: "anthropic/claude-3-opus",
-      name: "Anthropic: Claude 3 Opus",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: false,
-      input: ["text", "image"],
-      cost: {
-        input: 15,
-        output: 75,
-        cacheRead: 1.5,
-        cacheWrite: 18.75,
-      },
-      contextWindow: 200000,
-      maxTokens: 4096,
-    },
     "anthropic/claude-3.5-haiku": {
       id: "anthropic/claude-3.5-haiku",
       name: "Anthropic: Claude 3.5 Haiku",
@@ -3392,13 +3581,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
-        cacheRead: 0
+        input: 0.19,
+        output: 0.87,
+        cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 163840,
-      maxTokens:
+      maxTokens: 65536,
     },
     "deepseek/deepseek-chat-v3.1": {
       id: "deepseek/deepseek-chat-v3.1",
@@ -3426,13 +3615,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output:
+        input: 0.7,
+        output: 2.4,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 163840,
-      maxTokens:
+      maxTokens: 163840,
     },
     "deepseek/deepseek-r1-0528": {
       id: "deepseek/deepseek-r1-0528",
@@ -3808,6 +3997,23 @@ export const MODELS = {
       contextWindow: 128000,
       maxTokens: 16384,
     },
+    "kwaipilot/kat-coder-pro": {
+      id: "kwaipilot/kat-coder-pro",
+      name: "Kwaipilot: KAT-Coder-Pro V1",
+      api: "openai-completions",
+      provider: "openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+      reasoning: false,
+      input: ["text"],
+      cost: {
+        input: 0.207,
+        output: 0.828,
+        cacheRead: 0.0414,
+        cacheWrite: 0,
+      },
+      contextWindow: 256000,
+      maxTokens: 128000,
+    },
     "kwaipilot/kat-coder-pro:free": {
       id: "kwaipilot/kat-coder-pro:free",
       name: "Kwaipilot: KAT-Coder-Pro V1 (free)",
@@ -3823,7 +4029,7 @@ export const MODELS = {
        cacheWrite: 0,
      },
      contextWindow: 256000,
-     maxTokens:
+     maxTokens: 128000,
    },
    "meta-llama/llama-3-70b-instruct": {
      id: "meta-llama/llama-3-70b-instruct",
@@ -3903,11 +4109,11 @@ export const MODELS = {
       input: ["text"],
       cost: {
         input: 0.02,
-        output: 0.
+        output: 0.049999999999999996,
         cacheRead: 0,
         cacheWrite: 0,
       },
-      contextWindow:
+      contextWindow: 16384,
       maxTokens: 16384,
     },
     "meta-llama/llama-3.2-3b-instruct": {
@@ -4633,13 +4839,13 @@ export const MODELS = {
       reasoning: false,
       input: ["text"],
       cost: {
-        input: 0.
-        output:
+        input: 0.5,
+        output: 2.4,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 131072,
-      maxTokens:
+      maxTokens: 4096,
     },
     "moonshotai/kimi-k2-0905": {
       id: "moonshotai/kimi-k2-0905",
@@ -4684,13 +4890,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output:
+        input: 0.32,
+        output: 0.48,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 262144,
-      maxTokens:
+      maxTokens: 4096,
     },
     "nex-agi/deepseek-v3.1-nex-n1:free": {
       id: "nex-agi/deepseek-v3.1-nex-n1:free",
@@ -4726,23 +4932,6 @@ export const MODELS = {
       contextWindow: 32768,
       maxTokens: 32768,
     },
-    "nousresearch/hermes-4-405b": {
-      id: "nousresearch/hermes-4-405b",
-      name: "Nous: Hermes 4 405B",
-      api: "openai-completions",
-      provider: "openrouter",
-      baseUrl: "https://openrouter.ai/api/v1",
-      reasoning: true,
-      input: ["text"],
-      cost: {
-        input: 0.3,
-        output: 1.2,
-        cacheRead: 0,
-        cacheWrite: 0,
-      },
-      contextWindow: 131072,
-      maxTokens: 131072,
-    },
     "nousresearch/hermes-4-70b": {
       id: "nousresearch/hermes-4-70b",
       name: "Nous: Hermes 4 70B",
@@ -5557,7 +5746,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 131072,
-      maxTokens:
+      maxTokens: 4096,
     },
     "openai/gpt-oss-safeguard-20b": {
       id: "openai/gpt-oss-safeguard-20b",
@@ -6214,8 +6403,8 @@ export const MODELS = {
       reasoning: false,
       input: ["text", "image"],
       cost: {
-        input: 0.
-        output:
+        input: 0.12,
+        output: 0.56,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6231,8 +6420,8 @@ export const MODELS = {
       reasoning: true,
       input: ["text", "image"],
       cost: {
-        input: 0.
-        output:
+        input: 0.44999999999999996,
+        output: 3.5,
         cacheRead: 0,
         cacheWrite: 0,
       },
@@ -6475,7 +6664,7 @@ export const MODELS = {
         cacheWrite: 0,
       },
       contextWindow: 163840,
-      maxTokens:
+      maxTokens: 65536,
     },
     "x-ai/grok-3": {
       id: "x-ai/grok-3",
@@ -6673,13 +6862,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output: 0.
+        input: 0.049999999999999996,
+        output: 0.22,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 131072,
-      maxTokens:
+      maxTokens: 131072,
     },
     "z-ai/glm-4.5-air:free": {
       id: "z-ai/glm-4.5-air:free",
@@ -6696,7 +6885,7 @@ export const MODELS = {
        cacheWrite: 0,
      },
      contextWindow: 131072,
-     maxTokens:
+     maxTokens: 96000,
    },
    "z-ai/glm-4.5v": {
      id: "z-ai/glm-4.5v",
@@ -6775,13 +6964,13 @@ export const MODELS = {
       reasoning: true,
       input: ["text"],
       cost: {
-        input: 0.
-        output:
+        input: 0.16,
+        output: 0.7999999999999999,
         cacheRead: 0,
         cacheWrite: 0,
       },
       contextWindow: 202752,
-      maxTokens:
+      maxTokens: 4096,
     },
   },
   "xai": {
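For orientation, the minimal TypeScript sketch below shows how one of the new "openai-codex" entries added in this release might be read from the generated MODELS map. The import path and exported typings are assumptions (the diff only shows dist/models.generated.js and its .d.ts); the provider key, model id, and field names are taken verbatim from the added lines above.

// Minimal sketch, assuming MODELS from dist/models.generated.js is reachable
// through the package entry point; adjust the import to the real export surface.
import { MODELS } from "@mariozechner/pi-ai";

// New provider block added in 0.37.0, keyed by provider id, then model id.
const codexModels = MODELS["openai-codex"];
const model = codexModels["gpt-5.1-codex"];

console.log(model.api);           // "openai-codex-responses"
console.log(model.baseUrl);       // "https://chatgpt.com/backend-api"
console.log(model.contextWindow); // 400000
console.log(model.maxTokens);     // 128000
console.log(model.cost);          // all cost fields are 0 in the generated data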