@mariozechner/pi-ai 0.37.2 → 0.37.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/README.md +61 -12
  2. package/dist/models.generated.d.ts +0 -51
  3. package/dist/models.generated.d.ts.map +1 -1
  4. package/dist/models.generated.js +41 -92
  5. package/dist/models.generated.js.map +1 -1
  6. package/dist/providers/google-gemini-cli.d.ts.map +1 -1
  7. package/dist/providers/google-gemini-cli.js +3 -3
  8. package/dist/providers/google-gemini-cli.js.map +1 -1
  9. package/dist/providers/google-shared.d.ts +26 -1
  10. package/dist/providers/google-shared.d.ts.map +1 -1
  11. package/dist/providers/google-shared.js +31 -0
  12. package/dist/providers/google-shared.js.map +1 -1
  13. package/dist/providers/google-vertex.d.ts.map +1 -1
  14. package/dist/providers/google-vertex.js +3 -3
  15. package/dist/providers/google-vertex.js.map +1 -1
  16. package/dist/providers/google.d.ts.map +1 -1
  17. package/dist/providers/google.js +3 -3
  18. package/dist/providers/google.js.map +1 -1
  19. package/dist/providers/openai-codex/prompts/codex.d.ts +0 -1
  20. package/dist/providers/openai-codex/prompts/codex.d.ts.map +1 -1
  21. package/dist/providers/openai-codex/prompts/codex.js +1 -42
  22. package/dist/providers/openai-codex/prompts/codex.js.map +1 -1
  23. package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts +2 -1
  24. package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts.map +1 -1
  25. package/dist/providers/openai-codex/prompts/pi-codex-bridge.js +42 -42
  26. package/dist/providers/openai-codex/prompts/pi-codex-bridge.js.map +1 -1
  27. package/dist/providers/openai-codex/prompts/system-prompt.d.ts +10 -0
  28. package/dist/providers/openai-codex/prompts/system-prompt.d.ts.map +1 -0
  29. package/dist/providers/openai-codex/prompts/system-prompt.js +15 -0
  30. package/dist/providers/openai-codex/prompts/system-prompt.js.map +1 -0
  31. package/dist/providers/openai-codex/request-transformer.d.ts +5 -1
  32. package/dist/providers/openai-codex/request-transformer.d.ts.map +1 -1
  33. package/dist/providers/openai-codex/request-transformer.js +9 -41
  34. package/dist/providers/openai-codex/request-transformer.js.map +1 -1
  35. package/dist/providers/openai-codex-responses.d.ts.map +1 -1
  36. package/dist/providers/openai-codex-responses.js +13 -2
  37. package/dist/providers/openai-codex-responses.js.map +1 -1
  38. package/dist/stream.d.ts.map +1 -1
  39. package/dist/stream.js +1 -0
  40. package/dist/stream.js.map +1 -1
  41. package/dist/types.d.ts +6 -0
  42. package/dist/types.d.ts.map +1 -1
  43. package/dist/types.js.map +1 -1
  44. package/package.json +1 -1
@@ -2780,9 +2780,9 @@ export const MODELS = {
2780
2780
  reasoning: true,
2781
2781
  input: ["text", "image"],
2782
2782
  cost: {
2783
- input: 0,
2784
- output: 0,
2785
- cacheRead: 0,
2783
+ input: 1.5,
2784
+ output: 6,
2785
+ cacheRead: 0.375,
2786
2786
  cacheWrite: 0,
2787
2787
  },
2788
2788
  contextWindow: 400000,
@@ -2797,9 +2797,9 @@ export const MODELS = {
2797
2797
  reasoning: true,
2798
2798
  input: ["text", "image"],
2799
2799
  cost: {
2800
- input: 0,
2801
- output: 0,
2802
- cacheRead: 0,
2800
+ input: 1.25,
2801
+ output: 10,
2802
+ cacheRead: 0.125,
2803
2803
  cacheWrite: 0,
2804
2804
  },
2805
2805
  contextWindow: 400000,
@@ -2814,9 +2814,9 @@ export const MODELS = {
2814
2814
  reasoning: true,
2815
2815
  input: ["text", "image"],
2816
2816
  cost: {
2817
- input: 0,
2818
- output: 0,
2819
- cacheRead: 0,
2817
+ input: 1.25,
2818
+ output: 10,
2819
+ cacheRead: 0.125,
2820
2820
  cacheWrite: 0,
2821
2821
  },
2822
2822
  contextWindow: 400000,
@@ -2831,9 +2831,9 @@ export const MODELS = {
2831
2831
  reasoning: true,
2832
2832
  input: ["text", "image"],
2833
2833
  cost: {
2834
- input: 0,
2835
- output: 0,
2836
- cacheRead: 0,
2834
+ input: 0.25,
2835
+ output: 2,
2836
+ cacheRead: 0.025,
2837
2837
  cacheWrite: 0,
2838
2838
  },
2839
2839
  contextWindow: 400000,
@@ -2848,9 +2848,9 @@ export const MODELS = {
2848
2848
  reasoning: true,
2849
2849
  input: ["text", "image"],
2850
2850
  cost: {
2851
- input: 0,
2852
- output: 0,
2853
- cacheRead: 0,
2851
+ input: 0.25,
2852
+ output: 2,
2853
+ cacheRead: 0.025,
2854
2854
  cacheWrite: 0,
2855
2855
  },
2856
2856
  contextWindow: 400000,
@@ -2865,9 +2865,9 @@ export const MODELS = {
2865
2865
  reasoning: true,
2866
2866
  input: ["text", "image"],
2867
2867
  cost: {
2868
- input: 0,
2869
- output: 0,
2870
- cacheRead: 0,
2868
+ input: 0.05,
2869
+ output: 0.4,
2870
+ cacheRead: 0.005,
2871
2871
  cacheWrite: 0,
2872
2872
  },
2873
2873
  contextWindow: 400000,
@@ -2882,9 +2882,9 @@ export const MODELS = {
2882
2882
  reasoning: true,
2883
2883
  input: ["text", "image"],
2884
2884
  cost: {
2885
- input: 0,
2886
- output: 0,
2887
- cacheRead: 0,
2885
+ input: 1.25,
2886
+ output: 10,
2887
+ cacheRead: 0.125,
2888
2888
  cacheWrite: 0,
2889
2889
  },
2890
2890
  contextWindow: 400000,
@@ -2899,9 +2899,9 @@ export const MODELS = {
2899
2899
  reasoning: true,
2900
2900
  input: ["text", "image"],
2901
2901
  cost: {
2902
- input: 0,
2903
- output: 0,
2904
- cacheRead: 0,
2902
+ input: 1.25,
2903
+ output: 10,
2904
+ cacheRead: 0.125,
2905
2905
  cacheWrite: 0,
2906
2906
  },
2907
2907
  contextWindow: 400000,
@@ -2916,9 +2916,9 @@ export const MODELS = {
2916
2916
  reasoning: true,
2917
2917
  input: ["text", "image"],
2918
2918
  cost: {
2919
- input: 0,
2920
- output: 0,
2921
- cacheRead: 0,
2919
+ input: 1.25,
2920
+ output: 10,
2921
+ cacheRead: 0.125,
2922
2922
  cacheWrite: 0,
2923
2923
  },
2924
2924
  contextWindow: 400000,
@@ -2933,9 +2933,9 @@ export const MODELS = {
2933
2933
  reasoning: true,
2934
2934
  input: ["text", "image"],
2935
2935
  cost: {
2936
- input: 0,
2937
- output: 0,
2938
- cacheRead: 0,
2936
+ input: 1.25,
2937
+ output: 10,
2938
+ cacheRead: 0.125,
2939
2939
  cacheWrite: 0,
2940
2940
  },
2941
2941
  contextWindow: 400000,
@@ -2950,9 +2950,9 @@ export const MODELS = {
2950
2950
  reasoning: true,
2951
2951
  input: ["text", "image"],
2952
2952
  cost: {
2953
- input: 0,
2954
- output: 0,
2955
- cacheRead: 0,
2953
+ input: 0.25,
2954
+ output: 2,
2955
+ cacheRead: 0.025,
2956
2956
  cacheWrite: 0,
2957
2957
  },
2958
2958
  contextWindow: 400000,
@@ -2967,9 +2967,9 @@ export const MODELS = {
2967
2967
  reasoning: true,
2968
2968
  input: ["text", "image"],
2969
2969
  cost: {
2970
- input: 0,
2971
- output: 0,
2972
- cacheRead: 0,
2970
+ input: 1.75,
2971
+ output: 14,
2972
+ cacheRead: 0.175,
2973
2973
  cacheWrite: 0,
2974
2974
  },
2975
2975
  contextWindow: 400000,
@@ -2984,9 +2984,9 @@ export const MODELS = {
2984
2984
  reasoning: true,
2985
2985
  input: ["text", "image"],
2986
2986
  cost: {
2987
- input: 0,
2988
- output: 0,
2989
- cacheRead: 0,
2987
+ input: 1.75,
2988
+ output: 14,
2989
+ cacheRead: 0.175,
2990
2990
  cacheWrite: 0,
2991
2991
  },
2992
2992
  contextWindow: 400000,
@@ -4201,57 +4201,6 @@ export const MODELS = {
4201
4201
  contextWindow: 327680,
4202
4202
  maxTokens: 16384,
4203
4203
  },
4204
- "microsoft/phi-3-medium-128k-instruct": {
4205
- id: "microsoft/phi-3-medium-128k-instruct",
4206
- name: "Microsoft: Phi-3 Medium 128K Instruct",
4207
- api: "openai-completions",
4208
- provider: "openrouter",
4209
- baseUrl: "https://openrouter.ai/api/v1",
4210
- reasoning: false,
4211
- input: ["text"],
4212
- cost: {
4213
- input: 1,
4214
- output: 1,
4215
- cacheRead: 0,
4216
- cacheWrite: 0,
4217
- },
4218
- contextWindow: 128000,
4219
- maxTokens: 4096,
4220
- },
4221
- "microsoft/phi-3-mini-128k-instruct": {
4222
- id: "microsoft/phi-3-mini-128k-instruct",
4223
- name: "Microsoft: Phi-3 Mini 128K Instruct",
4224
- api: "openai-completions",
4225
- provider: "openrouter",
4226
- baseUrl: "https://openrouter.ai/api/v1",
4227
- reasoning: false,
4228
- input: ["text"],
4229
- cost: {
4230
- input: 0.09999999999999999,
4231
- output: 0.09999999999999999,
4232
- cacheRead: 0,
4233
- cacheWrite: 0,
4234
- },
4235
- contextWindow: 128000,
4236
- maxTokens: 4096,
4237
- },
4238
- "microsoft/phi-3.5-mini-128k-instruct": {
4239
- id: "microsoft/phi-3.5-mini-128k-instruct",
4240
- name: "Microsoft: Phi-3.5 Mini 128K Instruct",
4241
- api: "openai-completions",
4242
- provider: "openrouter",
4243
- baseUrl: "https://openrouter.ai/api/v1",
4244
- reasoning: false,
4245
- input: ["text"],
4246
- cost: {
4247
- input: 0.09999999999999999,
4248
- output: 0.09999999999999999,
4249
- cacheRead: 0,
4250
- cacheWrite: 0,
4251
- },
4252
- contextWindow: 128000,
4253
- maxTokens: 4096,
4254
- },
4255
4204
  "minimax/minimax-m1": {
4256
4205
  id: "minimax/minimax-m1",
4257
4206
  name: "MiniMax: MiniMax M1",
@@ -6949,11 +6898,11 @@ export const MODELS = {
6949
6898
  cost: {
6950
6899
  input: 0.3,
6951
6900
  output: 0.8999999999999999,
6952
- cacheRead: 0.049999999999999996,
6901
+ cacheRead: 0,
6953
6902
  cacheWrite: 0,
6954
6903
  },
6955
6904
  contextWindow: 131072,
6956
- maxTokens: 24000,
6905
+ maxTokens: 131072,
6957
6906
  },
6958
6907
  "z-ai/glm-4.7": {
6959
6908
  id: "z-ai/glm-4.7",