@everworker/oneringai 0.4.5 → 0.4.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -6
- package/dist/{ImageModel-OWbA277F.d.ts → ImageModel-1uP-2vk7.d.ts} +8 -2
- package/dist/{ImageModel-Ds5_6sf7.d.cts → ImageModel-BDI37OED.d.cts} +8 -2
- package/dist/capabilities/agents/index.d.cts +1 -1
- package/dist/capabilities/agents/index.d.ts +1 -1
- package/dist/capabilities/images/index.cjs +149 -7
- package/dist/capabilities/images/index.cjs.map +1 -1
- package/dist/capabilities/images/index.d.cts +1 -1
- package/dist/capabilities/images/index.d.ts +1 -1
- package/dist/capabilities/images/index.js +149 -7
- package/dist/capabilities/images/index.js.map +1 -1
- package/dist/{index-C6ApwIzB.d.ts → index-Blci0FEd.d.ts} +54 -8
- package/dist/{index-CsQOVhqe.d.cts → index-D8RCwpK9.d.cts} +54 -8
- package/dist/index.cjs +2105 -185
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +399 -14
- package/dist/index.d.ts +399 -14
- package/dist/index.js +2100 -186
- package/dist/index.js.map +1 -1
- package/dist/shared/index.cjs +788 -133
- package/dist/shared/index.cjs.map +1 -1
- package/dist/shared/index.d.cts +20 -2
- package/dist/shared/index.d.ts +20 -2
- package/dist/shared/index.js +788 -133
- package/dist/shared/index.js.map +1 -1
- package/package.json +3 -3
package/dist/index.cjs
CHANGED
|
@@ -18,6 +18,7 @@ var spawn = require('cross-spawn');
|
|
|
18
18
|
var process2 = require('process');
|
|
19
19
|
var stream = require('stream');
|
|
20
20
|
var fs17 = require('fs/promises');
|
|
21
|
+
var events = require('events');
|
|
21
22
|
var simpleIcons = require('simple-icons');
|
|
22
23
|
var child_process = require('child_process');
|
|
23
24
|
var util = require('util');
|
|
@@ -12631,18 +12632,30 @@ function isVendor(value) {
|
|
|
12631
12632
|
// src/domain/entities/Model.ts
|
|
12632
12633
|
var LLM_MODELS = {
|
|
12633
12634
|
[Vendor.OpenAI]: {
|
|
12635
|
+
// GPT-5.3 Series
|
|
12636
|
+
GPT_5_3_CODEX: "gpt-5.3-codex",
|
|
12637
|
+
GPT_5_3_CHAT: "gpt-5.3-chat-latest",
|
|
12634
12638
|
// GPT-5.2 Series (Current Flagship)
|
|
12635
12639
|
GPT_5_2: "gpt-5.2",
|
|
12636
12640
|
GPT_5_2_PRO: "gpt-5.2-pro",
|
|
12641
|
+
GPT_5_2_CODEX: "gpt-5.2-codex",
|
|
12642
|
+
GPT_5_2_CHAT: "gpt-5.2-chat-latest",
|
|
12643
|
+
// GPT-5.1 Series
|
|
12644
|
+
GPT_5_1: "gpt-5.1",
|
|
12645
|
+
GPT_5_1_CODEX: "gpt-5.1-codex",
|
|
12646
|
+
GPT_5_1_CODEX_MAX: "gpt-5.1-codex-max",
|
|
12647
|
+
GPT_5_1_CODEX_MINI: "gpt-5.1-codex-mini",
|
|
12648
|
+
GPT_5_1_CHAT: "gpt-5.1-chat-latest",
|
|
12637
12649
|
// GPT-5 Series
|
|
12638
12650
|
GPT_5: "gpt-5",
|
|
12639
12651
|
GPT_5_MINI: "gpt-5-mini",
|
|
12640
12652
|
GPT_5_NANO: "gpt-5-nano",
|
|
12653
|
+
GPT_5_CHAT: "gpt-5-chat-latest",
|
|
12641
12654
|
// GPT-4.1 Series
|
|
12642
12655
|
GPT_4_1: "gpt-4.1",
|
|
12643
12656
|
GPT_4_1_MINI: "gpt-4.1-mini",
|
|
12644
12657
|
GPT_4_1_NANO: "gpt-4.1-nano",
|
|
12645
|
-
// GPT-4o Series (Legacy
|
|
12658
|
+
// GPT-4o Series (Legacy)
|
|
12646
12659
|
GPT_4O: "gpt-4o",
|
|
12647
12660
|
GPT_4O_MINI: "gpt-4o-mini",
|
|
12648
12661
|
// Reasoning Models (o-series)
|
|
@@ -12650,18 +12663,26 @@ var LLM_MODELS = {
|
|
|
12650
12663
|
O1: "o1"
|
|
12651
12664
|
},
|
|
12652
12665
|
[Vendor.Anthropic]: {
|
|
12653
|
-
// Claude 4.
|
|
12666
|
+
// Claude 4.6 Series (Current)
|
|
12667
|
+
CLAUDE_OPUS_4_6: "claude-opus-4-6",
|
|
12668
|
+
CLAUDE_SONNET_4_6: "claude-sonnet-4-6",
|
|
12669
|
+
// Claude 4.5 Series
|
|
12654
12670
|
CLAUDE_OPUS_4_5: "claude-opus-4-5-20251101",
|
|
12655
12671
|
CLAUDE_SONNET_4_5: "claude-sonnet-4-5-20250929",
|
|
12656
12672
|
CLAUDE_HAIKU_4_5: "claude-haiku-4-5-20251001",
|
|
12657
12673
|
// Claude 4.x Legacy
|
|
12658
12674
|
CLAUDE_OPUS_4_1: "claude-opus-4-1-20250805",
|
|
12675
|
+
CLAUDE_OPUS_4: "claude-opus-4-20250514",
|
|
12659
12676
|
CLAUDE_SONNET_4: "claude-sonnet-4-20250514",
|
|
12660
12677
|
CLAUDE_SONNET_3_7: "claude-3-7-sonnet-20250219",
|
|
12661
|
-
// Claude 3.x Legacy
|
|
12678
|
+
// Claude 3.x Legacy (Deprecated)
|
|
12662
12679
|
CLAUDE_HAIKU_3: "claude-3-haiku-20240307"
|
|
12663
12680
|
},
|
|
12664
12681
|
[Vendor.Google]: {
|
|
12682
|
+
// Gemini 3.1 Series (Preview)
|
|
12683
|
+
GEMINI_3_1_PRO_PREVIEW: "gemini-3.1-pro-preview",
|
|
12684
|
+
GEMINI_3_1_FLASH_LITE_PREVIEW: "gemini-3.1-flash-lite-preview",
|
|
12685
|
+
GEMINI_3_1_FLASH_IMAGE_PREVIEW: "gemini-3.1-flash-image-preview",
|
|
12665
12686
|
// Gemini 3 Series (Preview)
|
|
12666
12687
|
GEMINI_3_FLASH_PREVIEW: "gemini-3-flash-preview",
|
|
12667
12688
|
GEMINI_3_PRO_PREVIEW: "gemini-3-pro-preview",
|
|
@@ -12693,12 +12714,91 @@ var MODEL_REGISTRY = {
|
|
|
12693
12714
|
// ============================================================================
|
|
12694
12715
|
// OpenAI Models (Verified from platform.openai.com)
|
|
12695
12716
|
// ============================================================================
|
|
12717
|
+
// GPT-5.3 Series
|
|
12718
|
+
"gpt-5.3-codex": {
|
|
12719
|
+
name: "gpt-5.3-codex",
|
|
12720
|
+
provider: Vendor.OpenAI,
|
|
12721
|
+
description: "Latest codex model for coding and agentic tasks. Reasoning.effort: low, medium, high, xhigh",
|
|
12722
|
+
isActive: true,
|
|
12723
|
+
releaseDate: "2026-02-01",
|
|
12724
|
+
knowledgeCutoff: "2025-08-31",
|
|
12725
|
+
features: {
|
|
12726
|
+
reasoning: true,
|
|
12727
|
+
streaming: true,
|
|
12728
|
+
structuredOutput: true,
|
|
12729
|
+
functionCalling: true,
|
|
12730
|
+
fineTuning: false,
|
|
12731
|
+
predictedOutputs: false,
|
|
12732
|
+
realtime: false,
|
|
12733
|
+
vision: true,
|
|
12734
|
+
audio: false,
|
|
12735
|
+
video: false,
|
|
12736
|
+
batchAPI: true,
|
|
12737
|
+
promptCaching: true,
|
|
12738
|
+
parameters: {
|
|
12739
|
+
temperature: false,
|
|
12740
|
+
topP: false,
|
|
12741
|
+
frequencyPenalty: false,
|
|
12742
|
+
presencePenalty: false
|
|
12743
|
+
},
|
|
12744
|
+
input: {
|
|
12745
|
+
tokens: 4e5,
|
|
12746
|
+
text: true,
|
|
12747
|
+
image: true,
|
|
12748
|
+
cpm: 1.75,
|
|
12749
|
+
cpmCached: 0.175
|
|
12750
|
+
},
|
|
12751
|
+
output: {
|
|
12752
|
+
tokens: 128e3,
|
|
12753
|
+
text: true,
|
|
12754
|
+
cpm: 14
|
|
12755
|
+
}
|
|
12756
|
+
}
|
|
12757
|
+
},
|
|
12758
|
+
"gpt-5.3-chat-latest": {
|
|
12759
|
+
name: "gpt-5.3-chat-latest",
|
|
12760
|
+
provider: Vendor.OpenAI,
|
|
12761
|
+
description: "Latest GPT-5.3 chat model for general-purpose use",
|
|
12762
|
+
isActive: true,
|
|
12763
|
+
releaseDate: "2026-02-01",
|
|
12764
|
+
knowledgeCutoff: "2025-08-31",
|
|
12765
|
+
features: {
|
|
12766
|
+
reasoning: false,
|
|
12767
|
+
streaming: true,
|
|
12768
|
+
structuredOutput: true,
|
|
12769
|
+
functionCalling: true,
|
|
12770
|
+
fineTuning: false,
|
|
12771
|
+
predictedOutputs: false,
|
|
12772
|
+
realtime: false,
|
|
12773
|
+
vision: true,
|
|
12774
|
+
audio: false,
|
|
12775
|
+
video: false,
|
|
12776
|
+
batchAPI: true,
|
|
12777
|
+
promptCaching: true,
|
|
12778
|
+
parameters: {
|
|
12779
|
+
temperature: false
|
|
12780
|
+
},
|
|
12781
|
+
input: {
|
|
12782
|
+
tokens: 128e3,
|
|
12783
|
+
text: true,
|
|
12784
|
+
image: true,
|
|
12785
|
+
cpm: 1.75,
|
|
12786
|
+
cpmCached: 0.175
|
|
12787
|
+
},
|
|
12788
|
+
output: {
|
|
12789
|
+
tokens: 16e3,
|
|
12790
|
+
text: true,
|
|
12791
|
+
cpm: 14
|
|
12792
|
+
}
|
|
12793
|
+
}
|
|
12794
|
+
},
|
|
12696
12795
|
// GPT-5.2 Series (Current Flagship)
|
|
12697
12796
|
"gpt-5.2": {
|
|
12698
12797
|
name: "gpt-5.2",
|
|
12699
12798
|
provider: Vendor.OpenAI,
|
|
12700
12799
|
description: "Flagship model for coding and agentic tasks. Reasoning.effort: none, low, medium, high, xhigh",
|
|
12701
12800
|
isActive: true,
|
|
12801
|
+
preferred: true,
|
|
12702
12802
|
releaseDate: "2025-12-01",
|
|
12703
12803
|
knowledgeCutoff: "2025-08-31",
|
|
12704
12804
|
features: {
|
|
@@ -12724,7 +12824,8 @@ var MODEL_REGISTRY = {
|
|
|
12724
12824
|
tokens: 4e5,
|
|
12725
12825
|
text: true,
|
|
12726
12826
|
image: true,
|
|
12727
|
-
cpm: 1.75
|
|
12827
|
+
cpm: 1.75,
|
|
12828
|
+
cpmCached: 0.175
|
|
12728
12829
|
},
|
|
12729
12830
|
output: {
|
|
12730
12831
|
tokens: 128e3,
|
|
@@ -12743,7 +12844,7 @@ var MODEL_REGISTRY = {
|
|
|
12743
12844
|
features: {
|
|
12744
12845
|
reasoning: true,
|
|
12745
12846
|
streaming: true,
|
|
12746
|
-
structuredOutput:
|
|
12847
|
+
structuredOutput: false,
|
|
12747
12848
|
functionCalling: true,
|
|
12748
12849
|
fineTuning: false,
|
|
12749
12850
|
predictedOutputs: false,
|
|
@@ -12772,6 +12873,276 @@ var MODEL_REGISTRY = {
|
|
|
12772
12873
|
}
|
|
12773
12874
|
}
|
|
12774
12875
|
},
|
|
12876
|
+
"gpt-5.2-codex": {
|
|
12877
|
+
name: "gpt-5.2-codex",
|
|
12878
|
+
provider: Vendor.OpenAI,
|
|
12879
|
+
description: "GPT-5.2 codex for coding and agentic tasks. Reasoning.effort: low, medium, high, xhigh",
|
|
12880
|
+
isActive: true,
|
|
12881
|
+
preferred: true,
|
|
12882
|
+
releaseDate: "2025-12-01",
|
|
12883
|
+
knowledgeCutoff: "2025-08-31",
|
|
12884
|
+
features: {
|
|
12885
|
+
reasoning: true,
|
|
12886
|
+
streaming: true,
|
|
12887
|
+
structuredOutput: true,
|
|
12888
|
+
functionCalling: true,
|
|
12889
|
+
fineTuning: false,
|
|
12890
|
+
predictedOutputs: false,
|
|
12891
|
+
realtime: false,
|
|
12892
|
+
vision: true,
|
|
12893
|
+
audio: false,
|
|
12894
|
+
video: false,
|
|
12895
|
+
batchAPI: true,
|
|
12896
|
+
promptCaching: true,
|
|
12897
|
+
parameters: {
|
|
12898
|
+
temperature: false,
|
|
12899
|
+
topP: false,
|
|
12900
|
+
frequencyPenalty: false,
|
|
12901
|
+
presencePenalty: false
|
|
12902
|
+
},
|
|
12903
|
+
input: {
|
|
12904
|
+
tokens: 4e5,
|
|
12905
|
+
text: true,
|
|
12906
|
+
image: true,
|
|
12907
|
+
cpm: 1.75,
|
|
12908
|
+
cpmCached: 0.175
|
|
12909
|
+
},
|
|
12910
|
+
output: {
|
|
12911
|
+
tokens: 128e3,
|
|
12912
|
+
text: true,
|
|
12913
|
+
cpm: 14
|
|
12914
|
+
}
|
|
12915
|
+
}
|
|
12916
|
+
},
|
|
12917
|
+
"gpt-5.2-chat-latest": {
|
|
12918
|
+
name: "gpt-5.2-chat-latest",
|
|
12919
|
+
provider: Vendor.OpenAI,
|
|
12920
|
+
description: "GPT-5.2 chat model for general-purpose use",
|
|
12921
|
+
isActive: true,
|
|
12922
|
+
releaseDate: "2025-12-01",
|
|
12923
|
+
knowledgeCutoff: "2025-08-31",
|
|
12924
|
+
features: {
|
|
12925
|
+
reasoning: false,
|
|
12926
|
+
streaming: true,
|
|
12927
|
+
structuredOutput: true,
|
|
12928
|
+
functionCalling: true,
|
|
12929
|
+
fineTuning: false,
|
|
12930
|
+
predictedOutputs: false,
|
|
12931
|
+
realtime: false,
|
|
12932
|
+
vision: true,
|
|
12933
|
+
audio: false,
|
|
12934
|
+
video: false,
|
|
12935
|
+
batchAPI: true,
|
|
12936
|
+
promptCaching: true,
|
|
12937
|
+
input: {
|
|
12938
|
+
tokens: 128e3,
|
|
12939
|
+
text: true,
|
|
12940
|
+
image: true,
|
|
12941
|
+
cpm: 1.75,
|
|
12942
|
+
cpmCached: 0.175
|
|
12943
|
+
},
|
|
12944
|
+
output: {
|
|
12945
|
+
tokens: 16e3,
|
|
12946
|
+
text: true,
|
|
12947
|
+
cpm: 14
|
|
12948
|
+
}
|
|
12949
|
+
}
|
|
12950
|
+
},
|
|
12951
|
+
// GPT-5.1 Series
|
|
12952
|
+
"gpt-5.1": {
|
|
12953
|
+
name: "gpt-5.1",
|
|
12954
|
+
provider: Vendor.OpenAI,
|
|
12955
|
+
description: "Intelligent reasoning model for coding and agentic tasks. Reasoning.effort: none, low, medium, high",
|
|
12956
|
+
isActive: true,
|
|
12957
|
+
releaseDate: "2025-10-01",
|
|
12958
|
+
knowledgeCutoff: "2024-09-30",
|
|
12959
|
+
features: {
|
|
12960
|
+
reasoning: true,
|
|
12961
|
+
streaming: true,
|
|
12962
|
+
structuredOutput: true,
|
|
12963
|
+
functionCalling: true,
|
|
12964
|
+
fineTuning: false,
|
|
12965
|
+
predictedOutputs: false,
|
|
12966
|
+
realtime: false,
|
|
12967
|
+
vision: true,
|
|
12968
|
+
audio: false,
|
|
12969
|
+
video: false,
|
|
12970
|
+
batchAPI: true,
|
|
12971
|
+
promptCaching: true,
|
|
12972
|
+
parameters: {
|
|
12973
|
+
temperature: false,
|
|
12974
|
+
topP: false,
|
|
12975
|
+
frequencyPenalty: false,
|
|
12976
|
+
presencePenalty: false
|
|
12977
|
+
},
|
|
12978
|
+
input: {
|
|
12979
|
+
tokens: 4e5,
|
|
12980
|
+
text: true,
|
|
12981
|
+
image: true,
|
|
12982
|
+
cpm: 1.25,
|
|
12983
|
+
cpmCached: 0.125
|
|
12984
|
+
},
|
|
12985
|
+
output: {
|
|
12986
|
+
tokens: 128e3,
|
|
12987
|
+
text: true,
|
|
12988
|
+
cpm: 10
|
|
12989
|
+
}
|
|
12990
|
+
}
|
|
12991
|
+
},
|
|
12992
|
+
"gpt-5.1-codex": {
|
|
12993
|
+
name: "gpt-5.1-codex",
|
|
12994
|
+
provider: Vendor.OpenAI,
|
|
12995
|
+
description: "GPT-5.1 codex for coding and agentic tasks with reasoning",
|
|
12996
|
+
isActive: true,
|
|
12997
|
+
releaseDate: "2025-10-01",
|
|
12998
|
+
knowledgeCutoff: "2024-09-30",
|
|
12999
|
+
features: {
|
|
13000
|
+
reasoning: true,
|
|
13001
|
+
streaming: true,
|
|
13002
|
+
structuredOutput: true,
|
|
13003
|
+
functionCalling: true,
|
|
13004
|
+
fineTuning: false,
|
|
13005
|
+
predictedOutputs: false,
|
|
13006
|
+
realtime: false,
|
|
13007
|
+
vision: true,
|
|
13008
|
+
audio: false,
|
|
13009
|
+
video: false,
|
|
13010
|
+
batchAPI: true,
|
|
13011
|
+
promptCaching: true,
|
|
13012
|
+
parameters: {
|
|
13013
|
+
temperature: false,
|
|
13014
|
+
topP: false,
|
|
13015
|
+
frequencyPenalty: false,
|
|
13016
|
+
presencePenalty: false
|
|
13017
|
+
},
|
|
13018
|
+
input: {
|
|
13019
|
+
tokens: 4e5,
|
|
13020
|
+
text: true,
|
|
13021
|
+
image: true,
|
|
13022
|
+
cpm: 1.25,
|
|
13023
|
+
cpmCached: 0.125
|
|
13024
|
+
},
|
|
13025
|
+
output: {
|
|
13026
|
+
tokens: 128e3,
|
|
13027
|
+
text: true,
|
|
13028
|
+
cpm: 10
|
|
13029
|
+
}
|
|
13030
|
+
}
|
|
13031
|
+
},
|
|
13032
|
+
"gpt-5.1-codex-max": {
|
|
13033
|
+
name: "gpt-5.1-codex-max",
|
|
13034
|
+
provider: Vendor.OpenAI,
|
|
13035
|
+
description: "GPT-5.1 codex max for maximum reasoning depth on coding tasks",
|
|
13036
|
+
isActive: true,
|
|
13037
|
+
releaseDate: "2025-10-01",
|
|
13038
|
+
knowledgeCutoff: "2024-09-30",
|
|
13039
|
+
features: {
|
|
13040
|
+
reasoning: true,
|
|
13041
|
+
streaming: true,
|
|
13042
|
+
structuredOutput: true,
|
|
13043
|
+
functionCalling: true,
|
|
13044
|
+
fineTuning: false,
|
|
13045
|
+
predictedOutputs: false,
|
|
13046
|
+
realtime: false,
|
|
13047
|
+
vision: true,
|
|
13048
|
+
audio: false,
|
|
13049
|
+
video: false,
|
|
13050
|
+
batchAPI: true,
|
|
13051
|
+
promptCaching: true,
|
|
13052
|
+
parameters: {
|
|
13053
|
+
temperature: false,
|
|
13054
|
+
topP: false,
|
|
13055
|
+
frequencyPenalty: false,
|
|
13056
|
+
presencePenalty: false
|
|
13057
|
+
},
|
|
13058
|
+
input: {
|
|
13059
|
+
tokens: 4e5,
|
|
13060
|
+
text: true,
|
|
13061
|
+
image: true,
|
|
13062
|
+
cpm: 1.25,
|
|
13063
|
+
cpmCached: 0.125
|
|
13064
|
+
},
|
|
13065
|
+
output: {
|
|
13066
|
+
tokens: 128e3,
|
|
13067
|
+
text: true,
|
|
13068
|
+
cpm: 10
|
|
13069
|
+
}
|
|
13070
|
+
}
|
|
13071
|
+
},
|
|
13072
|
+
"gpt-5.1-codex-mini": {
|
|
13073
|
+
name: "gpt-5.1-codex-mini",
|
|
13074
|
+
provider: Vendor.OpenAI,
|
|
13075
|
+
description: "GPT-5.1 codex mini for cost-efficient coding tasks",
|
|
13076
|
+
isActive: true,
|
|
13077
|
+
releaseDate: "2025-10-01",
|
|
13078
|
+
knowledgeCutoff: "2024-09-30",
|
|
13079
|
+
features: {
|
|
13080
|
+
reasoning: true,
|
|
13081
|
+
streaming: true,
|
|
13082
|
+
structuredOutput: true,
|
|
13083
|
+
functionCalling: true,
|
|
13084
|
+
fineTuning: false,
|
|
13085
|
+
predictedOutputs: false,
|
|
13086
|
+
realtime: false,
|
|
13087
|
+
vision: true,
|
|
13088
|
+
audio: false,
|
|
13089
|
+
video: false,
|
|
13090
|
+
batchAPI: true,
|
|
13091
|
+
promptCaching: true,
|
|
13092
|
+
parameters: {
|
|
13093
|
+
temperature: false,
|
|
13094
|
+
topP: false,
|
|
13095
|
+
frequencyPenalty: false,
|
|
13096
|
+
presencePenalty: false
|
|
13097
|
+
},
|
|
13098
|
+
input: {
|
|
13099
|
+
tokens: 4e5,
|
|
13100
|
+
text: true,
|
|
13101
|
+
image: true,
|
|
13102
|
+
cpm: 0.25,
|
|
13103
|
+
cpmCached: 0.025
|
|
13104
|
+
},
|
|
13105
|
+
output: {
|
|
13106
|
+
tokens: 128e3,
|
|
13107
|
+
text: true,
|
|
13108
|
+
cpm: 2
|
|
13109
|
+
}
|
|
13110
|
+
}
|
|
13111
|
+
},
|
|
13112
|
+
"gpt-5.1-chat-latest": {
|
|
13113
|
+
name: "gpt-5.1-chat-latest",
|
|
13114
|
+
provider: Vendor.OpenAI,
|
|
13115
|
+
description: "GPT-5.1 chat model for general-purpose use",
|
|
13116
|
+
isActive: true,
|
|
13117
|
+
releaseDate: "2025-10-01",
|
|
13118
|
+
knowledgeCutoff: "2024-09-30",
|
|
13119
|
+
features: {
|
|
13120
|
+
reasoning: false,
|
|
13121
|
+
streaming: true,
|
|
13122
|
+
structuredOutput: true,
|
|
13123
|
+
functionCalling: true,
|
|
13124
|
+
fineTuning: false,
|
|
13125
|
+
predictedOutputs: false,
|
|
13126
|
+
realtime: false,
|
|
13127
|
+
vision: true,
|
|
13128
|
+
audio: false,
|
|
13129
|
+
video: false,
|
|
13130
|
+
batchAPI: true,
|
|
13131
|
+
promptCaching: true,
|
|
13132
|
+
input: {
|
|
13133
|
+
tokens: 128e3,
|
|
13134
|
+
text: true,
|
|
13135
|
+
image: true,
|
|
13136
|
+
cpm: 1.25,
|
|
13137
|
+
cpmCached: 0.125
|
|
13138
|
+
},
|
|
13139
|
+
output: {
|
|
13140
|
+
tokens: 16e3,
|
|
13141
|
+
text: true,
|
|
13142
|
+
cpm: 10
|
|
13143
|
+
}
|
|
13144
|
+
}
|
|
13145
|
+
},
|
|
12775
13146
|
// GPT-5 Series
|
|
12776
13147
|
"gpt-5": {
|
|
12777
13148
|
name: "gpt-5",
|
|
@@ -12803,7 +13174,8 @@ var MODEL_REGISTRY = {
|
|
|
12803
13174
|
tokens: 4e5,
|
|
12804
13175
|
text: true,
|
|
12805
13176
|
image: true,
|
|
12806
|
-
cpm: 1.25
|
|
13177
|
+
cpm: 1.25,
|
|
13178
|
+
cpmCached: 0.125
|
|
12807
13179
|
},
|
|
12808
13180
|
output: {
|
|
12809
13181
|
tokens: 128e3,
|
|
@@ -12842,7 +13214,8 @@ var MODEL_REGISTRY = {
|
|
|
12842
13214
|
tokens: 4e5,
|
|
12843
13215
|
text: true,
|
|
12844
13216
|
image: true,
|
|
12845
|
-
cpm: 0.25
|
|
13217
|
+
cpm: 0.25,
|
|
13218
|
+
cpmCached: 0.025
|
|
12846
13219
|
},
|
|
12847
13220
|
output: {
|
|
12848
13221
|
tokens: 128e3,
|
|
@@ -12881,7 +13254,8 @@ var MODEL_REGISTRY = {
|
|
|
12881
13254
|
tokens: 4e5,
|
|
12882
13255
|
text: true,
|
|
12883
13256
|
image: true,
|
|
12884
|
-
cpm: 0.05
|
|
13257
|
+
cpm: 0.05,
|
|
13258
|
+
cpmCached: 5e-3
|
|
12885
13259
|
},
|
|
12886
13260
|
output: {
|
|
12887
13261
|
tokens: 128e3,
|
|
@@ -12890,6 +13264,40 @@ var MODEL_REGISTRY = {
|
|
|
12890
13264
|
}
|
|
12891
13265
|
}
|
|
12892
13266
|
},
|
|
13267
|
+
"gpt-5-chat-latest": {
|
|
13268
|
+
name: "gpt-5-chat-latest",
|
|
13269
|
+
provider: Vendor.OpenAI,
|
|
13270
|
+
description: "GPT-5 chat model for general-purpose use",
|
|
13271
|
+
isActive: true,
|
|
13272
|
+
releaseDate: "2025-08-01",
|
|
13273
|
+
knowledgeCutoff: "2024-09-30",
|
|
13274
|
+
features: {
|
|
13275
|
+
reasoning: false,
|
|
13276
|
+
streaming: true,
|
|
13277
|
+
structuredOutput: true,
|
|
13278
|
+
functionCalling: true,
|
|
13279
|
+
fineTuning: false,
|
|
13280
|
+
predictedOutputs: false,
|
|
13281
|
+
realtime: false,
|
|
13282
|
+
vision: true,
|
|
13283
|
+
audio: false,
|
|
13284
|
+
video: false,
|
|
13285
|
+
batchAPI: true,
|
|
13286
|
+
promptCaching: true,
|
|
13287
|
+
input: {
|
|
13288
|
+
tokens: 128e3,
|
|
13289
|
+
text: true,
|
|
13290
|
+
image: true,
|
|
13291
|
+
cpm: 1.25,
|
|
13292
|
+
cpmCached: 0.125
|
|
13293
|
+
},
|
|
13294
|
+
output: {
|
|
13295
|
+
tokens: 16e3,
|
|
13296
|
+
text: true,
|
|
13297
|
+
cpm: 10
|
|
13298
|
+
}
|
|
13299
|
+
}
|
|
13300
|
+
},
|
|
12893
13301
|
// GPT-4.1 Series
|
|
12894
13302
|
"gpt-4.1": {
|
|
12895
13303
|
name: "gpt-4.1",
|
|
@@ -12897,7 +13305,7 @@ var MODEL_REGISTRY = {
|
|
|
12897
13305
|
description: "GPT-4.1 specialized for coding with 1M token context window",
|
|
12898
13306
|
isActive: true,
|
|
12899
13307
|
releaseDate: "2025-04-14",
|
|
12900
|
-
knowledgeCutoff: "
|
|
13308
|
+
knowledgeCutoff: "2024-06-01",
|
|
12901
13309
|
features: {
|
|
12902
13310
|
reasoning: false,
|
|
12903
13311
|
streaming: true,
|
|
@@ -12915,7 +13323,8 @@ var MODEL_REGISTRY = {
|
|
|
12915
13323
|
tokens: 1e6,
|
|
12916
13324
|
text: true,
|
|
12917
13325
|
image: true,
|
|
12918
|
-
cpm: 2
|
|
13326
|
+
cpm: 2,
|
|
13327
|
+
cpmCached: 0.5
|
|
12919
13328
|
},
|
|
12920
13329
|
output: {
|
|
12921
13330
|
tokens: 32768,
|
|
@@ -12930,7 +13339,7 @@ var MODEL_REGISTRY = {
|
|
|
12930
13339
|
description: "Efficient GPT-4.1 model, beats GPT-4o in many benchmarks at 83% lower cost",
|
|
12931
13340
|
isActive: true,
|
|
12932
13341
|
releaseDate: "2025-04-14",
|
|
12933
|
-
knowledgeCutoff: "
|
|
13342
|
+
knowledgeCutoff: "2024-06-01",
|
|
12934
13343
|
features: {
|
|
12935
13344
|
reasoning: false,
|
|
12936
13345
|
streaming: true,
|
|
@@ -12948,7 +13357,8 @@ var MODEL_REGISTRY = {
|
|
|
12948
13357
|
tokens: 1e6,
|
|
12949
13358
|
text: true,
|
|
12950
13359
|
image: true,
|
|
12951
|
-
cpm: 0.4
|
|
13360
|
+
cpm: 0.4,
|
|
13361
|
+
cpmCached: 0.1
|
|
12952
13362
|
},
|
|
12953
13363
|
output: {
|
|
12954
13364
|
tokens: 16384,
|
|
@@ -12963,7 +13373,7 @@ var MODEL_REGISTRY = {
|
|
|
12963
13373
|
description: "Fastest and cheapest model with 1M context. 80.1% MMLU, ideal for classification/autocompletion",
|
|
12964
13374
|
isActive: true,
|
|
12965
13375
|
releaseDate: "2025-04-14",
|
|
12966
|
-
knowledgeCutoff: "
|
|
13376
|
+
knowledgeCutoff: "2024-06-01",
|
|
12967
13377
|
features: {
|
|
12968
13378
|
reasoning: false,
|
|
12969
13379
|
streaming: true,
|
|
@@ -12981,7 +13391,8 @@ var MODEL_REGISTRY = {
|
|
|
12981
13391
|
tokens: 1e6,
|
|
12982
13392
|
text: true,
|
|
12983
13393
|
image: true,
|
|
12984
|
-
cpm: 0.1
|
|
13394
|
+
cpm: 0.1,
|
|
13395
|
+
cpmCached: 0.025
|
|
12985
13396
|
},
|
|
12986
13397
|
output: {
|
|
12987
13398
|
tokens: 16384,
|
|
@@ -12990,14 +13401,14 @@ var MODEL_REGISTRY = {
|
|
|
12990
13401
|
}
|
|
12991
13402
|
}
|
|
12992
13403
|
},
|
|
12993
|
-
// GPT-4o Series (Legacy
|
|
13404
|
+
// GPT-4o Series (Legacy)
|
|
12994
13405
|
"gpt-4o": {
|
|
12995
13406
|
name: "gpt-4o",
|
|
12996
13407
|
provider: Vendor.OpenAI,
|
|
12997
|
-
description: "Versatile omni model
|
|
13408
|
+
description: "Versatile omni model. Legacy but still available",
|
|
12998
13409
|
isActive: true,
|
|
12999
13410
|
releaseDate: "2024-05-13",
|
|
13000
|
-
knowledgeCutoff: "
|
|
13411
|
+
knowledgeCutoff: "2023-10-01",
|
|
13001
13412
|
features: {
|
|
13002
13413
|
reasoning: false,
|
|
13003
13414
|
streaming: true,
|
|
@@ -13007,7 +13418,7 @@ var MODEL_REGISTRY = {
|
|
|
13007
13418
|
predictedOutputs: true,
|
|
13008
13419
|
realtime: true,
|
|
13009
13420
|
vision: true,
|
|
13010
|
-
audio:
|
|
13421
|
+
audio: false,
|
|
13011
13422
|
video: false,
|
|
13012
13423
|
batchAPI: true,
|
|
13013
13424
|
promptCaching: true,
|
|
@@ -13015,13 +13426,12 @@ var MODEL_REGISTRY = {
|
|
|
13015
13426
|
tokens: 128e3,
|
|
13016
13427
|
text: true,
|
|
13017
13428
|
image: true,
|
|
13018
|
-
|
|
13019
|
-
|
|
13429
|
+
cpm: 2.5,
|
|
13430
|
+
cpmCached: 1.25
|
|
13020
13431
|
},
|
|
13021
13432
|
output: {
|
|
13022
13433
|
tokens: 16384,
|
|
13023
13434
|
text: true,
|
|
13024
|
-
audio: true,
|
|
13025
13435
|
cpm: 10
|
|
13026
13436
|
}
|
|
13027
13437
|
}
|
|
@@ -13029,10 +13439,10 @@ var MODEL_REGISTRY = {
|
|
|
13029
13439
|
"gpt-4o-mini": {
|
|
13030
13440
|
name: "gpt-4o-mini",
|
|
13031
13441
|
provider: Vendor.OpenAI,
|
|
13032
|
-
description: "Fast, affordable omni model
|
|
13442
|
+
description: "Fast, affordable omni model",
|
|
13033
13443
|
isActive: true,
|
|
13034
13444
|
releaseDate: "2024-07-18",
|
|
13035
|
-
knowledgeCutoff: "
|
|
13445
|
+
knowledgeCutoff: "2023-10-01",
|
|
13036
13446
|
features: {
|
|
13037
13447
|
reasoning: false,
|
|
13038
13448
|
streaming: true,
|
|
@@ -13042,7 +13452,7 @@ var MODEL_REGISTRY = {
|
|
|
13042
13452
|
predictedOutputs: false,
|
|
13043
13453
|
realtime: true,
|
|
13044
13454
|
vision: true,
|
|
13045
|
-
audio:
|
|
13455
|
+
audio: false,
|
|
13046
13456
|
video: false,
|
|
13047
13457
|
batchAPI: true,
|
|
13048
13458
|
promptCaching: true,
|
|
@@ -13050,13 +13460,12 @@ var MODEL_REGISTRY = {
|
|
|
13050
13460
|
tokens: 128e3,
|
|
13051
13461
|
text: true,
|
|
13052
13462
|
image: true,
|
|
13053
|
-
|
|
13054
|
-
|
|
13463
|
+
cpm: 0.15,
|
|
13464
|
+
cpmCached: 0.075
|
|
13055
13465
|
},
|
|
13056
13466
|
output: {
|
|
13057
13467
|
tokens: 16384,
|
|
13058
13468
|
text: true,
|
|
13059
|
-
audio: true,
|
|
13060
13469
|
cpm: 0.6
|
|
13061
13470
|
}
|
|
13062
13471
|
}
|
|
@@ -13068,7 +13477,7 @@ var MODEL_REGISTRY = {
|
|
|
13068
13477
|
description: "Fast reasoning model tailored for coding, math, and science",
|
|
13069
13478
|
isActive: true,
|
|
13070
13479
|
releaseDate: "2025-01-31",
|
|
13071
|
-
knowledgeCutoff: "
|
|
13480
|
+
knowledgeCutoff: "2023-10-01",
|
|
13072
13481
|
features: {
|
|
13073
13482
|
reasoning: true,
|
|
13074
13483
|
streaming: true,
|
|
@@ -13077,11 +13486,11 @@ var MODEL_REGISTRY = {
|
|
|
13077
13486
|
fineTuning: false,
|
|
13078
13487
|
predictedOutputs: false,
|
|
13079
13488
|
realtime: false,
|
|
13080
|
-
vision:
|
|
13489
|
+
vision: false,
|
|
13081
13490
|
audio: false,
|
|
13082
13491
|
video: false,
|
|
13083
13492
|
batchAPI: true,
|
|
13084
|
-
promptCaching:
|
|
13493
|
+
promptCaching: true,
|
|
13085
13494
|
parameters: {
|
|
13086
13495
|
temperature: false,
|
|
13087
13496
|
topP: false,
|
|
@@ -13091,8 +13500,8 @@ var MODEL_REGISTRY = {
|
|
|
13091
13500
|
input: {
|
|
13092
13501
|
tokens: 2e5,
|
|
13093
13502
|
text: true,
|
|
13094
|
-
|
|
13095
|
-
|
|
13503
|
+
cpm: 1.1,
|
|
13504
|
+
cpmCached: 0.55
|
|
13096
13505
|
},
|
|
13097
13506
|
output: {
|
|
13098
13507
|
tokens: 1e5,
|
|
@@ -13107,7 +13516,7 @@ var MODEL_REGISTRY = {
|
|
|
13107
13516
|
description: "Advanced reasoning model for complex problems",
|
|
13108
13517
|
isActive: true,
|
|
13109
13518
|
releaseDate: "2024-12-17",
|
|
13110
|
-
knowledgeCutoff: "
|
|
13519
|
+
knowledgeCutoff: "2023-10-01",
|
|
13111
13520
|
features: {
|
|
13112
13521
|
reasoning: true,
|
|
13113
13522
|
streaming: true,
|
|
@@ -13120,7 +13529,7 @@ var MODEL_REGISTRY = {
|
|
|
13120
13529
|
audio: false,
|
|
13121
13530
|
video: false,
|
|
13122
13531
|
batchAPI: true,
|
|
13123
|
-
promptCaching:
|
|
13532
|
+
promptCaching: true,
|
|
13124
13533
|
parameters: {
|
|
13125
13534
|
temperature: false,
|
|
13126
13535
|
topP: false,
|
|
@@ -13131,7 +13540,8 @@ var MODEL_REGISTRY = {
|
|
|
13131
13540
|
tokens: 2e5,
|
|
13132
13541
|
text: true,
|
|
13133
13542
|
image: true,
|
|
13134
|
-
cpm: 15
|
|
13543
|
+
cpm: 15,
|
|
13544
|
+
cpmCached: 7.5
|
|
13135
13545
|
},
|
|
13136
13546
|
output: {
|
|
13137
13547
|
tokens: 1e5,
|
|
@@ -13141,13 +13551,88 @@ var MODEL_REGISTRY = {
|
|
|
13141
13551
|
}
|
|
13142
13552
|
},
|
|
13143
13553
|
// ============================================================================
|
|
13144
|
-
// Anthropic Models (Verified from platform.claude.com)
|
|
13554
|
+
// Anthropic Models (Verified from platform.claude.com - March 2026)
|
|
13145
13555
|
// ============================================================================
|
|
13146
|
-
// Claude 4.
|
|
13556
|
+
// Claude 4.6 Series (Current)
|
|
13557
|
+
"claude-opus-4-6": {
|
|
13558
|
+
name: "claude-opus-4-6",
|
|
13559
|
+
provider: Vendor.Anthropic,
|
|
13560
|
+
description: "The most intelligent model for building agents and coding. 128K output, adaptive thinking",
|
|
13561
|
+
isActive: true,
|
|
13562
|
+
preferred: true,
|
|
13563
|
+
releaseDate: "2026-02-01",
|
|
13564
|
+
knowledgeCutoff: "2025-05-01",
|
|
13565
|
+
features: {
|
|
13566
|
+
reasoning: false,
|
|
13567
|
+
streaming: true,
|
|
13568
|
+
structuredOutput: true,
|
|
13569
|
+
functionCalling: true,
|
|
13570
|
+
fineTuning: false,
|
|
13571
|
+
predictedOutputs: false,
|
|
13572
|
+
realtime: false,
|
|
13573
|
+
vision: true,
|
|
13574
|
+
audio: false,
|
|
13575
|
+
video: false,
|
|
13576
|
+
extendedThinking: true,
|
|
13577
|
+
batchAPI: true,
|
|
13578
|
+
promptCaching: true,
|
|
13579
|
+
input: {
|
|
13580
|
+
tokens: 2e5,
|
|
13581
|
+
// 1M with beta header
|
|
13582
|
+
text: true,
|
|
13583
|
+
image: true,
|
|
13584
|
+
cpm: 5,
|
|
13585
|
+
cpmCached: 0.5
|
|
13586
|
+
},
|
|
13587
|
+
output: {
|
|
13588
|
+
tokens: 128e3,
|
|
13589
|
+
text: true,
|
|
13590
|
+
cpm: 25
|
|
13591
|
+
}
|
|
13592
|
+
}
|
|
13593
|
+
},
|
|
13594
|
+
"claude-sonnet-4-6": {
|
|
13595
|
+
name: "claude-sonnet-4-6",
|
|
13596
|
+
provider: Vendor.Anthropic,
|
|
13597
|
+
description: "Best combination of speed and intelligence. Adaptive thinking, 1M context beta",
|
|
13598
|
+
isActive: true,
|
|
13599
|
+
preferred: true,
|
|
13600
|
+
releaseDate: "2026-02-01",
|
|
13601
|
+
knowledgeCutoff: "2025-08-01",
|
|
13602
|
+
features: {
|
|
13603
|
+
reasoning: false,
|
|
13604
|
+
streaming: true,
|
|
13605
|
+
structuredOutput: true,
|
|
13606
|
+
functionCalling: true,
|
|
13607
|
+
fineTuning: false,
|
|
13608
|
+
predictedOutputs: false,
|
|
13609
|
+
realtime: false,
|
|
13610
|
+
vision: true,
|
|
13611
|
+
audio: false,
|
|
13612
|
+
video: false,
|
|
13613
|
+
extendedThinking: true,
|
|
13614
|
+
batchAPI: true,
|
|
13615
|
+
promptCaching: true,
|
|
13616
|
+
input: {
|
|
13617
|
+
tokens: 2e5,
|
|
13618
|
+
// 1M with beta header
|
|
13619
|
+
text: true,
|
|
13620
|
+
image: true,
|
|
13621
|
+
cpm: 3,
|
|
13622
|
+
cpmCached: 0.3
|
|
13623
|
+
},
|
|
13624
|
+
output: {
|
|
13625
|
+
tokens: 64e3,
|
|
13626
|
+
text: true,
|
|
13627
|
+
cpm: 15
|
|
13628
|
+
}
|
|
13629
|
+
}
|
|
13630
|
+
},
|
|
13631
|
+
// Claude 4.5 Series
|
|
13147
13632
|
"claude-opus-4-5-20251101": {
|
|
13148
13633
|
name: "claude-opus-4-5-20251101",
|
|
13149
13634
|
provider: Vendor.Anthropic,
|
|
13150
|
-
description: "Premium model combining maximum intelligence with practical performance",
|
|
13635
|
+
description: "Legacy Opus 4.5. Premium model combining maximum intelligence with practical performance",
|
|
13151
13636
|
isActive: true,
|
|
13152
13637
|
releaseDate: "2025-11-01",
|
|
13153
13638
|
knowledgeCutoff: "2025-05-01",
|
|
@@ -13182,7 +13667,7 @@ var MODEL_REGISTRY = {
|
|
|
13182
13667
|
"claude-sonnet-4-5-20250929": {
|
|
13183
13668
|
name: "claude-sonnet-4-5-20250929",
|
|
13184
13669
|
provider: Vendor.Anthropic,
|
|
13185
|
-
description: "Smart model for complex agents and coding
|
|
13670
|
+
description: "Legacy Sonnet 4.5. Smart model for complex agents and coding",
|
|
13186
13671
|
isActive: true,
|
|
13187
13672
|
releaseDate: "2025-09-29",
|
|
13188
13673
|
knowledgeCutoff: "2025-01-01",
|
|
@@ -13202,6 +13687,7 @@ var MODEL_REGISTRY = {
|
|
|
13202
13687
|
promptCaching: true,
|
|
13203
13688
|
input: {
|
|
13204
13689
|
tokens: 2e5,
|
|
13690
|
+
// 1M with beta header
|
|
13205
13691
|
text: true,
|
|
13206
13692
|
image: true,
|
|
13207
13693
|
cpm: 3,
|
|
@@ -13285,10 +13771,45 @@ var MODEL_REGISTRY = {
|
|
|
13285
13771
|
}
|
|
13286
13772
|
}
|
|
13287
13773
|
},
|
|
13774
|
+
"claude-opus-4-20250514": {
|
|
13775
|
+
name: "claude-opus-4-20250514",
|
|
13776
|
+
provider: Vendor.Anthropic,
|
|
13777
|
+
description: "Legacy Opus 4. Agentic tasks and reasoning",
|
|
13778
|
+
isActive: true,
|
|
13779
|
+
releaseDate: "2025-05-14",
|
|
13780
|
+
knowledgeCutoff: "2025-01-01",
|
|
13781
|
+
features: {
|
|
13782
|
+
reasoning: false,
|
|
13783
|
+
streaming: true,
|
|
13784
|
+
structuredOutput: true,
|
|
13785
|
+
functionCalling: true,
|
|
13786
|
+
fineTuning: false,
|
|
13787
|
+
predictedOutputs: false,
|
|
13788
|
+
realtime: false,
|
|
13789
|
+
vision: true,
|
|
13790
|
+
audio: false,
|
|
13791
|
+
video: false,
|
|
13792
|
+
extendedThinking: true,
|
|
13793
|
+
batchAPI: true,
|
|
13794
|
+
promptCaching: true,
|
|
13795
|
+
input: {
|
|
13796
|
+
tokens: 2e5,
|
|
13797
|
+
text: true,
|
|
13798
|
+
image: true,
|
|
13799
|
+
cpm: 15,
|
|
13800
|
+
cpmCached: 1.5
|
|
13801
|
+
},
|
|
13802
|
+
output: {
|
|
13803
|
+
tokens: 32e3,
|
|
13804
|
+
text: true,
|
|
13805
|
+
cpm: 75
|
|
13806
|
+
}
|
|
13807
|
+
}
|
|
13808
|
+
},
|
|
13288
13809
|
"claude-sonnet-4-20250514": {
|
|
13289
13810
|
name: "claude-sonnet-4-20250514",
|
|
13290
13811
|
provider: Vendor.Anthropic,
|
|
13291
|
-
description: "Legacy Sonnet 4.
|
|
13812
|
+
description: "Legacy Sonnet 4. Supports 1M context beta",
|
|
13292
13813
|
isActive: true,
|
|
13293
13814
|
releaseDate: "2025-05-14",
|
|
13294
13815
|
knowledgeCutoff: "2025-01-01",
|
|
@@ -13324,7 +13845,7 @@ var MODEL_REGISTRY = {
|
|
|
13324
13845
|
"claude-3-7-sonnet-20250219": {
|
|
13325
13846
|
name: "claude-3-7-sonnet-20250219",
|
|
13326
13847
|
provider: Vendor.Anthropic,
|
|
13327
|
-
description: "Claude 3.7 Sonnet with extended thinking
|
|
13848
|
+
description: "Deprecated. Claude 3.7 Sonnet with extended thinking",
|
|
13328
13849
|
isActive: true,
|
|
13329
13850
|
releaseDate: "2025-02-19",
|
|
13330
13851
|
knowledgeCutoff: "2024-10-01",
|
|
@@ -13351,17 +13872,16 @@ var MODEL_REGISTRY = {
|
|
|
13351
13872
|
},
|
|
13352
13873
|
output: {
|
|
13353
13874
|
tokens: 64e3,
|
|
13354
|
-
// 128K with beta header
|
|
13355
13875
|
text: true,
|
|
13356
13876
|
cpm: 15
|
|
13357
13877
|
}
|
|
13358
13878
|
}
|
|
13359
13879
|
},
|
|
13360
|
-
// Claude 3.x Legacy
|
|
13880
|
+
// Claude 3.x Legacy (Deprecated - retiring April 19, 2026)
|
|
13361
13881
|
"claude-3-haiku-20240307": {
|
|
13362
13882
|
name: "claude-3-haiku-20240307",
|
|
13363
13883
|
provider: Vendor.Anthropic,
|
|
13364
|
-
description: "
|
|
13884
|
+
description: "Deprecated. Retiring April 19, 2026. Migrate to Haiku 4.5",
|
|
13365
13885
|
isActive: true,
|
|
13366
13886
|
releaseDate: "2024-03-07",
|
|
13367
13887
|
knowledgeCutoff: "2023-08-01",
|
|
@@ -13394,16 +13914,124 @@ var MODEL_REGISTRY = {
|
|
|
13394
13914
|
}
|
|
13395
13915
|
},
|
|
13396
13916
|
// ============================================================================
|
|
13397
|
-
// Google Models (Verified from ai.google.dev)
|
|
13917
|
+
// Google Models (Verified from ai.google.dev - March 2026)
|
|
13398
13918
|
// ============================================================================
|
|
13919
|
+
// Gemini 3.1 Series (Preview)
|
|
13920
|
+
"gemini-3.1-pro-preview": {
|
|
13921
|
+
name: "gemini-3.1-pro-preview",
|
|
13922
|
+
provider: Vendor.Google,
|
|
13923
|
+
description: "Advanced intelligence with powerful agentic and coding capabilities. Replaces gemini-3-pro-preview",
|
|
13924
|
+
isActive: true,
|
|
13925
|
+
preferred: true,
|
|
13926
|
+
releaseDate: "2026-02-01",
|
|
13927
|
+
knowledgeCutoff: "2025-01-01",
|
|
13928
|
+
features: {
|
|
13929
|
+
reasoning: true,
|
|
13930
|
+
streaming: true,
|
|
13931
|
+
structuredOutput: true,
|
|
13932
|
+
functionCalling: true,
|
|
13933
|
+
fineTuning: false,
|
|
13934
|
+
predictedOutputs: false,
|
|
13935
|
+
realtime: false,
|
|
13936
|
+
vision: true,
|
|
13937
|
+
audio: true,
|
|
13938
|
+
video: true,
|
|
13939
|
+
batchAPI: true,
|
|
13940
|
+
promptCaching: true,
|
|
13941
|
+
input: {
|
|
13942
|
+
tokens: 1048576,
|
|
13943
|
+
text: true,
|
|
13944
|
+
image: true,
|
|
13945
|
+
audio: true,
|
|
13946
|
+
video: true,
|
|
13947
|
+
cpm: 2,
|
|
13948
|
+
cpmCached: 0.2
|
|
13949
|
+
},
|
|
13950
|
+
output: {
|
|
13951
|
+
tokens: 65536,
|
|
13952
|
+
text: true,
|
|
13953
|
+
cpm: 12
|
|
13954
|
+
}
|
|
13955
|
+
}
|
|
13956
|
+
},
|
|
13957
|
+
"gemini-3.1-flash-lite-preview": {
|
|
13958
|
+
name: "gemini-3.1-flash-lite-preview",
|
|
13959
|
+
provider: Vendor.Google,
|
|
13960
|
+
description: "High performance, budget-friendly for high-volume agentic tasks and data extraction",
|
|
13961
|
+
isActive: true,
|
|
13962
|
+
releaseDate: "2026-03-01",
|
|
13963
|
+
knowledgeCutoff: "2025-01-01",
|
|
13964
|
+
features: {
|
|
13965
|
+
reasoning: true,
|
|
13966
|
+
streaming: true,
|
|
13967
|
+
structuredOutput: true,
|
|
13968
|
+
functionCalling: true,
|
|
13969
|
+
fineTuning: false,
|
|
13970
|
+
predictedOutputs: false,
|
|
13971
|
+
realtime: false,
|
|
13972
|
+
vision: true,
|
|
13973
|
+
audio: true,
|
|
13974
|
+
video: true,
|
|
13975
|
+
batchAPI: true,
|
|
13976
|
+
promptCaching: true,
|
|
13977
|
+
input: {
|
|
13978
|
+
tokens: 1048576,
|
|
13979
|
+
text: true,
|
|
13980
|
+
image: true,
|
|
13981
|
+
audio: true,
|
|
13982
|
+
video: true,
|
|
13983
|
+
cpm: 0.25
|
|
13984
|
+
},
|
|
13985
|
+
output: {
|
|
13986
|
+
tokens: 65536,
|
|
13987
|
+
text: true,
|
|
13988
|
+
cpm: 1.5
|
|
13989
|
+
}
|
|
13990
|
+
}
|
|
13991
|
+
},
|
|
13992
|
+
"gemini-3.1-flash-image-preview": {
|
|
13993
|
+
name: "gemini-3.1-flash-image-preview",
|
|
13994
|
+
provider: Vendor.Google,
|
|
13995
|
+
description: "High-efficiency image generation with up to 4K output, search grounding support",
|
|
13996
|
+
isActive: true,
|
|
13997
|
+
releaseDate: "2026-02-01",
|
|
13998
|
+
knowledgeCutoff: "2025-01-01",
|
|
13999
|
+
features: {
|
|
14000
|
+
reasoning: true,
|
|
14001
|
+
streaming: true,
|
|
14002
|
+
structuredOutput: false,
|
|
14003
|
+
functionCalling: false,
|
|
14004
|
+
fineTuning: false,
|
|
14005
|
+
predictedOutputs: false,
|
|
14006
|
+
realtime: false,
|
|
14007
|
+
vision: true,
|
|
14008
|
+
audio: false,
|
|
14009
|
+
video: false,
|
|
14010
|
+
batchAPI: true,
|
|
14011
|
+
promptCaching: false,
|
|
14012
|
+
input: {
|
|
14013
|
+
tokens: 131072,
|
|
14014
|
+
text: true,
|
|
14015
|
+
image: true,
|
|
14016
|
+
cpm: 0.25
|
|
14017
|
+
},
|
|
14018
|
+
output: {
|
|
14019
|
+
tokens: 32768,
|
|
14020
|
+
text: true,
|
|
14021
|
+
image: true,
|
|
14022
|
+
cpm: 1.5
|
|
14023
|
+
}
|
|
14024
|
+
}
|
|
14025
|
+
},
|
|
13399
14026
|
// Gemini 3 Series (Preview)
|
|
13400
14027
|
"gemini-3-flash-preview": {
|
|
13401
14028
|
name: "gemini-3-flash-preview",
|
|
13402
14029
|
provider: Vendor.Google,
|
|
13403
|
-
description: "
|
|
14030
|
+
description: "Most powerful agentic and coding model with frontier-class reasoning",
|
|
13404
14031
|
isActive: true,
|
|
13405
|
-
|
|
13406
|
-
|
|
14032
|
+
preferred: true,
|
|
14033
|
+
releaseDate: "2025-12-01",
|
|
14034
|
+
knowledgeCutoff: "2025-01-01",
|
|
13407
14035
|
features: {
|
|
13408
14036
|
reasoning: true,
|
|
13409
14037
|
streaming: true,
|
|
@@ -13418,27 +14046,28 @@ var MODEL_REGISTRY = {
|
|
|
13418
14046
|
batchAPI: true,
|
|
13419
14047
|
promptCaching: true,
|
|
13420
14048
|
input: {
|
|
13421
|
-
tokens:
|
|
14049
|
+
tokens: 1048576,
|
|
13422
14050
|
text: true,
|
|
13423
14051
|
image: true,
|
|
13424
14052
|
audio: true,
|
|
13425
14053
|
video: true,
|
|
13426
|
-
cpm: 0.
|
|
14054
|
+
cpm: 0.5,
|
|
14055
|
+
cpmCached: 0.05
|
|
13427
14056
|
},
|
|
13428
14057
|
output: {
|
|
13429
14058
|
tokens: 65536,
|
|
13430
14059
|
text: true,
|
|
13431
|
-
cpm:
|
|
14060
|
+
cpm: 3
|
|
13432
14061
|
}
|
|
13433
14062
|
}
|
|
13434
14063
|
},
|
|
13435
14064
|
"gemini-3-pro-preview": {
|
|
13436
14065
|
name: "gemini-3-pro-preview",
|
|
13437
14066
|
provider: Vendor.Google,
|
|
13438
|
-
description: "
|
|
14067
|
+
description: "Deprecated. Shutting down March 9, 2026. Migrate to gemini-3.1-pro-preview",
|
|
13439
14068
|
isActive: true,
|
|
13440
14069
|
releaseDate: "2025-11-18",
|
|
13441
|
-
knowledgeCutoff: "2025-
|
|
14070
|
+
knowledgeCutoff: "2025-01-01",
|
|
13442
14071
|
features: {
|
|
13443
14072
|
reasoning: true,
|
|
13444
14073
|
streaming: true,
|
|
@@ -13453,7 +14082,7 @@ var MODEL_REGISTRY = {
|
|
|
13453
14082
|
batchAPI: true,
|
|
13454
14083
|
promptCaching: true,
|
|
13455
14084
|
input: {
|
|
13456
|
-
tokens:
|
|
14085
|
+
tokens: 1048576,
|
|
13457
14086
|
text: true,
|
|
13458
14087
|
image: true,
|
|
13459
14088
|
audio: true,
|
|
@@ -13470,14 +14099,14 @@ var MODEL_REGISTRY = {
|
|
|
13470
14099
|
"gemini-3-pro-image-preview": {
|
|
13471
14100
|
name: "gemini-3-pro-image-preview",
|
|
13472
14101
|
provider: Vendor.Google,
|
|
13473
|
-
description: "
|
|
14102
|
+
description: "Professional-grade image generation and editing with reasoning",
|
|
13474
14103
|
isActive: true,
|
|
13475
14104
|
releaseDate: "2025-11-18",
|
|
13476
|
-
knowledgeCutoff: "2025-
|
|
14105
|
+
knowledgeCutoff: "2025-01-01",
|
|
13477
14106
|
features: {
|
|
13478
14107
|
reasoning: true,
|
|
13479
14108
|
streaming: true,
|
|
13480
|
-
structuredOutput:
|
|
14109
|
+
structuredOutput: true,
|
|
13481
14110
|
functionCalling: false,
|
|
13482
14111
|
fineTuning: false,
|
|
13483
14112
|
predictedOutputs: false,
|
|
@@ -13486,15 +14115,15 @@ var MODEL_REGISTRY = {
|
|
|
13486
14115
|
audio: false,
|
|
13487
14116
|
video: false,
|
|
13488
14117
|
batchAPI: true,
|
|
13489
|
-
promptCaching:
|
|
14118
|
+
promptCaching: false,
|
|
13490
14119
|
input: {
|
|
13491
|
-
tokens:
|
|
14120
|
+
tokens: 65536,
|
|
13492
14121
|
text: true,
|
|
13493
14122
|
image: true,
|
|
13494
14123
|
cpm: 1.25
|
|
13495
14124
|
},
|
|
13496
14125
|
output: {
|
|
13497
|
-
tokens:
|
|
14126
|
+
tokens: 32768,
|
|
13498
14127
|
text: true,
|
|
13499
14128
|
image: true,
|
|
13500
14129
|
cpm: 10
|
|
@@ -13505,7 +14134,7 @@ var MODEL_REGISTRY = {
|
|
|
13505
14134
|
"gemini-2.5-pro": {
|
|
13506
14135
|
name: "gemini-2.5-pro",
|
|
13507
14136
|
provider: Vendor.Google,
|
|
13508
|
-
description: "
|
|
14137
|
+
description: "Most advanced model for complex tasks with deep reasoning and coding",
|
|
13509
14138
|
isActive: true,
|
|
13510
14139
|
releaseDate: "2025-03-01",
|
|
13511
14140
|
knowledgeCutoff: "2025-01-01",
|
|
@@ -13523,12 +14152,13 @@ var MODEL_REGISTRY = {
|
|
|
13523
14152
|
batchAPI: true,
|
|
13524
14153
|
promptCaching: true,
|
|
13525
14154
|
input: {
|
|
13526
|
-
tokens:
|
|
14155
|
+
tokens: 1048576,
|
|
13527
14156
|
text: true,
|
|
13528
14157
|
image: true,
|
|
13529
14158
|
audio: true,
|
|
13530
14159
|
video: true,
|
|
13531
|
-
cpm: 1.25
|
|
14160
|
+
cpm: 1.25,
|
|
14161
|
+
cpmCached: 0.125
|
|
13532
14162
|
},
|
|
13533
14163
|
output: {
|
|
13534
14164
|
tokens: 65536,
|
|
@@ -13540,7 +14170,7 @@ var MODEL_REGISTRY = {
|
|
|
13540
14170
|
"gemini-2.5-flash": {
|
|
13541
14171
|
name: "gemini-2.5-flash",
|
|
13542
14172
|
provider: Vendor.Google,
|
|
13543
|
-
description: "
|
|
14173
|
+
description: "Best price-performance for low-latency, high-volume tasks with reasoning",
|
|
13544
14174
|
isActive: true,
|
|
13545
14175
|
releaseDate: "2025-06-17",
|
|
13546
14176
|
knowledgeCutoff: "2025-01-01",
|
|
@@ -13558,24 +14188,25 @@ var MODEL_REGISTRY = {
|
|
|
13558
14188
|
batchAPI: true,
|
|
13559
14189
|
promptCaching: true,
|
|
13560
14190
|
input: {
|
|
13561
|
-
tokens:
|
|
14191
|
+
tokens: 1048576,
|
|
13562
14192
|
text: true,
|
|
13563
14193
|
image: true,
|
|
13564
14194
|
audio: true,
|
|
13565
14195
|
video: true,
|
|
13566
|
-
cpm: 0.
|
|
14196
|
+
cpm: 0.3,
|
|
14197
|
+
cpmCached: 0.03
|
|
13567
14198
|
},
|
|
13568
14199
|
output: {
|
|
13569
14200
|
tokens: 65536,
|
|
13570
14201
|
text: true,
|
|
13571
|
-
cpm:
|
|
14202
|
+
cpm: 2.5
|
|
13572
14203
|
}
|
|
13573
14204
|
}
|
|
13574
14205
|
},
|
|
13575
14206
|
"gemini-2.5-flash-lite": {
|
|
13576
14207
|
name: "gemini-2.5-flash-lite",
|
|
13577
14208
|
provider: Vendor.Google,
|
|
13578
|
-
description: "
|
|
14209
|
+
description: "Fastest and most budget-friendly multimodal model in the 2.5 family",
|
|
13579
14210
|
isActive: true,
|
|
13580
14211
|
releaseDate: "2025-06-17",
|
|
13581
14212
|
knowledgeCutoff: "2025-01-01",
|
|
@@ -13593,31 +14224,31 @@ var MODEL_REGISTRY = {
|
|
|
13593
14224
|
batchAPI: true,
|
|
13594
14225
|
promptCaching: true,
|
|
13595
14226
|
input: {
|
|
13596
|
-
tokens:
|
|
14227
|
+
tokens: 1048576,
|
|
13597
14228
|
text: true,
|
|
13598
14229
|
image: true,
|
|
13599
14230
|
audio: true,
|
|
13600
14231
|
video: true,
|
|
13601
|
-
cpm: 0.
|
|
14232
|
+
cpm: 0.1
|
|
13602
14233
|
},
|
|
13603
14234
|
output: {
|
|
13604
14235
|
tokens: 65536,
|
|
13605
14236
|
text: true,
|
|
13606
|
-
cpm: 0.
|
|
14237
|
+
cpm: 0.4
|
|
13607
14238
|
}
|
|
13608
14239
|
}
|
|
13609
14240
|
},
|
|
13610
14241
|
"gemini-2.5-flash-image": {
|
|
13611
14242
|
name: "gemini-2.5-flash-image",
|
|
13612
14243
|
provider: Vendor.Google,
|
|
13613
|
-
description: "
|
|
14244
|
+
description: "Fast native image generation and editing (Nano Banana)",
|
|
13614
14245
|
isActive: true,
|
|
13615
|
-
releaseDate: "2025-
|
|
13616
|
-
knowledgeCutoff: "2025-
|
|
14246
|
+
releaseDate: "2025-10-01",
|
|
14247
|
+
knowledgeCutoff: "2025-06-01",
|
|
13617
14248
|
features: {
|
|
13618
|
-
reasoning:
|
|
14249
|
+
reasoning: false,
|
|
13619
14250
|
streaming: true,
|
|
13620
|
-
structuredOutput:
|
|
14251
|
+
structuredOutput: true,
|
|
13621
14252
|
functionCalling: false,
|
|
13622
14253
|
fineTuning: false,
|
|
13623
14254
|
predictedOutputs: false,
|
|
@@ -13628,13 +14259,13 @@ var MODEL_REGISTRY = {
|
|
|
13628
14259
|
batchAPI: true,
|
|
13629
14260
|
promptCaching: true,
|
|
13630
14261
|
input: {
|
|
13631
|
-
tokens:
|
|
14262
|
+
tokens: 65536,
|
|
13632
14263
|
text: true,
|
|
13633
14264
|
image: true,
|
|
13634
14265
|
cpm: 0.15
|
|
13635
14266
|
},
|
|
13636
14267
|
output: {
|
|
13637
|
-
tokens:
|
|
14268
|
+
tokens: 32768,
|
|
13638
14269
|
text: true,
|
|
13639
14270
|
image: true,
|
|
13640
14271
|
cpm: 0.6
|
|
@@ -13642,7 +14273,7 @@ var MODEL_REGISTRY = {
|
|
|
13642
14273
|
}
|
|
13643
14274
|
},
|
|
13644
14275
|
// ============================================================================
|
|
13645
|
-
// xAI Grok Models (Verified from docs.x.ai -
|
|
14276
|
+
// xAI Grok Models (Verified from docs.x.ai - March 2026)
|
|
13646
14277
|
// ============================================================================
|
|
13647
14278
|
// Grok 4.1 Series (2M context, fast)
|
|
13648
14279
|
"grok-4-1-fast-reasoning": {
|
|
@@ -13663,13 +14294,14 @@ var MODEL_REGISTRY = {
|
|
|
13663
14294
|
vision: true,
|
|
13664
14295
|
audio: false,
|
|
13665
14296
|
video: false,
|
|
13666
|
-
batchAPI:
|
|
13667
|
-
promptCaching:
|
|
14297
|
+
batchAPI: true,
|
|
14298
|
+
promptCaching: true,
|
|
13668
14299
|
input: {
|
|
13669
14300
|
tokens: 2e6,
|
|
13670
14301
|
text: true,
|
|
13671
14302
|
image: true,
|
|
13672
|
-
cpm: 0.2
|
|
14303
|
+
cpm: 0.2,
|
|
14304
|
+
cpmCached: 0.05
|
|
13673
14305
|
},
|
|
13674
14306
|
output: {
|
|
13675
14307
|
tokens: 65536,
|
|
@@ -13696,13 +14328,14 @@ var MODEL_REGISTRY = {
|
|
|
13696
14328
|
vision: true,
|
|
13697
14329
|
audio: false,
|
|
13698
14330
|
video: false,
|
|
13699
|
-
batchAPI:
|
|
13700
|
-
promptCaching:
|
|
14331
|
+
batchAPI: true,
|
|
14332
|
+
promptCaching: true,
|
|
13701
14333
|
input: {
|
|
13702
14334
|
tokens: 2e6,
|
|
13703
14335
|
text: true,
|
|
13704
14336
|
image: true,
|
|
13705
|
-
cpm: 0.2
|
|
14337
|
+
cpm: 0.2,
|
|
14338
|
+
cpmCached: 0.05
|
|
13706
14339
|
},
|
|
13707
14340
|
output: {
|
|
13708
14341
|
tokens: 65536,
|
|
@@ -13730,12 +14363,13 @@ var MODEL_REGISTRY = {
|
|
|
13730
14363
|
vision: false,
|
|
13731
14364
|
audio: false,
|
|
13732
14365
|
video: false,
|
|
13733
|
-
batchAPI:
|
|
13734
|
-
promptCaching:
|
|
14366
|
+
batchAPI: true,
|
|
14367
|
+
promptCaching: true,
|
|
13735
14368
|
input: {
|
|
13736
14369
|
tokens: 256e3,
|
|
13737
14370
|
text: true,
|
|
13738
|
-
cpm: 0.2
|
|
14371
|
+
cpm: 0.2,
|
|
14372
|
+
cpmCached: 0.02
|
|
13739
14373
|
},
|
|
13740
14374
|
output: {
|
|
13741
14375
|
tokens: 32768,
|
|
@@ -13748,7 +14382,7 @@ var MODEL_REGISTRY = {
|
|
|
13748
14382
|
"grok-4-fast-reasoning": {
|
|
13749
14383
|
name: "grok-4-fast-reasoning",
|
|
13750
14384
|
provider: Vendor.Grok,
|
|
13751
|
-
description: "Fast Grok 4 with reasoning capabilities, 2M context window",
|
|
14385
|
+
description: "Fast Grok 4 with reasoning capabilities, 2M context window, vision support",
|
|
13752
14386
|
isActive: true,
|
|
13753
14387
|
releaseDate: "2025-09-01",
|
|
13754
14388
|
knowledgeCutoff: "2024-11-01",
|
|
@@ -13760,15 +14394,17 @@ var MODEL_REGISTRY = {
|
|
|
13760
14394
|
fineTuning: false,
|
|
13761
14395
|
predictedOutputs: false,
|
|
13762
14396
|
realtime: false,
|
|
13763
|
-
vision:
|
|
14397
|
+
vision: true,
|
|
13764
14398
|
audio: false,
|
|
13765
14399
|
video: false,
|
|
13766
|
-
batchAPI:
|
|
13767
|
-
promptCaching:
|
|
14400
|
+
batchAPI: true,
|
|
14401
|
+
promptCaching: true,
|
|
13768
14402
|
input: {
|
|
13769
14403
|
tokens: 2e6,
|
|
13770
14404
|
text: true,
|
|
13771
|
-
|
|
14405
|
+
image: true,
|
|
14406
|
+
cpm: 0.2,
|
|
14407
|
+
cpmCached: 0.05
|
|
13772
14408
|
},
|
|
13773
14409
|
output: {
|
|
13774
14410
|
tokens: 65536,
|
|
@@ -13795,13 +14431,14 @@ var MODEL_REGISTRY = {
|
|
|
13795
14431
|
vision: true,
|
|
13796
14432
|
audio: false,
|
|
13797
14433
|
video: false,
|
|
13798
|
-
batchAPI:
|
|
13799
|
-
promptCaching:
|
|
14434
|
+
batchAPI: true,
|
|
14435
|
+
promptCaching: true,
|
|
13800
14436
|
input: {
|
|
13801
14437
|
tokens: 2e6,
|
|
13802
14438
|
text: true,
|
|
13803
14439
|
image: true,
|
|
13804
|
-
cpm: 0.2
|
|
14440
|
+
cpm: 0.2,
|
|
14441
|
+
cpmCached: 0.05
|
|
13805
14442
|
},
|
|
13806
14443
|
output: {
|
|
13807
14444
|
tokens: 65536,
|
|
@@ -13813,12 +14450,12 @@ var MODEL_REGISTRY = {
|
|
|
13813
14450
|
"grok-4-0709": {
|
|
13814
14451
|
name: "grok-4-0709",
|
|
13815
14452
|
provider: Vendor.Grok,
|
|
13816
|
-
description: "Grok 4 flagship model (July 2025 release), 256K context, vision support",
|
|
14453
|
+
description: "Grok 4 flagship model (July 2025 release), 256K context, vision support, reasoning",
|
|
13817
14454
|
isActive: true,
|
|
13818
14455
|
releaseDate: "2025-07-09",
|
|
13819
14456
|
knowledgeCutoff: "2024-11-01",
|
|
13820
14457
|
features: {
|
|
13821
|
-
reasoning:
|
|
14458
|
+
reasoning: true,
|
|
13822
14459
|
streaming: true,
|
|
13823
14460
|
structuredOutput: true,
|
|
13824
14461
|
functionCalling: true,
|
|
@@ -13828,13 +14465,14 @@ var MODEL_REGISTRY = {
|
|
|
13828
14465
|
vision: true,
|
|
13829
14466
|
audio: false,
|
|
13830
14467
|
video: false,
|
|
13831
|
-
batchAPI:
|
|
13832
|
-
promptCaching:
|
|
14468
|
+
batchAPI: true,
|
|
14469
|
+
promptCaching: true,
|
|
13833
14470
|
input: {
|
|
13834
14471
|
tokens: 256e3,
|
|
13835
14472
|
text: true,
|
|
13836
14473
|
image: true,
|
|
13837
|
-
cpm: 3
|
|
14474
|
+
cpm: 3,
|
|
14475
|
+
cpmCached: 0.75
|
|
13838
14476
|
},
|
|
13839
14477
|
output: {
|
|
13840
14478
|
tokens: 32768,
|
|
@@ -13847,12 +14485,12 @@ var MODEL_REGISTRY = {
|
|
|
13847
14485
|
"grok-3-mini": {
|
|
13848
14486
|
name: "grok-3-mini",
|
|
13849
14487
|
provider: Vendor.Grok,
|
|
13850
|
-
description: "Lightweight, cost-efficient model
|
|
14488
|
+
description: "Lightweight, cost-efficient model with reasoning, 131K context",
|
|
13851
14489
|
isActive: true,
|
|
13852
14490
|
releaseDate: "2025-06-01",
|
|
13853
14491
|
knowledgeCutoff: "2024-11-01",
|
|
13854
14492
|
features: {
|
|
13855
|
-
reasoning:
|
|
14493
|
+
reasoning: true,
|
|
13856
14494
|
streaming: true,
|
|
13857
14495
|
structuredOutput: true,
|
|
13858
14496
|
functionCalling: true,
|
|
@@ -13862,12 +14500,13 @@ var MODEL_REGISTRY = {
|
|
|
13862
14500
|
vision: false,
|
|
13863
14501
|
audio: false,
|
|
13864
14502
|
video: false,
|
|
13865
|
-
batchAPI:
|
|
13866
|
-
promptCaching:
|
|
14503
|
+
batchAPI: true,
|
|
14504
|
+
promptCaching: true,
|
|
13867
14505
|
input: {
|
|
13868
14506
|
tokens: 131072,
|
|
13869
14507
|
text: true,
|
|
13870
|
-
cpm: 0.3
|
|
14508
|
+
cpm: 0.3,
|
|
14509
|
+
cpmCached: 0.07
|
|
13871
14510
|
},
|
|
13872
14511
|
output: {
|
|
13873
14512
|
tokens: 32768,
|
|
@@ -13894,12 +14533,13 @@ var MODEL_REGISTRY = {
|
|
|
13894
14533
|
vision: false,
|
|
13895
14534
|
audio: false,
|
|
13896
14535
|
video: false,
|
|
13897
|
-
batchAPI:
|
|
13898
|
-
promptCaching:
|
|
14536
|
+
batchAPI: true,
|
|
14537
|
+
promptCaching: true,
|
|
13899
14538
|
input: {
|
|
13900
14539
|
tokens: 131072,
|
|
13901
14540
|
text: true,
|
|
13902
|
-
cpm: 3
|
|
14541
|
+
cpm: 3,
|
|
14542
|
+
cpmCached: 0.75
|
|
13903
14543
|
},
|
|
13904
14544
|
output: {
|
|
13905
14545
|
tokens: 32768,
|
|
@@ -13908,11 +14548,11 @@ var MODEL_REGISTRY = {
|
|
|
13908
14548
|
}
|
|
13909
14549
|
}
|
|
13910
14550
|
},
|
|
13911
|
-
// Grok 2 Series (
|
|
14551
|
+
// Grok 2 Series (Legacy - not in current docs)
|
|
13912
14552
|
"grok-2-vision-1212": {
|
|
13913
14553
|
name: "grok-2-vision-1212",
|
|
13914
14554
|
provider: Vendor.Grok,
|
|
13915
|
-
description: "
|
|
14555
|
+
description: "Legacy vision model for image understanding, 32K context. Not in current xAI docs",
|
|
13916
14556
|
isActive: true,
|
|
13917
14557
|
releaseDate: "2024-12-12",
|
|
13918
14558
|
knowledgeCutoff: "2024-11-01",
|
|
@@ -17058,27 +17698,6 @@ ${formatValue(entry.value)}`).join("\n\n")
|
|
|
17058
17698
|
// src/core/context-nextgen/plugins/ToolCatalogPluginNextGen.ts
|
|
17059
17699
|
init_Logger();
|
|
17060
17700
|
var DEFAULT_MAX_LOADED = 10;
|
|
17061
|
-
var TOOL_CATALOG_INSTRUCTIONS = `## Tool Catalog
|
|
17062
|
-
|
|
17063
|
-
You have access to a dynamic tool catalog. Not all tools are loaded at once \u2014 use these metatools to discover and load what you need:
|
|
17064
|
-
|
|
17065
|
-
**tool_catalog_search** \u2014 Browse available tool categories and search for specific tools.
|
|
17066
|
-
- No params \u2192 list all available categories with descriptions
|
|
17067
|
-
- \`category\` \u2192 list tools in that category
|
|
17068
|
-
- \`query\` \u2192 keyword search across categories and tools
|
|
17069
|
-
|
|
17070
|
-
**tool_catalog_load** \u2014 Load a category's tools so you can use them.
|
|
17071
|
-
- Tools become available immediately after loading.
|
|
17072
|
-
- If you need tools from a category, load it first.
|
|
17073
|
-
|
|
17074
|
-
**tool_catalog_unload** \u2014 Unload a category to free token budget.
|
|
17075
|
-
- Unloaded tools are no longer sent to you.
|
|
17076
|
-
- Use when you're done with a category.
|
|
17077
|
-
|
|
17078
|
-
**Best practices:**
|
|
17079
|
-
- Search first to find the right category before loading.
|
|
17080
|
-
- Unload categories you no longer need to keep context lean.
|
|
17081
|
-
- Categories marked [LOADED] are already available.`;
|
|
17082
17701
|
var catalogSearchDefinition = {
|
|
17083
17702
|
type: "function",
|
|
17084
17703
|
function: {
|
|
@@ -17137,6 +17756,8 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17137
17756
|
name = "tool_catalog";
|
|
17138
17757
|
/** category name → array of tool names that were loaded */
|
|
17139
17758
|
_loadedCategories = /* @__PURE__ */ new Map();
|
|
17759
|
+
/** Categories that cannot be unloaded */
|
|
17760
|
+
_pinnedCategories = /* @__PURE__ */ new Set();
|
|
17140
17761
|
/** Reference to the ToolManager for registering/disabling tools */
|
|
17141
17762
|
_toolManager = null;
|
|
17142
17763
|
/** Cached connector categories — discovered once in setToolManager() */
|
|
@@ -17152,12 +17773,17 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17152
17773
|
maxLoadedCategories: DEFAULT_MAX_LOADED,
|
|
17153
17774
|
...config
|
|
17154
17775
|
};
|
|
17776
|
+
if (this._config.pinned?.length) {
|
|
17777
|
+
for (const cat of this._config.pinned) {
|
|
17778
|
+
this._pinnedCategories.add(cat);
|
|
17779
|
+
}
|
|
17780
|
+
}
|
|
17155
17781
|
}
|
|
17156
17782
|
// ========================================================================
|
|
17157
17783
|
// Plugin Interface
|
|
17158
17784
|
// ========================================================================
|
|
17159
17785
|
getInstructions() {
|
|
17160
|
-
return
|
|
17786
|
+
return this.buildInstructions();
|
|
17161
17787
|
}
|
|
17162
17788
|
async getContent() {
|
|
17163
17789
|
const categories = this.getAllowedCategories();
|
|
@@ -17168,15 +17794,15 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17168
17794
|
if (loaded.length > 0) {
|
|
17169
17795
|
lines.push(`**Loaded:** ${loaded.join(", ")}`);
|
|
17170
17796
|
}
|
|
17171
|
-
lines.push(`**Available categories:** ${categories.length}`);
|
|
17797
|
+
lines.push(`**Available categories:** ${categories.length + this.getConnectorCategories().length}`);
|
|
17172
17798
|
for (const cat of categories) {
|
|
17173
17799
|
const tools = ToolCatalogRegistry.getToolsInCategory(cat.name);
|
|
17174
|
-
const
|
|
17175
|
-
lines.push(`- **${cat.displayName}** (${tools.length} tools)${
|
|
17800
|
+
const markers = this.getCategoryMarkers(cat.name);
|
|
17801
|
+
lines.push(`- **${cat.displayName}** (${tools.length} tools)${markers}: ${cat.description}`);
|
|
17176
17802
|
}
|
|
17177
17803
|
for (const cc of this.getConnectorCategories()) {
|
|
17178
|
-
const
|
|
17179
|
-
lines.push(`- **${cc.displayName}** (${cc.toolCount} tools)${
|
|
17804
|
+
const markers = this.getCategoryMarkers(cc.name);
|
|
17805
|
+
lines.push(`- **${cc.displayName}** (${cc.toolCount} tools)${markers}: ${cc.description}`);
|
|
17180
17806
|
}
|
|
17181
17807
|
const content = lines.join("\n");
|
|
17182
17808
|
this.updateTokenCache(this.estimator.estimateTokens(content));
|
|
@@ -17187,7 +17813,8 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17187
17813
|
loadedCategories: Array.from(this._loadedCategories.entries()).map(([name, tools]) => ({
|
|
17188
17814
|
category: name,
|
|
17189
17815
|
toolCount: tools.length,
|
|
17190
|
-
tools
|
|
17816
|
+
tools,
|
|
17817
|
+
pinned: this._pinnedCategories.has(name)
|
|
17191
17818
|
}))
|
|
17192
17819
|
};
|
|
17193
17820
|
}
|
|
@@ -17214,11 +17841,14 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17214
17841
|
return [searchTool, loadTool, unloadTool];
|
|
17215
17842
|
}
|
|
17216
17843
|
isCompactable() {
|
|
17217
|
-
|
|
17844
|
+
for (const category of this._loadedCategories.keys()) {
|
|
17845
|
+
if (!this._pinnedCategories.has(category)) return true;
|
|
17846
|
+
}
|
|
17847
|
+
return false;
|
|
17218
17848
|
}
|
|
17219
17849
|
async compact(targetTokensToFree) {
|
|
17220
17850
|
if (!this._toolManager || this._loadedCategories.size === 0) return 0;
|
|
17221
|
-
const categoriesByLastUsed = this.getCategoriesSortedByLastUsed();
|
|
17851
|
+
const categoriesByLastUsed = this.getCategoriesSortedByLastUsed().filter((cat) => !this._pinnedCategories.has(cat));
|
|
17222
17852
|
let freed = 0;
|
|
17223
17853
|
for (const category of categoriesByLastUsed) {
|
|
17224
17854
|
if (freed >= targetTokensToFree) break;
|
|
@@ -17259,6 +17889,7 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17259
17889
|
}
|
|
17260
17890
|
destroy() {
|
|
17261
17891
|
this._loadedCategories.clear();
|
|
17892
|
+
this._pinnedCategories.clear();
|
|
17262
17893
|
this._toolManager = null;
|
|
17263
17894
|
this._connectorCategories = null;
|
|
17264
17895
|
this._destroyed = true;
|
|
@@ -17272,11 +17903,20 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17272
17903
|
setToolManager(tm) {
|
|
17273
17904
|
this._toolManager = tm;
|
|
17274
17905
|
this._connectorCategories = ToolCatalogRegistry.discoverConnectorCategories({
|
|
17275
|
-
scope: this._config.categoryScope,
|
|
17276
17906
|
identities: this._config.identities
|
|
17277
17907
|
});
|
|
17908
|
+
for (const category of this._pinnedCategories) {
|
|
17909
|
+
const result = this.executeLoad(category);
|
|
17910
|
+
if (result.error) {
|
|
17911
|
+
exports.logger.warn(
|
|
17912
|
+
{ category, error: result.error },
|
|
17913
|
+
`[ToolCatalogPlugin] Failed to load pinned category '${category}'`
|
|
17914
|
+
);
|
|
17915
|
+
}
|
|
17916
|
+
}
|
|
17278
17917
|
if (this._config.autoLoadCategories?.length) {
|
|
17279
17918
|
for (const category of this._config.autoLoadCategories) {
|
|
17919
|
+
if (this._pinnedCategories.has(category)) continue;
|
|
17280
17920
|
const result = this.executeLoad(category);
|
|
17281
17921
|
if (result.error) {
|
|
17282
17922
|
exports.logger.warn(
|
|
@@ -17291,6 +17931,10 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17291
17931
|
get loadedCategories() {
|
|
17292
17932
|
return Array.from(this._loadedCategories.keys());
|
|
17293
17933
|
}
|
|
17934
|
+
/** Get set of pinned category names */
|
|
17935
|
+
get pinnedCategories() {
|
|
17936
|
+
return this._pinnedCategories;
|
|
17937
|
+
}
|
|
17294
17938
|
// ========================================================================
|
|
17295
17939
|
// Metatool Implementations
|
|
17296
17940
|
// ========================================================================
|
|
@@ -17311,6 +17955,7 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17311
17955
|
return {
|
|
17312
17956
|
category,
|
|
17313
17957
|
loaded,
|
|
17958
|
+
pinned: this._pinnedCategories.has(category),
|
|
17314
17959
|
tools: tools.map((t) => ({
|
|
17315
17960
|
name: t.name,
|
|
17316
17961
|
displayName: t.displayName,
|
|
@@ -17332,7 +17977,8 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17332
17977
|
displayName: cat.displayName,
|
|
17333
17978
|
description: cat.description,
|
|
17334
17979
|
toolCount: tools.length,
|
|
17335
|
-
loaded: this._loadedCategories.has(cat.name)
|
|
17980
|
+
loaded: this._loadedCategories.has(cat.name),
|
|
17981
|
+
pinned: this._pinnedCategories.has(cat.name)
|
|
17336
17982
|
});
|
|
17337
17983
|
}
|
|
17338
17984
|
for (const cc of connectorCats) {
|
|
@@ -17341,7 +17987,8 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17341
17987
|
displayName: cc.displayName,
|
|
17342
17988
|
description: cc.description,
|
|
17343
17989
|
toolCount: cc.toolCount,
|
|
17344
|
-
loaded: this._loadedCategories.has(cc.name)
|
|
17990
|
+
loaded: this._loadedCategories.has(cc.name),
|
|
17991
|
+
pinned: this._pinnedCategories.has(cc.name)
|
|
17345
17992
|
});
|
|
17346
17993
|
}
|
|
17347
17994
|
return { categories: result };
|
|
@@ -17351,20 +17998,28 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17351
17998
|
if (!this._toolManager) {
|
|
17352
17999
|
return { error: "ToolManager not connected. Plugin not properly initialized." };
|
|
17353
18000
|
}
|
|
17354
|
-
|
|
17355
|
-
|
|
18001
|
+
const isConnector = ToolCatalogRegistry.parseConnectorCategory(category) !== null;
|
|
18002
|
+
if (isConnector) {
|
|
18003
|
+
const allowed = this.getConnectorCategories().some((cc) => cc.name === category);
|
|
18004
|
+
if (!allowed) {
|
|
18005
|
+
return { error: `Category '${category}' is not available for this agent.` };
|
|
18006
|
+
}
|
|
18007
|
+
} else {
|
|
18008
|
+
if (!ToolCatalogRegistry.isCategoryAllowed(category, this._config.categoryScope)) {
|
|
18009
|
+
return { error: `Category '${category}' is not available for this agent.` };
|
|
18010
|
+
}
|
|
17356
18011
|
}
|
|
17357
18012
|
if (this._loadedCategories.has(category)) {
|
|
17358
18013
|
const toolNames2 = this._loadedCategories.get(category);
|
|
17359
18014
|
return { loaded: toolNames2.length, tools: toolNames2, alreadyLoaded: true };
|
|
17360
18015
|
}
|
|
17361
|
-
|
|
18016
|
+
const nonPinnedLoaded = this._loadedCategories.size - this._pinnedCategories.size;
|
|
18017
|
+
if (!this._pinnedCategories.has(category) && nonPinnedLoaded >= this._config.maxLoadedCategories) {
|
|
17362
18018
|
return {
|
|
17363
18019
|
error: `Maximum loaded categories (${this._config.maxLoadedCategories}) reached. Unload a category first.`,
|
|
17364
18020
|
loaded: Array.from(this._loadedCategories.keys())
|
|
17365
18021
|
};
|
|
17366
18022
|
}
|
|
17367
|
-
const isConnector = ToolCatalogRegistry.parseConnectorCategory(category) !== null;
|
|
17368
18023
|
let tools;
|
|
17369
18024
|
if (isConnector) {
|
|
17370
18025
|
tools = ToolCatalogRegistry.resolveConnectorCategoryTools(category);
|
|
@@ -17401,6 +18056,9 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17401
18056
|
if (!this._toolManager) {
|
|
17402
18057
|
return { error: "ToolManager not connected." };
|
|
17403
18058
|
}
|
|
18059
|
+
if (this._pinnedCategories.has(category)) {
|
|
18060
|
+
return { error: `Category '${category}' is pinned and cannot be unloaded.` };
|
|
18061
|
+
}
|
|
17404
18062
|
const toolNames = this._loadedCategories.get(category);
|
|
17405
18063
|
if (!toolNames) {
|
|
17406
18064
|
return { unloaded: 0, message: `Category '${category}' is not loaded.` };
|
|
@@ -17426,6 +18084,61 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17426
18084
|
getConnectorCategories() {
|
|
17427
18085
|
return this._connectorCategories ?? [];
|
|
17428
18086
|
}
|
|
18087
|
+
/**
|
|
18088
|
+
* Build status markers for a category (e.g., " [PINNED]", " [LOADED]", " [PINNED] [LOADED]")
|
|
18089
|
+
*/
|
|
18090
|
+
getCategoryMarkers(name) {
|
|
18091
|
+
const parts = [];
|
|
18092
|
+
if (this._pinnedCategories.has(name)) parts.push("[PINNED]");
|
|
18093
|
+
if (this._loadedCategories.has(name)) parts.push("[LOADED]");
|
|
18094
|
+
return parts.length > 0 ? " " + parts.join(" ") : "";
|
|
18095
|
+
}
|
|
18096
|
+
/**
|
|
18097
|
+
* Build dynamic instructions that include the list of available categories.
|
|
18098
|
+
*/
|
|
18099
|
+
buildInstructions() {
|
|
18100
|
+
const lines = [];
|
|
18101
|
+
lines.push("## Tool Catalog");
|
|
18102
|
+
lines.push("");
|
|
18103
|
+
lines.push("Your core tools (memory, context, instructions, etc.) are always available.");
|
|
18104
|
+
lines.push("Additional tool categories can be loaded on demand from the catalog below.");
|
|
18105
|
+
lines.push("");
|
|
18106
|
+
lines.push("**tool_catalog_search** \u2014 Browse available tool categories and search for specific tools.");
|
|
18107
|
+
lines.push(" - No params \u2192 list all available categories with descriptions");
|
|
18108
|
+
lines.push(" - `category` \u2192 list tools in that category");
|
|
18109
|
+
lines.push(" - `query` \u2192 keyword search across categories and tools");
|
|
18110
|
+
lines.push("");
|
|
18111
|
+
lines.push("**tool_catalog_load** \u2014 Load a category's tools so you can use them.");
|
|
18112
|
+
lines.push(" - Tools become available immediately after loading.");
|
|
18113
|
+
lines.push(" - If you need tools from a category, load it first.");
|
|
18114
|
+
lines.push("");
|
|
18115
|
+
lines.push("**tool_catalog_unload** \u2014 Unload a category to free token budget.");
|
|
18116
|
+
lines.push(" - Unloaded tools are no longer sent to you.");
|
|
18117
|
+
lines.push(" - Use when you're done with a category.");
|
|
18118
|
+
lines.push(" - Pinned categories cannot be unloaded.");
|
|
18119
|
+
lines.push("");
|
|
18120
|
+
const builtIn = this.getAllowedCategories();
|
|
18121
|
+
const connectors = this.getConnectorCategories();
|
|
18122
|
+
if (builtIn.length > 0 || connectors.length > 0) {
|
|
18123
|
+
lines.push("**Available categories:**");
|
|
18124
|
+
for (const cat of builtIn) {
|
|
18125
|
+
const tools = ToolCatalogRegistry.getToolsInCategory(cat.name);
|
|
18126
|
+
const pinned = this._pinnedCategories.has(cat.name) ? " [PINNED]" : "";
|
|
18127
|
+
lines.push(`- ${cat.name} (${tools.length} tools)${pinned}: ${cat.description}`);
|
|
18128
|
+
}
|
|
18129
|
+
for (const cc of connectors) {
|
|
18130
|
+
const pinned = this._pinnedCategories.has(cc.name) ? " [PINNED]" : "";
|
|
18131
|
+
lines.push(`- ${cc.name} (${cc.toolCount} tools)${pinned}: ${cc.description}`);
|
|
18132
|
+
}
|
|
18133
|
+
lines.push("");
|
|
18134
|
+
}
|
|
18135
|
+
lines.push("**Best practices:**");
|
|
18136
|
+
lines.push("- Search first to find the right category before loading.");
|
|
18137
|
+
lines.push("- Unload categories you no longer need to keep context lean.");
|
|
18138
|
+
lines.push("- Categories marked [LOADED] are already available.");
|
|
18139
|
+
lines.push("- Categories marked [PINNED] are always available and cannot be unloaded.");
|
|
18140
|
+
return lines.join("\n");
|
|
18141
|
+
}
|
|
17429
18142
|
keywordSearch(query) {
|
|
17430
18143
|
const lq = query.toLowerCase();
|
|
17431
18144
|
const results = [];
|
|
@@ -17463,12 +18176,17 @@ var ToolCatalogPluginNextGen = class extends BasePluginNextGen {
|
|
|
17463
18176
|
return { query, results, totalMatches: results.length };
|
|
17464
18177
|
}
|
|
17465
18178
|
searchConnectorCategory(category) {
|
|
18179
|
+
const allowed = this.getConnectorCategories().some((cc) => cc.name === category);
|
|
18180
|
+
if (!allowed) {
|
|
18181
|
+
return { error: `Category '${category}' is not available for this agent.` };
|
|
18182
|
+
}
|
|
17466
18183
|
const connectorName = ToolCatalogRegistry.parseConnectorCategory(category);
|
|
17467
18184
|
const tools = ToolCatalogRegistry.resolveConnectorCategoryTools(category);
|
|
17468
18185
|
const loaded = this._loadedCategories.has(category);
|
|
17469
18186
|
return {
|
|
17470
18187
|
category,
|
|
17471
18188
|
loaded,
|
|
18189
|
+
pinned: this._pinnedCategories.has(category),
|
|
17472
18190
|
connectorName,
|
|
17473
18191
|
tools: tools.map((t) => ({
|
|
17474
18192
|
name: t.name,
|
|
@@ -20217,6 +20935,9 @@ var StreamEventType = /* @__PURE__ */ ((StreamEventType2) => {
|
|
|
20217
20935
|
StreamEventType2["REASONING_DONE"] = "response.reasoning.done";
|
|
20218
20936
|
StreamEventType2["RESPONSE_COMPLETE"] = "response.complete";
|
|
20219
20937
|
StreamEventType2["ERROR"] = "response.error";
|
|
20938
|
+
StreamEventType2["AUDIO_CHUNK_READY"] = "response.audio_chunk.ready";
|
|
20939
|
+
StreamEventType2["AUDIO_CHUNK_ERROR"] = "response.audio_chunk.error";
|
|
20940
|
+
StreamEventType2["AUDIO_STREAM_COMPLETE"] = "response.audio_stream.complete";
|
|
20220
20941
|
return StreamEventType2;
|
|
20221
20942
|
})(StreamEventType || {});
|
|
20222
20943
|
function isStreamEvent(event, type) {
|
|
@@ -20246,6 +20967,15 @@ function isResponseComplete(event) {
|
|
|
20246
20967
|
function isErrorEvent(event) {
|
|
20247
20968
|
return event.type === "response.error" /* ERROR */;
|
|
20248
20969
|
}
|
|
20970
|
+
function isAudioChunkReady(event) {
|
|
20971
|
+
return event.type === "response.audio_chunk.ready" /* AUDIO_CHUNK_READY */;
|
|
20972
|
+
}
|
|
20973
|
+
function isAudioChunkError(event) {
|
|
20974
|
+
return event.type === "response.audio_chunk.error" /* AUDIO_CHUNK_ERROR */;
|
|
20975
|
+
}
|
|
20976
|
+
function isAudioStreamComplete(event) {
|
|
20977
|
+
return event.type === "response.audio_stream.complete" /* AUDIO_STREAM_COMPLETE */;
|
|
20978
|
+
}
|
|
20249
20979
|
|
|
20250
20980
|
// src/infrastructure/providers/openai/OpenAIResponsesStreamConverter.ts
|
|
20251
20981
|
var OpenAIResponsesStreamConverter = class {
|
|
@@ -20555,9 +21285,17 @@ var OpenAITextProvider = class extends BaseTextProvider {
|
|
|
20555
21285
|
...options.metadata && { metadata: options.metadata }
|
|
20556
21286
|
};
|
|
20557
21287
|
this.applyReasoningConfig(params, options);
|
|
21288
|
+
console.log(
|
|
21289
|
+
`[OpenAITextProvider] generate: calling OpenAI API (model=${options.model}, tools=${params.tools?.length ?? 0})`
|
|
21290
|
+
);
|
|
21291
|
+
const genStartTime = Date.now();
|
|
20558
21292
|
const response = await this.client.responses.create(params);
|
|
21293
|
+
console.log(
|
|
21294
|
+
`[OpenAITextProvider] generate: response received (${Date.now() - genStartTime}ms)`
|
|
21295
|
+
);
|
|
20559
21296
|
return this.converter.convertResponse(response);
|
|
20560
21297
|
} catch (error) {
|
|
21298
|
+
console.error(`[OpenAITextProvider] generate error (model=${options.model}):`, error.message || error);
|
|
20561
21299
|
this.handleError(error, options.model);
|
|
20562
21300
|
throw error;
|
|
20563
21301
|
}
|
|
@@ -20597,9 +21335,27 @@ var OpenAITextProvider = class extends BaseTextProvider {
|
|
|
20597
21335
|
stream: true
|
|
20598
21336
|
};
|
|
20599
21337
|
this.applyReasoningConfig(params, options);
|
|
21338
|
+
console.log(
|
|
21339
|
+
`[OpenAITextProvider] streamGenerate: calling OpenAI API (model=${options.model}, tools=${params.tools?.length ?? 0})`
|
|
21340
|
+
);
|
|
21341
|
+
const streamStartTime = Date.now();
|
|
20600
21342
|
const stream = await this.client.responses.create(params);
|
|
20601
|
-
|
|
21343
|
+
console.log(
|
|
21344
|
+
`[OpenAITextProvider] streamGenerate: OpenAI stream opened (${Date.now() - streamStartTime}ms)`
|
|
21345
|
+
);
|
|
21346
|
+
let chunkCount = 0;
|
|
21347
|
+
for await (const event of this.streamConverter.convertStream(stream)) {
|
|
21348
|
+
chunkCount++;
|
|
21349
|
+
yield event;
|
|
21350
|
+
}
|
|
21351
|
+
console.log(
|
|
21352
|
+
`[OpenAITextProvider] streamGenerate: stream complete (${chunkCount} events, ${Date.now() - streamStartTime}ms total)`
|
|
21353
|
+
);
|
|
20602
21354
|
} catch (error) {
|
|
21355
|
+
console.error(
|
|
21356
|
+
`[OpenAITextProvider] streamGenerate error (model=${options.model}):`,
|
|
21357
|
+
error.message || error
|
|
21358
|
+
);
|
|
20603
21359
|
this.handleError(error, options.model);
|
|
20604
21360
|
throw error;
|
|
20605
21361
|
}
|
|
@@ -21605,12 +22361,20 @@ var AnthropicTextProvider = class extends BaseTextProvider {
|
|
|
21605
22361
|
return this.executeWithCircuitBreaker(async () => {
|
|
21606
22362
|
try {
|
|
21607
22363
|
const anthropicRequest = this.converter.convertRequest(options);
|
|
22364
|
+
console.log(
|
|
22365
|
+
`[AnthropicTextProvider] generate: calling Anthropic API (model=${options.model}, messages=${anthropicRequest.messages?.length ?? 0}, tools=${anthropicRequest.tools?.length ?? 0})`
|
|
22366
|
+
);
|
|
22367
|
+
const genStartTime = Date.now();
|
|
21608
22368
|
const anthropicResponse = await this.client.messages.create({
|
|
21609
22369
|
...anthropicRequest,
|
|
21610
22370
|
stream: false
|
|
21611
22371
|
});
|
|
22372
|
+
console.log(
|
|
22373
|
+
`[AnthropicTextProvider] generate: response received (${Date.now() - genStartTime}ms)`
|
|
22374
|
+
);
|
|
21612
22375
|
return this.converter.convertResponse(anthropicResponse);
|
|
21613
22376
|
} catch (error) {
|
|
22377
|
+
console.error(`[AnthropicTextProvider] generate error (model=${options.model}):`, error.message || error);
|
|
21614
22378
|
this.handleError(error, options.model);
|
|
21615
22379
|
throw error;
|
|
21616
22380
|
}
|
|
@@ -21622,13 +22386,31 @@ var AnthropicTextProvider = class extends BaseTextProvider {
|
|
|
21622
22386
|
async *streamGenerate(options) {
|
|
21623
22387
|
try {
|
|
21624
22388
|
const anthropicRequest = this.converter.convertRequest(options);
|
|
22389
|
+
console.log(
|
|
22390
|
+
`[AnthropicTextProvider] streamGenerate: calling Anthropic API (model=${options.model}, messages=${anthropicRequest.messages?.length ?? 0}, tools=${anthropicRequest.tools?.length ?? 0})`
|
|
22391
|
+
);
|
|
22392
|
+
const streamStartTime = Date.now();
|
|
21625
22393
|
const stream = await this.client.messages.create({
|
|
21626
22394
|
...anthropicRequest,
|
|
21627
22395
|
stream: true
|
|
21628
22396
|
});
|
|
22397
|
+
console.log(
|
|
22398
|
+
`[AnthropicTextProvider] streamGenerate: Anthropic stream opened (${Date.now() - streamStartTime}ms)`
|
|
22399
|
+
);
|
|
21629
22400
|
this.streamConverter.reset();
|
|
21630
|
-
|
|
22401
|
+
let chunkCount = 0;
|
|
22402
|
+
for await (const event of this.streamConverter.convertStream(stream, options.model)) {
|
|
22403
|
+
chunkCount++;
|
|
22404
|
+
yield event;
|
|
22405
|
+
}
|
|
22406
|
+
console.log(
|
|
22407
|
+
`[AnthropicTextProvider] streamGenerate: stream complete (${chunkCount} events, ${Date.now() - streamStartTime}ms total)`
|
|
22408
|
+
);
|
|
21631
22409
|
} catch (error) {
|
|
22410
|
+
console.error(
|
|
22411
|
+
`[AnthropicTextProvider] streamGenerate error (model=${options.model}):`,
|
|
22412
|
+
error.message || error
|
|
22413
|
+
);
|
|
21632
22414
|
this.handleError(error, options.model);
|
|
21633
22415
|
throw error;
|
|
21634
22416
|
} finally {
|
|
@@ -22422,6 +23204,10 @@ var GoogleTextProvider = class extends BaseTextProvider {
|
|
|
22422
23204
|
// First message only
|
|
22423
23205
|
}, null, 2));
|
|
22424
23206
|
}
|
|
23207
|
+
console.log(
|
|
23208
|
+
`[GoogleTextProvider] generate: calling Google API (model=${options.model}, contents=${googleRequest.contents?.length ?? 0} messages, tools=${googleRequest.tools?.[0]?.functionDeclarations?.length ?? 0} tools)`
|
|
23209
|
+
);
|
|
23210
|
+
const genStartTime = Date.now();
|
|
22425
23211
|
const result = await this.client.models.generateContent({
|
|
22426
23212
|
model: options.model,
|
|
22427
23213
|
contents: googleRequest.contents,
|
|
@@ -22432,6 +23218,9 @@ var GoogleTextProvider = class extends BaseTextProvider {
|
|
|
22432
23218
|
...googleRequest.generationConfig
|
|
22433
23219
|
}
|
|
22434
23220
|
});
|
|
23221
|
+
console.log(
|
|
23222
|
+
`[GoogleTextProvider] generate: response received (${Date.now() - genStartTime}ms)`
|
|
23223
|
+
);
|
|
22435
23224
|
if (process.env.DEBUG_GOOGLE) {
|
|
22436
23225
|
console.error("[DEBUG] Google Response:", JSON.stringify({
|
|
22437
23226
|
candidates: result.candidates?.map((c) => ({
|
|
@@ -22450,6 +23239,7 @@ var GoogleTextProvider = class extends BaseTextProvider {
|
|
|
22450
23239
|
}
|
|
22451
23240
|
return response;
|
|
22452
23241
|
} catch (error) {
|
|
23242
|
+
console.error(`[GoogleTextProvider] generate error (model=${options.model}):`, error.message || error);
|
|
22453
23243
|
this.converter.clearMappings();
|
|
22454
23244
|
this.handleError(error, options.model);
|
|
22455
23245
|
throw error;
|
|
@@ -22462,6 +23252,10 @@ var GoogleTextProvider = class extends BaseTextProvider {
|
|
|
22462
23252
|
async *streamGenerate(options) {
|
|
22463
23253
|
try {
|
|
22464
23254
|
const googleRequest = await this.converter.convertRequest(options);
|
|
23255
|
+
console.log(
|
|
23256
|
+
`[GoogleTextProvider] streamGenerate: calling Google API (model=${options.model}, contents=${googleRequest.contents?.length ?? 0} messages, tools=${googleRequest.tools?.[0]?.functionDeclarations?.length ?? 0} tools)`
|
|
23257
|
+
);
|
|
23258
|
+
const streamStartTime = Date.now();
|
|
22465
23259
|
const stream = await this.client.models.generateContentStream({
|
|
22466
23260
|
model: options.model,
|
|
22467
23261
|
contents: googleRequest.contents,
|
|
@@ -22472,13 +23266,27 @@ var GoogleTextProvider = class extends BaseTextProvider {
|
|
|
22472
23266
|
...googleRequest.generationConfig
|
|
22473
23267
|
}
|
|
22474
23268
|
});
|
|
23269
|
+
console.log(
|
|
23270
|
+
`[GoogleTextProvider] streamGenerate: Google stream opened (${Date.now() - streamStartTime}ms)`
|
|
23271
|
+
);
|
|
22475
23272
|
this.streamConverter.reset();
|
|
22476
|
-
|
|
23273
|
+
let chunkCount = 0;
|
|
23274
|
+
for await (const event of this.streamConverter.convertStream(stream, options.model)) {
|
|
23275
|
+
chunkCount++;
|
|
23276
|
+
yield event;
|
|
23277
|
+
}
|
|
23278
|
+
console.log(
|
|
23279
|
+
`[GoogleTextProvider] streamGenerate: stream complete (${chunkCount} events, ${Date.now() - streamStartTime}ms total)`
|
|
23280
|
+
);
|
|
22477
23281
|
if (!this.streamConverter.hasToolCalls()) {
|
|
22478
23282
|
this.converter.clearMappings();
|
|
22479
23283
|
this.streamConverter.clear();
|
|
22480
23284
|
}
|
|
22481
23285
|
} catch (error) {
|
|
23286
|
+
console.error(
|
|
23287
|
+
`[GoogleTextProvider] streamGenerate error (model=${options.model}):`,
|
|
23288
|
+
error.message || error
|
|
23289
|
+
);
|
|
22482
23290
|
this.converter.clearMappings();
|
|
22483
23291
|
this.streamConverter.clear();
|
|
22484
23292
|
this.handleError(error, options.model);
|
|
@@ -22565,6 +23373,10 @@ var VertexAITextProvider = class extends BaseTextProvider {
|
|
|
22565
23373
|
async generate(options) {
|
|
22566
23374
|
try {
|
|
22567
23375
|
const googleRequest = await this.converter.convertRequest(options);
|
|
23376
|
+
console.log(
|
|
23377
|
+
`[VertexAITextProvider] generate: calling Vertex AI (model=${options.model}, contents=${googleRequest.contents?.length ?? 0} messages, tools=${googleRequest.tools?.[0]?.functionDeclarations?.length ?? 0} tools)`
|
|
23378
|
+
);
|
|
23379
|
+
const genStartTime = Date.now();
|
|
22568
23380
|
const result = await this.client.models.generateContent({
|
|
22569
23381
|
model: options.model,
|
|
22570
23382
|
contents: googleRequest.contents,
|
|
@@ -22575,8 +23387,12 @@ var VertexAITextProvider = class extends BaseTextProvider {
|
|
|
22575
23387
|
...googleRequest.generationConfig
|
|
22576
23388
|
}
|
|
22577
23389
|
});
|
|
23390
|
+
console.log(
|
|
23391
|
+
`[VertexAITextProvider] generate: response received (${Date.now() - genStartTime}ms)`
|
|
23392
|
+
);
|
|
22578
23393
|
return this.converter.convertResponse(result);
|
|
22579
23394
|
} catch (error) {
|
|
23395
|
+
console.error(`[VertexAITextProvider] generate error (model=${options.model}):`, error.message || error);
|
|
22580
23396
|
this.handleError(error, options.model);
|
|
22581
23397
|
throw error;
|
|
22582
23398
|
}
|
|
@@ -22587,6 +23403,10 @@ var VertexAITextProvider = class extends BaseTextProvider {
|
|
|
22587
23403
|
async *streamGenerate(options) {
|
|
22588
23404
|
try {
|
|
22589
23405
|
const googleRequest = await this.converter.convertRequest(options);
|
|
23406
|
+
console.log(
|
|
23407
|
+
`[VertexAITextProvider] streamGenerate: calling Vertex AI (model=${options.model}, contents=${googleRequest.contents?.length ?? 0} messages, tools=${googleRequest.tools?.[0]?.functionDeclarations?.length ?? 0} tools)`
|
|
23408
|
+
);
|
|
23409
|
+
const streamStartTime = Date.now();
|
|
22590
23410
|
const stream = await this.client.models.generateContentStream({
|
|
22591
23411
|
model: options.model,
|
|
22592
23412
|
contents: googleRequest.contents,
|
|
@@ -22597,9 +23417,23 @@ var VertexAITextProvider = class extends BaseTextProvider {
|
|
|
22597
23417
|
...googleRequest.generationConfig
|
|
22598
23418
|
}
|
|
22599
23419
|
});
|
|
23420
|
+
console.log(
|
|
23421
|
+
`[VertexAITextProvider] streamGenerate: Vertex AI stream opened (${Date.now() - streamStartTime}ms)`
|
|
23422
|
+
);
|
|
22600
23423
|
const streamConverter = new GoogleStreamConverter();
|
|
22601
|
-
|
|
23424
|
+
let chunkCount = 0;
|
|
23425
|
+
for await (const event of streamConverter.convertStream(stream, options.model)) {
|
|
23426
|
+
chunkCount++;
|
|
23427
|
+
yield event;
|
|
23428
|
+
}
|
|
23429
|
+
console.log(
|
|
23430
|
+
`[VertexAITextProvider] streamGenerate: stream complete (${chunkCount} events, ${Date.now() - streamStartTime}ms total)`
|
|
23431
|
+
);
|
|
22602
23432
|
} catch (error) {
|
|
23433
|
+
console.error(
|
|
23434
|
+
`[VertexAITextProvider] streamGenerate error (model=${options.model}):`,
|
|
23435
|
+
error.message || error
|
|
23436
|
+
);
|
|
22603
23437
|
this.handleError(error, options.model);
|
|
22604
23438
|
throw error;
|
|
22605
23439
|
}
|
|
@@ -24540,6 +25374,20 @@ var Agent = class _Agent extends BaseAgent {
|
|
|
24540
25374
|
timestamp: /* @__PURE__ */ new Date(),
|
|
24541
25375
|
duration: totalDuration
|
|
24542
25376
|
});
|
|
25377
|
+
const hasTextOutput = response.output_text?.trim() || response.output?.some(
|
|
25378
|
+
(item) => "content" in item && Array.isArray(item.content) && item.content.some((c) => c.type === "output_text" /* OUTPUT_TEXT */ && c.text?.trim())
|
|
25379
|
+
);
|
|
25380
|
+
if (!hasTextOutput) {
|
|
25381
|
+
console.warn(
|
|
25382
|
+
`[Agent] WARNING: ${methodName} completed with zero text output (executionId=${executionId}, iterations=${this.executionContext?.metrics.iterationCount ?? "?"}, tokens=${response.usage?.total_tokens ?? 0})`
|
|
25383
|
+
);
|
|
25384
|
+
this.emit("execution:empty_output", {
|
|
25385
|
+
executionId,
|
|
25386
|
+
timestamp: /* @__PURE__ */ new Date(),
|
|
25387
|
+
duration: totalDuration,
|
|
25388
|
+
usage: response.usage
|
|
25389
|
+
});
|
|
25390
|
+
}
|
|
24543
25391
|
const duration = Date.now() - startTime;
|
|
24544
25392
|
this._logger.info({ duration }, `Agent ${methodName} completed`);
|
|
24545
25393
|
exports.metrics.timing(`agent.${methodName}.duration`, duration, { model: this.model, connector: this.connector.name });
|
|
@@ -24594,6 +25442,17 @@ var Agent = class _Agent extends BaseAgent {
|
|
|
24594
25442
|
}
|
|
24595
25443
|
const iterationStartTime = Date.now();
|
|
24596
25444
|
const prepared = await this._agentContext.prepare();
|
|
25445
|
+
const b1 = prepared.budget;
|
|
25446
|
+
const bd1 = b1.breakdown;
|
|
25447
|
+
const bp1 = [
|
|
25448
|
+
`sysPrompt=${bd1.systemPrompt}`,
|
|
25449
|
+
`PI=${bd1.persistentInstructions}`,
|
|
25450
|
+
bd1.pluginInstructions ? `pluginInstr=${bd1.pluginInstructions}` : "",
|
|
25451
|
+
...Object.entries(bd1.pluginContents || {}).map(([k, v]) => `plugin:${k}=${v}`)
|
|
25452
|
+
].filter(Boolean).join(" ");
|
|
25453
|
+
console.log(
|
|
25454
|
+
`[Agent] [Context] iteration=${iteration} tokens: ${b1.totalUsed}/${b1.maxTokens} (${b1.utilizationPercent.toFixed(1)}%) tools=${b1.toolsTokens} conversation=${b1.conversationTokens} system=${b1.systemMessageTokens} input=${b1.currentInputTokens}` + (bp1 ? ` | ${bp1}` : "") + (prepared.compacted ? ` COMPACTED: ${prepared.compactionLog.join("; ")}` : "")
|
|
25455
|
+
);
|
|
24597
25456
|
const response = await this.generateWithHooks(prepared.input, iteration, executionId);
|
|
24598
25457
|
const toolCalls = this.extractToolCalls(response.output);
|
|
24599
25458
|
this._agentContext.addAssistantResponse(response.output);
|
|
@@ -24708,13 +25567,23 @@ var Agent = class _Agent extends BaseAgent {
|
|
|
24708
25567
|
* Build placeholder response for streaming finalization
|
|
24709
25568
|
*/
|
|
24710
25569
|
_buildPlaceholderResponse(executionId, startTime, streamState) {
|
|
25570
|
+
const outputText = streamState.getAllText();
|
|
25571
|
+
const output = [];
|
|
25572
|
+
if (outputText && outputText.trim()) {
|
|
25573
|
+
output.push({
|
|
25574
|
+
type: "message",
|
|
25575
|
+
role: "assistant" /* ASSISTANT */,
|
|
25576
|
+
content: [{ type: "output_text" /* OUTPUT_TEXT */, text: outputText }]
|
|
25577
|
+
});
|
|
25578
|
+
}
|
|
24711
25579
|
return {
|
|
24712
25580
|
id: executionId,
|
|
24713
25581
|
object: "response",
|
|
24714
25582
|
created_at: Math.floor(startTime / 1e3),
|
|
24715
25583
|
status: "completed",
|
|
24716
25584
|
model: this.model,
|
|
24717
|
-
output
|
|
25585
|
+
output,
|
|
25586
|
+
output_text: outputText || void 0,
|
|
24718
25587
|
usage: streamState.usage
|
|
24719
25588
|
};
|
|
24720
25589
|
}
|
|
@@ -24734,6 +25603,17 @@ var Agent = class _Agent extends BaseAgent {
|
|
|
24734
25603
|
break;
|
|
24735
25604
|
}
|
|
24736
25605
|
const prepared = await this._agentContext.prepare();
|
|
25606
|
+
const b2 = prepared.budget;
|
|
25607
|
+
const bd2 = b2.breakdown;
|
|
25608
|
+
const bp2 = [
|
|
25609
|
+
`sysPrompt=${bd2.systemPrompt}`,
|
|
25610
|
+
`PI=${bd2.persistentInstructions}`,
|
|
25611
|
+
bd2.pluginInstructions ? `pluginInstr=${bd2.pluginInstructions}` : "",
|
|
25612
|
+
...Object.entries(bd2.pluginContents || {}).map(([k, v]) => `plugin:${k}=${v}`)
|
|
25613
|
+
].filter(Boolean).join(" ");
|
|
25614
|
+
console.log(
|
|
25615
|
+
`[Agent] [Context] iteration=${iteration} tokens: ${b2.totalUsed}/${b2.maxTokens} (${b2.utilizationPercent.toFixed(1)}%) tools=${b2.toolsTokens} conversation=${b2.conversationTokens} system=${b2.systemMessageTokens} input=${b2.currentInputTokens}` + (bp2 ? ` | ${bp2}` : "") + (prepared.compacted ? ` COMPACTED: ${prepared.compactionLog.join("; ")}` : "")
|
|
25616
|
+
);
|
|
24737
25617
|
const iterationStreamState = new StreamState(executionId, this.model);
|
|
24738
25618
|
const toolCallsMap = /* @__PURE__ */ new Map();
|
|
24739
25619
|
yield* this.streamGenerateWithHooks(
|
|
@@ -33403,6 +34283,56 @@ var OpenAITTSProvider = class extends BaseMediaProvider {
|
|
|
33403
34283
|
{ model: options.model, voice: options.voice }
|
|
33404
34284
|
);
|
|
33405
34285
|
}
|
|
34286
|
+
/**
|
|
34287
|
+
* Check if streaming is supported for the given format
|
|
34288
|
+
*/
|
|
34289
|
+
supportsStreaming(format) {
|
|
34290
|
+
if (!format) return true;
|
|
34291
|
+
return ["pcm", "wav", "mp3", "opus", "aac", "flac"].includes(format);
|
|
34292
|
+
}
|
|
34293
|
+
/**
|
|
34294
|
+
* Stream TTS audio chunks as they arrive from the API
|
|
34295
|
+
*/
|
|
34296
|
+
async *synthesizeStream(options) {
|
|
34297
|
+
const format = this.mapFormat(options.format);
|
|
34298
|
+
const requestParams = {
|
|
34299
|
+
model: options.model,
|
|
34300
|
+
input: options.input,
|
|
34301
|
+
voice: options.voice,
|
|
34302
|
+
response_format: format,
|
|
34303
|
+
speed: options.speed
|
|
34304
|
+
};
|
|
34305
|
+
if (options.vendorOptions?.instructions) {
|
|
34306
|
+
requestParams.instructions = options.vendorOptions.instructions;
|
|
34307
|
+
}
|
|
34308
|
+
this.logOperationStart("tts.synthesizeStream", {
|
|
34309
|
+
model: options.model,
|
|
34310
|
+
voice: options.voice,
|
|
34311
|
+
inputLength: options.input.length,
|
|
34312
|
+
format
|
|
34313
|
+
});
|
|
34314
|
+
try {
|
|
34315
|
+
const response = await this.client.audio.speech.create(requestParams);
|
|
34316
|
+
const body = response.body;
|
|
34317
|
+
if (!body) {
|
|
34318
|
+
throw new Error("No response body from OpenAI TTS API");
|
|
34319
|
+
}
|
|
34320
|
+
let totalBytes = 0;
|
|
34321
|
+
for await (const chunk of body) {
|
|
34322
|
+
const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
|
|
34323
|
+
totalBytes += buf.length;
|
|
34324
|
+
yield { audio: buf, isFinal: false };
|
|
34325
|
+
}
|
|
34326
|
+
yield { audio: Buffer.alloc(0), isFinal: true };
|
|
34327
|
+
this.logOperationComplete("tts.synthesizeStream", {
|
|
34328
|
+
model: options.model,
|
|
34329
|
+
totalBytes
|
|
34330
|
+
});
|
|
34331
|
+
} catch (error) {
|
|
34332
|
+
this.handleError(error);
|
|
34333
|
+
throw error;
|
|
34334
|
+
}
|
|
34335
|
+
}
|
|
33406
34336
|
/**
|
|
33407
34337
|
* List available voices (returns static list for OpenAI)
|
|
33408
34338
|
*/
|
|
@@ -34004,13 +34934,13 @@ var TTS_MODEL_REGISTRY = {
|
|
|
34004
34934
|
name: "gemini-2.5-flash-preview-tts",
|
|
34005
34935
|
displayName: "Gemini 2.5 Flash TTS",
|
|
34006
34936
|
provider: Vendor.Google,
|
|
34007
|
-
description: "Google Gemini 2.5 Flash TTS - optimized for low latency",
|
|
34937
|
+
description: "Google Gemini 2.5 Flash TTS - optimized for low latency, 30 voices, 70+ languages",
|
|
34008
34938
|
isActive: true,
|
|
34009
34939
|
releaseDate: "2025-01-01",
|
|
34010
34940
|
sources: {
|
|
34011
34941
|
documentation: "https://ai.google.dev/gemini-api/docs/speech-generation",
|
|
34012
34942
|
pricing: "https://ai.google.dev/pricing",
|
|
34013
|
-
lastVerified: "2026-
|
|
34943
|
+
lastVerified: "2026-03-04"
|
|
34014
34944
|
},
|
|
34015
34945
|
capabilities: {
|
|
34016
34946
|
voices: GEMINI_VOICES,
|
|
@@ -34029,20 +34959,27 @@ var TTS_MODEL_REGISTRY = {
|
|
|
34029
34959
|
wordTimestamps: false
|
|
34030
34960
|
},
|
|
34031
34961
|
limits: { maxInputLength: 32e3 }
|
|
34032
|
-
// 32k
|
|
34962
|
+
// 32k token context window
|
|
34963
|
+
},
|
|
34964
|
+
pricing: {
|
|
34965
|
+
perMInputTokens: 0.5,
|
|
34966
|
+
// $0.50 per 1M input tokens
|
|
34967
|
+
perMOutputTokens: 10,
|
|
34968
|
+
// $10.00 per 1M output tokens
|
|
34969
|
+
currency: "USD"
|
|
34033
34970
|
}
|
|
34034
34971
|
},
|
|
34035
34972
|
"gemini-2.5-pro-preview-tts": {
|
|
34036
34973
|
name: "gemini-2.5-pro-preview-tts",
|
|
34037
34974
|
displayName: "Gemini 2.5 Pro TTS",
|
|
34038
34975
|
provider: Vendor.Google,
|
|
34039
|
-
description: "Google Gemini 2.5 Pro TTS - optimized for quality",
|
|
34976
|
+
description: "Google Gemini 2.5 Pro TTS - optimized for quality, 30 voices, 70+ languages",
|
|
34040
34977
|
isActive: true,
|
|
34041
34978
|
releaseDate: "2025-01-01",
|
|
34042
34979
|
sources: {
|
|
34043
34980
|
documentation: "https://ai.google.dev/gemini-api/docs/speech-generation",
|
|
34044
34981
|
pricing: "https://ai.google.dev/pricing",
|
|
34045
|
-
lastVerified: "2026-
|
|
34982
|
+
lastVerified: "2026-03-04"
|
|
34046
34983
|
},
|
|
34047
34984
|
capabilities: {
|
|
34048
34985
|
voices: GEMINI_VOICES,
|
|
@@ -34061,7 +34998,14 @@ var TTS_MODEL_REGISTRY = {
|
|
|
34061
34998
|
wordTimestamps: false
|
|
34062
34999
|
},
|
|
34063
35000
|
limits: { maxInputLength: 32e3 }
|
|
34064
|
-
// 32k
|
|
35001
|
+
// 32k token context window
|
|
35002
|
+
},
|
|
35003
|
+
pricing: {
|
|
35004
|
+
perMInputTokens: 1,
|
|
35005
|
+
// $1.00 per 1M input tokens
|
|
35006
|
+
perMOutputTokens: 20,
|
|
35007
|
+
// $20.00 per 1M output tokens
|
|
35008
|
+
currency: "USD"
|
|
34065
35009
|
}
|
|
34066
35010
|
}
|
|
34067
35011
|
};
|
|
@@ -34074,10 +35018,18 @@ function getTTSModelsWithFeature(feature) {
|
|
|
34074
35018
|
(model) => model.isActive && model.capabilities.features[feature]
|
|
34075
35019
|
);
|
|
34076
35020
|
}
|
|
34077
|
-
function calculateTTSCost(modelName, characterCount) {
|
|
35021
|
+
function calculateTTSCost(modelName, characterCount, options) {
|
|
34078
35022
|
const model = getTTSModelInfo(modelName);
|
|
34079
35023
|
if (!model?.pricing) return null;
|
|
34080
|
-
|
|
35024
|
+
if (model.pricing.per1kCharacters) {
|
|
35025
|
+
return characterCount / 1e3 * model.pricing.per1kCharacters;
|
|
35026
|
+
}
|
|
35027
|
+
if (model.pricing.perMInputTokens && options?.inputTokens != null) {
|
|
35028
|
+
const inputCost = options.inputTokens / 1e6 * model.pricing.perMInputTokens;
|
|
35029
|
+
const outputCost = options.outputTokens ? options.outputTokens / 1e6 * (model.pricing.perMOutputTokens ?? 0) : 0;
|
|
35030
|
+
return inputCost + outputCost;
|
|
35031
|
+
}
|
|
35032
|
+
return null;
|
|
34081
35033
|
}
|
|
34082
35034
|
var TextToSpeech = class _TextToSpeech {
|
|
34083
35035
|
provider;
|
|
@@ -34123,6 +35075,35 @@ var TextToSpeech = class _TextToSpeech {
|
|
|
34123
35075
|
const response = await this.synthesize(text, options);
|
|
34124
35076
|
await fs17__namespace.writeFile(filePath, response.audio);
|
|
34125
35077
|
}
|
|
35078
|
+
// ======================== Streaming Methods ========================
|
|
35079
|
+
/**
|
|
35080
|
+
* Check if the underlying provider supports streaming TTS
|
|
35081
|
+
*/
|
|
35082
|
+
supportsStreaming(format) {
|
|
35083
|
+
const provider = this.provider;
|
|
35084
|
+
return typeof provider.supportsStreaming === "function" && provider.supportsStreaming(format);
|
|
35085
|
+
}
|
|
35086
|
+
/**
|
|
35087
|
+
* Stream TTS audio chunks as they arrive from the API.
|
|
35088
|
+
* Falls back to buffered synthesis yielding a single chunk if provider doesn't support streaming.
|
|
35089
|
+
*/
|
|
35090
|
+
async *synthesizeStream(text, options) {
|
|
35091
|
+
const fullOptions = {
|
|
35092
|
+
model: this.config.model ?? this.getDefaultModel(),
|
|
35093
|
+
input: text,
|
|
35094
|
+
voice: options?.voice ?? this.config.voice ?? this.getDefaultVoice(),
|
|
35095
|
+
format: options?.format ?? this.config.format,
|
|
35096
|
+
speed: options?.speed ?? this.config.speed,
|
|
35097
|
+
vendorOptions: options?.vendorOptions
|
|
35098
|
+
};
|
|
35099
|
+
const provider = this.provider;
|
|
35100
|
+
if (typeof provider.synthesizeStream === "function" && provider.supportsStreaming?.(fullOptions.format)) {
|
|
35101
|
+
yield* provider.synthesizeStream(fullOptions);
|
|
35102
|
+
} else {
|
|
35103
|
+
const response = await this.provider.synthesize(fullOptions);
|
|
35104
|
+
yield { audio: response.audio, isFinal: true };
|
|
35105
|
+
}
|
|
35106
|
+
}
|
|
34126
35107
|
// ======================== Introspection Methods ========================
|
|
34127
35108
|
/**
|
|
34128
35109
|
* Get model information for current or specified model
|
|
@@ -35523,7 +36504,13 @@ var IMAGE_MODELS = {
|
|
|
35523
36504
|
/** Imagen 4.0 Ultra: Highest quality */
|
|
35524
36505
|
IMAGEN_4_ULTRA: "imagen-4.0-ultra-generate-001",
|
|
35525
36506
|
/** Imagen 4.0 Fast: Optimized for speed */
|
|
35526
|
-
IMAGEN_4_FAST: "imagen-4.0-fast-generate-001"
|
|
36507
|
+
IMAGEN_4_FAST: "imagen-4.0-fast-generate-001",
|
|
36508
|
+
/** Nano Banana 2: Gemini 3.1 Flash native image gen with 4K support */
|
|
36509
|
+
GEMINI_3_1_FLASH_IMAGE: "gemini-3.1-flash-image-preview",
|
|
36510
|
+
/** Nano Banana Pro: Gemini 3 Pro professional design engine with reasoning */
|
|
36511
|
+
GEMINI_3_PRO_IMAGE: "gemini-3-pro-image-preview",
|
|
36512
|
+
/** Nano Banana: Gemini 2.5 Flash native image gen/editing */
|
|
36513
|
+
GEMINI_2_5_FLASH_IMAGE: "gemini-2.5-flash-image"
|
|
35527
36514
|
},
|
|
35528
36515
|
[Vendor.Grok]: {
|
|
35529
36516
|
/** Grok Imagine Image: xAI image generation with editing support */
|
|
@@ -35707,7 +36694,7 @@ var IMAGE_MODEL_REGISTRY = {
|
|
|
35707
36694
|
sources: {
|
|
35708
36695
|
documentation: "https://ai.google.dev/gemini-api/docs/imagen",
|
|
35709
36696
|
pricing: "https://ai.google.dev/pricing",
|
|
35710
|
-
lastVerified: "2026-
|
|
36697
|
+
lastVerified: "2026-03-04"
|
|
35711
36698
|
},
|
|
35712
36699
|
capabilities: {
|
|
35713
36700
|
sizes: ["1024x1024"],
|
|
@@ -35818,7 +36805,7 @@ var IMAGE_MODEL_REGISTRY = {
|
|
|
35818
36805
|
sources: {
|
|
35819
36806
|
documentation: "https://ai.google.dev/gemini-api/docs/imagen",
|
|
35820
36807
|
pricing: "https://ai.google.dev/pricing",
|
|
35821
|
-
lastVerified: "2026-
|
|
36808
|
+
lastVerified: "2026-03-04"
|
|
35822
36809
|
},
|
|
35823
36810
|
capabilities: {
|
|
35824
36811
|
sizes: ["1024x1024"],
|
|
@@ -35915,7 +36902,8 @@ var IMAGE_MODEL_REGISTRY = {
|
|
|
35915
36902
|
}
|
|
35916
36903
|
},
|
|
35917
36904
|
pricing: {
|
|
35918
|
-
perImage: 0.
|
|
36905
|
+
perImage: 0.06,
|
|
36906
|
+
// Updated per official pricing page (was $0.08)
|
|
35919
36907
|
currency: "USD"
|
|
35920
36908
|
}
|
|
35921
36909
|
},
|
|
@@ -35929,7 +36917,7 @@ var IMAGE_MODEL_REGISTRY = {
|
|
|
35929
36917
|
sources: {
|
|
35930
36918
|
documentation: "https://ai.google.dev/gemini-api/docs/imagen",
|
|
35931
36919
|
pricing: "https://ai.google.dev/pricing",
|
|
35932
|
-
lastVerified: "2026-
|
|
36920
|
+
lastVerified: "2026-03-04"
|
|
35933
36921
|
},
|
|
35934
36922
|
capabilities: {
|
|
35935
36923
|
sizes: ["1024x1024"],
|
|
@@ -36030,6 +37018,141 @@ var IMAGE_MODEL_REGISTRY = {
|
|
|
36030
37018
|
currency: "USD"
|
|
36031
37019
|
}
|
|
36032
37020
|
},
|
|
37021
|
+
// ======================== Google Nano Banana (Gemini Native Image) ========================
|
|
37022
|
+
"gemini-3.1-flash-image-preview": {
|
|
37023
|
+
name: "gemini-3.1-flash-image-preview",
|
|
37024
|
+
displayName: "Nano Banana 2 (Gemini 3.1 Flash Image)",
|
|
37025
|
+
provider: Vendor.Google,
|
|
37026
|
+
description: "High-efficiency native image generation and editing with 4K support and thinking capabilities",
|
|
37027
|
+
isActive: true,
|
|
37028
|
+
releaseDate: "2026-02-01",
|
|
37029
|
+
sources: {
|
|
37030
|
+
documentation: "https://ai.google.dev/gemini-api/docs/models/gemini-3.1-flash-image-preview",
|
|
37031
|
+
pricing: "https://ai.google.dev/pricing",
|
|
37032
|
+
lastVerified: "2026-03-04"
|
|
37033
|
+
},
|
|
37034
|
+
capabilities: {
|
|
37035
|
+
sizes: ["512x512", "1024x1024", "1536x1536", "auto"],
|
|
37036
|
+
aspectRatios: ["1:1", "1:4", "4:1", "1:8", "8:1"],
|
|
37037
|
+
maxImagesPerRequest: 4,
|
|
37038
|
+
outputFormats: ["png", "jpeg"],
|
|
37039
|
+
features: {
|
|
37040
|
+
generation: true,
|
|
37041
|
+
editing: true,
|
|
37042
|
+
variations: false,
|
|
37043
|
+
styleControl: false,
|
|
37044
|
+
qualityControl: true,
|
|
37045
|
+
// Multiple resolution tiers: 0.5K, 1K, 2K, 4K
|
|
37046
|
+
transparency: false,
|
|
37047
|
+
promptRevision: false
|
|
37048
|
+
},
|
|
37049
|
+
limits: { maxPromptLength: 131072 },
|
|
37050
|
+
// 131K input tokens
|
|
37051
|
+
vendorOptions: {
|
|
37052
|
+
outputImageResolution: {
|
|
37053
|
+
type: "enum",
|
|
37054
|
+
label: "Resolution",
|
|
37055
|
+
description: "Output image resolution tier",
|
|
37056
|
+
enum: ["0.5K", "1K", "2K", "4K"],
|
|
37057
|
+
default: "1K",
|
|
37058
|
+
controlType: "select"
|
|
37059
|
+
}
|
|
37060
|
+
}
|
|
37061
|
+
},
|
|
37062
|
+
pricing: {
|
|
37063
|
+
// Per-image, varies by resolution: $0.045 (512px), $0.067 (1K), $0.101 (2K), $0.151 (4K)
|
|
37064
|
+
perImageStandard: 0.067,
|
|
37065
|
+
// 1K default
|
|
37066
|
+
perImageHD: 0.151,
|
|
37067
|
+
// 4K
|
|
37068
|
+
currency: "USD"
|
|
37069
|
+
}
|
|
37070
|
+
},
|
|
37071
|
+
"gemini-3-pro-image-preview": {
|
|
37072
|
+
name: "gemini-3-pro-image-preview",
|
|
37073
|
+
displayName: "Nano Banana Pro (Gemini 3 Pro Image)",
|
|
37074
|
+
provider: Vendor.Google,
|
|
37075
|
+
description: "Professional design engine with reasoning for studio-quality 4K visuals, complex layouts, and precise text rendering",
|
|
37076
|
+
isActive: true,
|
|
37077
|
+
releaseDate: "2025-11-01",
|
|
37078
|
+
sources: {
|
|
37079
|
+
documentation: "https://ai.google.dev/gemini-api/docs/models/gemini-3-pro-image-preview",
|
|
37080
|
+
pricing: "https://ai.google.dev/pricing",
|
|
37081
|
+
lastVerified: "2026-03-04"
|
|
37082
|
+
},
|
|
37083
|
+
capabilities: {
|
|
37084
|
+
sizes: ["1024x1024", "auto"],
|
|
37085
|
+
aspectRatios: ["1:1", "3:4", "4:3", "9:16", "16:9"],
|
|
37086
|
+
maxImagesPerRequest: 4,
|
|
37087
|
+
outputFormats: ["png", "jpeg"],
|
|
37088
|
+
features: {
|
|
37089
|
+
generation: true,
|
|
37090
|
+
editing: true,
|
|
37091
|
+
variations: false,
|
|
37092
|
+
styleControl: true,
|
|
37093
|
+
// Reasoning-driven design
|
|
37094
|
+
qualityControl: true,
|
|
37095
|
+
// 1K, 2K, 4K tiers
|
|
37096
|
+
transparency: false,
|
|
37097
|
+
promptRevision: false
|
|
37098
|
+
},
|
|
37099
|
+
limits: { maxPromptLength: 65536 },
|
|
37100
|
+
// 65K input tokens
|
|
37101
|
+
vendorOptions: {
|
|
37102
|
+
outputImageResolution: {
|
|
37103
|
+
type: "enum",
|
|
37104
|
+
label: "Resolution",
|
|
37105
|
+
description: "Output image resolution tier",
|
|
37106
|
+
enum: ["1K", "2K", "4K"],
|
|
37107
|
+
default: "1K",
|
|
37108
|
+
controlType: "select"
|
|
37109
|
+
}
|
|
37110
|
+
}
|
|
37111
|
+
},
|
|
37112
|
+
pricing: {
|
|
37113
|
+
// $0.134 per 1K/2K image, $0.24 per 4K image
|
|
37114
|
+
perImageStandard: 0.134,
|
|
37115
|
+
// 1K/2K
|
|
37116
|
+
perImageHD: 0.24,
|
|
37117
|
+
// 4K
|
|
37118
|
+
currency: "USD"
|
|
37119
|
+
}
|
|
37120
|
+
},
|
|
37121
|
+
"gemini-2.5-flash-image": {
|
|
37122
|
+
name: "gemini-2.5-flash-image",
|
|
37123
|
+
displayName: "Nano Banana (Gemini 2.5 Flash Image)",
|
|
37124
|
+
provider: Vendor.Google,
|
|
37125
|
+
description: "Native image generation and editing designed for fast, creative workflows",
|
|
37126
|
+
isActive: true,
|
|
37127
|
+
releaseDate: "2025-10-01",
|
|
37128
|
+
sources: {
|
|
37129
|
+
documentation: "https://ai.google.dev/gemini-api/docs/models/gemini-2.5-flash-image",
|
|
37130
|
+
pricing: "https://ai.google.dev/pricing",
|
|
37131
|
+
lastVerified: "2026-03-04"
|
|
37132
|
+
},
|
|
37133
|
+
capabilities: {
|
|
37134
|
+
sizes: ["1024x1024", "auto"],
|
|
37135
|
+
aspectRatios: ["1:1", "3:4", "4:3", "9:16", "16:9"],
|
|
37136
|
+
maxImagesPerRequest: 4,
|
|
37137
|
+
outputFormats: ["png", "jpeg"],
|
|
37138
|
+
features: {
|
|
37139
|
+
generation: true,
|
|
37140
|
+
editing: true,
|
|
37141
|
+
variations: false,
|
|
37142
|
+
styleControl: false,
|
|
37143
|
+
qualityControl: false,
|
|
37144
|
+
transparency: false,
|
|
37145
|
+
promptRevision: false
|
|
37146
|
+
},
|
|
37147
|
+
limits: { maxPromptLength: 65536 }
|
|
37148
|
+
// 65K input tokens
|
|
37149
|
+
},
|
|
37150
|
+
pricing: {
|
|
37151
|
+
perImage: 0.039,
|
|
37152
|
+
// $0.039 per image
|
|
37153
|
+
currency: "USD"
|
|
37154
|
+
}
|
|
37155
|
+
},
|
|
36033
37156
|
// ======================== xAI Grok ========================
|
|
36034
37157
|
"grok-imagine-image": {
|
|
36035
37158
|
name: "grok-imagine-image",
|
|
@@ -36041,11 +37164,11 @@ var IMAGE_MODEL_REGISTRY = {
|
|
|
36041
37164
|
sources: {
|
|
36042
37165
|
documentation: "https://docs.x.ai/docs/guides/image-generation",
|
|
36043
37166
|
pricing: "https://docs.x.ai/docs/models",
|
|
36044
|
-
lastVerified: "2026-
|
|
37167
|
+
lastVerified: "2026-03-04"
|
|
36045
37168
|
},
|
|
36046
37169
|
capabilities: {
|
|
36047
37170
|
sizes: ["1024x1024"],
|
|
36048
|
-
aspectRatios: ["1:1", "4:3", "3:4", "16:9", "9:16", "3:2", "2:3"],
|
|
37171
|
+
aspectRatios: ["1:1", "4:3", "3:4", "16:9", "9:16", "3:2", "2:3", "2:1", "1:2"],
|
|
36049
37172
|
maxImagesPerRequest: 10,
|
|
36050
37173
|
outputFormats: ["png", "jpeg"],
|
|
36051
37174
|
features: {
|
|
@@ -37301,9 +38424,9 @@ var OPENAI_SOURCES = {
|
|
|
37301
38424
|
lastVerified: "2026-01-25"
|
|
37302
38425
|
};
|
|
37303
38426
|
var GOOGLE_SOURCES = {
|
|
37304
|
-
documentation: "https://
|
|
37305
|
-
apiReference: "https://
|
|
37306
|
-
lastVerified: "2026-
|
|
38427
|
+
documentation: "https://ai.google.dev/gemini-api/docs/video",
|
|
38428
|
+
apiReference: "https://ai.google.dev/gemini-api/docs/models/veo",
|
|
38429
|
+
lastVerified: "2026-03-04"
|
|
37307
38430
|
};
|
|
37308
38431
|
var GROK_SOURCES = {
|
|
37309
38432
|
documentation: "https://docs.x.ai/docs/guides/video-generations",
|
|
@@ -37377,14 +38500,16 @@ var VIDEO_MODEL_REGISTRY = {
|
|
|
37377
38500
|
sources: GOOGLE_SOURCES,
|
|
37378
38501
|
capabilities: {
|
|
37379
38502
|
durations: [5, 6, 7, 8],
|
|
37380
|
-
resolutions: [],
|
|
37381
|
-
// Veo 2
|
|
38503
|
+
resolutions: ["720p"],
|
|
38504
|
+
// Veo 2 only supports 720p
|
|
37382
38505
|
aspectRatios: ["16:9", "9:16"],
|
|
37383
38506
|
maxFps: 24,
|
|
37384
38507
|
audio: false,
|
|
37385
|
-
imageToVideo:
|
|
38508
|
+
imageToVideo: false,
|
|
38509
|
+
// Veo 2 does not support reference images
|
|
37386
38510
|
videoExtension: false,
|
|
37387
38511
|
frameControl: true,
|
|
38512
|
+
// First/last frame interpolation supported
|
|
37388
38513
|
features: {
|
|
37389
38514
|
upscaling: false,
|
|
37390
38515
|
styleControl: false,
|
|
@@ -37393,7 +38518,8 @@ var VIDEO_MODEL_REGISTRY = {
|
|
|
37393
38518
|
}
|
|
37394
38519
|
},
|
|
37395
38520
|
pricing: {
|
|
37396
|
-
perSecond: 0.
|
|
38521
|
+
perSecond: 0.35,
|
|
38522
|
+
// Updated per official pricing page (was $0.03)
|
|
37397
38523
|
currency: "USD"
|
|
37398
38524
|
}
|
|
37399
38525
|
},
|
|
@@ -37405,14 +38531,18 @@ var VIDEO_MODEL_REGISTRY = {
|
|
|
37405
38531
|
sources: GOOGLE_SOURCES,
|
|
37406
38532
|
capabilities: {
|
|
37407
38533
|
durations: [4, 6, 8],
|
|
37408
|
-
resolutions: ["720p"],
|
|
37409
|
-
//
|
|
38534
|
+
resolutions: ["720p", "1080p", "4k"],
|
|
38535
|
+
// 1080p/4k require 8s duration
|
|
37410
38536
|
aspectRatios: ["16:9", "9:16"],
|
|
37411
38537
|
maxFps: 24,
|
|
37412
38538
|
audio: true,
|
|
38539
|
+
// Native audio generation
|
|
37413
38540
|
imageToVideo: true,
|
|
37414
|
-
|
|
37415
|
-
|
|
38541
|
+
// Up to 3 reference images
|
|
38542
|
+
videoExtension: true,
|
|
38543
|
+
// Supported (720p only)
|
|
38544
|
+
frameControl: true,
|
|
38545
|
+
// First/last frame interpolation
|
|
37416
38546
|
features: {
|
|
37417
38547
|
upscaling: false,
|
|
37418
38548
|
styleControl: false,
|
|
@@ -37421,7 +38551,8 @@ var VIDEO_MODEL_REGISTRY = {
|
|
|
37421
38551
|
}
|
|
37422
38552
|
},
|
|
37423
38553
|
pricing: {
|
|
37424
|
-
perSecond: 0.
|
|
38554
|
+
perSecond: 0.15,
|
|
38555
|
+
// $0.15 for 720p/1080p, $0.35 for 4K
|
|
37425
38556
|
currency: "USD"
|
|
37426
38557
|
}
|
|
37427
38558
|
},
|
|
@@ -37438,8 +38569,11 @@ var VIDEO_MODEL_REGISTRY = {
|
|
|
37438
38569
|
aspectRatios: ["16:9", "9:16"],
|
|
37439
38570
|
maxFps: 30,
|
|
37440
38571
|
audio: true,
|
|
38572
|
+
// Native audio generation
|
|
37441
38573
|
imageToVideo: true,
|
|
38574
|
+
// Up to 3 reference images
|
|
37442
38575
|
videoExtension: true,
|
|
38576
|
+
// Supported (720p only)
|
|
37443
38577
|
frameControl: true,
|
|
37444
38578
|
features: {
|
|
37445
38579
|
upscaling: true,
|
|
@@ -37449,7 +38583,8 @@ var VIDEO_MODEL_REGISTRY = {
|
|
|
37449
38583
|
}
|
|
37450
38584
|
},
|
|
37451
38585
|
pricing: {
|
|
37452
|
-
perSecond: 0.
|
|
38586
|
+
perSecond: 0.4,
|
|
38587
|
+
// $0.40 for 720p/1080p, $0.60 for 4K
|
|
37453
38588
|
currency: "USD"
|
|
37454
38589
|
}
|
|
37455
38590
|
},
|
|
@@ -37673,6 +38808,694 @@ var VideoGeneration = class _VideoGeneration {
|
|
|
37673
38808
|
}
|
|
37674
38809
|
};
|
|
37675
38810
|
|
|
38811
|
+
// src/capabilities/speech/SentenceSplitter.ts
|
|
38812
|
+
var DEFAULT_ABBREVIATIONS = /* @__PURE__ */ new Set([
|
|
38813
|
+
"dr.",
|
|
38814
|
+
"mr.",
|
|
38815
|
+
"mrs.",
|
|
38816
|
+
"ms.",
|
|
38817
|
+
"prof.",
|
|
38818
|
+
"sr.",
|
|
38819
|
+
"jr.",
|
|
38820
|
+
"st.",
|
|
38821
|
+
"ave.",
|
|
38822
|
+
"blvd.",
|
|
38823
|
+
"rd.",
|
|
38824
|
+
"u.s.",
|
|
38825
|
+
"u.k.",
|
|
38826
|
+
"u.s.a.",
|
|
38827
|
+
"u.n.",
|
|
38828
|
+
"e.g.",
|
|
38829
|
+
"i.e.",
|
|
38830
|
+
"etc.",
|
|
38831
|
+
"vs.",
|
|
38832
|
+
"viz.",
|
|
38833
|
+
"approx.",
|
|
38834
|
+
"dept.",
|
|
38835
|
+
"est.",
|
|
38836
|
+
"inc.",
|
|
38837
|
+
"ltd.",
|
|
38838
|
+
"corp.",
|
|
38839
|
+
"no.",
|
|
38840
|
+
"vol.",
|
|
38841
|
+
"rev.",
|
|
38842
|
+
"gen.",
|
|
38843
|
+
"gov.",
|
|
38844
|
+
"jan.",
|
|
38845
|
+
"feb.",
|
|
38846
|
+
"mar.",
|
|
38847
|
+
"apr.",
|
|
38848
|
+
"jun.",
|
|
38849
|
+
"jul.",
|
|
38850
|
+
"aug.",
|
|
38851
|
+
"sep.",
|
|
38852
|
+
"oct.",
|
|
38853
|
+
"nov.",
|
|
38854
|
+
"dec.",
|
|
38855
|
+
"fig.",
|
|
38856
|
+
"eq.",
|
|
38857
|
+
"ref.",
|
|
38858
|
+
"sec.",
|
|
38859
|
+
"ch.",
|
|
38860
|
+
"min.",
|
|
38861
|
+
"max.",
|
|
38862
|
+
"avg."
|
|
38863
|
+
]);
|
|
38864
|
+
var DEFAULT_OPTIONS = {
|
|
38865
|
+
minChunkLength: 20,
|
|
38866
|
+
maxChunkLength: 500,
|
|
38867
|
+
skipCodeBlocks: true,
|
|
38868
|
+
stripMarkdown: true,
|
|
38869
|
+
additionalAbbreviations: []
|
|
38870
|
+
};
|
|
38871
|
+
var SentenceChunkingStrategy = class {
|
|
38872
|
+
buffer = "";
|
|
38873
|
+
inCodeBlock = false;
|
|
38874
|
+
codeBlockBuffer = "";
|
|
38875
|
+
options;
|
|
38876
|
+
abbreviations;
|
|
38877
|
+
constructor(options) {
|
|
38878
|
+
this.options = { ...DEFAULT_OPTIONS, ...options };
|
|
38879
|
+
this.abbreviations = /* @__PURE__ */ new Set([
|
|
38880
|
+
...DEFAULT_ABBREVIATIONS,
|
|
38881
|
+
...this.options.additionalAbbreviations.map((a) => a.toLowerCase())
|
|
38882
|
+
]);
|
|
38883
|
+
}
|
|
38884
|
+
feed(delta) {
|
|
38885
|
+
this.buffer += delta;
|
|
38886
|
+
return this.extractChunks();
|
|
38887
|
+
}
|
|
38888
|
+
flush() {
|
|
38889
|
+
if (this.inCodeBlock) {
|
|
38890
|
+
this.codeBlockBuffer = "";
|
|
38891
|
+
this.inCodeBlock = false;
|
|
38892
|
+
}
|
|
38893
|
+
const text = this.cleanForSpeech(this.buffer.trim());
|
|
38894
|
+
this.buffer = "";
|
|
38895
|
+
return text.length > 0 ? text : null;
|
|
38896
|
+
}
|
|
38897
|
+
reset() {
|
|
38898
|
+
this.buffer = "";
|
|
38899
|
+
this.inCodeBlock = false;
|
|
38900
|
+
this.codeBlockBuffer = "";
|
|
38901
|
+
}
|
|
38902
|
+
// ======================== Private Methods ========================
|
|
38903
|
+
extractChunks() {
|
|
38904
|
+
const chunks = [];
|
|
38905
|
+
if (this.options.skipCodeBlocks) {
|
|
38906
|
+
this.processCodeBlocks();
|
|
38907
|
+
}
|
|
38908
|
+
let paragraphIdx = this.buffer.indexOf("\n\n");
|
|
38909
|
+
while (paragraphIdx !== -1) {
|
|
38910
|
+
const chunk = this.buffer.slice(0, paragraphIdx).trim();
|
|
38911
|
+
this.buffer = this.buffer.slice(paragraphIdx + 2);
|
|
38912
|
+
if (chunk.length > 0) {
|
|
38913
|
+
const cleaned = this.cleanForSpeech(chunk);
|
|
38914
|
+
if (cleaned.length > 0) {
|
|
38915
|
+
chunks.push(cleaned);
|
|
38916
|
+
}
|
|
38917
|
+
}
|
|
38918
|
+
paragraphIdx = this.buffer.indexOf("\n\n");
|
|
38919
|
+
}
|
|
38920
|
+
let sentenceEnd = this.findSentenceBoundary();
|
|
38921
|
+
while (sentenceEnd !== -1) {
|
|
38922
|
+
const sentence = this.buffer.slice(0, sentenceEnd).trim();
|
|
38923
|
+
this.buffer = this.buffer.slice(sentenceEnd).trimStart();
|
|
38924
|
+
if (sentence.length > 0) {
|
|
38925
|
+
const cleaned = this.cleanForSpeech(sentence);
|
|
38926
|
+
if (cleaned.length > 0) {
|
|
38927
|
+
chunks.push(cleaned);
|
|
38928
|
+
}
|
|
38929
|
+
}
|
|
38930
|
+
sentenceEnd = this.findSentenceBoundary();
|
|
38931
|
+
}
|
|
38932
|
+
if (this.buffer.length > this.options.maxChunkLength) {
|
|
38933
|
+
const splitChunks = this.splitLongText(this.buffer);
|
|
38934
|
+
this.buffer = splitChunks.pop() ?? "";
|
|
38935
|
+
for (const chunk of splitChunks) {
|
|
38936
|
+
const cleaned = this.cleanForSpeech(chunk.trim());
|
|
38937
|
+
if (cleaned.length > 0) {
|
|
38938
|
+
chunks.push(cleaned);
|
|
38939
|
+
}
|
|
38940
|
+
}
|
|
38941
|
+
}
|
|
38942
|
+
return this.mergeSmallChunks(chunks);
|
|
38943
|
+
}
|
|
38944
|
+
/**
|
|
38945
|
+
* Track and remove fenced code blocks from the buffer.
|
|
38946
|
+
* Text inside code blocks is discarded (not spoken).
|
|
38947
|
+
*/
|
|
38948
|
+
processCodeBlocks() {
|
|
38949
|
+
let idx = 0;
|
|
38950
|
+
let result = "";
|
|
38951
|
+
while (idx < this.buffer.length) {
|
|
38952
|
+
if (this.buffer.startsWith("```", idx)) {
|
|
38953
|
+
if (this.inCodeBlock) {
|
|
38954
|
+
this.inCodeBlock = false;
|
|
38955
|
+
this.codeBlockBuffer = "";
|
|
38956
|
+
idx += 3;
|
|
38957
|
+
const newline = this.buffer.indexOf("\n", idx);
|
|
38958
|
+
idx = newline !== -1 ? newline + 1 : this.buffer.length;
|
|
38959
|
+
} else {
|
|
38960
|
+
this.inCodeBlock = true;
|
|
38961
|
+
this.codeBlockBuffer = "";
|
|
38962
|
+
idx += 3;
|
|
38963
|
+
const newline = this.buffer.indexOf("\n", idx);
|
|
38964
|
+
idx = newline !== -1 ? newline + 1 : this.buffer.length;
|
|
38965
|
+
}
|
|
38966
|
+
} else if (this.inCodeBlock) {
|
|
38967
|
+
this.codeBlockBuffer += this.buffer[idx];
|
|
38968
|
+
idx++;
|
|
38969
|
+
} else {
|
|
38970
|
+
result += this.buffer[idx];
|
|
38971
|
+
idx++;
|
|
38972
|
+
}
|
|
38973
|
+
}
|
|
38974
|
+
this.buffer = result;
|
|
38975
|
+
}
|
|
38976
|
+
/**
|
|
38977
|
+
* Find the position right after the next sentence boundary.
|
|
38978
|
+
* Returns -1 if no complete sentence boundary found.
|
|
38979
|
+
*/
|
|
38980
|
+
findSentenceBoundary() {
|
|
38981
|
+
const terminators = [".", "?", "!"];
|
|
38982
|
+
for (let i = 0; i < this.buffer.length; i++) {
|
|
38983
|
+
const ch = this.buffer.charAt(i);
|
|
38984
|
+
if (!terminators.includes(ch)) continue;
|
|
38985
|
+
if (i + 1 >= this.buffer.length) return -1;
|
|
38986
|
+
const nextChar = this.buffer[i + 1];
|
|
38987
|
+
if (nextChar !== " " && nextChar !== "\n" && nextChar !== "\r" && nextChar !== " ") {
|
|
38988
|
+
continue;
|
|
38989
|
+
}
|
|
38990
|
+
if (ch === ".") {
|
|
38991
|
+
if (this.isAbbreviation(i)) continue;
|
|
38992
|
+
if (this.isDecimalNumber(i)) continue;
|
|
38993
|
+
if (this.isEllipsis(i)) continue;
|
|
38994
|
+
}
|
|
38995
|
+
const candidate = this.buffer.slice(0, i + 1).trim();
|
|
38996
|
+
if (candidate.length < this.options.minChunkLength) continue;
|
|
38997
|
+
return i + 1;
|
|
38998
|
+
}
|
|
38999
|
+
return -1;
|
|
39000
|
+
}
|
|
39001
|
+
/**
|
|
39002
|
+
* Check if the period at position `pos` is part of a known abbreviation.
|
|
39003
|
+
*/
|
|
39004
|
+
isAbbreviation(pos) {
|
|
39005
|
+
let wordStart = pos - 1;
|
|
39006
|
+
while (wordStart >= 0 && this.buffer[wordStart] !== " " && this.buffer[wordStart] !== "\n") {
|
|
39007
|
+
wordStart--;
|
|
39008
|
+
}
|
|
39009
|
+
wordStart++;
|
|
39010
|
+
const word = this.buffer.slice(wordStart, pos + 1).toLowerCase();
|
|
39011
|
+
return this.abbreviations.has(word);
|
|
39012
|
+
}
|
|
39013
|
+
/**
|
|
39014
|
+
* Check if the period at position `pos` is a decimal point.
|
|
39015
|
+
* e.g., 3.14, $1.50
|
|
39016
|
+
*/
|
|
39017
|
+
isDecimalNumber(pos) {
|
|
39018
|
+
if (pos === 0 || pos + 1 >= this.buffer.length) return false;
|
|
39019
|
+
const before = this.buffer.charAt(pos - 1);
|
|
39020
|
+
const after = this.buffer.charAt(pos + 1);
|
|
39021
|
+
return /\d/.test(before) && /\d/.test(after);
|
|
39022
|
+
}
|
|
39023
|
+
/**
|
|
39024
|
+
* Check if the period at position `pos` is part of an ellipsis (...).
|
|
39025
|
+
*/
|
|
39026
|
+
isEllipsis(pos) {
|
|
39027
|
+
if (pos >= 2 && this.buffer[pos - 1] === "." && this.buffer[pos - 2] === ".") return true;
|
|
39028
|
+
if (pos + 1 < this.buffer.length && this.buffer[pos + 1] === ".") return true;
|
|
39029
|
+
return false;
|
|
39030
|
+
}
|
|
39031
|
+
/**
|
|
39032
|
+
* Split text that exceeds maxChunkLength at clause boundaries.
|
|
39033
|
+
*/
|
|
39034
|
+
splitLongText(text) {
|
|
39035
|
+
const max = this.options.maxChunkLength;
|
|
39036
|
+
const chunks = [];
|
|
39037
|
+
let remaining = text;
|
|
39038
|
+
while (remaining.length > max) {
|
|
39039
|
+
let splitPos = -1;
|
|
39040
|
+
const clauseBreaks = [",", ";", ":", " \u2014", " \u2013", " -"];
|
|
39041
|
+
for (const brk of clauseBreaks) {
|
|
39042
|
+
const searchRegion = remaining.slice(0, max);
|
|
39043
|
+
const lastPos = searchRegion.lastIndexOf(brk);
|
|
39044
|
+
if (lastPos > this.options.minChunkLength) {
|
|
39045
|
+
splitPos = lastPos + brk.length;
|
|
39046
|
+
break;
|
|
39047
|
+
}
|
|
39048
|
+
}
|
|
39049
|
+
if (splitPos === -1) {
|
|
39050
|
+
const searchRegion = remaining.slice(0, max);
|
|
39051
|
+
splitPos = searchRegion.lastIndexOf(" ");
|
|
39052
|
+
if (splitPos <= this.options.minChunkLength) {
|
|
39053
|
+
splitPos = max;
|
|
39054
|
+
}
|
|
39055
|
+
}
|
|
39056
|
+
chunks.push(remaining.slice(0, splitPos));
|
|
39057
|
+
remaining = remaining.slice(splitPos);
|
|
39058
|
+
}
|
|
39059
|
+
chunks.push(remaining);
|
|
39060
|
+
return chunks;
|
|
39061
|
+
}
|
|
39062
|
+
/**
|
|
39063
|
+
* Merge chunks that are shorter than minChunkLength with the next chunk.
|
|
39064
|
+
*/
|
|
39065
|
+
mergeSmallChunks(chunks) {
|
|
39066
|
+
if (chunks.length <= 1) return chunks;
|
|
39067
|
+
const merged = [];
|
|
39068
|
+
let accumulator = "";
|
|
39069
|
+
for (const chunk of chunks) {
|
|
39070
|
+
if (accumulator.length > 0) {
|
|
39071
|
+
accumulator += " " + chunk;
|
|
39072
|
+
} else {
|
|
39073
|
+
accumulator = chunk;
|
|
39074
|
+
}
|
|
39075
|
+
if (accumulator.length >= this.options.minChunkLength) {
|
|
39076
|
+
merged.push(accumulator);
|
|
39077
|
+
accumulator = "";
|
|
39078
|
+
}
|
|
39079
|
+
}
|
|
39080
|
+
if (accumulator.length > 0) {
|
|
39081
|
+
if (merged.length > 0) {
|
|
39082
|
+
merged[merged.length - 1] += " " + accumulator;
|
|
39083
|
+
} else {
|
|
39084
|
+
merged.push(accumulator);
|
|
39085
|
+
}
|
|
39086
|
+
}
|
|
39087
|
+
return merged;
|
|
39088
|
+
}
|
|
39089
|
+
/**
|
|
39090
|
+
* Strip markdown formatting from text for natural speech.
|
|
39091
|
+
*/
|
|
39092
|
+
cleanForSpeech(text) {
|
|
39093
|
+
if (!this.options.stripMarkdown) return text;
|
|
39094
|
+
let cleaned = text;
|
|
39095
|
+
cleaned = cleaned.replace(/`([^`]+)`/g, "$1");
|
|
39096
|
+
cleaned = cleaned.replace(/\*\*([^*]+)\*\*/g, "$1");
|
|
39097
|
+
cleaned = cleaned.replace(/__([^_]+)__/g, "$1");
|
|
39098
|
+
cleaned = cleaned.replace(/(?<!\*)\*([^*]+)\*(?!\*)/g, "$1");
|
|
39099
|
+
cleaned = cleaned.replace(/(?<!_)_([^_]+)_(?!_)/g, "$1");
|
|
39100
|
+
cleaned = cleaned.replace(/~~([^~]+)~~/g, "$1");
|
|
39101
|
+
cleaned = cleaned.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1");
|
|
39102
|
+
cleaned = cleaned.replace(/!\[([^\]]*)\]\([^)]+\)/g, "");
|
|
39103
|
+
cleaned = cleaned.replace(/^#{1,6}\s+/gm, "");
|
|
39104
|
+
cleaned = cleaned.replace(/^[-*+]\s+/gm, "");
|
|
39105
|
+
cleaned = cleaned.replace(/^\d+\.\s+/gm, "");
|
|
39106
|
+
cleaned = cleaned.replace(/^>\s+/gm, "");
|
|
39107
|
+
cleaned = cleaned.replace(/^[-*_]{3,}\s*$/gm, "");
|
|
39108
|
+
cleaned = cleaned.replace(/\n+/g, " ");
|
|
39109
|
+
cleaned = cleaned.replace(/\s{2,}/g, " ");
|
|
39110
|
+
return cleaned.trim();
|
|
39111
|
+
}
|
|
39112
|
+
};
|
|
39113
|
+
|
|
39114
|
+
// src/capabilities/speech/VoiceStream.ts
|
|
39115
|
+
var VoiceStream = class _VoiceStream extends events.EventEmitter {
|
|
39116
|
+
tts;
|
|
39117
|
+
chunker;
|
|
39118
|
+
format;
|
|
39119
|
+
speed;
|
|
39120
|
+
maxConcurrentTTS;
|
|
39121
|
+
maxQueuedChunks;
|
|
39122
|
+
vendorOptions;
|
|
39123
|
+
streaming;
|
|
39124
|
+
// Pipeline state
|
|
39125
|
+
chunkIndex = 0;
|
|
39126
|
+
totalCharacters = 0;
|
|
39127
|
+
totalDuration = 0;
|
|
39128
|
+
activeJobs = /* @__PURE__ */ new Map();
|
|
39129
|
+
activeTTSCount = 0;
|
|
39130
|
+
interrupted = false;
|
|
39131
|
+
lastResponseId = "";
|
|
39132
|
+
_isDestroyed = false;
|
|
39133
|
+
// Semaphore for TTS concurrency control
|
|
39134
|
+
slotWaiters = [];
|
|
39135
|
+
// Audio event buffer for interleaving with text events
|
|
39136
|
+
audioEventBuffer = [];
|
|
39137
|
+
// Async notification: resolves when new events are pushed to audioEventBuffer
|
|
39138
|
+
bufferNotify = null;
|
|
39139
|
+
// Queue backpressure
|
|
39140
|
+
queueWaiters = [];
|
|
39141
|
+
/**
|
|
39142
|
+
* Create a new VoiceStream instance
|
|
39143
|
+
*/
|
|
39144
|
+
static create(config) {
|
|
39145
|
+
return new _VoiceStream(config);
|
|
39146
|
+
}
|
|
39147
|
+
constructor(config) {
|
|
39148
|
+
super();
|
|
39149
|
+
this.tts = TextToSpeech.create({
|
|
39150
|
+
connector: config.ttsConnector,
|
|
39151
|
+
model: config.ttsModel,
|
|
39152
|
+
voice: config.voice
|
|
39153
|
+
});
|
|
39154
|
+
this.chunker = config.chunkingStrategy ?? new SentenceChunkingStrategy(config.chunkingOptions);
|
|
39155
|
+
this.format = config.format ?? "mp3";
|
|
39156
|
+
this.speed = config.speed ?? 1;
|
|
39157
|
+
this.maxConcurrentTTS = config.maxConcurrentTTS ?? 2;
|
|
39158
|
+
this.maxQueuedChunks = config.maxQueuedChunks ?? 5;
|
|
39159
|
+
this.vendorOptions = config.vendorOptions;
|
|
39160
|
+
this.streaming = config.streaming ?? false;
|
|
39161
|
+
}
|
|
39162
|
+
// ======================== Public API ========================
|
|
39163
|
+
/**
|
|
39164
|
+
* Transform an agent text stream into an augmented stream with audio events.
|
|
39165
|
+
* Original text events pass through unchanged; audio events are interleaved.
|
|
39166
|
+
*
|
|
39167
|
+
* The generator yields events in this order:
|
|
39168
|
+
* 1. All original StreamEvents (pass-through)
|
|
39169
|
+
* 2. AudioChunkReady/AudioChunkError events as TTS completes
|
|
39170
|
+
* 3. AudioStreamComplete as the final audio event
|
|
39171
|
+
*/
|
|
39172
|
+
async *wrap(textStream) {
|
|
39173
|
+
this.reset();
|
|
39174
|
+
try {
|
|
39175
|
+
for await (const event of textStream) {
|
|
39176
|
+
yield event;
|
|
39177
|
+
if (event.response_id) {
|
|
39178
|
+
this.lastResponseId = event.response_id;
|
|
39179
|
+
}
|
|
39180
|
+
if (event.type === "response.output_text.delta" /* OUTPUT_TEXT_DELTA */ && !this.interrupted) {
|
|
39181
|
+
const completedChunks = this.chunker.feed(event.delta);
|
|
39182
|
+
for (const chunk of completedChunks) {
|
|
39183
|
+
await this.scheduleTTS(chunk);
|
|
39184
|
+
}
|
|
39185
|
+
}
|
|
39186
|
+
if ((event.type === "response.output_text.done" /* OUTPUT_TEXT_DONE */ || event.type === "response.complete" /* RESPONSE_COMPLETE */) && !this.interrupted) {
|
|
39187
|
+
const remaining = this.chunker.flush();
|
|
39188
|
+
if (remaining) {
|
|
39189
|
+
await this.scheduleTTS(remaining);
|
|
39190
|
+
}
|
|
39191
|
+
}
|
|
39192
|
+
yield* this.drainAudioBuffer();
|
|
39193
|
+
}
|
|
39194
|
+
while (this.activeJobs.size > 0 || this.audioEventBuffer.length > 0) {
|
|
39195
|
+
if (this.audioEventBuffer.length === 0) {
|
|
39196
|
+
await Promise.race([
|
|
39197
|
+
this.waitForBufferNotify(),
|
|
39198
|
+
...Array.from(this.activeJobs.values()).map((j) => j.promise)
|
|
39199
|
+
]);
|
|
39200
|
+
}
|
|
39201
|
+
yield* this.drainAudioBuffer();
|
|
39202
|
+
}
|
|
39203
|
+
if (this.chunkIndex > 0) {
|
|
39204
|
+
const completeEvent = {
|
|
39205
|
+
type: "response.audio_stream.complete" /* AUDIO_STREAM_COMPLETE */,
|
|
39206
|
+
response_id: this.lastResponseId,
|
|
39207
|
+
total_chunks: this.chunkIndex,
|
|
39208
|
+
total_characters: this.totalCharacters,
|
|
39209
|
+
total_duration_seconds: this.totalDuration > 0 ? this.totalDuration : void 0
|
|
39210
|
+
};
|
|
39211
|
+
yield completeEvent;
|
|
39212
|
+
this.emit("audio:complete", {
|
|
39213
|
+
totalChunks: this.chunkIndex,
|
|
39214
|
+
totalDurationSeconds: this.totalDuration > 0 ? this.totalDuration : void 0
|
|
39215
|
+
});
|
|
39216
|
+
}
|
|
39217
|
+
} finally {
|
|
39218
|
+
this.cleanup();
|
|
39219
|
+
}
|
|
39220
|
+
}
|
|
39221
|
+
/**
|
|
39222
|
+
* Interrupt audio generation. Cancels pending TTS and flushes queue.
|
|
39223
|
+
* Call this when the user sends a new message mid-speech.
|
|
39224
|
+
* Active HTTP requests cannot be cancelled but their results will be discarded.
|
|
39225
|
+
*/
|
|
39226
|
+
interrupt() {
|
|
39227
|
+
this.interrupted = true;
|
|
39228
|
+
const pendingCount = this.activeJobs.size;
|
|
39229
|
+
this.activeJobs.clear();
|
|
39230
|
+
this.activeTTSCount = 0;
|
|
39231
|
+
this.audioEventBuffer = [];
|
|
39232
|
+
this.releaseAllWaiters();
|
|
39233
|
+
this.chunker.reset();
|
|
39234
|
+
this.emit("audio:interrupted", { pendingChunks: pendingCount });
|
|
39235
|
+
}
|
|
39236
|
+
/**
|
|
39237
|
+
* Reset state for a new stream. Called automatically by wrap().
|
|
39238
|
+
*/
|
|
39239
|
+
reset() {
|
|
39240
|
+
this.chunkIndex = 0;
|
|
39241
|
+
this.totalCharacters = 0;
|
|
39242
|
+
this.totalDuration = 0;
|
|
39243
|
+
this.activeJobs.clear();
|
|
39244
|
+
this.activeTTSCount = 0;
|
|
39245
|
+
this.interrupted = false;
|
|
39246
|
+
this.lastResponseId = "";
|
|
39247
|
+
this.audioEventBuffer = [];
|
|
39248
|
+
this.bufferNotify = null;
|
|
39249
|
+
this.slotWaiters = [];
|
|
39250
|
+
this.queueWaiters = [];
|
|
39251
|
+
this.chunker.reset();
|
|
39252
|
+
}
|
|
39253
|
+
destroy() {
|
|
39254
|
+
this.interrupt();
|
|
39255
|
+
this._isDestroyed = true;
|
|
39256
|
+
this.removeAllListeners();
|
|
39257
|
+
}
|
|
39258
|
+
get isDestroyed() {
|
|
39259
|
+
return this._isDestroyed;
|
|
39260
|
+
}
|
|
39261
|
+
// ======================== Private Methods ========================
|
|
39262
|
+
/**
|
|
39263
|
+
* Schedule a text chunk for TTS synthesis.
|
|
39264
|
+
* Awaits a free queue slot if backpressure is active (lossless).
|
|
39265
|
+
*/
|
|
39266
|
+
async scheduleTTS(text) {
|
|
39267
|
+
if (this.interrupted || this._isDestroyed) return;
|
|
39268
|
+
const cleanText = text.trim();
|
|
39269
|
+
if (cleanText.length === 0) return;
|
|
39270
|
+
while (this.activeJobs.size >= this.maxQueuedChunks && !this.interrupted) {
|
|
39271
|
+
await this.waitForQueueSlot();
|
|
39272
|
+
}
|
|
39273
|
+
if (this.interrupted) return;
|
|
39274
|
+
const index = this.chunkIndex++;
|
|
39275
|
+
this.totalCharacters += cleanText.length;
|
|
39276
|
+
const job = {
|
|
39277
|
+
index,
|
|
39278
|
+
text: cleanText,
|
|
39279
|
+
promise: this.executeTTS(index, cleanText)
|
|
39280
|
+
};
|
|
39281
|
+
this.activeJobs.set(index, job);
|
|
39282
|
+
job.promise.finally(() => {
|
|
39283
|
+
this.activeJobs.delete(index);
|
|
39284
|
+
this.releaseQueueWaiter();
|
|
39285
|
+
});
|
|
39286
|
+
}
|
|
39287
|
+
/**
 * Execute TTS for a single text chunk.
 * Respects concurrency semaphore.
 * Branches on streaming mode: yields sub-chunks or a single buffered chunk.
 */
async executeTTS(index, text) {
  // Concurrency semaphore: wait for a free TTS slot unless interrupted.
  while (this.activeTTSCount >= this.maxConcurrentTTS && !this.interrupted) {
    await this.waitForTTSSlot();
  }
  if (this.interrupted) return;
  this.activeTTSCount++;
  try {
    const ttsStart = Date.now();
    if (this.streaming && this.tts.supportsStreaming(this.format)) {
      // Streaming path: forward audio as it arrives, coalesced into
      // sub-chunks of at least MIN_BUFFER_BYTES bytes each.
      let subIndex = 0;
      // Streaming remaps mp3 to pcm — NOTE(review): presumably because mp3
      // frames cannot be safely chunked mid-stream; confirm with the provider.
      const streamFormat = this.format === "mp3" ? "pcm" : this.format;
      const MIN_BUFFER_BYTES = 6e3;
      const pendingBuffers = [];
      let pendingSize = 0;
      // Merge all buffered audio into one AUDIO_CHUNK_READY event and push it.
      const flushPending = () => {
        if (pendingSize === 0) return;
        const merged = Buffer.concat(pendingBuffers, pendingSize);
        pendingBuffers.length = 0;
        pendingSize = 0;
        const currentSubIndex = subIndex++;
        const audioEvent = {
          type: "response.audio_chunk.ready" /* AUDIO_CHUNK_READY */,
          response_id: this.lastResponseId,
          chunk_index: index,
          sub_index: currentSubIndex,
          // Only the first sub-chunk carries the source text.
          text: currentSubIndex === 0 ? text : "",
          audio_base64: merged.toString("base64"),
          format: streamFormat
        };
        this.pushAudioEvent(audioEvent);
      };
      for await (const chunk of this.tts.synthesizeStream(text, {
        format: streamFormat,
        speed: this.speed,
        vendorOptions: this.vendorOptions
      })) {
        // Abandon mid-stream on interruption (for-await closes the iterator).
        if (this.interrupted) return;
        if (chunk.audio.length > 0) {
          pendingBuffers.push(chunk.audio);
          pendingSize += chunk.audio.length;
          if (pendingSize >= MIN_BUFFER_BYTES) {
            flushPending();
          }
        }
        if (chunk.isFinal) {
          break;
        }
      }
      // Flush any remainder below the buffering threshold.
      flushPending();
      console.log(`[VoiceStream] TTS chunk ${index} streamed ${subIndex} sub-chunks in ${Date.now() - ttsStart}ms, text: "${text.slice(0, 40)}..."`);
      this.emit("audio:ready", { chunkIndex: index, text });
    } else {
      // Buffered path: synthesize the whole chunk, then emit a single event.
      const response = await this.tts.synthesize(text, {
        format: this.format,
        speed: this.speed,
        vendorOptions: this.vendorOptions
      });
      if (this.interrupted) return;
      if (response.durationSeconds) {
        this.totalDuration += response.durationSeconds;
      }
      const audioEvent = {
        type: "response.audio_chunk.ready" /* AUDIO_CHUNK_READY */,
        response_id: this.lastResponseId,
        chunk_index: index,
        text,
        audio_base64: response.audio.toString("base64"),
        format: response.format,
        duration_seconds: response.durationSeconds,
        characters_used: response.charactersUsed
      };
      this.pushAudioEvent(audioEvent);
      console.log(`[VoiceStream] TTS chunk ${index} ready in ${Date.now() - ttsStart}ms, text: "${text.slice(0, 40)}..."`);
      this.emit("audio:ready", {
        chunkIndex: index,
        text,
        durationSeconds: response.durationSeconds
      });
    }
  } catch (error) {
    // After interruption, failures are expected noise; otherwise surface
    // them both as a buffered event and as an emitter event.
    if (this.interrupted) return;
    const errorEvent = {
      type: "response.audio_chunk.error" /* AUDIO_CHUNK_ERROR */,
      response_id: this.lastResponseId,
      chunk_index: index,
      text,
      error: error.message
    };
    this.pushAudioEvent(errorEvent);
    this.emit("audio:error", {
      chunkIndex: index,
      text,
      error
    });
  } finally {
    // Always release the semaphore slot, even on early return or throw.
    this.activeTTSCount--;
    this.releaseTTSSlot();
  }
}
|
|
39391
|
+
/**
|
|
39392
|
+
* Drain the audio event buffer, yielding all ready events.
|
|
39393
|
+
*/
|
|
39394
|
+
*drainAudioBuffer() {
|
|
39395
|
+
while (this.audioEventBuffer.length > 0) {
|
|
39396
|
+
yield this.audioEventBuffer.shift();
|
|
39397
|
+
}
|
|
39398
|
+
}
|
|
39399
|
+
// ======================== Buffer Notification ========================
|
|
39400
|
+
/**
|
|
39401
|
+
* Push an audio event and wake up the consumer in wrap()
|
|
39402
|
+
*/
|
|
39403
|
+
pushAudioEvent(event) {
|
|
39404
|
+
this.audioEventBuffer.push(event);
|
|
39405
|
+
if (this.bufferNotify) {
|
|
39406
|
+
this.bufferNotify();
|
|
39407
|
+
this.bufferNotify = null;
|
|
39408
|
+
}
|
|
39409
|
+
}
|
|
39410
|
+
/**
|
|
39411
|
+
* Wait until a new event is pushed to the audio buffer
|
|
39412
|
+
*/
|
|
39413
|
+
waitForBufferNotify() {
|
|
39414
|
+
return new Promise((resolve4) => {
|
|
39415
|
+
this.bufferNotify = resolve4;
|
|
39416
|
+
});
|
|
39417
|
+
}
|
|
39418
|
+
// ======================== Semaphore / Backpressure ========================
|
|
39419
|
+
waitForTTSSlot() {
|
|
39420
|
+
return new Promise((resolve4) => {
|
|
39421
|
+
this.slotWaiters.push(resolve4);
|
|
39422
|
+
});
|
|
39423
|
+
}
|
|
39424
|
+
releaseTTSSlot() {
|
|
39425
|
+
const waiter = this.slotWaiters.shift();
|
|
39426
|
+
if (waiter) waiter();
|
|
39427
|
+
}
|
|
39428
|
+
waitForQueueSlot() {
|
|
39429
|
+
return new Promise((resolve4) => {
|
|
39430
|
+
this.queueWaiters.push(resolve4);
|
|
39431
|
+
});
|
|
39432
|
+
}
|
|
39433
|
+
releaseQueueWaiter() {
|
|
39434
|
+
const waiter = this.queueWaiters.shift();
|
|
39435
|
+
if (waiter) waiter();
|
|
39436
|
+
}
|
|
39437
|
+
releaseAllWaiters() {
|
|
39438
|
+
for (const waiter of this.slotWaiters) waiter();
|
|
39439
|
+
this.slotWaiters = [];
|
|
39440
|
+
for (const waiter of this.queueWaiters) waiter();
|
|
39441
|
+
this.queueWaiters = [];
|
|
39442
|
+
if (this.bufferNotify) {
|
|
39443
|
+
this.bufferNotify();
|
|
39444
|
+
this.bufferNotify = null;
|
|
39445
|
+
}
|
|
39446
|
+
}
|
|
39447
|
+
cleanup() {
|
|
39448
|
+
this.releaseAllWaiters();
|
|
39449
|
+
}
|
|
39450
|
+
};
|
|
39451
|
+
|
|
39452
|
+
// src/capabilities/speech/AudioPlaybackQueue.ts
var AudioPlaybackQueue = class {
  // Out-of-order chunks parked by chunk_index until their turn.
  buffer = /* @__PURE__ */ new Map();
  // Index of the chunk that must be delivered next.
  nextPlayIndex = 0;
  onReady;
  /**
   * @param onReady Callback invoked with each chunk event once it can be
   *   delivered in playback order.
   */
  constructor(onReady) {
    this.onReady = onReady;
  }
  /**
   * Enqueue an audio chunk event. If it's the next expected chunk,
   * it (and any subsequent buffered chunks) are immediately delivered
   * to the callback in order.
   */
  enqueue(event) {
    this.buffer.set(event.chunk_index, event);
    this.drain();
  }
  /**
   * Reset the queue (e.g., on interruption or new stream).
   */
  reset() {
    this.buffer.clear();
    this.nextPlayIndex = 0;
  }
  /**
   * Number of chunks currently buffered waiting for earlier chunks.
   */
  get pendingCount() {
    return this.buffer.size;
  }
  /**
   * The next chunk index expected for playback.
   */
  get nextExpectedIndex() {
    return this.nextPlayIndex;
  }
  // ======================== Private ========================
  // Deliver every consecutively-available chunk starting at nextPlayIndex.
  drain() {
    let event;
    while ((event = this.buffer.get(this.nextPlayIndex)) !== undefined) {
      this.buffer.delete(this.nextPlayIndex);
      this.nextPlayIndex++;
      this.onReady(event);
    }
  }
};
|
|
39498
|
+
|
|
37676
39499
|
// src/capabilities/search/SearchProvider.ts
|
|
37677
39500
|
init_Connector();
|
|
37678
39501
|
|
|
@@ -42352,6 +44175,14 @@ var SERVICE_DEFINITIONS = [
|
|
|
42352
44175
|
baseURL: "https://aws.amazon.com",
|
|
42353
44176
|
docsURL: "https://docs.aws.amazon.com/"
|
|
42354
44177
|
},
|
|
44178
|
+
{
|
|
44179
|
+
id: "cloudflare",
|
|
44180
|
+
name: "Cloudflare",
|
|
44181
|
+
category: "cloud",
|
|
44182
|
+
urlPattern: /api\.cloudflare\.com/i,
|
|
44183
|
+
baseURL: "https://api.cloudflare.com/client/v4",
|
|
44184
|
+
docsURL: "https://developers.cloudflare.com/api/"
|
|
44185
|
+
},
|
|
42355
44186
|
// ============ Storage ============
|
|
42356
44187
|
{
|
|
42357
44188
|
id: "dropbox",
|
|
@@ -42395,6 +44226,14 @@ var SERVICE_DEFINITIONS = [
|
|
|
42395
44226
|
baseURL: "https://api.postmarkapp.com",
|
|
42396
44227
|
docsURL: "https://postmarkapp.com/developer"
|
|
42397
44228
|
},
|
|
44229
|
+
{
|
|
44230
|
+
id: "mailgun",
|
|
44231
|
+
name: "Mailgun",
|
|
44232
|
+
category: "email",
|
|
44233
|
+
urlPattern: /api\.mailgun\.net|api\.eu\.mailgun\.net/i,
|
|
44234
|
+
baseURL: "https://api.mailgun.net/v3",
|
|
44235
|
+
docsURL: "https://documentation.mailgun.com/docs/mailgun/api-reference/"
|
|
44236
|
+
},
|
|
42398
44237
|
// ============ Monitoring & Observability ============
|
|
42399
44238
|
{
|
|
42400
44239
|
id: "datadog",
|
|
@@ -45134,6 +46973,43 @@ var awsTemplate = {
|
|
|
45134
46973
|
]
|
|
45135
46974
|
};
|
|
45136
46975
|
|
|
46976
|
+
// src/connectors/vendors/templates/cloudflare.ts
// Connector template describing Cloudflare's v4 API and its two auth schemes.
var cloudflareTemplate = {
  id: "cloudflare",
  name: "Cloudflare",
  serviceType: "cloudflare",
  baseURL: "https://api.cloudflare.com/client/v4",
  docsURL: "https://developers.cloudflare.com/api/",
  credentialsSetupURL: "https://dash.cloudflare.com/profile/api-tokens",
  category: "cloud",
  notes: "API Tokens (recommended) are scoped and more secure. Global API Key requires email and has full account access.",
  authTemplates: [
    // Scoped bearer token — the recommended, least-privilege scheme.
    {
      id: "api-token",
      name: "API Token",
      type: "api_key",
      description: "Scoped API token (recommended). Create at dash.cloudflare.com > My Profile > API Tokens",
      requiredFields: ["apiKey"],
      defaults: {
        type: "api_key",
        headerName: "Authorization",
        headerPrefix: "Bearer"
      }
    },
    // Legacy key + account email sent via X-Auth-Key; full account access.
    {
      id: "global-api-key",
      name: "Global API Key",
      type: "api_key",
      description: "Legacy global API key + email. Has full account access. Prefer API Tokens for least-privilege access",
      requiredFields: ["apiKey", "username"],
      defaults: {
        type: "api_key",
        headerName: "X-Auth-Key"
      }
    }
  ]
};
|
|
47012
|
+
|
|
45137
47013
|
// src/connectors/vendors/templates/dropbox.ts
|
|
45138
47014
|
var dropboxTemplate = {
|
|
45139
47015
|
id: "dropbox",
|
|
@@ -45322,6 +47198,30 @@ var postmarkTemplate = {
|
|
|
45322
47198
|
}
|
|
45323
47199
|
]
|
|
45324
47200
|
};
|
|
47201
|
+
// Connector template describing Mailgun's v3 API (single Basic-auth scheme).
var mailgunTemplate = {
  id: "mailgun",
  name: "Mailgun",
  serviceType: "mailgun",
  baseURL: "https://api.mailgun.net/v3",
  docsURL: "https://documentation.mailgun.com/docs/mailgun/api-reference/",
  credentialsSetupURL: "https://app.mailgun.com/settings/api_security",
  category: "email",
  notes: "EU region uses api.eu.mailgun.net. Most endpoints require /v3/<domain> in the path.",
  authTemplates: [
    // Private API key sent as HTTP Basic credentials.
    {
      id: "api-key",
      name: "API Key",
      type: "api_key",
      description: "Private API key for full account access. Find at Settings > API Security",
      requiredFields: ["apiKey"],
      defaults: {
        type: "api_key",
        headerName: "Authorization",
        headerPrefix: "Basic"
      }
    }
  ]
};
|
|
45325
47225
|
|
|
45326
47226
|
// src/connectors/vendors/templates/monitoring.ts
|
|
45327
47227
|
var datadogTemplate = {
|
|
@@ -45770,6 +47670,7 @@ var allVendorTemplates = [
|
|
|
45770
47670
|
rampTemplate,
|
|
45771
47671
|
// Cloud
|
|
45772
47672
|
awsTemplate,
|
|
47673
|
+
cloudflareTemplate,
|
|
45773
47674
|
// Storage
|
|
45774
47675
|
dropboxTemplate,
|
|
45775
47676
|
boxTemplate,
|
|
@@ -45777,6 +47678,7 @@ var allVendorTemplates = [
|
|
|
45777
47678
|
sendgridTemplate,
|
|
45778
47679
|
mailchimpTemplate,
|
|
45779
47680
|
postmarkTemplate,
|
|
47681
|
+
mailgunTemplate,
|
|
45780
47682
|
// Monitoring
|
|
45781
47683
|
datadogTemplate,
|
|
45782
47684
|
pagerdutyTemplate,
|
|
@@ -49134,7 +51036,8 @@ SANDBOX API:
|
|
|
49134
51036
|
4. connectors.get(name) \u2014 Connector info: { displayName, description, baseURL, serviceType }
|
|
49135
51037
|
|
|
49136
51038
|
VARIABLES:
|
|
49137
|
-
\u2022 input \u2014 data passed via the "input" parameter (default: {})
|
|
51039
|
+
\u2022 input \u2014 data passed via the "input" parameter (default: {}). Always a parsed object/array, never a string.
|
|
51040
|
+
CRITICAL: You MUST pass actual data values directly. Template placeholders ({{results}}, {{param.name}}, etc.) are NOT supported and will be passed as literal strings. If you need data from a previous tool call, include the actual returned data in the input object.
|
|
49138
51041
|
\u2022 output \u2014 SET THIS to return your result to the caller
|
|
49139
51042
|
|
|
49140
51043
|
GLOBALS: console.log/error/warn, JSON, Math, Date, Buffer, Promise, Array, Object, String, Number, Boolean, setTimeout, setInterval, URL, URLSearchParams, RegExp, Map, Set, Error, TextEncoder, TextDecoder
|
|
@@ -49157,7 +51060,8 @@ const resp = await authenticatedFetch('/chat.postMessage', {
|
|
|
49157
51060
|
}, 'slack');
|
|
49158
51061
|
output = await resp.json();
|
|
49159
51062
|
${accountIdExamples}
|
|
49160
|
-
// Data processing
|
|
51063
|
+
// Data processing \u2014 pass actual data via the input parameter, NOT template references
|
|
51064
|
+
// e.g. call with: { "code": "...", "input": { "data": [{"score": 0.9}, {"score": 0.5}] } }
|
|
49161
51065
|
const items = input.data;
|
|
49162
51066
|
output = items.filter(i => i.score > 0.8).sort((a, b) => b.score - a.score);
|
|
49163
51067
|
|
|
@@ -49181,7 +51085,7 @@ function createExecuteJavaScriptTool(options) {
|
|
|
49181
51085
|
description: 'JavaScript code to execute. Set the "output" variable with your result. Code is auto-wrapped in async IIFE \u2014 you can use await directly. For explicit async control, wrap in (async () => { ... })().'
|
|
49182
51086
|
},
|
|
49183
51087
|
input: {
|
|
49184
|
-
description: 'Optional data available as the "input" variable in your code.
|
|
51088
|
+
description: 'Optional data available as the "input" variable in your code. IMPORTANT: Pass actual data directly as a JSON object/array. Template placeholders like {{results}} or {{param.name}} are NOT supported here and will be passed as literal strings. You must include the actual data values inline. Correct: "input": {"deals": [{"id":"1"}, ...]}. Wrong: "input": {"deals": "{{results}}"}.'
|
|
49185
51089
|
},
|
|
49186
51090
|
timeout: {
|
|
49187
51091
|
type: "number",
|
|
@@ -49204,9 +51108,19 @@ function createExecuteJavaScriptTool(options) {
|
|
|
49204
51108
|
try {
|
|
49205
51109
|
const timeout = Math.min(Math.max(args.timeout || defaultTimeout, 0), maxTimeout);
|
|
49206
51110
|
const registry = context?.connectorRegistry ?? exports.Connector.asRegistry();
|
|
51111
|
+
let resolvedInput = args.input;
|
|
51112
|
+
if (typeof resolvedInput === "string") {
|
|
51113
|
+
const trimmed = resolvedInput.trim();
|
|
51114
|
+
if (trimmed.startsWith("{") && trimmed.endsWith("}") || trimmed.startsWith("[") && trimmed.endsWith("]")) {
|
|
51115
|
+
try {
|
|
51116
|
+
resolvedInput = JSON.parse(trimmed);
|
|
51117
|
+
} catch {
|
|
51118
|
+
}
|
|
51119
|
+
}
|
|
51120
|
+
}
|
|
49207
51121
|
const result = await executeInVM(
|
|
49208
51122
|
args.code,
|
|
49209
|
-
|
|
51123
|
+
resolvedInput,
|
|
49210
51124
|
timeout,
|
|
49211
51125
|
logs,
|
|
49212
51126
|
context?.userId,
|
|
@@ -54622,6 +56536,7 @@ exports.APPROVAL_STATE_VERSION = APPROVAL_STATE_VERSION;
|
|
|
54622
56536
|
exports.Agent = Agent;
|
|
54623
56537
|
exports.AgentContextNextGen = AgentContextNextGen;
|
|
54624
56538
|
exports.ApproximateTokenEstimator = ApproximateTokenEstimator;
|
|
56539
|
+
exports.AudioPlaybackQueue = AudioPlaybackQueue;
|
|
54625
56540
|
exports.BaseMediaProvider = BaseMediaProvider;
|
|
54626
56541
|
exports.BasePluginNextGen = BasePluginNextGen;
|
|
54627
56542
|
exports.BaseProvider = BaseProvider;
|
|
@@ -54716,6 +56631,7 @@ exports.STT_MODELS = STT_MODELS;
|
|
|
54716
56631
|
exports.STT_MODEL_REGISTRY = STT_MODEL_REGISTRY;
|
|
54717
56632
|
exports.ScrapeProvider = ScrapeProvider;
|
|
54718
56633
|
exports.SearchProvider = SearchProvider;
|
|
56634
|
+
exports.SentenceChunkingStrategy = SentenceChunkingStrategy;
|
|
54719
56635
|
exports.SerperProvider = SerperProvider;
|
|
54720
56636
|
exports.Services = Services;
|
|
54721
56637
|
exports.SimpleScheduler = SimpleScheduler;
|
|
@@ -54751,6 +56667,7 @@ exports.VIDEO_MODELS = VIDEO_MODELS;
|
|
|
54751
56667
|
exports.VIDEO_MODEL_REGISTRY = VIDEO_MODEL_REGISTRY;
|
|
54752
56668
|
exports.Vendor = Vendor;
|
|
54753
56669
|
exports.VideoGeneration = VideoGeneration;
|
|
56670
|
+
exports.VoiceStream = VoiceStream;
|
|
54754
56671
|
exports.WorkingMemory = WorkingMemory;
|
|
54755
56672
|
exports.WorkingMemoryPluginNextGen = WorkingMemoryPluginNextGen;
|
|
54756
56673
|
exports.addJitter = addJitter;
|
|
@@ -54939,6 +56856,9 @@ exports.grep = grep;
|
|
|
54939
56856
|
exports.hasClipboardImage = hasClipboardImage;
|
|
54940
56857
|
exports.hasVendorLogo = hasVendorLogo;
|
|
54941
56858
|
exports.hydrateCustomTool = hydrateCustomTool;
|
|
56859
|
+
exports.isAudioChunkError = isAudioChunkError;
|
|
56860
|
+
exports.isAudioChunkReady = isAudioChunkReady;
|
|
56861
|
+
exports.isAudioStreamComplete = isAudioStreamComplete;
|
|
54942
56862
|
exports.isBlockedCommand = isBlockedCommand;
|
|
54943
56863
|
exports.isErrorEvent = isErrorEvent;
|
|
54944
56864
|
exports.isExcludedExtension = isExcludedExtension;
|