@wix/auto_sdk_ai-gateway_prompts 1.0.10 → 1.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/cjs/index.d.ts +1 -1
- package/build/cjs/index.js +131 -0
- package/build/cjs/index.js.map +1 -1
- package/build/cjs/index.typings.d.ts +274 -3
- package/build/cjs/index.typings.js +123 -0
- package/build/cjs/index.typings.js.map +1 -1
- package/build/cjs/meta.d.ts +261 -4
- package/build/cjs/meta.js +110 -0
- package/build/cjs/meta.js.map +1 -1
- package/build/es/index.d.mts +1 -1
- package/build/es/index.mjs +127 -0
- package/build/es/index.mjs.map +1 -1
- package/build/es/index.typings.d.mts +274 -3
- package/build/es/index.typings.mjs +119 -0
- package/build/es/index.typings.mjs.map +1 -1
- package/build/es/meta.d.mts +261 -4
- package/build/es/meta.mjs +106 -0
- package/build/es/meta.mjs.map +1 -1
- package/build/internal/cjs/index.d.ts +13 -5
- package/build/internal/cjs/index.js +131 -0
- package/build/internal/cjs/index.js.map +1 -1
- package/build/internal/cjs/index.typings.d.ts +282 -5
- package/build/internal/cjs/index.typings.js +123 -0
- package/build/internal/cjs/index.typings.js.map +1 -1
- package/build/internal/cjs/meta.d.ts +261 -4
- package/build/internal/cjs/meta.js +110 -0
- package/build/internal/cjs/meta.js.map +1 -1
- package/build/internal/es/index.d.mts +13 -5
- package/build/internal/es/index.mjs +127 -0
- package/build/internal/es/index.mjs.map +1 -1
- package/build/internal/es/index.typings.d.mts +282 -5
- package/build/internal/es/index.typings.mjs +119 -0
- package/build/internal/es/index.typings.mjs.map +1 -1
- package/build/internal/es/meta.d.mts +261 -4
- package/build/internal/es/meta.mjs +106 -0
- package/build/internal/es/meta.mjs.map +1 -1
- package/package.json +2 -2
@@ -31,6 +31,7 @@ __export(index_typings_exports, {
   CreatePredictionModel: () => CreatePredictionModel,
   DynamicRetrievalConfigMode: () => DynamicRetrievalConfigMode,
   EditImageWithPromptRequestModel: () => EditImageWithPromptRequestModel,
+  ElevenLabsTextToSpeechModel: () => ElevenLabsTextToSpeechModel,
   FinishReason: () => FinishReason,
   GatewayMessageDefinitionRole: () => GatewayMessageDefinitionRole,
   GenerateAnImageModel: () => GenerateAnImageModel,
@@ -74,6 +75,7 @@ __export(index_typings_exports, {
   ResponsesModel: () => ResponsesModel,
   Role: () => Role,
   Sampler: () => Sampler,
+  SpeechModel: () => SpeechModel,
   StylePreset: () => StylePreset,
   TextBisonModel: () => TextBisonModel,
   TextToImageRequestModel: () => TextToImageRequestModel,
@@ -91,9 +93,11 @@ __export(index_typings_exports, {
   V1ResponseTypeType: () => V1ResponseTypeType,
   V1ResponsesModel: () => V1ResponsesModel,
   V1ToolChoiceType: () => V1ToolChoiceType,
+  V1VideoModel: () => V1VideoModel,
   VideoGenModel: () => VideoGenModel,
   VideoModel: () => VideoModel,
   WebhookIdentityType: () => WebhookIdentityType,
+  generateAudioStreamed: () => generateAudioStreamed2,
   generateContentByPromptObject: () => generateContentByPromptObject2,
   generateContentByPromptObjectAsync: () => generateContentByPromptObjectAsync2,
   generateTextByPromptObjectStreamed: () => generateTextByPromptObjectStreamed2
@@ -849,6 +853,70 @@ function generateContentByPromptObjectAsync(payload) {
   }
   return __generateContentByPromptObjectAsync;
 }
+function generateAudioStreamed(payload) {
+  function __generateAudioStreamed({ host }) {
+    const serializedData = (0, import_transform_paths.transformPaths)(payload, [
+      {
+        transformFn: import_float.transformSDKFloatToRESTFloat,
+        paths: [
+          { path: "openAiCreateSpeechRequest.speed" },
+          { path: "elevenlabsTextToSpeechRequest.voiceSettings.style" },
+          { path: "elevenlabsTextToSpeechRequest.voiceSettings.stability" },
+          {
+            path: "elevenlabsTextToSpeechRequest.voiceSettings.similarityBoost"
+          }
+        ]
+      }
+    ]);
+    const metadata = {
+      entityFqdn: "wix.api_infra.v1.prompt_proxy",
+      method: "POST",
+      methodFqn: "wix.api_infra.v1.WixAiExternalGateway.GenerateAudioStreamed",
+      packageName: PACKAGE_NAME,
+      migrationOptions: {
+        optInTransformResponse: true
+      },
+      url: resolveWixApiInfraV1WixAiExternalGatewayUrl({
+        protoPath: "/v1/generate-audio-streamed",
+        data: serializedData,
+        host
+      }),
+      data: serializedData,
+      transformResponse: (payload2) => (0, import_transform_paths.transformPaths)(payload2, [
+        {
+          transformFn: import_bytes.transformRESTBytesToSDKBytes,
+          paths: [
+            { path: "openAiSpeechChunk.content" },
+            { path: "elevenlabsSpeechChunk.audioBase64" }
+          ]
+        },
+        {
+          transformFn: import_float2.transformRESTFloatToSDKFloat,
+          paths: [
+            {
+              path: "elevenlabsSpeechChunk.alignment.characterStartTimesSeconds",
+              isRepeated: true
+            },
+            {
+              path: "elevenlabsSpeechChunk.alignment.characterEndTimesSeconds",
+              isRepeated: true
+            },
+            {
+              path: "elevenlabsSpeechChunk.normalizedAlignment.characterStartTimesSeconds",
+              isRepeated: true
+            },
+            {
+              path: "elevenlabsSpeechChunk.normalizedAlignment.characterEndTimesSeconds",
+              isRepeated: true
+            }
+          ]
+        }
+      ])
+    };
+    return metadata;
+  }
+  return __generateAudioStreamed;
+}

 // src/api-infra-v1-prompt-proxy-prompts.universal.ts
 var OpenaiproxyV1ChatCompletionMessageMessageRole = /* @__PURE__ */ ((OpenaiproxyV1ChatCompletionMessageMessageRole2) => {
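
The request builder added above only registers float conversions on `openAiCreateSpeechRequest.speed` and on the ElevenLabs `voiceSettings` fields going out, and decodes the streamed chunks coming back: `openAiSpeechChunk.content` and `elevenlabsSpeechChunk.audioBase64` from the REST bytes encoding, plus the ElevenLabs alignment arrays element-wise from REST floats. A rough TypeScript sketch of the request shape implied by those transform paths follows; the authoritative types live in the updated index.typings.d.ts, and any field not named in a path above is an assumption.

// Hypothetical shape, inferred only from the transform paths in this hunk.
interface GenerateAudioStreamedRequestSketch {
  openAiCreateSpeechRequest?: {
    speed?: number; // SDK float -> REST float on the way out
    // ...remaining OpenAI speech fields are defined in the package typings
  };
  elevenlabsTextToSpeechRequest?: {
    voiceSettings?: {
      style?: number;           // float-transformed
      stability?: number;       // float-transformed
      similarityBoost?: number; // float-transformed
    };
    // ...remaining ElevenLabs fields are defined in the package typings
  };
  userRequestInfo?: unknown; // passed through unchanged by the wrapper further down
}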
@@ -1476,6 +1544,7 @@ var VideoModel = /* @__PURE__ */ ((VideoModel2) => {
   VideoModel2["UNKNOWN_VIDEO_MODEL"] = "UNKNOWN_VIDEO_MODEL";
   VideoModel2["SEEDANCE_1_0_PRO"] = "SEEDANCE_1_0_PRO";
   VideoModel2["SEEDANCE_1_0_LITE"] = "SEEDANCE_1_0_LITE";
+  VideoModel2["SEEDANCE_1_0_PRO_FAST"] = "SEEDANCE_1_0_PRO_FAST";
   return VideoModel2;
 })(VideoModel || {});
 var ResponsesInputMessageResponsesMessageRole = /* @__PURE__ */ ((ResponsesInputMessageResponsesMessageRole2) => {
@@ -1492,6 +1561,12 @@ var ResponsesMessageRole = /* @__PURE__ */ ((ResponsesMessageRole2) => {
   ResponsesMessageRole2["DEVELOPER"] = "DEVELOPER";
   return ResponsesMessageRole2;
 })(ResponsesMessageRole || {});
+var V1VideoModel = /* @__PURE__ */ ((V1VideoModel2) => {
+  V1VideoModel2["UNKNOWN_VIDEO_MODEL"] = "UNKNOWN_VIDEO_MODEL";
+  V1VideoModel2["SORA_2"] = "SORA_2";
+  V1VideoModel2["SORA_2_PRO"] = "SORA_2_PRO";
+  return V1VideoModel2;
+})(V1VideoModel || {});
 var GatewayMessageDefinitionRole = /* @__PURE__ */ ((GatewayMessageDefinitionRole2) => {
   GatewayMessageDefinitionRole2["UNKNOWN"] = "UNKNOWN";
   GatewayMessageDefinitionRole2["USER"] = "USER";
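
The two hunks above add one value to the existing VideoModel enum and introduce a new V1VideoModel enum. Because these compile to runtime enum objects (the IIFE pattern shown), their members can be read as plain string values. A minimal sketch, assuming the package root re-exports them as the export lists in this file indicate:

import { VideoModel, V1VideoModel } from "@wix/auto_sdk_ai-gateway_prompts";

const seedanceFast = VideoModel.SEEDANCE_1_0_PRO_FAST; // "SEEDANCE_1_0_PRO_FAST", new in this release
const soraPro = V1VideoModel.SORA_2_PRO;               // "SORA_2_PRO", from the new V1VideoModel enum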
@@ -1501,6 +1576,19 @@ var GatewayMessageDefinitionRole = /* @__PURE__ */ ((GatewayMessageDefinitionRol
   GatewayMessageDefinitionRole2["DEVELOPER"] = "DEVELOPER";
   return GatewayMessageDefinitionRole2;
 })(GatewayMessageDefinitionRole || {});
+var SpeechModel = /* @__PURE__ */ ((SpeechModel2) => {
+  SpeechModel2["UNKNOWN_SPEECH_MODEL"] = "UNKNOWN_SPEECH_MODEL";
+  SpeechModel2["TTS_1"] = "TTS_1";
+  SpeechModel2["TTS_1_HD"] = "TTS_1_HD";
+  return SpeechModel2;
+})(SpeechModel || {});
+var ElevenLabsTextToSpeechModel = /* @__PURE__ */ ((ElevenLabsTextToSpeechModel2) => {
+  ElevenLabsTextToSpeechModel2["UNKNOWN_ELEVEN_LABS_TEXT_TO_SPEECH_MODEL"] = "UNKNOWN_ELEVEN_LABS_TEXT_TO_SPEECH_MODEL";
+  ElevenLabsTextToSpeechModel2["ELEVEN_MULTILINGUAL_V2"] = "ELEVEN_MULTILINGUAL_V2";
+  ElevenLabsTextToSpeechModel2["ELEVEN_FLASH_V2_5"] = "ELEVEN_FLASH_V2_5";
+  ElevenLabsTextToSpeechModel2["ELEVEN_FLASH_V2"] = "ELEVEN_FLASH_V2";
+  return ElevenLabsTextToSpeechModel2;
+})(ElevenLabsTextToSpeechModel || {});
 var WebhookIdentityType = /* @__PURE__ */ ((WebhookIdentityType2) => {
   WebhookIdentityType2["UNKNOWN"] = "UNKNOWN";
   WebhookIdentityType2["ANONYMOUS_VISITOR"] = "ANONYMOUS_VISITOR";
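
Likewise, the speech enums added here are runtime objects whose members map to their own string names, so model selection can be written against the enum rather than raw strings. A short sketch under the same re-export assumption:

import {
  SpeechModel,
  ElevenLabsTextToSpeechModel,
} from "@wix/auto_sdk_ai-gateway_prompts";

const openAiTtsModel = SpeechModel.TTS_1_HD;                           // "TTS_1_HD"
const elevenLabsModel = ElevenLabsTextToSpeechModel.ELEVEN_FLASH_V2_5; // "ELEVEN_FLASH_V2_5"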
@@ -1630,6 +1718,37 @@ async function generateContentByPromptObjectAsync2(options) {
     throw transformedError;
   }
 }
+async function generateAudioStreamed2(options) {
+  const { httpClient, sideEffects } = arguments[1];
+  const payload = (0, import_rename_all_nested_keys.renameKeysFromSDKRequestToRESTRequest)({
+    openAiCreateSpeechRequest: options?.openAiCreateSpeechRequest,
+    elevenlabsTextToSpeechRequest: options?.elevenlabsTextToSpeechRequest,
+    userRequestInfo: options?.userRequestInfo
+  });
+  const reqOpts = generateAudioStreamed(payload);
+  sideEffects?.onSiteCall?.();
+  try {
+    const result = await httpClient.request(reqOpts);
+    sideEffects?.onSuccess?.(result);
+    return (0, import_rename_all_nested_keys.renameKeysFromRESTResponseToSDKResponse)(result.data);
+  } catch (err) {
+    const transformedError = (0, import_transform_error.transformError)(
+      err,
+      {
+        spreadPathsToArguments: {},
+        explicitPathsToArguments: {
+          openAiCreateSpeechRequest: "$[0].openAiCreateSpeechRequest",
+          elevenlabsTextToSpeechRequest: "$[0].elevenlabsTextToSpeechRequest",
+          userRequestInfo: "$[0].userRequestInfo"
+        },
+        singleArgumentUnchanged: false
+      },
+      ["options"]
+    );
+    sideEffects?.onError?.(err);
+    throw transformedError;
+  }
+}
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AnthropicModel,
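
The wrapper above reads `httpClient` and `sideEffects` from its second argument, which the SDK host injects, so application code is expected to call it through a bound client rather than invoking the export directly. A hedged sketch of the options object it accepts, using only the field names visible in the payload mapping; the nested request fields beyond `voiceSettings` are defined in the typings, not shown here.

// Hypothetical call site: `boundPrompts` stands in for however the consuming
// app binds this module (e.g. via @wix/sdk), which this diff does not show.
const options = {
  elevenlabsTextToSpeechRequest: {
    voiceSettings: {
      stability: 0.5,        // float fields converted by the request builder above
      similarityBoost: 0.75,
      style: 0.2,
    },
  },
  userRequestInfo: undefined, // optional, passed through as-is
};
// await boundPrompts.generateAudioStreamed(options);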
@@ -1643,6 +1762,7 @@ async function generateContentByPromptObjectAsync2(options) {
   CreatePredictionModel,
   DynamicRetrievalConfigMode,
   EditImageWithPromptRequestModel,
+  ElevenLabsTextToSpeechModel,
   FinishReason,
   GatewayMessageDefinitionRole,
   GenerateAnImageModel,
@@ -1686,6 +1806,7 @@ async function generateContentByPromptObjectAsync2(options) {
   ResponsesModel,
   Role,
   Sampler,
+  SpeechModel,
   StylePreset,
   TextBisonModel,
   TextToImageRequestModel,
@@ -1703,9 +1824,11 @@ async function generateContentByPromptObjectAsync2(options) {
   V1ResponseTypeType,
   V1ResponsesModel,
   V1ToolChoiceType,
+  V1VideoModel,
   VideoGenModel,
   VideoModel,
   WebhookIdentityType,
+  generateAudioStreamed,
   generateContentByPromptObject,
   generateContentByPromptObjectAsync,
   generateTextByPromptObjectStreamed