@threaded/ai 1.0.8 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +176 -12
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +176 -12
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -307,6 +307,16 @@ var generateImage = async (model2, prompt, config) => {
|
|
|
307
307
|
};
|
|
308
308
|
|
|
309
309
|
// src/providers/openai.ts
|
|
// Resolve the OpenAI API key. Precedence: explicit config value, then the
// stored key via getKey("openai"), then the OPENAI_API_KEY environment
// variable. Throws when none of the three sources yields a key.
var getApiKey2 = (configApiKey) => {
  if (configApiKey) {
    return configApiKey;
  }
  try {
    return getKey("openai");
  } catch {
    // No stored key (or getKey unavailable) — fall back to the environment.
    const envKey = process.env.OPENAI_API_KEY || "";
    if (!envKey) {
      throw new Error("OpenAI API key not found");
    }
    return envKey;
  }
};
|
|
310
320
|
var appendToolCalls = (toolCalls, tcchunklist) => {
|
|
311
321
|
for (const tcchunk of tcchunklist) {
|
|
312
322
|
while (toolCalls.length <= tcchunk.index) {
|
|
@@ -325,10 +335,7 @@ var appendToolCalls = (toolCalls, tcchunklist) => {
|
|
|
325
335
|
};
|
|
326
336
|
var callOpenAI = async (config, ctx) => {
|
|
327
337
|
const { model: model2, instructions, schema, apiKey: configApiKey } = config;
|
|
328
|
-
const apiKey = configApiKey
|
|
329
|
-
if (!apiKey) {
|
|
330
|
-
throw new Error("OpenAI API key not found");
|
|
331
|
-
}
|
|
338
|
+
const apiKey = getApiKey2(configApiKey);
|
|
332
339
|
const messages = [];
|
|
333
340
|
if (instructions) {
|
|
334
341
|
messages.push({ role: "system", content: instructions });
|
|
@@ -441,6 +448,16 @@ var handleOpenAIStream = async (response, ctx) => {
|
|
|
441
448
|
};
|
|
442
449
|
|
|
443
450
|
// src/providers/anthropic.ts
|
|
// Resolve the Anthropic API key. Precedence: explicit config value, then the
// stored key via getKey("anthropic"), then the ANTHROPIC_API_KEY environment
// variable. Throws when none of the three sources yields a key.
var getApiKey3 = (configApiKey) => {
  if (configApiKey) {
    return configApiKey;
  }
  try {
    return getKey("anthropic");
  } catch {
    // No stored key (or getKey unavailable) — fall back to the environment.
    const envKey = process.env.ANTHROPIC_API_KEY || "";
    if (!envKey) {
      throw new Error("Anthropic API key not found");
    }
    return envKey;
  }
};
|
|
444
461
|
var convertToAnthropicFormat = (messages) => {
|
|
445
462
|
const result = [];
|
|
446
463
|
let i = 0;
|
|
@@ -492,10 +509,7 @@ var convertToAnthropicFormat = (messages) => {
|
|
|
492
509
|
};
|
|
493
510
|
var callAnthropic = async (config, ctx) => {
|
|
494
511
|
const { model: model2, instructions, schema, apiKey: configApiKey } = config;
|
|
495
|
-
const apiKey = configApiKey
|
|
496
|
-
if (!apiKey) {
|
|
497
|
-
throw new Error("Anthropic API key not found");
|
|
498
|
-
}
|
|
512
|
+
const apiKey = getApiKey3(configApiKey);
|
|
499
513
|
let system = instructions;
|
|
500
514
|
if (ctx.history[0]?.role === "system") {
|
|
501
515
|
system = ctx.history[0].content;
|
|
@@ -640,12 +654,19 @@ var handleAnthropicStream = async (response, ctx) => {
|
|
|
640
654
|
};
|
|
641
655
|
|
|
642
656
|
// src/providers/google.ts
|
|
// Resolve the Google API key. Precedence: explicit config value, then the
// stored key via getKey("google"), then GEMINI_API_KEY, then
// GOOGLE_AI_API_KEY. Throws when no source yields a key.
var getApiKey4 = (configApiKey) => {
  if (configApiKey) {
    return configApiKey;
  }
  try {
    return getKey("google");
  } catch {
    // No stored key (or getKey unavailable) — fall back to the environment,
    // preferring GEMINI_API_KEY over the older GOOGLE_AI_API_KEY name.
    const envKey = process.env.GEMINI_API_KEY || process.env.GOOGLE_AI_API_KEY || "";
    if (!envKey) {
      throw new Error("Google API key not found");
    }
    return envKey;
  }
};
|
|
643
667
|
var callGoogle = async (config, ctx) => {
|
|
644
668
|
const { model: model2, instructions, apiKey: configApiKey } = config;
|
|
645
|
-
const apiKey = configApiKey
|
|
646
|
-
if (!apiKey) {
|
|
647
|
-
throw new Error("Google API key not found");
|
|
648
|
-
}
|
|
669
|
+
const apiKey = getApiKey4(configApiKey);
|
|
649
670
|
const contents = [];
|
|
650
671
|
if (instructions) {
|
|
651
672
|
contents.push({
|
|
@@ -791,6 +812,147 @@ var callHuggingFace = async (config, ctx) => {
|
|
|
791
812
|
);
|
|
792
813
|
};
|
|
793
814
|
|
|
815
|
+
// src/providers/xai.ts
|
|
// Merge a batch of streamed tool-call delta chunks into the accumulated
// toolCalls array. Streaming APIs fragment each tool call across many SSE
// events, addressing them by `index`; id/name/arguments arrive as string
// pieces that must be concatenated in order. Mutates and returns toolCalls.
var appendToolCalls2 = (toolCalls, tcchunklist) => {
  for (const chunk of tcchunklist) {
    // Grow the list until the slot addressed by this chunk exists.
    while (toolCalls.length <= chunk.index) {
      toolCalls.push({
        id: "",
        type: "function",
        function: { name: "", arguments: "" }
      });
    }
    const target = toolCalls[chunk.index];
    target.id += chunk.id || "";
    target.function.name += chunk.function?.name || "";
    target.function.arguments += chunk.function?.arguments || "";
  }
  return toolCalls;
};
|
|
// Resolve the xAI API key. Precedence: explicit config value, then the
// stored key via getKey("xai"), then the XAI_API_KEY environment variable.
// Throws when none of the three sources yields a key.
var getApiKey5 = (configApiKey) => {
  if (configApiKey) {
    return configApiKey;
  }
  try {
    return getKey("xai");
  } catch {
    // No stored key (or getKey unavailable) — fall back to the environment.
    const envKey = process.env.XAI_API_KEY || "";
    if (!envKey) {
      throw new Error("xAI API key not found");
    }
    return envKey;
  }
};
|
|
// Call the xAI chat-completions endpoint and fold the assistant reply into
// the conversation context. Supports JSON-schema structured output, tool
// calling, streaming (delegated to handleXAIStream), and abort signals.
// Returns a new ctx with `lastResponse` and the reply appended to `history`.
var callXAI = async (config, ctx) => {
  const { model: modelName, instructions, schema, apiKey: configApiKey } = config;
  const apiKey = getApiKey5(configApiKey);
  // System instructions (if any) lead, followed by the conversation so far.
  const messages = instructions ? [{ role: "system", content: instructions }] : [];
  messages.push(...ctx.history);
  const payload = {
    model: modelName,
    messages,
    stream: !!ctx.stream
  };
  if (schema) {
    // Structured output: require strict conformance and close the schema
    // against unexpected properties.
    payload.response_format = {
      type: "json_schema",
      json_schema: {
        name: schema.name,
        schema: { ...schema.schema, additionalProperties: false },
        strict: true
      }
    };
  }
  if (ctx.tools && ctx.tools.length > 0) {
    payload.tools = ctx.tools;
    payload.tool_choice = "auto";
  }
  const response = await fetch("https://api.x.ai/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`
    },
    body: JSON.stringify(payload),
    signal: ctx.abortSignal
  });
  if (!response.ok) {
    throw new Error(`xAI API error: ${await response.text()}`);
  }
  if (ctx.stream) {
    return handleXAIStream(response, ctx);
  }
  const data = await response.json();
  const { message } = data.choices[0];
  const msg = {
    role: "assistant",
    content: message.content || ""
  };
  if (message.tool_calls) {
    msg.tool_calls = message.tool_calls;
  }
  return {
    ...ctx,
    lastResponse: msg,
    history: [...ctx.history, msg]
  };
};
|
|
// Consume a server-sent-events response body from the xAI API. Emits each
// content delta through ctx.stream, accumulates the full assistant text and
// any streamed tool calls, and returns a new ctx with the assembled message
// appended to history. Stops early if ctx.abortSignal fires.
var handleXAIStream = async (response, ctx) => {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let fullContent = "";
  let toolCalls = [];
  let buffer = "";
  try {
    for (;;) {
      if (ctx.abortSignal?.aborted) {
        break;
      }
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      // Split on newlines; keep any trailing partial line buffered until the
      // next chunk completes it.
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";
      for (const line of lines) {
        if (!line.startsWith("data: ")) continue;
        const data = line.slice(6).trim();
        if (!data || data === "[DONE]") continue;
        try {
          const parsed = JSON.parse(data);
          const delta = parsed.choices?.[0]?.delta;
          if (delta?.content) {
            fullContent += delta.content;
            if (ctx.stream) {
              ctx.stream({ type: "content", content: delta.content });
            }
          }
          if (delta?.tool_calls) {
            toolCalls = appendToolCalls2(toolCalls, delta.tool_calls);
          }
        } catch (e) {
          // Malformed SSE fragment — skip it and keep consuming.
        }
      }
    }
  } finally {
    // Always release the reader, even on abort or error.
    reader.releaseLock();
  }
  const msg = {
    role: "assistant",
    content: fullContent
  };
  if (toolCalls.length > 0) {
    msg.tool_calls = toolCalls;
  }
  return {
    ...ctx,
    lastResponse: msg,
    history: [...ctx.history, msg]
  };
};
|
|
955
|
+
|
|
794
956
|
// src/providers/index.ts
|
|
795
957
|
var callProvider = async (config, ctx) => {
|
|
796
958
|
const { provider, model: model2 } = parseModelName(config.model);
|
|
@@ -802,6 +964,8 @@ var callProvider = async (config, ctx) => {
|
|
|
802
964
|
return callAnthropic(providerConfig, ctx);
|
|
803
965
|
case "google":
|
|
804
966
|
return callGoogle(providerConfig, ctx);
|
|
967
|
+
case "xai":
|
|
968
|
+
return callXAI(providerConfig, ctx);
|
|
805
969
|
case "huggingface":
|
|
806
970
|
default:
|
|
807
971
|
return callHuggingFace(providerConfig, ctx);
|