@threaded/ai 1.0.26 → 1.0.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.lore +65 -0
- package/dist/index.cjs +56 -4
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +56 -4
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -1037,10 +1037,61 @@ var handleGoogleStream = async (response, ctx) => {
|
|
|
1037
1037
|
};
|
|
1038
1038
|
|
|
1039
1039
|
// src/providers/huggingface.ts
// Module-level memo of loaded transformers.js pipelines, keyed by model id,
// so repeated calls reuse the (expensive) generator instead of reloading it.
var modelCache2 = /* @__PURE__ */ new Map();
|
|
1041
|
+
// Build a chat-message array for the pipeline: an optional leading system
// message from `instructions`, followed by the conversation history with
// each turn narrowed to exactly { role, content } (extra fields dropped).
var formatMessages = (instructions, history) => {
  const systemPrefix = instructions
    ? [{ role: "system", content: instructions }]
    : [];
  const turns = history.map((msg) => ({ role: msg.role, content: msg.content }));
  return [...systemPrefix, ...turns];
};
|
|
1040
1051
|
// Run one chat completion through a locally loaded Hugging Face
// transformers.js text-generation pipeline.
//
// config: { model, instructions, schema } — when `schema` is present, its
//   JSON schema is folded into the system prompt as strict-JSON output
//   instructions.
// ctx: conversation context carrying `history`, an optional `stream`
//   callback, and a running `usage` accumulator.
// Returns a new ctx with the assistant reply set as `lastResponse` and
// appended to `history`.
var callHuggingFace = async (config, ctx) => {
  const { model: model2, instructions, schema } = config;
  const { pipeline } = await import("@huggingface/transformers");

  // Pipelines are expensive to construct; memoize one generator per model id.
  if (!modelCache2.has(model2)) {
    const loaded = await pipeline("text-generation", model2, {
      dtype: "q4f16"
    });
    modelCache2.set(model2, loaded);
  }
  const generator = modelCache2.get(model2);

  const messages = formatMessages(instructions, ctx.history);

  if (schema) {
    // Append JSON-output instructions to the existing system message, or
    // prepend a new system message if the conversation has none.
    const schemaInstructions = [
      "you must respond with valid JSON matching this schema:",
      JSON.stringify(schema.schema, null, 2),
      "respond ONLY with the JSON object, no other text."
    ].join("\n");
    const systemMsg = messages.find((m) => m.role === "system");
    if (systemMsg) {
      systemMsg.content += "\n\n" + schemaInstructions;
    } else {
      messages.unshift({ role: "system", content: schemaInstructions });
    }
  }

  const output = await generator(messages, {
    max_new_tokens: 2048,
    do_sample: false
  });

  // transformers.js returns the full transcript; the reply is the final turn.
  const transcript = output[0].generated_text;
  const content = transcript.at(-1)?.content || "";
  const msg = {
    role: "assistant",
    content
  };

  if (ctx.stream) {
    ctx.stream({ type: "content", content });
  }

  return {
    ...ctx,
    lastResponse: msg,
    history: [...ctx.history, msg],
    // Local inference reports no token counts, so usage stays flat.
    usage: addUsage(ctx.usage, 0, 0, 0)
  };
};
|
|
1045
1096
|
|
|
1046
1097
|
// src/providers/xai.ts
|
|
@@ -1357,8 +1408,9 @@ var callProvider = async (config, ctx) => {
|
|
|
1357
1408
|
case "local":
|
|
1358
1409
|
return callLocal(providerConfig, ctx);
|
|
1359
1410
|
case "huggingface":
|
|
1360
|
-
default:
|
|
1361
1411
|
return callHuggingFace(providerConfig, ctx);
|
|
1412
|
+
default:
|
|
1413
|
+
return callHuggingFace({ ...config }, ctx);
|
|
1362
1414
|
}
|
|
1363
1415
|
};
|
|
1364
1416
|
|