@saltcorn/large-language-model 0.4.2 → 0.4.4
- package/generate.js +7 -3
- package/index.js +6 -0
- package/package.json +1 -1
package/generate.js CHANGED

```diff
@@ -33,7 +33,7 @@ const getEmbedding = async (config, opts) => {
   const { Ollama } = ollamaMod;
   const ollama = new Ollama();
   const olres = await ollama.embeddings({
-    model: opts?.model || config.model,
+    model: opts?.model || config.embed_model || config.model,
     prompt: opts.prompt,
   });
   //console.log("embedding response ", olres);
```
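For the Local Ollama backend, the embedding model is now resolved with an extra fallback step: an explicit per-call model wins, then the new `embed_model` setting, then the general chat `model`. A minimal sketch of that resolution order (the helper name and the model names below are illustrative, not part of the package):

```js
// Resolution order introduced here (sketch only; names are hypothetical):
const config = { model: "llama3", embed_model: "nomic-embed-text" };

const resolveEmbedModel = (config, opts) =>
  opts?.model || config.embed_model || config.model;

resolveEmbedModel(config, {});                             // "nomic-embed-text"
resolveEmbedModel(config, { model: "mxbai-embed-large" }); // per-call override wins
resolveEmbedModel({ model: "llama3" }, {});                // falls back to the chat model
```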
```diff
@@ -140,8 +140,11 @@ const getCompletionOpenAICompatible = async (
   );
 };

-const getEmbeddingOpenAICompatible = async (config, { prompt, model }) => {
-  const { embeddingsEndpoint, bearer } = config;
+const getEmbeddingOpenAICompatible = async (
+  config,
+  { prompt, model, debugResult }
+) => {
+  const { embeddingsEndpoint, bearer, embed_model } = config;
   const headers = {
     "Content-Type": "application/json",
     Accept: "application/json",
@@ -162,6 +165,7 @@ const getEmbeddingOpenAICompatible = async (config, { prompt, model }) => {
   if (debugResult)
     console.log("OpenAI response", JSON.stringify(results, null, 2));
   if (results.error) throw new Error(`OpenAI error: ${results.error.message}`);
+  if (Array.isArray(prompt)) return results?.data?.map?.((d) => d?.embedding);
   return results?.data?.[0]?.embedding;
 };
 module.exports = { getCompletion, getEmbedding };
```
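For the OpenAI-compatible backend, `debugResult` is now read from the call options and an array prompt yields one embedding per input rather than only the first result. A rough usage sketch, assuming `generate.js` can be required directly and that `getEmbedding` forwards its options to `getEmbeddingOpenAICompatible` when this backend is configured; the endpoint, API-key handling, and model name are placeholders:

```js
const { getEmbedding } = require("@saltcorn/large-language-model/generate");

async function embedExamples() {
  const config = {
    backend: "OpenAI-compatible API",
    embeddingsEndpoint: "https://api.example.com/v1/embeddings", // placeholder
    bearer: process.env.LLM_API_KEY,                             // placeholder
    embed_model: "text-embedding-3-small",                       // placeholder
  };

  // A single string prompt still resolves to one embedding vector.
  const single = await getEmbedding(config, { prompt: "hello world" });

  // An array prompt now resolves to an array of vectors (one per input),
  // via the Array.isArray(prompt) branch added in this release.
  const batch = await getEmbedding(config, {
    prompt: ["first document", "second document"],
    debugResult: true, // logs the raw API response
  });

  return { single, batch };
}
```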
package/index.js CHANGED

```diff
@@ -96,6 +96,12 @@ const configuration_workflow = () =>
         type: "String",
         showIf: { backend: ["OpenAI-compatible API", "Local Ollama"] },
       },
+      {
+        name: "embed_model",
+        label: "Embedding model",
+        type: "String",
+        showIf: { backend: "Local Ollama" },
+      },
       {
         name: "endpoint",
         label: "Chat completions endpoint",
```
|