@saltcorn/large-language-model 1.0.4 → 1.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/generate.js +42 -26
- package/index.js +96 -1
- package/package.json +2 -2
- package/tests/llm.test.js +3 -4
package/generate.js
CHANGED
|
@@ -424,13 +424,24 @@ const getCompletion = async (config, opts) => {
|
|
|
424
424
|
}
|
|
425
425
|
};
|
|
426
426
|
|
|
427
|
-
const getAiSdkModel = (
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
427
|
+
const getAiSdkModel = ({ config, alt_config, userCfg }, isEmbedding) => {
|
|
428
|
+
const use_config = alt_config
|
|
429
|
+
? config.alt_aisdk_configs?.find?.((acfg) => acfg.name === alt_config) ||
|
|
430
|
+
config
|
|
431
|
+
: config;
|
|
432
|
+
const use_provider = use_config.provider;
|
|
433
|
+
const model_name = isEmbedding
|
|
434
|
+
? userCfg.embed_model ||
|
|
435
|
+
userCfg.model ||
|
|
436
|
+
config.embed_model ||
|
|
437
|
+
"text-embedding-3-small"
|
|
438
|
+
: userCfg.model || use_config.model;
|
|
439
|
+
|
|
440
|
+
switch (use_provider) {
|
|
432
441
|
case "OpenAI":
|
|
433
|
-
const
|
|
442
|
+
const use_api_key =
|
|
443
|
+
userCfg.api_key || userCfg.apiKey || use_config.api_key;
|
|
444
|
+
const openai = createOpenAI({ apiKey: use_api_key });
|
|
434
445
|
return isEmbedding
|
|
435
446
|
? openai.textEmbeddingModel(model_name)
|
|
436
447
|
: openai(model_name);
|
|
@@ -439,7 +450,8 @@ const getAiSdkModel = (
|
|
|
439
450
|
if (isEmbedding)
|
|
440
451
|
throw new Error("Anthropic does not provide embedding models");
|
|
441
452
|
const anthropic = createAnthropic({
|
|
442
|
-
apiKey:
|
|
453
|
+
apiKey:
|
|
454
|
+
userCfg.api_key || userCfg.apiKey || use_config.anthropic_api_key,
|
|
443
455
|
});
|
|
444
456
|
return anthropic(model_name);
|
|
445
457
|
default:
|
|
@@ -448,7 +460,7 @@ const getAiSdkModel = (
|
|
|
448
460
|
};
|
|
449
461
|
|
|
450
462
|
const getCompletionAISDK = async (
|
|
451
|
-
|
|
463
|
+
config,
|
|
452
464
|
{
|
|
453
465
|
systemPrompt,
|
|
454
466
|
prompt,
|
|
@@ -461,12 +473,12 @@ const getCompletionAISDK = async (
|
|
|
461
473
|
...rest
|
|
462
474
|
},
|
|
463
475
|
) => {
|
|
476
|
+
const { apiKey, model, provider, temperature } = config;
|
|
464
477
|
const use_model_name = rest.model || model;
|
|
465
478
|
let model_obj = getAiSdkModel({
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
anthropic_api_key,
|
|
479
|
+
config,
|
|
480
|
+
alt_config: rest.alt_config,
|
|
481
|
+
userCfg: rest,
|
|
470
482
|
});
|
|
471
483
|
const modifyChat = (chat) => {
|
|
472
484
|
const f = (c) => {
|
|
@@ -620,13 +632,17 @@ const getCompletionOpenAICompatible = async (
|
|
|
620
632
|
}
|
|
621
633
|
if (responses_api) {
|
|
622
634
|
delete body.tool_choice;
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
tool
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
635
|
+
if (body.tools) {
|
|
636
|
+
const newtools = JSON.parse(JSON.stringify(body.tools));
|
|
637
|
+
for (const tool of newtools) {
|
|
638
|
+
if (tool.type !== "function" || !tool.function) continue;
|
|
639
|
+
tool.name = tool.function.name;
|
|
640
|
+
tool.description = tool.function.description;
|
|
641
|
+
tool.parameters = tool.function.parameters;
|
|
642
|
+
if (tool.function.required) tool.required = tool.function.required;
|
|
643
|
+
delete tool.function;
|
|
644
|
+
}
|
|
645
|
+
body.tools = newtools;
|
|
630
646
|
}
|
|
631
647
|
if (body.response_format?.type === "json_schema" && !body.text) {
|
|
632
648
|
body.text = {
|
|
@@ -970,16 +986,16 @@ const getEmbeddingOpenAICompatible = async (
|
|
|
970
986
|
return results?.data?.[0]?.embedding;
|
|
971
987
|
};
|
|
972
988
|
|
|
973
|
-
const getEmbeddingAISDK = async (
|
|
974
|
-
|
|
989
|
+
const getEmbeddingAISDK = async (
|
|
990
|
+
config,
|
|
991
|
+
{ prompt, model, debugResult, alt_config },
|
|
992
|
+
) => {
|
|
975
993
|
let providerOptions = {};
|
|
976
|
-
const model_name = model || embed_model || "text-embedding-3-small";
|
|
977
994
|
let model_obj = getAiSdkModel(
|
|
978
995
|
{
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
|
|
982
|
-
provider,
|
|
996
|
+
config,
|
|
997
|
+
userCfg: { model },
|
|
998
|
+
alt_config,
|
|
983
999
|
},
|
|
984
1000
|
true,
|
|
985
1001
|
);
|
package/index.js
CHANGED
|
@@ -65,12 +65,12 @@ ${domReady(`
|
|
|
65
65
|
required: true,
|
|
66
66
|
attributes: {
|
|
67
67
|
options: [
|
|
68
|
+
"AI SDK",
|
|
68
69
|
"OpenAI",
|
|
69
70
|
"OpenAI-compatible API",
|
|
70
71
|
"Local Ollama",
|
|
71
72
|
...(isRoot ? ["Local llama.cpp"] : []),
|
|
72
73
|
"Google Vertex AI",
|
|
73
|
-
"AI SDK",
|
|
74
74
|
],
|
|
75
75
|
onChange: "backendChange(this)",
|
|
76
76
|
},
|
|
@@ -357,6 +357,11 @@ ${domReady(`
|
|
|
357
357
|
label: "Alternative configurations",
|
|
358
358
|
showIf: { backend: "OpenAI-compatible API" },
|
|
359
359
|
},
|
|
360
|
+
{
|
|
361
|
+
input_type: "section_header",
|
|
362
|
+
label: "Alternative configurations",
|
|
363
|
+
showIf: { backend: ["OpenAI-compatible API", "AI SDK"] },
|
|
364
|
+
},
|
|
360
365
|
new FieldRepeat({
|
|
361
366
|
name: "altconfigs",
|
|
362
367
|
label: "Alternative configurations",
|
|
@@ -385,6 +390,77 @@ ${domReady(`
|
|
|
385
390
|
},
|
|
386
391
|
],
|
|
387
392
|
}),
|
|
393
|
+
new FieldRepeat({
|
|
394
|
+
name: "alt_aisdk_configs",
|
|
395
|
+
label: "Alternative configurations",
|
|
396
|
+
showIf: { backend: "AI SDK" },
|
|
397
|
+
fields: [
|
|
398
|
+
{ name: "name", label: "Configuration name", type: "String" },
|
|
399
|
+
{
|
|
400
|
+
name: "alt_provider",
|
|
401
|
+
label: "Provider", //gpt-3.5-turbo
|
|
402
|
+
type: "String",
|
|
403
|
+
required: true,
|
|
404
|
+
attributes: {
|
|
405
|
+
options: ["OpenAI", "Anthropic"],
|
|
406
|
+
},
|
|
407
|
+
},
|
|
408
|
+
{
|
|
409
|
+
name: "api_key",
|
|
410
|
+
label: "API key",
|
|
411
|
+
type: "String",
|
|
412
|
+
required: true,
|
|
413
|
+
fieldview: "password",
|
|
414
|
+
showIf: { alt_provider: "OpenAI" },
|
|
415
|
+
},
|
|
416
|
+
{
|
|
417
|
+
name: "anthropic_api_key",
|
|
418
|
+
label: "API key",
|
|
419
|
+
type: "String",
|
|
420
|
+
required: true,
|
|
421
|
+
fieldview: "password",
|
|
422
|
+
showIf: { alt_provider: "Anthropic" },
|
|
423
|
+
},
|
|
424
|
+
{
|
|
425
|
+
name: "model",
|
|
426
|
+
label: "Model", //gpt-3.5-turbo
|
|
427
|
+
type: "String",
|
|
428
|
+
required: true,
|
|
429
|
+
attributes: {
|
|
430
|
+
calcOptions: [
|
|
431
|
+
"alt_provider",
|
|
432
|
+
{
|
|
433
|
+
OpenAI: OPENAI_MODELS,
|
|
434
|
+
Anthropic: [
|
|
435
|
+
"claude-opus-4-6",
|
|
436
|
+
"claude-sonnet-4-6",
|
|
437
|
+
"claude-haiku-4-5",
|
|
438
|
+
],
|
|
439
|
+
},
|
|
440
|
+
],
|
|
441
|
+
},
|
|
442
|
+
},
|
|
443
|
+
{
|
|
444
|
+
name: "embed_model",
|
|
445
|
+
label: "Embedding model", //gpt-3.5-turbo
|
|
446
|
+
type: "String",
|
|
447
|
+
required: true,
|
|
448
|
+
showIf: { alt_provider: ["OpenAI"] },
|
|
449
|
+
attributes: {
|
|
450
|
+
calcOptions: [
|
|
451
|
+
"alt_provider",
|
|
452
|
+
{
|
|
453
|
+
OpenAI: [
|
|
454
|
+
"text-embedding-3-small",
|
|
455
|
+
"text-embedding-3-large",
|
|
456
|
+
"text-embedding-ada-002",
|
|
457
|
+
],
|
|
458
|
+
},
|
|
459
|
+
],
|
|
460
|
+
},
|
|
461
|
+
},
|
|
462
|
+
],
|
|
463
|
+
}),
|
|
388
464
|
],
|
|
389
465
|
});
|
|
390
466
|
},
|
|
@@ -465,6 +541,25 @@ const functions = (config) => {
|
|
|
465
541
|
{ name: "options", type: "JSON", tstype: "any" },
|
|
466
542
|
],
|
|
467
543
|
},
|
|
544
|
+
llm_get_configuration: {
|
|
545
|
+
run() {
|
|
546
|
+
return {
|
|
547
|
+
...config,
|
|
548
|
+
alt_config_names:
|
|
549
|
+
config?.backend === "AI SDK"
|
|
550
|
+
? config?.alt_aisdk_configs
|
|
551
|
+
?.map?.((cfg) => cfg.name)
|
|
552
|
+
.filter(Boolean)
|
|
553
|
+
: config?.backend === "OpenAI-compatible API"
|
|
554
|
+
? config?.altconfigs?.map?.((cfg) => cfg.name).filter(Boolean)
|
|
555
|
+
: [],
|
|
556
|
+
};
|
|
557
|
+
},
|
|
558
|
+
isAsync: false,
|
|
559
|
+
description: "Get the LLM configuration details",
|
|
560
|
+
tsreturns: "{alt_config_names: string[], backend: string}",
|
|
561
|
+
arguments: [],
|
|
562
|
+
},
|
|
468
563
|
};
|
|
469
564
|
};
|
|
470
565
|
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@saltcorn/large-language-model",
|
|
3
|
-
"version": "1.0.4",
|
|
3
|
+
"version": "1.0.6",
|
|
4
4
|
"description": "Large language models and functionality for Saltcorn",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"dependencies": {
|
|
@@ -21,7 +21,7 @@
|
|
|
21
21
|
"jest": "^29.7.0"
|
|
22
22
|
},
|
|
23
23
|
"scripts": {
|
|
24
|
-
"test": "jest tests --runInBand"
|
|
24
|
+
"test": "jest tests --runInBand --verbose"
|
|
25
25
|
},
|
|
26
26
|
"author": "Tom Nielsen",
|
|
27
27
|
"license": "MIT",
|
package/tests/llm.test.js
CHANGED
|
@@ -97,7 +97,7 @@ for (const nameconfig of require("./configs")) {
|
|
|
97
97
|
});
|
|
98
98
|
it("uses tools", async () => {
|
|
99
99
|
const answer = await getState().functions.llm_generate.run(
|
|
100
|
-
"Generate a list of EU capitals in a structured format using the provided tool",
|
|
100
|
+
"Generate a list of all the EU capitals in a structured format using the provided tool",
|
|
101
101
|
cities_tool,
|
|
102
102
|
);
|
|
103
103
|
expect(typeof answer).toBe("object");
|
|
@@ -131,7 +131,7 @@ for (const nameconfig of require("./configs")) {
|
|
|
131
131
|
it("tool use sequence", async () => {
|
|
132
132
|
const chat = [];
|
|
133
133
|
const answer = await getState().functions.llm_generate.run(
|
|
134
|
-
"Generate a list of EU capitals in a structured format using the provided tool",
|
|
134
|
+
"Generate a list of all the EU capitals in a structured format using the provided tool",
|
|
135
135
|
{
|
|
136
136
|
chat,
|
|
137
137
|
appendToChat: true,
|
|
@@ -166,7 +166,7 @@ for (const nameconfig of require("./configs")) {
|
|
|
166
166
|
});
|
|
167
167
|
it("uses response_format", async () => {
|
|
168
168
|
const answer = await getState().functions.llm_generate.run(
|
|
169
|
-
"Generate a list of EU capitals in JSON format",
|
|
169
|
+
"Generate a list of all the EU capitals in JSON format",
|
|
170
170
|
{
|
|
171
171
|
response_format: {
|
|
172
172
|
type: "json_schema",
|
|
@@ -178,7 +178,6 @@ for (const nameconfig of require("./configs")) {
|
|
|
178
178
|
},
|
|
179
179
|
);
|
|
180
180
|
expect(typeof answer).toBe("string");
|
|
181
|
-
console.log("answer", answer);
|
|
182
181
|
|
|
183
182
|
const json_answer = JSON.parse(answer);
|
|
184
183
|
|