@saltcorn/large-language-model 1.0.5 → 1.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/generate.js +36 -22
  2. package/index.js +96 -1
  3. package/package.json +1 -1
package/generate.js CHANGED
@@ -424,13 +424,26 @@ const getCompletion = async (config, opts) => {
424
424
  }
425
425
  };
426
426
 
427
- const getAiSdkModel = (
428
- { provider, api_key, model_name, anthropic_api_key },
429
- isEmbedding,
430
- ) => {
431
- switch (provider) {
427
+ const getAiSdkModel = ({ config, alt_config, userCfg }, isEmbedding) => {
428
+ const use_config = alt_config
429
+ ? config.alt_aisdk_configs?.find?.((acfg) => acfg.name === alt_config) ||
430
+ config
431
+ : config;
432
+
433
+ const use_provider = use_config.alt_provider || use_config.provider;
434
+
435
+ const model_name = isEmbedding
436
+ ? userCfg.embed_model ||
437
+ userCfg.model ||
438
+ config.embed_model ||
439
+ "text-embedding-3-small"
440
+ : userCfg.model || use_config.model;
441
+
442
+ switch (use_provider) {
432
443
  case "OpenAI":
433
- const openai = createOpenAI({ apiKey: api_key });
444
+ const use_api_key =
445
+ userCfg.api_key || userCfg.apiKey || use_config.api_key;
446
+ const openai = createOpenAI({ apiKey: use_api_key });
434
447
  return isEmbedding
435
448
  ? openai.textEmbeddingModel(model_name)
436
449
  : openai(model_name);
@@ -439,16 +452,17 @@ const getAiSdkModel = (
439
452
  if (isEmbedding)
440
453
  throw new Error("Anthropic does not provide embedding models");
441
454
  const anthropic = createAnthropic({
442
- apiKey: anthropic_api_key,
455
+ apiKey:
456
+ userCfg.api_key || userCfg.apiKey || use_config.anthropic_api_key,
443
457
  });
444
458
  return anthropic(model_name);
445
459
  default:
446
- throw new Error("Provider not found: " + provider);
460
+ throw new Error("Provider not found: " + use_provider);
447
461
  }
448
462
  };
449
463
 
450
464
  const getCompletionAISDK = async (
451
- { apiKey, model, provider, temperature, anthropic_api_key },
465
+ config,
452
466
  {
453
467
  systemPrompt,
454
468
  prompt,
@@ -461,12 +475,12 @@ const getCompletionAISDK = async (
461
475
  ...rest
462
476
  },
463
477
  ) => {
478
+ const { apiKey, model, provider, temperature } = config;
464
479
  const use_model_name = rest.model || model;
465
480
  let model_obj = getAiSdkModel({
466
- model_name: use_model_name,
467
- api_key: api_key || apiKey,
468
- provider,
469
- anthropic_api_key,
481
+ config,
482
+ alt_config: rest.alt_config,
483
+ userCfg: rest,
470
484
  });
471
485
  const modifyChat = (chat) => {
472
486
  const f = (c) => {
@@ -621,7 +635,7 @@ const getCompletionOpenAICompatible = async (
621
635
  if (responses_api) {
622
636
  delete body.tool_choice;
623
637
  if (body.tools) {
624
- const newtools = JSON.parse(JSON.stringify(body.tools))
638
+ const newtools = JSON.parse(JSON.stringify(body.tools));
625
639
  for (const tool of newtools) {
626
640
  if (tool.type !== "function" || !tool.function) continue;
627
641
  tool.name = tool.function.name;
@@ -630,7 +644,7 @@ const getCompletionOpenAICompatible = async (
630
644
  if (tool.function.required) tool.required = tool.function.required;
631
645
  delete tool.function;
632
646
  }
633
- body.tools = newtools
647
+ body.tools = newtools;
634
648
  }
635
649
  if (body.response_format?.type === "json_schema" && !body.text) {
636
650
  body.text = {
@@ -974,16 +988,16 @@ const getEmbeddingOpenAICompatible = async (
974
988
  return results?.data?.[0]?.embedding;
975
989
  };
976
990
 
977
- const getEmbeddingAISDK = async (config, { prompt, model, debugResult }) => {
978
- const { provider, apiKey, embed_model } = config;
991
+ const getEmbeddingAISDK = async (
992
+ config,
993
+ { prompt, model, debugResult, alt_config },
994
+ ) => {
979
995
  let providerOptions = {};
980
- const model_name = model || embed_model || "text-embedding-3-small";
981
996
  let model_obj = getAiSdkModel(
982
997
  {
983
- ...config,
984
- model_name,
985
- api_key: apiKey,
986
- provider,
998
+ config,
999
+ userCfg: { model },
1000
+ alt_config,
987
1001
  },
988
1002
  true,
989
1003
  );
package/index.js CHANGED
@@ -65,12 +65,12 @@ ${domReady(`
65
65
  required: true,
66
66
  attributes: {
67
67
  options: [
68
+ "AI SDK",
68
69
  "OpenAI",
69
70
  "OpenAI-compatible API",
70
71
  "Local Ollama",
71
72
  ...(isRoot ? ["Local llama.cpp"] : []),
72
73
  "Google Vertex AI",
73
- "AI SDK",
74
74
  ],
75
75
  onChange: "backendChange(this)",
76
76
  },
@@ -357,6 +357,11 @@ ${domReady(`
357
357
  label: "Alternative configurations",
358
358
  showIf: { backend: "OpenAI-compatible API" },
359
359
  },
360
+ {
361
+ input_type: "section_header",
362
+ label: "Alternative configurations",
363
+ showIf: { backend: ["OpenAI-compatible API", "AI SDK"] },
364
+ },
360
365
  new FieldRepeat({
361
366
  name: "altconfigs",
362
367
  label: "Alternative configurations",
@@ -385,6 +390,77 @@ ${domReady(`
385
390
  },
386
391
  ],
387
392
  }),
393
+ new FieldRepeat({
394
+ name: "alt_aisdk_configs",
395
+ label: "Alternative configurations",
396
+ showIf: { backend: "AI SDK" },
397
+ fields: [
398
+ { name: "name", label: "Configuration name", type: "String" },
399
+ {
400
+ name: "alt_provider",
401
+ label: "Provider", //gpt-3.5-turbo
402
+ type: "String",
403
+ required: true,
404
+ attributes: {
405
+ options: ["OpenAI", "Anthropic"],
406
+ },
407
+ },
408
+ {
409
+ name: "api_key",
410
+ label: "API key",
411
+ type: "String",
412
+ required: true,
413
+ fieldview: "password",
414
+ showIf: { alt_provider: "OpenAI" },
415
+ },
416
+ {
417
+ name: "anthropic_api_key",
418
+ label: "API key",
419
+ type: "String",
420
+ required: true,
421
+ fieldview: "password",
422
+ showIf: { alt_provider: "Anthropic" },
423
+ },
424
+ {
425
+ name: "model",
426
+ label: "Model", //gpt-3.5-turbo
427
+ type: "String",
428
+ required: true,
429
+ attributes: {
430
+ calcOptions: [
431
+ "alt_provider",
432
+ {
433
+ OpenAI: OPENAI_MODELS,
434
+ Anthropic: [
435
+ "claude-opus-4-6",
436
+ "claude-sonnet-4-6",
437
+ "claude-haiku-4-5",
438
+ ],
439
+ },
440
+ ],
441
+ },
442
+ },
443
+ {
444
+ name: "embed_model",
445
+ label: "Embedding model", //gpt-3.5-turbo
446
+ type: "String",
447
+ required: true,
448
+ showIf: { alt_provider: ["OpenAI"] },
449
+ attributes: {
450
+ calcOptions: [
451
+ "alt_provider",
452
+ {
453
+ OpenAI: [
454
+ "text-embedding-3-small",
455
+ "text-embedding-3-large",
456
+ "text-embedding-ada-002",
457
+ ],
458
+ },
459
+ ],
460
+ },
461
+ },
462
+ ],
463
+ }),
388
464
  ],
389
465
  });
390
466
  },
@@ -465,6 +541,25 @@ const functions = (config) => {
465
541
  { name: "options", type: "JSON", tstype: "any" },
466
542
  ],
467
543
  },
544
+ llm_get_configuration: {
545
+ run() {
546
+ return {
547
+ ...config,
548
+ alt_config_names:
549
+ config?.backend === "AI SDK"
550
+ ? config?.alt_aisdk_configs
551
+ ?.map?.((cfg) => cfg.name)
552
+ .filter(Boolean)
553
+ : config?.backend === "OpenAI-compatible API"
554
+ ? config?.altconfigs?.map?.((cfg) => cfg.name).filter(Boolean)
555
+ : [],
556
+ };
557
+ },
558
+ isAsync: false,
559
+ description: "Get the LLM configuration details",
560
+ tsreturns: "{alt_config_names: string[], backend: string}",
561
+ arguments: [],
562
+ },
468
563
  };
469
564
  };
470
565
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@saltcorn/large-language-model",
3
- "version": "1.0.5",
3
+ "version": "1.0.7",
4
4
  "description": "Large language models and functionality for Saltcorn",
5
5
  "main": "index.js",
6
6
  "dependencies": {