@saltcorn/large-language-model 0.5.4 → 0.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +145 -36
- package/package.json +1 -1
package/index.js
CHANGED
@@ -1,8 +1,10 @@
 const Workflow = require("@saltcorn/data/models/workflow");
 const Form = require("@saltcorn/data/models/form");
+const FieldRepeat = require("@saltcorn/data/models/fieldrepeat");
 const db = require("@saltcorn/data/db");
 const { getCompletion, getEmbedding } = require("./generate");
 const { OPENAI_MODELS } = require("./constants.js");
+const { eval_expression } = require("@saltcorn/data/models/expression");
 
 const configuration_workflow = () =>
   new Workflow({
@@ -80,6 +82,12 @@ const configuration_workflow = () =>
           type: "String",
           showIf: { backend: "OpenAI-compatible API" },
         },
+        {
+          name: "api_key",
+          label: "API key",
+          type: "String",
+          showIf: { backend: "OpenAI-compatible API" },
+        },
         {
           name: "model",
           label: "Model",
@@ -114,6 +122,39 @@ const configuration_workflow = () =>
             "Optional. Example: http://localhost:11434/api/embeddings",
           showIf: { backend: "Local Ollama" },
         },
+        {
+          input_type: "section_header",
+          label: "Alternative configurations",
+          showIf: { backend: "OpenAI-compatible API" },
+        },
+        new FieldRepeat({
+          name: "altconfigs",
+          label: "Alternative configurations",
+          showIf: { backend: "OpenAI-compatible API" },
+          fields: [
+            { name: "name", label: "Configuration name", type: "String" },
+            {
+              name: "model",
+              label: "Model",
+              type: "String",
+            },
+            {
+              name: "endpoint",
+              label: "Endpoint",
+              type: "String",
+            },
+            {
+              name: "bearer_auth",
+              label: "Bearer Auth",
+              type: "String",
+            },
+            {
+              name: "api_key",
+              label: "API key",
+              type: "String",
+            },
+          ],
+        }),
       ],
     });
   },
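The FieldRepeat above stores its rows as an array under the altconfigs key of the plugin configuration (the diff itself reads config.altconfigs later on). A minimal sketch of the resulting shape — the keys match the fields defined above, but every value here is invented for illustration:

// Illustrative only: values are made up; keys mirror the form fields above.
const exampleConfig = {
  backend: "OpenAI-compatible API",
  endpoint: "https://llm.example.com/v1/chat/completions", // hypothetical
  api_key: "sk-...",
  model: "gpt-4o-mini",
  // Each repeat row becomes one object; `name` is what the new
  // llm_generate action offers as its override_config option.
  altconfigs: [
    {
      name: "local-llama",
      model: "llama3",
      endpoint: "http://localhost:11434/v1/chat/completions",
      bearer_auth: "",
      api_key: "",
    },
  ],
};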
@@ -121,30 +162,11 @@ const configuration_workflow = () =>
     ],
   });
 
-let initialConfig;
 const functions = (config) => {
-  initialConfig = JSON.stringify(config);
-  console.log("Initialising LLM functions with Config", config);
   return {
     llm_generate: {
       run: async (prompt, opts) => {
-        let changedBefore = false;
-        if (JSON.stringify(config) !== initialConfig) {
-          console.error(
-            "LLM CONFIG CHANGED BEFORE COMPLETION RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-          changedBefore = true;
-        }
         const result = await getCompletion(config, { prompt, ...opts });
-        if (JSON.stringify(config) !== initialConfig && !changedBefore) {
-          console.error(
-            "LLM CONFIG CHANGED AFTER COMPLETION RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-        }
         return result;
       },
       isAsync: true,
@@ -153,24 +175,7 @@ const functions = (config) => {
     },
     llm_embedding: {
       run: async (prompt, opts) => {
-        let changedBefore = false;
-        if (JSON.stringify(config) !== initialConfig) {
-          console.error(
-            "LLM CONFIG CHANGED BEFORE EMBEDDING RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-          changedBefore = true;
-        }
-
         const result = await getEmbedding(config, { prompt, ...opts });
-        if (JSON.stringify(config) !== initialConfig && !changedBefore) {
-          console.error(
-            "LLM CONFIG CHANGED AFTER EMBEDDING RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-        }
         return result;
       },
       isAsync: true,
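With the initialConfig instrumentation gone, both functions reduce to thin async wrappers around getCompletion and getEmbedding. A minimal sketch of an equivalent direct call, assuming a config object of the shape shown earlier; the prompt text and the extra option are illustrative, not part of the package:

const { getCompletion } = require("./generate");

async function demo(config) {
  // Equivalent to what llm_generate.run now does: spread caller
  // options into the request and return the completion.
  return await getCompletion(config, {
    prompt: "Summarise this ticket in one sentence",
    temperature: 0.2, // hypothetical option, passed through via ...opts
  });
}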
@@ -187,5 +192,109 @@ module.exports = {
   modelpatterns: require("./model.js"),
   actions: (config) => ({
     llm_function_call: require("./function-insert-action.js")(config),
+    llm_generate: {
+      requireRow: true,
+      configFields: ({ table, mode }) => {
+        const override_fields =
+          config.backend === "OpenAI-compatible API" &&
+          (config.altconfigs || []).filter((c) => c.name).length
+            ? [
+                {
+                  name: "override_config",
+                  label: "Alternative LLM configuration",
+                  type: "String",
+                  attributes: { options: config.altconfigs.map((c) => c.name) },
+                },
+              ]
+            : [];
+
+        if (mode === "workflow") {
+          return [
+            {
+              name: "prompt_formula",
+              label: "Prompt expression",
+              sublabel:
+                "JavaScript expression evaluating to the text of the prompt, based on the context",
+              type: "String",
+              required: true,
+            },
+            {
+              name: "answer_field",
+              label: "Answer variable",
+              sublabel: "Set the generated answer to this context variable",
+              type: "String",
+              required: true,
+            },
+            ...override_fields,
+          ];
+        }
+        if (table) {
+          const textFields = table.fields
+            .filter((f) => f.type?.sql_name === "text")
+            .map((f) => f.name);
+
+          return [
+            {
+              name: "prompt_field",
+              label: "Prompt field",
+              sublabel: "Field with the text of the prompt",
+              type: "String",
+              required: true,
+              attributes: { options: [...textFields, "Formula"] },
+            },
+            {
+              name: "prompt_formula",
+              label: "Prompt formula",
+              type: "String",
+              showIf: { prompt_field: "Formula" },
+            },
+            {
+              name: "answer_field",
+              label: "Answer field",
+              sublabel: "Output field will be set to the generated answer",
+              type: "String",
+              required: true,
+              attributes: { options: textFields },
+            },
+            ...override_fields,
+          ];
+        }
+      },
+      run: async ({
+        row,
+        table,
+        user,
+        mode,
+        configuration: {
+          prompt_field,
+          prompt_formula,
+          answer_field,
+          override_config,
+        },
+      }) => {
+        let prompt;
+        if (prompt_field === "Formula" || mode === "workflow")
+          prompt = eval_expression(
+            prompt_formula,
+            row,
+            user,
+            "llm_generate prompt formula"
+          );
+        else prompt = row[prompt_field];
+        const opts = {};
+        if (override_config) {
+          const altcfg = config.altconfigs.find(
+            (c) => c.name === override_config
+          );
+          opts.endpoint = altcfg.endpoint;
+          opts.model = altcfg.model;
+          opts.apikey = altcfg.api_key;
+          opts.bearer = altcfg.bearer_auth;
+        }
+        const ans = await getCompletion(config, { prompt, ...opts });
+        if (mode === "workflow") return { [answer_field]: ans };
+        else await table.updateRow({ [answer_field]: ans }, row[table.pk_name]);
+      },
+    },
   }),
 };
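Taken together, the new action builds its prompt from a field, a formula, or a workflow-context expression, then optionally swaps in one of the alternative configurations before calling getCompletion. A condensed sketch of that override resolution, mirroring the run handler above — resolveOverride is an illustrative helper name, not part of the package:

// Illustrative helper; mirrors the override block in llm_generate.run.
function resolveOverride(config, override_config) {
  const opts = {};
  if (override_config) {
    const altcfg = (config.altconfigs || []).find(
      (c) => c.name === override_config
    );
    if (altcfg) {
      // The opts keys are what getCompletion consumes; the altcfg keys
      // are the field names from the alternative-configurations repeat.
      opts.endpoint = altcfg.endpoint;
      opts.model = altcfg.model;
      opts.apikey = altcfg.api_key;
      opts.bearer = altcfg.bearer_auth;
    }
  }
  return opts;
}

// Usage sketch:
//   const ans = await getCompletion(config, {
//     prompt,
//     ...resolveOverride(config, override_config),
//   });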