@saltcorn/large-language-model 0.5.4 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +125 -36
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -3,6 +3,7 @@ const Form = require("@saltcorn/data/models/form");
 const db = require("@saltcorn/data/db");
 const { getCompletion, getEmbedding } = require("./generate");
 const { OPENAI_MODELS } = require("./constants.js");
+const { eval_expression } = require("@saltcorn/data/models/expression");
 
 const configuration_workflow = () =>
   new Workflow({
@@ -121,30 +122,11 @@ const configuration_workflow = () =>
     ],
   });
 
-let initialConfig;
 const functions = (config) => {
-  initialConfig = JSON.stringify(config);
-  console.log("Initialising LLM functions with Config", config);
   return {
     llm_generate: {
       run: async (prompt, opts) => {
-        let changedBefore = false;
-        if (JSON.stringify(config) !== initialConfig) {
-          console.error(
-            "LLM CONFIG CHANGED BEFORE COMPLETION RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-          changedBefore = true;
-        }
         const result = await getCompletion(config, { prompt, ...opts });
-        if (JSON.stringify(config) !== initialConfig && !changedBefore) {
-          console.error(
-            "LLM CONFIG CHANGED AFTER COMPLETION RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-        }
         return result;
       },
       isAsync: true,
@@ -153,24 +135,7 @@ const functions = (config) => {
     },
     llm_embedding: {
       run: async (prompt, opts) => {
-        let changedBefore = false;
-        if (JSON.stringify(config) !== initialConfig) {
-          console.error(
-            "LLM CONFIG CHANGED BEFORE EMBEDDING RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-          changedBefore = true;
-        }
-
         const result = await getEmbedding(config, { prompt, ...opts });
-        if (JSON.stringify(config) !== initialConfig && !changedBefore) {
-          console.error(
-            "LLM CONFIG CHANGED AFTER EMBEDDING RUN",
-            initialConfig,
-            JSON.stringify(config)
-          );
-        }
         return result;
       },
       isAsync: true,
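
For orientation, a minimal sketch of how the run handlers kept above are invoked once the plugin is loaded. The pluginConfig object and the prompt text are illustrative assumptions, not package code; a real call needs the plugin's saved configuration and valid API credentials.

  // Illustrative only: calling the functions(config) factory defined above.
  const { llm_generate, llm_embedding } = functions(pluginConfig);

  (async () => {
    const answer = await llm_generate.run("Write a haiku about databases", {});
    const embedding = await llm_embedding.run("databases", {});
    console.log(answer, embedding);
  })();
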
@@ -187,5 +152,129 @@ module.exports = {
   modelpatterns: require("./model.js"),
   actions: (config) => ({
     llm_function_call: require("./function-insert-action.js")(config),
+    llm_generate: {
+      requireRow: true,
+      configFields: ({ table, mode }) => {
+        const override_fields = [
+          {
+            name: "override_config",
+            label: "Override LLM configuration",
+            type: "Bool",
+          },
+          {
+            name: "override_endpoint",
+            label: "Endpoint",
+            type: "String",
+            showIf: { override_config: true },
+          },
+          {
+            name: "override_model",
+            label: "Model",
+            type: "String",
+            showIf: { override_config: true },
+          },
+          {
+            name: "override_apikey",
+            label: "API key",
+            type: "String",
+            showIf: { override_config: true },
+          },
+          {
+            name: "override_bearer",
+            label: "Bearer",
+            type: "String",
+            showIf: { override_config: true },
+          },
+        ];
+
+        if (mode === "workflow") {
+          return [
+            {
+              name: "prompt_formula",
+              label: "Prompt expression",
+              sublabel:
+                "JavaScript expression evalutating to the text of the prompt, based on the context",
+              type: "String",
+              required: true,
+            },
+            {
+              name: "answer_field",
+              label: "Answer variable",
+              sublabel: "Set the generated answer to this context variable",
+              type: "String",
+              required: true,
+            },
+            ...override_fields,
+          ];
+        }
+        if (table) {
+          const textFields = table.fields
+            .filter((f) => f.type?.sql_name === "text")
+            .map((f) => f.name);
+
+          return [
+            {
+              name: "prompt_field",
+              label: "Prompt field",
+              sublabel: "Field with the text of the prompt",
+              type: "String",
+              required: true,
+              attributes: { options: [...textFields, "Formula"] },
+            },
+            {
+              name: "prompt_formula",
+              label: "Prompt formula",
+              type: "String",
+              showIf: { prompt_field: "Formula" },
+            },
+            {
+              name: "answer_field",
+              label: "Answer field",
+              sublabel: "Output field will be set to the generated answer",
+              type: "String",
+              required: true,
+              attributes: { options: textFields },
+            },
+            ...override_fields,
+          ];
+        }
+      },
+      run: async ({
+        row,
+        table,
+        user,
+        mode,
+        configuration: {
+          prompt_field,
+          prompt_formula,
+          answer_field,
+          override_config,
+          override_endpoint,
+          override_model,
+          override_apikey,
+          override_bearer,
+        },
+      }) => {
+        let prompt;
+        if (prompt_field === "Formula" || mode === "workflow")
+          prompt = eval_expression(
+            prompt_formula,
+            row,
+            user,
+            "llm_generate prompt formula"
+          );
+        else prompt = row[prompt_field];
+        const opts = {};
+        if (override_config) {
+          opts.endpoint = override_endpoint;
+          opts.model = override_model;
+          opts.apikey = override_apikey;
+          opts.bearer = override_bearer;
+        }
+        const ans = await getCompletion(config, { prompt, ...opts });
+        if (mode === "workflow") return { [answer_field]: ans };
+        else await table.updateRow({ [answer_field]: ans }, row[table.pk_name]);
+      },
+    },
   }),
 };
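
To make the behaviour of the new llm_generate action concrete, here is a hypothetical, self-contained sketch of its table-mode run path with the prompt taken from a formula. The stubbed getCompletionStub, evalFormula, demoTable, demoRow and the column names are stand-ins for demonstration only, not package code; the plugin itself uses the real getCompletion and eval_expression as shown in the diff above.

  // Hypothetical stand-ins, for illustration only.
  const getCompletionStub = async (config, { prompt }) => `LLM answer to: ${prompt}`;

  // Stand-in for eval_expression: evaluates a JS expression with row fields in scope.
  const evalFormula = (formula, row) =>
    new Function(...Object.keys(row), `return (${formula});`)(...Object.values(row));

  const demoTable = {
    pk_name: "id",
    updateRow: async (upd, id) => console.log("update row", id, "with", upd),
  };
  const demoRow = { id: 1, title: "Saltcorn", body: "A no-code database app builder." };

  (async () => {
    // Table mode with prompt_field = "Formula": the prompt comes from a formula.
    const prompt = evalFormula('"Summarize: " + body', demoRow);
    const ans = await getCompletionStub({}, { prompt });
    // answer_field = "summary": the completion is written back to the row.
    await demoTable.updateRow({ summary: ans }, demoRow[demoTable.pk_name]);
  })();

In workflow mode the action instead returns { [answer_field]: ans }, so the generated text lands in the workflow context rather than in a table row.
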
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@saltcorn/large-language-model",
-  "version": "0.5.4",
+  "version": "0.6.0",
   "description": "Large language models and functionality for Saltcorn",
   "main": "index.js",
   "dependencies": {