@saltcorn/large-language-model 0.5.3 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/index.js +125 -35
  2. package/package.json +1 -1
package/index.js CHANGED
@@ -3,6 +3,7 @@ const Form = require("@saltcorn/data/models/form");
3
3
  const db = require("@saltcorn/data/db");
4
4
  const { getCompletion, getEmbedding } = require("./generate");
5
5
  const { OPENAI_MODELS } = require("./constants.js");
6
+ const { eval_expression } = require("@saltcorn/data/models/expression");
6
7
 
7
8
  const configuration_workflow = () =>
8
9
  new Workflow({
@@ -121,29 +122,11 @@ const configuration_workflow = () =>
121
122
  ],
122
123
  });
123
124
 
124
- let initialConfig;
125
125
  const functions = (config) => {
126
- initialConfig = JSON.stringify(config);
127
126
  return {
128
127
  llm_generate: {
129
128
  run: async (prompt, opts) => {
130
- let changedBefore = false;
131
- if (JSON.stringify(config) !== initialConfig) {
132
- console.error(
133
- "LLM CONFIG CHANGED BEFORE COMPLETION RUN",
134
- initialConfig,
135
- JSON.stringify(config)
136
- );
137
- changedBefore = true;
138
- }
139
129
  const result = await getCompletion(config, { prompt, ...opts });
140
- if (JSON.stringify(config) !== initialConfig && !changedBefore) {
141
- console.error(
142
- "LLM CONFIG CHANGED AFTER COMPLETION RUN",
143
- initialConfig,
144
- JSON.stringify(config)
145
- );
146
- }
147
130
  return result;
148
131
  },
149
132
  isAsync: true,
@@ -152,24 +135,7 @@ const functions = (config) => {
152
135
  },
153
136
  llm_embedding: {
154
137
  run: async (prompt, opts) => {
155
- let changedBefore = false;
156
- if (JSON.stringify(config) !== initialConfig) {
157
- console.error(
158
- "LLM CONFIG CHANGED BEFORE EMBEDDING RUN",
159
- initialConfig,
160
- JSON.stringify(config)
161
- );
162
- changedBefore = true;
163
- }
164
-
165
138
  const result = await getEmbedding(config, { prompt, ...opts });
166
- if (JSON.stringify(config) !== initialConfig && !changedBefore) {
167
- console.error(
168
- "LLM CONFIG CHANGED AFTER EMBEDDING RUN",
169
- initialConfig,
170
- JSON.stringify(config)
171
- );
172
- }
173
139
  return result;
174
140
  },
175
141
  isAsync: true,
@@ -186,5 +152,129 @@ module.exports = {
186
152
  modelpatterns: require("./model.js"),
187
153
  actions: (config) => ({
188
154
  llm_function_call: require("./function-insert-action.js")(config),
155
+ llm_generate: {
156
+ requireRow: true,
157
+ configFields: ({ table, mode }) => {
158
+ const override_fields = [
159
+ {
160
+ name: "override_config",
161
+ label: "Override LLM configuration",
162
+ type: "Bool",
163
+ },
164
+ {
165
+ name: "override_endpoint",
166
+ label: "Endpoint",
167
+ type: "String",
168
+ showIf: { override_config: true },
169
+ },
170
+ {
171
+ name: "override_model",
172
+ label: "Model",
173
+ type: "String",
174
+ showIf: { override_config: true },
175
+ },
176
+ {
177
+ name: "override_apikey",
178
+ label: "API key",
179
+ type: "String",
180
+ showIf: { override_config: true },
181
+ },
182
+ {
183
+ name: "override_bearer",
184
+ label: "Bearer",
185
+ type: "String",
186
+ showIf: { override_config: true },
187
+ },
188
+ ];
189
+
190
+ if (mode === "workflow") {
191
+ return [
192
+ {
193
+ name: "prompt_formula",
194
+ label: "Prompt expression",
195
+ sublabel:
196
+ "JavaScript expression evalutating to the text of the prompt, based on the context",
197
+ type: "String",
198
+ required: true,
199
+ },
200
+ {
201
+ name: "answer_field",
202
+ label: "Answer variable",
203
+ sublabel: "Set the generated answer to this context variable",
204
+ type: "String",
205
+ required: true,
206
+ },
207
+ ...override_fields,
208
+ ];
209
+ }
210
+ if (table) {
211
+ const textFields = table.fields
212
+ .filter((f) => f.type?.sql_name === "text")
213
+ .map((f) => f.name);
214
+
215
+ return [
216
+ {
217
+ name: "prompt_field",
218
+ label: "Prompt field",
219
+ sublabel: "Field with the text of the prompt",
220
+ type: "String",
221
+ required: true,
222
+ attributes: { options: [...textFields, "Formula"] },
223
+ },
224
+ {
225
+ name: "prompt_formula",
226
+ label: "Prompt formula",
227
+ type: "String",
228
+ showIf: { prompt_field: "Formula" },
229
+ },
230
+ {
231
+ name: "answer_field",
232
+ label: "Answer field",
233
+ sublabel: "Output field will be set to the generated answer",
234
+ type: "String",
235
+ required: true,
236
+ attributes: { options: textFields },
237
+ },
238
+ ...override_fields,
239
+ ];
240
+ }
241
+ },
242
+ run: async ({
243
+ row,
244
+ table,
245
+ user,
246
+ mode,
247
+ configuration: {
248
+ prompt_field,
249
+ prompt_formula,
250
+ answer_field,
251
+ override_config,
252
+ override_endpoint,
253
+ override_model,
254
+ override_apikey,
255
+ override_bearer,
256
+ },
257
+ }) => {
258
+ let prompt;
259
+ if (prompt_field === "Formula" || mode === "workflow")
260
+ prompt = eval_expression(
261
+ prompt_formula,
262
+ row,
263
+ user,
264
+ "llm_generate prompt formula"
265
+ );
266
+ else prompt = row[prompt_field];
267
+ const opts = {};
268
+ if (override_config) {
269
+ opts.endpoint = override_endpoint;
270
+ opts.model = override_model;
271
+ opts.apikey = override_apikey;
272
+ opts.bearer = override_bearer;
273
+ }
274
+ const ans = await getCompletion(config, { prompt, ...opts });
275
+ if (mode === "workflow") return { [answer_field]: ans };
276
+ else await table.updateRow({ [answer_field]: ans }, row[table.pk_name]);
277
+ },
278
+ },
189
279
  }),
190
280
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@saltcorn/large-language-model",
3
- "version": "0.5.3",
3
+ "version": "0.6.0",
4
4
  "description": "Large language models and functionality for Saltcorn",
5
5
  "main": "index.js",
6
6
  "dependencies": {