@saltcorn/large-language-model 0.5.2 → 0.5.3
- package/index.js +56 -16
- package/package.json +1 -1
package/index.js
CHANGED
@@ -120,24 +120,64 @@ const configuration_workflow = () =>
       },
     ],
   });
-
-
-
-
+
+let initialConfig;
+const functions = (config) => {
+  initialConfig = JSON.stringify(config);
+  return {
+    llm_generate: {
+      run: async (prompt, opts) => {
+        let changedBefore = false;
+        if (JSON.stringify(config) !== initialConfig) {
+          console.error(
+            "LLM CONFIG CHANGED BEFORE COMPLETION RUN",
+            initialConfig,
+            JSON.stringify(config)
+          );
+          changedBefore = true;
+        }
+        const result = await getCompletion(config, { prompt, ...opts });
+        if (JSON.stringify(config) !== initialConfig && !changedBefore) {
+          console.error(
+            "LLM CONFIG CHANGED AFTER COMPLETION RUN",
+            initialConfig,
+            JSON.stringify(config)
+          );
+        }
+        return result;
+      },
+      isAsync: true,
+      description: "Generate text with GPT",
+      arguments: [{ name: "prompt", type: "String" }],
     },
-
-
-
-
-
-
-
+    llm_embedding: {
+      run: async (prompt, opts) => {
+        let changedBefore = false;
+        if (JSON.stringify(config) !== initialConfig) {
+          console.error(
+            "LLM CONFIG CHANGED BEFORE EMBEDDING RUN",
+            initialConfig,
+            JSON.stringify(config)
+          );
+          changedBefore = true;
+        }
+
+        const result = await getEmbedding(config, { prompt, ...opts });
+        if (JSON.stringify(config) !== initialConfig && !changedBefore) {
+          console.error(
+            "LLM CONFIG CHANGED AFTER EMBEDDING RUN",
+            initialConfig,
+            JSON.stringify(config)
+          );
+        }
+        return result;
+      },
+      isAsync: true,
+      description: "Get vector embedding",
+      arguments: [{ name: "prompt", type: "String" }],
     },
-
-
-    arguments: [{ name: "prompt", type: "String" }],
-  },
-});
+  };
+};

 module.exports = {
   sc_plugin_api_version: 1,
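The change turns `functions` into a factory that captures the plugin `config`, snapshots its serialized form when the factory runs, and logs to the console if the config has mutated before or after each completion or embedding call. The same check is written out twice, once for `llm_generate` and once for `llm_embedding`. As a rough sketch only (not code from the package), the duplicated logic could be expressed as a generic wrapper; the helper name `withConfigChangeCheck` and its parameters are hypothetical:

// Illustration only: a hypothetical helper expressing the check duplicated in the
// diff above. It compares the live config against the snapshot taken when the
// functions object was built, and warns if it changed before or after the call.
const withConfigChangeCheck = (config, initialConfig, label, fn) =>
  async (...args) => {
    let changedBefore = false;
    if (JSON.stringify(config) !== initialConfig) {
      console.error(`LLM CONFIG CHANGED BEFORE ${label} RUN`, initialConfig, JSON.stringify(config));
      changedBefore = true;
    }
    const result = await fn(...args);
    if (JSON.stringify(config) !== initialConfig && !changedBefore) {
      console.error(`LLM CONFIG CHANGED AFTER ${label} RUN`, initialConfig, JSON.stringify(config));
    }
    return result;
  };

// Hypothetical usage mirroring the shape of the new code:
// run: withConfigChangeCheck(config, initialConfig, "COMPLETION", (prompt, opts) =>
//   getCompletion(config, { prompt, ...opts })
// ),

Either way the wrapped call's result is returned unchanged, so the new checks are purely diagnostic logging and do not alter behaviour when the config stays stable.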