@aj-archipelago/cortex 1.0.18 → 1.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/pathwayTools.js
CHANGED
@@ -1,4 +1,5 @@
 // pathwayTools.js
+import { encode , decode } from 'gpt-3-encoder';
 
 // callPathway - call a pathway from another pathway
 const callPathway = async (config, pathwayName, args) => {
@@ -12,4 +13,12 @@ const callPathway = async (config, pathwayName, args) => {
     return data?.result;
 };
 
-export { callPathway };
+const gpt3Encode = (text) => {
+    return encode(text);
+}
+
+const gpt3Decode = (text) => {
+    return decode(text);
+}
+
+export { callPathway, gpt3Encode, gpt3Decode };
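The new gpt3Encode and gpt3Decode helpers are thin wrappers over the encode and decode functions of the gpt-3-encoder package, so other pathway code can count or trim tokens without importing the encoder directly. Below is a minimal sketch of how a consumer might combine them with callPathway; the truncateToTokenLimit helper, the 'summarize' pathway name, and the token budget are illustrative assumptions, not part of the package.

// Hypothetical consumer of pathwayTools.js; only callPathway, gpt3Encode,
// and gpt3Decode come from the package. Everything else is illustrative.
import { callPathway, gpt3Encode, gpt3Decode } from './pathwayTools.js';

// Trim text to a token budget: encode to token ids, slice, decode back to text.
const truncateToTokenLimit = (text, maxTokens) => {
    const tokens = gpt3Encode(text);
    if (tokens.length <= maxTokens) return text;
    return gpt3Decode(tokens.slice(0, maxTokens));
};

// Call another pathway with the trimmed text; callPathway resolves to data?.result.
const summarize = async (config, text) => {
    const trimmed = truncateToTokenLimit(text, 3000);
    return await callPathway(config, 'summarize', { text: trimmed });
};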
package/package.json
CHANGED
@@ -204,7 +204,7 @@ class PathwayResolver {
     async promptAndParse(args) {
         // Get saved context from contextId or change contextId if needed
         const { contextId } = args;
-        this.savedContextId = contextId ? contextId :
+        this.savedContextId = contextId ? contextId : uuidv4();
         this.savedContext = contextId ? (getv && (await getv(contextId)) || {}) : {};
 
         // Save the context before processing the request
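The promptAndParse change gives savedContextId a generated fallback when the caller does not supply a contextId. That implies uuidv4 is in scope in this module; the corresponding import is not visible in this diff, but the uuid package's standard named export would look like the sketch below, which only illustrates the fallback behaviour.

// Sketch of the contextId fallback, assuming the standard `uuid` import;
// the actual import statement in the package is not shown in this diff.
import { v4 as uuidv4 } from 'uuid';

const resolveContextId = (contextId) => contextId ? contextId : uuidv4();

console.log(resolveContextId('existing-context')); // 'existing-context'
console.log(resolveContextId(undefined));          // a fresh random v4 UUID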
@@ -197,6 +197,13 @@ class ModelPlugin {
         }
     });
 
+    // Clean up any null messages if they exist
+    expandedMessages.forEach((message) => {
+        if (typeof message === 'object' && message.content === null) {
+            message.content = '';
+        }
+    });
+
     return expandedMessages;
 }
 
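The ModelPlugin change normalizes any message whose content is null to an empty string before the expanded messages are returned; a plausible motivation is that downstream chat completion APIs reject messages with null content. Here is a stand-alone illustration of the same cleanup, using example message objects that are not taken from the package.

// Example data only; the real expandedMessages array is built by ModelPlugin.
const expandedMessages = [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'assistant', content: null }, // would otherwise be sent as null
    { role: 'user', content: 'Hello' },
];

// Same cleanup as the diff: replace null content with an empty string in place.
expandedMessages.forEach((message) => {
    if (typeof message === 'object' && message.content === null) {
        message.content = '';
    }
});

console.log(expandedMessages[1]); // { role: 'assistant', content: '' }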
@@ -57,7 +57,7 @@ class OpenAIChatPlugin extends ModelPlugin {
     if (isPalmFormat) {
         const context = modelPrompt.context || '';
         const examples = modelPrompt.examples || [];
-        requestMessages = this.convertPalmToOpenAIMessages(context, examples,
+        requestMessages = this.convertPalmToOpenAIMessages(context, examples, modelPromptMessages);
     }
 
     // Check if the token length exceeds the model's max token length