@sprucelabs/sprucebot-llm 11.2.0 → 11.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/bots/adapters/OpenAiMessageBuilder.d.ts +1 -2
- package/build/bots/adapters/OpenAiMessageBuilder.js +17 -10
- package/build/bots/templates.d.ts +1 -1
- package/build/bots/templates.js +1 -1
- package/build/esm/bots/adapters/OpenAiMessageBuilder.d.ts +1 -2
- package/build/esm/bots/adapters/OpenAiMessageBuilder.js +18 -10
- package/build/esm/bots/templates.d.ts +1 -1
- package/build/esm/bots/templates.js +1 -1
- package/package.json +1 -1

package/build/bots/adapters/OpenAiMessageBuilder.d.ts
CHANGED

@@ -2,13 +2,12 @@ import { ChatCompletionMessageParam } from 'openai/resources';
 import { SprucebotLlmBot } from '../../llm.types';
 export default class OpenAiMessageBuilder {
     private bot;
-    private imagesFound;
-    private totalImages;
     protected constructor(bot: SprucebotLlmBot);
     static Builder(bot: SprucebotLlmBot): OpenAiMessageBuilder;
     buildMessages(): ChatCompletionMessageParam[];
     private buildChatHistoryMessages;
     private mapMessageToCompletion;
+    private maxCharsOfPastMessages;
     private shouldRememberImages;
     private buildFirstMessage;
     private buildSkillMessages;

package/build/bots/adapters/OpenAiMessageBuilder.js
CHANGED

@@ -3,8 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
 const templates_1 = require("../templates");
 class OpenAiMessageBuilder {
     constructor(bot) {
-        this.imagesFound = 0;
-        this.totalImages = 0;
         this.bot = bot;
     }
     static Builder(bot) {
@@ -25,11 +23,10 @@ class OpenAiMessageBuilder {
         if (limit > 0) {
             messagesBeingConsidered = messages.slice(Math.max(messages.length - limit, 0));
         }
-
-        this.
-        return messagesBeingConsidered.map((message) => this.mapMessageToCompletion(message));
+        const total = messagesBeingConsidered.length;
+        return messagesBeingConsidered.map((message, idx) => this.mapMessageToCompletion(message, idx === total - 1));
     }
-    mapMessageToCompletion(message) {
+    mapMessageToCompletion(message, isLast) {
         let content = message.message;
         let role = message.from === 'Me'
             ? 'user'
@@ -38,9 +35,7 @@ class OpenAiMessageBuilder {
                 : 'developer';
         if (message.imageBase64) {
             role = 'user';
-            this.
-            const shouldBeIncluded = this.shouldRememberImages() ||
-                this.imagesFound === this.totalImages;
+            const shouldBeIncluded = this.shouldRememberImages() || isLast;
             content = [
                 {
                     type: 'text',
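
The two hunks above change how images in past chat messages are handled: the imagesFound/totalImages counters are gone, and whether an image payload is kept now depends only on the OPENAI_SHOULD_REMEMBER_IMAGES env var and on whether the message is the last one in the history. A minimal sketch of that rule, with illustrative names that are not part of the package's public API:

// Sketch only: mirrors the shouldBeIncluded logic above; HistoryMessage and
// shouldIncludeImage are illustrative names, not exports of this package.
type HistoryMessage = { message: string; imageBase64?: string }

function shouldIncludeImage(message: HistoryMessage, isLast: boolean): boolean {
    const remembersImages = process.env.OPENAI_SHOULD_REMEMBER_IMAGES !== 'false'
    return !!message.imageBase64 && (remembersImages || isLast)
}

// With OPENAI_SHOULD_REMEMBER_IMAGES=false, only the most recent message keeps
// its image payload; by default every image in the history is still sent.
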
@@ -59,11 +54,23 @@ class OpenAiMessageBuilder {
                 },
             ];
         }
+        const shouldTruncate = typeof content === 'string' &&
+            !isLast &&
+            this.maxCharsOfPastMessages() > 0 &&
+            content.length > this.maxCharsOfPastMessages();
+        if (shouldTruncate) {
+            content = `[omitted due to length]`;
+        }
         return {
             role,
             content,
         };
     }
+    maxCharsOfPastMessages() {
+        return process.env.OPENAI_PAST_MESSAGE_MAX_CHARS
+            ? parseInt(process.env.OPENAI_PAST_MESSAGE_MAX_CHARS ?? '1000', 10)
+            : -1;
+    }
     shouldRememberImages() {
         return process.env.OPENAI_SHOULD_REMEMBER_IMAGES !== 'false';
     }
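
This hunk adds the maxCharsOfPastMessages() helper and the truncation it drives: when OPENAI_PAST_MESSAGE_MAX_CHARS is set, any text-only message other than the last one that exceeds that length is replaced with the placeholder `[omitted due to length]`; when the var is unset the helper returns -1 and nothing is truncated (the '1000' fallback inside parseInt is unreachable because the env var is checked first). A standalone sketch of the same behavior, using illustrative helper names rather than the package's own:

// Sketch only: reproduces the truncation rule above outside the class;
// truncatePastMessage is an illustrative helper, not part of the package.
function maxCharsOfPastMessages(): number {
    // unset env var => -1, which disables truncation entirely
    const raw = process.env.OPENAI_PAST_MESSAGE_MAX_CHARS
    return raw ? parseInt(raw, 10) : -1
}

function truncatePastMessage(content: string, isLast: boolean): string {
    const limit = maxCharsOfPastMessages()
    const shouldTruncate = !isLast && limit > 0 && content.length > limit
    return shouldTruncate ? '[omitted due to length]' : content
}

// e.g. OPENAI_PAST_MESSAGE_MAX_CHARS=500 collapses older messages over 500
// characters to the placeholder; the newest message is always sent in full.
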
@@ -146,7 +153,7 @@ class OpenAiMessageBuilder {
     buildStateMessage(state) {
         return {
             role: 'developer',
-            content: `The current state of this conversation is:\n\n${JSON.stringify(state)}. As the state is being updated, send it back to me in json format (something in can JSON.parse()) at the end of each response (it's not meant for reading, but for parsing, so don't call it out, but send it as we progress), surrounded by
+            content: `The current state of this conversation is:\n\n${JSON.stringify(state)}. As the state is being updated, send it back to me in json format (something in can JSON.parse()) at the end of each response (it's not meant for reading, but for parsing, so don't call it out, but send it as we progress), surrounded by the State Boundary (${templates_1.STATE_BOUNDARY}), like this:\n\n${templates_1.STATE_BOUNDARY} { "fieldName": "fieldValue" } ${templates_1.STATE_BOUNDARY}`,
         };
     }
     buildYourJobMessage(yourJob) {

package/build/bots/templates.js
CHANGED

@@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PROMPT_TEMPLATE = exports.CALLBACK_BOUNDARY = exports.DONE_TOKEN = exports.STATE_BOUNDARY = void 0;
 const renderPlaceholder_1 = __importDefault(require("../parsingResponses/renderPlaceholder"));
-exports.STATE_BOUNDARY = '
+exports.STATE_BOUNDARY = '*** STATE BOUNDARY ***';
 exports.DONE_TOKEN = `DONE_DONE_DONE`;
 exports.CALLBACK_BOUNDARY = 'xxxxx';
 exports.PROMPT_TEMPLATE = `You are <%= it.youAre %>
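
With STATE_BOUNDARY now a printable marker, the state the bot is asked to echo back can be pulled out of a completion by splitting on it. A hedged sketch of what that looks like (the package has its own response parsing; extractState below is illustrative only):

// Illustrative only: demonstrates the '*** STATE BOUNDARY ***' format the
// prompt above asks the model to emit; this is not the package's parser.
const STATE_BOUNDARY = '*** STATE BOUNDARY ***'

function extractState(response: string): Record<string, unknown> | null {
    const [, stateJson] = response.split(STATE_BOUNDARY)
    return stateJson ? JSON.parse(stateJson.trim()) : null
}

// extractState('Got it! *** STATE BOUNDARY *** { "fieldName": "fieldValue" } *** STATE BOUNDARY ***')
// => { fieldName: 'fieldValue' }
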

package/build/esm/bots/adapters/OpenAiMessageBuilder.d.ts
CHANGED

@@ -2,13 +2,12 @@ import { ChatCompletionMessageParam } from 'openai/resources';
 import { SprucebotLlmBot } from '../../llm.types';
 export default class OpenAiMessageBuilder {
     private bot;
-    private imagesFound;
-    private totalImages;
     protected constructor(bot: SprucebotLlmBot);
     static Builder(bot: SprucebotLlmBot): OpenAiMessageBuilder;
     buildMessages(): ChatCompletionMessageParam[];
     private buildChatHistoryMessages;
     private mapMessageToCompletion;
+    private maxCharsOfPastMessages;
     private shouldRememberImages;
     private buildFirstMessage;
     private buildSkillMessages;

package/build/esm/bots/adapters/OpenAiMessageBuilder.js
CHANGED

@@ -1,8 +1,6 @@
 import { DONE_TOKEN, STATE_BOUNDARY } from '../templates.js';
 export default class OpenAiMessageBuilder {
     constructor(bot) {
-        this.imagesFound = 0;
-        this.totalImages = 0;
         this.bot = bot;
     }
     static Builder(bot) {
@@ -24,11 +22,10 @@ export default class OpenAiMessageBuilder {
         if (limit > 0) {
             messagesBeingConsidered = messages.slice(Math.max(messages.length - limit, 0));
         }
-
-        this.
-        return messagesBeingConsidered.map((message) => this.mapMessageToCompletion(message));
+        const total = messagesBeingConsidered.length;
+        return messagesBeingConsidered.map((message, idx) => this.mapMessageToCompletion(message, idx === total - 1));
     }
-    mapMessageToCompletion(message) {
+    mapMessageToCompletion(message, isLast) {
         let content = message.message;
         let role = message.from === 'Me'
             ? 'user'
@@ -37,9 +34,7 @@ export default class OpenAiMessageBuilder {
                 : 'developer';
         if (message.imageBase64) {
             role = 'user';
-            this.
-            const shouldBeIncluded = this.shouldRememberImages() ||
-                this.imagesFound === this.totalImages;
+            const shouldBeIncluded = this.shouldRememberImages() || isLast;
             content = [
                 {
                     type: 'text',
@@ -58,11 +53,24 @@ export default class OpenAiMessageBuilder {
                 },
             ];
         }
+        const shouldTruncate = typeof content === 'string' &&
+            !isLast &&
+            this.maxCharsOfPastMessages() > 0 &&
+            content.length > this.maxCharsOfPastMessages();
+        if (shouldTruncate) {
+            content = `[omitted due to length]`;
+        }
         return {
             role,
             content,
         };
     }
+    maxCharsOfPastMessages() {
+        var _a;
+        return process.env.OPENAI_PAST_MESSAGE_MAX_CHARS
+            ? parseInt((_a = process.env.OPENAI_PAST_MESSAGE_MAX_CHARS) !== null && _a !== void 0 ? _a : '1000', 10)
+            : -1;
+    }
     shouldRememberImages() {
         return process.env.OPENAI_SHOULD_REMEMBER_IMAGES !== 'false';
     }
@@ -145,7 +153,7 @@ export default class OpenAiMessageBuilder {
     buildStateMessage(state) {
         return {
             role: 'developer',
-            content: `The current state of this conversation is:\n\n${JSON.stringify(state)}. As the state is being updated, send it back to me in json format (something in can JSON.parse()) at the end of each response (it's not meant for reading, but for parsing, so don't call it out, but send it as we progress), surrounded by
+            content: `The current state of this conversation is:\n\n${JSON.stringify(state)}. As the state is being updated, send it back to me in json format (something in can JSON.parse()) at the end of each response (it's not meant for reading, but for parsing, so don't call it out, but send it as we progress), surrounded by the State Boundary (${STATE_BOUNDARY}), like this:\n\n${STATE_BOUNDARY} { "fieldName": "fieldValue" } ${STATE_BOUNDARY}`,
         };
     }
     buildYourJobMessage(yourJob) {

package/build/esm/bots/templates.js
CHANGED

@@ -1,5 +1,5 @@
 import renderPlaceholder from '../parsingResponses/renderPlaceholder.js';
-export const STATE_BOUNDARY = '
+export const STATE_BOUNDARY = '*** STATE BOUNDARY ***';
 export const DONE_TOKEN = `DONE_DONE_DONE`;
 export const CALLBACK_BOUNDARY = 'xxxxx';
 export const PROMPT_TEMPLATE = `You are <%= it.youAre %>