@sprucelabs/sprucebot-llm 2.4.14 → 2.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,7 +3,10 @@ export default class PromptGenerator {
3
3
  private bot;
4
4
  private eta;
5
5
  private log;
6
- constructor(bot: SprucebotLlmBot);
6
+ static Class?: typeof PromptGenerator;
7
+ private promptTemplate;
8
+ protected constructor(bot: SprucebotLlmBot, options?: PromptGeneratorOptions);
9
+ static Generator(bot: SprucebotLlmBot, options?: PromptGeneratorOptions): PromptGenerator;
7
10
  generate(): Promise<string>;
8
11
  private stringifyState;
9
12
  }
@@ -14,3 +17,6 @@ export interface TemplateContext {
14
17
  stateSchemaJson?: string;
15
18
  stateJson?: string;
16
19
  }
20
+ export interface PromptGeneratorOptions {
21
+ promptTemplate?: string;
22
+ }
@@ -39,18 +39,24 @@ const schema_1 = require("@sprucelabs/schema");
39
39
  const Eta = __importStar(require("eta"));
40
40
  const templates_1 = require("./templates");
41
41
  class PromptGenerator {
42
- constructor(bot) {
42
+ constructor(bot, options) {
43
+ var _a;
43
44
  this.eta = Eta;
44
45
  this.log = process.env.SHOULD_LOG_GENERATED_PROMPTS === 'true'
45
46
  ? console.info
46
47
  : () => { };
47
48
  (0, schema_1.assertOptions)({ bot }, ['bot']);
48
49
  this.bot = bot;
50
+ this.promptTemplate = (_a = options === null || options === void 0 ? void 0 : options.promptTemplate) !== null && _a !== void 0 ? _a : templates_1.PROMPT_TEMPLATE;
51
+ }
52
+ static Generator(bot, options) {
53
+ var _a;
54
+ return new ((_a = this.Class) !== null && _a !== void 0 ? _a : PromptGenerator)(bot, options);
49
55
  }
50
56
  async generate() {
51
57
  const _a = this.bot.serialize(), { stateSchema, state } = _a, rest = __rest(_a, ["stateSchema", "state"]);
52
58
  const { stateSchemaJson, stateJson } = this.stringifyState(stateSchema, state);
53
- const rendered = await this.eta.render(templates_1.PROMPT_TEMPLATE, Object.assign({ stateSchemaJson,
59
+ const rendered = await this.eta.render(this.promptTemplate, Object.assign({ stateSchemaJson,
54
60
  stateJson }, rest), {
55
61
  async: true,
56
62
  autoEscape: false,
@@ -45,9 +45,13 @@ class SprucebotLlmBotImpl extends mercury_event_emitter_1.AbstractEventEmitter {
45
45
  from: 'Me',
46
46
  message,
47
47
  });
48
- const response = await this.adapter.sendMessage(this);
48
+ const serializedSkill = (_a = this.skill) === null || _a === void 0 ? void 0 : _a.serialize();
49
+ const response = await this.adapter.sendMessage(this, {
50
+ model: serializedSkill === null || serializedSkill === void 0 ? void 0 : serializedSkill.model,
51
+ promptTemplate: serializedSkill === null || serializedSkill === void 0 ? void 0 : serializedSkill.promptTemplate,
52
+ });
49
53
  const parser = ResponseParser_1.default.getInstance();
50
- const { isDone, message: parsedResponse, state, } = await parser.parse(response, (_a = this.skill) === null || _a === void 0 ? void 0 : _a.serialize().callbacks);
54
+ const { isDone, message: parsedResponse, state, } = await parser.parse(response, serializedSkill === null || serializedSkill === void 0 ? void 0 : serializedSkill.callbacks);
51
55
  this.isDone = isDone;
52
56
  if (this.stateSchema && state) {
53
57
  await this.updateState(state);
@@ -1,10 +1,10 @@
1
1
  import { Configuration, OpenAIApi } from 'openai';
2
- import { LlmAdapter, SprucebotLlmBot } from '../../llm.types';
2
+ import { LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../../llm.types';
3
3
  export declare class OpenAiAdapter implements LlmAdapter {
4
4
  static Configuration: typeof Configuration;
5
5
  static OpenAIApi: typeof OpenAIApi;
6
6
  private api;
7
7
  constructor(apiKey: string);
8
- sendMessage(bot: SprucebotLlmBot): Promise<string>;
8
+ sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
9
9
  }
10
10
  export declare const MESSAGE_RESPONSE_ERROR_MESSAGE = "Oh no! Something went wrong and I can't talk right now!";
@@ -13,16 +13,13 @@ class OpenAiAdapter {
13
13
  const config = new OpenAiAdapter.Configuration({ apiKey });
14
14
  this.api = new OpenAiAdapter.OpenAIApi(config);
15
15
  }
16
- async sendMessage(bot) {
16
+ async sendMessage(bot, options) {
17
17
  var _a, _b, _c;
18
- const generator = new PromptGenerator_1.default(bot);
19
- const prompt = await generator.generate();
20
- const response = await this.api.createCompletion({
21
- prompt,
22
- model: 'text-davinci-003',
23
- max_tokens: 250,
24
- stop: ['__Me__:'],
18
+ const generator = PromptGenerator_1.default.Generator(bot, {
19
+ promptTemplate: options === null || options === void 0 ? void 0 : options.promptTemplate,
25
20
  });
21
+ const prompt = await generator.generate();
22
+ const response = await this.api.createCompletion(Object.assign({ prompt, model: 'text-davinci-003', max_tokens: 250, stop: ['__Me__:'] }, options));
26
23
  return ((_c = (_b = (_a = response.data.choices[0]) === null || _a === void 0 ? void 0 : _a.text) === null || _b === void 0 ? void 0 : _b.trim()) !== null && _c !== void 0 ? _c : exports.MESSAGE_RESPONSE_ERROR_MESSAGE);
27
24
  }
28
25
  }
@@ -3,7 +3,10 @@ export default class PromptGenerator {
3
3
  private bot;
4
4
  private eta;
5
5
  private log;
6
- constructor(bot: SprucebotLlmBot);
6
+ static Class?: typeof PromptGenerator;
7
+ private promptTemplate;
8
+ protected constructor(bot: SprucebotLlmBot, options?: PromptGeneratorOptions);
9
+ static Generator(bot: SprucebotLlmBot, options?: PromptGeneratorOptions): PromptGenerator;
7
10
  generate(): Promise<string>;
8
11
  private stringifyState;
9
12
  }
@@ -14,3 +17,6 @@ export interface TemplateContext {
14
17
  stateSchemaJson?: string;
15
18
  stateJson?: string;
16
19
  }
20
+ export interface PromptGeneratorOptions {
21
+ promptTemplate?: string;
22
+ }
@@ -22,19 +22,25 @@ import { assertOptions, normalizeSchemaValues, } from '@sprucelabs/schema';
22
22
  import * as Eta from 'eta';
23
23
  import { PROMPT_TEMPLATE } from './templates.js';
24
24
  export default class PromptGenerator {
25
- constructor(bot) {
25
+ constructor(bot, options) {
26
+ var _a;
26
27
  this.eta = Eta;
27
28
  this.log = process.env.SHOULD_LOG_GENERATED_PROMPTS === 'true'
28
29
  ? console.info
29
30
  : () => { };
30
31
  assertOptions({ bot }, ['bot']);
31
32
  this.bot = bot;
33
+ this.promptTemplate = (_a = options === null || options === void 0 ? void 0 : options.promptTemplate) !== null && _a !== void 0 ? _a : PROMPT_TEMPLATE;
34
+ }
35
+ static Generator(bot, options) {
36
+ var _a;
37
+ return new ((_a = this.Class) !== null && _a !== void 0 ? _a : PromptGenerator)(bot, options);
32
38
  }
33
39
  generate() {
34
40
  return __awaiter(this, void 0, void 0, function* () {
35
41
  const _a = this.bot.serialize(), { stateSchema, state } = _a, rest = __rest(_a, ["stateSchema", "state"]);
36
42
  const { stateSchemaJson, stateJson } = this.stringifyState(stateSchema, state);
37
- const rendered = yield this.eta.render(PROMPT_TEMPLATE, Object.assign({ stateSchemaJson,
43
+ const rendered = yield this.eta.render(this.promptTemplate, Object.assign({ stateSchemaJson,
38
44
  stateJson }, rest), {
39
45
  async: true,
40
46
  autoEscape: false,
@@ -50,9 +50,13 @@ class SprucebotLlmBotImpl extends AbstractEventEmitter {
50
50
  from: 'Me',
51
51
  message,
52
52
  });
53
- const response = yield this.adapter.sendMessage(this);
53
+ const serializedSkill = (_a = this.skill) === null || _a === void 0 ? void 0 : _a.serialize();
54
+ const response = yield this.adapter.sendMessage(this, {
55
+ model: serializedSkill === null || serializedSkill === void 0 ? void 0 : serializedSkill.model,
56
+ promptTemplate: serializedSkill === null || serializedSkill === void 0 ? void 0 : serializedSkill.promptTemplate,
57
+ });
54
58
  const parser = ResponseParser.getInstance();
55
- const { isDone, message: parsedResponse, state, } = yield parser.parse(response, (_a = this.skill) === null || _a === void 0 ? void 0 : _a.serialize().callbacks);
59
+ const { isDone, message: parsedResponse, state, } = yield parser.parse(response, serializedSkill === null || serializedSkill === void 0 ? void 0 : serializedSkill.callbacks);
56
60
  this.isDone = isDone;
57
61
  if (this.stateSchema && state) {
58
62
  yield this.updateState(state);
@@ -1,10 +1,10 @@
1
1
  import { Configuration, OpenAIApi } from 'openai';
2
- import { LlmAdapter, SprucebotLlmBot } from '../../llm.types';
2
+ import { LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../../llm.types';
3
3
  export declare class OpenAiAdapter implements LlmAdapter {
4
4
  static Configuration: typeof Configuration;
5
5
  static OpenAIApi: typeof OpenAIApi;
6
6
  private api;
7
7
  constructor(apiKey: string);
8
- sendMessage(bot: SprucebotLlmBot): Promise<string>;
8
+ sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
9
9
  }
10
10
  export declare const MESSAGE_RESPONSE_ERROR_MESSAGE = "Oh no! Something went wrong and I can't talk right now!";
@@ -16,17 +16,14 @@ class OpenAiAdapter {
16
16
  const config = new OpenAiAdapter.Configuration({ apiKey });
17
17
  this.api = new OpenAiAdapter.OpenAIApi(config);
18
18
  }
19
- sendMessage(bot) {
19
+ sendMessage(bot, options) {
20
20
  var _a, _b, _c;
21
21
  return __awaiter(this, void 0, void 0, function* () {
22
- const generator = new PromptGenerator(bot);
23
- const prompt = yield generator.generate();
24
- const response = yield this.api.createCompletion({
25
- prompt,
26
- model: 'text-davinci-003',
27
- max_tokens: 250,
28
- stop: ['__Me__:'],
22
+ const generator = PromptGenerator.Generator(bot, {
23
+ promptTemplate: options === null || options === void 0 ? void 0 : options.promptTemplate,
29
24
  });
25
+ const prompt = yield generator.generate();
26
+ const response = yield this.api.createCompletion(Object.assign({ prompt, model: 'text-davinci-003', max_tokens: 250, stop: ['__Me__:'] }, options));
30
27
  return ((_c = (_b = (_a = response.data.choices[0]) === null || _a === void 0 ? void 0 : _a.text) === null || _b === void 0 ? void 0 : _b.trim()) !== null && _c !== void 0 ? _c : MESSAGE_RESPONSE_ERROR_MESSAGE);
31
28
  });
32
29
  }
@@ -7,4 +7,11 @@ export const FIRST_MESSAGES = [
7
7
  'test',
8
8
  'Hi',
9
9
  'Hi!',
10
+ 'Greetings!',
11
+ 'Good day!',
12
+ 'Howdy!',
13
+ 'Hey there!',
14
+ 'Nice to meet you!',
15
+ 'Welcome!',
16
+ 'Hello there!',
10
17
  ];
@@ -1,22 +1,22 @@
1
1
  export const GREETINGS = [
2
- "Hi there! I'm Sprucebot 🌲🤖",
3
- 'Hey! My name is Sprucebot 🌲🤖',
4
- 'Hello! You can call me Sprucebot 🌲🤖',
5
- 'Hi there! You can call me Sprucebot 🌲🤖',
6
- 'Greetings! My name is Sprucebot 🌲🤖',
7
- "Hello there! I'm Sprucebot 🌲🤖",
8
- 'Hey there! My name is Sprucebot 🌲🤖',
9
- "Good to see you! I'm Sprucebot 🌲🤖",
10
- 'Hey! You can call me Sprucebot 🌲🤖',
11
- 'Hi! My name is Sprucebot 🌲🤖',
12
- 'Hi there! My name is Sprucebot 🌲🤖',
13
- 'Hey there! You can call me Sprucebot 🌲🤖',
14
- 'Hello! My name is Sprucebot 🌲🤖',
15
- "Howdy! I'm Sprucebot 🌲🤖",
16
- 'Hiya! You can call me Sprucebot 🌲🤖',
17
- 'Hello there! You can call me Sprucebot 🌲🤖',
18
- 'Good to see you! My name is Sprucebot 🌲🤖',
19
- 'Greetings! You can call me Sprucebot 🌲🤖',
20
- 'Hey there! My name is Sprucebot 🌲🤖',
21
- 'Hi! You can call me Sprucebot 🌲🤖',
2
+ "Hi there! I'm Sprucebot 🌲🤖! Here are some things I can help you with:",
3
+ 'Hey! My name is Sprucebot 🌲🤖! Let me know which of the following you want to discuss:',
4
+ 'Hello! You can call me Sprucebot 🌲🤖 and I can help you with:',
5
+ 'Hi there! You can call me Sprucebot 🌲🤖! Need something? I can help with:',
6
+ 'Greetings! My name is Sprucebot 🌲🤖. Below are some things I can help you with:',
7
+ "Hello there! I'm Sprucebot 🌲🤖 and the things I can help with are:",
8
+ 'Hey there! My name is Sprucebot 🌲🤖 and I can help you with:',
9
+ "Good to see you! I'm Sprucebot 🌲🤖 and this is the list of things I'm ready to help with:",
10
+ 'Hey! You can call me Sprucebot 🌲🤖 and ask me about anything below:',
11
+ 'Hi! My name is Sprucebot 🌲🤖! Need help? Here are the things I can help with:',
12
+ "Hi there! My name is Sprucebot 🌲🤖 and I'm at your service, as long as it has to do with the following:",
13
+ 'Hey there! You can call me Sprucebot 🌲🤖. I can help you with:',
14
+ "Hello! My name is Sprucebot 🌲🤖. If you need help with any of the following, I'm your bot!",
15
+ "Howdy! I'm Sprucebot 🌲🤖 and this is the list of my specialties:",
16
+ 'Hiya! You can call me Sprucebot 🌲🤖! Need any help with the following?',
17
+ 'Hello there! You can call me Sprucebot 🌲🤖. I can help you with:',
18
+ 'Good to see you! My name is Sprucebot 🌲🤖. Here are some of my favorite things:',
19
+ 'Greetings! You can call me Sprucebot 🌲🤖. Which would you like to talk about?',
20
+ 'Hey there! My name is Sprucebot 🌲🤖. Here are the things I can help you with:',
21
+ "Hi! You can call me Sprucebot 🌲🤖!! I can't wait to help you with the following:",
22
22
  ];
@@ -17,6 +17,7 @@ export const TOPICS = [
17
17
  'Tell me a joke!',
18
18
  'I want to hear a joke!',
19
19
  'Can you tell me a joke?',
20
+ 'Tell me a joke please!',
20
21
  ],
21
22
  },
22
23
  ],
@@ -188,7 +189,8 @@ export const TOPICS = [
188
189
  name: [
189
190
  'Add a block to your schedule',
190
191
  'Block off some time',
191
- 'Create an event',
192
+ 'Block my calendar',
193
+ 'Add timeblock',
192
194
  ],
193
195
  conversations: [
194
196
  {
@@ -6,23 +6,45 @@ import { GREETINGS } from './constants/GREETINGS.js';
6
6
  import { OFF_THE_RAILS_CONVERSATIONS } from './constants/OFF_THE_RAILS_CONVERSATIONS.js';
7
7
  import { TOPICS } from './constants/TOPICS.js';
8
8
  const promptTemplatePath = path.join(__dirname, 'promptTemplate.txt');
9
+ const promptTemplateNoTopicsPath = path.join(__dirname, 'promptTemplateNoTopics.txt');
9
10
  const promptTemplate = fs.readFileSync(promptTemplatePath, 'utf8');
11
+ const promptTemplateNoTopics = fs.readFileSync(promptTemplateNoTopicsPath, 'utf8');
10
12
  const output = [];
11
13
  const outputPath = process.argv[2];
12
14
  if (!outputPath) {
13
15
  throw new Error('No output path provided. Example: node generateSamples.js ~/output.json');
14
16
  }
15
17
  for (let c = 0; c < TOPICS.length; c++) {
16
- new Array(15).fill(0).forEach(() => generateCompletion(TOPICS, c));
18
+ const topics = randomizedTopics();
19
+ new Array(20).fill(0).forEach(() => generateCompletion(topics, c));
17
20
  }
18
21
  for (let c = 0; c < OFF_THE_RAILS_CONVERSATIONS.length; c++) {
19
22
  const off = OFF_THE_RAILS_CONVERSATIONS[c];
20
- new Array(15).fill(0).forEach(() => generateOffTheRails(off));
23
+ new Array(20).fill(0).forEach(() => generateOffTheRails(off));
24
+ }
25
+ for (let c = 0; c < 20; c++) {
26
+ output.push({
27
+ prompt: render(promptTemplateNoTopics, {
28
+ topics: 'None!',
29
+ firstMessage: random(FIRST_MESSAGES),
30
+ }),
31
+ completion: random([
32
+ 'Oh no, we have an outage! There is nothing I can help you with while we are down!',
33
+ 'Shoot! I am having trouble connecting to HQ. I can not help you right now.',
34
+ "This is embarrassing, but I am having trouble connecting to HQ. I can't talk right now.",
35
+ "For some reason I am not able to communicate with HQ. I can't help you right now.",
36
+ ]) +
37
+ '\n\n' +
38
+ DONE_TOKEN,
39
+ });
21
40
  }
22
41
  fs.writeFileSync(outputPath, JSON.stringify(output, null, 2));
42
+ function randomizedTopics() {
43
+ return [...TOPICS].sort(() => Math.random() - 0.5);
44
+ }
23
45
  function generateOffTheRails(off) {
24
46
  const greeting = random(GREETINGS);
25
- const topics = renderTopics();
47
+ const topics = renderTopics(randomizedTopics());
26
48
  const messages = renderMessages([off.messages[0]], topics);
27
49
  output.push({
28
50
  prompt: render(promptTemplate, {
@@ -38,7 +60,7 @@ function generateCompletion(ts, c) {
38
60
  const completion = `{{#${c + 1}}}\n\n${DONE_TOKEN}`;
39
61
  const greeting = random(GREETINGS);
40
62
  const topic = ts[c];
41
- const { topics, messages } = renderMessagesAndTopics(topic.conversations);
63
+ const { topics, messages } = renderMessagesAndTopics(ts, topic.conversations);
42
64
  output.push({
43
65
  prompt: render(promptTemplate, {
44
66
  firstMessage: random(FIRST_MESSAGES),
@@ -49,9 +71,9 @@ function generateCompletion(ts, c) {
49
71
  completion,
50
72
  });
51
73
  }
52
- function renderMessagesAndTopics(conversations) {
74
+ function renderMessagesAndTopics(ts, conversations) {
53
75
  const conversation = random(conversations);
54
- const topics = renderTopics();
76
+ const topics = renderTopics(ts);
55
77
  const rendered = renderMessages(conversation.messages, topics);
56
78
  return { topics, messages: rendered };
57
79
  }
@@ -59,8 +81,8 @@ function renderMessages(messages, topics) {
59
81
  return render(messages.map((m) => `__${m.from}__: ${random(m.text)}`).join('\n') +
60
82
  '\n__You__:', { topics });
61
83
  }
62
- function renderTopics() {
63
- return TOPICS.map((t, idx) => `${idx + 1}. ${random(t.name)}`).join('\n');
84
+ function renderTopics(topics) {
85
+ return topics.map((t, idx) => `${idx + 1}. ${random(t.name)}`).join('\n');
64
86
  }
65
87
  function random(values) {
66
88
  return values[Math.floor(Math.random() * values.length)];
@@ -14,7 +14,7 @@ export interface SprucebotLlmBot<StateSchema extends Schema = Schema, State exte
14
14
  setSkill(skill: SprucebotLLmSkill<any>): void;
15
15
  }
16
16
  export interface LlmAdapter {
17
- sendMessage(bot: SprucebotLlmBot<Schema>): Promise<string>;
17
+ sendMessage(bot: SprucebotLlmBot<Schema>, options?: SendMessageOptions): Promise<string>;
18
18
  }
19
19
  export interface PromptOptions<StateSchema extends Schema, State extends SchemaValues<StateSchema> = SchemaValues<StateSchema>> {
20
20
  /**
@@ -25,6 +25,10 @@ export interface PromptOptions<StateSchema extends Schema, State extends SchemaV
25
25
  state?: Partial<State>;
26
26
  skill?: SerializedSkill<Schema>;
27
27
  }
28
+ export interface SendMessageOptions {
29
+ model?: string;
30
+ promptTemplate?: string;
31
+ }
28
32
  export interface SerializedBot<StateSchema extends Schema = Schema, State extends SchemaValues<StateSchema> = SchemaValues<StateSchema>> extends PromptOptions<Schema, State> {
29
33
  messages: LlmMessage[];
30
34
  }
@@ -45,6 +49,8 @@ export interface SkillOptions<StateSchema extends Schema = Schema, State extends
45
49
  stateSchema?: StateSchema;
46
50
  state?: Partial<State>;
47
51
  callbacks?: LlmCallbackMap;
52
+ model?: string;
53
+ promptTemplate?: string;
48
54
  }
49
55
  export interface SprucebotLLmSkill<StateSchema extends Schema = Schema, State extends SchemaValues<StateSchema> = SchemaValues<StateSchema>> extends MercuryEventEmitter<LlmEventContract> {
50
56
  getState(): Partial<State> | undefined;
@@ -10,4 +10,11 @@ exports.FIRST_MESSAGES = [
10
10
  'test',
11
11
  'Hi',
12
12
  'Hi!',
13
+ 'Greetings!',
14
+ 'Good day!',
15
+ 'Howdy!',
16
+ 'Hey there!',
17
+ 'Nice to meet you!',
18
+ 'Welcome!',
19
+ 'Hello there!',
13
20
  ];
@@ -2,24 +2,24 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.GREETINGS = void 0;
4
4
  exports.GREETINGS = [
5
- "Hi there! I'm Sprucebot 🌲🤖",
6
- 'Hey! My name is Sprucebot 🌲🤖',
7
- 'Hello! You can call me Sprucebot 🌲🤖',
8
- 'Hi there! You can call me Sprucebot 🌲🤖',
9
- 'Greetings! My name is Sprucebot 🌲🤖',
10
- "Hello there! I'm Sprucebot 🌲🤖",
11
- 'Hey there! My name is Sprucebot 🌲🤖',
12
- "Good to see you! I'm Sprucebot 🌲🤖",
13
- 'Hey! You can call me Sprucebot 🌲🤖',
14
- 'Hi! My name is Sprucebot 🌲🤖',
15
- 'Hi there! My name is Sprucebot 🌲🤖',
16
- 'Hey there! You can call me Sprucebot 🌲🤖',
17
- 'Hello! My name is Sprucebot 🌲🤖',
18
- "Howdy! I'm Sprucebot 🌲🤖",
19
- 'Hiya! You can call me Sprucebot 🌲🤖',
20
- 'Hello there! You can call me Sprucebot 🌲🤖',
21
- 'Good to see you! My name is Sprucebot 🌲🤖',
22
- 'Greetings! You can call me Sprucebot 🌲🤖',
23
- 'Hey there! My name is Sprucebot 🌲🤖',
24
- 'Hi! You can call me Sprucebot 🌲🤖',
5
+ "Hi there! I'm Sprucebot 🌲🤖! Here are some things I can help you with:",
6
+ 'Hey! My name is Sprucebot 🌲🤖! Let me know which of the following you want to discuss:',
7
+ 'Hello! You can call me Sprucebot 🌲🤖 and I can help you with:',
8
+ 'Hi there! You can call me Sprucebot 🌲🤖! Need something? I can help with:',
9
+ 'Greetings! My name is Sprucebot 🌲🤖. Below are some things I can help you with:',
10
+ "Hello there! I'm Sprucebot 🌲🤖 and the things I can help with are:",
11
+ 'Hey there! My name is Sprucebot 🌲🤖 and I can help you with:',
12
+ "Good to see you! I'm Sprucebot 🌲🤖 and this is the list of things I'm ready to help with:",
13
+ 'Hey! You can call me Sprucebot 🌲🤖 and ask me about anything below:',
14
+ 'Hi! My name is Sprucebot 🌲🤖! Need help? Here are the things I can help with:',
15
+ "Hi there! My name is Sprucebot 🌲🤖 and I'm at your service, as long as it has to do with the following:",
16
+ 'Hey there! You can call me Sprucebot 🌲🤖. I can help you with:',
17
+ "Hello! My name is Sprucebot 🌲🤖. If you need help with any of the following, I'm your bot!",
18
+ "Howdy! I'm Sprucebot 🌲🤖 and this is the list of my specialties:",
19
+ 'Hiya! You can call me Sprucebot 🌲🤖! Need any help with the following?',
20
+ 'Hello there! You can call me Sprucebot 🌲🤖. I can help you with:',
21
+ 'Good to see you! My name is Sprucebot 🌲🤖. Here are some of my favorite things:',
22
+ 'Greetings! You can call me Sprucebot 🌲🤖. Which would you like to talk about?',
23
+ 'Hey there! My name is Sprucebot 🌲🤖. Here are the things I can help you with:',
24
+ "Hi! You can call me Sprucebot 🌲🤖!! I can't wait to help you with the following:",
25
25
  ];
@@ -20,6 +20,7 @@ exports.TOPICS = [
20
20
  'Tell me a joke!',
21
21
  'I want to hear a joke!',
22
22
  'Can you tell me a joke?',
23
+ 'Tell me a joke please!',
23
24
  ],
24
25
  },
25
26
  ],
@@ -191,7 +192,8 @@ exports.TOPICS = [
191
192
  name: [
192
193
  'Add a block to your schedule',
193
194
  'Block off some time',
194
- 'Create an event',
195
+ 'Block my calendar',
196
+ 'Add timeblock',
195
197
  ],
196
198
  conversations: [
197
199
  {
@@ -31,23 +31,45 @@ const GREETINGS_1 = require("./constants/GREETINGS");
31
31
  const OFF_THE_RAILS_CONVERSATIONS_1 = require("./constants/OFF_THE_RAILS_CONVERSATIONS");
32
32
  const TOPICS_1 = require("./constants/TOPICS");
33
33
  const promptTemplatePath = path.join(__dirname, 'promptTemplate.txt');
34
+ const promptTemplateNoTopicsPath = path.join(__dirname, 'promptTemplateNoTopics.txt');
34
35
  const promptTemplate = fs.readFileSync(promptTemplatePath, 'utf8');
36
+ const promptTemplateNoTopics = fs.readFileSync(promptTemplateNoTopicsPath, 'utf8');
35
37
  const output = [];
36
38
  const outputPath = process.argv[2];
37
39
  if (!outputPath) {
38
40
  throw new Error('No output path provided. Example: node generateSamples.js ~/output.json');
39
41
  }
40
42
  for (let c = 0; c < TOPICS_1.TOPICS.length; c++) {
41
- new Array(15).fill(0).forEach(() => generateCompletion(TOPICS_1.TOPICS, c));
43
+ const topics = randomizedTopics();
44
+ new Array(20).fill(0).forEach(() => generateCompletion(topics, c));
42
45
  }
43
46
  for (let c = 0; c < OFF_THE_RAILS_CONVERSATIONS_1.OFF_THE_RAILS_CONVERSATIONS.length; c++) {
44
47
  const off = OFF_THE_RAILS_CONVERSATIONS_1.OFF_THE_RAILS_CONVERSATIONS[c];
45
- new Array(15).fill(0).forEach(() => generateOffTheRails(off));
48
+ new Array(20).fill(0).forEach(() => generateOffTheRails(off));
49
+ }
50
+ for (let c = 0; c < 20; c++) {
51
+ output.push({
52
+ prompt: render(promptTemplateNoTopics, {
53
+ topics: 'None!',
54
+ firstMessage: random(FIRST_MESSAGES_1.FIRST_MESSAGES),
55
+ }),
56
+ completion: random([
57
+ 'Oh no, we have an outage! There is nothing I can help you with while we are down!',
58
+ 'Shoot! I am having trouble connecting to HQ. I can not help you right now.',
59
+ "This is embarrassing, but I am having trouble connecting to HQ. I can't talk right now.",
60
+ "For some reason I am not able to communicate with HQ. I can't help you right now.",
61
+ ]) +
62
+ '\n\n' +
63
+ templates_1.DONE_TOKEN,
64
+ });
46
65
  }
47
66
  fs.writeFileSync(outputPath, JSON.stringify(output, null, 2));
67
+ function randomizedTopics() {
68
+ return [...TOPICS_1.TOPICS].sort(() => Math.random() - 0.5);
69
+ }
48
70
  function generateOffTheRails(off) {
49
71
  const greeting = random(GREETINGS_1.GREETINGS);
50
- const topics = renderTopics();
72
+ const topics = renderTopics(randomizedTopics());
51
73
  const messages = renderMessages([off.messages[0]], topics);
52
74
  output.push({
53
75
  prompt: render(promptTemplate, {
@@ -63,7 +85,7 @@ function generateCompletion(ts, c) {
63
85
  const completion = `{{#${c + 1}}}\n\n${templates_1.DONE_TOKEN}`;
64
86
  const greeting = random(GREETINGS_1.GREETINGS);
65
87
  const topic = ts[c];
66
- const { topics, messages } = renderMessagesAndTopics(topic.conversations);
88
+ const { topics, messages } = renderMessagesAndTopics(ts, topic.conversations);
67
89
  output.push({
68
90
  prompt: render(promptTemplate, {
69
91
  firstMessage: random(FIRST_MESSAGES_1.FIRST_MESSAGES),
@@ -74,9 +96,9 @@ function generateCompletion(ts, c) {
74
96
  completion,
75
97
  });
76
98
  }
77
- function renderMessagesAndTopics(conversations) {
99
+ function renderMessagesAndTopics(ts, conversations) {
78
100
  const conversation = random(conversations);
79
- const topics = renderTopics();
101
+ const topics = renderTopics(ts);
80
102
  const rendered = renderMessages(conversation.messages, topics);
81
103
  return { topics, messages: rendered };
82
104
  }
@@ -84,8 +106,8 @@ function renderMessages(messages, topics) {
84
106
  return render(messages.map((m) => `__${m.from}__: ${random(m.text)}`).join('\n') +
85
107
  '\n__You__:', { topics });
86
108
  }
87
- function renderTopics() {
88
- return TOPICS_1.TOPICS.map((t, idx) => `${idx + 1}. ${random(t.name)}`).join('\n');
109
+ function renderTopics(topics) {
110
+ return topics.map((t, idx) => `${idx + 1}. ${random(t.name)}`).join('\n');
89
111
  }
90
112
  function random(values) {
91
113
  return values[Math.floor(Math.random() * values.length)];
@@ -1,20 +1,9 @@
1
- You are a bot named Sprucebot. At the start of every conversation, you introduce yourself in an excited way! You are both hip and adorable. You say things like, 'Jeepers' and 'Golly' or even 'Jeezey peezy'!
2
-
3
- For this interaction, every message I send will start with "__Me__:" and I'll prompt you for your response by starting with "__You__:".
4
-
5
- __Me__: Do you understand?
6
- __You__: Yes
7
-
8
- Your primary objective for this conversation is to help me with the following topics, and nothing else:
1
+ Allowed Topics:
9
2
 
10
3
  {{topics}}
11
4
 
12
- We are done when I have picked the topic I need help with and you have sent the number of the topic, e.g. {{#1}} or {{#2}}. At that point, send me the following message so I know we are done:
13
-
14
- DONE_DONE_DONE
15
-
16
- Let's get started:
17
-
18
5
  __Me__: {{firstMessage}}
19
- __You__: {{greeting}} {{topics}}
6
+ __You__: {{greeting}}
7
+
8
+ {{topics}}
20
9
  {{messages}}
@@ -0,0 +1,6 @@
1
+ Allowed Topics:
2
+
3
+ {{topics}}
4
+
5
+ __Me__: {{firstMessage}}
6
+ __You__:
@@ -14,7 +14,7 @@ export interface SprucebotLlmBot<StateSchema extends Schema = Schema, State exte
14
14
  setSkill(skill: SprucebotLLmSkill<any>): void;
15
15
  }
16
16
  export interface LlmAdapter {
17
- sendMessage(bot: SprucebotLlmBot<Schema>): Promise<string>;
17
+ sendMessage(bot: SprucebotLlmBot<Schema>, options?: SendMessageOptions): Promise<string>;
18
18
  }
19
19
  export interface PromptOptions<StateSchema extends Schema, State extends SchemaValues<StateSchema> = SchemaValues<StateSchema>> {
20
20
  /**
@@ -25,6 +25,10 @@ export interface PromptOptions<StateSchema extends Schema, State extends SchemaV
25
25
  state?: Partial<State>;
26
26
  skill?: SerializedSkill<Schema>;
27
27
  }
28
+ export interface SendMessageOptions {
29
+ model?: string;
30
+ promptTemplate?: string;
31
+ }
28
32
  export interface SerializedBot<StateSchema extends Schema = Schema, State extends SchemaValues<StateSchema> = SchemaValues<StateSchema>> extends PromptOptions<Schema, State> {
29
33
  messages: LlmMessage[];
30
34
  }
@@ -45,6 +49,8 @@ export interface SkillOptions<StateSchema extends Schema = Schema, State extends
45
49
  stateSchema?: StateSchema;
46
50
  state?: Partial<State>;
47
51
  callbacks?: LlmCallbackMap;
52
+ model?: string;
53
+ promptTemplate?: string;
48
54
  }
49
55
  export interface SprucebotLLmSkill<StateSchema extends Schema = Schema, State extends SchemaValues<StateSchema> = SchemaValues<StateSchema>> extends MercuryEventEmitter<LlmEventContract> {
50
56
  getState(): Partial<State> | undefined;
package/package.json CHANGED
@@ -9,7 +9,7 @@
9
9
  "@sprucelabs/spruce-test-fixtures"
10
10
  ]
11
11
  },
12
- "version": "2.4.14",
12
+ "version": "2.5.1",
13
13
  "files": [
14
14
  "build"
15
15
  ],