@sprucelabs/sprucebot-llm 12.2.0 → 12.3.0

This diff shows the contents of publicly released versions of the package as they appear in their respective public registries and is provided for informational purposes only.
@@ -1,15 +1,16 @@
+ import { Log } from '@sprucelabs/spruce-skill-utils';
  import OpenAI from 'openai';
  import { ReasoningEffort } from 'openai/resources';
  import { LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../../llm.types';
  export default class OpenAiAdapter implements LlmAdapter {
  static OpenAI: typeof OpenAI;
  private api;
- private log;
+ private log?;
  private model;
  private memoryLimit?;
  private reasoningEffort?;
- protected constructor(apiKey: string);
- static Adapter(apiKey: string): OpenAiAdapter;
+ protected constructor(apiKey: string, options?: OpenAiAdapterOptions);
+ static Adapter(apiKey: string, options?: OpenAiAdapterOptions): OpenAiAdapter;
  sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
  private getReasoningEffort;
  setModel(model: string): void;
@@ -17,3 +18,7 @@ export default class OpenAiAdapter implements LlmAdapter {
  setReasoningEffort(effort: ReasoningEffort): void;
  }
  export declare const MESSAGE_RESPONSE_ERROR_MESSAGE = "Oh no! Something went wrong and I can't talk right now!";
+ interface OpenAiAdapterOptions {
+ log?: Log;
+ }
+ export {};
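
The declaration changes above add a single optional second argument, OpenAiAdapterOptions, whose only member is an optional Log. A minimal consumer sketch follows; it is not part of the published diff, the entry point it imports from is an assumption (the diff only shows relative paths such as '../../llm.types'), and buildLog is assumed to still be exported by @sprucelabs/spruce-skill-utils because 12.2.0 imported it from there.

import { buildLog } from '@sprucelabs/spruce-skill-utils';
// Hypothetical entry point; the published diff does not show how the adapter is exposed.
import { OpenAiAdapter } from '@sprucelabs/sprucebot-llm';

// In 12.2.0 the adapter always built its own log; in 12.3.0 nothing is logged
// unless a log is passed in, so this adapter stays silent.
const quietAdapter = OpenAiAdapter.Adapter(process.env.OPENAI_API_KEY!);

// Passing a Log opts back in to logging, here using the same namespace the
// 12.2.0 constructor used internally.
const loggingAdapter = OpenAiAdapter.Adapter(process.env.OPENAI_API_KEY!, {
    log: buildLog('SprucebotLLM::OpenAiAdapter'),
});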
@@ -5,25 +5,25 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.MESSAGE_RESPONSE_ERROR_MESSAGE = void 0;
  const schema_1 = require("@sprucelabs/schema");
- const spruce_skill_utils_1 = require("@sprucelabs/spruce-skill-utils");
  const openai_1 = __importDefault(require("openai"));
  const OpenAiMessageBuilder_1 = __importDefault(require("./OpenAiMessageBuilder"));
  class OpenAiAdapter {
- constructor(apiKey) {
- this.log = (0, spruce_skill_utils_1.buildLog)('SprucebotLLM::OpenAiAdapter');
+ constructor(apiKey, options) {
  this.model = 'gpt-4o';
  (0, schema_1.assertOptions)({ apiKey }, ['apiKey']);
+ const { log } = options || {};
  this.api = new OpenAiAdapter.OpenAI({ apiKey });
+ this.log = log;
  }
- static Adapter(apiKey) {
- return new this(apiKey);
+ static Adapter(apiKey, options) {
+ return new this(apiKey, options);
  }
  async sendMessage(bot, options) {
  const messageBuilder = OpenAiMessageBuilder_1.default.Builder(bot, {
  memoryLimit: this.memoryLimit,
  });
  const messages = messageBuilder.buildMessages();
- this.log.info('Sending message to OpenAI', JSON.stringify(messages, null, 2));
+ this.log?.info('Sending message to OpenAI', JSON.stringify(messages, null, 2));
  const params = {
  messages,
  model: options?.model ?? this.model,
@@ -35,7 +35,7 @@ class OpenAiAdapter {
  const response = await this.api.chat.completions.create(params);
  const message = response.choices?.[0]?.message?.content?.trim() ??
  exports.MESSAGE_RESPONSE_ERROR_MESSAGE;
- this.log.info('Received response from OpenAI', message);
+ this.log?.info('Received response from OpenAI', message);
  return message;
  }
  getReasoningEffort() {
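
In the compiled output above the constructor no longer builds a log of its own; it copies options.log, and both info calls are optional-chained, so omitting the option silences the adapter entirely. A hedged test-capture sketch follows; it assumes only that the adapter calls log.info(...), which is all the compiled code shows, and the cast is needed because the real Log interface from @sprucelabs/spruce-skill-utils declares more members than this stub.

import { Log } from '@sprucelabs/spruce-skill-utils';
// Hypothetical entry point, as in the earlier sketch.
import { OpenAiAdapter } from '@sprucelabs/sprucebot-llm';

// Capture stub for illustration only; cast through unknown because Log declares
// more than the single method the adapter is seen calling.
const captured: string[] = [];
const fakeLog = {
    info: (...messages: string[]) => {
        captured.push(messages.join(' '));
    },
} as unknown as Log;

// 'Sending message to OpenAI ...' and 'Received response from OpenAI ...' now end
// up in `captured` instead of being written by an internally built log.
const adapter = OpenAiAdapter.Adapter('sk-example', { log: fakeLog });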
@@ -1,15 +1,16 @@
+ import { Log } from '@sprucelabs/spruce-skill-utils';
  import OpenAI from 'openai';
  import { ReasoningEffort } from 'openai/resources';
  import { LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../../llm.types';
  export default class OpenAiAdapter implements LlmAdapter {
  static OpenAI: typeof OpenAI;
  private api;
- private log;
+ private log?;
  private model;
  private memoryLimit?;
  private reasoningEffort?;
- protected constructor(apiKey: string);
- static Adapter(apiKey: string): OpenAiAdapter;
+ protected constructor(apiKey: string, options?: OpenAiAdapterOptions);
+ static Adapter(apiKey: string, options?: OpenAiAdapterOptions): OpenAiAdapter;
  sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
  private getReasoningEffort;
  setModel(model: string): void;
@@ -17,3 +18,7 @@ export default class OpenAiAdapter implements LlmAdapter {
  setReasoningEffort(effort: ReasoningEffort): void;
  }
  export declare const MESSAGE_RESPONSE_ERROR_MESSAGE = "Oh no! Something went wrong and I can't talk right now!";
+ interface OpenAiAdapterOptions {
+ log?: Log;
+ }
+ export {};
@@ -8,38 +8,38 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
  });
  };
  import { assertOptions } from '@sprucelabs/schema';
- import { buildLog } from '@sprucelabs/spruce-skill-utils';
  import OpenAI from 'openai';
  import OpenAiMessageBuilder from './OpenAiMessageBuilder.js';
  class OpenAiAdapter {
- constructor(apiKey) {
- this.log = buildLog('SprucebotLLM::OpenAiAdapter');
+ constructor(apiKey, options) {
  this.model = 'gpt-4o';
  assertOptions({ apiKey }, ['apiKey']);
+ const { log } = options || {};
  this.api = new OpenAiAdapter.OpenAI({ apiKey });
+ this.log = log;
  }
- static Adapter(apiKey) {
- return new this(apiKey);
+ static Adapter(apiKey, options) {
+ return new this(apiKey, options);
  }
  sendMessage(bot, options) {
  return __awaiter(this, void 0, void 0, function* () {
- var _a, _b, _c, _d, _e, _f;
+ var _a, _b, _c, _d, _e, _f, _g, _h;
  const messageBuilder = OpenAiMessageBuilder.Builder(bot, {
  memoryLimit: this.memoryLimit,
  });
  const messages = messageBuilder.buildMessages();
- this.log.info('Sending message to OpenAI', JSON.stringify(messages, null, 2));
+ (_a = this.log) === null || _a === void 0 ? void 0 : _a.info('Sending message to OpenAI', JSON.stringify(messages, null, 2));
  const params = {
  messages,
- model: (_a = options === null || options === void 0 ? void 0 : options.model) !== null && _a !== void 0 ? _a : this.model,
+ model: (_b = options === null || options === void 0 ? void 0 : options.model) !== null && _b !== void 0 ? _b : this.model,
  };
  const reasoningEffort = this.getReasoningEffort();
  if (reasoningEffort) {
  params.reasoning_effort = reasoningEffort;
  }
  const response = yield this.api.chat.completions.create(params);
- const message = (_f = (_e = (_d = (_c = (_b = response.choices) === null || _b === void 0 ? void 0 : _b[0]) === null || _c === void 0 ? void 0 : _c.message) === null || _d === void 0 ? void 0 : _d.content) === null || _e === void 0 ? void 0 : _e.trim()) !== null && _f !== void 0 ? _f : MESSAGE_RESPONSE_ERROR_MESSAGE;
- this.log.info('Received response from OpenAI', message);
+ const message = (_g = (_f = (_e = (_d = (_c = response.choices) === null || _c === void 0 ? void 0 : _c[0]) === null || _d === void 0 ? void 0 : _d.message) === null || _e === void 0 ? void 0 : _e.content) === null || _f === void 0 ? void 0 : _f.trim()) !== null && _g !== void 0 ? _g : MESSAGE_RESPONSE_ERROR_MESSAGE;
+ (_h = this.log) === null || _h === void 0 ? void 0 : _h.info('Received response from OpenAI', message);
  return message;
  });
  }
package/package.json CHANGED
@@ -8,7 +8,7 @@
  "eta"
  ]
  },
- "version": "12.2.0",
+ "version": "12.3.0",
  "files": [
  "build"
  ],