@sprucelabs/sprucebot-llm 13.3.0 → 14.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/build/.spruce/errors/errors.types.d.ts +19 -0
  2. package/build/.spruce/errors/options.types.d.ts +4 -1
  3. package/build/.spruce/errors/sprucebotLlm/invalidLlmAdapter.schema.d.ts +3 -0
  4. package/build/.spruce/errors/sprucebotLlm/invalidLlmAdapter.schema.js +18 -0
  5. package/build/bots/adapters/AnthropicAdapter.d.ts +3 -1
  6. package/build/bots/adapters/AnthropicAdapter.js +3 -0
  7. package/build/bots/adapters/LlmAdapterLoader.d.ts +7 -0
  8. package/build/bots/adapters/LlmAdapterLoader.js +61 -0
  9. package/build/bots/adapters/OllamaAdapter.d.ts +6 -3
  10. package/build/bots/adapters/OllamaAdapter.js +2 -2
  11. package/build/bots/adapters/OpenAiAdapter.d.ts +2 -2
  12. package/build/errors/SpruceError.js +3 -0
  13. package/build/errors/invalidLlmAdapter.builder.d.ts +11 -0
  14. package/build/errors/invalidLlmAdapter.builder.js +13 -0
  15. package/build/esm/.spruce/errors/errors.types.d.ts +19 -0
  16. package/build/esm/.spruce/errors/options.types.d.ts +4 -1
  17. package/build/esm/bots/adapters/AnthropicAdapter.d.ts +3 -1
  18. package/build/esm/bots/adapters/AnthropicAdapter.js +4 -0
  19. package/build/esm/bots/adapters/LlmAdapterLoader.d.ts +7 -0
  20. package/build/esm/bots/adapters/LlmAdapterLoader.js +49 -0
  21. package/build/esm/bots/adapters/OllamaAdapter.d.ts +6 -3
  22. package/build/esm/bots/adapters/OllamaAdapter.js +3 -2
  23. package/build/esm/bots/adapters/OpenAiAdapter.d.ts +2 -2
  24. package/build/esm/errors/SpruceError.js +3 -0
  25. package/build/esm/errors/invalidLlmAdapter.builder.d.ts +11 -0
  26. package/build/esm/errors/invalidLlmAdapter.builder.js +11 -0
  27. package/build/esm/index.d.ts +1 -0
  28. package/build/esm/index.js +1 -0
  29. package/build/esm/tests/SpyAdapter.d.ts +11 -4
  30. package/build/esm/tests/SpyAdapter.js +16 -6
  31. package/build/index.d.ts +1 -0
  32. package/build/index.js +3 -1
  33. package/build/tests/SpyAdapter.d.ts +11 -4
  34. package/build/tests/SpyAdapter.js +16 -6
  35. package/package.json +3 -3
@@ -11,6 +11,25 @@ export declare namespace SpruceErrors.SprucebotLlm {
11
11
  }
12
12
  type NoBotInstanceSetEntity = SchemaEntity<SpruceErrors.SprucebotLlm.NoBotInstanceSetSchema>;
13
13
  }
14
+ export declare namespace SpruceErrors.SprucebotLlm {
15
+ interface InvalidLlmAdapter {
16
+ 'adapter': string;
17
+ }
18
+ interface InvalidLlmAdapterSchema extends SpruceSchema.Schema {
19
+ id: 'invalidLlmAdapter';
20
+ namespace: 'SprucebotLlm';
21
+ name: 'Invalid Llm Adapter';
22
+ fields: {
23
+ /** . */
24
+ 'adapter': {
25
+ type: 'text';
26
+ isRequired: true;
27
+ options: undefined;
28
+ };
29
+ };
30
+ }
31
+ type InvalidLlmAdapterEntity = SchemaEntity<SpruceErrors.SprucebotLlm.InvalidLlmAdapterSchema>;
32
+ }
14
33
  export declare namespace SpruceErrors.SprucebotLlm {
15
34
  interface InvalidCallback {
16
35
  'validCallbacks': string[];
@@ -3,11 +3,14 @@ import { ErrorOptions as ISpruceErrorOptions } from "@sprucelabs/error";
3
3
  export interface NoBotInstanceSetErrorOptions extends SpruceErrors.SprucebotLlm.NoBotInstanceSet, ISpruceErrorOptions {
4
4
  code: 'NO_BOT_INSTANCE_SET';
5
5
  }
6
+ export interface InvalidLlmAdapterErrorOptions extends SpruceErrors.SprucebotLlm.InvalidLlmAdapter, ISpruceErrorOptions {
7
+ code: 'INVALID_LLM_ADAPTER';
8
+ }
6
9
  export interface InvalidCallbackErrorOptions extends SpruceErrors.SprucebotLlm.InvalidCallback, ISpruceErrorOptions {
7
10
  code: 'INVALID_CALLBACK';
8
11
  }
9
12
  export interface CallbackErrorErrorOptions extends SpruceErrors.SprucebotLlm.CallbackError, ISpruceErrorOptions {
10
13
  code: 'CALLBACK_ERROR';
11
14
  }
12
- type ErrorOptions = NoBotInstanceSetErrorOptions | InvalidCallbackErrorOptions | CallbackErrorErrorOptions;
15
+ type ErrorOptions = NoBotInstanceSetErrorOptions | InvalidLlmAdapterErrorOptions | InvalidCallbackErrorOptions | CallbackErrorErrorOptions;
13
16
  export default ErrorOptions;
@@ -0,0 +1,3 @@
1
+ import { SpruceErrors } from '../errors.types';
2
+ declare const invalidLlmAdapterSchema: SpruceErrors.SprucebotLlm.InvalidLlmAdapterSchema;
3
+ export default invalidLlmAdapterSchema;
@@ -0,0 +1,18 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const schema_1 = require("@sprucelabs/schema");
4
+ const invalidLlmAdapterSchema = {
5
+ id: 'invalidLlmAdapter',
6
+ namespace: 'SprucebotLlm',
7
+ name: 'Invalid Llm Adapter',
8
+ fields: {
9
+ /** . */
10
+ 'adapter': {
11
+ type: 'text',
12
+ isRequired: true,
13
+ options: undefined
14
+ },
15
+ }
16
+ };
17
+ schema_1.SchemaRegistry.getInstance().trackSchema(invalidLlmAdapterSchema);
18
+ exports.default = invalidLlmAdapterSchema;
@@ -3,6 +3,7 @@ import { Log } from '@sprucelabs/spruce-skill-utils';
3
3
  import Anthropic from '@anthropic-ai/sdk';
4
4
  import { LlmAdapter, SprucebotLlmBot, SendMessageOptions, LllmReasoningEffort } from '../../llm.types';
5
5
  export default class AnthropicAdapter implements LlmAdapter {
6
+ static Class?: new (apiKey: string, options?: AnthropicAdapterOptions) => LlmAdapter;
6
7
  static Anthropic: typeof Anthropic;
7
8
  private api;
8
9
  private model;
@@ -11,7 +12,8 @@ export default class AnthropicAdapter implements LlmAdapter {
11
12
  private memoryLimit?;
12
13
  private isThinkingEnabled;
13
14
  private log?;
14
- constructor(apiKey: string, options: AnthropicAdapterOptions);
15
+ private constructor();
16
+ static Adapter(apiKey: string, options: AnthropicAdapterOptions): LlmAdapter;
15
17
  sendMessage(bot: SprucebotLlmBot<Schema>, options?: SendMessageOptions): Promise<string>;
16
18
  private sendHandler;
17
19
  setModel(model: string): void;
@@ -22,6 +22,9 @@ class AnthropicAdapter {
22
22
  this.log = log?.buildLog('AnthropicAdapter');
23
23
  this.sender = MessageSender_1.default.Sender(this.sendHandler.bind(this), log);
24
24
  }
25
+ static Adapter(apiKey, options) {
26
+ return new (this.Class ?? this)(apiKey, options);
27
+ }
25
28
  async sendMessage(bot, options) {
26
29
  const text = await this.sender.sendMessage(bot, {
27
30
  model: this.model,
@@ -0,0 +1,7 @@
1
+ export default class LlmAdapterLoader {
2
+ static VALID_ADAPTERS: string[];
3
+ private adapterName;
4
+ private constructor();
5
+ static Loader(): LlmAdapterLoader;
6
+ Adapter(): import("../../llm.types").LlmAdapter;
7
+ }
@@ -0,0 +1,61 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const schema_1 = require("@sprucelabs/schema");
7
+ const SpruceError_1 = __importDefault(require("../../errors/SpruceError"));
8
+ const AnthropicAdapter_1 = __importDefault(require("./AnthropicAdapter"));
9
+ const OllamaAdapter_1 = __importDefault(require("./OllamaAdapter"));
10
+ const OpenAiAdapter_1 = __importDefault(require("./OpenAiAdapter"));
11
+ class LlmAdapterLoader {
12
+ constructor(adapterName) {
13
+ this.adapterName = adapterName;
14
+ }
15
+ static Loader() {
16
+ const { env: { SPRUCE_LLM_ADAPTER }, } = (0, schema_1.assertOptions)({
17
+ env: process.env,
18
+ }, ['env.SPRUCE_LLM_ADAPTER']);
19
+ const name = SPRUCE_LLM_ADAPTER.toLowerCase();
20
+ if (!LlmAdapterLoader.VALID_ADAPTERS.includes(name)) {
21
+ throw new SpruceError_1.default({
22
+ code: 'INVALID_LLM_ADAPTER',
23
+ adapter: SPRUCE_LLM_ADAPTER,
24
+ });
25
+ }
26
+ if (name === 'anthropic') {
27
+ (0, schema_1.assertOptions)({
28
+ env: process.env,
29
+ }, ['env.SPRUCE_LLM_MAX_TOKENS']);
30
+ }
31
+ return new this(name);
32
+ }
33
+ Adapter() {
34
+ const key = process.env.SPRUCE_LLM_API_KEY;
35
+ const options = {
36
+ memoryLimit: process.env.SPRUCE_LLM_MEMORY_LIMIT
37
+ ? parseInt(process.env.SPRUCE_LLM_MEMORY_LIMIT, 10)
38
+ : undefined,
39
+ model: process.env.SPRUCE_LLM_MODEL,
40
+ baseUrl: process.env.SPRUCE_LLM_BASE_URL,
41
+ };
42
+ const thinking = process.env.SPRUCE_LLM_THINKING === 'true';
43
+ if (this.adapterName === 'anthropic') {
44
+ return AnthropicAdapter_1.default.Adapter(key, {
45
+ ...options,
46
+ thinking,
47
+ maxTokens: parseInt(process.env.SPRUCE_LLM_MAX_TOKENS, 10),
48
+ });
49
+ }
50
+ if (this.adapterName === 'ollama') {
51
+ return OllamaAdapter_1.default.Adapter({ ...options, think: thinking });
52
+ }
53
+ return OpenAiAdapter_1.default.Adapter(key, {
54
+ ...options,
55
+ reasoningEffort: process.env
56
+ .SPRUCE_LLM_REASONING_EFFORT,
57
+ });
58
+ }
59
+ }
60
+ LlmAdapterLoader.VALID_ADAPTERS = ['openai', 'anthropic', 'ollama'];
61
+ exports.default = LlmAdapterLoader;
@@ -1,17 +1,20 @@
1
1
  import { Schema } from '@sprucelabs/schema';
2
+ import { Log } from '@sprucelabs/spruce-skill-utils';
2
3
  import { LlmAdapter, SprucebotLlmBot, SendMessageOptions, LllmReasoningEffort } from '../../llm.types';
3
4
  export default class OllamaAdapter implements LlmAdapter {
5
+ static Class?: new (apiKey: string, options?: OllamaAdapterOptions) => LlmAdapter;
4
6
  private openai;
5
7
  private think;
6
8
  private constructor();
7
- static Adapter(options?: OllamaOptions): OllamaAdapter;
9
+ static Adapter(options?: OllamaAdapterOptions): LlmAdapter;
8
10
  sendMessage(bot: SprucebotLlmBot<Schema>, options?: SendMessageOptions): Promise<string>;
9
11
  setModel(model: string): void;
10
12
  setReasoningEffort(effort: LllmReasoningEffort): void;
11
13
  }
12
- interface OllamaOptions {
14
+ export interface OllamaAdapterOptions {
15
+ log?: Log;
13
16
  model?: string;
14
17
  think?: boolean;
15
18
  baseUrl?: string;
19
+ memoryLimit?: number;
16
20
  }
17
- export {};
@@ -5,7 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
5
5
  Object.defineProperty(exports, "__esModule", { value: true });
6
6
  const OpenAiAdapter_1 = __importDefault(require("./OpenAiAdapter"));
7
7
  class OllamaAdapter {
8
- constructor(options) {
8
+ constructor(_apiKey, options) {
9
9
  this.think = options?.think ?? false;
10
10
  this.openai = OpenAiAdapter_1.default.Adapter('***', {
11
11
  baseUrl: 'http://localhost:11434/v1',
@@ -13,7 +13,7 @@ class OllamaAdapter {
13
13
  });
14
14
  }
15
15
  static Adapter(options) {
16
- return new this(options);
16
+ return new (this.Class ?? this)('***', options);
17
17
  }
18
18
  async sendMessage(bot, options) {
19
19
  //@ts-ignore
@@ -3,7 +3,7 @@ import OpenAI from 'openai';
3
3
  import { ReasoningEffort } from 'openai/resources';
4
4
  import { LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../../llm.types';
5
5
  export default class OpenAiAdapter implements LlmAdapter {
6
- static Class: new (apiKey: string, options?: OpenAiAdapterOptions) => OpenAiAdapter;
6
+ static Class?: new (apiKey: string, options?: OpenAiAdapterOptions) => LlmAdapter;
7
7
  static OpenAI: typeof OpenAI;
8
8
  private api;
9
9
  private model;
@@ -11,7 +11,7 @@ export default class OpenAiAdapter implements LlmAdapter {
11
11
  private reasoningEffort?;
12
12
  private sender;
13
13
  protected constructor(apiKey: string, options?: OpenAiAdapterOptions);
14
- static Adapter(apiKey: string, options?: OpenAiAdapterOptions): OpenAiAdapter;
14
+ static Adapter(apiKey: string, options?: OpenAiAdapterOptions): LlmAdapter;
15
15
  sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
16
16
  private sendHandler;
17
17
  private getReasoningEffort;
@@ -39,6 +39,9 @@ ${options.validCallbacks.map((name, idx) => `${idx + 1}: ${name}`).join('\n')}`;
39
39
  message += `\n\nOriginal Error: ${options.originalError.message}`;
40
40
  }
41
41
  break;
42
+ case 'INVALID_LLM_ADAPTER':
43
+ message = `SPRUCE_LLM_ADAPTER=${options.adapter} is not a valid adapter. Please set SPRUCE_LLM_ADAPTER to a valid adapter (OpenAi, Anthropic, Ollama) and try again.`;
44
+ break;
42
45
  default:
43
46
  message = super.friendlyMessage();
44
47
  }
@@ -0,0 +1,11 @@
1
+ declare const _default: {
2
+ id: string;
3
+ name: string;
4
+ fields: {
5
+ adapter: {
6
+ type: "text";
7
+ isRequired: true;
8
+ };
9
+ };
10
+ };
11
+ export default _default;
@@ -0,0 +1,13 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const schema_1 = require("@sprucelabs/schema");
4
+ exports.default = (0, schema_1.buildErrorSchema)({
5
+ id: 'invalidLlmAdapter',
6
+ name: 'Invalid Llm Adapter',
7
+ fields: {
8
+ adapter: {
9
+ type: 'text',
10
+ isRequired: true,
11
+ },
12
+ },
13
+ });
@@ -11,6 +11,25 @@ export declare namespace SpruceErrors.SprucebotLlm {
11
11
  }
12
12
  type NoBotInstanceSetEntity = SchemaEntity<SpruceErrors.SprucebotLlm.NoBotInstanceSetSchema>;
13
13
  }
14
+ export declare namespace SpruceErrors.SprucebotLlm {
15
+ interface InvalidLlmAdapter {
16
+ 'adapter': string;
17
+ }
18
+ interface InvalidLlmAdapterSchema extends SpruceSchema.Schema {
19
+ id: 'invalidLlmAdapter';
20
+ namespace: 'SprucebotLlm';
21
+ name: 'Invalid Llm Adapter';
22
+ fields: {
23
+ /** . */
24
+ 'adapter': {
25
+ type: 'text';
26
+ isRequired: true;
27
+ options: undefined;
28
+ };
29
+ };
30
+ }
31
+ type InvalidLlmAdapterEntity = SchemaEntity<SpruceErrors.SprucebotLlm.InvalidLlmAdapterSchema>;
32
+ }
14
33
  export declare namespace SpruceErrors.SprucebotLlm {
15
34
  interface InvalidCallback {
16
35
  'validCallbacks': string[];
@@ -3,11 +3,14 @@ import { ErrorOptions as ISpruceErrorOptions } from "@sprucelabs/error";
3
3
  export interface NoBotInstanceSetErrorOptions extends SpruceErrors.SprucebotLlm.NoBotInstanceSet, ISpruceErrorOptions {
4
4
  code: 'NO_BOT_INSTANCE_SET';
5
5
  }
6
+ export interface InvalidLlmAdapterErrorOptions extends SpruceErrors.SprucebotLlm.InvalidLlmAdapter, ISpruceErrorOptions {
7
+ code: 'INVALID_LLM_ADAPTER';
8
+ }
6
9
  export interface InvalidCallbackErrorOptions extends SpruceErrors.SprucebotLlm.InvalidCallback, ISpruceErrorOptions {
7
10
  code: 'INVALID_CALLBACK';
8
11
  }
9
12
  export interface CallbackErrorErrorOptions extends SpruceErrors.SprucebotLlm.CallbackError, ISpruceErrorOptions {
10
13
  code: 'CALLBACK_ERROR';
11
14
  }
12
- type ErrorOptions = NoBotInstanceSetErrorOptions | InvalidCallbackErrorOptions | CallbackErrorErrorOptions;
15
+ type ErrorOptions = NoBotInstanceSetErrorOptions | InvalidLlmAdapterErrorOptions | InvalidCallbackErrorOptions | CallbackErrorErrorOptions;
13
16
  export default ErrorOptions;
@@ -3,6 +3,7 @@ import { Log } from '@sprucelabs/spruce-skill-utils';
3
3
  import Anthropic from '@anthropic-ai/sdk';
4
4
  import { LlmAdapter, SprucebotLlmBot, SendMessageOptions, LllmReasoningEffort } from '../../llm.types';
5
5
  export default class AnthropicAdapter implements LlmAdapter {
6
+ static Class?: new (apiKey: string, options?: AnthropicAdapterOptions) => LlmAdapter;
6
7
  static Anthropic: typeof Anthropic;
7
8
  private api;
8
9
  private model;
@@ -11,7 +12,8 @@ export default class AnthropicAdapter implements LlmAdapter {
11
12
  private memoryLimit?;
12
13
  private isThinkingEnabled;
13
14
  private log?;
14
- constructor(apiKey: string, options: AnthropicAdapterOptions);
15
+ private constructor();
16
+ static Adapter(apiKey: string, options: AnthropicAdapterOptions): LlmAdapter;
15
17
  sendMessage(bot: SprucebotLlmBot<Schema>, options?: SendMessageOptions): Promise<string>;
16
18
  private sendHandler;
17
19
  setModel(model: string): void;
@@ -26,6 +26,10 @@ class AnthropicAdapter {
26
26
  this.log = log === null || log === void 0 ? void 0 : log.buildLog('AnthropicAdapter');
27
27
  this.sender = MessageSenderImpl.Sender(this.sendHandler.bind(this), log);
28
28
  }
29
+ static Adapter(apiKey, options) {
30
+ var _a;
31
+ return new ((_a = this.Class) !== null && _a !== void 0 ? _a : this)(apiKey, options);
32
+ }
29
33
  sendMessage(bot, options) {
30
34
  return __awaiter(this, void 0, void 0, function* () {
31
35
  const text = yield this.sender.sendMessage(bot, Object.assign({ model: this.model, memoryLimit: this.memoryLimit }, options));
@@ -0,0 +1,7 @@
1
+ export default class LlmAdapterLoader {
2
+ static VALID_ADAPTERS: string[];
3
+ private adapterName;
4
+ private constructor();
5
+ static Loader(): LlmAdapterLoader;
6
+ Adapter(): import("../../llm.types").LlmAdapter;
7
+ }
@@ -0,0 +1,49 @@
1
+ import { assertOptions } from '@sprucelabs/schema';
2
+ import SpruceError from '../../errors/SpruceError.js';
3
+ import AnthropicAdapter from './AnthropicAdapter.js';
4
+ import OllamaAdapter from './OllamaAdapter.js';
5
+ import OpenAiAdapter from './OpenAiAdapter.js';
6
+ class LlmAdapterLoader {
7
+ constructor(adapterName) {
8
+ this.adapterName = adapterName;
9
+ }
10
+ static Loader() {
11
+ const { env: { SPRUCE_LLM_ADAPTER }, } = assertOptions({
12
+ env: process.env,
13
+ }, ['env.SPRUCE_LLM_ADAPTER']);
14
+ const name = SPRUCE_LLM_ADAPTER.toLowerCase();
15
+ if (!LlmAdapterLoader.VALID_ADAPTERS.includes(name)) {
16
+ throw new SpruceError({
17
+ code: 'INVALID_LLM_ADAPTER',
18
+ adapter: SPRUCE_LLM_ADAPTER,
19
+ });
20
+ }
21
+ if (name === 'anthropic') {
22
+ assertOptions({
23
+ env: process.env,
24
+ }, ['env.SPRUCE_LLM_MAX_TOKENS']);
25
+ }
26
+ return new this(name);
27
+ }
28
+ Adapter() {
29
+ const key = process.env.SPRUCE_LLM_API_KEY;
30
+ const options = {
31
+ memoryLimit: process.env.SPRUCE_LLM_MEMORY_LIMIT
32
+ ? parseInt(process.env.SPRUCE_LLM_MEMORY_LIMIT, 10)
33
+ : undefined,
34
+ model: process.env.SPRUCE_LLM_MODEL,
35
+ baseUrl: process.env.SPRUCE_LLM_BASE_URL,
36
+ };
37
+ const thinking = process.env.SPRUCE_LLM_THINKING === 'true';
38
+ if (this.adapterName === 'anthropic') {
39
+ return AnthropicAdapter.Adapter(key, Object.assign(Object.assign({}, options), { thinking, maxTokens: parseInt(process.env.SPRUCE_LLM_MAX_TOKENS, 10) }));
40
+ }
41
+ if (this.adapterName === 'ollama') {
42
+ return OllamaAdapter.Adapter(Object.assign(Object.assign({}, options), { think: thinking }));
43
+ }
44
+ return OpenAiAdapter.Adapter(key, Object.assign(Object.assign({}, options), { reasoningEffort: process.env
45
+ .SPRUCE_LLM_REASONING_EFFORT }));
46
+ }
47
+ }
48
+ LlmAdapterLoader.VALID_ADAPTERS = ['openai', 'anthropic', 'ollama'];
49
+ export default LlmAdapterLoader;
@@ -1,17 +1,20 @@
1
1
  import { Schema } from '@sprucelabs/schema';
2
+ import { Log } from '@sprucelabs/spruce-skill-utils';
2
3
  import { LlmAdapter, SprucebotLlmBot, SendMessageOptions, LllmReasoningEffort } from '../../llm.types';
3
4
  export default class OllamaAdapter implements LlmAdapter {
5
+ static Class?: new (apiKey: string, options?: OllamaAdapterOptions) => LlmAdapter;
4
6
  private openai;
5
7
  private think;
6
8
  private constructor();
7
- static Adapter(options?: OllamaOptions): OllamaAdapter;
9
+ static Adapter(options?: OllamaAdapterOptions): LlmAdapter;
8
10
  sendMessage(bot: SprucebotLlmBot<Schema>, options?: SendMessageOptions): Promise<string>;
9
11
  setModel(model: string): void;
10
12
  setReasoningEffort(effort: LllmReasoningEffort): void;
11
13
  }
12
- interface OllamaOptions {
14
+ export interface OllamaAdapterOptions {
15
+ log?: Log;
13
16
  model?: string;
14
17
  think?: boolean;
15
18
  baseUrl?: string;
19
+ memoryLimit?: number;
16
20
  }
17
- export {};
@@ -9,13 +9,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
9
9
  };
10
10
  import OpenAiAdapter from './OpenAiAdapter.js';
11
11
  export default class OllamaAdapter {
12
- constructor(options) {
12
+ constructor(_apiKey, options) {
13
13
  var _a;
14
14
  this.think = (_a = options === null || options === void 0 ? void 0 : options.think) !== null && _a !== void 0 ? _a : false;
15
15
  this.openai = OpenAiAdapter.Adapter('***', Object.assign({ baseUrl: 'http://localhost:11434/v1' }, options));
16
16
  }
17
17
  static Adapter(options) {
18
- return new this(options);
18
+ var _a;
19
+ return new ((_a = this.Class) !== null && _a !== void 0 ? _a : this)('***', options);
19
20
  }
20
21
  sendMessage(bot, options) {
21
22
  return __awaiter(this, void 0, void 0, function* () {
@@ -3,7 +3,7 @@ import OpenAI from 'openai';
3
3
  import { ReasoningEffort } from 'openai/resources';
4
4
  import { LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../../llm.types';
5
5
  export default class OpenAiAdapter implements LlmAdapter {
6
- static Class: new (apiKey: string, options?: OpenAiAdapterOptions) => OpenAiAdapter;
6
+ static Class?: new (apiKey: string, options?: OpenAiAdapterOptions) => LlmAdapter;
7
7
  static OpenAI: typeof OpenAI;
8
8
  private api;
9
9
  private model;
@@ -11,7 +11,7 @@ export default class OpenAiAdapter implements LlmAdapter {
11
11
  private reasoningEffort?;
12
12
  private sender;
13
13
  protected constructor(apiKey: string, options?: OpenAiAdapterOptions);
14
- static Adapter(apiKey: string, options?: OpenAiAdapterOptions): OpenAiAdapter;
14
+ static Adapter(apiKey: string, options?: OpenAiAdapterOptions): LlmAdapter;
15
15
  sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
16
16
  private sendHandler;
17
17
  private getReasoningEffort;
@@ -34,6 +34,9 @@ ${options.validCallbacks.map((name, idx) => `${idx + 1}: ${name}`).join('\n')}`;
34
34
  message += `\n\nOriginal Error: ${options.originalError.message}`;
35
35
  }
36
36
  break;
37
+ case 'INVALID_LLM_ADAPTER':
38
+ message = `SPRUCE_LLM_ADAPTER=${options.adapter} is not a valid adapter. Please set SPRUCE_LLM_ADAPTER to a valid adapter (OpenAi, Anthropic, Ollama) and try again.`;
39
+ break;
37
40
  default:
38
41
  message = super.friendlyMessage();
39
42
  }
@@ -0,0 +1,11 @@
1
+ declare const _default: {
2
+ id: string;
3
+ name: string;
4
+ fields: {
5
+ adapter: {
6
+ type: "text";
7
+ isRequired: true;
8
+ };
9
+ };
10
+ };
11
+ export default _default;
@@ -0,0 +1,11 @@
1
+ import { buildErrorSchema } from '@sprucelabs/schema';
2
+ export default buildErrorSchema({
3
+ id: 'invalidLlmAdapter',
4
+ name: 'Invalid Llm Adapter',
5
+ fields: {
6
+ adapter: {
7
+ type: 'text',
8
+ isRequired: true,
9
+ },
10
+ },
11
+ });
@@ -12,3 +12,4 @@ export { default as OllamaAdapter } from './bots/adapters/OllamaAdapter';
12
12
  export * from './bots/adapters/OllamaAdapter';
13
13
  export { default as AthropicAdapter } from './bots/adapters/AnthropicAdapter';
14
14
  export * from './bots/adapters/AnthropicAdapter';
15
+ export { default as LlmAdapterLoader } from './bots/adapters/LlmAdapterLoader';
@@ -12,3 +12,4 @@ export { default as OllamaAdapter } from './bots/adapters/OllamaAdapter.js';
12
12
  export * from './bots/adapters/OllamaAdapter.js';
13
13
  export { default as AthropicAdapter } from './bots/adapters/AnthropicAdapter.js';
14
14
  export * from './bots/adapters/AnthropicAdapter.js';
15
+ export { default as LlmAdapterLoader } from './bots/adapters/LlmAdapterLoader.js';
@@ -1,11 +1,18 @@
1
+ import { OpenAiAdapterOptions } from '../bots/adapters/OpenAiAdapter';
1
2
  import { LllmReasoningEffort, LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../llm.types';
2
3
  export default class SpyLlmAdapter implements LlmAdapter {
3
- lastBot?: SprucebotLlmBot;
4
+ static instance: SpyLlmAdapter;
5
+ lastSendMessageBot?: SprucebotLlmBot;
4
6
  lastMessage?: string;
5
- messageResponse: string;
6
- lastSendOptions?: SendMessageOptions;
7
+ lastSendMessageResponse: string;
8
+ lastSendMessageOptions?: SendMessageOptions;
7
9
  responseDelayMs?: number;
10
+ manuallySetModel?: string;
11
+ apiKey: string;
12
+ constructorOptions?: OpenAiAdapterOptions;
13
+ shouldRandomizeResponseMessage: boolean;
14
+ constructor(apiKey: string, options?: OpenAiAdapterOptions);
8
15
  sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
9
- setModel(_model: string): void;
16
+ setModel(model: string): void;
10
17
  setReasoningEffort(_effort: LllmReasoningEffort): void;
11
18
  }
@@ -7,20 +7,30 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
7
7
  step((generator = generator.apply(thisArg, _arguments || [])).next());
8
8
  });
9
9
  };
10
+ import { generateId } from '@sprucelabs/test-utils';
10
11
  export default class SpyLlmAdapter {
11
- constructor() {
12
- this.messageResponse = '';
12
+ constructor(apiKey, options) {
13
+ this.lastSendMessageResponse = generateId();
14
+ this.shouldRandomizeResponseMessage = true;
15
+ SpyLlmAdapter.instance = this;
16
+ this.apiKey = apiKey;
17
+ this.constructorOptions = options;
13
18
  }
14
19
  sendMessage(bot, options) {
15
20
  return __awaiter(this, void 0, void 0, function* () {
16
- this.lastBot = bot;
17
- this.lastSendOptions = options;
21
+ this.lastSendMessageBot = bot;
22
+ this.lastSendMessageOptions = options;
18
23
  if (this.responseDelayMs) {
19
24
  yield new Promise((resolve) => setTimeout(resolve, this.responseDelayMs));
20
25
  }
21
- return this.messageResponse;
26
+ if (this.shouldRandomizeResponseMessage) {
27
+ this.lastSendMessageResponse = generateId();
28
+ }
29
+ return this.lastSendMessageResponse;
22
30
  });
23
31
  }
24
- setModel(_model) { }
32
+ setModel(model) {
33
+ this.manuallySetModel = model;
34
+ }
25
35
  setReasoningEffort(_effort) { }
26
36
  }
package/build/index.d.ts CHANGED
@@ -12,3 +12,4 @@ export { default as OllamaAdapter } from './bots/adapters/OllamaAdapter';
12
12
  export * from './bots/adapters/OllamaAdapter';
13
13
  export { default as AthropicAdapter } from './bots/adapters/AnthropicAdapter';
14
14
  export * from './bots/adapters/AnthropicAdapter';
15
+ export { default as LlmAdapterLoader } from './bots/adapters/LlmAdapterLoader';
package/build/index.js CHANGED
@@ -17,7 +17,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
17
17
  return (mod && mod.__esModule) ? mod : { "default": mod };
18
18
  };
19
19
  Object.defineProperty(exports, "__esModule", { value: true });
20
- exports.AthropicAdapter = exports.OllamaAdapter = exports.SpyOpenAiApi = exports.SpyLlmAdapter = exports.SpyLllmBot = exports.MockLlmSkill = exports.SprucebotLlmError = exports.OpenAiAdapter = exports.SprucebotLlmSkillImpl = exports.SprucebotLlmBotImpl = exports.SprucebotLlmFactory = void 0;
20
+ exports.LlmAdapterLoader = exports.AthropicAdapter = exports.OllamaAdapter = exports.SpyOpenAiApi = exports.SpyLlmAdapter = exports.SpyLllmBot = exports.MockLlmSkill = exports.SprucebotLlmError = exports.OpenAiAdapter = exports.SprucebotLlmSkillImpl = exports.SprucebotLlmBotImpl = exports.SprucebotLlmFactory = void 0;
21
21
  var SprucebotLlmFactory_1 = require("./bots/SprucebotLlmFactory");
22
22
  Object.defineProperty(exports, "SprucebotLlmFactory", { enumerable: true, get: function () { return __importDefault(SprucebotLlmFactory_1).default; } });
23
23
  var SprucebotLlmBotImpl_1 = require("./bots/SprucebotLlmBotImpl");
@@ -43,3 +43,5 @@ __exportStar(require("./bots/adapters/OllamaAdapter"), exports);
43
43
  var AnthropicAdapter_1 = require("./bots/adapters/AnthropicAdapter");
44
44
  Object.defineProperty(exports, "AthropicAdapter", { enumerable: true, get: function () { return __importDefault(AnthropicAdapter_1).default; } });
45
45
  __exportStar(require("./bots/adapters/AnthropicAdapter"), exports);
46
+ var LlmAdapterLoader_1 = require("./bots/adapters/LlmAdapterLoader");
47
+ Object.defineProperty(exports, "LlmAdapterLoader", { enumerable: true, get: function () { return __importDefault(LlmAdapterLoader_1).default; } });
@@ -1,11 +1,18 @@
1
+ import { OpenAiAdapterOptions } from '../bots/adapters/OpenAiAdapter';
1
2
  import { LllmReasoningEffort, LlmAdapter, SendMessageOptions, SprucebotLlmBot } from '../llm.types';
2
3
  export default class SpyLlmAdapter implements LlmAdapter {
3
- lastBot?: SprucebotLlmBot;
4
+ static instance: SpyLlmAdapter;
5
+ lastSendMessageBot?: SprucebotLlmBot;
4
6
  lastMessage?: string;
5
- messageResponse: string;
6
- lastSendOptions?: SendMessageOptions;
7
+ lastSendMessageResponse: string;
8
+ lastSendMessageOptions?: SendMessageOptions;
7
9
  responseDelayMs?: number;
10
+ manuallySetModel?: string;
11
+ apiKey: string;
12
+ constructorOptions?: OpenAiAdapterOptions;
13
+ shouldRandomizeResponseMessage: boolean;
14
+ constructor(apiKey: string, options?: OpenAiAdapterOptions);
8
15
  sendMessage(bot: SprucebotLlmBot, options?: SendMessageOptions): Promise<string>;
9
- setModel(_model: string): void;
16
+ setModel(model: string): void;
10
17
  setReasoningEffort(_effort: LllmReasoningEffort): void;
11
18
  }
@@ -1,18 +1,28 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
+ const test_utils_1 = require("@sprucelabs/test-utils");
3
4
  class SpyLlmAdapter {
4
- constructor() {
5
- this.messageResponse = '';
5
+ constructor(apiKey, options) {
6
+ this.lastSendMessageResponse = (0, test_utils_1.generateId)();
7
+ this.shouldRandomizeResponseMessage = true;
8
+ SpyLlmAdapter.instance = this;
9
+ this.apiKey = apiKey;
10
+ this.constructorOptions = options;
6
11
  }
7
12
  async sendMessage(bot, options) {
8
- this.lastBot = bot;
9
- this.lastSendOptions = options;
13
+ this.lastSendMessageBot = bot;
14
+ this.lastSendMessageOptions = options;
10
15
  if (this.responseDelayMs) {
11
16
  await new Promise((resolve) => setTimeout(resolve, this.responseDelayMs));
12
17
  }
13
- return this.messageResponse;
18
+ if (this.shouldRandomizeResponseMessage) {
19
+ this.lastSendMessageResponse = (0, test_utils_1.generateId)();
20
+ }
21
+ return this.lastSendMessageResponse;
22
+ }
23
+ setModel(model) {
24
+ this.manuallySetModel = model;
14
25
  }
15
- setModel(_model) { }
16
26
  setReasoningEffort(_effort) { }
17
27
  }
18
28
  exports.default = SpyLlmAdapter;
package/package.json CHANGED
@@ -8,7 +8,7 @@
8
8
  "eta"
9
9
  ]
10
10
  },
11
- "version": "13.3.0",
11
+ "version": "14.0.0",
12
12
  "files": [
13
13
  "build"
14
14
  ],
@@ -67,10 +67,10 @@
67
67
  "dependencies": {
68
68
  "@anthropic-ai/sdk": "^0.76.0",
69
69
  "@sprucelabs/error": "^8.1.9",
70
- "@sprucelabs/mercury-event-emitter": "^46.1.6",
70
+ "@sprucelabs/mercury-event-emitter": "^46.1.7",
71
71
  "@sprucelabs/mercury-types": "^49.1.9",
72
72
  "@sprucelabs/schema": "^33.2.6",
73
- "@sprucelabs/spruce-skill-utils": "^34.0.13",
73
+ "@sprucelabs/spruce-skill-utils": "^34.0.14",
74
74
  "eta": "3.5.0",
75
75
  "openai": "^6.22.0"
76
76
  },