@node-llm/core 0.5.0 → 0.7.0

This diff shows the published contents of two publicly available package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (107)
  1. package/README.md +83 -421
  2. package/dist/chat/Chat.d.ts +5 -0
  3. package/dist/chat/Chat.d.ts.map +1 -1
  4. package/dist/chat/Chat.js +31 -7
  5. package/dist/chat/ChatOptions.d.ts +3 -0
  6. package/dist/chat/ChatOptions.d.ts.map +1 -1
  7. package/dist/chat/ChatResponse.d.ts +5 -1
  8. package/dist/chat/ChatResponse.d.ts.map +1 -1
  9. package/dist/chat/ChatResponse.js +6 -1
  10. package/dist/chat/Stream.d.ts.map +1 -1
  11. package/dist/chat/Stream.js +7 -1
  12. package/dist/config.d.ts +29 -0
  13. package/dist/config.d.ts.map +1 -0
  14. package/dist/config.js +11 -0
  15. package/dist/index.d.ts +2 -0
  16. package/dist/index.d.ts.map +1 -1
  17. package/dist/index.js +1 -0
  18. package/dist/llm.d.ts +20 -10
  19. package/dist/llm.d.ts.map +1 -1
  20. package/dist/llm.js +52 -23
  21. package/dist/models/ModelRegistry.d.ts +39 -12
  22. package/dist/models/ModelRegistry.d.ts.map +1 -1
  23. package/dist/models/ModelRegistry.js +50 -40
  24. package/dist/models/models.d.ts +972 -0
  25. package/dist/models/models.d.ts.map +1 -0
  26. package/dist/models/models.js +7076 -0
  27. package/dist/models/types.d.ts +50 -0
  28. package/dist/models/types.d.ts.map +1 -0
  29. package/dist/models/types.js +1 -0
  30. package/dist/providers/Provider.d.ts +7 -0
  31. package/dist/providers/Provider.d.ts.map +1 -1
  32. package/dist/providers/anthropic/AnthropicProvider.d.ts +1 -0
  33. package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
  34. package/dist/providers/anthropic/AnthropicProvider.js +2 -3
  35. package/dist/providers/anthropic/Capabilities.d.ts +1 -37
  36. package/dist/providers/anthropic/Capabilities.d.ts.map +1 -1
  37. package/dist/providers/anthropic/Capabilities.js +59 -130
  38. package/dist/providers/anthropic/Chat.d.ts.map +1 -1
  39. package/dist/providers/anthropic/Chat.js +6 -2
  40. package/dist/providers/anthropic/Models.d.ts +1 -0
  41. package/dist/providers/anthropic/Models.d.ts.map +1 -1
  42. package/dist/providers/anthropic/Models.js +36 -41
  43. package/dist/providers/anthropic/Streaming.d.ts.map +1 -1
  44. package/dist/providers/anthropic/Streaming.js +10 -1
  45. package/dist/providers/anthropic/index.d.ts.map +1 -1
  46. package/dist/providers/anthropic/index.js +3 -2
  47. package/dist/providers/deepseek/Capabilities.d.ts +14 -0
  48. package/dist/providers/deepseek/Capabilities.d.ts.map +1 -0
  49. package/dist/providers/deepseek/Capabilities.js +52 -0
  50. package/dist/providers/deepseek/Chat.d.ts +8 -0
  51. package/dist/providers/deepseek/Chat.d.ts.map +1 -0
  52. package/dist/providers/deepseek/Chat.js +89 -0
  53. package/dist/providers/deepseek/DeepSeekProvider.d.ts +28 -0
  54. package/dist/providers/deepseek/DeepSeekProvider.d.ts.map +1 -0
  55. package/dist/providers/deepseek/DeepSeekProvider.js +38 -0
  56. package/dist/providers/deepseek/Models.d.ts +8 -0
  57. package/dist/providers/deepseek/Models.d.ts.map +1 -0
  58. package/dist/providers/deepseek/Models.js +67 -0
  59. package/dist/providers/deepseek/Streaming.d.ts +8 -0
  60. package/dist/providers/deepseek/Streaming.d.ts.map +1 -0
  61. package/dist/providers/deepseek/Streaming.js +74 -0
  62. package/dist/providers/deepseek/index.d.ts +7 -0
  63. package/dist/providers/deepseek/index.d.ts.map +1 -0
  64. package/dist/providers/deepseek/index.js +22 -0
  65. package/dist/providers/gemini/Capabilities.d.ts +28 -7
  66. package/dist/providers/gemini/Capabilities.d.ts.map +1 -1
  67. package/dist/providers/gemini/Capabilities.js +32 -20
  68. package/dist/providers/gemini/Chat.d.ts.map +1 -1
  69. package/dist/providers/gemini/Chat.js +9 -11
  70. package/dist/providers/gemini/GeminiProvider.d.ts +1 -0
  71. package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
  72. package/dist/providers/gemini/GeminiProvider.js +1 -0
  73. package/dist/providers/gemini/Models.d.ts +1 -0
  74. package/dist/providers/gemini/Models.d.ts.map +1 -1
  75. package/dist/providers/gemini/Models.js +46 -26
  76. package/dist/providers/gemini/index.d.ts.map +1 -1
  77. package/dist/providers/gemini/index.js +3 -2
  78. package/dist/providers/openai/Capabilities.d.ts +4 -11
  79. package/dist/providers/openai/Capabilities.d.ts.map +1 -1
  80. package/dist/providers/openai/Capabilities.js +124 -121
  81. package/dist/providers/openai/Chat.d.ts.map +1 -1
  82. package/dist/providers/openai/Chat.js +19 -17
  83. package/dist/providers/openai/Embedding.d.ts.map +1 -1
  84. package/dist/providers/openai/Embedding.js +2 -1
  85. package/dist/providers/openai/Image.d.ts.map +1 -1
  86. package/dist/providers/openai/Image.js +2 -1
  87. package/dist/providers/openai/ModelDefinitions.d.ts +1 -24
  88. package/dist/providers/openai/ModelDefinitions.d.ts.map +1 -1
  89. package/dist/providers/openai/ModelDefinitions.js +1 -211
  90. package/dist/providers/openai/Models.d.ts +1 -0
  91. package/dist/providers/openai/Models.d.ts.map +1 -1
  92. package/dist/providers/openai/Models.js +46 -22
  93. package/dist/providers/openai/Moderation.d.ts.map +1 -1
  94. package/dist/providers/openai/Moderation.js +2 -1
  95. package/dist/providers/openai/OpenAIProvider.d.ts +1 -0
  96. package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
  97. package/dist/providers/openai/OpenAIProvider.js +1 -0
  98. package/dist/providers/openai/Streaming.d.ts.map +1 -1
  99. package/dist/providers/openai/Streaming.js +5 -1
  100. package/dist/providers/openai/Transcription.d.ts.map +1 -1
  101. package/dist/providers/openai/Transcription.js +3 -2
  102. package/dist/providers/openai/index.d.ts.map +1 -1
  103. package/dist/providers/openai/index.js +5 -3
  104. package/dist/providers/openai/utils.d.ts +20 -0
  105. package/dist/providers/openai/utils.d.ts.map +1 -0
  106. package/dist/providers/openai/utils.js +25 -0
  107. package/package.json +1 -1
package/dist/chat/Chat.js CHANGED
@@ -44,9 +44,10 @@ export class Chat {
  acc.total_tokens += msg.usage.total_tokens;
  acc.cached_tokens = (acc.cached_tokens ?? 0) + (msg.usage.cached_tokens ?? 0);
  acc.cache_creation_tokens = (acc.cache_creation_tokens ?? 0) + (msg.usage.cache_creation_tokens ?? 0);
+ acc.cost = (acc.cost ?? 0) + (msg.usage.cost ?? 0);
  }
  return acc;
- }, { input_tokens: 0, output_tokens: 0, total_tokens: 0, cached_tokens: 0, cache_creation_tokens: 0 });
+ }, { input_tokens: 0, output_tokens: 0, total_tokens: 0, cached_tokens: 0, cache_creation_tokens: 0, cost: 0 });
  }
  /**
  * Add a tool to the chat session (fluent API)
@@ -145,6 +146,14 @@ export class Chat {
  }
  return this;
  }
+ /**
+ * Set provider-specific parameters.
+ * These will be merged into the final request payload.
+ */
+ withParams(params) {
+ this.options.params = { ...this.options.params, ...params };
+ return this;
+ }
  /**
  * Enforce a specific schema for the output.
  * Can accept a Schema object or a Zod schema/JSON Schema directly.
@@ -191,9 +200,12 @@ export class Chat {
  if (files.length > 0) {
  const processedFiles = await Promise.all(files.map(f => FileLoader.load(f)));
  const hasBinary = processedFiles.some(p => p.type === "image_url" || p.type === "input_audio" || p.type === "video_url");
- if (hasBinary && this.provider.capabilities && !this.provider.capabilities.supportsVision(this.model)) {
+ if (hasBinary && !this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsVision(this.model)) {
  throw new Error(`Model ${this.model} does not support vision/binary files.`);
  }
+ if (hasBinary && this.options.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping vision capability validation for model ${this.model}`);
+ }
  // Separate text files from binary files
  const textFiles = processedFiles.filter(p => p.type === "text");
  const binaryFiles = processedFiles.filter(p => p.type !== "text");
@@ -215,9 +227,12 @@ export class Chat {
  }
  }
  if (this.options.tools && this.options.tools.length > 0) {
- if (this.provider.capabilities && !this.provider.capabilities.supportsTools(this.model)) {
+ if (!this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsTools(this.model)) {
  throw new Error(`Model ${this.model} does not support tool calling.`);
  }
+ if (this.options.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping tool capability validation for model ${this.model}`);
+ }
  }
  this.messages.push({
  role: "user",
@@ -226,9 +241,12 @@ export class Chat {
  // Process Schema/Structured Output
  let responseFormat = this.options.responseFormat;
  if (this.options.schema) {
- if (this.provider.capabilities && !this.provider.capabilities.supportsStructuredOutput(this.model)) {
+ if (!this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsStructuredOutput(this.model)) {
  throw new Error(`Model ${this.model} does not support structured output.`);
  }
+ if (this.options.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping structured output capability validation for model ${this.model}`);
+ }
  const jsonSchema = toJsonSchema(this.options.schema.definition.schema);
  responseFormat = {
  type: "json_schema",
@@ -248,6 +266,7 @@ export class Chat {
  max_tokens: options?.maxTokens ?? this.options.maxTokens,
  headers: { ...this.options.headers, ...options?.headers },
  response_format: responseFormat, // Pass to provider
+ ...this.options.params,
  };
  let totalUsage = { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
  const trackUsage = (u) => {
@@ -258,6 +277,9 @@ export class Chat {
  if (u.cached_tokens) {
  totalUsage.cached_tokens = (totalUsage.cached_tokens ?? 0) + u.cached_tokens;
  }
+ if (u.cost !== undefined) {
+ totalUsage.cost = (totalUsage.cost ?? 0) + u.cost;
+ }
  }
  };
  // First round
@@ -265,7 +287,7 @@
  this.options.onNewMessage();
  let response = await this.executor.executeChat(executeOptions);
  trackUsage(response.usage);
- const firstAssistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model);
+ const firstAssistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, response.reasoning);
  this.messages.push({
  role: "assistant",
  content: firstAssistantMessage,
@@ -315,7 +337,7 @@
  headers: this.options.headers,
  });
  trackUsage(response.usage);
- const assistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model);
+ const assistantMessage = new ChatResponseString(response.content ?? "", response.usage ?? { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, response.reasoning);
  this.messages.push({
  role: "assistant",
  content: assistantMessage,
@@ -326,7 +348,9 @@
  this.options.onEndMessage(assistantMessage);
  }
  }
- return new ChatResponseString(response.content ?? "", totalUsage, this.model);
+ // For the final return, we might want to aggregate reasoning too if it happened in multiple turns?
+ // Usually reasoning only happens once or we just want the last one.
+ return new ChatResponseString(response.content ?? "", totalUsage, this.model, response.reasoning);
  }
  /**
  * Streams the model's response to a user question.
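Taken together, the Chat.js changes above add a fluent withParams() for provider-specific request fields, an assumeModelExists flag that turns capability checks into console warnings, and cost/reasoning propagation into ChatResponseString. A minimal usage sketch, assuming the session is sent with an ask() method (not shown in this diff); the model id and the extra field passed to withParams are illustrative only:

    import { LLM } from "@node-llm/core";

    LLM.configure({ provider: "deepseek" });

    const chat = LLM.chat("deepseek-reasoner", { assumeModelExists: true }) // warn instead of throwing on capability checks
      .withParams({ logprobs: true }); // illustrative provider-specific field, merged into the request payload

    const reply = await chat.ask("Summarize this release in one sentence."); // ask() assumed, not part of this diff
    console.log(reply.content);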
package/dist/chat/ChatOptions.d.ts CHANGED
@@ -16,5 +16,8 @@ export interface ChatOptions {
  responseFormat?: {
  type: "json_object" | "text";
  };
+ params?: Record<string, any>;
+ assumeModelExists?: boolean;
+ provider?: string;
  }
  //# sourceMappingURL=ChatOptions.d.ts.map
package/dist/chat/ChatOptions.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;CACnD"}
+ {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;IAClD,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
package/dist/chat/ChatResponse.d.ts CHANGED
@@ -6,11 +6,15 @@ import { Usage } from "../providers/Provider.js";
  export declare class ChatResponseString extends String {
  readonly usage: Usage;
  readonly model: string;
- constructor(content: string, usage: Usage, model: string);
+ readonly reasoning?: string | null | undefined;
+ constructor(content: string, usage: Usage, model: string, reasoning?: string | null | undefined);
  get input_tokens(): number;
  get output_tokens(): number;
  get total_tokens(): number;
  get cached_tokens(): number | undefined;
+ get cost(): number | undefined;
+ get input_cost(): number | undefined;
+ get output_cost(): number | undefined;
  get content(): string;
  get model_id(): string;
  toString(): string;
package/dist/chat/ChatResponse.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;gBAF7B,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM;IAK/B,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;IAExD,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,QAAQ;IAIR;;;OAGG;IACH,IAAI,MAAM,IAAI,GAAG,CAMhB;CACF"}
+ {"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;aACb,SAAS,CAAC,EAAE,MAAM,GAAG,IAAI;gBAHzC,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM,EACb,SAAS,CAAC,EAAE,MAAM,GAAG,IAAI,YAAA;IAK3C,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;IACxD,IAAI,IAAI,uBAA8B;IACtC,IAAI,UAAU,uBAAoC;IAClD,IAAI,WAAW,uBAAqC;IAEpD,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,QAAQ;IAIR;;;OAGG;IACH,IAAI,MAAM,IAAI,GAAG,CAMhB;CACF"}
package/dist/chat/ChatResponse.js CHANGED
@@ -5,15 +5,20 @@
  export class ChatResponseString extends String {
  usage;
  model;
- constructor(content, usage, model) {
+ reasoning;
+ constructor(content, usage, model, reasoning) {
  super(content);
  this.usage = usage;
  this.model = model;
+ this.reasoning = reasoning;
  }
  get input_tokens() { return this.usage.input_tokens; }
  get output_tokens() { return this.usage.output_tokens; }
  get total_tokens() { return this.usage.total_tokens; }
  get cached_tokens() { return this.usage.cached_tokens; }
+ get cost() { return this.usage.cost; }
+ get input_cost() { return this.usage.input_cost; }
+ get output_cost() { return this.usage.output_cost; }
  get content() {
  return this.valueOf();
  }
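With the reasoning property and the new cost getters in place, pricing and reasoning can be read directly off a response. A short sketch (model id illustrative; ask() is assumed as the send method; the getters return undefined when the provider reports no pricing or reasoning):

    const reply = await LLM.chat("gpt-4o-mini").ask("Hello");
    console.log(reply.input_cost, reply.output_cost, reply.cost); // forwarded from usage
    if (reply.reasoning) {
      console.log("Reasoning:", reply.reasoning); // present only for reasoning-capable models
    }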
package/dist/chat/Stream.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"Stream.d.ts","sourceRoot":"","sources":["../../src/chat/Stream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,MAAM,0BAA0B,CAAC;AAGpD,qBAAa,MAAM;IAIf,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,QAAQ,CAAC,KAAK;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO;IAL1B,OAAO,CAAC,QAAQ,CAAY;gBAGT,QAAQ,EAAE,QAAQ,EAClB,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,WAAgB,EAC1C,QAAQ,CAAC,EAAE,OAAO,EAAE;IAmBtB;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;;;OAIG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM;CAwC9B"}
+ {"version":3,"file":"Stream.d.ts","sourceRoot":"","sources":["../../src/chat/Stream.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,MAAM,0BAA0B,CAAC;AAGpD,qBAAa,MAAM;IAIf,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,QAAQ,CAAC,KAAK;IACtB,OAAO,CAAC,QAAQ,CAAC,OAAO;IAL1B,OAAO,CAAC,QAAQ,CAAY;gBAGT,QAAQ,EAAE,QAAQ,EAClB,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,WAAgB,EAC1C,QAAQ,CAAC,EAAE,OAAO,EAAE;IAmBtB;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;;;OAIG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM;CA+C9B"}
package/dist/chat/Stream.js CHANGED
@@ -39,6 +39,7 @@ export class Stream {
  throw new Error("Streaming not supported by provider");
  }
  let full = "";
+ let fullReasoning = "";
  let isFirst = true;
  for await (const chunk of this.provider.stream({
  model: this.model,
@@ -54,14 +55,19 @@
  if (chunk.content) {
  full += chunk.content;
  }
+ if (chunk.reasoning) {
+ fullReasoning += chunk.reasoning;
+ }
  yield chunk;
  }
  this.messages.push({
  role: "assistant",
  content: full,
+ // @ts-ignore
+ reasoning: fullReasoning || undefined
  });
  if (this.options.onEndMessage) {
- this.options.onEndMessage(new ChatResponseString(full, { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model));
+ this.options.onEndMessage(new ChatResponseString(full, { input_tokens: 0, output_tokens: 0, total_tokens: 0 }, this.model, fullReasoning || undefined));
  }
  }
  }
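Stream.js now accumulates a reasoning field from chunks alongside the content, stores both on the assistant message, and hands the joined reasoning to onEndMessage. A consumption sketch, assuming a stream() entry point that yields these chunks (the public streaming method itself is not part of this diff; only content and reasoning are claimed as chunk fields):

    let answer = "";
    let reasoning = "";
    for await (const chunk of LLM.chat("deepseek-reasoner").stream("Why is the sky blue?")) {
      if (chunk.content) answer += chunk.content;        // streamed answer tokens
      if (chunk.reasoning) reasoning += chunk.reasoning; // streamed reasoning tokens, when provided
    }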
package/dist/config.d.ts ADDED
@@ -0,0 +1,29 @@
+ /**
+ * Global configuration for Node-LLM providers.
+ * Values are initialized from environment variables but can be overridden programmatically.
+ */
+ export interface NodeLLMConfig {
+ openaiApiKey?: string;
+ openaiApiBase?: string;
+ anthropicApiKey?: string;
+ anthropicApiBase?: string;
+ geminiApiKey?: string;
+ geminiApiBase?: string;
+ deepseekApiKey?: string;
+ deepseekApiBase?: string;
+ [key: string]: any;
+ }
+ declare class Configuration implements NodeLLMConfig {
+ openaiApiKey?: string;
+ openaiApiBase?: string;
+ anthropicApiKey?: string;
+ anthropicApiBase?: string;
+ geminiApiKey?: string;
+ geminiApiBase?: string;
+ deepseekApiKey?: string;
+ deepseekApiBase?: string;
+ [key: string]: any;
+ }
+ export declare const config: Configuration;
+ export {};
+ //# sourceMappingURL=config.d.ts.map
package/dist/config.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../src/config.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,MAAM,WAAW,aAAa;IAC5B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED,cAAM,aAAc,YAAW,aAAa;IACnC,YAAY,CAAC,EAAE,MAAM,CAAsC;IAC3D,aAAa,CAAC,EAAE,MAAM,CAAuC;IAC7D,eAAe,CAAC,EAAE,MAAM,CAAyC;IACjE,gBAAgB,CAAC,EAAE,MAAM,CAA0C;IACnE,YAAY,CAAC,EAAE,MAAM,CAAsC;IAC3D,aAAa,CAAC,EAAE,MAAM,CAAuC;IAC7D,cAAc,CAAC,EAAE,MAAM,CAAwC;IAC/D,eAAe,CAAC,EAAE,MAAM,CAAyC;IAExE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED,eAAO,MAAM,MAAM,eAAsB,CAAC"}
package/dist/config.js ADDED
@@ -0,0 +1,11 @@
+ class Configuration {
+ openaiApiKey = process.env.OPENAI_API_KEY?.trim();
+ openaiApiBase = process.env.OPENAI_API_BASE?.trim();
+ anthropicApiKey = process.env.ANTHROPIC_API_KEY?.trim();
+ anthropicApiBase = process.env.ANTHROPIC_API_BASE?.trim();
+ geminiApiKey = process.env.GEMINI_API_KEY?.trim();
+ geminiApiBase = process.env.GEMINI_API_BASE?.trim();
+ deepseekApiKey = process.env.DEEPSEEK_API_KEY?.trim();
+ deepseekApiBase = process.env.DEEPSEEK_API_BASE?.trim();
+ }
+ export const config = new Configuration();
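The new config singleton is populated at import time from OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY, DEEPSEEK_API_KEY and the matching *_API_BASE environment variables, and can be overridden in code, either directly or via the configure() callback shown further down in llm.js. A sketch (the base URL is illustrative):

    import { LLM, config } from "@node-llm/core";

    config.deepseekApiKey = "sk-...";                    // direct override of the env-derived value
    LLM.configure(c => {
      c.openaiApiBase = "https://proxy.example.com/v1";  // illustrative base URL
    });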
package/dist/index.d.ts CHANGED
@@ -8,6 +8,8 @@ export type { Tool, ToolCall } from "./chat/Tool.js";
  export type { MessageContent, ContentPart } from "./chat/Content.js";
  export { z } from "zod";
  export { LLM, Transcription, Moderation, Embedding } from "./llm.js";
+ export { config } from "./config.js";
+ export type { NodeLLMConfig } from "./config.js";
  export { providerRegistry } from "./providers/registry.js";
  export { OpenAIProvider } from "./providers/openai/OpenAIProvider.js";
  export { registerOpenAIProvider } from "./providers/openai/index.js";
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAC1C,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,YAAY,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACjD,YAAY,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AAC3C,YAAY,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACzD,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AACrD,YAAY,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAErE,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAAE,GAAG,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AACrE,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAE3D,OAAO,EAAE,cAAc,EAAE,MAAM,sCAAsC,CAAC;AACtE,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAC3E,YAAY,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAC3E,cAAc,mBAAmB,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAC1C,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,YAAY,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AACjD,YAAY,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AAC3C,YAAY,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACzD,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AACrD,YAAY,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAErE,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAAE,GAAG,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AACrE,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAC;AACrC,YAAY,EAAE,aAAa,EAAE,MAAM,aAAa,CAAC;AACjD,OAAO,EAAE,gBAAgB,EAAE,MAAM,yBAAyB,CAAC;AAE3D,OAAO,EAAE,cAAc,EAAE,MAAM,sCAAsC,CAAC;AACtE,OAAO,EAAE,sBAAsB,EAAE,MAAM,6BAA6B,CAAC;AACrE,OAAO,EAAE,yBAAyB,EAAE,MAAM,gCAAgC,CAAC;AAC3E,YAAY,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAC3E,cAAc,mBAAmB,CAAC"}
package/dist/index.js CHANGED
@@ -3,6 +3,7 @@ export { Stream } from "./chat/Stream.js";
  export { GeneratedImage } from "./image/GeneratedImage.js";
  export { z } from "zod";
  export { LLM, Transcription, Moderation, Embedding } from "./llm.js";
+ export { config } from "./config.js";
  export { providerRegistry } from "./providers/registry.js";
  export { OpenAIProvider } from "./providers/openai/OpenAIProvider.js";
  export { registerOpenAIProvider } from "./providers/openai/index.js";
package/dist/llm.d.ts CHANGED
@@ -6,31 +6,37 @@ import { ModelRegistry } from "./models/ModelRegistry.js";
  import { Transcription } from "./transcription/Transcription.js";
  import { Moderation } from "./moderation/Moderation.js";
  import { Embedding } from "./embedding/Embedding.js";
+ import { NodeLLMConfig } from "./config.js";
  export interface RetryOptions {
  attempts?: number;
  delayMs?: number;
  }
  type LLMConfig = {
- provider: Provider;
+ provider?: Provider | string;
  retry?: RetryOptions;
  defaultTranscriptionModel?: string;
  defaultModerationModel?: string;
  defaultEmbeddingModel?: string;
- } | {
- provider: string;
- retry?: RetryOptions;
- defaultTranscriptionModel?: string;
- defaultModerationModel?: string;
- defaultEmbeddingModel?: string;
- };
+ } & Partial<NodeLLMConfig>;
  declare class LLMCore {
- readonly models: ModelRegistry;
+ readonly models: typeof ModelRegistry;
+ readonly config: {
+ [key: string]: any;
+ openaiApiKey?: string;
+ openaiApiBase?: string;
+ anthropicApiKey?: string;
+ anthropicApiBase?: string;
+ geminiApiKey?: string;
+ geminiApiBase?: string;
+ deepseekApiKey?: string;
+ deepseekApiBase?: string;
+ };
  private provider?;
  private defaultTranscriptionModelId?;
  private defaultModerationModelId?;
  private defaultEmbeddingModelId?;
  private retry;
- configure(config: LLMConfig): void;
+ configure(configOrCallback: LLMConfig | ((config: NodeLLMConfig) => void)): void;
  private ensureProviderSupport;
  chat(model: string, options?: ChatOptions): Chat;
  listModels(): Promise<ModelInfo[]>;
@@ -38,6 +44,7 @@ declare class LLMCore {
  model?: string;
  size?: string;
  quality?: string;
+ assumeModelExists?: boolean;
  }): Promise<GeneratedImage>;
  transcribe(file: string, options?: {
  model?: string;
@@ -45,6 +52,7 @@ declare class LLMCore {
  language?: string;
  speakerNames?: string[];
  speakerReferences?: string[];
+ assumeModelExists?: boolean;
  }): Promise<Transcription>;
  get defaultTranscriptionModel(): string | undefined;
  get defaultModerationModel(): string | undefined;
@@ -52,10 +60,12 @@ declare class LLMCore {
  getRetryConfig(): Required<RetryOptions>;
  moderate(input: string | string[], options?: {
  model?: string;
+ assumeModelExists?: boolean;
  }): Promise<Moderation>;
  embed(input: string | string[], options?: {
  model?: string;
  dimensions?: number;
+ assumeModelExists?: boolean;
  }): Promise<Embedding>;
  }
  export { Transcription, Moderation, Embedding };
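Every top-level helper (paint, transcribe, moderate, embed) now accepts assumeModelExists; as llm.js below shows, the flag replaces the capability error with a console warning. A sketch with illustrative model ids:

    const embedding = await LLM.embed("hello world", {
      model: "my-custom-embedding-model", // illustrative id, not in the capability tables
      assumeModelExists: true,            // warn instead of throwing
    });

    const image = await LLM.paint("a lighthouse at dusk", {
      model: "my-custom-image-model",     // illustrative id
      assumeModelExists: true,
    });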
package/dist/llm.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,QAAQ,EACR,SAAS,EAKV,MAAM,yBAAyB,CAAC;AAKjC,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,OAAO,EAAU,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,kCAAkC,CAAC;AACjE,OAAO,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AACxD,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAIrD,MAAM,WAAW,YAAY;IAC3B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,KAAK,SAAS,GACV;IAAE,QAAQ,EAAE,QAAQ,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,GACjJ;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAEpJ,cAAM,OAAO;IACX,SAAgB,MAAM,EAAE,aAAa,CAAU;IAC/C,OAAO,CAAC,QAAQ,CAAC,CAAW;IAC5B,OAAO,CAAC,2BAA2B,CAAC,CAAS;IAC7C,OAAO,CAAC,wBAAwB,CAAC,CAAS;IAC1C,OAAO,CAAC,uBAAuB,CAAC,CAAS;IAEzC,OAAO,CAAC,KAAK,CAGX;IAEF,SAAS,CAAC,MAAM,EAAE,SAAS;IAuC3B,OAAO,CAAC,qBAAqB;IAU7B,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,IAAI;IAQ1C,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAKlC,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,cAAc,CAAC;IAgB7G,UAAU,CACd,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QACR,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;QACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;KAC9B,GACA,OAAO,CAAC,aAAa,CAAC;IAiBzB,IAAI,yBAAyB,IAAI,MAAM,GAAG,SAAS,CAElD;IAED,IAAI,sBAAsB,IAAI,MAAM,GAAG,SAAS,CAE/C;IAED,IAAI,qBAAqB,IAAI,MAAM,GAAG,SAAS,CAE9C;IAED,cAAc;IAIR,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,UAAU,CAAC;IAiBrF,KAAK,CACT,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EACxB,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,CAAA;KAAE,GAChD,OAAO,CAAC,SAAS,CAAC;CAkBtB;AAED,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;AAEhD,eAAO,MAAM,GAAG,SAAgB,CAAC"}
+ {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,QAAQ,EACR,SAAS,EAKV,MAAM,yBAAyB,CAAC;AAMjC,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,aAAa,EAAE,MAAM,kCAAkC,CAAC;AACjE,OAAO,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AACxD,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAIrD,OAAO,EAAU,aAAa,EAAE,MAAM,aAAa,CAAC;AAEpD,MAAM,WAAW,YAAY;IAC3B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,KAAK,SAAS,GAAG;IACf,QAAQ,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAC;IAC7B,KAAK,CAAC,EAAE,YAAY,CAAC;IACrB,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAChC,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAChC,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;AAE3B,cAAM,OAAO;IACX,SAAgB,MAAM,uBAAiB;IACvC,SAAgB,MAAM;;;;;;;;;;MAAU;IAChC,OAAO,CAAC,QAAQ,CAAC,CAAW;IAC5B,OAAO,CAAC,2BAA2B,CAAC,CAAS;IAC7C,OAAO,CAAC,wBAAwB,CAAC,CAAS;IAC1C,OAAO,CAAC,uBAAuB,CAAC,CAAS;IAEzC,OAAO,CAAC,KAAK,CAGX;IAEF,SAAS,CAAC,gBAAgB,EAAE,SAAS,GAAG,CAAC,CAAC,MAAM,EAAE,aAAa,KAAK,IAAI,CAAC;IAiEzE,OAAO,CAAC,qBAAqB;IAU7B,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,IAAI;IAQ1C,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAKlC,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,OAAO,CAAC,cAAc,CAAC;IAkB1I,UAAU,CACd,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QACR,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;QACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;QAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;KAC7B,GACA,OAAO,CAAC,aAAa,CAAC;IAmBzB,IAAI,yBAAyB,IAAI,MAAM,GAAG,SAAS,CAElD;IAED,IAAI,sBAAsB,IAAI,MAAM,GAAG,SAAS,CAE/C;IAED,IAAI,qBAAqB,IAAI,MAAM,GAAG,SAAS,CAE9C;IAED,cAAc;IAIR,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,OAAO,CAAC,UAAU,CAAC;IAmBlH,KAAK,CACT,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EACxB,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAC7E,OAAO,CAAC,SAAS,CAAC;CAoBtB;AAED,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;AAEhD,eAAO,MAAM,GAAG,SAAgB,CAAC"}
package/dist/llm.js CHANGED
@@ -3,13 +3,16 @@ import { providerRegistry } from "./providers/registry.js";
  import { ensureOpenAIRegistered } from "./providers/openai/index.js";
  import { registerGeminiProvider } from "./providers/gemini/index.js";
  import { registerAnthropicProvider } from "./providers/anthropic/index.js";
+ import { registerDeepSeekProvider } from "./providers/deepseek/index.js";
  import { GeneratedImage } from "./image/GeneratedImage.js";
- import { models } from "./models/ModelRegistry.js";
+ import { ModelRegistry } from "./models/ModelRegistry.js";
  import { Transcription } from "./transcription/Transcription.js";
  import { Moderation } from "./moderation/Moderation.js";
  import { Embedding } from "./embedding/Embedding.js";
+ import { config } from "./config.js";
  class LLMCore {
- models = models;
+ models = ModelRegistry;
+ config = config;
  provider;
  defaultTranscriptionModelId;
  defaultModerationModelId;
@@ -18,36 +21,50 @@ class LLMCore {
  attempts: 1,
  delayMs: 0,
  };
- configure(config) {
- if (config.defaultTranscriptionModel) {
- this.defaultTranscriptionModelId = config.defaultTranscriptionModel;
+ configure(configOrCallback) {
+ // Callback style: for setting API keys
+ if (typeof configOrCallback === "function") {
+ configOrCallback(this.config);
+ return;
  }
- if (config.defaultModerationModel) {
- this.defaultModerationModelId = config.defaultModerationModel;
+ // Object style: for setting provider and other options
+ const options = configOrCallback;
+ // Extract known control keys
+ const { provider, retry, defaultTranscriptionModel, defaultModerationModel, defaultEmbeddingModel, ...apiConfig } = options;
+ // Merge API keys into global config
+ Object.assign(this.config, apiConfig);
+ if (defaultTranscriptionModel) {
+ this.defaultTranscriptionModelId = defaultTranscriptionModel;
  }
- if (config.defaultEmbeddingModel) {
- this.defaultEmbeddingModelId = config.defaultEmbeddingModel;
+ if (defaultModerationModel) {
+ this.defaultModerationModelId = defaultModerationModel;
  }
- if (config.retry) {
+ if (defaultEmbeddingModel) {
+ this.defaultEmbeddingModelId = defaultEmbeddingModel;
+ }
+ if (retry) {
  this.retry = {
- attempts: config.retry.attempts ?? 1,
- delayMs: config.retry.delayMs ?? 0,
+ attempts: retry.attempts ?? 1,
+ delayMs: retry.delayMs ?? 0,
  };
  }
- if (typeof config.provider === "string") {
- if (config.provider === "openai") {
+ if (typeof provider === "string") {
+ if (provider === "openai") {
  ensureOpenAIRegistered();
  }
- if (config.provider === "gemini") {
+ if (provider === "gemini") {
  registerGeminiProvider();
  }
- if (config.provider === "anthropic") {
+ if (provider === "anthropic") {
  registerAnthropicProvider();
  }
- this.provider = providerRegistry.resolve(config.provider);
+ if (provider === "deepseek") {
+ registerDeepSeekProvider();
+ }
+ this.provider = providerRegistry.resolve(provider);
  }
- else {
- this.provider = config.provider;
+ else if (provider) {
+ this.provider = provider;
  }
  }
  ensureProviderSupport(method) {
@@ -72,7 +89,10 @@ class LLMCore {
  async paint(prompt, options) {
  const provider = this.ensureProviderSupport("paint");
  const model = options?.model;
- if (model && provider.capabilities && !provider.capabilities.supportsImageGeneration(model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+ }
+ else if (model && provider.capabilities && !provider.capabilities.supportsImageGeneration(model)) {
  throw new Error(`Model ${model} does not support image generation.`);
  }
  const response = await provider.paint({
@@ -84,7 +104,10 @@ class LLMCore {
  async transcribe(file, options) {
  const provider = this.ensureProviderSupport("transcribe");
  const model = options?.model || this.defaultTranscriptionModelId;
- if (model && provider.capabilities && !provider.capabilities.supportsTranscription(model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+ }
+ else if (model && provider.capabilities && !provider.capabilities.supportsTranscription(model)) {
  throw new Error(`Model ${model} does not support transcription.`);
  }
  const response = await provider.transcribe({
@@ -109,7 +132,10 @@ class LLMCore {
  async moderate(input, options) {
  const provider = this.ensureProviderSupport("moderate");
  const model = options?.model || this.defaultModerationModelId;
- if (model && provider.capabilities && !provider.capabilities.supportsModeration(model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+ }
+ else if (model && provider.capabilities && !provider.capabilities.supportsModeration(model)) {
  throw new Error(`Model ${model} does not support moderation.`);
  }
  const response = await provider.moderate({
@@ -127,7 +153,10 @@ class LLMCore {
  model,
  dimensions: options?.dimensions,
  };
- if (request.model && provider.capabilities && !provider.capabilities.supportsEmbeddings(request.model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${request.model}`);
+ }
+ else if (request.model && provider.capabilities && !provider.capabilities.supportsEmbeddings(request.model)) {
  throw new Error(`Model ${request.model} does not support embeddings.`);
  }
  const response = await provider.embed(request);
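configure() now accepts either a callback, which only mutates the shared config, or an options object whose non-control keys (anything besides provider, retry and the default*Model fields) are merged into config; the "deepseek" provider string is also registered here. A sketch of both forms:

    // Object style: provider, retry and credentials in one call
    LLM.configure({
      provider: "deepseek",
      deepseekApiKey: process.env.DEEPSEEK_API_KEY,
      retry: { attempts: 3, delayMs: 500 },
    });

    // Callback style: only touches the global config
    LLM.configure(c => {
      c.anthropicApiKey = "sk-ant-...";
    });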
package/dist/models/ModelRegistry.d.ts CHANGED
@@ -1,23 +1,50 @@
- import { ModelInfo } from "../providers/Provider.js";
+ import { Model } from "./types.js";
  export declare class ModelRegistry {
- private models;
- private static readonly API_URL;
+ private static models;
  /**
- * Refresh model information from the Parsera API
+ * Find a model by its ID.
  */
- refresh(): Promise<void>;
+ static find(modelId: string, provider?: string): Model | undefined;
  /**
- * Find a model by ID
+ * Get all available models.
  */
- find(id: string): ModelInfo | undefined;
+ static all(): Model[];
  /**
- * List all known models
+ * Get output tokens limit for a model.
  */
- all(): ModelInfo[];
+ static getMaxOutputTokens(modelId: string, provider: string): number | undefined;
  /**
- * Filter models by provider
+ * Check if a model supports a capability.
  */
- byProvider(provider: string): ModelInfo[];
+ static supports(modelId: string, capability: string, provider: string): boolean;
+ /**
+ * Get context window size.
+ */
+ static getContextWindow(modelId: string, provider: string): number | undefined;
+ /**
+ * Calculate cost for usage.
+ */
+ static calculateCost(usage: {
+ input_tokens: number;
+ output_tokens: number;
+ total_tokens: number;
+ cached_tokens?: number;
+ reasoning_tokens?: number;
+ }, modelId: string, provider: string): {
+ input_tokens: number;
+ output_tokens: number;
+ total_tokens: number;
+ cached_tokens?: number;
+ reasoning_tokens?: number;
+ } | {
+ input_cost: number;
+ output_cost: number;
+ cost: number;
+ input_tokens: number;
+ output_tokens: number;
+ total_tokens: number;
+ cached_tokens?: number;
+ reasoning_tokens?: number;
+ };
  }
- export declare const models: ModelRegistry;
  //# sourceMappingURL=ModelRegistry.d.ts.map
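ModelRegistry drops the Parsera-backed refresh() and becomes a static registry over the bundled models data, exposed as LLM.models (see llm.js above). A lookup sketch; the model id and capability string are illustrative, and calculateCost returns the usage object without cost fields when no pricing data is found:

    const model = LLM.models.find("gpt-4o-mini", "openai");                     // Model | undefined
    const contextWindow = LLM.models.getContextWindow("gpt-4o-mini", "openai"); // number | undefined
    const canSee = LLM.models.supports("gpt-4o-mini", "vision", "openai");      // capability name illustrative

    const priced = LLM.models.calculateCost(
      { input_tokens: 1200, output_tokens: 300, total_tokens: 1500 },
      "gpt-4o-mini",
      "openai",
    );
    console.log("cost" in priced ? priced.cost : "no pricing data");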
package/dist/models/ModelRegistry.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ModelRegistry.d.ts","sourceRoot":"","sources":["../../src/models/ModelRegistry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAErD,qBAAa,aAAa;IACxB,OAAO,CAAC,MAAM,CAAqC;IACnD,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,CAA0C;IAEzE;;OAEG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IA8B9B;;OAEG;IACH,IAAI,CAAC,EAAE,EAAE,MAAM,GAAG,SAAS,GAAG,SAAS;IAIvC;;OAEG;IACH,GAAG,IAAI,SAAS,EAAE;IAIlB;;OAEG;IACH,UAAU,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,EAAE;CAG1C;AAED,eAAO,MAAM,MAAM,eAAsB,CAAC"}
+ {"version":3,"file":"ModelRegistry.d.ts","sourceRoot":"","sources":["../../src/models/ModelRegistry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,YAAY,CAAC;AAGnC,qBAAa,aAAa;IACtB,OAAO,CAAC,MAAM,CAAC,MAAM,CAA6C;IAElE;;OAEG;IACH,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,SAAS;IAMlE;;OAEG;IACH,MAAM,CAAC,GAAG,IAAI,KAAK,EAAE;IAIrB;;OAEG;IACH,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKhF;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO;IAK/E;;OAEG;IACH,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAK9E;;OAEG;IACH,MAAM,CAAC,aAAa,CAAC,KAAK,EAAE;QAAE,YAAY,EAAE,MAAM,CAAC;QAAC,aAAa,EAAE,MAAM,CAAC;QAAC,YAAY,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAC;QAAC,gBAAgB,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;sBAA3I,MAAM;uBAAiB,MAAM;sBAAgB,MAAM;wBAAkB,MAAM;2BAAqB,MAAM;;;;;sBAAtG,MAAM;uBAAiB,MAAM;sBAAgB,MAAM;wBAAkB,MAAM;2BAAqB,MAAM;;CA+BrJ"}