@node-llm/core 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +46 -7
  2. package/dist/chat/Chat.d.ts +5 -0
  3. package/dist/chat/Chat.d.ts.map +1 -1
  4. package/dist/chat/Chat.js +26 -4
  5. package/dist/chat/ChatOptions.d.ts +3 -0
  6. package/dist/chat/ChatOptions.d.ts.map +1 -1
  7. package/dist/chat/ChatResponse.d.ts +3 -0
  8. package/dist/chat/ChatResponse.d.ts.map +1 -1
  9. package/dist/chat/ChatResponse.js +3 -0
  10. package/dist/llm.d.ts +5 -1
  11. package/dist/llm.d.ts.map +1 -1
  12. package/dist/llm.js +18 -6
  13. package/dist/models/ModelRegistry.d.ts +39 -12
  14. package/dist/models/ModelRegistry.d.ts.map +1 -1
  15. package/dist/models/ModelRegistry.js +50 -40
  16. package/dist/models/models.d.ts +972 -0
  17. package/dist/models/models.d.ts.map +1 -0
  18. package/dist/models/models.js +7026 -0
  19. package/dist/models/types.d.ts +50 -0
  20. package/dist/models/types.d.ts.map +1 -0
  21. package/dist/models/types.js +1 -0
  22. package/dist/providers/Provider.d.ts +4 -0
  23. package/dist/providers/Provider.d.ts.map +1 -1
  24. package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
  25. package/dist/providers/anthropic/AnthropicProvider.js +1 -3
  26. package/dist/providers/anthropic/Capabilities.d.ts +1 -37
  27. package/dist/providers/anthropic/Capabilities.d.ts.map +1 -1
  28. package/dist/providers/anthropic/Capabilities.js +59 -130
  29. package/dist/providers/anthropic/Chat.d.ts.map +1 -1
  30. package/dist/providers/anthropic/Chat.js +6 -2
  31. package/dist/providers/anthropic/Models.d.ts +1 -0
  32. package/dist/providers/anthropic/Models.d.ts.map +1 -1
  33. package/dist/providers/anthropic/Models.js +36 -41
  34. package/dist/providers/anthropic/Streaming.d.ts.map +1 -1
  35. package/dist/providers/anthropic/Streaming.js +10 -1
  36. package/dist/providers/gemini/Capabilities.d.ts +28 -7
  37. package/dist/providers/gemini/Capabilities.d.ts.map +1 -1
  38. package/dist/providers/gemini/Capabilities.js +32 -20
  39. package/dist/providers/gemini/Chat.d.ts.map +1 -1
  40. package/dist/providers/gemini/Chat.js +9 -11
  41. package/dist/providers/gemini/Models.d.ts +1 -0
  42. package/dist/providers/gemini/Models.d.ts.map +1 -1
  43. package/dist/providers/gemini/Models.js +46 -26
  44. package/dist/providers/openai/Capabilities.d.ts +3 -11
  45. package/dist/providers/openai/Capabilities.d.ts.map +1 -1
  46. package/dist/providers/openai/Capabilities.js +119 -122
  47. package/dist/providers/openai/Chat.d.ts.map +1 -1
  48. package/dist/providers/openai/Chat.js +19 -17
  49. package/dist/providers/openai/Embedding.d.ts.map +1 -1
  50. package/dist/providers/openai/Embedding.js +2 -1
  51. package/dist/providers/openai/Image.d.ts.map +1 -1
  52. package/dist/providers/openai/Image.js +2 -1
  53. package/dist/providers/openai/ModelDefinitions.d.ts +1 -24
  54. package/dist/providers/openai/ModelDefinitions.d.ts.map +1 -1
  55. package/dist/providers/openai/ModelDefinitions.js +1 -211
  56. package/dist/providers/openai/Models.d.ts +1 -0
  57. package/dist/providers/openai/Models.d.ts.map +1 -1
  58. package/dist/providers/openai/Models.js +46 -22
  59. package/dist/providers/openai/Moderation.d.ts.map +1 -1
  60. package/dist/providers/openai/Moderation.js +2 -1
  61. package/dist/providers/openai/Streaming.d.ts.map +1 -1
  62. package/dist/providers/openai/Streaming.js +5 -1
  63. package/dist/providers/openai/Transcription.d.ts.map +1 -1
  64. package/dist/providers/openai/Transcription.js +3 -2
  65. package/dist/providers/openai/index.d.ts.map +1 -1
  66. package/dist/providers/openai/index.js +2 -1
  67. package/dist/providers/openai/utils.d.ts +20 -0
  68. package/dist/providers/openai/utils.d.ts.map +1 -0
  69. package/dist/providers/openai/utils.js +25 -0
  70. package/package.json +1 -1
package/README.md CHANGED
@@ -66,6 +66,7 @@ console.log(response);
  console.log(response.content);
  console.log(`Model: ${response.model_id}`);
  console.log(`Tokens: ${response.input_tokens} in, ${response.output_tokens} out`);
+ console.log(`Cost: $${response.cost}`);
  ```
 
  ### 3. Streaming Responses
@@ -102,9 +103,11 @@ const response = await chat.ask("Hello!");
 
  console.log(response.input_tokens); // 10
  console.log(response.output_tokens); // 5
+ console.log(response.cost); // 0.000185
 
  // Access aggregated usage for the whole session
  console.log(chat.totalUsage.total_tokens);
+ console.log(chat.totalUsage.cost);
  ```
 
  ### 6. Embeddings
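
As a reading aid for the usage additions above, here is a minimal sketch of how the new per-session `cost` aggregation could be used as a spend guard. It assumes the `LLM` entry point shown in the README examples; the budget constant and model id are illustrative, not library settings.

```ts
// Sketch only: cap session spend using chat.totalUsage.cost (added in 0.6.0).
const chat = LLM.chat("gpt-4o-mini");
const BUDGET_USD = 0.05; // illustrative limit, not a library option

for (const question of ["Hello!", "Summarize our conversation so far."]) {
  const response = await chat.ask(question);
  console.log(response.content, `(cost so far: $${chat.totalUsage.cost ?? 0})`);
  if ((chat.totalUsage.cost ?? 0) > BUDGET_USD) {
    console.warn("Session budget exceeded, stopping.");
    break;
  }
}
```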
@@ -202,6 +205,44 @@ const factual = LLM.chat("gpt-4o").withTemperature(0.2);
  const creative = LLM.chat("gpt-4o").withTemperature(0.9);
  ```
 
+ ### 11. Provider-Specific Parameters
+
+ Access unique provider features while maintaining the unified interface. Parameters passed via `withParams()` will override any defaults set by the library.
+
+ ```ts
+ // OpenAI: Set seed for deterministic output
+ const chat = LLM.chat("gpt-4o-mini")
+   .withParams({
+     seed: 42,
+     user: "user-123",
+     presence_penalty: 0.5
+   });
+
+ // Gemini: Configure safety settings and generation params
+ const geminiChat = LLM.chat("gemini-2.0-flash")
+   .withParams({
+     generationConfig: { topP: 0.8, topK: 40 },
+     safetySettings: [
+       { category: "HARM_CATEGORY_HARASSMENT", threshold: "BLOCK_LOW_AND_ABOVE" }
+     ]
+   });
+
+ // Anthropic: Custom headers or beta features
+ const claudeChat = LLM.chat("claude-3-5-sonnet-20241022")
+   .withParams({
+     top_k: 50,
+     top_p: 0.9
+   });
+ ```
+
+ **⚠️ Important Notes:**
+ - Parameters from `withParams()` take precedence over library defaults
+ - Always consult the provider's API documentation for supported parameters
+ - The library passes these parameters through without validation
+ - Enable debug mode to see the exact request: `process.env.NODELLM_DEBUG = "true"`
+
+ See examples: [OpenAI](../../examples/openai/chat/params.mjs) | [Gemini](../../examples/gemini/chat/params.mjs)
+
  ---
 
  ## 📚 Examples
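
A small sketch of the merge behavior implied by the `Chat.js` change further down (repeated `withParams()` calls shallow-merge, with later keys winning); the parameter values are illustrative:

```ts
// Sketch only: withParams() merges shallowly, so later calls override earlier keys.
const chat = LLM.chat("gpt-4o-mini")
  .withParams({ seed: 1, user: "user-123" })
  .withParams({ seed: 42 }); // request is sent with seed: 42 and user: "user-123"

const response = await chat.ask("Hello!");
```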
@@ -438,15 +479,13 @@ chat.withRequestOptions({
 
  Get up-to-date information about context windows, pricing, and capabilities directly from the Parsera API.
 
- ```javascript
- // Refresh model information from the API
- await LLM.models.refresh();
-
  // Use the data programmatically
  const model = LLM.models.find("gpt-4o-mini");
- console.log(model.context_window); // => 128000
- console.log(model.capabilities); // => ["function_calling", "structured_output", "streaming", "batch"]
- console.log(model.pricing.text_tokens.standard.input_per_million); // => 0.15
+ if (model) {
+   console.log(model.context_window); // => 128000
+   console.log(model.capabilities); // => ["function_calling", "structured_output", "streaming", "batch", "json_mode"]
+   console.log(model.pricing.text_tokens.standard.input_per_million); // => 0.15
+ }
  ```
 
  ---
package/dist/chat/Chat.d.ts CHANGED
@@ -74,6 +74,11 @@ export declare class Chat {
  headers?: Record<string, string>;
  responseFormat?: any;
  }): this;
+ /**
+ * Set provider-specific parameters.
+ * These will be merged into the final request payload.
+ */
+ withParams(params: Record<string, any>): this;
  /**
  * Enforce a specific schema for the output.
  * Can accept a Schema object or a Zod schema/JSON Schema directly.
package/dist/chat/Chat.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAI3D,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB;IAmB5C;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CActB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIzB;;;;;;;OAOG;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IA2BvE;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAAO,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IA8KrF;;OAEG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM;CAI9B"}
+ {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../src/chat/Chat.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAI3D,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,WAAW,UAAU;IACzB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CAClC;AAED,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD,qBAAa,IAAI;IAKb,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,KAAK;IACb,OAAO,CAAC,QAAQ,CAAC,OAAO;IAN1B,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,QAAQ,CAAW;gBAGR,QAAQ,EAAE,QAAQ,EAC3B,KAAK,EAAE,MAAM,EACJ,OAAO,GAAE,WAAgB;IAmB5C;;OAEG;IACH,IAAI,OAAO,IAAI,SAAS,OAAO,EAAE,CAEhC;IAED;;OAEG;IACH,IAAI,UAAU,IAAI,KAAK,CAetB;IAED;;;OAGG;IACH,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,IAAI;IAIzB;;;;;;;OAOG;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,IAAI,GAAG,GAAG,CAAC,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IA2BvE;;;;OAIG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAmB5E;;OAEG;IACH,gBAAgB,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAI5E;;;OAGG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAKnC;;OAEG;IACH,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAK9B;;;OAGG;IACH,kBAAkB,CAAC,OAAO,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAAC,cAAc,CAAC,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAU7F;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI;IAK7C;;;OAGG;IACH,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI;IAkB9E,YAAY,CAAC,OAAO,EAAE,MAAM,IAAI,GAAG,IAAI;IAKvC,YAAY,CAAC,OAAO,EAAE,CAAC,OAAO,EAAE,kBAAkB,KAAK,IAAI,GAAG,IAAI;IAKlE,UAAU,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD,YAAY,CAAC,OAAO,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;IAKlD;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,UAAU,GAAG,OAAO,CAAC,kBAAkB,CAAC;IA4LrF;;OAEG;IACI,MAAM,CAAC,OAAO,EAAE,MAAM;CAI9B"}
package/dist/chat/Chat.js CHANGED
@@ -44,9 +44,10 @@ export class Chat {
  acc.total_tokens += msg.usage.total_tokens;
  acc.cached_tokens = (acc.cached_tokens ?? 0) + (msg.usage.cached_tokens ?? 0);
  acc.cache_creation_tokens = (acc.cache_creation_tokens ?? 0) + (msg.usage.cache_creation_tokens ?? 0);
+ acc.cost = (acc.cost ?? 0) + (msg.usage.cost ?? 0);
  }
  return acc;
- }, { input_tokens: 0, output_tokens: 0, total_tokens: 0, cached_tokens: 0, cache_creation_tokens: 0 });
+ }, { input_tokens: 0, output_tokens: 0, total_tokens: 0, cached_tokens: 0, cache_creation_tokens: 0, cost: 0 });
  }
  /**
  * Add a tool to the chat session (fluent API)
@@ -145,6 +146,14 @@ export class Chat {
  }
  return this;
  }
+ /**
+ * Set provider-specific parameters.
+ * These will be merged into the final request payload.
+ */
+ withParams(params) {
+ this.options.params = { ...this.options.params, ...params };
+ return this;
+ }
  /**
  * Enforce a specific schema for the output.
  * Can accept a Schema object or a Zod schema/JSON Schema directly.
@@ -191,9 +200,12 @@ export class Chat {
  if (files.length > 0) {
  const processedFiles = await Promise.all(files.map(f => FileLoader.load(f)));
  const hasBinary = processedFiles.some(p => p.type === "image_url" || p.type === "input_audio" || p.type === "video_url");
- if (hasBinary && this.provider.capabilities && !this.provider.capabilities.supportsVision(this.model)) {
+ if (hasBinary && !this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsVision(this.model)) {
  throw new Error(`Model ${this.model} does not support vision/binary files.`);
  }
+ if (hasBinary && this.options.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping vision capability validation for model ${this.model}`);
+ }
  // Separate text files from binary files
  const textFiles = processedFiles.filter(p => p.type === "text");
  const binaryFiles = processedFiles.filter(p => p.type !== "text");
@@ -215,9 +227,12 @@ export class Chat {
  }
  }
  if (this.options.tools && this.options.tools.length > 0) {
- if (this.provider.capabilities && !this.provider.capabilities.supportsTools(this.model)) {
+ if (!this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsTools(this.model)) {
  throw new Error(`Model ${this.model} does not support tool calling.`);
  }
+ if (this.options.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping tool capability validation for model ${this.model}`);
+ }
  }
  this.messages.push({
  role: "user",
@@ -226,9 +241,12 @@ export class Chat {
  // Process Schema/Structured Output
  let responseFormat = this.options.responseFormat;
  if (this.options.schema) {
- if (this.provider.capabilities && !this.provider.capabilities.supportsStructuredOutput(this.model)) {
+ if (!this.options.assumeModelExists && this.provider.capabilities && !this.provider.capabilities.supportsStructuredOutput(this.model)) {
  throw new Error(`Model ${this.model} does not support structured output.`);
  }
+ if (this.options.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping structured output capability validation for model ${this.model}`);
+ }
  const jsonSchema = toJsonSchema(this.options.schema.definition.schema);
  responseFormat = {
  type: "json_schema",
@@ -248,6 +266,7 @@ export class Chat {
  max_tokens: options?.maxTokens ?? this.options.maxTokens,
  headers: { ...this.options.headers, ...options?.headers },
  response_format: responseFormat, // Pass to provider
+ ...this.options.params,
  };
  let totalUsage = { input_tokens: 0, output_tokens: 0, total_tokens: 0 };
  const trackUsage = (u) => {
@@ -258,6 +277,9 @@ export class Chat {
  if (u.cached_tokens) {
  totalUsage.cached_tokens = (totalUsage.cached_tokens ?? 0) + u.cached_tokens;
  }
+ if (u.cost !== undefined) {
+ totalUsage.cost = (totalUsage.cost ?? 0) + u.cost;
+ }
  }
  };
  // First round
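
Since `...this.options.params` is spread last into the request payload above, values set through `withParams()` win over the builder defaults, matching the README note. A hedged sketch (assuming `temperature` sits in the same payload object, which this diff does not show directly):

```ts
// Sketch only: params are spread last, so they override builder-level defaults.
const chat = LLM.chat("gpt-4o-mini")
  .withTemperature(0.2)
  .withParams({ temperature: 0.9 }); // the provider receives temperature: 0.9
```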
package/dist/chat/ChatOptions.d.ts CHANGED
@@ -16,5 +16,8 @@ export interface ChatOptions {
  responseFormat?: {
  type: "json_object" | "text";
  };
+ params?: Record<string, any>;
+ assumeModelExists?: boolean;
+ provider?: string;
  }
  //# sourceMappingURL=ChatOptions.d.ts.map
package/dist/chat/ChatOptions.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;CACnD"}
+ {"version":3,"file":"ChatOptions.d.ts","sourceRoot":"","sources":["../../src/chat/ChatOptions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,MAAM,EAAE,MAAM,qBAAqB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,YAAY,CAAC,EAAE,MAAM,IAAI,CAAC;IAC1B,YAAY,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAAI,CAAC;IACtC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,YAAY,CAAC,EAAE,CAAC,MAAM,EAAE,GAAG,KAAK,IAAI,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,cAAc,CAAC,EAAE;QAAE,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;IAClD,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB"}
package/dist/chat/ChatResponse.d.ts CHANGED
@@ -11,6 +11,9 @@ export declare class ChatResponseString extends String {
  get output_tokens(): number;
  get total_tokens(): number;
  get cached_tokens(): number | undefined;
+ get cost(): number | undefined;
+ get input_cost(): number | undefined;
+ get output_cost(): number | undefined;
  get content(): string;
  get model_id(): string;
  toString(): string;
package/dist/chat/ChatResponse.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;gBAF7B,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM;IAK/B,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;IAExD,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,QAAQ;IAIR;;;OAGG;IACH,IAAI,MAAM,IAAI,GAAG,CAMhB;CACF"}
+ {"version":3,"file":"ChatResponse.d.ts","sourceRoot":"","sources":["../../src/chat/ChatResponse.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEjD;;;GAGG;AACH,qBAAa,kBAAmB,SAAQ,MAAM;aAG1B,KAAK,EAAE,KAAK;aACZ,KAAK,EAAE,MAAM;gBAF7B,OAAO,EAAE,MAAM,EACC,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM;IAK/B,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,WAAuC;IACxD,IAAI,YAAY,WAAsC;IACtD,IAAI,aAAa,uBAAuC;IACxD,IAAI,IAAI,uBAA8B;IACtC,IAAI,UAAU,uBAAoC;IAClD,IAAI,WAAW,uBAAqC;IAEpD,IAAI,OAAO,IAAI,MAAM,CAEpB;IAED,IAAI,QAAQ,IAAI,MAAM,CAErB;IAED,QAAQ;IAIR;;;OAGG;IACH,IAAI,MAAM,IAAI,GAAG,CAMhB;CACF"}
package/dist/chat/ChatResponse.js CHANGED
@@ -14,6 +14,9 @@ export class ChatResponseString extends String {
  get output_tokens() { return this.usage.output_tokens; }
  get total_tokens() { return this.usage.total_tokens; }
  get cached_tokens() { return this.usage.cached_tokens; }
+ get cost() { return this.usage.cost; }
+ get input_cost() { return this.usage.input_cost; }
+ get output_cost() { return this.usage.output_cost; }
  get content() {
  return this.valueOf();
  }
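
The three new getters simply surface `usage.cost`, `usage.input_cost`, and `usage.output_cost`, which are populated via `ModelRegistry.calculateCost` (see below). A brief usage sketch; the printed values are illustrative and the fields are `undefined` when pricing data is unavailable:

```ts
const response = await LLM.chat("gpt-4o-mini").ask("Hello!");

console.log(response.input_cost);  // e.g. 0.00012 (illustrative)
console.log(response.output_cost); // e.g. 0.00006 (illustrative)
console.log(response.cost);        // input_cost + output_cost
```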
package/dist/llm.d.ts CHANGED
@@ -24,7 +24,7 @@ type LLMConfig = {
  defaultEmbeddingModel?: string;
  };
  declare class LLMCore {
- readonly models: ModelRegistry;
+ readonly models: typeof ModelRegistry;
  private provider?;
  private defaultTranscriptionModelId?;
  private defaultModerationModelId?;
@@ -38,6 +38,7 @@ declare class LLMCore {
  model?: string;
  size?: string;
  quality?: string;
+ assumeModelExists?: boolean;
  }): Promise<GeneratedImage>;
  transcribe(file: string, options?: {
  model?: string;
@@ -45,6 +46,7 @@ declare class LLMCore {
  language?: string;
  speakerNames?: string[];
  speakerReferences?: string[];
+ assumeModelExists?: boolean;
  }): Promise<Transcription>;
  get defaultTranscriptionModel(): string | undefined;
  get defaultModerationModel(): string | undefined;
@@ -52,10 +54,12 @@ declare class LLMCore {
  getRetryConfig(): Required<RetryOptions>;
  moderate(input: string | string[], options?: {
  model?: string;
+ assumeModelExists?: boolean;
  }): Promise<Moderation>;
  embed(input: string | string[], options?: {
  model?: string;
  dimensions?: number;
+ assumeModelExists?: boolean;
  }): Promise<Embedding>;
  }
  export { Transcription, Moderation, Embedding };
package/dist/llm.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,QAAQ,EACR,SAAS,EAKV,MAAM,yBAAyB,CAAC;AAKjC,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,OAAO,EAAU,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,kCAAkC,CAAC;AACjE,OAAO,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AACxD,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAIrD,MAAM,WAAW,YAAY;IAC3B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,KAAK,SAAS,GACV;IAAE,QAAQ,EAAE,QAAQ,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,GACjJ;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAEpJ,cAAM,OAAO;IACX,SAAgB,MAAM,EAAE,aAAa,CAAU;IAC/C,OAAO,CAAC,QAAQ,CAAC,CAAW;IAC5B,OAAO,CAAC,2BAA2B,CAAC,CAAS;IAC7C,OAAO,CAAC,wBAAwB,CAAC,CAAS;IAC1C,OAAO,CAAC,uBAAuB,CAAC,CAAS;IAEzC,OAAO,CAAC,KAAK,CAGX;IAEF,SAAS,CAAC,MAAM,EAAE,SAAS;IAuC3B,OAAO,CAAC,qBAAqB;IAU7B,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,IAAI;IAQ1C,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAKlC,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,cAAc,CAAC;IAgB7G,UAAU,CACd,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QACR,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;QACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;KAC9B,GACA,OAAO,CAAC,aAAa,CAAC;IAiBzB,IAAI,yBAAyB,IAAI,MAAM,GAAG,SAAS,CAElD;IAED,IAAI,sBAAsB,IAAI,MAAM,GAAG,SAAS,CAE/C;IAED,IAAI,qBAAqB,IAAI,MAAM,GAAG,SAAS,CAE9C;IAED,cAAc;IAIR,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,UAAU,CAAC;IAiBrF,KAAK,CACT,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EACxB,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,CAAA;KAAE,GAChD,OAAO,CAAC,SAAS,CAAC;CAkBtB;AAED,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;AAEhD,eAAO,MAAM,GAAG,SAAgB,CAAC"}
+ {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,gBAAgB,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AACpD,OAAO,EACL,QAAQ,EACR,SAAS,EAKV,MAAM,yBAAyB,CAAC;AAKjC,OAAO,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC3D,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,aAAa,EAAE,MAAM,kCAAkC,CAAC;AACjE,OAAO,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AACxD,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAIrD,MAAM,WAAW,YAAY;IAC3B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,KAAK,SAAS,GACV;IAAE,QAAQ,EAAE,QAAQ,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,GACjJ;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,YAAY,CAAC;IAAC,yBAAyB,CAAC,EAAE,MAAM,CAAC;IAAC,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAAC,qBAAqB,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAEpJ,cAAM,OAAO;IACX,SAAgB,MAAM,uBAAiB;IACvC,OAAO,CAAC,QAAQ,CAAC,CAAW;IAC5B,OAAO,CAAC,2BAA2B,CAAC,CAAS;IAC7C,OAAO,CAAC,wBAAwB,CAAC,CAAS;IAC1C,OAAO,CAAC,uBAAuB,CAAC,CAAS;IAEzC,OAAO,CAAC,KAAK,CAGX;IAEF,SAAS,CAAC,MAAM,EAAE,SAAS;IAuC3B,OAAO,CAAC,qBAAqB;IAU7B,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,WAAW,GAAG,IAAI;IAQ1C,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAKlC,KAAK,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,OAAO,CAAC,cAAc,CAAC;IAkB1I,UAAU,CACd,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE;QACR,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;QACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;QAC7B,iBAAiB,CAAC,EAAE,OAAO,CAAC;KAC7B,GACA,OAAO,CAAC,aAAa,CAAC;IAmBzB,IAAI,yBAAyB,IAAI,MAAM,GAAG,SAAS,CAElD;IAED,IAAI,sBAAsB,IAAI,MAAM,GAAG,SAAS,CAE/C;IAED,IAAI,qBAAqB,IAAI,MAAM,GAAG,SAAS,CAE9C;IAED,cAAc;IAIR,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,OAAO,CAAC,UAAU,CAAC;IAmBlH,KAAK,CACT,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EACxB,OAAO,CAAC,EAAE;QAAE,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,CAAC;QAAC,iBAAiB,CAAC,EAAE,OAAO,CAAA;KAAE,GAC7E,OAAO,CAAC,SAAS,CAAC;CAoBtB;AAED,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,SAAS,EAAE,CAAC;AAEhD,eAAO,MAAM,GAAG,SAAgB,CAAC"}
package/dist/llm.js CHANGED
@@ -4,12 +4,12 @@ import { ensureOpenAIRegistered } from "./providers/openai/index.js";
  import { registerGeminiProvider } from "./providers/gemini/index.js";
  import { registerAnthropicProvider } from "./providers/anthropic/index.js";
  import { GeneratedImage } from "./image/GeneratedImage.js";
- import { models } from "./models/ModelRegistry.js";
+ import { ModelRegistry } from "./models/ModelRegistry.js";
  import { Transcription } from "./transcription/Transcription.js";
  import { Moderation } from "./moderation/Moderation.js";
  import { Embedding } from "./embedding/Embedding.js";
  class LLMCore {
- models = models;
+ models = ModelRegistry;
  provider;
  defaultTranscriptionModelId;
  defaultModerationModelId;
@@ -72,7 +72,10 @@ class LLMCore {
  async paint(prompt, options) {
  const provider = this.ensureProviderSupport("paint");
  const model = options?.model;
- if (model && provider.capabilities && !provider.capabilities.supportsImageGeneration(model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+ }
+ else if (model && provider.capabilities && !provider.capabilities.supportsImageGeneration(model)) {
  throw new Error(`Model ${model} does not support image generation.`);
  }
  const response = await provider.paint({
@@ -84,7 +87,10 @@ class LLMCore {
  async transcribe(file, options) {
  const provider = this.ensureProviderSupport("transcribe");
  const model = options?.model || this.defaultTranscriptionModelId;
- if (model && provider.capabilities && !provider.capabilities.supportsTranscription(model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+ }
+ else if (model && provider.capabilities && !provider.capabilities.supportsTranscription(model)) {
  throw new Error(`Model ${model} does not support transcription.`);
  }
  const response = await provider.transcribe({
@@ -109,7 +115,10 @@ class LLMCore {
  async moderate(input, options) {
  const provider = this.ensureProviderSupport("moderate");
  const model = options?.model || this.defaultModerationModelId;
- if (model && provider.capabilities && !provider.capabilities.supportsModeration(model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${model}`);
+ }
+ else if (model && provider.capabilities && !provider.capabilities.supportsModeration(model)) {
  throw new Error(`Model ${model} does not support moderation.`);
  }
  const response = await provider.moderate({
@@ -127,7 +136,10 @@ class LLMCore {
  model,
  dimensions: options?.dimensions,
  };
- if (request.model && provider.capabilities && !provider.capabilities.supportsEmbeddings(request.model)) {
+ if (options?.assumeModelExists) {
+ console.warn(`[NodeLLM] Skipping validation for model ${request.model}`);
+ }
+ else if (request.model && provider.capabilities && !provider.capabilities.supportsEmbeddings(request.model)) {
  throw new Error(`Model ${request.model} does not support embeddings.`);
  }
  const response = await provider.embed(request);
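
The same `assumeModelExists` escape hatch is exposed on the top-level helpers (see the `llm.d.ts` changes above), downgrading the capability check to a `[NodeLLM]` warning instead of a throw. A sketch with illustrative model ids:

```ts
// Sketch only: skip registry validation for models the bundled registry doesn't list yet.
const embedding = await LLM.embed("hello world", {
  model: "my-embedding-preview", // illustrative id, not a real model
  assumeModelExists: true,
});

const image = await LLM.paint("a lighthouse at dusk", {
  model: "my-image-preview", // illustrative id, not a real model
  assumeModelExists: true,
});
```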
package/dist/models/ModelRegistry.d.ts CHANGED
@@ -1,23 +1,50 @@
- import { ModelInfo } from "../providers/Provider.js";
+ import { Model } from "./types.js";
  export declare class ModelRegistry {
- private models;
- private static readonly API_URL;
+ private static models;
  /**
- * Refresh model information from the Parsera API
+ * Find a model by its ID.
  */
- refresh(): Promise<void>;
+ static find(modelId: string, provider?: string): Model | undefined;
  /**
- * Find a model by ID
+ * Get all available models.
  */
- find(id: string): ModelInfo | undefined;
+ static all(): Model[];
  /**
- * List all known models
+ * Get output tokens limit for a model.
  */
- all(): ModelInfo[];
+ static getMaxOutputTokens(modelId: string, provider: string): number | undefined;
  /**
- * Filter models by provider
+ * Check if a model supports a capability.
  */
- byProvider(provider: string): ModelInfo[];
+ static supports(modelId: string, capability: string, provider: string): boolean;
+ /**
+ * Get context window size.
+ */
+ static getContextWindow(modelId: string, provider: string): number | undefined;
+ /**
+ * Calculate cost for usage.
+ */
+ static calculateCost(usage: {
+ input_tokens: number;
+ output_tokens: number;
+ total_tokens: number;
+ cached_tokens?: number;
+ reasoning_tokens?: number;
+ }, modelId: string, provider: string): {
+ input_tokens: number;
+ output_tokens: number;
+ total_tokens: number;
+ cached_tokens?: number;
+ reasoning_tokens?: number;
+ } | {
+ input_cost: number;
+ output_cost: number;
+ cost: number;
+ input_tokens: number;
+ output_tokens: number;
+ total_tokens: number;
+ cached_tokens?: number;
+ reasoning_tokens?: number;
+ };
  }
- export declare const models: ModelRegistry;
  //# sourceMappingURL=ModelRegistry.d.ts.map
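
With the registry now static and bundled (no `refresh()` round-trip), these helpers are reachable through `LLM.models`, which `llm.d.ts` now types as `typeof ModelRegistry`. A sketch, assuming `"openai"` is the provider key used in the bundled data:

```ts
LLM.models.find("gpt-4o-mini", "openai");                         // Model | undefined
LLM.models.supports("gpt-4o-mini", "function_calling", "openai"); // boolean
LLM.models.getContextWindow("gpt-4o-mini", "openai");             // e.g. 128000
LLM.models.getMaxOutputTokens("gpt-4o-mini", "openai");           // number | undefined
```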
package/dist/models/ModelRegistry.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"ModelRegistry.d.ts","sourceRoot":"","sources":["../../src/models/ModelRegistry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAErD,qBAAa,aAAa;IACxB,OAAO,CAAC,MAAM,CAAqC;IACnD,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,CAA0C;IAEzE;;OAEG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IA8B9B;;OAEG;IACH,IAAI,CAAC,EAAE,EAAE,MAAM,GAAG,SAAS,GAAG,SAAS;IAIvC;;OAEG;IACH,GAAG,IAAI,SAAS,EAAE;IAIlB;;OAEG;IACH,UAAU,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,EAAE;CAG1C;AAED,eAAO,MAAM,MAAM,eAAsB,CAAC"}
+ {"version":3,"file":"ModelRegistry.d.ts","sourceRoot":"","sources":["../../src/models/ModelRegistry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,YAAY,CAAC;AAGnC,qBAAa,aAAa;IACtB,OAAO,CAAC,MAAM,CAAC,MAAM,CAA6C;IAElE;;OAEG;IACH,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,SAAS;IAMlE;;OAEG;IACH,MAAM,CAAC,GAAG,IAAI,KAAK,EAAE;IAIrB;;OAEG;IACH,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKhF;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO;IAK/E;;OAEG;IACH,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAK9E;;OAEG;IACH,MAAM,CAAC,aAAa,CAAC,KAAK,EAAE;QAAE,YAAY,EAAE,MAAM,CAAC;QAAC,aAAa,EAAE,MAAM,CAAC;QAAC,YAAY,EAAE,MAAM,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAC;QAAC,gBAAgB,CAAC,EAAE,MAAM,CAAA;KAAE,EAAE,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;sBAA3I,MAAM;uBAAiB,MAAM;sBAAgB,MAAM;wBAAkB,MAAM;2BAAqB,MAAM;;;;;sBAAtG,MAAM;uBAAiB,MAAM;sBAAgB,MAAM;wBAAkB,MAAM;2BAAqB,MAAM;;CA+BrJ"}
package/dist/models/ModelRegistry.js CHANGED
@@ -1,54 +1,64 @@
+ import { modelsData } from "./models.js";
  export class ModelRegistry {
- models = new Map();
- static API_URL = "https://api.parsera.org/v1/llm-specs";
+ static models = modelsData;
  /**
- * Refresh model information from the Parsera API
+ * Find a model by its ID.
  */
- async refresh() {
- try {
- const response = await fetch(ModelRegistry.API_URL);
- if (!response.ok) {
- throw new Error(`Failed to refresh models: ${response.statusText}`);
- }
- const specs = await response.json();
- this.models.clear();
- for (const spec of specs) {
- this.models.set(spec.id, {
- id: spec.id,
- name: spec.name || spec.id,
- provider: spec.provider,
- family: spec.family || spec.provider,
- context_window: spec.context_window,
- max_output_tokens: spec.max_output_tokens,
- modalities: spec.modalities || { input: ["text"], output: ["text"] },
- capabilities: spec.capabilities || [],
- pricing: spec.pricing || {},
- metadata: spec.metadata || {}
- });
- }
- }
- catch (error) {
- console.error("Error refreshing model registry:", error);
- throw error;
- }
+ static find(modelId, provider) {
+ return this.models.find(m => (m.id === modelId || m.family === modelId) && (!provider || m.provider === provider));
+ }
+ /**
+ * Get all available models.
+ */
+ static all() {
+ return this.models;
  }
  /**
- * Find a model by ID
+ * Get output tokens limit for a model.
  */
- find(id) {
- return this.models.get(id);
+ static getMaxOutputTokens(modelId, provider) {
+ const model = this.find(modelId, provider);
+ return model?.max_output_tokens ?? undefined;
  }
  /**
- * List all known models
+ * Check if a model supports a capability.
  */
- all() {
- return Array.from(this.models.values());
+ static supports(modelId, capability, provider) {
+ const model = this.find(modelId, provider);
+ return model?.capabilities.includes(capability) ?? false;
  }
  /**
- * Filter models by provider
+ * Get context window size.
  */
- byProvider(provider) {
- return this.all().filter(m => m.provider === provider);
+ static getContextWindow(modelId, provider) {
+ const model = this.find(modelId, provider);
+ return model?.context_window ?? undefined;
+ }
+ /**
+ * Calculate cost for usage.
+ */
+ static calculateCost(usage, modelId, provider) {
+ const model = this.find(modelId, provider);
+ if (!model || !model.pricing?.text_tokens?.standard) {
+ return usage;
+ }
+ const prices = model.pricing.text_tokens.standard;
+ const inputPrice = prices.input_per_million || 0;
+ const outputPrice = prices.output_per_million || 0;
+ const reasoningPrice = prices.reasoning_output_per_million || outputPrice;
+ const cachedPrice = prices.cached_input_per_million ?? (inputPrice / 2);
+ const inputCost = ((usage.input_tokens - (usage.cached_tokens || 0)) / 1_000_000) * inputPrice +
+ ((usage.cached_tokens || 0) / 1_000_000) * cachedPrice;
+ const outputTokens = usage.output_tokens - (usage.reasoning_tokens || 0);
+ const reasoningTokens = usage.reasoning_tokens || 0;
+ const outputCost = (outputTokens / 1_000_000) * outputPrice +
+ (reasoningTokens / 1_000_000) * reasoningPrice;
+ const totalCost = inputCost + outputCost;
+ return {
+ ...usage,
+ input_cost: Number(inputCost.toFixed(6)),
+ output_cost: Number(outputCost.toFixed(6)),
+ cost: Number(totalCost.toFixed(6))
+ };
+ }
  }
- export const models = new ModelRegistry();
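
To make the pricing math above concrete, a worked sketch of `calculateCost` (reachable as `LLM.models.calculateCost`). Using the README's 0.15 $/M input price for `gpt-4o-mini`, 1,000 uncached input tokens come to 0.00015; the output figure depends on the bundled output price, which this diff does not show:

```ts
const usage = { input_tokens: 1000, output_tokens: 200, total_tokens: 1200, cached_tokens: 0 };
const withCost = LLM.models.calculateCost(usage, "gpt-4o-mini", "openai");

// withCost.input_cost ≈ (1000 / 1_000_000) * 0.15 = 0.00015
// withCost.cost = input_cost + output_cost (rounded to 6 decimals)
console.log(withCost);
```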