@node-llm/core 0.5.0 → 0.7.0

Files changed (107)
  1. package/README.md +83 -421
  2. package/dist/chat/Chat.d.ts +5 -0
  3. package/dist/chat/Chat.d.ts.map +1 -1
  4. package/dist/chat/Chat.js +31 -7
  5. package/dist/chat/ChatOptions.d.ts +3 -0
  6. package/dist/chat/ChatOptions.d.ts.map +1 -1
  7. package/dist/chat/ChatResponse.d.ts +5 -1
  8. package/dist/chat/ChatResponse.d.ts.map +1 -1
  9. package/dist/chat/ChatResponse.js +6 -1
  10. package/dist/chat/Stream.d.ts.map +1 -1
  11. package/dist/chat/Stream.js +7 -1
  12. package/dist/config.d.ts +29 -0
  13. package/dist/config.d.ts.map +1 -0
  14. package/dist/config.js +11 -0
  15. package/dist/index.d.ts +2 -0
  16. package/dist/index.d.ts.map +1 -1
  17. package/dist/index.js +1 -0
  18. package/dist/llm.d.ts +20 -10
  19. package/dist/llm.d.ts.map +1 -1
  20. package/dist/llm.js +52 -23
  21. package/dist/models/ModelRegistry.d.ts +39 -12
  22. package/dist/models/ModelRegistry.d.ts.map +1 -1
  23. package/dist/models/ModelRegistry.js +50 -40
  24. package/dist/models/models.d.ts +972 -0
  25. package/dist/models/models.d.ts.map +1 -0
  26. package/dist/models/models.js +7076 -0
  27. package/dist/models/types.d.ts +50 -0
  28. package/dist/models/types.d.ts.map +1 -0
  29. package/dist/models/types.js +1 -0
  30. package/dist/providers/Provider.d.ts +7 -0
  31. package/dist/providers/Provider.d.ts.map +1 -1
  32. package/dist/providers/anthropic/AnthropicProvider.d.ts +1 -0
  33. package/dist/providers/anthropic/AnthropicProvider.d.ts.map +1 -1
  34. package/dist/providers/anthropic/AnthropicProvider.js +2 -3
  35. package/dist/providers/anthropic/Capabilities.d.ts +1 -37
  36. package/dist/providers/anthropic/Capabilities.d.ts.map +1 -1
  37. package/dist/providers/anthropic/Capabilities.js +59 -130
  38. package/dist/providers/anthropic/Chat.d.ts.map +1 -1
  39. package/dist/providers/anthropic/Chat.js +6 -2
  40. package/dist/providers/anthropic/Models.d.ts +1 -0
  41. package/dist/providers/anthropic/Models.d.ts.map +1 -1
  42. package/dist/providers/anthropic/Models.js +36 -41
  43. package/dist/providers/anthropic/Streaming.d.ts.map +1 -1
  44. package/dist/providers/anthropic/Streaming.js +10 -1
  45. package/dist/providers/anthropic/index.d.ts.map +1 -1
  46. package/dist/providers/anthropic/index.js +3 -2
  47. package/dist/providers/deepseek/Capabilities.d.ts +14 -0
  48. package/dist/providers/deepseek/Capabilities.d.ts.map +1 -0
  49. package/dist/providers/deepseek/Capabilities.js +52 -0
  50. package/dist/providers/deepseek/Chat.d.ts +8 -0
  51. package/dist/providers/deepseek/Chat.d.ts.map +1 -0
  52. package/dist/providers/deepseek/Chat.js +89 -0
  53. package/dist/providers/deepseek/DeepSeekProvider.d.ts +28 -0
  54. package/dist/providers/deepseek/DeepSeekProvider.d.ts.map +1 -0
  55. package/dist/providers/deepseek/DeepSeekProvider.js +38 -0
  56. package/dist/providers/deepseek/Models.d.ts +8 -0
  57. package/dist/providers/deepseek/Models.d.ts.map +1 -0
  58. package/dist/providers/deepseek/Models.js +67 -0
  59. package/dist/providers/deepseek/Streaming.d.ts +8 -0
  60. package/dist/providers/deepseek/Streaming.d.ts.map +1 -0
  61. package/dist/providers/deepseek/Streaming.js +74 -0
  62. package/dist/providers/deepseek/index.d.ts +7 -0
  63. package/dist/providers/deepseek/index.d.ts.map +1 -0
  64. package/dist/providers/deepseek/index.js +22 -0
  65. package/dist/providers/gemini/Capabilities.d.ts +28 -7
  66. package/dist/providers/gemini/Capabilities.d.ts.map +1 -1
  67. package/dist/providers/gemini/Capabilities.js +32 -20
  68. package/dist/providers/gemini/Chat.d.ts.map +1 -1
  69. package/dist/providers/gemini/Chat.js +9 -11
  70. package/dist/providers/gemini/GeminiProvider.d.ts +1 -0
  71. package/dist/providers/gemini/GeminiProvider.d.ts.map +1 -1
  72. package/dist/providers/gemini/GeminiProvider.js +1 -0
  73. package/dist/providers/gemini/Models.d.ts +1 -0
  74. package/dist/providers/gemini/Models.d.ts.map +1 -1
  75. package/dist/providers/gemini/Models.js +46 -26
  76. package/dist/providers/gemini/index.d.ts.map +1 -1
  77. package/dist/providers/gemini/index.js +3 -2
  78. package/dist/providers/openai/Capabilities.d.ts +4 -11
  79. package/dist/providers/openai/Capabilities.d.ts.map +1 -1
  80. package/dist/providers/openai/Capabilities.js +124 -121
  81. package/dist/providers/openai/Chat.d.ts.map +1 -1
  82. package/dist/providers/openai/Chat.js +19 -17
  83. package/dist/providers/openai/Embedding.d.ts.map +1 -1
  84. package/dist/providers/openai/Embedding.js +2 -1
  85. package/dist/providers/openai/Image.d.ts.map +1 -1
  86. package/dist/providers/openai/Image.js +2 -1
  87. package/dist/providers/openai/ModelDefinitions.d.ts +1 -24
  88. package/dist/providers/openai/ModelDefinitions.d.ts.map +1 -1
  89. package/dist/providers/openai/ModelDefinitions.js +1 -211
  90. package/dist/providers/openai/Models.d.ts +1 -0
  91. package/dist/providers/openai/Models.d.ts.map +1 -1
  92. package/dist/providers/openai/Models.js +46 -22
  93. package/dist/providers/openai/Moderation.d.ts.map +1 -1
  94. package/dist/providers/openai/Moderation.js +2 -1
  95. package/dist/providers/openai/OpenAIProvider.d.ts +1 -0
  96. package/dist/providers/openai/OpenAIProvider.d.ts.map +1 -1
  97. package/dist/providers/openai/OpenAIProvider.js +1 -0
  98. package/dist/providers/openai/Streaming.d.ts.map +1 -1
  99. package/dist/providers/openai/Streaming.js +5 -1
  100. package/dist/providers/openai/Transcription.d.ts.map +1 -1
  101. package/dist/providers/openai/Transcription.js +3 -2
  102. package/dist/providers/openai/index.d.ts.map +1 -1
  103. package/dist/providers/openai/index.js +5 -3
  104. package/dist/providers/openai/utils.d.ts +20 -0
  105. package/dist/providers/openai/utils.d.ts.map +1 -0
  106. package/dist/providers/openai/utils.js +25 -0
  107. package/package.json +1 -1
@@ -1,6 +1,7 @@
  import { Capabilities } from "./Capabilities.js";
  import { handleGeminiError } from "./Errors.js";
  import { GeminiChatUtils } from "./ChatUtils.js";
+ import { ModelRegistry } from "../../models/ModelRegistry.js";
  export class GeminiChat {
  baseUrl;
  apiKey;
@@ -19,24 +20,20 @@ export class GeminiChat {
  if (request.response_format?.type === "json_object") {
  generationConfig.responseMimeType = "application/json";
  }
- else if (request.response_format?.type === "json_schema") {
- generationConfig.responseMimeType = "application/json";
- if (request.response_format.json_schema?.schema) {
- generationConfig.responseSchema = request.response_format.json_schema.schema;
- }
- }
- if (request.response_format?.type === "json_object") {
- generationConfig.responseMimeType = "application/json";
- }
  else if (request.response_format?.type === "json_schema") {
  generationConfig.responseMimeType = "application/json";
  if (request.response_format.json_schema?.schema) {
  generationConfig.responseSchema = this.sanitizeSchema(request.response_format.json_schema.schema);
  }
  }
+ const { model: _model, messages: _messages, tools: _tools, temperature: _temp, max_tokens: _max, response_format: _format, headers: _headers, ...rest } = request;
  const payload = {
  contents,
- generationConfig,
+ generationConfig: {
+ ...generationConfig,
+ ...(rest.generationConfig || {})
+ },
+ ...rest
  };
  if (systemInstructionParts.length > 0) {
  payload.systemInstruction = { parts: systemInstructionParts };
@@ -83,7 +80,8 @@ export class GeminiChat {
  output_tokens: json.usageMetadata.candidatesTokenCount,
  total_tokens: json.usageMetadata.totalTokenCount,
  } : undefined;
- return { content, tool_calls, usage };
+ const calculatedUsage = usage ? ModelRegistry.calculateCost(usage, request.model, "gemini") : undefined;
+ return { content, tool_calls, usage: calculatedUsage };
  }
  sanitizeSchema(schema) {
  if (typeof schema !== "object" || schema === null)
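The new GeminiChat payload builder strips the fields the wrapper already translates and forwards everything else to the Gemini API, merging any caller-supplied generationConfig over the derived one, and the returned usage is now annotated with cost via ModelRegistry.calculateCost. A minimal standalone sketch of that destructure-and-spread pattern (the request type and function names here are illustrative, not the package's actual types):

    // Sketch: forward unknown request fields while merging a nested generationConfig.
    interface SketchChatRequest {
      model: string;
      messages: unknown[];
      tools?: unknown[];
      temperature?: number;
      max_tokens?: number;
      response_format?: unknown;
      headers?: Record<string, string>;
      [extra: string]: unknown;
    }

    function buildGeminiStylePayload(
      request: SketchChatRequest,
      contents: unknown[],
      derivedGenerationConfig: Record<string, unknown>,
    ) {
      // Pull out everything the wrapper handles itself; `rest` keeps provider-specific extras.
      const { model: _model, messages: _messages, tools: _tools, temperature: _temp,
              max_tokens: _max, response_format: _format, headers: _headers, ...rest } = request;
      const callerConfig = (rest.generationConfig as Record<string, unknown> | undefined) ?? {};
      delete rest.generationConfig;
      return {
        ...rest,                                                           // unknown extras pass through
        contents,
        generationConfig: { ...derivedGenerationConfig, ...callerConfig }, // caller keys win
      };
    }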
@@ -21,6 +21,7 @@ export declare class GeminiProvider implements Provider {
  supportsImageGeneration: (model: string) => boolean;
  supportsTranscription: (model: string) => boolean;
  supportsModeration: (model: string) => boolean;
+ supportsReasoning: (_model: string) => boolean;
  getContextWindow: (model: string) => number | null;
  };
  constructor(options: GeminiProviderOptions);
@@ -1 +1 @@
- {"version":3,"file":"GeminiProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/GeminiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,WAAW,EACX,YAAY,EACZ,SAAS,EACT,SAAS,EACT,YAAY,EACZ,aAAa,EACb,oBAAoB,EACpB,qBAAqB,EACrB,iBAAiB,EACjB,kBAAkB,EACnB,MAAM,gBAAgB,CAAC;AAQxB,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEtE,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,cAAe,YAAW,QAAQ;IAoBjC,OAAO,CAAC,QAAQ,CAAC,OAAO;IAnBpC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAkB;IACnD,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAe;IAC7C,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAc;IAC3C,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAmB;IACpD,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAsB;IAEpD,YAAY;gCACO,MAAM;+BACP,MAAM;0CACK,MAAM;oCACZ,MAAM;yCACD,MAAM;uCACR,MAAM;oCACT,MAAM;kCACR,MAAM;MAChC;gBAE2B,OAAO,EAAE,qBAAqB;IAUrD,IAAI,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;IAIhD,MAAM,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc,CAAC,SAAS,CAAC;IAIxD,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAIlC,KAAK,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;IAIpD,KAAK,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAI5D,UAAU,CAAC,OAAO,EAAE,oBAAoB,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAIzE,QAAQ,CAAC,QAAQ,EAAE,iBAAiB,GAAG,OAAO,CAAC,kBAAkB,CAAC;CAGzE"}
+ {"version":3,"file":"GeminiProvider.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/GeminiProvider.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,QAAQ,EACR,WAAW,EACX,YAAY,EACZ,SAAS,EACT,SAAS,EACT,YAAY,EACZ,aAAa,EACb,oBAAoB,EACpB,qBAAqB,EACrB,iBAAiB,EACjB,kBAAkB,EACnB,MAAM,gBAAgB,CAAC;AAQxB,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEtE,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,qBAAa,cAAe,YAAW,QAAQ;IAqBjC,OAAO,CAAC,QAAQ,CAAC,OAAO;IApBpC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAkB;IACnD,OAAO,CAAC,QAAQ,CAAC,aAAa,CAAe;IAC7C,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAc;IAC3C,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAmB;IACpD,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAsB;IAEpD,YAAY;gCACO,MAAM;+BACP,MAAM;0CACK,MAAM;oCACZ,MAAM;yCACD,MAAM;uCACR,MAAM;oCACT,MAAM;oCACN,MAAM;kCACR,MAAM;MAChC;gBAE2B,OAAO,EAAE,qBAAqB;IAUrD,IAAI,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;IAIhD,MAAM,CAAC,OAAO,EAAE,WAAW,GAAG,cAAc,CAAC,SAAS,CAAC;IAIxD,UAAU,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAIlC,KAAK,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;IAIpD,KAAK,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,iBAAiB,CAAC;IAI5D,UAAU,CAAC,OAAO,EAAE,oBAAoB,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAIzE,QAAQ,CAAC,QAAQ,EAAE,iBAAiB,GAAG,OAAO,CAAC,kBAAkB,CAAC;CAGzE"}
@@ -22,6 +22,7 @@ export class GeminiProvider {
  supportsImageGeneration: (model) => Capabilities.supportsImageGeneration(model),
  supportsTranscription: (model) => Capabilities.supportsTranscription(model),
  supportsModeration: (model) => Capabilities.supportsModeration(model),
+ supportsReasoning: (_model) => false,
  getContextWindow: (model) => Capabilities.getContextWindow(model),
  };
  constructor(options) {
@@ -4,5 +4,6 @@ export declare class GeminiModels {
  private readonly apiKey;
  constructor(baseUrl: string, apiKey: string);
  execute(): Promise<ModelInfo[]>;
+ find(modelId: string): import("../../models/types.js").Model | undefined;
  }
  //# sourceMappingURL=Models.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"Models.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Models.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAI3C,qBAAa,YAAY;IACX,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;CAgCtC"}
+ {"version":3,"file":"Models.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/Models.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAK3C,qBAAa,YAAY;IACX,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC;IAkDrC,IAAI,CAAC,OAAO,EAAE,MAAM;CAGrB"}
@@ -1,4 +1,5 @@
  import { Capabilities } from "./Capabilities.js";
+ import { ModelRegistry } from "../../models/ModelRegistry.js";
  export class GeminiModels {
  baseUrl;
  apiKey;
@@ -7,32 +8,51 @@ export class GeminiModels {
  this.apiKey = apiKey;
  }
  async execute() {
- const url = `${this.baseUrl}/models?key=${this.apiKey}`;
- const response = await fetch(url);
- if (!response.ok) {
- const errorText = await response.text();
- throw new Error(`Gemini error (${response.status}): ${errorText}`);
- }
- const json = (await response.json());
- return json.models
- .filter(m => m.supportedGenerationMethods.includes("generateContent"))
- .map((model) => {
- const id = model.name.replace("models/", "");
- return {
- id: id,
- name: model.displayName || Capabilities.formatDisplayName(id),
- provider: "gemini",
- family: Capabilities.getFamily(id),
- context_window: model.inputTokenLimit || Capabilities.getContextWindow(id),
- max_output_tokens: model.outputTokenLimit || Capabilities.getMaxOutputTokens(id),
- modalities: Capabilities.getModalities(id),
- capabilities: Capabilities.getCapabilities(id),
- pricing: Capabilities.getPricing(id),
- metadata: {
- description: model.description,
- version: model.version,
+ try {
+ const response = await fetch(`${this.baseUrl}/models?key=${this.apiKey}`, {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
  },
- };
- });
+ });
+ if (response.ok) {
+ const { models } = await response.json();
+ return models.map(m => {
+ const modelId = m.name.replace("models/", "");
+ const registryModel = ModelRegistry.find(modelId, "gemini");
+ const info = {
+ id: modelId,
+ name: registryModel?.name || m.displayName || modelId,
+ provider: "gemini",
+ family: registryModel?.family || modelId,
+ context_window: registryModel?.context_window || Capabilities.getContextWindow(modelId),
+ max_output_tokens: registryModel?.max_output_tokens || Capabilities.getMaxOutputTokens(modelId),
+ modalities: registryModel?.modalities || Capabilities.getModalities(modelId),
+ capabilities: Capabilities.getCapabilities(modelId),
+ pricing: registryModel?.pricing || Capabilities.getPricing(modelId),
+ metadata: {
+ ...(registryModel?.metadata || {}),
+ description: m.description,
+ input_token_limit: m.inputTokenLimit,
+ output_token_limit: m.outputTokenLimit,
+ supported_generation_methods: m.supportedGenerationMethods
+ }
+ };
+ return info;
+ });
+ }
+ }
+ catch (_error) {
+ // Fallback
+ }
+ return ModelRegistry.all()
+ .filter(m => m.provider === "gemini")
+ .map(m => ({
+ ...m,
+ capabilities: Capabilities.getCapabilities(m.id)
+ }));
+ }
+ find(modelId) {
+ return ModelRegistry.find(modelId, "gemini");
  }
  }
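GeminiModels.execute() now treats the live /models endpoint as best-effort: a network failure or a non-OK response falls through to the bundled ModelRegistry snapshot instead of throwing. A condensed sketch of that fallback shape (fetchLiveModels and staticModels are placeholders standing in for the real fetch call and ModelRegistry lookups, not package APIs):

    // Sketch: prefer live model metadata, degrade to the static registry on any failure.
    interface ModelInfoSketch { id: string; provider: string; }

    async function listModels(
      fetchLiveModels: () => Promise<ModelInfoSketch[]>,
      staticModels: () => ModelInfoSketch[],
    ): Promise<ModelInfoSketch[]> {
      try {
        return await fetchLiveModels();   // live API wins when reachable
      } catch {
        return staticModels();            // offline or bad key: serve the bundled snapshot
      }
    }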
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/index.ts"],"names":[],"mappings":"AAKA;;;GAGG;AACH,wBAAgB,sBAAsB,SAcrC;AAED;;GAEG;AACH,eAAO,MAAM,sBAAsB,+BAAyB,CAAC;AAE7D,cAAc,qBAAqB,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/providers/gemini/index.ts"],"names":[],"mappings":"AAMA;;;GAGG;AACH,wBAAgB,sBAAsB,SAcrC;AAED;;GAEG;AACH,eAAO,MAAM,sBAAsB,+BAAyB,CAAC;AAE7D,cAAc,qBAAqB,CAAC"}
@@ -1,3 +1,4 @@
+ import { config } from "../../config.js";
  import { providerRegistry } from "../registry.js";
  import { GeminiProvider } from "./GeminiProvider.js";
  let registered = false;
@@ -9,9 +10,9 @@ export function registerGeminiProvider() {
  if (registered)
  return;
  providerRegistry.register("gemini", () => {
- const apiKey = process.env.GEMINI_API_KEY;
+ const apiKey = config.geminiApiKey;
  if (!apiKey) {
- throw new Error("GEMINI_API_KEY is not set");
+ throw new Error("geminiApiKey is not set in config or GEMINI_API_KEY environment variable");
  }
  return new GeminiProvider({ apiKey });
  });
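Provider registration now resolves credentials through the new config module (dist/config.js, added in this release) rather than reading process.env directly. The module's body is not part of the shown hunks; the error message suggests the config value falls back to the environment variable, so a plausible minimal sketch (an assumption, not the shipped implementation) is a getter-backed object:

    // Assumed sketch: a getter-backed config whose values default to environment variables.
    // The real dist/config.js (+11 lines in this diff) may be structured differently.
    let geminiApiKeyOverride: string | undefined;

    export const config = {
      get geminiApiKey(): string | undefined {
        return geminiApiKeyOverride ?? process.env.GEMINI_API_KEY;
      },
      set geminiApiKey(value: string | undefined) {
        geminiApiKeyOverride = value;
      },
    };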
@@ -1,7 +1,4 @@
- import { ModelFamilyDefinition } from "./ModelDefinitions.js";
  export declare class Capabilities {
- static getFamily(modelId: string): string;
- static getDefinition(modelId: string): ModelFamilyDefinition;
  static getContextWindow(modelId: string): number | null;
  static getMaxOutputTokens(modelId: string): number | null;
  static supportsVision(modelId: string): boolean;
@@ -12,19 +9,15 @@ export declare class Capabilities {
  static supportsImageGeneration(modelId: string): boolean;
  static supportsTranscription(modelId: string): boolean;
  static supportsModeration(modelId: string): boolean;
- static getInputPrice(modelId: string): number;
- static getCachedInputPrice(modelId: string): number | undefined;
- static getOutputPrice(modelId: string): number;
- static getModelType(modelId: string): "embedding" | "audio" | "moderation" | "image" | "chat";
- static formatDisplayName(modelId: string): string;
- private static applySpecialFormatting;
- private static specialPrefixFormat;
- static normalizeTemperature(temperature: number | undefined, modelId: string): number | undefined | null;
+ static supportsReasoning(modelId: string): boolean;
+ static getModelType(modelId: string): "embedding" | "audio" | "moderation" | "image" | "chat" | "audio_transcription" | "audio_speech";
  static getModalities(modelId: string): {
  input: string[];
  output: string[];
  };
  static getCapabilities(modelId: string): string[];
+ static normalizeTemperature(temperature: number | undefined, modelId: string): number | undefined | null;
+ static formatDisplayName(modelId: string): string;
  static getPricing(modelId: string): any;
  }
  //# sourceMappingURL=Capabilities.d.ts.map
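The OpenAI Capabilities surface gains supportsReasoning and widens getModelType with "audio_transcription" and "audio_speech", while the per-family price getters are dropped in favor of getPricing. A quick usage sketch against these declarations (model IDs are examples only, expected results inferred from the implementation below):

    // Importing the class the way the provider's sibling modules do.
    import { Capabilities } from "./Capabilities.js";

    Capabilities.supportsReasoning("o3-mini");   // boolean, registry or heuristic based
    Capabilities.getModelType("whisper-1");      // "audio_transcription"
    Capabilities.getModelType("tts-1-hd");       // "audio_speech"
    Capabilities.getPricing("gpt-4o-mini");      // pricing object (declared as any)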
@@ -1 +1 @@
- {"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Capabilities.ts"],"names":[],"mappings":"AAAA,OAAO,EAAiB,qBAAqB,EAAE,MAAM,uBAAuB,CAAC;AAE7E,qBAAa,YAAY;IACvB,MAAM,CAAC,SAAS,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAUzC,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,qBAAqB;IAK5D,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAIvD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAIzD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAI/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAI9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIzD,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIjD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAInD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAMtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAInD,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAK7C,MAAM,CAAC,mBAAmB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAI/D,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAK9C,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW,GAAG,OAAO,GAAG,YAAY,GAAG,OAAO,GAAG,MAAM;IAI7F,MAAM,CAAC,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAKjD,OAAO,CAAC,MAAM,CAAC,sBAAsB;IAarC,OAAO,CAAC,MAAM,CAAC,mBAAmB;IAUlC,MAAM,CAAC,oBAAoB,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,EAAE,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAUxG,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,MAAM,EAAE,CAAA;KAAE;IAqB5E,MAAM,CAAC,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;IAiBjD,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,MAAM;CAyBlC"}
+ {"version":3,"file":"Capabilities.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Capabilities.ts"],"names":[],"mappings":"AAEA,qBAAa,YAAY;IACvB,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAUvD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IASzD,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAO/C,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAO9C,MAAM,CAAC,wBAAwB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAOzD,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAIjD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAOnD,MAAM,CAAC,uBAAuB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAOxD,MAAM,CAAC,qBAAqB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAOtD,MAAM,CAAC,kBAAkB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAOnD,MAAM,CAAC,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO;IAOlD,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW,GAAG,OAAO,GAAG,YAAY,GAAG,OAAO,GAAG,MAAM,GAAG,qBAAqB,GAAG,cAAc;IAUtI,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,MAAM,GAAG;QAAE,KAAK,EAAE,MAAM,EAAE,CAAC;QAAC,MAAM,EAAE,MAAM,EAAE,CAAA;KAAE;IAiB5E,MAAM,CAAC,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE;IAoBjD,MAAM,CAAC,oBAAoB,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,EAAE,OAAO,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAMxG,MAAM,CAAC,iBAAiB,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM;IAOjD,MAAM,CAAC,UAAU,CAAC,OAAO,EAAE,MAAM,GAAG,GAAG;CAcxC"}
@@ -1,160 +1,163 @@
- import { OPENAI_MODELS } from "./ModelDefinitions.js";
+ import { ModelRegistry } from "../../models/ModelRegistry.js";
  export class Capabilities {
- static getFamily(modelId) {
- for (const [key, def] of Object.entries(OPENAI_MODELS)) {
- if (key === "other")
- continue;
- if (def.pattern.test(modelId)) {
- return key;
- }
- }
- return "other";
- }
- static getDefinition(modelId) {
- const family = this.getFamily(modelId);
- return OPENAI_MODELS[family];
- }
  static getContextWindow(modelId) {
- return this.getDefinition(modelId).contextWindow;
+ const val = ModelRegistry.getContextWindow(modelId, "openai");
+ if (val)
+ return val;
+ if (/gpt-4.*(preview|turbo|vision|o)/.test(modelId) || /o1|o3/.test(modelId))
+ return 128_000;
+ if (/gpt-4/.test(modelId))
+ return 8_192;
+ if (/gpt-3\.5/.test(modelId))
+ return 16_385;
+ return 128_000;
  }
  static getMaxOutputTokens(modelId) {
- return this.getDefinition(modelId).maxOutputTokens;
+ const val = ModelRegistry.getMaxOutputTokens(modelId, "openai");
+ if (val)
+ return val;
+ if (/o1.*(pro|mini)|o3/.test(modelId))
+ return 65_536;
+ if (/gpt-4o/.test(modelId))
+ return 16_384;
+ return 4_096;
  }
  static supportsVision(modelId) {
- return !!this.getDefinition(modelId).features.vision;
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.modalities?.input?.includes("image"))
+ return true;
+ return /gpt-4(?!-3)|o1/.test(modelId) && !/audio|realtime|voice/.test(modelId);
  }
  static supportsTools(modelId) {
- return !!this.getDefinition(modelId).features.tools;
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.capabilities?.includes("function_calling"))
+ return true;
+ return !/embedding|moderation|dall-e|tts|whisper/.test(modelId);
  }
  static supportsStructuredOutput(modelId) {
- return !!this.getDefinition(modelId).features.structuredOutput;
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.capabilities?.includes("structured_output"))
+ return true;
+ return /gpt-4|o1|o3/.test(modelId);
  }
  static supportsJsonMode(modelId) {
  return this.supportsStructuredOutput(modelId);
  }
  static supportsEmbeddings(modelId) {
- return this.getDefinition(modelId).type === "embedding";
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.modalities?.output?.includes("embeddings"))
+ return true;
+ return /embedding/.test(modelId);
  }
  static supportsImageGeneration(modelId) {
- return this.getDefinition(modelId).type === "image";
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.capabilities?.includes("image_generation") || model?.modalities?.output?.includes("image"))
+ return true;
+ return /dall-e|image/.test(modelId);
  }
  static supportsTranscription(modelId) {
- // Transcription is supported by audio models or specific models like gpt-4o-audio
- const def = this.getDefinition(modelId);
- return def.type === "audio" || (def.type === "chat" && /audio|transcribe/.test(modelId));
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.modalities?.input?.includes("audio"))
+ return true;
+ return /whisper|audio|transcribe/.test(modelId);
  }
  static supportsModeration(modelId) {
- return this.getDefinition(modelId).type === "moderation";
- }
- static getInputPrice(modelId) {
- const prices = this.getDefinition(modelId).pricing;
- return prices.input || prices.price || 0.5;
- }
- static getCachedInputPrice(modelId) {
- return this.getDefinition(modelId).pricing.cached_input;
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.modalities?.output?.includes("moderation"))
+ return true;
+ return /moderation/.test(modelId);
  }
- static getOutputPrice(modelId) {
- const prices = this.getDefinition(modelId).pricing;
- return prices.output || prices.price || 1.5;
+ static supportsReasoning(modelId) {
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.capabilities?.includes("reasoning"))
+ return true;
+ return /o\d|gpt-5/.test(modelId);
  }
  static getModelType(modelId) {
- return this.getDefinition(modelId).type;
+ if (/moderation/.test(modelId))
+ return "moderation";
+ if (/embedding/.test(modelId))
+ return "embedding";
+ if (/dall-e|image/.test(modelId))
+ return "image";
+ if (/whisper|transcribe/.test(modelId))
+ return "audio_transcription";
+ if (/tts|speech/.test(modelId))
+ return "audio_speech";
+ if (/audio/.test(modelId))
+ return "audio";
+ return "chat";
  }
- static formatDisplayName(modelId) {
- const humanized = modelId.replace(/-/g, " ").split(" ").map(s => s.charAt(0).toUpperCase() + s.slice(1)).join(" ");
- return this.applySpecialFormatting(humanized);
- }
- static applySpecialFormatting(name) {
- return name
- .replace(/(\d{4}) (\d{2}) (\d{2})/, "$1$2$3")
- .replace(/^(?:Gpt|Chatgpt|Tts|Dall E) /g, (m) => this.specialPrefixFormat(m.trim()))
- .replace(/^O([13]) /g, "O$1-")
- .replace(/^O[13] Mini/g, (m) => m.replace(" ", "-"))
- .replace(/\d\.\d /g, (m) => m.replace(" ", "-"))
- .replace(/4o (?=Mini|Preview|Turbo|Audio|Realtime|Transcribe|Tts)/g, "4o-")
- .replace(/\bHd\b/g, "HD")
- .replace(/(?:Omni|Text) Moderation/g, (m) => m.replace(" ", "-"))
- .replace("Text Embedding", "text-embedding-");
- }
- static specialPrefixFormat(prefix) {
- switch (prefix) {
- case "Gpt": return "GPT-";
- case "Chatgpt": return "ChatGPT-";
- case "Tts": return "TTS-";
- case "Dall E": return "DALL-E-";
- default: return prefix + "-";
+ static getModalities(modelId) {
+ const input = ["text"];
+ const output = ["text"];
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.modalities)
+ return model.modalities;
+ if (this.supportsVision(modelId))
+ input.push("image", "pdf");
+ if (this.supportsTranscription(modelId))
+ input.push("audio");
+ if (this.supportsImageGeneration(modelId))
+ output.push("image");
+ if (this.supportsEmbeddings(modelId))
+ output.push("embeddings");
+ if (this.supportsModeration(modelId))
+ output.push("moderation");
+ return { input, output };
+ }
+ static getCapabilities(modelId) {
+ const caps = ["streaming"];
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model) {
+ model.capabilities.forEach(c => { if (!caps.includes(c))
+ caps.push(c); });
+ return caps;
  }
+ if (this.supportsTools(modelId))
+ caps.push("function_calling");
+ if (this.supportsStructuredOutput(modelId))
+ caps.push("structured_output");
+ if (this.supportsEmbeddings(modelId))
+ caps.push("batch");
+ if (/o\d|gpt-5/.test(modelId))
+ caps.push("reasoning");
+ if (this.supportsImageGeneration(modelId))
+ caps.push("image_generation");
+ if (this.supportsTranscription(modelId))
+ caps.push("transcription");
+ return caps;
  }
  static normalizeTemperature(temperature, modelId) {
- if (/^(o\d|gpt-5)/.test(modelId)) {
+ if (/^(o\d|gpt-5)/.test(modelId))
  return 1.0;
- }
- if (/-search/.test(modelId)) {
+ if (/-search/.test(modelId))
  return null;
- }
  return temperature;
  }
- static getModalities(modelId) {
- const type = this.getModelType(modelId);
- const features = this.getDefinition(modelId).features;
- const modalities = {
- input: ["text"],
- output: ["text"]
- };
- if (features.vision)
- modalities.input.push("image", "pdf");
- if (type === "audio") {
- modalities.input.push("audio");
- modalities.output.push("audio");
- }
- if (type === "image")
- modalities.output.push("image");
- if (type === "embedding")
- modalities.output.push("embeddings");
- if (type === "moderation")
- modalities.output.push("moderation");
- return modalities;
- }
- static getCapabilities(modelId) {
- const capabilities = [];
- const type = this.getModelType(modelId);
- const features = this.getDefinition(modelId).features;
- if (type !== "moderation" && type !== "embedding")
- capabilities.push("streaming");
- if (features.tools)
- capabilities.push("function_calling");
- if (features.structuredOutput)
- capabilities.push("structured_output");
- if (type === "embedding")
- capabilities.push("batch");
- if (/o\d|gpt-5|codex/.test(modelId))
- capabilities.push("reasoning");
- if (type === "image")
- capabilities.push("image_generation");
- if (type === "audio")
- capabilities.push("speech_generation", "transcription");
- return capabilities;
+ static formatDisplayName(modelId) {
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.name && model.name !== modelId)
+ return model.name;
+ return modelId.replace(/-/g, " ").replace(/\b\w/g, c => c.toUpperCase());
  }
  static getPricing(modelId) {
- const standardPricing = {
- input_per_million: this.getInputPrice(modelId),
- output_per_million: this.getOutputPrice(modelId)
- };
- const cachedPrice = this.getCachedInputPrice(modelId);
- const pricing = {
+ const model = ModelRegistry.find(modelId, "openai");
+ if (model?.pricing)
+ return model.pricing;
+ let input = 2.5, output = 10.0;
+ if (/gpt-3/.test(modelId)) {
+ input = 0.5;
+ output = 1.5;
+ }
+ if (/mini/.test(modelId)) {
+ input = 0.15;
+ output = 0.6;
+ }
+ return {
  text_tokens: {
- standard: {
- ...standardPricing,
- ...(cachedPrice ? { cached_input_per_million: cachedPrice } : {})
- }
+ standard: { input_per_million: input, output_per_million: output }
  }
  };
- if (this.getModelType(modelId) === "embedding") {
- pricing.text_tokens.batch = {
- input_per_million: standardPricing.input_per_million * 0.5,
- output_per_million: standardPricing.output_per_million * 0.5
- };
- }
- return pricing;
  }
  }
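Every capability probe in the rewritten OpenAI Capabilities class now follows the same shape: consult the generated ModelRegistry first and only fall back to model-id regex heuristics when the model is unknown. A generic sketch of that lookup order (the RegistryModel type and the lookup parameter are illustrative stand-ins for the registry types):

    // Sketch: registry-first capability check with a regex heuristic as the fallback.
    interface RegistryModel { capabilities?: string[]; }

    function supportsCapability(
      modelId: string,
      capability: string,
      heuristic: RegExp,
      lookup: (id: string) => RegistryModel | undefined, // stands in for ModelRegistry.find
    ): boolean {
      const model = lookup(modelId);
      if (model?.capabilities?.includes(capability)) return true; // authoritative registry data
      return heuristic.test(modelId);                             // best-effort guess for unknown IDs
    }

    // e.g. supportsCapability("o3-mini", "reasoning", /o\d|gpt-5/, someRegistryLookup)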
@@ -1 +1 @@
- {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Chat.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAK3D,qBAAa,UAAU;IACT,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;CA0D3D"}
+ {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Chat.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,YAAY,EAAS,MAAM,gBAAgB,CAAC;AAOlE,qBAAa,UAAU;IACT,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;CAsD3D"}
@@ -1,5 +1,7 @@
  import { Capabilities } from "./Capabilities.js";
  import { handleOpenAIError } from "./Errors.js";
+ import { ModelRegistry } from "../../models/ModelRegistry.js";
+ import { buildUrl } from "./utils.js";
  export class OpenAIChat {
  baseUrl;
  apiKey;
@@ -9,25 +11,24 @@ export class OpenAIChat {
  }
  async execute(request) {
  const temperature = Capabilities.normalizeTemperature(request.temperature, request.model);
+ const { model, messages, tools, temperature: _, max_tokens, response_format, headers, ...rest } = request;
  const body = {
- model: request.model,
- messages: request.messages,
+ model,
+ messages,
+ ...rest
  };
- if (temperature !== undefined) {
- if (temperature !== null) {
- body.temperature = temperature;
- }
+ if (temperature !== undefined && temperature !== null)
+ body.temperature = temperature;
+ if (max_tokens)
+ body.max_tokens = max_tokens;
+ if (tools)
+ body.tools = tools;
+ if (response_format)
+ body.response_format = response_format;
+ if (process.env.NODELLM_DEBUG === "true") {
+ console.log(`[OpenAI Request] ${JSON.stringify(body, null, 2)}`);
  }
- if (request.max_tokens) {
- body.max_tokens = request.max_tokens;
- }
- if (request.tools) {
- body.tools = request.tools;
- }
- if (request.response_format) {
- body.response_format = request.response_format;
- }
- const response = await fetch(`${this.baseUrl}/chat/completions`, {
+ const response = await fetch(buildUrl(this.baseUrl, '/chat/completions'), {
  method: "POST",
  headers: {
  "Authorization": `Bearer ${this.apiKey}`,
@@ -52,6 +53,7 @@ export class OpenAIChat {
  if (!content && !tool_calls) {
  throw new Error("OpenAI returned empty response");
  }
- return { content, tool_calls, usage };
+ const calculatedUsage = usage ? ModelRegistry.calculateCost(usage, model, "openai") : undefined;
+ return { content, tool_calls, usage: calculatedUsage };
  }
  }
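Both the OpenAI and Gemini chat paths now pass raw token counts through ModelRegistry.calculateCost before returning usage. The registry implementation itself is not part of the shown hunks; a plausible sketch of such a calculation from per-million pricing (the field names are assumptions drawn from the pricing objects above, not the package's exact shape) looks like:

    // Assumed sketch of usage-cost annotation; the real ModelRegistry.calculateCost may differ.
    interface UsageSketch { input_tokens: number; output_tokens: number; total_tokens: number; }
    interface PricingSketch { input_per_million: number; output_per_million: number; }

    function annotateCost(usage: UsageSketch, pricing: PricingSketch) {
      const inputCost = (usage.input_tokens / 1_000_000) * pricing.input_per_million;
      const outputCost = (usage.output_tokens / 1_000_000) * pricing.output_per_million;
      return { ...usage, input_cost: inputCost, output_cost: outputCost, total_cost: inputCost + outputCost };
    }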
@@ -1 +1 @@
- {"version":3,"file":"Embedding.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Embedding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAKtE,qBAAa,eAAe;IAExB,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,MAAM;gBADN,OAAO,EAAE,MAAM,EACf,MAAM,EAAE,MAAM;IAG3B,OAAO,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,iBAAiB,CAAC;CA8CrE"}
+ {"version":3,"file":"Embedding.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Embedding.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAMtE,qBAAa,eAAe;IAExB,OAAO,CAAC,QAAQ,CAAC,OAAO;IACxB,OAAO,CAAC,QAAQ,CAAC,MAAM;gBADN,OAAO,EAAE,MAAM,EACf,MAAM,EAAE,MAAM;IAG3B,OAAO,CAAC,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,iBAAiB,CAAC;CA8CrE"}
@@ -1,6 +1,7 @@
  import { handleOpenAIError } from "./Errors.js";
  import { Capabilities } from "./Capabilities.js";
  import { DEFAULT_MODELS } from "../../constants.js";
+ import { buildUrl } from "./utils.js";
  export class OpenAIEmbedding {
  baseUrl;
  apiKey;
@@ -24,7 +25,7 @@ export class OpenAIEmbedding {
  if (request.user) {
  body.user = request.user;
  }
- const response = await fetch(`${this.baseUrl}/embeddings`, {
+ const response = await fetch(buildUrl(this.baseUrl, '/embeddings'), {
  method: "POST",
  headers: {
  "Authorization": `Bearer ${this.apiKey}`,
@@ -1 +1 @@
- {"version":3,"file":"Image.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Image.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAG7D,qBAAa,WAAW;IACV,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;CAmC7D"}
+ {"version":3,"file":"Image.d.ts","sourceRoot":"","sources":["../../../src/providers/openai/Image.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAI7D,qBAAa,WAAW;IACV,OAAO,CAAC,QAAQ,CAAC,OAAO;IAAU,OAAO,CAAC,QAAQ,CAAC,MAAM;gBAAxC,OAAO,EAAE,MAAM,EAAmB,MAAM,EAAE,MAAM;IAEvE,OAAO,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,aAAa,CAAC;CAmC7D"}
@@ -1,4 +1,5 @@
  import { handleOpenAIError } from "./Errors.js";
+ import { buildUrl } from "./utils.js";
  export class OpenAIImage {
  baseUrl;
  apiKey;
@@ -14,7 +15,7 @@ export class OpenAIImage {
  quality: request.quality || "standard",
  n: request.n || 1,
  };
- const response = await fetch(`${this.baseUrl}/images/generations`, {
+ const response = await fetch(buildUrl(this.baseUrl, '/images/generations'), {
  method: "POST",
  headers: {
  "Authorization": `Bearer ${this.apiKey}`,