modelfusion 0.123.0 → 0.125.0

Files changed (72)
  1. package/CHANGELOG.md +47 -1
  2. package/README.md +9 -22
  3. package/model-function/generate-text/PromptTemplateFullTextModel.cjs +0 -11
  4. package/model-function/generate-text/PromptTemplateFullTextModel.d.ts +0 -1
  5. package/model-function/generate-text/PromptTemplateFullTextModel.js +0 -11
  6. package/model-function/generate-text/PromptTemplateTextGenerationModel.cjs +0 -11
  7. package/model-function/generate-text/PromptTemplateTextGenerationModel.d.ts +0 -1
  8. package/model-function/generate-text/PromptTemplateTextGenerationModel.js +0 -11
  9. package/model-function/generate-text/PromptTemplateTextStreamingModel.cjs +0 -11
  10. package/model-function/generate-text/PromptTemplateTextStreamingModel.d.ts +0 -1
  11. package/model-function/generate-text/PromptTemplateTextStreamingModel.js +0 -11
  12. package/model-function/generate-text/TextGenerationModel.d.ts +31 -1
  13. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +3 -3
  14. package/model-provider/cohere/CohereTextGenerationModel.cjs +6 -9
  15. package/model-provider/cohere/CohereTextGenerationModel.d.ts +4 -9
  16. package/model-provider/cohere/CohereTextGenerationModel.js +7 -10
  17. package/model-provider/cohere/CohereTokenizer.d.ts +3 -3
  18. package/model-provider/llamacpp/LlamaCppCompletionModel.d.ts +2 -2
  19. package/model-provider/mistral/MistralChatModel.cjs +0 -9
  20. package/model-provider/mistral/MistralChatModel.d.ts +2 -11
  21. package/model-provider/mistral/MistralChatModel.js +0 -9
  22. package/model-provider/mistral/index.cjs +1 -2
  23. package/model-provider/mistral/index.d.ts +0 -1
  24. package/model-provider/mistral/index.js +0 -1
  25. package/model-provider/ollama/OllamaChatModel.cjs +0 -9
  26. package/model-provider/ollama/OllamaChatModel.d.ts +2 -11
  27. package/model-provider/ollama/OllamaChatModel.js +0 -9
  28. package/model-provider/ollama/OllamaCompletionModel.d.ts +2 -2
  29. package/model-provider/ollama/index.cjs +0 -1
  30. package/model-provider/ollama/index.d.ts +0 -1
  31. package/model-provider/ollama/index.js +0 -1
  32. package/model-provider/openai/AbstractOpenAIChatModel.cjs +5 -3
  33. package/model-provider/openai/AbstractOpenAIChatModel.d.ts +5 -5
  34. package/model-provider/openai/AbstractOpenAIChatModel.js +5 -3
  35. package/model-provider/openai/AbstractOpenAITextEmbeddingModel.cjs +82 -0
  36. package/model-provider/openai/AbstractOpenAITextEmbeddingModel.d.ts +91 -0
  37. package/model-provider/openai/AbstractOpenAITextEmbeddingModel.js +78 -0
  38. package/model-provider/openai/OpenAIChatFunctionCallStructureGenerationModel.d.ts +1 -1
  39. package/model-provider/openai/OpenAIChatModel.cjs +0 -9
  40. package/model-provider/openai/OpenAIChatModel.d.ts +2 -11
  41. package/model-provider/openai/OpenAIChatModel.js +0 -9
  42. package/model-provider/openai/OpenAICompletionModel.cjs +3 -6
  43. package/model-provider/openai/OpenAICompletionModel.d.ts +3 -8
  44. package/model-provider/openai/OpenAICompletionModel.js +4 -7
  45. package/model-provider/openai/OpenAIFacade.cjs +18 -18
  46. package/model-provider/openai/OpenAIFacade.d.ts +18 -18
  47. package/model-provider/openai/OpenAIFacade.js +18 -18
  48. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +3 -68
  49. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +4 -82
  50. package/model-provider/openai/OpenAITextEmbeddingModel.js +3 -68
  51. package/model-provider/openai/index.cjs +2 -2
  52. package/model-provider/openai/index.d.ts +1 -1
  53. package/model-provider/openai/index.js +1 -1
  54. package/model-provider/openai-compatible/OpenAICompatibleChatModel.cjs +0 -9
  55. package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts +4 -11
  56. package/model-provider/openai-compatible/OpenAICompatibleChatModel.js +0 -9
  57. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.cjs +10 -0
  58. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.d.ts +10 -2
  59. package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.js +10 -0
  60. package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs +40 -7
  61. package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts +35 -6
  62. package/model-provider/openai-compatible/OpenAICompatibleFacade.js +37 -6
  63. package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.cjs +27 -0
  64. package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.d.ts +18 -0
  65. package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.js +23 -0
  66. package/model-provider/openai-compatible/PerplexityApiConfiguration.cjs +33 -0
  67. package/model-provider/openai-compatible/PerplexityApiConfiguration.d.ts +13 -0
  68. package/model-provider/openai-compatible/PerplexityApiConfiguration.js +29 -0
  69. package/model-provider/openai-compatible/index.cjs +2 -0
  70. package/model-provider/openai-compatible/index.d.ts +2 -0
  71. package/model-provider/openai-compatible/index.js +2 -0
  72. package/package.json +1 -1
package/model-provider/openai-compatible/OpenAICompatibleChatModel.d.ts CHANGED
@@ -1,13 +1,15 @@
+ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { FlexibleStructureFromTextPromptTemplate, StructureFromTextPromptTemplate } from "../../model-function/generate-structure/StructureFromTextPromptTemplate.js";
  import { StructureFromTextStreamingModel } from "../../model-function/generate-structure/StructureFromTextStreamingModel.js";
  import { PromptTemplateFullTextModel } from "../../model-function/generate-text/PromptTemplateFullTextModel.js";
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
+ import { TextStreamingBaseModel, TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
  import { ToolCallGenerationModel } from "../../tool/generate-tool-call/ToolCallGenerationModel.js";
  import { ToolCallsGenerationModel } from "../../tool/generate-tool-calls/ToolCallsGenerationModel.js";
  import { AbstractOpenAIChatModel, AbstractOpenAIChatSettings, OpenAIChatPrompt } from "../openai/AbstractOpenAIChatModel.js";
  import { OpenAICompatibleProviderName } from "./OpenAICompatibleProviderName.js";
  export interface OpenAICompatibleChatSettings extends AbstractOpenAIChatSettings {
+ api: ApiConfiguration;
  provider?: OpenAICompatibleProviderName;
  }
  /**
@@ -18,7 +20,7 @@ export interface OpenAICompatibleChatSettings extends AbstractOpenAIChatSettings
  *
  * @see https://platform.openai.com/docs/api-reference/chat/create
  */
- export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<OpenAICompatibleChatSettings> implements TextStreamingModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallsGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings> {
+ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<OpenAICompatibleChatSettings> implements TextStreamingBaseModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings>, ToolCallsGenerationModel<OpenAIChatPrompt, OpenAICompatibleChatSettings> {
  constructor(settings: OpenAICompatibleChatSettings);
  get provider(): OpenAICompatibleProviderName;
  get modelName(): string;
@@ -27,17 +29,8 @@ export declare class OpenAICompatibleChatModel extends AbstractOpenAIChatModel<O
  readonly countPromptTokens: undefined;
  get settingsForEvent(): Partial<OpenAICompatibleChatSettings>;
  asStructureGenerationModel<INPUT_PROMPT, OpenAIChatPrompt>(promptTemplate: StructureFromTextPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt> | FlexibleStructureFromTextPromptTemplate<INPUT_PROMPT, unknown>): StructureFromTextStreamingModel<INPUT_PROMPT, unknown, TextStreamingModel<unknown, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>> | StructureFromTextStreamingModel<INPUT_PROMPT, OpenAIChatPrompt, TextStreamingModel<OpenAIChatPrompt, import("../../model-function/generate-text/TextGenerationModel.js").TextGenerationModelSettings>>;
- /**
- * Returns this model with a text prompt template.
- */
  withTextPrompt(): PromptTemplateFullTextModel<string, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
- /**
- * Returns this model with an instruction prompt template.
- */
  withInstructionPrompt(): PromptTemplateFullTextModel<import("../../index.js").InstructionPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
- /**
- * Returns this model with a chat prompt template.
- */
  withChatPrompt(): PromptTemplateFullTextModel<import("../../index.js").ChatPrompt, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, OpenAIChatPrompt>): PromptTemplateFullTextModel<INPUT_PROMPT, OpenAIChatPrompt, OpenAICompatibleChatSettings, this>;
  withJsonOutput(): this;
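
The notable changes for `OpenAICompatibleChatModel` are the required `api: ApiConfiguration` setting and the switch to `TextStreamingBaseModel`. A minimal usage sketch, assuming the Together.ai configuration from this package and an illustrative model id (neither is prescribed by this diff):

```ts
import { generateText, openaicompatible } from "modelfusion";

// `api` is now a required setting on OpenAICompatibleChatSettings.
const model = openaicompatible.ChatTextGenerator({
  api: openaicompatible.TogetherAIApi(), // any OpenAI-compatible ApiConfiguration works here
  model: "mistralai/Mixtral-8x7B-Instruct-v0.1", // illustrative model id
  maxGenerationTokens: 500,
});

// withTextPrompt() maps a plain string prompt onto the OpenAI chat prompt format.
const text = await generateText({
  model: model.withTextPrompt(),
  prompt: "Write a short story about a robot learning to love:",
});
```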
package/model-provider/openai-compatible/OpenAICompatibleChatModel.js CHANGED
@@ -65,21 +65,12 @@ export class OpenAICompatibleChatModel extends AbstractOpenAIChatModel {
  template: promptTemplate,
  });
  }
- /**
- * Returns this model with a text prompt template.
- */
  withTextPrompt() {
  return this.withPromptTemplate(text());
  }
- /**
- * Returns this model with an instruction prompt template.
- */
  withInstructionPrompt() {
  return this.withPromptTemplate(instruction());
  }
- /**
- * Returns this model with a chat prompt template.
- */
  withChatPrompt() {
  return this.withPromptTemplate(chat());
  }
package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.cjs CHANGED
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.OpenAICompatibleCompletionModel = void 0;
  const PromptTemplateTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptTemplateTextStreamingModel.cjs");
  const TextGenerationModel_js_1 = require("../../model-function/generate-text/TextGenerationModel.cjs");
+ const TextPromptTemplate_js_1 = require("../../model-function/generate-text/prompt-template/TextPromptTemplate.cjs");
  const AbstractOpenAICompletionModel_js_1 = require("../openai/AbstractOpenAICompletionModel.cjs");
  /**
  * Create a text generation model that calls an API that is compatible with OpenAI's completion API.
@@ -56,6 +57,15 @@ class OpenAICompatibleCompletionModel extends AbstractOpenAICompletionModel_js_1
  ];
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
  }
+ withTextPrompt() {
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.text)());
+ }
+ withInstructionPrompt() {
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.instruction)());
+ }
+ withChatPrompt(options) {
+ return this.withPromptTemplate((0, TextPromptTemplate_js_1.chat)(options));
+ }
  withPromptTemplate(promptTemplate) {
  return new PromptTemplateTextStreamingModel_js_1.PromptTemplateTextStreamingModel({
  model: this.withSettings({
package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.d.ts CHANGED
@@ -1,9 +1,11 @@
+ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
- import { TextStreamingModel } from "../../model-function/generate-text/TextGenerationModel.js";
+ import { TextStreamingBaseModel } from "../../model-function/generate-text/TextGenerationModel.js";
  import { TextGenerationPromptTemplate } from "../../model-function/generate-text/TextGenerationPromptTemplate.js";
  import { AbstractOpenAICompletionModel, AbstractOpenAICompletionModelSettings } from "../openai/AbstractOpenAICompletionModel.js";
  import { OpenAICompatibleProviderName } from "./OpenAICompatibleProviderName.js";
  export interface OpenAICompatibleCompletionModelSettings extends AbstractOpenAICompletionModelSettings {
+ api: ApiConfiguration;
  provider?: OpenAICompatibleProviderName;
  }
  /**
@@ -14,7 +16,7 @@ export interface OpenAICompatibleCompletionModelSettings extends AbstractOpenAIC
  *
  * @see https://platform.openai.com/docs/api-reference/completions/create
  */
- export declare class OpenAICompatibleCompletionModel extends AbstractOpenAICompletionModel<OpenAICompatibleCompletionModelSettings> implements TextStreamingModel<string, OpenAICompatibleCompletionModelSettings> {
+ export declare class OpenAICompatibleCompletionModel extends AbstractOpenAICompletionModel<OpenAICompatibleCompletionModelSettings> implements TextStreamingBaseModel<string, OpenAICompatibleCompletionModelSettings> {
  constructor(settings: OpenAICompatibleCompletionModelSettings);
  get provider(): OpenAICompatibleProviderName;
  get modelName(): string;
@@ -22,6 +24,12 @@ export declare class OpenAICompatibleCompletionModel extends AbstractOpenAICompl
  readonly tokenizer: undefined;
  readonly countPromptTokens: undefined;
  get settingsForEvent(): Partial<OpenAICompatibleCompletionModelSettings>;
+ withTextPrompt(): PromptTemplateTextStreamingModel<string, string, OpenAICompatibleCompletionModelSettings, this>;
+ withInstructionPrompt(): PromptTemplateTextStreamingModel<import("../../index.js").InstructionPrompt, string, OpenAICompatibleCompletionModelSettings, this>;
+ withChatPrompt(options?: {
+ user?: string;
+ assistant?: string;
+ }): PromptTemplateTextStreamingModel<import("../../index.js").ChatPrompt, string, OpenAICompatibleCompletionModelSettings, this>;
  withPromptTemplate<INPUT_PROMPT>(promptTemplate: TextGenerationPromptTemplate<INPUT_PROMPT, string>): PromptTemplateTextStreamingModel<INPUT_PROMPT, string, OpenAICompatibleCompletionModelSettings, this>;
  withSettings(additionalSettings: Partial<OpenAICompatibleCompletionModelSettings>): this;
  }
package/model-provider/openai-compatible/OpenAICompatibleCompletionModel.js CHANGED
@@ -1,5 +1,6 @@
  import { PromptTemplateTextStreamingModel } from "../../model-function/generate-text/PromptTemplateTextStreamingModel.js";
  import { textGenerationModelProperties, } from "../../model-function/generate-text/TextGenerationModel.js";
+ import { chat, instruction, text, } from "../../model-function/generate-text/prompt-template/TextPromptTemplate.js";
  import { AbstractOpenAICompletionModel, } from "../openai/AbstractOpenAICompletionModel.js";
  /**
  * Create a text generation model that calls an API that is compatible with OpenAI's completion API.
@@ -53,6 +54,15 @@ export class OpenAICompatibleCompletionModel extends AbstractOpenAICompletionMod
  ];
  return Object.fromEntries(Object.entries(this.settings).filter(([key]) => eventSettingProperties.includes(key)));
  }
+ withTextPrompt() {
+ return this.withPromptTemplate(text());
+ }
+ withInstructionPrompt() {
+ return this.withPromptTemplate(instruction());
+ }
+ withChatPrompt(options) {
+ return this.withPromptTemplate(chat(options));
+ }
  withPromptTemplate(promptTemplate) {
  return new PromptTemplateTextStreamingModel({
  model: this.withSettings({
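
`OpenAICompatibleCompletionModel` now gets `withTextPrompt`, `withInstructionPrompt`, and `withChatPrompt` shortcuts backed by the generic `TextPromptTemplate` helpers. A sketch of the new `withChatPrompt(options)` overload; the api configuration and model id are illustrative and not taken from this diff:

```ts
import { generateText, openaicompatible } from "modelfusion";

const completionModel = openaicompatible.CompletionTextGenerator({
  api: openaicompatible.FireworksAIApi(), // `api` is required as of this release
  model: "accounts/fireworks/models/llama-v2-7b", // illustrative model id
  maxGenerationTokens: 256,
});

// Maps a ChatPrompt onto the plain-text completion prompt,
// using custom prefixes for user and assistant turns.
const chatModel = completionModel.withChatPrompt({
  user: "User",
  assistant: "Assistant",
});

const reply = await generateText({
  model: chatModel,
  prompt: {
    system: "You are a terse assistant.",
    messages: [{ role: "user", content: "Say hello in one word." }],
  },
});
```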
package/model-provider/openai-compatible/OpenAICompatibleFacade.cjs CHANGED
@@ -1,9 +1,11 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.ChatTextGenerator = exports.CompletionTextGenerator = exports.TogetherAIApi = exports.FireworksAIApi = void 0;
+ exports.TextEmbedder = exports.ChatTextGenerator = exports.CompletionTextGenerator = exports.TogetherAIApi = exports.PerplexityApi = exports.FireworksAIApi = void 0;
  const FireworksAIApiConfiguration_js_1 = require("./FireworksAIApiConfiguration.cjs");
  const OpenAICompatibleChatModel_js_1 = require("./OpenAICompatibleChatModel.cjs");
  const OpenAICompatibleCompletionModel_js_1 = require("./OpenAICompatibleCompletionModel.cjs");
+ const OpenAICompatibleTextEmbeddingModel_js_1 = require("./OpenAICompatibleTextEmbeddingModel.cjs");
+ const PerplexityApiConfiguration_js_1 = require("./PerplexityApiConfiguration.cjs");
  const TogetherAIApiConfiguration_js_1 = require("./TogetherAIApiConfiguration.cjs");
  /**
  * Configuration for the Fireworks.ai API.
@@ -16,6 +18,17 @@ function FireworksAIApi(settings = {}) {
  return new FireworksAIApiConfiguration_js_1.FireworksAIApiConfiguration(settings);
  }
  exports.FireworksAIApi = FireworksAIApi;
+ /**
+ * Configuration for the Perplexity API.
+ *
+ * It calls the API at https://api.perplexity.ai/ and uses the `PERPLEXITY_API_KEY` api key environment variable.
+ *
+ * @see https://docs.perplexity.ai/reference/post_chat_completions
+ */
+ function PerplexityApi(settings = {}) {
+ return new PerplexityApiConfiguration_js_1.PerplexityApiConfiguration(settings);
+ }
+ exports.PerplexityApi = PerplexityApi;
  /**
  * Configuration for the Together.ai API.
  *
@@ -43,10 +56,10 @@ exports.TogetherAIApi = TogetherAIApi;
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * "Write a short story about a robot learning to love:"
- * );
+ * prompt: "Write a short story about a robot learning to love:"
+ * });
  * ```
  */
  function CompletionTextGenerator(settings) {
@@ -69,17 +82,37 @@ exports.CompletionTextGenerator = CompletionTextGenerator;
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * [
+ * prompt: [
  * openai.ChatMessage.user(
  * "Write a short story about a robot learning to love:"
  * ),
  * ]
- * );
+ * });
  * ```
  */
  function ChatTextGenerator(settings) {
  return new OpenAICompatibleChatModel_js_1.OpenAICompatibleChatModel(settings);
  }
  exports.ChatTextGenerator = ChatTextGenerator;
+ /**
+ * Create a text embedding model that calls the OpenAI embedding API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/embeddings
+ *
+ * @example
+ * const embeddings = await embedMany({
+ * model: openaicompatible.TextEmbedder({ model: "provider-specific-model-name" }),
+ * values: [
+ * "At first, Nox didn't know what to do with the pup.",
+ * "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ * ]
+ * });
+ *
+ * @returns A new instance of {@link OpenAITextEmbeddingModel}.
+ */
+ function TextEmbedder(settings) {
+ return new OpenAICompatibleTextEmbeddingModel_js_1.OpenAICompatibleTextEmbeddingModel(settings);
+ }
+ exports.TextEmbedder = TextEmbedder;
package/model-provider/openai-compatible/OpenAICompatibleFacade.d.ts CHANGED
@@ -2,6 +2,8 @@ import { PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseU
  import { FireworksAIApiConfiguration } from "./FireworksAIApiConfiguration.js";
  import { OpenAICompatibleChatModel, OpenAICompatibleChatSettings } from "./OpenAICompatibleChatModel.js";
  import { OpenAICompatibleCompletionModel } from "./OpenAICompatibleCompletionModel.js";
+ import { OpenAICompatibleTextEmbeddingModel, OpenAICompatibleTextEmbeddingModelSettings } from "./OpenAICompatibleTextEmbeddingModel.js";
+ import { PerplexityApiConfiguration } from "./PerplexityApiConfiguration.js";
  import { TogetherAIApiConfiguration } from "./TogetherAIApiConfiguration.js";
  /**
  * Configuration for the Fireworks.ai API.
@@ -13,6 +15,16 @@ import { TogetherAIApiConfiguration } from "./TogetherAIApiConfiguration.js";
  export declare function FireworksAIApi(settings?: PartialBaseUrlPartsApiConfigurationOptions & {
  apiKey?: string;
  }): FireworksAIApiConfiguration;
+ /**
+ * Configuration for the Perplexity API.
+ *
+ * It calls the API at https://api.perplexity.ai/ and uses the `PERPLEXITY_API_KEY` api key environment variable.
+ *
+ * @see https://docs.perplexity.ai/reference/post_chat_completions
+ */
+ export declare function PerplexityApi(settings?: PartialBaseUrlPartsApiConfigurationOptions & {
+ apiKey?: string;
+ }): PerplexityApiConfiguration;
  /**
  * Configuration for the Together.ai API.
  *
@@ -39,10 +51,10 @@ export declare function TogetherAIApi(settings?: PartialBaseUrlPartsApiConfigura
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * "Write a short story about a robot learning to love:"
- * );
+ * prompt: "Write a short story about a robot learning to love:"
+ * });
  * ```
  */
  export declare function CompletionTextGenerator(settings: OpenAICompatibleChatSettings): OpenAICompatibleCompletionModel;
@@ -62,14 +74,31 @@ export declare function CompletionTextGenerator(settings: OpenAICompatibleChatSe
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * [
+ * prompt: [
  * openai.ChatMessage.user(
  * "Write a short story about a robot learning to love:"
  * ),
  * ]
- * );
+ * });
  * ```
  */
  export declare function ChatTextGenerator(settings: OpenAICompatibleChatSettings): OpenAICompatibleChatModel;
+ /**
+ * Create a text embedding model that calls the OpenAI embedding API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/embeddings
+ *
+ * @example
+ * const embeddings = await embedMany({
+ * model: openaicompatible.TextEmbedder({ model: "provider-specific-model-name" }),
+ * values: [
+ * "At first, Nox didn't know what to do with the pup.",
+ * "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ * ]
+ * });
+ *
+ * @returns A new instance of {@link OpenAITextEmbeddingModel}.
+ */
+ export declare function TextEmbedder(settings: OpenAICompatibleTextEmbeddingModelSettings): OpenAICompatibleTextEmbeddingModel;
package/model-provider/openai-compatible/OpenAICompatibleFacade.js CHANGED
@@ -1,6 +1,8 @@
  import { FireworksAIApiConfiguration } from "./FireworksAIApiConfiguration.js";
  import { OpenAICompatibleChatModel, } from "./OpenAICompatibleChatModel.js";
  import { OpenAICompatibleCompletionModel } from "./OpenAICompatibleCompletionModel.js";
+ import { OpenAICompatibleTextEmbeddingModel, } from "./OpenAICompatibleTextEmbeddingModel.js";
+ import { PerplexityApiConfiguration } from "./PerplexityApiConfiguration.js";
  import { TogetherAIApiConfiguration } from "./TogetherAIApiConfiguration.js";
  /**
  * Configuration for the Fireworks.ai API.
@@ -12,6 +14,16 @@ import { TogetherAIApiConfiguration } from "./TogetherAIApiConfiguration.js";
  export function FireworksAIApi(settings = {}) {
  return new FireworksAIApiConfiguration(settings);
  }
+ /**
+ * Configuration for the Perplexity API.
+ *
+ * It calls the API at https://api.perplexity.ai/ and uses the `PERPLEXITY_API_KEY` api key environment variable.
+ *
+ * @see https://docs.perplexity.ai/reference/post_chat_completions
+ */
+ export function PerplexityApi(settings = {}) {
+ return new PerplexityApiConfiguration(settings);
+ }
  /**
  * Configuration for the Together.ai API.
  *
@@ -38,10 +50,10 @@ export function TogetherAIApi(settings = {}) {
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * "Write a short story about a robot learning to love:"
- * );
+ * prompt: "Write a short story about a robot learning to love:"
+ * });
  * ```
  */
  export function CompletionTextGenerator(settings) {
@@ -63,16 +75,35 @@ export function CompletionTextGenerator(settings) {
  * maxGenerationTokens: 500,
  * });
  *
- * const text = await generateText(
+ * const text = await generateText({
  * model,
- * [
+ * prompt: [
  * openai.ChatMessage.user(
  * "Write a short story about a robot learning to love:"
  * ),
  * ]
- * );
+ * });
  * ```
  */
  export function ChatTextGenerator(settings) {
  return new OpenAICompatibleChatModel(settings);
  }
+ /**
+ * Create a text embedding model that calls the OpenAI embedding API.
+ *
+ * @see https://platform.openai.com/docs/api-reference/embeddings
+ *
+ * @example
+ * const embeddings = await embedMany({
+ * model: openaicompatible.TextEmbedder({ model: "provider-specific-model-name" }),
+ * values: [
+ * "At first, Nox didn't know what to do with the pup.",
+ * "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ * ]
+ * });
+ *
+ * @returns A new instance of {@link OpenAITextEmbeddingModel}.
+ */
+ export function TextEmbedder(settings) {
+ return new OpenAICompatibleTextEmbeddingModel(settings);
+ }
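
The facade now exposes `PerplexityApi` next to `FireworksAIApi` and `TogetherAIApi`, plus the new `TextEmbedder`. A sketch of pointing the chat generator at Perplexity; the model id is illustrative and not part of this diff:

```ts
import { generateText, openaicompatible } from "modelfusion";

// PerplexityApi() reads the key from the PERPLEXITY_API_KEY environment variable by default.
const answer = await generateText({
  model: openaicompatible
    .ChatTextGenerator({
      api: openaicompatible.PerplexityApi(),
      model: "pplx-70b-online", // illustrative Perplexity model id
    })
    .withTextPrompt(),
  prompt: "Write a short story about a robot learning to love:",
});
```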
package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.cjs ADDED
@@ -0,0 +1,27 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.OpenAICompatibleTextEmbeddingModel = void 0;
+ const AbstractOpenAITextEmbeddingModel_js_1 = require("../openai/AbstractOpenAITextEmbeddingModel.cjs");
+ class OpenAICompatibleTextEmbeddingModel extends AbstractOpenAITextEmbeddingModel_js_1.AbstractOpenAITextEmbeddingModel {
+ constructor(settings) {
+ super(settings);
+ }
+ get provider() {
+ return this.settings.provider ?? "openaicompatible";
+ }
+ get modelName() {
+ return this.settings.model;
+ }
+ get embeddingDimensions() {
+ return this.settings.embeddingDimensions;
+ }
+ get settingsForEvent() {
+ return {
+ embeddingDimensions: this.settings.embeddingDimensions,
+ };
+ }
+ withSettings(additionalSettings) {
+ return new OpenAICompatibleTextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
+ }
+ }
+ exports.OpenAICompatibleTextEmbeddingModel = OpenAICompatibleTextEmbeddingModel;
package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.d.ts ADDED
@@ -0,0 +1,18 @@
+ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+ import { EmbeddingModel } from "../../model-function/embed/EmbeddingModel.js";
+ import { AbstractOpenAITextEmbeddingModel, AbstractOpenAITextEmbeddingModelSettings } from "../openai/AbstractOpenAITextEmbeddingModel.js";
+ import { OpenAICompatibleProviderName } from "./OpenAICompatibleProviderName.js";
+ export interface OpenAICompatibleTextEmbeddingModelSettings extends AbstractOpenAITextEmbeddingModelSettings {
+ api: ApiConfiguration;
+ provider?: OpenAICompatibleProviderName;
+ model: string;
+ embeddingDimensions?: number;
+ }
+ export declare class OpenAICompatibleTextEmbeddingModel extends AbstractOpenAITextEmbeddingModel<OpenAICompatibleTextEmbeddingModelSettings> implements EmbeddingModel<string, OpenAICompatibleTextEmbeddingModelSettings> {
+ constructor(settings: OpenAICompatibleTextEmbeddingModelSettings);
+ get provider(): OpenAICompatibleProviderName;
+ get modelName(): string;
+ get embeddingDimensions(): number | undefined;
+ get settingsForEvent(): Partial<OpenAICompatibleTextEmbeddingModelSettings>;
+ withSettings(additionalSettings: OpenAICompatibleTextEmbeddingModelSettings): this;
+ }
package/model-provider/openai-compatible/OpenAICompatibleTextEmbeddingModel.js ADDED
@@ -0,0 +1,23 @@
+ import { AbstractOpenAITextEmbeddingModel, } from "../openai/AbstractOpenAITextEmbeddingModel.js";
+ export class OpenAICompatibleTextEmbeddingModel extends AbstractOpenAITextEmbeddingModel {
+ constructor(settings) {
+ super(settings);
+ }
+ get provider() {
+ return this.settings.provider ?? "openaicompatible";
+ }
+ get modelName() {
+ return this.settings.model;
+ }
+ get embeddingDimensions() {
+ return this.settings.embeddingDimensions;
+ }
+ get settingsForEvent() {
+ return {
+ embeddingDimensions: this.settings.embeddingDimensions,
+ };
+ }
+ withSettings(additionalSettings) {
+ return new OpenAICompatibleTextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
+ }
+ }
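
`OpenAICompatibleTextEmbeddingModel` is a thin wrapper over `AbstractOpenAITextEmbeddingModel` that adds the provider, model, and dimension settings. A sketch of using it through the facade with `embedMany`; the Together.ai configuration and the placeholder model name are only illustrative:

```ts
import { embedMany, openaicompatible } from "modelfusion";

const embeddings = await embedMany({
  model: openaicompatible.TextEmbedder({
    api: openaicompatible.TogetherAIApi(), // `api` and `model` are required settings
    model: "provider-specific-model-name", // placeholder, as in the JSDoc example above
  }),
  values: [
    "At first, Nox didn't know what to do with the pup.",
    "He keenly observed and absorbed everything around him.",
  ],
});
```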
package/model-provider/openai-compatible/PerplexityApiConfiguration.cjs ADDED
@@ -0,0 +1,33 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.PerplexityApiConfiguration = void 0;
+ const BaseUrlApiConfiguration_js_1 = require("../../core/api/BaseUrlApiConfiguration.cjs");
+ const loadApiKey_js_1 = require("../../core/api/loadApiKey.cjs");
+ /**
+ * Configuration for the Perplexity API.
+ *
+ * It calls the API at https://api.perplexity.ai/ and uses the `PERPLEXITY_API_KEY` api key environment variable.
+ *
+ * @see https://docs.perplexity.ai/reference/post_chat_completions
+ */
+ class PerplexityApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApiConfigurationWithDefaults {
+ constructor(settings = {}) {
+ super({
+ ...settings,
+ headers: {
+ Authorization: `Bearer ${(0, loadApiKey_js_1.loadApiKey)({
+ apiKey: settings.apiKey,
+ environmentVariableName: "PERPLEXITY_API_KEY",
+ description: "Perplexity",
+ })}`,
+ },
+ baseUrlDefaults: {
+ protocol: "https",
+ host: "api.perplexity.ai",
+ port: "443",
+ path: "",
+ },
+ });
+ }
+ }
+ exports.PerplexityApiConfiguration = PerplexityApiConfiguration;
package/model-provider/openai-compatible/PerplexityApiConfiguration.d.ts ADDED
@@ -0,0 +1,13 @@
+ import { BaseUrlApiConfigurationWithDefaults, PartialBaseUrlPartsApiConfigurationOptions } from "../../core/api/BaseUrlApiConfiguration.js";
+ /**
+ * Configuration for the Perplexity API.
+ *
+ * It calls the API at https://api.perplexity.ai/ and uses the `PERPLEXITY_API_KEY` api key environment variable.
+ *
+ * @see https://docs.perplexity.ai/reference/post_chat_completions
+ */
+ export declare class PerplexityApiConfiguration extends BaseUrlApiConfigurationWithDefaults {
+ constructor(settings?: PartialBaseUrlPartsApiConfigurationOptions & {
+ apiKey?: string;
+ });
+ }
package/model-provider/openai-compatible/PerplexityApiConfiguration.js ADDED
@@ -0,0 +1,29 @@
+ import { BaseUrlApiConfigurationWithDefaults, } from "../../core/api/BaseUrlApiConfiguration.js";
+ import { loadApiKey } from "../../core/api/loadApiKey.js";
+ /**
+ * Configuration for the Perplexity API.
+ *
+ * It calls the API at https://api.perplexity.ai/ and uses the `PERPLEXITY_API_KEY` api key environment variable.
+ *
+ * @see https://docs.perplexity.ai/reference/post_chat_completions
+ */
+ export class PerplexityApiConfiguration extends BaseUrlApiConfigurationWithDefaults {
+ constructor(settings = {}) {
+ super({
+ ...settings,
+ headers: {
+ Authorization: `Bearer ${loadApiKey({
+ apiKey: settings.apiKey,
+ environmentVariableName: "PERPLEXITY_API_KEY",
+ description: "Perplexity",
+ })}`,
+ },
+ baseUrlDefaults: {
+ protocol: "https",
+ host: "api.perplexity.ai",
+ port: "443",
+ path: "",
+ },
+ });
+ }
+ }
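
`PerplexityApiConfiguration` resolves its key via `loadApiKey`, so it falls back to the `PERPLEXITY_API_KEY` environment variable when no key is passed. A short sketch of the explicit-key path (the environment variable name used here is hypothetical):

```ts
import { openaicompatible } from "modelfusion";

// Explicit key; omit `apiKey` to fall back to PERPLEXITY_API_KEY.
const api = openaicompatible.PerplexityApi({
  apiKey: process.env.MY_PERPLEXITY_KEY, // hypothetical variable name
});
```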
package/model-provider/openai-compatible/index.cjs CHANGED
@@ -32,4 +32,6 @@ __exportStar(require("./OpenAICompatibleChatModel.cjs"), exports);
  __exportStar(require("./OpenAICompatibleCompletionModel.cjs"), exports);
  exports.openaicompatible = __importStar(require("./OpenAICompatibleFacade.cjs"));
  __exportStar(require("./OpenAICompatibleProviderName.cjs"), exports);
+ __exportStar(require("./OpenAICompatibleTextEmbeddingModel.cjs"), exports);
+ __exportStar(require("./PerplexityApiConfiguration.cjs"), exports);
  __exportStar(require("./TogetherAIApiConfiguration.cjs"), exports);
package/model-provider/openai-compatible/index.d.ts CHANGED
@@ -3,4 +3,6 @@ export * from "./OpenAICompatibleChatModel.js";
  export * from "./OpenAICompatibleCompletionModel.js";
  export * as openaicompatible from "./OpenAICompatibleFacade.js";
  export * from "./OpenAICompatibleProviderName.js";
+ export * from "./OpenAICompatibleTextEmbeddingModel.js";
+ export * from "./PerplexityApiConfiguration.js";
  export * from "./TogetherAIApiConfiguration.js";
package/model-provider/openai-compatible/index.js CHANGED
@@ -3,4 +3,6 @@ export * from "./OpenAICompatibleChatModel.js";
  export * from "./OpenAICompatibleCompletionModel.js";
  export * as openaicompatible from "./OpenAICompatibleFacade.js";
  export * from "./OpenAICompatibleProviderName.js";
+ export * from "./OpenAICompatibleTextEmbeddingModel.js";
+ export * from "./PerplexityApiConfiguration.js";
  export * from "./TogetherAIApiConfiguration.js";
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "modelfusion",
  "description": "The TypeScript library for building AI applications.",
- "version": "0.123.0",
+ "version": "0.125.0",
  "author": "Lars Grammel",
  "license": "MIT",
  "keywords": [