modelfusion 0.55.0 → 0.56.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. package/README.md +1 -1
  2. package/model-function/embed/EmbeddingModel.d.ts +4 -0
  3. package/model-function/embed/embed.cjs +12 -1
  4. package/model-function/embed/embed.js +12 -1
  5. package/model-provider/cohere/CohereTextEmbeddingModel.cjs +6 -0
  6. package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -0
  7. package/model-provider/cohere/CohereTextEmbeddingModel.js +6 -0
  8. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +6 -0
  9. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -0
  10. package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +6 -0
  11. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +6 -7
  12. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +3 -1
  13. package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +6 -7
  14. package/model-provider/ollama/OllamaError.cjs +3 -9
  15. package/model-provider/ollama/OllamaError.d.ts +3 -13
  16. package/model-provider/ollama/OllamaError.js +2 -8
  17. package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +84 -0
  18. package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +38 -0
  19. package/model-provider/ollama/OllamaTextEmbeddingModel.js +80 -0
  20. package/model-provider/ollama/index.cjs +1 -0
  21. package/model-provider/ollama/index.d.ts +2 -1
  22. package/model-provider/ollama/index.js +1 -0
  23. package/model-provider/openai/OpenAITextEmbeddingModel.cjs +6 -0
  24. package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -0
  25. package/model-provider/openai/OpenAITextEmbeddingModel.js +6 -0
  26. package/package.json +1 -1
package/README.md CHANGED
@@ -290,7 +290,7 @@ const embeddings = await embedMany(
290
290
  );
291
291
  ```
292
292
 
293
- Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Hugging Face](https://modelfusion.dev/integration/model-provider/huggingface)
293
+ Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Ollama](https://modelfusion.dev/integration/model-provider/ollama), [Hugging Face](https://modelfusion.dev/integration/model-provider/huggingface)
294
294
 
295
295
  ### [Tokenize Text](https://modelfusion.dev/guide/function/tokenize-text)
296
296
 
@@ -12,6 +12,10 @@ export interface EmbeddingModel<VALUE, SETTINGS extends EmbeddingModelSettings =
12
12
  * Limit of how many values can be sent in a single API call.
13
13
  */
14
14
  readonly maxValuesPerCall: number | undefined;
15
+ /**
16
+ * True if the model can handle multiple embedding calls in parallel.
17
+ */
18
+ readonly isParallizable: boolean;
15
19
  doEmbedValues(values: VALUE[], options?: FunctionOptions): PromiseLike<{
16
20
  response: unknown;
17
21
  embeddings: Vector[];
@@ -41,7 +41,18 @@ function embedMany(model, values, options) {
41
41
  valueGroups.push(values.slice(i, i + maxValuesPerCall));
42
42
  }
43
43
  }
44
- const responses = await Promise.all(valueGroups.map((valueGroup) => model.doEmbedValues(valueGroup, options)));
44
+ // call the model for each group:
45
+ let responses;
46
+ if (model.isParallizable) {
47
+ responses = await Promise.all(valueGroups.map((valueGroup) => model.doEmbedValues(valueGroup, options)));
48
+ }
49
+ else {
50
+ responses = [];
51
+ for (const valueGroup of valueGroups) {
52
+ const response = await model.doEmbedValues(valueGroup, options);
53
+ responses.push(response);
54
+ }
55
+ }
45
56
  const rawResponses = responses.map((response) => response.response);
46
57
  const embeddings = [];
47
58
  for (const response of responses) {
@@ -38,7 +38,18 @@ export function embedMany(model, values, options) {
38
38
  valueGroups.push(values.slice(i, i + maxValuesPerCall));
39
39
  }
40
40
  }
41
- const responses = await Promise.all(valueGroups.map((valueGroup) => model.doEmbedValues(valueGroup, options)));
41
+ // call the model for each group:
42
+ let responses;
43
+ if (model.isParallizable) {
44
+ responses = await Promise.all(valueGroups.map((valueGroup) => model.doEmbedValues(valueGroup, options)));
45
+ }
46
+ else {
47
+ responses = [];
48
+ for (const valueGroup of valueGroups) {
49
+ const response = await model.doEmbedValues(valueGroup, options);
50
+ responses.push(response);
51
+ }
52
+ }
42
53
  const rawResponses = responses.map((response) => response.response);
43
54
  const embeddings = [];
44
55
  for (const response of responses) {
@@ -51,6 +51,12 @@ class CohereTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
51
51
  writable: true,
52
52
  value: 96
53
53
  });
54
+ Object.defineProperty(this, "isParallizable", {
55
+ enumerable: true,
56
+ configurable: true,
57
+ writable: true,
58
+ value: true
59
+ });
54
60
  Object.defineProperty(this, "embeddingDimensions", {
55
61
  enumerable: true,
56
62
  configurable: true,
@@ -43,6 +43,7 @@ export declare class CohereTextEmbeddingModel extends AbstractModel<CohereTextEm
43
43
  readonly provider: "cohere";
44
44
  get modelName(): "embed-english-light-v2.0" | "embed-english-v2.0" | "embed-multilingual-v2.0";
45
45
  readonly maxValuesPerCall = 96;
46
+ readonly isParallizable = true;
46
47
  readonly embeddingDimensions: number;
47
48
  readonly contextWindowSize: number;
48
49
  private readonly tokenizer;
@@ -48,6 +48,12 @@ export class CohereTextEmbeddingModel extends AbstractModel {
48
48
  writable: true,
49
49
  value: 96
50
50
  });
51
+ Object.defineProperty(this, "isParallizable", {
52
+ enumerable: true,
53
+ configurable: true,
54
+ writable: true,
55
+ value: true
56
+ });
51
57
  Object.defineProperty(this, "embeddingDimensions", {
52
58
  enumerable: true,
53
59
  configurable: true,
@@ -42,6 +42,12 @@ class HuggingFaceTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
42
42
  writable: true,
43
43
  value: void 0
44
44
  });
45
+ Object.defineProperty(this, "isParallizable", {
46
+ enumerable: true,
47
+ configurable: true,
48
+ writable: true,
49
+ value: true
50
+ });
45
51
  Object.defineProperty(this, "contextWindowSize", {
46
52
  enumerable: true,
47
53
  configurable: true,
@@ -38,6 +38,7 @@ export declare class HuggingFaceTextEmbeddingModel extends AbstractModel<Hugging
38
38
  readonly provider = "huggingface";
39
39
  get modelName(): string;
40
40
  readonly maxValuesPerCall: number;
41
+ readonly isParallizable = true;
41
42
  readonly contextWindowSize: undefined;
42
43
  readonly embeddingDimensions: number | undefined;
43
44
  readonly tokenizer: undefined;
@@ -39,6 +39,12 @@ export class HuggingFaceTextEmbeddingModel extends AbstractModel {
39
39
  writable: true,
40
40
  value: void 0
41
41
  });
42
+ Object.defineProperty(this, "isParallizable", {
43
+ enumerable: true,
44
+ configurable: true,
45
+ writable: true,
46
+ value: true
47
+ });
42
48
  Object.defineProperty(this, "contextWindowSize", {
43
49
  enumerable: true,
44
50
  configurable: true,
@@ -29,12 +29,6 @@ class LlamaCppTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
29
29
  writable: true,
30
30
  value: undefined
31
31
  });
32
- Object.defineProperty(this, "embeddingDimensions", {
33
- enumerable: true,
34
- configurable: true,
35
- writable: true,
36
- value: void 0
37
- });
38
32
  Object.defineProperty(this, "tokenizer", {
39
33
  enumerable: true,
40
34
  configurable: true,
@@ -42,11 +36,16 @@ class LlamaCppTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
42
36
  value: void 0
43
37
  });
44
38
  this.tokenizer = new LlamaCppTokenizer_js_1.LlamaCppTokenizer(this.settings.api);
45
- this.embeddingDimensions = this.settings.embeddingDimensions;
46
39
  }
47
40
  get modelName() {
48
41
  return null;
49
42
  }
43
+ get isParallizable() {
44
+ return this.settings.isParallizable ?? false;
45
+ }
46
+ get embeddingDimensions() {
47
+ return this.settings.embeddingDimensions;
48
+ }
50
49
  async tokenize(text) {
51
50
  return this.tokenizer.tokenize(text);
52
51
  }
@@ -6,14 +6,16 @@ import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/emb
6
6
  export interface LlamaCppTextEmbeddingModelSettings extends EmbeddingModelSettings {
7
7
  api?: ApiConfiguration;
8
8
  embeddingDimensions?: number;
9
+ isParallizable?: boolean;
9
10
  }
10
11
  export declare class LlamaCppTextEmbeddingModel extends AbstractModel<LlamaCppTextEmbeddingModelSettings> implements EmbeddingModel<string, LlamaCppTextEmbeddingModelSettings> {
11
12
  constructor(settings?: LlamaCppTextEmbeddingModelSettings);
12
13
  readonly provider: "llamacpp";
13
14
  get modelName(): null;
14
15
  readonly maxValuesPerCall = 1;
16
+ get isParallizable(): boolean;
15
17
  readonly contextWindowSize: undefined;
16
- readonly embeddingDimensions: number | undefined;
18
+ get embeddingDimensions(): number | undefined;
17
19
  private readonly tokenizer;
18
20
  tokenize(text: string): Promise<number[]>;
19
21
  callAPI(texts: Array<string>, options?: FunctionOptions): Promise<LlamaCppTextEmbeddingResponse>;
@@ -26,12 +26,6 @@ export class LlamaCppTextEmbeddingModel extends AbstractModel {
26
26
  writable: true,
27
27
  value: undefined
28
28
  });
29
- Object.defineProperty(this, "embeddingDimensions", {
30
- enumerable: true,
31
- configurable: true,
32
- writable: true,
33
- value: void 0
34
- });
35
29
  Object.defineProperty(this, "tokenizer", {
36
30
  enumerable: true,
37
31
  configurable: true,
@@ -39,11 +33,16 @@ export class LlamaCppTextEmbeddingModel extends AbstractModel {
39
33
  value: void 0
40
34
  });
41
35
  this.tokenizer = new LlamaCppTokenizer(this.settings.api);
42
- this.embeddingDimensions = this.settings.embeddingDimensions;
43
36
  }
44
37
  get modelName() {
45
38
  return null;
46
39
  }
40
+ get isParallizable() {
41
+ return this.settings.isParallizable ?? false;
42
+ }
43
+ get embeddingDimensions() {
44
+ return this.settings.embeddingDimensions;
45
+ }
47
46
  async tokenize(text) {
48
47
  return this.tokenizer.tokenize(text);
49
48
  }
@@ -1,14 +1,9 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.failedOllamaCallResponseHandler = exports.OllamaError = exports.ollamaErrorDataSchema = void 0;
4
- const zod_1 = require("zod");
3
+ exports.failedOllamaCallResponseHandler = exports.OllamaError = void 0;
5
4
  const ApiCallError_js_1 = require("../../core/api/ApiCallError.cjs");
6
- const parseJSON_js_1 = require("../../util/parseJSON.cjs");
7
- exports.ollamaErrorDataSchema = zod_1.z.object({
8
- error: zod_1.z.string(),
9
- });
10
5
  class OllamaError extends ApiCallError_js_1.ApiCallError {
11
- constructor({ data, statusCode, url, requestBodyValues, message = data.error, }) {
6
+ constructor({ statusCode, url, requestBodyValues, message, }) {
12
7
  super({ message, statusCode, requestBodyValues, url });
13
8
  Object.defineProperty(this, "data", {
14
9
  enumerable: true,
@@ -16,7 +11,6 @@ class OllamaError extends ApiCallError_js_1.ApiCallError {
16
11
  writable: true,
17
12
  value: void 0
18
13
  });
19
- this.data = data;
20
14
  }
21
15
  }
22
16
  exports.OllamaError = OllamaError;
@@ -24,6 +18,6 @@ const failedOllamaCallResponseHandler = async ({ response, url, requestBodyValue
24
18
  url,
25
19
  requestBodyValues,
26
20
  statusCode: response.status,
27
- data: (0, parseJSON_js_1.parseJsonWithZod)(await response.text(), exports.ollamaErrorDataSchema),
21
+ message: await response.text(),
28
22
  });
29
23
  exports.failedOllamaCallResponseHandler = failedOllamaCallResponseHandler;
@@ -1,22 +1,12 @@
1
- import { z } from "zod";
2
1
  import { ApiCallError } from "../../core/api/ApiCallError.js";
3
2
  import { ResponseHandler } from "../../core/api/postToApi.js";
4
- export declare const ollamaErrorDataSchema: z.ZodObject<{
5
- error: z.ZodString;
6
- }, "strip", z.ZodTypeAny, {
7
- error: string;
8
- }, {
9
- error: string;
10
- }>;
11
- export type OllamaErrorData = z.infer<typeof ollamaErrorDataSchema>;
12
3
  export declare class OllamaError extends ApiCallError {
13
- readonly data: OllamaErrorData;
14
- constructor({ data, statusCode, url, requestBodyValues, message, }: {
15
- message?: string;
4
+ readonly data: string;
5
+ constructor({ statusCode, url, requestBodyValues, message, }: {
6
+ message: string;
16
7
  statusCode: number;
17
8
  url: string;
18
9
  requestBodyValues: unknown;
19
- data: OllamaErrorData;
20
10
  });
21
11
  }
22
12
  export declare const failedOllamaCallResponseHandler: ResponseHandler<ApiCallError>;
@@ -1,11 +1,6 @@
1
- import { z } from "zod";
2
1
  import { ApiCallError } from "../../core/api/ApiCallError.js";
3
- import { parseJsonWithZod } from "../../util/parseJSON.js";
4
- export const ollamaErrorDataSchema = z.object({
5
- error: z.string(),
6
- });
7
2
  export class OllamaError extends ApiCallError {
8
- constructor({ data, statusCode, url, requestBodyValues, message = data.error, }) {
3
+ constructor({ statusCode, url, requestBodyValues, message, }) {
9
4
  super({ message, statusCode, requestBodyValues, url });
10
5
  Object.defineProperty(this, "data", {
11
6
  enumerable: true,
@@ -13,12 +8,11 @@ export class OllamaError extends ApiCallError {
13
8
  writable: true,
14
9
  value: void 0
15
10
  });
16
- this.data = data;
17
11
  }
18
12
  }
19
13
  export const failedOllamaCallResponseHandler = async ({ response, url, requestBodyValues }) => new OllamaError({
20
14
  url,
21
15
  requestBodyValues,
22
16
  statusCode: response.status,
23
- data: parseJsonWithZod(await response.text(), ollamaErrorDataSchema),
17
+ message: await response.text(),
24
18
  });
@@ -0,0 +1,84 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.OllamaTextEmbeddingModel = void 0;
4
+ const zod_1 = require("zod");
5
+ const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
6
+ const postToApi_js_1 = require("../../core/api/postToApi.cjs");
7
+ const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
8
+ const OllamaApiConfiguration_js_1 = require("./OllamaApiConfiguration.cjs");
9
+ const OllamaError_js_1 = require("./OllamaError.cjs");
10
+ class OllamaTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
11
+ constructor(settings) {
12
+ super({ settings });
13
+ Object.defineProperty(this, "provider", {
14
+ enumerable: true,
15
+ configurable: true,
16
+ writable: true,
17
+ value: "ollama"
18
+ });
19
+ Object.defineProperty(this, "maxValuesPerCall", {
20
+ enumerable: true,
21
+ configurable: true,
22
+ writable: true,
23
+ value: 1
24
+ });
25
+ Object.defineProperty(this, "contextWindowSize", {
26
+ enumerable: true,
27
+ configurable: true,
28
+ writable: true,
29
+ value: undefined
30
+ });
31
+ }
32
+ get modelName() {
33
+ return null;
34
+ }
35
+ get isParallizable() {
36
+ return this.settings.isParallizable ?? false;
37
+ }
38
+ get embeddingDimensions() {
39
+ return this.settings.embeddingDimensions;
40
+ }
41
+ async callAPI(texts, options) {
42
+ if (texts.length > this.maxValuesPerCall) {
43
+ throw new Error(`The Llama.cpp embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
44
+ }
45
+ return (0, callWithRetryAndThrottle_js_1.callWithRetryAndThrottle)({
46
+ retry: this.settings.api?.retry,
47
+ throttle: this.settings.api?.throttle,
48
+ call: async () => callOllamaEmbeddingAPI({
49
+ ...this.settings,
50
+ abortSignal: options?.run?.abortSignal,
51
+ prompt: texts[0],
52
+ }),
53
+ });
54
+ }
55
+ get settingsForEvent() {
56
+ return {
57
+ embeddingDimensions: this.settings.embeddingDimensions,
58
+ };
59
+ }
60
+ async doEmbedValues(texts, options) {
61
+ const response = await this.callAPI(texts, options);
62
+ return {
63
+ response,
64
+ embeddings: [response.embedding],
65
+ };
66
+ }
67
+ withSettings(additionalSettings) {
68
+ return new OllamaTextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
69
+ }
70
+ }
71
+ exports.OllamaTextEmbeddingModel = OllamaTextEmbeddingModel;
72
+ const ollamaTextEmbeddingResponseSchema = zod_1.z.object({
73
+ embedding: zod_1.z.array(zod_1.z.number()),
74
+ });
75
+ async function callOllamaEmbeddingAPI({ api = new OllamaApiConfiguration_js_1.OllamaApiConfiguration(), abortSignal, model, prompt, }) {
76
+ return (0, postToApi_js_1.postJsonToApi)({
77
+ url: api.assembleUrl(`/api/embeddings`),
78
+ headers: api.headers,
79
+ body: { model, prompt },
80
+ failedResponseHandler: OllamaError_js_1.failedOllamaCallResponseHandler,
81
+ successfulResponseHandler: (0, postToApi_js_1.createJsonResponseHandler)(ollamaTextEmbeddingResponseSchema),
82
+ abortSignal,
83
+ });
84
+ }
@@ -0,0 +1,38 @@
1
+ import { z } from "zod";
2
+ import { FunctionOptions } from "../../core/FunctionOptions.js";
3
+ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
4
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
5
+ import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/embed/EmbeddingModel.js";
6
+ export interface OllamaTextEmbeddingModelSettings extends EmbeddingModelSettings {
7
+ api?: ApiConfiguration;
8
+ model: string;
9
+ embeddingDimensions?: number;
10
+ isParallizable?: boolean;
11
+ }
12
+ export declare class OllamaTextEmbeddingModel extends AbstractModel<OllamaTextEmbeddingModelSettings> implements EmbeddingModel<string, OllamaTextEmbeddingModelSettings> {
13
+ constructor(settings: OllamaTextEmbeddingModelSettings);
14
+ readonly provider: "ollama";
15
+ get modelName(): null;
16
+ readonly maxValuesPerCall = 1;
17
+ get isParallizable(): boolean;
18
+ readonly contextWindowSize: undefined;
19
+ get embeddingDimensions(): number | undefined;
20
+ callAPI(texts: Array<string>, options?: FunctionOptions): Promise<OllamaTextEmbeddingResponse>;
21
+ get settingsForEvent(): Partial<OllamaTextEmbeddingModelSettings>;
22
+ doEmbedValues(texts: string[], options?: FunctionOptions): Promise<{
23
+ response: {
24
+ embedding: number[];
25
+ };
26
+ embeddings: number[][];
27
+ }>;
28
+ withSettings(additionalSettings: Partial<OllamaTextEmbeddingModelSettings>): this;
29
+ }
30
+ declare const ollamaTextEmbeddingResponseSchema: z.ZodObject<{
31
+ embedding: z.ZodArray<z.ZodNumber, "many">;
32
+ }, "strip", z.ZodTypeAny, {
33
+ embedding: number[];
34
+ }, {
35
+ embedding: number[];
36
+ }>;
37
+ export type OllamaTextEmbeddingResponse = z.infer<typeof ollamaTextEmbeddingResponseSchema>;
38
+ export {};
@@ -0,0 +1,80 @@
1
+ import { z } from "zod";
2
+ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
3
+ import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
4
+ import { AbstractModel } from "../../model-function/AbstractModel.js";
5
+ import { OllamaApiConfiguration } from "./OllamaApiConfiguration.js";
6
+ import { failedOllamaCallResponseHandler } from "./OllamaError.js";
7
+ export class OllamaTextEmbeddingModel extends AbstractModel {
8
+ constructor(settings) {
9
+ super({ settings });
10
+ Object.defineProperty(this, "provider", {
11
+ enumerable: true,
12
+ configurable: true,
13
+ writable: true,
14
+ value: "ollama"
15
+ });
16
+ Object.defineProperty(this, "maxValuesPerCall", {
17
+ enumerable: true,
18
+ configurable: true,
19
+ writable: true,
20
+ value: 1
21
+ });
22
+ Object.defineProperty(this, "contextWindowSize", {
23
+ enumerable: true,
24
+ configurable: true,
25
+ writable: true,
26
+ value: undefined
27
+ });
28
+ }
29
+ get modelName() {
30
+ return null;
31
+ }
32
+ get isParallizable() {
33
+ return this.settings.isParallizable ?? false;
34
+ }
35
+ get embeddingDimensions() {
36
+ return this.settings.embeddingDimensions;
37
+ }
38
+ async callAPI(texts, options) {
39
+ if (texts.length > this.maxValuesPerCall) {
40
+ throw new Error(`The Llama.cpp embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
41
+ }
42
+ return callWithRetryAndThrottle({
43
+ retry: this.settings.api?.retry,
44
+ throttle: this.settings.api?.throttle,
45
+ call: async () => callOllamaEmbeddingAPI({
46
+ ...this.settings,
47
+ abortSignal: options?.run?.abortSignal,
48
+ prompt: texts[0],
49
+ }),
50
+ });
51
+ }
52
+ get settingsForEvent() {
53
+ return {
54
+ embeddingDimensions: this.settings.embeddingDimensions,
55
+ };
56
+ }
57
+ async doEmbedValues(texts, options) {
58
+ const response = await this.callAPI(texts, options);
59
+ return {
60
+ response,
61
+ embeddings: [response.embedding],
62
+ };
63
+ }
64
+ withSettings(additionalSettings) {
65
+ return new OllamaTextEmbeddingModel(Object.assign({}, this.settings, additionalSettings));
66
+ }
67
+ }
68
+ const ollamaTextEmbeddingResponseSchema = z.object({
69
+ embedding: z.array(z.number()),
70
+ });
71
+ async function callOllamaEmbeddingAPI({ api = new OllamaApiConfiguration(), abortSignal, model, prompt, }) {
72
+ return postJsonToApi({
73
+ url: api.assembleUrl(`/api/embeddings`),
74
+ headers: api.headers,
75
+ body: { model, prompt },
76
+ failedResponseHandler: failedOllamaCallResponseHandler,
77
+ successfulResponseHandler: createJsonResponseHandler(ollamaTextEmbeddingResponseSchema),
78
+ abortSignal,
79
+ });
80
+ }
@@ -18,4 +18,5 @@ exports.OllamaError = void 0;
18
18
  __exportStar(require("./OllamaApiConfiguration.cjs"), exports);
19
19
  var OllamaError_js_1 = require("./OllamaError.cjs");
20
20
  Object.defineProperty(exports, "OllamaError", { enumerable: true, get: function () { return OllamaError_js_1.OllamaError; } });
21
+ __exportStar(require("./OllamaTextEmbeddingModel.cjs"), exports);
21
22
  __exportStar(require("./OllamaTextGenerationModel.cjs"), exports);
@@ -1,3 +1,4 @@
1
1
  export * from "./OllamaApiConfiguration.js";
2
- export { OllamaError, OllamaErrorData } from "./OllamaError.js";
2
+ export { OllamaError } from "./OllamaError.js";
3
+ export * from "./OllamaTextEmbeddingModel.js";
3
4
  export * from "./OllamaTextGenerationModel.js";
@@ -1,3 +1,4 @@
1
1
  export * from "./OllamaApiConfiguration.js";
2
2
  export { OllamaError } from "./OllamaError.js";
3
+ export * from "./OllamaTextEmbeddingModel.js";
3
4
  export * from "./OllamaTextGenerationModel.js";
@@ -57,6 +57,12 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
57
57
  writable: true,
58
58
  value: 2048
59
59
  });
60
+ Object.defineProperty(this, "isParallizable", {
61
+ enumerable: true,
62
+ configurable: true,
63
+ writable: true,
64
+ value: true
65
+ });
60
66
  Object.defineProperty(this, "embeddingDimensions", {
61
67
  enumerable: true,
62
68
  configurable: true,
@@ -41,6 +41,7 @@ export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEm
41
41
  readonly provider: "openai";
42
42
  get modelName(): "text-embedding-ada-002";
43
43
  readonly maxValuesPerCall = 2048;
44
+ readonly isParallizable = true;
44
45
  readonly embeddingDimensions: number;
45
46
  readonly tokenizer: TikTokenTokenizer;
46
47
  readonly contextWindowSize: number;
@@ -52,6 +52,12 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
52
52
  writable: true,
53
53
  value: 2048
54
54
  });
55
+ Object.defineProperty(this, "isParallizable", {
56
+ enumerable: true,
57
+ configurable: true,
58
+ writable: true,
59
+ value: true
60
+ });
55
61
  Object.defineProperty(this, "embeddingDimensions", {
56
62
  enumerable: true,
57
63
  configurable: true,
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "modelfusion",
3
3
  "description": "Build multimodal applications, chatbots, and agents with JavaScript and TypeScript.",
4
- "version": "0.55.0",
4
+ "version": "0.56.0",
5
5
  "author": "Lars Grammel",
6
6
  "license": "MIT",
7
7
  "keywords": [