modelfusion 0.37.0 → 0.39.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -8
- package/core/structure/JsonSchemaProducer.d.ts +9 -0
- package/core/structure/Schema.d.ts +1 -5
- package/core/structure/StructureDefinition.d.ts +2 -1
- package/core/structure/{UncheckedJsonSchemaSchema.cjs → UncheckedSchema.cjs} +3 -3
- package/core/structure/{UncheckedJsonSchemaSchema.d.ts → UncheckedSchema.d.ts} +3 -3
- package/core/structure/{UncheckedJsonSchemaSchema.js → UncheckedSchema.js} +1 -1
- package/core/structure/{UncheckedJsonSchemaStructureDefinition.cjs → UncheckedStructureDefinition.cjs} +5 -5
- package/core/structure/{UncheckedJsonSchemaStructureDefinition.d.ts → UncheckedStructureDefinition.d.ts} +3 -3
- package/core/structure/{UncheckedJsonSchemaStructureDefinition.js → UncheckedStructureDefinition.js} +3 -3
- package/core/structure/ZodStructureDefinition.d.ts +2 -2
- package/core/structure/index.cjs +3 -2
- package/core/structure/index.d.ts +3 -2
- package/core/structure/index.js +3 -2
- package/event-source/index.cjs +1 -0
- package/event-source/index.d.ts +1 -0
- package/event-source/index.js +1 -0
- package/event-source/readEventSource.cjs +27 -0
- package/event-source/readEventSource.d.ts +7 -0
- package/event-source/readEventSource.js +20 -0
- package/event-source/readEventSourceStream.cjs +6 -1
- package/event-source/readEventSourceStream.d.ts +2 -1
- package/event-source/readEventSourceStream.js +6 -1
- package/model-function/ModelCallEvent.d.ts +3 -3
- package/model-function/embed/EmbeddingEvent.d.ts +21 -0
- package/model-function/embed/EmbeddingModel.cjs +2 -0
- package/model-function/embed/EmbeddingModel.d.ts +17 -0
- package/model-function/embed/EmbeddingModel.js +1 -0
- package/model-function/{embed-text/embedText.cjs → embed/embed.cjs} +22 -22
- package/model-function/embed/embed.d.ts +27 -0
- package/model-function/{embed-text/embedText.js → embed/embed.js} +19 -19
- package/model-function/index.cjs +3 -3
- package/model-function/index.d.ts +3 -3
- package/model-function/index.js +3 -3
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +5 -5
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +6 -6
- package/model-provider/cohere/CohereTextEmbeddingModel.js +5 -5
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +5 -5
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +6 -6
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +5 -5
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +4 -4
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +5 -5
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +4 -4
- package/model-provider/openai/OpenAICostCalculator.cjs +1 -1
- package/model-provider/openai/OpenAICostCalculator.js +1 -1
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +4 -4
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +5 -5
- package/model-provider/openai/OpenAITextEmbeddingModel.js +4 -4
- package/package.json +1 -1
- package/tool/Tool.d.ts +3 -2
- package/vector-index/VectorIndexRetriever.cjs +3 -8
- package/vector-index/VectorIndexRetriever.d.ts +4 -9
- package/vector-index/VectorIndexRetriever.js +3 -8
- package/vector-index/memory/MemoryVectorIndex.cjs +15 -8
- package/vector-index/memory/MemoryVectorIndex.d.ts +2 -2
- package/vector-index/memory/MemoryVectorIndex.js +15 -8
- package/vector-index/upsertIntoVectorIndex.cjs +2 -2
- package/vector-index/upsertIntoVectorIndex.d.ts +4 -4
- package/vector-index/upsertIntoVectorIndex.js +2 -2
- package/model-function/embed-text/TextEmbeddingEvent.d.ts +0 -21
- package/model-function/embed-text/TextEmbeddingModel.d.ts +0 -21
- package/model-function/embed-text/embedText.d.ts +0 -27
- /package/{model-function/embed-text/TextEmbeddingEvent.cjs → core/structure/JsonSchemaProducer.cjs} +0 -0
- /package/{model-function/embed-text/TextEmbeddingEvent.js → core/structure/JsonSchemaProducer.js} +0 -0
- /package/model-function/{embed-text/TextEmbeddingModel.cjs → embed/EmbeddingEvent.cjs} +0 -0
- /package/model-function/{embed-text/TextEmbeddingModel.js → embed/EmbeddingEvent.js} +0 -0
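The core of this release is the rename of the text-embedding API: the `model-function/embed-text` module (`embedText`, `TextEmbeddingModel`) becomes the value-generic `model-function/embed` module (`embed`, `embedMany`, `EmbeddingModel`), and the per-call batch limit is now exposed as `maxValuesPerCall`. A minimal usage sketch of the renamed functions, assuming they are re-exported from the package root and that an OpenAI API key is configured in the environment (the second input string is a placeholder):

```ts
import { OpenAITextEmbeddingModel, embed, embedMany } from "modelfusion";

async function main() {
  const model = new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" });

  // Batch embedding; the batch size is bounded by model.maxValuesPerCall (2048 for this model):
  const embeddings = await embedMany(model, [
    "At first, Nox didn't know what to do with the pup.",
    "A second, placeholder sentence to embed.",
  ]);

  // Single-value embedding (this is what VectorIndexRetriever now calls internally):
  const embedding = await embed(model, "At first, Nox didn't know what to do with the pup.");

  console.log(embeddings.length, embedding.length);
}

main().catch(console.error);
```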
package/model-function/index.js
CHANGED
@@ -7,9 +7,9 @@ export * from "./SuccessfulModelCall.js";
 export * from "./describe-image/ImageDescriptionEvent.js";
 export * from "./describe-image/ImageDescriptionModel.js";
 export * from "./describe-image/describeImage.js";
-export * from "./embed
-export * from "./embed
-export * from "./embed
+export * from "./embed/EmbeddingEvent.js";
+export * from "./embed/EmbeddingModel.js";
+export * from "./embed/embed.js";
 export * from "./generate-image/ImageGenerationEvent.js";
 export * from "./generate-image/ImageGenerationModel.js";
 export * from "./generate-image/generateImage.js";
package/model-provider/cohere/CohereTextEmbeddingModel.cjs
CHANGED
@@ -5,9 +5,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.CohereTextEmbeddingModel = exports.COHERE_TEXT_EMBEDDING_MODELS = void 0;
 const zod_1 = __importDefault(require("zod"));
-const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const CohereApiConfiguration_js_1 = require("./CohereApiConfiguration.cjs");
 const CohereError_js_1 = require("./CohereError.cjs");
 const CohereTokenizer_js_1 = require("./CohereTokenizer.cjs");
@@ -31,7 +31,7 @@ exports.COHERE_TEXT_EMBEDDING_MODELS = {
  * @see https://docs.cohere.com/reference/embed
  *
  * @example
- * const embeddings = await
+ * const embeddings = await embedMany(
  * new CohereTextEmbeddingModel({ model: "embed-english-light-v2.0" }),
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -48,7 +48,7 @@ class CohereTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             writable: true,
             value: "cohere"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -94,8 +94,8 @@ class CohereTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
         return this.tokenizer.detokenize(tokens);
     }
     async callAPI(texts, options) {
-        if (texts.length > this.
-            throw new Error(`The Cohere embedding API only supports ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The Cohere embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
        }
        const run = options?.run;
        const settings = options?.settings;
package/model-provider/cohere/CohereTextEmbeddingModel.d.ts
CHANGED
@@ -1,8 +1,8 @@
 import z from "zod";
-import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
-import {
+import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/embed/EmbeddingModel.js";
 import { FullTokenizer } from "../../model-function/tokenize-text/Tokenizer.js";
 export declare const COHERE_TEXT_EMBEDDING_MODELS: {
     "embed-english-light-v2.0": {
@@ -19,7 +19,7 @@ export declare const COHERE_TEXT_EMBEDDING_MODELS: {
     };
 };
 export type CohereTextEmbeddingModelType = keyof typeof COHERE_TEXT_EMBEDDING_MODELS;
-export interface CohereTextEmbeddingModelSettings extends
+export interface CohereTextEmbeddingModelSettings extends EmbeddingModelSettings {
     api?: ApiConfiguration;
     model: CohereTextEmbeddingModelType;
     truncate?: "NONE" | "START" | "END";
@@ -30,7 +30,7 @@ export interface CohereTextEmbeddingModelSettings extends TextEmbeddingModelSett
  * @see https://docs.cohere.com/reference/embed
  *
  * @example
- * const embeddings = await
+ * const embeddings = await embedMany(
  * new CohereTextEmbeddingModel({ model: "embed-english-light-v2.0" }),
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -38,11 +38,11 @@ export interface CohereTextEmbeddingModelSettings extends TextEmbeddingModelSett
  * ]
  * );
  */
-export declare class CohereTextEmbeddingModel extends AbstractModel<CohereTextEmbeddingModelSettings> implements
+export declare class CohereTextEmbeddingModel extends AbstractModel<CohereTextEmbeddingModelSettings> implements EmbeddingModel<string, CohereTextEmbeddingResponse, CohereTextEmbeddingModelSettings>, FullTokenizer {
    constructor(settings: CohereTextEmbeddingModelSettings);
    readonly provider: "cohere";
    get modelName(): "embed-english-light-v2.0" | "embed-english-v2.0" | "embed-multilingual-v2.0";
-   readonly
+   readonly maxValuesPerCall = 96;
    readonly embeddingDimensions: number;
    readonly contextWindowSize: number;
    private readonly tokenizer;
package/model-provider/cohere/CohereTextEmbeddingModel.js
CHANGED
@@ -1,7 +1,7 @@
 import z from "zod";
-import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { CohereApiConfiguration } from "./CohereApiConfiguration.js";
 import { failedCohereCallResponseHandler } from "./CohereError.js";
 import { CohereTokenizer } from "./CohereTokenizer.js";
@@ -25,7 +25,7 @@ export const COHERE_TEXT_EMBEDDING_MODELS = {
  * @see https://docs.cohere.com/reference/embed
  *
  * @example
- * const embeddings = await
+ * const embeddings = await embedMany(
  * new CohereTextEmbeddingModel({ model: "embed-english-light-v2.0" }),
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -42,7 +42,7 @@ export class CohereTextEmbeddingModel extends AbstractModel {
             writable: true,
             value: "cohere"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -88,8 +88,8 @@ export class CohereTextEmbeddingModel extends AbstractModel {
         return this.tokenizer.detokenize(tokens);
     }
     async callAPI(texts, options) {
-        if (texts.length > this.
-            throw new Error(`The Cohere embedding API only supports ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The Cohere embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
        }
        const run = options?.run;
        const settings = options?.settings;
package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs
CHANGED
@@ -22,7 +22,7 @@ const HuggingFaceApiConfiguration_js_1 = require("./HuggingFaceApiConfiguration.
  * retry: retryWithExponentialBackoff({ maxTries: 5 }),
  * });
  *
- * const embeddings = await
+ * const embeddings = await embedMany(
  * model,
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -39,7 +39,7 @@ class HuggingFaceTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             writable: true,
             value: "huggingface"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -70,15 +70,15 @@ class HuggingFaceTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             value: undefined
         });
         // There is no limit documented in the HuggingFace API. Use 1024 as a reasonable default.
-        this.
+        this.maxValuesPerCall = settings.maxValuesPerCall ?? 1024;
         this.embeddingDimensions = settings.embeddingDimensions;
     }
     get modelName() {
         return this.settings.model;
     }
     async callAPI(texts, options) {
-        if (texts.length > this.
-        throw new Error(`The HuggingFace feature extraction API is configured to only support ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The HuggingFace feature extraction API is configured to only support ${this.maxValuesPerCall} texts per API call.`);
        }
        const run = options?.run;
        const settings = options?.settings;
package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts
CHANGED
@@ -2,11 +2,11 @@ import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
 import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
-import {
-export interface HuggingFaceTextEmbeddingModelSettings extends
+import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/embed/EmbeddingModel.js";
+export interface HuggingFaceTextEmbeddingModelSettings extends EmbeddingModelSettings {
    api?: ApiConfiguration;
    model: string;
-
+   maxValuesPerCall?: number;
    embeddingDimensions?: number;
    options?: {
        useCache?: boolean;
@@ -25,7 +25,7 @@ export interface HuggingFaceTextEmbeddingModelSettings extends TextEmbeddingMode
  * retry: retryWithExponentialBackoff({ maxTries: 5 }),
  * });
  *
- * const embeddings = await
+ * const embeddings = await embedMany(
  * model,
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -33,11 +33,11 @@ export interface HuggingFaceTextEmbeddingModelSettings extends TextEmbeddingMode
  * ]
  * );
  */
-export declare class HuggingFaceTextEmbeddingModel extends AbstractModel<HuggingFaceTextEmbeddingModelSettings> implements
+export declare class HuggingFaceTextEmbeddingModel extends AbstractModel<HuggingFaceTextEmbeddingModelSettings> implements EmbeddingModel<string, HuggingFaceTextEmbeddingResponse, HuggingFaceTextEmbeddingModelSettings> {
    constructor(settings: HuggingFaceTextEmbeddingModelSettings);
    readonly provider = "huggingface";
    get modelName(): string;
-   readonly
+   readonly maxValuesPerCall: number;
    readonly contextWindowSize: undefined;
    readonly embeddingDimensions: number | undefined;
    readonly tokenizer: undefined;
package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js
CHANGED
@@ -16,7 +16,7 @@ import { HuggingFaceApiConfiguration } from "./HuggingFaceApiConfiguration.js";
  * retry: retryWithExponentialBackoff({ maxTries: 5 }),
  * });
  *
- * const embeddings = await
+ * const embeddings = await embedMany(
  * model,
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -33,7 +33,7 @@ export class HuggingFaceTextEmbeddingModel extends AbstractModel {
             writable: true,
             value: "huggingface"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -64,15 +64,15 @@ export class HuggingFaceTextEmbeddingModel extends AbstractModel {
             value: undefined
         });
         // There is no limit documented in the HuggingFace API. Use 1024 as a reasonable default.
-        this.
+        this.maxValuesPerCall = settings.maxValuesPerCall ?? 1024;
         this.embeddingDimensions = settings.embeddingDimensions;
     }
     get modelName() {
         return this.settings.model;
     }
     async callAPI(texts, options) {
-        if (texts.length > this.
-        throw new Error(`The HuggingFace feature extraction API is configured to only support ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The HuggingFace feature extraction API is configured to only support ${this.maxValuesPerCall} texts per API call.`);
        }
        const run = options?.run;
        const settings = options?.settings;
package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs
CHANGED
@@ -5,9 +5,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LlamaCppTextEmbeddingModel = void 0;
 const zod_1 = __importDefault(require("zod"));
-const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const LlamaCppApiConfiguration_js_1 = require("./LlamaCppApiConfiguration.cjs");
 const LlamaCppError_js_1 = require("./LlamaCppError.cjs");
 const LlamaCppTokenizer_js_1 = require("./LlamaCppTokenizer.cjs");
@@ -20,7 +20,7 @@ class LlamaCppTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             writable: true,
             value: "llamacpp"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -54,8 +54,8 @@ class LlamaCppTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
         return this.tokenizer.tokenize(text);
     }
     async callAPI(texts, options) {
-        if (texts.length > this.
-        throw new Error(`The Llama.cpp embedding API only supports ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The Llama.cpp embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
        }
        const run = options?.run;
        const settings = options?.settings;
package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts
CHANGED
@@ -1,17 +1,17 @@
 import z from "zod";
-import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
-import {
-export interface LlamaCppTextEmbeddingModelSettings extends
+import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/embed/EmbeddingModel.js";
+export interface LlamaCppTextEmbeddingModelSettings extends EmbeddingModelSettings {
    api?: ApiConfiguration;
    embeddingDimensions?: number;
 }
-export declare class LlamaCppTextEmbeddingModel extends AbstractModel<LlamaCppTextEmbeddingModelSettings> implements
+export declare class LlamaCppTextEmbeddingModel extends AbstractModel<LlamaCppTextEmbeddingModelSettings> implements EmbeddingModel<string, LlamaCppTextEmbeddingResponse, LlamaCppTextEmbeddingModelSettings> {
    constructor(settings?: LlamaCppTextEmbeddingModelSettings);
    readonly provider: "llamacpp";
    get modelName(): null;
-   readonly
+   readonly maxValuesPerCall = 1;
    readonly contextWindowSize: undefined;
    readonly embeddingDimensions: number | undefined;
    private readonly tokenizer;
package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js
CHANGED
@@ -1,7 +1,7 @@
 import z from "zod";
-import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { LlamaCppApiConfiguration } from "./LlamaCppApiConfiguration.js";
 import { failedLlamaCppCallResponseHandler } from "./LlamaCppError.js";
 import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
@@ -14,7 +14,7 @@ export class LlamaCppTextEmbeddingModel extends AbstractModel {
             writable: true,
             value: "llamacpp"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -48,8 +48,8 @@ export class LlamaCppTextEmbeddingModel extends AbstractModel {
         return this.tokenizer.tokenize(text);
     }
     async callAPI(texts, options) {
-        if (texts.length > this.
-        throw new Error(`The Llama.cpp embedding API only supports ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The Llama.cpp embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
        }
        const run = options?.run;
        const settings = options?.settings;
package/model-provider/openai/OpenAITextEmbeddingModel.cjs
CHANGED
@@ -37,7 +37,7 @@ exports.calculateOpenAIEmbeddingCostInMillicents = calculateOpenAIEmbeddingCostI
  * @see https://platform.openai.com/docs/api-reference/embeddings
  *
  * @example
- * const embeddings = await
+ * const embeddings = await embedMany(
  * new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -54,7 +54,7 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             writable: true,
             value: "openai"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -115,8 +115,8 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
         return {};
     }
     generateEmbeddingResponse(texts, options) {
-        if (texts.length > this.
-        throw new Error(`The OpenAI embedding API only supports ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The OpenAI embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
        }
        return this.callAPI(texts, options);
    }
package/model-provider/openai/OpenAITextEmbeddingModel.d.ts
CHANGED
@@ -2,7 +2,7 @@ import z from "zod";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
 import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
-import {
+import { EmbeddingModel, EmbeddingModelSettings } from "../../model-function/embed/EmbeddingModel.js";
 import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
 export declare const OPENAI_TEXT_EMBEDDING_MODELS: {
     "text-embedding-ada-002": {
@@ -17,7 +17,7 @@ export declare const calculateOpenAIEmbeddingCostInMillicents: ({ model, respons
    model: OpenAITextEmbeddingModelType;
    responses: OpenAITextEmbeddingResponse[];
 }) => number;
-export interface OpenAITextEmbeddingModelSettings extends
+export interface OpenAITextEmbeddingModelSettings extends EmbeddingModelSettings {
    api?: ApiConfiguration;
    model: OpenAITextEmbeddingModelType;
    isUserIdForwardingEnabled?: boolean;
@@ -28,7 +28,7 @@ export interface OpenAITextEmbeddingModelSettings extends TextEmbeddingModelSett
  * @see https://platform.openai.com/docs/api-reference/embeddings
  *
  * @example
- * const embeddings = await
+ * const embeddings = await embedMany(
  * new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -36,11 +36,11 @@ export interface OpenAITextEmbeddingModelSettings extends TextEmbeddingModelSett
  * ]
  * );
  */
-export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEmbeddingModelSettings> implements
+export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEmbeddingModelSettings> implements EmbeddingModel<string, OpenAITextEmbeddingResponse, OpenAITextEmbeddingModelSettings> {
    constructor(settings: OpenAITextEmbeddingModelSettings);
    readonly provider: "openai";
    get modelName(): "text-embedding-ada-002";
-   readonly
+   readonly maxValuesPerCall = 2048;
    readonly embeddingDimensions: number;
    readonly tokenizer: TikTokenTokenizer;
    readonly contextWindowSize: number;
package/model-provider/openai/OpenAITextEmbeddingModel.js
CHANGED
@@ -29,7 +29,7 @@ export const calculateOpenAIEmbeddingCostInMillicents = ({ model, responses, })
  * @see https://platform.openai.com/docs/api-reference/embeddings
  *
  * @example
- * const embeddings = await
+ * const embeddings = await embedMany(
  * new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
  * [
  * "At first, Nox didn't know what to do with the pup.",
@@ -46,7 +46,7 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
             writable: true,
             value: "openai"
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "maxValuesPerCall", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -107,8 +107,8 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
         return {};
     }
     generateEmbeddingResponse(texts, options) {
-        if (texts.length > this.
-        throw new Error(`The OpenAI embedding API only supports ${this.
+        if (texts.length > this.maxValuesPerCall) {
+            throw new Error(`The OpenAI embedding API only supports ${this.maxValuesPerCall} texts per API call.`);
        }
        return this.callAPI(texts, options);
    }
package/package.json
CHANGED
package/tool/Tool.d.ts
CHANGED
@@ -1,4 +1,5 @@
 import { FunctionOptions } from "../core/FunctionOptions.js";
+import { JsonSchemaProducer } from "../core/structure/JsonSchemaProducer.js";
 import { Schema } from "../core/structure/Schema.js";
 import { StructureDefinition } from "../core/structure/StructureDefinition.js";
 /**
@@ -20,7 +21,7 @@ export declare class Tool<NAME extends string, INPUT, OUTPUT> {
      * The schema of the input that the tool expects. The language model will use this to generate the input.
      * Use descriptions to make the input understandable for the language model.
      */
-    readonly inputSchema: Schema<INPUT
+    readonly inputSchema: Schema<INPUT> & JsonSchemaProducer;
    /**
     * An optional schema of the output that the tool produces. This will be used to validate the output.
     */
@@ -32,7 +33,7 @@ export declare class Tool<NAME extends string, INPUT, OUTPUT> {
    constructor({ name, description, inputSchema, outputSchema, execute, }: {
        name: NAME;
        description: string;
-       inputSchema: Schema<INPUT
+       inputSchema: Schema<INPUT> & JsonSchemaProducer;
        outputSchema?: Schema<OUTPUT>;
        execute(input: INPUT, options?: FunctionOptions): PromiseLike<OUTPUT>;
    });
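`Tool.inputSchema` now has to produce a JSON schema in addition to validating input (`Schema<INPUT> & JsonSchemaProducer`). A hedged sketch of a tool definition under the new typing; `ZodSchema` is assumed here to be the package's Zod adapter that implements both interfaces, and the calculator tool itself is purely illustrative:

```ts
import z from "zod";
import { Tool, ZodSchema } from "modelfusion"; // the ZodSchema export name is an assumption

const calculator = new Tool({
  name: "calculator" as const,
  description: "Execute a basic arithmetic operation.",
  // inputSchema must now satisfy Schema<INPUT> & JsonSchemaProducer (see Tool.d.ts above):
  inputSchema: new ZodSchema(
    z.object({
      a: z.number().describe("The first number."),
      b: z.number().describe("The second number."),
      operator: z.enum(["+", "-", "*", "/"]).describe("The operator."),
    })
  ),
  execute: async ({ a, b, operator }) => {
    switch (operator) {
      case "+": return a + b;
      case "-": return a - b;
      case "*": return a * b;
      case "/": return a / b;
      default: throw new Error(`Unknown operator: ${operator}`);
    }
  },
});
```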
package/vector-index/VectorIndexRetriever.cjs
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.VectorIndexRetriever = void 0;
-const
+const embed_js_1 = require("../model-function/embed/embed.cjs");
 class VectorIndexRetriever {
     constructor({ vectorIndex, embeddingModel, maxResults, similarityThreshold, filter, }) {
         Object.defineProperty(this, "vectorIndex", {
@@ -38,12 +38,7 @@ class VectorIndexRetriever {
                 run: options.run,
             });
         }
-
-        if (typeof query === "object") {
-            filter = query.filter ?? filter; // use filter from query if available
-            query = query.text;
-        }
-        const embedding = await (0, embedText_js_1.embedText)(this.embeddingModel, query, {
+        const embedding = await (0, embed_js_1.embed)(this.embeddingModel, query, {
            functionId: options?.functionId,
            run: options?.run,
        });
@@ -51,7 +46,7 @@ class VectorIndexRetriever {
            queryVector: embedding,
            maxResults: this.settings.maxResults ?? 1,
            similarityThreshold: this.settings.similarityThreshold,
-           filter,
+           filter: this.settings?.filter,
        });
        return queryResult.map((item) => item.data);
    }
package/vector-index/VectorIndexRetriever.d.ts
CHANGED
@@ -1,5 +1,5 @@
+import { EmbeddingModel, EmbeddingModelSettings } from "../model-function/embed/EmbeddingModel.js";
 import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
-import { TextEmbeddingModel, TextEmbeddingModelSettings } from "../model-function/embed-text/TextEmbeddingModel.js";
 import { Retriever, RetrieverSettings } from "../retriever/Retriever.js";
 import { VectorIndex } from "./VectorIndex.js";
 export interface VectorIndexRetrieverSettings<FILTER> {
@@ -7,19 +7,14 @@ export interface VectorIndexRetrieverSettings<FILTER> {
    similarityThreshold?: number;
    filter?: FILTER;
 }
-
-    text: string;
-    filter?: FILTER;
-};
-export declare class VectorIndexRetriever<OBJECT, INDEX, FILTER> implements Retriever<OBJECT, VectorIndexRetrieverQuery<FILTER>, VectorIndexRetrieverSettings<FILTER>> {
+export declare class VectorIndexRetriever<OBJECT, VALUE, INDEX, FILTER> implements Retriever<OBJECT, VALUE, VectorIndexRetrieverSettings<FILTER>> {
    private readonly vectorIndex;
    private readonly embeddingModel;
    private readonly settings;
    constructor({ vectorIndex, embeddingModel, maxResults, similarityThreshold, filter, }: {
        vectorIndex: VectorIndex<OBJECT, INDEX, FILTER>;
-       embeddingModel:
+       embeddingModel: EmbeddingModel<VALUE, unknown, EmbeddingModelSettings>;
    } & VectorIndexRetrieverSettings<FILTER>);
-   retrieve(query:
+   retrieve(query: VALUE, options?: ModelFunctionOptions<RetrieverSettings>): Promise<OBJECT[]>;
    withSettings(additionalSettings: Partial<VectorIndexRetrieverSettings<FILTER>>): this;
 }
-export {};
package/vector-index/VectorIndexRetriever.js
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { embed } from "../model-function/embed/embed.js";
 export class VectorIndexRetriever {
     constructor({ vectorIndex, embeddingModel, maxResults, similarityThreshold, filter, }) {
         Object.defineProperty(this, "vectorIndex", {
@@ -35,12 +35,7 @@ export class VectorIndexRetriever {
                 run: options.run,
             });
         }
-
-        if (typeof query === "object") {
-            filter = query.filter ?? filter; // use filter from query if available
-            query = query.text;
-        }
-        const embedding = await embedText(this.embeddingModel, query, {
+        const embedding = await embed(this.embeddingModel, query, {
            functionId: options?.functionId,
            run: options?.run,
        });
@@ -48,7 +43,7 @@ export class VectorIndexRetriever {
            queryVector: embedding,
            maxResults: this.settings.maxResults ?? 1,
            similarityThreshold: this.settings.similarityThreshold,
-           filter,
+           filter: this.settings?.filter,
        });
        return queryResult.map((item) => item.data);
    }
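With this change, `VectorIndexRetriever.retrieve` takes the raw value to embed instead of a `{ text, filter }` query object, and the filter is read from the retriever settings (set in the constructor or via `withSettings`). A minimal sketch of the resulting call shape; the `texts` index is assumed to be a previously filled `MemoryVectorIndex<string>` and the filter value is illustrative:

```ts
import {
  MemoryVectorIndex,
  OpenAITextEmbeddingModel,
  VectorIndexRetriever,
} from "modelfusion";

declare const texts: MemoryVectorIndex<string>; // assumed to contain upserted entries

const retriever = new VectorIndexRetriever({
  vectorIndex: texts,
  embeddingModel: new OpenAITextEmbeddingModel({ model: "text-embedding-ada-002" }),
  maxResults: 3,
  similarityThreshold: 0.8,
});

// The query is now the plain value to embed; a filter is applied through the settings:
const results = await retriever
  .withSettings({ filter: (value) => value.length < 200 })
  .retrieve("What did Nox do with the pup?");
```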
package/vector-index/memory/MemoryVectorIndex.cjs
CHANGED
@@ -4,8 +4,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.MemoryVectorIndex = void 0;
+const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
 const zod_1 = __importDefault(require("zod"));
 const cosineSimilarity_js_1 = require("../../util/cosineSimilarity.cjs");
+const jsonDataSchema = zod_1.default.array(zod_1.default.object({
+    id: zod_1.default.string(),
+    vector: zod_1.default.array(zod_1.default.number()),
+    data: zod_1.default.unknown(),
+}));
 /**
  * A very simple vector index that stores all entries in memory. Useful when you only have
  * a small number of entries and don't want to set up a real database, e.g. for conversational memory
@@ -21,15 +27,16 @@ class MemoryVectorIndex {
         });
     }
     static async deserialize({ serializedData, schema, }) {
-
+        // validate the outer structure:
+        const json = jsonDataSchema.parse(secure_json_parse_1.default.parse(serializedData));
        if (schema != null) {
-
-
-
-
-
-
-
+           // when a schema is provided, validate all entries:
+           for (const entry of json) {
+               const validationResult = schema.validate(entry.data);
+               if (!validationResult.success) {
+                   throw validationResult.error;
+               }
+           }
        }
        const vectorIndex = new MemoryVectorIndex();
        vectorIndex.upsertMany(json);
package/vector-index/memory/MemoryVectorIndex.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import z from "zod";
 import { Vector } from "../../core/Vector.js";
+import { Schema } from "../../core/structure/Schema.js";
 import { VectorIndex } from "../VectorIndex.js";
 /**
  * A very simple vector index that stores all entries in memory. Useful when you only have
@@ -9,7 +9,7 @@ import { VectorIndex } from "../VectorIndex.js";
 export declare class MemoryVectorIndex<DATA> implements VectorIndex<DATA, MemoryVectorIndex<DATA>, (value: DATA) => boolean> {
    static deserialize<DATA>({ serializedData, schema, }: {
        serializedData: string;
-       schema?:
+       schema?: Schema<DATA>;
    }): Promise<MemoryVectorIndex<DATA>>;
    private readonly entries;
    upsertMany(data: Array<{
|