@mastra/fastembed 0.0.0-1.x-tester-20251106055847

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,97 @@
+ # @mastra/fastembed
+
+ ## 0.0.0-1.x-tester-20251106055847
+
+ ### Major Changes
+
+ - Bump minimum required Node.js version to 22.13.0 ([#9706](https://github.com/mastra-ai/mastra/pull/9706))
+
+ - Upgraded to AI SDK v5 (specification version v2) for compatibility with @mastra/core. Default exports now use the v2 specification. Legacy v1 exports remain available for backwards compatibility via `fastembed.smallLegacy` and `fastembed.baseLegacy`. ([#9349](https://github.com/mastra-ai/mastra/pull/9349))
+
+ - Mark as stable ([`83d5942`](https://github.com/mastra-ai/mastra/commit/83d5942669ce7bba4a6ca4fd4da697a10eb5ebdc))
+
+ ## 0.10.5
+
+ ### Patch Changes
+
+ - de3cbc6: Update the `package.json` file to include additional fields like `repository`, `homepage`, or `files`.
+
+ ## 0.10.5-alpha.0
+
+ ### Patch Changes
+
+ - [#7343](https://github.com/mastra-ai/mastra/pull/7343) [`de3cbc6`](https://github.com/mastra-ai/mastra/commit/de3cbc61079211431bd30487982ea3653517278e) Thanks [@LekoArts](https://github.com/LekoArts)! - Update the `package.json` file to include additional fields like `repository`, `homepage`, or `files`.
+
+ ## 0.10.4
+
+ ### Patch Changes
+
+ - [`c6113ed`](https://github.com/mastra-ai/mastra/commit/c6113ed7f9df297e130d94436ceee310273d6430) Thanks [@wardpeet](https://github.com/wardpeet)! - Fix peer deps for @mastra/core
+
+ ## 0.10.3
+
+ ### Patch Changes
+
+ - [#6919](https://github.com/mastra-ai/mastra/pull/6919) [`6e7e120`](https://github.com/mastra-ai/mastra/commit/6e7e1207d6e8d8b838f9024f90bd10df1181ba27) Thanks [@dane-ai-mastra](https://github.com/apps/dane-ai-mastra)! - dependencies updates:
+   - Updated dependency [`ai@^4.3.19` ↗︎](https://www.npmjs.com/package/ai/v/4.3.19) (from `^4.3.16`, in `dependencies`)
+
+ ## 0.10.3-alpha.0
+
+ ### Patch Changes
+
+ - [#6919](https://github.com/mastra-ai/mastra/pull/6919) [`6e7e120`](https://github.com/mastra-ai/mastra/commit/6e7e1207d6e8d8b838f9024f90bd10df1181ba27) Thanks [@dane-ai-mastra](https://github.com/apps/dane-ai-mastra)! - dependencies updates:
+   - Updated dependency [`ai@^4.3.19` ↗︎](https://www.npmjs.com/package/ai/v/4.3.19) (from `^4.3.16`, in `dependencies`)
+
+ ## 0.10.2
+
+ ### Patch Changes
+
+ - 4a406ec: Fixes TypeScript declaration file imports to ensure proper ESM compatibility
+
+ ## 0.10.2-alpha.0
+
+ ### Patch Changes
+
+ - 4a406ec: Fixes TypeScript declaration file imports to ensure proper ESM compatibility
+
+ ## 0.10.1
+
+ ### Patch Changes
+
+ - f64b3f7: dependencies updates:
+   - Updated dependency [`ai@^4.3.16` ↗︎](https://www.npmjs.com/package/ai/v/4.3.16) (from `^3.4.33`, in `dependencies`)
+
+ ## 0.10.1-alpha.0
+
+ ### Patch Changes
+
+ - f64b3f7: dependencies updates:
+   - Updated dependency [`ai@^4.3.16` ↗︎](https://www.npmjs.com/package/ai/v/4.3.16) (from `^3.4.33`, in `dependencies`)
+
+ ## 0.0.3
+
+ ### Patch Changes
+
+ - 48b8c2c: dependencies updates:
+   - Updated dependency [`ai@^3.4.33` ↗︎](https://www.npmjs.com/package/ai/v/3.4.33) (from `^3.0.0`, in `dependencies`)
+
+ ## 0.0.3-alpha.0
+
+ ### Patch Changes
+
+ - 48b8c2c: dependencies updates:
+   - Updated dependency [`ai@^3.4.33` ↗︎](https://www.npmjs.com/package/ai/v/3.4.33) (from `^3.0.0`, in `dependencies`)
+
+ ## 0.0.2
+
+ ### Patch Changes
+
+ - 3a5f1e1: Created a new @mastra/fastembed package based on the default embedder in @mastra/core, as the default embedder will be removed in a breaking change (May 20th).
+   Added a warning to use the new @mastra/fastembed package instead of the default embedder.
+
+ ## 0.0.2-alpha.0
+
+ ### Patch Changes
+
+ - 3a5f1e1: Created a new @mastra/fastembed package based on the default embedder in @mastra/core, as the default embedder will be removed in a breaking change (May 20th).
+   Added a warning to use the new @mastra/fastembed package instead of the default embedder.
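The AI SDK v5 upgrade in the major changes above is the entry most likely to require action from consumers. A minimal migration sketch, not part of the package itself, assuming an app that previously passed the default export to `embed` from AI SDK v4:

```typescript
// Before (AI SDK v4 / spec v1): the default export satisfied the v1 embedding interface.
// import { embed } from 'ai'; // ai@4.x
// const { embedding } = await embed({ model: fastembed, value: 'hello' });

// After this release the default export implements spec v2, so pair it with ai@5.x:
import { embed } from 'ai'; // ai@5.x
import { fastembed } from '@mastra/fastembed';

const { embedding } = await embed({ model: fastembed, value: 'hello' });

// Code still pinned to AI SDK v4 can opt into the legacy v1 models instead:
// const { embedding } = await embed({ model: fastembed.smallLegacy, value: 'hello' }); // with ai@4.x
```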
package/LICENSE.md ADDED
@@ -0,0 +1,15 @@
+ # Apache License 2.0
+
+ Copyright (c) 2025 Kepler Software, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
package/README.md ADDED
@@ -0,0 +1,67 @@
+ # @mastra/fastembed
+
+ This package provides a FastEmbed embedding model integration for use with Mastra Memory.
+
+ **Note:** This functionality was previously included directly within `@mastra/core`. It has been moved to this separate package because `fastembed-js` relies on large native dependencies (like `onnxruntime-node`). Separating it keeps `@mastra/core` lightweight for users who may not need FastEmbed.
+
+ ## Installation
+
+ ```bash
+ pnpm add @mastra/fastembed
+ ```
+
+ ## AI SDK v2 Compatibility
+
+ This package supports AI SDK v5 (specification version v2). The default exports use v2, which is compatible with `@mastra/core` and AI SDK v5.
+
+ **Breaking Change:** Previous versions used AI SDK specification v1. If you need v1 compatibility for legacy code, use the `Legacy` exports.
+
+ ## Usage
+
+ ### Default (AI SDK v2)
+
+ ```typescript
+ import { Memory } from '@mastra/memory';
+ import { fastembed } from '@mastra/fastembed';
+
+ const memory = new Memory({
+   // ... other memory options
+   embedder: fastembed, // Uses v2 specification
+ });
+
+ // Now you can use this memory instance with an Agent
+ // const agent = new Agent({ memory, ... });
+ ```
+
+ ### Available Models
+
+ ```typescript
+ import { fastembed } from '@mastra/fastembed';
+
+ // Default export (bge-small-en-v1.5 with v2 spec)
+ const embedder = fastembed;
+
+ // Named exports for v2 models
+ const small = fastembed.small; // bge-small-en-v1.5
+ const base = fastembed.base; // bge-base-en-v1.5
+
+ // Legacy v1 models (for backwards compatibility)
+ const smallLegacy = fastembed.smallLegacy; // bge-small-en-v1.5 (v1 spec)
+ const baseLegacy = fastembed.baseLegacy; // bge-base-en-v1.5 (v1 spec)
+ ```
+
+ ### Direct Usage with AI SDK v5
+
+ ```typescript
+ import { embed } from 'ai';
+ import { fastembed } from '@mastra/fastembed';
+
+ const result = await embed({
+   model: fastembed,
+   value: 'Text to embed',
+ });
+
+ console.log(result.embedding); // number[]
+ ```
+
+ This package wraps the `fastembed-js` library to provide an embedding model compatible with the AI SDK and Mastra.
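The published README only shows single-value embedding. For completeness, a hedged sketch of batch embedding through the AI SDK's `embedMany` (not part of the README above, but it relies only on the exports it documents):

```typescript
import { embedMany } from 'ai';
import { fastembed } from '@mastra/fastembed';

// fastembed.base is the larger bge-base-en-v1.5 model; fastembed (or fastembed.small) is the default.
const { embeddings } = await embedMany({
  model: fastembed.base,
  values: ['First chunk of text', 'Second chunk of text'],
});

console.log(embeddings.length);    // 2
console.log(embeddings[0].length); // embedding dimension for bge-base-en-v1.5
```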
package/dist/index.cjs ADDED
@@ -0,0 +1,94 @@
+ 'use strict';
+
+ var fsp = require('fs/promises');
+ var os = require('os');
+ var path = require('path');
+ var ai = require('ai');
+ var aiV4 = require('ai-v4');
+ var fastembed$1 = require('fastembed');
+
+ function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
+
+ var fsp__default = /*#__PURE__*/_interopDefault(fsp);
+ var os__default = /*#__PURE__*/_interopDefault(os);
+ var path__default = /*#__PURE__*/_interopDefault(path);
+
+ // src/index.ts
+ async function getModelCachePath() {
+   const cachePath = path__default.default.join(os__default.default.homedir(), ".cache", "mastra", "fastembed-models");
+   await fsp__default.default.mkdir(cachePath, { recursive: true });
+   return cachePath;
+ }
+ async function generateEmbeddings(values, modelType) {
+   const model = await fastembed$1.FlagEmbedding.init({
+     model: fastembed$1.EmbeddingModel[modelType],
+     cacheDir: await getModelCachePath()
+   });
+   const embeddings = model.embed(values);
+   const allResults = [];
+   for await (const result of embeddings) {
+     allResults.push(...result.map((embedding) => Array.from(embedding)));
+   }
+   if (allResults.length === 0) throw new Error("No embeddings generated");
+   return {
+     embeddings: allResults
+   };
+ }
+ var fastEmbedLegacyProvider = aiV4.experimental_customProvider({
+   textEmbeddingModels: {
+     "bge-small-en-v1.5": {
+       specificationVersion: "v1",
+       provider: "fastembed",
+       modelId: "bge-small-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGESmallENV15");
+       }
+     },
+     "bge-base-en-v1.5": {
+       specificationVersion: "v1",
+       provider: "fastembed",
+       modelId: "bge-base-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGEBaseENV15");
+       }
+     }
+   }
+ });
+ var fastEmbedProvider = ai.customProvider({
+   textEmbeddingModels: {
+     "bge-small-en-v1.5": {
+       specificationVersion: "v2",
+       provider: "fastembed",
+       modelId: "bge-small-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGESmallENV15");
+       }
+     },
+     "bge-base-en-v1.5": {
+       specificationVersion: "v2",
+       provider: "fastembed",
+       modelId: "bge-base-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGEBaseENV15");
+       }
+     }
+   }
+ });
+ var fastembed = Object.assign(fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`), {
+   small: fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`),
+   base: fastEmbedProvider.textEmbeddingModel(`bge-base-en-v1.5`),
+   smallLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-small-en-v1.5`),
+   baseLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-base-en-v1.5`)
+ });
+
+ exports.fastembed = fastembed;
+ //# sourceMappingURL=index.cjs.map
+ //# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/index.ts"],"names":["path","os","fsp","FlagEmbedding","EmbeddingModel","experimental_customProvider","customProvider"],"mappings":";;;;;;;;;;;;;;;;AAOA,eAAe,iBAAA,GAAoB;AACjC,EAAA,MAAM,SAAA,GAAYA,sBAAK,IAAA,CAAKC,mBAAA,CAAG,SAAQ,EAAG,QAAA,EAAU,UAAU,kBAAkB,CAAA;AAChF,EAAA,MAAMC,qBAAI,KAAA,CAAM,SAAA,EAAW,EAAE,SAAA,EAAW,MAAM,CAAA;AAC9C,EAAA,OAAO,SAAA;AACT;AAGA,eAAe,kBAAA,CAAmB,QAAkB,SAAA,EAA6C;AAC/F,EAAA,MAAM,KAAA,GAAQ,MAAMC,yBAAA,CAAc,IAAA,CAAK;AAAA,IACrC,KAAA,EAAOC,2BAAe,SAAS,CAAA;AAAA,IAC/B,QAAA,EAAU,MAAM,iBAAA;AAAkB,GACnC,CAAA;AAGD,EAAA,MAAM,UAAA,GAAa,KAAA,CAAM,KAAA,CAAM,MAAM,CAAA;AAErC,EAAA,MAAM,aAAa,EAAC;AACpB,EAAA,WAAA,MAAiB,UAAU,UAAA,EAAY;AAGrC,IAAA,UAAA,CAAW,IAAA,CAAK,GAAG,MAAA,CAAO,GAAA,CAAI,eAAa,KAAA,CAAM,IAAA,CAAK,SAAS,CAAC,CAAC,CAAA;AAAA,EACnE;AAEA,EAAA,IAAI,WAAW,MAAA,KAAW,CAAA,EAAG,MAAM,IAAI,MAAM,yBAAyB,CAAA;AAEtE,EAAA,OAAO;AAAA,IACL,UAAA,EAAY;AAAA,GACd;AACF;AAGA,IAAM,0BAA0BC,gCAAA,CAA4B;AAAA,EAC1D,mBAAA,EAAqB;AAAA,IACnB,mBAAA,EAAqB;AAAA,MACnB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,mBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,eAAe,CAAA;AAAA,MACnD;AAAA,KACF;AAAA,IACA,kBAAA,EAAoB;AAAA,MAClB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,kBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,cAAc,CAAA;AAAA,MAClD;AAAA;AACF;AAEJ,CAAC,CAAA;AAGD,IAAM,oBAAoBC,iBAAA,CAAe;AAAA,EACvC,mBAAA,EAAqB;AAAA,IACnB,mBAAA,EAAqB;AAAA,MACnB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,mBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,eAAe,CAAA;AAAA,MACnD;AAAA,KACF;AAAA,IACA,kBAAA,EAAoB;AAAA,MAClB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,kBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,cAAc,CAAA;AAAA,MAClD;AAAA;AACF;AAEJ,CAAC,CAAA;AAEM,IAAM,YAAY,MAAA,CAAO,MAAA,CAAO,iBAAA,CAAkB,kBAAA,CAAmB,mBAAmB,CAAA,EAAG;AAAA,EAChG,KAAA,EAAO,iBAAA,CAAkB,kBAAA,CAAmB,CAAA,iBAAA,CAAmB,CAAA;AAAA,EAC/D,IAAA,EAAM,iBAAA,CAAkB,kBAAA,CAAmB,CAAA,gBAAA,CAAkB,CAAA;AAAA,EAC7D,WAAA,EAAa,uBAAA,CAAwB,kBAAA,CAAmB,CAAA,iBAAA,CAAmB,CAAA;AAAA,EAC3E,UAAA,EAAY,uBAAA,CAAwB,kBAAA,CAAmB,CAAA,gBAAA,CAAkB;AAC3E,CAAC","file":"index.cjs","sourcesContent":["import fsp from 'node:fs/promises';\nimport os from 'node:os';\nimport path from 'node:path';\nimport { customProvider } from 'ai';\nimport { experimental_customProvider } from 'ai-v4';\nimport { FlagEmbedding, EmbeddingModel } from 'fastembed';\n\nasync function getModelCachePath() {\n const cachePath = path.join(os.homedir(), '.cache', 'mastra', 'fastembed-models');\n await fsp.mkdir(cachePath, { recursive: true });\n return cachePath;\n}\n\n// Shared function to generate embeddings using fastembed\nasync function generateEmbeddings(values: string[], modelType: 'BGESmallENV15' | 'BGEBaseENV15') {\n const model = await FlagEmbedding.init({\n model: EmbeddingModel[modelType],\n cacheDir: await getModelCachePath(),\n });\n\n // model.embed() returns an AsyncGenerator that processes texts in batches (default size 256)\n const embeddings = model.embed(values);\n\n const allResults = [];\n for await (const result of embeddings) {\n // result is an array of embeddings, one for each text in the batch\n // We convert each Float32Array 
embedding to a regular number array\n allResults.push(...result.map(embedding => Array.from(embedding)));\n }\n\n if (allResults.length === 0) throw new Error('No embeddings generated');\n\n return {\n embeddings: allResults,\n };\n}\n\n// Legacy v1 provider for backwards compatibility\nconst fastEmbedLegacyProvider = experimental_customProvider({\n textEmbeddingModels: {\n 'bge-small-en-v1.5': {\n specificationVersion: 'v1',\n provider: 'fastembed',\n modelId: 'bge-small-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGESmallENV15');\n },\n },\n 'bge-base-en-v1.5': {\n specificationVersion: 'v1',\n provider: 'fastembed',\n modelId: 'bge-base-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGEBaseENV15');\n },\n },\n },\n});\n\n// V2 provider for AI SDK v5 compatibility\nconst fastEmbedProvider = customProvider({\n textEmbeddingModels: {\n 'bge-small-en-v1.5': {\n specificationVersion: 'v2',\n provider: 'fastembed',\n modelId: 'bge-small-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGESmallENV15');\n },\n },\n 'bge-base-en-v1.5': {\n specificationVersion: 'v2',\n provider: 'fastembed',\n modelId: 'bge-base-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGEBaseENV15');\n },\n },\n },\n});\n\nexport const fastembed = Object.assign(fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`), {\n small: fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`),\n base: fastEmbedProvider.textEmbeddingModel(`bge-base-en-v1.5`),\n smallLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-small-en-v1.5`),\n baseLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-base-en-v1.5`),\n});\n"]}
package/dist/index.d.ts ADDED
@@ -0,0 +1,7 @@
+ export declare const fastembed: import("@ai-sdk/provider").EmbeddingModelV2<string> & {
+   small: import("@ai-sdk/provider").EmbeddingModelV2<string>;
+   base: import("@ai-sdk/provider").EmbeddingModelV2<string>;
+   smallLegacy: import("ai-v4").EmbeddingModel<string>;
+   baseLegacy: import("ai-v4").EmbeddingModel<string>;
+ };
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAyFA,eAAO,MAAM,SAAS;;;;;CAKpB,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,86 @@
+ import fsp from 'fs/promises';
+ import os from 'os';
+ import path from 'path';
+ import { customProvider } from 'ai';
+ import { experimental_customProvider } from 'ai-v4';
+ import { FlagEmbedding, EmbeddingModel } from 'fastembed';
+
+ // src/index.ts
+ async function getModelCachePath() {
+   const cachePath = path.join(os.homedir(), ".cache", "mastra", "fastembed-models");
+   await fsp.mkdir(cachePath, { recursive: true });
+   return cachePath;
+ }
+ async function generateEmbeddings(values, modelType) {
+   const model = await FlagEmbedding.init({
+     model: EmbeddingModel[modelType],
+     cacheDir: await getModelCachePath()
+   });
+   const embeddings = model.embed(values);
+   const allResults = [];
+   for await (const result of embeddings) {
+     allResults.push(...result.map((embedding) => Array.from(embedding)));
+   }
+   if (allResults.length === 0) throw new Error("No embeddings generated");
+   return {
+     embeddings: allResults
+   };
+ }
+ var fastEmbedLegacyProvider = experimental_customProvider({
+   textEmbeddingModels: {
+     "bge-small-en-v1.5": {
+       specificationVersion: "v1",
+       provider: "fastembed",
+       modelId: "bge-small-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGESmallENV15");
+       }
+     },
+     "bge-base-en-v1.5": {
+       specificationVersion: "v1",
+       provider: "fastembed",
+       modelId: "bge-base-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGEBaseENV15");
+       }
+     }
+   }
+ });
+ var fastEmbedProvider = customProvider({
+   textEmbeddingModels: {
+     "bge-small-en-v1.5": {
+       specificationVersion: "v2",
+       provider: "fastembed",
+       modelId: "bge-small-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGESmallENV15");
+       }
+     },
+     "bge-base-en-v1.5": {
+       specificationVersion: "v2",
+       provider: "fastembed",
+       modelId: "bge-base-en-v1.5",
+       maxEmbeddingsPerCall: 256,
+       supportsParallelCalls: true,
+       async doEmbed({ values }) {
+         return generateEmbeddings(values, "BGEBaseENV15");
+       }
+     }
+   }
+ });
+ var fastembed = Object.assign(fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`), {
+   small: fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`),
+   base: fastEmbedProvider.textEmbeddingModel(`bge-base-en-v1.5`),
+   smallLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-small-en-v1.5`),
+   baseLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-base-en-v1.5`)
+ });
+
+ export { fastembed };
+ //# sourceMappingURL=index.js.map
+ //# sourceMappingURL=index.js.map
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;AAOA,eAAe,iBAAA,GAAoB;AACjC,EAAA,MAAM,SAAA,GAAY,KAAK,IAAA,CAAK,EAAA,CAAG,SAAQ,EAAG,QAAA,EAAU,UAAU,kBAAkB,CAAA;AAChF,EAAA,MAAM,IAAI,KAAA,CAAM,SAAA,EAAW,EAAE,SAAA,EAAW,MAAM,CAAA;AAC9C,EAAA,OAAO,SAAA;AACT;AAGA,eAAe,kBAAA,CAAmB,QAAkB,SAAA,EAA6C;AAC/F,EAAA,MAAM,KAAA,GAAQ,MAAM,aAAA,CAAc,IAAA,CAAK;AAAA,IACrC,KAAA,EAAO,eAAe,SAAS,CAAA;AAAA,IAC/B,QAAA,EAAU,MAAM,iBAAA;AAAkB,GACnC,CAAA;AAGD,EAAA,MAAM,UAAA,GAAa,KAAA,CAAM,KAAA,CAAM,MAAM,CAAA;AAErC,EAAA,MAAM,aAAa,EAAC;AACpB,EAAA,WAAA,MAAiB,UAAU,UAAA,EAAY;AAGrC,IAAA,UAAA,CAAW,IAAA,CAAK,GAAG,MAAA,CAAO,GAAA,CAAI,eAAa,KAAA,CAAM,IAAA,CAAK,SAAS,CAAC,CAAC,CAAA;AAAA,EACnE;AAEA,EAAA,IAAI,WAAW,MAAA,KAAW,CAAA,EAAG,MAAM,IAAI,MAAM,yBAAyB,CAAA;AAEtE,EAAA,OAAO;AAAA,IACL,UAAA,EAAY;AAAA,GACd;AACF;AAGA,IAAM,0BAA0B,2BAAA,CAA4B;AAAA,EAC1D,mBAAA,EAAqB;AAAA,IACnB,mBAAA,EAAqB;AAAA,MACnB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,mBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,eAAe,CAAA;AAAA,MACnD;AAAA,KACF;AAAA,IACA,kBAAA,EAAoB;AAAA,MAClB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,kBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,cAAc,CAAA;AAAA,MAClD;AAAA;AACF;AAEJ,CAAC,CAAA;AAGD,IAAM,oBAAoB,cAAA,CAAe;AAAA,EACvC,mBAAA,EAAqB;AAAA,IACnB,mBAAA,EAAqB;AAAA,MACnB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,mBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,eAAe,CAAA;AAAA,MACnD;AAAA,KACF;AAAA,IACA,kBAAA,EAAoB;AAAA,MAClB,oBAAA,EAAsB,IAAA;AAAA,MACtB,QAAA,EAAU,WAAA;AAAA,MACV,OAAA,EAAS,kBAAA;AAAA,MACT,oBAAA,EAAsB,GAAA;AAAA,MACtB,qBAAA,EAAuB,IAAA;AAAA,MACvB,MAAM,OAAA,CAAQ,EAAE,MAAA,EAAO,EAAG;AACxB,QAAA,OAAO,kBAAA,CAAmB,QAAQ,cAAc,CAAA;AAAA,MAClD;AAAA;AACF;AAEJ,CAAC,CAAA;AAEM,IAAM,YAAY,MAAA,CAAO,MAAA,CAAO,iBAAA,CAAkB,kBAAA,CAAmB,mBAAmB,CAAA,EAAG;AAAA,EAChG,KAAA,EAAO,iBAAA,CAAkB,kBAAA,CAAmB,CAAA,iBAAA,CAAmB,CAAA;AAAA,EAC/D,IAAA,EAAM,iBAAA,CAAkB,kBAAA,CAAmB,CAAA,gBAAA,CAAkB,CAAA;AAAA,EAC7D,WAAA,EAAa,uBAAA,CAAwB,kBAAA,CAAmB,CAAA,iBAAA,CAAmB,CAAA;AAAA,EAC3E,UAAA,EAAY,uBAAA,CAAwB,kBAAA,CAAmB,CAAA,gBAAA,CAAkB;AAC3E,CAAC","file":"index.js","sourcesContent":["import fsp from 'node:fs/promises';\nimport os from 'node:os';\nimport path from 'node:path';\nimport { customProvider } from 'ai';\nimport { experimental_customProvider } from 'ai-v4';\nimport { FlagEmbedding, EmbeddingModel } from 'fastembed';\n\nasync function getModelCachePath() {\n const cachePath = path.join(os.homedir(), '.cache', 'mastra', 'fastembed-models');\n await fsp.mkdir(cachePath, { recursive: true });\n return cachePath;\n}\n\n// Shared function to generate embeddings using fastembed\nasync function generateEmbeddings(values: string[], modelType: 'BGESmallENV15' | 'BGEBaseENV15') {\n const model = await FlagEmbedding.init({\n model: EmbeddingModel[modelType],\n cacheDir: await getModelCachePath(),\n });\n\n // model.embed() returns an AsyncGenerator that processes texts in batches (default size 256)\n const embeddings = model.embed(values);\n\n const allResults = [];\n for await (const result of embeddings) {\n // result is an array of embeddings, one for each text in the batch\n // We convert each Float32Array embedding to a regular number array\n allResults.push(...result.map(embedding => Array.from(embedding)));\n }\n\n if 
(allResults.length === 0) throw new Error('No embeddings generated');\n\n return {\n embeddings: allResults,\n };\n}\n\n// Legacy v1 provider for backwards compatibility\nconst fastEmbedLegacyProvider = experimental_customProvider({\n textEmbeddingModels: {\n 'bge-small-en-v1.5': {\n specificationVersion: 'v1',\n provider: 'fastembed',\n modelId: 'bge-small-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGESmallENV15');\n },\n },\n 'bge-base-en-v1.5': {\n specificationVersion: 'v1',\n provider: 'fastembed',\n modelId: 'bge-base-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGEBaseENV15');\n },\n },\n },\n});\n\n// V2 provider for AI SDK v5 compatibility\nconst fastEmbedProvider = customProvider({\n textEmbeddingModels: {\n 'bge-small-en-v1.5': {\n specificationVersion: 'v2',\n provider: 'fastembed',\n modelId: 'bge-small-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGESmallENV15');\n },\n },\n 'bge-base-en-v1.5': {\n specificationVersion: 'v2',\n provider: 'fastembed',\n modelId: 'bge-base-en-v1.5',\n maxEmbeddingsPerCall: 256,\n supportsParallelCalls: true,\n async doEmbed({ values }) {\n return generateEmbeddings(values, 'BGEBaseENV15');\n },\n },\n },\n});\n\nexport const fastembed = Object.assign(fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`), {\n small: fastEmbedProvider.textEmbeddingModel(`bge-small-en-v1.5`),\n base: fastEmbedProvider.textEmbeddingModel(`bge-base-en-v1.5`),\n smallLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-small-en-v1.5`),\n baseLegacy: fastEmbedLegacyProvider.textEmbeddingModel(`bge-base-en-v1.5`),\n});\n"]}
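Both builds above resolve the ONNX model cache to `~/.cache/mastra/fastembed-models` via `getModelCachePath()`, so the first embedding call downloads the model and later calls reuse it. A hedged sketch for inspecting or clearing that cache (the path is taken from the bundled code; the package itself exposes no cleanup API):

```typescript
import fsp from 'node:fs/promises';
import os from 'node:os';
import path from 'node:path';

// Same location the bundled getModelCachePath() uses.
const cacheDir = path.join(os.homedir(), '.cache', 'mastra', 'fastembed-models');

// List any downloaded model files (empty or missing until the first embed call).
const entries = await fsp.readdir(cacheDir).catch(() => []);
console.log(entries);

// To force a re-download, remove the directory; it is recreated on the next embed call.
// await fsp.rm(cacheDir, { recursive: true, force: true });
```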
package/package.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "name": "@mastra/fastembed",
+   "version": "0.0.0-1.x-tester-20251106055847",
+   "license": "Apache-2.0",
+   "type": "module",
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "exports": {
+     ".": {
+       "import": {
+         "types": "./dist/index.d.ts",
+         "default": "./dist/index.js"
+       },
+       "require": {
+         "types": "./dist/index.d.ts",
+         "default": "./dist/index.cjs"
+       }
+     },
+     "./package.json": "./package.json"
+   },
+   "files": [
+     "dist",
+     "CHANGELOG.md"
+   ],
+   "homepage": "https://mastra.ai",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/mastra-ai/mastra.git",
+     "directory": "packages/fastembed"
+   },
+   "bugs": {
+     "url": "https://github.com/mastra-ai/mastra/issues"
+   },
+   "dependencies": {
+     "ai": "^5.0.76",
+     "ai-v4": "npm:ai@4.3.19",
+     "@ai-sdk/provider": "^2.0.0",
+     "fastembed": "^1.14.4"
+   },
+   "devDependencies": {
+     "@types/node": "^20.19.0",
+     "tsup": "^8.5.0",
+     "typescript": "^5.8.3",
+     "vitest": "^3.2.4",
+     "@internal/types-builder": "0.0.0-1.x-tester-20251106055847",
+     "@internal/lint": "0.0.0-1.x-tester-20251106055847"
+   },
+   "engines": {
+     "node": ">=22.13.0"
+   },
+   "scripts": {
+     "build": "tsup --silent --config tsup.config.ts",
+     "build:watch": "tsup --watch --silent --config tsup.config.ts",
+     "test": "vitest run"
+   }
+ }
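Per the `exports` map above, the package resolves to `dist/index.js` for `import` and `dist/index.cjs` for `require`, with shared type declarations. A minimal consumption sketch covering both entry points (assuming Node >= 22.13.0, as required by `engines`):

```typescript
// ESM (resolves to dist/index.js)
import { fastembed } from '@mastra/fastembed';

// CJS (resolves to dist/index.cjs)
// const { fastembed } = require('@mastra/fastembed');

console.log(fastembed.modelId); // "bge-small-en-v1.5"
```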