@kernl-sdk/ai 0.2.6 → 0.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,5 @@
 
 
- > @kernl-sdk/ai@0.2.5 build /Users/andjones/Documents/projects/kernl/packages/providers/ai
+ > @kernl-sdk/ai@0.2.6 build /Users/andjones/Documents/projects/kernl/packages/providers/ai
  > tsc && tsc-alias --resolve-full-paths
 
@@ -1,4 +1,4 @@
 
- > @kernl-sdk/ai@0.1.4 check-types /Users/andjones/Documents/projects/kernl/packages/_ai
+ > @kernl-sdk/ai@0.2.6 check-types /Users/andjones/Documents/projects/kernl/packages/providers/ai
  > tsc --noEmit
 
package/CHANGELOG.md CHANGED
@@ -1,5 +1,12 @@
  # @kernl/ai
 
+ ## 0.2.7
+
+ ### Patch Changes
+
+ - Updated dependencies [a7d6138]
+   - @kernl-sdk/retrieval@0.1.0
+
  ## 0.2.6
 
  ### Patch Changes
@@ -0,0 +1,2 @@
+ export {};
+ //# sourceMappingURL=embed-integration.test.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"embed-integration.test.d.ts","sourceRoot":"","sources":["../../src/__tests__/embed-integration.test.ts"],"names":[],"mappings":""}
@@ -0,0 +1,70 @@
+ import { describe, it, expect } from "vitest";
+ import { embed, embedMany } from "@kernl-sdk/retrieval";
+ import { openai } from "../providers/openai.js";
+ import { google } from "../providers/google.js";
+ // Force module evaluation by referencing exports
+ void openai;
+ void google;
+ // Integration tests for embedding functions with real APIs.
+ // Skip these in CI if API keys are not available.
+ describe.skipIf(!process.env.OPENAI_API_KEY)("embed integration (OpenAI)", () => {
+     it("should embed single text with OpenAI", async () => {
+         const result = await embed({
+             model: "openai/text-embedding-3-small",
+             text: "The quick brown fox jumps over the lazy dog",
+         });
+         expect(result.embedding).toBeDefined();
+         expect(Array.isArray(result.embedding)).toBe(true);
+         expect(result.embedding.length).toBe(1536); // text-embedding-3-small dimensions
+         expect(result.embedding[0]).toBeTypeOf("number");
+     });
+     it("should embed multiple texts with OpenAI", async () => {
+         const result = await embedMany({
+             model: "openai/text-embedding-3-small",
+             texts: [
+                 "Hello world",
+                 "Machine learning is fascinating",
+                 "TypeScript is great",
+             ],
+         });
+         expect(result.embeddings).toBeDefined();
+         expect(result.embeddings.length).toBe(3);
+         expect(result.embeddings[0].length).toBe(1536);
+         expect(result.embeddings[1].length).toBe(1536);
+         expect(result.embeddings[2].length).toBe(1536);
+     });
+     it("should handle concurrency with OpenAI", async () => {
+         const texts = Array.from({ length: 10 }, (_, i) => `Text number ${i}`);
+         const result = await embedMany({
+             model: "openai/text-embedding-3-small",
+             texts,
+             concurrency: 3,
+         });
+         expect(result.embeddings.length).toBe(10);
+         result.embeddings.forEach((embedding) => {
+             expect(embedding.length).toBe(1536);
+         });
+     });
+     it("should retry on failure", async () => {
+         // This test might be flaky, but demonstrates retry behavior
+         const result = await embed({
+             model: "openai/text-embedding-3-small",
+             text: "Test retry behavior",
+             retries: 2,
+         });
+         expect(result.embedding).toBeDefined();
+         expect(result.embedding.length).toBe(1536);
+     });
+ });
+ describe.skipIf(!process.env.GOOGLE_GENERATIVE_AI_API_KEY)("embed integration (Google)", () => {
+     it("should embed single text with Google", async () => {
+         const result = await embed({
+             model: "google/text-embedding-004",
+             text: "The quick brown fox jumps over the lazy dog",
+         });
+         expect(result.embedding).toBeDefined();
+         expect(Array.isArray(result.embedding)).toBe(true);
+         expect(result.embedding.length).toBeGreaterThan(0);
+         expect(result.embedding[0]).toBeTypeOf("number");
+     });
+ });
@@ -0,0 +1,12 @@
+ import type { Codec } from "@kernl-sdk/shared/lib";
+ import type { EmbeddingModelRequestSettings } from "@kernl-sdk/protocol";
+ import type { SharedV3ProviderOptions } from "@ai-sdk/provider";
+ /**
+  * AI SDK embedding call options extracted from settings.
+  */
+ export interface AISdkEmbeddingOptions {
+     dimensions?: number;
+     providerOptions?: SharedV3ProviderOptions;
+ }
+ export declare const EMBEDDING_SETTINGS: Codec<EmbeddingModelRequestSettings, AISdkEmbeddingOptions>;
+ //# sourceMappingURL=embedding.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"embedding.d.ts","sourceRoot":"","sources":["../../src/convert/embedding.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,uBAAuB,CAAC;AACnD,OAAO,KAAK,EAAE,6BAA6B,EAAE,MAAM,qBAAqB,CAAC;AACzE,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,kBAAkB,CAAC;AAEhE;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,eAAe,CAAC,EAAE,uBAAuB,CAAC;CAC3C;AAED,eAAO,MAAM,kBAAkB,EAAE,KAAK,CACpC,6BAA6B,EAC7B,qBAAqB,CAkBtB,CAAC"}
@@ -0,0 +1,16 @@
+ export const EMBEDDING_SETTINGS = {
+     encode: (settings) => {
+         const options = {};
+         if (settings.dimensions !== undefined) {
+             options.dimensions = settings.dimensions;
+         }
+         if (settings.providerOptions !== undefined) {
+             options.providerOptions =
+                 settings.providerOptions;
+         }
+         return options;
+     },
+     decode: () => {
+         throw new Error("codec:unimplemented");
+     },
+ };
@@ -0,0 +1,16 @@
+ import type { EmbeddingModelV3 } from "@ai-sdk/provider";
+ import type { EmbeddingModel, EmbeddingModelRequest, EmbeddingModelResponse } from "@kernl-sdk/protocol";
+ /**
+  * EmbeddingModel adapter for the AI SDK EmbeddingModelV3.
+  */
+ export declare class AISDKEmbeddingModel<TValue = string> implements EmbeddingModel<TValue> {
+     private model;
+     readonly spec: "1.0";
+     readonly provider: string;
+     readonly modelId: string;
+     readonly maxEmbeddingsPerCall?: number;
+     readonly supportsParallelCalls?: boolean;
+     constructor(model: EmbeddingModelV3<TValue>);
+     embed(request: EmbeddingModelRequest<TValue>): Promise<EmbeddingModelResponse>;
+ }
+ //# sourceMappingURL=embedding-model.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"embedding-model.d.ts","sourceRoot":"","sources":["../src/embedding-model.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,kBAAkB,CAAC;AACzD,OAAO,KAAK,EACV,cAAc,EACd,qBAAqB,EACrB,sBAAsB,EACvB,MAAM,qBAAqB,CAAC;AAI7B;;GAEG;AACH,qBAAa,mBAAmB,CAAC,MAAM,GAAG,MAAM,CAC9C,YAAW,cAAc,CAAC,MAAM,CAAC;IAQrB,OAAO,CAAC,KAAK;IANzB,QAAQ,CAAC,IAAI,EAAG,KAAK,CAAU;IAC/B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,oBAAoB,CAAC,EAAE,MAAM,CAAC;IACvC,QAAQ,CAAC,qBAAqB,CAAC,EAAE,OAAO,CAAC;gBAErB,KAAK,EAAE,gBAAgB,CAAC,MAAM,CAAC;IAa7C,KAAK,CACT,OAAO,EAAE,qBAAqB,CAAC,MAAM,CAAC,GACrC,OAAO,CAAC,sBAAsB,CAAC;CAiBnC"}
@@ -0,0 +1,39 @@
+ import { EMBEDDING_SETTINGS } from "./convert/embedding.js";
+ /**
+  * EmbeddingModel adapter for the AI SDK EmbeddingModelV3.
+  */
+ export class AISDKEmbeddingModel {
+     model;
+     spec = "1.0";
+     provider;
+     modelId;
+     maxEmbeddingsPerCall;
+     supportsParallelCalls;
+     constructor(model) {
+         this.model = model;
+         this.provider = model.provider;
+         this.modelId = model.modelId;
+         // AI SDK supports async values for these, we handle sync case
+         if (typeof model.maxEmbeddingsPerCall === "number") {
+             this.maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;
+         }
+         if (typeof model.supportsParallelCalls === "boolean") {
+             this.supportsParallelCalls = model.supportsParallelCalls;
+         }
+     }
+     async embed(request) {
+         const settings = request.settings
+             ? EMBEDDING_SETTINGS.encode(request.settings)
+             : {};
+         const result = await this.model.doEmbed({
+             values: request.values,
+             abortSignal: request.abort,
+             ...settings,
+         });
+         return {
+             embeddings: result.embeddings,
+             usage: { inputTokens: result.usage?.tokens },
+             providerMetadata: result.providerMetadata,
+         };
+     }
+ }
package/dist/index.d.ts CHANGED
@@ -12,9 +12,11 @@
   * ```
   */
  export { AISDKLanguageModel } from "./language-model.js";
+ export { AISDKEmbeddingModel } from "./embedding-model.js";
  export { MESSAGE } from "./convert/message.js";
  export { TOOL, TOOL_CHOICE } from "./convert/tools.js";
  export { MODEL_SETTINGS } from "./convert/settings.js";
+ export { EMBEDDING_SETTINGS } from "./convert/embedding.js";
  export { MODEL_RESPONSE, WARNING } from "./convert/response.js";
  export { convertStream } from "./convert/stream.js";
  export { UIMessageCodec, historyToUIMessages } from "./convert/ui-message.js";
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAEH,OAAO,EAAE,kBAAkB,EAAE,MAAM,kBAAkB,CAAC;AAGtD,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAC5C,OAAO,EAAE,IAAI,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAE,cAAc,EAAE,MAAM,oBAAoB,CAAC;AACpD,OAAO,EAAE,cAAc,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,cAAc,EAAE,mBAAmB,EAAE,MAAM,sBAAsB,CAAC;AAC3E,OAAO,EAAE,cAAc,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAEH,OAAO,EAAE,kBAAkB,EAAE,MAAM,kBAAkB,CAAC;AACtD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mBAAmB,CAAC;AAGxD,OAAO,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAC5C,OAAO,EAAE,IAAI,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,EAAE,cAAc,EAAE,MAAM,oBAAoB,CAAC;AACpD,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,EAAE,cAAc,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAC7D,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,cAAc,EAAE,mBAAmB,EAAE,MAAM,sBAAsB,CAAC;AAC3E,OAAO,EAAE,cAAc,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC"}
package/dist/index.js CHANGED
@@ -12,10 +12,12 @@
   * ```
   */
  export { AISDKLanguageModel } from "./language-model.js";
+ export { AISDKEmbeddingModel } from "./embedding-model.js";
  // Re-export codecs for custom provider implementations
  export { MESSAGE } from "./convert/message.js";
  export { TOOL, TOOL_CHOICE } from "./convert/tools.js";
  export { MODEL_SETTINGS } from "./convert/settings.js";
+ export { EMBEDDING_SETTINGS } from "./convert/embedding.js";
  export { MODEL_RESPONSE, WARNING } from "./convert/response.js";
  export { convertStream } from "./convert/stream.js";
  export { UIMessageCodec, historyToUIMessages } from "./convert/ui-message.js";
@@ -15,3 +15,4 @@ export function anthropic(modelId) {
      const model = createAnthropicModel(modelId);
      return new AISDKLanguageModel(model);
  }
+ // Note: Anthropic does not currently support embeddings
@@ -1 +1 @@
- {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD;;;;;;;;;;GAUG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,MAAM,sBAGrC"}
+ {"version":3,"file":"google.d.ts","sourceRoot":"","sources":["../../src/providers/google.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAIvD;;;;;;;;;;GAUG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,MAAM,sBAGrC"}
@@ -1,5 +1,7 @@
  import { google as createGoogleModel } from "@ai-sdk/google";
  import { AISDKLanguageModel } from "../language-model.js";
+ import { AISDKEmbeddingModel } from "../embedding-model.js";
+ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
  /**
   * Create a kernl-compatible Google Generative AI language model.
   *
@@ -15,3 +17,5 @@ export function google(modelId) {
      const model = createGoogleModel(modelId);
      return new AISDKLanguageModel(model);
  }
+ // Auto-register Google embedding provider
+ registerEmbeddingProvider("google", (id) => new AISDKEmbeddingModel(createGoogleModel.textEmbedding(id)));
@@ -1 +1 @@
- {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/providers/openai.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAEvD;;;;;;;;;;GAUG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,MAAM,sBAGrC"}
+ {"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../src/providers/openai.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAIvD;;;;;;;;;;GAUG;AACH,wBAAgB,MAAM,CAAC,OAAO,EAAE,MAAM,sBAGrC"}
@@ -1,5 +1,7 @@
  import { openai as createOpenAIModel } from "@ai-sdk/openai";
  import { AISDKLanguageModel } from "../language-model.js";
+ import { AISDKEmbeddingModel } from "../embedding-model.js";
+ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
  /**
   * Create a kernl-compatible OpenAI language model.
   *
@@ -15,3 +17,5 @@ export function openai(modelId) {
      const model = createOpenAIModel(modelId);
      return new AISDKLanguageModel(model);
  }
+ // Auto-register OpenAI embedding provider
+ registerEmbeddingProvider("openai", (id) => new AISDKEmbeddingModel(createOpenAIModel.embedding(id)));
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@kernl-sdk/ai",
-   "version": "0.2.6",
+   "version": "0.2.7",
    "description": "Vercel AI SDK adapter for kernl",
    "keywords": [
      "kernl",
@@ -69,7 +69,8 @@
    },
    "dependencies": {
      "@kernl-sdk/protocol": "0.2.5",
-     "@kernl-sdk/shared": "^0.1.6"
+     "@kernl-sdk/shared": "^0.1.6",
+     "@kernl-sdk/retrieval": "0.1.0"
    },
    "scripts": {
      "build": "tsc && tsc-alias --resolve-full-paths",
@@ -0,0 +1,86 @@
+ import { describe, it, expect } from "vitest";
+ import { embed, embedMany } from "@kernl-sdk/retrieval";
+ import { openai } from "../providers/openai";
+ import { google } from "../providers/google";
+
+ // Force module evaluation by referencing exports
+ void openai;
+ void google;
+
+ // Integration tests for embedding functions with real APIs.
+ // Skip these in CI if API keys are not available.
+
+ describe.skipIf(!process.env.OPENAI_API_KEY)("embed integration (OpenAI)", () => {
+   it("should embed single text with OpenAI", async () => {
+     const result = await embed({
+       model: "openai/text-embedding-3-small",
+       text: "The quick brown fox jumps over the lazy dog",
+     });
+
+     expect(result.embedding).toBeDefined();
+     expect(Array.isArray(result.embedding)).toBe(true);
+     expect(result.embedding.length).toBe(1536); // text-embedding-3-small dimensions
+     expect(result.embedding[0]).toBeTypeOf("number");
+   });
+
+   it("should embed multiple texts with OpenAI", async () => {
+     const result = await embedMany({
+       model: "openai/text-embedding-3-small",
+       texts: [
+         "Hello world",
+         "Machine learning is fascinating",
+         "TypeScript is great",
+       ],
+     });
+
+     expect(result.embeddings).toBeDefined();
+     expect(result.embeddings.length).toBe(3);
+     expect(result.embeddings[0].length).toBe(1536);
+     expect(result.embeddings[1].length).toBe(1536);
+     expect(result.embeddings[2].length).toBe(1536);
+   });
+
+   it("should handle concurrency with OpenAI", async () => {
+     const texts = Array.from({ length: 10 }, (_, i) => `Text number ${i}`);
+
+     const result = await embedMany({
+       model: "openai/text-embedding-3-small",
+       texts,
+       concurrency: 3,
+     });
+
+     expect(result.embeddings.length).toBe(10);
+     result.embeddings.forEach((embedding) => {
+       expect(embedding.length).toBe(1536);
+     });
+   });
+
+   it("should retry on failure", async () => {
+     // This test might be flaky, but demonstrates retry behavior
+     const result = await embed({
+       model: "openai/text-embedding-3-small",
+       text: "Test retry behavior",
+       retries: 2,
+     });
+
+     expect(result.embedding).toBeDefined();
+     expect(result.embedding.length).toBe(1536);
+   });
+ });
+
+ describe.skipIf(!process.env.GOOGLE_GENERATIVE_AI_API_KEY)(
+   "embed integration (Google)",
+   () => {
+     it("should embed single text with Google", async () => {
+       const result = await embed({
+         model: "google/text-embedding-004",
+         text: "The quick brown fox jumps over the lazy dog",
+       });
+
+       expect(result.embedding).toBeDefined();
+       expect(Array.isArray(result.embedding)).toBe(true);
+       expect(result.embedding.length).toBeGreaterThan(0);
+       expect(result.embedding[0]).toBeTypeOf("number");
+     });
+   },
+ );
@@ -0,0 +1,33 @@
+ import type { Codec } from "@kernl-sdk/shared/lib";
+ import type { EmbeddingModelRequestSettings } from "@kernl-sdk/protocol";
+ import type { SharedV3ProviderOptions } from "@ai-sdk/provider";
+
+ /**
+  * AI SDK embedding call options extracted from settings.
+  */
+ export interface AISdkEmbeddingOptions {
+   dimensions?: number;
+   providerOptions?: SharedV3ProviderOptions;
+ }
+
+ export const EMBEDDING_SETTINGS: Codec<
+   EmbeddingModelRequestSettings,
+   AISdkEmbeddingOptions
+ > = {
+   encode: (settings: EmbeddingModelRequestSettings) => {
+     const options: AISdkEmbeddingOptions = {};
+
+     if (settings.dimensions !== undefined) {
+       options.dimensions = settings.dimensions;
+     }
+     if (settings.providerOptions !== undefined) {
+       options.providerOptions =
+         settings.providerOptions as SharedV3ProviderOptions;
+     }
+
+     return options;
+   },
+   decode: () => {
+     throw new Error("codec:unimplemented");
+   },
+ };
@@ -0,0 +1,54 @@
+ import type { EmbeddingModelV3 } from "@ai-sdk/provider";
+ import type {
+   EmbeddingModel,
+   EmbeddingModelRequest,
+   EmbeddingModelResponse,
+ } from "@kernl-sdk/protocol";
+
+ import { EMBEDDING_SETTINGS } from "./convert/embedding";
+
+ /**
+  * EmbeddingModel adapter for the AI SDK EmbeddingModelV3.
+  */
+ export class AISDKEmbeddingModel<TValue = string>
+   implements EmbeddingModel<TValue>
+ {
+   readonly spec = "1.0" as const;
+   readonly provider: string;
+   readonly modelId: string;
+   readonly maxEmbeddingsPerCall?: number;
+   readonly supportsParallelCalls?: boolean;
+
+   constructor(private model: EmbeddingModelV3<TValue>) {
+     this.provider = model.provider;
+     this.modelId = model.modelId;
+
+     // AI SDK supports async values for these, we handle sync case
+     if (typeof model.maxEmbeddingsPerCall === "number") {
+       this.maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;
+     }
+     if (typeof model.supportsParallelCalls === "boolean") {
+       this.supportsParallelCalls = model.supportsParallelCalls;
+     }
+   }
+
+   async embed(
+     request: EmbeddingModelRequest<TValue>,
+   ): Promise<EmbeddingModelResponse> {
+     const settings = request.settings
+       ? EMBEDDING_SETTINGS.encode(request.settings)
+       : {};
+
+     const result = await this.model.doEmbed({
+       values: request.values,
+       abortSignal: request.abort,
+       ...settings,
+     });
+
+     return {
+       embeddings: result.embeddings,
+       usage: { inputTokens: result.usage?.tokens },
+       providerMetadata: result.providerMetadata,
+     };
+   }
+ }
package/src/index.ts CHANGED
@@ -13,11 +13,13 @@
   */
 
  export { AISDKLanguageModel } from "./language-model";
+ export { AISDKEmbeddingModel } from "./embedding-model";
 
  // Re-export codecs for custom provider implementations
  export { MESSAGE } from "./convert/message";
  export { TOOL, TOOL_CHOICE } from "./convert/tools";
  export { MODEL_SETTINGS } from "./convert/settings";
+ export { EMBEDDING_SETTINGS } from "./convert/embedding";
  export { MODEL_RESPONSE, WARNING } from "./convert/response";
  export { convertStream } from "./convert/stream";
  export { UIMessageCodec, historyToUIMessages } from "./convert/ui-message";
@@ -16,3 +16,5 @@ export function anthropic(modelId: string) {
    const model = createAnthropicModel(modelId);
    return new AISDKLanguageModel(model);
  }
+
+ // Note: Anthropic does not currently support embeddings
@@ -1,5 +1,7 @@
  import { google as createGoogleModel } from "@ai-sdk/google";
  import { AISDKLanguageModel } from "../language-model";
+ import { AISDKEmbeddingModel } from "../embedding-model";
+ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
 
  /**
   * Create a kernl-compatible Google Generative AI language model.
@@ -16,3 +18,8 @@ export function google(modelId: string) {
    const model = createGoogleModel(modelId);
    return new AISDKLanguageModel(model);
  }
+
+ // Auto-register Google embedding provider
+ registerEmbeddingProvider("google", (id) =>
+   new AISDKEmbeddingModel(createGoogleModel.textEmbedding(id)),
+ );
@@ -1,5 +1,7 @@
  import { openai as createOpenAIModel } from "@ai-sdk/openai";
  import { AISDKLanguageModel } from "../language-model";
+ import { AISDKEmbeddingModel } from "../embedding-model";
+ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
 
  /**
   * Create a kernl-compatible OpenAI language model.
@@ -16,3 +18,8 @@ export function openai(modelId: string) {
    const model = createOpenAIModel(modelId);
    return new AISDKLanguageModel(model);
  }
+
+ // Auto-register OpenAI embedding provider
+ registerEmbeddingProvider("openai", (id) =>
+   new AISDKEmbeddingModel(createOpenAIModel.embedding(id)),
+ );
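
For context on what this release enables, here is a minimal usage sketch mirroring the integration tests added in this version. It assumes the `openai` provider factory is re-exported from the package root (the tests import it from "../providers/openai"); importing it runs the module and auto-registers the OpenAI embedding provider with @kernl-sdk/retrieval, after which `embed` and `embedMany` can resolve "openai/..." model identifiers. This is an illustrative sketch, not part of the published package.

// Usage sketch (assumptions noted above). TypeScript, ESM with top-level await.
import { embed, embedMany } from "@kernl-sdk/retrieval";
import { openai } from "@kernl-sdk/ai"; // assumed export path; tests use "../providers/openai"

void openai; // force module evaluation so the embedding provider registers itself

const single = await embed({
  model: "openai/text-embedding-3-small",
  text: "The quick brown fox jumps over the lazy dog",
});
console.log(single.embedding.length); // 1536 for text-embedding-3-small

const batch = await embedMany({
  model: "openai/text-embedding-3-small",
  texts: ["Hello world", "TypeScript is great"],
  concurrency: 3, // parallelism cap, as exercised by the test suite
});
console.log(batch.embeddings.length); // 2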