@kernl-sdk/ai 0.2.10 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +1 -1
- package/CHANGELOG.md +23 -0
- package/dist/__tests__/integration.test.js +342 -1
- package/dist/convert/response.d.ts +20 -2
- package/dist/convert/response.d.ts.map +1 -1
- package/dist/convert/response.js +22 -0
- package/dist/language-model.d.ts.map +1 -1
- package/dist/language-model.js +5 -1
- package/dist/providers/anthropic.js +2 -2
- package/dist/providers/google.js +3 -3
- package/dist/providers/openai.js +3 -3
- package/package.json +4 -4
- package/src/__tests__/integration.test.ts +400 -1
- package/src/convert/response.ts +41 -0
- package/src/language-model.ts +5 -1
- package/src/providers/anthropic.ts +2 -2
- package/src/providers/google.ts +5 -4
- package/src/providers/openai.ts +5 -4
- package/.turbo/turbo-check-types.log +0 -4
- package/dist/convert/messages.d.ts +0 -4
- package/dist/convert/messages.d.ts.map +0 -1
- package/dist/convert/messages.js +0 -130
- package/dist/error.d.ts +0 -8
- package/dist/error.d.ts.map +0 -1
- package/dist/error.js +0 -15
- package/dist/types.d.ts +0 -1
- package/dist/types.d.ts.map +0 -1
- package/dist/types.js +0 -1
@@ -1,6 +1,7 @@
 import { describe, it, expect, beforeAll } from "vitest";
 import { openai } from "@ai-sdk/openai";
 import { anthropic } from "@ai-sdk/anthropic";
+import { google } from "@ai-sdk/google";
 import { IN_PROGRESS } from "@kernl-sdk/protocol";
 
 import { AISDKLanguageModel } from "../language-model";
@@ -11,12 +12,28 @@ import { AISDKLanguageModel } from "../language-model";
  * These tests require API keys to be set:
  * - OPENAI_API_KEY for OpenAI tests
  * - ANTHROPIC_API_KEY for Anthropic tests
+ * - GOOGLE_GENERATIVE_AI_API_KEY for Google tests
  *
- * Run with: OPENAI_API_KEY=your-key ANTHROPIC_API_KEY=your-key pnpm test:run
+ * Run with: OPENAI_API_KEY=your-key ANTHROPIC_API_KEY=your-key GOOGLE_GENERATIVE_AI_API_KEY=your-key pnpm test:run
  */
 
 const SKIP_OPENAI_TESTS = !process.env.OPENAI_API_KEY;
 const SKIP_ANTHROPIC_TESTS = !process.env.ANTHROPIC_API_KEY;
+const SKIP_GOOGLE_TESTS = !process.env.GOOGLE_GENERATIVE_AI_API_KEY;
+
+/**
+ * Shared JSON schema for structured output tests.
+ * Extracts a person's name and age from text.
+ */
+const PERSON_SCHEMA = {
+  type: "object" as const,
+  properties: {
+    name: { type: "string" as const, description: "The person's name" },
+    age: { type: "number" as const, description: "The person's age in years" },
+  },
+  required: ["name", "age"],
+  additionalProperties: false,
+};
 
 describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
   let gpt41: AISDKLanguageModel;
@@ -741,6 +758,109 @@ describe.skipIf(SKIP_OPENAI_TESTS)("AISDKLanguageModel - OpenAI", () => {
       ).rejects.toThrow(/max_output_tokens/);
     });
   });
+
+  describe("structured output", () => {
+    it("should generate structured JSON output with responseType", async () => {
+      const response = await gpt41.generate({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [
+              {
+                kind: "text",
+                text: "Extract the person info: John Smith is 42 years old.",
+              },
+            ],
+          },
+        ],
+        responseType: {
+          kind: "json",
+          schema: PERSON_SCHEMA,
+          name: "person",
+          description: "A person with name and age",
+        },
+        settings: {
+          maxTokens: 100,
+          temperature: 0,
+        },
+      });
+
+      expect(response.content).toBeDefined();
+      expect(response.content.length).toBeGreaterThan(0);
+
+      // Find the assistant message with JSON output
+      const messages = response.content.filter(
+        (item) => item.kind === "message" && item.role === "assistant",
+      );
+      expect(messages.length).toBeGreaterThan(0);
+
+      const msg = messages[0] as any;
+      const textContent = msg.content.find((c: any) => c.kind === "text");
+      expect(textContent).toBeDefined();
+
+      // Parse and validate the JSON output
+      const parsed = JSON.parse(textContent.text);
+      expect(parsed.name).toBe("John Smith");
+      expect(parsed.age).toBe(42);
+    });
+
+    it("should stream structured JSON output with responseType", async () => {
+      const events = [];
+
+      for await (const event of gpt41.stream({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [
+              {
+                kind: "text",
+                text: "Extract the person info: Alice Wong is 28 years old.",
+              },
+            ],
+          },
+        ],
+        responseType: {
+          kind: "json",
+          schema: PERSON_SCHEMA,
+          name: "person",
+          description: "A person with name and age",
+        },
+        settings: {
+          maxTokens: 100,
+          temperature: 0,
+        },
+      })) {
+        events.push(event);
+      }
+
+      expect(events.length).toBeGreaterThan(0);
+
+      // Should have text-delta events for streaming JSON
+      const textDeltas = events.filter((e) => e.kind === "text-delta");
+      expect(textDeltas.length).toBeGreaterThan(0);
+
+      // Should have a complete message with the JSON
+      const messages = events.filter((e) => e.kind === "message");
+      expect(messages.length).toBeGreaterThan(0);
+
+      const msg = messages[0] as any;
+      const textContent = msg.content.find((c: any) => c.kind === "text");
+      expect(textContent).toBeDefined();
+
+      // Parse and validate the JSON output
+      const parsed = JSON.parse(textContent.text);
+      expect(parsed.name).toBe("Alice Wong");
+      expect(parsed.age).toBe(28);
+
+      // Should have finish event
+      const finishEvents = events.filter((e) => e.kind === "finish");
+      expect(finishEvents.length).toBe(1);
+    });
+  });
 });
 
 describe.skipIf(SKIP_ANTHROPIC_TESTS)("AISDKLanguageModel - Anthropic", () => {
@@ -887,4 +1007,283 @@ describe.skipIf(SKIP_ANTHROPIC_TESTS)("AISDKLanguageModel - Anthropic", () => {
       expect(finishEvents.length).toBe(1);
     });
   });
+
+  describe("structured output", () => {
+    it("should generate structured JSON output with responseType", async () => {
+      const response = await claude.generate({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [
+              {
+                kind: "text",
+                text: "Extract the person info: Maria Garcia is 35 years old.",
+              },
+            ],
+          },
+        ],
+        responseType: {
+          kind: "json",
+          schema: PERSON_SCHEMA,
+          name: "person",
+          description: "A person with name and age",
+        },
+        settings: {
+          maxTokens: 100,
+          temperature: 0,
+        },
+      });
+
+      expect(response.content).toBeDefined();
+      expect(response.content.length).toBeGreaterThan(0);
+
+      // Find the assistant message with JSON output
+      const messages = response.content.filter(
+        (item) => item.kind === "message" && item.role === "assistant",
+      );
+      expect(messages.length).toBeGreaterThan(0);
+
+      const msg = messages[0] as any;
+      const textContent = msg.content.find((c: any) => c.kind === "text");
+      expect(textContent).toBeDefined();
+
+      // Parse and validate the JSON output
+      const parsed = JSON.parse(textContent.text);
+      expect(parsed.name).toBe("Maria Garcia");
+      expect(parsed.age).toBe(35);
+    });
+
+    it("should stream structured JSON output with responseType", async () => {
+      const events = [];
+
+      for await (const event of claude.stream({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [
+              {
+                kind: "text",
+                text: "Extract the person info: David Chen is 55 years old.",
+              },
+            ],
+          },
+        ],
+        responseType: {
+          kind: "json",
+          schema: PERSON_SCHEMA,
+          name: "person",
+          description: "A person with name and age",
+        },
+        settings: {
+          maxTokens: 100,
+          temperature: 0,
+        },
+      })) {
+        events.push(event);
+      }
+
+      expect(events.length).toBeGreaterThan(0);
+
+      // Should have text-delta events for streaming JSON
+      const textDeltas = events.filter((e) => e.kind === "text-delta");
+      expect(textDeltas.length).toBeGreaterThan(0);
+
+      // Should have a complete message with the JSON
+      const messages = events.filter((e) => e.kind === "message");
+      expect(messages.length).toBeGreaterThan(0);
+
+      const msg = messages[0] as any;
+      const textContent = msg.content.find((c: any) => c.kind === "text");
+      expect(textContent).toBeDefined();
+
+      // Parse and validate the JSON output
+      const parsed = JSON.parse(textContent.text);
+      expect(parsed.name).toBe("David Chen");
+      expect(parsed.age).toBe(55);
+
+      // Should have finish event
+      const finishEvents = events.filter((e) => e.kind === "finish");
+      expect(finishEvents.length).toBe(1);
+    });
+  });
+});
+
+describe.skipIf(SKIP_GOOGLE_TESTS)("AISDKLanguageModel - Google", () => {
+  let gemini: AISDKLanguageModel;
+
+  beforeAll(() => {
+    gemini = new AISDKLanguageModel(google("gemini-2.5-flash-lite"));
+  });
+
+  describe("generate", () => {
+    it("should generate a simple text response", async () => {
+      const response = await gemini.generate({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [
+              { kind: "text", text: "Say 'Hello, World!' and nothing else." },
+            ],
+          },
+        ],
+        settings: {
+          maxTokens: 50,
+          temperature: 0,
+        },
+      });
+
+      expect(response.content).toBeDefined();
+      expect(response.content.length).toBeGreaterThan(0);
+      expect(response.usage).toBeDefined();
+      expect(response.usage.totalTokens).toBeGreaterThan(0);
+
+      const messages = response.content.filter(
+        (item) => item.kind === "message",
+      );
+      expect(messages.length).toBeGreaterThan(0);
+    });
+  });
+
+  describe("stream", () => {
+    it("should stream text responses", async () => {
+      const events = [];
+
+      for await (const event of gemini.stream({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [{ kind: "text", text: "Count to 5" }],
+          },
+        ],
+        settings: {
+          maxTokens: 50,
+          temperature: 0,
+        },
+      })) {
+        events.push(event);
+      }
+
+      expect(events.length).toBeGreaterThan(0);
+
+      // Should have at least one finish event
+      const finishEvents = events.filter((e) => e.kind === "finish");
+      expect(finishEvents.length).toBe(1);
+
+      // Should have usage information
+      const finishEvent = finishEvents[0] as any;
+      expect(finishEvent.usage).toBeDefined();
+      expect(finishEvent.usage.totalTokens).toBeGreaterThan(0);
+    });
+  });
+
+  describe("structured output", () => {
+    it("should generate structured JSON output with responseType", async () => {
+      const response = await gemini.generate({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [
+              {
+                kind: "text",
+                text: "Extract the person info: Kenji Tanaka is 29 years old.",
+              },
+            ],
+          },
+        ],
+        responseType: {
+          kind: "json",
+          schema: PERSON_SCHEMA,
+          name: "person",
+          description: "A person with name and age",
+        },
+        settings: {
+          maxTokens: 100,
+          temperature: 0,
+        },
+      });
+
+      expect(response.content).toBeDefined();
+      expect(response.content.length).toBeGreaterThan(0);
+
+      // Find the assistant message with JSON output
+      const messages = response.content.filter(
+        (item) => item.kind === "message" && item.role === "assistant",
+      );
+      expect(messages.length).toBeGreaterThan(0);
+
+      const msg = messages[0] as any;
+      const textContent = msg.content.find((c: any) => c.kind === "text");
+      expect(textContent).toBeDefined();
+
+      // Parse and validate the JSON output
+      const parsed = JSON.parse(textContent.text);
+      expect(parsed.name).toBe("Kenji Tanaka");
+      expect(parsed.age).toBe(29);
+    });
+
+    it("should stream structured JSON output with responseType", async () => {
+      const events = [];
+
+      for await (const event of gemini.stream({
+        input: [
+          {
+            kind: "message",
+            role: "user",
+            id: "msg-1",
+            content: [
+              {
+                kind: "text",
+                text: "Extract the person info: Sarah Johnson is 41 years old.",
+              },
+            ],
+          },
+        ],
+        responseType: {
+          kind: "json",
+          schema: PERSON_SCHEMA,
+          name: "person",
+          description: "A person with name and age",
+        },
+        settings: {
+          maxTokens: 100,
+          temperature: 0,
+        },
+      })) {
+        events.push(event);
+      }
+
+      expect(events.length).toBeGreaterThan(0);
+
+      // Should have text-delta events for streaming JSON
+      const textDeltas = events.filter((e) => e.kind === "text-delta");
+      expect(textDeltas.length).toBeGreaterThan(0);
+
+      // Should have a complete message with the JSON
+      const messages = events.filter((e) => e.kind === "message");
+      expect(messages.length).toBeGreaterThan(0);
+
+      const msg = messages[0] as any;
+      const textContent = msg.content.find((c: any) => c.kind === "text");
+      expect(textContent).toBeDefined();
+
+      // Parse and validate the JSON output
+      const parsed = JSON.parse(textContent.text);
+      expect(parsed.name).toBe("Sarah Johnson");
+      expect(parsed.age).toBe(41);
+
+      // Should have finish event
+      const finishEvents = events.filter((e) => e.kind === "finish");
+      expect(finishEvents.length).toBe(1);
+    });
+  });
 });

package/src/convert/response.ts
CHANGED
@@ -5,6 +5,7 @@ import {
   FAILED,
   type LanguageModelResponse,
   type LanguageModelResponseItem,
+  type LanguageModelResponseType,
   type LanguageModelFinishReason,
   type LanguageModelUsage,
   type LanguageModelWarning,
@@ -16,6 +17,7 @@ import type {
   LanguageModelV3FinishReason,
   LanguageModelV3Usage,
   LanguageModelV3CallWarning,
+  JSONSchema7,
 } from "@ai-sdk/provider";
 
 /**
@@ -172,3 +174,42 @@ export const WARNING: Codec<LanguageModelWarning, LanguageModelV3CallWarning> =
     }
   },
 };
+
+/**
+ * AI SDK response format type.
+ *
+ * Maps to the `responseFormat` parameter in AI SDK's doGenerate/doStream.
+ */
+export interface AISdkResponseFormat {
+  type: "json";
+  schema?: JSONSchema7;
+  name?: string;
+  description?: string;
+}
+
+/**
+ * Codec for converting protocol responseType to AI SDK responseFormat.
+ *
+ * - `kind: "text"` or undefined → undefined (AI SDK defaults to text)
+ * - `kind: "json"` → `{ type: "json", schema, name, description }`
+ */
+export const RESPONSE_FORMAT: Codec<
+  LanguageModelResponseType | undefined,
+  AISdkResponseFormat | undefined
+> = {
+  encode: (responseType) => {
+    if (!responseType || responseType.kind === "text") {
+      return undefined;
+    }
+
+    return {
+      type: "json",
+      schema: responseType.schema,
+      name: responseType.name,
+      description: responseType.description,
+    };
+  },
+  decode: () => {
+    throw new Error("codec:unimplemented");
+  },
+};

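A minimal sketch of how the new codec maps protocol response types to the AI SDK format (illustrative only; it uses the relative import path seen in language-model.ts below, and the inline schema is an assumption modeled on PERSON_SCHEMA from the tests):

import { RESPONSE_FORMAT } from "./convert/response";

// kind: "text", or no responseType at all, encodes to undefined, so the
// AI SDK keeps its default plain-text output.
const textFormat = RESPONSE_FORMAT.encode({ kind: "text" }); // undefined

// kind: "json" passes schema, name and description straight through as the
// AI SDK responseFormat value.
const jsonFormat = RESPONSE_FORMAT.encode({
  kind: "json",
  schema: {
    type: "object",
    properties: { name: { type: "string" }, age: { type: "number" } },
    required: ["name", "age"],
  },
  name: "person",
  description: "A person with name and age",
});
// jsonFormat = { type: "json", schema: { ... }, name: "person", description: "A person with name and age" }
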
package/src/language-model.ts
CHANGED
@@ -11,7 +11,7 @@ import { message, reasoning } from "@kernl-sdk/protocol";
 import { MESSAGE } from "./convert/message";
 import { TOOL } from "./convert/tools";
 import { MODEL_SETTINGS } from "./convert/settings";
-import { MODEL_RESPONSE } from "./convert/response";
+import { MODEL_RESPONSE, RESPONSE_FORMAT } from "./convert/response";
 import { convertStream } from "./convert/stream";
 
 /**
@@ -36,11 +36,13 @@ export class AISDKLanguageModel implements LanguageModel {
     const messages = request.input.map(MESSAGE.encode);
     const tools = request.tools ? request.tools.map(TOOL.encode) : undefined;
     const settings = MODEL_SETTINGS.encode(request.settings);
+    const responseFormat = RESPONSE_FORMAT.encode(request.responseType);
 
     const result = await this.model.doGenerate({
       prompt: messages,
       tools,
       ...settings,
+      responseFormat,
       abortSignal: request.abort,
     });
 
@@ -56,11 +58,13 @@ export class AISDKLanguageModel implements LanguageModel {
     const messages = request.input.map(MESSAGE.encode);
     const tools = request.tools ? request.tools.map(TOOL.encode) : undefined;
     const settings = MODEL_SETTINGS.encode(request.settings);
+    const responseFormat = RESPONSE_FORMAT.encode(request.responseType);
 
     const stream = await this.model.doStream({
       prompt: messages,
       tools,
       ...settings,
+      responseFormat,
       abortSignal: request.abort,
     });
 

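With this wiring, a request's responseType is encoded once per call and forwarded to the underlying AI SDK model as responseFormat, in both generate and stream. A caller-side sketch that mirrors the integration tests above (the package import path and model id are assumptions):

import { openai } from "@ai-sdk/openai";
import { AISDKLanguageModel } from "@kernl-sdk/ai"; // import path is an assumption

const model = new AISDKLanguageModel(openai("gpt-4.1")); // model id is an assumption

const response = await model.generate({
  input: [
    {
      kind: "message",
      role: "user",
      id: "msg-1",
      content: [
        { kind: "text", text: "Extract the person info: John Smith is 42 years old." },
      ],
    },
  ],
  // Encoded by RESPONSE_FORMAT.encode(...) and passed to doGenerate as responseFormat.
  responseType: {
    kind: "json",
    schema: {
      type: "object",
      properties: { name: { type: "string" }, age: { type: "number" } },
      required: ["name", "age"],
    },
    name: "person",
    description: "A person with name and age",
  },
  settings: { maxTokens: 100, temperature: 0 },
});
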
package/src/providers/anthropic.ts
CHANGED

@@ -1,4 +1,4 @@
-import { anthropic as
+import { anthropic as _anthropic } from "@ai-sdk/anthropic";
 import { AISDKLanguageModel } from "../language-model";
 
 /**
@@ -13,7 +13,7 @@ import { AISDKLanguageModel } from "../language-model";
  * ```
  */
 export function anthropic(modelId: string) {
-  const model =
+  const model = _anthropic(modelId);
   return new AISDKLanguageModel(model);
 }
 

package/src/providers/google.ts
CHANGED
@@ -1,4 +1,4 @@
-import { google as
+import { google as _google } from "@ai-sdk/google";
 import { AISDKLanguageModel } from "../language-model";
 import { AISDKEmbeddingModel } from "../embedding-model";
 import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
@@ -15,11 +15,12 @@ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
  * ```
  */
 export function google(modelId: string) {
-  const model =
+  const model = _google(modelId);
   return new AISDKLanguageModel(model);
 }
 
 // Auto-register Google embedding provider
-registerEmbeddingProvider(
-
+registerEmbeddingProvider(
+  "google",
+  (id) => new AISDKEmbeddingModel(_google.textEmbedding(id)),
 );

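For reference, a hedged usage sketch of the provider factory: google(modelId) wraps the @ai-sdk/google model in an AISDKLanguageModel, and importing the module also auto-registers the "google" embedding provider. The top-level import path is an assumption; the request payload mirrors the Google test above:

import { google } from "@kernl-sdk/ai"; // import path is an assumption

const gemini = google("gemini-2.5-flash-lite"); // returns an AISDKLanguageModel

const response = await gemini.generate({
  input: [
    {
      kind: "message",
      role: "user",
      id: "msg-1",
      content: [{ kind: "text", text: "Say 'Hello, World!' and nothing else." }],
    },
  ],
  settings: { maxTokens: 50, temperature: 0 },
});
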
package/src/providers/openai.ts
CHANGED
@@ -1,4 +1,4 @@
-import { openai as
+import { openai as _openai } from "@ai-sdk/openai";
 import { AISDKLanguageModel } from "../language-model";
 import { AISDKEmbeddingModel } from "../embedding-model";
 import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
@@ -15,11 +15,12 @@ import { registerEmbeddingProvider } from "@kernl-sdk/retrieval";
  * ```
  */
 export function openai(modelId: string) {
-  const model =
+  const model = _openai(modelId);
   return new AISDKLanguageModel(model);
 }
 
 // Auto-register OpenAI embedding provider
-registerEmbeddingProvider(
-
+registerEmbeddingProvider(
+  "openai",
+  (id) => new AISDKEmbeddingModel(_openai.embedding(id)),
 );

package/dist/convert/messages.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"messages.d.ts","sourceRoot":"","sources":["../../src/convert/messages.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAChE,OAAO,KAAK,EACV,sBAAsB,EAMvB,MAAM,IAAI,CAAC;AAEZ,eAAO,MAAM,QAAQ,EAAE,KAAK,CAAC,iBAAiB,EAAE,EAAE,sBAAsB,EAAE,CAwIzE,CAAC"}