@ai-sdk/huggingface 1.0.0-beta.8 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +528 -0
- package/dist/index.d.mts +4 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +362 -199
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +243 -70
- package/dist/index.mjs.map +1 -1
- package/package.json +7 -6
package/dist/index.mjs
CHANGED
@@ -20,11 +20,11 @@ import {
   parseProviderOptions,
   postJsonToApi
 } from "@ai-sdk/provider-utils";
-import
+import { z as z2 } from "zod/v4";
 
 // src/huggingface-error.ts
 import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
-import
+import { z } from "zod/v4";
 var huggingfaceErrorDataSchema = z.object({
   error: z.object({
     message: z.string(),
@@ -37,6 +37,45 @@ var huggingfaceFailedResponseHandler = createJsonErrorResponseHandler({
   errorToMessage: (data) => data.error.message
 });
 
+// src/responses/convert-huggingface-responses-usage.ts
+function convertHuggingFaceResponsesUsage(usage) {
+  var _a, _b, _c, _d;
+  if (usage == null) {
+    return {
+      inputTokens: {
+        total: void 0,
+        noCache: void 0,
+        cacheRead: void 0,
+        cacheWrite: void 0
+      },
+      outputTokens: {
+        total: void 0,
+        text: void 0,
+        reasoning: void 0
+      },
+      raw: void 0
+    };
+  }
+  const inputTokens = usage.input_tokens;
+  const outputTokens = usage.output_tokens;
+  const cachedTokens = (_b = (_a = usage.input_tokens_details) == null ? void 0 : _a.cached_tokens) != null ? _b : 0;
+  const reasoningTokens = (_d = (_c = usage.output_tokens_details) == null ? void 0 : _c.reasoning_tokens) != null ? _d : 0;
+  return {
+    inputTokens: {
+      total: inputTokens,
+      noCache: inputTokens - cachedTokens,
+      cacheRead: cachedTokens,
+      cacheWrite: void 0
+    },
+    outputTokens: {
+      total: outputTokens,
+      text: outputTokens - reasoningTokens,
+      reasoning: reasoningTokens
+    },
+    raw: usage
+  };
+}
+
 // src/responses/convert-to-huggingface-responses-messages.ts
 import {
   UnsupportedFunctionalityError
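For orientation, here is how the new converter splits a raw Responses usage payload into the structured shape above. The function is internal to the bundle, so the direct call is shown only for illustration, and the token counts are invented:

```ts
// Hypothetical usage payload; numbers are invented for illustration.
const rawUsage = {
  input_tokens: 120,
  input_tokens_details: { cached_tokens: 80 },
  output_tokens: 50,
  output_tokens_details: { reasoning_tokens: 30 },
  total_tokens: 170,
};

convertHuggingFaceResponsesUsage(rawUsage);
// => {
//   inputTokens:  { total: 120, noCache: 40, cacheRead: 80, cacheWrite: undefined },
//   outputTokens: { total: 50, text: 20, reasoning: 30 },
//   raw: rawUsage
// }
```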
@@ -110,10 +149,7 @@ async function convertToHuggingFaceResponsesMessages({
         break;
       }
       case "tool": {
-        warnings.push({
-          type: "unsupported-setting",
-          setting: "tool messages"
-        });
+        warnings.push({ type: "unsupported", feature: "tool messages" });
         break;
       }
       default: {
@@ -125,24 +161,6 @@ async function convertToHuggingFaceResponsesMessages({
   return { input: messages, warnings };
 }
 
-// src/responses/map-huggingface-responses-finish-reason.ts
-function mapHuggingFaceResponsesFinishReason(finishReason) {
-  switch (finishReason) {
-    case "stop":
-      return "stop";
-    case "length":
-      return "length";
-    case "content_filter":
-      return "content-filter";
-    case "tool_calls":
-      return "tool-calls";
-    case "error":
-      return "error";
-    default:
-      return "unknown";
-  }
-}
-
 // src/responses/huggingface-responses-prepare-tools.ts
 function prepareResponsesTools({
   tools,
@@ -164,10 +182,10 @@ function prepareResponsesTools({
           parameters: tool.inputSchema
         });
         break;
-      case "provider
+      case "provider":
         toolWarnings.push({
-          type: "unsupported
-          tool
+          type: "unsupported",
+          feature: `provider-defined tool ${tool.id}`
         });
         break;
       default: {
@@ -206,6 +224,24 @@ function prepareResponsesTools({
   };
 }
 
+// src/responses/map-huggingface-responses-finish-reason.ts
+function mapHuggingFaceResponsesFinishReason(finishReason) {
+  switch (finishReason) {
+    case "stop":
+      return "stop";
+    case "length":
+      return "length";
+    case "content_filter":
+      return "content-filter";
+    case "tool_calls":
+      return "tool-calls";
+    case "error":
+      return "error";
+    default:
+      return "other";
+  }
+}
+
 // src/responses/huggingface-responses-language-model.ts
 var HuggingFaceResponsesLanguageModel = class {
   constructor(modelId, config) {
@@ -237,25 +273,19 @@ var HuggingFaceResponsesLanguageModel = class {
     var _a, _b;
     const warnings = [];
     if (topK != null) {
-      warnings.push({ type: "unsupported
+      warnings.push({ type: "unsupported", feature: "topK" });
     }
     if (seed != null) {
-      warnings.push({ type: "unsupported
+      warnings.push({ type: "unsupported", feature: "seed" });
     }
     if (presencePenalty != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "presencePenalty"
-      });
+      warnings.push({ type: "unsupported", feature: "presencePenalty" });
     }
     if (frequencyPenalty != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "frequencyPenalty"
-      });
+      warnings.push({ type: "unsupported", feature: "frequencyPenalty" });
     }
     if (stopSequences != null) {
-      warnings.push({ type: "unsupported
+      warnings.push({ type: "unsupported", feature: "stopSequences" });
     }
     const { input, warnings: messageWarnings } = await convertToHuggingFaceResponsesMessages({
       prompt
@@ -296,12 +326,19 @@ var HuggingFaceResponsesLanguageModel = class {
       metadata: huggingfaceOptions == null ? void 0 : huggingfaceOptions.metadata,
       instructions: huggingfaceOptions == null ? void 0 : huggingfaceOptions.instructions,
       ...preparedTools && { tools: preparedTools },
-      ...preparedToolChoice && { tool_choice: preparedToolChoice }
+      ...preparedToolChoice && { tool_choice: preparedToolChoice },
+      ...(huggingfaceOptions == null ? void 0 : huggingfaceOptions.reasoningEffort) != null && {
+        reasoning: {
+          ...(huggingfaceOptions == null ? void 0 : huggingfaceOptions.reasoningEffort) != null && {
+            effort: huggingfaceOptions.reasoningEffort
+          }
+        }
+      }
     };
     return { args: baseArgs, warnings };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g
+    var _a, _b, _c, _d, _e, _f, _g;
     const { args, warnings } = await this.getArgs(options);
     const body = {
       ...args,
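In practice, the new `reasoningEffort` option travels through the standard AI SDK provider-options path and is forwarded into the request body as `reasoning: { effort: ... }` per the change above. A minimal sketch, assuming the usual `providerOptions.huggingface` routing and credentials configured via the environment; the model id, prompt, and effort value are placeholders:

```ts
import { generateText } from 'ai';
import { createHuggingFace } from '@ai-sdk/huggingface';

const huggingface = createHuggingFace();

// Placeholder model id and prompt; 'low' is an illustrative effort value
// (the schema only constrains reasoningEffort to be a string).
const { text } = await generateText({
  model: huggingface('deepseek-ai/DeepSeek-R1'),
  prompt: 'Summarize this changelog entry in one sentence.',
  providerOptions: {
    huggingface: { reasoningEffort: 'low' },
  },
});
```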
@@ -365,6 +402,20 @@ var HuggingFaceResponsesLanguageModel = class {
           }
           break;
         }
+        case "reasoning": {
+          for (const contentPart of part.content) {
+            content.push({
+              type: "reasoning",
+              text: contentPart.text,
+              providerMetadata: {
+                huggingface: {
+                  itemId: part.id
+                }
+              }
+            });
+          }
+          break;
+        }
         case "mcp_call": {
           content.push({
             type: "tool-call",
@@ -378,8 +429,7 @@ var HuggingFaceResponsesLanguageModel = class {
              type: "tool-result",
              toolCallId: part.id,
              toolName: part.name,
-             result: part.output
-             providerExecuted: true
+             result: part.output
            });
          }
          break;
@@ -397,8 +447,7 @@ var HuggingFaceResponsesLanguageModel = class {
              type: "tool-result",
              toolCallId: part.id,
              toolName: "list_tools",
-             result: { tools: part.tools }
-             providerExecuted: true
+             result: { tools: part.tools }
            });
          }
          break;
@@ -427,14 +476,13 @@ var HuggingFaceResponsesLanguageModel = class {
     }
     return {
       content,
-      finishReason:
-
-
-
-
-        outputTokens: (_i = (_h = response.usage) == null ? void 0 : _h.output_tokens) != null ? _i : 0,
-        totalTokens: (_o = (_j = response.usage) == null ? void 0 : _j.total_tokens) != null ? _o : ((_l = (_k = response.usage) == null ? void 0 : _k.input_tokens) != null ? _l : 0) + ((_n = (_m = response.usage) == null ? void 0 : _m.output_tokens) != null ? _n : 0)
+      finishReason: {
+        unified: mapHuggingFaceResponsesFinishReason(
+          (_e = (_d = response.incomplete_details) == null ? void 0 : _d.reason) != null ? _e : "stop"
+        ),
+        raw: (_g = (_f = response.incomplete_details) == null ? void 0 : _f.reason) != null ? _g : void 0
       },
+      usage: convertHuggingFaceResponsesUsage(response.usage),
       request: { body },
       response: {
         id: response.id,
@@ -471,13 +519,12 @@ var HuggingFaceResponsesLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    let finishReason =
-
-
-      inputTokens: void 0,
-      outputTokens: void 0,
-      totalTokens: void 0
+    let finishReason = {
+      unified: "other",
+      raw: void 0
     };
+    let responseId = null;
+    let usage = void 0;
     return {
       stream: response.pipeThrough(
         new TransformStream({
@@ -485,9 +532,12 @@ var HuggingFaceResponsesLanguageModel = class {
            controller.enqueue({ type: "stream-start", warnings });
          },
          transform(chunk, controller) {
-           var _a, _b, _c;
+           var _a, _b, _c, _d;
            if (!chunk.success) {
-             finishReason =
+             finishReason = {
+               unified: "error",
+               raw: void 0
+             };
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
@@ -519,6 +569,16 @@ var HuggingFaceResponsesLanguageModel = class {
                 id: value.item.call_id,
                 toolName: value.item.name
               });
+            } else if (value.item.type === "reasoning") {
+              controller.enqueue({
+                type: "reasoning-start",
+                id: value.item.id,
+                providerMetadata: {
+                  huggingface: {
+                    itemId: value.item.id
+                  }
+                }
+              });
             }
             return;
           }
@@ -552,16 +612,32 @@ var HuggingFaceResponsesLanguageModel = class {
            }
            if (isResponseCompletedChunk(value)) {
              responseId = value.response.id;
-             finishReason =
-
-
+             finishReason = {
+               unified: mapHuggingFaceResponsesFinishReason(
+                 (_b = (_a = value.response.incomplete_details) == null ? void 0 : _a.reason) != null ? _b : "stop"
+               ),
+               raw: (_d = (_c = value.response.incomplete_details) == null ? void 0 : _c.reason) != null ? _d : void 0
+             };
              if (value.response.usage) {
-               usage
-               usage.outputTokens = value.response.usage.output_tokens;
-               usage.totalTokens = (_c = value.response.usage.total_tokens) != null ? _c : value.response.usage.input_tokens + value.response.usage.output_tokens;
+               usage = value.response.usage;
              }
              return;
            }
+           if (isReasoningDeltaChunk(value)) {
+             controller.enqueue({
+               type: "reasoning-delta",
+               id: value.item_id,
+               delta: value.delta
+             });
+             return;
+           }
+           if (isReasoningEndChunk(value)) {
+             controller.enqueue({
+               type: "reasoning-end",
+               id: value.item_id
+             });
+             return;
+           }
            if (isTextDeltaChunk(value)) {
              controller.enqueue({
                type: "text-delta",
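A small sketch of how a consumer of the raw model stream could reassemble reasoning text from these parts; the part shapes mirror the `enqueue` calls above, while the helper itself and the sample data are hypothetical:

```ts
// Hypothetical helper: accumulate reasoning deltas by item id.
type ReasoningStreamPart =
  | { type: 'reasoning-start'; id: string }
  | { type: 'reasoning-delta'; id: string; delta: string }
  | { type: 'reasoning-end'; id: string };

function collectReasoning(parts: ReasoningStreamPart[]): Map<string, string> {
  const byId = new Map<string, string>();
  for (const part of parts) {
    if (part.type === 'reasoning-start') byId.set(part.id, '');
    if (part.type === 'reasoning-delta') {
      byId.set(part.id, (byId.get(part.id) ?? '') + part.delta);
    }
  }
  return byId;
}

// Invented example sequence:
collectReasoning([
  { type: 'reasoning-start', id: 'rs_1' },
  { type: 'reasoning-delta', id: 'rs_1', delta: 'Weighing the options, ' },
  { type: 'reasoning-delta', id: 'rs_1', delta: 'choosing the shorter answer.' },
  { type: 'reasoning-end', id: 'rs_1' },
]); // => Map { 'rs_1' => 'Weighing the options, choosing the shorter answer.' }
```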
@@ -575,7 +651,7 @@ var HuggingFaceResponsesLanguageModel = class {
            controller.enqueue({
              type: "finish",
              finishReason,
-             usage,
+             usage: convertHuggingFaceResponsesUsage(usage),
              providerMetadata: {
                huggingface: {
                  responseId
@@ -593,8 +669,65 @@ var HuggingFaceResponsesLanguageModel = class {
 var huggingfaceResponsesProviderOptionsSchema = z2.object({
   metadata: z2.record(z2.string(), z2.string()).optional(),
   instructions: z2.string().optional(),
-  strictJsonSchema: z2.boolean().optional()
+  strictJsonSchema: z2.boolean().optional(),
+  reasoningEffort: z2.string().optional()
 });
+var huggingfaceResponsesOutputSchema = z2.discriminatedUnion("type", [
+  z2.object({
+    type: z2.literal("message"),
+    id: z2.string(),
+    role: z2.string().optional(),
+    status: z2.string().optional(),
+    content: z2.array(
+      z2.object({
+        type: z2.literal("output_text"),
+        text: z2.string(),
+        annotations: z2.array(z2.any()).optional()
+      })
+    )
+  }),
+  z2.object({
+    type: z2.literal("reasoning"),
+    id: z2.string(),
+    status: z2.string().optional(),
+    content: z2.array(
+      z2.object({
+        type: z2.literal("reasoning_text"),
+        text: z2.string()
+      })
+    ),
+    summary: z2.array(
+      z2.object({
+        type: z2.literal("reasoning_summary"),
+        text: z2.string()
+      }).optional()
+    ).optional()
+  }),
+  z2.object({
+    type: z2.literal("function_call"),
+    id: z2.string(),
+    call_id: z2.string(),
+    name: z2.string(),
+    arguments: z2.string(),
+    output: z2.string().optional(),
+    status: z2.string().optional()
+  }),
+  z2.object({
+    type: z2.literal("mcp_call"),
+    id: z2.string(),
+    name: z2.string(),
+    arguments: z2.string(),
+    output: z2.string().optional(),
+    status: z2.string().optional()
+  }),
+  z2.object({
+    type: z2.literal("mcp_list_tools"),
+    id: z2.string(),
+    server_label: z2.string(),
+    tools: z2.array(z2.any()).optional(),
+    status: z2.string().optional()
+  })
+]);
 var huggingfaceResponsesResponseSchema = z2.object({
   id: z2.string(),
   model: z2.string(),
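For reference, an output item that the new `reasoning` branch of this union would accept; the ids, status, and text below are invented:

```ts
// Hypothetical Responses API output item matching the "reasoning" branch
// of huggingfaceResponsesOutputSchema.
const reasoningItem = {
  type: 'reasoning',
  id: 'rs_abc123',
  status: 'completed',
  content: [{ type: 'reasoning_text', text: 'Comparing both interpretations before answering.' }],
  summary: [],
};
```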
@@ -623,7 +756,7 @@ var huggingfaceResponsesResponseSchema = z2.object({
     }).optional(),
     total_tokens: z2.number()
   }).nullable().optional(),
-  output: z2.array(
+  output: z2.array(huggingfaceResponsesOutputSchema),
   output_text: z2.string().nullable().optional()
 });
 var responseOutputItemAddedSchema = z2.object({
@@ -637,6 +770,13 @@ var responseOutputItemAddedSchema = z2.object({
       status: z2.string().optional(),
       content: z2.array(z2.any()).optional()
     }),
+    z2.object({
+      type: z2.literal("reasoning"),
+      id: z2.string(),
+      status: z2.string().optional(),
+      content: z2.array(z2.any()).optional(),
+      summary: z2.array(z2.any()).optional()
+    }),
     z2.object({
       type: z2.literal("mcp_list_tools"),
       id: z2.string(),
@@ -700,6 +840,13 @@ var responseOutputItemDoneSchema = z2.object({
       arguments: z2.string(),
       output: z2.string().optional(),
       error: z2.string().optional()
+    }),
+    z2.object({
+      type: z2.literal("reasoning"),
+      id: z2.string(),
+      status: z2.string().optional(),
+      content: z2.array(z2.any()).optional(),
+      summary: z2.array(z2.any()).optional()
     })
   ]),
   sequence_number: z2.number()
@@ -712,6 +859,22 @@ var textDeltaChunkSchema = z2.object({
   delta: z2.string(),
   sequence_number: z2.number()
 });
+var reasoningTextDeltaChunkSchema = z2.object({
+  type: z2.literal("response.reasoning_text.delta"),
+  item_id: z2.string(),
+  output_index: z2.number(),
+  content_index: z2.number(),
+  delta: z2.string(),
+  sequence_number: z2.number()
+});
+var reasoningTextEndChunkSchema = z2.object({
+  type: z2.literal("response.reasoning_text.done"),
+  item_id: z2.string(),
+  output_index: z2.number(),
+  content_index: z2.number(),
+  text: z2.string(),
+  sequence_number: z2.number()
+});
 var responseCompletedChunkSchema = z2.object({
   type: z2.literal("response.completed"),
   response: huggingfaceResponsesResponseSchema,
@@ -730,6 +893,8 @@ var responseCreatedChunkSchema = z2.object({
 var huggingfaceResponsesChunkSchema = z2.union([
   responseOutputItemAddedSchema,
   responseOutputItemDoneSchema,
+  reasoningTextDeltaChunkSchema,
+  reasoningTextEndChunkSchema,
   textDeltaChunkSchema,
   responseCompletedChunkSchema,
   responseCreatedChunkSchema,
@@ -745,6 +910,12 @@ function isResponseOutputItemDoneChunk(chunk) {
 function isTextDeltaChunk(chunk) {
   return chunk.type === "response.output_text.delta";
 }
+function isReasoningDeltaChunk(chunk) {
+  return chunk.type === "response.reasoning_text.delta";
+}
+function isReasoningEndChunk(chunk) {
+  return chunk.type === "response.reasoning_text.done";
+}
 function isResponseCompletedChunk(chunk) {
   return chunk.type === "response.completed";
 }
@@ -775,15 +946,17 @@ function createHuggingFace(options = {}) {
     });
   };
   const provider = (modelId) => createResponsesModel(modelId);
+  provider.specificationVersion = "v3";
   provider.languageModel = createResponsesModel;
   provider.responses = createResponsesModel;
-  provider.
+  provider.embeddingModel = (modelId) => {
     throw new NoSuchModelError({
       modelId,
-      modelType: "
+      modelType: "embeddingModel",
       message: "Hugging Face Responses API does not support text embeddings. Use the Hugging Face Inference API directly for embeddings."
     });
   };
+  provider.textEmbeddingModel = provider.embeddingModel;
   provider.imageModel = (modelId) => {
     throw new NoSuchModelError({
       modelId,
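Finally, a brief sketch of the updated provider surface as this hunk describes it; the model ids below are placeholders. Language models still route through the Responses implementation, while the new `embeddingModel` alias, like `textEmbeddingModel`, throws `NoSuchModelError`:

```ts
import { createHuggingFace } from '@ai-sdk/huggingface';

const huggingface = createHuggingFace();

// Language models go through the Responses API implementation above.
const model = huggingface.responses('meta-llama/Llama-3.3-70B-Instruct');

// Embeddings remain unsupported; both aliases throw NoSuchModelError.
try {
  huggingface.embeddingModel('placeholder-embedding-model');
} catch (error) {
  console.error(error); // NoSuchModelError with modelType "embeddingModel"
}
```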