objectiveai 1.2.3 → 1.2.4
- package/dist/index.cjs +41 -2
- package/dist/index.d.ts +18335 -2974
- package/dist/index.js +41 -2
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -2486,6 +2486,18 @@ var Function;
         .optional()
         .nullable()
         .describe("The retry token provided by a previous incomplete or failed function execution."),
+    reasoning: zod_1.default
+        .object({
+        model: Chat.Completions.Request.ModelSchema,
+        models: zod_1.default
+            .array(Chat.Completions.Request.ModelSchema)
+            .optional()
+            .nullable()
+            .describe("Fallback Ensemble LLMs to use if the primary Ensemble LLM fails."),
+    })
+        .optional()
+        .nullable()
+        .describe("If provided, a reasoning summary for the Function Execution will be generated. This reasoning summary attempts to detail why the final Output is what it is, based on AI assertions made during execution."),
     input: Function_1.InputSchema_,
     provider: Chat.Completions.Request.ProviderSchema.optional().nullable(),
     seed: Chat.Completions.Request.SeedSchema.optional().nullable(),
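Note: the hunk above adds an optional, nullable `reasoning` object to the Function Execution request schema, with a primary Ensemble LLM `model` and optional fallback `models`. A rough sketch of a request body shaped to these fields follows; only the field names and descriptions come from this diff, while the model identifiers and the `input` value are illustrative assumptions.

// Sketch only: field names mirror the schema added above; the model strings
// and the `input` value are assumptions, not taken from this diff.
const executeRequest = {
    input: { /* Function input matching Function_1.InputSchema_ */ },
    reasoning: {
        model: "openai/gpt-4o",                  // hypothetical primary Ensemble LLM
        models: ["anthropic/claude-3.5-sonnet"], // hypothetical fallbacks if the primary fails
    },
    // Omitting `reasoning` (or passing null) skips reasoning-summary generation.
};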
@@ -2711,12 +2723,33 @@ var Function;
     Streaming.TaskChunkSchema = zod_1.default
         .union([TaskChunk.FunctionSchema, TaskChunk.VectorCompletionSchema])
         .describe("A chunk of a task execution.");
+    let ReasoningSummaryChunk;
+    (function (ReasoningSummaryChunk) {
+        function merged(a, b) {
+            const [base, baseChanged] = Chat.Completions.Response.Streaming.ChatCompletionChunk.merged(a, b);
+            const [error, errorChanged] = merge(a.error, b.error);
+            if (baseChanged || errorChanged) {
+                return [
+                    Object.assign(Object.assign({}, base), (error !== undefined ? { error } : {})),
+                    true,
+                ];
+            }
+            else {
+                return [a, false];
+            }
+        }
+        ReasoningSummaryChunk.merged = merged;
+    })(ReasoningSummaryChunk = Streaming.ReasoningSummaryChunk || (Streaming.ReasoningSummaryChunk = {}));
+    Streaming.ReasoningSummaryChunkSchema = Chat.Completions.Response.Streaming.ChatCompletionChunkSchema.extend({
+        error: exports.ObjectiveAIErrorSchema.optional().describe("When present, indicates that an error occurred during the chat completion."),
+    }).describe("A chunk of a reasoning summary generation.");
     let FunctionExecutionChunk;
     (function (FunctionExecutionChunk) {
         function merged(a, b) {
             const id = a.id;
             const [tasks, tasksChanged] = TaskChunk.mergedList(a.tasks, b.tasks);
             const [tasks_errors, tasks_errorsChanged] = merge(a.tasks_errors, b.tasks_errors);
+            const [reasoning, reasoningChanged] = merge(a.reasoning, b.reasoning);
             const [output, outputChanged] = merge(a.output, b.output);
             const [error, errorChanged] = merge(a.error, b.error);
             const [retry_token, retry_tokenChanged] = merge(a.retry_token, b.retry_token);
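Note: the new `Streaming.ReasoningSummaryChunk.merged(a, b)` folds a later chunk into an earlier one and reports whether anything changed, reusing the chat-completion-chunk merge plus the new `error` field. A minimal accumulation sketch, assuming the `Function` namespace is exported from the package root with `Streaming` nested directly under it (that exact path is an assumption; only `Streaming.ReasoningSummaryChunk.merged` appears in this diff):

// Sketch: fold streamed reasoning-summary chunks into one accumulated value.
// Import path and the nesting Function.Streaming are assumptions.
import { Function } from "objectiveai";

function accumulateReasoning(chunks: any[]): any | undefined {
    let acc: any | undefined;
    for (const chunk of chunks) {
        if (acc === undefined) {
            acc = chunk;
        } else {
            // merged() returns [mergedChunk, changed]; keep the merged value.
            const [next] = Function.Streaming.ReasoningSummaryChunk.merged(acc, chunk);
            acc = next;
        }
    }
    return acc;
}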
@@ -2727,13 +2760,14 @@ var Function;
             const [usage, usageChanged] = merge(a.usage, b.usage);
             if (tasksChanged ||
                 tasks_errorsChanged ||
+                reasoningChanged ||
                 outputChanged ||
                 errorChanged ||
                 retry_tokenChanged ||
                 usageChanged) {
                 return [
-                    Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({ id,
-                        tasks }, (tasks_errors !== undefined ? { tasks_errors } : {})), (output !== undefined ? { output } : {})), (error !== undefined ? { error } : {})), (retry_token !== undefined ? { retry_token } : {})), { created, function: function_, profile,
+                    Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({ id,
+                        tasks }, (tasks_errors !== undefined ? { tasks_errors } : {})), (reasoning !== undefined ? { reasoning } : {})), (output !== undefined ? { output } : {})), (error !== undefined ? { error } : {})), (retry_token !== undefined ? { retry_token } : {})), { created, function: function_, profile,
                         object }), (usage !== undefined ? { usage } : {})),
                     true,
                 ];
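Note: with the hunk above, `FunctionExecutionChunk.merged` also carries the accumulated `reasoning` chunk through (the extra `reasoningChanged` check and the seventh `Object.assign`). A sketch of accumulating streamed execution chunks and inspecting the merged reasoning, assuming `FunctionExecutionChunk` is attached under `Streaming` the same way `ReasoningSummaryChunk` is (the attachment and export path are assumptions; the closing of that namespace is not shown in this diff):

// Sketch: accumulate streamed FunctionExecutionChunks; after this hunk the
// merged chunk keeps `reasoning` (and its `error`, if one occurred).
import { Function } from "objectiveai"; // assumed export path

function accumulateExecution(chunks: any[]): any | undefined {
    let acc: any | undefined;
    for (const chunk of chunks) {
        acc = acc === undefined
            ? chunk
            : Function.Streaming.FunctionExecutionChunk.merged(acc, chunk)[0];
    }
    if (acc?.reasoning?.error) {
        // The `error` field on the reasoning chunk is defined in the previous hunk.
        console.warn("reasoning summary failed:", acc.reasoning.error);
    }
    return acc;
}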
@@ -2756,6 +2790,7 @@ var Function;
             .boolean()
             .optional()
             .describe("When true, indicates that one or more tasks encountered errors during execution."),
+        reasoning: Streaming.ReasoningSummaryChunkSchema.optional(),
         output: zod_1.default
             .union([
             zod_1.default
@@ -2822,6 +2857,9 @@ var Function;
     Unary.TaskSchema = zod_1.default
         .union([Task.FunctionSchema, Task.VectorCompletionSchema])
         .describe("A task execution.");
+    Unary.ReasoningSummarySchema = Chat.Completions.Response.Unary.ChatCompletionSchema.extend({
+        error: exports.ObjectiveAIErrorSchema.nullable().describe("When non-null, indicates that an error occurred during the chat completion."),
+    }).describe("A reasoning summary generation.");
     Unary.FunctionExecutionSchema = zod_1.default
         .object({
         id: zod_1.default
@@ -2833,6 +2871,7 @@ var Function;
         tasks_errors: zod_1.default
             .boolean()
             .describe("When true, indicates that one or more tasks encountered errors during execution."),
+        reasoning: Unary.ReasoningSummarySchema.nullable(),
         output: zod_1.default
             .union([
             zod_1.default
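Note: in the unary (non-streaming) response, `reasoning` is nullable rather than optional: it is null when no reasoning summary was requested, and otherwise a value matching `Unary.ReasoningSummarySchema` (a chat completion extended with a nullable `error`). A small handling sketch; the `execution` value and the OpenAI-style `choices[0].message.content` access are assumptions about the extended chat-completion shape, which is not shown in this diff:

// Sketch: inspect the reasoning summary on a unary Function Execution result.
// `execution` is assumed to be a value matching Unary.FunctionExecutionSchema.
function describeReasoning(execution: any): string {
    if (execution.reasoning === null) {
        return "no reasoning summary was requested";
    }
    if (execution.reasoning.error !== null) {
        // `error` is nullable per Unary.ReasoningSummarySchema above.
        return "reasoning summary failed: " + JSON.stringify(execution.reasoning.error);
    }
    // Assumes the extended ChatCompletion keeps the OpenAI-style choices array.
    return execution.reasoning.choices?.[0]?.message?.content ?? "";
}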