@mastra/client-js 1.0.0-beta.11 → 1.0.0-beta.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +179 -0
- package/dist/_types/@ai-sdk_ui-utils/dist/index.d.ts +820 -0
- package/dist/_types/@internal_ai-sdk-v5/dist/index.d.ts +8396 -0
- package/dist/index.cjs +1330 -440
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +1328 -438
- package/dist/index.js.map +1 -1
- package/dist/resources/agent.d.ts +1 -1
- package/dist/resources/run.d.ts +156 -0
- package/dist/resources/run.d.ts.map +1 -0
- package/dist/resources/workflow.d.ts +15 -221
- package/dist/resources/workflow.d.ts.map +1 -1
- package/dist/tools.d.ts +2 -2
- package/dist/tools.d.ts.map +1 -1
- package/dist/types.d.ts +3 -3
- package/dist/types.d.ts.map +1 -1
- package/package.json +6 -6
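
The headline change in this range is the new run-scoped client surface: dist/resources/run.d.ts is added and dist/resources/workflow.d.ts shrinks accordingly, with per-run methods (start, startAsync, resume, stream, observeStream, cancelRun) moving from the Workflow resource onto a Run resource that is bound to a runId. A rough usage sketch for orientation only; the client entry point and the way a Run instance is obtained are assumptions here and are not shown in this diff:

import { MastraClient } from '@mastra/client-js';

const client = new MastraClient({ baseUrl: 'http://localhost:4111' }); // assumed constructor options
const workflow = client.getWorkflow('my-workflow');                    // assumed accessor
const run = await workflow.createRunAsync();                           // assumed factory; returns a Run bound to a runId

// Run methods in this diff no longer take a runId parameter; the instance carries it.
const result = await run.startAsync({ inputData: { city: 'Berlin' } });
if (result.status === 'failed') {
  // startAsync now pipes the response through deserializeWorkflowError,
  // so a failed result carries a real Error instance rather than a serialized object.
  console.error(result.error);
}
await run.cancelRun();
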
package/dist/index.js
CHANGED
@@ -1,11 +1,1066 @@
-import { processDataStream, parsePartialJson } from '@ai-sdk/ui-utils';
 import { v4 } from '@lukeed/uuid';
 import { getErrorFromUnknown } from '@mastra/core/error';
 import { RequestContext } from '@mastra/core/request-context';
 import { isVercelTool } from '@mastra/core/tools/is-vercel-tool';
-import { zodToJsonSchema
+import { zodToJsonSchema } from '@mastra/schema-compat/zod-to-json';
 
-
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __commonJS = (cb, mod) => function __require() {
+  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  __defProp(target, "default", { value: mod, enumerable: true }) ,
+  mod
+));
+
[ ~1,030 added lines (new lines 33-1063) collapsed: inlined, vendored copies of secure-json-parse 2.7.0, the @ai-sdk/provider error classes (AISDKError, InvalidArgumentError, JSONParseError, TypeValidationError), nanoid's non-secure customAlphabet, the @ai-sdk/provider-utils helpers (createIdGenerator, validator/safeValidateTypes, safeParseJSON), and the stream utilities previously imported from '@ai-sdk/ui-utils': fixJson, parsePartialJson, the data-stream part parsers, and processDataStream. ]
 function parseClientRequestContext(requestContext) {
   if (requestContext) {
     if (requestContext instanceof RequestContext) {
@@ -32,11 +1087,11 @@ function requestContextQueryString(requestContext, delimiter = "?") {
 function isZodType(value) {
   return typeof value === "object" && value !== null && "_def" in value && "parse" in value && typeof value.parse === "function" && "safeParse" in value && typeof value.safeParse === "function";
 }
-function
+function zodToJsonSchema2(zodSchema) {
   if (!isZodType(zodSchema)) {
     return zodSchema;
   }
-  return zodToJsonSchema
+  return zodToJsonSchema(zodSchema);
 }
 
 // src/utils/process-client-tools.ts
@@ -51,7 +1106,7 @@ function processClientTools(clientTools) {
           key,
           {
             ...value,
-            parameters: value.parameters ?
+            parameters: value.parameters ? zodToJsonSchema2(value.parameters) : void 0
           }
         ];
       } else {
@@ -59,8 +1114,8 @@ function processClientTools(clientTools) {
           key,
           {
             ...value,
-            inputSchema: value.inputSchema ?
-            outputSchema: value.outputSchema ?
+            inputSchema: value.inputSchema ? zodToJsonSchema2(value.inputSchema) : void 0,
+            outputSchema: value.outputSchema ? zodToJsonSchema2(value.outputSchema) : void 0
           }
         ];
       }
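
For reference, the schema handling in the two hunks above boils down to running each Zod schema through the zodToJsonSchema export that the bundle now imports from '@mastra/schema-compat/zod-to-json' before the tool definition is sent over the wire. A minimal sketch; the tool shape here is simplified for illustration and is not the full client-tool type:

import { z } from 'zod';
import { zodToJsonSchema } from '@mastra/schema-compat/zod-to-json';

const inputSchema = z.object({ city: z.string() });

// Mirrors processClientTools: Zod schemas are converted, missing schemas stay undefined,
// and non-Zod values are passed through unchanged by the isZodType guard.
const wireTool = {
  description: 'Get the weather for a city', // illustrative field only
  inputSchema: zodToJsonSchema(inputSchema),
  outputSchema: undefined,
};

console.log(JSON.stringify(wireTool.inputSchema, null, 2));
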
@@ -330,8 +1385,8 @@ var Agent = class extends BaseResource {
   async generateLegacy(params) {
     const processedParams = {
       ...params,
-      output: params.output ?
-      experimental_output: params.experimental_output ?
+      output: params.output ? zodToJsonSchema2(params.output) : void 0,
+      experimental_output: params.experimental_output ? zodToJsonSchema2(params.experimental_output) : void 0,
       requestContext: parseClientRequestContext(params.requestContext),
       clientTools: processClientTools(params.clientTools)
     };
@@ -402,7 +1457,7 @@ var Agent = class extends BaseResource {
       clientTools: processClientTools(params.clientTools),
       structuredOutput: params.structuredOutput ? {
         ...params.structuredOutput,
-        schema:
+        schema: zodToJsonSchema2(params.structuredOutput.schema)
       } : void 0
     };
     const { resourceId, threadId, requestContext } = processedParams;
@@ -695,8 +1750,8 @@ var Agent = class extends BaseResource {
   async streamLegacy(params) {
     const processedParams = {
       ...params,
-      output: params.output ?
-      experimental_output: params.experimental_output ?
+      output: params.output ? zodToJsonSchema2(params.output) : void 0,
+      experimental_output: params.experimental_output ? zodToJsonSchema2(params.experimental_output) : void 0,
       requestContext: parseClientRequestContext(params.requestContext),
       clientTools: processClientTools(params.clientTools)
     };
@@ -1146,7 +2201,7 @@ var Agent = class extends BaseResource {
      clientTools: processClientTools(params.clientTools),
      structuredOutput: params.structuredOutput ? {
        ...params.structuredOutput,
-       schema:
+       schema: zodToJsonSchema2(params.structuredOutput.schema)
      } : void 0
    };
    let readableController;
@@ -1584,186 +2639,68 @@ var Tool = class extends BaseResource {
|
|
|
1584
2639
|
});
|
|
1585
2640
|
}
|
|
1586
2641
|
};
|
|
1587
|
-
|
|
1588
|
-
|
|
2642
|
+
function deserializeWorkflowError(result) {
|
|
2643
|
+
if (result.status === "failed" && result.error) {
|
|
2644
|
+
result.error = getErrorFromUnknown(result.error, {
|
|
2645
|
+
fallbackMessage: "Unknown workflow error",
|
|
2646
|
+
supportSerialization: false
|
|
2647
|
+
});
|
|
2648
|
+
}
|
|
2649
|
+
return result;
|
|
2650
|
+
}
|
|
1589
2651
|
var RECORD_SEPARATOR = "";
|
|
1590
|
-
var
|
|
1591
|
-
constructor(options, workflowId) {
|
|
2652
|
+
var Run = class extends BaseResource {
|
|
2653
|
+
constructor(options, workflowId, runId) {
|
|
1592
2654
|
super(options);
|
|
1593
2655
|
this.workflowId = workflowId;
|
|
2656
|
+
this.runId = runId;
|
|
1594
2657
|
}
|
|
1595
2658
|
/**
|
|
1596
|
-
*
|
|
1597
|
-
* @param requestContext - Optional request context to pass as query parameter
|
|
1598
|
-
* @returns Promise containing workflow details including steps and graphs
|
|
1599
|
-
*/
|
|
1600
|
-
details(requestContext) {
|
|
1601
|
-
return this.request(`/api/workflows/${this.workflowId}${requestContextQueryString(requestContext)}`);
|
|
1602
|
-
}
|
|
1603
|
-
/**
|
|
1604
|
-
* Retrieves all runs for a workflow
|
|
1605
|
-
* @param params - Parameters for filtering runs
|
|
1606
|
-
* @param requestContext - Optional request context to pass as query parameter
|
|
1607
|
-
* @returns Promise containing workflow runs array
|
|
2659
|
+
* Creates a transform stream that parses RECORD_SEPARATOR-delimited JSON chunks
|
|
1608
2660
|
*/
|
|
1609
|
-
|
|
1610
|
-
|
|
1611
|
-
|
|
1612
|
-
|
|
1613
|
-
|
|
1614
|
-
|
|
1615
|
-
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
|
|
1623
|
-
|
|
1624
|
-
|
|
1625
|
-
|
|
1626
|
-
|
|
1627
|
-
|
|
1628
|
-
|
|
2661
|
+
createChunkTransformStream() {
|
|
2662
|
+
let failedChunk = void 0;
|
|
2663
|
+
return new TransformStream({
|
|
2664
|
+
start() {
|
|
2665
|
+
},
|
|
2666
|
+
async transform(chunk, controller) {
|
|
2667
|
+
try {
|
|
2668
|
+
const decoded = new TextDecoder().decode(chunk);
|
|
2669
|
+
const chunks = decoded.split(RECORD_SEPARATOR);
|
|
2670
|
+
for (const chunk2 of chunks) {
|
|
2671
|
+
if (chunk2) {
|
|
2672
|
+
const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
|
|
2673
|
+
try {
|
|
2674
|
+
const parsedChunk = JSON.parse(newChunk);
|
|
2675
|
+
controller.enqueue(parsedChunk);
|
|
2676
|
+
failedChunk = void 0;
|
|
2677
|
+
} catch {
|
|
2678
|
+
failedChunk = newChunk;
|
|
2679
|
+
}
|
|
2680
|
+
}
|
|
2681
|
+
}
|
|
2682
|
+
} catch {
|
|
2683
|
+
}
|
|
1629
2684
|
}
|
|
1630
|
-
}
|
|
1631
|
-
if (params?.offset !== null && params?.offset !== void 0 && !isNaN(Number(params?.offset))) {
|
|
1632
|
-
searchParams.set("offset", String(params.offset));
|
|
1633
|
-
}
|
|
1634
|
-
if (params?.resourceId) {
|
|
1635
|
-
searchParams.set("resourceId", params.resourceId);
|
|
1636
|
-
}
|
|
1637
|
-
if (requestContextParam) {
|
|
1638
|
-
searchParams.set("requestContext", requestContextParam);
|
|
1639
|
-
}
|
|
1640
|
-
if (searchParams.size) {
|
|
1641
|
-
return this.request(`/api/workflows/${this.workflowId}/runs?${searchParams}`);
|
|
1642
|
-
} else {
|
|
1643
|
-
return this.request(`/api/workflows/${this.workflowId}/runs`);
|
|
1644
|
-
}
|
|
1645
|
-
}
|
|
1646
|
-
/**
|
|
1647
|
-
* Retrieves a specific workflow run by its ID
|
|
1648
|
-
* @param runId - The ID of the workflow run to retrieve
|
|
1649
|
-
* @param requestContext - Optional request context to pass as query parameter
|
|
1650
|
-
* @returns Promise containing the workflow run details
|
|
1651
|
-
*/
|
|
1652
|
-
runById(runId, requestContext) {
|
|
1653
|
-
return this.request(`/api/workflows/${this.workflowId}/runs/${runId}${requestContextQueryString(requestContext)}`);
|
|
1654
|
-
}
|
|
1655
|
-
/**
|
|
1656
|
-
* Deletes a specific workflow run by its ID
|
|
1657
|
-
* @param runId - The ID of the workflow run to delete
|
|
1658
|
-
* @returns Promise containing a success message
|
|
1659
|
-
*/
|
|
1660
|
-
deleteRunById(runId) {
|
|
1661
|
-
return this.request(`/api/workflows/${this.workflowId}/runs/${runId}`, {
|
|
1662
|
-
method: "DELETE"
|
|
1663
2685
|
});
|
|
1664
2686
|
}
|
|
1665
|
-
/**
|
|
1666
|
-
* Retrieves the execution result for a specific workflow run by its ID
|
|
1667
|
-
* @param runId - The ID of the workflow run to retrieve the execution result for
|
|
1668
|
-
* @param requestContext - Optional request context to pass as query parameter
|
|
1669
|
-
* @returns Promise containing the workflow run execution result
|
|
1670
|
-
*/
|
|
1671
|
-
runExecutionResult(runId, requestContext) {
|
|
1672
|
-
return this.request(
|
|
1673
|
-
`/api/workflows/${this.workflowId}/runs/${runId}/execution-result${requestContextQueryString(requestContext)}`
|
|
1674
|
-
);
|
|
1675
|
-
}
|
|
1676
2687
|
/**
|
|
1677
2688
|
* Cancels a specific workflow run by its ID
|
|
1678
|
-
* @param runId - The ID of the workflow run to cancel
|
|
1679
2689
|
* @returns Promise containing a success message
|
|
1680
2690
|
*/
|
|
1681
|
-
cancelRun(
|
|
1682
|
-
return this.request(`/api/workflows/${this.workflowId}/runs/${runId}/cancel`, {
|
|
2691
|
+
cancelRun() {
|
|
2692
|
+
return this.request(`/api/workflows/${this.workflowId}/runs/${this.runId}/cancel`, {
|
|
1683
2693
|
method: "POST"
|
|
1684
2694
|
});
|
|
1685
2695
|
}
|
|
1686
|
-
/**
|
|
1687
|
-
* Creates a new workflow run
|
|
1688
|
-
* @param params - Optional object containing the optional runId
|
|
1689
|
-
* @returns Promise containing the runId of the created run with methods to control execution
|
|
1690
|
-
*/
|
|
1691
|
-
async createRun(params) {
|
|
1692
|
-
const searchParams = new URLSearchParams();
|
|
1693
|
-
if (!!params?.runId) {
|
|
1694
|
-
searchParams.set("runId", params.runId);
|
|
1695
|
-
}
|
|
1696
|
-
const res = await this.request(
|
|
1697
|
-
`/api/workflows/${this.workflowId}/create-run?${searchParams.toString()}`,
|
|
1698
|
-
{
|
|
1699
|
-
method: "POST"
|
|
1700
|
-
}
|
|
1701
|
-
);
|
|
1702
|
-
const runId = res.runId;
|
|
1703
|
-
return {
|
|
1704
|
-
runId,
|
|
1705
|
-
start: async (p) => {
|
|
1706
|
-
return this.start({
|
|
1707
|
-
runId,
|
|
1708
|
-
inputData: p.inputData,
|
|
1709
|
-
initialState: p.initialState,
|
|
1710
|
-
requestContext: p.requestContext,
|
|
1711
|
-
tracingOptions: p.tracingOptions
|
|
1712
|
-
});
|
|
1713
|
-
},
|
|
1714
|
-
startAsync: async (p) => {
|
|
1715
|
-
return this.startAsync({
|
|
1716
|
-
runId,
|
|
1717
|
-
inputData: p.inputData,
|
|
1718
|
-
initialState: p.initialState,
|
|
1719
|
-
requestContext: p.requestContext,
|
|
1720
|
-
tracingOptions: p.tracingOptions
|
|
1721
|
-
});
|
|
1722
|
-
},
|
|
1723
|
-
stream: async (p) => {
|
|
1724
|
-
return this.stream({
|
|
1725
|
-
runId,
|
|
1726
|
-
inputData: p.inputData,
|
|
1727
|
-
initialState: p.initialState,
|
|
1728
|
-
requestContext: p.requestContext
|
|
1729
|
-
});
|
|
1730
|
-
},
|
|
1731
|
-
resume: async (p) => {
|
|
1732
|
-
return this.resume({
|
|
1733
|
-
runId,
|
|
1734
|
-
step: p.step,
|
|
1735
|
-
resumeData: p.resumeData,
|
|
1736
|
-
requestContext: p.requestContext,
|
|
1737
|
-
tracingOptions: p.tracingOptions
|
|
1738
|
-
});
|
|
1739
|
-
},
|
|
1740
|
-
resumeAsync: async (p) => {
|
|
1741
|
-
return this.resumeAsync({
|
|
1742
|
-
runId,
|
|
1743
|
-
step: p.step,
|
|
1744
|
-
resumeData: p.resumeData,
|
|
1745
|
-
requestContext: p.requestContext,
|
|
1746
|
-
tracingOptions: p.tracingOptions
|
|
1747
|
-
});
|
|
1748
|
-
},
|
|
1749
|
-
resumeStreamVNext: async (p) => {
|
|
1750
|
-
return this.resumeStreamVNext({
|
|
1751
|
-
runId,
|
|
1752
|
-
step: p.step,
|
|
1753
|
-
resumeData: p.resumeData,
|
|
1754
|
-
requestContext: p.requestContext
|
|
1755
|
-
});
|
|
1756
|
-
}
|
|
1757
|
-
};
|
|
1758
|
-
}
|
|
1759
2696
|
/**
|
|
1760
2697
|
* Starts a workflow run synchronously without waiting for the workflow to complete
|
|
1761
|
-
* @param params - Object containing the
|
|
2698
|
+
* @param params - Object containing the inputData, initialState and requestContext
|
|
1762
2699
|
* @returns Promise containing success message
|
|
1763
2700
|
*/
|
|
1764
2701
|
start(params) {
|
|
1765
2702
|
const requestContext = parseClientRequestContext(params.requestContext);
|
|
1766
|
-
return this.request(`/api/workflows/${this.workflowId}/start?runId=${
|
|
2703
|
+
return this.request(`/api/workflows/${this.workflowId}/start?runId=${this.runId}`, {
|
|
1767
2704
|
method: "POST",
|
|
1768
2705
|
body: {
|
|
1769
2706
|
inputData: params?.inputData,
|
|
@@ -1775,18 +2712,17 @@ var Workflow = class extends BaseResource {
|
|
|
1775
2712
|
}
|
|
1776
2713
|
/**
|
|
1777
2714
|
* Resumes a suspended workflow step synchronously without waiting for the workflow to complete
|
|
1778
|
-
* @param params - Object containing the
|
|
2715
|
+
* @param params - Object containing the step, resumeData and requestContext
|
|
1779
2716
|
* @returns Promise containing success message
|
|
1780
2717
|
*/
|
|
1781
2718
|
resume({
|
|
1782
2719
|
step,
|
|
1783
|
-
runId,
|
|
1784
2720
|
resumeData,
|
|
1785
2721
|
tracingOptions,
|
|
1786
2722
|
...rest
|
|
1787
2723
|
}) {
|
|
1788
2724
|
const requestContext = parseClientRequestContext(rest.requestContext);
|
|
1789
|
-
return this.request(`/api/workflows/${this.workflowId}/resume?runId=${runId}`, {
|
|
2725
|
+
return this.request(`/api/workflows/${this.workflowId}/resume?runId=${this.runId}`, {
|
|
1790
2726
|
method: "POST",
|
|
1791
2727
|
body: {
|
|
1792
2728
|
step,
|
|
@@ -1798,14 +2734,12 @@ var Workflow = class extends BaseResource {
|
|
|
1798
2734
|
}
|
|
1799
2735
|
/**
|
|
1800
2736
|
* Starts a workflow run asynchronously and returns a promise that resolves when the workflow is complete
|
|
1801
|
-
* @param params - Object containing the
|
|
2737
|
+
* @param params - Object containing the inputData, initialState and requestContext
|
|
1802
2738
|
* @returns Promise containing the workflow execution results
|
|
1803
2739
|
*/
|
|
1804
2740
|
startAsync(params) {
|
|
1805
2741
|
const searchParams = new URLSearchParams();
|
|
1806
|
-
|
|
1807
|
-
searchParams.set("runId", params.runId);
|
|
1808
|
-
}
|
|
2742
|
+
searchParams.set("runId", this.runId);
|
|
1809
2743
|
const requestContext = parseClientRequestContext(params.requestContext);
|
|
1810
2744
|
return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
|
|
1811
2745
|
method: "POST",
|
|
@@ -1813,20 +2747,19 @@ var Workflow = class extends BaseResource {
|
|
|
1813
2747
|
inputData: params.inputData,
|
|
1814
2748
|
initialState: params.initialState,
|
|
1815
2749
|
requestContext,
|
|
1816
|
-
tracingOptions: params.tracingOptions
|
|
2750
|
+
tracingOptions: params.tracingOptions,
|
|
2751
|
+
resourceId: params.resourceId
|
|
1817
2752
|
}
|
|
1818
|
-
});
|
|
2753
|
+
}).then(deserializeWorkflowError);
|
|
1819
2754
|
}
|
|
1820
2755
|
/**
|
|
1821
2756
|
* Starts a workflow run and returns a stream
|
|
1822
|
-
* @param params - Object containing the
|
|
2757
|
+
* @param params - Object containing the inputData, initialState and requestContext
|
|
1823
2758
|
* @returns Promise containing the workflow execution results
|
|
1824
2759
|
*/
|
|
1825
2760
|
async stream(params) {
|
|
1826
2761
|
const searchParams = new URLSearchParams();
|
|
1827
|
-
|
|
1828
|
-
searchParams.set("runId", params.runId);
|
|
1829
|
-
}
|
|
2762
|
+
searchParams.set("runId", this.runId);
|
|
1830
2763
|
const requestContext = parseClientRequestContext(params.requestContext);
|
|
1831
2764
|
const response = await this.request(
|
|
1832
2765
|
`/api/workflows/${this.workflowId}/stream?${searchParams.toString()}`,
|
|
@@ -1836,7 +2769,8 @@ var Workflow = class extends BaseResource {
           inputData: params.inputData,
           initialState: params.initialState,
           requestContext,
-          tracingOptions: params.tracingOptions
+          tracingOptions: params.tracingOptions,
+          resourceId: params.resourceId
         },
         stream: true
       }
@@ -1847,40 +2781,15 @@ var Workflow = class extends BaseResource {
     if (!response.body) {
       throw new Error("Response body is null");
     }
-
-    const transformStream = new TransformStream({
-      start() {
-      },
-      async transform(chunk, controller) {
-        try {
-          const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(RECORD_SEPARATOR);
-          for (const chunk2 of chunks) {
-            if (chunk2) {
-              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
-              try {
-                const parsedChunk = JSON.parse(newChunk);
-                controller.enqueue(parsedChunk);
-                failedChunk = void 0;
-              } catch {
-                failedChunk = newChunk;
-              }
-            }
-          }
-        } catch {
-        }
-      }
-    });
-    return response.body.pipeThrough(transformStream);
+    return response.body.pipeThrough(this.createChunkTransformStream());
   }
   /**
    * Observes workflow stream for a workflow run
-   * @param params - Object containing the runId
    * @returns Promise containing the workflow execution results
    */
-  async observeStream(
+  async observeStream() {
     const searchParams = new URLSearchParams();
-    searchParams.set("runId",
+    searchParams.set("runId", this.runId);
     const response = await this.request(
       `/api/workflows/${this.workflowId}/observe-stream?${searchParams.toString()}`,
       {
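The inline TransformStream in stream() and observeStream() is replaced by a shared this.createChunkTransformStream() helper; the removed inline code split the body on the record separator, JSON.parse'd each record, and re-buffered partial records, and the substitution suggests the helper does the same. A consumption sketch (illustrative, not from the diff):

// `workflow` is obtained as in the first sketch above.
const run = await workflow.createRun();
const stream = await run.stream({ inputData: { city: 'Berlin' } });

// The returned ReadableStream already emits parsed JSON records, not raw bytes.
const reader = stream.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log(value); // one decoded workflow event per read
}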
@@ -1894,97 +2803,47 @@ var Workflow = class extends BaseResource {
     if (!response.body) {
       throw new Error("Response body is null");
     }
-
-    const transformStream = new TransformStream({
-      start() {
-      },
-      async transform(chunk, controller) {
-        try {
-          const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(RECORD_SEPARATOR);
-          for (const chunk2 of chunks) {
-            if (chunk2) {
-              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
-              try {
-                const parsedChunk = JSON.parse(newChunk);
-                controller.enqueue(parsedChunk);
-                failedChunk = void 0;
-              } catch {
-                failedChunk = newChunk;
-              }
-            }
-          }
-        } catch {
-        }
-      }
-    });
-    return response.body.pipeThrough(transformStream);
+    return response.body.pipeThrough(this.createChunkTransformStream());
   }
   /**
    * Starts a workflow run and returns a stream
-   * @param params - Object containing the
+   * @param params - Object containing the inputData, initialState and requestContext
    * @returns Promise containing the workflow execution results
    */
   async streamVNext(params) {
     const searchParams = new URLSearchParams();
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        stream: true
-      }
-    );
-    if (!response.ok) {
-      throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
-    }
-    if (!response.body) {
-      throw new Error("Response body is null");
-    }
-    let failedChunk = void 0;
-    const transformStream = new TransformStream({
-      start() {
-      },
-      async transform(chunk, controller) {
-        try {
-          const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(RECORD_SEPARATOR);
-          for (const chunk2 of chunks) {
-            if (chunk2) {
-              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
-              try {
-                const parsedChunk = JSON.parse(newChunk);
-                controller.enqueue(parsedChunk);
-                failedChunk = void 0;
-              } catch {
-                failedChunk = newChunk;
-              }
-            }
-          }
-        } catch {
-        }
+    searchParams.set("runId", this.runId);
+    const requestContext = parseClientRequestContext(params.requestContext);
+    const response = await this.request(
+      `/api/workflows/${this.workflowId}/streamVNext?${searchParams.toString()}`,
+      {
+        method: "POST",
+        body: {
+          inputData: params.inputData,
+          initialState: params.initialState,
+          requestContext,
+          closeOnSuspend: params.closeOnSuspend,
+          tracingOptions: params.tracingOptions,
+          resourceId: params.resourceId
+        },
+        stream: true
      }
-
-
+    );
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+    }
+    if (!response.body) {
+      throw new Error("Response body is null");
+    }
+    return response.body.pipeThrough(this.createChunkTransformStream());
   }
   /**
    * Observes workflow vNext stream for a workflow run
-   * @param params - Object containing the runId
    * @returns Promise containing the workflow execution results
    */
-  async observeStreamVNext(
+  async observeStreamVNext() {
     const searchParams = new URLSearchParams();
-    searchParams.set("runId",
+    searchParams.set("runId", this.runId);
     const response = await this.request(
       `/api/workflows/${this.workflowId}/observe-streamVNext?${searchParams.toString()}`,
       {
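streamVNext() is rebuilt on the same run-scoped pattern, and its request body now also carries closeOnSuspend and resourceId alongside inputData, initialState, requestContext and tracingOptions. A sketch (illustrative, not from the diff; option values are assumptions):

// `workflow` is obtained as in the first sketch above.
const run = await workflow.createRun();
const vnext = await run.streamVNext({
  inputData: { city: 'Berlin' },
  closeOnSuspend: true,    // end the stream when the run suspends (new field)
  resourceId: 'user-123',
});
// Async iteration over a web ReadableStream needs a runtime that supports it
// (e.g. Node 18+); otherwise use getReader() as in the stream() sketch above.
for await (const chunk of vnext) {
  console.log(chunk);
}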
@@ -1998,40 +2857,16 @@ var Workflow = class extends BaseResource {
     if (!response.body) {
       throw new Error("Response body is null");
     }
-
-    const transformStream = new TransformStream({
-      start() {
-      },
-      async transform(chunk, controller) {
-        try {
-          const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(RECORD_SEPARATOR);
-          for (const chunk2 of chunks) {
-            if (chunk2) {
-              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
-              try {
-                const parsedChunk = JSON.parse(newChunk);
-                controller.enqueue(parsedChunk);
-                failedChunk = void 0;
-              } catch {
-                failedChunk = newChunk;
-              }
-            }
-          }
-        } catch {
-        }
-      }
-    });
-    return response.body.pipeThrough(transformStream);
+    return response.body.pipeThrough(this.createChunkTransformStream());
   }
   /**
    * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the
+   * @param params - Object containing the step, resumeData and requestContext
    * @returns Promise containing the workflow resume results
    */
   resumeAsync(params) {
     const requestContext = parseClientRequestContext(params.requestContext);
-    return this.request(`/api/workflows/${this.workflowId}/resume-async?runId=${
+    return this.request(`/api/workflows/${this.workflowId}/resume-async?runId=${this.runId}`, {
       method: "POST",
       body: {
         step: params.step,
@@ -2039,16 +2874,16 @@ var Workflow = class extends BaseResource {
         requestContext,
         tracingOptions: params.tracingOptions
       }
-    });
+    }).then(deserializeWorkflowError);
   }
   /**
    * Resumes a suspended workflow step that uses streamVNext asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the
+   * @param params - Object containing the step, resumeData and requestContext
    * @returns Promise containing the workflow resume results
    */
   async resumeStreamVNext(params) {
     const searchParams = new URLSearchParams();
-    searchParams.set("runId",
+    searchParams.set("runId", this.runId);
     const requestContext = parseClientRequestContext(params.requestContext);
     const response = await this.request(
       `/api/workflows/${this.workflowId}/resume-stream?${searchParams.toString()}`,
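resumeAsync() now resolves through deserializeWorkflowError and resumeStreamVNext() reuses the shared chunk decoder, both keyed on the handle's runId. A sketch (illustrative, not from the diff; step id and payload are assumptions):

// `run` is a handle from workflow.createRun(), as in the earlier sketches.
// Blocking resume: resolves when the workflow settles again.
const resumed = await run.resumeAsync({
  step: 'approval-step',            // hypothetical step id
  resumeData: { approved: true },
});

// Streaming resume: a ReadableStream of parsed records, decoded by
// createChunkTransformStream() like the other streaming methods.
const resumeStream = await run.resumeStreamVNext({
  step: 'approval-step',
  resumeData: { approved: true },
});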
@@ -2069,63 +2904,16 @@ var Workflow = class extends BaseResource {
     if (!response.body) {
       throw new Error("Response body is null");
     }
-
-    const transformStream = new TransformStream({
-      start() {
-      },
-      async transform(chunk, controller) {
-        try {
-          const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(RECORD_SEPARATOR);
-          for (const chunk2 of chunks) {
-            if (chunk2) {
-              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
-              try {
-                const parsedChunk = JSON.parse(newChunk);
-                controller.enqueue(parsedChunk);
-                failedChunk = void 0;
-              } catch {
-                failedChunk = newChunk;
-              }
-            }
-          }
-        } catch {
-        }
-      }
-    });
-    return response.body.pipeThrough(transformStream);
-  }
-  /**
-   * Creates a new ReadableStream from an iterable or async iterable of objects,
-   * serializing each as JSON and separating them with the record separator (\x1E).
-   *
-   * @param records - An iterable or async iterable of objects to stream
-   * @returns A ReadableStream emitting the records as JSON strings separated by the record separator
-   */
-  static createRecordStream(records) {
-    const encoder = new TextEncoder();
-    return new ReadableStream({
-      async start(controller) {
-        try {
-          for await (const record of records) {
-            const json = JSON.stringify(record) + RECORD_SEPARATOR;
-            controller.enqueue(encoder.encode(json));
-          }
-          controller.close();
-        } catch (err) {
-          controller.error(err);
-        }
-      }
-    });
+    return response.body.pipeThrough(this.createChunkTransformStream());
   }
   /**
    * Restarts an active workflow run synchronously without waiting for the workflow to complete
-   * @param params - Object containing the
+   * @param params - Object containing the requestContext
    * @returns Promise containing success message
    */
   restart(params) {
     const requestContext = parseClientRequestContext(params.requestContext);
-    return this.request(`/api/workflows/${this.workflowId}/restart?runId=${
+    return this.request(`/api/workflows/${this.workflowId}/restart?runId=${this.runId}`, {
       method: "POST",
       body: {
         requestContext,
@@ -2135,49 +2923,27 @@ var Workflow = class extends BaseResource {
   }
   /**
    * Restarts an active workflow run asynchronously
-   * @param params -
+   * @param params - optional object containing the requestContext
    * @returns Promise containing the workflow restart results
    */
   restartAsync(params) {
-    const requestContext = parseClientRequestContext(params
-    return this.request(`/api/workflows/${this.workflowId}/restart-async?runId=${
+    const requestContext = parseClientRequestContext(params?.requestContext);
+    return this.request(`/api/workflows/${this.workflowId}/restart-async?runId=${this.runId}`, {
       method: "POST",
       body: {
         requestContext,
-        tracingOptions: params
+        tracingOptions: params?.tracingOptions
       }
-    });
-  }
-  /**
-   * Restart all active workflow runs synchronously without waiting for the workflow to complete
-   * @returns Promise containing success message
-   */
-  restartAllActiveWorkflowRuns() {
-    return this.request(`/api/workflows/${this.workflowId}/restart-all-active-workflow-runs`, {
-      method: "POST"
-    });
-  }
-  /**
-   * Restart all active workflow runs asynchronously
-   * @returns Promise containing success message
-   */
-  restartAllActiveWorkflowRunsAsync() {
-    return this.request(`/api/workflows/${this.workflowId}/restart-all-active-workflow-runs-async`, {
-      method: "POST"
-    });
+    }).then(deserializeWorkflowError);
   }
   /**
    * Time travels a workflow run synchronously without waiting for the workflow to complete
-   * @param params - Object containing the
+   * @param params - Object containing the step, inputData, resumeData, initialState, context, nestedStepsContext, requestContext and tracingOptions
    * @returns Promise containing success message
    */
-  timeTravel({
-    runId,
-    requestContext: paramsRequestContext,
-    ...params
-  }) {
+  timeTravel({ requestContext: paramsRequestContext, ...params }) {
     const requestContext = parseClientRequestContext(paramsRequestContext);
-    return this.request(`/api/workflows/${this.workflowId}/time-travel?runId=${runId}`, {
+    return this.request(`/api/workflows/${this.workflowId}/time-travel?runId=${this.runId}`, {
       method: "POST",
       body: {
         ...params,
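restartAsync() now treats its params object as optional, the bulk restartAllActiveWorkflowRuns helpers are removed from this class, and timeTravel() drops runId from its destructured params. A sketch of the remaining per-run calls (illustrative, not from the diff; step id and payload are assumptions):

// `run` is a handle from workflow.createRun(), as in the earlier sketches.
// params are optional; the result passes through deserializeWorkflowError.
await run.restartAsync();

// Time travel is keyed on the handle's runId; the body fields follow the
// updated JSDoc (step, inputData, resumeData, initialState, ...).
await run.timeTravelAsync({
  step: 'fetch-data',              // hypothetical step id
  inputData: { city: 'Berlin' },   // illustrative payload
});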
@@ -2187,32 +2953,28 @@ var Workflow = class extends BaseResource {
   }
   /**
    * Time travels a workflow run asynchronously
-   * @param params - Object containing the
+   * @param params - Object containing the step, inputData, resumeData, initialState, context, nestedStepsContext, requestContext and tracingOptions
    * @returns Promise containing the workflow time travel results
    */
-  timeTravelAsync({
-    runId,
-    requestContext: paramsRequestContext,
-    ...params
-  }) {
+  timeTravelAsync({ requestContext: paramsRequestContext, ...params }) {
     const requestContext = parseClientRequestContext(paramsRequestContext);
-    return this.request(`/api/workflows/${this.workflowId}/time-travel-async?runId=${runId}`, {
+    return this.request(`/api/workflows/${this.workflowId}/time-travel-async?runId=${this.runId}`, {
       method: "POST",
       body: {
         ...params,
         requestContext
       }
-    });
+    }).then(deserializeWorkflowError);
   }
   /**
    * Time travels a workflow run and returns a stream
-   * @param params - Object containing the
+   * @param params - Object containing the step, inputData, resumeData, initialState, context, nestedStepsContext, requestContext and tracingOptions
    * @returns Promise containing the workflow execution results
    */
-  async timeTravelStream({
+  async timeTravelStream({ requestContext: paramsRequestContext, ...params }) {
     const requestContext = parseClientRequestContext(paramsRequestContext);
     const response = await this.request(
-      `/api/workflows/${this.workflowId}/time-travel-stream?runId=${runId}`,
+      `/api/workflows/${this.workflowId}/time-travel-stream?runId=${this.runId}`,
       {
         method: "POST",
         body: {
@@ -2228,31 +2990,159 @@ var Workflow = class extends BaseResource {
     if (!response.body) {
       throw new Error("Response body is null");
     }
-
-
-
-
-
+    return response.body.pipeThrough(this.createChunkTransformStream());
+  }
+};
+
+// src/resources/workflow.ts
+var RECORD_SEPARATOR2 = "";
+var Workflow = class extends BaseResource {
+  constructor(options, workflowId) {
+    super(options);
+    this.workflowId = workflowId;
+  }
+  /**
+   * Retrieves details about the workflow
+   * @param requestContext - Optional request context to pass as query parameter
+   * @returns Promise containing workflow details including steps and graphs
+   */
+  details(requestContext) {
+    return this.request(`/api/workflows/${this.workflowId}${requestContextQueryString(requestContext)}`);
+  }
+  /**
+   * Retrieves all runs for a workflow
+   * @param params - Parameters for filtering runs
+   * @param requestContext - Optional request context to pass as query parameter
+   * @returns Promise containing workflow runs array
+   */
+  runs(params, requestContext) {
+    const requestContextParam = base64RequestContext(parseClientRequestContext(requestContext));
+    const searchParams = new URLSearchParams();
+    if (params?.fromDate) {
+      searchParams.set("fromDate", params.fromDate.toISOString());
+    }
+    if (params?.toDate) {
+      searchParams.set("toDate", params.toDate.toISOString());
+    }
+    if (params?.page !== void 0) {
+      searchParams.set("page", String(params.page));
+    }
+    if (params?.perPage !== void 0) {
+      searchParams.set("perPage", String(params.perPage));
+    }
+    if (params?.limit !== null && params?.limit !== void 0) {
+      if (params.limit === false) {
+        searchParams.set("limit", "false");
+      } else if (typeof params.limit === "number" && params.limit > 0 && Number.isInteger(params.limit)) {
+        searchParams.set("limit", String(params.limit));
+      }
+    }
+    if (params?.offset !== null && params?.offset !== void 0 && !isNaN(Number(params?.offset))) {
+      searchParams.set("offset", String(params.offset));
+    }
+    if (params?.resourceId) {
+      searchParams.set("resourceId", params.resourceId);
+    }
+    if (params?.status) {
+      searchParams.set("status", params.status);
+    }
+    if (requestContextParam) {
+      searchParams.set("requestContext", requestContextParam);
+    }
+    if (searchParams.size) {
+      return this.request(`/api/workflows/${this.workflowId}/runs?${searchParams}`);
+    } else {
+      return this.request(`/api/workflows/${this.workflowId}/runs`);
+    }
+  }
+  /**
+   * Retrieves a specific workflow run by its ID
+   * @param runId - The ID of the workflow run to retrieve
+   * @param requestContext - Optional request context to pass as query parameter
+   * @returns Promise containing the workflow run details
+   */
+  runById(runId, requestContext) {
+    return this.request(`/api/workflows/${this.workflowId}/runs/${runId}${requestContextQueryString(requestContext)}`);
+  }
+  /**
+   * Deletes a specific workflow run by its ID
+   * @param runId - The ID of the workflow run to delete
+   * @returns Promise containing a success message
+   */
+  deleteRunById(runId) {
+    return this.request(`/api/workflows/${this.workflowId}/runs/${runId}`, {
+      method: "DELETE"
+    });
+  }
+  /**
+   * Retrieves the execution result for a specific workflow run by its ID
+   * @param runId - The ID of the workflow run to retrieve the execution result for
+   * @param options - Optional configuration
+   * @param options.requestContext - Optional request context to pass as query parameter
+   * @param options.fields - Optional array of fields to return (e.g., ['status', 'result']). Available fields: status, result, error, payload, steps, activeStepsPath, serializedStepGraph. Omitting this returns all fields.
+   * @param options.withNestedWorkflows - Whether to include nested workflow data in steps. Defaults to true. Set to false for better performance when you don't need nested workflow details.
+   * @returns Promise containing the workflow run execution result
+   */
+  runExecutionResult(runId, options) {
+    const searchParams = new URLSearchParams();
+    if (options?.fields && options.fields.length > 0) {
+      searchParams.set("fields", options.fields.join(","));
+    }
+    if (options?.withNestedWorkflows !== void 0) {
+      searchParams.set("withNestedWorkflows", String(options.withNestedWorkflows));
+    }
+    const requestContextParam = base64RequestContext(parseClientRequestContext(options?.requestContext));
+    if (requestContextParam) {
+      searchParams.set("requestContext", requestContextParam);
+    }
+    const queryString = searchParams.size > 0 ? `?${searchParams.toString()}` : "";
+    return this.request(`/api/workflows/${this.workflowId}/runs/${runId}/execution-result${queryString}`);
+  }
+  /**
+   * Creates a new workflow run
+   * @param params - Optional object containing the optional runId
+   * @returns Promise containing the Run instance
+   */
+  async createRun(params) {
+    const searchParams = new URLSearchParams();
+    if (!!params?.runId) {
+      searchParams.set("runId", params.runId);
+    }
+    const res = await this.request(
+      `/api/workflows/${this.workflowId}/create-run?${searchParams.toString()}`,
+      {
+        method: "POST",
+        body: {
+          resourceId: params?.resourceId,
+          disableScorers: params?.disableScorers
+        }
+      }
+    );
+    const run = new Run(this.options, this.workflowId, res.runId);
+    return run;
+  }
+  /**
+   * Creates a new ReadableStream from an iterable or async iterable of objects,
+   * serializing each as JSON and separating them with the record separator (\x1E).
+   *
+   * @param records - An iterable or async iterable of objects to stream
+   * @returns A ReadableStream emitting the records as JSON strings separated by the record separator
+   */
+  static createRecordStream(records) {
+    const encoder = new TextEncoder();
+    return new ReadableStream({
+      async start(controller) {
         try {
-          const
-
-
-            if (chunk2) {
-              const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
-              try {
-                const parsedChunk = JSON.parse(newChunk);
-                controller.enqueue(parsedChunk);
-                failedChunk = void 0;
-              } catch {
-                failedChunk = newChunk;
-              }
-            }
+          for await (const record of records) {
+            const json = JSON.stringify(record) + RECORD_SEPARATOR2;
+            controller.enqueue(encoder.encode(json));
           }
-
+          controller.close();
+        } catch (err) {
+          controller.error(err);
         }
       }
     });
-    return response.body.pipeThrough(transformStream);
   }
 };
 
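The hunk above finishes the split between the run-scoped class and a new, slimmer Workflow resource (src/resources/workflow.ts): workflow-level reads and createRun() live on Workflow, and createRun() returns a Run handle bound to workflowId and runId. A combined sketch (illustrative, not from the diff; response shapes, run id and filter values are assumptions):

// `client` is a MastraClient as in the first sketch above.
const workflow = client.getWorkflow('myWorkflow');

// Workflow-level reads
const details = await workflow.details();
const runsPage = await workflow.runs({
  status: 'suspended',                  // illustrative filter
  fromDate: new Date('2025-01-01'),
  perPage: 20,
});

// Field selection trims the execution-result payload
const result = await workflow.runExecutionResult('some-run-id', {
  fields: ['status', 'result'],
  withNestedWorkflows: false,
});

// Run-scoped control goes through the handle returned by createRun()
const run = await workflow.createRun({ resourceId: 'user-123' });
await run.startAsync({ inputData: { city: 'Berlin' } });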
@@ -2368,7 +3258,7 @@ var MCPTool = class extends BaseResource {
 };
 
 // src/resources/agent-builder.ts
-var
+var RECORD_SEPARATOR3 = "";
 var AgentBuilder = class extends BaseResource {
   constructor(options, actionId) {
     super(options);
@@ -2414,7 +3304,7 @@ var AgentBuilder = class extends BaseResource {
       async transform(chunk, controller) {
         try {
           const decoded = new TextDecoder().decode(chunk);
-          const chunks = decoded.split(
+          const chunks = decoded.split(RECORD_SEPARATOR3);
           for (const chunk2 of chunks) {
             if (chunk2) {
               const newChunk = failedChunk ? failedChunk + chunk2 : chunk2;
@@ -2528,7 +3418,7 @@ var AgentBuilder = class extends BaseResource {
         if (done && !value) continue;
         try {
           const decoded = value ? new TextDecoder().decode(value) : "";
-          const chunks = (buffer + decoded).split(
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR3);
           buffer = chunks.pop() || "";
           for (const chunk of chunks) {
             if (chunk) {
@@ -2789,7 +3679,7 @@ var Observability = class extends BaseResource {
   getTraces(params) {
     const { pagination, filters } = params;
     const { page, perPage, dateRange } = pagination || {};
-    const { name, spanType, entityId, entityType } = filters || {};
+    const { name: name14, spanType, entityId, entityType } = filters || {};
     const searchParams = new URLSearchParams();
     if (page !== void 0) {
       searchParams.set("page", String(page));
@@ -2797,8 +3687,8 @@ var Observability = class extends BaseResource {
     if (perPage !== void 0) {
       searchParams.set("perPage", String(perPage));
     }
-    if (
-      searchParams.set("name",
+    if (name14) {
+      searchParams.set("name", name14);
     }
     if (spanType !== void 0) {
       searchParams.set("spanType", String(spanType));
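The Observability change is a bundler-level rename of the destructured name filter (name to name14) to avoid an identifier collision; the getTraces() filter surface serialized into the query string (name, spanType, entityId, entityType, plus pagination) is unchanged. A call that exercises the renamed branch (illustrative, not from the diff; how the Observability resource is obtained from the client and the filter values are assumptions):

// `observability` stands for the Observability resource exposed by the client.
const traces = await observability.getTraces({
  pagination: { page: 0, perPage: 50 },
  filters: { name: 'weather-agent' },   // serialized as ?name=... by this code
});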