@trigger.dev/sdk 4.3.0 → 4.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commonjs/v3/batch.d.ts +3 -3
- package/dist/commonjs/v3/batch.js.map +1 -1
- package/dist/commonjs/v3/idempotencyKeys.d.ts +2 -1
- package/dist/commonjs/v3/idempotencyKeys.js +1 -0
- package/dist/commonjs/v3/idempotencyKeys.js.map +1 -1
- package/dist/commonjs/v3/index.d.ts +1 -1
- package/dist/commonjs/v3/index.js +2 -1
- package/dist/commonjs/v3/index.js.map +1 -1
- package/dist/commonjs/v3/shared.d.ts +83 -1
- package/dist/commonjs/v3/shared.js +933 -541
- package/dist/commonjs/v3/shared.js.map +1 -1
- package/dist/commonjs/v3/shared.test.d.ts +1 -0
- package/dist/commonjs/v3/shared.test.js +190 -0
- package/dist/commonjs/v3/shared.test.js.map +1 -0
- package/dist/commonjs/version.js +1 -1
- package/dist/esm/v3/batch.d.ts +3 -3
- package/dist/esm/v3/batch.js +1 -1
- package/dist/esm/v3/batch.js.map +1 -1
- package/dist/esm/v3/idempotencyKeys.d.ts +2 -1
- package/dist/esm/v3/idempotencyKeys.js +2 -1
- package/dist/esm/v3/idempotencyKeys.js.map +1 -1
- package/dist/esm/v3/index.d.ts +1 -1
- package/dist/esm/v3/index.js +1 -1
- package/dist/esm/v3/index.js.map +1 -1
- package/dist/esm/v3/shared.d.ts +83 -1
- package/dist/esm/v3/shared.js +931 -541
- package/dist/esm/v3/shared.js.map +1 -1
- package/dist/esm/v3/shared.test.d.ts +1 -0
- package/dist/esm/v3/shared.test.js +188 -0
- package/dist/esm/v3/shared.test.js.map +1 -0
- package/dist/esm/version.js +1 -1
- package/package.json +3 -2
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.TaskRunPromise = exports.SubtaskUnwrapError = void 0;
|
|
3
|
+
exports.BatchTriggerError = exports.TaskRunPromise = exports.SubtaskUnwrapError = void 0;
|
|
4
4
|
exports.queue = queue;
|
|
5
5
|
exports.createTask = createTask;
|
|
6
6
|
exports.createToolTask = createToolTask;
|
|
@@ -13,6 +13,7 @@ exports.batchTriggerById = batchTriggerById;
|
|
|
13
13
|
exports.batchTriggerByIdAndWait = batchTriggerByIdAndWait;
|
|
14
14
|
exports.batchTriggerTasks = batchTriggerTasks;
|
|
15
15
|
exports.batchTriggerAndWaitTasks = batchTriggerAndWaitTasks;
|
|
16
|
+
exports.readableStreamToAsyncIterable = readableStreamToAsyncIterable;
|
|
16
17
|
const api_1 = require("@opentelemetry/api");
|
|
17
18
|
const v3_1 = require("@trigger.dev/core/v3");
|
|
18
19
|
Object.defineProperty(exports, "SubtaskUnwrapError", { enumerable: true, get: function () { return v3_1.SubtaskUnwrapError; } });
|
|
@@ -237,63 +238,14 @@ async function batchTriggerAndWait(id, items, options, requestOptions) {
|
|
|
237
238
|
async function batchTrigger(id, items, options, requestOptions) {
|
|
238
239
|
return await batchTrigger_internal("tasks.batchTrigger()", id, items, options, undefined, requestOptions);
|
|
239
240
|
}
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
* @template TTask - The type of task(s) to be triggered, extends AnyTask
|
|
244
|
-
*
|
|
245
|
-
* @param {Array<BatchByIdItem<InferRunTypes<TTask>>>} items - Array of task items to trigger
|
|
246
|
-
* @param {BatchTriggerOptions} [options] - Optional batch-level trigger options
|
|
247
|
-
* @param {TriggerApiRequestOptions} [requestOptions] - Optional API request configuration
|
|
248
|
-
*
|
|
249
|
-
* @returns {Promise<BatchRunHandleFromTypes<InferRunTypes<TTask>>>} A promise that resolves with the batch run handle
|
|
250
|
-
* containing batch ID, cached status, idempotency info, runs, and public access token
|
|
251
|
-
*
|
|
252
|
-
* @example
|
|
253
|
-
* ```ts
|
|
254
|
-
* import { batch } from "@trigger.dev/sdk/v3";
|
|
255
|
-
* import type { myTask1, myTask2 } from "~/trigger/myTasks";
|
|
256
|
-
*
|
|
257
|
-
* // Trigger multiple tasks with different payloads
|
|
258
|
-
* const result = await batch.trigger<typeof myTask1 | typeof myTask2>([
|
|
259
|
-
* {
|
|
260
|
-
* id: "my-task-1",
|
|
261
|
-
* payload: { some: "data" },
|
|
262
|
-
* options: {
|
|
263
|
-
* queue: "default",
|
|
264
|
-
* concurrencyKey: "key",
|
|
265
|
-
* idempotencyKey: "unique-key",
|
|
266
|
-
* delay: "5m",
|
|
267
|
-
* tags: ["tag1", "tag2"]
|
|
268
|
-
* }
|
|
269
|
-
* },
|
|
270
|
-
* {
|
|
271
|
-
* id: "my-task-2",
|
|
272
|
-
* payload: { other: "data" }
|
|
273
|
-
* }
|
|
274
|
-
* ]);
|
|
275
|
-
* ```
|
|
276
|
-
*
|
|
277
|
-
* @description
|
|
278
|
-
* Each task item in the array can include:
|
|
279
|
-
* - `id`: The unique identifier of the task
|
|
280
|
-
* - `payload`: The data to pass to the task
|
|
281
|
-
* - `options`: Optional task-specific settings including:
|
|
282
|
-
* - `queue`: Specify a queue for the task
|
|
283
|
-
* - `concurrencyKey`: Control concurrent execution
|
|
284
|
-
* - `idempotencyKey`: Prevent duplicate runs
|
|
285
|
-
* - `idempotencyKeyTTL`: Time-to-live for idempotency key
|
|
286
|
-
* - `delay`: Delay before task execution
|
|
287
|
-
* - `ttl`: Time-to-live for the task
|
|
288
|
-
* - `tags`: Array of tags for the task
|
|
289
|
-
* - `maxAttempts`: Maximum retry attempts
|
|
290
|
-
* - `metadata`: Additional metadata
|
|
291
|
-
* - `maxDuration`: Maximum execution duration
|
|
292
|
-
*/
|
|
293
|
-
async function batchTriggerById(items, options, requestOptions) {
|
|
241
|
+
// Implementation
|
|
242
|
+
async function batchTriggerById(...args) {
|
|
243
|
+
const [items, options, requestOptions] = args;
|
|
294
244
|
const apiClient = v3_1.apiClientManager.clientOrThrow(requestOptions?.clientConfig);
|
|
295
|
-
|
|
296
|
-
|
|
245
|
+
// Check if items is an array or a stream
|
|
246
|
+
if (Array.isArray(items)) {
|
|
247
|
+
// Array path: existing logic
|
|
248
|
+
const ndJsonItems = await Promise.all(items.map(async (item, index) => {
|
|
297
249
|
const taskMetadata = v3_1.resourceCatalog.getTask(item.id);
|
|
298
250
|
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
299
251
|
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
@@ -301,6 +253,7 @@ async function batchTriggerById(items, options, requestOptions) {
|
|
|
301
253
|
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
302
254
|
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
303
255
|
return {
|
|
256
|
+
index,
|
|
304
257
|
task: item.id,
|
|
305
258
|
payload: payloadPacket.data,
|
|
306
259
|
options: {
|
|
@@ -320,257 +273,266 @@ async function batchTriggerById(items, options, requestOptions) {
|
|
|
320
273
|
priority: item.options?.priority,
|
|
321
274
|
region: item.options?.region,
|
|
322
275
|
lockToVersion: item.options?.version ?? (0, v3_1.getEnvVar)("TRIGGER_VERSION"),
|
|
276
|
+
debounce: item.options?.debounce,
|
|
323
277
|
},
|
|
324
278
|
};
|
|
325
|
-
}))
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
279
|
+
}));
|
|
280
|
+
// Execute 2-phase batch
|
|
281
|
+
const response = await tracer_js_1.tracer.startActiveSpan("batch.trigger()", async (span) => {
|
|
282
|
+
const result = await executeBatchTwoPhase(apiClient, ndJsonItems, {
|
|
283
|
+
parentRunId: v3_1.taskContext.ctx?.run.id,
|
|
284
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
285
|
+
spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
|
|
286
|
+
}, requestOptions);
|
|
287
|
+
span.setAttribute("batchId", result.id);
|
|
288
|
+
span.setAttribute("runCount", result.runCount);
|
|
289
|
+
return result;
|
|
290
|
+
}, {
|
|
291
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
292
|
+
attributes: {
|
|
293
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
294
|
+
},
|
|
295
|
+
});
|
|
296
|
+
const handle = {
|
|
297
|
+
batchId: response.id,
|
|
298
|
+
runCount: response.runCount,
|
|
299
|
+
publicAccessToken: response.publicAccessToken,
|
|
300
|
+
};
|
|
301
|
+
return handle;
|
|
302
|
+
}
|
|
303
|
+
else {
|
|
304
|
+
// Stream path: convert to AsyncIterable and transform
|
|
305
|
+
const asyncItems = normalizeToAsyncIterable(items);
|
|
306
|
+
const transformedItems = transformBatchItemsStream(asyncItems, options);
|
|
307
|
+
// Execute streaming 2-phase batch
|
|
308
|
+
const response = await tracer_js_1.tracer.startActiveSpan("batch.trigger()", async (span) => {
|
|
309
|
+
const result = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
|
|
310
|
+
parentRunId: v3_1.taskContext.ctx?.run.id,
|
|
311
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
312
|
+
spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
|
|
313
|
+
}, requestOptions);
|
|
314
|
+
span.setAttribute("batchId", result.id);
|
|
315
|
+
span.setAttribute("runCount", result.runCount);
|
|
316
|
+
return result;
|
|
317
|
+
}, {
|
|
318
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
319
|
+
attributes: {
|
|
320
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
321
|
+
},
|
|
322
|
+
});
|
|
323
|
+
const handle = {
|
|
324
|
+
batchId: response.id,
|
|
325
|
+
runCount: response.runCount,
|
|
326
|
+
publicAccessToken: response.publicAccessToken,
|
|
327
|
+
};
|
|
328
|
+
return handle;
|
|
329
|
+
}
|
|
352
330
|
}
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
*
|
|
357
|
-
* @template TTask - Union type of tasks to be triggered, extends AnyTask
|
|
358
|
-
*
|
|
359
|
-
* @param {Array<BatchByIdAndWaitItem<InferRunTypes<TTask>>>} items - Array of task items to trigger
|
|
360
|
-
* @param {TriggerApiRequestOptions} [requestOptions] - Optional API request configuration
|
|
361
|
-
*
|
|
362
|
-
* @returns {Promise<BatchByIdResult<TTask>>} A promise that resolves with the batch results, including
|
|
363
|
-
* success/failure status and strongly-typed outputs for each task
|
|
364
|
-
*
|
|
365
|
-
* @throws {Error} If called outside of a task.run() context
|
|
366
|
-
* @throws {Error} If no API client is configured
|
|
367
|
-
*
|
|
368
|
-
* @example
|
|
369
|
-
* ```ts
|
|
370
|
-
* import { batch, task } from "@trigger.dev/sdk/v3";
|
|
371
|
-
*
|
|
372
|
-
* export const parentTask = task({
|
|
373
|
-
* id: "parent-task",
|
|
374
|
-
* run: async (payload: string) => {
|
|
375
|
-
* const results = await batch.triggerAndWait<typeof childTask1 | typeof childTask2>([
|
|
376
|
-
* {
|
|
377
|
-
* id: "child-task-1",
|
|
378
|
-
* payload: { foo: "World" },
|
|
379
|
-
* options: {
|
|
380
|
-
* queue: "default",
|
|
381
|
-
* delay: "5m",
|
|
382
|
-
* tags: ["batch", "child1"]
|
|
383
|
-
* }
|
|
384
|
-
* },
|
|
385
|
-
* {
|
|
386
|
-
* id: "child-task-2",
|
|
387
|
-
* payload: { bar: 42 }
|
|
388
|
-
* }
|
|
389
|
-
* ]);
|
|
390
|
-
*
|
|
391
|
-
* // Type-safe result handling
|
|
392
|
-
* for (const result of results) {
|
|
393
|
-
* if (result.ok) {
|
|
394
|
-
* switch (result.taskIdentifier) {
|
|
395
|
-
* case "child-task-1":
|
|
396
|
-
* console.log("Child task 1 output:", result.output); // string type
|
|
397
|
-
* break;
|
|
398
|
-
* case "child-task-2":
|
|
399
|
-
* console.log("Child task 2 output:", result.output); // number type
|
|
400
|
-
* break;
|
|
401
|
-
* }
|
|
402
|
-
* } else {
|
|
403
|
-
* console.error("Task failed:", result.error);
|
|
404
|
-
* }
|
|
405
|
-
* }
|
|
406
|
-
* }
|
|
407
|
-
* });
|
|
408
|
-
* ```
|
|
409
|
-
*
|
|
410
|
-
* @description
|
|
411
|
-
* Each task item in the array can include:
|
|
412
|
-
* - `id`: The task identifier (must match one of the tasks in the union type)
|
|
413
|
-
* - `payload`: Strongly-typed payload matching the task's input type
|
|
414
|
-
* - `options`: Optional task-specific settings including:
|
|
415
|
-
* - `queue`: Specify a queue for the task
|
|
416
|
-
* - `concurrencyKey`: Control concurrent execution
|
|
417
|
-
* - `delay`: Delay before task execution
|
|
418
|
-
* - `ttl`: Time-to-live for the task
|
|
419
|
-
* - `tags`: Array of tags for the task
|
|
420
|
-
* - `maxAttempts`: Maximum retry attempts
|
|
421
|
-
* - `metadata`: Additional metadata
|
|
422
|
-
* - `maxDuration`: Maximum execution duration
|
|
423
|
-
*
|
|
424
|
-
* The function provides full type safety for:
|
|
425
|
-
* - Task IDs
|
|
426
|
-
* - Payload types
|
|
427
|
-
* - Return value types
|
|
428
|
-
* - Error handling
|
|
429
|
-
*/
|
|
430
|
-
async function batchTriggerByIdAndWait(items, options, requestOptions) {
|
|
331
|
+
// Implementation
|
|
332
|
+
async function batchTriggerByIdAndWait(...args) {
|
|
333
|
+
const [items, options, requestOptions] = args;
|
|
431
334
|
const ctx = v3_1.taskContext.ctx;
|
|
432
335
|
if (!ctx) {
|
|
433
336
|
throw new Error("batchTriggerAndWait can only be used from inside a task.run()");
|
|
434
337
|
}
|
|
435
338
|
const apiClient = v3_1.apiClientManager.clientOrThrow(requestOptions?.clientConfig);
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
339
|
+
// Check if items is an array or a stream
|
|
340
|
+
if (Array.isArray(items)) {
|
|
341
|
+
// Array path: existing logic
|
|
342
|
+
const ndJsonItems = await Promise.all(items.map(async (item, index) => {
|
|
343
|
+
const taskMetadata = v3_1.resourceCatalog.getTask(item.id);
|
|
344
|
+
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
345
|
+
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
346
|
+
: item.payload;
|
|
347
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
348
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
349
|
+
return {
|
|
350
|
+
index,
|
|
351
|
+
task: item.id,
|
|
352
|
+
payload: payloadPacket.data,
|
|
353
|
+
options: {
|
|
354
|
+
lockToVersion: v3_1.taskContext.worker?.version,
|
|
355
|
+
queue: item.options?.queue ? { name: item.options.queue } : undefined,
|
|
356
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
357
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
358
|
+
payloadType: payloadPacket.dataType,
|
|
359
|
+
delay: item.options?.delay,
|
|
360
|
+
ttl: item.options?.ttl,
|
|
361
|
+
tags: item.options?.tags,
|
|
362
|
+
maxAttempts: item.options?.maxAttempts,
|
|
363
|
+
metadata: item.options?.metadata,
|
|
364
|
+
maxDuration: item.options?.maxDuration,
|
|
365
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
366
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
367
|
+
machine: item.options?.machine,
|
|
368
|
+
priority: item.options?.priority,
|
|
369
|
+
region: item.options?.region,
|
|
370
|
+
debounce: item.options?.debounce,
|
|
371
|
+
},
|
|
372
|
+
};
|
|
373
|
+
}));
|
|
374
|
+
return await tracer_js_1.tracer.startActiveSpan("batch.triggerAndWait()", async (span) => {
|
|
375
|
+
// Execute 2-phase batch
|
|
376
|
+
const response = await executeBatchTwoPhase(apiClient, ndJsonItems, {
|
|
377
|
+
parentRunId: ctx.run.id,
|
|
378
|
+
resumeParentOnCompletion: true,
|
|
379
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
380
|
+
spanParentAsLink: false, // Waiting: child runs share parent's trace ID
|
|
381
|
+
}, requestOptions);
|
|
382
|
+
span.setAttribute("batchId", response.id);
|
|
383
|
+
span.setAttribute("runCount", response.runCount);
|
|
384
|
+
const result = await v3_1.runtime.waitForBatch({
|
|
385
|
+
id: response.id,
|
|
386
|
+
runCount: response.runCount,
|
|
387
|
+
ctx,
|
|
388
|
+
});
|
|
389
|
+
const runs = await handleBatchTaskRunExecutionResultV2(result.items);
|
|
390
|
+
return {
|
|
391
|
+
id: result.id,
|
|
392
|
+
runs,
|
|
393
|
+
};
|
|
471
394
|
}, {
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
395
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
396
|
+
attributes: {
|
|
397
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
398
|
+
},
|
|
399
|
+
});
|
|
400
|
+
}
|
|
401
|
+
else {
|
|
402
|
+
// Stream path: convert to AsyncIterable and transform
|
|
403
|
+
const asyncItems = normalizeToAsyncIterable(items);
|
|
404
|
+
const transformedItems = transformBatchItemsStreamForWait(asyncItems, options);
|
|
405
|
+
return await tracer_js_1.tracer.startActiveSpan("batch.triggerAndWait()", async (span) => {
|
|
406
|
+
// Execute streaming 2-phase batch
|
|
407
|
+
const response = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
|
|
408
|
+
parentRunId: ctx.run.id,
|
|
409
|
+
resumeParentOnCompletion: true,
|
|
410
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
411
|
+
spanParentAsLink: false, // Waiting: child runs share parent's trace ID
|
|
412
|
+
}, requestOptions);
|
|
413
|
+
span.setAttribute("batchId", response.id);
|
|
414
|
+
span.setAttribute("runCount", response.runCount);
|
|
415
|
+
const result = await v3_1.runtime.waitForBatch({
|
|
416
|
+
id: response.id,
|
|
417
|
+
runCount: response.runCount,
|
|
418
|
+
ctx,
|
|
419
|
+
});
|
|
420
|
+
const runs = await handleBatchTaskRunExecutionResultV2(result.items);
|
|
421
|
+
return {
|
|
422
|
+
id: result.id,
|
|
423
|
+
runs,
|
|
424
|
+
};
|
|
425
|
+
}, {
|
|
426
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
427
|
+
attributes: {
|
|
428
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
429
|
+
},
|
|
430
|
+
});
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
// Implementation
|
|
434
|
+
async function batchTriggerTasks(...args) {
|
|
435
|
+
const [items, options, requestOptions] = args;
|
|
436
|
+
const apiClient = v3_1.apiClientManager.clientOrThrow(requestOptions?.clientConfig);
|
|
437
|
+
// Check if items is an array or a stream
|
|
438
|
+
if (Array.isArray(items)) {
|
|
439
|
+
// Array path: existing logic
|
|
440
|
+
const ndJsonItems = await Promise.all(items.map(async (item, index) => {
|
|
441
|
+
const taskMetadata = v3_1.resourceCatalog.getTask(item.task.id);
|
|
442
|
+
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
443
|
+
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
444
|
+
: item.payload;
|
|
445
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
446
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
447
|
+
return {
|
|
448
|
+
index,
|
|
449
|
+
task: item.task.id,
|
|
450
|
+
payload: payloadPacket.data,
|
|
451
|
+
options: {
|
|
452
|
+
queue: item.options?.queue ? { name: item.options.queue } : undefined,
|
|
453
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
454
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
455
|
+
payloadType: payloadPacket.dataType,
|
|
456
|
+
delay: item.options?.delay,
|
|
457
|
+
ttl: item.options?.ttl,
|
|
458
|
+
tags: item.options?.tags,
|
|
459
|
+
maxAttempts: item.options?.maxAttempts,
|
|
460
|
+
metadata: item.options?.metadata,
|
|
461
|
+
maxDuration: item.options?.maxDuration,
|
|
462
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
463
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
464
|
+
machine: item.options?.machine,
|
|
465
|
+
priority: item.options?.priority,
|
|
466
|
+
region: item.options?.region,
|
|
467
|
+
lockToVersion: item.options?.version ?? (0, v3_1.getEnvVar)("TRIGGER_VERSION"),
|
|
468
|
+
debounce: item.options?.debounce,
|
|
469
|
+
},
|
|
470
|
+
};
|
|
471
|
+
}));
|
|
472
|
+
// Execute 2-phase batch
|
|
473
|
+
const response = await tracer_js_1.tracer.startActiveSpan("batch.triggerByTask()", async (span) => {
|
|
474
|
+
const result = await executeBatchTwoPhase(apiClient, ndJsonItems, {
|
|
475
|
+
parentRunId: v3_1.taskContext.ctx?.run.id,
|
|
476
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
477
|
+
spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
|
|
478
|
+
}, requestOptions);
|
|
479
|
+
span.setAttribute("batchId", result.id);
|
|
480
|
+
span.setAttribute("runCount", result.runCount);
|
|
481
|
+
return result;
|
|
482
|
+
}, {
|
|
483
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
484
|
+
attributes: {
|
|
485
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
486
|
+
},
|
|
487
|
+
});
|
|
488
|
+
const handle = {
|
|
489
|
+
batchId: response.id,
|
|
478
490
|
runCount: response.runCount,
|
|
479
|
-
|
|
491
|
+
publicAccessToken: response.publicAccessToken,
|
|
492
|
+
};
|
|
493
|
+
return handle;
|
|
494
|
+
}
|
|
495
|
+
else {
|
|
496
|
+
// Stream path: convert to AsyncIterable and transform
|
|
497
|
+
const streamItems = items;
|
|
498
|
+
const asyncItems = normalizeToAsyncIterable(streamItems);
|
|
499
|
+
const transformedItems = transformBatchByTaskItemsStream(asyncItems, options);
|
|
500
|
+
// Execute streaming 2-phase batch
|
|
501
|
+
const response = await tracer_js_1.tracer.startActiveSpan("batch.triggerByTask()", async (span) => {
|
|
502
|
+
const result = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
|
|
503
|
+
parentRunId: v3_1.taskContext.ctx?.run.id,
|
|
504
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
505
|
+
spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
|
|
506
|
+
}, requestOptions);
|
|
507
|
+
span.setAttribute("batchId", result.id);
|
|
508
|
+
span.setAttribute("runCount", result.runCount);
|
|
509
|
+
return result;
|
|
510
|
+
}, {
|
|
511
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
512
|
+
attributes: {
|
|
513
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
514
|
+
},
|
|
480
515
|
});
|
|
481
|
-
const
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
516
|
+
const handle = {
|
|
517
|
+
batchId: response.id,
|
|
518
|
+
runCount: response.runCount,
|
|
519
|
+
publicAccessToken: response.publicAccessToken,
|
|
485
520
|
};
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
attributes: {
|
|
489
|
-
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
490
|
-
},
|
|
491
|
-
});
|
|
521
|
+
return handle;
|
|
522
|
+
}
|
|
492
523
|
}
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
* @param {TriggerApiRequestOptions} [requestOptions] - Optional API request configuration
|
|
501
|
-
*
|
|
502
|
-
* @returns {Promise<BatchByIdResult<TTask>>} A promise that resolves with the batch results, including
|
|
503
|
-
* success/failure status and strongly-typed outputs for each task
|
|
504
|
-
*
|
|
505
|
-
* @throws {Error} If called outside of a task.run() context
|
|
506
|
-
* @throws {Error} If no API client is configured
|
|
507
|
-
*
|
|
508
|
-
* @example
|
|
509
|
-
* ```ts
|
|
510
|
-
* import { batch, task } from "@trigger.dev/sdk/v3";
|
|
511
|
-
*
|
|
512
|
-
* export const parentTask = task({
|
|
513
|
-
* id: "parent-task",
|
|
514
|
-
* run: async (payload: string) => {
|
|
515
|
-
* const results = await batch.triggerAndWait<typeof childTask1 | typeof childTask2>([
|
|
516
|
-
* {
|
|
517
|
-
* id: "child-task-1",
|
|
518
|
-
* payload: { foo: "World" },
|
|
519
|
-
* options: {
|
|
520
|
-
* queue: "default",
|
|
521
|
-
* delay: "5m",
|
|
522
|
-
* tags: ["batch", "child1"]
|
|
523
|
-
* }
|
|
524
|
-
* },
|
|
525
|
-
* {
|
|
526
|
-
* id: "child-task-2",
|
|
527
|
-
* payload: { bar: 42 }
|
|
528
|
-
* }
|
|
529
|
-
* ]);
|
|
530
|
-
*
|
|
531
|
-
* // Type-safe result handling
|
|
532
|
-
* for (const result of results) {
|
|
533
|
-
* if (result.ok) {
|
|
534
|
-
* switch (result.taskIdentifier) {
|
|
535
|
-
* case "child-task-1":
|
|
536
|
-
* console.log("Child task 1 output:", result.output); // string type
|
|
537
|
-
* break;
|
|
538
|
-
* case "child-task-2":
|
|
539
|
-
* console.log("Child task 2 output:", result.output); // number type
|
|
540
|
-
* break;
|
|
541
|
-
* }
|
|
542
|
-
* } else {
|
|
543
|
-
* console.error("Task failed:", result.error);
|
|
544
|
-
* }
|
|
545
|
-
* }
|
|
546
|
-
* }
|
|
547
|
-
* });
|
|
548
|
-
* ```
|
|
549
|
-
*
|
|
550
|
-
* @description
|
|
551
|
-
* Each task item in the array can include:
|
|
552
|
-
* - `id`: The task identifier (must match one of the tasks in the union type)
|
|
553
|
-
* - `payload`: Strongly-typed payload matching the task's input type
|
|
554
|
-
* - `options`: Optional task-specific settings including:
|
|
555
|
-
* - `queue`: Specify a queue for the task
|
|
556
|
-
* - `concurrencyKey`: Control concurrent execution
|
|
557
|
-
* - `delay`: Delay before task execution
|
|
558
|
-
* - `ttl`: Time-to-live for the task
|
|
559
|
-
* - `tags`: Array of tags for the task
|
|
560
|
-
* - `maxAttempts`: Maximum retry attempts
|
|
561
|
-
* - `metadata`: Additional metadata
|
|
562
|
-
* - `maxDuration`: Maximum execution duration
|
|
563
|
-
*
|
|
564
|
-
* The function provides full type safety for:
|
|
565
|
-
* - Task IDs
|
|
566
|
-
* - Payload types
|
|
567
|
-
* - Return value types
|
|
568
|
-
* - Error handling
|
|
569
|
-
*/
|
|
570
|
-
async function batchTriggerTasks(items, options, requestOptions) {
|
|
524
|
+
// Implementation
|
|
525
|
+
async function batchTriggerAndWaitTasks(...args) {
|
|
526
|
+
const [items, options, requestOptions] = args;
|
|
527
|
+
const ctx = v3_1.taskContext.ctx;
|
|
528
|
+
if (!ctx) {
|
|
529
|
+
throw new Error("batchTriggerAndWait can only be used from inside a task.run()");
|
|
530
|
+
}
|
|
571
531
|
const apiClient = v3_1.apiClientManager.clientOrThrow(requestOptions?.clientConfig);
|
|
572
|
-
|
|
573
|
-
|
|
532
|
+
// Check if items is an array or a stream
|
|
533
|
+
if (Array.isArray(items)) {
|
|
534
|
+
// Array path: existing logic
|
|
535
|
+
const ndJsonItems = await Promise.all(items.map(async (item, index) => {
|
|
574
536
|
const taskMetadata = v3_1.resourceCatalog.getTask(item.task.id);
|
|
575
537
|
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
576
538
|
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
@@ -578,9 +540,11 @@ async function batchTriggerTasks(items, options, requestOptions) {
|
|
|
578
540
|
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
579
541
|
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
580
542
|
return {
|
|
543
|
+
index,
|
|
581
544
|
task: item.task.id,
|
|
582
545
|
payload: payloadPacket.data,
|
|
583
546
|
options: {
|
|
547
|
+
lockToVersion: v3_1.taskContext.worker?.version,
|
|
584
548
|
queue: item.options?.queue ? { name: item.options.queue } : undefined,
|
|
585
549
|
concurrencyKey: item.options?.concurrencyKey,
|
|
586
550
|
test: v3_1.taskContext.ctx?.run.isTest,
|
|
@@ -596,176 +560,512 @@ async function batchTriggerTasks(items, options, requestOptions) {
|
|
|
596
560
|
machine: item.options?.machine,
|
|
597
561
|
priority: item.options?.priority,
|
|
598
562
|
region: item.options?.region,
|
|
599
|
-
|
|
563
|
+
debounce: item.options?.debounce,
|
|
600
564
|
},
|
|
601
565
|
};
|
|
602
|
-
}))
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
566
|
+
}));
|
|
567
|
+
return await tracer_js_1.tracer.startActiveSpan("batch.triggerByTaskAndWait()", async (span) => {
|
|
568
|
+
// Execute 2-phase batch
|
|
569
|
+
const response = await executeBatchTwoPhase(apiClient, ndJsonItems, {
|
|
570
|
+
parentRunId: ctx.run.id,
|
|
571
|
+
resumeParentOnCompletion: true,
|
|
572
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
573
|
+
spanParentAsLink: false, // Waiting: child runs share parent's trace ID
|
|
574
|
+
}, requestOptions);
|
|
575
|
+
span.setAttribute("batchId", response.id);
|
|
576
|
+
span.setAttribute("runCount", response.runCount);
|
|
577
|
+
const result = await v3_1.runtime.waitForBatch({
|
|
578
|
+
id: response.id,
|
|
579
|
+
runCount: response.runCount,
|
|
580
|
+
ctx,
|
|
581
|
+
});
|
|
582
|
+
const runs = await handleBatchTaskRunExecutionResultV2(result.items);
|
|
583
|
+
return {
|
|
584
|
+
id: result.id,
|
|
585
|
+
runs,
|
|
586
|
+
};
|
|
587
|
+
}, {
|
|
588
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
589
|
+
attributes: {
|
|
590
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
591
|
+
},
|
|
592
|
+
});
|
|
593
|
+
}
|
|
594
|
+
else {
|
|
595
|
+
// Stream path: convert to AsyncIterable and transform
|
|
596
|
+
const streamItems = items;
|
|
597
|
+
const asyncItems = normalizeToAsyncIterable(streamItems);
|
|
598
|
+
const transformedItems = transformBatchByTaskItemsStreamForWait(asyncItems, options);
|
|
599
|
+
return await tracer_js_1.tracer.startActiveSpan("batch.triggerByTaskAndWait()", async (span) => {
|
|
600
|
+
// Execute streaming 2-phase batch
|
|
601
|
+
const response = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
|
|
602
|
+
parentRunId: ctx.run.id,
|
|
603
|
+
resumeParentOnCompletion: true,
|
|
604
|
+
idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
|
|
605
|
+
spanParentAsLink: false, // Waiting: child runs share parent's trace ID
|
|
606
|
+
}, requestOptions);
|
|
607
|
+
span.setAttribute("batchId", response.id);
|
|
608
|
+
span.setAttribute("runCount", response.runCount);
|
|
609
|
+
const result = await v3_1.runtime.waitForBatch({
|
|
610
|
+
id: response.id,
|
|
611
|
+
runCount: response.runCount,
|
|
612
|
+
ctx,
|
|
613
|
+
});
|
|
614
|
+
const runs = await handleBatchTaskRunExecutionResultV2(result.items);
|
|
615
|
+
return {
|
|
616
|
+
id: result.id,
|
|
617
|
+
runs,
|
|
618
|
+
};
|
|
619
|
+
}, {
|
|
620
|
+
kind: api_1.SpanKind.PRODUCER,
|
|
621
|
+
attributes: {
|
|
622
|
+
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
623
|
+
},
|
|
624
|
+
});
|
|
625
|
+
}
|
|
629
626
|
}
|
|
630
627
|
/**
|
|
631
|
-
*
|
|
632
|
-
*
|
|
628
|
+
* Helper function that executes a 2-phase batch trigger:
|
|
629
|
+
* 1. Creates the batch record with expected run count
|
|
630
|
+
* 2. Streams items as NDJSON to the server
|
|
633
631
|
*
|
|
634
|
-
* @
|
|
632
|
+
* @param apiClient - The API client instance
|
|
633
|
+
* @param items - Array of batch items
|
|
634
|
+
* @param options - Batch options including trace context settings
|
|
635
|
+
* @param options.spanParentAsLink - If true, child runs will have separate trace IDs with a link to parent.
|
|
636
|
+
* Use true for batchTrigger (fire-and-forget), false for batchTriggerAndWait.
|
|
637
|
+
* @param requestOptions - Optional request options
|
|
638
|
+
* @internal
|
|
639
|
+
*/
|
|
640
|
+
async function executeBatchTwoPhase(apiClient, items, options, requestOptions) {
|
|
641
|
+
let batch;
|
|
642
|
+
try {
|
|
643
|
+
// Phase 1: Create batch
|
|
644
|
+
batch = await apiClient.createBatch({
|
|
645
|
+
runCount: items.length,
|
|
646
|
+
parentRunId: options.parentRunId,
|
|
647
|
+
resumeParentOnCompletion: options.resumeParentOnCompletion,
|
|
648
|
+
idempotencyKey: options.idempotencyKey,
|
|
649
|
+
}, { spanParentAsLink: options.spanParentAsLink }, requestOptions);
|
|
650
|
+
}
|
|
651
|
+
catch (error) {
|
|
652
|
+
// Wrap with context about which phase failed
|
|
653
|
+
throw new BatchTriggerError(`Failed to create batch with ${items.length} items`, {
|
|
654
|
+
cause: error,
|
|
655
|
+
phase: "create",
|
|
656
|
+
itemCount: items.length,
|
|
657
|
+
});
|
|
658
|
+
}
|
|
659
|
+
// If the batch was cached (idempotent replay), skip streaming items
|
|
660
|
+
if (!batch.isCached) {
|
|
661
|
+
try {
|
|
662
|
+
// Phase 2: Stream items
|
|
663
|
+
await apiClient.streamBatchItems(batch.id, items, requestOptions);
|
|
664
|
+
}
|
|
665
|
+
catch (error) {
|
|
666
|
+
// Wrap with context about which phase failed and include batch ID
|
|
667
|
+
throw new BatchTriggerError(`Failed to stream items for batch ${batch.id} (${items.length} items)`, { cause: error, phase: "stream", batchId: batch.id, itemCount: items.length });
|
|
668
|
+
}
|
|
669
|
+
}
|
|
670
|
+
return {
|
|
671
|
+
id: batch.id,
|
|
672
|
+
runCount: batch.runCount,
|
|
673
|
+
publicAccessToken: batch.publicAccessToken,
|
|
674
|
+
};
|
|
675
|
+
}
|
|
676
|
+
/**
|
|
677
|
+
* Error thrown when batch trigger operations fail.
|
|
678
|
+
* Includes context about which phase failed and the batch details.
|
|
635
679
|
*
|
|
636
|
-
*
|
|
637
|
-
*
|
|
680
|
+
* When the underlying error is a rate limit (429), additional properties are exposed:
|
|
681
|
+
* - `isRateLimited`: true
|
|
682
|
+
* - `retryAfterMs`: milliseconds until the rate limit resets
|
|
683
|
+
*/
|
|
684
|
+
class BatchTriggerError extends Error {
|
|
685
|
+
phase;
|
|
686
|
+
batchId;
|
|
687
|
+
itemCount;
|
|
688
|
+
/** True if the error was caused by rate limiting (HTTP 429) */
|
|
689
|
+
isRateLimited;
|
|
690
|
+
/** Milliseconds until the rate limit resets. Only set when `isRateLimited` is true. */
|
|
691
|
+
retryAfterMs;
|
|
692
|
+
/** The underlying API error, if the cause was an ApiError */
|
|
693
|
+
apiError;
|
|
694
|
+
/** The underlying cause of the error */
|
|
695
|
+
cause;
|
|
696
|
+
constructor(message, options) {
|
|
697
|
+
// Build enhanced message that includes the cause's message
|
|
698
|
+
const fullMessage = buildBatchErrorMessage(message, options.cause);
|
|
699
|
+
super(fullMessage, { cause: options.cause });
|
|
700
|
+
this.name = "BatchTriggerError";
|
|
701
|
+
this.cause = options.cause;
|
|
702
|
+
this.phase = options.phase;
|
|
703
|
+
this.batchId = options.batchId;
|
|
704
|
+
this.itemCount = options.itemCount;
|
|
705
|
+
// Extract rate limit info from cause
|
|
706
|
+
if (options.cause instanceof v3_1.RateLimitError) {
|
|
707
|
+
this.isRateLimited = true;
|
|
708
|
+
this.retryAfterMs = options.cause.millisecondsUntilReset;
|
|
709
|
+
this.apiError = options.cause;
|
|
710
|
+
}
|
|
711
|
+
else if (options.cause instanceof v3_1.ApiError) {
|
|
712
|
+
this.isRateLimited = options.cause.status === 429;
|
|
713
|
+
this.apiError = options.cause;
|
|
714
|
+
}
|
|
715
|
+
else {
|
|
716
|
+
this.isRateLimited = false;
|
|
717
|
+
}
|
|
718
|
+
}
|
|
719
|
+
}
|
|
720
|
+
exports.BatchTriggerError = BatchTriggerError;
|
|
721
|
+
/**
|
|
722
|
+
* Build an enhanced error message that includes context from the cause.
|
|
723
|
+
*/
|
|
724
|
+
function buildBatchErrorMessage(baseMessage, cause) {
|
|
725
|
+
if (!cause) {
|
|
726
|
+
return baseMessage;
|
|
727
|
+
}
|
|
728
|
+
// Handle RateLimitError specifically for better messaging
|
|
729
|
+
if (cause instanceof v3_1.RateLimitError) {
|
|
730
|
+
const retryMs = cause.millisecondsUntilReset;
|
|
731
|
+
if (retryMs !== undefined) {
|
|
732
|
+
const retrySeconds = Math.ceil(retryMs / 1000);
|
|
733
|
+
return `${baseMessage}: Rate limit exceeded - retry after ${retrySeconds}s`;
|
|
734
|
+
}
|
|
735
|
+
return `${baseMessage}: Rate limit exceeded`;
|
|
736
|
+
}
|
|
737
|
+
// Handle other ApiErrors
|
|
738
|
+
if (cause instanceof v3_1.ApiError) {
|
|
739
|
+
return `${baseMessage}: ${cause.message}`;
|
|
740
|
+
}
|
|
741
|
+
// Handle generic errors
|
|
742
|
+
if (cause instanceof Error) {
|
|
743
|
+
return `${baseMessage}: ${cause.message}`;
|
|
744
|
+
}
|
|
745
|
+
return baseMessage;
|
|
746
|
+
}
|
|
747
|
+
/**
|
|
748
|
+
* Execute a streaming 2-phase batch trigger where items are streamed from an AsyncIterable.
|
|
749
|
+
* Unlike executeBatchTwoPhase, this doesn't know the count upfront.
|
|
638
750
|
*
|
|
639
|
-
* @
|
|
640
|
-
*
|
|
751
|
+
* @param apiClient - The API client instance
|
|
752
|
+
* @param items - AsyncIterable of batch items
|
|
753
|
+
* @param options - Batch options including trace context settings
|
|
754
|
+
* @param options.spanParentAsLink - If true, child runs will have separate trace IDs with a link to parent.
|
|
755
|
+
* Use true for batchTrigger (fire-and-forget), false for batchTriggerAndWait.
|
|
756
|
+
* @param requestOptions - Optional request options
|
|
757
|
+
* @internal
|
|
758
|
+
*/
|
|
759
|
+
async function executeBatchTwoPhaseStreaming(apiClient, items, options, requestOptions) {
|
|
760
|
+
// For streaming, we need to buffer items to get the count first
|
|
761
|
+
// This is because createBatch requires runCount upfront
|
|
762
|
+
// In the future, we could add a streaming-first endpoint that doesn't require this
|
|
763
|
+
const itemsArray = [];
|
|
764
|
+
for await (const item of items) {
|
|
765
|
+
itemsArray.push(item);
|
|
766
|
+
}
|
|
767
|
+
// Now we can use the regular 2-phase approach
|
|
768
|
+
return executeBatchTwoPhase(apiClient, itemsArray, options, requestOptions);
|
|
769
|
+
}
|
|
770
|
+
// ============================================================================
|
|
771
|
+
// Streaming Helpers
|
|
772
|
+
// ============================================================================
|
|
773
|
+
/**
|
|
774
|
+
* Type guard to check if a value is an AsyncIterable
|
|
775
|
+
*/
|
|
776
|
+
function isAsyncIterable(value) {
|
|
777
|
+
return (value != null &&
|
|
778
|
+
typeof value === "object" &&
|
|
779
|
+
Symbol.asyncIterator in value &&
|
|
780
|
+
typeof value[Symbol.asyncIterator] === "function");
|
|
781
|
+
}
|
|
782
|
+
/**
|
|
783
|
+
* Type guard to check if a value is a ReadableStream
|
|
784
|
+
*/
|
|
785
|
+
function isReadableStream(value) {
|
|
786
|
+
return (value != null &&
|
|
787
|
+
typeof value === "object" &&
|
|
788
|
+
"getReader" in value &&
|
|
789
|
+
typeof value.getReader === "function");
|
|
790
|
+
}
|
|
791
|
+
/**
|
|
792
|
+
* Convert a ReadableStream to an AsyncIterable.
|
|
793
|
+
* Properly cancels the stream when the consumer terminates early.
|
|
641
794
|
*
|
|
642
|
-
* @
|
|
643
|
-
|
|
795
|
+
* @internal Exported for testing purposes
|
|
796
|
+
*/
|
|
797
|
+
async function* readableStreamToAsyncIterable(stream) {
|
|
798
|
+
const reader = stream.getReader();
|
|
799
|
+
try {
|
|
800
|
+
while (true) {
|
|
801
|
+
const { done, value } = await reader.read();
|
|
802
|
+
if (done)
|
|
803
|
+
break;
|
|
804
|
+
yield value;
|
|
805
|
+
}
|
|
806
|
+
}
|
|
807
|
+
finally {
|
|
808
|
+
try {
|
|
809
|
+
await reader.cancel();
|
|
810
|
+
}
|
|
811
|
+
catch {
|
|
812
|
+
// Ignore errors - stream might already be errored or closed
|
|
813
|
+
}
|
|
814
|
+
reader.releaseLock();
|
|
815
|
+
}
|
|
816
|
+
}
|
|
817
|
+
/**
|
|
818
|
+
* Normalize stream input to AsyncIterable
|
|
819
|
+
*/
|
|
820
|
+
function normalizeToAsyncIterable(input) {
|
|
821
|
+
if (isReadableStream(input)) {
|
|
822
|
+
return readableStreamToAsyncIterable(input);
|
|
823
|
+
}
|
|
824
|
+
return input;
|
|
825
|
+
}
|
|
826
|
+
/**
|
|
827
|
+
* Transform a stream of BatchByIdItem to BatchItemNDJSON format.
|
|
828
|
+
* Handles payload serialization and idempotency key generation.
|
|
644
829
|
*
|
|
645
|
-
* @
|
|
646
|
-
|
|
647
|
-
*
|
|
830
|
+
* @internal
|
|
831
|
+
*/
|
|
832
|
+
async function* transformBatchItemsStream(items, options) {
|
|
833
|
+
let index = 0;
|
|
834
|
+
for await (const item of items) {
|
|
835
|
+
const taskMetadata = v3_1.resourceCatalog.getTask(item.id);
|
|
836
|
+
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
837
|
+
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
838
|
+
: item.payload;
|
|
839
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
840
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
841
|
+
yield {
|
|
842
|
+
index: index++,
|
|
843
|
+
task: item.id,
|
|
844
|
+
payload: payloadPacket.data,
|
|
845
|
+
options: {
|
|
846
|
+
queue: item.options?.queue ? { name: item.options.queue } : undefined,
|
|
847
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
848
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
849
|
+
payloadType: payloadPacket.dataType,
|
|
850
|
+
delay: item.options?.delay,
|
|
851
|
+
ttl: item.options?.ttl,
|
|
852
|
+
tags: item.options?.tags,
|
|
853
|
+
maxAttempts: item.options?.maxAttempts,
|
|
854
|
+
metadata: item.options?.metadata,
|
|
855
|
+
maxDuration: item.options?.maxDuration,
|
|
856
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
857
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
858
|
+
machine: item.options?.machine,
|
|
859
|
+
priority: item.options?.priority,
|
|
860
|
+
region: item.options?.region,
|
|
861
|
+
lockToVersion: item.options?.version ?? (0, v3_1.getEnvVar)("TRIGGER_VERSION"),
|
|
862
|
+
debounce: item.options?.debounce,
|
|
863
|
+
},
|
|
864
|
+
};
|
|
865
|
+
}
|
|
866
|
+
}
|
|
867
|
+
/**
|
|
868
|
+
* Transform a stream of BatchByIdAndWaitItem to BatchItemNDJSON format for triggerAndWait.
|
|
869
|
+
* Uses the current worker version for lockToVersion.
|
|
648
870
|
*
|
|
649
|
-
*
|
|
650
|
-
|
|
651
|
-
*
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
871
|
+
* @internal
|
|
872
|
+
*/
|
|
873
|
+
async function* transformBatchItemsStreamForWait(items, options) {
|
|
874
|
+
let index = 0;
|
|
875
|
+
for await (const item of items) {
|
|
876
|
+
const taskMetadata = v3_1.resourceCatalog.getTask(item.id);
|
|
877
|
+
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
878
|
+
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
879
|
+
: item.payload;
|
|
880
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
881
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
882
|
+
yield {
|
|
883
|
+
index: index++,
|
|
884
|
+
task: item.id,
|
|
885
|
+
payload: payloadPacket.data,
|
|
886
|
+
options: {
|
|
887
|
+
lockToVersion: v3_1.taskContext.worker?.version,
|
|
888
|
+
queue: item.options?.queue ? { name: item.options.queue } : undefined,
|
|
889
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
890
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
891
|
+
payloadType: payloadPacket.dataType,
|
|
892
|
+
delay: item.options?.delay,
|
|
893
|
+
ttl: item.options?.ttl,
|
|
894
|
+
tags: item.options?.tags,
|
|
895
|
+
maxAttempts: item.options?.maxAttempts,
|
|
896
|
+
metadata: item.options?.metadata,
|
|
897
|
+
maxDuration: item.options?.maxDuration,
|
|
898
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
899
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
900
|
+
machine: item.options?.machine,
|
|
901
|
+
priority: item.options?.priority,
|
|
902
|
+
region: item.options?.region,
|
|
903
|
+
debounce: item.options?.debounce,
|
|
904
|
+
},
|
|
905
|
+
};
|
|
906
|
+
}
|
|
907
|
+
}
|
|
908
|
+
/**
|
|
909
|
+
* Transform a stream of BatchByTaskItem to BatchItemNDJSON format.
|
|
667
910
|
*
|
|
668
|
-
*
|
|
669
|
-
|
|
670
|
-
*
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
911
|
+
* @internal
|
|
912
|
+
*/
|
|
913
|
+
async function* transformBatchByTaskItemsStream(items, options) {
|
|
914
|
+
let index = 0;
|
|
915
|
+
for await (const item of items) {
|
|
916
|
+
const taskMetadata = v3_1.resourceCatalog.getTask(item.task.id);
|
|
917
|
+
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
918
|
+
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
919
|
+
: item.payload;
|
|
920
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
921
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
922
|
+
yield {
|
|
923
|
+
index: index++,
|
|
924
|
+
task: item.task.id,
|
|
925
|
+
payload: payloadPacket.data,
|
|
926
|
+
options: {
|
|
927
|
+
queue: item.options?.queue ? { name: item.options.queue } : undefined,
|
|
928
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
929
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
930
|
+
payloadType: payloadPacket.dataType,
|
|
931
|
+
delay: item.options?.delay,
|
|
932
|
+
ttl: item.options?.ttl,
|
|
933
|
+
tags: item.options?.tags,
|
|
934
|
+
maxAttempts: item.options?.maxAttempts,
|
|
935
|
+
metadata: item.options?.metadata,
|
|
936
|
+
maxDuration: item.options?.maxDuration,
|
|
937
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
938
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
939
|
+
machine: item.options?.machine,
|
|
940
|
+
priority: item.options?.priority,
|
|
941
|
+
region: item.options?.region,
|
|
942
|
+
lockToVersion: item.options?.version ?? (0, v3_1.getEnvVar)("TRIGGER_VERSION"),
|
|
943
|
+
debounce: item.options?.debounce,
|
|
944
|
+
},
|
|
945
|
+
};
|
|
946
|
+
}
|
|
947
|
+
}
|
|
948
|
+
/**
|
|
949
|
+
* Transform a stream of BatchByTaskAndWaitItem to BatchItemNDJSON format for triggerAndWait.
|
|
686
950
|
*
|
|
687
|
-
* @
|
|
688
|
-
|
|
689
|
-
*
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
951
|
+
* @internal
|
|
952
|
+
*/
|
|
953
|
+
async function* transformBatchByTaskItemsStreamForWait(items, options) {
|
|
954
|
+
let index = 0;
|
|
955
|
+
for await (const item of items) {
|
|
956
|
+
const taskMetadata = v3_1.resourceCatalog.getTask(item.task.id);
|
|
957
|
+
const parsedPayload = taskMetadata?.fns.parsePayload
|
|
958
|
+
? await taskMetadata?.fns.parsePayload(item.payload)
|
|
959
|
+
: item.payload;
|
|
960
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
961
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
962
|
+
yield {
|
|
963
|
+
index: index++,
|
|
964
|
+
task: item.task.id,
|
|
965
|
+
payload: payloadPacket.data,
|
|
966
|
+
options: {
|
|
967
|
+
lockToVersion: v3_1.taskContext.worker?.version,
|
|
968
|
+
queue: item.options?.queue ? { name: item.options.queue } : undefined,
|
|
969
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
970
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
971
|
+
payloadType: payloadPacket.dataType,
|
|
972
|
+
delay: item.options?.delay,
|
|
973
|
+
ttl: item.options?.ttl,
|
|
974
|
+
tags: item.options?.tags,
|
|
975
|
+
maxAttempts: item.options?.maxAttempts,
|
|
976
|
+
metadata: item.options?.metadata,
|
|
977
|
+
maxDuration: item.options?.maxDuration,
|
|
978
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
979
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
980
|
+
machine: item.options?.machine,
|
|
981
|
+
priority: item.options?.priority,
|
|
982
|
+
region: item.options?.region,
|
|
983
|
+
debounce: item.options?.debounce,
|
|
984
|
+
},
|
|
985
|
+
};
|
|
986
|
+
}
|
|
987
|
+
}
|
|
988
|
+
/**
|
|
989
|
+
* Transform a stream of BatchItem (single task type) to BatchItemNDJSON format.
|
|
700
990
|
*
|
|
701
|
-
*
|
|
702
|
-
* - Task IDs
|
|
703
|
-
* - Payload types
|
|
704
|
-
* - Return value types
|
|
705
|
-
* - Error handling
|
|
991
|
+
* @internal
|
|
706
992
|
*/
|
|
707
|
-
async function
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
993
|
+
async function* transformSingleTaskBatchItemsStream(taskIdentifier, items, parsePayload, options, queue) {
|
|
994
|
+
let index = 0;
|
|
995
|
+
for await (const item of items) {
|
|
996
|
+
const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
|
|
997
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
998
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
999
|
+
yield {
|
|
1000
|
+
index: index++,
|
|
1001
|
+
task: taskIdentifier,
|
|
1002
|
+
payload: payloadPacket.data,
|
|
1003
|
+
options: {
|
|
1004
|
+
queue: item.options?.queue
|
|
1005
|
+
? { name: item.options.queue }
|
|
1006
|
+
: queue
|
|
1007
|
+
? { name: queue }
|
|
1008
|
+
: undefined,
|
|
1009
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
1010
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
1011
|
+
payloadType: payloadPacket.dataType,
|
|
1012
|
+
delay: item.options?.delay,
|
|
1013
|
+
ttl: item.options?.ttl,
|
|
1014
|
+
tags: item.options?.tags,
|
|
1015
|
+
maxAttempts: item.options?.maxAttempts,
|
|
1016
|
+
metadata: item.options?.metadata,
|
|
1017
|
+
maxDuration: item.options?.maxDuration,
|
|
1018
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
1019
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
1020
|
+
machine: item.options?.machine,
|
|
1021
|
+
priority: item.options?.priority,
|
|
1022
|
+
region: item.options?.region,
|
|
1023
|
+
lockToVersion: item.options?.version ?? (0, v3_1.getEnvVar)("TRIGGER_VERSION"),
|
|
1024
|
+
debounce: item.options?.debounce,
|
|
1025
|
+
},
|
|
1026
|
+
};
|
|
711
1027
|
}
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
span.setAttribute("batchId", response.id);
|
|
752
|
-
span.setAttribute("runCount", response.runCount);
|
|
753
|
-
const result = await v3_1.runtime.waitForBatch({
|
|
754
|
-
id: response.id,
|
|
755
|
-
runCount: response.runCount,
|
|
756
|
-
ctx,
|
|
757
|
-
});
|
|
758
|
-
const runs = await handleBatchTaskRunExecutionResultV2(result.items);
|
|
759
|
-
return {
|
|
760
|
-
id: result.id,
|
|
761
|
-
runs,
|
|
1028
|
+
}
|
|
1029
|
+
/**
|
|
1030
|
+
* Transform a stream of BatchTriggerAndWaitItem (single task type) to BatchItemNDJSON format.
|
|
1031
|
+
*
|
|
1032
|
+
* @internal
|
|
1033
|
+
*/
|
|
1034
|
+
async function* transformSingleTaskBatchItemsStreamForWait(taskIdentifier, items, parsePayload, options, queue) {
|
|
1035
|
+
let index = 0;
|
|
1036
|
+
for await (const item of items) {
|
|
1037
|
+
const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
|
|
1038
|
+
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
1039
|
+
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
1040
|
+
yield {
|
|
1041
|
+
index: index++,
|
|
1042
|
+
task: taskIdentifier,
|
|
1043
|
+
payload: payloadPacket.data,
|
|
1044
|
+
options: {
|
|
1045
|
+
lockToVersion: v3_1.taskContext.worker?.version,
|
|
1046
|
+
queue: item.options?.queue
|
|
1047
|
+
? { name: item.options.queue }
|
|
1048
|
+
: queue
|
|
1049
|
+
? { name: queue }
|
|
1050
|
+
: undefined,
|
|
1051
|
+
concurrencyKey: item.options?.concurrencyKey,
|
|
1052
|
+
test: v3_1.taskContext.ctx?.run.isTest,
|
|
1053
|
+
payloadType: payloadPacket.dataType,
|
|
1054
|
+
delay: item.options?.delay,
|
|
1055
|
+
ttl: item.options?.ttl,
|
|
1056
|
+
tags: item.options?.tags,
|
|
1057
|
+
maxAttempts: item.options?.maxAttempts,
|
|
1058
|
+
metadata: item.options?.metadata,
|
|
1059
|
+
maxDuration: item.options?.maxDuration,
|
|
1060
|
+
idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
|
|
1061
|
+
idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
|
|
1062
|
+
machine: item.options?.machine,
|
|
1063
|
+
priority: item.options?.priority,
|
|
1064
|
+
region: item.options?.region,
|
|
1065
|
+
debounce: item.options?.debounce,
|
|
1066
|
+
},
|
|
762
1067
|
};
|
|
763
|
-
}
|
|
764
|
-
kind: api_1.SpanKind.PRODUCER,
|
|
765
|
-
attributes: {
|
|
766
|
-
[v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
|
|
767
|
-
},
|
|
768
|
-
});
|
|
1068
|
+
}
|
|
769
1069
|
}
|
|
770
1070
|
async function trigger_internal(name, id, payload, parsePayload, options, requestOptions) {
|
|
771
1071
|
const apiClient = v3_1.apiClientManager.clientOrThrow(requestOptions?.clientConfig);
|
|
@@ -791,6 +1091,7 @@ async function trigger_internal(name, id, payload, parsePayload, options, reques
|
|
|
791
1091
|
priority: options?.priority,
|
|
792
1092
|
region: options?.region,
|
|
793
1093
|
lockToVersion: options?.version ?? (0, v3_1.getEnvVar)("TRIGGER_VERSION"),
|
|
1094
|
+
debounce: options?.debounce,
|
|
794
1095
|
},
|
|
795
1096
|
}, {
|
|
796
1097
|
spanParentAsLink: true,
|
|
@@ -812,12 +1113,15 @@ async function trigger_internal(name, id, payload, parsePayload, options, reques
|
|
|
812
1113
|
async function batchTrigger_internal(name, taskIdentifier, items, options, parsePayload, requestOptions, queue) {
|
|
813
1114
|
const apiClient = v3_1.apiClientManager.clientOrThrow(requestOptions?.clientConfig);
|
|
814
1115
|
const ctx = v3_1.taskContext.ctx;
|
|
815
|
-
|
|
816
|
-
|
|
1116
|
+
// Check if items is an array or a stream
|
|
1117
|
+
if (Array.isArray(items)) {
|
|
1118
|
+
// Prepare items as BatchItemNDJSON
|
|
1119
|
+
const ndJsonItems = await Promise.all(items.map(async (item, index) => {
|
|
817
1120
|
const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
|
|
818
1121
|
const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
|
|
819
1122
|
const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
|
|
820
1123
|
return {
|
|
1124
|
+
index,
|
|
821
1125
|
task: taskIdentifier,
|
|
822
1126
|
payload: payloadPacket.data,
|
|
823
1127
|
options: {
|
|
@@ -843,33 +1147,75 @@ async function batchTrigger_internal(name, taskIdentifier, items, options, parse
 843 1147 |   lockToVersion: item.options?.version ?? (0, v3_1.getEnvVar)("TRIGGER_VERSION"),
 844 1148 |   },
 845 1149 |   };
 846      | - }))
 847-872  | - (26 removed lines; content not shown in this view)
     1150 | + }));
     1151 | + // Execute 2-phase batch
     1152 | + const response = await tracer_js_1.tracer.startActiveSpan(name, async (span) => {
     1153 | + const result = await executeBatchTwoPhase(apiClient, ndJsonItems, {
     1154 | + parentRunId: ctx?.run.id,
     1155 | + idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
     1156 | + spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
     1157 | + }, requestOptions);
     1158 | + span.setAttribute("batchId", result.id);
     1159 | + span.setAttribute("runCount", result.runCount);
     1160 | + return result;
     1161 | + }, {
     1162 | + kind: api_1.SpanKind.PRODUCER,
     1163 | + attributes: {
     1164 | + [v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
     1165 | + ...(0, v3_1.accessoryAttributes)({
     1166 | + items: [
     1167 | + {
     1168 | + text: taskIdentifier,
     1169 | + variant: "normal",
     1170 | + },
     1171 | + ],
     1172 | + style: "codepath",
     1173 | + }),
     1174 | + },
     1175 | + });
     1176 | + const handle = {
     1177 | + batchId: response.id,
     1178 | + runCount: response.runCount,
     1179 | + publicAccessToken: response.publicAccessToken,
     1180 | + };
     1181 | + return handle;
     1182 | + }
     1183 | + else {
     1184 | + // Stream path: convert to AsyncIterable and transform
     1185 | + const asyncItems = normalizeToAsyncIterable(items);
     1186 | + const transformedItems = transformSingleTaskBatchItemsStream(taskIdentifier, asyncItems, parsePayload, options, queue);
     1187 | + // Execute streaming 2-phase batch
     1188 | + const response = await tracer_js_1.tracer.startActiveSpan(name, async (span) => {
     1189 | + const result = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
     1190 | + parentRunId: ctx?.run.id,
     1191 | + idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
     1192 | + spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
     1193 | + }, requestOptions);
     1194 | + span.setAttribute("batchId", result.id);
     1195 | + span.setAttribute("runCount", result.runCount);
     1196 | + return result;
     1197 | + }, {
     1198 | + kind: api_1.SpanKind.PRODUCER,
     1199 | + attributes: {
     1200 | + [v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
     1201 | + ...(0, v3_1.accessoryAttributes)({
     1202 | + items: [
     1203 | + {
     1204 | + text: taskIdentifier,
     1205 | + variant: "normal",
     1206 | + },
     1207 | + ],
     1208 | + style: "codepath",
     1209 | + }),
     1210 | + },
     1211 | + });
     1212 | + const handle = {
     1213 | + batchId: response.id,
     1214 | + runCount: response.runCount,
     1215 | + publicAccessToken: response.publicAccessToken,
     1216 | + };
     1217 | + return handle;
     1218 | + }
 873 1219 |   }
 874 1220 |   async function triggerAndWait_internal(name, id, payload, parsePayload, options, requestOptions) {
 875 1221 |   const ctx = v3_1.taskContext.ctx;
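
Example (editorial, not part of the published diff): per the hunk above, batchTrigger_internal now branches on Array.isArray(items) — an array is packed into NDJSON items up front and sent via executeBatchTwoPhase, while anything else is normalized to an AsyncIterable and sent via executeBatchTwoPhaseStreaming; both paths return a handle with batchId, runCount and publicAccessToken. A sketch of what this looks like from the caller's side; the array form is the established public API, while the async-iterable form is inferred from this code path and its exact public typing in 4.3.x is an assumption.

    import { tasks } from "@trigger.dev/sdk";

    // Array form: items prepared eagerly, then sent as one two-phase batch.
    const handle = await tasks.batchTrigger("import-row", [
      { payload: { row: 1 } },
      { payload: { row: 2 } },
    ]);
    console.log(handle.batchId, handle.runCount); // fields set on the new handle object

    // Inferred stream form: a lazy source of items instead of a prebuilt array.
    async function* rows() {
      for (let row = 0; row < 10_000; row++) {
        yield { payload: { row } };
      }
    }
    // Assumption: a non-array (async iterable) input is accepted as-is by the 4.3.x API.
    const streamed = await tasks.batchTrigger("import-row", rows());
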
@@ -901,6 +1247,7 @@ async function triggerAndWait_internal(name, id, payload, parsePayload, options,
 901 1247 |   machine: options?.machine,
 902 1248 |   priority: options?.priority,
 903 1249 |   region: options?.region,
     1250 | + debounce: options?.debounce,
 904 1251 |   },
 905 1252 |   }, {}, requestOptions);
 906 1253 |   span.setAttribute("runId", response.id);
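
Example (editorial, not part of the published diff): triggerAndWait_internal gets the same "debounce" pass-through. A short sketch mirroring the earlier one, with the same caveat that the option's value shape is assumed.

    import { task } from "@trigger.dev/sdk";
    import { syncUser } from "./sync-user"; // hypothetical child task

    export const parentTask = task({
      id: "parent-task",
      run: async (payload: { userId: string }) => {
        // Only the option name "debounce" appears in the diff; the value shape is assumed.
        const result = await syncUser.triggerAndWait(
          { userId: payload.userId },
          { debounce: { key: payload.userId, delay: "30s" } }
        );
        return result.ok ? result.output : undefined;
      },
    });
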
@@ -931,72 +1278,117 @@ async function batchTriggerAndWait_internal(name, id, items, parsePayload, optio
 931 1278 |   throw new Error("batchTriggerAndWait can only be used from inside a task.run()");
 932 1279 |   }
 933 1280 |   const apiClient = v3_1.apiClientManager.clientOrThrow(requestOptions?.clientConfig);
 934-965  | - (32 removed lines; content not shown in this view)
 966      | - }
 967      | - }
 968-969  | - (2 removed lines; content not shown in this view)
     1281 | + // Check if items is an array or a stream
     1282 | + if (Array.isArray(items)) {
     1283 | + // Prepare items as BatchItemNDJSON
     1284 | + const ndJsonItems = await Promise.all(items.map(async (item, index) => {
     1285 | + const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
     1286 | + const payloadPacket = await (0, v3_1.stringifyIO)(parsedPayload);
     1287 | + const batchItemIdempotencyKey = await (0, v3_1.makeIdempotencyKey)((0, v3_1.flattenIdempotencyKey)([options?.idempotencyKey, `${index}`]));
     1288 | + return {
     1289 | + index,
     1290 | + task: id,
     1291 | + payload: payloadPacket.data,
     1292 | + options: {
     1293 | + lockToVersion: v3_1.taskContext.worker?.version,
     1294 | + queue: item.options?.queue
     1295 | + ? { name: item.options.queue }
     1296 | + : queue
     1297 | + ? { name: queue }
     1298 | + : undefined,
     1299 | + concurrencyKey: item.options?.concurrencyKey,
     1300 | + test: v3_1.taskContext.ctx?.run.isTest,
     1301 | + payloadType: payloadPacket.dataType,
     1302 | + delay: item.options?.delay,
     1303 | + ttl: item.options?.ttl,
     1304 | + tags: item.options?.tags,
     1305 | + maxAttempts: item.options?.maxAttempts,
     1306 | + metadata: item.options?.metadata,
     1307 | + maxDuration: item.options?.maxDuration,
     1308 | + idempotencyKey: (await (0, v3_1.makeIdempotencyKey)(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
     1309 | + idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
     1310 | + machine: item.options?.machine,
     1311 | + priority: item.options?.priority,
     1312 | + region: item.options?.region,
     1313 | + },
     1314 | + };
     1315 | + }));
     1316 | + return await tracer_js_1.tracer.startActiveSpan(name, async (span) => {
     1317 | + // Execute 2-phase batch
     1318 | + const response = await executeBatchTwoPhase(apiClient, ndJsonItems, {
     1319 | + parentRunId: ctx.run.id,
     1320 | + resumeParentOnCompletion: true,
     1321 | + idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
     1322 | + spanParentAsLink: false, // Waiting: child runs share parent's trace ID
     1323 | + }, requestOptions);
     1324 | + span.setAttribute("batchId", response.id);
     1325 | + span.setAttribute("runCount", response.runCount);
     1326 | + const result = await v3_1.runtime.waitForBatch({
     1327 | + id: response.id,
     1328 | + runCount: response.runCount,
     1329 | + ctx,
     1330 | + });
     1331 | + const runs = await handleBatchTaskRunExecutionResult(result.items, id);
     1332 | + return {
     1333 | + id: result.id,
     1334 | + runs,
     1335 | + };
 970 1336 |   }, {
 971-978  | - (8 removed lines; content not shown in this view)
     1337 | + kind: api_1.SpanKind.PRODUCER,
     1338 | + attributes: {
     1339 | + [v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
     1340 | + ...(0, v3_1.accessoryAttributes)({
     1341 | + items: [
     1342 | + {
     1343 | + text: id,
     1344 | + variant: "normal",
     1345 | + },
     1346 | + ],
     1347 | + style: "codepath",
     1348 | + }),
     1349 | + },
 979 1350 |   });
 980-999  | - (20 removed lines; content not shown in this view)
     1351 | + }
     1352 | + else {
     1353 | + // Stream path: convert to AsyncIterable and transform
     1354 | + const asyncItems = normalizeToAsyncIterable(items);
     1355 | + const transformedItems = transformSingleTaskBatchItemsStreamForWait(id, asyncItems, parsePayload, options, queue);
     1356 | + return await tracer_js_1.tracer.startActiveSpan(name, async (span) => {
     1357 | + // Execute streaming 2-phase batch
     1358 | + const response = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
     1359 | + parentRunId: ctx.run.id,
     1360 | + resumeParentOnCompletion: true,
     1361 | + idempotencyKey: await (0, v3_1.makeIdempotencyKey)(options?.idempotencyKey),
     1362 | + spanParentAsLink: false, // Waiting: child runs share parent's trace ID
     1363 | + }, requestOptions);
     1364 | + span.setAttribute("batchId", response.id);
     1365 | + span.setAttribute("runCount", response.runCount);
     1366 | + const result = await v3_1.runtime.waitForBatch({
     1367 | + id: response.id,
     1368 | + runCount: response.runCount,
     1369 | + ctx,
     1370 | + });
     1371 | + const runs = await handleBatchTaskRunExecutionResult(result.items, id);
     1372 | + return {
     1373 | + id: result.id,
     1374 | + runs,
     1375 | + };
     1376 | + }, {
     1377 | + kind: api_1.SpanKind.PRODUCER,
     1378 | + attributes: {
     1379 | + [v3_1.SemanticInternalAttributes.STYLE_ICON]: "trigger",
     1380 | + ...(0, v3_1.accessoryAttributes)({
     1381 | + items: [
     1382 | + {
     1383 | + text: id,
     1384 | + variant: "normal",
     1385 | + },
     1386 | + ],
     1387 | + style: "codepath",
     1388 | + }),
     1389 | + },
     1390 | + });
     1391 | + }
1000 1392 |   }
1001 1393 |   async function handleBatchTaskRunExecutionResult(items, taskIdentifier) {
1002 1394 |   const someObjectStoreOutputs = items.some((item) => item.ok && item.outputType === "application/store");
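
Example (editorial, not part of the published diff): in the array path above, each batch item that does not set its own idempotencyKey receives one derived from the batch-level key plus the item index (flattenIdempotencyKey([options?.idempotencyKey, `${index}`])), and an item-level key takes precedence. A sketch of how that could be used, assuming the batch-level idempotencyKey option is exposed publicly the same way it is passed internally; the task ids and payloads below are hypothetical.

    import { task, idempotencyKeys } from "@trigger.dev/sdk";

    export const importRow = task({
      id: "import-row",
      run: async (payload: { row: number }) => payload.row * 2,
    });

    export const importBatch = task({
      id: "import-batch",
      run: async (payload: { rows: number[] }) => {
        // Batch-level key: per the code above, items without their own key get a
        // derived key of [batchKey, itemIndex], so a retried batch reuses existing runs.
        const batchKey = await idempotencyKeys.create(["import", "2024-06-01"]);
        const { runs } = await importRow.batchTriggerAndWait(
          payload.rows.map((row) => ({ payload: { row } })),
          { idempotencyKey: batchKey } // assumption: exposed as a batch-level option
        );
        const outputs: number[] = [];
        for (const run of runs) {
          if (run.ok) outputs.push(run.output);
        }
        return outputs;
      },
    });
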