@trigger.dev/sdk 4.2.0 → 4.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commonjs/v3/batch.d.ts +3 -3
- package/dist/commonjs/v3/batch.js.map +1 -1
- package/dist/commonjs/v3/idempotencyKeys.d.ts +2 -1
- package/dist/commonjs/v3/idempotencyKeys.js +1 -0
- package/dist/commonjs/v3/idempotencyKeys.js.map +1 -1
- package/dist/commonjs/v3/shared.d.ts +71 -0
- package/dist/commonjs/v3/shared.js +880 -542
- package/dist/commonjs/v3/shared.js.map +1 -1
- package/dist/commonjs/v3/shared.test.d.ts +1 -0
- package/dist/commonjs/v3/shared.test.js +190 -0
- package/dist/commonjs/v3/shared.test.js.map +1 -0
- package/dist/commonjs/version.js +1 -1
- package/dist/esm/v3/batch.d.ts +3 -3
- package/dist/esm/v3/batch.js +1 -1
- package/dist/esm/v3/batch.js.map +1 -1
- package/dist/esm/v3/idempotencyKeys.d.ts +2 -1
- package/dist/esm/v3/idempotencyKeys.js +2 -1
- package/dist/esm/v3/idempotencyKeys.js.map +1 -1
- package/dist/esm/v3/shared.d.ts +71 -0
- package/dist/esm/v3/shared.js +879 -541
- package/dist/esm/v3/shared.js.map +1 -1
- package/dist/esm/v3/shared.test.d.ts +1 -0
- package/dist/esm/v3/shared.test.js +188 -0
- package/dist/esm/v3/shared.test.js.map +1 -0
- package/dist/esm/version.js +1 -1
- package/package.json +3 -2
package/dist/esm/v3/shared.js
CHANGED
@@ -2,6 +2,8 @@ import { SpanKind } from "@opentelemetry/api";
 import { accessoryAttributes, apiClientManager, conditionallyImportPacket, convertToolParametersToSchema, createErrorTaskError, defaultRetryOptions, flattenIdempotencyKey, getEnvVar, getSchemaParseFn, lifecycleHooks, makeIdempotencyKey, parsePacket, resourceCatalog, runtime, SemanticInternalAttributes, stringifyIO, SubtaskUnwrapError, taskContext, TaskRunPromise, } from "@trigger.dev/core/v3";
 import { tracer } from "./tracer.js";
 export { SubtaskUnwrapError, TaskRunPromise };
+// Re-export for external use (defined later in file)
+export { BatchTriggerError };
 export function queue(options) {
 resourceCatalog.registerQueueMetadata(options);
 // @ts-expect-error
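The newly exported `BatchTriggerError` carries the phase that failed and, when the batch record was already created, the batch ID. A minimal usage sketch, assuming the error class is re-exported from the SDK entry point alongside the batch helpers (only the `export { BatchTriggerError }` in shared.js is visible in this diff); `myTask` is a hypothetical task module:

```ts
import { batch, BatchTriggerError } from "@trigger.dev/sdk/v3"; // export path assumed
import type { myTask } from "~/trigger/myTasks"; // hypothetical task module

try {
  await batch.trigger<typeof myTask>([{ id: "my-task", payload: { some: "data" } }]);
} catch (error) {
  if (error instanceof BatchTriggerError) {
    // phase is "create" (batch record creation) or "stream" (NDJSON item upload);
    // batchId is only set when the create phase succeeded.
    console.error(`Batch failed in ${error.phase} phase`, error.batchId, error.itemCount);
    console.error("Underlying cause:", error.cause);
  }
  throw error;
}
```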
@@ -221,63 +223,14 @@ export async function batchTriggerAndWait(id, items, options, requestOptions) {
 export async function batchTrigger(id, items, options, requestOptions) {
 return await batchTrigger_internal("tasks.batchTrigger()", id, items, options, undefined, requestOptions);
 }
-
-
-
-* @template TTask - The type of task(s) to be triggered, extends AnyTask
-*
-* @param {Array<BatchByIdItem<InferRunTypes<TTask>>>} items - Array of task items to trigger
-* @param {BatchTriggerOptions} [options] - Optional batch-level trigger options
-* @param {TriggerApiRequestOptions} [requestOptions] - Optional API request configuration
-*
-* @returns {Promise<BatchRunHandleFromTypes<InferRunTypes<TTask>>>} A promise that resolves with the batch run handle
-* containing batch ID, cached status, idempotency info, runs, and public access token
-*
-* @example
-* ```ts
-* import { batch } from "@trigger.dev/sdk/v3";
-* import type { myTask1, myTask2 } from "~/trigger/myTasks";
-*
-* // Trigger multiple tasks with different payloads
-* const result = await batch.trigger<typeof myTask1 | typeof myTask2>([
-* {
-* id: "my-task-1",
-* payload: { some: "data" },
-* options: {
-* queue: "default",
-* concurrencyKey: "key",
-* idempotencyKey: "unique-key",
-* delay: "5m",
-* tags: ["tag1", "tag2"]
-* }
-* },
-* {
-* id: "my-task-2",
-* payload: { other: "data" }
-* }
-* ]);
-* ```
-*
-* @description
-* Each task item in the array can include:
-* - `id`: The unique identifier of the task
-* - `payload`: The data to pass to the task
-* - `options`: Optional task-specific settings including:
-* - `queue`: Specify a queue for the task
-* - `concurrencyKey`: Control concurrent execution
-* - `idempotencyKey`: Prevent duplicate runs
-* - `idempotencyKeyTTL`: Time-to-live for idempotency key
-* - `delay`: Delay before task execution
-* - `ttl`: Time-to-live for the task
-* - `tags`: Array of tags for the task
-* - `maxAttempts`: Maximum retry attempts
-* - `metadata`: Additional metadata
-* - `maxDuration`: Maximum execution duration
-*/
-export async function batchTriggerById(items, options, requestOptions) {
+// Implementation
+export async function batchTriggerById(...args) {
+const [items, options, requestOptions] = args;
 const apiClient = apiClientManager.clientOrThrow(requestOptions?.clientConfig);
-
-
+// Check if items is an array or a stream
+if (Array.isArray(items)) {
+// Array path: existing logic
+const ndJsonItems = await Promise.all(items.map(async (item, index) => {
 const taskMetadata = resourceCatalog.getTask(item.id);
 const parsedPayload = taskMetadata?.fns.parsePayload
 ? await taskMetadata?.fns.parsePayload(item.payload)
@@ -285,6 +238,7 @@ export async function batchTriggerById(items, options, requestOptions) {
 const payloadPacket = await stringifyIO(parsedPayload);
 const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
 return {
+index,
 task: item.id,
 payload: payloadPacket.data,
 options: {
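The `batchItemIdempotencyKey` line above derives a deterministic per-item key from the batch-level key plus the item's index, and the item-level `idempotencyKey` seen later in the diff falls back to it. A small sketch of that fallback using the `flattenIdempotencyKey`/`makeIdempotencyKey` helpers imported from `@trigger.dev/core/v3` at the top of this file; their exact signatures are not shown in this diff, so the parameter and return types here are assumptions:

```ts
import { flattenIdempotencyKey, makeIdempotencyKey } from "@trigger.dev/core/v3";

// Assumed signatures: both helpers accept string | undefined inputs in this sketch.
async function resolveItemIdempotencyKey(
  batchKey: string | undefined,
  itemKey: string | undefined,
  index: number
): Promise<string | undefined> {
  // Deterministic fallback: the batch-level key combined with the item's position.
  const derived = await makeIdempotencyKey(flattenIdempotencyKey([batchKey, `${index}`]));
  // An explicit per-item key always wins over the derived one.
  return (await makeIdempotencyKey(itemKey)) ?? derived;
}
```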
@@ -304,257 +258,266 @@ export async function batchTriggerById(items, options, requestOptions) {
 priority: item.options?.priority,
 region: item.options?.region,
 lockToVersion: item.options?.version ?? getEnvVar("TRIGGER_VERSION"),
+debounce: item.options?.debounce,
 },
 };
-}))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+}));
+// Execute 2-phase batch
+const response = await tracer.startActiveSpan("batch.trigger()", async (span) => {
+const result = await executeBatchTwoPhase(apiClient, ndJsonItems, {
+parentRunId: taskContext.ctx?.run.id,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
+}, requestOptions);
+span.setAttribute("batchId", result.id);
+span.setAttribute("runCount", result.runCount);
+return result;
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
+});
+const handle = {
+batchId: response.id,
+runCount: response.runCount,
+publicAccessToken: response.publicAccessToken,
+};
+return handle;
+}
+else {
+// Stream path: convert to AsyncIterable and transform
+const asyncItems = normalizeToAsyncIterable(items);
+const transformedItems = transformBatchItemsStream(asyncItems, options);
+// Execute streaming 2-phase batch
+const response = await tracer.startActiveSpan("batch.trigger()", async (span) => {
+const result = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
+parentRunId: taskContext.ctx?.run.id,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
+}, requestOptions);
+span.setAttribute("batchId", result.id);
+span.setAttribute("runCount", result.runCount);
+return result;
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
+});
+const handle = {
+batchId: response.id,
+runCount: response.runCount,
+publicAccessToken: response.publicAccessToken,
+};
+return handle;
+}
 }
-
-
-
-*
-* @template TTask - Union type of tasks to be triggered, extends AnyTask
-*
-* @param {Array<BatchByIdAndWaitItem<InferRunTypes<TTask>>>} items - Array of task items to trigger
-* @param {TriggerApiRequestOptions} [requestOptions] - Optional API request configuration
-*
-* @returns {Promise<BatchByIdResult<TTask>>} A promise that resolves with the batch results, including
-* success/failure status and strongly-typed outputs for each task
-*
-* @throws {Error} If called outside of a task.run() context
-* @throws {Error} If no API client is configured
-*
-* @example
-* ```ts
-* import { batch, task } from "@trigger.dev/sdk/v3";
-*
-* export const parentTask = task({
-* id: "parent-task",
-* run: async (payload: string) => {
-* const results = await batch.triggerAndWait<typeof childTask1 | typeof childTask2>([
-* {
-* id: "child-task-1",
-* payload: { foo: "World" },
-* options: {
-* queue: "default",
-* delay: "5m",
-* tags: ["batch", "child1"]
-* }
-* },
-* {
-* id: "child-task-2",
-* payload: { bar: 42 }
-* }
-* ]);
-*
-* // Type-safe result handling
-* for (const result of results) {
-* if (result.ok) {
-* switch (result.taskIdentifier) {
-* case "child-task-1":
-* console.log("Child task 1 output:", result.output); // string type
-* break;
-* case "child-task-2":
-* console.log("Child task 2 output:", result.output); // number type
-* break;
-* }
-* } else {
-* console.error("Task failed:", result.error);
-* }
-* }
-* }
-* });
-* ```
-*
-* @description
-* Each task item in the array can include:
-* - `id`: The task identifier (must match one of the tasks in the union type)
-* - `payload`: Strongly-typed payload matching the task's input type
-* - `options`: Optional task-specific settings including:
-* - `queue`: Specify a queue for the task
-* - `concurrencyKey`: Control concurrent execution
-* - `delay`: Delay before task execution
-* - `ttl`: Time-to-live for the task
-* - `tags`: Array of tags for the task
-* - `maxAttempts`: Maximum retry attempts
-* - `metadata`: Additional metadata
-* - `maxDuration`: Maximum execution duration
-*
-* The function provides full type safety for:
-* - Task IDs
-* - Payload types
-* - Return value types
-* - Error handling
-*/
-export async function batchTriggerByIdAndWait(items, options, requestOptions) {
+// Implementation
+export async function batchTriggerByIdAndWait(...args) {
+const [items, options, requestOptions] = args;
 const ctx = taskContext.ctx;
 if (!ctx) {
 throw new Error("batchTriggerAndWait can only be used from inside a task.run()");
 }
 const apiClient = apiClientManager.clientOrThrow(requestOptions?.clientConfig);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// Check if items is an array or a stream
+if (Array.isArray(items)) {
+// Array path: existing logic
+const ndJsonItems = await Promise.all(items.map(async (item, index) => {
+const taskMetadata = resourceCatalog.getTask(item.id);
+const parsedPayload = taskMetadata?.fns.parsePayload
+? await taskMetadata?.fns.parsePayload(item.payload)
+: item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+return {
+index,
+task: item.id,
+payload: payloadPacket.data,
+options: {
+lockToVersion: taskContext.worker?.version,
+queue: item.options?.queue ? { name: item.options.queue } : undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+debounce: item.options?.debounce,
+},
+};
+}));
+return await tracer.startActiveSpan("batch.triggerAndWait()", async (span) => {
+// Execute 2-phase batch
+const response = await executeBatchTwoPhase(apiClient, ndJsonItems, {
+parentRunId: ctx.run.id,
+resumeParentOnCompletion: true,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: false, // Waiting: child runs share parent's trace ID
+}, requestOptions);
+span.setAttribute("batchId", response.id);
+span.setAttribute("runCount", response.runCount);
+const result = await runtime.waitForBatch({
+id: response.id,
+runCount: response.runCount,
+ctx,
+});
+const runs = await handleBatchTaskRunExecutionResultV2(result.items);
+return {
+id: result.id,
+runs,
+};
 }, {
-
-
-
-
-
-
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
+});
+}
+else {
+// Stream path: convert to AsyncIterable and transform
+const asyncItems = normalizeToAsyncIterable(items);
+const transformedItems = transformBatchItemsStreamForWait(asyncItems, options);
+return await tracer.startActiveSpan("batch.triggerAndWait()", async (span) => {
+// Execute streaming 2-phase batch
+const response = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
+parentRunId: ctx.run.id,
+resumeParentOnCompletion: true,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: false, // Waiting: child runs share parent's trace ID
+}, requestOptions);
+span.setAttribute("batchId", response.id);
+span.setAttribute("runCount", response.runCount);
+const result = await runtime.waitForBatch({
+id: response.id,
+runCount: response.runCount,
+ctx,
+});
+const runs = await handleBatchTaskRunExecutionResultV2(result.items);
+return {
+id: result.id,
+runs,
+};
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
+});
+}
+}
+// Implementation
+export async function batchTriggerTasks(...args) {
+const [items, options, requestOptions] = args;
+const apiClient = apiClientManager.clientOrThrow(requestOptions?.clientConfig);
+// Check if items is an array or a stream
+if (Array.isArray(items)) {
+// Array path: existing logic
+const ndJsonItems = await Promise.all(items.map(async (item, index) => {
+const taskMetadata = resourceCatalog.getTask(item.task.id);
+const parsedPayload = taskMetadata?.fns.parsePayload
+? await taskMetadata?.fns.parsePayload(item.payload)
+: item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+return {
+index,
+task: item.task.id,
+payload: payloadPacket.data,
+options: {
+queue: item.options?.queue ? { name: item.options.queue } : undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+lockToVersion: item.options?.version ?? getEnvVar("TRIGGER_VERSION"),
+debounce: item.options?.debounce,
+},
+};
+}));
+// Execute 2-phase batch
+const response = await tracer.startActiveSpan("batch.triggerByTask()", async (span) => {
+const result = await executeBatchTwoPhase(apiClient, ndJsonItems, {
+parentRunId: taskContext.ctx?.run.id,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
+}, requestOptions);
+span.setAttribute("batchId", result.id);
+span.setAttribute("runCount", result.runCount);
+return result;
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
+});
+const handle = {
+batchId: response.id,
 runCount: response.runCount,
-
+publicAccessToken: response.publicAccessToken,
+};
+return handle;
+}
+else {
+// Stream path: convert to AsyncIterable and transform
+const streamItems = items;
+const asyncItems = normalizeToAsyncIterable(streamItems);
+const transformedItems = transformBatchByTaskItemsStream(asyncItems, options);
+// Execute streaming 2-phase batch
+const response = await tracer.startActiveSpan("batch.triggerByTask()", async (span) => {
+const result = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
+parentRunId: taskContext.ctx?.run.id,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
+}, requestOptions);
+span.setAttribute("batchId", result.id);
+span.setAttribute("runCount", result.runCount);
+return result;
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
 });
-const
-
-
-
+const handle = {
+batchId: response.id,
+runCount: response.runCount,
+publicAccessToken: response.publicAccessToken,
 };
-
-
-attributes: {
-[SemanticInternalAttributes.STYLE_ICON]: "trigger",
-},
-});
+return handle;
+}
 }
-
-
-
-
-
-
-
-* @param {TriggerApiRequestOptions} [requestOptions] - Optional API request configuration
-*
-* @returns {Promise<BatchByIdResult<TTask>>} A promise that resolves with the batch results, including
-* success/failure status and strongly-typed outputs for each task
-*
-* @throws {Error} If called outside of a task.run() context
-* @throws {Error} If no API client is configured
-*
-* @example
-* ```ts
-* import { batch, task } from "@trigger.dev/sdk/v3";
-*
-* export const parentTask = task({
-* id: "parent-task",
-* run: async (payload: string) => {
-* const results = await batch.triggerAndWait<typeof childTask1 | typeof childTask2>([
-* {
-* id: "child-task-1",
-* payload: { foo: "World" },
-* options: {
-* queue: "default",
-* delay: "5m",
-* tags: ["batch", "child1"]
-* }
-* },
-* {
-* id: "child-task-2",
-* payload: { bar: 42 }
-* }
-* ]);
-*
-* // Type-safe result handling
-* for (const result of results) {
-* if (result.ok) {
-* switch (result.taskIdentifier) {
-* case "child-task-1":
-* console.log("Child task 1 output:", result.output); // string type
-* break;
-* case "child-task-2":
-* console.log("Child task 2 output:", result.output); // number type
-* break;
-* }
-* } else {
-* console.error("Task failed:", result.error);
-* }
-* }
-* }
-* });
-* ```
-*
-* @description
-* Each task item in the array can include:
-* - `id`: The task identifier (must match one of the tasks in the union type)
-* - `payload`: Strongly-typed payload matching the task's input type
-* - `options`: Optional task-specific settings including:
-* - `queue`: Specify a queue for the task
-* - `concurrencyKey`: Control concurrent execution
-* - `delay`: Delay before task execution
-* - `ttl`: Time-to-live for the task
-* - `tags`: Array of tags for the task
-* - `maxAttempts`: Maximum retry attempts
-* - `metadata`: Additional metadata
-* - `maxDuration`: Maximum execution duration
-*
-* The function provides full type safety for:
-* - Task IDs
-* - Payload types
-* - Return value types
-* - Error handling
-*/
-export async function batchTriggerTasks(items, options, requestOptions) {
+// Implementation
+export async function batchTriggerAndWaitTasks(...args) {
+const [items, options, requestOptions] = args;
+const ctx = taskContext.ctx;
+if (!ctx) {
+throw new Error("batchTriggerAndWait can only be used from inside a task.run()");
+}
 const apiClient = apiClientManager.clientOrThrow(requestOptions?.clientConfig);
-
-
+// Check if items is an array or a stream
+if (Array.isArray(items)) {
+// Array path: existing logic
+const ndJsonItems = await Promise.all(items.map(async (item, index) => {
 const taskMetadata = resourceCatalog.getTask(item.task.id);
 const parsedPayload = taskMetadata?.fns.parsePayload
 ? await taskMetadata?.fns.parsePayload(item.payload)
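Each of the rewritten batch functions above now branches on `Array.isArray(items)` and otherwise normalizes a `ReadableStream` or `AsyncIterable` before streaming the transformed items to the server. A usage sketch under the assumption that the new overloads declared in shared.d.ts accept an async iterable of items (only the runtime branch is visible in this file); `myTask` is a hypothetical task module:

```ts
import { batch } from "@trigger.dev/sdk/v3";
import type { myTask } from "~/trigger/myTasks"; // hypothetical task module

// Items can be produced lazily; the SDK transforms them one by one and uploads them
// as NDJSON during the second phase of the batch trigger.
async function* generateItems() {
  for (let i = 0; i < 1000; i++) {
    yield { id: "my-task" as const, payload: { index: i } };
  }
}

const handle = await batch.trigger<typeof myTask>(generateItems());
console.log(handle.batchId, handle.runCount, handle.publicAccessToken);
```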
@@ -562,9 +525,11 @@ export async function batchTriggerTasks(items, options, requestOptions) {
 const payloadPacket = await stringifyIO(parsedPayload);
 const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
 return {
+index,
 task: item.task.id,
 payload: payloadPacket.data,
 options: {
+lockToVersion: taskContext.worker?.version,
 queue: item.options?.queue ? { name: item.options.queue } : undefined,
 concurrencyKey: item.options?.concurrencyKey,
 test: taskContext.ctx?.run.isTest,
@@ -580,176 +545,457 @@ export async function batchTriggerTasks(items, options, requestOptions) {
 machine: item.options?.machine,
 priority: item.options?.priority,
 region: item.options?.region,
-
+debounce: item.options?.debounce,
 },
 };
-}))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+}));
+return await tracer.startActiveSpan("batch.triggerByTaskAndWait()", async (span) => {
+// Execute 2-phase batch
+const response = await executeBatchTwoPhase(apiClient, ndJsonItems, {
+parentRunId: ctx.run.id,
+resumeParentOnCompletion: true,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: false, // Waiting: child runs share parent's trace ID
+}, requestOptions);
+span.setAttribute("batchId", response.id);
+span.setAttribute("runCount", response.runCount);
+const result = await runtime.waitForBatch({
+id: response.id,
+runCount: response.runCount,
+ctx,
+});
+const runs = await handleBatchTaskRunExecutionResultV2(result.items);
+return {
+id: result.id,
+runs,
+};
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
+});
+}
+else {
+// Stream path: convert to AsyncIterable and transform
+const streamItems = items;
+const asyncItems = normalizeToAsyncIterable(streamItems);
+const transformedItems = transformBatchByTaskItemsStreamForWait(asyncItems, options);
+return await tracer.startActiveSpan("batch.triggerByTaskAndWait()", async (span) => {
+// Execute streaming 2-phase batch
+const response = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
+parentRunId: ctx.run.id,
+resumeParentOnCompletion: true,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: false, // Waiting: child runs share parent's trace ID
+}, requestOptions);
+span.setAttribute("batchId", response.id);
+span.setAttribute("runCount", response.runCount);
+const result = await runtime.waitForBatch({
+id: response.id,
+runCount: response.runCount,
+ctx,
+});
+const runs = await handleBatchTaskRunExecutionResultV2(result.items);
+return {
+id: result.id,
+runs,
+};
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+},
+});
+}
 }
 /**
-*
-*
+* Helper function that executes a 2-phase batch trigger:
+* 1. Creates the batch record with expected run count
+* 2. Streams items as NDJSON to the server
 *
-* @
-*
-* @param
-* @param
+* @param apiClient - The API client instance
+* @param items - Array of batch items
+* @param options - Batch options including trace context settings
+* @param options.spanParentAsLink - If true, child runs will have separate trace IDs with a link to parent.
+* Use true for batchTrigger (fire-and-forget), false for batchTriggerAndWait.
+* @param requestOptions - Optional request options
+* @internal
+*/
+async function executeBatchTwoPhase(apiClient, items, options, requestOptions) {
+let batch;
+try {
+// Phase 1: Create batch
+batch = await apiClient.createBatch({
+runCount: items.length,
+parentRunId: options.parentRunId,
+resumeParentOnCompletion: options.resumeParentOnCompletion,
+idempotencyKey: options.idempotencyKey,
+}, { spanParentAsLink: options.spanParentAsLink }, requestOptions);
+}
+catch (error) {
+// Wrap with context about which phase failed
+throw new BatchTriggerError(`Failed to create batch with ${items.length} items`, {
+cause: error,
+phase: "create",
+itemCount: items.length,
+});
+}
+// If the batch was cached (idempotent replay), skip streaming items
+if (!batch.isCached) {
+try {
+// Phase 2: Stream items
+await apiClient.streamBatchItems(batch.id, items, requestOptions);
+}
+catch (error) {
+// Wrap with context about which phase failed and include batch ID
+throw new BatchTriggerError(`Failed to stream items for batch ${batch.id} (${items.length} items)`, { cause: error, phase: "stream", batchId: batch.id, itemCount: items.length });
+}
+}
+return {
+id: batch.id,
+runCount: batch.runCount,
+publicAccessToken: batch.publicAccessToken,
+};
+}
+/**
+* Error thrown when batch trigger operations fail.
+* Includes context about which phase failed and the batch details.
+*/
+class BatchTriggerError extends Error {
+phase;
+batchId;
+itemCount;
+constructor(message, options) {
+super(message, { cause: options.cause });
+this.name = "BatchTriggerError";
+this.phase = options.phase;
+this.batchId = options.batchId;
+this.itemCount = options.itemCount;
+}
+}
+/**
+* Execute a streaming 2-phase batch trigger where items are streamed from an AsyncIterable.
+* Unlike executeBatchTwoPhase, this doesn't know the count upfront.
 *
-* @
-*
+* @param apiClient - The API client instance
+* @param items - AsyncIterable of batch items
+* @param options - Batch options including trace context settings
+* @param options.spanParentAsLink - If true, child runs will have separate trace IDs with a link to parent.
+* Use true for batchTrigger (fire-and-forget), false for batchTriggerAndWait.
+* @param requestOptions - Optional request options
+* @internal
+*/
+async function executeBatchTwoPhaseStreaming(apiClient, items, options, requestOptions) {
+// For streaming, we need to buffer items to get the count first
+// This is because createBatch requires runCount upfront
+// In the future, we could add a streaming-first endpoint that doesn't require this
+const itemsArray = [];
+for await (const item of items) {
+itemsArray.push(item);
+}
+// Now we can use the regular 2-phase approach
+return executeBatchTwoPhase(apiClient, itemsArray, options, requestOptions);
+}
+// ============================================================================
+// Streaming Helpers
+// ============================================================================
+/**
+* Type guard to check if a value is an AsyncIterable
+*/
+function isAsyncIterable(value) {
+return (value != null &&
+typeof value === "object" &&
+Symbol.asyncIterator in value &&
+typeof value[Symbol.asyncIterator] === "function");
+}
+/**
+* Type guard to check if a value is a ReadableStream
+*/
+function isReadableStream(value) {
+return (value != null &&
+typeof value === "object" &&
+"getReader" in value &&
+typeof value.getReader === "function");
+}
+/**
+* Convert a ReadableStream to an AsyncIterable.
+* Properly cancels the stream when the consumer terminates early.
 *
-* @
-
+* @internal Exported for testing purposes
+*/
+export async function* readableStreamToAsyncIterable(stream) {
+const reader = stream.getReader();
+try {
+while (true) {
+const { done, value } = await reader.read();
+if (done)
+break;
+yield value;
+}
+}
+finally {
+try {
+await reader.cancel();
+}
+catch {
+// Ignore errors - stream might already be errored or closed
+}
+reader.releaseLock();
+}
+}
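`readableStreamToAsyncIterable` is exported "for testing purposes", and the new shared.test.js files added in this release presumably exercise these helpers. A test-style sketch of its behaviour; the import path below is hypothetical (the helper lives in package/dist/esm/v3/shared.js, and how it is surfaced publicly is not shown in this diff):

```ts
// Hypothetical import path; the helper is exported from the compiled shared.js module.
import { readableStreamToAsyncIterable } from "@trigger.dev/sdk/v3/shared";

const stream = new ReadableStream<number>({
  start(controller) {
    controller.enqueue(1);
    controller.enqueue(2);
    controller.close();
  },
});

const collected: number[] = [];
for await (const value of readableStreamToAsyncIterable(stream)) {
  collected.push(value);
}
// collected is [1, 2]; the helper cancels and releases the reader in its finally block,
// so breaking out of the loop early also cleans up the underlying stream.
```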
+/**
+* Normalize stream input to AsyncIterable
+*/
+function normalizeToAsyncIterable(input) {
+if (isReadableStream(input)) {
+return readableStreamToAsyncIterable(input);
+}
+return input;
+}
+/**
+* Transform a stream of BatchByIdItem to BatchItemNDJSON format.
+* Handles payload serialization and idempotency key generation.
 *
-* @
-
-*
+* @internal
+*/
+async function* transformBatchItemsStream(items, options) {
+let index = 0;
+for await (const item of items) {
+const taskMetadata = resourceCatalog.getTask(item.id);
+const parsedPayload = taskMetadata?.fns.parsePayload
+? await taskMetadata?.fns.parsePayload(item.payload)
+: item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+yield {
+index: index++,
+task: item.id,
+payload: payloadPacket.data,
+options: {
+queue: item.options?.queue ? { name: item.options.queue } : undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+lockToVersion: item.options?.version ?? getEnvVar("TRIGGER_VERSION"),
+debounce: item.options?.debounce,
+},
+};
+}
+}
+/**
+* Transform a stream of BatchByIdAndWaitItem to BatchItemNDJSON format for triggerAndWait.
+* Uses the current worker version for lockToVersion.
 *
-*
-
-*
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+* @internal
+*/
+async function* transformBatchItemsStreamForWait(items, options) {
+let index = 0;
+for await (const item of items) {
+const taskMetadata = resourceCatalog.getTask(item.id);
+const parsedPayload = taskMetadata?.fns.parsePayload
+? await taskMetadata?.fns.parsePayload(item.payload)
+: item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+yield {
+index: index++,
+task: item.id,
+payload: payloadPacket.data,
+options: {
+lockToVersion: taskContext.worker?.version,
+queue: item.options?.queue ? { name: item.options.queue } : undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+debounce: item.options?.debounce,
+},
+};
+}
+}
+/**
+* Transform a stream of BatchByTaskItem to BatchItemNDJSON format.
 *
-*
-
-*
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+* @internal
+*/
+async function* transformBatchByTaskItemsStream(items, options) {
+let index = 0;
+for await (const item of items) {
+const taskMetadata = resourceCatalog.getTask(item.task.id);
+const parsedPayload = taskMetadata?.fns.parsePayload
+? await taskMetadata?.fns.parsePayload(item.payload)
+: item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+yield {
+index: index++,
+task: item.task.id,
+payload: payloadPacket.data,
+options: {
+queue: item.options?.queue ? { name: item.options.queue } : undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+lockToVersion: item.options?.version ?? getEnvVar("TRIGGER_VERSION"),
+debounce: item.options?.debounce,
+},
+};
+}
+}
+/**
+* Transform a stream of BatchByTaskAndWaitItem to BatchItemNDJSON format for triggerAndWait.
 *
-* @
-
-*
-
-
-
-
-
-
-
-
-
+* @internal
+*/
+async function* transformBatchByTaskItemsStreamForWait(items, options) {
+let index = 0;
+for await (const item of items) {
+const taskMetadata = resourceCatalog.getTask(item.task.id);
+const parsedPayload = taskMetadata?.fns.parsePayload
+? await taskMetadata?.fns.parsePayload(item.payload)
+: item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+yield {
+index: index++,
+task: item.task.id,
+payload: payloadPacket.data,
+options: {
+lockToVersion: taskContext.worker?.version,
+queue: item.options?.queue ? { name: item.options.queue } : undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+debounce: item.options?.debounce,
+},
+};
+}
+}
+/**
+* Transform a stream of BatchItem (single task type) to BatchItemNDJSON format.
 *
-*
-* - Task IDs
-* - Payload types
-* - Return value types
-* - Error handling
+* @internal
 */
-
-
-
-
+async function* transformSingleTaskBatchItemsStream(taskIdentifier, items, parsePayload, options, queue) {
+let index = 0;
+for await (const item of items) {
+const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+yield {
+index: index++,
+task: taskIdentifier,
+payload: payloadPacket.data,
+options: {
+queue: item.options?.queue
+? { name: item.options.queue }
+: queue
+? { name: queue }
+: undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+lockToVersion: item.options?.version ?? getEnvVar("TRIGGER_VERSION"),
+debounce: item.options?.debounce,
+},
+};
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-span.setAttribute("batchId", response.id);
-span.setAttribute("runCount", response.runCount);
-const result = await runtime.waitForBatch({
-id: response.id,
-runCount: response.runCount,
-ctx,
-});
-const runs = await handleBatchTaskRunExecutionResultV2(result.items);
-return {
-id: result.id,
-runs,
+}
+/**
+* Transform a stream of BatchTriggerAndWaitItem (single task type) to BatchItemNDJSON format.
+*
+* @internal
+*/
+async function* transformSingleTaskBatchItemsStreamForWait(taskIdentifier, items, parsePayload, options, queue) {
+let index = 0;
+for await (const item of items) {
+const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+yield {
+index: index++,
+task: taskIdentifier,
+payload: payloadPacket.data,
+options: {
+lockToVersion: taskContext.worker?.version,
+queue: item.options?.queue
+? { name: item.options.queue }
+: queue
+? { name: queue }
+: undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+debounce: item.options?.debounce,
+},
 };
-}
-kind: SpanKind.PRODUCER,
-attributes: {
-[SemanticInternalAttributes.STYLE_ICON]: "trigger",
-},
-});
+}
 }
 async function trigger_internal(name, id, payload, parsePayload, options, requestOptions) {
 const apiClient = apiClientManager.clientOrThrow(requestOptions?.clientConfig);
@@ -775,6 +1021,7 @@ async function trigger_internal(name, id, payload, parsePayload, options, reques
 priority: options?.priority,
 region: options?.region,
 lockToVersion: options?.version ?? getEnvVar("TRIGGER_VERSION"),
+debounce: options?.debounce,
 },
 }, {
 spanParentAsLink: true,
@@ -796,12 +1043,15 @@ async function trigger_internal(name, id, payload, parsePayload, options, reques
 async function batchTrigger_internal(name, taskIdentifier, items, options, parsePayload, requestOptions, queue) {
 const apiClient = apiClientManager.clientOrThrow(requestOptions?.clientConfig);
 const ctx = taskContext.ctx;
-
-
+// Check if items is an array or a stream
+if (Array.isArray(items)) {
+// Prepare items as BatchItemNDJSON
+const ndJsonItems = await Promise.all(items.map(async (item, index) => {
 const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
 const payloadPacket = await stringifyIO(parsedPayload);
 const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
 return {
+index,
 task: taskIdentifier,
 payload: payloadPacket.data,
 options: {
@@ -827,33 +1077,75 @@ async function batchTrigger_internal(name, taskIdentifier, items, options, parse
 lockToVersion: item.options?.version ?? getEnvVar("TRIGGER_VERSION"),
 },
 };
-}))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+}));
+// Execute 2-phase batch
+const response = await tracer.startActiveSpan(name, async (span) => {
+const result = await executeBatchTwoPhase(apiClient, ndJsonItems, {
+parentRunId: ctx?.run.id,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
+}, requestOptions);
+span.setAttribute("batchId", result.id);
+span.setAttribute("runCount", result.runCount);
+return result;
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+...accessoryAttributes({
+items: [
+{
+text: taskIdentifier,
+variant: "normal",
+},
+],
+style: "codepath",
+}),
+},
+});
+const handle = {
+batchId: response.id,
+runCount: response.runCount,
+publicAccessToken: response.publicAccessToken,
+};
+return handle;
+}
+else {
+// Stream path: convert to AsyncIterable and transform
+const asyncItems = normalizeToAsyncIterable(items);
+const transformedItems = transformSingleTaskBatchItemsStream(taskIdentifier, asyncItems, parsePayload, options, queue);
+// Execute streaming 2-phase batch
+const response = await tracer.startActiveSpan(name, async (span) => {
+const result = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
+parentRunId: ctx?.run.id,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: true, // Fire-and-forget: child runs get separate trace IDs
+}, requestOptions);
+span.setAttribute("batchId", result.id);
+span.setAttribute("runCount", result.runCount);
+return result;
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+...accessoryAttributes({
+items: [
+{
+text: taskIdentifier,
+variant: "normal",
+},
+],
+style: "codepath",
+}),
+},
+});
+const handle = {
+batchId: response.id,
+runCount: response.runCount,
+publicAccessToken: response.publicAccessToken,
+};
+return handle;
+}
 }
 async function triggerAndWait_internal(name, id, payload, parsePayload, options, requestOptions) {
 const ctx = taskContext.ctx;
@@ -885,6 +1177,7 @@ async function triggerAndWait_internal(name, id, payload, parsePayload, options,
 machine: options?.machine,
 priority: options?.priority,
 region: options?.region,
+debounce: options?.debounce,
 },
 }, {}, requestOptions);
 span.setAttribute("runId", response.id);
@@ -915,72 +1208,117 @@ async function batchTriggerAndWait_internal(name, id, items, parsePayload, optio
 throw new Error("batchTriggerAndWait can only be used from inside a task.run()");
 }
 const apiClient = apiClientManager.clientOrThrow(requestOptions?.clientConfig);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-}
-
-
+// Check if items is an array or a stream
+if (Array.isArray(items)) {
+// Prepare items as BatchItemNDJSON
+const ndJsonItems = await Promise.all(items.map(async (item, index) => {
+const parsedPayload = parsePayload ? await parsePayload(item.payload) : item.payload;
+const payloadPacket = await stringifyIO(parsedPayload);
+const batchItemIdempotencyKey = await makeIdempotencyKey(flattenIdempotencyKey([options?.idempotencyKey, `${index}`]));
+return {
+index,
+task: id,
+payload: payloadPacket.data,
+options: {
+lockToVersion: taskContext.worker?.version,
+queue: item.options?.queue
+? { name: item.options.queue }
+: queue
+? { name: queue }
+: undefined,
+concurrencyKey: item.options?.concurrencyKey,
+test: taskContext.ctx?.run.isTest,
+payloadType: payloadPacket.dataType,
+delay: item.options?.delay,
+ttl: item.options?.ttl,
+tags: item.options?.tags,
+maxAttempts: item.options?.maxAttempts,
+metadata: item.options?.metadata,
+maxDuration: item.options?.maxDuration,
+idempotencyKey: (await makeIdempotencyKey(item.options?.idempotencyKey)) ?? batchItemIdempotencyKey,
+idempotencyKeyTTL: item.options?.idempotencyKeyTTL ?? options?.idempotencyKeyTTL,
+machine: item.options?.machine,
+priority: item.options?.priority,
+region: item.options?.region,
+},
+};
+}));
+return await tracer.startActiveSpan(name, async (span) => {
+// Execute 2-phase batch
+const response = await executeBatchTwoPhase(apiClient, ndJsonItems, {
+parentRunId: ctx.run.id,
+resumeParentOnCompletion: true,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: false, // Waiting: child runs share parent's trace ID
+}, requestOptions);
+span.setAttribute("batchId", response.id);
+span.setAttribute("runCount", response.runCount);
+const result = await runtime.waitForBatch({
+id: response.id,
+runCount: response.runCount,
+ctx,
+});
+const runs = await handleBatchTaskRunExecutionResult(result.items, id);
+return {
+id: result.id,
+runs,
+};
 }, {
-
-
-
-
-
-
-
-
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+...accessoryAttributes({
+items: [
+{
+text: id,
+variant: "normal",
+},
+],
+style: "codepath",
+}),
+},
 });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+}
+else {
+// Stream path: convert to AsyncIterable and transform
+const asyncItems = normalizeToAsyncIterable(items);
+const transformedItems = transformSingleTaskBatchItemsStreamForWait(id, asyncItems, parsePayload, options, queue);
+return await tracer.startActiveSpan(name, async (span) => {
+// Execute streaming 2-phase batch
+const response = await executeBatchTwoPhaseStreaming(apiClient, transformedItems, {
+parentRunId: ctx.run.id,
+resumeParentOnCompletion: true,
+idempotencyKey: await makeIdempotencyKey(options?.idempotencyKey),
+spanParentAsLink: false, // Waiting: child runs share parent's trace ID
+}, requestOptions);
+span.setAttribute("batchId", response.id);
+span.setAttribute("runCount", response.runCount);
+const result = await runtime.waitForBatch({
+id: response.id,
+runCount: response.runCount,
+ctx,
+});
+const runs = await handleBatchTaskRunExecutionResult(result.items, id);
+return {
+id: result.id,
+runs,
+};
+}, {
+kind: SpanKind.PRODUCER,
+attributes: {
+[SemanticInternalAttributes.STYLE_ICON]: "trigger",
+...accessoryAttributes({
+items: [
+{
+text: id,
+variant: "normal",
+},
+],
+style: "codepath",
+}),
+},
+});
+}
 }
 async function handleBatchTaskRunExecutionResult(items, taskIdentifier) {
 const someObjectStoreOutputs = items.some((item) => item.ok && item.outputType === "application/store");