@upstash/workflow 0.2.0 → 0.2.2
- package/astro.d.mts +2 -2
- package/astro.d.ts +2 -2
- package/astro.js +175 -88
- package/astro.mjs +5 -5
- package/{chunk-5R2BFC3N.mjs → chunk-Z7WS5XIR.mjs} +150 -234
- package/cloudflare.d.mts +2 -2
- package/cloudflare.d.ts +2 -2
- package/cloudflare.js +175 -88
- package/cloudflare.mjs +5 -5
- package/express.d.mts +1 -1
- package/express.d.ts +1 -1
- package/express.js +187 -93
- package/express.mjs +17 -10
- package/h3.d.mts +2 -2
- package/h3.d.ts +2 -2
- package/h3.js +175 -88
- package/h3.mjs +5 -5
- package/hono.d.mts +4 -2
- package/hono.d.ts +4 -2
- package/hono.js +175 -88
- package/hono.mjs +5 -5
- package/index.d.mts +3 -3
- package/index.d.ts +3 -3
- package/index.js +179 -86
- package/index.mjs +189 -3
- package/nextjs.d.mts +3 -3
- package/nextjs.d.ts +3 -3
- package/nextjs.js +179 -92
- package/nextjs.mjs +9 -9
- package/package.json +1 -1
- package/solidjs.d.mts +2 -2
- package/solidjs.d.ts +2 -2
- package/solidjs.js +175 -88
- package/solidjs.mjs +5 -5
- package/svelte.d.mts +3 -3
- package/svelte.d.ts +3 -3
- package/svelte.js +178 -88
- package/svelte.mjs +8 -5
- package/{types-Cki_MHrh.d.mts → types-APRap-aV.d.mts} +12 -2
- package/{types-Cki_MHrh.d.ts → types-APRap-aV.d.ts} +12 -2
package/h3.js
CHANGED
@@ -20,7 +20,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // platforms/h3.ts
 var h3_exports = {};
 __export(h3_exports, {
-serve: () =>
+serve: () => serve
 });
 module.exports = __toCommonJS(h3_exports);
 
@@ -336,48 +336,8 @@ async function _callHandler(event, handler, hooks) {
 var H3Headers = globalThis.Headers;
 var H3Response = globalThis.Response;
 
-// src/error.ts
-var import_qstash = require("@upstash/qstash");
-var WorkflowError = class extends import_qstash.QstashError {
-constructor(message) {
-super(message);
-this.name = "WorkflowError";
-}
-};
-var WorkflowAbort = class extends Error {
-stepInfo;
-stepName;
-/**
-* whether workflow is to be canceled on abort
-*/
-cancelWorkflow;
-/**
-*
-* @param stepName name of the aborting step
-* @param stepInfo step information
-* @param cancelWorkflow
-*/
-constructor(stepName, stepInfo, cancelWorkflow = false) {
-super(
-`This is an Upstash Workflow error thrown after a step executes. It is expected to be raised. Make sure that you await for each step. Also, if you are using try/catch blocks, you should not wrap context.run/sleep/sleepUntil/call methods with try/catch. Aborting workflow after executing step '${stepName}'.`
-);
-this.name = "WorkflowAbort";
-this.stepName = stepName;
-this.stepInfo = stepInfo;
-this.cancelWorkflow = cancelWorkflow;
-}
-};
-var formatWorkflowError = (error) => {
-return error instanceof Error ? {
-error: error.name,
-message: error.message
-} : {
-error: "Error",
-message: "An error occured while executing workflow."
-};
-};
-
 // src/client/utils.ts
+var import_qstash = require("@upstash/qstash");
 var makeNotifyRequest = async (requester, eventId, eventData) => {
 const result = await requester.request({
 path: ["v2", "notify", eventId],
@@ -404,32 +364,82 @@ var getSteps = async (requester, workflowRunId, messageId, debug) => {
 await debug?.log("INFO", "ENDPOINT_START", {
 message: `Pulled ${steps.length} steps from QStashand returned them without filtering with messageId.`
 });
-return steps;
+return { steps, workflowRunEnded: false };
 } else {
 const index = steps.findIndex((item) => item.messageId === messageId);
 if (index === -1) {
-return [];
+return { steps: [], workflowRunEnded: false };
 }
 const filteredSteps = steps.slice(0, index + 1);
 await debug?.log("INFO", "ENDPOINT_START", {
 message: `Pulled ${steps.length} steps from QStash and filtered them to ${filteredSteps.length} using messageId.`
 });
-return filteredSteps;
+return { steps: filteredSteps, workflowRunEnded: false };
 }
 } catch (error) {
-
-
-
-
-
+if (error instanceof import_qstash.QstashError && error.status === 404) {
+await debug?.log("WARN", "ENDPOINT_START", {
+message: "Couldn't fetch workflow run steps. This can happen if the workflow run succesfully ends before some callback is executed.",
+error
+});
+return { steps: void 0, workflowRunEnded: true };
+} else {
+throw error;
+}
 }
 };
 
+// src/error.ts
+var import_qstash2 = require("@upstash/qstash");
+var WorkflowError = class extends import_qstash2.QstashError {
+constructor(message) {
+super(message);
+this.name = "WorkflowError";
+}
+};
+var WorkflowAbort = class extends Error {
+stepInfo;
+stepName;
+/**
+* whether workflow is to be canceled on abort
+*/
+cancelWorkflow;
+/**
+*
+* @param stepName name of the aborting step
+* @param stepInfo step information
+* @param cancelWorkflow
+*/
+constructor(stepName, stepInfo, cancelWorkflow = false) {
+super(
+`This is an Upstash Workflow error thrown after a step executes. It is expected to be raised. Make sure that you await for each step. Also, if you are using try/catch blocks, you should not wrap context.run/sleep/sleepUntil/call methods with try/catch. Aborting workflow after executing step '${stepName}'.`
+);
+this.name = "WorkflowAbort";
+this.stepName = stepName;
+this.stepInfo = stepInfo;
+this.cancelWorkflow = cancelWorkflow;
+}
+};
+var formatWorkflowError = (error) => {
+return error instanceof Error ? {
+error: error.name,
+message: error.message
+} : {
+error: "Error",
+message: "An error occured while executing workflow."
+};
+};
+
 // src/context/steps.ts
 var BaseLazyStep = class {
 stepName;
 // will be set in the subclasses
 constructor(stepName) {
+if (!stepName) {
+throw new WorkflowError(
+"A workflow step name cannot be undefined or an empty string. Please provide a name for your workflow step."
+);
+}
 this.stepName = stepName;
 }
 };
@@ -522,15 +532,17 @@ var LazyCallStep = class extends BaseLazyStep {
 method;
 body;
 headers;
-stepType = "Call";
 retries;
-
+timeout;
+stepType = "Call";
+constructor(stepName, url, method, body, headers, retries, timeout) {
 super(stepName);
 this.url = url;
 this.method = method;
 this.body = body;
 this.headers = headers;
 this.retries = retries;
+this.timeout = timeout;
 }
 getPlanStep(concurrent, targetStep) {
 return {
@@ -1038,8 +1050,8 @@ var StepTypes = [
 ];
 
 // src/workflow-requests.ts
-var
-var triggerFirstInvocation = async (workflowContext, retries, debug) => {
+var import_qstash3 = require("@upstash/qstash");
+var triggerFirstInvocation = async (workflowContext, retries, useJSONContent, debug) => {
 const { headers } = getHeaders(
 "true",
 workflowContext.workflowRunId,
@@ -1049,6 +1061,9 @@ var triggerFirstInvocation = async (workflowContext, retries, debug) => {
 workflowContext.failureUrl,
 retries
 );
+if (useJSONContent) {
+headers["content-type"] = "application/json";
+}
 try {
 const body = typeof workflowContext.requestPayload === "string" ? workflowContext.requestPayload : JSON.stringify(workflowContext.requestPayload);
 const result = await workflowContext.qstashClient.publish({
@@ -1092,7 +1107,7 @@ var triggerRouteFunction = async ({
 return ok("workflow-finished");
 } catch (error) {
 const error_ = error;
-if (error instanceof
+if (error instanceof import_qstash3.QstashError && error.status === 400) {
 await debug?.log("WARN", "RESPONSE_WORKFLOW", {
 message: `tried to append to a cancelled workflow. exiting without publishing.`,
 name: error.name,
@@ -1126,7 +1141,7 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
 );
 return { deleted: true };
 } catch (error) {
-if (error instanceof
+if (error instanceof import_qstash3.QstashError && error.status === 404) {
 await debug?.log("WARN", "SUBMIT_CLEANUP", {
 message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
 name: error.name,
@@ -1142,7 +1157,10 @@ var recreateUserHeaders = (headers) => {
 const pairs = headers.entries();
 for (const [header, value] of pairs) {
 const headerLowerCase = header.toLowerCase();
-if (!headerLowerCase.startsWith("upstash-workflow-") &&
+if (!headerLowerCase.startsWith("upstash-workflow-") && // https://vercel.com/docs/edge-network/headers/request-headers#x-vercel-id
+!headerLowerCase.startsWith("x-vercel-") && !headerLowerCase.startsWith("x-forwarded-") && // https://blog.cloudflare.com/preventing-request-loops-using-cdn-loop/
+headerLowerCase !== "cf-connecting-ip" && headerLowerCase !== "cdn-loop" && headerLowerCase !== "cf-ew-via" && headerLowerCase !== "cf-ray" && // For Render https://render.com
+headerLowerCase !== "render-proxy-ttl") {
 filteredHeaders.append(header, value);
 }
 }
@@ -1160,11 +1178,19 @@ var handleThirdPartyCallResult = async (request, requestPayload, client, workflo
 if (!workflowRunId2)
 throw new WorkflowError("workflow run id missing in context.call lazy fetch.");
 if (!messageId) throw new WorkflowError("message id missing in context.call lazy fetch.");
-const steps = await getSteps(
+const { steps, workflowRunEnded } = await getSteps(
+client.http,
+workflowRunId2,
+messageId,
+debug
+);
+if (workflowRunEnded) {
+return ok("workflow-ended");
+}
 const failingStep = steps.find((step) => step.messageId === messageId);
 if (!failingStep)
 throw new WorkflowError(
-"Failed to submit the context.call." + (steps.length === 0 ? "No steps found." : `No step was found with matching messageId ${messageId} out of ${steps.length} steps.`)
+"Failed to submit the context.call. " + (steps.length === 0 ? "No steps found." : `No step was found with matching messageId ${messageId} out of ${steps.length} steps.`)
 );
 callbackPayload = atob(failingStep.body);
 }
@@ -1245,7 +1271,7 @@ ${atob(callbackMessage.body ?? "")}`
 );
 }
 };
-var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step, failureUrl, retries, callRetries) => {
+var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step, failureUrl, retries, callRetries, callTimeout) => {
 const baseHeaders = {
 [WORKFLOW_INIT_HEADER]: initHeaderValue,
 [WORKFLOW_ID_HEADER]: workflowRunId,
@@ -1255,6 +1281,9 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
 if (!step?.callUrl) {
 baseHeaders[`Upstash-Forward-${WORKFLOW_PROTOCOL_VERSION_HEADER}`] = WORKFLOW_PROTOCOL_VERSION;
 }
+if (callTimeout) {
+baseHeaders[`Upstash-Timeout`] = callTimeout.toString();
+}
 if (failureUrl) {
 baseHeaders[`Upstash-Failure-Callback-Forward-${WORKFLOW_FAILURE_HEADER}`] = "true";
 if (!step?.callUrl) {
@@ -1630,7 +1659,8 @@ var AutoExecutor = class _AutoExecutor {
 singleStep,
 this.context.failureUrl,
 this.context.retries,
-lazyStep instanceof LazyCallStep ? lazyStep.retries : void 0
+lazyStep instanceof LazyCallStep ? lazyStep.retries : void 0,
+lazyStep instanceof LazyCallStep ? lazyStep.timeout : void 0
 );
 const willWait = singleStep.concurrent === NO_CONCURRENCY || singleStep.stepId === 0;
 singleStep.out = JSON.stringify(singleStep.out);
@@ -1980,6 +2010,7 @@ var WorkflowContext = class {
 * @param body call body
 * @param headers call headers
 * @param retries number of call retries. 0 by default
+* @param timeout max duration to wait for the endpoint to respond. in seconds.
 * @returns call result as {
 * status: number;
 * body: unknown;
@@ -1987,9 +2018,17 @@ var WorkflowContext = class {
 * }
 */
 async call(stepName, settings) {
-const { url, method = "GET", body, headers = {}, retries = 0 } = settings;
+const { url, method = "GET", body, headers = {}, retries = 0, timeout } = settings;
 const result = await this.addStep(
-new LazyCallStep(
+new LazyCallStep(
+stepName,
+url,
+method,
+body,
+headers,
+retries,
+timeout
+)
 );
 if (typeof result === "string") {
 try {
@@ -2190,7 +2229,7 @@ function decodeBase64(base64) {
 }
 
 // src/serve/authorization.ts
-var
+var import_qstash4 = require("@upstash/qstash");
 var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
 static disabledMessage = "disabled-qstash-worklfow-run";
 /**
@@ -2221,7 +2260,7 @@ var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowCon
 */
 static async tryAuthentication(routeFunction, context) {
 const disabledContext = new _DisabledWorkflowContext({
-qstashClient: new
+qstashClient: new import_qstash4.Client({
 baseUrl: "disabled-client",
 token: "disabled-client"
 }),
@@ -2345,7 +2384,8 @@ var parseRequest = async (requestPayload, isFirstInvocation, workflowRunId, requ
 return {
 rawInitialPayload: requestPayload ?? "",
 steps: [],
-isLastDuplicate: false
+isLastDuplicate: false,
+workflowRunEnded: false
 };
 } else {
 let rawSteps;
@@ -2355,7 +2395,21 @@ var parseRequest = async (requestPayload, isFirstInvocation, workflowRunId, requ
 "ENDPOINT_START",
 "request payload is empty, steps will be fetched from QStash."
 );
-
+const { steps: fetchedSteps, workflowRunEnded } = await getSteps(
+requester,
+workflowRunId,
+messageId,
+debug
+);
+if (workflowRunEnded) {
+return {
+rawInitialPayload: void 0,
+steps: void 0,
+isLastDuplicate: void 0,
+workflowRunEnded: true
+};
+}
+rawSteps = fetchedSteps;
 } else {
 rawSteps = JSON.parse(requestPayload);
 }
@@ -2365,7 +2419,8 @@ var parseRequest = async (requestPayload, isFirstInvocation, workflowRunId, requ
 return {
 rawInitialPayload,
 steps: deduplicatedSteps,
-isLastDuplicate
+isLastDuplicate,
+workflowRunEnded: false
 };
 }
 };
@@ -2389,7 +2444,7 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
 const workflowContext = new WorkflowContext({
 qstashClient,
 workflowRunId,
-initialPayload: initialPayloadParser(decodeBase64(sourceBody)),
+initialPayload: sourceBody ? initialPayloadParser(decodeBase64(sourceBody)) : void 0,
 headers: recreateUserHeaders(new Headers(sourceHeader)),
 steps: [],
 url,
@@ -2419,22 +2474,35 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
 };
 
 // src/serve/options.ts
-var import_qstash4 = require("@upstash/qstash");
 var import_qstash5 = require("@upstash/qstash");
+var import_qstash6 = require("@upstash/qstash");
 var processOptions = (options) => {
 const environment = options?.env ?? (typeof process === "undefined" ? {} : process.env);
 const receiverEnvironmentVariablesSet = Boolean(
 environment.QSTASH_CURRENT_SIGNING_KEY && environment.QSTASH_NEXT_SIGNING_KEY
 );
 return {
-qstashClient: new
+qstashClient: new import_qstash6.Client({
 baseUrl: environment.QSTASH_URL,
 token: environment.QSTASH_TOKEN
 }),
-
-
-
-
+onStepFinish: (workflowRunId, finishCondition) => {
+if (finishCondition === "auth-fail") {
+console.error(AUTH_FAIL_MESSAGE);
+return new Response(
+JSON.stringify({
+message: AUTH_FAIL_MESSAGE,
+workflowRunId
+}),
+{
+status: 400
+}
+);
+}
+return new Response(JSON.stringify({ workflowRunId }), {
+status: 200
+});
+},
 initialPayloadParser: (initialRequest) => {
 if (!initialRequest) {
 return void 0;
@@ -2448,13 +2516,14 @@ var processOptions = (options) => {
 throw error;
 }
 },
-receiver: receiverEnvironmentVariablesSet ? new
+receiver: receiverEnvironmentVariablesSet ? new import_qstash5.Receiver({
 currentSigningKey: environment.QSTASH_CURRENT_SIGNING_KEY,
 nextSigningKey: environment.QSTASH_NEXT_SIGNING_KEY
 }) : void 0,
 baseUrl: environment.UPSTASH_WORKFLOW_URL,
 env: environment,
 retries: DEFAULT_RETRIES,
+useJSONContent: false,
 ...options
 };
 };
@@ -2471,14 +2540,25 @@ var determineUrls = async (request, url, baseUrl, failureFunction, failureUrl, d
 });
 }
 const workflowFailureUrl = failureFunction ? workflowUrl : failureUrl;
+if (workflowUrl.includes("localhost")) {
+await debug?.log("WARN", "ENDPOINT_START", {
+message: `Workflow URL contains localhost. This can happen in local development, but shouldn't happen in production unless you have a route which contains localhost. Received: ${workflowUrl}`
+});
+}
+if (!(workflowUrl.startsWith("http://") || workflowUrl.startsWith("https://"))) {
+throw new WorkflowError(
+`Workflow URL should start with 'http://' or 'https://'. Recevied is '${workflowUrl}'`
+);
+}
 return {
 workflowUrl,
 workflowFailureUrl
 };
 };
+var AUTH_FAIL_MESSAGE = `Failed to authenticate Workflow request. If this is unexpected, see the caveat https://upstash.com/docs/workflow/basics/caveats#avoid-non-deterministic-code-outside-context-run`;
 
 // src/serve/index.ts
-var
+var serveBase = (routeFunction, options) => {
 const {
 qstashClient,
 onStepFinish,
@@ -2490,7 +2570,8 @@ var serve = (routeFunction, options) => {
 failureFunction,
 baseUrl,
 env,
-retries
+retries,
+useJSONContent
 } = processOptions(options);
 const debug = WorkflowLogger.getLogger(verbose);
 const handler = async (request) => {
@@ -2507,7 +2588,7 @@ var serve = (routeFunction, options) => {
 await verifyRequest(requestPayload, request.headers.get("upstash-signature"), receiver);
 const { isFirstInvocation, workflowRunId } = validateRequest(request);
 debug?.setWorkflowRunId(workflowRunId);
-const { rawInitialPayload, steps, isLastDuplicate } = await parseRequest(
+const { rawInitialPayload, steps, isLastDuplicate, workflowRunEnded } = await parseRequest(
 requestPayload,
 isFirstInvocation,
 workflowRunId,
@@ -2515,8 +2596,11 @@ var serve = (routeFunction, options) => {
 request.headers.get("upstash-message-id"),
 debug
 );
+if (workflowRunEnded) {
+return onStepFinish(workflowRunId, "workflow-already-ended");
+}
 if (isLastDuplicate) {
-return onStepFinish(
+return onStepFinish(workflowRunId, "duplicate-step");
 }
 const failureCheck = await handleFailure(
 request,
@@ -2530,7 +2614,7 @@ var serve = (routeFunction, options) => {
 throw failureCheck.error;
 } else if (failureCheck.value === "is-failure-callback") {
 await debug?.log("WARN", "RESPONSE_DEFAULT", "failureFunction executed");
-return onStepFinish(
+return onStepFinish(workflowRunId, "failure-callback");
 }
 const workflowContext = new WorkflowContext({
 qstashClient,
@@ -2552,7 +2636,11 @@ var serve = (routeFunction, options) => {
 await debug?.log("ERROR", "ERROR", { error: authCheck.error.message });
 throw authCheck.error;
 } else if (authCheck.value === "run-ended") {
-
+await debug?.log("ERROR", "ERROR", { error: AUTH_FAIL_MESSAGE });
+return onStepFinish(
+isFirstInvocation ? "no-workflow-id" : workflowContext.workflowRunId,
+"auth-fail"
+);
 }
 const callReturnCheck = await handleThirdPartyCallResult(
 request,
@@ -2569,7 +2657,7 @@ var serve = (routeFunction, options) => {
 });
 throw callReturnCheck.error;
 } else if (callReturnCheck.value === "continue-workflow") {
-const result = isFirstInvocation ? await triggerFirstInvocation(workflowContext, retries, debug) : await triggerRouteFunction({
+const result = isFirstInvocation ? await triggerFirstInvocation(workflowContext, retries, useJSONContent, debug) : await triggerRouteFunction({
 onStep: async () => routeFunction(workflowContext),
 onCleanup: async () => {
 await triggerWorkflowDelete(workflowContext, debug);
@@ -2585,6 +2673,8 @@ var serve = (routeFunction, options) => {
 }
 await debug?.log("INFO", "RESPONSE_WORKFLOW");
 return onStepFinish(workflowContext.workflowRunId, "success");
+} else if (callReturnCheck.value === "workflow-ended") {
+return onStepFinish(workflowContext.workflowRunId, "workflow-already-ended");
 }
 await debug?.log("INFO", "RESPONSE_DEFAULT");
 return onStepFinish("no-workflow-id", "fromCallback");
@@ -2602,9 +2692,6 @@ var serve = (routeFunction, options) => {
 return { handler: safeHandler };
 };
 
-// src/client/index.ts
-var import_qstash6 = require("@upstash/qstash");
-
 // platforms/h3.ts
 function transformHeaders(headers) {
 const formattedHeaders = Object.entries(headers).map(([key, value]) => [
@@ -2613,7 +2700,7 @@ function transformHeaders(headers) {
 ]);
 return formattedHeaders;
 }
-var
+var serve = (routeFunction, options) => {
 const handler = defineEventHandler(async (event) => {
 const method = event.node.req.method;
 if (method?.toUpperCase() !== "POST") {
@@ -2632,7 +2719,7 @@ var serve2 = (routeFunction, options) => {
 body: await readRawBody(event),
 method: "POST"
 });
-const { handler: serveHandler } =
+const { handler: serveHandler } = serveBase(routeFunction, options);
 return await serveHandler(request);
 });
 return { handler };
package/h3.mjs
CHANGED
@@ -1,6 +1,6 @@
 import {
-
-} from "./chunk-
+serveBase
+} from "./chunk-Z7WS5XIR.mjs";
 
 // node_modules/defu/dist/defu.mjs
 function isPlainObject(value) {
@@ -322,7 +322,7 @@ function transformHeaders(headers) {
 ]);
 return formattedHeaders;
 }
-var
+var serve = (routeFunction, options) => {
 const handler = defineEventHandler(async (event) => {
 const method = event.node.req.method;
 if (method?.toUpperCase() !== "POST") {
@@ -341,11 +341,11 @@ var serve2 = (routeFunction, options) => {
 body: await readRawBody(event),
 method: "POST"
 });
-const { handler: serveHandler } =
+const { handler: serveHandler } = serveBase(routeFunction, options);
 return await serveHandler(request);
 });
 return { handler };
 };
 export {
-
+serve
 };
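Beyond the serveBase rename shown here, the shared ESM chunk presumably carries the same BaseLazyStep guard added in the CJS bundle above: a step constructed with an empty name now throws a WorkflowError. A small sketch, assuming the h3 serve shape from the previous example; the step names and payload type are illustrative.

    import { serve } from "@upstash/workflow/h3";

    const { handler } = serve<string>(async (context) => {
      // Named steps keep working as before.
      await context.run("store-payload", () => context.requestPayload.length);

      // As of this version an unnamed step is rejected up front:
      // await context.run("", () => "oops");
      // -> WorkflowError: "A workflow step name cannot be undefined or an empty string. ..."
    });

    export default handler;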
package/hono.d.mts
CHANGED
@@ -1,5 +1,6 @@
 import { Context } from 'hono';
-import { R as RouteFunction,
+import { R as RouteFunction, j as PublicServeOptions } from './types-APRap-aV.mjs';
+import { Variables } from 'hono/types';
 import '@upstash/qstash';
 
 type WorkflowBindings = {
@@ -18,8 +19,9 @@ type WorkflowBindings = {
 * @param options workflow options
 * @returns
 */
-declare const serve: <TInitialPayload = unknown, TBindings extends WorkflowBindings = WorkflowBindings>(routeFunction: RouteFunction<TInitialPayload>, options?:
+declare const serve: <TInitialPayload = unknown, TBindings extends WorkflowBindings = WorkflowBindings, TVariables extends Variables = object>(routeFunction: RouteFunction<TInitialPayload>, options?: PublicServeOptions<TInitialPayload>) => ((context: Context<{
 Bindings: TBindings;
+Variables: TVariables;
 }>) => Promise<Response>);
 
 export { type WorkflowBindings, serve };
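The Hono declaration gains a third TVariables generic, so the handler returned by serve is typed against a Context that carries both Bindings and Variables. A sketch of how the widened signature could be wired into a Hono app; the extra binding and variable names are illustrative, not part of the package.

    import { Hono } from "hono";
    import { serve, type WorkflowBindings } from "@upstash/workflow/hono";

    // Hypothetical app-specific bindings and variables layered on top of the
    // package's WorkflowBindings.
    type Bindings = WorkflowBindings & { MY_SECRET: string };
    type Vars = { requestId: string };

    const app = new Hono<{ Bindings: Bindings; Variables: Vars }>();

    app.post(
      "/workflow",
      serve<{ input: string }, Bindings, Vars>(async (context) => {
        await context.run("echo-input", () => context.requestPayload.input);
      })
    );

    export default app;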
package/hono.d.ts
CHANGED
@@ -1,5 +1,6 @@
 import { Context } from 'hono';
-import { R as RouteFunction,
+import { R as RouteFunction, j as PublicServeOptions } from './types-APRap-aV.js';
+import { Variables } from 'hono/types';
 import '@upstash/qstash';
 
 type WorkflowBindings = {
@@ -18,8 +19,9 @@ type WorkflowBindings = {
 * @param options workflow options
 * @returns
 */
-declare const serve: <TInitialPayload = unknown, TBindings extends WorkflowBindings = WorkflowBindings>(routeFunction: RouteFunction<TInitialPayload>, options?:
+declare const serve: <TInitialPayload = unknown, TBindings extends WorkflowBindings = WorkflowBindings, TVariables extends Variables = object>(routeFunction: RouteFunction<TInitialPayload>, options?: PublicServeOptions<TInitialPayload>) => ((context: Context<{
 Bindings: TBindings;
+Variables: TVariables;
 }>) => Promise<Response>);
 
 export { type WorkflowBindings, serve };