@upstash/workflow 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/astro.d.mts +1 -1
- package/astro.d.ts +1 -1
- package/astro.js +348 -191
- package/astro.mjs +1 -1
- package/{chunk-HO2SB246.mjs → chunk-5R2BFC3N.mjs} +419 -194
- package/cloudflare.d.mts +1 -1
- package/cloudflare.d.ts +1 -1
- package/cloudflare.js +348 -191
- package/cloudflare.mjs +1 -1
- package/express.d.mts +1 -1
- package/express.d.ts +1 -1
- package/express.js +369 -189
- package/express.mjs +17 -4
- package/h3.d.mts +1 -1
- package/h3.d.ts +1 -1
- package/h3.js +348 -191
- package/h3.mjs +1 -1
- package/hono.d.mts +1 -1
- package/hono.d.ts +1 -1
- package/hono.js +348 -191
- package/hono.mjs +1 -1
- package/index.d.mts +74 -21
- package/index.d.ts +74 -21
- package/index.js +426 -211
- package/index.mjs +5 -5
- package/nextjs.d.mts +1 -1
- package/nextjs.d.ts +1 -1
- package/nextjs.js +348 -191
- package/nextjs.mjs +1 -1
- package/package.json +1 -1
- package/solidjs.d.mts +1 -1
- package/solidjs.d.ts +1 -1
- package/solidjs.js +348 -191
- package/solidjs.mjs +1 -1
- package/svelte.d.mts +1 -1
- package/svelte.d.ts +1 -1
- package/svelte.js +348 -191
- package/svelte.mjs +1 -1
- package/{types-CQuc-j8n.d.mts → types-Cki_MHrh.d.mts} +85 -31
- package/{types-CQuc-j8n.d.ts → types-Cki_MHrh.d.ts} +85 -31
@@ -32,22 +32,33 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 
 // src/error.ts
 import { QstashError } from "@upstash/qstash";
-var
+var WorkflowError = class extends QstashError {
 constructor(message) {
 super(message);
-this.name = "
+this.name = "WorkflowError";
 }
 };
-var
+var WorkflowAbort = class extends Error {
 stepInfo;
 stepName;
-
+/**
+* whether workflow is to be canceled on abort
+*/
+cancelWorkflow;
+/**
+*
+* @param stepName name of the aborting step
+* @param stepInfo step information
+* @param cancelWorkflow
+*/
+constructor(stepName, stepInfo, cancelWorkflow = false) {
 super(
 `This is an Upstash Workflow error thrown after a step executes. It is expected to be raised. Make sure that you await for each step. Also, if you are using try/catch blocks, you should not wrap context.run/sleep/sleepUntil/call methods with try/catch. Aborting workflow after executing step '${stepName}'.`
 );
-this.name = "
+this.name = "WorkflowAbort";
 this.stepName = stepName;
 this.stepInfo = stepInfo;
+this.cancelWorkflow = cancelWorkflow;
 }
 };
 var formatWorkflowError = (error) => {
@@ -76,6 +87,44 @@ var makeGetWaitersRequest = async (requester, eventId) => {
 });
 return result;
 };
+var makeCancelRequest = async (requester, workflowRunId) => {
+await requester.request({
+path: ["v2", "workflows", "runs", `${workflowRunId}?cancel=true`],
+method: "DELETE",
+parseResponseAsJson: false
+});
+return true;
+};
+var getSteps = async (requester, workflowRunId, messageId, debug) => {
+try {
+const steps = await requester.request({
+path: ["v2", "workflows", "runs", workflowRunId],
+parseResponseAsJson: true
+});
+if (!messageId) {
+await debug?.log("INFO", "ENDPOINT_START", {
+message: `Pulled ${steps.length} steps from QStashand returned them without filtering with messageId.`
+});
+return steps;
+} else {
+const index = steps.findIndex((item) => item.messageId === messageId);
+if (index === -1) {
+return [];
+}
+const filteredSteps = steps.slice(0, index + 1);
+await debug?.log("INFO", "ENDPOINT_START", {
+message: `Pulled ${steps.length} steps from QStash and filtered them to ${filteredSteps.length} using messageId.`
+});
+return filteredSteps;
+}
+} catch (error) {
+await debug?.log("ERROR", "ERROR", {
+message: "failed while fetching steps.",
+error
+});
+throw new WorkflowError(`Failed while pulling steps. ${error}`);
+}
+};
 
 // src/context/steps.ts
 var BaseLazyStep = class {
@@ -690,6 +739,7 @@ var StepTypes = [
 ];
 
 // src/workflow-requests.ts
+import { QstashError as QstashError2 } from "@upstash/qstash";
 var triggerFirstInvocation = async (workflowContext, retries, debug) => {
 const { headers } = getHeaders(
 "true",
@@ -700,20 +750,32 @@ var triggerFirstInvocation = async (workflowContext, retries, debug) => {
 workflowContext.failureUrl,
 retries
 );
-await debug?.log("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
-headers,
-requestPayload: workflowContext.requestPayload,
-url: workflowContext.url
-});
 try {
 const body = typeof workflowContext.requestPayload === "string" ? workflowContext.requestPayload : JSON.stringify(workflowContext.requestPayload);
-await workflowContext.qstashClient.publish({
+const result = await workflowContext.qstashClient.publish({
 headers,
 method: "POST",
 body,
 url: workflowContext.url
 });
-
+if (result.deduplicated) {
+await debug?.log("WARN", "SUBMIT_FIRST_INVOCATION", {
+message: `Workflow run ${workflowContext.workflowRunId} already exists. A new one isn't created.`,
+headers,
+requestPayload: workflowContext.requestPayload,
+url: workflowContext.url,
+messageId: result.messageId
+});
+return ok("workflow-run-already-exists");
+} else {
+await debug?.log("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
+headers,
+requestPayload: workflowContext.requestPayload,
+url: workflowContext.url,
+messageId: result.messageId
+});
+return ok("success");
+}
 } catch (error) {
 const error_ = error;
 return err(error_);
@@ -721,7 +783,9 @@ var triggerFirstInvocation = async (workflowContext, retries, debug) => {
 };
 var triggerRouteFunction = async ({
 onCleanup,
-onStep
+onStep,
+onCancel,
+debug
 }) => {
 try {
 await onStep();
@@ -729,19 +793,50 @@ var triggerRouteFunction = async ({
 return ok("workflow-finished");
 } catch (error) {
 const error_ = error;
-
+if (error instanceof QstashError2 && error.status === 400) {
+await debug?.log("WARN", "RESPONSE_WORKFLOW", {
+message: `tried to append to a cancelled workflow. exiting without publishing.`,
+name: error.name,
+errorMessage: error.message
+});
+return ok("workflow-was-finished");
+} else if (!(error_ instanceof WorkflowAbort)) {
+return err(error_);
+} else if (error_.cancelWorkflow) {
+await onCancel();
+return ok("workflow-finished");
+} else {
+return ok("step-finished");
+}
 }
 };
 var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
 await debug?.log("SUBMIT", "SUBMIT_CLEANUP", {
 deletedWorkflowRunId: workflowContext.workflowRunId
 });
-
-
-
-
-
-
+try {
+await workflowContext.qstashClient.http.request({
+path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
+method: "DELETE",
+parseResponseAsJson: false
+});
+await debug?.log(
+"SUBMIT",
+"SUBMIT_CLEANUP",
+`workflow run ${workflowContext.workflowRunId} deleted.`
+);
+return { deleted: true };
+} catch (error) {
+if (error instanceof QstashError2 && error.status === 404) {
+await debug?.log("WARN", "SUBMIT_CLEANUP", {
+message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
+name: error.name,
+errorMessage: error.message
+});
+return { deleted: false };
+}
+throw error;
+}
 };
 var recreateUserHeaders = (headers) => {
 const filteredHeaders = new Headers();
@@ -757,15 +852,32 @@ var recreateUserHeaders = (headers) => {
 var handleThirdPartyCallResult = async (request, requestPayload, client, workflowUrl, failureUrl, retries, debug) => {
 try {
 if (request.headers.get("Upstash-Workflow-Callback")) {
-
+let callbackPayload;
+if (requestPayload) {
+callbackPayload = requestPayload;
+} else {
+const workflowRunId2 = request.headers.get("upstash-workflow-runid");
+const messageId = request.headers.get("upstash-message-id");
+if (!workflowRunId2)
+throw new WorkflowError("workflow run id missing in context.call lazy fetch.");
+if (!messageId) throw new WorkflowError("message id missing in context.call lazy fetch.");
+const steps = await getSteps(client.http, workflowRunId2, messageId, debug);
+const failingStep = steps.find((step) => step.messageId === messageId);
+if (!failingStep)
+throw new WorkflowError(
+"Failed to submit the context.call." + (steps.length === 0 ? "No steps found." : `No step was found with matching messageId ${messageId} out of ${steps.length} steps.`)
+);
+callbackPayload = atob(failingStep.body);
+}
+const callbackMessage = JSON.parse(callbackPayload);
 if (!(callbackMessage.status >= 200 && callbackMessage.status < 300) && callbackMessage.maxRetries && callbackMessage.retried !== callbackMessage.maxRetries) {
 await debug?.log("WARN", "SUBMIT_THIRD_PARTY_RESULT", {
 status: callbackMessage.status,
-body: atob(callbackMessage.body)
+body: atob(callbackMessage.body ?? "")
 });
 console.warn(
 `Workflow Warning: "context.call" failed with status ${callbackMessage.status} and will retry (retried ${callbackMessage.retried ?? 0} out of ${callbackMessage.maxRetries} times). Error Message:
-${atob(callbackMessage.body)}`
+${atob(callbackMessage.body ?? "")}`
 );
 return ok("call-will-retry");
 }
@@ -799,7 +911,7 @@ ${atob(callbackMessage.body)}`
 );
 const callResponse = {
 status: callbackMessage.status,
-body: atob(callbackMessage.body),
+body: atob(callbackMessage.body ?? ""),
 header: callbackMessage.header
 };
 const callResultStep = {
@@ -830,9 +942,7 @@ ${atob(callbackMessage.body)}`
 } catch (error) {
 const isCallReturn = request.headers.get("Upstash-Workflow-Callback");
 return err(
-new
-`Error when handling call return (isCallReturn=${isCallReturn}): ${error}`
-)
+new WorkflowError(`Error when handling call return (isCallReturn=${isCallReturn}): ${error}`)
 );
 }
 };
@@ -840,7 +950,8 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
 const baseHeaders = {
 [WORKFLOW_INIT_HEADER]: initHeaderValue,
 [WORKFLOW_ID_HEADER]: workflowRunId,
-[WORKFLOW_URL_HEADER]: workflowUrl
+[WORKFLOW_URL_HEADER]: workflowUrl,
+[WORKFLOW_FEATURE_HEADER]: "LazyFetch,InitialBody"
 };
 if (!step?.callUrl) {
 baseHeaders[`Upstash-Forward-${WORKFLOW_PROTOCOL_VERSION_HEADER}`] = WORKFLOW_PROTOCOL_VERSION;
@@ -853,8 +964,8 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
 }
 if (step?.callUrl) {
 baseHeaders["Upstash-Retries"] = callRetries?.toString() ?? "0";
-baseHeaders[WORKFLOW_FEATURE_HEADER] = "WF_NoDelete";
-if (retries) {
+baseHeaders[WORKFLOW_FEATURE_HEADER] = "WF_NoDelete,InitialBody";
+if (retries !== void 0) {
 baseHeaders["Upstash-Callback-Retries"] = retries.toString();
 baseHeaders["Upstash-Failure-Callback-Retries"] = retries.toString();
 }
@@ -889,6 +1000,7 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
 "Upstash-Callback-Workflow-CallType": "fromCallback",
 "Upstash-Callback-Workflow-Init": "false",
 "Upstash-Callback-Workflow-Url": workflowUrl,
+"Upstash-Callback-Feature-Set": "LazyFetch,InitialBody",
 "Upstash-Callback-Forward-Upstash-Workflow-Callback": "true",
 "Upstash-Callback-Forward-Upstash-Workflow-StepId": step.stepId.toString(),
 "Upstash-Callback-Forward-Upstash-Workflow-StepName": step.stepName,
@@ -937,7 +1049,7 @@ var verifyRequest = async (body, signature, verifier) => {
 throw new Error("Signature in `Upstash-Signature` header is not valid");
 }
 } catch (error) {
-throw new
+throw new WorkflowError(
 `Failed to verify that the Workflow request comes from QStash: ${error}
 
 If signature is missing, trigger the workflow endpoint by publishing your request to QStash instead of calling it directly.
@@ -977,14 +1089,14 @@ var AutoExecutor = class _AutoExecutor {
 *
 * If a function is already executing (this.executingStep), this
 * means that there is a nested step which is not allowed. In this
-* case, addStep throws
+* case, addStep throws WorkflowError.
 *
 * @param stepInfo step plan to add
 * @returns result of the step function
 */
 async addStep(stepInfo) {
 if (this.executingStep) {
-throw new
+throw new WorkflowError(
 `A step can not be run inside another step. Tried to run '${stepInfo.stepName}' inside '${this.executingStep}'`
 );
 }
@@ -1069,7 +1181,7 @@ var AutoExecutor = class _AutoExecutor {
 const sortedSteps = sortSteps(this.steps);
 const plannedParallelStepCount = sortedSteps[initialStepCount + this.planStepCount]?.concurrent;
 if (parallelCallState !== "first" && plannedParallelStepCount !== parallelSteps.length) {
-throw new
+throw new WorkflowError(
 `Incompatible number of parallel steps when call state was '${parallelCallState}'. Expected ${parallelSteps.length}, got ${plannedParallelStepCount} from the request.`
 );
 }
@@ -1091,7 +1203,7 @@ var AutoExecutor = class _AutoExecutor {
 case "partial": {
 const planStep = this.steps.at(-1);
 if (!planStep || planStep.targetStep === void 0) {
-throw new
+throw new WorkflowError(
 `There must be a last step and it should have targetStep larger than 0.Received: ${JSON.stringify(planStep)}`
 );
 }
@@ -1105,17 +1217,17 @@ var AutoExecutor = class _AutoExecutor {
 );
 await this.submitStepsToQStash([resultStep], [parallelStep]);
 } catch (error) {
-if (error instanceof
+if (error instanceof WorkflowAbort) {
 throw error;
 }
-throw new
+throw new WorkflowError(
 `Error submitting steps to QStash in partial parallel step execution: ${error}`
 );
 }
 break;
 }
 case "discard": {
-throw new
+throw new WorkflowAbort("discarded parallel");
 }
 case "last": {
 const parallelResultSteps = sortedSteps.filter((step) => step.stepId >= initialStepCount).slice(0, parallelSteps.length);
@@ -1166,7 +1278,7 @@ var AutoExecutor = class _AutoExecutor {
 */
 async submitStepsToQStash(steps, lazySteps) {
 if (steps.length === 0) {
-throw new
+throw new WorkflowError(
 `Unable to submit steps to QStash. Provided list is empty. Current step: ${this.stepCount}`
 );
 }
@@ -1206,7 +1318,7 @@ var AutoExecutor = class _AutoExecutor {
 method: "POST",
 parseResponseAsJson: false
 });
-throw new
+throw new WorkflowAbort(steps[0].stepName, steps[0]);
 }
 const result = await this.context.qstashClient.batchJSON(
 steps.map((singleStep, index) => {
@@ -1258,7 +1370,7 @@ var AutoExecutor = class _AutoExecutor {
 };
 })
 });
-throw new
+throw new WorkflowAbort(steps[0].stepName, steps[0]);
 }
 /**
 * Get the promise by executing the lazt steps list. If there is a single
@@ -1283,7 +1395,7 @@ var AutoExecutor = class _AutoExecutor {
 } else if (Array.isArray(result) && lazyStepList.length === result.length && index < lazyStepList.length) {
 return result[index];
 } else {
-throw new
+throw new WorkflowError(
 `Unexpected parallel call result while executing step ${index}: '${result}'. Expected ${lazyStepList.length} many items`
 );
 }
@@ -1295,12 +1407,12 @@ var AutoExecutor = class _AutoExecutor {
 };
 var validateStep = (lazyStep, stepFromRequest) => {
 if (lazyStep.stepName !== stepFromRequest.stepName) {
-throw new
+throw new WorkflowError(
 `Incompatible step name. Expected '${lazyStep.stepName}', got '${stepFromRequest.stepName}' from the request`
 );
 }
 if (lazyStep.stepType !== stepFromRequest.stepType) {
-throw new
+throw new WorkflowError(
 `Incompatible step type. Expected '${lazyStep.stepType}', got '${stepFromRequest.stepType}' from the request`
 );
 }
@@ -1311,12 +1423,12 @@ var validateParallelSteps = (lazySteps, stepsFromRequest) => {
 validateStep(lazySteps[index], stepFromRequest);
 }
 } catch (error) {
-if (error instanceof
+if (error instanceof WorkflowError) {
 const lazyStepNames = lazySteps.map((lazyStep) => lazyStep.stepName);
 const lazyStepTypes = lazySteps.map((lazyStep) => lazyStep.stepType);
 const requestStepNames = stepsFromRequest.map((step) => step.stepName);
 const requestStepTypes = stepsFromRequest.map((step) => step.stepType);
-throw new
+throw new WorkflowError(
 `Incompatible steps detected in parallel execution: ${error.message}
 > Step Names from the request: ${JSON.stringify(requestStepNames)}
 Step Types from the request: ${JSON.stringify(requestStepTypes)}
@@ -1429,10 +1541,6 @@ var WorkflowContext = class {
 * headers of the initial request
 */
 headers;
-/**
-* initial payload as a raw string
-*/
-rawInitialPayload;
 /**
 * Map of environment variables and their values.
 *
@@ -1467,7 +1575,6 @@ var WorkflowContext = class {
 failureUrl,
 debug,
 initialPayload,
-rawInitialPayload,
 env,
 retries
 }) {
@@ -1478,7 +1585,6 @@ var WorkflowContext = class {
 this.failureUrl = failureUrl;
 this.headers = headers;
 this.requestPayload = initialPayload;
-this.rawInitialPayload = rawInitialPayload ?? JSON.stringify(this.requestPayload);
 this.env = env ?? {};
 this.retries = retries ?? DEFAULT_RETRIES;
 this.executor = new AutoExecutor(this, this.steps, debug);
@@ -1499,7 +1605,7 @@ var WorkflowContext = class {
 * const [result1, result2] = await Promise.all([
 * context.run("step 1", () => {
 * return "result1"
-* })
+* }),
 * context.run("step 2", async () => {
 * return await fetchResults()
 * })
@@ -1517,6 +1623,10 @@ var WorkflowContext = class {
 /**
 * Stops the execution for the duration provided.
 *
+* ```typescript
+* await context.sleep('sleep1', 3) // wait for three seconds
+* ```
+*
 * @param stepName
 * @param duration sleep duration in seconds
 * @returns undefined
@@ -1527,6 +1637,10 @@ var WorkflowContext = class {
 /**
 * Stops the execution until the date time provided.
 *
+* ```typescript
+* await context.sleepUntil('sleep1', Date.now() / 1000 + 3) // wait for three seconds
+* ```
+*
 * @param stepName
 * @param datetime time to sleep until. Can be provided as a number (in unix seconds),
 * as a Date object or a string (passed to `new Date(datetimeString)`)
@@ -1550,7 +1664,7 @@ var WorkflowContext = class {
 * const { status, body } = await context.call<string>(
 * "post call step",
 * {
-* url:
+* url: "https://www.some-endpoint.com/api",
 * method: "POST",
 * body: "my-payload"
 * }
@@ -1604,45 +1718,43 @@ var WorkflowContext = class {
 }
 }
 /**
-*
-* timeout ends
+* Pauses workflow execution until a specific event occurs or a timeout is reached.
 *
-
-* const
-*
-*
-
-* );
-* ```
+*```ts
+* const result = await workflow.waitForEvent("payment-confirmed", {
+* timeout: "5m"
+* });
+*```
 *
-* To notify a waiting workflow
+* To notify a waiting workflow:
 *
 * ```ts
 * import { Client } from "@upstash/workflow";
 *
-* const client = new Client({ token: });
+* const client = new Client({ token: "<QSTASH_TOKEN>" });
 *
 * await client.notify({
-* eventId: "
-*
+* eventId: "payment.confirmed",
+* data: {
+* amount: 99.99,
+* currency: "USD"
+* }
 * })
 * ```
 *
+* Alternatively, you can use the `context.notify` method.
+*
 * @param stepName
-* @param eventId
-* @param
-* @returns
-* timeout
-* is the
+* @param eventId - Unique identifier for the event to wait for
+* @param options - Configuration options.
+* @returns `{ timeout: boolean, eventData: unknown }`.
+* The `timeout` property specifies if the workflow has timed out. The `eventData`
+* is the data passed when notifying this workflow of an event.
 */
-async waitForEvent(stepName, eventId,
-const
-
-
-eventId,
-typeof timeout === "string" ? timeout : `${timeout}s`
-)
-);
+async waitForEvent(stepName, eventId, options = {}) {
+const { timeout = "7d" } = options;
+const timeoutStr = typeof timeout === "string" ? timeout : `${timeout}s`;
+const result = await this.addStep(new LazyWaitForEventStep(stepName, eventId, timeoutStr));
 try {
 return {
 ...result,
@@ -1652,6 +1764,27 @@ var WorkflowContext = class {
 return result;
 }
 }
+/**
+* Notify workflow runs waiting for an event
+*
+* ```ts
+* const { eventId, eventData, notifyResponse } = await context.notify(
+* "notify step", "event-id", "event-data"
+* );
+* ```
+*
+* Upon `context.notify`, the workflow runs waiting for the given eventId (context.waitForEvent)
+* will receive the given event data and resume execution.
+*
+* The response includes the same eventId and eventData. Additionally, there is
+* a notifyResponse field which contains a list of `Waiter` objects, each corresponding
+* to a notified workflow run.
+*
+* @param stepName
+* @param eventId event id to notify
+* @param eventData event data to notify with
+* @returns notify response which has event id, event data and list of waiters which were notified
+*/
 async notify(stepName, eventId, eventData) {
 const result = await this.addStep(
 new LazyNotifyStep(stepName, eventId, eventData, this.qstashClient.http)
@@ -1665,6 +1798,15 @@ var WorkflowContext = class {
 return result;
 }
 }
+/**
+* Cancel the current workflow run
+*
+* Will throw WorkflowAbort to stop workflow execution.
+* Shouldn't be inside try/catch.
+*/
+async cancel() {
+throw new WorkflowAbort("cancel", void 0, true);
+}
 /**
 * Adds steps to the executor. Needed so that it can be overwritten in
 * DisabledWorkflowContext.
@@ -1705,7 +1847,8 @@ var WorkflowLogger = class _WorkflowLogger {
 }
 writeToConsole(logEntry) {
 const JSON_SPACING = 2;
-console.
+const logMethod = logEntry.logLevel === "ERROR" ? console.error : logEntry.logLevel === "WARN" ? console.warn : console.log;
+logMethod(JSON.stringify(logEntry, void 0, JSON_SPACING));
 }
 shouldLog(level) {
 return LOG_LEVELS.indexOf(level) >= LOG_LEVELS.indexOf(this.options.logLevel);
@@ -1723,11 +1866,13 @@ var WorkflowLogger = class _WorkflowLogger {
 };
 
 // src/utils.ts
-import crypto from "node:crypto";
 var NANOID_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_";
 var NANOID_LENGTH = 21;
+function getRandomInt() {
+return Math.floor(Math.random() * NANOID_CHARS.length);
+}
 function nanoid() {
-return
+return Array.from({ length: NANOID_LENGTH }).map(() => NANOID_CHARS[getRandomInt()]).join("");
 }
 function getWorkflowRunId(id) {
 return `wfr_${id ?? nanoid()}`;
@@ -1745,6 +1890,63 @@ function decodeBase64(base64) {
 }
 }
 
+// src/serve/authorization.ts
+import { Client } from "@upstash/qstash";
+var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
+static disabledMessage = "disabled-qstash-worklfow-run";
+/**
+* overwrite the WorkflowContext.addStep method to always raise WorkflowAbort
+* error in order to stop the execution whenever we encounter a step.
+*
+* @param _step
+*/
+async addStep(_step) {
+throw new WorkflowAbort(_DisabledWorkflowContext.disabledMessage);
+}
+/**
+* overwrite cancel method to do nothing
+*/
+async cancel() {
+return;
+}
+/**
+* copies the passed context to create a DisabledWorkflowContext. Then, runs the
+* route function with the new context.
+*
+* - returns "run-ended" if there are no steps found or
+* if the auth failed and user called `return`
+* - returns "step-found" if DisabledWorkflowContext.addStep is called.
+* - if there is another error, returns the error.
+*
+* @param routeFunction
+*/
+static async tryAuthentication(routeFunction, context) {
+const disabledContext = new _DisabledWorkflowContext({
+qstashClient: new Client({
+baseUrl: "disabled-client",
+token: "disabled-client"
+}),
+workflowRunId: context.workflowRunId,
+headers: context.headers,
+steps: [],
+url: context.url,
+failureUrl: context.failureUrl,
+initialPayload: context.requestPayload,
+env: context.env,
+retries: context.retries
+});
+try {
+await routeFunction(disabledContext);
+} catch (error) {
+if (error instanceof WorkflowAbort && error.stepName === this.disabledMessage) {
+return ok("step-found");
+}
+return err(error);
+}
+return ok("run-ended");
+}
+};
+
 // src/workflow-parser.ts
 var getPayload = async (request) => {
 try {
@@ -1753,8 +1955,8 @@ var getPayload = async (request) => {
 return;
 }
 };
-var
-const [encodedInitialPayload, ...encodedSteps] =
+var processRawSteps = (rawSteps) => {
+const [encodedInitialPayload, ...encodedSteps] = rawSteps;
 const rawInitialPayload = decodeBase64(encodedInitialPayload.body);
 const initialStep = {
 stepId: 0,
@@ -1764,27 +1966,21 @@ var parsePayload = async (rawPayload, debug) => {
 concurrent: NO_CONCURRENCY
 };
 const stepsToDecode = encodedSteps.filter((step) => step.callType === "step");
-const otherSteps =
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-};
-step.out = newOut;
-}
-return step;
-})
-);
+const otherSteps = stepsToDecode.map((rawStep) => {
+const step = JSON.parse(decodeBase64(rawStep.body));
+try {
+step.out = JSON.parse(step.out);
+} catch {
+}
+if (step.waitEventId) {
+const newOut = {
+eventData: step.out ? decodeBase64(step.out) : void 0,
+timeout: step.waitTimeout ?? false
+};
+step.out = newOut;
+}
+return step;
+});
 const steps = [initialStep, ...otherSteps];
 return {
 rawInitialPayload,
@@ -1832,20 +2028,20 @@ var validateRequest = (request) => {
 const versionHeader = request.headers.get(WORKFLOW_PROTOCOL_VERSION_HEADER);
 const isFirstInvocation = !versionHeader;
 if (!isFirstInvocation && versionHeader !== WORKFLOW_PROTOCOL_VERSION) {
-throw new
+throw new WorkflowError(
 `Incompatible workflow sdk protocol version. Expected ${WORKFLOW_PROTOCOL_VERSION}, got ${versionHeader} from the request.`
 );
 }
 const workflowRunId = isFirstInvocation ? getWorkflowRunId() : request.headers.get(WORKFLOW_ID_HEADER) ?? "";
 if (workflowRunId.length === 0) {
-throw new
+throw new WorkflowError("Couldn't get workflow id from header");
 }
 return {
 isFirstInvocation,
 workflowRunId
 };
 };
-var parseRequest = async (requestPayload, isFirstInvocation, debug) => {
+var parseRequest = async (requestPayload, isFirstInvocation, workflowRunId, requester, messageId, debug) => {
 if (isFirstInvocation) {
 return {
 rawInitialPayload: requestPayload ?? "",
@@ -1853,10 +2049,18 @@ var parseRequest = async (requestPayload, isFirstInvocation, debug) => {
 isLastDuplicate: false
 };
 } else {
+let rawSteps;
 if (!requestPayload) {
-
+await debug?.log(
+"INFO",
+"ENDPOINT_START",
+"request payload is empty, steps will be fetched from QStash."
+);
+rawSteps = await getSteps(requester, workflowRunId, messageId, debug);
+} else {
+rawSteps = JSON.parse(requestPayload);
 }
-const { rawInitialPayload, steps } =
+const { rawInitialPayload, steps } = processRawSteps(rawSteps);
 const isLastDuplicate = await checkIfLastOneIsDuplicate(steps, debug);
 const deduplicatedSteps = deduplicateSteps(steps);
 return {
@@ -1866,13 +2070,13 @@ var parseRequest = async (requestPayload, isFirstInvocation, debug) => {
 };
 }
 };
-var handleFailure = async (request, requestPayload, qstashClient, initialPayloadParser, failureFunction, debug) => {
+var handleFailure = async (request, requestPayload, qstashClient, initialPayloadParser, routeFunction, failureFunction, debug) => {
 if (request.headers.get(WORKFLOW_FAILURE_HEADER) !== "true") {
 return ok("not-failure-callback");
 }
 if (!failureFunction) {
 return err(
-new
+new WorkflowError(
 "Workflow endpoint is called to handle a failure, but a failureFunction is not provided in serve options. Either provide a failureUrl or a failureFunction."
 )
 );
@@ -1883,82 +2087,38 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
 );
 const decodedBody = body ? decodeBase64(body) : "{}";
 const errorPayload = JSON.parse(decodedBody);
-const {
-rawInitialPayload,
-steps,
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-isLastDuplicate: _isLastDuplicate
-} = await parseRequest(decodeBase64(sourceBody), false, debug);
 const workflowContext = new WorkflowContext({
 qstashClient,
 workflowRunId,
-initialPayload: initialPayloadParser(
-rawInitialPayload,
+initialPayload: initialPayloadParser(decodeBase64(sourceBody)),
 headers: recreateUserHeaders(new Headers(sourceHeader)),
-steps,
+steps: [],
 url,
 failureUrl: url,
 debug
 });
-
+const authCheck = await DisabledWorkflowContext.tryAuthentication(
+routeFunction,
+workflowContext
+);
+if (authCheck.isErr()) {
+await debug?.log("ERROR", "ERROR", { error: authCheck.error.message });
+throw authCheck.error;
+} else if (authCheck.value === "run-ended") {
+return err(new WorkflowError("Not authorized to run the failure function."));
+}
+await failureFunction({
+context: workflowContext,
+failStatus: status,
+failResponse: errorPayload.message,
+failHeaders: header
+});
 } catch (error) {
 return err(error);
 }
 return ok("is-failure-callback");
 };
 
-// src/serve/authorization.ts
-import { Client } from "@upstash/qstash";
-var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
-static disabledMessage = "disabled-qstash-worklfow-run";
-/**
-* overwrite the WorkflowContext.addStep method to always raise QStashWorkflowAbort
-* error in order to stop the execution whenever we encounter a step.
-*
-* @param _step
-*/
-async addStep(_step) {
-throw new QStashWorkflowAbort(_DisabledWorkflowContext.disabledMessage);
-}
-/**
-* copies the passed context to create a DisabledWorkflowContext. Then, runs the
-* route function with the new context.
-*
-* - returns "run-ended" if there are no steps found or
-* if the auth failed and user called `return`
-* - returns "step-found" if DisabledWorkflowContext.addStep is called.
-* - if there is another error, returns the error.
-*
-* @param routeFunction
-*/
-static async tryAuthentication(routeFunction, context) {
-const disabledContext = new _DisabledWorkflowContext({
-qstashClient: new Client({
-baseUrl: "disabled-client",
-token: "disabled-client"
-}),
-workflowRunId: context.workflowRunId,
-headers: context.headers,
-steps: [],
-url: context.url,
-failureUrl: context.failureUrl,
-initialPayload: context.requestPayload,
-rawInitialPayload: context.rawInitialPayload,
-env: context.env,
-retries: context.retries
-});
-try {
-await routeFunction(disabledContext);
-} catch (error) {
-if (error instanceof QStashWorkflowAbort && error.stepName === this.disabledMessage) {
-return ok("step-found");
-}
-return err(error);
-}
-return ok("run-ended");
-}
-};
-
 // src/serve/options.ts
 import { Receiver } from "@upstash/qstash";
 import { Client as Client2 } from "@upstash/qstash";
@@ -2051,6 +2211,9 @@ var serve = (routeFunction, options) => {
 const { rawInitialPayload, steps, isLastDuplicate } = await parseRequest(
 requestPayload,
 isFirstInvocation,
+workflowRunId,
+qstashClient.http,
+request.headers.get("upstash-message-id"),
 debug
 );
 if (isLastDuplicate) {
@@ -2061,6 +2224,7 @@ var serve = (routeFunction, options) => {
 requestPayload,
 qstashClient,
 initialPayloadParser,
+routeFunction,
 failureFunction
 );
 if (failureCheck.isErr()) {
@@ -2073,7 +2237,6 @@ var serve = (routeFunction, options) => {
 qstashClient,
 workflowRunId,
 initialPayload: initialPayloadParser(rawInitialPayload),
-rawInitialPayload,
 headers: recreateUserHeaders(request.headers),
 steps,
 url: workflowUrl,
@@ -2111,7 +2274,11 @@ var serve = (routeFunction, options) => {
 onStep: async () => routeFunction(workflowContext),
 onCleanup: async () => {
 await triggerWorkflowDelete(workflowContext, debug);
-}
+},
+onCancel: async () => {
+await makeCancelRequest(workflowContext.qstashClient.http, workflowRunId);
+},
+debug
 });
 if (result.isErr()) {
 await debug?.log("ERROR", "ERROR", { error: result.error.message });
@@ -2142,30 +2309,88 @@ var Client3 = class {
 client;
 constructor(clientConfig) {
 if (!clientConfig.token) {
-console.
+console.error(
+"QStash token is required for Upstash Workflow!\n\nTo fix this:\n1. Get your token from the Upstash Console (https://console.upstash.com/qstash)\n2. Initialize the workflow client with:\n\n const client = new Client({\n token: '<YOUR_QSTASH_TOKEN>'\n });"
+);
 }
 this.client = new QStashClient(clientConfig);
 }
 /**
 * Cancel an ongoing workflow
 *
+* Returns true if workflow is canceled succesfully. Otherwise, throws error.
+*
+* There are multiple ways you can cancel workflows:
+* - pass one or more workflow run ids to cancel them
+* - pass a workflow url to cancel all runs starting with this url
+* - cancel all pending or active workflow runs
+*
+* ### Cancel a set of workflow runs
+*
 * ```ts
-*
+* // cancel a single workflow
+* await client.cancel({ ids: "<WORKFLOW_RUN_ID>" })
 *
-*
-* await client.cancel({
+* // cancel a set of workflow runs
+* await client.cancel({ ids: [
+* "<WORKFLOW_RUN_ID_1>",
+* "<WORKFLOW_RUN_ID_2>",
+* ]})
+* ```
+*
+* ### Cancel workflows starting with a url
+*
+* If you have an endpoint called `https://your-endpoint.com` and you
+* want to cancel all workflow runs on it, you can use `urlStartingWith`.
+*
+* Note that this will cancel workflows in all endpoints under
+* `https://your-endpoint.com`.
+*
+* ```ts
+* await client.cancel({ urlStartingWith: "https://your-endpoint.com" })
 * ```
 *
-*
+* ### Cancel *all* workflows
+*
+* To cancel all pending and currently running workflows, you can
+* do it like this:
+*
+* ```ts
+* await client.cancel({ all: true })
+* ```
+*
+* @param ids run id of the workflow to delete
+* @param urlStartingWith cancel workflows starting with this url. Will be ignored
+* if `ids` parameter is set.
+* @param all set to true in order to cancel all workflows. Will be ignored
+* if `ids` or `urlStartingWith` parameters are set.
 * @returns true if workflow is succesfully deleted. Otherwise throws QStashError
 */
-async cancel({
+async cancel({
+ids,
+urlStartingWith,
+all
+}) {
+let body;
+if (ids) {
+const runIdArray = typeof ids === "string" ? [ids] : ids;
+body = JSON.stringify({ workflowRunIds: runIdArray });
+} else if (urlStartingWith) {
+body = JSON.stringify({ workflowUrl: urlStartingWith });
+} else if (all) {
+body = "{}";
+} else {
+throw new TypeError("The `cancel` method cannot be called without any options.");
+}
 const result = await this.client.http.request({
-path: ["v2", "workflows", "runs"
+path: ["v2", "workflows", "runs"],
 method: "DELETE",
-
+body,
+headers: {
+"Content-Type": "application/json"
+}
 });
-return result
+return result;
 }
 /**
 * Notify a workflow run waiting for an event
@@ -2210,13 +2435,13 @@ var Client3 = class {
 * Trigger new workflow run and returns the workflow run id
 *
 * ```ts
-* const { workflowRunId } await client.trigger({
+* const { workflowRunId } = await client.trigger({
 * url: "https://workflow-endpoint.com",
-* body: "hello there!",
-* headers: { ... },
-* workflowRunId: "my-workflow", //
-* retries: 3
-* })
+* body: "hello there!", // Optional body
+* headers: { ... }, // Optional headers
+* workflowRunId: "my-workflow", // Optional workflow run ID
+* retries: 3 // Optional retries for the initial request
+* });
 *
 * console.log(workflowRunId)
 * // wfr_my-workflow
@@ -2264,8 +2489,8 @@ export {
 __require,
 __commonJS,
 __toESM,
-
-
+WorkflowError,
+WorkflowAbort,
 StepTypes,
 WorkflowContext,
 WorkflowLogger,
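
The hunks above add run cancellation end to end (the new `context.cancel`, `makeCancelRequest`, the `onCancel` handler in `serve`, and the richer `Client.cancel` options) and change `waitForEvent` to take an options object. The sketch below pulls those pieces together for reference; it is not part of the diff. The endpoint URL, token placeholder, and route logic are illustrative, and the `@upstash/workflow/nextjs` entry point is assumed from the `nextjs.js` files in the file list; the method names and option shapes come from the JSDoc shown in the hunks.

```ts
import { serve } from "@upstash/workflow/nextjs"; // assumed framework entry point
import { Client } from "@upstash/workflow";

// Hypothetical workflow endpoint demonstrating the 0.2.0 context methods.
export const POST = serve<{ abort: boolean }>(async (context) => {
  if (context.requestPayload.abort) {
    // New in 0.2.0: throws WorkflowAbort with cancelWorkflow = true,
    // so it must not be wrapped in try/catch (see the JSDoc above).
    await context.cancel();
  }

  // waitForEvent now takes an options object; timeout defaults to "7d".
  const { timeout, eventData } = await context.waitForEvent(
    "wait for confirmation",
    "payment.confirmed",
    { timeout: "5m" }
  );

  if (!timeout) {
    await context.run("handle event", () => eventData);
  }
});

// Cancelling runs from outside a workflow with the extended Client.cancel options.
async function cancelRuns() {
  const client = new Client({ token: "<QSTASH_TOKEN>" });

  await client.cancel({ ids: ["<WORKFLOW_RUN_ID_1>", "<WORKFLOW_RUN_ID_2>"] }); // specific runs
  await client.cancel({ urlStartingWith: "https://your-endpoint.com" });        // all runs on an endpoint
  await client.cancel({ all: true });                                           // every pending/active run
}
```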