@upstash/workflow 0.1.1 → 0.1.2-omit-errors
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -4
- package/astro.d.mts +9 -0
- package/astro.d.ts +9 -0
- package/astro.js +2160 -0
- package/astro.mjs +18 -0
- package/{chunk-XPMFG3Q4.mjs → chunk-ZPVH5ACW.mjs} +70 -24
- package/cloudflare.d.mts +1 -1
- package/cloudflare.d.ts +1 -1
- package/cloudflare.js +76 -30
- package/cloudflare.mjs +1 -1
- package/h3.d.mts +1 -1
- package/h3.d.ts +1 -1
- package/h3.js +79 -30
- package/h3.mjs +4 -1
- package/hono.d.mts +1 -1
- package/hono.d.ts +1 -1
- package/hono.js +76 -30
- package/hono.mjs +1 -1
- package/index.d.mts +2 -2
- package/index.d.ts +2 -2
- package/index.js +77 -31
- package/index.mjs +1 -1
- package/nextjs.d.mts +1 -1
- package/nextjs.d.ts +1 -1
- package/nextjs.js +76 -30
- package/nextjs.mjs +1 -1
- package/package.json +1 -1
- package/solidjs.d.mts +1 -1
- package/solidjs.d.ts +1 -1
- package/solidjs.js +76 -30
- package/solidjs.mjs +1 -1
- package/svelte.d.mts +1 -1
- package/svelte.d.ts +1 -1
- package/svelte.js +76 -30
- package/svelte.mjs +1 -1
- package/{types-p7sxktVE.d.mts → types-CI-2skYU.d.mts} +5 -4
- package/{types-p7sxktVE.d.ts → types-CI-2skYU.d.ts} +5 -4
package/h3.js
CHANGED
@@ -253,6 +253,9 @@ function readRawBody(event, encoding = "utf8") {
     if (_resolved.constructor === Object) {
       return Buffer.from(JSON.stringify(_resolved));
     }
+    if (_resolved instanceof URLSearchParams) {
+      return Buffer.from(_resolved.toString());
+    }
     return Buffer.from(_resolved);
   });
   return encoding ? promise2.then((buff) => buff.toString(encoding)) : promise2;
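The new branch above handles form-encoded request bodies: a resolved URLSearchParams value previously fell through to Buffer.from(_resolved), which cannot accept it. A minimal TypeScript sketch of the patched conversion (illustrative only; bodyToBuffer is not part of the package):

// Mirrors the branch order in the patched readRawBody helper.
function bodyToBuffer(resolved: Record<string, unknown> | URLSearchParams | Uint8Array): Buffer {
  if (resolved.constructor === Object) {
    // plain objects are JSON-serialized, as before
    return Buffer.from(JSON.stringify(resolved));
  }
  if (resolved instanceof URLSearchParams) {
    // new: form-encoded bodies are serialized as "a=1&b=2"
    return Buffer.from(resolved.toString());
  }
  return Buffer.from(resolved as Uint8Array);
}

// bodyToBuffer(new URLSearchParams({ a: "1", b: "2" })).toString() === "a=1&b=2"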
@@ -802,6 +805,7 @@ var StepTypes = [
 ];

 // src/workflow-requests.ts
+var import_qstash2 = require("@upstash/qstash");
 var triggerFirstInvocation = async (workflowContext, retries, debug) => {
   const { headers } = getHeaders(
     "true",
@@ -812,27 +816,37 @@ var triggerFirstInvocation = async (workflowContext, retries, debug) => {
     workflowContext.failureUrl,
     retries
   );
-  await debug?.log("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
-    headers,
-    requestPayload: workflowContext.requestPayload,
-    url: workflowContext.url
-  });
   try {
-    await workflowContext.qstashClient.publishJSON({
+    const result = await workflowContext.qstashClient.publishJSON({
       headers,
       method: "POST",
       body: workflowContext.requestPayload,
       url: workflowContext.url
     });
+    await debug?.log("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
+      headers,
+      requestPayload: workflowContext.requestPayload,
+      url: workflowContext.url,
+      messageId: result.messageId
+    });
     return ok("success");
   } catch (error) {
     const error_ = error;
+    if (error instanceof import_qstash2.QstashError && error.message.includes("a workflow already exists, can not initialize a new one with same id")) {
+      await debug?.log("WARN", "SUBMIT_FIRST_INVOCATION", {
+        message: `Workflow run ${workflowContext.workflowRunId} already exists.`,
+        name: error.name,
+        originalMessage: error.message
+      });
+      return ok("workflow-run-already-exists");
+    }
     return err(error_);
   }
 };
 var triggerRouteFunction = async ({
   onCleanup,
-  onStep
+  onStep,
+  debug
 }) => {
   try {
     await onStep();
@@ -840,6 +854,14 @@ var triggerRouteFunction = async ({
     return ok("workflow-finished");
   } catch (error) {
     const error_ = error;
+    if (error instanceof import_qstash2.QstashError && error.message.includes("can not append to a a cancelled workflow")) {
+      await debug?.log("WARN", "RESPONSE_WORKFLOW", {
+        message: `tried to append to a cancelled workflow. exiting without publishing.`,
+        name: error.name,
+        originalMessage: error.message
+      });
+      return ok("workflow-was-finished");
+    }
     return error_ instanceof QStashWorkflowAbort ? ok("step-finished") : err(error_);
   }
 };
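The two catch blocks added above share one pattern: specific QStash errors are treated as benign, reported through the optional debug logger, and turned into ok() results ("workflow-run-already-exists", "workflow-was-finished") instead of failing the run. A hedged sketch of that classification (the helper and message list are illustrative, not exported by the package):

import { QstashError } from "@upstash/qstash";

// Message fragments matched by the bundled catch blocks.
// Note: "a a" reproduces the backend error text verbatim.
const BENIGN_MESSAGES = [
  "a workflow already exists, can not initialize a new one with same id",
  "can not append to a a cancelled workflow",
];

function isBenignQstashError(error: unknown): error is QstashError {
  if (!(error instanceof QstashError)) return false;
  const message = error.message;
  return BENIGN_MESSAGES.some((fragment) => message.includes(fragment));
}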
@@ -847,12 +869,29 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
   await debug?.log("SUBMIT", "SUBMIT_CLEANUP", {
     deletedWorkflowRunId: workflowContext.workflowRunId
   });
-  … (6 removed lines not shown in this diff view)
+  try {
+    await workflowContext.qstashClient.http.request({
+      path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
+      method: "DELETE",
+      parseResponseAsJson: false
+    });
+    await debug?.log(
+      "SUBMIT",
+      "SUBMIT_CLEANUP",
+      `workflow run ${workflowContext.workflowRunId} deleted.`
+    );
+    return { deleted: true };
+  } catch (error) {
+    if (error instanceof import_qstash2.QstashError && error.message.includes(`workflowRun ${workflowContext.workflowRunId} not found`)) {
+      await debug?.log("WARN", "SUBMIT_CLEANUP", {
+        message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
+        name: error.name,
+        originalMessage: error.message
+      });
+      return { deleted: false };
+    }
+    throw error;
+  }
 };
 var recreateUserHeaders = (headers) => {
   const filteredHeaders = new Headers();
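triggerWorkflowDelete now performs the DELETE call itself and tolerates a run that no longer exists. The request it issues can be sketched as below, assuming a configured @upstash/qstash Client; deleteWorkflowRun is an illustrative wrapper, and client.http.request mirrors the internal call visible in the hunk rather than a documented public API:

import { Client, QstashError } from "@upstash/qstash";

const client = new Client({ token: process.env.QSTASH_TOKEN! });

// Sketch of the cleanup request: DELETE /v2/workflows/runs/<runId>?cancel=<bool>
async function deleteWorkflowRun(workflowRunId: string, cancel = false): Promise<boolean> {
  try {
    await client.http.request({
      path: ["v2", "workflows", "runs", `${workflowRunId}?cancel=${cancel}`],
      method: "DELETE",
      parseResponseAsJson: false,
    });
    return true; // corresponds to { deleted: true }
  } catch (error) {
    if (error instanceof QstashError && error.message.includes(`workflowRun ${workflowRunId} not found`)) {
      return false; // corresponds to { deleted: false }; the bundle logs a WARN here
    }
    throw error; // anything else still propagates
  }
}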
@@ -908,15 +947,16 @@ ${atob(callbackMessage.body)}`
     failureUrl,
     retries
   );
+  const callResponse = {
+    status: callbackMessage.status,
+    body: atob(callbackMessage.body),
+    header: callbackMessage.header
+  };
   const callResultStep = {
     stepId: Number(stepIdString),
     stepName,
     stepType,
-    out: {
-      status: callbackMessage.status,
-      body: atob(callbackMessage.body),
-      header: callbackMessage.header
-    },
+    out: JSON.stringify(callResponse),
     concurrent: Number(concurrentString)
   };
   await debug?.log("SUBMIT", "SUBMIT_THIRD_PARTY_RESULT", {
@@ -950,10 +990,11 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
   const baseHeaders = {
     [WORKFLOW_INIT_HEADER]: initHeaderValue,
     [WORKFLOW_ID_HEADER]: workflowRunId,
-    [WORKFLOW_URL_HEADER]: workflowUrl,
-    [WORKFLOW_FEATURE_HEADER]: "WF_NoDelete",
-    [`Upstash-Forward-${WORKFLOW_PROTOCOL_VERSION_HEADER}`]: WORKFLOW_PROTOCOL_VERSION
+    [WORKFLOW_URL_HEADER]: workflowUrl
   };
+  if (!step?.callUrl) {
+    baseHeaders[`Upstash-Forward-${WORKFLOW_PROTOCOL_VERSION_HEADER}`] = WORKFLOW_PROTOCOL_VERSION;
+  }
   if (failureUrl) {
     if (!step?.callUrl) {
       baseHeaders[`Upstash-Failure-Callback-Forward-${WORKFLOW_FAILURE_HEADER}`] = "true";
@@ -962,6 +1003,7 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
   }
   if (step?.callUrl) {
     baseHeaders["Upstash-Retries"] = "0";
+    baseHeaders[WORKFLOW_FEATURE_HEADER] = "WF_NoDelete";
     if (retries) {
       baseHeaders["Upstash-Callback-Retries"] = retries.toString();
       baseHeaders["Upstash-Failure-Callback-Retries"] = retries.toString();
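Combined, the two getHeaders hunks change header placement: the protocol-version forward header is now set only for regular workflow requests, while the WF_NoDelete feature flag moves from every request onto third-party call requests only. A rough sketch of the resulting split (the constants are placeholders; their actual string values are defined elsewhere in the bundle):

// Placeholder declarations standing in for the bundle's header constants.
declare const WORKFLOW_FEATURE_HEADER: string;
declare const WORKFLOW_PROTOCOL_VERSION_HEADER: string;
declare const WORKFLOW_PROTOCOL_VERSION: string;

function sketchExtraHeaders(isCallStep: boolean): Record<string, string> {
  const headers: Record<string, string> = {};
  if (isCallStep) {
    // third-party call requests: retries disabled, WF_NoDelete scoped here
    headers["Upstash-Retries"] = "0";
    headers[WORKFLOW_FEATURE_HEADER] = "WF_NoDelete";
  } else {
    // regular workflow requests keep forwarding the SDK protocol version
    headers[`Upstash-Forward-${WORKFLOW_PROTOCOL_VERSION_HEADER}`] = WORKFLOW_PROTOCOL_VERSION;
  }
  return headers;
}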
@@ -1325,6 +1367,7 @@ var AutoExecutor = class _AutoExecutor {
       this.context.retries
     );
     const willWait = singleStep.concurrent === NO_CONCURRENCY || singleStep.stepId === 0;
+    singleStep.out = JSON.stringify(singleStep.out);
     return singleStep.callUrl ? (
       // if the step is a third party call, we call the third party
       // url (singleStep.callUrl) and pass information about the workflow
@@ -1986,7 +2029,8 @@ var WorkflowLogger = class _WorkflowLogger {
   }
   writeToConsole(logEntry) {
     const JSON_SPACING = 2;
-    console.log(JSON.stringify(logEntry, void 0, JSON_SPACING));
+    const logMethod = logEntry.logLevel === "ERROR" ? console.error : logEntry.logLevel === "WARN" ? console.warn : console.log;
+    logMethod(JSON.stringify(logEntry, void 0, JSON_SPACING));
   }
   shouldLog(level) {
     return LOG_LEVELS.indexOf(level) >= LOG_LEVELS.indexOf(this.options.logLevel);
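writeToConsole now picks the console method from the entry's log level instead of always using console.log. The selection is a plain nested ternary; an equivalent sketch:

// Illustrative only; the logger's own level set also includes values such as "SUBMIT".
function consoleMethodFor(logLevel: string): (message: string) => void {
  if (logLevel === "ERROR") return console.error;
  if (logLevel === "WARN") return console.warn;
  return console.log;
}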
@@ -2043,9 +2087,13 @@ var parsePayload = (rawPayload) => {
   const stepsToDecode = encodedSteps.filter((step) => step.callType === "step");
   const otherSteps = stepsToDecode.map((rawStep) => {
     const step = JSON.parse(decodeBase64(rawStep.body));
+    try {
+      step.out = JSON.parse(step.out);
+    } catch {
+    }
     if (step.waitEventId) {
       const newOut = {
-        eventData: step.out,
+        eventData: step.out ? decodeBase64(step.out) : void 0,
         timeout: step.waitTimeout ?? false
       };
       step.out = newOut;
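The parsePayload change closes the loop on the serialization changes earlier in this diff: step outputs are now JSON-stringified before submission (callResponse above, and singleStep.out in AutoExecutor) and re-parsed defensively when a payload is decoded, so values that are not valid JSON still pass through. A small sketch of that round trip (helper names are illustrative):

// Submission side: out is stored as a JSON string.
function encodeStepOut(out: unknown): string {
  return JSON.stringify(out);
}

// Parse side: mirror the new try/catch; if the value is not valid JSON
// (for example, a payload produced by an older client), keep it as-is.
function decodeStepOut(out: string): unknown {
  try {
    return JSON.parse(out);
  } catch {
    return out;
  }
}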
@@ -2175,7 +2223,7 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
 };

 // src/serve/authorization.ts
-var
+var import_qstash3 = require("@upstash/qstash");
 var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
   static disabledMessage = "disabled-qstash-worklfow-run";
   /**
@@ -2200,7 +2248,7 @@ var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowCon
    */
   static async tryAuthentication(routeFunction, context) {
     const disabledContext = new _DisabledWorkflowContext({
-      qstashClient: new
+      qstashClient: new import_qstash3.Client({
         baseUrl: "disabled-client",
         token: "disabled-client"
       }),
@@ -2227,15 +2275,15 @@ var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowCon
 };

 // src/serve/options.ts
-var import_qstash3 = require("@upstash/qstash");
 var import_qstash4 = require("@upstash/qstash");
+var import_qstash5 = require("@upstash/qstash");
 var processOptions = (options) => {
   const environment = options?.env ?? (typeof process === "undefined" ? {} : process.env);
   const receiverEnvironmentVariablesSet = Boolean(
     environment.QSTASH_CURRENT_SIGNING_KEY && environment.QSTASH_NEXT_SIGNING_KEY
   );
   return {
-    qstashClient: new
+    qstashClient: new import_qstash5.Client({
       baseUrl: environment.QSTASH_URL,
       token: environment.QSTASH_TOKEN
     }),
@@ -2256,7 +2304,7 @@ var processOptions = (options) => {
         throw error;
       }
     },
-    receiver: receiverEnvironmentVariablesSet ? new
+    receiver: receiverEnvironmentVariablesSet ? new import_qstash4.Receiver({
       currentSigningKey: environment.QSTASH_CURRENT_SIGNING_KEY,
       nextSigningKey: environment.QSTASH_NEXT_SIGNING_KEY
     }) : void 0,
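processOptions builds a Receiver only when both signing-key environment variables are present; otherwise the receiver stays undefined and signature verification is skipped. A sketch of that resolution with the standard QSTASH_* variables:

import { Receiver } from "@upstash/qstash";

const currentSigningKey = process.env.QSTASH_CURRENT_SIGNING_KEY;
const nextSigningKey = process.env.QSTASH_NEXT_SIGNING_KEY;

// Mirrors the bundled logic: only construct a Receiver when both keys are set.
const receiver =
  currentSigningKey && nextSigningKey
    ? new Receiver({ currentSigningKey, nextSigningKey })
    : undefined;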
@@ -2378,7 +2426,8 @@ var serve = (routeFunction, options) => {
       onStep: async () => routeFunction(workflowContext),
       onCleanup: async () => {
         await triggerWorkflowDelete(workflowContext, debug);
-      }
+      },
+      debug
     });
     if (result.isErr()) {
       await debug?.log("ERROR", "ERROR", { error: result.error.message });
@@ -2404,7 +2453,7 @@ var serve = (routeFunction, options) => {
 };

 // src/client/index.ts
-var
+var import_qstash6 = require("@upstash/qstash");

 // platforms/h3.ts
 function transformHeaders(headers) {
package/h3.mjs
CHANGED
@@ -1,6 +1,6 @@
 import {
   serve
-} from "./chunk-XPMFG3Q4.mjs";
+} from "./chunk-ZPVH5ACW.mjs";

 // node_modules/defu/dist/defu.mjs
 function isPlainObject(value) {
@@ -231,6 +231,9 @@ function readRawBody(event, encoding = "utf8") {
     if (_resolved.constructor === Object) {
       return Buffer.from(JSON.stringify(_resolved));
     }
+    if (_resolved instanceof URLSearchParams) {
+      return Buffer.from(_resolved.toString());
+    }
     return Buffer.from(_resolved);
   });
   return encoding ? promise2.then((buff) => buff.toString(encoding)) : promise2;
package/hono.d.mts
CHANGED
package/hono.d.ts
CHANGED
package/hono.js
CHANGED
@@ -493,6 +493,7 @@ var StepTypes = [
 ];

 // src/workflow-requests.ts
+var import_qstash2 = require("@upstash/qstash");
 var triggerFirstInvocation = async (workflowContext, retries, debug) => {
   const { headers } = getHeaders(
     "true",
@@ -503,27 +504,37 @@ var triggerFirstInvocation = async (workflowContext, retries, debug) => {
     workflowContext.failureUrl,
     retries
   );
-  await debug?.log("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
-    headers,
-    requestPayload: workflowContext.requestPayload,
-    url: workflowContext.url
-  });
   try {
-    await workflowContext.qstashClient.publishJSON({
+    const result = await workflowContext.qstashClient.publishJSON({
       headers,
       method: "POST",
       body: workflowContext.requestPayload,
       url: workflowContext.url
     });
+    await debug?.log("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
+      headers,
+      requestPayload: workflowContext.requestPayload,
+      url: workflowContext.url,
+      messageId: result.messageId
+    });
     return ok("success");
   } catch (error) {
     const error_ = error;
+    if (error instanceof import_qstash2.QstashError && error.message.includes("a workflow already exists, can not initialize a new one with same id")) {
+      await debug?.log("WARN", "SUBMIT_FIRST_INVOCATION", {
+        message: `Workflow run ${workflowContext.workflowRunId} already exists.`,
+        name: error.name,
+        originalMessage: error.message
+      });
+      return ok("workflow-run-already-exists");
+    }
     return err(error_);
   }
 };
 var triggerRouteFunction = async ({
   onCleanup,
-  onStep
+  onStep,
+  debug
 }) => {
   try {
     await onStep();
@@ -531,6 +542,14 @@ var triggerRouteFunction = async ({
     return ok("workflow-finished");
   } catch (error) {
     const error_ = error;
+    if (error instanceof import_qstash2.QstashError && error.message.includes("can not append to a a cancelled workflow")) {
+      await debug?.log("WARN", "RESPONSE_WORKFLOW", {
+        message: `tried to append to a cancelled workflow. exiting without publishing.`,
+        name: error.name,
+        originalMessage: error.message
+      });
+      return ok("workflow-was-finished");
+    }
     return error_ instanceof QStashWorkflowAbort ? ok("step-finished") : err(error_);
   }
 };
@@ -538,12 +557,29 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
   await debug?.log("SUBMIT", "SUBMIT_CLEANUP", {
     deletedWorkflowRunId: workflowContext.workflowRunId
   });
-  … (6 removed lines not shown in this diff view)
+  try {
+    await workflowContext.qstashClient.http.request({
+      path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
+      method: "DELETE",
+      parseResponseAsJson: false
+    });
+    await debug?.log(
+      "SUBMIT",
+      "SUBMIT_CLEANUP",
+      `workflow run ${workflowContext.workflowRunId} deleted.`
+    );
+    return { deleted: true };
+  } catch (error) {
+    if (error instanceof import_qstash2.QstashError && error.message.includes(`workflowRun ${workflowContext.workflowRunId} not found`)) {
+      await debug?.log("WARN", "SUBMIT_CLEANUP", {
+        message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
+        name: error.name,
+        originalMessage: error.message
+      });
+      return { deleted: false };
+    }
+    throw error;
+  }
 };
 var recreateUserHeaders = (headers) => {
   const filteredHeaders = new Headers();
@@ -599,15 +635,16 @@ ${atob(callbackMessage.body)}`
     failureUrl,
     retries
   );
+  const callResponse = {
+    status: callbackMessage.status,
+    body: atob(callbackMessage.body),
+    header: callbackMessage.header
+  };
   const callResultStep = {
     stepId: Number(stepIdString),
     stepName,
     stepType,
-    out: {
-      status: callbackMessage.status,
-      body: atob(callbackMessage.body),
-      header: callbackMessage.header
-    },
+    out: JSON.stringify(callResponse),
     concurrent: Number(concurrentString)
   };
   await debug?.log("SUBMIT", "SUBMIT_THIRD_PARTY_RESULT", {
@@ -641,10 +678,11 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
   const baseHeaders = {
     [WORKFLOW_INIT_HEADER]: initHeaderValue,
     [WORKFLOW_ID_HEADER]: workflowRunId,
-    [WORKFLOW_URL_HEADER]: workflowUrl,
-    [WORKFLOW_FEATURE_HEADER]: "WF_NoDelete",
-    [`Upstash-Forward-${WORKFLOW_PROTOCOL_VERSION_HEADER}`]: WORKFLOW_PROTOCOL_VERSION
+    [WORKFLOW_URL_HEADER]: workflowUrl
   };
+  if (!step?.callUrl) {
+    baseHeaders[`Upstash-Forward-${WORKFLOW_PROTOCOL_VERSION_HEADER}`] = WORKFLOW_PROTOCOL_VERSION;
+  }
   if (failureUrl) {
     if (!step?.callUrl) {
       baseHeaders[`Upstash-Failure-Callback-Forward-${WORKFLOW_FAILURE_HEADER}`] = "true";
@@ -653,6 +691,7 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
   }
   if (step?.callUrl) {
     baseHeaders["Upstash-Retries"] = "0";
+    baseHeaders[WORKFLOW_FEATURE_HEADER] = "WF_NoDelete";
     if (retries) {
       baseHeaders["Upstash-Callback-Retries"] = retries.toString();
       baseHeaders["Upstash-Failure-Callback-Retries"] = retries.toString();
@@ -1016,6 +1055,7 @@ var AutoExecutor = class _AutoExecutor {
       this.context.retries
     );
     const willWait = singleStep.concurrent === NO_CONCURRENCY || singleStep.stepId === 0;
+    singleStep.out = JSON.stringify(singleStep.out);
     return singleStep.callUrl ? (
       // if the step is a third party call, we call the third party
       // url (singleStep.callUrl) and pass information about the workflow
@@ -1677,7 +1717,8 @@ var WorkflowLogger = class _WorkflowLogger {
   }
   writeToConsole(logEntry) {
     const JSON_SPACING = 2;
-    console.log(JSON.stringify(logEntry, void 0, JSON_SPACING));
+    const logMethod = logEntry.logLevel === "ERROR" ? console.error : logEntry.logLevel === "WARN" ? console.warn : console.log;
+    logMethod(JSON.stringify(logEntry, void 0, JSON_SPACING));
   }
   shouldLog(level) {
     return LOG_LEVELS.indexOf(level) >= LOG_LEVELS.indexOf(this.options.logLevel);
@@ -1734,9 +1775,13 @@ var parsePayload = (rawPayload) => {
   const stepsToDecode = encodedSteps.filter((step) => step.callType === "step");
   const otherSteps = stepsToDecode.map((rawStep) => {
     const step = JSON.parse(decodeBase64(rawStep.body));
+    try {
+      step.out = JSON.parse(step.out);
+    } catch {
+    }
     if (step.waitEventId) {
       const newOut = {
-        eventData: step.out,
+        eventData: step.out ? decodeBase64(step.out) : void 0,
         timeout: step.waitTimeout ?? false
       };
       step.out = newOut;
@@ -1866,7 +1911,7 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
 };

 // src/serve/authorization.ts
-var
+var import_qstash3 = require("@upstash/qstash");
 var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
   static disabledMessage = "disabled-qstash-worklfow-run";
   /**
@@ -1891,7 +1936,7 @@ var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowCon
    */
   static async tryAuthentication(routeFunction, context) {
     const disabledContext = new _DisabledWorkflowContext({
-      qstashClient: new
+      qstashClient: new import_qstash3.Client({
         baseUrl: "disabled-client",
         token: "disabled-client"
       }),
@@ -1918,15 +1963,15 @@ var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowCon
 };

 // src/serve/options.ts
-var import_qstash3 = require("@upstash/qstash");
 var import_qstash4 = require("@upstash/qstash");
+var import_qstash5 = require("@upstash/qstash");
 var processOptions = (options) => {
   const environment = options?.env ?? (typeof process === "undefined" ? {} : process.env);
   const receiverEnvironmentVariablesSet = Boolean(
     environment.QSTASH_CURRENT_SIGNING_KEY && environment.QSTASH_NEXT_SIGNING_KEY
   );
   return {
-    qstashClient: new
+    qstashClient: new import_qstash5.Client({
       baseUrl: environment.QSTASH_URL,
       token: environment.QSTASH_TOKEN
     }),
@@ -1947,7 +1992,7 @@ var processOptions = (options) => {
         throw error;
       }
     },
-    receiver: receiverEnvironmentVariablesSet ? new
+    receiver: receiverEnvironmentVariablesSet ? new import_qstash4.Receiver({
       currentSigningKey: environment.QSTASH_CURRENT_SIGNING_KEY,
       nextSigningKey: environment.QSTASH_NEXT_SIGNING_KEY
     }) : void 0,
@@ -2069,7 +2114,8 @@ var serve = (routeFunction, options) => {
       onStep: async () => routeFunction(workflowContext),
       onCleanup: async () => {
        await triggerWorkflowDelete(workflowContext, debug);
-      }
+      },
+      debug
     });
     if (result.isErr()) {
       await debug?.log("ERROR", "ERROR", { error: result.error.message });
@@ -2095,7 +2141,7 @@ var serve = (routeFunction, options) => {
 };

 // src/client/index.ts
-var
+var import_qstash6 = require("@upstash/qstash");

 // platforms/hono.ts
 var serve2 = (routeFunction, options) => {
package/hono.mjs
CHANGED
package/index.d.mts
CHANGED
@@ -1,5 +1,5 @@
-import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-p7sxktVE.mjs';
-export { A as AsyncStepFunction, C as CallResponse, j as FailureFunctionPayload, F as FinishCondition, L as LogLevel, n as NotifyStepResponse, P as ParallelCallState, g as RawStep, k as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, l as WaitRequest, m as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, p as WorkflowLogger, o as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-p7sxktVE.mjs';
+import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-CI-2skYU.mjs';
+export { A as AsyncStepFunction, C as CallResponse, D as Duration, j as FailureFunctionPayload, F as FinishCondition, L as LogLevel, n as NotifyStepResponse, P as ParallelCallState, g as RawStep, k as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, l as WaitRequest, m as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, p as WorkflowLogger, o as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-CI-2skYU.mjs';
 import { Client as Client$1, QstashError } from '@upstash/qstash';

 /**
package/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-p7sxktVE.js';
-export { A as AsyncStepFunction, C as CallResponse, j as FailureFunctionPayload, F as FinishCondition, L as LogLevel, n as NotifyStepResponse, P as ParallelCallState, g as RawStep, k as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, l as WaitRequest, m as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, p as WorkflowLogger, o as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-p7sxktVE.js';
+import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-CI-2skYU.js';
+export { A as AsyncStepFunction, C as CallResponse, D as Duration, j as FailureFunctionPayload, F as FinishCondition, L as LogLevel, n as NotifyStepResponse, P as ParallelCallState, g as RawStep, k as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, l as WaitRequest, m as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, p as WorkflowLogger, o as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-CI-2skYU.js';
 import { Client as Client$1, QstashError } from '@upstash/qstash';

 /**
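The only visible type-level addition is the Duration re-export from the renamed types chunk. Its definition is not shown in this diff; a hedged usage sketch, assuming it is the string-duration type used elsewhere in the SDK:

import type { Duration } from "@upstash/workflow";

// Assumption: Duration is a short duration string such as "10s" or "1h";
// its exact definition lives in types-CI-2skYU, which this diff does not include.
const timeout = "10s" as Duration;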