@upstash/qstash 2.7.7 → 2.7.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/{chunk-3SZNLYUN.mjs → chunk-QK55BUNQ.mjs} +24 -9
- package/{chunk-FBNCATUF.js → chunk-R5CZPV7H.js} +52 -37
- package/{chunk-SDSVXZLR.mjs → chunk-UPFTIDSI.mjs} +1 -1
- package/{chunk-XJKBRKZE.js → chunk-YBZBGHDQ.js} +3 -3
- package/{client-BsCzCprQ.d.mts → client-DkrYCqaq.d.mts} +3 -3
- package/{client-BsCzCprQ.d.ts → client-DkrYCqaq.d.ts} +3 -3
- package/cloudflare.d.mts +1 -1
- package/cloudflare.d.ts +1 -1
- package/cloudflare.js +2 -2
- package/cloudflare.mjs +1 -1
- package/h3.d.mts +1 -1
- package/h3.d.ts +1 -1
- package/h3.js +4 -4
- package/h3.mjs +3 -3
- package/hono.d.mts +1 -1
- package/hono.d.ts +1 -1
- package/hono.js +2 -2
- package/hono.mjs +1 -1
- package/index.d.mts +16 -3
- package/index.d.ts +16 -3
- package/index.js +5 -3
- package/index.mjs +4 -2
- package/nextjs.d.mts +1 -1
- package/nextjs.d.ts +1 -1
- package/nextjs.js +6 -6
- package/nextjs.mjs +1 -1
- package/nuxt.js +4 -4
- package/nuxt.mjs +3 -3
- package/package.json +1 -1
- package/solidjs.d.mts +1 -1
- package/solidjs.d.ts +1 -1
- package/solidjs.js +4 -4
- package/solidjs.mjs +2 -2
- package/svelte.d.mts +1 -1
- package/svelte.d.ts +1 -1
- package/svelte.js +4 -4
- package/svelte.mjs +2 -2
- package/workflow.d.mts +1 -1
- package/workflow.d.ts +1 -1
- package/workflow.js +2 -2
- package/workflow.mjs +1 -1
- /package/{chunk-S7JMIMW4.mjs → chunk-CIVGPRQN.mjs} +0 -0
- /package/{chunk-IJ5AEYLN.js → chunk-VN7YQ2UN.js} +0 -0
package/README.md
CHANGED
@@ -2,7 +2,8 @@
 
 
 
-> [!NOTE]
+> [!NOTE]
+> **This project is in GA Stage.**
 > The Upstash Professional Support fully covers this project. It receives regular updates, and bug fixes.
 > The Upstash team is committed to maintaining and improving its functionality.
 
package/{chunk-3SZNLYUN.mjs → chunk-QK55BUNQ.mjs}
CHANGED
@@ -481,7 +481,7 @@ var Chat = class _Chat {
   // Helper method to get the authorization token
   getAuthorizationToken() {
     const authHeader = String(this.http.authorization);
-    const match =
+    const match = /Bearer (.+)/.exec(authHeader);
     if (!match) {
       throw new Error("Invalid authorization header format");
     }
@@ -670,6 +670,18 @@ var NANOID_LENGTH = 21;
 function nanoid() {
   return [...crypto.getRandomValues(new Uint8Array(NANOID_LENGTH))].map((x) => NANOID_CHARS[x % NANOID_CHARS.length]).join("");
 }
+function decodeBase64(base64) {
+  try {
+    const binString = atob(base64);
+    const intArray = Uint8Array.from(binString, (m) => m.codePointAt(0));
+    return new TextDecoder().decode(intArray);
+  } catch (error) {
+    console.warn(
+      `Upstash Qstash: Failed while decoding base64 "${base64}". Decoding with atob and returning it instead. Error: ${error}`
+    );
+    return atob(base64);
+  }
+}
 
 // src/client/queue.ts
 var Queue = class {
@@ -1129,8 +1141,10 @@ var Client = class {
   */
   async events(request) {
     const query = {};
-    if (request?.cursor && request.cursor > 0) {
+    if (typeof request?.cursor === "number" && request.cursor > 0) {
       query.cursor = request.cursor.toString();
+    } else if (typeof request?.cursor === "string" && request.cursor !== "") {
+      query.cursor = request.cursor;
     }
     for (const [key, value] of Object.entries(request?.filter ?? {})) {
       if (typeof value === "number" && value < 0) {
@@ -1616,11 +1630,11 @@ var handleThirdPartyCallResult = async (request, requestPayload, client, workflo
     if (!(callbackMessage.status >= 200 && callbackMessage.status < 300)) {
       await debug?.log("WARN", "SUBMIT_THIRD_PARTY_RESULT", {
         status: callbackMessage.status,
-        body:
+        body: decodeBase64(callbackMessage.body)
       });
       console.warn(
         `Workflow Warning: "context.call" failed with status ${callbackMessage.status} and will retry (if there are retries remaining). Error Message:
-${
+${decodeBase64(callbackMessage.body)}`
       );
       return ok("call-will-retry");
     }
@@ -1656,7 +1670,7 @@ ${atob(callbackMessage.body)}`
       stepId: Number(stepIdString),
       stepName,
       stepType,
-      out:
+      out: decodeBase64(callbackMessage.body),
       concurrent: Number(concurrentString)
     };
     await debug?.log("SUBMIT", "SUBMIT_THIRD_PARTY_RESULT", {
@@ -2599,7 +2613,7 @@ var getPayload = async (request) => {
 };
 var parsePayload = (rawPayload) => {
   const [encodedInitialPayload, ...encodedSteps] = JSON.parse(rawPayload);
-  const rawInitialPayload =
+  const rawInitialPayload = decodeBase64(encodedInitialPayload.body);
   const initialStep = {
     stepId: 0,
     stepName: "init",
@@ -2609,7 +2623,7 @@ var parsePayload = (rawPayload) => {
   };
   const stepsToDecode = encodedSteps.filter((step) => step.callType === "step");
   const otherSteps = stepsToDecode.map((rawStep) => {
-    return JSON.parse(
+    return JSON.parse(decodeBase64(rawStep.body));
   });
   const steps = [initialStep, ...otherSteps];
   return {
@@ -2707,13 +2721,13 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
   const { status, header, body, url, sourceHeader, sourceBody, workflowRunId } = JSON.parse(
     requestPayload
   );
-  const decodedBody = body ?
+  const decodedBody = body ? decodeBase64(body) : "{}";
   const errorPayload = JSON.parse(decodedBody);
   const {
     rawInitialPayload,
     steps,
     isLastDuplicate: _isLastDuplicate
-  } = await parseRequest(
+  } = await parseRequest(decodeBase64(sourceBody), false, debug);
   const workflowContext = new WorkflowContext({
     qstashClient,
     workflowRunId,
@@ -2924,6 +2938,7 @@ export {
   custom,
   Chat,
   Messages,
+  decodeBase64,
   Schedules,
   UrlGroups,
   StepTypes,
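Why the new decodeBase64 helper matters: atob() maps each decoded byte to a single Latin-1 code point, so multi-byte UTF-8 payloads (for example step results containing accented or non-Latin text) previously came back garbled; the helper reassembles the bytes and decodes them as UTF-8, falling back to plain atob() if that fails. A minimal TypeScript sketch of the same technique; the encodeUtf8Base64 helper and the sample string are illustrative only and not part of the package:

// Unicode-safe base64 decode, mirroring the helper added in this release.
function decodeBase64Utf8(base64: string): string {
  const binString = atob(base64); // one character per byte
  const bytes = Uint8Array.from(binString, (char) => char.codePointAt(0)!);
  return new TextDecoder().decode(bytes); // interpret the bytes as UTF-8
}

// Demo-only inverse, used here just to produce a UTF-8 base64 payload.
function encodeUtf8Base64(text: string): string {
  const bytes = new TextEncoder().encode(text);
  return btoa(String.fromCharCode(...bytes));
}

const encoded = encodeUtf8Base64("café ✓");
console.log(atob(encoded));             // mojibake: UTF-8 bytes read as Latin-1
console.log(decodeBase64Utf8(encoded)); // "café ✓"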
package/{chunk-FBNCATUF.js → chunk-R5CZPV7H.js}
CHANGED
@@ -481,7 +481,7 @@ var Chat = (_class2 = class _Chat {
   // Helper method to get the authorization token
   getAuthorizationToken() {
     const authHeader = String(this.http.authorization);
-    const match =
+    const match = /Bearer (.+)/.exec(authHeader);
     if (!match) {
       throw new Error("Invalid authorization header format");
     }
@@ -670,6 +670,18 @@ var NANOID_LENGTH = 21;
 function nanoid() {
   return [...crypto.getRandomValues(new Uint8Array(NANOID_LENGTH))].map((x) => NANOID_CHARS[x % NANOID_CHARS.length]).join("");
 }
+function decodeBase64(base64) {
+  try {
+    const binString = atob(base64);
+    const intArray = Uint8Array.from(binString, (m) => m.codePointAt(0));
+    return new TextDecoder().decode(intArray);
+  } catch (error) {
+    console.warn(
+      `Upstash Qstash: Failed while decoding base64 "${base64}". Decoding with atob and returning it instead. Error: ${error}`
+    );
+    return atob(base64);
+  }
+}
 
 // src/client/queue.ts
 var Queue = class {
@@ -1129,10 +1141,12 @@ var Client = class {
   */
   async events(request) {
     const query = {};
-    if (_optionalChain([request, 'optionalAccess', _31 => _31.cursor]) && request.cursor > 0) {
+    if (typeof _optionalChain([request, 'optionalAccess', _31 => _31.cursor]) === "number" && request.cursor > 0) {
       query.cursor = request.cursor.toString();
+    } else if (typeof _optionalChain([request, 'optionalAccess', _32 => _32.cursor]) === "string" && request.cursor !== "") {
+      query.cursor = request.cursor;
     }
-    for (const [key, value] of Object.entries(_nullishCoalesce(_optionalChain([request, 'optionalAccess',
+    for (const [key, value] of Object.entries(_nullishCoalesce(_optionalChain([request, 'optionalAccess', _33 => _33.filter]), () => ( {})))) {
       if (typeof value === "number" && value < 0) {
         continue;
       }
@@ -1556,7 +1570,7 @@ var triggerFirstInvocation = async (workflowContext, retries, debug) => {
     workflowContext.failureUrl,
     retries
   );
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _34 => _34.log, 'call', _35 => _35("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
     headers,
     requestPayload: workflowContext.requestPayload,
     url: workflowContext.url
@@ -1588,7 +1602,7 @@ var triggerRouteFunction = async ({
   }
 };
 var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _36 => _36.log, 'call', _37 => _37("SUBMIT", "SUBMIT_CLEANUP", {
     deletedWorkflowRunId: workflowContext.workflowRunId
   })]);
   const result = await workflowContext.qstashClient.http.request({
@@ -1596,7 +1610,7 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
     method: "DELETE",
     parseResponseAsJson: false
   });
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _38 => _38.log, 'call', _39 => _39("SUBMIT", "SUBMIT_CLEANUP", result)]);
 };
 var recreateUserHeaders = (headers) => {
   const filteredHeaders = new Headers();
@@ -1614,13 +1628,13 @@ var handleThirdPartyCallResult = async (request, requestPayload, client, workflo
   if (request.headers.get("Upstash-Workflow-Callback")) {
     const callbackMessage = JSON.parse(requestPayload);
     if (!(callbackMessage.status >= 200 && callbackMessage.status < 300)) {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _40 => _40.log, 'call', _41 => _41("WARN", "SUBMIT_THIRD_PARTY_RESULT", {
         status: callbackMessage.status,
-        body:
+        body: decodeBase64(callbackMessage.body)
       })]);
       console.warn(
         `Workflow Warning: "context.call" failed with status ${callbackMessage.status} and will retry (if there are retries remaining). Error Message:
-${
+${decodeBase64(callbackMessage.body)}`
       );
       return ok("call-will-retry");
     }
@@ -1656,10 +1670,10 @@ ${atob(callbackMessage.body)}`
       stepId: Number(stepIdString),
       stepName,
      stepType,
-      out:
+      out: decodeBase64(callbackMessage.body),
       concurrent: Number(concurrentString)
     };
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _42 => _42.log, 'call', _43 => _43("SUBMIT", "SUBMIT_THIRD_PARTY_RESULT", {
       step: callResultStep,
       headers: requestHeaders,
       url: workflowUrl
@@ -1670,7 +1684,7 @@ ${atob(callbackMessage.body)}`
       body: callResultStep,
       url: workflowUrl
     });
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _44 => _44.log, 'call', _45 => _45("SUBMIT", "SUBMIT_THIRD_PARTY_RESULT", {
       messageId: result.messageId
     })]);
     return ok("is-call-return");
@@ -1702,14 +1716,14 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
   };
   if (userHeaders) {
     for (const header of userHeaders.keys()) {
-      if (_optionalChain([step, 'optionalAccess',
+      if (_optionalChain([step, 'optionalAccess', _46 => _46.callHeaders])) {
         baseHeaders[`Upstash-Callback-Forward-${header}`] = userHeaders.get(header);
       } else {
         baseHeaders[`Upstash-Forward-${header}`] = userHeaders.get(header);
       }
     }
   }
-  if (_optionalChain([step, 'optionalAccess',
+  if (_optionalChain([step, 'optionalAccess', _47 => _47.callHeaders])) {
     const forwardedHeaders = Object.fromEntries(
       Object.entries(step.callHeaders).map(([header, value]) => [
         `Upstash-Forward-${header}`,
@@ -1855,7 +1869,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
     if (this.stepCount < this.nonPlanStepCount) {
       const step = this.steps[this.stepCount + this.planStepCount];
       validateStep(lazyStep, step);
-      await _optionalChain([this, 'access',
+      await _optionalChain([this, 'access', _48 => _48.debug, 'optionalAccess', _49 => _49.log, 'call', _50 => _50("INFO", "RUN_SINGLE", {
         fromRequest: true,
         step,
         stepCount: this.stepCount
@@ -1863,7 +1877,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
       return step.out;
     }
     const resultStep = await lazyStep.getResultStep(NO_CONCURRENCY, this.stepCount);
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _51 => _51.debug, 'optionalAccess', _52 => _52.log, 'call', _53 => _53("INFO", "RUN_SINGLE", {
      fromRequest: false,
      step: resultStep,
      stepCount: this.stepCount
@@ -1882,13 +1896,13 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
     const initialStepCount = this.stepCount - (parallelSteps.length - 1);
     const parallelCallState = this.getParallelCallState(parallelSteps.length, initialStepCount);
     const sortedSteps = sortSteps(this.steps);
-    const plannedParallelStepCount = _optionalChain([sortedSteps, 'access',
+    const plannedParallelStepCount = _optionalChain([sortedSteps, 'access', _54 => _54[initialStepCount + this.planStepCount], 'optionalAccess', _55 => _55.concurrent]);
     if (parallelCallState !== "first" && plannedParallelStepCount !== parallelSteps.length) {
       throw new QStashWorkflowError(
         `Incompatible number of parallel steps when call state was '${parallelCallState}'. Expected ${parallelSteps.length}, got ${plannedParallelStepCount} from the request.`
       );
     }
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _56 => _56.debug, 'optionalAccess', _57 => _57.log, 'call', _58 => _58("INFO", "RUN_PARALLEL", {
       parallelCallState,
       initialStepCount,
       plannedParallelStepCount,
@@ -1967,7 +1981,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
       return "first";
     } else if (remainingSteps.length >= 2 * parallelStepCount) {
       return "last";
-    } else if (_optionalChain([remainingSteps, 'access',
+    } else if (_optionalChain([remainingSteps, 'access', _59 => _59.at, 'call', _60 => _60(-1), 'optionalAccess', _61 => _61.targetStep])) {
       return "partial";
     } else {
       return "discard";
@@ -1984,7 +1998,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
         `Unable to submit steps to QStash. Provided list is empty. Current step: ${this.stepCount}`
       );
     }
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _62 => _62.debug, 'optionalAccess', _63 => _63.log, 'call', _64 => _64("SUBMIT", "SUBMIT_STEP", { length: steps.length, steps })]);
     const result = await this.context.qstashClient.batchJSON(
       steps.map((singleStep) => {
         const headers = getHeaders(
@@ -2025,7 +2039,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
        );
       })
     );
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _65 => _65.debug, 'optionalAccess', _66 => _66.log, 'call', _67 => _67("INFO", "SUBMIT_STEP", {
       messageIds: result.map((message) => {
         return {
           message: message.messageId
@@ -2599,7 +2613,7 @@ var getPayload = async (request) => {
 };
 var parsePayload = (rawPayload) => {
   const [encodedInitialPayload, ...encodedSteps] = JSON.parse(rawPayload);
-  const rawInitialPayload =
+  const rawInitialPayload = decodeBase64(encodedInitialPayload.body);
   const initialStep = {
     stepId: 0,
     stepName: "init",
@@ -2609,7 +2623,7 @@ var parsePayload = (rawPayload) => {
   };
   const stepsToDecode = encodedSteps.filter((step) => step.callType === "step");
   const otherSteps = stepsToDecode.map((rawStep) => {
-    return JSON.parse(
+    return JSON.parse(decodeBase64(rawStep.body));
   });
   const steps = [initialStep, ...otherSteps];
   return {
@@ -2647,7 +2661,7 @@ var checkIfLastOneIsDuplicate = async (steps, debug) => {
     const step = steps[index];
     if (step.stepId === lastStepId && step.targetStep === lastTargetStepId) {
       const message = `QStash Workflow: The step '${step.stepName}' with id '${step.stepId}' has run twice during workflow execution. Rest of the workflow will continue running as usual.`;
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _68 => _68.log, 'call', _69 => _69("WARN", "RESPONSE_DEFAULT", message)]);
       console.warn(message);
       return true;
     }
@@ -2707,13 +2721,13 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
   const { status, header, body, url, sourceHeader, sourceBody, workflowRunId } = JSON.parse(
     requestPayload
   );
-  const decodedBody = body ?
+  const decodedBody = body ? decodeBase64(body) : "{}";
   const errorPayload = JSON.parse(decodedBody);
   const {
     rawInitialPayload,
     steps,
     isLastDuplicate: _isLastDuplicate
-  } = await parseRequest(
+  } = await parseRequest(decodeBase64(sourceBody), false, debug);
   const workflowContext = new WorkflowContext({
     qstashClient,
     workflowRunId,
@@ -2734,7 +2748,7 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
 
 // src/client/workflow/serve.ts
 var processOptions = (options) => {
-  const environment = _nullishCoalesce(_optionalChain([options, 'optionalAccess',
+  const environment = _nullishCoalesce(_optionalChain([options, 'optionalAccess', _70 => _70.env]), () => ( (typeof process === "undefined" ? {} : process.env)));
   const receiverEnvironmentVariablesSet = Boolean(
     environment.QSTASH_CURRENT_SIGNING_KEY && environment.QSTASH_NEXT_SIGNING_KEY
   );
@@ -2790,7 +2804,7 @@ var serve = (routeFunction, options) => {
     return baseUrl + (path || "");
   }) : initialWorkflowUrl;
   if (workflowUrl !== initialWorkflowUrl) {
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _71 => _71.log, 'call', _72 => _72("WARN", "ENDPOINT_START", {
       warning: `QStash Workflow: replacing the base of the url with "${baseUrl}" and using it as workflow endpoint.`,
       originalURL: initialWorkflowUrl,
       updatedURL: workflowUrl
@@ -2799,7 +2813,7 @@ var serve = (routeFunction, options) => {
   const workflowFailureUrl = failureFunction ? workflowUrl : failureUrl;
   const requestPayload = await _asyncNullishCoalesce(await getPayload(request), async () => ( ""));
   await verifyRequest(requestPayload, request.headers.get("upstash-signature"), receiver);
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _73 => _73.log, 'call', _74 => _74("INFO", "ENDPOINT_START")]);
   const failureCheck = await handleFailure(
     request,
     requestPayload,
@@ -2810,11 +2824,11 @@ var serve = (routeFunction, options) => {
   if (failureCheck.isErr()) {
     throw failureCheck.error;
   } else if (failureCheck.value === "is-failure-callback") {
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _75 => _75.log, 'call', _76 => _76("WARN", "RESPONSE_DEFAULT", "failureFunction executed")]);
     return onStepFinish("no-workflow-id", "failure-callback");
   }
   const { isFirstInvocation, workflowRunId } = validateRequest(request);
-  _optionalChain([debug, 'optionalAccess',
+  _optionalChain([debug, 'optionalAccess', _77 => _77.setWorkflowRunId, 'call', _78 => _78(workflowRunId)]);
   const { rawInitialPayload, steps, isLastDuplicate } = await parseRequest(
     requestPayload,
     isFirstInvocation,
@@ -2840,7 +2854,7 @@ var serve = (routeFunction, options) => {
     workflowContext
   );
   if (authCheck.isErr()) {
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _79 => _79.log, 'call', _80 => _80("ERROR", "ERROR", { error: authCheck.error.message })]);
     throw authCheck.error;
   } else if (authCheck.value === "run-ended") {
     return onStepFinish("no-workflow-id", "auth-fail");
@@ -2855,7 +2869,7 @@ var serve = (routeFunction, options) => {
     debug
   );
   if (callReturnCheck.isErr()) {
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _81 => _81.log, 'call', _82 => _82("ERROR", "SUBMIT_THIRD_PARTY_RESULT", {
       error: callReturnCheck.error.message
     })]);
     throw callReturnCheck.error;
@@ -2867,13 +2881,13 @@ var serve = (routeFunction, options) => {
       }
     });
     if (result.isErr()) {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _83 => _83.log, 'call', _84 => _84("ERROR", "ERROR", { error: result.error.message })]);
      throw result.error;
    }
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _85 => _85.log, 'call', _86 => _86("INFO", "RESPONSE_WORKFLOW")]);
     return onStepFinish(workflowContext.workflowRunId, "success");
   }
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _87 => _87.log, 'call', _88 => _88("INFO", "RESPONSE_DEFAULT")]);
   return onStepFinish("no-workflow-id", "fromCallback");
 };
 return async (request) => {
@@ -2934,4 +2948,5 @@ var Workflow = class {
 
 
 
-
+
+exports.SignatureError = SignatureError; exports.Receiver = Receiver; exports.QstashError = QstashError; exports.QstashRatelimitError = QstashRatelimitError; exports.QstashChatRatelimitError = QstashChatRatelimitError; exports.QstashDailyRatelimitError = QstashDailyRatelimitError; exports.QStashWorkflowError = QStashWorkflowError; exports.QStashWorkflowAbort = QStashWorkflowAbort; exports.formatWorkflowError = formatWorkflowError; exports.setupAnalytics = setupAnalytics; exports.upstash = upstash; exports.openai = openai; exports.custom = custom; exports.Chat = Chat; exports.Messages = Messages; exports.decodeBase64 = decodeBase64; exports.Schedules = Schedules; exports.UrlGroups = UrlGroups; exports.StepTypes = StepTypes; exports.WorkflowContext = WorkflowContext; exports.DisabledWorkflowContext = DisabledWorkflowContext; exports.WorkflowLogger = WorkflowLogger; exports.processOptions = processOptions; exports.serve = serve; exports.Workflow = Workflow; exports.Client = Client;
package/{chunk-XJKBRKZE.js → chunk-YBZBGHDQ.js}
CHANGED
@@ -1,7 +1,7 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // node_modules/defu/dist/defu.mjs
 function isPlainObject(value) {
@@ -340,7 +340,7 @@ var verifySignatureH3 = (handler, config) => {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
     );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -391,7 +391,7 @@ var serve2 = (routeFunction, options) => {
       body: await readRawBody(event),
       method: "POST"
     });
-    const serveHandler =
+    const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
     return await serveHandler(request);
   });
   return handler;
package/{client-BsCzCprQ.d.mts → client-DkrYCqaq.d.mts}
CHANGED
@@ -80,7 +80,7 @@ type EventPayload = Omit<Event, "urlGroup"> & {
   topicName: string;
 };
 type GetEventsPayload = {
-  cursor?:
+  cursor?: string;
   events: EventPayload[];
 };
 type WithCursor<T> = T & {
@@ -1668,7 +1668,7 @@ type PublishJsonRequest = Omit<PublishRequest, "body"> & {
   body: unknown;
 };
 type EventsRequest = {
-  cursor?: number;
+  cursor?: string | number;
   filter?: EventsRequestFilter;
 };
 type EventsRequestFilter = {
@@ -1685,7 +1685,7 @@ type EventsRequestFilter = {
   count?: number;
 };
 type GetEventsResponse = {
-  cursor?:
+  cursor?: string;
   events: Event[];
 };
 type QueueRequest = {
package/{client-BsCzCprQ.d.ts → client-DkrYCqaq.d.ts}
CHANGED
@@ -80,7 +80,7 @@ type EventPayload = Omit<Event, "urlGroup"> & {
   topicName: string;
 };
 type GetEventsPayload = {
-  cursor?:
+  cursor?: string;
   events: EventPayload[];
 };
 type WithCursor<T> = T & {
@@ -1668,7 +1668,7 @@ type PublishJsonRequest = Omit<PublishRequest, "body"> & {
   body: unknown;
 };
 type EventsRequest = {
-  cursor?: number;
+  cursor?: string | number;
   filter?: EventsRequestFilter;
 };
 type EventsRequestFilter = {
@@ -1685,7 +1685,7 @@ type EventsRequestFilter = {
   count?: number;
 };
 type GetEventsResponse = {
-  cursor?:
+  cursor?: string;
   events: Event[];
 };
 type QueueRequest = {
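Because EventsRequest.cursor is now string | number and GetEventsResponse.cursor is a string, pagination code should treat the cursor as an opaque token rather than a number. A hedged sketch of a pagination loop against the published Client; the token placeholder and the loop shape are illustrative, while the events() request and response shapes come from the types above:

import { Client } from "@upstash/qstash";
import type { Event } from "@upstash/qstash";

const client = new Client({ token: "<QSTASH_TOKEN>" }); // placeholder token

// Collect every event page; the server now returns string cursors,
// while numeric cursors from older callers are still accepted.
async function listAllEvents(): Promise<Event[]> {
  const all: Event[] = [];
  let cursor: string | number | undefined;
  do {
    const page = cursor ? await client.events({ cursor }) : await client.events();
    all.push(...page.events);
    cursor = page.cursor; // string | undefined after this release
  } while (cursor);
  return all;
}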
package/cloudflare.d.mts
CHANGED
package/cloudflare.d.ts
CHANGED
package/cloudflare.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/cloudflare.ts
 var getArgs = (args) => {
@@ -24,7 +24,7 @@ var getArgs = (args) => {
 var serve2 = (routeFunction, options) => {
   const handler = async (...args) => {
     const { request, env } = getArgs(args);
-    const serveHandler =
+    const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, {
       env,
       ...options
     });
package/cloudflare.mjs
CHANGED
package/h3.d.mts
CHANGED
@@ -1,6 +1,6 @@
 import * as h3 from 'h3';
 import { H3Event } from 'h3';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/h3.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import * as h3 from 'h3';
 import { H3Event } from 'h3';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/h3.js
CHANGED
@@ -1,10 +1,10 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
 
-var
-require('./chunk-
-require('./chunk-
+var _chunkYBZBGHDQjs = require('./chunk-YBZBGHDQ.js');
+require('./chunk-VN7YQ2UN.js');
+require('./chunk-R5CZPV7H.js');
 
 
 
-exports.serve =
+exports.serve = _chunkYBZBGHDQjs.serve; exports.verifySignatureH3 = _chunkYBZBGHDQjs.verifySignatureH3;
package/h3.mjs
CHANGED
@@ -1,9 +1,9 @@
 import {
   serve,
   verifySignatureH3
-} from "./chunk-
-import "./chunk-
-import "./chunk-
+} from "./chunk-UPFTIDSI.mjs";
+import "./chunk-CIVGPRQN.mjs";
+import "./chunk-QK55BUNQ.mjs";
 export {
   serve,
   verifySignatureH3
package/hono.d.mts
CHANGED
package/hono.d.ts
CHANGED
package/hono.js
CHANGED
@@ -1,13 +1,13 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/hono.ts
 var serve2 = (routeFunction, options) => {
   const handler = async (context) => {
     const environment = context.env;
     const request = context.req.raw;
-    const serveHandler =
+    const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, {
       // when hono is used without cf workers, it sends a DebugHTTPServer
       // object in `context.env`. don't pass env if this is the case:
       env: "QSTASH_TOKEN" in environment ? environment : void 0,
package/hono.mjs
CHANGED
package/index.d.mts
CHANGED
@@ -1,5 +1,5 @@
-import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-
-export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-
+import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-DkrYCqaq.mjs';
+export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 /**
@@ -51,4 +51,17 @@ declare class QStashWorkflowAbort extends Error {
  */
 declare const formatWorkflowError: (error: unknown) => FailureFunctionPayload;
 
-
+/**
+ * When the base64 string has unicode characters, atob doesn't decode
+ * them correctly since it only outputs ASCII characters. Therefore,
+ * instead of using atob, we properly decode them.
+ *
+ * If the decoding into unicode somehow fails, returns the result of atob
+ *
+ * https://developer.mozilla.org/en-US/docs/Glossary/Base64#the_unicode_problem
+ *
+ * @param base64 encoded string
+ */
+declare function decodeBase64(base64: string): string;
+
+export { ChatRateLimit, QStashWorkflowAbort, QStashWorkflowError, QstashChatRatelimitError, QstashDailyRatelimitError, QstashError, QstashRatelimitError, RateLimit, decodeBase64, formatWorkflowError };
package/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-
-export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-
+import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-DkrYCqaq.js';
+export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 /**
@@ -51,4 +51,17 @@ declare class QStashWorkflowAbort extends Error {
  */
 declare const formatWorkflowError: (error: unknown) => FailureFunctionPayload;
 
-
+/**
+ * When the base64 string has unicode characters, atob doesn't decode
+ * them correctly since it only outputs ASCII characters. Therefore,
+ * instead of using atob, we properly decode them.
+ *
+ * If the decoding into unicode somehow fails, returns the result of atob
+ *
+ * https://developer.mozilla.org/en-US/docs/Glossary/Base64#the_unicode_problem
+ *
+ * @param base64 encoded string
+ */
+declare function decodeBase64(base64: string): string;
+
+export { ChatRateLimit, QStashWorkflowAbort, QStashWorkflowError, QstashChatRatelimitError, QstashDailyRatelimitError, QstashError, QstashRatelimitError, RateLimit, decodeBase64, formatWorkflowError };
package/index.js
CHANGED
@@ -1,4 +1,4 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true});require('./chunk-
+"use strict";Object.defineProperty(exports, "__esModule", {value: true});require('./chunk-VN7YQ2UN.js');
 
 
 
@@ -18,8 +18,8 @@
 
 
 
-var _chunkFBNCATUFjs = require('./chunk-FBNCATUF.js');
 
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 
 
@@ -38,4 +38,6 @@ var _chunkFBNCATUFjs = require('./chunk-FBNCATUF.js')
 
 
 
-
+
+
+exports.Chat = _chunkR5CZPV7Hjs.Chat; exports.Client = _chunkR5CZPV7Hjs.Client; exports.Messages = _chunkR5CZPV7Hjs.Messages; exports.QStashWorkflowAbort = _chunkR5CZPV7Hjs.QStashWorkflowAbort; exports.QStashWorkflowError = _chunkR5CZPV7Hjs.QStashWorkflowError; exports.QstashChatRatelimitError = _chunkR5CZPV7Hjs.QstashChatRatelimitError; exports.QstashDailyRatelimitError = _chunkR5CZPV7Hjs.QstashDailyRatelimitError; exports.QstashError = _chunkR5CZPV7Hjs.QstashError; exports.QstashRatelimitError = _chunkR5CZPV7Hjs.QstashRatelimitError; exports.Receiver = _chunkR5CZPV7Hjs.Receiver; exports.Schedules = _chunkR5CZPV7Hjs.Schedules; exports.SignatureError = _chunkR5CZPV7Hjs.SignatureError; exports.UrlGroups = _chunkR5CZPV7Hjs.UrlGroups; exports.custom = _chunkR5CZPV7Hjs.custom; exports.decodeBase64 = _chunkR5CZPV7Hjs.decodeBase64; exports.formatWorkflowError = _chunkR5CZPV7Hjs.formatWorkflowError; exports.openai = _chunkR5CZPV7Hjs.openai; exports.setupAnalytics = _chunkR5CZPV7Hjs.setupAnalytics; exports.upstash = _chunkR5CZPV7Hjs.upstash;
package/index.mjs
CHANGED
@@ -1,4 +1,4 @@
-import "./chunk-
+import "./chunk-CIVGPRQN.mjs";
 import {
   Chat,
   Client,
@@ -14,11 +14,12 @@ import {
   SignatureError,
   UrlGroups,
   custom,
+  decodeBase64,
   formatWorkflowError,
   openai,
   setupAnalytics,
   upstash
-} from "./chunk-
+} from "./chunk-QK55BUNQ.mjs";
 export {
   Chat,
   Client,
@@ -34,6 +35,7 @@ export {
   SignatureError,
   UrlGroups,
   custom,
+  decodeBase64,
   formatWorkflowError,
   openai,
   setupAnalytics,
package/nextjs.d.mts
CHANGED
@@ -1,6 +1,6 @@
 import { NextApiHandler } from 'next';
 import { NextRequest, NextFetchEvent, NextResponse } from 'next/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/nextjs.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { NextApiHandler } from 'next';
 import { NextRequest, NextFetchEvent, NextResponse } from 'next/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/nextjs.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/nextjs.ts
 var _server = require('next/server');
@@ -19,7 +19,7 @@ function verifySignature(handler, config) {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
     );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -71,7 +71,7 @@ function verifySignatureEdge(handler, config) {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
     );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -111,7 +111,7 @@ function verifySignatureAppRouter(handler, config) {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
     );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -139,7 +139,7 @@ function verifySignatureAppRouter(handler, config) {
   };
 }
 var serve2 = (routeFunction, options) => {
-  const handler =
+  const handler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, {
     onStepFinish: (workflowRunId) => new (0, _server.NextResponse)(JSON.stringify({ workflowRunId }), { status: 200 }),
     ...options
   });
@@ -148,7 +148,7 @@ var serve2 = (routeFunction, options) => {
   };
 };
 var servePagesRouter = (routeFunction, options) => {
-  const handler =
+  const handler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
   return async (req, res) => {
     if (_optionalChain([req, 'access', _10 => _10.method, 'optionalAccess', _11 => _11.toUpperCase, 'call', _12 => _12()]) !== "POST") {
       res.status(405).json("Only POST requests are allowed in worklfows");
package/nextjs.mjs
CHANGED
package/nuxt.js
CHANGED
@@ -1,11 +1,11 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
-var
-require('./chunk-
-require('./chunk-
+var _chunkYBZBGHDQjs = require('./chunk-YBZBGHDQ.js');
+require('./chunk-VN7YQ2UN.js');
+require('./chunk-R5CZPV7H.js');
 
 // platforms/nuxt.ts
-var verifySignatureNuxt =
+var verifySignatureNuxt = _chunkYBZBGHDQjs.verifySignatureH3;
 
 
 exports.verifySignatureNuxt = verifySignatureNuxt;
package/nuxt.mjs
CHANGED
@@ -1,8 +1,8 @@
 import {
   verifySignatureH3
-} from "./chunk-
-import "./chunk-
-import "./chunk-
+} from "./chunk-UPFTIDSI.mjs";
+import "./chunk-CIVGPRQN.mjs";
+import "./chunk-QK55BUNQ.mjs";
 
 // platforms/nuxt.ts
 var verifySignatureNuxt = verifySignatureH3;
package/package.json
CHANGED
@@ -1 +1 @@
-{"version":"v2.7.
+{"version":"v2.7.9","name":"@upstash/qstash","description":"Official Typescript client for QStash","author":"Andreas Thomas <dev@chronark.com>","license":"MIT","homepage":"https://github.com/upstash/sdk-qstash-ts#readme","repository":{"type":"git","url":"git+https://github.com/upstash/sdk-qstash-ts.git"},"bugs":{"url":"https://github.com/upstash/sdk-qstash-ts/issues"},"main":"./index.js","module":"./index.mjs","types":"./index.d.ts","files":["./**"],"exports":{".":{"import":"./index.mjs","require":"./index.js"},"./nextjs":{"import":"./nextjs.js","require":"./nextjs.js"},"./dist/nextjs":{"import":"./nextjs.js","require":"./nextjs.js"},"./h3":{"types":"./h3.d.ts","import":"./h3.mjs","require":"./h3.js"},"./nuxt":{"types":"./nuxt.d.ts","import":"./nuxt.mjs","require":"./nuxt.js"},"./svelte":{"types":"./svelte.d.ts","import":"./svelte.mjs","require":"./svelte.js"},"./solidjs":{"types":"./solidjs.d.ts","import":"./solidjs.mjs","require":"./solidjs.js"},"./workflow":{"types":"./workflow.d.ts","import":"./workflow.mjs","require":"./workflow.js"},"./hono":{"types":"./hono.d.ts","import":"./hono.mjs","require":"./hono.js"},"./cloudflare":{"types":"./cloudflare.d.ts","import":"./cloudflare.mjs","require":"./cloudflare.js"}},"typesVersions":{"*":{"nextjs":["./nextjs.d.ts"]}},"keywords":["qstash","queue","events","serverless","upstash"],"scripts":{"build":"tsup && cp README.md ./dist/ && cp package.json ./dist/ && cp LICENSE ./dist/","test":"bun test src","fmt":"prettier --write .","lint":"tsc && eslint \"src/**/*.{js,ts,tsx}\" --quiet --fix"},"devDependencies":{"@commitlint/cli":"^19.2.2","@commitlint/config-conventional":"^19.2.2","@eslint/eslintrc":"^3.1.0","@eslint/js":"^9.10.0","@solidjs/start":"^1.0.6","@sveltejs/kit":"^2.5.18","@types/bun":"^1.1.1","@types/crypto-js":"^4.2.0","@typescript-eslint/eslint-plugin":"^8.4.0","@typescript-eslint/parser":"^8.4.0","ai":"^3.1.28","bun-types":"^1.1.7","eslint":"^9.10.0","eslint-plugin-unicorn":"^51.0.1","h3":"^1.12.0","hono":"^4.5.8","husky":"^9.0.10","neverthrow":"^7.0.1","next":"^14.0.2","prettier":"^3.2.5","tsup":"latest","typescript":"^5.4.5","undici-types":"^6.16.0","vitest":"latest"},"dependencies":{"crypto-js":">=4.2.0","jose":"^5.2.3"}}
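The rewritten package.json keeps the conditional exports map above, so the framework adapters resolve through package subpaths rather than deep file paths. For illustration, and assuming a Node or bundler resolver that honors the exports field, imports like these map onto the files in this diff:

// Root entry resolves to ./index.mjs (import) or ./index.js (require).
import { Client, Receiver, decodeBase64 } from "@upstash/qstash";

// Adapter subpaths declared under "exports": "./h3", "./nuxt", "./svelte",
// "./solidjs", "./workflow", "./hono", "./cloudflare", "./nextjs".
import { verifySignatureH3 } from "@upstash/qstash/h3";
import { serve } from "@upstash/qstash/workflow";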
package/solidjs.d.mts
CHANGED
@@ -1,5 +1,5 @@
 import { APIHandler, APIEvent } from '@solidjs/start/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/solidjs.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { APIHandler, APIEvent } from '@solidjs/start/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/solidjs.js
CHANGED
@@ -1,8 +1,8 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }require('./chunk-
+"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }require('./chunk-VN7YQ2UN.js');
 
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/solidjs.ts
 var verifySignatureSolidjs = (handler, config) => {
@@ -14,7 +14,7 @@ var verifySignatureSolidjs = (handler, config) => {
   if (!nextSigningKey) {
     throw new Error("nextSigningKey is required, either in the config or from the env");
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -45,7 +45,7 @@ var serve2 = (routeFunction, options) => {
   if (method.toUpperCase() !== "POST") {
     return new Response("Only POST requests are allowed in worklfows", { status: 405 });
   }
-  const serveHandler =
+  const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
   return await serveHandler(event.request);
 };
 return handler;
package/solidjs.mjs
CHANGED
package/svelte.d.mts
CHANGED
@@ -1,5 +1,5 @@
 import { RequestHandler } from '@sveltejs/kit';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/svelte.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { RequestHandler } from '@sveltejs/kit';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/svelte.js
CHANGED
@@ -1,8 +1,8 @@
-"use strict";Object.defineProperty(exports, "__esModule", {value: true});require('./chunk-
+"use strict";Object.defineProperty(exports, "__esModule", {value: true});require('./chunk-VN7YQ2UN.js');
 
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/svelte.ts
 var verifySignatureSvelte = (handler, config) => {
@@ -14,7 +14,7 @@ var verifySignatureSvelte = (handler, config) => {
   if (!nextSigningKey) {
     throw new Error("nextSigningKey is required, either in the config or from the env");
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -42,7 +42,7 @@ var verifySignatureSvelte = (handler, config) => {
 };
 var serve2 = (routeFunction, options) => {
   const handler = async ({ request }) => {
-    const serveMethod =
+    const serveMethod = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
     return await serveMethod(request);
   };
   return handler;
package/svelte.mjs
CHANGED
package/workflow.d.mts
CHANGED
@@ -1,2 +1,2 @@
-export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-
+export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
package/workflow.d.ts
CHANGED
@@ -1,2 +1,2 @@
-export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-
+export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-DkrYCqaq.js';
 import 'neverthrow';
package/workflow.js
CHANGED
@@ -6,7 +6,7 @@
 
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 
 
@@ -15,4 +15,4 @@ var _chunkFBNCATUFjs = require('./chunk-FBNCATUF.js')
 
 
 
-exports.DisabledWorkflowContext =
+exports.DisabledWorkflowContext = _chunkR5CZPV7Hjs.DisabledWorkflowContext; exports.StepTypes = _chunkR5CZPV7Hjs.StepTypes; exports.Workflow = _chunkR5CZPV7Hjs.Workflow; exports.WorkflowContext = _chunkR5CZPV7Hjs.WorkflowContext; exports.WorkflowLogger = _chunkR5CZPV7Hjs.WorkflowLogger; exports.processOptions = _chunkR5CZPV7Hjs.processOptions; exports.serve = _chunkR5CZPV7Hjs.serve;
package/workflow.mjs
CHANGED
File without changes
File without changes