@upstash/qstash 2.7.8 → 2.7.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -1
- package/{chunk-SFN2PBFB.mjs → chunk-QK55BUNQ.mjs} +4 -2
- package/{chunk-XY54CU56.js → chunk-R5CZPV7H.js} +31 -29
- package/{chunk-HS75QP5Z.mjs → chunk-UPFTIDSI.mjs} +1 -1
- package/{chunk-5OXFLS25.js → chunk-YBZBGHDQ.js} +3 -3
- package/{client-BsCzCprQ.d.mts → client-DkrYCqaq.d.mts} +3 -3
- package/{client-BsCzCprQ.d.ts → client-DkrYCqaq.d.ts} +3 -3
- package/cloudflare.d.mts +1 -1
- package/cloudflare.d.ts +1 -1
- package/cloudflare.js +2 -2
- package/cloudflare.mjs +1 -1
- package/h3.d.mts +1 -1
- package/h3.d.ts +1 -1
- package/h3.js +3 -3
- package/h3.mjs +2 -2
- package/hono.d.mts +1 -1
- package/hono.d.ts +1 -1
- package/hono.js +2 -2
- package/hono.mjs +1 -1
- package/index.d.mts +2 -2
- package/index.d.ts +2 -2
- package/index.js +2 -2
- package/index.mjs +1 -1
- package/nextjs.d.mts +1 -1
- package/nextjs.d.ts +1 -1
- package/nextjs.js +6 -6
- package/nextjs.mjs +1 -1
- package/nuxt.js +3 -3
- package/nuxt.mjs +2 -2
- package/package.json +1 -1
- package/solidjs.d.mts +1 -1
- package/solidjs.d.ts +1 -1
- package/solidjs.js +3 -3
- package/solidjs.mjs +1 -1
- package/svelte.d.mts +1 -1
- package/svelte.d.ts +1 -1
- package/svelte.js +3 -3
- package/svelte.mjs +1 -1
- package/workflow.d.mts +1 -1
- package/workflow.d.ts +1 -1
- package/workflow.js +2 -2
- package/workflow.mjs +1 -1
package/README.md
CHANGED
@@ -2,7 +2,8 @@
 
 
 
-> [!NOTE]
+> [!NOTE]
+> **This project is in GA Stage.**
 > The Upstash Professional Support fully covers this project. It receives regular updates, and bug fixes.
 > The Upstash team is committed to maintaining and improving its functionality.
 
package/{chunk-SFN2PBFB.mjs → chunk-QK55BUNQ.mjs}
@@ -481,7 +481,7 @@ var Chat = class _Chat {
   // Helper method to get the authorization token
   getAuthorizationToken() {
     const authHeader = String(this.http.authorization);
-    const match =
+    const match = /Bearer (.+)/.exec(authHeader);
     if (!match) {
       throw new Error("Invalid authorization header format");
     }
@@ -1141,8 +1141,10 @@ var Client = class {
   */
   async events(request) {
     const query = {};
-    if (request?.cursor && request.cursor > 0) {
+    if (typeof request?.cursor === "number" && request.cursor > 0) {
       query.cursor = request.cursor.toString();
+    } else if (typeof request?.cursor === "string" && request.cursor !== "") {
+      query.cursor = request.cursor;
     }
     for (const [key, value] of Object.entries(request?.filter ?? {})) {
       if (typeof value === "number" && value < 0) {
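The `events()` hunk above is the functional change in this release: the cursor query parameter may now be supplied as a string as well as a number. A minimal usage sketch against the public `Client` API (the token and cursor values below are placeholders, and `count` is the only filter field confirmed by this diff):

```ts
import { Client } from "@upstash/qstash";

// Placeholder token; use your own QSTASH_TOKEN in practice.
const client = new Client({ token: "<QSTASH_TOKEN>" });

// Numeric cursors still work and are converted with toString()...
await client.events({ cursor: 1700000000000 });

// ...and, per the diff above, a non-empty string cursor is now forwarded as-is.
await client.events({ cursor: "<next-page-cursor>", filter: { count: 10 } });
```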
package/{chunk-XY54CU56.js → chunk-R5CZPV7H.js}
@@ -481,7 +481,7 @@ var Chat = (_class2 = class _Chat {
   // Helper method to get the authorization token
   getAuthorizationToken() {
     const authHeader = String(this.http.authorization);
-    const match =
+    const match = /Bearer (.+)/.exec(authHeader);
     if (!match) {
       throw new Error("Invalid authorization header format");
     }
@@ -1141,10 +1141,12 @@ var Client = class {
   */
   async events(request) {
     const query = {};
-    if (_optionalChain([request, 'optionalAccess', _31 => _31.cursor]) && request.cursor > 0) {
+    if (typeof _optionalChain([request, 'optionalAccess', _31 => _31.cursor]) === "number" && request.cursor > 0) {
       query.cursor = request.cursor.toString();
+    } else if (typeof _optionalChain([request, 'optionalAccess', _32 => _32.cursor]) === "string" && request.cursor !== "") {
+      query.cursor = request.cursor;
     }
-    for (const [key, value] of Object.entries(_nullishCoalesce(_optionalChain([request, 'optionalAccess',
+    for (const [key, value] of Object.entries(_nullishCoalesce(_optionalChain([request, 'optionalAccess', _33 => _33.filter]), () => ( {})))) {
       if (typeof value === "number" && value < 0) {
         continue;
       }
@@ -1568,7 +1570,7 @@ var triggerFirstInvocation = async (workflowContext, retries, debug) => {
     workflowContext.failureUrl,
     retries
   );
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _34 => _34.log, 'call', _35 => _35("SUBMIT", "SUBMIT_FIRST_INVOCATION", {
     headers,
     requestPayload: workflowContext.requestPayload,
     url: workflowContext.url
@@ -1600,7 +1602,7 @@ var triggerRouteFunction = async ({
   }
 };
 var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _36 => _36.log, 'call', _37 => _37("SUBMIT", "SUBMIT_CLEANUP", {
     deletedWorkflowRunId: workflowContext.workflowRunId
   })]);
   const result = await workflowContext.qstashClient.http.request({
@@ -1608,7 +1610,7 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
     method: "DELETE",
     parseResponseAsJson: false
   });
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _38 => _38.log, 'call', _39 => _39("SUBMIT", "SUBMIT_CLEANUP", result)]);
 };
 var recreateUserHeaders = (headers) => {
   const filteredHeaders = new Headers();
@@ -1626,7 +1628,7 @@ var handleThirdPartyCallResult = async (request, requestPayload, client, workflo
   if (request.headers.get("Upstash-Workflow-Callback")) {
     const callbackMessage = JSON.parse(requestPayload);
     if (!(callbackMessage.status >= 200 && callbackMessage.status < 300)) {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _40 => _40.log, 'call', _41 => _41("WARN", "SUBMIT_THIRD_PARTY_RESULT", {
         status: callbackMessage.status,
         body: decodeBase64(callbackMessage.body)
       })]);
@@ -1671,7 +1673,7 @@ ${decodeBase64(callbackMessage.body)}`
       out: decodeBase64(callbackMessage.body),
       concurrent: Number(concurrentString)
     };
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _42 => _42.log, 'call', _43 => _43("SUBMIT", "SUBMIT_THIRD_PARTY_RESULT", {
       step: callResultStep,
       headers: requestHeaders,
       url: workflowUrl
@@ -1682,7 +1684,7 @@ ${decodeBase64(callbackMessage.body)}`
       body: callResultStep,
       url: workflowUrl
     });
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _44 => _44.log, 'call', _45 => _45("SUBMIT", "SUBMIT_THIRD_PARTY_RESULT", {
       messageId: result.messageId
     })]);
     return ok("is-call-return");
@@ -1714,14 +1716,14 @@ var getHeaders = (initHeaderValue, workflowRunId, workflowUrl, userHeaders, step
   };
   if (userHeaders) {
     for (const header of userHeaders.keys()) {
-      if (_optionalChain([step, 'optionalAccess',
+      if (_optionalChain([step, 'optionalAccess', _46 => _46.callHeaders])) {
         baseHeaders[`Upstash-Callback-Forward-${header}`] = userHeaders.get(header);
       } else {
         baseHeaders[`Upstash-Forward-${header}`] = userHeaders.get(header);
       }
     }
   }
-  if (_optionalChain([step, 'optionalAccess',
+  if (_optionalChain([step, 'optionalAccess', _47 => _47.callHeaders])) {
     const forwardedHeaders = Object.fromEntries(
       Object.entries(step.callHeaders).map(([header, value]) => [
         `Upstash-Forward-${header}`,
@@ -1867,7 +1869,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
     if (this.stepCount < this.nonPlanStepCount) {
       const step = this.steps[this.stepCount + this.planStepCount];
       validateStep(lazyStep, step);
-      await _optionalChain([this, 'access',
+      await _optionalChain([this, 'access', _48 => _48.debug, 'optionalAccess', _49 => _49.log, 'call', _50 => _50("INFO", "RUN_SINGLE", {
         fromRequest: true,
         step,
         stepCount: this.stepCount
@@ -1875,7 +1877,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
       return step.out;
     }
     const resultStep = await lazyStep.getResultStep(NO_CONCURRENCY, this.stepCount);
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _51 => _51.debug, 'optionalAccess', _52 => _52.log, 'call', _53 => _53("INFO", "RUN_SINGLE", {
       fromRequest: false,
       step: resultStep,
       stepCount: this.stepCount
@@ -1894,13 +1896,13 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
     const initialStepCount = this.stepCount - (parallelSteps.length - 1);
     const parallelCallState = this.getParallelCallState(parallelSteps.length, initialStepCount);
     const sortedSteps = sortSteps(this.steps);
-    const plannedParallelStepCount = _optionalChain([sortedSteps, 'access',
+    const plannedParallelStepCount = _optionalChain([sortedSteps, 'access', _54 => _54[initialStepCount + this.planStepCount], 'optionalAccess', _55 => _55.concurrent]);
     if (parallelCallState !== "first" && plannedParallelStepCount !== parallelSteps.length) {
       throw new QStashWorkflowError(
         `Incompatible number of parallel steps when call state was '${parallelCallState}'. Expected ${parallelSteps.length}, got ${plannedParallelStepCount} from the request.`
       );
     }
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _56 => _56.debug, 'optionalAccess', _57 => _57.log, 'call', _58 => _58("INFO", "RUN_PARALLEL", {
       parallelCallState,
       initialStepCount,
       plannedParallelStepCount,
@@ -1979,7 +1981,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
       return "first";
     } else if (remainingSteps.length >= 2 * parallelStepCount) {
       return "last";
-    } else if (_optionalChain([remainingSteps, 'access',
+    } else if (_optionalChain([remainingSteps, 'access', _59 => _59.at, 'call', _60 => _60(-1), 'optionalAccess', _61 => _61.targetStep])) {
       return "partial";
     } else {
       return "discard";
@@ -1996,7 +1998,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
         `Unable to submit steps to QStash. Provided list is empty. Current step: ${this.stepCount}`
       );
     }
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _62 => _62.debug, 'optionalAccess', _63 => _63.log, 'call', _64 => _64("SUBMIT", "SUBMIT_STEP", { length: steps.length, steps })]);
     const result = await this.context.qstashClient.batchJSON(
       steps.map((singleStep) => {
         const headers = getHeaders(
@@ -2037,7 +2039,7 @@ var AutoExecutor = (_class3 = class _AutoExecutor {
         );
       })
     );
-    await _optionalChain([this, 'access',
+    await _optionalChain([this, 'access', _65 => _65.debug, 'optionalAccess', _66 => _66.log, 'call', _67 => _67("INFO", "SUBMIT_STEP", {
       messageIds: result.map((message) => {
         return {
           message: message.messageId
@@ -2659,7 +2661,7 @@ var checkIfLastOneIsDuplicate = async (steps, debug) => {
     const step = steps[index];
     if (step.stepId === lastStepId && step.targetStep === lastTargetStepId) {
       const message = `QStash Workflow: The step '${step.stepName}' with id '${step.stepId}' has run twice during workflow execution. Rest of the workflow will continue running as usual.`;
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _68 => _68.log, 'call', _69 => _69("WARN", "RESPONSE_DEFAULT", message)]);
       console.warn(message);
       return true;
     }
@@ -2746,7 +2748,7 @@ var handleFailure = async (request, requestPayload, qstashClient, initialPayload
 
 // src/client/workflow/serve.ts
 var processOptions = (options) => {
-  const environment = _nullishCoalesce(_optionalChain([options, 'optionalAccess',
+  const environment = _nullishCoalesce(_optionalChain([options, 'optionalAccess', _70 => _70.env]), () => ( (typeof process === "undefined" ? {} : process.env)));
   const receiverEnvironmentVariablesSet = Boolean(
     environment.QSTASH_CURRENT_SIGNING_KEY && environment.QSTASH_NEXT_SIGNING_KEY
   );
@@ -2802,7 +2804,7 @@ var serve = (routeFunction, options) => {
       return baseUrl + (path || "");
     }) : initialWorkflowUrl;
     if (workflowUrl !== initialWorkflowUrl) {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _71 => _71.log, 'call', _72 => _72("WARN", "ENDPOINT_START", {
         warning: `QStash Workflow: replacing the base of the url with "${baseUrl}" and using it as workflow endpoint.`,
         originalURL: initialWorkflowUrl,
         updatedURL: workflowUrl
@@ -2811,7 +2813,7 @@ var serve = (routeFunction, options) => {
     const workflowFailureUrl = failureFunction ? workflowUrl : failureUrl;
     const requestPayload = await _asyncNullishCoalesce(await getPayload(request), async () => ( ""));
     await verifyRequest(requestPayload, request.headers.get("upstash-signature"), receiver);
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _73 => _73.log, 'call', _74 => _74("INFO", "ENDPOINT_START")]);
     const failureCheck = await handleFailure(
       request,
       requestPayload,
@@ -2822,11 +2824,11 @@ var serve = (routeFunction, options) => {
     if (failureCheck.isErr()) {
       throw failureCheck.error;
     } else if (failureCheck.value === "is-failure-callback") {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _75 => _75.log, 'call', _76 => _76("WARN", "RESPONSE_DEFAULT", "failureFunction executed")]);
       return onStepFinish("no-workflow-id", "failure-callback");
     }
     const { isFirstInvocation, workflowRunId } = validateRequest(request);
-    _optionalChain([debug, 'optionalAccess',
+    _optionalChain([debug, 'optionalAccess', _77 => _77.setWorkflowRunId, 'call', _78 => _78(workflowRunId)]);
     const { rawInitialPayload, steps, isLastDuplicate } = await parseRequest(
       requestPayload,
       isFirstInvocation,
@@ -2852,7 +2854,7 @@ var serve = (routeFunction, options) => {
       workflowContext
     );
     if (authCheck.isErr()) {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _79 => _79.log, 'call', _80 => _80("ERROR", "ERROR", { error: authCheck.error.message })]);
       throw authCheck.error;
     } else if (authCheck.value === "run-ended") {
       return onStepFinish("no-workflow-id", "auth-fail");
@@ -2867,7 +2869,7 @@ var serve = (routeFunction, options) => {
       debug
     );
     if (callReturnCheck.isErr()) {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _81 => _81.log, 'call', _82 => _82("ERROR", "SUBMIT_THIRD_PARTY_RESULT", {
         error: callReturnCheck.error.message
       })]);
       throw callReturnCheck.error;
@@ -2879,13 +2881,13 @@ var serve = (routeFunction, options) => {
       }
     });
     if (result.isErr()) {
-      await _optionalChain([debug, 'optionalAccess',
+      await _optionalChain([debug, 'optionalAccess', _83 => _83.log, 'call', _84 => _84("ERROR", "ERROR", { error: result.error.message })]);
       throw result.error;
     }
-    await _optionalChain([debug, 'optionalAccess',
+    await _optionalChain([debug, 'optionalAccess', _85 => _85.log, 'call', _86 => _86("INFO", "RESPONSE_WORKFLOW")]);
     return onStepFinish(workflowContext.workflowRunId, "success");
   }
-  await _optionalChain([debug, 'optionalAccess',
+  await _optionalChain([debug, 'optionalAccess', _87 => _87.log, 'call', _88 => _88("INFO", "RESPONSE_DEFAULT")]);
   return onStepFinish("no-workflow-id", "fromCallback");
  };
  return async (request) => {
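The behavioral hunks in this CJS chunk mirror the ESM build above (the `Bearer` regex in `getAuthorizationToken` and the string-cursor branch in `events()`); the remaining hunks only renumber the transpiler's `_optionalChain` placeholder arguments after those insertions. For illustration, a standalone sketch of what the new regex line does; the return value here is an assumption, since the diff only shows the match and the error check:

```ts
// Mirrors the `/Bearer (.+)/` extraction introduced in this release.
// Assumption: the captured group is what the Chat helper ultimately uses.
function extractBearerToken(authorizationHeader: string): string {
  const match = /Bearer (.+)/.exec(authorizationHeader);
  if (!match) {
    throw new Error("Invalid authorization header format");
  }
  return match[1];
}

// Example with a made-up header value:
extractBearerToken("Bearer qstash-example-token"); // "qstash-example-token"
```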
package/{chunk-5OXFLS25.js → chunk-YBZBGHDQ.js}
@@ -1,7 +1,7 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // node_modules/defu/dist/defu.mjs
 function isPlainObject(value) {
@@ -340,7 +340,7 @@ var verifySignatureH3 = (handler, config) => {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
     );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -391,7 +391,7 @@ var serve2 = (routeFunction, options) => {
       body: await readRawBody(event),
       method: "POST"
     });
-    const serveHandler =
+    const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
     return await serveHandler(request);
   });
   return handler;
@@ -80,7 +80,7 @@ type EventPayload = Omit<Event, "urlGroup"> & {
|
|
|
80
80
|
topicName: string;
|
|
81
81
|
};
|
|
82
82
|
type GetEventsPayload = {
|
|
83
|
-
cursor?:
|
|
83
|
+
cursor?: string;
|
|
84
84
|
events: EventPayload[];
|
|
85
85
|
};
|
|
86
86
|
type WithCursor<T> = T & {
|
|
@@ -1668,7 +1668,7 @@ type PublishJsonRequest = Omit<PublishRequest, "body"> & {
|
|
|
1668
1668
|
body: unknown;
|
|
1669
1669
|
};
|
|
1670
1670
|
type EventsRequest = {
|
|
1671
|
-
cursor?: number;
|
|
1671
|
+
cursor?: string | number;
|
|
1672
1672
|
filter?: EventsRequestFilter;
|
|
1673
1673
|
};
|
|
1674
1674
|
type EventsRequestFilter = {
|
|
@@ -1685,7 +1685,7 @@ type EventsRequestFilter = {
|
|
|
1685
1685
|
count?: number;
|
|
1686
1686
|
};
|
|
1687
1687
|
type GetEventsResponse = {
|
|
1688
|
-
cursor?:
|
|
1688
|
+
cursor?: string;
|
|
1689
1689
|
events: Event[];
|
|
1690
1690
|
};
|
|
1691
1691
|
type QueueRequest = {
|
|
@@ -80,7 +80,7 @@ type EventPayload = Omit<Event, "urlGroup"> & {
|
|
|
80
80
|
topicName: string;
|
|
81
81
|
};
|
|
82
82
|
type GetEventsPayload = {
|
|
83
|
-
cursor?:
|
|
83
|
+
cursor?: string;
|
|
84
84
|
events: EventPayload[];
|
|
85
85
|
};
|
|
86
86
|
type WithCursor<T> = T & {
|
|
@@ -1668,7 +1668,7 @@ type PublishJsonRequest = Omit<PublishRequest, "body"> & {
|
|
|
1668
1668
|
body: unknown;
|
|
1669
1669
|
};
|
|
1670
1670
|
type EventsRequest = {
|
|
1671
|
-
cursor?: number;
|
|
1671
|
+
cursor?: string | number;
|
|
1672
1672
|
filter?: EventsRequestFilter;
|
|
1673
1673
|
};
|
|
1674
1674
|
type EventsRequestFilter = {
|
|
@@ -1685,7 +1685,7 @@ type EventsRequestFilter = {
|
|
|
1685
1685
|
count?: number;
|
|
1686
1686
|
};
|
|
1687
1687
|
type GetEventsResponse = {
|
|
1688
|
-
cursor?:
|
|
1688
|
+
cursor?: string;
|
|
1689
1689
|
events: Event[];
|
|
1690
1690
|
};
|
|
1691
1691
|
type QueueRequest = {
|
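Both declaration files relax `EventsRequest.cursor` to `string | number` and type the returned `GetEventsResponse.cursor` as a string. A sketch of how a caller might page through events under these updated types; the helper name is hypothetical:

```ts
import { Client, type Event } from "@upstash/qstash";

// Hypothetical helper: drains all event pages by following the string
// cursor now declared on GetEventsResponse.
async function getAllEvents(client: Client): Promise<Event[]> {
  const all: Event[] = [];
  let cursor: string | undefined;

  do {
    const page = await client.events(cursor ? { cursor } : undefined);
    all.push(...page.events);
    cursor = page.cursor; // undefined once the last page is reached
  } while (cursor);

  return all;
}
```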
package/cloudflare.d.mts
CHANGED
package/cloudflare.d.ts
CHANGED
package/cloudflare.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/cloudflare.ts
 var getArgs = (args) => {
@@ -24,7 +24,7 @@ var getArgs = (args) => {
 var serve2 = (routeFunction, options) => {
   const handler = async (...args) => {
     const { request, env } = getArgs(args);
-    const serveHandler =
+    const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, {
       env,
       ...options
     });
package/cloudflare.mjs
CHANGED
package/h3.d.mts
CHANGED
@@ -1,6 +1,6 @@
 import * as h3 from 'h3';
 import { H3Event } from 'h3';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/h3.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import * as h3 from 'h3';
 import { H3Event } from 'h3';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/h3.js
CHANGED
@@ -1,10 +1,10 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
 
-var
+var _chunkYBZBGHDQjs = require('./chunk-YBZBGHDQ.js');
 require('./chunk-VN7YQ2UN.js');
-require('./chunk-
+require('./chunk-R5CZPV7H.js');
 
 
 
-exports.serve =
+exports.serve = _chunkYBZBGHDQjs.serve; exports.verifySignatureH3 = _chunkYBZBGHDQjs.verifySignatureH3;
package/h3.mjs
CHANGED
package/hono.d.mts
CHANGED
package/hono.d.ts
CHANGED
package/hono.js
CHANGED
@@ -1,13 +1,13 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/hono.ts
 var serve2 = (routeFunction, options) => {
   const handler = async (context) => {
     const environment = context.env;
     const request = context.req.raw;
-    const serveHandler =
+    const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, {
       // when hono is used without cf workers, it sends a DebugHTTPServer
       // object in `context.env`. don't pass env if this is the case:
       env: "QSTASH_TOKEN" in environment ? environment : void 0,
package/hono.mjs
CHANGED
package/index.d.mts
CHANGED
@@ -1,5 +1,5 @@
-import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-
-export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-
+import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-DkrYCqaq.mjs';
+export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 /**
package/index.d.ts
CHANGED
@@ -1,5 +1,5 @@
-import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-
-export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-
+import { R as RateLimit, C as ChatRateLimit, S as Step, F as FailureFunctionPayload } from './client-DkrYCqaq.js';
+export { A as AddEndpointsRequest, $ as AnalyticsConfig, a0 as AnalyticsSetup, B as BodyInit, y as Chat, D as ChatCompletion, I as ChatCompletionChunk, z as ChatCompletionMessage, T as ChatRequest, f as Client, n as CreateScheduleRequest, p as Endpoint, t as Event, u as EventPayload, E as EventsRequest, v as GetEventsPayload, G as GetEventsResponse, H as HTTPMethods, w as HeadersInit, M as Message, k as MessagePayload, l as Messages, O as OpenAIChatModel, N as PromptChatRequest, _ as ProviderReturnType, P as PublishBatchRequest, e as PublishJsonRequest, d as PublishRequest, j as PublishResponse, g as PublishToApiResponse, i as PublishToUrlGroupsResponse, h as PublishToUrlResponse, Q as QueueRequest, c as Receiver, a as ReceiverConfig, q as RemoveEndpointsRequest, x as RequestOptions, m as Schedule, o as Schedules, b as SignatureError, s as State, K as StreamDisabled, J as StreamEnabled, L as StreamParameter, U as UrlGroup, r as UrlGroups, V as VerifyRequest, W as WithCursor, X as custom, Y as openai, a1 as setupAnalytics, Z as upstash } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 /**
package/index.js
CHANGED
@@ -19,7 +19,7 @@
 
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 
 
@@ -40,4 +40,4 @@ var _chunkXY54CU56js = require('./chunk-XY54CU56.js');
 
 
 
-exports.Chat =
+exports.Chat = _chunkR5CZPV7Hjs.Chat; exports.Client = _chunkR5CZPV7Hjs.Client; exports.Messages = _chunkR5CZPV7Hjs.Messages; exports.QStashWorkflowAbort = _chunkR5CZPV7Hjs.QStashWorkflowAbort; exports.QStashWorkflowError = _chunkR5CZPV7Hjs.QStashWorkflowError; exports.QstashChatRatelimitError = _chunkR5CZPV7Hjs.QstashChatRatelimitError; exports.QstashDailyRatelimitError = _chunkR5CZPV7Hjs.QstashDailyRatelimitError; exports.QstashError = _chunkR5CZPV7Hjs.QstashError; exports.QstashRatelimitError = _chunkR5CZPV7Hjs.QstashRatelimitError; exports.Receiver = _chunkR5CZPV7Hjs.Receiver; exports.Schedules = _chunkR5CZPV7Hjs.Schedules; exports.SignatureError = _chunkR5CZPV7Hjs.SignatureError; exports.UrlGroups = _chunkR5CZPV7Hjs.UrlGroups; exports.custom = _chunkR5CZPV7Hjs.custom; exports.decodeBase64 = _chunkR5CZPV7Hjs.decodeBase64; exports.formatWorkflowError = _chunkR5CZPV7Hjs.formatWorkflowError; exports.openai = _chunkR5CZPV7Hjs.openai; exports.setupAnalytics = _chunkR5CZPV7Hjs.setupAnalytics; exports.upstash = _chunkR5CZPV7Hjs.upstash;
package/index.mjs
CHANGED
package/nextjs.d.mts
CHANGED
@@ -1,6 +1,6 @@
 import { NextApiHandler } from 'next';
 import { NextRequest, NextFetchEvent, NextResponse } from 'next/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/nextjs.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { NextApiHandler } from 'next';
 import { NextRequest, NextFetchEvent, NextResponse } from 'next/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/nextjs.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/nextjs.ts
 var _server = require('next/server');
@@ -19,7 +19,7 @@ function verifySignature(handler, config) {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
     );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -71,7 +71,7 @@ function verifySignatureEdge(handler, config) {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
    );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -111,7 +111,7 @@ function verifySignatureAppRouter(handler, config) {
       "nextSigningKey is required, either in the config or as env variable QSTASH_NEXT_SIGNING_KEY"
    );
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -139,7 +139,7 @@ function verifySignatureAppRouter(handler, config) {
   };
 }
 var serve2 = (routeFunction, options) => {
-  const handler =
+  const handler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, {
     onStepFinish: (workflowRunId) => new (0, _server.NextResponse)(JSON.stringify({ workflowRunId }), { status: 200 }),
     ...options
   });
@@ -148,7 +148,7 @@ var serve2 = (routeFunction, options) => {
   };
 };
 var servePagesRouter = (routeFunction, options) => {
-  const handler =
+  const handler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
   return async (req, res) => {
     if (_optionalChain([req, 'access', _10 => _10.method, 'optionalAccess', _11 => _11.toUpperCase, 'call', _12 => _12()]) !== "POST") {
       res.status(405).json("Only POST requests are allowed in worklfows");
package/nextjs.mjs
CHANGED
package/nuxt.js
CHANGED
@@ -1,11 +1,11 @@
 "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
-var
+var _chunkYBZBGHDQjs = require('./chunk-YBZBGHDQ.js');
 require('./chunk-VN7YQ2UN.js');
-require('./chunk-
+require('./chunk-R5CZPV7H.js');
 
 // platforms/nuxt.ts
-var verifySignatureNuxt =
+var verifySignatureNuxt = _chunkYBZBGHDQjs.verifySignatureH3;
 
 
 exports.verifySignatureNuxt = verifySignatureNuxt;
package/nuxt.mjs
CHANGED
@@ -1,8 +1,8 @@
 import {
   verifySignatureH3
-} from "./chunk-
+} from "./chunk-UPFTIDSI.mjs";
 import "./chunk-CIVGPRQN.mjs";
-import "./chunk-
+import "./chunk-QK55BUNQ.mjs";
 
 // platforms/nuxt.ts
 var verifySignatureNuxt = verifySignatureH3;
package/package.json
CHANGED
@@ -1 +1 @@
-{"version":"v2.7.
+{"version":"v2.7.9","name":"@upstash/qstash","description":"Official Typescript client for QStash","author":"Andreas Thomas <dev@chronark.com>","license":"MIT","homepage":"https://github.com/upstash/sdk-qstash-ts#readme","repository":{"type":"git","url":"git+https://github.com/upstash/sdk-qstash-ts.git"},"bugs":{"url":"https://github.com/upstash/sdk-qstash-ts/issues"},"main":"./index.js","module":"./index.mjs","types":"./index.d.ts","files":["./**"],"exports":{".":{"import":"./index.mjs","require":"./index.js"},"./nextjs":{"import":"./nextjs.js","require":"./nextjs.js"},"./dist/nextjs":{"import":"./nextjs.js","require":"./nextjs.js"},"./h3":{"types":"./h3.d.ts","import":"./h3.mjs","require":"./h3.js"},"./nuxt":{"types":"./nuxt.d.ts","import":"./nuxt.mjs","require":"./nuxt.js"},"./svelte":{"types":"./svelte.d.ts","import":"./svelte.mjs","require":"./svelte.js"},"./solidjs":{"types":"./solidjs.d.ts","import":"./solidjs.mjs","require":"./solidjs.js"},"./workflow":{"types":"./workflow.d.ts","import":"./workflow.mjs","require":"./workflow.js"},"./hono":{"types":"./hono.d.ts","import":"./hono.mjs","require":"./hono.js"},"./cloudflare":{"types":"./cloudflare.d.ts","import":"./cloudflare.mjs","require":"./cloudflare.js"}},"typesVersions":{"*":{"nextjs":["./nextjs.d.ts"]}},"keywords":["qstash","queue","events","serverless","upstash"],"scripts":{"build":"tsup && cp README.md ./dist/ && cp package.json ./dist/ && cp LICENSE ./dist/","test":"bun test src","fmt":"prettier --write .","lint":"tsc && eslint \"src/**/*.{js,ts,tsx}\" --quiet --fix"},"devDependencies":{"@commitlint/cli":"^19.2.2","@commitlint/config-conventional":"^19.2.2","@eslint/eslintrc":"^3.1.0","@eslint/js":"^9.10.0","@solidjs/start":"^1.0.6","@sveltejs/kit":"^2.5.18","@types/bun":"^1.1.1","@types/crypto-js":"^4.2.0","@typescript-eslint/eslint-plugin":"^8.4.0","@typescript-eslint/parser":"^8.4.0","ai":"^3.1.28","bun-types":"^1.1.7","eslint":"^9.10.0","eslint-plugin-unicorn":"^51.0.1","h3":"^1.12.0","hono":"^4.5.8","husky":"^9.0.10","neverthrow":"^7.0.1","next":"^14.0.2","prettier":"^3.2.5","tsup":"latest","typescript":"^5.4.5","undici-types":"^6.16.0","vitest":"latest"},"dependencies":{"crypto-js":">=4.2.0","jose":"^5.2.3"}}
package/solidjs.d.mts
CHANGED
@@ -1,5 +1,5 @@
 import { APIHandler, APIEvent } from '@solidjs/start/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/solidjs.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { APIHandler, APIEvent } from '@solidjs/start/server';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/solidjs.js
CHANGED
@@ -2,7 +2,7 @@
 
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/solidjs.ts
 var verifySignatureSolidjs = (handler, config) => {
@@ -14,7 +14,7 @@ var verifySignatureSolidjs = (handler, config) => {
   if (!nextSigningKey) {
     throw new Error("nextSigningKey is required, either in the config or from the env");
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -45,7 +45,7 @@ var serve2 = (routeFunction, options) => {
     if (method.toUpperCase() !== "POST") {
       return new Response("Only POST requests are allowed in worklfows", { status: 405 });
     }
-    const serveHandler =
+    const serveHandler = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
     return await serveHandler(event.request);
   };
   return handler;
package/solidjs.mjs
CHANGED
package/svelte.d.mts
CHANGED
@@ -1,5 +1,5 @@
 import { RequestHandler } from '@sveltejs/kit';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/svelte.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import { RequestHandler } from '@sveltejs/kit';
-import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-
+import { a2 as RouteFunction, a3 as WorkflowServeOptions } from './client-DkrYCqaq.js';
 import 'neverthrow';
 
 type VerifySignatureConfig = {
package/svelte.js
CHANGED
@@ -2,7 +2,7 @@
 
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 // platforms/svelte.ts
 var verifySignatureSvelte = (handler, config) => {
@@ -14,7 +14,7 @@ var verifySignatureSvelte = (handler, config) => {
   if (!nextSigningKey) {
     throw new Error("nextSigningKey is required, either in the config or from the env");
   }
-  const receiver = new (0,
+  const receiver = new (0, _chunkR5CZPV7Hjs.Receiver)({
     currentSigningKey,
     nextSigningKey
   });
@@ -42,7 +42,7 @@ var verifySignatureSvelte = (handler, config) => {
 };
 var serve2 = (routeFunction, options) => {
   const handler = async ({ request }) => {
-    const serveMethod =
+    const serveMethod = _chunkR5CZPV7Hjs.serve.call(void 0, routeFunction, options);
     return await serveMethod(request);
   };
   return handler;
package/svelte.mjs
CHANGED
package/workflow.d.mts
CHANGED
@@ -1,2 +1,2 @@
-export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-
+export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-DkrYCqaq.mjs';
 import 'neverthrow';
package/workflow.d.ts
CHANGED
@@ -1,2 +1,2 @@
-export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-
+export { af as AsyncStepFunction, a8 as DisabledWorkflowContext, F as FailureFunctionPayload, ai as FinishCondition, ak as LogLevel, ah as ParallelCallState, ad as RawStep, aj as RequiredExceptFields, a2 as RouteFunction, S as Step, ag as StepFunction, ac as StepType, ab as StepTypes, ae as SyncStepFunction, a4 as Workflow, a9 as WorkflowClient, a7 as WorkflowContext, am as WorkflowLogger, al as WorkflowLoggerOptions, aa as WorkflowReceiver, a3 as WorkflowServeOptions, a5 as processOptions, a6 as serve } from './client-DkrYCqaq.js';
 import 'neverthrow';
package/workflow.js
CHANGED
@@ -6,7 +6,7 @@
 
 
 
-var
+var _chunkR5CZPV7Hjs = require('./chunk-R5CZPV7H.js');
 
 
 
@@ -15,4 +15,4 @@ var _chunkXY54CU56js = require('./chunk-XY54CU56.js');
 
 
 
-exports.DisabledWorkflowContext =
+exports.DisabledWorkflowContext = _chunkR5CZPV7Hjs.DisabledWorkflowContext; exports.StepTypes = _chunkR5CZPV7Hjs.StepTypes; exports.Workflow = _chunkR5CZPV7Hjs.Workflow; exports.WorkflowContext = _chunkR5CZPV7Hjs.WorkflowContext; exports.WorkflowLogger = _chunkR5CZPV7Hjs.WorkflowLogger; exports.processOptions = _chunkR5CZPV7Hjs.processOptions; exports.serve = _chunkR5CZPV7Hjs.serve;