@workflow/world-testing 4.1.0-beta.58 → 4.1.0-beta.60
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.well-known/workflow/v1/flow.js +2065 -1056
- package/dist/.well-known/workflow/v1/flow.js.map +1 -1
- package/dist/.well-known/workflow/v1/manifest.json +262 -27
- package/dist/.well-known/workflow/v1/step.js +2043 -1069
- package/dist/.well-known/workflow/v1/step.js.map +1 -1
- package/dist/src/addition.d.mts.map +1 -1
- package/dist/src/addition.mjs +2 -2
- package/dist/src/addition.mjs.map +1 -1
- package/dist/src/errors.d.mts.map +1 -1
- package/dist/src/errors.mjs +2 -2
- package/dist/src/errors.mjs.map +1 -1
- package/dist/src/hooks.d.mts.map +1 -1
- package/dist/src/hooks.mjs +2 -2
- package/dist/src/hooks.mjs.map +1 -1
- package/dist/src/idempotency.d.mts.map +1 -1
- package/dist/src/idempotency.mjs +2 -2
- package/dist/src/idempotency.mjs.map +1 -1
- package/dist/src/null-byte.d.mts.map +1 -1
- package/dist/src/null-byte.mjs +2 -2
- package/dist/src/null-byte.mjs.map +1 -1
- package/package.json +5 -4
|
@@ -200,12 +200,12 @@ var require_headers = __commonJS({
|
|
|
200
200
|
var POSTAL_CODE_HEADER_NAME = "x-vercel-ip-postal-code";
|
|
201
201
|
var REQUEST_ID_HEADER_NAME = "x-vercel-id";
|
|
202
202
|
var EMOJI_FLAG_UNICODE_STARTING_POSITION = 127397;
|
|
203
|
-
function
|
|
203
|
+
function getHeader2(headers, key) {
|
|
204
204
|
return headers.get(key) ?? void 0;
|
|
205
205
|
}
|
|
206
|
-
__name(
|
|
206
|
+
__name(getHeader2, "getHeader");
|
|
207
207
|
function getHeaderWithDecode(request, key) {
|
|
208
|
-
const header =
|
|
208
|
+
const header = getHeader2(request.headers, key);
|
|
209
209
|
return header ? decodeURIComponent(header) : void 0;
|
|
210
210
|
}
|
|
211
211
|
__name(getHeaderWithDecode, "getHeaderWithDecode");
|
|
@@ -218,7 +218,7 @@ var require_headers = __commonJS({
|
|
|
218
218
|
__name(getFlag, "getFlag");
|
|
219
219
|
function ipAddress2(input) {
|
|
220
220
|
const headers = "headers" in input ? input.headers : input;
|
|
221
|
-
return
|
|
221
|
+
return getHeader2(headers, IP_HEADER_NAME);
|
|
222
222
|
}
|
|
223
223
|
__name(ipAddress2, "ipAddress");
|
|
224
224
|
function getRegionFromRequestId(requestId) {
|
|
@@ -232,13 +232,13 @@ var require_headers = __commonJS({
|
|
|
232
232
|
return {
|
|
233
233
|
// city name may be encoded to support multi-byte characters
|
|
234
234
|
city: getHeaderWithDecode(request, CITY_HEADER_NAME),
|
|
235
|
-
country:
|
|
236
|
-
flag: getFlag(
|
|
237
|
-
countryRegion:
|
|
238
|
-
region: getRegionFromRequestId(
|
|
239
|
-
latitude:
|
|
240
|
-
longitude:
|
|
241
|
-
postalCode:
|
|
235
|
+
country: getHeader2(request.headers, COUNTRY_HEADER_NAME),
|
|
236
|
+
flag: getFlag(getHeader2(request.headers, COUNTRY_HEADER_NAME)),
|
|
237
|
+
countryRegion: getHeader2(request.headers, REGION_HEADER_NAME),
|
|
238
|
+
region: getRegionFromRequestId(getHeader2(request.headers, REQUEST_ID_HEADER_NAME)),
|
|
239
|
+
latitude: getHeader2(request.headers, LATITUDE_HEADER_NAME),
|
|
240
|
+
longitude: getHeader2(request.headers, LONGITUDE_HEADER_NAME),
|
|
241
|
+
postalCode: getHeader2(request.headers, POSTAL_CODE_HEADER_NAME)
|
|
242
242
|
};
|
|
243
243
|
}
|
|
244
244
|
__name(geolocation2, "geolocation");
|
|
@@ -4182,7 +4182,7 @@ var require_token_util = __commonJS({
|
|
|
4182
4182
|
getTokenPayload: /* @__PURE__ */ __name(() => getTokenPayload, "getTokenPayload"),
|
|
4183
4183
|
getVercelCliToken: /* @__PURE__ */ __name(() => getVercelCliToken, "getVercelCliToken"),
|
|
4184
4184
|
getVercelDataDir: /* @__PURE__ */ __name(() => getVercelDataDir, "getVercelDataDir"),
|
|
4185
|
-
getVercelOidcToken: /* @__PURE__ */ __name(() =>
|
|
4185
|
+
getVercelOidcToken: /* @__PURE__ */ __name(() => getVercelOidcToken6, "getVercelOidcToken"),
|
|
4186
4186
|
isExpired: /* @__PURE__ */ __name(() => isExpired, "isExpired"),
|
|
4187
4187
|
loadToken: /* @__PURE__ */ __name(() => loadToken, "loadToken"),
|
|
4188
4188
|
saveToken: /* @__PURE__ */ __name(() => saveToken, "saveToken")
|
|
@@ -4217,7 +4217,7 @@ var require_token_util = __commonJS({
|
|
|
4217
4217
|
return JSON.parse(token).token;
|
|
4218
4218
|
}
|
|
4219
4219
|
__name(getVercelCliToken, "getVercelCliToken");
|
|
4220
|
-
async function
|
|
4220
|
+
async function getVercelOidcToken6(authToken, projectId, teamId) {
|
|
4221
4221
|
try {
|
|
4222
4222
|
const url2 = `https://api.vercel.com/v1/projects/${projectId}/token?source=vercel-oidc-refresh${teamId ? `&teamId=${teamId}` : ""}`;
|
|
4223
4223
|
const res = await fetch(url2, {
|
|
@@ -4237,7 +4237,7 @@ var require_token_util = __commonJS({
|
|
|
4237
4237
|
throw new import_token_error.VercelOidcTokenError(`Failed to refresh OIDC token`, e);
|
|
4238
4238
|
}
|
|
4239
4239
|
}
|
|
4240
|
-
__name(
|
|
4240
|
+
__name(getVercelOidcToken6, "getVercelOidcToken");
|
|
4241
4241
|
function assertVercelOidcTokenResponse(res) {
|
|
4242
4242
|
if (!res || typeof res !== "object") {
|
|
4243
4243
|
throw new TypeError("Expected an object");
|
|
@@ -4417,13 +4417,13 @@ var require_get_vercel_oidc_token = __commonJS({
|
|
|
4417
4417
|
}), mod), "__toCommonJS");
|
|
4418
4418
|
var get_vercel_oidc_token_exports = {};
|
|
4419
4419
|
__export2(get_vercel_oidc_token_exports, {
|
|
4420
|
-
getVercelOidcToken: /* @__PURE__ */ __name(() =>
|
|
4420
|
+
getVercelOidcToken: /* @__PURE__ */ __name(() => getVercelOidcToken6, "getVercelOidcToken"),
|
|
4421
4421
|
getVercelOidcTokenSync: /* @__PURE__ */ __name(() => getVercelOidcTokenSync2, "getVercelOidcTokenSync")
|
|
4422
4422
|
});
|
|
4423
4423
|
module2.exports = __toCommonJS2(get_vercel_oidc_token_exports);
|
|
4424
4424
|
var import_get_context = require_get_context2();
|
|
4425
4425
|
var import_token_error = require_token_error();
|
|
4426
|
-
async function
|
|
4426
|
+
async function getVercelOidcToken6() {
|
|
4427
4427
|
let token = "";
|
|
4428
4428
|
let err;
|
|
4429
4429
|
try {
|
|
@@ -4451,7 +4451,7 @@ ${error45.message}`;
|
|
|
4451
4451
|
}
|
|
4452
4452
|
return token;
|
|
4453
4453
|
}
|
|
4454
|
-
__name(
|
|
4454
|
+
__name(getVercelOidcToken6, "getVercelOidcToken");
|
|
4455
4455
|
function getVercelOidcTokenSync2() {
|
|
4456
4456
|
const token = (0, import_get_context.getContext)().headers?.["x-vercel-oidc-token"] ?? process.env.VERCEL_OIDC_TOKEN;
|
|
4457
4457
|
if (!token) {
|
|
@@ -12180,7 +12180,7 @@ var require_client = __commonJS({
|
|
|
12180
12180
|
return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1;
|
|
12181
12181
|
}
|
|
12182
12182
|
__name(getPipelining, "getPipelining");
|
|
12183
|
-
var
|
|
12183
|
+
var Client = class extends DispatcherBase {
|
|
12184
12184
|
static {
|
|
12185
12185
|
__name(this, "Client");
|
|
12186
12186
|
}
|
|
@@ -12612,7 +12612,7 @@ var require_client = __commonJS({
|
|
|
12612
12612
|
}
|
|
12613
12613
|
}
|
|
12614
12614
|
__name(_resume, "_resume");
|
|
12615
|
-
module2.exports =
|
|
12615
|
+
module2.exports = Client;
|
|
12616
12616
|
}
|
|
12617
12617
|
});
|
|
12618
12618
|
// ../../node_modules/.pnpm/undici@6.22.0/node_modules/undici/lib/dispatcher/fixed-queue.js
|
|
@@ -12894,7 +12894,7 @@ var require_pool = __commonJS({
|
|
|
12894
12894
|
"../../node_modules/.pnpm/undici@6.22.0/node_modules/undici/lib/dispatcher/pool.js"(exports2, module2) {
|
|
12895
12895
|
"use strict";
|
|
12896
12896
|
var { PoolBase, kClients, kNeedDrain, kAddClient, kGetDispatcher } = require_pool_base();
|
|
12897
|
-
var
|
|
12897
|
+
var Client = require_client();
|
|
12898
12898
|
var { InvalidArgumentError } = require_errors();
|
|
12899
12899
|
var util = require_util();
|
|
12900
12900
|
var { kUrl, kInterceptors } = require_symbols();
|
|
@@ -12903,7 +12903,7 @@ var require_pool = __commonJS({
|
|
|
12903
12903
|
var kConnections = Symbol("connections");
|
|
12904
12904
|
var kFactory = Symbol("factory");
|
|
12905
12905
|
function defaultFactory(origin, opts) {
|
|
12906
|
-
return new
|
|
12906
|
+
return new Client(origin, opts);
|
|
12907
12907
|
}
|
|
12908
12908
|
__name(defaultFactory, "defaultFactory");
|
|
12909
12909
|
var Pool = class extends PoolBase {
|
|
@@ -13121,7 +13121,7 @@ var require_agent = __commonJS({
|
|
|
13121
13121
|
var { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require_symbols();
|
|
13122
13122
|
var DispatcherBase = require_dispatcher_base();
|
|
13123
13123
|
var Pool = require_pool();
|
|
13124
|
-
var
|
|
13124
|
+
var Client = require_client();
|
|
13125
13125
|
var util = require_util();
|
|
13126
13126
|
var createRedirectInterceptor = require_redirect_interceptor();
|
|
13127
13127
|
var kOnConnect = Symbol("onConnect");
|
|
@@ -13132,7 +13132,7 @@ var require_agent = __commonJS({
|
|
|
13132
13132
|
var kFactory = Symbol("factory");
|
|
13133
13133
|
var kOptions = Symbol("options");
|
|
13134
13134
|
function defaultFactory(origin, opts) {
|
|
13135
|
-
return opts && opts.connections === 1 ? new
|
|
13135
|
+
return opts && opts.connections === 1 ? new Client(origin, opts) : new Pool(origin, opts);
|
|
13136
13136
|
}
|
|
13137
13137
|
__name(defaultFactory, "defaultFactory");
|
|
13138
13138
|
var Agent2 = class extends DispatcherBase {
|
|
@@ -13248,7 +13248,7 @@ var require_proxy_agent = __commonJS({
|
|
|
13248
13248
|
var DispatcherBase = require_dispatcher_base();
|
|
13249
13249
|
var { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require_errors();
|
|
13250
13250
|
var buildConnector = require_connect();
|
|
13251
|
-
var
|
|
13251
|
+
var Client = require_client();
|
|
13252
13252
|
var kAgent = Symbol("proxy agent");
|
|
13253
13253
|
var kClient = Symbol("proxy client");
|
|
13254
13254
|
var kProxyHeaders = Symbol("proxy headers");
|
|
@@ -13268,7 +13268,7 @@ var require_proxy_agent = __commonJS({
|
|
|
13268
13268
|
}, "noop");
|
|
13269
13269
|
function defaultAgentFactory(origin, opts) {
|
|
13270
13270
|
if (opts.connections === 1) {
|
|
13271
|
-
return new
|
|
13271
|
+
return new Client(origin, opts);
|
|
13272
13272
|
}
|
|
13273
13273
|
return new Pool(origin, opts);
|
|
13274
13274
|
}
|
|
@@ -13290,7 +13290,7 @@ var require_proxy_agent = __commonJS({
|
|
|
13290
13290
|
});
|
|
13291
13291
|
}
|
|
13292
13292
|
else {
|
|
13293
|
-
this.#client = new
|
|
13293
|
+
this.#client = new Client(proxyUrl, {
|
|
13294
13294
|
connect
|
|
13295
13295
|
});
|
|
13296
13296
|
}
|
|
@@ -15810,13 +15810,13 @@ var require_mock_client = __commonJS({
|
|
|
15810
15810
|
"../../node_modules/.pnpm/undici@6.22.0/node_modules/undici/lib/mock/mock-client.js"(exports2, module2) {
|
|
15811
15811
|
"use strict";
|
|
15812
15812
|
var { promisify: promisify2 } = require("node:util");
|
|
15813
|
-
var
|
|
15813
|
+
var Client = require_client();
|
|
15814
15814
|
var { buildMockDispatch } = require_mock_utils();
|
|
15815
15815
|
var { kDispatches, kMockAgent, kClose, kOriginalClose, kOrigin, kOriginalDispatch, kConnected } = require_mock_symbols();
|
|
15816
15816
|
var { MockInterceptor } = require_mock_interceptor();
|
|
15817
15817
|
var Symbols = require_symbols();
|
|
15818
15818
|
var { InvalidArgumentError } = require_errors();
|
|
15819
|
-
var MockClient = class extends
|
|
15819
|
+
var MockClient = class extends Client {
|
|
15820
15820
|
static {
|
|
15821
15821
|
__name(this, "MockClient");
|
|
15822
15822
|
}
|
|
@@ -23618,7 +23618,7 @@ var require_eventsource = __commonJS({
|
|
|
23618
23618
|
var require_undici = __commonJS({
|
|
23619
23619
|
"../../node_modules/.pnpm/undici@6.22.0/node_modules/undici/index.js"(exports2, module2) {
|
|
23620
23620
|
"use strict";
|
|
23621
|
-
var
|
|
23621
|
+
var Client = require_client();
|
|
23622
23622
|
var Dispatcher = require_dispatcher();
|
|
23623
23623
|
var Pool = require_pool();
|
|
23624
23624
|
var BalancedPool = require_balanced_pool();
|
|
@@ -23642,7 +23642,7 @@ var require_undici = __commonJS({
|
|
|
23642
23642
|
var createRedirectInterceptor = require_redirect_interceptor();
|
|
23643
23643
|
Object.assign(Dispatcher.prototype, api);
|
|
23644
23644
|
module2.exports.Dispatcher = Dispatcher;
|
|
23645
|
-
module2.exports.Client =
|
|
23645
|
+
module2.exports.Client = Client;
|
|
23646
23646
|
module2.exports.Pool = Pool;
|
|
23647
23647
|
module2.exports.BalancedPool = BalancedPool;
|
|
23648
23648
|
module2.exports.Agent = Agent2;
|
|
@@ -25191,7 +25191,7 @@ async function fetch2(...args) {
|
|
|
25191
25191
|
return globalThis.fetch(...args);
|
|
25192
25192
|
}
|
|
25193
25193
|
__name(fetch2, "fetch");
|
|
25194
|
-
registerStepFunction("step//workflow@4.1.0-beta.
|
|
25194
|
+
registerStepFunction("step//workflow@4.1.0-beta.59//fetch", fetch2);
|
|
25195
25195
|
// workflows/addition.ts
|
|
25196
25196
|
async function add(num, num2) {
|
|
25197
25197
|
return num + num2;
|
|
@@ -39585,6 +39585,33 @@ var StepSchema = external_exports.object({
|
|
|
39585
39585
|
// Optional in database for backwards compatibility, defaults to 1 (legacy) when reading
|
|
39586
39586
|
specVersion: external_exports.number().optional()
|
|
39587
39587
|
});
|
|
39588
|
+
// ../world/dist/waits.js
|
|
39589
|
+
var WaitStatusSchema = external_exports.enum([
|
|
39590
|
+
"waiting",
|
|
39591
|
+
"completed"
|
|
39592
|
+
]);
|
|
39593
|
+
var WaitSchema = external_exports.object({
|
|
39594
|
+
waitId: external_exports.string(),
|
|
39595
|
+
runId: external_exports.string(),
|
|
39596
|
+
status: WaitStatusSchema,
|
|
39597
|
+
resumeAt: external_exports.coerce.date().optional(),
|
|
39598
|
+
completedAt: external_exports.coerce.date().optional(),
|
|
39599
|
+
createdAt: external_exports.coerce.date(),
|
|
39600
|
+
updatedAt: external_exports.coerce.date(),
|
|
39601
|
+
specVersion: external_exports.number().optional()
|
|
39602
|
+
});
|
|
39603
|
+
// ../core/dist/encryption.js
|
|
39604
|
+
var KEY_LENGTH = 32;
|
|
39605
|
+
async function importKey(raw) {
|
|
39606
|
+
if (raw.byteLength !== KEY_LENGTH) {
|
|
39607
|
+
throw new Error(`Encryption key must be exactly ${KEY_LENGTH} bytes, got ${raw.byteLength}`);
|
|
39608
|
+
}
|
|
39609
|
+
return globalThis.crypto.subtle.importKey("raw", raw, "AES-GCM", false, [
|
|
39610
|
+
"encrypt",
|
|
39611
|
+
"decrypt"
|
|
39612
|
+
]);
|
|
39613
|
+
}
|
|
39614
|
+
__name(importKey, "importKey");
|
|
39588
39615
|
// ../serde/dist/index.js
|
|
39589
39616
|
var WORKFLOW_SERIALIZE = Symbol.for("workflow-serialize");
|
|
39590
39617
|
var WORKFLOW_DESERIALIZE = Symbol.for("workflow-deserialize");
|
|
@@ -41675,9 +41702,7 @@ var StreamingMultipartParser = class {
|
|
|
41675
41702
|
}
|
|
41676
41703
|
}
|
|
41677
41704
|
};
|
|
41678
|
-
// ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.
|
|
41679
|
-
var fs = __toESM(require("fs"), 1);
|
|
41680
|
-
var path2 = __toESM(require("path"), 1);
|
|
41705
|
+
// ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.38/node_modules/@vercel/queue/dist/index.mjs
|
|
41681
41706
|
var import_oidc = __toESM(require_dist(), 1);
|
|
41682
41707
|
async function streamToBuffer(stream) {
|
|
41683
41708
|
let totalLength = 0;
|
|
@@ -41750,15 +41775,6 @@ var MessageCorruptedError = class extends Error {
|
|
|
41750
41775
|
this.name = "MessageCorruptedError";
|
|
41751
41776
|
}
|
|
41752
41777
|
};
|
|
41753
|
-
var QueueEmptyError = class extends Error {
|
|
41754
|
-
static {
|
|
41755
|
-
__name(this, "QueueEmptyError");
|
|
41756
|
-
}
|
|
41757
|
-
constructor(queueName, consumerGroup) {
|
|
41758
|
-
super(`No messages available in queue "${queueName}" for consumer group "${consumerGroup}"`);
|
|
41759
|
-
this.name = "QueueEmptyError";
|
|
41760
|
-
}
|
|
41761
|
-
};
|
|
41762
41778
|
var UnauthorizedError = class extends Error {
|
|
41763
41779
|
static {
|
|
41764
41780
|
__name(this, "UnauthorizedError");
|
|
@@ -41813,21 +41829,6 @@ var MessageAlreadyProcessedError = class extends Error {
|
|
|
41813
41829
|
this.name = "MessageAlreadyProcessedError";
|
|
41814
41830
|
}
|
|
41815
41831
|
};
|
|
41816
|
-
var ConcurrencyLimitError = class extends Error {
|
|
41817
|
-
static {
|
|
41818
|
-
__name(this, "ConcurrencyLimitError");
|
|
41819
|
-
}
|
|
41820
|
-
/** Current number of in-flight messages for this consumer group. */
|
|
41821
|
-
currentInflight;
|
|
41822
|
-
/** Maximum allowed concurrent messages (as configured). */
|
|
41823
|
-
maxConcurrency;
|
|
41824
|
-
constructor(message = "Concurrency limit exceeded", currentInflight, maxConcurrency) {
|
|
41825
|
-
super(message);
|
|
41826
|
-
this.name = "ConcurrencyLimitError";
|
|
41827
|
-
this.currentInflight = currentInflight;
|
|
41828
|
-
this.maxConcurrency = maxConcurrency;
|
|
41829
|
-
}
|
|
41830
|
-
};
|
|
41831
41832
|
var DuplicateMessageError = class extends Error {
|
|
41832
41833
|
static {
|
|
41833
41834
|
__name(this, "DuplicateMessageError");
|
|
@@ -41860,185 +41861,10 @@ var ConsumerRegistryNotConfiguredError = class extends Error {
|
|
|
41860
41861
|
}
|
|
41861
41862
|
};
|
|
41862
41863
|
var ROUTE_MAPPINGS_KEY = Symbol.for("@vercel/queue.devRouteMappings");
|
|
41863
|
-
function filePathToUrlPath(filePath) {
|
|
41864
|
-
let urlPath = filePath.replace(/^app\//, "/").replace(/^pages\//, "/").replace(/\/route\.(ts|js|tsx|jsx)$/, "").replace(/\.(ts|js|tsx|jsx)$/, "");
|
|
41865
|
-
if (!urlPath.startsWith("/")) {
|
|
41866
|
-
urlPath = "/" + urlPath;
|
|
41867
|
-
}
|
|
41868
|
-
return urlPath;
|
|
41869
|
-
}
|
|
41870
|
-
__name(filePathToUrlPath, "filePathToUrlPath");
|
|
41871
|
-
function getDevRouteMappings() {
|
|
41872
|
-
const g = globalThis;
|
|
41873
|
-
if (ROUTE_MAPPINGS_KEY in g) {
|
|
41874
|
-
return g[ROUTE_MAPPINGS_KEY] ?? null;
|
|
41875
|
-
}
|
|
41876
|
-
try {
|
|
41877
|
-
const vercelJsonPath = path2.join(process.cwd(), "vercel.json");
|
|
41878
|
-
if (!fs.existsSync(vercelJsonPath)) {
|
|
41879
|
-
g[ROUTE_MAPPINGS_KEY] = null;
|
|
41880
|
-
return null;
|
|
41881
|
-
}
|
|
41882
|
-
const vercelJson = JSON.parse(fs.readFileSync(vercelJsonPath, "utf-8"));
|
|
41883
|
-
if (!vercelJson.functions) {
|
|
41884
|
-
g[ROUTE_MAPPINGS_KEY] = null;
|
|
41885
|
-
return null;
|
|
41886
|
-
}
|
|
41887
|
-
const mappings = [];
|
|
41888
|
-
for (const [filePath, config3] of Object.entries(vercelJson.functions)) {
|
|
41889
|
-
if (!config3.experimentalTriggers)
|
|
41890
|
-
continue;
|
|
41891
|
-
for (const trigger of config3.experimentalTriggers) {
|
|
41892
|
-
if (trigger.type?.startsWith("queue/") && trigger.topic && trigger.consumer) {
|
|
41893
|
-
mappings.push({
|
|
41894
|
-
urlPath: filePathToUrlPath(filePath),
|
|
41895
|
-
topic: trigger.topic,
|
|
41896
|
-
consumer: trigger.consumer
|
|
41897
|
-
});
|
|
41898
|
-
}
|
|
41899
|
-
}
|
|
41900
|
-
}
|
|
41901
|
-
g[ROUTE_MAPPINGS_KEY] = mappings.length > 0 ? mappings : null;
|
|
41902
|
-
return g[ROUTE_MAPPINGS_KEY];
|
|
41903
|
-
}
|
|
41904
|
-
catch (error45) {
|
|
41905
|
-
console.warn("[Dev Mode] Failed to read vercel.json:", error45);
|
|
41906
|
-
g[ROUTE_MAPPINGS_KEY] = null;
|
|
41907
|
-
return null;
|
|
41908
|
-
}
|
|
41909
|
-
}
|
|
41910
|
-
__name(getDevRouteMappings, "getDevRouteMappings");
|
|
41911
|
-
function findMatchingRoutes(topicName) {
|
|
41912
|
-
const mappings = getDevRouteMappings();
|
|
41913
|
-
if (!mappings) {
|
|
41914
|
-
return [];
|
|
41915
|
-
}
|
|
41916
|
-
return mappings.filter((mapping) => {
|
|
41917
|
-
if (mapping.topic.includes("*")) {
|
|
41918
|
-
return matchesWildcardPattern(topicName, mapping.topic);
|
|
41919
|
-
}
|
|
41920
|
-
return mapping.topic === topicName;
|
|
41921
|
-
});
|
|
41922
|
-
}
|
|
41923
|
-
__name(findMatchingRoutes, "findMatchingRoutes");
|
|
41924
41864
|
function isDevMode() {
|
|
41925
41865
|
return process.env.NODE_ENV === "development";
|
|
41926
41866
|
}
|
|
41927
41867
|
__name(isDevMode, "isDevMode");
|
|
41928
|
-
var DEV_VISIBILITY_POLL_INTERVAL = 50;
|
|
41929
|
-
var DEV_VISIBILITY_MAX_WAIT = 5e3;
|
|
41930
|
-
var DEV_VISIBILITY_BACKOFF_MULTIPLIER = 2;
|
|
41931
|
-
async function waitForMessageVisibility(topicName, consumerGroup, messageId) {
|
|
41932
|
-
const client = new QueueClient();
|
|
41933
|
-
const transport = new JsonTransport();
|
|
41934
|
-
let elapsed = 0;
|
|
41935
|
-
let interval = DEV_VISIBILITY_POLL_INTERVAL;
|
|
41936
|
-
while (elapsed < DEV_VISIBILITY_MAX_WAIT) {
|
|
41937
|
-
try {
|
|
41938
|
-
await client.receiveMessageById({
|
|
41939
|
-
queueName: topicName,
|
|
41940
|
-
consumerGroup,
|
|
41941
|
-
messageId,
|
|
41942
|
-
visibilityTimeoutSeconds: 0
|
|
41943
|
-
}, transport);
|
|
41944
|
-
return true;
|
|
41945
|
-
}
|
|
41946
|
-
catch (error45) {
|
|
41947
|
-
if (error45 instanceof MessageNotFoundError) {
|
|
41948
|
-
await new Promise((resolve2) => setTimeout(resolve2, interval));
|
|
41949
|
-
elapsed += interval;
|
|
41950
|
-
interval = Math.min(interval * DEV_VISIBILITY_BACKOFF_MULTIPLIER, DEV_VISIBILITY_MAX_WAIT - elapsed);
|
|
41951
|
-
continue;
|
|
41952
|
-
}
|
|
41953
|
-
if (error45 instanceof MessageAlreadyProcessedError) {
|
|
41954
|
-
console.log(`[Dev Mode] Message already processed: topic="${topicName}" messageId="${messageId}"`);
|
|
41955
|
-
return false;
|
|
41956
|
-
}
|
|
41957
|
-
console.error(`[Dev Mode] Error polling for message visibility: topic="${topicName}" messageId="${messageId}"`, error45);
|
|
41958
|
-
return false;
|
|
41959
|
-
}
|
|
41960
|
-
}
|
|
41961
|
-
console.warn(`[Dev Mode] Message visibility timeout after ${DEV_VISIBILITY_MAX_WAIT}ms: topic="${topicName}" messageId="${messageId}"`);
|
|
41962
|
-
return false;
|
|
41963
|
-
}
|
|
41964
|
-
__name(waitForMessageVisibility, "waitForMessageVisibility");
|
|
41965
|
-
function triggerDevCallbacks(topicName, messageId, delaySeconds) {
|
|
41966
|
-
if (delaySeconds && delaySeconds > 0) {
|
|
41967
|
-
console.log(`[Dev Mode] Message sent with delay: topic="${topicName}" messageId="${messageId}" delay=${delaySeconds}s`);
|
|
41968
|
-
setTimeout(() => {
|
|
41969
|
-
triggerDevCallbacks(topicName, messageId);
|
|
41970
|
-
}, delaySeconds * 1e3);
|
|
41971
|
-
return;
|
|
41972
|
-
}
|
|
41973
|
-
console.log(`[Dev Mode] Message sent: topic="${topicName}" messageId="${messageId}"`);
|
|
41974
|
-
const matchingRoutes = findMatchingRoutes(topicName);
|
|
41975
|
-
if (matchingRoutes.length === 0) {
|
|
41976
|
-
console.log(`[Dev Mode] No matching routes in vercel.json for topic "${topicName}"`);
|
|
41977
|
-
return;
|
|
41978
|
-
}
|
|
41979
|
-
const consumerGroups = matchingRoutes.map((r) => r.consumer);
|
|
41980
|
-
console.log(`[Dev Mode] Scheduling callbacks for topic="${topicName}" messageId="${messageId}" \u2192 consumers: [${consumerGroups.join(", ")}]`);
|
|
41981
|
-
(async () => {
|
|
41982
|
-
const firstRoute = matchingRoutes[0];
|
|
41983
|
-
const isVisible = await waitForMessageVisibility(topicName, firstRoute.consumer, messageId);
|
|
41984
|
-
if (!isVisible) {
|
|
41985
|
-
console.warn(`[Dev Mode] Skipping callbacks - message not visible: topic="${topicName}" messageId="${messageId}"`);
|
|
41986
|
-
return;
|
|
41987
|
-
}
|
|
41988
|
-
const port = process.env.PORT || 3e3;
|
|
41989
|
-
const baseUrl = `http://localhost:${port}`;
|
|
41990
|
-
for (const route of matchingRoutes) {
|
|
41991
|
-
const url2 = `${baseUrl}${route.urlPath}`;
|
|
41992
|
-
console.log(`[Dev Mode] Invoking handler: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" url="${url2}"`);
|
|
41993
|
-
const cloudEvent = {
|
|
41994
|
-
type: "com.vercel.queue.v1beta",
|
|
41995
|
-
source: `/topic/${topicName}/consumer/${route.consumer}`,
|
|
41996
|
-
id: messageId,
|
|
41997
|
-
datacontenttype: "application/json",
|
|
41998
|
-
data: {
|
|
41999
|
-
messageId,
|
|
42000
|
-
queueName: topicName,
|
|
42001
|
-
consumerGroup: route.consumer
|
|
42002
|
-
},
|
|
42003
|
-
time: /* @__PURE__ */ ( /* @__PURE__ */new Date()).toISOString(),
|
|
42004
|
-
specversion: "1.0"
|
|
42005
|
-
};
|
|
42006
|
-
try {
|
|
42007
|
-
const response = await fetch(url2, {
|
|
42008
|
-
method: "POST",
|
|
42009
|
-
headers: {
|
|
42010
|
-
"Content-Type": "application/cloudevents+json"
|
|
42011
|
-
},
|
|
42012
|
-
body: JSON.stringify(cloudEvent)
|
|
42013
|
-
});
|
|
42014
|
-
if (response.ok) {
|
|
42015
|
-
try {
|
|
42016
|
-
const responseData = await response.json();
|
|
42017
|
-
if (responseData.status === "success") {
|
|
42018
|
-
console.log(`[Dev Mode] \u2713 Message processed successfully: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}"`);
|
|
42019
|
-
}
|
|
42020
|
-
}
|
|
42021
|
-
catch {
|
|
42022
|
-
console.warn(`[Dev Mode] Handler returned OK but response was not JSON: topic="${topicName}" consumer="${route.consumer}"`);
|
|
42023
|
-
}
|
|
42024
|
-
}
|
|
42025
|
-
else {
|
|
42026
|
-
try {
|
|
42027
|
-
const errorData = await response.json();
|
|
42028
|
-
console.error(`[Dev Mode] \u2717 Handler failed: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" error="${errorData.error || response.statusText}"`);
|
|
42029
|
-
}
|
|
42030
|
-
catch {
|
|
42031
|
-
console.error(`[Dev Mode] \u2717 Handler failed: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" status=${response.status}`);
|
|
42032
|
-
}
|
|
42033
|
-
}
|
|
42034
|
-
}
|
|
42035
|
-
catch (error45) {
|
|
42036
|
-
console.error(`[Dev Mode] \u2717 HTTP request failed: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" url="${url2}"`, error45);
|
|
42037
|
-
}
|
|
42038
|
-
}
|
|
42039
|
-
})();
|
|
42040
|
-
}
|
|
42041
|
-
__name(triggerDevCallbacks, "triggerDevCallbacks");
|
|
42042
41868
|
function clearDevRouteMappings() {
|
|
42043
41869
|
const g = globalThis;
|
|
42044
41870
|
delete g[ROUTE_MAPPINGS_KEY];
|
|
@@ -42113,6 +41939,7 @@ var QueueClient = class {
|
|
|
42113
41939
|
providedToken;
|
|
42114
41940
|
defaultDeploymentId;
|
|
42115
41941
|
pinToDeployment;
|
|
41942
|
+
transport;
|
|
42116
41943
|
constructor(options = {}) {
|
|
42117
41944
|
this.baseUrl = options.baseUrl || process.env.VERCEL_QUEUE_BASE_URL || "https://vercel-queue.com";
|
|
42118
41945
|
this.basePath = options.basePath || process.env.VERCEL_QUEUE_BASE_PATH || "/api/v3/topic";
|
|
@@ -42120,6 +41947,10 @@ var QueueClient = class {
|
|
|
42120
41947
|
this.providedToken = options.token;
|
|
42121
41948
|
this.defaultDeploymentId = options.deploymentId || process.env.VERCEL_DEPLOYMENT_ID;
|
|
42122
41949
|
this.pinToDeployment = options.pinToDeployment ?? true;
|
|
41950
|
+
this.transport = options.transport || new JsonTransport();
|
|
41951
|
+
}
|
|
41952
|
+
getTransport() {
|
|
41953
|
+
return this.transport;
|
|
42123
41954
|
}
|
|
42124
41955
|
getSendDeploymentId() {
|
|
42125
41956
|
if (isDevMode()) {
|
|
@@ -42177,6 +42008,8 @@ var QueueClient = class {
|
|
|
42177
42008
|
}
|
|
42178
42009
|
console.debug("[VQS Debug] Request:", JSON.stringify(logData, null, 2));
|
|
42179
42010
|
}
|
|
42011
|
+
init.headers.set("User-Agent", `@vercel/queue/${"0.0.0-alpha.38"}`);
|
|
42012
|
+
init.headers.set("Vqs-Client-Ts", /* @__PURE__ */ ( /* @__PURE__ */new Date()).toISOString());
|
|
42180
42013
|
const response = await fetch(url2, init);
|
|
42181
42014
|
if (isDebugEnabled()) {
|
|
42182
42015
|
const logData = {
|
|
@@ -42199,7 +42032,6 @@ var QueueClient = class {
|
|
|
42199
42032
|
* @param options.idempotencyKey - Optional deduplication key (dedup window: min(retention, 24h))
|
|
42200
42033
|
* @param options.retentionSeconds - Message TTL (default: 86400, min: 60, max: 86400)
|
|
42201
42034
|
* @param options.delaySeconds - Delivery delay (default: 0, max: retentionSeconds)
|
|
42202
|
-
* @param transport - Serializer for the payload
|
|
42203
42035
|
* @returns Promise with the generated messageId
|
|
42204
42036
|
* @throws {DuplicateMessageError} When idempotency key was already used
|
|
42205
42037
|
* @throws {ConsumerDiscoveryError} When consumer discovery fails
|
|
@@ -42209,7 +42041,8 @@ var QueueClient = class {
|
|
|
42209
42041
|
* @throws {ForbiddenError} When access is denied
|
|
42210
42042
|
* @throws {InternalServerError} When server encounters an error
|
|
42211
42043
|
*/
|
|
42212
|
-
async sendMessage(options
|
|
42044
|
+
async sendMessage(options) {
|
|
42045
|
+
const transport = this.transport;
|
|
42213
42046
|
const { queueName, payload, idempotencyKey, retentionSeconds, delaySeconds, headers: optionHeaders } = options;
|
|
42214
42047
|
const headers = new Headers();
|
|
42215
42048
|
if (this.customHeaders) {
|
|
@@ -42275,24 +42108,25 @@ var QueueClient = class {
|
|
|
42275
42108
|
/**
|
|
42276
42109
|
* Receive messages from a topic as an async generator.
|
|
42277
42110
|
*
|
|
42111
|
+
* When the queue is empty, the generator completes without yielding any
|
|
42112
|
+
* messages. Callers should handle the case where no messages are yielded.
|
|
42113
|
+
*
|
|
42278
42114
|
* @param options - Receive options
|
|
42279
42115
|
* @param options.queueName - Topic name (pattern: `[A-Za-z0-9_-]+`)
|
|
42280
42116
|
* @param options.consumerGroup - Consumer group name (pattern: `[A-Za-z0-9_-]+`)
|
|
42281
42117
|
* @param options.visibilityTimeoutSeconds - Lock duration (default: 30, min: 0, max: 3600)
|
|
42282
42118
|
* @param options.limit - Max messages to retrieve (default: 1, min: 1, max: 10)
|
|
42283
|
-
* @param options.maxConcurrency - Max in-flight messages (default: unlimited, min: 1)
|
|
42284
|
-
* @param transport - Deserializer for message payloads
|
|
42285
42119
|
* @yields Message objects with payload, messageId, receiptHandle, etc.
|
|
42286
|
-
*
|
|
42120
|
+
* Yields nothing if queue is empty.
|
|
42287
42121
|
* @throws {InvalidLimitError} When limit is outside 1-10 range
|
|
42288
|
-
* @throws {ConcurrencyLimitError} When maxConcurrency exceeded
|
|
42289
42122
|
* @throws {BadRequestError} When parameters are invalid
|
|
42290
42123
|
* @throws {UnauthorizedError} When authentication fails
|
|
42291
42124
|
* @throws {ForbiddenError} When access is denied
|
|
42292
42125
|
* @throws {InternalServerError} When server encounters an error
|
|
42293
42126
|
*/
|
|
42294
|
-
async *receiveMessages(options
|
|
42295
|
-
const
|
|
42127
|
+
async *receiveMessages(options) {
|
|
42128
|
+
const transport = this.transport;
|
|
42129
|
+
const { queueName, consumerGroup, visibilityTimeoutSeconds, limit } = options;
|
|
42296
42130
|
if (limit !== void 0 && (limit < 1 || limit > 10)) {
|
|
42297
42131
|
throw new InvalidLimitError(limit);
|
|
42298
42132
|
}
|
|
@@ -42307,9 +42141,6 @@ var QueueClient = class {
|
|
|
42307
42141
|
if (limit !== void 0) {
|
|
42308
42142
|
headers.set("Vqs-Max-Messages", limit.toString());
|
|
42309
42143
|
}
|
|
42310
|
-
if (maxConcurrency !== void 0) {
|
|
42311
|
-
headers.set("Vqs-Max-Concurrency", maxConcurrency.toString());
|
|
42312
|
-
}
|
|
42313
42144
|
const effectiveDeploymentId = this.getConsumeDeploymentId();
|
|
42314
42145
|
if (effectiveDeploymentId) {
|
|
42315
42146
|
headers.set("Vqs-Deployment-Id", effectiveDeploymentId);
|
|
@@ -42319,19 +42150,10 @@ var QueueClient = class {
|
|
|
42319
42150
|
headers
|
|
42320
42151
|
});
|
|
42321
42152
|
if (response.status === 204) {
|
|
42322
|
-
|
|
42153
|
+
return;
|
|
42323
42154
|
}
|
|
42324
42155
|
if (!response.ok) {
|
|
42325
42156
|
const errorText = await response.text();
|
|
42326
|
-
if (response.status === 429) {
|
|
42327
|
-
let errorData = {};
|
|
42328
|
-
try {
|
|
42329
|
-
errorData = JSON.parse(errorText);
|
|
42330
|
-
}
|
|
42331
|
-
catch {
|
|
42332
|
-
}
|
|
42333
|
-
throw new ConcurrencyLimitError(errorData.error || "Concurrency limit exceeded or throttled", errorData.currentInflight, errorData.maxConcurrency);
|
|
42334
|
-
}
|
|
42335
42157
|
throwCommonHttpError(response.status, response.statusText, errorText, "receive messages");
|
|
42336
42158
|
}
|
|
42337
42159
|
for await (const multipartMessage of parseMultipartStream(response)) {
|
|
@@ -42363,20 +42185,18 @@ var QueueClient = class {
|
|
|
42363
42185
|
* @param options.consumerGroup - Consumer group name (pattern: `[A-Za-z0-9_-]+`)
|
|
42364
42186
|
* @param options.messageId - Message ID to retrieve
|
|
42365
42187
|
* @param options.visibilityTimeoutSeconds - Lock duration (default: 30, min: 0, max: 3600)
|
|
42366
|
-
* @param options.maxConcurrency - Max in-flight messages (default: unlimited, min: 1)
|
|
42367
|
-
* @param transport - Deserializer for the message payload
|
|
42368
42188
|
* @returns Promise with the message
|
|
42369
42189
|
* @throws {MessageNotFoundError} When message doesn't exist
|
|
42370
42190
|
* @throws {MessageNotAvailableError} When message is in wrong state or was a duplicate
|
|
42371
42191
|
* @throws {MessageAlreadyProcessedError} When message was already processed
|
|
42372
|
-
* @throws {ConcurrencyLimitError} When maxConcurrency exceeded
|
|
42373
42192
|
* @throws {BadRequestError} When parameters are invalid
|
|
42374
42193
|
* @throws {UnauthorizedError} When authentication fails
|
|
42375
42194
|
* @throws {ForbiddenError} When access is denied
|
|
42376
42195
|
* @throws {InternalServerError} When server encounters an error
|
|
42377
42196
|
*/
|
|
42378
|
-
async receiveMessageById(options
|
|
42379
|
-
const
|
|
42197
|
+
async receiveMessageById(options) {
|
|
42198
|
+
const transport = this.transport;
|
|
42199
|
+
const { queueName, consumerGroup, messageId, visibilityTimeoutSeconds } = options;
|
|
42380
42200
|
const headers = new Headers({
|
|
42381
42201
|
Authorization: `Bearer ${await this.getToken()}`,
|
|
42382
42202
|
Accept: "multipart/mixed",
|
|
@@ -42385,9 +42205,6 @@ var QueueClient = class {
|
|
|
42385
42205
|
if (visibilityTimeoutSeconds !== void 0) {
|
|
42386
42206
|
headers.set("Vqs-Visibility-Timeout-Seconds", visibilityTimeoutSeconds.toString());
|
|
42387
42207
|
}
|
|
42388
|
-
if (maxConcurrency !== void 0) {
|
|
42389
|
-
headers.set("Vqs-Max-Concurrency", maxConcurrency.toString());
|
|
42390
|
-
}
|
|
42391
42208
|
const effectiveDeploymentId = this.getConsumeDeploymentId();
|
|
42392
42209
|
if (effectiveDeploymentId) {
|
|
42393
42210
|
headers.set("Vqs-Deployment-Id", effectiveDeploymentId);
|
|
@@ -42416,15 +42233,6 @@ var QueueClient = class {
|
|
|
42416
42233
|
if (response.status === 410) {
|
|
42417
42234
|
throw new MessageAlreadyProcessedError(messageId);
|
|
42418
42235
|
}
|
|
42419
|
-
if (response.status === 429) {
|
|
42420
|
-
let errorData = {};
|
|
42421
|
-
try {
|
|
42422
|
-
errorData = JSON.parse(errorText);
|
|
42423
|
-
}
|
|
42424
|
-
catch {
|
|
42425
|
-
}
|
|
42426
|
-
throw new ConcurrencyLimitError(errorData.error || "Concurrency limit exceeded or throttled", errorData.currentInflight, errorData.maxConcurrency);
|
|
42427
|
-
}
|
|
42428
42236
|
throwCommonHttpError(response.status, response.statusText, errorText, "receive message by ID");
|
|
42429
42237
|
}
|
|
42430
42238
|
for await (const multipartMessage of parseMultipartStream(response)) {
|
|
@@ -42577,449 +42385,6 @@ var QueueClient = class {
|
|
|
42577
42385
|
};
|
|
42578
42386
|
}
|
|
42579
42387
|
};
|
|
42580
|
-
var ConsumerGroup = class {
|
|
42581
|
-
static {
|
|
42582
|
-
__name(this, "ConsumerGroup");
|
|
42583
|
-
}
|
|
42584
|
-
client;
|
|
42585
|
-
topicName;
|
|
42586
|
-
consumerGroupName;
|
|
42587
|
-
visibilityTimeout;
|
|
42588
|
-
refreshInterval;
|
|
42589
|
-
transport;
|
|
42590
|
-
/**
|
|
42591
|
-
* Create a new ConsumerGroup instance.
|
|
42592
|
-
*
|
|
42593
|
-
* @param client - QueueClient instance to use for API calls
|
|
42594
|
-
* @param topicName - Name of the topic to consume from (pattern: `[A-Za-z0-9_-]+`)
|
|
42595
|
-
* @param consumerGroupName - Name of the consumer group (pattern: `[A-Za-z0-9_-]+`)
|
|
42596
|
-
* @param options - Optional configuration
|
|
42597
|
-
* @param options.transport - Payload serializer (default: JsonTransport)
|
|
42598
|
-
* @param options.visibilityTimeoutSeconds - Message lock duration (default: 30, max: 3600)
|
|
42599
|
-
* @param options.visibilityRefreshInterval - Lock refresh interval in seconds (default: visibilityTimeout / 3)
|
|
42600
|
-
*/
|
|
42601
|
-
constructor(client, topicName, consumerGroupName, options = {}) {
|
|
42602
|
-
this.client = client;
|
|
42603
|
-
this.topicName = topicName;
|
|
42604
|
-
this.consumerGroupName = consumerGroupName;
|
|
42605
|
-
this.visibilityTimeout = options.visibilityTimeoutSeconds ?? 30;
|
|
42606
|
-
this.refreshInterval = options.visibilityRefreshInterval ?? Math.floor(this.visibilityTimeout / 3);
|
|
42607
|
-
this.transport = options.transport || new JsonTransport();
|
|
42608
|
-
}
|
|
42609
|
-
/**
|
|
42610
|
-
* Starts a background loop that periodically extends the visibility timeout for a message.
|
|
42611
|
-
*/
|
|
42612
|
-
startVisibilityExtension(receiptHandle) {
|
|
42613
|
-
let isRunning = true;
|
|
42614
|
-
let isResolved = false;
|
|
42615
|
-
let resolveLifecycle;
|
|
42616
|
-
let timeoutId = null;
|
|
42617
|
-
const lifecyclePromise = new Promise((resolve2) => {
|
|
42618
|
-
resolveLifecycle = resolve2;
|
|
42619
|
-
});
|
|
42620
|
-
const safeResolve = /* @__PURE__ */ __name(() => {
|
|
42621
|
-
if (!isResolved) {
|
|
42622
|
-
isResolved = true;
|
|
42623
|
-
resolveLifecycle();
|
|
42624
|
-
}
|
|
42625
|
-
}, "safeResolve");
|
|
42626
|
-
const extend2 = /* @__PURE__ */ __name(async () => {
|
|
42627
|
-
if (!isRunning) {
|
|
42628
|
-
safeResolve();
|
|
42629
|
-
return;
|
|
42630
|
-
}
|
|
42631
|
-
try {
|
|
42632
|
-
await this.client.changeVisibility({
|
|
42633
|
-
queueName: this.topicName,
|
|
42634
|
-
consumerGroup: this.consumerGroupName,
|
|
42635
|
-
receiptHandle,
|
|
42636
|
-
visibilityTimeoutSeconds: this.visibilityTimeout
|
|
42637
|
-
});
|
|
42638
|
-
if (isRunning) {
|
|
42639
|
-
timeoutId = setTimeout(() => extend2(), this.refreshInterval * 1e3);
|
|
42640
|
-
}
|
|
42641
|
-
else {
|
|
42642
|
-
safeResolve();
|
|
42643
|
-
}
|
|
42644
|
-
}
|
|
42645
|
-
catch (error45) {
|
|
42646
|
-
console.error(`Failed to extend visibility for receipt handle ${receiptHandle}:`, error45);
|
|
42647
|
-
safeResolve();
|
|
42648
|
-
}
|
|
42649
|
-
}, "extend");
|
|
42650
|
-
timeoutId = setTimeout(() => extend2(), this.refreshInterval * 1e3);
|
|
42651
|
-
return async (waitForCompletion = false) => {
|
|
42652
|
-
isRunning = false;
|
|
42653
|
-
if (timeoutId) {
|
|
42654
|
-
clearTimeout(timeoutId);
|
|
42655
|
-
timeoutId = null;
|
|
42656
|
-
}
|
|
42657
|
-
if (waitForCompletion) {
|
|
42658
|
-
await lifecyclePromise;
|
|
42659
|
-
}
|
|
42660
|
-
else {
|
|
42661
|
-
safeResolve();
|
|
42662
|
-
}
|
|
42663
|
-
};
|
|
42664
|
-
}
|
|
42665
|
-
async processMessage(message, handler) {
|
|
42666
|
-
const stopExtension = this.startVisibilityExtension(message.receiptHandle);
|
|
42667
|
-
try {
|
|
42668
|
-
await handler(message.payload, {
|
|
42669
|
-
messageId: message.messageId,
|
|
42670
|
-
deliveryCount: message.deliveryCount,
|
|
42671
|
-
createdAt: message.createdAt,
|
|
42672
|
-
topicName: this.topicName,
|
|
42673
|
-
consumerGroup: this.consumerGroupName
|
|
42674
|
-
});
|
|
42675
|
-
await stopExtension();
|
|
42676
|
-
await this.client.deleteMessage({
|
|
42677
|
-
queueName: this.topicName,
|
|
42678
|
-
consumerGroup: this.consumerGroupName,
|
|
42679
|
-
receiptHandle: message.receiptHandle
|
|
42680
|
-
});
|
|
42681
|
-
}
|
|
42682
|
-
catch (error45) {
|
|
42683
|
-
await stopExtension();
|
|
42684
|
-
if (this.transport.finalize && message.payload !== void 0 && message.payload !== null) {
|
|
42685
|
-
try {
|
|
42686
|
-
await this.transport.finalize(message.payload);
|
|
42687
|
-
}
|
|
42688
|
-
catch (finalizeError) {
|
|
42689
|
-
console.warn("Failed to finalize message payload:", finalizeError);
|
|
42690
|
-
}
|
|
42691
|
-
}
|
|
42692
|
-
throw error45;
|
|
42693
|
-
}
|
|
42694
|
-
}
|
|
42695
|
-
async consume(handler, options) {
|
|
42696
|
-
if (options?.messageId) {
|
|
42697
|
-
const response = await this.client.receiveMessageById({
|
|
42698
|
-
queueName: this.topicName,
|
|
42699
|
-
consumerGroup: this.consumerGroupName,
|
|
42700
|
-
messageId: options.messageId,
|
|
42701
|
-
visibilityTimeoutSeconds: this.visibilityTimeout
|
|
42702
|
-
}, this.transport);
|
|
42703
|
-
await this.processMessage(response.message, handler);
|
|
42704
|
-
}
|
|
42705
|
-
else {
|
|
42706
|
-
let messageFound = false;
|
|
42707
|
-
for await (const message of this.client.receiveMessages({
|
|
42708
|
-
queueName: this.topicName,
|
|
42709
|
-
consumerGroup: this.consumerGroupName,
|
|
42710
|
-
visibilityTimeoutSeconds: this.visibilityTimeout,
|
|
42711
|
-
limit: 1
|
|
42712
|
-
}, this.transport)) {
|
|
42713
|
-
messageFound = true;
|
|
42714
|
-
await this.processMessage(message, handler);
|
|
42715
|
-
break;
|
|
42716
|
-
}
|
|
42717
|
-
if (!messageFound) {
|
|
42718
|
-
throw new Error("No messages available");
|
|
42719
|
-
}
|
|
42720
|
-
}
|
|
42721
|
-
}
|
|
42722
|
-
/**
|
|
42723
|
-
* Get the consumer group name
|
|
42724
|
-
*/
|
|
42725
|
-
get name() {
|
|
42726
|
-
return this.consumerGroupName;
|
|
42727
|
-
}
|
|
42728
|
-
/**
|
|
42729
|
-
* Get the topic name this consumer group is subscribed to
|
|
42730
|
-
*/
|
|
42731
|
-
get topic() {
|
|
42732
|
-
return this.topicName;
|
|
42733
|
-
}
|
|
42734
|
-
};
|
|
42735
|
-
var Topic = class {
|
|
42736
|
-
static {
|
|
42737
|
-
__name(this, "Topic");
|
|
42738
|
-
}
|
|
42739
|
-
client;
|
|
42740
|
-
topicName;
|
|
42741
|
-
transport;
|
|
42742
|
-
/**
|
|
42743
|
-
* Create a new Topic instance
|
|
42744
|
-
* @param client QueueClient instance to use for API calls
|
|
42745
|
-
* @param topicName Name of the topic to work with
|
|
42746
|
-
* @param transport Optional serializer/deserializer for the payload (defaults to JSON)
|
|
42747
|
-
*/
|
|
42748
|
-
constructor(client, topicName, transport) {
|
|
42749
|
-
this.client = client;
|
|
42750
|
-
this.topicName = topicName;
|
|
42751
|
-
this.transport = transport || new JsonTransport();
|
|
42752
|
-
}
|
|
42753
|
-
/**
|
|
42754
|
-
* Publish a message to the topic
|
|
42755
|
-
* @param payload The data to publish
|
|
42756
|
-
* @param options Optional publish options
|
|
42757
|
-
* @returns An object containing the message ID
|
|
42758
|
-
* @throws {BadRequestError} When request parameters are invalid
|
|
42759
|
-
* @throws {UnauthorizedError} When authentication fails
|
|
42760
|
-
* @throws {ForbiddenError} When access is denied (environment mismatch)
|
|
42761
|
-
* @throws {InternalServerError} When server encounters an error
|
|
42762
|
-
*/
|
|
42763
|
-
async publish(payload, options) {
|
|
42764
|
-
const result = await this.client.sendMessage({
|
|
42765
|
-
queueName: this.topicName,
|
|
42766
|
-
payload,
|
|
42767
|
-
idempotencyKey: options?.idempotencyKey,
|
|
42768
|
-
retentionSeconds: options?.retentionSeconds,
|
|
42769
|
-
delaySeconds: options?.delaySeconds,
|
|
42770
|
-
headers: options?.headers
|
|
42771
|
-
}, this.transport);
|
|
42772
|
-
if (isDevMode()) {
|
|
42773
|
-
triggerDevCallbacks(this.topicName, result.messageId);
|
|
42774
|
-
}
|
|
42775
|
-
return {
|
|
42776
|
-
messageId: result.messageId
|
|
42777
|
-
};
|
|
42778
|
-
}
|
|
42779
|
-
/**
|
|
42780
|
-
* Create a consumer group for this topic
|
|
42781
|
-
* @param consumerGroupName Name of the consumer group
|
|
42782
|
-
* @param options Optional configuration for the consumer group
|
|
42783
|
-
* @returns A ConsumerGroup instance
|
|
42784
|
-
*/
|
|
42785
|
-
consumerGroup(consumerGroupName, options) {
|
|
42786
|
-
const consumerOptions = {
|
|
42787
|
-
...options,
|
|
42788
|
-
transport: options?.transport || this.transport
|
|
42789
|
-
};
|
|
42790
|
-
return new ConsumerGroup(this.client, this.topicName, consumerGroupName, consumerOptions);
|
|
42791
|
-
}
|
|
42792
|
-
/**
|
|
42793
|
-
* Get the topic name
|
|
42794
|
-
*/
|
|
42795
|
-
get name() {
|
|
42796
|
-
return this.topicName;
|
|
42797
|
-
}
|
|
42798
|
-
/**
|
|
42799
|
-
* Get the transport used by this topic
|
|
42800
|
-
*/
|
|
42801
|
-
get serializer() {
|
|
42802
|
-
return this.transport;
|
|
42803
|
-
}
|
|
42804
|
-
};
|
|
42805
|
-
function validateWildcardPattern(pattern) {
|
|
42806
|
-
const firstIndex = pattern.indexOf("*");
|
|
42807
|
-
const lastIndex = pattern.lastIndexOf("*");
|
|
42808
|
-
if (firstIndex !== lastIndex) {
|
|
42809
|
-
return false;
|
|
42810
|
-
}
|
|
42811
|
-
if (firstIndex === -1) {
|
|
42812
|
-
return false;
|
|
42813
|
-
}
|
|
42814
|
-
if (firstIndex !== pattern.length - 1) {
|
|
42815
|
-
return false;
|
|
42816
|
-
}
|
|
42817
|
-
return true;
|
|
42818
|
-
}
|
|
42819
|
-
__name(validateWildcardPattern, "validateWildcardPattern");
|
|
42820
|
-
function matchesWildcardPattern(topicName, pattern) {
|
|
42821
|
-
const prefix = pattern.slice(0, -1);
|
|
42822
|
-
return topicName.startsWith(prefix);
|
|
42823
|
-
}
|
|
42824
|
-
__name(matchesWildcardPattern, "matchesWildcardPattern");
|
|
42825
|
-
function findTopicHandler(queueName, handlers) {
|
|
42826
|
-
const exactHandler = handlers[queueName];
|
|
42827
|
-
if (exactHandler) {
|
|
42828
|
-
return exactHandler;
|
|
42829
|
-
}
|
|
42830
|
-
for (const pattern in handlers) {
|
|
42831
|
-
if (pattern.includes("*") && matchesWildcardPattern(queueName, pattern)) {
|
|
42832
|
-
return handlers[pattern];
|
|
42833
|
-
}
|
|
42834
|
-
}
|
|
42835
|
-
return null;
|
|
42836
|
-
}
|
|
42837
|
-
__name(findTopicHandler, "findTopicHandler");
|
|
42838
|
-
async function parseCallback(request) {
|
|
42839
|
-
const contentType = request.headers.get("content-type");
|
|
42840
|
-
if (!contentType || !contentType.includes("application/cloudevents+json")) {
|
|
42841
|
-
throw new Error("Invalid content type: expected 'application/cloudevents+json'");
|
|
42842
|
-
}
|
|
42843
|
-
let cloudEvent;
|
|
42844
|
-
try {
|
|
42845
|
-
cloudEvent = await request.json();
|
|
42846
|
-
}
|
|
42847
|
-
catch (error45) {
|
|
42848
|
-
throw new Error("Failed to parse CloudEvent from request body");
|
|
42849
|
-
}
|
|
42850
|
-
if (!cloudEvent.type || !cloudEvent.source || !cloudEvent.id || typeof cloudEvent.data !== "object" || cloudEvent.data == null) {
|
|
42851
|
-
throw new Error("Invalid CloudEvent: missing required fields");
|
|
42852
|
-
}
|
|
42853
|
-
if (cloudEvent.type !== "com.vercel.queue.v1beta") {
|
|
42854
|
-
throw new Error(`Invalid CloudEvent type: expected 'com.vercel.queue.v1beta', got '${cloudEvent.type}'`);
|
|
42855
|
-
}
|
|
42856
|
-
const missingFields = [];
|
|
42857
|
-
if (!("queueName" in cloudEvent.data))
|
|
42858
|
-
missingFields.push("queueName");
|
|
42859
|
-
if (!("consumerGroup" in cloudEvent.data))
|
|
42860
|
-
missingFields.push("consumerGroup");
|
|
42861
|
-
if (!("messageId" in cloudEvent.data))
|
|
42862
|
-
missingFields.push("messageId");
|
|
42863
|
-
if (missingFields.length > 0) {
|
|
42864
|
-
throw new Error(`Missing required CloudEvent data fields: ${missingFields.join(", ")}`);
|
|
42865
|
-
}
|
|
42866
|
-
const { messageId, queueName, consumerGroup } = cloudEvent.data;
|
|
42867
|
-
return {
|
|
42868
|
-
queueName,
|
|
42869
|
-
consumerGroup,
|
|
42870
|
-
messageId
|
|
42871
|
-
};
|
|
42872
|
-
}
|
|
42873
|
-
__name(parseCallback, "parseCallback");
|
|
42874
|
-
function createCallbackHandler(handlers, client, visibilityTimeoutSeconds) {
|
|
42875
|
-
for (const topicPattern in handlers) {
|
|
42876
|
-
if (topicPattern.includes("*")) {
|
|
42877
|
-
if (!validateWildcardPattern(topicPattern)) {
|
|
42878
|
-
throw new Error(`Invalid wildcard pattern "${topicPattern}": * may only appear once and must be at the end of the topic name`);
|
|
42879
|
-
}
|
|
42880
|
-
}
|
|
42881
|
-
}
|
|
42882
|
-
const routeHandler = /* @__PURE__ */ __name(async (request) => {
|
|
42883
|
-
try {
|
|
42884
|
-
const { queueName, consumerGroup, messageId } = await parseCallback(request);
|
|
42885
|
-
const topicHandler = findTopicHandler(queueName, handlers);
|
|
42886
|
-
if (!topicHandler) {
|
|
42887
|
-
const availableTopics = Object.keys(handlers).join(", ");
|
|
42888
|
-
return Response.json({
|
|
42889
|
-
error: `No handler found for topic: ${queueName}`,
|
|
42890
|
-
availableTopics
|
|
42891
|
-
}, {
|
|
42892
|
-
status: 404
|
|
42893
|
-
});
|
|
42894
|
-
}
|
|
42895
|
-
const consumerGroupHandler = topicHandler[consumerGroup];
|
|
42896
|
-
if (!consumerGroupHandler) {
|
|
42897
|
-
const availableGroups = Object.keys(topicHandler).join(", ");
|
|
42898
|
-
return Response.json({
|
|
42899
|
-
error: `No handler found for consumer group "${consumerGroup}" in topic "${queueName}".`,
|
|
42900
|
-
availableGroups
|
|
42901
|
-
}, {
|
|
42902
|
-
status: 404
|
|
42903
|
-
});
|
|
42904
|
-
}
|
|
42905
|
-
const topic = new Topic(client, queueName);
|
|
42906
|
-
const cg = topic.consumerGroup(consumerGroup, visibilityTimeoutSeconds !== void 0 ? {
|
|
42907
|
-
visibilityTimeoutSeconds
|
|
42908
|
-
} : void 0);
|
|
42909
|
-
await cg.consume(consumerGroupHandler, {
|
|
42910
|
-
messageId
|
|
42911
|
-
});
|
|
42912
|
-
return Response.json({
|
|
42913
|
-
status: "success"
|
|
42914
|
-
});
|
|
42915
|
-
}
|
|
42916
|
-
catch (error45) {
|
|
42917
|
-
console.error("Queue callback error:", error45);
|
|
42918
|
-
if (error45 instanceof Error && (error45.message.includes("Missing required CloudEvent data fields") || error45.message.includes("Invalid CloudEvent") || error45.message.includes("Invalid CloudEvent type") || error45.message.includes("Invalid content type") || error45.message.includes("Failed to parse CloudEvent"))) {
|
|
42919
|
-
return Response.json({
|
|
42920
|
-
error: error45.message
|
|
42921
|
-
}, {
|
|
42922
|
-
status: 400
|
|
42923
|
-
});
|
|
42924
|
-
}
|
|
42925
|
-
return Response.json({
|
|
42926
|
-
error: "Failed to process queue message"
|
|
42927
|
-
}, {
|
|
42928
|
-
status: 500
|
|
42929
|
-
});
|
|
42930
|
-
}
|
|
42931
|
-
}, "routeHandler");
|
|
42932
|
-
return routeHandler;
|
|
42933
|
-
}
|
|
42934
|
-
__name(createCallbackHandler, "createCallbackHandler");
|
|
42935
|
-
function handleCallback(handlers, options) {
|
|
42936
|
-
return createCallbackHandler(handlers, options?.client || new QueueClient(), options?.visibilityTimeoutSeconds);
|
|
42937
|
-
}
|
|
42938
|
-
__name(handleCallback, "handleCallback");
|
|
42939
|
-
async function send(topicName, payload, options) {
|
|
42940
|
-
const transport = options?.transport || new JsonTransport();
|
|
42941
|
-
const client = options?.client || new QueueClient();
|
|
42942
|
-
const result = await client.sendMessage({
|
|
42943
|
-
queueName: topicName,
|
|
42944
|
-
payload,
|
|
42945
|
-
idempotencyKey: options?.idempotencyKey,
|
|
42946
|
-
retentionSeconds: options?.retentionSeconds,
|
|
42947
|
-
delaySeconds: options?.delaySeconds,
|
|
42948
|
-
headers: options?.headers
|
|
42949
|
-
}, transport);
|
|
42950
|
-
if (isDevMode()) {
|
|
42951
|
-
triggerDevCallbacks(topicName, result.messageId, options?.delaySeconds);
|
|
42952
|
-
}
|
|
42953
|
-
return {
|
|
42954
|
-
messageId: result.messageId
|
|
42955
|
-
};
|
|
42956
|
-
}
|
|
42957
|
-
__name(send, "send");
|
|
42958
|
-
var Client = class {
|
|
42959
|
-
static {
|
|
42960
|
-
__name(this, "Client");
|
|
42961
|
-
}
|
|
42962
|
-
client;
|
|
42963
|
-
/**
|
|
42964
|
-
* Create a new Client
|
|
42965
|
-
* @param options QueueClient configuration options
|
|
42966
|
-
*/
|
|
42967
|
-
constructor(options = {}) {
|
|
42968
|
-
this.client = new QueueClient(options);
|
|
42969
|
-
}
|
|
42970
|
-
/**
|
|
42971
|
-
* Send a message to a topic
|
|
42972
|
-
* @param topicName Name of the topic to send to
|
|
42973
|
-
* @param payload The data to send
|
|
42974
|
-
* @param options Optional publish options and transport
|
|
42975
|
-
* @returns Promise with the message ID
|
|
42976
|
-
* @throws {BadRequestError} When request parameters are invalid
|
|
42977
|
-
* @throws {UnauthorizedError} When authentication fails
|
|
42978
|
-
* @throws {ForbiddenError} When access is denied (environment mismatch)
|
|
42979
|
-
* @throws {InternalServerError} When server encounters an error
|
|
42980
|
-
*/
|
|
42981
|
-
async send(topicName, payload, options) {
|
|
42982
|
-
return send(topicName, payload, {
|
|
42983
|
-
...options,
|
|
42984
|
-
client: this.client
|
|
42985
|
-
});
|
|
42986
|
-
}
|
|
42987
|
-
/**
|
|
42988
|
-
* Create a callback handler for processing queue messages.
|
|
42989
|
-
* Returns a Next.js route handler function that routes messages to appropriate handlers.
|
|
42990
|
-
*
|
|
42991
|
-
* @param handlers - Object with topic-specific handlers organized by consumer groups
|
|
42992
|
-
* @param options - Optional configuration
|
|
42993
|
-
* @param options.visibilityTimeoutSeconds - Message lock duration (default: 30, max: 3600)
|
|
42994
|
-
* @returns A Next.js route handler function
|
|
42995
|
-
*
|
|
42996
|
-
* @example
|
|
42997
|
-
* ```typescript
|
|
42998
|
-
* // Basic usage
|
|
42999
|
-
* export const POST = client.handleCallback({
|
|
43000
|
-
* "user-events": {
|
|
43001
|
-
* "welcome": (user, metadata) => console.log("Welcoming user", user),
|
|
43002
|
-
* "analytics": (user, metadata) => console.log("Tracking user", user),
|
|
43003
|
-
* },
|
|
43004
|
-
* });
|
|
43005
|
-
*
|
|
43006
|
-
* // With custom visibility timeout
|
|
43007
|
-
* export const POST = client.handleCallback({
|
|
43008
|
-
* "video-processing": {
|
|
43009
|
-
* "transcode": async (video) => await transcodeVideo(video),
|
|
43010
|
-
* },
|
|
43011
|
-
* }, {
|
|
43012
|
-
* visibilityTimeoutSeconds: 300, // 5 minutes for long operations
|
|
43013
|
-
* });
|
|
43014
|
-
* ```
|
|
43015
|
-
*/
|
|
43016
|
-
handleCallback(handlers, options) {
|
|
43017
|
-
return handleCallback(handlers, {
|
|
43018
|
-
...options,
|
|
43019
|
-
client: this.client
|
|
43020
|
-
});
|
|
43021
|
-
}
|
|
43022
|
-
};
|
|
43023
42388
|
// ../world-local/dist/queue.js
|
|
43024
42389
|
var import_async_sema = __toESM(require_lib(), 1);
|
|
43025
42390
|
var import_undici = __toESM(require_undici(), 1);
|
|
@@ -43027,12 +42392,12 @@ var LOCAL_QUEUE_MAX_VISIBILITY = parseInt(process.env.WORKFLOW_LOCAL_QUEUE_MAX_V
|
|
|
43027
42392
|
var MAX_SAFE_TIMEOUT_MS = 2147483647;
|
|
43028
42393
|
var DEFAULT_CONCURRENCY_LIMIT = 1e3;
|
|
43029
42394
|
var WORKFLOW_LOCAL_QUEUE_CONCURRENCY = parseInt(process.env.WORKFLOW_LOCAL_QUEUE_CONCURRENCY ?? "0", 10) || DEFAULT_CONCURRENCY_LIMIT;
|
|
43030
|
-
var httpAgent = new import_undici.Agent({
|
|
43031
|
-
headersTimeout: 0,
|
|
43032
|
-
connections: 1e3,
|
|
43033
|
-
keepAliveTimeout: 3e4
|
|
43034
|
-
});
|
|
43035
42395
|
function createQueue(config3) {
|
|
42396
|
+
const httpAgent = new import_undici.Agent({
|
|
42397
|
+
headersTimeout: 0,
|
|
42398
|
+
connections: 1e3,
|
|
42399
|
+
keepAliveTimeout: 3e4
|
|
42400
|
+
});
|
|
43036
42401
|
const transport = new JsonTransport();
|
|
43037
42402
|
const generateId2 = monotonicFactory();
|
|
43038
42403
|
const semaphore = new import_async_sema.Sema(WORKFLOW_LOCAL_QUEUE_CONCURRENCY);
|
|
@@ -43193,7 +42558,10 @@ function createQueue(config3) {
|
|
|
43193
42558
|
return {
|
|
43194
42559
|
queue,
|
|
43195
42560
|
createQueueHandler,
|
|
43196
|
-
getDeploymentId
|
|
42561
|
+
getDeploymentId,
|
|
42562
|
+
async close() {
|
|
42563
|
+
await httpAgent.close();
|
|
42564
|
+
}
|
|
43197
42565
|
};
|
|
43198
42566
|
}
|
|
43199
42567
|
__name(createQueue, "createQueue");
|
|
@@ -43764,6 +43132,17 @@ async function handleLegacyEvent(basedir, runId, data, currentRun, params) {
|
|
|
43764
43132
|
}
|
|
43765
43133
|
__name(handleLegacyEvent, "handleLegacyEvent");
|
|
43766
43134
|
// ../world-local/dist/storage/events-storage.js
|
|
43135
|
+
async function deleteAllWaitsForRun(basedir, runId) {
|
|
43136
|
+
const waitsDir = import_node_path5.default.join(basedir, "waits");
|
|
43137
|
+
const files = await listJSONFiles(waitsDir);
|
|
43138
|
+
for (const file2 of files) {
|
|
43139
|
+
if (file2.startsWith(`${runId}-`)) {
|
|
43140
|
+
const waitPath = import_node_path5.default.join(waitsDir, `${file2}.json`);
|
|
43141
|
+
await deleteJSON(waitPath);
|
|
43142
|
+
}
|
|
43143
|
+
}
|
|
43144
|
+
}
|
|
43145
|
+
__name(deleteAllWaitsForRun, "deleteAllWaitsForRun");
|
|
43767
43146
|
function createEventsStorage(basedir) {
|
|
43768
43147
|
return {
|
|
43769
43148
|
async create(runId, data, params) {
|
|
@@ -43834,7 +43213,7 @@ function createEventsStorage(basedir) {
|
|
|
43834
43213
|
status: 409
|
|
43835
43214
|
});
|
|
43836
43215
|
}
|
|
43837
|
-
if (data.eventType === "step_created" || data.eventType === "hook_created") {
|
|
43216
|
+
if (data.eventType === "step_created" || data.eventType === "hook_created" || data.eventType === "wait_created") {
|
|
43838
43217
|
throw new WorkflowAPIError(`Cannot create new entities on run in terminal state "${currentRun.status}"`, {
|
|
43839
43218
|
status: 409
|
|
43840
43219
|
});
|
|
@@ -43892,6 +43271,7 @@ function createEventsStorage(basedir) {
|
|
|
43892
43271
|
let run;
|
|
43893
43272
|
let step;
|
|
43894
43273
|
let hook;
|
|
43274
|
+
let wait;
|
|
43895
43275
|
if (data.eventType === "run_created" && "eventData" in data) {
|
|
43896
43276
|
const runData = data.eventData;
|
|
43897
43277
|
run = {
|
|
@@ -43960,7 +43340,10 @@ function createEventsStorage(basedir) {
|
|
|
43960
43340
|
await writeJSON(runPath, run, {
|
|
43961
43341
|
overwrite: true
|
|
43962
43342
|
});
|
|
43963
|
-
await
|
|
43343
|
+
await Promise.all([
|
|
43344
|
+
deleteAllHooksForRun(basedir, effectiveRunId),
|
|
43345
|
+
deleteAllWaitsForRun(basedir, effectiveRunId)
|
|
43346
|
+
]);
|
|
43964
43347
|
}
|
|
43965
43348
|
}
|
|
43966
43349
|
else if (data.eventType === "run_failed" && "eventData" in data) {
|
|
@@ -43990,7 +43373,10 @@ function createEventsStorage(basedir) {
|
|
|
43990
43373
|
await writeJSON(runPath, run, {
|
|
43991
43374
|
overwrite: true
|
|
43992
43375
|
});
|
|
43993
|
-
await
|
|
43376
|
+
await Promise.all([
|
|
43377
|
+
deleteAllHooksForRun(basedir, effectiveRunId),
|
|
43378
|
+
deleteAllWaitsForRun(basedir, effectiveRunId)
|
|
43379
|
+
]);
|
|
43994
43380
|
}
|
|
43995
43381
|
}
|
|
43996
43382
|
else if (data.eventType === "run_cancelled") {
|
|
@@ -44015,7 +43401,10 @@ function createEventsStorage(basedir) {
|
|
|
44015
43401
|
await writeJSON(runPath, run, {
|
|
44016
43402
|
overwrite: true
|
|
44017
43403
|
});
|
|
44018
|
-
await
|
|
43404
|
+
await Promise.all([
|
|
43405
|
+
deleteAllHooksForRun(basedir, effectiveRunId),
|
|
43406
|
+
deleteAllWaitsForRun(basedir, effectiveRunId)
|
|
43407
|
+
]);
|
|
44019
43408
|
}
|
|
44020
43409
|
}
|
|
44021
43410
|
else if (
|
|
@@ -44188,6 +43577,52 @@ function createEventsStorage(basedir) {
|
|
|
44188
43577
|
const hookPath = import_node_path5.default.join(basedir, "hooks", `${data.correlationId}.json`);
|
|
44189
43578
|
await deleteJSON(hookPath);
|
|
44190
43579
|
}
|
|
43580
|
+
else if (data.eventType === "wait_created" && "eventData" in data) {
|
|
43581
|
+
const waitData = data.eventData;
|
|
43582
|
+
const waitCompositeKey = `${effectiveRunId}-${data.correlationId}`;
|
|
43583
|
+
const waitPath = import_node_path5.default.join(basedir, "waits", `${waitCompositeKey}.json`);
|
|
43584
|
+
const existingWait = await readJSON(waitPath, WaitSchema);
|
|
43585
|
+
if (existingWait) {
|
|
43586
|
+
throw new WorkflowAPIError(`Wait "${data.correlationId}" already exists`, {
|
|
43587
|
+
status: 409
|
|
43588
|
+
});
|
|
43589
|
+
}
|
|
43590
|
+
wait = {
|
|
43591
|
+
waitId: waitCompositeKey,
|
|
43592
|
+
runId: effectiveRunId,
|
|
43593
|
+
status: "waiting",
|
|
43594
|
+
resumeAt: waitData.resumeAt,
|
|
43595
|
+
completedAt: void 0,
|
|
43596
|
+
createdAt: now,
|
|
43597
|
+
updatedAt: now,
|
|
43598
|
+
specVersion: effectiveSpecVersion
|
|
43599
|
+
};
|
|
43600
|
+
await writeJSON(waitPath, wait);
|
|
43601
|
+
}
|
|
43602
|
+
else if (data.eventType === "wait_completed") {
|
|
43603
|
+
const waitCompositeKey = `${effectiveRunId}-${data.correlationId}`;
|
|
43604
|
+
const waitPath = import_node_path5.default.join(basedir, "waits", `${waitCompositeKey}.json`);
|
|
43605
|
+
const existingWait = await readJSON(waitPath, WaitSchema);
|
|
43606
|
+
if (!existingWait) {
|
|
43607
|
+
throw new WorkflowAPIError(`Wait "${data.correlationId}" not found`, {
|
|
43608
|
+
status: 404
|
|
43609
|
+
});
|
|
43610
|
+
}
|
|
43611
|
+
if (existingWait.status === "completed") {
|
|
43612
|
+
throw new WorkflowAPIError(`Wait "${data.correlationId}" already completed`, {
|
|
43613
|
+
status: 409
|
|
43614
|
+
});
|
|
43615
|
+
}
|
|
43616
|
+
wait = {
|
|
43617
|
+
...existingWait,
|
|
43618
|
+
status: "completed",
|
|
43619
|
+
completedAt: now,
|
|
43620
|
+
updatedAt: now
|
|
43621
|
+
};
|
|
43622
|
+
await writeJSON(waitPath, wait, {
|
|
43623
|
+
overwrite: true
|
|
43624
|
+
});
|
|
43625
|
+
}
|
|
44191
43626
|
const compositeKey = `${effectiveRunId}-${eventId}`;
|
|
44192
43627
|
const eventPath = import_node_path5.default.join(basedir, "events", `${compositeKey}.json`);
|
|
44193
43628
|
await writeJSON(eventPath, event);
|
|
@@ -44197,7 +43632,8 @@ function createEventsStorage(basedir) {
|
|
|
44197
43632
|
event: filteredEvent,
|
|
44198
43633
|
run,
|
|
44199
43634
|
step,
|
|
44200
|
-
hook
|
|
43635
|
+
hook,
|
|
43636
|
+
wait
|
|
44201
43637
|
};
|
|
44202
43638
|
},
|
|
44203
43639
|
async list(params) {
|
|
@@ -44338,286 +43774,1715 @@ function createStepsStorage(basedir) {
|
|
|
44338
43774
|
getCreatedAt: getObjectCreatedAt("step"),
|
|
44339
43775
|
getId: /* @__PURE__ */ __name((step) => step.stepId, "getId")
|
|
44340
43776
|
});
|
|
44341
|
-
if (resolveData === "none") {
|
|
44342
|
-
return {
|
|
44343
|
-
...result,
|
|
44344
|
-
data: result.data.map((step) => ({
|
|
44345
|
-
...step,
|
|
44346
|
-
input: void 0,
|
|
44347
|
-
output: void 0
|
|
44348
|
-
}))
|
|
44349
|
-
};
|
|
43777
|
+
if (resolveData === "none") {
|
|
43778
|
+
return {
|
|
43779
|
+
...result,
|
|
43780
|
+
data: result.data.map((step) => ({
|
|
43781
|
+
...step,
|
|
43782
|
+
input: void 0,
|
|
43783
|
+
output: void 0
|
|
43784
|
+
}))
|
|
43785
|
+
};
|
|
43786
|
+
}
|
|
43787
|
+
return result;
|
|
43788
|
+
}, "list")
|
|
43789
|
+
};
|
|
43790
|
+
}
|
|
43791
|
+
__name(createStepsStorage, "createStepsStorage");
|
|
43792
|
+
// ../world-local/dist/storage/index.js
|
|
43793
|
+
function createStorage(basedir) {
|
|
43794
|
+
const storage = {
|
|
43795
|
+
runs: createRunsStorage(basedir),
|
|
43796
|
+
steps: createStepsStorage(basedir),
|
|
43797
|
+
events: createEventsStorage(basedir),
|
|
43798
|
+
hooks: createHooksStorage(basedir)
|
|
43799
|
+
};
|
|
43800
|
+
return {
|
|
43801
|
+
runs: instrumentObject("world.runs", storage.runs),
|
|
43802
|
+
steps: instrumentObject("world.steps", storage.steps),
|
|
43803
|
+
events: instrumentObject("world.events", storage.events),
|
|
43804
|
+
hooks: instrumentObject("world.hooks", storage.hooks)
|
|
43805
|
+
};
|
|
43806
|
+
}
|
|
43807
|
+
__name(createStorage, "createStorage");
|
|
43808
|
+
// ../world-local/dist/streamer.js
|
|
43809
|
+
var import_node_events = require("node:events");
|
|
43810
|
+
var import_node_path8 = __toESM(require("node:path"), 1);
|
|
43811
|
+
var monotonicUlid2 = monotonicFactory(() => Math.random());
|
|
43812
|
+
var RunStreamsSchema = external_exports.object({
|
|
43813
|
+
streams: external_exports.array(external_exports.string())
|
|
43814
|
+
});
|
|
43815
|
+
function serializeChunk(chunk) {
|
|
43816
|
+
const eofByte = Buffer.from([
|
|
43817
|
+
chunk.eof ? 1 : 0
|
|
43818
|
+
]);
|
|
43819
|
+
return Buffer.concat([
|
|
43820
|
+
eofByte,
|
|
43821
|
+
chunk.chunk
|
|
43822
|
+
]);
|
|
43823
|
+
}
|
|
43824
|
+
__name(serializeChunk, "serializeChunk");
// Inverse of serializeChunk: split the 1-byte EOF flag off the payload.
function deserializeChunk(serialized) {
  const eof = serialized[0] === 1;
  // Copy (rather than alias) the payload so callers own the returned buffer.
  const chunk = Buffer.from(serialized.subarray(1));
  return {
    eof,
    chunk
  };
}
__name(deserializeChunk, "deserializeChunk");
|
|
43834
|
+
// File-backed pub/sub streamer for the local world. Chunks are persisted as
// `<stream>-<ulid>.bin` files under <basedir>/streams/chunks and fanned out
// live via an in-process EventEmitter. Readers replay persisted chunks first,
// then switch to live events, deduplicating by chunkId.
function createStreamer(basedir) {
  const streamEmitter = new import_node_events.EventEmitter();
  // Cache of "<runId>:<streamName>" pairs already recorded in the run's
  // registry file, to skip redundant read/write round-trips.
  const registeredStreams = /* @__PURE__ */ new Set();
  // Ensure <basedir>/streams/runs/<runId>.json lists streamName.
  async function registerStreamForRun(runId, streamName) {
    const cacheKey = `${runId}:${streamName}`;
    if (registeredStreams.has(cacheKey)) {
      return;
    }
    const runStreamsPath = import_node_path8.default.join(basedir, "streams", "runs", `${runId}.json`);
    const existing = await readJSON(runStreamsPath, RunStreamsSchema);
    const streams = existing?.streams ?? [];
    if (!streams.includes(streamName)) {
      streams.push(streamName);
      await writeJSON(runStreamsPath, {
        streams
      }, {
        overwrite: true
      });
    }
    registeredStreams.add(cacheKey);
  }
  __name(registerStreamForRun, "registerStreamForRun");
  // Normalize string | Buffer | array-like input to a Buffer.
  function toBuffer(chunk) {
    if (typeof chunk === "string") {
      return Buffer.from(new TextEncoder().encode(chunk));
    } else if (chunk instanceof Buffer) {
      return chunk;
    } else {
      return Buffer.from(chunk);
    }
  }
  __name(toBuffer, "toBuffer");
  return {
    // Persist one data chunk, then notify live listeners.
    async writeToStream(name, _runId, chunk) {
      const chunkId = `chnk_${monotonicUlid2()}`;
      const runId = await _runId;
      await registerStreamForRun(runId, name);
      const chunkBuffer = toBuffer(chunk);
      const serialized = serializeChunk({
        chunk: chunkBuffer,
        eof: false
      });
      const chunkPath = import_node_path8.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.bin`);
      await write(chunkPath, serialized);
      const chunkData = Uint8Array.from(chunkBuffer);
      streamEmitter.emit(`chunk:${name}`, {
        streamName: name,
        chunkData,
        chunkId
      });
    },
    // Batch variant: files are written concurrently, but events are emitted
    // strictly in input order by awaiting the promises sequentially.
    async writeToStreamMulti(name, _runId, chunks) {
      if (chunks.length === 0)
        return;
      const chunkIds = chunks.map(() => `chnk_${monotonicUlid2()}`);
      const runId = await _runId;
      await registerStreamForRun(runId, name);
      const chunkBuffers = chunks.map((chunk) => toBuffer(chunk));
      const writePromises = chunkBuffers.map(async (chunkBuffer, i) => {
        const chunkId = chunkIds[i];
        const serialized = serializeChunk({
          chunk: chunkBuffer,
          eof: false
        });
        const chunkPath = import_node_path8.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.bin`);
        await write(chunkPath, serialized);
        return {
          chunkId,
          chunkData: Uint8Array.from(chunkBuffer)
        };
      });
      for (const writePromise of writePromises) {
        const { chunkId, chunkData } = await writePromise;
        streamEmitter.emit(`chunk:${name}`, {
          streamName: name,
          chunkData,
          chunkId
        });
      }
    },
    // Persist a zero-length EOF marker chunk and notify live listeners.
    async closeStream(name, _runId) {
      const chunkId = `chnk_${monotonicUlid2()}`;
      const runId = await _runId;
      await registerStreamForRun(runId, name);
      const chunkPath = import_node_path8.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.bin`);
      await write(chunkPath, serializeChunk({
        chunk: Buffer.from([]),
        eof: true
      }));
      streamEmitter.emit(`close:${name}`, {
        streamName: name
      });
    },
    // Stream names recorded for a run (empty when no registry file exists).
    async listStreamsByRunId(runId) {
      const runStreamsPath = import_node_path8.default.join(basedir, "streams", "runs", `${runId}.json`);
      const data = await readJSON(runStreamsPath, RunStreamsSchema);
      return data?.streams ?? [];
    },
    // Replay persisted chunks (from startIndex), then deliver live events.
    // Events arriving during the disk replay are buffered and deduplicated
    // against the on-disk chunks via deliveredChunkIds.
    async readFromStream(name, startIndex = 0) {
      const chunksDir = import_node_path8.default.join(basedir, "streams", "chunks");
      // Replaced by closeListener once start() wires the listeners; cancel()
      // may run before that, hence the no-op default.
      let removeListeners = /* @__PURE__ */ __name(() => {
      }, "removeListeners");
      return new ReadableStream({
        async start(controller) {
          const deliveredChunkIds = /* @__PURE__ */ new Set();
          const bufferedEventChunks = [];
          let isReadingFromDisk = true;
          let pendingClose = false;
          const chunkListener = /* @__PURE__ */ __name((event) => {
            // Mark as delivered first so the disk loop skips this chunk.
            deliveredChunkIds.add(event.chunkId);
            if (event.chunkData.byteLength === 0) {
              return;
            }
            if (isReadingFromDisk) {
              bufferedEventChunks.push({
                chunkId: event.chunkId,
                chunkData: Uint8Array.from(event.chunkData)
              });
            } else {
              controller.enqueue(Uint8Array.from(event.chunkData));
            }
          }, "chunkListener");
          const closeListener = /* @__PURE__ */ __name(() => {
            // Defer the close until the disk replay has flushed its buffer.
            if (isReadingFromDisk) {
              pendingClose = true;
              return;
            }
            streamEmitter.off(`chunk:${name}`, chunkListener);
            streamEmitter.off(`close:${name}`, closeListener);
            try {
              controller.close();
            } catch {
            }
          }, "closeListener");
          removeListeners = closeListener;
          streamEmitter.on(`chunk:${name}`, chunkListener);
          streamEmitter.on(`close:${name}`, closeListener);
          const [binFiles, jsonFiles] = await Promise.all([
            listFilesByExtension(chunksDir, ".bin"),
            listFilesByExtension(chunksDir, ".json")
          ]);
          // .bin entries are set last so they win over same-named .json ones.
          const fileExtMap = /* @__PURE__ */ new Map();
          for (const f of jsonFiles)
            fileExtMap.set(f, ".json");
          for (const f of binFiles)
            fileExtMap.set(f, ".bin");
          // ULID suffixes make the lexicographic sort chronological.
          const chunkFiles = [
            ...fileExtMap.keys()
          ].filter((file2) => file2.startsWith(`${name}-`)).sort();
          let isComplete = false;
          for (let i = startIndex; i < chunkFiles.length; i++) {
            const file2 = chunkFiles[i];
            const chunkId = file2.substring(name.length + 1);
            if (deliveredChunkIds.has(chunkId)) {
              continue;
            }
            const ext = fileExtMap.get(file2) ?? ".bin";
            const chunk = deserializeChunk(await readBuffer(import_node_path8.default.join(chunksDir, `${file2}${ext}`)));
            if (chunk?.eof === true) {
              isComplete = true;
              break;
            }
            if (chunk.chunk.byteLength) {
              controller.enqueue(Uint8Array.from(chunk.chunk));
            }
          }
          // Flush events that raced with the disk replay, in chunkId order.
          isReadingFromDisk = false;
          bufferedEventChunks.sort((a, b) => a.chunkId.localeCompare(b.chunkId));
          for (const buffered of bufferedEventChunks) {
            controller.enqueue(Uint8Array.from(buffered.chunkData));
          }
          if (isComplete) {
            removeListeners();
            try {
              controller.close();
            } catch {
            }
            return;
          }
          if (pendingClose) {
            streamEmitter.off(`chunk:${name}`, chunkListener);
            streamEmitter.off(`close:${name}`, closeListener);
            try {
              controller.close();
            } catch {
            }
          }
        },
        cancel() {
          removeListeners();
        }
      });
    }
  };
}
__name(createStreamer, "createStreamer");
|
|
44036
|
+
// ../world-local/dist/index.js
function createLocalWorld(args) {
  // Drop undefined overrides so they don't clobber defaults from config2.
  const overrides = {};
  if (args) {
    for (const [key, value] of Object.entries(args)) {
      if (value !== void 0) {
        overrides[key] = value;
      }
    }
  }
  const mergedConfig = { ...config2.value, ...overrides };
  const queue = createQueue(mergedConfig);
  // Compose queue + storage + streamer into one world object, adding
  // lifecycle hooks on top.
  return {
    ...queue,
    ...createStorage(mergedConfig.dataDir),
    ...createStreamer(mergedConfig.dataDir),
    async start() {
      await initDataDir(mergedConfig.dataDir);
    },
    async close() {
      await queue.close();
    }
  };
}
__name(createLocalWorld, "createLocalWorld");
|
|
44057
|
+
// ../world-vercel/dist/encryption.js
var import_node_crypto2 = require("node:crypto");
var import_oidc2 = __toESM(require_dist(), 1);
// AES-256 key size in bytes; also the HKDF output length used below.
var KEY_BYTES = 32;
|
|
44061
|
+
/**
 * Derive a per-run AES-256 key from the 32-byte deployment master key via
 * HKDF-SHA256. The HKDF "info" binds the key to `${projectId}|${runId}`;
 * the salt is a fixed 32 bytes of zeros.
 * @throws when the deployment key length is wrong or projectId is not a
 *         non-empty string.
 */
async function deriveRunKey(deploymentKey, projectId, runId) {
  if (deploymentKey.length !== KEY_BYTES) {
    throw new Error(`Invalid deployment key length: expected ${KEY_BYTES} bytes for AES-256, got ${deploymentKey.length} bytes`);
  }
  if (!projectId || typeof projectId !== "string") {
    throw new Error("projectId must be a non-empty string");
  }
  const subtle = import_node_crypto2.webcrypto.subtle;
  const hkdfKey = await subtle.importKey("raw", deploymentKey, "HKDF", false, [
    "deriveBits"
  ]);
  const hkdfParams = {
    name: "HKDF",
    hash: "SHA-256",
    salt: new Uint8Array(32),
    info: new TextEncoder().encode(`${projectId}|${runId}`)
  };
  const derivedBits = await subtle.deriveBits(hkdfParams, hkdfKey, KEY_BYTES * 8 /* bits */);
  return new Uint8Array(derivedBits);
}
__name(deriveRunKey, "deriveRunKey");
|
|
44083
|
+
/**
 * Fetch the encryption key for a run belonging to another deployment from the
 * Vercel API. Credential precedence: explicit options.token, then the ambient
 * OIDC token, then VERCEL_TOKEN.
 * @returns Buffer with the decoded key bytes.
 * @throws when no credential is available, on a non-2xx response, or when the
 *         response body lacks a string "key" field.
 */
async function fetchRunKey(deploymentId, projectId, runId, options) {
  // Only resolve the OIDC token when no explicit token was provided — the
  // original unconditionally awaited the token exchange on every call.
  const token = options?.token ?? await (0, import_oidc2.getVercelOidcToken)().catch(() => null) ?? process.env.VERCEL_TOKEN;
  if (!token) {
    throw new Error("Cannot fetch run key: no OIDC token or VERCEL_TOKEN available");
  }
  const params = new URLSearchParams({
    projectId,
    runId
  });
  const response = await fetch(`https://api.vercel.com/v1/workflow/run-key/${deploymentId}?${params}`, {
    headers: {
      Authorization: `Bearer ${token}`
    }
  });
  if (!response.ok) {
    throw new Error(`Failed to fetch run key for ${runId} (deployment ${deploymentId}): HTTP ${response.status}`);
  }
  const data = await response.json();
  // Validate the payload shape before trusting it.
  const result = object({
    key: string2()
  }).safeParse(data);
  if (!result.success) {
    throw new Error('Invalid response from Vercel API, missing "key" field');
  }
  return Buffer.from(result.data.key, "base64");
}
__name(fetchRunKey, "fetchRunKey");
|
|
44111
|
+
// Build the per-run encryption-key resolver. Returns undefined when no
// projectId is configured. Keys for the current (or unknown) deployment are
// derived locally from VERCEL_DEPLOYMENT_KEY; keys for other deployments are
// fetched via the Vercel API.
function createGetEncryptionKeyForRun(projectId, token) {
  if (!projectId)
    return void 0;
  const currentDeploymentId = process.env.VERCEL_DEPLOYMENT_ID;
  let localDeploymentKey;
  // Lazily decode and memoize the base64 deployment key from the env.
  function getLocalDeploymentKey() {
    if (localDeploymentKey)
      return localDeploymentKey;
    const deploymentKeyBase64 = process.env.VERCEL_DEPLOYMENT_KEY;
    if (!deploymentKeyBase64)
      return void 0;
    localDeploymentKey = Buffer.from(deploymentKeyBase64, "base64");
    return localDeploymentKey;
  }
  __name(getLocalDeploymentKey, "getLocalDeploymentKey");
  return /* @__PURE__ */ __name(async function getEncryptionKeyForRun(run, context) {
    // Accepts either a runId string (+ optional context) or a run object.
    const runId = typeof run === "string" ? run : run.runId;
    const deploymentId = typeof run === "string" ? context?.deploymentId : run.deploymentId;
    if (!deploymentId || deploymentId === currentDeploymentId) {
      // Same (or unknown) deployment: derive locally; undefined means no
      // local key is configured, i.e. encryption is unavailable.
      const localKey = getLocalDeploymentKey();
      if (!localKey)
        return void 0;
      return deriveRunKey(localKey, projectId, runId);
    }
    // Cross-deployment read: the key must come from the API.
    return fetchRunKey(deploymentId, projectId, runId, {
      token
    });
  }, "getEncryptionKeyForRun");
}
__name(createGetEncryptionKeyForRun, "createGetEncryptionKeyForRun");
|
|
44141
|
+
// ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.38/node_modules/@vercel/queue/dist/web.mjs
|
|
44142
|
+
var fs2 = __toESM(require("fs"), 1);
|
|
44143
|
+
var path9 = __toESM(require("path"), 1);
|
|
44144
|
+
var import_oidc3 = __toESM(require_dist(), 1);
|
|
44145
|
+
// Typed error taxonomy for @vercel/queue. Each class sets this.name so errors
// survive serialization/logging; callers elsewhere in this bundle branch on
// `instanceof` (e.g. waitForMessageVisibility).
// A message id does not exist on the queue.
var MessageNotFoundError2 = class extends Error {
  static {
    __name(this, "MessageNotFoundError");
  }
  constructor(messageId) {
    super(`Message ${messageId} not found`);
    this.name = "MessageNotFoundError";
  }
};
// The message exists but cannot be received right now (optional reason).
var MessageNotAvailableError2 = class extends Error {
  static {
    __name(this, "MessageNotAvailableError");
  }
  constructor(messageId, reason) {
    super(`Message ${messageId} not available for processing${reason ? `: ${reason}` : ""}`);
    this.name = "MessageNotAvailableError";
  }
};
// Stored message payload/metadata failed to decode.
var MessageCorruptedError2 = class extends Error {
  static {
    __name(this, "MessageCorruptedError");
  }
  constructor(messageId, reason) {
    super(`Message ${messageId} is corrupted: ${reason}`);
    this.name = "MessageCorruptedError";
  }
};
// HTTP 401 equivalent.
var UnauthorizedError2 = class extends Error {
  static {
    __name(this, "UnauthorizedError");
  }
  constructor(message = "Missing or invalid authentication token") {
    super(message);
    this.name = "UnauthorizedError";
  }
};
// HTTP 403 equivalent.
var ForbiddenError2 = class extends Error {
  static {
    __name(this, "ForbiddenError");
  }
  constructor(message = "Queue environment doesn't match token environment") {
    super(message);
    this.name = "ForbiddenError";
  }
};
// HTTP 400 equivalent.
var BadRequestError2 = class extends Error {
  static {
    __name(this, "BadRequestError");
  }
  constructor(message) {
    super(message);
    this.name = "BadRequestError";
  }
};
// HTTP 5xx equivalent.
var InternalServerError2 = class extends Error {
  static {
    __name(this, "InternalServerError");
  }
  constructor(message = "Unexpected server error") {
    super(message);
    this.name = "InternalServerError";
  }
};
// Receive-batch limit outside the allowed [min, max] range.
var InvalidLimitError2 = class extends Error {
  static {
    __name(this, "InvalidLimitError");
  }
  constructor(limit, min = 1, max = 10) {
    super(`Invalid limit: ${limit}. Limit must be between ${min} and ${max}.`);
    this.name = "InvalidLimitError";
  }
};
// Acknowledge/receive attempted on an already-completed message.
var MessageAlreadyProcessedError2 = class extends Error {
  static {
    __name(this, "MessageAlreadyProcessedError");
  }
  constructor(messageId) {
    super(`Message ${messageId} has already been processed`);
    this.name = "MessageAlreadyProcessedError";
  }
};
// Send rejected because the idempotency key was already used; the key is
// exposed so callers can correlate with the original send.
var DuplicateMessageError2 = class extends Error {
  static {
    __name(this, "DuplicateMessageError");
  }
  idempotencyKey;
  constructor(message, idempotencyKey) {
    super(message);
    this.name = "DuplicateMessageError";
    this.idempotencyKey = idempotencyKey;
  }
};
// Consumer lookup failed for a deployment; the deployment id is attached.
var ConsumerDiscoveryError2 = class extends Error {
  static {
    __name(this, "ConsumerDiscoveryError");
  }
  deploymentId;
  constructor(message, deploymentId) {
    super(message);
    this.name = "ConsumerDiscoveryError";
    this.deploymentId = deploymentId;
  }
};
// Consumer discovery was attempted without a configured registry.
var ConsumerRegistryNotConfiguredError2 = class extends Error {
  static {
    __name(this, "ConsumerRegistryNotConfiguredError");
  }
  constructor(message = "Consumer registry not configured") {
    super(message);
    this.name = "ConsumerRegistryNotConfiguredError";
  }
};
|
|
44257
|
+
// Global cache slot for dev-mode route mappings; Symbol.for so every copy of
// this module in the process shares (and can clear) the same cache.
var ROUTE_MAPPINGS_KEY2 = Symbol.for("@vercel/queue.devRouteMappings");
|
|
44258
|
+
// Map a route source file path (e.g. "app/api/queue/route.ts") to the URL
// path it serves (e.g. "/api/queue").
function filePathToUrlPath(filePath) {
  const stripped = filePath
    .replace(/^app\//, "/")
    .replace(/^pages\//, "/")
    .replace(/\/route\.(ts|mts|js|mjs|tsx|jsx)$/, "")
    .replace(/\.(ts|mts|js|mjs|tsx|jsx)$/, "");
  return stripped.startsWith("/") ? stripped : `/${stripped}`;
}
|
|
44265
|
+
__name(filePathToUrlPath, "filePathToUrlPath");
// Encode a file path into a consumer-group id with a reversible escape
// scheme: "_" -> "__", "/" -> "_S", "." -> "_D" (underscores escaped first so
// the separators stay unambiguous).
function filePathToConsumerGroup(filePath) {
  return filePath.replace(/_/g, "__").replace(/\//g, "_S").replace(/\./g, "_D");
}
__name(filePathToConsumerGroup, "filePathToConsumerGroup");
|
|
44270
|
+
// Read vercel.json once per process and cache (on globalThis under
// ROUTE_MAPPINGS_KEY2) the queue-triggered routes as
// { urlPath, topic, consumer } entries. Returns null — and caches null — when
// there is no vercel.json, no functions config, or no queue triggers.
function getDevRouteMappings() {
  const g = globalThis;
  if (ROUTE_MAPPINGS_KEY2 in g) {
    return g[ROUTE_MAPPINGS_KEY2] ?? null;
  }
  try {
    const vercelJsonPath = path9.join(process.cwd(), "vercel.json");
    if (!fs2.existsSync(vercelJsonPath)) {
      g[ROUTE_MAPPINGS_KEY2] = null;
      return null;
    }
    const vercelJson = JSON.parse(fs2.readFileSync(vercelJsonPath, "utf-8"));
    if (!vercelJson.functions) {
      g[ROUTE_MAPPINGS_KEY2] = null;
      return null;
    }
    const mappings = [];
    for (const [filePath, config3] of Object.entries(vercelJson.functions)) {
      if (!config3.experimentalTriggers)
        continue;
      for (const trigger of config3.experimentalTriggers) {
        // Only queue/* triggers with an explicit topic take part in dev dispatch.
        if (trigger.type?.startsWith("queue/") && trigger.topic) {
          mappings.push({
            urlPath: filePathToUrlPath(filePath),
            topic: trigger.topic,
            consumer: filePathToConsumerGroup(filePath)
          });
        }
      }
    }
    g[ROUTE_MAPPINGS_KEY2] = mappings.length > 0 ? mappings : null;
    return g[ROUTE_MAPPINGS_KEY2];
  } catch (error45) {
    // Best effort: a malformed vercel.json disables dev dispatch instead of
    // crashing the dev server.
    console.warn("[Dev Mode] Failed to read vercel.json:", error45);
    g[ROUTE_MAPPINGS_KEY2] = null;
    return null;
  }
}
__name(getDevRouteMappings, "getDevRouteMappings");
|
|
44310
|
+
// Return every dev-mode route whose topic matches topicName, treating topics
// containing "*" as wildcard patterns.
function findMatchingRoutes(topicName) {
  const mappings = getDevRouteMappings() ?? [];
  const matchesTopic = (mapping) => mapping.topic.includes("*")
    ? matchesWildcardPattern(topicName, mapping.topic)
    : mapping.topic === topicName;
  return mappings.filter(matchesTopic);
}
__name(findMatchingRoutes, "findMatchingRoutes");
|
|
44323
|
+
// Dev-mode dispatch is only active under NODE_ENV=development.
function isDevMode2() {
  const env = process.env.NODE_ENV;
  return env === "development";
}
|
|
44326
|
+
__name(isDevMode2, "isDevMode");
// Dev-mode visibility polling: start at 50ms, double each attempt, give up
// after a total of 5s (see waitForMessageVisibility).
var DEV_VISIBILITY_POLL_INTERVAL = 50;
var DEV_VISIBILITY_MAX_WAIT = 5e3;
var DEV_VISIBILITY_BACKOFF_MULTIPLIER = 2;
|
|
44330
|
+
// Poll with exponential backoff until a just-sent message becomes receivable.
// Returns true when the message is confirmed visible; false when it was
// already processed, an unexpected error occurred, or the 5s budget ran out.
async function waitForMessageVisibility(topicName, consumerGroup, messageId) {
  const client = new QueueClient2();
  let elapsed = 0;
  let interval = DEV_VISIBILITY_POLL_INTERVAL;
  while (elapsed < DEV_VISIBILITY_MAX_WAIT) {
    try {
      // visibilityTimeoutSeconds: 0 — peek without hiding the message from
      // the real consumer.
      await client.receiveMessageById({
        queueName: topicName,
        consumerGroup,
        messageId,
        visibilityTimeoutSeconds: 0
      });
      return true;
    } catch (error45) {
      if (error45 instanceof MessageNotFoundError2) {
        // Not visible yet: sleep, then double the interval, capped so the
        // final sleep never overshoots the remaining budget.
        await new Promise((resolve2) => setTimeout(resolve2, interval));
        elapsed += interval;
        interval = Math.min(interval * DEV_VISIBILITY_BACKOFF_MULTIPLIER, DEV_VISIBILITY_MAX_WAIT - elapsed);
        continue;
      }
      if (error45 instanceof MessageAlreadyProcessedError2) {
        console.log(`[Dev Mode] Message already processed: topic="${topicName}" messageId="${messageId}"`);
        return false;
      }
      // Any other error is terminal for this poll.
      console.error(`[Dev Mode] Error polling for message visibility: topic="${topicName}" messageId="${messageId}"`, error45);
      return false;
    }
  }
  console.warn(`[Dev Mode] Message visibility timeout after ${DEV_VISIBILITY_MAX_WAIT}ms: topic="${topicName}" messageId="${messageId}"`);
  return false;
}
__name(waitForMessageVisibility, "waitForMessageVisibility");
|
|
44363
|
+
/**
 * Dev-mode message dispatch: after a send, wait for the message to become
 * visible, then POST a CloudEvents-style callback to every local route whose
 * vercel.json trigger topic matches. Delayed sends re-enter this function via
 * setTimeout. Fire-and-forget by design — failures are logged, never thrown.
 */
function triggerDevCallbacks(topicName, messageId, delaySeconds) {
  if (delaySeconds && delaySeconds > 0) {
    console.log(`[Dev Mode] Message sent with delay: topic="${topicName}" messageId="${messageId}" delay=${delaySeconds}s`);
    setTimeout(() => {
      triggerDevCallbacks(topicName, messageId);
    }, delaySeconds * 1e3);
    return;
  }
  console.log(`[Dev Mode] Message sent: topic="${topicName}" messageId="${messageId}"`);
  const matchingRoutes = findMatchingRoutes(topicName);
  if (matchingRoutes.length === 0) {
    console.log(`[Dev Mode] No matching routes in vercel.json for topic "${topicName}"`);
    return;
  }
  const consumerGroups = matchingRoutes.map((r) => r.consumer);
  console.log(`[Dev Mode] Scheduling callbacks for topic="${topicName}" messageId="${messageId}" \u2192 consumers: [${consumerGroups.join(", ")}]`);
  (async () => {
    // Visibility is checked once against the first consumer group.
    const firstRoute = matchingRoutes[0];
    const isVisible = await waitForMessageVisibility(topicName, firstRoute.consumer, messageId);
    if (!isVisible) {
      console.warn(`[Dev Mode] Skipping callbacks - message not visible: topic="${topicName}" messageId="${messageId}"`);
      return;
    }
    const port = process.env.PORT || 3e3;
    const baseUrl = `http://localhost:${port}`;
    // Invoke handlers sequentially; each route's failure is isolated.
    for (const route of matchingRoutes) {
      const url2 = `${baseUrl}${route.urlPath}`;
      console.log(`[Dev Mode] Invoking handler: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" url="${url2}"`);
      try {
        const response = await fetch(url2, {
          method: "POST",
          headers: {
            "ce-type": CLOUD_EVENT_TYPE_V2BETA,
            "ce-vqsqueuename": topicName,
            "ce-vqsconsumergroup": route.consumer,
            "ce-vqsmessageid": messageId
          }
        });
        if (response.ok) {
          try {
            const responseData = await response.json();
            if (responseData.status === "success") {
              console.log(`[Dev Mode] \u2713 Message processed successfully: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}"`);
            }
          } catch {
            console.warn(`[Dev Mode] Handler returned OK but response was not JSON: topic="${topicName}" consumer="${route.consumer}"`);
          }
        } else {
          try {
            const errorData = await response.json();
            console.error(`[Dev Mode] \u2717 Handler failed: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" error="${errorData.error || response.statusText}"`);
          } catch {
            console.error(`[Dev Mode] \u2717 Handler failed: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" status=${response.status}`);
          }
        }
      } catch (error45) {
        console.error(`[Dev Mode] \u2717 HTTP request failed: topic="${topicName}" consumer="${route.consumer}" messageId="${messageId}" url="${url2}"`, error45);
      }
    }
  })().catch((error45) => {
    // Fix: the dispatch IIFE previously floated with no rejection handler;
    // an unexpected throw (e.g. from QueueClient2 construction inside
    // waitForMessageVisibility) would surface as an unhandled rejection and
    // can terminate the dev process on modern Node.
    console.error(`[Dev Mode] Callback dispatch failed: topic="${topicName}" messageId="${messageId}"`, error45);
  });
}
__name(triggerDevCallbacks, "triggerDevCallbacks");
|
|
44429
|
+
// Drop the cached vercel.json route mappings so the next lookup re-reads disk.
function clearDevRouteMappings2() {
  delete globalThis[ROUTE_MAPPINGS_KEY2];
}
__name(clearDevRouteMappings2, "clearDevRouteMappings");
// Expose the cache reset to test runners only.
if (process.env.NODE_ENV === "test" || process.env.VITEST) {
  globalThis.__clearDevRouteMappings = clearDevRouteMappings2;
}
|
|
44437
|
+
// Drain a web ReadableStream into a single Buffer. Uses an explicit reader
// (not for-await) so an error releases the lock without cancelling the
// stream; the lock is always released in `finally`.
async function streamToBuffer2(stream) {
  const reader = stream.getReader();
  const parts = [];
  let byteLength = 0;
  try {
    for (;;) {
      const result = await reader.read();
      if (result.done) {
        break;
      }
      parts.push(result.value);
      byteLength += result.value.length;
    }
  } finally {
    reader.releaseLock();
  }
  return Buffer.concat(parts, byteLength);
}
|
|
44455
|
+
__name(streamToBuffer2, "streamToBuffer");
// Default message transport: JSON over UTF-8 bytes, with optional custom
// replacer/reviver hooks.
var JsonTransport2 = class {
  static {
    __name(this, "JsonTransport");
  }
  contentType = "application/json";
  replacer;
  reviver;
  /**
   * Create a new JsonTransport.
   * @param options - Optional JSON serialization options
   * @param options.replacer - Custom replacer for JSON.stringify
   * @param options.reviver - Custom reviver for JSON.parse
   */
  constructor(options = {}) {
    this.replacer = options.replacer;
    this.reviver = options.reviver;
  }
  // Encode a value as UTF-8 JSON bytes.
  serialize(value) {
    return Buffer.from(JSON.stringify(value, this.replacer), "utf8");
  }
  // Drain the byte stream and parse it back into a value.
  async deserialize(stream) {
    const buffer = await streamToBuffer2(stream);
    return JSON.parse(buffer.toString("utf8"), this.reviver);
  }
};
|
|
44481
|
+
// Opt-in debug logging via VERCEL_QUEUE_DEBUG=1 or VERCEL_QUEUE_DEBUG=true.
function isDebugEnabled2() {
  const flag = process.env.VERCEL_QUEUE_DEBUG;
  return flag === "1" || flag === "true";
}
|
|
44484
|
+
__name(isDebugEnabled2, "isDebugEnabled");
// Read a ReadableStream to completion, discarding every chunk — used when a
// response body must be drained but its content is not needed.
async function consumeStream2(stream) {
  const reader = stream.getReader();
  try {
    while (true) {
      const { done } = await reader.read();
      if (done)
        break;
    }
  } finally {
    // Always release so the stream is not left locked on error.
    reader.releaseLock();
  }
}
__name(consumeStream2, "consumeStream");
|
|
44499
|
+
// Translate common HTTP failure statuses into the typed queue errors; any
// other status falls through to a generic Error naming the failed operation.
function throwCommonHttpError2(status, statusText, errorText, operation, badRequestDefault = "Invalid parameters") {
  switch (status) {
    case 400:
      throw new BadRequestError2(errorText || badRequestDefault);
    case 401:
      throw new UnauthorizedError2(errorText || void 0);
    case 403:
      throw new ForbiddenError2(errorText || void 0);
    default:
      if (status >= 500) {
        throw new InternalServerError2(errorText || `Server error: ${status} ${statusText}`);
      }
      throw new Error(`Failed to ${operation}: ${status} ${statusText}`);
  }
}
__name(throwCommonHttpError2, "throwCommonHttpError");
|
|
44515
|
+
/**
 * Extract queue message metadata from response headers.
 * Returns null when a required header (Vqs-Message-Id, Vqs-Timestamp,
 * Vqs-Receipt-Handle) is missing or a value fails to parse — callers treat
 * null as "not a valid queue message".
 */
function parseQueueHeaders2(headers) {
  const messageId = headers.get("Vqs-Message-Id");
  const deliveryCountStr = headers.get("Vqs-Delivery-Count") || "0";
  const timestamp = headers.get("Vqs-Timestamp");
  const contentType = headers.get("Content-Type") || "application/octet-stream";
  const receiptHandle = headers.get("Vqs-Receipt-Handle");
  if (!messageId || !timestamp || !receiptHandle) {
    return null;
  }
  const deliveryCount = parseInt(deliveryCountStr, 10);
  if (Number.isNaN(deliveryCount)) {
    return null;
  }
  // Fix: reject malformed timestamps instead of leaking an Invalid Date,
  // keeping the contract consistent with the delivery-count check above.
  const createdAt = new Date(timestamp);
  if (Number.isNaN(createdAt.getTime())) {
    return null;
  }
  return {
    messageId,
    deliveryCount,
    createdAt,
    contentType,
    receiptHandle
  };
}
|
|
44536
|
+
// presumably the bundler's name-preservation helper (see __name earlier in
// this bundle) — keeps fn.name stable after minification.
__name(parseQueueHeaders2, "parseQueueHeaders");
|
|
44537
|
+
/**
 * HTTP client for the Vercel Queue Service (VQS).
 *
 * Wraps the topic REST API: send, receive (multipart streaming), receive by ID,
 * delete (ack), and visibility-timeout changes. Authentication uses either an
 * explicitly provided token or the ambient Vercel OIDC token. Payload
 * (de)serialization is delegated to a pluggable transport (JSON by default).
 */
var QueueClient2 = class {
  static {
    __name(this, "QueueClient");
  }
  // Origin of the queue service, e.g. "https://vercel-queue.com".
  baseUrl;
  // Path prefix under baseUrl for topic endpoints.
  basePath;
  // Extra headers merged into every request (may be overridden by protocol headers).
  customHeaders;
  // Explicit bearer token; when unset, the OIDC token is fetched per request.
  providedToken;
  // Deployment ID used for send pinning and consume routing.
  defaultDeploymentId;
  // When true, sends are pinned to defaultDeploymentId (outside dev mode).
  pinToDeployment;
  // Transport used to serialize/deserialize message payloads.
  transport;
  /**
   * @param {object} [options] - Client configuration.
   * @param {string} [options.baseUrl] - Overrides VERCEL_QUEUE_BASE_URL / default origin.
   * @param {string} [options.basePath] - Overrides VERCEL_QUEUE_BASE_PATH / "/api/v3/topic".
   * @param {Record<string,string>} [options.headers] - Custom headers for every request.
   * @param {string} [options.token] - Explicit bearer token (skips OIDC lookup).
   * @param {string} [options.deploymentId] - Overrides VERCEL_DEPLOYMENT_ID.
   * @param {boolean} [options.pinToDeployment=true] - Pin sends to the deployment ID.
   * @param {object} [options.transport] - Payload transport; defaults to JSON.
   */
  constructor(options = {}) {
    this.baseUrl = options.baseUrl || process.env.VERCEL_QUEUE_BASE_URL || "https://vercel-queue.com";
    this.basePath = options.basePath || process.env.VERCEL_QUEUE_BASE_PATH || "/api/v3/topic";
    this.customHeaders = options.headers || {};
    this.providedToken = options.token;
    this.defaultDeploymentId = options.deploymentId || process.env.VERCEL_DEPLOYMENT_ID;
    this.pinToDeployment = options.pinToDeployment ?? true;
    this.transport = options.transport || new JsonTransport2();
  }
  /** Returns the payload transport configured on this client. */
  getTransport() {
    return this.transport;
  }
  /**
   * Deployment ID to attach to send requests: undefined in dev mode or when
   * pinning is disabled, otherwise the configured deployment ID.
   */
  getSendDeploymentId() {
    if (isDevMode2()) {
      return void 0;
    }
    if (this.pinToDeployment) {
      return this.defaultDeploymentId;
    }
    return void 0;
  }
  /**
   * Deployment ID to attach to consume-side requests (receive/delete/visibility):
   * undefined in dev mode, otherwise the configured deployment ID.
   */
  getConsumeDeploymentId() {
    if (isDevMode2()) {
      return void 0;
    }
    return this.defaultDeploymentId;
  }
  /**
   * Resolve the bearer token: the explicitly provided token if any, otherwise
   * the ambient Vercel OIDC token.
   * @throws {Error} When no explicit token is set and OIDC lookup yields nothing.
   */
  async getToken() {
    if (this.providedToken) {
      return this.providedToken;
    }
    const token = await (0, import_oidc3.getVercelOidcToken)();
    if (!token) {
      throw new Error("Failed to get OIDC token from Vercel Functions. Make sure you are running in a Vercel Function environment, or provide a token explicitly.\n\nTo set up your environment:\n1. Link your project: 'vercel link'\n2. Pull environment variables: 'vercel env pull'\n3. Run with environment: 'dotenv -e .env.local -- your-command'");
    }
    return token;
  }
  /**
   * Build a topic endpoint URL. Queue name and every extra path segment are
   * individually URI-encoded.
   */
  buildUrl(queueName, ...pathSegments) {
    const encodedQueue = encodeURIComponent(queueName);
    const segments = pathSegments.map((s) => encodeURIComponent(s));
    const path22 = segments.length > 0 ? "/" + segments.join("/") : "";
    return `${this.baseUrl}${this.basePath}/${encodedQueue}${path22}`;
  }
  /**
   * Thin wrapper over global fetch that adds the client User-Agent and a
   * client-timestamp header, plus optional request/response debug logging.
   * `init.headers` is expected to be a Headers instance (it is mutated here).
   */
  async fetch(url2, init) {
    const method = init.method || "GET";
    if (isDebugEnabled2()) {
      const logData = {
        method,
        url: url2,
        // NOTE(review): JSON.stringify of a Headers instance serializes as {} —
        // confirm whether header values were meant to appear in the debug log.
        headers: init.headers
      };
      const body = init.body;
      if (body !== void 0 && body !== null) {
        if (body instanceof ArrayBuffer) {
          logData.bodySize = body.byteLength;
        }
        else if (body instanceof Uint8Array) {
          logData.bodySize = body.byteLength;
        }
        else if (typeof body === "string") {
          // For strings this is the UTF-16 length, not the encoded byte size.
          logData.bodySize = body.length;
        }
        else {
          logData.bodyType = typeof body;
        }
      }
      console.debug("[VQS Debug] Request:", JSON.stringify(logData, null, 2));
    }
    init.headers.set("User-Agent", `@vercel/queue/${"0.0.0-alpha.38"}`);
    init.headers.set("Vqs-Client-Ts", /* @__PURE__ */ ( /* @__PURE__ */new Date()).toISOString());
    const response = await fetch(url2, init);
    if (isDebugEnabled2()) {
      const logData = {
        method,
        url: url2,
        status: response.status,
        statusText: response.statusText,
        headers: response.headers
      };
      console.debug("[VQS Debug] Response:", JSON.stringify(logData, null, 2));
    }
    return response;
  }
  /**
   * Send a message to a topic.
   *
   * @param options - Message options including queue name, payload, and optional settings
   * @param options.queueName - Topic name (pattern: `[A-Za-z0-9_-]+`)
   * @param options.payload - Message payload
   * @param options.idempotencyKey - Optional deduplication key (dedup window: min(retention, 24h))
   * @param options.retentionSeconds - Message TTL (default: 86400, min: 60, max: 86400)
   * @param options.delaySeconds - Delivery delay (default: 0, max: retentionSeconds)
   * @returns Promise with the generated messageId
   * @throws {DuplicateMessageError} When idempotency key was already used
   * @throws {ConsumerDiscoveryError} When consumer discovery fails
   * @throws {ConsumerRegistryNotConfiguredError} When registry not configured
   * @throws {BadRequestError} When parameters are invalid
   * @throws {UnauthorizedError} When authentication fails
   * @throws {ForbiddenError} When access is denied
   * @throws {InternalServerError} When server encounters an error
   */
  async sendMessage(options) {
    const transport = this.transport;
    const { queueName, payload, idempotencyKey, retentionSeconds, delaySeconds, headers: optionHeaders } = options;
    const headers = new Headers();
    if (this.customHeaders) {
      for (const [name, value] of Object.entries(this.customHeaders)) {
        headers.append(name, value);
      }
    }
    if (optionHeaders) {
      // Per-message headers must not override auth, content type, or any
      // protocol (Vqs-*) header.
      const protectedHeaderNames = /* @__PURE__ */ new Set([
        "authorization",
        "content-type"
      ]);
      const isProtectedHeader = /* @__PURE__ */ __name((name) => {
        const lower = name.toLowerCase();
        if (protectedHeaderNames.has(lower))
          return true;
        return lower.startsWith("vqs-");
      }, "isProtectedHeader");
      for (const [name, value] of Object.entries(optionHeaders)) {
        if (!isProtectedHeader(name) && value !== void 0) {
          headers.append(name, value);
        }
      }
    }
    // Protocol headers are set last so they win over anything appended above.
    headers.set("Authorization", `Bearer ${await this.getToken()}`);
    headers.set("Content-Type", transport.contentType);
    const deploymentId = this.getSendDeploymentId();
    if (deploymentId) {
      headers.set("Vqs-Deployment-Id", deploymentId);
    }
    if (idempotencyKey) {
      headers.set("Vqs-Idempotency-Key", idempotencyKey);
    }
    if (retentionSeconds !== void 0) {
      headers.set("Vqs-Retention-Seconds", retentionSeconds.toString());
    }
    if (delaySeconds !== void 0) {
      headers.set("Vqs-Delay-Seconds", delaySeconds.toString());
    }
    const serialized = transport.serialize(payload);
    // Node Buffers are rewrapped as Uint8Array so fetch treats them uniformly.
    const body = Buffer.isBuffer(serialized) ? new Uint8Array(serialized) : serialized;
    const response = await this.fetch(this.buildUrl(queueName), {
      method: "POST",
      body,
      headers
    });
    if (!response.ok) {
      const errorText = await response.text();
      if (response.status === 409) {
        throw new DuplicateMessageError2(errorText || "Duplicate idempotency key detected", idempotencyKey);
      }
      if (response.status === 502) {
        throw new ConsumerDiscoveryError2(errorText || "Consumer discovery failed", deploymentId);
      }
      if (response.status === 503) {
        throw new ConsumerRegistryNotConfiguredError2(errorText || "Consumer registry not configured");
      }
      throwCommonHttpError2(response.status, response.statusText, errorText, "send message");
    }
    const responseData = await response.json();
    return responseData;
  }
  /**
   * Receive messages from a topic as an async generator.
   *
   * When the queue is empty, the generator completes without yielding any
   * messages. Callers should handle the case where no messages are yielded.
   *
   * @param options - Receive options
   * @param options.queueName - Topic name (pattern: `[A-Za-z0-9_-]+`)
   * @param options.consumerGroup - Consumer group name (pattern: `[A-Za-z0-9_-]+`)
   * @param options.visibilityTimeoutSeconds - Lock duration (default: 30, min: 0, max: 3600)
   * @param options.limit - Max messages to retrieve (default: 1, min: 1, max: 10)
   * @yields Message objects with payload, messageId, receiptHandle, etc.
   *         Yields nothing if queue is empty.
   * @throws {InvalidLimitError} When limit is outside 1-10 range
   * @throws {BadRequestError} When parameters are invalid
   * @throws {UnauthorizedError} When authentication fails
   * @throws {ForbiddenError} When access is denied
   * @throws {InternalServerError} When server encounters an error
   */
  async *receiveMessages(options) {
    const transport = this.transport;
    const { queueName, consumerGroup, visibilityTimeoutSeconds, limit } = options;
    if (limit !== void 0 && (limit < 1 || limit > 10)) {
      throw new InvalidLimitError2(limit);
    }
    const headers = new Headers({
      Authorization: `Bearer ${await this.getToken()}`,
      Accept: "multipart/mixed",
      ...this.customHeaders
    });
    if (visibilityTimeoutSeconds !== void 0) {
      headers.set("Vqs-Visibility-Timeout-Seconds", visibilityTimeoutSeconds.toString());
    }
    if (limit !== void 0) {
      headers.set("Vqs-Max-Messages", limit.toString());
    }
    const effectiveDeploymentId = this.getConsumeDeploymentId();
    if (effectiveDeploymentId) {
      headers.set("Vqs-Deployment-Id", effectiveDeploymentId);
    }
    const response = await this.fetch(this.buildUrl(queueName, "consumer", consumerGroup), {
      method: "POST",
      headers
    });
    // 204 = empty queue: complete without yielding.
    if (response.status === 204) {
      return;
    }
    if (!response.ok) {
      const errorText = await response.text();
      throwCommonHttpError2(response.status, response.statusText, errorText, "receive messages");
    }
    for await (const multipartMessage of parseMultipartStream(response)) {
      try {
        const parsedHeaders = parseQueueHeaders2(multipartMessage.headers);
        if (!parsedHeaders) {
          // Malformed part: warn, drain its payload stream, and move on.
          console.warn("Missing required queue headers in multipart part");
          await consumeStream2(multipartMessage.payload);
          continue;
        }
        const deserializedPayload = await transport.deserialize(multipartMessage.payload);
        const message = {
          ...parsedHeaders,
          payload: deserializedPayload
        };
        yield message;
      }
      catch (error45) {
        // A single bad part must not abort the whole batch; drain and continue.
        console.warn("Failed to process multipart message:", error45);
        await consumeStream2(multipartMessage.payload);
      }
    }
  }
  /**
   * Receive a specific message by its ID.
   *
   * @param options - Receive options
   * @param options.queueName - Topic name (pattern: `[A-Za-z0-9_-]+`)
   * @param options.consumerGroup - Consumer group name (pattern: `[A-Za-z0-9_-]+`)
   * @param options.messageId - Message ID to retrieve
   * @param options.visibilityTimeoutSeconds - Lock duration (default: 30, min: 0, max: 3600)
   * @returns Promise with the message
   * @throws {MessageNotFoundError} When message doesn't exist
   * @throws {MessageNotAvailableError} When message is in wrong state or was a duplicate
   * @throws {MessageAlreadyProcessedError} When message was already processed
   * @throws {BadRequestError} When parameters are invalid
   * @throws {UnauthorizedError} When authentication fails
   * @throws {ForbiddenError} When access is denied
   * @throws {InternalServerError} When server encounters an error
   */
  async receiveMessageById(options) {
    const transport = this.transport;
    const { queueName, consumerGroup, messageId, visibilityTimeoutSeconds } = options;
    const headers = new Headers({
      Authorization: `Bearer ${await this.getToken()}`,
      Accept: "multipart/mixed",
      ...this.customHeaders
    });
    if (visibilityTimeoutSeconds !== void 0) {
      headers.set("Vqs-Visibility-Timeout-Seconds", visibilityTimeoutSeconds.toString());
    }
    const effectiveDeploymentId = this.getConsumeDeploymentId();
    if (effectiveDeploymentId) {
      headers.set("Vqs-Deployment-Id", effectiveDeploymentId);
    }
    const response = await this.fetch(this.buildUrl(queueName, "consumer", consumerGroup, "id", messageId), {
      method: "POST",
      headers
    });
    if (!response.ok) {
      const errorText = await response.text();
      if (response.status === 404) {
        throw new MessageNotFoundError2(messageId);
      }
      if (response.status === 409) {
        // The 409 body may identify the original message for a deduplicated send.
        let errorData = {};
        try {
          errorData = JSON.parse(errorText);
        }
        catch {
        }
        if (errorData.originalMessageId) {
          throw new MessageNotAvailableError2(messageId, `This message was a duplicate - use originalMessageId: ${errorData.originalMessageId}`);
        }
        throw new MessageNotAvailableError2(messageId);
      }
      if (response.status === 410) {
        throw new MessageAlreadyProcessedError2(messageId);
      }
      throwCommonHttpError2(response.status, response.statusText, errorText, "receive message by ID");
    }
    // Only the first multipart part is consumed; the loop returns on it.
    for await (const multipartMessage of parseMultipartStream(response)) {
      const parsedHeaders = parseQueueHeaders2(multipartMessage.headers);
      if (!parsedHeaders) {
        await consumeStream2(multipartMessage.payload);
        throw new MessageCorruptedError2(messageId, "Missing required queue headers in response");
      }
      const deserializedPayload = await transport.deserialize(multipartMessage.payload);
      const message = {
        ...parsedHeaders,
        payload: deserializedPayload
      };
      return {
        message
      };
    }
    // A 2xx response with no multipart parts is treated as not found.
    throw new MessageNotFoundError2(messageId);
  }
  /**
   * Delete (acknowledge) a message after successful processing.
   *
   * @param options - Delete options
   * @param options.queueName - Topic name
   * @param options.consumerGroup - Consumer group name
   * @param options.receiptHandle - Receipt handle from the received message (must use same deployment ID as receive)
   * @returns Promise indicating deletion success
   * @throws {MessageNotFoundError} When receipt handle not found
   * @throws {MessageNotAvailableError} When receipt handle invalid or message already processed
   * @throws {BadRequestError} When parameters are invalid
   * @throws {UnauthorizedError} When authentication fails
   * @throws {ForbiddenError} When access is denied
   * @throws {InternalServerError} When server encounters an error
   */
  async deleteMessage(options) {
    const { queueName, consumerGroup, receiptHandle } = options;
    const headers = new Headers({
      Authorization: `Bearer ${await this.getToken()}`,
      ...this.customHeaders
    });
    const effectiveDeploymentId = this.getConsumeDeploymentId();
    if (effectiveDeploymentId) {
      headers.set("Vqs-Deployment-Id", effectiveDeploymentId);
    }
    const response = await this.fetch(this.buildUrl(queueName, "consumer", consumerGroup, "lease", receiptHandle), {
      method: "DELETE",
      headers
    });
    if (!response.ok) {
      const errorText = await response.text();
      if (response.status === 404) {
        throw new MessageNotFoundError2(receiptHandle);
      }
      if (response.status === 409) {
        throw new MessageNotAvailableError2(receiptHandle, errorText || "Invalid receipt handle, message not in correct state, or already processed");
      }
      throwCommonHttpError2(response.status, response.statusText, errorText, "delete message", "Missing or invalid receipt handle");
    }
    return {
      deleted: true
    };
  }
  /**
   * Extend or change the visibility timeout of a message.
   * Used to prevent message redelivery while still processing.
   *
   * @param options - Visibility options
   * @param options.queueName - Topic name
   * @param options.consumerGroup - Consumer group name
   * @param options.receiptHandle - Receipt handle from the received message (must use same deployment ID as receive)
   * @param options.visibilityTimeoutSeconds - New timeout (min: 0, max: 3600, cannot exceed message expiration)
   * @returns Promise indicating success
   * @throws {MessageNotFoundError} When receipt handle not found
   * @throws {MessageNotAvailableError} When receipt handle invalid or message already processed
   * @throws {BadRequestError} When parameters are invalid
   * @throws {UnauthorizedError} When authentication fails
   * @throws {ForbiddenError} When access is denied
   * @throws {InternalServerError} When server encounters an error
   */
  async changeVisibility(options) {
    const { queueName, consumerGroup, receiptHandle, visibilityTimeoutSeconds } = options;
    const headers = new Headers({
      Authorization: `Bearer ${await this.getToken()}`,
      "Content-Type": "application/json",
      ...this.customHeaders
    });
    const effectiveDeploymentId = this.getConsumeDeploymentId();
    if (effectiveDeploymentId) {
      headers.set("Vqs-Deployment-Id", effectiveDeploymentId);
    }
    const response = await this.fetch(this.buildUrl(queueName, "consumer", consumerGroup, "lease", receiptHandle), {
      method: "PATCH",
      headers,
      body: JSON.stringify({
        visibilityTimeoutSeconds
      })
    });
    if (!response.ok) {
      const errorText = await response.text();
      if (response.status === 404) {
        throw new MessageNotFoundError2(receiptHandle);
      }
      if (response.status === 409) {
        throw new MessageNotAvailableError2(receiptHandle, errorText || "Invalid receipt handle, message not in correct state, or already processed");
      }
      throwCommonHttpError2(response.status, response.statusText, errorText, "change visibility", "Missing receipt handle or invalid visibility timeout");
    }
    return {
      success: true
    };
  }
  /**
   * Alternative endpoint for changing message visibility timeout.
   * Uses the /visibility path suffix and expects visibilityTimeoutSeconds in the body.
   * Functionally equivalent to changeVisibility but follows an alternative API pattern.
   *
   * @param options - Options for changing visibility
   * @returns Promise resolving to change visibility response
   */
  async changeVisibilityAlt(options) {
    const { queueName, consumerGroup, receiptHandle, visibilityTimeoutSeconds } = options;
    const headers = new Headers({
      Authorization: `Bearer ${await this.getToken()}`,
      "Content-Type": "application/json",
      ...this.customHeaders
    });
    const effectiveDeploymentId = this.getConsumeDeploymentId();
    if (effectiveDeploymentId) {
      headers.set("Vqs-Deployment-Id", effectiveDeploymentId);
    }
    const response = await this.fetch(this.buildUrl(queueName, "consumer", consumerGroup, "lease", receiptHandle, "visibility"), {
      method: "PATCH",
      headers,
      body: JSON.stringify({
        visibilityTimeoutSeconds
      })
    });
    if (!response.ok) {
      const errorText = await response.text();
      if (response.status === 404) {
        throw new MessageNotFoundError2(receiptHandle);
      }
      if (response.status === 409) {
        throw new MessageNotAvailableError2(receiptHandle, errorText || "Invalid receipt handle, message not in correct state, or already processed");
      }
      throwCommonHttpError2(response.status, response.statusText, errorText, "change visibility (alt)", "Missing receipt handle or invalid visibility timeout");
    }
    return {
      success: true
    };
  }
};
|
|
44993
|
+
var DEFAULT_VISIBILITY_TIMEOUT_SECONDS = 300;
|
|
44994
|
+
var MIN_VISIBILITY_TIMEOUT_SECONDS = 30;
|
|
44995
|
+
var MAX_RENEWAL_INTERVAL_SECONDS = 60;
|
|
44996
|
+
var MIN_RENEWAL_INTERVAL_SECONDS = 10;
|
|
44997
|
+
var RETRY_INTERVAL_MS = 3e3;
|
|
44998
|
+
/**
 * Compute how often (in seconds) a message lease should be renewed:
 * one fifth of the visibility timeout, clamped into
 * [MIN_RENEWAL_INTERVAL_SECONDS, MAX_RENEWAL_INTERVAL_SECONDS].
 *
 * @param {number} visibilityTimeoutSeconds - Current visibility timeout in seconds.
 * @returns {number} Renewal interval in seconds.
 */
function calculateRenewalInterval(visibilityTimeoutSeconds) {
  const candidate = visibilityTimeoutSeconds / 5;
  if (candidate < MIN_RENEWAL_INTERVAL_SECONDS) {
    return MIN_RENEWAL_INTERVAL_SECONDS;
  }
  return candidate > MAX_RENEWAL_INTERVAL_SECONDS ? MAX_RENEWAL_INTERVAL_SECONDS : candidate;
}
__name(calculateRenewalInterval, "calculateRenewalInterval");
|
|
45002
|
+
/**
 * High-level consumer for one (topic, consumer group) pair.
 *
 * Wraps a QueueClient with the full message lifecycle: receive, keep the
 * lease alive via a background visibility-renewal loop while a user handler
 * runs, delete (ack) on success, and finalize the payload on failure.
 */
var ConsumerGroup = class {
  static {
    __name(this, "ConsumerGroup");
  }
  // QueueClient used for all API calls.
  client;
  // Topic this group consumes from.
  topicName;
  // Name of the consumer group.
  consumerGroupName;
  // Effective lock duration (seconds), floored at MIN_VISIBILITY_TIMEOUT_SECONDS.
  visibilityTimeout;
  /**
   * Create a new ConsumerGroup instance.
   *
   * @param client - QueueClient instance to use for API calls (transport is configured on the client)
   * @param topicName - Name of the topic to consume from (pattern: `[A-Za-z0-9_-]+`)
   * @param consumerGroupName - Name of the consumer group (pattern: `[A-Za-z0-9_-]+`)
   * @param options - Optional configuration
   * @param options.visibilityTimeoutSeconds - Message lock duration (default: 300, max: 3600)
   */
  constructor(client, topicName, consumerGroupName, options = {}) {
    this.client = client;
    this.topicName = topicName;
    this.consumerGroupName = consumerGroupName;
    this.visibilityTimeout = Math.max(MIN_VISIBILITY_TIMEOUT_SECONDS, options.visibilityTimeoutSeconds ?? DEFAULT_VISIBILITY_TIMEOUT_SECONDS);
  }
  /**
   * Check if an error is a 4xx client error that should stop retries.
   * 4xx errors indicate the request is fundamentally invalid and retrying won't help.
   * - 409: Ticket mismatch (lost ownership to another consumer)
   * - 404: Message/receipt handle not found
   * - 400, 401, 403: Other client errors
   */
  isClientError(error45) {
    return error45 instanceof MessageNotAvailableError2 || // 409 - ticket mismatch, lost ownership
    error45 instanceof MessageNotFoundError2 || // 404 - receipt handle not found
    error45 instanceof BadRequestError2 || // 400 - invalid parameters
    error45 instanceof UnauthorizedError2 || // 401 - auth failed
    error45 instanceof ForbiddenError2;
  }
  /**
   * Starts a background loop that periodically extends the visibility timeout for a message.
   *
   * Timing strategy:
   * - Renewal interval: min(60s, max(10s, visibilityTimeout/5))
   * - Extensions request the same duration as the initial visibility timeout
   * - When `visibilityDeadline` is provided (binary mode small body), the first
   *   extension delay is calculated from the time remaining until the deadline
   *   using the same renewal formula, ensuring the first extension fires before
   *   the server-assigned lease expires. Subsequent renewals use the standard interval.
   *
   * Retry strategy:
   * - On transient failures (5xx, network errors): retry every 3 seconds
   * - On 4xx client errors: stop retrying (the lease is lost or invalid)
   *
   * @param receiptHandle - The receipt handle to extend visibility for
   * @param options - Optional configuration
   * @param options.visibilityDeadline - Absolute deadline (from server's `ce-vqsvisibilitydeadline`)
   *   when the current visibility timeout expires. Used to calculate the first extension delay.
   * @returns An async stop function: `stop(waitForCompletion)` cancels the loop;
   *   when `waitForCompletion` is true it awaits any in-flight extension call.
   */
  startVisibilityExtension(receiptHandle, options) {
    // Shared mutable loop state; `extend2` and the returned stop function both read it.
    let isRunning = true;
    let isResolved = false;
    let resolveLifecycle;
    let timeoutId = null;
    const renewalIntervalMs = calculateRenewalInterval(this.visibilityTimeout) * 1e3;
    let firstDelayMs = renewalIntervalMs;
    if (options?.visibilityDeadline) {
      const timeRemainingMs = options.visibilityDeadline.getTime() - Date.now();
      if (timeRemainingMs > 0) {
        const timeRemainingSeconds = timeRemainingMs / 1e3;
        firstDelayMs = calculateRenewalInterval(timeRemainingSeconds) * 1e3;
      }
      else {
        // Deadline already passed: attempt an extension immediately.
        firstDelayMs = 0;
      }
    }
    // Resolves exactly once, when the loop has fully terminated.
    const lifecyclePromise = new Promise((resolve2) => {
      resolveLifecycle = resolve2;
    });
    const safeResolve = /* @__PURE__ */ __name(() => {
      if (!isResolved) {
        isResolved = true;
        resolveLifecycle();
      }
    }, "safeResolve");
    const extend2 = /* @__PURE__ */ __name(async () => {
      if (!isRunning) {
        safeResolve();
        return;
      }
      try {
        await this.client.changeVisibility({
          queueName: this.topicName,
          consumerGroup: this.consumerGroupName,
          receiptHandle,
          visibilityTimeoutSeconds: this.visibilityTimeout
        });
        // Re-check isRunning: stop() may have been called during the await.
        if (isRunning) {
          timeoutId = setTimeout(() => extend2(), renewalIntervalMs);
        }
        else {
          safeResolve();
        }
      }
      catch (error45) {
        if (this.isClientError(error45)) {
          // Lease is lost or the request is invalid; retrying cannot help.
          console.error(`Visibility extension failed with client error for receipt handle ${receiptHandle} (stopping retries):`, error45);
          safeResolve();
          return;
        }
        console.error(`Failed to extend visibility for receipt handle ${receiptHandle} (will retry in ${RETRY_INTERVAL_MS / 1e3}s):`, error45);
        if (isRunning) {
          timeoutId = setTimeout(() => extend2(), RETRY_INTERVAL_MS);
        }
        else {
          safeResolve();
        }
      }
    }, "extend");
    timeoutId = setTimeout(() => extend2(), firstDelayMs);
    return async (waitForCompletion = false) => {
      isRunning = false;
      if (timeoutId) {
        clearTimeout(timeoutId);
        timeoutId = null;
      }
      if (waitForCompletion) {
        await lifecyclePromise;
      }
      else {
        safeResolve();
      }
    };
  }
  /**
   * Run the user handler for one message with full lifecycle management:
   * keep the lease alive while the handler runs, delete the message on
   * success, and on failure finalize the payload (if the transport supports
   * it) before rethrowing the handler's error.
   *
   * @param message - Received message (payload plus metadata/receipt handle)
   * @param handler - User callback invoked as handler(payload, metadata)
   * @param options - Forwarded to startVisibilityExtension (e.g. visibilityDeadline)
   */
  async processMessage(message, handler, options) {
    const stopExtension = this.startVisibilityExtension(message.receiptHandle, options);
    try {
      await handler(message.payload, {
        messageId: message.messageId,
        deliveryCount: message.deliveryCount,
        createdAt: message.createdAt,
        topicName: this.topicName,
        consumerGroup: this.consumerGroupName
      });
      await stopExtension();
      await this.client.deleteMessage({
        queueName: this.topicName,
        consumerGroup: this.consumerGroupName,
        receiptHandle: message.receiptHandle
      });
    }
    catch (error45) {
      await stopExtension();
      // Best-effort payload cleanup; finalize failures are logged, not thrown.
      const transport = this.client.getTransport();
      if (transport.finalize && message.payload !== void 0 && message.payload !== null) {
        try {
          await transport.finalize(message.payload);
        }
        catch (finalizeError) {
          console.warn("Failed to finalize message payload:", finalizeError);
        }
      }
      throw error45;
    }
  }
  /**
   * Process a pre-fetched message directly, without calling `receiveMessageById`.
   *
   * Used by the binary mode (v2beta) small body fast path, where the server
   * pushes the full message payload in the callback request. The message is
   * processed with the same lifecycle guarantees as `consume()`:
   * - Visibility timeout is extended periodically during processing
   * - Message is deleted on successful handler completion
   * - Payload is finalized on error if the transport supports it
   *
   * @param handler - Function to process the message payload and metadata
   * @param message - The complete message including payload and receipt handle
   * @param options - Optional configuration
   * @param options.visibilityDeadline - Absolute deadline when the server-assigned
   *   visibility timeout expires (from `ce-vqsvisibilitydeadline`). Used to
   *   schedule the first visibility extension before the lease expires.
   */
  async consumeMessage(handler, message, options) {
    await this.processMessage(message, handler, options);
  }
  /**
   * Pull and process messages.
   *
   * With `options.messageId`, fetches that single message via
   * receiveMessageById and processes it. Otherwise pulls up to
   * `options.limit` (default 1) messages via receiveMessages and processes
   * them sequentially; if the queue is empty the handler is invoked once
   * as handler(null, null) to signal "no message".
   *
   * @param handler - User callback invoked per message (or with nulls when empty)
   * @param options - Either `{ messageId }` or `{ limit }` (both optional)
   */
  async consume(handler, options) {
    if (options && "messageId" in options) {
      const response = await this.client.receiveMessageById({
        queueName: this.topicName,
        consumerGroup: this.consumerGroupName,
        messageId: options.messageId,
        visibilityTimeoutSeconds: this.visibilityTimeout
      });
      await this.processMessage(response.message, handler);
    }
    else {
      const limit = options && "limit" in options ? options.limit : 1;
      let messageFound = false;
      for await (const message of this.client.receiveMessages({
        queueName: this.topicName,
        consumerGroup: this.consumerGroupName,
        visibilityTimeoutSeconds: this.visibilityTimeout,
        limit
      })) {
        messageFound = true;
        await this.processMessage(message, handler);
      }
      if (!messageFound) {
        await handler(null, null);
      }
    }
  }
  /**
   * Get the consumer group name
   */
  get name() {
    return this.consumerGroupName;
  }
  /**
   * Get the topic name this consumer group is subscribed to
   */
  get topic() {
    return this.topicName;
  }
};
|
|
45225
|
+
var Topic = class {
|
|
45226
|
+
static {
|
|
45227
|
+
__name(this, "Topic");
|
|
45228
|
+
}
|
|
45229
|
+
client;
|
|
45230
|
+
topicName;
|
|
45231
|
+
/**
|
|
45232
|
+
* Create a new Topic instance
|
|
45233
|
+
* @param client QueueClient instance to use for API calls (transport is configured on the client)
|
|
45234
|
+
* @param topicName Name of the topic to work with
|
|
45235
|
+
*/
|
|
45236
|
+
constructor(client, topicName) {
|
|
45237
|
+
this.client = client;
|
|
45238
|
+
this.topicName = topicName;
|
|
45239
|
+
}
|
|
45240
|
+
/**
|
|
45241
|
+
* Publish a message to the topic
|
|
45242
|
+
* @param payload The data to publish
|
|
45243
|
+
* @param options Optional publish options
|
|
45244
|
+
* @returns An object containing the message ID
|
|
45245
|
+
* @throws {BadRequestError} When request parameters are invalid
|
|
45246
|
+
* @throws {UnauthorizedError} When authentication fails
|
|
45247
|
+
* @throws {ForbiddenError} When access is denied (environment mismatch)
|
|
45248
|
+
* @throws {InternalServerError} When server encounters an error
|
|
45249
|
+
*/
|
|
45250
|
+
async publish(payload, options) {
|
|
45251
|
+
const result = await this.client.sendMessage({
|
|
45252
|
+
queueName: this.topicName,
|
|
45253
|
+
payload,
|
|
45254
|
+
idempotencyKey: options?.idempotencyKey,
|
|
45255
|
+
retentionSeconds: options?.retentionSeconds,
|
|
45256
|
+
delaySeconds: options?.delaySeconds,
|
|
45257
|
+
headers: options?.headers
|
|
45258
|
+
});
|
|
45259
|
+
if (isDevMode2()) {
|
|
45260
|
+
triggerDevCallbacks(this.topicName, result.messageId);
|
|
45261
|
+
}
|
|
45262
|
+
return {
|
|
45263
|
+
messageId: result.messageId
|
|
45264
|
+
};
|
|
45265
|
+
}
|
|
45266
|
+
/**
|
|
45267
|
+
* Create a consumer group for this topic
|
|
45268
|
+
* @param consumerGroupName Name of the consumer group
|
|
45269
|
+
* @param options Optional configuration for the consumer group
|
|
45270
|
+
* @returns A ConsumerGroup instance
|
|
45271
|
+
*/
|
|
45272
|
+
consumerGroup(consumerGroupName, options) {
|
|
45273
|
+
return new ConsumerGroup(this.client, this.topicName, consumerGroupName, options);
|
|
45274
|
+
}
|
|
45275
|
+
/**
|
|
45276
|
+
* Get the topic name
|
|
45277
|
+
*/
|
|
45278
|
+
get name() {
|
|
45279
|
+
return this.topicName;
|
|
45280
|
+
}
|
|
45281
|
+
};
|
|
45282
|
+
var CLOUD_EVENT_TYPE_V1BETA = "com.vercel.queue.v1beta";
|
|
45283
|
+
var CLOUD_EVENT_TYPE_V2BETA = "com.vercel.queue.v2beta";
|
|
45284
|
+
function matchesWildcardPattern(topicName, pattern) {
|
|
45285
|
+
const prefix = pattern.slice(0, -1);
|
|
45286
|
+
return topicName.startsWith(prefix);
|
|
44370
45287
|
}
|
|
44371
|
-
__name(
|
|
44372
|
-
|
|
44373
|
-
|
|
44374
|
-
var import_node_path8 = __toESM(require("node:path"), 1);
|
|
44375
|
-
var monotonicUlid2 = monotonicFactory(() => Math.random());
|
|
44376
|
-
var RunStreamsSchema = external_exports.object({
|
|
44377
|
-
streams: external_exports.array(external_exports.string())
|
|
44378
|
-
});
|
|
44379
|
-
function serializeChunk(chunk) {
|
|
44380
|
-
const eofByte = Buffer.from([
|
|
44381
|
-
chunk.eof ? 1 : 0
|
|
44382
|
-
]);
|
|
44383
|
-
return Buffer.concat([
|
|
44384
|
-
eofByte,
|
|
44385
|
-
chunk.chunk
|
|
44386
|
-
]);
|
|
45288
|
+
__name(matchesWildcardPattern, "matchesWildcardPattern");
|
|
45289
|
+
function isRecord(value) {
|
|
45290
|
+
return typeof value === "object" && value !== null;
|
|
44387
45291
|
}
|
|
44388
|
-
__name(
|
|
44389
|
-
function
|
|
44390
|
-
|
|
44391
|
-
|
|
45292
|
+
__name(isRecord, "isRecord");
|
|
45293
|
+
function parseV1StructuredBody(body, contentType) {
|
|
45294
|
+
if (!contentType || !contentType.includes("application/cloudevents+json")) {
|
|
45295
|
+
throw new Error("Invalid content type: expected 'application/cloudevents+json'");
|
|
45296
|
+
}
|
|
45297
|
+
if (!isRecord(body) || !body.type || !body.source || !body.id || !isRecord(body.data)) {
|
|
45298
|
+
throw new Error("Invalid CloudEvent: missing required fields");
|
|
45299
|
+
}
|
|
45300
|
+
if (body.type !== CLOUD_EVENT_TYPE_V1BETA) {
|
|
45301
|
+
throw new Error(`Invalid CloudEvent type: expected '${CLOUD_EVENT_TYPE_V1BETA}', got '${String(body.type)}'`);
|
|
45302
|
+
}
|
|
45303
|
+
const { data } = body;
|
|
45304
|
+
const missingFields = [];
|
|
45305
|
+
if (!("queueName" in data))
|
|
45306
|
+
missingFields.push("queueName");
|
|
45307
|
+
if (!("consumerGroup" in data))
|
|
45308
|
+
missingFields.push("consumerGroup");
|
|
45309
|
+
if (!("messageId" in data))
|
|
45310
|
+
missingFields.push("messageId");
|
|
45311
|
+
if (missingFields.length > 0) {
|
|
45312
|
+
throw new Error(`Missing required CloudEvent data fields: ${missingFields.join(", ")}`);
|
|
45313
|
+
}
|
|
44392
45314
|
return {
|
|
44393
|
-
|
|
44394
|
-
|
|
45315
|
+
queueName: String(data.queueName),
|
|
45316
|
+
consumerGroup: String(data.consumerGroup),
|
|
45317
|
+
messageId: String(data.messageId)
|
|
45318
|
+
};
|
|
45319
|
+
}
|
|
45320
|
+
__name(parseV1StructuredBody, "parseV1StructuredBody");
|
|
45321
|
+
function getHeader(headers, name) {
|
|
45322
|
+
if (headers instanceof Headers) {
|
|
45323
|
+
return headers.get(name);
|
|
45324
|
+
}
|
|
45325
|
+
const value = headers[name];
|
|
45326
|
+
if (Array.isArray(value))
|
|
45327
|
+
return value[0] ?? null;
|
|
45328
|
+
return value ?? null;
|
|
45329
|
+
}
|
|
45330
|
+
__name(getHeader, "getHeader");
|
|
45331
|
+
function parseBinaryHeaders(headers) {
|
|
45332
|
+
const ceType = getHeader(headers, "ce-type");
|
|
45333
|
+
if (ceType !== CLOUD_EVENT_TYPE_V2BETA) {
|
|
45334
|
+
throw new Error(`Invalid CloudEvent type: expected '${CLOUD_EVENT_TYPE_V2BETA}', got '${ceType}'`);
|
|
45335
|
+
}
|
|
45336
|
+
const queueName = getHeader(headers, "ce-vqsqueuename");
|
|
45337
|
+
const consumerGroup = getHeader(headers, "ce-vqsconsumergroup");
|
|
45338
|
+
const messageId = getHeader(headers, "ce-vqsmessageid");
|
|
45339
|
+
const missingFields = [];
|
|
45340
|
+
if (!queueName)
|
|
45341
|
+
missingFields.push("ce-vqsqueuename");
|
|
45342
|
+
if (!consumerGroup)
|
|
45343
|
+
missingFields.push("ce-vqsconsumergroup");
|
|
45344
|
+
if (!messageId)
|
|
45345
|
+
missingFields.push("ce-vqsmessageid");
|
|
45346
|
+
if (missingFields.length > 0) {
|
|
45347
|
+
throw new Error(`Missing required CloudEvent headers: ${missingFields.join(", ")}`);
|
|
45348
|
+
}
|
|
45349
|
+
const base = {
|
|
45350
|
+
queueName,
|
|
45351
|
+
consumerGroup,
|
|
45352
|
+
messageId
|
|
45353
|
+
};
|
|
45354
|
+
const receiptHandle = getHeader(headers, "ce-vqsreceipthandle");
|
|
45355
|
+
if (!receiptHandle) {
|
|
45356
|
+
return base;
|
|
45357
|
+
}
|
|
45358
|
+
const result = {
|
|
45359
|
+
...base,
|
|
45360
|
+
receiptHandle
|
|
44395
45361
|
};
|
|
45362
|
+
const deliveryCount = getHeader(headers, "ce-vqsdeliverycount");
|
|
45363
|
+
if (deliveryCount) {
|
|
45364
|
+
result.deliveryCount = parseInt(deliveryCount, 10);
|
|
45365
|
+
}
|
|
45366
|
+
const createdAt = getHeader(headers, "ce-vqscreatedat");
|
|
45367
|
+
if (createdAt) {
|
|
45368
|
+
result.createdAt = createdAt;
|
|
45369
|
+
}
|
|
45370
|
+
const contentType = getHeader(headers, "content-type");
|
|
45371
|
+
if (contentType) {
|
|
45372
|
+
result.contentType = contentType;
|
|
45373
|
+
}
|
|
45374
|
+
const visibilityDeadline = getHeader(headers, "ce-vqsvisibilitydeadline");
|
|
45375
|
+
if (visibilityDeadline) {
|
|
45376
|
+
result.visibilityDeadline = visibilityDeadline;
|
|
45377
|
+
}
|
|
45378
|
+
return result;
|
|
44396
45379
|
}
|
|
44397
|
-
__name(
|
|
44398
|
-
function
|
|
44399
|
-
const
|
|
44400
|
-
|
|
44401
|
-
|
|
44402
|
-
|
|
44403
|
-
|
|
44404
|
-
return;
|
|
44405
|
-
}
|
|
44406
|
-
const runStreamsPath = import_node_path8.default.join(basedir, "streams", "runs", `${runId}.json`);
|
|
44407
|
-
const existing = await readJSON(runStreamsPath, RunStreamsSchema);
|
|
44408
|
-
const streams = existing?.streams ?? [];
|
|
44409
|
-
if (!streams.includes(streamName)) {
|
|
44410
|
-
streams.push(streamName);
|
|
44411
|
-
await writeJSON(runStreamsPath, {
|
|
44412
|
-
streams
|
|
44413
|
-
}, {
|
|
44414
|
-
overwrite: true
|
|
44415
|
-
});
|
|
45380
|
+
__name(parseBinaryHeaders, "parseBinaryHeaders");
|
|
45381
|
+
function parseRawCallback(body, headers) {
|
|
45382
|
+
const ceType = getHeader(headers, "ce-type");
|
|
45383
|
+
if (ceType === CLOUD_EVENT_TYPE_V2BETA) {
|
|
45384
|
+
const result = parseBinaryHeaders(headers);
|
|
45385
|
+
if ("receiptHandle" in result) {
|
|
45386
|
+
result.parsedPayload = body;
|
|
44416
45387
|
}
|
|
44417
|
-
|
|
45388
|
+
return result;
|
|
44418
45389
|
}
|
|
44419
|
-
|
|
44420
|
-
|
|
44421
|
-
|
|
44422
|
-
|
|
45390
|
+
return parseV1StructuredBody(body, getHeader(headers, "content-type"));
|
|
45391
|
+
}
|
|
45392
|
+
__name(parseRawCallback, "parseRawCallback");
|
|
45393
|
+
async function parseCallback(request) {
|
|
45394
|
+
const ceType = request.headers.get("ce-type");
|
|
45395
|
+
if (ceType === CLOUD_EVENT_TYPE_V2BETA) {
|
|
45396
|
+
const result = parseBinaryHeaders(request.headers);
|
|
45397
|
+
if ("receiptHandle" in result && request.body) {
|
|
45398
|
+
result.rawBody = request.body;
|
|
44423
45399
|
}
|
|
44424
|
-
|
|
44425
|
-
|
|
45400
|
+
return result;
|
|
45401
|
+
}
|
|
45402
|
+
let body;
|
|
45403
|
+
try {
|
|
45404
|
+
body = await request.json();
|
|
45405
|
+
}
|
|
45406
|
+
catch {
|
|
45407
|
+
throw new Error("Failed to parse CloudEvent from request body");
|
|
45408
|
+
}
|
|
45409
|
+
const headers = {};
|
|
45410
|
+
request.headers.forEach((value, key) => {
|
|
45411
|
+
headers[key] = value;
|
|
45412
|
+
});
|
|
45413
|
+
return parseRawCallback(body, headers);
|
|
45414
|
+
}
|
|
45415
|
+
__name(parseCallback, "parseCallback");
|
|
45416
|
+
async function handleCallback(handler, request, options) {
|
|
45417
|
+
const { queueName, consumerGroup, messageId } = request;
|
|
45418
|
+
const client = options?.client || new QueueClient2();
|
|
45419
|
+
const topic = new Topic(client, queueName);
|
|
45420
|
+
const cg = topic.consumerGroup(consumerGroup, options?.visibilityTimeoutSeconds !== void 0 ? {
|
|
45421
|
+
visibilityTimeoutSeconds: options.visibilityTimeoutSeconds
|
|
45422
|
+
} : void 0);
|
|
45423
|
+
if ("receiptHandle" in request) {
|
|
45424
|
+
const transport = client.getTransport();
|
|
45425
|
+
let payload;
|
|
45426
|
+
if (request.rawBody) {
|
|
45427
|
+
payload = await transport.deserialize(request.rawBody);
|
|
45428
|
+
}
|
|
45429
|
+
else if (request.parsedPayload !== void 0) {
|
|
45430
|
+
payload = request.parsedPayload;
|
|
44426
45431
|
}
|
|
44427
45432
|
else {
|
|
44428
|
-
|
|
45433
|
+
throw new Error("Binary mode callback with receipt handle is missing payload");
|
|
44429
45434
|
}
|
|
45435
|
+
const message = {
|
|
45436
|
+
messageId,
|
|
45437
|
+
payload,
|
|
45438
|
+
deliveryCount: request.deliveryCount ?? 1,
|
|
45439
|
+
createdAt: request.createdAt ? new Date(request.createdAt) : /* @__PURE__ */ new Date(),
|
|
45440
|
+
contentType: request.contentType ?? transport.contentType,
|
|
45441
|
+
receiptHandle: request.receiptHandle
|
|
45442
|
+
};
|
|
45443
|
+
const visibilityDeadline = request.visibilityDeadline ? new Date(request.visibilityDeadline) : void 0;
|
|
45444
|
+
await cg.consumeMessage(handler, message, {
|
|
45445
|
+
visibilityDeadline
|
|
45446
|
+
});
|
|
44430
45447
|
}
|
|
44431
|
-
|
|
44432
|
-
|
|
44433
|
-
|
|
44434
|
-
|
|
44435
|
-
|
|
44436
|
-
|
|
44437
|
-
|
|
44438
|
-
|
|
44439
|
-
|
|
44440
|
-
|
|
44441
|
-
|
|
44442
|
-
|
|
44443
|
-
|
|
44444
|
-
|
|
44445
|
-
streamEmitter.emit(`chunk:${name}`, {
|
|
44446
|
-
streamName: name,
|
|
44447
|
-
chunkData,
|
|
44448
|
-
chunkId
|
|
44449
|
-
});
|
|
44450
|
-
},
|
|
44451
|
-
async writeToStreamMulti(name, _runId, chunks) {
|
|
44452
|
-
if (chunks.length === 0)
|
|
44453
|
-
return;
|
|
44454
|
-
const chunkIds = chunks.map(() => `chnk_${monotonicUlid2()}`);
|
|
44455
|
-
const runId = await _runId;
|
|
44456
|
-
await registerStreamForRun(runId, name);
|
|
44457
|
-
const chunkBuffers = chunks.map((chunk) => toBuffer(chunk));
|
|
44458
|
-
const writePromises = chunkBuffers.map(async (chunkBuffer, i) => {
|
|
44459
|
-
const chunkId = chunkIds[i];
|
|
44460
|
-
const serialized = serializeChunk({
|
|
44461
|
-
chunk: chunkBuffer,
|
|
44462
|
-
eof: false
|
|
44463
|
-
});
|
|
44464
|
-
const chunkPath = import_node_path8.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.bin`);
|
|
44465
|
-
await write(chunkPath, serialized);
|
|
44466
|
-
return {
|
|
44467
|
-
chunkId,
|
|
44468
|
-
chunkData: Uint8Array.from(chunkBuffer)
|
|
44469
|
-
};
|
|
45448
|
+
else {
|
|
45449
|
+
await cg.consume(handler, {
|
|
45450
|
+
messageId
|
|
45451
|
+
});
|
|
45452
|
+
}
|
|
45453
|
+
}
|
|
45454
|
+
__name(handleCallback, "handleCallback");
|
|
45455
|
+
function handleCallback2(handler, options) {
|
|
45456
|
+
return async (request) => {
|
|
45457
|
+
try {
|
|
45458
|
+
const parsed = await parseCallback(request);
|
|
45459
|
+
await handleCallback(handler, parsed, options);
|
|
45460
|
+
return Response.json({
|
|
45461
|
+
status: "success"
|
|
44470
45462
|
});
|
|
44471
|
-
|
|
44472
|
-
|
|
44473
|
-
|
|
44474
|
-
|
|
44475
|
-
|
|
44476
|
-
|
|
45463
|
+
}
|
|
45464
|
+
catch (error45) {
|
|
45465
|
+
console.error("Queue callback error:", error45);
|
|
45466
|
+
if (error45 instanceof Error && (error45.message.includes("Invalid content type") || error45.message.includes("Invalid CloudEvent") || error45.message.includes("Missing required CloudEvent") || error45.message.includes("Failed to parse CloudEvent") || error45.message.includes("Binary mode callback"))) {
|
|
45467
|
+
return Response.json({
|
|
45468
|
+
error: error45.message
|
|
45469
|
+
}, {
|
|
45470
|
+
status: 400
|
|
44477
45471
|
});
|
|
44478
45472
|
}
|
|
44479
|
-
|
|
44480
|
-
|
|
44481
|
-
|
|
44482
|
-
|
|
44483
|
-
await registerStreamForRun(runId, name);
|
|
44484
|
-
const chunkPath = import_node_path8.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.bin`);
|
|
44485
|
-
await write(chunkPath, serializeChunk({
|
|
44486
|
-
chunk: Buffer.from([]),
|
|
44487
|
-
eof: true
|
|
44488
|
-
}));
|
|
44489
|
-
streamEmitter.emit(`close:${name}`, {
|
|
44490
|
-
streamName: name
|
|
44491
|
-
});
|
|
44492
|
-
},
|
|
44493
|
-
async listStreamsByRunId(runId) {
|
|
44494
|
-
const runStreamsPath = import_node_path8.default.join(basedir, "streams", "runs", `${runId}.json`);
|
|
44495
|
-
const data = await readJSON(runStreamsPath, RunStreamsSchema);
|
|
44496
|
-
return data?.streams ?? [];
|
|
44497
|
-
},
|
|
44498
|
-
async readFromStream(name, startIndex = 0) {
|
|
44499
|
-
const chunksDir = import_node_path8.default.join(basedir, "streams", "chunks");
|
|
44500
|
-
let removeListeners = /* @__PURE__ */ __name(() => {
|
|
44501
|
-
}, "removeListeners");
|
|
44502
|
-
return new ReadableStream({
|
|
44503
|
-
async start(controller) {
|
|
44504
|
-
const deliveredChunkIds = /* @__PURE__ */ new Set();
|
|
44505
|
-
const bufferedEventChunks = [];
|
|
44506
|
-
let isReadingFromDisk = true;
|
|
44507
|
-
let pendingClose = false;
|
|
44508
|
-
const chunkListener = /* @__PURE__ */ __name((event) => {
|
|
44509
|
-
deliveredChunkIds.add(event.chunkId);
|
|
44510
|
-
if (event.chunkData.byteLength === 0) {
|
|
44511
|
-
return;
|
|
44512
|
-
}
|
|
44513
|
-
if (isReadingFromDisk) {
|
|
44514
|
-
bufferedEventChunks.push({
|
|
44515
|
-
chunkId: event.chunkId,
|
|
44516
|
-
chunkData: Uint8Array.from(event.chunkData)
|
|
44517
|
-
});
|
|
44518
|
-
}
|
|
44519
|
-
else {
|
|
44520
|
-
controller.enqueue(Uint8Array.from(event.chunkData));
|
|
44521
|
-
}
|
|
44522
|
-
}, "chunkListener");
|
|
44523
|
-
const closeListener = /* @__PURE__ */ __name(() => {
|
|
44524
|
-
if (isReadingFromDisk) {
|
|
44525
|
-
pendingClose = true;
|
|
44526
|
-
return;
|
|
44527
|
-
}
|
|
44528
|
-
streamEmitter.off(`chunk:${name}`, chunkListener);
|
|
44529
|
-
streamEmitter.off(`close:${name}`, closeListener);
|
|
44530
|
-
try {
|
|
44531
|
-
controller.close();
|
|
44532
|
-
}
|
|
44533
|
-
catch {
|
|
44534
|
-
}
|
|
44535
|
-
}, "closeListener");
|
|
44536
|
-
removeListeners = closeListener;
|
|
44537
|
-
streamEmitter.on(`chunk:${name}`, chunkListener);
|
|
44538
|
-
streamEmitter.on(`close:${name}`, closeListener);
|
|
44539
|
-
const [binFiles, jsonFiles] = await Promise.all([
|
|
44540
|
-
listFilesByExtension(chunksDir, ".bin"),
|
|
44541
|
-
listFilesByExtension(chunksDir, ".json")
|
|
44542
|
-
]);
|
|
44543
|
-
const fileExtMap = /* @__PURE__ */ new Map();
|
|
44544
|
-
for (const f of jsonFiles)
|
|
44545
|
-
fileExtMap.set(f, ".json");
|
|
44546
|
-
for (const f of binFiles)
|
|
44547
|
-
fileExtMap.set(f, ".bin");
|
|
44548
|
-
const chunkFiles = [
|
|
44549
|
-
...fileExtMap.keys()
|
|
44550
|
-
].filter((file2) => file2.startsWith(`${name}-`)).sort();
|
|
44551
|
-
let isComplete = false;
|
|
44552
|
-
for (let i = startIndex; i < chunkFiles.length; i++) {
|
|
44553
|
-
const file2 = chunkFiles[i];
|
|
44554
|
-
const chunkId = file2.substring(name.length + 1);
|
|
44555
|
-
if (deliveredChunkIds.has(chunkId)) {
|
|
44556
|
-
continue;
|
|
44557
|
-
}
|
|
44558
|
-
const ext = fileExtMap.get(file2) ?? ".bin";
|
|
44559
|
-
const chunk = deserializeChunk(await readBuffer(import_node_path8.default.join(chunksDir, `${file2}${ext}`)));
|
|
44560
|
-
if (chunk?.eof === true) {
|
|
44561
|
-
isComplete = true;
|
|
44562
|
-
break;
|
|
44563
|
-
}
|
|
44564
|
-
if (chunk.chunk.byteLength) {
|
|
44565
|
-
controller.enqueue(Uint8Array.from(chunk.chunk));
|
|
44566
|
-
}
|
|
44567
|
-
}
|
|
44568
|
-
isReadingFromDisk = false;
|
|
44569
|
-
bufferedEventChunks.sort((a, b) => a.chunkId.localeCompare(b.chunkId));
|
|
44570
|
-
for (const buffered of bufferedEventChunks) {
|
|
44571
|
-
controller.enqueue(Uint8Array.from(buffered.chunkData));
|
|
44572
|
-
}
|
|
44573
|
-
if (isComplete) {
|
|
44574
|
-
removeListeners();
|
|
44575
|
-
try {
|
|
44576
|
-
controller.close();
|
|
44577
|
-
}
|
|
44578
|
-
catch {
|
|
44579
|
-
}
|
|
44580
|
-
return;
|
|
44581
|
-
}
|
|
44582
|
-
if (pendingClose) {
|
|
44583
|
-
streamEmitter.off(`chunk:${name}`, chunkListener);
|
|
44584
|
-
streamEmitter.off(`close:${name}`, closeListener);
|
|
44585
|
-
try {
|
|
44586
|
-
controller.close();
|
|
44587
|
-
}
|
|
44588
|
-
catch {
|
|
44589
|
-
}
|
|
44590
|
-
}
|
|
44591
|
-
},
|
|
44592
|
-
cancel() {
|
|
44593
|
-
removeListeners();
|
|
44594
|
-
}
|
|
45473
|
+
return Response.json({
|
|
45474
|
+
error: "Failed to process queue message"
|
|
45475
|
+
}, {
|
|
45476
|
+
status: 500
|
|
44595
45477
|
});
|
|
44596
45478
|
}
|
|
44597
45479
|
};
|
|
44598
45480
|
}
|
|
44599
|
-
__name(
|
|
44600
|
-
// ../world-local/dist/index.js
|
|
44601
|
-
function createLocalWorld(args) {
|
|
44602
|
-
const definedArgs = args ? Object.fromEntries(Object.entries(args).filter(([, value]) => value !== void 0)) : {};
|
|
44603
|
-
const mergedConfig = {
|
|
44604
|
-
...config2.value,
|
|
44605
|
-
...definedArgs
|
|
44606
|
-
};
|
|
44607
|
-
return {
|
|
44608
|
-
...createQueue(mergedConfig),
|
|
44609
|
-
...createStorage(mergedConfig.dataDir),
|
|
44610
|
-
...createStreamer(mergedConfig.dataDir),
|
|
44611
|
-
async start() {
|
|
44612
|
-
await initDataDir(mergedConfig.dataDir);
|
|
44613
|
-
}
|
|
44614
|
-
};
|
|
44615
|
-
}
|
|
44616
|
-
__name(createLocalWorld, "createLocalWorld");
|
|
45481
|
+
__name(handleCallback2, "handleCallback2");
|
|
44617
45482
|
// ../world-vercel/dist/utils.js
|
|
44618
45483
|
var import_node_os = __toESM(require("node:os"), 1);
|
|
44619
45484
|
var import_node_util2 = require("node:util");
|
|
44620
|
-
var
|
|
45485
|
+
var import_oidc4 = __toESM(require_dist(), 1);
|
|
44621
45486
|
// ../../node_modules/.pnpm/cbor-x@1.6.0/node_modules/cbor-x/decode.js
|
|
44622
45487
|
var decoder;
|
|
44623
45488
|
try {
|
|
@@ -47236,7 +48101,7 @@ var RpcSystem3 = SemanticConvention3("rpc.system");
|
|
|
47236
48101
|
var RpcService3 = SemanticConvention3("rpc.service");
|
|
47237
48102
|
var RpcMethod3 = SemanticConvention3("rpc.method");
|
|
47238
48103
|
// ../world-vercel/dist/version.js
|
|
47239
|
-
var version2 = "4.1.0-beta.
|
|
48104
|
+
var version2 = "4.1.0-beta.34";
|
|
47240
48105
|
// ../world-vercel/dist/utils.js
|
|
47241
48106
|
var WORKFLOW_SERVER_URL_OVERRIDE = "";
|
|
47242
48107
|
var DEFAULT_RESOLVE_DATA_OPTION2 = "all";
|
|
@@ -47324,7 +48189,7 @@ async function getHttpConfig(config3) {
|
|
|
47324
48189
|
const headers = getHeaders(config3, {
|
|
47325
48190
|
usingProxy
|
|
47326
48191
|
});
|
|
47327
|
-
const token = config3?.token ?? await (0,
|
|
48192
|
+
const token = config3?.token ?? await (0, import_oidc4.getVercelOidcToken)();
|
|
47328
48193
|
if (token) {
|
|
47329
48194
|
headers.set("Authorization", `Bearer ${token}`);
|
|
47330
48195
|
}
|
|
@@ -47486,12 +48351,26 @@ var MessageWrapper = object({
|
|
|
47486
48351
|
var MAX_DELAY_SECONDS = Number(process.env.VERCEL_QUEUE_MAX_DELAY_SECONDS || 82800
|
|
47487
48352
|
// 23 hours - leave 1h buffer before 24h retention limit
|
|
47488
48353
|
);
|
|
48354
|
+
function getHeadersFromPayload(payload) {
|
|
48355
|
+
const headers = {};
|
|
48356
|
+
if ("runId" in payload && typeof payload.runId === "string") {
|
|
48357
|
+
headers["x-workflow-run-id"] = payload.runId;
|
|
48358
|
+
}
|
|
48359
|
+
if ("workflowRunId" in payload && typeof payload.workflowRunId === "string") {
|
|
48360
|
+
headers["x-workflow-run-id"] = payload.workflowRunId;
|
|
48361
|
+
}
|
|
48362
|
+
if ("stepId" in payload && typeof payload.stepId === "string") {
|
|
48363
|
+
headers["x-workflow-step-id"] = payload.stepId;
|
|
48364
|
+
}
|
|
48365
|
+
return Object.keys(headers).length > 0 ? headers : void 0;
|
|
48366
|
+
}
|
|
48367
|
+
__name(getHeadersFromPayload, "getHeadersFromPayload");
|
|
47489
48368
|
function createQueue2(config3) {
|
|
47490
48369
|
const { baseUrl, usingProxy } = getHttpUrl(config3);
|
|
47491
48370
|
const headers = getHeaders(config3, {
|
|
47492
48371
|
usingProxy
|
|
47493
48372
|
});
|
|
47494
|
-
const
|
|
48373
|
+
const clientOptions = {
|
|
47495
48374
|
baseUrl: usingProxy ? baseUrl : void 0,
|
|
47496
48375
|
// The proxy will strip `/queues` from the path, and add `/api` in front,
|
|
47497
48376
|
// so this ends up being `/api/v3/topic` when arriving at the queue server,
|
|
@@ -47505,8 +48384,8 @@ function createQueue2(config3) {
|
|
|
47505
48384
|
if (!deploymentId) {
|
|
47506
48385
|
throw new Error("No deploymentId provided and VERCEL_DEPLOYMENT_ID environment variable is not set. Queue messages require a deployment ID to route correctly. Either set VERCEL_DEPLOYMENT_ID or provide deploymentId in options.");
|
|
47507
48386
|
}
|
|
47508
|
-
const
|
|
47509
|
-
...
|
|
48387
|
+
const client = new QueueClient({
|
|
48388
|
+
...clientOptions,
|
|
47510
48389
|
deploymentId
|
|
47511
48390
|
});
|
|
47512
48391
|
const hasEncoder = typeof MessageWrapper.encode === "function";
|
|
@@ -47522,10 +48401,15 @@ function createQueue2(config3) {
|
|
|
47522
48401
|
});
|
|
47523
48402
|
const sanitizedQueueName = queueName.replace(/[^A-Za-z0-9-_]/g, "-");
|
|
47524
48403
|
try {
|
|
47525
|
-
const { messageId } = await
|
|
48404
|
+
const { messageId } = await client.sendMessage({
|
|
48405
|
+
queueName: sanitizedQueueName,
|
|
48406
|
+
payload: encoded,
|
|
47526
48407
|
idempotencyKey: opts?.idempotencyKey,
|
|
47527
48408
|
delaySeconds: opts?.delaySeconds,
|
|
47528
|
-
headers:
|
|
48409
|
+
headers: {
|
|
48410
|
+
...getHeadersFromPayload(payload),
|
|
48411
|
+
...opts?.headers
|
|
48412
|
+
}
|
|
47529
48413
|
});
|
|
47530
48414
|
return {
|
|
47531
48415
|
messageId: MessageId.parse(messageId)
|
|
@@ -47540,30 +48424,26 @@ function createQueue2(config3) {
|
|
|
47540
48424
|
throw error45;
|
|
47541
48425
|
}
|
|
47542
48426
|
}, "queue");
|
|
47543
|
-
const
|
|
47544
|
-
|
|
47545
|
-
|
|
47546
|
-
|
|
47547
|
-
return handleCallbackClient.handleCallback({
|
|
47548
|
-
[`${prefix}*`]: {
|
|
47549
|
-
default: /* @__PURE__ */ __name(async (body, meta) => {
|
|
47550
|
-
const { payload, queueName, deploymentId } = MessageWrapper.parse(body);
|
|
47551
|
-
const result = await handler(payload, {
|
|
47552
|
-
queueName,
|
|
47553
|
-
messageId: MessageId.parse(meta.messageId),
|
|
47554
|
-
attempt: meta.deliveryCount
|
|
47555
|
-
});
|
|
47556
|
-
if (typeof result?.timeoutSeconds === "number") {
|
|
47557
|
-
const delaySeconds = Math.min(result.timeoutSeconds, MAX_DELAY_SECONDS);
|
|
47558
|
-
await queue(queueName, payload, {
|
|
47559
|
-
deploymentId,
|
|
47560
|
-
delaySeconds
|
|
47561
|
-
});
|
|
47562
|
-
return void 0;
|
|
47563
|
-
}
|
|
47564
|
-
return void 0;
|
|
47565
|
-
}, "default")
|
|
48427
|
+
const createQueueHandler = /* @__PURE__ */ __name((_prefix, handler) => {
|
|
48428
|
+
return handleCallback2(async (message, metadata) => {
|
|
48429
|
+
if (!message || !metadata) {
|
|
48430
|
+
return;
|
|
47566
48431
|
}
|
|
48432
|
+
const { payload, queueName, deploymentId } = MessageWrapper.parse(message);
|
|
48433
|
+
const result = await handler(payload, {
|
|
48434
|
+
queueName,
|
|
48435
|
+
messageId: MessageId.parse(metadata.messageId),
|
|
48436
|
+
attempt: metadata.deliveryCount
|
|
48437
|
+
});
|
|
48438
|
+
if (typeof result?.timeoutSeconds === "number") {
|
|
48439
|
+
const delaySeconds = Math.min(result.timeoutSeconds, MAX_DELAY_SECONDS);
|
|
48440
|
+
await queue(queueName, payload, {
|
|
48441
|
+
deploymentId,
|
|
48442
|
+
delaySeconds
|
|
48443
|
+
});
|
|
48444
|
+
}
|
|
48445
|
+
}, {
|
|
48446
|
+
client: new QueueClient(clientOptions)
|
|
47567
48447
|
});
|
|
47568
48448
|
}, "createQueueHandler");
|
|
47569
48449
|
const getDeploymentId = /* @__PURE__ */ __name(async () => {
|
|
@@ -48207,10 +49087,12 @@ function createStreamer2(config3) {
|
|
|
48207
49087
|
__name(createStreamer2, "createStreamer");
|
|
48208
49088
|
// ../world-vercel/dist/index.js
|
|
48209
49089
|
function createVercelWorld(config3) {
|
|
49090
|
+
const projectId = config3?.projectConfig?.projectId || process.env.VERCEL_PROJECT_ID;
|
|
48210
49091
|
return {
|
|
48211
49092
|
...createQueue2(config3),
|
|
48212
49093
|
...createStorage2(config3),
|
|
48213
|
-
...createStreamer2(config3)
|
|
49094
|
+
...createStreamer2(config3),
|
|
49095
|
+
getEncryptionKeyForRun: createGetEncryptionKeyForRun(projectId, config3?.token)
|
|
48214
49096
|
};
|
|
48215
49097
|
}
|
|
48216
49098
|
__name(createVercelWorld, "createVercelWorld");
|
|
@@ -48234,6 +49116,7 @@ var createWorld = /* @__PURE__ */ __name(() => {
|
|
|
48234
49116
|
projectConfig: {
|
|
48235
49117
|
environment: process.env.WORKFLOW_VERCEL_ENV,
|
|
48236
49118
|
projectId: process.env.WORKFLOW_VERCEL_PROJECT,
|
|
49119
|
+
projectName: process.env.WORKFLOW_VERCEL_PROJECT_NAME,
|
|
48237
49120
|
teamId: process.env.WORKFLOW_VERCEL_TEAM
|
|
48238
49121
|
}
|
|
48239
49122
|
});
|
|
@@ -48921,7 +49804,7 @@ function getStepRevivers(global2 = globalThis, ops, runId) {
|
|
|
48921
49804
|
};
|
|
48922
49805
|
}
|
|
48923
49806
|
__name(getStepRevivers, "getStepRevivers");
|
|
48924
|
-
function hydrateStepArguments(value,
|
|
49807
|
+
async function hydrateStepArguments(value, runId, _key, ops = [], global2 = globalThis, extraRevivers = {}) {
|
|
48925
49808
|
if (!(value instanceof Uint8Array)) {
|
|
48926
49809
|
return unflatten(value, {
|
|
48927
49810
|
...getStepRevivers(global2, ops, runId),
|
|
@@ -48940,7 +49823,7 @@ function hydrateStepArguments(value, ops, runId, global2 = globalThis, extraRevi
|
|
|
48940
49823
|
throw new Error(`Unsupported serialization format: ${format}`);
|
|
48941
49824
|
}
|
|
48942
49825
|
__name(hydrateStepArguments, "hydrateStepArguments");
|
|
48943
|
-
function dehydrateStepReturnValue(value,
|
|
49826
|
+
async function dehydrateStepReturnValue(value, runId, _key, ops = [], global2 = globalThis, v1Compat = false) {
|
|
48944
49827
|
try {
|
|
48945
49828
|
const str = stringify(value, getStepReducers(global2, ops, runId));
|
|
48946
49829
|
if (v1Compat) {
|
|
@@ -49095,21 +49978,47 @@ async function withServerErrorRetry(fn) {
|
|
|
49095
49978
|
}
|
|
49096
49979
|
__name(withServerErrorRetry, "withServerErrorRetry");
|
|
49097
49980
|
// ../core/dist/runtime/resume-hook.js
|
|
49098
|
-
async function
|
|
49981
|
+
async function getHookByTokenWithKey(token) {
|
|
49099
49982
|
const world = getWorld();
|
|
49100
49983
|
const hook = await world.hooks.getByToken(token);
|
|
49984
|
+
const run = await world.runs.get(hook.runId);
|
|
49985
|
+
const rawKey = await world.getEncryptionKeyForRun?.(run);
|
|
49986
|
+
const encryptionKey = rawKey ? await importKey(rawKey) : void 0;
|
|
49101
49987
|
if (typeof hook.metadata !== "undefined") {
|
|
49102
|
-
hook.metadata = hydrateStepArguments(hook.metadata,
|
|
49988
|
+
hook.metadata = await hydrateStepArguments(hook.metadata, hook.runId, encryptionKey);
|
|
49103
49989
|
}
|
|
49104
|
-
return
|
|
49990
|
+
return {
|
|
49991
|
+
hook,
|
|
49992
|
+
run,
|
|
49993
|
+
encryptionKey
|
|
49994
|
+
};
|
|
49105
49995
|
}
|
|
49106
|
-
__name(
|
|
49107
|
-
async function resumeHook(tokenOrHook, payload) {
|
|
49996
|
+
__name(getHookByTokenWithKey, "getHookByTokenWithKey");
|
|
49997
|
+
async function resumeHook(tokenOrHook, payload, encryptionKeyOverride) {
|
|
49108
49998
|
return await waitedUntil(() => {
|
|
49109
49999
|
return trace("hook.resume", async (span) => {
|
|
49110
50000
|
const world = getWorld();
|
|
49111
50001
|
try {
|
|
49112
|
-
|
|
50002
|
+
let hook;
|
|
50003
|
+
let workflowRun;
|
|
50004
|
+
let encryptionKey;
|
|
50005
|
+
if (typeof tokenOrHook === "string") {
|
|
50006
|
+
const result = await getHookByTokenWithKey(tokenOrHook);
|
|
50007
|
+
hook = result.hook;
|
|
50008
|
+
workflowRun = result.run;
|
|
50009
|
+
encryptionKey = encryptionKeyOverride ?? result.encryptionKey;
|
|
50010
|
+
}
|
|
50011
|
+
else {
|
|
50012
|
+
hook = tokenOrHook;
|
|
50013
|
+
workflowRun = await world.runs.get(hook.runId);
|
|
50014
|
+
if (encryptionKeyOverride) {
|
|
50015
|
+
encryptionKey = encryptionKeyOverride;
|
|
50016
|
+
}
|
|
50017
|
+
else {
|
|
50018
|
+
const rawKey = await world.getEncryptionKeyForRun?.(workflowRun);
|
|
50019
|
+
encryptionKey = rawKey ? await importKey(rawKey) : void 0;
|
|
50020
|
+
}
|
|
50021
|
+
}
|
|
49113
50022
|
span?.setAttributes({
|
|
49114
50023
|
...HookToken(hook.token),
|
|
49115
50024
|
...HookId(hook.hookId),
|
|
@@ -49117,7 +50026,7 @@ async function resumeHook(tokenOrHook, payload) {
|
|
|
49117
50026
|
});
|
|
49118
50027
|
const ops = [];
|
|
49119
50028
|
const v1Compat = isLegacySpecVersion(hook.specVersion);
|
|
49120
|
-
const dehydratedPayload = dehydrateStepReturnValue(payload,
|
|
50029
|
+
const dehydratedPayload = await dehydrateStepReturnValue(payload, hook.runId, encryptionKey, ops, globalThis, v1Compat);
|
|
49121
50030
|
(0, import_functions2.waitUntil)(Promise.all(ops).catch((err) => {
|
|
49122
50031
|
if (err !== void 0)
|
|
49123
50032
|
throw err;
|
|
@@ -49132,7 +50041,6 @@ async function resumeHook(tokenOrHook, payload) {
|
|
|
49132
50041
|
}, {
|
|
49133
50042
|
v1Compat
|
|
49134
50043
|
});
|
|
49135
|
-
const workflowRun = await world.runs.get(hook.runId);
|
|
49136
50044
|
span?.setAttributes({
|
|
49137
50045
|
...WorkflowName(workflowRun.workflowName)
|
|
49138
50046
|
});
|
|
@@ -49442,7 +50350,7 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49442
50350
|
};
|
|
49443
50351
|
}
|
|
49444
50352
|
if (err.status === 410) {
|
|
49445
|
-
|
|
50353
|
+
runtimeLogger.info(`Workflow run "${workflowRunId}" has already completed, skipping step "${stepId}": ${err.message}`);
|
|
49446
50354
|
return;
|
|
49447
50355
|
}
|
|
49448
50356
|
if (err.status === 409) {
|
|
@@ -49512,15 +50420,29 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49512
50420
|
stepName,
|
|
49513
50421
|
retryCount
|
|
49514
50422
|
});
|
|
49515
|
-
|
|
49516
|
-
|
|
49517
|
-
|
|
49518
|
-
|
|
49519
|
-
|
|
49520
|
-
|
|
49521
|
-
|
|
50423
|
+
try {
|
|
50424
|
+
await world.events.create(workflowRunId, {
|
|
50425
|
+
eventType: "step_failed",
|
|
50426
|
+
specVersion: SPEC_VERSION_CURRENT,
|
|
50427
|
+
correlationId: stepId,
|
|
50428
|
+
eventData: {
|
|
50429
|
+
error: errorMessage,
|
|
50430
|
+
stack: step.error?.stack
|
|
50431
|
+
}
|
|
50432
|
+
});
|
|
50433
|
+
}
|
|
50434
|
+
catch (err) {
|
|
50435
|
+
if (WorkflowAPIError.is(err) && err.status === 409) {
|
|
50436
|
+
runtimeLogger.warn("Tried failing step, but step has already finished.", {
|
|
50437
|
+
workflowRunId,
|
|
50438
|
+
stepId,
|
|
50439
|
+
stepName,
|
|
50440
|
+
message: err.message
|
|
50441
|
+
});
|
|
50442
|
+
return;
|
|
49522
50443
|
}
|
|
49523
|
-
|
|
50444
|
+
throw err;
|
|
50445
|
+
}
|
|
49524
50446
|
span?.setAttributes({
|
|
49525
50447
|
...StepStatus("failed"),
|
|
49526
50448
|
...StepRetryExhausted(true)
|
|
@@ -49529,10 +50451,6 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49529
50451
|
runId: workflowRunId,
|
|
49530
50452
|
traceCarrier: await serializeTraceCarrier(),
|
|
49531
50453
|
requestedAt: /* @__PURE__ */ new Date()
|
|
49532
|
-
}, {
|
|
49533
|
-
headers: {
|
|
49534
|
-
"x-workflow-run-id": workflowRunId
|
|
49535
|
-
}
|
|
49536
50454
|
});
|
|
49537
50455
|
return;
|
|
49538
50456
|
}
|
|
@@ -49543,9 +50461,11 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49543
50461
|
}
|
|
49544
50462
|
const stepStartedAt = step.startedAt;
|
|
49545
50463
|
const ops = [];
|
|
50464
|
+
const rawKey = await world.getEncryptionKeyForRun?.(workflowRunId);
|
|
50465
|
+
const encryptionKey = rawKey ? await importKey(rawKey) : void 0;
|
|
49546
50466
|
const hydratedInput = await trace("step.hydrate", {}, async (hydrateSpan) => {
|
|
49547
50467
|
const startTime = Date.now();
|
|
49548
|
-
const result2 = hydrateStepArguments(step.input,
|
|
50468
|
+
const result2 = await hydrateStepArguments(step.input, workflowRunId, encryptionKey, ops);
|
|
49549
50469
|
const durationMs = Date.now() - startTime;
|
|
49550
50470
|
hydrateSpan?.setAttributes({
|
|
49551
50471
|
...StepArgumentsCount(result2.args.length),
|
|
@@ -49580,7 +50500,7 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49580
50500
|
});
|
|
49581
50501
|
result = await trace("step.dehydrate", {}, async (dehydrateSpan) => {
|
|
49582
50502
|
const startTime = Date.now();
|
|
49583
|
-
const dehydrated = dehydrateStepReturnValue(result,
|
|
50503
|
+
const dehydrated = await dehydrateStepReturnValue(result, workflowRunId, encryptionKey, ops);
|
|
49584
50504
|
const durationMs = Date.now() - startTime;
|
|
49585
50505
|
dehydrateSpan?.setAttributes({
|
|
49586
50506
|
...QueueSerializeTimeMs(durationMs),
|
|
@@ -49593,6 +50513,7 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49593
50513
|
if (!isAbortError)
|
|
49594
50514
|
throw err;
|
|
49595
50515
|
}));
|
|
50516
|
+
let stepCompleted409 = false;
|
|
49596
50517
|
const [, traceCarrier] = await Promise.all([
|
|
49597
50518
|
withServerErrorRetry(() => world.events.create(workflowRunId, {
|
|
49598
50519
|
eventType: "step_completed",
|
|
@@ -49601,9 +50522,24 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49601
50522
|
eventData: {
|
|
49602
50523
|
result
|
|
49603
50524
|
}
|
|
49604
|
-
}))
|
|
50525
|
+
})).catch((err) => {
|
|
50526
|
+
if (WorkflowAPIError.is(err) && err.status === 409) {
|
|
50527
|
+
runtimeLogger.warn("Tried completing step, but step has already finished.", {
|
|
50528
|
+
workflowRunId,
|
|
50529
|
+
stepId,
|
|
50530
|
+
stepName,
|
|
50531
|
+
message: err.message
|
|
50532
|
+
});
|
|
50533
|
+
stepCompleted409 = true;
|
|
50534
|
+
return;
|
|
50535
|
+
}
|
|
50536
|
+
throw err;
|
|
50537
|
+
}),
|
|
49605
50538
|
serializeTraceCarrier()
|
|
49606
50539
|
]);
|
|
50540
|
+
if (stepCompleted409) {
|
|
50541
|
+
return;
|
|
50542
|
+
}
|
|
49607
50543
|
span?.setAttributes({
|
|
49608
50544
|
...StepStatus("completed"),
|
|
49609
50545
|
...StepResultType(typeof result)
|
|
@@ -49657,15 +50593,29 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49657
50593
|
stepName,
|
|
49658
50594
|
errorStack: normalizedStack
|
|
49659
50595
|
});
|
|
49660
|
-
|
|
49661
|
-
|
|
49662
|
-
|
|
49663
|
-
|
|
49664
|
-
|
|
49665
|
-
|
|
49666
|
-
|
|
50596
|
+
try {
|
|
50597
|
+
await withServerErrorRetry(() => world.events.create(workflowRunId, {
|
|
50598
|
+
eventType: "step_failed",
|
|
50599
|
+
specVersion: SPEC_VERSION_CURRENT,
|
|
50600
|
+
correlationId: stepId,
|
|
50601
|
+
eventData: {
|
|
50602
|
+
error: normalizedError.message,
|
|
50603
|
+
stack: normalizedStack
|
|
50604
|
+
}
|
|
50605
|
+
}));
|
|
50606
|
+
}
|
|
50607
|
+
catch (stepFailErr) {
|
|
50608
|
+
if (WorkflowAPIError.is(stepFailErr) && stepFailErr.status === 409) {
|
|
50609
|
+
runtimeLogger.warn("Tried failing step, but step has already finished.", {
|
|
50610
|
+
workflowRunId,
|
|
50611
|
+
stepId,
|
|
50612
|
+
stepName,
|
|
50613
|
+
message: stepFailErr.message
|
|
50614
|
+
});
|
|
50615
|
+
return;
|
|
49667
50616
|
}
|
|
49668
|
-
|
|
50617
|
+
throw stepFailErr;
|
|
50618
|
+
}
|
|
49669
50619
|
span?.setAttributes({
|
|
49670
50620
|
...StepStatus("failed"),
|
|
49671
50621
|
...StepFatalError(true)
|
|
@@ -49688,15 +50638,29 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49688
50638
|
errorStack: normalizedStack
|
|
49689
50639
|
});
|
|
49690
50640
|
const errorMessage = `Step "${stepName}" failed after ${maxRetries2} ${pluralize("retry", "retries", maxRetries2)}: ${normalizedError.message}`;
|
|
49691
|
-
|
|
49692
|
-
|
|
49693
|
-
|
|
49694
|
-
|
|
49695
|
-
|
|
49696
|
-
|
|
49697
|
-
|
|
50641
|
+
try {
|
|
50642
|
+
await withServerErrorRetry(() => world.events.create(workflowRunId, {
|
|
50643
|
+
eventType: "step_failed",
|
|
50644
|
+
specVersion: SPEC_VERSION_CURRENT,
|
|
50645
|
+
correlationId: stepId,
|
|
50646
|
+
eventData: {
|
|
50647
|
+
error: errorMessage,
|
|
50648
|
+
stack: normalizedStack
|
|
50649
|
+
}
|
|
50650
|
+
}));
|
|
50651
|
+
}
|
|
50652
|
+
catch (stepFailErr) {
|
|
50653
|
+
if (WorkflowAPIError.is(stepFailErr) && stepFailErr.status === 409) {
|
|
50654
|
+
runtimeLogger.warn("Tried failing step, but step has already finished.", {
|
|
50655
|
+
workflowRunId,
|
|
50656
|
+
stepId,
|
|
50657
|
+
stepName,
|
|
50658
|
+
message: stepFailErr.message
|
|
50659
|
+
});
|
|
50660
|
+
return;
|
|
49698
50661
|
}
|
|
49699
|
-
|
|
50662
|
+
throw stepFailErr;
|
|
50663
|
+
}
|
|
49700
50664
|
span?.setAttributes({
|
|
49701
50665
|
...StepStatus("failed"),
|
|
49702
50666
|
...StepRetryExhausted(true)
|
|
@@ -49719,18 +50683,32 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49719
50683
|
errorStack: normalizedStack
|
|
49720
50684
|
});
|
|
49721
50685
|
}
|
|
49722
|
-
|
|
49723
|
-
|
|
49724
|
-
|
|
49725
|
-
|
|
49726
|
-
|
|
49727
|
-
|
|
49728
|
-
|
|
49729
|
-
|
|
49730
|
-
|
|
50686
|
+
try {
|
|
50687
|
+
await withServerErrorRetry(() => world.events.create(workflowRunId, {
|
|
50688
|
+
eventType: "step_retrying",
|
|
50689
|
+
specVersion: SPEC_VERSION_CURRENT,
|
|
50690
|
+
correlationId: stepId,
|
|
50691
|
+
eventData: {
|
|
50692
|
+
error: normalizedError.message,
|
|
50693
|
+
stack: normalizedStack,
|
|
50694
|
+
...RetryableError.is(err) && {
|
|
50695
|
+
retryAfter: err.retryAfter
|
|
50696
|
+
}
|
|
49731
50697
|
}
|
|
50698
|
+
}));
|
|
50699
|
+
}
|
|
50700
|
+
catch (stepRetryErr) {
|
|
50701
|
+
if (WorkflowAPIError.is(stepRetryErr) && stepRetryErr.status === 409) {
|
|
50702
|
+
runtimeLogger.warn("Tried retrying step, but step has already finished.", {
|
|
50703
|
+
workflowRunId,
|
|
50704
|
+
stepId,
|
|
50705
|
+
stepName,
|
|
50706
|
+
message: stepRetryErr.message
|
|
50707
|
+
});
|
|
50708
|
+
return;
|
|
49732
50709
|
}
|
|
49733
|
-
|
|
50710
|
+
throw stepRetryErr;
|
|
50711
|
+
}
|
|
49734
50712
|
const timeoutSeconds = Math.max(1, RetryableError.is(err) ? Math.ceil((+err.retryAfter.getTime() - Date.now()) / 1e3) : 1);
|
|
49735
50713
|
span?.setAttributes({
|
|
49736
50714
|
...StepRetryTimeoutSeconds(timeoutSeconds),
|
|
@@ -49751,10 +50729,6 @@ var stepHandler = getWorldHandlers().createQueueHandler("__wkf_step_", async (me
|
|
|
49751
50729
|
runId: workflowRunId,
|
|
49752
50730
|
traceCarrier: await serializeTraceCarrier(),
|
|
49753
50731
|
requestedAt: /* @__PURE__ */ new Date()
|
|
49754
|
-
}, {
|
|
49755
|
-
headers: {
|
|
49756
|
-
"x-workflow-run-id": workflowRunId
|
|
49757
|
-
}
|
|
49758
50732
|
});
|
|
49759
50733
|
});
|
|
49760
50734
|
});
|