@workflow/world-testing 4.0.1-beta.5 → 4.0.1-beta.7

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
@@ -1156,43 +1156,43 @@ var require_ms = __commonJS({
1156
1156
  }
1157
1157
  }
1158
1158
  __name(parse4, "parse");
1159
- function fmtShort(ms2) {
1160
- var msAbs = Math.abs(ms2);
1159
+ function fmtShort(ms3) {
1160
+ var msAbs = Math.abs(ms3);
1161
1161
  if (msAbs >= d) {
1162
- return Math.round(ms2 / d) + "d";
1162
+ return Math.round(ms3 / d) + "d";
1163
1163
  }
1164
1164
  if (msAbs >= h) {
1165
- return Math.round(ms2 / h) + "h";
1165
+ return Math.round(ms3 / h) + "h";
1166
1166
  }
1167
1167
  if (msAbs >= m) {
1168
- return Math.round(ms2 / m) + "m";
1168
+ return Math.round(ms3 / m) + "m";
1169
1169
  }
1170
1170
  if (msAbs >= s) {
1171
- return Math.round(ms2 / s) + "s";
1171
+ return Math.round(ms3 / s) + "s";
1172
1172
  }
1173
- return ms2 + "ms";
1173
+ return ms3 + "ms";
1174
1174
  }
1175
1175
  __name(fmtShort, "fmtShort");
1176
- function fmtLong(ms2) {
1177
- var msAbs = Math.abs(ms2);
1176
+ function fmtLong(ms3) {
1177
+ var msAbs = Math.abs(ms3);
1178
1178
  if (msAbs >= d) {
1179
- return plural(ms2, msAbs, d, "day");
1179
+ return plural(ms3, msAbs, d, "day");
1180
1180
  }
1181
1181
  if (msAbs >= h) {
1182
- return plural(ms2, msAbs, h, "hour");
1182
+ return plural(ms3, msAbs, h, "hour");
1183
1183
  }
1184
1184
  if (msAbs >= m) {
1185
- return plural(ms2, msAbs, m, "minute");
1185
+ return plural(ms3, msAbs, m, "minute");
1186
1186
  }
1187
1187
  if (msAbs >= s) {
1188
- return plural(ms2, msAbs, s, "second");
1188
+ return plural(ms3, msAbs, s, "second");
1189
1189
  }
1190
- return ms2 + " ms";
1190
+ return ms3 + " ms";
1191
1191
  }
1192
1192
  __name(fmtLong, "fmtLong");
1193
- function plural(ms2, msAbs, n, name) {
1193
+ function plural(ms3, msAbs, n, name) {
1194
1194
  var isPlural = msAbs >= n * 1.5;
1195
- return Math.round(ms2 / n) + " " + name + (isPlural ? "s" : "");
1195
+ return Math.round(ms3 / n) + " " + name + (isPlural ? "s" : "");
1196
1196
  }
1197
1197
  __name(plural, "plural");
1198
1198
  }
@@ -1236,8 +1236,8 @@ var require_common = __commonJS({
1236
1236
  }
1237
1237
  const self2 = debug2;
1238
1238
  const curr = Number(/* @__PURE__ */ new Date());
1239
- const ms2 = curr - (prevTime || curr);
1240
- self2.diff = ms2;
1239
+ const ms3 = curr - (prevTime || curr);
1240
+ self2.diff = ms3;
1241
1241
  self2.prev = prevTime;
1242
1242
  self2.curr = curr;
1243
1243
  prevTime = curr;
@@ -4815,6 +4815,10 @@ var import_functions3 = __toESM(require_functions(), 1);
4815
4815
  // ../errors/dist/index.js
4816
4816
  var import_ms = __toESM(require_ms(), 1);
4817
4817
  var BASE_URL = "https://useworkflow.dev/err";
4818
+ function isError(value) {
4819
+ return typeof value === "object" && value !== null && "name" in value && "message" in value;
4820
+ }
4821
+ __name(isError, "isError");
4818
4822
  var ERROR_SLUGS = {
4819
4823
  NODE_JS_MODULE_IN_WORKFLOW: "node-js-module-in-workflow",
4820
4824
  START_INVALID_WORKFLOW_FUNCTION: "start-invalid-workflow-function",
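The new isError helper above is a duck-typed check rather than an instanceof test, so it also matches error-like values created in another realm or bundled copy of the module. A few illustrative evaluations (not part of the diff), following the definition exactly:

isError(new TypeError("boom"));                    // true  ("name" and "message" exist on the prototype)
isError({ name: "WorkflowError", message: "x" });  // true  (a plain object still qualifies)
isError("boom");                                   // false (not an object)
isError(null);                                     // false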
@@ -4839,6 +4843,9 @@ Learn more: ${BASE_URL}/${options.slug}` : message;
4839
4843
  Caused by: ${options.cause.stack}`;
4840
4844
  }
4841
4845
  }
4846
+ static is(value) {
4847
+ return isError(value) && value.name === "WorkflowError";
4848
+ }
4842
4849
  };
4843
4850
  var WorkflowAPIError = class extends WorkflowError {
4844
4851
  static {
@@ -4856,6 +4863,9 @@ var WorkflowAPIError = class extends WorkflowError {
4856
4863
  this.code = options?.code;
4857
4864
  this.url = options?.url;
4858
4865
  }
4866
+ static is(value) {
4867
+ return isError(value) && value.name === "WorkflowAPIError";
4868
+ }
4859
4869
  };
4860
4870
  var WorkflowRuntimeError = class extends WorkflowError {
4861
4871
  static {
@@ -4867,6 +4877,9 @@ var WorkflowRuntimeError = class extends WorkflowError {
4867
4877
  });
4868
4878
  this.name = "WorkflowRuntimeError";
4869
4879
  }
4880
+ static is(value) {
4881
+ return isError(value) && value.name === "WorkflowRuntimeError";
4882
+ }
4870
4883
  };
4871
4884
  var WorkflowRunNotFoundError = class extends WorkflowError {
4872
4885
  static {
@@ -4878,6 +4891,9 @@ var WorkflowRunNotFoundError = class extends WorkflowError {
4878
4891
  this.name = "WorkflowRunNotFoundError";
4879
4892
  this.runId = runId;
4880
4893
  }
4894
+ static is(value) {
4895
+ return isError(value) && value.name === "WorkflowRunNotFoundError";
4896
+ }
4881
4897
  };
4882
4898
  var FatalError = class extends Error {
4883
4899
  static {
@@ -4888,9 +4904,12 @@ var FatalError = class extends Error {
4888
4904
  super(message);
4889
4905
  this.name = "FatalError";
4890
4906
  }
4907
+ static is(value) {
4908
+ return isError(value) && value.name === "FatalError";
4909
+ }
4891
4910
  };
4892
4911
  // ../core/dist/global.js
4893
- var WorkflowSuspension = class extends Error {
4912
+ var WorkflowSuspension = class _WorkflowSuspension extends Error {
4894
4913
  static {
4895
4914
  __name(this, "WorkflowSuspension");
4896
4915
  }
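Together, these hunks give each error class a static is() guard that checks the error's name string instead of relying on instanceof, so the check keeps working across duplicate bundled copies of the module. A hedged usage sketch; startRun() is a placeholder for any call that can throw these errors, and the classes are assumed to be exported by the package:

try {
  await startRun();
} catch (err) {
  if (WorkflowRunNotFoundError.is(err)) {
    // name === "WorkflowRunNotFoundError"; runId identifies the missing run
    console.warn("run not found:", err.runId);
  } else if (WorkflowError.is(err)) {
    // note: matches the exact name "WorkflowError" only, not subclasses such as WorkflowAPIError
    console.error("workflow error:", err.message);
  } else {
    throw err;
  }
}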
@@ -4898,9 +4917,11 @@ var WorkflowSuspension = class extends Error {
4898
4917
  globalThis;
4899
4918
  stepCount;
4900
4919
  hookCount;
4920
+ waitCount;
4901
4921
  constructor(steps, global2) {
4902
4922
  const stepCount = steps.filter((s) => s.type === "step").length;
4903
4923
  const hookCount = steps.filter((s) => s.type === "hook").length;
4924
+ const waitCount = steps.filter((s) => s.type === "wait").length;
4904
4925
  const parts = [];
4905
4926
  if (stepCount > 0) {
4906
4927
  parts.push(`${stepCount} ${stepCount === 1 ? "step" : "steps"}`);
@@ -4908,7 +4929,10 @@ var WorkflowSuspension = class extends Error {
4908
4929
  if (hookCount > 0) {
4909
4930
  parts.push(`${hookCount} ${hookCount === 1 ? "hook" : "hooks"}`);
4910
4931
  }
4911
- const totalCount = stepCount + hookCount;
4932
+ if (waitCount > 0) {
4933
+ parts.push(`${waitCount} ${waitCount === 1 ? "wait" : "waits"}`);
4934
+ }
4935
+ const totalCount = stepCount + hookCount + waitCount;
4912
4936
  const hasOrHave = totalCount === 1 ? "has" : "have";
4913
4937
  let action;
4914
4938
  if (stepCount > 0) {
@@ -4917,6 +4941,9 @@ var WorkflowSuspension = class extends Error {
4917
4941
  else if (hookCount > 0) {
4918
4942
  action = "created";
4919
4943
  }
4944
+ else if (waitCount > 0) {
4945
+ action = "created";
4946
+ }
4920
4947
  else {
4921
4948
  action = "received";
4922
4949
  }
@@ -4927,6 +4954,10 @@ var WorkflowSuspension = class extends Error {
4927
4954
  this.globalThis = global2;
4928
4955
  this.stepCount = stepCount;
4929
4956
  this.hookCount = hookCount;
4957
+ this.waitCount = waitCount;
4958
+ }
4959
+ static is(value) {
4960
+ return value instanceof _WorkflowSuspension;
4930
4961
  }
4931
4962
  };
4932
4963
  function ENOTSUP() {
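WorkflowSuspension now counts pending waits alongside steps and hooks and exposes an instanceof-based static is() guard via the named class expression introduced above. A minimal sketch of how a caller might inspect it; runWorkflowBody() is a placeholder for the engine's execution call:

try {
  await runWorkflowBody();
} catch (err) {
  if (WorkflowSuspension.is(err)) {
    // counts gathered from the steps passed to the constructor
    const { stepCount, hookCount, waitCount } = err;
    console.log(`suspending: ${stepCount} steps, ${hookCount} hooks, ${waitCount} waits`);
  } else {
    throw err;
  }
}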
@@ -4957,8 +4988,8 @@ function once(fn) {
4957
4988
  return result;
4958
4989
  }
4959
4990
  __name(once, "once");
4960
- function buildWorkflowSuspensionMessage(runId, stepCount, hookCount) {
4961
- if (stepCount === 0 && hookCount === 0) {
4991
+ function buildWorkflowSuspensionMessage(runId, stepCount, hookCount, waitCount) {
4992
+ if (stepCount === 0 && hookCount === 0 && waitCount === 0) {
4962
4993
  return null;
4963
4994
  }
4964
4995
  const parts = [];
@@ -4968,7 +4999,20 @@ function buildWorkflowSuspensionMessage(runId, stepCount, hookCount) {
4968
4999
  if (hookCount > 0) {
4969
5000
  parts.push(`${hookCount} ${hookCount === 1 ? "hook" : "hooks"}`);
4970
5001
  }
4971
- const resumeMsg = hookCount > 0 ? "steps are created and hooks are triggered" : "steps are created";
5002
+ if (waitCount > 0) {
5003
+ parts.push(`${waitCount} ${waitCount === 1 ? "timer" : "timers"}`);
5004
+ }
5005
+ const resumeMsgParts = [];
5006
+ if (stepCount > 0) {
5007
+ resumeMsgParts.push("steps are completed");
5008
+ }
5009
+ if (hookCount > 0) {
5010
+ resumeMsgParts.push("hooks are received");
5011
+ }
5012
+ if (waitCount > 0) {
5013
+ resumeMsgParts.push("timers have elapsed");
5014
+ }
5015
+ const resumeMsg = resumeMsgParts.join(" and ");
4972
5016
  return `[Workflows] "${runId}" - ${parts.join(" and ")} to be enqueued
4973
5017
  Workflow will suspend and resume when ${resumeMsg}`;
4974
5018
  }
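buildWorkflowSuspensionMessage now composes the resume condition from whichever kinds of work are pending. Worked examples of the output, following the logic above (the run id is illustrative):

buildWorkflowSuspensionMessage("wrun_123", 2, 0, 1);
// => '[Workflows] "wrun_123" - 2 steps and 1 timer to be enqueued\n' +
//    'Workflow will suspend and resume when steps are completed and timers have elapsed'

buildWorkflowSuspensionMessage("wrun_123", 0, 0, 0);
// => null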
@@ -5124,503 +5168,50 @@ var config = once2(() => {
5124
5168
  });
5125
5169
  // ../world-local/dist/queue.js
5126
5170
  var import_promises = require("node:timers/promises");
5127
- // ../../node_modules/.pnpm/mixpart@0.0.5-alpha.0/node_modules/mixpart/dist/index.mjs
5128
- var MultipartParseError = class extends Error {
5129
- static {
5130
- __name(this, "MultipartParseError");
5131
- }
5132
- constructor(message) {
5133
- super(message);
5134
- this.name = "MultipartParseError";
5135
- }
5136
- };
5137
- function createSearch(pattern) {
5138
- const needle = new TextEncoder().encode(pattern);
5139
- return (haystack, start2 = 0) => Buffer.prototype.indexOf.call(haystack, needle, start2);
5140
- }
5141
- __name(createSearch, "createSearch");
5142
- function createPartialTailSearch(pattern) {
5143
- const needle = new TextEncoder().encode(pattern);
5144
- const byteIndexes = {};
5145
- for (let i = 0; i < needle.length; ++i) {
5146
- const byte = needle[i];
5147
- if (byteIndexes[byte] === void 0)
5148
- byteIndexes[byte] = [];
5149
- byteIndexes[byte].push(i);
5150
- }
5151
- return function (haystack) {
5152
- const haystackEnd = haystack.length - 1;
5153
- if (haystack[haystackEnd] in byteIndexes) {
5154
- const indexes = byteIndexes[haystack[haystackEnd]];
5155
- for (let i = indexes.length - 1; i >= 0; --i) {
5156
- for (let j = indexes[i], k = haystackEnd; j >= 0 && haystack[k] === needle[j]; --j, --k) {
5157
- if (j === 0)
5158
- return k;
5159
- }
5160
- }
5161
- }
5162
- return -1;
5163
- };
5164
- }
5165
- __name(createPartialTailSearch, "createPartialTailSearch");
5166
- function parseHeaders(headerBytes) {
5167
- const headerText = new TextDecoder("iso-8859-1").decode(headerBytes);
5168
- const lines = headerText.trim().split(/\r?\n/);
5169
- const headerInit = [];
5170
- for (const line of lines) {
5171
- const colonIndex = line.indexOf(":");
5172
- if (colonIndex > 0) {
5173
- const name = line.slice(0, colonIndex).trim();
5174
- const value = line.slice(colonIndex + 1).trim();
5175
- headerInit.push([name, value]);
5171
+ // ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.23/node_modules/@vercel/queue/dist/index.mjs
5172
+ async function streamToBuffer(stream) {
5173
+ let totalLength = 0;
5174
+ const reader = stream.getReader();
5175
+ const chunks = [];
5176
+ try {
5177
+ while (true) {
5178
+ const { done, value } = await reader.read();
5179
+ if (done)
5180
+ break;
5181
+ chunks.push(value);
5182
+ totalLength += value.length;
5176
5183
  }
5177
5184
  }
5178
- return new Headers(headerInit);
5179
- }
5180
- __name(parseHeaders, "parseHeaders");
5181
- function extractBoundary(contentType) {
5182
- const boundaryMatch = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
5183
- if (!boundaryMatch) {
5184
- throw new MultipartParseError("No boundary found in Content-Type header");
5185
+ finally {
5186
+ reader.releaseLock();
5185
5187
  }
5186
- return boundaryMatch[1] ?? boundaryMatch[2];
5188
+ return Buffer.concat(chunks, totalLength);
5187
5189
  }
5188
- __name(extractBoundary, "extractBoundary");
5189
- var AsyncMessageQueue = class {
5190
+ __name(streamToBuffer, "streamToBuffer");
5191
+ var JsonTransport = class {
5190
5192
  static {
5191
- __name(this, "AsyncMessageQueue");
5192
- }
5193
- queue = [];
5194
- waiters = [];
5195
- finished = false;
5196
- cancelled = false;
5197
- error = null;
5198
- /**
5199
- * Producer: Enqueue a message for consumption
5200
- */
5201
- enqueue(message) {
5202
- if (this.finished || this.cancelled)
5203
- return;
5204
- if (this.waiters.length > 0) {
5205
- const waiter = this.waiters.shift();
5206
- waiter.resolve(message);
5207
- }
5208
- else {
5209
- this.queue.push(message);
5210
- }
5211
- }
5212
- /**
5213
- * Producer: Signal completion (with optional error)
5214
- */
5215
- finish(error45) {
5216
- if (this.finished)
5217
- return;
5218
- this.finished = true;
5219
- this.error = error45 || null;
5220
- while (this.waiters.length > 0) {
5221
- const waiter = this.waiters.shift();
5222
- if (error45) {
5223
- waiter.reject(error45);
5224
- }
5225
- else {
5226
- waiter.resolve(null);
5227
- }
5228
- }
5193
+ __name(this, "JsonTransport");
5229
5194
  }
5230
- /**
5231
- * Consumer: Cancel the queue (stops accepting new messages and notifies waiters)
5232
- */
5233
- cancel() {
5234
- if (this.cancelled || this.finished)
5235
- return;
5236
- this.cancelled = true;
5237
- while (this.waiters.length > 0) {
5238
- const waiter = this.waiters.shift();
5239
- waiter.resolve(null);
5240
- }
5195
+ contentType = "application/json";
5196
+ replacer;
5197
+ reviver;
5198
+ constructor(options = {}) {
5199
+ this.replacer = options.replacer;
5200
+ this.reviver = options.reviver;
5241
5201
  }
5242
- /**
5243
- * Consumer: Dequeue next message (or null if finished/cancelled)
5244
- */
5245
- async dequeue() {
5246
- if (this.queue.length > 0) {
5247
- return this.queue.shift();
5248
- }
5249
- if (this.finished || this.cancelled) {
5250
- if (this.error)
5251
- throw this.error;
5252
- return null;
5253
- }
5254
- return new Promise((resolve, reject) => {
5255
- this.waiters.push({ resolve, reject });
5256
- });
5202
+ serialize(value) {
5203
+ return Buffer.from(JSON.stringify(value, this.replacer), "utf8");
5257
5204
  }
5258
- /**
5259
- * Check if the queue is in a terminal state
5260
- */
5261
- get isTerminal() {
5262
- return this.finished || this.cancelled;
5205
+ async deserialize(stream) {
5206
+ const buffer = await streamToBuffer(stream);
5207
+ return JSON.parse(buffer.toString("utf8"), this.reviver);
5263
5208
  }
5264
5209
  };
5265
- async function* parseMultipartStream(response, options) {
5266
- if (!response.body) {
5267
- throw new MultipartParseError("Response body is null");
5268
- }
5269
- const contentType = response.headers.get("content-type");
5270
- if (!contentType) {
5271
- throw new MultipartParseError("Missing Content-Type header");
5272
- }
5273
- const boundary = extractBoundary(contentType);
5274
- const parser = new StreamingMultipartParser(boundary, options);
5275
- yield* parser.parseStream(response.body);
5276
- }
5277
- __name(parseMultipartStream, "parseMultipartStream");
5278
- var StreamingMultipartParser = class {
5279
- static {
5280
- __name(this, "StreamingMultipartParser");
5281
- }
5282
- boundary;
5283
- findOpeningBoundary;
5284
- openingBoundaryLength;
5285
- findBoundary;
5286
- findPartialTailBoundary;
5287
- boundaryLength;
5288
- findDoubleNewline;
5289
- // Safety limits
5290
- maxHeaderSize;
5291
- maxBoundaryBuffer;
5292
- state = 0;
5293
- buffer = null;
5294
- currentHeaders = new Headers();
5295
- currentPayloadController = null;
5296
- constructor(boundary, options = {}) {
5297
- this.boundary = boundary;
5298
- this.findOpeningBoundary = createSearch(`--${boundary}`);
5299
- this.openingBoundaryLength = 2 + boundary.length;
5300
- this.findBoundary = createSearch(`\r
5301
- --${boundary}`);
5302
- this.findPartialTailBoundary = createPartialTailSearch(`\r
5303
- --${boundary}`);
5304
- this.boundaryLength = 4 + boundary.length;
5305
- this.findDoubleNewline = createSearch("\r\n\r\n");
5306
- this.maxHeaderSize = options.maxHeaderSize ?? 65536;
5307
- this.maxBoundaryBuffer = options.maxBoundaryBuffer ?? 8192;
5308
- }
5309
- async *parseStream(stream) {
5310
- const reader = stream.getReader();
5311
- const messageQueue = new AsyncMessageQueue();
5312
- const producer = this.startProducer(reader, messageQueue);
5313
- try {
5314
- yield* this.consumeMessages(messageQueue);
5315
- }
5316
- finally {
5317
- messageQueue.cancel();
5318
- this.closeCurrentPayload();
5319
- try {
5320
- await reader.cancel();
5321
- }
5322
- catch (error45) {
5323
- }
5324
- await producer;
5325
- }
5326
- }
5327
- /**
5328
- * Producer: Continuously read chunks and parse messages
5329
- */
5330
- async startProducer(reader, messageQueue) {
5331
- try {
5332
- while (!messageQueue.isTerminal) {
5333
- let result;
5334
- try {
5335
- result = await reader.read();
5336
- }
5337
- catch (readError) {
5338
- if (readError instanceof Error && (readError.name === "AbortError" || readError.constructor.name === "AbortError" || readError.name === "TimeoutError" || readError.constructor.name === "TimeoutError")) {
5339
- break;
5340
- }
5341
- throw readError;
5342
- }
5343
- const { done, value } = result;
5344
- if (done) {
5345
- if (this.buffer !== null && this.buffer.length > 0) {
5346
- const messages2 = this.write(new Uint8Array(0));
5347
- for (const message of messages2) {
5348
- if (messageQueue.isTerminal)
5349
- break;
5350
- messageQueue.enqueue(message);
5351
- }
5352
- }
5353
- if (this.state !== 4) {
5354
- if (this.state === 0) {
5355
- throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
5356
- }
5357
- throw new MultipartParseError("Unexpected end of stream");
5358
- }
5359
- break;
5360
- }
5361
- if (!(value instanceof Uint8Array)) {
5362
- throw new MultipartParseError(`Invalid chunk type: expected Uint8Array, got ${typeof value}`);
5363
- }
5364
- const messages = this.write(value);
5365
- for (const message of messages) {
5366
- if (messageQueue.isTerminal)
5367
- break;
5368
- messageQueue.enqueue(message);
5369
- }
5370
- }
5371
- if (!messageQueue.isTerminal) {
5372
- messageQueue.finish();
5373
- }
5374
- }
5375
- catch (error45) {
5376
- this.closeCurrentPayload(error45);
5377
- if (!messageQueue.isTerminal) {
5378
- messageQueue.finish(error45);
5379
- }
5380
- }
5381
- finally {
5382
- try {
5383
- reader.releaseLock();
5384
- }
5385
- catch (error45) {
5386
- }
5387
- }
5388
- }
5389
- /**
5390
- * Consumer: Yield messages from the queue
5391
- */
5392
- async *consumeMessages(messageQueue) {
5393
- while (true) {
5394
- const message = await messageQueue.dequeue();
5395
- if (message === null) {
5396
- break;
5397
- }
5398
- yield message;
5399
- }
5400
- }
5401
- /**
5402
- * Process a chunk of data through the state machine and return any complete messages.
5403
- *
5404
- * Returns an array because a single chunk can contain multiple complete messages
5405
- * when small messages with headers + body + boundary all fit in one network chunk.
5406
- * All messages must be captured and queued to maintain proper message ordering.
5407
- */
5408
- write(chunk) {
5409
- const newMessages = [];
5410
- if (this.state === 4) {
5411
- throw new MultipartParseError("Unexpected data after end of stream");
5412
- }
5413
- let index = 0;
5414
- let chunkLength = chunk.length;
5415
- if (this.buffer !== null) {
5416
- const newSize = this.buffer.length + chunkLength;
5417
- const maxAllowedSize = this.state === 2 ? this.maxHeaderSize : this.maxBoundaryBuffer;
5418
- if (newSize > maxAllowedSize) {
5419
- throw new MultipartParseError(`Buffer size limit exceeded: ${newSize} bytes > ${maxAllowedSize} bytes. This may indicate malformed multipart data with ${this.state === 2 ? "oversized headers" : "invalid boundaries"}.`);
5420
- }
5421
- const newChunk = new Uint8Array(newSize);
5422
- newChunk.set(this.buffer, 0);
5423
- newChunk.set(chunk, this.buffer.length);
5424
- chunk = newChunk;
5425
- chunkLength = chunk.length;
5426
- this.buffer = null;
5427
- }
5428
- if (chunkLength === 0 && this.state === 0) {
5429
- throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
5430
- }
5431
- while (true) {
5432
- if (this.state === 3) {
5433
- if (chunkLength - index < this.boundaryLength) {
5434
- const remainingData = chunk.subarray(index);
5435
- if (remainingData.length > this.maxBoundaryBuffer) {
5436
- throw new MultipartParseError(`Boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
5437
- }
5438
- this.buffer = remainingData;
5439
- break;
5440
- }
5441
- const boundaryIndex = this.findBoundary(chunk, index);
5442
- if (boundaryIndex === -1) {
5443
- const partialTailIndex = this.findPartialTailBoundary(chunk);
5444
- if (partialTailIndex === -1) {
5445
- this.writeBody(index === 0 ? chunk : chunk.subarray(index));
5446
- }
5447
- else {
5448
- this.writeBody(chunk.subarray(index, partialTailIndex));
5449
- const partialBoundary = chunk.subarray(partialTailIndex);
5450
- if (partialBoundary.length > this.maxBoundaryBuffer) {
5451
- throw new MultipartParseError(`Partial boundary too large: ${partialBoundary.length} > ${this.maxBoundaryBuffer}`);
5452
- }
5453
- this.buffer = partialBoundary;
5454
- }
5455
- break;
5456
- }
5457
- this.writeBody(chunk.subarray(index, boundaryIndex));
5458
- this.finishMessage();
5459
- index = boundaryIndex + this.boundaryLength;
5460
- this.state = 1;
5461
- }
5462
- if (this.state === 1) {
5463
- if (chunkLength - index < 2) {
5464
- const remainingData = chunk.subarray(index);
5465
- if (remainingData.length > this.maxBoundaryBuffer) {
5466
- throw new MultipartParseError(`After-boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
5467
- }
5468
- this.buffer = remainingData;
5469
- break;
5470
- }
5471
- if (chunk[index] === 45 && chunk[index + 1] === 45) {
5472
- this.state = 4;
5473
- break;
5474
- }
5475
- if (chunk[index] === 13 && chunk[index + 1] === 10) {
5476
- index += 2;
5477
- }
5478
- else if (chunk[index] === 10) {
5479
- index += 1;
5480
- }
5481
- else {
5482
- throw new MultipartParseError(`Invalid character after boundary: expected CRLF or LF, got 0x${chunk[index].toString(16)}`);
5483
- }
5484
- this.state = 2;
5485
- }
5486
- if (this.state === 2) {
5487
- if (chunkLength - index < 4) {
5488
- const remainingData = chunk.subarray(index);
5489
- if (remainingData.length > this.maxHeaderSize) {
5490
- throw new MultipartParseError(`Header buffer limit exceeded: ${remainingData.length} > ${this.maxHeaderSize}`);
5491
- }
5492
- this.buffer = remainingData;
5493
- break;
5494
- }
5495
- let headerEndIndex = this.findDoubleNewline(chunk, index);
5496
- let headerEndOffset = 4;
5497
- if (headerEndIndex === -1) {
5498
- const lfDoubleNewline = createSearch("\n\n");
5499
- headerEndIndex = lfDoubleNewline(chunk, index);
5500
- headerEndOffset = 2;
5501
- }
5502
- if (headerEndIndex === -1) {
5503
- const headerData = chunk.subarray(index);
5504
- if (headerData.length > this.maxHeaderSize) {
5505
- throw new MultipartParseError(`Headers too large: ${headerData.length} > ${this.maxHeaderSize} bytes`);
5506
- }
5507
- this.buffer = headerData;
5508
- break;
5509
- }
5510
- const headerBytes = chunk.subarray(index, headerEndIndex);
5511
- this.currentHeaders = parseHeaders(headerBytes);
5512
- const message = this.createStreamingMessage();
5513
- newMessages.push(message);
5514
- index = headerEndIndex + headerEndOffset;
5515
- this.state = 3;
5516
- continue;
5517
- }
5518
- if (this.state === 0) {
5519
- if (chunkLength < this.openingBoundaryLength) {
5520
- if (chunk.length > this.maxBoundaryBuffer) {
5521
- throw new MultipartParseError(`Initial chunk too large for boundary detection: ${chunk.length} > ${this.maxBoundaryBuffer}`);
5522
- }
5523
- this.buffer = chunk;
5524
- break;
5525
- }
5526
- const boundaryIndex = this.findOpeningBoundary(chunk);
5527
- if (boundaryIndex !== 0) {
5528
- throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
5529
- }
5530
- index = this.openingBoundaryLength;
5531
- this.state = 1;
5532
- }
5533
- }
5534
- return newMessages;
5535
- }
5536
- createStreamingMessage() {
5537
- const headers = new Headers(this.currentHeaders);
5538
- const payload = new ReadableStream({
5539
- start: /* @__PURE__ */ __name((controller) => {
5540
- this.currentPayloadController = controller;
5541
- }, "start")
5542
- });
5543
- this.currentHeaders = new Headers();
5544
- return {
5545
- headers,
5546
- payload
5547
- };
5548
- }
5549
- writeBody(chunk) {
5550
- if (this.currentPayloadController) {
5551
- this.currentPayloadController.enqueue(chunk);
5552
- }
5553
- }
5554
- finishMessage() {
5555
- if (this.currentPayloadController) {
5556
- this.currentPayloadController.close();
5557
- this.currentPayloadController = null;
5558
- }
5559
- }
5560
- /**
5561
- * Close current payload controller if open (used during cleanup)
5562
- * If an error is provided, forwards it to the payload consumer
5563
- */
5564
- closeCurrentPayload(error45) {
5565
- if (this.currentPayloadController) {
5566
- try {
5567
- if (error45) {
5568
- this.currentPayloadController.error(error45);
5569
- }
5570
- else {
5571
- this.currentPayloadController.close();
5572
- }
5573
- }
5574
- catch (controllerError) {
5575
- }
5576
- this.currentPayloadController = null;
5577
- }
5578
- }
5579
- };
5580
- // ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.23/node_modules/@vercel/queue/dist/index.mjs
5581
- async function streamToBuffer(stream) {
5582
- let totalLength = 0;
5583
- const reader = stream.getReader();
5584
- const chunks = [];
5585
- try {
5586
- while (true) {
5587
- const { done, value } = await reader.read();
5588
- if (done)
5589
- break;
5590
- chunks.push(value);
5591
- totalLength += value.length;
5592
- }
5593
- }
5594
- finally {
5595
- reader.releaseLock();
5596
- }
5597
- return Buffer.concat(chunks, totalLength);
5598
- }
5599
- __name(streamToBuffer, "streamToBuffer");
5600
- var JsonTransport = class {
5601
- static {
5602
- __name(this, "JsonTransport");
5603
- }
5604
- contentType = "application/json";
5605
- replacer;
5606
- reviver;
5607
- constructor(options = {}) {
5608
- this.replacer = options.replacer;
5609
- this.reviver = options.reviver;
5610
- }
5611
- serialize(value) {
5612
- return Buffer.from(JSON.stringify(value, this.replacer), "utf8");
5613
- }
5614
- async deserialize(stream) {
5615
- const buffer = await streamToBuffer(stream);
5616
- return JSON.parse(buffer.toString("utf8"), this.reviver);
5617
- }
5618
- };
5619
- var devRouteHandlers = /* @__PURE__ */ new Map();
5620
- var wildcardRouteHandlers = /* @__PURE__ */ new Map();
5621
- function clearDevHandlers() {
5622
- devRouteHandlers.clear();
5623
- wildcardRouteHandlers.clear();
5210
+ var devRouteHandlers = /* @__PURE__ */ new Map();
5211
+ var wildcardRouteHandlers = /* @__PURE__ */ new Map();
5212
+ function clearDevHandlers() {
5213
+ devRouteHandlers.clear();
5214
+ wildcardRouteHandlers.clear();
5624
5215
  }
5625
5216
  __name(clearDevHandlers, "clearDevHandlers");
5626
5217
  if (process.env.NODE_ENV === "test" || process.env.VITEST) {
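This hunk removes the vendored mixpart streaming multipart parser; the local world now relies only on the JSON transport kept above. A round-trip sketch of JsonTransport (illustrative, assuming Node 18+ web streams and an async context):

const transport = new JsonTransport();
const body = transport.serialize({ hello: "world" });  // Buffer containing UTF-8 JSON
const stream = new Response(body).body;                // wrap the Buffer as a web ReadableStream
const value = await transport.deserialize(stream);     // { hello: "world" }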
@@ -18486,6 +18077,8 @@ var EventTypeSchema = external_exports.enum([
18486
18077
  "hook_created",
18487
18078
  "hook_received",
18488
18079
  "hook_disposed",
18080
+ "wait_created",
18081
+ "wait_completed",
18489
18082
  "workflow_completed",
18490
18083
  "workflow_failed",
18491
18084
  "workflow_started"
@@ -18537,6 +18130,17 @@ var HookDisposedEventSchema = BaseEventSchema.extend({
18537
18130
  eventType: external_exports.literal("hook_disposed"),
18538
18131
  correlationId: external_exports.string()
18539
18132
  });
18133
+ var WaitCreatedEventSchema = BaseEventSchema.extend({
18134
+ eventType: external_exports.literal("wait_created"),
18135
+ correlationId: external_exports.string(),
18136
+ eventData: external_exports.object({
18137
+ resumeAt: external_exports.coerce.date()
18138
+ })
18139
+ });
18140
+ var WaitCompletedEventSchema = BaseEventSchema.extend({
18141
+ eventType: external_exports.literal("wait_completed"),
18142
+ correlationId: external_exports.string()
18143
+ });
18540
18144
  var WorkflowCompletedEventSchema = BaseEventSchema.extend({
18541
18145
  eventType: external_exports.literal("workflow_completed")
18542
18146
  });
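The two new event schemas extend BaseEventSchema: wait_created carries the resume time (coerced to a Date), while wait_completed only needs the correlation id. A hedged sketch of the wait-specific fields a conforming event would include (the correlation id is illustrative; the remaining BaseEventSchema fields are defined elsewhere and omitted here):

const waitCreated = {
  eventType: "wait_created",
  correlationId: "wait_01ABC",
  eventData: { resumeAt: "2024-01-01T00:00:00.000Z" }, // coerce.date() accepts ISO strings
};
const waitCompleted = {
  eventType: "wait_completed",
  correlationId: "wait_01ABC",
};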
@@ -18557,6 +18161,8 @@ var CreateEventSchema = external_exports.discriminatedUnion("eventType", [
18557
18161
  HookCreatedEventSchema,
18558
18162
  HookReceivedEventSchema,
18559
18163
  HookDisposedEventSchema,
18164
+ WaitCreatedEventSchema,
18165
+ WaitCompletedEventSchema,
18560
18166
  WorkflowCompletedEventSchema,
18561
18167
  WorkflowFailedEventSchema,
18562
18168
  WorkflowStartedEventSchema
@@ -18661,7 +18267,8 @@ var StepSchema = external_exports.object({
18661
18267
  startedAt: external_exports.coerce.date().optional(),
18662
18268
  completedAt: external_exports.coerce.date().optional(),
18663
18269
  createdAt: external_exports.coerce.date(),
18664
- updatedAt: external_exports.coerce.date()
18270
+ updatedAt: external_exports.coerce.date(),
18271
+ retryAfter: external_exports.coerce.date().optional()
18665
18272
  });
18666
18273
  // ../../node_modules/.pnpm/ulid@3.0.1/node_modules/ulid/dist/node/index.js
18667
18274
  var import_node_crypto = __toESM(require("node:crypto"), 1);
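StepSchema gains an optional retryAfter timestamp; because it uses coerce.date(), a serialized ISO string parses back into a Date. A minimal sketch (the spread stands in for the required step fields defined above; values are illustrative):

const step = StepSchema.parse({
  ...requiredStepFields,                     // runId, stepId, status, createdAt, updatedAt, ...
  retryAfter: "2024-01-01T00:05:00.000Z",    // coerced to a Date instance
});
step.retryAfter instanceof Date;             // true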
@@ -18714,933 +18321,1390 @@ function incrementBase32(str) {
18714
18321
  if (charIndex === -1) {
18715
18322
  throw new ULIDError(ULIDErrorCode.Base32IncorrectEncoding, "Incorrectly encoded string");
18716
18323
  }
18717
- if (charIndex === maxCharIndex) {
18718
- output = replaceCharAt(output, index, ENCODING[0]);
18719
- continue;
18324
+ if (charIndex === maxCharIndex) {
18325
+ output = replaceCharAt(output, index, ENCODING[0]);
18326
+ continue;
18327
+ }
18328
+ done = replaceCharAt(output, index, ENCODING[charIndex + 1]);
18329
+ }
18330
+ if (typeof done === "string") {
18331
+ return done;
18332
+ }
18333
+ throw new ULIDError(ULIDErrorCode.Base32IncorrectEncoding, "Failed incrementing string");
18334
+ }
18335
+ __name(incrementBase32, "incrementBase32");
18336
+ function decodeTime(id) {
18337
+ if (id.length !== TIME_LEN + RANDOM_LEN) {
18338
+ throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, "Malformed ULID");
18339
+ }
18340
+ const time3 = id.substr(0, TIME_LEN).toUpperCase().split("").reverse().reduce((carry, char, index) => {
18341
+ const encodingIndex = ENCODING.indexOf(char);
18342
+ if (encodingIndex === -1) {
18343
+ throw new ULIDError(ULIDErrorCode.DecodeTimeInvalidCharacter, `Time decode error: Invalid character: ${char}`);
18344
+ }
18345
+ return carry += encodingIndex * Math.pow(ENCODING_LEN, index);
18346
+ }, 0);
18347
+ if (time3 > TIME_MAX) {
18348
+ throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, `Malformed ULID: timestamp too large: ${time3}`);
18349
+ }
18350
+ return time3;
18351
+ }
18352
+ __name(decodeTime, "decodeTime");
18353
+ function detectPRNG(root) {
18354
+ const rootLookup = detectRoot();
18355
+ const globalCrypto = rootLookup && (rootLookup.crypto || rootLookup.msCrypto) || (typeof import_node_crypto.default !== "undefined" ? import_node_crypto.default : null);
18356
+ if (typeof globalCrypto?.getRandomValues === "function") {
18357
+ return () => {
18358
+ const buffer = new Uint8Array(1);
18359
+ globalCrypto.getRandomValues(buffer);
18360
+ return buffer[0] / 255;
18361
+ };
18362
+ }
18363
+ else if (typeof globalCrypto?.randomBytes === "function") {
18364
+ return () => globalCrypto.randomBytes(1).readUInt8() / 255;
18365
+ }
18366
+ else if (import_node_crypto.default?.randomBytes) {
18367
+ return () => import_node_crypto.default.randomBytes(1).readUInt8() / 255;
18368
+ }
18369
+ throw new ULIDError(ULIDErrorCode.PRNGDetectFailure, "Failed to find a reliable PRNG");
18370
+ }
18371
+ __name(detectPRNG, "detectPRNG");
18372
+ function detectRoot() {
18373
+ if (inWebWorker())
18374
+ return self;
18375
+ if (typeof window !== "undefined") {
18376
+ return window;
18377
+ }
18378
+ if (typeof global !== "undefined") {
18379
+ return global;
18380
+ }
18381
+ if (typeof globalThis !== "undefined") {
18382
+ return globalThis;
18383
+ }
18384
+ return null;
18385
+ }
18386
+ __name(detectRoot, "detectRoot");
18387
+ function encodeRandom(len, prng) {
18388
+ let str = "";
18389
+ for (; len > 0; len--) {
18390
+ str = randomChar(prng) + str;
18391
+ }
18392
+ return str;
18393
+ }
18394
+ __name(encodeRandom, "encodeRandom");
18395
+ function encodeTime(now, len = TIME_LEN) {
18396
+ if (isNaN(now)) {
18397
+ throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be a number: ${now}`);
18398
+ }
18399
+ else if (now > TIME_MAX) {
18400
+ throw new ULIDError(ULIDErrorCode.EncodeTimeSizeExceeded, `Cannot encode a time larger than ${TIME_MAX}: ${now}`);
18401
+ }
18402
+ else if (now < 0) {
18403
+ throw new ULIDError(ULIDErrorCode.EncodeTimeNegative, `Time must be positive: ${now}`);
18404
+ }
18405
+ else if (Number.isInteger(now) === false) {
18406
+ throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be an integer: ${now}`);
18407
+ }
18408
+ let mod, str = "";
18409
+ for (let currentLen = len; currentLen > 0; currentLen--) {
18410
+ mod = now % ENCODING_LEN;
18411
+ str = ENCODING.charAt(mod) + str;
18412
+ now = (now - mod) / ENCODING_LEN;
18413
+ }
18414
+ return str;
18415
+ }
18416
+ __name(encodeTime, "encodeTime");
18417
+ function inWebWorker() {
18418
+ return typeof WorkerGlobalScope !== "undefined" && self instanceof WorkerGlobalScope;
18419
+ }
18420
+ __name(inWebWorker, "inWebWorker");
18421
+ function monotonicFactory(prng) {
18422
+ const currentPRNG = prng || detectPRNG();
18423
+ let lastTime = 0, lastRandom;
18424
+ return /* @__PURE__ */ __name(function _ulid2(seedTime) {
18425
+ const seed = !seedTime || isNaN(seedTime) ? Date.now() : seedTime;
18426
+ if (seed <= lastTime) {
18427
+ const incrementedRandom = lastRandom = incrementBase32(lastRandom);
18428
+ return encodeTime(lastTime, TIME_LEN) + incrementedRandom;
18429
+ }
18430
+ lastTime = seed;
18431
+ const newRandom = lastRandom = encodeRandom(RANDOM_LEN, currentPRNG);
18432
+ return encodeTime(seed, TIME_LEN) + newRandom;
18433
+ }, "_ulid");
18434
+ }
18435
+ __name(monotonicFactory, "monotonicFactory");
18436
+ // ../world-local/dist/queue.js
18437
+ var LOCAL_QUEUE_MAX_VISIBILITY = parseInt(process.env.WORKFLOW_LOCAL_QUEUE_MAX_VISIBILITY ?? "0", 10) || Infinity;
18438
+ function createQueue(port) {
18439
+ const transport = new JsonTransport();
18440
+ const generateId = monotonicFactory();
18441
+ const inflightMessages = /* @__PURE__ */ new Map();
18442
+ const queue = /* @__PURE__ */ __name(async (queueName, message, opts) => {
18443
+ const cleanup = [];
18444
+ if (opts?.idempotencyKey) {
18445
+ const existing = inflightMessages.get(opts.idempotencyKey);
18446
+ if (existing) {
18447
+ return { messageId: existing };
18448
+ }
18449
+ }
18450
+ const body = transport.serialize(message);
18451
+ let pathname;
18452
+ if (queueName.startsWith("__wkf_step_")) {
18453
+ pathname = `step`;
18454
+ }
18455
+ else if (queueName.startsWith("__wkf_workflow_")) {
18456
+ pathname = `flow`;
18457
+ }
18458
+ else {
18459
+ throw new Error("Unknown queue name prefix");
18460
+ }
18461
+ const messageId = MessageId.parse(`msg_${generateId()}`);
18462
+ if (opts?.idempotencyKey) {
18463
+ const key = opts.idempotencyKey;
18464
+ inflightMessages.set(key, messageId);
18465
+ cleanup.push(() => {
18466
+ inflightMessages.delete(key);
18467
+ });
18468
+ }
18469
+ (async () => {
18470
+ let defaultRetriesLeft = 3;
18471
+ for (let attempt = 0; defaultRetriesLeft > 0; attempt++) {
18472
+ defaultRetriesLeft--;
18473
+ const response = await fetch(`http://localhost:${port}/.well-known/workflow/v1/${pathname}`, {
18474
+ method: "POST",
18475
+ duplex: "half",
18476
+ headers: {
18477
+ "x-vqs-queue-name": queueName,
18478
+ "x-vqs-message-id": messageId,
18479
+ "x-vqs-message-attempt": String(attempt + 1)
18480
+ },
18481
+ body
18482
+ });
18483
+ if (response.ok) {
18484
+ return;
18485
+ }
18486
+ const text = await response.text();
18487
+ if (response.status === 503) {
18488
+ try {
18489
+ const timeoutSeconds = Number(JSON.parse(text).timeoutSeconds);
18490
+ await (0, import_promises.setTimeout)(timeoutSeconds * 1e3);
18491
+ defaultRetriesLeft++;
18492
+ continue;
18493
+ }
18494
+ catch {
18495
+ }
18496
+ }
18497
+ console.error(`[embedded world] Failed to queue message`, {
18498
+ queueName,
18499
+ text,
18500
+ status: response.status,
18501
+ headers: Object.fromEntries(response.headers.entries()),
18502
+ body: body.toString()
18503
+ });
18504
+ }
18505
+ console.error(`[embedded world] Reached max retries of embedded world queue implementation`);
18506
+ })().finally(() => {
18507
+ for (const fn of cleanup) {
18508
+ fn();
18509
+ }
18510
+ });
18511
+ return { messageId };
18512
+ }, "queue");
18513
+ const HeaderParser = zod_default.object({
18514
+ "x-vqs-queue-name": ValidQueueName,
18515
+ "x-vqs-message-id": MessageId,
18516
+ "x-vqs-message-attempt": zod_default.coerce.number()
18517
+ });
18518
+ const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
18519
+ return async (req) => {
18520
+ const headers = HeaderParser.safeParse(Object.fromEntries(req.headers));
18521
+ if (!headers.success || !req.body) {
18522
+ return Response.json({ error: "Missing required headers" }, { status: 400 });
18523
+ }
18524
+ const queueName = headers.data["x-vqs-queue-name"];
18525
+ const messageId = headers.data["x-vqs-message-id"];
18526
+ const attempt = headers.data["x-vqs-message-attempt"];
18527
+ if (!queueName.startsWith(prefix)) {
18528
+ return Response.json({ error: "Unhandled queue" }, { status: 400 });
18529
+ }
18530
+ const body = await new JsonTransport().deserialize(req.body);
18531
+ try {
18532
+ const result = await handler(body, { attempt, queueName, messageId });
18533
+ let timeoutSeconds = null;
18534
+ if (typeof result?.timeoutSeconds === "number") {
18535
+ timeoutSeconds = Math.min(result.timeoutSeconds, LOCAL_QUEUE_MAX_VISIBILITY);
18536
+ }
18537
+ if (timeoutSeconds) {
18538
+ return Response.json({ timeoutSeconds }, { status: 503 });
18539
+ }
18540
+ return Response.json({ ok: true });
18541
+ }
18542
+ catch (error45) {
18543
+ return Response.json(String(error45), { status: 500 });
18544
+ }
18545
+ };
18546
+ }, "createQueueHandler");
18547
+ const getDeploymentId = /* @__PURE__ */ __name(async () => {
18548
+ return "dpl_embedded";
18549
+ }, "getDeploymentId");
18550
+ return { queue, createQueueHandler, getDeploymentId };
18551
+ }
18552
+ __name(createQueue, "createQueue");
18553
+ // ../world-local/dist/storage.js
18554
+ var import_node_path2 = __toESM(require("node:path"), 1);
18555
+ // ../world-local/dist/fs.js
18556
+ var import_node_fs = require("node:fs");
18557
+ var import_node_path = __toESM(require("node:path"), 1);
18558
+ var ulid3 = monotonicFactory(() => Math.random());
18559
+ var Ulid = external_exports.string().ulid();
18560
+ function ulidToDate(maybeUlid) {
18561
+ const ulid4 = Ulid.safeParse(maybeUlid);
18562
+ if (!ulid4.success) {
18563
+ return null;
18564
+ }
18565
+ return new Date(decodeTime(ulid4.data));
18566
+ }
18567
+ __name(ulidToDate, "ulidToDate");
18568
+ async function ensureDir(dirPath) {
18569
+ try {
18570
+ await import_node_fs.promises.mkdir(dirPath, { recursive: true });
18571
+ }
18572
+ catch (_error) {
18573
+ }
18574
+ }
18575
+ __name(ensureDir, "ensureDir");
18576
+ async function writeJSON(filePath, data, opts) {
18577
+ return write(filePath, JSON.stringify(data, null, 2), opts);
18578
+ }
18579
+ __name(writeJSON, "writeJSON");
18580
+ async function write(filePath, data, opts) {
18581
+ if (!opts?.overwrite) {
18582
+ try {
18583
+ await import_node_fs.promises.access(filePath);
18584
+ throw new WorkflowAPIError(`File ${filePath} already exists and 'overwrite' is false`, { status: 409 });
18585
+ }
18586
+ catch (error45) {
18587
+ if (error45.code !== "ENOENT") {
18588
+ throw error45;
18589
+ }
18720
18590
  }
18721
- done = replaceCharAt(output, index, ENCODING[charIndex + 1]);
18722
18591
  }
18723
- if (typeof done === "string") {
18724
- return done;
18592
+ const tempPath = `${filePath}.tmp.${ulid3()}`;
18593
+ try {
18594
+ await ensureDir(import_node_path.default.dirname(filePath));
18595
+ await import_node_fs.promises.writeFile(tempPath, data);
18596
+ await import_node_fs.promises.rename(tempPath, filePath);
18597
+ }
18598
+ catch (error45) {
18599
+ await import_node_fs.promises.unlink(tempPath).catch(() => {
18600
+ });
18601
+ throw error45;
18725
18602
  }
18726
- throw new ULIDError(ULIDErrorCode.Base32IncorrectEncoding, "Failed incrementing string");
18727
18603
  }
18728
- __name(incrementBase32, "incrementBase32");
18729
- function decodeTime(id) {
18730
- if (id.length !== TIME_LEN + RANDOM_LEN) {
18731
- throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, "Malformed ULID");
18604
+ __name(write, "write");
18605
+ async function readJSON(filePath, decoder) {
18606
+ try {
18607
+ const content = await import_node_fs.promises.readFile(filePath, "utf-8");
18608
+ return decoder.parse(JSON.parse(content));
18732
18609
  }
18733
- const time3 = id.substr(0, TIME_LEN).toUpperCase().split("").reverse().reduce((carry, char, index) => {
18734
- const encodingIndex = ENCODING.indexOf(char);
18735
- if (encodingIndex === -1) {
18736
- throw new ULIDError(ULIDErrorCode.DecodeTimeInvalidCharacter, `Time decode error: Invalid character: ${char}`);
18737
- }
18738
- return carry += encodingIndex * Math.pow(ENCODING_LEN, index);
18739
- }, 0);
18740
- if (time3 > TIME_MAX) {
18741
- throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, `Malformed ULID: timestamp too large: ${time3}`);
18610
+ catch (error45) {
18611
+ if (error45.code === "ENOENT")
18612
+ return null;
18613
+ throw error45;
18742
18614
  }
18743
- return time3;
18744
18615
  }
18745
- __name(decodeTime, "decodeTime");
18746
- function detectPRNG(root) {
18747
- const rootLookup = detectRoot();
18748
- const globalCrypto = rootLookup && (rootLookup.crypto || rootLookup.msCrypto) || (typeof import_node_crypto.default !== "undefined" ? import_node_crypto.default : null);
18749
- if (typeof globalCrypto?.getRandomValues === "function") {
18750
- return () => {
18751
- const buffer = new Uint8Array(1);
18752
- globalCrypto.getRandomValues(buffer);
18753
- return buffer[0] / 255;
18754
- };
18755
- }
18756
- else if (typeof globalCrypto?.randomBytes === "function") {
18757
- return () => globalCrypto.randomBytes(1).readUInt8() / 255;
18616
+ __name(readJSON, "readJSON");
18617
+ async function readBuffer(filePath) {
18618
+ const content = await import_node_fs.promises.readFile(filePath);
18619
+ return content;
18620
+ }
18621
+ __name(readBuffer, "readBuffer");
18622
+ async function deleteJSON(filePath) {
18623
+ try {
18624
+ await import_node_fs.promises.unlink(filePath);
18758
18625
  }
18759
- else if (import_node_crypto.default?.randomBytes) {
18760
- return () => import_node_crypto.default.randomBytes(1).readUInt8() / 255;
18626
+ catch (error45) {
18627
+ if (error45.code !== "ENOENT")
18628
+ throw error45;
18761
18629
  }
18762
- throw new ULIDError(ULIDErrorCode.PRNGDetectFailure, "Failed to find a reliable PRNG");
18763
18630
  }
18764
- __name(detectPRNG, "detectPRNG");
18765
- function detectRoot() {
18766
- if (inWebWorker())
18767
- return self;
18768
- if (typeof window !== "undefined") {
18769
- return window;
18770
- }
18771
- if (typeof global !== "undefined") {
18772
- return global;
18631
+ __name(deleteJSON, "deleteJSON");
18632
+ async function listJSONFiles(dirPath) {
18633
+ try {
18634
+ const files = await import_node_fs.promises.readdir(dirPath);
18635
+ return files.filter((f) => f.endsWith(".json")).map((f) => f.replace(".json", ""));
18773
18636
  }
18774
- if (typeof globalThis !== "undefined") {
18775
- return globalThis;
18637
+ catch (error45) {
18638
+ if (error45.code === "ENOENT")
18639
+ return [];
18640
+ throw error45;
18776
18641
  }
18777
- return null;
18778
18642
  }
18779
- __name(detectRoot, "detectRoot");
18780
- function encodeRandom(len, prng) {
18781
- let str = "";
18782
- for (; len > 0; len--) {
18783
- str = randomChar(prng) + str;
18784
- }
18785
- return str;
18643
+ __name(listJSONFiles, "listJSONFiles");
18644
+ function parseCursor(cursor) {
18645
+ if (!cursor)
18646
+ return null;
18647
+ const parts = cursor.split("|");
18648
+ return {
18649
+ timestamp: new Date(parts[0]),
18650
+ id: parts[1] || null
18651
+ };
18786
18652
  }
18787
- __name(encodeRandom, "encodeRandom");
18788
- function encodeTime(now, len = TIME_LEN) {
18789
- if (isNaN(now)) {
18790
- throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be a number: ${now}`);
18791
- }
18792
- else if (now > TIME_MAX) {
18793
- throw new ULIDError(ULIDErrorCode.EncodeTimeSizeExceeded, `Cannot encode a time larger than ${TIME_MAX}: ${now}`);
18653
+ __name(parseCursor, "parseCursor");
18654
+ function createCursor(timestamp, id) {
18655
+ return id ? `${timestamp.toISOString()}|${id}` : timestamp.toISOString();
18656
+ }
18657
+ __name(createCursor, "createCursor");
18658
+ async function paginatedFileSystemQuery(config3) {
18659
+ const { directory, schema, filePrefix, filter, sortOrder = "desc", limit = 20, cursor, getCreatedAt, getId } = config3;
18660
+ const fileIds = await listJSONFiles(directory);
18661
+ const relevantFileIds = filePrefix ? fileIds.filter((fileId) => fileId.startsWith(filePrefix)) : fileIds;
18662
+ const parsedCursor = parseCursor(cursor);
18663
+ let candidateFileIds = relevantFileIds;
18664
+ if (parsedCursor) {
18665
+ candidateFileIds = relevantFileIds.filter((fileId) => {
18666
+ const filenameDate = getCreatedAt(`${fileId}.json`);
18667
+ if (filenameDate) {
18668
+ const cursorTime = parsedCursor.timestamp.getTime();
18669
+ const fileTime = filenameDate.getTime();
18670
+ if (parsedCursor.id) {
18671
+ return sortOrder === "desc" ? fileTime <= cursorTime : fileTime >= cursorTime;
18672
+ }
18673
+ else {
18674
+ return sortOrder === "desc" ? fileTime < cursorTime : fileTime > cursorTime;
18675
+ }
18676
+ }
18677
+ return false;
18678
+ });
18794
18679
  }
18795
- else if (now < 0) {
18796
- throw new ULIDError(ULIDErrorCode.EncodeTimeNegative, `Time must be positive: ${now}`);
18680
+ else {
18681
+ candidateFileIds = relevantFileIds.filter((fileId) => {
18682
+ return getCreatedAt(`${fileId}.json`) !== null;
18683
+ });
18797
18684
  }
18798
- else if (Number.isInteger(now) === false) {
18799
- throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be an integer: ${now}`);
18685
+ const validItems = [];
18686
+ for (const fileId of candidateFileIds) {
18687
+ const filePath = import_node_path.default.join(directory, `${fileId}.json`);
18688
+ const item = await readJSON(filePath, schema);
18689
+ if (item) {
18690
+ if (filter && !filter(item))
18691
+ continue;
18692
+ if (parsedCursor) {
18693
+ const itemTime = item.createdAt.getTime();
18694
+ const cursorTime = parsedCursor.timestamp.getTime();
18695
+ if (sortOrder === "desc") {
18696
+ if (itemTime > cursorTime)
18697
+ continue;
18698
+ if (itemTime === cursorTime && parsedCursor.id && getId) {
18699
+ const itemId = getId(item);
18700
+ if (itemId >= parsedCursor.id)
18701
+ continue;
18702
+ }
18703
+ }
18704
+ else {
18705
+ if (itemTime < cursorTime)
18706
+ continue;
18707
+ if (itemTime === cursorTime && parsedCursor.id && getId) {
18708
+ const itemId = getId(item);
18709
+ if (itemId <= parsedCursor.id)
18710
+ continue;
18711
+ }
18712
+ }
18713
+ }
18714
+ validItems.push(item);
18715
+ }
18800
18716
  }
18801
- let mod, str = "";
18802
- for (let currentLen = len; currentLen > 0; currentLen--) {
18803
- mod = now % ENCODING_LEN;
18804
- str = ENCODING.charAt(mod) + str;
18805
- now = (now - mod) / ENCODING_LEN;
18717
+ validItems.sort((a, b) => {
18718
+ const aTime = a.createdAt.getTime();
18719
+ const bTime = b.createdAt.getTime();
18720
+ const timeComparison = sortOrder === "asc" ? aTime - bTime : bTime - aTime;
18721
+ if (timeComparison === 0 && getId) {
18722
+ const aId = getId(a);
18723
+ const bId = getId(b);
18724
+ return sortOrder === "asc" ? aId.localeCompare(bId) : bId.localeCompare(aId);
18725
+ }
18726
+ return timeComparison;
18727
+ });
18728
+ const hasMore = validItems.length > limit;
18729
+ const items = hasMore ? validItems.slice(0, limit) : validItems;
18730
+ const nextCursor = hasMore && items.length > 0 ? createCursor(items[items.length - 1].createdAt, getId?.(items[items.length - 1])) : null;
18731
+ return {
18732
+ data: items,
18733
+ cursor: nextCursor,
18734
+ hasMore
18735
+ };
18736
+ }
18737
+ __name(paginatedFileSystemQuery, "paginatedFileSystemQuery");
18738
+ // ../world-local/dist/storage.js
18739
+ var monotonicUlid = monotonicFactory(() => Math.random());
18740
+ function filterRunData(run, resolveData) {
18741
+ if (resolveData === "none") {
18742
+ return {
18743
+ ...run,
18744
+ input: [],
18745
+ output: void 0
18746
+ };
18806
18747
  }
18807
- return str;
18748
+ return run;
18808
18749
  }
18809
- __name(encodeTime, "encodeTime");
18810
- function inWebWorker() {
18811
- return typeof WorkerGlobalScope !== "undefined" && self instanceof WorkerGlobalScope;
18750
+ __name(filterRunData, "filterRunData");
18751
+ function filterStepData(step, resolveData) {
18752
+ if (resolveData === "none") {
18753
+ return {
18754
+ ...step,
18755
+ input: [],
18756
+ output: void 0
18757
+ };
18758
+ }
18759
+ return step;
18812
18760
  }
18813
- __name(inWebWorker, "inWebWorker");
18814
- function monotonicFactory(prng) {
18815
- const currentPRNG = prng || detectPRNG();
18816
- let lastTime = 0, lastRandom;
18817
- return /* @__PURE__ */ __name(function _ulid2(seedTime) {
18818
- const seed = !seedTime || isNaN(seedTime) ? Date.now() : seedTime;
18819
- if (seed <= lastTime) {
18820
- const incrementedRandom = lastRandom = incrementBase32(lastRandom);
18821
- return encodeTime(lastTime, TIME_LEN) + incrementedRandom;
18822
- }
18823
- lastTime = seed;
18824
- const newRandom = lastRandom = encodeRandom(RANDOM_LEN, currentPRNG);
18825
- return encodeTime(seed, TIME_LEN) + newRandom;
18826
- }, "_ulid");
18761
+ __name(filterStepData, "filterStepData");
18762
+ function filterEventData(event, resolveData) {
18763
+ if (resolveData === "none") {
18764
+ const { eventData: _eventData, ...rest } = event;
18765
+ return rest;
18766
+ }
18767
+ return event;
18827
18768
  }
18828
- __name(monotonicFactory, "monotonicFactory");
18829
- // ../world-local/dist/queue.js
18830
- function createQueue(port) {
18831
- const transport = new JsonTransport();
18832
- const generateId = monotonicFactory();
18833
- const inflightMessages = /* @__PURE__ */ new Map();
18834
- const queue = /* @__PURE__ */ __name(async (queueName, message, opts) => {
18835
- const cleanup = [];
18836
- if (opts?.idempotencyKey) {
18837
- const existing = inflightMessages.get(opts.idempotencyKey);
18838
- if (existing) {
18839
- return { messageId: existing };
18769
+ __name(filterEventData, "filterEventData");
18770
+ function filterHookData(hook, resolveData) {
18771
+ if (resolveData === "none") {
18772
+ const { metadata: _metadata, ...rest } = hook;
18773
+ return rest;
18774
+ }
18775
+ return hook;
18776
+ }
18777
+ __name(filterHookData, "filterHookData");
18778
+ var getObjectCreatedAt = /* @__PURE__ */ __name((idPrefix) => (filename) => {
18779
+ const replaceRegex = new RegExp(`^${idPrefix}_`, "g");
18780
+ const dashIndex = filename.indexOf("-");
18781
+ if (dashIndex === -1) {
18782
+ const ulid5 = filename.replace(/\.json$/, "").replace(replaceRegex, "");
18783
+ return ulidToDate(ulid5);
18784
+ }
18785
+ if (idPrefix === "step") {
18786
+ const runId = filename.substring(0, dashIndex);
18787
+ const ulid5 = runId.replace(/^wrun_/, "");
18788
+ return ulidToDate(ulid5);
18789
+ }
18790
+ const id = filename.substring(dashIndex + 1).replace(/\.json$/, "");
18791
+ const ulid4 = id.replace(replaceRegex, "");
18792
+ return ulidToDate(ulid4);
18793
+ }, "getObjectCreatedAt");
18794
+ function createStorage(basedir) {
18795
+ return {
18796
+ runs: {
18797
+ async create(data) {
18798
+ const runId = `wrun_${monotonicUlid()}`;
18799
+ const now = /* @__PURE__ */ new Date();
18800
+ const result = {
18801
+ runId,
18802
+ deploymentId: data.deploymentId,
18803
+ status: "pending",
18804
+ workflowName: data.workflowName,
18805
+ executionContext: data.executionContext,
18806
+ input: data.input || [],
18807
+ output: void 0,
18808
+ error: void 0,
18809
+ errorCode: void 0,
18810
+ startedAt: void 0,
18811
+ completedAt: void 0,
18812
+ createdAt: now,
18813
+ updatedAt: now
18814
+ };
18815
+ const runPath = import_node_path2.default.join(basedir, "runs", `${runId}.json`);
18816
+ await writeJSON(runPath, result);
18817
+ return result;
18818
+ },
18819
+ async get(id, params) {
18820
+ const runPath = import_node_path2.default.join(basedir, "runs", `${id}.json`);
18821
+ const run = await readJSON(runPath, WorkflowRunSchema);
18822
+ if (!run) {
18823
+ throw new WorkflowRunNotFoundError(id);
18824
+ }
18825
+ const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
18826
+ return filterRunData(run, resolveData);
18827
+ },
18828
+ async update(id, data) {
18829
+ const runPath = import_node_path2.default.join(basedir, "runs", `${id}.json`);
18830
+ const run = await readJSON(runPath, WorkflowRunSchema);
18831
+ if (!run) {
18832
+ throw new WorkflowRunNotFoundError(id);
18833
+ }
18834
+ const now = /* @__PURE__ */ new Date();
18835
+ const updatedRun = {
18836
+ ...run,
18837
+ ...data,
18838
+ updatedAt: now
18839
+ };
18840
+ if (data.status === "running" && !updatedRun.startedAt) {
18841
+ updatedRun.startedAt = now;
18842
+ }
18843
+ if (data.status === "completed" || data.status === "failed" || data.status === "cancelled") {
18844
+ updatedRun.completedAt = now;
18845
+ }
18846
+ await writeJSON(runPath, updatedRun, { overwrite: true });
18847
+ return updatedRun;
18848
+ },
18849
+ async list(params) {
18850
+ const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
18851
+ const result = await paginatedFileSystemQuery({
18852
+ directory: import_node_path2.default.join(basedir, "runs"),
18853
+ schema: WorkflowRunSchema,
18854
+ filter: /* @__PURE__ */ __name((run) => {
18855
+ if (params?.workflowName && run.workflowName !== params.workflowName) {
18856
+ return false;
18857
+ }
18858
+ if (params?.status && run.status !== params.status) {
18859
+ return false;
18860
+ }
18861
+ return true;
18862
+ }, "filter"),
18863
+ sortOrder: params?.pagination?.sortOrder ?? "desc",
18864
+ limit: params?.pagination?.limit,
18865
+ cursor: params?.pagination?.cursor,
18866
+ getCreatedAt: getObjectCreatedAt("wrun"),
18867
+ getId: /* @__PURE__ */ __name((run) => run.runId, "getId")
18868
+ });
18869
+ if (resolveData === "none") {
18870
+ return {
18871
+ ...result,
18872
+ data: result.data.map((run) => ({
18873
+ ...run,
18874
+ input: [],
18875
+ output: void 0
18876
+ }))
18877
+ };
18878
+ }
18879
+ return result;
18880
+ },
18881
+ async cancel(id, params) {
18882
+ const run = await this.update(id, { status: "cancelled" });
18883
+ const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
18884
+ return filterRunData(run, resolveData);
18885
+ },
18886
+ async pause(id, params) {
18887
+ const run = await this.update(id, { status: "paused" });
18888
+ const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
18889
+ return filterRunData(run, resolveData);
18890
+ },
18891
+ async resume(id, params) {
18892
+ const run = await this.update(id, { status: "running" });
18893
+ const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
18894
+ return filterRunData(run, resolveData);
18840
18895
  }
18841
- }
18842
- const body = transport.serialize(message);
18843
- let pathname;
18844
- if (queueName.startsWith("__wkf_step_")) {
18845
- pathname = `step`;
18846
- }
18847
- else if (queueName.startsWith("__wkf_workflow_")) {
18848
- pathname = `flow`;
18849
- }
18850
- else {
18851
- throw new Error("Unknown queue name prefix");
18852
- }
18853
- const messageId = MessageId.parse(`msg_${generateId()}`);
18854
- if (opts?.idempotencyKey) {
18855
- const key = opts.idempotencyKey;
18856
- inflightMessages.set(key, messageId);
18857
- cleanup.push(() => {
18858
- inflightMessages.delete(key);
18859
- });
18860
- }
18861
- (async () => {
18862
- let defaultRetriesLeft = 3;
18863
- for (let attempt = 0; defaultRetriesLeft > 0; attempt++) {
18864
- defaultRetriesLeft--;
18865
- const response = await fetch(`http://localhost:${port}/.well-known/workflow/v1/${pathname}`, {
18866
- method: "POST",
18867
- duplex: "half",
18868
- headers: {
18869
- "x-vqs-queue-name": queueName,
18870
- "x-vqs-message-id": messageId,
18871
- "x-vqs-message-attempt": String(attempt + 1)
18872
- },
18873
- body
18896
+ },
18897
+ steps: {
18898
+ async create(runId, data) {
18899
+ const now = /* @__PURE__ */ new Date();
18900
+ const result = {
18901
+ runId,
18902
+ stepId: data.stepId,
18903
+ stepName: data.stepName,
18904
+ status: "pending",
18905
+ input: data.input,
18906
+ output: void 0,
18907
+ error: void 0,
18908
+ errorCode: void 0,
18909
+ attempt: 0,
18910
+ startedAt: void 0,
18911
+ completedAt: void 0,
18912
+ createdAt: now,
18913
+ updatedAt: now
18914
+ };
18915
+ const compositeKey = `${runId}-${data.stepId}`;
18916
+ const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
18917
+ await writeJSON(stepPath, result);
18918
+ return result;
18919
+ },
18920
+ async get(runId, stepId, params) {
18921
+ if (!runId) {
18922
+ const fileIds = await listJSONFiles(import_node_path2.default.join(basedir, "steps"));
18923
+ const fileId = fileIds.find((fileId2) => fileId2.endsWith(`-${stepId}`));
18924
+ if (!fileId) {
18925
+ throw new Error(`Step ${stepId} not found`);
18926
+ }
18927
+ runId = fileId.split("-")[0];
18928
+ }
18929
+ const compositeKey = `${runId}-${stepId}`;
18930
+ const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
18931
+ const step = await readJSON(stepPath, StepSchema);
18932
+ if (!step) {
18933
+ throw new Error(`Step ${stepId} in run ${runId} not found`);
18934
+ }
18935
+ const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
18936
+ return filterStepData(step, resolveData);
18937
+ },
18938
+ async update(runId, stepId, data) {
18939
+ const compositeKey = `${runId}-${stepId}`;
18940
+ const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
18941
+ const step = await readJSON(stepPath, StepSchema);
18942
+ if (!step) {
18943
+ throw new Error(`Step ${stepId} in run ${runId} not found`);
18944
+ }
18945
+ const now = /* @__PURE__ */ new Date();
18946
+ const updatedStep = {
18947
+ ...step,
18948
+ ...data,
18949
+ updatedAt: now
18950
+ };
18951
+ if (data.status === "running" && !updatedStep.startedAt) {
18952
+ updatedStep.startedAt = now;
18953
+ }
18954
+ if (data.status === "completed" || data.status === "failed") {
18955
+ updatedStep.completedAt = now;
18956
+ }
18957
+ await writeJSON(stepPath, updatedStep, { overwrite: true });
18958
+ return updatedStep;
18959
+ },
18960
+ async list(params) {
18961
+ const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
18962
+ const result = await paginatedFileSystemQuery({
18963
+ directory: import_node_path2.default.join(basedir, "steps"),
18964
+ schema: StepSchema,
18965
+ filePrefix: `${params.runId}-`,
18966
+ sortOrder: params.pagination?.sortOrder ?? "desc",
18967
+ limit: params.pagination?.limit,
18968
+ cursor: params.pagination?.cursor,
18969
+ getCreatedAt: getObjectCreatedAt("step"),
18970
+ getId: /* @__PURE__ */ __name((step) => step.stepId, "getId")
18971
+ });
18972
+ if (resolveData === "none") {
18973
+ return {
18974
+ ...result,
18975
+ data: result.data.map((step) => ({
18976
+ ...step,
18977
+ input: [],
18978
+ output: void 0
18979
+ }))
18980
+ };
18981
+ }
18982
+ return result;
18983
+ }
18984
+ },
18985
+ // Events - filesystem-backed storage
18986
+ events: {
18987
+ async create(runId, data, params) {
18988
+ const eventId = `evnt_${monotonicUlid()}`;
18989
+ const now = /* @__PURE__ */ new Date();
18990
+ const result = {
18991
+ ...data,
18992
+ runId,
18993
+ eventId,
18994
+ createdAt: now
18995
+ };
18996
+ const compositeKey = `${runId}-${eventId}`;
18997
+ const eventPath = import_node_path2.default.join(basedir, "events", `${compositeKey}.json`);
18998
+ await writeJSON(eventPath, result);
18999
+ const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19000
+ return filterEventData(result, resolveData);
19001
+ },
19002
+ async list(params) {
19003
+ const { runId } = params;
19004
+ const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19005
+ const result = await paginatedFileSystemQuery({
19006
+ directory: import_node_path2.default.join(basedir, "events"),
19007
+ schema: EventSchema,
19008
+ filePrefix: `${runId}-`,
19009
+ // Events in chronological order (oldest first) by default,
19010
+ // different from the default for other list calls.
19011
+ sortOrder: params.pagination?.sortOrder ?? "asc",
19012
+ limit: params.pagination?.limit,
19013
+ cursor: params.pagination?.cursor,
19014
+ getCreatedAt: getObjectCreatedAt("evnt"),
19015
+ getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
18874
19016
  });
18875
- if (response.ok) {
18876
- return;
19017
+ if (resolveData === "none") {
19018
+ return {
19019
+ ...result,
19020
+ data: result.data.map((event) => {
19021
+ const { eventData: _eventData, ...rest } = event;
19022
+ return rest;
19023
+ })
19024
+ };
18877
19025
  }
18878
- const text = await response.text();
18879
- if (response.status === 503) {
18880
- try {
18881
- const retryIn = Number(JSON.parse(text).retryIn);
18882
- await (0, import_promises.setTimeout)(retryIn * 1e3);
18883
- defaultRetriesLeft++;
18884
- continue;
18885
- }
18886
- catch {
19026
+ return result;
19027
+ },
19028
+ async listByCorrelationId(params) {
19029
+ const correlationId = params.correlationId;
19030
+ const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19031
+ const result = await paginatedFileSystemQuery({
19032
+ directory: import_node_path2.default.join(basedir, "events"),
19033
+ schema: EventSchema,
19034
+ // No filePrefix - search all events
19035
+ filter: /* @__PURE__ */ __name((event) => event.correlationId === correlationId, "filter"),
19036
+ // Events in chronological order (oldest first) by default,
19037
+ // different from the default for other list calls.
19038
+ sortOrder: params.pagination?.sortOrder ?? "asc",
19039
+ limit: params.pagination?.limit,
19040
+ cursor: params.pagination?.cursor,
19041
+ getCreatedAt: getObjectCreatedAt("evnt"),
19042
+ getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
19043
+ });
19044
+ if (resolveData === "none") {
19045
+ return {
19046
+ ...result,
19047
+ data: result.data.map((event) => {
19048
+ const { eventData: _eventData, ...rest } = event;
19049
+ return rest;
19050
+ })
19051
+ };
19052
+ }
19053
+ return result;
19054
+ }
19055
+ },
19056
+ // Hooks
19057
+ hooks: {
19058
+ async create(runId, data) {
19059
+ const now = /* @__PURE__ */ new Date();
19060
+ const result = {
19061
+ runId,
19062
+ hookId: data.hookId,
19063
+ token: data.token,
19064
+ metadata: data.metadata,
19065
+ ownerId: "embedded-owner",
19066
+ projectId: "embedded-project",
19067
+ environment: "embedded",
19068
+ createdAt: now
19069
+ };
19070
+ const hookPath = import_node_path2.default.join(basedir, "hooks", `${data.hookId}.json`);
19071
+ await writeJSON(hookPath, result);
19072
+ return result;
19073
+ },
19074
+ async get(hookId, params) {
19075
+ const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
19076
+ const hook = await readJSON(hookPath, HookSchema);
19077
+ if (!hook) {
19078
+ throw new Error(`Hook ${hookId} not found`);
19079
+ }
19080
+ const resolveData = params?.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
19081
+ return filterHookData(hook, resolveData);
19082
+ },
19083
+ async getByToken(token) {
19084
+ const hooksDir = import_node_path2.default.join(basedir, "hooks");
19085
+ const files = await listJSONFiles(hooksDir);
19086
+ for (const file2 of files) {
19087
+ const hookPath = import_node_path2.default.join(hooksDir, `${file2}.json`);
19088
+ const hook = await readJSON(hookPath, HookSchema);
19089
+ if (hook && hook.token === token) {
19090
+ return hook;
18887
19091
  }
18888
19092
  }
18889
- console.error(`[embedded world] Failed to queue message`, {
18890
- queueName,
18891
- text,
18892
- status: response.status,
18893
- headers: Object.fromEntries(response.headers.entries()),
18894
- body: body.toString()
19093
+ throw new Error(`Hook with token ${token} not found`);
19094
+ },
19095
+ async list(params) {
19096
+ const hooksDir = import_node_path2.default.join(basedir, "hooks");
19097
+ const resolveData = params.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
19098
+ const result = await paginatedFileSystemQuery({
19099
+ directory: hooksDir,
19100
+ schema: HookSchema,
19101
+ sortOrder: params.pagination?.sortOrder,
19102
+ limit: params.pagination?.limit,
19103
+ cursor: params.pagination?.cursor,
19104
+ filePrefix: void 0,
19105
+ // Hooks don't have ULIDs, so we can't optimize by filename
19106
+ filter: /* @__PURE__ */ __name((hook) => {
19107
+ if (params.runId && hook.runId !== params.runId) {
19108
+ return false;
19109
+ }
19110
+ return true;
19111
+ }, "filter"),
19112
+ getCreatedAt: /* @__PURE__ */ __name(() => {
19113
+ return /* @__PURE__ */ new Date(0);
19114
+ }, "getCreatedAt"),
19115
+ getId: /* @__PURE__ */ __name((hook) => hook.hookId, "getId")
18895
19116
  });
19117
+ return {
19118
+ ...result,
19119
+ data: result.data.map((hook) => filterHookData(hook, resolveData))
19120
+ };
19121
+ },
19122
+ async dispose(hookId) {
19123
+ const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
19124
+ const hook = await readJSON(hookPath, HookSchema);
19125
+ if (!hook) {
19126
+ throw new Error(`Hook ${hookId} not found`);
19127
+ }
19128
+ await deleteJSON(hookPath);
19129
+ return hook;
18896
19130
  }
18897
- console.error(`[embedded world] Reached max retries of embedded world queue implementation`);
18898
- })().finally(() => {
18899
- for (const fn of cleanup) {
18900
- fn();
18901
- }
18902
- });
18903
- return { messageId };
18904
- }, "queue");
18905
- const HeaderParser = zod_default.object({
18906
- "x-vqs-queue-name": ValidQueueName,
18907
- "x-vqs-message-id": MessageId,
18908
- "x-vqs-message-attempt": zod_default.coerce.number()
18909
- });
18910
- const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
18911
- return async (req) => {
18912
- const headers = HeaderParser.safeParse(Object.fromEntries(req.headers));
18913
- if (!headers.success || !req.body) {
18914
- return Response.json({ error: "Missing required headers" }, { status: 400 });
18915
- }
18916
- const queueName = headers.data["x-vqs-queue-name"];
18917
- const messageId = headers.data["x-vqs-message-id"];
18918
- const attempt = headers.data["x-vqs-message-attempt"];
18919
- if (!queueName.startsWith(prefix)) {
18920
- return Response.json({ error: "Unhandled queue" }, { status: 400 });
19131
+ }
19132
+ };
19133
+ }
19134
+ __name(createStorage, "createStorage");
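The storage factory above keeps one JSON document per record under the base directory; steps and events use `runId-stepId` / `runId-eventId` composite filenames so that list() can narrow its scan by filename prefix. A rough sketch of how records land on disk, assuming a placeholder base directory, placeholder IDs, and made-up event fields (none of these values come from the package):

// Composite keys: steps are `${runId}-${stepId}`, events are `${runId}-${eventId}`.
const storage = createStorage("/tmp/workflow-data");
await storage.steps.create("wrun_123", { stepId: "step_1", stepName: "demo", input: [] });
// -> /tmp/workflow-data/steps/wrun_123-step_1.json
await storage.events.create("wrun_123", { eventType: "step.started" }); // event fields are illustrative
// -> /tmp/workflow-data/events/wrun_123-evnt_<monotonic ulid>.json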
19135
+ // ../world-local/dist/streamer.js
19136
+ var import_node_events = require("node:events");
19137
+ var import_node_path3 = __toESM(require("node:path"), 1);
19138
+ var monotonicUlid2 = monotonicFactory(() => Math.random());
19139
+ function serializeChunk(chunk) {
19140
+ const eofByte = Buffer.from([chunk.eof ? 1 : 0]);
19141
+ return Buffer.concat([eofByte, chunk.chunk]);
19142
+ }
19143
+ __name(serializeChunk, "serializeChunk");
19144
+ function deserializeChunk(serialized) {
19145
+ const eof = serialized[0] === 1;
19146
+ const chunk = serialized.subarray(1);
19147
+ return { eof, chunk };
19148
+ }
19149
+ __name(deserializeChunk, "deserializeChunk");
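serializeChunk and deserializeChunk frame each persisted stream chunk with a single leading byte: 1 marks the end-of-stream sentinel, 0 marks ordinary data, and everything after that byte is the payload. A minimal round trip using only the two helpers above (the sample text is illustrative):

const dataFrame = serializeChunk({ chunk: Buffer.from("hello"), eof: false });
const eofFrame = serializeChunk({ chunk: Buffer.from([]), eof: true });
console.log(deserializeChunk(dataFrame)); // { eof: false, chunk: <Buffer "hello"> }
console.log(deserializeChunk(eofFrame));  // { eof: true, chunk: <empty Buffer> }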
19150
+ function createStreamer(basedir) {
19151
+ const streamEmitter = new import_node_events.EventEmitter();
19152
+ return {
19153
+ async writeToStream(name, chunk) {
19154
+ const chunkId = `strm_${monotonicUlid2()}`;
19155
+ if (typeof chunk === "string") {
19156
+ chunk = new TextEncoder().encode(chunk);
18921
19157
  }
18922
- const body = await new JsonTransport().deserialize(req.body);
18923
- try {
18924
- const response = await handler(body, { attempt, queueName, messageId });
18925
- const retryIn = typeof response === "undefined" ? null : response.timeoutSeconds;
18926
- if (retryIn) {
18927
- return Response.json({ retryIn }, { status: 503 });
19158
+ const serialized = serializeChunk({
19159
+ chunk: Buffer.from(chunk),
19160
+ eof: false
19161
+ });
19162
+ const chunkPath = import_node_path3.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.json`);
19163
+ await write(chunkPath, serialized);
19164
+ const chunkData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk instanceof Buffer ? new Uint8Array(chunk) : chunk;
19165
+ streamEmitter.emit(`chunk:${name}`, {
19166
+ streamName: name,
19167
+ chunkData,
19168
+ chunkId
19169
+ });
19170
+ },
19171
+ async closeStream(name) {
19172
+ const chunkId = `strm_${monotonicUlid2()}`;
19173
+ const chunkPath = import_node_path3.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.json`);
19174
+ await write(chunkPath, serializeChunk({ chunk: Buffer.from([]), eof: true }));
19175
+ streamEmitter.emit(`close:${name}`, { streamName: name });
19176
+ },
19177
+ async readFromStream(name, startIndex = 0) {
19178
+ const chunksDir = import_node_path3.default.join(basedir, "streams", "chunks");
19179
+ let removeListeners = /* @__PURE__ */ __name(() => {
19180
+ }, "removeListeners");
19181
+ return new ReadableStream({
19182
+ async start(controller) {
19183
+ const deliveredChunkIds = /* @__PURE__ */ new Set();
19184
+ const bufferedEventChunks = [];
19185
+ let isReadingFromDisk = true;
19186
+ const chunkListener = /* @__PURE__ */ __name((event) => {
19187
+ deliveredChunkIds.add(event.chunkId);
19188
+ if (isReadingFromDisk) {
19189
+ bufferedEventChunks.push({
19190
+ chunkId: event.chunkId,
19191
+ chunkData: event.chunkData
19192
+ });
19193
+ }
19194
+ else {
19195
+ controller.enqueue(event.chunkData);
19196
+ }
19197
+ }, "chunkListener");
19198
+ const closeListener = /* @__PURE__ */ __name(() => {
19199
+ streamEmitter.off(`chunk:${name}`, chunkListener);
19200
+ streamEmitter.off(`close:${name}`, closeListener);
19201
+ controller.close();
19202
+ }, "closeListener");
19203
+ removeListeners = closeListener;
19204
+ streamEmitter.on(`chunk:${name}`, chunkListener);
19205
+ streamEmitter.on(`close:${name}`, closeListener);
19206
+ const files = await listJSONFiles(chunksDir);
19207
+ const chunkFiles = files.filter((file2) => file2.startsWith(`${name}-`)).sort();
19208
+ let isComplete = false;
19209
+ for (let i = startIndex; i < chunkFiles.length; i++) {
19210
+ const file2 = chunkFiles[i];
19211
+ const chunkId = file2.substring(name.length + 1);
19212
+ if (deliveredChunkIds.has(chunkId)) {
19213
+ continue;
19214
+ }
19215
+ const chunk = deserializeChunk(await readBuffer(import_node_path3.default.join(chunksDir, `${file2}.json`)));
19216
+ if (chunk?.eof === true) {
19217
+ isComplete = true;
19218
+ break;
19219
+ }
19220
+ if (chunk.chunk.byteLength) {
19221
+ controller.enqueue(chunk.chunk);
19222
+ }
19223
+ }
19224
+ isReadingFromDisk = false;
19225
+ bufferedEventChunks.sort((a, b) => a.chunkId.localeCompare(b.chunkId));
19226
+ for (const buffered of bufferedEventChunks) {
19227
+ controller.enqueue(buffered.chunkData);
19228
+ }
19229
+ if (isComplete) {
19230
+ removeListeners();
19231
+ controller.close();
19232
+ return;
19233
+ }
19234
+ },
19235
+ cancel() {
19236
+ removeListeners();
18928
19237
  }
18929
- return Response.json({ ok: true });
18930
- }
18931
- catch (error45) {
18932
- return Response.json(String(error45), { status: 500 });
18933
- }
18934
- };
18935
- }, "createQueueHandler");
18936
- const getDeploymentId = /* @__PURE__ */ __name(async () => {
18937
- return "dpl_embedded";
18938
- }, "getDeploymentId");
18939
- return { queue, createQueueHandler, getDeploymentId };
19238
+ });
19239
+ }
19240
+ };
18940
19241
  }
18941
- __name(createQueue, "createQueue");
18942
- // ../world-local/dist/storage.js
18943
- var import_node_path2 = __toESM(require("node:path"), 1);
18944
- // ../world-local/dist/fs.js
18945
- var import_node_fs = require("node:fs");
18946
- var import_node_path = __toESM(require("node:path"), 1);
18947
- var ulid3 = monotonicFactory(() => Math.random());
18948
- var Ulid = external_exports.string().ulid();
18949
- function ulidToDate(maybeUlid) {
18950
- const ulid4 = Ulid.safeParse(maybeUlid);
18951
- if (!ulid4.success) {
18952
- return null;
18953
- }
18954
- return new Date(decodeTime(ulid4.data));
19242
+ __name(createStreamer, "createStreamer");
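createStreamer persists every chunk (plus a final EOF marker) to disk and also fans it out to in-process listeners, so readFromStream first replays whatever is already on disk and then keeps following live writes until the stream is closed. A minimal sketch, with a placeholder stream name and data directory:

const streamer = createStreamer("/tmp/workflow-data");
await streamer.writeToStream("strm_demo", "hello ");
await streamer.writeToStream("strm_demo", "world");
await streamer.closeStream("strm_demo");
// Replays the persisted chunks in write order and ends when the EOF chunk is reached.
const readable = await streamer.readFromStream("strm_demo");
for await (const chunk of readable) {
  console.log(new TextDecoder().decode(chunk)); // "hello " then "world"
}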
19243
+ // ../world-local/dist/index.js
19244
+ function createEmbeddedWorld({ dataDir, port }) {
19245
+ const dir = dataDir ?? config.value.dataDir;
19246
+ const queuePort = port ?? config.value.port;
19247
+ return {
19248
+ ...createQueue(queuePort),
19249
+ ...createStorage(dir),
19250
+ ...createStreamer(dir)
19251
+ };
18955
19252
  }
18956
- __name(ulidToDate, "ulidToDate");
18957
- async function ensureDir(dirPath) {
18958
- try {
18959
- await import_node_fs.promises.mkdir(dirPath, { recursive: true });
19253
+ __name(createEmbeddedWorld, "createEmbeddedWorld");
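createEmbeddedWorld merges the queue, filesystem storage, and streamer factories into one object, so the run lifecycle helpers defined above are available directly on it. A hedged sketch of that lifecycle, with placeholder directory, port, and field values (only the option and method names come from the code above):

const world = createEmbeddedWorld({ dataDir: "/tmp/workflow-data", port: 3000 });
const run = await world.runs.create({
  deploymentId: "dpl_example",   // placeholder
  workflowName: "demo-workflow", // placeholder
  executionContext: {},
  input: []
});
await world.runs.update(run.runId, { status: "running" }); // also stamps startedAt
const cancelled = await world.runs.cancel(run.runId);      // update() stamps completedAt
console.log(cancelled.status);                             // "cancelled"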
19254
+ // ../../node_modules/.pnpm/mixpart@0.0.5-alpha.1/node_modules/mixpart/dist/index.mjs
19255
+ var MultipartParseError = class extends Error {
19256
+ static {
19257
+ __name(this, "MultipartParseError");
18960
19258
  }
18961
- catch (_error) {
19259
+ constructor(message) {
19260
+ super(message);
19261
+ this.name = "MultipartParseError";
18962
19262
  }
19263
+ };
19264
+ function createSearch(pattern) {
19265
+ const needle = new TextEncoder().encode(pattern);
19266
+ return (haystack, start2 = 0) => Buffer.prototype.indexOf.call(haystack, needle, start2);
18963
19267
  }
18964
- __name(ensureDir, "ensureDir");
18965
- async function writeJSON(filePath, data, opts) {
18966
- return write(filePath, JSON.stringify(data, null, 2), opts);
18967
- }
18968
- __name(writeJSON, "writeJSON");
18969
- async function write(filePath, data, opts) {
18970
- if (!opts?.overwrite) {
18971
- try {
18972
- await import_node_fs.promises.access(filePath);
18973
- throw new WorkflowAPIError(`File ${filePath} already exists and 'overwrite' is false`, { status: 409 });
18974
- }
18975
- catch (error45) {
18976
- if (error45.code !== "ENOENT") {
18977
- throw error45;
19268
+ __name(createSearch, "createSearch");
19269
+ function createPartialTailSearch(pattern) {
19270
+ const needle = new TextEncoder().encode(pattern);
19271
+ const byteIndexes = {};
19272
+ for (let i = 0; i < needle.length; ++i) {
19273
+ const byte = needle[i];
19274
+ if (byteIndexes[byte] === void 0)
19275
+ byteIndexes[byte] = [];
19276
+ byteIndexes[byte].push(i);
19277
+ }
19278
+ return function (haystack) {
19279
+ const haystackEnd = haystack.length - 1;
19280
+ if (haystack[haystackEnd] in byteIndexes) {
19281
+ const indexes = byteIndexes[haystack[haystackEnd]];
19282
+ for (let i = indexes.length - 1; i >= 0; --i) {
19283
+ for (let j = indexes[i], k = haystackEnd; j >= 0 && haystack[k] === needle[j]; --j, --k) {
19284
+ if (j === 0)
19285
+ return k;
19286
+ }
18978
19287
  }
18979
19288
  }
18980
- }
18981
- const tempPath = `${filePath}.tmp.${ulid3()}`;
18982
- try {
18983
- await ensureDir(import_node_path.default.dirname(filePath));
18984
- await import_node_fs.promises.writeFile(tempPath, data);
18985
- await import_node_fs.promises.rename(tempPath, filePath);
18986
- }
18987
- catch (error45) {
18988
- await import_node_fs.promises.unlink(tempPath).catch(() => {
18989
- });
18990
- throw error45;
18991
- }
19289
+ return -1;
19290
+ };
18992
19291
  }
18993
- __name(write, "write");
18994
- async function readJSON(filePath, decoder) {
18995
- try {
18996
- const content = await import_node_fs.promises.readFile(filePath, "utf-8");
18997
- return decoder.parse(JSON.parse(content));
18998
- }
18999
- catch (error45) {
19000
- if (error45.code === "ENOENT")
19001
- return null;
19002
- throw error45;
19292
+ __name(createPartialTailSearch, "createPartialTailSearch");
19293
+ function parseHeaders(headerBytes) {
19294
+ const headerText = new TextDecoder("iso-8859-1").decode(headerBytes);
19295
+ const lines = headerText.trim().split(/\r?\n/);
19296
+ const headerInit = [];
19297
+ for (const line of lines) {
19298
+ const colonIndex = line.indexOf(":");
19299
+ if (colonIndex > 0) {
19300
+ const name = line.slice(0, colonIndex).trim();
19301
+ const value = line.slice(colonIndex + 1).trim();
19302
+ headerInit.push([name, value]);
19303
+ }
19003
19304
  }
19305
+ return new Headers(headerInit);
19004
19306
  }
19005
- __name(readJSON, "readJSON");
19006
- async function readBuffer(filePath) {
19007
- const content = await import_node_fs.promises.readFile(filePath);
19008
- return content;
19009
- }
19010
- __name(readBuffer, "readBuffer");
19011
- async function deleteJSON(filePath) {
19012
- try {
19013
- await import_node_fs.promises.unlink(filePath);
19014
- }
19015
- catch (error45) {
19016
- if (error45.code !== "ENOENT")
19017
- throw error45;
19307
+ __name(parseHeaders, "parseHeaders");
19308
+ function extractBoundary(contentType) {
19309
+ const boundaryMatch = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
19310
+ if (!boundaryMatch) {
19311
+ throw new MultipartParseError("No boundary found in Content-Type header");
19018
19312
  }
19313
+ return boundaryMatch[1] ?? boundaryMatch[2];
19019
19314
  }
19020
- __name(deleteJSON, "deleteJSON");
19021
- async function listJSONFiles(dirPath) {
19022
- try {
19023
- const files = await import_node_fs.promises.readdir(dirPath);
19024
- return files.filter((f) => f.endsWith(".json")).map((f) => f.replace(".json", ""));
19315
+ __name(extractBoundary, "extractBoundary");
19316
+ var AsyncMessageQueue = class {
19317
+ static {
19318
+ __name(this, "AsyncMessageQueue");
19025
19319
  }
19026
- catch (error45) {
19027
- if (error45.code === "ENOENT")
19028
- return [];
19029
- throw error45;
19320
+ queue = [];
19321
+ waiters = [];
19322
+ finished = false;
19323
+ cancelled = false;
19324
+ error = null;
19325
+ /**
19326
+ * Producer: Enqueue a message for consumption
19327
+ */
19328
+ enqueue(message) {
19329
+ if (this.finished || this.cancelled)
19330
+ return;
19331
+ if (this.waiters.length > 0) {
19332
+ const waiter = this.waiters.shift();
19333
+ waiter.resolve(message);
19334
+ }
19335
+ else {
19336
+ this.queue.push(message);
19337
+ }
19030
19338
  }
19031
- }
19032
- __name(listJSONFiles, "listJSONFiles");
19033
- function parseCursor(cursor) {
19034
- if (!cursor)
19035
- return null;
19036
- const parts = cursor.split("|");
19037
- return {
19038
- timestamp: new Date(parts[0]),
19039
- id: parts[1] || null
19040
- };
19041
- }
19042
- __name(parseCursor, "parseCursor");
19043
- function createCursor(timestamp, id) {
19044
- return id ? `${timestamp.toISOString()}|${id}` : timestamp.toISOString();
19045
- }
19046
- __name(createCursor, "createCursor");
19047
- async function paginatedFileSystemQuery(config3) {
19048
- const { directory, schema, filePrefix, filter, sortOrder = "desc", limit = 20, cursor, getCreatedAt, getId } = config3;
19049
- const fileIds = await listJSONFiles(directory);
19050
- const relevantFileIds = filePrefix ? fileIds.filter((fileId) => fileId.startsWith(filePrefix)) : fileIds;
19051
- const parsedCursor = parseCursor(cursor);
19052
- let candidateFileIds = relevantFileIds;
19053
- if (parsedCursor) {
19054
- candidateFileIds = relevantFileIds.filter((fileId) => {
19055
- const filenameDate = getCreatedAt(`${fileId}.json`);
19056
- if (filenameDate) {
19057
- const cursorTime = parsedCursor.timestamp.getTime();
19058
- const fileTime = filenameDate.getTime();
19059
- if (parsedCursor.id) {
19060
- return sortOrder === "desc" ? fileTime <= cursorTime : fileTime >= cursorTime;
19061
- }
19062
- else {
19063
- return sortOrder === "desc" ? fileTime < cursorTime : fileTime > cursorTime;
19064
- }
19339
+ /**
19340
+ * Producer: Signal completion (with optional error)
19341
+ */
19342
+ finish(error45) {
19343
+ if (this.finished)
19344
+ return;
19345
+ this.finished = true;
19346
+ this.error = error45 || null;
19347
+ while (this.waiters.length > 0) {
19348
+ const waiter = this.waiters.shift();
19349
+ if (error45) {
19350
+ waiter.reject(error45);
19065
19351
  }
19066
- return false;
19067
- });
19068
- }
19069
- else {
19070
- candidateFileIds = relevantFileIds.filter((fileId) => {
19071
- return getCreatedAt(`${fileId}.json`) !== null;
19072
- });
19073
- }
19074
- const validItems = [];
19075
- for (const fileId of candidateFileIds) {
19076
- const filePath = import_node_path.default.join(directory, `${fileId}.json`);
19077
- const item = await readJSON(filePath, schema);
19078
- if (item) {
19079
- if (filter && !filter(item))
19080
- continue;
19081
- if (parsedCursor) {
19082
- const itemTime = item.createdAt.getTime();
19083
- const cursorTime = parsedCursor.timestamp.getTime();
19084
- if (sortOrder === "desc") {
19085
- if (itemTime > cursorTime)
19086
- continue;
19087
- if (itemTime === cursorTime && parsedCursor.id && getId) {
19088
- const itemId = getId(item);
19089
- if (itemId >= parsedCursor.id)
19090
- continue;
19091
- }
19092
- }
19093
- else {
19094
- if (itemTime < cursorTime)
19095
- continue;
19096
- if (itemTime === cursorTime && parsedCursor.id && getId) {
19097
- const itemId = getId(item);
19098
- if (itemId <= parsedCursor.id)
19099
- continue;
19100
- }
19101
- }
19352
+ else {
19353
+ waiter.resolve(null);
19102
19354
  }
19103
- validItems.push(item);
19104
19355
  }
19105
19356
  }
19106
- validItems.sort((a, b) => {
19107
- const aTime = a.createdAt.getTime();
19108
- const bTime = b.createdAt.getTime();
19109
- const timeComparison = sortOrder === "asc" ? aTime - bTime : bTime - aTime;
19110
- if (timeComparison === 0 && getId) {
19111
- const aId = getId(a);
19112
- const bId = getId(b);
19113
- return sortOrder === "asc" ? aId.localeCompare(bId) : bId.localeCompare(aId);
19357
+ /**
19358
+ * Consumer: Cancel the queue (stops accepting new messages and notifies waiters)
19359
+ */
19360
+ cancel() {
19361
+ if (this.cancelled || this.finished)
19362
+ return;
19363
+ this.cancelled = true;
19364
+ while (this.waiters.length > 0) {
19365
+ const waiter = this.waiters.shift();
19366
+ waiter.resolve(null);
19114
19367
  }
19115
- return timeComparison;
19116
- });
19117
- const hasMore = validItems.length > limit;
19118
- const items = hasMore ? validItems.slice(0, limit) : validItems;
19119
- const nextCursor = hasMore && items.length > 0 ? createCursor(items[items.length - 1].createdAt, getId?.(items[items.length - 1])) : null;
19120
- return {
19121
- data: items,
19122
- cursor: nextCursor,
19123
- hasMore
19124
- };
19125
- }
19126
- __name(paginatedFileSystemQuery, "paginatedFileSystemQuery");
19127
- // ../world-local/dist/storage.js
19128
- var monotonicUlid = monotonicFactory(() => Math.random());
19129
- function filterRunData(run, resolveData) {
19130
- if (resolveData === "none") {
19131
- return {
19132
- ...run,
19133
- input: [],
19134
- output: void 0
19135
- };
19136
19368
  }
19137
- return run;
19138
- }
19139
- __name(filterRunData, "filterRunData");
19140
- function filterStepData(step, resolveData) {
19141
- if (resolveData === "none") {
19142
- return {
19143
- ...step,
19144
- input: [],
19145
- output: void 0
19146
- };
19369
+ /**
19370
+ * Consumer: Dequeue next message (or null if finished/cancelled)
19371
+ */
19372
+ async dequeue() {
19373
+ if (this.queue.length > 0) {
19374
+ return this.queue.shift();
19375
+ }
19376
+ if (this.finished || this.cancelled) {
19377
+ if (this.error)
19378
+ throw this.error;
19379
+ return null;
19380
+ }
19381
+ return new Promise((resolve, reject) => {
19382
+ this.waiters.push({ resolve, reject });
19383
+ });
19147
19384
  }
19148
- return step;
19149
- }
19150
- __name(filterStepData, "filterStepData");
19151
- function filterEventData(event, resolveData) {
19152
- if (resolveData === "none") {
19153
- const { eventData: _eventData, ...rest } = event;
19154
- return rest;
19385
+ /**
19386
+ * Check if the queue is in a terminal state
19387
+ */
19388
+ get isTerminal() {
19389
+ return this.finished || this.cancelled;
19155
19390
  }
19156
- return event;
19157
- }
19158
- __name(filterEventData, "filterEventData");
19159
- function filterHookData(hook, resolveData) {
19160
- if (resolveData === "none") {
19161
- const { metadata: _metadata, ...rest } = hook;
19162
- return rest;
19391
+ };
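The producer/consumer comments above describe the handshake: enqueue hands a message straight to a waiting dequeue (or buffers it), while finish and cancel resolve any pending waiters with null, or reject them if a terminal error was recorded. An illustrative sketch of that contract (the message shape is arbitrary; the class is internal to this bundle):

const queue = new AsyncMessageQueue();
const pending = queue.dequeue();        // nothing buffered yet, so this stays pending
queue.enqueue({ headers: new Headers(), payload: null });
console.log(await pending);             // resolves with the enqueued message
queue.finish();
console.log(await queue.dequeue());     // null once the queue is finished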
19392
+ async function* parseMultipartStream2(response, options) {
19393
+ if (!response.body) {
19394
+ throw new MultipartParseError("Response body is null");
19163
19395
  }
19164
- return hook;
19165
- }
19166
- __name(filterHookData, "filterHookData");
19167
- var getObjectCreatedAt = /* @__PURE__ */ __name((idPrefix) => (filename) => {
19168
- const replaceRegex = new RegExp(`^${idPrefix}_`, "g");
19169
- const dashIndex = filename.indexOf("-");
19170
- if (dashIndex === -1) {
19171
- const ulid5 = filename.replace(/\.json$/, "").replace(replaceRegex, "");
19172
- return ulidToDate(ulid5);
19396
+ const contentType = response.headers.get("content-type");
19397
+ if (!contentType) {
19398
+ throw new MultipartParseError("Missing Content-Type header");
19173
19399
  }
19174
- if (idPrefix === "step") {
19175
- const runId = filename.substring(0, dashIndex);
19176
- const ulid5 = runId.replace(/^wrun_/, "");
19177
- return ulidToDate(ulid5);
19400
+ const boundary = extractBoundary(contentType);
19401
+ const parser = new StreamingMultipartParser(boundary, options);
19402
+ yield* parser.parseStream(response.body);
19403
+ }
19404
+ __name(parseMultipartStream2, "parseMultipartStream");
19405
+ var StreamingMultipartParser = class {
19406
+ static {
19407
+ __name(this, "StreamingMultipartParser");
19178
19408
  }
19179
- const id = filename.substring(dashIndex + 1).replace(/\.json$/, "");
19180
- const ulid4 = id.replace(replaceRegex, "");
19181
- return ulidToDate(ulid4);
19182
- }, "getObjectCreatedAt");
19183
- function createStorage(basedir) {
19184
- return {
19185
- runs: {
19186
- async create(data) {
19187
- const runId = `wrun_${monotonicUlid()}`;
19188
- const now = /* @__PURE__ */ new Date();
19189
- const result = {
19190
- runId,
19191
- deploymentId: data.deploymentId,
19192
- status: "pending",
19193
- workflowName: data.workflowName,
19194
- executionContext: data.executionContext,
19195
- input: data.input || [],
19196
- output: void 0,
19197
- error: void 0,
19198
- errorCode: void 0,
19199
- startedAt: void 0,
19200
- completedAt: void 0,
19201
- createdAt: now,
19202
- updatedAt: now
19203
- };
19204
- const runPath = import_node_path2.default.join(basedir, "runs", `${runId}.json`);
19205
- await writeJSON(runPath, result);
19206
- return result;
19207
- },
19208
- async get(id, params) {
19209
- const runPath = import_node_path2.default.join(basedir, "runs", `${id}.json`);
19210
- const run = await readJSON(runPath, WorkflowRunSchema);
19211
- if (!run) {
19212
- throw new WorkflowRunNotFoundError(id);
19213
- }
19214
- const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19215
- return filterRunData(run, resolveData);
19216
- },
19217
- async update(id, data) {
19218
- const runPath = import_node_path2.default.join(basedir, "runs", `${id}.json`);
19219
- const run = await readJSON(runPath, WorkflowRunSchema);
19220
- if (!run) {
19221
- throw new WorkflowRunNotFoundError(id);
19222
- }
19223
- const now = /* @__PURE__ */ new Date();
19224
- const updatedRun = {
19225
- ...run,
19226
- ...data,
19227
- updatedAt: now
19228
- };
19229
- if (data.status === "running" && !updatedRun.startedAt) {
19230
- updatedRun.startedAt = now;
19231
- }
19232
- if (data.status === "completed" || data.status === "failed" || data.status === "cancelled") {
19233
- updatedRun.completedAt = now;
19234
- }
19235
- await writeJSON(runPath, updatedRun, { overwrite: true });
19236
- return updatedRun;
19237
- },
19238
- async list(params) {
19239
- const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19240
- const result = await paginatedFileSystemQuery({
19241
- directory: import_node_path2.default.join(basedir, "runs"),
19242
- schema: WorkflowRunSchema,
19243
- filter: /* @__PURE__ */ __name((run) => {
19244
- if (params?.workflowName && run.workflowName !== params.workflowName) {
19245
- return false;
19246
- }
19247
- if (params?.status && run.status !== params.status) {
19248
- return false;
19249
- }
19250
- return true;
19251
- }, "filter"),
19252
- sortOrder: params?.pagination?.sortOrder ?? "desc",
19253
- limit: params?.pagination?.limit,
19254
- cursor: params?.pagination?.cursor,
19255
- getCreatedAt: getObjectCreatedAt("wrun"),
19256
- getId: /* @__PURE__ */ __name((run) => run.runId, "getId")
19257
- });
19258
- if (resolveData === "none") {
19259
- return {
19260
- ...result,
19261
- data: result.data.map((run) => ({
19262
- ...run,
19263
- input: [],
19264
- output: void 0
19265
- }))
19266
- };
19267
- }
19268
- return result;
19269
- },
19270
- async cancel(id, params) {
19271
- const run = await this.update(id, { status: "cancelled" });
19272
- const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19273
- return filterRunData(run, resolveData);
19274
- },
19275
- async pause(id, params) {
19276
- const run = await this.update(id, { status: "paused" });
19277
- const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19278
- return filterRunData(run, resolveData);
19279
- },
19280
- async resume(id, params) {
19281
- const run = await this.update(id, { status: "running" });
19282
- const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19283
- return filterRunData(run, resolveData);
19409
+ boundary;
19410
+ findOpeningBoundary;
19411
+ openingBoundaryLength;
19412
+ findBoundary;
19413
+ findPartialTailBoundary;
19414
+ boundaryLength;
19415
+ findDoubleNewline;
19416
+ // Safety limits
19417
+ maxHeaderSize;
19418
+ maxBoundaryBuffer;
19419
+ state = 0;
19420
+ buffer = null;
19421
+ currentHeaders = new Headers();
19422
+ currentPayloadController = null;
19423
+ constructor(boundary, options = {}) {
19424
+ this.boundary = boundary;
19425
+ this.findOpeningBoundary = createSearch(`--${boundary}`);
19426
+ this.openingBoundaryLength = 2 + boundary.length;
19427
+ this.findBoundary = createSearch(`\r
19428
+ --${boundary}`);
19429
+ this.findPartialTailBoundary = createPartialTailSearch(`\r
19430
+ --${boundary}`);
19431
+ this.boundaryLength = 4 + boundary.length;
19432
+ this.findDoubleNewline = createSearch("\r\n\r\n");
19433
+ this.maxHeaderSize = options.maxHeaderSize ?? 65536;
19434
+ this.maxBoundaryBuffer = options.maxBoundaryBuffer ?? 8192;
19435
+ }
19436
+ async *parseStream(stream) {
19437
+ const reader = stream.getReader();
19438
+ const messageQueue = new AsyncMessageQueue();
19439
+ const producer = this.startProducer(reader, messageQueue);
19440
+ try {
19441
+ yield* this.consumeMessages(messageQueue);
19442
+ }
19443
+ finally {
19444
+ messageQueue.cancel();
19445
+ this.closeCurrentPayload();
19446
+ try {
19447
+ await reader.cancel();
19284
19448
  }
19285
- },
19286
- steps: {
19287
- async create(runId, data) {
19288
- const now = /* @__PURE__ */ new Date();
19289
- const result = {
19290
- runId,
19291
- stepId: data.stepId,
19292
- stepName: data.stepName,
19293
- status: "pending",
19294
- input: data.input,
19295
- output: void 0,
19296
- error: void 0,
19297
- errorCode: void 0,
19298
- attempt: 0,
19299
- startedAt: void 0,
19300
- completedAt: void 0,
19301
- createdAt: now,
19302
- updatedAt: now
19303
- };
19304
- const compositeKey = `${runId}-${data.stepId}`;
19305
- const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
19306
- await writeJSON(stepPath, result);
19307
- return result;
19308
- },
19309
- async get(runId, stepId, params) {
19310
- if (!runId) {
19311
- const fileIds = await listJSONFiles(import_node_path2.default.join(basedir, "steps"));
19312
- const fileId = fileIds.find((fileId2) => fileId2.endsWith(`-${stepId}`));
19313
- if (!fileId) {
19314
- throw new Error(`Step ${stepId} not found`);
19315
- }
19316
- runId = fileId.split("-")[0];
19317
- }
19318
- const compositeKey = `${runId}-${stepId}`;
19319
- const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
19320
- const step = await readJSON(stepPath, StepSchema);
19321
- if (!step) {
19322
- throw new Error(`Step ${stepId} in run ${runId} not found`);
19323
- }
19324
- const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19325
- return filterStepData(step, resolveData);
19326
- },
19327
- async update(runId, stepId, data) {
19328
- const compositeKey = `${runId}-${stepId}`;
19329
- const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
19330
- const step = await readJSON(stepPath, StepSchema);
19331
- if (!step) {
19332
- throw new Error(`Step ${stepId} in run ${runId} not found`);
19333
- }
19334
- const now = /* @__PURE__ */ new Date();
19335
- const updatedStep = {
19336
- ...step,
19337
- ...data,
19338
- updatedAt: now
19339
- };
19340
- if (data.status === "running" && !updatedStep.startedAt) {
19341
- updatedStep.startedAt = now;
19342
- }
19343
- if (data.status === "completed" || data.status === "failed") {
19344
- updatedStep.completedAt = now;
19345
- }
19346
- await writeJSON(stepPath, updatedStep, { overwrite: true });
19347
- return updatedStep;
19348
- },
19349
- async list(params) {
19350
- const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19351
- const result = await paginatedFileSystemQuery({
19352
- directory: import_node_path2.default.join(basedir, "steps"),
19353
- schema: StepSchema,
19354
- filePrefix: `${params.runId}-`,
19355
- sortOrder: params.pagination?.sortOrder ?? "desc",
19356
- limit: params.pagination?.limit,
19357
- cursor: params.pagination?.cursor,
19358
- getCreatedAt: getObjectCreatedAt("step"),
19359
- getId: /* @__PURE__ */ __name((step) => step.stepId, "getId")
19360
- });
19361
- if (resolveData === "none") {
19362
- return {
19363
- ...result,
19364
- data: result.data.map((step) => ({
19365
- ...step,
19366
- input: [],
19367
- output: void 0
19368
- }))
19369
- };
19370
- }
19371
- return result;
19449
+ catch (error45) {
19372
19450
  }
19373
- },
19374
- // Events - filesystem-backed storage
19375
- events: {
19376
- async create(runId, data, params) {
19377
- const eventId = `evnt_${monotonicUlid()}`;
19378
- const now = /* @__PURE__ */ new Date();
19379
- const result = {
19380
- ...data,
19381
- runId,
19382
- eventId,
19383
- createdAt: now
19384
- };
19385
- const compositeKey = `${runId}-${eventId}`;
19386
- const eventPath = import_node_path2.default.join(basedir, "events", `${compositeKey}.json`);
19387
- await writeJSON(eventPath, result);
19388
- const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19389
- return filterEventData(result, resolveData);
19390
- },
19391
- async list(params) {
19392
- const { runId } = params;
19393
- const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19394
- const result = await paginatedFileSystemQuery({
19395
- directory: import_node_path2.default.join(basedir, "events"),
19396
- schema: EventSchema,
19397
- filePrefix: `${runId}-`,
19398
- // Events in chronological order (oldest first) by default,
19399
- // different from the default for other list calls.
19400
- sortOrder: params.pagination?.sortOrder ?? "asc",
19401
- limit: params.pagination?.limit,
19402
- cursor: params.pagination?.cursor,
19403
- getCreatedAt: getObjectCreatedAt("evnt"),
19404
- getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
19405
- });
19406
- if (resolveData === "none") {
19407
- return {
19408
- ...result,
19409
- data: result.data.map((event) => {
19410
- const { eventData: _eventData, ...rest } = event;
19411
- return rest;
19412
- })
19413
- };
19414
- }
19415
- return result;
19416
- },
19417
- async listByCorrelationId(params) {
19418
- const correlationId = params.correlationId;
19419
- const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
19420
- const result = await paginatedFileSystemQuery({
19421
- directory: import_node_path2.default.join(basedir, "events"),
19422
- schema: EventSchema,
19423
- // No filePrefix - search all events
19424
- filter: /* @__PURE__ */ __name((event) => event.correlationId === correlationId, "filter"),
19425
- // Events in chronological order (oldest first) by default,
19426
- // different from the default for other list calls.
19427
- sortOrder: params.pagination?.sortOrder ?? "asc",
19428
- limit: params.pagination?.limit,
19429
- cursor: params.pagination?.cursor,
19430
- getCreatedAt: getObjectCreatedAt("evnt"),
19431
- getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
19432
- });
19433
- if (resolveData === "none") {
19434
- return {
19435
- ...result,
19436
- data: result.data.map((event) => {
19437
- const { eventData: _eventData, ...rest } = event;
19438
- return rest;
19439
- })
19440
- };
19451
+ await producer;
19452
+ }
19453
+ }
19454
+ /**
19455
+ * Producer: Continuously read chunks and parse messages
19456
+ */
19457
+ async startProducer(reader, messageQueue) {
19458
+ try {
19459
+ while (!messageQueue.isTerminal) {
19460
+ let result;
19461
+ try {
19462
+ result = await reader.read();
19441
19463
  }
19442
- return result;
19443
- }
19444
- },
19445
- // Hooks
19446
- hooks: {
19447
- async create(runId, data) {
19448
- const now = /* @__PURE__ */ new Date();
19449
- const result = {
19450
- runId,
19451
- hookId: data.hookId,
19452
- token: data.token,
19453
- metadata: data.metadata,
19454
- ownerId: "embedded-owner",
19455
- projectId: "embedded-project",
19456
- environment: "embedded",
19457
- createdAt: now
19458
- };
19459
- const hookPath = import_node_path2.default.join(basedir, "hooks", `${data.hookId}.json`);
19460
- await writeJSON(hookPath, result);
19461
- return result;
19462
- },
19463
- async get(hookId, params) {
19464
- const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
19465
- const hook = await readJSON(hookPath, HookSchema);
19466
- if (!hook) {
19467
- throw new Error(`Hook ${hookId} not found`);
19464
+ catch (readError) {
19465
+ if (readError instanceof Error && (readError.name === "AbortError" || readError.constructor.name === "AbortError" || readError.name === "TimeoutError" || readError.constructor.name === "TimeoutError")) {
19466
+ break;
19467
+ }
19468
+ throw readError;
19468
19469
  }
19469
- const resolveData = params?.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
19470
- return filterHookData(hook, resolveData);
19471
- },
19472
- async getByToken(token) {
19473
- const hooksDir = import_node_path2.default.join(basedir, "hooks");
19474
- const files = await listJSONFiles(hooksDir);
19475
- for (const file2 of files) {
19476
- const hookPath = import_node_path2.default.join(hooksDir, `${file2}.json`);
19477
- const hook = await readJSON(hookPath, HookSchema);
19478
- if (hook && hook.token === token) {
19479
- return hook;
19470
+ const { done, value } = result;
19471
+ if (done) {
19472
+ if (this.buffer !== null && this.buffer.length > 0) {
19473
+ const messages2 = this.write(new Uint8Array(0));
19474
+ for (const message of messages2) {
19475
+ if (messageQueue.isTerminal)
19476
+ break;
19477
+ messageQueue.enqueue(message);
19478
+ }
19480
19479
  }
19481
- }
19482
- throw new Error(`Hook with token ${token} not found`);
19483
- },
19484
- async list(params) {
19485
- const hooksDir = import_node_path2.default.join(basedir, "hooks");
19486
- const resolveData = params.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
19487
- const result = await paginatedFileSystemQuery({
19488
- directory: hooksDir,
19489
- schema: HookSchema,
19490
- sortOrder: params.pagination?.sortOrder,
19491
- limit: params.pagination?.limit,
19492
- cursor: params.pagination?.cursor,
19493
- filePrefix: void 0,
19494
- // Hooks don't have ULIDs, so we can't optimize by filename
19495
- filter: /* @__PURE__ */ __name((hook) => {
19496
- if (params.runId && hook.runId !== params.runId) {
19497
- return false;
19480
+ if (this.state !== 4) {
19481
+ if (this.state === 0) {
19482
+ throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
19498
19483
  }
19499
- return true;
19500
- }, "filter"),
19501
- getCreatedAt: /* @__PURE__ */ __name(() => {
19502
- return /* @__PURE__ */ new Date(0);
19503
- }, "getCreatedAt"),
19504
- getId: /* @__PURE__ */ __name((hook) => hook.hookId, "getId")
19505
- });
19506
- return {
19507
- ...result,
19508
- data: result.data.map((hook) => filterHookData(hook, resolveData))
19509
- };
19510
- },
19511
- async dispose(hookId) {
19512
- const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
19513
- const hook = await readJSON(hookPath, HookSchema);
19514
- if (!hook) {
19515
- throw new Error(`Hook ${hookId} not found`);
19484
+ throw new MultipartParseError("Unexpected end of stream");
19485
+ }
19486
+ break;
19487
+ }
19488
+ if (!(value instanceof Uint8Array)) {
19489
+ throw new MultipartParseError(`Invalid chunk type: expected Uint8Array, got ${typeof value}`);
19490
+ }
19491
+ const messages = this.write(value);
19492
+ for (const message of messages) {
19493
+ if (messageQueue.isTerminal)
19494
+ break;
19495
+ messageQueue.enqueue(message);
19516
19496
  }
19517
- await deleteJSON(hookPath);
19518
- return hook;
19497
+ }
19498
+ if (!messageQueue.isTerminal) {
19499
+ messageQueue.finish();
19519
19500
  }
19520
19501
  }
19521
- };
19522
- }
19523
- __name(createStorage, "createStorage");
19524
- // ../world-local/dist/streamer.js
19525
- var import_node_events = require("node:events");
19526
- var import_node_path3 = __toESM(require("node:path"), 1);
19527
- var monotonicUlid2 = monotonicFactory(() => Math.random());
19528
- function serializeChunk(chunk) {
19529
- const eofByte = Buffer.from([chunk.eof ? 1 : 0]);
19530
- return Buffer.concat([eofByte, chunk.chunk]);
19531
- }
19532
- __name(serializeChunk, "serializeChunk");
19533
- function deserializeChunk(serialized) {
19534
- const eof = serialized[0] === 1;
19535
- const chunk = serialized.subarray(1);
19536
- return { eof, chunk };
19537
- }
19538
- __name(deserializeChunk, "deserializeChunk");
19539
- function createStreamer(basedir) {
19540
- const streamEmitter = new import_node_events.EventEmitter();
19541
- return {
19542
- async writeToStream(name, chunk) {
19543
- const chunkId = `strm_${monotonicUlid2()}`;
19544
- if (typeof chunk === "string") {
19545
- chunk = new TextEncoder().encode(chunk);
19502
+ catch (error45) {
19503
+ this.closeCurrentPayload(error45);
19504
+ if (!messageQueue.isTerminal) {
19505
+ messageQueue.finish(error45);
19546
19506
  }
19547
- const serialized = serializeChunk({
19548
- chunk: Buffer.from(chunk),
19549
- eof: false
19550
- });
19551
- const chunkPath = import_node_path3.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.json`);
19552
- await write(chunkPath, serialized);
19553
- const chunkData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk instanceof Buffer ? new Uint8Array(chunk) : chunk;
19554
- streamEmitter.emit(`chunk:${name}`, {
19555
- streamName: name,
19556
- chunkData,
19557
- chunkId
19558
- });
19559
- },
19560
- async closeStream(name) {
19561
- const chunkId = `strm_${monotonicUlid2()}`;
19562
- const chunkPath = import_node_path3.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.json`);
19563
- await write(chunkPath, serializeChunk({ chunk: Buffer.from([]), eof: true }));
19564
- streamEmitter.emit(`close:${name}`, { streamName: name });
19565
- },
19566
- async readFromStream(name, startIndex = 0) {
19567
- const chunksDir = import_node_path3.default.join(basedir, "streams", "chunks");
19568
- let removeListeners = /* @__PURE__ */ __name(() => {
19569
- }, "removeListeners");
19570
- return new ReadableStream({
19571
- async start(controller) {
19572
- const deliveredChunkIds = /* @__PURE__ */ new Set();
19573
- const bufferedEventChunks = [];
19574
- let isReadingFromDisk = true;
19575
- const chunkListener = /* @__PURE__ */ __name((event) => {
19576
- deliveredChunkIds.add(event.chunkId);
19577
- if (isReadingFromDisk) {
19578
- bufferedEventChunks.push({
19579
- chunkId: event.chunkId,
19580
- chunkData: event.chunkData
19581
- });
19582
- }
19583
- else {
19584
- controller.enqueue(event.chunkData);
19585
- }
19586
- }, "chunkListener");
19587
- const closeListener = /* @__PURE__ */ __name(() => {
19588
- streamEmitter.off(`chunk:${name}`, chunkListener);
19589
- streamEmitter.off(`close:${name}`, closeListener);
19590
- controller.close();
19591
- }, "closeListener");
19592
- removeListeners = closeListener;
19593
- streamEmitter.on(`chunk:${name}`, chunkListener);
19594
- streamEmitter.on(`close:${name}`, closeListener);
19595
- const files = await listJSONFiles(chunksDir);
19596
- const chunkFiles = files.filter((file2) => file2.startsWith(`${name}-`)).sort();
19597
- let isComplete = false;
19598
- for (let i = startIndex; i < chunkFiles.length; i++) {
19599
- const file2 = chunkFiles[i];
19600
- const chunkId = file2.substring(name.length + 1);
19601
- if (deliveredChunkIds.has(chunkId)) {
19602
- continue;
19603
- }
19604
- const chunk = deserializeChunk(await readBuffer(import_node_path3.default.join(chunksDir, `${file2}.json`)));
19605
- if (chunk?.eof === true) {
19606
- isComplete = true;
19607
- break;
19608
- }
19609
- if (chunk.chunk.byteLength) {
19610
- controller.enqueue(chunk.chunk);
19507
+ }
19508
+ finally {
19509
+ try {
19510
+ reader.releaseLock();
19511
+ }
19512
+ catch (error45) {
19513
+ }
19514
+ }
19515
+ }
19516
+ /**
19517
+ * Consumer: Yield messages from the queue
19518
+ */
19519
+ async *consumeMessages(messageQueue) {
19520
+ while (true) {
19521
+ const message = await messageQueue.dequeue();
19522
+ if (message === null) {
19523
+ break;
19524
+ }
19525
+ yield message;
19526
+ }
19527
+ }
19528
+ /**
19529
+ * Process a chunk of data through the state machine and return any complete messages.
19530
+ *
19531
+ * Returns an array because a single chunk can contain multiple complete messages
19532
+ * when small messages with headers + body + boundary all fit in one network chunk.
19533
+ * All messages must be captured and queued to maintain proper message ordering.
19534
+ */
19535
+ write(chunk) {
19536
+ const newMessages = [];
19537
+ if (this.state === 4) {
19538
+ throw new MultipartParseError("Unexpected data after end of stream");
19539
+ }
19540
+ let index = 0;
19541
+ let chunkLength = chunk.length;
19542
+ if (this.buffer !== null) {
19543
+ const newSize = this.buffer.length + chunkLength;
19544
+ const maxAllowedSize = this.state === 2 ? this.maxHeaderSize : this.maxBoundaryBuffer;
19545
+ if (newSize > maxAllowedSize) {
19546
+ throw new MultipartParseError(`Buffer size limit exceeded: ${newSize} bytes > ${maxAllowedSize} bytes. This may indicate malformed multipart data with ${this.state === 2 ? "oversized headers" : "invalid boundaries"}.`);
19547
+ }
19548
+ const newChunk = new Uint8Array(newSize);
19549
+ newChunk.set(this.buffer, 0);
19550
+ newChunk.set(chunk, this.buffer.length);
19551
+ chunk = newChunk;
19552
+ chunkLength = chunk.length;
19553
+ this.buffer = null;
19554
+ }
19555
+ if (chunkLength === 0 && this.state === 0) {
19556
+ throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
19557
+ }
19558
+ while (true) {
19559
+ if (this.state === 3) {
19560
+ if (chunkLength - index < this.boundaryLength) {
19561
+ const remainingData = chunk.subarray(index);
19562
+ if (remainingData.length > this.maxBoundaryBuffer) {
19563
+ throw new MultipartParseError(`Boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
19564
+ }
19565
+ this.buffer = remainingData;
19566
+ break;
19567
+ }
19568
+ const boundaryIndex = this.findBoundary(chunk, index);
19569
+ if (boundaryIndex === -1) {
19570
+ const partialTailIndex = this.findPartialTailBoundary(chunk);
19571
+ if (partialTailIndex === -1) {
19572
+ this.writeBody(index === 0 ? chunk : chunk.subarray(index));
19573
+ }
19574
+ else {
19575
+ this.writeBody(chunk.subarray(index, partialTailIndex));
19576
+ const partialBoundary = chunk.subarray(partialTailIndex);
19577
+ if (partialBoundary.length > this.maxBoundaryBuffer) {
19578
+ throw new MultipartParseError(`Partial boundary too large: ${partialBoundary.length} > ${this.maxBoundaryBuffer}`);
19611
19579
  }
19580
+ this.buffer = partialBoundary;
19581
+ }
19582
+ break;
19583
+ }
19584
+ this.writeBody(chunk.subarray(index, boundaryIndex));
19585
+ this.finishMessage();
19586
+ index = boundaryIndex + this.boundaryLength;
19587
+ this.state = 1;
19588
+ }
19589
+ if (this.state === 1) {
19590
+ if (chunkLength - index < 2) {
19591
+ const remainingData = chunk.subarray(index);
19592
+ if (remainingData.length > this.maxBoundaryBuffer) {
19593
+ throw new MultipartParseError(`After-boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
19594
+ }
19595
+ this.buffer = remainingData;
19596
+ break;
19597
+ }
19598
+ if (chunk[index] === 45 && chunk[index + 1] === 45) {
19599
+ this.state = 4;
19600
+ break;
19601
+ }
19602
+ if (chunk[index] === 13 && chunk[index + 1] === 10) {
19603
+ index += 2;
19604
+ }
19605
+ else if (chunk[index] === 10) {
19606
+ index += 1;
19607
+ }
19608
+ else {
19609
+ throw new MultipartParseError(`Invalid character after boundary: expected CRLF or LF, got 0x${chunk[index].toString(16)}`);
19610
+ }
19611
+ this.state = 2;
19612
+ }
19613
+ if (this.state === 2) {
19614
+ if (chunkLength - index < 4) {
19615
+ const remainingData = chunk.subarray(index);
19616
+ if (remainingData.length > this.maxHeaderSize) {
19617
+ throw new MultipartParseError(`Header buffer limit exceeded: ${remainingData.length} > ${this.maxHeaderSize}`);
19612
19618
  }
19613
- isReadingFromDisk = false;
19614
- bufferedEventChunks.sort((a, b) => a.chunkId.localeCompare(b.chunkId));
19615
- for (const buffered of bufferedEventChunks) {
19616
- controller.enqueue(buffered.chunkData);
19619
+ this.buffer = remainingData;
19620
+ break;
19621
+ }
19622
+ let headerEndIndex = this.findDoubleNewline(chunk, index);
19623
+ let headerEndOffset = 4;
19624
+ if (headerEndIndex === -1) {
19625
+ const lfDoubleNewline = createSearch("\n\n");
19626
+ headerEndIndex = lfDoubleNewline(chunk, index);
19627
+ headerEndOffset = 2;
19628
+ }
19629
+ if (headerEndIndex === -1) {
19630
+ const headerData = chunk.subarray(index);
19631
+ if (headerData.length > this.maxHeaderSize) {
19632
+ throw new MultipartParseError(`Headers too large: ${headerData.length} > ${this.maxHeaderSize} bytes`);
19617
19633
  }
19618
- if (isComplete) {
19619
- removeListeners();
19620
- controller.close();
19621
- return;
19634
+ this.buffer = headerData;
19635
+ break;
19636
+ }
19637
+ const headerBytes = chunk.subarray(index, headerEndIndex);
19638
+ this.currentHeaders = parseHeaders(headerBytes);
19639
+ const message = this.createStreamingMessage();
19640
+ newMessages.push(message);
19641
+ index = headerEndIndex + headerEndOffset;
19642
+ this.state = 3;
19643
+ continue;
19644
+ }
19645
+ if (this.state === 0) {
19646
+ if (chunkLength < this.openingBoundaryLength) {
19647
+ if (chunk.length > this.maxBoundaryBuffer) {
19648
+ throw new MultipartParseError(`Initial chunk too large for boundary detection: ${chunk.length} > ${this.maxBoundaryBuffer}`);
19622
19649
  }
19623
- },
19624
- cancel() {
19625
- removeListeners();
19650
+ this.buffer = chunk;
19651
+ break;
19626
19652
  }
19627
- });
19653
+ const boundaryIndex = this.findOpeningBoundary(chunk);
19654
+ if (boundaryIndex !== 0) {
19655
+ throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
19656
+ }
19657
+ index = this.openingBoundaryLength;
19658
+ this.state = 1;
19659
+ }
19628
19660
  }
19629
- };
19630
- }
19631
- __name(createStreamer, "createStreamer");
19632
- // ../world-local/dist/index.js
19633
- function createEmbeddedWorld({ dataDir, port }) {
19634
- const dir = dataDir ?? config.value.dataDir;
19635
- const queuePort = port ?? config.value.port;
19636
- return {
19637
- ...createQueue(queuePort),
19638
- ...createStorage(dir),
19639
- ...createStreamer(dir)
19640
- };
19641
- }
19642
- __name(createEmbeddedWorld, "createEmbeddedWorld");
19643
- // ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.24/node_modules/@vercel/queue/dist/index.mjs
19661
+ return newMessages;
19662
+ }
19663
+ createStreamingMessage() {
19664
+ const headers = new Headers(this.currentHeaders);
19665
+ const payload = new ReadableStream({
19666
+ start: /* @__PURE__ */ __name((controller) => {
19667
+ this.currentPayloadController = controller;
19668
+ }, "start")
19669
+ });
19670
+ this.currentHeaders = new Headers();
19671
+ return {
19672
+ headers,
19673
+ payload
19674
+ };
19675
+ }
19676
+ writeBody(chunk) {
19677
+ if (this.currentPayloadController) {
19678
+ this.currentPayloadController.enqueue(chunk);
19679
+ }
19680
+ }
19681
+ finishMessage() {
19682
+ if (this.currentPayloadController) {
19683
+ this.currentPayloadController.close();
19684
+ this.currentPayloadController = null;
19685
+ }
19686
+ }
19687
+ /**
19688
+ * Close current payload controller if open (used during cleanup)
19689
+ * If an error is provided, forwards it to the payload consumer
19690
+ */
19691
+ closeCurrentPayload(error45) {
19692
+ if (this.currentPayloadController) {
19693
+ try {
19694
+ if (error45) {
19695
+ this.currentPayloadController.error(error45);
19696
+ }
19697
+ else {
19698
+ this.currentPayloadController.close();
19699
+ }
19700
+ }
19701
+ catch (controllerError) {
19702
+ }
19703
+ this.currentPayloadController = null;
19704
+ }
19705
+ }
19706
+ };
19707
+ // ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.28/node_modules/@vercel/queue/dist/index.mjs
19644
19708
  var import_oidc = __toESM(require_dist(), 1);
19645
19709
  async function streamToBuffer2(stream) {
19646
19710
  let totalLength = 0;
@@ -19815,15 +19879,22 @@ var QueueClient = class {
19815
19879
  }
19816
19880
  baseUrl;
19817
19881
  basePath;
19882
+ customHeaders = {};
19818
19883
  token;
19819
19884
  /**
19820
19885
  * Create a new Vercel Queue Service client
19821
19886
  * @param options Client configuration options
19822
19887
  */
19823
19888
  constructor(options = {}) {
19824
- this.baseUrl = options.baseUrl || "https://vercel-queue.com";
19825
- this.basePath = options.basePath || "/api/v2/messages";
19826
- this.token = options.token;
19889
+ this.baseUrl = options.baseUrl || process.env.VERCEL_QUEUE_BASE_URL || "https://vercel-queue.com";
19890
+ this.basePath = options.basePath || process.env.VERCEL_QUEUE_BASE_PATH || "/api/v2/messages";
19891
+ this.token = options.token || process.env.VERCEL_QUEUE_TOKEN;
19892
+ const VERCEL_QUEUE_HEADER_PREFIX = "VERCEL_QUEUE_HEADER_";
19893
+ this.customHeaders = Object.fromEntries(Object.entries(process.env).filter(([key]) => key.startsWith(VERCEL_QUEUE_HEADER_PREFIX)).map(([key, value]) => [
19894
+ // This allows headers to use dashes independent of shell used
19895
+ key.replace(VERCEL_QUEUE_HEADER_PREFIX, "").replaceAll("__", "-"),
19896
+ value || ""
19897
+ ]));
19827
19898
  }
19828
19899
  async getToken() {
19829
19900
  if (this.token) {
@@ -19850,7 +19921,8 @@ var QueueClient = class {
19850
19921
  const headers = new Headers({
19851
19922
  Authorization: `Bearer ${await this.getToken()}`,
19852
19923
  "Vqs-Queue-Name": queueName,
19853
- "Content-Type": transport.contentType
19924
+ "Content-Type": transport.contentType,
19925
+ ...this.customHeaders
19854
19926
  });
19855
19927
  const deploymentId = options.deploymentId || process.env.VERCEL_DEPLOYMENT_ID;
19856
19928
  if (deploymentId) {
@@ -19865,8 +19937,8 @@ var QueueClient = class {
19865
19937
  const body = transport.serialize(payload);
19866
19938
  const response = await fetch(`${this.baseUrl}${this.basePath}`, {
19867
19939
  method: "POST",
19868
- headers,
19869
- body
19940
+ body,
19941
+ headers
19870
19942
  });
19871
19943
  if (!response.ok) {
19872
19944
  if (response.status === 400) {
@@ -19912,7 +19984,8 @@ var QueueClient = class {
19912
19984
  Authorization: `Bearer ${await this.getToken()}`,
19913
19985
  "Vqs-Queue-Name": queueName,
19914
19986
  "Vqs-Consumer-Group": consumerGroup,
19915
- Accept: "multipart/mixed"
19987
+ Accept: "multipart/mixed",
19988
+ ...this.customHeaders
19916
19989
  });
19917
19990
  if (visibilityTimeoutSeconds !== void 0) {
19918
19991
  headers.set("Vqs-Visibility-Timeout", visibilityTimeoutSeconds.toString());
@@ -19952,7 +20025,7 @@ var QueueClient = class {
19952
20025
  }
19953
20026
  throw new Error(`Failed to receive messages: ${response.status} ${response.statusText}`);
19954
20027
  }
19955
- for await (const multipartMessage of parseMultipartStream(response)) {
20028
+ for await (const multipartMessage of parseMultipartStream2(response)) {
19956
20029
  try {
19957
20030
  const parsedHeaders = parseQueueHeaders(multipartMessage.headers);
19958
20031
  if (!parsedHeaders) {
@@ -19979,7 +20052,8 @@ var QueueClient = class {
19979
20052
  Authorization: `Bearer ${await this.getToken()}`,
19980
20053
  "Vqs-Queue-Name": queueName,
19981
20054
  "Vqs-Consumer-Group": consumerGroup,
19982
- Accept: "multipart/mixed"
20055
+ Accept: "multipart/mixed",
20056
+ ...this.customHeaders
19983
20057
  });
19984
20058
  if (visibilityTimeoutSeconds !== void 0) {
19985
20059
  headers.set("Vqs-Visibility-Timeout", visibilityTimeoutSeconds.toString());
@@ -20037,7 +20111,7 @@ var QueueClient = class {
20037
20111
  throw new Error("Transport is required when skipPayload is not true");
20038
20112
  }
20039
20113
  try {
20040
- for await (const multipartMessage of parseMultipartStream(response)) {
20114
+ for await (const multipartMessage of parseMultipartStream2(response)) {
20041
20115
  try {
20042
20116
  const parsedHeaders = parseQueueHeaders(multipartMessage.headers);
20043
20117
  if (!parsedHeaders) {
@@ -20086,7 +20160,8 @@ var QueueClient = class {
20086
20160
  Authorization: `Bearer ${await this.getToken()}`,
20087
20161
  "Vqs-Queue-Name": queueName,
20088
20162
  "Vqs-Consumer-Group": consumerGroup,
20089
- "Vqs-Ticket": ticket
20163
+ "Vqs-Ticket": ticket,
20164
+ ...this.customHeaders
20090
20165
  })
20091
20166
  });
20092
20167
  if (!response.ok) {
@@ -20132,7 +20207,8 @@ var QueueClient = class {
20132
20207
  "Vqs-Queue-Name": queueName,
20133
20208
  "Vqs-Consumer-Group": consumerGroup,
20134
20209
  "Vqs-Ticket": ticket,
20135
- "Vqs-Visibility-Timeout": visibilityTimeoutSeconds.toString()
20210
+ "Vqs-Visibility-Timeout": visibilityTimeoutSeconds.toString(),
20211
+ ...this.customHeaders
20136
20212
  })
20137
20213
  });
20138
20214
  if (!response.ok) {
@@ -20277,7 +20353,6 @@ function handleCallback(handlers) {
20277
20353
  __name(handleCallback, "handleCallback");
20278
20354
  var devRouteHandlers2 = /* @__PURE__ */ new Map();
20279
20355
  var wildcardRouteHandlers2 = /* @__PURE__ */ new Map();
20280
- var routeHandlerKeys = /* @__PURE__ */ new WeakMap();
20281
20356
  function cleanupDeadRefs(key, refs) {
20282
20357
  const aliveRefs = refs.filter((ref) => ref.deref() !== void 0);
20283
20358
  if (aliveRefs.length === 0) {
@@ -20293,45 +20368,14 @@ function isDevMode() {
20293
20368
  }
20294
20369
  __name(isDevMode, "isDevMode");
20295
20370
  function registerDevRouteHandler(routeHandler, handlers) {
20296
- const existingKeys = routeHandlerKeys.get(routeHandler);
20297
- if (existingKeys) {
20298
- const newKeys = /* @__PURE__ */ new Set();
20299
- for (const topicName in handlers) {
20300
- for (const consumerGroup in handlers[topicName]) {
20301
- newKeys.add(`${topicName}:${consumerGroup}`);
20302
- }
20303
- }
20304
- for (const key of existingKeys) {
20305
- if (!newKeys.has(key)) {
20306
- const [topicPattern] = key.split(":");
20307
- if (topicPattern.includes("*")) {
20308
- const refs = wildcardRouteHandlers2.get(key);
20309
- if (refs) {
20310
- const filteredRefs = refs.filter((ref) => ref.deref() !== routeHandler);
20311
- if (filteredRefs.length === 0) {
20312
- wildcardRouteHandlers2.delete(key);
20313
- }
20314
- else {
20315
- wildcardRouteHandlers2.set(key, filteredRefs);
20316
- }
20317
- }
20318
- }
20319
- else {
20320
- devRouteHandlers2.delete(key);
20321
- }
20322
- }
20323
- }
20324
- }
20325
- const keys = /* @__PURE__ */ new Set();
20326
20371
  for (const topicName in handlers) {
20327
20372
  for (const consumerGroup in handlers[topicName]) {
20328
20373
  const key = `${topicName}:${consumerGroup}`;
20329
- keys.add(key);
20330
20374
  if (topicName.includes("*")) {
20331
- const weakRef = new WeakRef(routeHandler);
20332
20375
  const existing = wildcardRouteHandlers2.get(key) || [];
20333
20376
  cleanupDeadRefs(key, existing);
20334
20377
  const cleanedRefs = wildcardRouteHandlers2.get(key) || [];
20378
+ const weakRef = new WeakRef(routeHandler);
20335
20379
  cleanedRefs.push(weakRef);
20336
20380
  wildcardRouteHandlers2.set(key, cleanedRefs);
20337
20381
  }
@@ -20343,7 +20387,6 @@ function registerDevRouteHandler(routeHandler, handlers) {
20343
20387
  }
20344
20388
  }
20345
20389
  }
20346
- routeHandlerKeys.set(routeHandler, keys);
20347
20390
  }
20348
20391
  __name(registerDevRouteHandler, "registerDevRouteHandler");
20349
20392
  function findRouteHandlersForTopic(topicName) {
@@ -20733,45 +20776,6 @@ async function send(topicName, payload, options) {
20733
20776
  return { messageId: result.messageId };
20734
20777
  }
20735
20778
  __name(send, "send");
20736
- // ../world-vercel/dist/queue.js
20737
- var MessageWrapper = object({
20738
- payload: QueuePayloadSchema,
20739
- queueName: ValidQueueName
20740
- });
20741
- function createQueue2() {
20742
- const queue = /* @__PURE__ */ __name(async (queueName, x, opts) => {
20743
- const encoded = MessageWrapper.encode({
20744
- payload: x,
20745
- queueName
20746
- });
20747
- const sanitizedQueueName = queueName.replace(/[^A-Za-z0-9-_]/g, "-");
20748
- const { messageId } = await send(sanitizedQueueName, encoded, opts);
20749
- return { messageId: MessageId.parse(messageId) };
20750
- }, "queue");
20751
- const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
20752
- return handleCallback({
20753
- [`${prefix}*`]: {
20754
- default: /* @__PURE__ */ __name((body, meta) => {
20755
- const { payload, queueName } = MessageWrapper.parse(body);
20756
- return handler(payload, {
20757
- queueName,
20758
- messageId: MessageId.parse(meta.messageId),
20759
- attempt: meta.deliveryCount
20760
- });
20761
- }, "default")
20762
- }
20763
- });
20764
- }, "createQueueHandler");
20765
- const getDeploymentId = /* @__PURE__ */ __name(async () => {
20766
- const deploymentId = process.env.VERCEL_DEPLOYMENT_ID;
20767
- if (!deploymentId) {
20768
- throw new Error("VERCEL_DEPLOYMENT_ID environment variable is not set");
20769
- }
20770
- return deploymentId;
20771
- }, "getDeploymentId");
20772
- return { queue, createQueueHandler, getDeploymentId };
20773
- }
20774
- __name(createQueue2, "createQueue");
20775
20779
  // ../world-vercel/dist/utils.js
20776
20780
  var import_oidc2 = __toESM(require_dist(), 1);
20777
20781
  var DEFAULT_RESOLVE_DATA_OPTION2 = "all";
@@ -20782,7 +20786,15 @@ function dateToStringReplacer(_key, value) {
20782
20786
  return value;
20783
20787
  }
20784
20788
  __name(dateToStringReplacer, "dateToStringReplacer");
20785
- async function getHttpConfig(config3) {
20789
+ var getHttpUrl = /* @__PURE__ */ __name((config3) => {
20790
+ const projectConfig = config3?.projectConfig;
20791
+ const defaultUrl = "https://vercel-workflow.com/api";
20792
+ const defaultProxyUrl = "https://api.vercel.com/v1/workflow";
20793
+ const usingProxy = Boolean(config3?.baseUrl || projectConfig?.projectId && projectConfig?.teamId);
20794
+ const baseUrl = config3?.baseUrl || (usingProxy ? defaultProxyUrl : defaultUrl);
20795
+ return { baseUrl, usingProxy };
20796
+ }, "getHttpUrl");
20797
+ var getHeaders = /* @__PURE__ */ __name((config3) => {
20786
20798
  const projectConfig = config3?.projectConfig;
20787
20799
  const headers = new Headers(config3?.headers);
20788
20800
  if (projectConfig) {
@@ -20794,16 +20806,16 @@ async function getHttpConfig(config3) {
20794
20806
  headers.set("x-vercel-team-id", projectConfig.teamId);
20795
20807
  }
20796
20808
  }
20809
+ return headers;
20810
+ }, "getHeaders");
20811
+ async function getHttpConfig(config3) {
20812
+ const headers = getHeaders(config3);
20797
20813
  const token = config3?.token ?? await (0, import_oidc2.getVercelOidcToken)();
20798
20814
  if (token) {
20799
20815
  headers.set("Authorization", `Bearer ${token}`);
20800
20816
  }
20801
- let baseUrl = config3?.baseUrl;
20802
- if (!baseUrl) {
20803
- const shouldUseProxy = projectConfig?.projectId && projectConfig?.teamId;
20804
- baseUrl = shouldUseProxy ? `https://api.vercel.com/v1/workflow` : "https://vercel-workflow.com/api";
20805
- }
20806
- return { baseUrl, headers };
20817
+ const { baseUrl, usingProxy } = getHttpUrl(config3);
20818
+ return { baseUrl, headers, usingProxy };
20807
20819
  }
20808
20820
  __name(getHttpConfig, "getHttpConfig");
20809
20821
  async function makeRequest({ endpoint, options = {}, config: config3 = {}, schema }) {
@@ -20835,6 +20847,68 @@ curl -X ${options.method} ${stringifiedHeaders} "${url2}"`);
20835
20847
  }
20836
20848
  }
20837
20849
  __name(makeRequest, "makeRequest");
20850
+ // ../world-vercel/dist/queue.js
20851
+ var MessageWrapper = object({
20852
+ payload: QueuePayloadSchema,
20853
+ queueName: ValidQueueName
20854
+ });
20855
+ var VERCEL_QUEUE_MAX_VISIBILITY = 82800;
20856
+ function createQueue2(config3) {
20857
+ const { baseUrl, usingProxy } = getHttpUrl(config3);
20858
+ const headers = getHeaders(config3);
20859
+ if (usingProxy) {
20860
+ process.env.VERCEL_QUEUE_BASE_URL = `${baseUrl}`;
20861
+ process.env.VERCEL_QUEUE_BASE_PATH = "/queues/v2/messages";
20862
+ if (config3?.token) {
20863
+ process.env.VERCEL_QUEUE_TOKEN = config3.token;
20864
+ }
20865
+ if (headers) {
20866
+ headers.forEach((value, key) => {
20867
+ const sanitizedKey = key.replaceAll("-", "__");
20868
+ process.env[`VERCEL_QUEUE_HEADER_${sanitizedKey}`] = value;
20869
+ });
20870
+ }
20871
+ }
20872
+ const queue = /* @__PURE__ */ __name(async (queueName, x, opts) => {
20873
+ const encoded = MessageWrapper.encode({
20874
+ payload: x,
20875
+ queueName
20876
+ });
20877
+ const sanitizedQueueName = queueName.replace(/[^A-Za-z0-9-_]/g, "-");
20878
+ const { messageId } = await send(sanitizedQueueName, encoded, opts);
20879
+ return { messageId: MessageId.parse(messageId) };
20880
+ }, "queue");
20881
+ const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
20882
+ return handleCallback({
20883
+ [`${prefix}*`]: {
20884
+ default: /* @__PURE__ */ __name(async (body, meta) => {
20885
+ const { payload, queueName } = MessageWrapper.parse(body);
20886
+ const result = await handler(payload, {
20887
+ queueName,
20888
+ messageId: MessageId.parse(meta.messageId),
20889
+ attempt: meta.deliveryCount
20890
+ });
20891
+ if (typeof result?.timeoutSeconds === "number") {
20892
+ const adjustedTimeoutSeconds = Math.min(result.timeoutSeconds, VERCEL_QUEUE_MAX_VISIBILITY);
20893
+ if (adjustedTimeoutSeconds !== result.timeoutSeconds) {
20894
+ result.timeoutSeconds = adjustedTimeoutSeconds;
20895
+ }
20896
+ }
20897
+ return result;
20898
+ }, "default")
20899
+ }
20900
+ });
20901
+ }, "createQueueHandler");
20902
+ const getDeploymentId = /* @__PURE__ */ __name(async () => {
20903
+ const deploymentId = process.env.VERCEL_DEPLOYMENT_ID;
20904
+ if (!deploymentId) {
20905
+ throw new Error("VERCEL_DEPLOYMENT_ID environment variable is not set");
20906
+ }
20907
+ return deploymentId;
20908
+ }, "getDeploymentId");
20909
+ return { queue, createQueueHandler, getDeploymentId };
20910
+ }
20911
+ __name(createQueue2, "createQueue");
20838
20912
  // ../world-vercel/dist/events.js
20839
20913
  function filterEventData2(event, resolveData) {
20840
20914
  if (resolveData === "none") {
@@ -21336,7 +21410,7 @@ __name(createStreamer2, "createStreamer");
21336
21410
  // ../world-vercel/dist/index.js
21337
21411
  function createVercelWorld(config3) {
21338
21412
  return {
21339
- ...createQueue2(),
21413
+ ...createQueue2(config3),
21340
21414
  ...createStorage2(config3),
21341
21415
  ...createStreamer2(config3)
21342
21416
  };
@@ -21921,6 +21995,7 @@ __name(stringify_primitive, "stringify_primitive");
21921
21995
  // ../core/dist/symbols.js
21922
21996
  var WORKFLOW_USE_STEP = Symbol.for("WORKFLOW_USE_STEP");
21923
21997
  var WORKFLOW_CREATE_HOOK = Symbol.for("WORKFLOW_CREATE_HOOK");
21998
+ var WORKFLOW_SLEEP = Symbol.for("WORKFLOW_SLEEP");
21924
21999
  var WORKFLOW_CONTEXT = Symbol.for("WORKFLOW_CONTEXT");
21925
22000
  var WORKFLOW_GET_STREAM_ID = Symbol.for("WORKFLOW_GET_STREAM_ID");
21926
22001
  var STREAM_NAME_SYMBOL = Symbol.for("WORKFLOW_STREAM_NAME");
@@ -21960,6 +22035,7 @@ function getCommonReducers(global2 = globalThis) {
21960
22035
  const viewToBase64 = /* @__PURE__ */ __name((value) => abToBase64(value.buffer, value.byteOffset, value.byteLength), "viewToBase64");
21961
22036
  return {
21962
22037
  ArrayBuffer: /* @__PURE__ */ __name((value) => value instanceof global2.ArrayBuffer && abToBase64(value, 0, value.byteLength), "ArrayBuffer"),
22038
+ BigInt: /* @__PURE__ */ __name((value) => typeof value === "bigint" && value.toString(), "BigInt"),
21963
22039
  BigInt64Array: /* @__PURE__ */ __name((value) => value instanceof global2.BigInt64Array && viewToBase64(value), "BigInt64Array"),
21964
22040
  BigUint64Array: /* @__PURE__ */ __name((value) => value instanceof global2.BigUint64Array && viewToBase64(value), "BigUint64Array"),
21965
22041
  Date: /* @__PURE__ */ __name((value) => {
@@ -22079,6 +22155,7 @@ function getCommonRevivers(global2 = globalThis) {
22079
22155
  __name(reviveArrayBuffer, "reviveArrayBuffer");
22080
22156
  return {
22081
22157
  ArrayBuffer: reviveArrayBuffer,
22158
+ BigInt: /* @__PURE__ */ __name((value) => global2.BigInt(value), "BigInt"),
22082
22159
  BigInt64Array: /* @__PURE__ */ __name((value) => {
22083
22160
  const ab = reviveArrayBuffer(value);
22084
22161
  return new global2.BigInt64Array(ab);
@@ -22266,30 +22343,6 @@ var HookFound = SemanticConvention("workflow.hook.found");
22266
22343
  var WebhookHandlersTriggered = SemanticConvention("webhook.handlers.triggered");
22267
22344
  // ../core/dist/types.js
22268
22345
  var import_node_util = require("node:util");
22269
- function getConstructorName(obj) {
22270
- if (obj === null || obj === void 0) {
22271
- return null;
22272
- }
22273
- const ctor = obj.constructor;
22274
- if (!ctor || ctor.name === "Object") {
22275
- return null;
22276
- }
22277
- return ctor.name;
22278
- }
22279
- __name(getConstructorName, "getConstructorName");
22280
- function getConstructorNames(obj) {
22281
- const proto = Object.getPrototypeOf(obj);
22282
- const name = getConstructorName(proto);
22283
- if (name === null) {
22284
- return [];
22285
- }
22286
- return [name, ...getConstructorNames(proto)];
22287
- }
22288
- __name(getConstructorNames, "getConstructorNames");
22289
- function isInstanceOf(v, ctor) {
22290
- return getConstructorNames(v).includes(ctor.name);
22291
- }
22292
- __name(isInstanceOf, "isInstanceOf");
22293
22346
  function getErrorName(v) {
22294
22347
  if (import_node_util.types.isNativeError(v)) {
22295
22348
  return v.name;
@@ -22657,6 +22710,63 @@ function createCreateHook(ctx) {
22657
22710
  }, "createHookImpl");
22658
22711
  }
22659
22712
  __name(createCreateHook, "createCreateHook");
22713
+ // ../core/dist/workflow/sleep.js
22714
+ var import_ms3 = __toESM(require_ms(), 1);
22715
+ function createSleep(ctx) {
22716
+ return /* @__PURE__ */ __name(async function sleepImpl(param) {
22717
+ const { promise: promise2, resolve } = withResolvers();
22718
+ const correlationId = `wait_${ctx.generateUlid()}`;
22719
+ let resumeAt;
22720
+ if (typeof param === "string") {
22721
+ const durationMs = (0, import_ms3.default)(param);
22722
+ if (typeof durationMs !== "number" || durationMs < 0) {
22723
+ throw new Error(`Invalid sleep duration: "${param}". Expected a valid duration string like "1s", "1m", "1h", etc.`);
22724
+ }
22725
+ resumeAt = new Date(Date.now() + durationMs);
22726
+ }
22727
+ else if (param instanceof Date || param && typeof param === "object" && typeof param.getTime === "function") {
22728
+ const dateParam = param instanceof Date ? param : new Date(param.getTime());
22729
+ resumeAt = dateParam;
22730
+ }
22731
+ else {
22732
+ throw new Error(`Invalid sleep parameter. Expected a duration string or Date object.`);
22733
+ }
22734
+ ctx.invocationsQueue.push({
22735
+ type: "wait",
22736
+ correlationId,
22737
+ resumeAt
22738
+ });
22739
+ ctx.eventsConsumer.subscribe((event) => {
22740
+ if (!event) {
22741
+ setTimeout(() => {
22742
+ ctx.onWorkflowError(new WorkflowSuspension(ctx.invocationsQueue, ctx.globalThis));
22743
+ }, 0);
22744
+ return EventConsumerResult.NotConsumed;
22745
+ }
22746
+ if (event?.eventType === "wait_created" && event.correlationId === correlationId) {
22747
+ const waitItem = ctx.invocationsQueue.find((item) => item.type === "wait" && item.correlationId === correlationId);
22748
+ if (waitItem) {
22749
+ waitItem.hasCreatedEvent = true;
22750
+ waitItem.resumeAt = event.eventData.resumeAt;
22751
+ }
22752
+ return EventConsumerResult.Consumed;
22753
+ }
22754
+ if (event?.eventType === "wait_completed" && event.correlationId === correlationId) {
22755
+ const index = ctx.invocationsQueue.findIndex((item) => item.type === "wait" && item.correlationId === correlationId);
22756
+ if (index !== -1) {
22757
+ ctx.invocationsQueue.splice(index, 1);
22758
+ }
22759
+ setTimeout(() => {
22760
+ resolve();
22761
+ }, 0);
22762
+ return EventConsumerResult.Finished;
22763
+ }
22764
+ return EventConsumerResult.NotConsumed;
22765
+ });
22766
+ return promise2;
22767
+ }, "sleepImpl");
22768
+ }
22769
+ __name(createSleep, "createSleep");
22660
22770
  // ../core/dist/workflow.js
22661
22771
  async function runWorkflow(workflowCode2, workflowRun, events) {
22662
22772
  return trace2(`WORKFLOW.run ${workflowRun.workflowName}`, async (span) => {
@@ -22694,8 +22804,10 @@ async function runWorkflow(workflowCode2, workflowRun, events) {
22694
22804
  });
22695
22805
  const useStep = createUseStep(workflowContext);
22696
22806
  const createHook2 = createCreateHook(workflowContext);
22807
+ const sleep = createSleep(workflowContext);
22697
22808
  vmGlobalThis[WORKFLOW_USE_STEP] = useStep;
22698
22809
  vmGlobalThis[WORKFLOW_CREATE_HOOK] = createHook2;
22810
+ vmGlobalThis[WORKFLOW_SLEEP] = sleep;
22699
22811
  vmGlobalThis[WORKFLOW_GET_STREAM_ID] = (namespace) => getWorkflowRunStreamId(workflowRun.runId, namespace);
22700
22812
  const url2 = process.env.VERCEL_URL ? `https://${process.env.VERCEL_URL}` : `http://localhost:${process.env.PORT || 3e3}`;
22701
22813
  const ctx = {
@@ -23128,6 +23240,20 @@ function workflowEntrypoint(workflowCode2) {
23128
23240
  return;
23129
23241
  }
23130
23242
  const events = await getAllWorkflowRunEvents(workflowRun.runId);
23243
+ const now = Date.now();
23244
+ for (const event of events) {
23245
+ if (event.eventType === "wait_created") {
23246
+ const resumeAt = event.eventData.resumeAt;
23247
+ const hasCompleted = events.some((e) => e.eventType === "wait_completed" && e.correlationId === event.correlationId);
23248
+ if (!hasCompleted && now >= resumeAt.getTime()) {
23249
+ const completedEvent = await world.events.create(runId, {
23250
+ eventType: "wait_completed",
23251
+ correlationId: event.correlationId
23252
+ });
23253
+ events.push(completedEvent);
23254
+ }
23255
+ }
23256
+ }
23131
23257
  const result = await runWorkflow(workflowCode2, workflowRun, events);
23132
23258
  await world.runs.update(runId, {
23133
23259
  status: "completed",
@@ -23139,10 +23265,11 @@ function workflowEntrypoint(workflowCode2) {
23139
23265
  });
23140
23266
  }
23141
23267
  catch (err) {
23142
- if (isInstanceOf(err, WorkflowSuspension)) {
23143
- const suspensionMessage = buildWorkflowSuspensionMessage(runId, err.stepCount, err.hookCount);
23268
+ if (WorkflowSuspension.is(err)) {
23269
+ const suspensionMessage = buildWorkflowSuspensionMessage(runId, err.stepCount, err.hookCount, err.waitCount);
23144
23270
  if (suspensionMessage) {
23145
23271
  }
23272
+ let minTimeoutSeconds = null;
23146
23273
  for (const queueItem of err.steps) {
23147
23274
  if (queueItem.type === "step") {
23148
23275
  const ops = [];
@@ -23165,7 +23292,7 @@ function workflowEntrypoint(workflowCode2) {
23165
23292
  });
23166
23293
  }
23167
23294
  catch (err2) {
23168
- if (isInstanceOf(err2, WorkflowAPIError) && err2.status === 409) {
23295
+ if (WorkflowAPIError.is(err2) && err2.status === 409) {
23169
23296
  console.warn(`Step "${queueItem.stepName}" with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err2.message}`);
23170
23297
  continue;
23171
23298
  }
@@ -23186,7 +23313,7 @@ function workflowEntrypoint(workflowCode2) {
23186
23313
  });
23187
23314
  }
23188
23315
  catch (err2) {
23189
- if (isInstanceOf(err2, WorkflowAPIError)) {
23316
+ if (WorkflowAPIError.is(err2)) {
23190
23317
  if (err2.status === 409) {
23191
23318
  console.warn(`Hook with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err2.message}`);
23192
23319
  continue;
@@ -23199,11 +23326,41 @@ function workflowEntrypoint(workflowCode2) {
23199
23326
  throw err2;
23200
23327
  }
23201
23328
  }
23329
+ else if (queueItem.type === "wait") {
23330
+ try {
23331
+ if (!queueItem.hasCreatedEvent) {
23332
+ await world.events.create(runId, {
23333
+ eventType: "wait_created",
23334
+ correlationId: queueItem.correlationId,
23335
+ eventData: {
23336
+ resumeAt: queueItem.resumeAt
23337
+ }
23338
+ });
23339
+ }
23340
+ const now = Date.now();
23341
+ const resumeAtMs = queueItem.resumeAt.getTime();
23342
+ const delayMs = Math.max(1e3, resumeAtMs - now);
23343
+ const timeoutSeconds = Math.ceil(delayMs / 1e3);
23344
+ if (minTimeoutSeconds === null || timeoutSeconds < minTimeoutSeconds) {
23345
+ minTimeoutSeconds = timeoutSeconds;
23346
+ }
23347
+ }
23348
+ catch (err2) {
23349
+ if (WorkflowAPIError.is(err2) && err2.status === 409) {
23350
+ console.warn(`Wait with correlation ID "${queueItem.correlationId}" already exists, skipping: ${err2.message}`);
23351
+ continue;
23352
+ }
23353
+ throw err2;
23354
+ }
23355
+ }
23202
23356
  }
23203
23357
  span?.setAttributes({
23204
23358
  ...WorkflowRunStatus("pending_steps"),
23205
23359
  ...WorkflowStepsCreated(err.steps.length)
23206
23360
  });
23361
+ if (minTimeoutSeconds !== null) {
23362
+ return { timeoutSeconds: minTimeoutSeconds };
23363
+ }
23207
23364
  }
23208
23365
  else {
23209
23366
  const errorName = getErrorName(err);