@workflow/world-testing 4.0.1-beta.5 → 4.0.1-beta.6
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/dist/.well-known/workflow/v1/flow.js +1444 -1440
- package/dist/.well-known/workflow/v1/flow.js.map +1 -1
- package/dist/.well-known/workflow/v1/manifest.debug.json +5 -5
- package/dist/.well-known/workflow/v1/step.js +807 -799
- package/dist/.well-known/workflow/v1/step.js.map +1 -1
- package/package.json +3 -3
@@ -5124,503 +5124,50 @@ var config = once2(() => {
 });
 // ../world-local/dist/queue.js
 var import_promises = require("node:timers/promises");
-// ../../node_modules/.pnpm
-
-
-
-
-
-
-
-
-
-
-
-return (haystack, start2 = 0) => Buffer.prototype.indexOf.call(haystack, needle, start2);
-}
-__name(createSearch, "createSearch");
-function createPartialTailSearch(pattern) {
-const needle = new TextEncoder().encode(pattern);
-const byteIndexes = {};
-for (let i = 0; i < needle.length; ++i) {
-const byte = needle[i];
-if (byteIndexes[byte] === void 0)
-byteIndexes[byte] = [];
-byteIndexes[byte].push(i);
-}
-return function (haystack) {
-const haystackEnd = haystack.length - 1;
-if (haystack[haystackEnd] in byteIndexes) {
-const indexes = byteIndexes[haystack[haystackEnd]];
-for (let i = indexes.length - 1; i >= 0; --i) {
-for (let j = indexes[i], k = haystackEnd; j >= 0 && haystack[k] === needle[j]; --j, --k) {
-if (j === 0)
-return k;
-}
-}
-}
-return -1;
-};
-}
-__name(createPartialTailSearch, "createPartialTailSearch");
-function parseHeaders(headerBytes) {
-const headerText = new TextDecoder("iso-8859-1").decode(headerBytes);
-const lines = headerText.trim().split(/\r?\n/);
-const headerInit = [];
-for (const line of lines) {
-const colonIndex = line.indexOf(":");
-if (colonIndex > 0) {
-const name = line.slice(0, colonIndex).trim();
-const value = line.slice(colonIndex + 1).trim();
-headerInit.push([name, value]);
+// ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.23/node_modules/@vercel/queue/dist/index.mjs
+async function streamToBuffer(stream) {
+let totalLength = 0;
+const reader = stream.getReader();
+const chunks = [];
+try {
+while (true) {
+const { done, value } = await reader.read();
+if (done)
+break;
+chunks.push(value);
+totalLength += value.length;
 }
 }
-
-
-__name(parseHeaders, "parseHeaders");
-function extractBoundary(contentType) {
-const boundaryMatch = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
-if (!boundaryMatch) {
-throw new MultipartParseError("No boundary found in Content-Type header");
+finally {
+reader.releaseLock();
 }
-return
+return Buffer.concat(chunks, totalLength);
 }
-__name(
-var
+__name(streamToBuffer, "streamToBuffer");
+var JsonTransport = class {
 static {
-__name(this, "
-}
-queue = [];
-waiters = [];
-finished = false;
-cancelled = false;
-error = null;
-/**
-* Producer: Enqueue a message for consumption
-*/
-enqueue(message) {
-if (this.finished || this.cancelled)
-return;
-if (this.waiters.length > 0) {
-const waiter = this.waiters.shift();
-waiter.resolve(message);
-}
-else {
-this.queue.push(message);
-}
-}
-/**
-* Producer: Signal completion (with optional error)
-*/
-finish(error45) {
-if (this.finished)
-return;
-this.finished = true;
-this.error = error45 || null;
-while (this.waiters.length > 0) {
-const waiter = this.waiters.shift();
-if (error45) {
-waiter.reject(error45);
-}
-else {
-waiter.resolve(null);
-}
-}
+__name(this, "JsonTransport");
 }
-
-
-
-
-
-
-this.cancelled = true;
-while (this.waiters.length > 0) {
-const waiter = this.waiters.shift();
-waiter.resolve(null);
-}
+contentType = "application/json";
+replacer;
+reviver;
+constructor(options = {}) {
+this.replacer = options.replacer;
+this.reviver = options.reviver;
 }
-
-
-*/
-async dequeue() {
-if (this.queue.length > 0) {
-return this.queue.shift();
-}
-if (this.finished || this.cancelled) {
-if (this.error)
-throw this.error;
-return null;
-}
-return new Promise((resolve, reject) => {
-this.waiters.push({ resolve, reject });
-});
+serialize(value) {
+return Buffer.from(JSON.stringify(value, this.replacer), "utf8");
 }
-
-
-
-get isTerminal() {
-return this.finished || this.cancelled;
+async deserialize(stream) {
+const buffer = await streamToBuffer(stream);
+return JSON.parse(buffer.toString("utf8"), this.reviver);
 }
 };
-
-
-
-
-
-if (!contentType) {
-throw new MultipartParseError("Missing Content-Type header");
-}
-const boundary = extractBoundary(contentType);
-const parser = new StreamingMultipartParser(boundary, options);
-yield* parser.parseStream(response.body);
-}
-__name(parseMultipartStream, "parseMultipartStream");
-var StreamingMultipartParser = class {
-static {
-__name(this, "StreamingMultipartParser");
-}
-boundary;
-findOpeningBoundary;
-openingBoundaryLength;
-findBoundary;
-findPartialTailBoundary;
-boundaryLength;
-findDoubleNewline;
-// Safety limits
-maxHeaderSize;
-maxBoundaryBuffer;
-state = 0;
-buffer = null;
-currentHeaders = new Headers();
-currentPayloadController = null;
-constructor(boundary, options = {}) {
-this.boundary = boundary;
-this.findOpeningBoundary = createSearch(`--${boundary}`);
-this.openingBoundaryLength = 2 + boundary.length;
-this.findBoundary = createSearch(`\r
---${boundary}`);
-this.findPartialTailBoundary = createPartialTailSearch(`\r
---${boundary}`);
-this.boundaryLength = 4 + boundary.length;
-this.findDoubleNewline = createSearch("\r\n\r\n");
-this.maxHeaderSize = options.maxHeaderSize ?? 65536;
-this.maxBoundaryBuffer = options.maxBoundaryBuffer ?? 8192;
-}
-async *parseStream(stream) {
-const reader = stream.getReader();
-const messageQueue = new AsyncMessageQueue();
-const producer = this.startProducer(reader, messageQueue);
-try {
-yield* this.consumeMessages(messageQueue);
-}
-finally {
-messageQueue.cancel();
-this.closeCurrentPayload();
-try {
-await reader.cancel();
-}
-catch (error45) {
-}
-await producer;
-}
-}
-/**
-* Producer: Continuously read chunks and parse messages
-*/
-async startProducer(reader, messageQueue) {
-try {
-while (!messageQueue.isTerminal) {
-let result;
-try {
-result = await reader.read();
-}
-catch (readError) {
-if (readError instanceof Error && (readError.name === "AbortError" || readError.constructor.name === "AbortError" || readError.name === "TimeoutError" || readError.constructor.name === "TimeoutError")) {
-break;
-}
-throw readError;
-}
-const { done, value } = result;
-if (done) {
-if (this.buffer !== null && this.buffer.length > 0) {
-const messages2 = this.write(new Uint8Array(0));
-for (const message of messages2) {
-if (messageQueue.isTerminal)
-break;
-messageQueue.enqueue(message);
-}
-}
-if (this.state !== 4) {
-if (this.state === 0) {
-throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
-}
-throw new MultipartParseError("Unexpected end of stream");
-}
-break;
-}
-if (!(value instanceof Uint8Array)) {
-throw new MultipartParseError(`Invalid chunk type: expected Uint8Array, got ${typeof value}`);
-}
-const messages = this.write(value);
-for (const message of messages) {
-if (messageQueue.isTerminal)
-break;
-messageQueue.enqueue(message);
-}
-}
-if (!messageQueue.isTerminal) {
-messageQueue.finish();
-}
-}
-catch (error45) {
-this.closeCurrentPayload(error45);
-if (!messageQueue.isTerminal) {
-messageQueue.finish(error45);
-}
-}
-finally {
-try {
-reader.releaseLock();
-}
-catch (error45) {
-}
-}
-}
-/**
-* Consumer: Yield messages from the queue
-*/
-async *consumeMessages(messageQueue) {
-while (true) {
-const message = await messageQueue.dequeue();
-if (message === null) {
-break;
-}
-yield message;
-}
-}
-/**
-* Process a chunk of data through the state machine and return any complete messages.
-*
-* Returns an array because a single chunk can contain multiple complete messages
-* when small messages with headers + body + boundary all fit in one network chunk.
-* All messages must be captured and queued to maintain proper message ordering.
-*/
-write(chunk) {
-const newMessages = [];
-if (this.state === 4) {
-throw new MultipartParseError("Unexpected data after end of stream");
-}
-let index = 0;
-let chunkLength = chunk.length;
-if (this.buffer !== null) {
-const newSize = this.buffer.length + chunkLength;
-const maxAllowedSize = this.state === 2 ? this.maxHeaderSize : this.maxBoundaryBuffer;
-if (newSize > maxAllowedSize) {
-throw new MultipartParseError(`Buffer size limit exceeded: ${newSize} bytes > ${maxAllowedSize} bytes. This may indicate malformed multipart data with ${this.state === 2 ? "oversized headers" : "invalid boundaries"}.`);
-}
-const newChunk = new Uint8Array(newSize);
-newChunk.set(this.buffer, 0);
-newChunk.set(chunk, this.buffer.length);
-chunk = newChunk;
-chunkLength = chunk.length;
-this.buffer = null;
-}
-if (chunkLength === 0 && this.state === 0) {
-throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
-}
-while (true) {
-if (this.state === 3) {
-if (chunkLength - index < this.boundaryLength) {
-const remainingData = chunk.subarray(index);
-if (remainingData.length > this.maxBoundaryBuffer) {
-throw new MultipartParseError(`Boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
-}
-this.buffer = remainingData;
-break;
-}
-const boundaryIndex = this.findBoundary(chunk, index);
-if (boundaryIndex === -1) {
-const partialTailIndex = this.findPartialTailBoundary(chunk);
-if (partialTailIndex === -1) {
-this.writeBody(index === 0 ? chunk : chunk.subarray(index));
-}
-else {
-this.writeBody(chunk.subarray(index, partialTailIndex));
-const partialBoundary = chunk.subarray(partialTailIndex);
-if (partialBoundary.length > this.maxBoundaryBuffer) {
-throw new MultipartParseError(`Partial boundary too large: ${partialBoundary.length} > ${this.maxBoundaryBuffer}`);
-}
-this.buffer = partialBoundary;
-}
-break;
-}
-this.writeBody(chunk.subarray(index, boundaryIndex));
-this.finishMessage();
-index = boundaryIndex + this.boundaryLength;
-this.state = 1;
-}
-if (this.state === 1) {
-if (chunkLength - index < 2) {
-const remainingData = chunk.subarray(index);
-if (remainingData.length > this.maxBoundaryBuffer) {
-throw new MultipartParseError(`After-boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
-}
-this.buffer = remainingData;
-break;
-}
-if (chunk[index] === 45 && chunk[index + 1] === 45) {
-this.state = 4;
-break;
-}
-if (chunk[index] === 13 && chunk[index + 1] === 10) {
-index += 2;
-}
-else if (chunk[index] === 10) {
-index += 1;
-}
-else {
-throw new MultipartParseError(`Invalid character after boundary: expected CRLF or LF, got 0x${chunk[index].toString(16)}`);
-}
-this.state = 2;
-}
-if (this.state === 2) {
-if (chunkLength - index < 4) {
-const remainingData = chunk.subarray(index);
-if (remainingData.length > this.maxHeaderSize) {
-throw new MultipartParseError(`Header buffer limit exceeded: ${remainingData.length} > ${this.maxHeaderSize}`);
-}
-this.buffer = remainingData;
-break;
-}
-let headerEndIndex = this.findDoubleNewline(chunk, index);
-let headerEndOffset = 4;
-if (headerEndIndex === -1) {
-const lfDoubleNewline = createSearch("\n\n");
-headerEndIndex = lfDoubleNewline(chunk, index);
-headerEndOffset = 2;
-}
-if (headerEndIndex === -1) {
-const headerData = chunk.subarray(index);
-if (headerData.length > this.maxHeaderSize) {
-throw new MultipartParseError(`Headers too large: ${headerData.length} > ${this.maxHeaderSize} bytes`);
-}
-this.buffer = headerData;
-break;
-}
-const headerBytes = chunk.subarray(index, headerEndIndex);
-this.currentHeaders = parseHeaders(headerBytes);
-const message = this.createStreamingMessage();
-newMessages.push(message);
-index = headerEndIndex + headerEndOffset;
-this.state = 3;
-continue;
-}
-if (this.state === 0) {
-if (chunkLength < this.openingBoundaryLength) {
-if (chunk.length > this.maxBoundaryBuffer) {
-throw new MultipartParseError(`Initial chunk too large for boundary detection: ${chunk.length} > ${this.maxBoundaryBuffer}`);
-}
-this.buffer = chunk;
-break;
-}
-const boundaryIndex = this.findOpeningBoundary(chunk);
-if (boundaryIndex !== 0) {
-throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
-}
-index = this.openingBoundaryLength;
-this.state = 1;
-}
-}
-return newMessages;
-}
-createStreamingMessage() {
-const headers = new Headers(this.currentHeaders);
-const payload = new ReadableStream({
-start: /* @__PURE__ */ __name((controller) => {
-this.currentPayloadController = controller;
-}, "start")
-});
-this.currentHeaders = new Headers();
-return {
-headers,
-payload
-};
-}
-writeBody(chunk) {
-if (this.currentPayloadController) {
-this.currentPayloadController.enqueue(chunk);
-}
-}
-finishMessage() {
-if (this.currentPayloadController) {
-this.currentPayloadController.close();
-this.currentPayloadController = null;
-}
-}
-/**
-* Close current payload controller if open (used during cleanup)
-* If an error is provided, forwards it to the payload consumer
-*/
-closeCurrentPayload(error45) {
-if (this.currentPayloadController) {
-try {
-if (error45) {
-this.currentPayloadController.error(error45);
-}
-else {
-this.currentPayloadController.close();
-}
-}
-catch (controllerError) {
-}
-this.currentPayloadController = null;
-}
-}
-};
-// ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.23/node_modules/@vercel/queue/dist/index.mjs
-async function streamToBuffer(stream) {
-let totalLength = 0;
-const reader = stream.getReader();
-const chunks = [];
-try {
-while (true) {
-const { done, value } = await reader.read();
-if (done)
-break;
-chunks.push(value);
-totalLength += value.length;
-}
-}
-finally {
-reader.releaseLock();
-}
-return Buffer.concat(chunks, totalLength);
-}
-__name(streamToBuffer, "streamToBuffer");
-var JsonTransport = class {
-static {
-__name(this, "JsonTransport");
-}
-contentType = "application/json";
-replacer;
-reviver;
-constructor(options = {}) {
-this.replacer = options.replacer;
-this.reviver = options.reviver;
-}
-serialize(value) {
-return Buffer.from(JSON.stringify(value, this.replacer), "utf8");
-}
-async deserialize(stream) {
-const buffer = await streamToBuffer(stream);
-return JSON.parse(buffer.toString("utf8"), this.reviver);
-}
-};
-var devRouteHandlers = /* @__PURE__ */ new Map();
-var wildcardRouteHandlers = /* @__PURE__ */ new Map();
-function clearDevHandlers() {
-devRouteHandlers.clear();
-wildcardRouteHandlers.clear();
+var devRouteHandlers = /* @__PURE__ */ new Map();
+var wildcardRouteHandlers = /* @__PURE__ */ new Map();
+function clearDevHandlers() {
+devRouteHandlers.clear();
+wildcardRouteHandlers.clear();
 }
 __name(clearDevHandlers, "clearDevHandlers");
 if (process.env.NODE_ENV === "test" || process.env.VITEST) {
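Read as a whole, the hunk above drops the vendored streaming multipart parser from the bundle (createSearch, createPartialTailSearch, parseHeaders, extractBoundary, the async message queue, and the StreamingMultipartParser state machine, old lines 5127-5579) and keeps only the @vercel/queue helpers that previously followed it: streamToBuffer and JsonTransport move up to new lines 5127-5165, with the dev route handler maps now directly after them.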
@@ -18740,907 +18287,1360 @@ function decodeTime(id) {
|
|
|
18740
18287
|
if (time3 > TIME_MAX) {
|
|
18741
18288
|
throw new ULIDError(ULIDErrorCode.DecodeTimeValueMalformed, `Malformed ULID: timestamp too large: ${time3}`);
|
|
18742
18289
|
}
|
|
18743
|
-
return time3;
|
|
18290
|
+
return time3;
|
|
18291
|
+
}
|
|
18292
|
+
__name(decodeTime, "decodeTime");
|
|
18293
|
+
function detectPRNG(root) {
|
|
18294
|
+
const rootLookup = detectRoot();
|
|
18295
|
+
const globalCrypto = rootLookup && (rootLookup.crypto || rootLookup.msCrypto) || (typeof import_node_crypto.default !== "undefined" ? import_node_crypto.default : null);
|
|
18296
|
+
if (typeof globalCrypto?.getRandomValues === "function") {
|
|
18297
|
+
return () => {
|
|
18298
|
+
const buffer = new Uint8Array(1);
|
|
18299
|
+
globalCrypto.getRandomValues(buffer);
|
|
18300
|
+
return buffer[0] / 255;
|
|
18301
|
+
};
|
|
18302
|
+
}
|
|
18303
|
+
else if (typeof globalCrypto?.randomBytes === "function") {
|
|
18304
|
+
return () => globalCrypto.randomBytes(1).readUInt8() / 255;
|
|
18305
|
+
}
|
|
18306
|
+
else if (import_node_crypto.default?.randomBytes) {
|
|
18307
|
+
return () => import_node_crypto.default.randomBytes(1).readUInt8() / 255;
|
|
18308
|
+
}
|
|
18309
|
+
throw new ULIDError(ULIDErrorCode.PRNGDetectFailure, "Failed to find a reliable PRNG");
|
|
18310
|
+
}
|
|
18311
|
+
__name(detectPRNG, "detectPRNG");
|
|
18312
|
+
function detectRoot() {
|
|
18313
|
+
if (inWebWorker())
|
|
18314
|
+
return self;
|
|
18315
|
+
if (typeof window !== "undefined") {
|
|
18316
|
+
return window;
|
|
18317
|
+
}
|
|
18318
|
+
if (typeof global !== "undefined") {
|
|
18319
|
+
return global;
|
|
18320
|
+
}
|
|
18321
|
+
if (typeof globalThis !== "undefined") {
|
|
18322
|
+
return globalThis;
|
|
18323
|
+
}
|
|
18324
|
+
return null;
|
|
18325
|
+
}
|
|
18326
|
+
__name(detectRoot, "detectRoot");
|
|
18327
|
+
function encodeRandom(len, prng) {
|
|
18328
|
+
let str = "";
|
|
18329
|
+
for (; len > 0; len--) {
|
|
18330
|
+
str = randomChar(prng) + str;
|
|
18331
|
+
}
|
|
18332
|
+
return str;
|
|
18333
|
+
}
|
|
18334
|
+
__name(encodeRandom, "encodeRandom");
|
|
18335
|
+
function encodeTime(now, len = TIME_LEN) {
|
|
18336
|
+
if (isNaN(now)) {
|
|
18337
|
+
throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be a number: ${now}`);
|
|
18338
|
+
}
|
|
18339
|
+
else if (now > TIME_MAX) {
|
|
18340
|
+
throw new ULIDError(ULIDErrorCode.EncodeTimeSizeExceeded, `Cannot encode a time larger than ${TIME_MAX}: ${now}`);
|
|
18341
|
+
}
|
|
18342
|
+
else if (now < 0) {
|
|
18343
|
+
throw new ULIDError(ULIDErrorCode.EncodeTimeNegative, `Time must be positive: ${now}`);
|
|
18344
|
+
}
|
|
18345
|
+
else if (Number.isInteger(now) === false) {
|
|
18346
|
+
throw new ULIDError(ULIDErrorCode.EncodeTimeValueMalformed, `Time must be an integer: ${now}`);
|
|
18347
|
+
}
|
|
18348
|
+
let mod, str = "";
|
|
18349
|
+
for (let currentLen = len; currentLen > 0; currentLen--) {
|
|
18350
|
+
mod = now % ENCODING_LEN;
|
|
18351
|
+
str = ENCODING.charAt(mod) + str;
|
|
18352
|
+
now = (now - mod) / ENCODING_LEN;
|
|
18353
|
+
}
|
|
18354
|
+
return str;
|
|
18355
|
+
}
|
|
18356
|
+
__name(encodeTime, "encodeTime");
|
|
18357
|
+
function inWebWorker() {
|
|
18358
|
+
return typeof WorkerGlobalScope !== "undefined" && self instanceof WorkerGlobalScope;
|
|
18359
|
+
}
|
|
18360
|
+
__name(inWebWorker, "inWebWorker");
|
|
18361
|
+
function monotonicFactory(prng) {
|
|
18362
|
+
const currentPRNG = prng || detectPRNG();
|
|
18363
|
+
let lastTime = 0, lastRandom;
|
|
18364
|
+
return /* @__PURE__ */ __name(function _ulid2(seedTime) {
|
|
18365
|
+
const seed = !seedTime || isNaN(seedTime) ? Date.now() : seedTime;
|
|
18366
|
+
if (seed <= lastTime) {
|
|
18367
|
+
const incrementedRandom = lastRandom = incrementBase32(lastRandom);
|
|
18368
|
+
return encodeTime(lastTime, TIME_LEN) + incrementedRandom;
|
|
18369
|
+
}
|
|
18370
|
+
lastTime = seed;
|
|
18371
|
+
const newRandom = lastRandom = encodeRandom(RANDOM_LEN, currentPRNG);
|
|
18372
|
+
return encodeTime(seed, TIME_LEN) + newRandom;
|
|
18373
|
+
}, "_ulid");
|
|
18374
|
+
}
|
|
18375
|
+
__name(monotonicFactory, "monotonicFactory");
|
|
18376
|
+
// ../world-local/dist/queue.js
|
|
18377
|
+
function createQueue(port) {
|
|
18378
|
+
const transport = new JsonTransport();
|
|
18379
|
+
const generateId = monotonicFactory();
|
|
18380
|
+
const inflightMessages = /* @__PURE__ */ new Map();
|
|
18381
|
+
const queue = /* @__PURE__ */ __name(async (queueName, message, opts) => {
|
|
18382
|
+
const cleanup = [];
|
|
18383
|
+
if (opts?.idempotencyKey) {
|
|
18384
|
+
const existing = inflightMessages.get(opts.idempotencyKey);
|
|
18385
|
+
if (existing) {
|
|
18386
|
+
return { messageId: existing };
|
|
18387
|
+
}
|
|
18388
|
+
}
|
|
18389
|
+
const body = transport.serialize(message);
|
|
18390
|
+
let pathname;
|
|
18391
|
+
if (queueName.startsWith("__wkf_step_")) {
|
|
18392
|
+
pathname = `step`;
|
|
18393
|
+
}
|
|
18394
|
+
else if (queueName.startsWith("__wkf_workflow_")) {
|
|
18395
|
+
pathname = `flow`;
|
|
18396
|
+
}
|
|
18397
|
+
else {
|
|
18398
|
+
throw new Error("Unknown queue name prefix");
|
|
18399
|
+
}
|
|
18400
|
+
const messageId = MessageId.parse(`msg_${generateId()}`);
|
|
18401
|
+
if (opts?.idempotencyKey) {
|
|
18402
|
+
const key = opts.idempotencyKey;
|
|
18403
|
+
inflightMessages.set(key, messageId);
|
|
18404
|
+
cleanup.push(() => {
|
|
18405
|
+
inflightMessages.delete(key);
|
|
18406
|
+
});
|
|
18407
|
+
}
|
|
18408
|
+
(async () => {
|
|
18409
|
+
let defaultRetriesLeft = 3;
|
|
18410
|
+
for (let attempt = 0; defaultRetriesLeft > 0; attempt++) {
|
|
18411
|
+
defaultRetriesLeft--;
|
|
18412
|
+
const response = await fetch(`http://localhost:${port}/.well-known/workflow/v1/${pathname}`, {
|
|
18413
|
+
method: "POST",
|
|
18414
|
+
duplex: "half",
|
|
18415
|
+
headers: {
|
|
18416
|
+
"x-vqs-queue-name": queueName,
|
|
18417
|
+
"x-vqs-message-id": messageId,
|
|
18418
|
+
"x-vqs-message-attempt": String(attempt + 1)
|
|
18419
|
+
},
|
|
18420
|
+
body
|
|
18421
|
+
});
|
|
18422
|
+
if (response.ok) {
|
|
18423
|
+
return;
|
|
18424
|
+
}
|
|
18425
|
+
const text = await response.text();
|
|
18426
|
+
if (response.status === 503) {
|
|
18427
|
+
try {
|
|
18428
|
+
const retryIn = Number(JSON.parse(text).retryIn);
|
|
18429
|
+
await (0, import_promises.setTimeout)(retryIn * 1e3);
|
|
18430
|
+
defaultRetriesLeft++;
|
|
18431
|
+
continue;
|
|
18432
|
+
}
|
|
18433
|
+
catch {
|
|
18434
|
+
}
|
|
18435
|
+
}
|
|
18436
|
+
console.error(`[embedded world] Failed to queue message`, {
|
|
18437
|
+
queueName,
|
|
18438
|
+
text,
|
|
18439
|
+
status: response.status,
|
|
18440
|
+
headers: Object.fromEntries(response.headers.entries()),
|
|
18441
|
+
body: body.toString()
|
|
18442
|
+
});
|
|
18443
|
+
}
|
|
18444
|
+
console.error(`[embedded world] Reached max retries of embedded world queue implementation`);
|
|
18445
|
+
})().finally(() => {
|
|
18446
|
+
for (const fn of cleanup) {
|
|
18447
|
+
fn();
|
|
18448
|
+
}
|
|
18449
|
+
});
|
|
18450
|
+
return { messageId };
|
|
18451
|
+
}, "queue");
|
|
18452
|
+
const HeaderParser = zod_default.object({
|
|
18453
|
+
"x-vqs-queue-name": ValidQueueName,
|
|
18454
|
+
"x-vqs-message-id": MessageId,
|
|
18455
|
+
"x-vqs-message-attempt": zod_default.coerce.number()
|
|
18456
|
+
});
|
|
18457
|
+
const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
|
|
18458
|
+
return async (req) => {
|
|
18459
|
+
const headers = HeaderParser.safeParse(Object.fromEntries(req.headers));
|
|
18460
|
+
if (!headers.success || !req.body) {
|
|
18461
|
+
return Response.json({ error: "Missing required headers" }, { status: 400 });
|
|
18462
|
+
}
|
|
18463
|
+
const queueName = headers.data["x-vqs-queue-name"];
|
|
18464
|
+
const messageId = headers.data["x-vqs-message-id"];
|
|
18465
|
+
const attempt = headers.data["x-vqs-message-attempt"];
|
|
18466
|
+
if (!queueName.startsWith(prefix)) {
|
|
18467
|
+
return Response.json({ error: "Unhandled queue" }, { status: 400 });
|
|
18468
|
+
}
|
|
18469
|
+
const body = await new JsonTransport().deserialize(req.body);
|
|
18470
|
+
try {
|
|
18471
|
+
const response = await handler(body, { attempt, queueName, messageId });
|
|
18472
|
+
const retryIn = typeof response === "undefined" ? null : response.timeoutSeconds;
|
|
18473
|
+
if (retryIn) {
|
|
18474
|
+
return Response.json({ retryIn }, { status: 503 });
|
|
18475
|
+
}
|
|
18476
|
+
return Response.json({ ok: true });
|
|
18477
|
+
}
|
|
18478
|
+
catch (error45) {
|
|
18479
|
+
return Response.json(String(error45), { status: 500 });
|
|
18480
|
+
}
|
|
18481
|
+
};
|
|
18482
|
+
}, "createQueueHandler");
|
|
18483
|
+
const getDeploymentId = /* @__PURE__ */ __name(async () => {
|
|
18484
|
+
return "dpl_embedded";
|
|
18485
|
+
}, "getDeploymentId");
|
|
18486
|
+
return { queue, createQueueHandler, getDeploymentId };
|
|
18487
|
+
}
|
|
18488
|
+
__name(createQueue, "createQueue");
|
|
18489
|
+
// ../world-local/dist/storage.js
|
|
18490
|
+
var import_node_path2 = __toESM(require("node:path"), 1);
|
|
18491
|
+
// ../world-local/dist/fs.js
|
|
18492
|
+
var import_node_fs = require("node:fs");
|
|
18493
|
+
var import_node_path = __toESM(require("node:path"), 1);
|
|
18494
|
+
var ulid3 = monotonicFactory(() => Math.random());
|
|
18495
|
+
var Ulid = external_exports.string().ulid();
|
|
18496
|
+
function ulidToDate(maybeUlid) {
|
|
18497
|
+
const ulid4 = Ulid.safeParse(maybeUlid);
|
|
18498
|
+
if (!ulid4.success) {
|
|
18499
|
+
return null;
|
|
18500
|
+
}
|
|
18501
|
+
return new Date(decodeTime(ulid4.data));
|
|
18502
|
+
}
|
|
18503
|
+
__name(ulidToDate, "ulidToDate");
|
|
18504
|
+
async function ensureDir(dirPath) {
|
|
18505
|
+
try {
|
|
18506
|
+
await import_node_fs.promises.mkdir(dirPath, { recursive: true });
|
|
18507
|
+
}
|
|
18508
|
+
catch (_error) {
|
|
18509
|
+
}
|
|
18744
18510
|
}
|
|
18745
|
-
__name(
|
|
18746
|
-
function
|
|
18747
|
-
|
|
18748
|
-
|
|
18749
|
-
|
|
18750
|
-
|
|
18751
|
-
|
|
18752
|
-
|
|
18753
|
-
|
|
18754
|
-
|
|
18511
|
+
__name(ensureDir, "ensureDir");
|
|
18512
|
+
async function writeJSON(filePath, data, opts) {
|
|
18513
|
+
return write(filePath, JSON.stringify(data, null, 2), opts);
|
|
18514
|
+
}
|
|
18515
|
+
__name(writeJSON, "writeJSON");
|
|
18516
|
+
async function write(filePath, data, opts) {
|
|
18517
|
+
if (!opts?.overwrite) {
|
|
18518
|
+
try {
|
|
18519
|
+
await import_node_fs.promises.access(filePath);
|
|
18520
|
+
throw new WorkflowAPIError(`File ${filePath} already exists and 'overwrite' is false`, { status: 409 });
|
|
18521
|
+
}
|
|
18522
|
+
catch (error45) {
|
|
18523
|
+
if (error45.code !== "ENOENT") {
|
|
18524
|
+
throw error45;
|
|
18525
|
+
}
|
|
18526
|
+
}
|
|
18755
18527
|
}
|
|
18756
|
-
|
|
18757
|
-
|
|
18528
|
+
const tempPath = `${filePath}.tmp.${ulid3()}`;
|
|
18529
|
+
try {
|
|
18530
|
+
await ensureDir(import_node_path.default.dirname(filePath));
|
|
18531
|
+
await import_node_fs.promises.writeFile(tempPath, data);
|
|
18532
|
+
await import_node_fs.promises.rename(tempPath, filePath);
|
|
18758
18533
|
}
|
|
18759
|
-
|
|
18760
|
-
|
|
18534
|
+
catch (error45) {
|
|
18535
|
+
await import_node_fs.promises.unlink(tempPath).catch(() => {
|
|
18536
|
+
});
|
|
18537
|
+
throw error45;
|
|
18761
18538
|
}
|
|
18762
|
-
throw new ULIDError(ULIDErrorCode.PRNGDetectFailure, "Failed to find a reliable PRNG");
|
|
18763
18539
|
}
|
|
18764
|
-
__name(
|
|
18765
|
-
function
|
|
18766
|
-
|
|
18767
|
-
|
|
18768
|
-
|
|
18769
|
-
return window;
|
|
18770
|
-
}
|
|
18771
|
-
if (typeof global !== "undefined") {
|
|
18772
|
-
return global;
|
|
18540
|
+
__name(write, "write");
|
|
18541
|
+
async function readJSON(filePath, decoder) {
|
|
18542
|
+
try {
|
|
18543
|
+
const content = await import_node_fs.promises.readFile(filePath, "utf-8");
|
|
18544
|
+
return decoder.parse(JSON.parse(content));
|
|
18773
18545
|
}
|
|
18774
|
-
|
|
18775
|
-
|
|
18546
|
+
catch (error45) {
|
|
18547
|
+
if (error45.code === "ENOENT")
|
|
18548
|
+
return null;
|
|
18549
|
+
throw error45;
|
|
18776
18550
|
}
|
|
18777
|
-
return null;
|
|
18778
18551
|
}
|
|
18779
|
-
__name(
|
|
18780
|
-
function
|
|
18781
|
-
|
|
18782
|
-
|
|
18783
|
-
|
|
18552
|
+
__name(readJSON, "readJSON");
|
|
18553
|
+
async function readBuffer(filePath) {
|
|
18554
|
+
const content = await import_node_fs.promises.readFile(filePath);
|
|
18555
|
+
return content;
|
|
18556
|
+
}
|
|
18557
|
+
__name(readBuffer, "readBuffer");
|
|
18558
|
+
async function deleteJSON(filePath) {
|
|
18559
|
+
try {
|
|
18560
|
+
await import_node_fs.promises.unlink(filePath);
|
|
18561
|
+
}
|
|
18562
|
+
catch (error45) {
|
|
18563
|
+
if (error45.code !== "ENOENT")
|
|
18564
|
+
throw error45;
|
|
18784
18565
|
}
|
|
18785
|
-
return str;
|
|
18786
18566
|
}
|
|
18787
|
-
__name(
|
|
18788
|
-
function
|
|
18789
|
-
|
|
18790
|
-
|
|
18567
|
+
__name(deleteJSON, "deleteJSON");
|
|
18568
|
+
async function listJSONFiles(dirPath) {
|
|
18569
|
+
try {
|
|
18570
|
+
const files = await import_node_fs.promises.readdir(dirPath);
|
|
18571
|
+
return files.filter((f) => f.endsWith(".json")).map((f) => f.replace(".json", ""));
|
|
18791
18572
|
}
|
|
18792
|
-
|
|
18793
|
-
|
|
18573
|
+
catch (error45) {
|
|
18574
|
+
if (error45.code === "ENOENT")
|
|
18575
|
+
return [];
|
|
18576
|
+
throw error45;
|
|
18794
18577
|
}
|
|
18795
|
-
|
|
18796
|
-
|
|
18578
|
+
}
|
|
18579
|
+
__name(listJSONFiles, "listJSONFiles");
|
|
18580
|
+
function parseCursor(cursor) {
|
|
18581
|
+
if (!cursor)
|
|
18582
|
+
return null;
|
|
18583
|
+
const parts = cursor.split("|");
|
|
18584
|
+
return {
|
|
18585
|
+
timestamp: new Date(parts[0]),
|
|
18586
|
+
id: parts[1] || null
|
|
18587
|
+
};
|
|
18588
|
+
}
|
|
18589
|
+
__name(parseCursor, "parseCursor");
|
|
18590
|
+
function createCursor(timestamp, id) {
|
|
18591
|
+
return id ? `${timestamp.toISOString()}|${id}` : timestamp.toISOString();
|
|
18592
|
+
}
|
|
18593
|
+
__name(createCursor, "createCursor");
|
|
18594
|
+
async function paginatedFileSystemQuery(config3) {
|
|
18595
|
+
const { directory, schema, filePrefix, filter, sortOrder = "desc", limit = 20, cursor, getCreatedAt, getId } = config3;
|
|
18596
|
+
const fileIds = await listJSONFiles(directory);
|
|
18597
|
+
const relevantFileIds = filePrefix ? fileIds.filter((fileId) => fileId.startsWith(filePrefix)) : fileIds;
|
|
18598
|
+
const parsedCursor = parseCursor(cursor);
|
|
18599
|
+
let candidateFileIds = relevantFileIds;
|
|
18600
|
+
if (parsedCursor) {
|
|
18601
|
+
candidateFileIds = relevantFileIds.filter((fileId) => {
|
|
18602
|
+
const filenameDate = getCreatedAt(`${fileId}.json`);
|
|
18603
|
+
if (filenameDate) {
|
|
18604
|
+
const cursorTime = parsedCursor.timestamp.getTime();
|
|
18605
|
+
const fileTime = filenameDate.getTime();
|
|
18606
|
+
if (parsedCursor.id) {
|
|
18607
|
+
return sortOrder === "desc" ? fileTime <= cursorTime : fileTime >= cursorTime;
|
|
18608
|
+
}
|
|
18609
|
+
else {
|
|
18610
|
+
return sortOrder === "desc" ? fileTime < cursorTime : fileTime > cursorTime;
|
|
18611
|
+
}
|
|
18612
|
+
}
|
|
18613
|
+
return false;
|
|
18614
|
+
});
|
|
18797
18615
|
}
|
|
18798
|
-
else
|
|
18799
|
-
|
|
18616
|
+
else {
|
|
18617
|
+
candidateFileIds = relevantFileIds.filter((fileId) => {
|
|
18618
|
+
return getCreatedAt(`${fileId}.json`) !== null;
|
|
18619
|
+
});
|
|
18800
18620
|
}
|
|
18801
|
-
|
|
18802
|
-
for (
|
|
18803
|
-
|
|
18804
|
-
|
|
18805
|
-
|
|
18621
|
+
const validItems = [];
|
|
18622
|
+
for (const fileId of candidateFileIds) {
|
|
18623
|
+
const filePath = import_node_path.default.join(directory, `${fileId}.json`);
|
|
18624
|
+
const item = await readJSON(filePath, schema);
|
|
18625
|
+
if (item) {
|
|
18626
|
+
if (filter && !filter(item))
|
|
18627
|
+
continue;
|
|
18628
|
+
if (parsedCursor) {
|
|
18629
|
+
const itemTime = item.createdAt.getTime();
|
|
18630
|
+
const cursorTime = parsedCursor.timestamp.getTime();
|
|
18631
|
+
if (sortOrder === "desc") {
|
|
18632
|
+
if (itemTime > cursorTime)
|
|
18633
|
+
continue;
|
|
18634
|
+
if (itemTime === cursorTime && parsedCursor.id && getId) {
|
|
18635
|
+
const itemId = getId(item);
|
|
18636
|
+
if (itemId >= parsedCursor.id)
|
|
18637
|
+
continue;
|
|
18638
|
+
}
|
|
18639
|
+
}
|
|
18640
|
+
else {
|
|
18641
|
+
if (itemTime < cursorTime)
|
|
18642
|
+
continue;
|
|
18643
|
+
if (itemTime === cursorTime && parsedCursor.id && getId) {
|
|
18644
|
+
const itemId = getId(item);
|
|
18645
|
+
if (itemId <= parsedCursor.id)
|
|
18646
|
+
continue;
|
|
18647
|
+
}
|
|
18648
|
+
}
|
|
18649
|
+
}
|
|
18650
|
+
validItems.push(item);
|
|
18651
|
+
}
|
|
18806
18652
|
}
|
|
18807
|
-
|
|
18653
|
+
validItems.sort((a, b) => {
|
|
18654
|
+
const aTime = a.createdAt.getTime();
|
|
18655
|
+
const bTime = b.createdAt.getTime();
|
|
18656
|
+
const timeComparison = sortOrder === "asc" ? aTime - bTime : bTime - aTime;
|
|
18657
|
+
if (timeComparison === 0 && getId) {
|
|
18658
|
+
const aId = getId(a);
|
|
18659
|
+
const bId = getId(b);
|
|
18660
|
+
return sortOrder === "asc" ? aId.localeCompare(bId) : bId.localeCompare(aId);
|
|
18661
|
+
}
|
|
18662
|
+
return timeComparison;
|
|
18663
|
+
});
|
|
18664
|
+
const hasMore = validItems.length > limit;
|
|
18665
|
+
const items = hasMore ? validItems.slice(0, limit) : validItems;
|
|
18666
|
+
const nextCursor = hasMore && items.length > 0 ? createCursor(items[items.length - 1].createdAt, getId?.(items[items.length - 1])) : null;
|
|
18667
|
+
return {
|
|
18668
|
+
data: items,
|
|
18669
|
+
cursor: nextCursor,
|
|
18670
|
+
hasMore
|
|
18671
|
+
};
|
|
18808
18672
|
}
|
|
18809
|
-
__name(
|
|
18810
|
-
|
|
18811
|
-
|
|
18673
|
+
__name(paginatedFileSystemQuery, "paginatedFileSystemQuery");
|
|
18674
|
+
// ../world-local/dist/storage.js
|
|
18675
|
+
var monotonicUlid = monotonicFactory(() => Math.random());
|
|
18676
|
+
function filterRunData(run, resolveData) {
|
|
18677
|
+
if (resolveData === "none") {
|
|
18678
|
+
return {
|
|
18679
|
+
...run,
|
|
18680
|
+
input: [],
|
|
18681
|
+
output: void 0
|
|
18682
|
+
};
|
|
18683
|
+
}
|
|
18684
|
+
return run;
|
|
18812
18685
|
}
|
|
18813
|
-
__name(
|
|
18814
|
-
function
|
|
18815
|
-
|
|
18816
|
-
|
|
18817
|
-
|
|
18818
|
-
|
|
18819
|
-
|
|
18820
|
-
|
|
18821
|
-
|
|
18822
|
-
|
|
18823
|
-
lastTime = seed;
|
|
18824
|
-
const newRandom = lastRandom = encodeRandom(RANDOM_LEN, currentPRNG);
|
|
18825
|
-
return encodeTime(seed, TIME_LEN) + newRandom;
|
|
18826
|
-
}, "_ulid");
|
|
18686
|
+
__name(filterRunData, "filterRunData");
|
|
18687
|
+
function filterStepData(step, resolveData) {
|
|
18688
|
+
if (resolveData === "none") {
|
|
18689
|
+
return {
|
|
18690
|
+
...step,
|
|
18691
|
+
input: [],
|
|
18692
|
+
output: void 0
|
|
18693
|
+
};
|
|
18694
|
+
}
|
|
18695
|
+
return step;
|
|
18827
18696
|
}
|
|
18828
|
-
__name(
|
|
18829
|
-
|
|
18830
|
-
|
|
18831
|
-
|
|
18832
|
-
|
|
18833
|
-
|
|
18834
|
-
|
|
18835
|
-
|
|
18836
|
-
|
|
18837
|
-
|
|
18838
|
-
|
|
18839
|
-
|
|
18697
|
+
__name(filterStepData, "filterStepData");
|
|
18698
|
+
function filterEventData(event, resolveData) {
|
|
18699
|
+
if (resolveData === "none") {
|
|
18700
|
+
const { eventData: _eventData, ...rest } = event;
|
|
18701
|
+
return rest;
|
|
18702
|
+
}
|
|
18703
|
+
return event;
|
|
18704
|
+
}
|
|
18705
|
+
__name(filterEventData, "filterEventData");
|
|
18706
|
+
function filterHookData(hook, resolveData) {
|
|
18707
|
+
if (resolveData === "none") {
|
|
18708
|
+
const { metadata: _metadata, ...rest } = hook;
|
|
18709
|
+
return rest;
|
|
18710
|
+
}
|
|
18711
|
+
return hook;
|
|
18712
|
+
}
|
|
18713
|
+
__name(filterHookData, "filterHookData");
|
|
18714
|
+
var getObjectCreatedAt = /* @__PURE__ */ __name((idPrefix) => (filename) => {
|
|
18715
|
+
const replaceRegex = new RegExp(`^${idPrefix}_`, "g");
|
|
18716
|
+
const dashIndex = filename.indexOf("-");
|
|
18717
|
+
if (dashIndex === -1) {
|
|
18718
|
+
const ulid5 = filename.replace(/\.json$/, "").replace(replaceRegex, "");
|
|
18719
|
+
return ulidToDate(ulid5);
|
|
18720
|
+
}
|
|
18721
|
+
if (idPrefix === "step") {
|
|
18722
|
+
const runId = filename.substring(0, dashIndex);
|
|
18723
|
+
const ulid5 = runId.replace(/^wrun_/, "");
|
|
18724
|
+
return ulidToDate(ulid5);
|
|
18725
|
+
}
|
|
18726
|
+
const id = filename.substring(dashIndex + 1).replace(/\.json$/, "");
|
|
18727
|
+
const ulid4 = id.replace(replaceRegex, "");
|
|
18728
|
+
return ulidToDate(ulid4);
|
|
18729
|
+
}, "getObjectCreatedAt");
|
|
18730
|
+
function createStorage(basedir) {
|
|
18731
|
+
return {
|
|
18732
|
+
runs: {
|
|
18733
|
+
async create(data) {
|
|
18734
|
+
const runId = `wrun_${monotonicUlid()}`;
|
|
18735
|
+
const now = /* @__PURE__ */ new Date();
|
|
18736
|
+
const result = {
|
|
18737
|
+
runId,
|
|
18738
|
+
deploymentId: data.deploymentId,
|
|
18739
|
+
status: "pending",
|
|
18740
|
+
workflowName: data.workflowName,
|
|
18741
|
+
executionContext: data.executionContext,
|
|
18742
|
+
input: data.input || [],
|
|
18743
|
+
output: void 0,
|
|
18744
|
+
error: void 0,
|
|
18745
|
+
errorCode: void 0,
|
|
18746
|
+
startedAt: void 0,
|
|
18747
|
+
completedAt: void 0,
|
|
18748
|
+
createdAt: now,
|
|
18749
|
+
updatedAt: now
|
|
18750
|
+
};
|
|
18751
|
+
const runPath = import_node_path2.default.join(basedir, "runs", `${runId}.json`);
|
|
18752
|
+
await writeJSON(runPath, result);
|
|
18753
|
+
return result;
|
|
18754
|
+
},
|
|
18755
|
+
async get(id, params) {
|
|
18756
|
+
const runPath = import_node_path2.default.join(basedir, "runs", `${id}.json`);
|
|
18757
|
+
const run = await readJSON(runPath, WorkflowRunSchema);
|
|
18758
|
+
if (!run) {
|
|
18759
|
+
throw new WorkflowRunNotFoundError(id);
|
|
18760
|
+
}
|
|
18761
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18762
|
+
return filterRunData(run, resolveData);
|
|
18763
|
+
},
|
|
18764
|
+
async update(id, data) {
|
|
18765
|
+
const runPath = import_node_path2.default.join(basedir, "runs", `${id}.json`);
|
|
18766
|
+
const run = await readJSON(runPath, WorkflowRunSchema);
|
|
18767
|
+
if (!run) {
|
|
18768
|
+
throw new WorkflowRunNotFoundError(id);
|
|
18769
|
+
}
|
|
18770
|
+
const now = /* @__PURE__ */ new Date();
|
|
18771
|
+
const updatedRun = {
|
|
18772
|
+
...run,
|
|
18773
|
+
...data,
|
|
18774
|
+
updatedAt: now
|
|
18775
|
+
};
|
|
18776
|
+
if (data.status === "running" && !updatedRun.startedAt) {
|
|
18777
|
+
updatedRun.startedAt = now;
|
|
18778
|
+
}
|
|
18779
|
+
if (data.status === "completed" || data.status === "failed" || data.status === "cancelled") {
|
|
18780
|
+
updatedRun.completedAt = now;
|
|
18781
|
+
}
|
|
18782
|
+
await writeJSON(runPath, updatedRun, { overwrite: true });
|
|
18783
|
+
return updatedRun;
|
|
18784
|
+
},
|
|
18785
|
+
async list(params) {
|
|
18786
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18787
|
+
const result = await paginatedFileSystemQuery({
|
|
18788
|
+
directory: import_node_path2.default.join(basedir, "runs"),
|
|
18789
|
+
schema: WorkflowRunSchema,
|
|
18790
|
+
filter: /* @__PURE__ */ __name((run) => {
|
|
18791
|
+
if (params?.workflowName && run.workflowName !== params.workflowName) {
|
|
18792
|
+
return false;
|
|
18793
|
+
}
|
|
18794
|
+
if (params?.status && run.status !== params.status) {
|
|
18795
|
+
return false;
|
|
18796
|
+
}
|
|
18797
|
+
return true;
|
|
18798
|
+
}, "filter"),
|
|
18799
|
+
sortOrder: params?.pagination?.sortOrder ?? "desc",
|
|
18800
|
+
limit: params?.pagination?.limit,
|
|
18801
|
+
cursor: params?.pagination?.cursor,
|
|
18802
|
+
getCreatedAt: getObjectCreatedAt("wrun"),
|
|
18803
|
+
getId: /* @__PURE__ */ __name((run) => run.runId, "getId")
|
|
18804
|
+
});
|
|
18805
|
+
if (resolveData === "none") {
|
|
18806
|
+
return {
|
|
18807
|
+
...result,
|
|
18808
|
+
data: result.data.map((run) => ({
|
|
18809
|
+
...run,
|
|
18810
|
+
input: [],
|
|
18811
|
+
output: void 0
|
|
18812
|
+
}))
|
|
18813
|
+
};
|
|
18814
|
+
}
|
|
18815
|
+
return result;
|
|
18816
|
+
},
|
|
18817
|
+
async cancel(id, params) {
|
|
18818
|
+
const run = await this.update(id, { status: "cancelled" });
|
|
18819
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18820
|
+
return filterRunData(run, resolveData);
|
|
18821
|
+
},
|
|
18822
|
+
async pause(id, params) {
|
|
18823
|
+
const run = await this.update(id, { status: "paused" });
|
|
18824
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18825
|
+
return filterRunData(run, resolveData);
|
|
18826
|
+
},
|
|
18827
|
+
async resume(id, params) {
|
|
18828
|
+
const run = await this.update(id, { status: "running" });
|
|
18829
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18830
|
+
return filterRunData(run, resolveData);
|
|
18840
18831
|
}
|
|
18841
|
-
}
|
|
18842
|
-
|
|
18843
|
-
|
|
18844
|
-
|
|
18845
|
-
|
|
18846
|
-
|
|
18847
|
-
|
|
18848
|
-
|
|
18849
|
-
|
|
18850
|
-
|
|
18851
|
-
|
|
18852
|
-
|
|
18853
|
-
|
|
18854
|
-
|
|
18855
|
-
|
|
18856
|
-
|
|
18857
|
-
|
|
18858
|
-
|
|
18859
|
-
|
|
18860
|
-
|
|
18861
|
-
|
|
18862
|
-
|
|
18863
|
-
|
|
18864
|
-
|
|
18865
|
-
|
|
18866
|
-
|
|
18867
|
-
|
|
18868
|
-
|
|
18869
|
-
|
|
18870
|
-
|
|
18871
|
-
|
|
18872
|
-
|
|
18873
|
-
|
|
18832
|
+
},
|
|
18833
|
+
steps: {
|
|
18834
|
+
async create(runId, data) {
|
|
18835
|
+
const now = /* @__PURE__ */ new Date();
|
|
18836
|
+
const result = {
|
|
18837
|
+
runId,
|
|
18838
|
+
stepId: data.stepId,
|
|
18839
|
+
stepName: data.stepName,
|
|
18840
|
+
status: "pending",
|
|
18841
|
+
input: data.input,
|
|
18842
|
+
output: void 0,
|
|
18843
|
+
error: void 0,
|
|
18844
|
+
errorCode: void 0,
|
|
18845
|
+
attempt: 0,
|
|
18846
|
+
startedAt: void 0,
|
|
18847
|
+
completedAt: void 0,
|
|
18848
|
+
createdAt: now,
|
|
18849
|
+
updatedAt: now
|
|
18850
|
+
};
|
|
18851
|
+
const compositeKey = `${runId}-${data.stepId}`;
|
|
18852
|
+
const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
|
|
18853
|
+
await writeJSON(stepPath, result);
|
|
18854
|
+
return result;
|
|
18855
|
+
},
|
|
18856
|
+
async get(runId, stepId, params) {
|
|
18857
|
+
if (!runId) {
|
|
18858
|
+
const fileIds = await listJSONFiles(import_node_path2.default.join(basedir, "steps"));
|
|
18859
|
+
const fileId = fileIds.find((fileId2) => fileId2.endsWith(`-${stepId}`));
|
|
18860
|
+
if (!fileId) {
|
|
18861
|
+
throw new Error(`Step ${stepId} not found`);
|
|
18862
|
+
}
|
|
18863
|
+
runId = fileId.split("-")[0];
|
|
18864
|
+
}
|
|
18865
|
+
const compositeKey = `${runId}-${stepId}`;
|
|
18866
|
+
const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
|
|
18867
|
+
const step = await readJSON(stepPath, StepSchema);
|
|
18868
|
+
if (!step) {
|
|
18869
|
+
throw new Error(`Step ${stepId} in run ${runId} not found`);
|
|
18870
|
+
}
|
|
18871
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18872
|
+
return filterStepData(step, resolveData);
|
|
18873
|
+
},
|
|
18874
|
+
async update(runId, stepId, data) {
|
|
18875
|
+
const compositeKey = `${runId}-${stepId}`;
|
|
18876
|
+
const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
|
|
18877
|
+
const step = await readJSON(stepPath, StepSchema);
|
|
18878
|
+
if (!step) {
|
|
18879
|
+
throw new Error(`Step ${stepId} in run ${runId} not found`);
|
|
18880
|
+
}
|
|
18881
|
+
const now = /* @__PURE__ */ new Date();
|
|
18882
|
+
const updatedStep = {
|
|
18883
|
+
...step,
|
|
18884
|
+
...data,
|
|
18885
|
+
updatedAt: now
|
|
18886
|
+
};
|
|
18887
|
+
if (data.status === "running" && !updatedStep.startedAt) {
|
|
18888
|
+
updatedStep.startedAt = now;
|
|
18889
|
+
}
|
|
18890
|
+
if (data.status === "completed" || data.status === "failed") {
|
|
18891
|
+
updatedStep.completedAt = now;
|
|
18892
|
+
}
|
|
18893
|
+
await writeJSON(stepPath, updatedStep, { overwrite: true });
|
|
18894
|
+
return updatedStep;
|
|
18895
|
+
},
|
|
18896
|
+
async list(params) {
|
|
18897
|
+
const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18898
|
+
const result = await paginatedFileSystemQuery({
|
|
18899
|
+
directory: import_node_path2.default.join(basedir, "steps"),
|
|
18900
|
+
schema: StepSchema,
|
|
18901
|
+
filePrefix: `${params.runId}-`,
|
|
18902
|
+
sortOrder: params.pagination?.sortOrder ?? "desc",
|
|
18903
|
+
limit: params.pagination?.limit,
|
|
18904
|
+
cursor: params.pagination?.cursor,
|
|
18905
|
+
getCreatedAt: getObjectCreatedAt("step"),
|
|
18906
|
+
getId: /* @__PURE__ */ __name((step) => step.stepId, "getId")
|
|
18907
|
+
});
|
|
18908
|
+
if (resolveData === "none") {
|
|
18909
|
+
return {
|
|
18910
|
+
...result,
|
|
18911
|
+
data: result.data.map((step) => ({
|
|
18912
|
+
...step,
|
|
18913
|
+
input: [],
|
|
18914
|
+
output: void 0
|
|
18915
|
+
}))
|
|
18916
|
+
};
|
|
18917
|
+
}
|
|
18918
|
+
return result;
|
|
18919
|
+
}
|
|
18920
|
+
},
|
|
18921
|
+
// Events - filesystem-backed storage
|
|
18922
|
+
events: {
|
|
18923
|
+
async create(runId, data, params) {
|
|
18924
|
+
const eventId = `evnt_${monotonicUlid()}`;
|
|
18925
|
+
const now = /* @__PURE__ */ new Date();
|
|
18926
|
+
const result = {
|
|
18927
|
+
...data,
|
|
18928
|
+
runId,
|
|
18929
|
+
eventId,
|
|
18930
|
+
createdAt: now
|
|
18931
|
+
};
|
|
18932
|
+
const compositeKey = `${runId}-${eventId}`;
|
|
18933
|
+
const eventPath = import_node_path2.default.join(basedir, "events", `${compositeKey}.json`);
|
|
18934
|
+
await writeJSON(eventPath, result);
|
|
18935
|
+
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18936
|
+
return filterEventData(result, resolveData);
|
|
18937
|
+
},
|
|
18938
|
+
async list(params) {
|
|
18939
|
+
const { runId } = params;
|
|
18940
|
+
const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18941
|
+
const result = await paginatedFileSystemQuery({
|
|
18942
|
+
directory: import_node_path2.default.join(basedir, "events"),
|
|
18943
|
+
schema: EventSchema,
|
|
18944
|
+
filePrefix: `${runId}-`,
|
|
18945
|
+
// Events in chronological order (oldest first) by default,
|
|
18946
|
+
// different from the default for other list calls.
|
|
18947
|
+
sortOrder: params.pagination?.sortOrder ?? "asc",
|
|
18948
|
+
limit: params.pagination?.limit,
|
|
18949
|
+
cursor: params.pagination?.cursor,
|
|
18950
|
+
getCreatedAt: getObjectCreatedAt("evnt"),
|
|
18951
|
+
getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
|
|
18874
18952
|
});
|
|
18875
|
-
if (
|
|
18876
|
-
return
|
|
18877
|
-
|
|
18878
|
-
|
|
18879
|
-
|
|
18880
|
-
|
|
18881
|
-
|
|
18882
|
-
|
|
18883
|
-
defaultRetriesLeft++;
|
|
18884
|
-
continue;
|
|
18885
|
-
}
|
|
18886
|
-
catch {
|
|
18887
|
-
}
|
|
18953
|
+
if (resolveData === "none") {
|
|
18954
|
+
return {
|
|
18955
|
+
...result,
|
|
18956
|
+
data: result.data.map((event) => {
|
|
18957
|
+
const { eventData: _eventData, ...rest } = event;
|
|
18958
|
+
return rest;
|
|
18959
|
+
})
|
|
18960
|
+
};
|
|
18888
18961
|
}
|
|
18889
|
-
|
|
18890
|
-
|
|
18891
|
-
|
|
18892
|
-
|
|
18893
|
-
|
|
18894
|
-
|
|
18962
|
+
return result;
|
|
18963
|
+
},
|
|
18964
|
+
async listByCorrelationId(params) {
|
|
18965
|
+
const correlationId = params.correlationId;
|
|
18966
|
+
const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
18967
|
+
const result = await paginatedFileSystemQuery({
|
|
18968
|
+
directory: import_node_path2.default.join(basedir, "events"),
|
|
18969
|
+
schema: EventSchema,
|
|
18970
|
+
+// No filePrefix - search all events
+filter: /* @__PURE__ */ __name((event) => event.correlationId === correlationId, "filter"),
+// Events in chronological order (oldest first) by default,
+// different from the default for other list calls.
+sortOrder: params.pagination?.sortOrder ?? "asc",
+limit: params.pagination?.limit,
+cursor: params.pagination?.cursor,
+getCreatedAt: getObjectCreatedAt("evnt"),
+getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
 });
-
-
-
-
-
-
-
-
-}, "queue");
-const HeaderParser = zod_default.object({
-"x-vqs-queue-name": ValidQueueName,
-"x-vqs-message-id": MessageId,
-"x-vqs-message-attempt": zod_default.coerce.number()
-});
-const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
-return async (req) => {
-const headers = HeaderParser.safeParse(Object.fromEntries(req.headers));
-if (!headers.success || !req.body) {
-return Response.json({ error: "Missing required headers" }, { status: 400 });
-}
-const queueName = headers.data["x-vqs-queue-name"];
-const messageId = headers.data["x-vqs-message-id"];
-const attempt = headers.data["x-vqs-message-attempt"];
-if (!queueName.startsWith(prefix)) {
-return Response.json({ error: "Unhandled queue" }, { status: 400 });
-}
-const body = await new JsonTransport().deserialize(req.body);
-try {
-const response = await handler(body, { attempt, queueName, messageId });
-const retryIn = typeof response === "undefined" ? null : response.timeoutSeconds;
-if (retryIn) {
-return Response.json({ retryIn }, { status: 503 });
+if (resolveData === "none") {
+return {
+...result,
+data: result.data.map((event) => {
+const { eventData: _eventData, ...rest } = event;
+return rest;
+})
+};
 }
-return
-}
-catch (error45) {
-return Response.json(String(error45), { status: 500 });
-}
-};
-}, "createQueueHandler");
-const getDeploymentId = /* @__PURE__ */ __name(async () => {
-return "dpl_embedded";
-}, "getDeploymentId");
-return { queue, createQueueHandler, getDeploymentId };
-}
-__name(createQueue, "createQueue");
-// ../world-local/dist/storage.js
-var import_node_path2 = __toESM(require("node:path"), 1);
-// ../world-local/dist/fs.js
-var import_node_fs = require("node:fs");
-var import_node_path = __toESM(require("node:path"), 1);
-var ulid3 = monotonicFactory(() => Math.random());
-var Ulid = external_exports.string().ulid();
-function ulidToDate(maybeUlid) {
-const ulid4 = Ulid.safeParse(maybeUlid);
-if (!ulid4.success) {
-return null;
-}
-return new Date(decodeTime(ulid4.data));
-}
-__name(ulidToDate, "ulidToDate");
-async function ensureDir(dirPath) {
-try {
-await import_node_fs.promises.mkdir(dirPath, { recursive: true });
-}
-catch (_error) {
-}
-}
-__name(ensureDir, "ensureDir");
-async function writeJSON(filePath, data, opts) {
-return write(filePath, JSON.stringify(data, null, 2), opts);
-}
-__name(writeJSON, "writeJSON");
-async function write(filePath, data, opts) {
-if (!opts?.overwrite) {
-try {
-await import_node_fs.promises.access(filePath);
-throw new WorkflowAPIError(`File ${filePath} already exists and 'overwrite' is false`, { status: 409 });
-}
-catch (error45) {
-if (error45.code !== "ENOENT") {
-throw error45;
+return result;
 }
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+},
+// Hooks
+hooks: {
+async create(runId, data) {
+const now = /* @__PURE__ */ new Date();
+const result = {
+runId,
+hookId: data.hookId,
+token: data.token,
+metadata: data.metadata,
+ownerId: "embedded-owner",
+projectId: "embedded-project",
+environment: "embedded",
+createdAt: now
+};
+const hookPath = import_node_path2.default.join(basedir, "hooks", `${data.hookId}.json`);
+await writeJSON(hookPath, result);
+return result;
+},
+async get(hookId, params) {
+const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
+const hook = await readJSON(hookPath, HookSchema);
+if (!hook) {
+throw new Error(`Hook ${hookId} not found`);
+}
+const resolveData = params?.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
+return filterHookData(hook, resolveData);
+},
+async getByToken(token) {
+const hooksDir = import_node_path2.default.join(basedir, "hooks");
+const files = await listJSONFiles(hooksDir);
+for (const file2 of files) {
+const hookPath = import_node_path2.default.join(hooksDir, `${file2}.json`);
+const hook = await readJSON(hookPath, HookSchema);
+if (hook && hook.token === token) {
+return hook;
+}
+}
+throw new Error(`Hook with token ${token} not found`);
+},
+async list(params) {
+const hooksDir = import_node_path2.default.join(basedir, "hooks");
+const resolveData = params.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
+const result = await paginatedFileSystemQuery({
+directory: hooksDir,
+schema: HookSchema,
+sortOrder: params.pagination?.sortOrder,
+limit: params.pagination?.limit,
+cursor: params.pagination?.cursor,
+filePrefix: void 0,
+// Hooks don't have ULIDs, so we can't optimize by filename
+filter: /* @__PURE__ */ __name((hook) => {
+if (params.runId && hook.runId !== params.runId) {
+return false;
+}
+return true;
+}, "filter"),
+getCreatedAt: /* @__PURE__ */ __name(() => {
+return /* @__PURE__ */ new Date(0);
+}, "getCreatedAt"),
+getId: /* @__PURE__ */ __name((hook) => hook.hookId, "getId")
+});
+return {
+...result,
+data: result.data.map((hook) => filterHookData(hook, resolveData))
+};
+},
+async dispose(hookId) {
+const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
+const hook = await readJSON(hookPath, HookSchema);
+if (!hook) {
+throw new Error(`Hook ${hookId} not found`);
+}
+await deleteJSON(hookPath);
+return hook;
+}
+}
+};
 }
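Note: the added hooks store is a thin JSON-file CRUD layer keyed by hookId. A minimal usage sketch under assumed names (the basedir and all ids below are hypothetical; it assumes the surrounding storage factory, createStorage, returns this hooks object):

const storage = createStorage("/tmp/workflow-data"); // assumed basedir
const hook = await storage.hooks.create("run_123", { // hypothetical runId
  hookId: "hook_abc",                                // hypothetical ids
  token: "tok_secret",
  metadata: { source: "example" }
});
await storage.hooks.getByToken("tok_secret"); // linear scan over the hooks dir
await storage.hooks.dispose("hook_abc");      // deletes hooks/hook_abc.json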
-__name(
-
-
-
-
-
-
-}
+__name(createStorage, "createStorage");
+// ../world-local/dist/streamer.js
+var import_node_events = require("node:events");
+var import_node_path3 = __toESM(require("node:path"), 1);
+var monotonicUlid2 = monotonicFactory(() => Math.random());
+function serializeChunk(chunk) {
+const eofByte = Buffer.from([chunk.eof ? 1 : 0]);
+return Buffer.concat([eofByte, chunk.chunk]);
 }
-__name(
-
-
-
-}
-catch (error45) {
-if (error45.code === "ENOENT")
-return [];
-throw error45;
-}
+__name(serializeChunk, "serializeChunk");
+function deserializeChunk(serialized) {
+const eof = serialized[0] === 1;
+const chunk = serialized.subarray(1);
+return { eof, chunk };
 }
-__name(
-function
-
-return null;
-const parts = cursor.split("|");
+__name(deserializeChunk, "deserializeChunk");
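Note: serializeChunk/deserializeChunk frame each stream chunk with a single leading flag byte (1 = EOF marker, 0 = data), so every chunk file is self-describing. A round-trip sketch of that framing:

// framing: [flagByte][payload...]
const framed = serializeChunk({ chunk: Buffer.from("hello"), eof: false });
// framed[0] === 0; framed.subarray(1) is the payload
const { eof, chunk } = deserializeChunk(framed);
// eof === false, chunk.toString() === "hello"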
+function createStreamer(basedir) {
+const streamEmitter = new import_node_events.EventEmitter();
 return {
-
-
-
-
-__name(parseCursor, "parseCursor");
-function createCursor(timestamp, id) {
-return id ? `${timestamp.toISOString()}|${id}` : timestamp.toISOString();
-}
-__name(createCursor, "createCursor");
-async function paginatedFileSystemQuery(config3) {
-const { directory, schema, filePrefix, filter, sortOrder = "desc", limit = 20, cursor, getCreatedAt, getId } = config3;
-const fileIds = await listJSONFiles(directory);
-const relevantFileIds = filePrefix ? fileIds.filter((fileId) => fileId.startsWith(filePrefix)) : fileIds;
-const parsedCursor = parseCursor(cursor);
-let candidateFileIds = relevantFileIds;
-if (parsedCursor) {
-candidateFileIds = relevantFileIds.filter((fileId) => {
-const filenameDate = getCreatedAt(`${fileId}.json`);
-if (filenameDate) {
-const cursorTime = parsedCursor.timestamp.getTime();
-const fileTime = filenameDate.getTime();
-if (parsedCursor.id) {
-return sortOrder === "desc" ? fileTime <= cursorTime : fileTime >= cursorTime;
-}
-else {
-return sortOrder === "desc" ? fileTime < cursorTime : fileTime > cursorTime;
-}
+async writeToStream(name, chunk) {
+const chunkId = `strm_${monotonicUlid2()}`;
+if (typeof chunk === "string") {
+chunk = new TextEncoder().encode(chunk);
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const serialized = serializeChunk({
+chunk: Buffer.from(chunk),
+eof: false
+});
+const chunkPath = import_node_path3.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.json`);
+await write(chunkPath, serialized);
+const chunkData = typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk instanceof Buffer ? new Uint8Array(chunk) : chunk;
+streamEmitter.emit(`chunk:${name}`, {
+streamName: name,
+chunkData,
+chunkId
+});
+},
+async closeStream(name) {
+const chunkId = `strm_${monotonicUlid2()}`;
+const chunkPath = import_node_path3.default.join(basedir, "streams", "chunks", `${name}-${chunkId}.json`);
+await write(chunkPath, serializeChunk({ chunk: Buffer.from([]), eof: true }));
+streamEmitter.emit(`close:${name}`, { streamName: name });
+},
+async readFromStream(name, startIndex = 0) {
+const chunksDir = import_node_path3.default.join(basedir, "streams", "chunks");
+let removeListeners = /* @__PURE__ */ __name(() => {
+}, "removeListeners");
+return new ReadableStream({
+async start(controller) {
+const deliveredChunkIds = /* @__PURE__ */ new Set();
+const bufferedEventChunks = [];
+let isReadingFromDisk = true;
+const chunkListener = /* @__PURE__ */ __name((event) => {
+deliveredChunkIds.add(event.chunkId);
+if (isReadingFromDisk) {
+bufferedEventChunks.push({
+chunkId: event.chunkId,
+chunkData: event.chunkData
+});
+}
+else {
+controller.enqueue(event.chunkData);
+}
+}, "chunkListener");
+const closeListener = /* @__PURE__ */ __name(() => {
+streamEmitter.off(`chunk:${name}`, chunkListener);
+streamEmitter.off(`close:${name}`, closeListener);
+controller.close();
+}, "closeListener");
+removeListeners = closeListener;
+streamEmitter.on(`chunk:${name}`, chunkListener);
+streamEmitter.on(`close:${name}`, closeListener);
+const files = await listJSONFiles(chunksDir);
+const chunkFiles = files.filter((file2) => file2.startsWith(`${name}-`)).sort();
+let isComplete = false;
+for (let i = startIndex; i < chunkFiles.length; i++) {
+const file2 = chunkFiles[i];
+const chunkId = file2.substring(name.length + 1);
+if (deliveredChunkIds.has(chunkId)) {
 continue;
+}
+const chunk = deserializeChunk(await readBuffer(import_node_path3.default.join(chunksDir, `${file2}.json`)));
+if (chunk?.eof === true) {
+isComplete = true;
+break;
+}
+if (chunk.chunk.byteLength) {
+controller.enqueue(chunk.chunk);
+}
 }
-
-
-
-
-
-
-
-
+isReadingFromDisk = false;
+bufferedEventChunks.sort((a, b) => a.chunkId.localeCompare(b.chunkId));
+for (const buffered of bufferedEventChunks) {
+controller.enqueue(buffered.chunkData);
+}
+if (isComplete) {
+removeListeners();
+controller.close();
+return;
 }
+},
+cancel() {
+removeListeners();
 }
-}
-validItems.push(item);
-}
-}
-validItems.sort((a, b) => {
-const aTime = a.createdAt.getTime();
-const bTime = b.createdAt.getTime();
-const timeComparison = sortOrder === "asc" ? aTime - bTime : bTime - aTime;
-if (timeComparison === 0 && getId) {
-const aId = getId(a);
-const bId = getId(b);
-return sortOrder === "asc" ? aId.localeCompare(bId) : bId.localeCompare(aId);
+});
 }
-
-
-
-
-
+};
+}
+__name(createStreamer, "createStreamer");
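Note: createStreamer persists chunks as `${name}-${ulid}.json` files and fans them out over an in-process EventEmitter; readFromStream replays the on-disk chunks first, buffers live events in the meantime, and deduplicates by chunkId. A consumption sketch (the basedir and stream name are hypothetical):

const streamer = createStreamer("/tmp/workflow-data"); // assumed basedir
await streamer.writeToStream("my-stream", "part 1");   // hypothetical name
await streamer.closeStream("my-stream");               // writes the EOF chunk
const reader = (await streamer.readFromStream("my-stream")).getReader();
for (let r = await reader.read(); !r.done; r = await reader.read()) {
  console.log(new TextDecoder().decode(r.value)); // "part 1"
}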
+// ../world-local/dist/index.js
+function createEmbeddedWorld({ dataDir, port }) {
+const dir = dataDir ?? config.value.dataDir;
+const queuePort = port ?? config.value.port;
 return {
-
-
-
+...createQueue(queuePort),
+...createStorage(dir),
+...createStreamer(dir)
+};
+}
+__name(createEmbeddedWorld, "createEmbeddedWorld");
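Note: createEmbeddedWorld simply spreads the three local backends into one object; when dataDir or port is omitted it falls back to config.value. Sketch (the option values are hypothetical):

const world = createEmbeddedWorld({ dataDir: "/tmp/workflow-data", port: 3000 });
// world exposes the port-based queue helpers, the dataDir-based JSON storage
// (runs/steps/events/hooks), and the streamer, merged into a single interface.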
+// ../../node_modules/.pnpm/mixpart@0.0.5-alpha.1/node_modules/mixpart/dist/index.mjs
+var MultipartParseError = class extends Error {
+static {
+__name(this, "MultipartParseError");
+}
+constructor(message) {
+super(message);
+this.name = "MultipartParseError";
+}
+};
+function createSearch(pattern) {
+const needle = new TextEncoder().encode(pattern);
+return (haystack, start2 = 0) => Buffer.prototype.indexOf.call(haystack, needle, start2);
+}
+__name(createSearch, "createSearch");
+function createPartialTailSearch(pattern) {
+const needle = new TextEncoder().encode(pattern);
+const byteIndexes = {};
+for (let i = 0; i < needle.length; ++i) {
+const byte = needle[i];
+if (byteIndexes[byte] === void 0)
+byteIndexes[byte] = [];
+byteIndexes[byte].push(i);
+}
+return function (haystack) {
+const haystackEnd = haystack.length - 1;
+if (haystack[haystackEnd] in byteIndexes) {
+const indexes = byteIndexes[haystack[haystackEnd]];
+for (let i = indexes.length - 1; i >= 0; --i) {
+for (let j = indexes[i], k = haystackEnd; j >= 0 && haystack[k] === needle[j]; --j, --k) {
+if (j === 0)
+return k;
+}
+}
+}
+return -1;
 };
 }
-__name(
-
-
-
-
-
-
-
-
-
+__name(createPartialTailSearch, "createPartialTailSearch");
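Note: createPartialTailSearch answers a different question than createSearch: not "where does the full needle occur?" but "does some prefix of the needle end exactly at the end of this chunk?", which is what lets the parser hold back a boundary that may be split across network chunks. Illustration (hypothetical boundary value):

const findTail = createPartialTailSearch("\r\n--abc"); // hypothetical boundary
// chunk ends with "\r\n--a": a boundary may continue in the next chunk
findTail(new TextEncoder().encode("body data\r\n--a")); // 9 (tail starts here)
findTail(new TextEncoder().encode("body data"));       // -1 (no partial tail)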
+function parseHeaders(headerBytes) {
+const headerText = new TextDecoder("iso-8859-1").decode(headerBytes);
+const lines = headerText.trim().split(/\r?\n/);
+const headerInit = [];
+for (const line of lines) {
+const colonIndex = line.indexOf(":");
+if (colonIndex > 0) {
+const name = line.slice(0, colonIndex).trim();
+const value = line.slice(colonIndex + 1).trim();
+headerInit.push([name, value]);
+}
 }
-return
+return new Headers(headerInit);
 }
-__name(
-function
-
-
-
-input: [],
-output: void 0
-};
+__name(parseHeaders, "parseHeaders");
+function extractBoundary(contentType) {
+const boundaryMatch = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
+if (!boundaryMatch) {
+throw new MultipartParseError("No boundary found in Content-Type header");
 }
-return
+return boundaryMatch[1] ?? boundaryMatch[2];
 }
-__name(
-
-
-
-return rest;
+__name(extractBoundary, "extractBoundary");
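Note: extractBoundary pulls the boundary token out of a Content-Type header, accepting both the quoted and the bare form. Example values:

extractBoundary('multipart/mixed; boundary="abc123"'); // "abc123" (quoted form)
extractBoundary("multipart/mixed; boundary=abc123");   // "abc123" (bare form)
extractBoundary("multipart/mixed"); // throws MultipartParseError (no boundary)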
|
19252
|
+
var AsyncMessageQueue = class {
|
|
19253
|
+
static {
|
|
19254
|
+
__name(this, "AsyncMessageQueue");
|
|
19155
19255
|
}
|
|
19156
|
-
|
|
19157
|
-
|
|
19158
|
-
|
|
19159
|
-
|
|
19160
|
-
|
|
19161
|
-
|
|
19162
|
-
|
|
19256
|
+
queue = [];
|
|
19257
|
+
waiters = [];
|
|
19258
|
+
finished = false;
|
|
19259
|
+
cancelled = false;
|
|
19260
|
+
error = null;
|
|
19261
|
+
/**
|
|
19262
|
+
* Producer: Enqueue a message for consumption
|
|
19263
|
+
*/
|
|
19264
|
+
enqueue(message) {
|
|
19265
|
+
if (this.finished || this.cancelled)
|
|
19266
|
+
return;
|
|
19267
|
+
if (this.waiters.length > 0) {
|
|
19268
|
+
const waiter = this.waiters.shift();
|
|
19269
|
+
waiter.resolve(message);
|
|
19270
|
+
}
|
|
19271
|
+
else {
|
|
19272
|
+
this.queue.push(message);
|
|
19273
|
+
}
|
|
19163
19274
|
}
|
|
19164
|
-
|
|
19165
|
-
|
|
19166
|
-
|
|
19167
|
-
|
|
19168
|
-
|
|
19169
|
-
|
|
19170
|
-
|
|
19171
|
-
|
|
19172
|
-
|
|
19275
|
+
/**
|
|
19276
|
+
* Producer: Signal completion (with optional error)
|
|
19277
|
+
*/
|
|
19278
|
+
finish(error45) {
|
|
19279
|
+
if (this.finished)
|
|
19280
|
+
return;
|
|
19281
|
+
this.finished = true;
|
|
19282
|
+
this.error = error45 || null;
|
|
19283
|
+
while (this.waiters.length > 0) {
|
|
19284
|
+
const waiter = this.waiters.shift();
|
|
19285
|
+
if (error45) {
|
|
19286
|
+
waiter.reject(error45);
|
|
19287
|
+
}
|
|
19288
|
+
else {
|
|
19289
|
+
waiter.resolve(null);
|
|
19290
|
+
}
|
|
19291
|
+
}
|
|
19173
19292
|
}
|
|
19174
|
-
|
|
19175
|
-
|
|
19176
|
-
|
|
19177
|
-
|
|
19293
|
+
/**
|
|
19294
|
+
* Consumer: Cancel the queue (stops accepting new messages and notifies waiters)
|
|
19295
|
+
*/
|
|
19296
|
+
cancel() {
|
|
19297
|
+
if (this.cancelled || this.finished)
|
|
19298
|
+
return;
|
|
19299
|
+
this.cancelled = true;
|
|
19300
|
+
while (this.waiters.length > 0) {
|
|
19301
|
+
const waiter = this.waiters.shift();
|
|
19302
|
+
waiter.resolve(null);
|
|
19303
|
+
}
|
|
19178
19304
|
}
|
|
19179
|
-
|
|
19180
|
-
|
|
19181
|
-
|
|
19182
|
-
|
|
19183
|
-
|
|
19184
|
-
|
|
19185
|
-
|
|
19186
|
-
|
|
19187
|
-
|
|
19188
|
-
|
|
19189
|
-
|
|
19190
|
-
|
|
19191
|
-
|
|
19192
|
-
|
|
19193
|
-
|
|
19194
|
-
|
|
19195
|
-
|
|
19196
|
-
|
|
19197
|
-
|
|
19198
|
-
|
|
19199
|
-
|
|
19200
|
-
|
|
19201
|
-
|
|
19202
|
-
|
|
19203
|
-
|
|
19204
|
-
|
|
19205
|
-
|
|
19206
|
-
|
|
19207
|
-
|
|
19208
|
-
|
|
19209
|
-
|
|
19210
|
-
|
|
19211
|
-
|
|
19212
|
-
|
|
19213
|
-
|
|
19214
|
-
|
|
19215
|
-
|
|
19216
|
-
|
|
19217
|
-
|
|
19218
|
-
|
|
19219
|
-
|
|
19220
|
-
|
|
19221
|
-
|
|
19222
|
-
|
|
19223
|
-
|
|
19224
|
-
|
|
19225
|
-
|
|
19226
|
-
|
|
19227
|
-
|
|
19228
|
-
|
|
19229
|
-
|
|
19230
|
-
|
|
19231
|
-
|
|
19232
|
-
|
|
19233
|
-
|
|
19234
|
-
|
|
19235
|
-
|
|
19236
|
-
|
|
19237
|
-
|
|
19238
|
-
|
|
19239
|
-
|
|
19240
|
-
|
|
19241
|
-
|
|
19242
|
-
|
|
19243
|
-
|
|
19244
|
-
|
|
19245
|
-
|
|
19246
|
-
|
|
19247
|
-
|
|
19248
|
-
|
|
19249
|
-
|
|
19250
|
-
|
|
19251
|
-
|
|
19252
|
-
|
|
19253
|
-
|
|
19254
|
-
|
|
19255
|
-
|
|
19256
|
-
|
|
19257
|
-
|
|
19258
|
-
if (resolveData === "none") {
|
|
19259
|
-
return {
|
|
19260
|
-
...result,
|
|
19261
|
-
data: result.data.map((run) => ({
|
|
19262
|
-
...run,
|
|
19263
|
-
input: [],
|
|
19264
|
-
output: void 0
|
|
19265
|
-
}))
|
|
19266
|
-
};
|
|
19267
|
-
}
|
|
19268
|
-
return result;
|
|
19269
|
-
},
|
|
19270
|
-
async cancel(id, params) {
|
|
19271
|
-
const run = await this.update(id, { status: "cancelled" });
|
|
19272
|
-
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
19273
|
-
return filterRunData(run, resolveData);
|
|
19274
|
-
},
|
|
19275
|
-
async pause(id, params) {
|
|
19276
|
-
const run = await this.update(id, { status: "paused" });
|
|
19277
|
-
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
19278
|
-
return filterRunData(run, resolveData);
|
|
19279
|
-
},
|
|
19280
|
-
async resume(id, params) {
|
|
19281
|
-
const run = await this.update(id, { status: "running" });
|
|
19282
|
-
const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
|
|
19283
|
-
return filterRunData(run, resolveData);
|
|
19305
|
+
/**
|
|
19306
|
+
* Consumer: Dequeue next message (or null if finished/cancelled)
|
|
19307
|
+
*/
|
|
19308
|
+
async dequeue() {
|
|
19309
|
+
if (this.queue.length > 0) {
|
|
19310
|
+
return this.queue.shift();
|
|
19311
|
+
}
|
|
19312
|
+
if (this.finished || this.cancelled) {
|
|
19313
|
+
if (this.error)
|
|
19314
|
+
throw this.error;
|
|
19315
|
+
return null;
|
|
19316
|
+
}
|
|
19317
|
+
return new Promise((resolve, reject) => {
|
|
19318
|
+
this.waiters.push({ resolve, reject });
|
|
19319
|
+
});
|
|
19320
|
+
}
|
|
19321
|
+
/**
|
|
19322
|
+
* Check if the queue is in a terminal state
|
|
19323
|
+
*/
|
|
19324
|
+
get isTerminal() {
|
|
19325
|
+
return this.finished || this.cancelled;
|
|
19326
|
+
}
|
|
19327
|
+
};
|
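Note: AsyncMessageQueue is a classic single-consumer hand-off: enqueue resolves a parked waiter directly if one exists, otherwise buffers; dequeue drains the buffer, returns null on completion, or parks a promise. A producer/consumer sketch:

const q = new AsyncMessageQueue();
(async () => {            // producer
  q.enqueue("first");
  q.enqueue("second");
  q.finish();             // or q.finish(err) to reject pending waiters
})();
for (let msg = await q.dequeue(); msg !== null; msg = await q.dequeue()) {
  console.log(msg);       // "first", then "second"
}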
+async function* parseMultipartStream2(response, options) {
+if (!response.body) {
+throw new MultipartParseError("Response body is null");
+}
+const contentType = response.headers.get("content-type");
+if (!contentType) {
+throw new MultipartParseError("Missing Content-Type header");
+}
+const boundary = extractBoundary(contentType);
+const parser = new StreamingMultipartParser(boundary, options);
+yield* parser.parseStream(response.body);
+}
+__name(parseMultipartStream2, "parseMultipartStream");
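Note: parseMultipartStream turns a multipart/mixed HTTP response into an async iterator of { headers, payload } messages, where payload is itself a ReadableStream. Consumption sketch (the URL and header name are hypothetical):

const response = await fetch("https://example.test/messages", {
  headers: { Accept: "multipart/mixed" } // hypothetical endpoint
});
for await (const message of parseMultipartStream2(response)) {
  console.log(message.headers.get("vqs-message-id")); // header name is an example
  const body = await new Response(message.payload).text(); // drain part body
}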
+var StreamingMultipartParser = class {
+static {
+__name(this, "StreamingMultipartParser");
+}
+boundary;
+findOpeningBoundary;
+openingBoundaryLength;
+findBoundary;
+findPartialTailBoundary;
+boundaryLength;
+findDoubleNewline;
+// Safety limits
+maxHeaderSize;
+maxBoundaryBuffer;
+state = 0;
+buffer = null;
+currentHeaders = new Headers();
+currentPayloadController = null;
+constructor(boundary, options = {}) {
+this.boundary = boundary;
+this.findOpeningBoundary = createSearch(`--${boundary}`);
+this.openingBoundaryLength = 2 + boundary.length;
+this.findBoundary = createSearch(`\r
+--${boundary}`);
+this.findPartialTailBoundary = createPartialTailSearch(`\r
+--${boundary}`);
+this.boundaryLength = 4 + boundary.length;
+this.findDoubleNewline = createSearch("\r\n\r\n");
+this.maxHeaderSize = options.maxHeaderSize ?? 65536;
+this.maxBoundaryBuffer = options.maxBoundaryBuffer ?? 8192;
+}
+async *parseStream(stream) {
+const reader = stream.getReader();
+const messageQueue = new AsyncMessageQueue();
+const producer = this.startProducer(reader, messageQueue);
+try {
+yield* this.consumeMessages(messageQueue);
+}
+finally {
+messageQueue.cancel();
+this.closeCurrentPayload();
+try {
+await reader.cancel();
 }
-
-steps: {
-async create(runId, data) {
-const now = /* @__PURE__ */ new Date();
-const result = {
-runId,
-stepId: data.stepId,
-stepName: data.stepName,
-status: "pending",
-input: data.input,
-output: void 0,
-error: void 0,
-errorCode: void 0,
-attempt: 0,
-startedAt: void 0,
-completedAt: void 0,
-createdAt: now,
-updatedAt: now
-};
-const compositeKey = `${runId}-${data.stepId}`;
-const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
-await writeJSON(stepPath, result);
-return result;
-},
-async get(runId, stepId, params) {
-if (!runId) {
-const fileIds = await listJSONFiles(import_node_path2.default.join(basedir, "steps"));
-const fileId = fileIds.find((fileId2) => fileId2.endsWith(`-${stepId}`));
-if (!fileId) {
-throw new Error(`Step ${stepId} not found`);
-}
-runId = fileId.split("-")[0];
-}
-const compositeKey = `${runId}-${stepId}`;
-const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
-const step = await readJSON(stepPath, StepSchema);
-if (!step) {
-throw new Error(`Step ${stepId} in run ${runId} not found`);
-}
-const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
-return filterStepData(step, resolveData);
-},
-async update(runId, stepId, data) {
-const compositeKey = `${runId}-${stepId}`;
-const stepPath = import_node_path2.default.join(basedir, "steps", `${compositeKey}.json`);
-const step = await readJSON(stepPath, StepSchema);
-if (!step) {
-throw new Error(`Step ${stepId} in run ${runId} not found`);
-}
-const now = /* @__PURE__ */ new Date();
-const updatedStep = {
-...step,
-...data,
-updatedAt: now
-};
-if (data.status === "running" && !updatedStep.startedAt) {
-updatedStep.startedAt = now;
-}
-if (data.status === "completed" || data.status === "failed") {
-updatedStep.completedAt = now;
-}
-await writeJSON(stepPath, updatedStep, { overwrite: true });
-return updatedStep;
-},
-async list(params) {
-const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
-const result = await paginatedFileSystemQuery({
-directory: import_node_path2.default.join(basedir, "steps"),
-schema: StepSchema,
-filePrefix: `${params.runId}-`,
-sortOrder: params.pagination?.sortOrder ?? "desc",
-limit: params.pagination?.limit,
-cursor: params.pagination?.cursor,
-getCreatedAt: getObjectCreatedAt("step"),
-getId: /* @__PURE__ */ __name((step) => step.stepId, "getId")
-});
-if (resolveData === "none") {
-return {
-...result,
-data: result.data.map((step) => ({
-...step,
-input: [],
-output: void 0
-}))
-};
-}
-return result;
+catch (error45) {
 }
-
-
-
-
-
-
-
-
-
-
-
-
-const compositeKey = `${runId}-${eventId}`;
-const eventPath = import_node_path2.default.join(basedir, "events", `${compositeKey}.json`);
-await writeJSON(eventPath, result);
-const resolveData = params?.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
-return filterEventData(result, resolveData);
-},
-async list(params) {
-const { runId } = params;
-const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
-const result = await paginatedFileSystemQuery({
-directory: import_node_path2.default.join(basedir, "events"),
-schema: EventSchema,
-filePrefix: `${runId}-`,
-// Events in chronological order (oldest first) by default,
-// different from the default for other list calls.
-sortOrder: params.pagination?.sortOrder ?? "asc",
-limit: params.pagination?.limit,
-cursor: params.pagination?.cursor,
-getCreatedAt: getObjectCreatedAt("evnt"),
-getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
-});
-if (resolveData === "none") {
-return {
-...result,
-data: result.data.map((event) => {
-const { eventData: _eventData, ...rest } = event;
-return rest;
-})
-};
-}
-return result;
-},
-async listByCorrelationId(params) {
-const correlationId = params.correlationId;
-const resolveData = params.resolveData ?? DEFAULT_RESOLVE_DATA_OPTION;
-const result = await paginatedFileSystemQuery({
-directory: import_node_path2.default.join(basedir, "events"),
-schema: EventSchema,
-// No filePrefix - search all events
-filter: /* @__PURE__ */ __name((event) => event.correlationId === correlationId, "filter"),
-// Events in chronological order (oldest first) by default,
-// different from the default for other list calls.
-sortOrder: params.pagination?.sortOrder ?? "asc",
-limit: params.pagination?.limit,
-cursor: params.pagination?.cursor,
-getCreatedAt: getObjectCreatedAt("evnt"),
-getId: /* @__PURE__ */ __name((event) => event.eventId, "getId")
-});
-if (resolveData === "none") {
-return {
-...result,
-data: result.data.map((event) => {
-const { eventData: _eventData, ...rest } = event;
-return rest;
-})
-};
+await producer;
+}
+}
+/**
+ * Producer: Continuously read chunks and parse messages
+ */
+async startProducer(reader, messageQueue) {
+try {
+while (!messageQueue.isTerminal) {
+let result;
+try {
+result = await reader.read();
 }
-
-
-
-
-
-async create(runId, data) {
-const now = /* @__PURE__ */ new Date();
-const result = {
-runId,
-hookId: data.hookId,
-token: data.token,
-metadata: data.metadata,
-ownerId: "embedded-owner",
-projectId: "embedded-project",
-environment: "embedded",
-createdAt: now
-};
-const hookPath = import_node_path2.default.join(basedir, "hooks", `${data.hookId}.json`);
-await writeJSON(hookPath, result);
-return result;
-},
-async get(hookId, params) {
-const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
-const hook = await readJSON(hookPath, HookSchema);
-if (!hook) {
-throw new Error(`Hook ${hookId} not found`);
+catch (readError) {
+if (readError instanceof Error && (readError.name === "AbortError" || readError.constructor.name === "AbortError" || readError.name === "TimeoutError" || readError.constructor.name === "TimeoutError")) {
+break;
+}
+throw readError;
 }
-const
-
-
-
-
-
-
-
-
-if (hook && hook.token === token) {
-return hook;
+const { done, value } = result;
+if (done) {
+if (this.buffer !== null && this.buffer.length > 0) {
+const messages2 = this.write(new Uint8Array(0));
+for (const message of messages2) {
+if (messageQueue.isTerminal)
+break;
+messageQueue.enqueue(message);
+}
 }
-
-
-
-async list(params) {
-const hooksDir = import_node_path2.default.join(basedir, "hooks");
-const resolveData = params.resolveData || DEFAULT_RESOLVE_DATA_OPTION;
-const result = await paginatedFileSystemQuery({
-directory: hooksDir,
-schema: HookSchema,
-sortOrder: params.pagination?.sortOrder,
-limit: params.pagination?.limit,
-cursor: params.pagination?.cursor,
-filePrefix: void 0,
-// Hooks don't have ULIDs, so we can't optimize by filename
-filter: /* @__PURE__ */ __name((hook) => {
-if (params.runId && hook.runId !== params.runId) {
-return false;
+if (this.state !== 4) {
+if (this.state === 0) {
+throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
 }
-
-}
-
-return /* @__PURE__ */ new Date(0);
-}, "getCreatedAt"),
-getId: /* @__PURE__ */ __name((hook) => hook.hookId, "getId")
-});
-return {
-...result,
-data: result.data.map((hook) => filterHookData(hook, resolveData))
-};
-},
-async dispose(hookId) {
-const hookPath = import_node_path2.default.join(basedir, "hooks", `${hookId}.json`);
-const hook = await readJSON(hookPath, HookSchema);
-if (!hook) {
-throw new Error(`Hook ${hookId} not found`);
+throw new MultipartParseError("Unexpected end of stream");
+}
+break;
 }
-
-
+if (!(value instanceof Uint8Array)) {
+throw new MultipartParseError(`Invalid chunk type: expected Uint8Array, got ${typeof value}`);
+}
+const messages = this.write(value);
+for (const message of messages) {
+if (messageQueue.isTerminal)
+break;
+messageQueue.enqueue(message);
+}
+}
+if (!messageQueue.isTerminal) {
+messageQueue.finish();
 }
 }
-
-
-
-
-var import_node_events = require("node:events");
-var import_node_path3 = __toESM(require("node:path"), 1);
-var monotonicUlid2 = monotonicFactory(() => Math.random());
-function serializeChunk(chunk) {
-const eofByte = Buffer.from([chunk.eof ? 1 : 0]);
-return Buffer.concat([eofByte, chunk.chunk]);
-}
-__name(serializeChunk, "serializeChunk");
-function deserializeChunk(serialized) {
-const eof = serialized[0] === 1;
-const chunk = serialized.subarray(1);
-return { eof, chunk };
-}
-__name(deserializeChunk, "deserializeChunk");
-function createStreamer(basedir) {
-const streamEmitter = new import_node_events.EventEmitter();
-return {
-async writeToStream(name, chunk) {
-const chunkId = `strm_${monotonicUlid2()}`;
-if (typeof chunk === "string") {
-chunk = new TextEncoder().encode(chunk);
+catch (error45) {
+this.closeCurrentPayload(error45);
+if (!messageQueue.isTerminal) {
+messageQueue.finish(error45);
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
-
+}
+finally {
+try {
+reader.releaseLock();
+}
+catch (error45) {
+}
+}
+}
+/**
+ * Consumer: Yield messages from the queue
+ */
+async *consumeMessages(messageQueue) {
+while (true) {
+const message = await messageQueue.dequeue();
+if (message === null) {
+break;
+}
+yield message;
+}
+}
+/**
+ * Process a chunk of data through the state machine and return any complete messages.
+ *
+ * Returns an array because a single chunk can contain multiple complete messages
+ * when small messages with headers + body + boundary all fit in one network chunk.
+ * All messages must be captured and queued to maintain proper message ordering.
+ */
+write(chunk) {
+const newMessages = [];
+if (this.state === 4) {
+throw new MultipartParseError("Unexpected data after end of stream");
+}
+let index = 0;
+let chunkLength = chunk.length;
+if (this.buffer !== null) {
+const newSize = this.buffer.length + chunkLength;
+const maxAllowedSize = this.state === 2 ? this.maxHeaderSize : this.maxBoundaryBuffer;
+if (newSize > maxAllowedSize) {
+throw new MultipartParseError(`Buffer size limit exceeded: ${newSize} bytes > ${maxAllowedSize} bytes. This may indicate malformed multipart data with ${this.state === 2 ? "oversized headers" : "invalid boundaries"}.`);
+}
+const newChunk = new Uint8Array(newSize);
+newChunk.set(this.buffer, 0);
+newChunk.set(chunk, this.buffer.length);
+chunk = newChunk;
+chunkLength = chunk.length;
+this.buffer = null;
+}
+if (chunkLength === 0 && this.state === 0) {
+throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
+}
+while (true) {
+if (this.state === 3) {
+if (chunkLength - index < this.boundaryLength) {
+const remainingData = chunk.subarray(index);
+if (remainingData.length > this.maxBoundaryBuffer) {
+throw new MultipartParseError(`Boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
+}
+this.buffer = remainingData;
+break;
+}
+const boundaryIndex = this.findBoundary(chunk, index);
+if (boundaryIndex === -1) {
+const partialTailIndex = this.findPartialTailBoundary(chunk);
+if (partialTailIndex === -1) {
+this.writeBody(index === 0 ? chunk : chunk.subarray(index));
+}
+else {
+this.writeBody(chunk.subarray(index, partialTailIndex));
+const partialBoundary = chunk.subarray(partialTailIndex);
+if (partialBoundary.length > this.maxBoundaryBuffer) {
+throw new MultipartParseError(`Partial boundary too large: ${partialBoundary.length} > ${this.maxBoundaryBuffer}`);
 }
+this.buffer = partialBoundary;
+}
+break;
+}
+this.writeBody(chunk.subarray(index, boundaryIndex));
+this.finishMessage();
+index = boundaryIndex + this.boundaryLength;
+this.state = 1;
+}
+if (this.state === 1) {
+if (chunkLength - index < 2) {
+const remainingData = chunk.subarray(index);
+if (remainingData.length > this.maxBoundaryBuffer) {
+throw new MultipartParseError(`After-boundary buffer limit exceeded: ${remainingData.length} > ${this.maxBoundaryBuffer}`);
+}
+this.buffer = remainingData;
+break;
+}
+if (chunk[index] === 45 && chunk[index + 1] === 45) {
+this.state = 4;
+break;
+}
+if (chunk[index] === 13 && chunk[index + 1] === 10) {
+index += 2;
+}
+else if (chunk[index] === 10) {
+index += 1;
+}
+else {
+throw new MultipartParseError(`Invalid character after boundary: expected CRLF or LF, got 0x${chunk[index].toString(16)}`);
+}
+this.state = 2;
+}
+if (this.state === 2) {
+if (chunkLength - index < 4) {
+const remainingData = chunk.subarray(index);
+if (remainingData.length > this.maxHeaderSize) {
+throw new MultipartParseError(`Header buffer limit exceeded: ${remainingData.length} > ${this.maxHeaderSize}`);
 }
-
-
-
-
+this.buffer = remainingData;
+break;
+}
+let headerEndIndex = this.findDoubleNewline(chunk, index);
+let headerEndOffset = 4;
+if (headerEndIndex === -1) {
+const lfDoubleNewline = createSearch("\n\n");
+headerEndIndex = lfDoubleNewline(chunk, index);
+headerEndOffset = 2;
+}
+if (headerEndIndex === -1) {
+const headerData = chunk.subarray(index);
+if (headerData.length > this.maxHeaderSize) {
+throw new MultipartParseError(`Headers too large: ${headerData.length} > ${this.maxHeaderSize} bytes`);
 }
-
-
-
-
+this.buffer = headerData;
+break;
+}
+const headerBytes = chunk.subarray(index, headerEndIndex);
+this.currentHeaders = parseHeaders(headerBytes);
+const message = this.createStreamingMessage();
+newMessages.push(message);
+index = headerEndIndex + headerEndOffset;
+this.state = 3;
+continue;
+}
+if (this.state === 0) {
+if (chunkLength < this.openingBoundaryLength) {
+if (chunk.length > this.maxBoundaryBuffer) {
+throw new MultipartParseError(`Initial chunk too large for boundary detection: ${chunk.length} > ${this.maxBoundaryBuffer}`);
 }
-
-
-removeListeners();
+this.buffer = chunk;
+break;
 }
-
+const boundaryIndex = this.findOpeningBoundary(chunk);
+if (boundaryIndex !== 0) {
+throw new MultipartParseError("Invalid multipart stream: missing initial boundary");
+}
+index = this.openingBoundaryLength;
+this.state = 1;
+}
 }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
+return newMessages;
+}
+createStreamingMessage() {
+const headers = new Headers(this.currentHeaders);
+const payload = new ReadableStream({
+start: /* @__PURE__ */ __name((controller) => {
+this.currentPayloadController = controller;
+}, "start")
+});
+this.currentHeaders = new Headers();
+return {
+headers,
+payload
+};
+}
+writeBody(chunk) {
+if (this.currentPayloadController) {
+this.currentPayloadController.enqueue(chunk);
+}
+}
+finishMessage() {
+if (this.currentPayloadController) {
+this.currentPayloadController.close();
+this.currentPayloadController = null;
+}
+}
+/**
+ * Close current payload controller if open (used during cleanup)
+ * If an error is provided, forwards it to the payload consumer
+ */
+closeCurrentPayload(error45) {
+if (this.currentPayloadController) {
+try {
+if (error45) {
+this.currentPayloadController.error(error45);
+}
+else {
+this.currentPayloadController.close();
+}
+}
+catch (controllerError) {
+}
+this.currentPayloadController = null;
+}
+}
+};
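Note: for orientation, the numeric states threaded through write() above decode as follows (a reading inferred directly from its branches, not an official enum):

// 0: before the opening `--boundary`
// 1: just after a boundary ("--" => state 4 / done, CRLF or LF => state 2)
// 2: collecting part headers until \r\n\r\n (or \n\n)
// 3: streaming the part body until the next \r\n--boundary
// 4: epilogue reached; any further data is an error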
+// ../../node_modules/.pnpm/@vercel+queue@0.0.0-alpha.28/node_modules/@vercel/queue/dist/index.mjs
 var import_oidc = __toESM(require_dist(), 1);
 async function streamToBuffer2(stream) {
 let totalLength = 0;
@@ -19815,15 +19815,22 @@ var QueueClient = class {
 }
 baseUrl;
 basePath;
+customHeaders = {};
 token;
 /**
  * Create a new Vercel Queue Service client
  * @param options Client configuration options
  */
 constructor(options = {}) {
-this.baseUrl = options.baseUrl || "https://vercel-queue.com";
-this.basePath = options.basePath || "/api/v2/messages";
-this.token = options.token;
+this.baseUrl = options.baseUrl || process.env.VERCEL_QUEUE_BASE_URL || "https://vercel-queue.com";
+this.basePath = options.basePath || process.env.VERCEL_QUEUE_BASE_PATH || "/api/v2/messages";
+this.token = options.token || process.env.VERCEL_QUEUE_TOKEN;
+const VERCEL_QUEUE_HEADER_PREFIX = "VERCEL_QUEUE_HEADER_";
+this.customHeaders = Object.fromEntries(Object.entries(process.env).filter(([key]) => key.startsWith(VERCEL_QUEUE_HEADER_PREFIX)).map(([key, value]) => [
+// This allows headers to use dashes independent of shell used
+key.replace(VERCEL_QUEUE_HEADER_PREFIX, "").replaceAll("__", "-"),
+value || ""
+]));
 }
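Note: with the new constructor, extra request headers can be injected purely through the environment; `__` in the variable name maps to `-` in the header name. Example (the header name and value are hypothetical):

// VERCEL_QUEUE_HEADER_X__Proxy__Auth=secret  =>  header "X-Proxy-Auth: secret"
process.env.VERCEL_QUEUE_HEADER_X__Proxy__Auth = "secret"; // hypothetical
const client = new QueueClient();
// client.customHeaders => { "X-Proxy-Auth": "secret" },
// spread into the headers of every send/receive/ack request below.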
|
19828
19835
|
async getToken() {
|
|
19829
19836
|
if (this.token) {
|
|
@@ -19850,7 +19857,8 @@ var QueueClient = class {
|
|
|
19850
19857
|
const headers = new Headers({
|
|
19851
19858
|
Authorization: `Bearer ${await this.getToken()}`,
|
|
19852
19859
|
"Vqs-Queue-Name": queueName,
|
|
19853
|
-
"Content-Type": transport.contentType
|
|
19860
|
+
"Content-Type": transport.contentType,
|
|
19861
|
+
...this.customHeaders
|
|
19854
19862
|
});
|
|
19855
19863
|
const deploymentId = options.deploymentId || process.env.VERCEL_DEPLOYMENT_ID;
|
|
19856
19864
|
if (deploymentId) {
|
|
@@ -19865,8 +19873,8 @@ var QueueClient = class {
|
|
|
19865
19873
|
const body = transport.serialize(payload);
|
|
19866
19874
|
const response = await fetch(`${this.baseUrl}${this.basePath}`, {
|
|
19867
19875
|
method: "POST",
|
|
19868
|
-
|
|
19869
|
-
|
|
19876
|
+
body,
|
|
19877
|
+
headers
|
|
19870
19878
|
});
|
|
19871
19879
|
if (!response.ok) {
|
|
19872
19880
|
if (response.status === 400) {
|
|
@@ -19912,7 +19920,8 @@ var QueueClient = class {
|
|
|
19912
19920
|
Authorization: `Bearer ${await this.getToken()}`,
|
|
19913
19921
|
"Vqs-Queue-Name": queueName,
|
|
19914
19922
|
"Vqs-Consumer-Group": consumerGroup,
|
|
19915
|
-
Accept: "multipart/mixed"
|
|
19923
|
+
Accept: "multipart/mixed",
|
|
19924
|
+
...this.customHeaders
|
|
19916
19925
|
});
|
|
19917
19926
|
if (visibilityTimeoutSeconds !== void 0) {
|
|
19918
19927
|
headers.set("Vqs-Visibility-Timeout", visibilityTimeoutSeconds.toString());
|
|
@@ -19952,7 +19961,7 @@ var QueueClient = class {
|
|
|
19952
19961
|
}
|
|
19953
19962
|
throw new Error(`Failed to receive messages: ${response.status} ${response.statusText}`);
|
|
19954
19963
|
}
|
|
19955
|
-
for await (const multipartMessage of
|
|
19964
|
+
for await (const multipartMessage of parseMultipartStream2(response)) {
|
|
19956
19965
|
try {
|
|
19957
19966
|
const parsedHeaders = parseQueueHeaders(multipartMessage.headers);
|
|
19958
19967
|
if (!parsedHeaders) {
|
|
@@ -19979,7 +19988,8 @@ var QueueClient = class {
|
|
|
19979
19988
|
Authorization: `Bearer ${await this.getToken()}`,
|
|
19980
19989
|
"Vqs-Queue-Name": queueName,
|
|
19981
19990
|
"Vqs-Consumer-Group": consumerGroup,
|
|
19982
|
-
Accept: "multipart/mixed"
|
|
19991
|
+
Accept: "multipart/mixed",
|
|
19992
|
+
...this.customHeaders
|
|
19983
19993
|
});
|
|
19984
19994
|
if (visibilityTimeoutSeconds !== void 0) {
|
|
19985
19995
|
headers.set("Vqs-Visibility-Timeout", visibilityTimeoutSeconds.toString());
|
|
@@ -20037,7 +20047,7 @@ var QueueClient = class {
|
|
|
20037
20047
|
throw new Error("Transport is required when skipPayload is not true");
|
|
20038
20048
|
}
|
|
20039
20049
|
try {
|
|
20040
|
-
for await (const multipartMessage of
|
|
20050
|
+
for await (const multipartMessage of parseMultipartStream2(response)) {
|
|
20041
20051
|
try {
|
|
20042
20052
|
const parsedHeaders = parseQueueHeaders(multipartMessage.headers);
|
|
20043
20053
|
if (!parsedHeaders) {
|
|
@@ -20086,7 +20096,8 @@ var QueueClient = class {
|
|
|
20086
20096
|
Authorization: `Bearer ${await this.getToken()}`,
|
|
20087
20097
|
"Vqs-Queue-Name": queueName,
|
|
20088
20098
|
"Vqs-Consumer-Group": consumerGroup,
|
|
20089
|
-
"Vqs-Ticket": ticket
|
|
20099
|
+
"Vqs-Ticket": ticket,
|
|
20100
|
+
...this.customHeaders
|
|
20090
20101
|
})
|
|
20091
20102
|
});
|
|
20092
20103
|
if (!response.ok) {
|
|
@@ -20132,7 +20143,8 @@ var QueueClient = class {
|
|
|
20132
20143
|
"Vqs-Queue-Name": queueName,
|
|
20133
20144
|
"Vqs-Consumer-Group": consumerGroup,
|
|
20134
20145
|
"Vqs-Ticket": ticket,
|
|
20135
|
-
"Vqs-Visibility-Timeout": visibilityTimeoutSeconds.toString()
|
|
20146
|
+
"Vqs-Visibility-Timeout": visibilityTimeoutSeconds.toString(),
|
|
20147
|
+
...this.customHeaders
|
|
20136
20148
|
})
|
|
20137
20149
|
});
|
|
20138
20150
|
if (!response.ok) {
|
|
@@ -20277,7 +20289,6 @@ function handleCallback(handlers) {
|
|
|
20277
20289
|
__name(handleCallback, "handleCallback");
|
|
20278
20290
|
var devRouteHandlers2 = /* @__PURE__ */ new Map();
|
|
20279
20291
|
var wildcardRouteHandlers2 = /* @__PURE__ */ new Map();
|
|
20280
|
-
var routeHandlerKeys = /* @__PURE__ */ new WeakMap();
|
|
20281
20292
|
function cleanupDeadRefs(key, refs) {
|
|
20282
20293
|
const aliveRefs = refs.filter((ref) => ref.deref() !== void 0);
|
|
20283
20294
|
if (aliveRefs.length === 0) {
|
|
@@ -20293,45 +20304,14 @@ function isDevMode() {
|
|
|
20293
20304
|
}
|
|
20294
20305
|
__name(isDevMode, "isDevMode");
|
|
20295
20306
|
function registerDevRouteHandler(routeHandler, handlers) {
|
|
20296
|
-
const existingKeys = routeHandlerKeys.get(routeHandler);
|
|
20297
|
-
if (existingKeys) {
|
|
20298
|
-
const newKeys = /* @__PURE__ */ new Set();
|
|
20299
|
-
for (const topicName in handlers) {
|
|
20300
|
-
for (const consumerGroup in handlers[topicName]) {
|
|
20301
|
-
newKeys.add(`${topicName}:${consumerGroup}`);
|
|
20302
|
-
}
|
|
20303
|
-
}
|
|
20304
|
-
for (const key of existingKeys) {
|
|
20305
|
-
if (!newKeys.has(key)) {
|
|
20306
|
-
const [topicPattern] = key.split(":");
|
|
20307
|
-
if (topicPattern.includes("*")) {
|
|
20308
|
-
const refs = wildcardRouteHandlers2.get(key);
|
|
20309
|
-
if (refs) {
|
|
20310
|
-
const filteredRefs = refs.filter((ref) => ref.deref() !== routeHandler);
|
|
20311
|
-
if (filteredRefs.length === 0) {
|
|
20312
|
-
wildcardRouteHandlers2.delete(key);
|
|
20313
|
-
}
|
|
20314
|
-
else {
|
|
20315
|
-
wildcardRouteHandlers2.set(key, filteredRefs);
|
|
20316
|
-
}
|
|
20317
|
-
}
|
|
20318
|
-
}
|
|
20319
|
-
else {
|
|
20320
|
-
devRouteHandlers2.delete(key);
|
|
20321
|
-
}
|
|
20322
|
-
}
|
|
20323
|
-
}
|
|
20324
|
-
}
|
|
20325
|
-
const keys = /* @__PURE__ */ new Set();
|
|
20326
20307
|
for (const topicName in handlers) {
|
|
20327
20308
|
for (const consumerGroup in handlers[topicName]) {
|
|
20328
20309
|
const key = `${topicName}:${consumerGroup}`;
|
|
20329
|
-
keys.add(key);
|
|
20330
20310
|
if (topicName.includes("*")) {
|
|
20331
|
-
const weakRef = new WeakRef(routeHandler);
|
|
20332
20311
|
const existing = wildcardRouteHandlers2.get(key) || [];
|
|
20333
20312
|
cleanupDeadRefs(key, existing);
|
|
20334
20313
|
const cleanedRefs = wildcardRouteHandlers2.get(key) || [];
|
|
20314
|
+
const weakRef = new WeakRef(routeHandler);
|
|
20335
20315
|
cleanedRefs.push(weakRef);
|
|
20336
20316
|
wildcardRouteHandlers2.set(key, cleanedRefs);
|
|
20337
20317
|
}
|
|
@@ -20343,7 +20323,6 @@ function registerDevRouteHandler(routeHandler, handlers) {
|
|
|
20343
20323
|
}
|
|
20344
20324
|
}
|
|
20345
20325
|
}
|
|
20346
|
-
routeHandlerKeys.set(routeHandler, keys);
|
|
20347
20326
|
}
|
|
20348
20327
|
__name(registerDevRouteHandler, "registerDevRouteHandler");
|
|
20349
20328
|
function findRouteHandlersForTopic(topicName) {
|
|
@@ -20733,45 +20712,6 @@ async function send(topicName, payload, options) {
   return { messageId: result.messageId };
 }
 __name(send, "send");
-// ../world-vercel/dist/queue.js
-var MessageWrapper = object({
-  payload: QueuePayloadSchema,
-  queueName: ValidQueueName
-});
-function createQueue2() {
-  const queue = /* @__PURE__ */ __name(async (queueName, x, opts) => {
-    const encoded = MessageWrapper.encode({
-      payload: x,
-      queueName
-    });
-    const sanitizedQueueName = queueName.replace(/[^A-Za-z0-9-_]/g, "-");
-    const { messageId } = await send(sanitizedQueueName, encoded, opts);
-    return { messageId: MessageId.parse(messageId) };
-  }, "queue");
-  const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
-    return handleCallback({
-      [`${prefix}*`]: {
-        default: /* @__PURE__ */ __name((body, meta) => {
-          const { payload, queueName } = MessageWrapper.parse(body);
-          return handler(payload, {
-            queueName,
-            messageId: MessageId.parse(meta.messageId),
-            attempt: meta.deliveryCount
-          });
-        }, "default")
-      }
-    });
-  }, "createQueueHandler");
-  const getDeploymentId = /* @__PURE__ */ __name(async () => {
-    const deploymentId = process.env.VERCEL_DEPLOYMENT_ID;
-    if (!deploymentId) {
-      throw new Error("VERCEL_DEPLOYMENT_ID environment variable is not set");
-    }
-    return deploymentId;
-  }, "getDeploymentId");
-  return { queue, createQueueHandler, getDeploymentId };
-}
-__name(createQueue2, "createQueue");
 // ../world-vercel/dist/utils.js
 var import_oidc2 = __toESM(require_dist(), 1);
 var DEFAULT_RESOLVE_DATA_OPTION2 = "all";
@@ -20782,7 +20722,15 @@ function dateToStringReplacer(_key, value) {
   return value;
 }
 __name(dateToStringReplacer, "dateToStringReplacer");
-
+var getHttpUrl = /* @__PURE__ */ __name((config3) => {
+  const projectConfig = config3?.projectConfig;
+  const defaultUrl = "https://vercel-workflow.com/api";
+  const defaultProxyUrl = "https://api.vercel.com/v1/workflow";
+  const usingProxy = Boolean(config3?.baseUrl || projectConfig?.projectId && projectConfig?.teamId);
+  const baseUrl = config3?.baseUrl || (usingProxy ? defaultProxyUrl : defaultUrl);
+  return { baseUrl, usingProxy };
+}, "getHttpUrl");
+var getHeaders = /* @__PURE__ */ __name((config3) => {
   const projectConfig = config3?.projectConfig;
   const headers = new Headers(config3?.headers);
   if (projectConfig) {
@@ -20794,16 +20742,16 @@ async function getHttpConfig(config3) {
       headers.set("x-vercel-team-id", projectConfig.teamId);
     }
   }
+  return headers;
+}, "getHeaders");
+async function getHttpConfig(config3) {
+  const headers = getHeaders(config3);
   const token = config3?.token ?? await (0, import_oidc2.getVercelOidcToken)();
   if (token) {
     headers.set("Authorization", `Bearer ${token}`);
   }
-
-
-    const shouldUseProxy = projectConfig?.projectId && projectConfig?.teamId;
-    baseUrl = shouldUseProxy ? `https://api.vercel.com/v1/workflow` : "https://vercel-workflow.com/api";
-  }
-  return { baseUrl, headers };
+  const { baseUrl, usingProxy } = getHttpUrl(config3);
+  return { baseUrl, headers, usingProxy };
 }
 __name(getHttpConfig, "getHttpConfig");
 async function makeRequest({ endpoint, options = {}, config: config3 = {}, schema }) {
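The two hunks above split getHttpConfig's old inline logic into reusable helpers: getHttpUrl (base-URL and proxy resolution) and getHeaders (project headers). One behavioral change is visible in the added lines: the old code derived the proxy URL only from projectConfig, while the new helper also treats an explicitly supplied baseUrl as proxy usage and surfaces that flag to callers. Based solely on the added lines, resolution behaves as follows; the config objects are illustrative, not from the package:

getHttpUrl(undefined);
// -> { baseUrl: "https://vercel-workflow.com/api", usingProxy: false }

getHttpUrl({ projectConfig: { projectId: "prj_123", teamId: "team_456" } });
// -> { baseUrl: "https://api.vercel.com/v1/workflow", usingProxy: true }

getHttpUrl({ baseUrl: "https://proxy.example.com/workflow" });
// -> { baseUrl: "https://proxy.example.com/workflow", usingProxy: true }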
@@ -20835,6 +20783,60 @@ curl -X ${options.method} ${stringifiedHeaders} "${url2}"`);
   }
 }
 __name(makeRequest, "makeRequest");
+// ../world-vercel/dist/queue.js
+var MessageWrapper = object({
+  payload: QueuePayloadSchema,
+  queueName: ValidQueueName
+});
+function createQueue2(config3) {
+  const { baseUrl, usingProxy } = getHttpUrl(config3);
+  const headers = getHeaders(config3);
+  if (usingProxy) {
+    process.env.VERCEL_QUEUE_BASE_URL = `${baseUrl}`;
+    process.env.VERCEL_QUEUE_BASE_PATH = "/queues/v2/messages";
+    if (config3?.token) {
+      process.env.VERCEL_QUEUE_TOKEN = config3.token;
+    }
+    if (headers) {
+      headers.forEach((value, key) => {
+        const sanitizedKey = key.replaceAll("-", "__");
+        process.env[`VERCEL_QUEUE_HEADER_${sanitizedKey}`] = value;
+      });
+    }
+  }
+  const queue = /* @__PURE__ */ __name(async (queueName, x, opts) => {
+    const encoded = MessageWrapper.encode({
+      payload: x,
+      queueName
+    });
+    const sanitizedQueueName = queueName.replace(/[^A-Za-z0-9-_]/g, "-");
+    const { messageId } = await send(sanitizedQueueName, encoded, opts);
+    return { messageId: MessageId.parse(messageId) };
+  }, "queue");
+  const createQueueHandler = /* @__PURE__ */ __name((prefix, handler) => {
+    return handleCallback({
+      [`${prefix}*`]: {
+        default: /* @__PURE__ */ __name((body, meta) => {
+          const { payload, queueName } = MessageWrapper.parse(body);
+          return handler(payload, {
+            queueName,
+            messageId: MessageId.parse(meta.messageId),
+            attempt: meta.deliveryCount
+          });
+        }, "default")
+      }
+    });
+  }, "createQueueHandler");
+  const getDeploymentId = /* @__PURE__ */ __name(async () => {
+    const deploymentId = process.env.VERCEL_DEPLOYMENT_ID;
+    if (!deploymentId) {
+      throw new Error("VERCEL_DEPLOYMENT_ID environment variable is not set");
+    }
+    return deploymentId;
+  }, "getDeploymentId");
+  return { queue, createQueueHandler, getDeploymentId };
+}
+__name(createQueue2, "createQueue");
 // ../world-vercel/dist/events.js
 function filterEventData2(event, resolveData) {
   if (resolveData === "none") {
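createQueue2 moved below the HTTP helpers and now accepts the world config. When a proxy is in use, it seeds the environment that the bundled @vercel/queue client reads (VERCEL_QUEUE_BASE_URL, VERCEL_QUEUE_BASE_PATH, VERCEL_QUEUE_TOKEN, and one VERCEL_QUEUE_HEADER_* variable per header) before any message is sent — which is why createVercelWorld in the hunk below now passes config3 through. An illustrative trace, with invented values; note that Headers lowercases header names and the code rewrites "-" to "__":

createQueue2({ projectConfig: { projectId: "prj_123", teamId: "team_456" } });
// process.env.VERCEL_QUEUE_BASE_URL  === "https://api.vercel.com/v1/workflow"
// process.env.VERCEL_QUEUE_BASE_PATH === "/queues/v2/messages"
// process.env["VERCEL_QUEUE_HEADER_x__vercel__team__id"] === "team_456"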
@@ -21336,7 +21338,7 @@ __name(createStreamer2, "createStreamer");
 // ../world-vercel/dist/index.js
 function createVercelWorld(config3) {
   return {
-    ...createQueue2(),
+    ...createQueue2(config3),
     ...createStorage2(config3),
     ...createStreamer2(config3)
   };
@@ -21960,6 +21962,7 @@ function getCommonReducers(global2 = globalThis) {
   const viewToBase64 = /* @__PURE__ */ __name((value) => abToBase64(value.buffer, value.byteOffset, value.byteLength), "viewToBase64");
   return {
     ArrayBuffer: /* @__PURE__ */ __name((value) => value instanceof global2.ArrayBuffer && abToBase64(value, 0, value.byteLength), "ArrayBuffer"),
+    BigInt: /* @__PURE__ */ __name((value) => typeof value === "bigint" && value.toString(), "BigInt"),
     BigInt64Array: /* @__PURE__ */ __name((value) => value instanceof global2.BigInt64Array && viewToBase64(value), "BigInt64Array"),
     BigUint64Array: /* @__PURE__ */ __name((value) => value instanceof global2.BigUint64Array && viewToBase64(value), "BigUint64Array"),
     Date: /* @__PURE__ */ __name((value) => {
@@ -22079,6 +22082,7 @@ function getCommonRevivers(global2 = globalThis) {
   __name(reviveArrayBuffer, "reviveArrayBuffer");
   return {
     ArrayBuffer: reviveArrayBuffer,
+    BigInt: /* @__PURE__ */ __name((value) => global2.BigInt(value), "BigInt"),
     BigInt64Array: /* @__PURE__ */ __name((value) => {
       const ab = reviveArrayBuffer(value);
       return new global2.BigInt64Array(ab);
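The paired one-line additions above give the serializer first-class BigInt support: the reducer flattens a bigint to its decimal string, and the reviver reconstructs it with the host BigInt constructor, so workflow payloads can round-trip values beyond Number.MAX_SAFE_INTEGER. A minimal illustration of the reduce/revive contract implied by those two lines — the dispatch-by-type-tag machinery around them is assumed:

const n = 9007199254740993n; // one past Number.MAX_SAFE_INTEGER
const reduced = typeof n === "bigint" && n.toString(); // "9007199254740993"
const revived = BigInt(reduced);
console.log(revived === n); // true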
@@ -23237,25 +23241,6 @@ var __export = (target, all) => {
   __defProp(target, name, { get: all[name], enumerable: true });
 };
 
-// workflows/addition.ts
-var addition_exports = {};
-__export(addition_exports, {
-  addition: () => addition
-});
-async function add(num, num2) {
-  return globalThis[Symbol.for("WORKFLOW_USE_STEP")]("step//workflows/addition.ts//add")(num, num2);
-}
-__name(add, "add");
-async function addition(num, num2) {
-  const result = await add(num, num2);
-  console.log({
-    result
-  });
-  return result;
-}
-__name(addition, "addition");
-addition.workflowId = "workflow//workflows/addition.ts//addition";
-
 // workflows/noop.ts
 var noop_exports = {};
 __export(noop_exports, {
@@ -23292,10 +23277,29 @@ async function brokenWf() {
 __name(brokenWf, "brokenWf");
 brokenWf.workflowId = "workflow//workflows/noop.ts//brokenWf";
 
+// workflows/addition.ts
+var addition_exports = {};
+__export(addition_exports, {
+  addition: () => addition
+});
+async function add(num, num2) {
+  return globalThis[Symbol.for("WORKFLOW_USE_STEP")]("step//workflows/addition.ts//add")(num, num2);
+}
+__name(add, "add");
+async function addition(num, num2) {
+  const result = await add(num, num2);
+  console.log({
+    result
+  });
+  return result;
+}
+__name(addition, "addition");
+addition.workflowId = "workflow//workflows/addition.ts//addition";
+
 // virtual-entry.js
 globalThis.__private_workflows = /* @__PURE__ */ new Map();
-Object.values(addition_exports).map((item) => item?.workflowId && globalThis.__private_workflows.set(item.workflowId, item));
 Object.values(noop_exports).map((item) => item?.workflowId && globalThis.__private_workflows.set(item.workflowId, item));
+Object.values(addition_exports).map((item) => item?.workflowId && globalThis.__private_workflows.set(item.workflowId, item));
 `;
 var POST = workflowEntrypoint(workflowCode);
 // Annotate the CommonJS export names for ESM import in node: