brass-runtime 1.13.8 → 1.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -3
- package/dist/agent/cli/main.cjs +44 -43
- package/dist/agent/cli/main.js +5 -4
- package/dist/agent/cli/main.mjs +5 -4
- package/dist/agent/index.cjs +4 -3
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.js +3 -2
- package/dist/agent/index.mjs +3 -2
- package/dist/{chunk-3R7ZYRK2.mjs → chunk-3QMOKAS5.js} +9 -7
- package/dist/{chunk-ATHSSDUF.js → chunk-4NHES7VK.mjs} +113 -31
- package/dist/chunk-AR22SXML.js +1043 -0
- package/dist/chunk-BDF4AMWX.mjs +3773 -0
- package/dist/chunk-BDYEENHT.js +224 -0
- package/dist/chunk-BMH5AV44.js +3773 -0
- package/dist/chunk-ELOOF35R.mjs +131 -0
- package/dist/chunk-JFPU5GQI.mjs +1043 -0
- package/dist/{chunk-INZBKOHY.js → chunk-K6M7MDZ4.mjs} +9 -7
- package/dist/chunk-MS34J5LY.cjs +224 -0
- package/dist/{chunk-XNOTJSMZ.mjs → chunk-PPUXIH5R.js} +113 -31
- package/dist/chunk-R3R2FVLG.cjs +131 -0
- package/dist/{chunk-ZTDK2DLG.cjs → chunk-STVLQ3XD.cjs} +169 -87
- package/dist/chunk-TGIFUAK4.cjs +3773 -0
- package/dist/chunk-TO7IKXYT.js +131 -0
- package/dist/chunk-UMAZLXAB.mjs +224 -0
- package/dist/{chunk-XDINDYNA.cjs → chunk-VEZNF5GZ.cjs} +136 -134
- package/dist/chunk-XPZNXSVN.cjs +1043 -0
- package/dist/core/index.cjs +216 -0
- package/dist/core/index.d.ts +673 -0
- package/dist/core/index.js +216 -0
- package/dist/core/index.mjs +216 -0
- package/dist/{effect-ISvXPLgc.d.ts → effect-CMOQKX8y.d.ts} +202 -31
- package/dist/http/index.cjs +3177 -187
- package/dist/http/index.d.ts +1692 -9
- package/dist/http/index.js +3164 -174
- package/dist/http/index.mjs +3164 -174
- package/dist/index.cjs +936 -219
- package/dist/index.d.ts +313 -36
- package/dist/index.js +830 -113
- package/dist/index.mjs +830 -113
- package/dist/{stream-BvukHxCv.d.ts → stream-FQm9h4Mg.d.ts} +12 -4
- package/dist/tracing-DNT9jEbr.d.ts +106 -0
- package/package.json +11 -3
- package/wasm/pkg/brass_runtime_wasm_engine.d.ts +95 -16
- package/wasm/pkg/brass_runtime_wasm_engine.js +715 -15
- package/wasm/pkg/brass_runtime_wasm_engine_bg.wasm +0 -0
- package/wasm/pkg/brass_runtime_wasm_engine_bg.wasm.d.ts +78 -7
- package/dist/chunk-2P4PD6D7.cjs +0 -2557
- package/dist/chunk-7F2R7A2V.mjs +0 -2557
- package/dist/chunk-L6KKKM66.js +0 -2557
package/dist/http/index.cjs
CHANGED
|
@@ -1,16 +1,25 @@
|
|
|
1
|
-
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5;
|
|
2
2
|
|
|
3
|
-
var
|
|
3
|
+
var _chunkSTVLQ3XDcjs = require('../chunk-STVLQ3XD.cjs');
|
|
4
4
|
|
|
5
5
|
|
|
6
6
|
|
|
7
|
+
var _chunkMS34J5LYcjs = require('../chunk-MS34J5LY.cjs');
|
|
8
|
+
require('../chunk-R3R2FVLG.cjs');
|
|
7
9
|
|
|
8
10
|
|
|
9
11
|
|
|
10
12
|
|
|
11
13
|
|
|
12
14
|
|
|
13
|
-
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
var _chunkTGIFUAK4cjs = require('../chunk-TGIFUAK4.cjs');
|
|
14
23
|
|
|
15
24
|
// src/http/optics/lens.ts
|
|
16
25
|
var Lens = {
|
|
@@ -39,36 +48,594 @@ var mergeHeaders = (extra) => (req) => Lens.over(Request.headers, (h) => ({ ...h
|
|
|
39
48
|
var mergeHeadersUnder = (under) => (req) => Lens.over(Request.headers, (h) => ({ ...under, ...h }))(req);
|
|
40
49
|
var setHeaderIfMissing = (k, v) => (req) => Lens.over(Request.headers, (h) => h[k] ? h : { ...h, [k]: v })(req);
|
|
41
50
|
|
|
42
|
-
// src/http/
|
|
43
|
-
var
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
return { _tag: "Abort" };
|
|
51
|
+
// src/http/retry/wasmRetryPlanner.ts
|
|
52
|
+
var WasmRetryPlannerBridge = class {
|
|
53
|
+
|
|
54
|
+
constructor(Ctor) {
|
|
55
|
+
this.planner = new Ctor();
|
|
48
56
|
}
|
|
49
|
-
|
|
57
|
+
start(options) {
|
|
58
|
+
return this.planner.start(
|
|
59
|
+
options.nowMs,
|
|
60
|
+
options.maxRetries,
|
|
61
|
+
options.baseDelayMs,
|
|
62
|
+
options.maxDelayMs,
|
|
63
|
+
_nullishCoalesce(options.maxElapsedMs, () => ( -1)),
|
|
64
|
+
BigInt(this.seed())
|
|
65
|
+
);
|
|
66
|
+
}
|
|
67
|
+
nextDelayMs(retryId, options) {
|
|
68
|
+
const delay = this.planner.next_delay_ms(retryId, options.nowMs, options.retryable, _nullishCoalesce(options.retryAfterMs, () => ( -1)));
|
|
69
|
+
return delay < 0 ? void 0 : delay;
|
|
70
|
+
}
|
|
71
|
+
drop(retryId) {
|
|
72
|
+
this.planner.drop_state(retryId);
|
|
73
|
+
}
|
|
74
|
+
stats() {
|
|
75
|
+
return {
|
|
76
|
+
live: this.planner.metric_u64(0),
|
|
77
|
+
planned: this.planner.metric_u64(1),
|
|
78
|
+
exhausted: this.planner.metric_u64(2),
|
|
79
|
+
dropped: this.planner.metric_u64(3)
|
|
80
|
+
};
|
|
81
|
+
}
|
|
82
|
+
seed() {
|
|
83
|
+
return Math.floor(Math.random() * Number.MAX_SAFE_INTEGER);
|
|
84
|
+
}
|
|
85
|
+
};
|
|
86
|
+
function makeWasmRetryPlanner() {
|
|
87
|
+
const mod = _chunkTGIFUAK4cjs.resolveWasmModule.call(void 0, );
|
|
88
|
+
const Ctor = _optionalChain([mod, 'optionalAccess', _ => _.BrassWasmRetryPlanner]);
|
|
89
|
+
if (!Ctor) throw new Error("brass-runtime wasm retry planner is not available. Run npm run build:wasm first.");
|
|
90
|
+
return new WasmRetryPlannerBridge(Ctor);
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
// src/http/retry/retry.ts
|
|
94
|
+
var defaultRetryableMethods = ["GET", "HEAD", "OPTIONS"];
|
|
95
|
+
var defaultRetryOnStatus = (s) => s === 408 || s === 429 || s === 500 || s === 502 || s === 503 || s === 504;
|
|
96
|
+
var defaultRetryOnError = (e) => e._tag === "FetchError" || e._tag === "Timeout" || e._tag === "PoolTimeout";
|
|
97
|
+
var clamp = (n, min, max) => Math.max(min, Math.min(max, n));
|
|
98
|
+
var backoffDelayMs = (attempt, base, cap) => {
|
|
99
|
+
const b = Math.max(0, base);
|
|
100
|
+
const c = Math.max(0, cap);
|
|
101
|
+
const exp = b * Math.pow(2, attempt);
|
|
102
|
+
const lim = clamp(exp, 0, c);
|
|
103
|
+
return Math.floor(Math.random() * lim);
|
|
104
|
+
};
|
|
105
|
+
var headerCI = (h, name) => {
|
|
106
|
+
const k = Object.keys(h).find((x) => x.toLowerCase() === name.toLowerCase());
|
|
107
|
+
return k ? h[k] : void 0;
|
|
108
|
+
};
|
|
109
|
+
var retryAfterMs = (headers) => {
|
|
110
|
+
const v = _optionalChain([headerCI, 'call', _2 => _2(headers, "retry-after"), 'optionalAccess', _3 => _3.trim, 'call', _4 => _4()]);
|
|
111
|
+
if (!v) return void 0;
|
|
112
|
+
const secs = Number(v);
|
|
113
|
+
if (Number.isFinite(secs)) return Math.max(0, Math.floor(secs * 1e3));
|
|
114
|
+
const t = Date.parse(v);
|
|
115
|
+
if (Number.isFinite(t)) return Math.max(0, t - Date.now());
|
|
116
|
+
return void 0;
|
|
117
|
+
};
|
|
118
|
+
var normalizeRetryBudget = (ms) => {
|
|
119
|
+
if (ms === void 0 || !Number.isFinite(ms)) return void 0;
|
|
120
|
+
return Math.max(0, Math.floor(ms));
|
|
121
|
+
};
|
|
122
|
+
var resolveEffectivePolicy = (req, basePolicy) => {
|
|
123
|
+
const override = req.retry;
|
|
124
|
+
if (override === false) return null;
|
|
125
|
+
if (override === void 0) return basePolicy;
|
|
126
|
+
return {
|
|
127
|
+
...basePolicy,
|
|
128
|
+
...override.maxRetries !== void 0 && { maxRetries: override.maxRetries },
|
|
129
|
+
...override.baseDelayMs !== void 0 && { baseDelayMs: override.baseDelayMs },
|
|
130
|
+
...override.maxDelayMs !== void 0 && { maxDelayMs: override.maxDelayMs },
|
|
131
|
+
...override.retryOnStatus !== void 0 && { retryOnStatus: override.retryOnStatus }
|
|
132
|
+
};
|
|
133
|
+
};
|
|
134
|
+
var resolveRetryEngine = (p) => {
|
|
135
|
+
if (p.engine !== void 0) {
|
|
136
|
+
if (p.engine === "ts" || p.engine === "wasm") return p.engine;
|
|
137
|
+
throw new Error(`brass-runtime retry engine must be 'ts' or 'wasm'; received '${String(p.engine)}'`);
|
|
138
|
+
}
|
|
139
|
+
if (p.wasm === true) return "wasm";
|
|
140
|
+
if (p.wasm === false) return "ts";
|
|
141
|
+
return "ts";
|
|
142
|
+
};
|
|
143
|
+
var withRetry = (p) => (next) => {
|
|
144
|
+
const retryOnMethods = _nullishCoalesce(p.retryOnMethods, () => ( defaultRetryableMethods));
|
|
145
|
+
const retryEngine = resolveRetryEngine(p);
|
|
146
|
+
const wasmPlanner = retryEngine === "wasm" ? makeWasmRetryPlanner() : void 0;
|
|
147
|
+
const isMethodRetryable = (req) => retryOnMethods.includes(req.method);
|
|
148
|
+
const nextDelay = (ep, epMaxElapsedMs, retryId, attempt, startedAt, retryable, retryAfter) => {
|
|
149
|
+
if (!retryable) return void 0;
|
|
150
|
+
if (wasmPlanner && retryId !== void 0) {
|
|
151
|
+
return wasmPlanner.nextDelayMs(retryId, {
|
|
152
|
+
nowMs: performance.now(),
|
|
153
|
+
retryable,
|
|
154
|
+
retryAfterMs: retryAfter
|
|
155
|
+
});
|
|
156
|
+
}
|
|
157
|
+
const remainingBudget = epMaxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : epMaxElapsedMs - (performance.now() - startedAt);
|
|
158
|
+
if (remainingBudget <= 0) return void 0;
|
|
159
|
+
const rawDelay = retryAfter === void 0 ? backoffDelayMs(attempt, ep.baseDelayMs, ep.maxDelayMs) : Math.min(retryAfter, ep.maxDelayMs);
|
|
160
|
+
return Math.max(0, Math.min(rawDelay, remainingBudget));
|
|
161
|
+
};
|
|
162
|
+
const sleepWithCleanup = (ms, onCancel) => {
|
|
163
|
+
return _chunkTGIFUAK4cjs.asyncEffect.call(void 0, (_env, cb) => {
|
|
164
|
+
const delay = Math.max(0, Math.floor(ms));
|
|
165
|
+
const id = setTimeout(() => cb({ _tag: "Success", value: void 0 }), delay);
|
|
166
|
+
return () => {
|
|
167
|
+
clearTimeout(id);
|
|
168
|
+
onCancel();
|
|
169
|
+
};
|
|
170
|
+
});
|
|
171
|
+
};
|
|
172
|
+
const loop = (req, attempt, startedAt, retryId, ep, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop) => {
|
|
173
|
+
if (!isMethodRetryable(req)) return next(req);
|
|
174
|
+
const effectiveReq = attempt > 0 ? (() => {
|
|
175
|
+
const boostedReq = { ...req };
|
|
176
|
+
boostedReq.priority = Math.max(0, originalPriority - 1);
|
|
177
|
+
return boostedReq;
|
|
178
|
+
})() : req;
|
|
179
|
+
const remainingBudget = () => epMaxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : epMaxElapsedMs - (performance.now() - startedAt);
|
|
180
|
+
return _chunkTGIFUAK4cjs.asyncFold.call(void 0,
|
|
181
|
+
next(effectiveReq),
|
|
182
|
+
(e) => {
|
|
183
|
+
if (e._tag === "Abort" || e._tag === "BadUrl" || e._tag === "PoolRejected" || e._tag === "CircuitBreakerOpen") {
|
|
184
|
+
safeDrop(retryId);
|
|
185
|
+
return _chunkTGIFUAK4cjs.asyncFail.call(void 0, e);
|
|
186
|
+
}
|
|
187
|
+
const retryable = attempt < ep.maxRetries && epRetryOnError(e) && remainingBudget() > 0;
|
|
188
|
+
const d = nextDelay(ep, epMaxElapsedMs, retryId, attempt, startedAt, retryable);
|
|
189
|
+
if (d === void 0 || d <= 0 && epMaxElapsedMs !== void 0) {
|
|
190
|
+
safeDrop(retryId);
|
|
191
|
+
return _chunkTGIFUAK4cjs.asyncFail.call(void 0, e);
|
|
192
|
+
}
|
|
193
|
+
if (ep.onRetry) {
|
|
194
|
+
ep.onRetry({
|
|
195
|
+
attempt,
|
|
196
|
+
delayMs: d,
|
|
197
|
+
error: e,
|
|
198
|
+
status: void 0,
|
|
199
|
+
url: req.url,
|
|
200
|
+
method: req.method,
|
|
201
|
+
timestamp: Date.now()
|
|
202
|
+
});
|
|
203
|
+
}
|
|
204
|
+
return _chunkTGIFUAK4cjs.asyncFlatMap.call(void 0, sleepWithCleanup(d, () => safeDrop(retryId)), () => loop(req, attempt + 1, startedAt, retryId, ep, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop));
|
|
205
|
+
},
|
|
206
|
+
(w) => {
|
|
207
|
+
const retryable = attempt < ep.maxRetries && epRetryOnStatus(w.status) && remainingBudget() > 0;
|
|
208
|
+
const ra = ep.respectRetryAfter === false ? void 0 : retryAfterMs(w.headers);
|
|
209
|
+
const d = nextDelay(ep, epMaxElapsedMs, retryId, attempt, startedAt, retryable, ra);
|
|
210
|
+
if (d === void 0 || d <= 0 && epMaxElapsedMs !== void 0) {
|
|
211
|
+
safeDrop(retryId);
|
|
212
|
+
return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, w);
|
|
213
|
+
}
|
|
214
|
+
if (ep.onRetry) {
|
|
215
|
+
ep.onRetry({
|
|
216
|
+
attempt,
|
|
217
|
+
delayMs: d,
|
|
218
|
+
error: void 0,
|
|
219
|
+
status: w.status,
|
|
220
|
+
url: req.url,
|
|
221
|
+
method: req.method,
|
|
222
|
+
timestamp: Date.now()
|
|
223
|
+
});
|
|
224
|
+
}
|
|
225
|
+
return _chunkTGIFUAK4cjs.asyncFlatMap.call(void 0, sleepWithCleanup(d, () => safeDrop(retryId)), () => loop(req, attempt + 1, startedAt, retryId, ep, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop));
|
|
226
|
+
}
|
|
227
|
+
);
|
|
228
|
+
};
|
|
229
|
+
return (req) => {
|
|
230
|
+
const effectivePolicy = resolveEffectivePolicy(req, p);
|
|
231
|
+
if (effectivePolicy === null) return next(req);
|
|
232
|
+
if (!isMethodRetryable(req)) return next(req);
|
|
233
|
+
const epRetryOnStatus = _nullishCoalesce(effectivePolicy.retryOnStatus, () => ( defaultRetryOnStatus));
|
|
234
|
+
const epRetryOnError = _nullishCoalesce(effectivePolicy.retryOnError, () => ( defaultRetryOnError));
|
|
235
|
+
const epMaxElapsedMs = normalizeRetryBudget(effectivePolicy.maxElapsedMs);
|
|
236
|
+
const originalPriority = _nullishCoalesce(req.priority, () => ( 5));
|
|
237
|
+
const startedAt = performance.now();
|
|
238
|
+
const retryId = _optionalChain([wasmPlanner, 'optionalAccess', _5 => _5.start, 'call', _6 => _6({
|
|
239
|
+
nowMs: startedAt,
|
|
240
|
+
maxRetries: effectivePolicy.maxRetries,
|
|
241
|
+
baseDelayMs: effectivePolicy.baseDelayMs,
|
|
242
|
+
maxDelayMs: effectivePolicy.maxDelayMs,
|
|
243
|
+
maxElapsedMs: epMaxElapsedMs
|
|
244
|
+
})]);
|
|
245
|
+
let plannerDropped = false;
|
|
246
|
+
const safeDrop = (id) => {
|
|
247
|
+
if (id !== void 0 && !plannerDropped) {
|
|
248
|
+
plannerDropped = true;
|
|
249
|
+
_optionalChain([wasmPlanner, 'optionalAccess', _7 => _7.drop, 'call', _8 => _8(id)]);
|
|
250
|
+
}
|
|
251
|
+
};
|
|
252
|
+
return loop(req, 0, startedAt, retryId, effectivePolicy, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop);
|
|
253
|
+
};
|
|
254
|
+
};
|
|
255
|
+
|
|
256
|
+
// src/http/wasmPermitPool.ts
|
|
257
|
+
var DECISION_RUN_NOW = 0;
|
|
258
|
+
var DECISION_QUEUED = 1;
|
|
259
|
+
var WasmHttpPermitPoolBridge = (_class = class {
|
|
260
|
+
|
|
261
|
+
__init() {this.keyCache = /* @__PURE__ */ new Map()}
|
|
262
|
+
constructor(Ctor, options) {;_class.prototype.__init.call(this);
|
|
263
|
+
this.pool = new Ctor(options.concurrency, options.maxQueue, toU64(options.queueTimeoutMs));
|
|
264
|
+
}
|
|
265
|
+
acquire(key, subjectId, nowMs = Date.now()) {
|
|
266
|
+
const keyId = this.internKey(key);
|
|
267
|
+
const decision = this.pool.acquire(subjectId, keyId, toU64(nowMs));
|
|
268
|
+
const permitId = this.pool.last_permit_id();
|
|
269
|
+
if (decision === DECISION_RUN_NOW) return { kind: "run", keyId, permitId };
|
|
270
|
+
if (decision === DECISION_QUEUED) return { kind: "queued", keyId, permitId };
|
|
271
|
+
return { kind: "rejected", keyId, permitId };
|
|
272
|
+
}
|
|
273
|
+
release(keyId, nowMs = Date.now()) {
|
|
274
|
+
const ptr = this.pool.release(keyId, toU64(nowMs));
|
|
275
|
+
return this.readEvents(ptr, this.pool.permit_events_len());
|
|
276
|
+
}
|
|
277
|
+
cancel(permitId) {
|
|
278
|
+
this.pool.cancel(permitId);
|
|
279
|
+
}
|
|
280
|
+
advanceTime(nowMs = Date.now()) {
|
|
281
|
+
const ptr = this.pool.advance_time(toU64(nowMs));
|
|
282
|
+
return this.readEvents(ptr, this.pool.permit_events_len());
|
|
283
|
+
}
|
|
284
|
+
nextDeadlineMs() {
|
|
285
|
+
return this.pool.next_deadline_ms();
|
|
286
|
+
}
|
|
287
|
+
stats() {
|
|
288
|
+
return {
|
|
289
|
+
running: this.pool.metric_u64(0),
|
|
290
|
+
queued: this.pool.metric_u64(1),
|
|
291
|
+
acquired: this.pool.metric_u64(2),
|
|
292
|
+
released: this.pool.metric_u64(3),
|
|
293
|
+
rejected: this.pool.metric_u64(4),
|
|
294
|
+
queueTimeouts: this.pool.metric_u64(5),
|
|
295
|
+
keys: this.pool.metric_u64(6)
|
|
296
|
+
};
|
|
297
|
+
}
|
|
298
|
+
internKey(key) {
|
|
299
|
+
const normalized = key.trim().slice(0, 160) || "global";
|
|
300
|
+
let id = this.keyCache.get(normalized);
|
|
301
|
+
if (id === void 0) {
|
|
302
|
+
id = this.pool.intern_key(normalized);
|
|
303
|
+
this.keyCache.set(normalized, id);
|
|
304
|
+
}
|
|
305
|
+
return id;
|
|
306
|
+
}
|
|
307
|
+
readEvents(ptr, len) {
|
|
308
|
+
if (ptr === 0 || len <= 1) return [];
|
|
309
|
+
const words = new Uint32Array(this.pool.memory().buffer, ptr, len);
|
|
310
|
+
const count = words[0] >>> 0;
|
|
311
|
+
const out = [];
|
|
312
|
+
for (let i = 0; i < count; i++) {
|
|
313
|
+
const base = 1 + i * 3;
|
|
314
|
+
if (base + 2 >= words.length) break;
|
|
315
|
+
out.push({
|
|
316
|
+
subjectId: words[base] >>> 0,
|
|
317
|
+
permitId: words[base + 1] >>> 0,
|
|
318
|
+
keyId: words[base + 2] >>> 0
|
|
319
|
+
});
|
|
320
|
+
}
|
|
321
|
+
return out;
|
|
322
|
+
}
|
|
323
|
+
}, _class);
|
|
324
|
+
function makeWasmHttpPermitPool(options) {
|
|
325
|
+
const mod = _chunkTGIFUAK4cjs.resolveWasmModule.call(void 0, );
|
|
326
|
+
const Ctor = _optionalChain([mod, 'optionalAccess', _9 => _9.BrassWasmHttpPermitPool]);
|
|
327
|
+
if (!Ctor) throw new Error("brass-runtime wasm HTTP permit pool is not available. Run npm run build:wasm first.");
|
|
328
|
+
return new WasmHttpPermitPoolBridge(Ctor, options);
|
|
329
|
+
}
|
|
330
|
+
function toU64(value) {
|
|
331
|
+
return BigInt(Math.max(0, Math.floor(value)));
|
|
332
|
+
}
|
|
333
|
+
|
|
334
|
+
// src/http/pool.ts
|
|
335
|
+
var DEFAULT_CONCURRENCY = 64;
|
|
336
|
+
var DEFAULT_MAX_QUEUE = 256;
|
|
337
|
+
var clampInt = (n, fallback, min) => {
|
|
338
|
+
if (n === void 0 || !Number.isFinite(n)) return fallback;
|
|
339
|
+
return Math.max(min, Math.floor(n));
|
|
50
340
|
};
|
|
51
|
-
var
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
341
|
+
var queueTimeoutError = (key, timeoutMs) => ({
|
|
342
|
+
_tag: "PoolTimeout",
|
|
343
|
+
key,
|
|
344
|
+
timeoutMs,
|
|
345
|
+
message: `HTTP pool '${key}' did not grant a slot within ${timeoutMs}ms`
|
|
346
|
+
});
|
|
347
|
+
var poolRejectedError = (key, maxQueue) => ({
|
|
348
|
+
_tag: "PoolRejected",
|
|
349
|
+
key,
|
|
350
|
+
limit: maxQueue,
|
|
351
|
+
message: `HTTP pool '${key}' queue is full`
|
|
352
|
+
});
|
|
353
|
+
var abortError = () => ({ _tag: "Abort" });
|
|
354
|
+
function resolveHttpPoolEngine(config) {
|
|
355
|
+
if (config.engine !== void 0) {
|
|
356
|
+
if (config.engine === "ts" || config.engine === "wasm") return config.engine;
|
|
357
|
+
throw new Error(`brass-runtime HTTP pool engine must be 'ts' or 'wasm'; received '${String(config.engine)}'`);
|
|
358
|
+
}
|
|
359
|
+
if (config.wasm === true) return "wasm";
|
|
360
|
+
if (config.wasm === false) return "ts";
|
|
361
|
+
return "ts";
|
|
362
|
+
}
|
|
363
|
+
function resolveHttpPoolKey(resolver, req, url) {
|
|
364
|
+
const custom = _optionalChain([req, 'access', _10 => _10.poolKey, 'optionalAccess', _11 => _11.trim, 'call', _12 => _12()]);
|
|
365
|
+
if (custom) return custom.slice(0, 160);
|
|
366
|
+
const r = _nullishCoalesce(resolver, () => ( "origin"));
|
|
367
|
+
if (typeof r === "function") return r(req, url).trim().slice(0, 160) || "global";
|
|
368
|
+
if (r === "global") return "global";
|
|
369
|
+
if (r === "host") return url.host;
|
|
370
|
+
return url.origin;
|
|
371
|
+
}
|
|
372
|
+
var HttpConcurrencyPool = (_class2 = class {
|
|
373
|
+
__init2() {this.states = /* @__PURE__ */ new Map()}
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
__init3() {this.wasmWaiters = /* @__PURE__ */ new Map()}
|
|
380
|
+
|
|
381
|
+
__init4() {this.nextSubjectId = 1}
|
|
382
|
+
constructor(config = {}) {;_class2.prototype.__init2.call(this);_class2.prototype.__init3.call(this);_class2.prototype.__init4.call(this);
|
|
383
|
+
this.concurrency = clampInt(config.concurrency, DEFAULT_CONCURRENCY, 1);
|
|
384
|
+
this.maxQueue = clampInt(config.maxQueue, DEFAULT_MAX_QUEUE, 0);
|
|
385
|
+
this.queueTimeoutMs = config.queueTimeoutMs !== void 0 && Number.isFinite(config.queueTimeoutMs) ? Math.max(0, Math.floor(config.queueTimeoutMs)) : void 0;
|
|
386
|
+
this.keyResolver = config.key;
|
|
387
|
+
const engine = resolveHttpPoolEngine(config);
|
|
388
|
+
this.wasm = engine === "wasm" ? makeWasmHttpPermitPool({
|
|
389
|
+
concurrency: this.concurrency,
|
|
390
|
+
maxQueue: this.maxQueue,
|
|
391
|
+
queueTimeoutMs: _nullishCoalesce(this.queueTimeoutMs, () => ( 0))
|
|
392
|
+
}) : void 0;
|
|
393
|
+
}
|
|
394
|
+
acquire(key, signal) {
|
|
395
|
+
return this.wasm ? this.acquireWasm(key, signal) : this.acquireJs(key, signal);
|
|
396
|
+
}
|
|
397
|
+
stats() {
|
|
398
|
+
const keys = Array.from(this.states.values()).map((state) => ({
|
|
399
|
+
key: state.key,
|
|
400
|
+
running: state.running,
|
|
401
|
+
queued: state.queue.length,
|
|
402
|
+
concurrency: this.concurrency,
|
|
403
|
+
maxQueue: this.maxQueue,
|
|
404
|
+
acquired: state.acquired,
|
|
405
|
+
released: state.released,
|
|
406
|
+
rejected: state.rejected,
|
|
407
|
+
queueTimeouts: state.queueTimeouts,
|
|
408
|
+
abortedWhileQueued: state.abortedWhileQueued
|
|
409
|
+
})).sort((a, b) => b.running + b.queued - (a.running + a.queued) || a.key.localeCompare(b.key));
|
|
410
|
+
return keys.reduce((acc, key) => ({
|
|
411
|
+
running: acc.running + key.running,
|
|
412
|
+
queued: acc.queued + key.queued,
|
|
413
|
+
acquired: acc.acquired + key.acquired,
|
|
414
|
+
released: acc.released + key.released,
|
|
415
|
+
rejected: acc.rejected + key.rejected,
|
|
416
|
+
queueTimeouts: acc.queueTimeouts + key.queueTimeouts,
|
|
417
|
+
abortedWhileQueued: acc.abortedWhileQueued + key.abortedWhileQueued,
|
|
418
|
+
wasm: _optionalChain([this, 'access', _13 => _13.wasm, 'optionalAccess', _14 => _14.stats, 'call', _15 => _15()]),
|
|
419
|
+
keys: acc.keys.concat(key)
|
|
420
|
+
}), {
|
|
421
|
+
running: 0,
|
|
422
|
+
queued: 0,
|
|
423
|
+
acquired: 0,
|
|
424
|
+
released: 0,
|
|
425
|
+
rejected: 0,
|
|
426
|
+
queueTimeouts: 0,
|
|
427
|
+
abortedWhileQueued: 0,
|
|
428
|
+
...this.wasm ? { wasm: this.wasm.stats() } : {},
|
|
429
|
+
keys: []
|
|
430
|
+
});
|
|
431
|
+
}
|
|
432
|
+
acquireJs(key, signal) {
|
|
433
|
+
const state = this.getState(key);
|
|
434
|
+
if (signal.aborted) return Promise.reject(abortError());
|
|
435
|
+
if (state.running < this.concurrency) {
|
|
436
|
+
state.running++;
|
|
437
|
+
state.acquired++;
|
|
438
|
+
return Promise.resolve(this.makeLease(state));
|
|
439
|
+
}
|
|
440
|
+
if (state.queue.length >= this.maxQueue) {
|
|
441
|
+
state.rejected++;
|
|
442
|
+
return Promise.reject(poolRejectedError(key, this.maxQueue));
|
|
443
|
+
}
|
|
444
|
+
return new Promise((resolve, reject) => {
|
|
445
|
+
const waiter = { signal, resolve, reject };
|
|
446
|
+
const removeWaiter = () => this.removeWaiter(state, waiter);
|
|
447
|
+
const cleanup = () => this.cleanupWaiter(waiter);
|
|
448
|
+
waiter.abort = () => {
|
|
449
|
+
cleanup();
|
|
450
|
+
removeWaiter();
|
|
451
|
+
state.abortedWhileQueued++;
|
|
452
|
+
reject(abortError());
|
|
453
|
+
};
|
|
454
|
+
signal.addEventListener("abort", waiter.abort, { once: true });
|
|
455
|
+
if (this.queueTimeoutMs !== void 0 && this.queueTimeoutMs > 0) {
|
|
456
|
+
waiter.timer = setTimeout(() => {
|
|
457
|
+
cleanup();
|
|
458
|
+
removeWaiter();
|
|
459
|
+
state.queueTimeouts++;
|
|
460
|
+
reject(queueTimeoutError(key, this.queueTimeoutMs));
|
|
461
|
+
}, this.queueTimeoutMs);
|
|
462
|
+
}
|
|
463
|
+
state.queue.push(waiter);
|
|
464
|
+
});
|
|
465
|
+
}
|
|
466
|
+
acquireWasm(key, signal) {
|
|
467
|
+
const wasm = this.wasm;
|
|
468
|
+
const state = this.getState(key);
|
|
469
|
+
if (signal.aborted) return Promise.reject(abortError());
|
|
470
|
+
const subjectId = this.allocateSubjectId();
|
|
471
|
+
const decision = wasm.acquire(key, subjectId);
|
|
472
|
+
if (decision.kind === "run") {
|
|
473
|
+
state.running++;
|
|
474
|
+
state.acquired++;
|
|
475
|
+
return Promise.resolve(this.makeLease(state, decision.keyId));
|
|
476
|
+
}
|
|
477
|
+
if (decision.kind === "rejected") {
|
|
478
|
+
state.rejected++;
|
|
479
|
+
return Promise.reject(poolRejectedError(key, this.maxQueue));
|
|
480
|
+
}
|
|
481
|
+
return new Promise((resolve, reject) => {
|
|
482
|
+
const waiter = { signal, resolve, reject };
|
|
483
|
+
const removeWaiter = () => this.removeWaiter(state, waiter);
|
|
484
|
+
const cleanup = () => this.cleanupWaiter(waiter);
|
|
485
|
+
waiter.abort = () => {
|
|
486
|
+
cleanup();
|
|
487
|
+
removeWaiter();
|
|
488
|
+
wasm.cancel(decision.permitId);
|
|
489
|
+
this.wasmWaiters.delete(decision.permitId);
|
|
490
|
+
state.abortedWhileQueued++;
|
|
491
|
+
reject(abortError());
|
|
492
|
+
};
|
|
493
|
+
signal.addEventListener("abort", waiter.abort, { once: true });
|
|
494
|
+
state.queue.push(waiter);
|
|
495
|
+
this.wasmWaiters.set(decision.permitId, { waiter, state, keyId: decision.keyId });
|
|
496
|
+
this.scheduleWasmTimeoutPump();
|
|
497
|
+
});
|
|
498
|
+
}
|
|
499
|
+
getState(key) {
|
|
500
|
+
const k = key.trim().slice(0, 160) || "global";
|
|
501
|
+
const existing = this.states.get(k);
|
|
502
|
+
if (existing) return existing;
|
|
503
|
+
const created = {
|
|
504
|
+
key: k,
|
|
505
|
+
running: 0,
|
|
506
|
+
queue: [],
|
|
507
|
+
acquired: 0,
|
|
508
|
+
released: 0,
|
|
509
|
+
rejected: 0,
|
|
510
|
+
queueTimeouts: 0,
|
|
511
|
+
abortedWhileQueued: 0
|
|
512
|
+
};
|
|
513
|
+
this.states.set(k, created);
|
|
514
|
+
return created;
|
|
515
|
+
}
|
|
516
|
+
makeLease(state, wasmKeyId) {
|
|
517
|
+
let released = false;
|
|
518
|
+
return {
|
|
519
|
+
key: state.key,
|
|
520
|
+
release: () => {
|
|
521
|
+
if (released) return;
|
|
522
|
+
released = true;
|
|
523
|
+
if (state.running > 0) state.running--;
|
|
524
|
+
state.released++;
|
|
525
|
+
if (this.wasm && wasmKeyId !== void 0) {
|
|
526
|
+
this.handleWasmGrants(this.wasm.release(wasmKeyId));
|
|
527
|
+
this.scheduleWasmTimeoutPump();
|
|
528
|
+
return;
|
|
529
|
+
}
|
|
530
|
+
this.drain(state);
|
|
531
|
+
}
|
|
58
532
|
};
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
);
|
|
533
|
+
}
|
|
534
|
+
drain(state) {
|
|
535
|
+
while (state.running < this.concurrency && state.queue.length > 0) {
|
|
536
|
+
const waiter = state.queue.shift();
|
|
537
|
+
this.cleanupWaiter(waiter);
|
|
538
|
+
if (waiter.signal.aborted) {
|
|
539
|
+
state.abortedWhileQueued++;
|
|
540
|
+
waiter.reject(abortError());
|
|
541
|
+
continue;
|
|
542
|
+
}
|
|
543
|
+
state.running++;
|
|
544
|
+
state.acquired++;
|
|
545
|
+
waiter.resolve(this.makeLease(state));
|
|
546
|
+
}
|
|
547
|
+
}
|
|
548
|
+
handleWasmGrants(events) {
|
|
549
|
+
for (const event of events) {
|
|
550
|
+
const pending = this.wasmWaiters.get(event.permitId);
|
|
551
|
+
if (!pending) continue;
|
|
552
|
+
this.wasmWaiters.delete(event.permitId);
|
|
553
|
+
this.cleanupWaiter(pending.waiter);
|
|
554
|
+
this.removeWaiter(pending.state, pending.waiter);
|
|
555
|
+
if (pending.waiter.signal.aborted) {
|
|
556
|
+
pending.state.abortedWhileQueued++;
|
|
557
|
+
pending.waiter.reject(abortError());
|
|
558
|
+
continue;
|
|
559
|
+
}
|
|
560
|
+
pending.state.running++;
|
|
561
|
+
pending.state.acquired++;
|
|
562
|
+
pending.waiter.resolve(this.makeLease(pending.state, event.keyId));
|
|
563
|
+
}
|
|
564
|
+
}
|
|
565
|
+
handleWasmTimeouts(events) {
|
|
566
|
+
for (const event of events) {
|
|
567
|
+
const pending = this.wasmWaiters.get(event.permitId);
|
|
568
|
+
if (!pending) continue;
|
|
569
|
+
this.wasmWaiters.delete(event.permitId);
|
|
570
|
+
this.cleanupWaiter(pending.waiter);
|
|
571
|
+
this.removeWaiter(pending.state, pending.waiter);
|
|
572
|
+
pending.state.queueTimeouts++;
|
|
573
|
+
pending.waiter.reject(queueTimeoutError(pending.state.key, _nullishCoalesce(this.queueTimeoutMs, () => ( 0))));
|
|
574
|
+
}
|
|
575
|
+
}
|
|
576
|
+
scheduleWasmTimeoutPump() {
|
|
577
|
+
if (!this.wasm) return;
|
|
578
|
+
if (this.wasmTimer !== void 0) clearTimeout(this.wasmTimer);
|
|
579
|
+
this.wasmTimer = void 0;
|
|
580
|
+
const next = this.wasm.nextDeadlineMs();
|
|
581
|
+
if (!Number.isFinite(next) || next < 0) return;
|
|
582
|
+
const delay = Math.max(0, Math.min(2 ** 31 - 1, Math.floor(next - Date.now())));
|
|
583
|
+
this.wasmTimer = setTimeout(() => {
|
|
584
|
+
this.wasmTimer = void 0;
|
|
585
|
+
if (!this.wasm) return;
|
|
586
|
+
this.handleWasmTimeouts(this.wasm.advanceTime());
|
|
587
|
+
this.scheduleWasmTimeoutPump();
|
|
588
|
+
}, delay);
|
|
589
|
+
if (typeof this.wasmTimer.unref === "function") this.wasmTimer.unref();
|
|
590
|
+
}
|
|
591
|
+
cleanupWaiter(waiter) {
|
|
592
|
+
if (waiter.timer !== void 0) {
|
|
593
|
+
clearTimeout(waiter.timer);
|
|
594
|
+
waiter.timer = void 0;
|
|
595
|
+
}
|
|
596
|
+
if (waiter.abort) {
|
|
597
|
+
waiter.signal.removeEventListener("abort", waiter.abort);
|
|
598
|
+
waiter.abort = void 0;
|
|
599
|
+
}
|
|
600
|
+
}
|
|
601
|
+
removeWaiter(state, waiter) {
|
|
602
|
+
const idx = state.queue.indexOf(waiter);
|
|
603
|
+
if (idx >= 0) state.queue.splice(idx, 1);
|
|
604
|
+
}
|
|
605
|
+
allocateSubjectId() {
|
|
606
|
+
const id = this.nextSubjectId >>> 0;
|
|
607
|
+
this.nextSubjectId = this.nextSubjectId + 1 >>> 0;
|
|
608
|
+
if (this.nextSubjectId === 0) this.nextSubjectId = 1;
|
|
609
|
+
return id === 0 ? this.allocateSubjectId() : id;
|
|
610
|
+
}
|
|
611
|
+
}, _class2);
|
|
63
612
|
|
|
64
613
|
// src/http/client.ts
|
|
65
|
-
var
|
|
66
|
-
|
|
67
|
-
|
|
614
|
+
var emptyStats = () => ({
|
|
615
|
+
inFlight: 0,
|
|
616
|
+
started: 0,
|
|
617
|
+
succeeded: 0,
|
|
618
|
+
failed: 0,
|
|
619
|
+
aborted: 0,
|
|
620
|
+
timedOut: 0,
|
|
621
|
+
poolRejected: 0,
|
|
622
|
+
poolTimeouts: 0
|
|
68
623
|
});
|
|
69
|
-
var
|
|
70
|
-
|
|
71
|
-
|
|
624
|
+
var decorate = (run, stats = emptyStats) => Object.assign(((req) => run(req)), {
|
|
625
|
+
with: (mw) => decorate(mw(run), stats),
|
|
626
|
+
stats
|
|
627
|
+
});
|
|
628
|
+
var withMiddleware = (mw) => (c) => decorate(mw(c), c.stats);
|
|
629
|
+
var decorateStream = (run, stats = emptyStats) => Object.assign(((req) => run(req)), { stats });
|
|
630
|
+
var isTaggedHttpError = (e) => {
|
|
631
|
+
if (typeof e !== "object" || e === null || !("_tag" in e)) return false;
|
|
632
|
+
const tag = e._tag;
|
|
633
|
+
return tag === "Abort" || tag === "BadUrl" || tag === "FetchError" || tag === "Timeout" || tag === "PoolRejected" || tag === "PoolTimeout";
|
|
634
|
+
};
|
|
635
|
+
var isAbortError = (e) => typeof e === "object" && e !== null && "name" in e && e.name === "AbortError";
|
|
636
|
+
var normalizeHttpError = (e) => {
|
|
637
|
+
if (isTaggedHttpError(e)) return e;
|
|
638
|
+
if (isAbortError(e)) return { _tag: "Abort" };
|
|
72
639
|
return { _tag: "FetchError", message: String(e) };
|
|
73
640
|
};
|
|
74
641
|
var normalizeHeadersInit = (h) => {
|
|
@@ -84,176 +651,286 @@ var normalizeHeadersInit = (h) => {
|
|
|
84
651
|
};
|
|
85
652
|
var normalizeRequest = (defaultHeaders) => (req0) => {
|
|
86
653
|
let req = Object.keys(defaultHeaders).length ? mergeHeadersUnder(defaultHeaders)(req0) : req0;
|
|
87
|
-
const initHeaders = normalizeHeadersInit(_optionalChain([req0, 'access',
|
|
654
|
+
const initHeaders = normalizeHeadersInit(_optionalChain([req0, 'access', _16 => _16.init, 'optionalAccess', _17 => _17.headers]));
|
|
88
655
|
if (initHeaders && Object.keys(initHeaders).length) {
|
|
89
656
|
req = mergeHeadersUnder(initHeaders)(req);
|
|
90
657
|
}
|
|
91
658
|
return req;
|
|
92
659
|
};
|
|
660
|
+
var resolvePositiveTimeout = (value) => {
|
|
661
|
+
if (value === void 0 || !Number.isFinite(value)) return void 0;
|
|
662
|
+
const n = Math.floor(value);
|
|
663
|
+
return n > 0 ? n : void 0;
|
|
664
|
+
};
|
|
665
|
+
var makeHttpStats = (pool) => {
|
|
666
|
+
const stats = {
|
|
667
|
+
inFlight: 0,
|
|
668
|
+
started: 0,
|
|
669
|
+
succeeded: 0,
|
|
670
|
+
failed: 0,
|
|
671
|
+
aborted: 0,
|
|
672
|
+
timedOut: 0,
|
|
673
|
+
poolRejected: 0,
|
|
674
|
+
poolTimeouts: 0
|
|
675
|
+
};
|
|
676
|
+
const onStart = () => {
|
|
677
|
+
stats.inFlight++;
|
|
678
|
+
stats.started++;
|
|
679
|
+
};
|
|
680
|
+
const onFinish = (finish) => {
|
|
681
|
+
if (stats.inFlight > 0) stats.inFlight--;
|
|
682
|
+
stats.lastDurationMs = finish.durationMs;
|
|
683
|
+
if (finish.outcome === "success") {
|
|
684
|
+
stats.succeeded++;
|
|
685
|
+
return;
|
|
686
|
+
}
|
|
687
|
+
if (finish.outcome === "interrupt") {
|
|
688
|
+
stats.aborted++;
|
|
689
|
+
return;
|
|
690
|
+
}
|
|
691
|
+
if (finish.outcome === "timeout") {
|
|
692
|
+
stats.timedOut++;
|
|
693
|
+
return;
|
|
694
|
+
}
|
|
695
|
+
const err = normalizeHttpError(finish.error);
|
|
696
|
+
switch (err._tag) {
|
|
697
|
+
case "Abort":
|
|
698
|
+
stats.aborted++;
|
|
699
|
+
return;
|
|
700
|
+
case "Timeout":
|
|
701
|
+
stats.timedOut++;
|
|
702
|
+
return;
|
|
703
|
+
case "PoolRejected":
|
|
704
|
+
stats.poolRejected++;
|
|
705
|
+
stats.failed++;
|
|
706
|
+
return;
|
|
707
|
+
case "PoolTimeout":
|
|
708
|
+
stats.poolTimeouts++;
|
|
709
|
+
stats.failed++;
|
|
710
|
+
return;
|
|
711
|
+
default:
|
|
712
|
+
stats.failed++;
|
|
713
|
+
return;
|
|
714
|
+
}
|
|
715
|
+
};
|
|
716
|
+
const snapshot = () => ({
|
|
717
|
+
...stats,
|
|
718
|
+
...pool ? { pool: pool.stats() } : {}
|
|
719
|
+
});
|
|
720
|
+
return { onStart, onFinish, snapshot };
|
|
721
|
+
};
|
|
722
|
+
var makePool = (cfg) => cfg.pool === void 0 || cfg.pool === false ? void 0 : new HttpConcurrencyPool(cfg.pool);
|
|
723
|
+
var resolveRequestUrl = (req, baseUrl) => {
|
|
724
|
+
try {
|
|
725
|
+
return new URL(req.url, baseUrl);
|
|
726
|
+
} catch (e2) {
|
|
727
|
+
return { _tag: "BadUrl", message: `URL inv\xE1lida: ${req.url}` };
|
|
728
|
+
}
|
|
729
|
+
};
|
|
730
|
+
var headersOf = (res) => {
|
|
731
|
+
const headers = {};
|
|
732
|
+
res.headers.forEach((v, k) => headers[k] = v);
|
|
733
|
+
return headers;
|
|
734
|
+
};
|
|
735
|
+
var fetchLabel = (req, url) => `http:${req.method}:${url.origin}`;
|
|
736
|
+
var timeoutReason = (req, url, timeoutMs) => ({
|
|
737
|
+
_tag: "Timeout",
|
|
738
|
+
timeoutMs,
|
|
739
|
+
phase: "request",
|
|
740
|
+
message: `HTTP ${req.method} ${url.origin} timed out after ${timeoutMs}ms`
|
|
741
|
+
});
|
|
742
|
+
var linkAbortSignals = (runtimeSignal, requestSignal) => {
|
|
743
|
+
if (!requestSignal) return { signal: runtimeSignal, cleanup: () => void 0 };
|
|
744
|
+
const controller = new AbortController();
|
|
745
|
+
const abort = (source) => {
|
|
746
|
+
try {
|
|
747
|
+
controller.abort(source.reason);
|
|
748
|
+
} catch (e3) {
|
|
749
|
+
controller.abort();
|
|
750
|
+
}
|
|
751
|
+
};
|
|
752
|
+
const abortFromRuntime = () => abort(runtimeSignal);
|
|
753
|
+
const abortFromRequest = () => abort(requestSignal);
|
|
754
|
+
if (runtimeSignal.aborted) abortFromRuntime();
|
|
755
|
+
else runtimeSignal.addEventListener("abort", abortFromRuntime, { once: true });
|
|
756
|
+
if (requestSignal.aborted) abortFromRequest();
|
|
757
|
+
else requestSignal.addEventListener("abort", abortFromRequest, { once: true });
|
|
758
|
+
return {
|
|
759
|
+
signal: controller.signal,
|
|
760
|
+
cleanup: () => {
|
|
761
|
+
runtimeSignal.removeEventListener("abort", abortFromRuntime);
|
|
762
|
+
requestSignal.removeEventListener("abort", abortFromRequest);
|
|
763
|
+
}
|
|
764
|
+
};
|
|
765
|
+
};
|
|
93
766
|
function makeHttpStream(cfg = {}) {
|
|
94
767
|
const baseUrl = _nullishCoalesce(cfg.baseUrl, () => ( ""));
|
|
95
768
|
const defaultHeaders = _nullishCoalesce(cfg.headers, () => ( {}));
|
|
96
769
|
const normalize = normalizeRequest(defaultHeaders);
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
770
|
+
const pool = makePool(cfg);
|
|
771
|
+
const metrics = makeHttpStats(pool);
|
|
772
|
+
const run = (req0) => {
|
|
773
|
+
const req = normalize(req0);
|
|
774
|
+
const url = resolveRequestUrl(req, baseUrl);
|
|
775
|
+
if (!(url instanceof URL)) return _chunkTGIFUAK4cjs.asyncFail.call(void 0, url);
|
|
776
|
+
const timeoutMs = resolvePositiveTimeout(_nullishCoalesce(req.timeoutMs, () => ( cfg.timeoutMs)));
|
|
777
|
+
return _chunkTGIFUAK4cjs.fromPromiseAbortable.call(void 0,
|
|
778
|
+
async (signal) => {
|
|
779
|
+
let lease;
|
|
780
|
+
const linkedSignal = linkAbortSignals(signal, _optionalChain([req, 'access', _18 => _18.init, 'optionalAccess', _19 => _19.signal]));
|
|
781
|
+
let cleanupTransferredToBody = false;
|
|
782
|
+
try {
|
|
783
|
+
if (pool) {
|
|
784
|
+
const key = resolveHttpPoolKey(pool.keyResolver, req, url);
|
|
785
|
+
lease = await pool.acquire(key, linkedSignal.signal);
|
|
786
|
+
}
|
|
787
|
+
const started = performance.now();
|
|
788
|
+
const res = await fetch(url, {
|
|
789
|
+
..._nullishCoalesce(req.init, () => ( {})),
|
|
790
|
+
method: req.method,
|
|
791
|
+
headers: Request.headers.get(req),
|
|
792
|
+
body: req.body,
|
|
793
|
+
signal: linkedSignal.signal
|
|
794
|
+
});
|
|
795
|
+
const headers = headersOf(res);
|
|
796
|
+
const body = _chunkSTVLQ3XDcjs.streamFromReadableStream.call(void 0, res.body, normalizeHttpError, {
|
|
797
|
+
signal: linkedSignal.signal,
|
|
798
|
+
onRelease: linkedSignal.cleanup
|
|
799
|
+
});
|
|
800
|
+
cleanupTransferredToBody = res.body !== null;
|
|
801
|
+
_optionalChain([lease, 'optionalAccess', _20 => _20.release, 'call', _21 => _21()]);
|
|
802
|
+
lease = void 0;
|
|
803
|
+
return {
|
|
804
|
+
status: res.status,
|
|
805
|
+
statusText: res.statusText,
|
|
806
|
+
headers,
|
|
807
|
+
body,
|
|
808
|
+
ms: Math.round(performance.now() - started)
|
|
809
|
+
};
|
|
810
|
+
} finally {
|
|
811
|
+
if (!cleanupTransferredToBody) {
|
|
812
|
+
linkedSignal.cleanup();
|
|
813
|
+
}
|
|
814
|
+
_optionalChain([lease, 'optionalAccess', _22 => _22.release, 'call', _23 => _23()]);
|
|
815
|
+
}
|
|
816
|
+
},
|
|
817
|
+
normalizeHttpError,
|
|
818
|
+
{
|
|
819
|
+
label: fetchLabel(req, url),
|
|
820
|
+
timeoutMs,
|
|
821
|
+
timeoutReason: timeoutMs ? () => timeoutReason(req, url, timeoutMs) : void 0,
|
|
822
|
+
onStart: metrics.onStart,
|
|
823
|
+
onFinish: metrics.onFinish
|
|
824
|
+
}
|
|
825
|
+
);
|
|
826
|
+
};
|
|
827
|
+
return decorateStream(run, metrics.snapshot);
|
|
128
828
|
}
|
|
129
829
|
function makeHttp(cfg = {}) {
|
|
130
830
|
const baseUrl = _nullishCoalesce(cfg.baseUrl, () => ( ""));
|
|
131
831
|
const defaultHeaders = _nullishCoalesce(cfg.headers, () => ( {}));
|
|
132
832
|
const normalize = normalizeRequest(defaultHeaders);
|
|
133
|
-
const
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
833
|
+
const pool = makePool(cfg);
|
|
834
|
+
const metrics = makeHttpStats(pool);
|
|
835
|
+
const run = (req0) => {
|
|
836
|
+
const req = normalize(req0);
|
|
837
|
+
const url = resolveRequestUrl(req, baseUrl);
|
|
838
|
+
if (!(url instanceof URL)) return _chunkTGIFUAK4cjs.asyncFail.call(void 0, url);
|
|
839
|
+
const timeoutMs = resolvePositiveTimeout(_nullishCoalesce(req.timeoutMs, () => ( cfg.timeoutMs)));
|
|
840
|
+
return _chunkTGIFUAK4cjs.fromPromiseAbortable.call(void 0,
|
|
841
|
+
async (signal) => {
|
|
842
|
+
let lease;
|
|
843
|
+
const linkedSignal = linkAbortSignals(signal, _optionalChain([req, 'access', _24 => _24.init, 'optionalAccess', _25 => _25.signal]));
|
|
844
|
+
try {
|
|
845
|
+
if (pool) {
|
|
846
|
+
const key = resolveHttpPoolKey(pool.keyResolver, req, url);
|
|
847
|
+
lease = await pool.acquire(key, linkedSignal.signal);
|
|
848
|
+
}
|
|
849
|
+
const started = performance.now();
|
|
850
|
+
const res = await fetch(url, {
|
|
851
|
+
..._nullishCoalesce(req.init, () => ( {})),
|
|
852
|
+
method: req.method,
|
|
853
|
+
headers: Request.headers.get(req),
|
|
854
|
+
body: req.body,
|
|
855
|
+
signal: linkedSignal.signal
|
|
856
|
+
});
|
|
857
|
+
const bodyText = await res.text();
|
|
858
|
+
const headers = headersOf(res);
|
|
859
|
+
return {
|
|
860
|
+
status: res.status,
|
|
861
|
+
statusText: res.statusText,
|
|
862
|
+
headers,
|
|
863
|
+
bodyText,
|
|
864
|
+
ms: Math.round(performance.now() - started)
|
|
865
|
+
};
|
|
866
|
+
} finally {
|
|
867
|
+
linkedSignal.cleanup();
|
|
868
|
+
_optionalChain([lease, 'optionalAccess', _26 => _26.release, 'call', _27 => _27()]);
|
|
869
|
+
}
|
|
870
|
+
},
|
|
871
|
+
normalizeHttpError,
|
|
872
|
+
{
|
|
873
|
+
label: fetchLabel(req, url),
|
|
874
|
+
timeoutMs,
|
|
875
|
+
timeoutReason: timeoutMs ? () => timeoutReason(req, url, timeoutMs) : void 0,
|
|
876
|
+
onStart: metrics.onStart,
|
|
877
|
+
onFinish: metrics.onFinish
|
|
878
|
+
}
|
|
879
|
+
);
|
|
880
|
+
};
|
|
881
|
+
return decorate(run, metrics.snapshot);
|
|
165
882
|
}
|
|
166
|
-
var
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
var backoffDelayMs = (attempt, base, cap) => {
|
|
170
|
-
const exp = base * Math.pow(2, attempt);
|
|
171
|
-
const lim = clamp(exp, 0, cap);
|
|
172
|
-
return Math.floor(Math.random() * lim);
|
|
173
|
-
};
|
|
174
|
-
var retryAfterMs = (headers) => {
|
|
175
|
-
const key = Object.keys(headers).find((k) => k.toLowerCase() === "retry-after");
|
|
176
|
-
if (!key) return void 0;
|
|
177
|
-
const v = _optionalChain([headers, 'access', _3 => _3[key], 'optionalAccess', _4 => _4.trim, 'call', _5 => _5()]);
|
|
178
|
-
if (!v) return void 0;
|
|
179
|
-
const secs = Number(v);
|
|
180
|
-
if (Number.isFinite(secs)) return Math.max(0, Math.floor(secs * 1e3));
|
|
181
|
-
const t = Date.parse(v);
|
|
182
|
-
if (Number.isFinite(t)) return Math.max(0, t - Date.now());
|
|
183
|
-
return void 0;
|
|
184
|
-
};
|
|
185
|
-
var withRetryStream = (p) => (next) => ((req) => {
|
|
186
|
-
const loop = (attempt) => _chunk2P4PD6D7cjs.asyncFold.call(void 0,
|
|
187
|
-
next(req),
|
|
188
|
-
(e) => {
|
|
189
|
-
if (e._tag === "Abort" || e._tag === "BadUrl") return _chunk2P4PD6D7cjs.asyncFail.call(void 0, e);
|
|
190
|
-
const canRetry = attempt < p.maxRetries && (_nullishCoalesce(p.retryOnError, () => ( defaultRetryOnError)))(e);
|
|
191
|
-
if (!canRetry) return _chunk2P4PD6D7cjs.asyncFail.call(void 0, e);
|
|
192
|
-
const d = backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs);
|
|
193
|
-
return _chunk2P4PD6D7cjs.asyncFlatMap.call(void 0, sleepMs(d), () => loop(attempt + 1));
|
|
194
|
-
},
|
|
195
|
-
(w) => {
|
|
196
|
-
const canRetry = attempt < p.maxRetries && (_nullishCoalesce(p.retryOnStatus, () => ( defaultRetryOnStatus)))(w.status);
|
|
197
|
-
if (!canRetry) return _chunk2P4PD6D7cjs.asyncSucceed.call(void 0, w);
|
|
198
|
-
const ra = retryAfterMs(w.headers);
|
|
199
|
-
const d = _nullishCoalesce(ra, () => ( backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs)));
|
|
200
|
-
return _chunk2P4PD6D7cjs.asyncFlatMap.call(void 0, sleepMs(d), () => loop(attempt + 1));
|
|
201
|
-
}
|
|
202
|
-
);
|
|
203
|
-
return loop(0);
|
|
204
|
-
});
|
|
205
|
-
|
|
206
|
-
// src/http/retry/retry.ts
|
|
207
|
-
var defaultRetryableMethods = ["GET", "HEAD", "OPTIONS"];
|
|
208
|
-
var defaultRetryOnStatus2 = (s) => s === 408 || s === 429 || s === 500 || s === 502 || s === 503 || s === 504;
|
|
209
|
-
var defaultRetryOnError2 = (e) => e._tag === "FetchError";
|
|
210
|
-
var clamp2 = (n, min, max) => Math.max(min, Math.min(max, n));
|
|
211
|
-
var backoffDelayMs2 = (attempt, base, cap) => {
|
|
212
|
-
const b = Math.max(0, base);
|
|
213
|
-
const c = Math.max(0, cap);
|
|
214
|
-
const exp = b * Math.pow(2, attempt);
|
|
215
|
-
const lim = clamp2(exp, 0, c);
|
|
216
|
-
return Math.floor(Math.random() * lim);
|
|
217
|
-
};
|
|
218
|
-
var headerCI = (h, name) => {
|
|
219
|
-
const k = Object.keys(h).find((x) => x.toLowerCase() === name.toLowerCase());
|
|
220
|
-
return k ? h[k] : void 0;
|
|
221
|
-
};
|
|
222
|
-
var retryAfterMs2 = (headers) => {
|
|
223
|
-
const v = _optionalChain([headerCI, 'call', _6 => _6(headers, "retry-after"), 'optionalAccess', _7 => _7.trim, 'call', _8 => _8()]);
|
|
224
|
-
if (!v) return void 0;
|
|
225
|
-
const secs = Number(v);
|
|
226
|
-
if (Number.isFinite(secs)) return Math.max(0, Math.floor(secs * 1e3));
|
|
227
|
-
const t = Date.parse(v);
|
|
228
|
-
if (Number.isFinite(t)) return Math.max(0, t - Date.now());
|
|
229
|
-
return void 0;
|
|
230
|
-
};
|
|
231
|
-
var withRetry = (p) => (next) => {
|
|
883
|
+
var withRetryStream = (p) => (next) => {
|
|
884
|
+
const retryOnStatus = _nullishCoalesce(p.retryOnStatus, () => ( defaultRetryOnStatus));
|
|
885
|
+
const retryOnError = _nullishCoalesce(p.retryOnError, () => ( defaultRetryOnError));
|
|
232
886
|
const retryOnMethods = _nullishCoalesce(p.retryOnMethods, () => ( defaultRetryableMethods));
|
|
233
|
-
const
|
|
234
|
-
const
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
887
|
+
const maxElapsedMs = normalizeRetryBudget(p.maxElapsedMs);
|
|
888
|
+
const run = (req) => {
|
|
889
|
+
if (!retryOnMethods.includes(req.method)) return next(req);
|
|
890
|
+
const startedAt = performance.now();
|
|
891
|
+
const remainingBudget = () => maxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : maxElapsedMs - (performance.now() - startedAt);
|
|
892
|
+
const delayWithinBudget = (delayMs) => Math.max(0, Math.min(delayMs, remainingBudget()));
|
|
893
|
+
const loop = (attempt) => _chunkTGIFUAK4cjs.asyncFold.call(void 0,
|
|
239
894
|
next(req),
|
|
240
895
|
(e) => {
|
|
241
|
-
if (e._tag === "Abort" || e._tag === "BadUrl") return
|
|
242
|
-
const canRetry = attempt < p.maxRetries && retryOnError(e);
|
|
243
|
-
if (!canRetry) return
|
|
244
|
-
const d =
|
|
245
|
-
|
|
896
|
+
if (e._tag === "Abort" || e._tag === "BadUrl" || e._tag === "PoolRejected") return _chunkTGIFUAK4cjs.asyncFail.call(void 0, e);
|
|
897
|
+
const canRetry = attempt < p.maxRetries && retryOnError(e) && remainingBudget() > 0;
|
|
898
|
+
if (!canRetry) return _chunkTGIFUAK4cjs.asyncFail.call(void 0, e);
|
|
899
|
+
const d = delayWithinBudget(backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs));
|
|
900
|
+
if (d <= 0 && maxElapsedMs !== void 0) return _chunkTGIFUAK4cjs.asyncFail.call(void 0, e);
|
|
901
|
+
_optionalChain([p, 'access', _28 => _28.onRetry, 'optionalCall', _29 => _29({
|
|
902
|
+
attempt,
|
|
903
|
+
delayMs: d,
|
|
904
|
+
error: e,
|
|
905
|
+
status: void 0,
|
|
906
|
+
url: req.url,
|
|
907
|
+
method: req.method,
|
|
908
|
+
timestamp: Date.now()
|
|
909
|
+
})]);
|
|
910
|
+
return _chunkTGIFUAK4cjs.asyncFlatMap.call(void 0, _chunkMS34J5LYcjs.sleep.call(void 0, d), () => loop(attempt + 1));
|
|
246
911
|
},
|
|
247
912
|
(w) => {
|
|
248
|
-
const canRetry = attempt < p.maxRetries && retryOnStatus(w.status);
|
|
249
|
-
if (!canRetry) return
|
|
250
|
-
const ra =
|
|
251
|
-
const
|
|
252
|
-
|
|
913
|
+
const canRetry = attempt < p.maxRetries && retryOnStatus(w.status) && remainingBudget() > 0;
|
|
914
|
+
if (!canRetry) return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, w);
|
|
915
|
+
const ra = p.respectRetryAfter === false ? void 0 : retryAfterMs(w.headers);
|
|
916
|
+
const rawDelay = ra === void 0 ? backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs) : Math.min(ra, p.maxDelayMs);
|
|
917
|
+
const d = delayWithinBudget(rawDelay);
|
|
918
|
+
if (d <= 0 && maxElapsedMs !== void 0) return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, w);
|
|
919
|
+
_optionalChain([p, 'access', _30 => _30.onRetry, 'optionalCall', _31 => _31({
|
|
920
|
+
attempt,
|
|
921
|
+
delayMs: d,
|
|
922
|
+
error: void 0,
|
|
923
|
+
status: w.status,
|
|
924
|
+
url: req.url,
|
|
925
|
+
method: req.method,
|
|
926
|
+
timestamp: Date.now()
|
|
927
|
+
})]);
|
|
928
|
+
return _chunkTGIFUAK4cjs.asyncFlatMap.call(void 0, _chunkMS34J5LYcjs.sleep.call(void 0, d), () => loop(attempt + 1));
|
|
253
929
|
}
|
|
254
930
|
);
|
|
931
|
+
return loop(0);
|
|
255
932
|
};
|
|
256
|
-
return (
|
|
933
|
+
return decorateStream(run, next.stats);
|
|
257
934
|
};
|
|
258
935
|
|
|
259
936
|
// src/http/httpClient.ts
|
|
@@ -266,12 +943,14 @@ var resolveFinalUrl = (baseUrl, url) => {
|
|
|
266
943
|
};
|
|
267
944
|
var createHttpCore = (cfg = {}) => {
|
|
268
945
|
const wire = makeHttp(cfg);
|
|
269
|
-
const withPromise = (eff) =>
|
|
946
|
+
const withPromise = (eff) => _chunkTGIFUAK4cjs.withAsyncPromise.call(void 0, (e, env) => _chunkTGIFUAK4cjs.toPromise.call(void 0, e, env))(eff);
|
|
270
947
|
const requestRaw = (req) => wire(req);
|
|
271
948
|
const splitInit = (init) => {
|
|
272
|
-
const { headers, ...rest } = _nullishCoalesce(init, () => ( {}));
|
|
949
|
+
const { headers, timeoutMs, poolKey, ...rest } = _nullishCoalesce(init, () => ( {}));
|
|
273
950
|
return {
|
|
274
951
|
headers: normalizeHeadersInit(headers),
|
|
952
|
+
timeoutMs: typeof timeoutMs === "number" ? timeoutMs : void 0,
|
|
953
|
+
poolKey: typeof poolKey === "string" ? poolKey : void 0,
|
|
275
954
|
init: rest
|
|
276
955
|
};
|
|
277
956
|
};
|
|
@@ -282,6 +961,8 @@ var createHttpCore = (cfg = {}) => {
|
|
|
282
961
|
method,
|
|
283
962
|
url,
|
|
284
963
|
...body && body.length > 0 ? { body } : {},
|
|
964
|
+
...s.timeoutMs !== void 0 ? { timeoutMs: s.timeoutMs } : {},
|
|
965
|
+
...s.poolKey !== void 0 ? { poolKey: s.poolKey } : {},
|
|
285
966
|
init: s.init
|
|
286
967
|
};
|
|
287
968
|
return applyInitHeaders(s.headers)(req);
|
|
@@ -312,12 +993,12 @@ function httpClient(cfg = {}) {
|
|
|
312
993
|
const post = (url, body, init) => request(core.buildReq("POST", url, init, body));
|
|
313
994
|
const getText = (url, init) => {
|
|
314
995
|
const req = core.buildReq("GET", url, init);
|
|
315
|
-
return core.withPromise(
|
|
996
|
+
return core.withPromise(_chunkTGIFUAK4cjs.mapTryAsync.call(void 0, requestRaw(req), (w) => core.toResponse(w, w.bodyText)));
|
|
316
997
|
};
|
|
317
998
|
const getJson = (url, init) => {
|
|
318
999
|
const base = core.buildReq("GET", url, init);
|
|
319
1000
|
const req = setHeaderIfMissing("accept", "application/json")(base);
|
|
320
|
-
return core.withPromise(
|
|
1001
|
+
return core.withPromise(_chunkTGIFUAK4cjs.mapTryAsync.call(void 0, requestRaw(req), (w) => core.toResponse(w, JSON.parse(w.bodyText))));
|
|
321
1002
|
};
|
|
322
1003
|
const postJson = (url, bodyObj, init) => {
|
|
323
1004
|
const base = core.buildReq("POST", url, init, JSON.stringify(_nullishCoalesce(bodyObj, () => ( {}))));
|
|
@@ -325,7 +1006,7 @@ function httpClient(cfg = {}) {
|
|
|
325
1006
|
setHeaderIfMissing("accept", "application/json")(base)
|
|
326
1007
|
);
|
|
327
1008
|
return core.withPromise(
|
|
328
|
-
|
|
1009
|
+
_chunkTGIFUAK4cjs.mapTryAsync.call(void 0, requestRaw(req), (w) => core.toResponse(w, JSON.parse(w.bodyText)))
|
|
329
1010
|
);
|
|
330
1011
|
};
|
|
331
1012
|
return {
|
|
@@ -337,7 +1018,8 @@ function httpClient(cfg = {}) {
|
|
|
337
1018
|
postJson,
|
|
338
1019
|
with: (mw) => make(wire.with(mw)),
|
|
339
1020
|
withRetry: (p) => make(wire.with(withRetry(p))),
|
|
340
|
-
wire
|
|
1021
|
+
wire,
|
|
1022
|
+
stats: () => wire.stats()
|
|
341
1023
|
};
|
|
342
1024
|
};
|
|
343
1025
|
return make(core.wire);
|
|
@@ -353,7 +1035,7 @@ function httpClientWithMeta(cfg = {}) {
|
|
|
353
1035
|
const request = (req) => {
|
|
354
1036
|
const startedAt = Date.now();
|
|
355
1037
|
return core.withPromise(
|
|
356
|
-
|
|
1038
|
+
_chunkTGIFUAK4cjs.mapTryAsync.call(void 0, core.requestRaw(req), (w) => ({
|
|
357
1039
|
wire: w,
|
|
358
1040
|
meta: mkMeta(req, w, startedAt)
|
|
359
1041
|
}))
|
|
@@ -374,7 +1056,7 @@ function httpClientWithMeta(cfg = {}) {
|
|
|
374
1056
|
);
|
|
375
1057
|
const startedAt = Date.now();
|
|
376
1058
|
return core.withPromise(
|
|
377
|
-
|
|
1059
|
+
_chunkTGIFUAK4cjs.mapTryAsync.call(void 0, core.requestRaw(req), (w) => ({
|
|
378
1060
|
wire: w,
|
|
379
1061
|
response: core.toResponse(w, JSON.parse(w.bodyText)),
|
|
380
1062
|
meta: mkMeta(req, w, startedAt)
|
|
@@ -385,7 +1067,7 @@ function httpClientWithMeta(cfg = {}) {
|
|
|
385
1067
|
const req = core.buildReq("GET", url, init);
|
|
386
1068
|
const startedAt = Date.now();
|
|
387
1069
|
return core.withPromise(
|
|
388
|
-
|
|
1070
|
+
_chunkTGIFUAK4cjs.mapTryAsync.call(void 0, core.requestRaw(req), (w) => ({
|
|
389
1071
|
wire: w,
|
|
390
1072
|
response: core.toResponse(w, w.bodyText),
|
|
391
1073
|
meta: mkMeta(req, w, startedAt)
|
|
@@ -397,7 +1079,7 @@ function httpClientWithMeta(cfg = {}) {
|
|
|
397
1079
|
const req = setHeaderIfMissing("accept", "application/json")(base);
|
|
398
1080
|
const startedAt = Date.now();
|
|
399
1081
|
return core.withPromise(
|
|
400
|
-
|
|
1082
|
+
_chunkTGIFUAK4cjs.mapTryAsync.call(void 0, core.requestRaw(req), (w) => ({
|
|
401
1083
|
wire: w,
|
|
402
1084
|
response: core.toResponse(w, JSON.parse(w.bodyText)),
|
|
403
1085
|
meta: mkMeta(req, w, startedAt)
|
|
@@ -422,7 +1104,7 @@ function httpClientWithMeta(cfg = {}) {
|
|
|
422
1104
|
function httpClientStream(cfg = {}) {
|
|
423
1105
|
const wire = makeHttpStream(cfg);
|
|
424
1106
|
const make = (w) => {
|
|
425
|
-
const withPromise = (eff) =>
|
|
1107
|
+
const withPromise = (eff) => _chunkTGIFUAK4cjs.withAsyncPromise.call(void 0, (e, env) => _chunkTGIFUAK4cjs.toPromise.call(void 0, e, env))(eff);
|
|
426
1108
|
const request = (req) => withPromise(w(req));
|
|
427
1109
|
const getStream = (url, init) => {
|
|
428
1110
|
const base = { method: "GET", url, init };
|
|
@@ -435,11 +1117,2319 @@ function httpClientStream(cfg = {}) {
|
|
|
435
1117
|
get: getStream,
|
|
436
1118
|
with: (mw) => make(mw(w)),
|
|
437
1119
|
withRetry: (p) => make(withRetryStream(p)(w)),
|
|
438
|
-
wire: w
|
|
1120
|
+
wire: w,
|
|
1121
|
+
stats: () => w.stats()
|
|
439
1122
|
};
|
|
440
1123
|
};
|
|
441
1124
|
return make(wire);
|
|
442
1125
|
}
|
|
1126
|
+
|
|
1127
|
+
// src/http/effectRunner.ts
|
|
1128
|
+
function registerHttpEffect(effect, env, cb) {
|
|
1129
|
+
let done = false;
|
|
1130
|
+
let currentCancel;
|
|
1131
|
+
const finish = (exit) => {
|
|
1132
|
+
if (done) return;
|
|
1133
|
+
done = true;
|
|
1134
|
+
currentCancel = void 0;
|
|
1135
|
+
cb(exit);
|
|
1136
|
+
};
|
|
1137
|
+
const run = (current, cont) => {
|
|
1138
|
+
if (done) return;
|
|
1139
|
+
switch (current._tag) {
|
|
1140
|
+
case "Succeed":
|
|
1141
|
+
cont({ _tag: "Success", value: current.value });
|
|
1142
|
+
return;
|
|
1143
|
+
case "Fail":
|
|
1144
|
+
cont({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail(current.error) });
|
|
1145
|
+
return;
|
|
1146
|
+
case "Sync":
|
|
1147
|
+
try {
|
|
1148
|
+
cont({ _tag: "Success", value: current.thunk(env) });
|
|
1149
|
+
} catch (e) {
|
|
1150
|
+
cont({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.die(e) });
|
|
1151
|
+
}
|
|
1152
|
+
return;
|
|
1153
|
+
case "Async": {
|
|
1154
|
+
const cancel = current.register(env, (exit) => {
|
|
1155
|
+
currentCancel = void 0;
|
|
1156
|
+
if (done) return;
|
|
1157
|
+
cont(exit);
|
|
1158
|
+
});
|
|
1159
|
+
currentCancel = typeof cancel === "function" ? cancel : void 0;
|
|
1160
|
+
return;
|
|
1161
|
+
}
|
|
1162
|
+
case "FlatMap":
|
|
1163
|
+
run(current.first, (exit) => {
|
|
1164
|
+
if (done) return;
|
|
1165
|
+
if (exit._tag === "Failure") {
|
|
1166
|
+
cont(exit);
|
|
1167
|
+
return;
|
|
1168
|
+
}
|
|
1169
|
+
try {
|
|
1170
|
+
run(current.andThen(exit.value), cont);
|
|
1171
|
+
} catch (e) {
|
|
1172
|
+
cont({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.die(e) });
|
|
1173
|
+
}
|
|
1174
|
+
});
|
|
1175
|
+
return;
|
|
1176
|
+
case "Fold":
|
|
1177
|
+
run(current.first, (exit) => {
|
|
1178
|
+
if (done) return;
|
|
1179
|
+
try {
|
|
1180
|
+
if (exit._tag === "Success") {
|
|
1181
|
+
run(current.onSuccess(exit.value), cont);
|
|
1182
|
+
return;
|
|
1183
|
+
}
|
|
1184
|
+
if (exit.cause._tag === "Fail") {
|
|
1185
|
+
run(current.onFailure(exit.cause.error), cont);
|
|
1186
|
+
return;
|
|
1187
|
+
}
|
|
1188
|
+
cont(exit);
|
|
1189
|
+
} catch (e) {
|
|
1190
|
+
cont({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.die(e) });
|
|
1191
|
+
}
|
|
1192
|
+
});
|
|
1193
|
+
return;
|
|
1194
|
+
case "Fork":
|
|
1195
|
+
cont({ _tag: "Success", value: void 0 });
|
|
1196
|
+
return;
|
|
1197
|
+
}
|
|
1198
|
+
};
|
|
1199
|
+
run(effect, finish);
|
|
1200
|
+
return () => {
|
|
1201
|
+
if (done) return;
|
|
1202
|
+
const cancel = currentCancel;
|
|
1203
|
+
currentCancel = void 0;
|
|
1204
|
+
done = true;
|
|
1205
|
+
try {
|
|
1206
|
+
_optionalChain([cancel, 'optionalCall', _32 => _32()]);
|
|
1207
|
+
} finally {
|
|
1208
|
+
cb({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.interrupt() });
|
|
1209
|
+
}
|
|
1210
|
+
};
|
|
1211
|
+
}
|
|
1212
|
+
|
|
1213
|
+
// src/http/circuitBreaker.ts
|
|
1214
|
+
function withCircuitBreaker(config = {}) {
|
|
1215
|
+
if (config.perOrigin) {
|
|
1216
|
+
const breakers = /* @__PURE__ */ new Map();
|
|
1217
|
+
const getBreaker = (url) => {
|
|
1218
|
+
try {
|
|
1219
|
+
const origin = new URL(url).origin;
|
|
1220
|
+
if (!breakers.has(origin)) {
|
|
1221
|
+
breakers.set(origin, _chunkMS34J5LYcjs.makeCircuitBreaker.call(void 0, config));
|
|
1222
|
+
}
|
|
1223
|
+
return breakers.get(origin);
|
|
1224
|
+
} catch (e5) {
|
|
1225
|
+
if (!breakers.has("__global__")) {
|
|
1226
|
+
breakers.set("__global__", _chunkMS34J5LYcjs.makeCircuitBreaker.call(void 0, config));
|
|
1227
|
+
}
|
|
1228
|
+
return breakers.get("__global__");
|
|
1229
|
+
}
|
|
1230
|
+
};
|
|
1231
|
+
return (next) => (req) => {
|
|
1232
|
+
const breaker2 = getBreaker(req.url);
|
|
1233
|
+
return protectLazy(breaker2, next, req);
|
|
1234
|
+
};
|
|
1235
|
+
}
|
|
1236
|
+
const breaker = _chunkMS34J5LYcjs.makeCircuitBreaker.call(void 0, {
|
|
1237
|
+
...config,
|
|
1238
|
+
isFailure: _nullishCoalesce(config.isFailure, () => ( ((e) => {
|
|
1239
|
+
const err = e;
|
|
1240
|
+
return err._tag !== "BadUrl" && err._tag !== "Abort";
|
|
1241
|
+
})))
|
|
1242
|
+
});
|
|
1243
|
+
return (next) => (req) => {
|
|
1244
|
+
return protectLazy(breaker, next, req);
|
|
1245
|
+
};
|
|
1246
|
+
}
|
|
1247
|
+
function protectLazy(breaker, next, req) {
|
|
1248
|
+
return {
|
|
1249
|
+
_tag: "Async",
|
|
1250
|
+
register: (env, cb) => {
|
|
1251
|
+
let cancel;
|
|
1252
|
+
try {
|
|
1253
|
+
const deferred = {
|
|
1254
|
+
_tag: "Async",
|
|
1255
|
+
register: (innerEnv, innerCb) => registerHttpEffect(next(req), innerEnv, innerCb)
|
|
1256
|
+
};
|
|
1257
|
+
cancel = registerHttpEffect(breaker.protect(deferred), env, cb);
|
|
1258
|
+
} catch (error) {
|
|
1259
|
+
cb({
|
|
1260
|
+
_tag: "Failure",
|
|
1261
|
+
cause: {
|
|
1262
|
+
_tag: "Fail",
|
|
1263
|
+
error: { _tag: "FetchError", message: String(error) }
|
|
1264
|
+
}
|
|
1265
|
+
});
|
|
1266
|
+
}
|
|
1267
|
+
return () => _optionalChain([cancel, 'optionalCall', _33 => _33()]);
|
|
1268
|
+
}
|
|
1269
|
+
};
|
|
1270
|
+
}
|
|
1271
|
+
|
|
1272
|
+
// src/http/tracing.ts
|
|
1273
|
+
function withTracing(tracer) {
|
|
1274
|
+
return (next) => (req) => {
|
|
1275
|
+
return tracer.span(
|
|
1276
|
+
`HTTP ${req.method} ${req.url}`,
|
|
1277
|
+
next(req),
|
|
1278
|
+
{
|
|
1279
|
+
"http.method": req.method,
|
|
1280
|
+
"http.url": req.url,
|
|
1281
|
+
..._optionalChain([req, 'access', _34 => _34.headers, 'optionalAccess', _35 => _35["content-type"]]) ? { "http.content_type": req.headers["content-type"] } : {}
|
|
1282
|
+
}
|
|
1283
|
+
);
|
|
1284
|
+
};
|
|
1285
|
+
}
|
|
1286
|
+
|
|
1287
|
+
// src/http/validation.ts
|
|
1288
|
+
function validatedJson(client, validator) {
|
|
1289
|
+
return (req) => _chunkTGIFUAK4cjs.asyncFold.call(void 0,
|
|
1290
|
+
client(req),
|
|
1291
|
+
(error) => _chunkTGIFUAK4cjs.asyncFail.call(void 0, error),
|
|
1292
|
+
(response) => {
|
|
1293
|
+
try {
|
|
1294
|
+
const parsed = JSON.parse(response.bodyText);
|
|
1295
|
+
const result = validator(parsed);
|
|
1296
|
+
if (result.success) {
|
|
1297
|
+
return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, result.data);
|
|
1298
|
+
}
|
|
1299
|
+
return _chunkTGIFUAK4cjs.asyncFail.call(void 0, {
|
|
1300
|
+
_tag: "ValidationError",
|
|
1301
|
+
message: result.error,
|
|
1302
|
+
body: response.bodyText
|
|
1303
|
+
});
|
|
1304
|
+
} catch (e) {
|
|
1305
|
+
return _chunkTGIFUAK4cjs.asyncFail.call(void 0, {
|
|
1306
|
+
_tag: "ValidationError",
|
|
1307
|
+
message: `JSON parse error: ${String(e)}`,
|
|
1308
|
+
body: response.bodyText
|
|
1309
|
+
});
|
|
1310
|
+
}
|
|
1311
|
+
}
|
|
1312
|
+
);
|
|
1313
|
+
}
|
|
1314
|
+
|
|
1315
|
+
// src/http/body.ts
|
|
1316
|
+
function httpBodyByteLength(body) {
|
|
1317
|
+
if (body === void 0) return 0;
|
|
1318
|
+
if (typeof body === "string") return Buffer.byteLength(body, "utf8");
|
|
1319
|
+
if (body instanceof ArrayBuffer) return body.byteLength;
|
|
1320
|
+
return body.byteLength;
|
|
1321
|
+
}
|
|
1322
|
+
function httpBodyToBuffer(body) {
|
|
1323
|
+
if (typeof body === "string") return Buffer.from(body, "utf8");
|
|
1324
|
+
if (body instanceof ArrayBuffer) return Buffer.from(body);
|
|
1325
|
+
return Buffer.from(body);
|
|
1326
|
+
}
|
|
1327
|
+
function httpBodyKeyPart(body) {
|
|
1328
|
+
if (body === void 0) return "";
|
|
1329
|
+
if (typeof body === "string") return body;
|
|
1330
|
+
return `base64:${httpBodyToBuffer(body).toString("base64")}`;
|
|
1331
|
+
}
|
|
1332
|
+
|
|
1333
|
+
// src/http/lifecycle/cacheKey.ts
|
|
1334
|
+
var SEPARATOR = "\0";
|
|
1335
|
+
var DEFAULT_CACHE_RELEVANT_HEADERS = ["accept", "authorization", "content-type"];
|
|
1336
|
+
/**
 * Builds the cache key for a request: uppercased method, absolute URL,
 * the sorted subset of cache-relevant headers (lowercased `name:value`
 * pairs joined by commas), and the body fragment — all joined by the
 * NUL SEPARATOR.
 *
 * @param req - Request with method, url, optional headers/body.
 * @param baseUrl - Base used to resolve relative URLs ("" means none).
 * @param extraHeaders - Additional header names (case-insensitive) to
 *   include beyond DEFAULT_CACHE_RELEVANT_HEADERS.
 */
function computeCacheKey(req, baseUrl, extraHeaders = []) {
  const upperMethod = req.method.toUpperCase();
  const absoluteUrl = new URL(req.url, baseUrl || undefined).toString();
  const relevant = new Set(DEFAULT_CACHE_RELEVANT_HEADERS);
  for (const name of extraHeaders) {
    relevant.add(name.toLowerCase());
  }
  const headerMap = req.headers ?? {};
  const headerPart = Object.keys(headerMap)
    .filter((name) => relevant.has(name.toLowerCase()))
    .sort()
    .map((name) => `${name.toLowerCase()}:${headerMap[name]}`)
    .join(",");
  const bodyPart = httpBodyKeyPart(req.body);
  return [upperMethod, absoluteUrl, headerPart, bodyPart].join(SEPARATOR);
}
|
|
1348
|
+
/**
 * Inverse of computeCacheKey: splits a key on the NUL SEPARATOR back into
 * its method, resolved URL, header map, and body fragment. Any SEPARATOR
 * bytes beyond the third belong to the body and are re-joined.
 */
function parseCacheKey(key) {
  const fields = key.split(SEPARATOR);
  const method = fields[0];
  const resolvedUrl = fields[1];
  const headersStr = fields[2];
  // The body may itself contain SEPARATOR bytes; restore them.
  const body = fields.slice(3).join(SEPARATOR);
  const headers = {};
  if (headersStr) {
    for (const pair of headersStr.split(",")) {
      const sep = pair.indexOf(":");
      // sep > 0 skips malformed pairs and pairs with an empty name.
      if (sep > 0) {
        headers[pair.slice(0, sep)] = pair.slice(sep + 1);
      }
    }
  }
  return { method, resolvedUrl, headers, body };
}
|
|
1362
|
+
|
|
1363
|
+
// src/http/lifecycle/dedupKey.ts
|
|
1364
|
+
// Standard hop-by-hop headers (connection-level, not resource-level);
// they are excluded when computing dedup keys below.
var HOP_BY_HOP = /* @__PURE__ */ new Set([
  "connection",
  "keep-alive",
  "proxy-authenticate",
  "proxy-authorization",
  "te",
  "trailer",
  "transfer-encoding",
  "upgrade"
]);
// Methods treated as side-effect-free and therefore safe to dedupe/cache.
var SAFE_METHODS = /* @__PURE__ */ new Set(["GET", "HEAD", "OPTIONS"]);
|
|
1375
|
+
/**
 * Builds the in-flight deduplication key for a request.
 *
 * Like computeCacheKey, but instead of an allow-list it includes every
 * header except hop-by-hop headers and `authorization` (credentials must
 * not leak into shared keys nor split identical requests).
 */
function computeDedupKey(req, baseUrl) {
  const upperMethod = req.method.toUpperCase();
  const absoluteUrl = new URL(req.url, baseUrl || undefined).toString();
  const headerMap = req.headers ?? {};
  const headerPart = Object.keys(headerMap)
    .filter((name) => {
      const lower = name.toLowerCase();
      return lower !== "authorization" && !HOP_BY_HOP.has(lower);
    })
    .sort()
    .map((name) => `${name.toLowerCase()}:${headerMap[name]}`)
    .join(",");
  const bodyPart = httpBodyKeyPart(req.body);
  return [upperMethod, absoluteUrl, headerPart, bodyPart].join(SEPARATOR);
}
|
|
1386
|
+
|
|
1387
|
+
// src/http/lifecycle/dedup.ts
|
|
1388
|
+
/**
 * Invokes an optional event listener, swallowing anything it throws so a
 * faulty observer can never break the request pipeline.
 */
function safeEmit(onEvent, event) {
  if (!onEvent) {
    return;
  }
  try {
    onEvent(event);
  } catch (err) {
    // Intentionally ignored: listeners are best-effort observers.
  }
}
|
|
1395
|
+
// Middleware factory that deduplicates concurrent identical safe requests.
// Only one network call per key is in flight at a time; later callers with
// the same key attach as waiters and share the first call's outcome.
// Cancellation is reference-counted: the underlying request is aborted only
// when every interested caller has cancelled.
function withDedup(config) {
  // key -> { key, controller, refCount, waiters } for requests in flight.
  const inFlight = /* @__PURE__ */ new Map();
  const customKeyFn = _optionalChain([config, 'optionalAccess', _36 => _36.dedupKey]);
  const onEvent = _optionalChain([config, 'optionalAccess', _37 => _37.onEvent]);
  return (next) => {
    return (req) => {
      // Non-safe methods (anything outside GET/HEAD/OPTIONS) bypass dedup.
      if (!SAFE_METHODS.has(req.method.toUpperCase())) {
        return next(req);
      }
      let key;
      if (customKeyFn) {
        try {
          key = customKeyFn(req);
        } catch (e7) {
          // A throwing key function disables dedup for this request.
          return next(req);
        }
        // A falsy custom key also opts the request out of dedup.
        if (!key) {
          return next(req);
        }
      } else {
        key = computeDedupKey(req, "");
      }
      return {
        _tag: "Async",
        register: (_env, cb) => {
          const existing = inFlight.get(key);
          // Guard so the caller's callback fires at most once, whether via
          // completion, shared resolution, or cancellation.
          let callerDone = false;
          const finishCaller = (exit) => {
            if (callerDone) return;
            callerDone = true;
            cb(exit);
          };
          if (existing) {
            // Another identical request is already running: join it.
            safeEmit(onEvent, { type: "dedup-hit", cacheKey: key });
            existing.refCount++;
            const waiter = {
              resolve: (res) => {
                finishCaller({ _tag: "Success", value: res });
              },
              reject: (err) => {
                finishCaller({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail(err) });
              }
            };
            existing.waiters.push(waiter);
            // Canceller for a joined caller: detach the waiter and, if this
            // was the last interested caller, abort the shared request.
            return () => {
              if (callerDone) return;
              existing.refCount--;
              const idx = existing.waiters.indexOf(waiter);
              if (idx >= 0) {
                existing.waiters.splice(idx, 1);
              }
              if (existing.refCount <= 0) {
                inFlight.delete(key);
                safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
                existing.controller.abort();
              }
              finishCaller({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.interrupt() });
            };
          }
          // First caller for this key: launch the underlying request.
          safeEmit(onEvent, { type: "dedup-miss", cacheKey: key });
          const controller = new AbortController();
          const entry = {
            key,
            controller,
            refCount: 1,
            waiters: []
          };
          inFlight.set(key, entry);
          safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
          // Re-issue the request with the shared controller's signal so the
          // group can be aborted as a unit.
          const dedupReq = {
            ...req,
            init: {
              ..._nullishCoalesce(req.init, () => ( {})),
              signal: controller.signal
            }
          };
          const innerEffect = next(dedupReq);
          const innerCancel = registerHttpEffect(innerEffect, _env, (exit) => {
            // Entry is removed before fan-out so late joiners start fresh.
            inFlight.delete(key);
            safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
            if (exit._tag === "Success") {
              resolveAll(entry, exit.value);
              finishCaller(exit);
              return;
            }
            if (exit.cause._tag === "Interrupt") {
              rejectAll(entry, { _tag: "Abort" });
              finishCaller({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.interrupt() });
              return;
            }
            if (exit.cause._tag === "Fail") {
              rejectAll(entry, exit.cause.error);
              finishCaller(exit);
              return;
            }
            // Unknown defect: normalize to a FetchError for all waiters.
            const err = { _tag: "FetchError", message: String(_nullishCoalesce(exit.cause.defect, () => ( "unknown"))) };
            rejectAll(entry, err);
            finishCaller({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail(err) });
          });
          // Canceller for the originating caller.
          return () => {
            if (callerDone) return;
            entry.refCount--;
            if (entry.refCount <= 0) {
              inFlight.delete(key);
              safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
              controller.abort();
              if (innerCancel) {
                innerCancel();
              }
            }
            finishCaller({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.interrupt() });
          };
        }
      };
    };
  };
}
|
|
1512
|
+
/**
 * Delivers a successful response to every waiter attached to a dedup entry.
 * Iterates over a snapshot so resolve callbacks that mutate the waiter list
 * cannot disturb the traversal.
 */
function resolveAll(entry, res) {
  for (const waiter of [...entry.waiters]) {
    waiter.resolve(res);
  }
}
|
|
1518
|
+
/**
 * Delivers a failure to every waiter attached to a dedup entry.
 * Iterates over a snapshot so reject callbacks that mutate the waiter list
 * cannot disturb the traversal.
 */
function rejectAll(entry, err) {
  for (const waiter of [...entry.waiters]) {
    waiter.reject(err);
  }
}
|
|
1524
|
+
|
|
1525
|
+
// src/http/lifecycle/timing.ts
|
|
1526
|
+
// Millisecond clock for TTL bookkeeping: prefers performance.now()
// (sub-millisecond resolution) and falls back to Date.now() where the
// performance API is unavailable.
var now = typeof performance !== "undefined" && typeof performance.now === "function" ? () => performance.now() : () => Date.now();
|
|
1527
|
+
|
|
1528
|
+
// src/http/lifecycle/lruCache.ts
|
|
1529
|
+
/**
 * True when a cache node's age (relative to the module clock) has reached
 * or exceeded its TTL.
 */
function isExpired(node) {
  const ageMs = now() - node.storedAt;
  return ageMs >= node.ttlMs;
}
|
|
1532
|
+
/**
 * Least-recently-used cache with per-entry TTL expiry.
 *
 * Entries live in a Map (key -> node) plus a doubly-linked recency list
 * (head = most recent, tail = least recent). Expired entries are removed
 * lazily when touched by get(). Inserting past `maxEntries` evicts the
 * tail and reports it through the optional `onEvict` callback.
 *
 * (The `(_class3 = class …, _class3)` wrapper and `__init*` methods are
 * transpiler output and are preserved verbatim.)
 */
var LRUCache = (_class3 = class {
  __init5() {this.map = /* @__PURE__ */ new Map()}
  __init6() {this.head = null}
  __init7() {this.tail = null}

  /**
   * @param config - Optional settings.
   * @param config.maxEntries - Capacity; floored and clamped to >= 1 (default 1024).
   * @param config.onEvict - Called with the eviction count (always 1 per eviction).
   */
  constructor(config = {}) {;_class3.prototype.__init5.call(this);_class3.prototype.__init6.call(this);_class3.prototype.__init7.call(this);
    const requested = config.maxEntries ?? 1024;
    this.maxEntries = Math.max(1, Math.floor(requested));
    this.onEvict = config.onEvict;
  }

  /** Number of stored entries (expired-but-untouched entries included). */
  get size() {
    return this.map.size;
  }

  /**
   * Looks up a value. A live hit is promoted to most-recently-used and
   * returned; an expired hit is deleted and treated as a miss.
   */
  get(key) {
    const hit = this.map.get(key);
    if (hit === undefined) {
      return undefined;
    }
    if (isExpired(hit)) {
      this.removeNode(hit);
      this.map.delete(key);
      return undefined;
    }
    this.moveToHead(hit);
    return hit.value;
  }

  /**
   * Stores a value under `key` with the given TTL (milliseconds).
   * An existing key is refreshed in place and promoted; a new key may
   * push the cache over capacity and evict the LRU entry.
   */
  set(key, value, ttlMs) {
    const current = this.map.get(key);
    if (current !== undefined) {
      current.value = value;
      current.storedAt = now();
      current.ttlMs = ttlMs;
      this.moveToHead(current);
      return;
    }
    const fresh = {
      key,
      value,
      storedAt: now(),
      ttlMs,
      prev: null,
      next: null
    };
    this.map.set(key, fresh);
    this.addToHead(fresh);
    if (this.map.size > this.maxEntries) {
      this.evictTail();
    }
  }

  /** Removes a key; returns whether an entry was actually deleted. */
  delete(key) {
    const target = this.map.get(key);
    if (target === undefined) {
      return false;
    }
    this.removeNode(target);
    this.map.delete(key);
    return true;
  }

  /** Drops every entry and resets the recency list. */
  clear() {
    this.map.clear();
    this.head = null;
    this.tail = null;
  }

  // --- Doubly-linked list plumbing ---

  /** Links a node in at the head (most-recently-used slot). */
  addToHead(node) {
    node.prev = null;
    node.next = this.head;
    if (this.head !== null) {
      this.head.prev = node;
    }
    this.head = node;
    if (this.tail === null) {
      this.tail = node;
    }
  }

  /** Unlinks a node from wherever it currently sits in the list. */
  removeNode(node) {
    const before = node.prev;
    const after = node.next;
    if (before !== null) {
      before.next = after;
    } else {
      this.head = after;
    }
    if (after !== null) {
      after.prev = before;
    } else {
      this.tail = before;
    }
    node.prev = null;
    node.next = null;
  }

  /** Promotes a node to the head unless it is already there. */
  moveToHead(node) {
    if (node === this.head) {
      return;
    }
    this.removeNode(node);
    this.addToHead(node);
  }

  /** Drops the least-recently-used node and notifies onEvict if set. */
  evictTail() {
    const victim = this.tail;
    if (victim === null) {
      return;
    }
    this.removeNode(victim);
    this.map.delete(victim.key);
    if (this.onEvict) {
      this.onEvict(1);
    }
  }
}, _class3);
|
|
1735
|
+
|
|
1736
|
+
// src/http/lifecycle/responseCache.ts
|
|
1737
|
+
/**
 * Clamps `n` into the inclusive range [min, max].
 * NaN propagates unchanged, matching Math.max/Math.min composition.
 */
function clamp2(n, min, max) {
  if (n < min) {
    return min;
  }
  if (n > max) {
    return max;
  }
  return n;
}
|
|
1740
|
+
/**
 * Invokes an optional event listener, swallowing anything it throws so a
 * faulty observer can never break the caching pipeline.
 */
function safeEmit2(onEvent, event) {
  if (!onEvent) {
    return;
  }
  try {
    onEvent(event);
  } catch (err) {
    // Intentionally ignored: listeners are best-effort observers.
  }
}
|
|
1747
|
+
// Factory for a response-caching middleware backed by the LRUCache above.
// Returns { middleware, invalidate, clear }. Two variants share one cache:
// the plain variant serves hits until TTL expiry; the stale-while-revalidate
// (SWR) variant serves stale hits immediately and refreshes in the background.
function withCache(config) {
  // TTL clamped to [1s, 24h]; defaults: 60s TTL, 1024 entries, SWR off.
  const ttlSeconds = clamp2(_nullishCoalesce(_optionalChain([config, 'optionalAccess', _38 => _38.ttlSeconds]), () => ( 60)), 1, 86400);
  const ttlMs = ttlSeconds * 1e3;
  const maxEntries = Math.max(1, Math.floor(_nullishCoalesce(_optionalChain([config, 'optionalAccess', _39 => _39.maxEntries]), () => ( 1024))));
  const staleWhileRevalidate = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _40 => _40.staleWhileRevalidate]), () => ( false));
  // Optional per-response policy: may cache unsafe methods and override TTL.
  const cachePolicy = _optionalChain([config, 'optionalAccess', _41 => _41.cachePolicy]);
  const cacheRelevantHeaders = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _42 => _42.cacheRelevantHeaders]), () => ( []));
  const baseUrl = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _43 => _43.baseUrl]), () => ( ""));
  const onEvent = _optionalChain([config, 'optionalAccess', _44 => _44.onEvent]);
  const onLifecycleEvent = _optionalChain([config, 'optionalAccess', _45 => _45.onLifecycleEvent]);
  const cache = new LRUCache({
    maxEntries,
    onEvict: (count) => _optionalChain([onLifecycleEvent, 'optionalCall', _46 => _46({ type: "cache-eviction", count })])
  });
  // Keys with a background refresh currently in flight (SWR variant only).
  const revalidating = /* @__PURE__ */ new Set();
  const invalidate = (key) => {
    cache.delete(key);
  };
  const clear = () => {
    cache.clear();
  };
  // Plain variant: serve from cache or run the request and store the result.
  const middleware = (next) => {
    return (req) => {
      const method = req.method.toUpperCase();
      // Without a custom policy, only safe methods participate in caching.
      if (!SAFE_METHODS.has(method) && !cachePolicy) {
        return next(req);
      }
      const key = computeCacheKey(req, baseUrl, cacheRelevantHeaders);
      return {
        _tag: "Async",
        register: (env, cb) => {
          const cached = cache.get(key);
          if (cached !== void 0) {
            _optionalChain([onLifecycleEvent, 'optionalCall', _47 => _47({ type: "cache-hit", cacheKey: key })]);
            cb({ _tag: "Success", value: cached });
            return;
          }
          _optionalChain([onLifecycleEvent, 'optionalCall', _48 => _48({ type: "cache-miss", cacheKey: key })]);
          const innerEffect = next(req);
          return registerHttpEffect(innerEffect, env, (exit) => {
            if (exit._tag === "Success") {
              storeIfCacheable(req, exit.value, key);
            }
            cb(exit);
          });
        }
      };
    };
  };
  // Stores a response when the policy (or safe-method default) allows it.
  function storeIfCacheable(req, res, key) {
    const method = req.method.toUpperCase();
    if (cachePolicy) {
      const result = cachePolicy(req, res);
      if (!result.cacheable) return;
      // Policy may override the TTL (clamped to the same [1s, 24h] range).
      const entryTtlMs = result.ttlSeconds !== void 0 ? clamp2(result.ttlSeconds, 1, 86400) * 1e3 : ttlMs;
      cache.set(key, res, entryTtlMs);
      return;
    }
    if (!SAFE_METHODS.has(method)) return;
    cache.set(key, res, ttlMs);
  }
  // Fires a background refresh for a stale key; at most one per key.
  function triggerRevalidation(next, req, key) {
    if (revalidating.has(key)) return;
    revalidating.add(key);
    const innerEffect = next(req);
    const handleExit = (exit) => {
      revalidating.delete(key);
      if (exit._tag === "Success") {
        storeIfCacheable(req, exit.value, key);
      } else {
        // Background failures are reported, never surfaced to a caller.
        safeEmit2(onEvent, {
          type: "revalidation-failure",
          cacheKey: key,
          error: exit.cause._tag === "Fail" ? exit.cause.error : void 0
        });
      }
    };
    registerHttpEffect(innerEffect, void 0, handleExit);
  }
  // SWR freshness bookkeeping: key -> absolute expiry timestamp. LRU nodes
  // get an effectively infinite TTL; freshness is tracked here instead.
  // NOTE(review): entries here are not pruned when the LRU evicts a key,
  // so expirationMap can retain stale keys — confirm intended.
  const expirationMap = /* @__PURE__ */ new Map();
  // SWR variant: fresh hits return directly; stale hits return immediately
  // AND trigger a background revalidation.
  const swrMiddleware = (next) => {
    return (req) => {
      const method = req.method.toUpperCase();
      if (!SAFE_METHODS.has(method) && !cachePolicy) {
        return next(req);
      }
      const key = computeCacheKey(req, baseUrl, cacheRelevantHeaders);
      return {
        _tag: "Async",
        register: (env, cb) => {
          const cached = cache.get(key);
          if (cached !== void 0) {
            const expiresAt = expirationMap.get(key);
            if (expiresAt !== void 0 && now() < expiresAt) {
              // Still fresh: no revalidation needed.
              _optionalChain([onLifecycleEvent, 'optionalCall', _49 => _49({ type: "cache-hit", cacheKey: key })]);
              cb({ _tag: "Success", value: cached });
              return;
            }
            // Stale: serve it now, refresh in the background.
            _optionalChain([onLifecycleEvent, 'optionalCall', _50 => _50({ type: "cache-hit", cacheKey: key })]);
            cb({ _tag: "Success", value: cached });
            triggerRevalidation(next, req, key);
            return;
          }
          _optionalChain([onLifecycleEvent, 'optionalCall', _51 => _51({ type: "cache-miss", cacheKey: key })]);
          const innerEffect = next(req);
          const handleSuccess = (res) => {
            swrStoreIfCacheable(req, res, key);
          };
          return registerHttpEffect(innerEffect, env, (exit) => {
            if (exit._tag === "Success") {
              handleSuccess(exit.value);
            }
            cb(exit);
          });
        }
      };
    };
  };
  // SWR storage: the LRU node never expires on its own; freshness lives in
  // expirationMap so stale entries remain servable.
  function swrStoreIfCacheable(req, res, key) {
    const method = req.method.toUpperCase();
    let entryTtlMs = ttlMs;
    if (cachePolicy) {
      const result = cachePolicy(req, res);
      if (!result.cacheable) return;
      entryTtlMs = result.ttlSeconds !== void 0 ? clamp2(result.ttlSeconds, 1, 86400) * 1e3 : ttlMs;
    } else if (!SAFE_METHODS.has(method)) {
      return;
    }
    const lruTtl = Number.MAX_SAFE_INTEGER;
    cache.set(key, res, lruTtl);
    expirationMap.set(key, now() + entryTtlMs);
  }
  const swrInvalidate = (key) => {
    cache.delete(key);
    expirationMap.delete(key);
  };
  const swrClear = () => {
    cache.clear();
    expirationMap.clear();
  };
  if (staleWhileRevalidate) {
    return {
      middleware: swrMiddleware,
      invalidate: swrInvalidate,
      clear: swrClear
    };
  }
  return {
    middleware,
    invalidate,
    clear
  };
}
|
|
1900
|
+
|
|
1901
|
+
// src/http/lifecycle/priorityQueue.ts
|
|
1902
|
+
/**
 * Normalizes a request priority to an integer in [0, 9].
 * Undefined and non-finite inputs (NaN, ±Infinity) map to the default 5;
 * finite values are truncated toward zero and clamped.
 */
function clampPriority(value) {
  if (value === undefined || !Number.isFinite(value)) {
    return 5;
  }
  const whole = Math.trunc(value);
  return Math.min(9, Math.max(0, whole));
}
|
|
1906
|
+
/**
 * Heap comparator: orders first by ascending priority, then by ascending
 * arrival order (FIFO among equal priorities). Negative means `a` first.
 */
function comparePriority(a, b) {
  const byPriority = a.priority - b.priority;
  return byPriority !== 0 ? byPriority : a.arrivalOrder - b.arrivalOrder;
}
|
|
1910
|
+
/**
 * Binary min-heap priority queue with lazy cancellation.
 *
 * Lower priority number wins; ties break by arrival order (FIFO).
 * Cancelled entries stay in the heap and are discarded only when they
 * surface at the top during dequeue/peek.
 *
 * (The `(_class4 = class …, _class4)` wrapper and `__init*` methods are
 * transpiler output and are preserved verbatim.)
 */
var PriorityQueue = (_class4 = class {constructor() { _class4.prototype.__init8.call(this);_class4.prototype.__init9.call(this); }
  __init8() {this.heap = []}
  __init9() {this.counter = 0}

  /** Total entries in the heap, cancelled ones included. */
  get size() {
    return this.heap.length;
  }

  /** Number of entries that have not been cancelled. */
  get activeSize() {
    let live = 0;
    for (const entry of this.heap) {
      if (!entry.cancelled) {
        live++;
      }
    }
    return live;
  }

  /**
   * Inserts a value at the given priority (clamped to [0, 9], default 5).
   * Returns the heap entry; set `entry.cancelled = true` to cancel lazily.
   */
  enqueue(value, priority) {
    const entry = {
      priority: clampPriority(priority),
      arrivalOrder: this.counter++,
      value,
      cancelled: false
    };
    this.heap.push(entry);
    this.bubbleUp(this.heap.length - 1);
    return entry;
  }

  /**
   * Pops and returns the best non-cancelled entry, discarding cancelled
   * entries encountered at the top. Returns undefined when none remain.
   */
  dequeue() {
    for (;;) {
      if (this.heap.length === 0) {
        return undefined;
      }
      const top = this.heap[0];
      this.removeTop();
      if (!top.cancelled) {
        return top;
      }
    }
  }

  /**
   * Returns the best non-cancelled entry without removing it; cancelled
   * entries at the top are pruned as a side effect.
   */
  peek() {
    while (this.heap.length > 0) {
      const candidate = this.heap[0];
      if (!candidate.cancelled) {
        return candidate;
      }
      this.removeTop();
    }
    return undefined;
  }

  /**
   * Lazily cancels every live entry matching the predicate.
   * Returns how many entries were newly cancelled.
   */
  remove(predicate) {
    let cancelledNow = 0;
    for (const entry of this.heap) {
      if (entry.cancelled) {
        continue;
      }
      if (predicate(entry)) {
        entry.cancelled = true;
        cancelledNow++;
      }
    }
    return cancelledNow;
  }

  // --- Binary heap plumbing ---

  /** Pops the root and re-heapifies by sinking the relocated last element. */
  removeTop() {
    const last = this.heap.pop();
    if (last === undefined || this.heap.length === 0) {
      return;
    }
    this.heap[0] = last;
    this.sinkDown(0);
  }

  /** Swims the element at `index` up until its parent is not larger. */
  bubbleUp(index) {
    while (index > 0) {
      const parentIndex = (index - 1) >>> 1;
      const child = this.heap[index];
      const parent = this.heap[parentIndex];
      if (comparePriority(child, parent) >= 0) {
        break;
      }
      this.heap[index] = parent;
      this.heap[parentIndex] = child;
      index = parentIndex;
    }
  }

  /** Sinks the element at `index` down until neither child is smaller. */
  sinkDown(index) {
    const size = this.heap.length;
    for (;;) {
      const left = 2 * index + 1;
      const right = left + 1;
      let best = index;
      if (left < size && comparePriority(this.heap[left], this.heap[best]) < 0) {
        best = left;
      }
      if (right < size && comparePriority(this.heap[right], this.heap[best]) < 0) {
        best = right;
      }
      if (best === index) {
        return;
      }
      [this.heap[index], this.heap[best]] = [this.heap[best], this.heap[index]];
      index = best;
    }
  }
}, _class4);
|
|
2097
|
+
|
|
2098
|
+
// src/http/lifecycle/priorityScheduler.ts
|
|
2099
|
+
// Default maximum number of concurrently dispatched requests for the
// priority scheduler when no explicit concurrency is configured.
var DEFAULT_CONCURRENCY2 = 32;
|
|
2100
|
+
/**
 * Resolves a request's scheduling priority: `req.priority` wins, then
 * `req.init.priority`, then the default of 5. Explicit values are clamped
 * to [0, 9] via clampPriority.
 */
function extractPriority(req) {
  if (req.priority !== undefined) {
    return clampPriority(req.priority);
  }
  const initPriority = req.init?.priority;
  if (initPriority !== undefined) {
    return clampPriority(initPriority);
  }
  return 5;
}
|
|
2107
|
+
/**
 * Invokes an optional event listener, swallowing anything it throws so a
 * faulty observer can never break the priority scheduler.
 */
function safeEmit3(onEvent, event) {
  if (!onEvent) {
    return;
  }
  try {
    onEvent(event);
  } catch (err) {
    // Intentionally ignored: listeners are best-effort observers.
  }
}
|
|
2114
|
+
/**
 * Builds a concurrency-limiting middleware backed by a priority queue.
 *
 * Up to `concurrency` requests run at once; the overflow is enqueued with the
 * priority derived by `extractPriority`. When a slot frees up, `drainNext`
 * dispatches the highest-priority live entry. An optional `queueTimeoutMs`
 * fails entries that wait too long with a `PoolTimeout` error, and a
 * caller-supplied AbortSignal fails queued entries with an `Abort` error.
 *
 * The returned middleware also exposes `queueDepth()` (current number of
 * live queued entries) via `Object.assign`.
 *
 * @param config - Optional `{ concurrency, queueTimeoutMs, onEvent }`.
 */
function withPriority(config) {
  const concurrency = resolveConcurrency(_optionalChain([config, 'optionalAccess', _54 => _54.concurrency]));
  const queueTimeoutMs = resolveQueueTimeout(_optionalChain([config, 'optionalAccess', _55 => _55.queueTimeoutMs]));
  const onEvent = _optionalChain([config, 'optionalAccess', _56 => _56.onEvent]);
  const queue = new PriorityQueue();
  // Number of requests currently dispatched downstream (not queued).
  let inFlight = 0;
  const queueDepth = () => {
    return queue.activeSize;
  };
  const middleware = (next) => {
    return (req) => {
      const priority = extractPriority(req);
      return {
        _tag: "Async",
        register: (env, cb) => {
          // Fast path: a free slot exists, dispatch immediately.
          if (inFlight < concurrency) {
            return dispatchRequest(next, req, env, cb);
          }
          // Otherwise park the request; `entry.cancelled` is the shared
          // tombstone flag checked by drainNext before dispatch.
          const queued = { req, env, cb, signal: getSignal(req) };
          const entry = queue.enqueue(queued, priority);
          safeEmit3(onEvent, { type: "queue-enqueue", priority });
          if (queueTimeoutMs !== void 0) {
            queued.timer = setTimeout(() => {
              entry.cancelled = true;
              queued.timer = void 0;
              cb({
                _tag: "Failure",
                cause: _chunkTGIFUAK4cjs.Cause.fail({
                  _tag: "PoolTimeout",
                  key: "priority",
                  timeoutMs: queueTimeoutMs,
                  message: `Priority queue did not dispatch within ${queueTimeoutMs}ms`
                })
              });
            }, queueTimeoutMs);
          }
          const signal = queued.signal;
          let abortHandler;
          if (signal && !signal.aborted) {
            abortHandler = () => {
              entry.cancelled = true;
              if (queued.timer !== void 0) {
                clearTimeout(queued.timer);
                queued.timer = void 0;
              }
              cb({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail({ _tag: "Abort" }) });
            };
            signal.addEventListener("abort", abortHandler, { once: true });
          } else if (_optionalChain([signal, 'optionalAccess', _57 => _57.aborted])) {
            // Signal was already aborted before enqueue settled: fail now.
            entry.cancelled = true;
            cb({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail({ _tag: "Abort" }) });
            return;
          }
          // Canceller returned to the effect runtime for queued entries.
          // NOTE(review): this calls `cb` with Interrupt unconditionally, and the
          // timeout/abort paths above can also have fired `cb` already — confirm
          // downstream treats repeated completions as idempotent.
          return () => {
            entry.cancelled = true;
            if (queued.timer !== void 0) {
              clearTimeout(queued.timer);
              queued.timer = void 0;
            }
            if (abortHandler && signal) {
              signal.removeEventListener("abort", abortHandler);
            }
            cb({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.interrupt() });
          };
        }
      };
    };
  };
  // Dispatches one request downstream, holding a concurrency slot until the
  // downstream effect completes, then drains the queue.
  function dispatchRequest(downstream, req, env, cb) {
    inFlight++;
    safeEmit3(onEvent, { type: "queue-dispatch", priority: extractPriority(req) });
    const innerEffect = downstream(req);
    let completed = false;
    const onComplete = (exit) => {
      if (completed) return;
      completed = true;
      inFlight--;
      cb(exit);
      drainNext(downstream);
    };
    const innerCancel = registerHttpEffect(innerEffect, env, onComplete);
    return () => {
      innerCancel();
    };
  }
  // Pulls queued entries while slots are free, skipping cancelled entries,
  // clearing their pending timeout timers, and failing already-aborted ones.
  function drainNext(downstream) {
    while (inFlight < concurrency) {
      const entry = queue.dequeue();
      if (!entry) break;
      if (entry.cancelled) continue;
      const queued = entry.value;
      if (queued.timer !== void 0) {
        clearTimeout(queued.timer);
        queued.timer = void 0;
      }
      if (_optionalChain([queued, 'access', _58 => _58.signal, 'optionalAccess', _59 => _59.aborted])) {
        queued.cb({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail({ _tag: "Abort" }) });
        continue;
      }
      dispatchRequest(downstream, queued.req, queued.env, queued.cb);
    }
  }
  return Object.assign(middleware, { queueDepth });
}
|
|
2218
|
+
/**
 * Normalizes the user-supplied concurrency limit.
 *
 * Non-finite or missing values fall back to DEFAULT_CONCURRENCY2; finite
 * values are floored and clamped to a minimum of 1.
 */
function resolveConcurrency(value) {
  if (value !== void 0 && Number.isFinite(value)) {
    const floored = Math.floor(value);
    return floored < 1 ? 1 : floored;
  }
  return DEFAULT_CONCURRENCY2;
}
|
|
2222
|
+
/**
 * Normalizes the user-supplied queue timeout.
 *
 * Returns the floored millisecond value when it is finite and strictly
 * positive after flooring; otherwise returns undefined (meaning "no timeout").
 */
function resolveQueueTimeout(value) {
  if (value !== void 0 && Number.isFinite(value)) {
    const ms = Math.floor(value);
    if (ms > 0) {
      return ms;
    }
  }
  return void 0;
}
|
|
2227
|
+
// Extracts the caller-supplied AbortSignal (if any) from a request's fetch init.
function getSignal(req) {
  return _optionalChain([req, 'access', _60 => _60.init, 'optionalAccess', _61 => _61.signal]);
}
|
|
2230
|
+
|
|
2231
|
+
// src/http/lifecycle/stats.ts
|
|
2232
|
+
// Aggregates lifecycle-level counters (cache, dedup, queue, request, retry) and
// forwards lifecycle events to an optional listener. The `__initNN` methods and
// the `(_class5 = class { ... }, _class5)` wrapper are the transpiler's class-field
// lowering: each `__initNN` initializes one counter field and is invoked from the
// constructor via `_class5.prototype.__initNN.call(this)`.
var LifecycleStatsTracker = (_class5 = class {
  __init10() {this._cacheHits = 0}
  __init11() {this._cacheMisses = 0}
  __init12() {this._cacheEvictions = 0}
  __init13() {this._dedupHits = 0}
  __init14() {this._dedupActive = 0}
  __init15() {this._queueDepth = 0}
  __init16() {this._requestsStarted = 0}
  __init17() {this._requestsCompleted = 0}
  __init18() {this._requestsFailed = 0}
  __init19() {this._retries = 0}

  /**
   * Creates a new lifecycle stats tracker.
   *
   * @param opts - Configuration options for the tracker.
   * @param opts.onEvent - Optional callback invoked on each lifecycle event.
   * Errors thrown by this callback are silently discarded.
   * @param opts.wireStats - A function returning the current wire-level HTTP client stats.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({
   *   wireStats: () => ({ requestCount: 0, errorCount: 0 }),
   * });
   * ```
   */
  constructor(opts) {;_class5.prototype.__init10.call(this);_class5.prototype.__init11.call(this);_class5.prototype.__init12.call(this);_class5.prototype.__init13.call(this);_class5.prototype.__init14.call(this);_class5.prototype.__init15.call(this);_class5.prototype.__init16.call(this);_class5.prototype.__init17.call(this);_class5.prototype.__init18.call(this);_class5.prototype.__init19.call(this);
    this._onEvent = opts.onEvent;
    this._wireStats = opts.wireStats;
  }
  // --- Increment methods ---
  /**
   * Records a cache hit. Increments the cache hit counter by 1.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.cacheHit();
   * ```
   */
  cacheHit() {
    this._cacheHits++;
  }
  /**
   * Records a cache miss. Increments the cache miss counter by 1.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.cacheMiss();
   * ```
   */
  cacheMiss() {
    this._cacheMisses++;
  }
  /**
   * Records a cache eviction. Increments the cache eviction counter by 1.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.cacheEviction();
   * ```
   */
  cacheEviction() {
    this._cacheEvictions++;
  }
  /**
   * Records a dedup hit (a request that joined an in-flight duplicate).
   * Increments the dedup hit counter by 1.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.dedupHit();
   * ```
   */
  dedupHit() {
    this._dedupHits++;
  }
  /**
   * Sets the current number of active dedup groups.
   *
   * @param n - The current count of active dedup groups. Must be >= 0.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.setDedupActive(3);
   * ```
   */
  setDedupActive(n) {
    this._dedupActive = n;
  }
  /**
   * Sets the current priority queue depth.
   *
   * @param n - The current number of entries in the priority queue. Must be >= 0.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.setQueueDepth(5);
   * ```
   */
  setQueueDepth(n) {
    this._queueDepth = n;
  }
  /**
   * Records that a request has started. Increments the requests started counter by 1.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.requestStarted();
   * ```
   */
  requestStarted() {
    this._requestsStarted++;
  }
  /**
   * Records that a request has completed successfully.
   * Increments the requests completed counter by 1.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.requestCompleted();
   * ```
   */
  requestCompleted() {
    this._requestsCompleted++;
  }
  /**
   * Records that a request has failed.
   * Increments the requests failed counter by 1.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({ wireStats: () => ({ requestCount: 0, errorCount: 0 }) });
   * tracker.requestFailed();
   * ```
   */
  requestFailed() {
    this._requestsFailed++;
  }
  /**
   * Records a retry attempt. Increments the retry counter by 1.
   */
  retry() {
    this._retries++;
  }
  // --- Event emission ---
  /**
   * Emits a lifecycle event to the registered `onEvent` callback.
   *
   * The callback is wrapped in a try-catch so that any exception thrown by
   * the callback is silently discarded and request processing continues
   * unaffected. If no `onEvent` callback was provided, this is a no-op.
   *
   * @param type - The lifecycle event type to emit (e.g., `"cache-hit"`, `"request-start"`).
   * @param extra - Optional additional event data.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({
   *   onEvent: (event) => console.log(event.type, event.timestamp),
   *   wireStats: () => ({ requestCount: 0, errorCount: 0 }),
   * });
   * tracker.emit("cache-hit", { cacheKey: "GET|/api/users" });
   * ```
   */
  emit(type, extra) {
    if (!this._onEvent) return;
    try {
      // `extra` is spread last, so it may override `type`/`timestamp`.
      const event = {
        type,
        timestamp: now(),
        ...extra
      };
      this._onEvent(event);
    } catch (e10) {
    }
  }
  // --- Snapshot ---
  /**
   * Returns a frozen snapshot of all lifecycle statistics including wire stats.
   *
   * The returned object is frozen (immutable) and represents a point-in-time
   * view of all counters and gauges.
   *
   * @returns A frozen `LifecycleStats` object containing all current statistics.
   *
   * @example
   * ```typescript
   * import { LifecycleStatsTracker } from "./stats";
   *
   * const tracker = new LifecycleStatsTracker({
   *   wireStats: () => ({ requestCount: 10, errorCount: 1 }),
   * });
   * tracker.cacheHit();
   * tracker.cacheHit();
   * const stats = tracker.snapshot();
   * console.log(stats.cacheHits); // 2
   * ```
   */
  snapshot() {
    return Object.freeze({
      cacheHits: this._cacheHits,
      cacheMisses: this._cacheMisses,
      cacheEvictions: this._cacheEvictions,
      dedupHits: this._dedupHits,
      dedupActive: this._dedupActive,
      queueDepth: this._queueDepth,
      requestsStarted: this._requestsStarted,
      requestsCompleted: this._requestsCompleted,
      requestsFailed: this._requestsFailed,
      retries: this._retries,
      wire: this._wireStats()
    });
  }
}, _class5);
|
|
2475
|
+
|
|
2476
|
+
// src/http/lifecycle/lifecycleClient.ts
|
|
2477
|
+
/**
 * Verifies the runtime globals the lifecycle client depends on.
 *
 * Throws a descriptive Error when `fetch` or `AbortController` is missing
 * from the global scope (the `fetch` check runs first); returns undefined
 * when both are available.
 */
function validateGlobals() {
  const requirements = [
    [
      typeof fetch,
      "makeLifecycleClient: global `fetch` is not available. Ensure you are running in an environment with fetch support (Node.js 18+ or modern browser)."
    ],
    [
      typeof AbortController,
      "makeLifecycleClient: global `AbortController` is not available. Ensure you are running in an environment with AbortController support (Node.js 15+ or modern browser)."
    ]
  ];
  for (const [detected, message] of requirements) {
    if (detected === "undefined") {
      throw new Error(message);
    }
  }
}
|
|
2489
|
+
/**
 * Strips the lifecycle-only options (dedup, cache, priority, retry, onEvent)
 * from `config`, returning a shallow copy containing only the options meant
 * for the underlying wire-level client. The input object is not mutated.
 */
function extractWireConfig(config) {
  const wireConfig = { ...config };
  delete wireConfig.dedup;
  delete wireConfig.cache;
  delete wireConfig.priority;
  delete wireConfig.retry;
  delete wireConfig.onEvent;
  return wireConfig;
}
|
|
2493
|
+
/**
 * Builds a lifecycle-aware HTTP client on top of the wire client (`makeHttp`).
 *
 * Optional layers are enabled when their config key is set and not `false`:
 * dedup, cache, priority, retry. Composition order below means a request
 * flows dedup -> cache -> retry -> priority -> wire. Each layer's events are
 * funneled into a shared LifecycleStatsTracker. When no layer is enabled, the
 * wire client is wrapped directly with no-op cache hooks.
 *
 * @param config - Wire-client options plus `{ dedup, cache, priority, retry, onEvent }`.
 * @returns A callable lifecycle client (see buildLifecycleClient).
 */
function makeLifecycleClient(config = {}) {
  validateGlobals();
  const wireConfig = extractWireConfig(config);
  const wireClient = makeHttp(wireConfig);
  // Controllers for every in-flight request, so cancelAll can abort them.
  const activeControllers = /* @__PURE__ */ new Set();
  const tracker = new LifecycleStatsTracker({
    onEvent: config.onEvent,
    wireStats: wireClient.stats
  });
  const hasDedup = config.dedup !== void 0 && config.dedup !== false;
  const hasCache = config.cache !== void 0 && config.cache !== false;
  const hasPriority = config.priority !== void 0 && config.priority !== false;
  const hasRetry = config.retry !== void 0 && config.retry !== false;
  // Fast path: no lifecycle layers requested.
  if (!hasDedup && !hasCache && !hasPriority && !hasRetry) {
    return buildLifecycleClient(wireClient, tracker, {
      cacheInvalidate: noopInvalidate,
      cacheClear: noopClear,
      cancelAll: () => cancelControllers(activeControllers),
      activeControllers
    });
  }
  let priorityMiddleware;
  if (hasPriority) {
    const priorityConfig = config.priority;
    priorityMiddleware = withPriority({
      ...priorityConfig,
      onEvent: (event) => {
        // Refresh the queue-depth gauge before forwarding the event.
        tracker.setQueueDepth(_nullishCoalesce(_optionalChain([priorityMiddleware, 'optionalAccess', _62 => _62.queueDepth, 'call', _63 => _63()]), () => ( 0)));
        tracker.emit(event.type, { priority: event.priority });
      }
    });
  }
  let cacheLayer;
  if (hasCache) {
    const cacheConfig = config.cache;
    cacheLayer = withCache({
      ...cacheConfig,
      baseUrl: wireConfig.baseUrl,
      onLifecycleEvent: (event) => {
        if (event.type === "cache-hit") tracker.cacheHit();
        if (event.type === "cache-miss") tracker.cacheMiss();
        if (event.type === "cache-eviction") tracker.cacheEviction();
        // Only hit/miss events are forwarded to the user listener.
        if (event.type === "cache-hit" || event.type === "cache-miss") {
          tracker.emit(event.type, { cacheKey: event.cacheKey });
        }
      }
    });
  }
  let dedupMiddleware;
  if (hasDedup) {
    const dedupConfig = config.dedup;
    const baseUrl = _nullishCoalesce(wireConfig.baseUrl, () => ( ""));
    // Supply a baseUrl-aware default dedup key unless the user provided one.
    const effectiveDedupConfig = dedupConfig.dedupKey || !baseUrl ? dedupConfig : { ...dedupConfig, dedupKey: (req) => computeDedupKey(req, baseUrl) };
    dedupMiddleware = withDedup({
      ...effectiveDedupConfig,
      onEvent: (event) => {
        if (event.type === "dedup-hit") tracker.dedupHit();
        if (event.type === "dedup-active") {
          // Gauge update only; not forwarded to the user listener.
          tracker.setDedupActive(_nullishCoalesce(event.active, () => ( 0)));
          return;
        }
        tracker.emit(event.type, { cacheKey: event.cacheKey });
      }
    });
  }
  // Compose inside-out: priority closest to the wire, dedup outermost.
  let composedFn = wireClient;
  if (priorityMiddleware) {
    composedFn = priorityMiddleware(composedFn);
  }
  if (hasRetry) {
    const retryConfig = config.retry;
    composedFn = withRetry({
      ...retryConfig,
      onRetry: (event) => {
        tracker.retry();
        tracker.emit("retry", {
          attempt: event.attempt,
          delayMs: event.delayMs,
          status: event.status,
          errorTag: _optionalChain([event, 'access', _64 => _64.error, 'optionalAccess', _65 => _65._tag])
        });
        // Preserve the user's own onRetry callback.
        _optionalChain([retryConfig, 'access', _66 => _66.onRetry, 'optionalCall', _67 => _67(event)]);
      }
    })(composedFn);
  }
  if (cacheLayer) {
    composedFn = cacheLayer.middleware(composedFn);
  }
  if (dedupMiddleware) {
    composedFn = dedupMiddleware(composedFn);
  }
  return buildLifecycleClient(composedFn, tracker, {
    cacheInvalidate: _nullishCoalesce(_optionalChain([cacheLayer, 'optionalAccess', _68 => _68.invalidate]), () => ( noopInvalidate)),
    cacheClear: _nullishCoalesce(_optionalChain([cacheLayer, 'optionalAccess', _69 => _69.clear]), () => ( noopClear)),
    cancelAll: () => cancelControllers(activeControllers),
    activeControllers,
    queueDepth: _optionalChain([priorityMiddleware, 'optionalAccess', _70 => _70.queueDepth])
  });
}
|
|
2592
|
+
/**
 * Convenience alias for `makeLifecycleClient`; accepts the same config and
 * returns the same lifecycle-aware client.
 */
function makeHttpClient(config = {}) {
  return makeLifecycleClient(config);
}
|
|
2595
|
+
// Default cache-invalidation hook used when no cache layer is configured; intentionally a no-op.
function noopInvalidate(_key) {
}
|
|
2597
|
+
// Default cache-clear hook used when no cache layer is configured; intentionally a no-op.
function noopClear() {
}
|
|
2599
|
+
/**
 * Wraps a composed request function into the public lifecycle-client shape.
 *
 * The returned value is callable (runs a tracked request) and carries:
 * - `with(mw)`: returns a NEW client with `mw` wrapped around the current
 *   function, sharing the same tracker and internals;
 * - `stats()`: refreshes the queue-depth gauge, then returns a frozen snapshot;
 * - `cancelAll` and `cache.invalidate` / `cache.clear` from `internals`.
 *
 * @param fn - The composed request function (middleware stack over the wire client).
 * @param tracker - Shared LifecycleStatsTracker instance.
 * @param internals - `{ cacheInvalidate, cacheClear, cancelAll, activeControllers, queueDepth? }`.
 */
function buildLifecycleClient(fn, tracker, internals) {
  const client = (req) => trackRequest(fn, req, tracker, internals);
  const stats = () => {
    // Pull the live queue depth (0 when no priority layer is configured).
    tracker.setQueueDepth(_nullishCoalesce(_optionalChain([internals, 'access', _71 => _71.queueDepth, 'optionalCall', _72 => _72()]), () => ( 0)));
    return tracker.snapshot();
  };
  const withMw = (mw) => {
    const wrappedFn = mw(fn);
    return buildLifecycleClient(wrappedFn, tracker, internals);
  };
  const lifecycleClient = Object.assign(client, {
    with: withMw,
    stats,
    cancelAll: internals.cancelAll,
    cache: {
      invalidate: internals.cacheInvalidate,
      clear: internals.cacheClear
    }
  });
  return lifecycleClient;
}
|
|
2620
|
+
/**
 * Aborts every controller currently registered in `activeControllers`,
 * then returns a successful async effect carrying `undefined`.
 *
 * Iterates over a snapshot so that abort handlers which mutate the set
 * while firing cannot disturb the iteration; individual `abort()` failures
 * are ignored so all remaining controllers still get aborted.
 */
function cancelControllers(activeControllers) {
  const snapshot = Array.from(activeControllers);
  for (const controller of snapshot) {
    try {
      controller.abort();
    } catch (err) {
      // Keep aborting the rest even if one controller misbehaves.
    }
  }
  return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, void 0);
}
|
|
2629
|
+
/**
 * Wraps one request execution with stats tracking and cancellation plumbing.
 *
 * Creates a fresh AbortController per request (registered in
 * `internals.activeControllers` so cancelAll can reach it), chains any
 * caller-supplied `req.init.signal` into it, and substitutes the controller's
 * signal into the request passed downstream. Completion is funneled through
 * `finish`, which fires at most once, updates started/completed/failed
 * counters, emits "request-start"/"request-end" events, and remaps an
 * Interrupt failure into an Abort failure when the caller's own signal
 * triggered the abort.
 */
function trackRequest(fn, req, tracker, internals) {
  return {
    _tag: "Async",
    register: (env, cb) => {
      const controller = new AbortController();
      const previousSignal = _optionalChain([req, 'access', _73 => _73.init, 'optionalAccess', _74 => _74.signal]);
      let done = false;
      let abortedByPreviousSignal = false;
      let cancelInner;
      const abortFromPrevious = () => {
        abortedByPreviousSignal = true;
        try {
          // Propagate the caller's abort reason when the runtime supports it.
          controller.abort(_optionalChain([previousSignal, 'optionalAccess', _75 => _75.reason]));
        } catch (e12) {
          controller.abort();
        }
        _optionalChain([cancelInner, 'optionalCall', _76 => _76()]);
      };
      if (_optionalChain([previousSignal, 'optionalAccess', _77 => _77.aborted])) {
        // Caller's signal already fired before registration.
        abortFromPrevious();
      } else {
        _optionalChain([previousSignal, 'optionalAccess', _78 => _78.addEventListener, 'call', _79 => _79("abort", abortFromPrevious, { once: true })]);
      }
      internals.activeControllers.add(controller);
      tracker.requestStarted();
      tracker.emit("request-start");
      // Single exit point: first completion wins, later calls are ignored.
      const finish = (exit0) => {
        if (done) return;
        done = true;
        // A caller-initiated abort surfaces downstream as an Interrupt;
        // remap it to an Abort failure so the caller sees the right cause.
        const exit = abortedByPreviousSignal && exit0._tag === "Failure" && exit0.cause._tag === "Interrupt" ? { _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail({ _tag: "Abort" }) } : exit0;
        _optionalChain([previousSignal, 'optionalAccess', _80 => _80.removeEventListener, 'call', _81 => _81("abort", abortFromPrevious)]);
        internals.activeControllers.delete(controller);
        if (exit._tag === "Success") {
          tracker.requestCompleted();
        } else {
          tracker.requestFailed();
        }
        tracker.emit("request-end");
        cb(exit);
      };
      // Downstream sees OUR controller's signal, never the caller's directly.
      const trackedReq = {
        ...req,
        init: {
          ..._nullishCoalesce(req.init, () => ( {})),
          signal: controller.signal
        }
      };
      try {
        cancelInner = registerHttpEffect(fn(trackedReq), env, finish);
      } catch (error) {
        // A synchronous throw during registration becomes a FetchError failure.
        finish({
          _tag: "Failure",
          cause: _chunkTGIFUAK4cjs.Cause.fail({ _tag: "FetchError", message: String(error) })
        });
      }
      // Canceller handed back to the effect runtime.
      return () => {
        if (done) return;
        try {
          controller.abort();
        } catch (e13) {
        }
        if (cancelInner) {
          cancelInner();
        } else {
          finish({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.interrupt() });
        }
      };
    }
  };
}
|
|
2699
|
+
|
|
2700
|
+
// src/http/lifecycle/middleware.ts
|
|
2701
|
+
/**
 * Middleware that resolves a bearer token via `tokenProvider()` (an async
 * effect) before each request and injects it as an `Authorization` header,
 * overriding any existing Authorization entry on the request.
 */
function withAuth(tokenProvider) {
  return (next) => (req) => _chunkTGIFUAK4cjs.asyncFlatMap.call(void 0, tokenProvider(), (token) => {
    const headers = {
      ..._nullishCoalesce(req.headers, () => ( {})),
      Authorization: `Bearer ${token}`
    };
    return next({ ...req, headers });
  });
}
|
|
2717
|
+
/**
 * Middleware that reports request/response/error phases to `logger`, with a
 * millisecond duration (rounded) for response and error entries. Exceptions
 * thrown by the logger itself are swallowed so logging can never break the
 * request pipeline; outcomes are passed through unchanged.
 */
function withLogging(logger) {
  // Shield the pipeline from a throwing logger.
  const safeLog = (entry) => {
    try {
      logger(entry);
    } catch (err) {
      // Deliberately ignored.
    }
  };
  return (next) => (req) => {
    safeLog({ phase: "request", req });
    const startedAt = now();
    const elapsed = () => Math.round(now() - startedAt);
    return _chunkTGIFUAK4cjs.asyncFold.call(void 0,
      next(req),
      (error) => {
        safeLog({ phase: "error", req, error, durationMs: elapsed() });
        return _chunkTGIFUAK4cjs.asyncFail.call(void 0, error);
      },
      (res) => {
        safeLog({ phase: "response", req, res, durationMs: elapsed() });
        return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, res);
      }
    );
  };
}
|
|
2747
|
+
/**
 * Middleware that maps each successful response through `fn(res, req)`.
 * Errors from downstream are re-failed unchanged; an exception thrown by the
 * transform itself is converted into a FetchError failure whose message is
 * the stringified exception.
 */
function withResponseTransform(fn) {
  return (next) => (req) => _chunkTGIFUAK4cjs.asyncFold.call(void 0,
    next(req),
    (error) => _chunkTGIFUAK4cjs.asyncFail.call(void 0, error),
    (res) => {
      try {
        return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, fn(res, req));
      } catch (e) {
        return _chunkTGIFUAK4cjs.asyncFail.call(void 0, { _tag: "FetchError", message: String(e) });
      }
    }
  );
}
|
|
2767
|
+
|
|
2768
|
+
// src/http/compression/decompressor.ts
|
|
2769
|
+
var _zlib = require('zlib'); var _zlib2 = _interopRequireDefault(_zlib);
|
|
2770
|
+
|
|
2771
|
+
// src/http/compression/environment.ts
|
|
2772
|
+
/**
 * Detects a Node.js runtime by checking for a global `process` object that
 * exposes `process.versions.node`.
 */
function isNodeEnvironment() {
  if (typeof process === "undefined") {
    return false;
  }
  const versions = process.versions;
  return versions != null && versions.node != null;
}
|
|
2775
|
+
|
|
2776
|
+
// src/http/compression/decompressorNode.ts
|
|
2777
|
+
/**
 * Builds a zlib-backed response-body decompressor for Node.js.
 *
 * `decompress(data, encoding)` accepts a Buffer (or anything Buffer.from can
 * consume) and synchronously inflates "gzip", "br" or "deflate" payloads.
 * Returns `{ ok: true, data }` on success, `{ ok: false, error }` for an
 * unrecognized encoding or any zlib failure.
 *
 * @param zlib2 - The Node `zlib` module (injected for testability).
 */
function createNodeDecompressor(zlib2) {
  return {
    isPassthrough: false,
    decompress(data, encoding) {
      try {
        const input = Buffer.isBuffer(data) ? data : Buffer.from(data);
        let inflated;
        if (encoding === "gzip") {
          inflated = zlib2.gunzipSync(input);
        } else if (encoding === "br") {
          inflated = zlib2.brotliDecompressSync(input);
        } else if (encoding === "deflate") {
          inflated = zlib2.inflateSync(input);
        } else {
          return { ok: false, error: `Unsupported encoding: ${encoding}` };
        }
        return { ok: true, data: inflated };
      } catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        return { ok: false, error: message };
      }
    }
  };
}
|
|
2805
|
+
|
|
2806
|
+
// src/http/compression/decompressor.noop.ts
|
|
2807
|
+
/**
 * Builds the passthrough decompressor used outside Node.js: bytes are
 * returned unchanged (normalized to a Buffer) regardless of the encoding,
 * and `isPassthrough` is true so callers can count skipped responses.
 */
function createNoopDecompressor() {
  const decompress = (data, _encoding) => ({
    ok: true,
    data: Buffer.isBuffer(data) ? data : Buffer.from(data)
  });
  return { isPassthrough: true, decompress };
}
|
|
2816
|
+
|
|
2817
|
+
// src/http/compression/decompressor.ts
|
|
2818
|
+
/**
 * Selects the decompressor implementation for the current runtime:
 * the zlib-backed one under Node.js, the byte-passthrough one elsewhere.
 */
function createDecompressor() {
  return isNodeEnvironment() ? createNodeDecompressor(_zlib2.default) : createNoopDecompressor();
}
|
|
2824
|
+
|
|
2825
|
+
// src/http/compression/types.ts
// Content-encodings the decompression middleware can decode; also used as the
// default Accept-Encoding list injected into outgoing requests.
var SUPPORTED_ENCODINGS = ["br", "gzip", "deflate"];
|
|
2827
|
+
/**
 * Creates a fresh, all-zero response-decompression stats record. Every call
 * returns new objects (including the nested per-encoding counters) so that
 * separate middleware instances never share mutable state.
 */
function emptyStats2() {
  const perEncoding = { gzip: 0, br: 0, deflate: 0 };
  return {
    decompressed: perEncoding,
    compressedBytes: 0,
    decompressedBytes: 0,
    passthroughCount: 0,
    errorCount: 0,
    unsupportedEncodingCount: 0
  };
}
|
|
2837
|
+
/**
 * Creates a fresh, all-zero request-compression stats record; each call
 * returns a new object so instances never share counters.
 */
function emptyRequestCompressionStats() {
  const zeroed = {
    compressedCount: 0,
    skippedCount: 0,
    errorCount: 0,
    originalBytes: 0,
    compressedBytes: 0
  };
  return zeroed;
}
|
|
2846
|
+
|
|
2847
|
+
// src/http/compression/middleware.ts
|
|
2848
|
+
/**
 * Ensures the outgoing request advertises the supported compression formats.
 *
 * If the request already carries an Accept-Encoding header (any casing), it
 * is returned untouched; otherwise a copy is returned whose headers include
 * `Accept-Encoding` joined from `encodings`. The input request is not mutated.
 */
function injectAcceptEncoding(req, encodings) {
  const headers = _nullishCoalesce(req.headers, () => ( {}));
  const alreadySet = Object.keys(headers).some(
    (name) => name.toLowerCase() === "accept-encoding"
  );
  if (alreadySet) {
    return req;
  }
  const merged = {
    ...headers,
    "Accept-Encoding": encodings.join(", ")
  };
  return { ...req, headers: merged };
}
|
|
2862
|
+
// True when `enc` is one of the decodable content-encodings listed in SUPPORTED_ENCODINGS.
function isSupportedEncoding(enc) {
  return SUPPORTED_ENCODINGS.includes(enc);
}
|
|
2865
|
+
/**
 * Decodes a response body according to its Content-Encoding header.
 *
 * Encodings are applied last-first (the header lists them in application
 * order). The loop stops early in three cases, each updating `stats`:
 * - an unsupported encoding: if nothing was decoded yet the response passes
 *   through untouched; otherwise the partially-decoded body is returned with
 *   the remaining encodings left in the header;
 * - an encoding not in `enabledEncodings`: same partial-return behavior;
 * - a decompression failure: the ORIGINAL response is returned unchanged.
 * On full success the Content-Encoding header is removed and Content-Length
 * is rewritten. Bodies are carried as latin1 text between hops (byte-safe);
 * the fully-decoded body is re-encoded as utf-8 text.
 *
 * Fix over previous revision: removed the unused local `originalData`
 * (assigned but never read).
 *
 * @param res - Response with `headers` (plain object) and `bodyText`.
 * @param decompressor - `{ decompress(buf, enc) }` as built by createDecompressor.
 * @param enabledEncodings - Encodings the caller opted into.
 * @param stats - Mutable counters (see emptyStats2), updated in place.
 */
function processResponse(res, decompressor, enabledEncodings, stats) {
  const contentEncodingKey = Object.keys(res.headers).find(
    (k) => k.toLowerCase() === "content-encoding"
  );
  const contentEncodingValue = contentEncodingKey ? _optionalChain([res, 'access', _82 => _82.headers, 'access', _83 => _83[contentEncodingKey], 'optionalAccess', _84 => _84.trim, 'call', _85 => _85()]) : void 0;
  if (!contentEncodingValue || contentEncodingValue.toLowerCase() === "identity") {
    stats.passthroughCount++;
    return res;
  }
  const encodings = contentEncodingValue.split(",").map((e) => e.trim().toLowerCase());
  // Decode in reverse: the last-listed encoding was applied last.
  const reversedEncodings = [...encodings].reverse();
  let currentData = Buffer.from(res.bodyText, "latin1");
  let decompressedCount = 0;
  for (let i = 0; i < reversedEncodings.length; i++) {
    const enc = reversedEncodings[i];
    if (!isSupportedEncoding(enc)) {
      stats.unsupportedEncodingCount++;
      if (decompressedCount === 0) {
        stats.passthroughCount++;
        return res;
      }
      // Partial decode: keep the not-yet-removed encodings in the header.
      const remainingEncodings = reversedEncodings.slice(i).reverse();
      const newHeaders2 = { ...res.headers };
      if (contentEncodingKey) {
        newHeaders2[contentEncodingKey] = remainingEncodings.join(", ");
      }
      newHeaders2["Content-Length"] = String(currentData.byteLength);
      return {
        ...res,
        headers: newHeaders2,
        bodyText: currentData.toString("latin1")
      };
    }
    if (!enabledEncodings.includes(enc)) {
      stats.passthroughCount++;
      if (decompressedCount === 0) {
        return res;
      }
      const remainingEncodings = reversedEncodings.slice(i).reverse();
      const newHeaders2 = { ...res.headers };
      if (contentEncodingKey) {
        newHeaders2[contentEncodingKey] = remainingEncodings.join(", ");
      }
      newHeaders2["Content-Length"] = String(currentData.byteLength);
      return {
        ...res,
        headers: newHeaders2,
        bodyText: currentData.toString("latin1")
      };
    }
    const result = decompressor.decompress(currentData, enc);
    if (!result.ok) {
      // Decode failure discards any partial progress and returns the original.
      stats.errorCount++;
      return res;
    }
    stats.compressedBytes += currentData.byteLength;
    stats.decompressedBytes += result.data.byteLength;
    stats.decompressed[enc]++;
    decompressedCount++;
    currentData = result.data;
  }
  // Fully decoded: strip Content-Encoding (any residual casing) and fix length.
  const newHeaders = { ...res.headers };
  if (contentEncodingKey) {
    delete newHeaders[contentEncodingKey];
  }
  const lowerKey = Object.keys(newHeaders).find(
    (k) => k.toLowerCase() === "content-encoding"
  );
  if (lowerKey) {
    delete newHeaders[lowerKey];
  }
  newHeaders["Content-Length"] = String(currentData.byteLength);
  return {
    ...res,
    headers: newHeaders,
    bodyText: currentData.toString("utf-8")
  };
}
|
|
2944
|
+
/**
 * Builds response-decompression middleware plus a stats accessor.
 * The middleware advertises Accept-Encoding on outgoing requests and
 * transparently decodes compressed response bodies on the way back.
 */
function makeCompressionMiddleware(config) {
  // Default to every supported encoding when the caller specifies none.
  const enabledEncodings =
    config == null || config.encodings == null ? [...SUPPORTED_ENCODINGS] : config.encodings;
  const decompressor = createDecompressor();
  const mutableStats = emptyStats2();
  const middleware = (next) => (req) => {
    const outgoing = injectAcceptEncoding(req, enabledEncodings);
    return _chunkTGIFUAK4cjs.asyncFold.call(
      void 0,
      next(outgoing),
      // HttpErrors pass through untouched.
      (error) => _chunkTGIFUAK4cjs.asyncFail.call(void 0, error),
      // Successful responses are decoded unless the platform decompressor
      // is a no-op, in which case only the passthrough counter ticks.
      (res) => {
        if (decompressor.isPassthrough) {
          mutableStats.passthroughCount++;
          return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, res);
        }
        const processed = processResponse(res, decompressor, enabledEncodings, mutableStats);
        return _chunkTGIFUAK4cjs.asyncSucceed.call(void 0, processed);
      }
    );
  };
  // Immutable snapshot of the running counters.
  const stats = () =>
    Object.freeze({
      decompressed: Object.freeze({ ...mutableStats.decompressed }),
      compressedBytes: mutableStats.compressedBytes,
      decompressedBytes: mutableStats.decompressedBytes,
      passthroughCount: mutableStats.passthroughCount,
      errorCount: mutableStats.errorCount,
      unsupportedEncodingCount: mutableStats.unsupportedEncodingCount
    });
  return { middleware, stats };
}
|
|
2982
|
+
// Public alias: the factory above handles the response side of compression.
var makeResponseCompressionMiddleware = makeCompressionMiddleware;
// Only methods that conventionally carry a payload are request-compressed by default.
var DEFAULT_REQUEST_COMPRESS_METHODS = ["POST", "PUT", "PATCH"];
|
|
2984
|
+
/**
 * Builds request-body compression middleware plus a stats accessor.
 * Bodies of eligible requests (method allowed, body present and large
 * enough) are compressed with the configured encoding before dispatch.
 */
function makeRequestCompressionMiddleware(config) {
  // Resolve configuration with nullish defaults.
  const encoding = config == null || config.encoding == null ? "gzip" : config.encoding;
  const rawMinBytes = config == null || config.minBytes == null ? 1024 : config.minBytes;
  const minBytes = Math.max(0, Math.floor(rawMinBytes));
  const methodList =
    config == null || config.methods == null ? DEFAULT_REQUEST_COMPRESS_METHODS : config.methods;
  const methods = new Set(methodList.map((m) => m.toUpperCase()));
  const mutableStats = emptyRequestCompressionStats();
  const middleware = (next) => (req) =>
    next(compressRequest(req, encoding, minBytes, methods, mutableStats));
  // Immutable snapshot of the running counters.
  const stats = () =>
    Object.freeze({
      compressedCount: mutableStats.compressedCount,
      skippedCount: mutableStats.skippedCount,
      errorCount: mutableStats.errorCount,
      originalBytes: mutableStats.originalBytes,
      compressedBytes: mutableStats.compressedBytes
    });
  return { middleware, stats };
}
|
|
3004
|
+
/**
 * Compresses a request body in place (returning a new request object) when
 * the request is eligible; otherwise returns the request unchanged.
 * Eligibility: allowed method, a defined body without an existing
 * Content-Encoding header, and at least `minBytes` of payload.
 * Compression failures are counted and the original request is sent as-is.
 */
function compressRequest(req, encoding, minBytes, methods, stats) {
  const skip = () => {
    stats.skippedCount++;
    return req;
  };
  if (!methods.has(req.method.toUpperCase())) return skip();
  if (req.body === void 0) return skip();
  if (hasHeader(req.headers, "content-encoding")) return skip();
  const originalBytes = httpBodyByteLength(req.body);
  if (originalBytes < minBytes) return skip();
  try {
    const compressedBody = compressBuffer(httpBodyToBuffer(req.body), encoding);
    stats.compressedCount++;
    stats.originalBytes += originalBytes;
    stats.compressedBytes += compressedBody.byteLength;
    const headers = setHeaders(req.headers, {
      "Content-Encoding": encoding,
      "Content-Length": String(compressedBody.byteLength)
    });
    return { ...req, body: compressedBody, headers };
  } catch (_compressError) {
    // Best effort: on any compression failure, fall back to the raw request.
    stats.errorCount++;
    return req;
  }
}
|
|
3036
|
+
/**
 * Synchronously compresses `input` with the given encoding.
 * zlib is loaded lazily so environments that never compress a request
 * don't pay for it at module-init time.
 *
 * @param {Buffer} input raw request body bytes
 * @param {"gzip"|"br"|"deflate"} encoding wire encoding to apply
 * @returns {Buffer} compressed bytes
 * @throws {Error} for an unrecognized encoding (callers count this as a
 *   compression failure and send the request uncompressed)
 */
function compressBuffer(input, encoding) {
  const zlib2 = _chunkTGIFUAK4cjs.__require.call(void 0, "zlib");
  switch (encoding) {
    case "gzip":
      return zlib2.gzipSync(input);
    case "br":
      return zlib2.brotliCompressSync(input);
    case "deflate":
      return zlib2.deflateSync(input);
    default:
      // Previously an unknown encoding fell off the switch and returned
      // undefined, surfacing later as an opaque TypeError on `.byteLength`.
      // Throw a descriptive error instead; the caller's try/catch already
      // treats any throw here as a recoverable compression failure.
      throw new Error(`unsupported request compression encoding: ${String(encoding)}`);
  }
}
|
|
3047
|
+
/**
 * Case-insensitively checks whether a header map contains `name`.
 * A missing/undefined header map counts as "not present".
 */
function hasHeader(headers, name) {
  if (!headers) {
    return false;
  }
  const wanted = name.toLowerCase();
  for (const key of Object.keys(headers)) {
    if (key.toLowerCase() === wanted) {
      return true;
    }
  }
  return false;
}
|
|
3052
|
+
/**
 * Returns a copy of `headers` with each entry of `values` applied,
 * overwriting an existing header case-insensitively (preserving the
 * original key's casing) or appending under the new name otherwise.
 */
function setHeaders(headers, values) {
  // Spreading null/undefined yields an empty object, so no explicit default is needed.
  const merged = { ...headers };
  for (const [name, value] of Object.entries(values)) {
    const lowerName = name.toLowerCase();
    const existingKey = Object.keys(merged).find((k) => k.toLowerCase() === lowerName);
    const targetKey = existingKey || name;
    merged[targetKey] = value;
  }
  return merged;
}
|
|
3061
|
+
|
|
3062
|
+
// src/http/batching.ts
|
|
3063
|
+
// Default ceiling on how many queued requests are coalesced into one batch.
var DEFAULT_MAX_BATCH_SIZE = 16;
// Default time (ms) a queued request waits for batch-mates before flushing.
var DEFAULT_MAX_WAIT_MS = 5;
|
|
3065
|
+
/**
 * Middleware that coalesces concurrent requests sharing a batch key into a
 * single downstream call. Entries queue per key until either `maxBatchSize`
 * is reached or `maxWaitMs` elapses; `config.encode` folds the queued
 * requests into one batch request and `config.decode` fans the batch
 * response back out, one result per original request, positionally.
 * Individual entries can be cancelled (they receive an interrupt) without
 * failing their batch-mates.
 */
function withRequestBatching(config) {
  const maxBatchSize = Math.max(1, Math.floor(_nullishCoalesce(config.maxBatchSize, () => ( DEFAULT_MAX_BATCH_SIZE))));
  const maxWaitMs = Math.max(0, Math.floor(_nullishCoalesce(config.maxWaitMs, () => ( DEFAULT_MAX_WAIT_MS))));
  // Default batch key: requests to the same method+url coalesce.
  const keyOf = _nullishCoalesce(config.key, () => ( ((req) => `${req.method}:${req.url}`)));
  // key -> { key, entries, timer? } for batches not yet flushed.
  const pending = /* @__PURE__ */ new Map();
  return (next) => {
    return (req) => {
      let key;
      try {
        // Any throw from the user-supplied predicates opts this request
        // out of batching rather than failing it.
        if (config.shouldBatch && !config.shouldBatch(req)) return next(req);
        key = _nullishCoalesce(keyOf(req), () => ( void 0));
      } catch (e18) {
        return next(req);
      }
      // A falsy key (null/undefined/"") means "do not batch".
      if (!key) return next(req);
      // Hand back a raw Async effect: enqueue on register, flush on
      // size/timer, and support per-entry cancellation.
      return {
        _tag: "Async",
        register: (env, cb) => {
          const entry = { req, env, cb, cancelled: false, done: false };
          const batch = getOrCreatePending(key);
          batch.entries.push(entry);
          emit(config, { type: "batch-enqueue", key, size: batch.entries.length, request: req });
          if (batch.entries.length >= maxBatchSize) {
            flush(key, next, "size");
          } else if (batch.timer === void 0) {
            // First entry arms the flush timer; later entries reuse it.
            batch.timer = setTimeout(() => flush(key, next, "timer"), maxWaitMs);
          }
          return () => cancelEntry(key, entry);
        }
      };
    };
  };
  // Fetch (or lazily create) the pending batch for a key.
  function getOrCreatePending(key) {
    const existing = pending.get(key);
    if (existing) return existing;
    const created = { key, entries: [] };
    pending.set(key, created);
    return created;
  }
  // Cancel a single entry: deliver an interrupt to its callback, remove it
  // from its (still-queued) batch, and if it was already flushed, cancel the
  // in-flight group once every member is cancelled or done.
  function cancelEntry(key, entry) {
    if (entry.done || entry.cancelled) return;
    entry.cancelled = true;
    complete(entry, { _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.interrupt() });
    const queued = pending.get(key);
    if (queued) {
      queued.entries = queued.entries.filter((e) => e !== entry);
      emit(config, { type: "batch-cancel", key, remaining: queued.entries.length });
      if (queued.entries.length === 0) {
        // Last queued entry gone: disarm the timer and drop the batch.
        if (queued.timer !== void 0) clearTimeout(queued.timer);
        pending.delete(key);
      }
      return;
    }
    // Not queued => the entry's batch already flushed; `group` was attached in flush().
    const group = entry.group;
    if (!group || group.cancelled) return;
    if (group.entries.every((e) => e.cancelled || e.done)) {
      group.cancelled = true;
      _optionalChain([group, 'access', _90 => _90.cancel, 'optionalCall', _91 => _91()]);
    }
  }
  // Flush the pending batch for `key` through `downstream`. `reason` is
  // "size" or "timer" and is only used for telemetry.
  function flush(key, downstream, reason) {
    const batch = pending.get(key);
    if (!batch) return;
    pending.delete(key);
    if (batch.timer !== void 0) clearTimeout(batch.timer);
    const entries = batch.entries.filter((entry) => !entry.cancelled && !entry.done);
    if (entries.length === 0) return;
    emit(config, { type: "batch-flush", key, size: entries.length, reason });
    let batchReq;
    try {
      batchReq = config.encode(entries.map((entry) => entry.req));
    } catch (e) {
      // Encoder failure fails every member of the batch.
      failEntries(config, key, entries, toFetchError(e));
      return;
    }
    const group = { key, entries, cancelled: false };
    for (const entry of entries) entry.group = group;
    const effect = downstream(batchReq);
    // NOTE(review): the whole group runs under entries[0].env — assumes all
    // batched entries share an environment; confirm with callers.
    group.cancel = runEffect(effect, entries[0].env, (exit) => {
      if (exit._tag === "Failure") {
        // Map interrupts to Abort; defects become FetchErrors.
        const err = exit.cause._tag === "Fail" ? exit.cause.error : exit.cause._tag === "Interrupt" ? { _tag: "Abort" } : toFetchError(exit.cause.defect);
        failEntries(config, key, entries, err);
        return;
      }
      let decoded;
      try {
        decoded = config.decode(exit.value, entries.map((entry) => entry.req));
        // The decoder must return exactly one response per request, in order.
        if (decoded.length !== entries.length) {
          throw new Error(`batch decoder returned ${decoded.length} responses for ${entries.length} requests`);
        }
      } catch (e) {
        failEntries(config, key, entries, toFetchError(e));
        return;
      }
      for (let i = 0; i < entries.length; i++) {
        complete(entries[i], { _tag: "Success", value: decoded[i] });
      }
    });
  }
}
|
|
3165
|
+
/**
 * Delivers `exit` to an entry's callback exactly once; later calls for the
 * same entry are ignored.
 */
function complete(entry, exit) {
  if (!entry.done) {
    entry.done = true;
    entry.cb(exit);
  }
}
|
|
3170
|
+
/**
 * Fails every entry of a flushed batch with the same error, emitting one
 * batch-error telemetry event first.
 */
function failEntries(config, key, entries, error) {
  emit(config, { type: "batch-error", key, size: entries.length, error });
  entries.forEach((entry) => {
    complete(entry, { _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail(error) });
  });
}
|
|
3176
|
+
/**
 * Coerces an arbitrary thrown value into an HttpError: known HttpErrors
 * pass through, anything else becomes a FetchError with a best-effort message.
 */
function toFetchError(error) {
  if (isHttpError(error)) {
    return error;
  }
  const message = error instanceof Error ? error.message : String(error);
  return { _tag: "FetchError", message };
}
|
|
3180
|
+
/**
 * Type guard: true when `error` is one of the tagged HttpError variants
 * produced by this client.
 */
function isHttpError(error) {
  if (typeof error !== "object" || error === null || !("_tag" in error)) {
    return false;
  }
  switch (error._tag) {
    case "Abort":
    case "BadUrl":
    case "FetchError":
    case "Timeout":
    case "PoolRejected":
    case "PoolTimeout":
      return true;
    default:
      return false;
  }
}
|
|
3185
|
+
/**
 * Fires the optional batching telemetry hook. Listener errors are
 * deliberately swallowed so observability can never break the request path.
 */
function emit(config, event) {
  if (!config.onEvent) {
    return;
  }
  try {
    config.onEvent(event);
  } catch (_listenerError) {
    // Best-effort only.
  }
}
|
|
3192
|
+
/**
 * Minimal interpreter for the effect ADT used by the batching layer
 * ("Succeed" | "Fail" | "Sync" | "Async" | "FlatMap" | "Fold" | "Fork").
 * Runs `effect` against `env`, delivering the final Exit to `cb`, and
 * returns a canceller. After the canceller fires, no further callbacks
 * are delivered.
 *
 * NOTE(review): only the most recently registered Async step's canceller
 * is retained in `currentCancel`, and FlatMap/Fold continuations recurse
 * on the JS stack — presumably acceptable for the shallow effects built
 * by this file; confirm.
 */
function runEffect(effect, env, cb) {
  let cancelled = false;
  let currentCancel;
  // Terminal continuation: suppress delivery after cancellation.
  const finish = (exit) => {
    if (cancelled) return;
    cb(exit);
  };
  // Evaluate one effect node, feeding its Exit to continuation `k`.
  const run = (eff, k) => {
    if (cancelled) return;
    switch (eff._tag) {
      case "Succeed":
        k({ _tag: "Success", value: eff.value });
        return;
      case "Fail":
        k({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.fail(eff.error) });
        return;
      case "Sync":
        // A throwing thunk is a defect (die), not a typed failure.
        try {
          k({ _tag: "Success", value: eff.thunk(env) });
        } catch (e) {
          k({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.die(e) });
        }
        return;
      case "Async": {
        const cancel = eff.register(env, (exit) => {
          // The step settled; its canceller is no longer relevant.
          currentCancel = void 0;
          k(exit);
        });
        currentCancel = typeof cancel === "function" ? cancel : void 0;
        return;
      }
      case "FlatMap":
        run(eff.first, (exit) => {
          if (exit._tag === "Failure") {
            k(exit);
            return;
          }
          // A throwing `andThen` is converted into a defect.
          try {
            run(eff.andThen(exit.value), k);
          } catch (e) {
            k({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.die(e) });
          }
        });
        return;
      case "Fold":
        run(eff.first, (exit) => {
          try {
            if (exit._tag === "Success") {
              run(eff.onSuccess(exit.value), k);
            } else if (exit.cause._tag === "Fail") {
              // Only typed failures are recoverable by onFailure;
              // defects and interrupts propagate unchanged.
              run(eff.onFailure(exit.cause.error), k);
            } else {
              k(exit);
            }
          } catch (e) {
            k({ _tag: "Failure", cause: _chunkTGIFUAK4cjs.Cause.die(e) });
          }
        });
        return;
      case "Fork":
        // NOTE(review): Fork reports immediate success and the forked
        // effect is not executed here at all — confirm this is intended
        // for the batching use case.
        k({ _tag: "Success", value: void 0 });
        return;
    }
  };
  run(effect, finish);
  return () => {
    if (cancelled) return;
    cancelled = true;
    _optionalChain([currentCancel, 'optionalCall', _92 => _92()]);
  };
}
|
|
3263
|
+
|
|
3264
|
+
// src/http/prewarm.ts
|
|
3265
|
+
/**
 * Warms HTTP connections to the configured targets before real traffic
 * flows. Returns an abortable effect resolving to a summary:
 * `{ attempted, warmed, failed, skipped, attempts }`.
 *
 * Targets come from `config.urls` / `config.origins` / `config.baseUrl`
 * via resolveTargets. Each target is fetched sequentially (default method
 * "HEAD", cache disabled). Failures are recorded per attempt; with
 * `config.failFast` the first failure rejects the whole run with the
 * normalized error.
 */
function prewarmConnections(config = {}) {
  const fetchImpl = _nullishCoalesce(config.fetchImpl, () => ( globalThis.fetch));
  const method = _nullishCoalesce(config.method, () => ( "HEAD"));
  const targets = resolveTargets(config);
  return _chunkTGIFUAK4cjs.fromPromiseAbortable.call(void 0,
    async (signal) => {
      // No fetch available (e.g. very old runtimes) or nothing to warm:
      // report everything as skipped instead of failing.
      if (typeof fetchImpl !== "function" || targets.length === 0) {
        return { attempted: 0, warmed: 0, failed: 0, skipped: targets.length, attempts: [] };
      }
      const attempts = [];
      for (const url of targets) {
        const origin = new URL(url).origin;
        const started = performance.now();
        emit2(config, { type: "prewarm-start", url, origin });
        try {
          const res = await fetchImpl(url, {
            method,
            headers: config.headers,
            // Bypass HTTP caches so a real connection is opened.
            cache: "no-store",
            signal
          });
          const ms = Math.round(performance.now() - started);
          attempts.push({ url, origin, ok: true, status: res.status, ms });
          emit2(config, { type: "prewarm-success", url, origin, status: res.status, ms });
        } catch (e) {
          const ms = Math.round(performance.now() - started);
          const error = normalizePrewarmError(e);
          attempts.push({ url, origin, ok: false, error, ms });
          emit2(config, { type: "prewarm-failure", url, origin, error, ms });
          if (config.failFast) throw error;
        }
      }
      return {
        attempted: attempts.length,
        warmed: attempts.filter((attempt) => attempt.ok).length,
        failed: attempts.filter((attempt) => !attempt.ok).length,
        skipped: 0,
        attempts
      };
    },
    normalizePrewarmError,
    {
      label: "http:prewarm",
      timeoutMs: config.timeoutMs,
      // NOTE(review): truthy check means timeoutMs of 0 produces no custom
      // timeout reason — presumably intentional (0 disables the timeout);
      // confirm against fromPromiseAbortable's contract.
      timeoutReason: config.timeoutMs ? () => ({
        _tag: "Timeout",
        timeoutMs: config.timeoutMs,
        phase: "request",
        message: `HTTP prewarm timed out after ${config.timeoutMs}ms`
      }) : void 0
    }
  );
}
|
|
3318
|
+
// Back-compat alias retained on the public API surface.
var prewarmHttpConnections = prewarmConnections;
|
|
3319
|
+
/**
 * Middleware that lazily prewarms the connection to a request's origin the
 * first time that origin is seen, then forwards the request. Prewarm
 * failures are non-fatal (the request proceeds) unless `config.failFast`
 * is set or the prewarm itself was aborted.
 */
function withConnectionPrewarming(config = {}) {
  // once=true (default): each origin is prewarmed at most one time.
  const once = _nullishCoalesce(config.once, () => ( true));
  const warmed = /* @__PURE__ */ new Set();
  const warming = /* @__PURE__ */ new Set();
  return (next) => (req) => {
    if (config.shouldPrewarm && !config.shouldPrewarm(req)) return next(req);
    // Prewarm target defaults to the request's own URL.
    const target = _nullishCoalesce(_optionalChain([config, 'access', _93 => _93.target, 'optionalCall', _94 => _94(req)]), () => ( req.url));
    if (!target) return next(req);
    const resolved = resolveUrl(target, config.baseUrl);
    if (!resolved) return next(req);
    // De-dup key is the origin, not the full URL.
    const key = resolved.origin;
    // The warmed/warming guards only apply in `once` mode.
    if (once && (warmed.has(key) || warming.has(key))) return next(req);
    // NOTE(review): the key is marked "warming" when the middleware builds
    // the effect, but asyncFold's effect may run later (or never) — confirm
    // the key cannot be stranded in `warming` if the effect is dropped.
    warming.add(key);
    return _chunkTGIFUAK4cjs.asyncFold.call(void 0,
      prewarmConnections({
        ...config,
        urls: [resolved.toString()],
        // Origin-list expansion is disabled: we warm exactly this URL.
        origins: void 0,
        onEvent: (event) => {
          if (event.type === "prewarm-success") warmed.add(key);
          _optionalChain([config, 'access', _95 => _95.onEvent, 'optionalCall', _96 => _96(event)]);
        }
      }),
      (error) => {
        warming.delete(key);
        // Aborts and failFast failures propagate; anything else degrades
        // to sending the request over a cold connection.
        if (config.failFast || error._tag === "Abort") return _chunkTGIFUAK4cjs.asyncFail.call(void 0, error);
        return next(req);
      },
      () => {
        warming.delete(key);
        return next(req);
      }
    );
  };
}
|
|
3354
|
+
/**
 * Expands the prewarm config into a de-duplicated list of absolute URLs:
 * explicit `urls` (resolved against `baseUrl`), then `origins` joined with
 * `path` (default "/"), and finally `baseUrl` + path as a fallback when
 * nothing else produced a target. Unparseable entries are dropped.
 */
function resolveTargets(config) {
  const path = config.path == null ? "/" : config.path;
  const targets = [];
  const pushResolved = (value, base) => {
    const resolved = resolveUrl(value, base);
    if (resolved) targets.push(resolved.toString());
  };
  const urls = config.urls == null ? [] : config.urls;
  for (const url of urls) {
    pushResolved(url, config.baseUrl);
  }
  const origins = config.origins == null ? [] : config.origins;
  for (const origin of origins) {
    pushResolved(path, origin);
  }
  if (targets.length === 0 && config.baseUrl) {
    pushResolved(path, config.baseUrl);
  }
  return Array.from(new Set(targets));
}
|
|
3371
|
+
/**
 * Parses `value` (optionally against `baseUrl`) into a URL object,
 * returning undefined instead of throwing on invalid input. A falsy
 * baseUrl (including "") is treated as absent.
 */
function resolveUrl(value, baseUrl) {
  const base = baseUrl || void 0;
  try {
    return new URL(value, base);
  } catch (_invalidUrl) {
    return void 0;
  }
}
|
|
3378
|
+
/**
 * Normalizes a value thrown during prewarming into an HttpError:
 * known HttpErrors pass through, DOM-style AbortErrors map to Abort,
 * and everything else becomes a FetchError.
 */
function normalizePrewarmError(error) {
  if (isHttpError2(error)) {
    return error;
  }
  const isAbort = typeof error === "object" && error !== null && error.name === "AbortError";
  if (isAbort) {
    return { _tag: "Abort" };
  }
  const message = error instanceof Error ? error.message : String(error);
  return { _tag: "FetchError", message };
}
|
|
3385
|
+
/**
 * Type guard: true when `error` is one of the tagged HttpError variants
 * produced by this client.
 */
function isHttpError2(error) {
  if (typeof error !== "object" || error === null || !("_tag" in error)) {
    return false;
  }
  const knownTags = ["Abort", "BadUrl", "FetchError", "Timeout", "PoolRejected", "PoolTimeout"];
  return knownTags.includes(error._tag);
}
|
|
3390
|
+
/**
 * Fires the optional prewarm telemetry hook. Listener errors are
 * deliberately swallowed so observability problems can never affect
 * prewarming behaviour.
 */
function emit2(config, event) {
  if (!config.onEvent) {
    return;
  }
  try {
    config.onEvent(event);
  } catch (_ignored) {
    /* best-effort */
  }
}
|
|
3397
|
+
|
|
3398
|
+
|
|
3399
|
+
|
|
3400
|
+
|
|
3401
|
+
|
|
3402
|
+
|
|
3403
|
+
|
|
3404
|
+
|
|
3405
|
+
|
|
3406
|
+
|
|
3407
|
+
|
|
3408
|
+
|
|
3409
|
+
|
|
3410
|
+
|
|
3411
|
+
|
|
3412
|
+
|
|
3413
|
+
|
|
3414
|
+
|
|
3415
|
+
|
|
3416
|
+
|
|
3417
|
+
|
|
3418
|
+
|
|
3419
|
+
|
|
3420
|
+
|
|
3421
|
+
|
|
3422
|
+
|
|
3423
|
+
|
|
3424
|
+
|
|
3425
|
+
|
|
3426
|
+
|
|
3427
|
+
|
|
3428
|
+
|
|
3429
|
+
|
|
3430
|
+
|
|
3431
|
+
|
|
3432
|
+
|
|
443
3433
|
|
|
444
3434
|
|
|
445
3435
|
|
|
@@ -450,4 +3440,4 @@ function httpClientStream(cfg = {}) {
|
|
|
450
3440
|
|
|
451
3441
|
|
|
452
3442
|
|
|
453
|
-
exports.decorate = decorate; exports.httpClient = httpClient; exports.httpClientStream = httpClientStream; exports.httpClientWithMeta = httpClientWithMeta; exports.makeHttp = makeHttp; exports.makeHttpStream = makeHttpStream; exports.normalizeHeadersInit = normalizeHeadersInit; exports.withMiddleware = withMiddleware; exports.withRetryStream = withRetryStream;
|
|
3443
|
+
exports.DEFAULT_CACHE_RELEVANT_HEADERS = DEFAULT_CACHE_RELEVANT_HEADERS; exports.HttpConcurrencyPool = HttpConcurrencyPool; exports.LRUCache = LRUCache; exports.LifecycleStatsTracker = LifecycleStatsTracker; exports.PriorityQueue = PriorityQueue; exports.SEPARATOR = SEPARATOR; exports.SUPPORTED_ENCODINGS = SUPPORTED_ENCODINGS; exports.backoffDelayMs = backoffDelayMs; exports.clampPriority = clampPriority; exports.computeCacheKey = computeCacheKey; exports.decorate = decorate; exports.defaultRetryOnError = defaultRetryOnError; exports.defaultRetryOnStatus = defaultRetryOnStatus; exports.defaultRetryableMethods = defaultRetryableMethods; exports.httpClient = httpClient; exports.httpClientStream = httpClientStream; exports.httpClientWithMeta = httpClientWithMeta; exports.makeCompressionMiddleware = makeCompressionMiddleware; exports.makeHttp = makeHttp; exports.makeHttpClient = makeHttpClient; exports.makeHttpStream = makeHttpStream; exports.makeLifecycleClient = makeLifecycleClient; exports.makeRequestCompressionMiddleware = makeRequestCompressionMiddleware; exports.makeResponseCompressionMiddleware = makeResponseCompressionMiddleware; exports.normalizeHeadersInit = normalizeHeadersInit; exports.normalizeRetryBudget = normalizeRetryBudget; exports.parseCacheKey = parseCacheKey; exports.prewarmConnections = prewarmConnections; exports.prewarmHttpConnections = prewarmHttpConnections; exports.resolveHttpPoolKey = resolveHttpPoolKey; exports.retryAfterMs = retryAfterMs; exports.validatedJson = validatedJson; exports.withAuth = withAuth; exports.withCache = withCache; exports.withCircuitBreaker = withCircuitBreaker; exports.withConnectionPrewarming = withConnectionPrewarming; exports.withDedup = withDedup; exports.withLogging = withLogging; exports.withMiddleware = withMiddleware; exports.withPriority = withPriority; exports.withRequestBatching = withRequestBatching; exports.withResponseTransform = withResponseTransform; exports.withRetry = withRetry; 
exports.withRetryStream = withRetryStream; exports.withTracing = withTracing;
|