brass-runtime 1.14.0 → 1.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -3
- package/dist/agent/cli/main.cjs +44 -43
- package/dist/agent/cli/main.js +5 -4
- package/dist/agent/cli/main.mjs +5 -4
- package/dist/agent/index.cjs +4 -3
- package/dist/agent/index.d.ts +1 -1
- package/dist/agent/index.js +3 -2
- package/dist/agent/index.mjs +3 -2
- package/dist/{chunk-WJESVBWN.js → chunk-3QMOKAS5.js} +9 -7
- package/dist/{chunk-BMRF4FN6.js → chunk-4NHES7VK.mjs} +59 -237
- package/dist/chunk-AR22SXML.js +1043 -0
- package/dist/{chunk-4N2JEK4H.mjs → chunk-BDF4AMWX.mjs} +27 -151
- package/dist/chunk-BDYEENHT.js +224 -0
- package/dist/{chunk-JT7D6M5H.js → chunk-BMH5AV44.js} +27 -151
- package/dist/chunk-ELOOF35R.mjs +131 -0
- package/dist/chunk-JFPU5GQI.mjs +1043 -0
- package/dist/{chunk-MQF7HZ7Y.mjs → chunk-K6M7MDZ4.mjs} +9 -7
- package/dist/chunk-MS34J5LY.cjs +224 -0
- package/dist/{chunk-UWMMYKVK.mjs → chunk-PPUXIH5R.js} +59 -237
- package/dist/chunk-R3R2FVLG.cjs +131 -0
- package/dist/chunk-STVLQ3XD.cjs +489 -0
- package/dist/{chunk-BKBFSOGT.cjs → chunk-TGIFUAK4.cjs} +26 -150
- package/dist/chunk-TO7IKXYT.js +131 -0
- package/dist/chunk-UMAZLXAB.mjs +224 -0
- package/dist/{chunk-XTMZTVIT.cjs → chunk-VEZNF5GZ.cjs} +136 -134
- package/dist/chunk-XPZNXSVN.cjs +1043 -0
- package/dist/core/index.cjs +216 -0
- package/dist/core/index.d.ts +673 -0
- package/dist/core/index.js +216 -0
- package/dist/core/index.mjs +216 -0
- package/dist/{effect-DM56H743.d.ts → effect-CMOQKX8y.d.ts} +12 -11
- package/dist/http/index.cjs +2557 -235
- package/dist/http/index.d.ts +1514 -4
- package/dist/http/index.js +2549 -227
- package/dist/http/index.mjs +2549 -227
- package/dist/index.cjs +237 -1168
- package/dist/index.d.ts +7 -673
- package/dist/index.js +77 -1008
- package/dist/index.mjs +77 -1008
- package/dist/stream-FQm9h4Mg.d.ts +74 -0
- package/dist/tracing-DNT9jEbr.d.ts +106 -0
- package/package.json +11 -3
- package/dist/chunk-SKVY72E5.cjs +0 -667
- package/dist/stream-Oqe6WeLE.d.ts +0 -173
package/dist/http/index.mjs
CHANGED
|
@@ -1,9 +1,15 @@
|
|
|
1
1
|
import {
|
|
2
|
-
makeCircuitBreaker,
|
|
3
|
-
sleep,
|
|
4
2
|
streamFromReadableStream
|
|
5
|
-
} from "../chunk-
|
|
3
|
+
} from "../chunk-4NHES7VK.mjs";
|
|
4
|
+
import {
|
|
5
|
+
makeCircuitBreaker,
|
|
6
|
+
sleep
|
|
7
|
+
} from "../chunk-UMAZLXAB.mjs";
|
|
8
|
+
import "../chunk-ELOOF35R.mjs";
|
|
6
9
|
import {
|
|
10
|
+
Cause,
|
|
11
|
+
__require,
|
|
12
|
+
asyncEffect,
|
|
7
13
|
asyncFail,
|
|
8
14
|
asyncFlatMap,
|
|
9
15
|
asyncFold,
|
|
@@ -13,7 +19,7 @@ import {
|
|
|
13
19
|
resolveWasmModule,
|
|
14
20
|
toPromise,
|
|
15
21
|
withAsyncPromise
|
|
16
|
-
} from "../chunk-
|
|
22
|
+
} from "../chunk-BDF4AMWX.mjs";
|
|
17
23
|
|
|
18
24
|
// src/http/optics/lens.ts
|
|
19
25
|
var Lens = {
|
|
@@ -42,6 +48,211 @@ var mergeHeaders = (extra) => (req) => Lens.over(Request.headers, (h) => ({ ...h
|
|
|
42
48
|
var mergeHeadersUnder = (under) => (req) => Lens.over(Request.headers, (h) => ({ ...under, ...h }))(req);
|
|
43
49
|
var setHeaderIfMissing = (k, v) => (req) => Lens.over(Request.headers, (h) => h[k] ? h : { ...h, [k]: v })(req);
|
|
44
50
|
|
|
51
|
+
// src/http/retry/wasmRetryPlanner.ts
|
|
52
|
+
var WasmRetryPlannerBridge = class {
|
|
53
|
+
planner;
|
|
54
|
+
constructor(Ctor) {
|
|
55
|
+
this.planner = new Ctor();
|
|
56
|
+
}
|
|
57
|
+
start(options) {
|
|
58
|
+
return this.planner.start(
|
|
59
|
+
options.nowMs,
|
|
60
|
+
options.maxRetries,
|
|
61
|
+
options.baseDelayMs,
|
|
62
|
+
options.maxDelayMs,
|
|
63
|
+
options.maxElapsedMs ?? -1,
|
|
64
|
+
BigInt(this.seed())
|
|
65
|
+
);
|
|
66
|
+
}
|
|
67
|
+
nextDelayMs(retryId, options) {
|
|
68
|
+
const delay = this.planner.next_delay_ms(retryId, options.nowMs, options.retryable, options.retryAfterMs ?? -1);
|
|
69
|
+
return delay < 0 ? void 0 : delay;
|
|
70
|
+
}
|
|
71
|
+
drop(retryId) {
|
|
72
|
+
this.planner.drop_state(retryId);
|
|
73
|
+
}
|
|
74
|
+
stats() {
|
|
75
|
+
return {
|
|
76
|
+
live: this.planner.metric_u64(0),
|
|
77
|
+
planned: this.planner.metric_u64(1),
|
|
78
|
+
exhausted: this.planner.metric_u64(2),
|
|
79
|
+
dropped: this.planner.metric_u64(3)
|
|
80
|
+
};
|
|
81
|
+
}
|
|
82
|
+
seed() {
|
|
83
|
+
return Math.floor(Math.random() * Number.MAX_SAFE_INTEGER);
|
|
84
|
+
}
|
|
85
|
+
};
|
|
86
|
+
function makeWasmRetryPlanner() {
|
|
87
|
+
const mod = resolveWasmModule();
|
|
88
|
+
const Ctor = mod?.BrassWasmRetryPlanner;
|
|
89
|
+
if (!Ctor) throw new Error("brass-runtime wasm retry planner is not available. Run npm run build:wasm first.");
|
|
90
|
+
return new WasmRetryPlannerBridge(Ctor);
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
// src/http/retry/retry.ts
|
|
94
|
+
var defaultRetryableMethods = ["GET", "HEAD", "OPTIONS"];
|
|
95
|
+
var defaultRetryOnStatus = (s) => s === 408 || s === 429 || s === 500 || s === 502 || s === 503 || s === 504;
|
|
96
|
+
var defaultRetryOnError = (e) => e._tag === "FetchError" || e._tag === "Timeout" || e._tag === "PoolTimeout";
|
|
97
|
+
var clamp = (n, min, max) => Math.max(min, Math.min(max, n));
|
|
98
|
+
var backoffDelayMs = (attempt, base, cap) => {
|
|
99
|
+
const b = Math.max(0, base);
|
|
100
|
+
const c = Math.max(0, cap);
|
|
101
|
+
const exp = b * Math.pow(2, attempt);
|
|
102
|
+
const lim = clamp(exp, 0, c);
|
|
103
|
+
return Math.floor(Math.random() * lim);
|
|
104
|
+
};
|
|
105
|
+
var headerCI = (h, name) => {
|
|
106
|
+
const k = Object.keys(h).find((x) => x.toLowerCase() === name.toLowerCase());
|
|
107
|
+
return k ? h[k] : void 0;
|
|
108
|
+
};
|
|
109
|
+
var retryAfterMs = (headers) => {
|
|
110
|
+
const v = headerCI(headers, "retry-after")?.trim();
|
|
111
|
+
if (!v) return void 0;
|
|
112
|
+
const secs = Number(v);
|
|
113
|
+
if (Number.isFinite(secs)) return Math.max(0, Math.floor(secs * 1e3));
|
|
114
|
+
const t = Date.parse(v);
|
|
115
|
+
if (Number.isFinite(t)) return Math.max(0, t - Date.now());
|
|
116
|
+
return void 0;
|
|
117
|
+
};
|
|
118
|
+
var normalizeRetryBudget = (ms) => {
|
|
119
|
+
if (ms === void 0 || !Number.isFinite(ms)) return void 0;
|
|
120
|
+
return Math.max(0, Math.floor(ms));
|
|
121
|
+
};
|
|
122
|
+
var resolveEffectivePolicy = (req, basePolicy) => {
|
|
123
|
+
const override = req.retry;
|
|
124
|
+
if (override === false) return null;
|
|
125
|
+
if (override === void 0) return basePolicy;
|
|
126
|
+
return {
|
|
127
|
+
...basePolicy,
|
|
128
|
+
...override.maxRetries !== void 0 && { maxRetries: override.maxRetries },
|
|
129
|
+
...override.baseDelayMs !== void 0 && { baseDelayMs: override.baseDelayMs },
|
|
130
|
+
...override.maxDelayMs !== void 0 && { maxDelayMs: override.maxDelayMs },
|
|
131
|
+
...override.retryOnStatus !== void 0 && { retryOnStatus: override.retryOnStatus }
|
|
132
|
+
};
|
|
133
|
+
};
|
|
134
|
+
var resolveRetryEngine = (p) => {
|
|
135
|
+
if (p.engine !== void 0) {
|
|
136
|
+
if (p.engine === "ts" || p.engine === "wasm") return p.engine;
|
|
137
|
+
throw new Error(`brass-runtime retry engine must be 'ts' or 'wasm'; received '${String(p.engine)}'`);
|
|
138
|
+
}
|
|
139
|
+
if (p.wasm === true) return "wasm";
|
|
140
|
+
if (p.wasm === false) return "ts";
|
|
141
|
+
return "ts";
|
|
142
|
+
};
|
|
143
|
+
var withRetry = (p) => (next) => {
|
|
144
|
+
const retryOnMethods = p.retryOnMethods ?? defaultRetryableMethods;
|
|
145
|
+
const retryEngine = resolveRetryEngine(p);
|
|
146
|
+
const wasmPlanner = retryEngine === "wasm" ? makeWasmRetryPlanner() : void 0;
|
|
147
|
+
const isMethodRetryable = (req) => retryOnMethods.includes(req.method);
|
|
148
|
+
const nextDelay = (ep, epMaxElapsedMs, retryId, attempt, startedAt, retryable, retryAfter) => {
|
|
149
|
+
if (!retryable) return void 0;
|
|
150
|
+
if (wasmPlanner && retryId !== void 0) {
|
|
151
|
+
return wasmPlanner.nextDelayMs(retryId, {
|
|
152
|
+
nowMs: performance.now(),
|
|
153
|
+
retryable,
|
|
154
|
+
retryAfterMs: retryAfter
|
|
155
|
+
});
|
|
156
|
+
}
|
|
157
|
+
const remainingBudget = epMaxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : epMaxElapsedMs - (performance.now() - startedAt);
|
|
158
|
+
if (remainingBudget <= 0) return void 0;
|
|
159
|
+
const rawDelay = retryAfter === void 0 ? backoffDelayMs(attempt, ep.baseDelayMs, ep.maxDelayMs) : Math.min(retryAfter, ep.maxDelayMs);
|
|
160
|
+
return Math.max(0, Math.min(rawDelay, remainingBudget));
|
|
161
|
+
};
|
|
162
|
+
const sleepWithCleanup = (ms, onCancel) => {
|
|
163
|
+
return asyncEffect((_env, cb) => {
|
|
164
|
+
const delay = Math.max(0, Math.floor(ms));
|
|
165
|
+
const id = setTimeout(() => cb({ _tag: "Success", value: void 0 }), delay);
|
|
166
|
+
return () => {
|
|
167
|
+
clearTimeout(id);
|
|
168
|
+
onCancel();
|
|
169
|
+
};
|
|
170
|
+
});
|
|
171
|
+
};
|
|
172
|
+
const loop = (req, attempt, startedAt, retryId, ep, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop) => {
|
|
173
|
+
if (!isMethodRetryable(req)) return next(req);
|
|
174
|
+
const effectiveReq = attempt > 0 ? (() => {
|
|
175
|
+
const boostedReq = { ...req };
|
|
176
|
+
boostedReq.priority = Math.max(0, originalPriority - 1);
|
|
177
|
+
return boostedReq;
|
|
178
|
+
})() : req;
|
|
179
|
+
const remainingBudget = () => epMaxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : epMaxElapsedMs - (performance.now() - startedAt);
|
|
180
|
+
return asyncFold(
|
|
181
|
+
next(effectiveReq),
|
|
182
|
+
(e) => {
|
|
183
|
+
if (e._tag === "Abort" || e._tag === "BadUrl" || e._tag === "PoolRejected" || e._tag === "CircuitBreakerOpen") {
|
|
184
|
+
safeDrop(retryId);
|
|
185
|
+
return asyncFail(e);
|
|
186
|
+
}
|
|
187
|
+
const retryable = attempt < ep.maxRetries && epRetryOnError(e) && remainingBudget() > 0;
|
|
188
|
+
const d = nextDelay(ep, epMaxElapsedMs, retryId, attempt, startedAt, retryable);
|
|
189
|
+
if (d === void 0 || d <= 0 && epMaxElapsedMs !== void 0) {
|
|
190
|
+
safeDrop(retryId);
|
|
191
|
+
return asyncFail(e);
|
|
192
|
+
}
|
|
193
|
+
if (ep.onRetry) {
|
|
194
|
+
ep.onRetry({
|
|
195
|
+
attempt,
|
|
196
|
+
delayMs: d,
|
|
197
|
+
error: e,
|
|
198
|
+
status: void 0,
|
|
199
|
+
url: req.url,
|
|
200
|
+
method: req.method,
|
|
201
|
+
timestamp: Date.now()
|
|
202
|
+
});
|
|
203
|
+
}
|
|
204
|
+
return asyncFlatMap(sleepWithCleanup(d, () => safeDrop(retryId)), () => loop(req, attempt + 1, startedAt, retryId, ep, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop));
|
|
205
|
+
},
|
|
206
|
+
(w) => {
|
|
207
|
+
const retryable = attempt < ep.maxRetries && epRetryOnStatus(w.status) && remainingBudget() > 0;
|
|
208
|
+
const ra = ep.respectRetryAfter === false ? void 0 : retryAfterMs(w.headers);
|
|
209
|
+
const d = nextDelay(ep, epMaxElapsedMs, retryId, attempt, startedAt, retryable, ra);
|
|
210
|
+
if (d === void 0 || d <= 0 && epMaxElapsedMs !== void 0) {
|
|
211
|
+
safeDrop(retryId);
|
|
212
|
+
return asyncSucceed(w);
|
|
213
|
+
}
|
|
214
|
+
if (ep.onRetry) {
|
|
215
|
+
ep.onRetry({
|
|
216
|
+
attempt,
|
|
217
|
+
delayMs: d,
|
|
218
|
+
error: void 0,
|
|
219
|
+
status: w.status,
|
|
220
|
+
url: req.url,
|
|
221
|
+
method: req.method,
|
|
222
|
+
timestamp: Date.now()
|
|
223
|
+
});
|
|
224
|
+
}
|
|
225
|
+
return asyncFlatMap(sleepWithCleanup(d, () => safeDrop(retryId)), () => loop(req, attempt + 1, startedAt, retryId, ep, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop));
|
|
226
|
+
}
|
|
227
|
+
);
|
|
228
|
+
};
|
|
229
|
+
return (req) => {
|
|
230
|
+
const effectivePolicy = resolveEffectivePolicy(req, p);
|
|
231
|
+
if (effectivePolicy === null) return next(req);
|
|
232
|
+
if (!isMethodRetryable(req)) return next(req);
|
|
233
|
+
const epRetryOnStatus = effectivePolicy.retryOnStatus ?? defaultRetryOnStatus;
|
|
234
|
+
const epRetryOnError = effectivePolicy.retryOnError ?? defaultRetryOnError;
|
|
235
|
+
const epMaxElapsedMs = normalizeRetryBudget(effectivePolicy.maxElapsedMs);
|
|
236
|
+
const originalPriority = req.priority ?? 5;
|
|
237
|
+
const startedAt = performance.now();
|
|
238
|
+
const retryId = wasmPlanner?.start({
|
|
239
|
+
nowMs: startedAt,
|
|
240
|
+
maxRetries: effectivePolicy.maxRetries,
|
|
241
|
+
baseDelayMs: effectivePolicy.baseDelayMs,
|
|
242
|
+
maxDelayMs: effectivePolicy.maxDelayMs,
|
|
243
|
+
maxElapsedMs: epMaxElapsedMs
|
|
244
|
+
});
|
|
245
|
+
let plannerDropped = false;
|
|
246
|
+
const safeDrop = (id) => {
|
|
247
|
+
if (id !== void 0 && !plannerDropped) {
|
|
248
|
+
plannerDropped = true;
|
|
249
|
+
wasmPlanner?.drop(id);
|
|
250
|
+
}
|
|
251
|
+
};
|
|
252
|
+
return loop(req, 0, startedAt, retryId, effectivePolicy, epMaxElapsedMs, epRetryOnStatus, epRetryOnError, originalPriority, safeDrop);
|
|
253
|
+
};
|
|
254
|
+
};
|
|
255
|
+
|
|
45
256
|
// src/http/wasmPermitPool.ts
|
|
46
257
|
var DECISION_RUN_NOW = 0;
|
|
47
258
|
var DECISION_QUEUED = 1;
|
|
@@ -528,6 +739,30 @@ var timeoutReason = (req, url, timeoutMs) => ({
|
|
|
528
739
|
phase: "request",
|
|
529
740
|
message: `HTTP ${req.method} ${url.origin} timed out after ${timeoutMs}ms`
|
|
530
741
|
});
|
|
742
|
+
var linkAbortSignals = (runtimeSignal, requestSignal) => {
|
|
743
|
+
if (!requestSignal) return { signal: runtimeSignal, cleanup: () => void 0 };
|
|
744
|
+
const controller = new AbortController();
|
|
745
|
+
const abort = (source) => {
|
|
746
|
+
try {
|
|
747
|
+
controller.abort(source.reason);
|
|
748
|
+
} catch {
|
|
749
|
+
controller.abort();
|
|
750
|
+
}
|
|
751
|
+
};
|
|
752
|
+
const abortFromRuntime = () => abort(runtimeSignal);
|
|
753
|
+
const abortFromRequest = () => abort(requestSignal);
|
|
754
|
+
if (runtimeSignal.aborted) abortFromRuntime();
|
|
755
|
+
else runtimeSignal.addEventListener("abort", abortFromRuntime, { once: true });
|
|
756
|
+
if (requestSignal.aborted) abortFromRequest();
|
|
757
|
+
else requestSignal.addEventListener("abort", abortFromRequest, { once: true });
|
|
758
|
+
return {
|
|
759
|
+
signal: controller.signal,
|
|
760
|
+
cleanup: () => {
|
|
761
|
+
runtimeSignal.removeEventListener("abort", abortFromRuntime);
|
|
762
|
+
requestSignal.removeEventListener("abort", abortFromRequest);
|
|
763
|
+
}
|
|
764
|
+
};
|
|
765
|
+
};
|
|
531
766
|
function makeHttpStream(cfg = {}) {
|
|
532
767
|
const baseUrl = cfg.baseUrl ?? "";
|
|
533
768
|
const defaultHeaders = cfg.headers ?? {};
|
|
@@ -542,10 +777,12 @@ function makeHttpStream(cfg = {}) {
|
|
|
542
777
|
return fromPromiseAbortable(
|
|
543
778
|
async (signal) => {
|
|
544
779
|
let lease;
|
|
780
|
+
const linkedSignal = linkAbortSignals(signal, req.init?.signal);
|
|
781
|
+
let cleanupTransferredToBody = false;
|
|
545
782
|
try {
|
|
546
783
|
if (pool) {
|
|
547
784
|
const key = resolveHttpPoolKey(pool.keyResolver, req, url);
|
|
548
|
-
lease = await pool.acquire(key, signal);
|
|
785
|
+
lease = await pool.acquire(key, linkedSignal.signal);
|
|
549
786
|
}
|
|
550
787
|
const started = performance.now();
|
|
551
788
|
const res = await fetch(url, {
|
|
@@ -553,10 +790,14 @@ function makeHttpStream(cfg = {}) {
|
|
|
553
790
|
method: req.method,
|
|
554
791
|
headers: Request.headers.get(req),
|
|
555
792
|
body: req.body,
|
|
556
|
-
signal
|
|
793
|
+
signal: linkedSignal.signal
|
|
557
794
|
});
|
|
558
795
|
const headers = headersOf(res);
|
|
559
|
-
const body = streamFromReadableStream(res.body, normalizeHttpError
|
|
796
|
+
const body = streamFromReadableStream(res.body, normalizeHttpError, {
|
|
797
|
+
signal: linkedSignal.signal,
|
|
798
|
+
onRelease: linkedSignal.cleanup
|
|
799
|
+
});
|
|
800
|
+
cleanupTransferredToBody = res.body !== null;
|
|
560
801
|
lease?.release();
|
|
561
802
|
lease = void 0;
|
|
562
803
|
return {
|
|
@@ -567,6 +808,9 @@ function makeHttpStream(cfg = {}) {
|
|
|
567
808
|
ms: Math.round(performance.now() - started)
|
|
568
809
|
};
|
|
569
810
|
} finally {
|
|
811
|
+
if (!cleanupTransferredToBody) {
|
|
812
|
+
linkedSignal.cleanup();
|
|
813
|
+
}
|
|
570
814
|
lease?.release();
|
|
571
815
|
}
|
|
572
816
|
},
|
|
@@ -596,10 +840,11 @@ function makeHttp(cfg = {}) {
|
|
|
596
840
|
return fromPromiseAbortable(
|
|
597
841
|
async (signal) => {
|
|
598
842
|
let lease;
|
|
843
|
+
const linkedSignal = linkAbortSignals(signal, req.init?.signal);
|
|
599
844
|
try {
|
|
600
845
|
if (pool) {
|
|
601
846
|
const key = resolveHttpPoolKey(pool.keyResolver, req, url);
|
|
602
|
-
lease = await pool.acquire(key, signal);
|
|
847
|
+
lease = await pool.acquire(key, linkedSignal.signal);
|
|
603
848
|
}
|
|
604
849
|
const started = performance.now();
|
|
605
850
|
const res = await fetch(url, {
|
|
@@ -607,7 +852,7 @@ function makeHttp(cfg = {}) {
|
|
|
607
852
|
method: req.method,
|
|
608
853
|
headers: Request.headers.get(req),
|
|
609
854
|
body: req.body,
|
|
610
|
-
signal
|
|
855
|
+
signal: linkedSignal.signal
|
|
611
856
|
});
|
|
612
857
|
const bodyText = await res.text();
|
|
613
858
|
const headers = headersOf(res);
|
|
@@ -619,6 +864,7 @@ function makeHttp(cfg = {}) {
|
|
|
619
864
|
ms: Math.round(performance.now() - started)
|
|
620
865
|
};
|
|
621
866
|
} finally {
|
|
867
|
+
linkedSignal.cleanup();
|
|
622
868
|
lease?.release();
|
|
623
869
|
}
|
|
624
870
|
},
|
|
@@ -634,30 +880,13 @@ function makeHttp(cfg = {}) {
|
|
|
634
880
|
};
|
|
635
881
|
return decorate(run, metrics.snapshot);
|
|
636
882
|
}
|
|
637
|
-
var clamp = (n, min, max) => Math.max(min, Math.min(max, n));
|
|
638
|
-
var defaultRetryOnError = (e) => e._tag === "FetchError" || e._tag === "Timeout" || e._tag === "PoolTimeout";
|
|
639
|
-
var defaultRetryOnStatus = (s) => s === 408 || s === 429 || s === 500 || s === 502 || s === 503 || s === 504;
|
|
640
|
-
var backoffDelayMs = (attempt, base, cap) => {
|
|
641
|
-
const exp = base * Math.pow(2, attempt);
|
|
642
|
-
const lim = clamp(exp, 0, cap);
|
|
643
|
-
return Math.floor(Math.random() * lim);
|
|
644
|
-
};
|
|
645
|
-
var retryAfterMs = (headers) => {
|
|
646
|
-
const key = Object.keys(headers).find((k) => k.toLowerCase() === "retry-after");
|
|
647
|
-
if (!key) return void 0;
|
|
648
|
-
const v = headers[key]?.trim();
|
|
649
|
-
if (!v) return void 0;
|
|
650
|
-
const secs = Number(v);
|
|
651
|
-
if (Number.isFinite(secs)) return Math.max(0, Math.floor(secs * 1e3));
|
|
652
|
-
const t = Date.parse(v);
|
|
653
|
-
if (Number.isFinite(t)) return Math.max(0, t - Date.now());
|
|
654
|
-
return void 0;
|
|
655
|
-
};
|
|
656
883
|
var withRetryStream = (p) => (next) => {
|
|
657
884
|
const retryOnStatus = p.retryOnStatus ?? defaultRetryOnStatus;
|
|
658
885
|
const retryOnError = p.retryOnError ?? defaultRetryOnError;
|
|
659
|
-
const
|
|
886
|
+
const retryOnMethods = p.retryOnMethods ?? defaultRetryableMethods;
|
|
887
|
+
const maxElapsedMs = normalizeRetryBudget(p.maxElapsedMs);
|
|
660
888
|
const run = (req) => {
|
|
889
|
+
if (!retryOnMethods.includes(req.method)) return next(req);
|
|
661
890
|
const startedAt = performance.now();
|
|
662
891
|
const remainingBudget = () => maxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : maxElapsedMs - (performance.now() - startedAt);
|
|
663
892
|
const delayWithinBudget = (delayMs) => Math.max(0, Math.min(delayMs, remainingBudget()));
|
|
@@ -669,6 +898,15 @@ var withRetryStream = (p) => (next) => {
|
|
|
669
898
|
if (!canRetry) return asyncFail(e);
|
|
670
899
|
const d = delayWithinBudget(backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs));
|
|
671
900
|
if (d <= 0 && maxElapsedMs !== void 0) return asyncFail(e);
|
|
901
|
+
p.onRetry?.({
|
|
902
|
+
attempt,
|
|
903
|
+
delayMs: d,
|
|
904
|
+
error: e,
|
|
905
|
+
status: void 0,
|
|
906
|
+
url: req.url,
|
|
907
|
+
method: req.method,
|
|
908
|
+
timestamp: Date.now()
|
|
909
|
+
});
|
|
672
910
|
return asyncFlatMap(sleep(d), () => loop(attempt + 1));
|
|
673
911
|
},
|
|
674
912
|
(w) => {
|
|
@@ -678,6 +916,15 @@ var withRetryStream = (p) => (next) => {
|
|
|
678
916
|
const rawDelay = ra === void 0 ? backoffDelayMs(attempt, p.baseDelayMs, p.maxDelayMs) : Math.min(ra, p.maxDelayMs);
|
|
679
917
|
const d = delayWithinBudget(rawDelay);
|
|
680
918
|
if (d <= 0 && maxElapsedMs !== void 0) return asyncSucceed(w);
|
|
919
|
+
p.onRetry?.({
|
|
920
|
+
attempt,
|
|
921
|
+
delayMs: d,
|
|
922
|
+
error: void 0,
|
|
923
|
+
status: w.status,
|
|
924
|
+
url: req.url,
|
|
925
|
+
method: req.method,
|
|
926
|
+
timestamp: Date.now()
|
|
927
|
+
});
|
|
681
928
|
return asyncFlatMap(sleep(d), () => loop(attempt + 1));
|
|
682
929
|
}
|
|
683
930
|
);
|
|
@@ -686,205 +933,56 @@ var withRetryStream = (p) => (next) => {
|
|
|
686
933
|
return decorateStream(run, next.stats);
|
|
687
934
|
};
|
|
688
935
|
|
|
689
|
-
// src/http/
|
|
690
|
-
var
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
start(options) {
|
|
696
|
-
return this.planner.start(
|
|
697
|
-
options.nowMs,
|
|
698
|
-
options.maxRetries,
|
|
699
|
-
options.baseDelayMs,
|
|
700
|
-
options.maxDelayMs,
|
|
701
|
-
options.maxElapsedMs ?? -1,
|
|
702
|
-
BigInt(this.seed())
|
|
703
|
-
);
|
|
704
|
-
}
|
|
705
|
-
nextDelayMs(retryId, options) {
|
|
706
|
-
const delay = this.planner.next_delay_ms(retryId, options.nowMs, options.retryable, options.retryAfterMs ?? -1);
|
|
707
|
-
return delay < 0 ? void 0 : delay;
|
|
708
|
-
}
|
|
709
|
-
drop(retryId) {
|
|
710
|
-
this.planner.drop_state(retryId);
|
|
936
|
+
// src/http/httpClient.ts
|
|
937
|
+
var resolveFinalUrl = (baseUrl, url) => {
|
|
938
|
+
try {
|
|
939
|
+
return new URL(url, baseUrl ?? "").toString();
|
|
940
|
+
} catch {
|
|
941
|
+
return (baseUrl ?? "") + url;
|
|
711
942
|
}
|
|
712
|
-
|
|
943
|
+
};
|
|
944
|
+
var createHttpCore = (cfg = {}) => {
|
|
945
|
+
const wire = makeHttp(cfg);
|
|
946
|
+
const withPromise = (eff) => withAsyncPromise((e, env) => toPromise(e, env))(eff);
|
|
947
|
+
const requestRaw = (req) => wire(req);
|
|
948
|
+
const splitInit = (init) => {
|
|
949
|
+
const { headers, timeoutMs, poolKey, ...rest } = init ?? {};
|
|
713
950
|
return {
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
951
|
+
headers: normalizeHeadersInit(headers),
|
|
952
|
+
timeoutMs: typeof timeoutMs === "number" ? timeoutMs : void 0,
|
|
953
|
+
poolKey: typeof poolKey === "string" ? poolKey : void 0,
|
|
954
|
+
init: rest
|
|
718
955
|
};
|
|
719
|
-
}
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
if (!v) return void 0;
|
|
750
|
-
const secs = Number(v);
|
|
751
|
-
if (Number.isFinite(secs)) return Math.max(0, Math.floor(secs * 1e3));
|
|
752
|
-
const t = Date.parse(v);
|
|
753
|
-
if (Number.isFinite(t)) return Math.max(0, t - Date.now());
|
|
754
|
-
return void 0;
|
|
755
|
-
};
|
|
756
|
-
var normalizeBudget = (ms) => {
|
|
757
|
-
if (ms === void 0 || !Number.isFinite(ms)) return void 0;
|
|
758
|
-
return Math.max(0, Math.floor(ms));
|
|
759
|
-
};
|
|
760
|
-
var resolveRetryEngine = (p) => {
|
|
761
|
-
if (p.engine !== void 0) {
|
|
762
|
-
if (p.engine === "ts" || p.engine === "wasm") return p.engine;
|
|
763
|
-
throw new Error(`brass-runtime retry engine must be 'ts' or 'wasm'; received '${String(p.engine)}'`);
|
|
764
|
-
}
|
|
765
|
-
if (p.wasm === true) return "wasm";
|
|
766
|
-
if (p.wasm === false) return "ts";
|
|
767
|
-
return "ts";
|
|
768
|
-
};
|
|
769
|
-
var withRetry = (p) => (next) => {
|
|
770
|
-
const retryOnMethods = p.retryOnMethods ?? defaultRetryableMethods;
|
|
771
|
-
const retryOnStatus = p.retryOnStatus ?? defaultRetryOnStatus2;
|
|
772
|
-
const retryOnError = p.retryOnError ?? defaultRetryOnError2;
|
|
773
|
-
const maxElapsedMs = normalizeBudget(p.maxElapsedMs);
|
|
774
|
-
const retryEngine = resolveRetryEngine(p);
|
|
775
|
-
const wasmPlanner = retryEngine === "wasm" ? makeWasmRetryPlanner() : void 0;
|
|
776
|
-
const isMethodRetryable = (req) => retryOnMethods.includes(req.method);
|
|
777
|
-
const nextDelay = (retryId, attempt, startedAt, retryable, retryAfter) => {
|
|
778
|
-
if (!retryable) return void 0;
|
|
779
|
-
if (wasmPlanner && retryId !== void 0) {
|
|
780
|
-
return wasmPlanner.nextDelayMs(retryId, {
|
|
781
|
-
nowMs: performance.now(),
|
|
782
|
-
retryable,
|
|
783
|
-
retryAfterMs: retryAfter
|
|
784
|
-
});
|
|
785
|
-
}
|
|
786
|
-
const remainingBudget = maxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : maxElapsedMs - (performance.now() - startedAt);
|
|
787
|
-
if (remainingBudget <= 0) return void 0;
|
|
788
|
-
const rawDelay = retryAfter === void 0 ? backoffDelayMs2(attempt, p.baseDelayMs, p.maxDelayMs) : Math.min(retryAfter, p.maxDelayMs);
|
|
789
|
-
return Math.max(0, Math.min(rawDelay, remainingBudget));
|
|
790
|
-
};
|
|
791
|
-
const dropPlanner = (retryId) => {
|
|
792
|
-
if (retryId !== void 0) wasmPlanner?.drop(retryId);
|
|
793
|
-
};
|
|
794
|
-
const loop = (req, attempt, startedAt, retryId) => {
|
|
795
|
-
if (!isMethodRetryable(req)) return next(req);
|
|
796
|
-
const remainingBudget = () => maxElapsedMs === void 0 ? Number.POSITIVE_INFINITY : maxElapsedMs - (performance.now() - startedAt);
|
|
797
|
-
return asyncFold(
|
|
798
|
-
next(req),
|
|
799
|
-
(e) => {
|
|
800
|
-
if (e._tag === "Abort" || e._tag === "BadUrl" || e._tag === "PoolRejected") {
|
|
801
|
-
dropPlanner(retryId);
|
|
802
|
-
return asyncFail(e);
|
|
803
|
-
}
|
|
804
|
-
const retryable = attempt < p.maxRetries && retryOnError(e) && remainingBudget() > 0;
|
|
805
|
-
const d = nextDelay(retryId, attempt, startedAt, retryable);
|
|
806
|
-
if (d === void 0 || d <= 0 && maxElapsedMs !== void 0) {
|
|
807
|
-
dropPlanner(retryId);
|
|
808
|
-
return asyncFail(e);
|
|
809
|
-
}
|
|
810
|
-
return asyncFlatMap(sleep(d), () => loop(req, attempt + 1, startedAt, retryId));
|
|
811
|
-
},
|
|
812
|
-
(w) => {
|
|
813
|
-
const retryable = attempt < p.maxRetries && retryOnStatus(w.status) && remainingBudget() > 0;
|
|
814
|
-
const ra = p.respectRetryAfter === false ? void 0 : retryAfterMs2(w.headers);
|
|
815
|
-
const d = nextDelay(retryId, attempt, startedAt, retryable, ra);
|
|
816
|
-
if (d === void 0 || d <= 0 && maxElapsedMs !== void 0) {
|
|
817
|
-
dropPlanner(retryId);
|
|
818
|
-
return asyncSucceed(w);
|
|
819
|
-
}
|
|
820
|
-
return asyncFlatMap(sleep(d), () => loop(req, attempt + 1, startedAt, retryId));
|
|
821
|
-
}
|
|
822
|
-
);
|
|
823
|
-
};
|
|
824
|
-
return (req) => {
|
|
825
|
-
if (!isMethodRetryable(req)) return next(req);
|
|
826
|
-
const startedAt = performance.now();
|
|
827
|
-
const retryId = wasmPlanner?.start({
|
|
828
|
-
nowMs: startedAt,
|
|
829
|
-
maxRetries: p.maxRetries,
|
|
830
|
-
baseDelayMs: p.baseDelayMs,
|
|
831
|
-
maxDelayMs: p.maxDelayMs,
|
|
832
|
-
maxElapsedMs
|
|
833
|
-
});
|
|
834
|
-
return loop(req, 0, startedAt, retryId);
|
|
835
|
-
};
|
|
836
|
-
};
|
|
837
|
-
|
|
838
|
-
// src/http/httpClient.ts
|
|
839
|
-
var resolveFinalUrl = (baseUrl, url) => {
|
|
840
|
-
try {
|
|
841
|
-
return new URL(url, baseUrl ?? "").toString();
|
|
842
|
-
} catch {
|
|
843
|
-
return (baseUrl ?? "") + url;
|
|
844
|
-
}
|
|
845
|
-
};
|
|
846
|
-
var createHttpCore = (cfg = {}) => {
|
|
847
|
-
const wire = makeHttp(cfg);
|
|
848
|
-
const withPromise = (eff) => withAsyncPromise((e, env) => toPromise(e, env))(eff);
|
|
849
|
-
const requestRaw = (req) => wire(req);
|
|
850
|
-
const splitInit = (init) => {
|
|
851
|
-
const { headers, timeoutMs, poolKey, ...rest } = init ?? {};
|
|
852
|
-
return {
|
|
853
|
-
headers: normalizeHeadersInit(headers),
|
|
854
|
-
timeoutMs: typeof timeoutMs === "number" ? timeoutMs : void 0,
|
|
855
|
-
poolKey: typeof poolKey === "string" ? poolKey : void 0,
|
|
856
|
-
init: rest
|
|
857
|
-
};
|
|
858
|
-
};
|
|
859
|
-
const applyInitHeaders = (headers) => (req) => headers ? mergeHeaders(headers)(req) : req;
|
|
860
|
-
const buildReq = (method, url, init, body) => {
|
|
861
|
-
const s = splitInit(init);
|
|
862
|
-
const req = {
|
|
863
|
-
method,
|
|
864
|
-
url,
|
|
865
|
-
...body && body.length > 0 ? { body } : {},
|
|
866
|
-
...s.timeoutMs !== void 0 ? { timeoutMs: s.timeoutMs } : {},
|
|
867
|
-
...s.poolKey !== void 0 ? { poolKey: s.poolKey } : {},
|
|
868
|
-
init: s.init
|
|
869
|
-
};
|
|
870
|
-
return applyInitHeaders(s.headers)(req);
|
|
871
|
-
};
|
|
872
|
-
const toResponse = (w, body) => ({
|
|
873
|
-
status: w.status,
|
|
874
|
-
statusText: w.statusText,
|
|
875
|
-
headers: w.headers,
|
|
876
|
-
body
|
|
877
|
-
});
|
|
878
|
-
return {
|
|
879
|
-
cfg,
|
|
880
|
-
wire,
|
|
881
|
-
withPromise,
|
|
882
|
-
requestRaw,
|
|
883
|
-
splitInit,
|
|
884
|
-
applyInitHeaders,
|
|
885
|
-
buildReq,
|
|
886
|
-
toResponse
|
|
887
|
-
};
|
|
956
|
+
};
|
|
957
|
+
const applyInitHeaders = (headers) => (req) => headers ? mergeHeaders(headers)(req) : req;
|
|
958
|
+
const buildReq = (method, url, init, body) => {
|
|
959
|
+
const s = splitInit(init);
|
|
960
|
+
const req = {
|
|
961
|
+
method,
|
|
962
|
+
url,
|
|
963
|
+
...body && body.length > 0 ? { body } : {},
|
|
964
|
+
...s.timeoutMs !== void 0 ? { timeoutMs: s.timeoutMs } : {},
|
|
965
|
+
...s.poolKey !== void 0 ? { poolKey: s.poolKey } : {},
|
|
966
|
+
init: s.init
|
|
967
|
+
};
|
|
968
|
+
return applyInitHeaders(s.headers)(req);
|
|
969
|
+
};
|
|
970
|
+
const toResponse = (w, body) => ({
|
|
971
|
+
status: w.status,
|
|
972
|
+
statusText: w.statusText,
|
|
973
|
+
headers: w.headers,
|
|
974
|
+
body
|
|
975
|
+
});
|
|
976
|
+
return {
|
|
977
|
+
cfg,
|
|
978
|
+
wire,
|
|
979
|
+
withPromise,
|
|
980
|
+
requestRaw,
|
|
981
|
+
splitInit,
|
|
982
|
+
applyInitHeaders,
|
|
983
|
+
buildReq,
|
|
984
|
+
toResponse
|
|
985
|
+
};
|
|
888
986
|
};
|
|
889
987
|
function httpClient(cfg = {}) {
|
|
890
988
|
const core = createHttpCore(cfg);
|
|
@@ -1026,6 +1124,92 @@ function httpClientStream(cfg = {}) {
|
|
|
1026
1124
|
return make(wire);
|
|
1027
1125
|
}
|
|
1028
1126
|
|
|
1127
|
+
// src/http/effectRunner.ts
|
|
1128
|
+
function registerHttpEffect(effect, env, cb) {
|
|
1129
|
+
let done = false;
|
|
1130
|
+
let currentCancel;
|
|
1131
|
+
const finish = (exit) => {
|
|
1132
|
+
if (done) return;
|
|
1133
|
+
done = true;
|
|
1134
|
+
currentCancel = void 0;
|
|
1135
|
+
cb(exit);
|
|
1136
|
+
};
|
|
1137
|
+
const run = (current, cont) => {
|
|
1138
|
+
if (done) return;
|
|
1139
|
+
switch (current._tag) {
|
|
1140
|
+
case "Succeed":
|
|
1141
|
+
cont({ _tag: "Success", value: current.value });
|
|
1142
|
+
return;
|
|
1143
|
+
case "Fail":
|
|
1144
|
+
cont({ _tag: "Failure", cause: Cause.fail(current.error) });
|
|
1145
|
+
return;
|
|
1146
|
+
case "Sync":
|
|
1147
|
+
try {
|
|
1148
|
+
cont({ _tag: "Success", value: current.thunk(env) });
|
|
1149
|
+
} catch (e) {
|
|
1150
|
+
cont({ _tag: "Failure", cause: Cause.die(e) });
|
|
1151
|
+
}
|
|
1152
|
+
return;
|
|
1153
|
+
case "Async": {
|
|
1154
|
+
const cancel = current.register(env, (exit) => {
|
|
1155
|
+
currentCancel = void 0;
|
|
1156
|
+
if (done) return;
|
|
1157
|
+
cont(exit);
|
|
1158
|
+
});
|
|
1159
|
+
currentCancel = typeof cancel === "function" ? cancel : void 0;
|
|
1160
|
+
return;
|
|
1161
|
+
}
|
|
1162
|
+
case "FlatMap":
|
|
1163
|
+
run(current.first, (exit) => {
|
|
1164
|
+
if (done) return;
|
|
1165
|
+
if (exit._tag === "Failure") {
|
|
1166
|
+
cont(exit);
|
|
1167
|
+
return;
|
|
1168
|
+
}
|
|
1169
|
+
try {
|
|
1170
|
+
run(current.andThen(exit.value), cont);
|
|
1171
|
+
} catch (e) {
|
|
1172
|
+
cont({ _tag: "Failure", cause: Cause.die(e) });
|
|
1173
|
+
}
|
|
1174
|
+
});
|
|
1175
|
+
return;
|
|
1176
|
+
case "Fold":
|
|
1177
|
+
run(current.first, (exit) => {
|
|
1178
|
+
if (done) return;
|
|
1179
|
+
try {
|
|
1180
|
+
if (exit._tag === "Success") {
|
|
1181
|
+
run(current.onSuccess(exit.value), cont);
|
|
1182
|
+
return;
|
|
1183
|
+
}
|
|
1184
|
+
if (exit.cause._tag === "Fail") {
|
|
1185
|
+
run(current.onFailure(exit.cause.error), cont);
|
|
1186
|
+
return;
|
|
1187
|
+
}
|
|
1188
|
+
cont(exit);
|
|
1189
|
+
} catch (e) {
|
|
1190
|
+
cont({ _tag: "Failure", cause: Cause.die(e) });
|
|
1191
|
+
}
|
|
1192
|
+
});
|
|
1193
|
+
return;
|
|
1194
|
+
case "Fork":
|
|
1195
|
+
cont({ _tag: "Success", value: void 0 });
|
|
1196
|
+
return;
|
|
1197
|
+
}
|
|
1198
|
+
};
|
|
1199
|
+
run(effect, finish);
|
|
1200
|
+
return () => {
|
|
1201
|
+
if (done) return;
|
|
1202
|
+
const cancel = currentCancel;
|
|
1203
|
+
currentCancel = void 0;
|
|
1204
|
+
done = true;
|
|
1205
|
+
try {
|
|
1206
|
+
cancel?.();
|
|
1207
|
+
} finally {
|
|
1208
|
+
cb({ _tag: "Failure", cause: Cause.interrupt() });
|
|
1209
|
+
}
|
|
1210
|
+
};
|
|
1211
|
+
}
|
|
1212
|
+
|
|
1029
1213
|
// src/http/circuitBreaker.ts
|
|
1030
1214
|
function withCircuitBreaker(config = {}) {
|
|
1031
1215
|
if (config.perOrigin) {
|
|
@@ -1046,7 +1230,7 @@ function withCircuitBreaker(config = {}) {
|
|
|
1046
1230
|
};
|
|
1047
1231
|
return (next) => (req) => {
|
|
1048
1232
|
const breaker2 = getBreaker(req.url);
|
|
1049
|
-
return breaker2
|
|
1233
|
+
return protectLazy(breaker2, next, req);
|
|
1050
1234
|
};
|
|
1051
1235
|
}
|
|
1052
1236
|
const breaker = makeCircuitBreaker({
|
|
@@ -1057,7 +1241,31 @@ function withCircuitBreaker(config = {}) {
|
|
|
1057
1241
|
})
|
|
1058
1242
|
});
|
|
1059
1243
|
return (next) => (req) => {
|
|
1060
|
-
return breaker
|
|
1244
|
+
return protectLazy(breaker, next, req);
|
|
1245
|
+
};
|
|
1246
|
+
}
|
|
1247
|
+
function protectLazy(breaker, next, req) {
|
|
1248
|
+
return {
|
|
1249
|
+
_tag: "Async",
|
|
1250
|
+
register: (env, cb) => {
|
|
1251
|
+
let cancel;
|
|
1252
|
+
try {
|
|
1253
|
+
const deferred = {
|
|
1254
|
+
_tag: "Async",
|
|
1255
|
+
register: (innerEnv, innerCb) => registerHttpEffect(next(req), innerEnv, innerCb)
|
|
1256
|
+
};
|
|
1257
|
+
cancel = registerHttpEffect(breaker.protect(deferred), env, cb);
|
|
1258
|
+
} catch (error) {
|
|
1259
|
+
cb({
|
|
1260
|
+
_tag: "Failure",
|
|
1261
|
+
cause: {
|
|
1262
|
+
_tag: "Fail",
|
|
1263
|
+
error: { _tag: "FetchError", message: String(error) }
|
|
1264
|
+
}
|
|
1265
|
+
});
|
|
1266
|
+
}
|
|
1267
|
+
return () => cancel?.();
|
|
1268
|
+
}
|
|
1061
1269
|
};
|
|
1062
1270
|
}
|
|
1063
1271
|
|
|
@@ -1103,19 +1311,2133 @@ function validatedJson(client, validator) {
|
|
|
1103
1311
|
}
|
|
1104
1312
|
);
|
|
1105
1313
|
}
|
|
1314
|
+
|
|
1315
|
+
// src/http/body.ts
|
|
1316
|
+
function httpBodyByteLength(body) {
|
|
1317
|
+
if (body === void 0) return 0;
|
|
1318
|
+
if (typeof body === "string") return Buffer.byteLength(body, "utf8");
|
|
1319
|
+
if (body instanceof ArrayBuffer) return body.byteLength;
|
|
1320
|
+
return body.byteLength;
|
|
1321
|
+
}
|
|
1322
|
+
function httpBodyToBuffer(body) {
|
|
1323
|
+
if (typeof body === "string") return Buffer.from(body, "utf8");
|
|
1324
|
+
if (body instanceof ArrayBuffer) return Buffer.from(body);
|
|
1325
|
+
return Buffer.from(body);
|
|
1326
|
+
}
|
|
1327
|
+
function httpBodyKeyPart(body) {
|
|
1328
|
+
if (body === void 0) return "";
|
|
1329
|
+
if (typeof body === "string") return body;
|
|
1330
|
+
return `base64:${httpBodyToBuffer(body).toString("base64")}`;
|
|
1331
|
+
}
|
|
1332
|
+
|
|
1333
|
+
// src/http/lifecycle/cacheKey.ts
|
|
1334
|
+
var SEPARATOR = "\0";
|
|
1335
|
+
var DEFAULT_CACHE_RELEVANT_HEADERS = ["accept", "authorization", "content-type"];
|
|
1336
|
+
function computeCacheKey(req, baseUrl, extraHeaders = []) {
|
|
1337
|
+
const method = req.method.toUpperCase();
|
|
1338
|
+
const resolvedUrl = new URL(req.url, baseUrl || void 0).toString();
|
|
1339
|
+
const relevantSet = /* @__PURE__ */ new Set([
|
|
1340
|
+
...DEFAULT_CACHE_RELEVANT_HEADERS,
|
|
1341
|
+
...extraHeaders.map((h) => h.toLowerCase())
|
|
1342
|
+
]);
|
|
1343
|
+
const headers = req.headers ?? {};
|
|
1344
|
+
const sortedHeaders = Object.keys(headers).filter((k) => relevantSet.has(k.toLowerCase())).sort().map((k) => `${k.toLowerCase()}:${headers[k]}`).join(",");
|
|
1345
|
+
const body = httpBodyKeyPart(req.body);
|
|
1346
|
+
return `${method}${SEPARATOR}${resolvedUrl}${SEPARATOR}${sortedHeaders}${SEPARATOR}${body}`;
|
|
1347
|
+
}
|
|
1348
|
+
function parseCacheKey(key) {
|
|
1349
|
+
const [method, resolvedUrl, headersStr, ...bodyParts] = key.split(SEPARATOR);
|
|
1350
|
+
const body = bodyParts.join(SEPARATOR);
|
|
1351
|
+
const headers = {};
|
|
1352
|
+
if (headersStr) {
|
|
1353
|
+
for (const entry of headersStr.split(",")) {
|
|
1354
|
+
const colonIdx = entry.indexOf(":");
|
|
1355
|
+
if (colonIdx > 0) {
|
|
1356
|
+
headers[entry.slice(0, colonIdx)] = entry.slice(colonIdx + 1);
|
|
1357
|
+
}
|
|
1358
|
+
}
|
|
1359
|
+
}
|
|
1360
|
+
return { method, resolvedUrl, headers, body };
|
|
1361
|
+
}
|
|
1362
|
+
|
|
1363
|
+
// src/http/lifecycle/dedupKey.ts
|
|
1364
|
+
var HOP_BY_HOP = /* @__PURE__ */ new Set([
|
|
1365
|
+
"connection",
|
|
1366
|
+
"keep-alive",
|
|
1367
|
+
"proxy-authenticate",
|
|
1368
|
+
"proxy-authorization",
|
|
1369
|
+
"te",
|
|
1370
|
+
"trailer",
|
|
1371
|
+
"transfer-encoding",
|
|
1372
|
+
"upgrade"
|
|
1373
|
+
]);
|
|
1374
|
+
var SAFE_METHODS = /* @__PURE__ */ new Set(["GET", "HEAD", "OPTIONS"]);
|
|
1375
|
+
function computeDedupKey(req, baseUrl) {
|
|
1376
|
+
const method = req.method.toUpperCase();
|
|
1377
|
+
const resolvedUrl = new URL(req.url, baseUrl || void 0).toString();
|
|
1378
|
+
const headers = req.headers ?? {};
|
|
1379
|
+
const sortedHeaders = Object.keys(headers).filter((k) => {
|
|
1380
|
+
const lower = k.toLowerCase();
|
|
1381
|
+
return !HOP_BY_HOP.has(lower) && lower !== "authorization";
|
|
1382
|
+
}).sort().map((k) => `${k.toLowerCase()}:${headers[k]}`).join(",");
|
|
1383
|
+
const body = httpBodyKeyPart(req.body);
|
|
1384
|
+
return `${method}${SEPARATOR}${resolvedUrl}${SEPARATOR}${sortedHeaders}${SEPARATOR}${body}`;
|
|
1385
|
+
}
|
|
1386
|
+
|
|
1387
|
+
// src/http/lifecycle/dedup.ts
|
|
1388
|
+
function safeEmit(onEvent, event) {
|
|
1389
|
+
if (!onEvent) return;
|
|
1390
|
+
try {
|
|
1391
|
+
onEvent(event);
|
|
1392
|
+
} catch {
|
|
1393
|
+
}
|
|
1394
|
+
}
|
|
1395
|
+
function withDedup(config) {
|
|
1396
|
+
const inFlight = /* @__PURE__ */ new Map();
|
|
1397
|
+
const customKeyFn = config?.dedupKey;
|
|
1398
|
+
const onEvent = config?.onEvent;
|
|
1399
|
+
return (next) => {
|
|
1400
|
+
return (req) => {
|
|
1401
|
+
if (!SAFE_METHODS.has(req.method.toUpperCase())) {
|
|
1402
|
+
return next(req);
|
|
1403
|
+
}
|
|
1404
|
+
let key;
|
|
1405
|
+
if (customKeyFn) {
|
|
1406
|
+
try {
|
|
1407
|
+
key = customKeyFn(req);
|
|
1408
|
+
} catch {
|
|
1409
|
+
return next(req);
|
|
1410
|
+
}
|
|
1411
|
+
if (!key) {
|
|
1412
|
+
return next(req);
|
|
1413
|
+
}
|
|
1414
|
+
} else {
|
|
1415
|
+
key = computeDedupKey(req, "");
|
|
1416
|
+
}
|
|
1417
|
+
return {
|
|
1418
|
+
_tag: "Async",
|
|
1419
|
+
register: (_env, cb) => {
|
|
1420
|
+
const existing = inFlight.get(key);
|
|
1421
|
+
let callerDone = false;
|
|
1422
|
+
const finishCaller = (exit) => {
|
|
1423
|
+
if (callerDone) return;
|
|
1424
|
+
callerDone = true;
|
|
1425
|
+
cb(exit);
|
|
1426
|
+
};
|
|
1427
|
+
if (existing) {
|
|
1428
|
+
safeEmit(onEvent, { type: "dedup-hit", cacheKey: key });
|
|
1429
|
+
existing.refCount++;
|
|
1430
|
+
const waiter = {
|
|
1431
|
+
resolve: (res) => {
|
|
1432
|
+
finishCaller({ _tag: "Success", value: res });
|
|
1433
|
+
},
|
|
1434
|
+
reject: (err) => {
|
|
1435
|
+
finishCaller({ _tag: "Failure", cause: Cause.fail(err) });
|
|
1436
|
+
}
|
|
1437
|
+
};
|
|
1438
|
+
existing.waiters.push(waiter);
|
|
1439
|
+
return () => {
|
|
1440
|
+
if (callerDone) return;
|
|
1441
|
+
existing.refCount--;
|
|
1442
|
+
const idx = existing.waiters.indexOf(waiter);
|
|
1443
|
+
if (idx >= 0) {
|
|
1444
|
+
existing.waiters.splice(idx, 1);
|
|
1445
|
+
}
|
|
1446
|
+
if (existing.refCount <= 0) {
|
|
1447
|
+
inFlight.delete(key);
|
|
1448
|
+
safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
|
|
1449
|
+
existing.controller.abort();
|
|
1450
|
+
}
|
|
1451
|
+
finishCaller({ _tag: "Failure", cause: Cause.interrupt() });
|
|
1452
|
+
};
|
|
1453
|
+
}
|
|
1454
|
+
safeEmit(onEvent, { type: "dedup-miss", cacheKey: key });
|
|
1455
|
+
const controller = new AbortController();
|
|
1456
|
+
const entry = {
|
|
1457
|
+
key,
|
|
1458
|
+
controller,
|
|
1459
|
+
refCount: 1,
|
|
1460
|
+
waiters: []
|
|
1461
|
+
};
|
|
1462
|
+
inFlight.set(key, entry);
|
|
1463
|
+
safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
|
|
1464
|
+
const dedupReq = {
|
|
1465
|
+
...req,
|
|
1466
|
+
init: {
|
|
1467
|
+
...req.init ?? {},
|
|
1468
|
+
signal: controller.signal
|
|
1469
|
+
}
|
|
1470
|
+
};
|
|
1471
|
+
const innerEffect = next(dedupReq);
|
|
1472
|
+
const innerCancel = registerHttpEffect(innerEffect, _env, (exit) => {
|
|
1473
|
+
inFlight.delete(key);
|
|
1474
|
+
safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
|
|
1475
|
+
if (exit._tag === "Success") {
|
|
1476
|
+
resolveAll(entry, exit.value);
|
|
1477
|
+
finishCaller(exit);
|
|
1478
|
+
return;
|
|
1479
|
+
}
|
|
1480
|
+
if (exit.cause._tag === "Interrupt") {
|
|
1481
|
+
rejectAll(entry, { _tag: "Abort" });
|
|
1482
|
+
finishCaller({ _tag: "Failure", cause: Cause.interrupt() });
|
|
1483
|
+
return;
|
|
1484
|
+
}
|
|
1485
|
+
if (exit.cause._tag === "Fail") {
|
|
1486
|
+
rejectAll(entry, exit.cause.error);
|
|
1487
|
+
finishCaller(exit);
|
|
1488
|
+
return;
|
|
1489
|
+
}
|
|
1490
|
+
const err = { _tag: "FetchError", message: String(exit.cause.defect ?? "unknown") };
|
|
1491
|
+
rejectAll(entry, err);
|
|
1492
|
+
finishCaller({ _tag: "Failure", cause: Cause.fail(err) });
|
|
1493
|
+
});
|
|
1494
|
+
return () => {
|
|
1495
|
+
if (callerDone) return;
|
|
1496
|
+
entry.refCount--;
|
|
1497
|
+
if (entry.refCount <= 0) {
|
|
1498
|
+
inFlight.delete(key);
|
|
1499
|
+
safeEmit(onEvent, { type: "dedup-active", active: inFlight.size });
|
|
1500
|
+
controller.abort();
|
|
1501
|
+
if (innerCancel) {
|
|
1502
|
+
innerCancel();
|
|
1503
|
+
}
|
|
1504
|
+
}
|
|
1505
|
+
finishCaller({ _tag: "Failure", cause: Cause.interrupt() });
|
|
1506
|
+
};
|
|
1507
|
+
}
|
|
1508
|
+
};
|
|
1509
|
+
};
|
|
1510
|
+
};
|
|
1511
|
+
}
|
|
1512
|
+
function resolveAll(entry, res) {
|
|
1513
|
+
const waiters = entry.waiters.slice();
|
|
1514
|
+
for (const w of waiters) {
|
|
1515
|
+
w.resolve(res);
|
|
1516
|
+
}
|
|
1517
|
+
}
|
|
1518
|
+
function rejectAll(entry, err) {
|
|
1519
|
+
const waiters = entry.waiters.slice();
|
|
1520
|
+
for (const w of waiters) {
|
|
1521
|
+
w.reject(err);
|
|
1522
|
+
}
|
|
1523
|
+
}
|
|
1524
|
+
|
|
1525
|
+
// src/http/lifecycle/timing.ts
|
|
1526
|
+
var now = typeof performance !== "undefined" && typeof performance.now === "function" ? () => performance.now() : () => Date.now();
|
|
1527
|
+
|
|
1528
|
+
// src/http/lifecycle/lruCache.ts
|
|
1529
|
+
function isExpired(node) {
|
|
1530
|
+
return now() - node.storedAt >= node.ttlMs;
|
|
1531
|
+
}
|
|
1532
|
+
var LRUCache = class {
|
|
1533
|
+
map = /* @__PURE__ */ new Map();
|
|
1534
|
+
head = null;
|
|
1535
|
+
tail = null;
|
|
1536
|
+
maxEntries;
|
|
1537
|
+
onEvict;
|
|
1538
|
+
/**
|
|
1539
|
+
* Creates a new LRU cache instance.
|
|
1540
|
+
*
|
|
1541
|
+
* @param config - Cache configuration options.
|
|
1542
|
+
* @param config.maxEntries - Maximum number of entries. Must be >= 1. Default: 1024.
|
|
1543
|
+
* @param config.onEvict - Optional eviction callback.
|
|
1544
|
+
*
|
|
1545
|
+
* @example
|
|
1546
|
+
* ```typescript
|
|
1547
|
+
* import { LRUCache } from "./lruCache";
|
|
1548
|
+
*
|
|
1549
|
+
* const cache = new LRUCache<number>({ maxEntries: 50 });
|
|
1550
|
+
* ```
|
|
1551
|
+
*/
|
|
1552
|
+
constructor(config = {}) {
|
|
1553
|
+
const max = config.maxEntries ?? 1024;
|
|
1554
|
+
this.maxEntries = Math.max(1, Math.floor(max));
|
|
1555
|
+
this.onEvict = config.onEvict;
|
|
1556
|
+
}
|
|
1557
|
+
/**
|
|
1558
|
+
* Returns the number of entries currently in the cache.
|
|
1559
|
+
*
|
|
1560
|
+
* @returns The current entry count.
|
|
1561
|
+
*
|
|
1562
|
+
* @example
|
|
1563
|
+
* ```typescript
|
|
1564
|
+
* import { LRUCache } from "./lruCache";
|
|
1565
|
+
*
|
|
1566
|
+
* const cache = new LRUCache<string>();
|
|
1567
|
+
* cache.set("a", "1", 10_000);
|
|
1568
|
+
* console.log(cache.size); // 1
|
|
1569
|
+
* ```
|
|
1570
|
+
*/
|
|
1571
|
+
get size() {
|
|
1572
|
+
return this.map.size;
|
|
1573
|
+
}
|
|
1574
|
+
/**
|
|
1575
|
+
* Retrieves a value by key.
|
|
1576
|
+
*
|
|
1577
|
+
* Returns `undefined` if the key is not found or the entry has expired.
|
|
1578
|
+
* On a hit (non-expired), the entry is moved to the head (most recently used).
|
|
1579
|
+
* Expired entries are lazily removed on access.
|
|
1580
|
+
*
|
|
1581
|
+
* @param key - The cache key to look up.
|
|
1582
|
+
* @returns The cached value, or `undefined` if not found or expired.
|
|
1583
|
+
*
|
|
1584
|
+
* @example
|
|
1585
|
+
* ```typescript
|
|
1586
|
+
* import { LRUCache } from "./lruCache";
|
|
1587
|
+
*
|
|
1588
|
+
* const cache = new LRUCache<string>();
|
|
1589
|
+
* cache.set("greeting", "hello", 30_000);
|
|
1590
|
+
* const val = cache.get("greeting"); // "hello"
|
|
1591
|
+
* const miss = cache.get("unknown"); // undefined
|
|
1592
|
+
* ```
|
|
1593
|
+
*/
|
|
1594
|
+
get(key) {
|
|
1595
|
+
const node = this.map.get(key);
|
|
1596
|
+
if (!node) return void 0;
|
|
1597
|
+
if (isExpired(node)) {
|
|
1598
|
+
this.removeNode(node);
|
|
1599
|
+
this.map.delete(key);
|
|
1600
|
+
return void 0;
|
|
1601
|
+
}
|
|
1602
|
+
this.moveToHead(node);
|
|
1603
|
+
return node.value;
|
|
1604
|
+
}
|
|
1605
|
+
/**
|
|
1606
|
+
* Inserts or updates an entry in the cache.
|
|
1607
|
+
*
|
|
1608
|
+
* If the key already exists, the value and TTL are updated and the entry is
|
|
1609
|
+
* moved to the head. If inserting a new entry causes the cache to exceed
|
|
1610
|
+
* `maxEntries` (must be >= 1), the least recently used entry is evicted.
|
|
1611
|
+
*
|
|
1612
|
+
* @param key - The cache key.
|
|
1613
|
+
* @param value - The value to store.
|
|
1614
|
+
* @param ttlMs - Time-to-live in milliseconds. The entry expires after this duration.
|
|
1615
|
+
*
|
|
1616
|
+
* @example
|
|
1617
|
+
* ```typescript
|
|
1618
|
+
* import { LRUCache } from "./lruCache";
|
|
1619
|
+
*
|
|
1620
|
+
* const cache = new LRUCache<string>({ maxEntries: 2 });
|
|
1621
|
+
* cache.set("a", "alpha", 60_000);
|
|
1622
|
+
* cache.set("b", "beta", 60_000);
|
|
1623
|
+
* cache.set("c", "gamma", 60_000); // evicts "a" (LRU)
|
|
1624
|
+
* ```
|
|
1625
|
+
*/
|
|
1626
|
+
set(key, value, ttlMs) {
|
|
1627
|
+
const existing = this.map.get(key);
|
|
1628
|
+
if (existing) {
|
|
1629
|
+
existing.value = value;
|
|
1630
|
+
existing.storedAt = now();
|
|
1631
|
+
existing.ttlMs = ttlMs;
|
|
1632
|
+
this.moveToHead(existing);
|
|
1633
|
+
return;
|
|
1634
|
+
}
|
|
1635
|
+
const node = {
|
|
1636
|
+
key,
|
|
1637
|
+
value,
|
|
1638
|
+
storedAt: now(),
|
|
1639
|
+
ttlMs,
|
|
1640
|
+
prev: null,
|
|
1641
|
+
next: null
|
|
1642
|
+
};
|
|
1643
|
+
this.map.set(key, node);
|
|
1644
|
+
this.addToHead(node);
|
|
1645
|
+
if (this.map.size > this.maxEntries) {
|
|
1646
|
+
this.evictTail();
|
|
1647
|
+
}
|
|
1648
|
+
}
|
|
1649
|
+
/**
|
|
1650
|
+
* Removes an entry by key.
|
|
1651
|
+
*
|
|
1652
|
+
* @param key - The cache key to remove.
|
|
1653
|
+
* @returns `true` if the entry was found and removed, `false` otherwise.
|
|
1654
|
+
*
|
|
1655
|
+
* @example
|
|
1656
|
+
* ```typescript
|
|
1657
|
+
* import { LRUCache } from "./lruCache";
|
|
1658
|
+
*
|
|
1659
|
+
* const cache = new LRUCache<string>();
|
|
1660
|
+
* cache.set("x", "value", 10_000);
|
|
1661
|
+
* cache.delete("x"); // true
|
|
1662
|
+
* cache.delete("x"); // false (already removed)
|
|
1663
|
+
* ```
|
|
1664
|
+
*/
|
|
1665
|
+
delete(key) {
|
|
1666
|
+
const node = this.map.get(key);
|
|
1667
|
+
if (!node) return false;
|
|
1668
|
+
this.removeNode(node);
|
|
1669
|
+
this.map.delete(key);
|
|
1670
|
+
return true;
|
|
1671
|
+
}
|
|
1672
|
+
/**
|
|
1673
|
+
* Removes all entries from the cache, resetting it to an empty state.
|
|
1674
|
+
*
|
|
1675
|
+
* @example
|
|
1676
|
+
* ```typescript
|
|
1677
|
+
* import { LRUCache } from "./lruCache";
|
|
1678
|
+
*
|
|
1679
|
+
* const cache = new LRUCache<string>();
|
|
1680
|
+
* cache.set("a", "1", 10_000);
|
|
1681
|
+
* cache.clear();
|
|
1682
|
+
* console.log(cache.size); // 0
|
|
1683
|
+
* ```
|
|
1684
|
+
*/
|
|
1685
|
+
clear() {
|
|
1686
|
+
this.map.clear();
|
|
1687
|
+
this.head = null;
|
|
1688
|
+
this.tail = null;
|
|
1689
|
+
}
|
|
1690
|
+
// --- Doubly-linked list operations ---
|
|
1691
|
+
/** Adds a node to the head of the list (most recently used position). */
|
|
1692
|
+
addToHead(node) {
|
|
1693
|
+
node.prev = null;
|
|
1694
|
+
node.next = this.head;
|
|
1695
|
+
if (this.head) {
|
|
1696
|
+
this.head.prev = node;
|
|
1697
|
+
}
|
|
1698
|
+
this.head = node;
|
|
1699
|
+
if (!this.tail) {
|
|
1700
|
+
this.tail = node;
|
|
1701
|
+
}
|
|
1702
|
+
}
|
|
1703
|
+
/** Removes a node from its current position in the list. */
|
|
1704
|
+
removeNode(node) {
|
|
1705
|
+
if (node.prev) {
|
|
1706
|
+
node.prev.next = node.next;
|
|
1707
|
+
} else {
|
|
1708
|
+
this.head = node.next;
|
|
1709
|
+
}
|
|
1710
|
+
if (node.next) {
|
|
1711
|
+
node.next.prev = node.prev;
|
|
1712
|
+
} else {
|
|
1713
|
+
this.tail = node.prev;
|
|
1714
|
+
}
|
|
1715
|
+
node.prev = null;
|
|
1716
|
+
node.next = null;
|
|
1717
|
+
}
|
|
1718
|
+
/** Moves an existing node to the head of the list. */
|
|
1719
|
+
moveToHead(node) {
|
|
1720
|
+
if (this.head === node) return;
|
|
1721
|
+
this.removeNode(node);
|
|
1722
|
+
this.addToHead(node);
|
|
1723
|
+
}
|
|
1724
|
+
/** Evicts the tail node (least recently used) and notifies via callback. */
|
|
1725
|
+
evictTail() {
|
|
1726
|
+
if (!this.tail) return;
|
|
1727
|
+
const evicted = this.tail;
|
|
1728
|
+
this.removeNode(evicted);
|
|
1729
|
+
this.map.delete(evicted.key);
|
|
1730
|
+
if (this.onEvict) {
|
|
1731
|
+
this.onEvict(1);
|
|
1732
|
+
}
|
|
1733
|
+
}
|
|
1734
|
+
};
|
|
1735
|
+
|
|
1736
|
+
// src/http/lifecycle/responseCache.ts
|
|
1737
|
+
function clamp2(n, min, max) {
|
|
1738
|
+
return Math.max(min, Math.min(max, n));
|
|
1739
|
+
}
|
|
1740
|
+
function safeEmit2(onEvent, event) {
|
|
1741
|
+
if (!onEvent) return;
|
|
1742
|
+
try {
|
|
1743
|
+
onEvent(event);
|
|
1744
|
+
} catch {
|
|
1745
|
+
}
|
|
1746
|
+
}
|
|
1747
|
+
function withCache(config) {
|
|
1748
|
+
const ttlSeconds = clamp2(config?.ttlSeconds ?? 60, 1, 86400);
|
|
1749
|
+
const ttlMs = ttlSeconds * 1e3;
|
|
1750
|
+
const maxEntries = Math.max(1, Math.floor(config?.maxEntries ?? 1024));
|
|
1751
|
+
const staleWhileRevalidate = config?.staleWhileRevalidate ?? false;
|
|
1752
|
+
const cachePolicy = config?.cachePolicy;
|
|
1753
|
+
const cacheRelevantHeaders = config?.cacheRelevantHeaders ?? [];
|
|
1754
|
+
const baseUrl = config?.baseUrl ?? "";
|
|
1755
|
+
const onEvent = config?.onEvent;
|
|
1756
|
+
const onLifecycleEvent = config?.onLifecycleEvent;
|
|
1757
|
+
const cache = new LRUCache({
|
|
1758
|
+
maxEntries,
|
|
1759
|
+
onEvict: (count) => onLifecycleEvent?.({ type: "cache-eviction", count })
|
|
1760
|
+
});
|
|
1761
|
+
const revalidating = /* @__PURE__ */ new Set();
|
|
1762
|
+
const invalidate = (key) => {
|
|
1763
|
+
cache.delete(key);
|
|
1764
|
+
};
|
|
1765
|
+
const clear = () => {
|
|
1766
|
+
cache.clear();
|
|
1767
|
+
};
|
|
1768
|
+
const middleware = (next) => {
|
|
1769
|
+
return (req) => {
|
|
1770
|
+
const method = req.method.toUpperCase();
|
|
1771
|
+
if (!SAFE_METHODS.has(method) && !cachePolicy) {
|
|
1772
|
+
return next(req);
|
|
1773
|
+
}
|
|
1774
|
+
const key = computeCacheKey(req, baseUrl, cacheRelevantHeaders);
|
|
1775
|
+
return {
|
|
1776
|
+
_tag: "Async",
|
|
1777
|
+
register: (env, cb) => {
|
|
1778
|
+
const cached = cache.get(key);
|
|
1779
|
+
if (cached !== void 0) {
|
|
1780
|
+
onLifecycleEvent?.({ type: "cache-hit", cacheKey: key });
|
|
1781
|
+
cb({ _tag: "Success", value: cached });
|
|
1782
|
+
return;
|
|
1783
|
+
}
|
|
1784
|
+
onLifecycleEvent?.({ type: "cache-miss", cacheKey: key });
|
|
1785
|
+
const innerEffect = next(req);
|
|
1786
|
+
return registerHttpEffect(innerEffect, env, (exit) => {
|
|
1787
|
+
if (exit._tag === "Success") {
|
|
1788
|
+
storeIfCacheable(req, exit.value, key);
|
|
1789
|
+
}
|
|
1790
|
+
cb(exit);
|
|
1791
|
+
});
|
|
1792
|
+
}
|
|
1793
|
+
};
|
|
1794
|
+
};
|
|
1795
|
+
};
|
|
1796
|
+
function storeIfCacheable(req, res, key) {
|
|
1797
|
+
const method = req.method.toUpperCase();
|
|
1798
|
+
if (cachePolicy) {
|
|
1799
|
+
const result = cachePolicy(req, res);
|
|
1800
|
+
if (!result.cacheable) return;
|
|
1801
|
+
const entryTtlMs = result.ttlSeconds !== void 0 ? clamp2(result.ttlSeconds, 1, 86400) * 1e3 : ttlMs;
|
|
1802
|
+
cache.set(key, res, entryTtlMs);
|
|
1803
|
+
return;
|
|
1804
|
+
}
|
|
1805
|
+
if (!SAFE_METHODS.has(method)) return;
|
|
1806
|
+
cache.set(key, res, ttlMs);
|
|
1807
|
+
}
|
|
1808
|
+
function triggerRevalidation(next, req, key) {
|
|
1809
|
+
if (revalidating.has(key)) return;
|
|
1810
|
+
revalidating.add(key);
|
|
1811
|
+
const innerEffect = next(req);
|
|
1812
|
+
const handleExit = (exit) => {
|
|
1813
|
+
revalidating.delete(key);
|
|
1814
|
+
if (exit._tag === "Success") {
|
|
1815
|
+
storeIfCacheable(req, exit.value, key);
|
|
1816
|
+
} else {
|
|
1817
|
+
safeEmit2(onEvent, {
|
|
1818
|
+
type: "revalidation-failure",
|
|
1819
|
+
cacheKey: key,
|
|
1820
|
+
error: exit.cause._tag === "Fail" ? exit.cause.error : void 0
|
|
1821
|
+
});
|
|
1822
|
+
}
|
|
1823
|
+
};
|
|
1824
|
+
registerHttpEffect(innerEffect, void 0, handleExit);
|
|
1825
|
+
}
|
|
1826
|
+
const expirationMap = /* @__PURE__ */ new Map();
|
|
1827
|
+
const swrMiddleware = (next) => {
|
|
1828
|
+
return (req) => {
|
|
1829
|
+
const method = req.method.toUpperCase();
|
|
1830
|
+
if (!SAFE_METHODS.has(method) && !cachePolicy) {
|
|
1831
|
+
return next(req);
|
|
1832
|
+
}
|
|
1833
|
+
const key = computeCacheKey(req, baseUrl, cacheRelevantHeaders);
|
|
1834
|
+
return {
|
|
1835
|
+
_tag: "Async",
|
|
1836
|
+
register: (env, cb) => {
|
|
1837
|
+
const cached = cache.get(key);
|
|
1838
|
+
if (cached !== void 0) {
|
|
1839
|
+
const expiresAt = expirationMap.get(key);
|
|
1840
|
+
if (expiresAt !== void 0 && now() < expiresAt) {
|
|
1841
|
+
onLifecycleEvent?.({ type: "cache-hit", cacheKey: key });
|
|
1842
|
+
cb({ _tag: "Success", value: cached });
|
|
1843
|
+
return;
|
|
1844
|
+
}
|
|
1845
|
+
onLifecycleEvent?.({ type: "cache-hit", cacheKey: key });
|
|
1846
|
+
cb({ _tag: "Success", value: cached });
|
|
1847
|
+
triggerRevalidation(next, req, key);
|
|
1848
|
+
return;
|
|
1849
|
+
}
|
|
1850
|
+
onLifecycleEvent?.({ type: "cache-miss", cacheKey: key });
|
|
1851
|
+
const innerEffect = next(req);
|
|
1852
|
+
const handleSuccess = (res) => {
|
|
1853
|
+
swrStoreIfCacheable(req, res, key);
|
|
1854
|
+
};
|
|
1855
|
+
return registerHttpEffect(innerEffect, env, (exit) => {
|
|
1856
|
+
if (exit._tag === "Success") {
|
|
1857
|
+
handleSuccess(exit.value);
|
|
1858
|
+
}
|
|
1859
|
+
cb(exit);
|
|
1860
|
+
});
|
|
1861
|
+
}
|
|
1862
|
+
};
|
|
1863
|
+
};
|
|
1864
|
+
};
|
|
1865
|
+
function swrStoreIfCacheable(req, res, key) {
|
|
1866
|
+
const method = req.method.toUpperCase();
|
|
1867
|
+
let entryTtlMs = ttlMs;
|
|
1868
|
+
if (cachePolicy) {
|
|
1869
|
+
const result = cachePolicy(req, res);
|
|
1870
|
+
if (!result.cacheable) return;
|
|
1871
|
+
entryTtlMs = result.ttlSeconds !== void 0 ? clamp2(result.ttlSeconds, 1, 86400) * 1e3 : ttlMs;
|
|
1872
|
+
} else if (!SAFE_METHODS.has(method)) {
|
|
1873
|
+
return;
|
|
1874
|
+
}
|
|
1875
|
+
const lruTtl = Number.MAX_SAFE_INTEGER;
|
|
1876
|
+
cache.set(key, res, lruTtl);
|
|
1877
|
+
expirationMap.set(key, now() + entryTtlMs);
|
|
1878
|
+
}
|
|
1879
|
+
const swrInvalidate = (key) => {
|
|
1880
|
+
cache.delete(key);
|
|
1881
|
+
expirationMap.delete(key);
|
|
1882
|
+
};
|
|
1883
|
+
const swrClear = () => {
|
|
1884
|
+
cache.clear();
|
|
1885
|
+
expirationMap.clear();
|
|
1886
|
+
};
|
|
1887
|
+
if (staleWhileRevalidate) {
|
|
1888
|
+
return {
|
|
1889
|
+
middleware: swrMiddleware,
|
|
1890
|
+
invalidate: swrInvalidate,
|
|
1891
|
+
clear: swrClear
|
|
1892
|
+
};
|
|
1893
|
+
}
|
|
1894
|
+
return {
|
|
1895
|
+
middleware,
|
|
1896
|
+
invalidate,
|
|
1897
|
+
clear
|
|
1898
|
+
};
|
|
1899
|
+
}
|
|
1900
|
+
|
|
1901
|
+
// src/http/lifecycle/priorityQueue.ts
|
|
1902
|
+
function clampPriority(value) {
|
|
1903
|
+
if (value === void 0 || !Number.isFinite(value)) return 5;
|
|
1904
|
+
return Math.max(0, Math.min(9, Math.trunc(value)));
|
|
1905
|
+
}
|
|
1906
|
+
function comparePriority(a, b) {
|
|
1907
|
+
if (a.priority !== b.priority) return a.priority - b.priority;
|
|
1908
|
+
return a.arrivalOrder - b.arrivalOrder;
|
|
1909
|
+
}
|
|
1910
|
+
var PriorityQueue = class {
|
|
1911
|
+
heap = [];
|
|
1912
|
+
counter = 0;
|
|
1913
|
+
/**
|
|
1914
|
+
* Returns the number of entries in the queue (including cancelled entries).
|
|
1915
|
+
*
|
|
1916
|
+
* @returns The total number of entries in the internal heap.
|
|
1917
|
+
*
|
|
1918
|
+
* @example
|
|
1919
|
+
* ```typescript
|
|
1920
|
+
* import { PriorityQueue } from "./priorityQueue";
|
|
1921
|
+
*
|
|
1922
|
+
* const queue = new PriorityQueue<string>();
|
|
1923
|
+
* queue.enqueue("task", 5);
|
|
1924
|
+
* console.log(queue.size); // 1
|
|
1925
|
+
* ```
|
|
1926
|
+
*/
|
|
1927
|
+
get size() {
|
|
1928
|
+
return this.heap.length;
|
|
1929
|
+
}
|
|
1930
|
+
/** Returns the number of entries that have not been cancelled. */
|
|
1931
|
+
get activeSize() {
|
|
1932
|
+
return this.heap.reduce((n, entry) => n + (entry.cancelled ? 0 : 1), 0);
|
|
1933
|
+
}
|
|
1934
|
+
/**
|
|
1935
|
+
* Adds a value to the queue with the given priority.
|
|
1936
|
+
*
|
|
1937
|
+
* Priority is clamped to the valid range [0, 9] via `clampPriority`.
|
|
1938
|
+
* Returns the created entry, which can be used for later cancellation
|
|
1939
|
+
* by setting `entry.cancelled = true`.
|
|
1940
|
+
*
|
|
1941
|
+
* @param value - The value to enqueue.
|
|
1942
|
+
* @param priority - Priority level, integer from 0 (highest) to 9 (lowest).
|
|
1943
|
+
* Clamped to [0, 9]. Defaults to 5 if undefined.
|
|
1944
|
+
* @returns The created queue entry.
|
|
1945
|
+
*
|
|
1946
|
+
* @example
|
|
1947
|
+
* ```typescript
|
|
1948
|
+
* import { PriorityQueue } from "./priorityQueue";
|
|
1949
|
+
*
|
|
1950
|
+
* const queue = new PriorityQueue<string>();
|
|
1951
|
+
* const entry = queue.enqueue("urgent-task", 0);
|
|
1952
|
+
* entry.cancelled = true; // cancel later if needed
|
|
1953
|
+
* ```
|
|
1954
|
+
*/
|
|
1955
|
+
enqueue(value, priority) {
|
|
1956
|
+
const entry = {
|
|
1957
|
+
priority: clampPriority(priority),
|
|
1958
|
+
arrivalOrder: this.counter++,
|
|
1959
|
+
value,
|
|
1960
|
+
cancelled: false
|
|
1961
|
+
};
|
|
1962
|
+
this.heap.push(entry);
|
|
1963
|
+
this.bubbleUp(this.heap.length - 1);
|
|
1964
|
+
return entry;
|
|
1965
|
+
}
|
|
1966
|
+
/**
|
|
1967
|
+
* Removes and returns the highest-priority non-cancelled entry.
|
|
1968
|
+
*
|
|
1969
|
+
* Skips (and discards) any cancelled entries at the top of the heap.
|
|
1970
|
+
* Returns `undefined` if the queue is empty or all entries are cancelled.
|
|
1971
|
+
*
|
|
1972
|
+
* @returns The highest-priority non-cancelled entry, or `undefined` if none available.
|
|
1973
|
+
*
|
|
1974
|
+
* @example
|
|
1975
|
+
* ```typescript
|
|
1976
|
+
* import { PriorityQueue } from "./priorityQueue";
|
|
1977
|
+
*
|
|
1978
|
+
* const queue = new PriorityQueue<string>();
|
|
1979
|
+
* queue.enqueue("first", 1);
|
|
1980
|
+
* queue.enqueue("second", 2);
|
|
1981
|
+
* const entry = queue.dequeue(); // { value: "first", priority: 1, ... }
|
|
1982
|
+
* ```
|
|
1983
|
+
*/
|
|
1984
|
+
dequeue() {
|
|
1985
|
+
while (this.heap.length > 0) {
|
|
1986
|
+
const top = this.heap[0];
|
|
1987
|
+
if (top.cancelled) {
|
|
1988
|
+
this.removeTop();
|
|
1989
|
+
continue;
|
|
1990
|
+
}
|
|
1991
|
+
this.removeTop();
|
|
1992
|
+
return top;
|
|
1993
|
+
}
|
|
1994
|
+
return void 0;
|
|
1995
|
+
}
|
|
1996
|
+
/**
|
|
1997
|
+
* Returns the highest-priority non-cancelled entry without removing it.
|
|
1998
|
+
*
|
|
1999
|
+
* Discards cancelled entries at the top of the heap as a side effect.
|
|
2000
|
+
* Returns `undefined` if the queue is empty or all entries are cancelled.
|
|
2001
|
+
*
|
|
2002
|
+
* @returns The highest-priority non-cancelled entry, or `undefined` if none available.
|
|
2003
|
+
*
|
|
2004
|
+
* @example
|
|
2005
|
+
* ```typescript
|
|
2006
|
+
* import { PriorityQueue } from "./priorityQueue";
|
|
2007
|
+
*
|
|
2008
|
+
* const queue = new PriorityQueue<string>();
|
|
2009
|
+
* queue.enqueue("task", 3);
|
|
2010
|
+
* const top = queue.peek(); // { value: "task", priority: 3, ... }
|
|
2011
|
+
* console.log(queue.size); // 1 (not removed)
|
|
2012
|
+
* ```
|
|
2013
|
+
*/
|
|
2014
|
+
peek() {
|
|
2015
|
+
while (this.heap.length > 0) {
|
|
2016
|
+
const top = this.heap[0];
|
|
2017
|
+
if (top.cancelled) {
|
|
2018
|
+
this.removeTop();
|
|
2019
|
+
continue;
|
|
2020
|
+
}
|
|
2021
|
+
return top;
|
|
2022
|
+
}
|
|
2023
|
+
return void 0;
|
|
2024
|
+
}
|
|
2025
|
+
/**
|
|
2026
|
+
* Marks all entries matching the predicate as cancelled (lazy removal).
|
|
2027
|
+
*
|
|
2028
|
+
* Cancelled entries are skipped on subsequent dequeue/peek calls.
|
|
2029
|
+
* This does not immediately remove entries from the heap; they are
|
|
2030
|
+
* discarded lazily when encountered at the top during dequeue or peek.
|
|
2031
|
+
*
|
|
2032
|
+
* @param predicate - A function that returns `true` for entries to cancel.
|
|
2033
|
+
* @returns The number of entries marked as cancelled.
|
|
2034
|
+
*
|
|
2035
|
+
* @example
|
|
2036
|
+
* ```typescript
|
|
2037
|
+
* import { PriorityQueue } from "./priorityQueue";
|
|
2038
|
+
*
|
|
2039
|
+
* const queue = new PriorityQueue<string>();
|
|
2040
|
+
* queue.enqueue("a", 1);
|
|
2041
|
+
* queue.enqueue("b", 2);
|
|
2042
|
+
* const removed = queue.remove((e) => e.value === "a"); // 1
|
|
2043
|
+
* ```
|
|
2044
|
+
*/
|
|
2045
|
+
remove(predicate) {
|
|
2046
|
+
let count = 0;
|
|
2047
|
+
for (const entry of this.heap) {
|
|
2048
|
+
if (!entry.cancelled && predicate(entry)) {
|
|
2049
|
+
entry.cancelled = true;
|
|
2050
|
+
count++;
|
|
2051
|
+
}
|
|
2052
|
+
}
|
|
2053
|
+
return count;
|
|
2054
|
+
}
|
|
2055
|
+
// --- Binary heap operations ---
|
|
2056
|
+
/** Removes the top element from the heap and restores heap property. */
|
|
2057
|
+
removeTop() {
|
|
2058
|
+
const last = this.heap.pop();
|
|
2059
|
+
if (this.heap.length > 0 && last !== void 0) {
|
|
2060
|
+
this.heap[0] = last;
|
|
2061
|
+
this.sinkDown(0);
|
|
2062
|
+
}
|
|
2063
|
+
}
|
|
2064
|
+
/** Moves an element up the heap until the heap property is restored. */
|
|
2065
|
+
bubbleUp(index) {
|
|
2066
|
+
while (index > 0) {
|
|
2067
|
+
const parentIndex = index - 1 >>> 1;
|
|
2068
|
+
const current = this.heap[index];
|
|
2069
|
+
const parent = this.heap[parentIndex];
|
|
2070
|
+
if (comparePriority(current, parent) >= 0) break;
|
|
2071
|
+
this.heap[index] = parent;
|
|
2072
|
+
this.heap[parentIndex] = current;
|
|
2073
|
+
index = parentIndex;
|
|
2074
|
+
}
|
|
2075
|
+
}
|
|
2076
|
+
/** Moves an element down the heap until the heap property is restored. */
|
|
2077
|
+
sinkDown(index) {
|
|
2078
|
+
const length = this.heap.length;
|
|
2079
|
+
while (true) {
|
|
2080
|
+
const leftIndex = 2 * index + 1;
|
|
2081
|
+
const rightIndex = 2 * index + 2;
|
|
2082
|
+
let smallest = index;
|
|
2083
|
+
if (leftIndex < length && comparePriority(this.heap[leftIndex], this.heap[smallest]) < 0) {
|
|
2084
|
+
smallest = leftIndex;
|
|
2085
|
+
}
|
|
2086
|
+
if (rightIndex < length && comparePriority(this.heap[rightIndex], this.heap[smallest]) < 0) {
|
|
2087
|
+
smallest = rightIndex;
|
|
2088
|
+
}
|
|
2089
|
+
if (smallest === index) break;
|
|
2090
|
+
const temp = this.heap[index];
|
|
2091
|
+
this.heap[index] = this.heap[smallest];
|
|
2092
|
+
this.heap[smallest] = temp;
|
|
2093
|
+
index = smallest;
|
|
2094
|
+
}
|
|
2095
|
+
}
|
|
2096
|
+
};
|
|
2097
|
+
|
|
2098
|
+
// src/http/lifecycle/priorityScheduler.ts
// Fallback concurrency limit used by withPriority when the caller supplies
// no usable `concurrency` value (see resolveConcurrency).
var DEFAULT_CONCURRENCY2 = 32;
|
|
2100
|
+
/**
 * Resolves the effective priority of a request. The request-level `priority`
 * wins over `init.priority`; any defined value (only `undefined` falls
 * through) is normalized via clampPriority. Defaults to 5 when neither is set.
 */
function extractPriority(req) {
  for (const candidate of [req.priority, req.init?.priority]) {
    if (candidate !== void 0) return clampPriority(candidate);
  }
  return 5;
}
|
|
2107
|
+
/**
 * Invokes the optional event listener with `event`, swallowing anything it
 * throws so that observer bugs can never disturb request processing.
 */
function safeEmit3(onEvent, event) {
  if (onEvent) {
    try {
      onEvent(event);
    } catch {
      // Listener errors are deliberately ignored.
    }
  }
}
|
|
2114
|
+
// Priority-scheduling middleware. At most `concurrency` requests run
// downstream at once; overflow requests wait in a PriorityQueue and are
// dispatched highest-priority-first as slots free up. Queued requests can
// drop out via the optional queue timeout, the request's AbortSignal, or
// effect interruption. The returned middleware also exposes `queueDepth()`.
function withPriority(config) {
  const concurrency = resolveConcurrency(config?.concurrency);
  const queueTimeoutMs = resolveQueueTimeout(config?.queueTimeoutMs);
  const onEvent = config?.onEvent;
  const queue = new PriorityQueue();
  // Number of requests currently dispatched downstream (closure state shared
  // by middleware, dispatchRequest and drainNext).
  let inFlight = 0;
  const queueDepth = () => {
    return queue.activeSize;
  };
  const middleware = (next) => {
    return (req) => {
      const priority = extractPriority(req);
      return {
        _tag: "Async",
        register: (env, cb) => {
          // Fast path: a free slot means no queueing at all.
          if (inFlight < concurrency) {
            return dispatchRequest(next, req, env, cb);
          }
          const queued = { req, env, cb, signal: getSignal(req) };
          const entry = queue.enqueue(queued, priority);
          safeEmit3(onEvent, { type: "queue-enqueue", priority });
          if (queueTimeoutMs !== void 0) {
            // Fail the request if it is still queued when the timeout fires.
            queued.timer = setTimeout(() => {
              entry.cancelled = true;
              queued.timer = void 0;
              cb({
                _tag: "Failure",
                cause: Cause.fail({
                  _tag: "PoolTimeout",
                  key: "priority",
                  timeoutMs: queueTimeoutMs,
                  message: `Priority queue did not dispatch within ${queueTimeoutMs}ms`
                })
              });
            }, queueTimeoutMs);
          }
          const signal = queued.signal;
          let abortHandler;
          if (signal && !signal.aborted) {
            // Caller aborts while queued: cancel the entry, stop the timer,
            // and fail with an Abort error.
            abortHandler = () => {
              entry.cancelled = true;
              if (queued.timer !== void 0) {
                clearTimeout(queued.timer);
                queued.timer = void 0;
              }
              cb({ _tag: "Failure", cause: Cause.fail({ _tag: "Abort" }) });
            };
            signal.addEventListener("abort", abortHandler, { once: true });
          } else if (signal?.aborted) {
            // Signal was already aborted before enqueue completed.
            entry.cancelled = true;
            cb({ _tag: "Failure", cause: Cause.fail({ _tag: "Abort" }) });
            return;
          }
          // Canceller invoked when the surrounding effect is interrupted.
          return () => {
            entry.cancelled = true;
            if (queued.timer !== void 0) {
              clearTimeout(queued.timer);
              queued.timer = void 0;
            }
            if (abortHandler && signal) {
              signal.removeEventListener("abort", abortHandler);
            }
            cb({ _tag: "Failure", cause: Cause.interrupt() });
          };
        }
      };
    };
  };
  // Runs a request downstream immediately, consuming one concurrency slot;
  // when it settles, the slot is released and the queue is drained.
  function dispatchRequest(downstream, req, env, cb) {
    inFlight++;
    safeEmit3(onEvent, { type: "queue-dispatch", priority: extractPriority(req) });
    const innerEffect = downstream(req);
    let completed = false;
    const onComplete = (exit) => {
      // Guard against double settlement of the same dispatch.
      if (completed) return;
      completed = true;
      inFlight--;
      cb(exit);
      drainNext(downstream);
    };
    const innerCancel = registerHttpEffect(innerEffect, env, onComplete);
    return () => {
      innerCancel();
    };
  }
  // Dispatches queued entries while slots are free, skipping entries that
  // were cancelled or whose signal aborted while waiting.
  function drainNext(downstream) {
    while (inFlight < concurrency) {
      const entry = queue.dequeue();
      if (!entry) break;
      if (entry.cancelled) continue;
      const queued = entry.value;
      if (queued.timer !== void 0) {
        clearTimeout(queued.timer);
        queued.timer = void 0;
      }
      if (queued.signal?.aborted) {
        queued.cb({ _tag: "Failure", cause: Cause.fail({ _tag: "Abort" }) });
        continue;
      }
      dispatchRequest(downstream, queued.req, queued.env, queued.cb);
    }
  }
  return Object.assign(middleware, { queueDepth });
}
|
|
2218
|
+
/**
 * Normalizes a user-supplied concurrency limit. Absent or non-finite values
 * fall back to DEFAULT_CONCURRENCY2; anything else is floored and clamped to
 * at least 1.
 */
function resolveConcurrency(value) {
  const usable = value !== void 0 && Number.isFinite(value);
  if (!usable) return DEFAULT_CONCURRENCY2;
  const floored = Math.floor(value);
  return floored < 1 ? 1 : floored;
}
|
|
2222
|
+
/**
 * Normalizes an optional queue timeout: only finite values that floor to a
 * positive integer are honored; anything else disables the timeout
 * (returns undefined).
 */
function resolveQueueTimeout(value) {
  if (value === void 0) return void 0;
  if (!Number.isFinite(value)) return void 0;
  const ms = Math.floor(value);
  if (ms <= 0) return void 0;
  return ms;
}
|
|
2227
|
+
/** Reads the AbortSignal attached to a request's init, if any. */
function getSignal(req) {
  const init = req.init;
  return init === void 0 || init === null ? void 0 : init.signal;
}
|
|
2230
|
+
|
|
2231
|
+
// src/http/lifecycle/stats.ts
/**
 * Accumulates HTTP lifecycle statistics — cache, dedup, priority-queue,
 * request and retry counters — and optionally forwards lifecycle events to
 * an observer callback. Call `snapshot()` for a frozen point-in-time view
 * that also embeds the wire-level client stats supplied at construction.
 *
 * (Fix: `retry()` was the only undocumented method; docs added for
 * consistency with the rest of the class. Code is unchanged.)
 */
var LifecycleStatsTracker = class {
  // Monotonic counters.
  _cacheHits = 0;
  _cacheMisses = 0;
  _cacheEvictions = 0;
  _dedupHits = 0;
  // Gauges: set to the latest observed value rather than incremented.
  _dedupActive = 0;
  _queueDepth = 0;
  _requestsStarted = 0;
  _requestsCompleted = 0;
  _requestsFailed = 0;
  _retries = 0;
  _onEvent;
  _wireStats;
  /**
   * Creates a new lifecycle stats tracker.
   *
   * @param opts.onEvent - Optional callback invoked on each emitted lifecycle
   *   event; exceptions it throws are silently discarded.
   * @param opts.wireStats - Returns the current wire-level HTTP client stats;
   *   embedded into every snapshot under `wire`.
   */
  constructor(opts) {
    this._onEvent = opts.onEvent;
    this._wireStats = opts.wireStats;
  }
  // --- Increment methods ---
  /** Records a cache hit (increments the counter by 1). */
  cacheHit() {
    this._cacheHits++;
  }
  /** Records a cache miss (increments the counter by 1). */
  cacheMiss() {
    this._cacheMisses++;
  }
  /** Records a cache eviction (increments the counter by 1). */
  cacheEviction() {
    this._cacheEvictions++;
  }
  /** Records a request that joined an in-flight duplicate (dedup hit). */
  dedupHit() {
    this._dedupHits++;
  }
  /**
   * Sets the current number of active dedup groups.
   * @param n - Current count of active dedup groups.
   */
  setDedupActive(n) {
    this._dedupActive = n;
  }
  /**
   * Sets the current priority-queue depth.
   * @param n - Current number of entries waiting in the priority queue.
   */
  setQueueDepth(n) {
    this._queueDepth = n;
  }
  /** Records that a request has started. */
  requestStarted() {
    this._requestsStarted++;
  }
  /** Records that a request completed successfully. */
  requestCompleted() {
    this._requestsCompleted++;
  }
  /** Records that a request failed. */
  requestFailed() {
    this._requestsFailed++;
  }
  /** Records one retry attempt (increments the retry counter by 1). */
  retry() {
    this._retries++;
  }
  // --- Event emission ---
  /**
   * Emits `{ type, timestamp: now(), ...extra }` to the `onEvent` callback,
   * if one was provided. The call is wrapped in try/catch so observer
   * exceptions are swallowed and never disturb request processing.
   *
   * @param type - Lifecycle event type (e.g. "cache-hit", "request-start").
   * @param extra - Optional additional event data merged into the event.
   */
  emit(type, extra) {
    if (!this._onEvent) return;
    try {
      const event = {
        type,
        timestamp: now(),
        ...extra
      };
      this._onEvent(event);
    } catch {
    }
  }
  // --- Snapshot ---
  /**
   * Returns a frozen (immutable) point-in-time view of every counter and
   * gauge, plus the wire-level stats under `wire`.
   */
  snapshot() {
    return Object.freeze({
      cacheHits: this._cacheHits,
      cacheMisses: this._cacheMisses,
      cacheEvictions: this._cacheEvictions,
      dedupHits: this._dedupHits,
      dedupActive: this._dedupActive,
      queueDepth: this._queueDepth,
      requestsStarted: this._requestsStarted,
      requestsCompleted: this._requestsCompleted,
      requestsFailed: this._requestsFailed,
      retries: this._retries,
      wire: this._wireStats()
    });
  }
};
|
|
2475
|
+
|
|
2476
|
+
// src/http/lifecycle/lifecycleClient.ts
|
|
2477
|
+
/**
 * Fails fast when the runtime lacks the web globals this client depends on.
 * Throws a descriptive Error if `fetch` or `AbortController` is missing.
 */
function validateGlobals() {
  const missingFetch = typeof fetch === "undefined";
  if (missingFetch) {
    throw new Error(
      "makeLifecycleClient: global `fetch` is not available. Ensure you are running in an environment with fetch support (Node.js 18+ or modern browser)."
    );
  }
  const missingAbortController = typeof AbortController === "undefined";
  if (missingAbortController) {
    throw new Error(
      "makeLifecycleClient: global `AbortController` is not available. Ensure you are running in an environment with AbortController support (Node.js 15+ or modern browser)."
    );
  }
}
|
|
2489
|
+
/**
 * Strips the lifecycle-only options (dedup, cache, priority, retry, onEvent)
 * from a combined config object, returning a shallow copy containing just
 * the wire-level HTTP settings. The input is not mutated.
 */
function extractWireConfig(config) {
  const lifecycleKeys = ["dedup", "cache", "priority", "retry", "onEvent"];
  const wireConfig = { ...config };
  for (const key of lifecycleKeys) {
    delete wireConfig[key];
  }
  return wireConfig;
}
|
|
2493
|
+
// Builds the full lifecycle HTTP client: a wire client from makeHttp wrapped
// (innermost to outermost) by priority -> retry -> cache -> dedup middleware,
// each wired into a shared LifecycleStatsTracker. Layers whose config is
// undefined or false are skipped entirely; with no layers at all, the wire
// client is returned directly (zero overhead path).
function makeLifecycleClient(config = {}) {
  validateGlobals();
  const wireConfig = extractWireConfig(config);
  const wireClient = makeHttp(wireConfig);
  // Controllers for requests currently in flight; used by cancelAll().
  const activeControllers = /* @__PURE__ */ new Set();
  const tracker = new LifecycleStatsTracker({
    onEvent: config.onEvent,
    wireStats: wireClient.stats
  });
  // A layer is enabled for any config value other than undefined/false.
  const hasDedup = config.dedup !== void 0 && config.dedup !== false;
  const hasCache = config.cache !== void 0 && config.cache !== false;
  const hasPriority = config.priority !== void 0 && config.priority !== false;
  const hasRetry = config.retry !== void 0 && config.retry !== false;
  if (!hasDedup && !hasCache && !hasPriority && !hasRetry) {
    // No lifecycle layers configured: expose the bare wire client with no-op
    // cache controls.
    return buildLifecycleClient(wireClient, tracker, {
      cacheInvalidate: noopInvalidate,
      cacheClear: noopClear,
      cancelAll: () => cancelControllers(activeControllers),
      activeControllers
    });
  }
  let priorityMiddleware;
  if (hasPriority) {
    const priorityConfig = config.priority;
    priorityMiddleware = withPriority({
      ...priorityConfig,
      onEvent: (event) => {
        // Refresh the queue-depth gauge on every scheduler event, then
        // forward the event through the tracker.
        tracker.setQueueDepth(priorityMiddleware?.queueDepth() ?? 0);
        tracker.emit(event.type, { priority: event.priority });
      }
    });
  }
  let cacheLayer;
  if (hasCache) {
    const cacheConfig = config.cache;
    cacheLayer = withCache({
      ...cacheConfig,
      baseUrl: wireConfig.baseUrl,
      onLifecycleEvent: (event) => {
        if (event.type === "cache-hit") tracker.cacheHit();
        if (event.type === "cache-miss") tracker.cacheMiss();
        if (event.type === "cache-eviction") tracker.cacheEviction();
        // Only hit/miss events are forwarded to the observer.
        if (event.type === "cache-hit" || event.type === "cache-miss") {
          tracker.emit(event.type, { cacheKey: event.cacheKey });
        }
      }
    });
  }
  let dedupMiddleware;
  if (hasDedup) {
    const dedupConfig = config.dedup;
    const baseUrl = wireConfig.baseUrl ?? "";
    // Default the dedup key to a baseUrl-aware key unless the caller supplied
    // their own dedupKey (or there is no baseUrl to resolve against).
    const effectiveDedupConfig = dedupConfig.dedupKey || !baseUrl ? dedupConfig : { ...dedupConfig, dedupKey: (req) => computeDedupKey(req, baseUrl) };
    dedupMiddleware = withDedup({
      ...effectiveDedupConfig,
      onEvent: (event) => {
        if (event.type === "dedup-hit") tracker.dedupHit();
        if (event.type === "dedup-active") {
          // Gauge-only event; not forwarded to the observer.
          tracker.setDedupActive(event.active ?? 0);
          return;
        }
        tracker.emit(event.type, { cacheKey: event.cacheKey });
      }
    });
  }
  // Compose innermost-first: priority closest to the wire, then retry, then
  // cache, with dedup outermost (so duplicates collapse before caching).
  let composedFn = wireClient;
  if (priorityMiddleware) {
    composedFn = priorityMiddleware(composedFn);
  }
  if (hasRetry) {
    const retryConfig = config.retry;
    composedFn = withRetry({
      ...retryConfig,
      onRetry: (event) => {
        tracker.retry();
        tracker.emit("retry", {
          attempt: event.attempt,
          delayMs: event.delayMs,
          status: event.status,
          errorTag: event.error?._tag
        });
        // Preserve the caller's own onRetry hook.
        retryConfig.onRetry?.(event);
      }
    })(composedFn);
  }
  if (cacheLayer) {
    composedFn = cacheLayer.middleware(composedFn);
  }
  if (dedupMiddleware) {
    composedFn = dedupMiddleware(composedFn);
  }
  return buildLifecycleClient(composedFn, tracker, {
    cacheInvalidate: cacheLayer?.invalidate ?? noopInvalidate,
    cacheClear: cacheLayer?.clear ?? noopClear,
    cancelAll: () => cancelControllers(activeControllers),
    activeControllers,
    queueDepth: priorityMiddleware?.queueDepth
  });
}
|
|
2592
|
+
/**
 * Convenience alias for makeLifecycleClient; accepts the same combined
 * wire + lifecycle configuration and returns the same client shape.
 */
function makeHttpClient(config = {}) {
  return makeLifecycleClient(config);
}
|
|
2595
|
+
/** No-op cache invalidation used when no cache layer is configured. */
function noopInvalidate(_key) {
}
|
|
2597
|
+
/** No-op cache clear used when no cache layer is configured. */
function noopClear() {
}
|
|
2599
|
+
// Assembles the public client object around a composed request function:
// the callable itself routes every request through trackRequest; `with`
// layers one more middleware and rebuilds the client (sharing the same
// tracker and internals); `stats` refreshes the queue-depth gauge before
// snapshotting; `cancelAll` and `cache` expose the configured internals.
function buildLifecycleClient(fn, tracker, internals) {
  const client = (req) => trackRequest(fn, req, tracker, internals);
  const stats = () => {
    // Pull the live queue depth (if a priority layer exists) into the gauge
    // so the snapshot is current.
    tracker.setQueueDepth(internals.queueDepth?.() ?? 0);
    return tracker.snapshot();
  };
  const withMw = (mw) => {
    const wrappedFn = mw(fn);
    // Recursively rebuild so the returned client has the same surface.
    return buildLifecycleClient(wrappedFn, tracker, internals);
  };
  const lifecycleClient = Object.assign(client, {
    with: withMw,
    stats,
    cancelAll: internals.cancelAll,
    cache: {
      invalidate: internals.cacheInvalidate,
      clear: internals.cacheClear
    }
  });
  return lifecycleClient;
}
|
|
2620
|
+
/**
 * Aborts every controller currently tracked as active. Iterates over a
 * snapshot because aborting may mutate the live set, and swallows abort
 * errors so one bad controller cannot stop the sweep. Returns a completed
 * async effect.
 */
function cancelControllers(activeControllers) {
  const snapshot = Array.from(activeControllers);
  snapshot.forEach((controller) => {
    try {
      controller.abort();
    } catch {
      // A misbehaving controller must not prevent the rest from aborting.
    }
  });
  return asyncSucceed(void 0);
}
|
|
2629
|
+
// Wraps one request in lifecycle tracking: installs a fresh AbortController
// (chained to any caller-supplied signal), registers it in activeControllers
// for cancelAll, and records start/complete/fail counters plus
// request-start/request-end events around the inner effect. An interrupt
// caused by the caller's own signal is rewritten into an Abort failure so
// callers see a consistent error tag.
function trackRequest(fn, req, tracker, internals) {
  return {
    _tag: "Async",
    register: (env, cb) => {
      const controller = new AbortController();
      const previousSignal = req.init?.signal;
      let done = false;
      let abortedByPreviousSignal = false;
      let cancelInner;
      // Propagates a caller-side abort into our controller and the inner
      // effect, remembering the origin for exit rewriting in finish().
      const abortFromPrevious = () => {
        abortedByPreviousSignal = true;
        try {
          controller.abort(previousSignal?.reason);
        } catch {
          // Some runtimes reject certain abort reasons; retry without one.
          controller.abort();
        }
        cancelInner?.();
      };
      if (previousSignal?.aborted) {
        abortFromPrevious();
      } else {
        previousSignal?.addEventListener("abort", abortFromPrevious, { once: true });
      }
      internals.activeControllers.add(controller);
      tracker.requestStarted();
      tracker.emit("request-start");
      // Single completion path: idempotent, updates counters, cleans up the
      // abort listener and controller registration, then settles the caller.
      const finish = (exit0) => {
        if (done) return;
        done = true;
        const exit = abortedByPreviousSignal && exit0._tag === "Failure" && exit0.cause._tag === "Interrupt" ? { _tag: "Failure", cause: Cause.fail({ _tag: "Abort" }) } : exit0;
        previousSignal?.removeEventListener("abort", abortFromPrevious);
        internals.activeControllers.delete(controller);
        if (exit._tag === "Success") {
          tracker.requestCompleted();
        } else {
          tracker.requestFailed();
        }
        tracker.emit("request-end");
        cb(exit);
      };
      // Hand the inner pipeline our controller's signal in place of the
      // caller's (the chain above keeps the caller's abort working).
      const trackedReq = {
        ...req,
        init: {
          ...req.init ?? {},
          signal: controller.signal
        }
      };
      try {
        cancelInner = registerHttpEffect(fn(trackedReq), env, finish);
      } catch (error) {
        // A synchronous throw during registration becomes a FetchError.
        finish({
          _tag: "Failure",
          cause: Cause.fail({ _tag: "FetchError", message: String(error) })
        });
      }
      // Canceller for effect interruption: abort, then delegate to the inner
      // canceller (or settle directly if registration never produced one).
      return () => {
        if (done) return;
        try {
          controller.abort();
        } catch {
        }
        if (cancelInner) {
          cancelInner();
        } else {
          finish({ _tag: "Failure", cause: Cause.interrupt() });
        }
      };
    }
  };
}
|
|
2699
|
+
|
|
2700
|
+
// src/http/lifecycle/middleware.ts
|
|
2701
|
+
/**
 * Middleware that resolves a bearer token before each request and attaches
 * it as an `Authorization: Bearer <token>` header, then delegates to the
 * next handler. The token provider is invoked per request.
 */
function withAuth(tokenProvider) {
  return (next) => (req) => asyncFlatMap(tokenProvider(), (token) => {
    const authedReq = {
      ...req,
      headers: {
        ...req.headers ?? {},
        Authorization: `Bearer ${token}`
      }
    };
    return next(authedReq);
  });
}
|
|
2717
|
+
/**
 * Middleware reporting request / response / error phases to `logger`, with
 * the round-trip duration in whole milliseconds on the settle phases.
 * Exceptions thrown by the logger are swallowed in every phase so that
 * observability can never break the request itself.
 */
function withLogging(logger) {
  // Invoke the logger defensively; its failures must not propagate.
  const quietLog = (entry) => {
    try {
      logger(entry);
    } catch {
      // deliberately ignored
    }
  };
  return (next) => (req) => {
    quietLog({ phase: "request", req });
    const startedAt = now();
    const elapsed = () => Math.round(now() - startedAt);
    return asyncFold(
      next(req),
      (error) => {
        quietLog({ phase: "error", req, error, durationMs: elapsed() });
        return asyncFail(error);
      },
      (res) => {
        quietLog({ phase: "response", req, res, durationMs: elapsed() });
        return asyncSucceed(res);
      }
    );
  };
}
|
|
2747
|
+
/**
 * Middleware that maps every successful response through `fn(res, req)`.
 * Failures from the inner handler pass through untouched; an exception
 * thrown by the transform itself is converted into a FetchError failure.
 */
function withResponseTransform(fn) {
  return (next) => (req) => asyncFold(
    next(req),
    (error) => asyncFail(error),
    (res) => {
      let transformed;
      try {
        transformed = fn(res, req);
      } catch (thrown) {
        return asyncFail({ _tag: "FetchError", message: String(thrown) });
      }
      return asyncSucceed(transformed);
    }
  );
}
|
|
2767
|
+
|
|
2768
|
+
// src/http/compression/decompressor.ts
|
|
2769
|
+
import zlib from "zlib";
|
|
2770
|
+
|
|
2771
|
+
// src/http/compression/environment.ts
|
|
2772
|
+
/** True when running under Node.js (`process.versions.node` is present). */
function isNodeEnvironment() {
  if (typeof process === "undefined") return false;
  const versions = process.versions;
  return versions != null && versions.node != null;
}
|
|
2775
|
+
|
|
2776
|
+
// src/http/compression/decompressorNode.ts
|
|
2777
|
+
/**
 * Builds a decompressor backed by Node's zlib, supporting gzip, brotli
 * ("br") and deflate. `decompress` never throws: unsupported encodings and
 * decode failures are reported as `{ ok: false, error }` result objects.
 */
function createNodeDecompressor(zlib2) {
  // Dispatch table instead of a switch; the own-key check below keeps
  // prototype-inherited names from being treated as valid encodings.
  const inflaters = {
    gzip: (buf) => zlib2.gunzipSync(buf),
    br: (buf) => zlib2.brotliDecompressSync(buf),
    deflate: (buf) => zlib2.inflateSync(buf)
  };
  return {
    isPassthrough: false,
    decompress(data, encoding) {
      try {
        const input = Buffer.isBuffer(data) ? data : Buffer.from(data);
        if (!Object.hasOwn(inflaters, encoding)) {
          return { ok: false, error: `Unsupported encoding: ${encoding}` };
        }
        return { ok: true, data: inflaters[encoding](input) };
      } catch (err) {
        return {
          ok: false,
          error: err instanceof Error ? err.message : String(err)
        };
      }
    }
  };
}
|
|
2805
|
+
|
|
2806
|
+
// src/http/compression/decompressor.noop.ts
|
|
2807
|
+
/**
 * Fallback decompressor for non-Node runtimes: returns the payload as a
 * Buffer without decoding anything. `isPassthrough: true` lets callers
 * detect this mode.
 */
function createNoopDecompressor() {
  return {
    isPassthrough: true,
    decompress(data, _encoding) {
      return {
        ok: true,
        data: Buffer.isBuffer(data) ? data : Buffer.from(data)
      };
    }
  };
}
|
|
2816
|
+
|
|
2817
|
+
// src/http/compression/decompressor.ts
|
|
2818
|
+
/**
 * Picks the decompressor implementation for the current runtime: real zlib
 * decoding under Node, the passthrough stub everywhere else.
 */
function createDecompressor() {
  return isNodeEnvironment() ? createNodeDecompressor(zlib) : createNoopDecompressor();
}
|
|
2824
|
+
|
|
2825
|
+
// src/http/compression/types.ts
// Content-Encoding values the compression middleware is able to decode.
var SUPPORTED_ENCODINGS = ["br", "gzip", "deflate"];
|
|
2827
|
+
/** Fresh zeroed response-decompression stats record (new objects per call). */
function emptyStats2() {
  const perEncoding = { gzip: 0, br: 0, deflate: 0 };
  return {
    decompressed: perEncoding,
    compressedBytes: 0,
    decompressedBytes: 0,
    passthroughCount: 0,
    errorCount: 0,
    unsupportedEncodingCount: 0
  };
}
|
|
2837
|
+
/** Fresh zeroed request-compression stats record (a new object per call). */
function emptyRequestCompressionStats() {
  const counters = [
    "compressedCount",
    "skippedCount",
    "errorCount",
    "originalBytes",
    "compressedBytes"
  ];
  return Object.fromEntries(counters.map((name) => [name, 0]));
}
|
|
2846
|
+
|
|
2847
|
+
// src/http/compression/middleware.ts
|
|
2848
|
+
/**
 * Adds an `Accept-Encoding` header advertising `encodings` unless the
 * request already carries one (checked case-insensitively), in which case
 * the original request object is returned unchanged.
 */
function injectAcceptEncoding(req, encodings) {
  const headers = req.headers ?? {};
  const alreadySet = Object.keys(headers).some(
    (name) => name.toLowerCase() === "accept-encoding"
  );
  if (alreadySet) return req;
  const mergedHeaders = {
    ...headers,
    "Accept-Encoding": encodings.join(", ")
  };
  return { ...req, headers: mergedHeaders };
}
|
|
2862
|
+
/** True when `enc` is one of the encodings this module can decode. */
function isSupportedEncoding(enc) {
  return SUPPORTED_ENCODINGS.some((supported) => supported === enc);
}
|
|
2865
|
+
// Decode a compressed HTTP response body in place of the transport.
//
// Walks the Content-Encoding chain right-to-left (the reverse of the order
// the codings were applied), removing one layer at a time with
// `decompressor`. Decoding stops early when it meets a coding that is
// unknown or not enabled; in that case the response is returned with the
// *remaining* (still-applied) codings in the header and the partially
// decoded bytes re-serialized as latin1. A fully decoded body drops every
// Content-Encoding header, rewrites Content-Length, and is returned as
// utf-8 text. `stats` is mutated to record byte counts and outcomes.
function processResponse(res, decompressor, enabledEncodings, stats) {
  const contentEncodingKey = Object.keys(res.headers).find(
    (k) => k.toLowerCase() === "content-encoding"
  );
  const contentEncodingValue = contentEncodingKey ? res.headers[contentEncodingKey]?.trim() : void 0;
  if (!contentEncodingValue || contentEncodingValue.toLowerCase() === "identity") {
    stats.passthroughCount++;
    return res;
  }
  const encodings = contentEncodingValue.split(",").map((e) => e.trim().toLowerCase());
  // Codings are listed in application order, so they are removed in reverse.
  const reversedEncodings = [...encodings].reverse();
  let currentData = Buffer.from(res.bodyText, "latin1");
  let decompressedCount = 0;
  // Response returned when decoding stops partway: the codings we could not
  // remove (restored to header order) stay in the header and the partially
  // decoded bytes become the body. (This construction was duplicated
  // verbatim in two branches before.)
  const partiallyDecoded = (remainingReversed) => {
    const headers = { ...res.headers };
    if (contentEncodingKey) {
      headers[contentEncodingKey] = [...remainingReversed].reverse().join(", ");
    }
    headers["Content-Length"] = String(currentData.byteLength);
    return { ...res, headers, bodyText: currentData.toString("latin1") };
  };
  for (let i = 0; i < reversedEncodings.length; i++) {
    const enc = reversedEncodings[i];
    if (!isSupportedEncoding(enc)) {
      stats.unsupportedEncodingCount++;
      if (decompressedCount === 0) {
        stats.passthroughCount++;
        return res;
      }
      return partiallyDecoded(reversedEncodings.slice(i));
    }
    if (!enabledEncodings.includes(enc)) {
      // NOTE: counted as a passthrough even when some layers were already
      // removed — accounting preserved from the original implementation.
      stats.passthroughCount++;
      if (decompressedCount === 0) {
        return res;
      }
      return partiallyDecoded(reversedEncodings.slice(i));
    }
    const result = decompressor.decompress(currentData, enc);
    if (!result.ok) {
      stats.errorCount++;
      return res;
    }
    stats.compressedBytes += currentData.byteLength;
    stats.decompressedBytes += result.data.byteLength;
    stats.decompressed[enc]++;
    decompressedCount++;
    currentData = result.data;
  }
  // Fully decoded: strip every Content-Encoding header variant (covers
  // pathological responses with duplicate keys differing only in case —
  // the old code removed at most two of them).
  const newHeaders = { ...res.headers };
  for (const key of Object.keys(newHeaders)) {
    if (key.toLowerCase() === "content-encoding") delete newHeaders[key];
  }
  newHeaders["Content-Length"] = String(currentData.byteLength);
  return {
    ...res,
    headers: newHeaders,
    bodyText: currentData.toString("utf-8")
  };
}
|
|
2944
|
+
// Build an HTTP middleware that transparently decompresses response bodies.
// Outgoing requests get an Accept-Encoding header for the enabled codings;
// successful responses are decoded via processResponse. HttpErrors pass
// through untouched. Returns { middleware, stats } where stats() yields a
// frozen snapshot of the mutable counters.
function makeCompressionMiddleware(config) {
  const encodings = config?.encodings ?? [...SUPPORTED_ENCODINGS];
  const decompressor = createDecompressor();
  const counters = emptyStats2();
  const middleware = (next) => (req) =>
    asyncFold(
      next(injectAcceptEncoding(req, encodings)),
      // Errors travel through unchanged.
      (error) => asyncFail(error),
      // Successful responses get their body decoded — unless the runtime
      // has no decompression support at all, in which case we just count it.
      (res) => {
        if (decompressor.isPassthrough) {
          counters.passthroughCount++;
          return asyncSucceed(res);
        }
        return asyncSucceed(processResponse(res, decompressor, encodings, counters));
      }
    );
  // Immutable snapshot of the accumulated statistics.
  const stats = () =>
    Object.freeze({
      decompressed: Object.freeze({ ...counters.decompressed }),
      compressedBytes: counters.compressedBytes,
      decompressedBytes: counters.decompressedBytes,
      passthroughCount: counters.passthroughCount,
      errorCount: counters.errorCount,
      unsupportedEncodingCount: counters.unsupportedEncodingCount
    });
  return { middleware, stats };
}
|
|
2982
|
+
// Alias: the response-side factory is also exported under this explicit
// name (both appear in this module's export list).
var makeResponseCompressionMiddleware = makeCompressionMiddleware;
// HTTP methods whose request bodies are considered for compression by default.
var DEFAULT_REQUEST_COMPRESS_METHODS = ["POST", "PUT", "PATCH"];
|
|
2984
|
+
// Build a middleware that compresses outgoing request bodies.
// Config (all optional): `encoding` (default "gzip"), `minBytes` minimum
// body size to bother compressing (default 1024, clamped to >= 0),
// `methods` (default POST/PUT/PATCH, matched case-insensitively).
// Returns { middleware, stats } where stats() yields a frozen snapshot.
function makeRequestCompressionMiddleware(config) {
  const encoding = config?.encoding ?? "gzip";
  const minBytes = Math.max(0, Math.floor(config?.minBytes ?? 1024));
  const methodList = config?.methods ?? DEFAULT_REQUEST_COMPRESS_METHODS;
  const methods = new Set(methodList.map((m) => m.toUpperCase()));
  const counters = emptyRequestCompressionStats();
  const middleware = (next) => (req) =>
    next(compressRequest(req, encoding, minBytes, methods, counters));
  const stats = () =>
    Object.freeze({
      compressedCount: counters.compressedCount,
      skippedCount: counters.skippedCount,
      errorCount: counters.errorCount,
      originalBytes: counters.originalBytes,
      compressedBytes: counters.compressedBytes
    });
  return { middleware, stats };
}
|
|
3004
|
+
// Compress an outgoing request body when it qualifies; otherwise return the
// request unchanged. A request qualifies when its method is in `methods`,
// it has a body, no Content-Encoding header is already present, and the
// body is at least `minBytes` long. Compression failures are swallowed
// (the uncompressed request is sent) and recorded in `stats.errorCount`.
function compressRequest(req, encoding, minBytes, methods, stats) {
  const skip = () => {
    stats.skippedCount++;
    return req;
  };
  if (!methods.has(req.method.toUpperCase())) return skip();
  if (req.body === void 0) return skip();
  if (hasHeader(req.headers, "content-encoding")) return skip();
  const originalBytes = httpBodyByteLength(req.body);
  if (originalBytes < minBytes) return skip();
  try {
    const body = compressBuffer(httpBodyToBuffer(req.body), encoding);
    stats.compressedCount++;
    stats.originalBytes += originalBytes;
    stats.compressedBytes += body.byteLength;
    const headers = setHeaders(req.headers, {
      "Content-Encoding": encoding,
      "Content-Length": String(body.byteLength)
    });
    return { ...req, body, headers };
  } catch {
    stats.errorCount++;
    return req;
  }
}
|
|
3036
|
+
// Synchronously compress `input` with the given content-coding, loading
// Node's zlib lazily (via the bundler's __require shim) so environments
// that never compress requests do not pay for it.
// Throws on an unrecognized encoding instead of silently returning
// undefined (the previous behavior); the caller's catch path then records
// it as an error with a diagnosable message rather than a TypeError.
function compressBuffer(input, encoding) {
  const zlib2 = __require("zlib");
  switch (encoding) {
    case "gzip":
      return zlib2.gzipSync(input);
    case "br":
      return zlib2.brotliCompressSync(input);
    case "deflate":
      return zlib2.deflateSync(input);
    default:
      throw new Error(`unsupported request compression encoding: ${String(encoding)}`);
  }
}
|
|
3047
|
+
// Case-insensitive presence test for header `name` in `headers`
// (which may be undefined/null).
function hasHeader(headers, name) {
  if (!headers) return false;
  const wanted = name.toLowerCase();
  for (const key of Object.keys(headers)) {
    if (key.toLowerCase() === wanted) return true;
  }
  return false;
}
|
|
3052
|
+
// Merge `values` into `headers` without mutating it, matching existing keys
// case-insensitively so e.g. "content-length" is overwritten by a
// "Content-Length" update instead of duplicated. Returns a new object.
function setHeaders(headers, values) {
  const merged = { ...(headers ?? {}) };
  for (const [name, value] of Object.entries(values)) {
    const wanted = name.toLowerCase();
    const match = Object.keys(merged).find((k) => k.toLowerCase() === wanted);
    merged[match ?? name] = value;
  }
  return merged;
}
|
|
3061
|
+
|
|
3062
|
+
// src/http/batching.ts
// Default maximum number of requests coalesced into one batch.
var DEFAULT_MAX_BATCH_SIZE = 16;
// Default time (ms) a batch keeps collecting entries before it is flushed.
var DEFAULT_MAX_WAIT_MS = 5;
|
|
3065
|
+
// Middleware factory: coalesce individual requests that share a batch key
// into a single downstream request.
//
// Requests are grouped by `config.key` (default "METHOD:URL"). A group is
// flushed when it reaches `maxBatchSize` entries or `maxWaitMs` after its
// first entry, whichever comes first. `config.encode` turns the queued
// requests into one batch request; `config.decode` splits the batch
// response back into exactly one response per queued request. Requests for
// which `config.shouldBatch` returns false, whose key is falsy, or whose
// key/predicate throws bypass batching and go straight to `next`.
function withRequestBatching(config) {
  const maxBatchSize = Math.max(1, Math.floor(config.maxBatchSize ?? DEFAULT_MAX_BATCH_SIZE));
  const maxWaitMs = Math.max(0, Math.floor(config.maxWaitMs ?? DEFAULT_MAX_WAIT_MS));
  const keyOf = config.key ?? ((req) => `${req.method}:${req.url}`);
  // key -> { key, entries, timer? } for batches still collecting entries.
  const pending = /* @__PURE__ */ new Map();
  return (next) => {
    return (req) => {
      let key;
      try {
        if (config.shouldBatch && !config.shouldBatch(req)) return next(req);
        key = keyOf(req) ?? void 0;
      } catch {
        // A throwing predicate or key function disables batching for this request.
        return next(req);
      }
      if (!key) return next(req);
      // Hand back a raw Async effect so the request is only enqueued when the
      // effect is actually run; the returned thunk is the per-entry canceller.
      return {
        _tag: "Async",
        register: (env, cb) => {
          const entry = { req, env, cb, cancelled: false, done: false };
          const batch = getOrCreatePending(key);
          batch.entries.push(entry);
          emit(config, { type: "batch-enqueue", key, size: batch.entries.length, request: req });
          if (batch.entries.length >= maxBatchSize) {
            flush(key, next, "size");
          } else if (batch.timer === void 0) {
            // First entry of a new batch arms the flush timer.
            batch.timer = setTimeout(() => flush(key, next, "timer"), maxWaitMs);
          }
          return () => cancelEntry(key, entry);
        }
      };
    };
    // Look up the collecting batch for `key`, creating it on first use.
    function getOrCreatePending(key) {
      const existing = pending.get(key);
      if (existing) return existing;
      const created = { key, entries: [] };
      pending.set(key, created);
      return created;
    }
    // Interrupt one queued request. If its batch has not flushed yet it is
    // removed from the queue (and the batch torn down when it empties); if
    // the batch is already in flight, the downstream effect is cancelled
    // once every entry of the flushed group is cancelled or done.
    function cancelEntry(key, entry) {
      if (entry.done || entry.cancelled) return;
      entry.cancelled = true;
      complete(entry, { _tag: "Failure", cause: Cause.interrupt() });
      const queued = pending.get(key);
      if (queued) {
        queued.entries = queued.entries.filter((e) => e !== entry);
        emit(config, { type: "batch-cancel", key, remaining: queued.entries.length });
        if (queued.entries.length === 0) {
          if (queued.timer !== void 0) clearTimeout(queued.timer);
          pending.delete(key);
        }
        return;
      }
      // Batch already flushed: `group` was attached by flush().
      const group = entry.group;
      if (!group || group.cancelled) return;
      if (group.entries.every((e) => e.cancelled || e.done)) {
        group.cancelled = true;
        group.cancel?.();
      }
    }
    // Send the collected batch downstream and fan the result back out to
    // each live entry. `reason` ("size" | "timer") is reported in events.
    function flush(key, downstream, reason) {
      const batch = pending.get(key);
      if (!batch) return;
      pending.delete(key);
      if (batch.timer !== void 0) clearTimeout(batch.timer);
      const entries = batch.entries.filter((entry) => !entry.cancelled && !entry.done);
      if (entries.length === 0) return;
      emit(config, { type: "batch-flush", key, size: entries.length, reason });
      let batchReq;
      try {
        batchReq = config.encode(entries.map((entry) => entry.req));
      } catch (e) {
        // Encoder failure fails every entry in the batch.
        failEntries(config, key, entries, toFetchError(e));
        return;
      }
      const group = { key, entries, cancelled: false };
      for (const entry of entries) entry.group = group;
      // NOTE(review): the batch effect runs under the env of the first
      // entry — assumes all entries in a batch share an environment.
      const effect = downstream(batchReq);
      group.cancel = runEffect(effect, entries[0].env, (exit) => {
        if (exit._tag === "Failure") {
          // Map the cause to an HttpError shared by all entries.
          const err = exit.cause._tag === "Fail" ? exit.cause.error : exit.cause._tag === "Interrupt" ? { _tag: "Abort" } : toFetchError(exit.cause.defect);
          failEntries(config, key, entries, err);
          return;
        }
        let decoded;
        try {
          decoded = config.decode(exit.value, entries.map((entry) => entry.req));
          if (decoded.length !== entries.length) {
            throw new Error(`batch decoder returned ${decoded.length} responses for ${entries.length} requests`);
          }
        } catch (e) {
          failEntries(config, key, entries, toFetchError(e));
          return;
        }
        // Responses are positional: decoded[i] answers entries[i].
        for (let i = 0; i < entries.length; i++) {
          complete(entries[i], { _tag: "Success", value: decoded[i] });
        }
      });
    }
  };
}
|
|
3165
|
+
// Deliver `exit` to the entry's callback exactly once; later calls no-op.
function complete(entry, exit) {
  if (!entry.done) {
    entry.done = true;
    entry.cb(exit);
  }
}
|
|
3170
|
+
// Fail every entry of a batch with the same error, emitting one
// batch-error event first. Entries that already completed are skipped
// by `complete`'s once-only guard.
function failEntries(config, key, entries, error) {
  emit(config, { type: "batch-error", key, size: entries.length, error });
  entries.forEach((entry) => {
    complete(entry, { _tag: "Failure", cause: Cause.fail(error) });
  });
}
|
|
3176
|
+
// Normalize an arbitrary thrown value into an HttpError: recognized
// HttpErrors pass through; anything else becomes a FetchError carrying a
// best-effort string message.
function toFetchError(error) {
  if (isHttpError(error)) return error;
  const message = error instanceof Error ? error.message : String(error);
  return { _tag: "FetchError", message };
}
|
|
3180
|
+
// Type guard: does `error` look like one of this module's HttpError
// variants (a non-null object with a recognized `_tag`)?
function isHttpError(error) {
  if (typeof error !== "object" || error === null || !("_tag" in error)) return false;
  switch (error._tag) {
    case "Abort":
    case "BadUrl":
    case "FetchError":
    case "Timeout":
    case "PoolRejected":
    case "PoolTimeout":
      return true;
    default:
      return false;
  }
}
|
|
3185
|
+
// Best-effort notification of the optional onEvent observer. Exceptions
// thrown by the observer are swallowed so instrumentation can never break
// request batching.
function emit(config, event) {
  const listener = config.onEvent;
  if (!listener) return;
  try {
    listener.call(config, event);
  } catch {
    // intentionally ignored
  }
}
|
|
3192
|
+
// Minimal CPS interpreter for the effect AST used by the batching
// middleware. Runs `effect` with environment `env`, invoking `cb` exactly
// once with the final Exit (unless cancelled first). Returns a cancel
// thunk: once called, no further callbacks fire and the canceller of the
// currently in-flight Async step (if any) is invoked.
//
// NOTE(review): FlatMap/Fold interpretation recurses on the JS stack, so a
// very long synchronous chain could overflow — presumably acceptable for
// the short pipelines built here; confirm if reused elsewhere.
function runEffect(effect, env, cb) {
  let cancelled = false;
  // Canceller of the Async step currently awaiting its callback, if any.
  let currentCancel;
  const finish = (exit) => {
    if (cancelled) return;
    cb(exit);
  };
  // Evaluate `eff`, then feed its Exit to continuation `k`.
  const run = (eff, k) => {
    if (cancelled) return;
    switch (eff._tag) {
      case "Succeed":
        // Pure value.
        k({ _tag: "Success", value: eff.value });
        return;
      case "Fail":
        // Expected (typed) failure.
        k({ _tag: "Failure", cause: Cause.fail(eff.error) });
        return;
      case "Sync":
        // Synchronous thunk; a throw becomes a defect (die), not a Fail.
        try {
          k({ _tag: "Success", value: eff.thunk(env) });
        } catch (e) {
          k({ _tag: "Failure", cause: Cause.die(e) });
        }
        return;
      case "Async": {
        // Register the callback and remember the step's canceller (if the
        // registration returned one) until the callback fires.
        const cancel = eff.register(env, (exit) => {
          currentCancel = void 0;
          k(exit);
        });
        currentCancel = typeof cancel === "function" ? cancel : void 0;
        return;
      }
      case "FlatMap":
        // Sequence: failures short-circuit; a throwing continuation is a defect.
        run(eff.first, (exit) => {
          if (exit._tag === "Failure") {
            k(exit);
            return;
          }
          try {
            run(eff.andThen(exit.value), k);
          } catch (e) {
            k({ _tag: "Failure", cause: Cause.die(e) });
          }
        });
        return;
      case "Fold":
        // Branch on the outcome; only typed Fail causes are recoverable —
        // defects and interrupts propagate unchanged.
        run(eff.first, (exit) => {
          try {
            if (exit._tag === "Success") {
              run(eff.onSuccess(exit.value), k);
            } else if (exit.cause._tag === "Fail") {
              run(eff.onFailure(exit.cause.error), k);
            } else {
              k(exit);
            }
          } catch (e) {
            k({ _tag: "Failure", cause: Cause.die(e) });
          }
        });
        return;
      case "Fork":
        // Forking is not supported here; treated as an immediate no-op success.
        k({ _tag: "Success", value: void 0 });
        return;
    }
  };
  run(effect, finish);
  return () => {
    if (cancelled) return;
    cancelled = true;
    currentCancel?.();
  };
}
|
|
3263
|
+
|
|
3264
|
+
// src/http/prewarm.ts
|
|
3265
|
+
// Warm HTTP connections by issuing lightweight requests (default HEAD,
// cache: "no-store") to each target resolved from `config` (urls/origins/
// baseUrl — see resolveTargets). Returns an abortable effect (via
// fromPromiseAbortable — presumably wraps the async fn into an effect
// honoring the provided AbortSignal; confirm against its definition)
// yielding a summary { attempted, warmed, failed, skipped, attempts }.
// Individual failures are recorded and reported via onEvent; with
// `failFast` the first failure aborts the whole run. An optional
// `timeoutMs` maps to a Timeout HttpError.
function prewarmConnections(config = {}) {
  const fetchImpl = config.fetchImpl ?? globalThis.fetch;
  const method = config.method ?? "HEAD";
  const targets = resolveTargets(config);
  return fromPromiseAbortable(
    async (signal) => {
      // No fetch available (non-browser/non-Node18 runtime) or nothing to
      // warm: report everything as skipped without touching the network.
      if (typeof fetchImpl !== "function" || targets.length === 0) {
        return { attempted: 0, warmed: 0, failed: 0, skipped: targets.length, attempts: [] };
      }
      const attempts = [];
      // Targets are warmed sequentially, each timed individually.
      for (const url of targets) {
        const origin = new URL(url).origin;
        const started = performance.now();
        emit2(config, { type: "prewarm-start", url, origin });
        try {
          const res = await fetchImpl(url, {
            method,
            headers: config.headers,
            cache: "no-store",
            signal
          });
          const ms = Math.round(performance.now() - started);
          attempts.push({ url, origin, ok: true, status: res.status, ms });
          emit2(config, { type: "prewarm-success", url, origin, status: res.status, ms });
        } catch (e) {
          const ms = Math.round(performance.now() - started);
          const error = normalizePrewarmError(e);
          attempts.push({ url, origin, ok: false, error, ms });
          emit2(config, { type: "prewarm-failure", url, origin, error, ms });
          // failFast: abandon the remaining targets on the first failure.
          if (config.failFast) throw error;
        }
      }
      return {
        attempted: attempts.length,
        warmed: attempts.filter((attempt) => attempt.ok).length,
        failed: attempts.filter((attempt) => !attempt.ok).length,
        skipped: 0,
        attempts
      };
    },
    normalizePrewarmError,
    {
      label: "http:prewarm",
      timeoutMs: config.timeoutMs,
      timeoutReason: config.timeoutMs ? () => ({
        _tag: "Timeout",
        timeoutMs: config.timeoutMs,
        phase: "request",
        message: `HTTP prewarm timed out after ${config.timeoutMs}ms`
      }) : void 0
    }
  );
}
|
|
3318
|
+
// Alias of prewarmConnections (both names appear in the export list).
var prewarmHttpConnections = prewarmConnections;
|
|
3319
|
+
// Middleware that warms a request's origin before forwarding the request.
// With `once` (default true) each origin is prewarmed at most one time, and
// origins with a prewarm currently in flight are not re-attempted. Prewarm
// failures fall back to sending the request normally unless `failFast` is
// set or the prewarm was aborted, in which cases the error propagates.
function withConnectionPrewarming(config = {}) {
  const once = config.once ?? true;
  const warmedOrigins = /* @__PURE__ */ new Set();
  const inFlight = /* @__PURE__ */ new Set();
  return (next) => (req) => {
    if (config.shouldPrewarm && !config.shouldPrewarm(req)) return next(req);
    const target = config.target?.(req) ?? req.url;
    if (!target) return next(req);
    const resolved = resolveUrl(target, config.baseUrl);
    if (!resolved) return next(req);
    const origin = resolved.origin;
    if (once && (warmedOrigins.has(origin) || inFlight.has(origin))) {
      return next(req);
    }
    inFlight.add(origin);
    const prewarmEffect = prewarmConnections({
      ...config,
      urls: [resolved.toString()],
      origins: void 0,
      onEvent: (event) => {
        // Mark the origin warmed as soon as one attempt succeeds, then
        // forward the event to the user's observer.
        if (event.type === "prewarm-success") warmedOrigins.add(origin);
        config.onEvent?.(event);
      }
    });
    return asyncFold(
      prewarmEffect,
      (error) => {
        inFlight.delete(origin);
        // Aborts and failFast propagate; other errors degrade to a normal send.
        if (config.failFast || error._tag === "Abort") return asyncFail(error);
        return next(req);
      },
      () => {
        inFlight.delete(origin);
        return next(req);
      }
    );
  };
}
|
|
3354
|
+
// Expand a prewarm config into a deduplicated list of absolute URL strings:
// explicit `urls` (resolved against baseUrl) first, then each `origins`
// entry joined with `path` (default "/"); if both lists produced nothing
// and a baseUrl exists, baseUrl+path becomes the single target.
// Unparseable entries are silently dropped.
function resolveTargets(config) {
  const path = config.path ?? "/";
  const targets = [];
  const push = (value, base) => {
    const url = resolveUrl(value, base);
    if (url) targets.push(url.toString());
  };
  (config.urls ?? []).forEach((url) => push(url, config.baseUrl));
  (config.origins ?? []).forEach((origin) => push(path, origin));
  if (targets.length === 0 && config.baseUrl) push(path, config.baseUrl);
  return [...new Set(targets)];
}
|
|
3371
|
+
// Parse `value` (optionally relative to `baseUrl`) into a URL, returning
// undefined instead of throwing on invalid input. An empty/falsy baseUrl
// is treated as absent.
function resolveUrl(value, baseUrl) {
  const base = baseUrl || void 0;
  try {
    return new URL(value, base);
  } catch {
    return void 0;
  }
}
|
|
3378
|
+
// Map a value thrown during prewarming into an HttpError: recognized
// HttpErrors pass through, DOM AbortErrors become { _tag: "Abort" }, and
// everything else a FetchError with a best-effort message.
function normalizePrewarmError(error) {
  if (isHttpError2(error)) return error;
  const isAbort = typeof error === "object" && error !== null && error.name === "AbortError";
  if (isAbort) return { _tag: "Abort" };
  const message = error instanceof Error ? error.message : String(error);
  return { _tag: "FetchError", message };
}
|
|
3385
|
+
// Type guard for this module's HttpError variants (duplicate of the
// batching module's guard, kept separate by the bundler).
function isHttpError2(error) {
  if (typeof error !== "object" || error === null) return false;
  if (!("_tag" in error)) return false;
  const known = ["Abort", "BadUrl", "FetchError", "Timeout", "PoolRejected", "PoolTimeout"];
  return known.includes(error._tag);
}
|
|
3390
|
+
// Best-effort notification of the optional onEvent observer; exceptions it
// throws are suppressed so instrumentation can never fail a prewarm.
function emit2(config, event) {
  if (typeof config.onEvent !== "function") return;
  try {
    config.onEvent(event);
  } catch {
    // intentionally ignored
  }
}
|
|
1106
3397
|
// Public surface of the bundled http module.
export {
  DEFAULT_CACHE_RELEVANT_HEADERS,
  HttpConcurrencyPool,
  LRUCache,
  LifecycleStatsTracker,
  PriorityQueue,
  SEPARATOR,
  SUPPORTED_ENCODINGS,
  backoffDelayMs,
  clampPriority,
  computeCacheKey,
  decorate,
  defaultRetryOnError,
  defaultRetryOnStatus,
  defaultRetryableMethods,
  httpClient,
  httpClientStream,
  httpClientWithMeta,
  makeCompressionMiddleware,
  makeHttp,
  makeHttpClient,
  makeHttpStream,
  makeLifecycleClient,
  makeRequestCompressionMiddleware,
  makeResponseCompressionMiddleware,
  normalizeHeadersInit,
  normalizeRetryBudget,
  parseCacheKey,
  prewarmConnections,
  prewarmHttpConnections,
  resolveHttpPoolKey,
  retryAfterMs,
  validatedJson,
  withAuth,
  withCache,
  withCircuitBreaker,
  withConnectionPrewarming,
  withDedup,
  withLogging,
  withMiddleware,
  withPriority,
  withRequestBatching,
  withResponseTransform,
  withRetry,
  withRetryStream,
  withTracing
};
|