fastfetch-api-fetch-enhancer 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.idea/FastFetch-Smart-API-Fetcher.iml +12 -0
- package/.idea/modules.xml +8 -0
- package/.idea/vcs.xml +6 -0
- package/LICENSE +21 -0
- package/README.md +101 -0
- package/__tests__/demo.test.ts +216 -0
- package/__tests__/test_database.json +5002 -0
- package/coverage/clover.xml +463 -0
- package/coverage/coverage-final.json +11 -0
- package/coverage/lcov-report/base.css +224 -0
- package/coverage/lcov-report/block-navigation.js +87 -0
- package/coverage/lcov-report/circuit-breaker.ts.html +547 -0
- package/coverage/lcov-report/client.ts.html +1858 -0
- package/coverage/lcov-report/errors.ts.html +415 -0
- package/coverage/lcov-report/fastFetch.ts.html +1045 -0
- package/coverage/lcov-report/favicon.png +0 -0
- package/coverage/lcov-report/index.html +251 -0
- package/coverage/lcov-report/index.ts.html +241 -0
- package/coverage/lcov-report/metrics.ts.html +685 -0
- package/coverage/lcov-report/middleware.ts.html +403 -0
- package/coverage/lcov-report/offline-queue.ts.html +535 -0
- package/coverage/lcov-report/prettify.css +1 -0
- package/coverage/lcov-report/prettify.js +2 -0
- package/coverage/lcov-report/queue.ts.html +421 -0
- package/coverage/lcov-report/sort-arrow-sprite.png +0 -0
- package/coverage/lcov-report/sorter.js +196 -0
- package/coverage/lcov-report/streaming.ts.html +466 -0
- package/coverage/lcov.info +908 -0
- package/dist/circuit-breaker.d.ts +61 -0
- package/dist/circuit-breaker.d.ts.map +1 -0
- package/dist/circuit-breaker.js +106 -0
- package/dist/client.d.ts +215 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +391 -0
- package/dist/errors.d.ts +56 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/errors.js +91 -0
- package/dist/fastFetch.d.ts +65 -0
- package/dist/fastFetch.d.ts.map +1 -0
- package/dist/fastFetch.js +209 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +18 -0
- package/dist/metrics.d.ts +71 -0
- package/dist/metrics.d.ts.map +1 -0
- package/dist/metrics.js +131 -0
- package/dist/middleware.d.ts +66 -0
- package/dist/middleware.d.ts.map +1 -0
- package/dist/middleware.js +45 -0
- package/dist/offline-queue.d.ts +65 -0
- package/dist/offline-queue.d.ts.map +1 -0
- package/dist/offline-queue.js +120 -0
- package/dist/queue.d.ts +33 -0
- package/dist/queue.d.ts.map +1 -0
- package/dist/queue.js +76 -0
- package/dist/streaming.d.ts +40 -0
- package/dist/streaming.d.ts.map +1 -0
- package/dist/streaming.js +98 -0
- package/index.d.ts +167 -0
- package/jest.config.js +16 -0
- package/package.json +55 -0
- package/src/circuit-breaker.ts +154 -0
- package/src/client.ts +591 -0
- package/src/errors.ts +110 -0
- package/src/fastFetch.ts +320 -0
- package/src/index.ts +52 -0
- package/src/metrics.ts +200 -0
- package/src/middleware.ts +106 -0
- package/src/offline-queue.ts +150 -0
- package/src/queue.ts +112 -0
- package/src/streaming.ts +127 -0
- package/tsconfig.json +18 -0
package/src/fastFetch.ts
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
import fetch from "cross-fetch";
|
|
2
|
+
import {
|
|
3
|
+
HttpError,
|
|
4
|
+
TimeoutError,
|
|
5
|
+
NetworkError,
|
|
6
|
+
} from "./errors.js";
|
|
7
|
+
|
|
8
|
+
// ---------------------------------------------------------------------------
|
|
9
|
+
// Types
|
|
10
|
+
// ---------------------------------------------------------------------------
|
|
11
|
+
|
|
12
|
+
/**
 * Options for controlling FastFetch behavior.
 *
 * These extend the standard `RequestInit`, so they are passed in the same
 * object as `method`, `headers`, `body`, etc.
 */
export interface FastFetchOptions {
  /** Number of retries on failure (default: 0). */
  retries?: number;
  /**
   * Base delay in ms before next retry — applied with exponential backoff (default: 1000).
   * Ignored when a `Retry-After` header is present on a 429 response.
   */
  retryDelay?: number;
  /** Deduplicate identical in-flight requests (default: true). */
  deduplicate?: boolean;
  /**
   * Custom retry predicate. Return `true` to retry.
   * Receives the raw Response (for HTTP errors) or an Error (for network failures),
   * plus the current attempt number (1-indexed).
   * Overrides the built-in smart retry logic when provided.
   * NOTE(review): even when this returns `true`, retries are still capped by
   * `retries` (the loop also checks `attempt <= retries`) — confirm that cap
   * is intended before relying on unbounded custom retries.
   */
  shouldRetry?: (errorOrResponse: any, attempt: number) => boolean;
  /**
   * Abort the request after this many milliseconds.
   * Throws a `TimeoutError` when the request is aborted by FastFetch's own timer.
   */
  timeout?: number;
  /**
   * Throw an `HttpError` instead of returning a non-ok Response (default: false).
   * `FastFetchClient` sets this to `true` by default.
   */
  throwOnError?: boolean;
  /**
   * Cache successful GET responses in memory (default: false).
   * Only applies to GET requests.
   */
  fastCache?: boolean;
  /** How long (ms) to keep a cached response fresh (default: 30 000 ms = 30 s). */
  cacheTTL?: number;
  /**
   * Called each time a retry is about to happen.
   * @param attempt The attempt number that just failed (1-indexed).
   * @param error The error/Response that caused the retry.
   * @param delay How long (ms) FastFetch will wait before the next attempt.
   */
  onRetry?: (attempt: number, error: any, delay: number) => void;
  /** Enable verbose debug logging to console (default: false). */
  debug?: boolean;
}
|
|
59
|
+
|
|
60
|
+
// ---------------------------------------------------------------------------
|
|
61
|
+
// Internal state
|
|
62
|
+
// ---------------------------------------------------------------------------
|
|
63
|
+
|
|
64
|
+
/**
 * In-flight request map for deduplication.
 * Maps a stable request signature → the pending Promise<Response>.
 * Module-global, so deduplication spans every caller of `fastFetch` in the
 * process; entries are removed as soon as the underlying request settles.
 */
const inFlightMap = new Map<string, Promise<Response>>();

/** TTL cache entry. */
interface CacheEntry {
  // A clone of the fetched Response, so the body handed to callers stays
  // independent of the cached copy.
  response: Response;
  // Absolute epoch-ms deadline; the entry is stale once Date.now() passes it.
  expiresAt: number;
}

/**
 * In-memory TTL cache for successful GET responses.
 * NOTE(review): unbounded — stale entries are only evicted lazily when the
 * same key is looked up again, or via `clearCache()`; confirm that is
 * acceptable for long-running processes.
 */
const responseCache = new Map<string, CacheEntry>();
|
|
78
|
+
|
|
79
|
+
// ---------------------------------------------------------------------------
|
|
80
|
+
// Helpers
|
|
81
|
+
// ---------------------------------------------------------------------------
|
|
82
|
+
|
|
83
|
+
/** Stable key for dedup and cache, based on URL + method + headers + body. */
|
|
84
|
+
function makeKey(input: RequestInfo, init?: RequestInit): string {
|
|
85
|
+
const normalized = {
|
|
86
|
+
url: typeof input === "string" ? input : (input as Request).url,
|
|
87
|
+
method: (init?.method ?? "GET").toUpperCase(),
|
|
88
|
+
headers: init?.headers ?? {},
|
|
89
|
+
body: init?.body ?? null,
|
|
90
|
+
};
|
|
91
|
+
return JSON.stringify(normalized);
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
/** Returns true for HTTP status codes that we retry by default. */
|
|
95
|
+
function isRetryableStatus(status: number): boolean {
|
|
96
|
+
return status === 429 || (status >= 500 && status <= 599);
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Parse the `Retry-After` response header and return the delay in ms.
|
|
101
|
+
* Supports both numeric (seconds) and HTTP-date formats.
|
|
102
|
+
* Returns `null` if the header is missing or unparseable.
|
|
103
|
+
*/
|
|
104
|
+
function parseRetryAfter(response: Response): number | null {
|
|
105
|
+
const header = response.headers?.get("Retry-After");
|
|
106
|
+
if (!header) return null;
|
|
107
|
+
|
|
108
|
+
// Numeric: number of seconds
|
|
109
|
+
const seconds = Number(header);
|
|
110
|
+
if (!isNaN(seconds)) return Math.max(0, seconds * 1000);
|
|
111
|
+
|
|
112
|
+
// HTTP-date: "Wed, 21 Oct 2025 07:28:00 GMT"
|
|
113
|
+
const date = new Date(header);
|
|
114
|
+
if (!isNaN(date.getTime())) {
|
|
115
|
+
return Math.max(0, date.getTime() - Date.now());
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
return null;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
/** Run fetch with an AbortController-based timeout. */
|
|
122
|
+
async function fetchWithTimeout(
|
|
123
|
+
input: RequestInfo,
|
|
124
|
+
init: RequestInit,
|
|
125
|
+
timeoutMs: number,
|
|
126
|
+
): Promise<Response> {
|
|
127
|
+
const controller = new AbortController();
|
|
128
|
+
const timer = setTimeout(() => controller.abort(), timeoutMs);
|
|
129
|
+
try {
|
|
130
|
+
return await fetch(input, { ...init, signal: controller.signal });
|
|
131
|
+
} finally {
|
|
132
|
+
clearTimeout(timer);
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
function sleep(ms: number): Promise<void> {
|
|
137
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// ---------------------------------------------------------------------------
|
|
141
|
+
// Core fastFetch function
|
|
142
|
+
// ---------------------------------------------------------------------------
|
|
143
|
+
|
|
144
|
+
/**
 * FastFetch — A smarter `fetch()` wrapper.
 *
 * Features:
 * - **Retry** with exponential backoff (`retries`, `retryDelay`, `shouldRetry`, `onRetry`)
 * - **Smart default retry** — automatically retries 5xx and 429 responses
 * - **Retry-After** — respects the `Retry-After` header on 429 responses
 * - **Timeout** — aborts the request after `timeout` ms
 * - **Deduplication** — merges identical in-flight requests
 * - **TTL Cache** — caches successful GET responses for `cacheTTL` ms
 * - **Debug logging** — optional verbose logging via `debug: true`
 *
 * @param input URL string or Request, as for the standard `fetch`.
 * @param init Standard `RequestInit` plus `FastFetchOptions` mixed in.
 * @returns A clone of the final Response (callers get an unread body).
 * @throws TimeoutError when FastFetch's own timer aborts the request.
 * @throws NetworkError when fetch fails with a TypeError (network failure).
 * @throws HttpError when `throwOnError` is true and the response is not ok.
 */
export async function fastFetch(
  input: RequestInfo,
  init?: RequestInit & FastFetchOptions,
): Promise<Response> {
  const {
    retries = 0,
    retryDelay = 1000,
    deduplicate = true,
    shouldRetry,
    timeout,
    fastCache = false,
    cacheTTL = 30_000,
    onRetry,
    throwOnError = false,
    debug = false,
  } = init || {};

  const method = (init?.method ?? "GET").toUpperCase();
  const isGet = method === "GET";

  const log = (...args: any[]) => {
    if (debug) console.log("[FastFetch]", ...args);
  };

  log("Starting request:", method, input);

  // ── TTL Cache check (GET only) ─────────────────────────────────────────
  const cacheKey = makeKey(input, init);
  if (fastCache && isGet) {
    const entry = responseCache.get(cacheKey);
    if (entry && Date.now() < entry.expiresAt) {
      log("Cache hit:", cacheKey);
      // clone() so each caller receives an independent, unread body.
      return entry.response.clone();
    }
    // Stale or missing — remove if it was there
    responseCache.delete(cacheKey);
  }

  // ── Deduplication check ────────────────────────────────────────────────
  const dedupKey = cacheKey; // same key works for both
  if (deduplicate) {
    if (inFlightMap.has(dedupKey)) {
      log("Reusing in-flight request:", dedupKey);
      // Followers share the leader's promise; each gets its own clone.
      const shared = await inFlightMap.get(dedupKey)!;
      return shared.clone();
    }
  }

  // ── Build the retry loop ───────────────────────────────────────────────
  let attempt = 0;

  // The IIFE starts the request immediately; it is registered in
  // inFlightMap synchronously below, before any await can interleave.
  const promise = (async function fetchWithRetry(): Promise<Response> {
    while (true) {
      attempt++;
      log(`Attempt #${attempt}`);

      let response: Response;
      try {
        response = timeout
          ? await fetchWithTimeout(input, init ?? {}, timeout)
          : await fetch(input, init);
      } catch (rawError: any) {
        // Wrap raw DOMException / TypeError into typed FastFetch errors
        let typedError: Error;
        if (rawError?.name === "AbortError" && timeout) {
          typedError = new TimeoutError(timeout);
        } else if (
          rawError instanceof TypeError ||
          rawError?.name === "TypeError"
        ) {
          typedError = new NetworkError(rawError.message, rawError);
        } else {
          typedError = rawError; // pass CircuitOpenError & others through
        }

        log(`Network/abort error on attempt #${attempt}:`, typedError.message);

        // TimeoutError — don't retry by default (retrying a timed-out request
        // will just time out again and waste the user's time).
        const isTimeout = typedError instanceof TimeoutError;

        const shouldDoRetry = shouldRetry
          ? shouldRetry(typedError, attempt)
          : !isTimeout && attempt <= retries;

        // NOTE: even a custom shouldRetry is capped by `retries` here.
        if (shouldDoRetry && attempt <= retries) {
          // Exponential backoff: retryDelay, 2x, 4x, …
          const delay = retryDelay * Math.pow(2, attempt - 1);
          log(`Retrying in ${delay}ms…`);
          onRetry?.(attempt, typedError, delay);
          await sleep(delay);
          continue;
        }

        throw typedError;
      }

      // HTTP-level failure
      if (!response.ok) {
        const shouldDoRetry = shouldRetry
          ? shouldRetry(response, attempt)
          : isRetryableStatus(response.status) && attempt <= retries;

        if (shouldDoRetry && attempt <= retries) {
          // Respect Retry-After header (e.g. on 429)
          const retryAfterMs = parseRetryAfter(response);
          const delay = retryAfterMs ?? retryDelay * Math.pow(2, attempt - 1);
          log(
            `HTTP ${response.status} — retrying in ${delay}ms (attempt ${attempt}/${retries})`,
          );
          onRetry?.(attempt, response, delay);
          await sleep(delay);
          continue;
        }
      }

      log(`Succeeded on attempt #${attempt} (status: ${response.status})`);

      // Throw HttpError if caller opted in and response is not ok
      if (!response.ok && throwOnError) {
        throw new HttpError(response);
      }

      return response;
    }
  })();

  // ── Register in-flight for dedup ───────────────────────────────────────
  if (deduplicate) {
    inFlightMap.set(dedupKey, promise);
  }

  let result: Response;
  try {
    result = await promise;
  } finally {
    // Always unregister, even on failure, so a failed request is not
    // "shared" with future callers.
    if (deduplicate) {
      inFlightMap.delete(dedupKey);
    }
  }

  // ── Populate TTL cache ─────────────────────────────────────────────────
  if (fastCache && isGet && result!.ok) {
    // Store a clone so the copy returned below stays unread.
    responseCache.set(cacheKey, {
      response: result!.clone(),
      expiresAt: Date.now() + cacheTTL,
    });
    log(`Cached response for ${cacheTTL}ms`);
  }

  return result!.clone();
}
|
|
307
|
+
|
|
308
|
+
/**
|
|
309
|
+
* Manually clear all TTL-cached responses, or a single entry by URL.
|
|
310
|
+
*/
|
|
311
|
+
export function clearCache(url?: string): void {
|
|
312
|
+
if (url) {
|
|
313
|
+
// Remove any entry whose key contains the URL
|
|
314
|
+
for (const key of responseCache.keys()) {
|
|
315
|
+
if (key.includes(url)) responseCache.delete(key);
|
|
316
|
+
}
|
|
317
|
+
} else {
|
|
318
|
+
responseCache.clear();
|
|
319
|
+
}
|
|
320
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
// ---------------------------------------------------------------------------
// Public package entry point — re-exports every public symbol from the
// internal modules. Consumers should import from here, not from the
// individual source files.
// ---------------------------------------------------------------------------

// Core fetch function + options
export { fastFetch, clearCache } from "./fastFetch.js";
export type { FastFetchOptions } from "./fastFetch.js";

// Client factory + types
export { createClient, FastFetchClient } from "./client.js";
export type {
  ClientOptions,
  RequestInterceptor,
  ResponseInterceptor,
  ErrorInterceptor,
} from "./client.js";

// Typed error hierarchy
export {
  FastFetchError,
  HttpError,
  TimeoutError,
  NetworkError,
  CircuitOpenError,
} from "./errors.js";

// Metrics
export { MetricsCollector } from "./metrics.js";
export type {
  MetricsSnapshot,
  EndpointMetrics,
  LatencyPercentiles,
} from "./metrics.js";

// Circuit breaker
export { CircuitBreaker } from "./circuit-breaker.js";
export type {
  CircuitBreakerOptions,
  CircuitState,
} from "./circuit-breaker.js";

// Concurrency queue
export { RequestQueue } from "./queue.js";
export type { QueuePriority } from "./queue.js";

// Middleware
export { compose } from "./middleware.js";
export type { FastFetchContext, MiddlewareFn } from "./middleware.js";

// SSE Streaming
export { consumeSSE } from "./streaming.js";
export type { SSEEvent, SSEHandler } from "./streaming.js";

// Offline queue
export { OfflineQueue } from "./offline-queue.js";
export type { OfflineRequest, ReplayResult } from "./offline-queue.js";
|
package/src/metrics.ts
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
/**
 * FastFetch lightweight metrics collector.
 *
 * Tracks request latencies, success/error rates, and per-endpoint breakdowns
 * using a rolling window of the last N samples.
 */

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

export interface LatencyPercentiles {
  /** Median (50th percentile) latency in ms. */
  p50: number;
  /** 95th percentile latency in ms. */
  p95: number;
  /** 99th percentile latency in ms. */
  p99: number;
  /** Smallest observed latency in ms (0 when the window is empty). */
  min: number;
  /** Largest observed latency in ms (0 when the window is empty). */
  max: number;
  /** Mean (average) latency in ms, rounded to the nearest integer. */
  mean: number;
}

export interface EndpointMetrics {
  /** Total requests recorded for this endpoint. */
  count: number;
  /** Number of failed requests (non-2xx or thrown error). */
  errors: number;
  /** Success rate in [0, 1]. */
  successRate: number;
  /** Latency summary for this endpoint only. */
  latency: LatencyPercentiles;
}

export interface MetricsSnapshot {
  /** Total requests in the current rolling window. */
  totalRequests: number;
  /** Overall success rate in [0, 1]; reported as 1 when the window is empty. */
  successRate: number;
  /** Overall error rate in [0, 1]. */
  errorRate: number;
  /** Aggregated latency across all endpoints. */
  latency: LatencyPercentiles;
  /** Per-endpoint breakdown (URL paths normalised — numeric IDs replaced with :id). */
  byEndpoint: Record<string, EndpointMetrics>;
}

/** One recorded observation; internal to the collector. */
interface Sample {
  // URL as supplied by the caller (normalised only at snapshot time).
  url: string;
  // Wall-clock duration in ms.
  duration: number;
  // Whether the caller counted this request as successful.
  success: boolean;
  // HTTP status, when one was received.
  status?: number;
}
|
|
54
|
+
|
|
55
|
+
// ---------------------------------------------------------------------------
|
|
56
|
+
// MetricsCollector
|
|
57
|
+
// ---------------------------------------------------------------------------
|
|
58
|
+
|
|
59
|
+
export class MetricsCollector {
|
|
60
|
+
private samples: Sample[] = [];
|
|
61
|
+
private readonly windowSize: number;
|
|
62
|
+
|
|
63
|
+
/**
|
|
64
|
+
* @param windowSize Maximum number of samples to retain (default: 1 000).
|
|
65
|
+
* Older samples are evicted when the window is full.
|
|
66
|
+
*/
|
|
67
|
+
constructor(windowSize = 1_000) {
|
|
68
|
+
this.windowSize = windowSize;
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
/** Record a completed request. Called automatically by FastFetchClient. */
|
|
72
|
+
record(
|
|
73
|
+
url: string,
|
|
74
|
+
duration: number,
|
|
75
|
+
success: boolean,
|
|
76
|
+
status?: number,
|
|
77
|
+
): void {
|
|
78
|
+
this.samples.push({ url, duration, success, status });
|
|
79
|
+
if (this.samples.length > this.windowSize) {
|
|
80
|
+
this.samples.shift(); // evict oldest
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// ── Percentile computation ───────────────────────────────────────────────
|
|
85
|
+
|
|
86
|
+
private percentile(sorted: number[], p: number): number {
|
|
87
|
+
if (sorted.length === 0) return 0;
|
|
88
|
+
const idx = Math.ceil((p / 100) * sorted.length) - 1;
|
|
89
|
+
return sorted[Math.max(0, idx)];
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
private computeLatency(durations: number[]): LatencyPercentiles {
|
|
93
|
+
if (durations.length === 0) {
|
|
94
|
+
return { p50: 0, p95: 0, p99: 0, min: 0, max: 0, mean: 0 };
|
|
95
|
+
}
|
|
96
|
+
const sorted = [...durations].sort((a, b) => a - b);
|
|
97
|
+
const sum = sorted.reduce((s, v) => s + v, 0);
|
|
98
|
+
return {
|
|
99
|
+
p50: this.percentile(sorted, 50),
|
|
100
|
+
p95: this.percentile(sorted, 95),
|
|
101
|
+
p99: this.percentile(sorted, 99),
|
|
102
|
+
min: sorted[0],
|
|
103
|
+
max: sorted[sorted.length - 1],
|
|
104
|
+
mean: Math.round(sum / sorted.length),
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
/**
|
|
109
|
+
* Normalise a URL for grouping — strips query params and replaces numeric
|
|
110
|
+
* path segments with `:id` so `/users/1` and `/users/2` are the same bucket.
|
|
111
|
+
*/
|
|
112
|
+
private normalizeUrl(url: string): string {
|
|
113
|
+
try {
|
|
114
|
+
const u = new URL(url);
|
|
115
|
+
// Replace numeric-only path segments with :id
|
|
116
|
+
const path = u.pathname.replace(/\/\d+(?=\/|$)/g, "/:id");
|
|
117
|
+
return u.hostname + path;
|
|
118
|
+
} catch {
|
|
119
|
+
return url.replace(/\/\d+(?=\/|$)/g, "/:id");
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
// ── Public API ───────────────────────────────────────────────────────────
|
|
124
|
+
|
|
125
|
+
/**
|
|
126
|
+
* Return a serialisable metrics snapshot of the current rolling window.
|
|
127
|
+
*
|
|
128
|
+
* @example
|
|
129
|
+
* ```ts
|
|
130
|
+
* console.table(api.metrics.snapshot().byEndpoint);
|
|
131
|
+
* ```
|
|
132
|
+
*/
|
|
133
|
+
snapshot(): MetricsSnapshot {
|
|
134
|
+
const total = this.samples.length;
|
|
135
|
+
const emptyLatency: LatencyPercentiles = {
|
|
136
|
+
p50: 0,
|
|
137
|
+
p95: 0,
|
|
138
|
+
p99: 0,
|
|
139
|
+
min: 0,
|
|
140
|
+
max: 0,
|
|
141
|
+
mean: 0,
|
|
142
|
+
};
|
|
143
|
+
|
|
144
|
+
if (total === 0) {
|
|
145
|
+
return {
|
|
146
|
+
totalRequests: 0,
|
|
147
|
+
successRate: 1,
|
|
148
|
+
errorRate: 0,
|
|
149
|
+
latency: emptyLatency,
|
|
150
|
+
byEndpoint: {},
|
|
151
|
+
};
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
const errors = this.samples.filter((s) => !s.success).length;
|
|
155
|
+
const allDurations = this.samples.map((s) => s.duration);
|
|
156
|
+
|
|
157
|
+
// Group samples by normalised URL path
|
|
158
|
+
const groups = new Map<string, Sample[]>();
|
|
159
|
+
for (const sample of this.samples) {
|
|
160
|
+
const key = this.normalizeUrl(sample.url);
|
|
161
|
+
const existing = groups.get(key);
|
|
162
|
+
if (existing) {
|
|
163
|
+
existing.push(sample);
|
|
164
|
+
} else {
|
|
165
|
+
groups.set(key, [sample]);
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
const byEndpoint: Record<string, EndpointMetrics> = {};
|
|
170
|
+
for (const [key, bucket] of groups) {
|
|
171
|
+
const bucketErrors = bucket.filter((s) => !s.success).length;
|
|
172
|
+
byEndpoint[key] = {
|
|
173
|
+
count: bucket.length,
|
|
174
|
+
errors: bucketErrors,
|
|
175
|
+
successRate: parseFloat(
|
|
176
|
+
((bucket.length - bucketErrors) / bucket.length).toFixed(4),
|
|
177
|
+
),
|
|
178
|
+
latency: this.computeLatency(bucket.map((s) => s.duration)),
|
|
179
|
+
};
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
return {
|
|
183
|
+
totalRequests: total,
|
|
184
|
+
successRate: parseFloat(((total - errors) / total).toFixed(4)),
|
|
185
|
+
errorRate: parseFloat((errors / total).toFixed(4)),
|
|
186
|
+
latency: this.computeLatency(allDurations),
|
|
187
|
+
byEndpoint,
|
|
188
|
+
};
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
/** Reset all collected samples. */
|
|
192
|
+
reset(): void {
|
|
193
|
+
this.samples = [];
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
/** Current number of samples in the rolling window. */
|
|
197
|
+
get size(): number {
|
|
198
|
+
return this.samples.length;
|
|
199
|
+
}
|
|
200
|
+
}
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
/**
 * FastFetch Koa-style middleware (plugin) system.
 *
 * Middleware functions receive a `FastFetchContext` object and a `next()`
 * callback. They can:
 * - Mutate `ctx.init` (headers, body, options) before the request is sent.
 * - Await `next()` and then inspect / transform `ctx.response` after.
 * - Attach data to `ctx.meta` for consumption by later middleware.
 *
 * Execution order follows the classic onion model:
 * mw[0] wraps mw[1] wraps mw[2] wraps … wraps the actual fetch.
 */

import type { FastFetchOptions } from "./fastFetch.js";

// ---------------------------------------------------------------------------
// Context
// ---------------------------------------------------------------------------

/**
 * Mutable request/response context passed through the middleware pipeline.
 * One context instance is shared by every middleware in a single pipeline run.
 */
export interface FastFetchContext {
  /** Fully-resolved URL (baseURL + path). */
  url: string;
  /** Uppercase HTTP method (GET, POST, …). */
  method: string;
  /**
   * Mutable request init — mutate `ctx.init.headers`, `ctx.init.body`, etc.
   * inside request middleware.
   */
  init: RequestInit & FastFetchOptions;
  /**
   * Set to the raw `Response` after the request completes.
   * Available inside response middleware (i.e., code after `await next()`).
   */
  response?: Response;
  /**
   * Request duration in milliseconds.
   * Available after `await next()` completes.
   */
  duration?: number;
  /**
   * Free-form metadata bag. Attach anything here and read it in later
   * middleware or after the final `await next()`.
   *
   * @example
   * ```ts
   * ctx.meta.requestId = crypto.randomUUID();
   * await next();
   * console.log(`${ctx.meta.requestId} — ${ctx.duration}ms`);
   * ```
   */
  meta: Record<string, unknown>;
}

// ---------------------------------------------------------------------------
// Middleware type
// ---------------------------------------------------------------------------

// A middleware must call `next()` exactly once (or not at all to short-circuit
// the chain); `compose` throws if it is called more than once.
export type MiddlewareFn = (
  ctx: FastFetchContext,
  next: () => Promise<void>,
) => Promise<void>;
|
|
65
|
+
|
|
66
|
+
// ---------------------------------------------------------------------------
|
|
67
|
+
// Compose
|
|
68
|
+
// ---------------------------------------------------------------------------
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Compose an array of middleware functions into a single callable.
|
|
72
|
+
*
|
|
73
|
+
* Works identically to Koa's `koa-compose`:
|
|
74
|
+
* - Middleware runs in insertion order during the "down" pass.
|
|
75
|
+
* - Code after `await next()` runs in reverse order during the "up" pass.
|
|
76
|
+
* - Calling `next()` more than once throws an Error.
|
|
77
|
+
*
|
|
78
|
+
* @example
|
|
79
|
+
* ```ts
|
|
80
|
+
* const run = compose([loggerMiddleware, authMiddleware]);
|
|
81
|
+
* await run(ctx);
|
|
82
|
+
* ```
|
|
83
|
+
*/
|
|
84
|
+
export function compose(
|
|
85
|
+
middlewares: MiddlewareFn[],
|
|
86
|
+
): (ctx: FastFetchContext) => Promise<void> {
|
|
87
|
+
return async function (ctx: FastFetchContext): Promise<void> {
|
|
88
|
+
let lastIndex = -1;
|
|
89
|
+
|
|
90
|
+
async function dispatch(i: number): Promise<void> {
|
|
91
|
+
if (i <= lastIndex) {
|
|
92
|
+
throw new Error(
|
|
93
|
+
"FastFetch middleware: next() was called more than once in the same middleware.",
|
|
94
|
+
);
|
|
95
|
+
}
|
|
96
|
+
lastIndex = i;
|
|
97
|
+
|
|
98
|
+
if (i >= middlewares.length) return; // reached the end of the chain
|
|
99
|
+
|
|
100
|
+
const fn = middlewares[i];
|
|
101
|
+
await fn(ctx, () => dispatch(i + 1));
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
await dispatch(0);
|
|
105
|
+
};
|
|
106
|
+
}
|