@resq-sw/rate-limiting 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,224 @@
1
+ # @resq-sw/rate-limiting
2
+
3
+ > Rate limiting algorithms, throttle/debounce utilities, and distributed rate limit stores.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ bun add @resq-sw/rate-limiting effect
9
+ ```
10
+
11
+ Optional peer dependencies for Redis-backed limiting:
12
+
13
+ ```bash
14
+ bun add @upstash/ratelimit @upstash/redis
15
+ ```
16
+
17
+ ## Quick Start
18
+
19
+ ```ts
20
+ import { throttle, debounce, TokenBucketLimiter } from "@resq-sw/rate-limiting";
21
+
22
+ const save = throttle(() => persist(data), 1000);
23
+ const search = debounce((q: string) => fetchResults(q), 300);
24
+
25
+ const limiter = new TokenBucketLimiter(5, 60000); // 5 per minute
26
+ await limiter.acquire();
27
+ ```
28
+
29
+ ## API Reference
30
+
31
+ ### `throttle(func, wait, options?)`
32
+
33
+ Limits function execution to at most once per `wait` milliseconds.
34
+
35
+ - **func** (`T extends (...args) => unknown`) -- function to throttle.
36
+ - **wait** (`number`) -- interval in ms.
37
+ - **options** (`ThrottleOptions`) -- see below.
38
+ - Returns the throttled function with a `.cancel()` method.
39
+
40
+ | Option | Type | Default | Description |
41
+ |--------|------|---------|-------------|
42
+ | `leading` | `boolean` | `true` | Call on leading edge |
43
+ | `trailing` | `boolean` | `true` | Call on trailing edge |
44
+
45
+ ```ts
46
+ const fn = throttle(handleScroll, 200, { trailing: false });
47
+ fn(); // executes immediately
48
+ fn(); // ignored
49
+ fn.cancel(); // cancel pending trailing call
50
+ ```
51
+
52
+ ### `debounce(func, wait, options?)`
53
+
54
+ Delays function execution until `wait` ms after the last call.
55
+
56
+ - **func** (`T extends (...args) => unknown`) -- function to debounce.
57
+ - **wait** (`number`) -- delay in ms.
58
+ - **options** (`DebounceOptions`) -- see below.
59
+ - Returns the debounced function with `.cancel()` and `.flush()` methods.
60
+
61
+ | Option | Type | Default | Description |
62
+ |--------|------|---------|-------------|
63
+ | `leading` | `boolean` | `false` | Call on leading edge |
64
+ | `maxWait` | `number` | -- | Maximum time before forced execution |
65
+
66
+ ```ts
67
+ const search = debounce(fetchResults, 300, { maxWait: 1000 });
68
+ search("a"); search("ab"); search("abc");
69
+ // Executes after 300ms idle, or forced after 1000ms
70
+ search.flush(); // execute immediately
71
+ search.cancel(); // cancel pending
72
+ ```
73
+
74
+ ### `KeyedThrottle<T>`
75
+
76
+ Per-key throttle manager. Each key gets its own independent throttle.
77
+
78
+ ```ts
79
+ const perUser = new KeyedThrottle(handleRequest, 1000);
80
+ perUser.execute("user:123", requestData); // throttled per user
81
+ perUser.cancel("user:123"); // cancel specific key
82
+ perUser.cancelAll(); // cancel all keys
83
+ perUser.getStats(); // { activeKeys: number, keys: string[] }
84
+ ```
85
+
86
+ ### `KeyedDebounce<T>`
87
+
88
+ Per-key debounce manager. Each key gets its own independent debounce.
89
+
90
+ ```ts
91
+ const perField = new KeyedDebounce(validate, 300);
92
+ perField.execute("email", value); // debounced per field
93
+ perField.flush("email"); // execute immediately
94
+ perField.cancel("email"); // cancel specific key
95
+ perField.cancelAll(); // cancel all keys
96
+ perField.getStats(); // { activeKeys, keys }
97
+ ```
98
+
99
+ ### `TokenBucketLimiter`
100
+
101
+ Token bucket algorithm -- tokens refill over time.
102
+
103
+ ```ts
104
+ const limiter = new TokenBucketLimiter(capacity, windowMs);
105
+ ```
106
+
107
+ | Parameter | Type | Description |
108
+ |-----------|------|-------------|
109
+ | `capacity` | `number` | Max tokens (burst size) |
110
+ | `windowMs` | `number` | Refill window in ms |
111
+
112
+ | Method | Returns | Description |
113
+ |--------|---------|-------------|
114
+ | `acquire()` | `Promise<void>` | Wait for a token (blocks if empty) |
115
+ | `tryAcquire()` | `boolean` | Try to get a token without waiting |
116
+ | `getStats()` | `RateLimiterStats` | `{ availableTokens, queueSize, capacity }` |
117
+ | `reset()` | `void` | Reset to full capacity |
118
+
119
+ ```ts
120
+ const limiter = new TokenBucketLimiter(10, 60000); // 10 req/min
121
+ if (limiter.tryAcquire()) {
122
+ await handleRequest();
123
+ } else {
124
+ return new Response("Too Many Requests", { status: 429 });
125
+ }
126
+ ```
127
+
128
+ ### `LeakyBucketLimiter`
129
+
130
+ Leaky bucket algorithm -- requests drain at a constant rate for smoother limiting.
131
+
132
+ ```ts
133
+ const limiter = new LeakyBucketLimiter(capacity, requestsPerSecond);
134
+ ```
135
+
136
+ | Parameter | Type | Description |
137
+ |-----------|------|-------------|
138
+ | `capacity` | `number` | Max queue size |
139
+ | `requestsPerSecond` | `number` | Drain rate |
140
+
141
+ | Method | Returns | Description |
142
+ |--------|---------|-------------|
143
+ | `acquire()` | `Promise<void>` | Queue a request (throws if queue full) |
144
+ | `tryAcquire()` | `boolean` | Check if immediate processing is possible |
145
+ | `getStats()` | `RateLimiterStats` | `{ availableTokens, queueSize, capacity }` |
146
+ | `reset()` | `void` | Clear the queue |
147
+
148
+ ```ts
149
+ const limiter = new LeakyBucketLimiter(100, 10); // 100 queue, 10 req/sec
150
+ try {
151
+ await limiter.acquire();
152
+ await processRequest();
153
+ } catch (e) {
154
+ // "Rate limit exceeded: queue full"
155
+ }
156
+ ```
157
+
158
+ ### `SlidingWindowCounter`
159
+
160
+ Sliding window counter for accurate per-key rate limiting.
161
+
162
+ ```ts
163
+ const counter = new SlidingWindowCounter(windowMs, maxRequests);
164
+ ```
165
+
166
+ | Method | Returns | Description |
167
+ |--------|---------|-------------|
168
+ | `check(key)` | `{ allowed, remaining, resetAt }` | Check and increment counter |
169
+ | `reset(key)` | `void` | Reset counter for a key |
170
+ | `getStats()` | `KeyedStats` | `{ activeKeys, keys }` |
171
+
172
+ ```ts
173
+ const counter = new SlidingWindowCounter(60000, 100); // 100 per minute
174
+ const { allowed, remaining, resetAt } = counter.check("user:123");
175
+ if (!allowed) {
176
+ return new Response("Rate limited", {
177
+ status: 429,
178
+ headers: { "X-RateLimit-Remaining": String(remaining), "X-RateLimit-Reset": String(resetAt) },
179
+ });
180
+ }
181
+ ```
182
+
183
+ ### Rate Limit Stores
184
+
185
+ #### `MemoryRateLimitStore`
186
+
187
+ In-memory store implementing `IRateLimitStore`. Suitable for single-process applications.
188
+
189
+ ```ts
190
+ const store = new MemoryRateLimitStore();
191
+ const result = await store.check("key", windowMs, maxRequests);
192
+ // { limited, remaining, resetTime, total }
193
+ ```
194
+
195
+ #### `RedisRateLimitStore`
196
+
197
+ Redis-backed store using `@upstash/ratelimit` with sliding window algorithm.
198
+
199
+ ```ts
200
+ import { Redis } from "@upstash/redis";
201
+ const store = new RedisRateLimitStore(new Redis({ url: "...", token: "..." }));
202
+ const result = await store.check("key", 60000, 100);
203
+ ```
204
+
205
+ ### Presets
206
+
207
+ ```ts
208
+ import { RATE_LIMIT_PRESETS } from "@resq-sw/rate-limiting";
209
+ ```
210
+
211
+ | Preset | Window | Max Requests |
212
+ |--------|--------|-------------|
213
+ | `auth` | 15 min | 5 |
214
+ | `api` | 1 min | 100 |
215
+ | `read` | 1 min | 200 |
216
+ | `upload` | 1 hour | 20 |
217
+
218
+ ### Effect Schemas
219
+
220
+ Exported for runtime validation: `ThrottleOptionsSchema`, `DebounceOptionsSchema`, `RateLimiterStatsSchema`, `KeyedStatsSchema`, `RateLimitConfigSchema`, `RateLimitCheckResultSchema`.
221
+
222
+ ## License
223
+
224
+ Apache-2.0
package/lib/index.d.ts ADDED
@@ -0,0 +1,3 @@
1
+ import { DebounceOptions, DebounceOptionsSchema, KeyedDebounce, KeyedStats, KeyedStatsSchema, KeyedThrottle, LeakyBucketLimiter, RateLimiterStats, RateLimiterStatsSchema, SlidingWindowCounter, ThrottleOptions, ThrottleOptionsSchema, TokenBucketLimiter, debounce, throttle } from "./throttle.js";
2
+ import { IRateLimitStore, MemoryRateLimitStore, RATE_LIMIT_PRESETS, RateLimitCheckResult, RateLimitCheckResultSchema, RateLimitConfig, RateLimitConfigSchema, RedisRateLimitStore } from "./rate-limit.js";
3
+ export { DebounceOptions, DebounceOptionsSchema, IRateLimitStore, KeyedDebounce, KeyedStats, KeyedStatsSchema, KeyedThrottle, LeakyBucketLimiter, MemoryRateLimitStore, RATE_LIMIT_PRESETS, RateLimitCheckResult, RateLimitCheckResultSchema, RateLimitConfig, RateLimitConfigSchema, RateLimiterStats, RateLimiterStatsSchema, RedisRateLimitStore, SlidingWindowCounter, ThrottleOptions, ThrottleOptionsSchema, TokenBucketLimiter, debounce, throttle };
package/lib/index.js ADDED
@@ -0,0 +1,3 @@
1
+ import { DebounceOptionsSchema, KeyedDebounce, KeyedStatsSchema, KeyedThrottle, LeakyBucketLimiter, RateLimiterStatsSchema, SlidingWindowCounter, ThrottleOptionsSchema, TokenBucketLimiter, debounce, throttle } from "./throttle.js";
2
+ import { MemoryRateLimitStore, RATE_LIMIT_PRESETS, RateLimitCheckResultSchema, RateLimitConfigSchema, RedisRateLimitStore } from "./rate-limit.js";
3
+ export { DebounceOptionsSchema, KeyedDebounce, KeyedStatsSchema, KeyedThrottle, LeakyBucketLimiter, MemoryRateLimitStore, RATE_LIMIT_PRESETS, RateLimitCheckResultSchema, RateLimitConfigSchema, RateLimiterStatsSchema, RedisRateLimitStore, SlidingWindowCounter, ThrottleOptionsSchema, TokenBucketLimiter, debounce, throttle };
@@ -0,0 +1,61 @@
1
+ import { Schema } from "effect";
2
+ import { Redis } from "@upstash/redis";
3
+
4
+ //#region src/rate-limit.d.ts
5
+ declare const RateLimitConfigSchema: Schema.Struct<{
6
+ readonly windowMs: Schema.Number;
7
+ readonly maxRequests: Schema.Number;
8
+ readonly headers: Schema.optional<Schema.Boolean>;
9
+ }>;
10
+ type RateLimitConfig = typeof RateLimitConfigSchema.Type;
11
+ declare const RateLimitCheckResultSchema: Schema.Struct<{
12
+ readonly limited: Schema.Boolean;
13
+ readonly remaining: Schema.Number;
14
+ readonly resetTime: Schema.Number;
15
+ readonly total: Schema.Number;
16
+ }>;
17
+ type RateLimitCheckResult = typeof RateLimitCheckResultSchema.Type;
18
+ interface IRateLimitStore {
19
+ check(key: string, windowMs: number, maxRequests: number): Promise<RateLimitCheckResult>;
20
+ reset(key: string): Promise<void>;
21
+ }
22
+ /**
23
+ * Redis-backed rate limit store using @upstash/ratelimit
24
+ */
25
+ declare class RedisRateLimitStore implements IRateLimitStore {
26
+ private readonly limiters;
27
+ private readonly redis;
28
+ constructor(redisClient: Redis);
29
+ private getLimiter;
30
+ check(key: string, windowMs: number, maxRequests: number): Promise<RateLimitCheckResult>;
31
+ reset(key: string): Promise<void>;
32
+ }
33
+ /**
34
+ * In-memory rate limit store for local development or simple services
35
+ */
36
+ declare class MemoryRateLimitStore implements IRateLimitStore {
37
+ private readonly store;
38
+ check(key: string, windowMs: number, maxRequests: number): Promise<RateLimitCheckResult>;
39
+ reset(key: string): Promise<void>;
40
+ }
41
+ declare const RATE_LIMIT_PRESETS: {
42
+ readonly auth: {
43
+ readonly windowMs: number;
44
+ readonly maxRequests: 5;
45
+ };
46
+ readonly api: {
47
+ readonly windowMs: number;
48
+ readonly maxRequests: 100;
49
+ };
50
+ readonly read: {
51
+ readonly windowMs: number;
52
+ readonly maxRequests: 200;
53
+ };
54
+ readonly upload: {
55
+ readonly windowMs: number;
56
+ readonly maxRequests: 20;
57
+ };
58
+ };
59
+ //#endregion
60
+ export { IRateLimitStore, MemoryRateLimitStore, RATE_LIMIT_PRESETS, RateLimitCheckResult, RateLimitCheckResultSchema, RateLimitConfig, RateLimitConfigSchema, RedisRateLimitStore };
61
+ //# sourceMappingURL=rate-limit.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"rate-limit.d.ts","names":[],"sources":["../src/rate-limit.ts"],"mappings":";;;;cA6Ba,qBAAA,EAAqB,MAAA,CAAA,MAAA;EAAA;;;;KAMtB,eAAA,UAAyB,qBAAA,CAAsB,IAAA;AAAA,cAE9C,0BAAA,EAA0B,MAAA,CAAA,MAAA;EAAA;;;;;KAO3B,oBAAA,UAA8B,0BAAA,CAA2B,IAAA;AAAA,UAMpD,eAAA;EACf,KAAA,CAAM,GAAA,UAAa,QAAA,UAAkB,WAAA,WAAsB,OAAA,CAAQ,oBAAA;EACnE,KAAA,CAAM,GAAA,WAAc,OAAA;AAAA;AAftB;;;AAAA,cAqBa,mBAAA,YAA+B,eAAA;EAAA,iBACzB,QAAA;EAAA,iBACA,KAAA;cAEL,WAAA,EAAa,KAAA;EAAA,QAIjB,UAAA;EAcF,KAAA,CAAM,GAAA,UAAa,QAAA,UAAkB,WAAA,WAAsB,OAAA,CAAQ,oBAAA;EAYnE,KAAA,CAAM,GAAA,WAAc,OAAA;AAAA;;;;cASf,oBAAA,YAAgC,eAAA;EAAA,iBAC1B,KAAA;EAEX,KAAA,CAAM,GAAA,UAAa,QAAA,UAAkB,WAAA,WAAsB,OAAA,CAAQ,oBAAA;EAyBnE,KAAA,CAAM,GAAA,WAAc,OAAA;AAAA;AAAA,cASf,kBAAA;EAAA"}
@@ -0,0 +1,121 @@
1
+ import { Schema } from "effect";
2
+ import { Ratelimit } from "@upstash/ratelimit";
3
+ //#region src/rate-limit.ts
4
+ /**
5
+ * Copyright 2026 ResQ
6
+ *
7
+ * Licensed under the Apache License, Version 2.0 (the "License");
8
+ * you may not use this file except in compliance with the License.
9
+ * You may obtain a copy of the License at
10
+ *
11
+ * http://www.apache.org/licenses/LICENSE-2.0
12
+ *
13
+ * Unless required by applicable law or agreed to in writing, software
14
+ * distributed under the License is distributed on an "AS IS" BASIS,
15
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16
+ * See the License for the specific language governing permissions and
17
+ * limitations under the License.
18
+ */
19
+ /**
20
+ * @file Rate Limiting Utilities
21
+ * @module @resq/typescript/utils/middleware/rate-limit
22
+ */
23
+ const RateLimitConfigSchema = Schema.Struct({
24
+ windowMs: Schema.Number,
25
+ maxRequests: Schema.Number,
26
+ headers: Schema.optional(Schema.Boolean)
27
+ });
28
+ const RateLimitCheckResultSchema = Schema.Struct({
29
+ limited: Schema.Boolean,
30
+ remaining: Schema.Number,
31
+ resetTime: Schema.Number,
32
+ total: Schema.Number
33
+ });
34
+ /**
35
+ * Redis-backed rate limit store using @upstash/ratelimit
36
+ */
37
+ var RedisRateLimitStore = class {
38
+ limiters = /* @__PURE__ */ new Map();
39
+ redis;
40
+ constructor(redisClient) {
41
+ this.redis = redisClient;
42
+ }
43
+ getLimiter(windowMs, maxRequests) {
44
+ const key = `${windowMs}:${maxRequests}`;
45
+ let limiter = this.limiters.get(key);
46
+ if (!limiter) {
47
+ limiter = new Ratelimit({
48
+ redis: this.redis,
49
+ limiter: Ratelimit.slidingWindow(maxRequests, `${windowMs} ms`),
50
+ prefix: "@resq/ratelimit"
51
+ });
52
+ this.limiters.set(key, limiter);
53
+ }
54
+ return limiter;
55
+ }
56
+ async check(key, windowMs, maxRequests) {
57
+ const result = await this.getLimiter(windowMs, maxRequests).limit(key);
58
+ return {
59
+ limited: !result.success,
60
+ remaining: result.remaining,
61
+ resetTime: result.reset,
62
+ total: result.limit
63
+ };
64
+ }
65
+ async reset(key) {}
66
+ };
67
+ /**
68
+ * In-memory rate limit store for local development or simple services
69
+ */
70
+ var MemoryRateLimitStore = class {
71
+ store = /* @__PURE__ */ new Map();
72
+ async check(key, windowMs, maxRequests) {
73
+ const now = Date.now();
74
+ const state = this.store.get(key);
75
+ if (!state || state.resetTime <= now) {
76
+ const newState = {
77
+ count: 1,
78
+ resetTime: now + windowMs
79
+ };
80
+ this.store.set(key, newState);
81
+ return {
82
+ limited: false,
83
+ remaining: maxRequests - 1,
84
+ resetTime: newState.resetTime,
85
+ total: maxRequests
86
+ };
87
+ }
88
+ state.count++;
89
+ return {
90
+ limited: state.count > maxRequests,
91
+ remaining: Math.max(0, maxRequests - state.count),
92
+ resetTime: state.resetTime,
93
+ total: maxRequests
94
+ };
95
+ }
96
+ async reset(key) {
97
+ this.store.delete(key);
98
+ }
99
+ };
100
+ const RATE_LIMIT_PRESETS = {
101
+ auth: {
102
+ windowMs: 900 * 1e3,
103
+ maxRequests: 5
104
+ },
105
+ api: {
106
+ windowMs: 60 * 1e3,
107
+ maxRequests: 100
108
+ },
109
+ read: {
110
+ windowMs: 60 * 1e3,
111
+ maxRequests: 200
112
+ },
113
+ upload: {
114
+ windowMs: 3600 * 1e3,
115
+ maxRequests: 20
116
+ }
117
+ };
118
+ //#endregion
119
+ export { MemoryRateLimitStore, RATE_LIMIT_PRESETS, RateLimitCheckResultSchema, RateLimitConfigSchema, RedisRateLimitStore };
120
+
121
+ //# sourceMappingURL=rate-limit.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"rate-limit.js","names":["S"],"sources":["../src/rate-limit.ts"],"sourcesContent":["/**\n * Copyright 2026 ResQ\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * @file Rate Limiting Utilities\n * @module @resq/typescript/utils/middleware/rate-limit\n */\n\nimport { Ratelimit } from '@upstash/ratelimit';\nimport type { Redis } from '@upstash/redis';\nimport { Schema as S } from 'effect';\n\n// ============================================\n// Effect Schema Definitions\n// ============================================\n\nexport const RateLimitConfigSchema = S.Struct({\n windowMs: S.Number,\n maxRequests: S.Number,\n headers: S.optional(S.Boolean),\n});\n\nexport type RateLimitConfig = typeof RateLimitConfigSchema.Type;\n\nexport const RateLimitCheckResultSchema = S.Struct({\n limited: S.Boolean,\n remaining: S.Number,\n resetTime: S.Number,\n total: S.Number,\n});\n\nexport type RateLimitCheckResult = typeof RateLimitCheckResultSchema.Type;\n\n// ============================================\n// Rate Limit Store Interfaces\n// ============================================\n\nexport interface IRateLimitStore {\n check(key: string, windowMs: number, maxRequests: number): Promise<RateLimitCheckResult>;\n reset(key: string): Promise<void>;\n}\n\n/**\n * Redis-backed rate limit store using @upstash/ratelimit\n */\nexport class RedisRateLimitStore implements IRateLimitStore {\n private readonly limiters = new Map<string, 
Ratelimit>();\n private readonly redis: Redis;\n\n constructor(redisClient: Redis) {\n this.redis = redisClient;\n }\n\n private getLimiter(windowMs: number, maxRequests: number): Ratelimit {\n const key = `${windowMs}:${maxRequests}`;\n let limiter = this.limiters.get(key);\n if (!limiter) {\n limiter = new Ratelimit({\n redis: this.redis,\n limiter: Ratelimit.slidingWindow(maxRequests, `${windowMs} ms`),\n prefix: '@resq/ratelimit',\n });\n this.limiters.set(key, limiter);\n }\n return limiter;\n }\n\n async check(key: string, windowMs: number, maxRequests: number): Promise<RateLimitCheckResult> {\n const limiter = this.getLimiter(windowMs, maxRequests);\n const result = await limiter.limit(key);\n\n return {\n limited: !result.success,\n remaining: result.remaining,\n resetTime: result.reset,\n total: result.limit,\n };\n }\n\n async reset(key: string): Promise<void> {\n // Note: @upstash/ratelimit reset is complex as it uses multiple keys.\n // For now, we clear the main key if possible.\n }\n}\n\n/**\n * In-memory rate limit store for local development or simple services\n */\nexport class MemoryRateLimitStore implements IRateLimitStore {\n private readonly store = new Map<string, { count: number; resetTime: number }>();\n\n async check(key: string, windowMs: number, maxRequests: number): Promise<RateLimitCheckResult> {\n const now = Date.now();\n const state = this.store.get(key);\n\n if (!state || state.resetTime <= now) {\n const newState = { count: 1, resetTime: now + windowMs };\n this.store.set(key, newState);\n return {\n limited: false,\n remaining: maxRequests - 1,\n resetTime: newState.resetTime,\n total: maxRequests,\n };\n }\n\n state.count++;\n const limited = state.count > maxRequests;\n return {\n limited,\n remaining: Math.max(0, maxRequests - state.count),\n resetTime: state.resetTime,\n total: maxRequests,\n };\n }\n\n async reset(key: string): Promise<void> {\n this.store.delete(key);\n }\n}\n\n// 
============================================\n// Rate Limit Presets\n// ============================================\n\nexport const RATE_LIMIT_PRESETS = {\n auth: {\n windowMs: 15 * 60 * 1000,\n maxRequests: 5,\n },\n api: {\n windowMs: 60 * 1000,\n maxRequests: 100,\n },\n read: {\n windowMs: 60 * 1000,\n maxRequests: 200,\n },\n upload: {\n windowMs: 60 * 60 * 1000,\n maxRequests: 20,\n },\n} as const;\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AA6BA,MAAa,wBAAwBA,OAAE,OAAO;CAC5C,UAAUA,OAAE;CACZ,aAAaA,OAAE;CACf,SAASA,OAAE,SAASA,OAAE,QAAQ;CAC/B,CAAC;AAIF,MAAa,6BAA6BA,OAAE,OAAO;CACjD,SAASA,OAAE;CACX,WAAWA,OAAE;CACb,WAAWA,OAAE;CACb,OAAOA,OAAE;CACV,CAAC;;;;AAgBF,IAAa,sBAAb,MAA4D;CAC1D,2BAA4B,IAAI,KAAwB;CACxD;CAEA,YAAY,aAAoB;AAC9B,OAAK,QAAQ;;CAGf,WAAmB,UAAkB,aAAgC;EACnE,MAAM,MAAM,GAAG,SAAS,GAAG;EAC3B,IAAI,UAAU,KAAK,SAAS,IAAI,IAAI;AACpC,MAAI,CAAC,SAAS;AACZ,aAAU,IAAI,UAAU;IACtB,OAAO,KAAK;IACZ,SAAS,UAAU,cAAc,aAAa,GAAG,SAAS,KAAK;IAC/D,QAAQ;IACT,CAAC;AACF,QAAK,SAAS,IAAI,KAAK,QAAQ;;AAEjC,SAAO;;CAGT,MAAM,MAAM,KAAa,UAAkB,aAAoD;EAE7F,MAAM,SAAS,MADC,KAAK,WAAW,UAAU,YAAY,CACzB,MAAM,IAAI;AAEvC,SAAO;GACL,SAAS,CAAC,OAAO;GACjB,WAAW,OAAO;GAClB,WAAW,OAAO;GAClB,OAAO,OAAO;GACf;;CAGH,MAAM,MAAM,KAA4B;;;;;AAS1C,IAAa,uBAAb,MAA6D;CAC3D,wBAAyB,IAAI,KAAmD;CAEhF,MAAM,MAAM,KAAa,UAAkB,aAAoD;EAC7F,MAAM,MAAM,KAAK,KAAK;EACtB,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AAEjC,MAAI,CAAC,SAAS,MAAM,aAAa,KAAK;GACpC,MAAM,WAAW;IAAE,OAAO;IAAG,WAAW,MAAM;IAAU;AACxD,QAAK,MAAM,IAAI,KAAK,SAAS;AAC7B,UAAO;IACL,SAAS;IACT,WAAW,cAAc;IACzB,WAAW,SAAS;IACpB,OAAO;IACR;;AAGH,QAAM;AAEN,SAAO;GACL,SAFc,MAAM,QAAQ;GAG5B,WAAW,KAAK,IAAI,GAAG,cAAc,MAAM,MAAM;GACjD,WAAW,MAAM;GACjB,OAAO;GACR;;CAGH,MAAM,MAAM,KAA4B;AACtC,OAAK,MAAM,OAAO,IAAI;;;AAQ1B,MAAa,qBAAqB;CAChC,MAAM;EACJ,UAAU,MAAU;EACpB,aAAa;EACd;CACD,KAAK;EACH,UAAU,KAAK;EACf,aAAa;EACd;CACD,MAAM;EACJ,UAAU,KAAK;EACf,aAAa;EACd;CACD,QAAQ;EACN,UAAU,OAAU;EACpB,aAAa;EACd;CACF"}
@@ -0,0 +1,253 @@
1
+ import { Schema } from "effect";
2
+
3
+ //#region src/throttle.d.ts
4
+ /**
5
+ * Throttle Options Schema
6
+ */
7
+ declare const ThrottleOptionsSchema: Schema.Struct<{
8
+ /** Whether to call the function on the leading edge */readonly leading: Schema.optional<Schema.Boolean>; /** Whether to call the function on the trailing edge */
9
+ readonly trailing: Schema.optional<Schema.Boolean>;
10
+ }>;
11
+ type ThrottleOptions = typeof ThrottleOptionsSchema.Type;
12
+ /**
13
+ * Debounce Options Schema
14
+ */
15
+ declare const DebounceOptionsSchema: Schema.Struct<{
16
+ /** Whether to call the function on the leading edge */readonly leading: Schema.optional<Schema.Boolean>; /** Maximum time to wait before forcing execution */
17
+ readonly maxWait: Schema.optional<Schema.Number>;
18
+ }>;
19
+ type DebounceOptions = typeof DebounceOptionsSchema.Type;
20
+ /**
21
+ * Rate Limiter Stats Schema
22
+ */
23
+ declare const RateLimiterStatsSchema: Schema.Struct<{
24
+ readonly availableTokens: Schema.Number;
25
+ readonly queueSize: Schema.Number;
26
+ readonly capacity: Schema.Number;
27
+ }>;
28
+ type RateLimiterStats = typeof RateLimiterStatsSchema.Type;
29
+ /**
30
+ * Keyed Stats Schema
31
+ */
32
+ declare const KeyedStatsSchema: Schema.Struct<{
33
+ readonly activeKeys: Schema.Number;
34
+ readonly keys: Schema.$Array<Schema.String>;
35
+ }>;
36
+ type KeyedStats = typeof KeyedStatsSchema.Type;
37
+ /** Generic callable function type */
38
+ type AnyFunction = (...args: never[]) => unknown;
39
+ /**
40
+ * Throttle a function to only execute once per specified interval
41
+ *
42
+ * @param func Function to throttle
43
+ * @param wait Wait time in milliseconds
44
+ * @param options Throttle options
45
+ * @returns Throttled function
46
+ *
47
+ * @example
48
+ * ```ts
49
+ * const fetchData = throttle(() => fetch('/api/data'), 1000);
50
+ * fetchData(); // Executes immediately
51
+ * fetchData(); // Ignored
52
+ * fetchData(); // Ignored
53
+ * // After 1000ms, next call will execute
54
+ * ```
55
+ */
56
+ declare function throttle<T extends AnyFunction>(func: T, wait: number, options?: ThrottleOptions): ((...args: Parameters<T>) => ReturnType<T> | undefined) & {
57
+ cancel: () => void;
58
+ };
59
+ /**
60
+ * Debounce a function to only execute after it stops being called for specified time
61
+ *
62
+ * @param func Function to debounce
63
+ * @param wait Wait time in milliseconds
64
+ * @param options Debounce options
65
+ * @returns Debounced function
66
+ *
67
+ * @example
68
+ * ```ts
69
+ * const search = debounce((query) => fetchSearchResults(query), 300);
70
+ * search('a'); // Waiting...
71
+ * search('ab'); // Waiting...
72
+ * search('abc'); // Executes after 300ms of no calls
73
+ * ```
74
+ */
75
+ declare function debounce<T extends AnyFunction>(func: T, wait: number, options?: DebounceOptions): ((...args: Parameters<T>) => void) & {
76
+ cancel: () => void;
77
+ flush: () => void;
78
+ };
79
+ /**
80
+ * Per-key throttle manager for throttling by specific keys
81
+ * Useful for throttling per-endpoint or per-user
82
+ */
83
+ declare class KeyedThrottle<T extends AnyFunction> {
84
+ private throttles;
85
+ private readonly func;
86
+ private readonly wait;
87
+ private readonly options;
88
+ constructor(func: T, wait: number, options?: ThrottleOptions);
89
+ /**
90
+ * Execute function with throttling per key
91
+ */
92
+ execute(key: string, ...args: Parameters<T>): ReturnType<T> | undefined;
93
+ /**
94
+ * Cancel throttle for specific key
95
+ */
96
+ cancel(key: string): void;
97
+ /**
98
+ * Cancel all throttles
99
+ */
100
+ cancelAll(): void;
101
+ /**
102
+ * Get stats
103
+ */
104
+ getStats(): KeyedStats;
105
+ }
106
+ /**
107
+ * Per-key debounce manager for debouncing by specific keys
108
+ * Useful for debouncing per-endpoint or per-user
109
+ */
110
+ declare class KeyedDebounce<T extends AnyFunction> {
111
+ private debounces;
112
+ private readonly func;
113
+ private readonly wait;
114
+ private readonly options;
115
+ constructor(func: T, wait: number, options?: DebounceOptions);
116
+ /**
117
+ * Execute function with debouncing per key
118
+ */
119
+ execute(key: string, ...args: Parameters<T>): void;
120
+ /**
121
+ * Cancel debounce for specific key
122
+ */
123
+ cancel(key: string): void;
124
+ /**
125
+ * Flush debounce for specific key (execute immediately)
126
+ */
127
+ flush(key: string): void;
128
+ /**
129
+ * Cancel all debounces
130
+ */
131
+ cancelAll(): void;
132
+ /**
133
+ * Get stats
134
+ */
135
+ getStats(): KeyedStats;
136
+ }
137
+ /**
138
+ * Rate limiter using token bucket algorithm
139
+ *
140
+ * @example
141
+ * ```ts
142
+ * const limiter = new TokenBucketLimiter(5, 60000); // 5 requests per minute
143
+ *
144
+ * async function fetchData() {
145
+ * await limiter.acquire();
146
+ * return fetch('/api/data');
147
+ * }
148
+ * ```
149
+ */
150
+ declare class TokenBucketLimiter {
151
+ private tokens;
152
+ private lastRefill;
153
+ private readonly capacity;
154
+ private readonly refillRate;
155
+ private readonly refillInterval;
156
+ private queue;
157
+ /**
158
+ * @param capacity Maximum number of tokens (requests)
159
+ * @param windowMs Time window in milliseconds
160
+ */
161
+ constructor(capacity: number, windowMs: number);
162
+ /**
163
+ * Refill tokens based on elapsed time
164
+ */
165
+ private refill;
166
+ /**
167
+ * Acquire a token (wait if none available)
168
+ */
169
+ acquire(): Promise<void>;
170
+ /**
171
+ * Try to acquire a token without waiting
172
+ */
173
+ tryAcquire(): boolean;
174
+ /**
175
+ * Schedule next token release
176
+ */
177
+ private scheduleNextRelease;
178
+ /**
179
+ * Get rate limiter stats
180
+ */
181
+ getStats(): RateLimiterStats;
182
+ /**
183
+ * Reset the rate limiter
184
+ */
185
+ reset(): void;
186
+ }
187
+ /**
188
+ * Leaky bucket algorithm - requests "leak" out at a constant rate
189
+ * Provides smoother rate limiting than token bucket
190
+ */
191
+ declare class LeakyBucketLimiter {
192
+ private queue;
193
+ private readonly capacity;
194
+ private readonly leakRate;
195
+ private processing;
196
+ /**
197
+ * @param capacity Maximum queue size
198
+ * @param requestsPerSecond How many requests to process per second
199
+ */
200
+ constructor(capacity: number, requestsPerSecond: number);
201
+ /**
202
+ * Add a request to the bucket
203
+ */
204
+ acquire(): Promise<void>;
205
+ /**
206
+ * Try to acquire without blocking
207
+ */
208
+ tryAcquire(): boolean;
209
+ /**
210
+ * Process the queue at the leak rate
211
+ */
212
+ private processQueue;
213
+ /**
214
+ * Get stats
215
+ */
216
+ getStats(): RateLimiterStats;
217
+ /**
218
+ * Clear the queue
219
+ */
220
+ reset(): void;
221
+ }
222
+ /**
223
+ * Sliding window counter for accurate rate limiting
224
+ */
225
+ declare class SlidingWindowCounter {
226
+ private counters;
227
+ private readonly windowMs;
228
+ private readonly maxRequests;
229
+ constructor(windowMs: number, maxRequests: number);
230
+ /**
231
+ * Check and increment counter for a key
232
+ */
233
+ check(key: string): {
234
+ allowed: boolean;
235
+ remaining: number;
236
+ resetAt: number;
237
+ };
238
+ /**
239
+ * Reset counter for a key
240
+ */
241
+ reset(key: string): void;
242
+ /**
243
+ * Cleanup old entries
244
+ */
245
+ private cleanup;
246
+ /**
247
+ * Get stats
248
+ */
249
+ getStats(): KeyedStats;
250
+ }
251
+ //#endregion
252
+ export { DebounceOptions, DebounceOptionsSchema, KeyedDebounce, KeyedStats, KeyedStatsSchema, KeyedThrottle, LeakyBucketLimiter, RateLimiterStats, RateLimiterStatsSchema, SlidingWindowCounter, ThrottleOptions, ThrottleOptionsSchema, TokenBucketLimiter, debounce, throttle };
253
+ //# sourceMappingURL=throttle.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"throttle.d.ts","names":[],"sources":["../src/throttle.ts"],"mappings":";;;;;AA0CA;cAPM,qBAAA,EAAqB,MAAA,CAAA,MAAA;4GAOoC;EAAA;;KAAnD,eAAA,UAAyB,qBAAA,CAAsB,IAAA;;;;cAKrD,qBAAA,EAAqB,MAAA,CAAA,MAAA;EAAA,0GAAA;EAAA;;KAOf,eAAA,UAAyB,qBAAA,CAAsB,IAAA;;;;cAKrD,sBAAA,EAAsB,MAAA,CAAA,MAAA;EAAA;;;;KAMhB,gBAAA,UAA0B,sBAAA,CAAuB,IAAA;;;AAX7D;cAgBM,gBAAA,EAAgB,MAAA,CAAA,MAAA;EAAA;;;KAKV,UAAA,UAAoB,gBAAA,CAAiB,IAAA;;KAO5C,WAAA,OAAkB,IAAA;;;;;;;;;;;;;;;;;;iBAuBP,QAAA,WAAmB,WAAA,CAAA,CACjC,IAAA,EAAM,CAAA,EACN,IAAA,UACA,OAAA,GAAS,eAAA,QACH,IAAA,EAAM,UAAA,CAAW,CAAA,MAAO,UAAA,CAAW,CAAA;EAAoB,MAAA;AAAA;;;;;AA5CG;;;;;;;;;;;;iBA+GlD,QAAA,WAAmB,WAAA,CAAA,CACjC,IAAA,EAAM,CAAA,EACN,IAAA,UACA,OAAA,GAAS,eAAA,QACH,IAAA,EAAM,UAAA,CAAW,CAAA;EAAiB,MAAA;EAAoB,KAAA;AAAA;;;;;cA2EjD,aAAA,WAAwB,WAAA;EAAA,QAC3B,SAAA;EAAA,iBAIS,IAAA;EAAA,iBACA,IAAA;EAAA,iBACA,OAAA;cAEL,IAAA,EAAM,CAAA,EAAG,IAAA,UAAc,OAAA,GAAS,eAAA;EAtLzC;;;EA+LI,OAAA,CAAQ,GAAA,aAAgB,IAAA,EAAM,UAAA,CAAW,CAAA,IAAK,UAAA,CAAW,CAAA;EA/LvC;AAuB3B;;EAsLS,MAAA,CAAO,GAAA;EAtLmB;;;EAiM1B,SAAA,CAAA;EA7LK;;;EAuML,QAAA,CAAA,GAAY,UAAA;AAAA;;;;;cAgBR,aAAA,WAAwB,WAAA;EAAA,QAC3B,SAAA;EAAA,iBAIS,IAAA;EAAA,iBACA,IAAA;EAAA,iBACA,OAAA;cAEL,IAAA,EAAM,CAAA,EAAG,IAAA,UAAc,OAAA,GAAS,eAAA;EAhOd;;;EAyOvB,OAAA,CAAQ,GAAA,aAAgB,IAAA,EAAM,UAAA,CAAW,CAAA;EAzOmB;AAmErE;;EAoLS,MAAA,CAAO,GAAA;EApLmB;;;EA+L1B,KAAA,CAAM,GAAA;EA3LD;;;EAqML,SAAA,CAAA;EAzM0B;;;EAmN1B,QAAA,CAAA,GAAY,UAAA;AAAA;;;;;;;;;AApIrB;;;;;cA6Ja,kBAAA;EAAA,QACH,MAAA;EAAA,QACA,UAAA;EAAA,iBACS,QAAA;EAAA,iBACA,UAAA;EAAA,iBACA,cAAA;EAAA,QACT,KAAA;EA9GqB;;;;cAoHjB,QAAA,UAAkB,QAAA;EAnKb;;;EAAA,QA8KT,MAAA;EA3KI;;;EAyLC,OAAA,CAAA,GAAW,OAAA;EAhLjB;;;EAkMA,UAAA,CAAA;EAlMyC;;;EAAA,QAgNxC,mBAAA;EAlMM;;;EAqNP,QAAA,CAAA,GAAY,gBAAA;EAhMU;;AAgB/B;EA4LS,KAAA,CAAA;AAAA;;;;;cAeI,kBAAA;EAAA,QACH,KAAA;EAAA,iBACS,QAAA;EAAA,iBACA,QAAA;EAAA,QACT,UAAA;EA/M2B;;;;cAqNvB,QAAA,UAAkB,iBAAA;;;;EAQjB,OAAA,CAAA,GAAW,OAAA;EApNoB;;;EAkOrC,UAAA,CAAA;EAzNwB;;;EAAA,QAyOvB,YAAA;EA3NM;;;EAmPP,QAAA,CAAA,GAAY,gBAAA;EApNZ;;
;EA+NA,KAAA,CAAA;AAAA;;;;cAaI,oBAAA;EAAA,QACH,QAAA;EAAA,iBACS,QAAA;EAAA,iBACA,WAAA;cAEL,QAAA,UAAkB,WAAA;EAlNtB;;;EA6ND,KAAA,CAAM,GAAA;IAAgB,OAAA;IAAkB,SAAA;IAAmB,OAAA;EAAA;EA9J1D;;;EAyMD,KAAA,CAAM,GAAA;EA1KD;;AAed;EAfc,QAiLJ,OAAA;;;;EAYD,QAAA,CAAA,GAAY,UAAA;AAAA"}
@@ -0,0 +1,521 @@
1
+ import { Schema } from "effect";
2
+ //#region src/throttle.ts
3
+ /**
4
+ * Copyright 2026 ResQ
5
+ *
6
+ * Licensed under the Apache License, Version 2.0 (the "License");
7
+ * you may not use this file except in compliance with the License.
8
+ * You may obtain a copy of the License at
9
+ *
10
+ * http://www.apache.org/licenses/LICENSE-2.0
11
+ *
12
+ * Unless required by applicable law or agreed to in writing, software
13
+ * distributed under the License is distributed on an "AS IS" BASIS,
14
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ * See the License for the specific language governing permissions and
16
+ * limitations under the License.
17
+ */
18
+ /**
19
+ * @file Throttle and Debounce Utilities
20
+ * @module utils/throttle
21
+ * @author ResQ
22
+ * @description Provides functions to limit the rate at which functions can be called.
23
+ * Useful for preventing excessive API calls and managing request frequency.
24
+ * Includes throttle, debounce, rate limiter, and queue utilities.
25
+ * @compliance NIST 800-53 SC-5 (Denial of Service Protection)
26
+ */
27
/**
 * Throttle Options Schema
 *
 * Runtime-validatable shape (effect Schema) for `throttle` options.
 * Both fields are optional here; `throttle` itself defaults `leading` and
 * `trailing` to true.
 */
const ThrottleOptionsSchema = Schema.Struct({
	// Invoke on the leading edge of the interval
	leading: Schema.optional(Schema.Boolean),
	// Invoke on the trailing edge of the interval
	trailing: Schema.optional(Schema.Boolean)
});
/**
 * Debounce Options Schema
 *
 * `debounce` defaults `leading` to false; `maxWait` (ms) caps how long
 * invocation may be deferred while calls keep arriving.
 */
const DebounceOptionsSchema = Schema.Struct({
	// Invoke on the leading edge of the wait period
	leading: Schema.optional(Schema.Boolean),
	// Maximum time (ms) to defer before forcing execution
	maxWait: Schema.optional(Schema.Number)
});
/**
 * Rate Limiter Stats Schema
 *
 * Shape returned by TokenBucketLimiter.getStats / LeakyBucketLimiter.getStats.
 */
const RateLimiterStatsSchema = Schema.Struct({
	availableTokens: Schema.Number,
	queueSize: Schema.Number,
	capacity: Schema.Number
});
/**
 * Keyed Stats Schema
 *
 * Shape returned by the keyed managers' getStats (KeyedThrottle,
 * KeyedDebounce, SlidingWindowCounter).
 */
const KeyedStatsSchema = Schema.Struct({
	activeKeys: Schema.Number,
	keys: Schema.Array(Schema.String)
});
56
/**
 * Throttle a function to only execute once per specified interval.
 *
 * Fix: the trailing-edge invocation now uses the arguments (and `this`) of
 * the *latest* call made during the throttled window. Previously it replayed
 * the stale arguments of whichever call first scheduled the trailing timer.
 *
 * @param func Function to throttle
 * @param wait Wait time in milliseconds
 * @param options Throttle options ({ leading = true, trailing = true })
 * @returns Throttled function carrying a `.cancel()` method that clears any
 *          pending trailing invocation and resets the window
 *
 * @example
 * ```ts
 * const fetchData = throttle(() => fetch('/api/data'), 1000);
 * fetchData(); // Executes immediately
 * fetchData(); // Ignored
 * fetchData(); // Ignored
 * // After 1000ms, next call will execute
 * ```
 */
function throttle(func, wait, options = {}) {
	let timeout = null;
	let previous = 0;
	let result;
	// Latest call's context/args; consumed by the trailing-edge invocation.
	let lastArgs = null;
	let lastThis = null;
	const { leading = true, trailing = true } = options;
	const later = () => {
		// leading === false keeps previous at 0 so the next call is treated
		// as the start of a fresh window.
		previous = leading === false ? 0 : Date.now();
		timeout = null;
		result = func.apply(lastThis, lastArgs);
		lastArgs = null;
		lastThis = null;
	};
	const throttled = function(...args) {
		const now = Date.now();
		// With leading disabled, anchor the window at the first call.
		if (!previous && leading === false) previous = now;
		const remaining = wait - (now - previous);
		// Always remember the most recent call for the trailing edge.
		lastArgs = args;
		lastThis = this;
		if (remaining <= 0 || remaining > wait) {
			if (timeout) {
				clearTimeout(timeout);
				timeout = null;
			}
			previous = now;
			result = func.apply(this, args);
			lastArgs = null;
			lastThis = null;
		} else if (!timeout && trailing) timeout = setTimeout(later, remaining);
		return result;
	};
	throttled.cancel = () => {
		if (timeout) {
			clearTimeout(timeout);
			timeout = null;
		}
		previous = 0;
		lastArgs = null;
		lastThis = null;
	};
	return throttled;
}
106
/**
 * Debounce a function to only execute after it stops being called for the
 * specified time.
 *
 * Fixes over the previous version:
 * - `flush()` now actually invokes the pending call (it used to only clear
 *   the timer, silently dropping the call despite being documented as
 *   "execute immediately").
 * - With `leading: true`, a single call no longer invokes the function twice
 *   (leading invoke + an unconditional timer invoke after `wait`); the timer
 *   now only fires a trailing invocation if further calls arrived.
 *
 * @param func Function to debounce
 * @param wait Wait time in milliseconds
 * @param options Debounce options ({ leading = false, maxWait })
 * @returns Debounced function with `.cancel()` and `.flush()` methods
 *
 * @example
 * ```ts
 * const search = debounce((query) => fetchSearchResults(query), 300);
 * search('a'); // Waiting...
 * search('ab'); // Waiting...
 * search('abc'); // Executes after 300ms of no calls
 * ```
 */
function debounce(func, wait, options = {}) {
	let timeout = null;
	let lastCallTime = 0;
	let lastInvokeTime = 0;
	// Pending call's context/args; null means nothing is awaiting invocation.
	let lastArgs = null;
	let lastThis = null;
	const { leading = false, maxWait } = options;
	const invokePending = () => {
		const args = lastArgs;
		const context = lastThis;
		lastArgs = null;
		lastThis = null;
		lastInvokeTime = Date.now();
		func.apply(context, args);
	};
	const shouldInvoke = (time) => {
		const timeSinceLastCall = time - lastCallTime;
		const timeSinceLastInvoke = time - lastInvokeTime;
		// First call ever, quiet period elapsed, clock went backwards, or
		// maxWait deadline reached.
		return lastCallTime === 0 || timeSinceLastCall >= wait || timeSinceLastCall < 0 || maxWait !== void 0 && timeSinceLastInvoke >= maxWait;
	};
	const timerExpired = () => {
		timeout = null;
		// Only invoke if a call is actually pending (a lone leading-edge call
		// leaves nothing to invoke on the trailing edge).
		if (lastArgs) invokePending();
	};
	const debounced = function(...args) {
		const time = Date.now();
		const isInvoking = shouldInvoke(time);
		lastCallTime = time;
		lastArgs = args;
		lastThis = this;
		if (isInvoking && timeout === null && leading) {
			// Leading edge: fire now; the timer below just guards the window.
			invokePending();
			timeout = setTimeout(timerExpired, wait);
			return;
		}
		if (timeout) clearTimeout(timeout);
		timeout = setTimeout(timerExpired, wait);
	};
	debounced.cancel = () => {
		if (timeout) {
			clearTimeout(timeout);
			timeout = null;
		}
		lastCallTime = 0;
		lastInvokeTime = 0;
		lastArgs = null;
		lastThis = null;
	};
	debounced.flush = () => {
		if (timeout) {
			clearTimeout(timeout);
			timeout = null;
		}
		if (lastArgs) invokePending();
	};
	return debounced;
}
168
/**
 * Per-key throttle manager: lazily creates one throttled wrapper per string
 * key so independent keys (e.g. per-endpoint or per-user) are throttled
 * separately with the same function, wait, and options.
 */
var KeyedThrottle = class {
	// key -> throttled wrapper, created on first use
	throttles = /* @__PURE__ */ new Map();
	func;
	wait;
	options;
	constructor(func, wait, options = {}) {
		this.func = func;
		this.wait = wait;
		this.options = options;
	}
	/**
	 * Invoke the managed function through the throttle bound to `key`,
	 * creating that throttle on first use. Returns the throttled result.
	 */
	execute(key, ...args) {
		const existing = this.throttles.get(key);
		if (existing !== undefined) return existing(...args);
		const created = throttle(this.func, this.wait, this.options);
		this.throttles.set(key, created);
		return created(...args);
	}
	/**
	 * Cancel any pending trailing call for `key` and forget its throttle.
	 */
	cancel(key) {
		this.throttles.get(key)?.cancel();
		this.throttles.delete(key);
	}
	/**
	 * Cancel every managed throttle and drop all keys.
	 */
	cancelAll() {
		this.throttles.forEach((entry) => entry.cancel());
		this.throttles.clear();
	}
	/**
	 * Snapshot of how many keys are active and which ones.
	 */
	getStats() {
		return {
			activeKeys: this.throttles.size,
			keys: [...this.throttles.keys()]
		};
	}
};
218
/**
 * Per-key debounce manager: lazily creates one debounced wrapper per string
 * key so independent keys (e.g. per-endpoint or per-user) are debounced
 * separately with the same function, wait, and options.
 */
var KeyedDebounce = class {
	// key -> debounced wrapper, created on first use
	debounces = /* @__PURE__ */ new Map();
	func;
	wait;
	options;
	constructor(func, wait, options = {}) {
		this.func = func;
		this.wait = wait;
		this.options = options;
	}
	/**
	 * Route a call through the debounce bound to `key`, creating that
	 * debounce on first use.
	 */
	execute(key, ...args) {
		const existing = this.debounces.get(key);
		if (existing !== undefined) {
			existing(...args);
			return;
		}
		const created = debounce(this.func, this.wait, this.options);
		this.debounces.set(key, created);
		created(...args);
	}
	/**
	 * Cancel any pending call for `key` and forget its debounce.
	 */
	cancel(key) {
		this.debounces.get(key)?.cancel();
		this.debounces.delete(key);
	}
	/**
	 * Flush the debounce for `key` (execute immediately).
	 */
	flush(key) {
		this.debounces.get(key)?.flush();
	}
	/**
	 * Cancel every managed debounce and drop all keys.
	 */
	cancelAll() {
		this.debounces.forEach((entry) => entry.cancel());
		this.debounces.clear();
	}
	/**
	 * Snapshot of how many keys are active and which ones.
	 */
	getStats() {
		return {
			activeKeys: this.debounces.size,
			keys: [...this.debounces.keys()]
		};
	}
};
275
/**
 * Rate limiter using the token bucket algorithm.
 *
 * Tokens refill continuously at `capacity` per `windowMs`. `acquire()`
 * resolves immediately while a whole token is available; otherwise the caller
 * is queued FIFO and released as tokens refill.
 *
 * Fixes over the previous version:
 * - A queued waiter is no longer shifted off the queue when its timer fires
 *   before a whole token has refilled — previously that resolver was simply
 *   dropped and the caller's promise hung forever.
 * - Only one release timer chain runs at a time (previously every queued
 *   `acquire()` started its own chain).
 *
 * @example
 * ```ts
 * const limiter = new TokenBucketLimiter(5, 60000); // 5 requests per minute
 *
 * async function fetchData() {
 *   await limiter.acquire();
 *   return fetch('/api/data');
 * }
 * ```
 */
var TokenBucketLimiter = class {
	tokens;
	lastRefill;
	capacity;
	refillRate;
	refillInterval;
	queue = [];
	// Single pending release timer (null when none scheduled).
	releaseTimer = null;
	/**
	 * @param capacity Maximum number of tokens (requests)
	 * @param windowMs Time window in milliseconds
	 */
	constructor(capacity, windowMs) {
		this.capacity = capacity;
		this.tokens = capacity;
		this.lastRefill = Date.now();
		this.refillRate = capacity;
		this.refillInterval = windowMs;
	}
	/**
	 * Refill tokens (fractional) proportionally to elapsed time, capped at
	 * capacity.
	 */
	refill() {
		const now = Date.now();
		const tokensToAdd = (now - this.lastRefill) / this.refillInterval * this.refillRate;
		if (tokensToAdd > 0) {
			this.tokens = Math.min(this.capacity, this.tokens + tokensToAdd);
			this.lastRefill = now;
		}
	}
	/**
	 * Acquire a token, waiting in FIFO order if none is available.
	 */
	async acquire() {
		this.refill();
		if (this.tokens >= 1) {
			this.tokens -= 1;
			return Promise.resolve();
		}
		return new Promise((resolve) => {
			this.queue.push(resolve);
			this.scheduleNextRelease();
		});
	}
	/**
	 * Try to acquire a token without waiting.
	 * @returns true when a token was consumed
	 */
	tryAcquire() {
		this.refill();
		if (this.tokens >= 1) {
			this.tokens -= 1;
			return true;
		}
		return false;
	}
	/**
	 * Schedule the next release check. Waiters are only dequeued once a whole
	 * token is available; if none refilled yet, they stay queued and the check
	 * reschedules itself.
	 */
	scheduleNextRelease() {
		if (this.queue.length === 0 || this.releaseTimer !== null) return;
		const waitTime = this.refillInterval / this.refillRate;
		this.releaseTimer = setTimeout(() => {
			this.releaseTimer = null;
			this.refill();
			while (this.queue.length > 0 && this.tokens >= 1) {
				this.tokens -= 1;
				const resolve = this.queue.shift();
				resolve();
			}
			this.scheduleNextRelease();
		}, waitTime);
	}
	/**
	 * Get rate limiter stats (whole available tokens, queue depth, capacity).
	 */
	getStats() {
		this.refill();
		return {
			availableTokens: Math.floor(this.tokens),
			queueSize: this.queue.length,
			capacity: this.capacity
		};
	}
	/**
	 * Reset to a full bucket and cancel any pending release timer.
	 * NOTE(review): queued waiters are discarded without being resolved
	 * (preserves prior behavior) — their acquire() promises never settle.
	 */
	reset() {
		if (this.releaseTimer !== null) {
			clearTimeout(this.releaseTimer);
			this.releaseTimer = null;
		}
		this.tokens = this.capacity;
		this.lastRefill = Date.now();
		this.queue = [];
	}
};
378
/**
 * Leaky bucket algorithm - requests "leak" out at a constant rate.
 * Provides smoother rate limiting than token bucket.
 *
 * Fixes over the previous version:
 * - `tryAcquire()` used to return true whenever the limiter was idle without
 *   recording anything, so back-to-back calls were never rate limited at all.
 *   It now records the leak time and enforces the per-request spacing.
 * - Queue processing now paces the first release relative to the most recent
 *   leak, so requests arriving just after a batch finished cannot burst past
 *   `requestsPerSecond`.
 */
var LeakyBucketLimiter = class {
	queue = [];
	capacity;
	leakRate;
	processing = false;
	// Timestamp (ms) of the most recently released request; 0 = never.
	lastLeak = 0;
	/**
	 * @param capacity Maximum queue size
	 * @param requestsPerSecond How many requests to process per second
	 */
	constructor(capacity, requestsPerSecond) {
		this.capacity = capacity;
		// ms of spacing between consecutive releases
		this.leakRate = 1e3 / requestsPerSecond;
	}
	/**
	 * Add a request to the bucket.
	 * @throws Error when the queue is already at capacity
	 */
	async acquire() {
		if (this.queue.length >= this.capacity) throw new Error("Rate limit exceeded: queue full");
		return new Promise((resolve) => {
			this.queue.push({
				resolve,
				timestamp: Date.now()
			});
			this.processQueue();
		});
	}
	/**
	 * Try to acquire without blocking. Succeeds only when the limiter is idle
	 * and at least `leakRate` ms have passed since the last release.
	 */
	tryAcquire() {
		if (this.queue.length >= this.capacity) return false;
		const now = Date.now();
		if (!this.processing && this.queue.length === 0 && now - this.lastLeak >= this.leakRate) {
			// Consume the slot so consecutive calls are paced.
			this.lastLeak = now;
			return true;
		}
		return false;
	}
	/**
	 * Process the queue at the leak rate (one timer chain at a time).
	 */
	processQueue() {
		if (this.processing || this.queue.length === 0) return;
		this.processing = true;
		const processNext = () => {
			const now = Date.now();
			// Respect spacing from the previous release before leaking again.
			const delay = this.lastLeak + this.leakRate - now;
			if (delay > 0) {
				setTimeout(processNext, delay);
				return;
			}
			const item = this.queue.shift();
			if (item) {
				this.lastLeak = now;
				item.resolve();
			}
			if (this.queue.length > 0) setTimeout(processNext, this.leakRate);
			else this.processing = false;
		};
		processNext();
	}
	/**
	 * Get stats (free queue slots, queue depth, capacity).
	 */
	getStats() {
		return {
			availableTokens: this.capacity - this.queue.length,
			queueSize: this.queue.length,
			capacity: this.capacity
		};
	}
	/**
	 * Clear the queue and pacing state.
	 * NOTE(review): queued waiters are discarded without being resolved
	 * (preserves prior behavior) — their acquire() promises never settle.
	 */
	reset() {
		this.queue = [];
		this.processing = false;
		this.lastLeak = 0;
	}
};
448
/**
 * Sliding window counter for accurate per-key rate limiting.
 *
 * Keeps counts for the current and previous fixed windows per key; the
 * effective count is a linear interpolation of the two, approximating a true
 * sliding window without storing individual timestamps.
 *
 * Fix: the periodic cleanup interval is now unref'd (it used to keep the
 * Node process alive forever) and can be stopped via `dispose()`.
 */
var SlidingWindowCounter = class {
	counters = /* @__PURE__ */ new Map();
	windowMs;
	maxRequests;
	// Handle of the periodic cleanup interval (cleared by dispose()).
	cleanupTimer;
	constructor(windowMs, maxRequests) {
		this.windowMs = windowMs;
		this.maxRequests = maxRequests;
		this.cleanupTimer = setInterval(() => this.cleanup(), windowMs);
		// Don't let the cleanup interval pin the event loop open (Node only;
		// guarded so browser timers, which lack unref, still work).
		if (typeof this.cleanupTimer.unref === "function") this.cleanupTimer.unref();
	}
	/**
	 * Check and increment the counter for a key.
	 * @returns allowed flag, remaining requests in the window, and the epoch
	 *          ms at which the current window resets
	 */
	check(key) {
		const now = Date.now();
		const windowStart = Math.floor(now / this.windowMs) * this.windowMs;
		const previousWindowStart = windowStart - this.windowMs;
		let counter = this.counters.get(key);
		if (!counter) {
			counter = {
				current: 0,
				previous: 0,
				windowStart
			};
			this.counters.set(key, counter);
		}
		// Roll the counter forward: stale by 2+ windows -> wipe both buckets;
		// stale by exactly 1 window -> current becomes previous.
		if (counter.windowStart < previousWindowStart) {
			counter.previous = 0;
			counter.current = 0;
			counter.windowStart = windowStart;
		} else if (counter.windowStart < windowStart) {
			counter.previous = counter.current;
			counter.current = 0;
			counter.windowStart = windowStart;
		}
		// Weight the previous window by how much of it still overlaps the
		// sliding window ending now.
		const windowPosition = (now - windowStart) / this.windowMs;
		const weightedCount = counter.previous * (1 - windowPosition) + counter.current;
		const allowed = weightedCount < this.maxRequests;
		if (allowed) counter.current++;
		return {
			allowed,
			remaining: Math.max(0, Math.floor(this.maxRequests - weightedCount - (allowed ? 1 : 0))),
			resetAt: windowStart + this.windowMs
		};
	}
	/**
	 * Reset the counter for a key.
	 */
	reset(key) {
		this.counters.delete(key);
	}
	/**
	 * Drop entries idle for at least two full windows (their weighted count
	 * can no longer influence any decision).
	 */
	cleanup() {
		const cutoff = Date.now() - this.windowMs * 2;
		for (const [key, counter] of this.counters.entries()) if (counter.windowStart < cutoff) this.counters.delete(key);
	}
	/**
	 * Stop the periodic cleanup timer. Safe to call more than once.
	 */
	dispose() {
		clearInterval(this.cleanupTimer);
	}
	/**
	 * Snapshot of how many keys are tracked and which ones.
	 */
	getStats() {
		return {
			activeKeys: this.counters.size,
			keys: Array.from(this.counters.keys())
		};
	}
};
518
+ //#endregion
519
+ export { DebounceOptionsSchema, KeyedDebounce, KeyedStatsSchema, KeyedThrottle, LeakyBucketLimiter, RateLimiterStatsSchema, SlidingWindowCounter, ThrottleOptionsSchema, TokenBucketLimiter, debounce, throttle };
520
+
521
+ //# sourceMappingURL=throttle.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"throttle.js","names":["S"],"sources":["../src/throttle.ts"],"sourcesContent":["/**\n * Copyright 2026 ResQ\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * @file Throttle and Debounce Utilities\n * @module utils/throttle\n * @author ResQ\n * @description Provides functions to limit the rate at which functions can be called.\n * Useful for preventing excessive API calls and managing request frequency.\n * Includes throttle, debounce, rate limiter, and queue utilities.\n * @compliance NIST 800-53 SC-5 (Denial of Service Protection)\n */\n\nimport { Schema as S } from 'effect';\n\n// ============================================\n// Effect Schema Definitions\n// ============================================\n\n/**\n * Throttle Options Schema\n */\nconst ThrottleOptionsSchema = S.Struct({\n /** Whether to call the function on the leading edge */\n leading: S.optional(S.Boolean),\n /** Whether to call the function on the trailing edge */\n trailing: S.optional(S.Boolean),\n});\n\nexport type ThrottleOptions = typeof ThrottleOptionsSchema.Type;\n\n/**\n * Debounce Options Schema\n */\nconst DebounceOptionsSchema = S.Struct({\n /** Whether to call the function on the leading edge */\n leading: S.optional(S.Boolean),\n /** Maximum time to wait before forcing execution */\n maxWait: S.optional(S.Number),\n});\n\nexport type DebounceOptions = typeof DebounceOptionsSchema.Type;\n\n/**\n * Rate Limiter Stats Schema\n */\nconst 
RateLimiterStatsSchema = S.Struct({\n availableTokens: S.Number,\n queueSize: S.Number,\n capacity: S.Number,\n});\n\nexport type RateLimiterStats = typeof RateLimiterStatsSchema.Type;\n\n/**\n * Keyed Stats Schema\n */\nconst KeyedStatsSchema = S.Struct({\n activeKeys: S.Number,\n keys: S.Array(S.String),\n});\n\nexport type KeyedStats = typeof KeyedStatsSchema.Type;\n\n// ============================================\n// Generic Function Type\n// ============================================\n\n/** Generic callable function type */\ntype AnyFunction = (...args: never[]) => unknown;\n\n// ============================================\n// Throttle Function\n// ============================================\n\n/**\n * Throttle a function to only execute once per specified interval\n *\n * @param func Function to throttle\n * @param wait Wait time in milliseconds\n * @param options Throttle options\n * @returns Throttled function\n *\n * @example\n * ```ts\n * const fetchData = throttle(() => fetch('/api/data'), 1000);\n * fetchData(); // Executes immediately\n * fetchData(); // Ignored\n * fetchData(); // Ignored\n * // After 1000ms, next call will execute\n * ```\n */\nexport function throttle<T extends AnyFunction>(\n func: T,\n wait: number,\n options: ThrottleOptions = {},\n): ((...args: Parameters<T>) => ReturnType<T> | undefined) & { cancel: () => void } {\n let timeout: ReturnType<typeof setTimeout> | null = null;\n let previous = 0;\n let result: ReturnType<T> | undefined;\n\n const { leading = true, trailing = true } = options;\n\n const later = (context: unknown, args: Parameters<T>) => {\n previous = leading === false ? 
0 : Date.now();\n timeout = null;\n result = func.apply(context, args) as ReturnType<T>;\n };\n\n const throttled = function (this: unknown, ...args: Parameters<T>): ReturnType<T> | undefined {\n const now = Date.now();\n\n if (!previous && leading === false) {\n previous = now;\n }\n\n const remaining = wait - (now - previous);\n\n if (remaining <= 0 || remaining > wait) {\n if (timeout) {\n clearTimeout(timeout);\n timeout = null;\n }\n previous = now;\n result = func.apply(this, args) as ReturnType<T>;\n } else if (!timeout && trailing) {\n timeout = setTimeout(() => later(this, args), remaining);\n }\n\n return result;\n } as ((...args: Parameters<T>) => ReturnType<T> | undefined) & { cancel: () => void };\n\n throttled.cancel = () => {\n if (timeout) {\n clearTimeout(timeout);\n timeout = null;\n }\n previous = 0;\n };\n\n return throttled;\n}\n\n// ============================================\n// Debounce Function\n// ============================================\n\n/**\n * Debounce a function to only execute after it stops being called for specified time\n *\n * @param func Function to debounce\n * @param wait Wait time in milliseconds\n * @param options Debounce options\n * @returns Debounced function\n *\n * @example\n * ```ts\n * const search = debounce((query) => fetchSearchResults(query), 300);\n * search('a'); // Waiting...\n * search('ab'); // Waiting...\n * search('abc'); // Executes after 300ms of no calls\n * ```\n */\nexport function debounce<T extends AnyFunction>(\n func: T,\n wait: number,\n options: DebounceOptions = {},\n): ((...args: Parameters<T>) => void) & { cancel: () => void; flush: () => void } {\n let timeout: ReturnType<typeof setTimeout> | null = null;\n let lastCallTime = 0;\n let lastInvokeTime = 0;\n\n const { leading = false, maxWait } = options;\n\n const invokeFunc = (context: unknown, args: Parameters<T>) => {\n lastInvokeTime = Date.now();\n func.apply(context, args);\n };\n\n const shouldInvoke = (time: number) => {\n const 
timeSinceLastCall = time - lastCallTime;\n const timeSinceLastInvoke = time - lastInvokeTime;\n\n return (\n lastCallTime === 0 ||\n timeSinceLastCall >= wait ||\n timeSinceLastCall < 0 ||\n (maxWait !== undefined && timeSinceLastInvoke >= maxWait)\n );\n };\n\n const timerExpired = function (this: unknown, args: Parameters<T>) {\n timeout = null;\n invokeFunc(this, args);\n };\n\n const debounced = function (this: unknown, ...args: Parameters<T>): void {\n const time = Date.now();\n const isInvoking = shouldInvoke(time);\n\n lastCallTime = time;\n\n if (isInvoking && timeout === null && leading) {\n invokeFunc(this, args);\n timeout = setTimeout(() => timerExpired.call(this, args), wait);\n return;\n }\n\n if (timeout) {\n clearTimeout(timeout);\n }\n\n timeout = setTimeout(() => timerExpired.call(this, args), wait);\n } as ((...args: Parameters<T>) => void) & { cancel: () => void; flush: () => void };\n\n debounced.cancel = () => {\n if (timeout) {\n clearTimeout(timeout);\n timeout = null;\n }\n lastCallTime = 0;\n lastInvokeTime = 0;\n };\n\n debounced.flush = () => {\n if (timeout) {\n clearTimeout(timeout);\n timeout = null;\n }\n };\n\n return debounced;\n}\n\n// ============================================\n// Keyed Throttle Manager\n// ============================================\n\n/**\n * Per-key throttle manager for throttling by specific keys\n * Useful for throttling per-endpoint or per-user\n */\nexport class KeyedThrottle<T extends AnyFunction> {\n private throttles = new Map<\n string,\n ((...args: Parameters<T>) => ReturnType<T> | undefined) & { cancel: () => void }\n >();\n private readonly func: T;\n private readonly wait: number;\n private readonly options: ThrottleOptions;\n\n constructor(func: T, wait: number, options: ThrottleOptions = {}) {\n this.func = func;\n this.wait = wait;\n this.options = options;\n }\n\n /**\n * Execute function with throttling per key\n */\n public execute(key: string, ...args: Parameters<T>): ReturnType<T> | 
undefined {\n let throttled = this.throttles.get(key);\n\n if (!throttled) {\n throttled = throttle(this.func, this.wait, this.options);\n this.throttles.set(key, throttled);\n }\n\n return throttled(...args);\n }\n\n /**\n * Cancel throttle for specific key\n */\n public cancel(key: string): void {\n const throttled = this.throttles.get(key);\n if (throttled) {\n throttled.cancel();\n }\n this.throttles.delete(key);\n }\n\n /**\n * Cancel all throttles\n */\n public cancelAll(): void {\n for (const throttled of this.throttles.values()) {\n throttled.cancel();\n }\n this.throttles.clear();\n }\n\n /**\n * Get stats\n */\n public getStats(): KeyedStats {\n return {\n activeKeys: this.throttles.size,\n keys: Array.from(this.throttles.keys()),\n };\n }\n}\n\n// ============================================\n// Keyed Debounce Manager\n// ============================================\n\n/**\n * Per-key debounce manager for debouncing by specific keys\n * Useful for debouncing per-endpoint or per-user\n */\nexport class KeyedDebounce<T extends AnyFunction> {\n private debounces = new Map<\n string,\n ((...args: Parameters<T>) => void) & { cancel: () => void; flush: () => void }\n >();\n private readonly func: T;\n private readonly wait: number;\n private readonly options: DebounceOptions;\n\n constructor(func: T, wait: number, options: DebounceOptions = {}) {\n this.func = func;\n this.wait = wait;\n this.options = options;\n }\n\n /**\n * Execute function with debouncing per key\n */\n public execute(key: string, ...args: Parameters<T>): void {\n let debounced = this.debounces.get(key);\n\n if (!debounced) {\n debounced = debounce(this.func, this.wait, this.options);\n this.debounces.set(key, debounced);\n }\n\n debounced(...args);\n }\n\n /**\n * Cancel debounce for specific key\n */\n public cancel(key: string): void {\n const debounced = this.debounces.get(key);\n if (debounced) {\n debounced.cancel();\n }\n this.debounces.delete(key);\n }\n\n /**\n * Flush debounce 
for specific key (execute immediately)\n */\n public flush(key: string): void {\n const debounced = this.debounces.get(key);\n if (debounced) {\n debounced.flush();\n }\n }\n\n /**\n * Cancel all debounces\n */\n public cancelAll(): void {\n for (const debounced of this.debounces.values()) {\n debounced.cancel();\n }\n this.debounces.clear();\n }\n\n /**\n * Get stats\n */\n public getStats(): KeyedStats {\n return {\n activeKeys: this.debounces.size,\n keys: Array.from(this.debounces.keys()),\n };\n }\n}\n\n// ============================================\n// Token Bucket Rate Limiter\n// ============================================\n\n/**\n * Rate limiter using token bucket algorithm\n *\n * @example\n * ```ts\n * const limiter = new TokenBucketLimiter(5, 60000); // 5 requests per minute\n *\n * async function fetchData() {\n * await limiter.acquire();\n * return fetch('/api/data');\n * }\n * ```\n */\nexport class TokenBucketLimiter {\n private tokens: number;\n private lastRefill: number;\n private readonly capacity: number;\n private readonly refillRate: number;\n private readonly refillInterval: number;\n private queue: Array<() => void> = [];\n\n /**\n * @param capacity Maximum number of tokens (requests)\n * @param windowMs Time window in milliseconds\n */\n constructor(capacity: number, windowMs: number) {\n this.capacity = capacity;\n this.tokens = capacity;\n this.lastRefill = Date.now();\n this.refillRate = capacity;\n this.refillInterval = windowMs;\n }\n\n /**\n * Refill tokens based on elapsed time\n */\n private refill(): void {\n const now = Date.now();\n const elapsed = now - this.lastRefill;\n const tokensToAdd = (elapsed / this.refillInterval) * this.refillRate;\n\n if (tokensToAdd > 0) {\n this.tokens = Math.min(this.capacity, this.tokens + tokensToAdd);\n this.lastRefill = now;\n }\n }\n\n /**\n * Acquire a token (wait if none available)\n */\n public async acquire(): Promise<void> {\n this.refill();\n\n if (this.tokens >= 1) {\n this.tokens -= 
1;\n return Promise.resolve();\n }\n\n // Wait for token to become available\n return new Promise<void>((resolve) => {\n this.queue.push(resolve);\n this.scheduleNextRelease();\n });\n }\n\n /**\n * Try to acquire a token without waiting\n */\n public tryAcquire(): boolean {\n this.refill();\n\n if (this.tokens >= 1) {\n this.tokens -= 1;\n return true;\n }\n\n return false;\n }\n\n /**\n * Schedule next token release\n */\n private scheduleNextRelease(): void {\n if (this.queue.length === 0) return;\n\n const waitTime = this.refillInterval / this.refillRate;\n\n setTimeout(() => {\n this.refill();\n const resolve = this.queue.shift();\n if (resolve && this.tokens >= 1) {\n this.tokens -= 1;\n resolve();\n }\n this.scheduleNextRelease();\n }, waitTime);\n }\n\n /**\n * Get rate limiter stats\n */\n public getStats(): RateLimiterStats {\n this.refill();\n return {\n availableTokens: Math.floor(this.tokens),\n queueSize: this.queue.length,\n capacity: this.capacity,\n };\n }\n\n /**\n * Reset the rate limiter\n */\n public reset(): void {\n this.tokens = this.capacity;\n this.lastRefill = Date.now();\n this.queue = [];\n }\n}\n\n// ============================================\n// Leaky Bucket Rate Limiter\n// ============================================\n\n/**\n * Leaky bucket algorithm - requests \"leak\" out at a constant rate\n * Provides smoother rate limiting than token bucket\n */\nexport class LeakyBucketLimiter {\n private queue: Array<{ resolve: () => void; timestamp: number }> = [];\n private readonly capacity: number;\n private readonly leakRate: number; // requests per second\n private processing = false;\n\n /**\n * @param capacity Maximum queue size\n * @param requestsPerSecond How many requests to process per second\n */\n constructor(capacity: number, requestsPerSecond: number) {\n this.capacity = capacity;\n this.leakRate = 1000 / requestsPerSecond; // ms between requests\n }\n\n /**\n * Add a request to the bucket\n */\n public async acquire(): 
Promise<void> {\n if (this.queue.length >= this.capacity) {\n throw new Error('Rate limit exceeded: queue full');\n }\n\n return new Promise<void>((resolve) => {\n this.queue.push({ resolve, timestamp: Date.now() });\n this.processQueue();\n });\n }\n\n /**\n * Try to acquire without blocking\n */\n public tryAcquire(): boolean {\n if (this.queue.length >= this.capacity) {\n return false;\n }\n\n // Check if we can process immediately\n if (!this.processing && this.queue.length === 0) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Process the queue at the leak rate\n */\n private processQueue(): void {\n if (this.processing || this.queue.length === 0) return;\n\n this.processing = true;\n\n const processNext = () => {\n const item = this.queue.shift();\n if (item) {\n item.resolve();\n }\n\n if (this.queue.length > 0) {\n setTimeout(processNext, this.leakRate);\n } else {\n this.processing = false;\n }\n };\n\n processNext();\n }\n\n /**\n * Get stats\n */\n public getStats(): RateLimiterStats {\n return {\n availableTokens: this.capacity - this.queue.length,\n queueSize: this.queue.length,\n capacity: this.capacity,\n };\n }\n\n /**\n * Clear the queue\n */\n public reset(): void {\n this.queue = [];\n this.processing = false;\n }\n}\n\n// ============================================\n// Sliding Window Counter\n// ============================================\n\n/**\n * Sliding window counter for accurate rate limiting\n */\nexport class SlidingWindowCounter {\n private counters = new Map<string, { current: number; previous: number; windowStart: number }>();\n private readonly windowMs: number;\n private readonly maxRequests: number;\n\n constructor(windowMs: number, maxRequests: number) {\n this.windowMs = windowMs;\n this.maxRequests = maxRequests;\n\n // Cleanup old entries periodically\n setInterval(() => this.cleanup(), windowMs);\n }\n\n /**\n * Check and increment counter for a key\n */\n public check(key: string): { allowed: boolean; remaining: 
number; resetAt: number } {\n const now = Date.now();\n const windowStart = Math.floor(now / this.windowMs) * this.windowMs;\n const previousWindowStart = windowStart - this.windowMs;\n\n let counter = this.counters.get(key);\n\n if (!counter) {\n counter = { current: 0, previous: 0, windowStart };\n this.counters.set(key, counter);\n }\n\n // Roll over to new window if needed\n if (counter.windowStart < previousWindowStart) {\n counter.previous = 0;\n counter.current = 0;\n counter.windowStart = windowStart;\n } else if (counter.windowStart < windowStart) {\n counter.previous = counter.current;\n counter.current = 0;\n counter.windowStart = windowStart;\n }\n\n // Calculate weighted count\n const windowPosition = (now - windowStart) / this.windowMs;\n const weightedCount = counter.previous * (1 - windowPosition) + counter.current;\n\n const allowed = weightedCount < this.maxRequests;\n\n if (allowed) {\n counter.current++;\n }\n\n return {\n allowed,\n remaining: Math.max(0, Math.floor(this.maxRequests - weightedCount - (allowed ? 
1 : 0))),\n resetAt: windowStart + this.windowMs,\n };\n }\n\n /**\n * Reset counter for a key\n */\n public reset(key: string): void {\n this.counters.delete(key);\n }\n\n /**\n * Cleanup old entries\n */\n private cleanup(): void {\n const cutoff = Date.now() - this.windowMs * 2;\n for (const [key, counter] of this.counters.entries()) {\n if (counter.windowStart < cutoff) {\n this.counters.delete(key);\n }\n }\n }\n\n /**\n * Get stats\n */\n public getStats(): KeyedStats {\n return {\n activeKeys: this.counters.size,\n keys: Array.from(this.counters.keys()),\n };\n }\n}\n\n// ============================================\n// Exports\n// ============================================\n\nexport { DebounceOptionsSchema, KeyedStatsSchema, RateLimiterStatsSchema, ThrottleOptionsSchema };\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmCA,MAAM,wBAAwBA,OAAE,OAAO;CAErC,SAASA,OAAE,SAASA,OAAE,QAAQ;CAE9B,UAAUA,OAAE,SAASA,OAAE,QAAQ;CAChC,CAAC;;;;AAOF,MAAM,wBAAwBA,OAAE,OAAO;CAErC,SAASA,OAAE,SAASA,OAAE,QAAQ;CAE9B,SAASA,OAAE,SAASA,OAAE,OAAO;CAC9B,CAAC;;;;AAOF,MAAM,yBAAyBA,OAAE,OAAO;CACtC,iBAAiBA,OAAE;CACnB,WAAWA,OAAE;CACb,UAAUA,OAAE;CACb,CAAC;;;;AAOF,MAAM,mBAAmBA,OAAE,OAAO;CAChC,YAAYA,OAAE;CACd,MAAMA,OAAE,MAAMA,OAAE,OAAO;CACxB,CAAC;;;;;;;;;;;;;;;;;;AAgCF,SAAgB,SACd,MACA,MACA,UAA2B,EAAE,EACqD;CAClF,IAAI,UAAgD;CACpD,IAAI,WAAW;CACf,IAAI;CAEJ,MAAM,EAAE,UAAU,MAAM,WAAW,SAAS;CAE5C,MAAM,SAAS,SAAkB,SAAwB;AACvD,aAAW,YAAY,QAAQ,IAAI,KAAK,KAAK;AAC7C,YAAU;AACV,WAAS,KAAK,MAAM,SAAS,KAAK;;CAGpC,MAAM,YAAY,SAAyB,GAAG,MAAgD;EAC5F,MAAM,MAAM,KAAK,KAAK;AAEtB,MAAI,CAAC,YAAY,YAAY,MAC3B,YAAW;EAGb,MAAM,YAAY,QAAQ,MAAM;AAEhC,MAAI,aAAa,KAAK,YAAY,MAAM;AACtC,OAAI,SAAS;AACX,iBAAa,QAAQ;AACrB,cAAU;;AAEZ,cAAW;AACX,YAAS,KAAK,MAAM,MAAM,KAAK;aACtB,CAAC,WAAW,SACrB,WAAU,iBAAiB,MAAM,MAAM,KAAK,EAAE,UAAU;AAG1D,SAAO;;AAGT,WAAU,eAAe;AACvB,MAAI,SAAS;AACX,gBAAa,QAAQ;AACrB,aAAU;;AAEZ,aAAW;;AAGb,QAAO;;;;;;;;;;;;;;;;;;AAuBT,SAAgB,SACd,MACA,MACA,UAA2B,EAAE,EACmD;CAChF,IAAI,UAAgD;CACpD,IAAI,eAAe;CACnB,IAAI,iBAAiB;CAErB,MAAM,EAAE,UAA
U,OAAO,YAAY;CAErC,MAAM,cAAc,SAAkB,SAAwB;AAC5D,mBAAiB,KAAK,KAAK;AAC3B,OAAK,MAAM,SAAS,KAAK;;CAG3B,MAAM,gBAAgB,SAAiB;EACrC,MAAM,oBAAoB,OAAO;EACjC,MAAM,sBAAsB,OAAO;AAEnC,SACE,iBAAiB,KACjB,qBAAqB,QACrB,oBAAoB,KACnB,YAAY,KAAA,KAAa,uBAAuB;;CAIrD,MAAM,eAAe,SAAyB,MAAqB;AACjE,YAAU;AACV,aAAW,MAAM,KAAK;;CAGxB,MAAM,YAAY,SAAyB,GAAG,MAA2B;EACvE,MAAM,OAAO,KAAK,KAAK;EACvB,MAAM,aAAa,aAAa,KAAK;AAErC,iBAAe;AAEf,MAAI,cAAc,YAAY,QAAQ,SAAS;AAC7C,cAAW,MAAM,KAAK;AACtB,aAAU,iBAAiB,aAAa,KAAK,MAAM,KAAK,EAAE,KAAK;AAC/D;;AAGF,MAAI,QACF,cAAa,QAAQ;AAGvB,YAAU,iBAAiB,aAAa,KAAK,MAAM,KAAK,EAAE,KAAK;;AAGjE,WAAU,eAAe;AACvB,MAAI,SAAS;AACX,gBAAa,QAAQ;AACrB,aAAU;;AAEZ,iBAAe;AACf,mBAAiB;;AAGnB,WAAU,cAAc;AACtB,MAAI,SAAS;AACX,gBAAa,QAAQ;AACrB,aAAU;;;AAId,QAAO;;;;;;AAWT,IAAa,gBAAb,MAAkD;CAChD,4BAAoB,IAAI,KAGrB;CACH;CACA;CACA;CAEA,YAAY,MAAS,MAAc,UAA2B,EAAE,EAAE;AAChE,OAAK,OAAO;AACZ,OAAK,OAAO;AACZ,OAAK,UAAU;;;;;CAMjB,QAAe,KAAa,GAAG,MAAgD;EAC7E,IAAI,YAAY,KAAK,UAAU,IAAI,IAAI;AAEvC,MAAI,CAAC,WAAW;AACd,eAAY,SAAS,KAAK,MAAM,KAAK,MAAM,KAAK,QAAQ;AACxD,QAAK,UAAU,IAAI,KAAK,UAAU;;AAGpC,SAAO,UAAU,GAAG,KAAK;;;;;CAM3B,OAAc,KAAmB;EAC/B,MAAM,YAAY,KAAK,UAAU,IAAI,IAAI;AACzC,MAAI,UACF,WAAU,QAAQ;AAEpB,OAAK,UAAU,OAAO,IAAI;;;;;CAM5B,YAAyB;AACvB,OAAK,MAAM,aAAa,KAAK,UAAU,QAAQ,CAC7C,WAAU,QAAQ;AAEpB,OAAK,UAAU,OAAO;;;;;CAMxB,WAA8B;AAC5B,SAAO;GACL,YAAY,KAAK,UAAU;GAC3B,MAAM,MAAM,KAAK,KAAK,UAAU,MAAM,CAAC;GACxC;;;;;;;AAYL,IAAa,gBAAb,MAAkD;CAChD,4BAAoB,IAAI,KAGrB;CACH;CACA;CACA;CAEA,YAAY,MAAS,MAAc,UAA2B,EAAE,EAAE;AAChE,OAAK,OAAO;AACZ,OAAK,OAAO;AACZ,OAAK,UAAU;;;;;CAMjB,QAAe,KAAa,GAAG,MAA2B;EACxD,IAAI,YAAY,KAAK,UAAU,IAAI,IAAI;AAEvC,MAAI,CAAC,WAAW;AACd,eAAY,SAAS,KAAK,MAAM,KAAK,MAAM,KAAK,QAAQ;AACxD,QAAK,UAAU,IAAI,KAAK,UAAU;;AAGpC,YAAU,GAAG,KAAK;;;;;CAMpB,OAAc,KAAmB;EAC/B,MAAM,YAAY,KAAK,UAAU,IAAI,IAAI;AACzC,MAAI,UACF,WAAU,QAAQ;AAEpB,OAAK,UAAU,OAAO,IAAI;;;;;CAM5B,MAAa,KAAmB;EAC9B,MAAM,YAAY,KAAK,UAAU,IAAI,IAAI;AACzC,MAAI,UACF,WAAU,OAAO;;;;;CAOrB,YAAyB;AACvB,OAAK,MAAM,aAAa,KAAK,UAAU,QAAQ,CAC7C,WAAU,QAAQ;AAEpB,OAAK,UAAU,OAAO;;;;;CAMxB,WAA8B;AAC5B,SAAO;G
ACL,YAAY,KAAK,UAAU;GAC3B,MAAM,MAAM,KAAK,KAAK,UAAU,MAAM,CAAC;GACxC;;;;;;;;;;;;;;;;AAqBL,IAAa,qBAAb,MAAgC;CAC9B;CACA;CACA;CACA;CACA;CACA,QAAmC,EAAE;;;;;CAMrC,YAAY,UAAkB,UAAkB;AAC9C,OAAK,WAAW;AAChB,OAAK,SAAS;AACd,OAAK,aAAa,KAAK,KAAK;AAC5B,OAAK,aAAa;AAClB,OAAK,iBAAiB;;;;;CAMxB,SAAuB;EACrB,MAAM,MAAM,KAAK,KAAK;EAEtB,MAAM,eADU,MAAM,KAAK,cACI,KAAK,iBAAkB,KAAK;AAE3D,MAAI,cAAc,GAAG;AACnB,QAAK,SAAS,KAAK,IAAI,KAAK,UAAU,KAAK,SAAS,YAAY;AAChE,QAAK,aAAa;;;;;;CAOtB,MAAa,UAAyB;AACpC,OAAK,QAAQ;AAEb,MAAI,KAAK,UAAU,GAAG;AACpB,QAAK,UAAU;AACf,UAAO,QAAQ,SAAS;;AAI1B,SAAO,IAAI,SAAe,YAAY;AACpC,QAAK,MAAM,KAAK,QAAQ;AACxB,QAAK,qBAAqB;IAC1B;;;;;CAMJ,aAA6B;AAC3B,OAAK,QAAQ;AAEb,MAAI,KAAK,UAAU,GAAG;AACpB,QAAK,UAAU;AACf,UAAO;;AAGT,SAAO;;;;;CAMT,sBAAoC;AAClC,MAAI,KAAK,MAAM,WAAW,EAAG;EAE7B,MAAM,WAAW,KAAK,iBAAiB,KAAK;AAE5C,mBAAiB;AACf,QAAK,QAAQ;GACb,MAAM,UAAU,KAAK,MAAM,OAAO;AAClC,OAAI,WAAW,KAAK,UAAU,GAAG;AAC/B,SAAK,UAAU;AACf,aAAS;;AAEX,QAAK,qBAAqB;KACzB,SAAS;;;;;CAMd,WAAoC;AAClC,OAAK,QAAQ;AACb,SAAO;GACL,iBAAiB,KAAK,MAAM,KAAK,OAAO;GACxC,WAAW,KAAK,MAAM;GACtB,UAAU,KAAK;GAChB;;;;;CAMH,QAAqB;AACnB,OAAK,SAAS,KAAK;AACnB,OAAK,aAAa,KAAK,KAAK;AAC5B,OAAK,QAAQ,EAAE;;;;;;;AAYnB,IAAa,qBAAb,MAAgC;CAC9B,QAAmE,EAAE;CACrE;CACA;CACA,aAAqB;;;;;CAMrB,YAAY,UAAkB,mBAA2B;AACvD,OAAK,WAAW;AAChB,OAAK,WAAW,MAAO;;;;;CAMzB,MAAa,UAAyB;AACpC,MAAI,KAAK,MAAM,UAAU,KAAK,SAC5B,OAAM,IAAI,MAAM,kCAAkC;AAGpD,SAAO,IAAI,SAAe,YAAY;AACpC,QAAK,MAAM,KAAK;IAAE;IAAS,WAAW,KAAK,KAAK;IAAE,CAAC;AACnD,QAAK,cAAc;IACnB;;;;;CAMJ,aAA6B;AAC3B,MAAI,KAAK,MAAM,UAAU,KAAK,SAC5B,QAAO;AAIT,MAAI,CAAC,KAAK,cAAc,KAAK,MAAM,WAAW,EAC5C,QAAO;AAGT,SAAO;;;;;CAMT,eAA6B;AAC3B,MAAI,KAAK,cAAc,KAAK,MAAM,WAAW,EAAG;AAEhD,OAAK,aAAa;EAElB,MAAM,oBAAoB;GACxB,MAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,OAAI,KACF,MAAK,SAAS;AAGhB,OAAI,KAAK,MAAM,SAAS,EACtB,YAAW,aAAa,KAAK,SAAS;OAEtC,MAAK,aAAa;;AAItB,eAAa;;;;;CAMf,WAAoC;AAClC,SAAO;GACL,iBAAiB,KAAK,WAAW,KAAK,MAAM;GAC5C,WAAW,KAAK,MAAM;GACtB,UAAU,KAAK;GAChB;;;;;CAMH,QAAqB;AACnB,OAAK,QAAQ,EAAE;AACf,OAAK,aAAa;;;;;;AAWtB,IAAa,uBAAb,MAAkC;CAChC,2BAAmB,IAAI,KAAyE;
CAChG;CACA;CAEA,YAAY,UAAkB,aAAqB;AACjD,OAAK,WAAW;AAChB,OAAK,cAAc;AAGnB,oBAAkB,KAAK,SAAS,EAAE,SAAS;;;;;CAM7C,MAAa,KAAuE;EAClF,MAAM,MAAM,KAAK,KAAK;EACtB,MAAM,cAAc,KAAK,MAAM,MAAM,KAAK,SAAS,GAAG,KAAK;EAC3D,MAAM,sBAAsB,cAAc,KAAK;EAE/C,IAAI,UAAU,KAAK,SAAS,IAAI,IAAI;AAEpC,MAAI,CAAC,SAAS;AACZ,aAAU;IAAE,SAAS;IAAG,UAAU;IAAG;IAAa;AAClD,QAAK,SAAS,IAAI,KAAK,QAAQ;;AAIjC,MAAI,QAAQ,cAAc,qBAAqB;AAC7C,WAAQ,WAAW;AACnB,WAAQ,UAAU;AAClB,WAAQ,cAAc;aACb,QAAQ,cAAc,aAAa;AAC5C,WAAQ,WAAW,QAAQ;AAC3B,WAAQ,UAAU;AAClB,WAAQ,cAAc;;EAIxB,MAAM,kBAAkB,MAAM,eAAe,KAAK;EAClD,MAAM,gBAAgB,QAAQ,YAAY,IAAI,kBAAkB,QAAQ;EAExE,MAAM,UAAU,gBAAgB,KAAK;AAErC,MAAI,QACF,SAAQ;AAGV,SAAO;GACL;GACA,WAAW,KAAK,IAAI,GAAG,KAAK,MAAM,KAAK,cAAc,iBAAiB,UAAU,IAAI,GAAG,CAAC;GACxF,SAAS,cAAc,KAAK;GAC7B;;;;;CAMH,MAAa,KAAmB;AAC9B,OAAK,SAAS,OAAO,IAAI;;;;;CAM3B,UAAwB;EACtB,MAAM,SAAS,KAAK,KAAK,GAAG,KAAK,WAAW;AAC5C,OAAK,MAAM,CAAC,KAAK,YAAY,KAAK,SAAS,SAAS,CAClD,KAAI,QAAQ,cAAc,OACxB,MAAK,SAAS,OAAO,IAAI;;;;;CAQ/B,WAA8B;AAC5B,SAAO;GACL,YAAY,KAAK,SAAS;GAC1B,MAAM,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;GACvC"}
package/package.json ADDED
@@ -0,0 +1,59 @@
1
+ {
2
+ "name": "@resq-sw/rate-limiting",
3
+ "version": "0.1.0",
4
+ "description": "Rate limiting algorithms, throttle/debounce utilities, and HTTP middleware",
5
+ "license": "Apache-2.0",
6
+ "type": "module",
7
+ "exports": {
8
+ ".": {
9
+ "types": "./lib/index.d.ts",
10
+ "import": "./lib/index.js",
11
+ "default": "./lib/index.js"
12
+ }
13
+ },
14
+ "main": "lib/index.js",
15
+ "types": "lib/index.d.ts",
16
+ "files": ["lib", "README.md"],
17
+ "scripts": {
18
+ "build": "tsdown",
19
+ "test": "vitest run"
20
+ },
21
+ "peerDependencies": {
22
+ "effect": ">=3.0.0",
23
+ "@upstash/redis": ">=1.0.0",
24
+ "@upstash/ratelimit": ">=2.0.0"
25
+ },
26
+ "peerDependenciesMeta": {
27
+ "effect": {
28
+ "optional": true
29
+ },
30
+ "@upstash/redis": {
31
+ "optional": true
32
+ },
33
+ "@upstash/ratelimit": {
34
+ "optional": true
35
+ }
36
+ },
37
+ "devDependencies": {
38
+ "@upstash/ratelimit": "^2.0.5",
39
+ "@upstash/redis": "^1.36.3",
40
+ "effect": "4.0.0-beta.43",
41
+ "tsdown": "^0.21.7",
42
+ "typescript": "5.9.3",
43
+ "vitest": "3.2.4"
44
+ },
45
+ "publishConfig": {
46
+ "access": "public",
47
+ "provenance": true,
48
+ "registry": "https://registry.npmjs.org/"
49
+ },
50
+ "repository": {
51
+ "type": "git",
52
+ "url": "git+https://github.com/resq-software/npm.git",
53
+ "directory": "packages/rate-limiting"
54
+ },
55
+ "keywords": ["rate-limiting", "throttle", "debounce", "sliding-window", "token-bucket", "leaky-bucket", "middleware"],
56
+ "engines": {
57
+ "node": ">=20.19.0"
58
+ }
59
+ }