@upstash/ratelimit 0.1.3-rc.0 → 0.1.4-rc.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.releaserc ADDED
@@ -0,0 +1,14 @@
1
+ {
2
+ "branches": [
3
+ {
4
+ "name": "release"
5
+ },
6
+ {
7
+ "name": "main",
8
+ "channel": "next",
9
+ "prerelease": "next"
10
+ }
11
+ ],
12
+ "dryRun": false,
13
+ "ci": true
14
+ }
package/README.md CHANGED
@@ -25,8 +25,9 @@ It is the only connectionless (HTTP based) ratelimiter and designed for:
25
25
  - [Create database](#create-database)
26
26
  - [Use it](#use-it)
27
27
  - [Block until ready](#block-until-ready)
28
- - [Globally replicated ratelimiting](#globally-replicated-ratelimiting)
28
+ - [MultiRegionly replicated ratelimiting](#multiregionly-replicated-ratelimiting)
29
29
  - [Usage](#usage)
30
+ - [Asynchronous synchronization between databases](#asynchronous-synchronization-between-databases)
30
31
  - [Example](#example)
31
32
  - [Ratelimiting algorithms](#ratelimiting-algorithms)
32
33
  - [Fixed Window](#fixed-window)
@@ -99,29 +100,50 @@ return "Here you go!";
99
100
 
100
101
  The `limit` method returns some more metadata that might be useful to you:
101
102
 
102
- ```ts
103
+ ````ts
103
104
  export type RatelimitResponse = {
104
105
  /**
105
106
  * Whether the request may pass(true) or exceeded the limit(false)
106
107
  */
107
108
  success: boolean;
108
-
109
109
  /**
110
110
  * Maximum number of requests allowed within a window.
111
111
  */
112
112
  limit: number;
113
-
114
113
  /**
115
114
  * How many requests the user has left within the current window.
116
115
  */
117
116
  remaining: number;
118
-
119
117
  /**
120
118
  * Unix timestamp in milliseconds when the limits are reset.
121
119
  */
122
120
  reset: number;
121
+
122
+ /**
123
+ * For the MultiRegion setup we do some synchronizing in the background, after returning the current limit.
124
+ * In most cases you can simply ignore this.
125
+ *
126
+ * On Vercel Edge or Cloudflare Workers, you need to explicitly handle the pending Promise like this:
127
+ *
128
+ * **Vercel Edge:**
129
+ * https://nextjs.org/docs/api-reference/next/server#nextfetchevent
130
+ *
131
+ * ```ts
132
+ * const { pending } = await ratelimit.limit("id")
133
+ * event.waitUntil(pending)
134
+ * ```
135
+ *
136
+ * **Cloudflare Worker:**
137
+ * https://developers.cloudflare.com/workers/runtime-apis/fetch-event/#syntax-module-worker
138
+ *
139
+ * ```ts
140
+ * const { pending } = await ratelimit.limit("id")
141
+ * context.waitUntil(pending)
142
+ * ```
143
+ */
144
+ pending: Promise<unknown>;
123
145
  };
124
- ```
146
+ ````
125
147
 
126
148
  ### Block until ready
127
149
 
@@ -155,6 +177,36 @@ doExpensiveCalculation();
155
177
  return "Here you go!";
156
178
  ```
157
179
 
180
+ ### Ephemeral Cache
181
+
182
+ For extreme load or denial of service attacks, it might be too expensive to call
183
+ redis for every incoming request, just to find out that the request should be blocked
184
+ because the caller has already exceeded the limit.
185
+
186
+ You can use an ephemeral in-memory cache by passing the `ephemeralCache`
187
+ option:
188
+
189
+ ```ts
190
+ const cache = new Map(); // must be outside of your serverless function handler
191
+
192
+ // ...
193
+
194
+ const ratelimit = new Ratelimit({
195
+ // ...
196
+ ephemeralCache: cache,
197
+ });
198
+ ```
199
+
200
+ If enabled, the ratelimiter will keep a global cache of identifiers that have
201
+ exhausted their ratelimit, together with their reset timestamps. In serverless environments this
202
+ is only possible if you create the ratelimiter instance outside of your handler
203
+ function. While the function is still hot, the ratelimiter can block requests
204
+ without having to request data from redis, thus saving time and money.
205
+
206
+ Whenever an identifier has exceeded its limit, the ratelimiter will add it to an
207
+ internal list together with its reset timestamp. If the same identifier makes a
208
+ new request before it is reset, we can immediately reject it.
209
+
158
210
  ## MultiRegionly replicated ratelimiting
159
211
 
160
212
  Using a single redis instance has the downside of providing low latencies to the
@@ -181,9 +233,15 @@ import { Redis } from "@upstash/redis";
181
233
  // Create a new ratelimiter, that allows 10 requests per 10 seconds
182
234
  const ratelimit = new MultiRegionRatelimit({
183
235
  redis: [
184
- new Redis({/* auth */}),
185
- new Redis({/* auth */}),
186
- new Redis({/* auth */}),
236
+ new Redis({
237
+ /* auth */
238
+ }),
239
+ new Redis({
240
+ /* auth */
241
+ }),
242
+ new Redis({
243
+ /* auth */
244
+ }),
187
245
  ],
188
246
  limiter: Ratelimit.slidingWindow(10, "10 s"),
189
247
  });
@@ -194,6 +252,29 @@ const identifier = "api";
194
252
  const { success } = await ratelimit.limit(identifier);
195
253
  ```
196
254
 
255
+ ### Asynchronous synchronization between databases
256
+
257
+ The MultiRegion setup will do some synchronization between databases after
258
+ returning the current limit. This can lead to problems on Cloudflare Workers and
259
+ by extension Vercel Edge Functions, because dangling promises must be handled
259
+ explicitly:
261
+
262
+ **Vercel Edge:**
263
+ [docs](https://nextjs.org/docs/api-reference/next/server#nextfetchevent)
264
+
265
+ ```ts
266
+ const { pending } = await ratelimit.limit("id");
267
+ event.waitUntil(pending);
268
+ ```
269
+
270
+ **Cloudflare Worker:**
271
+ [docs](https://developers.cloudflare.com/workers/runtime-apis/fetch-event/#syntax-module-worker)
272
+
273
+ ```ts
274
+ const { pending } = await ratelimit.limit("id");
275
+ context.waitUntil(pending);
276
+ ```
277
+
197
278
  ### Example
198
279
 
199
280
  Let's assume you have customers in the US and Europe. In this case you can
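
Taken together, the README additions above introduce two behaviours: an optional ephemeral cache and the `pending` promise returned by `limit`. Below is a minimal sketch of how both might be wired in one edge handler. It is illustrative only: the option spelling `ephermeralCache` follows this version's type definitions, and `Redis.fromEnv()` plus the Next.js middleware signature are assumptions, not part of this diff.

```ts
import { Ratelimit } from "@upstash/ratelimit";
import { Redis } from "@upstash/redis";
import type { NextRequest, NextFetchEvent } from "next/server";

// Declared at module scope so the ratelimiter and its cache survive warm invocations.
const cache = new Map<string, number>();

const ratelimit = new Ratelimit({
  redis: Redis.fromEnv(), // assumed helper reading UPSTASH_REDIS_REST_URL/TOKEN
  limiter: Ratelimit.slidingWindow(10, "10 s"),
  ephermeralCache: cache, // spelled as in this version's type definitions
});

export default async function middleware(req: NextRequest, event: NextFetchEvent) {
  const identifier = req.ip ?? "anonymous";
  const { success, reset, pending } = await ratelimit.limit(identifier);

  // Hand the background work to the runtime so it is not dropped (relevant for MultiRegion).
  event.waitUntil(pending);

  if (!success) {
    return new Response("Too many requests", {
      status: 429,
      // `reset` is a Unix timestamp in milliseconds.
      headers: { "Retry-After": Math.ceil((reset - Date.now()) / 1000).toString() },
    });
  }
  return new Response("Here you go!");
}
```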
package/esm/cache.js ADDED
@@ -0,0 +1,28 @@
1
+ export class Cache {
2
+ constructor(cache) {
3
+ /**
4
+ * Stores identifier -> reset (in milliseconds)
5
+ */
6
+ Object.defineProperty(this, "cache", {
7
+ enumerable: true,
8
+ configurable: true,
9
+ writable: true,
10
+ value: void 0
11
+ });
12
+ this.cache = cache;
13
+ }
14
+ isBlocked(identifier) {
15
+ if (!this.cache.has(identifier)) {
16
+ return { blocked: false, reset: 0 };
17
+ }
18
+ const reset = this.cache.get(identifier);
19
+ if (reset < Date.now()) {
20
+ this.cache.delete(identifier);
21
+ return { blocked: false, reset: 0 };
22
+ }
23
+ return { blocked: true, reset: reset };
24
+ }
25
+ blockUntil(identifier, reset) {
26
+ this.cache.set(identifier, reset);
27
+ }
28
+ }
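
The added `Cache` is a thin wrapper around a plain `Map` of identifier → reset timestamp. A short usage sketch of its two methods, assuming the esm build path shown above:

```ts
import { Cache } from "./cache.js";

const cache = new Cache(new Map());

// Unknown identifiers are never blocked.
cache.isBlocked("user_1"); // { blocked: false, reset: 0 }

// Record that "user_1" has exhausted its limit until one minute from now.
const reset = Date.now() + 60_000;
cache.blockUntil("user_1", reset);

cache.isBlocked("user_1"); // { blocked: true, reset }

// Eviction is lazy: once `reset` has passed, the next isBlocked() call deletes
// the entry and reports { blocked: false, reset: 0 } again.
```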
package/esm/multi.js CHANGED
@@ -1,5 +1,6 @@
1
1
  import { ms } from "./duration.js";
2
2
  import { Ratelimit } from "./ratelimit.js";
3
+ import { Cache } from "./cache.js";
3
4
  /**
4
5
  * Ratelimiter using serverless redis from https://upstash.com/
5
6
  *
@@ -23,7 +24,10 @@ export class MultiRegionRatelimit extends Ratelimit {
23
24
  super({
24
25
  prefix: config.prefix,
25
26
  limiter: config.limiter,
26
- ctx: { redis: config.redis },
27
+ ctx: {
28
+ redis: config.redis,
29
+ cache: config.ephermeralCache ? new Cache() : undefined,
30
+ },
27
31
  });
28
32
  }
29
33
  /**
@@ -70,6 +74,18 @@ export class MultiRegionRatelimit extends Ratelimit {
70
74
  return members
71
75
  `;
72
76
  return async function (ctx, identifier) {
77
+ if (ctx.cache) {
78
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
79
+ if (blocked) {
80
+ return {
81
+ success: false,
82
+ limit: tokens,
83
+ remaining: 0,
84
+ reset: reset,
85
+ pending: Promise.resolve(),
86
+ };
87
+ }
88
+ }
73
89
  const requestID = crypto.randomUUID();
74
90
  const bucket = Math.floor(Date.now() / windowDuration);
75
91
  const key = [identifier, bucket].join(":");
@@ -108,11 +124,16 @@ export class MultiRegionRatelimit extends Ratelimit {
108
124
  /**
109
125
  * Do not await sync. This should not run in the critical path.
110
126
  */
127
+ const success = remaining > 0;
128
+ const reset = (bucket + 1) * windowDuration;
129
+ if (ctx.cache && !success) {
130
+ ctx.cache.blockUntil(identifier, reset);
131
+ }
111
132
  return {
112
- success: remaining > 0,
133
+ success,
113
134
  limit: tokens,
114
135
  remaining,
115
- reset: (bucket + 1) * windowDuration,
136
+ reset,
116
137
  pending: sync(),
117
138
  };
118
139
  };
@@ -173,6 +194,18 @@ export class MultiRegionRatelimit extends Ratelimit {
173
194
  `;
174
195
  const windowDuration = ms(window);
175
196
  return async function (ctx, identifier) {
197
+ if (ctx.cache) {
198
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
199
+ if (blocked) {
200
+ return {
201
+ success: false,
202
+ limit: tokens,
203
+ remaining: 0,
204
+ reset: reset,
205
+ pending: Promise.resolve(),
206
+ };
207
+ }
208
+ }
176
209
  const requestID = crypto.randomUUID();
177
210
  const now = Date.now();
178
211
  const currentWindow = Math.floor(now / windowSize);
@@ -213,14 +246,16 @@ export class MultiRegionRatelimit extends Ratelimit {
213
246
  await db.redis.sadd(currentKey, ...allIDs);
214
247
  }
215
248
  }
216
- /**
217
- * Do not await sync. This should not run in the critical path.
218
- */
249
+ const success = remaining > 0;
250
+ const reset = (currentWindow + 1) * windowDuration;
251
+ if (ctx.cache && !success) {
252
+ ctx.cache.blockUntil(identifier, reset);
253
+ }
219
254
  return {
220
- success: remaining > 0,
255
+ success,
221
256
  limit: tokens,
222
257
  remaining,
223
- reset: (currentWindow + 1) * windowDuration,
258
+ reset,
224
259
  pending: sync(),
225
260
  };
226
261
  };
package/esm/ratelimit.js CHANGED
@@ -1,3 +1,4 @@
1
+ import { Cache } from "./cache.js";
1
2
  /**
2
3
  * Ratelimiter using serverless redis from https://upstash.com/
3
4
  *
@@ -125,5 +126,11 @@ export class Ratelimit {
125
126
  this.ctx = config.ctx;
126
127
  this.limiter = config.limiter;
127
128
  this.prefix = config.prefix ?? "@upstash/ratelimit";
129
+ if (config.ephermeralCache instanceof Map) {
130
+ this.ctx.cache = new Cache(config.ephermeralCache);
131
+ }
132
+ else if (typeof config.ephermeralCache === "undefined") {
133
+ this.ctx.cache = new Cache(new Map());
134
+ }
128
135
  }
129
136
  }
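
With the constructor change above, the `ephermeralCache` option resolves in three ways: a user-supplied `Map` is wrapped in a `Cache`, leaving it undefined creates an internal `Map`, and `false` leaves `ctx.cache` unset so every request goes to redis. A sketch of the three configurations; `Redis.fromEnv()` and the `Ratelimit` export name are assumptions based on the README:

```ts
import { Ratelimit } from "@upstash/ratelimit";
import { Redis } from "@upstash/redis";

const redis = Redis.fromEnv(); // assumed helper from @upstash/redis
const limiter = Ratelimit.fixedWindow(10, "10 s");

// 1. Bring your own Map: shared across warm invocations when declared at module scope.
const shared = new Map<string, number>();
const withSharedCache = new Ratelimit({ redis, limiter, ephermeralCache: shared });

// 2. Leave it undefined: the constructor creates an internal Map automatically.
const withDefaultCache = new Ratelimit({ redis, limiter });

// 3. Pass `false`: no cache is attached and every limit check hits redis.
const withoutCache = new Ratelimit({ redis, limiter, ephermeralCache: false });
```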
package/esm/single.js CHANGED
@@ -23,7 +23,10 @@ export class RegionRatelimit extends Ratelimit {
23
23
  super({
24
24
  prefix: config.prefix,
25
25
  limiter: config.limiter,
26
- ctx: { redis: config.redis },
26
+ ctx: {
27
+ redis: config.redis,
28
+ },
29
+ ephermeralCache: config.ephermeralCache,
27
30
  });
28
31
  }
29
32
  /**
@@ -70,12 +73,29 @@ export class RegionRatelimit extends Ratelimit {
70
73
  return async function (ctx, identifier) {
71
74
  const bucket = Math.floor(Date.now() / windowDuration);
72
75
  const key = [identifier, bucket].join(":");
76
+ if (ctx.cache) {
77
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
78
+ if (blocked) {
79
+ return {
80
+ success: false,
81
+ limit: tokens,
82
+ remaining: 0,
83
+ reset: reset,
84
+ pending: Promise.resolve(),
85
+ };
86
+ }
87
+ }
73
88
  const usedTokensAfterUpdate = (await ctx.redis.eval(script, [key], [windowDuration]));
89
+ const success = usedTokensAfterUpdate <= tokens;
90
+ const reset = (bucket + 1) * windowDuration;
91
+ if (ctx.cache && !success) {
92
+ ctx.cache.blockUntil(identifier, reset);
93
+ }
74
94
  return {
75
- success: usedTokensAfterUpdate <= tokens,
95
+ success,
76
96
  limit: tokens,
77
97
  remaining: tokens - usedTokensAfterUpdate,
78
- reset: (bucket + 1) * windowDuration,
98
+ reset,
79
99
  pending: Promise.resolve(),
80
100
  };
81
101
  };
@@ -142,12 +162,29 @@ export class RegionRatelimit extends Ratelimit {
142
162
  const currentKey = [identifier, currentWindow].join(":");
143
163
  const previousWindow = currentWindow - windowSize;
144
164
  const previousKey = [identifier, previousWindow].join(":");
165
+ if (ctx.cache) {
166
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
167
+ if (blocked) {
168
+ return {
169
+ success: false,
170
+ limit: tokens,
171
+ remaining: 0,
172
+ reset: reset,
173
+ pending: Promise.resolve(),
174
+ };
175
+ }
176
+ }
145
177
  const remaining = (await ctx.redis.eval(script, [currentKey, previousKey], [tokens, now, windowSize]));
178
+ const success = remaining > 0;
179
+ const reset = (currentWindow + 1) * windowSize;
180
+ if (ctx.cache && !success) {
181
+ ctx.cache.blockUntil(identifier, reset);
182
+ }
146
183
  return {
147
- success: remaining > 0,
184
+ success,
148
185
  limit: tokens,
149
186
  remaining,
150
- reset: (currentWindow + 1) * windowSize,
187
+ reset,
151
188
  pending: Promise.resolve(),
152
189
  };
153
190
  };
@@ -225,11 +262,27 @@ export class RegionRatelimit extends Ratelimit {
225
262
  `;
226
263
  const intervalDuration = ms(interval);
227
264
  return async function (ctx, identifier) {
265
+ if (ctx.cache) {
266
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
267
+ if (blocked) {
268
+ return {
269
+ success: false,
270
+ limit: maxTokens,
271
+ remaining: 0,
272
+ reset: reset,
273
+ pending: Promise.resolve(),
274
+ };
275
+ }
276
+ }
228
277
  const now = Date.now();
229
278
  const key = [identifier, Math.floor(now / intervalDuration)].join(":");
230
279
  const [remaining, reset] = (await ctx.redis.eval(script, [key], [maxTokens, intervalDuration, refillRate, now]));
280
+ const success = remaining > 0;
281
+ if (ctx.cache && !success) {
282
+ ctx.cache.blockUntil(identifier, reset);
283
+ }
231
284
  return {
232
- success: remaining > 0,
285
+ success,
233
286
  limit: maxTokens,
234
287
  remaining,
235
288
  reset,
package/package.json CHANGED
@@ -3,7 +3,7 @@
3
3
  "main": "./script/mod.js",
4
4
  "types": "./types/mod.d.ts",
5
5
  "name": "@upstash/ratelimit",
6
- "version": "v0.1.3-rc.0",
6
+ "version": "v0.1.4-rc.1",
7
7
  "description": "A serverless ratelimiter built on top of Upstash REST API.",
8
8
  "repository": {
9
9
  "type": "git",
@@ -29,7 +29,7 @@
29
29
  "size-limit": "latest"
30
30
  },
31
31
  "peerDependencies": {
32
- "@upstash/redis": "^1.3.4"
32
+ "@upstash/redis": "^1.4.0"
33
33
  },
34
34
  "size-limit": [
35
35
  {
package/script/cache.js ADDED
@@ -0,0 +1,32 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.Cache = void 0;
4
+ class Cache {
5
+ constructor(cache) {
6
+ /**
7
+ * Stores identifier -> reset (in milliseconds)
8
+ */
9
+ Object.defineProperty(this, "cache", {
10
+ enumerable: true,
11
+ configurable: true,
12
+ writable: true,
13
+ value: void 0
14
+ });
15
+ this.cache = cache;
16
+ }
17
+ isBlocked(identifier) {
18
+ if (!this.cache.has(identifier)) {
19
+ return { blocked: false, reset: 0 };
20
+ }
21
+ const reset = this.cache.get(identifier);
22
+ if (reset < Date.now()) {
23
+ this.cache.delete(identifier);
24
+ return { blocked: false, reset: 0 };
25
+ }
26
+ return { blocked: true, reset: reset };
27
+ }
28
+ blockUntil(identifier, reset) {
29
+ this.cache.set(identifier, reset);
30
+ }
31
+ }
32
+ exports.Cache = Cache;
package/script/multi.js CHANGED
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.MultiRegionRatelimit = void 0;
4
4
  const duration_js_1 = require("./duration.js");
5
5
  const ratelimit_js_1 = require("./ratelimit.js");
6
+ const cache_js_1 = require("./cache.js");
6
7
  /**
7
8
  * Ratelimiter using serverless redis from https://upstash.com/
8
9
  *
@@ -26,7 +27,10 @@ class MultiRegionRatelimit extends ratelimit_js_1.Ratelimit {
26
27
  super({
27
28
  prefix: config.prefix,
28
29
  limiter: config.limiter,
29
- ctx: { redis: config.redis },
30
+ ctx: {
31
+ redis: config.redis,
32
+ cache: config.ephermeralCache ? new cache_js_1.Cache() : undefined,
33
+ },
30
34
  });
31
35
  }
32
36
  /**
@@ -73,6 +77,18 @@ class MultiRegionRatelimit extends ratelimit_js_1.Ratelimit {
73
77
  return members
74
78
  `;
75
79
  return async function (ctx, identifier) {
80
+ if (ctx.cache) {
81
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
82
+ if (blocked) {
83
+ return {
84
+ success: false,
85
+ limit: tokens,
86
+ remaining: 0,
87
+ reset: reset,
88
+ pending: Promise.resolve(),
89
+ };
90
+ }
91
+ }
76
92
  const requestID = crypto.randomUUID();
77
93
  const bucket = Math.floor(Date.now() / windowDuration);
78
94
  const key = [identifier, bucket].join(":");
@@ -111,11 +127,16 @@ class MultiRegionRatelimit extends ratelimit_js_1.Ratelimit {
111
127
  /**
112
128
  * Do not await sync. This should not run in the critical path.
113
129
  */
130
+ const success = remaining > 0;
131
+ const reset = (bucket + 1) * windowDuration;
132
+ if (ctx.cache && !success) {
133
+ ctx.cache.blockUntil(identifier, reset);
134
+ }
114
135
  return {
115
- success: remaining > 0,
136
+ success,
116
137
  limit: tokens,
117
138
  remaining,
118
- reset: (bucket + 1) * windowDuration,
139
+ reset,
119
140
  pending: sync(),
120
141
  };
121
142
  };
@@ -176,6 +197,18 @@ class MultiRegionRatelimit extends ratelimit_js_1.Ratelimit {
176
197
  `;
177
198
  const windowDuration = (0, duration_js_1.ms)(window);
178
199
  return async function (ctx, identifier) {
200
+ if (ctx.cache) {
201
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
202
+ if (blocked) {
203
+ return {
204
+ success: false,
205
+ limit: tokens,
206
+ remaining: 0,
207
+ reset: reset,
208
+ pending: Promise.resolve(),
209
+ };
210
+ }
211
+ }
179
212
  const requestID = crypto.randomUUID();
180
213
  const now = Date.now();
181
214
  const currentWindow = Math.floor(now / windowSize);
@@ -216,14 +249,16 @@ class MultiRegionRatelimit extends ratelimit_js_1.Ratelimit {
216
249
  await db.redis.sadd(currentKey, ...allIDs);
217
250
  }
218
251
  }
219
- /**
220
- * Do not await sync. This should not run in the critical path.
221
- */
252
+ const success = remaining > 0;
253
+ const reset = (currentWindow + 1) * windowDuration;
254
+ if (ctx.cache && !success) {
255
+ ctx.cache.blockUntil(identifier, reset);
256
+ }
222
257
  return {
223
- success: remaining > 0,
258
+ success,
224
259
  limit: tokens,
225
260
  remaining,
226
- reset: (currentWindow + 1) * windowDuration,
261
+ reset,
227
262
  pending: sync(),
228
263
  };
229
264
  };
package/script/ratelimit.js CHANGED
@@ -1,6 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.Ratelimit = void 0;
4
+ const cache_js_1 = require("./cache.js");
4
5
  /**
5
6
  * Ratelimiter using serverless redis from https://upstash.com/
6
7
  *
@@ -128,6 +129,12 @@ class Ratelimit {
128
129
  this.ctx = config.ctx;
129
130
  this.limiter = config.limiter;
130
131
  this.prefix = config.prefix ?? "@upstash/ratelimit";
132
+ if (config.ephermeralCache instanceof Map) {
133
+ this.ctx.cache = new cache_js_1.Cache(config.ephermeralCache);
134
+ }
135
+ else if (typeof config.ephermeralCache === "undefined") {
136
+ this.ctx.cache = new cache_js_1.Cache(new Map());
137
+ }
131
138
  }
132
139
  }
133
140
  exports.Ratelimit = Ratelimit;
package/script/single.js CHANGED
@@ -26,7 +26,10 @@ class RegionRatelimit extends ratelimit_js_1.Ratelimit {
26
26
  super({
27
27
  prefix: config.prefix,
28
28
  limiter: config.limiter,
29
- ctx: { redis: config.redis },
29
+ ctx: {
30
+ redis: config.redis,
31
+ },
32
+ ephermeralCache: config.ephermeralCache,
30
33
  });
31
34
  }
32
35
  /**
@@ -73,12 +76,29 @@ class RegionRatelimit extends ratelimit_js_1.Ratelimit {
73
76
  return async function (ctx, identifier) {
74
77
  const bucket = Math.floor(Date.now() / windowDuration);
75
78
  const key = [identifier, bucket].join(":");
79
+ if (ctx.cache) {
80
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
81
+ if (blocked) {
82
+ return {
83
+ success: false,
84
+ limit: tokens,
85
+ remaining: 0,
86
+ reset: reset,
87
+ pending: Promise.resolve(),
88
+ };
89
+ }
90
+ }
76
91
  const usedTokensAfterUpdate = (await ctx.redis.eval(script, [key], [windowDuration]));
92
+ const success = usedTokensAfterUpdate <= tokens;
93
+ const reset = (bucket + 1) * windowDuration;
94
+ if (ctx.cache && !success) {
95
+ ctx.cache.blockUntil(identifier, reset);
96
+ }
77
97
  return {
78
- success: usedTokensAfterUpdate <= tokens,
98
+ success,
79
99
  limit: tokens,
80
100
  remaining: tokens - usedTokensAfterUpdate,
81
- reset: (bucket + 1) * windowDuration,
101
+ reset,
82
102
  pending: Promise.resolve(),
83
103
  };
84
104
  };
@@ -145,12 +165,29 @@ class RegionRatelimit extends ratelimit_js_1.Ratelimit {
145
165
  const currentKey = [identifier, currentWindow].join(":");
146
166
  const previousWindow = currentWindow - windowSize;
147
167
  const previousKey = [identifier, previousWindow].join(":");
168
+ if (ctx.cache) {
169
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
170
+ if (blocked) {
171
+ return {
172
+ success: false,
173
+ limit: tokens,
174
+ remaining: 0,
175
+ reset: reset,
176
+ pending: Promise.resolve(),
177
+ };
178
+ }
179
+ }
148
180
  const remaining = (await ctx.redis.eval(script, [currentKey, previousKey], [tokens, now, windowSize]));
181
+ const success = remaining > 0;
182
+ const reset = (currentWindow + 1) * windowSize;
183
+ if (ctx.cache && !success) {
184
+ ctx.cache.blockUntil(identifier, reset);
185
+ }
149
186
  return {
150
- success: remaining > 0,
187
+ success,
151
188
  limit: tokens,
152
189
  remaining,
153
- reset: (currentWindow + 1) * windowSize,
190
+ reset,
154
191
  pending: Promise.resolve(),
155
192
  };
156
193
  };
@@ -228,11 +265,27 @@ class RegionRatelimit extends ratelimit_js_1.Ratelimit {
228
265
  `;
229
266
  const intervalDuration = (0, duration_js_1.ms)(interval);
230
267
  return async function (ctx, identifier) {
268
+ if (ctx.cache) {
269
+ const { blocked, reset } = ctx.cache.isBlocked(identifier);
270
+ if (blocked) {
271
+ return {
272
+ success: false,
273
+ limit: maxTokens,
274
+ remaining: 0,
275
+ reset: reset,
276
+ pending: Promise.resolve(),
277
+ };
278
+ }
279
+ }
231
280
  const now = Date.now();
232
281
  const key = [identifier, Math.floor(now / intervalDuration)].join(":");
233
282
  const [remaining, reset] = (await ctx.redis.eval(script, [key], [maxTokens, intervalDuration, refillRate, now]));
283
+ const success = remaining > 0;
284
+ if (ctx.cache && !success) {
285
+ ctx.cache.blockUntil(identifier, reset);
286
+ }
234
287
  return {
235
- success: remaining > 0,
288
+ success,
236
289
  limit: maxTokens,
237
290
  remaining,
238
291
  reset,
package/types/cache.d.ts ADDED
@@ -0,0 +1,13 @@
1
+ import { EphermeralCache } from "./types.js";
2
+ export declare class Cache implements EphermeralCache {
3
+ /**
4
+ * Stores identifier -> reset (in milliseconds)
5
+ */
6
+ private readonly cache;
7
+ constructor(cache: Map<string, number>);
8
+ isBlocked(identifier: string): {
9
+ blocked: boolean;
10
+ reset: number;
11
+ };
12
+ blockUntil(identifier: string, reset: number): void;
13
+ }
package/types/multi.d.ts CHANGED
@@ -22,6 +22,23 @@ export declare type MultiRegionRatelimitConfig = {
22
22
  * @default `@upstash/ratelimit`
23
23
  */
24
24
  prefix?: string;
25
+ /**
26
+ * If enabled, the ratelimiter will keep a global cache of identifiers, that have
27
+ * exhausted their ratelimit. In serverless environments this is only possible if
28
+ * you create the ratelimiter instance outside of your handler function. While the
29
+ * function is still hot, the ratelimiter can block requests without having to
30
+ * request data from redis, thus saving time and money.
31
+ *
32
+ * Whenever an identifier has exceeded its limit, the ratelimiter will add it to an
33
+ * internal list together with its reset timestamp. If the same identifier makes a
34
+ * new request before it is reset, we can immediately reject it.
35
+ *
36
+ * Set to `false` to disable.
37
+ *
38
+ * If left undefined, a map is created automatically, but it can only work
39
+ * if the map or the ratelimit instance is created outside your serverless function handler.
40
+ */
41
+ ephermeralCache?: Map<string, number> | false;
25
42
  };
26
43
  /**
27
44
  * Ratelimiter using serverless redis from https://upstash.com/
package/types/ratelimit.d.ts CHANGED
@@ -18,6 +18,23 @@
18
18
  * @default `@upstash/ratelimit`
19
19
  */
20
20
  prefix?: string;
21
+ /**
22
+ * If enabled, the ratelimiter will keep a global cache of identifiers, that have
23
+ * exhausted their ratelimit. In serverless environments this is only possible if
24
+ * you create the ratelimiter instance outside of your handler function. While the
25
+ * function is still hot, the ratelimiter can block requests without having to
26
+ * request data from redis, thus saving time and money.
27
+ *
28
+ * Whenever an identifier has exceeded its limit, the ratelimiter will add it to an
29
+ * internal list together with its reset timestamp. If the same identifier makes a
30
+ * new request before it is reset, we can immediately reject it.
31
+ *
32
+ * Set to `false` to disable.
33
+ *
34
+ * If left undefined, a map is created automatically, but it can only work
35
+ * if the map or the ratelimit instance is created outside your serverless function handler.
36
+ */
37
+ ephermeralCache?: Map<string, number> | false;
21
38
  };
22
39
  /**
23
40
  * Ratelimiter using serverless redis from https://upstash.com/
package/types/single.d.ts CHANGED
@@ -25,6 +25,23 @@ export declare type RegionRatelimitConfig = {
25
25
  * @default `@upstash/ratelimit`
26
26
  */
27
27
  prefix?: string;
28
+ /**
29
+ * If enabled, the ratelimiter will keep a global cache of identifiers, that have
30
+ * exhausted their ratelimit. In serverless environments this is only possible if
31
+ * you create the ratelimiter instance outside of your handler function. While the
32
+ * function is still hot, the ratelimiter can block requests without having to
33
+ * request data from redis, thus saving time and money.
34
+ *
35
+ * Whenever an identifier has exceeded its limit, the ratelimiter will add it to an
36
+ * internal list together with its reset timestamp. If the same identifier makes a
37
+ * new request before it is reset, we can immediately reject it.
38
+ *
39
+ * Set to `false` to disable.
40
+ *
41
+ * If left undefined, a map is created automatically, but it can only work
42
+ * if the map or the ratelimit instance is created outside your serverless function handler.
43
+ */
44
+ ephermeralCache?: Map<string, number> | false;
28
45
  };
29
46
  /**
30
47
  * Ratelimiter using serverless redis from https://upstash.com/
package/types/types.d.ts CHANGED
@@ -2,11 +2,23 @@ export interface Redis {
2
2
  eval: (script: string, keys: string[], values: unknown[]) => Promise<unknown>;
3
3
  sadd: (key: string, ...members: string[]) => Promise<number>;
4
4
  }
5
+ /**
6
+ * EphermeralCache is used to block certain identifiers right away in case they have already exceeded the ratelimit.
7
+ */
8
+ export interface EphermeralCache {
9
+ isBlocked: (identifier: string) => {
10
+ blocked: boolean;
11
+ reset: number;
12
+ };
13
+ blockUntil: (identifier: string, reset: number) => void;
14
+ }
5
15
  export declare type RegionContext = {
6
16
  redis: Redis;
17
+ cache?: EphermeralCache;
7
18
  };
8
19
  export declare type MultiRegionContext = {
9
20
  redis: Redis[];
21
+ cache?: EphermeralCache;
10
22
  };
11
23
  export declare type Context = RegionContext | MultiRegionContext;
12
24
  export declare type RatelimitResponse = {
@@ -50,4 +62,6 @@ export declare type RatelimitResponse = {
50
62
  */
51
63
  pending: Promise<unknown>;
52
64
  };
53
- export declare type Algorithm<TContext> = (ctx: TContext, identifier: string) => Promise<RatelimitResponse>;
65
+ export declare type Algorithm<TContext> = (ctx: TContext, identifier: string, opts?: {
66
+ cache?: EphermeralCache;
67
+ }) => Promise<RatelimitResponse>;
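
The widened `Algorithm` type reflects the contract every built-in limiter now follows: consult the optional `EphermeralCache` before touching redis, and record a block once the limit is exhausted. A stripped-down limiter conforming to that shape, for illustration only (the redis evaluation itself is elided):

```ts
import type { Algorithm, RegionContext } from "./types.js";

const allowTenPerWindow: Algorithm<RegionContext> = async (ctx, identifier) => {
  const limit = 10;
  const windowDuration = 10_000; // 10 s window, for illustration

  // Fast path: reject identifiers the cache already knows have exhausted their limit.
  if (ctx.cache) {
    const { blocked, reset } = ctx.cache.isBlocked(identifier);
    if (blocked) {
      return { success: false, limit, remaining: 0, reset, pending: Promise.resolve() };
    }
  }

  // ... evaluate the request against ctx.redis here (elided) ...
  const remaining = limit - 1;
  const reset = (Math.floor(Date.now() / windowDuration) + 1) * windowDuration;
  const success = remaining > 0;

  // Remember exhausted identifiers so the next call can short-circuit.
  if (ctx.cache && !success) {
    ctx.cache.blockUntil(identifier, reset);
  }

  return { success, limit, remaining, reset, pending: Promise.resolve() };
};
```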