pg-ratelimit 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +39 -0
- package/dist/index.cjs +482 -0
- package/dist/index.d.cts +71 -0
- package/dist/index.d.ts +71 -0
- package/dist/index.js +454 -0
- package/package.json +61 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Max Malm
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
# pg-ratelimit
|
|
2
|
+
|
|
3
|
+
PostgreSQL-backed rate limiting for Node.js. No Redis required.
|
|
4
|
+
|
|
5
|
+
```typescript
|
|
6
|
+
import { Pool } from "pg";
|
|
7
|
+
import { Ratelimit } from "pg-ratelimit";
|
|
8
|
+
|
|
9
|
+
const pool = new Pool({ connectionString: process.env.DATABASE_URL });
|
|
10
|
+
|
|
11
|
+
const ratelimit = new Ratelimit({
|
|
12
|
+
pool,
|
|
13
|
+
limiter: Ratelimit.slidingWindow(10, "1m"),
|
|
14
|
+
prefix: "api",
|
|
15
|
+
});
|
|
16
|
+
|
|
17
|
+
const { success } = await ratelimit.limit("user:123");
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
## Features
|
|
21
|
+
|
|
22
|
+
- **Three algorithms** - fixed window, sliding window, token bucket
|
|
23
|
+
- **Zero runtime deps** - just `pg` as a peer dependency
|
|
24
|
+
- **Serverless-safe** - no background processes, probabilistic inline cleanup, no long-lived state
|
|
25
|
+
- **Upstash-compatible API** - same `limit()`, `blockUntilReady()`, `getRemaining()`, `resetUsedTokens()` surface
|
|
26
|
+
|
|
27
|
+
## Install
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
npm install pg-ratelimit pg
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## Docs
|
|
34
|
+
|
|
35
|
+
[benjick.js.org/pg-ratelimit](https://benjick.js.org/pg-ratelimit)
|
|
36
|
+
|
|
37
|
+
## License
|
|
38
|
+
|
|
39
|
+
MIT
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,482 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
var index_exports = {};
|
|
22
|
+
__export(index_exports, {
|
|
23
|
+
Ratelimit: () => Ratelimit,
|
|
24
|
+
TABLE_SQL: () => TABLE_SQL
|
|
25
|
+
});
|
|
26
|
+
module.exports = __toCommonJS(index_exports);
|
|
27
|
+
|
|
28
|
+
// src/duration.ts
// Milliseconds per supported shorthand unit.
var MULTIPLIERS = {
  s: 1e3,
  m: 6e4,
  h: 36e5,
  d: 864e5
};
// Accepts "<digits><unit>" with optional whitespace, e.g. "10s" or "2 h".
var DURATION_RE = /^\s*(\d+)\s*(s|m|h|d)\s*$/;
/**
 * Normalizes a duration to a positive number of milliseconds.
 *
 * Numbers are taken as milliseconds verbatim; strings must match
 * DURATION_RE. Throws an Error for non-finite, non-positive, or
 * unparseable input.
 */
function toMs(duration) {
  if (typeof duration === "number") {
    const ok = Number.isFinite(duration) && duration > 0;
    if (ok) {
      return duration;
    }
    throw new Error(
      `Invalid duration: ${duration}. Must be a positive finite number of milliseconds.`
    );
  }
  const parsed = DURATION_RE.exec(duration);
  if (parsed === null) {
    throw new Error(
      `Invalid duration format: "${duration}". Expected format: "<number><unit>" where unit is s, m, h, or d.`
    );
  }
  const [, rawValue, unit] = parsed;
  const value = Number(rawValue);
  if (value <= 0) {
    throw new Error(`Invalid duration: "${duration}". Value must be positive.`);
  }
  return value * MULTIPLIERS[unit];
}
|
|
58
|
+
|
|
59
|
+
// src/tables.sql
// DDL for both backing tables. The ephemeral table is UNLOGGED (cheaper
// writes, not crash-safe); the durable table is a regular table. Both share
// the same column layout so every algorithm can target either one.
var tables_default = "CREATE UNLOGGED TABLE IF NOT EXISTS rate_limit_ephemeral (\n prefix TEXT NOT NULL,\n key TEXT NOT NULL,\n count BIGINT,\n prev_count BIGINT,\n window_start TIMESTAMPTZ,\n tokens DOUBLE PRECISION,\n last_refill TIMESTAMPTZ,\n expires_at TIMESTAMPTZ NOT NULL,\n PRIMARY KEY (prefix, key)\n);\n\nCREATE INDEX IF NOT EXISTS idx_rate_limit_ephemeral_cleanup\n ON rate_limit_ephemeral (prefix, expires_at);\n\nCREATE TABLE IF NOT EXISTS rate_limit_durable (\n prefix TEXT NOT NULL,\n key TEXT NOT NULL,\n count BIGINT,\n prev_count BIGINT,\n window_start TIMESTAMPTZ,\n tokens DOUBLE PRECISION,\n last_refill TIMESTAMPTZ,\n expires_at TIMESTAMPTZ NOT NULL,\n PRIMARY KEY (prefix, key)\n);\n\nCREATE INDEX IF NOT EXISTS idx_rate_limit_durable_cleanup\n ON rate_limit_durable (prefix, expires_at);\n";

// src/tables.ts
var TABLE_SQL = tables_default;
// Pools that have already been migrated this process lifetime
// (WeakSet so closed pools can be garbage-collected).
var initialized = /* @__PURE__ */ new WeakSet();
// Runs the idempotent DDL once per pool. Setting
// PG_RATELIMIT_DISABLE_AUTO_MIGRATE=true skips the DDL entirely
// (the caller is then responsible for applying TABLE_SQL themselves).
async function ensureTables(pool) {
  if (initialized.has(pool)) {
    return;
  }
  if (process.env.PG_RATELIMIT_DISABLE_AUTO_MIGRATE === "true") {
    initialized.add(pool);
    return;
  }
  await pool.query(TABLE_SQL);
  initialized.add(pool);
}
|
|
76
|
+
|
|
77
|
+
// src/algorithms/fixed-window.ts
/**
 * Fixed-window rate limiting.
 *
 * One atomic upsert per call: when the stored row's expires_at is in the
 * past the window restarts at `now`, otherwise `rate` is added to the
 * running count. Success is decided from the count the query returns.
 *
 * @param ctx      query context ({ pool, table, prefix, key, rate, now, debug })
 * @param tokens   maximum units allowed per window
 * @param windowMs window length in milliseconds
 * @returns { success, limit, remaining, reset } — reset is epoch ms
 */
async function fixedWindow(ctx, tokens, windowMs) {
  const { pool, table, prefix, key, rate, now, debug } = ctx;
  const windowInterval = `${windowMs} milliseconds`;
  // All three CASE expressions key off the same expiry test, so a stale
  // row is reset and a live row is incremented in a single statement.
  const sql = `
    INSERT INTO ${table} (prefix, key, count, window_start, expires_at)
    VALUES ($1, $2, $3, $4::timestamptz, $4::timestamptz + $5::interval)
    ON CONFLICT (prefix, key) DO UPDATE
    SET count = CASE
      WHEN ${table}.expires_at < $4::timestamptz THEN $3
      ELSE ${table}.count + $3
    END,
    window_start = CASE
      WHEN ${table}.expires_at < $4::timestamptz THEN $4::timestamptz
      ELSE ${table}.window_start
    END,
    expires_at = CASE
      WHEN ${table}.expires_at < $4::timestamptz THEN $4::timestamptz + $5::interval
      ELSE ${table}.expires_at
    END
    RETURNING count, expires_at
  `;
  const params = [prefix, key, rate, now, windowInterval];
  if (debug) {
    console.debug("pg-ratelimit fixed-window:", sql, params);
  }
  const { rows } = await pool.query(sql, params);
  // node-postgres returns BIGINT columns as strings; normalize to number.
  const used = Number(rows[0].count);
  const resetAt = new Date(rows[0].expires_at).getTime();
  const left = tokens - used;
  return {
    success: used <= tokens,
    limit: tokens,
    remaining: left > 0 ? left : 0,
    reset: resetAt
  };
}
|
|
113
|
+
|
|
114
|
+
// src/algorithms/sliding-window.ts
/**
 * Sliding-window rate limiting (two-bucket approximation).
 *
 * Keeps the current window's count plus the previous window's count and
 * weighs the previous one by how much of it still overlaps `now`. The
 * whole read-modify-write runs in a transaction with SELECT ... FOR UPDATE
 * so concurrent hits on the same (prefix, key) are serialized.
 *
 * @param ctx      query context ({ pool, table, prefix, key, rate, now, debug, synchronousCommit })
 * @param tokens   maximum effective units allowed per window
 * @param windowMs window length in milliseconds
 * @returns { success, limit, remaining, reset } — reset is epoch ms
 */
async function slidingWindow(ctx, tokens, windowMs) {
  const { pool, table, prefix, key, rate, now, debug, synchronousCommit } = ctx;
  const client = await pool.connect();
  try {
    await client.query("BEGIN");
    if (!synchronousCommit) {
      // Losing a counter update on crash is acceptable; skip the WAL flush wait.
      await client.query("SET LOCAL synchronous_commit = off");
    }
    // Rows live for two windows: the current bucket plus the previous one.
    const doubleWindowInterval = `${2 * windowMs} milliseconds`;
    // Make sure a row exists so the FOR UPDATE below always locks something.
    const ensureSql = `
      INSERT INTO ${table} (prefix, key, count, prev_count, window_start, expires_at)
      VALUES ($1, $2, 0, 0, $3::timestamptz, $3::timestamptz + $4::interval)
      ON CONFLICT (prefix, key) DO NOTHING
    `;
    await client.query(ensureSql, [prefix, key, now, doubleWindowInterval]);
    const selectSql = `
      SELECT count, prev_count, window_start, expires_at
      FROM ${table}
      WHERE prefix = $1 AND key = $2
      FOR UPDATE
    `;
    if (debug) {
      console.debug("pg-ratelimit sliding-window SELECT:", selectSql, [prefix, key]);
    }
    const existing = await client.query(selectSql, [prefix, key]);
    const row = existing.rows[0];
    const oldWindowStart = new Date(row.window_start).getTime();
    const nowMs = now.getTime();
    let prevCount;
    let count;
    let windowStart;
    if (oldWindowStart + windowMs > nowMs) {
      // Still inside the stored window: use the row as-is.
      prevCount = Number(row.prev_count) || 0;
      count = Number(row.count) || 0;
      windowStart = new Date(row.window_start);
    } else if (oldWindowStart + 2 * windowMs > nowMs) {
      // Exactly one window has rolled over: current becomes previous.
      prevCount = Number(row.count) || 0;
      count = 0;
      windowStart = new Date(oldWindowStart + windowMs);
    } else {
      // More than two windows old: start completely fresh.
      prevCount = 0;
      count = 0;
      windowStart = now;
    }
    // Weighted total: previous bucket decays linearly as the window advances.
    const elapsed = now.getTime() - windowStart.getTime();
    const weight = 1 - elapsed / windowMs;
    const effective = prevCount * weight + count + rate;
    const success = effective <= tokens;
    if (success) {
      // Persist only on success; denied requests leave the row untouched.
      const newCount = count + rate;
      const upsertSql = `
        INSERT INTO ${table} (prefix, key, count, prev_count, window_start, expires_at)
        VALUES ($1, $2, $3, $4, $5::timestamptz, $5::timestamptz + $6::interval)
        ON CONFLICT (prefix, key) DO UPDATE
        SET count = $3,
            prev_count = $4,
            window_start = $5::timestamptz,
            expires_at = $5::timestamptz + $6::interval
      `;
      const upsertParams = [prefix, key, newCount, prevCount, windowStart, doubleWindowInterval];
      if (debug) {
        console.debug("pg-ratelimit sliding-window UPSERT:", upsertSql, upsertParams);
      }
      await client.query(upsertSql, upsertParams);
    }
    await client.query("COMMIT");
    const reset = windowStart.getTime() + windowMs;
    return {
      success,
      limit: tokens,
      // `effective` includes this request's rate, so remaining reflects it.
      remaining: Math.max(0, tokens - effective),
      reset
    };
  } catch (err) {
    // Best-effort rollback; the original error is what the caller should see.
    await client.query("ROLLBACK").catch(() => {
    });
    throw err;
  } finally {
    client.release();
  }
}
|
|
196
|
+
|
|
197
|
+
// src/algorithms/token-bucket.ts
/**
 * Token-bucket rate limiting with lazy refill.
 *
 * The bucket holds up to `maxTokens`; `refillRate` tokens are credited per
 * whole `intervalMs` elapsed since last_refill (no partial-interval credit).
 * Runs in a transaction with SELECT ... FOR UPDATE so concurrent takes on
 * the same (prefix, key) are serialized.
 *
 * NOTE(review): on a successful take last_refill is reset to `now`, which
 * discards any fractional progress toward the next refill interval — the
 * limiter is therefore slightly stricter than nominal under steady traffic.
 * Confirm this is intended before changing it.
 *
 * @param ctx        query context ({ pool, table, prefix, key, rate, now, debug, synchronousCommit })
 * @param maxTokens  bucket capacity
 * @param refillRate tokens credited per interval
 * @param intervalMs refill interval in milliseconds
 * @returns { success, limit, remaining, reset } — reset is epoch ms
 */
async function tokenBucket(ctx, maxTokens, refillRate, intervalMs) {
  const { pool, table, prefix, key, rate, now, debug, synchronousCommit } = ctx;
  const client = await pool.connect();
  // Time for an empty bucket to fill completely; used as the row TTL.
  const ttlMs = maxTokens / refillRate * intervalMs;
  try {
    await client.query("BEGIN");
    if (!synchronousCommit) {
      // Losing a token update on crash is acceptable; skip the WAL flush wait.
      await client.query("SET LOCAL synchronous_commit = off");
    }
    const ttlInterval = `${ttlMs} milliseconds`;
    // Make sure a full bucket exists so the FOR UPDATE below locks a row.
    const ensureSql = `
      INSERT INTO ${table} (prefix, key, tokens, last_refill, expires_at)
      VALUES ($1, $2, $3, $4::timestamptz, $4::timestamptz + $5::interval)
      ON CONFLICT (prefix, key) DO NOTHING
    `;
    await client.query(ensureSql, [prefix, key, maxTokens, now, ttlInterval]);
    const selectSql = `
      SELECT tokens, last_refill, expires_at
      FROM ${table}
      WHERE prefix = $1 AND key = $2
      FOR UPDATE
    `;
    if (debug) {
      console.debug("pg-ratelimit token-bucket SELECT:", selectSql, [prefix, key]);
    }
    const existing = await client.query(selectSql, [prefix, key]);
    const row = existing.rows[0];
    // Credit whole elapsed intervals, capped at the bucket capacity.
    const lastRefill = new Date(row.last_refill).getTime();
    const elapsed = now.getTime() - lastRefill;
    const refilled = Math.floor(elapsed / intervalMs) * refillRate;
    const currentTokens = Math.min(Number(row.tokens) + refilled, maxTokens);
    const newTokens = currentTokens - rate;
    const success = newTokens >= 0;
    if (success) {
      // Persist only on success; denied requests leave the row untouched.
      const upsertSql = `
      INSERT INTO ${table} (prefix, key, tokens, last_refill, expires_at)
      VALUES ($1, $2, $3, $4::timestamptz, $4::timestamptz + $5::interval)
      ON CONFLICT (prefix, key) DO UPDATE
      SET tokens = $3, last_refill = $4::timestamptz, expires_at = $4::timestamptz + $5::interval
      `;
      const upsertParams = [prefix, key, newTokens, now, ttlInterval];
      if (debug) {
        console.debug("pg-ratelimit token-bucket UPSERT:", upsertSql, upsertParams);
      }
      await client.query(upsertSql, upsertParams);
    }
    await client.query("COMMIT");
    let reset;
    if (success) {
      // When allowed, reset reports when the bucket would be full again.
      reset = now.getTime() + ttlMs;
    } else {
      // When denied, reset estimates when enough tokens will have refilled.
      const tokensNeeded = rate - currentTokens;
      const intervalsNeeded = Math.ceil(tokensNeeded / refillRate);
      reset = now.getTime() + intervalsNeeded * intervalMs;
    }
    return {
      success,
      limit: maxTokens,
      remaining: Math.max(0, success ? newTokens : currentTokens),
      reset
    };
  } catch (err) {
    // Best-effort rollback; the original error is what the caller should see.
    await client.query("ROLLBACK").catch(() => {
    });
    throw err;
  } finally {
    client.release();
  }
}
|
|
267
|
+
|
|
268
|
+
// src/limiter.ts
/**
 * PostgreSQL-backed rate limiter with an Upstash-compatible surface
 * (limit / blockUntilReady / getRemaining / resetUsedTokens).
 * Construct with a pg Pool, an algorithm from the static factories,
 * and a key prefix that namespaces rows in the shared table.
 */
var Ratelimit = class {
  pool;
  algorithm;
  prefix;
  // Backing table name: durable (regular) vs ephemeral (UNLOGGED).
  table;
  debug;
  // Injectable time source (mainly for tests); defaults to wall clock.
  clock;
  durable;
  synchronousCommit;
  // Probability [0..1] that a limit() call also sweeps expired rows.
  cleanupProbability;
  // Pre-parsed durations (only the relevant one is used per algorithm)
  windowMs;
  intervalMs;
  constructor(config) {
    if (!config.prefix) {
      throw new Error("prefix must be a non-empty string");
    }
    const cleanupProbability = config.cleanupProbability ?? 0.1;
    if (cleanupProbability < 0 || cleanupProbability > 1) {
      throw new Error("cleanupProbability must be between 0 and 1");
    }
    this.pool = config.pool;
    this.algorithm = config.limiter;
    this.prefix = config.prefix;
    this.debug = config.debug ?? false;
    this.clock = config.clock ?? (() => /* @__PURE__ */ new Date());
    this.durable = "durable" in config && config.durable === true;
    this.table = this.durable ? "rate_limit_durable" : "rate_limit_ephemeral";
    this.cleanupProbability = cleanupProbability;
    // Non-durable mode always commits synchronously at the session default
    // (UNLOGGED tables skip WAL anyway); durable mode only waits for the
    // WAL flush when the caller explicitly opts in.
    this.synchronousCommit = !this.durable || "synchronousCommit" in config && config.synchronousCommit === true;
    // Parse the duration once up front so limit() never re-parses it.
    if (this.algorithm.type === "fixedWindow" || this.algorithm.type === "slidingWindow") {
      this.windowMs = toMs(this.algorithm.window);
      this.intervalMs = 0;
    } else {
      this.windowMs = 0;
      this.intervalMs = toMs(this.algorithm.interval);
    }
  }
  // Factory: allow `tokens` units per fixed window.
  static fixedWindow(tokens, window) {
    return { type: "fixedWindow", tokens, window };
  }
  // Factory: allow `tokens` effective units per sliding (weighted) window.
  static slidingWindow(tokens, window) {
    return { type: "slidingWindow", tokens, window };
  }
  // Factory: bucket of `maxTokens`, refilled `refillRate` per `interval`.
  static tokenBucket(refillRate, interval, maxTokens) {
    return { type: "tokenBucket", refillRate, interval, maxTokens };
  }
  /**
   * Consume `opts.rate` (default 1) units for `key` and report whether the
   * request is allowed. With probability `cleanupProbability` it also fires
   * a fire-and-forget sweep of expired rows for this prefix.
   */
  async limit(key, opts) {
    const rate = opts?.rate ?? 1;
    const now = this.clock();
    await ensureTables(this.pool);
    const ctx = {
      pool: this.pool,
      table: this.table,
      prefix: this.prefix,
      key,
      rate,
      now,
      debug: this.debug,
      synchronousCommit: this.synchronousCommit
    };
    let result;
    switch (this.algorithm.type) {
      case "fixedWindow":
        result = await fixedWindow(ctx, this.algorithm.tokens, this.windowMs);
        break;
      case "slidingWindow":
        result = await slidingWindow(ctx, this.algorithm.tokens, this.windowMs);
        break;
      case "tokenBucket":
        result = await tokenBucket(
          ctx,
          this.algorithm.maxTokens,
          this.algorithm.refillRate,
          this.intervalMs
        );
        break;
    }
    if (Math.random() < this.cleanupProbability) {
      // Deliberately not awaited: cleanup is best-effort and must never
      // delay or fail the caller's request.
      void this.pool.query(`DELETE FROM ${this.table} WHERE prefix = $1 AND expires_at < $2`, [
        this.prefix,
        now
      ]).catch(() => {
      });
    }
    return result;
  }
  /**
   * Retry limit(key) until it succeeds or `timeout` elapses, sleeping
   * until the reported reset between attempts. Returns the last result,
   * which may still be unsuccessful if the deadline was hit.
   */
  async blockUntilReady(key, timeout, opts) {
    const timeoutMs = toMs(timeout);
    const deadline = this.clock().getTime() + timeoutMs;
    let result = await this.limit(key, opts);
    if (result.success) {
      return result;
    }
    while (true) {
      const now = this.clock().getTime();
      const remaining = deadline - now;
      if (remaining <= 0) {
        return result;
      }
      const sleepMs = result.reset - now;
      if (sleepMs > remaining) {
        // The next window opens after our deadline; give up early.
        return result;
      }
      if (sleepMs > 0) {
        await new Promise((resolve) => setTimeout(resolve, sleepMs));
      }
      result = await this.limit(key, opts);
      if (result.success) {
        return result;
      }
    }
  }
  /**
   * Read-only view of the remaining allowance and reset time for `key`,
   * without consuming anything. Mirrors the per-algorithm math in the
   * limit path but never writes.
   */
  async getRemaining(key) {
    await ensureTables(this.pool);
    const now = this.clock();
    const selectSql = `
      SELECT count, prev_count, window_start, expires_at, tokens, last_refill
      FROM ${this.table}
      WHERE prefix = $1 AND key = $2
    `;
    if (this.debug) {
      console.debug("pg-ratelimit getRemaining:", selectSql, [this.prefix, key]);
    }
    const result = await this.pool.query(selectSql, [this.prefix, key]);
    if (result.rows.length === 0) {
      // No row: the key has its full allowance.
      switch (this.algorithm.type) {
        case "fixedWindow":
          return {
            remaining: this.algorithm.tokens,
            reset: now.getTime() + this.windowMs
          };
        case "slidingWindow":
          return {
            remaining: this.algorithm.tokens,
            reset: now.getTime() + this.windowMs
          };
        case "tokenBucket":
          return {
            remaining: this.algorithm.maxTokens,
            reset: now.getTime() + this.algorithm.maxTokens / this.algorithm.refillRate * this.intervalMs
          };
      }
    }
    const row = result.rows[0];
    switch (this.algorithm.type) {
      case "fixedWindow": {
        if (new Date(row.expires_at).getTime() < now.getTime()) {
          // Stored window already ended; treat as a fresh window.
          return {
            remaining: this.algorithm.tokens,
            reset: now.getTime() + this.windowMs
          };
        }
        const count = Number(row.count) || 0;
        return {
          remaining: Math.max(0, this.algorithm.tokens - count),
          reset: new Date(row.expires_at).getTime()
        };
      }
      case "slidingWindow": {
        // Same two-bucket rotation as the write path, minus the +rate.
        const oldWindowStart = new Date(row.window_start).getTime();
        const nowMs = now.getTime();
        let prevCount;
        let count;
        let windowStart;
        if (oldWindowStart + this.windowMs > nowMs) {
          prevCount = Number(row.prev_count) || 0;
          count = Number(row.count) || 0;
          windowStart = oldWindowStart;
        } else if (oldWindowStart + 2 * this.windowMs > nowMs) {
          prevCount = Number(row.count) || 0;
          count = 0;
          windowStart = oldWindowStart + this.windowMs;
        } else {
          return {
            remaining: this.algorithm.tokens,
            reset: nowMs + this.windowMs
          };
        }
        const elapsed = nowMs - windowStart;
        const weight = 1 - elapsed / this.windowMs;
        const effective = prevCount * weight + count;
        return {
          remaining: Math.max(0, this.algorithm.tokens - effective),
          reset: windowStart + this.windowMs
        };
      }
      case "tokenBucket": {
        // Same lazy-refill math as the write path, without persisting.
        const lastRefill = new Date(row.last_refill).getTime();
        const elapsed = now.getTime() - lastRefill;
        const refilled = Math.floor(elapsed / this.intervalMs) * this.algorithm.refillRate;
        const currentTokens = Math.min(Number(row.tokens) + refilled, this.algorithm.maxTokens);
        const ttlMs = this.algorithm.maxTokens / this.algorithm.refillRate * this.intervalMs;
        return {
          remaining: Math.max(0, currentTokens),
          reset: now.getTime() + ttlMs
        };
      }
    }
  }
  /** Delete all stored state for `key`, restoring its full allowance. */
  async resetUsedTokens(key) {
    await ensureTables(this.pool);
    const sql = `DELETE FROM ${this.table} WHERE prefix = $1 AND key = $2`;
    if (this.debug) {
      console.debug("pg-ratelimit resetUsedTokens:", sql, [this.prefix, key]);
    }
    await this.pool.query(sql, [this.prefix, key]);
  }
};
|
|
478
|
+
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design: the `0 &&` guard means it never runs, but Node's
// static CJS named-exports detection still picks up the names)
0 && (module.exports = {
  Ratelimit,
  TABLE_SQL
});
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { Pool } from 'pg';

/** Shorthand time unit: seconds, minutes, hours, days. */
type TimeUnit = "s" | "m" | "h" | "d";
/** Duration string such as "10s" or "10 s"; plain numbers are milliseconds. */
type Duration = `${number} ${TimeUnit}` | `${number}${TimeUnit}`;
/** Algorithm descriptor; build via the Ratelimit static factories. */
type Algorithm = {
  type: "fixedWindow";
  tokens: number;
  window: Duration | number;
} | {
  type: "slidingWindow";
  tokens: number;
  window: Duration | number;
} | {
  type: "tokenBucket";
  refillRate: number;
  interval: Duration | number;
  maxTokens: number;
};
/** Outcome of a limit() call; `reset` is epoch milliseconds. */
interface LimitResult {
  success: boolean;
  limit: number;
  remaining: number;
  reset: number;
}
/** Injectable time source (mainly for tests). */
type Clock = () => Date;
/** Constructor options; `synchronousCommit` is only accepted with `durable: true`. */
type RatelimitConfig = {
  pool: Pool;
  limiter: Algorithm;
  prefix: string;
  debug?: boolean;
  clock?: Clock;
  cleanupProbability?: number;
} & ({
  durable?: false;
} | {
  durable: true;
  synchronousCommit?: boolean;
});

/** PostgreSQL-backed rate limiter with an Upstash-compatible surface. */
declare class Ratelimit {
  private readonly pool;
  private readonly algorithm;
  private readonly prefix;
  private readonly table;
  private readonly debug;
  private readonly clock;
  private readonly durable;
  private readonly synchronousCommit;
  private readonly cleanupProbability;
  private readonly windowMs;
  private readonly intervalMs;
  constructor(config: RatelimitConfig);
  static fixedWindow(tokens: number, window: Duration | number): Algorithm;
  static slidingWindow(tokens: number, window: Duration | number): Algorithm;
  static tokenBucket(refillRate: number, interval: Duration | number, maxTokens: number): Algorithm;
  limit(key: string, opts?: {
    rate?: number;
  }): Promise<LimitResult>;
  blockUntilReady(key: string, timeout: Duration | number, opts?: {
    rate?: number;
  }): Promise<LimitResult>;
  getRemaining(key: string): Promise<{
    remaining: number;
    reset: number;
  }>;
  resetUsedTokens(key: string): Promise<void>;
}

/** Raw DDL for the backing tables, for manual/offline migrations. */
declare const TABLE_SQL: string;

export { type Algorithm, type Clock, type Duration, type LimitResult, Ratelimit, type RatelimitConfig, TABLE_SQL };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { Pool } from 'pg';

/** Shorthand time unit: seconds, minutes, hours, days. */
type TimeUnit = "s" | "m" | "h" | "d";
/** Duration string such as "10s" or "10 s"; plain numbers are milliseconds. */
type Duration = `${number} ${TimeUnit}` | `${number}${TimeUnit}`;
/** Algorithm descriptor; build via the Ratelimit static factories. */
type Algorithm = {
  type: "fixedWindow";
  tokens: number;
  window: Duration | number;
} | {
  type: "slidingWindow";
  tokens: number;
  window: Duration | number;
} | {
  type: "tokenBucket";
  refillRate: number;
  interval: Duration | number;
  maxTokens: number;
};
/** Outcome of a limit() call; `reset` is epoch milliseconds. */
interface LimitResult {
  success: boolean;
  limit: number;
  remaining: number;
  reset: number;
}
/** Injectable time source (mainly for tests). */
type Clock = () => Date;
/** Constructor options; `synchronousCommit` is only accepted with `durable: true`. */
type RatelimitConfig = {
  pool: Pool;
  limiter: Algorithm;
  prefix: string;
  debug?: boolean;
  clock?: Clock;
  cleanupProbability?: number;
} & ({
  durable?: false;
} | {
  durable: true;
  synchronousCommit?: boolean;
});

/** PostgreSQL-backed rate limiter with an Upstash-compatible surface. */
declare class Ratelimit {
  private readonly pool;
  private readonly algorithm;
  private readonly prefix;
  private readonly table;
  private readonly debug;
  private readonly clock;
  private readonly durable;
  private readonly synchronousCommit;
  private readonly cleanupProbability;
  private readonly windowMs;
  private readonly intervalMs;
  constructor(config: RatelimitConfig);
  static fixedWindow(tokens: number, window: Duration | number): Algorithm;
  static slidingWindow(tokens: number, window: Duration | number): Algorithm;
  static tokenBucket(refillRate: number, interval: Duration | number, maxTokens: number): Algorithm;
  limit(key: string, opts?: {
    rate?: number;
  }): Promise<LimitResult>;
  blockUntilReady(key: string, timeout: Duration | number, opts?: {
    rate?: number;
  }): Promise<LimitResult>;
  getRemaining(key: string): Promise<{
    remaining: number;
    reset: number;
  }>;
  resetUsedTokens(key: string): Promise<void>;
}

/** Raw DDL for the backing tables, for manual/offline migrations. */
declare const TABLE_SQL: string;

export { type Algorithm, type Clock, type Duration, type LimitResult, Ratelimit, type RatelimitConfig, TABLE_SQL };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,454 @@
|
|
|
1
|
+
// src/duration.ts
// Milliseconds per supported shorthand unit.
var MULTIPLIERS = {
  s: 1e3,
  m: 6e4,
  h: 36e5,
  d: 864e5
};
// Accepts "<digits><unit>" with optional whitespace, e.g. "10s" or "2 h".
var DURATION_RE = /^\s*(\d+)\s*(s|m|h|d)\s*$/;
/**
 * Normalizes a duration to a positive number of milliseconds.
 *
 * Numbers are taken as milliseconds verbatim; strings must match
 * DURATION_RE. Throws an Error for non-finite, non-positive, or
 * unparseable input.
 */
function toMs(duration) {
  if (typeof duration === "number") {
    const ok = Number.isFinite(duration) && duration > 0;
    if (ok) {
      return duration;
    }
    throw new Error(
      `Invalid duration: ${duration}. Must be a positive finite number of milliseconds.`
    );
  }
  const parsed = DURATION_RE.exec(duration);
  if (parsed === null) {
    throw new Error(
      `Invalid duration format: "${duration}". Expected format: "<number><unit>" where unit is s, m, h, or d.`
    );
  }
  const [, rawValue, unit] = parsed;
  const value = Number(rawValue);
  if (value <= 0) {
    throw new Error(`Invalid duration: "${duration}". Value must be positive.`);
  }
  return value * MULTIPLIERS[unit];
}
|
|
31
|
+
|
|
32
|
+
// src/tables.sql
// DDL for both backing tables. The ephemeral table is UNLOGGED (cheaper
// writes, not crash-safe); the durable table is a regular table. Both share
// the same column layout so every algorithm can target either one.
var tables_default = "CREATE UNLOGGED TABLE IF NOT EXISTS rate_limit_ephemeral (\n prefix TEXT NOT NULL,\n key TEXT NOT NULL,\n count BIGINT,\n prev_count BIGINT,\n window_start TIMESTAMPTZ,\n tokens DOUBLE PRECISION,\n last_refill TIMESTAMPTZ,\n expires_at TIMESTAMPTZ NOT NULL,\n PRIMARY KEY (prefix, key)\n);\n\nCREATE INDEX IF NOT EXISTS idx_rate_limit_ephemeral_cleanup\n ON rate_limit_ephemeral (prefix, expires_at);\n\nCREATE TABLE IF NOT EXISTS rate_limit_durable (\n prefix TEXT NOT NULL,\n key TEXT NOT NULL,\n count BIGINT,\n prev_count BIGINT,\n window_start TIMESTAMPTZ,\n tokens DOUBLE PRECISION,\n last_refill TIMESTAMPTZ,\n expires_at TIMESTAMPTZ NOT NULL,\n PRIMARY KEY (prefix, key)\n);\n\nCREATE INDEX IF NOT EXISTS idx_rate_limit_durable_cleanup\n ON rate_limit_durable (prefix, expires_at);\n";

// src/tables.ts
var TABLE_SQL = tables_default;
// Pools that have already been migrated this process lifetime
// (WeakSet so closed pools can be garbage-collected).
var initialized = /* @__PURE__ */ new WeakSet();
// Runs the idempotent DDL once per pool. Setting
// PG_RATELIMIT_DISABLE_AUTO_MIGRATE=true skips the DDL entirely
// (the caller is then responsible for applying TABLE_SQL themselves).
async function ensureTables(pool) {
  if (initialized.has(pool)) {
    return;
  }
  if (process.env.PG_RATELIMIT_DISABLE_AUTO_MIGRATE === "true") {
    initialized.add(pool);
    return;
  }
  await pool.query(TABLE_SQL);
  initialized.add(pool);
}
|
|
49
|
+
|
|
50
|
+
// src/algorithms/fixed-window.ts
// Fixed-window rate limiting: one atomic upsert per call. When the stored
// row's expires_at is in the past the window restarts at `now`; otherwise
// `rate` is added to the count. The returned count decides success.
async function fixedWindow(ctx, tokens, windowMs) {
  const { pool, table, prefix, key, rate, now, debug } = ctx;
  const windowInterval = `${windowMs} milliseconds`;
  // All three CASE expressions key off the same expiry test, so a stale
  // row is reset and a live row is incremented in a single statement.
  const sql = `
    INSERT INTO ${table} (prefix, key, count, window_start, expires_at)
    VALUES ($1, $2, $3, $4::timestamptz, $4::timestamptz + $5::interval)
    ON CONFLICT (prefix, key) DO UPDATE
    SET count = CASE
      WHEN ${table}.expires_at < $4::timestamptz THEN $3
      ELSE ${table}.count + $3
    END,
    window_start = CASE
      WHEN ${table}.expires_at < $4::timestamptz THEN $4::timestamptz
      ELSE ${table}.window_start
    END,
    expires_at = CASE
      WHEN ${table}.expires_at < $4::timestamptz THEN $4::timestamptz + $5::interval
      ELSE ${table}.expires_at
    END
    RETURNING count, expires_at
  `;
  const params = [prefix, key, rate, now, windowInterval];
  if (debug) {
    console.debug("pg-ratelimit fixed-window:", sql, params);
  }
  const result = await pool.query(sql, params);
  const row = result.rows[0];
  // node-postgres returns BIGINT columns as strings; normalize to number.
  const count = Number(row.count);
  return {
    success: count <= tokens,
    limit: tokens,
    remaining: Math.max(0, tokens - count),
    reset: new Date(row.expires_at).getTime()
  };
}
|
|
86
|
+
|
|
87
|
+
// src/algorithms/sliding-window.ts
/**
 * Sliding-window counter (two-bucket weighted approximation).
 *
 * Keeps the current window's count plus the previous window's count; the
 * previous count decays linearly as the current window ages, so
 * `effective = prev * weight + current + rate`. The read-modify-write runs
 * inside a transaction with SELECT ... FOR UPDATE so concurrent hits on
 * the same key serialize. Failed (rate-limited) requests write nothing.
 *
 * @param ctx - limiter context: { pool, table, prefix, key, rate, now, debug, synchronousCommit }.
 * @param tokens - maximum weighted hits allowed per window.
 * @param windowMs - window length in milliseconds.
 * @returns {Promise<{success: boolean, limit: number, remaining: number, reset: number}>}
 */
async function slidingWindow(ctx, tokens, windowMs) {
  const { pool, table, prefix, key, rate, now, debug, synchronousCommit } = ctx;
  const client = await pool.connect();
  try {
    await client.query("BEGIN");
    if (!synchronousCommit) {
      // Trade a sliver of durability for commit latency when allowed.
      await client.query("SET LOCAL synchronous_commit = off");
    }
    // Rows live two windows: one active, one for the decaying carry-over.
    const doubleWindowInterval = `${2 * windowMs} milliseconds`;
    // Seed the row if absent so the locking SELECT below always finds one.
    const ensureSql = `
      INSERT INTO ${table} (prefix, key, count, prev_count, window_start, expires_at)
      VALUES ($1, $2, 0, 0, $3::timestamptz, $3::timestamptz + $4::interval)
      ON CONFLICT (prefix, key) DO NOTHING
    `;
    await client.query(ensureSql, [prefix, key, now, doubleWindowInterval]);
    const selectSql = `
      SELECT count, prev_count, window_start, expires_at
      FROM ${table}
      WHERE prefix = $1 AND key = $2
      FOR UPDATE
    `;
    if (debug) {
      console.debug("pg-ratelimit sliding-window SELECT:", selectSql, [prefix, key]);
    }
    const { rows } = await client.query(selectSql, [prefix, key]);
    const stored = rows[0];
    const storedStart = new Date(stored.window_start).getTime();
    const nowMs = now.getTime();
    // Defaults cover the "row fully stale" case: both buckets start empty.
    let prevCount = 0;
    let count = 0;
    let windowStart = now;
    if (nowMs < storedStart + windowMs) {
      // Still inside the stored window; use both buckets as-is.
      prevCount = Number(stored.prev_count) || 0;
      count = Number(stored.count) || 0;
      windowStart = new Date(stored.window_start);
    } else if (nowMs < storedStart + 2 * windowMs) {
      // Rolled into the next window: current bucket becomes the previous.
      prevCount = Number(stored.count) || 0;
      windowStart = new Date(storedStart + windowMs);
    }
    // Linear decay of the previous window plus this request's cost.
    const weight = 1 - (nowMs - windowStart.getTime()) / windowMs;
    const effective = prevCount * weight + count + rate;
    const success = effective <= tokens;
    if (success) {
      const upsertSql = `
        INSERT INTO ${table} (prefix, key, count, prev_count, window_start, expires_at)
        VALUES ($1, $2, $3, $4, $5::timestamptz, $5::timestamptz + $6::interval)
        ON CONFLICT (prefix, key) DO UPDATE
        SET count = $3,
            prev_count = $4,
            window_start = $5::timestamptz,
            expires_at = $5::timestamptz + $6::interval
      `;
      const upsertParams = [prefix, key, count + rate, prevCount, windowStart, doubleWindowInterval];
      if (debug) {
        console.debug("pg-ratelimit sliding-window UPSERT:", upsertSql, upsertParams);
      }
      await client.query(upsertSql, upsertParams);
    }
    await client.query("COMMIT");
    return {
      success,
      limit: tokens,
      remaining: Math.max(0, tokens - effective),
      reset: windowStart.getTime() + windowMs,
    };
  } catch (err) {
    // Best-effort rollback; the original error is what the caller needs.
    await client.query("ROLLBACK").catch(() => {
    });
    throw err;
  } finally {
    client.release();
  }
}
|
|
169
|
+
|
|
170
|
+
// src/algorithms/token-bucket.ts
/**
 * Token bucket: capacity `maxTokens`, refilled `refillRate` tokens per
 * `intervalMs`, each call consuming `ctx.rate` tokens.
 *
 * Runs inside a transaction with SELECT ... FOR UPDATE so concurrent hits
 * on the same key serialize. Denied requests write nothing.
 *
 * Bug fix: the refill clock (`last_refill`) is now advanced only by the
 * WHOLE intervals actually credited (or snapped to `now` once the bucket
 * is full). The previous code rewrote `last_refill = now` on every
 * successful hit, discarding partial-interval progress — a caller hitting
 * the limiter slightly faster than `intervalMs` would never accrue a
 * single token, i.e. the effective refill rate collapsed to zero.
 *
 * @param ctx - limiter context: { pool, table, prefix, key, rate, now, debug, synchronousCommit }.
 * @param maxTokens - bucket capacity.
 * @param refillRate - tokens added per interval.
 * @param intervalMs - refill interval in milliseconds.
 * @returns {Promise<{success: boolean, limit: number, remaining: number, reset: number}>}
 */
async function tokenBucket(ctx, maxTokens, refillRate, intervalMs) {
  const { pool, table, prefix, key, rate, now, debug, synchronousCommit } = ctx;
  const client = await pool.connect();
  // Row TTL: the time an empty bucket needs to refill completely; a row
  // older than that is equivalent to a full (absent) bucket.
  const ttlMs = maxTokens / refillRate * intervalMs;
  try {
    await client.query("BEGIN");
    if (!synchronousCommit) {
      await client.query("SET LOCAL synchronous_commit = off");
    }
    const ttlInterval = `${ttlMs} milliseconds`;
    // Seed a full bucket if absent so the locking SELECT always finds one.
    const ensureSql = `
      INSERT INTO ${table} (prefix, key, tokens, last_refill, expires_at)
      VALUES ($1, $2, $3, $4::timestamptz, $4::timestamptz + $5::interval)
      ON CONFLICT (prefix, key) DO NOTHING
    `;
    await client.query(ensureSql, [prefix, key, maxTokens, now, ttlInterval]);
    const selectSql = `
      SELECT tokens, last_refill, expires_at
      FROM ${table}
      WHERE prefix = $1 AND key = $2
      FOR UPDATE
    `;
    if (debug) {
      console.debug("pg-ratelimit token-bucket SELECT:", selectSql, [prefix, key]);
    }
    const existing = await client.query(selectSql, [prefix, key]);
    const row = existing.rows[0];
    const lastRefill = new Date(row.last_refill).getTime();
    const elapsed = now.getTime() - lastRefill;
    // Only whole intervals are credited; the remainder stays banked via
    // the (possibly) unchanged last_refill below.
    const intervalsElapsed = Math.floor(elapsed / intervalMs);
    const refilled = intervalsElapsed * refillRate;
    const currentTokens = Math.min(Number(row.tokens) + refilled, maxTokens);
    const newTokens = currentTokens - rate;
    const success = newTokens >= 0;
    if (success) {
      // Advance the refill clock by the credited intervals only; once the
      // bucket is full there is no partial progress worth preserving.
      const newLastRefill = currentTokens >= maxTokens
        ? now
        : new Date(lastRefill + intervalsElapsed * intervalMs);
      const upsertSql = `
        INSERT INTO ${table} (prefix, key, tokens, last_refill, expires_at)
        VALUES ($1, $2, $3, $4::timestamptz, $4::timestamptz + $5::interval)
        ON CONFLICT (prefix, key) DO UPDATE
        SET tokens = $3, last_refill = $4::timestamptz, expires_at = $4::timestamptz + $5::interval
      `;
      const upsertParams = [prefix, key, newTokens, newLastRefill, ttlInterval];
      if (debug) {
        console.debug("pg-ratelimit token-bucket UPSERT:", upsertSql, upsertParams);
      }
      await client.query(upsertSql, upsertParams);
    }
    await client.query("COMMIT");
    let reset;
    if (success) {
      reset = now.getTime() + ttlMs;
    } else {
      // Conservative (never-early) estimate of when enough tokens exist,
      // measured from `now` rather than from the banked refill clock.
      const tokensNeeded = rate - currentTokens;
      const intervalsNeeded = Math.ceil(tokensNeeded / refillRate);
      reset = now.getTime() + intervalsNeeded * intervalMs;
    }
    return {
      success,
      limit: maxTokens,
      remaining: Math.max(0, success ? newTokens : currentTokens),
      reset
    };
  } catch (err) {
    // Best-effort rollback; surface the original error.
    await client.query("ROLLBACK").catch(() => {
    });
    throw err;
  } finally {
    client.release();
  }
}
|
|
240
|
+
|
|
241
|
+
// src/limiter.ts
//
// Public entry point. `Ratelimit` binds a pg Pool to one of the three
// algorithm implementations above, adds lazy table creation (ensureTables)
// and probabilistic cleanup of expired rows on the hot path.
var Ratelimit = class {
  pool;
  // Algorithm descriptor produced by the static factories below:
  // { type: "fixedWindow"|"slidingWindow", tokens, window } or
  // { type: "tokenBucket", refillRate, interval, maxTokens }.
  algorithm;
  // Key namespace; also the partition column used by the cleanup DELETE.
  prefix;
  // Backing table name: "rate_limit_durable" or "rate_limit_ephemeral".
  table;
  debug;
  // () => Date; injectable for deterministic tests.
  clock;
  durable;
  // When true, algorithms skip "SET LOCAL synchronous_commit = off".
  synchronousCommit;
  // Chance in [0, 1] that a limit() call also fires a cleanup DELETE.
  cleanupProbability;
  // Pre-parsed durations (only the relevant one is used per algorithm)
  windowMs;
  intervalMs;
  /**
   * @param config - { pool, limiter, prefix, debug?, clock?, durable?,
   *   synchronousCommit?, cleanupProbability? }.
   * @throws {Error} when prefix is empty or cleanupProbability is outside [0, 1].
   */
  constructor(config) {
    if (!config.prefix) {
      throw new Error("prefix must be a non-empty string");
    }
    const cleanupProbability = config.cleanupProbability ?? 0.1;
    if (cleanupProbability < 0 || cleanupProbability > 1) {
      throw new Error("cleanupProbability must be between 0 and 1");
    }
    this.pool = config.pool;
    this.algorithm = config.limiter;
    this.prefix = config.prefix;
    this.debug = config.debug ?? false;
    this.clock = config.clock ?? (() => /* @__PURE__ */ new Date());
    this.durable = "durable" in config && config.durable === true;
    this.table = this.durable ? "rate_limit_durable" : "rate_limit_ephemeral";
    this.cleanupProbability = cleanupProbability;
    // Ephemeral (UNLOGGED) mode never disables synchronous_commit; durable
    // mode disables it unless the caller opts back in with
    // synchronousCommit: true. NOTE(review): confirm this default trade-off
    // (durable table + async commit) is intentional.
    this.synchronousCommit = !this.durable || "synchronousCommit" in config && config.synchronousCommit === true;
    // Parse the duration string once up front; the unused field is zeroed.
    if (this.algorithm.type === "fixedWindow" || this.algorithm.type === "slidingWindow") {
      this.windowMs = toMs(this.algorithm.window);
      this.intervalMs = 0;
    } else {
      this.windowMs = 0;
      this.intervalMs = toMs(this.algorithm.interval);
    }
  }
  // Factory: allow `tokens` hits per fixed `window` (duration string).
  static fixedWindow(tokens, window) {
    return { type: "fixedWindow", tokens, window };
  }
  // Factory: allow `tokens` weighted hits per sliding `window`.
  static slidingWindow(tokens, window) {
    return { type: "slidingWindow", tokens, window };
  }
  // Factory: bucket of `maxTokens`, refilled `refillRate` per `interval`.
  static tokenBucket(refillRate, interval, maxTokens) {
    return { type: "tokenBucket", refillRate, interval, maxTokens };
  }
  /**
   * Consume `opts.rate` (default 1) units for `key`.
   *
   * @returns {Promise<{success, limit, remaining, reset}>} where `reset`
   *   is a Unix epoch in milliseconds.
   */
  async limit(key, opts) {
    const rate = opts?.rate ?? 1;
    const now = this.clock();
    await ensureTables(this.pool);
    const ctx = {
      pool: this.pool,
      table: this.table,
      prefix: this.prefix,
      key,
      rate,
      now,
      debug: this.debug,
      synchronousCommit: this.synchronousCommit
    };
    let result;
    switch (this.algorithm.type) {
      case "fixedWindow":
        result = await fixedWindow(ctx, this.algorithm.tokens, this.windowMs);
        break;
      case "slidingWindow":
        result = await slidingWindow(ctx, this.algorithm.tokens, this.windowMs);
        break;
      case "tokenBucket":
        result = await tokenBucket(
          ctx,
          this.algorithm.maxTokens,
          this.algorithm.refillRate,
          this.intervalMs
        );
        break;
    }
    // Fire-and-forget cleanup of expired rows for this prefix; errors are
    // deliberately swallowed so cleanup can never fail a rate-limit check.
    if (Math.random() < this.cleanupProbability) {
      void this.pool.query(`DELETE FROM ${this.table} WHERE prefix = $1 AND expires_at < $2`, [
        this.prefix,
        now
      ]).catch(() => {
      });
    }
    // NOTE(review): if algorithm.type were none of the three known values,
    // `result` would be returned as undefined; the static factories make
    // that unreachable in practice.
    return result;
  }
  /**
   * Repeatedly call limit() until it succeeds or `timeout` (duration
   * string) elapses. Returns the last result either way — callers must
   * still check `.success`.
   */
  async blockUntilReady(key, timeout, opts) {
    const timeoutMs = toMs(timeout);
    const deadline = this.clock().getTime() + timeoutMs;
    let result = await this.limit(key, opts);
    if (result.success) {
      return result;
    }
    while (true) {
      const now = this.clock().getTime();
      const remaining = deadline - now;
      if (remaining <= 0) {
        return result;
      }
      const sleepMs = result.reset - now;
      // Waiting until `reset` would overshoot the deadline: give up now.
      if (sleepMs > remaining) {
        return result;
      }
      // NOTE(review): when result.reset is already in the past this retries
      // immediately without sleeping — a tight loop against the database if
      // the limiter keeps refusing with a stale reset value.
      if (sleepMs > 0) {
        await new Promise((resolve) => setTimeout(resolve, sleepMs));
      }
      result = await this.limit(key, opts);
      if (result.success) {
        return result;
      }
    }
  }
  /**
   * Read-only view of the budget for `key`: { remaining, reset } without
   * consuming anything. Mirrors each algorithm's refill/rollover math but
   * never writes to the table.
   */
  async getRemaining(key) {
    await ensureTables(this.pool);
    const now = this.clock();
    const selectSql = `
      SELECT count, prev_count, window_start, expires_at, tokens, last_refill
      FROM ${this.table}
      WHERE prefix = $1 AND key = $2
    `;
    if (this.debug) {
      console.debug("pg-ratelimit getRemaining:", selectSql, [this.prefix, key]);
    }
    const result = await this.pool.query(selectSql, [this.prefix, key]);
    // No row yet: the full budget is available.
    if (result.rows.length === 0) {
      switch (this.algorithm.type) {
        case "fixedWindow":
          return {
            remaining: this.algorithm.tokens,
            reset: now.getTime() + this.windowMs
          };
        case "slidingWindow":
          return {
            remaining: this.algorithm.tokens,
            reset: now.getTime() + this.windowMs
          };
        case "tokenBucket":
          return {
            remaining: this.algorithm.maxTokens,
            // Time for an empty bucket to refill completely.
            reset: now.getTime() + this.algorithm.maxTokens / this.algorithm.refillRate * this.intervalMs
          };
      }
    }
    const row = result.rows[0];
    switch (this.algorithm.type) {
      case "fixedWindow": {
        // Expired window: full budget again.
        if (new Date(row.expires_at).getTime() < now.getTime()) {
          return {
            remaining: this.algorithm.tokens,
            reset: now.getTime() + this.windowMs
          };
        }
        const count = Number(row.count) || 0;
        return {
          remaining: Math.max(0, this.algorithm.tokens - count),
          reset: new Date(row.expires_at).getTime()
        };
      }
      case "slidingWindow": {
        // Same two-bucket rollover as slidingWindow(), minus the request
        // cost since nothing is being consumed here.
        const oldWindowStart = new Date(row.window_start).getTime();
        const nowMs = now.getTime();
        let prevCount;
        let count;
        let windowStart;
        if (oldWindowStart + this.windowMs > nowMs) {
          prevCount = Number(row.prev_count) || 0;
          count = Number(row.count) || 0;
          windowStart = oldWindowStart;
        } else if (oldWindowStart + 2 * this.windowMs > nowMs) {
          prevCount = Number(row.count) || 0;
          count = 0;
          windowStart = oldWindowStart + this.windowMs;
        } else {
          // Row is fully stale: full budget.
          return {
            remaining: this.algorithm.tokens,
            reset: nowMs + this.windowMs
          };
        }
        const elapsed = nowMs - windowStart;
        const weight = 1 - elapsed / this.windowMs;
        const effective = prevCount * weight + count;
        return {
          remaining: Math.max(0, this.algorithm.tokens - effective),
          reset: windowStart + this.windowMs
        };
      }
      case "tokenBucket": {
        // Recompute the refill without persisting a new last_refill.
        const lastRefill = new Date(row.last_refill).getTime();
        const elapsed = now.getTime() - lastRefill;
        const refilled = Math.floor(elapsed / this.intervalMs) * this.algorithm.refillRate;
        const currentTokens = Math.min(Number(row.tokens) + refilled, this.algorithm.maxTokens);
        const ttlMs = this.algorithm.maxTokens / this.algorithm.refillRate * this.intervalMs;
        return {
          remaining: Math.max(0, currentTokens),
          reset: now.getTime() + ttlMs
        };
      }
    }
  }
  /**
   * Forget all usage for `key` by deleting its row; the next limit() call
   * starts from a clean slate.
   */
  async resetUsedTokens(key) {
    await ensureTables(this.pool);
    const sql = `DELETE FROM ${this.table} WHERE prefix = $1 AND key = $2`;
    if (this.debug) {
      console.debug("pg-ratelimit resetUsedTokens:", sql, [this.prefix, key]);
    }
    await this.pool.query(sql, [this.prefix, key]);
  }
};
|
|
451
|
+
export {
|
|
452
|
+
Ratelimit,
|
|
453
|
+
TABLE_SQL
|
|
454
|
+
};
|
package/package.json
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "pg-ratelimit",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "PostgreSQL-backed rate limiting for Node.js",
|
|
5
|
+
"license": "MIT",
|
|
6
|
+
"author": "Max Malm",
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "git+https://github.com/benjick/pg-ratelimit.git",
|
|
10
|
+
"directory": "packages/pg-ratelimit"
|
|
11
|
+
},
|
|
12
|
+
"homepage": "https://benjick.js.org/pg-ratelimit",
|
|
13
|
+
"bugs": {
|
|
14
|
+
"url": "https://github.com/benjick/pg-ratelimit/issues"
|
|
15
|
+
},
|
|
16
|
+
"keywords": [
|
|
17
|
+
"rate-limit",
|
|
18
|
+
"ratelimit",
|
|
19
|
+
"postgres",
|
|
20
|
+
"postgresql",
|
|
21
|
+
"rate-limiting",
|
|
22
|
+
"throttle"
|
|
23
|
+
],
|
|
24
|
+
"engines": {
|
|
25
|
+
"node": ">=18.0.0"
|
|
26
|
+
},
|
|
27
|
+
"files": [
|
|
28
|
+
"dist"
|
|
29
|
+
],
|
|
30
|
+
"type": "module",
|
|
31
|
+
"main": "./dist/index.cjs",
|
|
32
|
+
"module": "./dist/index.js",
|
|
33
|
+
"types": "./dist/index.d.ts",
|
|
34
|
+
"exports": {
|
|
35
|
+
".": {
|
|
36
|
+
"import": {
|
|
37
|
+
"types": "./dist/index.d.ts",
|
|
38
|
+
"default": "./dist/index.js"
|
|
39
|
+
},
|
|
40
|
+
"require": {
|
|
41
|
+
"types": "./dist/index.d.cts",
|
|
42
|
+
"default": "./dist/index.cjs"
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
},
|
|
46
|
+
"devDependencies": {
|
|
47
|
+
"@testcontainers/postgresql": "^10.0.0",
|
|
48
|
+
"@types/pg": "^8.0.0",
|
|
49
|
+
"pg": "^8.0.0",
|
|
50
|
+
"tsup": "^8.0.0",
|
|
51
|
+
"typescript": "^5.0.0",
|
|
52
|
+
"vitest": "^3.0.0"
|
|
53
|
+
},
|
|
54
|
+
"peerDependencies": {
|
|
55
|
+
"pg": "^8.0.0"
|
|
56
|
+
},
|
|
57
|
+
"scripts": {
|
|
58
|
+
"build": "tsup",
|
|
59
|
+
"test": "vitest run"
|
|
60
|
+
}
|
|
61
|
+
}
|