@open-mercato/cache 0.3.2 → 0.4.2-canary-c02407ff85
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.test-cache.json +4 -0
- package/build.mjs +61 -0
- package/dist/errors.js +12 -8
- package/dist/errors.js.map +7 -0
- package/dist/index.js +16 -7
- package/dist/index.js.map +7 -0
- package/dist/service.js +240 -287
- package/dist/service.js.map +7 -0
- package/dist/strategies/jsonfile.js +179 -194
- package/dist/strategies/jsonfile.js.map +7 -0
- package/dist/strategies/memory.js +143 -157
- package/dist/strategies/memory.js.map +7 -0
- package/dist/strategies/redis.js +292 -359
- package/dist/strategies/redis.js.map +7 -0
- package/dist/strategies/sqlite.js +164 -191
- package/dist/strategies/sqlite.js.map +7 -0
- package/dist/tenantContext.js +10 -6
- package/dist/tenantContext.js.map +7 -0
- package/dist/types.js +1 -1
- package/dist/types.js.map +7 -0
- package/jest.config.cjs +19 -0
- package/package.json +40 -12
- package/src/__tests__/memory.strategy.test.ts +15 -7
- package/tsconfig.build.json +9 -1
- package/tsconfig.json +4 -7
- package/watch.mjs +6 -0
- package/dist/errors.d.ts +0 -7
- package/dist/errors.d.ts.map +0 -1
- package/dist/index.d.ts +0 -8
- package/dist/index.d.ts.map +0 -1
- package/dist/service.d.ts +0 -40
- package/dist/service.d.ts.map +0 -1
- package/dist/strategies/jsonfile.d.ts +0 -10
- package/dist/strategies/jsonfile.d.ts.map +0 -1
- package/dist/strategies/memory.d.ts +0 -9
- package/dist/strategies/memory.d.ts.map +0 -1
- package/dist/strategies/redis.d.ts +0 -5
- package/dist/strategies/redis.d.ts.map +0 -1
- package/dist/strategies/sqlite.d.ts +0 -13
- package/dist/strategies/sqlite.d.ts.map +0 -1
- package/dist/tenantContext.d.ts +0 -4
- package/dist/tenantContext.d.ts.map +0 -1
- package/dist/types.d.ts +0 -86
- package/dist/types.d.ts.map +0 -1
- package/jest.config.js +0 -19
package/dist/strategies/redis.js
CHANGED
```diff
@@ -1,388 +1,321 @@
-import { CacheDependencyUnavailableError } from
-/**
- * Redis cache strategy with tag support
- * Persistent across process restarts, can be shared across multiple instances
- *
- * Uses Redis data structures:
- * - Hash for storing cache entries: cache:{key} -> {value, tags, expiresAt, createdAt}
- * - Sets for tag index: tag:{tag} -> Set of keys
- */
+import { CacheDependencyUnavailableError } from "../errors.js";
 let redisModulePromise = null;
-const redisRegistry = new Map();
+const redisRegistry = /* @__PURE__ */ new Map();
 function resolveRequire() {
-… (old lines 13-24 not captured in this diff view)
-  }
-  catch (_a) {
-    // ignore
-  }
-  return null;
+  const nonWebpack = globalThis.__non_webpack_require__;
+  if (typeof nonWebpack === "function") return nonWebpack;
+  if (typeof require === "function") return require;
+  if (typeof module !== "undefined" && typeof module.require === "function") {
+    return module.require.bind(module);
+  }
+  try {
+    const maybeRequire = Function('return typeof require !== "undefined" ? require : undefined')();
+    if (typeof maybeRequire === "function") return maybeRequire;
+  } catch {
+  }
+  return null;
 }
 function loadRedisModuleViaRequire() {
-… (old lines 32-38 not captured in this diff view)
-    return null;
-  }
+  const resolver = resolveRequire();
+  if (!resolver) return null;
+  try {
+    return resolver("ioredis");
+  } catch {
+    return null;
+  }
 }
 function pickRedisConstructor(mod) {
-… (old lines 43-54 not captured in this diff view)
-      queue.push(current.Redis);
-      queue.push((_a = current.module) === null || _a === void 0 ? void 0 : _a.exports);
-      queue.push(current.exports);
-    }
+  const queue = [mod];
+  const seen = /* @__PURE__ */ new Set();
+  while (queue.length) {
+    const current = queue.shift();
+    if (!current || seen.has(current)) continue;
+    seen.add(current);
+    if (typeof current === "function") return current;
+    if (typeof current === "object") {
+      queue.push(current.default);
+      queue.push(current.Redis);
+      queue.push(current.module?.exports);
+      queue.push(current.exports);
     }
-… (old line 60 not captured in this diff view)
+  }
+  return null;
 }
 async function loadRedisModule() {
-… (old lines 63-72 not captured in this diff view)
-  return redisModulePromise;
+  if (!redisModulePromise) {
+    redisModulePromise = (async () => {
+      const required = loadRedisModuleViaRequire() ?? await import("ioredis");
+      return required;
+    })().catch((error) => {
+      redisModulePromise = null;
+      throw new CacheDependencyUnavailableError("redis", "ioredis", error);
+    });
+  }
+  return redisModulePromise;
 }
 function retainRedisEntry(key) {
-… (old lines 76-82 not captured in this diff view)
+  let entry = redisRegistry.get(key);
+  if (!entry) {
+    entry = { refs: 0 };
+    redisRegistry.set(key, entry);
+  }
+  entry.refs += 1;
+  return entry;
 }
 async function acquireRedisClient(key, entry) {
-… (old lines 85-100 not captured in this diff view)
-        redisRegistry.delete(key);
-      }
-      else if (redisRegistry.get(key) === entry) {
-        entry.client = undefined;
-      }
-    });
-    return client;
-  })
-    .catch((error) => {
-    entry.creating = undefined;
-    throw error;
+  if (entry.client) return entry.client;
+  if (entry.creating) return entry.creating;
+  entry.creating = loadRedisModule().then((mod) => {
+    const ctor = pickRedisConstructor(mod);
+    if (!ctor) {
+      throw new CacheDependencyUnavailableError("redis", "ioredis", new Error("No usable Redis constructor"));
+    }
+    const client = new ctor(key);
+    entry.client = client;
+    entry.creating = void 0;
+    client.once?.("end", () => {
+      if (redisRegistry.get(key) === entry && entry.refs === 0) {
+        redisRegistry.delete(key);
+      } else if (redisRegistry.get(key) === entry) {
+        entry.client = void 0;
+      }
     });
-  return
+    return client;
+  }).catch((error) => {
+    entry.creating = void 0;
+    throw error;
+  });
+  return entry.creating;
 }
 async function releaseRedisEntry(key, entry) {
-… (old lines 116-125 not captured in this diff view)
+  entry.refs = Math.max(0, entry.refs - 1);
+  if (entry.refs > 0) return;
+  redisRegistry.delete(key);
+  if (entry.client) {
+    try {
+      await entry.client.quit();
+    } catch {
+    } finally {
+      entry.client = void 0;
+    }
+  }
+}
+function createRedisStrategy(redisUrl, options) {
+  const defaultTtl = options?.defaultTtl;
+  const keyPrefix = "cache:";
+  const tagPrefix = "tag:";
+  const connectionUrl = redisUrl || process.env.REDIS_URL || process.env.CACHE_REDIS_URL || "redis://localhost:6379";
+  const registryEntry = retainRedisEntry(connectionUrl);
+  let redis = registryEntry.client ?? null;
+  async function getRedisClient() {
+    if (redis) return redis;
+    redis = await acquireRedisClient(connectionUrl, registryEntry);
+    return redis;
+  }
+  function getCacheKey(key) {
+    return `${keyPrefix}${key}`;
+  }
+  function getTagKey(tag) {
+    return `${tagPrefix}${tag}`;
+  }
+  function isExpired(entry) {
+    if (entry.expiresAt === null) return false;
+    return Date.now() > entry.expiresAt;
+  }
+  function matchPattern(key, pattern) {
+    const regexPattern = pattern.replace(/[.+^${}()|[\]\\]/g, "\\$&").replace(/\*/g, ".*").replace(/\?/g, ".");
+    const regex = new RegExp(`^${regexPattern}$`);
+    return regex.test(key);
+  }
+  const get = async (key, options2) => {
+    const client = await getRedisClient();
+    const cacheKey = getCacheKey(key);
+    const data = await client.get(cacheKey);
+    if (!data) return null;
+    try {
+      const entry = JSON.parse(data);
+      if (isExpired(entry)) {
+        if (options2?.returnExpired) {
+          return entry.value;
         }
-… (old lines 127-128 not captured in this diff view)
+        await deleteKey(key);
+        return null;
+      }
+      return entry.value;
+    } catch {
+      await client.del(cacheKey);
+      return null;
+    }
+  };
+  const set = async (key, value, options2) => {
+    const client = await getRedisClient();
+    const cacheKey = getCacheKey(key);
+    const oldData = await client.get(cacheKey);
+    if (oldData) {
+      try {
+        const oldEntry = JSON.parse(oldData);
+        const pipeline2 = client.pipeline();
+        for (const tag of oldEntry.tags) {
+          pipeline2.srem(getTagKey(tag), key);
         }
+        await pipeline2.exec();
+      } catch {
+      }
     }
-… (old lines 131-133 not captured in this diff view)
-    const
-… (old lines 135-144 not captured in this diff view)
+    const ttl = options2?.ttl ?? defaultTtl;
+    const tags = options2?.tags || [];
+    const expiresAt = ttl ? Date.now() + ttl : null;
+    const entry = {
+      key,
+      value,
+      tags,
+      expiresAt,
+      createdAt: Date.now()
+    };
+    const pipeline = client.pipeline();
+    const serialized = JSON.stringify(entry);
+    if (ttl) {
+      pipeline.setex(cacheKey, Math.ceil(ttl / 1e3), serialized);
+    } else {
+      pipeline.set(cacheKey, serialized);
     }
-… (old lines 146-147 not captured in this diff view)
+    for (const tag of tags) {
+      pipeline.sadd(getTagKey(tag), key);
     }
-… (old lines 149-150 not captured in this diff view)
+    await pipeline.exec();
+  };
+  const has = async (key) => {
+    const client = await getRedisClient();
+    const cacheKey = getCacheKey(key);
+    const exists = await client.exists(cacheKey);
+    if (!exists) return false;
+    const data = await client.get(cacheKey);
+    if (!data) return false;
+    try {
+      const entry = JSON.parse(data);
+      if (isExpired(entry)) {
+        await deleteKey(key);
+        return false;
+      }
+      return true;
+    } catch {
+      return false;
     }
-… (old lines 152-155 not captured in this diff view)
+  };
+  const deleteKey = async (key) => {
+    const client = await getRedisClient();
+    const cacheKey = getCacheKey(key);
+    const data = await client.get(cacheKey);
+    if (!data) return false;
+    try {
+      const entry = JSON.parse(data);
+      const pipeline = client.pipeline();
+      for (const tag of entry.tags) {
+        pipeline.srem(getTagKey(tag), key);
+      }
+      pipeline.del(cacheKey);
+      await pipeline.exec();
+      return true;
+    } catch {
+      await client.del(cacheKey);
+      return true;
     }
-… (old lines 157-163 not captured in this diff view)
+  };
+  const deleteByTags = async (tags) => {
+    const client = await getRedisClient();
+    const keysToDelete = /* @__PURE__ */ new Set();
+    for (const tag of tags) {
+      const tagKey = getTagKey(tag);
+      const keys2 = await client.smembers(tagKey);
+      for (const key of keys2) {
+        keysToDelete.add(key);
+      }
     }
-… (old lines 165-198 not captured in this diff view)
-        const pipeline = client.pipeline();
-        for (const tag of oldEntry.tags) {
-          pipeline.srem(getTagKey(tag), key);
-        }
-        await pipeline.exec();
-      }
-      catch (_b) {
-        // Ignore parse errors
-      }
-    }
-    const ttl = (_a = options === null || options === void 0 ? void 0 : options.ttl) !== null && _a !== void 0 ? _a : defaultTtl;
-    const tags = (options === null || options === void 0 ? void 0 : options.tags) || [];
-    const expiresAt = ttl ? Date.now() + ttl : null;
-    const entry = {
-      key,
-      value,
-      tags,
-      expiresAt,
-      createdAt: Date.now(),
-    };
-    const pipeline = client.pipeline();
-    // Store the entry
-    const serialized = JSON.stringify(entry);
-    if (ttl) {
-      pipeline.setex(cacheKey, Math.ceil(ttl / 1000), serialized);
-    }
-    else {
-      pipeline.set(cacheKey, serialized);
-    }
-    // Add to tag index
-    for (const tag of tags) {
-      pipeline.sadd(getTagKey(tag), key);
-    }
-    await pipeline.exec();
-  };
-  const has = async (key) => {
-    const client = await getRedisClient();
-    const cacheKey = getCacheKey(key);
-    const exists = await client.exists(cacheKey);
-    if (!exists)
-      return false;
-    // Check if expired
-    const data = await client.get(cacheKey);
-    if (!data)
-      return false;
+    let deleted = 0;
+    for (const key of keysToDelete) {
+      const success = await deleteKey(key);
+      if (success) deleted++;
+    }
+    return deleted;
+  };
+  const clear = async () => {
+    const client = await getRedisClient();
+    const cacheKeys = await client.keys(`${keyPrefix}*`);
+    const tagKeys = await client.keys(`${tagPrefix}*`);
+    if (cacheKeys.length === 0 && tagKeys.length === 0) return 0;
+    const pipeline = client.pipeline();
+    for (const key of [...cacheKeys, ...tagKeys]) {
+      pipeline.del(key);
+    }
+    await pipeline.exec();
+    return cacheKeys.length;
+  };
+  const keys = async (pattern) => {
+    const client = await getRedisClient();
+    const searchPattern = pattern ? `${keyPrefix}${pattern}` : `${keyPrefix}*`;
+    const cacheKeys = await client.keys(searchPattern);
+    const result = cacheKeys.map((key) => key.substring(keyPrefix.length));
+    if (!pattern) return result;
+    return result.filter((key) => matchPattern(key, pattern));
+  };
+  const stats = async () => {
+    const client = await getRedisClient();
+    const cacheKeys = await client.keys(`${keyPrefix}*`);
+    let expired = 0;
+    for (const cacheKey of cacheKeys) {
+      const data = await client.get(cacheKey);
+      if (data) {
         try {
-… (old lines 245-249 not captured in this diff view)
-      return true;
-    }
-    catch (_a) {
-      return false;
+          const entry = JSON.parse(data);
+          if (isExpired(entry)) {
+            expired++;
+          }
+        } catch {
         }
-… (old lines 255-262 not captured in this diff view)
+      }
+    }
+    return { size: cacheKeys.length, expired };
+  };
+  const cleanup = async () => {
+    const client = await getRedisClient();
+    const cacheKeys = await client.keys(`${keyPrefix}*`);
+    let removed = 0;
+    for (const cacheKey of cacheKeys) {
+      const data = await client.get(cacheKey);
+      if (data) {
         try {
-… (old lines 264-272 not captured in this diff view)
-      return true;
-    }
-    catch (_a) {
-      // Just delete the key if we can't parse it
-      await client.del(cacheKey);
-      return true;
+          const entry = JSON.parse(data);
+          if (isExpired(entry)) {
+            const key = cacheKey.substring(keyPrefix.length);
+            await deleteKey(key);
+            removed++;
+          }
+        } catch {
+          await client.del(cacheKey);
+          removed++;
         }
-… (old lines 280-299 not captured in this diff view)
-  };
-  const clear = async () => {
-    const client = await getRedisClient();
-    // Get all cache keys
-    const cacheKeys = await client.keys(`${keyPrefix}*`);
-    const tagKeys = await client.keys(`${tagPrefix}*`);
-    if (cacheKeys.length === 0 && tagKeys.length === 0)
-      return 0;
-    const pipeline = client.pipeline();
-    for (const key of [...cacheKeys, ...tagKeys]) {
-      pipeline.del(key);
-    }
-    await pipeline.exec();
-    return cacheKeys.length;
-  };
-  const keys = async (pattern) => {
-    const client = await getRedisClient();
-    const searchPattern = pattern
-      ? `${keyPrefix}${pattern}`
-      : `${keyPrefix}*`;
-    const cacheKeys = await client.keys(searchPattern);
-    // Remove prefix from keys
-    const result = cacheKeys.map((key) => key.substring(keyPrefix.length));
-    if (!pattern)
-      return result;
-    // Apply pattern matching (Redis KEYS command uses glob pattern, but we want our pattern)
-    return result.filter((key) => matchPattern(key, pattern));
-  };
-  const stats = async () => {
-    const client = await getRedisClient();
-    const cacheKeys = await client.keys(`${keyPrefix}*`);
-    let expired = 0;
-    for (const cacheKey of cacheKeys) {
-      const data = await client.get(cacheKey);
-      if (data) {
-        try {
-          const entry = JSON.parse(data);
-          if (isExpired(entry)) {
-            expired++;
-          }
-        }
-        catch (_a) {
-          // Ignore parse errors
-        }
-      }
-    }
-    return { size: cacheKeys.length, expired };
-  };
-  const cleanup = async () => {
-    const client = await getRedisClient();
-    const cacheKeys = await client.keys(`${keyPrefix}*`);
-    let removed = 0;
-    for (const cacheKey of cacheKeys) {
-      const data = await client.get(cacheKey);
-      if (data) {
-        try {
-          const entry = JSON.parse(data);
-          if (isExpired(entry)) {
-            const key = cacheKey.substring(keyPrefix.length);
-            await deleteKey(key);
-            removed++;
-          }
-        }
-        catch (_a) {
-          // Remove invalid entries
-          await client.del(cacheKey);
-          removed++;
-        }
-      }
-    }
-    return removed;
-  };
-  const close = async () => {
-    await releaseRedisEntry(connectionUrl, registryEntry);
-    redis = null;
-  };
-  return {
-    get,
-    set,
-    has,
-    delete: deleteKey,
-    deleteByTags,
-    clear,
-    keys,
-    stats,
-    cleanup,
-    close,
-  };
+      }
+    }
+    return removed;
+  };
+  const close = async () => {
+    await releaseRedisEntry(connectionUrl, registryEntry);
+    redis = null;
+  };
+  return {
+    get,
+    set,
+    has,
+    delete: deleteKey,
+    deleteByTags,
+    clear,
+    keys,
+    stats,
+    cleanup,
+    close
+  };
 }
+export {
+  createRedisStrategy
+};
+//# sourceMappingURL=redis.js.map
```
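For orientation, a minimal usage sketch of the rebuilt strategy follows, based only on what is visible in the new `dist/strategies/redis.js` above: the `createRedisStrategy(redisUrl, options)` factory, its `defaultTtl` option, and the methods on the returned object. The subpath import and the exact option typings are assumptions, not documented package API.

```ts
// Hypothetical usage sketch; the import path is inferred from the dist layout
// and may differ from the package's actual export map.
import { createRedisStrategy } from "@open-mercato/cache/strategies/redis";

async function demo() {
  // When no URL is passed, the factory falls back to REDIS_URL, then
  // CACHE_REDIS_URL, then redis://localhost:6379 (see connectionUrl above).
  const cache = createRedisStrategy("redis://localhost:6379", { defaultTtl: 60_000 });

  // Entries are stored as JSON strings under "cache:<key>"; each tag keeps a
  // "tag:<tag>" set of the keys it covers. TTLs are in milliseconds.
  await cache.set("user:42", { name: "Ada" }, { tags: ["users"], ttl: 30_000 });

  console.log(await cache.get("user:42"));          // { name: "Ada" } until the TTL elapses
  console.log(await cache.deleteByTags(["users"])); // number of keys removed via the tag index

  // Releases this strategy's reference to the shared ioredis client; the
  // client is quit once the last reference is released.
  await cache.close();
}

demo().catch(console.error);
```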