idb-refined 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -7
- package/dist/cleanWhenTooLarge.js +1 -1
- package/dist/client.d.ts +15 -6
- package/dist/client.js +91 -65
- package/dist/clientCore.d.ts +17 -0
- package/dist/clientCore.js +112 -0
- package/dist/constants.d.ts +6 -0
- package/dist/constants.js +6 -0
- package/dist/index.d.ts +6 -2
- package/dist/index.js +4 -1
- package/dist/initDb.d.ts +4 -0
- package/dist/initDb.js +16 -4
- package/dist/putWithEviction.js +1 -2
- package/dist/worker.js +579 -0
- package/dist/workerProtocol.d.ts +52 -0
- package/dist/workerProtocol.js +1 -0
- package/package.json +15 -3
package/README.md
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
# idb-refined
|
|
2
2
|
|
|
3
|
-
Minimal IndexedDB client on top of [idb](https://www.npmjs.com/package/idb). Exposes **
|
|
3
|
+
Minimal IndexedDB client on top of [idb](https://www.npmjs.com/package/idb). Exposes **set**, **get**, **update**, **delete**, and **deleteDb**. Init, schema, cleanup and eviction run automatically. See [CHANGELOG.md](CHANGELOG.md) for version history.
|
|
4
4
|
|
|
5
5
|
## Install
|
|
6
6
|
|
|
@@ -10,27 +10,37 @@ pnpm add idb-refined
|
|
|
10
10
|
npm install idb-refined
|
|
11
11
|
```
|
|
12
12
|
|
|
13
|
+
## When to use
|
|
14
|
+
|
|
15
|
+
Good for: key-value cache with TTL and eviction, offline storage, simple app storage by id. Not for: complex multi-index queries or raw IDB transactions—use [idb](https://www.npmjs.com/package/idb) or native IndexedDB instead.
|
|
16
|
+
|
|
13
17
|
## API
|
|
14
18
|
|
|
15
19
|
| Export | Purpose |
|
|
16
20
|
|--------|---------|
|
|
17
|
-
| **
|
|
18
|
-
| **set(value)** | Store a value. Value must have an `id` property.
|
|
21
|
+
| **createIdb(options, workerUrl?)** | Returns a Promise of `{ set, get, getAll, keys, getMany, update, delete, deleteDb }`. Uses a Web Worker by default (browser); worker URL is auto-generated. Options: `dbName` (required), `storeName` (optional), `ttlMs` (optional, default 3600000), `maxCount` (optional, default 1000). Pass `workerUrl` only when bundling requires it. |
|
|
22
|
+
| **set(value)** | Store a value. Value must have an `id` property. The object is not mutated; expiry and eviction run automatically. |
|
|
19
23
|
| **get(key)** | Get a value by key. Returns `undefined` if not found. |
|
|
20
|
-
| **
|
|
24
|
+
| **getAll()** | Get all values in the store. |
|
|
25
|
+
| **keys()** | Get all keys in the store. |
|
|
26
|
+
| **getMany(keys)** | Get values for multiple keys; returns array aligned with input (undefined where missing). |
|
|
27
|
+
| **update(key, value)** | Merge partial fields into the existing entry by key (other fields preserved). |
|
|
21
28
|
| **delete(key)** | Delete an entry by key. |
|
|
22
29
|
| **deleteDb()** | Close the DB and delete it from disk. |
|
|
23
30
|
|
|
24
|
-
For details, see **[Advanced documentation](docs/advanced.md)**. Run the **[example](example/)** in the browser (see `example/README.md`).
|
|
31
|
+
For details, see **[Advanced documentation](docs/advanced.md)**. Run the **[example](example/)** in the browser (see `example/README.md`). The `idb-refined/worker` export is a script entry for the Worker constructor only (no types).
|
|
25
32
|
|
|
26
33
|
## Example
|
|
27
34
|
|
|
28
35
|
```ts
|
|
29
|
-
import {
|
|
36
|
+
import { createIdb } from "idb-refined";
|
|
30
37
|
|
|
31
38
|
// Optional: type the stored value for set/get/update
|
|
32
39
|
type User = { id: string; name: string; createdAt?: number; expiresAt?: number };
|
|
33
|
-
const { set, get, update, delete: del, deleteDb } =
|
|
40
|
+
const { set, get, update, delete: del, deleteDb } = await createIdb<User>({
|
|
41
|
+
dbName: "my-app",
|
|
42
|
+
ttlMs: 3600_000, // optional: 1 hour default TTL
|
|
43
|
+
});
|
|
34
44
|
|
|
35
45
|
await set({ id: "1", name: "Alice" });
|
|
36
46
|
const value = await get("1"); // User | undefined
|
|
@@ -39,10 +49,33 @@ await del("1");
|
|
|
39
49
|
await deleteDb();
|
|
40
50
|
```
|
|
41
51
|
|
|
52
|
+
## Bundling
|
|
53
|
+
|
|
54
|
+
With Vite or Webpack the worker URL may not resolve; pass `workerUrl` explicitly:
|
|
55
|
+
|
|
56
|
+
**Vite:**
|
|
57
|
+
|
|
58
|
+
```ts
|
|
59
|
+
const client = await createIdb(
|
|
60
|
+
{ dbName: "my-app" },
|
|
61
|
+
new URL("idb-refined/worker", import.meta.url)
|
|
62
|
+
);
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
**Webpack 5:** Use a path that resolves to the worker script (e.g. `new URL("./node_modules/idb-refined/dist/worker.js", import.meta.url)` or configure your bundler to expose it). See [Bundling](docs/advanced.md#bundling) in the advanced docs.
|
|
66
|
+
|
|
42
67
|
## Requirements
|
|
43
68
|
|
|
44
69
|
- Values must include an `id` property (used as the store key).
|
|
45
70
|
- The library uses a single store (default name `"store"`) with indexes on `expiresAt` and `createdAt`. Cleanup and eviction run on set.
|
|
71
|
+
- **Environment:** Requires IndexedDB (and Web Workers in the browser for worker mode). Works in modern browsers; tests use fake-indexeddb in Node.
|
|
72
|
+
|
|
73
|
+
## Under the hood
|
|
74
|
+
|
|
75
|
+
- **Schema & versioning** — A single store (and indexes on `expiresAt`, `createdAt`) is created or upgraded automatically; version bumps are derived from a schema fingerprint so you don’t manage versions by hand.
|
|
76
|
+
- **Cleanup** — Expired entries (`expiresAt` in the past) are removed before and after each `set`, so TTL “just works” and there’s room to add.
|
|
77
|
+
- **Eviction** — If the store is at or over `maxCount` (default 1000), the oldest entries by `createdAt` are evicted *before* the add, then again after if needed, so the store stays under the cap and adds don’t run out of space.
|
|
78
|
+
- **Web Worker** — `createIdb` runs all of the above (schema, cleanup, eviction, put/get/update/delete) inside a Web Worker by default. The worker URL is auto-generated; the main thread only sends messages and receives results, so heavy I/O and bookkeeping stay off the UI thread.
|
|
46
79
|
|
|
47
80
|
## Releasing
|
|
48
81
|
|
package/dist/client.d.ts
CHANGED
|
@@ -1,21 +1,30 @@
|
|
|
1
|
-
export interface
|
|
1
|
+
export interface IdbRefinedOptions {
|
|
2
2
|
dbName: string;
|
|
3
3
|
storeName?: string;
|
|
4
|
+
/** Default TTL in ms for values without `expiresAt`. Default: 3600000 (1 hour). */
|
|
5
|
+
ttlMs?: number;
|
|
6
|
+
/** Max entries before eviction. Default: 1000. */
|
|
7
|
+
maxCount?: number;
|
|
4
8
|
}
|
|
5
9
|
/** Stored value must have `id`. `createdAt` and `expiresAt` are set by the client if missing. */
|
|
6
10
|
export type StoredValue = Record<string, unknown> & {
|
|
7
11
|
id: IDBValidKey;
|
|
8
12
|
};
|
|
9
13
|
export interface IdbRefinedClient<T extends StoredValue = StoredValue> {
|
|
10
|
-
|
|
14
|
+
set: (value: T) => Promise<void>;
|
|
11
15
|
get: (key: IDBValidKey) => Promise<T | undefined>;
|
|
16
|
+
getAll: () => Promise<T[]>;
|
|
17
|
+
keys: () => Promise<IDBValidKey[]>;
|
|
18
|
+
getMany: (keys: IDBValidKey[]) => Promise<(T | undefined)[]>;
|
|
12
19
|
update: (key: IDBValidKey, value: Partial<T>) => Promise<void>;
|
|
13
20
|
delete: (key: IDBValidKey) => Promise<void>;
|
|
14
|
-
|
|
21
|
+
deleteDb: () => Promise<void>;
|
|
15
22
|
}
|
|
23
|
+
import type { WorkerMessage, WorkerResponse } from "./workerProtocol.js";
|
|
24
|
+
export type { WorkerMessage, WorkerResponse };
|
|
16
25
|
/**
|
|
17
|
-
* Create
|
|
18
|
-
*
|
|
26
|
+
* Create an idb-refined client. Uses a Web Worker by default (browser); falls back to main thread when Worker is unavailable (e.g. Node/tests).
|
|
27
|
+
* Worker URL is auto-generated from the same directory as the library; pass `workerUrl` only when bundling requires it (e.g. `new URL('idb-refined/worker', import.meta.url)`).
|
|
19
28
|
* @template T - Stored value shape (must include `id`). Omit for a generic client.
|
|
20
29
|
*/
|
|
21
|
-
export declare function
|
|
30
|
+
export declare function createIdb<T extends StoredValue = StoredValue>(options: IdbRefinedOptions, workerUrl?: string | URL): Promise<IdbRefinedClient<T>>;
|
package/dist/client.js
CHANGED
|
@@ -1,84 +1,110 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { initDb } from "./initDb.js";
|
|
3
|
-
import { cleanOldEntries } from "./cleanOldEntries.js";
|
|
4
|
-
import { cleanWhenTooLarge } from "./cleanWhenTooLarge.js";
|
|
1
|
+
import { executeDelete as execDelete, executeDeleteDb as execDeleteDb, executeGet as execGet, executeGetAll as execGetAll, executeGetMany as execGetMany, executeKeys as execKeys, executeSet as execSet, executeUpdate as execUpdate, } from "./clientCore.js";
|
|
5
2
|
const DEFAULT_STORE_NAME = "store";
|
|
6
|
-
|
|
7
|
-
const DATE_INDEXES = ["expiresAt", "createdAt"];
|
|
8
|
-
const DEFAULT_TTL_MS = 3600 * 1000;
|
|
9
|
-
const DEFAULT_MAX_COUNT = 1000;
|
|
10
|
-
const dbCache = new Map();
|
|
11
|
-
function getDefaultSchema(storeName) {
|
|
3
|
+
function mainThreadClient(dbName, storeName, ttlMs, maxCount) {
|
|
12
4
|
return {
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
}
|
|
5
|
+
async set(value) {
|
|
6
|
+
await execSet(dbName, storeName, value, {
|
|
7
|
+
ttlMs,
|
|
8
|
+
maxCount,
|
|
9
|
+
});
|
|
10
|
+
},
|
|
11
|
+
async get(key) {
|
|
12
|
+
return execGet(dbName, storeName, key);
|
|
13
|
+
},
|
|
14
|
+
async getAll() {
|
|
15
|
+
return execGetAll(dbName, storeName);
|
|
16
|
+
},
|
|
17
|
+
async keys() {
|
|
18
|
+
return execKeys(dbName, storeName);
|
|
19
|
+
},
|
|
20
|
+
async getMany(keys) {
|
|
21
|
+
return execGetMany(dbName, storeName, keys);
|
|
22
|
+
},
|
|
23
|
+
async update(key, value) {
|
|
24
|
+
await execUpdate(dbName, storeName, key, value);
|
|
25
|
+
},
|
|
26
|
+
async delete(key) {
|
|
27
|
+
await execDelete(dbName, storeName, key);
|
|
28
|
+
},
|
|
29
|
+
async deleteDb() {
|
|
30
|
+
await execDeleteDb(dbName);
|
|
18
31
|
},
|
|
19
32
|
};
|
|
20
33
|
}
|
|
21
|
-
async function getDb(dbName, storeName) {
|
|
22
|
-
let db = dbCache.get(dbName);
|
|
23
|
-
if (db != null)
|
|
24
|
-
return db;
|
|
25
|
-
const schema = getDefaultSchema(storeName);
|
|
26
|
-
db = await initDb(dbName, { schema });
|
|
27
|
-
dbCache.set(dbName, db);
|
|
28
|
-
return db;
|
|
29
|
-
}
|
|
30
|
-
function setExpiryFields(value) {
|
|
31
|
-
const now = Date.now();
|
|
32
|
-
if (value.createdAt === undefined)
|
|
33
|
-
value.createdAt = now;
|
|
34
|
-
if (value.expiresAt === undefined)
|
|
35
|
-
value.expiresAt = now + DEFAULT_TTL_MS;
|
|
36
|
-
}
|
|
37
34
|
/**
|
|
38
|
-
* Create
|
|
39
|
-
*
|
|
35
|
+
* Create an idb-refined client. Uses a Web Worker by default (browser); falls back to main thread when Worker is unavailable (e.g. Node/tests).
|
|
36
|
+
* Worker URL is auto-generated from the same directory as the library; pass `workerUrl` only when bundling requires it (e.g. `new URL('idb-refined/worker', import.meta.url)`).
|
|
40
37
|
* @template T - Stored value shape (must include `id`). Omit for a generic client.
|
|
41
38
|
*/
|
|
42
|
-
export function
|
|
43
|
-
const { dbName } = options;
|
|
39
|
+
export async function createIdb(options, workerUrl) {
|
|
40
|
+
const { dbName, ttlMs, maxCount } = options;
|
|
44
41
|
const storeName = options.storeName ?? DEFAULT_STORE_NAME;
|
|
42
|
+
if (typeof Worker === "undefined") {
|
|
43
|
+
return mainThreadClient(dbName, storeName, ttlMs, maxCount);
|
|
44
|
+
}
|
|
45
|
+
const url = workerUrl ??
|
|
46
|
+
(typeof import.meta !== "undefined" && import.meta.url
|
|
47
|
+
? new URL("./worker.js", import.meta.url).href
|
|
48
|
+
: undefined);
|
|
49
|
+
if (url == null) {
|
|
50
|
+
throw new Error("createIdb: worker URL could not be resolved. Pass workerUrl (e.g. new URL('idb-refined/worker', import.meta.url)) when bundling.");
|
|
51
|
+
}
|
|
52
|
+
const worker = new Worker(url, { type: "module" });
|
|
53
|
+
const pending = new Map();
|
|
54
|
+
let nextId = 0;
|
|
55
|
+
const workerUnavailableError = new Error("Worker unavailable; create a new client with createIdb().");
|
|
56
|
+
let workerDead = false;
|
|
57
|
+
worker.onmessage = (e) => {
|
|
58
|
+
const data = e.data;
|
|
59
|
+
const p = pending.get(data.id);
|
|
60
|
+
if (p) {
|
|
61
|
+
pending.delete(data.id);
|
|
62
|
+
if ("error" in data && data.error)
|
|
63
|
+
p.reject(new Error(data.error));
|
|
64
|
+
else
|
|
65
|
+
p.resolve(("result" in data ? data.result : undefined));
|
|
66
|
+
}
|
|
67
|
+
};
|
|
68
|
+
worker.onerror = (ev) => {
|
|
69
|
+
workerDead = true;
|
|
70
|
+
for (const [, p] of pending)
|
|
71
|
+
p.reject(ev.error ?? new Error("Worker error"));
|
|
72
|
+
pending.clear();
|
|
73
|
+
};
|
|
74
|
+
const send = (type, payload) => new Promise((resolve, reject) => {
|
|
75
|
+
if (workerDead) {
|
|
76
|
+
reject(workerUnavailableError);
|
|
77
|
+
return;
|
|
78
|
+
}
|
|
79
|
+
const id = nextId++;
|
|
80
|
+
pending.set(id, { resolve: resolve, reject });
|
|
81
|
+
worker.postMessage({ type, id, payload });
|
|
82
|
+
});
|
|
83
|
+
await send("init", { dbName, storeName, ttlMs, maxCount });
|
|
45
84
|
return {
|
|
46
|
-
async
|
|
47
|
-
|
|
48
|
-
setExpiryFields(value);
|
|
49
|
-
await db.put(storeName, value);
|
|
50
|
-
const count = await db.count(storeName);
|
|
51
|
-
if (count > DEFAULT_MAX_COUNT) {
|
|
52
|
-
await cleanWhenTooLarge(db, storeName, {
|
|
53
|
-
dateKey: "createdAt",
|
|
54
|
-
maxCount: DEFAULT_MAX_COUNT,
|
|
55
|
-
});
|
|
56
|
-
}
|
|
57
|
-
await cleanOldEntries(db, storeName, {
|
|
58
|
-
dateKey: "expiresAt",
|
|
59
|
-
before: Date.now(),
|
|
60
|
-
});
|
|
85
|
+
async set(value) {
|
|
86
|
+
await send("set", value);
|
|
61
87
|
},
|
|
62
88
|
async get(key) {
|
|
63
|
-
|
|
64
|
-
|
|
89
|
+
return send("get", key);
|
|
90
|
+
},
|
|
91
|
+
async getAll() {
|
|
92
|
+
return send("getAll");
|
|
93
|
+
},
|
|
94
|
+
async keys() {
|
|
95
|
+
return send("keys");
|
|
96
|
+
},
|
|
97
|
+
async getMany(keys) {
|
|
98
|
+
return send("getMany", keys);
|
|
65
99
|
},
|
|
66
100
|
async update(key, value) {
|
|
67
|
-
|
|
68
|
-
const withKey = { ...value, [DEFAULT_KEY_PATH]: key };
|
|
69
|
-
await db.put(storeName, withKey);
|
|
101
|
+
await send("update", { key, value });
|
|
70
102
|
},
|
|
71
103
|
async delete(key) {
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
const db = dbCache.get(dbName);
|
|
77
|
-
if (db != null) {
|
|
78
|
-
db.close();
|
|
79
|
-
dbCache.delete(dbName);
|
|
80
|
-
}
|
|
81
|
-
await deleteDB(dbName);
|
|
104
|
+
await send("delete", key);
|
|
105
|
+
},
|
|
106
|
+
async deleteDb() {
|
|
107
|
+
await send("deleteDb");
|
|
82
108
|
},
|
|
83
109
|
};
|
|
84
110
|
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { IDBPDatabase } from "idb";
|
|
2
|
+
export { DEFAULT_TTL_MS } from "./constants.js";
|
|
3
|
+
export declare function getDb(dbName: string, storeName: string): Promise<IDBPDatabase<unknown>>;
|
|
4
|
+
export declare function setExpiryFields(value: Record<string, unknown>, ttlMs?: number): void;
|
|
5
|
+
export interface ExecuteSetOptions {
|
|
6
|
+
ttlMs?: number;
|
|
7
|
+
/** Max entries before eviction. Default: 1000. */
|
|
8
|
+
maxCount?: number;
|
|
9
|
+
}
|
|
10
|
+
export declare function executeSet(dbName: string, storeName: string, value: Record<string, unknown>, options?: ExecuteSetOptions): Promise<void>;
|
|
11
|
+
export declare function executeGet<T>(dbName: string, storeName: string, key: IDBValidKey): Promise<T | undefined>;
|
|
12
|
+
export declare function executeGetAll<T>(dbName: string, storeName: string): Promise<T[]>;
|
|
13
|
+
export declare function executeKeys(dbName: string, storeName: string): Promise<IDBValidKey[]>;
|
|
14
|
+
export declare function executeGetMany<T>(dbName: string, storeName: string, keys: IDBValidKey[]): Promise<(T | undefined)[]>;
|
|
15
|
+
export declare function executeUpdate(dbName: string, storeName: string, key: IDBValidKey, value: Record<string, unknown>): Promise<void>;
|
|
16
|
+
export declare function executeDelete(dbName: string, storeName: string, key: IDBValidKey): Promise<void>;
|
|
17
|
+
export declare function executeDeleteDb(dbName: string): Promise<void>;
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
import { deleteDB } from "idb";
|
|
2
|
+
import { clearMetaForDb, initDb } from "./initDb.js";
|
|
3
|
+
import { cleanOldEntries } from "./cleanOldEntries.js";
|
|
4
|
+
import { cleanWhenTooLarge } from "./cleanWhenTooLarge.js";
|
|
5
|
+
import { DEFAULT_MAX_COUNT, DEFAULT_TTL_MS } from "./constants.js";
|
|
6
|
+
export { DEFAULT_TTL_MS } from "./constants.js";
|
|
7
|
+
const DEFAULT_KEY_PATH = "id";
|
|
8
|
+
const DATE_INDEXES = ["expiresAt", "createdAt"];
|
|
9
|
+
const dbCache = new Map();
|
|
10
|
+
function getDefaultSchema(storeNames) {
|
|
11
|
+
const stores = {};
|
|
12
|
+
for (const name of storeNames) {
|
|
13
|
+
stores[name] = {
|
|
14
|
+
keyPath: DEFAULT_KEY_PATH,
|
|
15
|
+
indexes: DATE_INDEXES,
|
|
16
|
+
};
|
|
17
|
+
}
|
|
18
|
+
return { stores };
|
|
19
|
+
}
|
|
20
|
+
export async function getDb(dbName, storeName) {
|
|
21
|
+
const entry = dbCache.get(dbName);
|
|
22
|
+
if (entry != null && entry.storeNames.has(storeName)) {
|
|
23
|
+
return entry.db;
|
|
24
|
+
}
|
|
25
|
+
const existingStores = entry != null ? [...entry.storeNames] : [];
|
|
26
|
+
if (entry != null) {
|
|
27
|
+
entry.db.close();
|
|
28
|
+
dbCache.delete(dbName);
|
|
29
|
+
}
|
|
30
|
+
const allStores = existingStores.includes(storeName)
|
|
31
|
+
? existingStores
|
|
32
|
+
: [...existingStores, storeName];
|
|
33
|
+
const schema = getDefaultSchema(allStores);
|
|
34
|
+
const db = await initDb(dbName, { schema });
|
|
35
|
+
dbCache.set(dbName, {
|
|
36
|
+
db,
|
|
37
|
+
storeNames: new Set(allStores),
|
|
38
|
+
});
|
|
39
|
+
return db;
|
|
40
|
+
}
|
|
41
|
+
export function setExpiryFields(value, ttlMs = DEFAULT_TTL_MS) {
|
|
42
|
+
const now = Date.now();
|
|
43
|
+
if (value.createdAt === undefined)
|
|
44
|
+
value.createdAt = now;
|
|
45
|
+
if (value.expiresAt === undefined)
|
|
46
|
+
value.expiresAt = now + ttlMs;
|
|
47
|
+
}
|
|
48
|
+
export async function executeSet(dbName, storeName, value, options) {
|
|
49
|
+
const db = await getDb(dbName, storeName);
|
|
50
|
+
const toStore = { ...value };
|
|
51
|
+
setExpiryFields(toStore, options?.ttlMs ?? DEFAULT_TTL_MS);
|
|
52
|
+
await cleanOldEntries(db, storeName, {
|
|
53
|
+
dateKey: "expiresAt",
|
|
54
|
+
before: Date.now(),
|
|
55
|
+
});
|
|
56
|
+
const maxCount = options?.maxCount ?? DEFAULT_MAX_COUNT;
|
|
57
|
+
const count = await db.count(storeName);
|
|
58
|
+
if (count >= maxCount) {
|
|
59
|
+
await cleanWhenTooLarge(db, storeName, {
|
|
60
|
+
dateKey: "createdAt",
|
|
61
|
+
maxCount,
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
await db.put(storeName, toStore);
|
|
65
|
+
const countAfter = await db.count(storeName);
|
|
66
|
+
if (countAfter > maxCount) {
|
|
67
|
+
await cleanWhenTooLarge(db, storeName, {
|
|
68
|
+
dateKey: "createdAt",
|
|
69
|
+
maxCount,
|
|
70
|
+
});
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
export async function executeGet(dbName, storeName, key) {
|
|
74
|
+
const db = await getDb(dbName, storeName);
|
|
75
|
+
return (await db.get(storeName, key));
|
|
76
|
+
}
|
|
77
|
+
export async function executeGetAll(dbName, storeName) {
|
|
78
|
+
const db = await getDb(dbName, storeName);
|
|
79
|
+
return (await db.getAll(storeName));
|
|
80
|
+
}
|
|
81
|
+
export async function executeKeys(dbName, storeName) {
|
|
82
|
+
const db = await getDb(dbName, storeName);
|
|
83
|
+
return db.getAllKeys(storeName);
|
|
84
|
+
}
|
|
85
|
+
export async function executeGetMany(dbName, storeName, keys) {
|
|
86
|
+
const db = await getDb(dbName, storeName);
|
|
87
|
+
const results = await Promise.all(keys.map((key) => db.get(storeName, key)));
|
|
88
|
+
return results;
|
|
89
|
+
}
|
|
90
|
+
export async function executeUpdate(dbName, storeName, key, value) {
|
|
91
|
+
const db = await getDb(dbName, storeName);
|
|
92
|
+
const existing = (await db.get(storeName, key));
|
|
93
|
+
const merged = {
|
|
94
|
+
...(existing ?? {}),
|
|
95
|
+
...value,
|
|
96
|
+
[DEFAULT_KEY_PATH]: key,
|
|
97
|
+
};
|
|
98
|
+
await db.put(storeName, merged);
|
|
99
|
+
}
|
|
100
|
+
export async function executeDelete(dbName, storeName, key) {
|
|
101
|
+
const db = await getDb(dbName, storeName);
|
|
102
|
+
await db.delete(storeName, key);
|
|
103
|
+
}
|
|
104
|
+
export async function executeDeleteDb(dbName) {
|
|
105
|
+
const entry = dbCache.get(dbName);
|
|
106
|
+
if (entry != null) {
|
|
107
|
+
entry.db.close();
|
|
108
|
+
dbCache.delete(dbName);
|
|
109
|
+
}
|
|
110
|
+
await deleteDB(dbName);
|
|
111
|
+
await clearMetaForDb(dbName);
|
|
112
|
+
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
/** Default TTL in ms (1 hour). Used for values without `expiresAt`. */
|
|
2
|
+
export declare const DEFAULT_TTL_MS: number;
|
|
3
|
+
/** Default max entries before eviction. */
|
|
4
|
+
export declare const DEFAULT_MAX_COUNT = 1000;
|
|
5
|
+
/** Default TTL in seconds (1 hour). Used by putWithEviction when expiresAt is not set. */
|
|
6
|
+
export declare const DEFAULT_TTL_SECONDS = 3600;
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
/** Default TTL in ms (1 hour). Used for values without `expiresAt`. */
|
|
2
|
+
export const DEFAULT_TTL_MS = 3600 * 1000;
|
|
3
|
+
/** Default max entries before eviction. */
|
|
4
|
+
export const DEFAULT_MAX_COUNT = 1000;
|
|
5
|
+
/** Default TTL in seconds (1 hour). Used by putWithEviction when expiresAt is not set. */
|
|
6
|
+
export const DEFAULT_TTL_SECONDS = 3600;
|
package/dist/index.d.ts
CHANGED
|
@@ -1,2 +1,6 @@
|
|
|
1
|
-
export {
|
|
2
|
-
export type {
|
|
1
|
+
export { createIdb } from "./client.js";
|
|
2
|
+
export type { IdbRefinedClient, IdbRefinedOptions, StoredValue, WorkerMessage, WorkerResponse, } from "./client.js";
|
|
3
|
+
/** Advanced: low-level helpers when you have a raw IDBPDatabase (e.g. from initDb). */
|
|
4
|
+
export { clearStore, deleteByKey, } from "./delete.js";
|
|
5
|
+
export { putWithEviction } from "./putWithEviction.js";
|
|
6
|
+
export type { PutWithEvictionOptions } from "./putWithEviction.js";
|
package/dist/index.js
CHANGED
|
@@ -1 +1,4 @@
|
|
|
1
|
-
export {
|
|
1
|
+
export { createIdb } from "./client.js";
|
|
2
|
+
/** Advanced: low-level helpers when you have a raw IDBPDatabase (e.g. from initDb). */
|
|
3
|
+
export { clearStore, deleteByKey, } from "./delete.js";
|
|
4
|
+
export { putWithEviction } from "./putWithEviction.js";
|
package/dist/initDb.d.ts
CHANGED
|
@@ -10,3 +10,7 @@ export interface InitDbOptions {
|
|
|
10
10
|
* from a fingerprint; when `version` and/or `upgrade` are provided, use them directly.
|
|
11
11
|
*/
|
|
12
12
|
export declare function initDb(name: string, options?: InitDbOptions): Promise<IDBPDatabase<unknown>>;
|
|
13
|
+
/**
|
|
14
|
+
* Remove the meta row for a database (e.g. after deleteDB). Keeps the meta DB from growing.
|
|
15
|
+
*/
|
|
16
|
+
export declare function clearMetaForDb(dbName: string): Promise<void>;
|
package/dist/initDb.js
CHANGED
|
@@ -25,7 +25,6 @@ async function initDbWithSchema(name, schema, customUpgrade) {
|
|
|
25
25
|
},
|
|
26
26
|
});
|
|
27
27
|
const stored = (await metaDb.get(META_STORE, name));
|
|
28
|
-
metaDb.close();
|
|
29
28
|
let currentVersion = 0;
|
|
30
29
|
try {
|
|
31
30
|
const probe = await openDB(name);
|
|
@@ -48,13 +47,26 @@ async function initDbWithSchema(name, schema, customUpgrade) {
|
|
|
48
47
|
},
|
|
49
48
|
}));
|
|
50
49
|
if (needUpgrade) {
|
|
51
|
-
|
|
52
|
-
await metaDb2.put(META_STORE, {
|
|
50
|
+
await metaDb.put(META_STORE, {
|
|
53
51
|
dbName: name,
|
|
54
52
|
schemaFingerprint: fp,
|
|
55
53
|
version: newVersion,
|
|
56
54
|
});
|
|
57
|
-
metaDb2.close();
|
|
58
55
|
}
|
|
56
|
+
metaDb.close();
|
|
59
57
|
return db;
|
|
60
58
|
}
|
|
59
|
+
/**
|
|
60
|
+
* Remove the meta row for a database (e.g. after deleteDB). Keeps the meta DB from growing.
|
|
61
|
+
*/
|
|
62
|
+
export async function clearMetaForDb(dbName) {
|
|
63
|
+
const metaDb = await openDB(META_DB_NAME, 1, {
|
|
64
|
+
upgrade(db) {
|
|
65
|
+
if (!db.objectStoreNames.contains(META_STORE)) {
|
|
66
|
+
db.createObjectStore(META_STORE, { keyPath: "dbName" });
|
|
67
|
+
}
|
|
68
|
+
},
|
|
69
|
+
});
|
|
70
|
+
await metaDb.delete(META_STORE, dbName);
|
|
71
|
+
metaDb.close();
|
|
72
|
+
}
|
package/dist/putWithEviction.js
CHANGED
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
import { cleanWhenTooLarge } from "./cleanWhenTooLarge.js";
|
|
2
|
-
|
|
3
|
-
const DEFAULT_MAX_COUNT = 1000;
|
|
2
|
+
import { DEFAULT_MAX_COUNT, DEFAULT_TTL_SECONDS } from "./constants.js";
|
|
4
3
|
/**
|
|
5
4
|
* Put a value in the store, then if count > maxCount evict oldest entries (by dateKey).
|
|
6
5
|
* Optionally set the dateKey field from expiresAt (absolute ms) or ttlSeconds (relative seconds).
|
package/dist/worker.js
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
1
|
+
// node_modules/.pnpm/idb@8.0.3/node_modules/idb/build/index.js
|
|
2
|
+
var instanceOfAny = (object, constructors) => constructors.some((c) => object instanceof c);
|
|
3
|
+
var idbProxyableTypes;
|
|
4
|
+
var cursorAdvanceMethods;
|
|
5
|
+
function getIdbProxyableTypes() {
|
|
6
|
+
return idbProxyableTypes || (idbProxyableTypes = [
|
|
7
|
+
IDBDatabase,
|
|
8
|
+
IDBObjectStore,
|
|
9
|
+
IDBIndex,
|
|
10
|
+
IDBCursor,
|
|
11
|
+
IDBTransaction
|
|
12
|
+
]);
|
|
13
|
+
}
|
|
14
|
+
function getCursorAdvanceMethods() {
|
|
15
|
+
return cursorAdvanceMethods || (cursorAdvanceMethods = [
|
|
16
|
+
IDBCursor.prototype.advance,
|
|
17
|
+
IDBCursor.prototype.continue,
|
|
18
|
+
IDBCursor.prototype.continuePrimaryKey
|
|
19
|
+
]);
|
|
20
|
+
}
|
|
21
|
+
var transactionDoneMap = /* @__PURE__ */ new WeakMap();
|
|
22
|
+
var transformCache = /* @__PURE__ */ new WeakMap();
|
|
23
|
+
var reverseTransformCache = /* @__PURE__ */ new WeakMap();
|
|
24
|
+
function promisifyRequest(request) {
|
|
25
|
+
const promise = new Promise((resolve, reject) => {
|
|
26
|
+
const unlisten = () => {
|
|
27
|
+
request.removeEventListener("success", success);
|
|
28
|
+
request.removeEventListener("error", error);
|
|
29
|
+
};
|
|
30
|
+
const success = () => {
|
|
31
|
+
resolve(wrap(request.result));
|
|
32
|
+
unlisten();
|
|
33
|
+
};
|
|
34
|
+
const error = () => {
|
|
35
|
+
reject(request.error);
|
|
36
|
+
unlisten();
|
|
37
|
+
};
|
|
38
|
+
request.addEventListener("success", success);
|
|
39
|
+
request.addEventListener("error", error);
|
|
40
|
+
});
|
|
41
|
+
reverseTransformCache.set(promise, request);
|
|
42
|
+
return promise;
|
|
43
|
+
}
|
|
44
|
+
function cacheDonePromiseForTransaction(tx) {
|
|
45
|
+
if (transactionDoneMap.has(tx))
|
|
46
|
+
return;
|
|
47
|
+
const done = new Promise((resolve, reject) => {
|
|
48
|
+
const unlisten = () => {
|
|
49
|
+
tx.removeEventListener("complete", complete);
|
|
50
|
+
tx.removeEventListener("error", error);
|
|
51
|
+
tx.removeEventListener("abort", error);
|
|
52
|
+
};
|
|
53
|
+
const complete = () => {
|
|
54
|
+
resolve();
|
|
55
|
+
unlisten();
|
|
56
|
+
};
|
|
57
|
+
const error = () => {
|
|
58
|
+
reject(tx.error || new DOMException("AbortError", "AbortError"));
|
|
59
|
+
unlisten();
|
|
60
|
+
};
|
|
61
|
+
tx.addEventListener("complete", complete);
|
|
62
|
+
tx.addEventListener("error", error);
|
|
63
|
+
tx.addEventListener("abort", error);
|
|
64
|
+
});
|
|
65
|
+
transactionDoneMap.set(tx, done);
|
|
66
|
+
}
|
|
67
|
+
var idbProxyTraps = {
|
|
68
|
+
get(target, prop, receiver) {
|
|
69
|
+
if (target instanceof IDBTransaction) {
|
|
70
|
+
if (prop === "done")
|
|
71
|
+
return transactionDoneMap.get(target);
|
|
72
|
+
if (prop === "store") {
|
|
73
|
+
return receiver.objectStoreNames[1] ? void 0 : receiver.objectStore(receiver.objectStoreNames[0]);
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
return wrap(target[prop]);
|
|
77
|
+
},
|
|
78
|
+
set(target, prop, value) {
|
|
79
|
+
target[prop] = value;
|
|
80
|
+
return true;
|
|
81
|
+
},
|
|
82
|
+
has(target, prop) {
|
|
83
|
+
if (target instanceof IDBTransaction && (prop === "done" || prop === "store")) {
|
|
84
|
+
return true;
|
|
85
|
+
}
|
|
86
|
+
return prop in target;
|
|
87
|
+
}
|
|
88
|
+
};
|
|
89
|
+
function replaceTraps(callback) {
|
|
90
|
+
idbProxyTraps = callback(idbProxyTraps);
|
|
91
|
+
}
|
|
92
|
+
function wrapFunction(func) {
|
|
93
|
+
if (getCursorAdvanceMethods().includes(func)) {
|
|
94
|
+
return function(...args) {
|
|
95
|
+
func.apply(unwrap(this), args);
|
|
96
|
+
return wrap(this.request);
|
|
97
|
+
};
|
|
98
|
+
}
|
|
99
|
+
return function(...args) {
|
|
100
|
+
return wrap(func.apply(unwrap(this), args));
|
|
101
|
+
};
|
|
102
|
+
}
|
|
103
|
+
function transformCachableValue(value) {
|
|
104
|
+
if (typeof value === "function")
|
|
105
|
+
return wrapFunction(value);
|
|
106
|
+
if (value instanceof IDBTransaction)
|
|
107
|
+
cacheDonePromiseForTransaction(value);
|
|
108
|
+
if (instanceOfAny(value, getIdbProxyableTypes()))
|
|
109
|
+
return new Proxy(value, idbProxyTraps);
|
|
110
|
+
return value;
|
|
111
|
+
}
|
|
112
|
+
function wrap(value) {
|
|
113
|
+
if (value instanceof IDBRequest)
|
|
114
|
+
return promisifyRequest(value);
|
|
115
|
+
if (transformCache.has(value))
|
|
116
|
+
return transformCache.get(value);
|
|
117
|
+
const newValue = transformCachableValue(value);
|
|
118
|
+
if (newValue !== value) {
|
|
119
|
+
transformCache.set(value, newValue);
|
|
120
|
+
reverseTransformCache.set(newValue, value);
|
|
121
|
+
}
|
|
122
|
+
return newValue;
|
|
123
|
+
}
|
|
124
|
+
var unwrap = (value) => reverseTransformCache.get(value);
|
|
125
|
+
function openDB(name, version, { blocked, upgrade, blocking, terminated } = {}) {
|
|
126
|
+
const request = indexedDB.open(name, version);
|
|
127
|
+
const openPromise = wrap(request);
|
|
128
|
+
if (upgrade) {
|
|
129
|
+
request.addEventListener("upgradeneeded", (event) => {
|
|
130
|
+
upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction), event);
|
|
131
|
+
});
|
|
132
|
+
}
|
|
133
|
+
if (blocked) {
|
|
134
|
+
request.addEventListener("blocked", (event) => blocked(
|
|
135
|
+
// Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405
|
|
136
|
+
event.oldVersion,
|
|
137
|
+
event.newVersion,
|
|
138
|
+
event
|
|
139
|
+
));
|
|
140
|
+
}
|
|
141
|
+
openPromise.then((db) => {
|
|
142
|
+
if (terminated)
|
|
143
|
+
db.addEventListener("close", () => terminated());
|
|
144
|
+
if (blocking) {
|
|
145
|
+
db.addEventListener("versionchange", (event) => blocking(event.oldVersion, event.newVersion, event));
|
|
146
|
+
}
|
|
147
|
+
}).catch(() => {
|
|
148
|
+
});
|
|
149
|
+
return openPromise;
|
|
150
|
+
}
|
|
151
|
+
function deleteDB(name, { blocked } = {}) {
|
|
152
|
+
const request = indexedDB.deleteDatabase(name);
|
|
153
|
+
if (blocked) {
|
|
154
|
+
request.addEventListener("blocked", (event) => blocked(
|
|
155
|
+
// Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405
|
|
156
|
+
event.oldVersion,
|
|
157
|
+
event
|
|
158
|
+
));
|
|
159
|
+
}
|
|
160
|
+
return wrap(request).then(() => void 0);
|
|
161
|
+
}
|
|
162
|
+
var readMethods = ["get", "getKey", "getAll", "getAllKeys", "count"];
|
|
163
|
+
var writeMethods = ["put", "add", "delete", "clear"];
|
|
164
|
+
var cachedMethods = /* @__PURE__ */ new Map();
|
|
165
|
+
// Build (and memoize) a database-level shortcut such as db.get(store, key)
// or db.getAllFromIndex(store, index, ...). Returns undefined for any
// property that is not one of the supported read/write helpers, so the
// proxy falls through to its previous traps.
function getMethod(target, prop) {
  const isCandidate = target instanceof IDBDatabase && !(prop in target) && typeof prop === "string";
  if (!isCandidate) {
    return;
  }
  const cached = cachedMethods.get(prop);
  if (cached) return cached;
  const targetFuncName = prop.replace(/FromIndex$/, "");
  const useIndex = prop !== targetFuncName;
  const isWrite = writeMethods.includes(targetFuncName);
  // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.
  const proto = (useIndex ? IDBIndex : IDBObjectStore).prototype;
  if (!(targetFuncName in proto) || !(isWrite || readMethods.includes(targetFuncName))) {
    return;
  }
  const method = async function (storeName2, ...args) {
    const tx = this.transaction(storeName2, isWrite ? "readwrite" : "readonly");
    let source = tx.store;
    if (useIndex) {
      source = source.index(args.shift());
    }
    // Await tx.done alongside writes so the caller observes commit failures.
    const [result] = await Promise.all([
      source[targetFuncName](...args),
      isWrite && tx.done
    ]);
    return result;
  };
  cachedMethods.set(prop, method);
  return method;
}
|
|
193
|
+
// Extend the idb proxy traps so database objects expose the shortcut
// methods built by getMethod, falling back to the previous traps otherwise.
replaceTraps((oldTraps) => ({
  ...oldTraps,
  get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),
  has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop)
}));
|
|
198
|
+
// Cursor methods whose invocation must be deferred until after the async
// iterator yields control back.
var advanceMethodProps = ["continue", "continuePrimaryKey", "advance"];
// Memoized wrapper functions for the advance methods, keyed by method name.
var methodMap = {};
// Pending advance result per proxied cursor (set by the wrappers below,
// consumed by the iterate() loop).
var advanceResults = /* @__PURE__ */ new WeakMap();
// Maps the iterator-facing proxy back to the underlying wrapped cursor.
var ittrProxiedCursorToOriginalProxy = /* @__PURE__ */ new WeakMap();
|
|
202
|
+
// Proxy traps for cursors handed out by the async iterator: calls to
// continue/continuePrimaryKey/advance are intercepted and their promises
// stashed in advanceResults so the iteration loop can await them instead of
// auto-continuing. All other properties pass straight through.
var cursorIteratorTraps = {
  get(target, prop) {
    if (!advanceMethodProps.includes(prop))
      return target[prop];
    let cachedFunc = methodMap[prop];
    if (!cachedFunc) {
      cachedFunc = methodMap[prop] = function(...args) {
        // `this` is the proxied cursor; delegate to the real cursor and
        // record the resulting promise for the iterate() loop to pick up.
        advanceResults.set(this, ittrProxiedCursorToOriginalProxy.get(this)[prop](...args));
      };
    }
    return cachedFunc;
  }
};
|
|
215
|
+
// Async-iterate a store, index, or cursor. Yields a proxied cursor whose
// advance methods are deferred via cursorIteratorTraps; if the consumer did
// not advance manually during the yield, the loop falls back to continue().
async function* iterate(...args) {
  let cursor = this;
  if (!(cursor instanceof IDBCursor)) {
    // `this` is a store or index; open a cursor over it first.
    cursor = await cursor.openCursor(...args);
  }
  if (!cursor)
    return;
  cursor = cursor;
  const proxiedCursor = new Proxy(cursor, cursorIteratorTraps);
  ittrProxiedCursorToOriginalProxy.set(proxiedCursor, cursor);
  // Let unwrap() resolve the proxy back to the raw IDBCursor.
  reverseTransformCache.set(proxiedCursor, unwrap(cursor));
  while (cursor) {
    yield proxiedCursor;
    // Prefer an advance the consumer requested during the yield; otherwise
    // move to the next record automatically.
    cursor = await (advanceResults.get(proxiedCursor) || cursor.continue());
    advanceResults.delete(proxiedCursor);
  }
}
|
|
232
|
+
// True when `prop` is one of the iteration entry points idb adds:
// Symbol.asyncIterator on indexes/stores/cursors, or the "iterate" method
// on indexes/stores.
function isIteratorProp(target, prop) {
  if (prop === Symbol.asyncIterator) {
    return instanceOfAny(target, [IDBIndex, IDBObjectStore, IDBCursor]);
  }
  return prop === "iterate" && instanceOfAny(target, [IDBIndex, IDBObjectStore]);
}
|
|
235
|
+
// Layer the iteration traps on top of the existing ones: stores, indexes
// and cursors gain Symbol.asyncIterator and an `iterate` method.
replaceTraps((oldTraps) => ({
  ...oldTraps,
  get(target, prop, receiver) {
    if (isIteratorProp(target, prop))
      return iterate;
    return oldTraps.get(target, prop, receiver);
  },
  has(target, prop) {
    return isIteratorProp(target, prop) || oldTraps.has(target, prop);
  }
}));
|
|
246
|
+
|
|
247
|
+
// src/schema.ts
|
|
248
|
+
function fingerprint(schema) {
|
|
249
|
+
const normalized = {};
|
|
250
|
+
for (const name of Object.keys(schema.stores).sort()) {
|
|
251
|
+
const def = schema.stores[name];
|
|
252
|
+
const indexes = def.indexes == null ? void 0 : Array.isArray(def.indexes) ? [...def.indexes].sort() : Object.fromEntries(
|
|
253
|
+
Object.keys(def.indexes).sort().map((k) => [
|
|
254
|
+
k,
|
|
255
|
+
def.indexes[k]
|
|
256
|
+
])
|
|
257
|
+
);
|
|
258
|
+
normalized[name] = {
|
|
259
|
+
keyPath: def.keyPath,
|
|
260
|
+
autoIncrement: def.autoIncrement,
|
|
261
|
+
indexes
|
|
262
|
+
};
|
|
263
|
+
}
|
|
264
|
+
return JSON.stringify(normalized);
|
|
265
|
+
}
|
|
266
|
+
// Create any missing object stores and indexes described by `schema` during
// an upgrade. Existing stores are fetched from the upgrade transaction;
// indexes that already exist are left untouched (never dropped).
function applySchema(db, schema, transaction) {
  for (const [storeName2, def] of Object.entries(schema.stores)) {
    let store;
    if (db.objectStoreNames.contains(storeName2)) {
      store = transaction.objectStore(storeName2);
    } else {
      store = db.createObjectStore(storeName2, {
        keyPath: def.keyPath,
        autoIncrement: def.autoIncrement ?? false
      });
    }
    if (!def.indexes) continue;
    // Array form means index name === key path; object form maps name -> path.
    const indexEntries = Array.isArray(def.indexes)
      ? def.indexes.map((name) => [name, name])
      : Object.entries(def.indexes);
    for (const [indexName, keyPath] of indexEntries) {
      if (!store.indexNames.contains(indexName)) {
        store.createIndex(indexName, keyPath);
      }
    }
  }
}
|
|
282
|
+
|
|
283
|
+
// src/initDb.ts
|
|
284
|
+
// Bookkeeping database that records each managed DB's schema fingerprint
// and current version, so schema changes can bump versions automatically.
var META_DB_NAME = "idb-refined-meta";
var META_STORE = "meta";
|
|
286
|
+
// Open (and create if needed) a database. With a schema, versioning is
// handled automatically via the meta database; otherwise the caller's
// explicit version (default 1) and upgrade callback are used directly.
async function initDb(name, options) {
  if (options?.schema != null) {
    return initDbWithSchema(name, options.schema, options.upgrade);
  }
  return openDB(name, options?.version ?? 1, { upgrade: options?.upgrade });
}
|
|
295
|
+
// Open `name` with automatic version management: the schema fingerprint is
// compared against the one recorded in the meta DB, and the version is
// bumped (with the schema re-applied) whenever the fingerprint changed.
async function initDbWithSchema(name, schema, customUpgrade) {
  const fp = fingerprint(schema);
  const metaDb = await openDB(META_DB_NAME, 1, {
    upgrade(db2) {
      if (!db2.objectStoreNames.contains(META_STORE)) {
        db2.createObjectStore(META_STORE, { keyPath: "dbName" });
      }
    }
  });
  const stored = await metaDb.get(META_STORE, name);
  let currentVersion = 0;
  try {
    // NOTE(review): a version-less open also *creates* the database (at
    // version 1) when it does not exist yet — this probe has a side effect.
    const probe = await openDB(name);
    currentVersion = probe.version;
    probe.close();
  } catch {
  }
  const needUpgrade = stored == null || stored.schemaFingerprint !== fp;
  // Bump past both the recorded and the actual on-disk version so the
  // upgrade callback is guaranteed to fire when the schema changed.
  const newVersion = needUpgrade ? Math.max((stored?.version ?? 0) + 1, currentVersion + 1) : stored.version;
  const db = await openDB(name, newVersion, {
    upgrade(db2, _oldVersion, _newVersion, transaction) {
      if (needUpgrade) {
        applySchema(db2, schema, transaction);
      }
      // Custom upgrade hook only receives the db handle here.
      customUpgrade?.(db2);
    }
  });
  if (needUpgrade) {
    // Record fingerprint/version only after the upgrade open succeeded.
    await metaDb.put(META_STORE, {
      dbName: name,
      schemaFingerprint: fp,
      version: newVersion
    });
  }
  metaDb.close();
  return db;
}
|
|
332
|
+
// Remove the meta record for a database (used after deleting it, so a
// later open starts from a clean slate instead of a stale version).
async function clearMetaForDb(dbName2) {
  const ensureStore = (db) => {
    if (!db.objectStoreNames.contains(META_STORE)) {
      db.createObjectStore(META_STORE, { keyPath: "dbName" });
    }
  };
  const metaDb = await openDB(META_DB_NAME, 1, { upgrade: ensureStore });
  await metaDb.delete(META_STORE, dbName2);
  metaDb.close();
}
|
|
343
|
+
|
|
344
|
+
// src/cleanOldEntries.ts
|
|
345
|
+
// Delete every record whose `dateKey` index value is strictly before
// `before`, all within a single readwrite transaction.
async function cleanOldEntries(db, storeName2, options) {
  const { dateKey, before } = options;
  const tx = db.transaction(storeName2, "readwrite");
  const store = tx.objectStore(storeName2);
  // Open upper bound: entries whose value equals `before` are kept.
  const range = IDBKeyRange.upperBound(before, true);
  const expiredKeys = await store.index(dateKey).getAllKeys(range);
  for (const key of expiredKeys) {
    store.delete(key);
  }
  await tx.done;
}
|
|
357
|
+
|
|
358
|
+
// src/constants.ts
|
|
359
|
+
// Default time-to-live for stored entries: one hour, in milliseconds.
var DEFAULT_TTL_MS = 3600 * 1e3;
// Default cap on the number of entries kept per store.
var DEFAULT_MAX_COUNT = 1e3;
|
|
361
|
+
|
|
362
|
+
// src/cleanWhenTooLarge.ts
|
|
363
|
+
// Evict the oldest entries (ascending order of the `dateKey` index) until
// the store holds at most `maxCount` records. Returns how many were removed.
async function cleanWhenTooLarge(db, storeName2, options) {
  const { dateKey } = options;
  const limit = options.maxCount ?? DEFAULT_MAX_COUNT;
  const total = await db.count(storeName2);
  if (total <= limit) return 0;
  const excess = total - limit;
  const tx = db.transaction(storeName2, "readwrite");
  const store = tx.objectStore(storeName2);
  // Walk the date index oldest-first, collecting primary keys to delete.
  const victims = [];
  let cursor = await store.index(dateKey).openKeyCursor(null, "next");
  while (cursor != null && victims.length < excess) {
    victims.push(cursor.primaryKey);
    cursor = await cursor.continue();
  }
  for (const key of victims) {
    store.delete(key);
  }
  await tx.done;
  return victims.length;
}
|
|
384
|
+
|
|
385
|
+
// src/clientCore.ts
|
|
386
|
+
// Key path every managed store uses for its primary key.
var DEFAULT_KEY_PATH = "id";
// Date-valued indexes created on every store; they drive TTL cleanup
// (expiresAt) and size-based eviction (createdAt).
var DATE_INDEXES = ["expiresAt", "createdAt"];
// One open connection per database name, plus the set of stores it was
// opened with (used to detect when a reopen with more stores is needed).
var dbCache = /* @__PURE__ */ new Map();
|
|
389
|
+
// Build the implicit schema used for every managed database: each store is
// keyed by the default key path and indexed on the two date fields.
function getDefaultSchema(storeNames) {
  const stores = {};
  storeNames.forEach((name) => {
    stores[name] = {
      keyPath: DEFAULT_KEY_PATH,
      indexes: DATE_INDEXES
    };
  });
  return { stores };
}
|
|
399
|
+
// Return a cached connection guaranteed to contain `storeName2`. When the
// cached connection lacks the store, it is closed and the database reopened
// with the union of all known stores; the resulting schema-fingerprint
// change triggers an automatic version bump in initDbWithSchema.
async function getDb(dbName2, storeName2) {
  const entry = dbCache.get(dbName2);
  if (entry != null && entry.storeNames.has(storeName2)) {
    return entry.db;
  }
  const existingStores = entry != null ? [...entry.storeNames] : [];
  if (entry != null) {
    // Must close before reopening at a higher version, or the open blocks.
    entry.db.close();
    dbCache.delete(dbName2);
  }
  const allStores = existingStores.includes(storeName2) ? existingStores : [...existingStores, storeName2];
  const schema = getDefaultSchema(allStores);
  const db = await initDb(dbName2, { schema });
  dbCache.set(dbName2, {
    db,
    storeNames: new Set(allStores)
  });
  return db;
}
|
|
418
|
+
function setExpiryFields(value, ttlMs2 = DEFAULT_TTL_MS) {
|
|
419
|
+
const now = Date.now();
|
|
420
|
+
if (value.createdAt === void 0) value.createdAt = now;
|
|
421
|
+
if (value.expiresAt === void 0) value.expiresAt = now + ttlMs2;
|
|
422
|
+
}
|
|
423
|
+
// Store `value` (shallow-copied so the caller's object is never mutated),
// stamping TTL fields and enforcing both expiry and size limits.
async function executeSet(dbName2, storeName2, value, options) {
  const db = await getDb(dbName2, storeName2);
  const toStore = { ...value };
  setExpiryFields(toStore, options?.ttlMs ?? DEFAULT_TTL_MS);
  // First pass: drop entries whose TTL has already elapsed.
  await cleanOldEntries(db, storeName2, {
    dateKey: "expiresAt",
    before: Date.now()
  });
  const maxCount2 = options?.maxCount ?? DEFAULT_MAX_COUNT;
  const count = await db.count(storeName2);
  // Make room before inserting when the store is already at capacity.
  if (count >= maxCount2) {
    await cleanWhenTooLarge(db, storeName2, {
      dateKey: "createdAt",
      maxCount: maxCount2
    });
  }
  await db.put(storeName2, toStore);
  // Second pass: re-check after inserting, since the put (or a concurrent
  // writer) may have pushed the store over the limit again.
  const countAfter = await db.count(storeName2);
  if (countAfter > maxCount2) {
    await cleanWhenTooLarge(db, storeName2, {
      dateKey: "createdAt",
      maxCount: maxCount2
    });
  }
}
|
|
448
|
+
// Fetch a single record by primary key; resolves to undefined when absent.
async function executeGet(dbName2, storeName2, key) {
  const db = await getDb(dbName2, storeName2);
  return db.get(storeName2, key);
}
|
|
452
|
+
// Fetch every record currently present in the store.
async function executeGetAll(dbName2, storeName2) {
  const db = await getDb(dbName2, storeName2);
  return db.getAll(storeName2);
}
|
|
456
|
+
// List every primary key currently present in the store.
async function executeKeys(dbName2, storeName2) {
  const db = await getDb(dbName2, storeName2);
  return await db.getAllKeys(storeName2);
}
|
|
460
|
+
// Fetch several keys at once; the resolved array is positionally aligned
// with `keys`, with undefined entries for misses.
async function executeGetMany(dbName2, storeName2, keys) {
  const db = await getDb(dbName2, storeName2);
  return Promise.all(keys.map((key) => db.get(storeName2, key)));
}
|
|
467
|
+
// Merge `value` into the record stored under `key` (shallow merge, upsert
// semantics). The key-path field always wins so a record cannot be re-keyed
// through the patch object.
async function executeUpdate(dbName2, storeName2, key, value) {
  const db = await getDb(dbName2, storeName2);
  const existing = await db.get(storeName2, key);
  const merged = {
    ...existing ?? {},
    ...value,
    [DEFAULT_KEY_PATH]: key
  };
  // Bug fix: an upsert that created a new record previously carried no
  // createdAt/expiresAt, so it never appeared in either date index and could
  // neither TTL-expire nor be evicted. Stamp defaults here; records that
  // already have the fields keep their existing values.
  setExpiryFields(merged);
  await db.put(storeName2, merged);
}
|
|
477
|
+
// Remove the record stored under `key`; a no-op when the key is absent.
async function executeDelete(dbName2, storeName2, key) {
  const db = await getDb(dbName2, storeName2);
  return db.delete(storeName2, key);
}
|
|
481
|
+
// Close any cached connection, delete the whole database, then clear its
// meta record so a later open starts from scratch.
async function executeDeleteDb(dbName2) {
  const entry = dbCache.get(dbName2);
  if (entry != null) {
    // An open connection would block indexedDB.deleteDatabase.
    entry.db.close();
    dbCache.delete(dbName2);
  }
  await deleteDB(dbName2);
  await clearMetaForDb(dbName2);
}
|
|
490
|
+
|
|
491
|
+
// src/worker.ts
|
|
492
|
+
// Per-worker configuration, populated once by the "init" message and used
// by every subsequent request.
var dbName;
var storeName;
var ttlMs;
var maxCount;
|
|
496
|
+
// Post a reply for request `id`: an { id, error } envelope when `error` is
// provided, otherwise { id, result } (result may be undefined for acks).
function respond(id, result, error) {
  if (error != null) {
    self.postMessage({ id, error });
  } else {
    self.postMessage({ id, result });
  }
}
|
|
500
|
+
// Request dispatcher: every incoming message carries a correlation id that
// is echoed back in the response; any thrown value is serialized to a
// string error so the client side can reject the matching promise.
self.onmessage = async (e) => {
  const { type, id, payload } = e.data;
  try {
    switch (type) {
      case "init": {
        // Capture per-worker config and eagerly open the database.
        const {
          dbName: dn,
          storeName: sn,
          ttlMs: ttl,
          maxCount: max
        } = payload;
        dbName = dn;
        storeName = sn;
        ttlMs = ttl;
        maxCount = max;
        await getDb(dbName, storeName);
        respond(id);
        break;
      }
      case "set":
        await executeSet(
          dbName,
          storeName,
          payload,
          {
            ttlMs,
            maxCount
          }
        );
        respond(id);
        break;
      case "get": {
        const value = await executeGet(
          dbName,
          storeName,
          payload
        );
        respond(id, value);
        break;
      }
      case "getAll": {
        const values = await executeGetAll(dbName, storeName);
        respond(id, values);
        break;
      }
      case "keys": {
        const keysResult = await executeKeys(dbName, storeName);
        respond(id, keysResult);
        break;
      }
      case "getMany": {
        const values = await executeGetMany(
          dbName,
          storeName,
          payload
        );
        respond(id, values);
        break;
      }
      case "update": {
        const { key, value } = payload;
        await executeUpdate(dbName, storeName, key, value);
        respond(id);
        break;
      }
      case "delete":
        await executeDelete(dbName, storeName, payload);
        respond(id);
        break;
      case "deleteDb":
        await executeDeleteDb(dbName);
        respond(id);
        break;
      default:
        // Unknown types are reported back rather than silently dropped.
        respond(id, void 0, `Unknown message type: ${type}`);
    }
  } catch (err) {
    // Normalize any thrown value into a string for postMessage.
    respond(id, void 0, err instanceof Error ? err.message : String(err));
  }
};
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
/**
 * Request envelope sent from the client to the worker. Every variant
 * carries a correlation `id` that the worker echoes back in its
 * {@link WorkerResponse}; `type` discriminates the payload shape.
 */
export type WorkerMessage = {
  type: "init";
  id: number;
  payload: {
    dbName: string;
    storeName: string;
    ttlMs?: number;
    maxCount?: number;
  };
} | {
  type: "set";
  id: number;
  payload: unknown;
} | {
  type: "get";
  id: number;
  payload: IDBValidKey;
} | {
  type: "getAll";
  id: number;
  payload?: undefined;
} | {
  type: "keys";
  id: number;
  payload?: undefined;
} | {
  type: "getMany";
  id: number;
  payload: IDBValidKey[];
} | {
  type: "update";
  id: number;
  payload: {
    key: IDBValidKey;
    value: unknown;
  };
} | {
  type: "delete";
  id: number;
  payload: IDBValidKey;
} | {
  type: "deleteDb";
  id: number;
  payload?: undefined;
};
|
|
46
|
+
/**
 * Reply envelope posted by the worker: either a successful `result`
 * (possibly undefined for acknowledgements) or a string `error`,
 * correlated with the originating request by `id`.
 */
export type WorkerResponse = {
  id: number;
  result: unknown;
} | {
  id: number;
  error: string;
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// workerProtocol carries types only; this keeps the emitted file a module.
export {};
|
package/package.json
CHANGED
|
@@ -1,8 +1,13 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "idb-refined",
|
|
3
|
-
"version": "0.0.
|
|
4
|
-
"description": "
|
|
3
|
+
"version": "0.0.5",
|
|
4
|
+
"description": "Minimal IndexedDB client with TTL, eviction, and Web Worker support—no manual schema versions.",
|
|
5
5
|
"type": "module",
|
|
6
|
+
"repository": "github:thiennp/idb-refined",
|
|
7
|
+
"homepage": "https://github.com/thiennp/idb-refined#readme",
|
|
8
|
+
"bugs": {
|
|
9
|
+
"url": "https://github.com/thiennp/idb-refined/issues"
|
|
10
|
+
},
|
|
6
11
|
"main": "./dist/index.js",
|
|
7
12
|
"module": "./dist/index.js",
|
|
8
13
|
"types": "./dist/index.d.ts",
|
|
@@ -11,13 +16,18 @@
|
|
|
11
16
|
"types": "./dist/index.d.ts",
|
|
12
17
|
"import": "./dist/index.js",
|
|
13
18
|
"default": "./dist/index.js"
|
|
19
|
+
},
|
|
20
|
+
"./worker": {
|
|
21
|
+
"import": "./dist/worker.js",
|
|
22
|
+
"default": "./dist/worker.js"
|
|
14
23
|
}
|
|
15
24
|
},
|
|
16
25
|
"files": [
|
|
17
26
|
"dist"
|
|
18
27
|
],
|
|
19
28
|
"scripts": {
|
|
20
|
-
"build": "tsc -p tsconfig.build.json",
|
|
29
|
+
"build": "tsc -p tsconfig.build.json && esbuild src/worker.ts --bundle --format=esm --outfile=dist/worker.js --platform=browser",
|
|
30
|
+
"example": "pnpm build && vite --config example/vite.config.ts",
|
|
21
31
|
"lint": "eslint src",
|
|
22
32
|
"lint:fix": "eslint src --fix",
|
|
23
33
|
"format": "prettier --write \"src/**/*.ts\"",
|
|
@@ -39,6 +49,7 @@
|
|
|
39
49
|
"@commitlint/cli": "^19.8.1",
|
|
40
50
|
"@commitlint/config-conventional": "^19.8.1",
|
|
41
51
|
"@eslint/js": "^9.39.2",
|
|
52
|
+
"esbuild": "^0.24.2",
|
|
42
53
|
"eslint": "^9.39.2",
|
|
43
54
|
"eslint-config-prettier": "^9.1.2",
|
|
44
55
|
"fake-indexeddb": "^6.2.5",
|
|
@@ -47,6 +58,7 @@
|
|
|
47
58
|
"prettier": "^3.8.1",
|
|
48
59
|
"typescript": "^5.6.3",
|
|
49
60
|
"typescript-eslint": "^8.54.0",
|
|
61
|
+
"vite": "^6.0.3",
|
|
50
62
|
"vitest": "^2.1.9"
|
|
51
63
|
},
|
|
52
64
|
"keywords": [
|