@abraca/dabra 0.6.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/abracadabra-provider.cjs +729 -65
- package/dist/abracadabra-provider.cjs.map +1 -1
- package/dist/abracadabra-provider.esm.js +707 -61
- package/dist/abracadabra-provider.esm.js.map +1 -1
- package/dist/index.d.ts +245 -31
- package/package.json +1 -2
- package/src/{HocuspocusProvider.ts → AbracadabraBaseProvider.ts} +33 -22
- package/src/AbracadabraClient.ts +69 -3
- package/src/AbracadabraProvider.ts +11 -11
- package/src/{HocuspocusProviderWebsocket.ts → AbracadabraWS.ts} +36 -22
- package/src/CloseEvents.ts +49 -0
- package/src/DocumentCache.ts +210 -0
- package/src/FileBlobStore.ts +300 -0
- package/src/MessageReceiver.ts +8 -8
- package/src/OutgoingMessages/AuthenticationMessage.ts +1 -1
- package/src/SearchIndex.ts +247 -0
- package/src/auth.ts +62 -0
- package/src/awarenessStatesToArray.ts +10 -0
- package/src/index.ts +9 -2
- package/src/types.ts +46 -1
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* IndexedDB-backed file blob cache with an offline-tolerant upload queue.
|
|
3
|
+
*
|
|
4
|
+
* Responsibilities:
|
|
5
|
+
* - Cache downloaded file blobs locally so they can be served offline via
|
|
6
|
+
* object URLs (URL.createObjectURL).
|
|
7
|
+
* - Queue file uploads when the network is unavailable. The queue persists
|
|
8
|
+
* across page reloads (IndexedDB-backed).
|
|
9
|
+
* - Auto-flush the upload queue when the browser reports it is back online,
|
|
10
|
+
* and expose flushQueue() for manual flushing.
|
|
11
|
+
*
|
|
12
|
+
* Events:
|
|
13
|
+
* - "upload:queued" — entry added to queue
|
|
14
|
+
* - "upload:started" — upload attempt started
|
|
15
|
+
* - "upload:done" — upload succeeded
|
|
16
|
+
* - "upload:error" — upload attempt failed
|
|
17
|
+
*
|
|
18
|
+
* Falls back to a silent no-op when IndexedDB is unavailable (SSR / Node.js).
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
import type { UploadQueueEntry } from "./types.ts";
|
|
22
|
+
import type { AbracadabraClient } from "./AbracadabraClient.ts";
|
|
23
|
+
import EventEmitter from "./EventEmitter.ts";
|
|
24
|
+
|
|
25
|
+
const DB_VERSION = 1;
|
|
26
|
+
|
|
27
|
+
function idbAvailable(): boolean {
|
|
28
|
+
return typeof globalThis !== "undefined" && "indexedDB" in globalThis;
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
function openDb(origin: string): Promise<IDBDatabase> {
|
|
32
|
+
return new Promise((resolve, reject) => {
|
|
33
|
+
const req = globalThis.indexedDB.open(`abracadabra:files:${origin}`, DB_VERSION);
|
|
34
|
+
|
|
35
|
+
req.onupgradeneeded = (event) => {
|
|
36
|
+
const db = (event.target as IDBOpenDBRequest).result;
|
|
37
|
+
if (!db.objectStoreNames.contains("blobs")) {
|
|
38
|
+
db.createObjectStore("blobs");
|
|
39
|
+
}
|
|
40
|
+
if (!db.objectStoreNames.contains("upload_queue")) {
|
|
41
|
+
db.createObjectStore("upload_queue", { keyPath: "id" });
|
|
42
|
+
}
|
|
43
|
+
};
|
|
44
|
+
|
|
45
|
+
req.onsuccess = () => resolve(req.result);
|
|
46
|
+
req.onerror = () => reject(req.error);
|
|
47
|
+
});
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
function txPromise<T>(store: IDBObjectStore, request: IDBRequest<T>): Promise<T> {
|
|
51
|
+
return new Promise((resolve, reject) => {
|
|
52
|
+
request.onsuccess = () => resolve(request.result);
|
|
53
|
+
request.onerror = () => reject(request.error);
|
|
54
|
+
});
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
/** Shape of a record stored in the "blobs" object store. */
interface BlobCacheEntry {
  // The cached file contents.
  blob: Blob;
  // MIME type captured from the downloaded blob (blob.type).
  mime_type: string;
  // Stored filename; currently set to the uploadId when caching downloads.
  filename: string;
  // Epoch milliseconds when the entry was written (Date.now()).
  cachedAt: number;
}
|
|
63
|
+
|
|
64
|
+
/**
 * Per-origin blob cache plus persistent, offline-tolerant upload queue.
 *
 * Storage layout (see openDb):
 *   - "blobs":        blobKey(docId, uploadId) → BlobCacheEntry
 *   - "upload_queue": UploadQueueEntry records keyed by their "id" field
 *
 * All IDB access degrades to a no-op when IndexedDB is unavailable or fails
 * to open — getDb() then resolves to null and every method short-circuits.
 */
export class FileBlobStore extends EventEmitter {
  // Server origin; used only to namespace the IDB database name.
  private readonly origin: string;
  // Transport for actual downloads/uploads.
  // NOTE(review): getUpload/upload contracts live in AbracadabraClient — not visible here.
  private readonly client: AbracadabraClient;
  // Memoized open attempt; resolves to null when IDB is unavailable or open failed.
  private dbPromise: Promise<IDBDatabase | null> | null = null;
  // Resolved handle, kept so destroy() can close it synchronously.
  private db: IDBDatabase | null = null;

  /** Tracks active object URLs so we can revoke them on destroy. */
  private readonly objectUrls = new Map<string, string>();

  /** Prevents concurrent flush runs. */
  private _flushing = false;

  // Bound "online" listener, retained so destroy() can remove it.
  private readonly _onlineHandler: () => void;

  constructor(serverOrigin: string, client: AbracadabraClient) {
    super();
    this.origin = serverOrigin;
    this.client = client;

    // Auto-flush when the browser reports connectivity; flush errors are
    // swallowed here — per-entry failures surface via "upload:error" events.
    this._onlineHandler = () => { this.flushQueue().catch(() => null); };
    if (typeof window !== "undefined") {
      window.addEventListener("online", this._onlineHandler);
    }
  }

  /** Lazily open the database (memoized). Resolves to null on any failure. */
  private getDb(): Promise<IDBDatabase | null> {
    if (!idbAvailable()) return Promise.resolve(null);
    if (!this.dbPromise) {
      this.dbPromise = openDb(this.origin)
        .catch(() => null) // open failure → behave as "no cache"
        .then((db) => {
          this.db = db;
          return db;
        });
    }
    return this.dbPromise;
  }

  /** Composite key used in the "blobs" store. */
  private blobKey(docId: string, uploadId: string): string {
    return `${docId}/${uploadId}`;
  }

  // ── Blob cache ────────────────────────────────────────────────────────────

  /**
   * Return a local object URL for a file.
   * On first call the blob is downloaded from the server and cached in IDB.
   * Returns null when offline and the blob is not yet cached, or when
   * URL.createObjectURL is unavailable (e.g. Node.js / SSR).
   */
  async getBlobUrl(docId: string, uploadId: string): Promise<string | null> {
    // Object URLs are only meaningful in browser environments.
    if (typeof window === "undefined") return null;

    const key = this.blobKey(docId, uploadId);

    // Reuse existing in-memory object URL if available
    const existing = this.objectUrls.get(key);
    if (existing) return existing;

    const db = await this.getDb();
    if (db) {
      // Check the persistent cache first.
      const tx = db.transaction("blobs", "readonly");
      const entry = await txPromise<BlobCacheEntry | undefined>(
        tx.objectStore("blobs"),
        tx.objectStore("blobs").get(key),
      );
      if (entry) {
        const url = URL.createObjectURL(entry.blob);
        this.objectUrls.set(key, url);
        return url;
      }
    }

    // Not cached — try downloading
    let blob: Blob;
    try {
      blob = await this.client.getUpload(docId, uploadId);
    } catch {
      // Offline or server error: caller gets null rather than an exception.
      return null;
    }

    // Cache the blob
    // NOTE(review): the put request's errors are not observed; a failed cache
    // write is silently dropped (the URL below is still returned).
    if (db) {
      const entry: BlobCacheEntry = {
        blob,
        mime_type: blob.type,
        filename: uploadId,
        cachedAt: Date.now(),
      };
      const tx = db.transaction("blobs", "readwrite");
      tx.objectStore("blobs").put(entry, key);
    }

    const url = URL.createObjectURL(blob);
    this.objectUrls.set(key, url);
    return url;
  }

  /** Revoke the object URL and remove the blob from cache. */
  async evictBlob(docId: string, uploadId: string): Promise<void> {
    const key = this.blobKey(docId, uploadId);

    // Revoke the in-memory object URL first (no-op if never created).
    const url = this.objectUrls.get(key);
    if (url) {
      URL.revokeObjectURL(url);
      this.objectUrls.delete(key);
    }

    const db = await this.getDb();
    if (!db) return;
    const tx = db.transaction("blobs", "readwrite");
    await txPromise(tx.objectStore("blobs"), tx.objectStore("blobs").delete(key));
  }

  // ── Upload queue ──────────────────────────────────────────────────────────

  /**
   * Queue a file for upload. Works offline — the entry is persisted to IDB
   * and flushed the next time the queue is flushed.
   * Returns the generated queue entry id.
   */
  async queueUpload(
    docId: string,
    file: File | Blob,
    // Only consulted when `file` is a bare Blob; a File's own name wins.
    filename?: string,
  ): Promise<string> {
    const id = crypto.randomUUID();
    const resolvedFilename =
      file instanceof File ? file.name : (filename ?? "file");

    const entry: UploadQueueEntry = {
      id,
      docId,
      file, // Blobs/Files survive IDB structured cloning.
      filename: resolvedFilename,
      status: "pending",
      createdAt: Date.now(),
    };

    const db = await this.getDb();
    if (db) {
      const tx = db.transaction("upload_queue", "readwrite");
      await txPromise(
        tx.objectStore("upload_queue"),
        tx.objectStore("upload_queue").put(entry),
      );
    }

    // Emitted even when persistence was unavailable (entry lives only in this event).
    this.emit("upload:queued", entry);
    return id;
  }

  /** Return all upload queue entries. */
  async getQueue(): Promise<UploadQueueEntry[]> {
    const db = await this.getDb();
    if (!db) return [];
    return new Promise((resolve, reject) => {
      const tx = db.transaction("upload_queue", "readonly");
      const req = tx.objectStore("upload_queue").getAll();
      req.onsuccess = () => resolve(req.result as UploadQueueEntry[]);
      req.onerror = () => reject(req.error);
    });
  }

  /**
   * Upload all pending queue entries via AbracadabraClient.
   * Safe to call repeatedly — a concurrent call is a no-op.
   * Entries that fail are marked with status "error" and left in the queue.
   * NOTE(review): successful entries are marked "done" but are never deleted
   * here either — nothing in this class prunes the queue store.
   */
  async flushQueue(): Promise<void> {
    if (this._flushing) return;
    this._flushing = true;

    try {
      const all = await this.getQueue();
      const pending = all.filter((e) => e.status === "pending");

      // Uploads run sequentially, one entry at a time.
      for (const entry of pending) {
        await this._updateQueueEntry(entry.id, { status: "uploading" });
        this.emit("upload:started", { ...entry, status: "uploading" });

        try {
          await this.client.upload(entry.docId, entry.file, entry.filename);
          await this._updateQueueEntry(entry.id, { status: "done" });
          this.emit("upload:done", { ...entry, status: "done" });
        } catch (err) {
          const message = err instanceof Error ? err.message : String(err);
          await this._updateQueueEntry(entry.id, { status: "error", error: message });
          this.emit("upload:error", { ...entry, status: "error", error: message });
        }
      }
    } finally {
      this._flushing = false;
    }
  }

  /** Read-modify-write a single queue record; silently resolves if the id is gone. */
  private async _updateQueueEntry(
    id: string,
    patch: Partial<UploadQueueEntry>,
  ): Promise<void> {
    const db = await this.getDb();
    if (!db) return;

    return new Promise<void>((resolve, reject) => {
      const tx = db.transaction("upload_queue", "readwrite");
      const store = tx.objectStore("upload_queue");
      const req = store.get(id);
      req.onsuccess = () => {
        if (!req.result) { resolve(); return; }
        const updated = { ...req.result, ...patch };
        store.put(updated);
        // Resolve only once the whole transaction commits, so the write is durable.
        tx.oncomplete = () => resolve();
        tx.onerror = () => reject(tx.error);
      };
      req.onerror = () => reject(req.error);
    });
  }

  // ── Lifecycle ─────────────────────────────────────────────────────────────

  /**
   * Tear down: detach the "online" listener, revoke all object URLs, close
   * the database, and drop listeners.
   * NOTE(review): dbPromise is not reset, so a destroyed instance that is
   * reused would hand out the already-closed database.
   */
  destroy(): void {
    if (typeof window !== "undefined") {
      window.removeEventListener("online", this._onlineHandler);
    }

    // Revoke all tracked object URLs
    for (const url of this.objectUrls.values()) {
      URL.revokeObjectURL(url);
    }
    this.objectUrls.clear();

    this.db?.close();
    this.db = null;
    this.removeAllListeners();
  }
}
|
package/src/MessageReceiver.ts
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
|
-
import { readAuthMessage } from "
|
|
1
|
+
import { readAuthMessage } from "./auth.ts";
|
|
2
2
|
import { readVarInt, readVarString } from "lib0/decoding";
|
|
3
3
|
import type { CloseEvent } from "ws";
|
|
4
4
|
import * as awarenessProtocol from "y-protocols/awareness";
|
|
5
5
|
import { messageYjsSyncStep2, readSyncMessage } from "y-protocols/sync";
|
|
6
|
-
import type {
|
|
6
|
+
import type { AbracadabraBaseProvider } from "./AbracadabraBaseProvider.ts";
|
|
7
7
|
import type { IncomingMessage } from "./IncomingMessage.ts";
|
|
8
8
|
import { OutgoingMessage } from "./OutgoingMessage.ts";
|
|
9
9
|
import { MessageType } from "./types.ts";
|
|
@@ -15,7 +15,7 @@ export class MessageReceiver {
|
|
|
15
15
|
this.message = message;
|
|
16
16
|
}
|
|
17
17
|
|
|
18
|
-
public apply(provider:
|
|
18
|
+
public apply(provider: AbracadabraBaseProvider, emitSynced: boolean) {
|
|
19
19
|
const { message } = this;
|
|
20
20
|
const type = message.readVarUint();
|
|
21
21
|
|
|
@@ -72,7 +72,7 @@ export class MessageReceiver {
|
|
|
72
72
|
}
|
|
73
73
|
}
|
|
74
74
|
|
|
75
|
-
private applySyncMessage(provider:
|
|
75
|
+
private applySyncMessage(provider: AbracadabraBaseProvider, emitSynced: boolean) {
|
|
76
76
|
const { message } = this;
|
|
77
77
|
|
|
78
78
|
message.writeVarUint(MessageType.Sync);
|
|
@@ -91,13 +91,13 @@ export class MessageReceiver {
|
|
|
91
91
|
}
|
|
92
92
|
}
|
|
93
93
|
|
|
94
|
-
applySyncStatusMessage(provider:
|
|
94
|
+
applySyncStatusMessage(provider: AbracadabraBaseProvider, applied: boolean) {
|
|
95
95
|
if (applied) {
|
|
96
96
|
provider.decrementUnsyncedChanges();
|
|
97
97
|
}
|
|
98
98
|
}
|
|
99
99
|
|
|
100
|
-
private applyAwarenessMessage(provider:
|
|
100
|
+
private applyAwarenessMessage(provider: AbracadabraBaseProvider) {
|
|
101
101
|
if (!provider.awareness) return;
|
|
102
102
|
|
|
103
103
|
const { message } = this;
|
|
@@ -109,7 +109,7 @@ export class MessageReceiver {
|
|
|
109
109
|
);
|
|
110
110
|
}
|
|
111
111
|
|
|
112
|
-
private applyAuthMessage(provider:
|
|
112
|
+
private applyAuthMessage(provider: AbracadabraBaseProvider) {
|
|
113
113
|
const { message } = this;
|
|
114
114
|
|
|
115
115
|
readAuthMessage(
|
|
@@ -120,7 +120,7 @@ export class MessageReceiver {
|
|
|
120
120
|
);
|
|
121
121
|
}
|
|
122
122
|
|
|
123
|
-
private applyQueryAwarenessMessage(provider:
|
|
123
|
+
private applyQueryAwarenessMessage(provider: AbracadabraBaseProvider) {
|
|
124
124
|
if (!provider.awareness) return;
|
|
125
125
|
|
|
126
126
|
const { message } = this;
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { writeVarString, writeVarUint } from "lib0/encoding";
|
|
2
|
-
import { writeAuthentication } from "
|
|
2
|
+
import { writeAuthentication } from "../auth.ts";
|
|
3
3
|
import type { OutgoingMessageArguments } from "../types.ts";
|
|
4
4
|
import { MessageType } from "../types.ts";
|
|
5
5
|
import { OutgoingMessage } from "../OutgoingMessage.ts";
|
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* IndexedDB-backed trigram full-text search index.
|
|
3
|
+
*
|
|
4
|
+
* Generic: caller passes (docId, texts[]) — works for Y.Doc text content,
|
|
5
|
+
* file names, document titles, or any other text. No Y.js coupling.
|
|
6
|
+
*
|
|
7
|
+
* Algorithm: trigram inverted index.
|
|
8
|
+
* - Each document is decomposed into overlapping 3-character windows.
|
|
9
|
+
* - The "postings" store maps trigram → [docId, ...].
|
|
10
|
+
* - The "doc_trigrams" store maps docId → [trigram, ...] for efficient removal.
|
|
11
|
+
* - search() scores by number of query trigrams that match.
|
|
12
|
+
*
|
|
13
|
+
* IDB transactions that touch multiple stores use the callback-only pattern
|
|
14
|
+
* (not async/await inside a transaction) to avoid the transaction auto-commit
|
|
15
|
+
* issue across microtask boundaries.
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
import type { SearchResult } from "./types.ts";
|
|
19
|
+
|
|
20
|
+
const DB_VERSION = 1;
|
|
21
|
+
|
|
22
|
+
function idbAvailable(): boolean {
|
|
23
|
+
return typeof globalThis !== "undefined" && "indexedDB" in globalThis;
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
function openDb(origin: string): Promise<IDBDatabase> {
|
|
27
|
+
return new Promise((resolve, reject) => {
|
|
28
|
+
const req = globalThis.indexedDB.open(`abracadabra:search:${origin}`, DB_VERSION);
|
|
29
|
+
|
|
30
|
+
req.onupgradeneeded = (event) => {
|
|
31
|
+
const db = (event.target as IDBOpenDBRequest).result;
|
|
32
|
+
if (!db.objectStoreNames.contains("postings")) {
|
|
33
|
+
db.createObjectStore("postings");
|
|
34
|
+
}
|
|
35
|
+
if (!db.objectStoreNames.contains("doc_trigrams")) {
|
|
36
|
+
db.createObjectStore("doc_trigrams");
|
|
37
|
+
}
|
|
38
|
+
};
|
|
39
|
+
|
|
40
|
+
req.onsuccess = () => resolve(req.result);
|
|
41
|
+
req.onerror = () => reject(req.error);
|
|
42
|
+
});
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/** Extract the set of trigrams for a piece of text. */
|
|
46
|
+
function extractTrigrams(text: string): Set<string> {
|
|
47
|
+
const trigrams = new Set<string>();
|
|
48
|
+
const padded = ` ${text.toLowerCase()} `;
|
|
49
|
+
for (let i = 0; i <= padded.length - 3; i++) {
|
|
50
|
+
trigrams.add(padded.slice(i, i + 3));
|
|
51
|
+
}
|
|
52
|
+
return trigrams;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/** Merge trigrams from multiple texts into a single set. */
|
|
56
|
+
function extractAllTrigrams(texts: string[]): Set<string> {
|
|
57
|
+
const result = new Set<string>();
|
|
58
|
+
for (const t of texts) {
|
|
59
|
+
for (const trigram of extractTrigrams(t)) {
|
|
60
|
+
result.add(trigram);
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
return result;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
/**
 * Trigram inverted index over IndexedDB (see the file header for the layout).
 * All mutating methods resolve only once their transaction commits; every
 * method degrades to a no-op / empty result when IndexedDB is unavailable.
 */
export class SearchIndex {
  // Server origin; used only to namespace the IDB database name.
  private readonly origin: string;
  // Memoized open attempt; resolves to null when IDB is unavailable or open failed.
  private dbPromise: Promise<IDBDatabase | null> | null = null;
  // Resolved handle, kept so destroy() can close it synchronously.
  private db: IDBDatabase | null = null;

  constructor(serverOrigin: string) {
    this.origin = serverOrigin;
  }

  /** Lazily open the database (memoized). Resolves to null on any failure. */
  private getDb(): Promise<IDBDatabase | null> {
    if (!idbAvailable()) return Promise.resolve(null);
    if (!this.dbPromise) {
      this.dbPromise = openDb(this.origin)
        .catch(() => null) // open failure → behave as "no index"
        .then((db) => {
          this.db = db;
          return db;
        });
    }
    return this.dbPromise;
  }

  /**
   * Replace the index for docId with the given texts.
   * Old trigram associations are removed before new ones are added.
   */
  async index(docId: string, texts: string[]): Promise<void> {
    const db = await this.getDb();
    if (!db) return;

    const newTrigrams = extractAllTrigrams(texts);

    return new Promise<void>((resolve, reject) => {
      const tx = db.transaction(["postings", "doc_trigrams"], "readwrite");
      tx.oncomplete = () => resolve();
      tx.onerror = () => reject(tx.error);

      const postings = tx.objectStore("postings");
      const docTrigramsStore = tx.objectStore("doc_trigrams");

      // Step 1: read old trigrams for this doc
      const oldReq = docTrigramsStore.get(docId);
      oldReq.onsuccess = () => {
        const oldTrigrams: string[] = oldReq.result ?? [];
        let pending = oldTrigrams.length + newTrigrams.size + 1; // +1 for doc_trigrams write

        // NOTE(review): `pending` is decremented but never read — completion
        // relies solely on tx.oncomplete. The counter is vestigial.
        function done() {
          pending--;
          // tx.oncomplete fires naturally once all requests settle
        }

        // Step 2: remove docId from each old trigram's posting list
        for (const trigram of oldTrigrams) {
          const req = postings.get(trigram);
          req.onsuccess = () => {
            const list: string[] = req.result ?? [];
            const updated = list.filter((id) => id !== docId);
            if (updated.length === 0) {
              postings.delete(trigram); // drop empty posting lists entirely
            } else {
              postings.put(updated, trigram);
            }
            done();
          };
          req.onerror = done;
        }

        // Step 3: add docId to each new trigram's posting list
        // For a trigram present in both the old and new sets, Step 2's removal
        // and this re-add both run; IDB requests execute FIFO within a
        // transaction, so the Step-3 write (which includes docId) lands last.
        for (const trigram of newTrigrams) {
          const req = postings.get(trigram);
          req.onsuccess = () => {
            const list: string[] = req.result ?? [];
            if (!list.includes(docId)) {
              list.push(docId);
            }
            postings.put(list, trigram);
            done();
          };
          req.onerror = done;
        }

        // Step 4: save new trigram set for this doc
        const writeReq = docTrigramsStore.put([...newTrigrams], docId);
        writeReq.onsuccess = done;
        writeReq.onerror = done;
      };
      oldReq.onerror = () => reject(oldReq.error);
    });
  }

  /** Remove all indexed content for a document. */
  async remove(docId: string): Promise<void> {
    const db = await this.getDb();
    if (!db) return;

    return new Promise<void>((resolve, reject) => {
      const tx = db.transaction(["postings", "doc_trigrams"], "readwrite");
      tx.oncomplete = () => resolve();
      tx.onerror = () => reject(tx.error);

      const postings = tx.objectStore("postings");
      const docTrigramsStore = tx.objectStore("doc_trigrams");

      // Look up which trigrams this doc contributed, then strip docId from
      // each posting list (deleting lists that become empty).
      const oldReq = docTrigramsStore.get(docId);
      oldReq.onsuccess = () => {
        const oldTrigrams: string[] = oldReq.result ?? [];

        for (const trigram of oldTrigrams) {
          const req = postings.get(trigram);
          req.onsuccess = () => {
            const list: string[] = req.result ?? [];
            const updated = list.filter((id) => id !== docId);
            if (updated.length === 0) {
              postings.delete(trigram);
            } else {
              postings.put(updated, trigram);
            }
          };
        }

        docTrigramsStore.delete(docId);
      };
      oldReq.onerror = () => reject(oldReq.error);
    });
  }

  /**
   * Search for documents matching the query.
   * Returns results sorted by score (matching trigram count) descending.
   */
  async search(query: string, limit = 20): Promise<SearchResult[]> {
    const db = await this.getDb();
    if (!db) return [];

    const queryTrigrams = [...extractTrigrams(query)];
    if (queryTrigrams.length === 0) return [];

    return new Promise<SearchResult[]>((resolve, reject) => {
      const tx = db.transaction("postings", "readonly");
      const postings = tx.objectStore("postings");
      const scores = new Map<string, number>();
      // Count down outstanding trigram lookups; resolve when the last settles.
      let remaining = queryTrigrams.length;

      for (const trigram of queryTrigrams) {
        const req = postings.get(trigram);
        req.onsuccess = () => {
          const docIds: string[] = req.result ?? [];
          for (const docId of docIds) {
            scores.set(docId, (scores.get(docId) ?? 0) + 1);
          }
          remaining--;
          if (remaining === 0) {
            const results: SearchResult[] = [...scores.entries()]
              .map(([docId, score]) => ({ docId, score }))
              .sort((a, b) => b.score - a.score)
              .slice(0, limit);
            resolve(results);
          }
        };
        // A failed lookup counts as "no matches" for that trigram.
        // NOTE(review): this finalization block duplicates the one above.
        req.onerror = () => {
          remaining--;
          if (remaining === 0) {
            const results: SearchResult[] = [...scores.entries()]
              .map(([docId, score]) => ({ docId, score }))
              .sort((a, b) => b.score - a.score)
              .slice(0, limit);
            resolve(results);
          }
        };
      }

      tx.onerror = () => reject(tx.error);
    });
  }

  // ── Lifecycle ─────────────────────────────────────────────────────────────

  /**
   * Close the database handle.
   * NOTE(review): dbPromise is not reset, so a destroyed instance that is
   * reused would hand out the already-closed database.
   */
  destroy(): void {
    this.db?.close();
    this.db = null;
  }
}
|
package/src/auth.ts
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import * as encoding from "lib0/encoding";
|
|
2
|
+
import * as decoding from "lib0/decoding";
|
|
3
|
+
import type { AuthorizedScope } from "./types.ts";
|
|
4
|
+
|
|
5
|
+
/**
 * Wire-level discriminator written as the first varuint of every auth message.
 * Values are part of the wire format — do not renumber.
 */
export enum AuthMessageType {
  // Carries an auth token string; also sent bare by writeTokenSyncRequest,
  // and readAuthMessage treats any Token message as a request to send a token.
  Token = 0,
  // Access denied; payload is a reason string.
  PermissionDenied = 1,
  // Authentication succeeded; payload is the authorized scope string.
  Authenticated = 2,
}
|
|
10
|
+
|
|
11
|
+
export const writeAuthentication = (
|
|
12
|
+
encoder: encoding.Encoder,
|
|
13
|
+
auth: string,
|
|
14
|
+
) => {
|
|
15
|
+
encoding.writeVarUint(encoder, AuthMessageType.Token);
|
|
16
|
+
encoding.writeVarString(encoder, auth);
|
|
17
|
+
};
|
|
18
|
+
|
|
19
|
+
export const writePermissionDenied = (
|
|
20
|
+
encoder: encoding.Encoder,
|
|
21
|
+
reason: string,
|
|
22
|
+
) => {
|
|
23
|
+
encoding.writeVarUint(encoder, AuthMessageType.PermissionDenied);
|
|
24
|
+
encoding.writeVarString(encoder, reason);
|
|
25
|
+
};
|
|
26
|
+
|
|
27
|
+
export const writeAuthenticated = (
|
|
28
|
+
encoder: encoding.Encoder,
|
|
29
|
+
scope: AuthorizedScope,
|
|
30
|
+
) => {
|
|
31
|
+
encoding.writeVarUint(encoder, AuthMessageType.Authenticated);
|
|
32
|
+
encoding.writeVarString(encoder, scope);
|
|
33
|
+
};
|
|
34
|
+
|
|
35
|
+
export const writeTokenSyncRequest = (
|
|
36
|
+
encoder: encoding.Encoder,
|
|
37
|
+
) => {
|
|
38
|
+
encoding.writeVarUint(encoder, AuthMessageType.Token);
|
|
39
|
+
};
|
|
40
|
+
|
|
41
|
+
export const readAuthMessage = (
|
|
42
|
+
decoder: decoding.Decoder,
|
|
43
|
+
sendToken: () => void,
|
|
44
|
+
permissionDeniedHandler: (reason: string) => void,
|
|
45
|
+
authenticatedHandler: (scope: string) => void,
|
|
46
|
+
) => {
|
|
47
|
+
switch (decoding.readVarUint(decoder)) {
|
|
48
|
+
case AuthMessageType.Token: {
|
|
49
|
+
sendToken();
|
|
50
|
+
break;
|
|
51
|
+
}
|
|
52
|
+
case AuthMessageType.PermissionDenied: {
|
|
53
|
+
permissionDeniedHandler(decoding.readVarString(decoder));
|
|
54
|
+
break;
|
|
55
|
+
}
|
|
56
|
+
case AuthMessageType.Authenticated: {
|
|
57
|
+
authenticatedHandler(decoding.readVarString(decoder));
|
|
58
|
+
break;
|
|
59
|
+
}
|
|
60
|
+
default:
|
|
61
|
+
}
|
|
62
|
+
};
|