@delma/fylo 1.1.2 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +141 -62
- package/eslint.config.js +8 -4
- package/package.json +9 -7
- package/src/CLI +16 -14
- package/src/adapters/cipher.ts +12 -6
- package/src/adapters/redis.ts +193 -123
- package/src/adapters/s3.ts +6 -12
- package/src/core/collection.ts +5 -0
- package/src/core/directory.ts +120 -151
- package/src/core/extensions.ts +4 -2
- package/src/core/format.ts +390 -419
- package/src/core/parser.ts +167 -142
- package/src/core/query.ts +31 -26
- package/src/core/walker.ts +68 -61
- package/src/core/write-queue.ts +7 -4
- package/src/engines/s3-files.ts +1068 -0
- package/src/engines/types.ts +21 -0
- package/src/index.ts +754 -378
- package/src/migrate-cli.ts +22 -0
- package/src/migrate.ts +74 -0
- package/src/types/bun-runtime.d.ts +73 -0
- package/src/types/fylo.d.ts +115 -27
- package/src/types/node-runtime.d.ts +61 -0
- package/src/types/query.d.ts +6 -2
- package/src/types/vendor-modules.d.ts +8 -7
- package/src/worker.ts +7 -1
- package/src/workers/write-worker.ts +25 -24
- package/tests/collection/truncate.test.js +35 -0
- package/tests/{data.ts → data.js} +8 -21
- package/tests/{index.ts → index.js} +4 -9
- package/tests/integration/aws-s3-files.canary.test.js +22 -0
- package/tests/integration/{create.test.ts → create.test.js} +13 -31
- package/tests/integration/delete.test.js +95 -0
- package/tests/integration/{edge-cases.test.ts → edge-cases.test.js} +50 -124
- package/tests/integration/{encryption.test.ts → encryption.test.js} +20 -65
- package/tests/integration/{export.test.ts → export.test.js} +8 -23
- package/tests/integration/{join-modes.test.ts → join-modes.test.js} +37 -104
- package/tests/integration/migration.test.js +38 -0
- package/tests/integration/nested.test.js +142 -0
- package/tests/integration/operators.test.js +122 -0
- package/tests/integration/{queue.test.ts → queue.test.js} +24 -40
- package/tests/integration/read.test.js +119 -0
- package/tests/integration/rollback.test.js +60 -0
- package/tests/integration/s3-files.test.js +192 -0
- package/tests/integration/update.test.js +99 -0
- package/tests/mocks/{cipher.ts → cipher.js} +11 -26
- package/tests/mocks/redis.js +123 -0
- package/tests/mocks/{s3.ts → s3.js} +24 -58
- package/tests/schemas/album.json +1 -1
- package/tests/schemas/comment.json +1 -1
- package/tests/schemas/photo.json +1 -1
- package/tests/schemas/post.json +1 -1
- package/tests/schemas/tip.json +1 -1
- package/tests/schemas/todo.json +1 -1
- package/tests/schemas/user.d.ts +12 -12
- package/tests/schemas/user.json +1 -1
- package/tsconfig.json +4 -2
- package/tsconfig.typecheck.json +31 -0
- package/tests/collection/truncate.test.ts +0 -56
- package/tests/integration/delete.test.ts +0 -147
- package/tests/integration/nested.test.ts +0 -212
- package/tests/integration/operators.test.ts +0 -167
- package/tests/integration/read.test.ts +0 -203
- package/tests/integration/rollback.test.ts +0 -105
- package/tests/integration/update.test.ts +0 -130
- package/tests/mocks/redis.ts +0 -169
|
@@ -0,0 +1,1068 @@
|
|
|
1
|
+
import { mkdir, readFile, readdir, rm, stat, writeFile, open } from 'node:fs/promises'
|
|
2
|
+
import path from 'node:path'
|
|
3
|
+
import { createHash } from 'node:crypto'
|
|
4
|
+
import { Database } from 'bun:sqlite'
|
|
5
|
+
import type { SQLQueryBindings } from 'bun:sqlite'
|
|
6
|
+
import TTID from '@delma/ttid'
|
|
7
|
+
import { Dir } from '../core/directory'
|
|
8
|
+
import { validateCollectionName } from '../core/collection'
|
|
9
|
+
import { Cipher } from '../adapters/cipher'
|
|
10
|
+
import type { EventBus, FyloStorageEngineKind, LockManager, StorageEngine } from './types'
|
|
11
|
+
|
|
12
|
+
/** A batch of fetched documents keyed by their TTID. */
type FyloRecord<T extends Record<string, any>> = Record<_ttid, T>

/**
 * Union of every shape a query can resolve to: a lone id ($onlyIds),
 * id→doc map, group→ids map ($groupby + $onlyIds), group→(id→partial doc)
 * map ($groupby), or id→partial doc map ($select/$rename projections).
 */
type S3FilesQueryResult<T extends Record<string, any>> =
    | _ttid
    | FyloRecord<T>
    | Record<string, _ttid[]>
    | Record<string, Record<_ttid, Partial<T>>>
    | Record<_ttid, Partial<T>>

/** One line of the append-only change journal produced/consumed by the event bus. */
type S3FilesEvent<T extends Record<string, any>> = {
    ts: number
    action: 'insert' | 'delete'
    id: _ttid
    doc?: T
}

/** On-disk JSON envelope for a document (timestamps derive from the TTID). */
type StoredDoc<T extends Record<string, any>> = {
    id: _ttid
    createdAt: number
    updatedAt: number
    data: T
}
|
|
34
|
+
|
|
35
|
+
class FilesystemStorage implements StorageEngine {
|
|
36
|
+
async read(target: string): Promise<string> {
|
|
37
|
+
return await readFile(target, 'utf8')
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
async write(target: string, data: string): Promise<void> {
|
|
41
|
+
await mkdir(path.dirname(target), { recursive: true })
|
|
42
|
+
await writeFile(target, data, 'utf8')
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
async delete(target: string): Promise<void> {
|
|
46
|
+
await rm(target, { recursive: true, force: true })
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
async list(target: string): Promise<string[]> {
|
|
50
|
+
const results: string[] = []
|
|
51
|
+
|
|
52
|
+
try {
|
|
53
|
+
const entries = await readdir(target, { withFileTypes: true })
|
|
54
|
+
for (const entry of entries) {
|
|
55
|
+
const child = path.join(target, entry.name)
|
|
56
|
+
if (entry.isDirectory()) {
|
|
57
|
+
results.push(...(await this.list(child)))
|
|
58
|
+
} else {
|
|
59
|
+
results.push(child)
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
} catch (err) {
|
|
63
|
+
if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
return results
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
async mkdir(target: string): Promise<void> {
|
|
70
|
+
await mkdir(target, { recursive: true })
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
async rmdir(target: string): Promise<void> {
|
|
74
|
+
await rm(target, { recursive: true, force: true })
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
async exists(target: string): Promise<boolean> {
|
|
78
|
+
try {
|
|
79
|
+
await stat(target)
|
|
80
|
+
return true
|
|
81
|
+
} catch (err) {
|
|
82
|
+
if ((err as NodeJS.ErrnoException).code === 'ENOENT') return false
|
|
83
|
+
throw err
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
/**
 * Directory-based advisory lock: mkdir without `recursive` is atomic, so the
 * process that successfully creates `<docId>.lock/` owns the lock. Owner id
 * and acquisition time are recorded in meta.json inside the lock directory.
 */
class FilesystemLockManager implements LockManager {
    constructor(
        private readonly root: string,
        private readonly storage: StorageEngine
    ) {}

    // <root>/<collection>/.fylo/locks/<docId>.lock — one lock dir per document.
    private lockDir(collection: string, docId: _ttid) {
        return path.join(this.root, collection, '.fylo', 'locks', `${docId}.lock`)
    }

    /**
     * Try to acquire the per-document lock.
     * @returns true on success (fresh acquisition, or a steal after the
     *          holder's TTL expired or its metadata was unreadable); false
     *          when a live owner still holds it.
     * NOTE(review): the bare catch steals the lock on ANY read/parse failure
     * of meta.json — including the window where another process has created
     * the directory but not yet written its metadata — and the rmdir+mkdir
     * steal sequence is not atomic, so two stealers can both return true.
     * Confirm this race is acceptable for the intended workload.
     */
    async acquire(
        collection: string,
        docId: _ttid,
        owner: string,
        ttlMs: number = 30_000
    ): Promise<boolean> {
        const dir = this.lockDir(collection, docId)
        const metaPath = path.join(dir, 'meta.json')
        await mkdir(path.dirname(dir), { recursive: true })

        // Fast path: atomically create the lock directory.
        try {
            await mkdir(dir, { recursive: false })
            await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
            return true
        } catch (err) {
            if ((err as NodeJS.ErrnoException).code !== 'EEXIST') throw err
        }

        // Lock already exists: steal it when expired or when meta is unreadable.
        try {
            const meta = JSON.parse(await this.storage.read(metaPath)) as { ts?: number }
            if (meta.ts && Date.now() - meta.ts > ttlMs) {
                await this.storage.rmdir(dir)
                await mkdir(dir, { recursive: false })
                await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
                return true
            }
        } catch {
            await this.storage.rmdir(dir)
            await mkdir(dir, { recursive: false })
            await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
            return true
        }

        return false
    }

    /**
     * Release the lock, but only when `owner` matches the recorded holder.
     * A missing lock is a no-op; other read errors propagate.
     */
    async release(collection: string, docId: _ttid, owner: string): Promise<void> {
        const dir = this.lockDir(collection, docId)
        const metaPath = path.join(dir, 'meta.json')

        try {
            const meta = JSON.parse(await this.storage.read(metaPath)) as { owner?: string }
            if (meta.owner === owner) await this.storage.rmdir(dir)
        } catch (err) {
            if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
        }
    }
}
|
|
146
|
+
|
|
147
|
+
class FilesystemEventBus<T extends Record<string, any>> implements EventBus<S3FilesEvent<T>> {
|
|
148
|
+
constructor(
|
|
149
|
+
private readonly root: string,
|
|
150
|
+
private readonly storage: StorageEngine
|
|
151
|
+
) {}
|
|
152
|
+
|
|
153
|
+
private journalPath(collection: string) {
|
|
154
|
+
return path.join(this.root, collection, '.fylo', 'events', `${collection}.ndjson`)
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
async publish(collection: string, event: S3FilesEvent<T>): Promise<void> {
|
|
158
|
+
const target = this.journalPath(collection)
|
|
159
|
+
await mkdir(path.dirname(target), { recursive: true })
|
|
160
|
+
const line = `${JSON.stringify(event)}\n`
|
|
161
|
+
const handle = await open(target, 'a')
|
|
162
|
+
try {
|
|
163
|
+
await handle.write(line)
|
|
164
|
+
} finally {
|
|
165
|
+
await handle.close()
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
async *listen(collection: string): AsyncGenerator<S3FilesEvent<T>, void, unknown> {
|
|
170
|
+
const target = this.journalPath(collection)
|
|
171
|
+
let position = 0
|
|
172
|
+
|
|
173
|
+
while (true) {
|
|
174
|
+
try {
|
|
175
|
+
const fileStat = await stat(target)
|
|
176
|
+
if (fileStat.size > position) {
|
|
177
|
+
const handle = await open(target, 'r')
|
|
178
|
+
try {
|
|
179
|
+
const size = fileStat.size - position
|
|
180
|
+
const buffer = Buffer.alloc(size)
|
|
181
|
+
await handle.read(buffer, 0, size, position)
|
|
182
|
+
position = fileStat.size
|
|
183
|
+
|
|
184
|
+
for (const line of buffer.toString('utf8').split('\n')) {
|
|
185
|
+
if (line.trim().length === 0) continue
|
|
186
|
+
yield JSON.parse(line) as S3FilesEvent<T>
|
|
187
|
+
}
|
|
188
|
+
} finally {
|
|
189
|
+
await handle.close()
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
} catch (err) {
|
|
193
|
+
if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
await Bun.sleep(100)
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
export class S3FilesEngine {
|
|
202
|
+
readonly kind: FyloStorageEngineKind = 's3-files'
|
|
203
|
+
|
|
204
|
+
private readonly databases = new Map<string, Database>()
|
|
205
|
+
|
|
206
|
+
private readonly storage: StorageEngine
|
|
207
|
+
private readonly locks: LockManager
|
|
208
|
+
private readonly events: EventBus<Record<string, any>>
|
|
209
|
+
|
|
210
|
+
    /**
     * @param root Base directory holding one subdirectory per collection.
     *             Defaults to $FYLO_S3FILES_ROOT, then '/mnt/fylo'.
     */
    constructor(readonly root: string = process.env.FYLO_S3FILES_ROOT ?? '/mnt/fylo') {
        this.storage = new FilesystemStorage()
        this.locks = new FilesystemLockManager(this.root, this.storage)
        this.events = new FilesystemEventBus<Record<string, any>>(this.root, this.storage)
    }
|
|
215
|
+
|
|
216
|
+
    // <root>/<collection>; rejects unsafe names before touching the filesystem.
    private collectionRoot(collection: string) {
        validateCollectionName(collection)
        return path.join(this.root, collection)
    }
|
|
220
|
+
|
|
221
|
+
    // Document payloads live under <collection>/.fylo/docs.
    private docsRoot(collection: string) {
        return path.join(this.collectionRoot(collection), '.fylo', 'docs')
    }
|
|
224
|
+
|
|
225
|
+
    // Per-collection metadata directory (<collection>/.fylo).
    private metaRoot(collection: string) {
        return path.join(this.collectionRoot(collection), '.fylo')
    }
|
|
228
|
+
|
|
229
|
+
    // SQLite index database file for the collection (<collection>/.fylo/index.db).
    private indexDbPath(collection: string) {
        return path.join(this.metaRoot(collection), 'index.db')
    }
|
|
232
|
+
|
|
233
|
+
    // Shard documents into buckets keyed by the first two id characters so no
    // single directory grows unbounded.
    private docPath(collection: string, docId: _ttid) {
        return path.join(this.docsRoot(collection), docId.slice(0, 2), `${docId}.json`)
    }
|
|
236
|
+
|
|
237
|
+
    // Hex SHA-256, used as the equality key for indexed values.
    private hash(value: string) {
        return createHash('sha256').update(value).digest('hex')
    }
|
|
240
|
+
|
|
241
|
+
    /**
     * Lazily open (and memoize) the per-collection SQLite index.
     * doc_index_entries maps each indexed field value of a document to its
     * hash (equality lookups) and, when numeric, a numeric rendering (range
     * lookups). Schema creation is idempotent.
     */
    private database(collection: string) {
        const existing = this.databases.get(collection)
        if (existing) return existing

        const db = new Database(this.indexDbPath(collection))
        db.exec(`
            CREATE TABLE IF NOT EXISTS doc_index_entries (
                doc_id TEXT NOT NULL,
                field_path TEXT NOT NULL,
                value_hash TEXT NOT NULL,
                raw_value TEXT NOT NULL,
                value_type TEXT NOT NULL,
                numeric_value REAL,
                PRIMARY KEY (doc_id, field_path, value_hash)
            );

            CREATE INDEX IF NOT EXISTS idx_doc_index_entries_field_hash
                ON doc_index_entries (field_path, value_hash);

            CREATE INDEX IF NOT EXISTS idx_doc_index_entries_field_numeric
                ON doc_index_entries (field_path, numeric_value);
        `)
        this.databases.set(collection, db)
        return db
    }
|
|
266
|
+
|
|
267
|
+
    // Close and forget the cached SQLite handle (no-op when not open).
    private closeDatabase(collection: string) {
        const db = this.databases.get(collection)
        if (db) {
            db.close()
            this.databases.delete(collection)
        }
    }
|
|
274
|
+
|
|
275
|
+
    /**
     * Derive the index columns for one raw (still '%2F'-escaped) value: its
     * hash, the runtime type of the parsed form, and a numeric rendering for
     * range queries (null when not numeric).
     * NOTE(review): Number('') === 0, so empty strings are indexed with
     * numeric_value 0 and can surface in numeric range scans — confirm intended.
     */
    private normalizeIndexValue(rawValue: string) {
        const parsed = Dir.parseValue(rawValue.replaceAll('%2F', '/'))
        const numeric = typeof parsed === 'number' ? parsed : Number(parsed)
        return {
            rawValue,
            valueHash: this.hash(rawValue),
            valueType: typeof parsed,
            numericValue: Number.isNaN(numeric) ? null : numeric
        }
    }
|
|
285
|
+
|
|
286
|
+
    // Create the collection's directory skeleton and warm the SQLite index.
    private async ensureCollection(collection: string) {
        await this.storage.mkdir(this.collectionRoot(collection))
        await this.storage.mkdir(this.metaRoot(collection))
        await this.storage.mkdir(this.docsRoot(collection))
        this.database(collection)
    }
|
|
292
|
+
|
|
293
|
+
    /** Idempotently create a collection (directories + index database). */
    async createCollection(collection: string) {
        await this.ensureCollection(collection)
    }
|
|
296
|
+
|
|
297
|
+
    /** Delete a collection and everything in it, closing its index first. */
    async dropCollection(collection: string) {
        this.closeDatabase(collection)
        await this.storage.rmdir(this.collectionRoot(collection))
    }
|
|
301
|
+
|
|
302
|
+
    /** True when the collection's root directory exists on disk. */
    async hasCollection(collection: string) {
        return await this.storage.exists(this.collectionRoot(collection))
    }
|
|
305
|
+
|
|
306
|
+
    /**
     * Walk a document and encrypt every leaf whose slash-joined field path is
     * configured as encrypted for this collection. '/' inside values is
     * escaped to '%2F' first so field-path separators stay unambiguous.
     * Returns a deep copy; the input is never mutated.
     * NOTE(review): object items inside arrays recurse WITHOUT passing
     * `parentField`, so their nested paths restart at the item's own keys —
     * confirm this matches the path scheme Cipher.isEncryptedField expects.
     */
    private async encodeEncrypted<T extends Record<string, any>>(
        collection: string,
        value: T,
        parentField?: string
    ): Promise<T> {
        if (Array.isArray(value)) {
            // Scalar array items inherit the array's own field path.
            const encodedItems = await Promise.all(
                value.map(async (item) => {
                    if (item && typeof item === 'object')
                        return await this.encodeEncrypted(collection, item as Record<string, any>)
                    if (
                        parentField &&
                        Cipher.isConfigured() &&
                        Cipher.isEncryptedField(collection, parentField)
                    ) {
                        return await Cipher.encrypt(String(item).replaceAll('/', '%2F'), true)
                    }
                    return item
                })
            )
            return encodedItems as unknown as T
        }

        if (value && typeof value === 'object') {
            const copy: Record<string, any> = {}
            for (const field in value) {
                const nextField = parentField ? `${parentField}/${field}` : field
                const fieldValue = value[field]
                if (fieldValue && typeof fieldValue === 'object')
                    copy[field] = await this.encodeEncrypted(collection, fieldValue, nextField)
                else if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, nextField)) {
                    copy[field] = await Cipher.encrypt(
                        String(fieldValue).replaceAll('/', '%2F'),
                        true
                    )
                } else copy[field] = fieldValue
            }
            return copy as T
        }

        // Scalar root value: nothing to encode.
        return value
    }
|
|
348
|
+
|
|
349
|
+
    /**
     * Inverse of encodeEncrypted: walk the stored document, decrypt every
     * STRING leaf whose field path is configured as encrypted, unescape
     * '%2F' back to '/', and re-parse the plaintext into its original type.
     * Non-string leaves on encrypted paths pass through untouched.
     * Returns a deep copy; the input is never mutated.
     */
    private async decodeEncrypted<T extends Record<string, any>>(
        collection: string,
        value: T,
        parentField?: string
    ): Promise<T> {
        if (Array.isArray(value)) {
            const decodedItems = await Promise.all(
                value.map(async (item) => {
                    if (item && typeof item === 'object')
                        return await this.decodeEncrypted(collection, item as Record<string, any>)
                    if (
                        parentField &&
                        Cipher.isConfigured() &&
                        Cipher.isEncryptedField(collection, parentField) &&
                        typeof item === 'string'
                    ) {
                        return Dir.parseValue((await Cipher.decrypt(item)).replaceAll('%2F', '/'))
                    }
                    return item
                })
            )
            return decodedItems as unknown as T
        }

        if (value && typeof value === 'object') {
            const copy: Record<string, any> = {}
            for (const field in value) {
                const nextField = parentField ? `${parentField}/${field}` : field
                const fieldValue = value[field]
                if (fieldValue && typeof fieldValue === 'object')
                    copy[field] = await this.decodeEncrypted(collection, fieldValue, nextField)
                else if (
                    Cipher.isConfigured() &&
                    Cipher.isEncryptedField(collection, nextField) &&
                    typeof fieldValue === 'string'
                ) {
                    copy[field] = Dir.parseValue(
                        (await Cipher.decrypt(fieldValue)).replaceAll('%2F', '/')
                    )
                } else copy[field] = fieldValue
            }
            return copy as T
        }

        // Scalar root value: nothing to decode.
        return value
    }
|
|
395
|
+
|
|
396
|
+
    /**
     * Load one document envelope from disk, decrypting configured fields.
     * @returns null when the document file does not exist; other read or
     *          JSON parse failures propagate.
     */
    private async readStoredDoc<T extends Record<string, any>>(
        collection: string,
        docId: _ttid
    ): Promise<StoredDoc<T> | null> {
        const target = this.docPath(collection, docId)

        try {
            const raw = JSON.parse(await this.storage.read(target)) as StoredDoc<T>
            raw.data = await this.decodeEncrypted(collection, raw.data)
            return raw
        } catch (err) {
            if ((err as NodeJS.ErrnoException).code === 'ENOENT') return null
            throw err
        }
    }
|
|
411
|
+
|
|
412
|
+
    /**
     * Persist a document envelope. Timestamps are not tracked separately:
     * they are decoded from the TTID itself, so the id and its timestamps can
     * never disagree. Data is encrypted per collection config before writing.
     */
    private async writeStoredDoc<T extends Record<string, any>>(
        collection: string,
        docId: _ttid,
        data: T
    ) {
        await this.ensureCollection(collection)
        const encoded = await this.encodeEncrypted(collection, data)
        const { createdAt, updatedAt } = TTID.decodeTime(docId)
        const target = this.docPath(collection, docId)
        const record: StoredDoc<T> = {
            id: docId,
            createdAt,
            // Ids with no update component fall back to creation time.
            updatedAt: updatedAt ?? createdAt,
            data: encoded
        }
        await this.storage.write(target, JSON.stringify(record))
    }
|
|
429
|
+
|
|
430
|
+
    // Delete the document's JSON file (force removal: a missing file is fine).
    private async removeStoredDoc(collection: string, docId: _ttid) {
        await this.storage.delete(this.docPath(collection, docId))
    }
|
|
433
|
+
|
|
434
|
+
private async listDocIds(collection: string) {
|
|
435
|
+
const files = await this.storage.list(this.docsRoot(collection))
|
|
436
|
+
return files
|
|
437
|
+
.filter((file) => file.endsWith('.json'))
|
|
438
|
+
.map((file) => path.basename(file, '.json'))
|
|
439
|
+
.filter((key) => TTID.isTTID(key)) as _ttid[]
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
private getValueByPath(target: Record<string, any>, fieldPath: string) {
|
|
443
|
+
return fieldPath
|
|
444
|
+
.replaceAll('/', '.')
|
|
445
|
+
.split('.')
|
|
446
|
+
.reduce<any>(
|
|
447
|
+
(acc, key) => (acc === undefined || acc === null ? undefined : acc[key]),
|
|
448
|
+
target
|
|
449
|
+
)
|
|
450
|
+
}
|
|
451
|
+
|
|
452
|
+
    // Queries address nested fields with dots; the index stores '/' paths.
    private normalizeFieldPath(fieldPath: string) {
        return fieldPath.replaceAll('.', '/')
    }
|
|
455
|
+
|
|
456
|
+
    /**
     * Check a document against the query's $created/$updated range filters.
     * Both timestamps come from the TTID itself; an id without an update
     * component uses its creation time as updatedAt.
     */
    private matchesTimestamp(docId: _ttid, query?: _storeQuery<Record<string, any>>) {
        if (!query?.$created && !query?.$updated) return true
        const { createdAt, updatedAt } = TTID.decodeTime(docId)
        const timestamps = { createdAt, updatedAt: updatedAt ?? createdAt }

        // Every provided bound must hold; an absent range always passes.
        const match = (value: number, range?: _timestamp) => {
            if (!range) return true
            if (range.$gt !== undefined && !(value > range.$gt)) return false
            if (range.$gte !== undefined && !(value >= range.$gte)) return false
            if (range.$lt !== undefined && !(value < range.$lt)) return false
            if (range.$lte !== undefined && !(value <= range.$lte)) return false
            return true
        }

        return (
            match(timestamps.createdAt, query.$created) &&
            match(timestamps.updatedAt, query.$updated)
        )
    }
|
|
475
|
+
|
|
476
|
+
private likeToRegex(pattern: string) {
|
|
477
|
+
const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&').replaceAll('%', '.*')
|
|
478
|
+
return new RegExp(`^${escaped}$`)
|
|
479
|
+
}
|
|
480
|
+
|
|
481
|
+
    /**
     * In-memory evaluation of one operand against a document value.
     * Equality uses loose ==/!= — presumably deliberate so numeric strings
     * match their parsed-number counterparts (TODO confirm); range operators
     * coerce through Number().
     */
    private matchesOperand(value: unknown, operand: _operand) {
        if (operand.$eq !== undefined && value != operand.$eq) return false
        if (operand.$ne !== undefined && value == operand.$ne) return false
        if (operand.$gt !== undefined && !(Number(value) > operand.$gt)) return false
        if (operand.$gte !== undefined && !(Number(value) >= operand.$gte)) return false
        if (operand.$lt !== undefined && !(Number(value) < operand.$lt)) return false
        if (operand.$lte !== undefined && !(Number(value) <= operand.$lte)) return false
        // $like only ever matches string values.
        if (
            operand.$like !== undefined &&
            (typeof value !== 'string' || !this.likeToRegex(operand.$like).test(value))
        )
            return false
        // $contains requires an array containing a loosely-equal element.
        if (operand.$contains !== undefined) {
            if (!Array.isArray(value) || !value.some((item) => item == operand.$contains))
                return false
        }
        return true
    }
|
|
499
|
+
|
|
500
|
+
    /**
     * Encode a query operand exactly the way stored values are indexed:
     * escape '/' to '%2F' and, for encrypted fields, pass through
     * Cipher.encrypt (the `true` flag presumably selects a deterministic
     * mode so equal plaintexts hash identically — verify against Cipher),
     * then derive the hash/type/numeric index columns.
     */
    private async normalizeQueryValue(collection: string, fieldPath: string, value: unknown) {
        let rawValue = String(value).replaceAll('/', '%2F')
        if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldPath))
            rawValue = await Cipher.encrypt(rawValue, true)
        return this.normalizeIndexValue(rawValue)
    }
|
|
506
|
+
|
|
507
|
+
private intersectDocIds(current: Set<_ttid> | null, next: Iterable<_ttid>) {
|
|
508
|
+
const nextSet = next instanceof Set ? next : new Set(next)
|
|
509
|
+
if (current === null) return new Set(nextSet)
|
|
510
|
+
|
|
511
|
+
const intersection = new Set<_ttid>()
|
|
512
|
+
for (const docId of current) {
|
|
513
|
+
if (nextSet.has(docId)) intersection.add(docId)
|
|
514
|
+
}
|
|
515
|
+
return intersection
|
|
516
|
+
}
|
|
517
|
+
|
|
518
|
+
    /**
     * Run a doc_id-projecting SQL statement against the collection's SQLite
     * index and return the distinct ids as a Set.
     */
    private async queryDocIdsBySql(
        collection: string,
        sql: string,
        ...params: SQLQueryBindings[]
    ): Promise<Set<_ttid>> {
        const db = this.database(collection)
        const rows = db
            .query(sql)
            .all(...params)
            .map((row) => (row as { doc_id: _ttid }).doc_id)

        return new Set(rows)
    }
|
|
531
|
+
|
|
532
|
+
    /**
     * Use the SQLite index to pre-filter document ids for one field's operand.
     * Each indexable operator's result set is intersected (AND within one
     * operand). Returns null when the index cannot answer — encrypted field,
     * or no indexable operator present — so callers fall back to a full scan;
     * matchesQuery re-verifies everything in memory regardless.
     */
    private async candidateDocIdsForOperand(
        collection: string,
        fieldPath: string,
        operand: _operand
    ): Promise<Set<_ttid> | null> {
        // Encrypted fields are only usable via equality hashes computed in
        // normalizeQueryValue; skipping here keeps range/LIKE off ciphertext.
        if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldPath)) return null

        let candidateIds: Set<_ttid> | null = null

        if (operand.$eq !== undefined) {
            const normalized = await this.normalizeQueryValue(collection, fieldPath, operand.$eq)
            candidateIds = this.intersectDocIds(
                candidateIds,
                await this.queryDocIdsBySql(
                    collection,
                    `SELECT DISTINCT doc_id
                     FROM doc_index_entries
                     WHERE field_path = ? AND value_hash = ?`,
                    fieldPath,
                    normalized.valueHash
                )
            )
        }

        // All range operators share one statement over numeric_value.
        if (
            operand.$gt !== undefined ||
            operand.$gte !== undefined ||
            operand.$lt !== undefined ||
            operand.$lte !== undefined
        ) {
            const clauses = ['field_path = ?']
            const params: SQLQueryBindings[] = [fieldPath]
            if (operand.$gt !== undefined) {
                clauses.push('numeric_value > ?')
                params.push(operand.$gt)
            }
            if (operand.$gte !== undefined) {
                clauses.push('numeric_value >= ?')
                params.push(operand.$gte)
            }
            if (operand.$lt !== undefined) {
                clauses.push('numeric_value < ?')
                params.push(operand.$lt)
            }
            if (operand.$lte !== undefined) {
                clauses.push('numeric_value <= ?')
                params.push(operand.$lte)
            }

            candidateIds = this.intersectDocIds(
                candidateIds,
                await this.queryDocIdsBySql(
                    collection,
                    `SELECT DISTINCT doc_id
                     FROM doc_index_entries
                     WHERE ${clauses.join(' AND ')}`,
                    ...params
                )
            )
        }

        if (operand.$like !== undefined) {
            candidateIds = this.intersectDocIds(
                candidateIds,
                await this.queryDocIdsBySql(
                    collection,
                    `SELECT DISTINCT doc_id
                     FROM doc_index_entries
                     WHERE field_path = ? AND value_type = 'string' AND raw_value LIKE ?`,
                    fieldPath,
                    // Values are stored '/'-escaped; escape the pattern likewise.
                    operand.$like.replaceAll('/', '%2F')
                )
            )
        }

        if (operand.$contains !== undefined) {
            const normalized = await this.normalizeQueryValue(
                collection,
                fieldPath,
                operand.$contains
            )
            // Array elements index under the array's own path or a deeper
            // child path — hence the `field_path LIKE 'path/%'` alternative.
            candidateIds = this.intersectDocIds(
                candidateIds,
                await this.queryDocIdsBySql(
                    collection,
                    `SELECT DISTINCT doc_id
                     FROM doc_index_entries
                     WHERE (field_path = ? OR field_path LIKE ?)
                     AND value_hash = ?`,
                    fieldPath,
                    `${fieldPath}/%`,
                    normalized.valueHash
                )
            )
        }

        return candidateIds
    }
|
|
630
|
+
|
|
631
|
+
    /**
     * Index pre-filter for one $ops entry (fields inside an operation are
     * ANDed). Fields the index cannot answer are simply skipped — matchesQuery
     * re-checks every operator in memory afterwards. Returns null when no
     * field was index-answerable at all.
     */
    private async candidateDocIdsForOperation<T extends Record<string, any>>(
        collection: string,
        operation: _op<T>
    ): Promise<Set<_ttid> | null> {
        let candidateIds: Set<_ttid> | null = null

        for (const [field, operand] of Object.entries(operation) as Array<[keyof T, _operand]>) {
            if (!operand) continue

            const fieldPath = this.normalizeFieldPath(String(field))
            const fieldCandidates = await this.candidateDocIdsForOperand(
                collection,
                fieldPath,
                operand
            )

            if (fieldCandidates === null) continue
            candidateIds = this.intersectDocIds(candidateIds, fieldCandidates)
        }

        return candidateIds
    }
|
|
653
|
+
|
|
654
|
+
    /**
     * Index pre-filter for a whole query: $ops entries are ORed, so their
     * candidate sets are unioned. Returns null ("scan everything") when there
     * are no $ops, or when ANY single operation is un-indexable — a partial
     * union would wrongly drop that operation's matches.
     */
    private async candidateDocIdsForQuery<T extends Record<string, any>>(
        collection: string,
        query?: _storeQuery<T>
    ): Promise<Set<_ttid> | null> {
        if (!query?.$ops || query.$ops.length === 0) return null

        const union = new Set<_ttid>()
        let usedIndex = false

        for (const operation of query.$ops) {
            const candidateIds = await this.candidateDocIdsForOperation(collection, operation)
            if (candidateIds === null) return null
            usedIndex = true
            for (const docId of candidateIds) union.add(docId)
        }

        return usedIndex ? union : null
    }
|
|
672
|
+
|
|
673
|
+
    /**
     * Authoritative in-memory match: TTID-derived timestamp filters first,
     * then $ops with OR-of-AND semantics (a document matches when every field
     * operand of at least one operation matches). No $ops means "match all".
     */
    private matchesQuery<T extends Record<string, any>>(
        docId: _ttid,
        doc: T,
        query?: _storeQuery<T>
    ) {
        if (!this.matchesTimestamp(docId, query as _storeQuery<Record<string, any>> | undefined))
            return false
        if (!query?.$ops || query.$ops.length === 0) return true

        return query.$ops.some((operation) => {
            for (const field in operation) {
                const value = this.getValueByPath(doc, field)
                if (!this.matchesOperand(value, operation[field as keyof T]!)) return false
            }
            return true
        })
    }
|
|
690
|
+
|
|
691
|
+
private selectValues<T extends Record<string, any>>(selection: Array<keyof T>, data: T) {
|
|
692
|
+
const copy = { ...data }
|
|
693
|
+
for (const field in copy) {
|
|
694
|
+
if (!selection.includes(field as keyof T)) delete copy[field]
|
|
695
|
+
}
|
|
696
|
+
return copy
|
|
697
|
+
}
|
|
698
|
+
|
|
699
|
+
private renameFields<T extends Record<string, any>>(
|
|
700
|
+
rename: Record<keyof Partial<T>, string>,
|
|
701
|
+
data: T
|
|
702
|
+
) {
|
|
703
|
+
const copy = { ...data }
|
|
704
|
+
for (const field in copy) {
|
|
705
|
+
if (rename[field]) {
|
|
706
|
+
copy[rename[field]] = copy[field]
|
|
707
|
+
delete copy[field]
|
|
708
|
+
}
|
|
709
|
+
}
|
|
710
|
+
return copy
|
|
711
|
+
}
|
|
712
|
+
|
|
713
|
+
private processDoc<T extends Record<string, any>>(
|
|
714
|
+
doc: FyloRecord<T>,
|
|
715
|
+
query?: _storeQuery<T>
|
|
716
|
+
): S3FilesQueryResult<T> | undefined {
|
|
717
|
+
if (Object.keys(doc).length === 0) return
|
|
718
|
+
|
|
719
|
+
const next = { ...doc }
|
|
720
|
+
|
|
721
|
+
for (let [_id, data] of Object.entries(next)) {
|
|
722
|
+
if (query?.$select?.length)
|
|
723
|
+
data = this.selectValues(query.$select as Array<keyof T>, data)
|
|
724
|
+
if (query?.$rename) data = this.renameFields(query.$rename, data)
|
|
725
|
+
next[_id as _ttid] = data as T
|
|
726
|
+
}
|
|
727
|
+
|
|
728
|
+
if (query?.$groupby) {
|
|
729
|
+
const docGroup: Record<string, Record<string, Partial<T>>> = {}
|
|
730
|
+
for (const [id, data] of Object.entries(next)) {
|
|
731
|
+
const groupValue = data[query.$groupby] as string
|
|
732
|
+
if (groupValue) {
|
|
733
|
+
const groupData = { ...data }
|
|
734
|
+
delete groupData[query.$groupby]
|
|
735
|
+
docGroup[groupValue] = { [id]: groupData as Partial<T> }
|
|
736
|
+
}
|
|
737
|
+
}
|
|
738
|
+
|
|
739
|
+
if (query.$onlyIds) {
|
|
740
|
+
const groupedIds: Record<string, _ttid[]> = {}
|
|
741
|
+
for (const group in docGroup)
|
|
742
|
+
groupedIds[group] = Object.keys(docGroup[group]) as _ttid[]
|
|
743
|
+
return groupedIds
|
|
744
|
+
}
|
|
745
|
+
|
|
746
|
+
return docGroup
|
|
747
|
+
}
|
|
748
|
+
|
|
749
|
+
if (query?.$onlyIds) return Object.keys(next).shift() as _ttid
|
|
750
|
+
|
|
751
|
+
return next
|
|
752
|
+
}
|
|
753
|
+
|
|
754
|
+
/**
 * Loads every document of `collection` that matches `query`, honouring
 * `$limit`. Candidate ids from the index layer are used when available;
 * otherwise every stored doc id is scanned.
 */
private async docResults<T extends Record<string, any>>(
  collection: string,
  query?: _storeQuery<T>
) {
  const indexed = await this.candidateDocIdsForQuery(collection, query)
  const docIds = indexed ? [...indexed] : await this.listDocIds(collection)
  const max = query?.$limit
  const matches: Array<FyloRecord<T>> = []

  for (const docId of docIds) {
    const stored = await this.readStoredDoc<T>(collection, docId)
    // Skip ids that no longer resolve to a document or fail the filter.
    if (!stored || !this.matchesQuery(docId, stored.data, query)) continue
    matches.push({ [docId]: stored.data } as FyloRecord<T>)
    if (max && matches.length >= max) break
  }

  return matches
}
|
|
773
|
+
|
|
774
|
+
/**
 * Upserts all index entries for `doc` into the collection's SQLite index.
 * Every logical key extracted by Dir.extractKeys becomes one row in
 * doc_index_entries; the whole batch runs inside a single transaction.
 */
private async rebuildIndexes<T extends Record<string, any>>(
  collection: string,
  docId: _ttid,
  doc: T
) {
  const extracted = await Dir.extractKeys(collection, docId, doc)
  const db = this.database(collection)
  const upsert = db.query(`
    INSERT OR REPLACE INTO doc_index_entries
    (doc_id, field_path, value_hash, raw_value, value_type, numeric_value)
    VALUES (?, ?, ?, ?, ?, ?)
  `)

  const writeAll = db.transaction((logicalKeys: string[]) => {
    for (const logicalKey of logicalKeys) {
      // Key shape (from the slice offsets): everything up to the last two
      // '/'-segments is the field path, the second-to-last segment is the
      // raw value — presumably the final segment is the doc id; see
      // Dir.extractKeys for the authoritative layout.
      const parts = logicalKey.split('/')
      const fieldPath = parts.slice(0, -2).join('/')
      const raw = parts.at(-2) ?? ''
      const entry = this.normalizeIndexValue(raw)
      upsert.run(
        docId,
        fieldPath,
        entry.valueHash,
        entry.rawValue,
        entry.valueType,
        entry.numericValue
      )
    }
  })

  writeAll(extracted.indexes)
}
|
|
806
|
+
|
|
807
|
+
/**
 * Deletes all index entries belonging to `doc` from the collection's SQLite
 * index, matching each entry by (doc_id, field_path, value_hash) inside one
 * transaction. Mirrors the key parsing done by rebuildIndexes.
 */
private async removeIndexes<T extends Record<string, any>>(
  collection: string,
  docId: _ttid,
  doc: T
) {
  const extracted = await Dir.extractKeys(collection, docId, doc)
  const db = this.database(collection)
  const erase = db.query(`
    DELETE FROM doc_index_entries
    WHERE doc_id = ? AND field_path = ? AND value_hash = ?
  `)

  const eraseAll = db.transaction((logicalKeys: string[]) => {
    for (const logicalKey of logicalKeys) {
      const parts = logicalKey.split('/')
      // Same key layout as rebuildIndexes: path segments, then raw value,
      // then (presumably) the doc id.
      const fieldPath = parts.slice(0, -2).join('/')
      const raw = parts.at(-2) ?? ''
      erase.run(docId, fieldPath, this.hash(raw))
    }
  })

  eraseAll(extracted.indexes)
}
|
|
830
|
+
|
|
831
|
+
/**
 * Writes `doc` under `docId`, refreshes its index entries and publishes an
 * 'insert' event — all while holding a per-document lock.
 * @throws Error when the filesystem lock cannot be acquired.
 */
async putDocument<T extends Record<string, any>>(collection: string, docId: _ttid, doc: T) {
  // Unique owner token so only this call can release the lock it took.
  const lockOwner = Bun.randomUUIDv7()
  const acquired = await this.locks.acquire(collection, docId, lockOwner)
  if (!acquired) throw new Error(`Unable to acquire filesystem lock for ${docId}`)

  try {
    await this.writeStoredDoc(collection, docId, doc)
    await this.rebuildIndexes(collection, docId, doc)
    await this.events.publish(collection, {
      ts: Date.now(),
      action: 'insert',
      id: docId,
      doc
    })
  } finally {
    await this.locks.release(collection, docId, lockOwner)
  }
}
|
|
849
|
+
|
|
850
|
+
/**
 * Applies `patch` over the existing document at `oldId` and re-stores the
 * merged result under `newId` (which may equal `oldId`). Observers see a
 * 'delete' event for the old record followed by an 'insert' for the new one.
 * Returns the id now holding the document — `oldId` when nothing existed.
 * @throws Error when the filesystem lock cannot be acquired.
 */
async patchDocument<T extends Record<string, any>>(
  collection: string,
  oldId: _ttid,
  newId: _ttid,
  patch: Partial<T>,
  oldDoc?: T
) {
  const lockOwner = Bun.randomUUIDv7()
  const acquired = await this.locks.acquire(collection, oldId, lockOwner)
  if (!acquired) throw new Error(`Unable to acquire filesystem lock for ${oldId}`)

  try {
    // The caller may pass the current document to save one read.
    const current = oldDoc ?? (await this.readStoredDoc<T>(collection, oldId))?.data
    if (!current) return oldId

    const merged = { ...current, ...patch } as T
    // Tear the old record down first (indexes, storage, delete event)...
    await this.removeIndexes(collection, oldId, current)
    await this.removeStoredDoc(collection, oldId)
    await this.events.publish(collection, {
      ts: Date.now(),
      action: 'delete',
      id: oldId,
      doc: current
    })
    // ...then materialise the merged document under its (possibly new) id.
    await this.writeStoredDoc(collection, newId, merged)
    await this.rebuildIndexes(collection, newId, merged)
    await this.events.publish(collection, {
      ts: Date.now(),
      action: 'insert',
      id: newId,
      doc: merged
    })
    return newId
  } finally {
    await this.locks.release(collection, oldId, lockOwner)
  }
}
|
|
887
|
+
|
|
888
|
+
/**
 * Removes the document at `docId` together with its index entries and
 * publishes a 'delete' event; a no-op when the document does not exist.
 * @throws Error when the filesystem lock cannot be acquired.
 */
async deleteDocument<T extends Record<string, any>>(collection: string, docId: _ttid) {
  const lockOwner = Bun.randomUUIDv7()
  const acquired = await this.locks.acquire(collection, docId, lockOwner)
  if (!acquired) throw new Error(`Unable to acquire filesystem lock for ${docId}`)

  try {
    const stored = await this.readStoredDoc<T>(collection, docId)
    if (!stored) return
    await this.removeIndexes(collection, docId, stored.data)
    await this.removeStoredDoc(collection, docId)
    await this.events.publish(collection, {
      ts: Date.now(),
      action: 'delete',
      id: docId,
      doc: stored.data
    })
  } finally {
    await this.locks.release(collection, docId, lockOwner)
  }
}
|
|
908
|
+
|
|
909
|
+
/**
 * Returns a handle for one document: `once()` reads its current state,
 * the async iterator emits the current state then every subsequent insert
 * for this id, and `onDelete()` emits the id whenever it is deleted.
 * With `onlyId` the iterator yields bare ids instead of records.
 */
getDoc<T extends Record<string, any>>(
  collection: string,
  docId: _ttid,
  onlyId: boolean = false
) {
  const engine = this

  // Shared snapshot reader — used both by `once` and the live iterator.
  const readOnce = async () => {
    const stored = await engine.readStoredDoc<T>(collection, docId)
    return stored ? ({ [docId]: stored.data } as FyloRecord<T>) : {}
  }

  return {
    async *[Symbol.asyncIterator]() {
      // Emit the current state first, if the document exists...
      const current = await readOnce()
      if (Object.keys(current).length > 0) yield onlyId ? Object.keys(current).shift()! : current

      // ...then stream every later insert targeting this id.
      for await (const event of engine.events.listen(collection)) {
        if (event.action !== 'insert' || event.id !== docId || !event.doc) continue
        yield onlyId ? event.id : ({ [event.id]: event.doc } as FyloRecord<T>)
      }
    },
    once: readOnce,
    async *onDelete() {
      for await (const event of engine.events.listen(collection)) {
        if (event.action === 'delete' && event.id === docId) yield event.id
      }
    }
  }
}
|
|
937
|
+
|
|
938
|
+
/**
 * Returns a query handle: `collect()` yields the current matching documents
 * once, the async iterator yields the current matches and then every live
 * insert that satisfies `query`, and `onDelete()` emits ids of deleted
 * documents that matched the query.
 */
findDocs<T extends Record<string, any>>(collection: string, query?: _storeQuery<T>) {
  const engine = this

  // Snapshot of every stored document matching `query`, shaped by
  // processDoc ($select / $rename / $groupby / $onlyIds).
  async function* snapshot() {
    for (const doc of await engine.docResults(collection, query)) {
      const shaped = engine.processDoc(doc, query)
      if (shaped !== undefined) yield shaped
    }
  }

  return {
    async *[Symbol.asyncIterator]() {
      yield* snapshot()

      // After the snapshot, keep streaming matching inserts.
      for await (const event of engine.events.listen(collection)) {
        if (event.action !== 'insert' || !event.doc) continue
        if (!engine.matchesQuery(event.id, event.doc as T, query)) continue
        const shaped = engine.processDoc(
          { [event.id]: event.doc as T } as FyloRecord<T>,
          query
        )
        if (shaped !== undefined) yield shaped
      }
    },
    async *collect() {
      yield* snapshot()
    },
    async *onDelete() {
      for await (const event of engine.events.listen(collection)) {
        if (event.action !== 'delete' || !event.doc) continue
        if (!engine.matchesQuery(event.id, event.doc as T, query)) continue
        yield event.id
      }
    }
  }
}
|
|
975
|
+
|
|
976
|
+
/**
 * Streams the raw data of every document in `collection`, silently skipping
 * ids that no longer resolve to a stored document.
 */
async *exportBulkData<T extends Record<string, any>>(collection: string) {
  for (const docId of await this.listDocIds(collection)) {
    const stored = await this.readStoredDoc<T>(collection, docId)
    if (stored) yield stored.data
  }
}
|
|
983
|
+
|
|
984
|
+
/**
 * Joins the documents of two collections pairwise on the `$on` conditions
 * and returns the combined rows keyed by the `"<leftId>, <rightId>"` pair,
 * optionally grouped by `$groupby` and/or reduced to ids via `$onlyIds`.
 *
 * NOTE(review): this is a nested-loop join over fully materialised result
 * sets — O(|left| * |right|) comparisons; acceptable for small collections.
 */
async joinDocs<T extends Record<string, any>, U extends Record<string, any>>(
  join: _join<T, U>
) {
  const leftDocs = await this.docResults<T>(join.$leftCollection)
  const rightDocs = await this.docResults<U>(join.$rightCollection)
  // Joined rows, keyed by the pair of source ids.
  const docs: Record<`${_ttid}, ${_ttid}`, T | U | (T & U) | (Partial<T> & Partial<U>)> = {}

  // Operators supported in `$on` clauses; the ordering comparators coerce
  // both sides with Number() before comparing.
  const compareMap = {
    $eq: (leftVal: any, rightVal: any) => leftVal === rightVal,
    $ne: (leftVal: any, rightVal: any) => leftVal !== rightVal,
    $gt: (leftVal: any, rightVal: any) => Number(leftVal) > Number(rightVal),
    $lt: (leftVal: any, rightVal: any) => Number(leftVal) < Number(rightVal),
    $gte: (leftVal: any, rightVal: any) => Number(leftVal) >= Number(rightVal),
    $lte: (leftVal: any, rightVal: any) => Number(leftVal) <= Number(rightVal)
  } as const

  for (const leftEntry of leftDocs) {
    const [leftId, leftData] = Object.entries(leftEntry)[0] as [_ttid, T]
    for (const rightEntry of rightDocs) {
      const [rightId, rightData] = Object.entries(rightEntry)[0] as [_ttid, U]

      let matched = false

      // A pair matches when ANY operator of ANY `$on` field holds —
      // the conditions are OR-ed together, not AND-ed.
      for (const field in join.$on) {
        const operand = join.$on[field as keyof T]!
        for (const opKey of Object.keys(compareMap) as Array<keyof typeof compareMap>) {
          // `field` addresses the left document; the operand's value names
          // the right document's field to compare against (both resolved
          // via getValueByPath, so nested paths are supported).
          const rightField = operand[opKey]
          if (!rightField) continue
          const leftValue = this.getValueByPath(
            leftData as Record<string, any>,
            String(field)
          )
          const rightValue = this.getValueByPath(
            rightData as Record<string, any>,
            String(rightField)
          )
          if (compareMap[opKey](leftValue, rightValue)) matched = true
        }
      }

      if (!matched) continue

      switch (join.$mode) {
        case 'inner':
          // Right-hand fields win on key collisions.
          docs[`${leftId}, ${rightId}`] = { ...leftData, ...rightData } as T & U
          break
        case 'left':
          // NOTE(review): 'left'/'right' keep only MATCHED pairs and emit
          // one side's data — unmatched rows are dropped, so these are not
          // SQL-style outer joins; confirm against the documented contract.
          docs[`${leftId}, ${rightId}`] = leftData
          break
        case 'right':
          docs[`${leftId}, ${rightId}`] = rightData
          break
        case 'outer':
          // NOTE(review): currently identical to 'inner' — confirm intended.
          docs[`${leftId}, ${rightId}`] = { ...leftData, ...rightData } as T & U
          break
      }

      // $limit caps the number of joined rows; checked in both loops so the
      // scan stops as soon as the cap is reached.
      if (join.$limit && Object.keys(docs).length >= join.$limit) break
    }

    if (join.$limit && Object.keys(docs).length >= join.$limit) break
  }

  if (join.$groupby) {
    // Bucket the joined rows by the stringified value of the $groupby field.
    const groupedDocs: Record<string, Record<string, Partial<T | U>>> = {}
    for (const ids in docs) {
      const data = docs[ids as `${_ttid}, ${_ttid}`] as Record<string, any>
      const key = String(data[join.$groupby as string])
      if (!groupedDocs[key]) groupedDocs[key] = {}
      groupedDocs[key][ids] = data as Partial<T | U>
    }
    if (join.$onlyIds) {
      const groupedIds: Record<string, _ttid[]> = {}
      for (const key in groupedDocs)
        // NOTE(review): keys here are "leftId, rightId" pair strings, and
        // .flat() on a string[] is a no-op — the _ttid[] cast is optimistic;
        // confirm callers expect pair strings rather than individual ids.
        groupedIds[key] = Object.keys(groupedDocs[key]).flat() as _ttid[]
      return groupedIds
    }
    return groupedDocs
  }

  // NOTE(review): same caveat as above — these are deduplicated pair strings.
  if (join.$onlyIds) return Array.from(new Set(Object.keys(docs).flat())) as _ttid[]

  return docs
}
|
|
1068
|
+
}
|