@delma/fylo 1.1.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74) hide show
  1. package/.github/copilot-instructions.md +1 -1
  2. package/.github/prompts/release.prompt.md +4 -43
  3. package/AGENTS.md +1 -1
  4. package/CLAUDE.md +1 -1
  5. package/README.md +141 -62
  6. package/eslint.config.js +8 -4
  7. package/package.json +9 -7
  8. package/src/CLI +16 -14
  9. package/src/adapters/cipher.ts +12 -6
  10. package/src/adapters/redis.ts +193 -123
  11. package/src/adapters/s3.ts +6 -12
  12. package/src/core/collection.ts +5 -0
  13. package/src/core/directory.ts +120 -151
  14. package/src/core/extensions.ts +4 -2
  15. package/src/core/format.ts +390 -419
  16. package/src/core/parser.ts +167 -142
  17. package/src/core/query.ts +31 -26
  18. package/src/core/walker.ts +68 -61
  19. package/src/core/write-queue.ts +7 -4
  20. package/src/engines/s3-files.ts +888 -0
  21. package/src/engines/types.ts +21 -0
  22. package/src/index.ts +754 -378
  23. package/src/migrate-cli.ts +22 -0
  24. package/src/migrate.ts +74 -0
  25. package/src/types/bun-runtime.d.ts +73 -0
  26. package/src/types/fylo.d.ts +115 -27
  27. package/src/types/node-runtime.d.ts +61 -0
  28. package/src/types/query.d.ts +6 -2
  29. package/src/types/vendor-modules.d.ts +8 -7
  30. package/src/worker.ts +7 -1
  31. package/src/workers/write-worker.ts +25 -24
  32. package/tests/collection/truncate.test.js +35 -0
  33. package/tests/{data.ts → data.js} +8 -21
  34. package/tests/{index.ts → index.js} +4 -9
  35. package/tests/integration/aws-s3-files.canary.test.js +22 -0
  36. package/tests/integration/{create.test.ts → create.test.js} +13 -31
  37. package/tests/integration/delete.test.js +95 -0
  38. package/tests/integration/{edge-cases.test.ts → edge-cases.test.js} +50 -124
  39. package/tests/integration/{encryption.test.ts → encryption.test.js} +20 -65
  40. package/tests/integration/{export.test.ts → export.test.js} +8 -23
  41. package/tests/integration/{join-modes.test.ts → join-modes.test.js} +37 -104
  42. package/tests/integration/migration.test.js +38 -0
  43. package/tests/integration/nested.test.js +142 -0
  44. package/tests/integration/operators.test.js +122 -0
  45. package/tests/integration/{queue.test.ts → queue.test.js} +24 -40
  46. package/tests/integration/read.test.js +119 -0
  47. package/tests/integration/rollback.test.js +60 -0
  48. package/tests/integration/s3-files.test.js +108 -0
  49. package/tests/integration/update.test.js +99 -0
  50. package/tests/mocks/{cipher.ts → cipher.js} +11 -26
  51. package/tests/mocks/redis.js +123 -0
  52. package/tests/mocks/{s3.ts → s3.js} +24 -58
  53. package/tests/schemas/album.json +1 -1
  54. package/tests/schemas/comment.json +1 -1
  55. package/tests/schemas/photo.json +1 -1
  56. package/tests/schemas/post.json +1 -1
  57. package/tests/schemas/tip.json +1 -1
  58. package/tests/schemas/todo.json +1 -1
  59. package/tests/schemas/user.d.ts +12 -12
  60. package/tests/schemas/user.json +1 -1
  61. package/tsconfig.json +4 -2
  62. package/tsconfig.typecheck.json +31 -0
  63. package/.github/prompts/issue.prompt.md +0 -19
  64. package/.github/prompts/pr.prompt.md +0 -18
  65. package/.github/prompts/review-pr.prompt.md +0 -19
  66. package/.github/prompts/sync-main.prompt.md +0 -14
  67. package/tests/collection/truncate.test.ts +0 -56
  68. package/tests/integration/delete.test.ts +0 -147
  69. package/tests/integration/nested.test.ts +0 -212
  70. package/tests/integration/operators.test.ts +0 -167
  71. package/tests/integration/read.test.ts +0 -203
  72. package/tests/integration/rollback.test.ts +0 -105
  73. package/tests/integration/update.test.ts +0 -130
  74. package/tests/mocks/redis.ts +0 -169
@@ -0,0 +1,888 @@
1
+ import { mkdir, readFile, readdir, rm, stat, writeFile, open } from 'node:fs/promises'
2
+ import path from 'node:path'
3
+ import { createHash } from 'node:crypto'
4
+ import { Database } from 'bun:sqlite'
5
+ import TTID from '@delma/ttid'
6
+ import { Dir } from '../core/directory'
7
+ import { validateCollectionName } from '../core/collection'
8
+ import { Cipher } from '../adapters/cipher'
9
+ import type { EventBus, FyloStorageEngineKind, LockManager, StorageEngine } from './types'
10
+
11
// A single-document result map: { <ttid>: document }.
type FyloRecord<T extends Record<string, any>> = Record<_ttid, T>

// Everything a query against this engine may yield: a bare id ($onlyIds),
// a { id: doc } map, grouped ids, grouped partial docs, or a partial-doc map.
type S3FilesQueryResult<T extends Record<string, any>> =
    | _ttid
    | FyloRecord<T>
    | Record<string, _ttid[]>
    | Record<string, Record<_ttid, Partial<T>>>
    | Record<_ttid, Partial<T>>

// One line of the per-collection NDJSON event journal.
type S3FilesEvent<T extends Record<string, any>> = {
    ts: number // wall-clock publish time (Date.now())
    action: 'insert' | 'delete'
    id: _ttid
    doc?: T // document payload, when the publisher had it
}

// On-disk JSON envelope for one document.
type StoredDoc<T extends Record<string, any>> = {
    id: _ttid
    createdAt: number // decoded from the TTID (see writeStoredDoc)
    updatedAt: number
    data: T // stored with configured fields encrypted
}
33
+
34
+ class FilesystemStorage implements StorageEngine {
35
+ async read(target: string): Promise<string> {
36
+ return await readFile(target, 'utf8')
37
+ }
38
+
39
+ async write(target: string, data: string): Promise<void> {
40
+ await mkdir(path.dirname(target), { recursive: true })
41
+ await writeFile(target, data, 'utf8')
42
+ }
43
+
44
+ async delete(target: string): Promise<void> {
45
+ await rm(target, { recursive: true, force: true })
46
+ }
47
+
48
+ async list(target: string): Promise<string[]> {
49
+ const results: string[] = []
50
+
51
+ try {
52
+ const entries = await readdir(target, { withFileTypes: true })
53
+ for (const entry of entries) {
54
+ const child = path.join(target, entry.name)
55
+ if (entry.isDirectory()) {
56
+ results.push(...(await this.list(child)))
57
+ } else {
58
+ results.push(child)
59
+ }
60
+ }
61
+ } catch (err) {
62
+ if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
63
+ }
64
+
65
+ return results
66
+ }
67
+
68
+ async mkdir(target: string): Promise<void> {
69
+ await mkdir(target, { recursive: true })
70
+ }
71
+
72
+ async rmdir(target: string): Promise<void> {
73
+ await rm(target, { recursive: true, force: true })
74
+ }
75
+
76
+ async exists(target: string): Promise<boolean> {
77
+ try {
78
+ await stat(target)
79
+ return true
80
+ } catch (err) {
81
+ if ((err as NodeJS.ErrnoException).code === 'ENOENT') return false
82
+ throw err
83
+ }
84
+ }
85
+ }
86
+
87
/**
 * Advisory per-document locks implemented as lock directories on disk.
 * `mkdir` without `recursive` is atomic — it fails with EEXIST when the
 * directory already exists — which is what makes acquisition exclusive.
 */
class FilesystemLockManager implements LockManager {
    constructor(
        private readonly root: string,
        private readonly storage: StorageEngine
    ) {}

    // <root>/<collection>/.fylo/locks/<docId>.lock (a directory, not a file)
    private lockDir(collection: string, docId: _ttid) {
        return path.join(this.root, collection, '.fylo', 'locks', `${docId}.lock`)
    }

    /**
     * Try to take the lock for `docId`. Returns true on success, false when a
     * live owner already holds it. A holder is considered dead once its
     * meta.json timestamp is older than `ttlMs`, in which case the lock is
     * stolen.
     */
    async acquire(
        collection: string,
        docId: _ttid,
        owner: string,
        ttlMs: number = 30_000
    ): Promise<boolean> {
        const dir = this.lockDir(collection, docId)
        const metaPath = path.join(dir, 'meta.json')
        await mkdir(path.dirname(dir), { recursive: true })

        // Fast path: atomically create the lock directory.
        try {
            await mkdir(dir, { recursive: false })
            await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
            return true
        } catch (err) {
            if ((err as NodeJS.ErrnoException).code !== 'EEXIST') throw err
        }

        // Lock directory already exists: inspect the current holder's metadata.
        try {
            const meta = JSON.parse(await this.storage.read(metaPath)) as { ts?: number }
            if (meta.ts && Date.now() - meta.ts > ttlMs) {
                // Holder expired — steal the lock.
                // NOTE(review): rmdir + mkdir + write is not atomic; two
                // stealers racing here can both believe they won. Confirm
                // whether deployment constraints make this acceptable.
                await this.storage.rmdir(dir)
                await mkdir(dir, { recursive: false })
                await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
                return true
            }
        } catch {
            // meta.json missing or unparseable (e.g. a crash between mkdir and
            // the metadata write): treat the lock as abandoned and take it.
            // NOTE(review): the same steal race as above applies on this path.
            await this.storage.rmdir(dir)
            await mkdir(dir, { recursive: false })
            await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
            return true
        }

        // Holder is still within its TTL.
        return false
    }

    /** Release the lock only if `owner` still holds it; a missing lock is a no-op. */
    async release(collection: string, docId: _ttid, owner: string): Promise<void> {
        const dir = this.lockDir(collection, docId)
        const metaPath = path.join(dir, 'meta.json')

        try {
            const meta = JSON.parse(await this.storage.read(metaPath)) as { owner?: string }
            if (meta.owner === owner) await this.storage.rmdir(dir)
        } catch (err) {
            if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
        }
    }
}
145
+
146
+ class FilesystemEventBus<T extends Record<string, any>> implements EventBus<S3FilesEvent<T>> {
147
+ constructor(
148
+ private readonly root: string,
149
+ private readonly storage: StorageEngine
150
+ ) {}
151
+
152
+ private journalPath(collection: string) {
153
+ return path.join(this.root, collection, '.fylo', 'events', `${collection}.ndjson`)
154
+ }
155
+
156
+ async publish(collection: string, event: S3FilesEvent<T>): Promise<void> {
157
+ const target = this.journalPath(collection)
158
+ await mkdir(path.dirname(target), { recursive: true })
159
+ const line = `${JSON.stringify(event)}\n`
160
+ const handle = await open(target, 'a')
161
+ try {
162
+ await handle.write(line)
163
+ } finally {
164
+ await handle.close()
165
+ }
166
+ }
167
+
168
+ async *listen(collection: string): AsyncGenerator<S3FilesEvent<T>, void, unknown> {
169
+ const target = this.journalPath(collection)
170
+ let position = 0
171
+
172
+ while (true) {
173
+ try {
174
+ const fileStat = await stat(target)
175
+ if (fileStat.size > position) {
176
+ const handle = await open(target, 'r')
177
+ try {
178
+ const size = fileStat.size - position
179
+ const buffer = Buffer.alloc(size)
180
+ await handle.read(buffer, 0, size, position)
181
+ position = fileStat.size
182
+
183
+ for (const line of buffer.toString('utf8').split('\n')) {
184
+ if (line.trim().length === 0) continue
185
+ yield JSON.parse(line) as S3FilesEvent<T>
186
+ }
187
+ } finally {
188
+ await handle.close()
189
+ }
190
+ }
191
+ } catch (err) {
192
+ if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
193
+ }
194
+
195
+ await Bun.sleep(100)
196
+ }
197
+ }
198
+ }
199
+
200
/**
 * Filesystem-backed storage engine ("s3-files" kind). Documents are JSON
 * files under <root>/<collection>/.fylo/docs, secondary indexes live in one
 * SQLite database per collection, locking uses lock directories, and change
 * notifications flow through an NDJSON event journal.
 */
export class S3FilesEngine {
    readonly kind: FyloStorageEngineKind = 's3-files'

    // One bun:sqlite handle per collection, opened lazily and cached.
    private readonly databases = new Map<string, Database>()

    private readonly storage: StorageEngine
    private readonly locks: LockManager
    private readonly events: EventBus<Record<string, any>>

    // Root defaults to $FYLO_S3FILES_ROOT, falling back to /mnt/fylo.
    constructor(readonly root: string = process.env.FYLO_S3FILES_ROOT ?? '/mnt/fylo') {
        this.storage = new FilesystemStorage()
        this.locks = new FilesystemLockManager(this.root, this.storage)
        this.events = new FilesystemEventBus<Record<string, any>>(this.root, this.storage)
    }
214
+
215
    // ---- path & key helpers ----

    // <root>/<collection>; validates the collection name first (core/collection).
    private collectionRoot(collection: string) {
        validateCollectionName(collection)
        return path.join(this.root, collection)
    }

    // <root>/<collection>/.fylo/docs — where document JSON files live.
    private docsRoot(collection: string) {
        return path.join(this.collectionRoot(collection), '.fylo', 'docs')
    }

    // <root>/<collection>/.fylo — engine metadata (index db, locks, events).
    private metaRoot(collection: string) {
        return path.join(this.collectionRoot(collection), '.fylo')
    }

    // Per-collection SQLite index database file.
    private indexDbPath(collection: string) {
        return path.join(this.metaRoot(collection), 'index.db')
    }

    // Documents are sharded into 2-character prefix directories: docs/ab/<id>.json.
    private docPath(collection: string, docId: _ttid) {
        return path.join(this.docsRoot(collection), docId.slice(0, 2), `${docId}.json`)
    }

    // Hex SHA-256 of a value; stored as value_hash for equality index lookups.
    private hash(value: string) {
        return createHash('sha256').update(value).digest('hex')
    }
239
+
240
    /**
     * Lazily open (and cache) the per-collection SQLite index database,
     * creating the doc_index_entries table and its lookup indexes on first
     * use. One row per (doc, field path, value); value_hash serves equality
     * lookups and numeric_value serves range lookups.
     */
    private database(collection: string) {
        const existing = this.databases.get(collection)
        if (existing) return existing

        const db = new Database(this.indexDbPath(collection))
        db.exec(`
            CREATE TABLE IF NOT EXISTS doc_index_entries (
                doc_id TEXT NOT NULL,
                field_path TEXT NOT NULL,
                value_hash TEXT NOT NULL,
                raw_value TEXT NOT NULL,
                value_type TEXT NOT NULL,
                numeric_value REAL,
                PRIMARY KEY (doc_id, field_path, value_hash)
            );

            CREATE INDEX IF NOT EXISTS idx_doc_index_entries_field_hash
            ON doc_index_entries (field_path, value_hash);

            CREATE INDEX IF NOT EXISTS idx_doc_index_entries_field_numeric
            ON doc_index_entries (field_path, numeric_value);
        `)
        this.databases.set(collection, db)
        return db
    }
265
+
266
+ private closeDatabase(collection: string) {
267
+ const db = this.databases.get(collection)
268
+ if (db) {
269
+ db.close()
270
+ this.databases.delete(collection)
271
+ }
272
+ }
273
+
274
    /**
     * Derive the index-row columns for one raw key segment: the raw string,
     * its SHA-256 hash (equality lookups), the parsed JS type, and a numeric
     * projection for range lookups (null when not numeric).
     */
    private normalizeIndexValue(rawValue: string) {
        // '%2F' is the on-disk escape for '/' in key segments (see
        // encodeEncrypted); undo it before parsing.
        const parsed = Dir.parseValue(rawValue.replaceAll('%2F', '/'))
        // NOTE(review): Number('') is 0 and Number(true) is 1, so some
        // non-number values still get a numeric_value — confirm intended.
        const numeric = typeof parsed === 'number' ? parsed : Number(parsed)
        return {
            rawValue,
            valueHash: this.hash(rawValue),
            valueType: typeof parsed,
            numericValue: Number.isNaN(numeric) ? null : numeric
        }
    }
284
+
285
    /** Ensure the on-disk layout exists and the index database is open. */
    private async ensureCollection(collection: string) {
        await this.storage.mkdir(this.collectionRoot(collection))
        await this.storage.mkdir(this.metaRoot(collection))
        await this.storage.mkdir(this.docsRoot(collection))
        this.database(collection)
    }

    /** Idempotently create a collection (directories + index database). */
    async createCollection(collection: string) {
        await this.ensureCollection(collection)
    }

    /** Drop a collection: close its index database, remove its directory tree. */
    async dropCollection(collection: string) {
        this.closeDatabase(collection)
        await this.storage.rmdir(this.collectionRoot(collection))
    }

    /** Whether the collection's root directory exists on disk. */
    async hasCollection(collection: string) {
        return await this.storage.exists(this.collectionRoot(collection))
    }
304
+
305
    /**
     * Recursively encrypt the configured fields of a document before writing.
     * Field paths use '/' separators (e.g. 'profile/email'); scalar values are
     * stringified, their '/' characters escaped as '%2F', then encrypted via
     * Cipher. Everything else passes through unchanged.
     */
    private async encodeEncrypted<T extends Record<string, any>>(
        collection: string,
        value: T,
        parentField?: string
    ): Promise<T> {
        if (Array.isArray(value)) {
            const encodedItems = await Promise.all(
                value.map(async (item) => {
                    // NOTE(review): object items recurse WITHOUT parentField,
                    // so encrypted-field paths do not extend through arrays of
                    // objects — confirm this addressing scheme is intended.
                    if (item && typeof item === 'object')
                        return await this.encodeEncrypted(collection, item as Record<string, any>)
                    if (
                        parentField &&
                        Cipher.isConfigured() &&
                        Cipher.isEncryptedField(collection, parentField)
                    ) {
                        return await Cipher.encrypt(String(item).replaceAll('/', '%2F'), true)
                    }
                    return item
                })
            )
            return encodedItems as unknown as T
        }

        if (value && typeof value === 'object') {
            const copy: Record<string, any> = {}
            for (const field in value) {
                const nextField = parentField ? `${parentField}/${field}` : field
                const fieldValue = value[field]
                if (fieldValue && typeof fieldValue === 'object')
                    copy[field] = await this.encodeEncrypted(collection, fieldValue, nextField)
                else if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, nextField)) {
                    copy[field] = await Cipher.encrypt(
                        String(fieldValue).replaceAll('/', '%2F'),
                        true
                    )
                } else copy[field] = fieldValue
            }
            return copy as T
        }

        // Top-level scalars are returned untouched.
        return value
    }
347
+
348
    /**
     * Inverse of encodeEncrypted: walk a stored document and decrypt the
     * configured fields, un-escaping '%2F' back to '/' and re-parsing the
     * plaintext with Dir.parseValue to restore its original type.
     */
    private async decodeEncrypted<T extends Record<string, any>>(
        collection: string,
        value: T,
        parentField?: string
    ): Promise<T> {
        if (Array.isArray(value)) {
            const decodedItems = await Promise.all(
                value.map(async (item) => {
                    // NOTE(review): like encodeEncrypted, object items recurse
                    // without parentField — paths do not extend through arrays.
                    if (item && typeof item === 'object')
                        return await this.decodeEncrypted(collection, item as Record<string, any>)
                    if (
                        parentField &&
                        Cipher.isConfigured() &&
                        Cipher.isEncryptedField(collection, parentField) &&
                        typeof item === 'string'
                    ) {
                        return Dir.parseValue((await Cipher.decrypt(item)).replaceAll('%2F', '/'))
                    }
                    return item
                })
            )
            return decodedItems as unknown as T
        }

        if (value && typeof value === 'object') {
            const copy: Record<string, any> = {}
            for (const field in value) {
                const nextField = parentField ? `${parentField}/${field}` : field
                const fieldValue = value[field]
                if (fieldValue && typeof fieldValue === 'object')
                    copy[field] = await this.decodeEncrypted(collection, fieldValue, nextField)
                else if (
                    Cipher.isConfigured() &&
                    Cipher.isEncryptedField(collection, nextField) &&
                    // Only strings can hold ciphertext; other scalars pass through.
                    typeof fieldValue === 'string'
                ) {
                    copy[field] = Dir.parseValue(
                        (await Cipher.decrypt(fieldValue)).replaceAll('%2F', '/')
                    )
                } else copy[field] = fieldValue
            }
            return copy as T
        }

        return value
    }
394
+
395
+ private async readStoredDoc<T extends Record<string, any>>(
396
+ collection: string,
397
+ docId: _ttid
398
+ ): Promise<StoredDoc<T> | null> {
399
+ const target = this.docPath(collection, docId)
400
+
401
+ try {
402
+ const raw = JSON.parse(await this.storage.read(target)) as StoredDoc<T>
403
+ raw.data = await this.decodeEncrypted(collection, raw.data)
404
+ return raw
405
+ } catch (err) {
406
+ if ((err as NodeJS.ErrnoException).code === 'ENOENT') return null
407
+ throw err
408
+ }
409
+ }
410
+
411
    /**
     * Persist one document: ensure the collection layout exists, encrypt the
     * configured fields, and write the StoredDoc envelope to the sharded JSON
     * path. Timestamps are decoded from the TTID itself, not the clock.
     */
    private async writeStoredDoc<T extends Record<string, any>>(
        collection: string,
        docId: _ttid,
        data: T
    ) {
        await this.ensureCollection(collection)
        const encoded = await this.encodeEncrypted(collection, data)
        const { createdAt, updatedAt } = TTID.decodeTime(docId)
        const target = this.docPath(collection, docId)
        const record: StoredDoc<T> = {
            id: docId,
            createdAt,
            // An id with no update component falls back to its creation time.
            updatedAt: updatedAt ?? createdAt,
            data: encoded
        }
        await this.storage.write(target, JSON.stringify(record))
    }
428
+
429
+ private async removeStoredDoc(collection: string, docId: _ttid) {
430
+ await this.storage.delete(this.docPath(collection, docId))
431
+ }
432
+
433
+ private async listDocIds(collection: string) {
434
+ const files = await this.storage.list(this.docsRoot(collection))
435
+ return files
436
+ .filter((file) => file.endsWith('.json'))
437
+ .map((file) => path.basename(file, '.json'))
438
+ .filter((key) => TTID.isTTID(key)) as _ttid[]
439
+ }
440
+
441
+ private getValueByPath(target: Record<string, any>, fieldPath: string) {
442
+ return fieldPath
443
+ .split('.')
444
+ .reduce<any>(
445
+ (acc, key) => (acc === undefined || acc === null ? undefined : acc[key]),
446
+ target
447
+ )
448
+ }
449
+
450
    /**
     * Evaluate the query's $created/$updated time ranges against the
     * timestamps encoded in the document's TTID. Absent filters always match.
     */
    private matchesTimestamp(docId: _ttid, query?: _storeQuery<Record<string, any>>) {
        if (!query?.$created && !query?.$updated) return true
        const { createdAt, updatedAt } = TTID.decodeTime(docId)
        // Ids that were never rewritten carry no update time; use creation time.
        const timestamps = { createdAt, updatedAt: updatedAt ?? createdAt }

        // A bound only fails the match when it is present and violated.
        const match = (value: number, range?: _timestamp) => {
            if (!range) return true
            if (range.$gt !== undefined && !(value > range.$gt)) return false
            if (range.$gte !== undefined && !(value >= range.$gte)) return false
            if (range.$lt !== undefined && !(value < range.$lt)) return false
            if (range.$lte !== undefined && !(value <= range.$lte)) return false
            return true
        }

        return (
            match(timestamps.createdAt, query.$created) &&
            match(timestamps.updatedAt, query.$updated)
        )
    }
469
+
470
+ private likeToRegex(pattern: string) {
471
+ const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&').replaceAll('%', '.*')
472
+ return new RegExp(`^${escaped}$`)
473
+ }
474
+
475
    /**
     * Evaluate one comparison operand against a field value. Equality uses
     * loose ==/!= — presumably deliberate so numeric strings compare equal to
     * numbers (TODO confirm); range operators coerce with Number().
     */
    private matchesOperand(value: unknown, operand: _operand) {
        if (operand.$eq !== undefined && value != operand.$eq) return false
        if (operand.$ne !== undefined && value == operand.$ne) return false
        if (operand.$gt !== undefined && !(Number(value) > operand.$gt)) return false
        if (operand.$gte !== undefined && !(Number(value) >= operand.$gte)) return false
        if (operand.$lt !== undefined && !(Number(value) < operand.$lt)) return false
        if (operand.$lte !== undefined && !(Number(value) <= operand.$lte)) return false
        if (
            operand.$like !== undefined &&
            (typeof value !== 'string' || !this.likeToRegex(operand.$like).test(value))
        )
            return false
        // $contains matches when the value is an array holding the operand (loose ==).
        if (operand.$contains !== undefined) {
            if (!Array.isArray(value) || !value.some((item) => item == operand.$contains))
                return false
        }
        return true
    }
493
+
494
+ private matchesQuery<T extends Record<string, any>>(
495
+ docId: _ttid,
496
+ doc: T,
497
+ query?: _storeQuery<T>
498
+ ) {
499
+ if (!this.matchesTimestamp(docId, query as _storeQuery<Record<string, any>> | undefined))
500
+ return false
501
+ if (!query?.$ops || query.$ops.length === 0) return true
502
+
503
+ return query.$ops.some((operation) => {
504
+ for (const field in operation) {
505
+ const value = this.getValueByPath(doc, field)
506
+ if (!this.matchesOperand(value, operation[field as keyof T]!)) return false
507
+ }
508
+ return true
509
+ })
510
+ }
511
+
512
+ private selectValues<T extends Record<string, any>>(selection: Array<keyof T>, data: T) {
513
+ const copy = { ...data }
514
+ for (const field in copy) {
515
+ if (!selection.includes(field as keyof T)) delete copy[field]
516
+ }
517
+ return copy
518
+ }
519
+
520
+ private renameFields<T extends Record<string, any>>(
521
+ rename: Record<keyof Partial<T>, string>,
522
+ data: T
523
+ ) {
524
+ const copy = { ...data }
525
+ for (const field in copy) {
526
+ if (rename[field]) {
527
+ copy[rename[field]] = copy[field]
528
+ delete copy[field]
529
+ }
530
+ }
531
+ return copy
532
+ }
533
+
534
+ private processDoc<T extends Record<string, any>>(
535
+ doc: FyloRecord<T>,
536
+ query?: _storeQuery<T>
537
+ ): S3FilesQueryResult<T> | undefined {
538
+ if (Object.keys(doc).length === 0) return
539
+
540
+ const next = { ...doc }
541
+
542
+ for (let [_id, data] of Object.entries(next)) {
543
+ if (query?.$select?.length)
544
+ data = this.selectValues(query.$select as Array<keyof T>, data)
545
+ if (query?.$rename) data = this.renameFields(query.$rename, data)
546
+ next[_id as _ttid] = data as T
547
+ }
548
+
549
+ if (query?.$groupby) {
550
+ const docGroup: Record<string, Record<string, Partial<T>>> = {}
551
+ for (const [id, data] of Object.entries(next)) {
552
+ const groupValue = data[query.$groupby] as string
553
+ if (groupValue) {
554
+ const groupData = { ...data }
555
+ delete groupData[query.$groupby]
556
+ docGroup[groupValue] = { [id]: groupData as Partial<T> }
557
+ }
558
+ }
559
+
560
+ if (query.$onlyIds) {
561
+ const groupedIds: Record<string, _ttid[]> = {}
562
+ for (const group in docGroup)
563
+ groupedIds[group] = Object.keys(docGroup[group]) as _ttid[]
564
+ return groupedIds
565
+ }
566
+
567
+ return docGroup
568
+ }
569
+
570
+ if (query?.$onlyIds) return Object.keys(next).shift() as _ttid
571
+
572
+ return next
573
+ }
574
+
575
+ private async docResults<T extends Record<string, any>>(
576
+ collection: string,
577
+ query?: _storeQuery<T>
578
+ ) {
579
+ const ids = await this.listDocIds(collection)
580
+ const limit = query?.$limit
581
+ const results: Array<FyloRecord<T>> = []
582
+
583
+ for (const id of ids) {
584
+ const stored = await this.readStoredDoc<T>(collection, id)
585
+ if (!stored) continue
586
+ if (!this.matchesQuery(id, stored.data, query)) continue
587
+ results.push({ [id]: stored.data } as FyloRecord<T>)
588
+ if (limit && results.length >= limit) break
589
+ }
590
+
591
+ return results
592
+ }
593
+
594
    /**
     * (Re)write index rows for one document inside a single SQLite
     * transaction. Dir.extractKeys returns '/'-separated logical keys; the
     * second-to-last segment is the raw value and everything before the last
     * two segments is the field path (the trailing segment is presumably the
     * doc id — only the two before it are used here).
     */
    private async rebuildIndexes<T extends Record<string, any>>(
        collection: string,
        docId: _ttid,
        doc: T
    ) {
        const keys = await Dir.extractKeys(collection, docId, doc)
        const db = this.database(collection)
        const insert = db.query(`
            INSERT OR REPLACE INTO doc_index_entries
            (doc_id, field_path, value_hash, raw_value, value_type, numeric_value)
            VALUES (?, ?, ?, ?, ?, ?)
        `)

        // One transaction per document keeps its index update atomic.
        const transaction = db.transaction((logicalKeys: string[]) => {
            for (const logicalKey of logicalKeys) {
                const segments = logicalKey.split('/')
                const fieldPath = segments.slice(0, -2).join('/')
                const rawValue = segments.at(-2) ?? ''
                const normalized = this.normalizeIndexValue(rawValue)
                insert.run(
                    docId,
                    fieldPath,
                    normalized.valueHash,
                    normalized.rawValue,
                    normalized.valueType,
                    normalized.numericValue
                )
            }
        })

        transaction(keys.indexes)
    }
626
+
627
    /**
     * Delete the index rows belonging to one document. Uses the same logical
     * key layout as rebuildIndexes and matches rows by
     * (doc_id, field_path, value_hash) inside a single transaction.
     */
    private async removeIndexes<T extends Record<string, any>>(
        collection: string,
        docId: _ttid,
        doc: T
    ) {
        const keys = await Dir.extractKeys(collection, docId, doc)
        const db = this.database(collection)
        const remove = db.query(`
            DELETE FROM doc_index_entries
            WHERE doc_id = ? AND field_path = ? AND value_hash = ?
        `)

        const transaction = db.transaction((logicalKeys: string[]) => {
            for (const logicalKey of logicalKeys) {
                // Value is the second-to-last segment; hash it for the lookup.
                const segments = logicalKey.split('/')
                const fieldPath = segments.slice(0, -2).join('/')
                const rawValue = segments.at(-2) ?? ''
                remove.run(docId, fieldPath, this.hash(rawValue))
            }
        })

        transaction(keys.indexes)
    }
650
+
651
    /**
     * Insert or overwrite a document under its per-document lock: write the
     * JSON file, rebuild its index rows, then publish an 'insert' event.
     * Throws when the lock cannot be acquired.
     */
    async putDocument<T extends Record<string, any>>(collection: string, docId: _ttid, doc: T) {
        // A fresh UUID identifies this writer as the lock owner.
        const owner = Bun.randomUUIDv7()
        if (!(await this.locks.acquire(collection, docId, owner)))
            throw new Error(`Unable to acquire filesystem lock for ${docId}`)

        try {
            await this.writeStoredDoc(collection, docId, doc)
            await this.rebuildIndexes(collection, docId, doc)
            await this.events.publish(collection, {
                ts: Date.now(),
                action: 'insert',
                id: docId,
                doc
            })
        } finally {
            await this.locks.release(collection, docId, owner)
        }
    }
669
+
670
    /**
     * Move a document to a new id with a shallow patch applied: removes the
     * old file and its indexes, writes the merged document under newId, and
     * publishes a 'delete' followed by an 'insert' event. Returns the id that
     * ends up holding the document (oldId when it does not exist).
     *
     * NOTE(review): only oldId is locked — the write to newId happens without
     * its own lock — and the old document is deleted before the new one is
     * written, so a crash in between loses the document. Confirm both are
     * acceptable for the intended durability guarantees.
     */
    async patchDocument<T extends Record<string, any>>(
        collection: string,
        oldId: _ttid,
        newId: _ttid,
        patch: Partial<T>,
        oldDoc?: T
    ) {
        const owner = Bun.randomUUIDv7()
        if (!(await this.locks.acquire(collection, oldId, owner)))
            throw new Error(`Unable to acquire filesystem lock for ${oldId}`)

        try {
            // Callers may pass the current document to skip a disk read.
            const existing = oldDoc ?? (await this.readStoredDoc<T>(collection, oldId))?.data
            if (!existing) return oldId

            const nextDoc = { ...existing, ...patch } as T
            await this.removeIndexes(collection, oldId, existing)
            await this.removeStoredDoc(collection, oldId)
            await this.events.publish(collection, {
                ts: Date.now(),
                action: 'delete',
                id: oldId,
                doc: existing
            })
            await this.writeStoredDoc(collection, newId, nextDoc)
            await this.rebuildIndexes(collection, newId, nextDoc)
            await this.events.publish(collection, {
                ts: Date.now(),
                action: 'insert',
                id: newId,
                doc: nextDoc
            })
            return newId
        } finally {
            await this.locks.release(collection, oldId, owner)
        }
    }
707
+
708
    /**
     * Delete a document under its lock: drop its index rows, remove the JSON
     * file, then publish a 'delete' event carrying the removed data. Missing
     * documents are a silent no-op. Throws when the lock cannot be acquired.
     */
    async deleteDocument<T extends Record<string, any>>(collection: string, docId: _ttid) {
        const owner = Bun.randomUUIDv7()
        if (!(await this.locks.acquire(collection, docId, owner)))
            throw new Error(`Unable to acquire filesystem lock for ${docId}`)

        try {
            const existing = await this.readStoredDoc<T>(collection, docId)
            if (!existing) return
            await this.removeIndexes(collection, docId, existing.data)
            await this.removeStoredDoc(collection, docId)
            await this.events.publish(collection, {
                ts: Date.now(),
                action: 'delete',
                id: docId,
                doc: existing.data
            })
        } finally {
            await this.locks.release(collection, docId, owner)
        }
    }
728
+
729
    /**
     * Live handle for a single document. The returned object supports:
     *  - `for await`: yields the current value (if present), then every future
     *    'insert' event for this id — it never completes on its own;
     *  - once(): one-shot read, {} when the document does not exist;
     *  - onDelete(): yields the id each time a 'delete' event for it arrives.
     * With onlyId=true the iterator yields the id instead of { id: data }.
     */
    getDoc<T extends Record<string, any>>(
        collection: string,
        docId: _ttid,
        onlyId: boolean = false
    ) {
        // Capture the engine; inside the methods below `this` is the handle.
        const engine = this

        return {
            async *[Symbol.asyncIterator]() {
                // `this` is the returned handle, so once() is reachable here.
                const doc = await this.once()
                if (Object.keys(doc).length > 0) yield onlyId ? Object.keys(doc).shift()! : doc

                // Then tail the event journal for subsequent inserts of this id.
                for await (const event of engine.events.listen(collection)) {
                    if (event.action !== 'insert' || event.id !== docId || !event.doc) continue
                    yield onlyId ? event.id : ({ [event.id]: event.doc } as FyloRecord<T>)
                }
            },
            async once() {
                const stored = await engine.readStoredDoc<T>(collection, docId)
                return stored ? ({ [docId]: stored.data } as FyloRecord<T>) : {}
            },
            async *onDelete() {
                for await (const event of engine.events.listen(collection)) {
                    if (event.action === 'delete' && event.id === docId) yield event.id
                }
            }
        }
    }
757
+
758
    /**
     * Query handle over a collection. The returned object supports:
     *  - `for await`: yields the current matches once, then every future
     *    'insert' event satisfying the query — never completes on its own;
     *  - collect(): yields only the current matches, then finishes;
     *  - onDelete(): yields the id of every deleted document that matched.
     * Each yielded value is a processDoc() result (projection/group applied).
     */
    findDocs<T extends Record<string, any>>(collection: string, query?: _storeQuery<T>) {
        const engine = this

        // Shared one-shot pass over the documents currently on disk.
        const collectDocs = async function* () {
            const docs = await engine.docResults(collection, query)
            for (const doc of docs) {
                const result = engine.processDoc(doc, query)
                if (result !== undefined) yield result
            }
        }

        return {
            async *[Symbol.asyncIterator]() {
                for await (const result of collectDocs()) yield result

                // Switch to tailing the event journal for live matches.
                for await (const event of engine.events.listen(collection)) {
                    if (event.action !== 'insert' || !event.doc) continue
                    if (!engine.matchesQuery(event.id, event.doc as T, query)) continue
                    const processed = engine.processDoc(
                        { [event.id]: event.doc as T } as FyloRecord<T>,
                        query
                    )
                    if (processed !== undefined) yield processed
                }
            },
            async *collect() {
                for await (const result of collectDocs()) yield result
            },
            async *onDelete() {
                for await (const event of engine.events.listen(collection)) {
                    if (event.action !== 'delete' || !event.doc) continue
                    if (!engine.matchesQuery(event.id, event.doc as T, query)) continue
                    yield event.id
                }
            }
        }
    }
795
+
796
+ async *exportBulkData<T extends Record<string, any>>(collection: string) {
797
+ const ids = await this.listDocIds(collection)
798
+ for (const id of ids) {
799
+ const stored = await this.readStoredDoc<T>(collection, id)
800
+ if (stored) yield stored.data
801
+ }
802
+ }
803
+
804
    /**
     * Join two collections in memory. For every (left, right) document pair,
     * each $on operand compares a left field against the right field it names;
     * a pair is kept when ANY field/operator comparison succeeds (OR
     * semantics — `matched` is set but never cleared). Results are keyed by
     * the composite string "<leftId>, <rightId>".
     *
     * NOTE(review): 'left', 'right' and 'outer' modes still require a match,
     * so unmatched rows are never emitted — all modes behave like an inner
     * join with different payloads. Confirm against the documented contract.
     */
    async joinDocs<T extends Record<string, any>, U extends Record<string, any>>(
        join: _join<T, U>
    ) {
        const leftDocs = await this.docResults<T>(join.$leftCollection)
        const rightDocs = await this.docResults<U>(join.$rightCollection)
        const docs: Record<`${_ttid}, ${_ttid}`, T | U | (T & U) | (Partial<T> & Partial<U>)> = {}

        // Comparison operators supported in $on clauses.
        const compareMap = {
            $eq: (leftVal: any, rightVal: any) => leftVal === rightVal,
            $ne: (leftVal: any, rightVal: any) => leftVal !== rightVal,
            $gt: (leftVal: any, rightVal: any) => Number(leftVal) > Number(rightVal),
            $lt: (leftVal: any, rightVal: any) => Number(leftVal) < Number(rightVal),
            $gte: (leftVal: any, rightVal: any) => Number(leftVal) >= Number(rightVal),
            $lte: (leftVal: any, rightVal: any) => Number(leftVal) <= Number(rightVal)
        } as const

        // Nested-loop join over all pairs.
        for (const leftEntry of leftDocs) {
            const [leftId, leftData] = Object.entries(leftEntry)[0] as [_ttid, T]
            for (const rightEntry of rightDocs) {
                const [rightId, rightData] = Object.entries(rightEntry)[0] as [_ttid, U]

                let matched = false

                for (const field in join.$on) {
                    const operand = join.$on[field as keyof T]!
                    // Each operand value names the RIGHT-side field to compare against.
                    for (const opKey of Object.keys(compareMap) as Array<keyof typeof compareMap>) {
                        const rightField = operand[opKey]
                        if (!rightField) continue
                        const leftValue = this.getValueByPath(
                            leftData as Record<string, any>,
                            String(field)
                        )
                        const rightValue = this.getValueByPath(
                            rightData as Record<string, any>,
                            String(rightField)
                        )
                        if (compareMap[opKey](leftValue, rightValue)) matched = true
                    }
                }

                if (!matched) continue

                // Payload depends on the join mode; see NOTE(review) above.
                switch (join.$mode) {
                    case 'inner':
                        docs[`${leftId}, ${rightId}`] = { ...leftData, ...rightData } as T & U
                        break
                    case 'left':
                        docs[`${leftId}, ${rightId}`] = leftData
                        break
                    case 'right':
                        docs[`${leftId}, ${rightId}`] = rightData
                        break
                    case 'outer':
                        docs[`${leftId}, ${rightId}`] = { ...leftData, ...rightData } as T & U
                        break
                }

                if (join.$limit && Object.keys(docs).length >= join.$limit) break
            }

            if (join.$limit && Object.keys(docs).length >= join.$limit) break
        }

        if (join.$groupby) {
            // Bucket joined rows by the grouping field's (stringified) value.
            const groupedDocs: Record<string, Record<string, Partial<T | U>>> = {}
            for (const ids in docs) {
                const data = docs[ids as `${_ttid}, ${_ttid}`] as Record<string, any>
                const key = String(data[join.$groupby as string])
                if (!groupedDocs[key]) groupedDocs[key] = {}
                groupedDocs[key][ids] = data as Partial<T | U>
            }
            if (join.$onlyIds) {
                const groupedIds: Record<string, _ttid[]> = {}
                // NOTE(review): .flat() on a string[] is a no-op, and the keys
                // are composite "<leftId>, <rightId>" strings — the _ttid[]
                // cast does not reflect the runtime values. Verify callers.
                for (const key in groupedDocs)
                    groupedIds[key] = Object.keys(groupedDocs[key]).flat() as _ttid[]
                return groupedIds
            }
            return groupedDocs
        }

        // NOTE(review): same composite-key caveat applies here.
        if (join.$onlyIds) return Array.from(new Set(Object.keys(docs).flat())) as _ttid[]

        return docs
    }
}