@delma/fylo 2.0.0 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +185 -267
  2. package/package.json +2 -5
  3. package/src/core/directory.ts +22 -354
  4. package/src/engines/s3-files/documents.ts +65 -0
  5. package/src/engines/s3-files/filesystem.ts +172 -0
  6. package/src/engines/s3-files/query.ts +291 -0
  7. package/src/engines/s3-files/types.ts +42 -0
  8. package/src/engines/s3-files.ts +391 -510
  9. package/src/engines/types.ts +1 -1
  10. package/src/index.ts +142 -1237
  11. package/src/sync.ts +58 -0
  12. package/src/types/fylo.d.ts +66 -161
  13. package/src/types/node-runtime.d.ts +1 -0
  14. package/tests/collection/truncate.test.js +11 -10
  15. package/tests/helpers/root.js +7 -0
  16. package/tests/integration/create.test.js +9 -9
  17. package/tests/integration/delete.test.js +16 -14
  18. package/tests/integration/edge-cases.test.js +29 -25
  19. package/tests/integration/encryption.test.js +47 -30
  20. package/tests/integration/export.test.js +11 -11
  21. package/tests/integration/join-modes.test.js +16 -16
  22. package/tests/integration/nested.test.js +26 -24
  23. package/tests/integration/operators.test.js +43 -29
  24. package/tests/integration/read.test.js +25 -21
  25. package/tests/integration/rollback.test.js +21 -51
  26. package/tests/integration/s3-files.performance.test.js +75 -0
  27. package/tests/integration/s3-files.test.js +115 -18
  28. package/tests/integration/sync.test.js +154 -0
  29. package/tests/integration/update.test.js +24 -18
  30. package/src/adapters/redis.ts +0 -487
  31. package/src/adapters/s3.ts +0 -61
  32. package/src/core/walker.ts +0 -174
  33. package/src/core/write-queue.ts +0 -59
  34. package/src/migrate-cli.ts +0 -22
  35. package/src/migrate.ts +0 -74
  36. package/src/types/write-queue.ts +0 -42
  37. package/src/worker.ts +0 -18
  38. package/src/workers/write-worker.ts +0 -120
  39. package/tests/index.js +0 -14
  40. package/tests/integration/migration.test.js +0 -38
  41. package/tests/integration/queue.test.js +0 -83
  42. package/tests/mocks/redis.js +0 -123
  43. package/tests/mocks/s3.js +0 -80
@@ -1,274 +1,256 @@
1
- import { mkdir, readFile, readdir, rm, stat, writeFile, open } from 'node:fs/promises'
1
+ import { rename, writeFile } from 'node:fs/promises'
2
2
  import path from 'node:path'
3
3
  import { createHash } from 'node:crypto'
4
- import { Database } from 'bun:sqlite'
5
4
  import TTID from '@delma/ttid'
6
5
  import { Dir } from '../core/directory'
7
6
  import { validateCollectionName } from '../core/collection'
8
7
  import { Cipher } from '../adapters/cipher'
8
+ import {
9
+ FyloSyncError,
10
+ resolveSyncMode,
11
+ type FyloDeleteSyncEvent,
12
+ type FyloSyncHooks,
13
+ type FyloSyncMode,
14
+ type FyloWriteSyncEvent
15
+ } from '../sync'
9
16
  import type { EventBus, FyloStorageEngineKind, LockManager, StorageEngine } from './types'
17
+ import {
18
+ type CollectionIndexCache,
19
+ type FyloRecord,
20
+ type StoredCollectionIndex,
21
+ type StoredIndexEntry
22
+ } from './s3-files/types'
23
+ import { FilesystemEventBus, FilesystemLockManager, FilesystemStorage } from './s3-files/filesystem'
24
+ import { S3FilesDocuments } from './s3-files/documents'
25
+ import { S3FilesQueryEngine } from './s3-files/query'
10
26
 
11
- type FyloRecord<T extends Record<string, any>> = Record<_ttid, T>
12
-
13
- type S3FilesQueryResult<T extends Record<string, any>> =
14
- | _ttid
15
- | FyloRecord<T>
16
- | Record<string, _ttid[]>
17
- | Record<string, Record<_ttid, Partial<T>>>
18
- | Record<_ttid, Partial<T>>
19
-
20
- type S3FilesEvent<T extends Record<string, any>> = {
21
- ts: number
22
- action: 'insert' | 'delete'
23
- id: _ttid
24
- doc?: T
25
- }
27
+ export class S3FilesEngine {
28
+ readonly kind: FyloStorageEngineKind = 's3-files'
26
29
 
27
- type StoredDoc<T extends Record<string, any>> = {
28
- id: _ttid
29
- createdAt: number
30
- updatedAt: number
31
- data: T
32
- }
30
+ private readonly indexes = new Map<string, CollectionIndexCache>()
31
+ private readonly writeLanes = new Map<string, Promise<void>>()
33
32
 
34
- class FilesystemStorage implements StorageEngine {
35
- async read(target: string): Promise<string> {
36
- return await readFile(target, 'utf8')
37
- }
33
+ private readonly storage: StorageEngine
34
+ private readonly locks: LockManager
35
+ private readonly events: EventBus<Record<string, any>>
36
+ private readonly documents: S3FilesDocuments
37
+ private readonly queryEngine: S3FilesQueryEngine
38
+ private readonly sync?: FyloSyncHooks
39
+ private readonly syncMode: FyloSyncMode
38
40
 
39
- async write(target: string, data: string): Promise<void> {
40
- await mkdir(path.dirname(target), { recursive: true })
41
- await writeFile(target, data, 'utf8')
41
+ constructor(
42
+ readonly root: string = process.env.FYLO_ROOT ??
43
+ process.env.FYLO_S3FILES_ROOT ??
44
+ path.join(process.cwd(), '.fylo-data'),
45
+ options: {
46
+ sync?: FyloSyncHooks
47
+ syncMode?: FyloSyncMode
48
+ } = {}
49
+ ) {
50
+ this.sync = options.sync
51
+ this.syncMode = resolveSyncMode(options.syncMode)
52
+ this.storage = new FilesystemStorage()
53
+ this.locks = new FilesystemLockManager(this.root, this.storage)
54
+ this.events = new FilesystemEventBus<Record<string, any>>(this.root, this.storage)
55
+ this.documents = new S3FilesDocuments(
56
+ this.storage,
57
+ this.docsRoot.bind(this),
58
+ this.docPath.bind(this),
59
+ this.ensureCollection.bind(this),
60
+ this.encodeEncrypted.bind(this),
61
+ this.decodeEncrypted.bind(this)
62
+ )
63
+ this.queryEngine = new S3FilesQueryEngine({
64
+ loadIndexCache: this.loadIndexCache.bind(this),
65
+ normalizeIndexValue: this.normalizeIndexValue.bind(this)
66
+ })
42
67
  }
43
68
 
44
- async delete(target: string): Promise<void> {
45
- await rm(target, { recursive: true, force: true })
69
+ private collectionRoot(collection: string) {
70
+ validateCollectionName(collection)
71
+ return path.join(this.root, collection)
46
72
  }
47
73
 
48
- async list(target: string): Promise<string[]> {
49
- const results: string[] = []
50
-
51
- try {
52
- const entries = await readdir(target, { withFileTypes: true })
53
- for (const entry of entries) {
54
- const child = path.join(target, entry.name)
55
- if (entry.isDirectory()) {
56
- results.push(...(await this.list(child)))
57
- } else {
58
- results.push(child)
59
- }
60
- }
61
- } catch (err) {
62
- if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
63
- }
64
-
65
- return results
74
+ private docsRoot(collection: string) {
75
+ return path.join(this.collectionRoot(collection), '.fylo', 'docs')
66
76
  }
67
77
 
68
- async mkdir(target: string): Promise<void> {
69
- await mkdir(target, { recursive: true })
78
+ private metaRoot(collection: string) {
79
+ return path.join(this.collectionRoot(collection), '.fylo')
70
80
  }
71
81
 
72
- async rmdir(target: string): Promise<void> {
73
- await rm(target, { recursive: true, force: true })
82
+ private indexesRoot(collection: string) {
83
+ return path.join(this.metaRoot(collection), 'indexes')
74
84
  }
75
85
 
76
- async exists(target: string): Promise<boolean> {
77
- try {
78
- await stat(target)
79
- return true
80
- } catch (err) {
81
- if ((err as NodeJS.ErrnoException).code === 'ENOENT') return false
82
- throw err
83
- }
86
+ private indexFilePath(collection: string) {
87
+ return path.join(this.indexesRoot(collection), `${collection}.idx.json`)
84
88
  }
85
- }
86
89
 
87
- class FilesystemLockManager implements LockManager {
88
- constructor(
89
- private readonly root: string,
90
- private readonly storage: StorageEngine
91
- ) {}
92
-
93
- private lockDir(collection: string, docId: _ttid) {
94
- return path.join(this.root, collection, '.fylo', 'locks', `${docId}.lock`)
90
+ private docPath(collection: string, docId: _ttid) {
91
+ return path.join(this.docsRoot(collection), docId.slice(0, 2), `${docId}.json`)
95
92
  }
96
93
 
97
- async acquire(
94
+ private async runSyncTask(
98
95
  collection: string,
99
96
  docId: _ttid,
100
- owner: string,
101
- ttlMs: number = 30_000
102
- ): Promise<boolean> {
103
- const dir = this.lockDir(collection, docId)
104
- const metaPath = path.join(dir, 'meta.json')
105
- await mkdir(path.dirname(dir), { recursive: true })
106
-
107
- try {
108
- await mkdir(dir, { recursive: false })
109
- await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
110
- return true
111
- } catch (err) {
112
- if ((err as NodeJS.ErrnoException).code !== 'EEXIST') throw err
97
+ operation: string,
98
+ targetPath: string,
99
+ task: () => Promise<void>
100
+ ) {
101
+ if (!this.sync?.onWrite && !this.sync?.onDelete) return
102
+
103
+ if (this.syncMode === 'fire-and-forget') {
104
+ void task().catch((cause) => {
105
+ console.error(
106
+ new FyloSyncError({
107
+ collection,
108
+ docId,
109
+ operation,
110
+ path: targetPath,
111
+ cause
112
+ })
113
+ )
114
+ })
115
+ return
113
116
  }
114
117
 
115
118
  try {
116
- const meta = JSON.parse(await this.storage.read(metaPath)) as { ts?: number }
117
- if (meta.ts && Date.now() - meta.ts > ttlMs) {
118
- await this.storage.rmdir(dir)
119
- await mkdir(dir, { recursive: false })
120
- await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
121
- return true
122
- }
123
- } catch {
124
- await this.storage.rmdir(dir)
125
- await mkdir(dir, { recursive: false })
126
- await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
127
- return true
119
+ await task()
120
+ } catch (cause) {
121
+ throw new FyloSyncError({
122
+ collection,
123
+ docId,
124
+ operation,
125
+ path: targetPath,
126
+ cause
127
+ })
128
128
  }
129
-
130
- return false
131
129
  }
132
130
 
133
- async release(collection: string, docId: _ttid, owner: string): Promise<void> {
134
- const dir = this.lockDir(collection, docId)
135
- const metaPath = path.join(dir, 'meta.json')
136
-
137
- try {
138
- const meta = JSON.parse(await this.storage.read(metaPath)) as { owner?: string }
139
- if (meta.owner === owner) await this.storage.rmdir(dir)
140
- } catch (err) {
141
- if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
142
- }
131
+ private async syncWrite<T extends Record<string, any>>(event: FyloWriteSyncEvent<T>) {
132
+ if (!this.sync?.onWrite) return
133
+ await this.sync.onWrite(event)
143
134
  }
144
- }
145
135
 
146
- class FilesystemEventBus<T extends Record<string, any>> implements EventBus<S3FilesEvent<T>> {
147
- constructor(
148
- private readonly root: string,
149
- private readonly storage: StorageEngine
150
- ) {}
151
-
152
- private journalPath(collection: string) {
153
- return path.join(this.root, collection, '.fylo', 'events', `${collection}.ndjson`)
136
+ private async syncDelete(event: FyloDeleteSyncEvent) {
137
+ if (!this.sync?.onDelete) return
138
+ await this.sync.onDelete(event)
154
139
  }
155
140
 
156
- async publish(collection: string, event: S3FilesEvent<T>): Promise<void> {
157
- const target = this.journalPath(collection)
158
- await mkdir(path.dirname(target), { recursive: true })
159
- const line = `${JSON.stringify(event)}\n`
160
- const handle = await open(target, 'a')
161
- try {
162
- await handle.write(line)
163
- } finally {
164
- await handle.close()
165
- }
141
+ private hash(value: string) {
142
+ return createHash('sha256').update(value).digest('hex')
166
143
  }
167
144
 
168
- async *listen(collection: string): AsyncGenerator<S3FilesEvent<T>, void, unknown> {
169
- const target = this.journalPath(collection)
170
- let position = 0
171
-
172
- while (true) {
173
- try {
174
- const fileStat = await stat(target)
175
- if (fileStat.size > position) {
176
- const handle = await open(target, 'r')
177
- try {
178
- const size = fileStat.size - position
179
- const buffer = Buffer.alloc(size)
180
- await handle.read(buffer, 0, size, position)
181
- position = fileStat.size
182
-
183
- for (const line of buffer.toString('utf8').split('\n')) {
184
- if (line.trim().length === 0) continue
185
- yield JSON.parse(line) as S3FilesEvent<T>
186
- }
187
- } finally {
188
- await handle.close()
189
- }
190
- }
191
- } catch (err) {
192
- if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
193
- }
194
-
195
- await Bun.sleep(100)
145
+ private createEmptyIndexCache(): CollectionIndexCache {
146
+ return {
147
+ docs: new Map(),
148
+ fieldHash: new Map(),
149
+ fieldNumeric: new Map(),
150
+ fieldString: new Map()
196
151
  }
197
152
  }
198
- }
199
153
 
200
- export class S3FilesEngine {
201
- readonly kind: FyloStorageEngineKind = 's3-files'
154
+ private addEntryToCache(cache: CollectionIndexCache, docId: _ttid, entry: StoredIndexEntry) {
155
+ let valueHashBucket = cache.fieldHash.get(entry.fieldPath)
156
+ if (!valueHashBucket) {
157
+ valueHashBucket = new Map()
158
+ cache.fieldHash.set(entry.fieldPath, valueHashBucket)
159
+ }
202
160
 
203
- private readonly databases = new Map<string, Database>()
161
+ let docsForValue = valueHashBucket.get(entry.valueHash)
162
+ if (!docsForValue) {
163
+ docsForValue = new Set()
164
+ valueHashBucket.set(entry.valueHash, docsForValue)
165
+ }
166
+ docsForValue.add(docId)
204
167
 
205
- private readonly storage: StorageEngine
206
- private readonly locks: LockManager
207
- private readonly events: EventBus<Record<string, any>>
168
+ if (entry.numericValue !== null) {
169
+ const numericEntries = cache.fieldNumeric.get(entry.fieldPath) ?? []
170
+ numericEntries.push({ docId, numericValue: entry.numericValue })
171
+ cache.fieldNumeric.set(entry.fieldPath, numericEntries)
172
+ }
208
173
 
209
- constructor(readonly root: string = process.env.FYLO_S3FILES_ROOT ?? '/mnt/fylo') {
210
- this.storage = new FilesystemStorage()
211
- this.locks = new FilesystemLockManager(this.root, this.storage)
212
- this.events = new FilesystemEventBus<Record<string, any>>(this.root, this.storage)
174
+ if (entry.valueType === 'string') {
175
+ const stringEntries = cache.fieldString.get(entry.fieldPath) ?? []
176
+ stringEntries.push({ docId, rawValue: entry.rawValue })
177
+ cache.fieldString.set(entry.fieldPath, stringEntries)
178
+ }
213
179
  }
214
180
 
215
- private collectionRoot(collection: string) {
216
- validateCollectionName(collection)
217
- return path.join(this.root, collection)
218
- }
181
+ private deleteEntryFromCache(
182
+ cache: CollectionIndexCache,
183
+ docId: _ttid,
184
+ entry: StoredIndexEntry
185
+ ) {
186
+ const valueHashBucket = cache.fieldHash.get(entry.fieldPath)
187
+ const docsForValue = valueHashBucket?.get(entry.valueHash)
188
+ docsForValue?.delete(docId)
189
+ if (docsForValue?.size === 0) valueHashBucket?.delete(entry.valueHash)
190
+ if (valueHashBucket?.size === 0) cache.fieldHash.delete(entry.fieldPath)
191
+
192
+ if (entry.numericValue !== null) {
193
+ const numericEntries = cache.fieldNumeric
194
+ .get(entry.fieldPath)
195
+ ?.filter(
196
+ (candidate) =>
197
+ !(
198
+ candidate.docId === docId &&
199
+ candidate.numericValue === entry.numericValue
200
+ )
201
+ )
202
+ if (!numericEntries?.length) cache.fieldNumeric.delete(entry.fieldPath)
203
+ else cache.fieldNumeric.set(entry.fieldPath, numericEntries)
204
+ }
219
205
 
220
- private docsRoot(collection: string) {
221
- return path.join(this.collectionRoot(collection), '.fylo', 'docs')
206
+ if (entry.valueType === 'string') {
207
+ const stringEntries = cache.fieldString
208
+ .get(entry.fieldPath)
209
+ ?.filter(
210
+ (candidate) =>
211
+ !(candidate.docId === docId && candidate.rawValue === entry.rawValue)
212
+ )
213
+ if (!stringEntries?.length) cache.fieldString.delete(entry.fieldPath)
214
+ else cache.fieldString.set(entry.fieldPath, stringEntries)
215
+ }
222
216
  }
223
217
 
224
- private metaRoot(collection: string) {
225
- return path.join(this.collectionRoot(collection), '.fylo')
226
- }
218
+ private async writeIndexFile(collection: string, cache: CollectionIndexCache) {
219
+ await this.storage.mkdir(this.indexesRoot(collection))
220
+ const target = this.indexFilePath(collection)
221
+ const temp = `${target}.tmp`
227
222
 
228
- private indexDbPath(collection: string) {
229
- return path.join(this.metaRoot(collection), 'index.db')
230
- }
223
+ const payload: StoredCollectionIndex = {
224
+ version: 1,
225
+ docs: Object.fromEntries(cache.docs)
226
+ }
231
227
 
232
- private docPath(collection: string, docId: _ttid) {
233
- return path.join(this.docsRoot(collection), docId.slice(0, 2), `${docId}.json`)
228
+ await writeFile(temp, JSON.stringify(payload), 'utf8')
229
+ await rename(temp, target)
234
230
  }
235
231
 
236
- private hash(value: string) {
237
- return createHash('sha256').update(value).digest('hex')
238
- }
232
+ private async loadIndexCache(collection: string) {
233
+ const cache = this.createEmptyIndexCache()
239
234
 
240
- private database(collection: string) {
241
- const existing = this.databases.get(collection)
242
- if (existing) return existing
243
-
244
- const db = new Database(this.indexDbPath(collection))
245
- db.exec(`
246
- CREATE TABLE IF NOT EXISTS doc_index_entries (
247
- doc_id TEXT NOT NULL,
248
- field_path TEXT NOT NULL,
249
- value_hash TEXT NOT NULL,
250
- raw_value TEXT NOT NULL,
251
- value_type TEXT NOT NULL,
252
- numeric_value REAL,
253
- PRIMARY KEY (doc_id, field_path, value_hash)
254
- );
255
-
256
- CREATE INDEX IF NOT EXISTS idx_doc_index_entries_field_hash
257
- ON doc_index_entries (field_path, value_hash);
258
-
259
- CREATE INDEX IF NOT EXISTS idx_doc_index_entries_field_numeric
260
- ON doc_index_entries (field_path, numeric_value);
261
- `)
262
- this.databases.set(collection, db)
263
- return db
264
- }
265
-
266
- private closeDatabase(collection: string) {
267
- const db = this.databases.get(collection)
268
- if (db) {
269
- db.close()
270
- this.databases.delete(collection)
235
+ try {
236
+ const raw = JSON.parse(await this.storage.read(this.indexFilePath(collection))) as
237
+ | StoredCollectionIndex
238
+ | undefined
239
+
240
+ if (raw?.version === 1 && raw.docs) {
241
+ for (const [docId, entries] of Object.entries(raw.docs) as Array<
242
+ [_ttid, StoredIndexEntry[]]
243
+ >) {
244
+ cache.docs.set(docId, entries)
245
+ for (const entry of entries) this.addEntryToCache(cache, docId, entry)
246
+ }
247
+ }
248
+ } catch (err) {
249
+ if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
271
250
  }
251
+
252
+ this.indexes.set(collection, cache)
253
+ return cache
272
254
  }
273
255
 
274
256
  private normalizeIndexValue(rawValue: string) {
@@ -286,7 +268,30 @@ export class S3FilesEngine {
286
268
  await this.storage.mkdir(this.collectionRoot(collection))
287
269
  await this.storage.mkdir(this.metaRoot(collection))
288
270
  await this.storage.mkdir(this.docsRoot(collection))
289
- this.database(collection)
271
+ await this.storage.mkdir(this.indexesRoot(collection))
272
+ await this.loadIndexCache(collection)
273
+ }
274
+
275
+ private async withCollectionWriteLock<T>(
276
+ collection: string,
277
+ action: () => Promise<T>
278
+ ): Promise<T> {
279
+ const previous = this.writeLanes.get(collection) ?? Promise.resolve()
280
+ let release!: () => void
281
+ const current = new Promise<void>((resolve) => {
282
+ release = resolve
283
+ })
284
+ const lane = previous.then(() => current)
285
+ this.writeLanes.set(collection, lane)
286
+
287
+ await previous
288
+
289
+ try {
290
+ return await action()
291
+ } finally {
292
+ release()
293
+ if (this.writeLanes.get(collection) === lane) this.writeLanes.delete(collection)
294
+ }
290
295
  }
291
296
 
292
297
  async createCollection(collection: string) {
@@ -294,7 +299,7 @@ export class S3FilesEngine {
294
299
  }
295
300
 
296
301
  async dropCollection(collection: string) {
297
- this.closeDatabase(collection)
302
+ this.indexes.delete(collection)
298
303
  await this.storage.rmdir(this.collectionRoot(collection))
299
304
  }
300
305
 
@@ -392,198 +397,21 @@ export class S3FilesEngine {
392
397
  return value
393
398
  }
394
399
 
395
- private async readStoredDoc<T extends Record<string, any>>(
396
- collection: string,
397
- docId: _ttid
398
- ): Promise<StoredDoc<T> | null> {
399
- const target = this.docPath(collection, docId)
400
-
401
- try {
402
- const raw = JSON.parse(await this.storage.read(target)) as StoredDoc<T>
403
- raw.data = await this.decodeEncrypted(collection, raw.data)
404
- return raw
405
- } catch (err) {
406
- if ((err as NodeJS.ErrnoException).code === 'ENOENT') return null
407
- throw err
408
- }
409
- }
410
-
411
- private async writeStoredDoc<T extends Record<string, any>>(
412
- collection: string,
413
- docId: _ttid,
414
- data: T
415
- ) {
416
- await this.ensureCollection(collection)
417
- const encoded = await this.encodeEncrypted(collection, data)
418
- const { createdAt, updatedAt } = TTID.decodeTime(docId)
419
- const target = this.docPath(collection, docId)
420
- const record: StoredDoc<T> = {
421
- id: docId,
422
- createdAt,
423
- updatedAt: updatedAt ?? createdAt,
424
- data: encoded
425
- }
426
- await this.storage.write(target, JSON.stringify(record))
427
- }
428
-
429
- private async removeStoredDoc(collection: string, docId: _ttid) {
430
- await this.storage.delete(this.docPath(collection, docId))
431
- }
432
-
433
- private async listDocIds(collection: string) {
434
- const files = await this.storage.list(this.docsRoot(collection))
435
- return files
436
- .filter((file) => file.endsWith('.json'))
437
- .map((file) => path.basename(file, '.json'))
438
- .filter((key) => TTID.isTTID(key)) as _ttid[]
439
- }
440
-
441
- private getValueByPath(target: Record<string, any>, fieldPath: string) {
442
- return fieldPath
443
- .split('.')
444
- .reduce<any>(
445
- (acc, key) => (acc === undefined || acc === null ? undefined : acc[key]),
446
- target
447
- )
448
- }
449
-
450
- private matchesTimestamp(docId: _ttid, query?: _storeQuery<Record<string, any>>) {
451
- if (!query?.$created && !query?.$updated) return true
452
- const { createdAt, updatedAt } = TTID.decodeTime(docId)
453
- const timestamps = { createdAt, updatedAt: updatedAt ?? createdAt }
454
-
455
- const match = (value: number, range?: _timestamp) => {
456
- if (!range) return true
457
- if (range.$gt !== undefined && !(value > range.$gt)) return false
458
- if (range.$gte !== undefined && !(value >= range.$gte)) return false
459
- if (range.$lt !== undefined && !(value < range.$lt)) return false
460
- if (range.$lte !== undefined && !(value <= range.$lte)) return false
461
- return true
462
- }
463
-
464
- return (
465
- match(timestamps.createdAt, query.$created) &&
466
- match(timestamps.updatedAt, query.$updated)
467
- )
468
- }
469
-
470
- private likeToRegex(pattern: string) {
471
- const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&').replaceAll('%', '.*')
472
- return new RegExp(`^${escaped}$`)
473
- }
474
-
475
- private matchesOperand(value: unknown, operand: _operand) {
476
- if (operand.$eq !== undefined && value != operand.$eq) return false
477
- if (operand.$ne !== undefined && value == operand.$ne) return false
478
- if (operand.$gt !== undefined && !(Number(value) > operand.$gt)) return false
479
- if (operand.$gte !== undefined && !(Number(value) >= operand.$gte)) return false
480
- if (operand.$lt !== undefined && !(Number(value) < operand.$lt)) return false
481
- if (operand.$lte !== undefined && !(Number(value) <= operand.$lte)) return false
482
- if (
483
- operand.$like !== undefined &&
484
- (typeof value !== 'string' || !this.likeToRegex(operand.$like).test(value))
485
- )
486
- return false
487
- if (operand.$contains !== undefined) {
488
- if (!Array.isArray(value) || !value.some((item) => item == operand.$contains))
489
- return false
490
- }
491
- return true
492
- }
493
-
494
- private matchesQuery<T extends Record<string, any>>(
495
- docId: _ttid,
496
- doc: T,
497
- query?: _storeQuery<T>
498
- ) {
499
- if (!this.matchesTimestamp(docId, query as _storeQuery<Record<string, any>> | undefined))
500
- return false
501
- if (!query?.$ops || query.$ops.length === 0) return true
502
-
503
- return query.$ops.some((operation) => {
504
- for (const field in operation) {
505
- const value = this.getValueByPath(doc, field)
506
- if (!this.matchesOperand(value, operation[field as keyof T]!)) return false
507
- }
508
- return true
509
- })
510
- }
511
-
512
- private selectValues<T extends Record<string, any>>(selection: Array<keyof T>, data: T) {
513
- const copy = { ...data }
514
- for (const field in copy) {
515
- if (!selection.includes(field as keyof T)) delete copy[field]
516
- }
517
- return copy
518
- }
519
-
520
- private renameFields<T extends Record<string, any>>(
521
- rename: Record<keyof Partial<T>, string>,
522
- data: T
523
- ) {
524
- const copy = { ...data }
525
- for (const field in copy) {
526
- if (rename[field]) {
527
- copy[rename[field]] = copy[field]
528
- delete copy[field]
529
- }
530
- }
531
- return copy
532
- }
533
-
534
- private processDoc<T extends Record<string, any>>(
535
- doc: FyloRecord<T>,
536
- query?: _storeQuery<T>
537
- ): S3FilesQueryResult<T> | undefined {
538
- if (Object.keys(doc).length === 0) return
539
-
540
- const next = { ...doc }
541
-
542
- for (let [_id, data] of Object.entries(next)) {
543
- if (query?.$select?.length)
544
- data = this.selectValues(query.$select as Array<keyof T>, data)
545
- if (query?.$rename) data = this.renameFields(query.$rename, data)
546
- next[_id as _ttid] = data as T
547
- }
548
-
549
- if (query?.$groupby) {
550
- const docGroup: Record<string, Record<string, Partial<T>>> = {}
551
- for (const [id, data] of Object.entries(next)) {
552
- const groupValue = data[query.$groupby] as string
553
- if (groupValue) {
554
- const groupData = { ...data }
555
- delete groupData[query.$groupby]
556
- docGroup[groupValue] = { [id]: groupData as Partial<T> }
557
- }
558
- }
559
-
560
- if (query.$onlyIds) {
561
- const groupedIds: Record<string, _ttid[]> = {}
562
- for (const group in docGroup)
563
- groupedIds[group] = Object.keys(docGroup[group]) as _ttid[]
564
- return groupedIds
565
- }
566
-
567
- return docGroup
568
- }
569
-
570
- if (query?.$onlyIds) return Object.keys(next).shift() as _ttid
571
-
572
- return next
573
- }
574
-
575
400
  private async docResults<T extends Record<string, any>>(
576
401
  collection: string,
577
402
  query?: _storeQuery<T>
578
403
  ) {
579
- const ids = await this.listDocIds(collection)
404
+ const candidateIds = await this.queryEngine.candidateDocIdsForQuery(collection, query)
405
+ const ids = candidateIds
406
+ ? Array.from(candidateIds)
407
+ : await this.documents.listDocIds(collection)
580
408
  const limit = query?.$limit
581
409
  const results: Array<FyloRecord<T>> = []
582
410
 
583
411
  for (const id of ids) {
584
- const stored = await this.readStoredDoc<T>(collection, id)
412
+ const stored = await this.documents.readStoredDoc<T>(collection, id)
585
413
  if (!stored) continue
586
- if (!this.matchesQuery(id, stored.data, query)) continue
414
+ if (!this.queryEngine.matchesQuery(id, stored.data, query)) continue
587
415
  results.push({ [id]: stored.data } as FyloRecord<T>)
588
416
  if (limit && results.length >= limit) break
589
417
  }
@@ -597,74 +425,74 @@ export class S3FilesEngine {
597
425
  doc: T
598
426
  ) {
599
427
  const keys = await Dir.extractKeys(collection, docId, doc)
600
- const db = this.database(collection)
601
- const insert = db.query(`
602
- INSERT OR REPLACE INTO doc_index_entries
603
- (doc_id, field_path, value_hash, raw_value, value_type, numeric_value)
604
- VALUES (?, ?, ?, ?, ?, ?)
605
- `)
606
-
607
- const transaction = db.transaction((logicalKeys: string[]) => {
608
- for (const logicalKey of logicalKeys) {
609
- const segments = logicalKey.split('/')
610
- const fieldPath = segments.slice(0, -2).join('/')
611
- const rawValue = segments.at(-2) ?? ''
612
- const normalized = this.normalizeIndexValue(rawValue)
613
- insert.run(
614
- docId,
615
- fieldPath,
616
- normalized.valueHash,
617
- normalized.rawValue,
618
- normalized.valueType,
619
- normalized.numericValue
620
- )
621
- }
428
+ const cache = await this.loadIndexCache(collection)
429
+ const entries = keys.indexes.map((logicalKey) => {
430
+ const segments = logicalKey.split('/')
431
+ const fieldPath = segments.slice(0, -2).join('/')
432
+ const rawValue = segments.at(-2) ?? ''
433
+ const normalized = this.normalizeIndexValue(rawValue)
434
+
435
+ return {
436
+ fieldPath,
437
+ rawValue: normalized.rawValue,
438
+ valueHash: normalized.valueHash,
439
+ valueType: normalized.valueType,
440
+ numericValue: normalized.numericValue
441
+ } satisfies StoredIndexEntry
622
442
  })
623
443
 
624
- transaction(keys.indexes)
444
+ const existingEntries = cache.docs.get(docId)
445
+ if (existingEntries) {
446
+ for (const entry of existingEntries) this.deleteEntryFromCache(cache, docId, entry)
447
+ }
448
+
449
+ cache.docs.set(docId, entries)
450
+ for (const entry of entries) this.addEntryToCache(cache, docId, entry)
451
+ await this.writeIndexFile(collection, cache)
625
452
  }
626
453
 
627
454
  private async removeIndexes<T extends Record<string, any>>(
628
455
  collection: string,
629
456
  docId: _ttid,
630
- doc: T
457
+ _doc: T
631
458
  ) {
632
- const keys = await Dir.extractKeys(collection, docId, doc)
633
- const db = this.database(collection)
634
- const remove = db.query(`
635
- DELETE FROM doc_index_entries
636
- WHERE doc_id = ? AND field_path = ? AND value_hash = ?
637
- `)
638
-
639
- const transaction = db.transaction((logicalKeys: string[]) => {
640
- for (const logicalKey of logicalKeys) {
641
- const segments = logicalKey.split('/')
642
- const fieldPath = segments.slice(0, -2).join('/')
643
- const rawValue = segments.at(-2) ?? ''
644
- remove.run(docId, fieldPath, this.hash(rawValue))
645
- }
646
- })
647
-
648
- transaction(keys.indexes)
459
+ const cache = await this.loadIndexCache(collection)
460
+ const existingEntries = cache.docs.get(docId) ?? []
461
+ for (const entry of existingEntries) this.deleteEntryFromCache(cache, docId, entry)
462
+ cache.docs.delete(docId)
463
+ await this.writeIndexFile(collection, cache)
649
464
  }
650
465
 
651
466
  async putDocument<T extends Record<string, any>>(collection: string, docId: _ttid, doc: T) {
652
- const owner = Bun.randomUUIDv7()
653
- if (!(await this.locks.acquire(collection, docId, owner)))
654
- throw new Error(`Unable to acquire filesystem lock for ${docId}`)
467
+ await this.withCollectionWriteLock(collection, async () => {
468
+ const owner = Bun.randomUUIDv7()
469
+ if (!(await this.locks.acquire(collection, docId, owner)))
470
+ throw new Error(`Unable to acquire filesystem lock for ${docId}`)
655
471
 
656
- try {
657
- await this.writeStoredDoc(collection, docId, doc)
658
- await this.rebuildIndexes(collection, docId, doc)
659
- await this.events.publish(collection, {
660
- ts: Date.now(),
661
- action: 'insert',
662
- id: docId,
663
- doc
664
- })
665
- } finally {
666
- await this.locks.release(collection, docId, owner)
667
- }
472
+ const targetPath = this.docPath(collection, docId)
473
+
474
+ try {
475
+ await this.documents.writeStoredDoc(collection, docId, doc)
476
+ await this.rebuildIndexes(collection, docId, doc)
477
+ await this.events.publish(collection, {
478
+ ts: Date.now(),
479
+ action: 'insert',
480
+ id: docId,
481
+ doc
482
+ })
483
+ await this.runSyncTask(collection, docId, 'put', targetPath, async () => {
484
+ await this.syncWrite({
485
+ operation: 'put',
486
+ collection,
487
+ docId,
488
+ path: targetPath,
489
+ data: doc
490
+ })
491
+ })
492
+ } finally {
493
+ await this.locks.release(collection, docId, owner)
494
+ }
495
+ })
668
496
  }
669
497
 
670
498
  async patchDocument<T extends Record<string, any>>(
@@ -674,56 +502,90 @@ export class S3FilesEngine {
674
502
  patch: Partial<T>,
675
503
  oldDoc?: T
676
504
  ) {
677
- const owner = Bun.randomUUIDv7()
678
- if (!(await this.locks.acquire(collection, oldId, owner)))
679
- throw new Error(`Unable to acquire filesystem lock for ${oldId}`)
505
+ return await this.withCollectionWriteLock(collection, async () => {
506
+ const owner = Bun.randomUUIDv7()
507
+ if (!(await this.locks.acquire(collection, oldId, owner)))
508
+ throw new Error(`Unable to acquire filesystem lock for ${oldId}`)
680
509
 
681
- try {
682
- const existing = oldDoc ?? (await this.readStoredDoc<T>(collection, oldId))?.data
683
- if (!existing) return oldId
684
-
685
- const nextDoc = { ...existing, ...patch } as T
686
- await this.removeIndexes(collection, oldId, existing)
687
- await this.removeStoredDoc(collection, oldId)
688
- await this.events.publish(collection, {
689
- ts: Date.now(),
690
- action: 'delete',
691
- id: oldId,
692
- doc: existing
693
- })
694
- await this.writeStoredDoc(collection, newId, nextDoc)
695
- await this.rebuildIndexes(collection, newId, nextDoc)
696
- await this.events.publish(collection, {
697
- ts: Date.now(),
698
- action: 'insert',
699
- id: newId,
700
- doc: nextDoc
701
- })
702
- return newId
703
- } finally {
704
- await this.locks.release(collection, oldId, owner)
705
- }
510
+ const oldPath = this.docPath(collection, oldId)
511
+
512
+ try {
513
+ const existing =
514
+ oldDoc ?? (await this.documents.readStoredDoc<T>(collection, oldId))?.data
515
+ if (!existing) return oldId
516
+
517
+ const nextDoc = { ...existing, ...patch } as T
518
+ const newPath = this.docPath(collection, newId)
519
+ await this.removeIndexes(collection, oldId, existing)
520
+ await this.documents.removeStoredDoc(collection, oldId)
521
+ await this.events.publish(collection, {
522
+ ts: Date.now(),
523
+ action: 'delete',
524
+ id: oldId,
525
+ doc: existing
526
+ })
527
+ await this.documents.writeStoredDoc(collection, newId, nextDoc)
528
+ await this.rebuildIndexes(collection, newId, nextDoc)
529
+ await this.events.publish(collection, {
530
+ ts: Date.now(),
531
+ action: 'insert',
532
+ id: newId,
533
+ doc: nextDoc
534
+ })
535
+ await this.runSyncTask(collection, newId, 'patch', newPath, async () => {
536
+ await this.syncDelete({
537
+ operation: 'patch',
538
+ collection,
539
+ docId: oldId,
540
+ path: oldPath
541
+ })
542
+ await this.syncWrite({
543
+ operation: 'patch',
544
+ collection,
545
+ docId: newId,
546
+ previousDocId: oldId,
547
+ path: newPath,
548
+ data: nextDoc
549
+ })
550
+ })
551
+ return newId
552
+ } finally {
553
+ await this.locks.release(collection, oldId, owner)
554
+ }
555
+ })
706
556
  }
707
557
 
708
558
  async deleteDocument<T extends Record<string, any>>(collection: string, docId: _ttid) {
709
- const owner = Bun.randomUUIDv7()
710
- if (!(await this.locks.acquire(collection, docId, owner)))
711
- throw new Error(`Unable to acquire filesystem lock for ${docId}`)
559
+ await this.withCollectionWriteLock(collection, async () => {
560
+ const owner = Bun.randomUUIDv7()
561
+ if (!(await this.locks.acquire(collection, docId, owner)))
562
+ throw new Error(`Unable to acquire filesystem lock for ${docId}`)
712
563
 
713
- try {
714
- const existing = await this.readStoredDoc<T>(collection, docId)
715
- if (!existing) return
716
- await this.removeIndexes(collection, docId, existing.data)
717
- await this.removeStoredDoc(collection, docId)
718
- await this.events.publish(collection, {
719
- ts: Date.now(),
720
- action: 'delete',
721
- id: docId,
722
- doc: existing.data
723
- })
724
- } finally {
725
- await this.locks.release(collection, docId, owner)
726
- }
564
+ const targetPath = this.docPath(collection, docId)
565
+
566
+ try {
567
+ const existing = await this.documents.readStoredDoc<T>(collection, docId)
568
+ if (!existing) return
569
+ await this.removeIndexes(collection, docId, existing.data)
570
+ await this.documents.removeStoredDoc(collection, docId)
571
+ await this.events.publish(collection, {
572
+ ts: Date.now(),
573
+ action: 'delete',
574
+ id: docId,
575
+ doc: existing.data
576
+ })
577
+ await this.runSyncTask(collection, docId, 'delete', targetPath, async () => {
578
+ await this.syncDelete({
579
+ operation: 'delete',
580
+ collection,
581
+ docId,
582
+ path: targetPath
583
+ })
584
+ })
585
+ } finally {
586
+ await this.locks.release(collection, docId, owner)
587
+ }
588
+ })
727
589
  }
728
590
 
729
591
  getDoc<T extends Record<string, any>>(
@@ -744,7 +606,7 @@ export class S3FilesEngine {
744
606
  }
745
607
  },
746
608
  async once() {
747
- const stored = await engine.readStoredDoc<T>(collection, docId)
609
+ const stored = await engine.documents.readStoredDoc<T>(collection, docId)
748
610
  return stored ? ({ [docId]: stored.data } as FyloRecord<T>) : {}
749
611
  },
750
612
  async *onDelete() {
@@ -761,7 +623,7 @@ export class S3FilesEngine {
761
623
  const collectDocs = async function* () {
762
624
  const docs = await engine.docResults(collection, query)
763
625
  for (const doc of docs) {
764
- const result = engine.processDoc(doc, query)
626
+ const result = engine.queryEngine.processDoc(doc, query)
765
627
  if (result !== undefined) yield result
766
628
  }
767
629
  }
@@ -772,8 +634,8 @@ export class S3FilesEngine {
772
634
 
773
635
  for await (const event of engine.events.listen(collection)) {
774
636
  if (event.action !== 'insert' || !event.doc) continue
775
- if (!engine.matchesQuery(event.id, event.doc as T, query)) continue
776
- const processed = engine.processDoc(
637
+ if (!engine.queryEngine.matchesQuery(event.id, event.doc as T, query)) continue
638
+ const processed = engine.queryEngine.processDoc(
777
639
  { [event.id]: event.doc as T } as FyloRecord<T>,
778
640
  query
779
641
  )
@@ -786,7 +648,7 @@ export class S3FilesEngine {
786
648
  async *onDelete() {
787
649
  for await (const event of engine.events.listen(collection)) {
788
650
  if (event.action !== 'delete' || !event.doc) continue
789
- if (!engine.matchesQuery(event.id, event.doc as T, query)) continue
651
+ if (!engine.queryEngine.matchesQuery(event.id, event.doc as T, query)) continue
790
652
  yield event.id
791
653
  }
792
654
  }
@@ -794,9 +656,9 @@ export class S3FilesEngine {
794
656
  }
795
657
 
796
658
  async *exportBulkData<T extends Record<string, any>>(collection: string) {
797
- const ids = await this.listDocIds(collection)
659
+ const ids = await this.documents.listDocIds(collection)
798
660
  for (const id of ids) {
799
- const stored = await this.readStoredDoc<T>(collection, id)
661
+ const stored = await this.documents.readStoredDoc<T>(collection, id)
800
662
  if (stored) yield stored.data
801
663
  }
802
664
  }
@@ -829,11 +691,11 @@ export class S3FilesEngine {
829
691
  for (const opKey of Object.keys(compareMap) as Array<keyof typeof compareMap>) {
830
692
  const rightField = operand[opKey]
831
693
  if (!rightField) continue
832
- const leftValue = this.getValueByPath(
694
+ const leftValue = this.queryEngine.getValueByPath(
833
695
  leftData as Record<string, any>,
834
696
  String(field)
835
697
  )
836
- const rightValue = this.getValueByPath(
698
+ const rightValue = this.queryEngine.getValueByPath(
837
699
  rightData as Record<string, any>,
838
700
  String(rightField)
839
701
  )
@@ -858,6 +720,25 @@ export class S3FilesEngine {
858
720
  break
859
721
  }
860
722
 
723
+ let projected = docs[`${leftId}, ${rightId}`] as Record<string, any>
724
+ if (join.$select?.length) {
725
+ projected = this.queryEngine.selectValues(
726
+ join.$select as Array<keyof typeof projected>,
727
+ projected
728
+ )
729
+ }
730
+ if (join.$rename) {
731
+ projected = this.queryEngine.renameFields(
732
+ join.$rename as Record<string, string>,
733
+ projected
734
+ )
735
+ }
736
+ docs[`${leftId}, ${rightId}`] = projected as
737
+ | T
738
+ | U
739
+ | (T & U)
740
+ | (Partial<T> & Partial<U>)
741
+
861
742
  if (join.$limit && Object.keys(docs).length >= join.$limit) break
862
743
  }
863
744