@delma/fylo 2.1.0 → 2.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/README.md +27 -0
  2. package/dist/adapters/cipher.js +155 -0
  3. package/dist/adapters/cipher.js.map +1 -0
  4. package/dist/core/collection.js +6 -0
  5. package/dist/core/collection.js.map +1 -0
  6. package/{src/core/directory.ts → dist/core/directory.js} +28 -35
  7. package/dist/core/directory.js.map +1 -0
  8. package/dist/core/doc-id.js +15 -0
  9. package/dist/core/doc-id.js.map +1 -0
  10. package/dist/core/extensions.js +16 -0
  11. package/dist/core/extensions.js.map +1 -0
  12. package/dist/core/format.js +355 -0
  13. package/dist/core/format.js.map +1 -0
  14. package/dist/core/parser.js +764 -0
  15. package/dist/core/parser.js.map +1 -0
  16. package/dist/core/query.js +47 -0
  17. package/dist/core/query.js.map +1 -0
  18. package/dist/engines/s3-files/documents.js +62 -0
  19. package/dist/engines/s3-files/documents.js.map +1 -0
  20. package/dist/engines/s3-files/filesystem.js +165 -0
  21. package/dist/engines/s3-files/filesystem.js.map +1 -0
  22. package/dist/engines/s3-files/query.js +235 -0
  23. package/dist/engines/s3-files/query.js.map +1 -0
  24. package/dist/engines/s3-files/types.js +2 -0
  25. package/dist/engines/s3-files/types.js.map +1 -0
  26. package/dist/engines/s3-files.js +629 -0
  27. package/dist/engines/s3-files.js.map +1 -0
  28. package/dist/engines/types.js +2 -0
  29. package/dist/engines/types.js.map +1 -0
  30. package/dist/index.js +562 -0
  31. package/dist/index.js.map +1 -0
  32. package/dist/sync.js +18 -0
  33. package/dist/sync.js.map +1 -0
  34. package/{src → dist}/types/fylo.d.ts +14 -1
  35. package/package.json +2 -2
  36. package/.env.example +0 -16
  37. package/.github/copilot-instructions.md +0 -3
  38. package/.github/prompts/release.prompt.md +0 -10
  39. package/.github/workflows/ci.yml +0 -37
  40. package/.github/workflows/publish.yml +0 -91
  41. package/.prettierrc +0 -7
  42. package/AGENTS.md +0 -3
  43. package/CLAUDE.md +0 -3
  44. package/eslint.config.js +0 -32
  45. package/src/CLI +0 -39
  46. package/src/adapters/cipher.ts +0 -180
  47. package/src/core/collection.ts +0 -5
  48. package/src/core/extensions.ts +0 -21
  49. package/src/core/format.ts +0 -457
  50. package/src/core/parser.ts +0 -901
  51. package/src/core/query.ts +0 -53
  52. package/src/engines/s3-files/documents.ts +0 -65
  53. package/src/engines/s3-files/filesystem.ts +0 -172
  54. package/src/engines/s3-files/query.ts +0 -291
  55. package/src/engines/s3-files/types.ts +0 -42
  56. package/src/engines/s3-files.ts +0 -769
  57. package/src/engines/types.ts +0 -21
  58. package/src/index.ts +0 -632
  59. package/src/sync.ts +0 -58
  60. package/tests/collection/truncate.test.js +0 -36
  61. package/tests/data.js +0 -97
  62. package/tests/helpers/root.js +0 -7
  63. package/tests/integration/aws-s3-files.canary.test.js +0 -22
  64. package/tests/integration/create.test.js +0 -39
  65. package/tests/integration/delete.test.js +0 -97
  66. package/tests/integration/edge-cases.test.js +0 -162
  67. package/tests/integration/encryption.test.js +0 -148
  68. package/tests/integration/export.test.js +0 -46
  69. package/tests/integration/join-modes.test.js +0 -154
  70. package/tests/integration/nested.test.js +0 -144
  71. package/tests/integration/operators.test.js +0 -136
  72. package/tests/integration/read.test.js +0 -123
  73. package/tests/integration/rollback.test.js +0 -30
  74. package/tests/integration/s3-files.performance.test.js +0 -75
  75. package/tests/integration/s3-files.test.js +0 -205
  76. package/tests/integration/sync.test.js +0 -154
  77. package/tests/integration/update.test.js +0 -105
  78. package/tests/mocks/cipher.js +0 -40
  79. package/tests/schemas/album.d.ts +0 -5
  80. package/tests/schemas/album.json +0 -5
  81. package/tests/schemas/comment.d.ts +0 -7
  82. package/tests/schemas/comment.json +0 -7
  83. package/tests/schemas/photo.d.ts +0 -7
  84. package/tests/schemas/photo.json +0 -7
  85. package/tests/schemas/post.d.ts +0 -6
  86. package/tests/schemas/post.json +0 -6
  87. package/tests/schemas/tip.d.ts +0 -7
  88. package/tests/schemas/tip.json +0 -7
  89. package/tests/schemas/todo.d.ts +0 -6
  90. package/tests/schemas/todo.json +0 -6
  91. package/tests/schemas/user.d.ts +0 -23
  92. package/tests/schemas/user.json +0 -23
  93. package/tsconfig.json +0 -21
  94. package/tsconfig.typecheck.json +0 -31
  95. /package/{src → dist}/types/bun-runtime.d.ts +0 -0
  96. /package/{src → dist}/types/index.d.ts +0 -0
  97. /package/{src → dist}/types/node-runtime.d.ts +0 -0
  98. /package/{src → dist}/types/query.d.ts +0 -0
  99. /package/{src → dist}/types/vendor-modules.d.ts +0 -0
@@ -1,769 +0,0 @@
1
- import { rename, writeFile } from 'node:fs/promises'
2
- import path from 'node:path'
3
- import { createHash } from 'node:crypto'
4
- import TTID from '@delma/ttid'
5
- import { Dir } from '../core/directory'
6
- import { validateCollectionName } from '../core/collection'
7
- import { Cipher } from '../adapters/cipher'
8
- import {
9
- FyloSyncError,
10
- resolveSyncMode,
11
- type FyloDeleteSyncEvent,
12
- type FyloSyncHooks,
13
- type FyloSyncMode,
14
- type FyloWriteSyncEvent
15
- } from '../sync'
16
- import type { EventBus, FyloStorageEngineKind, LockManager, StorageEngine } from './types'
17
- import {
18
- type CollectionIndexCache,
19
- type FyloRecord,
20
- type StoredCollectionIndex,
21
- type StoredIndexEntry
22
- } from './s3-files/types'
23
- import { FilesystemEventBus, FilesystemLockManager, FilesystemStorage } from './s3-files/filesystem'
24
- import { S3FilesDocuments } from './s3-files/documents'
25
- import { S3FilesQueryEngine } from './s3-files/query'
26
-
27
- export class S3FilesEngine {
28
- readonly kind: FyloStorageEngineKind = 's3-files'
29
-
30
- private readonly indexes = new Map<string, CollectionIndexCache>()
31
- private readonly writeLanes = new Map<string, Promise<void>>()
32
-
33
- private readonly storage: StorageEngine
34
- private readonly locks: LockManager
35
- private readonly events: EventBus<Record<string, any>>
36
- private readonly documents: S3FilesDocuments
37
- private readonly queryEngine: S3FilesQueryEngine
38
- private readonly sync?: FyloSyncHooks
39
- private readonly syncMode: FyloSyncMode
40
-
41
- constructor(
42
- readonly root: string = process.env.FYLO_ROOT ??
43
- process.env.FYLO_S3FILES_ROOT ??
44
- path.join(process.cwd(), '.fylo-data'),
45
- options: {
46
- sync?: FyloSyncHooks
47
- syncMode?: FyloSyncMode
48
- } = {}
49
- ) {
50
- this.sync = options.sync
51
- this.syncMode = resolveSyncMode(options.syncMode)
52
- this.storage = new FilesystemStorage()
53
- this.locks = new FilesystemLockManager(this.root, this.storage)
54
- this.events = new FilesystemEventBus<Record<string, any>>(this.root, this.storage)
55
- this.documents = new S3FilesDocuments(
56
- this.storage,
57
- this.docsRoot.bind(this),
58
- this.docPath.bind(this),
59
- this.ensureCollection.bind(this),
60
- this.encodeEncrypted.bind(this),
61
- this.decodeEncrypted.bind(this)
62
- )
63
- this.queryEngine = new S3FilesQueryEngine({
64
- loadIndexCache: this.loadIndexCache.bind(this),
65
- normalizeIndexValue: this.normalizeIndexValue.bind(this)
66
- })
67
- }
68
-
69
- private collectionRoot(collection: string) {
70
- validateCollectionName(collection)
71
- return path.join(this.root, collection)
72
- }
73
-
74
- private docsRoot(collection: string) {
75
- return path.join(this.collectionRoot(collection), '.fylo', 'docs')
76
- }
77
-
78
- private metaRoot(collection: string) {
79
- return path.join(this.collectionRoot(collection), '.fylo')
80
- }
81
-
82
- private indexesRoot(collection: string) {
83
- return path.join(this.metaRoot(collection), 'indexes')
84
- }
85
-
86
- private indexFilePath(collection: string) {
87
- return path.join(this.indexesRoot(collection), `${collection}.idx.json`)
88
- }
89
-
90
- private docPath(collection: string, docId: _ttid) {
91
- return path.join(this.docsRoot(collection), docId.slice(0, 2), `${docId}.json`)
92
- }
93
-
94
- private async runSyncTask(
95
- collection: string,
96
- docId: _ttid,
97
- operation: string,
98
- targetPath: string,
99
- task: () => Promise<void>
100
- ) {
101
- if (!this.sync?.onWrite && !this.sync?.onDelete) return
102
-
103
- if (this.syncMode === 'fire-and-forget') {
104
- void task().catch((cause) => {
105
- console.error(
106
- new FyloSyncError({
107
- collection,
108
- docId,
109
- operation,
110
- path: targetPath,
111
- cause
112
- })
113
- )
114
- })
115
- return
116
- }
117
-
118
- try {
119
- await task()
120
- } catch (cause) {
121
- throw new FyloSyncError({
122
- collection,
123
- docId,
124
- operation,
125
- path: targetPath,
126
- cause
127
- })
128
- }
129
- }
130
-
131
- private async syncWrite<T extends Record<string, any>>(event: FyloWriteSyncEvent<T>) {
132
- if (!this.sync?.onWrite) return
133
- await this.sync.onWrite(event)
134
- }
135
-
136
- private async syncDelete(event: FyloDeleteSyncEvent) {
137
- if (!this.sync?.onDelete) return
138
- await this.sync.onDelete(event)
139
- }
140
-
141
- private hash(value: string) {
142
- return createHash('sha256').update(value).digest('hex')
143
- }
144
-
145
- private createEmptyIndexCache(): CollectionIndexCache {
146
- return {
147
- docs: new Map(),
148
- fieldHash: new Map(),
149
- fieldNumeric: new Map(),
150
- fieldString: new Map()
151
- }
152
- }
153
-
154
- private addEntryToCache(cache: CollectionIndexCache, docId: _ttid, entry: StoredIndexEntry) {
155
- let valueHashBucket = cache.fieldHash.get(entry.fieldPath)
156
- if (!valueHashBucket) {
157
- valueHashBucket = new Map()
158
- cache.fieldHash.set(entry.fieldPath, valueHashBucket)
159
- }
160
-
161
- let docsForValue = valueHashBucket.get(entry.valueHash)
162
- if (!docsForValue) {
163
- docsForValue = new Set()
164
- valueHashBucket.set(entry.valueHash, docsForValue)
165
- }
166
- docsForValue.add(docId)
167
-
168
- if (entry.numericValue !== null) {
169
- const numericEntries = cache.fieldNumeric.get(entry.fieldPath) ?? []
170
- numericEntries.push({ docId, numericValue: entry.numericValue })
171
- cache.fieldNumeric.set(entry.fieldPath, numericEntries)
172
- }
173
-
174
- if (entry.valueType === 'string') {
175
- const stringEntries = cache.fieldString.get(entry.fieldPath) ?? []
176
- stringEntries.push({ docId, rawValue: entry.rawValue })
177
- cache.fieldString.set(entry.fieldPath, stringEntries)
178
- }
179
- }
180
-
181
- private deleteEntryFromCache(
182
- cache: CollectionIndexCache,
183
- docId: _ttid,
184
- entry: StoredIndexEntry
185
- ) {
186
- const valueHashBucket = cache.fieldHash.get(entry.fieldPath)
187
- const docsForValue = valueHashBucket?.get(entry.valueHash)
188
- docsForValue?.delete(docId)
189
- if (docsForValue?.size === 0) valueHashBucket?.delete(entry.valueHash)
190
- if (valueHashBucket?.size === 0) cache.fieldHash.delete(entry.fieldPath)
191
-
192
- if (entry.numericValue !== null) {
193
- const numericEntries = cache.fieldNumeric
194
- .get(entry.fieldPath)
195
- ?.filter(
196
- (candidate) =>
197
- !(
198
- candidate.docId === docId &&
199
- candidate.numericValue === entry.numericValue
200
- )
201
- )
202
- if (!numericEntries?.length) cache.fieldNumeric.delete(entry.fieldPath)
203
- else cache.fieldNumeric.set(entry.fieldPath, numericEntries)
204
- }
205
-
206
- if (entry.valueType === 'string') {
207
- const stringEntries = cache.fieldString
208
- .get(entry.fieldPath)
209
- ?.filter(
210
- (candidate) =>
211
- !(candidate.docId === docId && candidate.rawValue === entry.rawValue)
212
- )
213
- if (!stringEntries?.length) cache.fieldString.delete(entry.fieldPath)
214
- else cache.fieldString.set(entry.fieldPath, stringEntries)
215
- }
216
- }
217
-
218
- private async writeIndexFile(collection: string, cache: CollectionIndexCache) {
219
- await this.storage.mkdir(this.indexesRoot(collection))
220
- const target = this.indexFilePath(collection)
221
- const temp = `${target}.tmp`
222
-
223
- const payload: StoredCollectionIndex = {
224
- version: 1,
225
- docs: Object.fromEntries(cache.docs)
226
- }
227
-
228
- await writeFile(temp, JSON.stringify(payload), 'utf8')
229
- await rename(temp, target)
230
- }
231
-
232
- private async loadIndexCache(collection: string) {
233
- const cache = this.createEmptyIndexCache()
234
-
235
- try {
236
- const raw = JSON.parse(await this.storage.read(this.indexFilePath(collection))) as
237
- | StoredCollectionIndex
238
- | undefined
239
-
240
- if (raw?.version === 1 && raw.docs) {
241
- for (const [docId, entries] of Object.entries(raw.docs) as Array<
242
- [_ttid, StoredIndexEntry[]]
243
- >) {
244
- cache.docs.set(docId, entries)
245
- for (const entry of entries) this.addEntryToCache(cache, docId, entry)
246
- }
247
- }
248
- } catch (err) {
249
- if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
250
- }
251
-
252
- this.indexes.set(collection, cache)
253
- return cache
254
- }
255
-
256
- private normalizeIndexValue(rawValue: string) {
257
- const parsed = Dir.parseValue(rawValue.replaceAll('%2F', '/'))
258
- const numeric = typeof parsed === 'number' ? parsed : Number(parsed)
259
- return {
260
- rawValue,
261
- valueHash: this.hash(rawValue),
262
- valueType: typeof parsed,
263
- numericValue: Number.isNaN(numeric) ? null : numeric
264
- }
265
- }
266
-
267
- private async ensureCollection(collection: string) {
268
- await this.storage.mkdir(this.collectionRoot(collection))
269
- await this.storage.mkdir(this.metaRoot(collection))
270
- await this.storage.mkdir(this.docsRoot(collection))
271
- await this.storage.mkdir(this.indexesRoot(collection))
272
- await this.loadIndexCache(collection)
273
- }
274
-
275
- private async withCollectionWriteLock<T>(
276
- collection: string,
277
- action: () => Promise<T>
278
- ): Promise<T> {
279
- const previous = this.writeLanes.get(collection) ?? Promise.resolve()
280
- let release!: () => void
281
- const current = new Promise<void>((resolve) => {
282
- release = resolve
283
- })
284
- const lane = previous.then(() => current)
285
- this.writeLanes.set(collection, lane)
286
-
287
- await previous
288
-
289
- try {
290
- return await action()
291
- } finally {
292
- release()
293
- if (this.writeLanes.get(collection) === lane) this.writeLanes.delete(collection)
294
- }
295
- }
296
-
297
- async createCollection(collection: string) {
298
- await this.ensureCollection(collection)
299
- }
300
-
301
- async dropCollection(collection: string) {
302
- this.indexes.delete(collection)
303
- await this.storage.rmdir(this.collectionRoot(collection))
304
- }
305
-
306
- async hasCollection(collection: string) {
307
- return await this.storage.exists(this.collectionRoot(collection))
308
- }
309
-
310
- private async encodeEncrypted<T extends Record<string, any>>(
311
- collection: string,
312
- value: T,
313
- parentField?: string
314
- ): Promise<T> {
315
- if (Array.isArray(value)) {
316
- const encodedItems = await Promise.all(
317
- value.map(async (item) => {
318
- if (item && typeof item === 'object')
319
- return await this.encodeEncrypted(collection, item as Record<string, any>)
320
- if (
321
- parentField &&
322
- Cipher.isConfigured() &&
323
- Cipher.isEncryptedField(collection, parentField)
324
- ) {
325
- return await Cipher.encrypt(String(item).replaceAll('/', '%2F'), true)
326
- }
327
- return item
328
- })
329
- )
330
- return encodedItems as unknown as T
331
- }
332
-
333
- if (value && typeof value === 'object') {
334
- const copy: Record<string, any> = {}
335
- for (const field in value) {
336
- const nextField = parentField ? `${parentField}/${field}` : field
337
- const fieldValue = value[field]
338
- if (fieldValue && typeof fieldValue === 'object')
339
- copy[field] = await this.encodeEncrypted(collection, fieldValue, nextField)
340
- else if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, nextField)) {
341
- copy[field] = await Cipher.encrypt(
342
- String(fieldValue).replaceAll('/', '%2F'),
343
- true
344
- )
345
- } else copy[field] = fieldValue
346
- }
347
- return copy as T
348
- }
349
-
350
- return value
351
- }
352
-
353
- private async decodeEncrypted<T extends Record<string, any>>(
354
- collection: string,
355
- value: T,
356
- parentField?: string
357
- ): Promise<T> {
358
- if (Array.isArray(value)) {
359
- const decodedItems = await Promise.all(
360
- value.map(async (item) => {
361
- if (item && typeof item === 'object')
362
- return await this.decodeEncrypted(collection, item as Record<string, any>)
363
- if (
364
- parentField &&
365
- Cipher.isConfigured() &&
366
- Cipher.isEncryptedField(collection, parentField) &&
367
- typeof item === 'string'
368
- ) {
369
- return Dir.parseValue((await Cipher.decrypt(item)).replaceAll('%2F', '/'))
370
- }
371
- return item
372
- })
373
- )
374
- return decodedItems as unknown as T
375
- }
376
-
377
- if (value && typeof value === 'object') {
378
- const copy: Record<string, any> = {}
379
- for (const field in value) {
380
- const nextField = parentField ? `${parentField}/${field}` : field
381
- const fieldValue = value[field]
382
- if (fieldValue && typeof fieldValue === 'object')
383
- copy[field] = await this.decodeEncrypted(collection, fieldValue, nextField)
384
- else if (
385
- Cipher.isConfigured() &&
386
- Cipher.isEncryptedField(collection, nextField) &&
387
- typeof fieldValue === 'string'
388
- ) {
389
- copy[field] = Dir.parseValue(
390
- (await Cipher.decrypt(fieldValue)).replaceAll('%2F', '/')
391
- )
392
- } else copy[field] = fieldValue
393
- }
394
- return copy as T
395
- }
396
-
397
- return value
398
- }
399
-
400
- private async docResults<T extends Record<string, any>>(
401
- collection: string,
402
- query?: _storeQuery<T>
403
- ) {
404
- const candidateIds = await this.queryEngine.candidateDocIdsForQuery(collection, query)
405
- const ids = candidateIds
406
- ? Array.from(candidateIds)
407
- : await this.documents.listDocIds(collection)
408
- const limit = query?.$limit
409
- const results: Array<FyloRecord<T>> = []
410
-
411
- for (const id of ids) {
412
- const stored = await this.documents.readStoredDoc<T>(collection, id)
413
- if (!stored) continue
414
- if (!this.queryEngine.matchesQuery(id, stored.data, query)) continue
415
- results.push({ [id]: stored.data } as FyloRecord<T>)
416
- if (limit && results.length >= limit) break
417
- }
418
-
419
- return results
420
- }
421
-
422
- private async rebuildIndexes<T extends Record<string, any>>(
423
- collection: string,
424
- docId: _ttid,
425
- doc: T
426
- ) {
427
- const keys = await Dir.extractKeys(collection, docId, doc)
428
- const cache = await this.loadIndexCache(collection)
429
- const entries = keys.indexes.map((logicalKey) => {
430
- const segments = logicalKey.split('/')
431
- const fieldPath = segments.slice(0, -2).join('/')
432
- const rawValue = segments.at(-2) ?? ''
433
- const normalized = this.normalizeIndexValue(rawValue)
434
-
435
- return {
436
- fieldPath,
437
- rawValue: normalized.rawValue,
438
- valueHash: normalized.valueHash,
439
- valueType: normalized.valueType,
440
- numericValue: normalized.numericValue
441
- } satisfies StoredIndexEntry
442
- })
443
-
444
- const existingEntries = cache.docs.get(docId)
445
- if (existingEntries) {
446
- for (const entry of existingEntries) this.deleteEntryFromCache(cache, docId, entry)
447
- }
448
-
449
- cache.docs.set(docId, entries)
450
- for (const entry of entries) this.addEntryToCache(cache, docId, entry)
451
- await this.writeIndexFile(collection, cache)
452
- }
453
-
454
- private async removeIndexes<T extends Record<string, any>>(
455
- collection: string,
456
- docId: _ttid,
457
- _doc: T
458
- ) {
459
- const cache = await this.loadIndexCache(collection)
460
- const existingEntries = cache.docs.get(docId) ?? []
461
- for (const entry of existingEntries) this.deleteEntryFromCache(cache, docId, entry)
462
- cache.docs.delete(docId)
463
- await this.writeIndexFile(collection, cache)
464
- }
465
-
466
- async putDocument<T extends Record<string, any>>(collection: string, docId: _ttid, doc: T) {
467
- await this.withCollectionWriteLock(collection, async () => {
468
- const owner = Bun.randomUUIDv7()
469
- if (!(await this.locks.acquire(collection, docId, owner)))
470
- throw new Error(`Unable to acquire filesystem lock for ${docId}`)
471
-
472
- const targetPath = this.docPath(collection, docId)
473
-
474
- try {
475
- await this.documents.writeStoredDoc(collection, docId, doc)
476
- await this.rebuildIndexes(collection, docId, doc)
477
- await this.events.publish(collection, {
478
- ts: Date.now(),
479
- action: 'insert',
480
- id: docId,
481
- doc
482
- })
483
- await this.runSyncTask(collection, docId, 'put', targetPath, async () => {
484
- await this.syncWrite({
485
- operation: 'put',
486
- collection,
487
- docId,
488
- path: targetPath,
489
- data: doc
490
- })
491
- })
492
- } finally {
493
- await this.locks.release(collection, docId, owner)
494
- }
495
- })
496
- }
497
-
498
- async patchDocument<T extends Record<string, any>>(
499
- collection: string,
500
- oldId: _ttid,
501
- newId: _ttid,
502
- patch: Partial<T>,
503
- oldDoc?: T
504
- ) {
505
- return await this.withCollectionWriteLock(collection, async () => {
506
- const owner = Bun.randomUUIDv7()
507
- if (!(await this.locks.acquire(collection, oldId, owner)))
508
- throw new Error(`Unable to acquire filesystem lock for ${oldId}`)
509
-
510
- const oldPath = this.docPath(collection, oldId)
511
-
512
- try {
513
- const existing =
514
- oldDoc ?? (await this.documents.readStoredDoc<T>(collection, oldId))?.data
515
- if (!existing) return oldId
516
-
517
- const nextDoc = { ...existing, ...patch } as T
518
- const newPath = this.docPath(collection, newId)
519
- await this.removeIndexes(collection, oldId, existing)
520
- await this.documents.removeStoredDoc(collection, oldId)
521
- await this.events.publish(collection, {
522
- ts: Date.now(),
523
- action: 'delete',
524
- id: oldId,
525
- doc: existing
526
- })
527
- await this.documents.writeStoredDoc(collection, newId, nextDoc)
528
- await this.rebuildIndexes(collection, newId, nextDoc)
529
- await this.events.publish(collection, {
530
- ts: Date.now(),
531
- action: 'insert',
532
- id: newId,
533
- doc: nextDoc
534
- })
535
- await this.runSyncTask(collection, newId, 'patch', newPath, async () => {
536
- await this.syncDelete({
537
- operation: 'patch',
538
- collection,
539
- docId: oldId,
540
- path: oldPath
541
- })
542
- await this.syncWrite({
543
- operation: 'patch',
544
- collection,
545
- docId: newId,
546
- previousDocId: oldId,
547
- path: newPath,
548
- data: nextDoc
549
- })
550
- })
551
- return newId
552
- } finally {
553
- await this.locks.release(collection, oldId, owner)
554
- }
555
- })
556
- }
557
-
558
- async deleteDocument<T extends Record<string, any>>(collection: string, docId: _ttid) {
559
- await this.withCollectionWriteLock(collection, async () => {
560
- const owner = Bun.randomUUIDv7()
561
- if (!(await this.locks.acquire(collection, docId, owner)))
562
- throw new Error(`Unable to acquire filesystem lock for ${docId}`)
563
-
564
- const targetPath = this.docPath(collection, docId)
565
-
566
- try {
567
- const existing = await this.documents.readStoredDoc<T>(collection, docId)
568
- if (!existing) return
569
- await this.removeIndexes(collection, docId, existing.data)
570
- await this.documents.removeStoredDoc(collection, docId)
571
- await this.events.publish(collection, {
572
- ts: Date.now(),
573
- action: 'delete',
574
- id: docId,
575
- doc: existing.data
576
- })
577
- await this.runSyncTask(collection, docId, 'delete', targetPath, async () => {
578
- await this.syncDelete({
579
- operation: 'delete',
580
- collection,
581
- docId,
582
- path: targetPath
583
- })
584
- })
585
- } finally {
586
- await this.locks.release(collection, docId, owner)
587
- }
588
- })
589
- }
590
-
591
- getDoc<T extends Record<string, any>>(
592
- collection: string,
593
- docId: _ttid,
594
- onlyId: boolean = false
595
- ) {
596
- const engine = this
597
-
598
- return {
599
- async *[Symbol.asyncIterator]() {
600
- const doc = await this.once()
601
- if (Object.keys(doc).length > 0) yield onlyId ? Object.keys(doc).shift()! : doc
602
-
603
- for await (const event of engine.events.listen(collection)) {
604
- if (event.action !== 'insert' || event.id !== docId || !event.doc) continue
605
- yield onlyId ? event.id : ({ [event.id]: event.doc } as FyloRecord<T>)
606
- }
607
- },
608
- async once() {
609
- const stored = await engine.documents.readStoredDoc<T>(collection, docId)
610
- return stored ? ({ [docId]: stored.data } as FyloRecord<T>) : {}
611
- },
612
- async *onDelete() {
613
- for await (const event of engine.events.listen(collection)) {
614
- if (event.action === 'delete' && event.id === docId) yield event.id
615
- }
616
- }
617
- }
618
- }
619
-
620
- findDocs<T extends Record<string, any>>(collection: string, query?: _storeQuery<T>) {
621
- const engine = this
622
-
623
- const collectDocs = async function* () {
624
- const docs = await engine.docResults(collection, query)
625
- for (const doc of docs) {
626
- const result = engine.queryEngine.processDoc(doc, query)
627
- if (result !== undefined) yield result
628
- }
629
- }
630
-
631
- return {
632
- async *[Symbol.asyncIterator]() {
633
- for await (const result of collectDocs()) yield result
634
-
635
- for await (const event of engine.events.listen(collection)) {
636
- if (event.action !== 'insert' || !event.doc) continue
637
- if (!engine.queryEngine.matchesQuery(event.id, event.doc as T, query)) continue
638
- const processed = engine.queryEngine.processDoc(
639
- { [event.id]: event.doc as T } as FyloRecord<T>,
640
- query
641
- )
642
- if (processed !== undefined) yield processed
643
- }
644
- },
645
- async *collect() {
646
- for await (const result of collectDocs()) yield result
647
- },
648
- async *onDelete() {
649
- for await (const event of engine.events.listen(collection)) {
650
- if (event.action !== 'delete' || !event.doc) continue
651
- if (!engine.queryEngine.matchesQuery(event.id, event.doc as T, query)) continue
652
- yield event.id
653
- }
654
- }
655
- }
656
- }
657
-
658
- async *exportBulkData<T extends Record<string, any>>(collection: string) {
659
- const ids = await this.documents.listDocIds(collection)
660
- for (const id of ids) {
661
- const stored = await this.documents.readStoredDoc<T>(collection, id)
662
- if (stored) yield stored.data
663
- }
664
- }
665
-
666
- async joinDocs<T extends Record<string, any>, U extends Record<string, any>>(
667
- join: _join<T, U>
668
- ) {
669
- const leftDocs = await this.docResults<T>(join.$leftCollection)
670
- const rightDocs = await this.docResults<U>(join.$rightCollection)
671
- const docs: Record<`${_ttid}, ${_ttid}`, T | U | (T & U) | (Partial<T> & Partial<U>)> = {}
672
-
673
- const compareMap = {
674
- $eq: (leftVal: any, rightVal: any) => leftVal === rightVal,
675
- $ne: (leftVal: any, rightVal: any) => leftVal !== rightVal,
676
- $gt: (leftVal: any, rightVal: any) => Number(leftVal) > Number(rightVal),
677
- $lt: (leftVal: any, rightVal: any) => Number(leftVal) < Number(rightVal),
678
- $gte: (leftVal: any, rightVal: any) => Number(leftVal) >= Number(rightVal),
679
- $lte: (leftVal: any, rightVal: any) => Number(leftVal) <= Number(rightVal)
680
- } as const
681
-
682
- for (const leftEntry of leftDocs) {
683
- const [leftId, leftData] = Object.entries(leftEntry)[0] as [_ttid, T]
684
- for (const rightEntry of rightDocs) {
685
- const [rightId, rightData] = Object.entries(rightEntry)[0] as [_ttid, U]
686
-
687
- let matched = false
688
-
689
- for (const field in join.$on) {
690
- const operand = join.$on[field as keyof T]!
691
- for (const opKey of Object.keys(compareMap) as Array<keyof typeof compareMap>) {
692
- const rightField = operand[opKey]
693
- if (!rightField) continue
694
- const leftValue = this.queryEngine.getValueByPath(
695
- leftData as Record<string, any>,
696
- String(field)
697
- )
698
- const rightValue = this.queryEngine.getValueByPath(
699
- rightData as Record<string, any>,
700
- String(rightField)
701
- )
702
- if (compareMap[opKey](leftValue, rightValue)) matched = true
703
- }
704
- }
705
-
706
- if (!matched) continue
707
-
708
- switch (join.$mode) {
709
- case 'inner':
710
- docs[`${leftId}, ${rightId}`] = { ...leftData, ...rightData } as T & U
711
- break
712
- case 'left':
713
- docs[`${leftId}, ${rightId}`] = leftData
714
- break
715
- case 'right':
716
- docs[`${leftId}, ${rightId}`] = rightData
717
- break
718
- case 'outer':
719
- docs[`${leftId}, ${rightId}`] = { ...leftData, ...rightData } as T & U
720
- break
721
- }
722
-
723
- let projected = docs[`${leftId}, ${rightId}`] as Record<string, any>
724
- if (join.$select?.length) {
725
- projected = this.queryEngine.selectValues(
726
- join.$select as Array<keyof typeof projected>,
727
- projected
728
- )
729
- }
730
- if (join.$rename) {
731
- projected = this.queryEngine.renameFields(
732
- join.$rename as Record<string, string>,
733
- projected
734
- )
735
- }
736
- docs[`${leftId}, ${rightId}`] = projected as
737
- | T
738
- | U
739
- | (T & U)
740
- | (Partial<T> & Partial<U>)
741
-
742
- if (join.$limit && Object.keys(docs).length >= join.$limit) break
743
- }
744
-
745
- if (join.$limit && Object.keys(docs).length >= join.$limit) break
746
- }
747
-
748
- if (join.$groupby) {
749
- const groupedDocs: Record<string, Record<string, Partial<T | U>>> = {}
750
- for (const ids in docs) {
751
- const data = docs[ids as `${_ttid}, ${_ttid}`] as Record<string, any>
752
- const key = String(data[join.$groupby as string])
753
- if (!groupedDocs[key]) groupedDocs[key] = {}
754
- groupedDocs[key][ids] = data as Partial<T | U>
755
- }
756
- if (join.$onlyIds) {
757
- const groupedIds: Record<string, _ttid[]> = {}
758
- for (const key in groupedDocs)
759
- groupedIds[key] = Object.keys(groupedDocs[key]).flat() as _ttid[]
760
- return groupedIds
761
- }
762
- return groupedDocs
763
- }
764
-
765
- if (join.$onlyIds) return Array.from(new Set(Object.keys(docs).flat())) as _ttid[]
766
-
767
- return docs
768
- }
769
- }