@delma/fylo 2.0.1 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +185 -267
  2. package/package.json +2 -5
  3. package/src/core/directory.ts +22 -354
  4. package/src/engines/s3-files/documents.ts +65 -0
  5. package/src/engines/s3-files/filesystem.ts +172 -0
  6. package/src/engines/s3-files/query.ts +291 -0
  7. package/src/engines/s3-files/types.ts +42 -0
  8. package/src/engines/s3-files.ts +391 -690
  9. package/src/engines/types.ts +1 -1
  10. package/src/index.ts +142 -1237
  11. package/src/sync.ts +58 -0
  12. package/src/types/fylo.d.ts +66 -161
  13. package/src/types/node-runtime.d.ts +1 -0
  14. package/tests/collection/truncate.test.js +11 -10
  15. package/tests/helpers/root.js +7 -0
  16. package/tests/integration/create.test.js +9 -9
  17. package/tests/integration/delete.test.js +16 -14
  18. package/tests/integration/edge-cases.test.js +29 -25
  19. package/tests/integration/encryption.test.js +47 -30
  20. package/tests/integration/export.test.js +11 -11
  21. package/tests/integration/join-modes.test.js +16 -16
  22. package/tests/integration/nested.test.js +26 -24
  23. package/tests/integration/operators.test.js +43 -29
  24. package/tests/integration/read.test.js +25 -21
  25. package/tests/integration/rollback.test.js +21 -51
  26. package/tests/integration/s3-files.performance.test.js +75 -0
  27. package/tests/integration/s3-files.test.js +57 -44
  28. package/tests/integration/sync.test.js +154 -0
  29. package/tests/integration/update.test.js +24 -18
  30. package/src/adapters/redis.ts +0 -487
  31. package/src/adapters/s3.ts +0 -61
  32. package/src/core/walker.ts +0 -174
  33. package/src/core/write-queue.ts +0 -59
  34. package/src/migrate-cli.ts +0 -22
  35. package/src/migrate.ts +0 -74
  36. package/src/types/write-queue.ts +0 -42
  37. package/src/worker.ts +0 -18
  38. package/src/workers/write-worker.ts +0 -120
  39. package/tests/index.js +0 -14
  40. package/tests/integration/migration.test.js +0 -38
  41. package/tests/integration/queue.test.js +0 -83
  42. package/tests/mocks/redis.js +0 -123
  43. package/tests/mocks/s3.js +0 -80
@@ -1,380 +1,48 @@
1
- import { Walker } from './walker'
2
- import TTID from '@delma/ttid'
3
- import { S3 } from '../adapters/s3'
4
- import { Redis } from '../adapters/redis'
5
1
  import { Cipher } from '../adapters/cipher'
6
2
 
7
3
  export class Dir {
8
- private static readonly KEY_LIMIT = 1024
9
-
10
4
  private static readonly SLASH_ASCII = '%2F'
11
5
 
12
- private readonly transactions: Array<{
13
- action: (...args: string[]) => Promise<void>
14
- args: string[]
15
- }>
16
-
17
- private static _redis: Redis | null = null
18
-
19
- private static get redis(): Redis {
20
- if (!Dir._redis) Dir._redis = new Redis()
21
- return Dir._redis
22
- }
23
-
24
- constructor() {
25
- this.transactions = []
26
- }
27
-
28
- static async claimTTID(_id: _ttid, ttlSeconds: number = 10): Promise<boolean> {
29
- return await Dir.redis.claimTTID(_id, ttlSeconds)
30
- }
31
-
32
- static async reconstructData(collection: string, items: string[]) {
33
- items = await this.readValues(collection, items)
34
-
35
- let fieldVal: Record<string, string> = {}
36
-
37
- for (const data of items) {
38
- const segs = data.split('/')
39
- const val = segs.pop()!
40
- const fieldPath = segs.join('/')
41
-
42
- // Decrypt value if field is encrypted — fieldPath starts with TTID segment
43
- // so strip it to get the actual field name for the check
44
- const fieldOnly = segs.slice(1).join('/')
45
- if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldOnly)) {
46
- fieldVal[fieldPath] = await Cipher.decrypt(val)
47
- } else {
48
- fieldVal[fieldPath] = val
49
- }
50
- }
51
-
52
- return this.constructData(fieldVal)
53
- }
54
-
55
- private static async readValues(collection: string, items: string[]) {
56
- for (let i = 0; i < items.length; i++) {
57
- const segments = items[i].split('/')
58
-
59
- const filename = segments.pop()!
60
-
61
- if (TTID.isUUID(filename)) {
62
- const file = S3.file(collection, items[i])
63
- const val = await file.text()
64
-
65
- items[i] = `${segments.join('/')}/${val}`
66
- }
67
- }
68
-
69
- return items
70
- }
71
-
72
- private static async filterByTimestamp(
73
- _id: _ttid,
74
- indexes: string[],
75
- { updated, created }: { updated?: _timestamp; created?: _timestamp }
76
- ) {
77
- const { createdAt, updatedAt } = TTID.decodeTime(_id)
78
-
79
- if (updated && updatedAt) {
80
- if ((updated.$gt || updated.$gte) && (updated.$lt || updated.$lte)) {
81
- if (updated.$gt && updated.$lt) {
82
- if (updated.$gt! > updated.$lt!) throw new Error('Invalid updated query')
83
-
84
- indexes = updatedAt > updated.$gt! && updatedAt < updated.$lt! ? indexes : []
85
- } else if (updated.$gt && updated.$lte) {
86
- if (updated.$gt! > updated.$lte!) throw new Error('Invalid updated query')
87
-
88
- indexes = updatedAt > updated.$gt! && updatedAt <= updated.$lte! ? indexes : []
89
- } else if (updated.$gte && updated.$lt) {
90
- if (updated.$gte! > updated.$lt!) throw new Error('Invalid updated query')
91
-
92
- indexes = updatedAt >= updated.$gte! && updatedAt < updated.$lt! ? indexes : []
93
- } else if (updated.$gte && updated.$lte) {
94
- if (updated.$gte! > updated.$lte!) throw new Error('Invalid updated query')
95
-
96
- indexes =
97
- updatedAt >= updated.$gte! && updatedAt <= updated.$lte! ? indexes : []
98
- }
99
- } else if ((updated.$gt || updated.$gte) && !updated.$lt && !updated.$lte) {
100
- indexes = updated.$gt
101
- ? updatedAt > updated.$gt!
102
- ? indexes
103
- : []
104
- : updatedAt >= updated.$gte!
105
- ? indexes
106
- : []
107
- } else if (!updated.$gt && !updated.$gte && (updated.$lt || updated.$lte)) {
108
- indexes = updated.$lt
109
- ? updatedAt < updated.$lt!
110
- ? indexes
111
- : []
112
- : updatedAt <= updated.$lte!
113
- ? indexes
114
- : []
115
- }
116
- }
117
-
118
- if (created) {
119
- if ((created.$gt || created.$gte) && (created.$lt || created.$lte)) {
120
- if (created.$gt && created.$lt) {
121
- if (created.$gt! > created.$lt!) throw new Error('Invalid created query')
122
-
123
- indexes = createdAt > created.$gt! && createdAt < created.$lt! ? indexes : []
124
- } else if (created.$gt && created.$lte) {
125
- if (created.$gt! > created.$lte!) throw new Error('Invalid updated query')
126
-
127
- indexes = createdAt > created.$gt! && createdAt <= created.$lte! ? indexes : []
128
- } else if (created.$gte && created.$lt) {
129
- if (created.$gte! > created.$lt!) throw new Error('Invalid updated query')
130
-
131
- indexes = createdAt >= created.$gte! && createdAt < created.$lt! ? indexes : []
132
- } else if (created.$gte && created.$lte) {
133
- if (created.$gte! > created.$lte!) throw new Error('Invalid updated query')
134
-
135
- indexes =
136
- createdAt >= created.$gte! && createdAt <= created.$lte! ? indexes : []
137
- }
138
- } else if ((created.$gt || created.$gte) && !created.$lt && !created.$lte) {
139
- if (created.$gt) indexes = createdAt > created.$gt! ? indexes : []
140
- else if (created.$gte) indexes = createdAt >= created.$gte! ? indexes : []
141
- } else if (!created.$gt && !created.$gte && (created.$lt || created.$lte)) {
142
- if (created.$lt) indexes = createdAt < created.$lt! ? indexes : []
143
- else if (created.$lte) indexes = createdAt <= created.$lte! ? indexes : []
144
- }
145
- }
146
-
147
- return indexes.length > 0
148
- }
149
-
150
- static async *searchDocs<T extends Record<string, any>>(
151
- collection: string,
152
- pattern: string | string[],
153
- { updated, created }: { updated?: _timestamp; created?: _timestamp },
154
- { listen = false, skip = false }: { listen: boolean; skip: boolean },
155
- deleted: boolean = false
156
- ): AsyncGenerator<Record<_ttid, T> | _ttid | void, void, { count: number; limit?: number }> {
157
- const data = yield
158
- let count = data.count
159
- let limit = data.limit
160
-
161
- const constructData = async (collection: string, _id: _ttid, items: string[]) => {
162
- if (created || updated) {
163
- if (await this.filterByTimestamp(_id, items, { created, updated })) {
164
- const data = await this.reconstructData(collection, items)
165
-
166
- return { [_id]: data } as Record<_ttid, T>
167
- } else return {}
168
- } else {
169
- const data = await this.reconstructData(collection, items)
170
-
171
- return { [_id]: data } as Record<_ttid, T>
172
- }
173
- }
174
-
175
- const processQuery = async function* (
176
- p: string
177
- ): AsyncGenerator<
178
- Record<_ttid, T> | _ttid | void,
179
- void,
180
- { count: number; limit?: number }
181
- > {
182
- let finished = false
183
-
184
- if (listen && !deleted) {
185
- const iter = Walker.search(collection, p, { listen, skip })
186
-
187
- do {
188
- const { value, done } = await iter.next({ count, limit })
189
-
190
- if (done) finished = true
191
-
192
- if (value) {
193
- const data = yield await constructData(collection, value._id, value.data)
194
- count = data.count
195
- limit = data.limit
196
- }
197
- } while (!finished)
198
- } else if (listen && deleted) {
199
- const iter = Walker.search(collection, p, { listen, skip }, 'delete')
200
-
201
- do {
202
- const { value, done } = await iter.next({ count, limit })
203
-
204
- if (done) finished = true
205
-
206
- if (value) {
207
- const data = yield value._id
208
- count = data.count
209
- limit = data.limit
210
- }
211
- } while (!finished)
212
- } else {
213
- const iter = Walker.search(collection, p, { listen, skip })
214
-
215
- do {
216
- const { value, done } = await iter.next({ count, limit })
217
-
218
- if (done) finished = true
219
-
220
- if (value) {
221
- const data = yield await constructData(collection, value._id, value.data)
222
- count = data.count
223
- limit = data.limit
224
- }
225
- } while (!finished)
226
- }
227
- }
228
-
229
- if (Array.isArray(pattern)) {
230
- for (const p of pattern) yield* processQuery(p)
231
- } else yield* processQuery(pattern)
232
- }
233
-
234
- async putKeys(
235
- collection: string,
236
- { dataKey, indexKey }: { dataKey: string; indexKey: string }
237
- ) {
238
- let dataBody: string | undefined
239
- let indexBody: string | undefined
240
-
241
- if (dataKey.length > Dir.KEY_LIMIT) {
242
- const dataSegs = dataKey.split('/')
243
-
244
- dataBody = dataSegs.pop()!
245
-
246
- indexKey = `${dataSegs.join('/')}/${Bun.randomUUIDv7()}`
247
- }
248
-
249
- if (indexKey.length > Dir.KEY_LIMIT) {
250
- const indexSegs = indexKey.split('/')
251
-
252
- const _id = indexSegs.pop()! as _ttid
253
-
254
- indexBody = indexSegs.pop()!
255
-
256
- dataKey = `${indexSegs.join('/')}/${_id}`
257
- }
258
-
259
- await Promise.all([
260
- S3.put(collection, dataKey, dataBody ?? ''),
261
- S3.put(collection, indexKey, indexBody ?? '')
262
- ])
263
-
264
- this.transactions.push({
265
- action: S3.delete,
266
- args: [collection, dataKey]
267
- })
268
-
269
- this.transactions.push({
270
- action: S3.delete,
271
- args: [collection, indexKey]
272
- })
273
-
274
- await Dir.redis.publish(collection, 'insert', indexKey)
275
- }
276
-
277
- async executeRollback() {
278
- do {
279
- const transaction = this.transactions.pop()
280
-
281
- if (transaction) {
282
- const { action, args } = transaction
283
-
284
- await action(...args)
285
- }
286
- } while (this.transactions.length > 0)
287
- }
288
-
289
- async deleteKeys(collection: string, dataKey: string) {
290
- const segments = dataKey.split('/')
291
-
292
- const _id = segments.shift()!
293
-
294
- const indexKey = `${segments.join('/')}/${_id}`
295
-
296
- const dataFile = S3.file(collection, dataKey)
297
- const indexFile = S3.file(collection, indexKey)
298
-
299
- let dataBody: string | undefined
300
- let indexBody: string | undefined
301
-
302
- if (dataFile.size > 0) dataBody = await dataFile.text()
303
- if (indexFile.size > 0) indexBody = await indexFile.text()
304
-
305
- await Promise.all([S3.delete(collection, indexKey), S3.delete(collection, dataKey)])
306
-
307
- this.transactions.push({
308
- action: S3.put,
309
- args: [collection, dataKey, dataBody ?? '']
310
- })
311
-
312
- this.transactions.push({
313
- action: S3.put,
314
- args: [collection, indexKey, indexBody ?? '']
315
- })
316
-
317
- await Dir.redis.publish(collection, 'delete', _id)
318
- }
319
-
320
6
  static async extractKeys<T>(collection: string, _id: _ttid, data: T, parentField?: string) {
321
7
  const keys: { data: string[]; indexes: string[] } = { data: [], indexes: [] }
322
-
323
- const obj = { ...data }
8
+ const obj = { ...data } as Record<string, any>
324
9
 
325
10
  for (const field in obj) {
326
11
  const newField = parentField ? `${parentField}/${field}` : field
12
+ const fieldValue = obj[field]
327
13
 
328
- if (typeof obj[field] === 'object' && !Array.isArray(obj[field])) {
329
- const items = await this.extractKeys(collection, _id, obj[field], newField)
14
+ if (fieldValue && typeof fieldValue === 'object' && !Array.isArray(fieldValue)) {
15
+ const items = await this.extractKeys(collection, _id, fieldValue, newField)
330
16
  keys.data.push(...items.data)
331
17
  keys.indexes.push(...items.indexes)
332
- } else if (typeof obj[field] === 'object' && Array.isArray(obj[field])) {
333
- const items: (string | number | boolean)[] = obj[field]
334
- if (items.some((item) => typeof item === 'object'))
18
+ continue
19
+ }
20
+
21
+ if (Array.isArray(fieldValue)) {
22
+ if (fieldValue.some((item) => typeof item === 'object')) {
335
23
  throw new Error(`Cannot have an array of objects`)
336
- for (let i = 0; i < items.length; i++) {
337
- let val = String(items[i]).split('/').join(this.SLASH_ASCII)
338
- if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField))
24
+ }
25
+
26
+ for (let i = 0; i < fieldValue.length; i++) {
27
+ let val = String(fieldValue[i]).replaceAll('/', this.SLASH_ASCII)
28
+ if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField)) {
339
29
  val = await Cipher.encrypt(val, true)
30
+ }
340
31
  keys.data.push(`${_id}/${newField}/${i}/${val}`)
341
32
  keys.indexes.push(`${newField}/${i}/${val}/${_id}`)
342
33
  }
343
- } else {
344
- let val = String(obj[field]).replaceAll('/', this.SLASH_ASCII)
345
- if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField))
346
- val = await Cipher.encrypt(val, true)
347
- keys.data.push(`${_id}/${newField}/${val}`)
348
- keys.indexes.push(`${newField}/${val}/${_id}`)
34
+ continue
349
35
  }
350
- }
351
-
352
- return keys
353
- }
354
-
355
- static constructData(fieldVal: Record<string, string>) {
356
- const data: Record<string, any> = {}
357
36
 
358
- for (let fullField in fieldVal) {
359
- const fields = fullField.split('/').slice(1)
360
-
361
- let curr = data
362
-
363
- while (fields.length > 1) {
364
- const field = fields.shift()!
365
-
366
- if (typeof curr[field] !== 'object' || curr[field] === null)
367
- curr[field] = isNaN(Number(fields[0])) ? {} : []
368
-
369
- curr = curr[field]
37
+ let val = String(fieldValue).replaceAll('/', this.SLASH_ASCII)
38
+ if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField)) {
39
+ val = await Cipher.encrypt(val, true)
370
40
  }
371
-
372
- const lastKey = fields.shift()!
373
-
374
- curr[lastKey] = this.parseValue(fieldVal[fullField].replaceAll(this.SLASH_ASCII, '/'))
41
+ keys.data.push(`${_id}/${newField}/${val}`)
42
+ keys.indexes.push(`${newField}/${val}/${_id}`)
375
43
  }
376
44
 
377
- return data
45
+ return keys
378
46
  }
379
47
 
380
48
  static parseValue(value: string) {
@@ -0,0 +1,65 @@
1
+ import path from 'node:path'
2
+ import TTID from '@delma/ttid'
3
+ import type { StorageEngine } from '../types'
4
+ import type { StoredDoc } from './types'
5
+
6
+ export class S3FilesDocuments {
7
+ constructor(
8
+ private readonly storage: StorageEngine,
9
+ private readonly docsRoot: (collection: string) => string,
10
+ private readonly docPath: (collection: string, docId: _ttid) => string,
11
+ private readonly ensureCollection: (collection: string) => Promise<void>,
12
+ private readonly encodeEncrypted: <T extends Record<string, any>>(
13
+ collection: string,
14
+ value: T,
15
+ parentField?: string
16
+ ) => Promise<T>,
17
+ private readonly decodeEncrypted: <T extends Record<string, any>>(
18
+ collection: string,
19
+ value: T,
20
+ parentField?: string
21
+ ) => Promise<T>
22
+ ) {}
23
+
24
+ async readStoredDoc<T extends Record<string, any>>(
25
+ collection: string,
26
+ docId: _ttid
27
+ ): Promise<StoredDoc<T> | null> {
28
+ const target = this.docPath(collection, docId)
29
+
30
+ try {
31
+ const raw = JSON.parse(await this.storage.read(target)) as T
32
+ const decoded = await this.decodeEncrypted(collection, raw)
33
+ const { createdAt, updatedAt } = TTID.decodeTime(docId)
34
+
35
+ return {
36
+ id: docId,
37
+ createdAt,
38
+ updatedAt: updatedAt ?? createdAt,
39
+ data: decoded
40
+ }
41
+ } catch (err) {
42
+ if ((err as NodeJS.ErrnoException).code === 'ENOENT') return null
43
+ throw err
44
+ }
45
+ }
46
+
47
+ async writeStoredDoc<T extends Record<string, any>>(collection: string, docId: _ttid, data: T) {
48
+ await this.ensureCollection(collection)
49
+ const encoded = await this.encodeEncrypted(collection, data)
50
+ const target = this.docPath(collection, docId)
51
+ await this.storage.write(target, JSON.stringify(encoded))
52
+ }
53
+
54
+ async removeStoredDoc(collection: string, docId: _ttid) {
55
+ await this.storage.delete(this.docPath(collection, docId))
56
+ }
57
+
58
+ async listDocIds(collection: string) {
59
+ const files = await this.storage.list(this.docsRoot(collection))
60
+ return files
61
+ .filter((file) => file.endsWith('.json'))
62
+ .map((file) => path.basename(file, '.json'))
63
+ .filter((key) => TTID.isTTID(key)) as _ttid[]
64
+ }
65
+ }
@@ -0,0 +1,172 @@
1
+ import { mkdir, open, readFile, readdir, rm, stat, writeFile } from 'node:fs/promises'
2
+ import path from 'node:path'
3
+ import type { EventBus, LockManager, StorageEngine } from '../types'
4
+ import type { S3FilesEvent } from './types'
5
+
6
+ export class FilesystemStorage implements StorageEngine {
7
+ async read(target: string): Promise<string> {
8
+ return await readFile(target, 'utf8')
9
+ }
10
+
11
+ async write(target: string, data: string): Promise<void> {
12
+ await mkdir(path.dirname(target), { recursive: true })
13
+ await writeFile(target, data, 'utf8')
14
+ }
15
+
16
+ async delete(target: string): Promise<void> {
17
+ await rm(target, { recursive: true, force: true })
18
+ }
19
+
20
+ async list(target: string): Promise<string[]> {
21
+ const results: string[] = []
22
+
23
+ try {
24
+ const entries = await readdir(target, { withFileTypes: true })
25
+ for (const entry of entries) {
26
+ const child = path.join(target, entry.name)
27
+ if (entry.isDirectory()) {
28
+ results.push(...(await this.list(child)))
29
+ } else {
30
+ results.push(child)
31
+ }
32
+ }
33
+ } catch (err) {
34
+ if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
35
+ }
36
+
37
+ return results
38
+ }
39
+
40
+ async mkdir(target: string): Promise<void> {
41
+ await mkdir(target, { recursive: true })
42
+ }
43
+
44
+ async rmdir(target: string): Promise<void> {
45
+ await rm(target, { recursive: true, force: true })
46
+ }
47
+
48
+ async exists(target: string): Promise<boolean> {
49
+ try {
50
+ await stat(target)
51
+ return true
52
+ } catch (err) {
53
+ if ((err as NodeJS.ErrnoException).code === 'ENOENT') return false
54
+ throw err
55
+ }
56
+ }
57
+ }
58
+
59
/**
 * Advisory per-document locks backed by atomic directory creation: a
 * non-recursive `mkdir` fails with EEXIST when the lock directory already
 * exists, which is the mutual-exclusion primitive. Lock ownership and age
 * live in a `meta.json` inside the lock directory.
 */
export class FilesystemLockManager implements LockManager {
  constructor(
    private readonly root: string,
    private readonly storage: StorageEngine
  ) {}

  // Lock directory for one document: <root>/<collection>/.fylo/locks/<docId>.lock
  private lockDir(collection: string, docId: _ttid) {
    return path.join(this.root, collection, '.fylo', 'locks', `${docId}.lock`)
  }

  /**
   * Try to acquire the lock for `docId`, returning true on success.
   * A held lock older than `ttlMs` (per its meta timestamp) is treated as
   * stale and taken over.
   */
  async acquire(
    collection: string,
    docId: _ttid,
    owner: string,
    ttlMs: number = 30_000
  ): Promise<boolean> {
    const dir = this.lockDir(collection, docId)
    const metaPath = path.join(dir, 'meta.json')
    await mkdir(path.dirname(dir), { recursive: true })

    // Fast path: atomic mkdir succeeds only if no one holds the lock.
    try {
      await mkdir(dir, { recursive: false })
      await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
      return true
    } catch (err) {
      // EEXIST means contended — fall through to the stale-lock checks.
      if ((err as NodeJS.ErrnoException).code !== 'EEXIST') throw err
    }

    try {
      const meta = JSON.parse(await this.storage.read(metaPath)) as { ts?: number }
      if (meta.ts && Date.now() - meta.ts > ttlMs) {
        // Stale holder: tear the lock down and re-acquire.
        // NOTE(review): rmdir→mkdir is not atomic; a competing acquirer can
        // create the dir in between, making this mkdir throw EEXIST, which
        // rejects the promise instead of returning false — confirm intended.
        await this.storage.rmdir(dir)
        await mkdir(dir, { recursive: false })
        await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
        return true
      }
    } catch {
      // Meta unreadable/missing: treated as an abandoned lock and stolen.
      // NOTE(review): a live acquirer that has done its mkdir but not yet
      // written meta.json lands in this branch and loses its lock — verify
      // that this crash-recovery heuristic is acceptable.
      await this.storage.rmdir(dir)
      await mkdir(dir, { recursive: false })
      await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
      return true
    }

    // Lock is held and fresh.
    return false
  }

  /**
   * Release the lock, but only if `owner` matches the recorded holder.
   * A missing lock (ENOENT) is silently ignored; other errors propagate.
   */
  async release(collection: string, docId: _ttid, owner: string): Promise<void> {
    const dir = this.lockDir(collection, docId)
    const metaPath = path.join(dir, 'meta.json')

    try {
      const meta = JSON.parse(await this.storage.read(metaPath)) as { owner?: string }
      if (meta.owner === owner) await this.storage.rmdir(dir)
    } catch (err) {
      if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
    }
  }
}
117
+
118
+ export class FilesystemEventBus<T extends Record<string, any>> implements EventBus<
119
+ S3FilesEvent<T>
120
+ > {
121
+ constructor(
122
+ private readonly root: string,
123
+ private readonly storage: StorageEngine
124
+ ) {}
125
+
126
+ private journalPath(collection: string) {
127
+ return path.join(this.root, collection, '.fylo', 'events', `${collection}.ndjson`)
128
+ }
129
+
130
+ async publish(collection: string, event: S3FilesEvent<T>): Promise<void> {
131
+ const target = this.journalPath(collection)
132
+ await mkdir(path.dirname(target), { recursive: true })
133
+ const line = `${JSON.stringify(event)}\n`
134
+ const handle = await open(target, 'a')
135
+ try {
136
+ await handle.write(line)
137
+ } finally {
138
+ await handle.close()
139
+ }
140
+ }
141
+
142
+ async *listen(collection: string): AsyncGenerator<S3FilesEvent<T>, void, unknown> {
143
+ const target = this.journalPath(collection)
144
+ let position = 0
145
+
146
+ while (true) {
147
+ try {
148
+ const fileStat = await stat(target)
149
+ if (fileStat.size > position) {
150
+ const handle = await open(target, 'r')
151
+ try {
152
+ const size = fileStat.size - position
153
+ const buffer = Buffer.alloc(size)
154
+ await handle.read(buffer, 0, size, position)
155
+ position = fileStat.size
156
+
157
+ for (const line of buffer.toString('utf8').split('\n')) {
158
+ if (line.trim().length === 0) continue
159
+ yield JSON.parse(line) as S3FilesEvent<T>
160
+ }
161
+ } finally {
162
+ await handle.close()
163
+ }
164
+ }
165
+ } catch (err) {
166
+ if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
167
+ }
168
+
169
+ await Bun.sleep(100)
170
+ }
171
+ }
172
+ }