@delma/fylo 1.1.2 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. package/README.md +141 -62
  2. package/eslint.config.js +8 -4
  3. package/package.json +9 -7
  4. package/src/CLI +16 -14
  5. package/src/adapters/cipher.ts +12 -6
  6. package/src/adapters/redis.ts +193 -123
  7. package/src/adapters/s3.ts +6 -12
  8. package/src/core/collection.ts +5 -0
  9. package/src/core/directory.ts +120 -151
  10. package/src/core/extensions.ts +4 -2
  11. package/src/core/format.ts +390 -419
  12. package/src/core/parser.ts +167 -142
  13. package/src/core/query.ts +31 -26
  14. package/src/core/walker.ts +68 -61
  15. package/src/core/write-queue.ts +7 -4
  16. package/src/engines/s3-files.ts +1068 -0
  17. package/src/engines/types.ts +21 -0
  18. package/src/index.ts +754 -378
  19. package/src/migrate-cli.ts +22 -0
  20. package/src/migrate.ts +74 -0
  21. package/src/types/bun-runtime.d.ts +73 -0
  22. package/src/types/fylo.d.ts +115 -27
  23. package/src/types/node-runtime.d.ts +61 -0
  24. package/src/types/query.d.ts +6 -2
  25. package/src/types/vendor-modules.d.ts +8 -7
  26. package/src/worker.ts +7 -1
  27. package/src/workers/write-worker.ts +25 -24
  28. package/tests/collection/truncate.test.js +35 -0
  29. package/tests/{data.ts → data.js} +8 -21
  30. package/tests/{index.ts → index.js} +4 -9
  31. package/tests/integration/aws-s3-files.canary.test.js +22 -0
  32. package/tests/integration/{create.test.ts → create.test.js} +13 -31
  33. package/tests/integration/delete.test.js +95 -0
  34. package/tests/integration/{edge-cases.test.ts → edge-cases.test.js} +50 -124
  35. package/tests/integration/{encryption.test.ts → encryption.test.js} +20 -65
  36. package/tests/integration/{export.test.ts → export.test.js} +8 -23
  37. package/tests/integration/{join-modes.test.ts → join-modes.test.js} +37 -104
  38. package/tests/integration/migration.test.js +38 -0
  39. package/tests/integration/nested.test.js +142 -0
  40. package/tests/integration/operators.test.js +122 -0
  41. package/tests/integration/{queue.test.ts → queue.test.js} +24 -40
  42. package/tests/integration/read.test.js +119 -0
  43. package/tests/integration/rollback.test.js +60 -0
  44. package/tests/integration/s3-files.test.js +192 -0
  45. package/tests/integration/update.test.js +99 -0
  46. package/tests/mocks/{cipher.ts → cipher.js} +11 -26
  47. package/tests/mocks/redis.js +123 -0
  48. package/tests/mocks/{s3.ts → s3.js} +24 -58
  49. package/tests/schemas/album.json +1 -1
  50. package/tests/schemas/comment.json +1 -1
  51. package/tests/schemas/photo.json +1 -1
  52. package/tests/schemas/post.json +1 -1
  53. package/tests/schemas/tip.json +1 -1
  54. package/tests/schemas/todo.json +1 -1
  55. package/tests/schemas/user.d.ts +12 -12
  56. package/tests/schemas/user.json +1 -1
  57. package/tsconfig.json +4 -2
  58. package/tsconfig.typecheck.json +31 -0
  59. package/tests/collection/truncate.test.ts +0 -56
  60. package/tests/integration/delete.test.ts +0 -147
  61. package/tests/integration/nested.test.ts +0 -212
  62. package/tests/integration/operators.test.ts +0 -167
  63. package/tests/integration/read.test.ts +0 -203
  64. package/tests/integration/rollback.test.ts +0 -105
  65. package/tests/integration/update.test.ts +0 -130
  66. package/tests/mocks/redis.ts +0 -169
@@ -1,16 +1,18 @@
1
- import { Walker } from "./walker"
2
- import TTID from "@delma/ttid"
3
- import { S3 } from "../adapters/s3"
4
- import { Redis } from "../adapters/redis"
5
- import { Cipher } from "../adapters/cipher"
1
+ import { Walker } from './walker'
2
+ import TTID from '@delma/ttid'
3
+ import { S3 } from '../adapters/s3'
4
+ import { Redis } from '../adapters/redis'
5
+ import { Cipher } from '../adapters/cipher'
6
6
 
7
7
  export class Dir {
8
-
9
8
  private static readonly KEY_LIMIT = 1024
10
9
 
11
- private static readonly SLASH_ASCII = "%2F"
10
+ private static readonly SLASH_ASCII = '%2F'
12
11
 
13
- private readonly transactions: Array<{ action: (...args: string[]) => Promise<void>, args: string[] }>;
12
+ private readonly transactions: Array<{
13
+ action: (...args: string[]) => Promise<void>
14
+ args: string[]
15
+ }>
14
16
 
15
17
  private static _redis: Redis | null = null
16
18
 
@@ -28,7 +30,6 @@ export class Dir {
28
30
  }
29
31
 
30
32
  static async reconstructData(collection: string, items: string[]) {
31
-
32
33
  items = await this.readValues(collection, items)
33
34
 
34
35
  let fieldVal: Record<string, string> = {}
@@ -41,7 +42,7 @@ export class Dir {
41
42
  // Decrypt value if field is encrypted — fieldPath starts with TTID segment
42
43
  // so strip it to get the actual field name for the check
43
44
  const fieldOnly = segs.slice(1).join('/')
44
- if(Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldOnly)) {
45
+ if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldOnly)) {
45
46
  fieldVal[fieldPath] = await Cipher.decrypt(val)
46
47
  } else {
47
48
  fieldVal[fieldPath] = val
@@ -52,15 +53,12 @@ export class Dir {
52
53
  }
53
54
 
54
55
  private static async readValues(collection: string, items: string[]) {
55
-
56
- for(let i = 0; i < items.length; i++) {
57
-
56
+ for (let i = 0; i < items.length; i++) {
58
57
  const segments = items[i].split('/')
59
58
 
60
59
  const filename = segments.pop()!
61
60
 
62
- if(TTID.isUUID(filename)) {
63
-
61
+ if (TTID.isUUID(filename)) {
64
62
  const file = S3.file(collection, items[i])
65
63
  const val = await file.text()
66
64
 
@@ -71,193 +69,176 @@ export class Dir {
71
69
  return items
72
70
  }
73
71
 
74
- private static async filterByTimestamp(_id: _ttid, indexes: string[], { updated, created }: { updated?: _timestamp, created?: _timestamp }) {
75
-
72
+ private static async filterByTimestamp(
73
+ _id: _ttid,
74
+ indexes: string[],
75
+ { updated, created }: { updated?: _timestamp; created?: _timestamp }
76
+ ) {
76
77
  const { createdAt, updatedAt } = TTID.decodeTime(_id)
77
78
 
78
- if(updated && updatedAt) {
79
-
80
- if((updated.$gt || updated.$gte) && (updated.$lt || updated.$lte)) {
81
-
82
- if(updated.$gt && updated.$lt) {
83
-
84
- if(updated.$gt! > updated.$lt!) throw new Error("Invalid updated query")
79
+ if (updated && updatedAt) {
80
+ if ((updated.$gt || updated.$gte) && (updated.$lt || updated.$lte)) {
81
+ if (updated.$gt && updated.$lt) {
82
+ if (updated.$gt! > updated.$lt!) throw new Error('Invalid updated query')
85
83
 
86
84
  indexes = updatedAt > updated.$gt! && updatedAt < updated.$lt! ? indexes : []
87
-
88
- } else if(updated.$gt && updated.$lte) {
89
-
90
- if(updated.$gt! > updated.$lte!) throw new Error("Invalid updated query")
85
+ } else if (updated.$gt && updated.$lte) {
86
+ if (updated.$gt! > updated.$lte!) throw new Error('Invalid updated query')
91
87
 
92
88
  indexes = updatedAt > updated.$gt! && updatedAt <= updated.$lte! ? indexes : []
93
-
94
- } else if(updated.$gte && updated.$lt) {
95
-
96
- if(updated.$gte! > updated.$lt!) throw new Error("Invalid updated query")
89
+ } else if (updated.$gte && updated.$lt) {
90
+ if (updated.$gte! > updated.$lt!) throw new Error('Invalid updated query')
97
91
 
98
92
  indexes = updatedAt >= updated.$gte! && updatedAt < updated.$lt! ? indexes : []
93
+ } else if (updated.$gte && updated.$lte) {
94
+ if (updated.$gte! > updated.$lte!) throw new Error('Invalid updated query')
99
95
 
100
- } else if(updated.$gte && updated.$lte) {
101
-
102
- if(updated.$gte! > updated.$lte!) throw new Error("Invalid updated query")
103
-
104
- indexes = updatedAt >= updated.$gte! && updatedAt <= updated.$lte! ? indexes : []
96
+ indexes =
97
+ updatedAt >= updated.$gte! && updatedAt <= updated.$lte! ? indexes : []
105
98
  }
106
-
107
- } else if((updated.$gt || updated.$gte) && !updated.$lt && !updated.$lte) {
108
-
109
- indexes = updated.$gt ? updatedAt > updated.$gt! ? indexes : [] : updatedAt >= updated.$gte! ? indexes : []
110
-
111
- } else if(!updated.$gt && !updated.$gte && (updated.$lt || updated.$lte)) {
112
-
113
- indexes = updated.$lt ? updatedAt < updated.$lt! ? indexes : [] : updatedAt <= updated.$lte! ? indexes : []
99
+ } else if ((updated.$gt || updated.$gte) && !updated.$lt && !updated.$lte) {
100
+ indexes = updated.$gt
101
+ ? updatedAt > updated.$gt!
102
+ ? indexes
103
+ : []
104
+ : updatedAt >= updated.$gte!
105
+ ? indexes
106
+ : []
107
+ } else if (!updated.$gt && !updated.$gte && (updated.$lt || updated.$lte)) {
108
+ indexes = updated.$lt
109
+ ? updatedAt < updated.$lt!
110
+ ? indexes
111
+ : []
112
+ : updatedAt <= updated.$lte!
113
+ ? indexes
114
+ : []
114
115
  }
115
116
  }
116
117
 
117
- if(created) {
118
-
119
- if((created.$gt || created.$gte) && (created.$lt || created.$lte)) {
120
-
121
- if(created.$gt && created.$lt) {
122
-
123
- if(created.$gt! > created.$lt!) throw new Error("Invalid created query")
118
+ if (created) {
119
+ if ((created.$gt || created.$gte) && (created.$lt || created.$lte)) {
120
+ if (created.$gt && created.$lt) {
121
+ if (created.$gt! > created.$lt!) throw new Error('Invalid created query')
124
122
 
125
123
  indexes = createdAt > created.$gt! && createdAt < created.$lt! ? indexes : []
126
-
127
- } else if(created.$gt && created.$lte) {
128
-
129
- if(created.$gt! > created.$lte!) throw new Error("Invalid updated query")
124
+ } else if (created.$gt && created.$lte) {
125
+ if (created.$gt! > created.$lte!) throw new Error('Invalid updated query')
130
126
 
131
127
  indexes = createdAt > created.$gt! && createdAt <= created.$lte! ? indexes : []
132
-
133
- } else if(created.$gte && created.$lt) {
134
-
135
- if(created.$gte! > created.$lt!) throw new Error("Invalid updated query")
128
+ } else if (created.$gte && created.$lt) {
129
+ if (created.$gte! > created.$lt!) throw new Error('Invalid updated query')
136
130
 
137
131
  indexes = createdAt >= created.$gte! && createdAt < created.$lt! ? indexes : []
132
+ } else if (created.$gte && created.$lte) {
133
+ if (created.$gte! > created.$lte!) throw new Error('Invalid updated query')
138
134
 
139
- } else if(created.$gte && created.$lte) {
140
-
141
- if(created.$gte! > created.$lte!) throw new Error("Invalid updated query")
142
-
143
- indexes = createdAt >= created.$gte! && createdAt <= created.$lte! ? indexes : []
135
+ indexes =
136
+ createdAt >= created.$gte! && createdAt <= created.$lte! ? indexes : []
144
137
  }
145
-
146
- } else if((created.$gt || created.$gte) && !created.$lt && !created.$lte) {
147
-
148
- if(created.$gt) indexes = createdAt > created.$gt! ? indexes : []
149
- else if(created.$gte) indexes = createdAt >= created.$gte! ? indexes : []
150
-
151
- } else if(!created.$gt && !created.$gte && (created.$lt || created.$lte)) {
152
-
153
- if(created.$lt) indexes = createdAt < created.$lt! ? indexes : []
154
- else if(created.$lte) indexes = createdAt <= created.$lte! ? indexes : []
138
+ } else if ((created.$gt || created.$gte) && !created.$lt && !created.$lte) {
139
+ if (created.$gt) indexes = createdAt > created.$gt! ? indexes : []
140
+ else if (created.$gte) indexes = createdAt >= created.$gte! ? indexes : []
141
+ } else if (!created.$gt && !created.$gte && (created.$lt || created.$lte)) {
142
+ if (created.$lt) indexes = createdAt < created.$lt! ? indexes : []
143
+ else if (created.$lte) indexes = createdAt <= created.$lte! ? indexes : []
155
144
  }
156
145
  }
157
146
 
158
147
  return indexes.length > 0
159
148
  }
160
149
 
161
- static async *searchDocs<T extends Record<string, any>>(collection: string, pattern: string | string[], { updated, created }: { updated?: _timestamp, created?: _timestamp }, { listen = false, skip = false }: { listen: boolean, skip: boolean }, deleted: boolean = false): AsyncGenerator<Record<_ttid, T> | _ttid | void, void, { count: number, limit?: number }> {
162
-
150
+ static async *searchDocs<T extends Record<string, any>>(
151
+ collection: string,
152
+ pattern: string | string[],
153
+ { updated, created }: { updated?: _timestamp; created?: _timestamp },
154
+ { listen = false, skip = false }: { listen: boolean; skip: boolean },
155
+ deleted: boolean = false
156
+ ): AsyncGenerator<Record<_ttid, T> | _ttid | void, void, { count: number; limit?: number }> {
163
157
  const data = yield
164
158
  let count = data.count
165
159
  let limit = data.limit
166
160
 
167
161
  const constructData = async (collection: string, _id: _ttid, items: string[]) => {
168
-
169
- if(created || updated) {
170
-
171
- if(await this.filterByTimestamp(_id, items, { created, updated })) {
172
-
162
+ if (created || updated) {
163
+ if (await this.filterByTimestamp(_id, items, { created, updated })) {
173
164
  const data = await this.reconstructData(collection, items)
174
165
 
175
166
  return { [_id]: data } as Record<_ttid, T>
176
-
177
167
  } else return {}
178
-
179
168
  } else {
180
-
181
169
  const data = await this.reconstructData(collection, items)
182
170
 
183
171
  return { [_id]: data } as Record<_ttid, T>
184
172
  }
185
173
  }
186
174
 
187
- const processQuery = async function*(p: string): AsyncGenerator<Record<_ttid, T> | _ttid | void, void, { count: number, limit?: number }> {
188
-
175
+ const processQuery = async function* (
176
+ p: string
177
+ ): AsyncGenerator<
178
+ Record<_ttid, T> | _ttid | void,
179
+ void,
180
+ { count: number; limit?: number }
181
+ > {
189
182
  let finished = false
190
183
 
191
- if(listen && !deleted) {
192
-
184
+ if (listen && !deleted) {
193
185
  const iter = Walker.search(collection, p, { listen, skip })
194
186
 
195
187
  do {
196
-
197
188
  const { value, done } = await iter.next({ count, limit })
198
189
 
199
- if(done) finished = true
190
+ if (done) finished = true
200
191
 
201
- if(value) {
192
+ if (value) {
202
193
  const data = yield await constructData(collection, value._id, value.data)
203
194
  count = data.count
204
195
  limit = data.limit
205
196
  }
206
-
207
- } while(!finished)
208
-
209
- } else if(listen && deleted) {
210
-
211
- const iter = Walker.search(collection, p, { listen, skip }, "delete")
197
+ } while (!finished)
198
+ } else if (listen && deleted) {
199
+ const iter = Walker.search(collection, p, { listen, skip }, 'delete')
212
200
 
213
201
  do {
214
-
215
202
  const { value, done } = await iter.next({ count, limit })
216
203
 
217
- if(done) finished = true
204
+ if (done) finished = true
218
205
 
219
- if(value) {
206
+ if (value) {
220
207
  const data = yield value._id
221
208
  count = data.count
222
209
  limit = data.limit
223
210
  }
224
-
225
- } while(!finished)
226
-
211
+ } while (!finished)
227
212
  } else {
228
-
229
213
  const iter = Walker.search(collection, p, { listen, skip })
230
214
 
231
215
  do {
232
-
233
216
  const { value, done } = await iter.next({ count, limit })
234
217
 
235
- if(done) finished = true
218
+ if (done) finished = true
236
219
 
237
- if(value) {
220
+ if (value) {
238
221
  const data = yield await constructData(collection, value._id, value.data)
239
222
  count = data.count
240
223
  limit = data.limit
241
224
  }
242
-
243
- } while(!finished)
225
+ } while (!finished)
244
226
  }
245
227
  }
246
228
 
247
- if(Array.isArray(pattern)) {
248
-
249
- for(const p of pattern) yield* processQuery(p)
250
-
229
+ if (Array.isArray(pattern)) {
230
+ for (const p of pattern) yield* processQuery(p)
251
231
  } else yield* processQuery(pattern)
252
232
  }
253
233
 
254
- async putKeys(collection: string, { dataKey, indexKey }: { dataKey: string, indexKey: string }) {
255
-
234
+ async putKeys(
235
+ collection: string,
236
+ { dataKey, indexKey }: { dataKey: string; indexKey: string }
237
+ ) {
256
238
  let dataBody: string | undefined
257
239
  let indexBody: string | undefined
258
240
 
259
- if(dataKey.length > Dir.KEY_LIMIT) {
260
-
241
+ if (dataKey.length > Dir.KEY_LIMIT) {
261
242
  const dataSegs = dataKey.split('/')
262
243
 
263
244
  dataBody = dataSegs.pop()!
@@ -265,8 +246,7 @@ export class Dir {
265
246
  indexKey = `${dataSegs.join('/')}/${Bun.randomUUIDv7()}`
266
247
  }
267
248
 
268
- if(indexKey.length > Dir.KEY_LIMIT) {
269
-
249
+ if (indexKey.length > Dir.KEY_LIMIT) {
270
250
  const indexSegs = indexKey.split('/')
271
251
 
272
252
  const _id = indexSegs.pop()! as _ttid
@@ -295,23 +275,18 @@ export class Dir {
295
275
  }
296
276
 
297
277
  async executeRollback() {
298
-
299
278
  do {
300
-
301
279
  const transaction = this.transactions.pop()
302
280
 
303
- if(transaction) {
304
-
281
+ if (transaction) {
305
282
  const { action, args } = transaction
306
283
 
307
284
  await action(...args)
308
285
  }
309
-
310
- } while(this.transactions.length > 0)
286
+ } while (this.transactions.length > 0)
311
287
  }
312
288
 
313
289
  async deleteKeys(collection: string, dataKey: string) {
314
-
315
290
  const segments = dataKey.split('/')
316
291
 
317
292
  const _id = segments.shift()!
@@ -324,13 +299,10 @@ export class Dir {
324
299
  let dataBody: string | undefined
325
300
  let indexBody: string | undefined
326
301
 
327
- if(dataFile.size > 0) dataBody = await dataFile.text()
328
- if(indexFile.size > 0) indexBody = await indexFile.text()
302
+ if (dataFile.size > 0) dataBody = await dataFile.text()
303
+ if (indexFile.size > 0) indexBody = await indexFile.text()
329
304
 
330
- await Promise.all([
331
- S3.delete(collection, indexKey),
332
- S3.delete(collection, dataKey)
333
- ])
305
+ await Promise.all([S3.delete(collection, indexKey), S3.delete(collection, dataKey)])
334
306
 
335
307
  this.transactions.push({
336
308
  action: S3.put,
@@ -346,31 +318,32 @@ export class Dir {
346
318
  }
347
319
 
348
320
  static async extractKeys<T>(collection: string, _id: _ttid, data: T, parentField?: string) {
321
+ const keys: { data: string[]; indexes: string[] } = { data: [], indexes: [] }
349
322
 
350
- const keys: { data: string[], indexes: string[] } = { data: [], indexes: [] }
351
-
352
- const obj = {...data}
353
-
354
- for(const field in obj) {
323
+ const obj = { ...data }
355
324
 
325
+ for (const field in obj) {
356
326
  const newField = parentField ? `${parentField}/${field}` : field
357
327
 
358
- if(typeof obj[field] === 'object' && !Array.isArray(obj[field])) {
328
+ if (typeof obj[field] === 'object' && !Array.isArray(obj[field])) {
359
329
  const items = await this.extractKeys(collection, _id, obj[field], newField)
360
330
  keys.data.push(...items.data)
361
331
  keys.indexes.push(...items.indexes)
362
- } else if(typeof obj[field] === 'object' && Array.isArray(obj[field])) {
332
+ } else if (typeof obj[field] === 'object' && Array.isArray(obj[field])) {
363
333
  const items: (string | number | boolean)[] = obj[field]
364
- if(items.some((item) => typeof item === 'object')) throw new Error(`Cannot have an array of objects`)
365
- for(let i = 0; i < items.length; i++) {
334
+ if (items.some((item) => typeof item === 'object'))
335
+ throw new Error(`Cannot have an array of objects`)
336
+ for (let i = 0; i < items.length; i++) {
366
337
  let val = String(items[i]).split('/').join(this.SLASH_ASCII)
367
- if(Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField)) val = await Cipher.encrypt(val, true)
338
+ if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField))
339
+ val = await Cipher.encrypt(val, true)
368
340
  keys.data.push(`${_id}/${newField}/${i}/${val}`)
369
341
  keys.indexes.push(`${newField}/${i}/${val}/${_id}`)
370
342
  }
371
343
  } else {
372
344
  let val = String(obj[field]).replaceAll('/', this.SLASH_ASCII)
373
- if(Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField)) val = await Cipher.encrypt(val, true)
345
+ if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, newField))
346
+ val = await Cipher.encrypt(val, true)
374
347
  keys.data.push(`${_id}/${newField}/${val}`)
375
348
  keys.indexes.push(`${newField}/${val}/${_id}`)
376
349
  }
@@ -380,20 +353,17 @@ export class Dir {
380
353
  }
381
354
 
382
355
  static constructData(fieldVal: Record<string, string>) {
383
-
384
356
  const data: Record<string, any> = {}
385
357
 
386
- for(let fullField in fieldVal) {
387
-
358
+ for (let fullField in fieldVal) {
388
359
  const fields = fullField.split('/').slice(1)
389
360
 
390
361
  let curr = data
391
362
 
392
- while(fields.length > 1) {
393
-
363
+ while (fields.length > 1) {
394
364
  const field = fields.shift()!
395
365
 
396
- if(typeof curr[field] !== 'object' || curr[field] === null)
366
+ if (typeof curr[field] !== 'object' || curr[field] === null)
397
367
  curr[field] = isNaN(Number(fields[0])) ? {} : []
398
368
 
399
369
  curr = curr[field]
@@ -408,7 +378,6 @@ export class Dir {
408
378
  }
409
379
 
410
380
  static parseValue(value: string) {
411
-
412
381
  try {
413
382
  return JSON.parse(value)
414
383
  } catch {
@@ -1,10 +1,12 @@
1
1
  /* eslint-disable @typescript-eslint/no-explicit-any */
2
2
 
3
- Object.appendGroup = function (target: Record<string, any>, source: Record<string, any>): Record<string, any> {
3
+ Object.appendGroup = function (
4
+ target: Record<string, any>,
5
+ source: Record<string, any>
6
+ ): Record<string, any> {
4
7
  const result = { ...target }
5
8
 
6
9
  for (const [sourceId, sourceGroup] of Object.entries(source)) {
7
-
8
10
  if (!result[sourceId]) {
9
11
  result[sourceId] = sourceGroup
10
12
  continue