@delma/fylo 2.0.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +185 -267
- package/package.json +2 -5
- package/src/core/directory.ts +22 -354
- package/src/engines/s3-files/documents.ts +65 -0
- package/src/engines/s3-files/filesystem.ts +172 -0
- package/src/engines/s3-files/query.ts +291 -0
- package/src/engines/s3-files/types.ts +42 -0
- package/src/engines/s3-files.ts +391 -510
- package/src/engines/types.ts +1 -1
- package/src/index.ts +142 -1237
- package/src/sync.ts +58 -0
- package/src/types/fylo.d.ts +66 -161
- package/src/types/node-runtime.d.ts +1 -0
- package/tests/collection/truncate.test.js +11 -10
- package/tests/helpers/root.js +7 -0
- package/tests/integration/create.test.js +9 -9
- package/tests/integration/delete.test.js +16 -14
- package/tests/integration/edge-cases.test.js +29 -25
- package/tests/integration/encryption.test.js +47 -30
- package/tests/integration/export.test.js +11 -11
- package/tests/integration/join-modes.test.js +16 -16
- package/tests/integration/nested.test.js +26 -24
- package/tests/integration/operators.test.js +43 -29
- package/tests/integration/read.test.js +25 -21
- package/tests/integration/rollback.test.js +21 -51
- package/tests/integration/s3-files.performance.test.js +75 -0
- package/tests/integration/s3-files.test.js +115 -18
- package/tests/integration/sync.test.js +154 -0
- package/tests/integration/update.test.js +24 -18
- package/src/adapters/redis.ts +0 -487
- package/src/adapters/s3.ts +0 -61
- package/src/core/walker.ts +0 -174
- package/src/core/write-queue.ts +0 -59
- package/src/migrate-cli.ts +0 -22
- package/src/migrate.ts +0 -74
- package/src/types/write-queue.ts +0 -42
- package/src/worker.ts +0 -18
- package/src/workers/write-worker.ts +0 -120
- package/tests/index.js +0 -14
- package/tests/integration/migration.test.js +0 -38
- package/tests/integration/queue.test.js +0 -83
- package/tests/mocks/redis.js +0 -123
- package/tests/mocks/s3.js +0 -80
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
import TTID from '@delma/ttid'
|
|
2
|
+
import { Cipher } from '../../adapters/cipher'
|
|
3
|
+
import type { CollectionIndexCache, FyloRecord, S3FilesQueryResult } from './types'
|
|
4
|
+
|
|
5
|
+
// Dependencies injected by the owning S3-files engine. Keeping these as
// callbacks decouples the query logic from index storage/loading.
type QueryContext = {
  // Resolves the secondary-index cache for a collection (async: may hit storage).
  loadIndexCache: (collection: string) => Promise<CollectionIndexCache>
  // Converts a raw string value into its stored index representation:
  // the (possibly re-encoded) raw value, its lookup hash, a type tag, and a
  // numeric form when the value parses as a number (null otherwise).
  normalizeIndexValue: (rawValue: string) => {
    rawValue: string
    valueHash: string
    valueType: string
    numericValue: number | null
  }
}
|
|
14
|
+
|
|
15
|
+
export class S3FilesQueryEngine {
|
|
16
|
+
constructor(private readonly context: QueryContext) {}
|
|
17
|
+
|
|
18
|
+
getValueByPath(target: Record<string, any>, fieldPath: string) {
|
|
19
|
+
return fieldPath
|
|
20
|
+
.replaceAll('/', '.')
|
|
21
|
+
.split('.')
|
|
22
|
+
.reduce<any>(
|
|
23
|
+
(acc, key) => (acc === undefined || acc === null ? undefined : acc[key]),
|
|
24
|
+
target
|
|
25
|
+
)
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
normalizeFieldPath(fieldPath: string) {
|
|
29
|
+
return fieldPath.replaceAll('.', '/')
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
matchesTimestamp(docId: _ttid, query?: _storeQuery<Record<string, any>>) {
|
|
33
|
+
if (!query?.$created && !query?.$updated) return true
|
|
34
|
+
const { createdAt, updatedAt } = TTID.decodeTime(docId)
|
|
35
|
+
const timestamps = { createdAt, updatedAt: updatedAt ?? createdAt }
|
|
36
|
+
|
|
37
|
+
const match = (value: number, range?: _timestamp) => {
|
|
38
|
+
if (!range) return true
|
|
39
|
+
if (range.$gt !== undefined && !(value > range.$gt)) return false
|
|
40
|
+
if (range.$gte !== undefined && !(value >= range.$gte)) return false
|
|
41
|
+
if (range.$lt !== undefined && !(value < range.$lt)) return false
|
|
42
|
+
if (range.$lte !== undefined && !(value <= range.$lte)) return false
|
|
43
|
+
return true
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
return (
|
|
47
|
+
match(timestamps.createdAt, query.$created) &&
|
|
48
|
+
match(timestamps.updatedAt, query.$updated)
|
|
49
|
+
)
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
likeToRegex(pattern: string) {
|
|
53
|
+
const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&').replaceAll('%', '.*')
|
|
54
|
+
return new RegExp(`^${escaped}$`)
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
matchesOperand(value: unknown, operand: _operand) {
|
|
58
|
+
if (operand.$eq !== undefined && value != operand.$eq) return false
|
|
59
|
+
if (operand.$ne !== undefined && value == operand.$ne) return false
|
|
60
|
+
if (operand.$gt !== undefined && !(Number(value) > operand.$gt)) return false
|
|
61
|
+
if (operand.$gte !== undefined && !(Number(value) >= operand.$gte)) return false
|
|
62
|
+
if (operand.$lt !== undefined && !(Number(value) < operand.$lt)) return false
|
|
63
|
+
if (operand.$lte !== undefined && !(Number(value) <= operand.$lte)) return false
|
|
64
|
+
if (
|
|
65
|
+
operand.$like !== undefined &&
|
|
66
|
+
(typeof value !== 'string' || !this.likeToRegex(operand.$like).test(value))
|
|
67
|
+
)
|
|
68
|
+
return false
|
|
69
|
+
if (operand.$contains !== undefined) {
|
|
70
|
+
if (!Array.isArray(value) || !value.some((item) => item == operand.$contains))
|
|
71
|
+
return false
|
|
72
|
+
}
|
|
73
|
+
return true
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
async normalizeQueryValue(collection: string, fieldPath: string, value: unknown) {
|
|
77
|
+
let rawValue = String(value).replaceAll('/', '%2F')
|
|
78
|
+
if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldPath))
|
|
79
|
+
rawValue = await Cipher.encrypt(rawValue, true)
|
|
80
|
+
return this.context.normalizeIndexValue(rawValue)
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
intersectDocIds(current: Set<_ttid> | null, next: Iterable<_ttid>) {
|
|
84
|
+
const nextSet = next instanceof Set ? next : new Set(next)
|
|
85
|
+
if (current === null) return new Set(nextSet)
|
|
86
|
+
|
|
87
|
+
const intersection = new Set<_ttid>()
|
|
88
|
+
for (const docId of current) {
|
|
89
|
+
if (nextSet.has(docId)) intersection.add(docId)
|
|
90
|
+
}
|
|
91
|
+
return intersection
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
async candidateDocIdsForOperand(
|
|
95
|
+
collection: string,
|
|
96
|
+
fieldPath: string,
|
|
97
|
+
operand: _operand
|
|
98
|
+
): Promise<Set<_ttid> | null> {
|
|
99
|
+
if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldPath)) {
|
|
100
|
+
const unsupported =
|
|
101
|
+
operand.$ne !== undefined ||
|
|
102
|
+
operand.$gt !== undefined ||
|
|
103
|
+
operand.$gte !== undefined ||
|
|
104
|
+
operand.$lt !== undefined ||
|
|
105
|
+
operand.$lte !== undefined ||
|
|
106
|
+
operand.$like !== undefined ||
|
|
107
|
+
operand.$contains !== undefined
|
|
108
|
+
|
|
109
|
+
if (unsupported) {
|
|
110
|
+
throw new Error(`Operator is not supported on encrypted field: ${fieldPath}`)
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
const cache = await this.context.loadIndexCache(collection)
|
|
115
|
+
let candidateIds: Set<_ttid> | null = null
|
|
116
|
+
|
|
117
|
+
if (operand.$eq !== undefined) {
|
|
118
|
+
const normalized = await this.normalizeQueryValue(collection, fieldPath, operand.$eq)
|
|
119
|
+
candidateIds = this.intersectDocIds(
|
|
120
|
+
candidateIds,
|
|
121
|
+
cache.fieldHash.get(fieldPath)?.get(normalized.valueHash) ?? new Set<_ttid>()
|
|
122
|
+
)
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
if (
|
|
126
|
+
operand.$gt !== undefined ||
|
|
127
|
+
operand.$gte !== undefined ||
|
|
128
|
+
operand.$lt !== undefined ||
|
|
129
|
+
operand.$lte !== undefined
|
|
130
|
+
) {
|
|
131
|
+
const numericMatches = new Set<_ttid>()
|
|
132
|
+
for (const entry of cache.fieldNumeric.get(fieldPath) ?? []) {
|
|
133
|
+
if (operand.$gt !== undefined && !(entry.numericValue > operand.$gt)) continue
|
|
134
|
+
if (operand.$gte !== undefined && !(entry.numericValue >= operand.$gte)) continue
|
|
135
|
+
if (operand.$lt !== undefined && !(entry.numericValue < operand.$lt)) continue
|
|
136
|
+
if (operand.$lte !== undefined && !(entry.numericValue <= operand.$lte)) continue
|
|
137
|
+
numericMatches.add(entry.docId)
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
candidateIds = this.intersectDocIds(candidateIds, numericMatches)
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
if (operand.$like !== undefined) {
|
|
144
|
+
const regex = this.likeToRegex(operand.$like.replaceAll('/', '%2F'))
|
|
145
|
+
const stringMatches = new Set<_ttid>()
|
|
146
|
+
for (const entry of cache.fieldString.get(fieldPath) ?? []) {
|
|
147
|
+
if (regex.test(entry.rawValue)) stringMatches.add(entry.docId)
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
candidateIds = this.intersectDocIds(candidateIds, stringMatches)
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
if (operand.$contains !== undefined) {
|
|
154
|
+
const normalized = await this.normalizeQueryValue(
|
|
155
|
+
collection,
|
|
156
|
+
fieldPath,
|
|
157
|
+
operand.$contains
|
|
158
|
+
)
|
|
159
|
+
const containsMatches = new Set<_ttid>()
|
|
160
|
+
for (const [candidateFieldPath, hashes] of cache.fieldHash.entries()) {
|
|
161
|
+
if (
|
|
162
|
+
candidateFieldPath !== fieldPath &&
|
|
163
|
+
!candidateFieldPath.startsWith(`${fieldPath}/`)
|
|
164
|
+
)
|
|
165
|
+
continue
|
|
166
|
+
for (const docId of hashes.get(normalized.valueHash) ?? [])
|
|
167
|
+
containsMatches.add(docId)
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
candidateIds = this.intersectDocIds(candidateIds, containsMatches)
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
return candidateIds
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
async candidateDocIdsForOperation<T extends Record<string, any>>(
|
|
177
|
+
collection: string,
|
|
178
|
+
operation: _op<T>
|
|
179
|
+
): Promise<Set<_ttid> | null> {
|
|
180
|
+
let candidateIds: Set<_ttid> | null = null
|
|
181
|
+
|
|
182
|
+
for (const [field, operand] of Object.entries(operation) as Array<[keyof T, _operand]>) {
|
|
183
|
+
if (!operand) continue
|
|
184
|
+
|
|
185
|
+
const fieldPath = this.normalizeFieldPath(String(field))
|
|
186
|
+
const fieldCandidates = await this.candidateDocIdsForOperand(
|
|
187
|
+
collection,
|
|
188
|
+
fieldPath,
|
|
189
|
+
operand
|
|
190
|
+
)
|
|
191
|
+
|
|
192
|
+
if (fieldCandidates === null) continue
|
|
193
|
+
candidateIds = this.intersectDocIds(candidateIds, fieldCandidates)
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
return candidateIds
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
async candidateDocIdsForQuery<T extends Record<string, any>>(
|
|
200
|
+
collection: string,
|
|
201
|
+
query?: _storeQuery<T>
|
|
202
|
+
): Promise<Set<_ttid> | null> {
|
|
203
|
+
if (!query?.$ops || query.$ops.length === 0) return null
|
|
204
|
+
|
|
205
|
+
const union = new Set<_ttid>()
|
|
206
|
+
let usedIndex = false
|
|
207
|
+
|
|
208
|
+
for (const operation of query.$ops) {
|
|
209
|
+
const candidateIds = await this.candidateDocIdsForOperation(collection, operation)
|
|
210
|
+
if (candidateIds === null) return null
|
|
211
|
+
usedIndex = true
|
|
212
|
+
for (const docId of candidateIds) union.add(docId)
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
return usedIndex ? union : null
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
matchesQuery<T extends Record<string, any>>(docId: _ttid, doc: T, query?: _storeQuery<T>) {
|
|
219
|
+
if (!this.matchesTimestamp(docId, query as _storeQuery<Record<string, any>> | undefined))
|
|
220
|
+
return false
|
|
221
|
+
if (!query?.$ops || query.$ops.length === 0) return true
|
|
222
|
+
|
|
223
|
+
return query.$ops.some((operation) => {
|
|
224
|
+
for (const field in operation) {
|
|
225
|
+
const value = this.getValueByPath(doc, field)
|
|
226
|
+
if (!this.matchesOperand(value, operation[field as keyof T]!)) return false
|
|
227
|
+
}
|
|
228
|
+
return true
|
|
229
|
+
})
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
selectValues<T extends Record<string, any>>(selection: Array<keyof T>, data: T) {
|
|
233
|
+
const copy = { ...data }
|
|
234
|
+
for (const field in copy) {
|
|
235
|
+
if (!selection.includes(field as keyof T)) delete copy[field]
|
|
236
|
+
}
|
|
237
|
+
return copy
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
renameFields<T extends Record<string, any>>(rename: Record<keyof Partial<T>, string>, data: T) {
|
|
241
|
+
const copy = { ...data }
|
|
242
|
+
for (const field in copy) {
|
|
243
|
+
if (rename[field]) {
|
|
244
|
+
copy[rename[field]] = copy[field]
|
|
245
|
+
delete copy[field]
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
return copy
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
processDoc<T extends Record<string, any>>(
|
|
252
|
+
doc: FyloRecord<T>,
|
|
253
|
+
query?: _storeQuery<T>
|
|
254
|
+
): S3FilesQueryResult<T> | undefined {
|
|
255
|
+
if (Object.keys(doc).length === 0) return
|
|
256
|
+
|
|
257
|
+
const next = { ...doc }
|
|
258
|
+
|
|
259
|
+
for (let [_id, data] of Object.entries(next)) {
|
|
260
|
+
if (query?.$select?.length)
|
|
261
|
+
data = this.selectValues(query.$select as Array<keyof T>, data)
|
|
262
|
+
if (query?.$rename) data = this.renameFields(query.$rename, data)
|
|
263
|
+
next[_id as _ttid] = data as T
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
if (query?.$groupby) {
|
|
267
|
+
const docGroup: Record<string, Record<string, Partial<T>>> = {}
|
|
268
|
+
for (const [id, data] of Object.entries(next)) {
|
|
269
|
+
const groupValue = data[query.$groupby] as string
|
|
270
|
+
if (groupValue) {
|
|
271
|
+
const groupData = { ...data }
|
|
272
|
+
delete groupData[query.$groupby]
|
|
273
|
+
docGroup[groupValue] = { [id]: groupData as Partial<T> }
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
if (query.$onlyIds) {
|
|
278
|
+
const groupedIds: Record<string, _ttid[]> = {}
|
|
279
|
+
for (const group in docGroup)
|
|
280
|
+
groupedIds[group] = Object.keys(docGroup[group]) as _ttid[]
|
|
281
|
+
return groupedIds
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
return docGroup
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
if (query?.$onlyIds) return Object.keys(next).shift() as _ttid
|
|
288
|
+
|
|
289
|
+
return next
|
|
290
|
+
}
|
|
291
|
+
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
// A result record as stored/returned by the engine: document id → document body.
export type FyloRecord<T extends Record<string, any>> = Record<_ttid, T>

// Every shape `processDoc` can produce, depending on $onlyIds / $groupby /
// $select / $rename:
//   - a single id              ($onlyIds without $groupby)
//   - the full record          (no shaping)
//   - group value → id list    ($groupby + $onlyIds)
//   - group value → partials   ($groupby)
//   - id → partial document    ($select / $rename)
export type S3FilesQueryResult<T extends Record<string, any>> =
  | _ttid
  | FyloRecord<T>
  | Record<string, _ttid[]>
  | Record<string, Record<_ttid, Partial<T>>>
  | Record<_ttid, Partial<T>>

// A change-log entry; `doc` is only present for inserts (absent on delete).
export type S3FilesEvent<T extends Record<string, any>> = {
  ts: number
  action: 'insert' | 'delete'
  id: _ttid
  doc?: T
}

// On-storage envelope for a single document. Timestamps are numeric
// (epoch-based — unit not visible here; see TTID.decodeTime usage).
export type StoredDoc<T extends Record<string, any>> = {
  id: _ttid
  createdAt: number
  updatedAt: number
  data: T
}

// One secondary-index entry for one field of one document.
// `numericValue` is null when the value does not parse as a number.
export type StoredIndexEntry = {
  fieldPath: string
  rawValue: string
  valueHash: string
  valueType: string
  numericValue: number | null
}

// Persisted per-collection index; `version` is pinned so future formats
// can be migrated.
export type StoredCollectionIndex = {
  version: 1
  docs: Record<_ttid, StoredIndexEntry[]>
}

// In-memory index derived from StoredCollectionIndex, pre-bucketed for the
// three lookup strategies the query engine uses:
//   fieldHash    — fieldPath → valueHash → doc ids    ($eq, $contains)
//   fieldNumeric — fieldPath → numeric entries        (range operators)
//   fieldString  — fieldPath → raw string entries     ($like)
export type CollectionIndexCache = {
  docs: Map<_ttid, StoredIndexEntry[]>
  fieldHash: Map<string, Map<string, Set<_ttid>>>
  fieldNumeric: Map<string, Array<{ docId: _ttid; numericValue: number }>>
  fieldString: Map<string, Array<{ docId: _ttid; rawValue: string }>>
}
|