@delma/fylo 2.1.0 → 2.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. package/README.md +27 -0
  2. package/dist/adapters/cipher.js +155 -0
  3. package/dist/adapters/cipher.js.map +1 -0
  4. package/dist/core/collection.js +6 -0
  5. package/dist/core/collection.js.map +1 -0
  6. package/{src/core/directory.ts → dist/core/directory.js} +28 -35
  7. package/dist/core/directory.js.map +1 -0
  8. package/dist/core/doc-id.js +15 -0
  9. package/dist/core/doc-id.js.map +1 -0
  10. package/dist/core/extensions.js +16 -0
  11. package/dist/core/extensions.js.map +1 -0
  12. package/dist/core/format.js +355 -0
  13. package/dist/core/format.js.map +1 -0
  14. package/dist/core/parser.js +764 -0
  15. package/dist/core/parser.js.map +1 -0
  16. package/dist/core/query.js +47 -0
  17. package/dist/core/query.js.map +1 -0
  18. package/dist/engines/s3-files/documents.js +62 -0
  19. package/dist/engines/s3-files/documents.js.map +1 -0
  20. package/dist/engines/s3-files/filesystem.js +165 -0
  21. package/dist/engines/s3-files/filesystem.js.map +1 -0
  22. package/dist/engines/s3-files/query.js +235 -0
  23. package/dist/engines/s3-files/query.js.map +1 -0
  24. package/dist/engines/s3-files/types.js +2 -0
  25. package/dist/engines/s3-files/types.js.map +1 -0
  26. package/dist/engines/s3-files.js +629 -0
  27. package/dist/engines/s3-files.js.map +1 -0
  28. package/dist/engines/types.js +2 -0
  29. package/dist/engines/types.js.map +1 -0
  30. package/dist/index.js +562 -0
  31. package/dist/index.js.map +1 -0
  32. package/dist/sync.js +18 -0
  33. package/dist/sync.js.map +1 -0
  34. package/{src → dist}/types/fylo.d.ts +14 -1
  35. package/package.json +2 -2
  36. package/.env.example +0 -16
  37. package/.github/copilot-instructions.md +0 -3
  38. package/.github/prompts/release.prompt.md +0 -10
  39. package/.github/workflows/ci.yml +0 -37
  40. package/.github/workflows/publish.yml +0 -91
  41. package/.prettierrc +0 -7
  42. package/AGENTS.md +0 -3
  43. package/CLAUDE.md +0 -3
  44. package/eslint.config.js +0 -32
  45. package/src/CLI +0 -39
  46. package/src/adapters/cipher.ts +0 -180
  47. package/src/core/collection.ts +0 -5
  48. package/src/core/extensions.ts +0 -21
  49. package/src/core/format.ts +0 -457
  50. package/src/core/parser.ts +0 -901
  51. package/src/core/query.ts +0 -53
  52. package/src/engines/s3-files/documents.ts +0 -65
  53. package/src/engines/s3-files/filesystem.ts +0 -172
  54. package/src/engines/s3-files/query.ts +0 -291
  55. package/src/engines/s3-files/types.ts +0 -42
  56. package/src/engines/s3-files.ts +0 -769
  57. package/src/engines/types.ts +0 -21
  58. package/src/index.ts +0 -632
  59. package/src/sync.ts +0 -58
  60. package/tests/collection/truncate.test.js +0 -36
  61. package/tests/data.js +0 -97
  62. package/tests/helpers/root.js +0 -7
  63. package/tests/integration/aws-s3-files.canary.test.js +0 -22
  64. package/tests/integration/create.test.js +0 -39
  65. package/tests/integration/delete.test.js +0 -97
  66. package/tests/integration/edge-cases.test.js +0 -162
  67. package/tests/integration/encryption.test.js +0 -148
  68. package/tests/integration/export.test.js +0 -46
  69. package/tests/integration/join-modes.test.js +0 -154
  70. package/tests/integration/nested.test.js +0 -144
  71. package/tests/integration/operators.test.js +0 -136
  72. package/tests/integration/read.test.js +0 -123
  73. package/tests/integration/rollback.test.js +0 -30
  74. package/tests/integration/s3-files.performance.test.js +0 -75
  75. package/tests/integration/s3-files.test.js +0 -205
  76. package/tests/integration/sync.test.js +0 -154
  77. package/tests/integration/update.test.js +0 -105
  78. package/tests/mocks/cipher.js +0 -40
  79. package/tests/schemas/album.d.ts +0 -5
  80. package/tests/schemas/album.json +0 -5
  81. package/tests/schemas/comment.d.ts +0 -7
  82. package/tests/schemas/comment.json +0 -7
  83. package/tests/schemas/photo.d.ts +0 -7
  84. package/tests/schemas/photo.json +0 -7
  85. package/tests/schemas/post.d.ts +0 -6
  86. package/tests/schemas/post.json +0 -6
  87. package/tests/schemas/tip.d.ts +0 -7
  88. package/tests/schemas/tip.json +0 -7
  89. package/tests/schemas/todo.d.ts +0 -6
  90. package/tests/schemas/todo.json +0 -6
  91. package/tests/schemas/user.d.ts +0 -23
  92. package/tests/schemas/user.json +0 -23
  93. package/tsconfig.json +0 -21
  94. package/tsconfig.typecheck.json +0 -31
  95. /package/{src → dist}/types/bun-runtime.d.ts +0 -0
  96. /package/{src → dist}/types/index.d.ts +0 -0
  97. /package/{src → dist}/types/node-runtime.d.ts +0 -0
  98. /package/{src → dist}/types/query.d.ts +0 -0
  99. /package/{src → dist}/types/vendor-modules.d.ts +0 -0
package/src/core/query.ts DELETED
@@ -1,53 +0,0 @@
1
- import { Cipher } from '../adapters/cipher'
2
-
3
- const ENCRYPTED_FIELD_OPS = ['$ne', '$gt', '$gte', '$lt', '$lte', '$like', '$contains'] as const
4
-
5
- export class Query {
6
- static async getExprs<T extends Record<string, any>>(
7
- collection: string,
8
- query: _storeQuery<T>
9
- ) {
10
- let exprs = new Set<string>()
11
-
12
- if (query.$ops) {
13
- for (const op of query.$ops) {
14
- for (const column in op) {
15
- const col = op[column as keyof T]!
16
-
17
- const fieldPath = String(column).replaceAll('.', '/')
18
- const encrypted =
19
- Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldPath)
20
-
21
- if (encrypted) {
22
- for (const opKey of ENCRYPTED_FIELD_OPS) {
23
- if (col[opKey] !== undefined) {
24
- throw new Error(
25
- `Operator ${opKey} is not supported on encrypted field "${String(column)}"`
26
- )
27
- }
28
- }
29
- }
30
-
31
- if (col.$eq) {
32
- const val = encrypted
33
- ? await Cipher.encrypt(String(col.$eq).replaceAll('/', '%2F'))
34
- : col.$eq
35
- exprs.add(`${column}/${val}/**/*`)
36
- }
37
- if (col.$ne) exprs.add(`${column}/**/*`)
38
- if (col.$gt) exprs.add(`${column}/**/*`)
39
- if (col.$gte) exprs.add(`${column}/**/*`)
40
- if (col.$lt) exprs.add(`${column}/**/*`)
41
- if (col.$lte) exprs.add(`${column}/**/*`)
42
- if (col.$like) exprs.add(`${column}/${col.$like.replaceAll('%', '*')}/**/*`)
43
- if (col.$contains !== undefined)
44
- exprs.add(
45
- `${column}/*/${String(col.$contains).split('/').join('%2F')}/**/*`
46
- )
47
- }
48
- }
49
- } else exprs = new Set([`**/*`])
50
-
51
- return Array.from(exprs)
52
- }
53
- }
@@ -1,65 +0,0 @@
1
- import path from 'node:path'
2
- import TTID from '@delma/ttid'
3
- import type { StorageEngine } from '../types'
4
- import type { StoredDoc } from './types'
5
-
6
- export class S3FilesDocuments {
7
- constructor(
8
- private readonly storage: StorageEngine,
9
- private readonly docsRoot: (collection: string) => string,
10
- private readonly docPath: (collection: string, docId: _ttid) => string,
11
- private readonly ensureCollection: (collection: string) => Promise<void>,
12
- private readonly encodeEncrypted: <T extends Record<string, any>>(
13
- collection: string,
14
- value: T,
15
- parentField?: string
16
- ) => Promise<T>,
17
- private readonly decodeEncrypted: <T extends Record<string, any>>(
18
- collection: string,
19
- value: T,
20
- parentField?: string
21
- ) => Promise<T>
22
- ) {}
23
-
24
- async readStoredDoc<T extends Record<string, any>>(
25
- collection: string,
26
- docId: _ttid
27
- ): Promise<StoredDoc<T> | null> {
28
- const target = this.docPath(collection, docId)
29
-
30
- try {
31
- const raw = JSON.parse(await this.storage.read(target)) as T
32
- const decoded = await this.decodeEncrypted(collection, raw)
33
- const { createdAt, updatedAt } = TTID.decodeTime(docId)
34
-
35
- return {
36
- id: docId,
37
- createdAt,
38
- updatedAt: updatedAt ?? createdAt,
39
- data: decoded
40
- }
41
- } catch (err) {
42
- if ((err as NodeJS.ErrnoException).code === 'ENOENT') return null
43
- throw err
44
- }
45
- }
46
-
47
- async writeStoredDoc<T extends Record<string, any>>(collection: string, docId: _ttid, data: T) {
48
- await this.ensureCollection(collection)
49
- const encoded = await this.encodeEncrypted(collection, data)
50
- const target = this.docPath(collection, docId)
51
- await this.storage.write(target, JSON.stringify(encoded))
52
- }
53
-
54
- async removeStoredDoc(collection: string, docId: _ttid) {
55
- await this.storage.delete(this.docPath(collection, docId))
56
- }
57
-
58
- async listDocIds(collection: string) {
59
- const files = await this.storage.list(this.docsRoot(collection))
60
- return files
61
- .filter((file) => file.endsWith('.json'))
62
- .map((file) => path.basename(file, '.json'))
63
- .filter((key) => TTID.isTTID(key)) as _ttid[]
64
- }
65
- }
@@ -1,172 +0,0 @@
1
- import { mkdir, open, readFile, readdir, rm, stat, writeFile } from 'node:fs/promises'
2
- import path from 'node:path'
3
- import type { EventBus, LockManager, StorageEngine } from '../types'
4
- import type { S3FilesEvent } from './types'
5
-
6
- export class FilesystemStorage implements StorageEngine {
7
- async read(target: string): Promise<string> {
8
- return await readFile(target, 'utf8')
9
- }
10
-
11
- async write(target: string, data: string): Promise<void> {
12
- await mkdir(path.dirname(target), { recursive: true })
13
- await writeFile(target, data, 'utf8')
14
- }
15
-
16
- async delete(target: string): Promise<void> {
17
- await rm(target, { recursive: true, force: true })
18
- }
19
-
20
- async list(target: string): Promise<string[]> {
21
- const results: string[] = []
22
-
23
- try {
24
- const entries = await readdir(target, { withFileTypes: true })
25
- for (const entry of entries) {
26
- const child = path.join(target, entry.name)
27
- if (entry.isDirectory()) {
28
- results.push(...(await this.list(child)))
29
- } else {
30
- results.push(child)
31
- }
32
- }
33
- } catch (err) {
34
- if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
35
- }
36
-
37
- return results
38
- }
39
-
40
- async mkdir(target: string): Promise<void> {
41
- await mkdir(target, { recursive: true })
42
- }
43
-
44
- async rmdir(target: string): Promise<void> {
45
- await rm(target, { recursive: true, force: true })
46
- }
47
-
48
- async exists(target: string): Promise<boolean> {
49
- try {
50
- await stat(target)
51
- return true
52
- } catch (err) {
53
- if ((err as NodeJS.ErrnoException).code === 'ENOENT') return false
54
- throw err
55
- }
56
- }
57
- }
58
-
59
- export class FilesystemLockManager implements LockManager {
60
- constructor(
61
- private readonly root: string,
62
- private readonly storage: StorageEngine
63
- ) {}
64
-
65
- private lockDir(collection: string, docId: _ttid) {
66
- return path.join(this.root, collection, '.fylo', 'locks', `${docId}.lock`)
67
- }
68
-
69
- async acquire(
70
- collection: string,
71
- docId: _ttid,
72
- owner: string,
73
- ttlMs: number = 30_000
74
- ): Promise<boolean> {
75
- const dir = this.lockDir(collection, docId)
76
- const metaPath = path.join(dir, 'meta.json')
77
- await mkdir(path.dirname(dir), { recursive: true })
78
-
79
- try {
80
- await mkdir(dir, { recursive: false })
81
- await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
82
- return true
83
- } catch (err) {
84
- if ((err as NodeJS.ErrnoException).code !== 'EEXIST') throw err
85
- }
86
-
87
- try {
88
- const meta = JSON.parse(await this.storage.read(metaPath)) as { ts?: number }
89
- if (meta.ts && Date.now() - meta.ts > ttlMs) {
90
- await this.storage.rmdir(dir)
91
- await mkdir(dir, { recursive: false })
92
- await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
93
- return true
94
- }
95
- } catch {
96
- await this.storage.rmdir(dir)
97
- await mkdir(dir, { recursive: false })
98
- await this.storage.write(metaPath, JSON.stringify({ owner, ts: Date.now() }))
99
- return true
100
- }
101
-
102
- return false
103
- }
104
-
105
- async release(collection: string, docId: _ttid, owner: string): Promise<void> {
106
- const dir = this.lockDir(collection, docId)
107
- const metaPath = path.join(dir, 'meta.json')
108
-
109
- try {
110
- const meta = JSON.parse(await this.storage.read(metaPath)) as { owner?: string }
111
- if (meta.owner === owner) await this.storage.rmdir(dir)
112
- } catch (err) {
113
- if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
114
- }
115
- }
116
- }
117
-
118
- export class FilesystemEventBus<T extends Record<string, any>> implements EventBus<
119
- S3FilesEvent<T>
120
- > {
121
- constructor(
122
- private readonly root: string,
123
- private readonly storage: StorageEngine
124
- ) {}
125
-
126
- private journalPath(collection: string) {
127
- return path.join(this.root, collection, '.fylo', 'events', `${collection}.ndjson`)
128
- }
129
-
130
- async publish(collection: string, event: S3FilesEvent<T>): Promise<void> {
131
- const target = this.journalPath(collection)
132
- await mkdir(path.dirname(target), { recursive: true })
133
- const line = `${JSON.stringify(event)}\n`
134
- const handle = await open(target, 'a')
135
- try {
136
- await handle.write(line)
137
- } finally {
138
- await handle.close()
139
- }
140
- }
141
-
142
- async *listen(collection: string): AsyncGenerator<S3FilesEvent<T>, void, unknown> {
143
- const target = this.journalPath(collection)
144
- let position = 0
145
-
146
- while (true) {
147
- try {
148
- const fileStat = await stat(target)
149
- if (fileStat.size > position) {
150
- const handle = await open(target, 'r')
151
- try {
152
- const size = fileStat.size - position
153
- const buffer = Buffer.alloc(size)
154
- await handle.read(buffer, 0, size, position)
155
- position = fileStat.size
156
-
157
- for (const line of buffer.toString('utf8').split('\n')) {
158
- if (line.trim().length === 0) continue
159
- yield JSON.parse(line) as S3FilesEvent<T>
160
- }
161
- } finally {
162
- await handle.close()
163
- }
164
- }
165
- } catch (err) {
166
- if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
167
- }
168
-
169
- await Bun.sleep(100)
170
- }
171
- }
172
- }
@@ -1,291 +0,0 @@
1
- import TTID from '@delma/ttid'
2
- import { Cipher } from '../../adapters/cipher'
3
- import type { CollectionIndexCache, FyloRecord, S3FilesQueryResult } from './types'
4
-
5
- type QueryContext = {
6
- loadIndexCache: (collection: string) => Promise<CollectionIndexCache>
7
- normalizeIndexValue: (rawValue: string) => {
8
- rawValue: string
9
- valueHash: string
10
- valueType: string
11
- numericValue: number | null
12
- }
13
- }
14
-
15
- export class S3FilesQueryEngine {
16
- constructor(private readonly context: QueryContext) {}
17
-
18
- getValueByPath(target: Record<string, any>, fieldPath: string) {
19
- return fieldPath
20
- .replaceAll('/', '.')
21
- .split('.')
22
- .reduce<any>(
23
- (acc, key) => (acc === undefined || acc === null ? undefined : acc[key]),
24
- target
25
- )
26
- }
27
-
28
- normalizeFieldPath(fieldPath: string) {
29
- return fieldPath.replaceAll('.', '/')
30
- }
31
-
32
- matchesTimestamp(docId: _ttid, query?: _storeQuery<Record<string, any>>) {
33
- if (!query?.$created && !query?.$updated) return true
34
- const { createdAt, updatedAt } = TTID.decodeTime(docId)
35
- const timestamps = { createdAt, updatedAt: updatedAt ?? createdAt }
36
-
37
- const match = (value: number, range?: _timestamp) => {
38
- if (!range) return true
39
- if (range.$gt !== undefined && !(value > range.$gt)) return false
40
- if (range.$gte !== undefined && !(value >= range.$gte)) return false
41
- if (range.$lt !== undefined && !(value < range.$lt)) return false
42
- if (range.$lte !== undefined && !(value <= range.$lte)) return false
43
- return true
44
- }
45
-
46
- return (
47
- match(timestamps.createdAt, query.$created) &&
48
- match(timestamps.updatedAt, query.$updated)
49
- )
50
- }
51
-
52
- likeToRegex(pattern: string) {
53
- const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&').replaceAll('%', '.*')
54
- return new RegExp(`^${escaped}$`)
55
- }
56
-
57
- matchesOperand(value: unknown, operand: _operand) {
58
- if (operand.$eq !== undefined && value != operand.$eq) return false
59
- if (operand.$ne !== undefined && value == operand.$ne) return false
60
- if (operand.$gt !== undefined && !(Number(value) > operand.$gt)) return false
61
- if (operand.$gte !== undefined && !(Number(value) >= operand.$gte)) return false
62
- if (operand.$lt !== undefined && !(Number(value) < operand.$lt)) return false
63
- if (operand.$lte !== undefined && !(Number(value) <= operand.$lte)) return false
64
- if (
65
- operand.$like !== undefined &&
66
- (typeof value !== 'string' || !this.likeToRegex(operand.$like).test(value))
67
- )
68
- return false
69
- if (operand.$contains !== undefined) {
70
- if (!Array.isArray(value) || !value.some((item) => item == operand.$contains))
71
- return false
72
- }
73
- return true
74
- }
75
-
76
- async normalizeQueryValue(collection: string, fieldPath: string, value: unknown) {
77
- let rawValue = String(value).replaceAll('/', '%2F')
78
- if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldPath))
79
- rawValue = await Cipher.encrypt(rawValue, true)
80
- return this.context.normalizeIndexValue(rawValue)
81
- }
82
-
83
- intersectDocIds(current: Set<_ttid> | null, next: Iterable<_ttid>) {
84
- const nextSet = next instanceof Set ? next : new Set(next)
85
- if (current === null) return new Set(nextSet)
86
-
87
- const intersection = new Set<_ttid>()
88
- for (const docId of current) {
89
- if (nextSet.has(docId)) intersection.add(docId)
90
- }
91
- return intersection
92
- }
93
-
94
- async candidateDocIdsForOperand(
95
- collection: string,
96
- fieldPath: string,
97
- operand: _operand
98
- ): Promise<Set<_ttid> | null> {
99
- if (Cipher.isConfigured() && Cipher.isEncryptedField(collection, fieldPath)) {
100
- const unsupported =
101
- operand.$ne !== undefined ||
102
- operand.$gt !== undefined ||
103
- operand.$gte !== undefined ||
104
- operand.$lt !== undefined ||
105
- operand.$lte !== undefined ||
106
- operand.$like !== undefined ||
107
- operand.$contains !== undefined
108
-
109
- if (unsupported) {
110
- throw new Error(`Operator is not supported on encrypted field: ${fieldPath}`)
111
- }
112
- }
113
-
114
- const cache = await this.context.loadIndexCache(collection)
115
- let candidateIds: Set<_ttid> | null = null
116
-
117
- if (operand.$eq !== undefined) {
118
- const normalized = await this.normalizeQueryValue(collection, fieldPath, operand.$eq)
119
- candidateIds = this.intersectDocIds(
120
- candidateIds,
121
- cache.fieldHash.get(fieldPath)?.get(normalized.valueHash) ?? new Set<_ttid>()
122
- )
123
- }
124
-
125
- if (
126
- operand.$gt !== undefined ||
127
- operand.$gte !== undefined ||
128
- operand.$lt !== undefined ||
129
- operand.$lte !== undefined
130
- ) {
131
- const numericMatches = new Set<_ttid>()
132
- for (const entry of cache.fieldNumeric.get(fieldPath) ?? []) {
133
- if (operand.$gt !== undefined && !(entry.numericValue > operand.$gt)) continue
134
- if (operand.$gte !== undefined && !(entry.numericValue >= operand.$gte)) continue
135
- if (operand.$lt !== undefined && !(entry.numericValue < operand.$lt)) continue
136
- if (operand.$lte !== undefined && !(entry.numericValue <= operand.$lte)) continue
137
- numericMatches.add(entry.docId)
138
- }
139
-
140
- candidateIds = this.intersectDocIds(candidateIds, numericMatches)
141
- }
142
-
143
- if (operand.$like !== undefined) {
144
- const regex = this.likeToRegex(operand.$like.replaceAll('/', '%2F'))
145
- const stringMatches = new Set<_ttid>()
146
- for (const entry of cache.fieldString.get(fieldPath) ?? []) {
147
- if (regex.test(entry.rawValue)) stringMatches.add(entry.docId)
148
- }
149
-
150
- candidateIds = this.intersectDocIds(candidateIds, stringMatches)
151
- }
152
-
153
- if (operand.$contains !== undefined) {
154
- const normalized = await this.normalizeQueryValue(
155
- collection,
156
- fieldPath,
157
- operand.$contains
158
- )
159
- const containsMatches = new Set<_ttid>()
160
- for (const [candidateFieldPath, hashes] of cache.fieldHash.entries()) {
161
- if (
162
- candidateFieldPath !== fieldPath &&
163
- !candidateFieldPath.startsWith(`${fieldPath}/`)
164
- )
165
- continue
166
- for (const docId of hashes.get(normalized.valueHash) ?? [])
167
- containsMatches.add(docId)
168
- }
169
-
170
- candidateIds = this.intersectDocIds(candidateIds, containsMatches)
171
- }
172
-
173
- return candidateIds
174
- }
175
-
176
- async candidateDocIdsForOperation<T extends Record<string, any>>(
177
- collection: string,
178
- operation: _op<T>
179
- ): Promise<Set<_ttid> | null> {
180
- let candidateIds: Set<_ttid> | null = null
181
-
182
- for (const [field, operand] of Object.entries(operation) as Array<[keyof T, _operand]>) {
183
- if (!operand) continue
184
-
185
- const fieldPath = this.normalizeFieldPath(String(field))
186
- const fieldCandidates = await this.candidateDocIdsForOperand(
187
- collection,
188
- fieldPath,
189
- operand
190
- )
191
-
192
- if (fieldCandidates === null) continue
193
- candidateIds = this.intersectDocIds(candidateIds, fieldCandidates)
194
- }
195
-
196
- return candidateIds
197
- }
198
-
199
- async candidateDocIdsForQuery<T extends Record<string, any>>(
200
- collection: string,
201
- query?: _storeQuery<T>
202
- ): Promise<Set<_ttid> | null> {
203
- if (!query?.$ops || query.$ops.length === 0) return null
204
-
205
- const union = new Set<_ttid>()
206
- let usedIndex = false
207
-
208
- for (const operation of query.$ops) {
209
- const candidateIds = await this.candidateDocIdsForOperation(collection, operation)
210
- if (candidateIds === null) return null
211
- usedIndex = true
212
- for (const docId of candidateIds) union.add(docId)
213
- }
214
-
215
- return usedIndex ? union : null
216
- }
217
-
218
- matchesQuery<T extends Record<string, any>>(docId: _ttid, doc: T, query?: _storeQuery<T>) {
219
- if (!this.matchesTimestamp(docId, query as _storeQuery<Record<string, any>> | undefined))
220
- return false
221
- if (!query?.$ops || query.$ops.length === 0) return true
222
-
223
- return query.$ops.some((operation) => {
224
- for (const field in operation) {
225
- const value = this.getValueByPath(doc, field)
226
- if (!this.matchesOperand(value, operation[field as keyof T]!)) return false
227
- }
228
- return true
229
- })
230
- }
231
-
232
- selectValues<T extends Record<string, any>>(selection: Array<keyof T>, data: T) {
233
- const copy = { ...data }
234
- for (const field in copy) {
235
- if (!selection.includes(field as keyof T)) delete copy[field]
236
- }
237
- return copy
238
- }
239
-
240
- renameFields<T extends Record<string, any>>(rename: Record<keyof Partial<T>, string>, data: T) {
241
- const copy = { ...data }
242
- for (const field in copy) {
243
- if (rename[field]) {
244
- copy[rename[field]] = copy[field]
245
- delete copy[field]
246
- }
247
- }
248
- return copy
249
- }
250
-
251
- processDoc<T extends Record<string, any>>(
252
- doc: FyloRecord<T>,
253
- query?: _storeQuery<T>
254
- ): S3FilesQueryResult<T> | undefined {
255
- if (Object.keys(doc).length === 0) return
256
-
257
- const next = { ...doc }
258
-
259
- for (let [_id, data] of Object.entries(next)) {
260
- if (query?.$select?.length)
261
- data = this.selectValues(query.$select as Array<keyof T>, data)
262
- if (query?.$rename) data = this.renameFields(query.$rename, data)
263
- next[_id as _ttid] = data as T
264
- }
265
-
266
- if (query?.$groupby) {
267
- const docGroup: Record<string, Record<string, Partial<T>>> = {}
268
- for (const [id, data] of Object.entries(next)) {
269
- const groupValue = data[query.$groupby] as string
270
- if (groupValue) {
271
- const groupData = { ...data }
272
- delete groupData[query.$groupby]
273
- docGroup[groupValue] = { [id]: groupData as Partial<T> }
274
- }
275
- }
276
-
277
- if (query.$onlyIds) {
278
- const groupedIds: Record<string, _ttid[]> = {}
279
- for (const group in docGroup)
280
- groupedIds[group] = Object.keys(docGroup[group]) as _ttid[]
281
- return groupedIds
282
- }
283
-
284
- return docGroup
285
- }
286
-
287
- if (query?.$onlyIds) return Object.keys(next).shift() as _ttid
288
-
289
- return next
290
- }
291
- }
@@ -1,42 +0,0 @@
1
- export type FyloRecord<T extends Record<string, any>> = Record<_ttid, T>
2
-
3
- export type S3FilesQueryResult<T extends Record<string, any>> =
4
- | _ttid
5
- | FyloRecord<T>
6
- | Record<string, _ttid[]>
7
- | Record<string, Record<_ttid, Partial<T>>>
8
- | Record<_ttid, Partial<T>>
9
-
10
- export type S3FilesEvent<T extends Record<string, any>> = {
11
- ts: number
12
- action: 'insert' | 'delete'
13
- id: _ttid
14
- doc?: T
15
- }
16
-
17
- export type StoredDoc<T extends Record<string, any>> = {
18
- id: _ttid
19
- createdAt: number
20
- updatedAt: number
21
- data: T
22
- }
23
-
24
- export type StoredIndexEntry = {
25
- fieldPath: string
26
- rawValue: string
27
- valueHash: string
28
- valueType: string
29
- numericValue: number | null
30
- }
31
-
32
- export type StoredCollectionIndex = {
33
- version: 1
34
- docs: Record<_ttid, StoredIndexEntry[]>
35
- }
36
-
37
- export type CollectionIndexCache = {
38
- docs: Map<_ttid, StoredIndexEntry[]>
39
- fieldHash: Map<string, Map<string, Set<_ttid>>>
40
- fieldNumeric: Map<string, Array<{ docId: _ttid; numericValue: number }>>
41
- fieldString: Map<string, Array<{ docId: _ttid; rawValue: string }>>
42
- }