@delma/fylo 1.1.2 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +141 -62
- package/eslint.config.js +8 -4
- package/package.json +9 -7
- package/src/CLI +16 -14
- package/src/adapters/cipher.ts +12 -6
- package/src/adapters/redis.ts +193 -123
- package/src/adapters/s3.ts +6 -12
- package/src/core/collection.ts +5 -0
- package/src/core/directory.ts +120 -151
- package/src/core/extensions.ts +4 -2
- package/src/core/format.ts +390 -419
- package/src/core/parser.ts +167 -142
- package/src/core/query.ts +31 -26
- package/src/core/walker.ts +68 -61
- package/src/core/write-queue.ts +7 -4
- package/src/engines/s3-files.ts +1068 -0
- package/src/engines/types.ts +21 -0
- package/src/index.ts +754 -378
- package/src/migrate-cli.ts +22 -0
- package/src/migrate.ts +74 -0
- package/src/types/bun-runtime.d.ts +73 -0
- package/src/types/fylo.d.ts +115 -27
- package/src/types/node-runtime.d.ts +61 -0
- package/src/types/query.d.ts +6 -2
- package/src/types/vendor-modules.d.ts +8 -7
- package/src/worker.ts +7 -1
- package/src/workers/write-worker.ts +25 -24
- package/tests/collection/truncate.test.js +35 -0
- package/tests/{data.ts → data.js} +8 -21
- package/tests/{index.ts → index.js} +4 -9
- package/tests/integration/aws-s3-files.canary.test.js +22 -0
- package/tests/integration/{create.test.ts → create.test.js} +13 -31
- package/tests/integration/delete.test.js +95 -0
- package/tests/integration/{edge-cases.test.ts → edge-cases.test.js} +50 -124
- package/tests/integration/{encryption.test.ts → encryption.test.js} +20 -65
- package/tests/integration/{export.test.ts → export.test.js} +8 -23
- package/tests/integration/{join-modes.test.ts → join-modes.test.js} +37 -104
- package/tests/integration/migration.test.js +38 -0
- package/tests/integration/nested.test.js +142 -0
- package/tests/integration/operators.test.js +122 -0
- package/tests/integration/{queue.test.ts → queue.test.js} +24 -40
- package/tests/integration/read.test.js +119 -0
- package/tests/integration/rollback.test.js +60 -0
- package/tests/integration/s3-files.test.js +192 -0
- package/tests/integration/update.test.js +99 -0
- package/tests/mocks/{cipher.ts → cipher.js} +11 -26
- package/tests/mocks/redis.js +123 -0
- package/tests/mocks/{s3.ts → s3.js} +24 -58
- package/tests/schemas/album.json +1 -1
- package/tests/schemas/comment.json +1 -1
- package/tests/schemas/photo.json +1 -1
- package/tests/schemas/post.json +1 -1
- package/tests/schemas/tip.json +1 -1
- package/tests/schemas/todo.json +1 -1
- package/tests/schemas/user.d.ts +12 -12
- package/tests/schemas/user.json +1 -1
- package/tsconfig.json +4 -2
- package/tsconfig.typecheck.json +31 -0
- package/tests/collection/truncate.test.ts +0 -56
- package/tests/integration/delete.test.ts +0 -147
- package/tests/integration/nested.test.ts +0 -212
- package/tests/integration/operators.test.ts +0 -167
- package/tests/integration/read.test.ts +0 -203
- package/tests/integration/rollback.test.ts +0 -105
- package/tests/integration/update.test.ts +0 -130
- package/tests/mocks/redis.ts +0 -169
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
// Integration tests for the "s3-files" storage engine: documents live on the
// local filesystem under `root` with a per-collection SQLite index, so the
// engine must work without Redis or S3 adapters configured.
// Requires the Bun runtime (bun:test, bun:sqlite, Bun.sleep, top-level await).
import { afterAll, beforeAll, describe, expect, test } from 'bun:test'
import { mkdtemp, rm, stat } from 'node:fs/promises'
import os from 'node:os'
import path from 'node:path'
import { Database } from 'bun:sqlite'
import Fylo from '../../src'

// Fresh temporary root per test-file run; removed again in afterAll.
const root = await mkdtemp(path.join(os.tmpdir(), 'fylo-s3files-'))
const fylo = new Fylo({ engine: 's3-files', s3FilesRoot: root })
const POSTS = 's3files-posts'
const USERS = 's3files-users'

describe('s3-files engine', () => {
  beforeAll(async () => {
    await fylo.createCollection(POSTS)
    await fylo.createCollection(USERS)
  })
  afterAll(async () => {
    // Delete the whole temp tree, including collections created mid-test.
    await rm(root, { recursive: true, force: true })
  })
  test('put/get/patch/delete works without Redis or S3 adapters', async () => {
    const id = await fylo.putData(POSTS, {
      title: 'Hello',
      tags: ['bun', 'aws'],
      meta: { score: 1 }
    })
    const created = await fylo.getDoc(POSTS, id).once()
    expect(created[id].title).toBe('Hello')
    expect(created[id].tags).toEqual(['bun', 'aws'])
    // NOTE(review): patchDoc appears to re-key the document — it returns a new
    // id and the original id stops resolving (asserted below). Confirm against
    // the engine implementation that this is the intended contract.
    const nextId = await fylo.patchDoc(POSTS, {
      [id]: {
        title: 'Hello 2',
        meta: { score: 2 }
      }
    })
    const updated = await fylo.getDoc(POSTS, nextId).once()
    expect(updated[nextId].title).toBe('Hello 2')
    expect(updated[nextId].meta.score).toBe(2)
    // The pre-patch id must no longer resolve to a document.
    expect(await fylo.getDoc(POSTS, id).once()).toEqual({})
    await fylo.delDoc(POSTS, nextId)
    expect(await fylo.getDoc(POSTS, nextId).once()).toEqual({})
  })
  test('findDocs listener is backed by the filesystem event journal', async () => {
    // Start listening first, then write, so the iterator must observe a live
    // journal event rather than a pre-existing match.
    const iter = fylo
      .findDocs(POSTS, {
        $ops: [{ title: { $eq: 'Live event' } }]
      })
      [Symbol.asyncIterator]()
    const pending = iter.next()
    // Give the listener time to attach before producing the event. This is a
    // timing-based handshake, not a guaranteed synchronization point.
    await Bun.sleep(100)
    const id = await fylo.putData(POSTS, { title: 'Live event' })
    const { value } = await pending
    expect(value).toEqual({ [id]: { title: 'Live event' } })
    // Close the iterator so its watcher does not leak into later tests.
    await iter.return?.()
  })
  test('supports long values without path-length issues', async () => {
    // 5000 chars would overflow OS path limits if values were encoded into
    // file names; they must be stored as file contents instead.
    const longBody = 'x'.repeat(5000)
    const id = await fylo.putData(POSTS, {
      title: 'Long payload',
      body: longBody
    })
    const result = await fylo.getDoc(POSTS, id).once()
    expect(result[id].body).toBe(longBody)
  })
  test('stores indexes in a single SQLite database instead of per-entry files', async () => {
    // A single index.db must exist; the legacy per-entry 'indexes' directory
    // must not (stat on a missing path rejects).
    const dbStat = await stat(path.join(root, POSTS, '.fylo', 'index.db'))
    expect(dbStat.isFile()).toBe(true)
    await expect(stat(path.join(root, POSTS, '.fylo', 'indexes'))).rejects.toThrow()
  })
  test('uses SQLite index rows to support exact, range, and contains queries', async () => {
    // Dedicated collection so assertions on exact key sets are unaffected by
    // documents written by earlier tests.
    const queryCollection = 's3files-query'
    await fylo.createCollection(queryCollection)

    const bunId = await fylo.putData(queryCollection, {
      title: 'Bun launch',
      tags: ['bun', 'aws'],
      meta: { score: 10 }
    })
    const nodeId = await fylo.putData(queryCollection, {
      title: 'Node launch',
      tags: ['node'],
      meta: { score: 2 }
    })

    // $eq on a top-level string field.
    let eqResults = {}
    for await (const data of fylo
      .findDocs(queryCollection, {
        $ops: [{ title: { $eq: 'Bun launch' } }]
      })
      .collect()) {
      eqResults = { ...eqResults, ...data }
    }
    expect(Object.keys(eqResults)).toEqual([bunId])

    // $gte range query on a nested numeric field (dot-path addressing).
    let rangeResults = {}
    for await (const data of fylo
      .findDocs(queryCollection, {
        $ops: [{ ['meta.score']: { $gte: 5 } }]
      })
      .collect()) {
      rangeResults = { ...rangeResults, ...data }
    }
    expect(Object.keys(rangeResults)).toEqual([bunId])

    // $contains membership query on an array field.
    let containsResults = {}
    for await (const data of fylo
      .findDocs(queryCollection, {
        $ops: [{ tags: { $contains: 'aws' } }]
      })
      .collect()) {
      containsResults = { ...containsResults, ...data }
    }
    expect(Object.keys(containsResults)).toEqual([bunId])
    expect(containsResults[nodeId]).toBeUndefined()

    // Inspect the index rows directly to pin the on-disk schema: strings keep
    // numeric_value null; numbers carry both raw_value and numeric_value;
    // array elements are indexed as 'field/position' paths.
    const db = new Database(path.join(root, queryCollection, '.fylo', 'index.db'))
    const rows = db
      .query(
        `SELECT doc_id, field_path, raw_value, value_type, numeric_value
         FROM doc_index_entries
         WHERE doc_id = ?
         ORDER BY field_path, raw_value`
      )
      .all(bunId)
    db.close()

    expect(rows).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          doc_id: bunId,
          field_path: 'title',
          raw_value: 'Bun launch',
          value_type: 'string',
          numeric_value: null
        }),
        expect.objectContaining({
          doc_id: bunId,
          field_path: 'meta/score',
          raw_value: '10',
          value_type: 'number',
          numeric_value: 10
        }),
        expect.objectContaining({
          doc_id: bunId,
          field_path: 'tags/1',
          raw_value: 'aws',
          value_type: 'string',
          numeric_value: null
        })
      ])
    )
  })
  test('joins work in s3-files mode', async () => {
    // Both docs share id: 42, so an inner join on id must produce exactly one
    // combined row keyed '<leftId>, <rightId>'.
    const userId = await fylo.putData(USERS, { id: 42, name: 'Ada' })
    const postId = await fylo.putData(POSTS, { id: 42, title: 'Shared', content: 'join me' })
    const joined = await fylo.joinDocs({
      $leftCollection: USERS,
      $rightCollection: POSTS,
      $mode: 'inner',
      $on: {
        id: { $eq: 'id' }
      }
    })
    expect(joined[`${userId}, ${postId}`]).toBeDefined()
  })
  test('queue APIs are explicitly unsupported', async () => {
    // The write queue depends on Redis; in s3-files mode these methods must
    // fail loudly rather than silently no-op.
    await expect(fylo.queuePutData(POSTS, { title: 'no queue' })).rejects.toThrow(
      'queuePutData is not supported'
    )
    await expect(fylo.processQueuedWrites(1)).rejects.toThrow(
      'processQueuedWrites is not supported'
    )
  })
  test('rejects collection names that are unsafe for cross-platform filesystems', async () => {
    // Collection names become directory names; path separators and Windows
    // drive/stream separators must be rejected.
    await expect(fylo.createCollection('bad/name')).rejects.toThrow('Invalid collection name')
    await expect(fylo.createCollection('bad\\name')).rejects.toThrow('Invalid collection name')
    await expect(fylo.createCollection('bad:name')).rejects.toThrow('Invalid collection name')
  })
  test('static helpers can use s3-files through env defaults', async () => {
    // Save and later restore env so other test files see their original state.
    const prevEngine = process.env.FYLO_STORAGE_ENGINE
    const prevRoot = process.env.FYLO_S3FILES_ROOT
    process.env.FYLO_STORAGE_ENGINE = 's3-files'
    process.env.FYLO_S3FILES_ROOT = root
    const collection = 's3files-static'
    await Fylo.createCollection(collection)
    // Mixes the instance (explicit options) and static (env-derived) APIs:
    // both must resolve to the same filesystem root.
    const id = await fylo.putData(collection, { title: 'Static path' })
    const result = await Fylo.getDoc(collection, id).once()
    expect(result[id].title).toBe('Static path')
    if (prevEngine === undefined) delete process.env.FYLO_STORAGE_ENGINE
    else process.env.FYLO_STORAGE_ENGINE = prevEngine
    if (prevRoot === undefined) delete process.env.FYLO_S3FILES_ROOT
    else process.env.FYLO_S3FILES_ROOT = prevRoot
  })
})
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
// UPDATE-path integration tests, run against both the NO-SQL document API
// (patchDoc / patchDocs) and the SQL facade (executeSQL). S3 and Redis are
// replaced by in-memory mocks via mock.module, which Bun hoists above the
// imports below, so '../../src' already sees the mocked adapters.
import { test, expect, describe, beforeAll, afterAll, mock } from 'bun:test'
import Fylo from '../../src'
import { photosURL, todosURL } from '../data'
import S3Mock from '../mocks/s3'
import RedisMock from '../mocks/redis'
const PHOTOS = `photo`
const TODOS = `todo`
const fylo = new Fylo()
mock.module('../../src/adapters/s3', () => ({ S3: S3Mock }))
mock.module('../../src/adapters/redis', () => ({ Redis: RedisMock }))
beforeAll(async () => {
  // One collection is exercised through the document API, one through SQL.
  await Promise.all([Fylo.createCollection(PHOTOS), fylo.executeSQL(`CREATE TABLE ${TODOS}`)])
  try {
    await fylo.importBulkData(PHOTOS, new URL(photosURL), 100)
    await fylo.importBulkData(TODOS, new URL(todosURL), 100)
  } catch {
    // NOTE(review): import failures are swallowed after rollback, so the
    // suite would proceed against empty collections and fail later with less
    // obvious assertions — confirm this best-effort setup is intentional.
    await fylo.rollback()
  }
})
afterAll(async () => {
  await Promise.all([Fylo.dropCollection(PHOTOS), fylo.executeSQL(`DROP TABLE ${TODOS}`)])
})
describe('NO-SQL', async () => {
  test('UPDATE ONE', async () => {
    // Grab any one existing doc id ($onlyIds yields bare ids, not documents).
    const ids = []
    for await (const data of Fylo.findDocs(PHOTOS, { $limit: 1, $onlyIds: true }).collect()) {
      ids.push(data)
    }
    try {
      await fylo.patchDoc(PHOTOS, { [ids.shift()]: { title: 'All Mighty' } })
    } catch {
      await fylo.rollback()
    }
    // Exactly one doc should now carry the patched title.
    let results = {}
    for await (const data of Fylo.findDocs(PHOTOS, {
      $ops: [{ title: { $eq: 'All Mighty' } }]
    }).collect()) {
      results = { ...results, ...data }
    }
    expect(Object.keys(results).length).toBe(1)
  })
  test('UPDATE CLAUSE', async () => {
    // count stays -1 if the patch threw, so the final expect also documents
    // that the write itself succeeded.
    let count = -1
    try {
      count = await fylo.patchDocs(PHOTOS, {
        $set: { title: 'All Mighti' },
        $where: { $ops: [{ title: { $like: '%est%' } }] }
      })
    } catch {
      await fylo.rollback()
    }
    // Every doc matched by the $where clause must now have the new title.
    let results = {}
    for await (const data of Fylo.findDocs(PHOTOS, {
      $ops: [{ title: { $eq: 'All Mighti' } }]
    }).collect()) {
      results = { ...results, ...data }
    }
    expect(Object.keys(results).length).toBe(count)
  })
  test('UPDATE ALL', async () => {
    let count = -1
    try {
      // No $where: patches the entire collection.
      count = await fylo.patchDocs(PHOTOS, { $set: { title: 'All Mighter' } })
    } catch {
      await fylo.rollback()
    }
    let results = {}
    for await (const data of Fylo.findDocs(PHOTOS, {
      $ops: [{ title: { $eq: 'All Mighter' } }]
    }).collect()) {
      results = { ...results, ...data }
    }
    expect(Object.keys(results).length).toBe(count)
  }, 20000) // whole-collection rewrite needs a larger timeout
})
describe('SQL', async () => {
  test('UPDATE CLAUSE', async () => {
    // UPDATE returns the affected-row count; SELECT must find the same number.
    let count = -1
    try {
      count = await fylo.executeSQL(
        `UPDATE ${TODOS} SET title = 'All Mighty' WHERE title LIKE '%est%'`
      )
    } catch {
      await fylo.rollback()
    }
    const results = await fylo.executeSQL(`SELECT * FROM ${TODOS} WHERE title = 'All Mighty'`)
    expect(Object.keys(results).length).toBe(count)
  })
  test('UPDATE ALL', async () => {
    let count = -1
    try {
      count = await fylo.executeSQL(`UPDATE ${TODOS} SET title = 'All Mightier'`)
    } catch {
      await fylo.rollback()
    }
    const results = await fylo.executeSQL(`SELECT * FROM ${TODOS} WHERE title = 'All Mightier'`)
    expect(Object.keys(results).length).toBe(count)
  }, 20000) // whole-table rewrite needs a larger timeout
})
|
|
@@ -1,55 +1,40 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Pass-through Cipher mock for tests that don't need real encryption.
|
|
3
|
-
* Returns values as-is (base64-encoded to match real adapter interface shape).
|
|
4
|
-
*/
|
|
5
1
|
/**
 * Pass-through cipher used by tests that don't need real encryption.
 *
 * "Encryption" is URL-safe base64 without padding, so values round-trip
 * through encrypt/decrypt unchanged while still matching the shape of the
 * real adapter's output. All state is static: one field registry shared per
 * test file, cleared via reset().
 */
export class CipherMock {
  static _configured = false
  static collections = new Map()

  /** True once configure() has been called (and reset() has not). */
  static isConfigured() {
    return CipherMock._configured
  }

  /** True when at least one encrypted field is registered for `collection`. */
  static hasEncryptedFields(collection) {
    const registered = CipherMock.collections.get(collection)
    return registered !== undefined && registered.size > 0
  }

  /**
   * True when `field` is a registered pattern for `collection`, or a nested
   * path beneath one (e.g. pattern 'email' also covers 'email/home').
   */
  static isEncryptedField(collection, field) {
    const registered = CipherMock.collections.get(collection)
    if (!registered || registered.size === 0) return false
    for (const pattern of registered) {
      if (field === pattern || field.startsWith(`${pattern}/`)) return true
    }
    return false
  }

  /** Register encrypted-field patterns; an empty list registers nothing. */
  static registerFields(collection, fields) {
    if (!fields.length) return
    CipherMock.collections.set(collection, new Set(fields))
  }

  /** Marks the cipher configured; the secret itself is ignored. */
  static async configure(_secret) {
    CipherMock._configured = true
  }

  /** Clears configuration flag and all registered fields. */
  static reset() {
    CipherMock._configured = false
    CipherMock.collections = new Map()
  }

  /** Base64url-encode without padding ('+'→'-', '/'→'_', '=' stripped). */
  static async encrypt(value) {
    const table = { '+': '-', '/': '_', '=': '' }
    return btoa(value).replace(/[+\/=]/g, (ch) => table[ch])
  }

  /** Reverse of encrypt: restore standard base64 alphabet and padding. */
  static async decrypt(encoded) {
    let b64 = encoded.replace(/-/g, '+').replace(/_/g, '/')
    while (b64.length % 4 !== 0) b64 += '='
    return atob(b64)
  }
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
/**
 * In-memory stand-in for the Redis adapter used by the write-queue tests.
 *
 * All state lives on static fields, so every instance in a test file shares
 * one queue; Bun isolates module state per test file, giving each file a
 * fresh mock. `stream` holds {streamId, jobId, collection, docId, operation,
 * claimedBy?} records; `jobs`, `docs`, and `locks` are keyed Maps.
 */
export default class RedisMock {
  static stream = []
  static jobs = new Map()
  static docs = new Map()
  static locks = new Map()
  static deadLetters = []
  static nextId = 0

  /** Pub/sub is a no-op in the mock. */
  async publish(_collection, _action, _keyId) {}

  /** TTID claims always succeed here — no TTL bookkeeping. */
  async claimTTID(_id, _ttlSeconds = 10) {
    return true
  }

  /**
   * Store the job, mark its doc 'queued', and append a stream record.
   * Returns the freshly allocated stream id.
   */
  async enqueueWrite(job) {
    const stored = { ...job }
    if (stored.nextAttemptAt == null) stored.nextAttemptAt = Date.now()
    RedisMock.jobs.set(job.jobId, stored)
    const docKey = `fylo:doc:${job.collection}:${job.docId}`
    RedisMock.docs.set(docKey, {
      status: 'queued',
      lastJobId: job.jobId,
      updatedAt: String(Date.now())
    })
    RedisMock.nextId += 1
    const streamId = String(RedisMock.nextId)
    RedisMock.stream.push({
      streamId,
      jobId: job.jobId,
      collection: job.collection,
      docId: job.docId,
      operation: job.operation
    })
    return streamId
  }

  /** Claim up to `count` unclaimed stream entries for `workerId`. */
  async readWriteJobs(workerId, count = 1) {
    const claimed = []
    for (const rec of RedisMock.stream) {
      if (claimed.length === count) break
      if (rec.claimedBy) continue
      rec.claimedBy = workerId
      claimed.push({ streamId: rec.streamId, job: { ...RedisMock.jobs.get(rec.jobId) } })
    }
    return claimed
  }

  /** Drop the acknowledged entry from the stream. */
  async ackWriteJob(streamId) {
    const kept = []
    for (const rec of RedisMock.stream) {
      if (rec.streamId !== streamId) kept.push(rec)
    }
    RedisMock.stream = kept
  }

  /** Move a failed job to the dead-letter list and ack it off the stream. */
  async deadLetterWriteJob(streamId, job, reason) {
    const dlStreamId = String(RedisMock.deadLetters.length + 1)
    RedisMock.deadLetters.push({
      streamId: dlStreamId,
      jobId: job.jobId,
      reason,
      failedAt: Date.now()
    })
    await this.ackWriteJob(streamId)
  }

  /** Re-claim up to `count` already-claimed entries (idle time is ignored). */
  async claimPendingJobs(workerId, _minIdleMs = 30000, count = 10) {
    const taken = []
    for (const rec of RedisMock.stream) {
      if (taken.length === count) break
      if (!rec.claimedBy) continue
      rec.claimedBy = workerId
      taken.push({ streamId: rec.streamId, job: { ...RedisMock.jobs.get(rec.jobId) } })
    }
    return taken
  }

  /** Merge `extra` plus the new status into an existing job; no-op if absent. */
  async setJobStatus(jobId, status, extra = {}) {
    const existing = RedisMock.jobs.get(jobId)
    if (!existing) return
    Object.assign(existing, extra, { status, updatedAt: Date.now() })
  }

  /** Upsert a doc-status record, optionally recording the last job id. */
  async setDocStatus(collection, docId, status, jobId) {
    const key = `fylo:doc:${collection}:${docId}`
    const next = {
      ...(RedisMock.docs.get(key) ?? {}),
      status,
      updatedAt: String(Date.now())
    }
    if (jobId) next.lastJobId = jobId
    RedisMock.docs.set(key, next)
  }

  /** Return a copy of the job, or null when unknown. */
  async getJob(jobId) {
    const found = RedisMock.jobs.get(jobId)
    if (!found) return null
    return { ...found }
  }

  /** Return the doc-status record, or null when unknown. */
  async getDocStatus(collection, docId) {
    const doc = RedisMock.docs.get(`fylo:doc:${collection}:${docId}`)
    return doc ?? null
  }

  /** Page through the first `count` dead letters, joined with their jobs. */
  async readDeadLetters(count = 10) {
    const page = RedisMock.deadLetters.slice(0, count)
    return page.map((dl) => ({
      streamId: dl.streamId,
      job: { ...RedisMock.jobs.get(dl.jobId) },
      reason: dl.reason,
      failedAt: dl.failedAt
    }))
  }

  /**
   * Requeue a dead letter: reset attempt/error state, enqueue it again, and
   * remove it from the dead-letter list. Returns a copy of the requeued job,
   * or null when the letter or its job is gone.
   */
  async replayDeadLetter(streamId) {
    const letter = RedisMock.deadLetters.find((dl) => dl.streamId === streamId)
    if (!letter) return null
    const original = RedisMock.jobs.get(letter.jobId)
    if (!original) return null
    const stamp = Date.now()
    const requeued = {
      ...original,
      status: 'queued',
      error: undefined,
      workerId: undefined,
      attempts: 0,
      updatedAt: stamp,
      nextAttemptAt: stamp
    }
    RedisMock.jobs.set(letter.jobId, requeued)
    await this.enqueueWrite(requeued)
    RedisMock.deadLetters = RedisMock.deadLetters.filter((dl) => dl.streamId !== streamId)
    return { ...requeued }
  }

  /** Counts: total stream entries, claimed entries, and dead letters. */
  async getQueueStats() {
    let pending = 0
    for (const rec of RedisMock.stream) {
      if (rec.claimedBy) pending += 1
    }
    return {
      queued: RedisMock.stream.length,
      pending,
      deadLetters: RedisMock.deadLetters.length
    }
  }

  /** First-caller-wins per-doc lock; false when already held. */
  async acquireDocLock(collection, docId, jobId) {
    const key = `fylo:lock:${collection}:${docId}`
    if (RedisMock.locks.has(key)) return false
    RedisMock.locks.set(key, jobId)
    return true
  }

  /** Release only when `jobId` is the current holder. */
  async releaseDocLock(collection, docId, jobId) {
    const key = `fylo:lock:${collection}:${docId}`
    if (RedisMock.locks.get(key) === jobId) RedisMock.locks.delete(key)
  }

  /** Live subscriptions yield nothing in the mock. */
  async *subscribe(_collection) {}
}
|
|
@@ -1,66 +1,43 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
* or the AWS CLI. Each test file gets a fresh store because mock.module is
|
|
4
|
-
* hoisted before imports, and module-level state is isolated per test file
|
|
5
|
-
* in Bun's test runner.
|
|
6
|
-
*
|
|
7
|
-
* createBucket / deleteBucket are no-ops (bucket creation/deletion is
|
|
8
|
-
* handled implicitly by the in-memory store).
|
|
9
|
-
*/
|
|
10
|
-
|
|
11
|
-
const store = new Map<string, Map<string, string>>()
|
|
12
|
-
|
|
13
|
-
function getBucket(name: string): Map<string, string> {
|
|
1
|
+
const store = new Map()
|
|
2
|
+
function getBucket(name) {
|
|
14
3
|
if (!store.has(name)) store.set(name, new Map())
|
|
15
|
-
return store.get(name)
|
|
4
|
+
return store.get(name)
|
|
16
5
|
}
|
|
17
|
-
|
|
18
6
|
export default class S3Mock {
|
|
19
|
-
|
|
20
|
-
static
|
|
21
|
-
|
|
22
|
-
static readonly CREDS = {
|
|
7
|
+
static BUCKET_ENV = process.env.BUCKET_PREFIX
|
|
8
|
+
static CREDS = {
|
|
23
9
|
accessKeyId: 'mock',
|
|
24
10
|
secretAccessKey: 'mock',
|
|
25
11
|
region: 'mock',
|
|
26
|
-
endpoint: undefined
|
|
12
|
+
endpoint: undefined
|
|
27
13
|
}
|
|
28
|
-
|
|
29
|
-
static getBucketFormat(collection: string): string {
|
|
14
|
+
static getBucketFormat(collection) {
|
|
30
15
|
return S3Mock.BUCKET_ENV ? `${S3Mock.BUCKET_ENV}-${collection}` : collection
|
|
31
16
|
}
|
|
32
|
-
|
|
33
|
-
static file(collection: string, path: string) {
|
|
17
|
+
static file(collection, path) {
|
|
34
18
|
const bucket = getBucket(S3Mock.getBucketFormat(collection))
|
|
35
19
|
return {
|
|
36
20
|
get size() {
|
|
37
21
|
const val = bucket.get(path)
|
|
38
22
|
return val !== undefined ? val.length : 0
|
|
39
23
|
},
|
|
40
|
-
async text()
|
|
24
|
+
async text() {
|
|
41
25
|
return bucket.get(path) ?? ''
|
|
42
26
|
}
|
|
43
27
|
}
|
|
44
28
|
}
|
|
45
|
-
|
|
46
|
-
static async list(collection: string, options: {
|
|
47
|
-
prefix?: string
|
|
48
|
-
delimiter?: string
|
|
49
|
-
maxKeys?: number
|
|
50
|
-
continuationToken?: string
|
|
51
|
-
} = {}) {
|
|
29
|
+
static async list(collection, options = {}) {
|
|
52
30
|
const bucket = getBucket(S3Mock.getBucketFormat(collection))
|
|
53
31
|
const prefix = options.prefix ?? ''
|
|
54
32
|
const delimiter = options.delimiter
|
|
55
33
|
const maxKeys = options.maxKeys ?? 1000
|
|
56
34
|
const token = options.continuationToken
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
35
|
+
const allKeys = Array.from(bucket.keys())
|
|
36
|
+
.filter((k) => k.startsWith(prefix))
|
|
37
|
+
.sort()
|
|
60
38
|
if (delimiter) {
|
|
61
|
-
const prefixSet = new Set
|
|
62
|
-
const contents
|
|
63
|
-
|
|
39
|
+
const prefixSet = new Set()
|
|
40
|
+
const contents = []
|
|
64
41
|
for (const key of allKeys) {
|
|
65
42
|
const rest = key.slice(prefix.length)
|
|
66
43
|
const idx = rest.indexOf(delimiter)
|
|
@@ -70,45 +47,34 @@ export default class S3Mock {
|
|
|
70
47
|
contents.push({ key })
|
|
71
48
|
}
|
|
72
49
|
}
|
|
73
|
-
|
|
74
|
-
const allPrefixes = Array.from(prefixSet).map(p => ({ prefix: p }))
|
|
50
|
+
const allPrefixes = Array.from(prefixSet).map((p) => ({ prefix: p }))
|
|
75
51
|
const limitedPrefixes = allPrefixes.slice(0, maxKeys)
|
|
76
|
-
|
|
77
52
|
return {
|
|
78
53
|
contents: contents.length ? contents : undefined,
|
|
79
|
-
commonPrefixes: limitedPrefixes.length
|
|
80
|
-
? limitedPrefixes
|
|
81
|
-
: undefined,
|
|
54
|
+
commonPrefixes: limitedPrefixes.length ? limitedPrefixes : undefined,
|
|
82
55
|
isTruncated: allPrefixes.length > maxKeys,
|
|
83
56
|
nextContinuationToken: undefined
|
|
84
57
|
}
|
|
85
58
|
}
|
|
86
|
-
|
|
87
59
|
const startIdx = token ? parseInt(token) : 0
|
|
88
60
|
const page = allKeys.slice(startIdx, startIdx + maxKeys)
|
|
89
|
-
const nextToken =
|
|
90
|
-
? String(startIdx + maxKeys)
|
|
91
|
-
: undefined
|
|
92
|
-
|
|
61
|
+
const nextToken =
|
|
62
|
+
startIdx + maxKeys < allKeys.length ? String(startIdx + maxKeys) : undefined
|
|
93
63
|
return {
|
|
94
|
-
contents: page.length ? page.map(k => ({ key: k })) : undefined,
|
|
64
|
+
contents: page.length ? page.map((k) => ({ key: k })) : undefined,
|
|
95
65
|
isTruncated: !!nextToken,
|
|
96
66
|
nextContinuationToken: nextToken,
|
|
97
67
|
commonPrefixes: undefined
|
|
98
68
|
}
|
|
99
69
|
}
|
|
100
|
-
|
|
101
|
-
static async put(collection: string, path: string, data: string): Promise<void> {
|
|
70
|
+
static async put(collection, path, data) {
|
|
102
71
|
getBucket(S3Mock.getBucketFormat(collection)).set(path, data)
|
|
103
72
|
}
|
|
104
|
-
|
|
105
|
-
static async delete(collection: string, path: string): Promise<void> {
|
|
73
|
+
static async delete(collection, path) {
|
|
106
74
|
getBucket(S3Mock.getBucketFormat(collection)).delete(path)
|
|
107
75
|
}
|
|
108
|
-
|
|
109
|
-
static async
|
|
110
|
-
|
|
111
|
-
static async deleteBucket(collection: string): Promise<void> {
|
|
76
|
+
static async createBucket(_collection) {}
|
|
77
|
+
static async deleteBucket(collection) {
|
|
112
78
|
store.delete(S3Mock.getBucketFormat(collection))
|
|
113
79
|
}
|
|
114
80
|
}
|
package/tests/schemas/album.json
CHANGED
package/tests/schemas/photo.json
CHANGED
package/tests/schemas/post.json
CHANGED
package/tests/schemas/tip.json
CHANGED
package/tests/schemas/todo.json
CHANGED