@delma/fylo 2.0.1 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +185 -267
- package/package.json +2 -5
- package/src/core/directory.ts +22 -354
- package/src/engines/s3-files/documents.ts +65 -0
- package/src/engines/s3-files/filesystem.ts +172 -0
- package/src/engines/s3-files/query.ts +291 -0
- package/src/engines/s3-files/types.ts +42 -0
- package/src/engines/s3-files.ts +391 -690
- package/src/engines/types.ts +1 -1
- package/src/index.ts +142 -1237
- package/src/sync.ts +58 -0
- package/src/types/fylo.d.ts +66 -161
- package/src/types/node-runtime.d.ts +1 -0
- package/tests/collection/truncate.test.js +11 -10
- package/tests/helpers/root.js +7 -0
- package/tests/integration/create.test.js +9 -9
- package/tests/integration/delete.test.js +16 -14
- package/tests/integration/edge-cases.test.js +29 -25
- package/tests/integration/encryption.test.js +47 -30
- package/tests/integration/export.test.js +11 -11
- package/tests/integration/join-modes.test.js +16 -16
- package/tests/integration/nested.test.js +26 -24
- package/tests/integration/operators.test.js +43 -29
- package/tests/integration/read.test.js +25 -21
- package/tests/integration/rollback.test.js +21 -51
- package/tests/integration/s3-files.performance.test.js +75 -0
- package/tests/integration/s3-files.test.js +57 -44
- package/tests/integration/sync.test.js +154 -0
- package/tests/integration/update.test.js +24 -18
- package/src/adapters/redis.ts +0 -487
- package/src/adapters/s3.ts +0 -61
- package/src/core/walker.ts +0 -174
- package/src/core/write-queue.ts +0 -59
- package/src/migrate-cli.ts +0 -22
- package/src/migrate.ts +0 -74
- package/src/types/write-queue.ts +0 -42
- package/src/worker.ts +0 -18
- package/src/workers/write-worker.ts +0 -120
- package/tests/index.js +0 -14
- package/tests/integration/migration.test.js +0 -38
- package/tests/integration/queue.test.js +0 -83
- package/tests/mocks/redis.js +0 -123
- package/tests/mocks/s3.js +0 -80
package/tests/mocks/redis.js
DELETED
|
@@ -1,123 +0,0 @@
|
|
|
1
|
-
export default class RedisMock {
|
|
2
|
-
static stream = []
|
|
3
|
-
static jobs = new Map()
|
|
4
|
-
static docs = new Map()
|
|
5
|
-
static locks = new Map()
|
|
6
|
-
static deadLetters = []
|
|
7
|
-
static nextId = 0
|
|
8
|
-
async publish(_collection, _action, _keyId) {}
|
|
9
|
-
async claimTTID(_id, _ttlSeconds = 10) {
|
|
10
|
-
return true
|
|
11
|
-
}
|
|
12
|
-
async enqueueWrite(job) {
|
|
13
|
-
RedisMock.jobs.set(job.jobId, { ...job, nextAttemptAt: job.nextAttemptAt ?? Date.now() })
|
|
14
|
-
RedisMock.docs.set(`fylo:doc:${job.collection}:${job.docId}`, {
|
|
15
|
-
status: 'queued',
|
|
16
|
-
lastJobId: job.jobId,
|
|
17
|
-
updatedAt: String(Date.now())
|
|
18
|
-
})
|
|
19
|
-
const streamId = String(++RedisMock.nextId)
|
|
20
|
-
RedisMock.stream.push({
|
|
21
|
-
streamId,
|
|
22
|
-
jobId: job.jobId,
|
|
23
|
-
collection: job.collection,
|
|
24
|
-
docId: job.docId,
|
|
25
|
-
operation: job.operation
|
|
26
|
-
})
|
|
27
|
-
return streamId
|
|
28
|
-
}
|
|
29
|
-
async readWriteJobs(workerId, count = 1) {
|
|
30
|
-
const available = RedisMock.stream.filter((entry) => !entry.claimedBy).slice(0, count)
|
|
31
|
-
for (const entry of available) entry.claimedBy = workerId
|
|
32
|
-
return available.map((entry) => ({
|
|
33
|
-
streamId: entry.streamId,
|
|
34
|
-
job: { ...RedisMock.jobs.get(entry.jobId) }
|
|
35
|
-
}))
|
|
36
|
-
}
|
|
37
|
-
async ackWriteJob(streamId) {
|
|
38
|
-
RedisMock.stream = RedisMock.stream.filter((item) => item.streamId !== streamId)
|
|
39
|
-
}
|
|
40
|
-
async deadLetterWriteJob(streamId, job, reason) {
|
|
41
|
-
RedisMock.deadLetters.push({
|
|
42
|
-
streamId: String(RedisMock.deadLetters.length + 1),
|
|
43
|
-
jobId: job.jobId,
|
|
44
|
-
reason,
|
|
45
|
-
failedAt: Date.now()
|
|
46
|
-
})
|
|
47
|
-
await this.ackWriteJob(streamId)
|
|
48
|
-
}
|
|
49
|
-
async claimPendingJobs(workerId, _minIdleMs = 30000, count = 10) {
|
|
50
|
-
const pending = RedisMock.stream.filter((entry) => entry.claimedBy).slice(0, count)
|
|
51
|
-
for (const entry of pending) entry.claimedBy = workerId
|
|
52
|
-
return pending.map((entry) => ({
|
|
53
|
-
streamId: entry.streamId,
|
|
54
|
-
job: { ...RedisMock.jobs.get(entry.jobId) }
|
|
55
|
-
}))
|
|
56
|
-
}
|
|
57
|
-
async setJobStatus(jobId, status, extra = {}) {
|
|
58
|
-
const job = RedisMock.jobs.get(jobId)
|
|
59
|
-
if (job) Object.assign(job, extra, { status, updatedAt: Date.now() })
|
|
60
|
-
}
|
|
61
|
-
async setDocStatus(collection, docId, status, jobId) {
|
|
62
|
-
const key = `fylo:doc:${collection}:${docId}`
|
|
63
|
-
const curr = RedisMock.docs.get(key) ?? {}
|
|
64
|
-
RedisMock.docs.set(key, {
|
|
65
|
-
...curr,
|
|
66
|
-
status,
|
|
67
|
-
updatedAt: String(Date.now()),
|
|
68
|
-
...(jobId ? { lastJobId: jobId } : {})
|
|
69
|
-
})
|
|
70
|
-
}
|
|
71
|
-
async getJob(jobId) {
|
|
72
|
-
const job = RedisMock.jobs.get(jobId)
|
|
73
|
-
return job ? { ...job } : null
|
|
74
|
-
}
|
|
75
|
-
async getDocStatus(collection, docId) {
|
|
76
|
-
return RedisMock.docs.get(`fylo:doc:${collection}:${docId}`) ?? null
|
|
77
|
-
}
|
|
78
|
-
async readDeadLetters(count = 10) {
|
|
79
|
-
return RedisMock.deadLetters.slice(0, count).map((item) => ({
|
|
80
|
-
streamId: item.streamId,
|
|
81
|
-
job: { ...RedisMock.jobs.get(item.jobId) },
|
|
82
|
-
reason: item.reason,
|
|
83
|
-
failedAt: item.failedAt
|
|
84
|
-
}))
|
|
85
|
-
}
|
|
86
|
-
async replayDeadLetter(streamId) {
|
|
87
|
-
const item = RedisMock.deadLetters.find((entry) => entry.streamId === streamId)
|
|
88
|
-
if (!item) return null
|
|
89
|
-
const job = RedisMock.jobs.get(item.jobId)
|
|
90
|
-
if (!job) return null
|
|
91
|
-
const replayed = {
|
|
92
|
-
...job,
|
|
93
|
-
status: 'queued',
|
|
94
|
-
error: undefined,
|
|
95
|
-
workerId: undefined,
|
|
96
|
-
attempts: 0,
|
|
97
|
-
updatedAt: Date.now(),
|
|
98
|
-
nextAttemptAt: Date.now()
|
|
99
|
-
}
|
|
100
|
-
RedisMock.jobs.set(item.jobId, replayed)
|
|
101
|
-
await this.enqueueWrite(replayed)
|
|
102
|
-
RedisMock.deadLetters = RedisMock.deadLetters.filter((entry) => entry.streamId !== streamId)
|
|
103
|
-
return { ...replayed }
|
|
104
|
-
}
|
|
105
|
-
async getQueueStats() {
|
|
106
|
-
return {
|
|
107
|
-
queued: RedisMock.stream.length,
|
|
108
|
-
pending: RedisMock.stream.filter((entry) => entry.claimedBy).length,
|
|
109
|
-
deadLetters: RedisMock.deadLetters.length
|
|
110
|
-
}
|
|
111
|
-
}
|
|
112
|
-
async acquireDocLock(collection, docId, jobId) {
|
|
113
|
-
const key = `fylo:lock:${collection}:${docId}`
|
|
114
|
-
if (RedisMock.locks.has(key)) return false
|
|
115
|
-
RedisMock.locks.set(key, jobId)
|
|
116
|
-
return true
|
|
117
|
-
}
|
|
118
|
-
async releaseDocLock(collection, docId, jobId) {
|
|
119
|
-
const key = `fylo:lock:${collection}:${docId}`
|
|
120
|
-
if (RedisMock.locks.get(key) === jobId) RedisMock.locks.delete(key)
|
|
121
|
-
}
|
|
122
|
-
async *subscribe(_collection) {}
|
|
123
|
-
}
|
package/tests/mocks/s3.js
DELETED
|
// In-memory stand-in for the project's S3 adapter, used by the test suite.
// bucketName -> (key -> string body)
const store = new Map()

/** Return (creating if needed) the key map for `name`. */
function getBucket(name) {
  if (!store.has(name)) store.set(name, new Map())
  return store.get(name)
}

/**
 * Static-only mock of the S3 file API. Buckets are named per collection,
 * optionally prefixed by BUCKET_PREFIX (read once at module load).
 *
 * NOTE(review): reconstructed from a mangled diff rendering; behavior is
 * unchanged except `parseInt(token)` now passes an explicit radix.
 */
export default class S3Mock {
  // Captured at import time; later env changes are not observed.
  static BUCKET_ENV = process.env.BUCKET_PREFIX
  static CREDS = {
    accessKeyId: 'mock',
    secretAccessKey: 'mock',
    region: 'mock',
    endpoint: undefined
  }

  /** Bucket name for a collection: '<prefix>-<collection>' or bare name. */
  static getBucketFormat(collection) {
    return S3Mock.BUCKET_ENV ? `${S3Mock.BUCKET_ENV}-${collection}` : collection
  }

  /**
   * Lazy handle to an object; reads go through to the bucket on access, so a
   * handle created before a put still sees the data.
   * Missing keys read as size 0 / empty text.
   */
  static file(collection, path) {
    const bucket = getBucket(S3Mock.getBucketFormat(collection))
    return {
      get size() {
        const val = bucket.get(path)
        return val !== undefined ? val.length : 0
      },
      async text() {
        return bucket.get(path) ?? ''
      }
    }
  }

  /**
   * List keys, modeled on ListObjectsV2.
   * - With `delimiter`: groups keys into `commonPrefixes`; `contents` holds
   *   keys without the delimiter past `prefix`. Only prefixes are capped by
   *   `maxKeys`, and no continuation token is issued (mock simplification).
   * - Without: numeric-offset pagination via `continuationToken`.
   * Empty `contents`/`commonPrefixes` are reported as undefined, matching the
   * real client's omission of empty result arrays.
   */
  static async list(collection, options = {}) {
    const bucket = getBucket(S3Mock.getBucketFormat(collection))
    const prefix = options.prefix ?? ''
    const delimiter = options.delimiter
    const maxKeys = options.maxKeys ?? 1000
    const token = options.continuationToken
    const allKeys = Array.from(bucket.keys())
      .filter((k) => k.startsWith(prefix))
      .sort()
    if (delimiter) {
      const prefixSet = new Set()
      const contents = []
      for (const key of allKeys) {
        const rest = key.slice(prefix.length)
        const idx = rest.indexOf(delimiter)
        if (idx >= 0) {
          // Keep everything up to and including the first delimiter.
          prefixSet.add(prefix + rest.slice(0, idx + 1))
        } else {
          contents.push({ key })
        }
      }
      const allPrefixes = Array.from(prefixSet).map((p) => ({ prefix: p }))
      const limitedPrefixes = allPrefixes.slice(0, maxKeys)
      return {
        contents: contents.length ? contents : undefined,
        commonPrefixes: limitedPrefixes.length ? limitedPrefixes : undefined,
        isTruncated: allPrefixes.length > maxKeys,
        nextContinuationToken: undefined
      }
    }
    // Flat pagination: token is the numeric offset of the next page.
    const startIdx = token ? Number.parseInt(token, 10) : 0
    const page = allKeys.slice(startIdx, startIdx + maxKeys)
    const nextToken =
      startIdx + maxKeys < allKeys.length ? String(startIdx + maxKeys) : undefined
    return {
      contents: page.length ? page.map((k) => ({ key: k })) : undefined,
      isTruncated: !!nextToken,
      nextContinuationToken: nextToken,
      commonPrefixes: undefined
    }
  }

  /** Store `data` under `path` in the collection's bucket. */
  static async put(collection, path, data) {
    getBucket(S3Mock.getBucketFormat(collection)).set(path, data)
  }

  /** Remove `path` from the collection's bucket (no-op if absent). */
  static async delete(collection, path) {
    getBucket(S3Mock.getBucketFormat(collection)).delete(path)
  }

  /** Bucket creation is implicit in the mock — no-op. */
  static async createBucket(_collection) {}

  /** Drop the collection's bucket and all of its objects. */
  static async deleteBucket(collection) {
    store.delete(S3Mock.getBucketFormat(collection))
  }
}