@fireproof/core 0.1.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/hooks/use-fireproof.ts +44 -27
- package/package.json +9 -6
- package/scripts/keygen.js +3 -0
- package/src/blockstore.js +27 -56
- package/src/clock.js +0 -1
- package/src/crypto.js +65 -0
- package/src/db-index.js +34 -45
- package/src/fireproof.js +87 -66
- package/src/hydrator.js +14 -11
- package/src/listener.js +7 -14
- package/src/prolly.js +130 -54
- package/src/sha1.js +82 -0
- package/src/valet.js +169 -11
- package/test/db-index.test.js +15 -1
- package/test/fireproof.test.js +83 -4
- package/test/hydrator.test.js +8 -2
package/src/sha1.js
ADDED
@@ -0,0 +1,82 @@
+// from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
+// MIT License Copyright (c) 2020 Dumitru Uzun
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
+// import {
+//   isLittleEndian, switchEndianness32
+// } from 'string-encode'
+
+/**
+ * SHA1 on binary array
+ *
+ * @param {Uint8Array} b Data to hash
+ *
+ * @return {Uint8Array} sha1 hash
+ */
+export default function rawSha1 (b) {
+  let i = b.byteLength
+  let bs = 0
+  let A; let B; let C; let D; let G
+  const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0])
+  const W = new Uint32Array(80)
+  const nrWords = (i / 4 + 2) | 15
+  const words = new Uint32Array(nrWords + 1)
+  let j
+
+  words[nrWords] = i * 8
+  words[i >> 2] |= 0x80 << (~i << 3)
+  for (;i--;) {
+    words[i >> 2] |= b[i] << (~i << 3)
+  }
+
+  for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
+    for (i = 0; i < 80;
+      A[0] = (
+        G = ((b = A[0]) << 5 | b >>> 27) +
+          A[4] +
+          (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
+          0x5A827999,
+        B = A[1],
+        C = A[2],
+        D = A[3],
+        G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
+          ? j !== 2
+            ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
+            : (B & C | B & D | C & D) + 0x34994343
+          : B & C | ~B & D
+        )
+      )
+      , A[1] = b
+      , A[2] = B << 30 | B >>> 2
+      , A[3] = C
+      , A[4] = D
+      , ++i
+    ) {
+      G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]
+    }
+
+    for (i = 5; i;) H[--i] = H[i] + A[i]
+  }
+
+  // if (isLittleEndian()) {
+  //   H = H.map(switchEndianness32)
+  // }
+
+  return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
+}
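
For orientation (not part of the package diff): rawSha1 takes a Uint8Array and returns the 20-byte digest as a Uint8Array. A minimal usage sketch, mirroring how valet.js below derives its keyId from hex key material; the sample key is hypothetical:

    import { Buffer } from 'buffer'
    import rawSha1 from './sha1.js'

    const keyMaterial = 'f'.repeat(64) // hypothetical 32-byte key as a 64-char hex string
    const keyBytes = Uint8Array.from(Buffer.from(keyMaterial, 'hex'))
    const digest = rawSha1(keyBytes) // Uint8Array of 20 bytes
    console.log(Buffer.from(digest).toString('hex')) // 40-char hex fingerprint (the valet keyId)
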
package/src/valet.js
CHANGED
@@ -1,12 +1,30 @@
 import { CarReader } from '@ipld/car'
 import { CID } from 'multiformats/cid'
+import { sha256 } from 'multiformats/hashes/sha2'
+import * as CBW from '@ipld/car/buffer-writer'
+import * as raw from 'multiformats/codecs/raw'
+import * as Block from 'multiformats/block'
+import * as dagcbor from '@ipld/dag-cbor'
 import { openDB } from 'idb'
 import cargoQueue from 'async/cargoQueue.js'
+import { bf } from 'prolly-trees/utils'
+import { nocache as cache } from 'prolly-trees/cache'
+import { encrypt, decrypt } from './crypto.js'
+import { Buffer } from 'buffer'
+import * as codec from 'encrypted-block'
+import sha1sync from './sha1.js'
+const chunker = bf(3)
+
+const NO_ENCRYPT =
+  typeof process !== 'undefined' ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT

 export default class Valet {
   idb = null
+  name = null
   #uploadQueue = null
   #alreadyEnqueued = new Set()
+  #keyMaterial = null
+  keyId = 'null'

   /**
    * Function installed by the database to upload car files
@@ -14,8 +32,9 @@ export default class Valet {
    */
   uploadFunction = null

-  constructor (name = 'default') {
+  constructor (name = 'default', keyMaterial) {
     this.name = name
+    this.setKeyMaterial(keyMaterial)
     this.#uploadQueue = cargoQueue(async (tasks, callback) => {
       console.log(
         'queue worker',
@@ -24,7 +43,7 @@ export default class Valet {
       )
       if (this.uploadFunction) {
         // todo we can coalesce these into a single car file
-        return await this.withDB(async
+        return await this.withDB(async db => {
           for (const task of tasks) {
             await this.uploadFunction(task.carCid, task.value)
             // update the indexedb to mark this car as no longer pending
@@ -38,8 +57,8 @@ export default class Valet {
     })

     this.#uploadQueue.drain(async () => {
-      return await this.withDB(async
-        const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(
+      return await this.withDB(async db => {
+        const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car)
         for (const carKey of carKeys) {
           await this.uploadFunction(carKey, await db.get('cars', carKey))
           const carMeta = await db.get('cidToCar', carKey)
@@ -50,9 +69,46 @@ export default class Valet {
     })
   }

-
+  getKeyMaterial () {
+    return this.#keyMaterial
+  }
+
+  setKeyMaterial (km) {
+    if (km && !NO_ENCRYPT) {
+      const hex = Uint8Array.from(Buffer.from(km, 'hex'))
+      this.#keyMaterial = km
+      const hash = sha1sync(hex)
+      this.keyId = Buffer.from(hash).toString('hex')
+    } else {
+      this.#keyMaterial = null
+      this.keyId = 'null'
+    }
+    // console.trace('keyId', this.name, this.keyId)
+  }
+
+  /**
+   * Group the blocks into a car and write it to the valet.
+   * @param {InnerBlockstore} innerBlockstore
+   * @param {Set<string>} cids
+   * @returns {Promise<void>}
+   * @memberof Valet
+   */
+  async writeTransaction (innerBlockstore, cids) {
+    if (innerBlockstore.lastCid) {
+      if (this.#keyMaterial) {
+        // console.log('encrypting car', innerBlockstore.label)
+        const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.#keyMaterial)
+        await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
+      } else {
+        const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore)
+        await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
+      }
+    }
+  }
+
+  withDB = async dbWorkFun => {
     if (!this.idb) {
-      this.idb = await openDB(`fp.${this.name}.valet`, 2, {
+      this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
         upgrade (db, oldVersion, newVersion, transaction) {
           if (oldVersion < 1) {
             db.createObjectStore('cars') // todo use database name
@@ -75,7 +131,7 @@ export default class Valet {
    * @param {*} value
    */
   async parkCar (carCid, value, cids) {
-    await this.withDB(async
+    await this.withDB(async db => {
       const tx = db.transaction(['cars', 'cidToCar'], 'readwrite')
       await tx.objectStore('cars').put(value, carCid)
       await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) })
@@ -100,7 +156,7 @@ export default class Valet {
   remoteBlockFunction = null

   async getBlock (dataCID) {
-    return await this.withDB(async
+    return await this.withDB(async db => {
       const tx = db.transaction(['cars', 'cidToCar'], 'readonly')
       const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID)
       const carCid = indexResp?.car
@@ -109,10 +165,112 @@ export default class Valet {
       }
       const carBytes = await tx.objectStore('cars').get(carCid)
       const reader = await CarReader.fromBytes(carBytes)
-
-
-
+      if (this.#keyMaterial) {
+        const roots = await reader.getRoots()
+        const readerGetWithCodec = async cid => {
+          const got = await reader.get(cid)
+          // console.log('got.', cid.toString())
+          let useCodec = codec
+          if (cid.toString().indexOf('bafy') === 0) {
+            useCodec = dagcbor
+          }
+          const decoded = await Block.decode({
+            ...got,
+            codec: useCodec,
+            hasher: sha256
+          })
+          // console.log('decoded', decoded.value)
+          return decoded
+        }
+        const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.#keyMaterial)
+        const block = blocks.find(b => b.cid.toString() === dataCID)
+        if (block) {
+          return block.bytes
+        }
+      } else {
+        const gotBlock = await reader.get(CID.parse(dataCID))
+        if (gotBlock) {
+          return gotBlock.bytes
+        }
       }
     })
   }
 }
+
+const blocksToCarBlock = async (lastCid, blocks) => {
+  let size = 0
+  const headerSize = CBW.headerLength({ roots: [lastCid] })
+  size += headerSize
+  if (!Array.isArray(blocks)) {
+    blocks = Array.from(blocks.entries())
+  }
+  for (const { cid, bytes } of blocks) {
+    size += CBW.blockLength({ cid, bytes })
+  }
+  const buffer = new Uint8Array(size)
+  const writer = await CBW.createWriter(buffer, { headerSize })
+
+  writer.addRoot(lastCid)
+
+  for (const { cid, bytes } of blocks) {
+    writer.write({ cid, bytes })
+  }
+  await writer.close()
+  return await Block.encode({ value: writer.bytes, hasher: sha256, codec: raw })
+}
+
+const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
+  const encryptionKey = Buffer.from(keyMaterial, 'hex')
+  const encryptedBlocks = []
+  const theCids = []
+  for (const { cid } of blocks.entries()) {
+    theCids.push(cid.toString())
+  }
+
+  let last
+  for await (const block of encrypt({
+    cids: theCids,
+    get: async cid => blocks.get(cid), // maybe we can just use blocks.get
+    key: encryptionKey,
+    hasher: sha256,
+    chunker,
+    codec: dagcbor, // should be crypto?
+    root: innerBlockStoreClockRootCid
+  })) {
+    encryptedBlocks.push(block)
+    last = block
+  }
+  // console.log('last', last.cid.toString(), 'for clock', innerBlockStoreClockRootCid.toString())
+  const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks)
+  return encryptedCar
+}
+// { root, get, key, cache, chunker, hasher }
+
+const memoizeDecryptedCarBlocks = new Map()
+const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
+  if (memoizeDecryptedCarBlocks.has(cid.toString())) {
+    return memoizeDecryptedCarBlocks.get(cid.toString())
+  } else {
+    const blocksPromise = (async () => {
+      const decryptionKey = Buffer.from(keyMaterial, 'hex')
+      // console.log('decrypting', keyMaterial, cid.toString())
+      const cids = new Set()
+      const decryptedBlocks = []
+      for await (const block of decrypt({
+        root: cid,
+        get,
+        key: decryptionKey,
+        chunker,
+        hasher: sha256,
+        cache,
+        codec: dagcbor
+      })) {
+        decryptedBlocks.push(block)
+        cids.add(block.cid.toString())
+      }
+      return { blocks: decryptedBlocks, cids }
+    })()
+    memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise)
+    return blocksPromise
+  }
+}
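
Taken together, these valet changes make the on-disk database name depend on the key. A hedged usage sketch, assumed from the constructor and setKeyMaterial above rather than copied from the package:

    import Valet from './valet.js'

    // key material is a 64-char hex string; 'f'.repeat(64) is a hypothetical stand-in
    const valet = new Valet('my-db', 'f'.repeat(64))
    console.log(valet.keyId) // 40-char hex SHA-1 of the key material, or 'null' with no key / NO_ENCRYPT set
    // withDB() opens IndexedDB as `fp.${valet.keyId}.my-db.valet`;
    // writeTransaction() packs blocks into a CAR file (encrypted when key material is present),
    // and getBlock() decrypts on read, memoizing the decrypted blocks per CAR root.
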
package/test/db-index.test.js
CHANGED
@@ -27,7 +27,10 @@ describe('DbIndex query', () => {
     }
     index = new DbIndex(database, function (doc, map) {
       map(doc.age, doc.name)
-    })
+    }, null, { name: 'namesByAge' })
+  })
+  it('has a name', () => {
+    assert.equal(index.name, 'namesByAge')
   })
   it('query index range', async () => {
     const result = await index.query({ range: [41, 49] })
@@ -56,6 +59,14 @@ describe('DbIndex query', () => {
     assert.equal(result.rows[0].value, 'emily')
     assert.equal(result.rows[result.rows.length - 1].value, 'dave')
   })
+  it('query index limit', async () => {
+    const result = await index.query({ limit: 3 })
+    assert(result, 'did return result')
+    assert(result.rows)
+    assert.equal(result.rows.length, 3, 'six row matched')
+    assert.equal(result.rows[0].key, 4)
+    assert.equal(result.rows[0].value, 'emily')
+  })
   it('query index NaN', async () => {
     const result = await index.query({ range: [NaN, 44] })
     assert(result, 'did return result')
@@ -236,6 +247,9 @@ describe('DbIndex query with bad index definition', () => {
       map(doc.oops.missingField, doc.name)
     })
   })
+  it('has a default name', () => {
+    assert.equal(index.name, 'doc.oops.missingField, doc.name')
+  })
   it('query index range', async () => {
     const oldErrFn = console.error
     console.error = () => {}
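
The test changes above exercise a new fourth DbIndex argument, an options object carrying the index name. A sketch of that construction shape, inferred from the tests rather than from db-index.js itself:

    import Fireproof from '../src/fireproof.js'
    import DbIndex from '../src/db-index.js'

    const database = Fireproof.storage() // factory used elsewhere in the test suite
    const byAge = new DbIndex(database, function (doc, map) {
      map(doc.age, doc.name)
    }, null, { name: 'namesByAge' })
    console.log(byAge.name) // 'namesByAge'
    // Without a name option the tests expect a default derived from the map body,
    // e.g. 'doc.oops.missingField, doc.name'.
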
package/test/fireproof.test.js
CHANGED
@@ -20,10 +20,13 @@ describe('Fireproof', () => {
   })
   it('takes an optional name', () => {
     assert.equal(database.name, 'helloName')
+    const km = database.blocks.valet.getKeyMaterial()
+    if (process.env.NO_ENCRYPT) { assert.equal(km, null) } else { assert.equal(km.length, 64) }
     const x = database.blocks.valet.idb
-
+    const keyId = database.blocks.valet.keyId
+    assert.equal(x.name.toString(), `fp.${keyId}.helloName.valet`)
   })
-  it('put and get document', async () => {
+  it('only put and get document', async () => {
     assert(resp0.id, 'should have id')
     assert.equal(resp0.id, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
     const avalue = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
@@ -103,7 +106,11 @@ describe('Fireproof', () => {
     console.log('err', err)
     assert.match(err.message, /MVCC conflict/)
   })
-
+  it('allDocuments', async () => {
+    await database.put({ name: 'bob' })
+    const allDocs = await database.allDocuments()
+    assert.equal(allDocs.rows.length, 2)
+  })
   it('has a factory for making new instances with default settings', async () => {
     // TODO if you pass it an email it asks the local keyring, and if no key, does the email validation thing
     const db = await Fireproof.storage({ email: 'jchris@gmail.com' })
@@ -117,6 +124,14 @@ describe('Fireproof', () => {
     const changes = await db.changesSince()
     assert.equal(changes.rows.length, 0)
   })
+  it('delete on an empty database', async () => {
+    const db = Fireproof.storage()
+    assert(db instanceof Fireproof)
+    const e = await db.del('8c5c0c5c0c5c').catch((err) => err)
+    assert.equal(e.id, '8c5c0c5c0c5c')
+    const changes = await db.changesSince()
+    assert.equal(changes.rows.length, 0)
+  })
   it('update existing document', async () => {
     // const alice = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
     // assert.equal(alice.name, 'alice')
@@ -187,7 +202,7 @@ describe('Fireproof', () => {
     const e = await database.get('missing').catch((e) => e)
     assert.equal(e.message, 'Not found')
   })
-  it('delete
+  it('delete the only document', async () => {
     const id = '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c'
     const found = await database.get(id)
     assert.equal(found._id, id)
@@ -205,6 +220,31 @@ describe('Fireproof', () => {
     assert.equal(e.message, 'Not found')
   })

+  it('delete not last document', async () => {
+    const resp1 = await database.put({
+      _id: 'second',
+      name: 'bob',
+      age: 39
+    })
+
+    // const id = '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c'
+    const id = resp1.id
+    const found = await database.get(id)
+    assert.equal(found._id, id)
+    const deleted = await database.del(id)
+    assert.equal(deleted.id, id)
+    const e = await database
+      .get(id)
+      .then((doc) => assert.equal('should be deleted', JSON.stringify(doc)))
+      .catch((e) => {
+        if (e.message !== 'Not found') {
+          throw e
+        }
+        return e
+      })
+    assert.equal(e.message, 'Not found')
+  })
+
   it("delete a document with validation function that doesn't allow it", async () => {
     const validationDatabase = new Fireproof(new Blockstore(), [], {
       validateChange: (newDoc, oldDoc, authCtx) => {
@@ -367,6 +407,24 @@ describe('Fireproof', () => {
       }
       })

+      database.blocks.clearCommittedCache() // clear cache to force re-reading from encrypted store
+
+      doc = await database.get(resp.id).catch(e => {
+        console.log('failed', e)
+        assert.fail(`get failed on _id: ${id}, error: ${e.message}`)
+      })
+
+      assert.equal(doc.index, index, `doc.index is not equal to index for _id: ${id}`)
+      changes = await database.changesSince().catch(async e => {
+        assert.fail(`changesSince failed on _id: ${id}, error: ${e.message}`)
+      })
+      changes.rows.forEach(row => {
+        for (const key in row) {
+          const value = row[key]
+          assert(!/^bafy/.test(value), `Unexpected "bafy..." value found at index ${index} in row ${JSON.stringify(row)}`)
+        }
+      })
+
       // console.log('changes: ', index, changes.rows.length, JSON.stringify(changes.rows))
       assert.equal(changes.rows.length, index + 2, `failed on ${index}, with ${changes.rows.length} ${id}`)
     }
@@ -411,4 +469,25 @@ describe('Fireproof', () => {
     assert.equal(serialized.name, 'helloName')
     assert.equal(serialized.clock.length, 1)
   })
+  it('clocked changes in order', async () => {
+    await database.put({ _id: '2' })
+    await database.put({ _id: 'three' })
+    await database.put({ _id: '4' })
+    const changes = await database.changesSince(resp0.clock)
+    assert.equal(changes.rows.length, 3)
+    assert.equal(changes.rows[0].key, '2')
+    assert.equal(changes.rows[1].key, 'three')
+    assert.equal(changes.rows[2].key, '4')
+  })
+  it.skip('changes in order', async () => {
+    await database.put({ _id: '2' })
+    await database.put({ _id: 'three' })
+    await database.put({ _id: '4' })
+    const changes = await database.changesSince()
+    assert.equal(changes.rows.length, 4)
+    assert.equal(changes.rows[0].key, resp0.id)
+    assert.equal(changes.rows[1].key, '2')
+    assert.equal(changes.rows[2].key, 'three')
+    assert.equal(changes.rows[3].key, '4')
+  })
 })
package/test/hydrator.test.js
CHANGED
@@ -3,7 +3,6 @@ import assert from 'node:assert'
 import Fireproof from '../src/fireproof.js'
 import DbIndex from '../src/db-index.js'
 import Hydrator from '../src/hydrator.js'
-console.x = function () {}

 describe('DbIndex query', () => {
   let database, index
@@ -26,7 +25,7 @@ describe('DbIndex query', () => {
     }
     index = new DbIndex(database, function (doc, map) {
       map(doc.age, doc.name)
-    })
+    }, null, { name: 'names_by_age' })
   })
   it('serialize database with index', async () => {
     await database.put({ _id: 'rehy', name: 'drate', age: 1 })
@@ -36,12 +35,17 @@ describe('DbIndex query', () => {
     const serialized = database.toJSON()
     // console.log('serialized', serialized)
     assert.equal(serialized.name, 'global')
+    if (database.blocks.valet.keyId !== 'null') {
+      assert.equal(serialized.key.length, 64)
+    }
     assert.equal(serialized.clock.length, 1)
     assert.equal(serialized.clock[0].constructor.name, 'String')
     assert.equal(serialized.indexes.length, 1)
     assert.equal(serialized.indexes[0].code, `function (doc, map) {
       map(doc.age, doc.name)
     }`)
+    assert.equal(serialized.indexes[0].name, 'names_by_age')
+
     assert.equal(serialized.indexes[0].clock.byId.constructor.name, 'String')
     assert.equal(serialized.indexes[0].clock.byKey.constructor.name, 'String')
     assert.equal(serialized.indexes[0].clock.db[0].constructor.name, 'String')
@@ -69,6 +73,8 @@ describe('DbIndex query', () => {
     assert.equal(newIndex.indexByKey.cid, 'bafyreicr5rpvsxnqchcwk5rxlmdvd3fah2vexmbsp2dvr4cfdxd2q2ycgu')
     // assert.equal(newIndex.indexByKey.root, null)

+    assert.equal(newIndex.name, 'names_by_age')
+
     const newResult = await newIndex.query({ range: [0, 54] })
     assert.equal(newResult.rows[0].value, 'drate')
   })