@fireproof/core 0.0.6 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/hooks/use-fireproof.ts +0 -2
- package/package.json +5 -5
- package/src/blockstore.js +14 -10
- package/src/clock.js +4 -3
- package/src/db-index.js +35 -20
- package/src/fireproof.js +40 -14
- package/src/listener.js +0 -6
- package/src/prolly.js +42 -32
- package/src/valet.js +6 -62
- package/{src → test}/block.js +6 -6
- package/test/clock.test.js +0 -5
- package/test/db-index.test.js +10 -9
- package/test/fireproof.test.js +73 -16
- package/test/helpers.js +1 -1
- package/test/prolly.test.js +2 -2
- package/test/proofs.test.js +5 -5
- package/test/reproduce-fixture-bug.test.js +2 -2
- package/scripts/propernames/gen.sh +0 -3
- package/scripts/randomcid.js +0 -12
- package/scripts/words/gen.js +0 -55
package/hooks/use-fireproof.ts
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fireproof/core",
-  "version": "0.0.6",
+  "version": "0.0.7",
   "description": "Realtime database for IPFS",
   "main": "index.js",
   "type": "module",
@@ -37,7 +37,7 @@
     "cli-color": "^2.0.3",
     "idb": "^7.1.1",
     "multiformats": "^11.0.1",
-    "prolly-trees": "…
+    "prolly-trees": "1.0.2",
     "sade": "^1.8.1"
   },
   "devDependencies": {
@@ -62,12 +62,12 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/…
+    "url": "git+https://github.com/fireproof-storage/fireproof.git"
   },
   "bugs": {
-    "url": "https://github.com/…
+    "url": "https://github.com/fireproof-storage/fireproof/issues"
   },
-  "homepage": "https://github.com/…
+  "homepage": "https://github.com/fireproof-storage/fireproof#readme",
   "workspaces": [
     "examples/todomvc"
   ]
package/src/blockstore.js
CHANGED
@@ -36,11 +36,15 @@ export default class TransactionBlockstore {
   /** @type {Map<string, Uint8Array>} */
   #oldBlocks = new Map()
 
-  valet = …
+  valet = null
 
   #instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
   #inflightTransactions = new Set()
 
+  constructor (name) {
+    this.valet = new Valet(name)
+  }
+
   /**
    * Get a block from the store.
    *
@@ -111,15 +115,15 @@ export default class TransactionBlockstore {
    * @yields {AnyBlock}
    * @returns {AsyncGenerator<AnyBlock>}
    */
-  * entries () {
-    …
-    …
-    …
-    …
-    …
-    …
-    …
-  }
+  // * entries () {
+  //   // needs transaction blocks?
+  //   // for (const [str, bytes] of this.#blocks) {
+  //   //   yield { cid: parse(str), bytes }
+  //   // }
+  //   for (const [str, bytes] of this.#oldBlocks) {
+  //     yield { cid: parse(str), bytes }
+  //   }
+  // }
 
   /**
    * Begin a transaction. Ensures the uncommited blocks are empty at the begining.
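The substantive change here is that TransactionBlockstore now takes a database name and constructs its own Valet from it, so each named Fireproof database gets isolated block storage. A minimal sketch of what that wiring implies (the direct file import path is an assumption for illustration; the class is normally constructed for you by Fireproof.storage(name)):

```js
// Sketch only: shows the name → Valet wiring added in this version.
import TransactionBlockstore from './src/blockstore.js' // path assumed

const blocks = new TransactionBlockstore('todo-app')
// The constructor does `this.valet = new Valet(name)`, so the name
// propagates down to the storage layer (see valet.js below).
console.log(blocks.valet.name) // 'todo-app'
```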
package/src/clock.js
CHANGED
@@ -22,7 +22,7 @@ import { CIDCounter } from 'prolly-trees/utils'
  * Advance the clock by adding an event.
  *
  * @template T
- * @param {import('…
+ * @param {import('../test/block').BlockFetcher} blocks Block storage.
  * @param {EventLink<T>[]} head The head of the clock.
  * @param {EventLink<T>} event The event to add.
  * @returns {Promise<EventLink<T>[]>} The new head of the clock.
@@ -89,7 +89,7 @@ export class EventBlock extends Block {
 
 /** @template T */
 export class EventFetcher {
-  /** @param {import('…
+  /** @param {import('../test/block').BlockFetcher} blocks */
   constructor (blocks) {
     /** @private */
     this._blocks = blocks
@@ -162,7 +162,7 @@ async function contains (events, a, b) {
 
 /**
  * @template T
- * @param {import('…
+ * @param {import('../test/block').BlockFetcher} blocks Block storage.
  * @param {EventLink<T>[]} head
  * @param {object} [options]
  * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
@@ -285,6 +285,7 @@ async function findSortedEvents (events, head, tail) {
   const all = await Promise.all(head.map((h) => findEvents(events, h, tail)))
   for (const arr of all) {
     for (const { event, depth } of arr) {
+      // console.log('event value', event.value.data.value)
       const info = weights.get(event.cid.toString())
       if (info) {
         info.weight += depth
package/src/db-index.js
CHANGED
@@ -11,22 +11,38 @@ import charwise from 'charwise'
 
 const ALWAYS_REBUILD = true // todo: remove this
 
-const arrayCompare = (a, b) => {
-  …
-  …
-  …
-  …
-  …
-  …
-  …
-  …
-  …
-  …
-  …
-  …
+// const arrayCompare = (a, b) => {
+//   if (Array.isArray(a) && Array.isArray(b)) {
+//     const len = Math.min(a.length, b.length)
+//     for (let i = 0; i < len; i++) {
+//       const comp = simpleCompare(a[i], b[i])
+//       if (comp !== 0) {
+//         return comp
+//       }
+//     }
+//     return simpleCompare(a.length, b.length)
+//   } else {
+//     return simpleCompare(a, b)
+//   }
+// }
+
+const compare = (a, b) => {
+  const [aKey, aRef] = a
+  const [bKey, bRef] = b
+  const comp = simpleCompare(aKey, bKey)
+  if (comp !== 0) return comp
+  return refCompare(aRef, bRef)
 }
 
-const …
+const refCompare = (aRef, bRef) => {
+  if (Number.isNaN(aRef)) return -1
+  if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
+  if (!Number.isFinite(aRef)) return 1
+  // if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
+  return simpleCompare(aRef, bRef)
+}
+
+const opts = { cache, chunker: bf(3), codec, hasher, compare }
 
 const makeDoc = ({ key, value }) => ({ _id: key, ...value })
 
@@ -134,7 +150,10 @@ export default class DbIndex {
     return {
       proof: { index: await cidsToProof(response.cids) },
       // TODO fix this naming upstream in prolly/db-DbIndex?
-      rows: response.result.map(({ id, key, row }) => …
+      rows: response.result.map(({ id, key, row }) => {
+        // console.log('query', id, key, row)
+        return ({ id, key: charwise.decode(key), value: row })
+      })
     }
   }
 
@@ -188,11 +207,6 @@ export default class DbIndex {
 
     this.dbHead = result.clock
   }
-
-  // todo use the DbIndex from other peers?
-  // we might need to add CRDT logic to it for that
-  // it would only be a performance improvement, but might add a lot of complexity
-  // advanceIndex ()) {}
 }
 
 /**
@@ -240,6 +254,7 @@ async function doIndexQuery (blocks, dbIndexRoot, dbIndex, query) {
     return dbIndex.range(...encodedRange)
   } else if (query.key) {
     const encodedKey = charwise.encode(query.key)
+    console.log('getting key', encodedKey)
    return dbIndex.get(encodedKey)
   }
 }
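The new compare treats each index entry as a [key, ref] pair: keys are ordered first with simpleCompare, and only ties fall through to refCompare, which sorts NaN refs first and non-finite refs last. A standalone sketch of that ordering (reimplemented here for illustration, with a local simpleCompare standing in for the one imported from prolly-trees/utils):

```js
// Standalone sketch of the [key, ref] ordering introduced in db-index.js.
const simpleCompare = (a, b) => (a < b ? -1 : a > b ? 1 : 0)

const refCompare = (aRef, bRef) => {
  if (Number.isNaN(aRef)) return -1 // NaN refs sort first
  if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
  if (!Number.isFinite(aRef)) return 1 // Infinity refs sort last
  return simpleCompare(aRef, bRef)
}

const compare = ([aKey, aRef], [bKey, bRef]) => {
  const comp = simpleCompare(aKey, bKey)
  return comp !== 0 ? comp : refCompare(aRef, bRef)
}

// Keys win; refs only break ties between equal keys.
console.log([[2, 1], [1, 7], [1, 5]].sort(compare))
// => [ [ 1, 5 ], [ 1, 7 ], [ 2, 1 ] ]
```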
package/src/fireproof.js
CHANGED
@@ -1,5 +1,6 @@
-import { put, get, getAll, eventsSince } from './prolly.js'
-import …
+import { vis, put, get, getAll, eventsSince } from './prolly.js'
+import TransactionBlockstore, { doTransaction } from './blockstore.js'
+import charwise from 'charwise'
 
 // const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
 
@@ -26,16 +27,17 @@ export default class Fireproof {
    * @static
    * @returns {Fireproof} - a new Fireproof instance
    */
-  static storage = () => {
-    return new Fireproof(new …
+  static storage = (name) => {
+    return new Fireproof(new TransactionBlockstore(name), [], { name })
   }
 
-  constructor (blocks, clock, config…
+  constructor (blocks, clock, config, authCtx = {}) {
+    this.name = config?.name || 'global'
+    this.instanceId = `fp.${this.name}.${Math.random().toString(36).substring(2, 7)}`
     this.blocks = blocks
     this.clock = clock
     this.config = config
     this.authCtx = authCtx
-    this.instanceId = 'fp.' + Math.random().toString(36).substring(2, 7)
   }
 
   /**
@@ -49,7 +51,7 @@ export default class Fireproof {
    */
   snapshot (clock) {
     // how to handle listeners, views, and config?
-    // todo needs a test for …
+    // todo needs a test for listeners, views, and config
     return new Fireproof(this.blocks, clock || this.clock)
   }
 
@@ -93,7 +95,7 @@ export default class Fireproof {
     if (event) {
       const resp = await eventsSince(this.blocks, this.clock, event)
       const docsMap = new Map()
-      for (const { key, type, value } of resp.result) {
+      for (const { key, type, value } of resp.result.map(decodeEvent)) {
         if (type === 'del') {
           docsMap.set(key, { key, del: true })
         } else {
@@ -105,11 +107,15 @@ export default class Fireproof {
       // console.log('change rows', this.instanceId, rows)
     } else {
       const allResp = await getAll(this.blocks, this.clock)
-      rows = allResp.result.map(({ key, value }) => ({ key, value }))
+      rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value })))
       dataCIDs = allResp.cids
       // console.log('dbdoc rows', this.instanceId, rows)
     }
-    return {…
+    return {
+      rows,
+      clock: this.clock,
+      proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
+    }
   }
 
   /**
@@ -195,7 +201,8 @@ export default class Fireproof {
   * @param {Object<{key : string, value: any}>} event - the event to add
   * @returns {Object<{ id: string, clock: CID[] }>} - The result of adding the event to storage
   */
-  async #putToProllyTree (…
+  async #putToProllyTree (decodedEvent, clock = null) {
+    const event = encodeEvent(decodedEvent)
     if (clock && JSON.stringify(clock) !== JSON.stringify(this.clock)) {
       // we need to check and see what version of the document exists at the clock specified
       // if it is the same as the one we are trying to put, then we can proceed
@@ -215,8 +222,12 @@ export default class Fireproof {
       throw new Error('failed to put at storage layer')
     }
     this.clock = result.head // do we want to do this as a finally block
-    await this.#notifyListeners([…
-    return {…
+    await this.#notifyListeners([decodedEvent]) // this type is odd
+    return {
+      id: decodedEvent.key,
+      clock: this.clock,
+      proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
+    }
     // todo should include additions (or split clock)
   }
 
@@ -255,7 +266,7 @@ export default class Fireproof {
    */
   async get (key, opts = {}) {
     const clock = opts.clock || this.clock
-    const resp = await get(this.blocks, clock, key)
+    const resp = await get(this.blocks, clock, charwise.encode(key))
 
     // this tombstone is temporary until we can get the prolly tree to delete
     if (!resp || resp.result === null) {
@@ -273,6 +284,10 @@ export default class Fireproof {
     return doc
   }
 
+  async * vis () {
+    return yield * vis(this.blocks, this.clock)
+  }
+
   setCarUploader (carUploaderFn) {
     console.log('registering car uploader')
     // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
@@ -290,3 +305,14 @@ export async function cidsToProof (cids) {
   const all = await cids.all()
   return [...all].map((cid) => cid.toString())
 }
+
+function decodeEvent (event) {
+  const decodedKey = charwise.decode(event.key)
+  return { ...event, key: decodedKey }
+}
+
+function encodeEvent (event) {
+  if (!(event && event.key)) return
+  const encodedKey = charwise.encode(event.key)
+  return { ...event, key: encodedKey }
+}
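Two user-visible changes land in this file: Fireproof.storage() now accepts a database name that flows into the blockstore and valet, and document keys are charwise-encoded before they reach the prolly tree (encodeEvent on write, decodeEvent on read) so keys of mixed types sort consistently. A hedged usage sketch, assuming the package default export is the Fireproof class:

```js
// Sketch: the named-storage entry point and the charwise key round-trip.
import Fireproof from '@fireproof/core' // default export assumed
import charwise from 'charwise'

const db = Fireproof.storage('my-app') // name reaches blockstore + valet
const ok = await db.put({ _id: 'doc-1', hello: 'world' })
console.log(ok.id, ok.clock, ok.proof) // proof now carries data + clock CIDs

// Keys are stored charwise-encoded and decoded again on the way out:
const encoded = charwise.encode('doc-1')
console.log(charwise.decode(encoded)) // 'doc-1'
```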
package/src/listener.js
CHANGED
@@ -11,12 +11,6 @@
 
 export default class Listener {
   #subcribers = new Map()
-
-  // todo code review if there is a better way that doesn't create a circular reference
-  // because otherwise we need to document that the user must call stopListening
-  // or else the listener will never be garbage collected
-  // maybe we can use WeakRef on the db side
-  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakRef
   #doStopListening = null
 
   constructor (database, routingFn) {
package/src/prolly.js
CHANGED
@@ -6,13 +6,12 @@ import {
   findEventsToSync
 } from './clock.js'
 import { create, load } from 'prolly-trees/map'
+// import { create, load } from '../../../../prolly-trees/src/map.js'
+import { nocache as cache } from 'prolly-trees/cache'
+import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils'
 import * as codec from '@ipld/dag-cbor'
 import { sha256 as hasher } from 'multiformats/hashes/sha2'
-import { MemoryBlockstore, MultiBlockFetcher } from './block.js'
 import { doTransaction } from './blockstore.js'
-
-import { nocache as cache } from 'prolly-trees/cache'
-import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils'
 import { create as createBlock } from 'multiformats/block'
 const opts = { cache, chunker: bf(3), codec, hasher, compare }
 
@@ -22,9 +21,9 @@ const withLog = async (label, fn) => {
   return resp
 }
 
-// …
+// should also return a CIDCounter
 export const makeGetBlock = (blocks) => {
-  // const cids = new CIDCounter() // …
+  // const cids = new CIDCounter() // this could be used for proofs of mutations
   const getBlockFn = async (address) => {
     const { cid, bytes } = await withLog(address, () => blocks.get(address))
     // cids.add({ address: cid })
@@ -55,18 +54,17 @@ export const makeGetBlock = (blocks) => {
  * event: CID[]
  * }>}
  */
-async function createAndSaveNewEvent (
+async function createAndSaveNewEvent ({
   inBlocks,
-  mblocks,
-  getBlock,
   bigPut,
   root,
-  …
+  event: inEvent,
   head,
   additions,
   removals = []
-) {
+}) {
   let cids
+  const { key, value, del } = inEvent
   const data = {
     type: 'put',
     root: {
@@ -100,20 +98,20 @@ async function createAndSaveNewEvent (
 }
 
 const makeGetAndPutBlock = (inBlocks) => {
-  const mblocks = new MemoryBlockstore()
-  const blocks = new MultiBlockFetcher(mblocks, inBlocks)
-  const { getBlock, cids } = makeGetBlock(…
+  // const mblocks = new MemoryBlockstore()
+  // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
+  const { getBlock, cids } = makeGetBlock(inBlocks)
   const put = inBlocks.put.bind(inBlocks)
   const bigPut = async (block, additions) => {
     // console.log('bigPut', block.cid.toString())
     const { cid, bytes } = block
     put(cid, bytes)
-    mblocks.putSync(cid, bytes)
+    // mblocks.putSync(cid, bytes)
     if (additions) {
       additions.set(cid.toString(), block)
     }
   }
-  return { getBlock, bigPut, …
+  return { getBlock, bigPut, blocks: inBlocks, cids }
 }
 
 const bulkFromEvents = (sorted) =>
@@ -143,7 +141,7 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
 /**
  * Put a value (a CID) for the given key. If the key exists it's value is overwritten.
  *
- * @param {import('…
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
  * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
  * @param {string} key The key of the value to put.
  * @param {CID} value The value to put.
@@ -151,7 +149,7 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
  * @returns {Promise<Result>}
  */
 export async function put (inBlocks, head, event, options) {
-  const { getBlock, bigPut, …
+  const { getBlock, bigPut, blocks } = makeGetAndPutBlock(inBlocks)
 
   // If the head is empty, we create a new event and return the root and addition blocks
   if (!head.length) {
@@ -161,12 +159,14 @@ export async function put (inBlocks, head, event, options) {
       root = await node.block
       bigPut(root, additions)
     }
-    return createAndSaveNewEvent(inBlocks, …
+    return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
   }
 
   // Otherwise, we find the common ancestor and update the root and other blocks
   const events = new EventFetcher(blocks)
+  // this is returning more events than necessary
   const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
+  // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
   const prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
 
   const bulkOperations = bulkFromEvents(sorted)
@@ -178,22 +178,20 @@ export async function put (inBlocks, head, event, options) {
     bigPut(nb, additions)
   }
   // additions are new blocks
-  return createAndSaveNewEvent(
+  return createAndSaveNewEvent({
     inBlocks,
-    mblocks,
-    getBlock,
     bigPut,
-    prollyRootBlock,
+    root: prollyRootBlock,
     event,
     head,
-    Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
-  )
+    additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
+  })
 }
 
 /**
  * Determine the effective prolly root given the current merkle clock head.
  *
- * @param {import('…
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
  * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
  */
 export async function root (inBlocks, head) {
@@ -208,22 +206,22 @@ export async function root (inBlocks, head) {
   // Perform bulk operations (put or delete) for each event in the sorted array
   const bulkOperations = bulkFromEvents(sorted)
   const { root: newProllyRootNode, blocks: newBlocks } = await prollyRootNode.bulk(bulkOperations)
-  const prollyRootBlock = await newProllyRootNode.block
-  // console.log('…
+  // const prollyRootBlock = await newProllyRootNode.block
+  // console.log('newBlocks', newBlocks.map((nb) => nb.cid.toString()))
   // todo maybe these should go to a temp blockstore?
   await doTransaction('root', inBlocks, async (transactionBlockstore) => {
     const { bigPut } = makeGetAndPutBlock(transactionBlockstore)
     for (const nb of newBlocks) {
       bigPut(nb)
     }
-    bigPut(prollyRootBlock)
+    // bigPut(prollyRootBlock)
   })
   return { cids: events.cids, node: newProllyRootNode }
 }
 
 /**
  * Get the list of events not known by the `since` event
- * @param {import('…
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
  * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
  * @param {import('./clock').EventLink<EventData>} since Event to compare against.
  * @returns {Promise<import('./clock').EventLink<EventData>[]>}
@@ -239,7 +237,7 @@ export async function eventsSince (blocks, head, since) {
 
 /**
  *
- * @param {import('…
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
  * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 *
  * @returns {Promise<import('./prolly').Entry[]>}
@@ -257,7 +255,7 @@ export async function getAll (blocks, head) {
 }
 
 /**
- * @param {import('…
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
  * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
  * @param {string} key The key of the value to retrieve.
  */
@@ -270,3 +268,15 @@ export async function get (blocks, head, key) {
   const { result, cids } = await prollyRootNode.get(key)
   return { result, cids, clockCIDs }
 }
+
+export async function * vis (blocks, head) {
+  if (!head.length) {
+    return { cids: new CIDCounter(), result: null }
+  }
+  const { node: prollyRootNode, cids } = await root(blocks, head)
+  const lines = []
+  for await (const line of prollyRootNode.vis()) {
+    yield line
+  }
+  return { vis: lines.join('\n'), cids }
+}
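The new vis() generator walks the current prolly root and yields the tree visualization one line at a time; the Fireproof.vis() method added above simply delegates to it. A small sketch of how a caller might consume it (the package default export is assumed, as in the earlier example):

```js
// Sketch: streaming the prolly-tree visualization added in this release.
import Fireproof from '@fireproof/core' // default export assumed

const db = Fireproof.storage('vis-demo')
await db.put({ _id: 'a', n: 1 })

// db.vis() delegates to prolly.js vis(blocks, clock) and yields
// one line of the tree drawing per iteration.
for await (const line of db.vis()) {
  console.log(line)
}
```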
package/src/valet.js
CHANGED
@@ -3,17 +3,8 @@ import { CID } from 'multiformats/cid'
 import { openDB } from 'idb'
 import cargoQueue from 'async/cargoQueue.js'
 
-// const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
-// let storageSupported = false
-// try {
-//   storageSupported = window.localStorage && true
-// } catch (e) {}
-// const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
-
 export default class Valet {
-
-  #cidToCar = new Map() // cid to car
-  #db = null
+  idb = null
   #uploadQueue = null
   #alreadyEnqueued = new Set()
 
@@ -23,7 +14,8 @@ export default class Valet {
   */
   uploadFunction = null
 
-  constructor () {
+  constructor (name = 'default') {
+    this.name = name
     this.#uploadQueue = cargoQueue(async (tasks, callback) => {
       console.log(
         'queue worker',
@@ -59,9 +51,8 @@ export default class Valet {
   }
 
   withDB = async (dbWorkFun) => {
-    …
-    …
-    this.#db = await openDB('valet', 2, {
+    if (!this.idb) {
+      this.idb = await openDB(`fp.${this.name}.valet`, 2, {
         upgrade (db, oldVersion, newVersion, transaction) {
           if (oldVersion < 1) {
             db.createObjectStore('cars') // todo use database name
@@ -75,7 +66,7 @@ export default class Valet {
         }
       })
     }
-    return await dbWorkFun(this…
+    return await dbWorkFun(this.idb)
   }
 
   /**
@@ -84,11 +75,6 @@ export default class Valet {
   * @param {*} value
   */
   async parkCar (carCid, value, cids) {
-    // this.#cars.set(carCid, value)
-    // for (const cid of cids) {
-    //   this.#cidToCar.set(cid, carCid)
-    // }
-
     await this.withDB(async (db) => {
       const tx = db.transaction(['cars', 'cidToCar'], 'readwrite')
       await tx.objectStore('cars').put(value, carCid)
@@ -130,45 +116,3 @@ export default class Valet {
     })
   }
 }
-
-// export class MemoryValet {
-//   #cars = new Map() // cars by cid
-//   #cidToCar = new Map() // cid to car
-
-//   /**
-//    *
-//    * @param {string} carCid
-//    * @param {*} value
-//    */
-//   async parkCar (carCid, value, cids) {
-//     this.#cars.set(carCid, value)
-//     for (const cid of cids) {
-//       this.#cidToCar.set(cid, carCid)
-//     }
-//   }
-
-//   async getBlock (dataCID) {
-//     return await this.#valetGet(dataCID)
-//   }
-
-//   /**
-//    * Internal function to load blocks from persistent storage.
-//    * Currently it just searches all the cars for the block, but in the future
-//    * we need to index the block CIDs to the cars, and reference that to find the block.
-//    * This index will also allow us to use accelerator links for the gateway when needed.
-//    * It can itself be a prolly tree...
-//    * @param {string} cid
-//    * @returns {Promise<Uint8Array|undefined>}
-//    */
-//   #valetGet = async (cid) => {
-//     const carCid = this.#cidToCar.get(cid)
-//     if (carCid) {
-//       const carBytes = this.#cars.get(carCid)
-//       const reader = await CarReader.fromBytes(carBytes)
-//       const gotBlock = await reader.get(CID.parse(cid))
-//       if (gotBlock) {
-//         return gotBlock.bytes
-//       }
-//     }
-//   }
-// }
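Valet now opens one IndexedDB database per Fireproof name (fp.<name>.valet) instead of a single shared 'valet' store, and exposes the handle as a public idb field, which the tests below rely on. A short sketch of the naming scheme using idb's openDB, with the upgrade callback reduced to the part visible in the hunks:

```js
// Sketch: per-database IndexedDB naming introduced in valet.js.
import { openDB } from 'idb'

const name = 'helloName' // the name handed down from Fireproof.storage(name)
const idb = await openDB(`fp.${name}.valet`, 2, {
  upgrade (db, oldVersion, newVersion, transaction) {
    if (oldVersion < 1) {
      db.createObjectStore('cars')
      // the real module also sets up the 'cidToCar' store used by parkCar
    }
  }
})
console.log(idb.name) // 'fp.helloName.valet'
```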
package/{src → test}/block.js
RENAMED
@@ -1,8 +1,8 @@
 import { parse } from 'multiformats/link'
 
 /**
- * @typedef {{ cid: import('…
- * @typedef {{ get: (link: import('…
+ * @typedef {{ cid: import('../src/link').AnyLink, bytes: Uint8Array }} AnyBlock
+ * @typedef {{ get: (link: import('../src/link').AnyLink) => Promise<AnyBlock | undefined> }} BlockFetcher
  */
 
 /** @implements {BlockFetcher} */
@@ -11,7 +11,7 @@ export class MemoryBlockstore {
   #blocks = new Map()
 
   /**
-   * @param {import('…
+   * @param {import('../src/link').AnyLink} cid
    * @returns {Promise<AnyBlock | undefined>}
    */
   async get (cid) {
@@ -21,7 +21,7 @@ export class MemoryBlockstore {
   }
 
   /**
-   * @param {import('…
+   * @param {import('../src/link').AnyLink} cid
    * @param {Uint8Array} bytes
    */
   async put (cid, bytes) {
@@ -30,7 +30,7 @@ export class MemoryBlockstore {
   }
 
   /**
-   * @param {import('…
+   * @param {import('../src/link').AnyLink} cid
    * @param {Uint8Array} bytes
    */
   putSync (cid, bytes) {
@@ -53,7 +53,7 @@ export class MultiBlockFetcher {
     this.#fetchers = fetchers
   }
 
-  /** @param {import('…
+  /** @param {import('../src/link').AnyLink} link */
   async get (link) {
     for (const f of this.#fetchers) {
       const v = await f.get(link)
package/test/clock.test.js
CHANGED
@@ -440,13 +440,8 @@ describe('Clock', () => {
     toSync = await testFindEventsToSync(blocks, sinceHead)
     assert.equal(toSync.length, 0)
 
-    // todo do these since heads make sense?
     sinceHead = [...head0, ...head2]
     toSync = await testFindEventsToSync(blocks, sinceHead)
-    // console.log('need', toSync.map(b => b.value.data))
-    // assert.equal(toSync.length, 2) // 0
-    // assert.equal(toSync[0].cid.toString(), event1.cid.toString())
-    // assert.equal(toSync[1].cid.toString(), event2.cid.toString())
   })
 
   it('add two events with some shared parents', async () => {
package/test/db-index.test.js
CHANGED
@@ -2,13 +2,13 @@ import { describe, it, beforeEach } from 'mocha'
 import assert from 'node:assert'
 import Blockstore from '../src/blockstore.js'
 import Fireproof from '../src/fireproof.js'
-import …
+import DbIndex from '../src/db-index.js'
 console.x = function () {}
 
-describe('…
+describe('DbIndex query', () => {
   let database, index
   beforeEach(async () => {
-    database = …
+    database = Fireproof.storage()
     const docs = [
       { _id: 'a1s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'alice', age: 40 },
       { _id: 'b2s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'bob', age: 40 },
@@ -24,7 +24,7 @@ describe('Index query', () => {
     assert(response.id, 'should have id')
     assert.equal(response.id, id)
   }
-    index = new …
+    index = new DbIndex(database, function (doc, map) {
       map(doc.age, doc.name)
     })
   })
@@ -36,8 +36,10 @@ describe('Index query', () => {
     assert.equal(result.rows[0].key, 43)
     assert(result.rows[0].value === 'carol', 'correct value')
   })
-  it…
-  …
+  it('query exact key', async () => {
+    let result = await index.query({ range: [41, 44] })
+    assert(result.rows[0].key === 43, 'correct key')
+    result = await index.query({ key: 43 })
     assert(result, 'did return result')
     assert(result.rows)
     assert.equal(result.rows.length, 1, 'one row matched')
@@ -197,12 +199,12 @@ describe('Index query', () => {
   })
 })
 
-describe('…
+describe('DbIndex query with bad index definition', () => {
   let database, index
   beforeEach(async () => {
     database = new Fireproof(new Blockstore(), []) // todo: these need a cloud name aka w3name, add this after we have cloud storage of blocks
     await database.put({ _id: 'a1s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'alice', age: 40 })
-    index = new …
+    index = new DbIndex(database, function (doc, map) {
       map(doc.oops.missingField, doc.name)
     })
   })
@@ -214,5 +216,4 @@ describe('Index query with bad index definition', () => {
     console.error = oldErrFn
   })
 })
-it.skip('reproduce missing block error from browser so we can turn off always rebuild', async () => {})
 })
package/test/fireproof.test.js
CHANGED
@@ -2,6 +2,7 @@ import { describe, it, beforeEach } from 'mocha'
 import assert from 'node:assert'
 import Blockstore from '../src/blockstore.js'
 import Fireproof from '../src/fireproof.js'
+import * as codec from '@ipld/dag-cbor'
 
 let database, resp0
 
@@ -9,14 +10,18 @@ let database, resp0
 
 describe('Fireproof', () => {
   beforeEach(async () => {
-    database = …
+    database = Fireproof.storage('helloName')
     resp0 = await database.put({
       _id: '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c',
       name: 'alice',
       age: 42
     })
   })
-
+  it('takes an optional name', () => {
+    assert.equal(database.name, 'helloName')
+    const x = database.blocks.valet.idb
+    assert.equal(x.name.toString(), 'fp.helloName.valet')
+  })
   it('put and get document', async () => {
     assert(resp0.id, 'should have id')
     assert.equal(resp0.id, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
@@ -27,13 +32,13 @@ describe('Fireproof', () => {
   })
   it('mvcc put and get document with _clock that matches', async () => {
     assert(resp0.clock, 'should have clock')
-    assert.equal(resp0.clock[0].toString(), '…
+    assert.equal(resp0.clock[0].toString(), 'bafyreiadhnnxgaeeqdxujfew6zxr4lnjyskkrg26cdjvk7tivy6dt4xmsm')
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
     theDoc._clock = database.clock
     const put2 = await database.put(theDoc)
     assert.equal(put2.id, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
     assert.equal(put2.clock.length, 1)
-    assert.equal(put2.clock[0].toString(), '…
+    assert.equal(put2.clock[0].toString(), 'bafyreib2kck2fv73lgahfcd5imarslgxcmachbxxavhtwahx5ppjfts4qe')
   })
   it('get should return an object instance that is not the same as the one in the db', async () => {
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
@@ -46,7 +51,7 @@ describe('Fireproof', () => {
   it('get with mvcc option', async () => {
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', { mvcc: true })
     assert(theDoc._clock, 'should have _clock')
-    assert.equal(theDoc._clock[0].toString(), '…
+    assert.equal(theDoc._clock[0].toString(), 'bafyreiadhnnxgaeeqdxujfew6zxr4lnjyskkrg26cdjvk7tivy6dt4xmsm')
   })
   it('get with mvcc option where someone else changed another document first', async () => {
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', { mvcc: true })
@@ -237,6 +242,34 @@ describe('Fireproof', () => {
     assert.equal(prevBob.age, 11)
   })
 
+  it('provides docs since tiny', async () => {
+    const result = await database.changesSince()
+    assert.equal(result.rows.length, 1)
+    assert.equal(result.rows[0].key, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
+
+    // console.log('result', result)
+
+    // const result2 = await database.changesSince(result.clock)
+    // console.log('result2', result2)
+    // assert.equal(result2.rows.length, 0)
+
+    const bKey = 'befbef-3c3a-4b5e-9c1c-bbbbbb'
+    const bvalue = {
+      _id: bKey,
+      name: 'bob',
+      age: 44
+    }
+    const response = await database.put(bvalue)
+    assert(response.id, 'should have id')
+    assert.equal(response.id, bKey)
+
+    const res3 = await database.changesSince()
+    assert.equal(res3.rows.length, 2)
+
+    const res4 = await database.changesSince(result.clock)
+    assert.equal(res4.rows.length, 1)
+  })
+
   it('provides docs since', async () => {
     const result = await database.changesSince()
     assert.equal(result.rows.length, 1)
@@ -275,8 +308,6 @@ describe('Fireproof', () => {
 
     const res5 = await database.changesSince(res4.clock)
 
-    // await database.visClock()
-
     assert.equal(res5.rows.length, 1)
 
     const res6 = await database.changesSince(result2.clock)
@@ -308,22 +339,48 @@ describe('Fireproof', () => {
     assert.equal((await database.changesSince()).rows.length, 1)
     let resp, doc, changes
     for (let index = 0; index < 200; index++) {
-      const id = '' + (…
+      const id = '1' + (301 - index).toString()
+      console.log(`Putting id: ${id}, index: ${index}`)
       resp = await database.put({ index, _id: id }).catch(e => {
-        assert.…
+        assert.fail(`put failed on _id: ${id}, error: ${e.message}`)
       })
-      assert(resp.id)
+      assert(resp.id, `Failed to obtain resp.id for _id: ${id}`)
+
+      console.log(`vis for update id: ${id}, index:`, index)
+      for await (const line of database.vis()) {
+        console.log(line)
+      }
+
      doc = await database.get(resp.id).catch(e => {
-        console.…
-        assert.…
+        console.log('failed', e)
+        assert.fail(`get failed on _id: ${id}, error: ${e.message}`)
+      })
+
+      assert.equal(doc.index, index, `doc.index is not equal to index for _id: ${id}`)
+      changes = await database.changesSince().catch(async e => {
+        assert.fail(`changesSince failed on _id: ${id}, error: ${e.message}`)
       })
-      …
-      …
-      …
+      changes.rows.forEach(row => {
+        for (const key in row) {
+          const value = row[key]
+          assert(!/^bafy/.test(value), `Unexpected "bafy..." value found at index ${index} in row ${JSON.stringify(row)}`)
+        }
       })
-      …
+      if (index > 3) {
+        const stored = await database.blocks.get('bafyreicumn7tvssch4xslbe4jjq55c6w3jt4yxyjagkr2tengsudato7vi').catch((e) => {
+          console.log(`Error getting block for index ${index}: ${e.message}`)
+        })
+        if (stored) {
+          const doc = codec.decode(await stored.bytes)
+          // console.log('stored', JSON.stringify(dec))
+          assert.equal(doc.closed, false)
+        }
+      }
+      console.log('changes: ', index, changes.rows.length, JSON.stringify(changes.rows))
+      assert.equal(changes.rows.length, index + 2, `failed on ${index}, with ${changes.rows.length} ${id}`)
     }
   }).timeout(20000)
+
   it('concurrent transactions', async () => {
     assert.equal((await database.changesSince()).rows.length, 1)
     const promises = []
package/test/helpers.js
CHANGED
@@ -3,7 +3,7 @@ import crypto from 'node:crypto'
 import * as Link from 'multiformats/link'
 import * as raw from 'multiformats/codecs/raw'
 import { sha256 } from 'multiformats/hashes/sha2'
-import { MemoryBlockstore } from '…
+import { MemoryBlockstore } from './block.js'
 
 // console.x = console.log
 // console.log = function (...args) {
package/test/prolly.test.js
CHANGED
@@ -110,7 +110,7 @@ describe('Prolly', () => {
     assert.equal(bvalue.toString(), data[0][1].toString())
   })
 
-  it.skip('linear put hundreds of values', async () => {
+  it.skip('passing, slow: linear put hundreds of values', async () => {
     const blocks = new Blockstore()
     const alice = new TestPail(blocks, [])
 
@@ -173,7 +173,7 @@ class TestPail {
   /** @param {string} key */
   async get (key) {
     const resp = await get(this.blocks, this.head, key)
-    console.log('prolly GET', key, resp)
+    // console.log('prolly GET', key, resp)
     return resp.result
   }
 
package/test/proofs.test.js
CHANGED
@@ -14,11 +14,11 @@ describe('Proofs', () => {
     doc = await database.get(ok.id, { mvcc: true })
   })
 
-  it…
+  it('first put result shoud not include proof', async () => {
     assert(ok.proof)
     assert(ok.proof.data)
     assert(ok.proof.clock)
-    console.log('ok', ok)
+    // console.log('ok', ok)
     assert.equal(ok.proof.data.length, 0)
     assert.equal(ok.proof.clock.length, 0)
 
@@ -31,7 +31,7 @@ describe('Proofs', () => {
     assert(ok2.proof)
     assert(ok2.proof.data)
     assert(ok2.proof.clock)
-    console.log('ok2', ok2)
+    // console.log('ok2', ok2)
     assert.equal(ok2.proof.data.length, 1)
     assert.equal(ok2.proof.clock.length, 1)
 
@@ -47,7 +47,7 @@ describe('Proofs', () => {
     assert(doc._proof.clock)
     assert.equal(doc._proof.data.length, 1)
     assert.equal(doc._proof.clock.length, 1)
-    assert.equal(doc._proof.data[0], '…
-    assert.equal(doc._proof.clock[0].toString(), '…
+    assert.equal(doc._proof.data[0], 'bafyreieilmvxq6wudu46i2ssmuyrmaszr4onzlqxzlvngrczbn7ppyvloq')
+    assert.equal(doc._proof.clock[0].toString(), 'bafyreict4aip45uwnm4xcsn4oikh73t5n7nzdmc2u36rdbguroun2yaf2y')
   })
 })
@@ -58,8 +58,8 @@ const reproduceBug = async (database) => {
   const id = '02pkji8'
   const doc = await database.get(id)
   // (await database.put({ completed: !completed, ...doc }))
-  …
+  await database.put(doc)
   await database.todosByList.query({ range: [0, 1] })
 
-  console.log('ok', ok)
+  // console.log('ok', ok)
 }
package/scripts/randomcid.js
DELETED
@@ -1,12 +0,0 @@
-import crypto from 'node:crypto'
-import { CID } from 'multiformats/cid'
-import * as raw from 'multiformats/codecs/raw'
-import { sha256 } from 'multiformats/hashes/sha2'
-
-async function main () {
-  const bytes = crypto.webcrypto.getRandomValues(new Uint8Array(32))
-  const hash = await sha256.digest(bytes)
-  process.stdout.write(CID.create(1, raw.code, hash).toString())
-}
-
-main()
package/scripts/words/gen.js
DELETED
@@ -1,55 +0,0 @@
-import fs from 'node:fs'
-import { Readable } from 'node:stream'
-import { CarWriter } from '@ipld/car'
-import { CID } from 'multiformats/cid'
-import * as raw from 'multiformats/codecs/raw'
-import { sha256 } from 'multiformats/hashes/sha2'
-import { ShardBlock, put } from '../../index.js'
-import { MemoryBlockstore } from '../../block.js'
-
-/** @param {string} str */
-async function stringToCID (str) {
-  const hash = await sha256.digest(new TextEncoder().encode(str))
-  return CID.create(1, raw.code, hash)
-}
-
-async function main () {
-  const data = await fs.promises.readFile('/usr/share/dict/words', 'utf8')
-  const words = data.split(/\n/)
-  const cids = await Promise.all(words.map(stringToCID))
-  const blocks = new MemoryBlockstore()
-  const rootblk = await ShardBlock.create()
-  blocks.putSync(rootblk.cid, rootblk.bytes)
-
-  console.time(`put x${words.length}`)
-  /** @type {import('../../shard').ShardLink} */
-  let root = rootblk.cid
-  for (const [i, word] of words.entries()) {
-    const res = await put(blocks, root, word, cids[i])
-    root = res.root
-    for (const b of res.additions) {
-      blocks.putSync(b.cid, b.bytes)
-    }
-    for (const b of res.removals) {
-      blocks.deleteSync(b.cid)
-    }
-    if (i % 1000 === 0) {
-      console.log(`${Math.floor(i / words.length * 100)}%`)
-    }
-  }
-  console.timeEnd(`put x${words.length}`)
-
-  // @ts-expect-error
-  const { writer, out } = CarWriter.create(root)
-  const finishPromise = new Promise(resolve => {
-    Readable.from(out).pipe(fs.createWriteStream('./pail.car')).on('finish', resolve)
-  })
-
-  for (const b of blocks.entries()) {
-    await writer.put(b)
-  }
-  await writer.close()
-  await finishPromise
-}
-
-main()