@fireproof/core 0.0.7 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +148 -0
- package/hooks/use-fireproof.ts +9 -6
- package/package.json +2 -2
- package/src/blockstore.js +2 -4
- package/src/clock.js +23 -2
- package/src/db-index.js +106 -78
- package/src/fireproof.js +8 -2
- package/src/hydrator.js +10 -0
- package/src/prolly.js +2 -1
- package/test/db-index.test.js +1 -1
- package/test/fireproof.test.js +18 -20
- package/test/hydrator.test.js +75 -0
- package/coverage/base.css +0 -224
- package/coverage/block-navigation.js +0 -87
- package/coverage/block.js.html +0 -280
- package/coverage/blockstore.js.html +0 -916
- package/coverage/clock.js.html +0 -1141
- package/coverage/db-index.js.html +0 -694
- package/coverage/favicon.png +0 -0
- package/coverage/fireproof.js.html +0 -856
- package/coverage/index.html +0 -221
- package/coverage/listener.js.html +0 -421
- package/coverage/prettify.css +0 -1
- package/coverage/prettify.js +0 -2
- package/coverage/prolly.js.html +0 -883
- package/coverage/sort-arrow-sprite.png +0 -0
- package/coverage/sorter.js +0 -196
- package/coverage/tmp/coverage-42191-1678146904346-0.json +0 -1
- package/coverage/tmp/coverage-42193-1678146903521-0.json +0 -1
- package/coverage/tmp/coverage-42196-1678146904322-0.json +0 -1
- package/coverage/tmp/coverage-42197-1678146904292-0.json +0 -1
- package/coverage/valet.js.html +0 -589
package/README.md
ADDED
@@ -0,0 +1,148 @@
|
|
1
|
+
# 🔥 Fireproof
|
2
|
+
|
3
|
+
Fireproof is a realtime database for today's interactive applications. It uses immutable data and distributed protocols
|
4
|
+
to offer a new kind of database that:
|
5
|
+
- can be embedded in any page or app, with a flexible data ownership model
|
6
|
+
- scales without incurring developer costs, thanks to Filecoin
|
7
|
+
- uses cryptographically verifiable protocols (what plants crave)
|
8
|
+
|
9
|
+
Learn more about the concepts and architecture behind Fireproof [in our plan,](https://hackmd.io/@j-chris/SyoE-Plpj) or jump to the [quick start](#quick-start) for React and server-side examples.
|
10
|
+
|
11
|
+
### Status
|
12
|
+
|
13
|
+
Fireproof is alpha software, you should only use it if you are planning to contribute. For now, [check out our React TodoMVC implementation running in browser-local mode.](https://main--lucky-naiad-5aa507.netlify.app/) It demonstrates document persistence, index queries, and event subscriptions, and uses the [`useFireproof()` React hook.](https://github.com/fireproof-storage/fireproof/blob/main/packages/fireproof/hooks/use-fireproof.tsx)
|
14
|
+
|
15
|
+
[](https://github.com/jchris/fireproof/actions/workflows/test.yml)
|
16
|
+
[](https://standardjs.com)
|
17
|
+
|
18
|
+
## Usage
|
19
|
+
|
20
|
+
```js
|
21
|
+
import Fireproof from 'fireproof';
|
22
|
+
|
23
|
+
async function main() {
|
24
|
+
const database = Fireproof.storage('my-db');
|
25
|
+
const ok = await database.put({
|
26
|
+
name: 'alice',
|
27
|
+
age: 42
|
28
|
+
});
|
29
|
+
|
30
|
+
const doc = await database.get(ok.id);
|
31
|
+
console.log(doc.name); // 'alice'
|
32
|
+
}
|
33
|
+
|
34
|
+
main();
|
35
|
+
```
|
36
|
+
|
37
|
+
## Features
|
38
|
+
|
39
|
+
### Document Store
|
40
|
+
|
41
|
+
A simple put, get, and delete interface for keeping track of all your JSON documents. Once your data is in Fireproof you can access it from any app or website. Fireproof document store uses MVCC versioning and Merkle clocks so you can always recover the version you are looking for.
|
42
|
+
|
43
|
+
```js
|
44
|
+
const { id, ref } = await database.put({
|
45
|
+
_id: 'three-thousand'
|
46
|
+
name: 'André',
|
47
|
+
age: 47
|
48
|
+
});
|
49
|
+
const doc = await database.get('three-thousand', {mvcc : true}) // mvcc is optional
|
50
|
+
// {
|
51
|
+
// _id : 'three-thousand'
|
52
|
+
// _clock : CID(bafy84...agfw7)
|
53
|
+
// name : 'André',
|
54
|
+
// age : 47
|
55
|
+
// }
|
56
|
+
```
|
57
|
+
|
58
|
+
The `_clock` allows you to query a stable snapshot of that version of the database. Fireproof uses immutable data structures under the hood, so you can always rollback to old data. Files can be embedded anywhere in your document using IPFS links like `{"/":"bafybeih3e3zdiehbqfpxzpppxrb6kaaw4xkbqzyr2f5pwr5refq2te2ape"}`, with API sugar coming soon.
|
59
|
+
|
60
|
+
### Flexible Indexes
|
61
|
+
|
62
|
+
Fireproof indexes are defined by custom JavaScript functions that you write, allowing you to easily index and search your data in the way that works best for your application. Easily handle data variety and schema drift by normalizing any data to the desired index.
|
63
|
+
|
64
|
+
```js
|
65
|
+
const index = new Index(database, function (doc, map) {
|
66
|
+
map(doc.age, doc.name)
|
67
|
+
})
|
68
|
+
const { rows, ref } = await index.query({ range: [40, 52] })
|
69
|
+
// [ { key: 42, value: 'alice', id: 'a1s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c' },
|
70
|
+
// { key: 47, value: 'André', id: 'three-thousand' } ]
|
71
|
+
```
|
72
|
+
|
73
|
+
### Realtime Updates
|
74
|
+
|
75
|
+
Subscribe to query changes in your application, so your UI updates automatically. Use the supplied React hooks, our Redux connector, or simple function calls to be notified of relevant changes.
|
76
|
+
|
77
|
+
```js
|
78
|
+
const listener = new Listener(database, function(doc, emit) {
|
79
|
+
if (doc.type == 'member') {
|
80
|
+
emit('member')
|
81
|
+
}
|
82
|
+
})
|
83
|
+
listener.on('member', (id) => {
|
84
|
+
const doc = await db.get(id)
|
85
|
+
alert(`Member update ${doc.name}`)
|
86
|
+
})
|
87
|
+
```
|
88
|
+
|
89
|
+
### Self-sovereign Identity
|
90
|
+
|
91
|
+
Fireproof is so easy to integrate with any site or app because you can get started right away, and set up an account later. By default users write to their own database copy, so you can get pretty far before you even have to think about API keys. [Authorization is via non-extractable keypair](https://ucan.xyz), like TouchID / FaceID.
|
92
|
+
|
93
|
+
### Automatic Replication
|
94
|
+
|
95
|
+
Document changes are persisted to [Filecoin](https://filecoin.io) via [web3.storage](https://web3.storage), and made available over [IPFS] and on a global content delivery network. All you need to do to sync state is send a link to the latest database head, and Fireproof will take care of the rest. [Learn how to enable replication.](#status)
|
96
|
+
|
97
|
+
### Cryptographic Proofs
|
98
|
+
|
99
|
+
The [UCAN protocol](https://ucan.xyz) verifiably links Fireproof updates to authorized agents via cryptographic proof chains. These proofs are portable like bearer tokens, but because invocations are signed by end-user device keys, UCAN proofs don't need to be hidden to be secure, allowing for delegation of service capabilities across devices and parties. Additionally, Fireproof's Merkle clocks and hash trees are immutable and self-validating, making merging changes safe and efficient. Fireproof makes cryptographic proofs available for all of its operations, making it an ideal verifiable document database for smart contracts and other applications running in trustless environments. [Proof chains provide performance benefits as well](https://purrfect-tracker-45c.notion.site/Data-Routing-23c37b269b4c4c3dacb60d0077113bcb), by allowing recipients to skip costly I/O operations and instead cryptographically verify that changes contain all of the required context.
|
100
|
+
|
101
|
+
## Limitations 💣
|
102
|
+
|
103
|
+
### Security
|
104
|
+
|
105
|
+
Until encryption support is enabled, all data written to Fireproof is public. There are no big hurdles for this feature but it's not ready yet.
|
106
|
+
|
107
|
+
### Replication
|
108
|
+
|
109
|
+
Currently Fireproof writes transactions and proofs to [CAR files](https://ipld.io/specs/transport/car/carv2/) which are well suited for peer and cloud replication. They are stored in IndexedDB locally, with cloud replication coming very soon.
|
110
|
+
|
111
|
+
### Pre-beta Software
|
112
|
+
|
113
|
+
While the underlying data structures and libraries Fireproof uses are trusted with billions of dollars worth of data, Fireproof started in February of 2023. Results may vary.
|
114
|
+
|
115
|
+
## Thanks 🙏
|
116
|
+
|
117
|
+
Fireproof is a synthesis of work done by people in the web community over the years. I couldn't even begin to name all the folks who made pivotal contributions. Without npm, React, and VS Code all this would have taken so much longer. Thanks to everyone who supported me getting into database development via Apache CouchDB, one of the original document databases. The distinguishing work on immutable datastructures comes from the years of consideration [IPFS](https://ipfs.tech), [IPLD](https://ipld.io), and the [Filecoin APIs](https://docs.filecoin.io) have enjoyed.
|
118
|
+
|
119
|
+
Thanks to Alan Shaw and Mikeal Rogers without whom this project would have never got started. The core Merkle hash-tree clock is based on [Alan's Pail](https://github.com/alanshaw/pail), and you can see the repository history goes all the way back to work begun as a branch of that repo. Mikeal wrote [the prolly trees implementation](https://github.com/mikeal/prolly-trees).
|
120
|
+
|
121
|
+
## Quick Start
|
122
|
+
|
123
|
+
Look in the `examples/` directory for projects using the database. It's not picky how you use it, but we want to provide convenient jumping off places. Think of the examples as great to fork when starting your next project.
|
124
|
+
|
125
|
+
If you are adding Fireproof to an existing page, just install it and try some operations.
|
126
|
+
|
127
|
+
```sh
|
128
|
+
npm install @fireproof/core
|
129
|
+
```
|
130
|
+
|
131
|
+
In your `app.js` or `app.tsx` file:
|
132
|
+
|
133
|
+
```js
|
134
|
+
import { Fireproof } from '@fireproof/core'
|
135
|
+
const fireproof = Fireproof.storage()
|
136
|
+
const ok = await fireproof.put({ hello: 'world' })
|
137
|
+
const doc = await fireproof.get(ok.id)
|
138
|
+
```
|
139
|
+
|
140
|
+
🤫 I like to drop a `window.fireproof = fireproof` in there as a development aid.
|
141
|
+
|
142
|
+
# Contributing
|
143
|
+
|
144
|
+
Feel free to join in. All welcome. [Open an issue](https://github.com/jchris/fireproof/issues)!
|
145
|
+
|
146
|
+
# License
|
147
|
+
|
148
|
+
Dual-licensed under [MIT or Apache 2.0](https://github.com/jchris/fireproof/blob/main/LICENSE.md)
|
package/hooks/use-fireproof.ts
CHANGED
@@ -53,9 +53,13 @@ export function useFireproof(defineDatabaseFn: Function, setupDatabaseFn: Functi
|
|
53
53
|
console.log("Loading previous database clock. (localStorage.removeItem('fireproof') to reset)")
|
54
54
|
await database.setClock(clock)
|
55
55
|
try {
|
56
|
-
await database.changesSince()
|
56
|
+
const changes = await database.changesSince()
|
57
|
+
if (changes.rows.length < 2) {
|
58
|
+
console.log('Resetting database')
|
59
|
+
throw new Error('Resetting database')
|
60
|
+
}
|
57
61
|
} catch (e) {
|
58
|
-
console.error(
|
62
|
+
console.error(`Error loading previous database clock. ${fp} Resetting.`, e)
|
59
63
|
await database.setClock([])
|
60
64
|
await setupDatabaseFn(database)
|
61
65
|
localSet('fireproof', JSON.stringify(database))
|
@@ -80,10 +84,9 @@ export function useFireproof(defineDatabaseFn: Function, setupDatabaseFn: Functi
|
|
80
84
|
const husherMap = new Map()
|
81
85
|
const husher = (id: string, workFn: { (): Promise<any> }, ms: number) => {
|
82
86
|
if (!husherMap.has(id)) {
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
)
|
87
|
+
const start: number = Date.now()
|
88
|
+
husherMap.set(id, workFn().finally(() =>
|
89
|
+
setTimeout(() => husherMap.delete(id), ms - (Date.now() - start))))
|
87
90
|
}
|
88
91
|
return husherMap.get(id)
|
89
92
|
}
|
package/package.json
CHANGED
@@ -1,6 +1,6 @@
|
|
1
1
|
{
|
2
2
|
"name": "@fireproof/core",
|
3
|
-
"version": "0.0.
|
3
|
+
"version": "0.0.9",
|
4
4
|
"description": "Realtime database for IPFS",
|
5
5
|
"main": "index.js",
|
6
6
|
"type": "module",
|
@@ -37,7 +37,7 @@
|
|
37
37
|
"cli-color": "^2.0.3",
|
38
38
|
"idb": "^7.1.1",
|
39
39
|
"multiformats": "^11.0.1",
|
40
|
-
"prolly-trees": "1.0.
|
40
|
+
"prolly-trees": "1.0.3",
|
41
41
|
"sade": "^1.8.1"
|
42
42
|
},
|
43
43
|
"devDependencies": {
|
package/src/blockstore.js
CHANGED
@@ -55,7 +55,7 @@ export default class TransactionBlockstore {
|
|
55
55
|
const key = cid.toString()
|
56
56
|
// it is safe to read from the in-flight transactions becauase they are immutable
|
57
57
|
const bytes = await Promise.any([this.#transactionsGet(key), this.commitedGet(key)]).catch((e) => {
|
58
|
-
console.log('networkGet', cid.toString(), e)
|
58
|
+
// console.log('networkGet', cid.toString(), e)
|
59
59
|
return this.networkGet(key)
|
60
60
|
})
|
61
61
|
if (!bytes) throw new Error('Missing block: ' + key)
|
@@ -83,15 +83,13 @@ export default class TransactionBlockstore {
|
|
83
83
|
const value = await husher(key, async () => await this.valet.remoteBlockFunction(key))
|
84
84
|
if (value) {
|
85
85
|
// console.log('networkGot: ' + key, value.length)
|
86
|
-
// dont turn this on until the Nan thing is fixed
|
87
|
-
// it keep the network blocks in indexedb but lets get the basics solid first
|
88
86
|
doTransaction('networkGot: ' + key, this, async (innerBlockstore) => {
|
89
87
|
await innerBlockstore.put(CID.parse(key), value)
|
90
88
|
})
|
91
89
|
return value
|
92
90
|
}
|
93
91
|
} else {
|
94
|
-
|
92
|
+
return false
|
95
93
|
}
|
96
94
|
}
|
97
95
|
|
package/src/clock.js
CHANGED
@@ -94,6 +94,7 @@ export class EventFetcher {
|
|
94
94
|
/** @private */
|
95
95
|
this._blocks = blocks
|
96
96
|
this._cids = new CIDCounter()
|
97
|
+
this._cache = new Map()
|
97
98
|
}
|
98
99
|
|
99
100
|
/**
|
@@ -101,10 +102,15 @@ export class EventFetcher {
|
|
101
102
|
* @returns {Promise<EventBlockView<T>>}
|
102
103
|
*/
|
103
104
|
async get (link) {
|
105
|
+
const slink = link.toString()
|
106
|
+
// console.log('get', link.toString())
|
107
|
+
if (this._cache.has(slink)) return this._cache.get(slink)
|
104
108
|
const block = await this._blocks.get(link)
|
105
109
|
this._cids.add({ address: link })
|
106
110
|
if (!block) throw new Error(`missing block: ${link}`)
|
107
|
-
|
111
|
+
const got = decodeEventBlock(block.bytes)
|
112
|
+
this._cache.set(slink, got)
|
113
|
+
return got
|
108
114
|
}
|
109
115
|
|
110
116
|
async all () {
|
@@ -200,22 +206,34 @@ export async function * vis (blocks, head, options = {}) {
|
|
200
206
|
}
|
201
207
|
|
202
208
|
export async function findEventsToSync (blocks, head) {
|
209
|
+
// const callTag = Math.random().toString(36).substring(7)
|
203
210
|
const events = new EventFetcher(blocks)
|
211
|
+
// console.time(callTag + '.findCommonAncestorWithSortedEvents')
|
204
212
|
const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
|
213
|
+
// console.timeEnd(callTag + '.findCommonAncestorWithSortedEvents')
|
214
|
+
// console.log('sorted', sorted.length)
|
215
|
+
// console.time(callTag + '.contains')
|
205
216
|
const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)))
|
217
|
+
// console.timeEnd(callTag + '.contains')
|
218
|
+
|
206
219
|
return { cids: events.cids, events: toSync }
|
207
220
|
}
|
208
221
|
|
209
222
|
const asyncFilter = async (arr, predicate) =>
|
223
|
+
|
210
224
|
Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]))
|
211
225
|
|
212
226
|
export async function findCommonAncestorWithSortedEvents (events, children) {
|
227
|
+
// const callTag = Math.random().toString(36).substring(7)
|
228
|
+
// console.time(callTag + '.findCommonAncestor')
|
213
229
|
const ancestor = await findCommonAncestor(events, children)
|
230
|
+
// console.timeEnd(callTag + '.findCommonAncestor')
|
214
231
|
if (!ancestor) {
|
215
232
|
throw new Error('failed to find common ancestor event')
|
216
233
|
}
|
217
|
-
//
|
234
|
+
// console.time(callTag + '.findSortedEvents')
|
218
235
|
const sorted = await findSortedEvents(events, children, ancestor)
|
236
|
+
// console.timeEnd(callTag + '.findSortedEvents')
|
219
237
|
return { ancestor, sorted }
|
220
238
|
}
|
221
239
|
|
@@ -279,6 +297,7 @@ function findCommonString (arrays) {
|
|
279
297
|
* @param {import('./clock').EventLink<EventData>} tail
|
280
298
|
*/
|
281
299
|
async function findSortedEvents (events, head, tail) {
|
300
|
+
// const callTag = Math.random().toString(36).substring(7)
|
282
301
|
// get weighted events - heavier events happened first
|
283
302
|
/** @type {Map<string, { event: import('./clock').EventBlockView<EventData>, weight: number }>} */
|
284
303
|
const weights = new Map()
|
@@ -312,6 +331,7 @@ async function findSortedEvents (events, head, tail) {
|
|
312
331
|
.sort((a, b) => b[0] - a[0])
|
313
332
|
.flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)))
|
314
333
|
// console.log('sorted', sorted.map(s => s.value.data.value))
|
334
|
+
|
315
335
|
return sorted
|
316
336
|
}
|
317
337
|
|
@@ -322,6 +342,7 @@ async function findSortedEvents (events, head, tail) {
|
|
322
342
|
* @returns {Promise<Array<{ event: import('./clock').EventBlockView<EventData>, depth: number }>>}
|
323
343
|
*/
|
324
344
|
async function findEvents (events, start, end, depth = 0) {
|
345
|
+
// console.log('findEvents', start)
|
325
346
|
const event = await events.get(start)
|
326
347
|
const acc = [{ event, depth }]
|
327
348
|
const { parents } = event.value
|
package/src/db-index.js
CHANGED
@@ -1,15 +1,19 @@
|
|
1
1
|
import { create, load } from 'prolly-trees/db-index'
|
2
|
+
// import { create, load } from '../../../../prolly-trees/src/db-index.js'
|
3
|
+
|
2
4
|
import { sha256 as hasher } from 'multiformats/hashes/sha2'
|
3
5
|
import { nocache as cache } from 'prolly-trees/cache'
|
4
6
|
import { bf, simpleCompare } from 'prolly-trees/utils'
|
5
7
|
import { makeGetBlock } from './prolly.js'
|
6
8
|
import { cidsToProof } from './fireproof.js'
|
9
|
+
import { CID } from 'multiformats'
|
10
|
+
|
7
11
|
import * as codec from '@ipld/dag-cbor'
|
8
12
|
// import { create as createBlock } from 'multiformats/block'
|
9
13
|
import { doTransaction } from './blockstore.js'
|
10
14
|
import charwise from 'charwise'
|
11
15
|
|
12
|
-
const ALWAYS_REBUILD =
|
16
|
+
const ALWAYS_REBUILD = false // todo: make false
|
13
17
|
|
14
18
|
// const arrayCompare = (a, b) => {
|
15
19
|
// if (Array.isArray(a) && Array.isArray(b)) {
|
@@ -37,12 +41,13 @@ const compare = (a, b) => {
|
|
37
41
|
const refCompare = (aRef, bRef) => {
|
38
42
|
if (Number.isNaN(aRef)) return -1
|
39
43
|
if (Number.isNaN(bRef)) throw new Error('ref may not be Infinity or NaN')
|
40
|
-
if (
|
44
|
+
if (aRef === Infinity) return 1 // need to test this on equal docids!
|
41
45
|
// if (!Number.isFinite(bRef)) throw new Error('ref may not be Infinity or NaN')
|
42
46
|
return simpleCompare(aRef, bRef)
|
43
47
|
}
|
44
48
|
|
45
|
-
const
|
49
|
+
const dbIndexOpts = { cache, chunker: bf(3), codec, hasher, compare }
|
50
|
+
const idIndexOpts = { cache, chunker: bf(3), codec, hasher, compare: simpleCompare }
|
46
51
|
|
47
52
|
const makeDoc = ({ key, value }) => ({ _id: key, ...value })
|
48
53
|
|
@@ -87,14 +92,6 @@ const indexEntriesForChanges = (changes, mapFun) => {
|
|
87
92
|
return indexEntries
|
88
93
|
}
|
89
94
|
|
90
|
-
const indexEntriesForOldChanges = async (blocks, byIDindexRoot, ids, mapFun) => {
|
91
|
-
const { getBlock } = makeGetBlock(blocks)
|
92
|
-
const byIDindex = await load({ cid: byIDindexRoot.cid, get: getBlock, ...opts })
|
93
|
-
|
94
|
-
const result = await byIDindex.getMany(ids)
|
95
|
-
return result
|
96
|
-
}
|
97
|
-
|
98
95
|
/**
|
99
96
|
* Represents an DbIndex for a Fireproof database.
|
100
97
|
*
|
@@ -118,11 +115,31 @@ export default class DbIndex {
|
|
118
115
|
*/
|
119
116
|
this.mapFun = mapFun
|
120
117
|
|
121
|
-
this.
|
122
|
-
|
118
|
+
this.database.indexes.set(mapFun.toString(), this)
|
119
|
+
|
120
|
+
this.indexById = { root: null, cid: null }
|
121
|
+
this.indexByKey = { root: null, cid: null }
|
123
122
|
|
124
|
-
this.byIDindexRoot = null
|
125
123
|
this.dbHead = null
|
124
|
+
|
125
|
+
this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`
|
126
|
+
|
127
|
+
this.updateIndexPromise = null
|
128
|
+
}
|
129
|
+
|
130
|
+
toJSON () {
|
131
|
+
return { code: this.mapFun?.toString(), clock: { db: this.dbHead?.map(cid => cid.toString()), byId: this.indexById.cid?.toString(), byKey: this.indexByKey.cid?.toString() } }
|
132
|
+
}
|
133
|
+
|
134
|
+
static fromJSON (database, { code, clock: { byId, byKey, db } }) {
|
135
|
+
let mapFun
|
136
|
+
// eslint-disable-next-line
|
137
|
+
eval("mapFun = "+ code)
|
138
|
+
const index = new DbIndex(database, mapFun)
|
139
|
+
index.indexById.cid = CID.parse(byId)
|
140
|
+
index.indexByKey.cid = CID.parse(byKey)
|
141
|
+
index.dbHead = db.map(cid => CID.parse(cid))
|
142
|
+
return index
|
126
143
|
}
|
127
144
|
|
128
145
|
/**
|
@@ -140,13 +157,18 @@ export default class DbIndex {
|
|
140
157
|
* @instance
|
141
158
|
*/
|
142
159
|
async query (query) {
|
160
|
+
// const callId = Math.random().toString(36).substring(2, 7)
|
143
161
|
// if (!root) {
|
144
162
|
// pass a root to query a snapshot
|
145
|
-
|
146
|
-
|
147
|
-
|
163
|
+
// console.time(callId + '.#updateIndex')
|
164
|
+
await this.#updateIndex(this.database.blocks)
|
165
|
+
// console.timeEnd(callId + '.#updateIndex')
|
166
|
+
|
148
167
|
// }
|
149
|
-
|
168
|
+
// console.time(callId + '.doIndexQuery')
|
169
|
+
const response = await doIndexQuery(this.database.blocks, this.indexByKey, query)
|
170
|
+
// console.timeEnd(callId + '.doIndexQuery')
|
171
|
+
|
150
172
|
return {
|
151
173
|
proof: { index: await cidsToProof(response.cids) },
|
152
174
|
// TODO fix this naming upstream in prolly/db-DbIndex?
|
@@ -162,50 +184,54 @@ export default class DbIndex {
|
|
162
184
|
* @private
|
163
185
|
* @returns {Promise<void>}
|
164
186
|
*/
|
187
|
+
|
165
188
|
async #updateIndex (blocks) {
|
189
|
+
// todo this could enqueue the request and give fresh ones to all second comers -- right now it gives out stale promises while working
|
190
|
+
// what would it do in a world where all indexes provide a database snapshot to query?
|
191
|
+
if (this.updateIndexPromise) return this.updateIndexPromise
|
192
|
+
this.updateIndexPromise = this.#innerUpdateIndex(blocks)
|
193
|
+
this.updateIndexPromise.finally(() => { this.updateIndexPromise = null })
|
194
|
+
return this.updateIndexPromise
|
195
|
+
}
|
196
|
+
|
197
|
+
async #innerUpdateIndex (inBlocks) {
|
198
|
+
// const callTag = Math.random().toString(36).substring(4)
|
199
|
+
// console.log(`#updateIndex ${callTag} >`, this.instanceId, this.dbHead?.toString(), this.dbIndexRoot?.cid.toString(), this.indexByIdRoot?.cid.toString())
|
166
200
|
// todo remove this hack
|
167
201
|
if (ALWAYS_REBUILD) {
|
168
202
|
this.dbHead = null // hack
|
169
|
-
this.
|
203
|
+
this.indexByKey = null // hack
|
170
204
|
this.dbIndexRoot = null
|
171
205
|
}
|
206
|
+
// console.log('dbHead', this.dbHead)
|
207
|
+
// console.time(callTag + '.changesSince')
|
172
208
|
const result = await this.database.changesSince(this.dbHead) // {key, value, del}
|
173
|
-
|
174
|
-
|
175
|
-
blocks,
|
176
|
-
this.byIDindexRoot,
|
177
|
-
result.rows.map(({ key }) => key),
|
178
|
-
this.mapFun
|
179
|
-
)
|
180
|
-
const oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
|
181
|
-
const removalResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, oldIndexEntries, opts)
|
182
|
-
this.dbIndexRoot = removalResult.root
|
183
|
-
this.dbIndex = removalResult.dbIndex
|
209
|
+
// console.timeEnd(callTag + '.changesSince')
|
210
|
+
// console.log('result.rows.length', result.rows.length)
|
184
211
|
|
185
|
-
|
186
|
-
const purgedRemovalResults = await bulkIndex(
|
187
|
-
blocks,
|
188
|
-
this.byIDindexRoot,
|
189
|
-
this.byIDIndex,
|
190
|
-
removeByIdIndexEntries,
|
191
|
-
opts
|
192
|
-
)
|
193
|
-
this.byIDindexRoot = purgedRemovalResults.root
|
194
|
-
this.byIDIndex = purgedRemovalResults.dbIndex
|
195
|
-
}
|
196
|
-
const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
|
197
|
-
const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
|
198
|
-
const addFutureRemovalsResult = await bulkIndex(blocks, this.byIDindexRoot, this.byIDIndex, byIdIndexEntries, opts)
|
199
|
-
this.byIDindexRoot = addFutureRemovalsResult.root
|
200
|
-
this.byIDIndex = addFutureRemovalsResult.dbIndex
|
212
|
+
// console.time(callTag + '.doTransaction#updateIndex')
|
201
213
|
|
202
|
-
|
203
|
-
|
204
|
-
|
205
|
-
|
206
|
-
|
207
|
-
|
208
|
-
|
214
|
+
if (result.rows.length === 0) {
|
215
|
+
// console.log('#updateIndex < no changes')
|
216
|
+
this.dbHead = result.clock
|
217
|
+
return
|
218
|
+
}
|
219
|
+
await doTransaction('#updateIndex', inBlocks, async (blocks) => {
|
220
|
+
let oldIndexEntries = []
|
221
|
+
let removeByIdIndexEntries = []
|
222
|
+
if (this.dbHead) { // need a maybe load
|
223
|
+
const oldChangeEntries = await this.indexById.root.getMany(result.rows.map(({ key }) => key))
|
224
|
+
oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
|
225
|
+
removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
|
226
|
+
}
|
227
|
+
const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
|
228
|
+
const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
|
229
|
+
this.indexById = await bulkIndex(blocks, this.indexById, removeByIdIndexEntries.concat(byIdIndexEntries), idIndexOpts)
|
230
|
+
this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts)
|
231
|
+
this.dbHead = result.clock
|
232
|
+
})
|
233
|
+
// console.timeEnd(callTag + '.doTransaction#updateIndex')
|
234
|
+
// console.log(`#updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.dbIndexRoot?.cid.toString(), this.indexByIdRoot?.cid.toString())
|
209
235
|
}
|
210
236
|
}
|
211
237
|
|
@@ -216,45 +242,47 @@ export default class DbIndex {
|
|
216
242
|
* @param {DbIndexEntry[]} indexEntries
|
217
243
|
* @private
|
218
244
|
*/
|
219
|
-
async function bulkIndex (blocks,
|
220
|
-
if (!indexEntries.length) return
|
245
|
+
async function bulkIndex (blocks, inIndex, indexEntries, opts) {
|
246
|
+
if (!indexEntries.length) return inIndex
|
221
247
|
const putBlock = blocks.put.bind(blocks)
|
222
248
|
const { getBlock } = makeGetBlock(blocks)
|
223
249
|
let returnRootBlock
|
224
250
|
let returnNode
|
225
|
-
if (!
|
226
|
-
|
227
|
-
|
228
|
-
await
|
229
|
-
|
230
|
-
|
231
|
-
|
232
|
-
|
233
|
-
|
234
|
-
|
235
|
-
returnRootBlock = await root.block
|
236
|
-
returnNode = root
|
237
|
-
for await (const block of blocks) {
|
238
|
-
await putBlock(block.cid, block.bytes)
|
251
|
+
if (!inIndex.root) {
|
252
|
+
const cid = inIndex.cid
|
253
|
+
if (!cid) {
|
254
|
+
for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
|
255
|
+
const block = await node.block
|
256
|
+
await putBlock(block.cid, block.bytes)
|
257
|
+
returnRootBlock = block
|
258
|
+
returnNode = node
|
259
|
+
}
|
260
|
+
return { root: returnNode, cid: returnRootBlock.cid }
|
239
261
|
}
|
240
|
-
await
|
262
|
+
inIndex.root = await load({ cid, get: getBlock, ...dbIndexOpts })
|
263
|
+
}
|
264
|
+
const { root, blocks: newBlocks } = await inIndex.root.bulk(indexEntries)
|
265
|
+
returnRootBlock = await root.block
|
266
|
+
returnNode = root
|
267
|
+
for await (const block of newBlocks) {
|
268
|
+
await putBlock(block.cid, block.bytes)
|
241
269
|
}
|
242
|
-
|
270
|
+
await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
|
271
|
+
return { root: returnNode, cid: returnRootBlock.cid }
|
243
272
|
}
|
244
273
|
|
245
|
-
async function doIndexQuery (blocks,
|
246
|
-
if (!
|
247
|
-
const cid =
|
274
|
+
async function doIndexQuery (blocks, indexByKey, query) {
|
275
|
+
if (!indexByKey.root) {
|
276
|
+
const cid = indexByKey.cid
|
248
277
|
if (!cid) return { result: [] }
|
249
278
|
const { getBlock } = makeGetBlock(blocks)
|
250
|
-
|
279
|
+
indexByKey.root = await load({ cid, get: getBlock, ...dbIndexOpts })
|
251
280
|
}
|
252
281
|
if (query.range) {
|
253
282
|
const encodedRange = query.range.map((key) => charwise.encode(key))
|
254
|
-
return
|
283
|
+
return indexByKey.root.range(...encodedRange)
|
255
284
|
} else if (query.key) {
|
256
285
|
const encodedKey = charwise.encode(query.key)
|
257
|
-
|
258
|
-
return dbIndex.get(encodedKey)
|
286
|
+
return indexByKey.root.get(encodedKey)
|
259
287
|
}
|
260
288
|
}
|
package/src/fireproof.js
CHANGED
@@ -38,6 +38,7 @@ export default class Fireproof {
|
|
38
38
|
this.clock = clock
|
39
39
|
this.config = config
|
40
40
|
this.authCtx = authCtx
|
41
|
+
this.indexes = new Map()
|
41
42
|
}
|
42
43
|
|
43
44
|
/**
|
@@ -78,7 +79,11 @@ export default class Fireproof {
|
|
78
79
|
*/
|
79
80
|
toJSON () {
|
80
81
|
// todo this also needs to return the index roots...
|
81
|
-
return {
|
82
|
+
return {
|
83
|
+
clock: this.clock.map(cid => cid.toString()),
|
84
|
+
name: this.name,
|
85
|
+
indexes: [...this.indexes.values()].map((index) => index.toJSON())
|
86
|
+
}
|
82
87
|
}
|
83
88
|
|
84
89
|
/**
|
@@ -133,7 +138,7 @@ export default class Fireproof {
|
|
133
138
|
}
|
134
139
|
|
135
140
|
async #notifyListeners (changes) {
|
136
|
-
// await sleep(
|
141
|
+
// await sleep(10)
|
137
142
|
for (const listener of this.#listeners) {
|
138
143
|
await listener(changes)
|
139
144
|
}
|
@@ -221,6 +226,7 @@ export default class Fireproof {
|
|
221
226
|
console.error('failed', event)
|
222
227
|
throw new Error('failed to put at storage layer')
|
223
228
|
}
|
229
|
+
// console.log('new clock head', this.instanceId, result.head.toString())
|
224
230
|
this.clock = result.head // do we want to do this as a finally block
|
225
231
|
await this.#notifyListeners([decodedEvent]) // this type is odd
|
226
232
|
return {
|
package/src/hydrator.js
ADDED
@@ -0,0 +1,10 @@
|
|
1
|
+
import Fireproof from './fireproof.js'
|
2
|
+
import DbIndex from './db-index.js'
|
3
|
+
|
4
|
+
export function fromJSON (json, blocks) {
|
5
|
+
const fp = new Fireproof(blocks, json.clock, { name: json.name })
|
6
|
+
for (const index of json.indexes) {
|
7
|
+
DbIndex.fromJSON(fp, index)
|
8
|
+
}
|
9
|
+
return fp
|
10
|
+
}
|
package/src/prolly.js
CHANGED
@@ -164,7 +164,8 @@ export async function put (inBlocks, head, event, options) {
|
|
164
164
|
|
165
165
|
// Otherwise, we find the common ancestor and update the root and other blocks
|
166
166
|
const events = new EventFetcher(blocks)
|
167
|
-
// this is returning more events than necessary
|
167
|
+
// todo this is returning more events than necessary, lets define the desired semantics from the top down
|
168
|
+
// good semantics mean we can cache the results of this call
|
168
169
|
const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
|
169
170
|
// console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
|
170
171
|
const prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
|
package/test/db-index.test.js
CHANGED
@@ -113,7 +113,7 @@ describe('DbIndex query', () => {
|
|
113
113
|
await index.query({ range: [51, 54] })
|
114
114
|
|
115
115
|
console.x('--- make Xander 53')
|
116
|
-
const DOCID = '
|
116
|
+
const DOCID = 'xander-doc'
|
117
117
|
const r1 = await database.put({ _id: DOCID, name: 'Xander', age: 53 })
|
118
118
|
assert(r1.id, 'should have id')
|
119
119
|
|