@fireproof/core 0.1.0 → 0.2.0

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
package/README.md CHANGED
@@ -96,7 +96,7 @@ Documents changes are persisted to [Filecoin](https://filecoin.io) via [web3.sto
96
96
 
97
97
  ### Cryptographic Proofs
98
98
 
99
- The [UCAN protocol](https://ucan.xyz) verifably links Fireproof updates to authorized agents via cryptographic proof chains. These proofs are portable like bearer tokens, but because invocations are signed by end-user device keys, UCAN proofs don't need to be hidden to be secure, allowing for delegation of service capabilities across devices and parties. Additionally, Fireproof's Merkle clocks and hash trees are immutable and self-validating, making merging changes safe and efficient. Fireproof makes cryptographic proofs available for all of it's operations, making it an ideal verfiable document database for smart contracts and other applications running in trustless environments. [Proof chains provide performance benefits as well](https://purrfect-tracker-45c.notion.site/Data-Routing-23c37b269b4c4c3dacb60d0077113bcb), by allowing recipients to skip costly I/O operations and instead cryptographically verify that changes contain all of the required context.
99
+ The [UCAN protocol](https://ucan.xyz) verifiably links Fireproof updates to authorized agents via cryptographic proof chains. These proofs are portable like bearer tokens, but because invocations are signed by end-user device keys, UCAN proofs don't need to be hidden to be secure, allowing for delegation of service capabilities across devices and parties. Additionally, Fireproof's Merkle clocks and hash trees are immutable and self-validating, making merging changes safe and efficient. Fireproof makes cryptographic proofs available for all of its operations, making it an ideal verifiable document database for smart contracts and other applications running in trustless environments. [Proof chains provide performance benefits as well](https://purrfect-tracker-45c.notion.site/Data-Routing-23c37b269b4c4c3dacb60d0077113bcb), by allowing recipients to skip costly I/O operations and instead cryptographically verify that changes contain all of the required context.
100
100
 
101
101
  ## Limitations 💣
102
102
 
@@ -4,44 +4,53 @@
4
4
  import { useEffect, useState, createContext } from 'react'
5
5
  import { Fireproof, Listener, Hydrator } from '../index'
6
6
 
7
-
8
7
  export interface FireproofCtxValue {
9
8
  addSubscriber: (label: String, fn: Function) => void
10
9
  database: Fireproof
11
10
  ready: boolean
11
+ persist: () => void
12
12
  }
13
13
  export const FireproofCtx = createContext<FireproofCtxValue>({
14
14
  addSubscriber: () => {},
15
15
  database: null,
16
- ready: false,
16
+ ready: false
17
17
  })
18
18
 
19
19
  const inboundSubscriberQueue = new Map()
20
- const database = Fireproof.storage()
21
- const listener = new Listener(database)
22
- let startedSetup = false;
20
+
21
+ let startedSetup = false
22
+ let database
23
+ let listener
24
+ const initializeDatabase = name => {
25
+ if (database) return
26
+ database = Fireproof.storage(name)
27
+ listener = new Listener(database)
28
+ }
23
29
 
24
30
  /**
25
31
  * @function useFireproof
26
32
  * React hook to initialize a Fireproof database, automatically saving and loading the clock.
33
+ * You might need to `import { nodePolyfills } from 'vite-plugin-node-polyfills'` in your vite.config.ts
27
34
  * @param [defineDatabaseFn] Synchronous function that defines the database, run this before any async calls
28
35
  * @param [setupDatabaseFn] Asynchronous function that sets up the database, run this to load fixture data etc
29
36
  * @returns {FireproofCtxValue} { addSubscriber, database, ready }
30
37
  */
31
- export function useFireproof(defineDatabaseFn: Function, setupDatabaseFn: Function): FireproofCtxValue {
38
+ export function useFireproof(
39
+ defineDatabaseFn = (database: Fireproof) => {},
40
+ setupDatabaseFn = async (database: Fireproof) => {},
41
+ name: string
42
+ ): FireproofCtxValue {
32
43
  const [ready, setReady] = useState(false)
33
- defineDatabaseFn = defineDatabaseFn || (() => {})
34
- setupDatabaseFn = setupDatabaseFn || (() => {})
35
- // console.log('useFireproof', database, ready)
36
-
44
+ initializeDatabase(name || 'useFireproof')
45
+ const localStorageKey = 'fp.' + database.name
37
46
 
38
47
  const addSubscriber = (label: String, fn: Function) => {
39
48
  inboundSubscriberQueue.set(label, fn)
40
49
  }
41
50
 
42
- const listenerCallback = async () => {
43
- // console.log ('listenerCallback', JSON.stringify(database))
44
- localSet('fireproof', JSON.stringify(database))
51
+ const listenerCallback = async event => {
52
+ localSet(localStorageKey, JSON.stringify(database))
53
+ if (event._external) return
45
54
  for (const [, fn] of inboundSubscriberQueue) fn()
46
55
  }
47
56
 
@@ -51,14 +60,14 @@ export function useFireproof(defineDatabaseFn: Function, setupDatabaseFn: Functi
51
60
  if (startedSetup) return
52
61
  startedSetup = true
53
62
  defineDatabaseFn(database) // define indexes before querying them
54
- const fp = localGet('fireproof')
63
+ console.log('Initializing database', database.name)
64
+ const fp = localGet(localStorageKey) // todo use db.name
55
65
  if (fp) {
56
- const serialized = JSON.parse(fp)
57
- // console.log('serialized', JSON.stringify(serialized.indexes.map(c => c.clock)))
58
- console.log("Loading previous database clock. (localStorage.removeItem('fireproof') to reset)")
59
- Hydrator.fromJSON(serialized, database)
60
- // await database.setClock(clock)
61
66
  try {
67
+ const serialized = JSON.parse(fp)
68
+ // console.log('serialized', JSON.stringify(serialized.indexes.map(c => c.clock)))
69
+ console.log(`Loading previous database clock. (localStorage.removeItem('${localStorageKey}') to reset)`)
70
+ await Hydrator.fromJSON(serialized, database)
62
71
  const changes = await database.changesSince()
63
72
  if (changes.rows.length < 2) {
64
73
  // console.log('Resetting database')
@@ -66,16 +75,16 @@ export function useFireproof(defineDatabaseFn: Function, setupDatabaseFn: Functi
66
75
  }
67
76
  } catch (e) {
68
77
  console.error(`Error loading previous database clock. ${fp} Resetting.`, e)
69
- await database.setClock([]) // todo this should be resetClock and also reset the indexes
78
+ await Hydrator.zoom(database, [])
70
79
  await setupDatabaseFn(database)
71
- localSet('fireproof', JSON.stringify(database))
80
+ localSet(localStorageKey, JSON.stringify(database))
72
81
  }
73
82
  } else {
74
83
  await setupDatabaseFn(database)
75
- localSet('fireproof', JSON.stringify(database))
84
+ localSet(localStorageKey, JSON.stringify(database))
76
85
  }
77
86
  setReady(true)
78
- listener.on('*', hushed('*', listenerCallback, 250))
87
+ listener.on('*', listenerCallback)//hushed('*', listenerCallback, 250))
79
88
  }
80
89
  doSetup()
81
90
  }, [ready])
@@ -84,6 +93,9 @@ export function useFireproof(defineDatabaseFn: Function, setupDatabaseFn: Functi
84
93
  addSubscriber,
85
94
  database,
86
95
  ready,
96
+ persist: () => {
97
+ localSet(localStorageKey, JSON.stringify(database))
98
+ }
87
99
  }
88
100
  }
89
101
 
@@ -91,12 +103,17 @@ const husherMap = new Map()
91
103
  const husher = (id: string, workFn: { (): Promise<any> }, ms: number) => {
92
104
  if (!husherMap.has(id)) {
93
105
  const start: number = Date.now()
94
- husherMap.set(id, workFn().finally(() =>
95
- setTimeout(() => husherMap.delete(id), ms - (Date.now() - start))))
106
+ husherMap.set(
107
+ id,
108
+ workFn().finally(() => setTimeout(() => husherMap.delete(id), ms - (Date.now() - start)))
109
+ )
96
110
  }
97
111
  return husherMap.get(id)
98
112
  }
99
- const hushed = (id: string, workFn: { (): Promise<any> }, ms: number) => () => husher(id, workFn, ms)
113
+ const hushed =
114
+ (id: string, workFn: { (...args): Promise<any> }, ms: number) =>
115
+ (...args) =>
116
+ husher(id, () => workFn(...args), ms)
100
117
 
101
118
  let storageSupported = false
102
119
  try {
@@ -116,4 +133,4 @@ function localSet(key: string, value: string) {
116
133
  // if (storageSupported) {
117
134
  // return localStorage && localStorage.removeItem(key)
118
135
  // }
119
- // }
136
+ // }
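The hook above now takes a database name and returns a `persist` helper. A minimal usage sketch (the import path is an assumption, since this diff omits the file's header, but the signature and returned fields are as shown in the diff):

```js
import { useFireproof } from '@fireproof/core/hooks' // hypothetical path

function TodoList () {
  const { database, ready, addSubscriber, persist } = useFireproof(
    database => { /* define DbIndexes here, synchronously, before any queries run */ },
    async database => { await database.put({ _id: 'seed', title: 'first todo' }) },
    'todos' // new in 0.2.0: names the database; its clock is saved under the localStorage key 'fp.todos'
  )
  if (!ready) return null
  // addSubscriber('TodoList', () => { ... }) registers a callback for live changes;
  // persist() force-writes the current clock (and encryption key) to localStorage.
  return null // render from `database` here
}
```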
package/package.json CHANGED
@@ -1,15 +1,17 @@
1
1
  {
2
2
  "name": "@fireproof/core",
3
- "version": "0.1.0",
3
+ "version": "0.2.0",
4
4
  "description": "Realtime database for IPFS",
5
5
  "main": "index.js",
6
6
  "type": "module",
7
7
  "scripts": {
8
- "test": "standard && npm run test:mocha",
9
- "test:mocha": "mocha test/*.test.js",
10
- "test:watch": "npm run test:mocha -- -w --parallel test/*.test.js",
8
+ "keygen": "node scripts/keygen.js",
9
+ "test": "standard && npm run test:unencrypted && npm run test:mocha",
10
+ "test:unencrypted": "NO_ENCRYPT=true npm run test:mocha",
11
+ "test:mocha": "mocha --reporter list test/*.test.js",
12
+ "test:watch": "npm run test:mocha -- -w --parallel",
11
13
  "coverage": "c8 -r html -r text npm test",
12
- "prepublishOnly" : "cp ../../README.md .",
14
+ "prepublishOnly": "cp ../../README.md .",
13
15
  "postpublish": "rm README.md",
14
16
  "lint": "standard",
15
17
  "lint:fix": "standard --fix"
@@ -37,6 +39,7 @@
37
39
  "car-transaction": "^1.0.1",
38
40
  "charwise": "^3.0.1",
39
41
  "cli-color": "^2.0.3",
42
+ "encrypted-block": "^0.0.3",
40
43
  "idb": "^7.1.1",
41
44
  "multiformats": "^11.0.1",
42
45
  "prolly-trees": "1.0.3",
@@ -69,7 +72,7 @@
69
72
  "bugs": {
70
73
  "url": "https://github.com/fireproof-storage/fireproof/issues"
71
74
  },
72
- "homepage": "https://github.com/fireproof-storage/fireproof#readme",
75
+ "homepage": "https://fireproof.storage",
73
76
  "workspaces": [
74
77
  "examples/todomvc"
75
78
  ]
@@ -0,0 +1,3 @@
1
+ import { randomBytes } from 'crypto'
2
+
3
+ console.log(randomBytes(32).toString('hex'))
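The three lines added above are the generator behind the new `keygen` script in package.json (`node scripts/keygen.js`): it prints a fresh 32-byte key as 64 hex characters, which is the key-material format the blockstore and valet now accept (the Fireproof test further down asserts `km.length === 64`). A hypothetical wiring sketch:

```js
import { randomBytes } from 'crypto'
import TransactionBlockstore from '../src/blockstore.js' // relative path within this package

const keyMaterial = randomBytes(32).toString('hex')           // same output as `npm run keygen`
const blocks = new TransactionBlockstore('mydb', keyMaterial) // CAR files written by this store will be encrypted
```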
package/src/blockstore.js CHANGED
@@ -1,10 +1,5 @@
1
1
  import { parse } from 'multiformats/link'
2
- import * as raw from 'multiformats/codecs/raw'
3
- import { sha256 } from 'multiformats/hashes/sha2'
4
- import * as Block from 'multiformats/block'
5
- import * as CBW from '@ipld/car/buffer-writer'
6
2
  import { CID } from 'multiformats'
7
-
8
3
  import Valet from './valet.js'
9
4
 
10
5
  // const sleep = ms => new Promise(r => setTimeout(r, ms))
@@ -34,15 +29,15 @@ const husher = (id, workFn) => {
34
29
  */
35
30
  export default class TransactionBlockstore {
36
31
  /** @type {Map<string, Uint8Array>} */
37
- #oldBlocks = new Map()
32
+ #committedBlocks = new Map()
38
33
 
39
34
  valet = null
40
35
 
41
36
  #instanceId = 'blkz.' + Math.random().toString(36).substring(2, 4)
42
37
  #inflightTransactions = new Set()
43
38
 
44
- constructor (name) {
45
- this.valet = new Valet(name)
39
+ constructor (name, encryptionKey) {
40
+ this.valet = new Valet(name, encryptionKey)
46
41
  }
47
42
 
48
43
  /**
@@ -54,7 +49,7 @@ export default class TransactionBlockstore {
54
49
  async get (cid) {
55
50
  const key = cid.toString()
56
51
  // it is safe to read from the in-flight transactions becauase they are immutable
57
- const bytes = await Promise.any([this.#transactionsGet(key), this.commitedGet(key)]).catch((e) => {
52
+ const bytes = await Promise.any([this.#transactionsGet(key), this.committedGet(key)]).catch(e => {
58
53
  // console.log('networkGet', cid.toString(), e)
59
54
  return this.networkGet(key)
60
55
  })
@@ -72,18 +67,26 @@ export default class TransactionBlockstore {
72
67
  throw new Error('Missing block: ' + key)
73
68
  }
74
69
 
75
- async commitedGet (key) {
76
- const old = this.#oldBlocks.get(key)
70
+ async committedGet (key) {
71
+ const old = this.#committedBlocks.get(key)
77
72
  if (old) return old
78
- return await this.valet.getBlock(key)
73
+ const got = await this.valet.getBlock(key)
74
+ // console.log('committedGet: ' + key)
75
+ this.#committedBlocks.set(key, got)
76
+ return got
77
+ }
78
+
79
+ async clearCommittedCache () {
80
+ this.#committedBlocks.clear()
79
81
  }
80
82
 
81
83
  async networkGet (key) {
82
84
  if (this.valet.remoteBlockFunction) {
85
+ // todo why is this on valet?
83
86
  const value = await husher(key, async () => await this.valet.remoteBlockFunction(key))
84
87
  if (value) {
85
88
  // console.log('networkGot: ' + key, value.length)
86
- doTransaction('networkGot: ' + key, this, async (innerBlockstore) => {
89
+ doTransaction('networkGot: ' + key, this, async innerBlockstore => {
87
90
  await innerBlockstore.put(CID.parse(key), value)
88
91
  })
89
92
  return value
@@ -118,7 +121,7 @@ export default class TransactionBlockstore {
118
121
  // // for (const [str, bytes] of this.#blocks) {
119
122
  // // yield { cid: parse(str), bytes }
120
123
  // // }
121
- // for (const [str, bytes] of this.#oldBlocks) {
124
+ // for (const [str, bytes] of this.#committedBlocks) {
122
125
  // yield { cid: parse(str), bytes }
123
126
  // }
124
127
  // }
@@ -145,39 +148,24 @@ export default class TransactionBlockstore {
145
148
  }
146
149
 
147
150
  // first get the transaction blockstore from the map of transaction blockstores
148
- // then copy it to oldBlocks
151
+ // then copy it to committedBlocks
149
152
  // then write the transaction blockstore to a car
150
153
  // then write the car to the valet
151
154
  // then remove the transaction blockstore from the map of transaction blockstores
152
- #doCommit = async (innerBlockstore) => {
155
+ #doCommit = async innerBlockstore => {
153
156
  const cids = new Set()
154
157
  for (const { cid, bytes } of innerBlockstore.entries()) {
155
158
  const stringCid = cid.toString() // unnecessary string conversion, can we fix upstream?
156
- if (this.#oldBlocks.has(stringCid)) {
157
- // console.log('Duplicate block: ' + stringCid)
159
+ if (this.#committedBlocks.has(stringCid)) {
160
+ // console.log('Duplicate block: ' + stringCid) // todo some of this can be avoided, cost is extra size on car files
158
161
  } else {
159
- this.#oldBlocks.set(stringCid, bytes)
162
+ this.#committedBlocks.set(stringCid, bytes)
160
163
  cids.add(stringCid)
161
164
  }
162
165
  }
163
166
  if (cids.size > 0) {
164
167
  // console.log(innerBlockstore.label, 'committing', cids.size, 'blocks')
165
- await this.#valetWriteTransaction(innerBlockstore, cids)
166
- }
167
- }
168
-
169
- /**
170
- * Group the blocks into a car and write it to the valet.
171
- * @param {InnerBlockstore} innerBlockstore
172
- * @param {Set<string>} cids
173
- * @returns {Promise<void>}
174
- * @memberof TransactionBlockstore
175
- * @private
176
- */
177
- #valetWriteTransaction = async (innerBlockstore, cids) => {
178
- if (innerBlockstore.lastCid) {
179
- const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore)
180
- await this.valet.parkCar(newCar.cid.toString(), newCar.bytes, cids)
168
+ await this.valet.writeTransaction(innerBlockstore, cids)
181
169
  }
182
170
  }
183
171
 
@@ -215,25 +203,6 @@ export const doTransaction = async (label, blockstore, doFun) => {
215
203
  }
216
204
  }
217
205
 
218
- const blocksToCarBlock = async (lastCid, blocks) => {
219
- let size = 0
220
- const headerSize = CBW.headerLength({ roots: [lastCid] })
221
- size += headerSize
222
- for (const { cid, bytes } of blocks.entries()) {
223
- size += CBW.blockLength({ cid, bytes })
224
- }
225
- const buffer = new Uint8Array(size)
226
- const writer = await CBW.createWriter(buffer, { headerSize })
227
-
228
- writer.addRoot(lastCid)
229
-
230
- for (const { cid, bytes } of blocks.entries()) {
231
- writer.write({ cid, bytes })
232
- }
233
- await writer.close()
234
- return await Block.encode({ value: writer.bytes, hasher: sha256, codec: raw })
235
- }
236
-
237
206
  /** @implements {BlockFetcher} */
238
207
  export class InnerBlockstore {
239
208
  /** @type {Map<string, Uint8Array>} */
@@ -254,8 +223,10 @@ export class InnerBlockstore {
254
223
  async get (cid) {
255
224
  const key = cid.toString()
256
225
  let bytes = this.#blocks.get(key)
257
- if (bytes) { return { cid, bytes } }
258
- bytes = await this.parentBlockstore.commitedGet(key)
226
+ if (bytes) {
227
+ return { cid, bytes }
228
+ }
229
+ bytes = await this.parentBlockstore.committedGet(key)
259
230
  if (bytes) {
260
231
  return { cid, bytes }
261
232
  }
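A sketch of the transaction flow after this refactor: blocks written inside `doTransaction` are copied into the new committed-block cache on success and handed to `valet.writeTransaction`, which packs them into a (now optionally encrypted) CAR file. The sketch assumes an environment where the valet's IndexedDB store is available; the document value is illustrative.

```js
import { encode } from 'multiformats/block'
import * as codec from '@ipld/dag-cbor'
import { sha256 as hasher } from 'multiformats/hashes/sha2'
import TransactionBlockstore, { doTransaction } from './blockstore.js'

const block = await encode({ value: { hello: 'world' }, codec, hasher })
const blocks = new TransactionBlockstore('demo' /*, optional 64-char hex key for encryption */)

await doTransaction('demo-write', blocks, async inner => {
  await inner.put(block.cid, block.bytes) // staged in an InnerBlockstore until the commit
})

const bytes = await blocks.get(block.cid) // served from the new committed-block cache
blocks.clearCommittedCache()              // force the next read to go back through the valet
```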
package/src/clock.js CHANGED
@@ -220,7 +220,6 @@ export async function findEventsToSync (blocks, head) {
220
220
  }
221
221
 
222
222
  const asyncFilter = async (arr, predicate) =>
223
-
224
223
  Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]))
225
224
 
226
225
  export async function findCommonAncestorWithSortedEvents (events, children) {
package/src/crypto.js ADDED
@@ -0,0 +1,65 @@
1
+ import * as codec from 'encrypted-block'
2
+ import {
3
+ create,
4
+ load
5
+ } from 'prolly-trees/cid-set'
6
+ import { CID } from 'multiformats'
7
+ import { encode, decode, create as mfCreate } from 'multiformats/block'
8
+ import * as dagcbor from '@ipld/dag-cbor'
9
+ import { sha256 as hasher } from 'multiformats/hashes/sha2'
10
+
11
+ const createBlock = (bytes, cid) => mfCreate({ cid, bytes, hasher, codec })
12
+
13
+ const encrypt = async function * ({ get, cids, hasher, key, cache, chunker, root }) {
14
+ const set = new Set()
15
+ let eroot
16
+ for (const string of cids) {
17
+ const cid = CID.parse(string)
18
+ const unencrypted = await get(cid)
19
+ const block = await encode({ ...await codec.encrypt({ ...unencrypted, key }), codec, hasher })
20
+ // console.log(`encrypting ${string} as ${block.cid}`)
21
+ yield block
22
+ set.add(block.cid.toString())
23
+ if (unencrypted.cid.equals(root)) eroot = block.cid
24
+ }
25
+ if (!eroot) throw new Error('cids does not include root')
26
+ const list = [...set].map(s => CID.parse(s))
27
+ let last
28
+ for await (const node of create({ list, get, cache, chunker, hasher, codec: dagcbor })) {
29
+ const block = await node.block
30
+ yield block
31
+ last = block
32
+ }
33
+ const head = [eroot, last.cid]
34
+ const block = await encode({ value: head, codec: dagcbor, hasher })
35
+ yield block
36
+ }
37
+
38
+ const decrypt = async function * ({ root, get, key, cache, chunker, hasher }) {
39
+ const o = { ...await get(root), codec: dagcbor, hasher }
40
+ const decodedRoot = await decode(o)
41
+ // console.log('decodedRoot', decodedRoot)
42
+ const { value: [eroot, tree] } = decodedRoot
43
+ const rootBlock = await get(eroot) // should I decrypt?
44
+ const cidset = await load({ cid: tree, get, cache, chunker, codec, hasher })
45
+ const { result: nodes } = await cidset.getAllEntries()
46
+ const unwrap = async (eblock) => {
47
+ const { bytes, cid } = await codec.decrypt({ ...eblock, key }).catch(e => {
48
+ console.log('ekey', e)
49
+ throw new Error('bad key: ' + key.toString('hex'))
50
+ })
51
+ const block = await createBlock(bytes, cid)
52
+ return block
53
+ }
54
+ const promises = []
55
+ for (const { cid } of nodes) {
56
+ if (!rootBlock.cid.equals(cid)) promises.push(get(cid).then(unwrap))
57
+ }
58
+ yield * promises
59
+ yield unwrap(rootBlock)
60
+ }
61
+
62
+ export {
63
+ encrypt,
64
+ decrypt
65
+ }
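A round-trip sketch of the new `encrypt`/`decrypt` generators, driven the same way valet.js (below) drives them; the in-memory `get` map stands in for the blockstore and is purely illustrative.

```js
import { encrypt, decrypt } from './crypto.js'
import { encode } from 'multiformats/block'
import * as dagcbor from '@ipld/dag-cbor'
import { sha256 as hasher } from 'multiformats/hashes/sha2'
import { bf } from 'prolly-trees/utils'
import { nocache as cache } from 'prolly-trees/cache'
import { Buffer } from 'buffer'
import { randomBytes } from 'crypto'

const chunker = bf(3)
const key = Buffer.from(randomBytes(32).toString('hex'), 'hex')

// Two plaintext blocks; `b` plays the role of the CAR root.
const a = await encode({ value: { hello: 'world' }, codec: dagcbor, hasher })
const b = await encode({ value: { link: a.cid }, codec: dagcbor, hasher })
const store = new Map([[a.cid.toString(), a], [b.cid.toString(), b]])
const get = async cid => store.get(cid.toString())

// encrypt() yields one encrypted block per input CID, then the cid-set nodes,
// then a small head block pointing at the encrypted root and the cid-set.
const encrypted = []
for await (const block of encrypt({
  cids: [a.cid.toString(), b.cid.toString()],
  get, key, hasher, chunker, cache, root: b.cid
})) {
  encrypted.push(block)
  store.set(block.cid.toString(), block)
}
const carRoot = encrypted[encrypted.length - 1].cid // valet uses this as the CAR root

// decrypt() walks the cid-set from that head block and yields the plaintext blocks back.
for await (const block of decrypt({ root: carRoot, get, key, hasher, chunker, cache })) {
  console.log('decrypted', block.cid.toString())
}
```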
package/src/db-index.js CHANGED
@@ -9,7 +9,7 @@ import { cidsToProof } from './fireproof.js'
9
9
 
10
10
  import * as codec from '@ipld/dag-cbor'
11
11
  // import { create as createBlock } from 'multiformats/block'
12
- import { doTransaction } from './blockstore.js'
12
+ import TransactionBlockstore, { doTransaction } from './blockstore.js'
13
13
  import charwise from 'charwise'
14
14
 
15
15
  const ALWAYS_REBUILD = false // todo: make false
@@ -82,6 +82,7 @@ const indexEntriesForChanges = (changes, mapFn) => {
82
82
  changes.forEach(({ key, value, del }) => {
83
83
  if (del || !value) return
84
84
  mapFn(makeDoc({ key, value }), (k, v) => {
85
+ if (typeof v === 'undefined' || typeof k === 'undefined') return
85
86
  indexEntries.push({
86
87
  key: [charwise.encode(k), key],
87
88
  value: v
@@ -102,13 +103,16 @@ const indexEntriesForChanges = (changes, mapFn) => {
102
103
  *
103
104
  */
104
105
  export default class DbIndex {
105
- constructor (database, mapFn, clock) {
106
+ constructor (database, mapFn, clock, opts = {}) {
106
107
  // console.log('DbIndex constructor', database.constructor.name, typeof mapFn, clock)
107
108
  /**
108
109
  * The database instance to DbIndex.
109
110
  * @type {Fireproof}
110
111
  */
111
112
  this.database = database
113
+ if (!database.indexBlocks) {
114
+ database.indexBlocks = new TransactionBlockstore(database.name + '.indexes', database.blocks.valet.getKeyMaterial())
115
+ }
112
116
  /**
113
117
  * The map function to apply to each entry in the database.
114
118
  * @type {Function}
@@ -130,7 +134,7 @@ export default class DbIndex {
130
134
  }
131
135
  this.instanceId = this.database.instanceId + `.DbIndex.${Math.random().toString(36).substring(2, 7)}`
132
136
  this.updateIndexPromise = null
133
- DbIndex.registerWithDatabase(this, this.database)
137
+ if (!opts.temporary) { DbIndex.registerWithDatabase(this, this.database) }
134
138
  }
135
139
 
136
140
  static registerWithDatabase (inIndex, database) {
@@ -159,7 +163,7 @@ export default class DbIndex {
159
163
  }
160
164
 
161
165
  toJSON () {
162
- const indexJson = { code: this.mapFn?.toString(), clock: { db: null, byId: null, byKey: null } }
166
+ const indexJson = { code: this.mapFnString, clock: { db: null, byId: null, byKey: null } }
163
167
  indexJson.clock.db = this.dbHead?.map(cid => cid.toString())
164
168
  indexJson.clock.byId = this.indexById.cid?.toString()
165
169
  indexJson.clock.byKey = this.indexByKey.cid?.toString()
@@ -185,18 +189,18 @@ export default class DbIndex {
185
189
  * @memberof DbIndex
186
190
  * @instance
187
191
  */
188
- async query (query) {
192
+ async query (query, update = true) {
189
193
  // const callId = Math.random().toString(36).substring(2, 7)
190
194
  // if (!root) {
191
195
  // pass a root to query a snapshot
192
196
  // console.time(callId + '.#updateIndex')
193
- await this.#updateIndex(this.database.blocks)
197
+ update && await this.#updateIndex(this.database.indexBlocks)
194
198
  // console.timeEnd(callId + '.#updateIndex')
195
199
 
196
200
  // }
197
201
  // console.time(callId + '.doIndexQuery')
198
202
  // console.log('query', query)
199
- const response = await doIndexQuery(this.database.blocks, this.indexByKey, query)
203
+ const response = await doIndexQuery(this.database.indexBlocks, this.indexByKey, query)
200
204
  // console.timeEnd(callId + '.doIndexQuery')
201
205
 
202
206
  return {
@@ -266,6 +270,7 @@ export default class DbIndex {
266
270
  this.indexByKey = await bulkIndex(blocks, this.indexByKey, oldIndexEntries.concat(indexEntries), dbIndexOpts)
267
271
  this.dbHead = result.clock
268
272
  })
273
+ this.database.notifyExternal('dbIndex')
269
274
  // console.timeEnd(callTag + '.doTransaction#updateIndex')
270
275
  // console.log(`#updateIndex ${callTag} <`, this.instanceId, this.dbHead?.toString(), this.indexByKey.cid?.toString(), this.indexById.cid?.toString())
271
276
  }
@@ -317,7 +322,7 @@ async function loadIndex (blocks, index, indexOpts) {
317
322
  return index.root
318
323
  }
319
324
 
320
- async function doIndexQuery (blocks, indexByKey, query) {
325
+ async function doIndexQuery (blocks, indexByKey, query = {}) {
321
326
  await loadIndex(blocks, indexByKey, dbIndexOpts)
322
327
  if (query.range) {
323
328
  const encodedRange = query.range.map((key) => charwise.encode(key))
@@ -325,5 +330,12 @@ async function doIndexQuery (blocks, indexByKey, query) {
325
330
  } else if (query.key) {
326
331
  const encodedKey = charwise.encode(query.key)
327
332
  return indexByKey.root.get(encodedKey)
333
+ } else {
334
+ if (indexByKey.root) {
335
+ const { result, ...all } = await indexByKey.root.getAllEntries()
336
+ return { result: result.map(({ key: [k, id], value }) => ({ key: k, id, row: value })), ...all }
337
+ } else {
338
+ return { result: [] }
339
+ }
328
340
  }
329
341
  }
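Query behavior grounded in the test changes below: an empty query now returns every index entry, a second argument of `false` skips reindexing, and `{ temporary: true }` in the constructor options keeps an index from registering with the database. A sketch using the same relative import paths as the tests (and the same IndexedDB-backed environment):

```js
import Fireproof from '../src/fireproof.js'
import DbIndex from '../src/db-index.js'

const database = Fireproof.storage()
const byAge = new DbIndex(database, (doc, map) => map(doc.age, doc.name))
await database.put({ _id: 'a1', name: 'carol', age: 43 })

const inRange = await byAge.query({ range: [41, 44] }) // rows whose key falls in the range
const exact = await byAge.query({ key: 43 })           // exact-key lookup
const everything = await byAge.query()                 // new in 0.2.0: no query returns all entries
const snapshot = await byAge.query({ key: 43 }, false) // update = false: query without reindexing first
// new in 0.2.0: pass { temporary: true } as the fourth constructor argument to skip registerWithDatabase
```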
package/src/fireproof.js CHANGED
@@ -1,4 +1,5 @@
1
- import { vis, put, get, getAll, eventsSince } from './prolly.js'
1
+ import { randomBytes } from 'crypto'
2
+ import { visMerkleClock, visMerkleTree, vis, put, get, getAll, eventsSince } from './prolly.js'
2
3
  import TransactionBlockstore, { doTransaction } from './blockstore.js'
3
4
  import charwise from 'charwise'
4
5
 
@@ -27,8 +28,11 @@ export default class Fireproof {
27
28
  * @static
28
29
  * @returns {Fireproof} - a new Fireproof instance
29
30
  */
30
- static storage = (name) => {
31
- return new Fireproof(new TransactionBlockstore(name), [], { name })
31
+ static storage = (name = 'global') => {
32
+ const instanceKey = randomBytes(32).toString('hex') // pass null to disable encryption
33
+ // pick a random key from const validatedKeys
34
+ // const instanceKey = validatedKeys[Math.floor(Math.random() * validatedKeys.length)]
35
+ return new Fireproof(new TransactionBlockstore(name, instanceKey), [], { name })
32
36
  }
33
37
 
34
38
  constructor (blocks, clock, config, authCtx = {}) {
@@ -50,15 +54,22 @@ export default class Fireproof {
50
54
  toJSON () {
51
55
  // todo this also needs to return the index roots...
52
56
  return {
53
- clock: this.clock.map(cid => cid.toString()),
57
+ clock: this.clockToJSON(),
54
58
  name: this.name,
59
+ key: this.blocks.valet.getKeyMaterial(),
55
60
  indexes: [...this.indexes.values()].map(index => index.toJSON())
56
61
  }
57
62
  }
58
63
 
59
- hydrate ({ clock, name }) {
64
+ clockToJSON () {
65
+ return this.clock.map(cid => cid.toString())
66
+ }
67
+
68
+ hydrate ({ clock, name, key }) {
60
69
  this.name = name
61
70
  this.clock = clock
71
+ this.blocks.valet.setKeyMaterial(key)
72
+ this.indexBlocks = null
62
73
  }
63
74
 
64
75
  /**
@@ -71,7 +82,12 @@ export default class Fireproof {
71
82
  * @instance
72
83
  */
73
84
  async notifyReset () {
74
- await this.#notifyListeners({ reset: true, clock: this.clock })
85
+ await this.#notifyListeners({ _reset: true, _clock: this.clockToJSON() })
86
+ }
87
+
88
+ // used be indexes etc to notify database listeners of new availability
89
+ async notifyExternal (source = 'unknown') {
90
+ await this.#notifyListeners({ _external: source, _clock: this.clockToJSON() })
75
91
  }
76
92
 
77
93
  /**
@@ -85,6 +101,7 @@ export default class Fireproof {
85
101
  async changesSince (event) {
86
102
  // console.log('changesSince', this.instanceId, event, this.clock)
87
103
  let rows, dataCIDs, clockCIDs
104
+ // if (!event) event = []
88
105
  if (event) {
89
106
  const resp = await eventsSince(this.blocks, this.clock, event)
90
107
  const docsMap = new Map()
@@ -106,11 +123,21 @@ export default class Fireproof {
106
123
  }
107
124
  return {
108
125
  rows,
109
- clock: this.clock,
126
+ clock: this.clockToJSON(),
110
127
  proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) }
111
128
  }
112
129
  }
113
130
 
131
+ async allDocuments () {
132
+ const allResp = await getAll(this.blocks, this.clock)
133
+ const rows = allResp.result.map(({ key, value }) => (decodeEvent({ key, value }))).map(({ key, value }) => ({ key, value: { _id: key, ...value } }))
134
+ return {
135
+ rows,
136
+ clock: this.clockToJSON(),
137
+ proof: await cidsToProof(allResp.cids)
138
+ }
139
+ }
140
+
114
141
  /**
115
142
  * Registers a Listener to be called when the Fireproof instance's clock is updated.
116
143
  * Recieves live changes from the database after they are committed.
@@ -196,7 +223,7 @@ export default class Fireproof {
196
223
  */
197
224
  async #putToProllyTree (decodedEvent, clock = null) {
198
225
  const event = encodeEvent(decodedEvent)
199
- if (clock && JSON.stringify(clock) !== JSON.stringify(this.clock)) {
226
+ if (clock && JSON.stringify(clock) !== JSON.stringify(this.clockToJSON())) {
200
227
  // we need to check and see what version of the document exists at the clock specified
201
228
  // if it is the same as the one we are trying to put, then we can proceed
202
229
  const resp = await eventsSince(this.blocks, this.clock, event.value._clock)
@@ -219,7 +246,7 @@ export default class Fireproof {
219
246
  await this.#notifyListeners([decodedEvent]) // this type is odd
220
247
  return {
221
248
  id: decodedEvent.key,
222
- clock: this.clock,
249
+ clock: this.clockToJSON(),
223
250
  proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) }
224
251
  }
225
252
  // todo should include additions (or split clock)
@@ -268,11 +295,11 @@ export default class Fireproof {
268
295
  }
269
296
  const doc = resp.result
270
297
  if (opts.mvcc === true) {
271
- doc._clock = this.clock
298
+ doc._clock = this.clockToJSON()
272
299
  }
273
300
  doc._proof = {
274
301
  data: await cidsToProof(resp.cids),
275
- clock: this.clock
302
+ clock: this.clockToJSON()
276
303
  }
277
304
  doc._id = key
278
305
  return doc
@@ -282,6 +309,14 @@ export default class Fireproof {
282
309
  return yield * vis(this.blocks, this.clock)
283
310
  }
284
311
 
312
+ async visTree () {
313
+ return await visMerkleTree(this.blocks, this.clock)
314
+ }
315
+
316
+ async visClock () {
317
+ return await visMerkleClock(this.blocks, this.clock)
318
+ }
319
+
285
320
  setCarUploader (carUploaderFn) {
286
321
  // console.log('registering car uploader')
287
322
  // https://en.wikipedia.org/wiki/Law_of_Demeter - this is a violation of the law of demeter
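Taken together, the new surface on a database instance looks like this. A sketch: `Fireproof` is imported as a named export from the package index (as the hooks file does), and the valet's IndexedDB store is assumed to be available.

```js
import { Fireproof } from '@fireproof/core'

const database = Fireproof.storage('helloName') // 0.2.0 also generates a random encryption key per instance
const resp = await database.put({ _id: 'alice', hello: 'world' })
console.log(resp.id, resp.clock, resp.proof)    // clock is now an array of CID strings (clockToJSON)

const doc = await database.get(resp.id, { mvcc: true })
console.log(doc._clock, doc._proof)             // proof CIDs backing this read

const all = await database.allDocuments()       // new in 0.2.0: every current document, with clock and proof
const changes = await database.changesSince(resp.clock)
console.log(all.rows.length, changes.rows.length)
```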
package/src/hydrator.js CHANGED
@@ -6,16 +6,18 @@ const parseCID = cid => typeof cid === 'string' ? CID.parse(cid) : cid
6
6
 
7
7
  export default class Hydrator {
8
8
  static fromJSON (json, database) {
9
- database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name })
10
- for (const { code, clock: { byId, byKey, db } } of json.indexes) {
11
- DbIndex.fromJSON(database, {
12
- clock: {
13
- byId: byId ? parseCID(byId) : null,
14
- byKey: byKey ? parseCID(byKey) : null,
15
- db: db ? db.map(c => parseCID(c)) : null
16
- },
17
- code
18
- })
9
+ database.hydrate({ clock: json.clock.map(c => parseCID(c)), name: json.name, key: json.key })
10
+ if (json.indexes) {
11
+ for (const { code, clock: { byId, byKey, db } } of json.indexes) {
12
+ DbIndex.fromJSON(database, {
13
+ clock: {
14
+ byId: byId ? parseCID(byId) : null,
15
+ byKey: byKey ? parseCID(byKey) : null,
16
+ db: db ? db.map(c => parseCID(c)) : null
17
+ },
18
+ code
19
+ })
20
+ }
19
21
  }
20
22
  return database
21
23
  }
@@ -45,7 +47,7 @@ export default class Hydrator {
45
47
  index.dbHead = null
46
48
  })
47
49
  database.clock = clock.map(c => parseCID(c))
48
- await database.notifyReset()
50
+ await database.notifyReset() // hmm... indexes should listen to this? might be more complex than worth it. so far this is the only caller
49
51
  return database
50
52
  }
51
53
  }
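This is the persistence round-trip the React hook relies on: `JSON.stringify(database)` captures the clock, name, and (new in 0.2.0) the encryption key, and `Hydrator.fromJSON` restores them into a fresh instance of the same name, which then reopens the matching encrypted store. A sketch, under the same import and environment assumptions as above:

```js
import { Fireproof, Hydrator } from '@fireproof/core'

const database = Fireproof.storage('myDb')
await database.put({ _id: 'one', any: 'data' })

const snapshot = JSON.stringify(database) // { clock, name, key, indexes } via toJSON()

// Later (e.g. after a page reload), hydrate a new instance before using it:
const restored = Hydrator.fromJSON(JSON.parse(snapshot), Fireproof.storage('myDb'))
const doc = await restored.get('one') // reads back through the key-matched valet store
```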
package/src/listener.js CHANGED
@@ -19,7 +19,7 @@ export default class Listener {
19
19
  * @type {Fireproof}
20
20
  */
21
21
  this.database = database
22
- this.#doStopListening = database.registerListener((changes) => this.#onChanges(changes))
22
+ this.#doStopListening = database.registerListener(changes => this.#onChanges(changes))
23
23
  /**
24
24
  * The map function to apply to each entry in the database.
25
25
  * @type {Function}
@@ -46,7 +46,7 @@ export default class Listener {
46
46
  if (typeof since !== 'undefined') {
47
47
  this.database.changesSince(since).then(({ rows: changes }) => {
48
48
  const keys = topicsForChanges(changes, this.routingFn).get(topic)
49
- if (keys) keys.forEach((key) => subscriber(key))
49
+ if (keys) keys.forEach(key => subscriber(key))
50
50
  })
51
51
  }
52
52
  return () => {
@@ -60,18 +60,14 @@ export default class Listener {
60
60
  const seenTopics = topicsForChanges(changes, this.routingFn)
61
61
  for (const [topic, keys] of seenTopics) {
62
62
  const listOfTopicSubscribers = getTopicList(this.#subcribers, topic)
63
- listOfTopicSubscribers.forEach((subscriber) => keys.forEach((key) => subscriber(key)))
63
+ listOfTopicSubscribers.forEach(subscriber => keys.forEach(key => subscriber(key)))
64
64
  }
65
65
  } else {
66
- // reset event
67
- if (changes.reset) {
68
- for (const [, listOfTopicSubscribers] of this.#subcribers) {
69
- listOfTopicSubscribers.forEach((subscriber) => subscriber(changes))
70
- }
66
+ // non-arrays go to all subscribers
67
+ for (const [, listOfTopicSubscribers] of this.#subcribers) {
68
+ listOfTopicSubscribers.forEach(subscriber => subscriber(changes))
71
69
  }
72
70
  }
73
- // if changes is special, notify all listeners?
74
- // first make the example app use listeners
75
71
  }
76
72
  }
77
73
 
@@ -99,7 +95,7 @@ const topicsForChanges = (changes, routingFn) => {
99
95
  const seenTopics = new Map()
100
96
  changes.forEach(({ key, value, del }) => {
101
97
  if (del || !value) value = { _deleted: true }
102
- routingFn(makeDoc({ key, value }), (t) => {
98
+ routingFn(makeDoc({ key, value }), t => {
103
99
  const topicList = getTopicList(seenTopics, t)
104
100
  topicList.push(key)
105
101
  })
package/src/prolly.js CHANGED
@@ -3,7 +3,8 @@ import {
3
3
  EventFetcher,
4
4
  EventBlock,
5
5
  findCommonAncestorWithSortedEvents,
6
- findEventsToSync
6
+ findEventsToSync,
7
+ vis as visClock
7
8
  } from './clock.js'
8
9
  import { create, load } from 'prolly-trees/map'
9
10
  // import { create, load } from '../../../../prolly-trees/src/map.js'
@@ -13,7 +14,7 @@ import * as codec from '@ipld/dag-cbor'
13
14
  import { sha256 as hasher } from 'multiformats/hashes/sha2'
14
15
  import { doTransaction } from './blockstore.js'
15
16
  import { create as createBlock } from 'multiformats/block'
16
- const opts = { cache, chunker: bf(3), codec, hasher, compare }
17
+ const blockOpts = { cache, chunker: bf(3), codec, hasher, compare }
17
18
 
18
19
  const withLog = async (label, fn) => {
19
20
  const resp = await fn()
@@ -135,7 +136,7 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
135
136
  const event = await events.get(ancestor)
136
137
  const { root } = event.value.data
137
138
  // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
138
- return load({ cid: root.cid, get: getBlock, ...opts })
139
+ return load({ cid: root.cid, get: getBlock, ...blockOpts })
139
140
  }
140
141
 
141
142
  /**
@@ -155,7 +156,7 @@ export async function put (inBlocks, head, event, options) {
155
156
  if (!head.length) {
156
157
  const additions = new Map()
157
158
  let root
158
- for await (const node of create({ get: getBlock, list: [event], ...opts })) {
159
+ for await (const node of create({ get: getBlock, list: [event], ...blockOpts })) {
159
160
  root = await node.block
160
161
  bigPut(root, additions)
161
162
  }
@@ -278,6 +279,28 @@ export async function * vis (blocks, head) {
278
279
  const lines = []
279
280
  for await (const line of prollyRootNode.vis()) {
280
281
  yield line
282
+ lines.push(line)
281
283
  }
282
284
  return { vis: lines.join('\n'), cids }
283
285
  }
286
+
287
+ export async function visMerkleTree (blocks, head) {
288
+ if (!head.length) {
289
+ return { cids: new CIDCounter(), result: null }
290
+ }
291
+ const { node: prollyRootNode, cids } = await root(blocks, head)
292
+ const lines = []
293
+ for await (const line of prollyRootNode.vis()) {
294
+ lines.push(line)
295
+ }
296
+ return { vis: lines.join('\n'), cids }
297
+ }
298
+
299
+ export async function visMerkleClock (blocks, head) {
300
+ const lines = []
301
+ for await (const line of visClock(blocks, head)) {
302
+ // yield line
303
+ lines.push(line)
304
+ }
305
+ return { vis: lines.join('\n') }
306
+ }
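These wrappers back the new `visTree()` and `visClock()` methods on the database (added in fireproof.js above); both collect the visualization lines into a single printable string.

```js
import { Fireproof } from '@fireproof/core'

const database = Fireproof.storage('vis-demo')
await database.put({ _id: 'doc1', hello: 'world' })

const { vis: treeVis, cids } = await database.visTree() // prolly-tree layout plus the CIDs it touched
const { vis: clockVis } = await database.visClock()     // merkle-clock event graph
console.log(treeVis)
console.log(clockVis)
```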
package/src/sha1.js ADDED
@@ -0,0 +1,82 @@
1
+ // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
2
+ // MIT License Copyright (c) 2020 Dumitru Uzun
3
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
4
+ // of this software and associated documentation files (the "Software"), to deal
5
+ // in the Software without restriction, including without limitation the rights
6
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7
+ // copies of the Software, and to permit persons to whom the Software is
8
+ // furnished to do so, subject to the following conditions:
9
+
10
+ // The above copyright notice and this permission notice shall be included in all
11
+ // copies or substantial portions of the Software.
12
+
13
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19
+ // SOFTWARE.
20
+
21
+ // import {
22
+ // isLittleEndian, switchEndianness32
23
+ // } from 'string-encode'
24
+
25
+ /**
26
+ * SHA1 on binary array
27
+ *
28
+ * @param {Uint8Array} b Data to hash
29
+ *
30
+ * @return {Uint8Array} sha1 hash
31
+ */
32
+ export default function rawSha1 (b) {
33
+ let i = b.byteLength
34
+ let bs = 0
35
+ let A; let B; let C; let D; let G
36
+ const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0])
37
+ const W = new Uint32Array(80)
38
+ const nrWords = (i / 4 + 2) | 15
39
+ const words = new Uint32Array(nrWords + 1)
40
+ let j
41
+
42
+ words[nrWords] = i * 8
43
+ words[i >> 2] |= 0x80 << (~i << 3)
44
+ for (;i--;) {
45
+ words[i >> 2] |= b[i] << (~i << 3)
46
+ }
47
+
48
+ for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
49
+ for (i = 0; i < 80;
50
+ A[0] = (
51
+ G = ((b = A[0]) << 5 | b >>> 27) +
52
+ A[4] +
53
+ (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
54
+ 0x5A827999,
55
+ B = A[1],
56
+ C = A[2],
57
+ D = A[3],
58
+ G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
59
+ ? j !== 2
60
+ ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
61
+ : (B & C | B & D | C & D) + 0x34994343
62
+ : B & C | ~B & D
63
+ )
64
+ )
65
+ , A[1] = b
66
+ , A[2] = B << 30 | B >>> 2
67
+ , A[3] = C
68
+ , A[4] = D
69
+ , ++i
70
+ ) {
71
+ G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]
72
+ }
73
+
74
+ for (i = 5; i;) H[--i] = H[i] + A[i]
75
+ }
76
+
77
+ // if (isLittleEndian()) {
78
+ // H = H.map(switchEndianness32)
79
+ // }
80
+
81
+ return new Uint8Array(H.buffer, H.byteOffset, H.byteLength)
82
+ }
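This synchronous SHA-1 is used only to derive a short identifier from the encryption key: `setKeyMaterial` in valet.js (below) hashes the key bytes and stores the hex digest as `keyId`, which becomes part of the IndexedDB database name. A sketch of that derivation (relative import path assumed):

```js
import rawSha1 from './sha1.js'
import { Buffer } from 'buffer'

const keyMaterial = 'aa'.repeat(32)                          // any 64-char hex key, e.g. from `npm run keygen`
const keyBytes = Uint8Array.from(Buffer.from(keyMaterial, 'hex'))
const keyId = Buffer.from(rawSha1(keyBytes)).toString('hex') // 40 hex chars, used in `fp.${keyId}.${name}.valet`
```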
package/src/valet.js CHANGED
@@ -1,12 +1,30 @@
1
1
  import { CarReader } from '@ipld/car'
2
2
  import { CID } from 'multiformats/cid'
3
+ import { sha256 } from 'multiformats/hashes/sha2'
4
+ import * as CBW from '@ipld/car/buffer-writer'
5
+ import * as raw from 'multiformats/codecs/raw'
6
+ import * as Block from 'multiformats/block'
7
+ import * as dagcbor from '@ipld/dag-cbor'
3
8
  import { openDB } from 'idb'
4
9
  import cargoQueue from 'async/cargoQueue.js'
10
+ import { bf } from 'prolly-trees/utils'
11
+ import { nocache as cache } from 'prolly-trees/cache'
12
+ import { encrypt, decrypt } from './crypto.js'
13
+ import { Buffer } from 'buffer'
14
+ import * as codec from 'encrypted-block'
15
+ import sha1sync from './sha1.js'
16
+ const chunker = bf(3)
17
+
18
+ const NO_ENCRYPT =
19
+ typeof process !== 'undefined' ? process.env.NO_ENCRYPT : import.meta && import.meta.env.VITE_NO_ENCRYPT
5
20
 
6
21
  export default class Valet {
7
22
  idb = null
23
+ name = null
8
24
  #uploadQueue = null
9
25
  #alreadyEnqueued = new Set()
26
+ #keyMaterial = null
27
+ keyId = 'null'
10
28
 
11
29
  /**
12
30
  * Function installed by the database to upload car files
@@ -14,8 +32,9 @@ export default class Valet {
14
32
  */
15
33
  uploadFunction = null
16
34
 
17
- constructor (name = 'default') {
35
+ constructor (name = 'default', keyMaterial) {
18
36
  this.name = name
37
+ this.setKeyMaterial(keyMaterial)
19
38
  this.#uploadQueue = cargoQueue(async (tasks, callback) => {
20
39
  console.log(
21
40
  'queue worker',
@@ -24,7 +43,7 @@ export default class Valet {
24
43
  )
25
44
  if (this.uploadFunction) {
26
45
  // todo we can coalesce these into a single car file
27
- return await this.withDB(async (db) => {
46
+ return await this.withDB(async db => {
28
47
  for (const task of tasks) {
29
48
  await this.uploadFunction(task.carCid, task.value)
30
49
  // update the indexedb to mark this car as no longer pending
@@ -38,8 +57,8 @@ export default class Valet {
38
57
  })
39
58
 
40
59
  this.#uploadQueue.drain(async () => {
41
- return await this.withDB(async (db) => {
42
- const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map((c) => c.car)
60
+ return await this.withDB(async db => {
61
+ const carKeys = (await db.getAllFromIndex('cidToCar', 'pending')).map(c => c.car)
43
62
  for (const carKey of carKeys) {
44
63
  await this.uploadFunction(carKey, await db.get('cars', carKey))
45
64
  const carMeta = await db.get('cidToCar', carKey)
@@ -50,9 +69,46 @@ export default class Valet {
50
69
  })
51
70
  }
52
71
 
53
- withDB = async (dbWorkFun) => {
72
+ getKeyMaterial () {
73
+ return this.#keyMaterial
74
+ }
75
+
76
+ setKeyMaterial (km) {
77
+ if (km && !NO_ENCRYPT) {
78
+ const hex = Uint8Array.from(Buffer.from(km, 'hex'))
79
+ this.#keyMaterial = km
80
+ const hash = sha1sync(hex)
81
+ this.keyId = Buffer.from(hash).toString('hex')
82
+ } else {
83
+ this.#keyMaterial = null
84
+ this.keyId = 'null'
85
+ }
86
+ // console.trace('keyId', this.name, this.keyId)
87
+ }
88
+
89
+ /**
90
+ * Group the blocks into a car and write it to the valet.
91
+ * @param {InnerBlockstore} innerBlockstore
92
+ * @param {Set<string>} cids
93
+ * @returns {Promise<void>}
94
+ * @memberof Valet
95
+ */
96
+ async writeTransaction (innerBlockstore, cids) {
97
+ if (innerBlockstore.lastCid) {
98
+ if (this.#keyMaterial) {
99
+ // console.log('encrypting car', innerBlockstore.label)
100
+ const newCar = await blocksToEncryptedCarBlock(innerBlockstore.lastCid, innerBlockstore, this.#keyMaterial)
101
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
102
+ } else {
103
+ const newCar = await blocksToCarBlock(innerBlockstore.lastCid, innerBlockstore)
104
+ await this.parkCar(newCar.cid.toString(), newCar.bytes, cids)
105
+ }
106
+ }
107
+ }
108
+
109
+ withDB = async dbWorkFun => {
54
110
  if (!this.idb) {
55
- this.idb = await openDB(`fp.${this.name}.valet`, 2, {
111
+ this.idb = await openDB(`fp.${this.keyId}.${this.name}.valet`, 2, {
56
112
  upgrade (db, oldVersion, newVersion, transaction) {
57
113
  if (oldVersion < 1) {
58
114
  db.createObjectStore('cars') // todo use database name
@@ -75,7 +131,7 @@ export default class Valet {
75
131
  * @param {*} value
76
132
  */
77
133
  async parkCar (carCid, value, cids) {
78
- await this.withDB(async (db) => {
134
+ await this.withDB(async db => {
79
135
  const tx = db.transaction(['cars', 'cidToCar'], 'readwrite')
80
136
  await tx.objectStore('cars').put(value, carCid)
81
137
  await tx.objectStore('cidToCar').put({ pending: 'y', car: carCid, cids: Array.from(cids) })
@@ -100,7 +156,7 @@ export default class Valet {
100
156
  remoteBlockFunction = null
101
157
 
102
158
  async getBlock (dataCID) {
103
- return await this.withDB(async (db) => {
159
+ return await this.withDB(async db => {
104
160
  const tx = db.transaction(['cars', 'cidToCar'], 'readonly')
105
161
  const indexResp = await tx.objectStore('cidToCar').index('cids').get(dataCID)
106
162
  const carCid = indexResp?.car
@@ -109,10 +165,112 @@ export default class Valet {
109
165
  }
110
166
  const carBytes = await tx.objectStore('cars').get(carCid)
111
167
  const reader = await CarReader.fromBytes(carBytes)
112
- const gotBlock = await reader.get(CID.parse(dataCID))
113
- if (gotBlock) {
114
- return gotBlock.bytes
168
+ if (this.#keyMaterial) {
169
+ const roots = await reader.getRoots()
170
+ const readerGetWithCodec = async cid => {
171
+ const got = await reader.get(cid)
172
+ // console.log('got.', cid.toString())
173
+ let useCodec = codec
174
+ if (cid.toString().indexOf('bafy') === 0) {
175
+ useCodec = dagcbor
176
+ }
177
+ const decoded = await Block.decode({
178
+ ...got,
179
+ codec: useCodec,
180
+ hasher: sha256
181
+ })
182
+ // console.log('decoded', decoded.value)
183
+ return decoded
184
+ }
185
+ const { blocks } = await blocksFromEncryptedCarBlock(roots[0], readerGetWithCodec, this.#keyMaterial)
186
+ const block = blocks.find(b => b.cid.toString() === dataCID)
187
+ if (block) {
188
+ return block.bytes
189
+ }
190
+ } else {
191
+ const gotBlock = await reader.get(CID.parse(dataCID))
192
+ if (gotBlock) {
193
+ return gotBlock.bytes
194
+ }
115
195
  }
116
196
  })
117
197
  }
118
198
  }
199
+
200
+ const blocksToCarBlock = async (lastCid, blocks) => {
201
+ let size = 0
202
+ const headerSize = CBW.headerLength({ roots: [lastCid] })
203
+ size += headerSize
204
+ if (!Array.isArray(blocks)) {
205
+ blocks = Array.from(blocks.entries())
206
+ }
207
+ for (const { cid, bytes } of blocks) {
208
+ size += CBW.blockLength({ cid, bytes })
209
+ }
210
+ const buffer = new Uint8Array(size)
211
+ const writer = await CBW.createWriter(buffer, { headerSize })
212
+
213
+ writer.addRoot(lastCid)
214
+
215
+ for (const { cid, bytes } of blocks) {
216
+ writer.write({ cid, bytes })
217
+ }
218
+ await writer.close()
219
+ return await Block.encode({ value: writer.bytes, hasher: sha256, codec: raw })
220
+ }
221
+
222
+ const blocksToEncryptedCarBlock = async (innerBlockStoreClockRootCid, blocks, keyMaterial) => {
223
+ const encryptionKey = Buffer.from(keyMaterial, 'hex')
224
+ const encryptedBlocks = []
225
+ const theCids = []
226
+ for (const { cid } of blocks.entries()) {
227
+ theCids.push(cid.toString())
228
+ }
229
+
230
+ let last
231
+ for await (const block of encrypt({
232
+ cids: theCids,
233
+ get: async cid => blocks.get(cid), // maybe we can just use blocks.get
234
+ key: encryptionKey,
235
+ hasher: sha256,
236
+ chunker,
237
+ codec: dagcbor, // should be crypto?
238
+ root: innerBlockStoreClockRootCid
239
+ })) {
240
+ encryptedBlocks.push(block)
241
+ last = block
242
+ }
243
+ // console.log('last', last.cid.toString(), 'for clock', innerBlockStoreClockRootCid.toString())
244
+ const encryptedCar = await blocksToCarBlock(last.cid, encryptedBlocks)
245
+ return encryptedCar
246
+ }
247
+ // { root, get, key, cache, chunker, hasher }
248
+
249
+ const memoizeDecryptedCarBlocks = new Map()
250
+ const blocksFromEncryptedCarBlock = async (cid, get, keyMaterial) => {
251
+ if (memoizeDecryptedCarBlocks.has(cid.toString())) {
252
+ return memoizeDecryptedCarBlocks.get(cid.toString())
253
+ } else {
254
+ const blocksPromise = (async () => {
255
+ const decryptionKey = Buffer.from(keyMaterial, 'hex')
256
+ // console.log('decrypting', keyMaterial, cid.toString())
257
+ const cids = new Set()
258
+ const decryptedBlocks = []
259
+ for await (const block of decrypt({
260
+ root: cid,
261
+ get,
262
+ key: decryptionKey,
263
+ chunker,
264
+ hasher: sha256,
265
+ cache,
266
+ codec: dagcbor
267
+ })) {
268
+ decryptedBlocks.push(block)
269
+ cids.add(block.cid.toString())
270
+ }
271
+ return { blocks: decryptedBlocks, cids }
272
+ })()
273
+ memoizeDecryptedCarBlocks.set(cid.toString(), blocksPromise)
274
+ return blocksPromise
275
+ }
276
+ }
@@ -30,10 +30,10 @@ describe('DbIndex query', () => {
30
30
  })
31
31
  })
32
32
  it('query index range', async () => {
33
- const result = await index.query({ range: [41, 44] })
33
+ const result = await index.query({ range: [41, 49] })
34
34
  assert(result, 'did return result')
35
35
  assert(result.rows)
36
- assert.equal(result.rows.length, 1, 'one row matched')
36
+ assert.equal(result.rows.length, 2, 'two row matched')
37
37
  assert.equal(result.rows[0].key, 43)
38
38
  assert(result.rows[0].value === 'carol', 'correct value')
39
39
  })
@@ -47,6 +47,33 @@ describe('DbIndex query', () => {
47
47
  assert.equal(result.rows[0].key, 43)
48
48
  assert(result.rows[0].value === 'carol', 'correct value')
49
49
  })
50
+ it('query index all', async () => {
51
+ const result = await index.query()
52
+ assert(result, 'did return result')
53
+ assert(result.rows)
54
+ assert.equal(result.rows.length, 6, 'six row matched')
55
+ assert.equal(result.rows[0].key, 4)
56
+ assert.equal(result.rows[0].value, 'emily')
57
+ assert.equal(result.rows[result.rows.length - 1].value, 'dave')
58
+ })
59
+ it('query index NaN', async () => {
60
+ const result = await index.query({ range: [NaN, 44] })
61
+ assert(result, 'did return result')
62
+ assert(result.rows)
63
+ assert.equal(result.rows.length, 5, 'six row matched')
64
+ assert.equal(result.rows[0].key, 4)
65
+ assert.equal(result.rows[0].value, 'emily')
66
+ assert.equal(result.rows[result.rows.length - 1].value, 'carol')
67
+ })
68
+ it('query index Infinity', async () => {
69
+ const result = await index.query({ range: [42, Infinity] })
70
+ assert(result, 'did return result')
71
+ assert(result.rows)
72
+ assert.equal(result.rows.length, 2, 'six row matched')
73
+ assert.equal(result.rows[0].key, 43)
74
+ assert.equal(result.rows[0].value, 'carol')
75
+ assert.equal(result.rows[result.rows.length - 1].value, 'dave')
76
+ })
50
77
  it('query twice', async () => {
51
78
  let result = await index.query({ range: [41, 44] })
52
79
  assert(result, 'did return result')
@@ -20,10 +20,13 @@ describe('Fireproof', () => {
20
20
  })
21
21
  it('takes an optional name', () => {
22
22
  assert.equal(database.name, 'helloName')
23
+ const km = database.blocks.valet.getKeyMaterial()
24
+ if (process.env.NO_ENCRYPT) { assert.equal(km, null) } else { assert.equal(km.length, 64) }
23
25
  const x = database.blocks.valet.idb
24
- assert.equal(x.name.toString(), 'fp.helloName.valet')
26
+ const keyId = database.blocks.valet.keyId
27
+ assert.equal(x.name.toString(), `fp.${keyId}.helloName.valet`)
25
28
  })
26
- it('put and get document', async () => {
29
+ it('only put and get document', async () => {
27
30
  assert(resp0.id, 'should have id')
28
31
  assert.equal(resp0.id, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
29
32
  const avalue = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
@@ -103,7 +106,11 @@ describe('Fireproof', () => {
103
106
  console.log('err', err)
104
107
  assert.match(err.message, /MVCC conflict/)
105
108
  })
106
-
109
+ it('allDocuments', async () => {
110
+ await database.put({ name: 'bob' })
111
+ const allDocs = await database.allDocuments()
112
+ assert.equal(allDocs.rows.length, 2)
113
+ })
107
114
  it('has a factory for making new instances with default settings', async () => {
108
115
  // TODO if you pass it an email it asks the local keyring, and if no key, does the email validation thing
109
116
  const db = await Fireproof.storage({ email: 'jchris@gmail.com' })
@@ -367,6 +374,24 @@ describe('Fireproof', () => {
367
374
  }
368
375
  })
369
376
 
377
+ database.blocks.clearCommittedCache() // clear cache to force re-reading from encrypted store
378
+
379
+ doc = await database.get(resp.id).catch(e => {
380
+ console.log('failed', e)
381
+ assert.fail(`get failed on _id: ${id}, error: ${e.message}`)
382
+ })
383
+
384
+ assert.equal(doc.index, index, `doc.index is not equal to index for _id: ${id}`)
385
+ changes = await database.changesSince().catch(async e => {
386
+ assert.fail(`changesSince failed on _id: ${id}, error: ${e.message}`)
387
+ })
388
+ changes.rows.forEach(row => {
389
+ for (const key in row) {
390
+ const value = row[key]
391
+ assert(!/^bafy/.test(value), `Unexpected "bafy..." value found at index ${index} in row ${JSON.stringify(row)}`)
392
+ }
393
+ })
394
+
370
395
  // console.log('changes: ', index, changes.rows.length, JSON.stringify(changes.rows))
371
396
  assert.equal(changes.rows.length, index + 2, `failed on ${index}, with ${changes.rows.length} ${id}`)
372
397
  }
@@ -411,4 +436,25 @@ describe('Fireproof', () => {
411
436
  assert.equal(serialized.name, 'helloName')
412
437
  assert.equal(serialized.clock.length, 1)
413
438
  })
439
+ it('clocked changes in order', async () => {
440
+ await database.put({ _id: '2' })
441
+ await database.put({ _id: 'three' })
442
+ await database.put({ _id: '4' })
443
+ const changes = await database.changesSince(resp0.clock)
444
+ assert.equal(changes.rows.length, 3)
445
+ assert.equal(changes.rows[0].key, '2')
446
+ assert.equal(changes.rows[1].key, 'three')
447
+ assert.equal(changes.rows[2].key, '4')
448
+ })
449
+ it.skip('changes in order', async () => {
450
+ await database.put({ _id: '2' })
451
+ await database.put({ _id: 'three' })
452
+ await database.put({ _id: '4' })
453
+ const changes = await database.changesSince()
454
+ assert.equal(changes.rows.length, 4)
455
+ assert.equal(changes.rows[0].key, resp0.id)
456
+ assert.equal(changes.rows[1].key, '2')
457
+ assert.equal(changes.rows[2].key, 'three')
458
+ assert.equal(changes.rows[3].key, '4')
459
+ })
414
460
  })
@@ -3,7 +3,6 @@ import assert from 'node:assert'
3
3
  import Fireproof from '../src/fireproof.js'
4
4
  import DbIndex from '../src/db-index.js'
5
5
  import Hydrator from '../src/hydrator.js'
6
- console.x = function () {}
7
6
 
8
7
  describe('DbIndex query', () => {
9
8
  let database, index
@@ -36,6 +35,9 @@ describe('DbIndex query', () => {
36
35
  const serialized = database.toJSON()
37
36
  // console.log('serialized', serialized)
38
37
  assert.equal(serialized.name, 'global')
38
+ if (database.blocks.valet.keyId !== 'null') {
39
+ assert.equal(serialized.key.length, 64)
40
+ }
39
41
  assert.equal(serialized.clock.length, 1)
40
42
  assert.equal(serialized.clock[0].constructor.name, 'String')
41
43
  assert.equal(serialized.indexes.length, 1)