@fireproof/core 0.0.4 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,7 @@
 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
 // @ts-ignore
 import { useEffect, useState, createContext } from 'react'
-import { Fireproof, Listener } from '@fireproof/core'
+import { Fireproof, Listener } from '../index'
 
 export interface FireproofCtxValue {
   addSubscriber: (label: String, fn: Function) => void
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fireproof/core",
-  "version": "0.0.4",
+  "version": "0.0.6",
   "description": "Realtime database for IPFS",
   "main": "index.js",
   "type": "module",
@@ -37,12 +37,13 @@
     "cli-color": "^2.0.3",
     "idb": "^7.1.1",
     "multiformats": "^11.0.1",
-    "prolly-trees": "^0.2.2",
+    "prolly-trees": "^0.2.5",
     "sade": "^1.8.1"
   },
   "devDependencies": {
     "c8": "^7.12.0",
     "fake-indexeddb": "^4.0.1",
+    "flexsearch": "^0.7.31",
     "mocha": "^10.2.0",
     "nanoid": "^4.0.0",
     "standard": "^17.0.0"
package/src/clock.js CHANGED
@@ -1,6 +1,7 @@
 import { Block, encode, decode } from 'multiformats/block'
 import { sha256 } from 'multiformats/hashes/sha2'
 import * as cbor from '@ipld/dag-cbor'
+import { CIDCounter } from 'prolly-trees/utils'
 
 /**
  * @template T
@@ -29,10 +30,10 @@ import * as cbor from '@ipld/dag-cbor'
 export async function advance (blocks, head, event) {
   /** @type {EventFetcher<T>} */
   const events = new EventFetcher(blocks)
-  const headmap = new Map(head.map(cid => [cid.toString(), cid]))
+  const headmap = new Map(head.map((cid) => [cid.toString(), cid]))
 
   // Check if the headmap already includes the event, return head if it does
-  if (headmap.has(event.toString())) return head
+  if (headmap.has(event.toString())) return { head, cids: events.cids }
 
   // Does event contain the clock?
   let changed = false
@@ -46,18 +47,18 @@ export async function advance (blocks, head, event) {
 
   // If the headmap has been changed, return the new headmap values
   if (changed) {
-    return [...headmap.values()]
+    return { head: [...headmap.values()], cids: events.cids }
   }
 
   // Does clock contain the event?
   for (const p of head) {
     if (await contains(events, p, event)) {
-      return head
+      return { head, cids: events.cids }
     }
   }
 
   // Return the head concatenated with the new event if it passes both checks
-  return head.concat(event)
+  return { head: head.concat(event), cids: events.cids }
 }
 
 /**
@@ -92,6 +93,7 @@ export class EventFetcher {
   constructor (blocks) {
     /** @private */
     this._blocks = blocks
+    this._cids = new CIDCounter()
   }
 
   /**
@@ -100,9 +102,15 @@ export class EventFetcher {
    */
   async get (link) {
     const block = await this._blocks.get(link)
+    this._cids.add({ address: link })
     if (!block) throw new Error(`missing block: ${link}`)
     return decodeEventBlock(block.bytes)
   }
+
+  async all () {
+    await Promise.all([...this._cids])
+    return this._cids
+  }
 }
 
 /**
@@ -145,7 +153,7 @@ async function contains (events, a, b) {
     if (link.toString() === b.toString()) return true
     // if any of b's parents are this link, then b cannot exist in any of the
     // tree below, since that would create a cycle.
-    if (bevent.parents.some(p => link.toString() === p.toString())) continue
+    if (bevent.parents.some((p) => link.toString() === p.toString())) continue
     const { value: event } = await events.get(link)
     links.push(...event.parents)
   }
@@ -160,11 +168,11 @@ async function contains (events, a, b) {
  * @param {(b: EventBlockView<T>) => string} [options.renderNodeLabel]
  */
 export async function * vis (blocks, head, options = {}) {
-  const renderNodeLabel = options.renderNodeLabel ?? (b => (b.value.data.value))
+  const renderNodeLabel = options.renderNodeLabel ?? ((b) => b.value.data.value)
   const events = new EventFetcher(blocks)
   yield 'digraph clock {'
   yield ' node [shape=point fontname="Courier"]; head;'
-  const hevents = await Promise.all(head.map(link => events.get(link)))
+  const hevents = await Promise.all(head.map((link) => events.get(link)))
   const links = []
   const nodes = new Set()
   for (const e of hevents) {
@@ -192,46 +200,22 @@ export async function * vis (blocks, head, options = {}) {
 }
 
 export async function findEventsToSync (blocks, head) {
-  const toSync = await findUnknownSortedEvents(blocks, head, await findCommonAncestorWithSortedEvents(blocks, head))
-  return toSync
-}
-
-export async function findUnknownSortedEvents (blocks, children, { ancestor, sorted }) {
   const events = new EventFetcher(blocks)
-  // const childrenCids = children.map(c => c.toString())
-  // const lowerEvent = sorted.find(({ cid }) => childrenCids.includes(cid.toString()))
-  // const knownAncestor = await findCommonAncestor(events, [lowerEvent.cid]) // should this be [lowerEvent.cid] ?
-  // const knownAncestor = await findCommonAncestor(events, [...children]) // should this be [lowerEvent.cid] ?
-  // console.x('already knownAncestor', knownAncestor.toString() === ancestor.toString(),
-  //   (await (await decodeEventBlock((await blocks.get(knownAncestor)).bytes)).value.data?.value), knownAncestor
-  // )
-
-  const matchHead = [ancestor]
-  const unknownSorted = await asyncFilter(sorted, async (uks) => {
-    for (const ev of matchHead) {
-      const isIn = await contains(events, ev, uks.cid)
-      if (isIn) return false
-    }
-    return true
-  })
-  // console.x('unknownSorted contains', unknownSorted.length, sorted.length)
-  return unknownSorted
+  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
+  const toSync = await asyncFilter(sorted, async (uks) => !(await contains(events, ancestor, uks.cid)))
+  return { cids: events.cids, events: toSync }
 }
 
-const asyncFilter = async (arr, predicate) => Promise.all(arr.map(predicate))
-  .then((results) => arr.filter((_v, index) => results[index]))
-
-export async function findCommonAncestorWithSortedEvents (blocks, children) {
-  const events = new EventFetcher(blocks)
+const asyncFilter = async (arr, predicate) =>
+  Promise.all(arr.map(predicate)).then((results) => arr.filter((_v, index) => results[index]))
 
+export async function findCommonAncestorWithSortedEvents (events, children) {
   const ancestor = await findCommonAncestor(events, children)
   if (!ancestor) {
     throw new Error('failed to find common ancestor event')
   }
   // Sort the events by their sequence number
   const sorted = await findSortedEvents(events, children, ancestor)
-  // console.x('ancstor', ancestor, (await decodeEventBlock((await blocks.get(ancestor)).bytes)).value.data?.value)
-  // sorted.forEach(({ cid, value }) => console.x('xsorted', cid, value.data.value))
   return { ancestor, sorted }
 }
 
@@ -266,9 +250,7 @@ async function findCommonAncestor (events, children) {
 async function findAncestorCandidate (events, root) {
   const { value: event } = await events.get(root)
   if (!event.parents.length) return root
-  return event.parents.length === 1
-    ? event.parents[0]
-    : findCommonAncestor(events, event.parents)
+  return event.parents.length === 1 ? event.parents[0] : findCommonAncestor(events, event.parents)
 }
 
 /**
@@ -327,9 +309,7 @@ async function findSortedEvents (events, head, tail) {
   // sort by weight, and by CID within weight
   const sorted = Array.from(buckets)
     .sort((a, b) => b[0] - a[0])
-    .flatMap(([, es]) =>
-      es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1))
-    )
+    .flatMap(([, es]) => es.sort((a, b) => (String(a.cid) < String(b.cid) ? -1 : 1)))
   // console.log('sorted', sorted.map(s => s.value.data.value))
   return sorted
 }
@@ -345,8 +325,6 @@ async function findEvents (events, start, end, depth = 0) {
   const acc = [{ event, depth }]
   const { parents } = event.value
   if (parents.length === 1 && String(parents[0]) === String(end)) return acc
-  const rest = await Promise.all(
-    parents.map((p) => findEvents(events, p, end, depth + 1))
-  )
+  const rest = await Promise.all(parents.map((p) => findEvents(events, p, end, depth + 1)))
   return acc.concat(...rest)
 }
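
Several clock helpers now report the set of CIDs they touched along with their previous result, so callers destructure an object instead of receiving a bare head array or event list. A minimal sketch of the new call shapes, assuming a `blocks` store, a current `head`, and an `event` CID are already available (variable names here are illustrative, not from the package):

```js
import { advance, findEventsToSync } from './clock.js'

// advance() used to return the merged head array; it now returns
// { head, cids }, where cids is the EventFetcher's CIDCounter.
const { head: newHead, cids } = await advance(blocks, head, event)

// findEventsToSync() constructs its own EventFetcher and returns
// { cids, events } instead of a bare sorted list of events.
const { events: toSync, cids: syncCids } = await findEventsToSync(blocks, newHead)
```

Note that `findCommonAncestorWithSortedEvents` now takes an `EventFetcher` rather than a blockstore, so external callers of that helper have to construct the fetcher themselves.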
package/src/db-index.js CHANGED
@@ -1,19 +1,33 @@
 import { create, load } from 'prolly-trees/db-index'
 import { sha256 as hasher } from 'multiformats/hashes/sha2'
 import { nocache as cache } from 'prolly-trees/cache'
-import { bf, simpleCompare as compare } from 'prolly-trees/utils'
+import { bf, simpleCompare } from 'prolly-trees/utils'
+import { makeGetBlock } from './prolly.js'
+import { cidsToProof } from './fireproof.js'
 import * as codec from '@ipld/dag-cbor'
-import { create as createBlock } from 'multiformats/block'
+// import { create as createBlock } from 'multiformats/block'
 import { doTransaction } from './blockstore.js'
 import charwise from 'charwise'
-const opts = { cache, chunker: bf(3), codec, hasher, compare }
 
 const ALWAYS_REBUILD = true // todo: remove this
 
-const makeGetBlock = (blocks) => async (address) => {
-  const { cid, bytes } = await blocks.get(address)
-  return createBlock({ cid, bytes, hasher, codec })
+const arrayCompare = (a, b) => {
+  if (Array.isArray(a) && Array.isArray(b)) {
+    const len = Math.min(a.length, b.length)
+    for (let i = 0; i < len; i++) {
+      const comp = simpleCompare(a[i], b[i])
+      if (comp !== 0) {
+        return comp
+      }
+    }
+    return simpleCompare(a.length, b.length)
+  } else {
+    return simpleCompare(a, b)
+  }
 }
+
+const opts = { cache, chunker: bf(3), codec, hasher, compare: arrayCompare }
+
 const makeDoc = ({ key, value }) => ({ _id: key, ...value })
 
 /**
@@ -58,11 +72,11 @@ const indexEntriesForChanges = (changes, mapFun) => {
 }
 
 const indexEntriesForOldChanges = async (blocks, byIDindexRoot, ids, mapFun) => {
-  const getBlock = makeGetBlock(blocks)
+  const { getBlock } = makeGetBlock(blocks)
   const byIDindex = await load({ cid: byIDindexRoot.cid, get: getBlock, ...opts })
-  // console.trace('ids', ids)
+
   const result = await byIDindex.getMany(ids)
-  return result.result
+  return result
 }
 
 /**
@@ -87,7 +101,10 @@ export default class DbIndex {
     * @type {Function}
     */
    this.mapFun = mapFun
-    this.indexRoot = null
+
+    this.dbIndexRoot = null
+    this.dbIndex = null
+
    this.byIDindexRoot = null
    this.dbHead = null
  }
@@ -102,22 +119,21 @@ export default class DbIndex {
  /**
   * Query object can have {range}
   * @param {DbQuery} query - the query range to use
-   * @param {CID} [root] - an optional root to query a snapshot
   * @returns {Promise<{rows: Array<{id: string, key: string, value: any}>}>}
   * @memberof DbIndex
   * @instance
   */
-  async query (query, root = null) {
-    if (!root) {
-      // pass a root to query a snapshot
-      await doTransaction('#updateIndex', this.database.blocks, async (blocks) => {
-        await this.#updateIndex(blocks)
-      })
-    }
-    const response = await doIndexQuery(this.database.blocks, root || this.indexRoot, query)
+  async query (query) {
+    // if (!root) {
+    // pass a root to query a snapshot
+    await doTransaction('#updateIndex', this.database.blocks, async (blocks) => {
+      await this.#updateIndex(blocks)
+    })
+    // }
+    const response = await doIndexQuery(this.database.blocks, this.dbIndexRoot, this.dbIndex, query)
    return {
-      // TODO fix this naming upstream in prolly/db-DbIndex
-      // todo maybe this is a hint about why deletes arent working?
+      proof: { index: await cidsToProof(response.cids) },
+      // TODO fix this naming upstream in prolly/db-DbIndex?
      rows: response.result.map(({ id, key, row }) => ({ id: key, key: charwise.decode(id), value: row }))
    }
  }
@@ -131,37 +147,45 @@ export default class DbIndex {
    // todo remove this hack
    if (ALWAYS_REBUILD) {
      this.dbHead = null // hack
-      this.indexRoot = null // hack
+      this.dbIndex = null // hack
+      this.dbIndexRoot = null
    }
    const result = await this.database.changesSince(this.dbHead) // {key, value, del}
    if (this.dbHead) {
-      const oldIndexEntries = (
-        await indexEntriesForOldChanges(
-          blocks,
-          this.byIDindexRoot,
-          result.rows.map(({ key }) => key),
-          this.mapFun
-        )
+      const oldChangeEntries = await indexEntriesForOldChanges(
+        blocks,
+        this.byIDindexRoot,
+        result.rows.map(({ key }) => key),
+        this.mapFun
      )
-        // .map((key) => ({ key, value: null })) // tombstone just adds more rows...
-        .map((key) => ({ key, del: true })) // should be this
-        // .map((key) => ({ key: undefined, del: true })) // todo why does this work?
-
-      this.indexRoot = await bulkIndex(blocks, this.indexRoot, oldIndexEntries, opts)
-      // console.x('oldIndexEntries', oldIndexEntries)
-      // [ { key: ['b', 1], del: true } ]
-      // [ { key: [ 5, 'x' ], del: true } ]
-      // for now we just let the by id DbIndex grow and then don't use the results...
-      // const removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
-      // this.byIDindexRoot = await bulkIndex(blocks, this.byIDindexRoot, removeByIdIndexEntries, opts)
+      const oldIndexEntries = oldChangeEntries.result.map((key) => ({ key, del: true }))
+      const removalResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, oldIndexEntries, opts)
+      this.dbIndexRoot = removalResult.root
+      this.dbIndex = removalResult.dbIndex
+
+      const removeByIdIndexEntries = oldIndexEntries.map(({ key }) => ({ key: key[1], del: true }))
+      const purgedRemovalResults = await bulkIndex(
+        blocks,
+        this.byIDindexRoot,
+        this.byIDIndex,
+        removeByIdIndexEntries,
+        opts
+      )
+      this.byIDindexRoot = purgedRemovalResults.root
+      this.byIDIndex = purgedRemovalResults.dbIndex
    }
    const indexEntries = indexEntriesForChanges(result.rows, this.mapFun)
    const byIdIndexEntries = indexEntries.map(({ key }) => ({ key: key[1], value: key }))
-    // [{key: 'xxxx-3c3a-4b5e-9c1c-8c5c0c5c0c5c', value : [ 53, 'xxxx-3c3a-4b5e-9c1c-8c5c0c5c0c5c' ]}]
-    this.byIDindexRoot = await bulkIndex(blocks, this.byIDindexRoot, byIdIndexEntries, opts)
+    const addFutureRemovalsResult = await bulkIndex(blocks, this.byIDindexRoot, this.byIDIndex, byIdIndexEntries, opts)
+    this.byIDindexRoot = addFutureRemovalsResult.root
+    this.byIDIndex = addFutureRemovalsResult.dbIndex
+
    // console.log('indexEntries', indexEntries)
-    this.indexRoot = await bulkIndex(blocks, this.indexRoot, indexEntries, opts)
-    // console.log('did DbIndex', this.indexRoot)
+
+    const updateIndexResult = await bulkIndex(blocks, this.dbIndexRoot, this.dbIndex, indexEntries, opts)
+    this.dbIndexRoot = updateIndexResult.root
+    this.dbIndex = updateIndexResult.dbIndex
+
    this.dbHead = result.clock
  }
 
@@ -178,44 +202,44 @@ export default class DbIndex {
 * @param {DbIndexEntry[]} indexEntries
 * @private
 */
-async function bulkIndex (blocks, inRoot, indexEntries) {
-  if (!indexEntries.length) return inRoot
+async function bulkIndex (blocks, inRoot, inDBindex, indexEntries) {
+  if (!indexEntries.length) return { dbIndex: inDBindex, root: inRoot }
  const putBlock = blocks.put.bind(blocks)
-  const getBlock = makeGetBlock(blocks)
-  if (!inRoot) {
-    // make a new DbIndex
-
+  const { getBlock } = makeGetBlock(blocks)
+  let returnRootBlock
+  let returnNode
+  if (!inDBindex) {
    for await (const node of await create({ get: getBlock, list: indexEntries, ...opts })) {
      const block = await node.block
      await putBlock(block.cid, block.bytes)
-      inRoot = block
+      returnRootBlock = block
+      returnNode = node
    }
-    // console.x('created DbIndex', inRoot.cid)
-    return inRoot
  } else {
-    // load existing DbIndex
-    // console.x('loading DbIndex', inRoot.cid)
-    const DbIndex = await load({ cid: inRoot.cid, get: getBlock, ...opts })
-    // console.log('new indexEntries', indexEntries)
-    const { root, blocks } = await DbIndex.bulk(indexEntries)
+    // const dbIndex = await load({ cid: inRoot.cid, get: getBlock, ...opts }) // todo load from root on refresh
+    const { root, blocks } = await inDBindex.bulk(indexEntries)
+    returnRootBlock = await root.block
+    returnNode = root
    for await (const block of blocks) {
      await putBlock(block.cid, block.bytes)
    }
-    // console.x('updated DbIndex', root.block.cid)
-    return await root.block // if we hold the root we won't have to load every time
+    await putBlock(returnRootBlock.cid, returnRootBlock.bytes)
  }
+  return { dbIndex: returnNode, root: returnRootBlock }
}
 
-async function doIndexQuery (blocks, root, query) {
-  const cid = root && root.cid
-  if (!cid) return { result: [] }
-  const getBlock = makeGetBlock(blocks)
-  const DbIndex = await load({ cid, get: getBlock, ...opts })
+async function doIndexQuery (blocks, dbIndexRoot, dbIndex, query) {
+  if (!dbIndex) {
+    const cid = dbIndexRoot && dbIndexRoot.cid
+    if (!cid) return { result: [] }
+    const { getBlock } = makeGetBlock(blocks)
+    dbIndex = await load({ cid, get: getBlock, ...opts })
+  }
  if (query.range) {
    const encodedRange = query.range.map((key) => charwise.encode(key))
-    return DbIndex.range(...encodedRange)
+    return dbIndex.range(...encodedRange)
  } else if (query.key) {
    const encodedKey = charwise.encode(query.key)
-    return DbIndex.get(encodedKey)
+    return dbIndex.get(encodedKey)
  }
}
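
DbIndex.query() drops its optional snapshot-root argument and now returns the index proof alongside the rows. A sketch of the updated call shape, assuming a DbIndex instance has already been built over a database (the index and its key shape are assumptions for illustration):

```js
// assumed: `index` is a DbIndex whose map function emits [age, name] keys
const { rows, proof } = await index.query({ range: [30, 40] })

// rows keep the { id, key, value } shape; keys are charwise-decoded
for (const { id, key, value } of rows) {
  console.log(id, key, value)
}

// proof.index is the list of index block CIDs (as strings) read by the query
console.log(proof.index)
```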
package/src/fireproof.js CHANGED
@@ -35,7 +35,7 @@ export default class Fireproof {
    this.clock = clock
    this.config = config
    this.authCtx = authCtx
-    this.instanceId = 'db.' + Math.random().toString(36).substring(2, 7)
+    this.instanceId = 'fp.' + Math.random().toString(36).substring(2, 7)
  }
 
  /**
@@ -89,11 +89,11 @@ export default class Fireproof {
   */
  async changesSince (event) {
    // console.log('changesSince', this.instanceId, event, this.clock)
-    let rows
+    let rows, dataCIDs, clockCIDs
    if (event) {
      const resp = await eventsSince(this.blocks, this.clock, event)
      const docsMap = new Map()
-      for (const { key, type, value } of resp) {
+      for (const { key, type, value } of resp.result) {
        if (type === 'del') {
          docsMap.set(key, { key, del: true })
        } else {
@@ -101,12 +101,15 @@ export default class Fireproof {
        }
      }
      rows = Array.from(docsMap.values())
+      clockCIDs = resp.cids
      // console.log('change rows', this.instanceId, rows)
    } else {
-      rows = (await getAll(this.blocks, this.clock)).map(({ key, value }) => ({ key, value }))
+      const allResp = await getAll(this.blocks, this.clock)
+      rows = allResp.result.map(({ key, value }) => ({ key, value }))
+      dataCIDs = allResp.cids
      // console.log('dbdoc rows', this.instanceId, rows)
    }
-    return { rows, clock: this.clock }
+    return { rows, clock: this.clock, proof: { data: await cidsToProof(dataCIDs), clock: await cidsToProof(clockCIDs) } }
  }
 
  /**
@@ -158,10 +161,10 @@ export default class Fireproof {
   * @memberof Fireproof
   * @instance
   */
-  async put ({ _id, ...doc }) {
+  async put ({ _id, _proof, ...doc }) {
    const id = _id || 'f' + Math.random().toString(36).slice(2)
    await this.#runValidation({ _id: id, ...doc })
-    return await this.#putToProllyTree({ key: id, value: doc })
+    return await this.#putToProllyTree({ key: id, value: doc }, doc._clock)
  }
 
  /**
@@ -171,20 +174,37 @@ export default class Fireproof {
   * @memberof Fireproof
   * @instance
   */
-  async del (id) {
+  async del (docOrId) {
+    let id
+    let clock = null
+    if (docOrId._id) {
+      id = docOrId._id
+      clock = docOrId._clock
+    } else {
+      id = docOrId
+    }
    await this.#runValidation({ _id: id, _deleted: true })
    // return await this.#putToProllyTree({ key: id, del: true }) // not working at prolly tree layer?
    // this tombstone is temporary until we can get the prolly tree to delete
-    return await this.#putToProllyTree({ key: id, value: null })
+    return await this.#putToProllyTree({ key: id, value: null }, clock)
  }
 
  /**
   * Updates the underlying storage with the specified event.
   * @private
-   * @param {CID[]} event - the event to add
+   * @param {Object<{key : string, value: any}>} event - the event to add
   * @returns {Object<{ id: string, clock: CID[] }>} - The result of adding the event to storage
   */
-  async #putToProllyTree (event) {
+  async #putToProllyTree (event, clock = null) {
+    if (clock && JSON.stringify(clock) !== JSON.stringify(this.clock)) {
+      // we need to check and see what version of the document exists at the clock specified
+      // if it is the same as the one we are trying to put, then we can proceed
+      const resp = await eventsSince(this.blocks, this.clock, event.value._clock)
+      const missedChange = resp.result.find(({ key }) => key === event.key)
+      if (missedChange) {
+        throw new Error('MVCC conflict, document is changed, please reload the document and try again.')
+      }
+    }
    const result = await doTransaction(
      '#putToProllyTree',
      this.blocks,
@@ -195,9 +215,9 @@ export default class Fireproof {
        throw new Error('failed to put at storage layer')
      }
      this.clock = result.head // do we want to do this as a finally block
-      result.id = event.key
      await this.#notifyListeners([event])
-      return { id: result.id, clock: this.clock }
+      return { id: event.key, clock: this.clock, proof: { data: await cidsToProof(result.cids), clock: await cidsToProof(result.clockCIDs) } }
+      // todo should include additions (or split clock)
  }
 
  // /**
@@ -228,18 +248,29 @@ export default class Fireproof {
   * Retrieves the document with the specified ID from the database
   *
   * @param {string} key - the ID of the document to retrieve
+   * @param {Object} [opts] - options
   * @returns {Object<{_id: string, ...doc: Object}>} - the document with the specified ID
   * @memberof Fireproof
   * @instance
   */
-  async get (key) {
-    const got = await get(this.blocks, this.clock, key)
+  async get (key, opts = {}) {
+    const clock = opts.clock || this.clock
+    const resp = await get(this.blocks, clock, key)
+
    // this tombstone is temporary until we can get the prolly tree to delete
-    if (got === null) {
+    if (!resp || resp.result === null) {
      throw new Error('Not found')
    }
-    got._id = key
-    return got
+    const doc = resp.result
+    if (opts.mvcc === true) {
+      doc._clock = this.clock
+    }
+    doc._proof = {
+      data: await cidsToProof(resp.cids),
+      clock: this.clock
+    }
+    doc._id = key
+    return doc
  }
 
  setCarUploader (carUploaderFn) {
@@ -253,3 +284,9 @@ export default class Fireproof {
    this.blocks.valet.remoteBlockFunction = remoteBlockReaderFn
  }
}
+
+export async function cidsToProof (cids) {
+  if (!cids || !cids.all) return []
+  const all = await cids.all()
+  return [...all].map((cid) => cid.toString())
+}
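
Taken together, the fireproof.js changes add an MVCC path: `get(key, { mvcc: true })` attaches the clock the document was read at, and `put` forwards `doc._clock` so the storage layer can detect that the document changed underneath the caller and throw a conflict. A sketch of how a caller might use it, assuming an existing `Fireproof` instance named `db` and a document id that already exists:

```js
// read with MVCC metadata; the returned doc carries _clock and _proof
const doc = await db.get('existing-doc-id', { mvcc: true })

doc.completed = true
try {
  // put() passes doc._clock down to #putToProllyTree; if another write
  // changed this document since the read, an MVCC conflict is thrown
  const { id, clock, proof } = await db.put(doc)
  console.log('updated', id, proof)
} catch (err) {
  if (err.message.includes('MVCC conflict')) {
    // reload the document and retry the update
  } else {
    throw err
  }
}
```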