@fireproof/core 0.0.6 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/coverage/blockstore.js.html +254 -242
  2. package/coverage/clock.js.html +203 -266
  3. package/coverage/db-index.js.html +426 -213
  4. package/coverage/fireproof.js.html +424 -238
  5. package/coverage/index.html +69 -84
  6. package/coverage/listener.js.html +13 -25
  7. package/coverage/prolly.js.html +267 -219
  8. package/coverage/tmp/coverage-28490-1680373005621-0.json +1 -0
  9. package/coverage/tmp/coverage-28494-1680373004502-0.json +1 -0
  10. package/coverage/tmp/coverage-28500-1680373005593-0.json +1 -0
  11. package/coverage/tmp/coverage-28504-1680373005559-0.json +1 -0
  12. package/coverage/valet.js.html +96 -246
  13. package/hooks/use-fireproof.ts +9 -8
  14. package/package.json +5 -5
  15. package/src/blockstore.js +16 -14
  16. package/src/clock.js +27 -5
  17. package/src/db-index.js +139 -96
  18. package/src/fireproof.js +48 -16
  19. package/src/hydrator.js +10 -0
  20. package/src/listener.js +0 -6
  21. package/src/prolly.js +43 -32
  22. package/src/valet.js +6 -62
  23. package/{src → test}/block.js +6 -6
  24. package/test/clock.test.js +0 -5
  25. package/test/db-index.test.js +11 -10
  26. package/test/fireproof.test.js +74 -19
  27. package/test/helpers.js +1 -1
  28. package/test/hydrator.test.js +75 -0
  29. package/test/prolly.test.js +2 -2
  30. package/test/proofs.test.js +5 -5
  31. package/test/reproduce-fixture-bug.test.js +2 -2
  32. package/coverage/tmp/coverage-42191-1678146904346-0.json +0 -1
  33. package/coverage/tmp/coverage-42193-1678146903521-0.json +0 -1
  34. package/coverage/tmp/coverage-42196-1678146904322-0.json +0 -1
  35. package/coverage/tmp/coverage-42197-1678146904292-0.json +0 -1
  36. package/scripts/propernames/gen.sh +0 -3
  37. package/scripts/randomcid.js +0 -12
  38. package/scripts/words/gen.js +0 -55
package/src/prolly.js CHANGED
@@ -6,13 +6,12 @@ import {
   findEventsToSync
 } from './clock.js'
 import { create, load } from 'prolly-trees/map'
+// import { create, load } from '../../../../prolly-trees/src/map.js'
+import { nocache as cache } from 'prolly-trees/cache'
+import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils'
 import * as codec from '@ipld/dag-cbor'
 import { sha256 as hasher } from 'multiformats/hashes/sha2'
-import { MemoryBlockstore, MultiBlockFetcher } from './block.js'
 import { doTransaction } from './blockstore.js'
-
-import { nocache as cache } from 'prolly-trees/cache'
-import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils'
 import { create as createBlock } from 'multiformats/block'
 const opts = { cache, chunker: bf(3), codec, hasher, compare }
 
@@ -22,9 +21,9 @@ const withLog = async (label, fn) => {
   return resp
 }
 
-// todo should also return a CIDCounter
+// should also return a CIDCounter
 export const makeGetBlock = (blocks) => {
-  // const cids = new CIDCounter() // todo this could be used for proofs of mutations
+  // const cids = new CIDCounter() // this could be used for proofs of mutations
   const getBlockFn = async (address) => {
     const { cid, bytes } = await withLog(address, () => blocks.get(address))
     // cids.add({ address: cid })
@@ -55,18 +54,17 @@ export const makeGetBlock = (blocks) => {
  * event: CID[]
  * }>}
  */
-async function createAndSaveNewEvent (
+async function createAndSaveNewEvent ({
   inBlocks,
-  mblocks,
-  getBlock,
   bigPut,
   root,
-  { key, value, del },
+  event: inEvent,
   head,
   additions,
   removals = []
-) {
+}) {
   let cids
+  const { key, value, del } = inEvent
   const data = {
     type: 'put',
     root: {
@@ -100,20 +98,20 @@ async function createAndSaveNewEvent (
 }
 
 const makeGetAndPutBlock = (inBlocks) => {
-  const mblocks = new MemoryBlockstore()
-  const blocks = new MultiBlockFetcher(mblocks, inBlocks)
-  const { getBlock, cids } = makeGetBlock(blocks)
+  // const mblocks = new MemoryBlockstore()
+  // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
+  const { getBlock, cids } = makeGetBlock(inBlocks)
   const put = inBlocks.put.bind(inBlocks)
   const bigPut = async (block, additions) => {
     // console.log('bigPut', block.cid.toString())
     const { cid, bytes } = block
     put(cid, bytes)
-    mblocks.putSync(cid, bytes)
+    // mblocks.putSync(cid, bytes)
     if (additions) {
       additions.set(cid.toString(), block)
     }
   }
-  return { getBlock, bigPut, mblocks, blocks, cids }
+  return { getBlock, bigPut, blocks: inBlocks, cids }
 }
 
 const bulkFromEvents = (sorted) =>
@@ -143,7 +141,7 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
 /**
  * Put a value (a CID) for the given key. If the key exists it's value is overwritten.
  *
- * @param {import('./block').BlockFetcher} blocks Bucket block storage.
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 * @param {string} key The key of the value to put.
 * @param {CID} value The value to put.
@@ -151,7 +149,7 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
 * @returns {Promise<Result>}
 */
 export async function put (inBlocks, head, event, options) {
-  const { getBlock, bigPut, mblocks, blocks } = makeGetAndPutBlock(inBlocks)
+  const { getBlock, bigPut, blocks } = makeGetAndPutBlock(inBlocks)
 
   // If the head is empty, we create a new event and return the root and addition blocks
   if (!head.length) {
@@ -161,12 +159,15 @@ export async function put (inBlocks, head, event, options) {
       root = await node.block
       bigPut(root, additions)
     }
-    return createAndSaveNewEvent(inBlocks, mblocks, getBlock, bigPut, root, event, head, Array.from(additions.values()))
+    return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) })
   }
 
   // Otherwise, we find the common ancestor and update the root and other blocks
   const events = new EventFetcher(blocks)
+  // todo this is returning more events than necessary, lets define the desired semantics from the top down
+  // good semantics mean we can cache the results of this call
   const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head)
+  // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
   const prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock)
 
   const bulkOperations = bulkFromEvents(sorted)
@@ -178,22 +179,20 @@ export async function put (inBlocks, head, event, options) {
     bigPut(nb, additions)
   }
   // additions are new blocks
-  return createAndSaveNewEvent(
+  return createAndSaveNewEvent({
     inBlocks,
-    mblocks,
-    getBlock,
     bigPut,
-    prollyRootBlock,
+    root: prollyRootBlock,
     event,
     head,
-    Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
-  )
+    additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
+  })
 }
 
 /**
 * Determine the effective prolly root given the current merkle clock head.
 *
- * @param {import('./block').BlockFetcher} blocks Bucket block storage.
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 */
 export async function root (inBlocks, head) {
@@ -208,22 +207,22 @@ export async function root (inBlocks, head) {
   // Perform bulk operations (put or delete) for each event in the sorted array
   const bulkOperations = bulkFromEvents(sorted)
   const { root: newProllyRootNode, blocks: newBlocks } = await prollyRootNode.bulk(bulkOperations)
-  const prollyRootBlock = await newProllyRootNode.block
-  // console.log('emphemeral blocks', newBlocks.map((nb) => nb.cid.toString()))
+  // const prollyRootBlock = await newProllyRootNode.block
+  // console.log('newBlocks', newBlocks.map((nb) => nb.cid.toString()))
   // todo maybe these should go to a temp blockstore?
   await doTransaction('root', inBlocks, async (transactionBlockstore) => {
     const { bigPut } = makeGetAndPutBlock(transactionBlockstore)
     for (const nb of newBlocks) {
       bigPut(nb)
     }
-    bigPut(prollyRootBlock)
+    // bigPut(prollyRootBlock)
   })
   return { cids: events.cids, node: newProllyRootNode }
 }
 
 /**
 * Get the list of events not known by the `since` event
- * @param {import('./block').BlockFetcher} blocks Bucket block storage.
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 * @param {import('./clock').EventLink<EventData>} since Event to compare against.
 * @returns {Promise<import('./clock').EventLink<EventData>[]>}
@@ -239,7 +238,7 @@ export async function eventsSince (blocks, head, since) {
 
 /**
 *
- * @param {import('./block').BlockFetcher} blocks Bucket block storage.
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 *
 * @returns {Promise<import('./prolly').Entry[]>}
@@ -257,7 +256,7 @@ export async function getAll (blocks, head) {
 }
 
 /**
- * @param {import('./block').BlockFetcher} blocks Bucket block storage.
+ * @param {import('../test/block.js').BlockFetcher} blocks Bucket block storage.
 * @param {import('./clock').EventLink<EventData>[]} head Merkle clock head.
 * @param {string} key The key of the value to retrieve.
 */
@@ -270,3 +269,15 @@ export async function get (blocks, head, key) {
   const { result, cids } = await prollyRootNode.get(key)
   return { result, cids, clockCIDs }
 }
+
+export async function * vis (blocks, head) {
+  if (!head.length) {
+    return { cids: new CIDCounter(), result: null }
+  }
+  const { node: prollyRootNode, cids } = await root(blocks, head)
+  const lines = []
+  for await (const line of prollyRootNode.vis()) {
+    yield line
+  }
+  return { vis: lines.join('\n'), cids }
+}
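The new vis export above is an async generator: it resolves the current prolly root for the given clock head and yields the tree visualization one line at a time (an empty head yields nothing). A minimal sketch of driving it, assuming blocks and head come from an existing database (for example its blockstore and clock):

    // Sketch: stream the prolly-tree visualization to the console.
    import { vis } from './prolly.js'

    async function printTree (blocks, head) {
      // Each yielded value is one formatted line of the tree dump.
      for await (const line of vis(blocks, head)) {
        console.log(line)
      }
    }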
package/src/valet.js CHANGED
@@ -3,17 +3,8 @@ import { CID } from 'multiformats/cid'
 import { openDB } from 'idb'
 import cargoQueue from 'async/cargoQueue.js'
 
-// const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
-// let storageSupported = false
-// try {
-//   storageSupported = window.localStorage && true
-// } catch (e) {}
-// const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms))
-
 export default class Valet {
-  #cars = new Map() // cars by cid
-  #cidToCar = new Map() // cid to car
-  #db = null
+  idb = null
   #uploadQueue = null
   #alreadyEnqueued = new Set()
 
@@ -23,7 +14,8 @@ export default class Valet {
   */
   uploadFunction = null
 
-  constructor () {
+  constructor (name = 'default') {
+    this.name = name
     this.#uploadQueue = cargoQueue(async (tasks, callback) => {
       console.log(
         'queue worker',
@@ -59,9 +51,8 @@ export default class Valet {
   }
 
   withDB = async (dbWorkFun) => {
-    // if (!storageSupported) return
-    if (!this.#db) {
-      this.#db = await openDB('valet', 2, {
+    if (!this.idb) {
+      this.idb = await openDB(`fp.${this.name}.valet`, 2, {
         upgrade (db, oldVersion, newVersion, transaction) {
           if (oldVersion < 1) {
             db.createObjectStore('cars') // todo use database name
@@ -75,7 +66,7 @@ export default class Valet {
        }
      })
    }
-    return await dbWorkFun(this.#db)
+    return await dbWorkFun(this.idb)
   }
 
   /**
@@ -84,11 +75,6 @@ export default class Valet {
   * @param {*} value
   */
   async parkCar (carCid, value, cids) {
-    // this.#cars.set(carCid, value)
-    // for (const cid of cids) {
-    //   this.#cidToCar.set(cid, carCid)
-    // }
-
     await this.withDB(async (db) => {
       const tx = db.transaction(['cars', 'cidToCar'], 'readwrite')
       await tx.objectStore('cars').put(value, carCid)
@@ -130,45 +116,3 @@ export default class Valet {
     })
   }
 }
-
-// export class MemoryValet {
-//   #cars = new Map() // cars by cid
-//   #cidToCar = new Map() // cid to car
-
-//   /**
-//    *
-//    * @param {string} carCid
-//    * @param {*} value
-//    */
-//   async parkCar (carCid, value, cids) {
-//     this.#cars.set(carCid, value)
-//     for (const cid of cids) {
-//       this.#cidToCar.set(cid, carCid)
-//     }
-//   }
-
-//   async getBlock (dataCID) {
-//     return await this.#valetGet(dataCID)
-//   }
-
-//   /**
-//    * Internal function to load blocks from persistent storage.
-//    * Currently it just searches all the cars for the block, but in the future
-//    * we need to index the block CIDs to the cars, and reference that to find the block.
-//    * This index will also allow us to use accelerator links for the gateway when needed.
-//    * It can itself be a prolly tree...
-//    * @param {string} cid
-//    * @returns {Promise<Uint8Array|undefined>}
-//    */
-//   #valetGet = async (cid) => {
-//     const carCid = this.#cidToCar.get(cid)
-//     if (carCid) {
-//       const carBytes = this.#cars.get(carCid)
-//       const reader = await CarReader.fromBytes(carBytes)
-//       const gotBlock = await reader.get(CID.parse(cid))
-//       if (gotBlock) {
-//         return gotBlock.bytes
-//       }
-//     }
-//   }
-// }
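Valet now takes an optional name, stores it on the instance, and opens its IndexedDB database as fp.<name>.valet through the public idb field (replacing the private #db handle). A small sketch of the effect, assuming an environment where idb's openDB can run (a browser, or a test setup that polyfills IndexedDB):

    // Sketch: two named valets keep their CAR files in separate IndexedDB databases.
    import Valet from './valet.js'

    const defaultValet = new Valet()           // name defaults to 'default' -> fp.default.valet
    const namedValet = new Valet('helloName')  // -> fp.helloName.valet

    console.log(defaultValet.name, namedValet.name)
    // idb stays null until the first withDB/parkCar call opens the database.
    console.log(namedValet.idb) // null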
package/{src → test}/block.js RENAMED
@@ -1,8 +1,8 @@
 import { parse } from 'multiformats/link'
 
 /**
- * @typedef {{ cid: import('./link').AnyLink, bytes: Uint8Array }} AnyBlock
- * @typedef {{ get: (link: import('./link').AnyLink) => Promise<AnyBlock | undefined> }} BlockFetcher
+ * @typedef {{ cid: import('../src/link').AnyLink, bytes: Uint8Array }} AnyBlock
+ * @typedef {{ get: (link: import('../src/link').AnyLink) => Promise<AnyBlock | undefined> }} BlockFetcher
 */
 
 /** @implements {BlockFetcher} */
@@ -11,7 +11,7 @@ export class MemoryBlockstore {
   #blocks = new Map()
 
   /**
-   * @param {import('./link').AnyLink} cid
+   * @param {import('../src/link').AnyLink} cid
   * @returns {Promise<AnyBlock | undefined>}
   */
   async get (cid) {
@@ -21,7 +21,7 @@ export class MemoryBlockstore {
   }
 
   /**
-   * @param {import('./link').AnyLink} cid
+   * @param {import('../src/link').AnyLink} cid
   * @param {Uint8Array} bytes
   */
   async put (cid, bytes) {
@@ -30,7 +30,7 @@ export class MemoryBlockstore {
   }
 
   /**
-   * @param {import('./link').AnyLink} cid
+   * @param {import('../src/link').AnyLink} cid
   * @param {Uint8Array} bytes
   */
   putSync (cid, bytes) {
@@ -53,7 +53,7 @@ export class MultiBlockFetcher {
     this.#fetchers = fetchers
   }
 
-  /** @param {import('./link').AnyLink} link */
+  /** @param {import('../src/link').AnyLink} link */
   async get (link) {
     for (const f of this.#fetchers) {
       const v = await f.get(link)
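With block.js moved under test/, MemoryBlockstore and MultiBlockFetcher become test-only helpers: src/prolly.js dropped its runtime import and only references the BlockFetcher type via ../test/block.js, while the tests import from ./block.js. A rough usage sketch, assuming a Node ESM context (top-level await) and that MultiBlockFetcher is constructed from a list of fetchers as the helpers use it:

    // Sketch: store a raw block in one MemoryBlockstore and fetch it through a MultiBlockFetcher.
    import { MemoryBlockstore, MultiBlockFetcher } from './block.js'
    import * as Block from 'multiformats/block'
    import * as raw from 'multiformats/codecs/raw'
    import { sha256 } from 'multiformats/hashes/sha2'

    const primary = new MemoryBlockstore()
    const secondary = new MemoryBlockstore()

    // Encode a raw block so we have a real CID to store and look up.
    const block = await Block.encode({ value: new TextEncoder().encode('hello'), codec: raw, hasher: sha256 })
    await primary.put(block.cid, block.bytes)

    // The fetcher tries each underlying store in order until one returns the block.
    const fetcher = new MultiBlockFetcher(secondary, primary)
    const found = await fetcher.get(block.cid)
    console.log(found && found.bytes.length) // 5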
package/test/clock.test.js CHANGED
@@ -440,13 +440,8 @@ describe('Clock', () => {
     toSync = await testFindEventsToSync(blocks, sinceHead)
     assert.equal(toSync.length, 0)
 
-    // todo do these since heads make sense?
     sinceHead = [...head0, ...head2]
     toSync = await testFindEventsToSync(blocks, sinceHead)
-    // console.log('need', toSync.map(b => b.value.data))
-    // assert.equal(toSync.length, 2) // 0
-    // assert.equal(toSync[0].cid.toString(), event1.cid.toString())
-    // assert.equal(toSync[1].cid.toString(), event2.cid.toString())
   })
 
   it('add two events with some shared parents', async () => {
package/test/db-index.test.js CHANGED
@@ -2,13 +2,13 @@ import { describe, it, beforeEach } from 'mocha'
 import assert from 'node:assert'
 import Blockstore from '../src/blockstore.js'
 import Fireproof from '../src/fireproof.js'
-import Index from '../src/db-index.js'
+import DbIndex from '../src/db-index.js'
 console.x = function () {}
 
-describe('Index query', () => {
+describe('DbIndex query', () => {
   let database, index
   beforeEach(async () => {
-    database = new Fireproof(new Blockstore(), []) // todo: these need a cloud name aka w3name, add this after we have cloud storage of blocks
+    database = Fireproof.storage()
     const docs = [
       { _id: 'a1s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'alice', age: 40 },
       { _id: 'b2s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'bob', age: 40 },
@@ -24,7 +24,7 @@ describe('Index query', () => {
       assert(response.id, 'should have id')
       assert.equal(response.id, id)
     }
-    index = new Index(database, function (doc, map) {
+    index = new DbIndex(database, function (doc, map) {
       map(doc.age, doc.name)
     })
   })
@@ -36,8 +36,10 @@ describe('Index query', () => {
     assert.equal(result.rows[0].key, 43)
     assert(result.rows[0].value === 'carol', 'correct value')
   })
-  it.skip('query exact key', async () => {
-    const result = await index.query({ key: 43 })
+  it('query exact key', async () => {
+    let result = await index.query({ range: [41, 44] })
+    assert(result.rows[0].key === 43, 'correct key')
+    result = await index.query({ key: 43 })
     assert(result, 'did return result')
     assert(result.rows)
     assert.equal(result.rows.length, 1, 'one row matched')
@@ -111,7 +113,7 @@ describe('Index query', () => {
     await index.query({ range: [51, 54] })
 
     console.x('--- make Xander 53')
-    const DOCID = 'xxxx-3c3a-4b5e-9c1c-8c5c0c5c0c5c'
+    const DOCID = 'xander-doc'
     const r1 = await database.put({ _id: DOCID, name: 'Xander', age: 53 })
     assert(r1.id, 'should have id')
 
@@ -197,12 +199,12 @@ describe('Index query', () => {
   })
 })
 
-describe('Index query with bad index definition', () => {
+describe('DbIndex query with bad index definition', () => {
   let database, index
   beforeEach(async () => {
     database = new Fireproof(new Blockstore(), []) // todo: these need a cloud name aka w3name, add this after we have cloud storage of blocks
     await database.put({ _id: 'a1s3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', name: 'alice', age: 40 })
-    index = new Index(database, function (doc, map) {
+    index = new DbIndex(database, function (doc, map) {
       map(doc.oops.missingField, doc.name)
     })
   })
@@ -214,5 +216,4 @@ describe('Index query with bad index definition', () => {
     console.error = oldErrFn
   })
 })
-it.skip('reproduce missing block error from browser so we can turn off always rebuild', async () => {})
 })
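The index class is renamed from Index to DbIndex, and exact-key queries are now exercised alongside range queries. A minimal usage sketch mirroring the tests above (the document and ids are illustrative; assumes a Node ESM context):

    // Sketch: index documents by age, then query by range and by exact key.
    import Fireproof from '../src/fireproof.js'
    import DbIndex from '../src/db-index.js'

    const database = Fireproof.storage()
    await database.put({ _id: 'carol-id', name: 'carol', age: 43 })

    const index = new DbIndex(database, function (doc, map) {
      map(doc.age, doc.name) // key: age, value: name
    })

    const byRange = await index.query({ range: [41, 44] })
    const byKey = await index.query({ key: 43 })
    console.log(byRange.rows[0].value, byKey.rows.length) // 'carol', 1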
package/test/fireproof.test.js CHANGED
@@ -2,6 +2,7 @@ import { describe, it, beforeEach } from 'mocha'
 import assert from 'node:assert'
 import Blockstore from '../src/blockstore.js'
 import Fireproof from '../src/fireproof.js'
+// import * as codec from '@ipld/dag-cbor'
 
 let database, resp0
 
@@ -9,14 +10,18 @@ let database, resp0
 
 describe('Fireproof', () => {
   beforeEach(async () => {
-    database = new Fireproof(new Blockstore(), []) // todo: these need a cloud name aka w3name, add this after we have cloud storage of blocks
+    database = Fireproof.storage('helloName')
     resp0 = await database.put({
       _id: '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c',
       name: 'alice',
       age: 42
     })
   })
-
+  it('takes an optional name', () => {
+    assert.equal(database.name, 'helloName')
+    const x = database.blocks.valet.idb
+    assert.equal(x.name.toString(), 'fp.helloName.valet')
+  })
   it('put and get document', async () => {
     assert(resp0.id, 'should have id')
     assert.equal(resp0.id, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
@@ -27,13 +32,13 @@ describe('Fireproof', () => {
   })
   it('mvcc put and get document with _clock that matches', async () => {
     assert(resp0.clock, 'should have clock')
-    assert.equal(resp0.clock[0].toString(), 'bafyreieth2ckopwivda5mf6vu76xwqvox3q5wsaxgbmxy2dgrd4hfuzmma')
+    assert.equal(resp0.clock[0].toString(), 'bafyreiadhnnxgaeeqdxujfew6zxr4lnjyskkrg26cdjvk7tivy6dt4xmsm')
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
     theDoc._clock = database.clock
     const put2 = await database.put(theDoc)
     assert.equal(put2.id, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
     assert.equal(put2.clock.length, 1)
-    assert.equal(put2.clock[0].toString(), 'bafyreida2c2ckhjfoz5ulmbbfe66ey4svvedrl4tzbvtoxags2qck7lj2i')
+    assert.equal(put2.clock[0].toString(), 'bafyreib2kck2fv73lgahfcd5imarslgxcmachbxxavhtwahx5ppjfts4qe')
   })
   it('get should return an object instance that is not the same as the one in the db', async () => {
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
@@ -46,7 +51,7 @@ describe('Fireproof', () => {
   it('get with mvcc option', async () => {
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', { mvcc: true })
     assert(theDoc._clock, 'should have _clock')
-    assert.equal(theDoc._clock[0].toString(), 'bafyreieth2ckopwivda5mf6vu76xwqvox3q5wsaxgbmxy2dgrd4hfuzmma')
+    assert.equal(theDoc._clock[0].toString(), 'bafyreiadhnnxgaeeqdxujfew6zxr4lnjyskkrg26cdjvk7tivy6dt4xmsm')
   })
   it('get with mvcc option where someone else changed another document first', async () => {
     const theDoc = await database.get('1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c', { mvcc: true })
@@ -237,6 +242,34 @@ describe('Fireproof', () => {
     assert.equal(prevBob.age, 11)
   })
 
+  it('provides docs since tiny', async () => {
+    const result = await database.changesSince()
+    assert.equal(result.rows.length, 1)
+    assert.equal(result.rows[0].key, '1ef3b32a-3c3a-4b5e-9c1c-8c5c0c5c0c5c')
+
+    // console.log('result', result)
+
+    // const result2 = await database.changesSince(result.clock)
+    // console.log('result2', result2)
+    // assert.equal(result2.rows.length, 0)
+
+    const bKey = 'befbef-3c3a-4b5e-9c1c-bbbbbb'
+    const bvalue = {
+      _id: bKey,
+      name: 'bob',
+      age: 44
+    }
+    const response = await database.put(bvalue)
+    assert(response.id, 'should have id')
+    assert.equal(response.id, bKey)
+
+    const res3 = await database.changesSince()
+    assert.equal(res3.rows.length, 2)
+
+    const res4 = await database.changesSince(result.clock)
+    assert.equal(res4.rows.length, 1)
+  })
+
   it('provides docs since', async () => {
     const result = await database.changesSince()
     assert.equal(result.rows.length, 1)
@@ -275,8 +308,6 @@ describe('Fireproof', () => {
 
     const res5 = await database.changesSince(res4.clock)
 
-    // await database.visClock()
-
     assert.equal(res5.rows.length, 1)
 
     const res6 = await database.changesSince(result2.clock)
@@ -304,26 +335,43 @@ describe('Fireproof', () => {
     assert.equal(res9.rows.length, 0)
   })
 
-  it.skip('docs since repeated changes', async () => {
+  it('docs since repeated changes', async () => {
     assert.equal((await database.changesSince()).rows.length, 1)
     let resp, doc, changes
-    for (let index = 0; index < 200; index++) {
-      const id = '' + (300 - index).toString()
+    for (let index = 0; index < 30; index++) {
+      const id = '1' + (301 - index).toString()
+      // console.log(`Putting id: ${id}, index: ${index}`)
      resp = await database.put({ index, _id: id }).catch(e => {
-        assert.equal(e.message, 'put failed on _id: ' + id)
+        assert.fail(`put failed on _id: ${id}, error: ${e.message}`)
      })
-      assert(resp.id)
+      assert(resp.id, `Failed to obtain resp.id for _id: ${id}`)
+
+      // console.log(`vis for update id: ${id}, index:`, index)
+      // for await (const line of database.vis()) {
+      //   console.log(line)
+      // }
+
      doc = await database.get(resp.id).catch(e => {
-        console.trace('failed', e)
-        assert.equal(e.message, 'get failed on _id: ' + id)
+        console.log('failed', e)
+        assert.fail(`get failed on _id: ${id}, error: ${e.message}`)
      })
-      assert.equal(doc.index, index)
-      changes = await database.changesSince().catch(e => {
-        assert.equal(e.message, 'changesSince failed on _id: ' + id)
+
+      assert.equal(doc.index, index, `doc.index is not equal to index for _id: ${id}`)
+      changes = await database.changesSince().catch(async e => {
+        assert.fail(`changesSince failed on _id: ${id}, error: ${e.message}`)
+      })
+      changes.rows.forEach(row => {
+        for (const key in row) {
+          const value = row[key]
+          assert(!/^bafy/.test(value), `Unexpected "bafy..." value found at index ${index} in row ${JSON.stringify(row)}`)
+        }
      })
-      assert.equal(changes.rows.length, index + 2)
+
+      // console.log('changes: ', index, changes.rows.length, JSON.stringify(changes.rows))
+      assert.equal(changes.rows.length, index + 2, `failed on ${index}, with ${changes.rows.length} ${id}`)
    }
-  }).timeout(20000)
+  }).timeout(30000)
+
  it('concurrent transactions', async () => {
    assert.equal((await database.changesSince()).rows.length, 1)
    const promises = []
@@ -356,4 +404,11 @@ describe('Fireproof', () => {
     // await sleep(1000)
     assert.equal((await database.changesSince()).rows.length, 2)
   }).timeout(20000)
+  it('serialize database', async () => {
+    await database.put({ _id: 'rehy', name: 'drate' })
+    assert.equal((await database.changesSince()).rows.length, 2)
+    const serialized = JSON.parse(JSON.stringify(database))
+    assert.equal(serialized.name, 'helloName')
+    assert.equal(serialized.clock.length, 1)
+  })
 })
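These tests cover the new named storage factory, the changesSince feed, and JSON serialization of the database. A condensed sketch of that API surface as the tests use it (ids are illustrative; assumes a Node ESM context):

    // Sketch: named database, change feed, and serialization.
    import Fireproof from '../src/fireproof.js'

    const database = Fireproof.storage('helloName')
    const first = await database.put({ _id: 'doc-alice', name: 'alice', age: 42 })

    // With no argument changesSince returns every row; passing a clock returns only newer rows.
    const all = await database.changesSince()
    await database.put({ _id: 'doc-bob', name: 'bob', age: 44 })
    const newer = await database.changesSince(all.clock)
    console.log(first.id, all.rows.length, newer.rows.length) // 'doc-alice', 1, 1

    // Serializing keeps the name and the current clock head.
    const serialized = JSON.parse(JSON.stringify(database))
    console.log(serialized.name, serialized.clock.length) // 'helloName', 1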
package/test/helpers.js CHANGED
@@ -3,7 +3,7 @@ import crypto from 'node:crypto'
 import * as Link from 'multiformats/link'
 import * as raw from 'multiformats/codecs/raw'
 import { sha256 } from 'multiformats/hashes/sha2'
-import { MemoryBlockstore } from '../src/block.js'
+import { MemoryBlockstore } from './block.js'
 
 // console.x = console.log
 // console.log = function (...args) {