@fireproof/core 0.5.12 → 0.5.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,10 @@
- // @ts-nocheck
  /**
  * A Fireproof database Listener allows you to react to events in the database.
  *
  * @class Listener
  * @classdesc An listener attaches to a Fireproof database and runs a routing function on each change, sending the results to subscribers.
  *
- * @param {import('./database').Database} database - The Database database instance to index.
+ * @param {import('./database.js').Database} database - The Database database instance to index.
  * @param {Function} routingFn - The routing function to apply to each entry in the database.
  */
  // import { ChangeEvent } from './db-index'
@@ -16,7 +15,7 @@ export class Listener {
  subcribers = new Map();
  doStopListening = null;
  /**
- * @param {import('./database').Database} database
+ * @param {import('./database.js').Database} database
  * @param {(_: any, emit: any) => void} routingFn
  */
  constructor(database, routingFn = function (/** @type {any} */ _, /** @type {(arg0: string) => void} */ emit) {
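
For orientation, here is a minimal, hypothetical sketch of how this Listener might be wired up, based only on the JSDoc above. The `Fireproof.storage()` factory, the `Listener` export name, and the `listener.on()` subscription call are assumptions about this era of the API and are not confirmed by the diff; the payload passed to subscribers is likewise not shown here.

```js
// Hypothetical usage sketch; export names and subscription API are assumptions.
import { Fireproof, Listener } from '@fireproof/core'

const database = Fireproof.storage('my-app')

// The routing function receives each changed entry and an emit(topic) callback,
// matching the constructor signature shown in the diff above.
const listener = new Listener(database, (doc, emit) => {
  if (doc && doc.completed) emit('completed')
})

// Subscribers registered for a topic run whenever the routing function emits it.
// (What arguments they receive is not visible in this diff.)
listener.on('completed', () => {
  console.log('a completed document changed')
})
```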
@@ -1,6 +1,5 @@
- // @ts-nocheck
- import { advance, EventFetcher, EventBlock, findCommonAncestorWithSortedEvents, findEventsToSync, vis as visClock } from './clock';
- // import { create, load } from '../../../../prolly-trees/src/map'
+ import { advance, EventFetcher, EventBlock, findCommonAncestorWithSortedEvents, findEventsToSync, vis as visClock } from './clock.js';
+ // import { create, load } from '../../../../prolly-trees/src/map.js'
  // @ts-ignore
  import { create, load } from 'prolly-trees/map';
  // @ts-ignore
@@ -9,11 +8,12 @@ import { nocache as cache } from 'prolly-trees/cache';
  import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils';
  import * as codec from '@ipld/dag-cbor';
  import { sha256 as hasher } from 'multiformats/hashes/sha2';
- import { doTransaction } from './blockstore';
+ import { doTransaction } from './blockstore.js';
  import { create as createBlock } from 'multiformats/block';
  const blockOpts = { cache, chunker: bf(30), codec, hasher, compare };
+ // const SYNC_ROOT = 'fireproof' // change this if you want to break sync
  /**
- * @typedef {import('./blockstore').TransactionBlockstore} TransactionBlockstore
+ * @typedef {import('./blockstore.js').TransactionBlockstore} TransactionBlockstore
  */
  // const withLog = async (label, fn) => {
  // const resp = await fn()
@@ -42,13 +42,11 @@ export const makeGetBlock = blocks => {
  async function createAndSaveNewEvent({ inBlocks, bigPut, root, event: inEvent, head, additions, removals = [] }) {
  let cids;
  const { key, value, del } = inEvent;
+ // console.log('createAndSaveNewEvent', root.constructor.name, root.entryList)
+ // root = await root.block
  const data = {
  root: root
- ? {
- cid: root.cid,
- bytes: root.bytes,
- value: root.value // can we remove this?
- }
+ ? (await root.address)
  : null,
  key
  };
@@ -61,6 +59,27 @@ async function createAndSaveNewEvent({ inBlocks, bigPut, root, event: inEvent, h
  data.value = value;
  data.type = 'put';
  }
+ // console.log('head', head)
+ // if (head.length === 0) {
+ // // create an empty prolly root
+ // let emptyRoot
+ // for await (const node of create({ get: getBlock, list: [{ key: '_sync', value: SYNC_ROOT }], ...blockOpts })) {
+ // emptyRoot = await node.block
+ // bigPut(emptyRoot)
+ // }
+ // console.log('emptyRoot', emptyRoot)
+ // const first = await EventBlock.create(
+ // {
+ // root: emptyRoot.cid,
+ // key: null,
+ // value: null,
+ // type: 'del'
+ // },
+ // []
+ // )
+ // bigPut(first)
+ // head = [first.cid]
+ // }
  /** @type {import('./clock').EventData} */
  // @ts-ignore
  const event = await EventBlock.create(data, head);
@@ -123,16 +142,22 @@ const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
  // console.log('prollyRootFromAncestor', ancestor)
  const event = await events.get(ancestor);
  const { root } = event.value.data;
- // console.log('prollyRootFromAncestor', root.cid, JSON.stringify(root.value))
  if (root) {
- return load({ cid: root.cid, get: getBlock, ...blockOpts });
+ return load({ cid: root, get: getBlock, ...blockOpts });
  }
  else {
- return null;
+ // console.log('no root', root) // false means no common ancestor. null means empty database.
+ return root;
  }
  };
+ // async function bigMerge (events, head, getBlock) {
+ // const allRoots = await Promise.all(head.map(async h => prollyRootFromAncestor(events, h, getBlock)))
+ // console.log('allRoots', allRoots)
+ // // todo query over all roots and merge them, but how do they not have a common ancestor? they all start with the _sync root
+ // throw new Error('not implemented')
+ // }
  const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
- const { getBlock, blocks } = makeGetAndPutBlock(inBlocks);
+ const { getBlock, blocks } = makeGetAndPutBlock(inBlocks); // this is doubled with eventfetcher
  let bulkSorted = [];
  let prollyRootNode = null;
  const events = new EventFetcher(blocks);
@@ -144,26 +169,39 @@ const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
  // Otherwise, we find the common ancestor and update the root and other blocks
  // todo this is returning more events than necessary, lets define the desired semantics from the top down
  // good semantics mean we can cache the results of this call
+ // const {cids, events : bulkSorted } = await findEventsToSync(blocks, head)
  const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head, doFull);
  bulkSorted = sorted;
- // console.log('sorted', JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
- prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
+ // console.log('sorted', !!ancestor, JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
+ if (ancestor) {
+ prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
+ // if (!prollyRootNode) {
+ // prollyRootNode = await bigMerge(events, head, getBlock)
+ // // throw new Error('no common ancestor')
+ // }
+ }
  // console.log('event', event)
  }
  }
  const bulkOperations = bulkFromEvents(bulkSorted, event);
  // if prolly root node is null, we need to create a new one
  if (!prollyRootNode) {
+ // console.log('make new root', bulkOperations.length)
  let root;
+ // let rootNode
  const newBlocks = [];
  // if all operations are deletes, we can just return an empty root
  if (bulkOperations.every(op => op.del)) {
  return { root: null, blocks: [], clockCIDs: await events.all() };
  }
  for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
- root = await node.block;
- newBlocks.push(root);
+ // root = await node.block
+ root = node;
+ newBlocks.push(await node.block);
  }
+ // throw new Error('not root time')
+ // root.isThisOne = 'yes'
+ // console.log('made new root', root.constructor.name, root.block.cid.toString())
  return { root, blocks: newBlocks, clockCIDs: await events.all() };
  }
  else {
@@ -175,7 +213,7 @@ const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
  /**
  * Put a value (a CID) for the given key. If the key exists it's value is overwritten.
  *
- * @param {import('./blockstore').Blockstore} inBlocks Bucket block storage.
+ * @param {import('./blockstore.js').Blockstore} inBlocks Bucket block storage.
  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
  * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
  * @param {object} [options]
@@ -183,6 +221,7 @@ const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
  */
  export async function put(inBlocks, head, event, options) {
  const { bigPut } = makeGetAndPutBlock(inBlocks);
+ // console.log('major put')
  // If the head is empty, we create a new event and return the root and addition blocks
  if (!head.length) {
  const additions = new Map();
@@ -214,7 +253,7 @@ export async function put(inBlocks, head, event, options) {
  return createAndSaveNewEvent({
  inBlocks,
  bigPut,
- root: prollyRootBlock,
+ root: newProllyRootNode,
  event,
  head,
  additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
@@ -232,15 +271,20 @@ export async function root(inBlocks, head, doFull = false) {
  throw new Error('no head');
  }
  // console.log('root', head.map(h => h.toString()))
- const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head, null, doFull);
  // todo maybe these should go to a temp blockstore?
- await doTransaction('root', inBlocks, async (transactionBlocks) => {
+ return await doTransaction('root', inBlocks, async (transactionBlocks) => {
  const { bigPut } = makeGetAndPutBlock(transactionBlocks);
+ const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head, null, doFull);
+ //
+ // const rootBlock = await newProllyRootNode.block
+ // bigPut(rootBlock)
  for (const nb of newBlocks) {
  bigPut(nb);
  }
+ // console.log('root root', newProllyRootNode.constructor.name, newProllyRootNode)
+ return { clockCIDs, node: newProllyRootNode };
  }, false);
- return { clockCIDs, node: newProllyRootNode };
+ // return { clockCIDs, node: newProllyRootNode }
  }
  /**
  * Get the list of events not known by the `since` event
@@ -255,7 +299,8 @@ export async function eventsSince(blocks, head, since) {
  return { clockCIDs: [], result: [] };
  }
  // @ts-ignore
- const sinceHead = [...since, ...head]; // ?
+ const sinceHead = [...since, ...head].map(h => h.toString()); // ?
+ // console.log('eventsSince', sinceHead.map(h => h.toString()))
  const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
  return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) };
  }
@@ -292,10 +337,14 @@ async function rootOrCache(blocks, head, rootCache, doFull = false) {
  // console.log('finding root')
  // const callTag = Math.random().toString(36).substring(7)
  // console.time(callTag + '.root')
+ //
+ // const prevClock = [...this.clock]
  ;
  ({ node, clockCIDs } = await root(blocks, head, doFull));
+ // this.applyClock(prevClock, result.head)
+ // await this.notifyListeners([decodedEvent])
  // console.timeEnd(callTag + '.root')
- // console.log('found root')
+ // console.log('found root', node.entryList)
  }
  return { node, clockCIDs };
  }
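
As a rough, hypothetical sketch of how the exported prolly-level functions diffed above fit together: `blocks`, `head`, `someCid`, and `since` are placeholders for a TransactionBlockstore, Merkle clock heads, and a value CID (this file's header is missing from the diff, so no import path is shown); `put`'s return shape is not visible here and is treated as opaque, while the return shapes of `root` and `eventsSince` are taken from the hunks above.

```js
// Hypothetical usage of the functions exported by the module diffed above.
// `blocks`, `head`, `someCid`, and `since` are illustrative placeholders.

// put() writes a key/value event; its exact return shape is not visible in this diff.
const putResult = await put(blocks, head, { key: 'doc1', value: someCid })

// root() now runs doProllyBulk inside the doTransaction callback and returns
// the prolly root node together with the clock CIDs it traversed.
const { node, clockCIDs } = await root(blocks, head)

// eventsSince() stringifies the combined since + head CIDs before calling
// findEventsToSync, and returns the event data not known to `since`.
const { clockCIDs: cids, result } = await eventsSince(blocks, head, since)
```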
package/dist/src/sha1.js CHANGED
@@ -1,4 +1,5 @@
- // from https://github.com/duzun/sync-sha1/blob/master/rawSha1
+ // @ts-nocheck
+ // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
  // MIT License Copyright (c) 2020 Dumitru Uzun
  // Permission is hereby granted, free of charge, to any person obtaining a copy
  // of this software and associated documentation files (the "Software"), to deal
package/dist/src/sync.js CHANGED
@@ -7,18 +7,13 @@ import { CarReader } from '@ipld/car';
  * @typedef {import('./database.js').Database} Database
  */
  export class Sync {
- PeerClass;
- database;
- pushBacklog;
- pushBacklogResolve;
- pushBacklogReject;
- peer;
  /**
  * @param {Database} database
  * @param {typeof SimplePeer} [PeerClass]
  * @memberof Sync
  * @static
  */
+ status = 'new';
  constructor(database, PeerClass = SimplePeer) {
  this.database = database;
  this.database.blocks.syncs.add(this); // should this happen during setup?
@@ -27,22 +22,30 @@ export class Sync {
  this.pushBacklogResolve = resolve;
  this.pushBacklogReject = reject;
  });
+ this.isReady = false;
  // this.pushBacklog.then(() => {
  // // console.log('sync backlog resolved')
  // this.database.notifyReset()
  // })
+ // this.connected = new Promise((resolve, reject) => {
+ // this.readyResolve = resolve
+ // this.readyReject = reject
+ // })
  }
  async offer() {
+ this.status = 'offering';
  return this.setupPeer(true);
  }
  async accept(base64offer) {
  const offer = JSON.parse(atob(base64offer));
  const p = this.setupPeer(false);
  this.peer.signal(offer);
+ this.status = 'accepting';
  return p;
  }
  connect(base64accept) {
  const accept = JSON.parse(atob(base64accept));
+ this.status = 'connecting';
  this.peer.signal(accept);
  }
  async setupPeer(initiator = false) {
@@ -71,6 +74,8 @@ export class Sync {
  // console.log('not a car', data.toString())
  }
  if (reader) {
+ // console.log('got car')
+ this.status = 'parking car';
  const blz = new Set();
  for await (const block of reader.blocks()) {
  blz.add(block);
@@ -82,7 +87,7 @@ export class Sync {
  // this.database.clock.map(c => c.toString())
  // )
  // console.log(
- // 'got blocks',
+ // 'got blocks!',
  // [...blz].map(({ cid }) => cid.toString())
  // )
  // @ts-ignore
@@ -114,35 +119,48 @@ export class Sync {
  const message = JSON.parse(data.toString());
  // console.log('got message', message)
  if (message.ok) {
+ this.status = 'ok';
  this.pushBacklogResolve({ ok: true });
  }
  else if (message.clock) {
  const reqCidDiff = message;
  // this might be a CID diff
- console.log('got diff', reqCidDiff);
+ // console.log('got diff', reqCidDiff)
  const carBlock = await Sync.makeCar(this.database, null, reqCidDiff.cids);
  if (!carBlock) {
  // we are full synced
  // console.log('we are full synced')
+ this.status = 'full synced';
  this.peer.send(JSON.stringify({ ok: true }));
  // this.pushBacklogResolve({ ok: true })
  }
  else {
- // console.log('do send', carBlock.bytes.length)
+ // console.log('do send diff', carBlock.bytes.length)
+ this.status = 'sending diff car';
  this.peer.send(carBlock.bytes);
+ // console.log('sent diff car')
  // this.pushBacklogResolve({ ok: true })
  }
  }
  }
  }
+ destroy() {
+ this.database.blocks.syncs.delete(this);
+ this.status = 'destroyed';
+ this.peer.destroy();
+ }
  async sendUpdate(blockstore) {
+ if (!this.peer || !this.isReady)
+ return;
  // console.log('send update from', this.database.instanceId)
  // todo should send updates since last sync
  const newCar = await blocksToCarBlock(blockstore.lastCid, blockstore);
+ this.status = 'sending update car';
  this.peer.send(newCar.bytes);
  }
  async startSync() {
  // console.log('start sync', this.peer.initiator)
+ this.isReady = true;
  const allCIDs = await this.database.allStoredCIDs();
  // console.log('allCIDs', allCIDs)
  const reqCidDiff = {
@@ -150,6 +168,7 @@ export class Sync {
  cids: allCIDs.map(cid => cid.toString())
  };
  // console.log('send diff', reqCidDiff)
+ this.status = 'sending cid diff';
  this.peer.send(JSON.stringify(reqCidDiff));
  }
  // get all the cids
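
A minimal, hypothetical sketch of the WebRTC handshake this Sync class exposes, including the new `status` field. The `Fireproof.storage()` factory and the `Sync` export name are assumptions, and the idea that `offer()`/`accept()` resolve to base64-encoded signal strings is inferred from `accept()` and `connect()` taking base64 arguments; none of this is confirmed outside the diffed methods themselves.

```js
// Hypothetical handshake sketch; export names and resolved values are assumptions.
import { Fireproof, Sync } from '@fireproof/core'

const dbA = Fireproof.storage('peer-a')
const dbB = Fireproof.storage('peer-b')

const syncA = new Sync(dbA) // status starts as 'new'
const syncB = new Sync(dbB)

const offer = await syncA.offer()        // status: 'offering'
const accept = await syncB.accept(offer) // status: 'accepting'
syncA.connect(accept)                    // status: 'connecting'

// Once connected, the peers exchange a CID diff and CAR files; status moves
// through 'sending cid diff', 'sending diff car', 'parking car', and 'ok'.

// Tear down: removes the sync from the blockstore's sync set and destroys the peer.
syncA.destroy() // status: 'destroyed'
syncB.destroy()
```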
package/dist/src/valet.js CHANGED
@@ -35,7 +35,11 @@ export class Valet {
  this.name = name;
  this.setKeyMaterial(keyMaterial);
  this.uploadQueue = cargoQueue(async (tasks, callback) => {
- console.log('queue worker', tasks.length, tasks.reduce((acc, t) => acc + t.value.length, 0));
+ // console.log(
+ // 'queue worker',
+ // tasks.length,
+ // tasks.reduce((acc, t) => acc + t.value.length, 0)
+ // )
  if (this.uploadFunction) {
  // todo we can coalesce these into a single car file
  return await this.withDB(async (db) => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@fireproof/core",
- "version": "0.5.12",
+ "version": "0.5.14",
  "description": "Cloudless database for apps, the browser, and IPFS",
  "main": "dist/src/fireproof.js",
  "module": "dist/src/fireproof.mjs",
@@ -19,7 +19,7 @@
  "lint": "standard",
  "lint:fix": "standard --fix",
  "tsc": "tsc --watch",
- "build": "rollup -c"
+ "build": "tsc && rollup -c"
  },
  "keywords": [
  "database",
package/src/blockstore.js CHANGED
@@ -163,6 +163,7 @@ export class TransactionBlockstore {
  await this.doCommit(innerBlockstore)
  if (doSync) {
  // const all =
+ // console.log('syncing', innerBlockstore.label)
  await Promise.all([...this.syncs].map(async sync => sync.sendUpdate(innerBlockstore).catch(e => {
  console.error('sync error, cancelling', e)
  sync.destroy()