@fireproof/core 0.19.8-dev-global → 0.19.9-dev-frag

Files changed (58)
  1. package/README.md +7 -0
  2. package/chunk-7EWIAXTM.js +7 -0
  3. package/chunk-7EWIAXTM.js.map +1 -0
  4. package/chunk-JO5AVWG7.js +67 -0
  5. package/chunk-JO5AVWG7.js.map +1 -0
  6. package/chunk-PB4BKL4O.js +7 -0
  7. package/chunk-PB4BKL4O.js.map +1 -0
  8. package/chunk-YS4GL6OK.js +266 -0
  9. package/chunk-YS4GL6OK.js.map +1 -0
  10. package/{store-indexdb-WLRSICCB.js → gateway-IZRHJWPE.js} +48 -80
  11. package/gateway-IZRHJWPE.js.map +1 -0
  12. package/gateway-YSNUK2L3.js +145 -0
  13. package/gateway-YSNUK2L3.js.map +1 -0
  14. package/index.cjs +2132 -1783
  15. package/index.cjs.map +1 -1
  16. package/index.d.cts +613 -513
  17. package/index.d.ts +613 -513
  18. package/index.global.js +19366 -20107
  19. package/index.global.js.map +1 -1
  20. package/index.js +1512 -1022
  21. package/index.js.map +1 -1
  22. package/key-bag-file-NMEBFSPM.js +54 -0
  23. package/key-bag-file-NMEBFSPM.js.map +1 -0
  24. package/key-bag-indexdb-X5V6GNBZ.js +50 -0
  25. package/key-bag-indexdb-X5V6GNBZ.js.map +1 -0
  26. package/mem-filesystem-B6C6QOIP.js +41 -0
  27. package/mem-filesystem-B6C6QOIP.js.map +1 -0
  28. package/metafile-cjs.json +1 -1
  29. package/metafile-esm.json +1 -1
  30. package/metafile-iife.json +1 -1
  31. package/node-filesystem-5JLBSHKQ.js +41 -0
  32. package/node-filesystem-5JLBSHKQ.js.map +1 -0
  33. package/package.json +8 -7
  34. package/tests/blockstore/fragment-gateway.test.ts +107 -0
  35. package/tests/blockstore/keyed-crypto.test.ts +302 -0
  36. package/tests/blockstore/loader.test.ts +24 -19
  37. package/tests/blockstore/store.test.ts +34 -28
  38. package/tests/blockstore/transaction.test.ts +19 -15
  39. package/tests/fireproof/config.test.ts +94 -78
  40. package/tests/fireproof/crdt.test.ts +34 -28
  41. package/tests/fireproof/database.test.ts +22 -14
  42. package/tests/fireproof/fireproof.test.fixture.ts +133 -0
  43. package/tests/fireproof/fireproof.test.ts +331 -219
  44. package/tests/fireproof/hello.test.ts +6 -4
  45. package/tests/fireproof/indexer.test.ts +34 -27
  46. package/tests/fireproof/utils.test.ts +65 -0
  47. package/tests/helpers.ts +25 -57
  48. package/utils-IZPK4QS7.js +14 -0
  49. package/utils-IZPK4QS7.js.map +1 -0
  50. package/chunk-BNL4PVBF.js +0 -314
  51. package/chunk-BNL4PVBF.js.map +0 -1
  52. package/chunk-JW2QT6BF.js +0 -184
  53. package/chunk-JW2QT6BF.js.map +0 -1
  54. package/node-sys-container-MIEX6ELJ.js +0 -29
  55. package/node-sys-container-MIEX6ELJ.js.map +0 -1
  56. package/store-file-VJ6BI4II.js +0 -191
  57. package/store-file-VJ6BI4II.js.map +0 -1
  58. package/store-indexdb-WLRSICCB.js.map +0 -1
package/tests/fireproof/fireproof.test.ts
@@ -1,8 +1,8 @@
- import { sleep, itSkip } from "../helpers.js";
-
+ import { mockSuperThis, sleep, storageURL } from "../helpers.js";
+ import { docs } from "./fireproof.test.fixture.js";
  import { CID } from "multiformats/cid";

- import { bs, rt, fireproof, Database, index, DocResponse, IndexRows, DocWithId, Index, MapFn } from "@fireproof/core";
+ import { Database, DocResponse, DocWithId, Index, IndexRows, MapFn, bs, fireproof, index } from "@fireproof/core";

  export function carLogIncludesGroup(list: bs.AnyLink[], cid: CID) {
  return list.some((c) => c.equals(cid));
@@ -25,12 +25,13 @@ describe("dreamcode", function () {
  let doc: DocWithId<Doc>;
  let result: IndexRows<string, Doc>;
  let db: Database;
+ const sthis = mockSuperThis();
  afterEach(async function () {
  await db.close();
  await db.destroy();
  });
  beforeEach(async function () {
- await rt.SysContainer.start();
+ await sthis.start();
  db = fireproof("test-db");
  ok = await db.put({ _id: "test-1", text: "fireproof", dream: true });
  doc = await db.get(ok.id);
@@ -66,14 +67,15 @@ describe("public API", function () {
  let ok: DocResponse;
  let doc: DocWithId<Doc>;
  let query: IndexRows<string, Doc>;
+ const sthis = mockSuperThis();

- afterEach(async function () {
+ afterEach(async () => {
  await db.close();
  await db.destroy();
  });

  beforeEach(async function () {
- await rt.SysContainer.start();
+ await sthis.start();
  db = fireproof("test-api");
  // index = index(db, 'test-index', (doc) => doc.foo)
  ok = await db.put({ _id: "test", foo: "bar" });
@@ -104,12 +106,13 @@ describe("basic database", function () {
  foo: string;
  }
  let db: Database<Doc>;
+ const sthis = mockSuperThis();
  afterEach(async function () {
  await db.close();
  await db.destroy();
  });
  beforeEach(async function () {
- await rt.SysContainer.start();
+ await sthis.start();
  db = new Database("test-basic");
  });
  it("can put with id", async function () {
@@ -126,7 +129,7 @@ describe("basic database", function () {
  it("can define an index", async function () {
  const ok = await db.put({ _id: "test", foo: "bar" });
  expect(ok).toBeTruthy();
- const idx = index<string, { foo: string }>(db, "test-index", (doc) => doc.foo);
+ const idx = index<string, { foo: string }>(sthis, db, "test-index", (doc) => doc.foo);
  const result = await idx.query();
  expect(result).toBeTruthy();
  expect(result.rows).toBeTruthy();
@@ -136,87 +139,99 @@ describe("basic database", function () {
  it("can define an index with a default function", async function () {
  const ok = await db.put({ _id: "test", foo: "bar" });
  expect(ok).toBeTruthy();
- const idx = index(db, "foo");
+ const idx = index(sthis, db, "foo");
  const result = await idx.query();
  expect(result).toBeTruthy();
  expect(result.rows).toBeTruthy();
  expect(result.rows.length).toBe(1);
  expect(result.rows[0].key).toBe("bar");
  });
+ it("should query with multiple successive functions", async function () {
+ interface TestDoc {
+ _id: string;
+ foo: string;
+ baz: string;
+ }
+ await db.put<TestDoc>({ _id: "test", foo: "bar", baz: "qux" });
+ const query1 = await db.query<string, TestDoc>((doc) => {
+ return doc.foo;
+ });
+ const query2 = await db.query<string, TestDoc>((doc) => {
+ return doc.baz;
+ });
+ expect(query1).toBeTruthy();
+ expect(query1.rows).toBeTruthy();
+ expect(query1.rows.length).toBe(1);
+ expect(query2).toBeTruthy();
+ expect(query2.rows).toBeTruthy();
+ expect(query2.rows.length).toBe(1);
+ });
  });

  describe("benchmarking with compaction", function () {
  let db: Database;
+ const sthis = mockSuperThis();
  afterEach(async function () {
  await db.close();
  await db.destroy();
  });
  beforeEach(async function () {
  // erase the existing test data
- await rt.SysContainer.start();
- db = new Database("test-benchmark-compaction", { autoCompact: 3, public: true });
+ await sthis.start();
+ db = new Database("test-benchmark-compaction", { autoCompact: 3 });
  });
- itSkip(
- "passing: insert during compaction",
- async function () {
- const ok = await db.put({ _id: "test", foo: "fast" });
- expect(ok).toBeTruthy();
- expect(ok.id).toBe("test");
- expect(db._crdt.clock.head).toBeTruthy();
- expect(db._crdt.clock.head.length).toBe(1);
-
- const numDocs = 20000;
- const batchSize = 500;
- console.time(`insert and read ${numDocs} records`);
-
- const doing = null;
- for (let i = 0; i < numDocs; i += batchSize) {
- const ops: Promise<DocResponse>[] = [];
- db.put({ foo: "fast" });
- // await doing
- // doing = db.compact()
- db.put({ foo: "fast" });
- for (let j = 0; j < batchSize && i + j < numDocs; j++) {
- ops.push(
- db.put({
- data: Math.random(),
- fire: Math.random()
- .toString()
- .repeat(25 * 1024),
- }),
- );
- }
- const blocks = db._crdt.blockstore as bs.EncryptedBlockstore;
- const loader = blocks.loader;
- expect(loader).toBeTruthy();
- const label = `write ${i} log ${loader.carLog.length}`;
- console.time(label);
- db.put({
- data: Math.random(),
- fire: Math.random()
- .toString()
- .repeat(25 * 1024),
- });
+ it("insert during compaction", async function () {
+ const ok = await db.put({ _id: "test", foo: "fast" });
+ expect(ok).toBeTruthy();
+ expect(ok.id).toBe("test");
+ expect(db._crdt.clock.head).toBeTruthy();
+ expect(db._crdt.clock.head.length).toBe(1);

- await Promise.all(ops);
- console.timeEnd(label);
+ const numDocs = 20;
+ const batchSize = 5;
+
+ const doing = null;
+ for (let i = 0; i < numDocs; i += batchSize) {
+ // console.log("batch", i, db.blockstore.loader?.carLog.length);
+ const ops: Promise<DocResponse>[] = [];
+ db.put({ foo: "fast" });
+ // await doing
+ // doing = db.compact()
+ db.put({ foo: "fast" });
+ for (let j = 0; j < batchSize && i + j < numDocs; j++) {
+ ops.push(
+ db.put({
+ data: Math.random(),
+ fire: Math.random().toString().repeat(25),
+ }),
+ );
  }
- await doing;
- console.timeEnd(`insert and read ${numDocs} records`);
- },
- 20000000,
- );
+ const blocks = db._crdt.blockstore as bs.EncryptedBlockstore;
+ const loader = blocks.loader;
+ expect(loader).toBeTruthy();
+
+ db.put({
+ data: Math.random(),
+ fire: Math.random().toString().repeat(25),
+ });
+
+ await Promise.all(ops);
+ // console.log("batch done", i, db.blockstore.loader?.carLog.length);
+ }
+ await doing;
+ });
  });

  describe("benchmarking a database", function () {
  /** @type {Database} */
  let db: Database;
+ const sthis = mockSuperThis();
  afterEach(async function () {
  await db.close();
  await db.destroy();
  });
  beforeEach(async function () {
- await rt.SysContainer.start();
+ await sthis.start();
  // erase the existing test data
  db = new Database("test-benchmark", { autoCompact: 100000, public: true });
  // db = new Database(null, {autoCompact: 100000})
@@ -227,144 +242,140 @@ describe("benchmarking a database", function () {
  // run:
  // npm test -- --grep 'insert and read many records'
  //
- itSkip(
- "passing: insert and read many records",
- async () => {
- const ok = await db.put({ _id: "test", foo: "fast" });
- expect(ok).toBeTruthy();
- expect(ok.id).toBe("test");
-
- expect(db._crdt.clock.head).toBeTruthy();
- expect(db._crdt.clock.head.length).toBe(1);
-
- const numDocs = 2500;
- const batchSize = 500;
- console.time(`insert and read ${numDocs} records`);
-
- for (let i = 0; i < numDocs; i += batchSize) {
- const ops: Promise<DocResponse>[] = [];
- for (let j = 0; j < batchSize && i + j < numDocs; j++) {
- ops.push(
- db
- .put({
- _id: `test${i + j}`,
- fire: Math.random()
- .toString()
- .repeat(25 * 1024),
- })
- .then((ok) => {
- db.get<{ fire: string }>(`test${i + j}`).then((doc) => {
- expect(doc.fire).toBeTruthy();
- });
- return ok;
- }),
- );
- }
- await Promise.all(ops);
+ it.skip("passing: insert and read many records", async () => {
+ const ok = await db.put({ _id: "test", foo: "fast" });
+ expect(ok).toBeTruthy();
+ expect(ok.id).toBe("test");
+
+ expect(db._crdt.clock.head).toBeTruthy();
+ expect(db._crdt.clock.head.length).toBe(1);
+
+ const numDocs = 2500;
+ const batchSize = 500;
+ // console.time(`insert and read ${numDocs} records`);
+
+ for (let i = 0; i < numDocs; i += batchSize) {
+ const ops: Promise<DocResponse>[] = [];
+ for (let j = 0; j < batchSize && i + j < numDocs; j++) {
+ ops.push(
+ db
+ .put({
+ _id: `test${i + j}`,
+ fire: Math.random()
+ .toString()
+ .repeat(25 * 1024),
+ })
+ .then((ok) => {
+ db.get<{ fire: string }>(`test${i + j}`).then((doc) => {
+ expect(doc.fire).toBeTruthy();
+ });
+ return ok;
+ }),
+ );
  }
+ await Promise.all(ops);
+ }

- console.timeEnd(`insert and read ${numDocs} records`);
+ // console.timeEnd(`insert and read ${numDocs} records`);

- // console.time('allDocs')
- // const allDocsResult2 = await db.allDocs()
- // console.timeEnd('allDocs')
- // equals(allDocsResult2.rows.length, numDocs+1)
+ // console.time('allDocs')
+ // const allDocsResult2 = await db.allDocs()
+ // console.timeEnd('allDocs')
+ // equals(allDocsResult2.rows.length, numDocs+1)

- console.time("open new DB");
- const newDb = new Database("test-benchmark", { autoCompact: 100000, public: true });
- const doc = await newDb.get<{ foo: string }>("test");
- expect(doc.foo).toBe("fast");
- console.timeEnd("open new DB");
+ // console.time("open new DB");
+ const newDb = new Database("test-benchmark", { autoCompact: 100000, public: true });
+ const doc = await newDb.get<{ foo: string }>("test");
+ expect(doc.foo).toBe("fast");
+ // console.timeEnd("open new DB");

- console.time("changes");
- const result = await db.changes(); // takes 1.5 seconds (doesn't have to load blocks from cars)
- console.timeEnd("changes");
- expect(result.rows.length).toBe(numDocs + 1);
+ // console.time("changes");
+ const result = await db.changes(); // takes 1.5 seconds (doesn't have to load blocks from cars)
+ // console.timeEnd("changes");
+ expect(result.rows.length).toBe(numDocs + 1);

- // this takes 1 minute w 1000 docs
- console.time("changes new DB");
- const result2 = await newDb.changes();
- console.timeEnd("changes new DB");
- expect(result2.rows.length).toBe(numDocs + 1);
+ // this takes 1 minute w 1000 docs
+ // console.time("changes new DB");
+ const result2 = await newDb.changes();
+ // console.timeEnd("changes new DB");
+ expect(result2.rows.length).toBe(numDocs + 1);

- await sleep(1000);
+ await sleep(1000);

- console.log("begin compact");
+ // console.log("begin compact");

- await sleep(100);
+ await sleep(100);

- console.time("COMPACT");
- await db.compact();
- console.timeEnd("COMPACT");
+ // console.time("COMPACT");
+ await db.compact();
+ // console.timeEnd("COMPACT");

- // todo compaction should not need this write to show in the new db
- await db.put({ _id: "compacted-test", foo: "bar" });
+ // todo compaction should not need this write to show in the new db
+ await db.put({ _id: "compacted-test", foo: "bar" });

- // console.log('car log length', db._crdt.blockstore.loader.carLog.length)
- const blocks = db._crdt.blockstore as bs.EncryptedBlockstore;
- const loader = blocks.loader;
- expect(loader).toBeTruthy();
- expect(loader.carLog.length).toBe(2);
-
- // console.time('allDocs new DB') // takes forever on 5k
- // const allDocsResult = await newDb.allDocs()
- // console.timeEnd('allDocs new DB')
- // equals(allDocsResult.rows.length, numDocs+1)
- await sleep(100);
-
- console.time("compacted reopen again");
- const newDb2 = new Database("test-benchmark", { autoCompact: 100000, public: true });
- const doc21 = await newDb2.get<FooType>("test");
- expect(doc21.foo).toBe("fast");
- const blocks2 = newDb2._crdt.blockstore as bs.EncryptedBlockstore;
- const loader2 = blocks2.loader;
- expect(loader2).toBeTruthy();
-
- expect(loader2.carLog.length).toBe(2);
-
- const doc2 = await newDb2.get<FooType>("compacted-test");
-
- expect(doc2.foo).toBe("bar");
-
- expect(doc2.foo).toBe("bar");
- console.timeEnd("compacted reopen again");
-
- await sleep(100);
-
- console.time("compacted changes new DB2");
- const result3 = await newDb2.changes();
- console.timeEnd("compacted changes new DB2");
- expect(result3.rows.length).toBe(numDocs + 2);
-
- console.time("compacted newDb2 insert and read 100 records");
- const ops2: Promise<void>[] = [];
- for (let i = 0; i < 100; i++) {
- const ok = newDb2
- .put({
- _id: `test${i}`,
- fire: Math.random()
- .toString()
- .repeat(25 * 1024),
- })
- .then(() => {
- newDb2.get<{ fire: number }>(`test${i}`).then((doc) => {
- expect(doc.fire).toBeTruthy();
- });
+ // console.log('car log length', db._crdt.blockstore.loader.carLog.length)
+ const blocks = db._crdt.blockstore as bs.EncryptedBlockstore;
+ const loader = blocks.loader;
+ expect(loader).toBeTruthy();
+ expect(loader.carLog.length).toBe(2);
+
+ // console.time('allDocs new DB') // takes forever on 5k
+ // const allDocsResult = await newDb.allDocs()
+ // console.timeEnd('allDocs new DB')
+ // equals(allDocsResult.rows.length, numDocs+1)
+ await sleep(100);
+
+ // console.time("compacted reopen again");
+ const newDb2 = new Database("test-benchmark", { autoCompact: 100000, public: true });
+ const doc21 = await newDb2.get<FooType>("test");
+ expect(doc21.foo).toBe("fast");
+ const blocks2 = newDb2._crdt.blockstore as bs.EncryptedBlockstore;
+ const loader2 = blocks2.loader;
+ expect(loader2).toBeTruthy();
+
+ expect(loader2.carLog.length).toBe(2);
+
+ const doc2 = await newDb2.get<FooType>("compacted-test");
+
+ expect(doc2.foo).toBe("bar");
+
+ expect(doc2.foo).toBe("bar");
+ // console.timeEnd("compacted reopen again");
+
+ await sleep(100);
+
+ // console.time("compacted changes new DB2");
+ const result3 = await newDb2.changes();
+ // console.timeEnd("compacted changes new DB2");
+ expect(result3.rows.length).toBe(numDocs + 2);
+
+ // console.time("compacted newDb2 insert and read 100 records");
+ const ops2: Promise<void>[] = [];
+ for (let i = 0; i < 100; i++) {
+ const ok = newDb2
+ .put({
+ _id: `test${i}`,
+ fire: Math.random()
+ .toString()
+ .repeat(25 * 1024),
+ })
+ .then(() => {
+ newDb2.get<{ fire: number }>(`test${i}`).then((doc) => {
+ expect(doc.fire).toBeTruthy();
  });
- ops2.push(ok);
- }
- await Promise.all(ops2);
- console.timeEnd("compacted newDb2 insert and read 100 records");
-
- // triggers OOM on my machine
- // await sleep(100)
- // console.time('compacted allDocs new DB2')
- // const allDocsResult3 = await newDb2.allDocs()
- // console.timeEnd('compacted allDocs new DB2')
- // equals(allDocsResult3.rows.length, numDocs+2)
- },
- 20000000,
- );
+ });
+ ops2.push(ok);
+ }
+ await Promise.all(ops2);
+ // console.timeEnd("compacted newDb2 insert and read 100 records");
+
+ // triggers OOM on my machine
+ // await sleep(100)
+ // console.time('compacted allDocs new DB2')
+ // const allDocsResult3 = await newDb2.allDocs()
+ // console.timeEnd('compacted allDocs new DB2')
+ // equals(allDocsResult3.rows.length, numDocs+2)
+ }, 20000000);
  });

  describe("Reopening a database", function () {
@@ -372,13 +383,14 @@ describe("Reopening a database", function () {
  foo: string;
  }
  let db: Database;
+ const sthis = mockSuperThis();
  afterEach(async function () {
  await db.close();
  await db.destroy();
  });
  beforeEach(async function () {
  // erase the existing test data
- await rt.SysContainer.start();
+ await sthis.start();

  db = new Database("test-reopen", { autoCompact: 100000 });
  const ok = await db.put({ _id: "test", foo: "bar" });
@@ -442,34 +454,30 @@ describe("Reopening a database", function () {
  }
  }, 20000);

- itSkip(
- "passing slow, should have the same data on reopen after reopen and update",
- async function () {
- for (let i = 0; i < 200; i++) {
- // console.log("iteration", i);
- // console.time("db open");
- const db = new Database("test-reopen", { autoCompact: 1000 }); // try with 10
- // assert(db._crdt.ready);
- await db._crdt.ready();
- // console.timeEnd("db open");
- const blocks = db._crdt.blockstore as bs.EncryptedBlockstore;
- const loader = blocks.loader;
- expect(loader).toBeDefined();
- expect(loader.carLog.length).toBe(i + 1);
- // console.log('car log length', loader.carLog.length)
- // console.time("db put");
- const ok = await db.put({ _id: `test${i}`, fire: "proof".repeat(50 * 1024) });
- // console.timeEnd("db put");
- expect(ok).toBeTruthy();
- expect(loader.carLog.length).toBe(i + 2);
- // console.time("db get");
- const doc = await db.get<FireType>(`test${i}`);
- // console.timeEnd("db get");
- expect(doc.fire).toBe("proof".repeat(50 * 1024));
- }
- },
- 200000,
- );
+ it.skip("passing slow, should have the same data on reopen after reopen and update", async function () {
+ for (let i = 0; i < 200; i++) {
+ // console.log("iteration", i);
+ // console.time("db open");
+ const db = new Database("test-reopen", { autoCompact: 1000 }); // try with 10
+ // assert(db._crdt.ready);
+ await db._crdt.ready();
+ // console.timeEnd("db open");
+ const blocks = db._crdt.blockstore as bs.EncryptedBlockstore;
+ const loader = blocks.loader;
+ expect(loader).toBeDefined();
+ expect(loader.carLog.length).toBe(i + 1);
+ // console.log('car log length', loader.carLog.length)
+ // console.time("db put");
+ const ok = await db.put({ _id: `test${i}`, fire: "proof".repeat(50 * 1024) });
+ // console.timeEnd("db put");
+ expect(ok).toBeTruthy();
+ expect(loader.carLog.length).toBe(i + 2);
+ // console.time("db get");
+ const doc = await db.get<FireType>(`test${i}`);
+ // console.timeEnd("db get");
+ expect(doc.fire).toBe("proof".repeat(50 * 1024));
+ }
+ }, 200000);
  });

  describe("Reopening a database with indexes", function () {
@@ -480,12 +488,13 @@ describe("Reopening a database with indexes", function () {
  let idx: Index<string, Doc>;
  let didMap: boolean;
  let mapFn: MapFn<Doc>;
+ const sthis = mockSuperThis();
  afterEach(async function () {
  await db.close();
  await db.destroy();
  });
  beforeEach(async function () {
- await rt.SysContainer.start();
+ await sthis.start();
  db = fireproof("test-reopen-idx");
  const ok = await db.put({ _id: "test", foo: "bar" });
  expect(ok.id).toBe("test");
@@ -496,13 +505,13 @@ describe("Reopening a database with indexes", function () {
  didMap = true;
  return doc.foo;
  };
- idx = index<string, Doc>(db, "foo", mapFn);
+ idx = index<string, Doc>(sthis, db, "foo", mapFn);
  });

  it("should persist data", async function () {
  const doc = await db.get<Doc>("test");
  expect(doc.foo).toBe("bar");
- const idx2 = index<string, Doc>(db, "foo");
+ const idx2 = index<string, Doc>(sthis, db, "foo");
  expect(idx2).toBe(idx);
  const result = await idx2.query();
  expect(result).toBeTruthy();
@@ -513,7 +522,7 @@ describe("Reopening a database with indexes", function () {
  });

  it("should reuse the index", async function () {
- const idx2 = index(db, "foo", mapFn);
+ const idx2 = index(sthis, db, "foo", mapFn);
  expect(idx2).toBe(idx);
  const result = await idx2.query();
  expect(result).toBeTruthy();
@@ -576,8 +585,9 @@ describe("Reopening a database with indexes", function () {
  });

  describe("basic js verify", function () {
+ const sthis = mockSuperThis();
  beforeAll(async function () {
- await rt.SysContainer.start();
+ await sthis.start();
  });
  it("should include cids in arrays", async function () {
  const db = fireproof("test-verify");
@@ -600,3 +610,105 @@ describe("basic js verify", function () {
  await db.destroy();
  });
  });
+
+ describe("same workload twice, same CID", function () {
+ let dbA: Database;
+ let dbB: Database;
+ let headA: string;
+ let headB: string;
+
+ const sthis = mockSuperThis();
+ // let configA: any;
+ // let configB: any;
+
+ const configA = {
+ store: {
+ stores: {
+ base: storageURL(sthis).build().setParam("storekey", "@test@"),
+ },
+ },
+ };
+
+ const configB = {
+ store: {
+ stores: {
+ base: storageURL(sthis).build().setParam("storekey", "@test@"),
+ },
+ },
+ };
+
+ afterEach(async function () {
+ await dbA.close();
+ await dbA.destroy();
+ await dbB.close();
+ await dbB.destroy();
+ });
+ beforeEach(async function () {
+ let ok: DocResponse;
+ await sthis.start();
+ // todo this fails because the test setup doesn't properly configure both databases to use the same key
+ dbA = fireproof("test-dual-workload-a", configA);
+ for (const doc of docs) {
+ ok = await dbA.put(doc);
+ expect(ok).toBeTruthy();
+ expect(ok.id).toBeTruthy();
+ }
+ headA = dbA._crdt.clock.head.toString();
+
+ // todo this fails because the test setup doesn't properly configure both databases to use the same key
+ dbB = fireproof("test-dual-workload-b", configB);
+ for (const doc of docs) {
+ ok = await dbB.put(doc);
+ expect(ok).toBeTruthy();
+ expect(ok.id).toBeTruthy();
+ }
+ headB = dbB._crdt.clock.head.toString();
+ });
+ it("should have head A and B", async function () {
+ expect(headA).toBeTruthy();
+ expect(headB).toBeTruthy();
+ expect(headA).toEqual(headB);
+ expect(headA.length).toBeGreaterThan(10);
+ });
+ it("should have same car log", async function () {
+ const logA = dbA._crdt.blockstore.loader?.carLog;
+ expect(logA).toBeTruthy();
+ assert(logA);
+ expect(logA.length).toBe(38);
+
+ const logB = dbB._crdt.blockstore.loader?.carLog;
+ expect(logB).toBeTruthy();
+ assert(logB);
+ expect(logB.length).toBe(38);
+
+ const logA2 = logA.map((c) => c.toString());
+ const logB2 = logB.map((c) => c.toString());
+
+ expect(logA2.length).toBe(logB2.length);
+
+ // todo this fails because the test setup doesn't properly configure both databases to use the same key
+ // expect(logA2).toEqual(logB2);
+ });
+ it("should have same car log after compact", async function () {
+ await dbA.compact();
+ await dbB.compact();
+
+ const cmpLogA = dbA._crdt.blockstore.loader?.carLog;
+ expect(cmpLogA).toBeTruthy();
+ assert(cmpLogA);
+ expect(cmpLogA.length).toBe(1);
+
+ const cmpLogB = dbB._crdt.blockstore.loader?.carLog;
+ expect(cmpLogB).toBeTruthy();
+ assert(cmpLogB);
+ expect(cmpLogB.length).toBe(1);
+
+ const cmpLogA2 = cmpLogA.map((c) => c.toString());
+ const cmpLogB2 = cmpLogB.map((c) => c.toString());
+
+ expect(cmpLogA2.length).toBe(cmpLogB2.length);
+
+ // todo this fails because the test setup doesn't properly configure both databases to use the same key
+ // expect(cmpLogA2).toEqual(cmpLogB2);
+ });
+ });