@automerge/automerge-repo 2.0.0-alpha.7 → 2.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +8 -8
  2. package/dist/AutomergeUrl.d.ts +17 -5
  3. package/dist/AutomergeUrl.d.ts.map +1 -1
  4. package/dist/AutomergeUrl.js +71 -24
  5. package/dist/DocHandle.d.ts +68 -45
  6. package/dist/DocHandle.d.ts.map +1 -1
  7. package/dist/DocHandle.js +166 -69
  8. package/dist/FindProgress.d.ts +30 -0
  9. package/dist/FindProgress.d.ts.map +1 -0
  10. package/dist/FindProgress.js +1 -0
  11. package/dist/RemoteHeadsSubscriptions.d.ts +4 -5
  12. package/dist/RemoteHeadsSubscriptions.d.ts.map +1 -1
  13. package/dist/RemoteHeadsSubscriptions.js +4 -1
  14. package/dist/Repo.d.ts +46 -6
  15. package/dist/Repo.d.ts.map +1 -1
  16. package/dist/Repo.js +252 -67
  17. package/dist/helpers/abortable.d.ts +36 -0
  18. package/dist/helpers/abortable.d.ts.map +1 -0
  19. package/dist/helpers/abortable.js +47 -0
  20. package/dist/helpers/arraysAreEqual.d.ts.map +1 -1
  21. package/dist/helpers/bufferFromHex.d.ts +3 -0
  22. package/dist/helpers/bufferFromHex.d.ts.map +1 -0
  23. package/dist/helpers/bufferFromHex.js +13 -0
  24. package/dist/helpers/debounce.d.ts.map +1 -1
  25. package/dist/helpers/eventPromise.d.ts.map +1 -1
  26. package/dist/helpers/headsAreSame.d.ts +2 -2
  27. package/dist/helpers/headsAreSame.d.ts.map +1 -1
  28. package/dist/helpers/mergeArrays.d.ts +1 -1
  29. package/dist/helpers/mergeArrays.d.ts.map +1 -1
  30. package/dist/helpers/pause.d.ts.map +1 -1
  31. package/dist/helpers/tests/network-adapter-tests.d.ts.map +1 -1
  32. package/dist/helpers/tests/network-adapter-tests.js +13 -13
  33. package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
  34. package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
  35. package/dist/helpers/tests/storage-adapter-tests.js +25 -48
  36. package/dist/helpers/throttle.d.ts.map +1 -1
  37. package/dist/helpers/withTimeout.d.ts.map +1 -1
  38. package/dist/index.d.ts +2 -1
  39. package/dist/index.d.ts.map +1 -1
  40. package/dist/index.js +1 -1
  41. package/dist/network/messages.d.ts.map +1 -1
  42. package/dist/storage/StorageSubsystem.d.ts +15 -1
  43. package/dist/storage/StorageSubsystem.d.ts.map +1 -1
  44. package/dist/storage/StorageSubsystem.js +50 -14
  45. package/dist/synchronizer/CollectionSynchronizer.d.ts +4 -3
  46. package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
  47. package/dist/synchronizer/CollectionSynchronizer.js +34 -15
  48. package/dist/synchronizer/DocSynchronizer.d.ts +3 -2
  49. package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
  50. package/dist/synchronizer/DocSynchronizer.js +51 -27
  51. package/dist/synchronizer/Synchronizer.d.ts +11 -0
  52. package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
  53. package/dist/types.d.ts +4 -1
  54. package/dist/types.d.ts.map +1 -1
  55. package/fuzz/fuzz.ts +3 -3
  56. package/package.json +3 -4
  57. package/src/AutomergeUrl.ts +101 -26
  58. package/src/DocHandle.ts +235 -82
  59. package/src/FindProgress.ts +48 -0
  60. package/src/RemoteHeadsSubscriptions.ts +11 -9
  61. package/src/Repo.ts +368 -74
  62. package/src/helpers/abortable.ts +62 -0
  63. package/src/helpers/bufferFromHex.ts +14 -0
  64. package/src/helpers/headsAreSame.ts +2 -2
  65. package/src/helpers/tests/network-adapter-tests.ts +14 -13
  66. package/src/helpers/tests/storage-adapter-tests.ts +44 -86
  67. package/src/index.ts +7 -0
  68. package/src/storage/StorageSubsystem.ts +66 -16
  69. package/src/synchronizer/CollectionSynchronizer.ts +37 -16
  70. package/src/synchronizer/DocSynchronizer.ts +59 -32
  71. package/src/synchronizer/Synchronizer.ts +14 -0
  72. package/src/types.ts +4 -1
  73. package/test/AutomergeUrl.test.ts +130 -0
  74. package/test/CollectionSynchronizer.test.ts +4 -4
  75. package/test/DocHandle.test.ts +181 -38
  76. package/test/DocSynchronizer.test.ts +10 -3
  77. package/test/Repo.test.ts +376 -203
  78. package/test/StorageSubsystem.test.ts +80 -1
  79. package/test/remoteHeads.test.ts +27 -12
package/dist/Repo.js CHANGED
@@ -1,14 +1,15 @@
  import { next as Automerge } from "@automerge/automerge/slim";
  import debug from "debug";
  import { EventEmitter } from "eventemitter3";
- import { generateAutomergeUrl, interpretAsDocumentId, parseAutomergeUrl, } from "./AutomergeUrl.js";
- import { DocHandle } from "./DocHandle.js";
+ import { encodeHeads, generateAutomergeUrl, interpretAsDocumentId, isValidAutomergeUrl, parseAutomergeUrl, } from "./AutomergeUrl.js";
+ import { DELETED, DocHandle, READY, UNAVAILABLE, UNLOADED, } from "./DocHandle.js";
  import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js";
  import { headsAreSame } from "./helpers/headsAreSame.js";
  import { throttle } from "./helpers/throttle.js";
  import { NetworkSubsystem } from "./network/NetworkSubsystem.js";
  import { StorageSubsystem } from "./storage/StorageSubsystem.js";
  import { CollectionSynchronizer } from "./synchronizer/CollectionSynchronizer.js";
+ import { abortable } from "./helpers/abortable.js";
  function randomPeerId() {
  return ("peer-" + Math.random().toString(36).slice(4));
  }
@@ -40,7 +41,8 @@ export class Repo extends EventEmitter {
  peerMetadataByPeerId = {};
  #remoteHeadsSubscriptions = new RemoteHeadsSubscriptions();
  #remoteHeadsGossipingEnabled = false;
- constructor({ storage, network = [], peerId = randomPeerId(), sharePolicy, isEphemeral = storage === undefined, enableRemoteHeadsGossiping = false, } = {}) {
+ #progressCache = {};
+ constructor({ storage, network = [], peerId = randomPeerId(), sharePolicy, isEphemeral = storage === undefined, enableRemoteHeadsGossiping = false, denylist = [], } = {}) {
  super();
  this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping;
  this.#log = debug(`automerge-repo:repo`);
@@ -56,12 +58,14 @@ export class Repo extends EventEmitter {
  });
  // SYNCHRONIZER
  // The synchronizer uses the network subsystem to keep documents in sync with peers.
- this.synchronizer = new CollectionSynchronizer(this);
+ this.synchronizer = new CollectionSynchronizer(this, denylist);
  // When the synchronizer emits messages, send them to peers
  this.synchronizer.on("message", message => {
  this.#log(`sending ${message.type} message to ${message.targetId}`);
  networkSubsystem.send(message);
  });
+ // Forward metrics from doc synchronizers
+ this.synchronizer.on("metrics", event => this.emit("doc-metrics", event));
  if (this.#remoteHeadsGossipingEnabled) {
  this.synchronizer.on("open-doc", ({ peerId, documentId }) => {
  this.#remoteHeadsSubscriptions.subscribePeerToDoc(peerId, documentId);
@@ -70,6 +74,9 @@ export class Repo extends EventEmitter {
  // STORAGE
  // The storage subsystem has access to some form of persistence, and deals with save and loading documents.
  const storageSubsystem = storage ? new StorageSubsystem(storage) : undefined;
+ if (storageSubsystem) {
+ storageSubsystem.on("document-loaded", event => this.emit("doc-metrics", { type: "doc-loaded", ...event }));
+ }
  this.storageSubsystem = storageSubsystem;
  // NETWORK
  // The network subsystem deals with sending and receiving messages to and from peers.
@@ -114,11 +121,12 @@ export class Repo extends EventEmitter {
  }
  const heads = handle.getRemoteHeads(storageId);
  const haveHeadsChanged = message.syncState.theirHeads &&
- (!heads || !headsAreSame(heads, message.syncState.theirHeads));
+ (!heads ||
+ !headsAreSame(heads, encodeHeads(message.syncState.theirHeads)));
  if (haveHeadsChanged && message.syncState.theirHeads) {
- handle.setRemoteHeads(storageId, message.syncState.theirHeads);
+ handle.setRemoteHeads(storageId, encodeHeads(message.syncState.theirHeads));
  if (storageId && this.#remoteHeadsGossipingEnabled) {
- this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, message.syncState.theirHeads);
+ this.#remoteHeadsSubscriptions.handleImmediateRemoteHeadsChanged(message.documentId, storageId, encodeHeads(message.syncState.theirHeads));
  }
  }
  });
@@ -164,16 +172,8 @@ export class Repo extends EventEmitter {
  };
  handle.on("heads-changed", throttle(saveFn, this.saveDebounceRate));
  }
- handle.on("unavailable", () => {
- this.#log("document unavailable", { documentId: handle.documentId });
- this.emit("unavailable-document", {
- documentId: handle.documentId,
- });
- });
  // Register the document with the synchronizer. This advertises our interest in the document.
- this.synchronizer.addDocument(handle.documentId);
- // Preserve the old event in case anyone was using it.
- this.emit("document", { handle });
+ this.synchronizer.addDocument(handle);
  }
  #receiveMessage(message) {
  switch (message.type) {
@@ -274,18 +274,13 @@ export class Repo extends EventEmitter {
  * Any peers this `Repo` is connected to for whom `sharePolicy` returns `true` will
  * be notified of the newly created DocHandle.
  *
- * @throws if the cloned handle is not yet ready or if
- * `clonedHandle.docSync()` returns `undefined` (i.e. the handle is unavailable).
  */
  clone(clonedHandle) {
  if (!clonedHandle.isReady()) {
  throw new Error(`Cloned handle is not yet in ready state.
- (Try await handle.waitForReady() first.)`);
- }
- const sourceDoc = clonedHandle.docSync();
- if (!sourceDoc) {
- throw new Error("Cloned handle doesn't have a document.");
+ (Try await handle.whenReady() first.)`);
  }
+ const sourceDoc = clonedHandle.doc();
  const handle = this.create();
  handle.update(() => {
  // we replace the document with the new cloned one
@@ -293,55 +288,220 @@ export class Repo extends EventEmitter {
  });
  return handle;
  }
- /**
- * Retrieves a document by id. It gets data from the local system, but also emits a `document`
- * event to advertise interest in the document.
- */
- find(
- /** The url or documentId of the handle to retrieve */
- id) {
- const documentId = interpretAsDocumentId(id);
- // If we have the handle cached, return it
+ findWithProgress(id, options = {}) {
+ const { signal } = options;
+ const { documentId, heads } = isValidAutomergeUrl(id)
+ ? parseAutomergeUrl(id)
+ : { documentId: interpretAsDocumentId(id), heads: undefined };
+ // Check handle cache first - return plain FindStep for terminal states
  if (this.#handleCache[documentId]) {
- if (this.#handleCache[documentId].isUnavailable()) {
- // this ensures that the event fires after the handle has been returned
- setTimeout(() => {
- this.#handleCache[documentId].emit("unavailable", {
- handle: this.#handleCache[documentId],
- });
- });
+ const handle = this.#handleCache[documentId];
+ if (handle.state === UNAVAILABLE) {
+ const result = {
+ state: "unavailable",
+ error: new Error(`Document ${id} is unavailable`),
+ handle,
+ };
+ return result;
+ }
+ if (handle.state === DELETED) {
+ const result = {
+ state: "failed",
+ error: new Error(`Document ${id} was deleted`),
+ handle,
+ };
+ return result;
+ }
+ if (handle.state === READY) {
+ const result = {
+ state: "ready",
+ handle: heads ? handle.view(heads) : handle,
+ };
+ return result;
  }
- return this.#handleCache[documentId];
  }
- // If we don't already have the handle, make an empty one and try loading it
- const handle = this.#getHandle({
- documentId,
- });
- // Loading & network is going to be asynchronous no matter what,
- // but we want to return the handle immediately.
- const attemptLoad = this.storageSubsystem
- ? this.storageSubsystem.loadDoc(handle.documentId)
- : Promise.resolve(null);
- attemptLoad
- .then(async (loadedDoc) => {
+ // Check progress cache for any existing signal
+ const cachedProgress = this.#progressCache[documentId];
+ if (cachedProgress) {
+ const handle = this.#handleCache[documentId];
+ // Return cached progress if we have a handle and it's either in a terminal state or loading
+ if (handle &&
+ (handle.state === READY ||
+ handle.state === UNAVAILABLE ||
+ handle.state === DELETED ||
+ handle.state === "loading")) {
+ return cachedProgress;
+ }
+ }
+ const handle = this.#getHandle({ documentId });
+ const initial = {
+ state: "loading",
+ progress: 0,
+ handle,
+ };
+ // Create a new progress signal
+ const progressSignal = {
+ subscribers: new Set(),
+ currentProgress: undefined,
+ notify: (progress) => {
+ progressSignal.currentProgress = progress;
+ progressSignal.subscribers.forEach(callback => callback(progress));
+ // Cache all states, not just terminal ones
+ this.#progressCache[documentId] = progress;
+ },
+ peek: () => progressSignal.currentProgress || initial,
+ subscribe: (callback) => {
+ progressSignal.subscribers.add(callback);
+ return () => progressSignal.subscribers.delete(callback);
+ },
+ };
+ progressSignal.notify(initial);
+ // Start the loading process
+ void this.#loadDocumentWithProgress(id, documentId, handle, progressSignal, signal ? abortable(new Promise(() => { }), signal) : new Promise(() => { }));
+ const result = {
+ ...initial,
+ peek: progressSignal.peek,
+ subscribe: progressSignal.subscribe,
+ };
+ this.#progressCache[documentId] = result;
+ return result;
+ }
+ async #loadDocumentWithProgress(id, documentId, handle, progressSignal, abortPromise) {
+ try {
+ progressSignal.notify({
+ state: "loading",
+ progress: 25,
+ handle,
+ });
+ const loadingPromise = await (this.storageSubsystem
+ ? this.storageSubsystem.loadDoc(handle.documentId)
+ : Promise.resolve(null));
+ const loadedDoc = await Promise.race([loadingPromise, abortPromise]);
  if (loadedDoc) {
- // uhhhh, sorry if you're reading this because we were lying to the type system
  handle.update(() => loadedDoc);
  handle.doneLoading();
+ progressSignal.notify({
+ state: "loading",
+ progress: 50,
+ handle,
+ });
  }
  else {
- // we want to wait for the network subsystem to be ready before
- // we request the document. this prevents entering unavailable during initialization.
- await this.networkSubsystem.whenReady();
+ await Promise.race([this.networkSubsystem.whenReady(), abortPromise]);
  handle.request();
+ progressSignal.notify({
+ state: "loading",
+ progress: 75,
+ handle,
+ });
  }
  this.#registerHandleWithSubsystems(handle);
- })
- .catch(err => {
- this.#log("error waiting for network", { err });
- });
+ await Promise.race([handle.whenReady([READY, UNAVAILABLE]), abortPromise]);
+ if (handle.state === UNAVAILABLE) {
+ const unavailableProgress = {
+ state: "unavailable",
+ handle,
+ };
+ progressSignal.notify(unavailableProgress);
+ return;
+ }
+ if (handle.state === DELETED) {
+ throw new Error(`Document ${id} was deleted`);
+ }
+ progressSignal.notify({ state: "ready", handle });
+ }
+ catch (error) {
+ progressSignal.notify({
+ state: "failed",
+ error: error instanceof Error ? error : new Error(String(error)),
+ handle: this.#getHandle({ documentId }),
+ });
+ }
+ }
+ async find(id, options = {}) {
+ const { allowableStates = ["ready"], signal } = options;
+ // Check if already aborted
+ if (signal?.aborted) {
+ throw new Error("Operation aborted");
+ }
+ const progress = this.findWithProgress(id, { signal });
+ if ("subscribe" in progress) {
+ this.#registerHandleWithSubsystems(progress.handle);
+ return new Promise((resolve, reject) => {
+ const unsubscribe = progress.subscribe(state => {
+ if (allowableStates.includes(state.handle.state)) {
+ unsubscribe();
+ resolve(state.handle);
+ }
+ else if (state.state === "unavailable") {
+ unsubscribe();
+ reject(new Error(`Document ${id} is unavailable`));
+ }
+ else if (state.state === "failed") {
+ unsubscribe();
+ reject(state.error);
+ }
+ });
+ });
+ }
+ else {
+ if (progress.handle.state === READY) {
+ return progress.handle;
+ }
+ // If the handle isn't ready, wait for it and then return it
+ await progress.handle.whenReady([READY, UNAVAILABLE]);
+ return progress.handle;
+ }
+ }
+ /**
+ * Loads a document without waiting for ready state
+ */
+ async #loadDocument(documentId) {
+ // If we have the handle cached, return it
+ if (this.#handleCache[documentId]) {
+ return this.#handleCache[documentId];
+ }
+ // If we don't already have the handle, make an empty one and try loading it
+ const handle = this.#getHandle({ documentId });
+ const loadedDoc = await (this.storageSubsystem
+ ? this.storageSubsystem.loadDoc(handle.documentId)
+ : Promise.resolve(null));
+ if (loadedDoc) {
+ // We need to cast this to <T> because loadDoc operates in <unknowns>.
+ // This is really where we ought to be validating the input matches <T>.
+ handle.update(() => loadedDoc);
+ handle.doneLoading();
+ }
+ else {
+ // Because the network subsystem might still be booting up, we wait
+ // here so that we don't immediately give up loading because we're still
+ // making our initial connection to a sync server.
+ await this.networkSubsystem.whenReady();
+ handle.request();
+ }
+ this.#registerHandleWithSubsystems(handle);
  return handle;
  }
+ /**
+ * Retrieves a document by id. It gets data from the local system, but also emits a `document`
+ * event to advertise interest in the document.
+ */
+ async findClassic(
+ /** The url or documentId of the handle to retrieve */
+ id, options = {}) {
+ const documentId = interpretAsDocumentId(id);
+ const { allowableStates, signal } = options;
+ return abortable((async () => {
+ const handle = await this.#loadDocument(documentId);
+ if (!allowableStates) {
+ await handle.whenReady([READY, UNAVAILABLE]);
+ if (handle.state === UNAVAILABLE && !signal?.aborted) {
+ throw new Error(`Document ${id} is unavailable`);
+ }
+ }
+ return handle;
+ })(), signal);
+ }
  delete(
  /** The url or documentId of the handle to delete */
  id) {
@@ -349,6 +509,7 @@ export class Repo extends EventEmitter {
  const handle = this.#getHandle({ documentId });
  handle.delete();
  delete this.#handleCache[documentId];
+ delete this.#progressCache[documentId];
  this.emit("delete-document", { documentId });
  }
  /**
@@ -361,9 +522,7 @@ export class Repo extends EventEmitter {
  async export(id) {
  const documentId = interpretAsDocumentId(id);
  const handle = this.#getHandle({ documentId });
- const doc = await handle.doc();
- if (!doc)
- return undefined;
+ const doc = handle.doc();
  return Automerge.save(doc);
  }
  /**
@@ -409,13 +568,39 @@ export class Repo extends EventEmitter {
  ? documents.map(id => this.#handleCache[id])
  : Object.values(this.#handleCache);
  await Promise.all(handles.map(async (handle) => {
- const doc = handle.docSync();
- if (!doc) {
- return;
- }
- return this.storageSubsystem.saveDoc(handle.documentId, doc);
+ return this.storageSubsystem.saveDoc(handle.documentId, handle.doc());
  }));
  }
+ /**
+ * Removes a DocHandle from the handleCache.
+ * @hidden this API is experimental and may change.
+ * @param documentId - documentId of the DocHandle to remove from handleCache, if present in cache.
+ * @returns Promise<void>
+ */
+ async removeFromCache(documentId) {
+ if (!this.#handleCache[documentId]) {
+ this.#log(`WARN: removeFromCache called but handle not found in handleCache for documentId: ${documentId}`);
+ return;
+ }
+ const handle = this.#getHandle({ documentId });
+ await handle.whenReady([READY, UNLOADED, DELETED, UNAVAILABLE]);
+ const doc = handle.doc();
+ // because this is an internal-ish function, we'll be extra careful about undefined docs here
+ if (doc) {
+ if (handle.isReady()) {
+ handle.unload();
+ }
+ else {
+ this.#log(`WARN: removeFromCache called but handle for documentId: ${documentId} in unexpected state: ${handle.state}`);
+ }
+ delete this.#handleCache[documentId];
+ // TODO: remove document from synchronizer when removeDocument is implemented
+ // this.synchronizer.removeDocument(documentId)
+ }
+ else {
+ this.#log(`WARN: removeFromCache called but doc undefined for documentId: ${documentId}`);
+ }
+ }
  shutdown() {
  this.networkSubsystem.adapters.forEach(adapter => {
  adapter.disconnect();
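
The Repo.js changes above replace the old synchronous `find()` with an async, abortable lookup and add `findWithProgress()` for observing intermediate load states. A minimal usage sketch based on the diff, assuming an already-constructed `Repo` and a valid document URL (the `example` function and its arguments are illustrative, not part of the package):

```typescript
import { Repo, type AutomergeUrl } from "@automerge/automerge-repo"

async function example(repo: Repo, url: AutomergeUrl) {
  // find() is now async; it resolves once the handle is ready and rejects if
  // the document is unavailable or the AbortSignal fires first.
  const controller = new AbortController()
  const handle = await repo.find(url, { signal: controller.signal })
  console.log(handle.doc()) // doc() is synchronous once the handle is ready

  // findWithProgress() returns the current step right away; non-terminal
  // results also expose peek()/subscribe() for observing loading states.
  const progress = repo.findWithProgress(url)
  if ("subscribe" in progress) {
    const unsubscribe = progress.subscribe(step => {
      // step.state is "loading" | "ready" | "unavailable" | "failed"
      if (step.state !== "loading") unsubscribe()
    })
  }
}
```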
package/dist/helpers/abortable.d.ts ADDED
@@ -0,0 +1,36 @@
+ /**
+ * Wraps a Promise and causes it to reject when the signal is aborted.
+ *
+ * @remarks
+ * This utility wraps a Promise and rejects when the provided AbortSignal is aborted.
+ * It's designed to make Promise awaits abortable.
+ *
+ * @example
+ * ```typescript
+ * const controller = new AbortController();
+ *
+ * try {
+ * const result = await abortable(fetch('https://api.example.com/data'), controller.signal);
+ * // Meanwhile, to abort in concurrent code before the above line returns: controller.abort();
+ * } catch (err) {
+ * if (err.name === 'AbortError') {
+ * console.log('The operation was aborted');
+ * }
+ * }
+ *
+ * ```
+ *
+ * @param p - A Promise to wrap
+ * @param signal - An AbortSignal that can be used to abort the operation
+ * @returns A wrapper Promise that rejects with AbortError if the signal is aborted
+ * before the promise p settles, and settles as p settles otherwise
+ * @throws {DOMException} With name "AbortError" if aborted before p settles
+ */
+ export declare function abortable<T>(p: Promise<T>, signal: AbortSignal | undefined): Promise<T>;
+ /**
+ * Include this type in an options object to pass an AbortSignal to a function.
+ */
+ export interface AbortOptions {
+ signal?: AbortSignal;
+ }
+ //# sourceMappingURL=abortable.d.ts.map
package/dist/helpers/abortable.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"abortable.d.ts","sourceRoot":"","sources":["../../src/helpers/abortable.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AAEH,wBAAgB,SAAS,CAAC,CAAC,EACzB,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,EACb,MAAM,EAAE,WAAW,GAAG,SAAS,GAC9B,OAAO,CAAC,CAAC,CAAC,CAsBZ;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B,MAAM,CAAC,EAAE,WAAW,CAAA;CACrB"}
package/dist/helpers/abortable.js ADDED
@@ -0,0 +1,47 @@
+ /**
+ * Wraps a Promise and causes it to reject when the signal is aborted.
+ *
+ * @remarks
+ * This utility wraps a Promise and rejects when the provided AbortSignal is aborted.
+ * It's designed to make Promise awaits abortable.
+ *
+ * @example
+ * ```typescript
+ * const controller = new AbortController();
+ *
+ * try {
+ * const result = await abortable(fetch('https://api.example.com/data'), controller.signal);
+ * // Meanwhile, to abort in concurrent code before the above line returns: controller.abort();
+ * } catch (err) {
+ * if (err.name === 'AbortError') {
+ * console.log('The operation was aborted');
+ * }
+ * }
+ *
+ * ```
+ *
+ * @param p - A Promise to wrap
+ * @param signal - An AbortSignal that can be used to abort the operation
+ * @returns A wrapper Promise that rejects with AbortError if the signal is aborted
+ * before the promise p settles, and settles as p settles otherwise
+ * @throws {DOMException} With name "AbortError" if aborted before p settles
+ */
+ export function abortable(p, signal) {
+ let settled = false;
+ return new Promise((resolve, reject) => {
+ signal?.addEventListener("abort", () => {
+ if (!settled) {
+ reject(new DOMException("Operation aborted", "AbortError"));
+ }
+ }, { once: true });
+ p.then(result => {
+ resolve(result);
+ })
+ .catch(error => {
+ reject(error);
+ })
+ .finally(() => {
+ settled = true;
+ });
+ });
+ }
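
The `abortable()` helper used by `find()` and `findClassic()` above simply races a promise against an AbortSignal. A small sketch of combining it with a timeout signal, written as if inside the package source tree (the `loadWithTimeout` name and 5-second timeout are illustrative; `AbortSignal.timeout()` requires a reasonably recent runtime):

```typescript
// Mirrors the import used in Repo.js above.
import { abortable } from "./helpers/abortable.js"

async function loadWithTimeout<T>(work: Promise<T>): Promise<T> {
  // Rejects with an AbortError after 5000 ms unless `work` settles first.
  return abortable(work, AbortSignal.timeout(5000))
}
```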
package/dist/helpers/arraysAreEqual.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"arraysAreEqual.d.ts","sourceRoot":"","sources":["../../src/helpers/arraysAreEqual.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,cAAc,GAAI,CAAC,KAAK,CAAC,EAAE,KAAK,CAAC,EAAE,YAC4B,CAAA"}
+ {"version":3,"file":"arraysAreEqual.d.ts","sourceRoot":"","sources":["../../src/helpers/arraysAreEqual.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,cAAc,GAAI,CAAC,EAAE,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,YAC4B,CAAA"}
package/dist/helpers/bufferFromHex.d.ts ADDED
@@ -0,0 +1,3 @@
+ export declare const uint8ArrayFromHexString: (hexString: string) => Uint8Array;
+ export declare const uint8ArrayToHexString: (data: Uint8Array) => string;
+ //# sourceMappingURL=bufferFromHex.d.ts.map
package/dist/helpers/bufferFromHex.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"bufferFromHex.d.ts","sourceRoot":"","sources":["../../src/helpers/bufferFromHex.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,uBAAuB,GAAI,WAAW,MAAM,KAAG,UAS3D,CAAA;AAED,eAAO,MAAM,qBAAqB,GAAI,MAAM,UAAU,KAAG,MAExD,CAAA"}
package/dist/helpers/bufferFromHex.js ADDED
@@ -0,0 +1,13 @@
+ export const uint8ArrayFromHexString = (hexString) => {
+ if (hexString.length % 2 !== 0) {
+ throw new Error("Hex string must have an even length");
+ }
+ const bytes = new Uint8Array(hexString.length / 2);
+ for (let i = 0; i < hexString.length; i += 2) {
+ bytes[i >> 1] = parseInt(hexString.slice(i, i + 2), 16);
+ }
+ return bytes;
+ };
+ export const uint8ArrayToHexString = (data) => {
+ return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("");
+ };
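
The new hex helpers are a plain byte/hex round trip. A quick sketch of the expected behavior, written as if inside the package source tree:

```typescript
import {
  uint8ArrayFromHexString,
  uint8ArrayToHexString,
} from "./helpers/bufferFromHex.js"

const bytes = uint8ArrayFromHexString("00ff10") // Uint8Array [0, 255, 16]
const hex = uint8ArrayToHexString(bytes)        // "00ff10"
```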
package/dist/helpers/debounce.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"debounce.d.ts","sourceRoot":"","sources":["../../src/helpers/debounce.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH,eAAO,MAAM,QAAQ,GAAI,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,UAAU,CAAC,CAAC,CAAC,KAAK,UAAU,CAAC,CAAC,CAAC,MACtE,CAAC,QACC,MAAM,eAGc,UAAU,CAAC,CAAC,CAAC,SAMxC,CAAA"}
+ {"version":3,"file":"debounce.d.ts","sourceRoot":"","sources":["../../src/helpers/debounce.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAEH,eAAO,MAAM,QAAQ,GAAI,CAAC,SAAS,CAAC,GAAG,IAAI,EAAE,UAAU,CAAC,CAAC,CAAC,KAAK,UAAU,CAAC,CAAC,CAAC,EAC1E,IAAI,CAAC,EACL,MAAM,MAAM,MAGK,GAAG,MAAM,UAAU,CAAC,CAAC,CAAC,SAMxC,CAAA"}
package/dist/helpers/eventPromise.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"eventPromise.d.ts","sourceRoot":"","sources":["../../src/helpers/eventPromise.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAE5C,4FAA4F;AAC5F,eAAO,MAAM,YAAY,YAAa,YAAY,SAAS,MAAM,iBACE,CAAA;AAEnE,eAAO,MAAM,aAAa,aAAc,YAAY,EAAE,SAAS,MAAM,mBAGpE,CAAA"}
+ {"version":3,"file":"eventPromise.d.ts","sourceRoot":"","sources":["../../src/helpers/eventPromise.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAA;AAE5C,4FAA4F;AAC5F,eAAO,MAAM,YAAY,GAAI,SAAS,YAAY,EAAE,OAAO,MAAM,iBACE,CAAA;AAEnE,eAAO,MAAM,aAAa,GAAI,UAAU,YAAY,EAAE,EAAE,OAAO,MAAM,mBAGpE,CAAA"}
package/dist/helpers/headsAreSame.d.ts CHANGED
@@ -1,3 +1,3 @@
- import { Heads } from "@automerge/automerge/slim/next";
- export declare const headsAreSame: (a: Heads, b: Heads) => boolean;
+ import type { UrlHeads } from "../types.js";
+ export declare const headsAreSame: (a: UrlHeads, b: UrlHeads) => boolean;
  //# sourceMappingURL=headsAreSame.d.ts.map
package/dist/helpers/headsAreSame.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,gCAAgC,CAAA;AAGtD,eAAO,MAAM,YAAY,MAAO,KAAK,KAAK,KAAK,YAE9C,CAAA"}
+ {"version":3,"file":"headsAreSame.d.ts","sourceRoot":"","sources":["../../src/helpers/headsAreSame.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAA;AAE3C,eAAO,MAAM,YAAY,GAAI,GAAG,QAAQ,EAAE,GAAG,QAAQ,YAEpD,CAAA"}
package/dist/helpers/mergeArrays.d.ts CHANGED
@@ -1,2 +1,2 @@
- export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array;
+ export declare function mergeArrays(myArrays: Uint8Array[]): Uint8Array<ArrayBuffer>;
  //# sourceMappingURL=mergeArrays.d.ts.map
package/dist/helpers/mergeArrays.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,cAgBjD"}
+ {"version":3,"file":"mergeArrays.d.ts","sourceRoot":"","sources":["../../src/helpers/mergeArrays.ts"],"names":[],"mappings":"AAAA,wBAAgB,WAAW,CAAC,QAAQ,EAAE,UAAU,EAAE,2BAgBjD"}
package/dist/helpers/pause.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"pause.d.ts","sourceRoot":"","sources":["../../src/helpers/pause.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,KAAK,+BAC4C,CAAA"}
+ {"version":3,"file":"pause.d.ts","sourceRoot":"","sources":["../../src/helpers/pause.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,KAAK,GAAI,UAAK,kBACmC,CAAA"}
package/dist/helpers/tests/network-adapter-tests.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"network-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/network-adapter-tests.ts"],"names":[],"mappings":"AAUA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAIvF;;;;;;;;;;;GAWG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA0Q5E;AAID,KAAK,OAAO,GAAG,uBAAuB,GAAG,uBAAuB,EAAE,CAAA;AAElE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACrC,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB,CAAC,CAAA"}
+ {"version":3,"file":"network-adapter-tests.d.ts","sourceRoot":"","sources":["../../../src/helpers/tests/network-adapter-tests.ts"],"names":[],"mappings":"AAUA,OAAO,KAAK,EAAE,uBAAuB,EAAE,MAAM,0CAA0C,CAAA;AAIvF;;;;;;;;;;;GAWG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CA2Q5E;AAID,KAAK,OAAO,GAAG,uBAAuB,GAAG,uBAAuB,EAAE,CAAA;AAElE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC;IAClC,QAAQ,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACrC,QAAQ,CAAC,EAAE,MAAM,IAAI,CAAA;CACtB,CAAC,CAAA"}