@automerge/automerge-repo 1.1.0-alpha.7 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -1
- package/dist/AutomergeUrl.js +1 -1
- package/dist/DocHandle.d.ts +10 -4
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +17 -8
- package/dist/Repo.d.ts +18 -6
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +88 -72
- package/dist/helpers/pause.d.ts +0 -1
- package/dist/helpers/pause.d.ts.map +1 -1
- package/dist/helpers/pause.js +2 -8
- package/dist/helpers/withTimeout.d.ts.map +1 -1
- package/dist/helpers/withTimeout.js +2 -0
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/network/NetworkAdapter.d.ts.map +1 -1
- package/dist/network/NetworkAdapter.js +1 -0
- package/dist/network/NetworkSubsystem.js +3 -3
- package/dist/network/messages.d.ts +43 -38
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/network/messages.js +7 -9
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +1 -0
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +11 -7
- package/dist/synchronizer/Synchronizer.d.ts +11 -3
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/package.json +3 -4
- package/src/AutomergeUrl.ts +1 -1
- package/src/DocHandle.ts +34 -12
- package/src/Repo.ts +113 -84
- package/src/helpers/pause.ts +3 -11
- package/src/helpers/withTimeout.ts +2 -0
- package/src/index.ts +1 -1
- package/src/network/NetworkAdapter.ts +4 -2
- package/src/network/NetworkSubsystem.ts +3 -3
- package/src/network/messages.ts +60 -63
- package/src/synchronizer/CollectionSynchronizer.ts +1 -0
- package/src/synchronizer/DocSynchronizer.ts +19 -15
- package/src/synchronizer/Synchronizer.ts +11 -3
- package/test/CollectionSynchronizer.test.ts +7 -5
- package/test/DocHandle.test.ts +11 -2
- package/test/RemoteHeadsSubscriptions.test.ts +49 -49
- package/test/Repo.test.ts +39 -1
- package/test/StorageSubsystem.test.ts +1 -1
- package/test/helpers/collectMessages.ts +19 -0
- package/test/remoteHeads.test.ts +142 -119
- package/.eslintrc +0 -28
- package/test/helpers/waitForMessages.ts +0 -22
package/src/synchronizer/DocSynchronizer.ts
CHANGED

@@ -137,11 +137,13 @@ export class DocSynchronizer extends Synchronizer {

     let pendingCallbacks = this.#pendingSyncStateCallbacks[peerId]
     if (!pendingCallbacks) {
-      this.#onLoadSyncState(peerId)
+      this.#onLoadSyncState(peerId)
+        .then(syncState => {
+          this.#initSyncState(peerId, syncState ?? A.initSyncState())
+        })
+        .catch(err => {
+          this.#log(`Error loading sync state for ${peerId}: ${err}`)
+        })
       pendingCallbacks = this.#pendingSyncStateCallbacks[peerId] = []
     }

@@ -262,13 +264,15 @@ export class DocSynchronizer extends Synchronizer {
         )
         this.#setSyncState(peerId, reparsedSyncState)

-        docPromise
+        docPromise
+          .then(doc => {
+            if (doc) {
+              this.#sendSyncMessage(peerId, doc)
+            }
+          })
+          .catch(err => {
+            this.#log(`Error loading doc for ${peerId}: ${err}`)
+          })
       })
     })
   }

@@ -330,10 +334,10 @@ export class DocSynchronizer extends Synchronizer {
     }

     this.#processAllPendingSyncMessages()
-    this.#processSyncMessage(message
+    this.#processSyncMessage(message)
   }

-  #processSyncMessage(message: SyncMessage | RequestMessage
+  #processSyncMessage(message: SyncMessage | RequestMessage) {
     if (isRequestMessage(message)) {
       this.#peerDocumentStatuses[message.senderId] = "wants"
     }

@@ -392,7 +396,7 @@ export class DocSynchronizer extends Synchronizer {

   #processAllPendingSyncMessages() {
     for (const message of this.#pendingSyncMessages) {
-      this.#processSyncMessage(message.message
+      this.#processSyncMessage(message.message)
     }

     this.#pendingSyncMessages = []
package/src/synchronizer/Synchronizer.ts
CHANGED

@@ -3,15 +3,23 @@ import {
   MessageContents,
   OpenDocMessage,
   RepoMessage,
-  SyncStateMessage,
 } from "../network/messages.js"
+import { SyncState } from "@automerge/automerge"
+import { PeerId, DocumentId } from "../types.js"

 export abstract class Synchronizer extends EventEmitter<SynchronizerEvents> {
   abstract receiveMessage(message: RepoMessage): void
 }

 export interface SynchronizerEvents {
-  message: (
-  "sync-state": (
+  message: (payload: MessageContents) => void
+  "sync-state": (payload: SyncStatePayload) => void
   "open-doc": (arg: OpenDocMessage) => void
 }
+
+/** Notify the repo that the sync state has changed */
+export interface SyncStatePayload {
+  peerId: PeerId
+  documentId: DocumentId
+  syncState: SyncState
+}
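With the typed `SynchronizerEvents`, the `sync-state` event now delivers a structured `SyncStatePayload` rather than a partially typed argument. A minimal sketch of a listener that persists sync states, assuming `synchronizer` is one of the package's synchronizers and `saveSyncState` is a hypothetical storage helper (neither declaration below is part of automerge-repo):

```ts
import * as A from "@automerge/automerge"

// Stand-ins for illustration only — not automerge-repo APIs.
declare const synchronizer: {
  on(
    event: "sync-state",
    listener: (payload: {
      peerId: string
      documentId: string
      syncState: A.SyncState
    }) => void
  ): void
}
declare function saveSyncState(
  peerId: string,
  documentId: string,
  bytes: Uint8Array
): Promise<void>

synchronizer.on("sync-state", ({ peerId, documentId, syncState }) => {
  // The payload identifies the peer/document pair, so the per-peer sync state
  // can be serialised and stored for the next session.
  void saveSyncState(peerId, documentId, A.encodeSyncState(syncState))
})
```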
package/test/CollectionSynchronizer.test.ts
CHANGED

@@ -1,6 +1,6 @@
 import assert from "assert"
 import { beforeEach, describe, it } from "vitest"
-import { PeerId, Repo } from "../src/index.js"
+import { PeerId, Repo, SyncMessage } from "../src/index.js"
 import { CollectionSynchronizer } from "../src/synchronizer/CollectionSynchronizer.js"

 describe("CollectionSynchronizer", () => {

@@ -24,8 +24,9 @@ describe("CollectionSynchronizer", () => {
       synchronizer.addPeer("peer1" as PeerId)

       synchronizer.once("message", event => {
-        assert(
+        const { targetId, documentId } = event as SyncMessage
+        assert(targetId === "peer1")
+        assert(documentId === handle.documentId)
         done()
       })

@@ -37,8 +38,9 @@ describe("CollectionSynchronizer", () => {
       const handle = repo.create()
       synchronizer.addDocument(handle.documentId)
       synchronizer.once("message", event => {
-        assert(
+        const { targetId, documentId } = event as SyncMessage
+        assert(targetId === "peer1")
+        assert(documentId === handle.documentId)
         done()
       })
       synchronizer.addPeer("peer1" as PeerId)
package/test/DocHandle.test.ts
CHANGED

@@ -5,7 +5,7 @@ import { describe, it } from "vitest"
 import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
-import { DocHandle, DocHandleChangePayload
+import { DocHandle, DocHandleChangePayload } from "../src/index.js"
 import { TestDoc } from "./types.js"

 describe("DocHandle", () => {

@@ -20,6 +20,15 @@ describe("DocHandle", () => {
     assert.equal(handle.documentId, TEST_ID)
   })

+  it("should take an initial value", async () => {
+    const handle = new DocHandle(TEST_ID, {
+      isNew: true,
+      initialValue: { foo: "bar" },
+    })
+    const doc = await handle.doc()
+    assert.equal(doc.foo, "bar")
+  })
+
   it("should become ready when a document is loaded", async () => {
     const handle = new DocHandle<TestDoc>(TEST_ID)
     assert.equal(handle.isReady(), false)

@@ -44,7 +53,7 @@ describe("DocHandle", () => {
     assert.deepEqual(doc, handle.docSync())
   })

-  it("should return undefined if we
+  it("should return undefined if we access the doc before ready", async () => {
     const handle = new DocHandle<TestDoc>(TEST_ID)

     assert.equal(handle.docSync(), undefined)
package/test/RemoteHeadsSubscriptions.test.ts
CHANGED

@@ -8,7 +8,7 @@ import {
   RemoteHeadsChanged,
   RemoteSubscriptionControlMessage,
 } from "../src/network/messages.js"
-import {
+import { collectMessages } from "./helpers/collectMessages.js"

 describe("RepoHeadsSubscriptions", () => {
   const storageA = "remote-a" as StorageId

@@ -86,15 +86,15 @@ describe("RepoHeadsSubscriptions", () => {
   it("should allow to subscribe and unsubscribe to storage ids", async () => {
     const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()

-    const remoteHeadsMessages =
-      remoteHeadsSubscriptions,
-      "remote-heads-changed"
-    )
+    const remoteHeadsMessages = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "remote-heads-changed",
+    })

-    const changeRemoteSubsAfterSubscribe =
-      remoteHeadsSubscriptions,
-      "change-remote-subs"
-    )
+    const changeRemoteSubsAfterSubscribe = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "change-remote-subs",
+    })

     // subscribe to storageB and change storageB heads
     remoteHeadsSubscriptions.subscribeToRemotes([storageB])

@@ -114,10 +114,10 @@ describe("RepoHeadsSubscriptions", () => {
     assert.deepStrictEqual(messages[0].remove, undefined)
     assert.deepStrictEqual(messages[0].peers, [])

-    const remoteHeadsMessagesAfterUnsub =
-      remoteHeadsSubscriptions,
-      "change-remote-subs"
-    )
+    const remoteHeadsMessagesAfterUnsub = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "change-remote-subs",
+    })

     // unsubscribe from storageB
     remoteHeadsSubscriptions.unsubscribeFromRemotes([storageB])

@@ -133,15 +133,15 @@ describe("RepoHeadsSubscriptions", () => {
   it("should forward all changes to generous peers", async () => {
     const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()

-    const notifyRemoteHeadsMessagesPromise =
-      remoteHeadsSubscriptions,
-      "notify-remote-heads"
-    )
+    const notifyRemoteHeadsMessagesPromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "notify-remote-heads",
+    })

-    const changeRemoteSubsMessagesPromise =
-      remoteHeadsSubscriptions,
-      "change-remote-subs"
-    )
+    const changeRemoteSubsMessagesPromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "change-remote-subs",
+    })

     remoteHeadsSubscriptions.addGenerousPeer(peerC)
     remoteHeadsSubscriptions.subscribeToRemotes([storageB])

@@ -170,10 +170,10 @@ describe("RepoHeadsSubscriptions", () => {
     assert.deepStrictEqual(messages[0].remove, undefined)
     assert.deepStrictEqual(messages[0].peers, [peerC])

-    const changeRemoteSubsMessagesAfterUnsubPromise =
-      remoteHeadsSubscriptions,
-      "change-remote-subs"
-    )
+    const changeRemoteSubsMessagesAfterUnsubPromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "change-remote-subs",
+    })

     // unsubsscribe from storage B
     remoteHeadsSubscriptions.unsubscribeFromRemotes([storageB])

@@ -189,10 +189,10 @@ describe("RepoHeadsSubscriptions", () => {
   it("should not notify generous peers of changed remote heads, if they send the heads originally", async () => {
     const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()

-    const messagesPromise =
-      remoteHeadsSubscriptions,
-      "notify-remote-heads"
-    )
+    const messagesPromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "notify-remote-heads",
+    })

     remoteHeadsSubscriptions.addGenerousPeer(peerC)
     remoteHeadsSubscriptions.subscribeToRemotes([storageB])

@@ -219,10 +219,10 @@ describe("RepoHeadsSubscriptions", () => {

     // subscribe peer c to storage b
     remoteHeadsSubscriptions.handleControlMessage(subscribePeerCToStorageB)
-    const messagesAfterSubscribePromise =
-      remoteHeadsSubscriptions,
-      "notify-remote-heads"
-    )
+    const messagesAfterSubscribePromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "notify-remote-heads",
+    })
     remoteHeadsSubscriptions.subscribePeerToDoc(peerC, docA)
     remoteHeadsSubscriptions.subscribePeerToDoc(peerC, docC)


@@ -248,10 +248,10 @@ describe("RepoHeadsSubscriptions", () => {

     // unsubscribe peer C
     remoteHeadsSubscriptions.handleControlMessage(unsubscribePeerCFromStorageB)
-    const messagesAfteUnsubscribePromise =
-      remoteHeadsSubscriptions,
-      "notify-remote-heads"
-    )
+    const messagesAfteUnsubscribePromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "notify-remote-heads",
+    })

     // heads of docB for storageB change
     remoteHeadsSubscriptions.handleRemoteHeads(docBHeadsChangedForStorageB)

@@ -267,10 +267,10 @@ describe("RepoHeadsSubscriptions", () => {

     // subscribe peer c to storage b
     remoteHeadsSubscriptions.handleControlMessage(subscribePeerCToStorageB)
-    const messagesAfterSubscribePromise =
-      remoteHeadsSubscriptions,
-      "notify-remote-heads"
-    )
+    const messagesAfterSubscribePromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "notify-remote-heads",
+    })

     // change message for docA in storageB
     remoteHeadsSubscriptions.handleRemoteHeads(docAHeadsChangedForStorageB)

@@ -290,10 +290,10 @@ describe("RepoHeadsSubscriptions", () => {
   it("should only notify of sync states with a more recent timestamp", async () => {
     const remoteHeadsSubscription = new RemoteHeadsSubscriptions()

-    const messagesPromise =
-      remoteHeadsSubscription,
-      "remote-heads-changed"
-    )
+    const messagesPromise = collectMessages({
+      emitter: remoteHeadsSubscription,
+      event: "remote-heads-changed",
+    })

     remoteHeadsSubscription.subscribeToRemotes([storageB])
     remoteHeadsSubscription.handleRemoteHeads(docBHeadsChangedForStorageB2)

@@ -314,10 +314,10 @@ describe("RepoHeadsSubscriptions", () => {
   it("should remove subs of disconnected peers", async () => {
     const remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()

-    const messagesPromise =
-      remoteHeadsSubscriptions,
-      "change-remote-subs"
-    )
+    const messagesPromise = collectMessages({
+      emitter: remoteHeadsSubscriptions,
+      event: "change-remote-subs",
+    })

     remoteHeadsSubscriptions.handleControlMessage({
       type: "remote-subscription-change",
package/test/Repo.test.ts
CHANGED

@@ -1,5 +1,5 @@
 import { next as A } from "@automerge/automerge"
-import { MessageChannelNetworkAdapter } from "
+import { MessageChannelNetworkAdapter } from "../../automerge-repo-network-messagechannel/src/index.js"
 import assert from "assert"
 import * as Uuid from "uuid"
 import { describe, expect, it } from "vitest"

@@ -13,6 +13,7 @@ import { Repo } from "../src/Repo.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
 import {
+  AnyDocumentId,
   AutomergeUrl,
   DocHandle,
   DocumentId,
@@ -31,6 +32,15 @@ import { TestDoc } from "./types.js"
 import { StorageId } from "../src/storage/types.js"

 describe("Repo", () => {
+  describe("constructor", () => {
+    it("can be instantiated without network adapters", () => {
+      const repo = new Repo({
+        network: [],
+      })
+      expect(repo).toBeInstanceOf(Repo)
+    })
+  })
+
   describe("local only", () => {
     const setup = ({ startReady = true } = {}) => {
       const storageAdapter = new DummyStorageAdapter()
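The new constructor test above shows that a Repo no longer requires any network adapters. A minimal sketch of using such a local-only repo (the document shape here is an arbitrary example):

```ts
import { Repo } from "@automerge/automerge-repo"

// No network adapters: the repo works as a purely local document store.
const repo = new Repo({ network: [] })

const handle = repo.create<{ count: number }>()
handle.change(doc => {
  doc.count = 1
})
console.log(handle.url) // an AutomergeUrl like "automerge:..."
```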
@@ -57,6 +67,13 @@ describe("Repo", () => {
       assert.equal(handle.isReady(), true)
     })

+    it("can create a document with an initial value", async () => {
+      const { repo } = setup()
+      const handle = repo.create({ foo: "bar" })
+      await handle.doc()
+      assert.equal(handle.docSync().foo, "bar")
+    })
+
     it("can find a document by url", () => {
       const { repo } = setup()
       const handle = repo.create<TestDoc>()
@@ -321,6 +338,27 @@ describe("Repo", () => {
         repo.delete(handle.documentId)
       }))

+    it("exports a document", async () => {
+      const { repo } = setup()
+      const handle = repo.create<TestDoc>()
+      handle.change(d => {
+        d.foo = "bar"
+      })
+      assert.equal(handle.isReady(), true)
+
+      const exported = await repo.export(handle.documentId)
+      const loaded = A.load(exported)
+      const doc = await handle.doc()
+      assert.deepEqual(doc, loaded)
+    })
+
+    it("rejects when exporting a document that does not exist", async () => {
+      const { repo } = setup()
+      assert.rejects(async () => {
+        await repo.export("foo" as AnyDocumentId)
+      })
+    })
+
     it("storage state doesn't change across reloads when the document hasn't changed", async () => {
       const storage = new DummyStorageAdapter()

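As the export test shows, `repo.export` resolves to a plain Automerge binary that `A.load` understands. A short sketch of using it to write a snapshot to disk, assuming a Node ESM environment; the file name is arbitrary and the `if (exported)` guard is purely defensive:

```ts
import fs from "node:fs/promises"
import { next as A } from "@automerge/automerge"
import { Repo } from "@automerge/automerge-repo"

const repo = new Repo({ network: [] })
const handle = repo.create<{ title: string }>()
handle.change(d => {
  d.title = "hello"
})

// export() resolves to the document's full binary representation.
const exported = await repo.export(handle.documentId)
if (exported) {
  await fs.writeFile("snapshot.automerge", exported)

  // The snapshot round-trips through plain Automerge.
  const restored = A.load<{ title: string }>(await fs.readFile("snapshot.automerge"))
  console.log(restored.title) // "hello"
}
```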
package/test/helpers/collectMessages.ts
ADDED

@@ -0,0 +1,19 @@
+import { EventEmitter } from "eventemitter3"
+import { pause } from "../../src/helpers/pause.js"
+
+export async function collectMessages({
+  emitter,
+  event,
+  until = pause(100),
+}: {
+  emitter: EventEmitter
+  event: string
+  until?: Promise<unknown>
+}): Promise<any[]> {
+  const messages = []
+  const listener = (message: unknown) => messages.push(message)
+  emitter.on(event, listener)
+  await until
+  emitter.off(event)
+  return messages
+}
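The new `collectMessages` helper, which replaces the removed `waitForMessages`, gathers every event emitted until its `until` promise settles (a 100 ms pause by default). A self-contained usage sketch with a custom window; the emitter and event name below are arbitrary stand-ins rather than package APIs, and the relative import paths assume a file in the package's test directory:

```ts
import assert from "assert"
import { EventEmitter } from "eventemitter3"
import { collectMessages } from "./helpers/collectMessages.js"
import { pause } from "../src/helpers/pause.js"

const emitter = new EventEmitter()

// Start collecting before triggering the events we care about.
const messagesPromise = collectMessages({
  emitter,
  event: "ping",
  until: pause(50), // collect for 50 ms instead of the default 100 ms
})

emitter.emit("ping", { n: 1 })
emitter.emit("ping", { n: 2 })

const messages = await messagesPromise
assert.strictEqual(messages.length, 2)
```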