@automerge/automerge-repo 2.0.0-alpha.1 → 2.0.0-alpha.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/DocHandle.d.ts +71 -2
- package/dist/DocHandle.d.ts.map +1 -1
- package/dist/DocHandle.js +116 -2
- package/dist/Repo.d.ts +24 -0
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +94 -57
- package/dist/entrypoints/fullfat.d.ts +1 -0
- package/dist/entrypoints/fullfat.d.ts.map +1 -1
- package/dist/entrypoints/fullfat.js +1 -2
- package/dist/entrypoints/slim.d.ts +1 -0
- package/dist/entrypoints/slim.d.ts.map +1 -1
- package/dist/entrypoints/slim.js +2 -0
- package/dist/helpers/tests/storage-adapter-tests.d.ts +2 -2
- package/dist/helpers/tests/storage-adapter-tests.d.ts.map +1 -1
- package/dist/helpers/tests/storage-adapter-tests.js +19 -39
- package/dist/storage/StorageSubsystem.d.ts +11 -1
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +18 -3
- package/dist/synchronizer/CollectionSynchronizer.d.ts +13 -0
- package/dist/synchronizer/CollectionSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/CollectionSynchronizer.js +13 -6
- package/dist/synchronizer/DocSynchronizer.d.ts +7 -0
- package/dist/synchronizer/DocSynchronizer.d.ts.map +1 -1
- package/dist/synchronizer/DocSynchronizer.js +14 -0
- package/dist/synchronizer/Synchronizer.d.ts +8 -0
- package/dist/synchronizer/Synchronizer.d.ts.map +1 -1
- package/package.json +3 -3
- package/src/DocHandle.ts +137 -4
- package/src/Repo.ts +123 -56
- package/src/entrypoints/fullfat.ts +1 -2
- package/src/entrypoints/slim.ts +2 -0
- package/src/helpers/tests/storage-adapter-tests.ts +31 -62
- package/src/storage/StorageSubsystem.ts +26 -3
- package/src/synchronizer/CollectionSynchronizer.ts +23 -6
- package/src/synchronizer/DocSynchronizer.ts +15 -0
- package/src/synchronizer/Synchronizer.ts +9 -0
- package/test/DocHandle.test.ts +141 -0
- package/test/Repo.test.ts +73 -0
- package/test/StorageSubsystem.test.ts +17 -0
package/src/helpers/tests/storage-adapter-tests.ts
CHANGED

@@ -1,4 +1,4 @@
-import { describe, expect, it } from "vitest"
+import { describe, expect, beforeEach, it as _it } from "vitest"
 
 import type { StorageAdapterInterface } from "../../storage/StorageAdapterInterface.js"
 
@@ -8,72 +8,61 @@ const PAYLOAD_C = () => new Uint8Array([2, 111, 74, 131, 236, 96, 142, 193])
 
 const LARGE_PAYLOAD = new Uint8Array(100000).map(() => Math.random() * 256)
 
-
-
-
-
-
+type AdapterTestContext = {
+  adapter: StorageAdapterInterface
+}
+
+const it = _it<AdapterTestContext>
+
+export function runStorageAdapterTests(setup: SetupFn, title?: string): void {
+  beforeEach<AdapterTestContext>(async ctx => {
+    const { adapter, teardown = NO_OP } = await setup()
+    ctx.adapter = adapter
+    return teardown
+  })
 
   describe(`Storage adapter acceptance tests ${
     title ? `(${title})` : ""
   }`, () => {
     describe("load", () => {
-      it("should return undefined if there is no data", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return undefined if there is no data", async ({ adapter }) => {
         const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
         expect(actual).toBeUndefined()
-
-        teardown()
       })
     })
 
     describe("save and load", () => {
-      it("should return data that was saved", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should return data that was saved", async ({ adapter }) => {
         await adapter.save(["storage-adapter-id"], PAYLOAD_A())
         const actual = await adapter.load(["storage-adapter-id"])
         expect(actual).toStrictEqual(PAYLOAD_A())
-
-        teardown()
       })
 
-      it("should work with composite keys", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should work with composite keys", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
         const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
         expect(actual).toStrictEqual(PAYLOAD_A())
-
-        teardown()
       })
 
-      it("should work with a large payload", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should work with a large payload", async ({ adapter }) => {
        await adapter.save(["AAAAA", "sync-state", "xxxxx"], LARGE_PAYLOAD)
        const actual = await adapter.load(["AAAAA", "sync-state", "xxxxx"])
        expect(actual).toStrictEqual(LARGE_PAYLOAD)
-
-        teardown()
       })
     })
 
     describe("loadRange", () => {
-      it("should return an empty array if there is no data", async (
-
-
+      it("should return an empty array if there is no data", async ({
+        adapter,
+      }) => {
         expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([])
-
-        teardown()
       })
     })
 
     describe("save and loadRange", () => {
-      it("should return all the data that matches the key", async (
-
-
+      it("should return all the data that matches the key", async ({
+        adapter,
+      }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
         await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B())
         await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C())
@@ -92,13 +81,9 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
           { key: ["AAAAA", "sync-state", "zzzzz"], data: PAYLOAD_C() },
         ])
       )
-
-        teardown()
       })
 
-      it("should only load values that match they key", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should only load values that match they key", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
         await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_C())
 
@@ -113,15 +98,11 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
           { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_C() },
         ])
       )
-
-        teardown()
       })
     })
 
     describe("save and remove", () => {
-      it("after removing, should be empty", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("after removing, should be empty", async ({ adapter }) => {
         await adapter.save(["AAAAA", "snapshot", "xxxxx"], PAYLOAD_A())
         await adapter.remove(["AAAAA", "snapshot", "xxxxx"])
 
@@ -129,30 +110,24 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
         expect(
           await adapter.load(["AAAAA", "snapshot", "xxxxx"])
         ).toBeUndefined()
-
-        teardown()
       })
     })
 
     describe("save and save", () => {
-      it("should overwrite data saved with the same key", async (
-
-
+      it("should overwrite data saved with the same key", async ({
+        adapter,
+      }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_B())
 
         expect(await adapter.loadRange(["AAAAA", "sync-state"])).toStrictEqual([
           { key: ["AAAAA", "sync-state", "xxxxx"], data: PAYLOAD_B() },
         ])
-
-        teardown()
       })
     })
 
     describe("removeRange", () => {
-      it("should remove a range of records", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should remove a range of records", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
         await adapter.save(["AAAAA", "snapshot", "yyyyy"], PAYLOAD_B())
         await adapter.save(["AAAAA", "sync-state", "zzzzz"], PAYLOAD_C())
@@ -162,13 +137,9 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
         expect(await adapter.loadRange(["AAAAA"])).toStrictEqual([
           { key: ["AAAAA", "snapshot", "yyyyy"], data: PAYLOAD_B() },
         ])
-
-        teardown()
       })
 
-      it("should not remove records that don't match", async () => {
-        const { adapter, teardown } = await setup()
-
+      it("should not remove records that don't match", async ({ adapter }) => {
         await adapter.save(["AAAAA", "sync-state", "xxxxx"], PAYLOAD_A())
         await adapter.save(["BBBBB", "sync-state", "zzzzz"], PAYLOAD_B())
 
@@ -178,8 +149,6 @@ export function runStorageAdapterTests(_setup: SetupFn, title?: string): void {
         expect(actual).toStrictEqual([
           { key: ["BBBBB", "sync-state", "zzzzz"], data: PAYLOAD_B() },
         ])
-
-        teardown()
       })
     })
   })
@@ -189,5 +158,5 @@ const NO_OP = () => {}
 
 export type SetupFn = () => Promise<{
   adapter: StorageAdapterInterface
-  teardown?: () => void
+  teardown?: () => void | Promise<void>
 }>
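The reworked acceptance suite now injects the adapter through Vitest's test context: the `beforeEach` hook calls `setup()` and returns the teardown, which may now be async. A minimal sketch of how a storage adapter package might call it — the adapter class, its `close()` method, and the import specifier below are illustrative assumptions, not APIs defined by this diff:

```ts
// Hypothetical consumer of the shared acceptance suite.
import { runStorageAdapterTests } from "@automerge/automerge-repo/helpers/tests/storage-adapter-tests.js" // path illustrative
import { MyStorageAdapter } from "./MyStorageAdapter.js" // hypothetical adapter under test

runStorageAdapterTests(async () => {
  const adapter = new MyStorageAdapter()
  return {
    adapter,
    // teardown may now return a Promise; it runs as the beforeEach cleanup after each test
    teardown: async () => {
      await adapter.close?.() // hypothetical cleanup hook on the adapter
    },
  }
}, "MyStorageAdapter")
```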
package/src/storage/StorageSubsystem.ts
CHANGED

@@ -8,12 +8,22 @@ import { ChunkInfo, StorageKey, StorageId } from "./types.js"
 import { keyHash, headsHash } from "./keyHash.js"
 import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
 import * as Uuid from "uuid"
+import { EventEmitter } from "eventemitter3"
+
+type StorageSubsystemEvents = {
+  "document-loaded": (arg: {
+    documentId: DocumentId
+    durationMillis: number
+    numOps: number
+    numChanges: number
+  }) => void
+}
 
 /**
  * The storage subsystem is responsible for saving and loading Automerge documents to and from
  * storage adapter. It also provides a generic key/value storage interface for other uses.
  */
-export class StorageSubsystem {
+export class StorageSubsystem extends EventEmitter<StorageSubsystemEvents> {
   /** The storage adapter to use for saving and loading documents */
   #storageAdapter: StorageAdapterInterface
 
@@ -29,6 +39,7 @@ export class StorageSubsystem {
   #log = debug(`automerge-repo:storage-subsystem`)
 
   constructor(storageAdapter: StorageAdapterInterface) {
+    super()
     this.#storageAdapter = storageAdapter
   }
 
@@ -130,7 +141,14 @@ export class StorageSubsystem {
     if (binary.length === 0) return null
 
     // Load into an Automerge document
+    const start = performance.now()
     const newDoc = A.loadIncremental(A.init(), binary) as A.Doc<T>
+    const end = performance.now()
+    this.emit("document-loaded", {
+      documentId,
+      durationMillis: end - start,
+      ...A.stats(newDoc),
+    })
 
     // Record the latest heads for the document
     this.#storedHeads.set(documentId, A.getHeads(newDoc))
@@ -232,8 +250,13 @@ export class StorageSubsystem {
     storageId: StorageId
   ): Promise<A.SyncState | undefined> {
     const key = [documentId, "sync-state", storageId]
-
-
+    try {
+      const loaded = await this.#storageAdapter.load(key)
+      return loaded ? A.decodeSyncState(loaded) : undefined
+    } catch (e) {
+      this.#log(`Error loading sync state for ${documentId} from ${storageId}`)
+      return undefined
+    }
   }
 
   async saveSyncState(
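StorageSubsystem is now an EventEmitter and reports how long `A.loadIncremental` took, plus the document's `A.stats()`, every time a document is loaded from storage. A sketch of subscribing to that event — constructing the subsystem directly (rather than letting `Repo` do it) and the `adapter` variable are assumptions made for illustration:

```ts
import { StorageSubsystem } from "@automerge/automerge-repo" // import path illustrative

// `adapter` is any StorageAdapterInterface implementation, e.g. an in-memory adapter.
const storage = new StorageSubsystem(adapter)

storage.on("document-loaded", ({ documentId, durationMillis, numOps, numChanges }) => {
  // durationMillis measures the A.loadIncremental call; numOps/numChanges come from A.stats(newDoc)
  console.log(`${documentId}: ${numChanges} changes / ${numOps} ops loaded in ${durationMillis.toFixed(1)}ms`)
})
```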
package/src/synchronizer/CollectionSynchronizer.ts
CHANGED

@@ -15,7 +15,8 @@ export class CollectionSynchronizer extends Synchronizer {
   #peers: Set<PeerId> = new Set()
 
   /** A map of documentIds to their synchronizers */
-
+  /** @hidden */
+  docSynchronizers: Record<DocumentId, DocSynchronizer> = {}
 
   /** Used to determine if the document is know to the Collection and a synchronizer exists or is being set up */
   #docSetUp: Record<DocumentId, boolean> = {}
@@ -26,11 +27,11 @@ export class CollectionSynchronizer extends Synchronizer {
 
   /** Returns a synchronizer for the given document, creating one if it doesn't already exist. */
   #fetchDocSynchronizer(documentId: DocumentId) {
-    if (!this
+    if (!this.docSynchronizers[documentId]) {
       const handle = this.repo.find(stringifyAutomergeUrl({ documentId }))
-      this
+      this.docSynchronizers[documentId] = this.#initDocSynchronizer(handle)
     }
-    return this
+    return this.docSynchronizers[documentId]
   }
 
   /** Creates a new docSynchronizer and sets it up to propagate messages */
@@ -57,6 +58,7 @@ export class CollectionSynchronizer extends Synchronizer {
     docSynchronizer.on("message", event => this.emit("message", event))
     docSynchronizer.on("open-doc", event => this.emit("open-doc", event))
     docSynchronizer.on("sync-state", event => this.emit("sync-state", event))
+    docSynchronizer.on("metrics", event => this.emit("metrics", event))
     return docSynchronizer
   }
 
@@ -131,7 +133,7 @@ export class CollectionSynchronizer extends Synchronizer {
     }
 
     this.#peers.add(peerId)
-    for (const docSynchronizer of Object.values(this
+    for (const docSynchronizer of Object.values(this.docSynchronizers)) {
      const { documentId } = docSynchronizer
      void this.repo.sharePolicy(peerId, documentId).then(okToShare => {
        if (okToShare) docSynchronizer.beginSync([peerId])
@@ -144,7 +146,7 @@ export class CollectionSynchronizer extends Synchronizer {
     log(`removing peer ${peerId}`)
     this.#peers.delete(peerId)
 
-    for (const docSynchronizer of Object.values(this
+    for (const docSynchronizer of Object.values(this.docSynchronizers)) {
       docSynchronizer.endSync(peerId)
     }
   }
@@ -153,4 +155,19 @@ export class CollectionSynchronizer extends Synchronizer {
   get peers(): PeerId[] {
     return Array.from(this.#peers)
   }
+
+  metrics(): {
+    [key: string]: {
+      peers: PeerId[]
+      size: { numOps: number; numChanges: number }
+    }
+  } {
+    return Object.fromEntries(
+      Object.entries(this.docSynchronizers).map(
+        ([documentId, synchronizer]) => {
+          return [documentId, synchronizer.metrics()]
+        }
+      )
+    )
+  }
 }
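The collection synchronizer can now report a point-in-time snapshot of every document it is syncing. A sketch, assuming access to the repo's CollectionSynchronizer (the Repo tests later in this diff reach it as `repo.synchronizer`):

```ts
// Per-document snapshot: which peers each DocSynchronizer is talking to and how big the doc is.
const snapshot = repo.synchronizer.metrics()
for (const [documentId, { peers, size }] of Object.entries(snapshot)) {
  console.log(`${documentId}: ${peers.length} peer(s), ${size.numChanges} changes / ${size.numOps} ops`)
}
```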
package/src/synchronizer/DocSynchronizer.ts
CHANGED

@@ -351,11 +351,19 @@ export class DocSynchronizer extends Synchronizer {
 
     this.#withSyncState(message.senderId, syncState => {
       this.#handle.update(doc => {
+        const start = performance.now()
         const [newDoc, newSyncState] = A.receiveSyncMessage(
           doc,
           syncState,
           message.data
         )
+        const end = performance.now()
+        this.emit("metrics", {
+          type: "receive-sync-message",
+          documentId: this.#handle.documentId,
+          durationMillis: end - start,
+          ...A.stats(doc),
+        })
 
         this.#setSyncState(message.senderId, newSyncState)
 
@@ -401,4 +409,11 @@ export class DocSynchronizer extends Synchronizer {
 
     this.#pendingSyncMessages = []
   }
+
+  metrics(): { peers: PeerId[]; size: { numOps: number; numChanges: number } } {
+    return {
+      peers: this.#peers,
+      size: this.#handle.metrics(),
+    }
+  }
 }
package/src/synchronizer/Synchronizer.ts
CHANGED

@@ -15,6 +15,7 @@ export interface SynchronizerEvents {
   message: (payload: MessageContents) => void
   "sync-state": (payload: SyncStatePayload) => void
   "open-doc": (arg: OpenDocMessage) => void
+  metrics: (arg: DocSyncMetrics) => void
 }
 
 /** Notify the repo that the sync state has changed */
@@ -23,3 +24,11 @@ export interface SyncStatePayload {
   documentId: DocumentId
   syncState: SyncState
 }
+
+export type DocSyncMetrics = {
+  type: "receive-sync-message"
+  documentId: DocumentId
+  durationMillis: number
+  numOps: number
+  numChanges: number
+}
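Because the CollectionSynchronizer re-emits each DocSynchronizer's `metrics` event, a consumer holding the synchronizer can time individual `receiveSyncMessage` calls. A sketch; the 50 ms threshold and direct use of `repo.synchronizer` are illustrative choices, not part of this diff:

```ts
repo.synchronizer.on("metrics", event => {
  // event is a DocSyncMetrics: { type, documentId, durationMillis, numOps, numChanges }
  if (event.type === "receive-sync-message" && event.durationMillis > 50) {
    console.warn(`slow sync apply on ${event.documentId}: ${event.durationMillis.toFixed(1)}ms`)
  }
})
```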
package/test/DocHandle.test.ts
CHANGED

@@ -7,6 +7,7 @@ import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
 import { DocHandle, DocHandleChangePayload } from "../src/index.js"
 import { TestDoc } from "./types.js"
+import { UNLOADED } from "../src/DocHandle.js"
 
 describe("DocHandle", () => {
   const TEST_ID = parseAutomergeUrl(generateAutomergeUrl()).documentId
@@ -68,6 +69,15 @@ describe("DocHandle", () => {
     assert.equal(doc?.foo, "bar")
   })
 
+  /** HISTORY TRAVERSAL
+   * This API is relatively alpha-ish but we're already
+   * doing things in our own apps that are fairly ambitious
+   * by routing around to a lower-level API.
+   * This is an attempt to wrap up the existing practice
+   * in a slightly more supportable set of APIs but should be
+   * considered provisional: expect further improvements.
+   */
+
   it("should return the heads when requested", async () => {
     const handle = setup()
     handle.change(d => (d.foo = "bar"))
@@ -84,6 +94,94 @@ describe("DocHandle", () => {
     assert.deepEqual(handle.heads(), undefined)
   })
 
+  it("should return the history when requested", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "bar"))
+    handle.change(d => (d.foo = "baz"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    assert.deepEqual(handle.history().length, 2)
+  })
+
+  it("should return a commit from the history", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    handle.change(d => (d.foo = "one"))
+    handle.change(d => (d.foo = "two"))
+    handle.change(d => (d.foo = "three"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const view = handle.view(history[1])
+    assert.deepEqual(view, { foo: "one" })
+  })
+
+  it("should return diffs", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    handle.change(d => (d.foo = "one"))
+    handle.change(d => (d.foo = "two"))
+    handle.change(d => (d.foo = "three"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const patches = handle.diff(history[1])
+    assert.deepEqual(patches, [
+      { action: "put", path: ["foo"], value: "" },
+      { action: "splice", path: ["foo", 0], value: "one" },
+    ])
+  })
+
+  it("should support arbitrary diffs too", async () => {
+    const handle = setup()
+    handle.change(d => (d.foo = "zero"))
+    handle.change(d => (d.foo = "one"))
+    handle.change(d => (d.foo = "two"))
+    handle.change(d => (d.foo = "three"))
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const patches = handle.diff(history[1], history[3])
+    assert.deepEqual(patches, [
+      { action: "put", path: ["foo"], value: "" },
+      { action: "splice", path: ["foo", 0], value: "three" },
+    ])
+    const backPatches = handle.diff(history[3], history[1])
+    assert.deepEqual(backPatches, [
+      { action: "put", path: ["foo"], value: "" },
+      { action: "splice", path: ["foo", 0], value: "one" },
+    ])
+  })
+
+  it("should allow direct access to decoded changes", async () => {
+    const handle = setup()
+    const time = Date.now()
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
+    assert.equal(handle.isReady(), true)
+
+    const metadata = handle.metadata()
+    assert.deepEqual(metadata.message, "commitMessage")
+    // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
+    // but it does round-trip successfully!
+  })
+
+  it("should allow direct access to a specific decoded change", async () => {
+    const handle = setup()
+    const time = Date.now()
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage" })
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage2" })
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage3" })
+    handle.change(d => (d.foo = "foo"), { message: "commitMessage4" })
+    assert.equal(handle.isReady(), true)
+
+    const history = handle.history()
+    const metadata = handle.metadata(history[0][0])
+    assert.deepEqual(metadata.message, "commitMessage")
+    // NOTE: I'm not testing time because of https://github.com/automerge/automerge/issues/965
+    // but it does round-trip successfully!
+  })
+
   /**
    * Once there's a Repo#stop API this case should be covered in accompanying
    * tests and the following test removed.
@@ -325,6 +423,49 @@ describe("DocHandle", () => {
     assert.equal(handle.isDeleted(), true)
   })
 
+  it("should clear document reference when unloaded", async () => {
+    const handle = setup()
+
+    handle.change(doc => {
+      doc.foo = "bar"
+    })
+    const doc = await handle.doc()
+    assert.equal(doc?.foo, "bar")
+
+    handle.unload()
+    assert.equal(handle.isUnloaded(), true)
+
+    const clearedDoc = await handle.doc([UNLOADED])
+    assert.notEqual(clearedDoc?.foo, "bar")
+  })
+
+  it("should allow reloading after unloading", async () => {
+    const handle = setup()
+
+    handle.change(doc => {
+      doc.foo = "bar"
+    })
+    const doc = await handle.doc()
+    assert.equal(doc?.foo, "bar")
+
+    handle.unload()
+
+    // reload to transition from unloaded to loading
+    handle.reload()
+
+    // simulate requesting from the network
+    handle.request()
+
+    // simulate updating from the network
+    handle.update(doc => {
+      return A.change(doc, d => (d.foo = "bar"))
+    })
+
+    const reloadedDoc = await handle.doc()
+    assert.equal(handle.isReady(), true)
+    assert.equal(reloadedDoc?.foo, "bar")
+  })
+
   it("should allow changing at old heads", async () => {
     const handle = setup()
 
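Taken together, the new tests above exercise a provisional history-traversal surface on DocHandle: `history()` (one heads entry per change, oldest first), `view(heads)`, `diff(from, to?)`, and `metadata(changeHash?)`. A condensed sketch of the same flow; the document shape, commit messages, and `repo` variable are illustrative:

```ts
const handle = repo.create<{ foo: string }>()
handle.change(d => (d.foo = "one"), { message: "first" })
handle.change(d => (d.foo = "two"), { message: "second" })

const history = handle.history()                  // one heads entry per change, oldest first
const earlier = handle.view(history[0])           // read-only view of the doc at the first change
const patches = handle.diff(history[0])           // patches from that point to the current doc
const firstMeta = handle.metadata(history[0][0])  // decoded change metadata, e.g. firstMeta.message === "first"
```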
package/test/Repo.test.ts
CHANGED

@@ -486,6 +486,39 @@ describe("Repo", () => {
       const doc = await handle.doc()
       expect(doc).toEqual({})
     })
+
+    describe("handle cache", () => {
+      it("contains doc handle", async () => {
+        const { repo } = setup()
+        const handle = repo.create({ foo: "bar" })
+        await handle.doc()
+        assert(repo.handles[handle.documentId])
+      })
+
+      it("delete removes doc handle", async () => {
+        const { repo } = setup()
+        const handle = repo.create({ foo: "bar" })
+        await handle.doc()
+        await repo.delete(handle.documentId)
+        assert(repo.handles[handle.documentId] === undefined)
+      })
+
+      it("removeFromCache removes doc handle", async () => {
+        const { repo } = setup()
+        const handle = repo.create({ foo: "bar" })
+        await handle.doc()
+        await repo.removeFromCache(handle.documentId)
+        assert(repo.handles[handle.documentId] === undefined)
+      })
+
+      it("removeFromCache for documentId not found", async () => {
+        const { repo } = setup()
+        const badDocumentId = "badbadbad" as DocumentId
+        const handleCacheSize = Object.keys(repo.handles).length
+        await repo.removeFromCache(badDocumentId)
+        assert(Object.keys(repo.handles).length === handleCacheSize)
+      })
+    })
   })
 
   describe("flush behaviour", () => {
@@ -832,6 +865,46 @@ describe("Repo", () => {
       teardown()
     })
 
+    it("synchronizes changes from bobRepo to charlieRepo when loading from storage", async () => {
+      const { bobRepo, bobStorage, teardown } = await setup()
+
+      // We create a repo that uses bobStorage to put a document into its imaginary disk
+      // without it knowing about it
+      const bobRepo2 = new Repo({
+        storage: bobStorage,
+      })
+      const inStorageHandle = bobRepo2.create<TestDoc>({
+        foo: "foundOnFakeDisk",
+      })
+      await bobRepo2.flush()
+
+      // Now, let's load it on the original bob repo (which shares a "disk")
+      const bobFoundIt = bobRepo.find<TestDoc>(inStorageHandle.url)
+      await bobFoundIt.whenReady()
+
+      // Before checking if it syncs, make sure we have it!
+      // (This behaviour is mostly test-validation, we are already testing load/save elsewhere.)
+      assert.deepStrictEqual(await bobFoundIt.doc(), { foo: "foundOnFakeDisk" })
+
+      await pause(10)
+
+      // We should have a docSynchronizer and its peers should be alice and charlie
+      assert.strictEqual(
+        bobRepo.synchronizer.docSynchronizers[bobFoundIt.documentId]?.hasPeer(
+          "alice" as PeerId
+        ),
+        true
+      )
+      assert.strictEqual(
+        bobRepo.synchronizer.docSynchronizers[bobFoundIt.documentId]?.hasPeer(
+          "charlie" as PeerId
+        ),
+        true
+      )
+
+      teardown()
+    })
+
     it("charlieRepo doesn't have a document it's not supposed to have", async () => {
       const { aliceRepo, bobRepo, charlieRepo, notForCharlie, teardown } =
         await setup()
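The Repo tests above also cover a new handle-cache surface: `repo.handles` exposes the in-memory cache, and `repo.removeFromCache(documentId)` evicts a handle without deleting the document. A small sketch; the assumption that a later `find()` recreates the handle and reloads the document from storage is mine, not asserted by these tests:

```ts
const handle = repo.create<{ foo: string }>({ foo: "bar" })
await handle.doc()

await repo.removeFromCache(handle.documentId)
console.log(handle.documentId in repo.handles) // false: the handle is gone from the cache

// Assumption: finding the document again creates a fresh handle backed by storage
const again = repo.find<{ foo: string }>(handle.url)
await again.whenReady()
```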
package/test/StorageSubsystem.test.ts
CHANGED

@@ -211,6 +211,23 @@ describe("StorageSubsystem", () => {
       )
       assert.strictEqual(loadedSyncState, undefined)
     })
+
+    it("returns a undefined if loading an existing sync state fails", async () => {
+      const storage = new StorageSubsystem(adapter)
+
+      const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
+      const bobStorageId = Uuid.v4() as StorageId
+
+      const syncStateKey = [documentId, "sync-state", bobStorageId]
+      // Save garbage data to simulate a corrupted sync state
+      await adapter.save(syncStateKey, Buffer.from("invalid data"))
+
+      const loadedSyncState = await storage.loadSyncState(
+        documentId,
+        bobStorageId
+      )
+      assert.strictEqual(loadedSyncState, undefined)
+    })
   })
 
   describe("storage id", () => {