@automerge/automerge-repo 1.0.12 → 1.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/AutomergeUrl.d.ts +45 -0
- package/dist/AutomergeUrl.d.ts.map +1 -0
- package/dist/AutomergeUrl.js +108 -0
- package/dist/DocHandle.js +1 -1
- package/dist/Repo.d.ts +5 -5
- package/dist/Repo.d.ts.map +1 -1
- package/dist/Repo.js +10 -21
- package/dist/helpers/cbor.js +1 -1
- package/dist/index.d.ts +3 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/network/NetworkAdapter.d.ts +3 -3
- package/dist/network/NetworkAdapter.d.ts.map +1 -1
- package/dist/network/messages.d.ts +7 -18
- package/dist/network/messages.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.d.ts +19 -22
- package/dist/storage/StorageAdapter.d.ts.map +1 -1
- package/dist/storage/StorageAdapter.js +2 -2
- package/dist/storage/StorageSubsystem.d.ts +39 -3
- package/dist/storage/StorageSubsystem.d.ts.map +1 -1
- package/dist/storage/StorageSubsystem.js +128 -75
- package/dist/storage/chunkTypeFromKey.d.ts +13 -0
- package/dist/storage/chunkTypeFromKey.d.ts.map +1 -0
- package/dist/storage/chunkTypeFromKey.js +18 -0
- package/dist/storage/keyHash.d.ts +4 -0
- package/dist/storage/keyHash.d.ts.map +1 -0
- package/dist/storage/keyHash.js +15 -0
- package/dist/storage/types.d.ts +37 -0
- package/dist/storage/types.d.ts.map +1 -0
- package/dist/storage/types.js +1 -0
- package/dist/synchronizer/CollectionSynchronizer.js +1 -1
- package/dist/types.d.ts +20 -12
- package/dist/types.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/AutomergeUrl.ts +144 -0
- package/src/DocHandle.ts +1 -1
- package/src/Repo.ts +14 -26
- package/src/helpers/cbor.ts +1 -1
- package/src/index.ts +12 -4
- package/src/network/NetworkAdapter.ts +3 -3
- package/src/network/messages.ts +8 -21
- package/src/storage/StorageAdapter.ts +23 -30
- package/src/storage/StorageSubsystem.ts +159 -93
- package/src/storage/chunkTypeFromKey.ts +22 -0
- package/src/storage/keyHash.ts +17 -0
- package/src/storage/types.ts +39 -0
- package/src/synchronizer/CollectionSynchronizer.ts +1 -1
- package/src/types.ts +23 -11
- package/test/AutomergeUrl.test.ts +100 -0
- package/test/DocHandle.test.ts +1 -1
- package/test/DocSynchronizer.test.ts +1 -1
- package/test/Repo.test.ts +22 -6
- package/test/StorageSubsystem.test.ts +144 -36
- package/test/helpers/DummyStorageAdapter.ts +2 -4
- package/dist/DocUrl.d.ts +0 -39
- package/dist/DocUrl.d.ts.map +0 -1
- package/dist/DocUrl.js +0 -74
- package/src/DocUrl.ts +0 -96
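
The headline change in this release is a rename: src/DocUrl.ts becomes src/AutomergeUrl.ts (with matching dist/ output and a new test file), alongside a reworked storage layer (chunk types, key hashing, and an arbitrary key/value API). As a rough sketch of the URL helpers exercised by the new test file below — using the test file's own relative imports; application code would presumably import these from the package root via the updated index.ts, which is not shown in full here:

// Same relative imports as the new test file.
import {
  generateAutomergeUrl,
  isValidAutomergeUrl,
  parseAutomergeUrl,
  stringifyAutomergeUrl,
} from "../src/AutomergeUrl.js"

// A fresh URL: "automerge:" + a bs58check-encoded (checksummed) document ID.
const url = generateAutomergeUrl()

// Both encodings of the document ID can be recovered from the URL.
const { documentId, binaryDocumentId } = parseAutomergeUrl(url)

// Either encoding round-trips back to the same URL; per the new tests, a bare
// documentId (without the wrapping object) is also accepted.
console.log(stringifyAutomergeUrl({ documentId }) === url) // true
console.log(stringifyAutomergeUrl({ documentId: binaryDocumentId }) === url) // true

// Type guard: checks the "automerge:" prefix and the bs58check checksum,
// narrowing the string to AutomergeUrl on success.
if (isValidAutomergeUrl(url)) {
  // safe to hand to repo.find(url)
}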
package/test/AutomergeUrl.test.ts
ADDED
@@ -0,0 +1,100 @@
+import assert from "assert"
+import bs58check from "bs58check"
+import { describe, it } from "vitest"
+import {
+  generateAutomergeUrl,
+  isValidAutomergeUrl,
+  parseAutomergeUrl,
+  stringifyAutomergeUrl,
+} from "../src/AutomergeUrl.js"
+import type {
+  AutomergeUrl,
+  BinaryDocumentId,
+  DocumentId,
+} from "../src/types.js"
+
+const goodUrl = "automerge:4NMNnkMhL8jXrdJ9jamS58PAVdXu" as AutomergeUrl
+const badChecksumUrl = "automerge:badbadbad" as AutomergeUrl
+const badPrefixUrl = "yjs😉:4NMNnkMhL8jXrdJ9jamS58PAVdXu" as AutomergeUrl
+
+const goodDocumentId = "4NMNnkMhL8jXrdJ9jamS58PAVdXu" as DocumentId
+const badChecksumDocumentId = "badbadbad" as DocumentId
+const badUuidDocumentId = bs58check.encode(
+  new Uint8Array([1, 2, 3, 4, 42, -1, 69, 777])
+) as DocumentId
+
+const goodBinaryDocumentId = Uint8Array.from([
+  241, 194, 156, 132, 116, 200, 74, 222, 184, 0, 190, 71, 98, 125, 51, 191,
+]) as BinaryDocumentId
+
+describe("AutomergeUrl", () => {
+  describe("generateAutomergeUrl", () => {
+    it("should generate a valid Automerge URL", () => {
+      const url = generateAutomergeUrl()
+      assert(url.startsWith("automerge:"))
+      assert(parseAutomergeUrl(url).binaryDocumentId)
+    })
+  })
+
+  describe("stringifyAutomergeUrl", () => {
+    it("should stringify a binary document ID", () => {
+      const url = stringifyAutomergeUrl({ documentId: goodBinaryDocumentId })
+      assert.strictEqual(url, goodUrl)
+    })
+
+    it("should stringify a string document ID", () => {
+      const url = stringifyAutomergeUrl({ documentId: goodDocumentId })
+      assert.strictEqual(url, goodUrl)
+    })
+
+    it("supports passing a document ID without wrapping it in an object", () => {
+      const url1 = stringifyAutomergeUrl(goodDocumentId)
+      const url2 = stringifyAutomergeUrl({ documentId: goodDocumentId })
+      assert.equal(url1, url2)
+    })
+  })
+
+  describe("parseAutomergeUrl", () => {
+    it("should parse a valid url", () => {
+      const { binaryDocumentId, documentId } = parseAutomergeUrl(goodUrl)
+      assert.deepEqual(binaryDocumentId, goodBinaryDocumentId)
+      assert.equal(documentId, goodDocumentId)
+    })
+
+    it("should throw on url with invalid checksum", () => {
+      assert.throws(() => parseAutomergeUrl(badChecksumUrl))
+    })
+
+    it("should throw on url with invalid prefix", () => {
+      assert.throws(() => parseAutomergeUrl(badPrefixUrl))
+    })
+  })
+
+  describe("isValidAutomergeUrl", () => {
+    it("should return true for a valid url", () => {
+      assert(isValidAutomergeUrl(goodUrl) === true)
+    })
+
+    it("should return false for null url", () => {
+      assert(isValidAutomergeUrl(null) === false)
+    })
+
+    it("should return false for a url with invalid checksum", () => {
+      assert(isValidAutomergeUrl(badChecksumUrl) === false)
+    })
+
+    it("should return false for a url with invalid prefix", () => {
+      assert(isValidAutomergeUrl(badPrefixUrl) === false)
+    })
+
+    it("should return false for a documentId with an invalid checksum", () => {
+      const url = stringifyAutomergeUrl({ documentId: badChecksumDocumentId })
+      assert(isValidAutomergeUrl(url) === false)
+    })
+
+    it("should return false for a documentId that is not a valid UUID ", () => {
+      const url = stringifyAutomergeUrl({ documentId: badUuidDocumentId })
+      assert(isValidAutomergeUrl(url) === false)
+    })
+  })
+})
package/test/DocHandle.test.ts
CHANGED
@@ -2,7 +2,7 @@ import * as A from "@automerge/automerge/next"
 import assert from "assert"
 import { decode } from "cbor-x"
 import { describe, it } from "vitest"
-import { generateAutomergeUrl, parseAutomergeUrl } from "../src/
+import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"
 import { DocHandle, DocHandleChangePayload } from "../src/index.js"

package/test/DocSynchronizer.test.ts
CHANGED
@@ -1,7 +1,7 @@
 import assert from "assert"
 import { describe, it } from "vitest"
 import { DocHandle } from "../src/DocHandle.js"
-import { generateAutomergeUrl, parseAutomergeUrl } from "../src/
+import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import {
   DocumentUnavailableMessage,
package/test/Repo.test.ts
CHANGED
@@ -2,9 +2,12 @@ import { MessageChannelNetworkAdapter } from "@automerge/automerge-repo-network-
 import assert from "assert"
 import * as Uuid from "uuid"
 import { describe, it } from "vitest"
-import { parseAutomergeUrl } from "../
+import { parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { READY } from "../src/DocHandle.js"
-import {
+import {
+  generateAutomergeUrl,
+  stringifyAutomergeUrl,
+} from "../src/AutomergeUrl.js"
 import { Repo } from "../src/Repo.js"
 import { eventPromise } from "../src/helpers/eventPromise.js"
 import { pause } from "../src/helpers/pause.js"

@@ -12,6 +15,7 @@ import {
   AutomergeUrl,
   DocHandle,
   DocumentId,
+  LegacyDocumentId,
   PeerId,
   SharePolicy,
 } from "../src/index.js"

@@ -51,7 +55,7 @@ describe("Repo", () => {
       assert.equal(handle.isReady(), true)
     })

-    it("can find a document
+    it("can find a document by url", () => {
       const { repo } = setup()
       const handle = repo.create<TestDoc>()
       handle.change((d: TestDoc) => {

@@ -63,7 +67,19 @@ describe("Repo", () => {
       assert.deepEqual(handle2.docSync(), { foo: "bar" })
     })

-    it("can find a document
+    it("can find a document by its unprefixed document ID", () => {
+      const { repo } = setup()
+      const handle = repo.create<TestDoc>()
+      handle.change((d: TestDoc) => {
+        d.foo = "bar"
+      })
+
+      const handle2 = repo.find(handle.documentId)
+      assert.equal(handle, handle2)
+      assert.deepEqual(handle2.docSync(), { foo: "bar" })
+    })
+
+    it("can find a document by legacy UUID (for now)", () => {
       disableConsoleWarn()

       const { repo } = setup()

@@ -74,9 +90,9 @@ describe("Repo", () => {

       const url = handle.url
       const { binaryDocumentId } = parseAutomergeUrl(url)
-      const
+      const legacyDocId = Uuid.stringify(binaryDocumentId) as LegacyDocumentId

-      const handle2 = repo.find(
+      const handle2 = repo.find(legacyDocId)
       assert.equal(handle, handle2)
       assert.deepEqual(handle2.docSync(), { foo: "bar" })

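The Repo tests above now cover three ways of locating the same document. A hedged sketch of that usage, mirroring the test setup — the storage/network-less Repo construction here is illustrative; the tests instead wire repos together over MessageChannelNetworkAdapter:

import * as Uuid from "uuid"
import { parseAutomergeUrl } from "../src/AutomergeUrl.js"
import { Repo } from "../src/Repo.js"
import { LegacyDocumentId } from "../src/index.js"

type TestDoc = { foo: string }

// Illustrative local-only Repo (no network adapters).
const repo = new Repo({ network: [] })

const handle = repo.create<TestDoc>()
handle.change(d => {
  d.foo = "bar"
})

// 1. By full Automerge URL ("automerge:" + bs58check document ID)
const byUrl = repo.find<TestDoc>(handle.url)

// 2. By the unprefixed document ID
const byId = repo.find<TestDoc>(handle.documentId)

// 3. By legacy UUID — accepted "for now"; the test silences a console warning
//    first, which suggests this path warns before resolving.
const legacyDocId = Uuid.stringify(
  parseAutomergeUrl(handle.url).binaryDocumentId
) as LegacyDocumentId
const byLegacy = repo.find<TestDoc>(legacyDocId)

// All three resolve to the same handle in the tests above.
console.log(byUrl === byId && byId === byLegacy) // true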
package/test/StorageSubsystem.test.ts
CHANGED
@@ -5,9 +5,11 @@ import fs from "fs"
 import os from "os"
 import path from "path"
 import { describe, it } from "vitest"
-import { generateAutomergeUrl, parseAutomergeUrl } from "../src/
+import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
 import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
+import { cbor } from "../src/index.js"
+import { pause } from "../src/helpers/pause.js"

 const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))

@@ -19,55 +21,161 @@ describe("StorageSubsystem", () => {

   Object.entries(adaptersToTest).forEach(([adapterName, adapter]) => {
     describe(adapterName, () => {
-
-
+      describe("Automerge document storage", () => {
+        it("stores and retrieves an Automerge document", async () => {
+          const storage = new StorageSubsystem(adapter)

-
-
+          const doc = A.change(A.init<any>(), "test", d => {
+            d.foo = "bar"
+          })
+
+          // save it to storage
+          const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
+          await storage.saveDoc(key, doc)
+
+          // reload it from storage
+          const reloadedDoc = await storage.loadDoc(key)
+
+          // check that it's the same doc
+          assert.deepStrictEqual(reloadedDoc, doc)
+        })
+
+        it("retrieves an Automerge document following lots of changes", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          type TestDoc = { foo: number }
+
+          const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
+
+          let doc = A.init<TestDoc>()
+
+          const N = 100
+          for (let i = 0; i < N; i++) {
+            doc = A.change(doc, "test", d => {
+              d.foo = i
+            })
+            // save it to storage
+            await storage.saveDoc(key, doc)
+          }
+
+          // reload it from storage, simulating a new process
+          const storage2 = new StorageSubsystem(adapter)
+          const reloadedDoc = await storage2.loadDoc<TestDoc>(key)
+
+          // check that the doc has the right value
+          assert.equal(reloadedDoc?.foo, N - 1)
         })

-
-
-
+        it("stores incremental changes following a load", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          const doc = A.change(A.init<any>(), "test", d => {
+            d.foo = "bar"
+          })
+
+          // save it to storage
+          const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
+          storage.saveDoc(key, doc)
+
+          // reload it from storage, simulating a new process
+          const storage2 = new StorageSubsystem(adapter)
+          const reloadedDoc = await storage2.loadDoc(key)
+
+          assert(reloadedDoc, "doc should be loaded")
+
+          // make a change
+          const changedDoc = A.change<any>(reloadedDoc, "test 2", d => {
+            d.foo = "baz"
+          })
+
+          // save it to storage
+          storage2.saveDoc(key, changedDoc)
+        })
+
+        it("removes an Automerge document", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          const doc = A.change(A.init<any>(), "test", d => {
+            d.foo = "bar"
+          })
+
+          // save it to storage
+          const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
+          await storage.saveDoc(key, doc)

-
-
+          // reload it from storage
+          const reloadedDoc = await storage.loadDoc(key)

-
-
+          // check that it's the same doc
+          assert.deepStrictEqual(reloadedDoc, doc)
+
+          // remove it
+          await storage.removeDoc(key)
+
+          // reload it from storage
+          const reloadedDoc2 = await storage.loadDoc(key)
+
+          // check that it's undefined
+          assert.equal(reloadedDoc2, undefined)
+        })
       })
-})
-})

-
-
-
+      describe("Arbitrary key/value storage", () => {
+        it("stores and retrieves a blob", async () => {
+          const storage = new StorageSubsystem(adapter)

-
-        d.foo = "bar"
-      })
+          const value = cbor.encode({ foo: "bar" })

-
-
-
+          const namespace = "MyCoolAdapter"
+          const key = "ABC123"
+          await storage.save(namespace, key, value)

-
-
+          const reloadedValue = await storage.load(namespace, key)
+          assert.notEqual(reloadedValue, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue)["foo"], "bar")
+        })

-
-
+        it("keeps namespaces separate", async () => {
+          const storage = new StorageSubsystem(adapter)

-
+          const key = "ABC123"

-
-
-
-
+          const namespace1 = "MyCoolAdapter"
+          const value1 = cbor.encode({ foo: "bar" })
+          await storage.save(namespace1, key, value1)
+
+          const namespace2 = "SomeDumbAdapter"
+          const value2 = cbor.encode({ baz: "pizza" })
+          await storage.save(namespace2, key, value2)

-
-
+          const reloadedValue1 = await storage.load(namespace1, key)
+          assert.notEqual(reloadedValue1, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue1)["foo"], "bar")

-
-
+          const reloadedValue2 = await storage.load(namespace2, key)
+          assert.notEqual(reloadedValue2, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue2)["baz"], "pizza")
+        })
+
+        it("removes a blob", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          const value = cbor.encode({ foo: "bar" })
+
+          const namespace = "MyCoolAdapter"
+          const key = "ABC123"
+          await storage.save(namespace, key, value)
+
+          const reloadedValue = await storage.load(namespace, key)
+          assert.notEqual(reloadedValue, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue)["foo"], "bar")
+
+          await storage.remove(namespace, key)
+
+          const reloadedValue2 = await storage.load(namespace, key)
+          assert.equal(reloadedValue2, undefined)
+        })
+      })
+    })
   })
 })
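The new "Arbitrary key/value storage" tests above outline the StorageSubsystem API added in this release: alongside saveDoc/loadDoc/removeDoc there is now a namespaced save/load/remove for raw blobs. A sketch using the same pieces the test file imports (StorageSubsystem is an internal class; application code normally just hands a storage adapter to Repo):

// Same relative imports as the test file above.
import { cbor } from "../src/index.js"
import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"

const demo = async () => {
  // Any storage adapter works; the tests run the same suite against
  // DummyStorageAdapter and a filesystem-backed adapter (note the tempDir setup).
  const storage = new StorageSubsystem(new DummyStorageAdapter())

  // New namespaced blob API: save(namespace, key, bytes)
  const value = cbor.encode({ foo: "bar" })
  await storage.save("MyCoolAdapter", "ABC123", value)

  // load() resolves to the stored bytes, or undefined when nothing is there
  const reloaded = await storage.load("MyCoolAdapter", "ABC123")
  if (reloaded) console.log(cbor.decode(reloaded)) // → { foo: "bar" }

  // remove() deletes the blob; a subsequent load() yields undefined
  await storage.remove("MyCoolAdapter", "ABC123")
  console.log(await storage.load("MyCoolAdapter", "ABC123")) // → undefined
}

demo()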
package/test/helpers/DummyStorageAdapter.ts
CHANGED
@@ -1,4 +1,4 @@
-import { StorageAdapter, type StorageKey } from "../../src/index.js"
+import { Chunk, StorageAdapter, type StorageKey } from "../../src/index.js"

 export class DummyStorageAdapter implements StorageAdapter {
   #data: Record<string, Uint8Array> = {}

@@ -11,9 +11,7 @@ export class DummyStorageAdapter implements StorageAdapter {
     return key.split(".")
   }

-  async loadRange(
-    keyPrefix: StorageKey
-  ): Promise<{ data: Uint8Array; key: StorageKey }[]> {
+  async loadRange(keyPrefix: StorageKey): Promise<Chunk[]> {
     const range = Object.entries(this.#data)
       .filter(([key, _]) => key.startsWith(this.#keyToString(keyPrefix)))
       .map(([key, data]) => ({ key: this.#stringToKey(key), data }))
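The DummyStorageAdapter change above reflects the reworked StorageAdapter surface: loadRange now returns the exported Chunk type rather than an inline { data, key } array. A sketch of a minimal in-memory adapter against that shape — the import line matches the diff; the remaining members (load/save/remove/removeRange) are assumed from the usual adapter surface and are not shown in this diff:

import { Chunk, StorageAdapter, type StorageKey } from "../../src/index.js"

// Minimal in-memory adapter, shaped like the DummyStorageAdapter above.
export class MemoryStorageAdapter implements StorageAdapter {
  #data: Record<string, Uint8Array> = {}

  #keyToString(key: StorageKey): string {
    return key.join(".")
  }

  #stringToKey(key: string): StorageKey {
    return key.split(".")
  }

  async load(key: StorageKey): Promise<Uint8Array | undefined> {
    return this.#data[this.#keyToString(key)]
  }

  async save(key: StorageKey, data: Uint8Array): Promise<void> {
    this.#data[this.#keyToString(key)] = data
  }

  async remove(key: StorageKey): Promise<void> {
    delete this.#data[this.#keyToString(key)]
  }

  // New signature: Chunk replaces the inline { data; key } element type
  // that the old loadRange declaration spelled out.
  async loadRange(keyPrefix: StorageKey): Promise<Chunk[]> {
    const prefix = this.#keyToString(keyPrefix)
    return Object.entries(this.#data)
      .filter(([key]) => key.startsWith(prefix))
      .map(([key, data]) => ({ key: this.#stringToKey(key), data }))
  }

  // Assumed member; not part of this diff.
  async removeRange(keyPrefix: StorageKey): Promise<void> {
    const prefix = this.#keyToString(keyPrefix)
    for (const key of Object.keys(this.#data)) {
      if (key.startsWith(prefix)) delete this.#data[key]
    }
  }
}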
package/dist/DocUrl.d.ts
DELETED
@@ -1,39 +0,0 @@
-import { type AutomergeUrl, type BinaryDocumentId, type DocumentId } from "./types.js";
-export declare const urlPrefix = "automerge:";
-/**
- * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
- *
- * @param url
- * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
- */
-export declare const parseAutomergeUrl: (url: AutomergeUrl) => {
-    binaryDocumentId: BinaryDocumentId;
-    documentId: DocumentId;
-};
-/**
- * Given a documentId in either canonical form, return an Automerge URL
- * Throws on invalid input.
- * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId: BinaryDocumentId | DocumentId }
- * @returns AutomergeUrl
- */
-export declare const stringifyAutomergeUrl: ({ documentId, }: {
-    documentId: DocumentId | BinaryDocumentId;
-}) => AutomergeUrl;
-/**
- * Given a string, return true if it is a valid Automerge URL
- * also acts as a type discriminator in Typescript.
- * @param str: URL candidate
- * @returns boolean
- */
-export declare const isValidAutomergeUrl: (str: string) => str is AutomergeUrl;
-/**
- * generateAutomergeUrl produces a new AutomergeUrl.
- * generally only called by create(), but used in tests as well.
- * @returns a new Automerge URL with a random UUID documentId
- */
-export declare const generateAutomergeUrl: () => AutomergeUrl;
-export declare const documentIdToBinary: (docId: DocumentId) => BinaryDocumentId | undefined;
-export declare const binaryToDocumentId: (docId: BinaryDocumentId) => DocumentId;
-export declare const parseLegacyUUID: (str: string) => AutomergeUrl | undefined;
-//# sourceMappingURL=DocUrl.d.ts.map
package/dist/DocUrl.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"DocUrl.d.ts","sourceRoot":"","sources":["../src/DocUrl.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,YAAY,EACjB,KAAK,gBAAgB,EACrB,KAAK,UAAU,EAChB,MAAM,YAAY,CAAA;AAInB,eAAO,MAAM,SAAS,eAAe,CAAA;AAErC;;;;;GAKG;AACH,eAAO,MAAM,iBAAiB,QAAS,YAAY;;;CAIlD,CAAA;AAED;;;;;;GAMG;AACH,eAAO,MAAM,qBAAqB;gBAGpB,UAAU,GAAG,gBAAgB;MACvC,YAQH,CAAA;AAED;;;;;GAKG;AACH,eAAO,MAAM,mBAAmB,QAAS,MAAM,wBAK9C,CAAA;AAED;;;;GAIG;AACH,eAAO,MAAM,oBAAoB,QAAO,YAGpC,CAAA;AAEJ,eAAO,MAAM,kBAAkB,UACtB,UAAU,KAChB,gBAAgB,GAAG,SACyC,CAAA;AAE/D,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,UACtB,CAAA;AAEvC,eAAO,MAAM,eAAe,QAAS,MAAM,KAAG,YAAY,GAAG,SAM5D,CAAA"}
package/dist/DocUrl.js
DELETED
@@ -1,74 +0,0 @@
-import * as Uuid from "uuid";
-import bs58check from "bs58check";
-export const urlPrefix = "automerge:";
-/**
- * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
- *
- * @param url
- * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
- */
-export const parseAutomergeUrl = (url) => {
-    const { binaryDocumentId, documentId } = parts(url);
-    if (!binaryDocumentId)
-        throw new Error("Invalid document URL: " + url);
-    return { binaryDocumentId, documentId };
-};
-/**
- * Given a documentId in either canonical form, return an Automerge URL
- * Throws on invalid input.
- * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId: BinaryDocumentId | DocumentId }
- * @returns AutomergeUrl
- */
-export const stringifyAutomergeUrl = ({ documentId, }) => {
-    if (documentId instanceof Uint8Array)
-        return (urlPrefix +
-            binaryToDocumentId(documentId));
-    else if (typeof documentId === "string") {
-        return (urlPrefix + documentId);
-    }
-    throw new Error("Invalid documentId: " + documentId);
-};
-/**
- * Given a string, return true if it is a valid Automerge URL
- * also acts as a type discriminator in Typescript.
- * @param str: URL candidate
- * @returns boolean
- */
-export const isValidAutomergeUrl = (str) => {
-    if (!str.startsWith(urlPrefix))
-        return false;
-    const { binaryDocumentId: documentId } = parts(str);
-    return documentId ? true : false;
-};
-/**
- * generateAutomergeUrl produces a new AutomergeUrl.
- * generally only called by create(), but used in tests as well.
- * @returns a new Automerge URL with a random UUID documentId
- */
-export const generateAutomergeUrl = () => stringifyAutomergeUrl({
-    documentId: Uuid.v4(null, new Uint8Array(16)),
-});
-export const documentIdToBinary = (docId) => bs58check.decodeUnsafe(docId);
-export const binaryToDocumentId = (docId) => bs58check.encode(docId);
-export const parseLegacyUUID = (str) => {
-    if (Uuid.validate(str)) {
-        const uuid = Uuid.parse(str);
-        return stringifyAutomergeUrl({ documentId: uuid });
-    }
-    return undefined;
-};
-/**
- * parts breaks up the URL into constituent pieces,
- * eventually this could include things like heads, so we use this structure
- * we return both a binary & string-encoded version of the document ID
- * @param str
- * @returns { binaryDocumentId, documentId }
- */
-const parts = (str) => {
-    const regex = new RegExp(`^${urlPrefix}(\\w+)$`);
-    const [_, docMatch] = str.match(regex) || [];
-    const documentId = docMatch;
-    const binaryDocumentId = documentIdToBinary(documentId);
-    return { binaryDocumentId, documentId };
-};
package/src/DocUrl.ts
DELETED
@@ -1,96 +0,0 @@
-import {
-  type AutomergeUrl,
-  type BinaryDocumentId,
-  type DocumentId,
-} from "./types.js"
-import * as Uuid from "uuid"
-import bs58check from "bs58check"
-
-export const urlPrefix = "automerge:"
-
-/**
- * given an Automerge URL, return a decoded DocumentId (and the encoded DocumentId)
- *
- * @param url
- * @returns { binaryDocumentId: BinaryDocumentId, documentId: DocumentId }
- */
-export const parseAutomergeUrl = (url: AutomergeUrl) => {
-  const { binaryDocumentId, documentId } = parts(url)
-  if (!binaryDocumentId) throw new Error("Invalid document URL: " + url)
-  return { binaryDocumentId, documentId }
-}
-
-/**
- * Given a documentId in either canonical form, return an Automerge URL
- * Throws on invalid input.
- * Note: this is an object because we anticipate adding fields in the future.
- * @param { documentId: BinaryDocumentId | DocumentId }
- * @returns AutomergeUrl
- */
-export const stringifyAutomergeUrl = ({
-  documentId,
-}: {
-  documentId: DocumentId | BinaryDocumentId
-}): AutomergeUrl => {
-  if (documentId instanceof Uint8Array)
-    return (urlPrefix +
-      binaryToDocumentId(documentId as BinaryDocumentId)) as AutomergeUrl
-  else if (typeof documentId === "string") {
-    return (urlPrefix + documentId) as AutomergeUrl
-  }
-  throw new Error("Invalid documentId: " + documentId)
-}
-
-/**
- * Given a string, return true if it is a valid Automerge URL
- * also acts as a type discriminator in Typescript.
- * @param str: URL candidate
- * @returns boolean
- */
-export const isValidAutomergeUrl = (str: string): str is AutomergeUrl => {
-  if (!str.startsWith(urlPrefix)) return false
-
-  const { binaryDocumentId: documentId } = parts(str)
-  return documentId ? true : false
-}
-
-/**
- * generateAutomergeUrl produces a new AutomergeUrl.
- * generally only called by create(), but used in tests as well.
- * @returns a new Automerge URL with a random UUID documentId
- */
-export const generateAutomergeUrl = (): AutomergeUrl =>
-  stringifyAutomergeUrl({
-    documentId: Uuid.v4(null, new Uint8Array(16)) as BinaryDocumentId,
-  })
-
-export const documentIdToBinary = (
-  docId: DocumentId
-): BinaryDocumentId | undefined =>
-  bs58check.decodeUnsafe(docId) as BinaryDocumentId | undefined
-
-export const binaryToDocumentId = (docId: BinaryDocumentId): DocumentId =>
-  bs58check.encode(docId) as DocumentId
-
-export const parseLegacyUUID = (str: string): AutomergeUrl | undefined => {
-  if (Uuid.validate(str)) {
-    const uuid = Uuid.parse(str) as BinaryDocumentId
-    return stringifyAutomergeUrl({ documentId: uuid })
-  }
-  return undefined
-}
-
-/**
- * parts breaks up the URL into constituent pieces,
- * eventually this could include things like heads, so we use this structure
- * we return both a binary & string-encoded version of the document ID
- * @param str
- * @returns { binaryDocumentId, documentId }
- */
-const parts = (str: string) => {
-  const regex = new RegExp(`^${urlPrefix}(\\w+)$`)
-  const [_, docMatch] = str.match(regex) || []
-  const documentId = docMatch as DocumentId
-  const binaryDocumentId = documentIdToBinary(documentId)
-  return { binaryDocumentId, documentId }
-}