@comapeo/core 2.0.1 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. package/dist/blob-store/downloader.d.ts +43 -0
  2. package/dist/blob-store/downloader.d.ts.map +1 -0
  3. package/dist/blob-store/entries-stream.d.ts +13 -0
  4. package/dist/blob-store/entries-stream.d.ts.map +1 -0
  5. package/dist/blob-store/hyperdrive-index.d.ts +20 -0
  6. package/dist/blob-store/hyperdrive-index.d.ts.map +1 -0
  7. package/dist/blob-store/index.d.ts +34 -29
  8. package/dist/blob-store/index.d.ts.map +1 -1
  9. package/dist/blob-store/utils.d.ts +27 -0
  10. package/dist/blob-store/utils.d.ts.map +1 -0
  11. package/dist/constants.d.ts +2 -1
  12. package/dist/constants.d.ts.map +1 -1
  13. package/dist/core-manager/index.d.ts +11 -1
  14. package/dist/core-manager/index.d.ts.map +1 -1
  15. package/dist/core-ownership.d.ts.map +1 -1
  16. package/dist/datastore/index.d.ts +5 -4
  17. package/dist/datastore/index.d.ts.map +1 -1
  18. package/dist/datatype/index.d.ts +5 -1
  19. package/dist/discovery/local-discovery.d.ts.map +1 -1
  20. package/dist/errors.d.ts +6 -1
  21. package/dist/errors.d.ts.map +1 -1
  22. package/dist/fastify-plugins/blobs.d.ts.map +1 -1
  23. package/dist/fastify-plugins/maps.d.ts.map +1 -1
  24. package/dist/generated/extensions.d.ts +31 -0
  25. package/dist/generated/extensions.d.ts.map +1 -1
  26. package/dist/index.d.ts +2 -0
  27. package/dist/index.d.ts.map +1 -1
  28. package/dist/lib/drizzle-helpers.d.ts +6 -0
  29. package/dist/lib/drizzle-helpers.d.ts.map +1 -0
  30. package/dist/lib/error.d.ts +51 -0
  31. package/dist/lib/error.d.ts.map +1 -0
  32. package/dist/lib/get-own.d.ts +9 -0
  33. package/dist/lib/get-own.d.ts.map +1 -0
  34. package/dist/lib/is-hostname-ip-address.d.ts +17 -0
  35. package/dist/lib/is-hostname-ip-address.d.ts.map +1 -0
  36. package/dist/lib/ws-core-replicator.d.ts +11 -0
  37. package/dist/lib/ws-core-replicator.d.ts.map +1 -0
  38. package/dist/mapeo-manager.d.ts +18 -22
  39. package/dist/mapeo-manager.d.ts.map +1 -1
  40. package/dist/mapeo-project.d.ts +459 -26
  41. package/dist/mapeo-project.d.ts.map +1 -1
  42. package/dist/member-api.d.ts +44 -1
  43. package/dist/member-api.d.ts.map +1 -1
  44. package/dist/roles.d.ts.map +1 -1
  45. package/dist/schema/client.d.ts +17 -5
  46. package/dist/schema/client.d.ts.map +1 -1
  47. package/dist/schema/project.d.ts +212 -2
  48. package/dist/schema/project.d.ts.map +1 -1
  49. package/dist/sync/core-sync-state.d.ts +20 -15
  50. package/dist/sync/core-sync-state.d.ts.map +1 -1
  51. package/dist/sync/namespace-sync-state.d.ts +13 -1
  52. package/dist/sync/namespace-sync-state.d.ts.map +1 -1
  53. package/dist/sync/peer-sync-controller.d.ts +1 -1
  54. package/dist/sync/peer-sync-controller.d.ts.map +1 -1
  55. package/dist/sync/sync-api.d.ts +47 -2
  56. package/dist/sync/sync-api.d.ts.map +1 -1
  57. package/dist/sync/sync-state.d.ts +12 -0
  58. package/dist/sync/sync-state.d.ts.map +1 -1
  59. package/dist/translation-api.d.ts +2 -2
  60. package/dist/translation-api.d.ts.map +1 -1
  61. package/dist/types.d.ts +10 -2
  62. package/dist/types.d.ts.map +1 -1
  63. package/drizzle/client/0001_chubby_cargill.sql +12 -0
  64. package/drizzle/client/meta/0001_snapshot.json +208 -0
  65. package/drizzle/client/meta/_journal.json +7 -0
  66. package/drizzle/project/0001_medical_wendell_rand.sql +22 -0
  67. package/drizzle/project/meta/0001_snapshot.json +1267 -0
  68. package/drizzle/project/meta/_journal.json +7 -0
  69. package/package.json +14 -5
  70. package/src/blob-store/downloader.js +130 -0
  71. package/src/blob-store/entries-stream.js +81 -0
  72. package/src/blob-store/hyperdrive-index.js +122 -0
  73. package/src/blob-store/index.js +59 -117
  74. package/src/blob-store/utils.js +54 -0
  75. package/src/constants.js +4 -1
  76. package/src/core-manager/index.js +60 -3
  77. package/src/core-ownership.js +2 -4
  78. package/src/datastore/README.md +1 -2
  79. package/src/datastore/index.js +8 -8
  80. package/src/datatype/index.d.ts +5 -1
  81. package/src/datatype/index.js +22 -9
  82. package/src/discovery/local-discovery.js +2 -1
  83. package/src/errors.js +11 -2
  84. package/src/fastify-plugins/blobs.js +17 -1
  85. package/src/fastify-plugins/maps.js +2 -1
  86. package/src/generated/extensions.d.ts +31 -0
  87. package/src/generated/extensions.js +150 -0
  88. package/src/generated/extensions.ts +181 -0
  89. package/src/index.js +10 -0
  90. package/src/invite-api.js +1 -1
  91. package/src/lib/drizzle-helpers.js +79 -0
  92. package/src/lib/error.js +71 -0
  93. package/src/lib/get-own.js +10 -0
  94. package/src/lib/is-hostname-ip-address.js +26 -0
  95. package/src/lib/ws-core-replicator.js +47 -0
  96. package/src/mapeo-manager.js +74 -45
  97. package/src/mapeo-project.js +238 -58
  98. package/src/member-api.js +295 -2
  99. package/src/roles.js +38 -32
  100. package/src/schema/client.js +4 -3
  101. package/src/schema/project.js +7 -0
  102. package/src/sync/core-sync-state.js +39 -23
  103. package/src/sync/namespace-sync-state.js +22 -0
  104. package/src/sync/peer-sync-controller.js +1 -0
  105. package/src/sync/sync-api.js +197 -3
  106. package/src/sync/sync-state.js +18 -0
  107. package/src/translation-api.js +5 -9
  108. package/src/types.ts +12 -3
  109. package/dist/blob-store/live-download.d.ts +0 -107
  110. package/dist/blob-store/live-download.d.ts.map +0 -1
  111. package/dist/lib/timing-safe-equal.d.ts +0 -15
  112. package/dist/lib/timing-safe-equal.d.ts.map +0 -1
  113. package/src/blob-store/live-download.js +0 -373
  114. package/src/lib/timing-safe-equal.js +0 -34
package/drizzle/project/meta/_journal.json CHANGED
@@ -8,6 +8,13 @@
       "when": 1726514275142,
       "tag": "0000_spooky_lady_ursula",
       "breakpoints": true
+    },
+    {
+      "idx": 1,
+      "version": "5",
+      "when": 1729783892753,
+      "tag": "0001_medical_wendell_rand",
+      "breakpoints": true
     }
   ]
 }
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@comapeo/core",
-  "version": "2.0.1",
+  "version": "2.2.0",
   "description": "Offline p2p mapping library",
   "main": "src/index.js",
   "types": "dist/index.d.ts",
@@ -108,8 +108,10 @@
   "homepage": "https://github.com/digidem/comapeo-core#readme",
   "devDependencies": {
     "@bufbuild/buf": "^1.26.1",
+    "@comapeo/cloud": "^0.1.0",
+    "@comapeo/core2.0.1": "npm:@comapeo/core@2.0.1",
     "@mapeo/default-config": "5.0.0",
-    "@mapeo/mock-data": "2.0.0",
+    "@mapeo/mock-data": "^2.1.1",
     "@sinonjs/fake-timers": "^10.0.2",
     "@types/b4a": "^1.6.0",
     "@types/bogon": "^1.0.2",
@@ -124,6 +126,7 @@
     "@types/sub-encoder": "^2.1.0",
     "@types/throttle-debounce": "^5.0.0",
     "@types/varint": "^6.0.1",
+    "@types/ws": "^8.5.12",
     "@types/yauzl-promise": "^4.0.0",
     "@types/yazl": "^2.4.5",
     "bitfield": "^4.2.0",
@@ -131,6 +134,7 @@
     "cpy-cli": "^5.0.0",
     "drizzle-kit": "^0.20.14",
     "eslint": "^8.57.0",
+    "execa": "^9.5.1",
     "husky": "^8.0.0",
     "iterpal": "^0.4.0",
     "lint-staged": "^14.0.1",
@@ -138,6 +142,7 @@
     "math-random-seed": "^2.0.0",
     "nanobench": "^3.0.0",
     "npm-run-all": "^4.1.5",
+    "p-props": "^6.0.0",
     "prettier": "^2.8.8",
     "random-access-file": "^4.0.7",
     "random-access-memory": "^6.2.1",
@@ -153,7 +158,7 @@
   },
   "dependencies": {
     "@comapeo/fallback-smp": "^1.0.0",
-    "@comapeo/schema": "1.0.0",
+    "@comapeo/schema": "1.2.0",
     "@digidem/types": "^2.3.0",
     "@fastify/error": "^3.4.1",
     "@fastify/type-provider-typebox": "^4.1.0",
@@ -161,6 +166,7 @@
     "@mapeo/crypto": "1.0.0-alpha.10",
     "@mapeo/sqlite-indexer": "1.0.0-alpha.9",
     "@sinclair/typebox": "^0.29.6",
+    "@sindresorhus/merge-streams": "^4.0.0",
     "b4a": "^1.6.3",
     "bcp-47": "^2.1.0",
     "better-sqlite3": "^8.7.0",
@@ -171,7 +177,7 @@
     "debug": "^4.3.4",
     "dot-prop": "^9.0.0",
     "drizzle-orm": "^0.30.8",
-    "fastify": ">= 4",
+    "fastify": "^4.0.0",
     "fastify-plugin": "^4.5.1",
     "hyperblobs": "2.3.0",
     "hypercore": "10.17.0",
@@ -181,7 +187,7 @@
     "magic-bytes.js": "^1.10.0",
     "map-obj": "^5.0.2",
     "mime": "^4.0.3",
-    "multi-core-indexer": "^1.0.0-alpha.10",
+    "multi-core-indexer": "^1.0.0",
     "p-defer": "^4.0.0",
     "p-event": "^6.0.1",
     "p-timeout": "^6.1.2",
@@ -191,13 +197,16 @@
     "sodium-universal": "^4.0.0",
     "start-stop-state-machine": "^1.2.0",
     "streamx": "^2.19.0",
+    "string-timing-safe-equal": "^0.1.0",
     "styled-map-package": "^2.0.0",
     "sub-encoder": "^2.1.1",
     "throttle-debounce": "^5.0.0",
     "tiny-typed-emitter": "^2.1.0",
     "type-fest": "^4.5.0",
     "undici": "^6.13.0",
+    "unix-path-resolve": "^1.0.2",
     "varint": "^6.0.0",
+    "ws": "^8.18.0",
     "yauzl-promise": "^4.0.0"
   }
 }
package/src/blob-store/downloader.js ADDED
@@ -0,0 +1,130 @@
+import { TypedEmitter } from 'tiny-typed-emitter'
+import { createEntriesStream } from './entries-stream.js'
+import { filePathMatchesFilter } from './utils.js'
+
+/** @import { BlobFilter } from '../types.js' */
+/** @import { THyperdriveIndex } from './hyperdrive-index.js' */
+
+/**
+ * Like hyperdrive.download() but 'live', and for multiple drives.
+ *
+ * Will emit an 'error' event for any unexpected errors. A consumer must attach
+ * an error listener to avoid uncaught errors. Sources of errors include:
+ *
+ * - If the entries stream emits an error
+ * - If a drive referenced in an entry is not found
+ * - If core.has() throws (e.g. if hypercore is closed)
+ * - If core.download().done() throws, which should not happen according to
+ *   current hypercore code.
+ * - If the entries stream ends unexpectedly (it should be live and not end)
+ *
+ * NB: unlike hyperdrive.download(), this will also download deleted and
+ * previous versions of blobs - we don't currently support editing or deleting
+ * of blobs, so this should not be an issue, and if we do in the future,
+ * downloading deleted and previous versions may be desirable behavior anyway
+ *
+ * @extends {TypedEmitter<{ error: (error: Error) => void }>}
+ */
+export class Downloader extends TypedEmitter {
+  /** @type {THyperdriveIndex} */
+  #driveIndex
+  /** @type {Set<{ done(): Promise<void>, destroy(): void }>} */
+  #queuedDownloads = new Set()
+  #entriesStream
+  #processEntriesPromise
+  #ac = new AbortController()
+  #shouldDownloadFile
+
+  /**
+   * @param {THyperdriveIndex} driveIndex
+   * @param {object} [options]
+   * @param {BlobFilter | null} [options.filter] Filter blobs of specific types and/or sizes to download
+   */
+  constructor(driveIndex, { filter } = {}) {
+    super()
+    this.#driveIndex = driveIndex
+
+    this.#shouldDownloadFile = filter
+      ? filePathMatchesFilter.bind(null, filter)
+      : () => true
+
+    this.#entriesStream = createEntriesStream(driveIndex, { live: true })
+    this.#entriesStream.once('error', this.#handleError)
+
+    this.#ac.signal.addEventListener('abort', this.#handleAbort, { once: true })
+
+    this.#processEntriesPromise = this.#processEntries()
+    this.#processEntriesPromise.catch(this.#handleError)
+  }
+
+  /**
+   * Start processing entries from the entries stream - if an entry matches the
+   * filter, and we don't already have it, queue it for download. If the
+   * Downloader is live, this method will never resolve, otherwise it will
+   * resolve when all the entries have been processed and downloaded.
+   */
+  async #processEntries() {
+    for await (const entry of this.#entriesStream) {
+      this.#ac.signal.throwIfAborted()
+      const {
+        driveId,
+        key: filePath,
+        value: { blob },
+      } = entry
+      if (!this.#shouldDownloadFile(filePath)) continue
+      const drive = this.#driveIndex.get(driveId)
+      // ERROR HANDLING: this is unexpected and should not happen
+      if (!drive) throw new Error('Drive not found: ' + driveId)
+      const blobs = await drive.getBlobs()
+      this.#ac.signal.throwIfAborted()
+      await this.#processEntry(blobs.core, blob)
+      this.#ac.signal.throwIfAborted()
+    }
+    throw new Error('Entries stream ended unexpectedly')
+  }
+
+  /**
+   * Update state and queue missing entries for download
+   *
+   * @param {import('hypercore')} blobsCore
+   * @param {{ blockOffset: number, blockLength: number, byteLength: number }} blob
+   */
+  async #processEntry(blobsCore, { blockOffset: start, blockLength: length }) {
+    const end = start + length
+    const have = await blobsCore.has(start, end)
+    this.#ac.signal.throwIfAborted()
+    if (have) return
+    const download = blobsCore.download({ start, end })
+    this.#queuedDownloads.add(download)
+    download
+      .done()
+      // According to the code, this should never throw.
+      .catch(this.#handleError)
+      .finally(() => {
+        this.#queuedDownloads.delete(download)
+      })
+  }
+
+  /**
+   * Cancel the downloads and clean up resources.
+   */
+  destroy() {
+    this.#ac.abort()
+  }
+
+  /** @param {Error} error */
+  #handleError = (error) => {
+    if (this.#ac.signal.aborted) return
+    this.emit('error', error)
+    this.#ac.abort(error)
+  }
+
+  #handleAbort = () => {
+    for (const download of this.#queuedDownloads) download.destroy()
+    this.#ac.signal.removeEventListener('abort', this.#handleAbort)
+    this.#entriesStream.removeListener('error', this.#ac.abort)
+    // queuedDownloads is likely to be empty here anyway, but clear just in case.
+    this.#queuedDownloads.clear()
+    this.#entriesStream.destroy()
+  }
+}
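
The Downloader class above replaces the deleted live-download module (files 109 and 113 in the list). A minimal consumer sketch follows; it assumes a `driveIndex` instance of the HyperdriveIndexImpl shown later in this diff and a BlobFilter of the shape referenced in the JSDoc, neither of which is constructed here, and it is not part of the package's public API.

// Sketch only, not part of the package's public API: the import path is the
// internal file added in this diff.
import { Downloader } from './downloader.js'

// `driveIndex` is assumed to be a HyperdriveIndexImpl (see hyperdrive-index.js
// below); the filter shape is an assumption about the BlobFilter type.
const downloader = new Downloader(driveIndex, {
  filter: { photo: ['original', 'preview'] },
})

// An 'error' listener is required: unexpected failures are emitted rather than
// thrown, and an unhandled 'error' event would crash the process.
downloader.on('error', (err) => {
  console.error('blob download failed', err)
})

// When the filter changes or sync stops, cancel queued downloads and destroy
// the underlying entries stream.
downloader.destroy()

Attaching the 'error' listener matters because TypedEmitter, like Node's EventEmitter, throws on an unhandled 'error' event.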
package/src/blob-store/entries-stream.js ADDED
@@ -0,0 +1,81 @@
+import SubEncoder from 'sub-encoder'
+import mergeStreams from '@sindresorhus/merge-streams'
+import { Transform, pipeline } from 'node:stream'
+import { noop } from '../utils.js'
+
+/** @import Hyperdrive from 'hyperdrive' */
+/** @import { BlobStoreEntriesStream } from '../types.js' */
+/** @import { THyperdriveIndex } from './hyperdrive-index.js' */
+
+const keyEncoding = new SubEncoder('files', 'utf-8')
+
+/**
+ *
+ * @param {THyperdriveIndex} driveIndex
+ * @param {object} opts
+ * @param {boolean} [opts.live=false]
+ * @returns {BlobStoreEntriesStream}
+ */
+export function createEntriesStream(driveIndex, { live = false } = {}) {
+  const mergedEntriesStreams = mergeStreams(
+    [...driveIndex].map((drive) => getHistoryStream(drive.db, { live }))
+  )
+  driveIndex.on('add-drive', addDrive)
+  // Close is always emitted, so we can use it to remove the listener
+  mergedEntriesStreams.once('close', () =>
+    driveIndex.off('add-drive', addDrive)
+  )
+  return mergedEntriesStreams
+
+  /** @param {Hyperdrive} drive */
+  function addDrive(drive) {
+    mergedEntriesStreams.add(getHistoryStream(drive.db, { live }))
+  }
+}
+
+/**
+ *
+ * @param {import('hyperbee')} bee
+ * @param {object} opts
+ * @param {boolean} opts.live
+ */
+function getHistoryStream(bee, { live }) {
+  // This will also include old versions of files, but it is the only way to
+  // get a live stream from a Hyperbee, however we currently do not support
+  // edits of blobs, so this should not be an issue, and the consequence is
+  // that old versions are downloaded too, which is acceptable.
+  const historyStream = bee.createHistoryStream({
+    live,
+    // `keyEncoding` is necessary because hyperdrive stores file index data
+    // under the `files` sub-encoding key
+    keyEncoding,
+  })
+  return pipeline(historyStream, new AddDriveIds(bee.core), noop)
+}
+
+class AddDriveIds extends Transform {
+  #core
+  #cachedDriveId
+
+  /** @param {import('hypercore')} core */
+  constructor(core) {
+    super({ objectMode: true })
+    this.#core = core
+    this.#cachedDriveId = core.discoveryKey?.toString('hex')
+  }
+
+  /** @type {Transform['_transform']} */
+  _transform(entry, _, callback) {
+    // Minimal performance optimization to only call toString() once.
+    // core.discoveryKey will always be defined by the time it starts
+    // streaming, but could be null when the instance is first created.
+    let driveId
+    if (this.#cachedDriveId) {
+      driveId = this.#cachedDriveId
+    } else {
+      driveId = this.#core.discoveryKey?.toString('hex')
+      this.#cachedDriveId = driveId
+    }
+    callback(null, { ...entry, driveId })
+  }
+}
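
createEntriesStream merges one hyperbee history stream per drive into a single object-mode readable, and with live: true that merged stream never ends. A hedged sketch of consuming it directly, reusing the driveIndex assumption from the previous example; the entry shape shown is the hyperbee history entry with the driveId field appended by AddDriveIds.

// Sketch only: reuses the `driveIndex` assumption from the previous example.
import { createEntriesStream } from './entries-stream.js'

// With live: false the merged stream ends once the existing history of every
// drive has been read, so a for await...of loop terminates.
const entriesStream = createEntriesStream(driveIndex, { live: false })

for await (const entry of entriesStream) {
  // `driveId` is appended by the AddDriveIds transform; `key` is the file path
  // within the drive and `value` holds the hyperdrive entry metadata.
  const { driveId, key: filePath, value } = entry
  console.log(driveId, filePath, value?.blob?.byteLength)
}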
package/src/blob-store/hyperdrive-index.js ADDED
@@ -0,0 +1,122 @@
+import b4a from 'b4a'
+import { discoveryKey } from 'hypercore-crypto'
+import Hyperdrive from 'hyperdrive'
+import util from 'node:util'
+import { TypedEmitter } from 'tiny-typed-emitter'
+
+/** @typedef {HyperdriveIndexImpl} THyperdriveIndex */
+
+/**
+ * @extends {TypedEmitter<{ 'add-drive': (drive: Hyperdrive) => void }>}
+ */
+export class HyperdriveIndexImpl extends TypedEmitter {
+  /** @type {Map<string, Hyperdrive>} */
+  #hyperdrives = new Map()
+  #writer
+  #writerKey
+  /** @param {import('../core-manager/index.js').CoreManager} coreManager */
+  constructor(coreManager) {
+    super()
+    /** @type {undefined | Hyperdrive} */
+    let writer
+    const corestore = new PretendCorestore({ coreManager })
+    const blobIndexCores = coreManager.getCores('blobIndex')
+    const writerCoreRecord = coreManager.getWriterCore('blobIndex')
+    this.#writerKey = writerCoreRecord.key
+    for (const { key } of blobIndexCores) {
+      // @ts-ignore - we know pretendCorestore is not actually a Corestore
+      const drive = new Hyperdrive(corestore, key)
+      // We use the discovery key to derive the id for a drive
+      this.#hyperdrives.set(getDiscoveryId(key), drive)
+      if (key.equals(this.#writerKey)) {
+        writer = drive
+      }
+    }
+    if (!writer) {
+      throw new Error('Could not find a writer for the blobIndex namespace')
+    }
+    this.#writer = writer
+
+    coreManager.on('add-core', ({ key, namespace }) => {
+      if (namespace !== 'blobIndex') return
+      // We use the discovery key to derive the id for a drive
+      const driveId = getDiscoveryId(key)
+      if (this.#hyperdrives.has(driveId)) return
+      // @ts-ignore - we know pretendCorestore is not actually a Corestore
+      const drive = new Hyperdrive(corestore, key)
+      this.#hyperdrives.set(driveId, drive)
+      this.emit('add-drive', drive)
+    })
+  }
+  get writer() {
+    return this.#writer
+  }
+  get writerKey() {
+    return this.#writerKey
+  }
+  [Symbol.iterator]() {
+    return this.#hyperdrives.values()
+  }
+  /** @param {string} driveId */
+  get(driveId) {
+    return this.#hyperdrives.get(driveId)
+  }
+}
+
+/**
+ * Implements the `get()` method as used by hyperdrive-next. It returns the
+ * relevant cores from the Mapeo CoreManager.
+ */
+class PretendCorestore {
+  #coreManager
+  /**
+   * @param {object} options
+   * @param {import('../core-manager/index.js').CoreManager} options.coreManager
+   */
+  constructor({ coreManager }) {
+    this.#coreManager = coreManager
+  }
+
+  /**
+   * @param {Buffer | { publicKey: Buffer } | { name: string }} opts
+   * @returns {import('hypercore')<"binary", Buffer> | undefined}
+   */
+  get(opts) {
+    if (b4a.isBuffer(opts)) {
+      opts = { publicKey: opts }
+    }
+    if ('key' in opts) {
+      // @ts-ignore
+      opts.publicKey = opts.key
+    }
+    if ('publicKey' in opts) {
+      // NB! We should always add blobIndex (Hyperbee) cores to the core manager
+      // before we use them here. We would only reach the addCore path if the
+      // blob core is read from the hyperbee header (before it is added to the
+      // core manager)
+      return (
+        this.#coreManager.getCoreByKey(opts.publicKey) ||
+        this.#coreManager.addCore(opts.publicKey, 'blob').core
+      )
+    } else if (opts.name === 'db') {
+      return this.#coreManager.getWriterCore('blobIndex').core
+    } else if (opts.name.includes('blobs')) {
+      return this.#coreManager.getWriterCore('blob').core
+    } else {
+      throw new Error(
+        'Unsupported corestore.get() with opts ' + util.inspect(opts)
+      )
+    }
+  }
+
+  /** no-op */
+  close() {}
+}
+
+/**
+ * @param {Buffer} key Public key of hypercore
+ * @returns {string} Hex-encoded string of derived discovery key
+ */
+function getDiscoveryId(key) {
+  return discoveryKey(key).toString('hex')
+}
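
HyperdriveIndexImpl keys drives by the hex-encoded discovery key of each blobIndex core rather than by the public key itself, which is the same driveId that AddDriveIds attaches to entries. A short sketch of that mapping; blobIndexCoreKey is a placeholder public-key Buffer, not something defined in this diff.

// Sketch only: `blobIndexCoreKey` is a placeholder public-key Buffer and
// `driveIndex` is the HyperdriveIndexImpl assumed in the earlier examples.
import { discoveryKey } from 'hypercore-crypto'

// The id used by driveIndex.get(), and attached to entries as `driveId`, is
// the hex-encoded discovery key derived from the core's public key.
const driveId = discoveryKey(blobIndexCoreKey).toString('hex')

// Returns the matching Hyperdrive, or undefined if no blobIndex core with
// that discovery key has been added yet.
const drive = driveIndex.get(driveId)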