@comapeo/core 2.1.0 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/dist/blob-store/downloader.d.ts +43 -0
  2. package/dist/blob-store/downloader.d.ts.map +1 -0
  3. package/dist/blob-store/entries-stream.d.ts +13 -0
  4. package/dist/blob-store/entries-stream.d.ts.map +1 -0
  5. package/dist/blob-store/hyperdrive-index.d.ts +20 -0
  6. package/dist/blob-store/hyperdrive-index.d.ts.map +1 -0
  7. package/dist/blob-store/index.d.ts +29 -21
  8. package/dist/blob-store/index.d.ts.map +1 -1
  9. package/dist/blob-store/utils.d.ts +27 -0
  10. package/dist/blob-store/utils.d.ts.map +1 -0
  11. package/dist/core-manager/index.d.ts +1 -1
  12. package/dist/core-manager/index.d.ts.map +1 -1
  13. package/dist/core-ownership.d.ts.map +1 -1
  14. package/dist/datastore/index.d.ts +1 -1
  15. package/dist/datastore/index.d.ts.map +1 -1
  16. package/dist/datatype/index.d.ts +5 -1
  17. package/dist/discovery/local-discovery.d.ts.map +1 -1
  18. package/dist/errors.d.ts +6 -1
  19. package/dist/errors.d.ts.map +1 -1
  20. package/dist/fastify-plugins/blobs.d.ts.map +1 -1
  21. package/dist/fastify-plugins/maps.d.ts.map +1 -1
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/lib/error.d.ts +14 -0
  24. package/dist/lib/error.d.ts.map +1 -1
  25. package/dist/mapeo-manager.d.ts.map +1 -1
  26. package/dist/mapeo-project.d.ts +17 -17
  27. package/dist/mapeo-project.d.ts.map +1 -1
  28. package/dist/member-api.d.ts +4 -0
  29. package/dist/member-api.d.ts.map +1 -1
  30. package/dist/roles.d.ts.map +1 -1
  31. package/dist/schema/project.d.ts +2 -2
  32. package/dist/sync/core-sync-state.d.ts +20 -15
  33. package/dist/sync/core-sync-state.d.ts.map +1 -1
  34. package/dist/sync/namespace-sync-state.d.ts +13 -1
  35. package/dist/sync/namespace-sync-state.d.ts.map +1 -1
  36. package/dist/sync/peer-sync-controller.d.ts +1 -1
  37. package/dist/sync/sync-api.d.ts +22 -3
  38. package/dist/sync/sync-api.d.ts.map +1 -1
  39. package/dist/sync/sync-state.d.ts +12 -0
  40. package/dist/sync/sync-state.d.ts.map +1 -1
  41. package/dist/translation-api.d.ts +2 -2
  42. package/dist/translation-api.d.ts.map +1 -1
  43. package/dist/types.d.ts +7 -0
  44. package/dist/types.d.ts.map +1 -1
  45. package/package.json +8 -3
  46. package/src/blob-store/downloader.js +130 -0
  47. package/src/blob-store/entries-stream.js +81 -0
  48. package/src/blob-store/hyperdrive-index.js +122 -0
  49. package/src/blob-store/index.js +56 -115
  50. package/src/blob-store/utils.js +54 -0
  51. package/src/core-manager/index.js +2 -1
  52. package/src/core-ownership.js +2 -4
  53. package/src/datastore/index.js +4 -3
  54. package/src/datatype/index.d.ts +5 -1
  55. package/src/datatype/index.js +22 -9
  56. package/src/discovery/local-discovery.js +2 -1
  57. package/src/errors.js +11 -2
  58. package/src/fastify-plugins/blobs.js +16 -1
  59. package/src/fastify-plugins/maps.js +2 -1
  60. package/src/lib/error.js +24 -0
  61. package/src/mapeo-manager.js +6 -3
  62. package/src/mapeo-project.js +89 -19
  63. package/src/member-api.js +68 -26
  64. package/src/roles.js +38 -32
  65. package/src/sync/core-sync-state.js +39 -23
  66. package/src/sync/namespace-sync-state.js +22 -0
  67. package/src/sync/sync-api.js +30 -4
  68. package/src/sync/sync-state.js +18 -0
  69. package/src/translation-api.js +5 -9
  70. package/src/types.ts +8 -0
  71. package/dist/blob-store/live-download.d.ts +0 -107
  72. package/dist/blob-store/live-download.d.ts.map +0 -1
  73. package/src/blob-store/live-download.js +0 -373
package/src/blob-store/hyperdrive-index.js ADDED
@@ -0,0 +1,122 @@
+ import b4a from 'b4a'
+ import { discoveryKey } from 'hypercore-crypto'
+ import Hyperdrive from 'hyperdrive'
+ import util from 'node:util'
+ import { TypedEmitter } from 'tiny-typed-emitter'
+
+ /** @typedef {HyperdriveIndexImpl} THyperdriveIndex */
+
+ /**
+  * @extends {TypedEmitter<{ 'add-drive': (drive: Hyperdrive) => void }>}
+  */
+ export class HyperdriveIndexImpl extends TypedEmitter {
+   /** @type {Map<string, Hyperdrive>} */
+   #hyperdrives = new Map()
+   #writer
+   #writerKey
+   /** @param {import('../core-manager/index.js').CoreManager} coreManager */
+   constructor(coreManager) {
+     super()
+     /** @type {undefined | Hyperdrive} */
+     let writer
+     const corestore = new PretendCorestore({ coreManager })
+     const blobIndexCores = coreManager.getCores('blobIndex')
+     const writerCoreRecord = coreManager.getWriterCore('blobIndex')
+     this.#writerKey = writerCoreRecord.key
+     for (const { key } of blobIndexCores) {
+       // @ts-ignore - we know pretendCorestore is not actually a Corestore
+       const drive = new Hyperdrive(corestore, key)
+       // We use the discovery key to derive the id for a drive
+       this.#hyperdrives.set(getDiscoveryId(key), drive)
+       if (key.equals(this.#writerKey)) {
+         writer = drive
+       }
+     }
+     if (!writer) {
+       throw new Error('Could not find a writer for the blobIndex namespace')
+     }
+     this.#writer = writer
+
+     coreManager.on('add-core', ({ key, namespace }) => {
+       if (namespace !== 'blobIndex') return
+       // We use the discovery key to derive the id for a drive
+       const driveId = getDiscoveryId(key)
+       if (this.#hyperdrives.has(driveId)) return
+       // @ts-ignore - we know pretendCorestore is not actually a Corestore
+       const drive = new Hyperdrive(corestore, key)
+       this.#hyperdrives.set(driveId, drive)
+       this.emit('add-drive', drive)
+     })
+   }
+   get writer() {
+     return this.#writer
+   }
+   get writerKey() {
+     return this.#writerKey
+   }
+   [Symbol.iterator]() {
+     return this.#hyperdrives.values()
+   }
+   /** @param {string} driveId */
+   get(driveId) {
+     return this.#hyperdrives.get(driveId)
+   }
+ }
+
+ /**
+  * Implements the `get()` method as used by hyperdrive-next. It returns the
+  * relevant cores from the Mapeo CoreManager.
+  */
+ class PretendCorestore {
+   #coreManager
+   /**
+    * @param {object} options
+    * @param {import('../core-manager/index.js').CoreManager} options.coreManager
+    */
+   constructor({ coreManager }) {
+     this.#coreManager = coreManager
+   }
+
+   /**
+    * @param {Buffer | { publicKey: Buffer } | { name: string }} opts
+    * @returns {import('hypercore')<"binary", Buffer> | undefined}
+    */
+   get(opts) {
+     if (b4a.isBuffer(opts)) {
+       opts = { publicKey: opts }
+     }
+     if ('key' in opts) {
+       // @ts-ignore
+       opts.publicKey = opts.key
+     }
+     if ('publicKey' in opts) {
+       // NB! We should always add blobIndex (Hyperbee) cores to the core manager
+       // before we use them here. We would only reach the addCore path if the
+       // blob core is read from the hyperbee header (before it is added to the
+       // core manager)
+       return (
+         this.#coreManager.getCoreByKey(opts.publicKey) ||
+         this.#coreManager.addCore(opts.publicKey, 'blob').core
+       )
+     } else if (opts.name === 'db') {
+       return this.#coreManager.getWriterCore('blobIndex').core
+     } else if (opts.name.includes('blobs')) {
+       return this.#coreManager.getWriterCore('blob').core
+     } else {
+       throw new Error(
+         'Unsupported corestore.get() with opts ' + util.inspect(opts)
+       )
+     }
+   }
+
+   /** no-op */
+   close() {}
+ }
+
+ /**
+  * @param {Buffer} key Public key of hypercore
+  * @returns {string} Hex-encoded string of derived discovery key
+  */
+ function getDiscoveryId(key) {
+   return discoveryKey(key).toString('hex')
+ }
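A minimal usage sketch of the new index (assuming an existing CoreManager instance; the variable names here are illustrative, not from the package):

// driveIndex wraps all blobIndex cores as Hyperdrives, keyed by hex discovery id
const driveIndex = new HyperdriveIndexImpl(coreManager)

// The local writer drive is always available
console.log(driveIndex.writerKey.toString('hex'))

// Drives created for cores added later are announced
driveIndex.on('add-drive', (drive) => {
  console.log('new drive added', drive.key)
})

// Iterate all known drives, or look one up by its hex-encoded discovery id
for (const drive of driveIndex) {
  // ... e.g. list entries of each drive
}
const maybeDrive = driveIndex.get(someDriveId) // someDriveId: hypothetical hex string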
package/src/blob-store/index.js CHANGED
@@ -1,22 +1,23 @@
- import Hyperdrive from 'hyperdrive'
- import b4a from 'b4a'
- import util from 'node:util'
+ import { pipeline } from 'node:stream'
  import { discoveryKey } from 'hypercore-crypto'
+ import { Downloader } from './downloader.js'
+ import { createEntriesStream } from './entries-stream.js'
+ import { FilterEntriesStream } from './utils.js'
+ import { noop } from '../utils.js'
  import { TypedEmitter } from 'tiny-typed-emitter'
- import { LiveDownload } from './live-download.js'
+ import { HyperdriveIndexImpl as HyperdriveIndex } from './hyperdrive-index.js'
+
+ /** @import Hyperdrive from 'hyperdrive' */
  /** @import { JsonObject } from 'type-fest' */
  /** @import { Readable as NodeReadable } from 'node:stream' */
  /** @import { Readable as StreamxReadable, Writable } from 'streamx' */
- /** @import { BlobId } from '../types.js' */
- /** @import { BlobDownloadEvents } from './live-download.js' */
+ /** @import { BlobFilter, BlobId, BlobStoreEntriesStream } from '../types.js' */

  /**
   * @internal
   * @typedef {NodeReadable | StreamxReadable} Readable
   */

- /** @typedef {TypedEmitter<{ 'add-drive': (drive: import('hyperdrive')) => void }>} InternalDriveEmitter */
-
  // prop = blob type name
  // value = array of blob variants supported for that type
  const SUPPORTED_BLOB_VARIANTS = /** @type {const} */ ({
@@ -37,57 +38,31 @@ class ErrNotFound extends Error {
    }
  }

- export class BlobStore {
-   /** @type {Map<string, Hyperdrive>} Indexed by hex-encoded discovery key */
-   #hyperdrives = new Map()
-   #writer
-   /**
-    * Used to communicate to live download instances when new drives are added
-    * @type {InternalDriveEmitter}
-    */
-   #driveEmitter = new TypedEmitter()
+ /** @extends {TypedEmitter<{ error: (error: Error) => void }>} */
+ export class BlobStore extends TypedEmitter {
+   #driveIndex
+   /** @type {Downloader} */
+   #downloader

    /**
     * @param {object} options
     * @param {import('../core-manager/index.js').CoreManager} options.coreManager
+    * @param {BlobFilter | null} options.downloadFilter - Filter blob types and/or variants to download. Set to `null` to download all blobs.
     */
-   constructor({ coreManager }) {
-     /** @type {undefined | (Hyperdrive & { key: Buffer })} */
-     let writer
-     const corestore = new PretendCorestore({ coreManager })
-     const blobIndexCores = coreManager.getCores('blobIndex')
-     const { key: writerKey } = coreManager.getWriterCore('blobIndex')
-     for (const { key } of blobIndexCores) {
-       // @ts-ignore - we know pretendCorestore is not actually a Corestore
-       const drive = new Hyperdrive(corestore, key)
-       // We use the discovery key to derive the id for a drive
-       this.#hyperdrives.set(getDiscoveryId(key), drive)
-       if (key.equals(writerKey)) {
-         writer = proxyProps(drive, { key: writerKey })
-       }
-     }
-     if (!writer) {
-       throw new Error('Could not find a writer for the blobIndex namespace')
-     }
-     this.#writer = writer
-
-     coreManager.on('add-core', ({ key, namespace }) => {
-       if (namespace !== 'blobIndex') return
-       // We use the discovery key to derive the id for a drive
-       const driveId = getDiscoveryId(key)
-       if (this.#hyperdrives.has(driveId)) return
-       // @ts-ignore - we know pretendCorestore is not actually a Corestore
-       const drive = new Hyperdrive(corestore, key)
-       this.#hyperdrives.set(driveId, drive)
-       this.#driveEmitter.emit('add-drive', drive)
+   constructor({ coreManager, downloadFilter }) {
+     super()
+     this.#driveIndex = new HyperdriveIndex(coreManager)
+     this.#downloader = new Downloader(this.#driveIndex, {
+       filter: downloadFilter,
      })
+     this.#downloader.on('error', (error) => this.emit('error', error))
    }

    /**
     * @returns {string}
     */
    get writerDriveId() {
-     return getDiscoveryId(this.#writer.key)
+     return getDiscoveryId(this.#driveIndex.writerKey)
    }

    /**
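A sketch of constructing the reworked BlobStore (assumes an existing CoreManager; the import path and filter values are illustrative, following the BlobFilter shape { [blobType]: [variants...] }):

import { BlobStore } from './blob-store/index.js'

const blobStore = new BlobStore({
  coreManager,
  // Only download photo previews and thumbnails; pass null to download everything
  downloadFilter: { photo: ['preview', 'thumbnail'] },
})

// Downloader errors are now surfaced on the store itself instead of a per-download emitter
blobStore.on('error', (err) => {
  console.error('blob download error', err)
})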
@@ -95,7 +70,7 @@ export class BlobStore {
     * @returns {Hyperdrive}
     */
    #getDrive(driveId) {
-     const drive = this.#hyperdrives.get(driveId)
+     const drive = this.#driveIndex.get(driveId)
      if (!drive) throw new Error('Drive not found ' + driveId.slice(0, 7))
      return drive
    }
@@ -116,23 +91,18 @@ export class BlobStore {
    }

    /**
-    * Download blobs from all drives, optionally filtering particular blob types
-    * or blob variants. Download will be 'live' and will continue downloading new
-    * data as it becomes available from any replicating drive.
+    * Set the filter for downloading blobs.
     *
-    * If no filter is specified, all blobs will be downloaded. If a filter is
-    * specified, then _only_ blobs that match the filter will be downloaded.
-    *
-    * @param {import('../types.js').BlobFilter} [filter] Filter blob types and/or variants to download. Filter is { [BlobType]: BlobVariants[] }. At least one blob variant must be specified for each blob type.
-    * @param {object} options
-    * @param {AbortSignal} [options.signal] Optional AbortSignal to cancel in-progress download
-    * @returns {TypedEmitter<BlobDownloadEvents>}
+    * @param {import('../types.js').BlobFilter | null} filter Filter blob types and/or variants to download. Filter is { [BlobType]: BlobVariants[] }. At least one blob variant must be specified for each blob type.
+    * @returns {void}
     */
-   download(filter, { signal } = {}) {
-     return new LiveDownload(this.#hyperdrives.values(), this.#driveEmitter, {
+   setDownloadFilter(filter) {
+     this.#downloader.removeAllListeners()
+     this.#downloader.destroy()
+     this.#downloader = new Downloader(this.#driveIndex, {
        filter,
-       signal,
      })
+     this.#downloader.on('error', (error) => this.emit('error', error))
    }

    /**
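Callers can switch what gets downloaded at runtime, for example when sync settings change (sketch; the variant names assume the photo variants listed in SUPPORTED_BLOB_VARIANTS):

// Start pulling full-resolution photos as well
blobStore.setDownloadFilter({ photo: ['original', 'preview', 'thumbnail'] })

// Or download everything again
blobStore.setDownloadFilter(null)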
@@ -154,6 +124,22 @@ export class BlobStore {
      return drive.createReadStream(path, options)
    }

+   /**
+    * This is a low-level method to create a stream of entries from all drives.
+    * It includes entries for unknown blob types and variants.
+    *
+    * @param {object} opts
+    * @param {boolean} [opts.live=false] Set to `true` to get a live stream of entries
+    * @param {import('./utils.js').GenericBlobFilter | null} [opts.filter] Filter blob types and/or variants in returned entries. Filter is { [BlobType]: BlobVariants[] }.
+    * @returns {BlobStoreEntriesStream}
+    */
+   createEntriesReadStream({ live = false, filter } = {}) {
+     const entriesStream = createEntriesStream(this.#driveIndex, { live })
+     if (!filter) return entriesStream
+     const filterStream = new FilterEntriesStream(filter)
+     return pipeline(entriesStream, filterStream, noop)
+   }
+
    /**
     * Optimization for creating the blobs read stream when you have
     * previously read the entry from Hyperdrive using `drive.entry`
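The entries stream is an object-mode stream of Hyperdrive entries, so it can be consumed with for await (sketch, assuming the usual Hyperdrive entry shape where `key` is the drive path `/type/variant/name`):

const entriesStream = blobStore.createEntriesReadStream({
  live: false,
  filter: { photo: ['thumbnail'] },
})

for await (const entry of entriesStream) {
  // entry.key is e.g. '/photo/thumbnail/<name>'
  console.log(entry.key, entry.value.blob.byteLength)
}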
@@ -163,7 +149,7 @@ export class BlobStore {
     * @param {boolean} [options.wait=false] Set to `true` to wait for a blob to download, otherwise will throw if blob is not available locally
     * @returns {Promise<Readable>}
     */
-   async createEntryReadStream(driveId, entry, options = { wait: false }) {
+   async createReadStreamFromEntry(driveId, entry, options = { wait: false }) {
      const drive = this.#getDrive(driveId)
      const blobs = await drive.getBlobs()

@@ -206,7 +192,7 @@ export class BlobStore {
     */
    async put({ type, variant, name }, blob, options) {
      const path = makePath({ type, variant, name })
-     await this.#writer.put(path, blob, options)
+     await this.#driveIndex.writer.put(path, blob, options)
      return this.writerDriveId
    }

@@ -218,7 +204,7 @@ export class BlobStore {
     */
    createWriteStream({ type, variant, name }, options) {
      const path = makePath({ type, variant, name })
-     const stream = this.#writer.createWriteStream(path, options)
+     const stream = this.#driveIndex.writer.createWriteStream(path, options)
      return proxyProps(stream, {
        driveId: this.writerDriveId,
      })
@@ -236,7 +222,7 @@ export class BlobStore {
      { type, variant, name, driveId },
      options = { follow: false, wait: false }
    ) {
-     const drive = this.#hyperdrives.get(driveId)
+     const drive = this.#driveIndex.get(driveId)
      if (!drive) throw new Error('Drive not found ' + driveId.slice(0, 7))
      const path = makePath({ type, variant, name })
      const entry = await drive.entry(path, options)
@@ -255,6 +241,11 @@ export class BlobStore {

      return drive.clear(path, options)
    }
+
+   close() {
+     this.#downloader.removeAllListeners()
+     this.#downloader.destroy()
+   }
  }

  /**
@@ -282,56 +273,6 @@ function makePath({ type, variant, name }) {
    return `/${type}/${variant}/${name}`
  }

- /**
-  * Implements the `get()` method as used by hyperdrive-next. It returns the
-  * relevant cores from the Mapeo CoreManager.
-  */
- class PretendCorestore {
-   #coreManager
-   /**
-    * @param {object} options
-    * @param {import('../core-manager/index.js').CoreManager} options.coreManager
-    */
-   constructor({ coreManager }) {
-     this.#coreManager = coreManager
-   }
-
-   /**
-    * @param {Buffer | { publicKey: Buffer } | { name: string }} opts
-    * @returns {import('hypercore')<"binary", Buffer> | undefined}
-    */
-   get(opts) {
-     if (b4a.isBuffer(opts)) {
-       opts = { publicKey: opts }
-     }
-     if ('key' in opts) {
-       // @ts-ignore
-       opts.publicKey = opts.key
-     }
-     if ('publicKey' in opts) {
-       // NB! We should always add blobIndex (Hyperbee) cores to the core manager
-       // before we use them here. We would only reach the addCore path if the
-       // blob core is read from the hyperbee header (before it is added to the
-       // core manager)
-       return (
-         this.#coreManager.getCoreByKey(opts.publicKey) ||
-         this.#coreManager.addCore(opts.publicKey, 'blob').core
-       )
-     } else if (opts.name === 'db') {
-       return this.#coreManager.getWriterCore('blobIndex').core
-     } else if (opts.name.includes('blobs')) {
-       return this.#coreManager.getWriterCore('blob').core
-     } else {
-       throw new Error(
-         'Unsupported corestore.get() with opts ' + util.inspect(opts)
-       )
-     }
-   }
-
-   /** no-op */
-   close() {}
- }
-
  /**
   * @param {Buffer} key Public key of hypercore
   * @returns {string} Hex-encoded string of derived discovery key
package/src/blob-store/utils.js ADDED
@@ -0,0 +1,54 @@
+ /**
+  * This is a more generic version of the BlobFilter type that can filter unknown
+  * blob types and variants from the blob store.
+  *
+  * @typedef {{ [type: string]: readonly string[] }} GenericBlobFilter
+  */
+
+ import { Transform } from 'node:stream'
+
+ /**
+  * @param {GenericBlobFilter} filter
+  * @param {string} filePath
+  * @returns {boolean}
+  */
+ export function filePathMatchesFilter(filter, filePath) {
+   const pathParts = filePath.split('/', 4)
+   const [shouldBeEmpty, type, variant] = pathParts
+
+   if (typeof shouldBeEmpty !== 'string' || shouldBeEmpty) return false
+
+   if (!type) return false
+   if (!Object.hasOwn(filter, type)) return false
+
+   const allowedVariants = filter[type] ?? []
+   if (allowedVariants.length === 0) {
+     return pathParts.length >= 3
+   } else {
+     return (
+       pathParts.length >= 4 &&
+       typeof variant === 'string' &&
+       allowedVariants.includes(variant)
+     )
+   }
+ }
+
+ /** @type {import("../types.js").BlobStoreEntriesStream} */
+ export class FilterEntriesStream extends Transform {
+   #isIncludedInFilter
+   /** @param {GenericBlobFilter} filter */
+   constructor(filter) {
+     super({ objectMode: true })
+     this.#isIncludedInFilter = filePathMatchesFilter.bind(null, filter)
+   }
+   /**
+    * @param {import("hyperdrive").HyperdriveEntry} entry
+    * @param {Parameters<Transform['_transform']>[1]} _
+    * @param {Parameters<Transform['_transform']>[2]} callback
+    */
+   _transform(entry, _, callback) {
+     const { key: filePath } = entry
+     if (this.#isIncludedInFilter(filePath)) this.push(entry)
+     callback()
+   }
+ }
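Rough behaviour of the path matcher above, assuming blob paths of the form /<type>/<variant>/<name>:

filePathMatchesFilter({ photo: ['original'] }, '/photo/original/abc123') // true
filePathMatchesFilter({ photo: ['original'] }, '/photo/preview/abc123')  // false (variant not allowed)
filePathMatchesFilter({ photo: [] }, '/photo/anything/abc123')           // true (empty list allows all variants)
filePathMatchesFilter({ photo: ['original'] }, '/video/original/abc123') // false (type not in filter)
filePathMatchesFilter({ photo: ['original'] }, 'photo/original/abc123')  // false (path must start with '/')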
package/src/core-manager/index.js CHANGED
@@ -298,7 +298,8 @@ export class CoreManager extends TypedEmitter {
        keyPair,
        encryptionKey: this.#encryptionKeys[namespace],
      })
-     if (this.#autoDownload) {
+     if (this.#autoDownload && namespace !== 'blob') {
+       // Blob downloads are managed by BlobStore
        core.download({ start: 0, end: -1 })
      }
      // Every peer adds a listener, so could have many peers
package/src/core-ownership.js CHANGED
@@ -16,6 +16,7 @@ import pDefer from 'p-defer'
  import { NAMESPACES } from './constants.js'
  import { TypedEmitter } from 'tiny-typed-emitter'
  import { omit } from './lib/omit.js'
+ import { NotFoundError } from './errors.js'
  /**
   * @import {
   *   CoreOwnershipWithSignatures,
@@ -86,13 +87,10 @@ export class CoreOwnership extends TypedEmitter {
      for (const namespace of NAMESPACES) {
        expressions.push(eq(table[`${namespace}CoreId`], coreId))
      }
-     // prettier-ignore
      const result = (await this.#dataType[kSelect]())
        .where(or.apply(null, expressions))
        .get()
-     if (!result) {
-       throw new Error('NotFound')
-     }
+     if (!result) throw new NotFoundError()
      return result.docId
    }

package/src/datastore/index.js CHANGED
@@ -5,6 +5,7 @@ import pDefer from 'p-defer'
  import { discoveryKey } from 'hypercore-crypto'
  import { NAMESPACE_SCHEMAS } from '../constants.js'
  import { createMap } from '../utils.js'
+ import { NotFoundError } from '../errors.js'
  /** @import { MapeoDoc } from '@comapeo/schema' */

  /**
@@ -182,7 +183,7 @@ export class DataStore extends TypedEmitter {
      const coreRecord = this.#coreManager.getCoreByDiscoveryKey(coreDiscoveryKey)
      if (!coreRecord) throw new Error('Invalid versionId')
      const block = await coreRecord.core.get(index, { wait: false })
-     if (!block) throw new Error('Not Found')
+     if (!block) throw new NotFoundError('Not Found')
      return decode(block, { coreDiscoveryKey, index })
    }

@@ -202,9 +203,9 @@ export class DataStore extends TypedEmitter {
    async readRaw(versionId) {
      const { coreDiscoveryKey, index } = parseVersionId(versionId)
      const coreRecord = this.#coreManager.getCoreByDiscoveryKey(coreDiscoveryKey)
-     if (!coreRecord) throw new Error('core not found')
+     if (!coreRecord) throw new NotFoundError('core not found')
      const block = await coreRecord.core.get(index, { wait: false })
-     if (!block) throw new Error('Not Found')
+     if (!block) throw new NotFoundError()
      return block
    }

package/src/datatype/index.d.ts CHANGED
@@ -87,8 +87,12 @@ export class DataType<

    getByDocId(
      docId: string,
-     opts?: { lang?: string }
+     opts?: { mustBeFound?: true; lang?: string }
    ): Promise<TDoc & { forks: string[] }>
+   getByDocId(
+     docId: string,
+     opts?: { mustBeFound?: boolean; lang?: string }
+   ): Promise<null | (TDoc & { forks: string[] })>

    getByVersionId(versionId: string, opts?: { lang?: string }): Promise<TDoc>

package/src/datatype/index.js CHANGED
@@ -164,16 +164,30 @@ export class DataType extends TypedEmitter {
    }

    /**
+    * @overload
     * @param {string} docId
-    * @param {{ lang?: string }} [opts]
+    * @param {object} [options]
+    * @param {true} [options.mustBeFound]
+    * @param {string} [options.lang]
+    * @returns {Promise<TDoc & { forks: string[] }>}
+    */
+   /**
+    * @param {string} docId
+    * @param {object} [options]
+    * @param {boolean} [options.mustBeFound]
+    * @param {string} [options.lang]
+    * @returns {Promise<null | (TDoc & { forks: string[] })>}
     */
-   async getByDocId(docId, { lang } = {}) {
+   async getByDocId(docId, { mustBeFound = true, lang } = {}) {
      await this.#dataStore.indexer.idle()
-     const result = /** @type {undefined | MapeoDoc} */ (
-       this.#sql.getByDocId.get({ docId })
-     )
-     if (!result) throw new NotFoundError()
-     return this.#translate(deNullify(result), { lang })
+     const result = this.#sql.getByDocId.get({ docId })
+     if (result) {
+       return this.#translate(deNullify(result), { lang })
+     } else if (mustBeFound) {
+       throw new NotFoundError()
+     } else {
+       return null
+     }
    }

    /**
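With the new option, callers that can handle a missing document no longer need to catch NotFoundError (sketch; `project.observation` stands in for any DataType instance):

// Throws NotFoundError if the doc does not exist (previous behaviour, still the default)
const doc = await project.observation.getByDocId(docId)

// Resolves to null instead of throwing
const maybeDoc = await project.observation.getByDocId(docId, { mustBeFound: false })
if (maybeDoc === null) {
  // handle the missing document
}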
@@ -186,7 +200,7 @@
    }

    /**
-    * @param {MapeoDoc} doc
+    * @param {any} doc
     * @param {{ lang?: string }} [opts]
     */
    async #translate(doc, { lang } = {}) {
@@ -278,7 +292,6 @@
      const doc = {
        ...existingDoc,
        updatedAt: new Date().toISOString(),
-       // @ts-expect-error - TS just doesn't work in this class
        links: [existingDoc.versionId, ...existingDoc.forks],
        deleted: true,
      }
package/src/discovery/local-discovery.js CHANGED
@@ -9,6 +9,7 @@ import StartStopStateMachine from 'start-stop-state-machine'
  import pTimeout from 'p-timeout'
  import { keyToPublicId } from '@mapeo/crypto'
  import { Logger } from '../logger.js'
+ import { getErrorCode } from '../lib/error.js'
  /** @import { OpenedNoiseStream } from '../lib/noise-secret-stream-helpers.js' */

  /** @typedef {{ publicKey: Buffer, secretKey: Buffer }} Keypair */
@@ -117,7 +118,7 @@

    /** @param {Error} e */
    function onSocketError(e) {
-     if ('code' in e && e.code === 'EPIPE') {
+     if (getErrorCode(e) === 'EPIPE') {
        socket.destroy()
        if (secretStream) {
          secretStream.destroy()
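src/lib/error.js itself is not shown in this diff; the call sites above and below only rely on it safely extracting a code or message from an unknown value. A plausible shape, purely hypothetical and not the package's actual implementation:

/** @param {unknown} err @returns {undefined | string} */
export function getErrorCode(err) {
  // Only return a code when it is a string property on an Error instance
  if (err instanceof Error && 'code' in err && typeof err.code === 'string') {
    return err.code
  }
  return undefined
}

/** @param {unknown} err @returns {string} */
export function getErrorMessage(err) {
  return err instanceof Error ? err.message : String(err)
}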
package/src/errors.js CHANGED
@@ -1,5 +1,14 @@
  export class NotFoundError extends Error {
-   constructor() {
-     super('Not found')
+   constructor(message = 'Not found') {
+     super(message)
    }
  }
+
+ /**
+  * @param {unknown} err
+  * @returns {null}
+  */
+ export function nullIfNotFound(err) {
+   if (err instanceof NotFoundError) return null
+   throw err
+ }
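The nullIfNotFound helper is meant as a catch handler, so a lookup that may legitimately miss can collapse to null (sketch, reusing the getByDocId API shown above):

import { nullIfNotFound } from './errors.js'

const maybeDoc = await dataType.getByDocId(docId).catch(nullIfNotFound)
// maybeDoc is null when the doc is missing; any other error still rejects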
package/src/fastify-plugins/blobs.js CHANGED
@@ -1,9 +1,11 @@
  import fp from 'fastify-plugin'
  import { filetypemime } from 'magic-bytes.js'
+ import { pEvent } from 'p-event'
  import { Type as T } from '@sinclair/typebox'

  import { SUPPORTED_BLOB_VARIANTS } from '../blob-store/index.js'
  import { HEX_REGEX_32_BYTES, Z_BASE_32_REGEX_32_BYTES } from './constants.js'
+ import { getErrorMessage } from '../lib/error.js'

  /** @import { BlobId } from '../types.js' */

@@ -93,12 +95,25 @@ async function routes(fastify, options) {

      let blobStream
      try {
-       blobStream = await blobStore.createEntryReadStream(driveId, entry)
+       blobStream = await blobStore.createReadStreamFromEntry(driveId, entry)
      } catch (e) {
        reply.code(404)
        throw e
      }

+     try {
+       await pEvent(blobStream, 'readable', { rejectionEvents: ['error'] })
+     } catch (err) {
+       // This matches [how Hyperblobs checks if a blob is unavailable][0].
+       // [0]: https://github.com/holepunchto/hyperblobs/blob/518088d2b828082fd70a276fa2c8848a2cf2a56b/index.js#L49
+       if (getErrorMessage(err) === 'Block not available') {
+         reply.code(404)
+         throw new Error('Blob not found')
+       } else {
+         throw err
+       }
+     }
+
      // Extract the 'mimeType' property of the metadata and use it for the response header if found
      if (
        metadata &&
package/src/fastify-plugins/maps.js CHANGED
@@ -5,6 +5,7 @@ import { ReaderWatch, Server as SMPServerPlugin } from 'styled-map-package'

  import { noop } from '../utils.js'
  import { NotFoundError, ENOENTError } from './utils.js'
+ import { getErrorCode } from '../lib/error.js'

  /** @import { FastifyPluginAsync } from 'fastify' */
  /** @import { Stats } from 'node:fs' */
@@ -56,7 +57,7 @@ export async function plugin(fastify, opts) {
    try {
      stats = await fs.stat(customMapPath)
    } catch (err) {
-     if (err instanceof Error && 'code' in err && err.code === 'ENOENT') {
+     if (getErrorCode(err) === 'ENOENT') {
        throw new ENOENTError(customMapPath)
      }