@comapeo/core 2.0.1 → 2.2.0
- package/dist/blob-store/downloader.d.ts +43 -0
- package/dist/blob-store/downloader.d.ts.map +1 -0
- package/dist/blob-store/entries-stream.d.ts +13 -0
- package/dist/blob-store/entries-stream.d.ts.map +1 -0
- package/dist/blob-store/hyperdrive-index.d.ts +20 -0
- package/dist/blob-store/hyperdrive-index.d.ts.map +1 -0
- package/dist/blob-store/index.d.ts +34 -29
- package/dist/blob-store/index.d.ts.map +1 -1
- package/dist/blob-store/utils.d.ts +27 -0
- package/dist/blob-store/utils.d.ts.map +1 -0
- package/dist/constants.d.ts +2 -1
- package/dist/constants.d.ts.map +1 -1
- package/dist/core-manager/index.d.ts +11 -1
- package/dist/core-manager/index.d.ts.map +1 -1
- package/dist/core-ownership.d.ts.map +1 -1
- package/dist/datastore/index.d.ts +5 -4
- package/dist/datastore/index.d.ts.map +1 -1
- package/dist/datatype/index.d.ts +5 -1
- package/dist/discovery/local-discovery.d.ts.map +1 -1
- package/dist/errors.d.ts +6 -1
- package/dist/errors.d.ts.map +1 -1
- package/dist/fastify-plugins/blobs.d.ts.map +1 -1
- package/dist/fastify-plugins/maps.d.ts.map +1 -1
- package/dist/generated/extensions.d.ts +31 -0
- package/dist/generated/extensions.d.ts.map +1 -1
- package/dist/index.d.ts +2 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/lib/drizzle-helpers.d.ts +6 -0
- package/dist/lib/drizzle-helpers.d.ts.map +1 -0
- package/dist/lib/error.d.ts +51 -0
- package/dist/lib/error.d.ts.map +1 -0
- package/dist/lib/get-own.d.ts +9 -0
- package/dist/lib/get-own.d.ts.map +1 -0
- package/dist/lib/is-hostname-ip-address.d.ts +17 -0
- package/dist/lib/is-hostname-ip-address.d.ts.map +1 -0
- package/dist/lib/ws-core-replicator.d.ts +11 -0
- package/dist/lib/ws-core-replicator.d.ts.map +1 -0
- package/dist/mapeo-manager.d.ts +18 -22
- package/dist/mapeo-manager.d.ts.map +1 -1
- package/dist/mapeo-project.d.ts +459 -26
- package/dist/mapeo-project.d.ts.map +1 -1
- package/dist/member-api.d.ts +44 -1
- package/dist/member-api.d.ts.map +1 -1
- package/dist/roles.d.ts.map +1 -1
- package/dist/schema/client.d.ts +17 -5
- package/dist/schema/client.d.ts.map +1 -1
- package/dist/schema/project.d.ts +212 -2
- package/dist/schema/project.d.ts.map +1 -1
- package/dist/sync/core-sync-state.d.ts +20 -15
- package/dist/sync/core-sync-state.d.ts.map +1 -1
- package/dist/sync/namespace-sync-state.d.ts +13 -1
- package/dist/sync/namespace-sync-state.d.ts.map +1 -1
- package/dist/sync/peer-sync-controller.d.ts +1 -1
- package/dist/sync/peer-sync-controller.d.ts.map +1 -1
- package/dist/sync/sync-api.d.ts +47 -2
- package/dist/sync/sync-api.d.ts.map +1 -1
- package/dist/sync/sync-state.d.ts +12 -0
- package/dist/sync/sync-state.d.ts.map +1 -1
- package/dist/translation-api.d.ts +2 -2
- package/dist/translation-api.d.ts.map +1 -1
- package/dist/types.d.ts +10 -2
- package/dist/types.d.ts.map +1 -1
- package/drizzle/client/0001_chubby_cargill.sql +12 -0
- package/drizzle/client/meta/0001_snapshot.json +208 -0
- package/drizzle/client/meta/_journal.json +7 -0
- package/drizzle/project/0001_medical_wendell_rand.sql +22 -0
- package/drizzle/project/meta/0001_snapshot.json +1267 -0
- package/drizzle/project/meta/_journal.json +7 -0
- package/package.json +14 -5
- package/src/blob-store/downloader.js +130 -0
- package/src/blob-store/entries-stream.js +81 -0
- package/src/blob-store/hyperdrive-index.js +122 -0
- package/src/blob-store/index.js +59 -117
- package/src/blob-store/utils.js +54 -0
- package/src/constants.js +4 -1
- package/src/core-manager/index.js +60 -3
- package/src/core-ownership.js +2 -4
- package/src/datastore/README.md +1 -2
- package/src/datastore/index.js +8 -8
- package/src/datatype/index.d.ts +5 -1
- package/src/datatype/index.js +22 -9
- package/src/discovery/local-discovery.js +2 -1
- package/src/errors.js +11 -2
- package/src/fastify-plugins/blobs.js +17 -1
- package/src/fastify-plugins/maps.js +2 -1
- package/src/generated/extensions.d.ts +31 -0
- package/src/generated/extensions.js +150 -0
- package/src/generated/extensions.ts +181 -0
- package/src/index.js +10 -0
- package/src/invite-api.js +1 -1
- package/src/lib/drizzle-helpers.js +79 -0
- package/src/lib/error.js +71 -0
- package/src/lib/get-own.js +10 -0
- package/src/lib/is-hostname-ip-address.js +26 -0
- package/src/lib/ws-core-replicator.js +47 -0
- package/src/mapeo-manager.js +74 -45
- package/src/mapeo-project.js +238 -58
- package/src/member-api.js +295 -2
- package/src/roles.js +38 -32
- package/src/schema/client.js +4 -3
- package/src/schema/project.js +7 -0
- package/src/sync/core-sync-state.js +39 -23
- package/src/sync/namespace-sync-state.js +22 -0
- package/src/sync/peer-sync-controller.js +1 -0
- package/src/sync/sync-api.js +197 -3
- package/src/sync/sync-state.js +18 -0
- package/src/translation-api.js +5 -9
- package/src/types.ts +12 -3
- package/dist/blob-store/live-download.d.ts +0 -107
- package/dist/blob-store/live-download.d.ts.map +0 -1
- package/dist/lib/timing-safe-equal.d.ts +0 -15
- package/dist/lib/timing-safe-equal.d.ts.map +0 -1
- package/src/blob-store/live-download.js +0 -373
- package/src/lib/timing-safe-equal.js +0 -34
package/src/blob-store/index.js
CHANGED
@@ -1,21 +1,23 @@
-import
-import b4a from 'b4a'
-import util from 'node:util'
+import { pipeline } from 'node:stream'
 import { discoveryKey } from 'hypercore-crypto'
+import { Downloader } from './downloader.js'
+import { createEntriesStream } from './entries-stream.js'
+import { FilterEntriesStream } from './utils.js'
+import { noop } from '../utils.js'
 import { TypedEmitter } from 'tiny-typed-emitter'
-import {
+import { HyperdriveIndexImpl as HyperdriveIndex } from './hyperdrive-index.js'
+
+/** @import Hyperdrive from 'hyperdrive' */
+/** @import { JsonObject } from 'type-fest' */
 /** @import { Readable as NodeReadable } from 'node:stream' */
 /** @import { Readable as StreamxReadable, Writable } from 'streamx' */
-/** @import { BlobId } from '../types.js' */
-/** @import { BlobDownloadEvents } from './live-download.js' */
+/** @import { BlobFilter, BlobId, BlobStoreEntriesStream } from '../types.js' */
 
 /**
  * @internal
  * @typedef {NodeReadable | StreamxReadable} Readable
  */
 
-/** @typedef {TypedEmitter<{ 'add-drive': (drive: import('hyperdrive')) => void }>} InternalDriveEmitter */
-
 // prop = blob type name
 // value = array of blob variants supported for that type
 const SUPPORTED_BLOB_VARIANTS = /** @type {const} */ ({
@@ -36,57 +38,31 @@ class ErrNotFound extends Error {
   }
 }
 
-
-
-  #
-
-
-   * Used to communicate to live download instances when new drives are added
-   * @type {InternalDriveEmitter}
-   */
-  #driveEmitter = new TypedEmitter()
+/** @extends {TypedEmitter<{ error: (error: Error) => void }>} */
+export class BlobStore extends TypedEmitter {
+  #driveIndex
+  /** @type {Downloader} */
+  #downloader
 
   /**
    * @param {object} options
    * @param {import('../core-manager/index.js').CoreManager} options.coreManager
+   * @param {BlobFilter | null} options.downloadFilter - Filter blob types and/or variants to download. Set to `null` to download all blobs.
    */
-  constructor({ coreManager }) {
-
-
-
-
-    const { key: writerKey } = coreManager.getWriterCore('blobIndex')
-    for (const { key } of blobIndexCores) {
-      // @ts-ignore - we know pretendCorestore is not actually a Corestore
-      const drive = new Hyperdrive(corestore, key)
-      // We use the discovery key to derive the id for a drive
-      this.#hyperdrives.set(getDiscoveryId(key), drive)
-      if (key.equals(writerKey)) {
-        writer = proxyProps(drive, { key: writerKey })
-      }
-    }
-    if (!writer) {
-      throw new Error('Could not find a writer for the blobIndex namespace')
-    }
-    this.#writer = writer
-
-    coreManager.on('add-core', ({ key, namespace }) => {
-      if (namespace !== 'blobIndex') return
-      // We use the discovery key to derive the id for a drive
-      const driveId = getDiscoveryId(key)
-      if (this.#hyperdrives.has(driveId)) return
-      // @ts-ignore - we know pretendCorestore is not actually a Corestore
-      const drive = new Hyperdrive(corestore, key)
-      this.#hyperdrives.set(driveId, drive)
-      this.#driveEmitter.emit('add-drive', drive)
+  constructor({ coreManager, downloadFilter }) {
+    super()
+    this.#driveIndex = new HyperdriveIndex(coreManager)
+    this.#downloader = new Downloader(this.#driveIndex, {
+      filter: downloadFilter,
     })
+    this.#downloader.on('error', (error) => this.emit('error', error))
   }
 
   /**
    * @returns {string}
    */
   get writerDriveId() {
-    return getDiscoveryId(this.#
+    return getDiscoveryId(this.#driveIndex.writerKey)
   }
 
   /**
@@ -94,7 +70,7 @@ export class BlobStore {
    * @returns {Hyperdrive}
    */
   #getDrive(driveId) {
-    const drive = this.#
+    const drive = this.#driveIndex.get(driveId)
     if (!drive) throw new Error('Drive not found ' + driveId.slice(0, 7))
     return drive
   }
@@ -115,23 +91,18 @@ export class BlobStore {
   }
 
   /**
-   *
-   * or blob variants. Download will be 'live' and will continue downloading new
-   * data as it becomes available from any replicating drive.
+   * Set the filter for downloading blobs.
    *
-   *
-   *
-   *
-   * @param {import('../types.js').BlobFilter} [filter] Filter blob types and/or variants to download. Filter is { [BlobType]: BlobVariants[] }. At least one blob variant must be specified for each blob type.
-   * @param {object} options
-   * @param {AbortSignal} [options.signal] Optional AbortSignal to cancel in-progress download
-   * @returns {TypedEmitter<BlobDownloadEvents>}
+   * @param {import('../types.js').BlobFilter | null} filter Filter blob types and/or variants to download. Filter is { [BlobType]: BlobVariants[] }. At least one blob variant must be specified for each blob type.
+   * @returns {void}
    */
-
-
+  setDownloadFilter(filter) {
+    this.#downloader.removeAllListeners()
+    this.#downloader.destroy()
+    this.#downloader = new Downloader(this.#driveIndex, {
       filter,
-      signal,
     })
+    this.#downloader.on('error', (error) => this.emit('error', error))
   }
 
   /**
@@ -153,6 +124,22 @@ export class BlobStore {
     return drive.createReadStream(path, options)
   }
 
+  /**
+   * This is a low-level method to create a stream of entries from all drives.
+   * It includes entries for unknown blob types and variants.
+   *
+   * @param {object} opts
+   * @param {boolean} [opts.live=false] Set to `true` to get a live stream of entries
+   * @param {import('./utils.js').GenericBlobFilter | null} [opts.filter] Filter blob types and/or variants in returned entries. Filter is { [BlobType]: BlobVariants[] }.
+   * @returns {BlobStoreEntriesStream}
+   */
+  createEntriesReadStream({ live = false, filter } = {}) {
+    const entriesStream = createEntriesStream(this.#driveIndex, { live })
+    if (!filter) return entriesStream
+    const filterStream = new FilterEntriesStream(filter)
+    return pipeline(entriesStream, filterStream, noop)
+  }
+
   /**
    * Optimization for creating the blobs read stream when you have
    * previously read the entry from Hyperdrive using `drive.entry`
@@ -162,7 +149,7 @@ export class BlobStore {
   * @param {boolean} [options.wait=false] Set to `true` to wait for a blob to download, otherwise will throw if blob is not available locally
   * @returns {Promise<Readable>}
   */
-  async
+  async createReadStreamFromEntry(driveId, entry, options = { wait: false }) {
     const drive = this.#getDrive(driveId)
     const blobs = await drive.getBlobs()
 
@@ -200,24 +187,24 @@ export class BlobStore {
   * @param {Omit<BlobId, 'driveId'>} blobId
   * @param {Buffer} blob
   * @param {object} [options]
-   * @param {
+   * @param {JsonObject} [options.metadata] Metadata to store with the blob
   * @returns {Promise<string>} discovery key as hex string of hyperdrive where blob is stored
   */
   async put({ type, variant, name }, blob, options) {
     const path = makePath({ type, variant, name })
-    await this.#writer.put(path, blob, options)
+    await this.#driveIndex.writer.put(path, blob, options)
     return this.writerDriveId
   }
 
   /**
   * @param {Omit<BlobId, 'driveId'>} blobId
   * @param {object} [options]
-   * @param {
+   * @param {JsonObject} [options.metadata] Metadata to store with the blob
   * @returns {Writable & { driveId: string }}
   */
   createWriteStream({ type, variant, name }, options) {
     const path = makePath({ type, variant, name })
-    const stream = this.#writer.createWriteStream(path, options)
+    const stream = this.#driveIndex.writer.createWriteStream(path, options)
     return proxyProps(stream, {
       driveId: this.writerDriveId,
     })
@@ -235,7 +222,7 @@ export class BlobStore {
     { type, variant, name, driveId },
     options = { follow: false, wait: false }
   ) {
-    const drive = this.#
+    const drive = this.#driveIndex.get(driveId)
     if (!drive) throw new Error('Drive not found ' + driveId.slice(0, 7))
     const path = makePath({ type, variant, name })
     const entry = await drive.entry(path, options)
@@ -254,6 +241,11 @@ export class BlobStore {
 
     return drive.clear(path, options)
   }
+
+  close() {
+    this.#downloader.removeAllListeners()
+    this.#downloader.destroy()
+  }
 }
 
 /**
@@ -281,56 +273,6 @@ function makePath({ type, variant, name }) {
   return `/${type}/${variant}/${name}`
 }
 
-/**
- * Implements the `get()` method as used by hyperdrive-next. It returns the
- * relevant cores from the Mapeo CoreManager.
- */
-class PretendCorestore {
-  #coreManager
-  /**
-   * @param {object} options
-   * @param {import('../core-manager/index.js').CoreManager} options.coreManager
-   */
-  constructor({ coreManager }) {
-    this.#coreManager = coreManager
-  }
-
-  /**
-   * @param {Buffer | { publicKey: Buffer } | { name: string }} opts
-   * @returns {import('hypercore')<"binary", Buffer> | undefined}
-   */
-  get(opts) {
-    if (b4a.isBuffer(opts)) {
-      opts = { publicKey: opts }
-    }
-    if ('key' in opts) {
-      // @ts-ignore
-      opts.publicKey = opts.key
-    }
-    if ('publicKey' in opts) {
-      // NB! We should always add blobIndex (Hyperbee) cores to the core manager
-      // before we use them here. We would only reach the addCore path if the
-      // blob core is read from the hyperbee header (before it is added to the
-      // core manager)
-      return (
-        this.#coreManager.getCoreByKey(opts.publicKey) ||
-        this.#coreManager.addCore(opts.publicKey, 'blob').core
-      )
-    } else if (opts.name === 'db') {
-      return this.#coreManager.getWriterCore('blobIndex').core
-    } else if (opts.name.includes('blobs')) {
-      return this.#coreManager.getWriterCore('blob').core
-    } else {
-      throw new Error(
-        'Unsupported corestore.get() with opts ' + util.inspect(opts)
-      )
-    }
-  }
-
-  /** no-op */
-  close() {}
-}
-
 /**
  * @param {Buffer} key Public key of hypercore
  * @returns {string} Hex-encoded string of derived discovery key

package/src/blob-store/utils.js
ADDED

@@ -0,0 +1,54 @@
+/**
+ * This is a more generic version of the BlobFilter type that can filter unknown
+ * blob types and variants from the blob store.
+ *
+ * @typedef {{ [type: string]: readonly string[] }} GenericBlobFilter
+ */
+
+import { Transform } from 'node:stream'
+
+/**
+ * @param {GenericBlobFilter} filter
+ * @param {string} filePath
+ * @returns {boolean}
+ */
+export function filePathMatchesFilter(filter, filePath) {
+  const pathParts = filePath.split('/', 4)
+  const [shouldBeEmpty, type, variant] = pathParts
+
+  if (typeof shouldBeEmpty !== 'string' || shouldBeEmpty) return false
+
+  if (!type) return false
+  if (!Object.hasOwn(filter, type)) return false
+
+  const allowedVariants = filter[type] ?? []
+  if (allowedVariants.length === 0) {
+    return pathParts.length >= 3
+  } else {
+    return (
+      pathParts.length >= 4 &&
+      typeof variant === 'string' &&
+      allowedVariants.includes(variant)
+    )
+  }
+}
+
+/** @type {import("../types.js").BlobStoreEntriesStream} */
+export class FilterEntriesStream extends Transform {
+  #isIncludedInFilter
+  /** @param {GenericBlobFilter} filter */
+  constructor(filter) {
+    super({ objectMode: true })
+    this.#isIncludedInFilter = filePathMatchesFilter.bind(null, filter)
+  }
+  /**
+   * @param {import("hyperdrive").HyperdriveEntry} entry
+   * @param {Parameters<Transform['_transform']>[1]} _
+   * @param {Parameters<Transform['_transform']>[2]} callback
+   */
+  _transform(entry, _, callback) {
+    const { key: filePath } = entry
+    if (this.#isIncludedInFilter(filePath)) this.push(entry)
+    callback()
+  }
+}
package/src/constants.js
CHANGED
@@ -19,7 +19,7 @@ export const DATA_NAMESPACES = NAMESPACES.filter(
 )
 
 export const NAMESPACE_SCHEMAS = /** @type {const} */ ({
-  data: ['observation', 'track'],
+  data: ['observation', 'track', 'remoteDetectionAlert'],
   config: [
     'translation',
     'preset',
@@ -32,3 +32,6 @@ export const NAMESPACE_SCHEMAS = /** @type {const} */ ({
 })
 
 export const SUPPORTED_CONFIG_VERSION = 1
+
+// WARNING: This value is persisted. Be careful when changing it.
+export const DRIZZLE_MIGRATIONS_TABLE = '__drizzle_migrations'

package/src/core-manager/index.js
CHANGED

@@ -4,16 +4,21 @@ import { debounce } from 'throttle-debounce'
 import assert from 'node:assert/strict'
 import { sql, eq } from 'drizzle-orm'
 
-import {
+import {
+  HaveExtension,
+  ProjectExtension,
+  DownloadIntentExtension,
+} from '../generated/extensions.js'
 import { Logger } from '../logger.js'
 import { NAMESPACES } from '../constants.js'
 import { noop } from '../utils.js'
 import { coresTable } from '../schema/project.js'
 import * as rle from './bitfield-rle.js'
 import { CoreIndex } from './core-index.js'
+import mapObject from 'map-obj'
 
 /** @import Hypercore from 'hypercore' */
-/** @import { HypercorePeer, Namespace } from '../types.js' */
+/** @import { BlobFilter, GenericBlobFilter, HypercorePeer, Namespace } from '../types.js' */
 
 const WRITER_CORE_PREHAVES_DEBOUNCE_DELAY = 1000
 
@@ -25,6 +30,7 @@ export const kCoreManagerReplicate = Symbol('replicate core manager')
  * @typedef {Object} Events
  * @property {(coreRecord: CoreRecord) => void} add-core
  * @property {(namespace: Namespace, msg: { coreDiscoveryId: string, peerId: string, start: number, bitfield: Uint32Array }) => void} peer-have
+ * @property {(blobFilter: GenericBlobFilter, peerId: string) => void} peer-download-intent
  */
 
 /**
@@ -46,6 +52,7 @@ export class CoreManager extends TypedEmitter {
   #deviceId
   #l
   #autoDownload
+  #downloadIntentExtension
 
   static get namespaces() {
     return NAMESPACES
@@ -158,6 +165,16 @@ export class CoreManager extends TypedEmitter {
       },
     })
 
+    this.#downloadIntentExtension = this.creatorCore.registerExtension(
+      'mapeo/download-intent',
+      {
+        encoding: DownloadIntentCodec,
+        onmessage: (msg, peer) => {
+          this.#handleDownloadIntentMessage(msg, peer)
+        },
+      }
+    )
+
     this.creatorCore.on('peer-add', (peer) => {
       this.#sendHaves(peer, this.#coreIndex).catch(() => {
         this.#l.log('Failed to send pre-haves to newly-connected peer')
@@ -281,7 +298,8 @@ export class CoreManager extends TypedEmitter {
       keyPair,
      encryptionKey: this.#encryptionKeys[namespace],
     })
-    if (this.#autoDownload) {
+    if (this.#autoDownload && namespace !== 'blob') {
+      // Blob downloads are managed by BlobStore
       core.download({ start: 0, end: -1 })
     }
     // Every peer adds a listener, so could have many peers
@@ -395,6 +413,23 @@ export class CoreManager extends TypedEmitter {
     })
   }
 
+  /**
+   * @param {GenericBlobFilter} blobFilter
+   * @param {HypercorePeer} peer
+   */
+  #handleDownloadIntentMessage(blobFilter, peer) {
+    const peerId = peer.remotePublicKey.toString('hex')
+    this.emit('peer-download-intent', blobFilter, peerId)
+  }
+
+  /**
+   * @param {BlobFilter} blobFilter
+   * @param {HypercorePeer} peer
+   */
+  sendDownloadIntents(blobFilter, peer) {
+    this.#downloadIntentExtension.send(blobFilter, peer)
+  }
+
   /**
   *
   * @param {HypercorePeer} peer
@@ -505,3 +540,25 @@ const HaveExtensionCodec = {
     }
   },
 }
+
+const DownloadIntentCodec = {
+  /** @param {BlobFilter} filter */
+  encode(filter) {
+    const downloadIntents = mapObject(filter, (key, value) => [
+      key,
+      { variants: value || [] },
+    ])
+    return DownloadIntentExtension.encode({ downloadIntents }).finish()
+  },
+  /**
+   * @param {Buffer | Uint8Array} buf
+   * @returns {GenericBlobFilter}
+   */
+  decode(buf) {
+    const msg = DownloadIntentExtension.decode(buf)
+    return mapObject(msg.downloadIntents, (key, value) => [
+      key + '', // keep TS happy
+      value.variants,
+    ])
+  },
+}
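The new `DownloadIntentCodec` above only reshapes a blob filter to and from the generated `DownloadIntentExtension` protobuf message. Below is a small sketch of that reshaping (protobuf encode/decode omitted), using the same `map-obj` helper the diff imports; the example filter values are hypothetical.

```js
import mapObject from 'map-obj'

// A BlobFilter as the application might pass it (example values).
const filter = { photo: ['original', 'preview'], audio: [] }

// encode() direction: { [type]: variants[] } -> { [type]: { variants } }
const downloadIntents = mapObject(filter, (key, value) => [key, { variants: value || [] }])
// => { photo: { variants: ['original', 'preview'] }, audio: { variants: [] } }

// decode() direction: back to a GenericBlobFilter keyed by blob type.
const roundTripped = mapObject(downloadIntents, (key, value) => [key + '', value.variants])
// => { photo: ['original', 'preview'], audio: [] }
```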
package/src/core-ownership.js
CHANGED
@@ -16,6 +16,7 @@ import pDefer from 'p-defer'
 import { NAMESPACES } from './constants.js'
 import { TypedEmitter } from 'tiny-typed-emitter'
 import { omit } from './lib/omit.js'
+import { NotFoundError } from './errors.js'
 /**
  * @import {
  *   CoreOwnershipWithSignatures,
@@ -86,13 +87,10 @@ export class CoreOwnership extends TypedEmitter {
     for (const namespace of NAMESPACES) {
       expressions.push(eq(table[`${namespace}CoreId`], coreId))
     }
-    // prettier-ignore
     const result = (await this.#dataType[kSelect]())
       .where(or.apply(null, expressions))
       .get()
-    if (!result)
-      throw new Error('NotFound')
-    }
+    if (!result) throw new NotFoundError()
     return result.docId
   }
 
package/src/datastore/README.md
CHANGED
@@ -19,6 +19,7 @@ const datastore = new DataStore({
     // Process entries here using an indexer...
   },
   namespace: 'data',
+  reindex: false,
 })
 
 /** @type {MapeoDoc} */
@@ -33,8 +34,6 @@ datastore.on('index-state', ({ current, remaining, entriesPerSecond }) => {
     // show state to user that indexing is happening
   }
 })
-
-const { current, remaining, entriesPerSecond } = datastore.getIndexState()
 ```
 
 ## API docs
package/src/datastore/index.js
CHANGED
@@ -5,6 +5,7 @@ import pDefer from 'p-defer'
 import { discoveryKey } from 'hypercore-crypto'
 import { NAMESPACE_SCHEMAS } from '../constants.js'
 import { createMap } from '../utils.js'
+import { NotFoundError } from '../errors.js'
 /** @import { MapeoDoc } from '@comapeo/schema' */
 
 /**
@@ -51,8 +52,9 @@ export class DataStore extends TypedEmitter {
   * @param {TNamespace} opts.namespace
   * @param {(entries: MultiCoreIndexer.Entry<'binary'>[]) => Promise<import('../index-writer/index.js').IndexedDocIds>} opts.batch
   * @param {MultiCoreIndexer.StorageParam} opts.storage
+   * @param {boolean} opts.reindex
   */
-  constructor({ coreManager, namespace, batch, storage }) {
+  constructor({ coreManager, namespace, batch, storage, reindex }) {
     super()
     this.#coreManager = coreManager
     this.#namespace = namespace
@@ -66,6 +68,7 @@ export class DataStore extends TypedEmitter {
     this.#coreIndexer = new MultiCoreIndexer(cores, {
       storage,
       batch: (entries) => this.#handleEntries(entries),
+      reindex,
     })
     coreManager.on('add-core', (coreRecord) => {
       if (coreRecord.namespace !== namespace) return
@@ -91,10 +94,6 @@ export class DataStore extends TypedEmitter {
     return this.#writerCore
   }
 
-  getIndexState() {
-    return this.#coreIndexer.state
-  }
-
   /**
   *
   * @param {MultiCoreIndexer.Entry<'binary'>[]} entries
@@ -167,6 +166,7 @@ export class DataStore extends TypedEmitter {
     const deferred = pDefer()
     this.#pendingIndex.set(versionId, deferred)
     await deferred.promise
+    this.#pendingIndex.delete(versionId)
 
     return /** @type {Extract<MapeoDoc, TDoc>} */ (
       decode(block, { coreDiscoveryKey, index })
@@ -183,7 +183,7 @@ export class DataStore extends TypedEmitter {
     const coreRecord = this.#coreManager.getCoreByDiscoveryKey(coreDiscoveryKey)
     if (!coreRecord) throw new Error('Invalid versionId')
     const block = await coreRecord.core.get(index, { wait: false })
-    if (!block) throw new
+    if (!block) throw new NotFoundError('Not Found')
     return decode(block, { coreDiscoveryKey, index })
   }
 
@@ -203,9 +203,9 @@ export class DataStore extends TypedEmitter {
   async readRaw(versionId) {
     const { coreDiscoveryKey, index } = parseVersionId(versionId)
     const coreRecord = this.#coreManager.getCoreByDiscoveryKey(coreDiscoveryKey)
-    if (!coreRecord) throw new
+    if (!coreRecord) throw new NotFoundError('core not found')
     const block = await coreRecord.core.get(index, { wait: false })
-    if (!block) throw new
+    if (!block) throw new NotFoundError()
     return block
   }
 
package/src/datatype/index.d.ts
CHANGED
@@ -87,8 +87,12 @@ export class DataType<
 
   getByDocId(
     docId: string,
-    opts?: { lang?: string }
+    opts?: { mustBeFound?: true; lang?: string }
   ): Promise<TDoc & { forks: string[] }>
+  getByDocId(
+    docId: string,
+    opts?: { mustBeFound?: boolean; lang?: string }
+  ): Promise<null | (TDoc & { forks: string[] })>
 
   getByVersionId(versionId: string, opts?: { lang?: string }): Promise<TDoc>
 
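The overload pair changes the return type based on `mustBeFound`: the default keeps the previous behaviour (reject with `NotFoundError` when the doc is missing), while `mustBeFound: false` resolves to `null` instead. A hedged sketch, where `observationDataType` and `docId` are placeholders for a real `DataType` instance and document id:

```js
// Sketch only; observationDataType and docId are hypothetical placeholders.
async function getByDocIdSketch(observationDataType, docId) {
  // Default: throws NotFoundError if the doc does not exist.
  const doc = await observationDataType.getByDocId(docId)

  // Opt out: resolves to null instead of throwing.
  const maybeDoc = await observationDataType.getByDocId(docId, { mustBeFound: false })

  return { doc, maybeDoc }
}
```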
package/src/datatype/index.js
CHANGED
@@ -164,16 +164,30 @@ export class DataType extends TypedEmitter {
   }
 
   /**
+   * @overload
   * @param {string} docId
-   * @param {
+   * @param {object} [options]
+   * @param {true} [options.mustBeFound]
+   * @param {string} [options.lang]
+   * @returns {Promise<TDoc & { forks: string[] }>}
+   */
+  /**
+   * @param {string} docId
+   * @param {object} [options]
+   * @param {boolean} [options.mustBeFound]
+   * @param {string} [options.lang]
+   * @returns {Promise<null | (TDoc & { forks: string[] })>}
   */
-  async getByDocId(docId, { lang } = {}) {
+  async getByDocId(docId, { mustBeFound = true, lang } = {}) {
     await this.#dataStore.indexer.idle()
-    const result =
-
-
-    if (
-
+    const result = this.#sql.getByDocId.get({ docId })
+    if (result) {
+      return this.#translate(deNullify(result), { lang })
+    } else if (mustBeFound) {
+      throw new NotFoundError()
+    } else {
+      return null
+    }
   }
 
   /**
@@ -186,7 +200,7 @@ export class DataType extends TypedEmitter {
   }
 
   /**
-   * @param {
+   * @param {any} doc
   * @param {{ lang?: string }} [opts]
   */
   async #translate(doc, { lang } = {}) {
@@ -278,7 +292,6 @@ export class DataType extends TypedEmitter {
     const doc = {
       ...existingDoc,
       updatedAt: new Date().toISOString(),
-      // @ts-expect-error - TS just doesn't work in this class
       links: [existingDoc.versionId, ...existingDoc.forks],
       deleted: true,
     }

package/src/discovery/local-discovery.js
CHANGED

@@ -9,6 +9,7 @@ import StartStopStateMachine from 'start-stop-state-machine'
 import pTimeout from 'p-timeout'
 import { keyToPublicId } from '@mapeo/crypto'
 import { Logger } from '../logger.js'
+import { getErrorCode } from '../lib/error.js'
 /** @import { OpenedNoiseStream } from '../lib/noise-secret-stream-helpers.js' */
 
 /** @typedef {{ publicKey: Buffer, secretKey: Buffer }} Keypair */
@@ -117,7 +118,7 @@ export class LocalDiscovery extends TypedEmitter {
 
     /** @param {Error} e */
     function onSocketError(e) {
-      if (
+      if (getErrorCode(e) === 'EPIPE') {
         socket.destroy()
         if (secretStream) {
           secretStream.destroy()
package/src/errors.js
CHANGED
@@ -1,5 +1,14 @@
 export class NotFoundError extends Error {
-  constructor() {
-    super(
+  constructor(message = 'Not found') {
+    super(message)
   }
 }
+
+/**
+ * @param {unknown} err
+ * @returns {null}
+ */
+export function nullIfNotFound(err) {
+  if (err instanceof NotFoundError) return null
+  throw err
+}