@comapeo/core 2.1.0 → 2.2.0
This diff shows the changes between publicly released versions of the package as published to the supported public registries. It is provided for informational purposes only.
- package/dist/blob-store/downloader.d.ts +43 -0
- package/dist/blob-store/downloader.d.ts.map +1 -0
- package/dist/blob-store/entries-stream.d.ts +13 -0
- package/dist/blob-store/entries-stream.d.ts.map +1 -0
- package/dist/blob-store/hyperdrive-index.d.ts +20 -0
- package/dist/blob-store/hyperdrive-index.d.ts.map +1 -0
- package/dist/blob-store/index.d.ts +29 -21
- package/dist/blob-store/index.d.ts.map +1 -1
- package/dist/blob-store/utils.d.ts +27 -0
- package/dist/blob-store/utils.d.ts.map +1 -0
- package/dist/core-manager/index.d.ts +1 -1
- package/dist/core-manager/index.d.ts.map +1 -1
- package/dist/core-ownership.d.ts.map +1 -1
- package/dist/datastore/index.d.ts +1 -1
- package/dist/datastore/index.d.ts.map +1 -1
- package/dist/datatype/index.d.ts +5 -1
- package/dist/discovery/local-discovery.d.ts.map +1 -1
- package/dist/errors.d.ts +6 -1
- package/dist/errors.d.ts.map +1 -1
- package/dist/fastify-plugins/blobs.d.ts.map +1 -1
- package/dist/fastify-plugins/maps.d.ts.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/lib/error.d.ts +14 -0
- package/dist/lib/error.d.ts.map +1 -1
- package/dist/mapeo-manager.d.ts.map +1 -1
- package/dist/mapeo-project.d.ts +17 -17
- package/dist/mapeo-project.d.ts.map +1 -1
- package/dist/member-api.d.ts +4 -0
- package/dist/member-api.d.ts.map +1 -1
- package/dist/roles.d.ts.map +1 -1
- package/dist/schema/project.d.ts +2 -2
- package/dist/sync/core-sync-state.d.ts +20 -15
- package/dist/sync/core-sync-state.d.ts.map +1 -1
- package/dist/sync/namespace-sync-state.d.ts +13 -1
- package/dist/sync/namespace-sync-state.d.ts.map +1 -1
- package/dist/sync/peer-sync-controller.d.ts +1 -1
- package/dist/sync/sync-api.d.ts +22 -3
- package/dist/sync/sync-api.d.ts.map +1 -1
- package/dist/sync/sync-state.d.ts +12 -0
- package/dist/sync/sync-state.d.ts.map +1 -1
- package/dist/translation-api.d.ts +2 -2
- package/dist/translation-api.d.ts.map +1 -1
- package/dist/types.d.ts +7 -0
- package/dist/types.d.ts.map +1 -1
- package/package.json +6 -1
- package/src/blob-store/downloader.js +130 -0
- package/src/blob-store/entries-stream.js +81 -0
- package/src/blob-store/hyperdrive-index.js +122 -0
- package/src/blob-store/index.js +56 -115
- package/src/blob-store/utils.js +54 -0
- package/src/core-manager/index.js +2 -1
- package/src/core-ownership.js +2 -4
- package/src/datastore/index.js +4 -3
- package/src/datatype/index.d.ts +5 -1
- package/src/datatype/index.js +22 -9
- package/src/discovery/local-discovery.js +2 -1
- package/src/errors.js +11 -2
- package/src/fastify-plugins/blobs.js +16 -1
- package/src/fastify-plugins/maps.js +2 -1
- package/src/lib/error.js +24 -0
- package/src/mapeo-manager.js +3 -2
- package/src/mapeo-project.js +89 -19
- package/src/member-api.js +68 -26
- package/src/roles.js +38 -32
- package/src/sync/core-sync-state.js +39 -23
- package/src/sync/namespace-sync-state.js +22 -0
- package/src/sync/sync-api.js +30 -4
- package/src/sync/sync-state.js +18 -0
- package/src/translation-api.js +5 -9
- package/src/types.ts +8 -0
- package/dist/blob-store/live-download.d.ts +0 -107
- package/dist/blob-store/live-download.d.ts.map +0 -1
- package/src/blob-store/live-download.js +0 -373
package/src/sync/sync-api.js
CHANGED
@@ -16,7 +16,7 @@ import { NO_ROLE_ID } from '../roles.js'
 /** @import * as http from 'node:http' */
 /** @import { CoreOwnership } from '../core-ownership.js' */
 /** @import { OpenedNoiseStream } from '../lib/noise-secret-stream-helpers.js' */
-/** @import { ReplicationStream } from '../types.js' */
+/** @import { BlobFilter, ReplicationStream } from '../types.js' */
 
 export const kHandleDiscoveryKey = Symbol('handle discovery key')
 export const kSyncState = Symbol('sync state')
@@ -26,6 +26,8 @@ export const kWaitForInitialSyncWithPeer = Symbol(
   'wait for initial sync with peer'
 )
 export const kSetBlobDownloadFilter = Symbol('set isArchiveDevice')
+export const kAddBlobWantRange = Symbol('add blob want range')
+export const kClearBlobWantRanges = Symbol('clear blob want ranges')
 
 /**
  * @typedef {'initial' | 'full'} SyncType
@@ -91,7 +93,8 @@ export class SyncApi extends TypedEmitter {
   #getReplicationStream
   /** @type {Map<string, WebSocket>} */
   #serverWebsockets = new Map()
-
+  /** @type {null | BlobFilter} */
+  #blobDownloadFilter = null
 
   /**
    * @param {object} opts
@@ -100,7 +103,7 @@ export class SyncApi extends TypedEmitter {
    * @param {import('../roles.js').Roles} opts.roles
    * @param {() => Promise<Iterable<string>>} opts.getServerWebsocketUrls
    * @param {() => ReplicationStream} opts.getReplicationStream
-   * @param {
+   * @param {null | BlobFilter} opts.blobDownloadFilter
    * @param {number} [opts.throttleMs]
    * @param {Logger} [opts.logger]
    */
@@ -116,7 +119,6 @@ export class SyncApi extends TypedEmitter {
   }) {
     super()
     this.#l = Logger.create('syncApi', logger)
-    this.#blobDownloadFilter = blobDownloadFilter
     this.#coreManager = coreManager
     this.#coreOwnership = coreOwnership
     this.#roles = roles
@@ -133,6 +135,8 @@ export class SyncApi extends TypedEmitter {
       this.#updateState(namespaceSyncState)
     })
 
+    this[kSetBlobDownloadFilter](blobDownloadFilter)
+
     this.#coreManager.creatorCore.on('peer-add', this.#handlePeerAdd)
     this.#coreManager.creatorCore.on('peer-remove', this.#handlePeerDisconnect)
 
@@ -161,6 +165,28 @@ export class SyncApi extends TypedEmitter {
     }
   }
 
+  /**
+   * Add some blob blocks this peer wants.
+   *
+   * @param {string} peerId
+   * @param {number} start
+   * @param {number} length
+   * @returns {void}
+   */
+  [kAddBlobWantRange](peerId, start, length) {
+    this[kSyncState].addBlobWantRange(peerId, start, length)
+  }
+
+  /**
+   * Clear the blob blocks this peer wants.
+   *
+   * @param {string} peerId
+   * @returns {void}
+   */
+  [kClearBlobWantRanges](peerId) {
+    this[kSyncState].clearBlobWantRanges(peerId)
+  }
+
   /** @type {import('../local-peers.js').LocalPeersEvents['discovery-key']} */
   [kHandleDiscoveryKey](discoveryKey, protomux) {
     const peerSyncController = this.#peerSyncControllers.get(protomux)
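The two new symbol-keyed methods are internal entry points for recording which blob blocks a peer wants. A minimal usage sketch, assuming a module with access to an existing SyncApi instance and to the exported symbols (the import path and the range values are illustrative only):

  import { kAddBlobWantRange, kClearBlobWantRanges } from './sync/sync-api.js'

  /**
   * Illustrative only: record that a peer wants blocks [1024, 1024 + 64) of the
   * blob namespace, then drop all recorded want ranges for that peer again.
   * @param {import('./sync/sync-api.js').SyncApi} syncApi
   * @param {string} peerId
   */
  function toggleBlobWantRange(syncApi, peerId) {
    syncApi[kAddBlobWantRange](peerId, 1024, 64)
    syncApi[kClearBlobWantRanges](peerId)
  }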
package/src/sync/sync-state.js
CHANGED
@@ -68,6 +68,24 @@ export class SyncState extends TypedEmitter {
     ])
   }
 
+  /**
+   * @param {string} peerId
+   * @param {number} start
+   * @param {number} length
+   * @returns {void}
+   */
+  addBlobWantRange(peerId, start, length) {
+    this.#syncStates.blob.addWantRange(peerId, start, length)
+  }
+
+  /**
+   * @param {string} peerId
+   * @returns {void}
+   */
+  clearBlobWantRanges(peerId) {
+    this.#syncStates.blob.clearWantRanges(peerId)
+  }
+
   #handleUpdate = () => {
     this.emit('state', this.getState())
  }
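SyncState simply forwards these calls to the blob namespace's sync state. As a rough mental model of the per-peer bookkeeping this implies, here is an illustrative sketch only: the class name and shape are invented here, and the real logic lives in core-sync-state.js and namespace-sync-state.js, which also changed in this release.

  // Illustrative only: track which block ranges each peer wants.
  class WantRangeBook {
    /** @type {Map<string, Array<{ start: number, length: number }>>} */
    #rangesByPeer = new Map()

    /** Record that a peer wants blocks [start, start + length). */
    addWantRange(peerId, start, length) {
      const ranges = this.#rangesByPeer.get(peerId) ?? []
      ranges.push({ start, length })
      this.#rangesByPeer.set(peerId, ranges)
    }

    /** Forget everything this peer wanted. */
    clearWantRanges(peerId) {
      this.#rangesByPeer.delete(peerId)
    }

    /** Does this peer want the given block index? */
    wants(peerId, blockIndex) {
      const ranges = this.#rangesByPeer.get(peerId) ?? []
      return ranges.some(
        ({ start, length }) => blockIndex >= start && blockIndex < start + length
      )
    }
  }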
package/src/translation-api.js
CHANGED
@@ -1,7 +1,7 @@
 import { and, sql } from 'drizzle-orm'
 import { kCreateWithDocId, kSelect } from './datatype/index.js'
 import { hashObject } from './utils.js'
-import {
+import { nullIfNotFound } from './errors.js'
 import { omit } from './lib/omit.js'
 /** @import { Translation, TranslationValue } from '@comapeo/schema' */
 /** @import { SetOptional } from 'type-fest' */
@@ -50,15 +50,11 @@ export default class TranslationApi {
   async put(value) {
     const identifiers = omit(value, ['message'])
     const docId = hashObject(identifiers)
-
-
+    const doc = await this.#dataType.getByDocId(docId).catch(nullIfNotFound)
+    if (doc) {
       return await this.#dataType.update(doc.versionId, value)
-    }
-
-      return await this.#dataType[kCreateWithDocId](docId, value)
-    } else {
-      throw new Error(`Error on translation ${e}`)
-    }
+    } else {
+      return await this.#dataType[kCreateWithDocId](docId, value)
     }
   }
 
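put() now looks the translation up by its deterministic docId and maps a "not found" rejection to null via nullIfNotFound, updating the existing doc when one exists and otherwise creating a doc with that docId. The helper is imported from src/errors.js (also changed in this release); its implementation is not shown in this diff, so the following is only a sketch of the assumed shape:

  // Assumed shape only; the real NotFoundError / nullIfNotFound live in src/errors.js.
  class NotFoundError extends Error {}

  /** @param {unknown} err */
  function nullIfNotFound(err) {
    if (err instanceof NotFoundError) return null
    throw err
  }

  // A rejected "not found" lookup resolves to null instead of throwing:
  const doc = await Promise.reject(new NotFoundError()).catch(nullIfNotFound)
  console.log(doc) // null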
package/src/types.ts
CHANGED
@@ -14,6 +14,8 @@ import { Duplex } from 'streamx'
 import RandomAccessStorage from 'random-access-storage'
 import { DefaultListener, ListenerSignature } from 'tiny-typed-emitter'
 import type { NAMESPACES } from './constants.js'
+import type { Readable } from 'stream'
+import type { HyperdriveEntry } from 'hyperdrive'
 
 export type Namespace = (typeof NAMESPACES)[number]
 
@@ -147,3 +149,9 @@ export type DefaultEmitterEvents<
   newListener: (event: keyof L, listener: L[keyof L]) => void
   removeListener: (event: keyof L, listener: L[keyof L]) => void
 }
+
+export type BlobStoreEntriesStream = Readable & {
+  [Symbol.asyncIterator](): AsyncIterableIterator<
+    HyperdriveEntry & { driveId: string }
+  >
+}
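The new BlobStoreEntriesStream type describes a Readable whose async iterator yields Hyperdrive entries tagged with the id of the drive they came from, presumably produced by the new blob-store/entries-stream.js module listed above. A consumption sketch (how such a stream is obtained is not shown in this diff, and the JSDoc import path here is illustrative):

  /** @param {import('./types.js').BlobStoreEntriesStream} entriesStream */
  async function logBlobEntries(entriesStream) {
    for await (const entry of entriesStream) {
      // Each entry is a HyperdriveEntry plus the id of the drive it belongs to
      console.log(entry.driveId, entry.key, entry.value.blob)
    }
  }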
package/dist/blob-store/live-download.d.ts
DELETED
@@ -1,107 +0,0 @@
-/**
- * Reduce multiple states into one. Factored out for unit testing because I
- * don't trust my coding. Probably a smarter way to do this, but this works.
- *
- * @param {Iterable<{ state: BlobDownloadState | BlobDownloadStateError }>} liveDownloads
- * @param {{ signal?: AbortSignal }} options
- * @returns {BlobDownloadState | BlobDownloadStateError}
- */
-export function combineStates(liveDownloads: Iterable<{
-    state: BlobDownloadState | BlobDownloadStateError;
-}>, { signal }?: {
-    signal?: AbortSignal;
-}): BlobDownloadState | BlobDownloadStateError;
-/**
- * @typedef {object} BlobDownloadState
- * @property {number} haveCount The number of files already downloaded
- * @property {number} haveBytes The bytes already downloaded
- * @property {number} wantCount The number of files pending download
- * @property {number} wantBytes The bytes pending download
- * @property {null} error If status = 'error' then this will be an Error object
- * @property {'checking' | 'downloading' | 'downloaded' | 'aborted'} status
- */
-/** @typedef {Omit<BlobDownloadState, 'error' | 'status'> & { status: 'error', error: Error }} BlobDownloadStateError */
-/**
- * @typedef {object} BlobDownloadEvents
- * @property {(state: BlobDownloadState | BlobDownloadStateError ) => void} state Emitted with the current download state whenever it changes (not emitted during initial 'checking' status)
- */
-/**
- * LiveDownload class
- * @extends {TypedEmitter<BlobDownloadEvents>}
- */
-export class LiveDownload extends TypedEmitter<BlobDownloadEvents> {
-    /**
-     * Like drive.download() but 'live', and for multiple drives
-     * @param {Iterable<import('hyperdrive')>} drives
-     * @param {import('./index.js').InternalDriveEmitter} emitter
-     * @param {object} options
-     * @param {import('../types.js').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
-     * @param {AbortSignal} [options.signal]
-     */
-    constructor(drives: Iterable<import("hyperdrive")>, emitter: import("./index.js").InternalDriveEmitter, { filter, signal }: {
-        filter?: import("../types.js").BlobFilter | undefined;
-        signal?: AbortSignal | undefined;
-    });
-    /**
-     * @returns {BlobDownloadState | BlobDownloadStateError}
-     */
-    get state(): BlobDownloadState | BlobDownloadStateError;
-    #private;
-}
-/**
- * LiveDownload class
- * @extends {TypedEmitter<BlobDownloadEvents>}
- */
-export class DriveLiveDownload extends TypedEmitter<BlobDownloadEvents> {
-    /**
-     * Like drive.download() but 'live',
-     * @param {import('hyperdrive')} drive
-     * @param {object} options
-     * @param {import('../types.js').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
-     * @param {AbortSignal} [options.signal]
-     */
-    constructor(drive: import("hyperdrive"), { filter, signal }?: {
-        filter?: import("../types.js").BlobFilter | undefined;
-        signal?: AbortSignal | undefined;
-    });
-    /**
-     * @returns {BlobDownloadState | BlobDownloadStateError}
-     */
-    get state(): BlobDownloadState | BlobDownloadStateError;
-    #private;
-}
-export type BlobDownloadState = {
-    /**
-     * The number of files already downloaded
-     */
-    haveCount: number;
-    /**
-     * The bytes already downloaded
-     */
-    haveBytes: number;
-    /**
-     * The number of files pending download
-     */
-    wantCount: number;
-    /**
-     * The bytes pending download
-     */
-    wantBytes: number;
-    /**
-     * If status = 'error' then this will be an Error object
-     */
-    error: null;
-    status: "checking" | "downloading" | "downloaded" | "aborted";
-};
-export type BlobDownloadStateError = Omit<BlobDownloadState, "error" | "status"> & {
-    status: "error";
-    error: Error;
-};
-export type BlobDownloadEvents = {
-    /**
-     * Emitted with the current download state whenever it changes (not emitted during initial 'checking' status)
-     */
-    state: (state: BlobDownloadState | BlobDownloadStateError) => void;
-};
-import { TypedEmitter } from 'tiny-typed-emitter';
-//# sourceMappingURL=live-download.d.ts.map
package/src/blob-store/live-download.js
DELETED
@@ -1,373 +0,0 @@
-import { TypedEmitter } from 'tiny-typed-emitter'
-import { once } from 'node:events'
-import SubEncoder from 'sub-encoder'
-
-const keyEncoding = new SubEncoder('files', 'utf-8')
-
-/**
- * @typedef {object} BlobDownloadState
- * @property {number} haveCount The number of files already downloaded
- * @property {number} haveBytes The bytes already downloaded
- * @property {number} wantCount The number of files pending download
- * @property {number} wantBytes The bytes pending download
- * @property {null} error If status = 'error' then this will be an Error object
- * @property {'checking' | 'downloading' | 'downloaded' | 'aborted'} status
- */
-
-/** @typedef {Omit<BlobDownloadState, 'error' | 'status'> & { status: 'error', error: Error }} BlobDownloadStateError */
-
-/**
- * @typedef {object} BlobDownloadEvents
- * @property {(state: BlobDownloadState | BlobDownloadStateError ) => void} state Emitted with the current download state whenever it changes (not emitted during initial 'checking' status)
- */
-
-/**
- * LiveDownload class
- * @extends {TypedEmitter<BlobDownloadEvents>}
- */
-export class LiveDownload extends TypedEmitter {
-  /** @type {Set<DriveLiveDownload>} */
-  #driveLiveDownloads = new Set()
-  #signal
-
-  /**
-   * Like drive.download() but 'live', and for multiple drives
-   * @param {Iterable<import('hyperdrive')>} drives
-   * @param {import('./index.js').InternalDriveEmitter} emitter
-   * @param {object} options
-   * @param {import('../types.js').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
-   * @param {AbortSignal} [options.signal]
-   */
-  constructor(drives, emitter, { filter, signal }) {
-    super()
-    this.#signal = signal
-
-    const emitState = () => {
-      this.emit('state', this.state)
-    }
-
-    /** @param {import('hyperdrive')} drive */
-    const addDrive = (drive) => {
-      const download = new DriveLiveDownload(drive, {
-        filter,
-        signal,
-      })
-      this.#driveLiveDownloads.add(download)
-      download.on('state', emitState)
-    }
-
-    for (const drive of drives) addDrive(drive)
-    emitter.on('add-drive', addDrive)
-
-    signal?.addEventListener(
-      'abort',
-      () => {
-        emitter.off('add-drive', addDrive)
-        for (const download of this.#driveLiveDownloads) {
-          download.off('state', emitState)
-        }
-      },
-      { once: true }
-    )
-  }
-
-  /**
-   * @returns {BlobDownloadState | BlobDownloadStateError}
-   */
-  get state() {
-    return combineStates(this.#driveLiveDownloads, { signal: this.#signal })
-  }
-}
-
-/**
- * LiveDownload class
- * @extends {TypedEmitter<BlobDownloadEvents>}
- */
-export class DriveLiveDownload extends TypedEmitter {
-  #haveCount = 0
-  #haveBytes = 0
-  #wantBytes = 0
-  #initialCheck = true
-  #drive
-  #folders
-  /** @type {Set<{ done(): Promise<void>, destroy(): void }>} */
-  #downloads = new Set()
-  /** @type {Error | null} */
-  #error = null
-  #signal
-
-  /**
-   * Like drive.download() but 'live',
-   * @param {import('hyperdrive')} drive
-   * @param {object} options
-   * @param {import('../types.js').BlobFilter} [options.filter] Filter blobs of specific types and/or sizes to download
-   * @param {AbortSignal} [options.signal]
-   */
-  constructor(drive, { filter, signal } = {}) {
-    super()
-    this.#drive = drive
-    this.#folders = filterToFolders(filter)
-    this.#signal = signal
-    if (signal && !signal.aborted) {
-      signal.addEventListener(
-        'abort',
-        () => {
-          for (const download of this.#downloads) download.destroy()
-          this.#downloads.clear()
-          this.emit('state', this.state)
-        },
-        { once: true }
-      )
-    }
-    this.#start().catch(this.#handleError.bind(this))
-  }
-
-  /**
-   * @returns {BlobDownloadState | BlobDownloadStateError}
-   */
-  get state() {
-    if (this.#error) {
-      return {
-        haveCount: this.#haveCount,
-        haveBytes: this.#haveBytes,
-        wantCount: this.#downloads.size,
-        wantBytes: this.#wantBytes,
-        error: this.#error,
-        status: 'error',
-      }
-    }
-    return {
-      haveCount: this.#haveCount,
-      haveBytes: this.#haveBytes,
-      wantCount: this.#downloads.size,
-      wantBytes: this.#wantBytes,
-      error: null,
-      status: this.#signal?.aborted
-        ? 'aborted'
-        : this.#initialCheck
-          ? 'checking'
-          : this.#downloads.size > 0
-            ? 'downloading'
-            : 'downloaded',
-    }
-  }
-
-  async #start() {
-    const blobsCore = await this.#getBlobsCore()
-    /* c8 ignore next */
-    if (this.#signal?.aborted || !blobsCore) return // Can't get here in tests
-    let seq = 0
-
-    for (const folder of this.#folders) {
-      // Don't emit state during initial iteration of existing data, since this is
-      // likely fast and not useful UX feedback
-      const entryStream = this.#drive.list(folder, { recursive: true })
-      if (this.#signal) {
-        this.#signal.addEventListener('abort', () => entryStream.destroy(), {
-          once: true,
-        })
-      }
-      for await (const entry of entryStream) {
-        if (this.#signal?.aborted) return
-        seq = Math.max(seq, entry.seq)
-        const { blob } = entry.value
-        if (!blob) continue
-        await this.#processEntry(blobsCore, blob)
-      }
-      if (this.#signal?.aborted) return
-    }
-
-    this.#initialCheck = false
-    this.emit('state', this.state)
-
-    const bee = this.#drive.db
-    // This will also download old versions of files, but it is the only way to
-    // get a live stream from a Hyperbee, however we currently do not support
-    // edits of blobs, so this should not be an issue. `keyEncoding` is
-    // necessary because hyperdrive stores file index data under the `files`
-    // sub-encoding key
-    const historyStream = bee.createHistoryStream({
-      live: true,
-      gt: seq,
-      keyEncoding,
-    })
-    if (this.#signal) {
-      this.#signal.addEventListener('abort', () => historyStream.destroy(), {
-        once: true,
-      })
-    }
-    for await (const entry of historyStream) {
-      if (this.#signal?.aborted) return
-      const { blob } = entry.value
-      if (!blob) continue
-      if (!matchesFolder(entry.key, this.#folders)) continue
-      // TODO: consider cancelling downloads when a delete entry is found?
-      // Probably not worth the extra work.
-      if (entry.type !== 'put') continue
-      const wasDownloaded = this.state.status === 'downloaded'
-      await this.#processEntry(blobsCore, blob)
-      if (wasDownloaded && this.state.status === 'downloading') {
-        // State has changed, so emit
-        this.emit('state', this.state)
-      }
-    }
-    /* c8 ignore next 2 */
-    // Could possibly reach here if aborted after check in loop, hard to test
-    this.emit('state', this.state)
-  }
-
-  /**
-   * If a Hyperdrive has been added by its key and has never replicated, then
-   * drive.getBlobs() will not resolve until replication starts. Since we do not
-   * want the downloader to remain in the "checking" state forever, we catch
-   * this case and update the state before waiting for the hyperdrive hyperblobs
-   * instance. This also makes waiting for the blobs instance cancellable.
-   *
-   * @returns {Promise<import('hypercore') | undefined>}
-   */
-  async #getBlobsCore() {
-    if (this.#drive.blobs) return this.#drive.blobs.core
-    await this.#drive.ready()
-    await this.#drive.core.update({ wait: true })
-
-    // If no peers at this stage, we are not going to be able to get the blobs
-    // until a peer appears, so consider this state "downloaded", because
-    // otherwise this will just hang as "checking"
-    if (!this.#drive.core.peers.length) {
-      this.#initialCheck = false
-      this.emit('state', this.state)
-    }
-    try {
-      const [blobs] = await once(this.#drive, 'blobs', { signal: this.#signal })
-      return blobs.core
-    } catch (e) {
-      if (e instanceof Error && e.name === 'AbortError') return
-      throw e
-    }
-  }
-
-  /** @param {Error} e */
-  #handleError(e) {
-    this.#error = e
-    this.emit('state', this.state)
-  }
-
-  /**
-   * Update state and queue missing entries for download
-   *
-   * @param {import('hypercore')} core
-   * @param {{ blockOffset: number, blockLength: number, byteLength: number }} blob
-   */
-  async #processEntry(
-    core,
-    { blockOffset: start, blockLength: length, byteLength }
-  ) {
-    const end = start + length
-    const have = await core.has(start, end)
-    if (have) {
-      this.#haveCount++
-      this.#haveBytes += byteLength
-    } else {
-      this.#wantBytes += byteLength
-      const download = core.download({ start, end })
-      this.#downloads.add(download)
-      download
-        .done()
-        .then(() => {
-          this.#downloads.delete(download)
-          this.#haveCount++
-          this.#haveBytes += byteLength
-          this.#wantBytes -= byteLength
-          this.emit('state', this.state)
-        })
-        .catch(this.#handleError.bind(this))
-    }
-  }
-}
-
-/**
- * Reduce multiple states into one. Factored out for unit testing because I
- * don't trust my coding. Probably a smarter way to do this, but this works.
- *
- * @param {Iterable<{ state: BlobDownloadState | BlobDownloadStateError }>} liveDownloads
- * @param {{ signal?: AbortSignal }} options
- * @returns {BlobDownloadState | BlobDownloadStateError}
- */
-export function combineStates(liveDownloads, { signal } = {}) {
-  /** @type {BlobDownloadState | BlobDownloadStateError} */
-  let combinedState = {
-    haveCount: 0,
-    haveBytes: 0,
-    wantCount: 0,
-    wantBytes: 0,
-    error: null,
-    status: 'downloaded',
-  }
-  for (const { state } of liveDownloads) {
-    combinedState.haveCount += state.haveCount
-    combinedState.haveBytes += state.haveBytes
-    combinedState.wantCount += state.wantCount
-    combinedState.wantBytes += state.wantBytes
-    if (state.status === combinedState.status) continue
-    if (state.status === 'error') {
-      combinedState = { ...combinedState, error: state.error, status: 'error' }
-    } else if (
-      state.status === 'downloading' &&
-      combinedState.status === 'downloaded'
-    ) {
-      combinedState = { ...combinedState, status: 'downloading' }
-    } else if (
-      state.status === 'checking' &&
-      (combinedState.status === 'downloaded' ||
-        combinedState.status === 'downloading')
-    ) {
-      combinedState = { ...combinedState, status: 'checking' }
-    }
-  }
-  if (signal?.aborted) {
-    combinedState.status = 'aborted'
-  }
-  return combinedState
-}
-
-/**
- * Convert a filter to an array of folders that need to be downloaded
- *
- * @param {import('../types.js').BlobFilter} [filter]
- * @returns {string[]} array of folders that match the filter
- */
-function filterToFolders(filter) {
-  if (!filter) return ['/']
-  const folders = []
-  for (const [
-    type,
-    variants,
-  ] of /** @type {import('type-fest').Entries<typeof filter>} */ (
-    Object.entries(filter)
-  )) {
-    // De-dupe variants array
-    for (const variant of new Set(variants)) {
-      folders.push(makePath({ type, variant }))
-    }
-  }
-  return folders
-}
-
-/**
- * Returns true if the path is within one of the given folders
- *
- * @param {string} path
- * @param {string[]} folders
- * @returns {boolean}
- */
-function matchesFolder(path, folders) {
-  for (const folder of folders) {
-    if (path.startsWith(folder)) return true
-  }
-  return false
-}
-
-/** @param {Pick<import('../types.js').BlobId, 'type' | 'variant'>} opts */
-function makePath({ type, variant }) {
-  return `/${type}/${variant}`
-}