@waku/core 0.0.34-9f1d8ca.0 → 0.0.34-c41b319.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundle/base_protocol-CCK9RCtH.js +152 -0
- package/bundle/{index-BIW3qNYx.js → index-Db7LxDrL.js} +78 -159
- package/bundle/index.js +1882 -158
- package/bundle/lib/base_protocol.js +2 -2
- package/bundle/lib/message/version_0.js +2 -2
- package/bundle/{version_0-CdmZMfkQ.js → version_0-ANFNAdFD.js} +283 -45
- package/dist/.tsbuildinfo +1 -1
- package/dist/index.d.ts +1 -3
- package/dist/index.js +1 -3
- package/dist/index.js.map +1 -1
- package/dist/lib/base_protocol.d.ts +3 -23
- package/dist/lib/base_protocol.js +3 -47
- package/dist/lib/base_protocol.js.map +1 -1
- package/dist/lib/connection_manager/connection_manager.d.ts +118 -0
- package/dist/lib/{connection_manager.js → connection_manager/connection_manager.js} +136 -36
- package/dist/lib/connection_manager/connection_manager.js.map +1 -0
- package/dist/lib/connection_manager/index.d.ts +1 -0
- package/dist/lib/connection_manager/index.js +2 -0
- package/dist/lib/connection_manager/index.js.map +1 -0
- package/dist/lib/{keep_alive_manager.d.ts → connection_manager/keep_alive_manager.d.ts} +4 -2
- package/dist/lib/{keep_alive_manager.js → connection_manager/keep_alive_manager.js} +2 -2
- package/dist/lib/connection_manager/keep_alive_manager.js.map +1 -0
- package/dist/lib/connection_manager/utils.d.ts +7 -0
- package/dist/lib/connection_manager/utils.js +22 -0
- package/dist/lib/connection_manager/utils.js.map +1 -0
- package/dist/lib/filter/filter.d.ts +18 -0
- package/dist/lib/filter/filter.js +209 -0
- package/dist/lib/filter/filter.js.map +1 -0
- package/dist/lib/filter/index.d.ts +1 -18
- package/dist/lib/filter/index.js +1 -208
- package/dist/lib/filter/index.js.map +1 -1
- package/dist/lib/light_push/index.d.ts +1 -15
- package/dist/lib/light_push/index.js +1 -143
- package/dist/lib/light_push/index.js.map +1 -1
- package/dist/lib/light_push/light_push.d.ts +15 -0
- package/dist/lib/light_push/light_push.js +144 -0
- package/dist/lib/light_push/light_push.js.map +1 -0
- package/dist/lib/metadata/index.d.ts +1 -3
- package/dist/lib/metadata/index.js +1 -118
- package/dist/lib/metadata/index.js.map +1 -1
- package/dist/lib/metadata/metadata.d.ts +3 -0
- package/dist/lib/metadata/metadata.js +119 -0
- package/dist/lib/metadata/metadata.js.map +1 -0
- package/dist/lib/store/index.d.ts +1 -9
- package/dist/lib/store/index.js +1 -82
- package/dist/lib/store/index.js.map +1 -1
- package/dist/lib/store/store.d.ts +9 -0
- package/dist/lib/store/store.js +83 -0
- package/dist/lib/store/store.js.map +1 -0
- package/dist/lib/stream_manager/stream_manager.d.ts +2 -2
- package/dist/lib/stream_manager/stream_manager.js +16 -17
- package/dist/lib/stream_manager/stream_manager.js.map +1 -1
- package/package.json +1 -1
- package/src/index.ts +1 -4
- package/src/lib/base_protocol.ts +3 -76
- package/src/lib/{connection_manager.ts → connection_manager/connection_manager.ts} +168 -63
- package/src/lib/connection_manager/index.ts +1 -0
- package/src/lib/{keep_alive_manager.ts → connection_manager/keep_alive_manager.ts} +7 -3
- package/src/lib/connection_manager/utils.ts +25 -0
- package/src/lib/filter/filter.ts +315 -0
- package/src/lib/filter/index.ts +1 -315
- package/src/lib/light_push/index.ts +1 -188
- package/src/lib/light_push/light_push.ts +188 -0
- package/src/lib/metadata/index.ts +1 -182
- package/src/lib/metadata/metadata.ts +182 -0
- package/src/lib/store/index.ts +1 -136
- package/src/lib/store/store.ts +136 -0
- package/src/lib/stream_manager/stream_manager.ts +16 -18
- package/bundle/base_protocol-Dzv-QHPR.js +0 -275
- package/dist/lib/connection_manager.d.ts +0 -62
- package/dist/lib/connection_manager.js.map +0 -1
- package/dist/lib/filterPeers.d.ts +0 -13
- package/dist/lib/filterPeers.js +0 -38
- package/dist/lib/filterPeers.js.map +0 -1
- package/dist/lib/health_manager.d.ts +0 -14
- package/dist/lib/health_manager.js +0 -70
- package/dist/lib/health_manager.js.map +0 -1
- package/dist/lib/keep_alive_manager.js.map +0 -1
- package/src/lib/filterPeers.ts +0 -51
- package/src/lib/health_manager.ts +0 -90
package/src/lib/metadata/metadata.ts
ADDED
@@ -0,0 +1,182 @@
+import type { PeerId } from "@libp2p/interface";
+import { IncomingStreamData } from "@libp2p/interface";
+import {
+  type IMetadata,
+  type Libp2pComponents,
+  type MetadataQueryResult,
+  type PeerIdStr,
+  ProtocolError,
+  PubsubTopic,
+  type ShardInfo
+} from "@waku/interfaces";
+import { proto_metadata } from "@waku/proto";
+import { encodeRelayShard, Logger, pubsubTopicsToShardInfo } from "@waku/utils";
+import all from "it-all";
+import * as lp from "it-length-prefixed";
+import { pipe } from "it-pipe";
+import { Uint8ArrayList } from "uint8arraylist";
+
+import { BaseProtocol } from "../base_protocol.js";
+
+const log = new Logger("metadata");
+
+export const MetadataCodec = "/vac/waku/metadata/1.0.0";
+
+class Metadata extends BaseProtocol implements IMetadata {
+  private libp2pComponents: Libp2pComponents;
+  protected handshakesConfirmed: Map<PeerIdStr, ShardInfo> = new Map();
+
+  public constructor(
+    public pubsubTopics: PubsubTopic[],
+    libp2p: Libp2pComponents
+  ) {
+    super(MetadataCodec, libp2p.components, pubsubTopics);
+    this.libp2pComponents = libp2p;
+    void libp2p.registrar.handle(MetadataCodec, (streamData) => {
+      void this.onRequest(streamData);
+    });
+  }
+
+  /**
+   * Make a metadata query to a peer
+   */
+  public async query(peerId: PeerId): Promise<MetadataQueryResult> {
+    const request = proto_metadata.WakuMetadataRequest.encode(
+      pubsubTopicsToShardInfo(this.pubsubTopics)
+    );
+
+    const peer = await this.libp2pComponents.peerStore.get(peerId);
+    if (!peer) {
+      return {
+        shardInfo: null,
+        error: ProtocolError.NO_PEER_AVAILABLE
+      };
+    }
+
+    let stream;
+    try {
+      stream = await this.getStream(peerId);
+    } catch (error) {
+      log.error("Failed to get stream", error);
+      return {
+        shardInfo: null,
+        error: ProtocolError.NO_STREAM_AVAILABLE
+      };
+    }
+
+    const encodedResponse = await pipe(
+      [request],
+      lp.encode,
+      stream,
+      lp.decode,
+      async (source) => await all(source)
+    );
+
+    const { error, shardInfo } = this.decodeMetadataResponse(encodedResponse);
+
+    if (error) {
+      return {
+        shardInfo: null,
+        error
+      };
+    }
+
+    await this.savePeerShardInfo(peerId, shardInfo);
+
+    return {
+      shardInfo,
+      error: null
+    };
+  }
+
+  public async confirmOrAttemptHandshake(
+    peerId: PeerId
+  ): Promise<MetadataQueryResult> {
+    const shardInfo = this.handshakesConfirmed.get(peerId.toString());
+    if (shardInfo) {
+      return {
+        shardInfo,
+        error: null
+      };
+    }
+
+    return await this.query(peerId);
+  }
+
+  /**
+   * Handle an incoming metadata request
+   */
+  private async onRequest(streamData: IncomingStreamData): Promise<void> {
+    try {
+      const { stream, connection } = streamData;
+      const encodedShardInfo = proto_metadata.WakuMetadataResponse.encode(
+        pubsubTopicsToShardInfo(this.pubsubTopics)
+      );
+
+      const encodedResponse = await pipe(
+        [encodedShardInfo],
+        lp.encode,
+        stream,
+        lp.decode,
+        async (source) => await all(source)
+      );
+
+      const { error, shardInfo } = this.decodeMetadataResponse(encodedResponse);
+
+      if (error) {
+        return;
+      }
+
+      await this.savePeerShardInfo(connection.remotePeer, shardInfo);
+    } catch (error) {
+      log.error("Error handling metadata request", error);
+    }
+  }
+
+  private decodeMetadataResponse(
+    encodedResponse: Uint8ArrayList[]
+  ): MetadataQueryResult {
+    const bytes = new Uint8ArrayList();
+
+    encodedResponse.forEach((chunk) => {
+      bytes.append(chunk);
+    });
+    const response = proto_metadata.WakuMetadataResponse.decode(
+      bytes
+    ) as ShardInfo;
+
+    if (!response) {
+      log.error("Error decoding metadata response");
+      return {
+        shardInfo: null,
+        error: ProtocolError.DECODE_FAILED
+      };
+    }
+
+    return {
+      shardInfo: response,
+      error: null
+    };
+  }
+
+  private async savePeerShardInfo(
+    peerId: PeerId,
+    shardInfo: ShardInfo
+  ): Promise<void> {
+    // add or update the shardInfo to peer store
+    await this.libp2pComponents.peerStore.merge(peerId, {
+      metadata: {
+        shardInfo: encodeRelayShard(shardInfo)
+      }
+    });
+
+    this.handshakesConfirmed.set(peerId.toString(), shardInfo);
+  }
+}
+
+export function wakuMetadata(
+  pubsubTopics: PubsubTopic[]
+): (components: Libp2pComponents) => IMetadata {
+  return (components: Libp2pComponents) =>
+    new Metadata(pubsubTopics, components);
+}
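Note on the new file above: `wakuMetadata(pubsubTopics)` returns a `(components: Libp2pComponents) => IMetadata` factory, the shape libp2p expects for entries in its `services` map. The sketch below is illustrative only; the `createLibp2p` options, the `metadata` service key, the example pubsub topic string, and the assumption that `wakuMetadata` is re-exported from the package root are not part of this diff.

import { createLibp2p } from "libp2p";
import { wakuMetadata } from "@waku/core"; // assumed root re-export

// Hypothetical wiring: register the Waku metadata protocol as a libp2p service.
const node = await createLibp2p({
  services: {
    metadata: wakuMetadata(["/waku/2/rs/1/0"]) // example pubsub topic (assumption)
  }
});

// Confirm (or attempt) the metadata handshake with an already-connected peer.
const [peerId] = node.getPeers();
const { shardInfo, error } =
  await node.services.metadata.confirmOrAttemptHandshake(peerId);
if (error) {
  console.error("metadata handshake failed", error);
} else {
  console.log("peer shard info", shardInfo);
}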
package/src/lib/store/index.ts
CHANGED
@@ -1,136 +1 @@
-
-import {
-  IDecodedMessage,
-  IDecoder,
-  IStoreCore,
-  Libp2p,
-  PubsubTopic,
-  QueryRequestParams
-} from "@waku/interfaces";
-import { Logger } from "@waku/utils";
-import all from "it-all";
-import * as lp from "it-length-prefixed";
-import { pipe } from "it-pipe";
-import { Uint8ArrayList } from "uint8arraylist";
-
-import { BaseProtocol } from "../base_protocol.js";
-import { toProtoMessage } from "../to_proto_message.js";
-
-import {
-  DEFAULT_PAGE_SIZE,
-  MAX_PAGE_SIZE,
-  StoreQueryRequest,
-  StoreQueryResponse
-} from "./rpc.js";
-
-const log = new Logger("store");
-
-export const StoreCodec = "/vac/waku/store-query/3.0.0";
-
-export class StoreCore extends BaseProtocol implements IStoreCore {
-  public constructor(
-    public readonly pubsubTopics: PubsubTopic[],
-    libp2p: Libp2p
-  ) {
-    super(StoreCodec, libp2p.components, log, pubsubTopics);
-  }
-
-  public async *queryPerPage<T extends IDecodedMessage>(
-    queryOpts: QueryRequestParams,
-    decoders: Map<string, IDecoder<T>>,
-    peer: Peer
-  ): AsyncGenerator<Promise<T | undefined>[]> {
-    if (
-      queryOpts.contentTopics.toString() !==
-      Array.from(decoders.keys()).toString()
-    ) {
-      throw new Error(
-        "Internal error, the decoders should match the query's content topics"
-      );
-    }
-
-    let currentCursor = queryOpts.paginationCursor;
-    while (true) {
-      const storeQueryRequest = StoreQueryRequest.create({
-        ...queryOpts,
-        paginationCursor: currentCursor
-      });
-
-      let stream;
-      try {
-        stream = await this.getStream(peer);
-      } catch (e) {
-        log.error("Failed to get stream", e);
-        break;
-      }
-
-      const res = await pipe(
-        [storeQueryRequest.encode()],
-        lp.encode,
-        stream,
-        lp.decode,
-        async (source) => await all(source)
-      );
-
-      const bytes = new Uint8ArrayList();
-      res.forEach((chunk) => {
-        bytes.append(chunk);
-      });
-
-      const storeQueryResponse = StoreQueryResponse.decode(bytes);
-
-      if (
-        !storeQueryResponse.statusCode ||
-        storeQueryResponse.statusCode >= 300
-      ) {
-        const errorMessage = `Store query failed with status code: ${storeQueryResponse.statusCode}, description: ${storeQueryResponse.statusDesc}`;
-        log.error(errorMessage);
-        throw new Error(errorMessage);
-      }
-
-      if (!storeQueryResponse.messages || !storeQueryResponse.messages.length) {
-        log.warn("Stopping pagination due to empty messages in response");
-        break;
-      }
-
-      log.info(
-        `${storeQueryResponse.messages.length} messages retrieved from store`
-      );
-
-      const decodedMessages = storeQueryResponse.messages.map((protoMsg) => {
-        if (!protoMsg.message) {
-          return Promise.resolve(undefined);
-        }
-        const contentTopic = protoMsg.message.contentTopic;
-        if (contentTopic) {
-          const decoder = decoders.get(contentTopic);
-          if (decoder) {
-            return decoder.fromProtoObj(
-              protoMsg.pubsubTopic || "",
-              toProtoMessage(protoMsg.message)
-            );
-          }
-        }
-        return Promise.resolve(undefined);
-      });
-
-      yield decodedMessages;
-
-      if (queryOpts.paginationForward) {
-        currentCursor =
-          storeQueryResponse.messages[storeQueryResponse.messages.length - 1]
-            .messageHash;
-      } else {
-        currentCursor = storeQueryResponse.messages[0].messageHash;
-      }
-
-      if (
-        storeQueryResponse.messages.length > MAX_PAGE_SIZE &&
-        storeQueryResponse.messages.length <
-          (queryOpts.paginationLimit || DEFAULT_PAGE_SIZE)
-      ) {
-        break;
-      }
-    }
-  }
-}
+export { StoreCore, StoreCodec } from "./store.js";
package/src/lib/store/store.ts
ADDED
@@ -0,0 +1,136 @@
+import type { PeerId } from "@libp2p/interface";
+import {
+  IDecodedMessage,
+  IDecoder,
+  IStoreCore,
+  Libp2p,
+  PubsubTopic,
+  QueryRequestParams
+} from "@waku/interfaces";
+import { Logger } from "@waku/utils";
+import all from "it-all";
+import * as lp from "it-length-prefixed";
+import { pipe } from "it-pipe";
+import { Uint8ArrayList } from "uint8arraylist";
+
+import { BaseProtocol } from "../base_protocol.js";
+import { toProtoMessage } from "../to_proto_message.js";
+
+import {
+  DEFAULT_PAGE_SIZE,
+  MAX_PAGE_SIZE,
+  StoreQueryRequest,
+  StoreQueryResponse
+} from "./rpc.js";
+
+const log = new Logger("store");
+
+export const StoreCodec = "/vac/waku/store-query/3.0.0";
+
+export class StoreCore extends BaseProtocol implements IStoreCore {
+  public constructor(
+    public readonly pubsubTopics: PubsubTopic[],
+    libp2p: Libp2p
+  ) {
+    super(StoreCodec, libp2p.components, pubsubTopics);
+  }
+
+  public async *queryPerPage<T extends IDecodedMessage>(
+    queryOpts: QueryRequestParams,
+    decoders: Map<string, IDecoder<T>>,
+    peerId: PeerId
+  ): AsyncGenerator<Promise<T | undefined>[]> {
+    if (
+      queryOpts.contentTopics.toString() !==
+      Array.from(decoders.keys()).toString()
+    ) {
+      throw new Error(
+        "Internal error, the decoders should match the query's content topics"
+      );
+    }
+
+    let currentCursor = queryOpts.paginationCursor;
+    while (true) {
+      const storeQueryRequest = StoreQueryRequest.create({
+        ...queryOpts,
+        paginationCursor: currentCursor
+      });
+
+      let stream;
+      try {
+        stream = await this.getStream(peerId);
+      } catch (e) {
+        log.error("Failed to get stream", e);
+        break;
+      }
+
+      const res = await pipe(
+        [storeQueryRequest.encode()],
+        lp.encode,
+        stream,
+        lp.decode,
+        async (source) => await all(source)
+      );
+
+      const bytes = new Uint8ArrayList();
+      res.forEach((chunk) => {
+        bytes.append(chunk);
+      });
+
+      const storeQueryResponse = StoreQueryResponse.decode(bytes);
+
+      if (
+        !storeQueryResponse.statusCode ||
+        storeQueryResponse.statusCode >= 300
+      ) {
+        const errorMessage = `Store query failed with status code: ${storeQueryResponse.statusCode}, description: ${storeQueryResponse.statusDesc}`;
+        log.error(errorMessage);
+        throw new Error(errorMessage);
+      }
+
+      if (!storeQueryResponse.messages || !storeQueryResponse.messages.length) {
+        log.warn("Stopping pagination due to empty messages in response");
+        break;
+      }
+
+      log.info(
+        `${storeQueryResponse.messages.length} messages retrieved from store`
+      );
+
+      const decodedMessages = storeQueryResponse.messages.map((protoMsg) => {
+        if (!protoMsg.message) {
+          return Promise.resolve(undefined);
+        }
+        const contentTopic = protoMsg.message.contentTopic;
+        if (contentTopic) {
+          const decoder = decoders.get(contentTopic);
+          if (decoder) {
+            return decoder.fromProtoObj(
+              protoMsg.pubsubTopic || "",
+              toProtoMessage(protoMsg.message)
+            );
+          }
+        }
+        return Promise.resolve(undefined);
+      });
+
+      yield decodedMessages;
+
+      if (queryOpts.paginationForward) {
+        currentCursor =
+          storeQueryResponse.messages[storeQueryResponse.messages.length - 1]
+            .messageHash;
+      } else {
+        currentCursor = storeQueryResponse.messages[0].messageHash;
+      }
+
+      if (
+        storeQueryResponse.messages.length > MAX_PAGE_SIZE &&
+        storeQueryResponse.messages.length <
+          (queryOpts.paginationLimit || DEFAULT_PAGE_SIZE)
+      ) {
+        break;
+      }
+    }
+  }
+}
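For consumers of the relocated store implementation, `StoreCore.queryPerPage` is an async generator: each yielded page is an array of promises (one per stored message, resolving to undefined when no matching decoder applies), and pagination advances internally via the returned message hashes. A minimal consumption sketch, assuming a `StoreCore` instance, decoders, query options, and a `PeerId` obtained elsewhere (the `@waku/core` import path for `StoreCore` is also an assumption):

import type { PeerId } from "@libp2p/interface";
import type {
  IDecodedMessage,
  IDecoder,
  QueryRequestParams
} from "@waku/interfaces";
import { StoreCore } from "@waku/core"; // assumed export path

// Drain every page of a store query into a flat array of decoded messages.
async function drainStoreQuery<T extends IDecodedMessage>(
  store: StoreCore,
  queryOpts: QueryRequestParams,
  decoders: Map<string, IDecoder<T>>,
  peerId: PeerId
): Promise<T[]> {
  const messages: T[] = [];
  for await (const page of store.queryPerPage(queryOpts, decoders, peerId)) {
    // Each page is an array of promises; undecodable entries resolve to undefined.
    const decoded = await Promise.all(page);
    for (const msg of decoded) {
      if (msg) messages.push(msg);
    }
  }
  return messages;
}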
package/src/lib/stream_manager/stream_manager.ts
CHANGED
@@ -21,39 +21,38 @@ export class StreamManager {
     this.addEventListener("peer:update", this.handlePeerUpdateStreamPool);
   }

-  public async getStream(
-    const
-
-    const scheduledStream = this.streamPool.get(peerId);
+  public async getStream(peerId: PeerId): Promise<Stream> {
+    const peerIdStr = peerId.toString();
+    const scheduledStream = this.streamPool.get(peerIdStr);

     if (scheduledStream) {
-      this.streamPool.delete(
+      this.streamPool.delete(peerIdStr);
       await scheduledStream;
     }

-    let stream = this.getOpenStreamForCodec(
+    let stream = this.getOpenStreamForCodec(peerId);

     if (stream) {
       this.log.info(
-        `Found existing stream peerId=${
+        `Found existing stream peerId=${peerIdStr} multicodec=${this.multicodec}`
       );
-      this.lockStream(
+      this.lockStream(peerIdStr, stream);
       return stream;
     }

-    stream = await this.createStream(
-    this.lockStream(
+    stream = await this.createStream(peerId);
+    this.lockStream(peerIdStr, stream);

     return stream;
   }

-  private async createStream(
-    const connections = this.getConnections(
+  private async createStream(peerId: PeerId, retries = 0): Promise<Stream> {
+    const connections = this.getConnections(peerId);
     const connection = selectOpenConnection(connections);

     if (!connection) {
       throw new Error(
-        `Failed to get a connection to the peer peerId=${
+        `Failed to get a connection to the peer peerId=${peerId.toString()} multicodec=${this.multicodec}`
       );
     }

@@ -63,11 +62,11 @@ export class StreamManager {
     for (let i = 0; i < retries + 1; i++) {
       try {
         this.log.info(
-          `Attempting to create a stream for peerId=${
+          `Attempting to create a stream for peerId=${peerId.toString()} multicodec=${this.multicodec}`
         );
         stream = await connection.newStream(this.multicodec);
         this.log.info(
-          `Created stream for peerId=${
+          `Created stream for peerId=${peerId.toString()} multicodec=${this.multicodec}`
         );
         break;
       } catch (error) {
@@ -77,8 +76,7 @@ export class StreamManager {

     if (!stream) {
       throw new Error(
-        `Failed to create a new stream for ${
-        lastError
+        `Failed to create a new stream for ${peerId.toString()} -- ` + lastError
       );
     }

@@ -97,7 +95,7 @@ export class StreamManager {

     try {
       this.ongoingCreation.add(peerId);
-      await this.createStream(peer);
+      await this.createStream(peer.id);
     } catch (error) {
       this.log.error(`Failed to createStreamWithLock:`, error);
     } finally {
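Taken together, the stream_manager.ts hunks above make StreamManager operate on peer ids: `getStream(peerId: PeerId): Promise<Stream>` keys the stream pool by the peer id string, and the internal call site that previously passed a peer record now passes `peer.id`. A minimal, hypothetical call from protocol code, with the `streamManager` instance and `peerId` assumed to exist:

// Acquire (or create) a locked stream for this peer and this manager's multicodec.
const stream = await streamManager.getStream(peerId);
// ...then pipe a length-prefixed request over `stream`, as the protocols above do.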
|