@libp2p/gossipsub 14.1.1-6059227cb

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (160) hide show
  1. package/README.md +85 -0
  2. package/dist/index.min.js +19 -0
  3. package/dist/index.min.js.map +7 -0
  4. package/dist/src/config.d.ts +32 -0
  5. package/dist/src/config.d.ts.map +1 -0
  6. package/dist/src/config.js +2 -0
  7. package/dist/src/config.js.map +1 -0
  8. package/dist/src/constants.d.ts +213 -0
  9. package/dist/src/constants.d.ts.map +1 -0
  10. package/dist/src/constants.js +217 -0
  11. package/dist/src/constants.js.map +1 -0
  12. package/dist/src/errors.d.ts +9 -0
  13. package/dist/src/errors.d.ts.map +1 -0
  14. package/dist/src/errors.js +15 -0
  15. package/dist/src/errors.js.map +1 -0
  16. package/dist/src/gossipsub.d.ts +419 -0
  17. package/dist/src/gossipsub.d.ts.map +1 -0
  18. package/dist/src/gossipsub.js +2520 -0
  19. package/dist/src/gossipsub.js.map +1 -0
  20. package/dist/src/index.d.ts +344 -0
  21. package/dist/src/index.d.ts.map +1 -0
  22. package/dist/src/index.js +43 -0
  23. package/dist/src/index.js.map +1 -0
  24. package/dist/src/message/decodeRpc.d.ts +11 -0
  25. package/dist/src/message/decodeRpc.d.ts.map +1 -0
  26. package/dist/src/message/decodeRpc.js +10 -0
  27. package/dist/src/message/decodeRpc.js.map +1 -0
  28. package/dist/src/message/index.d.ts +2 -0
  29. package/dist/src/message/index.d.ts.map +1 -0
  30. package/dist/src/message/index.js +2 -0
  31. package/dist/src/message/index.js.map +1 -0
  32. package/dist/src/message/rpc.d.ts +99 -0
  33. package/dist/src/message/rpc.d.ts.map +1 -0
  34. package/dist/src/message/rpc.js +663 -0
  35. package/dist/src/message/rpc.js.map +1 -0
  36. package/dist/src/message-cache.d.ts +80 -0
  37. package/dist/src/message-cache.d.ts.map +1 -0
  38. package/dist/src/message-cache.js +144 -0
  39. package/dist/src/message-cache.js.map +1 -0
  40. package/dist/src/metrics.d.ts +467 -0
  41. package/dist/src/metrics.d.ts.map +1 -0
  42. package/dist/src/metrics.js +896 -0
  43. package/dist/src/metrics.js.map +1 -0
  44. package/dist/src/score/compute-score.d.ts +4 -0
  45. package/dist/src/score/compute-score.d.ts.map +1 -0
  46. package/dist/src/score/compute-score.js +75 -0
  47. package/dist/src/score/compute-score.js.map +1 -0
  48. package/dist/src/score/index.d.ts +4 -0
  49. package/dist/src/score/index.d.ts.map +1 -0
  50. package/dist/src/score/index.js +4 -0
  51. package/dist/src/score/index.js.map +1 -0
  52. package/dist/src/score/message-deliveries.d.ts +45 -0
  53. package/dist/src/score/message-deliveries.d.ts.map +1 -0
  54. package/dist/src/score/message-deliveries.js +75 -0
  55. package/dist/src/score/message-deliveries.js.map +1 -0
  56. package/dist/src/score/peer-score-params.d.ts +125 -0
  57. package/dist/src/score/peer-score-params.d.ts.map +1 -0
  58. package/dist/src/score/peer-score-params.js +159 -0
  59. package/dist/src/score/peer-score-params.js.map +1 -0
  60. package/dist/src/score/peer-score-thresholds.d.ts +31 -0
  61. package/dist/src/score/peer-score-thresholds.d.ts.map +1 -0
  62. package/dist/src/score/peer-score-thresholds.js +32 -0
  63. package/dist/src/score/peer-score-thresholds.js.map +1 -0
  64. package/dist/src/score/peer-score.d.ts +119 -0
  65. package/dist/src/score/peer-score.d.ts.map +1 -0
  66. package/dist/src/score/peer-score.js +459 -0
  67. package/dist/src/score/peer-score.js.map +1 -0
  68. package/dist/src/score/peer-stats.d.ts +32 -0
  69. package/dist/src/score/peer-stats.d.ts.map +1 -0
  70. package/dist/src/score/peer-stats.js +2 -0
  71. package/dist/src/score/peer-stats.js.map +1 -0
  72. package/dist/src/score/scoreMetrics.d.ts +23 -0
  73. package/dist/src/score/scoreMetrics.d.ts.map +1 -0
  74. package/dist/src/score/scoreMetrics.js +155 -0
  75. package/dist/src/score/scoreMetrics.js.map +1 -0
  76. package/dist/src/stream.d.ts +30 -0
  77. package/dist/src/stream.d.ts.map +1 -0
  78. package/dist/src/stream.js +55 -0
  79. package/dist/src/stream.js.map +1 -0
  80. package/dist/src/tracer.d.ts +53 -0
  81. package/dist/src/tracer.d.ts.map +1 -0
  82. package/dist/src/tracer.js +155 -0
  83. package/dist/src/tracer.js.map +1 -0
  84. package/dist/src/types.d.ts +148 -0
  85. package/dist/src/types.d.ts.map +1 -0
  86. package/dist/src/types.js +90 -0
  87. package/dist/src/types.js.map +1 -0
  88. package/dist/src/utils/buildRawMessage.d.ts +20 -0
  89. package/dist/src/utils/buildRawMessage.d.ts.map +1 -0
  90. package/dist/src/utils/buildRawMessage.js +151 -0
  91. package/dist/src/utils/buildRawMessage.js.map +1 -0
  92. package/dist/src/utils/create-gossip-rpc.d.ts +7 -0
  93. package/dist/src/utils/create-gossip-rpc.d.ts.map +1 -0
  94. package/dist/src/utils/create-gossip-rpc.js +31 -0
  95. package/dist/src/utils/create-gossip-rpc.js.map +1 -0
  96. package/dist/src/utils/index.d.ts +4 -0
  97. package/dist/src/utils/index.d.ts.map +1 -0
  98. package/dist/src/utils/index.js +4 -0
  99. package/dist/src/utils/index.js.map +1 -0
  100. package/dist/src/utils/messageIdToString.d.ts +5 -0
  101. package/dist/src/utils/messageIdToString.d.ts.map +1 -0
  102. package/dist/src/utils/messageIdToString.js +8 -0
  103. package/dist/src/utils/messageIdToString.js.map +1 -0
  104. package/dist/src/utils/msgIdFn.d.ts +10 -0
  105. package/dist/src/utils/msgIdFn.d.ts.map +1 -0
  106. package/dist/src/utils/msgIdFn.js +23 -0
  107. package/dist/src/utils/msgIdFn.js.map +1 -0
  108. package/dist/src/utils/multiaddr.d.ts +3 -0
  109. package/dist/src/utils/multiaddr.d.ts.map +1 -0
  110. package/dist/src/utils/multiaddr.js +15 -0
  111. package/dist/src/utils/multiaddr.js.map +1 -0
  112. package/dist/src/utils/publishConfig.d.ts +8 -0
  113. package/dist/src/utils/publishConfig.d.ts.map +1 -0
  114. package/dist/src/utils/publishConfig.js +25 -0
  115. package/dist/src/utils/publishConfig.js.map +1 -0
  116. package/dist/src/utils/set.d.ts +14 -0
  117. package/dist/src/utils/set.d.ts.map +1 -0
  118. package/dist/src/utils/set.js +41 -0
  119. package/dist/src/utils/set.js.map +1 -0
  120. package/dist/src/utils/shuffle.d.ts +7 -0
  121. package/dist/src/utils/shuffle.d.ts.map +1 -0
  122. package/dist/src/utils/shuffle.js +21 -0
  123. package/dist/src/utils/shuffle.js.map +1 -0
  124. package/dist/src/utils/time-cache.d.ts +22 -0
  125. package/dist/src/utils/time-cache.d.ts.map +1 -0
  126. package/dist/src/utils/time-cache.js +54 -0
  127. package/dist/src/utils/time-cache.js.map +1 -0
  128. package/package.json +142 -0
  129. package/src/config.ts +31 -0
  130. package/src/constants.ts +261 -0
  131. package/src/errors.ts +17 -0
  132. package/src/gossipsub.ts +3061 -0
  133. package/src/index.ts +404 -0
  134. package/src/message/decodeRpc.ts +19 -0
  135. package/src/message/index.ts +1 -0
  136. package/src/message/rpc.proto +58 -0
  137. package/src/message/rpc.ts +848 -0
  138. package/src/message-cache.ts +196 -0
  139. package/src/metrics.ts +1014 -0
  140. package/src/score/compute-score.ts +98 -0
  141. package/src/score/index.ts +3 -0
  142. package/src/score/message-deliveries.ts +95 -0
  143. package/src/score/peer-score-params.ts +316 -0
  144. package/src/score/peer-score-thresholds.ts +70 -0
  145. package/src/score/peer-score.ts +565 -0
  146. package/src/score/peer-stats.ts +33 -0
  147. package/src/score/scoreMetrics.ts +215 -0
  148. package/src/stream.ts +79 -0
  149. package/src/tracer.ts +177 -0
  150. package/src/types.ts +178 -0
  151. package/src/utils/buildRawMessage.ts +174 -0
  152. package/src/utils/create-gossip-rpc.ts +34 -0
  153. package/src/utils/index.ts +3 -0
  154. package/src/utils/messageIdToString.ts +8 -0
  155. package/src/utils/msgIdFn.ts +24 -0
  156. package/src/utils/multiaddr.ts +19 -0
  157. package/src/utils/publishConfig.ts +33 -0
  158. package/src/utils/set.ts +43 -0
  159. package/src/utils/shuffle.ts +21 -0
  160. package/src/utils/time-cache.ts +71 -0
@@ -0,0 +1,896 @@
1
+ import { TopicValidatorResult } from "./index.js";
2
+ import { MessageStatus, RejectReason } from './types.js';
3
/** String enum of message sources: a message is either forwarded or published. */
export var MessageSource;
(function (src) {
    src.forward = 'forward';
    src.publish = 'publish';
})(MessageSource || (MessageSource = {}));
8
/** String enum of reasons a peer was included in a topic mesh. */
export var InclusionReason;
(function (reason) {
    /** Peer was a fanout peer. */
    reason.Fanout = 'fanout';
    /** Included from random selection. */
    reason.Random = 'random';
    /** Peer subscribed. */
    reason.Subscribed = 'subscribed';
    /** On heartbeat, peer was included to fill the outbound quota. */
    reason.Outbound = 'outbound';
    /** On heartbeat, not enough peers in mesh. */
    reason.NotEnough = 'not_enough';
    /** On heartbeat, opportunistic grafting due to low mesh score. */
    reason.Opportunistic = 'opportunistic';
})(InclusionReason || (InclusionReason = {}));
23
/** String enum of reasons a peer was removed from the mesh. */
export var ChurnReason;
(function (churn) {
    /** Peer disconnected. */
    churn.Dc = 'disconnected';
    /** Peer had a bad score. */
    churn.BadScore = 'bad_score';
    /** Peer sent a PRUNE. */
    churn.Prune = 'prune';
    /** Too many peers. */
    churn.Excess = 'excess';
})(ChurnReason || (ChurnReason = {}));
35
/** String enum of the kinds of penalties applied to a peer's score. */
export var ScorePenalty;
(function (penalty) {
    /** A peer grafted before waiting the back-off time. */
    penalty.GraftBackoff = 'graft_backoff';
    /** A peer did not respond to an IWANT request in time. */
    penalty.BrokenPromise = 'broken_promise';
    /** A peer did not send enough messages as expected. */
    penalty.MessageDeficit = 'message_deficit';
    /** Too many peers under one IP address. */
    penalty.IPColocation = 'IP_colocation';
})(ScorePenalty || (ScorePenalty = {}));
47
/** String enum of reasons a received IHAVE control message is ignored. */
export var IHaveIgnoreReason = {
    LowScore: 'low_score',
    MaxIhave: 'max_ihave',
    MaxIasked: 'max_iasked'
};
53
/** String enum of score-threshold names (used as metric label values). */
export var ScoreThreshold = {
    graylist: 'graylist',
    publish: 'publish',
    gossip: 'gossip',
    mesh: 'mesh'
};
60
+ /**
61
+ * A collection of metrics used throughout the Gossipsub behaviour.
62
+ * NOTE: except for special reasons, do not add more than 1 label for frequent metrics,
63
+ * there's a performance penalty as of June 2023.
64
+ */
65
+ // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
66
+ export function getMetrics(register, topicStrToLabel, opts) {
67
+ // Using function style instead of class to prevent having to re-declare all MetricsPrometheus types.
68
+ return {
69
+ /* Metrics for static config */
70
+ protocolsEnabled: register.gauge({
71
+ name: 'gossipsub_protocol',
72
+ help: 'Status of enabled protocols',
73
+ labelNames: ['protocol']
74
+ }),
75
+ /* Metrics per known topic */
76
+ /**
77
+ * Status of our subscription to this topic. This metric allows analyzing other topic metrics
78
+ * filtered by our current subscription status.
79
+ * = rust-libp2p `topic_subscription_status`
80
+ */
81
+ topicSubscriptionStatus: register.gauge({
82
+ name: 'gossipsub_topic_subscription_status',
83
+ help: 'Status of our subscription to this topic',
84
+ labelNames: ['topicStr']
85
+ }),
86
+ /**
87
+ * Number of peers subscribed to each topic. This allows us to analyze a topic's behaviour
88
+ * regardless of our subscription status.
89
+ */
90
+ topicPeersCount: register.gauge({
91
+ name: 'gossipsub_topic_peer_count',
92
+ help: 'Number of peers subscribed to each topic',
93
+ labelNames: ['topicStr']
94
+ }),
95
+ /* Metrics regarding mesh state */
96
+ /**
97
+ * Number of peers in our mesh. This metric should be updated with the count of peers for a
98
+ * topic in the mesh regardless of inclusion and churn events.
99
+ * = rust-libp2p `mesh_peer_counts`
100
+ */
101
+ meshPeerCounts: register.gauge({
102
+ name: 'gossipsub_mesh_peer_count',
103
+ help: 'Number of peers in our mesh',
104
+ labelNames: ['topicStr']
105
+ }),
106
+ /**
107
+ * Number of times we include peers in a topic mesh for different reasons.
108
+ * = rust-libp2p `mesh_peer_inclusion_events`
109
+ */
110
+ meshPeerInclusionEventsFanout: register.gauge({
111
+ name: 'gossipsub_mesh_peer_inclusion_events_fanout_total',
112
+ help: 'Number of times we include peers in a topic mesh for fanout reasons',
113
+ labelNames: ['topic']
114
+ }),
115
+ meshPeerInclusionEventsRandom: register.gauge({
116
+ name: 'gossipsub_mesh_peer_inclusion_events_random_total',
117
+ help: 'Number of times we include peers in a topic mesh for random reasons',
118
+ labelNames: ['topic']
119
+ }),
120
+ meshPeerInclusionEventsSubscribed: register.gauge({
121
+ name: 'gossipsub_mesh_peer_inclusion_events_subscribed_total',
122
+ help: 'Number of times we include peers in a topic mesh for subscribed reasons',
123
+ labelNames: ['topic']
124
+ }),
125
+ meshPeerInclusionEventsOutbound: register.gauge({
126
+ name: 'gossipsub_mesh_peer_inclusion_events_outbound_total',
127
+ help: 'Number of times we include peers in a topic mesh for outbound reasons',
128
+ labelNames: ['topic']
129
+ }),
130
+ meshPeerInclusionEventsNotEnough: register.gauge({
131
+ name: 'gossipsub_mesh_peer_inclusion_events_not_enough_total',
132
+ help: 'Number of times we include peers in a topic mesh for not_enough reasons',
133
+ labelNames: ['topic']
134
+ }),
135
+ meshPeerInclusionEventsOpportunistic: register.gauge({
136
+ name: 'gossipsub_mesh_peer_inclusion_events_opportunistic_total',
137
+ help: 'Number of times we include peers in a topic mesh for opportunistic reasons',
138
+ labelNames: ['topic']
139
+ }),
140
+ meshPeerInclusionEventsUnknown: register.gauge({
141
+ name: 'gossipsub_mesh_peer_inclusion_events_unknown_total',
142
+ help: 'Number of times we include peers in a topic mesh for unknown reasons',
143
+ labelNames: ['topic']
144
+ }),
145
+ /**
146
+ * Number of times we remove peers in a topic mesh for different reasons.
147
+ * = rust-libp2p `mesh_peer_churn_events`
148
+ */
149
+ meshPeerChurnEventsDisconnected: register.gauge({
150
+ name: 'gossipsub_peer_churn_events_disconnected_total',
151
+ help: 'Number of times we remove peers in a topic mesh for disconnected reasons',
152
+ labelNames: ['topic']
153
+ }),
154
+ meshPeerChurnEventsBadScore: register.gauge({
155
+ name: 'gossipsub_peer_churn_events_bad_score_total',
156
+ help: 'Number of times we remove peers in a topic mesh for bad_score reasons',
157
+ labelNames: ['topic']
158
+ }),
159
+ meshPeerChurnEventsPrune: register.gauge({
160
+ name: 'gossipsub_peer_churn_events_prune_total',
161
+ help: 'Number of times we remove peers in a topic mesh for prune reasons',
162
+ labelNames: ['topic']
163
+ }),
164
+ meshPeerChurnEventsExcess: register.gauge({
165
+ name: 'gossipsub_peer_churn_events_excess_total',
166
+ help: 'Number of times we remove peers in a topic mesh for excess reasons',
167
+ labelNames: ['topic']
168
+ }),
169
+ meshPeerChurnEventsUnknown: register.gauge({
170
+ name: 'gossipsub_peer_churn_events_unknown_total',
171
+ help: 'Number of times we remove peers in a topic mesh for unknown reasons',
172
+ labelNames: ['topic']
173
+ }),
174
+ /* General Metrics */
175
+ /**
176
+ * Gossipsub supports floodsub, gossipsub v1.0, v1.1, and v1.2. Peers are classified based
177
+ * on which protocol they support. This metric keeps track of the number of peers that are
178
+ * connected of each type.
179
+ */
180
+ peersPerProtocol: register.gauge({
181
+ name: 'gossipsub_peers_per_protocol_count',
182
+ help: 'Peers connected for each topic',
183
+ labelNames: ['protocol']
184
+ }),
185
+ /** The time it takes to complete one iteration of the heartbeat. */
186
+ heartbeatDuration: register.histogram({
187
+ name: 'gossipsub_heartbeat_duration_seconds',
188
+ help: 'The time it takes to complete one iteration of the heartbeat',
189
+ // Should take <10ms, over 1s it's a huge issue that needs debugging, since a heartbeat will be cancelled
190
+ buckets: [0.01, 0.1, 1]
191
+ }),
192
+ /** Heartbeat run took longer than heartbeat interval so next is skipped */
193
+ heartbeatSkipped: register.gauge({
194
+ name: 'gossipsub_heartbeat_skipped',
195
+ help: 'Heartbeat run took longer than heartbeat interval so next is skipped'
196
+ }),
197
+ /**
198
+ * Message validation results for each topic.
199
+ * Invalid == Reject?
200
+ * = rust-libp2p `invalid_messages`, `accepted_messages`, `ignored_messages`, `rejected_messages`
201
+ */
202
+ acceptedMessagesTotal: register.gauge({
203
+ name: 'gossipsub_accepted_messages_total',
204
+ help: 'Total accepted messages for each topic',
205
+ labelNames: ['topic']
206
+ }),
207
+ ignoredMessagesTotal: register.gauge({
208
+ name: 'gossipsub_ignored_messages_total',
209
+ help: 'Total ignored messages for each topic',
210
+ labelNames: ['topic']
211
+ }),
212
+ rejectedMessagesTotal: register.gauge({
213
+ name: 'gossipsub_rejected_messages_total',
214
+ help: 'Total rejected messages for each topic',
215
+ labelNames: ['topic']
216
+ }),
217
+ unknownValidationResultsTotal: register.gauge({
218
+ name: 'gossipsub_unknown_validation_results_total',
219
+ help: 'Total unknown validation results for each topic',
220
+ labelNames: ['topic']
221
+ }),
222
+ /**
223
+ * When the user validates a message, it tries to re propagate it to its mesh peers. If the
224
+ * message expires from the memcache before it can be validated, we count this a cache miss
225
+ * and it is an indicator that the memcache size should be increased.
226
+ * = rust-libp2p `mcache_misses`
227
+ */
228
+ asyncValidationMcacheHit: register.gauge({
229
+ name: 'gossipsub_async_validation_mcache_hit_total',
230
+ help: 'Async validation result reported by the user layer',
231
+ labelNames: ['hit']
232
+ }),
233
+ asyncValidationDelayFromFirstSeenSec: register.histogram({
234
+ name: 'gossipsub_async_validation_delay_from_first_seen',
235
+ help: 'Async validation report delay from first seen in second',
236
+ buckets: [0.01, 0.03, 0.1, 0.3, 1, 3, 10]
237
+ }),
238
+ asyncValidationUnknownFirstSeen: register.gauge({
239
+ name: 'gossipsub_async_validation_unknown_first_seen_count_total',
240
+ help: 'Async validation report unknown first seen value for message'
241
+ }),
242
+ // peer stream
243
+ peerReadStreamError: register.gauge({
244
+ name: 'gossipsub_peer_read_stream_err_count_total',
245
+ help: 'Peer read stream error'
246
+ }),
247
+ // RPC outgoing. Track byte length + data structure sizes
248
+ rpcRecvBytes: register.gauge({ name: 'gossipsub_rpc_recv_bytes_total', help: 'RPC recv' }),
249
+ rpcRecvCount: register.gauge({ name: 'gossipsub_rpc_recv_count_total', help: 'RPC recv' }),
250
+ rpcRecvSubscription: register.gauge({ name: 'gossipsub_rpc_recv_subscription_total', help: 'RPC recv' }),
251
+ rpcRecvMessage: register.gauge({ name: 'gossipsub_rpc_recv_message_total', help: 'RPC recv' }),
252
+ rpcRecvControl: register.gauge({ name: 'gossipsub_rpc_recv_control_total', help: 'RPC recv' }),
253
+ rpcRecvIHave: register.gauge({ name: 'gossipsub_rpc_recv_ihave_total', help: 'RPC recv' }),
254
+ rpcRecvIWant: register.gauge({ name: 'gossipsub_rpc_recv_iwant_total', help: 'RPC recv' }),
255
+ rpcRecvGraft: register.gauge({ name: 'gossipsub_rpc_recv_graft_total', help: 'RPC recv' }),
256
+ rpcRecvPrune: register.gauge({ name: 'gossipsub_rpc_recv_prune_total', help: 'RPC recv' }),
257
+ rpcDataError: register.gauge({ name: 'gossipsub_rpc_data_err_count_total', help: 'RPC data error' }),
258
+ rpcRecvError: register.gauge({ name: 'gossipsub_rpc_recv_err_count_total', help: 'RPC recv error' }),
259
+ /** Total count of RPC dropped because acceptFrom() == false */
260
+ rpcRecvNotAccepted: register.gauge({
261
+ name: 'gossipsub_rpc_rcv_not_accepted_total',
262
+ help: 'Total count of RPC dropped because acceptFrom() == false'
263
+ }),
264
+ // RPC incoming. Track byte length + data structure sizes
265
+ rpcSentBytes: register.gauge({ name: 'gossipsub_rpc_sent_bytes_total', help: 'RPC sent' }),
266
+ rpcSentCount: register.gauge({ name: 'gossipsub_rpc_sent_count_total', help: 'RPC sent' }),
267
+ rpcSentSubscription: register.gauge({ name: 'gossipsub_rpc_sent_subscription_total', help: 'RPC sent' }),
268
+ rpcSentMessage: register.gauge({ name: 'gossipsub_rpc_sent_message_total', help: 'RPC sent' }),
269
+ rpcSentControl: register.gauge({ name: 'gossipsub_rpc_sent_control_total', help: 'RPC sent' }),
270
+ rpcSentIHave: register.gauge({ name: 'gossipsub_rpc_sent_ihave_total', help: 'RPC sent' }),
271
+ rpcSentIWant: register.gauge({ name: 'gossipsub_rpc_sent_iwant_total', help: 'RPC sent' }),
272
+ rpcSentGraft: register.gauge({ name: 'gossipsub_rpc_sent_graft_total', help: 'RPC sent' }),
273
+ rpcSentPrune: register.gauge({ name: 'gossipsub_rpc_sent_prune_total', help: 'RPC sent' }),
274
+ rpcSentIDontWant: register.gauge({ name: 'gossipsub_rpc_sent_idontwant_total', help: 'RPC sent' }),
275
+ // publish message. Track peers sent to and bytes
276
+ /** Total count of msg published by topic */
277
+ msgPublishCount: register.gauge({
278
+ name: 'gossipsub_msg_publish_count_total',
279
+ help: 'Total count of msg published by topic',
280
+ labelNames: ['topic']
281
+ }),
282
+ /** Total count of peers that we publish a msg to */
283
+ msgPublishPeersByTopic: register.gauge({
284
+ name: 'gossipsub_msg_publish_peers_total',
285
+ help: 'Total count of peers that we publish a msg to',
286
+ labelNames: ['topic']
287
+ }),
288
+ /** Total count of peers (by group) that we publish a msg to */
289
+ directPeersPublishedTotal: register.gauge({
290
+ name: 'gossipsub_direct_peers_published_total',
291
+ help: 'Total direct peers that we publish a msg to',
292
+ labelNames: ['topic']
293
+ }),
294
+ floodsubPeersPublishedTotal: register.gauge({
295
+ name: 'gossipsub_floodsub_peers_published_total',
296
+ help: 'Total floodsub peers that we publish a msg to',
297
+ labelNames: ['topic']
298
+ }),
299
+ meshPeersPublishedTotal: register.gauge({
300
+ name: 'gossipsub_mesh_peers_published_total',
301
+ help: 'Total mesh peers that we publish a msg to',
302
+ labelNames: ['topic']
303
+ }),
304
+ fanoutPeersPublishedTotal: register.gauge({
305
+ name: 'gossipsub_fanout_peers_published_total',
306
+ help: 'Total fanout peers that we publish a msg to',
307
+ labelNames: ['topic']
308
+ }),
309
+ /** Total count of msg publish data.length bytes */
310
+ msgPublishBytes: register.gauge({
311
+ name: 'gossipsub_msg_publish_bytes_total',
312
+ help: 'Total count of msg publish data.length bytes',
313
+ labelNames: ['topic']
314
+ }),
315
+ /** Total time in seconds to publish a message */
316
+ msgPublishTime: register.histogram({
317
+ name: 'gossipsub_msg_publish_seconds',
318
+ help: 'Total time in seconds to publish a message',
319
+ buckets: [0.001, 0.002, 0.005, 0.01, 0.1, 0.5, 1],
320
+ labelNames: ['topic']
321
+ }),
322
+ /** Total count of msg forwarded by topic */
323
+ msgForwardCount: register.gauge({
324
+ name: 'gossipsub_msg_forward_count_total',
325
+ help: 'Total count of msg forwarded by topic',
326
+ labelNames: ['topic']
327
+ }),
328
+ /** Total count of peers that we forward a msg to */
329
+ msgForwardPeers: register.gauge({
330
+ name: 'gossipsub_msg_forward_peers_total',
331
+ help: 'Total count of peers that we forward a msg to',
332
+ labelNames: ['topic']
333
+ }),
334
+ /** Total count of recv msgs before any validation */
335
+ msgReceivedPreValidation: register.gauge({
336
+ name: 'gossipsub_msg_received_prevalidation_total',
337
+ help: 'Total count of recv msgs before any validation',
338
+ labelNames: ['topic']
339
+ }),
340
+ /** Total count of recv msgs error */
341
+ msgReceivedError: register.gauge({
342
+ name: 'gossipsub_msg_received_error_total',
343
+ help: 'Total count of recv msgs error',
344
+ labelNames: ['topic']
345
+ }),
346
+ /** Tracks distribution of recv msgs by duplicate, invalid, valid */
347
+ prevalidationInvalidTotal: register.gauge({
348
+ name: 'gossipsub_pre_validation_invalid_total',
349
+ help: 'Total count of invalid messages received',
350
+ labelNames: ['topic']
351
+ }),
352
+ prevalidationValidTotal: register.gauge({
353
+ name: 'gossipsub_pre_validation_valid_total',
354
+ help: 'Total count of valid messages received',
355
+ labelNames: ['topic']
356
+ }),
357
+ prevalidationDuplicateTotal: register.gauge({
358
+ name: 'gossipsub_pre_validation_duplicate_total',
359
+ help: 'Total count of duplicate messages received',
360
+ labelNames: ['topic']
361
+ }),
362
+ prevalidationUnknownTotal: register.gauge({
363
+ name: 'gossipsub_pre_validation_unknown_status_total',
364
+ help: 'Total count of unknown_status messages received',
365
+ labelNames: ['topic']
366
+ }),
367
+ /** Tracks specific reason of invalid */
368
+ msgReceivedInvalid: register.gauge({
369
+ name: 'gossipsub_msg_received_invalid_total',
370
+ help: 'Tracks specific reason of invalid',
371
+ labelNames: ['error']
372
+ }),
373
+ msgReceivedInvalidByTopic: register.gauge({
374
+ name: 'gossipsub_msg_received_invalid_by_topic_total',
375
+ help: 'Tracks specific invalid message by topic',
376
+ labelNames: ['topic']
377
+ }),
378
+ /** Track duplicate message delivery time */
379
+ duplicateMsgDeliveryDelay: register.histogram({
380
+ name: 'gossisub_duplicate_msg_delivery_delay_seconds',
381
+ help: 'Time since the 1st duplicated message validated',
382
+ labelNames: ['topic'],
383
+ buckets: [
384
+ 0.25 * opts.maxMeshMessageDeliveriesWindowSec,
385
+ 0.5 * opts.maxMeshMessageDeliveriesWindowSec,
386
+ Number(opts.maxMeshMessageDeliveriesWindowSec),
387
+ 2 * opts.maxMeshMessageDeliveriesWindowSec,
388
+ 4 * opts.maxMeshMessageDeliveriesWindowSec
389
+ ]
390
+ }),
391
+ /** Total count of late msg delivery total by topic */
392
+ duplicateMsgLateDelivery: register.gauge({
393
+ name: 'gossisub_duplicate_msg_late_delivery_total',
394
+ help: 'Total count of late duplicate message delivery by topic, which triggers P3 penalty',
395
+ labelNames: ['topic']
396
+ }),
397
+ duplicateMsgIgnored: register.gauge({
398
+ name: 'gossisub_ignored_published_duplicate_msgs_total',
399
+ help: 'Total count of published duplicate message ignored by topic',
400
+ labelNames: ['topic']
401
+ }),
402
+ /* Metrics related to scoring */
403
+ /** Total times score() is called */
404
+ scoreFnCalls: register.gauge({
405
+ name: 'gossipsub_score_fn_calls_total',
406
+ help: 'Total times score() is called'
407
+ }),
408
+ /** Total times score() call actually computed computeScore(), no cache */
409
+ scoreFnRuns: register.gauge({
410
+ name: 'gossipsub_score_fn_runs_total',
411
+ help: 'Total times score() call actually computed computeScore(), no cache'
412
+ }),
413
+ scoreCachedDelta: register.histogram({
414
+ name: 'gossipsub_score_cache_delta',
415
+ help: 'Delta of score between cached values that expired',
416
+ buckets: [10, 100, 1000]
417
+ }),
418
+ /** Current count of peers by score threshold */
419
+ peersByScoreThreshold: register.gauge({
420
+ name: 'gossipsub_peers_by_score_threshold_count',
421
+ help: 'Current count of peers by score threshold',
422
+ labelNames: ['threshold']
423
+ }),
424
+ score: register.avgMinMax({
425
+ name: 'gossipsub_score',
426
+ help: 'Avg min max of gossip scores'
427
+ }),
428
+ /**
429
+ * Separate score weights
430
+ * Need to use 2-label metrics in this case to debug the score weights
431
+ */
432
+ scoreWeights: register.avgMinMax({
433
+ name: 'gossipsub_score_weights',
434
+ help: 'Separate score weights',
435
+ labelNames: ['topic', 'p']
436
+ }),
437
+ /** Histogram of the scores for each mesh topic. */
438
+ // TODO: Not implemented
439
+ scorePerMesh: register.avgMinMax({
440
+ name: 'gossipsub_score_per_mesh',
441
+ help: 'Histogram of the scores for each mesh topic',
442
+ labelNames: ['topic']
443
+ }),
444
+ /** A counter of the kind of penalties being applied to peers. */
445
+ // TODO: Not fully implemented
446
+ scoringPenalties: register.gauge({
447
+ name: 'gossipsub_scoring_penalties_total',
448
+ help: 'A counter of the kind of penalties being applied to peers',
449
+ labelNames: ['penalty']
450
+ }),
451
+ behaviourPenalty: register.histogram({
452
+ name: 'gossipsub_peer_stat_behaviour_penalty',
453
+ help: 'Current peer stat behaviour_penalty at each scrape',
454
+ buckets: [
455
+ 0.25 * opts.behaviourPenaltyThreshold,
456
+ 0.5 * opts.behaviourPenaltyThreshold,
457
+ Number(opts.behaviourPenaltyThreshold),
458
+ 2 * opts.behaviourPenaltyThreshold,
459
+ 4 * opts.behaviourPenaltyThreshold
460
+ ]
461
+ }),
462
+ // TODO:
463
+ // - iasked per peer (on heartbeat)
464
+ // - when promise is resolved, track messages from promises
465
+ /** Total received IHAVE messages that we ignore for some reason */
466
+ ihaveRcvIgnored: register.gauge({
467
+ name: 'gossipsub_ihave_rcv_ignored_total',
468
+ help: 'Total received IHAVE messages that we ignore for some reason',
469
+ labelNames: ['reason']
470
+ }),
471
+ /** Total received IHAVE messages by topic */
472
+ ihaveRcvMsgids: register.gauge({
473
+ name: 'gossipsub_ihave_rcv_msgids_total',
474
+ help: 'Total received IHAVE messages by topic',
475
+ labelNames: ['topic']
476
+ }),
477
+ /**
478
+ * Total messages per topic we don't have. Not actual requests.
479
+ * The number of times we have decided that an IWANT control message is required for this
480
+ * topic. A very high metric might indicate an underperforming network.
481
+ * = rust-libp2p `topic_iwant_msgs`
482
+ */
483
ihaveRcvNotSeenMsgids: register.gauge({
    name: 'gossipsub_ihave_rcv_not_seen_msgids_total',
    help: 'Total messages per topic we do not have, not actual requests',
    labelNames: ['topic']
}),
/** Total received IWANT messages by topic */
iwantRcvMsgids: register.gauge({
    name: 'gossipsub_iwant_rcv_msgids_total',
    help: 'Total received IWANT messages by topic',
    labelNames: ['topic']
}),
/** Total requested messageIDs that we don't have */
iwantRcvDonthaveMsgids: register.gauge({
    name: 'gossipsub_iwant_rcv_dont_have_msgids_total',
    help: 'Total requested messageIDs that we do not have'
}),
/** Total received IDONTWANT messages */
idontwantRcvMsgids: register.gauge({
    name: 'gossipsub_idontwant_rcv_msgids_total',
    help: 'Total received IDONTWANT messages'
}),
/** Total received IDONTWANT messageIDs that we don't have */
idontwantRcvDonthaveMsgids: register.gauge({
    name: 'gossipsub_idontwant_rcv_dont_have_msgids_total',
    help: 'Total received IDONTWANT messageIDs that we do not have in mcache'
}),
/** Total count of started IWANT promises (gossip_promises entries created) */
iwantPromiseStarted: register.gauge({
    name: 'gossipsub_iwant_promise_sent_total',
    help: 'Total count of started IWANT promises'
}),
/** Total count of resolved IWANT promises */
iwantPromiseResolved: register.gauge({
    name: 'gossipsub_iwant_promise_resolved_total',
    help: 'Total count of resolved IWANT promises'
}),
/** Total count of resolved IWANT promises from duplicate messages */
iwantPromiseResolvedFromDuplicate: register.gauge({
    name: 'gossipsub_iwant_promise_resolved_from_duplicate_total',
    help: 'Total count of resolved IWANT promises from duplicate messages'
}),
/** Total count of peers we have asked IWANT promises that are resolved */
iwantPromiseResolvedPeers: register.gauge({
    name: 'gossipsub_iwant_promise_resolved_peers',
    help: 'Total count of peers we have asked IWANT promises that are resolved'
}),
/** Total count of IWANT promises that expired without delivery (broken promises) */
iwantPromiseBroken: register.gauge({
    name: 'gossipsub_iwant_promise_broken',
    help: 'Total count of broken IWANT promises'
}),
/** Total count of IWANT-requested messages pruned before they could be sent */
iwantMessagePruned: register.gauge({
    name: 'gossipsub_iwant_message_pruned',
    help: 'Total count of pruned IWANT messages'
}),
/** Histogram of delivery time of resolved IWANT promises */
iwantPromiseDeliveryTime: register.histogram({
    name: 'gossipsub_iwant_promise_delivery_seconds',
    help: 'Histogram of delivery time of resolved IWANT promises',
    // Buckets scale with the promise-expiry window so the histogram stays
    // meaningful regardless of the configured gossipPromiseExpireSec
    buckets: [
        0.5 * opts.gossipPromiseExpireSec,
        Number(opts.gossipPromiseExpireSec),
        2 * opts.gossipPromiseExpireSec,
        4 * opts.gossipPromiseExpireSec
    ]
}),
/** Total count of messages delivered for which no IWANT promise was being tracked */
// NOTE(review): metric name uses a 'gossip_' prefix rather than the 'gossipsub_'
// prefix used by every other metric here — looks unintentional, but renaming would
// break existing dashboards; confirm upstream before changing.
iwantPromiseUntracked: register.gauge({
    name: 'gossip_iwant_promise_untracked',
    help: 'Total count of untracked IWANT promise'
}),
/** Backoff time */
connectedPeersBackoffSec: register.histogram({
    name: 'gossipsub_connected_peers_backoff_seconds',
    help: 'Backoff time in seconds',
    // Using 1 seconds as minimum as that's close to the heartbeat duration, no need for more resolution.
    // As per spec, backoff times are 10 seconds for UnsubscribeBackoff and 60 seconds for PruneBackoff.
    // Higher values of 60 seconds should not occur, but we add 120 seconds just in case
    // https://github.com/libp2p/specs/blob/master/pubsub/gossipsub/gossipsub-v1.1.md#overview-of-new-parameters
    buckets: [1, 2, 4, 10, 20, 60, 120]
}),
/* Data structure sizes */
/** Unbounded cache sizes */
cacheSize: register.gauge({
    name: 'gossipsub_cache_size',
    help: 'Unbounded cache sizes',
    labelNames: ['cache']
}),
/** Current mcache msg count */
mcacheSize: register.gauge({
    name: 'gossipsub_mcache_size',
    help: 'Current mcache msg count'
}),
/** Current count of mcache messages still awaiting async validation */
mcacheNotValidatedCount: register.gauge({
    name: 'gossipsub_mcache_not_validated_count',
    help: 'Current mcache msg count not validated'
}),
/** Total count of key collisions when inserting into the fastmsgid cache */
fastMsgIdCacheCollision: register.gauge({
    name: 'gossipsub_fastmsgid_cache_collision_total',
    help: 'Total count of key collisions on fastmsgid cache put'
}),
/** Total new connections, labeled by connection status */
newConnectionCount: register.gauge({
    name: 'gossipsub_new_connection_total',
    help: 'Total new connection by status',
    labelNames: ['status']
}),
+ topicStrToLabel,
587
+ toTopic(topicStr) {
588
+ return this.topicStrToLabel.get(topicStr) ?? topicStr;
589
+ },
590
+ /** We joined a topic */
591
+ onJoin(topicStr) {
592
+ this.topicSubscriptionStatus.set({ topicStr }, 1);
593
+ this.meshPeerCounts.set({ topicStr }, 0); // Reset count
594
+ },
595
+ /** We left a topic */
596
+ onLeave(topicStr) {
597
+ this.topicSubscriptionStatus.set({ topicStr }, 0);
598
+ this.meshPeerCounts.set({ topicStr }, 0); // Reset count
599
+ },
600
+ /** Register the inclusion of peers in our mesh due to some reason. */
601
+ onAddToMesh(topicStr, reason, count) {
602
+ const topic = this.toTopic(topicStr);
603
+ switch (reason) {
604
+ case InclusionReason.Fanout:
605
+ this.meshPeerInclusionEventsFanout.inc({ topic }, count);
606
+ break;
607
+ case InclusionReason.Random:
608
+ this.meshPeerInclusionEventsRandom.inc({ topic }, count);
609
+ break;
610
+ case InclusionReason.Subscribed:
611
+ this.meshPeerInclusionEventsSubscribed.inc({ topic }, count);
612
+ break;
613
+ case InclusionReason.Outbound:
614
+ this.meshPeerInclusionEventsOutbound.inc({ topic }, count);
615
+ break;
616
+ case InclusionReason.NotEnough:
617
+ this.meshPeerInclusionEventsNotEnough.inc({ topic }, count);
618
+ break;
619
+ case InclusionReason.Opportunistic:
620
+ this.meshPeerInclusionEventsOpportunistic.inc({ topic }, count);
621
+ break;
622
+ default:
623
+ this.meshPeerInclusionEventsUnknown.inc({ topic }, count);
624
+ break;
625
+ }
626
+ },
627
+ /** Register the removal of peers in our mesh due to some reason */
628
+ // - remove_peer_from_mesh()
629
+ // - heartbeat() Churn::BadScore
630
+ // - heartbeat() Churn::Excess
631
+ // - on_disconnect() Churn::Ds
632
+ onRemoveFromMesh(topicStr, reason, count) {
633
+ const topic = this.toTopic(topicStr);
634
+ switch (reason) {
635
+ case ChurnReason.Dc:
636
+ this.meshPeerChurnEventsDisconnected.inc({ topic }, count);
637
+ break;
638
+ case ChurnReason.BadScore:
639
+ this.meshPeerChurnEventsBadScore.inc({ topic }, count);
640
+ break;
641
+ case ChurnReason.Prune:
642
+ this.meshPeerChurnEventsPrune.inc({ topic }, count);
643
+ break;
644
+ case ChurnReason.Excess:
645
+ this.meshPeerChurnEventsExcess.inc({ topic }, count);
646
+ break;
647
+ default:
648
+ this.meshPeerChurnEventsUnknown.inc({ topic }, count);
649
+ break;
650
+ }
651
+ },
652
+ /**
653
+ * Update validation result to metrics
654
+ *
655
+ * @param messageRecord - null means the message's mcache record was not known at the time of acceptance report
656
+ */
657
+ onReportValidation(messageRecord, acceptance, firstSeenTimestampMs) {
658
+ this.asyncValidationMcacheHit.inc({ hit: messageRecord != null ? 'hit' : 'miss' });
659
+ if (messageRecord != null) {
660
+ const topic = this.toTopic(messageRecord.message.topic);
661
+ switch (acceptance) {
662
+ case TopicValidatorResult.Accept:
663
+ this.acceptedMessagesTotal.inc({ topic });
664
+ break;
665
+ case TopicValidatorResult.Ignore:
666
+ this.ignoredMessagesTotal.inc({ topic });
667
+ break;
668
+ case TopicValidatorResult.Reject:
669
+ this.rejectedMessagesTotal.inc({ topic });
670
+ break;
671
+ default:
672
+ this.unknownValidationResultsTotal.inc({ topic });
673
+ break;
674
+ }
675
+ }
676
+ if (firstSeenTimestampMs != null) {
677
+ this.asyncValidationDelayFromFirstSeenSec.observe((Date.now() - firstSeenTimestampMs) / 1000);
678
+ }
679
+ else {
680
+ this.asyncValidationUnknownFirstSeen.inc();
681
+ }
682
+ },
683
+ /**
684
+ * - in handle_graft() Penalty::GraftBackoff
685
+ * - in apply_iwant_penalties() Penalty::BrokenPromise
686
+ * - in metric_score() P3 Penalty::MessageDeficit
687
+ * - in metric_score() P6 Penalty::IPColocation
688
+ */
689
+ onScorePenalty(penalty) {
690
+ // Can this be labeled by topic too?
691
+ this.scoringPenalties.inc({ penalty }, 1);
692
+ },
693
+ onIhaveRcv(topicStr, ihave, idonthave) {
694
+ const topic = this.toTopic(topicStr);
695
+ this.ihaveRcvMsgids.inc({ topic }, ihave);
696
+ this.ihaveRcvNotSeenMsgids.inc({ topic }, idonthave);
697
+ },
698
+ onIwantRcv(iwantByTopic, iwantDonthave) {
699
+ for (const [topicStr, iwant] of iwantByTopic) {
700
+ const topic = this.toTopic(topicStr);
701
+ this.iwantRcvMsgids.inc({ topic }, iwant);
702
+ }
703
+ this.iwantRcvDonthaveMsgids.inc(iwantDonthave);
704
+ },
705
+ onIdontwantRcv(idontwant, idontwantDonthave) {
706
+ this.idontwantRcvMsgids.inc(idontwant);
707
+ this.idontwantRcvDonthaveMsgids.inc(idontwantDonthave);
708
+ },
709
+ onForwardMsg(topicStr, tosendCount) {
710
+ const topic = this.toTopic(topicStr);
711
+ this.msgForwardCount.inc({ topic }, 1);
712
+ this.msgForwardPeers.inc({ topic }, tosendCount);
713
+ },
714
+ onPublishMsg(topicStr, tosendGroupCount, tosendCount, dataLen, ms) {
715
+ const topic = this.toTopic(topicStr);
716
+ this.msgPublishCount.inc({ topic }, 1);
717
+ this.msgPublishBytes.inc({ topic }, tosendCount * dataLen);
718
+ this.msgPublishPeersByTopic.inc({ topic }, tosendCount);
719
+ this.directPeersPublishedTotal.inc({ topic }, tosendGroupCount.direct);
720
+ this.floodsubPeersPublishedTotal.inc({ topic }, tosendGroupCount.floodsub);
721
+ this.meshPeersPublishedTotal.inc({ topic }, tosendGroupCount.mesh);
722
+ this.fanoutPeersPublishedTotal.inc({ topic }, tosendGroupCount.fanout);
723
+ this.msgPublishTime.observe({ topic }, ms / 1000);
724
+ },
725
+ onMsgRecvPreValidation(topicStr) {
726
+ const topic = this.toTopic(topicStr);
727
+ this.msgReceivedPreValidation.inc({ topic }, 1);
728
+ },
729
+ onMsgRecvError(topicStr) {
730
+ const topic = this.toTopic(topicStr);
731
+ this.msgReceivedError.inc({ topic }, 1);
732
+ },
733
+ onPrevalidationResult(topicStr, status) {
734
+ const topic = this.toTopic(topicStr);
735
+ switch (status) {
736
+ case MessageStatus.duplicate:
737
+ this.prevalidationDuplicateTotal.inc({ topic });
738
+ break;
739
+ case MessageStatus.invalid:
740
+ this.prevalidationInvalidTotal.inc({ topic });
741
+ break;
742
+ case MessageStatus.valid:
743
+ this.prevalidationValidTotal.inc({ topic });
744
+ break;
745
+ default:
746
+ this.prevalidationUnknownTotal.inc({ topic });
747
+ break;
748
+ }
749
+ },
750
+ onMsgRecvInvalid(topicStr, reason) {
751
+ const topic = this.toTopic(topicStr);
752
+ const error = reason.reason === RejectReason.Error ? reason.error : reason.reason;
753
+ this.msgReceivedInvalid.inc({ error }, 1);
754
+ this.msgReceivedInvalidByTopic.inc({ topic }, 1);
755
+ },
756
+ onDuplicateMsgDelivery(topicStr, deliveryDelayMs, isLateDelivery) {
757
+ const topic = this.toTopic(topicStr);
758
+ this.duplicateMsgDeliveryDelay.observe({ topic }, deliveryDelayMs / 1000);
759
+ if (isLateDelivery) {
760
+ this.duplicateMsgLateDelivery.inc({ topic }, 1);
761
+ }
762
+ },
763
+ onPublishDuplicateMsg(topicStr) {
764
+ const topic = this.toTopic(topicStr);
765
+ this.duplicateMsgIgnored.inc({ topic }, 1);
766
+ },
767
+ onPeerReadStreamError() {
768
+ this.peerReadStreamError.inc(1);
769
+ },
770
+ onRpcRecvError() {
771
+ this.rpcRecvError.inc(1);
772
+ },
773
+ onRpcDataError() {
774
+ this.rpcDataError.inc(1);
775
+ },
776
+ onRpcRecv(rpc, rpcBytes) {
777
+ this.rpcRecvBytes.inc(rpcBytes);
778
+ this.rpcRecvCount.inc(1);
779
+ if (rpc.subscriptions != null) {
780
+ this.rpcRecvSubscription.inc(rpc.subscriptions.length);
781
+ }
782
+ if (rpc.messages != null) {
783
+ this.rpcRecvMessage.inc(rpc.messages.length);
784
+ }
785
+ if (rpc.control != null) {
786
+ this.rpcRecvControl.inc(1);
787
+ if (rpc.control.ihave != null) {
788
+ this.rpcRecvIHave.inc(rpc.control.ihave.length);
789
+ }
790
+ if (rpc.control.iwant != null) {
791
+ this.rpcRecvIWant.inc(rpc.control.iwant.length);
792
+ }
793
+ if (rpc.control.graft != null) {
794
+ this.rpcRecvGraft.inc(rpc.control.graft.length);
795
+ }
796
+ if (rpc.control.prune != null) {
797
+ this.rpcRecvPrune.inc(rpc.control.prune.length);
798
+ }
799
+ }
800
+ },
801
+ onRpcSent(rpc, rpcBytes) {
802
+ this.rpcSentBytes.inc(rpcBytes);
803
+ this.rpcSentCount.inc(1);
804
+ if (rpc.subscriptions != null) {
805
+ this.rpcSentSubscription.inc(rpc.subscriptions.length);
806
+ }
807
+ if (rpc.messages != null) {
808
+ this.rpcSentMessage.inc(rpc.messages.length);
809
+ }
810
+ if (rpc.control != null) {
811
+ const ihave = rpc.control.ihave?.length ?? 0;
812
+ const iwant = rpc.control.iwant?.length ?? 0;
813
+ const graft = rpc.control.graft?.length ?? 0;
814
+ const prune = rpc.control.prune?.length ?? 0;
815
+ const idontwant = rpc.control.idontwant?.length ?? 0;
816
+ if (ihave > 0) {
817
+ this.rpcSentIHave.inc(ihave);
818
+ }
819
+ if (iwant > 0) {
820
+ this.rpcSentIWant.inc(iwant);
821
+ }
822
+ if (graft > 0) {
823
+ this.rpcSentGraft.inc(graft);
824
+ }
825
+ if (prune > 0) {
826
+ this.rpcSentPrune.inc(prune);
827
+ }
828
+ if (idontwant > 0) {
829
+ this.rpcSentIDontWant.inc(idontwant);
830
+ }
831
+ if (ihave > 0 || iwant > 0 || graft > 0 || prune > 0 || idontwant > 0) {
832
+ this.rpcSentControl.inc(1);
833
+ }
834
+ }
835
+ },
836
+ registerScores(scores, scoreThresholds) {
837
+ let graylist = 0;
838
+ let publish = 0;
839
+ let gossip = 0;
840
+ let mesh = 0;
841
+ for (const score of scores) {
842
+ if (score >= scoreThresholds.graylistThreshold) {
843
+ graylist++;
844
+ }
845
+ if (score >= scoreThresholds.publishThreshold) {
846
+ publish++;
847
+ }
848
+ if (score >= scoreThresholds.gossipThreshold) {
849
+ gossip++;
850
+ }
851
+ if (score >= 0) {
852
+ mesh++;
853
+ }
854
+ }
855
+ this.peersByScoreThreshold.set({ threshold: ScoreThreshold.graylist }, graylist);
856
+ this.peersByScoreThreshold.set({ threshold: ScoreThreshold.publish }, publish);
857
+ this.peersByScoreThreshold.set({ threshold: ScoreThreshold.gossip }, gossip);
858
+ this.peersByScoreThreshold.set({ threshold: ScoreThreshold.mesh }, mesh);
859
+ // Register full score too
860
+ this.score.set(scores);
861
+ },
862
+ registerScoreWeights(sw) {
863
+ for (const [topic, wsTopic] of sw.byTopic) {
864
+ this.scoreWeights.set({ topic, p: 'p1' }, wsTopic.p1w);
865
+ this.scoreWeights.set({ topic, p: 'p2' }, wsTopic.p2w);
866
+ this.scoreWeights.set({ topic, p: 'p3' }, wsTopic.p3w);
867
+ this.scoreWeights.set({ topic, p: 'p3b' }, wsTopic.p3bw);
868
+ this.scoreWeights.set({ topic, p: 'p4' }, wsTopic.p4w);
869
+ }
870
+ this.scoreWeights.set({ p: 'p5' }, sw.p5w);
871
+ this.scoreWeights.set({ p: 'p6' }, sw.p6w);
872
+ this.scoreWeights.set({ p: 'p7' }, sw.p7w);
873
+ },
874
+ registerScorePerMesh(mesh, scoreByPeer) {
875
+ const peersPerTopicLabel = new Map();
876
+ mesh.forEach((peers, topicStr) => {
877
+ // Aggregate by known topicLabel or throw to 'unknown'. This prevent too high cardinality
878
+ const topicLabel = this.topicStrToLabel.get(topicStr) ?? 'unknown';
879
+ let peersInMesh = peersPerTopicLabel.get(topicLabel);
880
+ if (peersInMesh == null) {
881
+ peersInMesh = new Set();
882
+ peersPerTopicLabel.set(topicLabel, peersInMesh);
883
+ }
884
+ peers.forEach((p) => peersInMesh?.add(p));
885
+ });
886
+ for (const [topic, peers] of peersPerTopicLabel) {
887
+ const meshScores = [];
888
+ peers.forEach((peer) => {
889
+ meshScores.push(scoreByPeer.get(peer) ?? 0);
890
+ });
891
+ this.scorePerMesh.set({ topic }, meshScores);
892
+ }
893
+ }
894
+ };
895
+ }
896
+ //# sourceMappingURL=metrics.js.map