@libp2p/gossipsub 14.1.1-6059227cb

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (160) hide show
  1. package/README.md +85 -0
  2. package/dist/index.min.js +19 -0
  3. package/dist/index.min.js.map +7 -0
  4. package/dist/src/config.d.ts +32 -0
  5. package/dist/src/config.d.ts.map +1 -0
  6. package/dist/src/config.js +2 -0
  7. package/dist/src/config.js.map +1 -0
  8. package/dist/src/constants.d.ts +213 -0
  9. package/dist/src/constants.d.ts.map +1 -0
  10. package/dist/src/constants.js +217 -0
  11. package/dist/src/constants.js.map +1 -0
  12. package/dist/src/errors.d.ts +9 -0
  13. package/dist/src/errors.d.ts.map +1 -0
  14. package/dist/src/errors.js +15 -0
  15. package/dist/src/errors.js.map +1 -0
  16. package/dist/src/gossipsub.d.ts +419 -0
  17. package/dist/src/gossipsub.d.ts.map +1 -0
  18. package/dist/src/gossipsub.js +2520 -0
  19. package/dist/src/gossipsub.js.map +1 -0
  20. package/dist/src/index.d.ts +344 -0
  21. package/dist/src/index.d.ts.map +1 -0
  22. package/dist/src/index.js +43 -0
  23. package/dist/src/index.js.map +1 -0
  24. package/dist/src/message/decodeRpc.d.ts +11 -0
  25. package/dist/src/message/decodeRpc.d.ts.map +1 -0
  26. package/dist/src/message/decodeRpc.js +10 -0
  27. package/dist/src/message/decodeRpc.js.map +1 -0
  28. package/dist/src/message/index.d.ts +2 -0
  29. package/dist/src/message/index.d.ts.map +1 -0
  30. package/dist/src/message/index.js +2 -0
  31. package/dist/src/message/index.js.map +1 -0
  32. package/dist/src/message/rpc.d.ts +99 -0
  33. package/dist/src/message/rpc.d.ts.map +1 -0
  34. package/dist/src/message/rpc.js +663 -0
  35. package/dist/src/message/rpc.js.map +1 -0
  36. package/dist/src/message-cache.d.ts +80 -0
  37. package/dist/src/message-cache.d.ts.map +1 -0
  38. package/dist/src/message-cache.js +144 -0
  39. package/dist/src/message-cache.js.map +1 -0
  40. package/dist/src/metrics.d.ts +467 -0
  41. package/dist/src/metrics.d.ts.map +1 -0
  42. package/dist/src/metrics.js +896 -0
  43. package/dist/src/metrics.js.map +1 -0
  44. package/dist/src/score/compute-score.d.ts +4 -0
  45. package/dist/src/score/compute-score.d.ts.map +1 -0
  46. package/dist/src/score/compute-score.js +75 -0
  47. package/dist/src/score/compute-score.js.map +1 -0
  48. package/dist/src/score/index.d.ts +4 -0
  49. package/dist/src/score/index.d.ts.map +1 -0
  50. package/dist/src/score/index.js +4 -0
  51. package/dist/src/score/index.js.map +1 -0
  52. package/dist/src/score/message-deliveries.d.ts +45 -0
  53. package/dist/src/score/message-deliveries.d.ts.map +1 -0
  54. package/dist/src/score/message-deliveries.js +75 -0
  55. package/dist/src/score/message-deliveries.js.map +1 -0
  56. package/dist/src/score/peer-score-params.d.ts +125 -0
  57. package/dist/src/score/peer-score-params.d.ts.map +1 -0
  58. package/dist/src/score/peer-score-params.js +159 -0
  59. package/dist/src/score/peer-score-params.js.map +1 -0
  60. package/dist/src/score/peer-score-thresholds.d.ts +31 -0
  61. package/dist/src/score/peer-score-thresholds.d.ts.map +1 -0
  62. package/dist/src/score/peer-score-thresholds.js +32 -0
  63. package/dist/src/score/peer-score-thresholds.js.map +1 -0
  64. package/dist/src/score/peer-score.d.ts +119 -0
  65. package/dist/src/score/peer-score.d.ts.map +1 -0
  66. package/dist/src/score/peer-score.js +459 -0
  67. package/dist/src/score/peer-score.js.map +1 -0
  68. package/dist/src/score/peer-stats.d.ts +32 -0
  69. package/dist/src/score/peer-stats.d.ts.map +1 -0
  70. package/dist/src/score/peer-stats.js +2 -0
  71. package/dist/src/score/peer-stats.js.map +1 -0
  72. package/dist/src/score/scoreMetrics.d.ts +23 -0
  73. package/dist/src/score/scoreMetrics.d.ts.map +1 -0
  74. package/dist/src/score/scoreMetrics.js +155 -0
  75. package/dist/src/score/scoreMetrics.js.map +1 -0
  76. package/dist/src/stream.d.ts +30 -0
  77. package/dist/src/stream.d.ts.map +1 -0
  78. package/dist/src/stream.js +55 -0
  79. package/dist/src/stream.js.map +1 -0
  80. package/dist/src/tracer.d.ts +53 -0
  81. package/dist/src/tracer.d.ts.map +1 -0
  82. package/dist/src/tracer.js +155 -0
  83. package/dist/src/tracer.js.map +1 -0
  84. package/dist/src/types.d.ts +148 -0
  85. package/dist/src/types.d.ts.map +1 -0
  86. package/dist/src/types.js +90 -0
  87. package/dist/src/types.js.map +1 -0
  88. package/dist/src/utils/buildRawMessage.d.ts +20 -0
  89. package/dist/src/utils/buildRawMessage.d.ts.map +1 -0
  90. package/dist/src/utils/buildRawMessage.js +151 -0
  91. package/dist/src/utils/buildRawMessage.js.map +1 -0
  92. package/dist/src/utils/create-gossip-rpc.d.ts +7 -0
  93. package/dist/src/utils/create-gossip-rpc.d.ts.map +1 -0
  94. package/dist/src/utils/create-gossip-rpc.js +31 -0
  95. package/dist/src/utils/create-gossip-rpc.js.map +1 -0
  96. package/dist/src/utils/index.d.ts +4 -0
  97. package/dist/src/utils/index.d.ts.map +1 -0
  98. package/dist/src/utils/index.js +4 -0
  99. package/dist/src/utils/index.js.map +1 -0
  100. package/dist/src/utils/messageIdToString.d.ts +5 -0
  101. package/dist/src/utils/messageIdToString.d.ts.map +1 -0
  102. package/dist/src/utils/messageIdToString.js +8 -0
  103. package/dist/src/utils/messageIdToString.js.map +1 -0
  104. package/dist/src/utils/msgIdFn.d.ts +10 -0
  105. package/dist/src/utils/msgIdFn.d.ts.map +1 -0
  106. package/dist/src/utils/msgIdFn.js +23 -0
  107. package/dist/src/utils/msgIdFn.js.map +1 -0
  108. package/dist/src/utils/multiaddr.d.ts +3 -0
  109. package/dist/src/utils/multiaddr.d.ts.map +1 -0
  110. package/dist/src/utils/multiaddr.js +15 -0
  111. package/dist/src/utils/multiaddr.js.map +1 -0
  112. package/dist/src/utils/publishConfig.d.ts +8 -0
  113. package/dist/src/utils/publishConfig.d.ts.map +1 -0
  114. package/dist/src/utils/publishConfig.js +25 -0
  115. package/dist/src/utils/publishConfig.js.map +1 -0
  116. package/dist/src/utils/set.d.ts +14 -0
  117. package/dist/src/utils/set.d.ts.map +1 -0
  118. package/dist/src/utils/set.js +41 -0
  119. package/dist/src/utils/set.js.map +1 -0
  120. package/dist/src/utils/shuffle.d.ts +7 -0
  121. package/dist/src/utils/shuffle.d.ts.map +1 -0
  122. package/dist/src/utils/shuffle.js +21 -0
  123. package/dist/src/utils/shuffle.js.map +1 -0
  124. package/dist/src/utils/time-cache.d.ts +22 -0
  125. package/dist/src/utils/time-cache.d.ts.map +1 -0
  126. package/dist/src/utils/time-cache.js +54 -0
  127. package/dist/src/utils/time-cache.js.map +1 -0
  128. package/package.json +142 -0
  129. package/src/config.ts +31 -0
  130. package/src/constants.ts +261 -0
  131. package/src/errors.ts +17 -0
  132. package/src/gossipsub.ts +3061 -0
  133. package/src/index.ts +404 -0
  134. package/src/message/decodeRpc.ts +19 -0
  135. package/src/message/index.ts +1 -0
  136. package/src/message/rpc.proto +58 -0
  137. package/src/message/rpc.ts +848 -0
  138. package/src/message-cache.ts +196 -0
  139. package/src/metrics.ts +1014 -0
  140. package/src/score/compute-score.ts +98 -0
  141. package/src/score/index.ts +3 -0
  142. package/src/score/message-deliveries.ts +95 -0
  143. package/src/score/peer-score-params.ts +316 -0
  144. package/src/score/peer-score-thresholds.ts +70 -0
  145. package/src/score/peer-score.ts +565 -0
  146. package/src/score/peer-stats.ts +33 -0
  147. package/src/score/scoreMetrics.ts +215 -0
  148. package/src/stream.ts +79 -0
  149. package/src/tracer.ts +177 -0
  150. package/src/types.ts +178 -0
  151. package/src/utils/buildRawMessage.ts +174 -0
  152. package/src/utils/create-gossip-rpc.ts +34 -0
  153. package/src/utils/index.ts +3 -0
  154. package/src/utils/messageIdToString.ts +8 -0
  155. package/src/utils/msgIdFn.ts +24 -0
  156. package/src/utils/multiaddr.ts +19 -0
  157. package/src/utils/publishConfig.ts +33 -0
  158. package/src/utils/set.ts +43 -0
  159. package/src/utils/shuffle.ts +21 -0
  160. package/src/utils/time-cache.ts +71 -0
package/src/metrics.ts ADDED
@@ -0,0 +1,1014 @@
1
+ import { TopicValidatorResult } from './index.ts'
2
+ import {
3
+ MessageStatus,
4
+
5
+ RejectReason
6
+
7
+ } from './types.js'
8
+ import type { RPC } from './message/rpc.js'
9
+ import type { PeerScoreThresholds } from './score/peer-score-thresholds.js'
10
+ import type { PeerIdStr, RejectReasonObj, TopicStr, ValidateError } from './types.js'
11
+
12
+ /** Topic label as provided in `topicStrToLabel` */
13
+ export type TopicLabel = string
14
+ export type TopicStrToLabel = Map<TopicStr, TopicLabel>
15
+
16
+ export enum MessageSource {
17
+ forward = 'forward',
18
+ publish = 'publish'
19
+ }
20
+
21
+ type NoLabels = Record<string, never>
22
+ type LabelsGeneric = Record<string, string | number>
23
+ type LabelKeys<Labels extends LabelsGeneric> = Extract<keyof Labels, string>
24
+ interface CollectFn<Labels extends LabelsGeneric> { (metric: Gauge<Labels>): void }
25
+
26
+ export interface Gauge<Labels extends LabelsGeneric = NoLabels> {
27
+ inc: NoLabels extends Labels ? (value?: number) => void : (labels: Labels, value?: number) => void
28
+ set: NoLabels extends Labels ? (value: number) => void : (labels: Labels, value: number) => void
29
+
30
+ addCollect(collectFn: CollectFn<Labels>): void
31
+ }
32
+
33
+ export interface Histogram<Labels extends LabelsGeneric = NoLabels> {
34
+ startTimer(): () => void
35
+
36
+ observe: NoLabels extends Labels ? (value: number) => void : (labels: Labels, value: number) => void
37
+
38
+ reset(): void
39
+ }
40
+
41
+ export interface AvgMinMax<Labels extends LabelsGeneric = NoLabels> {
42
+ set: NoLabels extends Labels ? (values: number[]) => void : (labels: Labels, values: number[]) => void
43
+ }
44
+
45
+ export type GaugeConfig<Labels extends LabelsGeneric> = {
46
+ name: string
47
+ help: string
48
+ } & (NoLabels extends Labels ? { labelNames?: never } : { labelNames: [LabelKeys<Labels>, ...Array<LabelKeys<Labels>>] })
49
+
50
+ export type HistogramConfig<Labels extends LabelsGeneric> = GaugeConfig<Labels> & {
51
+ buckets?: number[]
52
+ }
53
+
54
+ export type AvgMinMaxConfig<Labels extends LabelsGeneric> = GaugeConfig<Labels>
55
+
56
+ export interface MetricsRegister {
57
+ gauge<Labels extends LabelsGeneric = NoLabels>(config: GaugeConfig<Labels>): Gauge<Labels>
58
+ histogram<Labels extends LabelsGeneric = NoLabels>(config: HistogramConfig<Labels>): Histogram<Labels>
59
+ avgMinMax<Labels extends LabelsGeneric = NoLabels>(config: AvgMinMaxConfig<Labels>): AvgMinMax<Labels>
60
+ }
61
+
62
+ export enum InclusionReason {
63
+ /** Peer was a fanaout peer. */
64
+ Fanout = 'fanout',
65
+ /** Included from random selection. */
66
+ Random = 'random',
67
+ /** Peer subscribed. */
68
+ Subscribed = 'subscribed',
69
+ /** On heartbeat, peer was included to fill the outbound quota. */
70
+ Outbound = 'outbound',
71
+ /** On heartbeat, not enough peers in mesh */
72
+ NotEnough = 'not_enough',
73
+ /** On heartbeat opportunistic grafting due to low mesh score */
74
+ Opportunistic = 'opportunistic'
75
+ }
76
+
77
+ /// Reasons why a peer was removed from the mesh.
78
+ export enum ChurnReason {
79
+ /// Peer disconnected.
80
+ Dc = 'disconnected',
81
+ /// Peer had a bad score.
82
+ BadScore = 'bad_score',
83
+ /// Peer sent a PRUNE.
84
+ Prune = 'prune',
85
+ /// Too many peers.
86
+ Excess = 'excess'
87
+ }
88
+
89
+ /// Kinds of reasons a peer's score has been penalized
90
+ export enum ScorePenalty {
91
+ /// A peer grafted before waiting the back-off time.
92
+ GraftBackoff = 'graft_backoff',
93
+ /// A Peer did not respond to an IWANT request in time.
94
+ BrokenPromise = 'broken_promise',
95
+ /// A Peer did not send enough messages as expected.
96
+ MessageDeficit = 'message_deficit',
97
+ /// Too many peers under one IP address.
98
+ IPColocation = 'IP_colocation'
99
+ }
100
+
101
+ export enum IHaveIgnoreReason {
102
+ LowScore = 'low_score',
103
+ MaxIhave = 'max_ihave',
104
+ MaxIasked = 'max_iasked'
105
+ }
106
+
107
+ export enum ScoreThreshold {
108
+ graylist = 'graylist',
109
+ publish = 'publish',
110
+ gossip = 'gossip',
111
+ mesh = 'mesh'
112
+ }
113
+
114
+ export type PeersByScoreThreshold = Record<ScoreThreshold, number>
115
+
116
+ export interface ToSendGroupCount {
117
+ direct: number
118
+ floodsub: number
119
+ mesh: number
120
+ fanout: number
121
+ }
122
+
123
+ export interface ToAddGroupCount {
124
+ fanout: number
125
+ random: number
126
+ }
127
+
128
+ export type PromiseDeliveredStats =
129
+ | { expired: false, requestedCount: number, maxDeliverMs: number }
130
+ | { expired: true, maxDeliverMs: number }
131
+
132
+ export interface TopicScoreWeights<T> { p1w: T, p2w: T, p3w: T, p3bw: T, p4w: T }
133
+ export interface ScoreWeights<T> {
134
+ byTopic: Map<TopicLabel, TopicScoreWeights<T>>
135
+ p5w: T
136
+ p6w: T
137
+ p7w: T
138
+ score: T
139
+ }
140
+
141
+ export type Metrics = ReturnType<typeof getMetrics>
142
+
143
+ /**
144
+ * A collection of metrics used throughout the Gossipsub behaviour.
145
+ * NOTE: except for special reasons, do not add more than 1 label for frequent metrics,
146
+ * there's a performance penalty as of June 2023.
147
+ */
148
+ // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
149
+ export function getMetrics (
150
+ register: MetricsRegister,
151
+ topicStrToLabel: TopicStrToLabel,
152
+ opts: { gossipPromiseExpireSec: number, behaviourPenaltyThreshold: number, maxMeshMessageDeliveriesWindowSec: number }
153
+ ) {
154
+ // Using function style instead of class to prevent having to re-declare all MetricsPrometheus types.
155
+
156
+ return {
157
+ /* Metrics for static config */
158
+ protocolsEnabled: register.gauge<{ protocol: string }>({
159
+ name: 'gossipsub_protocol',
160
+ help: 'Status of enabled protocols',
161
+ labelNames: ['protocol']
162
+ }),
163
+
164
+ /* Metrics per known topic */
165
+ /**
166
+ * Status of our subscription to this topic. This metric allows analyzing other topic metrics
167
+ * filtered by our current subscription status.
168
+ * = rust-libp2p `topic_subscription_status`
169
+ */
170
+ topicSubscriptionStatus: register.gauge<{ topicStr: TopicStr }>({
171
+ name: 'gossipsub_topic_subscription_status',
172
+ help: 'Status of our subscription to this topic',
173
+ labelNames: ['topicStr']
174
+ }),
175
+ /**
176
+ * Number of peers subscribed to each topic. This allows us to analyze a topic's behaviour
177
+ * regardless of our subscription status.
178
+ */
179
+ topicPeersCount: register.gauge<{ topicStr: TopicStr }>({
180
+ name: 'gossipsub_topic_peer_count',
181
+ help: 'Number of peers subscribed to each topic',
182
+ labelNames: ['topicStr']
183
+ }),
184
+
185
+ /* Metrics regarding mesh state */
186
+ /**
187
+ * Number of peers in our mesh. This metric should be updated with the count of peers for a
188
+ * topic in the mesh regardless of inclusion and churn events.
189
+ * = rust-libp2p `mesh_peer_counts`
190
+ */
191
+ meshPeerCounts: register.gauge<{ topicStr: TopicStr }>({
192
+ name: 'gossipsub_mesh_peer_count',
193
+ help: 'Number of peers in our mesh',
194
+ labelNames: ['topicStr']
195
+ }),
196
+ /**
197
+ * Number of times we include peers in a topic mesh for different reasons.
198
+ * = rust-libp2p `mesh_peer_inclusion_events`
199
+ */
200
+ meshPeerInclusionEventsFanout: register.gauge<{ topic: TopicLabel }>({
201
+ name: 'gossipsub_mesh_peer_inclusion_events_fanout_total',
202
+ help: 'Number of times we include peers in a topic mesh for fanout reasons',
203
+ labelNames: ['topic']
204
+ }),
205
+ meshPeerInclusionEventsRandom: register.gauge<{ topic: TopicLabel }>({
206
+ name: 'gossipsub_mesh_peer_inclusion_events_random_total',
207
+ help: 'Number of times we include peers in a topic mesh for random reasons',
208
+ labelNames: ['topic']
209
+ }),
210
+ meshPeerInclusionEventsSubscribed: register.gauge<{ topic: TopicLabel }>({
211
+ name: 'gossipsub_mesh_peer_inclusion_events_subscribed_total',
212
+ help: 'Number of times we include peers in a topic mesh for subscribed reasons',
213
+ labelNames: ['topic']
214
+ }),
215
+ meshPeerInclusionEventsOutbound: register.gauge<{ topic: TopicLabel }>({
216
+ name: 'gossipsub_mesh_peer_inclusion_events_outbound_total',
217
+ help: 'Number of times we include peers in a topic mesh for outbound reasons',
218
+ labelNames: ['topic']
219
+ }),
220
+ meshPeerInclusionEventsNotEnough: register.gauge<{ topic: TopicLabel }>({
221
+ name: 'gossipsub_mesh_peer_inclusion_events_not_enough_total',
222
+ help: 'Number of times we include peers in a topic mesh for not_enough reasons',
223
+ labelNames: ['topic']
224
+ }),
225
+ meshPeerInclusionEventsOpportunistic: register.gauge<{ topic: TopicLabel }>({
226
+ name: 'gossipsub_mesh_peer_inclusion_events_opportunistic_total',
227
+ help: 'Number of times we include peers in a topic mesh for opportunistic reasons',
228
+ labelNames: ['topic']
229
+ }),
230
+ meshPeerInclusionEventsUnknown: register.gauge<{ topic: TopicLabel }>({
231
+ name: 'gossipsub_mesh_peer_inclusion_events_unknown_total',
232
+ help: 'Number of times we include peers in a topic mesh for unknown reasons',
233
+ labelNames: ['topic']
234
+ }),
235
+ /**
236
+ * Number of times we remove peers in a topic mesh for different reasons.
237
+ * = rust-libp2p `mesh_peer_churn_events`
238
+ */
239
+ meshPeerChurnEventsDisconnected: register.gauge<{ topic: TopicLabel }>({
240
+ name: 'gossipsub_peer_churn_events_disconnected_total',
241
+ help: 'Number of times we remove peers in a topic mesh for disconnected reasons',
242
+ labelNames: ['topic']
243
+ }),
244
+ meshPeerChurnEventsBadScore: register.gauge<{ topic: TopicLabel }>({
245
+ name: 'gossipsub_peer_churn_events_bad_score_total',
246
+ help: 'Number of times we remove peers in a topic mesh for bad_score reasons',
247
+ labelNames: ['topic']
248
+ }),
249
+ meshPeerChurnEventsPrune: register.gauge<{ topic: TopicLabel }>({
250
+ name: 'gossipsub_peer_churn_events_prune_total',
251
+ help: 'Number of times we remove peers in a topic mesh for prune reasons',
252
+ labelNames: ['topic']
253
+ }),
254
+ meshPeerChurnEventsExcess: register.gauge<{ topic: TopicLabel }>({
255
+ name: 'gossipsub_peer_churn_events_excess_total',
256
+ help: 'Number of times we remove peers in a topic mesh for excess reasons',
257
+ labelNames: ['topic']
258
+ }),
259
+ meshPeerChurnEventsUnknown: register.gauge<{ topic: TopicLabel }>({
260
+ name: 'gossipsub_peer_churn_events_unknown_total',
261
+ help: 'Number of times we remove peers in a topic mesh for unknown reasons',
262
+ labelNames: ['topic']
263
+ }),
264
+
265
+ /* General Metrics */
266
+ /**
267
+ * Gossipsub supports floodsub, gossipsub v1.0, v1.1, and v1.2. Peers are classified based
268
+ * on which protocol they support. This metric keeps track of the number of peers that are
269
+ * connected of each type.
270
+ */
271
+ peersPerProtocol: register.gauge<{ protocol: string }>({
272
+ name: 'gossipsub_peers_per_protocol_count',
273
+ help: 'Peers connected for each topic',
274
+ labelNames: ['protocol']
275
+ }),
276
+ /** The time it takes to complete one iteration of the heartbeat. */
277
+ heartbeatDuration: register.histogram({
278
+ name: 'gossipsub_heartbeat_duration_seconds',
279
+ help: 'The time it takes to complete one iteration of the heartbeat',
280
+ // Should take <10ms, over 1s it's a huge issue that needs debugging, since a heartbeat will be cancelled
281
+ buckets: [0.01, 0.1, 1]
282
+ }),
283
+ /** Heartbeat run took longer than heartbeat interval so next is skipped */
284
+ heartbeatSkipped: register.gauge({
285
+ name: 'gossipsub_heartbeat_skipped',
286
+ help: 'Heartbeat run took longer than heartbeat interval so next is skipped'
287
+ }),
288
+
289
+ /**
290
+ * Message validation results for each topic.
291
+ * Invalid == Reject?
292
+ * = rust-libp2p `invalid_messages`, `accepted_messages`, `ignored_messages`, `rejected_messages`
293
+ */
294
+ acceptedMessagesTotal: register.gauge<{ topic: TopicLabel }>({
295
+ name: 'gossipsub_accepted_messages_total',
296
+ help: 'Total accepted messages for each topic',
297
+ labelNames: ['topic']
298
+ }),
299
+ ignoredMessagesTotal: register.gauge<{ topic: TopicLabel }>({
300
+ name: 'gossipsub_ignored_messages_total',
301
+ help: 'Total ignored messages for each topic',
302
+ labelNames: ['topic']
303
+ }),
304
+ rejectedMessagesTotal: register.gauge<{ topic: TopicLabel }>({
305
+ name: 'gossipsub_rejected_messages_total',
306
+ help: 'Total rejected messages for each topic',
307
+ labelNames: ['topic']
308
+ }),
309
+ unknownValidationResultsTotal: register.gauge<{ topic: TopicLabel }>({
310
+ name: 'gossipsub_unknown_validation_results_total',
311
+ help: 'Total unknown validation results for each topic',
312
+ labelNames: ['topic']
313
+ }),
314
+ /**
315
+ * When the user validates a message, it tries to re propagate it to its mesh peers. If the
316
+ * message expires from the memcache before it can be validated, we count this a cache miss
317
+ * and it is an indicator that the memcache size should be increased.
318
+ * = rust-libp2p `mcache_misses`
319
+ */
320
+ asyncValidationMcacheHit: register.gauge<{ hit: 'hit' | 'miss' }>({
321
+ name: 'gossipsub_async_validation_mcache_hit_total',
322
+ help: 'Async validation result reported by the user layer',
323
+ labelNames: ['hit']
324
+ }),
325
+
326
+ asyncValidationDelayFromFirstSeenSec: register.histogram({
327
+ name: 'gossipsub_async_validation_delay_from_first_seen',
328
+ help: 'Async validation report delay from first seen in second',
329
+ buckets: [0.01, 0.03, 0.1, 0.3, 1, 3, 10]
330
+ }),
331
+
332
+ asyncValidationUnknownFirstSeen: register.gauge({
333
+ name: 'gossipsub_async_validation_unknown_first_seen_count_total',
334
+ help: 'Async validation report unknown first seen value for message'
335
+ }),
336
+
337
+ // peer stream
338
+ peerReadStreamError: register.gauge({
339
+ name: 'gossipsub_peer_read_stream_err_count_total',
340
+ help: 'Peer read stream error'
341
+ }),
342
+
343
+ // RPC incoming (received). Track byte length + data structure sizes
344
+ rpcRecvBytes: register.gauge({ name: 'gossipsub_rpc_recv_bytes_total', help: 'RPC recv' }),
345
+ rpcRecvCount: register.gauge({ name: 'gossipsub_rpc_recv_count_total', help: 'RPC recv' }),
346
+ rpcRecvSubscription: register.gauge({ name: 'gossipsub_rpc_recv_subscription_total', help: 'RPC recv' }),
347
+ rpcRecvMessage: register.gauge({ name: 'gossipsub_rpc_recv_message_total', help: 'RPC recv' }),
348
+ rpcRecvControl: register.gauge({ name: 'gossipsub_rpc_recv_control_total', help: 'RPC recv' }),
349
+ rpcRecvIHave: register.gauge({ name: 'gossipsub_rpc_recv_ihave_total', help: 'RPC recv' }),
350
+ rpcRecvIWant: register.gauge({ name: 'gossipsub_rpc_recv_iwant_total', help: 'RPC recv' }),
351
+ rpcRecvGraft: register.gauge({ name: 'gossipsub_rpc_recv_graft_total', help: 'RPC recv' }),
352
+ rpcRecvPrune: register.gauge({ name: 'gossipsub_rpc_recv_prune_total', help: 'RPC recv' }),
353
+ rpcDataError: register.gauge({ name: 'gossipsub_rpc_data_err_count_total', help: 'RPC data error' }),
354
+ rpcRecvError: register.gauge({ name: 'gossipsub_rpc_recv_err_count_total', help: 'RPC recv error' }),
355
+
356
+ /** Total count of RPC dropped because acceptFrom() == false */
357
+ rpcRecvNotAccepted: register.gauge({
358
+ name: 'gossipsub_rpc_rcv_not_accepted_total',
359
+ help: 'Total count of RPC dropped because acceptFrom() == false'
360
+ }),
361
+
362
+ // RPC outgoing (sent). Track byte length + data structure sizes
363
+ rpcSentBytes: register.gauge({ name: 'gossipsub_rpc_sent_bytes_total', help: 'RPC sent' }),
364
+ rpcSentCount: register.gauge({ name: 'gossipsub_rpc_sent_count_total', help: 'RPC sent' }),
365
+ rpcSentSubscription: register.gauge({ name: 'gossipsub_rpc_sent_subscription_total', help: 'RPC sent' }),
366
+ rpcSentMessage: register.gauge({ name: 'gossipsub_rpc_sent_message_total', help: 'RPC sent' }),
367
+ rpcSentControl: register.gauge({ name: 'gossipsub_rpc_sent_control_total', help: 'RPC sent' }),
368
+ rpcSentIHave: register.gauge({ name: 'gossipsub_rpc_sent_ihave_total', help: 'RPC sent' }),
369
+ rpcSentIWant: register.gauge({ name: 'gossipsub_rpc_sent_iwant_total', help: 'RPC sent' }),
370
+ rpcSentGraft: register.gauge({ name: 'gossipsub_rpc_sent_graft_total', help: 'RPC sent' }),
371
+ rpcSentPrune: register.gauge({ name: 'gossipsub_rpc_sent_prune_total', help: 'RPC sent' }),
372
+ rpcSentIDontWant: register.gauge({ name: 'gossipsub_rpc_sent_idontwant_total', help: 'RPC sent' }),
373
+
374
+ // publish message. Track peers sent to and bytes
375
+ /** Total count of msg published by topic */
376
+ msgPublishCount: register.gauge<{ topic: TopicLabel }>({
377
+ name: 'gossipsub_msg_publish_count_total',
378
+ help: 'Total count of msg published by topic',
379
+ labelNames: ['topic']
380
+ }),
381
+ /** Total count of peers that we publish a msg to */
382
+ msgPublishPeersByTopic: register.gauge<{ topic: TopicLabel }>({
383
+ name: 'gossipsub_msg_publish_peers_total',
384
+ help: 'Total count of peers that we publish a msg to',
385
+ labelNames: ['topic']
386
+ }),
387
+ /** Total count of peers (by group) that we publish a msg to */
388
+ directPeersPublishedTotal: register.gauge<{ topic: TopicLabel }>({
389
+ name: 'gossipsub_direct_peers_published_total',
390
+ help: 'Total direct peers that we publish a msg to',
391
+ labelNames: ['topic']
392
+ }),
393
+ floodsubPeersPublishedTotal: register.gauge<{ topic: TopicLabel }>({
394
+ name: 'gossipsub_floodsub_peers_published_total',
395
+ help: 'Total floodsub peers that we publish a msg to',
396
+ labelNames: ['topic']
397
+ }),
398
+ meshPeersPublishedTotal: register.gauge<{ topic: TopicLabel }>({
399
+ name: 'gossipsub_mesh_peers_published_total',
400
+ help: 'Total mesh peers that we publish a msg to',
401
+ labelNames: ['topic']
402
+ }),
403
+ fanoutPeersPublishedTotal: register.gauge<{ topic: TopicLabel }>({
404
+ name: 'gossipsub_fanout_peers_published_total',
405
+ help: 'Total fanout peers that we publish a msg to',
406
+ labelNames: ['topic']
407
+ }),
408
+ /** Total count of msg publish data.length bytes */
409
+ msgPublishBytes: register.gauge<{ topic: TopicLabel }>({
410
+ name: 'gossipsub_msg_publish_bytes_total',
411
+ help: 'Total count of msg publish data.length bytes',
412
+ labelNames: ['topic']
413
+ }),
414
+ /** Total time in seconds to publish a message */
415
+ msgPublishTime: register.histogram<{ topic: TopicLabel }>({
416
+ name: 'gossipsub_msg_publish_seconds',
417
+ help: 'Total time in seconds to publish a message',
418
+ buckets: [0.001, 0.002, 0.005, 0.01, 0.1, 0.5, 1],
419
+ labelNames: ['topic']
420
+ }),
421
+
422
+ /** Total count of msg forwarded by topic */
423
+ msgForwardCount: register.gauge<{ topic: TopicLabel }>({
424
+ name: 'gossipsub_msg_forward_count_total',
425
+ help: 'Total count of msg forwarded by topic',
426
+ labelNames: ['topic']
427
+ }),
428
+ /** Total count of peers that we forward a msg to */
429
+ msgForwardPeers: register.gauge<{ topic: TopicLabel }>({
430
+ name: 'gossipsub_msg_forward_peers_total',
431
+ help: 'Total count of peers that we forward a msg to',
432
+ labelNames: ['topic']
433
+ }),
434
+
435
+ /** Total count of recv msgs before any validation */
436
+ msgReceivedPreValidation: register.gauge<{ topic: TopicLabel }>({
437
+ name: 'gossipsub_msg_received_prevalidation_total',
438
+ help: 'Total count of recv msgs before any validation',
439
+ labelNames: ['topic']
440
+ }),
441
+ /** Total count of recv msgs error */
442
+ msgReceivedError: register.gauge<{ topic: TopicLabel }>({
443
+ name: 'gossipsub_msg_received_error_total',
444
+ help: 'Total count of recv msgs error',
445
+ labelNames: ['topic']
446
+ }),
447
+ /** Tracks distribution of recv msgs by duplicate, invalid, valid */
448
+ prevalidationInvalidTotal: register.gauge<{ topic: TopicLabel }>({
449
+ name: 'gossipsub_pre_validation_invalid_total',
450
+ help: 'Total count of invalid messages received',
451
+ labelNames: ['topic']
452
+ }),
453
+ prevalidationValidTotal: register.gauge<{ topic: TopicLabel }>({
454
+ name: 'gossipsub_pre_validation_valid_total',
455
+ help: 'Total count of valid messages received',
456
+ labelNames: ['topic']
457
+ }),
458
+ prevalidationDuplicateTotal: register.gauge<{ topic: TopicLabel }>({
459
+ name: 'gossipsub_pre_validation_duplicate_total',
460
+ help: 'Total count of duplicate messages received',
461
+ labelNames: ['topic']
462
+ }),
463
+ prevalidationUnknownTotal: register.gauge<{ topic: TopicLabel }>({
464
+ name: 'gossipsub_pre_validation_unknown_status_total',
465
+ help: 'Total count of unknown_status messages received',
466
+ labelNames: ['topic']
467
+ }),
468
+ /** Tracks specific reason of invalid */
469
+ msgReceivedInvalid: register.gauge<{ error: RejectReason | ValidateError }>({
470
+ name: 'gossipsub_msg_received_invalid_total',
471
+ help: 'Tracks specific reason of invalid',
472
+ labelNames: ['error']
473
+ }),
474
+ msgReceivedInvalidByTopic: register.gauge<{ topic: TopicLabel }>({
475
+ name: 'gossipsub_msg_received_invalid_by_topic_total',
476
+ help: 'Tracks specific invalid message by topic',
477
+ labelNames: ['topic']
478
+ }),
479
+ /** Track duplicate message delivery time */
480
+ duplicateMsgDeliveryDelay: register.histogram<{ topic: TopicLabel }>({
481
+ name: 'gossisub_duplicate_msg_delivery_delay_seconds',
482
+ help: 'Time since the 1st duplicated message validated',
483
+ labelNames: ['topic'],
484
+ buckets: [
485
+ 0.25 * opts.maxMeshMessageDeliveriesWindowSec,
486
+ 0.5 * opts.maxMeshMessageDeliveriesWindowSec,
487
+ Number(opts.maxMeshMessageDeliveriesWindowSec),
488
+ 2 * opts.maxMeshMessageDeliveriesWindowSec,
489
+ 4 * opts.maxMeshMessageDeliveriesWindowSec
490
+ ]
491
+ }),
492
+ /** Total count of late msg delivery total by topic */
493
+ duplicateMsgLateDelivery: register.gauge<{ topic: TopicLabel }>({
494
+ name: 'gossisub_duplicate_msg_late_delivery_total',
495
+ help: 'Total count of late duplicate message delivery by topic, which triggers P3 penalty',
496
+ labelNames: ['topic']
497
+ }),
498
+
499
+ duplicateMsgIgnored: register.gauge<{ topic: TopicLabel }>({
500
+ name: 'gossisub_ignored_published_duplicate_msgs_total',
501
+ help: 'Total count of published duplicate message ignored by topic',
502
+ labelNames: ['topic']
503
+ }),
504
+
505
+ /* Metrics related to scoring */
506
+ /** Total times score() is called */
507
+ scoreFnCalls: register.gauge({
508
+ name: 'gossipsub_score_fn_calls_total',
509
+ help: 'Total times score() is called'
510
+ }),
511
+ /** Total times score() call actually computed computeScore(), no cache */
512
+ scoreFnRuns: register.gauge({
513
+ name: 'gossipsub_score_fn_runs_total',
514
+ help: 'Total times score() call actually computed computeScore(), no cache'
515
+ }),
516
+ scoreCachedDelta: register.histogram({
517
+ name: 'gossipsub_score_cache_delta',
518
+ help: 'Delta of score between cached values that expired',
519
+ buckets: [10, 100, 1000]
520
+ }),
521
+ /** Current count of peers by score threshold */
522
+ peersByScoreThreshold: register.gauge<{ threshold: ScoreThreshold }>({
523
+ name: 'gossipsub_peers_by_score_threshold_count',
524
+ help: 'Current count of peers by score threshold',
525
+ labelNames: ['threshold']
526
+ }),
527
+ score: register.avgMinMax({
528
+ name: 'gossipsub_score',
529
+ help: 'Avg min max of gossip scores'
530
+ }),
531
/**
 * Separate score weights.
 * Uses a 2-label metric (topic + weight name p1..p7) so each score component
 * can be inspected individually when debugging peer scoring.
 */
scoreWeights: register.avgMinMax<{ topic?: TopicLabel, p: string }>({
  name: 'gossipsub_score_weights',
  help: 'Separate score weights',
  labelNames: ['topic', 'p']
}),
/** Histogram of the scores for each mesh topic. */
// TODO: Not implemented
scorePerMesh: register.avgMinMax<{ topic: TopicLabel }>({
  name: 'gossipsub_score_per_mesh',
  help: 'Histogram of the scores for each mesh topic',
  labelNames: ['topic']
}),
/** A counter of the kind of penalties being applied to peers — incremented via onScorePenalty(). */
// TODO: Not fully implemented
scoringPenalties: register.gauge<{ penalty: ScorePenalty }>({
  name: 'gossipsub_scoring_penalties_total',
  help: 'A counter of the kind of penalties being applied to peers',
  labelNames: ['penalty']
}),
/** Distribution of peers' behaviour_penalty counters, sampled at each scrape */
behaviourPenalty: register.histogram({
  name: 'gossipsub_peer_stat_behaviour_penalty',
  help: 'Current peer stat behaviour_penalty at each scrape',
  // Buckets scale with the configured behaviourPenaltyThreshold so the distribution
  // stays informative regardless of the deployment's threshold value
  buckets: [
    0.25 * opts.behaviourPenaltyThreshold,
    0.5 * opts.behaviourPenaltyThreshold,
    Number(opts.behaviourPenaltyThreshold),
    2 * opts.behaviourPenaltyThreshold,
    4 * opts.behaviourPenaltyThreshold
  ]
}),
565
+
566
+ // TODO:
567
+ // - iasked per peer (on heartbeat)
568
+ // - when promise is resolved, track messages from promises
569
+
570
/** Total received IHAVE messages that we ignore for some reason */
ihaveRcvIgnored: register.gauge<{ reason: IHaveIgnoreReason }>({
  name: 'gossipsub_ihave_rcv_ignored_total',
  help: 'Total received IHAVE messages that we ignore for some reason',
  labelNames: ['reason']
}),
/** Total received IHAVE message ids by topic — incremented via onIhaveRcv() */
ihaveRcvMsgids: register.gauge<{ topic: TopicLabel }>({
  name: 'gossipsub_ihave_rcv_msgids_total',
  help: 'Total received IHAVE messages by topic',
  labelNames: ['topic']
}),
/**
 * Total messages per topic we don't have. Not actual requests.
 * The number of times we have decided that an IWANT control message is required for this
 * topic. A very high metric might indicate an underperforming network.
 * = rust-libp2p `topic_iwant_msgs`
 */
ihaveRcvNotSeenMsgids: register.gauge<{ topic: TopicLabel }>({
  name: 'gossipsub_ihave_rcv_not_seen_msgids_total',
  help: 'Total messages per topic we do not have, not actual requests',
  labelNames: ['topic']
}),
593
+
594
/** Total received IWANT message ids by topic — incremented via onIwantRcv() */
iwantRcvMsgids: register.gauge<{ topic: TopicLabel }>({
  name: 'gossipsub_iwant_rcv_msgids_total',
  help: 'Total received IWANT messages by topic',
  labelNames: ['topic']
}),
/** Total requested messageIDs that we don't have */
iwantRcvDonthaveMsgids: register.gauge({
  name: 'gossipsub_iwant_rcv_dont_have_msgids_total',
  help: 'Total requested messageIDs that we do not have'
}),
/** Total received IDONTWANT message ids — incremented via onIdontwantRcv() */
idontwantRcvMsgids: register.gauge({
  name: 'gossipsub_idontwant_rcv_msgids_total',
  help: 'Total received IDONTWANT messages'
}),
/** Total received IDONTWANT messageIDs that we don't have in mcache */
idontwantRcvDonthaveMsgids: register.gauge({
  name: 'gossipsub_idontwant_rcv_dont_have_msgids_total',
  help: 'Total received IDONTWANT messageIDs that we do not have in mcache'
}),
615
/** Total count of started IWANT promises */
// NOTE(review): variable says 'Started' but the metric name says 'sent' — confirm intended
iwantPromiseStarted: register.gauge({
  name: 'gossipsub_iwant_promise_sent_total',
  help: 'Total count of started IWANT promises'
}),
/** Total count of resolved IWANT promises */
iwantPromiseResolved: register.gauge({
  name: 'gossipsub_iwant_promise_resolved_total',
  help: 'Total count of resolved IWANT promises'
}),
/** Total count of resolved IWANT promises from duplicate messages */
iwantPromiseResolvedFromDuplicate: register.gauge({
  name: 'gossipsub_iwant_promise_resolved_from_duplicate_total',
  help: 'Total count of resolved IWANT promises from duplicate messages'
}),
/** Total count of peers we have asked IWANT promises that are resolved */
iwantPromiseResolvedPeers: register.gauge({
  name: 'gossipsub_iwant_promise_resolved_peers',
  help: 'Total count of peers we have asked IWANT promises that are resolved'
}),
/** Total count of broken IWANT promises */
iwantPromiseBroken: register.gauge({
  name: 'gossipsub_iwant_promise_broken',
  help: 'Total count of broken IWANT promises'
}),
/** Total count of pruned IWANT messages */
iwantMessagePruned: register.gauge({
  name: 'gossipsub_iwant_message_pruned',
  help: 'Total count of pruned IWANT messages'
}),
/** Histogram of delivery time of resolved IWANT promises */
iwantPromiseDeliveryTime: register.histogram({
  name: 'gossipsub_iwant_promise_delivery_seconds',
  help: 'Histogram of delivery time of resolved IWANT promises',
  // Buckets scale with the configured promise-expiry window
  buckets: [
    0.5 * opts.gossipPromiseExpireSec,
    Number(opts.gossipPromiseExpireSec),
    2 * opts.gossipPromiseExpireSec,
    4 * opts.gossipPromiseExpireSec
  ]
}),
653
/** Total count of IWANT promises that could not be tracked */
iwantPromiseUntracked: register.gauge({
  // Fixed metric-name prefix: 'gossip_' -> 'gossipsub_' for consistency with the rest of
  // the registry. NOTE(review): consumers scraping the old name must be updated.
  name: 'gossipsub_iwant_promise_untracked',
  help: 'Total count of untracked IWANT promise'
}),
657
/** Distribution of per-peer backoff times, sampled at each scrape */
connectedPeersBackoffSec: register.histogram({
  name: 'gossipsub_connected_peers_backoff_seconds',
  help: 'Backoff time in seconds',
  // Using 1 second as the minimum as that's close to the heartbeat duration, no need for more resolution.
  // As per spec, backoff times are 10 seconds for UnsubscribeBackoff and 60 seconds for PruneBackoff.
  // Values above 60 seconds should not occur, but we add 120 seconds just in case.
  // https://github.com/libp2p/specs/blob/master/pubsub/gossipsub/gossipsub-v1.1.md#overview-of-new-parameters
  buckets: [1, 2, 4, 10, 20, 60, 120]
}),
667
+
668
/* Data structure sizes */
/** Unbounded cache sizes */
cacheSize: register.gauge<{ cache: string }>({
  name: 'gossipsub_cache_size',
  help: 'Unbounded cache sizes',
  labelNames: ['cache']
}),
/** Current mcache msg count */
mcacheSize: register.gauge({
  name: 'gossipsub_mcache_size',
  help: 'Current mcache msg count'
}),
/** Current count of mcache messages not yet validated */
mcacheNotValidatedCount: register.gauge({
  name: 'gossipsub_mcache_not_validated_count',
  help: 'Current mcache msg count not validated'
}),

/** Total count of key collisions on fastmsgid cache put */
fastMsgIdCacheCollision: register.gauge({
  name: 'gossipsub_fastmsgid_cache_collision_total',
  help: 'Total count of key collisions on fastmsgid cache put'
}),

/** Total new connections, labeled by status */
newConnectionCount: register.gauge<{ status: string }>({
  name: 'gossipsub_new_connection_total',
  help: 'Total new connection by status',
  labelNames: ['status']
}),

// topicStr -> TopicLabel lookup used by toTopic() and registerScorePerMesh();
// unmapped topics fall back to the raw string or 'unknown' at the use site
topicStrToLabel,
697
+
698
/** Resolve a raw topic string to its metric label; unmapped topics keep the raw string. */
toTopic (topicStr: TopicStr): TopicLabel {
  const label = this.topicStrToLabel.get(topicStr)
  if (label == null) {
    return topicStr
  }
  return label
},
701
+
702
/** We joined a topic: mark it subscribed and zero its mesh peer count. */
onJoin (topicStr: TopicStr): void {
  // Reset the mesh peer count first; it is rebuilt as peers are grafted
  this.meshPeerCounts.set({ topicStr }, 0)
  this.topicSubscriptionStatus.set({ topicStr }, 1)
},
707
+
708
/** We left a topic: mark it unsubscribed and zero its mesh peer count. */
onLeave (topicStr: TopicStr): void {
  // Reset the mesh peer count as well, since we no longer hold a mesh for this topic
  this.meshPeerCounts.set({ topicStr }, 0)
  this.topicSubscriptionStatus.set({ topicStr }, 0)
},
713
+
714
/** Register the inclusion of peers in our mesh due to some reason. */
onAddToMesh (topicStr: TopicStr, reason: InclusionReason, count: number): void {
  const topic = this.toTopic(topicStr)
  // Select the per-reason counter once, then increment a single call site
  let counter
  switch (reason) {
    case InclusionReason.Fanout:
      counter = this.meshPeerInclusionEventsFanout
      break
    case InclusionReason.Random:
      counter = this.meshPeerInclusionEventsRandom
      break
    case InclusionReason.Subscribed:
      counter = this.meshPeerInclusionEventsSubscribed
      break
    case InclusionReason.Outbound:
      counter = this.meshPeerInclusionEventsOutbound
      break
    case InclusionReason.NotEnough:
      counter = this.meshPeerInclusionEventsNotEnough
      break
    case InclusionReason.Opportunistic:
      counter = this.meshPeerInclusionEventsOpportunistic
      break
    default:
      // Unrecognized reasons are still tracked so they are never silently dropped
      counter = this.meshPeerInclusionEventsUnknown
      break
  }
  counter.inc({ topic }, count)
},
741
+
742
/** Register the removal of peers from our mesh due to some reason */
// Call sites:
// - remove_peer_from_mesh()
// - heartbeat() Churn::BadScore
// - heartbeat() Churn::Excess
// - on_disconnect() Churn::Ds
onRemoveFromMesh (topicStr: TopicStr, reason: ChurnReason, count: number): void {
  const topic = this.toTopic(topicStr)
  // Select the per-reason counter once, then increment a single call site
  let counter
  switch (reason) {
    case ChurnReason.Dc:
      counter = this.meshPeerChurnEventsDisconnected
      break
    case ChurnReason.BadScore:
      counter = this.meshPeerChurnEventsBadScore
      break
    case ChurnReason.Prune:
      counter = this.meshPeerChurnEventsPrune
      break
    case ChurnReason.Excess:
      counter = this.meshPeerChurnEventsExcess
      break
    default:
      // Unrecognized reasons are still tracked so they are never silently dropped
      counter = this.meshPeerChurnEventsUnknown
      break
  }
  counter.inc({ topic }, count)
},
767
+
768
/**
 * Update validation result to metrics
 *
 * @param messageRecord - null means the message's mcache record was not known at the time of acceptance report
 */
onReportValidation (
  messageRecord: { message: { topic: TopicStr } } | null,
  acceptance: TopicValidatorResult,
  firstSeenTimestampMs: number | null
): void {
  // Track whether the mcache still held this message when the validation result arrived
  this.asyncValidationMcacheHit.inc({ hit: messageRecord != null ? 'hit' : 'miss' })

  if (messageRecord != null) {
    const topic = this.toTopic(messageRecord.message.topic)
    // Pick the per-acceptance counter, then increment once
    let counter
    switch (acceptance) {
      case TopicValidatorResult.Accept:
        counter = this.acceptedMessagesTotal
        break
      case TopicValidatorResult.Ignore:
        counter = this.ignoredMessagesTotal
        break
      case TopicValidatorResult.Reject:
        counter = this.rejectedMessagesTotal
        break
      default:
        counter = this.unknownValidationResultsTotal
        break
    }
    counter.inc({ topic })
  }

  // Observe the delay between first sighting and the validation report, when known
  if (firstSeenTimestampMs == null) {
    this.asyncValidationUnknownFirstSeen.inc()
  } else {
    this.asyncValidationDelayFromFirstSeenSec.observe((Date.now() - firstSeenTimestampMs) / 1000)
  }
},
804
+
805
/**
 * Register a score penalty applied to a peer.
 *
 * Call sites:
 * - in handle_graft() Penalty::GraftBackoff
 * - in apply_iwant_penalties() Penalty::BrokenPromise
 * - in metric_score() P3 Penalty::MessageDeficit
 * - in metric_score() P6 Penalty::IPColocation
 */
onScorePenalty (penalty: ScorePenalty): void {
  // Can this be labeled by topic too?
  this.scoringPenalties.inc({ penalty }, 1)
},
815
+
816
/** Record a received IHAVE: total ids seen and how many we did not already have. */
onIhaveRcv (topicStr: TopicStr, ihave: number, idonthave: number): void {
  const topicLabel = this.toTopic(topicStr)
  this.ihaveRcvMsgids.inc({ topic: topicLabel }, ihave)
  this.ihaveRcvNotSeenMsgids.inc({ topic: topicLabel }, idonthave)
},
821
+
822
/** Record a received IWANT: per-topic requested ids and how many we did not have. */
onIwantRcv (iwantByTopic: Map<TopicStr, number>, iwantDonthave: number): void {
  this.iwantRcvDonthaveMsgids.inc(iwantDonthave)

  iwantByTopic.forEach((iwantCount, topicStr) => {
    this.iwantRcvMsgids.inc({ topic: this.toTopic(topicStr) }, iwantCount)
  })
},
830
+
831
/** Record a received IDONTWANT: ids received and how many were absent from mcache. */
onIdontwantRcv (idontwant: number, idontwantDonthave: number): void {
  this.idontwantRcvDonthaveMsgids.inc(idontwantDonthave)
  this.idontwantRcvMsgids.inc(idontwant)
},
835
+
836
/** Record a forwarded message and the number of peers it was sent to. */
onForwardMsg (topicStr: TopicStr, tosendCount: number): void {
  const topicLabel = this.toTopic(topicStr)
  this.msgForwardCount.inc({ topic: topicLabel }, 1)
  this.msgForwardPeers.inc({ topic: topicLabel }, tosendCount)
},
841
+
842
/**
 * Record a published message: counts, bytes, per-group recipient counts, and publish time.
 *
 * @param tosendGroupCount - recipient breakdown by group (direct/floodsub/mesh/fanout)
 * @param tosendCount - total number of peers the message was sent to
 * @param dataLen - message payload length in bytes
 * @param ms - time spent publishing, in milliseconds
 */
onPublishMsg (
  topicStr: TopicStr,
  tosendGroupCount: ToSendGroupCount,
  tosendCount: number,
  dataLen: number,
  ms: number
): void {
  const topicLabel = this.toTopic(topicStr)
  this.msgPublishCount.inc({ topic: topicLabel }, 1)
  // Bytes on the wire scale with the number of recipients
  this.msgPublishBytes.inc({ topic: topicLabel }, tosendCount * dataLen)
  this.msgPublishPeersByTopic.inc({ topic: topicLabel }, tosendCount)
  this.directPeersPublishedTotal.inc({ topic: topicLabel }, tosendGroupCount.direct)
  this.floodsubPeersPublishedTotal.inc({ topic: topicLabel }, tosendGroupCount.floodsub)
  this.meshPeersPublishedTotal.inc({ topic: topicLabel }, tosendGroupCount.mesh)
  this.fanoutPeersPublishedTotal.inc({ topic: topicLabel }, tosendGroupCount.fanout)
  this.msgPublishTime.observe({ topic: topicLabel }, ms / 1000)
},
859
+
860
/** Record a message received before validation has run. */
onMsgRecvPreValidation (topicStr: TopicStr): void {
  this.msgReceivedPreValidation.inc({ topic: this.toTopic(topicStr) }, 1)
},
864
+
865
/** Record an error while receiving a message on the given topic. */
onMsgRecvError (topicStr: TopicStr): void {
  this.msgReceivedError.inc({ topic: this.toTopic(topicStr) }, 1)
},
869
+
870
/** Record the pre-validation outcome (duplicate/invalid/valid) for a received message. */
onPrevalidationResult (topicStr: TopicStr, status: MessageStatus): void {
  const topic = this.toTopic(topicStr)
  // Pick the per-status counter, then increment once
  let counter
  switch (status) {
    case MessageStatus.duplicate:
      counter = this.prevalidationDuplicateTotal
      break
    case MessageStatus.invalid:
      counter = this.prevalidationInvalidTotal
      break
    case MessageStatus.valid:
      counter = this.prevalidationValidTotal
      break
    default:
      // Unrecognized statuses are still tracked so they are never silently dropped
      counter = this.prevalidationUnknownTotal
      break
  }
  counter.inc({ topic })
},
887
+
888
+ onMsgRecvInvalid (topicStr: TopicStr, reason: RejectReasonObj): void {
889
+ const topic = this.toTopic(topicStr)
890
+
891
+ const error = reason.reason === RejectReason.Error ? reason.error : reason.reason
892
+ this.msgReceivedInvalid.inc({ error }, 1)
893
+ this.msgReceivedInvalidByTopic.inc({ topic }, 1)
894
+ },
895
+
896
+ onDuplicateMsgDelivery (topicStr: TopicStr, deliveryDelayMs: number, isLateDelivery: boolean): void {
897
+ const topic = this.toTopic(topicStr)
898
+ this.duplicateMsgDeliveryDelay.observe({ topic }, deliveryDelayMs / 1000)
899
+ if (isLateDelivery) {
900
+ this.duplicateMsgLateDelivery.inc({ topic }, 1)
901
+ }
902
+ },
903
+
904
/** Record a publish attempt that was ignored because the message was a duplicate. */
onPublishDuplicateMsg (topicStr: TopicStr): void {
  this.duplicateMsgIgnored.inc({ topic: this.toTopic(topicStr) }, 1)
},
908
+
909
/** Increment the peer read-stream error counter. */
onPeerReadStreamError (): void {
  this.peerReadStreamError.inc(1)
},

/** Increment the RPC receive error counter. */
onRpcRecvError (): void {
  this.rpcRecvError.inc(1)
},

/** Increment the RPC data error counter. */
onRpcDataError (): void {
  this.rpcDataError.inc(1)
},
920
+
921
/** Record a received RPC: size, and counts of its subscriptions, messages, and control frames. */
onRpcRecv (rpc: RPC, rpcBytes: number): void {
  this.rpcRecvCount.inc(1)
  this.rpcRecvBytes.inc(rpcBytes)

  const { subscriptions, messages, control } = rpc
  if (subscriptions != null) { this.rpcRecvSubscription.inc(subscriptions.length) }
  if (messages != null) { this.rpcRecvMessage.inc(messages.length) }

  if (control == null) {
    return
  }
  this.rpcRecvControl.inc(1)
  if (control.ihave != null) { this.rpcRecvIHave.inc(control.ihave.length) }
  if (control.iwant != null) { this.rpcRecvIWant.inc(control.iwant.length) }
  if (control.graft != null) { this.rpcRecvGraft.inc(control.graft.length) }
  if (control.prune != null) { this.rpcRecvPrune.inc(control.prune.length) }
},
934
+
935
/** Record a sent RPC: size, and counts of its subscriptions, messages, and control frames. */
onRpcSent (rpc: RPC, rpcBytes: number): void {
  this.rpcSentCount.inc(1)
  this.rpcSentBytes.inc(rpcBytes)

  if (rpc.subscriptions != null) { this.rpcSentSubscription.inc(rpc.subscriptions.length) }
  if (rpc.messages != null) { this.rpcSentMessage.inc(rpc.messages.length) }

  const control = rpc.control
  if (control == null) {
    return
  }
  const ihaveCount = control.ihave?.length ?? 0
  const iwantCount = control.iwant?.length ?? 0
  const graftCount = control.graft?.length ?? 0
  const pruneCount = control.prune?.length ?? 0
  const idontwantCount = control.idontwant?.length ?? 0
  if (ihaveCount > 0) { this.rpcSentIHave.inc(ihaveCount) }
  if (iwantCount > 0) { this.rpcSentIWant.inc(iwantCount) }
  if (graftCount > 0) { this.rpcSentGraft.inc(graftCount) }
  if (pruneCount > 0) { this.rpcSentPrune.inc(pruneCount) }
  if (idontwantCount > 0) { this.rpcSentIDontWant.inc(idontwantCount) }
  // Only count the control frame when it actually carried something
  if (ihaveCount + iwantCount + graftCount + pruneCount + idontwantCount > 0) { this.rpcSentControl.inc(1) }
},
954
+
955
/** Snapshot peer counts per score threshold, plus avg/min/max of all scores. */
registerScores (scores: number[], scoreThresholds: PeerScoreThresholds): void {
  let graylistCount = 0
  let publishCount = 0
  let gossipCount = 0
  let meshCount = 0

  // Single pass: each score may satisfy several thresholds at once
  scores.forEach((score) => {
    if (score >= scoreThresholds.graylistThreshold) { graylistCount++ }
    if (score >= scoreThresholds.publishThreshold) { publishCount++ }
    if (score >= scoreThresholds.gossipThreshold) { gossipCount++ }
    // Mesh eligibility uses a fixed 0 boundary rather than a configured threshold
    if (score >= 0) { meshCount++ }
  })

  this.peersByScoreThreshold.set({ threshold: ScoreThreshold.graylist }, graylistCount)
  this.peersByScoreThreshold.set({ threshold: ScoreThreshold.publish }, publishCount)
  this.peersByScoreThreshold.set({ threshold: ScoreThreshold.gossip }, gossipCount)
  this.peersByScoreThreshold.set({ threshold: ScoreThreshold.mesh }, meshCount)

  // Register full score too
  this.score.set(scores)
},
976
+
977
/** Snapshot the separate score weight components: per-topic p1..p4, global p5..p7. */
registerScoreWeights (sw: ScoreWeights<number[]>): void {
  // Per-topic weights
  sw.byTopic.forEach((wsTopic, topic) => {
    this.scoreWeights.set({ topic, p: 'p1' }, wsTopic.p1w)
    this.scoreWeights.set({ topic, p: 'p2' }, wsTopic.p2w)
    this.scoreWeights.set({ topic, p: 'p3' }, wsTopic.p3w)
    this.scoreWeights.set({ topic, p: 'p3b' }, wsTopic.p3bw)
    this.scoreWeights.set({ topic, p: 'p4' }, wsTopic.p4w)
  })

  // Topic-independent weights
  this.scoreWeights.set({ p: 'p5' }, sw.p5w)
  this.scoreWeights.set({ p: 'p6' }, sw.p6w)
  this.scoreWeights.set({ p: 'p7' }, sw.p7w)
},
990
+
991
/** Snapshot the score distribution of mesh peers, aggregated per topic label. */
registerScorePerMesh (mesh: Map<TopicStr, Set<PeerIdStr>>, scoreByPeer: Map<PeerIdStr, number>): void {
  // Aggregate by known topicLabel, bucketing unmapped topics under 'unknown'
  // to keep metric cardinality bounded
  const peersByLabel = new Map<TopicLabel, Set<PeerIdStr>>()

  for (const [topicStr, peers] of mesh) {
    const topicLabel = this.topicStrToLabel.get(topicStr) ?? 'unknown'
    let bucket = peersByLabel.get(topicLabel)
    if (bucket == null) {
      bucket = new Set()
      peersByLabel.set(topicLabel, bucket)
    }
    for (const peer of peers) {
      bucket.add(peer)
    }
  }

  for (const [topic, peers] of peersByLabel) {
    const meshScores: number[] = []
    for (const peer of peers) {
      // Peers with no recorded score count as 0
      meshScores.push(scoreByPeer.get(peer) ?? 0)
    }
    this.scorePerMesh.set({ topic }, meshScores)
  }
}
1013
+ }
1014
+ }