librats 0.3.1 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +405 -405
- package/binding.gyp +96 -95
- package/lib/index.d.ts +522 -522
- package/lib/index.js +82 -82
- package/native-src/3rdparty/android/ifaddrs-android.c +600 -0
- package/native-src/3rdparty/android/ifaddrs-android.h +54 -0
- package/native-src/CMakeLists.txt +360 -0
- package/native-src/LICENSE +21 -0
- package/native-src/src/bencode.cpp +485 -0
- package/native-src/src/bencode.h +145 -0
- package/native-src/src/bittorrent.cpp +3682 -0
- package/native-src/src/bittorrent.h +731 -0
- package/native-src/src/dht.cpp +2342 -0
- package/native-src/src/dht.h +501 -0
- package/native-src/src/encrypted_socket.cpp +817 -0
- package/native-src/src/encrypted_socket.h +239 -0
- package/native-src/src/file_transfer.cpp +1808 -0
- package/native-src/src/file_transfer.h +567 -0
- package/native-src/src/fs.cpp +639 -0
- package/native-src/src/fs.h +108 -0
- package/native-src/src/gossipsub.cpp +1137 -0
- package/native-src/src/gossipsub.h +403 -0
- package/native-src/src/ice.cpp +1386 -0
- package/native-src/src/ice.h +328 -0
- package/native-src/src/json.hpp +25526 -0
- package/native-src/src/krpc.cpp +558 -0
- package/native-src/src/krpc.h +145 -0
- package/native-src/src/librats.cpp +2715 -0
- package/native-src/src/librats.h +1729 -0
- package/native-src/src/librats_bittorrent.cpp +167 -0
- package/native-src/src/librats_c.cpp +1317 -0
- package/native-src/src/librats_c.h +237 -0
- package/native-src/src/librats_encryption.cpp +123 -0
- package/native-src/src/librats_file_transfer.cpp +226 -0
- package/native-src/src/librats_gossipsub.cpp +293 -0
- package/native-src/src/librats_ice.cpp +515 -0
- package/native-src/src/librats_logging.cpp +158 -0
- package/native-src/src/librats_mdns.cpp +171 -0
- package/native-src/src/librats_nat.cpp +571 -0
- package/native-src/src/librats_persistence.cpp +815 -0
- package/native-src/src/logger.h +412 -0
- package/native-src/src/mdns.cpp +1178 -0
- package/native-src/src/mdns.h +253 -0
- package/native-src/src/network_utils.cpp +598 -0
- package/native-src/src/network_utils.h +162 -0
- package/native-src/src/noise.cpp +981 -0
- package/native-src/src/noise.h +227 -0
- package/native-src/src/os.cpp +371 -0
- package/native-src/src/os.h +40 -0
- package/native-src/src/rats_export.h +17 -0
- package/native-src/src/sha1.cpp +163 -0
- package/native-src/src/sha1.h +42 -0
- package/native-src/src/socket.cpp +1376 -0
- package/native-src/src/socket.h +309 -0
- package/native-src/src/stun.cpp +484 -0
- package/native-src/src/stun.h +349 -0
- package/native-src/src/threadmanager.cpp +105 -0
- package/native-src/src/threadmanager.h +53 -0
- package/native-src/src/tracker.cpp +1110 -0
- package/native-src/src/tracker.h +268 -0
- package/native-src/src/version.cpp +24 -0
- package/native-src/src/version.h.in +45 -0
- package/native-src/version.rc.in +31 -0
- package/package.json +62 -68
- package/scripts/build-librats.js +241 -194
- package/scripts/postinstall.js +52 -52
- package/scripts/prepare-package.js +187 -91
- package/scripts/verify-installation.js +119 -119
- package/src/librats_node.cpp +1174 -1174
|
@@ -0,0 +1,1137 @@
|
|
|
1
|
+
#include "gossipsub.h"
|
|
2
|
+
#include "librats.h"
|
|
3
|
+
#include "logger.h"
|
|
4
|
+
#include "sha1.h"
|
|
5
|
+
#include <algorithm>
|
|
6
|
+
#include <sstream>
|
|
7
|
+
#include <iomanip>
|
|
8
|
+
|
|
9
|
+
// GossipSub logging macros
|
|
10
|
+
#define LOG_GOSSIPSUB_DEBUG(message) LOG_DEBUG("gossipsub", message)
|
|
11
|
+
#define LOG_GOSSIPSUB_INFO(message) LOG_INFO("gossipsub", message)
|
|
12
|
+
#define LOG_GOSSIPSUB_WARN(message) LOG_WARN("gossipsub", message)
|
|
13
|
+
#define LOG_GOSSIPSUB_ERROR(message) LOG_ERROR("gossipsub", message)
|
|
14
|
+
|
|
15
|
+
namespace librats {
|
|
16
|
+
|
|
17
|
+
// Helper function to convert GossipSubMessageType to string
|
|
18
|
+
// Map a GossipSubMessageType to its wire-protocol name.
// Any value outside the known set maps to "unknown".
std::string gossipsub_message_type_to_string(GossipSubMessageType type) {
    if (type == GossipSubMessageType::SUBSCRIBE)   return "subscribe";
    if (type == GossipSubMessageType::UNSUBSCRIBE) return "unsubscribe";
    if (type == GossipSubMessageType::PUBLISH)     return "publish";
    if (type == GossipSubMessageType::GOSSIP)      return "gossip";
    if (type == GossipSubMessageType::GRAFT)       return "graft";
    if (type == GossipSubMessageType::PRUNE)       return "prune";
    if (type == GossipSubMessageType::IHAVE)       return "ihave";
    if (type == GossipSubMessageType::IWANT)       return "iwant";
    if (type == GossipSubMessageType::HEARTBEAT)   return "heartbeat";
    return "unknown";
}
|
|
32
|
+
|
|
33
|
+
// Parse a wire-protocol name back into a GossipSubMessageType.
// Unrecognized names fall back to HEARTBEAT (the original default), which is
// harmless to process.
GossipSubMessageType string_to_gossipsub_message_type(const std::string& str) {
    struct Mapping {
        const char* name;
        GossipSubMessageType type;
    };
    static const Mapping kMappings[] = {
        {"subscribe",   GossipSubMessageType::SUBSCRIBE},
        {"unsubscribe", GossipSubMessageType::UNSUBSCRIBE},
        {"publish",     GossipSubMessageType::PUBLISH},
        {"gossip",      GossipSubMessageType::GOSSIP},
        {"graft",       GossipSubMessageType::GRAFT},
        {"prune",       GossipSubMessageType::PRUNE},
        {"ihave",       GossipSubMessageType::IHAVE},
        {"iwant",       GossipSubMessageType::IWANT},
        {"heartbeat",   GossipSubMessageType::HEARTBEAT},
    };

    for (const auto& mapping : kMappings) {
        if (str == mapping.name) {
            return mapping.type;
        }
    }
    return GossipSubMessageType::HEARTBEAT;  // Default fallback
}
|
|
45
|
+
|
|
46
|
+
//=============================================================================
|
|
47
|
+
// PeerScore Implementation
|
|
48
|
+
//=============================================================================
|
|
49
|
+
|
|
50
|
+
void PeerScore::update_score() {
|
|
51
|
+
auto now = std::chrono::steady_clock::now();
|
|
52
|
+
auto time_since_update = std::chrono::duration_cast<std::chrono::seconds>(now - last_updated).count();
|
|
53
|
+
auto connection_duration = std::chrono::duration_cast<std::chrono::seconds>(now - connected_since).count();
|
|
54
|
+
|
|
55
|
+
// Update topic score based on participation
|
|
56
|
+
topic_score = (std::min)(10.0, connection_duration / 60.0); // Up to 10 points for long connections
|
|
57
|
+
|
|
58
|
+
// Update delivery score
|
|
59
|
+
if (messages_delivered > 0) {
|
|
60
|
+
delivery_score = (std::min)(20.0, static_cast<double>(messages_delivered) / 10.0);
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// Update mesh behavior score
|
|
64
|
+
double graft_prune_ratio = (graft_requests + prune_requests > 0) ?
|
|
65
|
+
static_cast<double>(graft_requests) / (graft_requests + prune_requests) : 0.5;
|
|
66
|
+
mesh_behavior_score = (graft_prune_ratio - 0.5) * 10.0; // -5 to +5 points
|
|
67
|
+
|
|
68
|
+
// Update invalid message penalty
|
|
69
|
+
if (messages_invalid > 0) {
|
|
70
|
+
invalid_message_score = -static_cast<double>(messages_invalid) * 5.0; // -5 points per invalid message
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// Calculate total score
|
|
74
|
+
score = topic_score + delivery_score + mesh_behavior_score + invalid_message_score;
|
|
75
|
+
|
|
76
|
+
last_updated = now;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
//=============================================================================
|
|
80
|
+
// GossipSub Implementation
|
|
81
|
+
//=============================================================================
|
|
82
|
+
|
|
83
|
+
// Wire this GossipSub instance into the owning RatsClient by registering a
// handler for the "gossipsub" message channel.
// NOTE(review): the lambda captures `this`; the RatsClient must never invoke
// it after this object is destroyed — confirm unregistration semantics.
GossipSub::GossipSub(RatsClient& rats_client, const GossipSubConfig& config)
    : rats_client_(rats_client), config_(config), running_(false), rng_(std::random_device{}()) {
    rats_client_.on("gossipsub", [this](const std::string& peer_id, const nlohmann::json& message) {
        handle_gossipsub_message(peer_id, message);
    });
}
|
|
91
|
+
|
|
92
|
+
// Teardown: stop() is idempotent, so this is safe even if the service was
// never started or was already stopped.
GossipSub::~GossipSub() {
    stop();
}
|
|
95
|
+
|
|
96
|
+
// Start the service and its heartbeat thread.
// Returns false if the service is already running.
bool GossipSub::start() {
    if (running_.load()) {
        return false;
    }
    running_.store(true);

    // The heartbeat loop drives periodic mesh maintenance until stop()
    // clears running_.
    heartbeat_thread_ = std::thread(&GossipSub::heartbeat_loop, this);

    LOG_GOSSIPSUB_INFO("GossipSub service started");
    return true;
}
|
|
109
|
+
|
|
110
|
+
void GossipSub::stop() {
|
|
111
|
+
if (!running_.load()) {
|
|
112
|
+
return; // Already stopped
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
LOG_GOSSIPSUB_INFO("GossipSub service stopping");
|
|
116
|
+
|
|
117
|
+
{
|
|
118
|
+
// Unsubscribe from all topics
|
|
119
|
+
std::lock_guard<std::mutex> topics_lock(topics_mutex_);
|
|
120
|
+
for (const auto& topic : subscribed_topics_) {
|
|
121
|
+
nlohmann::json payload;
|
|
122
|
+
payload["topic"] = topic;
|
|
123
|
+
broadcast_gossipsub_message(GossipSubMessageType::UNSUBSCRIBE, payload);
|
|
124
|
+
}
|
|
125
|
+
subscribed_topics_.clear();
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
running_.store(false);
|
|
129
|
+
|
|
130
|
+
// Notify the heartbeat thread to wake up immediately
|
|
131
|
+
{
|
|
132
|
+
std::lock_guard<std::mutex> lock(heartbeat_mutex_);
|
|
133
|
+
heartbeat_cv_.notify_all();
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
// Join heartbeat thread with timeout to avoid infinite hang
|
|
137
|
+
if (heartbeat_thread_.joinable()) {
|
|
138
|
+
heartbeat_thread_.join();
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
LOG_GOSSIPSUB_INFO("GossipSub service stopped");
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
bool GossipSub::is_running() const {
|
|
145
|
+
return running_.load();
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
//=============================================================================
|
|
149
|
+
// Topic Management
|
|
150
|
+
//=============================================================================
|
|
151
|
+
|
|
152
|
+
bool GossipSub::subscribe(const std::string& topic) {
|
|
153
|
+
std::lock_guard<std::mutex> lock(topics_mutex_);
|
|
154
|
+
|
|
155
|
+
if (subscribed_topics_.count(topic)) {
|
|
156
|
+
return false; // Already subscribed
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
subscribed_topics_.insert(topic);
|
|
160
|
+
|
|
161
|
+
// Get or create topic subscription
|
|
162
|
+
TopicSubscription* topic_sub = get_or_create_topic(topic);
|
|
163
|
+
if (!topic_sub) {
|
|
164
|
+
subscribed_topics_.erase(topic);
|
|
165
|
+
return false;
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
// Broadcast subscription to all peers
|
|
169
|
+
nlohmann::json payload;
|
|
170
|
+
payload["topic"] = topic;
|
|
171
|
+
broadcast_gossipsub_message(GossipSubMessageType::SUBSCRIBE, payload);
|
|
172
|
+
|
|
173
|
+
// Start building mesh for this topic
|
|
174
|
+
maintain_mesh(topic);
|
|
175
|
+
|
|
176
|
+
LOG_GOSSIPSUB_INFO("Subscribed to topic: " << topic);
|
|
177
|
+
return true;
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
bool GossipSub::unsubscribe(const std::string& topic) {
|
|
181
|
+
std::lock_guard<std::mutex> lock(topics_mutex_);
|
|
182
|
+
|
|
183
|
+
if (!subscribed_topics_.count(topic)) {
|
|
184
|
+
return false; // Not subscribed
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
subscribed_topics_.erase(topic);
|
|
188
|
+
|
|
189
|
+
// Broadcast unsubscription to all peers
|
|
190
|
+
nlohmann::json payload;
|
|
191
|
+
payload["topic"] = topic;
|
|
192
|
+
broadcast_gossipsub_message(GossipSubMessageType::UNSUBSCRIBE, payload);
|
|
193
|
+
|
|
194
|
+
// Leave mesh for this topic
|
|
195
|
+
auto topic_it = topics_.find(topic);
|
|
196
|
+
if (topic_it != topics_.end()) {
|
|
197
|
+
TopicSubscription* topic_sub = topic_it->second.get();
|
|
198
|
+
|
|
199
|
+
// Send PRUNE to all mesh peers
|
|
200
|
+
for (const auto& peer_id : topic_sub->mesh_peers) {
|
|
201
|
+
nlohmann::json prune_payload;
|
|
202
|
+
prune_payload["topic"] = topic;
|
|
203
|
+
send_gossipsub_message(peer_id, GossipSubMessageType::PRUNE, prune_payload);
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
topic_sub->mesh_peers.clear();
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
LOG_GOSSIPSUB_INFO("Unsubscribed from topic: " << topic);
|
|
210
|
+
return true;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
bool GossipSub::is_subscribed(const std::string& topic) const {
|
|
214
|
+
std::lock_guard<std::mutex> lock(topics_mutex_);
|
|
215
|
+
return subscribed_topics_.count(topic) > 0;
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
std::vector<std::string> GossipSub::get_subscribed_topics() const {
|
|
219
|
+
std::lock_guard<std::mutex> lock(topics_mutex_);
|
|
220
|
+
return std::vector<std::string>(subscribed_topics_.begin(), subscribed_topics_.end());
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
//=============================================================================
|
|
224
|
+
// Message Publishing
|
|
225
|
+
//=============================================================================
|
|
226
|
+
|
|
227
|
+
bool GossipSub::publish(const std::string& topic, const std::string& message) {
|
|
228
|
+
if (!running_.load()) {
|
|
229
|
+
return false;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
std::string our_peer_id = rats_client_.get_our_peer_id();
|
|
233
|
+
std::string message_id = generate_message_id(topic, message, our_peer_id);
|
|
234
|
+
|
|
235
|
+
// Check if we've already seen this message
|
|
236
|
+
if (is_message_seen(message_id)) {
|
|
237
|
+
return false;
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
// Cache the message
|
|
241
|
+
cache_message(message_id, topic, message, our_peer_id);
|
|
242
|
+
|
|
243
|
+
// Create publish message
|
|
244
|
+
nlohmann::json payload;
|
|
245
|
+
payload["topic"] = topic;
|
|
246
|
+
payload["message"] = message;
|
|
247
|
+
payload["message_id"] = message_id;
|
|
248
|
+
payload["sender_peer_id"] = our_peer_id;
|
|
249
|
+
payload["timestamp"] = std::chrono::duration_cast<std::chrono::milliseconds>(
|
|
250
|
+
std::chrono::steady_clock::now().time_since_epoch()).count();
|
|
251
|
+
|
|
252
|
+
std::lock_guard<std::mutex> topics_lock(topics_mutex_);
|
|
253
|
+
|
|
254
|
+
// If we're subscribed to this topic, send to mesh peers
|
|
255
|
+
if (subscribed_topics_.count(topic)) {
|
|
256
|
+
auto topic_it = topics_.find(topic);
|
|
257
|
+
if (topic_it != topics_.end()) {
|
|
258
|
+
TopicSubscription* topic_sub = topic_it->second.get();
|
|
259
|
+
|
|
260
|
+
for (const auto& peer_id : topic_sub->mesh_peers) {
|
|
261
|
+
if (is_peer_score_acceptable(peer_id, config_.score_threshold_publish)) {
|
|
262
|
+
send_gossipsub_message(peer_id, GossipSubMessageType::PUBLISH, payload);
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
}
|
|
266
|
+
} else {
|
|
267
|
+
// If not subscribed, use fanout
|
|
268
|
+
TopicSubscription* topic_sub = get_or_create_topic(topic);
|
|
269
|
+
if (topic_sub) {
|
|
270
|
+
// Select fanout peers if we don't have enough
|
|
271
|
+
if (topic_sub->fanout_peers.size() < static_cast<size_t>(config_.fanout_size)) {
|
|
272
|
+
std::vector<std::string> candidates = select_peers_for_gossip(topic,
|
|
273
|
+
config_.fanout_size - static_cast<int>(topic_sub->fanout_peers.size()), topic_sub->fanout_peers);
|
|
274
|
+
for (const auto& peer_id : candidates) {
|
|
275
|
+
topic_sub->fanout_peers.insert(peer_id);
|
|
276
|
+
}
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
// Send to fanout peers
|
|
280
|
+
for (const auto& peer_id : topic_sub->fanout_peers) {
|
|
281
|
+
if (is_peer_score_acceptable(peer_id, config_.score_threshold_publish)) {
|
|
282
|
+
send_gossipsub_message(peer_id, GossipSubMessageType::PUBLISH, payload);
|
|
283
|
+
}
|
|
284
|
+
}
|
|
285
|
+
}
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
LOG_GOSSIPSUB_DEBUG("Published message to topic: " << topic << " (ID: " << message_id << ")");
|
|
289
|
+
return true;
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
// Convenience overload: serialize a JSON value and publish it as a string.
bool GossipSub::publish(const std::string& topic, const nlohmann::json& message) {
    const std::string serialized = message.dump();
    return publish(topic, serialized);
}
|
|
295
|
+
|
|
296
|
+
//=============================================================================
|
|
297
|
+
// Message Handling
|
|
298
|
+
//=============================================================================
|
|
299
|
+
|
|
300
|
+
// Entry point for all incoming "gossipsub" protocol frames.
// Decodes the {type, payload} envelope and dispatches to the matching
// handler. Any exception is logged and counted against the sender's score.
void GossipSub::handle_gossipsub_message(const std::string& peer_id, const nlohmann::json& message) {
    try {
        const std::string type_str = message.value("type", "");
        const nlohmann::json payload = message.value("payload", nlohmann::json::object());

        LOG_GOSSIPSUB_DEBUG("Received gossipsub " << type_str << " message from " << peer_id);

        // Unknown type strings decode to HEARTBEAT (the parser's fallback),
        // so they end up being processed as heartbeats.
        switch (string_to_gossipsub_message_type(type_str)) {
            case GossipSubMessageType::SUBSCRIBE:   handle_subscribe(peer_id, payload);   break;
            case GossipSubMessageType::UNSUBSCRIBE: handle_unsubscribe(peer_id, payload); break;
            case GossipSubMessageType::PUBLISH:     handle_publish(peer_id, payload);     break;
            case GossipSubMessageType::GOSSIP:      handle_gossip(peer_id, payload);      break;
            case GossipSubMessageType::GRAFT:       handle_graft(peer_id, payload);       break;
            case GossipSubMessageType::PRUNE:       handle_prune(peer_id, payload);       break;
            case GossipSubMessageType::IHAVE:       handle_ihave(peer_id, payload);       break;
            case GossipSubMessageType::IWANT:       handle_iwant(peer_id, payload);       break;
            case GossipSubMessageType::HEARTBEAT:   handle_heartbeat(peer_id, payload);   break;
        }
    } catch (const std::exception& e) {
        LOG_GOSSIPSUB_ERROR("Failed to handle gossipsub message from " << peer_id << ": " << e.what());

        // A malformed frame counts as an invalid message for scoring.
        std::lock_guard<std::mutex> scores_lock(scores_mutex_);
        auto score_it = peer_scores_.find(peer_id);
        if (score_it != peer_scores_.end()) {
            score_it->second->messages_invalid++;
            score_it->second->update_score();
        }
    }
}
|
|
350
|
+
|
|
351
|
+
// Record that a remote peer subscribed to a topic. Fires the peer-joined
// callback and, if we share the topic, re-evaluates our mesh membership.
void GossipSub::handle_subscribe(const std::string& peer_id, const nlohmann::json& payload) {
    const std::string topic = payload.value("topic", "");
    if (topic.empty()) {
        return;
    }

    std::lock_guard<std::mutex> topics_lock(topics_mutex_);
    TopicSubscription* topic_sub = get_or_create_topic(topic);
    if (!topic_sub) {
        return;
    }

    // insert().second is true only for a first-time subscriber; repeated
    // SUBSCRIBE frames are ignored.
    if (!topic_sub->subscribers.insert(peer_id).second) {
        return;
    }

    LOG_GOSSIPSUB_DEBUG("Peer " << peer_id << " subscribed to topic: " << topic);

    // Notify the application, shielding ourselves from handler exceptions.
    std::lock_guard<std::mutex> handlers_lock(handlers_mutex_);
    auto handler_it = peer_joined_handlers_.find(topic);
    if (handler_it != peer_joined_handlers_.end()) {
        try {
            handler_it->second(topic, peer_id);
        } catch (const std::exception& e) {
            LOG_GOSSIPSUB_ERROR("Exception in peer joined handler for topic '" << topic << "': " << e.what());
        }
    }

    // A new candidate may improve our mesh for this topic.
    if (subscribed_topics_.count(topic)) {
        maintain_mesh(topic);
    }
}
|
|
386
|
+
|
|
387
|
+
// Remove a remote peer from a topic's subscriber/mesh/fanout sets, fire the
// peer-left callback, repair the mesh if needed, and GC an empty topic.
void GossipSub::handle_unsubscribe(const std::string& peer_id, const nlohmann::json& payload) {
    const std::string topic = payload.value("topic", "");
    if (topic.empty()) {
        return;
    }

    std::lock_guard<std::mutex> topics_lock(topics_mutex_);
    auto topic_it = topics_.find(topic);
    if (topic_it == topics_.end()) {
        return;
    }

    TopicSubscription* topic_sub = topic_it->second.get();

    const bool was_subscribed = topic_sub->subscribers.erase(peer_id) > 0;
    const bool was_in_mesh = topic_sub->mesh_peers.erase(peer_id) > 0;
    topic_sub->fanout_peers.erase(peer_id);

    if (was_subscribed) {
        LOG_GOSSIPSUB_DEBUG("Peer " << peer_id << " unsubscribed from topic: " << topic);

        // Notify the application, shielding ourselves from handler exceptions.
        std::lock_guard<std::mutex> handlers_lock(handlers_mutex_);
        auto handler_it = peer_left_handlers_.find(topic);
        if (handler_it != peer_left_handlers_.end()) {
            try {
                handler_it->second(topic, peer_id);
            } catch (const std::exception& e) {
                LOG_GOSSIPSUB_ERROR("Exception in peer left handler for topic '" << topic << "': " << e.what());
            }
        }

        // Losing a mesh member may leave our mesh under-sized.
        if (was_in_mesh && subscribed_topics_.count(topic)) {
            maintain_mesh(topic);
        }
    }

    // Drop the topic record entirely once nobody subscribes to it.
    if (topic_sub->subscribers.empty()) {
        cleanup_topic(topic);
    }
}
|
|
431
|
+
|
|
432
|
+
// Handle an incoming PUBLISH frame: deduplicate, validate, cache, adjust the
// forwarding peer's score, forward to mesh peers (excluding the peer we got
// it from), and deliver to the locally registered handler.
void GossipSub::handle_publish(const std::string& peer_id, const nlohmann::json& payload) {
    const std::string topic = payload.value("topic", "");
    const std::string message = payload.value("message", "");
    const std::string message_id = payload.value("message_id", "");
    const std::string sender_peer_id = payload.value("sender_peer_id", peer_id);

    // Reject malformed frames outright.
    if (topic.empty() || message.empty() || message_id.empty()) {
        return;
    }

    // Deduplicate: each message id is processed at most once.
    if (is_message_seen(message_id)) {
        return;
    }

    const ValidationResult validation = validate_message(topic, message, sender_peer_id);
    if (validation == ValidationResult::REJECT) {
        // Penalize the forwarding peer for an invalid message.
        std::lock_guard<std::mutex> scores_lock(scores_mutex_);
        auto score_it = peer_scores_.find(peer_id);
        if (score_it != peer_scores_.end()) {
            score_it->second->messages_invalid++;
            score_it->second->update_score();
        }
        return;
    }
    if (validation == ValidationResult::IGNORE_MSG) {
        return;
    }

    cache_message(message_id, topic, message, sender_peer_id);

    // Credit the forwarding peer with a valid delivery.
    {
        std::lock_guard<std::mutex> scores_lock(scores_mutex_);
        auto score_it = peer_scores_.find(peer_id);
        if (score_it != peer_scores_.end()) {
            score_it->second->messages_delivered++;
            score_it->second->update_score();
        }
    }

    std::lock_guard<std::mutex> topics_lock(topics_mutex_);

    if (subscribed_topics_.count(topic)) {
        // Forward to every mesh peer except the one that sent it to us.
        auto topic_it = topics_.find(topic);
        if (topic_it != topics_.end()) {
            for (const auto& forward_peer_id : topic_it->second->mesh_peers) {
                if (forward_peer_id != peer_id &&
                    is_peer_score_acceptable(forward_peer_id, config_.score_threshold_gossip)) {
                    send_gossipsub_message(forward_peer_id, GossipSubMessageType::PUBLISH, payload);
                }
            }
        }

        // Deliver to the locally registered handler, if any.
        std::lock_guard<std::mutex> handlers_lock(handlers_mutex_);
        auto handler_it = message_handlers_.find(topic);
        if (handler_it != message_handlers_.end()) {
            try {
                handler_it->second(topic, message, sender_peer_id);
            } catch (const std::exception& e) {
                LOG_GOSSIPSUB_ERROR("Exception in message handler for topic '" << topic << "': " << e.what());
            }
        }
    }

    LOG_GOSSIPSUB_DEBUG("Processed published message for topic: " << topic << " (ID: " << message_id << ")");
}
|
|
507
|
+
|
|
508
|
+
// Placeholder for generic gossip frames. IHAVE/IWANT control traffic is
// dispatched through its own handlers, so this currently only logs.
void GossipSub::handle_gossip(const std::string& peer_id, const nlohmann::json& payload) {
    LOG_GOSSIPSUB_DEBUG("Received gossip message from " << peer_id);
}
|
|
512
|
+
|
|
513
|
+
// Handle a GRAFT request: the peer asks to join our mesh for a topic.
// Accepted only when we are subscribed and the peer's score qualifies for
// mesh membership; otherwise we answer with an explicit PRUNE.
void GossipSub::handle_graft(const std::string& peer_id, const nlohmann::json& payload) {
    const std::string topic = payload.value("topic", "");
    if (topic.empty()) {
        return;
    }

    // Count the graft toward the peer's mesh-behavior score.
    {
        std::lock_guard<std::mutex> scores_lock(scores_mutex_);
        auto score_it = peer_scores_.find(peer_id);
        if (score_it != peer_scores_.end()) {
            score_it->second->graft_requests++;
            score_it->second->update_score();
        }
    }

    std::lock_guard<std::mutex> topics_lock(topics_mutex_);

    const bool acceptable = subscribed_topics_.count(topic) &&
        is_peer_score_acceptable(peer_id, config_.score_threshold_mesh);

    if (acceptable) {
        TopicSubscription* topic_sub = get_or_create_topic(topic);
        if (topic_sub) {
            topic_sub->mesh_peers.insert(peer_id);
            LOG_GOSSIPSUB_DEBUG("Added peer " << peer_id << " to mesh for topic: " << topic);
        }
    } else {
        // Decline the graft explicitly so the peer removes us from its mesh.
        nlohmann::json prune_payload;
        prune_payload["topic"] = topic;
        send_gossipsub_message(peer_id, GossipSubMessageType::PRUNE, prune_payload);
    }
}
|
|
547
|
+
|
|
548
|
+
// Handle a PRUNE: the peer is ejecting us from its mesh, so drop it from
// ours for the given topic.
void GossipSub::handle_prune(const std::string& peer_id, const nlohmann::json& payload) {
    const std::string topic = payload.value("topic", "");
    if (topic.empty()) {
        return;
    }

    // Count the prune toward the peer's mesh-behavior score.
    {
        std::lock_guard<std::mutex> scores_lock(scores_mutex_);
        auto score_it = peer_scores_.find(peer_id);
        if (score_it != peer_scores_.end()) {
            score_it->second->prune_requests++;
            score_it->second->update_score();
        }
    }

    std::lock_guard<std::mutex> topics_lock(topics_mutex_);
    auto topic_it = topics_.find(topic);
    if (topic_it != topics_.end()) {
        topic_it->second->mesh_peers.erase(peer_id);
        LOG_GOSSIPSUB_DEBUG("Removed peer " << peer_id << " from mesh for topic: " << topic);
    }
}
|
|
572
|
+
|
|
573
|
+
// Handle an IHAVE advertisement: reply with an IWANT listing any advertised
// message ids we have not seen yet.
void GossipSub::handle_ihave(const std::string& peer_id, const nlohmann::json& payload) {
    const std::vector<std::string> message_ids =
        payload.value("message_ids", std::vector<std::string>());
    const std::string topic = payload.value("topic", "");

    if (message_ids.empty() || topic.empty()) {
        return;
    }

    // Keep only the ids missing from our cache.
    std::vector<std::string> wanted_messages;
    for (const auto& msg_id : message_ids) {
        if (!is_message_seen(msg_id)) {
            wanted_messages.push_back(msg_id);
        }
    }

    if (wanted_messages.empty()) {
        return;  // Nothing new — no IWANT needed.
    }

    nlohmann::json iwant_payload;
    iwant_payload["message_ids"] = wanted_messages;
    iwant_payload["topic"] = topic;
    send_gossipsub_message(peer_id, GossipSubMessageType::IWANT, iwant_payload);
}
|
|
597
|
+
|
|
598
|
+
// Handle an IWANT request: re-send any requested messages found in our cache.
void GossipSub::handle_iwant(const std::string& peer_id, const nlohmann::json& payload) {
    const std::vector<std::string> message_ids =
        payload.value("message_ids", std::vector<std::string>());
    const std::string topic = payload.value("topic", "");

    if (message_ids.empty() || topic.empty()) {
        return;
    }

    std::lock_guard<std::mutex> cache_lock(message_cache_mutex_);
    for (const auto& msg_id : message_ids) {
        auto cache_it = message_cache_.find(msg_id);
        if (cache_it == message_cache_.end()) {
            continue;  // We never had (or have already expired) this message.
        }
        const MessageMetadata* metadata = cache_it->second.get();

        // TODO: the cache stores only metadata, not message bodies, so the
        // "message" field is empty until content caching is implemented.
        nlohmann::json publish_payload;
        publish_payload["topic"] = metadata->topic;
        publish_payload["message"] = ""; // Would contain actual message
        publish_payload["message_id"] = msg_id;
        publish_payload["sender_peer_id"] = metadata->sender_peer_id;

        send_gossipsub_message(peer_id, GossipSubMessageType::PUBLISH, publish_payload);
    }
}
|
|
625
|
+
|
|
626
|
+
// Process control messages piggy-backed inside a HEARTBEAT frame.
// Each control array is dispatched in the original order:
// graft, prune, ihave, iwant.
void GossipSub::handle_heartbeat(const std::string& peer_id, const nlohmann::json& payload) {
    struct ControlEntry {
        const char* key;
        void (GossipSub::*handler)(const std::string&, const nlohmann::json&);
    };
    static const ControlEntry kControls[] = {
        {"graft", &GossipSub::handle_graft},
        {"prune", &GossipSub::handle_prune},
        {"ihave", &GossipSub::handle_ihave},
        {"iwant", &GossipSub::handle_iwant},
    };

    for (const auto& control : kControls) {
        if (!payload.contains(control.key)) {
            continue;
        }
        for (const auto& item : payload[control.key]) {
            (this->*control.handler)(peer_id, item);
        }
    }
}
|
|
656
|
+
|
|
657
|
+
//=============================================================================
|
|
658
|
+
// Utility Functions
|
|
659
|
+
//=============================================================================
|
|
660
|
+
|
|
661
|
+
std::string GossipSub::generate_message_id(const std::string& topic, const std::string& message, const std::string& sender_peer_id) {
|
|
662
|
+
std::string combined = topic + message + sender_peer_id + std::to_string(
|
|
663
|
+
std::chrono::duration_cast<std::chrono::microseconds>(
|
|
664
|
+
std::chrono::steady_clock::now().time_since_epoch()).count());
|
|
665
|
+
|
|
666
|
+
return SHA1::hash(combined);
|
|
667
|
+
}
|
|
668
|
+
|
|
669
|
+
bool GossipSub::is_message_seen(const std::string& message_id) {
|
|
670
|
+
std::lock_guard<std::mutex> lock(message_cache_mutex_);
|
|
671
|
+
return message_cache_.count(message_id) > 0;
|
|
672
|
+
}
|
|
673
|
+
|
|
674
|
+
void GossipSub::cache_message(const std::string& message_id, const std::string& topic, const std::string& message, const std::string& sender_peer_id) {
|
|
675
|
+
std::lock_guard<std::mutex> lock(message_cache_mutex_);
|
|
676
|
+
|
|
677
|
+
auto metadata = std::make_unique<MessageMetadata>(message_id, topic, sender_peer_id);
|
|
678
|
+
message_cache_[message_id] = std::move(metadata);
|
|
679
|
+
message_ids_seen_[message_id] = std::chrono::steady_clock::now();
|
|
680
|
+
}
|
|
681
|
+
|
|
682
|
+
TopicSubscription* GossipSub::get_or_create_topic(const std::string& topic) {
    // Look up the subscription state for `topic`, lazily creating it on first
    // use. No internal locking — presumably callers hold topics_mutex_;
    // confirm against call sites.
    auto found = topics_.find(topic);
    if (found != topics_.end()) {
        return found->second.get();
    }

    auto created = std::make_unique<TopicSubscription>(topic);
    TopicSubscription* raw = created.get();
    topics_.emplace(topic, std::move(created));
    return raw;
}
|
|
692
|
+
|
|
693
|
+
void GossipSub::cleanup_topic(const std::string& topic) {
|
|
694
|
+
auto topic_it = topics_.find(topic);
|
|
695
|
+
if (topic_it != topics_.end() && topic_it->second->subscribers.empty()) {
|
|
696
|
+
topics_.erase(topic_it);
|
|
697
|
+
}
|
|
698
|
+
}
|
|
699
|
+
|
|
700
|
+
bool GossipSub::send_gossipsub_message(const std::string& peer_id, GossipSubMessageType type, const nlohmann::json& payload) {
    // Wrap `payload` in the gossipsub envelope and send it to a single peer
    // over the "gossipsub" channel. Returns false if the send throws.
    nlohmann::json envelope;
    envelope["type"] = gossipsub_message_type_to_string(type);
    envelope["payload"] = payload;

    try {
        rats_client_.send(peer_id, "gossipsub", envelope);
    } catch (const std::exception&) {
        return false;
    }
    return true;
}
|
|
712
|
+
|
|
713
|
+
bool GossipSub::broadcast_gossipsub_message(GossipSubMessageType type, const nlohmann::json& payload, const std::unordered_set<std::string>& exclude) {
    // Send the wrapped payload to every connected peer not in `exclude`.
    // Per-peer send failures are swallowed so one bad peer cannot block the
    // broadcast; returns true if at least one send succeeded.
    nlohmann::json envelope;
    envelope["type"] = gossipsub_message_type_to_string(type);
    envelope["payload"] = payload;

    bool delivered = false;
    for (const auto& peer : rats_client_.get_all_peers()) {
        if (exclude.count(peer.peer_id) != 0) {
            continue;
        }
        try {
            rats_client_.send(peer.peer_id, "gossipsub", envelope);
            delivered = true;
        } catch (const std::exception&) {
            // Best-effort: keep going with the remaining peers.
        }
    }

    return delivered;
}
|
|
735
|
+
|
|
736
|
+
ValidationResult GossipSub::validate_message(const std::string& topic, const std::string& message, const std::string& sender_peer_id) {
    // Run the message through the validation chain: the topic-specific
    // validator wins, then the global validator; with neither registered the
    // message is accepted.
    std::lock_guard<std::mutex> guard(handlers_mutex_);

    auto validator = message_validators_.find(topic);
    if (validator != message_validators_.end()) {
        return validator->second(topic, message, sender_peer_id);
    }

    if (global_validator_) {
        return global_validator_(topic, message, sender_peer_id);
    }

    return ValidationResult::ACCEPT;
}
|
|
753
|
+
|
|
754
|
+
bool GossipSub::is_peer_score_acceptable(const std::string& peer_id, double threshold) {
|
|
755
|
+
std::lock_guard<std::mutex> lock(scores_mutex_);
|
|
756
|
+
auto score_it = peer_scores_.find(peer_id);
|
|
757
|
+
if (score_it == peer_scores_.end()) {
|
|
758
|
+
return true; // Unknown peer, accept for now
|
|
759
|
+
}
|
|
760
|
+
return score_it->second->score >= threshold;
|
|
761
|
+
}
|
|
762
|
+
|
|
763
|
+
void GossipSub::handle_peer_connected(const std::string& peer_id) {
|
|
764
|
+
// Initialize peer score
|
|
765
|
+
std::lock_guard<std::mutex> lock(scores_mutex_);
|
|
766
|
+
if (peer_scores_.find(peer_id) == peer_scores_.end()) {
|
|
767
|
+
peer_scores_[peer_id] = std::make_unique<PeerScore>(peer_id);
|
|
768
|
+
}
|
|
769
|
+
}
|
|
770
|
+
|
|
771
|
+
void GossipSub::handle_peer_disconnected(const std::string& peer_id) {
|
|
772
|
+
// Remove peer from all topics
|
|
773
|
+
std::lock_guard<std::mutex> topics_lock(topics_mutex_);
|
|
774
|
+
for (auto& topic_pair : topics_) {
|
|
775
|
+
TopicSubscription* topic_sub = topic_pair.second.get();
|
|
776
|
+
topic_sub->subscribers.erase(peer_id);
|
|
777
|
+
topic_sub->mesh_peers.erase(peer_id);
|
|
778
|
+
topic_sub->fanout_peers.erase(peer_id);
|
|
779
|
+
}
|
|
780
|
+
|
|
781
|
+
// Remove peer score
|
|
782
|
+
std::lock_guard<std::mutex> scores_lock(scores_mutex_);
|
|
783
|
+
peer_scores_.erase(peer_id);
|
|
784
|
+
}
|
|
785
|
+
|
|
786
|
+
//=============================================================================
|
|
787
|
+
// Heartbeat and Mesh Maintenance
|
|
788
|
+
//=============================================================================
|
|
789
|
+
|
|
790
|
+
// Background thread body: runs one heartbeat per config_.heartbeat_interval
// until running_ is cleared. Exceptions from a single heartbeat are logged
// and swallowed so the loop keeps running.
void GossipSub::heartbeat_loop() {
    while (running_.load()) {
        try {
            process_heartbeat();
        } catch (const std::exception& e) {
            LOG_GOSSIPSUB_ERROR("Exception in heartbeat loop: " << e.what());
        }

        // Use condition variable for interruptible sleep: shutdown notifies
        // heartbeat_cv_ after clearing running_, so the predicate wakes us
        // immediately instead of waiting out the full interval.
        std::unique_lock<std::mutex> lock(heartbeat_mutex_);
        heartbeat_cv_.wait_for(lock, config_.heartbeat_interval, [this] {
            return !running_.load();
        });
    }
}
|
|
805
|
+
|
|
806
|
+
// One heartbeat tick: expire cached messages, maintain each subscribed
// topic's mesh, age out fanout peers for topics we no longer subscribe to,
// and refresh all peer scores.
// Lock order is topics_mutex_ then scores_mutex_ (both held until return) —
// the same order used elsewhere in this class, so it must not be changed
// independently.
void GossipSub::process_heartbeat() {
    // Takes message_cache_mutex_ internally, before the locks below.
    cleanup_message_cache();

    std::lock_guard<std::mutex> topics_lock(topics_mutex_);

    // maintain_mesh() expects topics_mutex_ to already be held.
    for (const auto& topic : subscribed_topics_) {
        maintain_mesh(topic);
    }

    // Process fanout cleanup
    auto now = std::chrono::steady_clock::now();
    for (auto& topic_pair : topics_) {
        TopicSubscription* topic_sub = topic_pair.second.get();

        // Clean up old fanout peers if we're not subscribed: fanout state is
        // only useful while publishing to a topic, so it expires after
        // fanout_ttl of inactivity.
        if (!subscribed_topics_.count(topic_pair.first)) {
            auto time_since_prune = std::chrono::duration_cast<std::chrono::milliseconds>(
                now - topic_sub->last_fanout_prune);

            if (time_since_prune >= config_.fanout_ttl) {
                topic_sub->fanout_peers.clear();
                topic_sub->last_fanout_prune = now;
            }
        }
    }

    // Update peer scores (decay/recompute per PeerScore::update_score).
    std::lock_guard<std::mutex> scores_lock(scores_mutex_);
    for (auto& score_pair : peer_scores_) {
        score_pair.second->update_score();
    }
}
|
|
838
|
+
|
|
839
|
+
void GossipSub::maintain_mesh(const std::string& topic) {
|
|
840
|
+
auto topic_it = topics_.find(topic);
|
|
841
|
+
if (topic_it == topics_.end()) {
|
|
842
|
+
return;
|
|
843
|
+
}
|
|
844
|
+
|
|
845
|
+
TopicSubscription* topic_sub = topic_it->second.get();
|
|
846
|
+
int current_mesh_size = static_cast<int>(topic_sub->mesh_peers.size());
|
|
847
|
+
|
|
848
|
+
// Remove low-scoring peers from mesh
|
|
849
|
+
std::vector<std::string> to_remove;
|
|
850
|
+
for (const auto& peer_id : topic_sub->mesh_peers) {
|
|
851
|
+
if (!is_peer_score_acceptable(peer_id, config_.score_threshold_mesh)) {
|
|
852
|
+
to_remove.push_back(peer_id);
|
|
853
|
+
}
|
|
854
|
+
}
|
|
855
|
+
|
|
856
|
+
for (const auto& peer_id : to_remove) {
|
|
857
|
+
remove_peer_from_mesh(topic, peer_id);
|
|
858
|
+
current_mesh_size--;
|
|
859
|
+
}
|
|
860
|
+
|
|
861
|
+
// Add peers if below optimal
|
|
862
|
+
if (current_mesh_size < config_.mesh_optimal) {
|
|
863
|
+
int needed = config_.mesh_optimal - current_mesh_size;
|
|
864
|
+
std::vector<std::string> candidates = select_peers_for_mesh(topic, needed);
|
|
865
|
+
|
|
866
|
+
for (const auto& peer_id : candidates) {
|
|
867
|
+
add_peer_to_mesh(topic, peer_id);
|
|
868
|
+
}
|
|
869
|
+
}
|
|
870
|
+
|
|
871
|
+
// Remove excess peers if above high threshold
|
|
872
|
+
if (current_mesh_size > config_.mesh_high) {
|
|
873
|
+
int excess = current_mesh_size - config_.mesh_optimal;
|
|
874
|
+
std::vector<std::string> mesh_peers_vec(topic_sub->mesh_peers.begin(), topic_sub->mesh_peers.end());
|
|
875
|
+
std::vector<std::string> to_prune = random_sample(mesh_peers_vec, excess);
|
|
876
|
+
|
|
877
|
+
for (const auto& peer_id : to_prune) {
|
|
878
|
+
remove_peer_from_mesh(topic, peer_id);
|
|
879
|
+
}
|
|
880
|
+
}
|
|
881
|
+
}
|
|
882
|
+
|
|
883
|
+
void GossipSub::add_peer_to_mesh(const std::string& topic, const std::string& peer_id) {
|
|
884
|
+
auto topic_it = topics_.find(topic);
|
|
885
|
+
if (topic_it == topics_.end()) {
|
|
886
|
+
return;
|
|
887
|
+
}
|
|
888
|
+
|
|
889
|
+
TopicSubscription* topic_sub = topic_it->second.get();
|
|
890
|
+
|
|
891
|
+
// Check if peer is subscribed to the topic
|
|
892
|
+
if (topic_sub->subscribers.count(peer_id) == 0) {
|
|
893
|
+
return;
|
|
894
|
+
}
|
|
895
|
+
|
|
896
|
+
// Check peer score
|
|
897
|
+
if (!is_peer_score_acceptable(peer_id, config_.score_threshold_mesh)) {
|
|
898
|
+
return;
|
|
899
|
+
}
|
|
900
|
+
|
|
901
|
+
// Add to mesh
|
|
902
|
+
if (topic_sub->mesh_peers.insert(peer_id).second) {
|
|
903
|
+
// Send GRAFT message
|
|
904
|
+
nlohmann::json graft_payload;
|
|
905
|
+
graft_payload["topic"] = topic;
|
|
906
|
+
send_gossipsub_message(peer_id, GossipSubMessageType::GRAFT, graft_payload);
|
|
907
|
+
|
|
908
|
+
LOG_GOSSIPSUB_DEBUG("Added peer " << peer_id << " to mesh for topic: " << topic);
|
|
909
|
+
}
|
|
910
|
+
}
|
|
911
|
+
|
|
912
|
+
void GossipSub::remove_peer_from_mesh(const std::string& topic, const std::string& peer_id) {
|
|
913
|
+
auto topic_it = topics_.find(topic);
|
|
914
|
+
if (topic_it == topics_.end()) {
|
|
915
|
+
return;
|
|
916
|
+
}
|
|
917
|
+
|
|
918
|
+
TopicSubscription* topic_sub = topic_it->second.get();
|
|
919
|
+
|
|
920
|
+
if (topic_sub->mesh_peers.erase(peer_id) > 0) {
|
|
921
|
+
// Send PRUNE message
|
|
922
|
+
nlohmann::json prune_payload;
|
|
923
|
+
prune_payload["topic"] = topic;
|
|
924
|
+
send_gossipsub_message(peer_id, GossipSubMessageType::PRUNE, prune_payload);
|
|
925
|
+
|
|
926
|
+
LOG_GOSSIPSUB_DEBUG("Removed peer " << peer_id << " from mesh for topic: " << topic);
|
|
927
|
+
}
|
|
928
|
+
}
|
|
929
|
+
|
|
930
|
+
std::vector<std::string> GossipSub::select_peers_for_mesh(const std::string& topic, int count) {
|
|
931
|
+
auto topic_it = topics_.find(topic);
|
|
932
|
+
if (topic_it == topics_.end()) {
|
|
933
|
+
return {};
|
|
934
|
+
}
|
|
935
|
+
|
|
936
|
+
TopicSubscription* topic_sub = topic_it->second.get();
|
|
937
|
+
|
|
938
|
+
// Get candidates (subscribers not in mesh)
|
|
939
|
+
std::vector<std::string> candidates;
|
|
940
|
+
for (const auto& peer_id : topic_sub->subscribers) {
|
|
941
|
+
if (topic_sub->mesh_peers.count(peer_id) == 0 &&
|
|
942
|
+
is_peer_score_acceptable(peer_id, config_.score_threshold_mesh)) {
|
|
943
|
+
candidates.push_back(peer_id);
|
|
944
|
+
}
|
|
945
|
+
}
|
|
946
|
+
|
|
947
|
+
return random_sample(candidates, count);
|
|
948
|
+
}
|
|
949
|
+
|
|
950
|
+
std::vector<std::string> GossipSub::select_peers_for_gossip(const std::string& topic, int count, const std::unordered_set<std::string>& exclude) {
|
|
951
|
+
// Get all connected peers with acceptable scores
|
|
952
|
+
auto all_peers = rats_client_.get_all_peers();
|
|
953
|
+
std::vector<std::string> candidates;
|
|
954
|
+
|
|
955
|
+
for (const auto& peer : all_peers) {
|
|
956
|
+
if (exclude.count(peer.peer_id) == 0 &&
|
|
957
|
+
is_peer_score_acceptable(peer.peer_id, config_.score_threshold_gossip)) {
|
|
958
|
+
candidates.push_back(peer.peer_id);
|
|
959
|
+
}
|
|
960
|
+
}
|
|
961
|
+
|
|
962
|
+
return random_sample(candidates, count);
|
|
963
|
+
}
|
|
964
|
+
|
|
965
|
+
std::vector<std::string> GossipSub::random_sample(const std::vector<std::string>& peers, int count) {
|
|
966
|
+
if (peers.empty() || count <= 0) {
|
|
967
|
+
return {};
|
|
968
|
+
}
|
|
969
|
+
|
|
970
|
+
std::lock_guard<std::mutex> rng_lock(rng_mutex_);
|
|
971
|
+
|
|
972
|
+
std::vector<std::string> result = peers;
|
|
973
|
+
if (static_cast<int>(result.size()) > count) {
|
|
974
|
+
std::shuffle(result.begin(), result.end(), rng_);
|
|
975
|
+
result.resize(count);
|
|
976
|
+
}
|
|
977
|
+
|
|
978
|
+
return result;
|
|
979
|
+
}
|
|
980
|
+
|
|
981
|
+
std::vector<std::string> GossipSub::random_sample(const std::unordered_set<std::string>& peers, int count) {
|
|
982
|
+
std::vector<std::string> vec(peers.begin(), peers.end());
|
|
983
|
+
return random_sample(vec, count);
|
|
984
|
+
}
|
|
985
|
+
|
|
986
|
+
void GossipSub::cleanup_message_cache() {
|
|
987
|
+
std::lock_guard<std::mutex> lock(message_cache_mutex_);
|
|
988
|
+
|
|
989
|
+
auto now = std::chrono::steady_clock::now();
|
|
990
|
+
|
|
991
|
+
// Clean up old message cache entries
|
|
992
|
+
auto cache_it = message_cache_.begin();
|
|
993
|
+
while (cache_it != message_cache_.end()) {
|
|
994
|
+
auto time_since_first_seen = std::chrono::duration_cast<std::chrono::milliseconds>(
|
|
995
|
+
now - cache_it->second->first_seen);
|
|
996
|
+
|
|
997
|
+
if (time_since_first_seen >= config_.message_cache_ttl) {
|
|
998
|
+
cache_it = message_cache_.erase(cache_it);
|
|
999
|
+
} else {
|
|
1000
|
+
++cache_it;
|
|
1001
|
+
}
|
|
1002
|
+
}
|
|
1003
|
+
|
|
1004
|
+
// Clean up message IDs seen
|
|
1005
|
+
auto ids_it = message_ids_seen_.begin();
|
|
1006
|
+
while (ids_it != message_ids_seen_.end()) {
|
|
1007
|
+
auto time_since_seen = std::chrono::duration_cast<std::chrono::milliseconds>(
|
|
1008
|
+
now - ids_it->second);
|
|
1009
|
+
|
|
1010
|
+
if (time_since_seen >= config_.message_cache_ttl) {
|
|
1011
|
+
ids_it = message_ids_seen_.erase(ids_it);
|
|
1012
|
+
} else {
|
|
1013
|
+
++ids_it;
|
|
1014
|
+
}
|
|
1015
|
+
}
|
|
1016
|
+
}
|
|
1017
|
+
|
|
1018
|
+
//=============================================================================
|
|
1019
|
+
// Public API Implementation (remaining methods)
|
|
1020
|
+
//=============================================================================
|
|
1021
|
+
|
|
1022
|
+
void GossipSub::set_message_validator(const std::string& topic, MessageValidator validator) {
|
|
1023
|
+
std::lock_guard<std::mutex> lock(handlers_mutex_);
|
|
1024
|
+
if (topic.empty()) {
|
|
1025
|
+
global_validator_ = validator;
|
|
1026
|
+
} else {
|
|
1027
|
+
message_validators_[topic] = validator;
|
|
1028
|
+
}
|
|
1029
|
+
}
|
|
1030
|
+
|
|
1031
|
+
void GossipSub::set_message_handler(const std::string& topic, MessageHandler handler) {
|
|
1032
|
+
std::lock_guard<std::mutex> lock(handlers_mutex_);
|
|
1033
|
+
message_handlers_[topic] = handler;
|
|
1034
|
+
}
|
|
1035
|
+
|
|
1036
|
+
void GossipSub::set_peer_joined_handler(const std::string& topic, PeerJoinedHandler handler) {
|
|
1037
|
+
std::lock_guard<std::mutex> lock(handlers_mutex_);
|
|
1038
|
+
peer_joined_handlers_[topic] = handler;
|
|
1039
|
+
}
|
|
1040
|
+
|
|
1041
|
+
void GossipSub::set_peer_left_handler(const std::string& topic, PeerLeftHandler handler) {
|
|
1042
|
+
std::lock_guard<std::mutex> lock(handlers_mutex_);
|
|
1043
|
+
peer_left_handlers_[topic] = handler;
|
|
1044
|
+
}
|
|
1045
|
+
|
|
1046
|
+
std::vector<std::string> GossipSub::get_topic_peers(const std::string& topic) const {
|
|
1047
|
+
std::lock_guard<std::mutex> lock(topics_mutex_);
|
|
1048
|
+
auto topic_it = topics_.find(topic);
|
|
1049
|
+
if (topic_it == topics_.end()) {
|
|
1050
|
+
return {};
|
|
1051
|
+
}
|
|
1052
|
+
|
|
1053
|
+
const auto& subscribers = topic_it->second->subscribers;
|
|
1054
|
+
return std::vector<std::string>(subscribers.begin(), subscribers.end());
|
|
1055
|
+
}
|
|
1056
|
+
|
|
1057
|
+
std::vector<std::string> GossipSub::get_mesh_peers(const std::string& topic) const {
|
|
1058
|
+
std::lock_guard<std::mutex> lock(topics_mutex_);
|
|
1059
|
+
auto topic_it = topics_.find(topic);
|
|
1060
|
+
if (topic_it == topics_.end()) {
|
|
1061
|
+
return {};
|
|
1062
|
+
}
|
|
1063
|
+
|
|
1064
|
+
const auto& mesh_peers = topic_it->second->mesh_peers;
|
|
1065
|
+
return std::vector<std::string>(mesh_peers.begin(), mesh_peers.end());
|
|
1066
|
+
}
|
|
1067
|
+
|
|
1068
|
+
double GossipSub::get_peer_score(const std::string& peer_id) const {
|
|
1069
|
+
std::lock_guard<std::mutex> lock(scores_mutex_);
|
|
1070
|
+
auto score_it = peer_scores_.find(peer_id);
|
|
1071
|
+
if (score_it == peer_scores_.end()) {
|
|
1072
|
+
return 0.0;
|
|
1073
|
+
}
|
|
1074
|
+
return score_it->second->score;
|
|
1075
|
+
}
|
|
1076
|
+
|
|
1077
|
+
// Build a JSON snapshot of the router's state: running flag, per-topic
// counts (subscribers/mesh/fanout), and aggregate peer-score statistics.
// topics_mutex_ and scores_mutex_ are taken in separate scopes, so the two
// sections are individually — not jointly — consistent.
nlohmann::json GossipSub::get_statistics() const {
    nlohmann::json stats;

    // Basic stats
    stats["running"] = running_.load();

    // Topic stats (under topics_mutex_ only)
    {
        std::lock_guard<std::mutex> topics_lock(topics_mutex_);
        stats["subscribed_topics_count"] = subscribed_topics_.size();
        stats["total_topics_count"] = topics_.size();

        nlohmann::json topics_detail;
        for (const auto& topic_pair : topics_) {
            nlohmann::json topic_stats;
            topic_stats["subscribers_count"] = topic_pair.second->subscribers.size();
            topic_stats["mesh_peers_count"] = topic_pair.second->mesh_peers.size();
            topic_stats["fanout_peers_count"] = topic_pair.second->fanout_peers.size();
            topic_stats["is_subscribed"] = subscribed_topics_.count(topic_pair.first) > 0;
            topics_detail[topic_pair.first] = topic_stats;
        }
        stats["topics"] = topics_detail;
    }

    // Peer scores (under scores_mutex_ only)
    {
        std::lock_guard<std::mutex> scores_lock(scores_mutex_);
        stats["peers_count"] = peer_scores_.size();

        double total_score = 0.0;
        // Parenthesized (std::numeric_limits<double>::max)() etc. dodge the
        // Windows min/max macros.
        double min_score = (std::numeric_limits<double>::max)();
        double max_score = (std::numeric_limits<double>::lowest)();

        for (const auto& score_pair : peer_scores_) {
            double score = score_pair.second->score;
            total_score += score;
            min_score = (std::min)(min_score, score);
            max_score = (std::max)(max_score, score);
        }

        // Avoid division by zero and meaningless sentinel min/max when there
        // are no tracked peers.
        if (!peer_scores_.empty()) {
            stats["average_peer_score"] = total_score / peer_scores_.size();
            stats["min_peer_score"] = min_score;
            stats["max_peer_score"] = max_score;
        }
    }

    return stats;
}
|
|
1126
|
+
|
|
1127
|
+
nlohmann::json GossipSub::get_cache_statistics() const {
    // Report how many message-metadata entries and seen message IDs are
    // currently cached.
    std::lock_guard<std::mutex> guard(message_cache_mutex_);

    nlohmann::json cache_stats;
    cache_stats["cached_messages_count"] = message_cache_.size();
    cache_stats["seen_message_ids_count"] = message_ids_seen_.size();
    return cache_stats;
}
|
|
1136
|
+
|
|
1137
|
+
} // namespace librats
|