waze_logs-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- analysis.py +91 -0
- cli.py +1219 -0
- collector.py +193 -0
- collector_europe.py +312 -0
- collector_worldwide.py +532 -0
- database.py +176 -0
- waze_client.py +234 -0
- waze_logs-1.0.0.dist-info/METADATA +411 -0
- waze_logs-1.0.0.dist-info/RECORD +15 -0
- waze_logs-1.0.0.dist-info/WHEEL +5 -0
- waze_logs-1.0.0.dist-info/entry_points.txt +2 -0
- waze_logs-1.0.0.dist-info/licenses/LICENSE +21 -0
- waze_logs-1.0.0.dist-info/top_level.txt +8 -0
- web/app.py +536 -0
- web/templates/index.html +1241 -0
web/app.py
ADDED
@@ -0,0 +1,536 @@
"""Flask web application for Waze Madrid Logger visualization."""
|
|
2
|
+
import os
|
|
3
|
+
import sys
|
|
4
|
+
import json
|
|
5
|
+
import time
|
|
6
|
+
import queue
|
|
7
|
+
import threading
|
|
8
|
+
from datetime import datetime, timedelta
|
|
9
|
+
from flask import Flask, render_template, jsonify, request, Response
|
|
10
|
+
|
|
11
|
+
# Add parent directory to path for imports
|
|
12
|
+
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
13
|
+
|
|
14
|
+
from database import Database
|
|
15
|
+
from analysis import get_stats, get_recent_events, get_user_profile
|
|
16
|
+
|
|
17
|
+
app = Flask(__name__)
|
|
18
|
+
|
|
19
|
+
# Global event queue for SSE broadcasting
|
|
20
|
+
event_queues = []
|
|
21
|
+
event_queues_lock = threading.Lock()
|
|
22
|
+
|
|
23
|
+
# Status file path for collector updates
|
|
24
|
+
STATUS_FILE = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
|
|
25
|
+
"data", "collector_status.json")
|
|
26
|
+
|
|
27
|
+
# Database paths - all regional databases
|
|
28
|
+
DATA_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "data")
|
|
29
|
+
DB_PATHS = {
|
|
30
|
+
"madrid": os.path.join(DATA_DIR, "waze_madrid.db"),
|
|
31
|
+
"europe": os.path.join(DATA_DIR, "waze_europe.db"),
|
|
32
|
+
"americas": os.path.join(DATA_DIR, "waze_americas.db"),
|
|
33
|
+
"asia": os.path.join(DATA_DIR, "waze_asia.db"),
|
|
34
|
+
"oceania": os.path.join(DATA_DIR, "waze_oceania.db"),
|
|
35
|
+
"africa": os.path.join(DATA_DIR, "waze_africa.db"),
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
# Legacy single DB path for compatibility
|
|
39
|
+
DB_PATH = DB_PATHS["madrid"]
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def get_db(region=None):
    """Get database connection for a specific region or default."""
    if region and region in DB_PATHS:
        return Database(DB_PATHS[region])
    return Database(DB_PATH)


def get_all_dbs():
    """Get connections to all existing databases."""
    dbs = []
    for region, path in DB_PATHS.items():
        if os.path.exists(path):
            try:
                dbs.append((region, Database(path)))
            except Exception:
                pass
    return dbs


def query_all_dbs(query_func):
    """Execute a function on all databases and combine results."""
    all_results = []
    for region, db in get_all_dbs():
        try:
            results = query_func(db, region)
            if results:
                all_results.extend(results)
            db.close()
        except Exception as e:
            print(f"Error querying {region}: {e}")
    return all_results

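# Usage sketch for query_all_dbs() (illustrative only; the route handlers
# below inline this same loop rather than calling it). The callback receives
# an open Database plus its region tag and returns a list to merge:
#
#     def recent_usernames(db, region):
#         rows = db.execute(
#             "SELECT DISTINCT username FROM events ORDER BY id DESC LIMIT 5"
#         ).fetchall()
#         return [(region, row["username"]) for row in rows]
#
#     pairs = query_all_dbs(recent_usernames)
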
@app.route("/")
|
|
76
|
+
def index():
|
|
77
|
+
"""Render main map view."""
|
|
78
|
+
return render_template("index.html")
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
@app.route("/api/stats")
|
|
82
|
+
def api_stats():
|
|
83
|
+
"""Get summary statistics from all databases."""
|
|
84
|
+
total_events = 0
|
|
85
|
+
unique_users = set()
|
|
86
|
+
first_event = None
|
|
87
|
+
last_event = None
|
|
88
|
+
|
|
89
|
+
for region, db in get_all_dbs():
|
|
90
|
+
try:
|
|
91
|
+
row = db.execute("""
|
|
92
|
+
SELECT COUNT(*) as count,
|
|
93
|
+
COUNT(DISTINCT username) as users,
|
|
94
|
+
MIN(timestamp_utc) as first_event,
|
|
95
|
+
MAX(timestamp_utc) as last_event
|
|
96
|
+
FROM events
|
|
97
|
+
""").fetchone()
|
|
98
|
+
|
|
99
|
+
if row:
|
|
100
|
+
total_events += row["count"] or 0
|
|
101
|
+
|
|
102
|
+
# Get unique users for this db
|
|
103
|
+
users_rows = db.execute("SELECT DISTINCT username FROM events").fetchall()
|
|
104
|
+
for u in users_rows:
|
|
105
|
+
unique_users.add(u["username"])
|
|
106
|
+
|
|
107
|
+
if row["first_event"]:
|
|
108
|
+
if first_event is None or row["first_event"] < first_event:
|
|
109
|
+
first_event = row["first_event"]
|
|
110
|
+
if row["last_event"]:
|
|
111
|
+
if last_event is None or row["last_event"] > last_event:
|
|
112
|
+
last_event = row["last_event"]
|
|
113
|
+
|
|
114
|
+
db.close()
|
|
115
|
+
except Exception as e:
|
|
116
|
+
print(f"Stats error for {region}: {e}")
|
|
117
|
+
|
|
118
|
+
return jsonify({
|
|
119
|
+
"total_events": total_events,
|
|
120
|
+
"unique_users": len(unique_users),
|
|
121
|
+
"first_event": first_event,
|
|
122
|
+
"last_event": last_event
|
|
123
|
+
})
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
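# Response shape for /api/stats:
#     {"total_events": <int>, "unique_users": <int>,
#      "first_event": <ISO timestamp or null>, "last_event": <ISO timestamp or null>}
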
@app.route("/api/events")
|
|
127
|
+
def api_events():
|
|
128
|
+
"""Get events with optional filters from all databases."""
|
|
129
|
+
# Parse query parameters
|
|
130
|
+
event_type = request.args.get("type")
|
|
131
|
+
since = request.args.get("since") # hours ago
|
|
132
|
+
date_from = request.args.get("from") # ISO date string
|
|
133
|
+
date_to = request.args.get("to") # ISO date string
|
|
134
|
+
username = request.args.get("user") # filter by username
|
|
135
|
+
limit = request.args.get("limit", 1000, type=int)
|
|
136
|
+
|
|
137
|
+
all_events = []
|
|
138
|
+
|
|
139
|
+
for region, db in get_all_dbs():
|
|
140
|
+
try:
|
|
141
|
+
query = "SELECT * FROM events WHERE 1=1"
|
|
142
|
+
params = []
|
|
143
|
+
|
|
144
|
+
if event_type:
|
|
145
|
+
query += " AND report_type = ?"
|
|
146
|
+
params.append(event_type.upper())
|
|
147
|
+
|
|
148
|
+
if username:
|
|
149
|
+
query += " AND username = ?"
|
|
150
|
+
params.append(username)
|
|
151
|
+
|
|
152
|
+
if since:
|
|
153
|
+
hours = int(since)
|
|
154
|
+
cutoff = datetime.utcnow() - timedelta(hours=hours)
|
|
155
|
+
query += " AND timestamp_utc >= ?"
|
|
156
|
+
params.append(cutoff.isoformat())
|
|
157
|
+
elif date_from:
|
|
158
|
+
query += " AND timestamp_utc >= ?"
|
|
159
|
+
params.append(date_from)
|
|
160
|
+
|
|
161
|
+
if date_to:
|
|
162
|
+
date_to_val = date_to
|
|
163
|
+
if len(date_to_val) == 10:
|
|
164
|
+
date_to_val += "T23:59:59"
|
|
165
|
+
query += " AND timestamp_utc <= ?"
|
|
166
|
+
params.append(date_to_val)
|
|
167
|
+
|
|
168
|
+
query += " ORDER BY timestamp_ms DESC LIMIT ?"
|
|
169
|
+
params.append(limit)
|
|
170
|
+
|
|
171
|
+
rows = db.execute(query, tuple(params)).fetchall()
|
|
172
|
+
|
|
173
|
+
for row in rows:
|
|
174
|
+
all_events.append({
|
|
175
|
+
"id": f"{region}_{row['id']}",
|
|
176
|
+
"username": row["username"],
|
|
177
|
+
"latitude": row["latitude"],
|
|
178
|
+
"longitude": row["longitude"],
|
|
179
|
+
"timestamp": row["timestamp_utc"],
|
|
180
|
+
"type": row["report_type"],
|
|
181
|
+
"subtype": row["subtype"],
|
|
182
|
+
"region": region
|
|
183
|
+
})
|
|
184
|
+
|
|
185
|
+
db.close()
|
|
186
|
+
except Exception as e:
|
|
187
|
+
print(f"Events error for {region}: {e}")
|
|
188
|
+
|
|
189
|
+
# Sort by timestamp and limit
|
|
190
|
+
all_events.sort(key=lambda x: x["timestamp"] or "", reverse=True)
|
|
191
|
+
return jsonify(all_events[:limit])
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
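# The since/from/to handling above is repeated verbatim in api_heatmap()
# below; a shared builder would be one way to deduplicate it (a sketch only,
# not used by the handlers):
#
#     def build_time_filters(since, date_from, date_to):
#         sql, params = "", []
#         if since:
#             cutoff = datetime.utcnow() - timedelta(hours=int(since))
#             sql += " AND timestamp_utc >= ?"
#             params.append(cutoff.isoformat())
#         elif date_from:
#             sql += " AND timestamp_utc >= ?"
#             params.append(date_from)
#         if date_to:
#             end = date_to + "T23:59:59" if len(date_to) == 10 else date_to
#             sql += " AND timestamp_utc <= ?"
#             params.append(end)
#         return sql, params
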
@app.route("/api/heatmap")
|
|
195
|
+
def api_heatmap():
|
|
196
|
+
"""Get events formatted for heatmap layer from all databases."""
|
|
197
|
+
since = request.args.get("since") # hours ago
|
|
198
|
+
event_type = request.args.get("type")
|
|
199
|
+
date_from = request.args.get("from") # ISO date string
|
|
200
|
+
date_to = request.args.get("to") # ISO date string
|
|
201
|
+
username = request.args.get("user") # filter by username
|
|
202
|
+
|
|
203
|
+
# Aggregate heatmap data from all databases
|
|
204
|
+
location_weights = {}
|
|
205
|
+
|
|
206
|
+
for region, db in get_all_dbs():
|
|
207
|
+
try:
|
|
208
|
+
query = "SELECT latitude, longitude, COUNT(*) as weight FROM events WHERE 1=1"
|
|
209
|
+
params = []
|
|
210
|
+
|
|
211
|
+
if event_type:
|
|
212
|
+
query += " AND report_type = ?"
|
|
213
|
+
params.append(event_type.upper())
|
|
214
|
+
|
|
215
|
+
if username:
|
|
216
|
+
query += " AND username = ?"
|
|
217
|
+
params.append(username)
|
|
218
|
+
|
|
219
|
+
if since:
|
|
220
|
+
hours = int(since)
|
|
221
|
+
cutoff = datetime.utcnow() - timedelta(hours=hours)
|
|
222
|
+
query += " AND timestamp_utc >= ?"
|
|
223
|
+
params.append(cutoff.isoformat())
|
|
224
|
+
elif date_from:
|
|
225
|
+
query += " AND timestamp_utc >= ?"
|
|
226
|
+
params.append(date_from)
|
|
227
|
+
|
|
228
|
+
if date_to:
|
|
229
|
+
date_to_val = date_to
|
|
230
|
+
if len(date_to_val) == 10:
|
|
231
|
+
date_to_val += "T23:59:59"
|
|
232
|
+
query += " AND timestamp_utc <= ?"
|
|
233
|
+
params.append(date_to_val)
|
|
234
|
+
|
|
235
|
+
query += " GROUP BY ROUND(latitude, 4), ROUND(longitude, 4)"
|
|
236
|
+
|
|
237
|
+
rows = db.execute(query, tuple(params)).fetchall()
|
|
238
|
+
|
|
239
|
+
for row in rows:
|
|
240
|
+
key = (round(row["latitude"], 4), round(row["longitude"], 4))
|
|
241
|
+
location_weights[key] = location_weights.get(key, 0) + row["weight"]
|
|
242
|
+
|
|
243
|
+
db.close()
|
|
244
|
+
except Exception as e:
|
|
245
|
+
print(f"Heatmap error for {region}: {e}")
|
|
246
|
+
|
|
247
|
+
# Format for Leaflet heatmap: [[lat, lng, intensity], ...]
|
|
248
|
+
heatmap_data = [[lat, lon, weight] for (lat, lon), weight in location_weights.items()]
|
|
249
|
+
|
|
250
|
+
return jsonify(heatmap_data)
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
@app.route("/api/user/<username>")
|
|
254
|
+
def api_user(username):
|
|
255
|
+
"""Get user profile and events."""
|
|
256
|
+
db = get_db()
|
|
257
|
+
profile = get_user_profile(db, username)
|
|
258
|
+
db.close()
|
|
259
|
+
|
|
260
|
+
if not profile:
|
|
261
|
+
return jsonify({"error": "User not found"}), 404
|
|
262
|
+
|
|
263
|
+
# Remove full events list from profile (too large)
|
|
264
|
+
profile["events"] = profile["events"][-50:] # Last 50 only
|
|
265
|
+
return jsonify(profile)
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
@app.route("/api/types")
|
|
269
|
+
def api_types():
|
|
270
|
+
"""Get list of event types with counts from all databases."""
|
|
271
|
+
type_counts = {}
|
|
272
|
+
|
|
273
|
+
for region, db in get_all_dbs():
|
|
274
|
+
try:
|
|
275
|
+
rows = db.execute("""
|
|
276
|
+
SELECT report_type, COUNT(*) as count
|
|
277
|
+
FROM events
|
|
278
|
+
GROUP BY report_type
|
|
279
|
+
""").fetchall()
|
|
280
|
+
|
|
281
|
+
for row in rows:
|
|
282
|
+
t = row["report_type"]
|
|
283
|
+
type_counts[t] = type_counts.get(t, 0) + row["count"]
|
|
284
|
+
|
|
285
|
+
db.close()
|
|
286
|
+
except Exception as e:
|
|
287
|
+
print(f"Types error for {region}: {e}")
|
|
288
|
+
|
|
289
|
+
types = [{"type": t, "count": c} for t, c in sorted(type_counts.items(), key=lambda x: -x[1])]
|
|
290
|
+
return jsonify(types)
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
@app.route("/api/users")
|
|
294
|
+
def api_users():
|
|
295
|
+
"""Get list of users with event counts from all databases."""
|
|
296
|
+
search = request.args.get("q", "")
|
|
297
|
+
limit = request.args.get("limit", 50, type=int)
|
|
298
|
+
|
|
299
|
+
user_counts = {}
|
|
300
|
+
|
|
301
|
+
for region, db in get_all_dbs():
|
|
302
|
+
try:
|
|
303
|
+
if search:
|
|
304
|
+
rows = db.execute("""
|
|
305
|
+
SELECT username, COUNT(*) as count
|
|
306
|
+
FROM events
|
|
307
|
+
WHERE username LIKE ?
|
|
308
|
+
GROUP BY username
|
|
309
|
+
""", (f"%{search}%",)).fetchall()
|
|
310
|
+
else:
|
|
311
|
+
rows = db.execute("""
|
|
312
|
+
SELECT username, COUNT(*) as count
|
|
313
|
+
FROM events
|
|
314
|
+
GROUP BY username
|
|
315
|
+
""").fetchall()
|
|
316
|
+
|
|
317
|
+
for row in rows:
|
|
318
|
+
u = row["username"]
|
|
319
|
+
user_counts[u] = user_counts.get(u, 0) + row["count"]
|
|
320
|
+
|
|
321
|
+
db.close()
|
|
322
|
+
except Exception as e:
|
|
323
|
+
print(f"Users error for {region}: {e}")
|
|
324
|
+
|
|
325
|
+
users = [{"username": u, "count": c} for u, c in sorted(user_counts.items(), key=lambda x: -x[1])[:limit]]
|
|
326
|
+
return jsonify(users)
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
@app.route("/api/leaderboard")
|
|
330
|
+
def api_leaderboard():
|
|
331
|
+
"""Get top users leaderboard with detailed stats."""
|
|
332
|
+
limit = request.args.get("limit", 10, type=int)
|
|
333
|
+
|
|
334
|
+
user_stats = {}
|
|
335
|
+
|
|
336
|
+
for region, db in get_all_dbs():
|
|
337
|
+
try:
|
|
338
|
+
rows = db.execute("""
|
|
339
|
+
SELECT username,
|
|
340
|
+
COUNT(*) as count,
|
|
341
|
+
COUNT(DISTINCT report_type) as types,
|
|
342
|
+
MAX(timestamp_utc) as last_seen
|
|
343
|
+
FROM events
|
|
344
|
+
WHERE username != 'anonymous'
|
|
345
|
+
GROUP BY username
|
|
346
|
+
""").fetchall()
|
|
347
|
+
|
|
348
|
+
for row in rows:
|
|
349
|
+
u = row["username"]
|
|
350
|
+
if u not in user_stats:
|
|
351
|
+
user_stats[u] = {"count": 0, "types": set(), "last_seen": None}
|
|
352
|
+
|
|
353
|
+
user_stats[u]["count"] += row["count"]
|
|
354
|
+
user_stats[u]["types"].add(row["types"])
|
|
355
|
+
|
|
356
|
+
if row["last_seen"]:
|
|
357
|
+
if user_stats[u]["last_seen"] is None or row["last_seen"] > user_stats[u]["last_seen"]:
|
|
358
|
+
user_stats[u]["last_seen"] = row["last_seen"]
|
|
359
|
+
|
|
360
|
+
db.close()
|
|
361
|
+
except Exception as e:
|
|
362
|
+
print(f"Leaderboard error for {region}: {e}")
|
|
363
|
+
|
|
364
|
+
# Sort by count and format
|
|
365
|
+
sorted_users = sorted(user_stats.items(), key=lambda x: -x[1]["count"])[:limit]
|
|
366
|
+
|
|
367
|
+
leaderboard = []
|
|
368
|
+
for rank, (username, stats) in enumerate(sorted_users, 1):
|
|
369
|
+
leaderboard.append({
|
|
370
|
+
"rank": rank,
|
|
371
|
+
"username": username,
|
|
372
|
+
"count": stats["count"],
|
|
373
|
+
"last_seen": stats["last_seen"]
|
|
374
|
+
})
|
|
375
|
+
|
|
376
|
+
return jsonify(leaderboard)
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
@app.route("/api/stream")
|
|
380
|
+
def api_stream():
|
|
381
|
+
"""Server-Sent Events endpoint for real-time updates."""
|
|
382
|
+
def generate():
|
|
383
|
+
q = queue.Queue()
|
|
384
|
+
with event_queues_lock:
|
|
385
|
+
event_queues.append(q)
|
|
386
|
+
|
|
387
|
+
try:
|
|
388
|
+
# Send initial connection message
|
|
389
|
+
yield f"data: {json.dumps({'type': 'connected', 'message': 'Connected to live feed'})}\n\n"
|
|
390
|
+
|
|
391
|
+
while True:
|
|
392
|
+
try:
|
|
393
|
+
# Wait for new events with timeout
|
|
394
|
+
event = q.get(timeout=30)
|
|
395
|
+
yield f"data: {json.dumps(event)}\n\n"
|
|
396
|
+
except queue.Empty:
|
|
397
|
+
# Send heartbeat to keep connection alive
|
|
398
|
+
yield f"data: {json.dumps({'type': 'heartbeat'})}\n\n"
|
|
399
|
+
finally:
|
|
400
|
+
with event_queues_lock:
|
|
401
|
+
if q in event_queues:
|
|
402
|
+
event_queues.remove(q)
|
|
403
|
+
|
|
404
|
+
return Response(generate(), mimetype='text/event-stream',
|
|
405
|
+
headers={'Cache-Control': 'no-cache', 'X-Accel-Buffering': 'no'})
|
|
406
|
+
|
|
407
|
+
|
|
408
|
+
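# A minimal consumer sketch for this stream (illustrative, not part of the
# package; assumes the third-party `requests` library). generate() frames
# every message as "data: <json>\n\n", so each line carrying a "data: "
# prefix is one JSON payload:
#
#     import requests
#     with requests.get("http://localhost:5000/api/stream", stream=True) as r:
#         for line in r.iter_lines(decode_unicode=True):
#             if line and line.startswith("data: "):
#                 payload = json.loads(line[len("data: "):])
#                 print(payload.get("type"), payload)
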
@app.route("/api/status")
|
|
409
|
+
def api_status():
|
|
410
|
+
"""Get current collector status."""
|
|
411
|
+
try:
|
|
412
|
+
if os.path.exists(STATUS_FILE):
|
|
413
|
+
with open(STATUS_FILE, 'r') as f:
|
|
414
|
+
status = json.load(f)
|
|
415
|
+
return jsonify(status)
|
|
416
|
+
except Exception:
|
|
417
|
+
pass
|
|
418
|
+
return jsonify({"status": "unknown", "message": "No collector status available"})
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
@app.route("/api/recent-activity")
|
|
422
|
+
def api_recent_activity():
|
|
423
|
+
"""Get most recent events for activity feed from all databases."""
|
|
424
|
+
all_events = []
|
|
425
|
+
|
|
426
|
+
for region, db in get_all_dbs():
|
|
427
|
+
try:
|
|
428
|
+
rows = db.execute("""
|
|
429
|
+
SELECT id, username, latitude, longitude, timestamp_utc, report_type, subtype, grid_cell
|
|
430
|
+
FROM events
|
|
431
|
+
ORDER BY id DESC
|
|
432
|
+
LIMIT 20
|
|
433
|
+
""").fetchall()
|
|
434
|
+
|
|
435
|
+
for row in rows:
|
|
436
|
+
all_events.append({
|
|
437
|
+
"id": f"{region}_{row['id']}",
|
|
438
|
+
"username": row["username"],
|
|
439
|
+
"latitude": row["latitude"],
|
|
440
|
+
"longitude": row["longitude"],
|
|
441
|
+
"timestamp": row["timestamp_utc"],
|
|
442
|
+
"type": row["report_type"],
|
|
443
|
+
"subtype": row["subtype"],
|
|
444
|
+
"grid_cell": row["grid_cell"] if "grid_cell" in row.keys() else None,
|
|
445
|
+
"region": region
|
|
446
|
+
})
|
|
447
|
+
|
|
448
|
+
db.close()
|
|
449
|
+
except Exception as e:
|
|
450
|
+
print(f"Recent activity error for {region}: {e}")
|
|
451
|
+
|
|
452
|
+
# Sort by timestamp and return most recent
|
|
453
|
+
all_events.sort(key=lambda x: x["timestamp"] or "", reverse=True)
|
|
454
|
+
return jsonify(all_events[:50])
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
def broadcast_event(event_data):
    """Broadcast an event to all connected SSE clients."""
    with event_queues_lock:
        for q in event_queues:
            try:
                q.put_nowait(event_data)
            except queue.Full:
                pass

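# Note: the queues registered in api_stream() are created without a maxsize,
# so put_nowait() above can never actually raise queue.Full; the handler only
# becomes meaningful if a bound such as queue.Queue(maxsize=100) is introduced
# to protect against slow consumers.
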
def status_monitor_thread():
    """Monitor status file and broadcast updates."""
    last_mtime = 0
    last_event_ids = {}  # Track the last broadcast event id per region

    while True:
        try:
            # Check for status file updates
            if os.path.exists(STATUS_FILE):
                mtime = os.path.getmtime(STATUS_FILE)
                if mtime > last_mtime:
                    last_mtime = mtime
                    with open(STATUS_FILE, 'r') as f:
                        status = json.load(f)
                    status['type'] = 'status'
                    broadcast_event(status)

            # Check for new database events in all regions
            for region, db in get_all_dbs():
                try:
                    row = db.execute("SELECT MAX(id) as max_id FROM events").fetchone()
                    if row and row["max_id"]:
                        current_max = row["max_id"]
                        last_id = last_event_ids.get(region, 0)

                        if current_max > last_id:
                            # Get new events
                            new_events = db.execute("""
                                SELECT id, username, latitude, longitude, timestamp_utc,
                                       report_type, subtype, grid_cell
                                FROM events WHERE id > ? ORDER BY id ASC LIMIT 20
                            """, (last_id,)).fetchall()

                            for event_row in new_events:
                                event_data = {
                                    "type": "new_event",
                                    "event": {
                                        "id": f"{region}_{event_row['id']}",
                                        "username": event_row["username"],
                                        "latitude": event_row["latitude"],
                                        "longitude": event_row["longitude"],
                                        "timestamp": event_row["timestamp_utc"],
                                        "report_type": event_row["report_type"],
                                        "subtype": event_row["subtype"],
                                        "grid_cell": event_row["grid_cell"] if "grid_cell" in event_row.keys() else None,
                                        "region": region
                                    }
                                }
                                broadcast_event(event_data)

                            last_event_ids[region] = current_max
                    db.close()
                except Exception:
                    pass

        except Exception:
            pass

        time.sleep(2)  # Check every 2 seconds

# Start the status monitor in a background daemon thread
monitor_thread = threading.Thread(target=status_monitor_thread, daemon=True)
monitor_thread.start()


if __name__ == "__main__":
    print(f"Database: {DB_PATH}")
    print("Starting server at http://localhost:5000")
    # Werkzeug development server; note that debug=True on host 0.0.0.0
    # exposes the interactive debugger to the whole network.
    app.run(debug=True, host="0.0.0.0", port=5000, threaded=True)
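
A quick smoke test against a running instance (a sketch, assuming the server
above is listening on localhost:5000 and that the third-party `requests`
package is installed):

    import requests

    BASE = "http://localhost:5000"
    stats = requests.get(f"{BASE}/api/stats").json()
    print(stats["total_events"], "events from", stats["unique_users"], "users")
    for entry in requests.get(f"{BASE}/api/leaderboard", params={"limit": 3}).json():
        print(entry["rank"], entry["username"], entry["count"])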