mcli-framework 7.3.1__py3-none-any.whl → 7.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcli-framework might be problematic. Click here for more details.

Files changed (96) hide show
  1. mcli/app/commands_cmd.py +741 -0
  2. mcli/lib/auth/aws_manager.py +9 -64
  3. mcli/lib/auth/azure_manager.py +9 -64
  4. mcli/lib/auth/credential_manager.py +70 -1
  5. mcli/lib/auth/gcp_manager.py +11 -64
  6. mcli/ml/dashboard/app.py +6 -39
  7. mcli/ml/dashboard/app_integrated.py +288 -117
  8. mcli/ml/dashboard/app_supabase.py +8 -57
  9. mcli/ml/dashboard/app_training.py +10 -12
  10. mcli/ml/dashboard/common.py +167 -0
  11. mcli/ml/dashboard/overview.py +378 -0
  12. mcli/ml/dashboard/pages/cicd.py +4 -4
  13. mcli/ml/dashboard/pages/debug_dependencies.py +406 -0
  14. mcli/ml/dashboard/pages/gravity_viz.py +783 -0
  15. mcli/ml/dashboard/pages/monte_carlo_predictions.py +555 -0
  16. mcli/ml/dashboard/pages/predictions_enhanced.py +4 -2
  17. mcli/ml/dashboard/pages/scrapers_and_logs.py +25 -9
  18. mcli/ml/dashboard/pages/test_portfolio.py +54 -4
  19. mcli/ml/dashboard/pages/trading.py +80 -26
  20. mcli/ml/dashboard/streamlit_extras_utils.py +297 -0
  21. mcli/ml/dashboard/styles.py +55 -0
  22. mcli/ml/dashboard/utils.py +7 -0
  23. mcli/ml/dashboard/warning_suppression.py +34 -0
  24. mcli/ml/database/session.py +169 -16
  25. mcli/ml/predictions/monte_carlo.py +428 -0
  26. mcli/ml/trading/alpaca_client.py +82 -18
  27. mcli/self/self_cmd.py +182 -737
  28. {mcli_framework-7.3.1.dist-info → mcli_framework-7.5.0.dist-info}/METADATA +2 -3
  29. {mcli_framework-7.3.1.dist-info → mcli_framework-7.5.0.dist-info}/RECORD +33 -87
  30. mcli/__init__.py +0 -160
  31. mcli/__main__.py +0 -14
  32. mcli/app/__init__.py +0 -23
  33. mcli/app/model/__init__.py +0 -0
  34. mcli/app/video/__init__.py +0 -5
  35. mcli/chat/__init__.py +0 -34
  36. mcli/lib/__init__.py +0 -0
  37. mcli/lib/api/__init__.py +0 -0
  38. mcli/lib/auth/__init__.py +0 -1
  39. mcli/lib/config/__init__.py +0 -1
  40. mcli/lib/erd/__init__.py +0 -25
  41. mcli/lib/files/__init__.py +0 -0
  42. mcli/lib/fs/__init__.py +0 -1
  43. mcli/lib/logger/__init__.py +0 -3
  44. mcli/lib/performance/__init__.py +0 -17
  45. mcli/lib/pickles/__init__.py +0 -1
  46. mcli/lib/shell/__init__.py +0 -0
  47. mcli/lib/toml/__init__.py +0 -1
  48. mcli/lib/watcher/__init__.py +0 -0
  49. mcli/ml/__init__.py +0 -16
  50. mcli/ml/api/__init__.py +0 -30
  51. mcli/ml/api/routers/__init__.py +0 -27
  52. mcli/ml/auth/__init__.py +0 -45
  53. mcli/ml/backtesting/__init__.py +0 -39
  54. mcli/ml/cli/__init__.py +0 -5
  55. mcli/ml/config/__init__.py +0 -33
  56. mcli/ml/configs/__init__.py +0 -16
  57. mcli/ml/dashboard/__init__.py +0 -12
  58. mcli/ml/dashboard/components/__init__.py +0 -7
  59. mcli/ml/dashboard/pages/__init__.py +0 -6
  60. mcli/ml/data_ingestion/__init__.py +0 -39
  61. mcli/ml/database/__init__.py +0 -47
  62. mcli/ml/experimentation/__init__.py +0 -29
  63. mcli/ml/features/__init__.py +0 -39
  64. mcli/ml/mlops/__init__.py +0 -33
  65. mcli/ml/models/__init__.py +0 -94
  66. mcli/ml/monitoring/__init__.py +0 -25
  67. mcli/ml/optimization/__init__.py +0 -27
  68. mcli/ml/predictions/__init__.py +0 -5
  69. mcli/ml/preprocessing/__init__.py +0 -28
  70. mcli/ml/scripts/__init__.py +0 -1
  71. mcli/ml/trading/__init__.py +0 -60
  72. mcli/ml/training/__init__.py +0 -10
  73. mcli/mygroup/__init__.py +0 -3
  74. mcli/public/__init__.py +0 -1
  75. mcli/public/commands/__init__.py +0 -2
  76. mcli/self/__init__.py +0 -3
  77. mcli/workflow/__init__.py +0 -0
  78. mcli/workflow/daemon/__init__.py +0 -15
  79. mcli/workflow/dashboard/__init__.py +0 -5
  80. mcli/workflow/docker/__init__.py +0 -0
  81. mcli/workflow/file/__init__.py +0 -0
  82. mcli/workflow/gcloud/__init__.py +0 -1
  83. mcli/workflow/git_commit/__init__.py +0 -0
  84. mcli/workflow/interview/__init__.py +0 -0
  85. mcli/workflow/politician_trading/__init__.py +0 -4
  86. mcli/workflow/registry/__init__.py +0 -0
  87. mcli/workflow/repo/__init__.py +0 -0
  88. mcli/workflow/scheduler/__init__.py +0 -25
  89. mcli/workflow/search/__init__.py +0 -0
  90. mcli/workflow/sync/__init__.py +0 -5
  91. mcli/workflow/videos/__init__.py +0 -1
  92. mcli/workflow/wakatime/__init__.py +0 -80
  93. {mcli_framework-7.3.1.dist-info → mcli_framework-7.5.0.dist-info}/WHEEL +0 -0
  94. {mcli_framework-7.3.1.dist-info → mcli_framework-7.5.0.dist-info}/entry_points.txt +0 -0
  95. {mcli_framework-7.3.1.dist-info → mcli_framework-7.5.0.dist-info}/licenses/LICENSE +0 -0
  96. {mcli_framework-7.3.1.dist-info → mcli_framework-7.5.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,783 @@
1
+ """
2
+ Gravity Anomaly Visualization Dashboard
3
+ Correlates gravitational measurements with politician locations and trading activity
4
+ """
5
+
6
+ import streamlit as st
7
+ import pandas as pd
8
+ import plotly.graph_objects as go
9
+ import plotly.express as px
10
+ from datetime import datetime, timedelta
11
+ from typing import List, Dict, Optional, Tuple
12
+ import numpy as np
13
+ import os
14
+ from supabase import Client, create_client
15
+
16
+ # Configure page
17
+ st.set_page_config(
18
+ page_title="Gravity Anomaly Monitor",
19
+ page_icon="🌍",
20
+ layout="wide",
21
+ initial_sidebar_state="expanded"
22
+ )
23
+
24
+ # Custom CSS for better styling
25
+ st.markdown("""
26
+ <style>
27
+ .main-header {
28
+ font-size: 2.5rem;
29
+ font-weight: 700;
30
+ color: #1f77b4;
31
+ margin-bottom: 1rem;
32
+ }
33
+ .metric-card {
34
+ background-color: #f0f2f6;
35
+ padding: 1rem;
36
+ border-radius: 0.5rem;
37
+ margin: 0.5rem 0;
38
+ }
39
+ .alert-high {
40
+ color: #d32f2f;
41
+ font-weight: 600;
42
+ }
43
+ .alert-medium {
44
+ color: #f57c00;
45
+ font-weight: 600;
46
+ }
47
+ .alert-low {
48
+ color: #388e3c;
49
+ font-weight: 600;
50
+ }
51
+ </style>
52
+ """, unsafe_allow_html=True)
53
+
54
+
55
@st.cache_resource
def get_supabase_client() -> Optional[Client]:
    """Build (and cache) a Supabase client.

    Credentials come from Streamlit secrets when available, otherwise from
    environment variables. Returns None when credentials are missing or the
    client cannot be constructed.
    """
    try:
        secrets = st.secrets
        url = secrets.get("SUPABASE_URL", "")
        key = secrets.get("SUPABASE_KEY", "") or secrets.get("SUPABASE_SERVICE_ROLE_KEY", "")
    except (AttributeError, FileNotFoundError):
        # No secrets.toml (e.g. a local run) -- fall back to the environment.
        url = os.getenv("SUPABASE_URL", "")
        key = os.getenv("SUPABASE_KEY", "") or os.getenv("SUPABASE_SERVICE_ROLE_KEY", "")

    if url and key:
        try:
            return create_client(url, key)
        except Exception:
            return None
    return None
72
+
73
+
74
+ class GravityData:
75
+ """Simulates gravity measurement data (in production, would fetch from real sensors/APIs)"""
76
+
77
+ @staticmethod
78
+ def generate_gravity_anomalies(lat: float, lon: float, radius_km: float = 50) -> pd.DataFrame:
79
+ """
80
+ Generate simulated gravity measurements near a location
81
+ In production: fetch from GRACE satellite data, ground sensors, or geological surveys
82
+ """
83
+ num_points = np.random.randint(20, 50)
84
+
85
+ # Generate points within radius
86
+ angles = np.random.uniform(0, 2 * np.pi, num_points)
87
+ distances = np.random.uniform(0, radius_km, num_points)
88
+
89
+ # Convert to lat/lon offsets (approximate)
90
+ lat_offsets = (distances / 111) * np.cos(angles) # 111 km per degree latitude
91
+ lon_offsets = (distances / (111 * np.cos(np.radians(lat)))) * np.sin(angles)
92
+
93
+ # Generate gravity anomalies (mGal - milligals)
94
+ # Normal Earth gravity ~9.8 m/s^2, anomalies typically +/- 100 mGal
95
+ base_gravity = 980000 # mGal
96
+ anomalies = np.random.normal(0, 30, num_points) # +/- 30 mGal typical range
97
+
98
+ # Add some interesting features
99
+ if np.random.random() > 0.7: # 30% chance of significant anomaly
100
+ spike_idx = np.random.randint(0, num_points)
101
+ anomalies[spike_idx] += np.random.uniform(50, 100)
102
+
103
+ timestamps = [
104
+ datetime.now() - timedelta(hours=np.random.randint(0, 168)) # Last week
105
+ for _ in range(num_points)
106
+ ]
107
+
108
+ return pd.DataFrame({
109
+ 'latitude': lat + lat_offsets,
110
+ 'longitude': lon + lon_offsets,
111
+ 'gravity_anomaly_mgal': anomalies,
112
+ 'absolute_gravity_mgal': base_gravity + anomalies,
113
+ 'measurement_time': timestamps,
114
+ 'distance_km': distances,
115
+ 'quality': np.random.choice(['high', 'medium', 'low'], num_points, p=[0.6, 0.3, 0.1])
116
+ })
117
+
118
+
119
class PoliticianLocations:
    """Manages politician location and trading data.

    Locations are approximated: district numbers map to a major city in the
    district where known, otherwise politicians fall back to their state or
    national capital, with a small random jitter to avoid exact marker overlap.
    """

    # Approximate coordinates for major cities (used as fallback)
    STATE_CAPITALS = {
        # US States (capital cities)
        'Alabama': (32.3668, -86.3000),
        'California': (38.5816, -121.4944),
        'Texas': (30.2672, -97.7431),
        'New Jersey': (40.2206, -74.7597),
        'Florida': (30.4383, -84.2807),
        'New York': (42.6526, -73.7562),
        'Pennsylvania': (40.2732, -76.8867),
        'Illinois': (39.7817, -89.6501),
        'Ohio': (39.9612, -82.9988),
        'Georgia': (33.7490, -84.3880),
        'Michigan': (42.7325, -84.5555),
        'North Carolina': (35.7796, -78.6382),
        'Virginia': (37.5407, -77.4360),
        'Washington': (47.0379, -122.9007),
        'Massachusetts': (42.3601, -71.0589),
        # UK
        'United Kingdom': (51.5074, -0.1278),  # London
        'UK': (51.5074, -0.1278),
        # EU Countries (capitals)
        'France': (48.8566, 2.3522),  # Paris
        'Germany': (52.5200, 13.4050),  # Berlin
        'Italy': (41.9028, 12.4964),  # Rome
        'Spain': (40.4168, -3.7038),  # Madrid
        'Poland': (52.2297, 21.0122),  # Warsaw
        'Netherlands': (52.3676, 4.9041),  # Amsterdam
        'Belgium': (50.8503, 4.3517),  # Brussels
        'Sweden': (59.3293, 18.0686),  # Stockholm
        'Austria': (48.2082, 16.3738),  # Vienna
        'Denmark': (55.6761, 12.5683),  # Copenhagen
        'Finland': (60.1699, 24.9384),  # Helsinki
    }

    # Major city coordinates for district approximations
    MAJOR_CITIES = {
        # California
        'San Francisco': (37.7749, -122.4194),
        'Los Angeles': (34.0522, -118.2437),
        'San Diego': (32.7157, -117.1611),
        'Sacramento': (38.5816, -121.4944),
        'San Jose': (37.3382, -121.8863),
        # Texas
        'Houston': (29.7604, -95.3698),  # fixed: previous value reused Austin's longitude
        'Dallas': (32.7767, -96.7970),
        'Austin': (30.2672, -97.7431),
        'San Antonio': (29.4241, -98.4936),
        # New York
        'New York City': (40.7128, -74.0060),
        'Buffalo': (42.8864, -78.8784),
        'Rochester': (43.1566, -77.6088),
        'Albany': (42.6526, -73.7562),
        # Florida
        'Miami': (25.7617, -80.1918),
        'Tampa': (27.9506, -82.4572),
        'Orlando': (28.5383, -81.3792),
        'Jacksonville': (30.3322, -81.6557),
        'Tallahassee': (30.4383, -84.2807),
        # Pennsylvania
        'Philadelphia': (39.9526, -75.1652),
        'Pittsburgh': (40.4406, -79.9959),
        'Harrisburg': (40.2732, -76.8867),
        # Illinois
        'Chicago': (41.8781, -87.6298),
        'Springfield': (39.7817, -89.6501),
        # New Jersey
        'Newark': (40.7357, -74.1724),
        'Jersey City': (40.7178, -74.0431),
        'Trenton': (40.2206, -74.7597),
        # And more major cities as needed...
    }

    @staticmethod
    def get_location_for_politician(state_or_country: str, district: str, role: str) -> Tuple[float, float]:
        """
        Get lat/lon for a politician based on their state/district/role.

        Args:
            state_or_country: US state or country name, keyed into STATE_CAPITALS.
            district: District code such as "CA-11"; may be empty or None.
            role: Politician role; currently unused, kept for interface stability.

        Returns:
            (latitude, longitude) with a small random jitter so several
            politicians from the same place do not overlap exactly.
        """
        import random

        # For US Congress members with districts, try district-specific locations
        if district and state_or_country in ['California', 'Texas', 'New York', 'Florida', 'Pennsylvania', 'Illinois', 'New Jersey']:
            # Parse district like "CA-11" or "TX-02"
            if '-' in str(district):
                district_num = district.split('-')[-1]

                # District-specific mapping (approximate major city in district).
                # Simplified approach - in production, use actual district boundaries.
                district_locations = {
                    # California districts (examples)
                    'California': {
                        '11': (37.7749, -122.4194),  # SF Bay Area
                        '12': (37.7749, -122.4194),  # SF
                        '13': (37.8044, -122.2712),  # Oakland/East Bay (fixed: previously carried San Jose coords)
                        '17': (37.3382, -121.8863),  # San Jose area
                        '28': (34.0522, -118.2437),  # LA
                        '43': (33.7701, -118.1937),  # Long Beach
                    },
                    # Texas districts
                    'Texas': {
                        '02': (29.7604, -95.3698),  # Houston (fixed: longitude was Austin's)
                        '07': (29.7604, -95.3698),  # Houston (fixed: longitude was Austin's)
                        '24': (32.7767, -96.7970),  # Dallas
                        '21': (29.4241, -98.4936),  # San Antonio
                    },
                    # New York districts
                    'New York': {
                        '12': (40.7128, -74.0060),  # NYC Manhattan
                        '14': (40.7128, -74.0060),  # NYC Bronx/Queens
                        '26': (42.8864, -78.8784),  # Buffalo
                    },
                    # Add more as needed
                }

                if state_or_country in district_locations and district_num in district_locations[state_or_country]:
                    # Add small random offset to prevent exact overlap
                    base_lat, base_lon = district_locations[state_or_country][district_num]
                    offset_lat = random.uniform(-0.1, 0.1)
                    offset_lon = random.uniform(-0.1, 0.1)
                    return (base_lat + offset_lat, base_lon + offset_lon)

        # For senators or politicians without district mapping, use the state
        # capital (default: Washington, DC) with a slightly larger jitter.
        base_coords = PoliticianLocations.STATE_CAPITALS.get(state_or_country, (38.9072, -77.0369))
        offset_lat = random.uniform(-0.2, 0.2)
        offset_lon = random.uniform(-0.2, 0.2)
        return (base_coords[0] + offset_lat, base_coords[1] + offset_lon)

    @staticmethod
    @st.cache_data(ttl=60)
    def get_politicians_from_db() -> pd.DataFrame:
        """Fetch politicians with trading data from database (cached for 60s).

        Falls back to bundled sample data when the database is unreachable,
        returns no rows, leaves no politician with trades, or raises.
        """
        client = get_supabase_client()
        if not client:
            return PoliticianLocations.get_fallback_politicians()

        try:
            # Fetch politicians
            politicians_response = client.table("politicians").select("*").execute()
            if not politicians_response.data:
                return PoliticianLocations.get_fallback_politicians()

            politicians_df = pd.DataFrame(politicians_response.data)

            # Fetch trading disclosures to calculate volumes
            disclosures_response = client.table("trading_disclosures").select("*").execute()
            disclosures_df = pd.DataFrame(disclosures_response.data) if disclosures_response.data else pd.DataFrame()

            # Calculate trading metrics per politician
            result_data = []
            for _, pol in politicians_df.iterrows():
                pol_id = pol.get('id')
                pol_disclosures = disclosures_df[disclosures_df['politician_id'] == pol_id] if not disclosures_df.empty else pd.DataFrame()

                recent_trades = len(pol_disclosures)
                total_volume = 0
                last_trade_date = None

                if not pol_disclosures.empty:
                    # Estimate volume from range midpoints; fall back to the
                    # exact amount when no range is disclosed.
                    for _, d in pol_disclosures.iterrows():
                        min_amt = d.get('amount_range_min', 0) or 0
                        max_amt = d.get('amount_range_max', 0) or 0
                        if min_amt and max_amt:
                            total_volume += (min_amt + max_amt) / 2
                        elif d.get('amount_exact'):
                            total_volume += d['amount_exact']

                    # Most recent transaction date (NaT-safe via coerce)
                    transaction_dates = pd.to_datetime(pol_disclosures['transaction_date'], errors='coerce')
                    last_trade_date = transaction_dates.max()

                # Get location based on state/district/role
                state_or_country = pol.get('state_or_country', '')
                district = pol.get('district', '')
                role = pol.get('role', '')
                lat, lon = PoliticianLocations.get_location_for_politician(
                    state_or_country, district, role
                )

                # Build display name - prefer first+last over full_name.
                # DB columns may be NULL, so coerce None to "" before strip()
                # (fixes AttributeError on NULL name fields).
                first_name = (pol.get('first_name') or '').strip()
                last_name = (pol.get('last_name') or '').strip()
                full_name = (pol.get('full_name') or '').strip()

                if first_name and last_name:
                    display_name = f"{first_name} {last_name}"
                elif full_name:
                    display_name = full_name
                elif first_name:
                    display_name = first_name
                elif last_name:
                    display_name = last_name
                else:
                    # Fallback to a truncated id; str() guards non-string ids
                    display_name = f"Politician {str(pol_id)[:8]}"

                result_data.append({
                    'name': display_name,
                    'role': pol.get('role', 'Unknown'),
                    'state': state_or_country,
                    'district': pol.get('district'),
                    'party': pol.get('party', 'Unknown'),
                    'lat': lat,
                    'lon': lon,
                    'recent_trades': recent_trades,
                    'total_trade_volume': total_volume,
                    # NOTE(review): arbitrary placeholder date when no trade
                    # date is known - confirm downstream display expectations.
                    'last_trade_date': last_trade_date if pd.notna(last_trade_date) else datetime(2025, 1, 1),
                })

            result_df = pd.DataFrame(result_data)
            # Filter out politicians with no trading data
            result_df = result_df[result_df['recent_trades'] > 0]

            if result_df.empty:
                return PoliticianLocations.get_fallback_politicians()

            return result_df

        except Exception as e:
            st.warning(f"Could not fetch politicians from database: {e}")
            return PoliticianLocations.get_fallback_politicians()

    @staticmethod
    def get_fallback_politicians() -> pd.DataFrame:
        """Fallback sample data if database is unavailable"""
        politicians = [
            {
                'name': 'Nancy Pelosi',
                'role': 'US House Representative',
                'state': 'California',
                'district': 'CA-11',
                'party': 'Democrat',
                'lat': 37.7749,
                'lon': -122.4194,
                'recent_trades': 15,
                'total_trade_volume': 5_000_000,
                'last_trade_date': datetime(2025, 10, 5),
            },
            {
                'name': 'Tommy Tuberville',
                'role': 'US Senator',
                'state': 'Alabama',
                'district': None,
                'party': 'Republican',
                'lat': 32.3668,
                'lon': -86.3000,
                'recent_trades': 23,
                'total_trade_volume': 3_200_000,
                'last_trade_date': datetime(2025, 10, 3),
            },
        ]
        return pd.DataFrame(politicians)
379
+
380
+
381
def create_gravity_map(politicians_df: pd.DataFrame, selected_politician: Optional[str] = None) -> go.Figure:
    """Create interactive map showing politician locations and gravity anomalies.

    One star marker is drawn per politician. When `selected_politician` matches
    a row's name, that marker is enlarged and highlighted, and simulated
    gravity readings around the location are overlaid as a colour-scaled layer.
    """
    fig = go.Figure()

    for _, row in politicians_df.iterrows():
        highlight = row['name'] == selected_politician

        # Politician star marker (bigger/red when selected)
        hover = (
            f"<b>{row['name']}</b><br>"
            f"Role: {row['role']}<br>"
            f"State: {row['state']}<br>"
            f"Recent Trades: {row['recent_trades']}<br>"
            f"Trade Volume: ${row['total_trade_volume']:,.0f}<br>"
            "<extra></extra>"
        )
        fig.add_trace(go.Scattergeo(
            lon=[row['lon']],
            lat=[row['lat']],
            mode='markers+text',
            marker=dict(
                size=20 if highlight else 12,
                color='red' if highlight else 'blue',
                symbol='star',
                line=dict(width=2, color='white'),
            ),
            text=row['name'],
            textposition='top center',
            name=row['name'],
            hovertemplate=hover,
        ))

        # Overlay simulated gravity readings for the selected politician only
        if highlight:
            readings = GravityData.generate_gravity_anomalies(row['lat'], row['lon'])

            fig.add_trace(go.Scattergeo(
                lon=readings['longitude'],
                lat=readings['latitude'],
                mode='markers',
                marker=dict(
                    size=8,
                    # Colour encodes anomaly strength
                    color=readings['gravity_anomaly_mgal'],
                    colorscale='RdYlGn_r',
                    cmin=-50,
                    cmax=50,
                    colorbar=dict(
                        title="Gravity<br>Anomaly<br>(mGal)",
                        x=1.1,
                    ),
                    showscale=True,
                ),
                name='Gravity Measurements',
                hovertemplate=(
                    "Anomaly: %{marker.color:.2f} mGal<br>"
                    "Distance: %{customdata[0]:.1f} km<br>"
                    "Time: %{customdata[1]}<br>"
                    "<extra></extra>"
                ),
                customdata=readings[['distance_km', 'measurement_time']].values,
            ))

    fig.update_geos(
        projection_type='natural earth',
        showcountries=True,
        countrycolor='lightgray',
        showland=True,
        landcolor='white',
        showocean=True,
        oceancolor='lightblue',
        coastlinewidth=1,
    )

    fig.update_layout(
        title='Politician Locations & Gravity Anomalies',
        height=600,
        showlegend=False,
        margin=dict(l=0, r=0, t=40, b=0),
    )

    return fig
466
+
467
+
468
def create_gravity_heatmap(gravity_df: pd.DataFrame) -> go.Figure:
    """Create heatmap of gravity measurements over time."""
    density = go.Densitymapbox(
        lat=gravity_df['latitude'],
        lon=gravity_df['longitude'],
        z=gravity_df['gravity_anomaly_mgal'],
        radius=20,
        colorscale='RdYlGn_r',
        zmin=-50,
        zmax=50,
        hovertemplate='Anomaly: %{z:.2f} mGal<extra></extra>',
    )
    fig = go.Figure(data=density)

    # Centre the map on the mean measurement position
    fig.update_layout(
        mapbox_style="open-street-map",
        mapbox=dict(
            center=dict(
                lat=gravity_df['latitude'].mean(),
                lon=gravity_df['longitude'].mean(),
            ),
            zoom=8,
        ),
        margin=dict(l=0, r=0, t=0, b=0),
        height=400,
    )

    return fig
497
+
498
+
499
def create_correlation_chart(politician_df: pd.DataFrame, gravity_df: pd.DataFrame) -> go.Figure:
    """Create scatter plot correlating gravity anomalies with trading activity.

    NOTE(review): `politician_df` is currently unused by the implementation;
    it is kept in the signature for interface stability with callers.
    """
    anomalies = gravity_df['gravity_anomaly_mgal']

    labels = ['Max Anomaly', 'Mean Anomaly', 'Std Dev', 'Measurements']
    # Measurement count is divided by 10 so it fits on the same axis
    values = [
        anomalies.max(),
        anomalies.mean(),
        anomalies.std(),
        len(gravity_df) / 10,
    ]

    fig = go.Figure()
    fig.add_trace(go.Bar(
        x=labels,
        y=values,
        marker_color=['red', 'orange', 'yellow', 'green'],
        text=[f"{v:.2f}" for v in values],
        textposition='auto',
    ))

    fig.update_layout(
        title='Gravity Anomaly Statistics',
        xaxis_title='Metric',
        yaxis_title='Value',
        height=300,
        showlegend=False,
    )

    return fig
539
+
540
+
541
def create_timeline_chart(gravity_df: pd.DataFrame, politician_name: str) -> go.Figure:
    """Create timeline showing gravity measurements over time."""
    # Chronological order for a sensible connecting line
    ordered = gravity_df.sort_values('measurement_time')

    trace = go.Scatter(
        x=ordered['measurement_time'],
        y=ordered['gravity_anomaly_mgal'],
        mode='markers+lines',
        marker=dict(
            size=8,
            color=ordered['gravity_anomaly_mgal'],
            colorscale='RdYlGn_r',
            showscale=False,
            line=dict(width=1, color='white'),
        ),
        line=dict(width=1, color='gray', dash='dot'),
        name='Gravity Anomaly',
        hovertemplate=(
            'Time: %{x}<br>'
            'Anomaly: %{y:.2f} mGal<br>'
            '<extra></extra>'
        ),
    )

    fig = go.Figure()
    fig.add_trace(trace)

    # Reference line at zero anomaly
    fig.add_hline(y=0, line_dash="dash", line_color="gray", opacity=0.5)

    fig.update_layout(
        title=f'Gravity Measurements Over Time - {politician_name}',
        xaxis_title='Time',
        yaxis_title='Gravity Anomaly (mGal)',
        height=350,
        showlegend=False,
        hovermode='closest',
    )

    return fig
582
+
583
+
584
def main():
    """Main application.

    Renders sidebar controls, then either a drill-down view for one selected
    politician (metrics plus map/heatmap/timeline/statistics tabs) or an
    overview of all politicians.

    Fixes vs. previous revision: the "Data Range (days)" slider value is now
    actually applied to the gravity measurements, and a dead local
    (`alert_class`, computed but never used) was removed.
    """
    # Header
    st.markdown('<h1 class="main-header">🌍 Gravity Anomaly Monitor</h1>', unsafe_allow_html=True)
    st.markdown("""
    Monitor gravitational anomalies near politician locations and correlate with trading activity.
    Data sources: GRACE satellites, ground-based gravimeters, and geological surveys.
    """)

    # Sidebar
    st.sidebar.header("⚙️ Configuration")

    # Load politician data from database
    politicians_df = PoliticianLocations.get_politicians_from_db()

    # Show data info
    st.sidebar.info(f"📊 Loaded {len(politicians_df)} politicians with trading data")

    # Politician selection
    selected_politician = st.sidebar.selectbox(
        "Select Politician",
        options=['All'] + politicians_df['name'].tolist(),
        index=0
    )

    # Filters
    st.sidebar.subheader("📊 Filters")

    date_range = st.sidebar.slider(
        "Data Range (days)",
        min_value=1,
        max_value=30,
        value=7,
        help="Number of days of historical data to display"
    )

    min_trade_volume = st.sidebar.number_input(
        "Minimum Trade Volume ($)",
        min_value=0,
        max_value=10_000_000,
        value=0,  # 0 shows all politicians by default
        step=100_000,
        format="%d",
        help="Filter politicians by minimum trade volume. Set to 0 to see all."
    )

    # Filter politicians by trade volume
    filtered_politicians = politicians_df[
        politicians_df['total_trade_volume'] >= min_trade_volume
    ]

    # Show filter results
    if len(filtered_politicians) < len(politicians_df):
        st.sidebar.warning(f"⚠️ Filter reduced to {len(filtered_politicians)} politicians (from {len(politicians_df)})")
    else:
        st.sidebar.success(f"✅ Showing all {len(filtered_politicians)} politicians")

    # Main content
    if selected_politician != 'All':
        # Single politician view (selection comes from the unfiltered list,
        # so the row always exists)
        pol = politicians_df[politicians_df['name'] == selected_politician].iloc[0]

        # Metrics row
        col1, col2, col3, col4 = st.columns(4)

        with col1:
            st.metric(
                label="Recent Trades",
                value=pol['recent_trades'],
                delta=f"Last: {(datetime.now() - pol['last_trade_date']).days}d ago"
            )

        with col2:
            st.metric(
                label="Trade Volume",
                value=f"${pol['total_trade_volume']:,.0f}",
                delta=None
            )

        with col3:
            gravity_data = GravityData.generate_gravity_anomalies(pol['lat'], pol['lon'])
            # Apply the "Data Range (days)" filter (fix: the slider value was
            # previously never used). Keep the unfiltered data if the cutoff
            # would leave nothing to display, so the metrics below stay valid.
            cutoff = datetime.now() - timedelta(days=date_range)
            recent = gravity_data[gravity_data['measurement_time'] >= cutoff]
            if not recent.empty:
                gravity_data = recent
            max_anomaly = gravity_data['gravity_anomaly_mgal'].max()
            st.metric(
                label="Max Gravity Anomaly",
                value=f"{max_anomaly:.2f} mGal",
                help="Unusually high anomalies may indicate geological features or data quality issues"
            )

        with col4:
            st.metric(
                label="Measurements",
                value=len(gravity_data),
                delta=f"{len(gravity_data[gravity_data['quality'] == 'high'])} high quality"
            )

        # Tabs for different visualizations
        tab1, tab2, tab3, tab4 = st.tabs(["🗺️ Map", "🔥 Heatmap", "📈 Timeline", "📊 Statistics"])

        with tab1:
            st.plotly_chart(
                create_gravity_map(filtered_politicians, selected_politician),
                config={"displayModeBar": True},
                use_container_width=True
            )

        with tab2:
            st.plotly_chart(
                create_gravity_heatmap(gravity_data),
                config={"displayModeBar": True},
                use_container_width=True
            )

            # Data table: top-10 strongest anomalies
            st.subheader("Measurement Data")
            st.dataframe(
                gravity_data[[
                    'latitude', 'longitude', 'gravity_anomaly_mgal',
                    'distance_km', 'quality', 'measurement_time'
                ]].sort_values('gravity_anomaly_mgal', ascending=False).head(10),
                use_container_width=True
            )

        with tab3:
            st.plotly_chart(
                create_timeline_chart(gravity_data, selected_politician),
                config={"displayModeBar": True},
                use_container_width=True
            )

        with tab4:
            st.plotly_chart(
                create_correlation_chart(filtered_politicians, gravity_data),
                config={"displayModeBar": True},
                use_container_width=True
            )

            # Additional stats
            col1, col2 = st.columns(2)

            with col1:
                st.subheader("Gravity Statistics")
                st.write(f"**Mean Anomaly:** {gravity_data['gravity_anomaly_mgal'].mean():.2f} mGal")
                st.write(f"**Std Dev:** {gravity_data['gravity_anomaly_mgal'].std():.2f} mGal")
                st.write(f"**Min:** {gravity_data['gravity_anomaly_mgal'].min():.2f} mGal")
                st.write(f"**Max:** {gravity_data['gravity_anomaly_mgal'].max():.2f} mGal")

            with col2:
                st.subheader("Data Quality")
                quality_counts = gravity_data['quality'].value_counts()
                st.bar_chart(quality_counts)

    else:
        # Overview of all politicians
        st.subheader("📍 All Politicians Overview")

        st.plotly_chart(
            create_gravity_map(filtered_politicians),
            config={"displayModeBar": True},
            use_container_width=True
        )

        # Summary table
        st.subheader("Trading Activity Summary")
        summary_df = filtered_politicians[[
            'name', 'role', 'state', 'party',
            'recent_trades', 'total_trade_volume', 'last_trade_date'
        ]].sort_values('total_trade_volume', ascending=False)

        st.dataframe(summary_df, use_container_width=True)

        # Trading volume chart
        st.subheader("Trade Volume Comparison")
        fig = px.bar(
            filtered_politicians.sort_values('total_trade_volume', ascending=True),
            x='total_trade_volume',
            y='name',
            orientation='h',
            color='party',
            color_discrete_map={'Democrat': 'blue', 'Republican': 'red'},
            labels={'total_trade_volume': 'Total Trade Volume ($)', 'name': 'Politician'},
            title='Trade Volume by Politician'
        )
        st.plotly_chart(fig, config={"displayModeBar": True}, use_container_width=True)

    # Footer
    st.markdown("---")
    st.markdown("""
    **Data Sources:**
    - Gravity: GRACE satellites, ground gravimeters, geological surveys
    - Trading: mcli politician trading database
    - Locations: Official government records

    **Note:** This is a demonstration. In production, integrate with real-time data sources.
    """)
780
+
781
+
782
+ if __name__ == "__main__":
783
+ main()