mcli-framework 7.10.1__py3-none-any.whl → 7.10.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcli-framework might be problematic.

Files changed (99)
  1. mcli/lib/custom_commands.py +10 -0
  2. mcli/lib/optional_deps.py +240 -0
  3. mcli/workflow/git_commit/ai_service.py +13 -2
  4. mcli/workflow/notebook/converter.py +375 -0
  5. mcli/workflow/notebook/notebook_cmd.py +441 -0
  6. mcli/workflow/notebook/schema.py +402 -0
  7. mcli/workflow/notebook/validator.py +313 -0
  8. mcli/workflow/workflow.py +14 -0
  9. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/METADATA +36 -2
  10. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/RECORD +14 -94
  11. mcli/__init__.py +0 -160
  12. mcli/__main__.py +0 -14
  13. mcli/app/__init__.py +0 -23
  14. mcli/app/model/__init__.py +0 -0
  15. mcli/app/video/__init__.py +0 -5
  16. mcli/chat/__init__.py +0 -34
  17. mcli/lib/__init__.py +0 -0
  18. mcli/lib/api/__init__.py +0 -0
  19. mcli/lib/auth/__init__.py +0 -1
  20. mcli/lib/config/__init__.py +0 -1
  21. mcli/lib/erd/__init__.py +0 -25
  22. mcli/lib/files/__init__.py +0 -0
  23. mcli/lib/fs/__init__.py +0 -1
  24. mcli/lib/logger/__init__.py +0 -3
  25. mcli/lib/performance/__init__.py +0 -17
  26. mcli/lib/pickles/__init__.py +0 -1
  27. mcli/lib/secrets/__init__.py +0 -10
  28. mcli/lib/shell/__init__.py +0 -0
  29. mcli/lib/toml/__init__.py +0 -1
  30. mcli/lib/watcher/__init__.py +0 -0
  31. mcli/ml/__init__.py +0 -16
  32. mcli/ml/api/__init__.py +0 -30
  33. mcli/ml/api/routers/__init__.py +0 -27
  34. mcli/ml/auth/__init__.py +0 -41
  35. mcli/ml/backtesting/__init__.py +0 -33
  36. mcli/ml/cli/__init__.py +0 -5
  37. mcli/ml/config/__init__.py +0 -33
  38. mcli/ml/configs/__init__.py +0 -16
  39. mcli/ml/dashboard/__init__.py +0 -12
  40. mcli/ml/dashboard/components/__init__.py +0 -7
  41. mcli/ml/dashboard/pages/__init__.py +0 -6
  42. mcli/ml/data_ingestion/__init__.py +0 -29
  43. mcli/ml/database/__init__.py +0 -40
  44. mcli/ml/experimentation/__init__.py +0 -29
  45. mcli/ml/features/__init__.py +0 -39
  46. mcli/ml/features/political_features.py +0 -677
  47. mcli/ml/mlops/__init__.py +0 -19
  48. mcli/ml/models/__init__.py +0 -90
  49. mcli/ml/monitoring/__init__.py +0 -25
  50. mcli/ml/optimization/__init__.py +0 -27
  51. mcli/ml/predictions/__init__.py +0 -5
  52. mcli/ml/preprocessing/__init__.py +0 -24
  53. mcli/ml/preprocessing/politician_trading_preprocessor.py +0 -570
  54. mcli/ml/scripts/__init__.py +0 -1
  55. mcli/ml/serving/__init__.py +0 -1
  56. mcli/ml/trading/__init__.py +0 -63
  57. mcli/ml/training/__init__.py +0 -7
  58. mcli/mygroup/__init__.py +0 -3
  59. mcli/public/__init__.py +0 -1
  60. mcli/public/commands/__init__.py +0 -2
  61. mcli/self/__init__.py +0 -3
  62. mcli/workflow/__init__.py +0 -0
  63. mcli/workflow/daemon/__init__.py +0 -15
  64. mcli/workflow/dashboard/__init__.py +0 -5
  65. mcli/workflow/docker/__init__.py +0 -0
  66. mcli/workflow/file/__init__.py +0 -0
  67. mcli/workflow/gcloud/__init__.py +0 -1
  68. mcli/workflow/git_commit/__init__.py +0 -0
  69. mcli/workflow/interview/__init__.py +0 -0
  70. mcli/workflow/politician_trading/__init__.py +0 -4
  71. mcli/workflow/politician_trading/config.py +0 -134
  72. mcli/workflow/politician_trading/connectivity.py +0 -492
  73. mcli/workflow/politician_trading/data_sources.py +0 -654
  74. mcli/workflow/politician_trading/database.py +0 -412
  75. mcli/workflow/politician_trading/demo.py +0 -249
  76. mcli/workflow/politician_trading/models.py +0 -327
  77. mcli/workflow/politician_trading/monitoring.py +0 -413
  78. mcli/workflow/politician_trading/scrapers.py +0 -1074
  79. mcli/workflow/politician_trading/scrapers_california.py +0 -434
  80. mcli/workflow/politician_trading/scrapers_corporate_registry.py +0 -797
  81. mcli/workflow/politician_trading/scrapers_eu.py +0 -376
  82. mcli/workflow/politician_trading/scrapers_free_sources.py +0 -509
  83. mcli/workflow/politician_trading/scrapers_third_party.py +0 -373
  84. mcli/workflow/politician_trading/scrapers_uk.py +0 -378
  85. mcli/workflow/politician_trading/scrapers_us_states.py +0 -471
  86. mcli/workflow/politician_trading/seed_database.py +0 -520
  87. mcli/workflow/politician_trading/supabase_functions.py +0 -354
  88. mcli/workflow/politician_trading/workflow.py +0 -879
  89. mcli/workflow/registry/__init__.py +0 -0
  90. mcli/workflow/repo/__init__.py +0 -0
  91. mcli/workflow/scheduler/__init__.py +0 -25
  92. mcli/workflow/search/__init__.py +0 -0
  93. mcli/workflow/sync/__init__.py +0 -5
  94. mcli/workflow/videos/__init__.py +0 -1
  95. mcli/workflow/wakatime/__init__.py +0 -80
  96. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/WHEEL +0 -0
  97. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/entry_points.txt +0 -0
  98. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/licenses/LICENSE +0 -0
  99. {mcli_framework-7.10.1.dist-info → mcli_framework-7.10.2.dist-info}/top_level.txt +0 -0
mcli/workflow/politician_trading/supabase_functions.py
@@ -1,354 +0,0 @@
- """
- Supabase Edge Functions for politician trading data collection
-
- This module provides the function code that can be deployed as Supabase Edge Functions
- for automated data collection via cron jobs.
- """
-
- # Edge Function code for Supabase (TypeScript/Deno)
- POLITICIAN_TRADING_EDGE_FUNCTION = """
- import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
- import { createClient } from "https://esm.sh/@supabase/supabase-js@2";
-
- const corsHeaders = {
-   'Access-Control-Allow-Origin': '*',
-   'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
- };
-
- serve(async (req) => {
-   // Handle CORS preflight requests
-   if (req.method === 'OPTIONS') {
-     return new Response('ok', { headers: corsHeaders });
-   }
-
-   try {
-     // Initialize Supabase client
-     const supabaseUrl = Deno.env.get('SUPABASE_URL')!;
-     const supabaseKey = Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!;
-     const supabase = createClient(supabaseUrl, supabaseKey);
-
-     console.log('🏛️ Starting politician trading data collection cron job');
-
-     // Create a new job record
-     const jobId = crypto.randomUUID();
-     const startTime = new Date().toISOString();
-
-     const { error: jobError } = await supabase
-       .from('data_pull_jobs')
-       .insert({
-         id: jobId,
-         job_type: 'automated_collection',
-         status: 'running',
-         started_at: startTime,
-         config_snapshot: {
-           triggered_by: 'supabase_cron',
-           timestamp: startTime
-         }
-       });
-
-     if (jobError) {
-       console.error('Failed to create job record:', jobError);
-       throw jobError;
-     }
-
-     // Simulate data collection (in production, this would call actual APIs)
-     const results = await performDataCollection(supabase);
-
-     // Update job with results
-     const { error: updateError } = await supabase
-       .from('data_pull_jobs')
-       .update({
-         status: 'completed',
-         completed_at: new Date().toISOString(),
-         records_found: results.recordsFound,
-         records_processed: results.recordsProcessed,
-         records_new: results.recordsNew,
-         records_updated: results.recordsUpdated,
-         records_failed: results.recordsFailed
-       })
-       .eq('id', jobId);
-
-     if (updateError) {
-       console.error('Failed to update job record:', updateError);
-     }
-
-     console.log('✅ Politician trading collection completed:', results);
-
-     return new Response(
-       JSON.stringify({
-         success: true,
-         jobId,
-         results,
-         timestamp: new Date().toISOString()
-       }),
-       {
-         headers: { ...corsHeaders, 'Content-Type': 'application/json' },
-         status: 200,
-       }
-     );
-
-   } catch (error) {
-     console.error('❌ Cron job failed:', error);
-
-     return new Response(
-       JSON.stringify({
-         success: false,
-         error: error.message,
-         timestamp: new Date().toISOString()
-       }),
-       {
-         headers: { ...corsHeaders, 'Content-Type': 'application/json' },
-         status: 500,
-       }
-     );
-   }
- });
-
- async function performDataCollection(supabase) {
-   // This would implement the actual data collection logic
-   // For now, we'll return mock results
-
-   const results = {
-     recordsFound: 0,
-     recordsProcessed: 0,
-     recordsNew: 0,
-     recordsUpdated: 0,
-     recordsFailed: 0
-   };
-
-   try {
-     // Example: Check for new trading disclosures
-     // In production, this would make HTTP requests to government APIs
-
-     // Simulate finding some new records
-     const mockDisclosures = await simulateDataFetch();
-     results.recordsFound = mockDisclosures.length;
-
-     for (const disclosure of mockDisclosures) {
-       try {
-         // Check if disclosure already exists
-         const { data: existing } = await supabase
-           .from('trading_disclosures')
-           .select('id')
-           .eq('politician_id', disclosure.politician_id)
-           .eq('transaction_date', disclosure.transaction_date)
-           .eq('asset_name', disclosure.asset_name)
-           .eq('transaction_type', disclosure.transaction_type)
-           .single();
-
-         if (existing) {
-           // Update existing record
-           const { error } = await supabase
-             .from('trading_disclosures')
-             .update({
-               ...disclosure,
-               updated_at: new Date().toISOString()
-             })
-             .eq('id', existing.id);
-
-           if (error) {
-             console.error('Update failed:', error);
-             results.recordsFailed++;
-           } else {
-             results.recordsUpdated++;
-           }
-         } else {
-           // Insert new record
-           const { error } = await supabase
-             .from('trading_disclosures')
-             .insert({
-               ...disclosure,
-               id: crypto.randomUUID(),
-               created_at: new Date().toISOString(),
-               updated_at: new Date().toISOString()
-             });
-
-           if (error) {
-             console.error('Insert failed:', error);
-             results.recordsFailed++;
-           } else {
-             results.recordsNew++;
-           }
-         }
-
-         results.recordsProcessed++;
-
-       } catch (error) {
-         console.error('Processing error:', error);
-         results.recordsFailed++;
-       }
-     }
-
-   } catch (error) {
-     console.error('Data collection error:', error);
-     throw error;
-   }
-
-   return results;
- }
-
- async function simulateDataFetch() {
-   // Simulate fetching data from external APIs
-   // In production, this would make real HTTP requests
-
-   return [
-     {
-       politician_id: 'sample-politician-id',
-       transaction_date: new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(), // Yesterday
-       disclosure_date: new Date().toISOString(),
-       transaction_type: 'purchase',
-       asset_name: 'Sample Corp',
-       asset_ticker: 'SMPL',
-       asset_type: 'stock',
-       amount_range_min: 1001,
-       amount_range_max: 15000,
-       source_url: 'https://example.com/disclosure',
-       raw_data: {
-         source: 'simulated',
-         timestamp: new Date().toISOString()
-       },
-       status: 'processed'
-     }
-   ];
- }
- """
-
- # Python function that can be called from the edge function
- PYTHON_COLLECTION_FUNCTION = '''
- """
- Python function for data collection that can be called from the edge function
- """
-
- import asyncio
- import json
- from datetime import datetime
- from typing import Dict, Any
-
- from .workflow import run_politician_trading_collection
-
- async def handle_cron_collection() -> Dict[str, Any]:
-     """
-     Main function called by the Supabase cron job
-     """
-     try:
-         print("🏛️ Starting scheduled politician trading data collection")
-
-         # Run the full collection workflow
-         result = await run_politician_trading_collection()
-
-         # Log results
-         print(f"✅ Collection completed: {result.get('summary', {})}")
-
-         return {
-             "success": True,
-             "result": result,
-             "timestamp": datetime.utcnow().isoformat()
-         }
-
-     except Exception as e:
-         error_msg = f"❌ Scheduled collection failed: {e}"
-         print(error_msg)
-
-         return {
-             "success": False,
-             "error": str(e),
-             "timestamp": datetime.utcnow().isoformat()
-         }
-
- # Export for cron usage
- cron_handler = handle_cron_collection
- '''
-
- # Supabase SQL for setting up the cron job
- CRON_JOB_SQL = """
- -- Politician Trading Data Collection Cron Job Setup
-
- -- Enable the pg_cron extension (if not already enabled)
- CREATE EXTENSION IF NOT EXISTS pg_cron;
-
- -- Create the cron job to run every 6 hours
- SELECT cron.schedule(
-     'politician-trading-collection', -- job name
-     '0 */6 * * *', -- cron expression: every 6 hours at minute 0
-     $$
-     -- Call the Edge Function via HTTP
-     SELECT net.http_post(
-         url := 'https://uljsqvwkomdrlnofmlad.supabase.co/functions/v1/politician-trading-collect',
-         headers := '{"Content-Type": "application/json", "Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InVsanNxdndrb21kcmxub2ZtbGFkIiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTY4MDIyNDQsImV4cCI6MjA3MjM3ODI0NH0.QCpfcEpxGX_5Wn8ljf_J2KWjJLGdF8zRsV_7OatxmHI"}'::jsonb,
-         body := '{}'::jsonb
-     ) as request_id;
-     $$
- );
-
- -- Alternative: Direct database operation (if you prefer not to use Edge Functions)
- SELECT cron.schedule(
-     'politician-trading-db-check',
-     '0 */2 * * *', -- Every 2 hours
-     $$
-     -- Insert a status check record
-     INSERT INTO data_pull_jobs (
-         job_type,
-         status,
-         started_at,
-         config_snapshot
-     ) VALUES (
-         'cron_status_check',
-         'completed',
-         NOW(),
-         '{"type": "automatic_status_check"}'::jsonb
-     );
-     $$
- );
-
- -- View all scheduled cron jobs
- SELECT * FROM cron.job;
-
- -- View cron job run history
- SELECT * FROM cron.job_run_details
- ORDER BY start_time DESC
- LIMIT 10;
-
- -- Delete a cron job (if needed)
- -- SELECT cron.unschedule('politician-trading-collection');
-
- -- Monitor cron job failures
- CREATE OR REPLACE VIEW cron_job_monitoring AS
- SELECT
-     jobname,
-     status,
-     return_message,
-     start_time,
-     end_time,
-     (end_time - start_time) as duration
- FROM cron.job_run_details
- WHERE jobname = 'politician-trading-collection'
- ORDER BY start_time DESC;
-
- -- Create notification for failed jobs (optional)
- CREATE OR REPLACE FUNCTION notify_cron_failure()
- RETURNS trigger AS $$
- BEGIN
-     IF NEW.status = 'failed' AND NEW.jobname = 'politician-trading-collection' THEN
-         INSERT INTO data_pull_jobs (
-             job_type,
-             status,
-             error_message,
-             started_at
-         ) VALUES (
-             'cron_failure_alert',
-             'failed',
-             NEW.return_message,
-             NOW()
-         );
-     END IF;
-     RETURN NEW;
- END;
- $$ LANGUAGE plpgsql;
-
- -- Trigger for cron failure notifications
- DROP TRIGGER IF EXISTS cron_failure_trigger ON cron.job_run_details;
- CREATE TRIGGER cron_failure_trigger
-     AFTER INSERT ON cron.job_run_details
-     FOR EACH ROW
-     EXECUTE FUNCTION notify_cron_failure();
- """