mcli-framework 7.1.2__py3-none-any.whl → 7.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mcli-framework might be problematic. Click here for more details.

Files changed (38)
  1. mcli/app/main.py +10 -0
  2. mcli/lib/custom_commands.py +424 -0
  3. mcli/lib/paths.py +12 -0
  4. mcli/ml/dashboard/app.py +13 -13
  5. mcli/ml/dashboard/app_integrated.py +1949 -70
  6. mcli/ml/dashboard/app_supabase.py +46 -21
  7. mcli/ml/dashboard/app_training.py +14 -14
  8. mcli/ml/dashboard/components/charts.py +258 -0
  9. mcli/ml/dashboard/components/metrics.py +125 -0
  10. mcli/ml/dashboard/components/tables.py +228 -0
  11. mcli/ml/dashboard/pages/cicd.py +382 -0
  12. mcli/ml/dashboard/pages/predictions_enhanced.py +820 -0
  13. mcli/ml/dashboard/pages/scrapers_and_logs.py +1060 -0
  14. mcli/ml/dashboard/pages/workflows.py +533 -0
  15. mcli/ml/training/train_model.py +569 -0
  16. mcli/self/self_cmd.py +322 -94
  17. mcli/workflow/politician_trading/data_sources.py +259 -1
  18. mcli/workflow/politician_trading/models.py +159 -1
  19. mcli/workflow/politician_trading/scrapers_corporate_registry.py +846 -0
  20. mcli/workflow/politician_trading/scrapers_free_sources.py +516 -0
  21. mcli/workflow/politician_trading/scrapers_third_party.py +391 -0
  22. mcli/workflow/politician_trading/seed_database.py +539 -0
  23. mcli/workflow/workflow.py +8 -27
  24. {mcli_framework-7.1.2.dist-info → mcli_framework-7.2.0.dist-info}/METADATA +1 -1
  25. {mcli_framework-7.1.2.dist-info → mcli_framework-7.2.0.dist-info}/RECORD +29 -25
  26. mcli/workflow/daemon/api_daemon.py +0 -800
  27. mcli/workflow/daemon/commands.py +0 -1196
  28. mcli/workflow/dashboard/dashboard_cmd.py +0 -120
  29. mcli/workflow/file/file.py +0 -100
  30. mcli/workflow/git_commit/commands.py +0 -430
  31. mcli/workflow/politician_trading/commands.py +0 -1939
  32. mcli/workflow/scheduler/commands.py +0 -493
  33. mcli/workflow/sync/sync_cmd.py +0 -437
  34. mcli/workflow/videos/videos.py +0 -242
  35. {mcli_framework-7.1.2.dist-info → mcli_framework-7.2.0.dist-info}/WHEEL +0 -0
  36. {mcli_framework-7.1.2.dist-info → mcli_framework-7.2.0.dist-info}/entry_points.txt +0 -0
  37. {mcli_framework-7.1.2.dist-info → mcli_framework-7.2.0.dist-info}/licenses/LICENSE +0 -0
  38. {mcli_framework-7.1.2.dist-info → mcli_framework-7.2.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,228 @@
1
+ """Reusable table components"""
2
+
3
+ import streamlit as st
4
+ import pandas as pd
5
+ from typing import Optional, List, Callable, Any
6
+
7
+
8
def display_dataframe_with_search(
    df: pd.DataFrame,
    search_columns: Optional[List[str]] = None,
    default_sort_column: Optional[str] = None,
    page_size: int = 20,
    key_prefix: str = "table"
) -> pd.DataFrame:
    """Display a dataframe with optional search, sorting and pagination.

    Args:
        df: Data to display.
        search_columns: Columns to search (case-insensitive substring match).
        default_sort_column: If present in ``df``, sort descending by it.
        page_size: Rows shown per page before pagination is enabled.
        key_prefix: Prefix for Streamlit widget keys (unique per table).

    Returns:
        The searched/sorted dataframe (all matching rows, not just the
        currently displayed page).
    """

    if df.empty:
        st.info("No data available")
        return df

    # Search functionality
    if search_columns:
        search_term = st.text_input(
            "🔍 Search",
            key=f"{key_prefix}_search",
            placeholder=f"Search in: {', '.join(search_columns)}"
        )

        if search_term:
            # Build the mask on df's own index. A positional
            # pd.Series([False] * len(df)) carries a fresh RangeIndex and
            # silently misaligns (NaN-fills) when df's index is not 0..n-1,
            # e.g. after a previous filter step.
            mask = pd.Series(False, index=df.index)
            for col in search_columns:
                if col in df.columns:
                    mask |= df[col].astype(str).str.contains(search_term, case=False, na=False)
            df = df[mask]

    # Sorting (descending, newest/largest first)
    if default_sort_column and default_sort_column in df.columns:
        df = df.sort_values(by=default_sort_column, ascending=False)

    # Display count
    st.caption(f"Showing {len(df)} records")

    # Pagination
    if len(df) > page_size:
        total_pages = (len(df) - 1) // page_size + 1
        page = st.number_input(
            "Page",
            min_value=1,
            max_value=total_pages,
            value=1,
            key=f"{key_prefix}_page"
        )
        start_idx = (page - 1) * page_size
        end_idx = start_idx + page_size
        df_display = df.iloc[start_idx:end_idx]
    else:
        df_display = df

    # Display dataframe
    st.dataframe(df_display, width="stretch", height=400)

    return df
63
+
64
+
65
def display_filterable_dataframe(
    df: pd.DataFrame,
    filter_columns: Optional[dict] = None,
    key_prefix: str = "filter"
) -> pd.DataFrame:
    """Render a dataframe preceded by per-column filter widgets.

    ``filter_columns`` maps a column name to one of the filter kinds
    "multiselect", "text" or "date_range"; names missing from the
    dataframe are skipped. Returns the dataframe with every active
    filter applied.
    """

    if df.empty:
        st.info("No data available")
        return df

    if filter_columns:
        with st.expander("🎯 Filters", expanded=False):
            widget_slots = st.columns(len(filter_columns))

            for slot_idx, (column, kind) in enumerate(filter_columns.items()):
                if column not in df.columns:
                    continue

                with widget_slots[slot_idx]:
                    if kind == "multiselect":
                        choices = df[column].unique().tolist()
                        picked = st.multiselect(
                            column,
                            options=choices,
                            default=choices,
                            key=f"{key_prefix}_{column}"
                        )
                        if picked:
                            df = df[df[column].isin(picked)]

                    elif kind == "text":
                        needle = st.text_input(
                            column,
                            key=f"{key_prefix}_{column}"
                        )
                        if needle:
                            df = df[df[column].astype(str).str.contains(needle, case=False, na=False)]

                    elif kind == "date_range":
                        if pd.api.types.is_datetime64_any_dtype(df[column]):
                            span = st.date_input(
                                column,
                                value=(df[column].min(), df[column].max()),
                                key=f"{key_prefix}_{column}"
                            )
                            # A single click yields one date; only filter
                            # once both ends of the range are chosen.
                            if len(span) == 2:
                                lower = pd.Timestamp(span[0])
                                upper = pd.Timestamp(span[1])
                                df = df[(df[column] >= lower) & (df[column] <= upper)]

    st.dataframe(df, width="stretch")

    return df
120
+
121
+
122
def display_table_with_actions(
    df: pd.DataFrame,
    actions: List[dict],
    row_id_column: str = "id",
    key_prefix: str = "action"
):
    """Render one container per row with a button column for every action.

    Each entry in *actions* is a dict with 'label', 'callback' and an
    optional 'icon'; the callback receives the clicked row (a pandas
    Series).
    """

    if df.empty:
        st.info("No data available")
        return

    for _, record in df.iterrows():
        with st.container():
            # One wide column for the row data, one narrow column per action.
            layout = st.columns([3] + [1] * len(actions))

            with layout[0]:
                st.write(record.to_dict())

            # Action buttons
            for slot, spec in enumerate(actions):
                with layout[slot + 1]:
                    badge = spec.get('icon', '')
                    caption = f"{badge} {spec['label']}" if badge else spec['label']

                    clicked = st.button(
                        caption,
                        key=f"{key_prefix}_{record[row_id_column]}_{slot}"
                    )
                    if clicked:
                        spec['callback'](record)

            st.divider()
158
+
159
+
160
def display_expandable_table(
    df: pd.DataFrame,
    summary_columns: List[str],
    detail_callback: Callable[[Any], None],
    row_id_column: str = "id",
    key_prefix: str = "expand"
):
    """Render each row as a collapsed expander.

    The expander label is built from *summary_columns*; expanding a row
    invokes *detail_callback* with that row to draw the detail view.
    """

    if df.empty:
        st.info("No data available")
        return

    for _, record in df.iterrows():
        # One-line summary composed from the requested columns only.
        label = " | ".join(
            f"{col}: {record[col]}" for col in summary_columns if col in record
        )

        with st.expander(label, expanded=False):
            detail_callback(record)
180
+
181
+
182
def export_dataframe(
    df: pd.DataFrame,
    filename: str = "data",
    formats: Optional[List[str]] = None,
    key_prefix: str = "export"
):
    """Provide download buttons for a dataframe.

    Args:
        df: Data to export; nothing is rendered when empty.
        filename: Base name (without extension) used for the downloaded files.
        formats: Any of "csv", "json", "excel". Defaults to ["csv", "json"].
        key_prefix: Prefix for Streamlit widget keys.
    """

    if df.empty:
        return

    # Avoid a mutable default argument: the previous ["csv", "json"] default
    # was a single list object shared across all calls.
    if formats is None:
        formats = ["csv", "json"]

    cols = st.columns(len(formats))

    for idx, fmt in enumerate(formats):
        with cols[idx]:
            if fmt == "csv":
                csv = df.to_csv(index=False).encode('utf-8')
                st.download_button(
                    label="📥 Download CSV",
                    data=csv,
                    # Use the caller-supplied base name; it was previously
                    # ignored and a placeholder name was emitted.
                    file_name=f"{filename}.csv",
                    mime="text/csv",
                    key=f"{key_prefix}_csv"
                )
            elif fmt == "json":
                json_str = df.to_json(orient='records', indent=2)
                st.download_button(
                    label="📥 Download JSON",
                    data=json_str,
                    file_name=f"{filename}.json",
                    mime="application/json",
                    key=f"{key_prefix}_json"
                )
            elif fmt == "excel":
                # Requires openpyxl
                try:
                    # BytesIO was previously referenced without being
                    # imported anywhere, raising NameError on this path.
                    from io import BytesIO
                    buffer = BytesIO()
                    df.to_excel(buffer, index=False, engine='openpyxl')
                    st.download_button(
                        label="📥 Download Excel",
                        data=buffer.getvalue(),
                        file_name=f"{filename}.xlsx",
                        mime="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
                        key=f"{key_prefix}_excel"
                    )
                except ImportError:
                    st.warning("Excel export requires openpyxl package")
@@ -0,0 +1,382 @@
1
+ """CI/CD Pipeline Monitoring Dashboard"""
2
+
3
+ import streamlit as st
4
+ import pandas as pd
5
+ import requests
6
+ import os
7
+ from datetime import datetime, timedelta
8
+ import plotly.graph_objects as go
9
+ import plotly.express as px
10
+ from typing import Optional
11
+
12
+ # Import components
13
+ try:
14
+ from ..components.metrics import display_kpi_row, display_status_badge, display_health_indicator
15
+ from ..components.charts import create_timeline_chart, create_status_pie_chart, render_chart
16
+ from ..components.tables import display_filterable_dataframe, export_dataframe
17
+ except ImportError:
18
+ # Fallback for when imported outside package context
19
+ from components.metrics import display_kpi_row, display_status_badge, display_health_indicator
20
+ from components.charts import create_timeline_chart, create_status_pie_chart, render_chart
21
+ from components.tables import display_filterable_dataframe, export_dataframe
22
+
23
+
24
def get_cicd_api_url() -> str:
    """Return the CI/CD API base URL derived from the LSH daemon URL.

    Reads ``LSH_API_URL`` from the environment, defaulting to the local
    daemon at http://localhost:3034, and appends the /api/cicd path.
    """
    base = os.getenv("LSH_API_URL", "http://localhost:3034")
    return f"{base}/api/cicd"
28
+
29
+
30
def fetch_cicd_builds(limit: int = 100) -> pd.DataFrame:
    """Fetch recent CI/CD builds from the LSH daemon API.

    Any failure (missing endpoint, daemon down, unexpected error) or an
    empty payload falls through to the mock dataset so the dashboard
    always has something to render.
    """
    try:
        endpoint = f"{get_cicd_api_url()}/builds"
        resp = requests.get(endpoint, params={"limit": limit}, timeout=5)
        resp.raise_for_status()

        payload = resp.json().get("builds", [])
        if payload:
            return pd.DataFrame(payload)

    except requests.exceptions.HTTPError as e:
        # A 404 just means the endpoint is not implemented yet; stay quiet.
        if e.response.status_code != 404:
            st.warning(f"Could not fetch CI/CD data: {e}")
    except requests.exceptions.ConnectionError:
        st.warning("⚠️ LSH Daemon connection failed. Using demo data.")
    except Exception as e:
        # Only show warning for unexpected errors
        st.warning(f"Could not fetch CI/CD data: {e}")

    # Return mock data for demonstration
    return create_mock_cicd_data()
55
+
56
+
57
def create_mock_cicd_data() -> pd.DataFrame:
    """Build a 50-row demo build table with plausible random values."""
    import random
    from datetime import datetime, timedelta

    pipelines = ["main-build", "develop-build", "feature-test", "release-deploy", "hotfix-deploy"]
    statuses = ["success", "failed", "running", "cancelled"]
    branches = ["main", "develop", "feature/new-dashboard", "release/v1.2.0", "hotfix/bug-123"]

    rows = []
    for build_no in range(50):
        started = datetime.now() - timedelta(days=random.randint(0, 30), hours=random.randint(0, 23))
        elapsed = random.randint(60, 600)  # seconds
        # Heavily weighted toward successful builds.
        state = random.choices(statuses, weights=[70, 15, 10, 5])[0]

        rows.append({
            "id": f"build-{build_no + 1}",
            "pipeline_name": random.choice(pipelines),
            "branch": random.choice(branches),
            "status": state,
            "started_at": started.isoformat(),
            # Running builds have no duration yet.
            "duration_sec": elapsed if state != "running" else None,
            "commit_sha": f"{random.randint(1000000, 9999999):07x}",
            "triggered_by": random.choice(["github-webhook", "manual", "schedule"]),
            "success_rate": random.uniform(0.7, 1.0) if state == "success" else random.uniform(0, 0.5)
        })

    return pd.DataFrame(rows)
85
+
86
+
87
def fetch_webhooks() -> list:
    """Fetch configured webhooks from the API, or demo entries on failure."""
    try:
        resp = requests.get(f"{get_cicd_api_url()}/webhooks", timeout=5)
        resp.raise_for_status()
        return resp.json().get("webhooks", [])
    except requests.exceptions.HTTPError as e:
        # A 404 just means the endpoint is not implemented yet; stay quiet.
        if e.response.status_code != 404:
            st.warning(f"Could not fetch webhooks: {e}")
    except requests.exceptions.ConnectionError:
        st.warning("⚠️ LSH Daemon connection failed. Using demo data.")
    except Exception as e:
        st.warning(f"Could not fetch webhooks: {e}")

    # Return mock data
    return [
        {"id": "wh-1", "name": "GitHub Main", "url": "https://github.com/user/repo", "events": ["push", "pull_request"], "active": True},
        {"id": "wh-2", "name": "GitLab CI", "url": "https://gitlab.com/user/repo", "events": ["push"], "active": True},
    ]
110
+
111
+
112
def show_cicd_dashboard():
    """Render the CI/CD dashboard page: header, KPI row and four tabbed views.

    Fetches builds via fetch_cicd_builds() (which falls back to demo data on
    API failure), then dispatches to the overview, build-history, webhook and
    configuration tabs.
    """

    st.title("🔧 CI/CD Pipeline Dashboard")
    st.markdown("Monitor build pipelines, deployments, and CI/CD metrics")

    # Refresh button
    col1, col2, col3 = st.columns([1, 1, 8])
    with col1:
        if st.button("🔄 Refresh"):
            st.rerun()
    with col2:
        auto_refresh = st.checkbox("Auto-refresh", value=False)

    # NOTE(review): this blocks the script for 5s and then reruns, so while
    # the checkbox is ticked the page refreshes roughly every 5 seconds.
    if auto_refresh:
        import time
        time.sleep(5)
        st.rerun()

    st.divider()

    # Fetch data
    with st.spinner("Loading CI/CD data..."):
        builds_df = fetch_cicd_builds()

    if builds_df.empty:
        st.warning("No CI/CD build data available")
        return

    # Convert timestamps once so downstream tabs can use .dt accessors.
    if "started_at" in builds_df.columns:
        builds_df["started_at"] = pd.to_datetime(builds_df["started_at"])

    # === KPIs ===
    st.subheader("📊 Pipeline Metrics")

    total_builds = len(builds_df)
    success_builds = len(builds_df[builds_df["status"] == "success"])
    failed_builds = len(builds_df[builds_df["status"] == "failed"])
    running_builds = len(builds_df[builds_df["status"] == "running"])

    success_rate = (success_builds / total_builds * 100) if total_builds > 0 else 0
    # Mean over finished builds only; running builds have null duration.
    avg_duration = builds_df[builds_df["duration_sec"].notna()]["duration_sec"].mean()

    # NOTE(review): the "+5.2%" delta is a hard-coded placeholder, not
    # computed from the data — confirm before relying on it.
    metrics = {
        "Total Builds": {"value": total_builds, "icon": "📦"},
        "Success Rate": {"value": f"{success_rate:.1f}%", "delta": "+5.2%", "delta_color": "normal", "icon": "✅"},
        "Failed Builds": {"value": failed_builds, "icon": "❌"},
        "Running": {"value": running_builds, "icon": "🔵"},
        "Avg Duration": {"value": f"{avg_duration:.0f}s" if pd.notna(avg_duration) else "N/A", "icon": "⏱️"}
    }

    display_kpi_row(metrics, columns=5)

    st.divider()

    # === Tabs for different views ===
    tab1, tab2, tab3, tab4 = st.tabs(["📈 Overview", "🔍 Build History", "🔔 Webhooks", "⚙️ Configuration"])

    with tab1:
        show_cicd_overview(builds_df)

    with tab2:
        show_build_history(builds_df)

    with tab3:
        show_webhooks_config()

    with tab4:
        show_cicd_configuration()
182
+
183
+
184
def show_cicd_overview(builds_df: pd.DataFrame):
    """Render overview charts: status pie, pipeline activity, and trend lines.

    Args:
        builds_df: Build records; the columns used (when present) are
            "status", "pipeline_name", "started_at" (datetime) and
            "duration_sec". The dataframe is not modified.
    """

    col1, col2 = st.columns(2)

    with col1:
        st.markdown("### Status Distribution")
        if "status" in builds_df.columns:
            fig = create_status_pie_chart(builds_df, "status", "Build Status Distribution")
            render_chart(fig)

    with col2:
        st.markdown("### Pipeline Activity")
        if "pipeline_name" in builds_df.columns:
            pipeline_counts = builds_df["pipeline_name"].value_counts().head(10)
            fig = px.bar(
                x=pipeline_counts.values,
                y=pipeline_counts.index,
                orientation='h',
                title="Top Pipelines by Build Count",
                labels={"x": "Number of Builds", "y": "Pipeline"}
            )
            render_chart(fig)

    # Success rate trend
    st.markdown("### 📊 Success Rate Trend")

    if "started_at" in builds_df.columns and "status" in builds_df.columns:
        # Compute the trend on a copy: the previous version assigned a "date"
        # column directly onto the caller's dataframe, which then leaked into
        # the other tabs (e.g. as a spurious column in the history table).
        trend = builds_df[["started_at", "status"]].copy()
        trend["date"] = trend["started_at"].dt.date
        daily_stats = trend.groupby("date").agg({
            "status": lambda x: (x == "success").sum() / len(x) * 100
        }).reset_index()
        daily_stats.columns = ["date", "success_rate"]

        fig = px.line(
            daily_stats,
            x="date",
            y="success_rate",
            title="Daily Success Rate",
            labels={"date": "Date", "success_rate": "Success Rate (%)"},
            markers=True
        )
        fig.add_hline(y=90, line_dash="dash", line_color="green", annotation_text="Target: 90%")
        render_chart(fig)

    # Build duration trend
    st.markdown("### ⏱️ Build Duration Trend")

    if "duration_sec" in builds_df.columns:
        duration_data = builds_df[builds_df["duration_sec"].notna()].copy()

        if not duration_data.empty:
            duration_data["duration_min"] = duration_data["duration_sec"] / 60

            fig = px.scatter(
                duration_data,
                x="started_at",
                y="duration_min",
                color="pipeline_name",
                title="Build Duration Over Time",
                labels={"started_at": "Time", "duration_min": "Duration (minutes)"}
            )
            render_chart(fig)
248
+
249
+
250
def show_build_history(builds_df: pd.DataFrame):
    """Render the filterable build-history table, export buttons, and
    per-build detail expanders (capped at 20 rows)."""

    st.markdown("### Build History")

    # Filters: each listed column gets a multiselect widget.
    filter_config = {
        "pipeline_name": "multiselect",
        "status": "multiselect",
        "branch": "multiselect"
    }

    filtered_df = display_filterable_dataframe(
        builds_df,
        filter_columns=filter_config,
        key_prefix="cicd_filter"
    )

    # Export option
    st.markdown("#### 📥 Export Data")
    export_dataframe(filtered_df, filename="cicd_builds", formats=["csv", "json"])

    # Build details expander
    st.markdown("#### Build Details")

    if not filtered_df.empty:
        # Only the first 20 builds get a detail expander, keeping the page light.
        for _, build in filtered_df.head(20).iterrows():
            # NOTE(review): display_status_badge presumably returns a short
            # markdown badge string usable inside a label — verify against
            # components.metrics.
            with st.expander(f"{build.get('pipeline_name', 'Unknown')} - {build.get('commit_sha', 'Unknown')[:7]} - {display_status_badge(build.get('status', 'unknown'), 'small')}"):
                col1, col2 = st.columns(2)

                with col1:
                    st.markdown(f"**Pipeline:** {build.get('pipeline_name', 'N/A')}")
                    st.markdown(f"**Branch:** {build.get('branch', 'N/A')}")
                    st.markdown(f"**Commit:** `{build.get('commit_sha', 'N/A')}`")
                    st.markdown(f"**Triggered By:** {build.get('triggered_by', 'N/A')}")

                with col2:
                    st.markdown(f"**Status:** {display_status_badge(build.get('status', 'unknown'), 'small')}")
                    st.markdown(f"**Started:** {build.get('started_at', 'N/A')}")
                    if pd.notna(build.get('duration_sec')):
                        st.markdown(f"**Duration:** {build['duration_sec']}s ({build['duration_sec']/60:.1f}m)")

                # Mock logs
                if st.button(f"View Logs", key=f"logs_{build.get('id')}"):
                    st.code(f"""
                    [INFO] Starting build for {build.get('pipeline_name')}
                    [INFO] Checking out branch: {build.get('branch')}
                    [INFO] Installing dependencies...
                    [INFO] Running tests...
                    [INFO] Build {'completed successfully' if build.get('status') == 'success' else 'failed'}
                    """, language="bash")
301
+
302
+
303
def show_webhooks_config():
    """Render the configured-webhook list with per-hook actions plus a
    creation form for new webhooks."""

    st.markdown("### 🔔 Configured Webhooks")

    hooks = fetch_webhooks()

    if not hooks:
        st.info("No webhooks configured")
        return

    for hook in hooks:
        state_label = '✅ Active' if hook['active'] else '❌ Inactive'
        with st.expander(f"{hook['name']} - {state_label}"):
            st.markdown(f"**URL:** `{hook['url']}`")
            st.markdown(f"**Events:** {', '.join(hook['events'])}")
            st.markdown(f"**Status:** {'Active' if hook['active'] else 'Inactive'}")

            col1, col2, col3 = st.columns(3)
            with col1:
                if st.button("🔄 Test", key=f"test_{hook['id']}"):
                    st.success("Webhook test triggered!")
            with col2:
                if st.button("✏️ Edit", key=f"edit_{hook['id']}"):
                    st.info("Edit functionality coming soon")
            with col3:
                if st.button("🗑️ Delete", key=f"delete_{hook['id']}"):
                    st.warning("Delete confirmation required")

        st.divider()

    # Creation form for a new webhook.
    with st.expander("➕ Add New Webhook"):
        hook_name = st.text_input("Webhook Name")
        hook_url = st.text_input("Webhook URL")
        hook_events = st.multiselect("Events", ["push", "pull_request", "release", "tag"])

        if st.button("Create Webhook"):
            if hook_name and hook_url and hook_events:
                st.success(f"Webhook '{hook_name}' created successfully!")
            else:
                st.error("Please fill in all fields")
344
+
345
+
346
def show_cicd_configuration():
    """Render the CI/CD settings form; on submit, echo the chosen values
    back as JSON (no persistence is performed here)."""

    st.markdown("### ⚙️ CI/CD Configuration")

    with st.form("cicd_config"):
        st.markdown("#### Pipeline Settings")

        concurrent_limit = st.number_input("Max Concurrent Builds", min_value=1, max_value=10, value=3)
        timeout_minutes = st.number_input("Build Timeout (minutes)", min_value=5, max_value=120, value=30)
        auto_retry = st.checkbox("Auto-retry Failed Builds", value=True)
        retry_limit = st.number_input("Max Retries", min_value=1, max_value=5, value=2)

        st.markdown("#### Notifications")
        success_alerts = st.checkbox("Notify on Success", value=False)
        failure_alerts = st.checkbox("Notify on Failure", value=True)
        alert_email = st.text_input("Notification Email")

        saved = st.form_submit_button("Save Configuration")

        if saved:
            st.success("✅ Configuration saved successfully!")
            st.json({
                "max_concurrent_builds": concurrent_limit,
                "build_timeout_minutes": timeout_minutes,
                "retry_failed_builds": auto_retry,
                "max_retries": retry_limit,
                "notifications": {
                    "on_success": success_alerts,
                    "on_failure": failure_alerts,
                    "email": alert_email
                }
            })
379
+
380
+
381
# Allow running this page directly as a standalone Streamlit script.
if __name__ == "__main__":
    show_cicd_dashboard()