ops-core 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,36 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *.egg-info/
5
+ dist/
6
+ build/
7
+ .eggs/
8
+
9
+ # Virtual environments
10
+ .venv/
11
+ venv/
12
+
13
+ # IDE
14
+ .vscode/
15
+ .idea/
16
+ *.swp
17
+ *.swo
18
+
19
+ # OS
20
+ .DS_Store
21
+ Thumbs.db
22
+
23
+ # Environment / Secrets
24
+ .env
25
+ *.pem
26
+ *.key
27
+ db_config.yaml
28
+
29
+ # Test
30
+ .pytest_cache/
31
+ htmlcov/
32
+ .coverage
33
+
34
+ # Build artifacts
35
+ *.whl
36
+ db_config.yaml.bak
@@ -0,0 +1,26 @@
1
+ Metadata-Version: 2.4
2
+ Name: ops-core
3
+ Version: 0.1.0
4
+ Summary: Shared operations platform framework - execution engine, auth, rollback, audit
5
+ Requires-Python: >=3.11
6
+ Requires-Dist: fastapi>=0.110.0
7
+ Requires-Dist: httpx>=0.26.0
8
+ Requires-Dist: psycopg[binary]>=3.1.0
9
+ Requires-Dist: pydantic-settings>=2.0.0
10
+ Requires-Dist: pydantic>=2.0.0
11
+ Requires-Dist: pyyaml>=6.0
12
+ Requires-Dist: uvicorn>=0.27.0
13
+ Provides-Extra: cli
14
+ Requires-Dist: rich>=13.0.0; extra == 'cli'
15
+ Requires-Dist: typer>=0.9.0; extra == 'cli'
16
+ Provides-Extra: dev
17
+ Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
18
+ Requires-Dist: pytest>=8.0.0; extra == 'dev'
19
+ Requires-Dist: ruff>=0.3.0; extra == 'dev'
20
+ Provides-Extra: mcp
21
+ Requires-Dist: mcp>=1.0.0; extra == 'mcp'
22
+ Description-Content-Type: text/markdown
23
+
24
+ # ops-core
25
+
26
+ Shared operations platform framework for zenith-ops.
@@ -0,0 +1,3 @@
1
+ # ops-core
2
+
3
+ Shared operations platform framework for zenith-ops.
@@ -0,0 +1,36 @@
1
+ [project]
2
+ name = "ops-core"
3
+ version = "0.1.0"
4
+ description = "Shared operations platform framework - execution engine, auth, rollback, audit"
5
+ readme = "README.md"
6
+ requires-python = ">=3.11"
7
+ dependencies = [
8
+ "fastapi>=0.110.0",
9
+ "uvicorn>=0.27.0",
10
+ "psycopg[binary]>=3.1.0",
11
+ "pydantic>=2.0.0",
12
+ "pydantic-settings>=2.0.0",
13
+ "pyyaml>=6.0",
14
+ "httpx>=0.26.0",
15
+ ]
16
+
17
+ [project.optional-dependencies]
18
+ mcp = [
19
+ "mcp>=1.0.0",
20
+ ]
21
+ cli = [
22
+ "typer>=0.9.0",
23
+ "rich>=13.0.0",
24
+ ]
25
+ dev = [
26
+ "pytest>=8.0.0",
27
+ "pytest-asyncio>=0.23.0",
28
+ "ruff>=0.3.0",
29
+ ]
30
+
31
+ [build-system]
32
+ requires = ["hatchling"]
33
+ build-backend = "hatchling.build"
34
+
35
+ [tool.hatch.build.targets.wheel]
36
+ packages = ["src/ops_core"]
@@ -0,0 +1,8 @@
1
+ """
2
+ ops-core: Shared operations platform framework.
3
+
4
+ Provides execution engine, auth, rollback, audit, and MCP/CLI scaffolding
5
+ for project-specific operations platforms (harmony-cs, orderhop-cs, etc.).
6
+ """
7
+
8
+ __version__ = "0.1.0"
@@ -0,0 +1,20 @@
1
+ """
2
+ Audit logging for operations platform.
3
+
4
+ Provides operation-level logging, DB-based audit logger, and request middleware.
5
+ """
6
+
7
+ from .logger import AuditLog, log_operation_attempt, log_operation_result
8
+ from .db_logger import AuditLogEntry, AuditLogger, RequestSource, get_audit_logger
9
+ from .middleware import AuditMiddleware
10
+
11
+ __all__ = [
12
+ "AuditLog",
13
+ "log_operation_attempt",
14
+ "log_operation_result",
15
+ "AuditLogEntry",
16
+ "AuditLogger",
17
+ "RequestSource",
18
+ "get_audit_logger",
19
+ "AuditMiddleware",
20
+ ]
@@ -0,0 +1,485 @@
1
+ """
2
+ Audit logging system for self-improvement tracking.
3
+
4
+ Captures all API requests/responses to enable:
5
+ - Issue diagnosis and debugging
6
+ - Usage pattern analysis
7
+ - Error tracking and alerting
8
+ - Performance monitoring
9
+ - Self-improvement recommendations
10
+ """
11
+
12
+ import json
13
+ import uuid
14
+ from datetime import datetime, timezone
15
+ from enum import Enum
16
+ from typing import Any
17
+
18
+ import psycopg
19
+ from psycopg.rows import dict_row
20
+ from pydantic import BaseModel, Field
21
+
22
+ # Settings passed via constructor
23
+
24
+
25
class RequestSource(str, Enum):
    """Where an audited request originated.

    Subclasses ``str`` so values compare equal to their plain-string form
    and serialize transparently.
    """

    CLI = "cli"            # command-line client
    MCP = "mcp"            # MCP server
    API = "api"            # direct HTTP API call
    INTERNAL = "internal"  # internal system call
32
+
33
+
34
class AuditLogEntry(BaseModel):
    """One audit record describing a single request/response cycle.

    Mirrors the columns of the ``cs_audit_log`` table; every field has a
    safe default so callers can populate only what they know.
    """

    # --- Request identification ---
    request_id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))

    # --- Source information ---
    source: RequestSource = RequestSource.API
    source_version: str = ""  # version string reported by the CLI/MCP client
    user_agent: str = ""

    # --- Authentication ---
    user_role: str = ""  # e.g. readonly, superuser
    api_key_hash: str = ""  # last 8 chars of the API key, for identification

    # --- Request details ---
    method: str = ""  # HTTP verb: GET, POST, etc.
    endpoint: str = ""  # e.g. /operations, /query/{db}
    path_params: dict[str, Any] = Field(default_factory=dict)
    query_params: dict[str, Any] = Field(default_factory=dict)
    request_body: dict[str, Any] | None = None

    # --- Operation context (only set for operation endpoints) ---
    operation_id: str | None = None
    operation_category: str | None = None
    database_target: str | None = None

    # --- Response details ---
    status_code: int = 0
    response_body: dict[str, Any] | None = None
    response_size_bytes: int = 0

    # --- Performance ---
    execution_time_ms: float = 0

    # --- Error tracking ---
    is_error: bool = False
    error_type: str | None = None  # ValidationError, AuthError, DBError, etc.
    error_message: str | None = None
    error_traceback: str | None = None

    # --- Raw queries (sensitive - store safely) ---
    raw_sql: str | None = None
    affected_rows: int | None = None
79
+
80
+
81
class AuditLogger:
    """Handles audit logging to database.

    Best-effort by design: every public method catches all DB errors,
    prints a diagnostic, and returns a falsy value, so audit logging can
    never break the request path it is observing. An empty ``audit_db_url``
    disables auditing entirely (every method no-ops).

    NOTE(review): methods are declared ``async`` but use blocking psycopg
    connections, so each call holds up the event loop for the duration of
    the query — presumably acceptable for low-volume audit traffic;
    confirm before using on a hot path.
    """

    def __init__(self, audit_db_url: str = ""):
        # Empty URL means "auditing disabled"; checked by every method.
        self._db_url: str = audit_db_url
        self._initialized = False  # flips True once ensure_tables() succeeds

    def _get_db_url(self) -> str:
        """Get audit database URL (empty string when auditing is disabled)."""
        return self._db_url

    async def ensure_tables(self) -> bool:
        """Ensure audit table, indexes, and summary views exist.

        Idempotent (IF NOT EXISTS / OR REPLACE). Returns True on success,
        False when auditing is disabled or on any DB error.
        """
        db_url = self._get_db_url()
        if not db_url:
            return False

        try:
            with psycopg.connect(db_url) as conn:
                with conn.cursor() as cur:
                    cur.execute("""
                        CREATE TABLE IF NOT EXISTS cs_audit_log (
                            id SERIAL PRIMARY KEY,
                            request_id UUID NOT NULL UNIQUE,
                            timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW(),

                            -- Source
                            source VARCHAR(20) NOT NULL,
                            source_version VARCHAR(50),
                            user_agent TEXT,

                            -- Auth
                            user_role VARCHAR(20),
                            api_key_hash VARCHAR(8),

                            -- Request
                            method VARCHAR(10) NOT NULL,
                            endpoint VARCHAR(255) NOT NULL,
                            path_params JSONB,
                            query_params JSONB,
                            request_body JSONB,

                            -- Operation context
                            operation_id VARCHAR(100),
                            operation_category VARCHAR(50),
                            database_target VARCHAR(50),

                            -- Response
                            status_code INTEGER NOT NULL,
                            response_body JSONB,
                            response_size_bytes INTEGER,

                            -- Performance
                            execution_time_ms FLOAT,

                            -- Error tracking
                            is_error BOOLEAN NOT NULL DEFAULT FALSE,
                            error_type VARCHAR(100),
                            error_message TEXT,
                            error_traceback TEXT,

                            -- Raw query (for analysis)
                            raw_sql TEXT,
                            affected_rows INTEGER,

                            -- Indexes
                            created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
                        );

                        -- Indexes for common queries
                        CREATE INDEX IF NOT EXISTS idx_audit_timestamp
                            ON cs_audit_log(timestamp DESC);
                        CREATE INDEX IF NOT EXISTS idx_audit_is_error
                            ON cs_audit_log(is_error) WHERE is_error = TRUE;
                        CREATE INDEX IF NOT EXISTS idx_audit_endpoint
                            ON cs_audit_log(endpoint);
                        CREATE INDEX IF NOT EXISTS idx_audit_operation_id
                            ON cs_audit_log(operation_id);
                        CREATE INDEX IF NOT EXISTS idx_audit_user_role
                            ON cs_audit_log(user_role);

                        -- Error summary view
                        CREATE OR REPLACE VIEW cs_audit_error_summary AS
                        SELECT
                            DATE(timestamp) as date,
                            endpoint,
                            error_type,
                            COUNT(*) as error_count,
                            COUNT(DISTINCT api_key_hash) as unique_users
                        FROM cs_audit_log
                        WHERE is_error = TRUE
                        GROUP BY DATE(timestamp), endpoint, error_type
                        ORDER BY date DESC, error_count DESC;

                        -- Daily usage summary view
                        CREATE OR REPLACE VIEW cs_audit_daily_summary AS
                        SELECT
                            DATE(timestamp) as date,
                            source,
                            endpoint,
                            COUNT(*) as request_count,
                            COUNT(*) FILTER (WHERE is_error) as error_count,
                            AVG(execution_time_ms) as avg_execution_ms,
                            MAX(execution_time_ms) as max_execution_ms,
                            COUNT(DISTINCT api_key_hash) as unique_users
                        FROM cs_audit_log
                        GROUP BY DATE(timestamp), source, endpoint
                        ORDER BY date DESC, request_count DESC;
                    """)
                    conn.commit()
            self._initialized = True
            return True
        except Exception as e:
            print(f"[AuditLogger] Failed to initialize: {e}")
            return False

    async def log(self, entry: AuditLogEntry) -> bool:
        """Insert one audit entry; returns True on success.

        Lazily creates the tables on first use. Dict fields are sent as
        JSONB; empty/None dicts are stored as NULL.
        """
        db_url = self._get_db_url()
        if not db_url:
            return False

        if not self._initialized:
            await self.ensure_tables()

        try:
            with psycopg.connect(db_url) as conn:
                with conn.cursor() as cur:
                    cur.execute(
                        """
                        INSERT INTO cs_audit_log (
                            request_id, timestamp,
                            source, source_version, user_agent,
                            user_role, api_key_hash,
                            method, endpoint, path_params, query_params, request_body,
                            operation_id, operation_category, database_target,
                            status_code, response_body, response_size_bytes,
                            execution_time_ms,
                            is_error, error_type, error_message, error_traceback,
                            raw_sql, affected_rows
                        ) VALUES (
                            %s, %s,
                            %s, %s, %s,
                            %s, %s,
                            %s, %s, %s::jsonb, %s::jsonb, %s::jsonb,
                            %s, %s, %s,
                            %s, %s::jsonb, %s,
                            %s,
                            %s, %s, %s, %s,
                            %s, %s
                        )
                        """,
                        (
                            entry.request_id,
                            entry.timestamp,
                            entry.source.value,
                            entry.source_version,
                            entry.user_agent,
                            entry.user_role,
                            entry.api_key_hash,
                            entry.method,
                            entry.endpoint,
                            # Empty dicts become NULL rather than '{}'::jsonb.
                            json.dumps(entry.path_params)
                            if entry.path_params
                            else None,
                            json.dumps(entry.query_params)
                            if entry.query_params
                            else None,
                            json.dumps(entry.request_body)
                            if entry.request_body
                            else None,
                            entry.operation_id,
                            entry.operation_category,
                            entry.database_target,
                            entry.status_code,
                            json.dumps(entry.response_body)
                            if entry.response_body
                            else None,
                            entry.response_size_bytes,
                            entry.execution_time_ms,
                            entry.is_error,
                            entry.error_type,
                            entry.error_message,
                            entry.error_traceback,
                            entry.raw_sql,
                            entry.affected_rows,
                        ),
                    )
                    conn.commit()
            return True
        except Exception as e:
            print(f"[AuditLogger] Failed to log: {e}")
            return False

    async def get_recent_errors(self, limit: int = 20) -> list[dict]:
        """Get recent error logs for analysis (newest first)."""
        db_url = self._get_db_url()
        if not db_url:
            return []

        try:
            with psycopg.connect(db_url, row_factory=dict_row) as conn:
                with conn.cursor() as cur:
                    cur.execute(
                        """
                        SELECT
                            request_id, timestamp, source, endpoint,
                            user_role, status_code,
                            error_type, error_message,
                            operation_id, database_target
                        FROM cs_audit_log
                        WHERE is_error = TRUE
                        ORDER BY timestamp DESC
                        LIMIT %s
                        """,
                        (limit,),
                    )
                    return [dict(row) for row in cur.fetchall()]
        except Exception as e:
            print(f"[AuditLogger] Failed to fetch errors: {e}")
            return []

    async def get_error_summary(self, days: int = 7) -> list[dict]:
        """Get error summary for the last N days."""
        db_url = self._get_db_url()
        if not db_url:
            return []

        try:
            with psycopg.connect(db_url, row_factory=dict_row) as conn:
                with conn.cursor() as cur:
                    # BUG FIX: psycopg3 does not bind %s placed inside a
                    # quoted literal (INTERVAL '%s days'), which made this
                    # query fail with a parameter-count mismatch. Use
                    # make_interval so the day count is a real parameter.
                    cur.execute(
                        """
                        SELECT * FROM cs_audit_error_summary
                        WHERE date >= CURRENT_DATE - make_interval(days => %s)
                        """,
                        (days,),
                    )
                    return [dict(row) for row in cur.fetchall()]
        except Exception as e:
            print(f"[AuditLogger] Failed to fetch error summary: {e}")
            return []

    async def search_logs(
        self,
        operation_id: str | None = None,
        operation_category: str | None = None,
        error_type: str | None = None,
        source: str | None = None,
        is_error: bool | None = None,
        search: str | None = None,
        days: int = 7,
        limit: int = 50,
        offset: int = 0,
    ) -> dict:
        """Search audit logs with filters for dashboard.

        Returns {"logs": [...], "total": int}; on failure also includes
        an "error" key. Large JSON columns are truncated to 500 chars for
        the list view (use get_log_detail for the full record).
        """
        db_url = self._get_db_url()
        if not db_url:
            return {"logs": [], "total": 0}

        try:
            with psycopg.connect(db_url, row_factory=dict_row) as conn:
                with conn.cursor() as cur:
                    # Build WHERE dynamically; every value goes through a
                    # bound parameter, never string interpolation.
                    # BUG FIX: INTERVAL '%s days' is not a bindable form in
                    # psycopg3 — use make_interval with a real parameter.
                    where = [
                        "timestamp >= CURRENT_TIMESTAMP - make_interval(days => %s)"
                    ]
                    params: list = [days]

                    if operation_id:
                        where.append("operation_id = %s")
                        params.append(operation_id)
                    if operation_category:
                        where.append("operation_category = %s")
                        params.append(operation_category)
                    if error_type:
                        where.append("error_type = %s")
                        params.append(error_type)
                    if source:
                        where.append("source = %s")
                        params.append(source)
                    if is_error is not None:
                        where.append("is_error = %s")
                        params.append(is_error)
                    if search:
                        # Free-text search across bodies, message, endpoint.
                        where.append(
                            "(request_body::text ILIKE %s OR response_body::text ILIKE %s "
                            "OR error_message ILIKE %s OR endpoint ILIKE %s)"
                        )
                        term = f"%{search}%"
                        params.extend([term, term, term, term])

                    where_sql = " AND ".join(where)

                    # Count
                    cur.execute(
                        f"SELECT COUNT(*) as cnt FROM cs_audit_log WHERE {where_sql}",
                        params,
                    )
                    total = cur.fetchone()["cnt"]

                    # Fetch (truncate large JSON fields for list view)
                    cur.execute(
                        f"""
                        SELECT
                            id, request_id, timestamp, source, user_role,
                            method, endpoint,
                            LEFT(request_body::text, 500) AS request_body,
                            operation_id,
                            operation_category, database_target, status_code,
                            LEFT(response_body::text, 500) AS response_body,
                            response_size_bytes,
                            execution_time_ms, is_error,
                            error_type, error_message, affected_rows
                        FROM cs_audit_log
                        WHERE {where_sql}
                        ORDER BY timestamp DESC
                        LIMIT %s OFFSET %s
                        """,
                        params + [limit, offset],
                    )
                    logs = [dict(row) for row in cur.fetchall()]

                    return {"logs": logs, "total": total}
        except Exception as e:
            print(f"[AuditLogger] Failed to search logs: {e}")
            return {"logs": [], "total": 0, "error": str(e)}

    async def get_log_detail(self, log_id: int) -> dict | None:
        """Get full audit log entry by ID (including full response_body)."""
        db_url = self._get_db_url()
        if not db_url:
            return None

        try:
            with psycopg.connect(db_url, row_factory=dict_row) as conn:
                with conn.cursor() as cur:
                    cur.execute(
                        """
                        SELECT * FROM cs_audit_log WHERE id = %s
                        """,
                        (log_id,),
                    )
                    row = cur.fetchone()
                    return dict(row) if row else None
        except Exception as e:
            print(f"[AuditLogger] Failed to get log detail: {e}")
            return None

    async def get_filter_options(self) -> dict:
        """Get distinct values for dashboard filter dropdowns."""
        db_url = self._get_db_url()
        if not db_url:
            return {}

        try:
            with psycopg.connect(db_url, row_factory=dict_row) as conn:
                with conn.cursor() as cur:
                    result = {}
                    # Column names come from this fixed internal list, so
                    # the f-string interpolation below is not injectable.
                    for col in [
                        "operation_id",
                        "operation_category",
                        "error_type",
                        "source",
                    ]:
                        cur.execute(f"""
                            SELECT DISTINCT {col} FROM cs_audit_log
                            WHERE {col} IS NOT NULL AND {col} != ''
                            ORDER BY {col}
                        """)
                        result[col] = [row[col] for row in cur.fetchall()]
                    return result
        except Exception as e:
            print(f"[AuditLogger] Failed to get filter options: {e}")
            return {}

    async def get_usage_stats(self, days: int = 7) -> list[dict]:
        """Get usage statistics for the last N days."""
        db_url = self._get_db_url()
        if not db_url:
            return []

        try:
            with psycopg.connect(db_url, row_factory=dict_row) as conn:
                with conn.cursor() as cur:
                    # BUG FIX: same non-bindable INTERVAL '%s days' pattern
                    # as get_error_summary — replaced with make_interval.
                    cur.execute(
                        """
                        SELECT * FROM cs_audit_daily_summary
                        WHERE date >= CURRENT_DATE - make_interval(days => %s)
                        """,
                        (days,),
                    )
                    return [dict(row) for row in cur.fetchall()]
        except Exception as e:
            print(f"[AuditLogger] Failed to fetch usage stats: {e}")
            return []
+ return []
474
+
475
+
476
# Process-wide singleton; created lazily by get_audit_logger().
_audit_logger: AuditLogger | None = None


def get_audit_logger(audit_db_url: str = "") -> AuditLogger:
    """Return the shared AuditLogger, creating it on first call.

    Note: ``audit_db_url`` is only honored by the call that creates the
    instance; subsequent calls return the existing logger unchanged.
    """
    global _audit_logger
    logger = _audit_logger
    if logger is None:
        logger = AuditLogger(audit_db_url)
        _audit_logger = logger
    return logger