mcp-acp 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mcp_acp/client.py ADDED
@@ -0,0 +1,1885 @@
1
+ """ACP client wrapper for OpenShift CLI operations."""
2
+
3
+ import asyncio
4
+ import json
5
+ import re
6
+ import secrets
7
+ import subprocess
8
+ from collections.abc import Callable
9
+ from datetime import datetime, timedelta
10
+ from pathlib import Path
11
+ from typing import Any
12
+
13
+ import yaml
14
+
15
+ from mcp_acp.settings import Settings, load_clusters_config, load_settings
16
+ from utils.pylogger import get_python_logger
17
+
18
+ # Initialize structured logger
19
+ logger = get_python_logger()
20
+
21
+
22
+ class ACPClient:
23
+ """Client for interacting with ACP via OpenShift CLI.
24
+
25
+ Attributes:
26
+ settings: Global settings instance
27
+ clusters_config: Cluster configuration instance
28
+ config: Raw cluster configuration (for backward compatibility)
29
+ """
30
+
31
+ # Security constants
32
+ ALLOWED_RESOURCE_TYPES = {"agenticsession", "pods", "event"} # Whitelist
33
+ MAX_BULK_ITEMS = 3 # Maximum items allowed in bulk operations
34
+ LABEL_PREFIX = "acp.ambient-code.ai/label-" # Label prefix for ACP labels
35
+ MAX_COMMAND_TIMEOUT = 120 # Maximum command timeout in seconds
36
+ MAX_LOG_LINES = 10000 # Maximum log lines to retrieve
37
+
38
def __init__(self, config_path: str | None = None, settings: Settings | None = None):
    """Initialize ACP client.

    Args:
        config_path: Path to clusters.yaml config file (deprecated, use settings)
        settings: Settings instance. If not provided, loads default settings.

    Raises:
        Exception: Propagated if the cluster configuration cannot be loaded.
    """
    # Prefer caller-supplied settings; otherwise load the defaults.
    self.settings = settings or load_settings()

    # Legacy escape hatch: an explicit config path overrides the settings value.
    if config_path:
        self.settings.config_path = Path(config_path)

    # Load and validate the cluster configuration (Pydantic does the heavy lifting).
    try:
        self.clusters_config = load_clusters_config(self.settings)
    except Exception as e:
        logger.error("cluster_config_load_failed", error=str(e))
        raise

    # Backward compatibility: expose the raw dict shape that older callers expect.
    cluster_dicts = {}
    for name, cluster in self.clusters_config.clusters.items():
        cluster_dicts[name] = {
            "server": cluster.server,
            "default_project": cluster.default_project,
            "description": cluster.description,
        }
    self.config = {
        "clusters": cluster_dicts,
        "default_cluster": self.clusters_config.default_cluster,
    }
    self.config_path = str(self.settings.config_path)

    logger.info(
        "acp_client_initialized",
        clusters=list(self.clusters_config.clusters.keys()),
        default_cluster=self.clusters_config.default_cluster,
    )

# Note: _load_config and _validate_config removed - now handled by Pydantic settings
80
+
81
def _validate_input(self, value: str, field_name: str, max_length: int = 253) -> None:
    """Validate input to prevent injection attacks.

    Args:
        value: Value to validate
        field_name: Field name for error messages
        max_length: Maximum allowed length

    Raises:
        ValueError: If validation fails
    """
    # Reject non-string values outright.
    if not isinstance(value, str):
        raise ValueError(f"{field_name} must be a string")
    # Enforce the length ceiling (253 = DNS-1123 subdomain maximum).
    if len(value) > max_length:
        raise ValueError(f"{field_name} exceeds maximum length of {max_length}")
    # Enforce Kubernetes DNS-1123 naming: lowercase alphanumerics and hyphens,
    # starting and ending with an alphanumeric.
    if re.match(r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?$", value) is None:
        raise ValueError(f"{field_name} contains invalid characters. Must match: ^[a-z0-9]([-a-z0-9]*[a-z0-9])?$")
99
+
100
def _validate_bulk_operation(self, items: list[str], operation_name: str) -> None:
    """Enforce 3-item limit for safety on bulk operations.

    Args:
        items: List of items to operate on
        operation_name: Operation name for error message

    Raises:
        ValueError: If item count exceeds limit
    """
    # Guard clause: within the safety limit means nothing to do.
    if len(items) <= self.MAX_BULK_ITEMS:
        return
    raise ValueError(
        f"Bulk {operation_name} limited to {self.MAX_BULK_ITEMS} items for safety. "
        f"You requested {len(items)} items. Split into multiple operations."
    )
115
+
116
async def _run_oc_command(
    self,
    args: list[str],
    capture_output: bool = True,
    parse_json: bool = False,
    timeout: int | None = None,
) -> subprocess.CompletedProcess | dict[str, Any]:
    """Run an oc command asynchronously with security controls.

    Args:
        args: Command arguments (will be validated)
        capture_output: Whether to capture stdout/stderr
        parse_json: If True, parse stdout as JSON and return dict
        timeout: Command timeout in seconds (default: MAX_COMMAND_TIMEOUT)

    Returns:
        CompletedProcess result or parsed JSON dict. Note: when parse_json is
        True but the command fails, the raw CompletedProcess is returned so
        callers can inspect returncode/stderr.

    Raises:
        TimeoutError: If command exceeds timeout
        ValueError: If arguments contain suspicious content or JSON parsing fails
    """
    # Security: Validate arguments don't contain shell metacharacters.
    for arg in args:
        if not isinstance(arg, str):
            raise ValueError(f"All arguments must be strings, got {type(arg)}")
        # Detect potential command injection (we never use a shell, but
        # defense-in-depth against args reaching one downstream).
        if any(char in arg for char in [";", "|", "&", "$", "`", "\n", "\r"]):
            raise ValueError(f"Argument contains suspicious characters: {arg}")

    cmd = ["oc"] + args
    effective_timeout = timeout or self.MAX_COMMAND_TIMEOUT

    if capture_output:
        # BUG FIX: initialize before the try so the timeout handler never hits
        # an unbound local if create_subprocess_exec itself raises.
        process = None
        try:
            process = await asyncio.create_subprocess_exec(
                *cmd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                # Security: exec (no shell) prevents shell injection
            )
            stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=effective_timeout)
            result = subprocess.CompletedProcess(
                args=cmd,
                returncode=process.returncode or 0,
                stdout=stdout,
                stderr=stderr,
            )

            if parse_json and result.returncode == 0:
                try:
                    return json.loads(result.stdout.decode())
                except json.JSONDecodeError as e:
                    raise ValueError(f"Failed to parse JSON response: {e}") from e

            return result
        # BUG FIX: asyncio.wait_for raises asyncio.TimeoutError, which is NOT
        # builtins.TimeoutError before Python 3.11 — catching only the builtin
        # meant the timed-out process was never killed on 3.10. Catch both
        # (they alias on 3.11+, so the tuple is harmless there).
        except (asyncio.TimeoutError, TimeoutError):
            # Kill the process if it times out
            if process is not None:
                try:
                    process.kill()
                    await process.wait()
                except Exception:
                    pass
            raise TimeoutError(f"Command timed out after {effective_timeout}s") from None
    else:
        # For non-captured output, run the blocking subprocess in a thread.
        try:
            result = await asyncio.wait_for(
                asyncio.to_thread(subprocess.run, cmd, capture_output=False, timeout=effective_timeout),
                timeout=effective_timeout + 5,  # Extra buffer over subprocess's own timeout
            )
            return result
        # BUG FIX: also normalize asyncio.TimeoutError from the outer wait_for
        # so callers always see builtins.TimeoutError.
        except (subprocess.TimeoutExpired, asyncio.TimeoutError):
            raise TimeoutError(f"Command timed out after {effective_timeout}s") from None
190
+
191
async def _get_resource_json(self, resource_type: str, name: str, namespace: str) -> dict[str, Any]:
    """Get a Kubernetes resource as JSON dict.

    Args:
        resource_type: Resource type (e.g., 'agenticsession')
        name: Resource name
        namespace: Namespace

    Returns:
        Resource as JSON dict

    Raises:
        ValueError: If inputs are invalid
        Exception: If resource not found or command fails
    """
    # Security: reject resource types outside the whitelist, then validate names.
    if resource_type not in self.ALLOWED_RESOURCE_TYPES:
        raise ValueError(f"Resource type '{resource_type}' not allowed")
    self._validate_input(name, "resource name")
    self._validate_input(namespace, "namespace")

    cmd = ["get", resource_type, name, "-n", namespace, "-o", "json"]
    result = await self._run_oc_command(cmd)

    if result.returncode != 0:
        raise Exception(f"Failed to get {resource_type} '{name}': {result.stderr.decode()}")

    # Parse oc's JSON output into a plain dict.
    return json.loads(result.stdout.decode())
218
+
219
async def _list_resources_json(
    self, resource_type: str, namespace: str, selector: str | None = None
) -> list[dict[str, Any]]:
    """List Kubernetes resources as JSON dicts.

    Args:
        resource_type: Resource type (e.g., 'agenticsession')
        namespace: Namespace
        selector: Optional label selector

    Returns:
        List of resources as JSON dicts

    Raises:
        ValueError: If inputs are invalid
        Exception: If command fails
    """
    # Security: whitelist the resource type and validate the namespace.
    if resource_type not in self.ALLOWED_RESOURCE_TYPES:
        raise ValueError(f"Resource type '{resource_type}' not allowed")
    self._validate_input(namespace, "namespace")
    # Selectors are restricted to safe label-selector characters.
    if selector and not re.match(r"^[a-zA-Z0-9=,_.\-/]+$", selector):
        raise ValueError(f"Invalid label selector format: {selector}")

    cmd = ["get", resource_type, "-n", namespace, "-o", "json"]
    if selector:
        cmd += ["-l", selector]

    result = await self._run_oc_command(cmd)

    if result.returncode != 0:
        raise Exception(f"Failed to list {resource_type}: {result.stderr.decode()}")

    # oc returns a List object; the resources live under "items".
    payload = json.loads(result.stdout.decode())
    return payload.get("items", [])
254
+
255
async def _validate_session_for_dry_run(self, project: str, session: str, operation: str) -> dict[str, Any]:
    """Validate session exists for dry-run and return session info.

    Args:
        project: Project/namespace name
        session: Session name
        operation: Operation name for message (e.g., "delete", "restart")

    Returns:
        Dict with dry_run response including session_info if found
    """
    try:
        data = await self._get_resource_json("agenticsession", session, project)
    except Exception:
        # Best-effort lookup: any failure is reported as "not found".
        return {
            "dry_run": True,
            "success": False,
            "message": f"Session '{session}' not found in project '{project}'",
        }

    metadata = data.get("metadata", {})
    status = data.get("status", {})
    return {
        "dry_run": True,
        "success": True,
        "message": f"Would {operation} session '{session}' in project '{project}'",
        "session_info": {
            "name": metadata.get("name"),
            "status": status.get("phase"),
            "created": metadata.get("creationTimestamp"),
            "stopped_at": status.get("stoppedAt"),
        },
    }
286
+
287
async def _bulk_operation(
    self,
    project: str,
    sessions: list[str],
    operation_fn: Callable,
    success_key: str,
    dry_run: bool = False,
) -> dict[str, Any]:
    """Generic bulk operation handler.

    Args:
        project: Project/namespace name
        sessions: List of session names
        operation_fn: Async function to call for each session
        success_key: Key name for successful operations in response
        dry_run: Preview mode

    Returns:
        Standardized bulk operation response
    """
    # Enforce 3-item limit
    self._validate_bulk_operation(sessions, success_key)

    succeeded: list[str] = []
    failures: list[dict[str, Any]] = []
    preview: dict[str, list] = {"would_execute": [], "skipped": []}

    for session in sessions:
        outcome = await operation_fn(project, session, dry_run=dry_run)

        if dry_run:
            # Dry-run results default to success when no explicit flag is set.
            if outcome.get("success", True):
                entry = {"session": session, "info": outcome.get("session_info")}
                preview["would_execute"].append(entry)
            else:
                entry = {"session": session, "reason": outcome.get("message")}
                preview["skipped"].append(entry)
            continue

        # Real run: check the operation-specific key first, then "success".
        if outcome.get(success_key, outcome.get("success")):
            succeeded.append(session)
        else:
            failures.append({"session": session, "error": outcome.get("message")})

    response: dict[str, Any] = {success_key: succeeded, "failed": failures}
    if dry_run:
        response["dry_run"] = True
        response["dry_run_info"] = preview

    return response
344
+
345
async def label_resource(
    self,
    resource_type: str,
    name: str,
    project: str,
    labels: dict[str, str],
    dry_run: bool = False,
) -> dict[str, Any]:
    """Add/update labels on any resource (generic, works for sessions/workspaces/etc).

    Args:
        resource_type: Resource type (agenticsession, namespace, etc)
        name: Resource name
        project: Project/namespace name
        labels: Label key-value pairs (e.g., {'env': 'prod'})
        dry_run: Preview mode

    Returns:
        Dict with labeling status
    """
    # Validate resource type against the whitelist.
    if resource_type not in self.ALLOWED_RESOURCE_TYPES:
        raise ValueError(f"Resource type '{resource_type}' not allowed")

    # Basic format + length validation (K8s enforces the full rules server-side).
    for key, value in labels.items():
        if not key.replace("-", "").replace("_", "").isalnum():
            raise ValueError(f"Invalid label key: {key}")
        if not value.replace("-", "").replace("_", "").replace(".", "").isalnum():
            raise ValueError(f"Invalid label value: {value}")
        if len(key) > 63 or len(value) > 63:
            raise ValueError("Label key/value must be ≤63 characters")

    # Namespace the keys under the ACP label prefix.
    k8s_labels = {f"{self.LABEL_PREFIX}{key}": value for key, value in labels.items()}

    if dry_run:
        return {
            "dry_run": True,
            "resource": name,
            "labels": labels,
            "message": f"Would label {resource_type} '{name}'",
        }

    # Apply with --overwrite so both add and update work in one call.
    label_args = [f"{k}={v}" for k, v in k8s_labels.items()]
    cmd = ["label", resource_type, name, "-n", project, "--overwrite"] + label_args
    result = await self._run_oc_command(cmd)

    if result.returncode != 0:
        return {"labeled": False, "message": result.stderr.decode()}
    return {"labeled": True, "resource": name, "labels": labels}
399
+
400
async def unlabel_resource(
    self,
    resource_type: str,
    name: str,
    project: str,
    label_keys: list[str],
    dry_run: bool = False,
) -> dict[str, Any]:
    """Remove specific labels from a resource.

    Args:
        resource_type: Resource type (agenticsession, namespace, etc)
        name: Resource name
        project: Project/namespace name
        label_keys: List of label keys to remove (without prefix)
        dry_run: Preview mode

    Returns:
        Dict with unlabeling status

    Raises:
        ValueError: If the resource type or a label key is invalid
    """
    # Validate resource type
    if resource_type not in self.ALLOWED_RESOURCE_TYPES:
        raise ValueError(f"Resource type '{resource_type}' not allowed")

    # CONSISTENCY FIX: label_resource validates keys before building the oc
    # command, but this method passed label_keys through unchecked. Apply the
    # same format/length rules here so malformed keys never reach the CLI.
    for key in label_keys:
        if not key.replace("-", "").replace("_", "").isalnum():
            raise ValueError(f"Invalid label key: {key}")
        if len(key) > 63:
            raise ValueError("Label key/value must be ≤63 characters")

    # Build prefixed keys
    prefixed_keys = [f"{self.LABEL_PREFIX}{key}" for key in label_keys]

    if dry_run:
        return {
            "dry_run": True,
            "resource": name,
            "label_keys": label_keys,
            "message": f"Would remove labels from {resource_type} '{name}'",
        }

    # Remove labels using oc label with '-' suffix (oc's removal syntax).
    label_args = [f"{k}-" for k in prefixed_keys]
    result = await self._run_oc_command(["label", resource_type, name, "-n", project] + label_args)

    if result.returncode == 0:
        return {"unlabeled": True, "resource": name, "removed_keys": label_keys}
    else:
        return {"unlabeled": False, "message": result.stderr.decode()}
443
+
444
async def bulk_label_resources(
    self,
    resource_type: str,
    names: list[str],
    project: str,
    labels: dict[str, str],
    dry_run: bool = False,
) -> dict[str, Any]:
    """Label multiple resources with same labels (max 3).

    Args:
        resource_type: Resource type
        names: List of resource names
        project: Project/namespace name
        labels: Label key-value pairs
        dry_run: Preview mode

    Returns:
        Dict with bulk labeling results
    """
    # Enforce the bulk-operation safety limit.
    self._validate_bulk_operation(names, "label")

    succeeded: list[str] = []
    failures: list[dict[str, Any]] = []

    for resource_name in names:
        outcome = await self.label_resource(resource_type, resource_name, project, labels, dry_run)
        # "labeled" is the primary flag; fall back to "success" (dry-run shape).
        if outcome.get("labeled", outcome.get("success")):
            succeeded.append(resource_name)
        else:
            failures.append({"resource": resource_name, "error": outcome.get("message")})

    return {"labeled": succeeded, "failed": failures, "dry_run": dry_run}
482
+
483
async def bulk_unlabel_resources(
    self,
    resource_type: str,
    names: list[str],
    project: str,
    label_keys: list[str],
    dry_run: bool = False,
) -> dict[str, Any]:
    """Remove labels from multiple resources (max 3).

    Args:
        resource_type: Resource type
        names: List of resource names
        project: Project/namespace name
        label_keys: List of label keys to remove
        dry_run: Preview mode

    Returns:
        Dict with bulk unlabeling results
    """
    # Enforce the bulk-operation safety limit.
    self._validate_bulk_operation(names, "unlabel")

    succeeded: list[str] = []
    failures: list[dict[str, Any]] = []

    for resource_name in names:
        outcome = await self.unlabel_resource(resource_type, resource_name, project, label_keys, dry_run)
        # "unlabeled" is the primary flag; fall back to "success" (dry-run shape).
        if outcome.get("unlabeled", outcome.get("success")):
            succeeded.append(resource_name)
        else:
            failures.append({"resource": resource_name, "error": outcome.get("message")})

    return {"unlabeled": succeeded, "failed": failures, "dry_run": dry_run}
521
+
522
async def list_sessions(
    self,
    project: str,
    status: str | None = None,
    has_display_name: bool | None = None,
    older_than: str | None = None,
    sort_by: str | None = None,
    limit: int | None = None,
    label_selector: str | None = None,
) -> dict[str, Any]:
    """List sessions with enhanced filtering.

    Args:
        project: Project/namespace name
        status: Filter by status (running, stopped, creating, failed)
        has_display_name: Filter by display name presence
        older_than: Filter by age (e.g., "7d", "24h")
        sort_by: Sort field (created, stopped, name)
        limit: Maximum results
        label_selector: Kubernetes label selector (e.g., 'acp.ambient-code.ai/label-env=prod')

    Returns:
        Dict with sessions list and metadata
    """
    sessions = await self._list_resources_json("agenticsession", project, selector=label_selector)

    filters_applied: dict[str, Any] = {}
    cutoff = None

    if status:
        filters_applied["status"] = status
    if has_display_name is not None:
        filters_applied["has_display_name"] = has_display_name
    if older_than:
        # May raise ValueError on a malformed delta, matching the lambda-based path.
        cutoff = self._parse_time_delta(older_than)
        filters_applied["older_than"] = older_than

    def keep(s: dict) -> bool:
        """Return True when the session passes every active filter."""
        if status and s.get("status", {}).get("phase", "").lower() != status.lower():
            return False
        if has_display_name is not None and bool(s.get("spec", {}).get("displayName")) != has_display_name:
            return False
        if cutoff is not None and not self._is_older_than(
            s.get("metadata", {}).get("creationTimestamp"), cutoff
        ):
            return False
        return True

    # Single-pass filter over all sessions.
    filtered = [s for s in sessions if keep(s)]

    # Optional sort.
    if sort_by:
        filtered = self._sort_sessions(filtered, sort_by)
        filters_applied["sort_by"] = sort_by

    # Optional truncation.
    if limit and limit > 0:
        filtered = filtered[:limit]
        filters_applied["limit"] = limit

    return {
        "sessions": filtered,
        "total": len(filtered),
        "filters_applied": filters_applied,
    }
583
+
584
async def list_sessions_by_user_labels(
    self,
    project: str,
    labels: dict[str, str],
    **kwargs,
) -> dict[str, Any]:
    """List sessions by user-friendly labels (convenience wrapper).

    Args:
        project: Project/namespace name
        labels: User-friendly label key-value pairs
        **kwargs: Additional arguments passed to list_sessions

    Returns:
        Dict with sessions list
    """
    # Translate user-friendly keys into the prefixed K8s selector form.
    selector = ",".join(f"{self.LABEL_PREFIX}{key}={value}" for key, value in labels.items())
    return await self.list_sessions(project=project, label_selector=selector, **kwargs)
605
+
606
def _sort_sessions(self, sessions: list[dict], sort_by: str) -> list[dict]:
    """Sort sessions by field.

    Args:
        sessions: List of session dicts
        sort_by: Sort field (created, stopped, name)

    Returns:
        Sorted list (the input list unchanged for unknown sort fields)
    """
    if sort_by == "created":
        key_fn = lambda s: s.get("metadata", {}).get("creationTimestamp", "")
    elif sort_by == "stopped":
        key_fn = lambda s: s.get("status", {}).get("stoppedAt", "")
    elif sort_by == "name":
        key_fn = lambda s: s.get("metadata", {}).get("name", "")
    else:
        # Unknown field: leave the ordering untouched.
        return sessions

    # Timestamps sort newest-first; names sort alphabetically ascending.
    return sorted(sessions, key=key_fn, reverse=(sort_by != "name"))
626
+
627
def _parse_time_delta(self, time_str: str) -> datetime:
    """Parse time delta string (e.g., '7d', '24h') to datetime.

    Args:
        time_str: Time delta string ('<N>d', '<N>h', or '<N>m')

    Returns:
        Datetime representing the cutoff time (naive UTC)

    Raises:
        ValueError: If the string is not a valid time delta
    """
    # BUG FIX: the original used re.match with an unanchored pattern, so
    # trailing junk was silently accepted ("7dxyz" parsed as 7 days).
    # fullmatch requires the entire string to be a valid delta.
    match = re.fullmatch(r"(\d+)([dhm])", time_str.lower())
    if not match:
        raise ValueError(f"Invalid time format: {time_str}. Use format like '7d', '24h', '30m'")

    value, unit = int(match.group(1)), match.group(2)
    deltas = {
        "d": timedelta(days=value),
        "h": timedelta(hours=value),
        "m": timedelta(minutes=value),
    }
    # NOTE: naive UTC on purpose — _is_older_than strips tzinfo before
    # comparing, so the cutoff must stay naive to remain comparable.
    return datetime.utcnow() - deltas[unit]
651
+
652
+ def _is_older_than(self, timestamp_str: str | None, cutoff: datetime) -> bool:
653
+ """Check if timestamp is older than cutoff.
654
+
655
+ Args:
656
+ timestamp_str: ISO format timestamp string
657
+ cutoff: Cutoff datetime
658
+
659
+ Returns:
660
+ True if older than cutoff
661
+ """
662
+ if not timestamp_str:
663
+ return False
664
+
665
+ # Parse ISO format timestamp
666
+ timestamp = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
667
+ return timestamp.replace(tzinfo=None) < cutoff
668
+
669
async def delete_session(self, project: str, session: str, dry_run: bool = False) -> dict[str, Any]:
    """Delete a session.

    Args:
        project: Project/namespace name
        session: Session name
        dry_run: Preview without deleting

    Returns:
        Dict with deletion status
    """
    # Dry-run short-circuits into the shared validation helper.
    if dry_run:
        return await self._validate_session_for_dry_run(project, session, "delete")

    result = await self._run_oc_command(["delete", "agenticsession", session, "-n", project])

    if result.returncode == 0:
        return {
            "deleted": True,
            "message": f"Successfully deleted session '{session}' from project '{project}'",
        }
    return {
        "deleted": False,
        "message": f"Failed to delete session: {result.stderr.decode()}",
    }
695
+
696
async def restart_session(self, project: str, session: str, dry_run: bool = False) -> dict[str, Any]:
    """Restart a stopped session.

    Args:
        project: Project/namespace name
        session: Session name
        dry_run: Preview without restarting

    Returns:
        Dict with restart status
    """
    try:
        session_data = await self._get_resource_json("agenticsession", session, project)
        current_status = session_data.get("status", {}).get("phase", "unknown")

        if dry_run:
            return {
                "status": current_status,
                "dry_run": True,
                "success": True,
                "message": f"Would restart session '{session}' (current status: {current_status})",
                "session_info": {
                    "name": session_data.get("metadata", {}).get("name"),
                    "current_status": current_status,
                    "stopped_at": session_data.get("status", {}).get("stoppedAt"),
                },
            }

        # Restarting = flipping spec.stopped back to false via a merge patch.
        patch_payload = json.dumps({"spec": {"stopped": False}})
        cmd = [
            "patch",
            "agenticsession",
            session,
            "-n",
            project,
            "--type=merge",
            "-p",
            patch_payload,
        ]
        result = await self._run_oc_command(cmd)

        if result.returncode != 0:
            return {
                "status": "error",
                "message": f"Failed to restart session: {result.stderr.decode()}",
            }

        return {
            "status": "restarting",
            "message": f"Successfully restarted session '{session}' in project '{project}'",
        }
    except Exception as e:
        # Best-effort API: surface failures as an error status, never raise.
        return {"status": "error", "message": str(e)}
751
+
752
async def bulk_delete_sessions(self, project: str, sessions: list[str], dry_run: bool = False) -> dict[str, Any]:
    """Delete multiple sessions.

    Args:
        project: Project/namespace name
        sessions: List of session names
        dry_run: Preview without deleting

    Returns:
        Dict with deletion results
    """
    # Delegate to the generic bulk handler with delete_session as the worker.
    outcome = await self._bulk_operation(project, sessions, self.delete_session, "deleted", dry_run)
    return outcome
764
+
765
async def bulk_stop_sessions(self, project: str, sessions: list[str], dry_run: bool = False) -> dict[str, Any]:
    """Stop multiple running sessions.

    Args:
        project: Project/namespace name
        sessions: List of session names
        dry_run: Preview without stopping

    Returns:
        Dict with stop results
    """

    async def _stop_one(project: str, session: str, dry_run: bool = False) -> dict[str, Any]:
        """Stop a single session; shaped for _bulk_operation's operation_fn."""
        try:
            session_data = await self._get_resource_json("agenticsession", session, project)
            current_status = session_data.get("status", {}).get("phase")

            if dry_run:
                # Only sessions currently running count as stoppable.
                return {
                    "dry_run": True,
                    "success": current_status == "running",
                    "message": f"Session status: {current_status}",
                    "session_info": {
                        "name": session,
                        "status": current_status,
                    },
                }

            # Stopping = setting spec.stopped via a merge patch.
            patch_payload = json.dumps({"spec": {"stopped": True}})
            cmd = [
                "patch",
                "agenticsession",
                session,
                "-n",
                project,
                "--type=merge",
                "-p",
                patch_payload,
            ]
            result = await self._run_oc_command(cmd)

            if result.returncode != 0:
                return {
                    "stopped": False,
                    "message": result.stderr.decode(),
                }
            return {"stopped": True, "message": "Success"}
        except Exception as e:
            # Best-effort: report the failure instead of raising.
            return {"stopped": False, "success": False, "message": str(e)}

    return await self._bulk_operation(project, sessions, _stop_one, "stopped", dry_run)
820
+
821
async def bulk_restart_sessions(self, project: str, sessions: list[str], dry_run: bool = False) -> dict[str, Any]:
    """Restart multiple stopped sessions (max 3).

    Args:
        project: Project/namespace name
        sessions: List of session names
        dry_run: Preview mode

    Returns:
        Dict with restart results
    """
    # Enforce the bulk-operation safety limit.
    self._validate_bulk_operation(sessions, "restart")

    succeeded: list[str] = []
    failures: list[dict[str, Any]] = []

    for session in sessions:
        outcome = await self.restart_session(project, session, dry_run)
        # restart_session signals success via status "restarting" (real run)
        # or "success" (dry-run shape).
        if outcome.get("status") == "restarting" or outcome.get("success"):
            succeeded.append(session)
        else:
            failures.append({"session": session, "error": outcome.get("message")})

    return {"restarted": succeeded, "failed": failures, "dry_run": dry_run}
850
+
851
async def bulk_delete_sessions_by_label(
    self,
    project: str,
    labels: dict[str, str],
    dry_run: bool = False,
) -> dict[str, Any]:
    """Delete sessions matching label selector (max 3).

    Args:
        project: Project/namespace name
        labels: Label key-value pairs
        dry_run: Preview mode

    Returns:
        Dict with deletion results
    """
    # Resolve the label selector to concrete sessions first.
    listing = await self.list_sessions_by_user_labels(project, labels)
    matched = listing.get("sessions", [])

    if not matched:
        return {
            "deleted": [],
            "failed": [],
            "message": f"No sessions found with labels {labels}",
        }

    names = [item["metadata"]["name"] for item in matched]

    # Fail fast with a helpful error before any destructive work.
    if len(names) > self.MAX_BULK_ITEMS:
        raise ValueError(
            f"Label selector matches {len(names)} sessions. "
            f"Max {self.MAX_BULK_ITEMS} allowed. Refine your labels to be more specific."
        )

    # Dry-run: show exactly what matched so the user can confirm.
    if dry_run:
        selector = ",".join(f"{self.LABEL_PREFIX}{k}={v}" for k, v in labels.items())
        return {
            "dry_run": True,
            "matched_sessions": names,
            "matched_count": len(names),
            "label_selector": selector,
            "message": f"Would delete {len(names)} sessions. Review matched_sessions before confirming.",
        }

    # Hand off to the name-based bulk delete.
    return await self.bulk_delete_sessions(project, names, dry_run=dry_run)
899
+
900
async def bulk_stop_sessions_by_label(
    self,
    project: str,
    labels: dict[str, str],
    dry_run: bool = False,
) -> dict[str, Any]:
    """Stop sessions matching label selector (max 3).

    Args:
        project: Project/namespace name
        labels: Label key-value pairs
        dry_run: Preview mode

    Returns:
        Dict with stop results
    """
    # Resolve the label selector to concrete sessions first.
    listing = await self.list_sessions_by_user_labels(project, labels)
    matched = listing.get("sessions", [])

    if not matched:
        return {
            "stopped": [],
            "failed": [],
            "message": f"No sessions found with labels {labels}",
        }

    names = [item["metadata"]["name"] for item in matched]

    # Fail fast with a helpful error before touching anything.
    if len(names) > self.MAX_BULK_ITEMS:
        raise ValueError(
            f"Label selector matches {len(names)} sessions. "
            f"Max {self.MAX_BULK_ITEMS} allowed. Refine your labels to be more specific."
        )

    # Dry-run: show exactly what matched so the user can confirm.
    if dry_run:
        selector = ",".join(f"{self.LABEL_PREFIX}{k}={v}" for k, v in labels.items())
        return {
            "dry_run": True,
            "matched_sessions": names,
            "matched_count": len(names),
            "label_selector": selector,
            "message": f"Would stop {len(names)} sessions. Review matched_sessions before confirming.",
        }

    # Hand off to the name-based bulk stop.
    return await self.bulk_stop_sessions(project, names, dry_run=dry_run)
948
+
949
async def bulk_restart_sessions_by_label(
    self,
    project: str,
    labels: dict[str, str],
    dry_run: bool = False,
) -> dict[str, Any]:
    """Restart sessions matching label selector (max 3).

    Args:
        project: Project/namespace name
        labels: Label key-value pairs
        dry_run: Preview mode

    Returns:
        Dict with restart results
    """
    # Resolve the label selector to concrete sessions first.
    listing = await self.list_sessions_by_user_labels(project, labels)
    matched = listing.get("sessions", [])

    if not matched:
        return {
            "restarted": [],
            "failed": [],
            "message": f"No sessions found with labels {labels}",
        }

    names = [item["metadata"]["name"] for item in matched]

    # Fail fast with a helpful error before touching anything.
    if len(names) > self.MAX_BULK_ITEMS:
        raise ValueError(
            f"Label selector matches {len(names)} sessions. "
            f"Max {self.MAX_BULK_ITEMS} allowed. Refine your labels to be more specific."
        )

    # Dry-run: show exactly what matched so the user can confirm.
    if dry_run:
        selector = ",".join(f"{self.LABEL_PREFIX}{k}={v}" for k, v in labels.items())
        return {
            "dry_run": True,
            "matched_sessions": names,
            "matched_count": len(names),
            "label_selector": selector,
            "message": f"Would restart {len(names)} sessions. Review matched_sessions before confirming.",
        }

    # Hand off to the name-based bulk restart.
    return await self.bulk_restart_sessions(project, names, dry_run=dry_run)
997
+
998
+ async def get_session_logs(
999
+ self,
1000
+ project: str,
1001
+ session: str,
1002
+ container: str | None = None,
1003
+ tail_lines: int | None = None,
1004
+ ) -> dict[str, Any]:
1005
+ """Get logs for a session.
1006
+
1007
+ Args:
1008
+ project: Project/namespace name
1009
+ session: Session name
1010
+ container: Container name (optional)
1011
+ tail_lines: Number of lines to retrieve
1012
+
1013
+ Returns:
1014
+ Dict with logs
1015
+ """
1016
+ # Security: Validate inputs
1017
+ try:
1018
+ self._validate_input(project, "project")
1019
+ self._validate_input(session, "session")
1020
+ if container:
1021
+ # Container names have slightly different naming rules
1022
+ if not re.match(r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?$", container):
1023
+ raise ValueError(f"Invalid container name: {container}")
1024
+ # Security: Limit tail_lines to prevent DoS
1025
+ if tail_lines and (tail_lines < 1 or tail_lines > self.MAX_LOG_LINES):
1026
+ raise ValueError(f"tail_lines must be between 1 and {self.MAX_LOG_LINES}")
1027
+
1028
+ # Find the pod for this session
1029
+ pods = await self._list_resources_json("pods", project, selector=f"agenticsession={session}")
1030
+
1031
+ if not pods:
1032
+ return {"logs": "", "error": f"No pods found for session '{session}'"}
1033
+
1034
+ pod_name = pods[0].get("metadata", {}).get("name")
1035
+
1036
+ # Build logs command
1037
+ logs_args = ["logs", pod_name, "-n", project]
1038
+ if container:
1039
+ logs_args.extend(["-c", container])
1040
+ if tail_lines:
1041
+ logs_args.extend(["--tail", str(tail_lines)])
1042
+ else:
1043
+ # Default limit to prevent memory exhaustion
1044
+ logs_args.extend(["--tail", str(1000)])
1045
+
1046
+ result = await self._run_oc_command(logs_args)
1047
+
1048
+ if result.returncode != 0:
1049
+ return {
1050
+ "logs": "",
1051
+ "error": f"Failed to retrieve logs: {result.stderr.decode()}",
1052
+ }
1053
+
1054
+ return {
1055
+ "logs": result.stdout.decode(),
1056
+ "container": container or "default",
1057
+ "lines": len(result.stdout.decode().split("\n")),
1058
+ }
1059
+ except ValueError as e:
1060
+ return {"logs": "", "error": str(e)}
1061
+ except Exception as e:
1062
+ return {"logs": "", "error": f"Unexpected error: {str(e)}"}
1063
+
1064
+ def list_clusters(self) -> dict[str, Any]:
1065
+ """List configured clusters.
1066
+
1067
+ Returns:
1068
+ Dict with clusters list
1069
+ """
1070
+ clusters = []
1071
+ config = self.config
1072
+ default_cluster = config.get("default_cluster")
1073
+
1074
+ for name, cluster_config in config.get("clusters", {}).items():
1075
+ clusters.append(
1076
+ {
1077
+ "name": name,
1078
+ "server": cluster_config.get("server"),
1079
+ "description": cluster_config.get("description", ""),
1080
+ "default_project": cluster_config.get("default_project"),
1081
+ "is_default": name == default_cluster,
1082
+ }
1083
+ )
1084
+
1085
+ return {"clusters": clusters, "default_cluster": default_cluster}
1086
+
1087
+ async def whoami(self) -> dict[str, Any]:
1088
+ """Get current user and cluster information.
1089
+
1090
+ Returns:
1091
+ Dict with user info
1092
+ """
1093
+ # Get current user
1094
+ user_result = await self._run_oc_command(["whoami"])
1095
+ user = user_result.stdout.decode().strip() if user_result.returncode == 0 else "unknown"
1096
+
1097
+ # Get current server
1098
+ server_result = await self._run_oc_command(["whoami", "--show-server"])
1099
+ server = server_result.stdout.decode().strip() if server_result.returncode == 0 else "unknown"
1100
+
1101
+ # Get current project
1102
+ project_result = await self._run_oc_command(["project", "-q"])
1103
+ project = project_result.stdout.decode().strip() if project_result.returncode == 0 else "unknown"
1104
+
1105
+ # Get token info
1106
+ token_result = await self._run_oc_command(["whoami", "-t"])
1107
+ token_valid = token_result.returncode == 0
1108
+
1109
+ # Try to get token expiry (if available)
1110
+ token_expires = None
1111
+ if token_valid:
1112
+ # Get token and decode to check expiry
1113
+ # Note: token variable intentionally unused - for future enhancement
1114
+ try:
1115
+ # Try to get token info from oc
1116
+ token_info_result = await self._run_oc_command(["whoami", "--show-token"])
1117
+ if token_info_result.returncode == 0:
1118
+ # Note: OpenShift doesn't provide expiry via CLI easily
1119
+ # This is a placeholder for future enhancement
1120
+ token_expires = None
1121
+ except Exception:
1122
+ pass
1123
+
1124
+ # Get current cluster name (if available from config)
1125
+ cluster = "unknown"
1126
+ cluster_config_data = None
1127
+ for name, cluster_config in self.config.get("clusters", {}).items():
1128
+ if cluster_config.get("server") == server:
1129
+ cluster = name
1130
+ cluster_config_data = cluster_config
1131
+ break
1132
+
1133
+ # Prefer default_project from config over current oc project
1134
+ # This ensures we use the configured project even if oc is set to a different one
1135
+ if cluster_config_data and cluster_config_data.get("default_project"):
1136
+ project = cluster_config_data.get("default_project")
1137
+
1138
+ return {
1139
+ "user": user,
1140
+ "cluster": cluster,
1141
+ "server": server,
1142
+ "project": project,
1143
+ "token_expires": token_expires,
1144
+ "token_valid": token_valid,
1145
+ "authenticated": user != "unknown" and server != "unknown",
1146
+ }
1147
+
1148
+ # P2 Feature: Clone Session
1149
+ async def clone_session(
1150
+ self, project: str, source_session: str, new_display_name: str, dry_run: bool = False
1151
+ ) -> dict[str, Any]:
1152
+ """Clone a session with its configuration.
1153
+
1154
+ Args:
1155
+ project: Project/namespace name
1156
+ source_session: Source session name to clone
1157
+ new_display_name: Display name for new session
1158
+ dry_run: Preview without creating
1159
+
1160
+ Returns:
1161
+ Dict with cloned session info
1162
+ """
1163
+ try:
1164
+ # Get source session
1165
+ source_data = await self._get_resource_json("agenticsession", source_session, project)
1166
+
1167
+ if dry_run:
1168
+ return {
1169
+ "dry_run": True,
1170
+ "success": True,
1171
+ "message": f"Would clone session '{source_session}' with display name '{new_display_name}'",
1172
+ "source_info": {
1173
+ "name": source_data.get("metadata", {}).get("name"),
1174
+ "display_name": source_data.get("spec", {}).get("displayName"),
1175
+ "repos": source_data.get("spec", {}).get("repos", []),
1176
+ "workflow": source_data.get("spec", {}).get("workflow"),
1177
+ },
1178
+ }
1179
+
1180
+ # Create new session from source spec
1181
+ new_spec = source_data.get("spec", {}).copy()
1182
+ new_spec["displayName"] = new_display_name
1183
+ new_spec["stopped"] = False # Start new session as running
1184
+
1185
+ # Create session manifest
1186
+ manifest = {
1187
+ "apiVersion": "agenticplatform.io/v1",
1188
+ "kind": "AgenticSession",
1189
+ "metadata": {
1190
+ "generateName": f"{source_session}-clone-",
1191
+ "namespace": project,
1192
+ },
1193
+ "spec": new_spec,
1194
+ }
1195
+
1196
+ # Apply manifest using secure temporary file
1197
+ import os
1198
+ import tempfile
1199
+
1200
+ # Security: Use secure temp file with proper permissions (0600)
1201
+ fd, manifest_file = tempfile.mkstemp(suffix=".yaml", prefix=f"acp-clone-{secrets.token_hex(8)}-")
1202
+ try:
1203
+ # Write to file descriptor with secure permissions
1204
+ with os.fdopen(fd, "w") as f:
1205
+ yaml.dump(manifest, f)
1206
+
1207
+ result = await self._run_oc_command(["create", "-f", manifest_file, "-o", "json"])
1208
+
1209
+ if result.returncode != 0:
1210
+ return {
1211
+ "cloned": False,
1212
+ "message": f"Failed to clone session: {result.stderr.decode()}",
1213
+ }
1214
+
1215
+ created_data = json.loads(result.stdout.decode())
1216
+ new_session_name = created_data.get("metadata", {}).get("name")
1217
+
1218
+ return {
1219
+ "cloned": True,
1220
+ "session": new_session_name,
1221
+ "message": f"Successfully cloned session '{source_session}' to '{new_session_name}'",
1222
+ }
1223
+ finally:
1224
+ # Ensure cleanup even if operation fails
1225
+ try:
1226
+ os.unlink(manifest_file)
1227
+ except OSError:
1228
+ pass
1229
+
1230
+ except Exception as e:
1231
+ return {"cloned": False, "message": str(e)}
1232
+
1233
+ # P2 Feature: Get Session Transcript
1234
+ async def get_session_transcript(self, project: str, session: str, format: str = "json") -> dict[str, Any]:
1235
+ """Get session transcript/conversation history.
1236
+
1237
+ Args:
1238
+ project: Project/namespace name
1239
+ session: Session name
1240
+ format: Output format ("json" or "markdown")
1241
+
1242
+ Returns:
1243
+ Dict with transcript data
1244
+ """
1245
+ try:
1246
+ session_data = await self._get_resource_json("agenticsession", session, project)
1247
+
1248
+ # Get events which contain the conversation
1249
+ # Note: events variable intentionally unused - for future enhancement
1250
+ # events = await self._list_resources_json(
1251
+ # "event", project, selector=f"involvedObject.name={session}"
1252
+ # )
1253
+
1254
+ # Extract transcript from session status if available
1255
+ transcript_data = session_data.get("status", {}).get("transcript") or []
1256
+
1257
+ if format == "markdown":
1258
+ # Convert to markdown format
1259
+ markdown = f"# Session Transcript: {session}\n\n"
1260
+ for idx, entry in enumerate(transcript_data):
1261
+ role = entry.get("role", "unknown")
1262
+ content = entry.get("content", "")
1263
+ timestamp = entry.get("timestamp", "")
1264
+ markdown += f"## Message {idx + 1} - {role}\n"
1265
+ if timestamp:
1266
+ markdown += f"*{timestamp}*\n\n"
1267
+ markdown += f"{content}\n\n"
1268
+ markdown += "---\n\n"
1269
+
1270
+ return {
1271
+ "transcript": markdown,
1272
+ "format": "markdown",
1273
+ "message_count": len(transcript_data),
1274
+ }
1275
+ else:
1276
+ # Return as JSON
1277
+ return {
1278
+ "transcript": transcript_data,
1279
+ "format": "json",
1280
+ "message_count": len(transcript_data),
1281
+ }
1282
+
1283
+ except Exception as e:
1284
+ return {"transcript": None, "error": str(e)}
1285
+
1286
+ # P2 Feature: Update Session
1287
+ async def update_session(
1288
+ self,
1289
+ project: str,
1290
+ session: str,
1291
+ display_name: str | None = None,
1292
+ timeout: int | None = None,
1293
+ dry_run: bool = False,
1294
+ ) -> dict[str, Any]:
1295
+ """Update session metadata.
1296
+
1297
+ Args:
1298
+ project: Project/namespace name
1299
+ session: Session name
1300
+ display_name: New display name
1301
+ timeout: New timeout in seconds
1302
+ dry_run: Preview without updating
1303
+
1304
+ Returns:
1305
+ Dict with update status
1306
+ """
1307
+ try:
1308
+ session_data = await self._get_resource_json("agenticsession", session, project)
1309
+
1310
+ if dry_run:
1311
+ updates = {}
1312
+ if display_name:
1313
+ updates["displayName"] = display_name
1314
+ if timeout:
1315
+ updates["timeout"] = timeout
1316
+
1317
+ return {
1318
+ "dry_run": True,
1319
+ "success": True,
1320
+ "message": f"Would update session '{session}'",
1321
+ "updates": updates,
1322
+ "current": {
1323
+ "displayName": session_data.get("spec", {}).get("displayName"),
1324
+ "timeout": session_data.get("spec", {}).get("timeout"),
1325
+ },
1326
+ }
1327
+
1328
+ # Build patch
1329
+ patch = {"spec": {}}
1330
+ if display_name:
1331
+ patch["spec"]["displayName"] = display_name
1332
+ if timeout:
1333
+ patch["spec"]["timeout"] = timeout
1334
+
1335
+ if not patch["spec"]:
1336
+ return {"updated": False, "message": "No updates specified"}
1337
+
1338
+ result = await self._run_oc_command(
1339
+ [
1340
+ "patch",
1341
+ "agenticsession",
1342
+ session,
1343
+ "-n",
1344
+ project,
1345
+ "--type=merge",
1346
+ "-p",
1347
+ json.dumps(patch),
1348
+ "-o",
1349
+ "json",
1350
+ ]
1351
+ )
1352
+
1353
+ if result.returncode != 0:
1354
+ return {
1355
+ "updated": False,
1356
+ "message": f"Failed to update session: {result.stderr.decode()}",
1357
+ }
1358
+
1359
+ updated_data = json.loads(result.stdout.decode())
1360
+
1361
+ return {
1362
+ "updated": True,
1363
+ "session": updated_data,
1364
+ "message": f"Successfully updated session '{session}'",
1365
+ }
1366
+
1367
+ except Exception as e:
1368
+ return {"updated": False, "message": str(e)}
1369
+
1370
+ # P2 Feature: Export Session
1371
+ async def export_session(self, project: str, session: str) -> dict[str, Any]:
1372
+ """Export session configuration and transcript.
1373
+
1374
+ Args:
1375
+ project: Project/namespace name
1376
+ session: Session name
1377
+
1378
+ Returns:
1379
+ Dict with exported session data
1380
+ """
1381
+ try:
1382
+ session_data = await self._get_resource_json("agenticsession", session, project)
1383
+
1384
+ # Get transcript
1385
+ transcript_result = await self.get_session_transcript(project, session, format="json")
1386
+
1387
+ export_data = {
1388
+ "config": {
1389
+ "name": session_data.get("metadata", {}).get("name"),
1390
+ "displayName": session_data.get("spec", {}).get("displayName"),
1391
+ "repos": session_data.get("spec", {}).get("repos", []),
1392
+ "workflow": session_data.get("spec", {}).get("workflow"),
1393
+ "llmConfig": session_data.get("spec", {}).get("llmConfig", {}),
1394
+ },
1395
+ "transcript": transcript_result.get("transcript", []),
1396
+ "metadata": {
1397
+ "created": session_data.get("metadata", {}).get("creationTimestamp"),
1398
+ "status": session_data.get("status", {}).get("phase"),
1399
+ "stoppedAt": session_data.get("status", {}).get("stoppedAt"),
1400
+ "messageCount": transcript_result.get("message_count", 0),
1401
+ },
1402
+ }
1403
+
1404
+ return {
1405
+ "exported": True,
1406
+ "data": export_data,
1407
+ "message": f"Successfully exported session '{session}'",
1408
+ }
1409
+
1410
+ except Exception as e:
1411
+ return {"exported": False, "error": str(e)}
1412
+
1413
+ # P3 Feature: Get Session Metrics
1414
+ async def get_session_metrics(self, project: str, session: str) -> dict[str, Any]:
1415
+ """Get session metrics and statistics.
1416
+
1417
+ Args:
1418
+ project: Project/namespace name
1419
+ session: Session name
1420
+
1421
+ Returns:
1422
+ Dict with session metrics
1423
+ """
1424
+ try:
1425
+ session_data = await self._get_resource_json("agenticsession", session, project)
1426
+
1427
+ # Get transcript for analysis
1428
+ transcript_result = await self.get_session_transcript(project, session, format="json")
1429
+ transcript = transcript_result.get("transcript") or []
1430
+
1431
+ # Calculate metrics
1432
+ token_count = 0
1433
+ message_count = len(transcript) if transcript else 0
1434
+ tool_calls = {}
1435
+
1436
+ for entry in transcript:
1437
+ # Count tokens (approximate)
1438
+ content = entry.get("content", "")
1439
+ token_count += len(content.split()) * 1.3 # Rough estimate
1440
+
1441
+ # Count tool calls
1442
+ if "tool_calls" in entry:
1443
+ for tool_call in entry.get("tool_calls", []):
1444
+ tool_name = tool_call.get("name", "unknown")
1445
+ tool_calls[tool_name] = tool_calls.get(tool_name, 0) + 1
1446
+
1447
+ # Calculate duration
1448
+ created = session_data.get("metadata", {}).get("creationTimestamp")
1449
+ stopped = session_data.get("status", {}).get("stoppedAt")
1450
+
1451
+ duration_seconds = 0
1452
+ if created and stopped:
1453
+ try:
1454
+ from datetime import datetime
1455
+
1456
+ created_dt = datetime.fromisoformat(created.replace("Z", "+00:00"))
1457
+ stopped_dt = datetime.fromisoformat(stopped.replace("Z", "+00:00"))
1458
+ duration_seconds = int((stopped_dt - created_dt).total_seconds())
1459
+ except Exception:
1460
+ pass
1461
+
1462
+ return {
1463
+ "token_count": int(token_count),
1464
+ "duration_seconds": duration_seconds,
1465
+ "tool_calls": tool_calls,
1466
+ "message_count": message_count,
1467
+ "status": session_data.get("status", {}).get("phase"),
1468
+ }
1469
+
1470
+ except Exception as e:
1471
+ return {"error": str(e)}
1472
+
1473
+ # P3 Feature: List Workflows
1474
+ async def list_workflows(self, repo_url: str | None = None) -> dict[str, Any]:
1475
+ """List available workflows from repository.
1476
+
1477
+ Args:
1478
+ repo_url: Repository URL (defaults to ootb-ambient-workflows)
1479
+
1480
+ Returns:
1481
+ Dict with workflows list
1482
+ """
1483
+ if not repo_url:
1484
+ repo_url = "https://github.com/ambient-code/ootb-ambient-workflows"
1485
+
1486
+ # Security: Validate repo URL format
1487
+ if not isinstance(repo_url, str):
1488
+ return {"workflows": [], "error": "Repository URL must be a string"}
1489
+ if not (repo_url.startswith("https://") or repo_url.startswith("http://")):
1490
+ return {"workflows": [], "error": "Repository URL must use http:// or https://"}
1491
+ # Prevent command injection through URL
1492
+ if any(char in repo_url for char in [";", "|", "&", "$", "`", "\n", "\r", " "]):
1493
+ return {"workflows": [], "error": "Invalid characters in repository URL"}
1494
+
1495
+ try:
1496
+ # Clone repo to temp directory
1497
+ import shutil
1498
+ import tempfile
1499
+
1500
+ # Security: Use secure temp directory with random name
1501
+ temp_dir = tempfile.mkdtemp(prefix=f"acp-workflows-{secrets.token_hex(8)}-")
1502
+
1503
+ try:
1504
+ # Clone the repo using secure subprocess
1505
+ process = await asyncio.create_subprocess_exec(
1506
+ "git",
1507
+ "clone",
1508
+ "--depth",
1509
+ "1",
1510
+ "--",
1511
+ repo_url,
1512
+ temp_dir,
1513
+ stdout=asyncio.subprocess.PIPE,
1514
+ stderr=asyncio.subprocess.PIPE,
1515
+ )
1516
+ stdout, stderr = await asyncio.wait_for(
1517
+ process.communicate(),
1518
+ timeout=60, # 60 second timeout for git clone
1519
+ )
1520
+
1521
+ if process.returncode != 0:
1522
+ return {
1523
+ "workflows": [],
1524
+ "error": f"Failed to clone repository: {stderr.decode()}",
1525
+ }
1526
+
1527
+ # Find workflow files
1528
+ workflows = []
1529
+ workflows_dir = Path(temp_dir) / "workflows"
1530
+
1531
+ if workflows_dir.exists():
1532
+ # Limit to prevent DoS
1533
+ file_count = 0
1534
+ max_files = 100
1535
+ for workflow_file in workflows_dir.glob("**/*.yaml"):
1536
+ if file_count >= max_files:
1537
+ break
1538
+ file_count += 1
1539
+
1540
+ # Security: Validate file is within expected directory
1541
+ try:
1542
+ workflow_file.resolve().relative_to(workflows_dir.resolve())
1543
+ except ValueError:
1544
+ continue # Skip files outside workflows directory
1545
+
1546
+ # Read workflow to get metadata
1547
+ try:
1548
+ with open(workflow_file) as f:
1549
+ workflow_data = yaml.safe_load(f)
1550
+ if not isinstance(workflow_data, dict):
1551
+ workflow_data = {}
1552
+
1553
+ workflows.append(
1554
+ {
1555
+ "name": workflow_file.stem,
1556
+ "path": str(workflow_file.relative_to(workflows_dir)),
1557
+ "description": (
1558
+ workflow_data.get("description", "")
1559
+ if isinstance(workflow_data.get("description"), str)
1560
+ else ""
1561
+ ),
1562
+ }
1563
+ )
1564
+ except (yaml.YAMLError, OSError):
1565
+ # Skip invalid workflow files
1566
+ continue
1567
+
1568
+ return {
1569
+ "workflows": workflows,
1570
+ "repo_url": repo_url,
1571
+ "count": len(workflows),
1572
+ }
1573
+
1574
+ finally:
1575
+ # Clean up temp directory securely
1576
+ try:
1577
+ shutil.rmtree(temp_dir, ignore_errors=True)
1578
+ except Exception:
1579
+ pass
1580
+
1581
+ except TimeoutError:
1582
+ return {"workflows": [], "error": "Repository clone timed out"}
1583
+ except Exception as e:
1584
+ return {"workflows": [], "error": f"Unexpected error: {str(e)}"}
1585
+
1586
+ # P3 Feature: Create Session from Template
1587
+ async def create_session_from_template(
1588
+ self,
1589
+ project: str,
1590
+ template: str,
1591
+ display_name: str,
1592
+ repos: list[str] | None = None,
1593
+ dry_run: bool = False,
1594
+ ) -> dict[str, Any]:
1595
+ """Create session from predefined template.
1596
+
1597
+ Args:
1598
+ project: Project/namespace name
1599
+ template: Template name (triage, bugfix, feature, exploration)
1600
+ display_name: Display name for session
1601
+ repos: Optional list of repository URLs
1602
+ dry_run: Preview without creating
1603
+
1604
+ Returns:
1605
+ Dict with session creation status
1606
+ """
1607
+ # Define templates
1608
+ templates = {
1609
+ "triage": {
1610
+ "workflow": "triage",
1611
+ "llmConfig": {"model": "claude-sonnet-4", "temperature": 0.7},
1612
+ "description": "Triage and analyze issues",
1613
+ },
1614
+ "bugfix": {
1615
+ "workflow": "bugfix",
1616
+ "llmConfig": {"model": "claude-sonnet-4", "temperature": 0.3},
1617
+ "description": "Fix bugs and issues",
1618
+ },
1619
+ "feature": {
1620
+ "workflow": "feature-development",
1621
+ "llmConfig": {"model": "claude-sonnet-4", "temperature": 0.5},
1622
+ "description": "Develop new features",
1623
+ },
1624
+ "exploration": {
1625
+ "workflow": "codebase-exploration",
1626
+ "llmConfig": {"model": "claude-sonnet-4", "temperature": 0.8},
1627
+ "description": "Explore codebase",
1628
+ },
1629
+ }
1630
+
1631
+ if template not in templates:
1632
+ return {
1633
+ "created": False,
1634
+ "message": f"Unknown template: {template}. Available: {', '.join(templates.keys())}",
1635
+ }
1636
+
1637
+ template_config = templates[template]
1638
+
1639
+ if dry_run:
1640
+ return {
1641
+ "dry_run": True,
1642
+ "success": True,
1643
+ "message": f"Would create session from template '{template}'",
1644
+ "template_config": template_config,
1645
+ "display_name": display_name,
1646
+ "repos": repos or [],
1647
+ }
1648
+
1649
+ try:
1650
+ # Create session manifest
1651
+ manifest = {
1652
+ "apiVersion": "agenticplatform.io/v1",
1653
+ "kind": "AgenticSession",
1654
+ "metadata": {
1655
+ "generateName": f"{template}-",
1656
+ "namespace": project,
1657
+ },
1658
+ "spec": {
1659
+ "displayName": display_name,
1660
+ "workflow": template_config["workflow"],
1661
+ "llmConfig": template_config["llmConfig"],
1662
+ "repos": repos or [],
1663
+ },
1664
+ }
1665
+
1666
+ # Apply manifest using secure temporary file
1667
+ import os
1668
+ import tempfile
1669
+
1670
+ # Security: Use secure temp file with proper permissions (0600)
1671
+ fd, manifest_file = tempfile.mkstemp(suffix=".yaml", prefix=f"acp-template-{secrets.token_hex(8)}-")
1672
+ try:
1673
+ # Write to file descriptor with secure permissions
1674
+ with os.fdopen(fd, "w") as f:
1675
+ yaml.dump(manifest, f)
1676
+
1677
+ result = await self._run_oc_command(["create", "-f", manifest_file, "-o", "json"])
1678
+
1679
+ if result.returncode != 0:
1680
+ return {
1681
+ "created": False,
1682
+ "message": f"Failed to create session: {result.stderr.decode()}",
1683
+ }
1684
+
1685
+ created_data = json.loads(result.stdout.decode())
1686
+ session_name = created_data.get("metadata", {}).get("name")
1687
+
1688
+ return {
1689
+ "created": True,
1690
+ "session": session_name,
1691
+ "message": f"Successfully created session '{session_name}' from template '{template}'",
1692
+ }
1693
+ finally:
1694
+ # Ensure cleanup even if operation fails
1695
+ try:
1696
+ os.unlink(manifest_file)
1697
+ except OSError:
1698
+ pass
1699
+
1700
+ except Exception as e:
1701
+ return {"created": False, "message": str(e)}
1702
+
1703
+ # Auth Feature: Login
1704
+ async def login(self, cluster: str, web: bool = True, token: str | None = None) -> dict[str, Any]:
1705
+ """Authenticate to OpenShift cluster.
1706
+
1707
+ Args:
1708
+ cluster: Cluster alias name or server URL
1709
+ web: Use web login flow
1710
+ token: Direct token authentication
1711
+
1712
+ Returns:
1713
+ Dict with login status
1714
+ """
1715
+ # Look up cluster in config
1716
+ server = cluster
1717
+ if cluster in self.config.get("clusters", {}):
1718
+ server = self.config["clusters"][cluster]["server"]
1719
+
1720
+ try:
1721
+ if token:
1722
+ # Token-based login
1723
+ result = await self._run_oc_command(
1724
+ ["login", "--token", token, "--server", server],
1725
+ capture_output=False,
1726
+ )
1727
+ elif web:
1728
+ # Web-based login
1729
+ result = await self._run_oc_command(
1730
+ ["login", "--web", "--server", server],
1731
+ capture_output=False,
1732
+ )
1733
+ else:
1734
+ return {
1735
+ "authenticated": False,
1736
+ "message": "Either 'web' or 'token' must be provided",
1737
+ }
1738
+
1739
+ if result.returncode != 0:
1740
+ return {
1741
+ "authenticated": False,
1742
+ "message": "Login failed",
1743
+ }
1744
+
1745
+ # Get user info after login
1746
+ whoami_result = await self.whoami()
1747
+
1748
+ return {
1749
+ "authenticated": True,
1750
+ "user": whoami_result.get("user"),
1751
+ "cluster": cluster,
1752
+ "server": server,
1753
+ "message": f"Successfully logged in to {cluster}",
1754
+ }
1755
+
1756
+ except Exception as e:
1757
+ return {"authenticated": False, "message": str(e)}
1758
+
1759
+ # Auth Feature: Switch Cluster
1760
+ async def switch_cluster(self, cluster: str) -> dict[str, Any]:
1761
+ """Switch to a different cluster context.
1762
+
1763
+ Args:
1764
+ cluster: Cluster alias name
1765
+
1766
+ Returns:
1767
+ Dict with switch status
1768
+ """
1769
+ if cluster not in self.config.get("clusters", {}):
1770
+ return {
1771
+ "switched": False,
1772
+ "message": f"Unknown cluster: {cluster}. Use acp_list_clusters to see available clusters.",
1773
+ }
1774
+
1775
+ cluster_config = self.config["clusters"][cluster]
1776
+ server = cluster_config["server"]
1777
+
1778
+ try:
1779
+ # Get current context
1780
+ current_whoami = await self.whoami()
1781
+ previous_cluster = current_whoami.get("cluster", "unknown")
1782
+
1783
+ # Switch context (assumes already authenticated)
1784
+ result = await self._run_oc_command(
1785
+ ["login", "--server", server],
1786
+ capture_output=False,
1787
+ )
1788
+
1789
+ if result.returncode != 0:
1790
+ return {
1791
+ "switched": False,
1792
+ "message": f"Failed to switch to {cluster}. You may need to login first.",
1793
+ }
1794
+
1795
+ # Get new user info
1796
+ new_whoami = await self.whoami()
1797
+
1798
+ return {
1799
+ "switched": True,
1800
+ "previous": previous_cluster,
1801
+ "current": cluster,
1802
+ "user": new_whoami.get("user"),
1803
+ "message": f"Switched from {previous_cluster} to {cluster}",
1804
+ }
1805
+
1806
+ except Exception as e:
1807
+ return {"switched": False, "message": str(e)}
1808
+
1809
+ # Auth Feature: Add Cluster
1810
+ def add_cluster(
1811
+ self,
1812
+ name: str,
1813
+ server: str,
1814
+ description: str | None = None,
1815
+ default_project: str | None = None,
1816
+ set_default: bool = False,
1817
+ ) -> dict[str, Any]:
1818
+ """Add a new cluster to configuration.
1819
+
1820
+ Args:
1821
+ name: Cluster alias name
1822
+ server: Server URL
1823
+ description: Optional description
1824
+ default_project: Optional default project
1825
+ set_default: Set as default cluster
1826
+
1827
+ Returns:
1828
+ Dict with add status
1829
+ """
1830
+ try:
1831
+ # Security: Validate inputs
1832
+ if not isinstance(name, str) or not re.match(r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?$", name):
1833
+ return {"added": False, "message": "Invalid cluster name format"}
1834
+ if not isinstance(server, str) or not (server.startswith("https://") or server.startswith("http://")):
1835
+ return {"added": False, "message": "Server must be a valid HTTP/HTTPS URL"}
1836
+ if description and (not isinstance(description, str) or len(description) > 500):
1837
+ return {
1838
+ "added": False,
1839
+ "message": "Description must be a string under 500 characters",
1840
+ }
1841
+ if default_project:
1842
+ try:
1843
+ self._validate_input(default_project, "default_project")
1844
+ except ValueError as e:
1845
+ return {"added": False, "message": str(e)}
1846
+
1847
+ # Update config
1848
+ if "clusters" not in self.config:
1849
+ self.config["clusters"] = {}
1850
+
1851
+ self.config["clusters"][name] = {
1852
+ "server": server,
1853
+ "description": description or "",
1854
+ "default_project": default_project,
1855
+ }
1856
+
1857
+ if set_default:
1858
+ self.config["default_cluster"] = name
1859
+
1860
+ # Save config securely
1861
+ config_file = Path(self.config_path)
1862
+ config_file.parent.mkdir(parents=True, exist_ok=True)
1863
+
1864
+ # Security: Write with restricted permissions
1865
+ with open(config_file, "w") as f:
1866
+ yaml.dump(self.config, f)
1867
+ # Set file permissions to 0600 (owner read/write only)
1868
+ import os
1869
+
1870
+ os.chmod(config_file, 0o600)
1871
+
1872
+ return {
1873
+ "added": True,
1874
+ "cluster": {
1875
+ "name": name,
1876
+ "server": server,
1877
+ "description": description,
1878
+ "default_project": default_project,
1879
+ "is_default": set_default,
1880
+ },
1881
+ "message": f"Successfully added cluster '{name}'",
1882
+ }
1883
+
1884
+ except Exception as e:
1885
+ return {"added": False, "message": f"Failed to add cluster: {str(e)}"}