ralphx 0.2.2__py3-none-any.whl → 0.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45):
  1. ralphx/__init__.py +1 -1
  2. ralphx/api/main.py +9 -1
  3. ralphx/api/routes/auth.py +730 -65
  4. ralphx/api/routes/config.py +3 -56
  5. ralphx/api/routes/export_import.py +795 -0
  6. ralphx/api/routes/loops.py +4 -4
  7. ralphx/api/routes/planning.py +19 -5
  8. ralphx/api/routes/projects.py +84 -2
  9. ralphx/api/routes/templates.py +115 -2
  10. ralphx/api/routes/workflows.py +22 -22
  11. ralphx/cli.py +21 -6
  12. ralphx/core/auth.py +346 -171
  13. ralphx/core/database.py +615 -167
  14. ralphx/core/executor.py +0 -3
  15. ralphx/core/loop.py +15 -2
  16. ralphx/core/loop_templates.py +69 -3
  17. ralphx/core/planning_service.py +109 -21
  18. ralphx/core/preview.py +9 -25
  19. ralphx/core/project_db.py +175 -75
  20. ralphx/core/project_export.py +469 -0
  21. ralphx/core/project_import.py +670 -0
  22. ralphx/core/sample_project.py +430 -0
  23. ralphx/core/templates.py +46 -9
  24. ralphx/core/workflow_executor.py +35 -5
  25. ralphx/core/workflow_export.py +606 -0
  26. ralphx/core/workflow_import.py +1149 -0
  27. ralphx/examples/sample_project/DESIGN.md +345 -0
  28. ralphx/examples/sample_project/README.md +37 -0
  29. ralphx/examples/sample_project/guardrails.md +57 -0
  30. ralphx/examples/sample_project/stories.jsonl +10 -0
  31. ralphx/mcp/__init__.py +6 -2
  32. ralphx/mcp/registry.py +3 -3
  33. ralphx/mcp/server.py +99 -29
  34. ralphx/mcp/tools/__init__.py +4 -0
  35. ralphx/mcp/tools/help.py +204 -0
  36. ralphx/mcp/tools/workflows.py +114 -32
  37. ralphx/mcp_server.py +6 -2
  38. ralphx/static/assets/index-0ovNnfOq.css +1 -0
  39. ralphx/static/assets/index-CY9s08ZB.js +251 -0
  40. ralphx/static/assets/index-CY9s08ZB.js.map +1 -0
  41. ralphx/static/index.html +14 -0
  42. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/METADATA +34 -12
  43. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/RECORD +45 -30
  44. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/WHEEL +0 -0
  45. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,606 @@
1
+ """Workflow export functionality for RalphX.
2
+
3
+ Enables exporting entire workflows (with resources, steps, items, settings)
4
+ to a portable ZIP format that can be imported into other projects.
5
+ """
6
+
7
+ import hashlib
8
+ import io
9
+ import json
10
+ import re
11
+ import zipfile
12
+ from dataclasses import dataclass, field
13
+ from datetime import datetime
14
+ from pathlib import Path
15
+ from typing import Any, Optional
16
+
17
+ from ralphx import __version__
18
+ from ralphx.core.project_db import PROJECT_SCHEMA_VERSION, ProjectDatabase
19
+
20
+
21
# Export format version (written into manifest.json; checked by the importer)
EXPORT_FORMAT_VERSION = "1.0"
EXPORT_FORMAT_NAME = "ralphx-workflow-export"

# Security limits
MAX_EXPORT_SIZE_MB = 500  # hard cap on the final ZIP, enforced after compression in export_workflow()
MAX_FILES_IN_ARCHIVE = 10000  # NOTE(review): not referenced in this module — presumably enforced by the import side; confirm

# Patterns for detecting potential secrets.
# Each entry is a (regex, human-readable label) tuple. Consumers apply them
# case-insensitively (see WorkflowExporter._scan_for_secrets and
# WorkflowExporter._strip_secrets, both of which pass re.IGNORECASE).
SECRET_PATTERNS = [
    # API keys
    (r'sk-[a-zA-Z0-9]{20,}', 'API key (sk-*)'),
    (r'api[_-]?key["\']?\s*[:=]\s*["\']?[a-zA-Z0-9_-]{16,}', 'API key assignment'),
    (r'key[_-]?["\']?\s*[:=]\s*["\']?[a-zA-Z0-9_-]{32,}', 'Generic key'),
    # OAuth/JWT tokens
    (r'eyJ[a-zA-Z0-9_-]{10,}\.[a-zA-Z0-9_-]+\.[a-zA-Z0-9_-]+', 'JWT token'),
    (r'oauth[_-]?token["\']?\s*[:=]\s*["\']?[a-zA-Z0-9_-]{20,}', 'OAuth token'),
    (r'bearer\s+[a-zA-Z0-9_.-]{20,}', 'Bearer token'),
    # Database URIs with passwords
    (r'(postgres|mysql|mongodb)://[^:]+:[^@]+@', 'Database URI with password'),
    # Private keys
    (r'-----BEGIN\s+(RSA\s+)?PRIVATE\s+KEY-----', 'Private key'),
    (r'-----BEGIN\s+OPENSSH\s+PRIVATE\s+KEY-----', 'SSH private key'),
    # Common patterns
    (r'password["\']?\s*[:=]\s*["\']?[^\s"\']{8,}', 'Password assignment'),
    (r'secret["\']?\s*[:=]\s*["\']?[^\s"\']{8,}', 'Secret assignment'),
    (r'token["\']?\s*[:=]\s*["\']?[a-zA-Z0-9_-]{20,}', 'Token assignment'),
    # AWS
    (r'AKIA[0-9A-Z]{16}', 'AWS access key'),
    (r'aws[_-]?secret[_-]?access[_-]?key', 'AWS secret key reference'),
]
52
+
53
+
54
@dataclass
class SecretMatch:
    """A potential secret found during scanning."""
    pattern_name: str  # human-readable label from SECRET_PATTERNS (e.g. 'JWT token')
    location: str  # e.g., "workflow_resources.Design Doc"
    snippet: str  # Redacted snippet showing context (actual match replaced with '[REDACTED]')
60
+
61
+
62
@dataclass
class ExportPreview:
    """Preview of what will be exported.

    Produced by WorkflowExporter.get_preview() so callers can show counts,
    a size estimate, and secret-scan findings before performing the export.
    """
    workflow_name: str
    workflow_id: str
    steps_count: int
    items_total: int  # real item count — may exceed the 100,000-item export cap (see warnings)
    items_by_step: dict[int, int]  # step_id -> count
    resources_count: int
    has_planning_session: bool
    runs_count: int
    estimated_size_bytes: int  # rough post-compression estimate (raw JSON size * 0.6)
    potential_secrets: list[SecretMatch] = field(default_factory=list)
    warnings: list[str] = field(default_factory=list)  # e.g. item-truncation warning
76
+
77
+
78
@dataclass
class ExportOptions:
    """Options for export operation.

    Defaults favor a minimal, safe export: no runs, planning artifacts only,
    no step artifacts, and secret stripping enabled.
    """
    include_runs: bool = False
    include_planning: bool = True
    include_planning_messages: bool = False  # Only artifacts by default
    include_step_artifacts: bool = False  # Step outputs (not needed for fresh runs)
    strip_secrets: bool = True  # Strip potential secrets by default for safety
    as_template: bool = False  # recorded in manifest template_metadata.is_template
87
+
88
+
89
class WorkflowExporter:
    """Exports workflows to portable ZIP archives.

    Export format:
    - manifest.json: Metadata, version, contents summary
    - workflow.json: Workflow definition + steps
    - items.jsonl: All work items (JSONL format)
    - resources/: Workflow resources
    - step-resources/: Step-level resource overrides
    - planning/: Planning session (optional)
    - runs/: Execution history (optional)

    All data is read through a ProjectDatabase; statuses are reset
    (workflow -> 'draft', steps/items -> 'pending') so an import starts fresh.
    """

    def __init__(self, project_db: ProjectDatabase):
        """Initialize exporter.

        Args:
            project_db: ProjectDatabase instance for the project.
        """
        self.db = project_db

    def get_preview(self, workflow_id: str) -> ExportPreview:
        """Get a preview of what will be exported.

        Args:
            workflow_id: ID of the workflow to export.

        Returns:
            ExportPreview with counts and size estimates.

        Raises:
            ValueError: If workflow not found.
        """
        workflow = self.db.get_workflow(workflow_id)
        if not workflow:
            raise ValueError(f"Workflow '{workflow_id}' not found")

        steps = self.db.list_workflow_steps(workflow_id)
        resources = self.db.list_workflow_resources(workflow_id)
        # list_work_items returns (items, total); items is capped at 100,000.
        items, total_items = self.db.list_work_items(workflow_id=workflow_id, limit=100000)
        runs = self.db.list_runs(workflow_id=workflow_id)

        # Get planning session if exists
        planning_sessions = self._get_planning_sessions(workflow_id)

        # Check for truncation warning
        export_warnings: list[str] = []
        if total_items > 100000:
            export_warnings.append(
                f"Workflow has {total_items} items but export is limited to 100,000. "
                f"{total_items - 100000} items will be truncated."
            )

        # Count items by step (over the capped list, not the full total)
        items_by_step: dict[int, int] = {}
        for item in items:
            step_id = item.get('source_step_id', 0)
            items_by_step[step_id] = items_by_step.get(step_id, 0) + 1

        # Estimate size (rough calculation)
        estimated_size = self._estimate_export_size(
            workflow, steps, items, resources, planning_sessions, runs
        )

        # Scan for potential secrets
        potential_secrets = self._scan_for_secrets(workflow, resources, items)

        return ExportPreview(
            workflow_name=workflow['name'],
            workflow_id=workflow['id'],
            steps_count=len(steps),
            items_total=total_items,  # Show real count, not truncated count
            items_by_step=items_by_step,
            resources_count=len(resources),
            has_planning_session=len(planning_sessions) > 0,
            runs_count=len(runs),
            estimated_size_bytes=estimated_size,
            potential_secrets=potential_secrets,
            warnings=export_warnings,
        )

    def export_workflow(
        self,
        workflow_id: str,
        options: Optional[ExportOptions] = None,
    ) -> tuple[bytes, str]:
        """Export a workflow to ZIP archive.

        Args:
            workflow_id: ID of the workflow to export.
            options: Export options.

        Returns:
            Tuple of (zip_bytes, filename).

        Raises:
            ValueError: If workflow not found or export fails validation.
        """
        if options is None:
            options = ExportOptions()

        workflow = self.db.get_workflow(workflow_id)
        if not workflow:
            raise ValueError(f"Workflow '{workflow_id}' not found")

        # Collect all data.
        # NOTE(review): items beyond 100,000 are silently dropped here; only
        # get_preview() surfaces a truncation warning — confirm that is intended.
        steps = self.db.list_workflow_steps(workflow_id)
        resources = self.db.list_workflow_resources(workflow_id)
        items, _ = self.db.list_work_items(workflow_id=workflow_id, limit=100000)
        step_resources = self._get_all_step_resources(workflow_id, steps)
        planning_sessions = self._get_planning_sessions(workflow_id) if options.include_planning else []
        runs = self.db.list_runs(workflow_id=workflow_id) if options.include_runs else []

        # Scan for secrets if not stripping (findings are recorded in the manifest)
        potential_secrets: list[SecretMatch] = []
        if not options.strip_secrets:
            potential_secrets = self._scan_for_secrets(workflow, resources, items)

        # Build manifest
        manifest = self._build_manifest(
            workflow, steps, items, resources, planning_sessions, runs,
            potential_secrets, options
        )

        # Create ZIP archive (everything serialized in memory)
        zip_buffer = io.BytesIO()
        with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
            # Write manifest
            zf.writestr('manifest.json', json.dumps(manifest, indent=2, default=str))

            # Write workflow definition
            workflow_data = self._serialize_workflow(workflow, steps, options)
            zf.writestr('workflow.json', json.dumps(workflow_data, indent=2, default=str))

            # Write items as JSONL
            items_content = self._serialize_items_jsonl(items, options)
            zf.writestr('items.jsonl', items_content)

            # Write resources
            resources_data = self._serialize_resources(resources, options)
            zf.writestr('resources/resources.json', json.dumps(resources_data, indent=2, default=str))

            # Write step resources
            if step_resources:
                step_resources_data = self._serialize_step_resources(step_resources, options)
                zf.writestr('step-resources/step-resources.json', json.dumps(step_resources_data, indent=2, default=str))

            # Write planning sessions
            if planning_sessions and options.include_planning:
                planning_data = self._serialize_planning_sessions(
                    planning_sessions, options.include_planning_messages
                )
                zf.writestr('planning/session.json', json.dumps(planning_data, indent=2, default=str))

            # Write runs
            if runs and options.include_runs:
                runs_data = self._serialize_runs(runs)
                zf.writestr('runs/runs.json', json.dumps(runs_data, indent=2, default=str))

        # Generate filename.
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
        # datetime.now(timezone.utc) yields the same strftime digits — confirm
        # before switching.
        timestamp = datetime.utcnow().strftime('%Y%m%d-%H%M%S')
        filename = f"workflow-{workflow['id']}-{timestamp}.ralphx.zip"

        zip_bytes = zip_buffer.getvalue()

        # Validate size (post-compression, against the module-level cap)
        if len(zip_bytes) > MAX_EXPORT_SIZE_MB * 1024 * 1024:
            raise ValueError(f"Export exceeds maximum size of {MAX_EXPORT_SIZE_MB}MB")

        return zip_bytes, filename

    def _get_planning_sessions(self, workflow_id: str) -> list[dict]:
        """Get planning sessions for a workflow.

        Best-effort: returns [] if the database call fails for any reason
        (e.g. older project schemas without planning tables — TODO confirm).
        """
        try:
            return self.db.list_planning_sessions(workflow_id=workflow_id)
        except Exception:
            return []

    def _get_all_step_resources(self, workflow_id: str, steps: list[dict]) -> dict[int, list[dict]]:
        """Get step resources for all steps.

        Returns:
            Mapping of step_id -> non-empty resource list. Steps whose lookup
            fails or returns nothing are omitted (best-effort, per step).
        """
        result: dict[int, list[dict]] = {}
        for step in steps:
            step_id = step['id']
            try:
                resources = self.db.list_step_resources(step_id)
                if resources:
                    result[step_id] = resources
            except Exception:
                pass
        return result

    def _estimate_export_size(
        self,
        workflow: dict,
        steps: list[dict],
        items: list[dict],
        resources: list[dict],
        planning_sessions: list[dict],
        runs: list[dict],
    ) -> int:
        """Estimate the export size in bytes.

        Sums the JSON-serialized length of every component, then scales down
        to approximate ZIP deflate compression.
        """
        size = 0

        # Workflow + steps JSON
        size += len(json.dumps(workflow, default=str))
        size += sum(len(json.dumps(s, default=str)) for s in steps)

        # Items JSONL
        size += sum(len(json.dumps(item, default=str)) for item in items)

        # Resources (inline content counted separately from the row metadata)
        for r in resources:
            size += len(json.dumps(r, default=str))
            if r.get('content'):
                size += len(r['content'])

        # Planning sessions
        for ps in planning_sessions:
            size += len(json.dumps(ps, default=str))

        # Runs
        size += sum(len(json.dumps(r, default=str)) for r in runs)

        # Assume deflate shrinks the raw JSON to roughly 60% of its size
        return int(size * 0.6)

    def _scan_for_secrets(
        self,
        workflow: dict,
        resources: list[dict],
        items: list[dict],
    ) -> list[SecretMatch]:
        """Scan for potential secrets in exportable content.

        Applies every SECRET_PATTERNS regex (case-insensitive) to workflow
        name, resource names/content, and item title/content/metadata.
        Returned snippets have the matched text replaced with '[REDACTED]'
        so findings are safe to display.
        """
        matches: list[SecretMatch] = []

        # Compile patterns once for all scanned texts
        compiled_patterns = [(re.compile(p, re.IGNORECASE), name) for p, name in SECRET_PATTERNS]

        def scan_text(text: str, location: str) -> None:
            # Appends a SecretMatch per regex hit found in `text`.
            if not text:
                return
            for pattern, name in compiled_patterns:
                for match in pattern.finditer(text):
                    # Create redacted snippet with up to 10 chars of context on each side
                    start = max(0, match.start() - 10)
                    end = min(len(text), match.end() + 10)
                    snippet = text[start:end]
                    # Redact the actual match (offsets rebased to the snippet)
                    redacted = snippet[:match.start()-start] + '[REDACTED]' + snippet[match.end()-start:]
                    matches.append(SecretMatch(
                        pattern_name=name,
                        location=location,
                        snippet=redacted[:50] + '...' if len(redacted) > 50 else redacted,
                    ))

        # Scan workflow name (unlikely but check)
        scan_text(workflow.get('name', ''), 'workflow.name')

        # Scan resources
        for r in resources:
            location = f"resources.{r.get('name', 'unknown')}"
            scan_text(r.get('content', ''), location)
            scan_text(r.get('name', ''), location + '.name')

        # Scan items
        for item in items:
            location = f"items.{item.get('id', 'unknown')}"
            scan_text(item.get('content', ''), location)
            scan_text(item.get('title', ''), location + '.title')
            metadata = item.get('metadata')
            if metadata:
                scan_text(json.dumps(metadata, default=str), location + '.metadata')

        return matches

    def _build_manifest(
        self,
        workflow: dict,
        steps: list[dict],
        items: list[dict],
        resources: list[dict],
        planning_sessions: list[dict],
        runs: list[dict],
        potential_secrets: list[SecretMatch],
        options: ExportOptions,
    ) -> dict:
        """Build the manifest.json content.

        NOTE(review): contents.items_total is the (possibly truncated)
        exported count, unlike ExportPreview.items_total which reports the
        real total — confirm the importer expects the exported count.
        """
        return {
            'version': EXPORT_FORMAT_VERSION,
            'format': EXPORT_FORMAT_NAME,
            # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
            # a tz-aware replacement must not double-mark UTC ('+00:00' + 'Z').
            'exported_at': datetime.utcnow().isoformat() + 'Z',
            'ralphx_version': __version__,
            'schema_version': PROJECT_SCHEMA_VERSION,
            'workflow': {
                'id': workflow['id'],
                'name': workflow['name'],
                'template_id': workflow.get('template_id'),
            },
            'contents': {
                'steps': len(steps),
                'items_total': len(items),
                'resources': len(resources),
                'has_planning_session': len(planning_sessions) > 0,
                'has_runs': len(runs) > 0,
            },
            'template_metadata': {
                'is_template': options.as_template,
                'template_id': None,
                'template_version': None,
                'template_source': None,
            },
            'security': {
                'potential_secrets_detected': len(potential_secrets) > 0,
                'secrets_stripped': options.strip_secrets,
                'paths_sanitized': True,
            },
            'export_options': {
                'include_runs': options.include_runs,
                'include_planning': options.include_planning,
                'include_planning_messages': options.include_planning_messages,
                'include_step_artifacts': options.include_step_artifacts,
            },
        }

    def _serialize_workflow(
        self, workflow: dict, steps: list[dict], options: Optional[ExportOptions] = None
    ) -> dict:
        """Serialize workflow and steps for export.

        Statuses are reset ('draft' workflow, 'pending' steps, current_step=1)
        so an imported workflow starts from scratch. Names/configs are exported
        as-is; secret stripping here is not applied (NOTE(review): confirm
        whether step config/loop_name should also be stripped).

        Args:
            workflow: The workflow dict.
            steps: List of step dicts.
            options: Export options (controls whether artifacts are included).
        """
        if options is None:
            options = ExportOptions()

        serialized_steps = []
        for s in steps:
            step_data = {
                'id': s['id'],
                'workflow_id': s['workflow_id'],
                'step_number': s['step_number'],
                'name': s['name'],
                'step_type': s['step_type'],
                'status': 'pending',  # Reset status
                'config': s.get('config'),
                'loop_name': s.get('loop_name'),
            }
            # Only include artifacts if explicitly requested (off by default)
            if options.include_step_artifacts:
                step_data['artifacts'] = s.get('artifacts')
            serialized_steps.append(step_data)

        return {
            'workflow': {
                'id': workflow['id'],
                'template_id': workflow.get('template_id'),
                'name': workflow['name'],
                'status': 'draft',  # Reset status on export
                'current_step': 1,  # Reset to beginning
                'created_at': workflow.get('created_at'),
                'updated_at': workflow.get('updated_at'),
            },
            'steps': serialized_steps,
        }

    def _serialize_items_jsonl(self, items: list[dict], options: ExportOptions) -> str:
        """Serialize items to JSONL format (one JSON object per line)."""
        lines = []
        for item in items:
            item_data = {
                'id': item['id'],
                'workflow_id': item['workflow_id'],
                'source_step_id': item.get('source_step_id'),
                'content': item.get('content', ''),
                'title': item.get('title'),
                'priority': item.get('priority'),
                'status': 'pending',  # Reset status on export
                'category': item.get('category'),
                'tags': item.get('tags'),
                'metadata': item.get('metadata'),
                'item_type': item.get('item_type'),
                'dependencies': item.get('dependencies'),
                'phase': item.get('phase'),
                'duplicate_of': item.get('duplicate_of'),
                'created_at': item.get('created_at'),
            }

            # Strip secrets if requested.
            # Note: _strip_secrets() maps None to '', so a null title exports
            # as an empty string when stripping is enabled. Metadata/tags are
            # not stripped here (NOTE(review): confirm that is acceptable).
            if options.strip_secrets:
                item_data['content'] = self._strip_secrets(item_data.get('content', ''))
                item_data['title'] = self._strip_secrets(item_data.get('title', ''))

            lines.append(json.dumps(item_data, default=str))

        return '\n'.join(lines)

    def _serialize_resources(self, resources: list[dict], options: ExportOptions) -> list[dict]:
        """Serialize workflow resources.

        File-backed resources are inlined: when content is empty but a
        file_path is set, the file is read and its text exported instead,
        and file_path is nulled so no local paths leak into the archive.
        """
        result = []
        for r in resources:
            content = r.get('content', '')

            # If content is empty but file_path exists, read the file
            if not content and r.get('file_path'):
                try:
                    file_path = Path(r['file_path'])
                    if file_path.exists() and file_path.is_file():
                        content = file_path.read_text(encoding='utf-8')
                except Exception:
                    # If file read fails, leave content empty
                    pass

            if options.strip_secrets:
                content = self._strip_secrets(content)

            result.append({
                'id': r['id'],
                'workflow_id': r['workflow_id'],
                'resource_type': r['resource_type'],
                'name': r['name'],
                'content': content,
                'file_path': None,  # Don't export file paths, inline content instead
                'source': r.get('source'),
                'enabled': r.get('enabled', True),
            })
        return result

    def _serialize_step_resources(
        self,
        step_resources: dict[int, list[dict]],
        options: ExportOptions,
    ) -> dict:
        """Serialize step-level resources.

        Same inlining/stripping rules as _serialize_resources. Keys are
        stringified step ids so the mapping is valid JSON.
        """
        result: dict[str, list[dict]] = {}
        for step_id, resources in step_resources.items():
            serialized = []
            for r in resources:
                content = r.get('content', '')

                # If content is empty but file_path exists, read the file
                if not content and r.get('file_path'):
                    try:
                        file_path = Path(r['file_path'])
                        if file_path.exists() and file_path.is_file():
                            content = file_path.read_text(encoding='utf-8')
                    except Exception:
                        # If file read fails, leave content empty
                        pass

                if options.strip_secrets:
                    content = self._strip_secrets(content)

                serialized.append({
                    'id': r['id'],
                    'step_id': r['step_id'],
                    'workflow_resource_id': r.get('workflow_resource_id'),
                    'resource_type': r.get('resource_type'),
                    'name': r.get('name'),
                    'content': content,
                    'file_path': None,
                    'mode': r.get('mode'),
                    'enabled': r.get('enabled', True),
                    'priority': r.get('priority', 0),
                })
            result[str(step_id)] = serialized
        return result

    def _serialize_planning_sessions(
        self,
        sessions: list[dict],
        include_messages: bool,
    ) -> list[dict]:
        """Serialize planning sessions.

        Args:
            sessions: Planning session rows.
            include_messages: When True, the raw message transcript is
                included; otherwise only artifacts/status are exported.
        """
        result = []
        for s in sessions:
            data = {
                'id': s['id'],
                'workflow_id': s['workflow_id'],
                'step_id': s['step_id'],
                'artifacts': s.get('artifacts'),
                'status': s.get('status'),
                'created_at': s.get('created_at'),
            }
            if include_messages:
                data['messages'] = s.get('messages', [])
            result.append(data)
        return result

    def _serialize_runs(self, runs: list[dict]) -> list[dict]:
        """Serialize run history (summary fields only; statuses kept as-is)."""
        return [
            {
                'id': r['id'],
                'loop_name': r['loop_name'],
                'status': r['status'],
                'workflow_id': r['workflow_id'],
                'step_id': r['step_id'],
                'started_at': r.get('started_at'),
                'completed_at': r.get('completed_at'),
                'iterations_completed': r.get('iterations_completed', 0),
                'items_generated': r.get('items_generated', 0),
                'error_message': r.get('error_message'),
            }
            for r in runs
        ]

    def _strip_secrets(self, text: Optional[str]) -> str:
        """Strip potential secrets from text.

        Every SECRET_PATTERNS regex match is replaced with '[REDACTED]'
        (case-insensitive). None/empty input yields ''.
        """
        if not text:
            return ''

        result = text
        for pattern, _ in SECRET_PATTERNS:
            result = re.sub(pattern, '[REDACTED]', result, flags=re.IGNORECASE)

        return result