ralphx 0.2.2__py3-none-any.whl → 0.3.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ralphx/__init__.py +1 -1
- ralphx/api/main.py +9 -1
- ralphx/api/routes/auth.py +730 -65
- ralphx/api/routes/config.py +3 -56
- ralphx/api/routes/export_import.py +795 -0
- ralphx/api/routes/loops.py +4 -4
- ralphx/api/routes/planning.py +19 -5
- ralphx/api/routes/projects.py +84 -2
- ralphx/api/routes/templates.py +115 -2
- ralphx/api/routes/workflows.py +22 -22
- ralphx/cli.py +21 -6
- ralphx/core/auth.py +346 -171
- ralphx/core/database.py +615 -167
- ralphx/core/executor.py +0 -3
- ralphx/core/loop.py +15 -2
- ralphx/core/loop_templates.py +69 -3
- ralphx/core/planning_service.py +109 -21
- ralphx/core/preview.py +9 -25
- ralphx/core/project_db.py +175 -75
- ralphx/core/project_export.py +469 -0
- ralphx/core/project_import.py +670 -0
- ralphx/core/sample_project.py +430 -0
- ralphx/core/templates.py +46 -9
- ralphx/core/workflow_executor.py +35 -5
- ralphx/core/workflow_export.py +606 -0
- ralphx/core/workflow_import.py +1149 -0
- ralphx/examples/sample_project/DESIGN.md +345 -0
- ralphx/examples/sample_project/README.md +37 -0
- ralphx/examples/sample_project/guardrails.md +57 -0
- ralphx/examples/sample_project/stories.jsonl +10 -0
- ralphx/mcp/__init__.py +6 -2
- ralphx/mcp/registry.py +3 -3
- ralphx/mcp/server.py +99 -29
- ralphx/mcp/tools/__init__.py +4 -0
- ralphx/mcp/tools/help.py +204 -0
- ralphx/mcp/tools/workflows.py +114 -32
- ralphx/mcp_server.py +6 -2
- ralphx/static/assets/index-0ovNnfOq.css +1 -0
- ralphx/static/assets/index-CY9s08ZB.js +251 -0
- ralphx/static/assets/index-CY9s08ZB.js.map +1 -0
- ralphx/static/index.html +14 -0
- {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/METADATA +34 -12
- {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/RECORD +45 -30
- {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/WHEEL +0 -0
- {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,469 @@
|
|
|
1
|
+
"""Project export functionality for RalphX.
|
|
2
|
+
|
|
3
|
+
Enables exporting entire projects with multiple workflows to a portable ZIP format.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import io
|
|
7
|
+
import json
|
|
8
|
+
import zipfile
|
|
9
|
+
from dataclasses import dataclass, field
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Optional
|
|
13
|
+
|
|
14
|
+
from ralphx import __version__
|
|
15
|
+
from ralphx.core.project_db import PROJECT_SCHEMA_VERSION, ProjectDatabase
|
|
16
|
+
from ralphx.core.workflow_export import (
|
|
17
|
+
EXPORT_FORMAT_VERSION,
|
|
18
|
+
ExportOptions,
|
|
19
|
+
SecretMatch,
|
|
20
|
+
SECRET_PATTERNS,
|
|
21
|
+
WorkflowExporter,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# Export format name for project exports. Written into manifest.json's
# "format" field so importers can distinguish project archives from
# single-workflow archives (which use the workflow_export format name).
PROJECT_EXPORT_FORMAT_NAME = "ralphx-project-export"

# Security limits: refuse to return archives larger than this. The ZIP is
# assembled entirely in memory (io.BytesIO), so this also bounds memory use.
MAX_EXPORT_SIZE_MB = 500
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass
class WorkflowSummary:
    """Summary of a single workflow, as shown in an export preview."""

    # Workflow identifier (also used as the directory name under workflows/
    # in the exported archive).
    id: str
    # Human-readable workflow name.
    name: str
    # Number of steps defined in the workflow.
    steps_count: int
    # Total number of work items attached to the workflow.
    items_count: int
    # Number of workflow-scoped resources.
    resources_count: int
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass
class ProjectExportPreview:
    """Preview of what will be exported from a project.

    Aggregates per-workflow summaries plus project-wide totals so a caller
    (e.g. an export dialog) can show what the archive will contain before
    actually building it.
    """

    # Project display name (falls back to 'Unknown' when absent).
    project_name: str
    # Project slug (falls back to 'unknown' when absent).
    project_slug: str
    # One summary per workflow selected for export.
    workflows: list[WorkflowSummary]
    # Sum of items_count across the selected workflows.
    total_items: int
    # Sum of resources_count across workflows, plus project-level resources.
    total_resources: int
    # Rough pre-compression size estimate (see _estimate_export_size).
    estimated_size_bytes: int
    # Potential secrets detected across all workflow previews; empty by default.
    potential_secrets: list[SecretMatch] = field(default_factory=list)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@dataclass
class ProjectExportOptions:
    """Options for a project export operation."""

    # Subset of workflow IDs to export; None means all workflows.
    workflow_ids: Optional[list[str]] = None  # None = all workflows
    # Include historical run metadata (runs/runs.json per workflow).
    include_runs: bool = False
    # Include planning sessions (planning/session.json per workflow).
    include_planning: bool = True
    # Include the message transcripts inside exported planning sessions.
    include_planning_messages: bool = False
    # Include step output artifacts (not needed to re-run a workflow fresh).
    include_step_artifacts: bool = False  # Step outputs (not needed for fresh runs)
    # Redact likely secrets from exported content by default, for safety.
    strip_secrets: bool = True  # Strip potential secrets by default for safety
    # Include project-level (shared) resources in shared-resources/.
    include_project_resources: bool = True
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _strip_secrets(text: Optional[str]) -> str:
|
|
67
|
+
"""Strip potential secrets from text using the shared secret patterns."""
|
|
68
|
+
import re
|
|
69
|
+
if not text:
|
|
70
|
+
return ''
|
|
71
|
+
result = text
|
|
72
|
+
for pattern, _ in SECRET_PATTERNS:
|
|
73
|
+
result = re.sub(pattern, '[REDACTED]', result, flags=re.IGNORECASE)
|
|
74
|
+
return result
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class ProjectExporter:
    """Exports projects with multiple workflows to ZIP archives.

    Export format (paths inside the ZIP):
    - manifest.json: Project metadata, workflow list
    - project.json: Project settings and resources
    - workflows/
        - workflow-1/
            - workflow.json
            - items.jsonl
            - resources/
        - workflow-2/
            - ...
    - shared-resources/: Project-level resources
    """

    def __init__(self, project_db: ProjectDatabase, project_info: dict):
        """Initialize exporter.

        Args:
            project_db: ProjectDatabase instance for the project.
            project_info: Project metadata dict (name, slug, path, etc.).
        """
        self.db = project_db
        self.project_info = project_info
        # Per-workflow exporter, reused for previews (get_preview below).
        self.workflow_exporter = WorkflowExporter(project_db)

    def get_preview(
        self,
        options: Optional[ProjectExportOptions] = None,
    ) -> ProjectExportPreview:
        """Get a preview of what will be exported.

        Args:
            options: Export options (workflow_ids filter, etc.). Defaults to
                ProjectExportOptions() when None.

        Returns:
            ProjectExportPreview with per-workflow summaries, aggregate
            totals, a rough size estimate, and any potential secrets found
            by the per-workflow previews.
        """
        if options is None:
            options = ProjectExportOptions()

        # Get all workflows
        all_workflows = self.db.list_workflows()

        # Filter by selected IDs if specified
        if options.workflow_ids is not None:
            workflows = [w for w in all_workflows if w['id'] in options.workflow_ids]
        else:
            workflows = all_workflows

        # Build workflow summaries by delegating to the per-workflow
        # exporter's preview (which also scans for potential secrets).
        summaries = []
        total_items = 0
        total_resources = 0
        all_secrets: list[SecretMatch] = []

        for wf in workflows:
            wf_preview = self.workflow_exporter.get_preview(wf['id'])
            summaries.append(WorkflowSummary(
                id=wf['id'],
                name=wf['name'],
                steps_count=wf_preview.steps_count,
                items_count=wf_preview.items_total,
                resources_count=wf_preview.resources_count,
            ))
            total_items += wf_preview.items_total
            total_resources += wf_preview.resources_count
            all_secrets.extend(wf_preview.potential_secrets)

        # Project-level resources are counted into the total too; listing
        # them is best-effort (older project DBs may lack the table —
        # presumably why this is wrapped; TODO confirm).
        try:
            project_resources = self.db.list_project_resources()
            total_resources += len(project_resources)
        except Exception:
            project_resources = []

        # Estimate size (heuristic only; see _estimate_export_size).
        estimated_size = self._estimate_export_size(workflows, total_items, total_resources)

        return ProjectExportPreview(
            project_name=self.project_info.get('name', 'Unknown'),
            project_slug=self.project_info.get('slug', 'unknown'),
            workflows=summaries,
            total_items=total_items,
            total_resources=total_resources,
            estimated_size_bytes=estimated_size,
            potential_secrets=all_secrets,
        )

    def export_project(
        self,
        options: Optional[ProjectExportOptions] = None,
    ) -> tuple[bytes, str]:
        """Export project to ZIP archive.

        Builds the whole archive in memory. Workflow/step/item statuses are
        reset in the exported data ('draft' / 'pending') so an import starts
        from a clean state rather than carrying over execution state.

        Args:
            options: Export options. Defaults to ProjectExportOptions()
                when None.

        Returns:
            Tuple of (zip_bytes, filename). The filename embeds the project
            slug and a UTC timestamp: project-<slug>-<ts>.ralphx.zip.

        Raises:
            ValueError: If the resulting archive exceeds MAX_EXPORT_SIZE_MB.
        """
        if options is None:
            options = ProjectExportOptions()

        # Get workflows to export (same ID filtering as get_preview).
        all_workflows = self.db.list_workflows()
        if options.workflow_ids is not None:
            workflows = [w for w in all_workflows if w['id'] in options.workflow_ids]
        else:
            workflows = all_workflows

        # Get project resources (best-effort; skipped entirely when the
        # option is off or the lookup fails).
        project_resources = []
        if options.include_project_resources:
            try:
                project_resources = self.db.list_project_resources()
            except Exception:
                pass

        # Build manifest
        manifest = self._build_manifest(workflows, project_resources, options)

        # Create ZIP archive fully in memory.
        zip_buffer = io.BytesIO()
        with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
            # Write manifest. default=str makes json.dumps stringify
            # non-JSON types (e.g. datetimes) instead of raising.
            zf.writestr('manifest.json', json.dumps(manifest, indent=2, default=str))

            # Write project info
            project_data = {
                'name': self.project_info.get('name'),
                'slug': self.project_info.get('slug'),
                'path': None,  # Don't export path (local filesystem detail)
                'created_at': self.project_info.get('created_at'),
            }
            zf.writestr('project.json', json.dumps(project_data, indent=2, default=str))

            # Write project-level (shared) resources
            if project_resources:
                resources_data = self._serialize_project_resources(project_resources, options)
                zf.writestr('shared-resources/resources.json', json.dumps(resources_data, indent=2, default=str))

            # NOTE(review): workflow_export_options is constructed here but
            # never used below — the per-workflow serialization is done
            # inline instead of delegating to WorkflowExporter. Confirm
            # whether this was meant to be passed somewhere.
            workflow_export_options = ExportOptions(
                include_runs=options.include_runs,
                include_planning=options.include_planning,
                include_planning_messages=options.include_planning_messages,
                strip_secrets=options.strip_secrets,
            )

            # Write each workflow under workflows/<id>/.
            for wf in workflows:
                wf_prefix = f"workflows/{wf['id']}/"

                # Get workflow data
                steps = self.db.list_workflow_steps(wf['id'])
                resources = self.db.list_workflow_resources(wf['id'])
                # limit=100000 is a practical cap — presumably above any
                # realistic item count; items beyond it would be silently
                # dropped (TODO confirm acceptable).
                items, _ = self.db.list_work_items(workflow_id=wf['id'], limit=100000)

                # Write workflow.json; step status is always exported as
                # 'pending' regardless of stored state.
                serialized_steps = []
                for s in steps:
                    step_data = {
                        'id': s['id'],
                        'workflow_id': s['workflow_id'],
                        'step_number': s['step_number'],
                        'name': s['name'],
                        'step_type': s['step_type'],
                        'status': 'pending',
                        'config': s.get('config'),
                        'loop_name': s.get('loop_name'),
                    }
                    # Only include artifacts if explicitly requested (off by default)
                    if options.include_step_artifacts:
                        step_data['artifacts'] = s.get('artifacts')
                    serialized_steps.append(step_data)

                # Workflow itself is exported as a fresh 'draft' at step 1.
                workflow_data = {
                    'workflow': {
                        'id': wf['id'],
                        'template_id': wf.get('template_id'),
                        'name': wf['name'],
                        'status': 'draft',
                        'current_step': 1,
                        'created_at': wf.get('created_at'),
                        'updated_at': wf.get('updated_at'),
                    },
                    'steps': serialized_steps,
                }
                zf.writestr(
                    wf_prefix + 'workflow.json',
                    json.dumps(workflow_data, indent=2, default=str),
                )

                # Write items.jsonl (one JSON object per line); item status
                # is always exported as 'pending'.
                items_lines = []
                for item in items:
                    content = item.get('content', '')
                    title = item.get('title')
                    # Apply secret stripping if enabled
                    if options.strip_secrets:
                        content = _strip_secrets(content)
                        title = _strip_secrets(title) if title else title

                    item_data = {
                        'id': item['id'],
                        'workflow_id': item['workflow_id'],
                        'source_step_id': item.get('source_step_id'),
                        'content': content,
                        'title': title,
                        'priority': item.get('priority'),
                        'status': 'pending',
                        'category': item.get('category'),
                        'tags': item.get('tags'),
                        'metadata': item.get('metadata'),
                        'item_type': item.get('item_type'),
                        'dependencies': item.get('dependencies'),
                        'phase': item.get('phase'),
                        'duplicate_of': item.get('duplicate_of'),
                        'created_at': item.get('created_at'),
                    }
                    items_lines.append(json.dumps(item_data, default=str))
                zf.writestr(wf_prefix + 'items.jsonl', '\n'.join(items_lines))

                # Write resources
                # NOTE: We intentionally do NOT read from file_path here.
                # Resources with file_path references are project-local and should
                # only include content that was explicitly inlined. Reading arbitrary
                # file paths during export could leak sensitive files if a malicious
                # import planted a crafted file_path.
                resources_data = []
                for r in resources:
                    content = r.get('content')

                    # Apply secret stripping if enabled
                    if options.strip_secrets and content:
                        content = _strip_secrets(content)

                    resources_data.append({
                        'id': r['id'],
                        'workflow_id': r['workflow_id'],
                        'resource_type': r['resource_type'],
                        'name': r['name'],
                        'content': content,
                        'file_path': None,  # Never export file paths
                        'source': r.get('source'),
                        'enabled': r.get('enabled', True),
                    })
                zf.writestr(
                    wf_prefix + 'resources/resources.json',
                    json.dumps(resources_data, indent=2, default=str),
                )

                # Write planning sessions if requested (best-effort: any
                # failure skips planning for this workflow silently).
                if options.include_planning:
                    try:
                        planning_sessions = self.db.list_planning_sessions(workflow_id=wf['id'])
                        if planning_sessions:
                            planning_data = []
                            for s in planning_sessions:
                                data = {
                                    'id': s['id'],
                                    'workflow_id': s['workflow_id'],
                                    'step_id': s['step_id'],
                                    'artifacts': s.get('artifacts'),
                                    'status': s.get('status'),
                                    'created_at': s.get('created_at'),
                                }
                                # Message transcripts only when explicitly asked for.
                                if options.include_planning_messages:
                                    data['messages'] = s.get('messages', [])
                                planning_data.append(data)
                            zf.writestr(
                                wf_prefix + 'planning/session.json',
                                json.dumps(planning_data, indent=2, default=str),
                            )
                    except Exception:
                        pass

                # Write runs if requested (best-effort, like planning above).
                if options.include_runs:
                    try:
                        runs = self.db.list_runs(workflow_id=wf['id'])
                        if runs:
                            runs_data = [
                                {
                                    'id': r['id'],
                                    'loop_name': r['loop_name'],
                                    'status': r['status'],
                                    'workflow_id': r['workflow_id'],
                                    'step_id': r['step_id'],
                                    'started_at': r.get('started_at'),
                                    'completed_at': r.get('completed_at'),
                                    'iterations_completed': r.get('iterations_completed', 0),
                                    'items_generated': r.get('items_generated', 0),
                                }
                                for r in runs
                            ]
                            zf.writestr(
                                wf_prefix + 'runs/runs.json',
                                json.dumps(runs_data, indent=2, default=str),
                            )
                    except Exception:
                        pass

        # Generate filename
        # NOTE(review): datetime.utcnow() is deprecated since Python 3.12
        # (naive UTC); datetime.now(timezone.utc) is the modern form, but
        # switching would also change isoformat output in _build_manifest.
        timestamp = datetime.utcnow().strftime('%Y%m%d-%H%M%S')
        slug = self.project_info.get('slug', 'project')
        filename = f"project-{slug}-{timestamp}.ralphx.zip"

        zip_bytes = zip_buffer.getvalue()

        # Validate size (post-compression, whole-archive cap).
        if len(zip_bytes) > MAX_EXPORT_SIZE_MB * 1024 * 1024:
            raise ValueError(f"Export exceeds maximum size of {MAX_EXPORT_SIZE_MB}MB")

        return zip_bytes, filename

    def _estimate_export_size(
        self,
        workflows: list[dict],
        total_items: int,
        total_resources: int,
    ) -> int:
        """Estimate export size in bytes (heuristic, pre-export).

        Args:
            workflows: Workflows selected for export.
            total_items: Total work item count across those workflows.
            total_resources: Total resource count (workflow + project level).

        Returns:
            Rough compressed-size estimate in bytes.
        """
        # Rough estimate: 1KB per item, 2KB per resource, 500B per workflow
        size = (total_items * 1024) + (total_resources * 2048) + (len(workflows) * 512)
        # Apply compression estimate (60% of raw). NOTE(review): the comment
        # mentions "overhead" but none is added beyond the 0.6 factor.
        return int(size * 0.6)

    def _build_manifest(
        self,
        workflows: list[dict],
        project_resources: list[dict],
        options: ProjectExportOptions,
    ) -> dict:
        """Build the project manifest (manifest.json contents).

        Records format/version info for importers, a summary of the archive
        contents, and which export options were in effect.
        """
        return {
            'version': EXPORT_FORMAT_VERSION,
            'format': PROJECT_EXPORT_FORMAT_NAME,
            'exported_at': datetime.utcnow().isoformat() + 'Z',
            'ralphx_version': __version__,
            'schema_version': PROJECT_SCHEMA_VERSION,
            'project': {
                'name': self.project_info.get('name'),
                'slug': self.project_info.get('slug'),
            },
            'contents': {
                'workflows_count': len(workflows),
                'workflows': [
                    {
                        'id': w['id'],
                        'name': w['name'],
                    }
                    for w in workflows
                ],
                'shared_resources_count': len(project_resources),
            },
            'export_options': {
                'include_runs': options.include_runs,
                'include_planning': options.include_planning,
                'include_step_artifacts': options.include_step_artifacts,
                'include_project_resources': options.include_project_resources,
            },
        }

    def _serialize_project_resources(
        self,
        resources: list[dict],
        options: ProjectExportOptions,
    ) -> list[dict]:
        """Serialize project-level resources for shared-resources/resources.json.

        Args:
            resources: Project resource rows from the database.
            options: Export options (strip_secrets is honored here).

        Returns:
            List of JSON-serializable resource dicts with file paths removed.
        """
        # NOTE: We intentionally do NOT read from file_path here.
        # See workflow resources comment above for rationale.
        result = []
        for r in resources:
            content = r.get('content')

            # Apply secret stripping if enabled
            if options.strip_secrets and content:
                content = _strip_secrets(content)

            result.append({
                'id': r['id'],
                'resource_type': r['resource_type'],
                'name': r['name'],
                'content': content,
                'file_path': None,  # Never export file paths
                'auto_inherit': r.get('auto_inherit', False),
            })
        return result