ralphx-0.2.2-py3-none-any.whl → ralphx-0.3.5-py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
Files changed (45)
  1. ralphx/__init__.py +1 -1
  2. ralphx/api/main.py +9 -1
  3. ralphx/api/routes/auth.py +730 -65
  4. ralphx/api/routes/config.py +3 -56
  5. ralphx/api/routes/export_import.py +795 -0
  6. ralphx/api/routes/loops.py +4 -4
  7. ralphx/api/routes/planning.py +19 -5
  8. ralphx/api/routes/projects.py +84 -2
  9. ralphx/api/routes/templates.py +115 -2
  10. ralphx/api/routes/workflows.py +22 -22
  11. ralphx/cli.py +21 -6
  12. ralphx/core/auth.py +346 -171
  13. ralphx/core/database.py +615 -167
  14. ralphx/core/executor.py +0 -3
  15. ralphx/core/loop.py +15 -2
  16. ralphx/core/loop_templates.py +69 -3
  17. ralphx/core/planning_service.py +109 -21
  18. ralphx/core/preview.py +9 -25
  19. ralphx/core/project_db.py +175 -75
  20. ralphx/core/project_export.py +469 -0
  21. ralphx/core/project_import.py +670 -0
  22. ralphx/core/sample_project.py +430 -0
  23. ralphx/core/templates.py +46 -9
  24. ralphx/core/workflow_executor.py +35 -5
  25. ralphx/core/workflow_export.py +606 -0
  26. ralphx/core/workflow_import.py +1149 -0
  27. ralphx/examples/sample_project/DESIGN.md +345 -0
  28. ralphx/examples/sample_project/README.md +37 -0
  29. ralphx/examples/sample_project/guardrails.md +57 -0
  30. ralphx/examples/sample_project/stories.jsonl +10 -0
  31. ralphx/mcp/__init__.py +6 -2
  32. ralphx/mcp/registry.py +3 -3
  33. ralphx/mcp/server.py +99 -29
  34. ralphx/mcp/tools/__init__.py +4 -0
  35. ralphx/mcp/tools/help.py +204 -0
  36. ralphx/mcp/tools/workflows.py +114 -32
  37. ralphx/mcp_server.py +6 -2
  38. ralphx/static/assets/index-0ovNnfOq.css +1 -0
  39. ralphx/static/assets/index-CY9s08ZB.js +251 -0
  40. ralphx/static/assets/index-CY9s08ZB.js.map +1 -0
  41. ralphx/static/index.html +14 -0
  42. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/METADATA +34 -12
  43. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/RECORD +45 -30
  44. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/WHEEL +0 -0
  45. {ralphx-0.2.2.dist-info → ralphx-0.3.5.dist-info}/entry_points.txt +0 -0
ralphx/core/project_import.py (new file)
@@ -0,0 +1,670 @@
+ """Project import functionality for RalphX.
+
+ Enables importing projects with multiple workflows from ZIP archives.
+ """
+
+ import io
+ import json
+ import zipfile
+ from dataclasses import dataclass, field
+ from datetime import datetime
+ from typing import Optional
+
+ from ralphx.core.project_db import PROJECT_SCHEMA_VERSION, ProjectDatabase
+ from ralphx.core.project_export import PROJECT_EXPORT_FORMAT_NAME
+ from ralphx.core.workflow_export import EXPORT_FORMAT_NAME, EXPORT_FORMAT_VERSION
+ from ralphx.core.workflow_import import (
+     ConflictResolution,
+     ImportOptions,
+     ImportResult,
+     WorkflowImporter,
+     MAX_IMPORT_SIZE_MB,
+     MAX_COMPRESSION_RATIO,
+     MAX_FILES_IN_ARCHIVE,
+     _compare_versions,
+ )
+
+
+ @dataclass
+ class WorkflowPreviewInfo:
+     """Preview info for a workflow in the project export."""
+     id: str
+     name: str
+     steps_count: int
+     items_count: int
+     resources_count: int
+     has_step_artifacts: bool = False  # Whether workflow has step artifacts
+
+
+ @dataclass
+ class ProjectImportPreview:
+     """Preview of what will be imported."""
+     # Detect export type
+     is_project_export: bool  # True = project export, False = single workflow
+
+     # Project info (if project export)
+     project_name: Optional[str] = None
+     project_slug: Optional[str] = None
+
+     # Workflows
+     workflows: list[WorkflowPreviewInfo] = field(default_factory=list)
+     total_items: int = 0
+     total_resources: int = 0
+     shared_resources_count: int = 0
+
+     # Compatibility
+     is_compatible: bool = True
+     compatibility_notes: list[str] = field(default_factory=list)
+
+     # Metadata
+     exported_at: Optional[str] = None
+     ralphx_version: Optional[str] = None
+     schema_version: int = 0
+
+
+ @dataclass
+ class ProjectImportOptions:
+     """Options for project import operation."""
+     selected_workflow_ids: Optional[list[str]] = None  # None = all workflows
+     import_shared_resources: bool = True
+     import_step_artifacts: bool = False  # Import step artifacts if present (off by default)
+     conflict_resolution: ConflictResolution = ConflictResolution.RENAME
+
+
+ @dataclass
+ class ProjectImportResult:
+     """Result of project import operation."""
+     success: bool
+     workflows_imported: int
+     workflow_results: list[ImportResult]
+     shared_resources_imported: int
+     warnings: list[str] = field(default_factory=list)
+
+
+ class ProjectImporter:
+     """Imports projects with multiple workflows from ZIP archives.
+
+     Supports both:
+     - Project exports (multiple workflows)
+     - Single workflow exports (auto-detected)
+     """
+
+     def __init__(self, project_db: ProjectDatabase):
+         """Initialize importer.
+
+         Args:
+             project_db: ProjectDatabase instance for the project.
+         """
+         self.db = project_db
+         self.workflow_importer = WorkflowImporter(project_db)
+
+     def get_preview(self, zip_data: bytes) -> ProjectImportPreview:
+         """Get a preview of what will be imported.
+
+         Auto-detects whether this is a project export or single workflow export.
+
+         Args:
+             zip_data: ZIP file content as bytes.
+
+         Returns:
+             ProjectImportPreview with contents info.
+
+         Raises:
+             ValueError: If archive is invalid.
+         """
+         # Validate archive
+         self._validate_archive(zip_data)
+
+         with zipfile.ZipFile(io.BytesIO(zip_data), 'r') as zf:
+             manifest = self._read_manifest(zf)
+
+             # Detect export type
+             export_format = manifest.get('format')
+
+             if export_format == PROJECT_EXPORT_FORMAT_NAME:
+                 return self._preview_project_export(zf, manifest)
+             elif export_format == EXPORT_FORMAT_NAME:
+                 # Single workflow export
+                 return self._preview_workflow_export(zf, manifest)
+             else:
+                 raise ValueError(f"Unknown export format: {export_format}")
+
+     def import_project(
+         self,
+         zip_data: bytes,
+         options: Optional[ProjectImportOptions] = None,
+     ) -> ProjectImportResult:
+         """Import a project or workflow from ZIP.
+
+         Auto-detects export type and handles appropriately.
+
+         Args:
+             zip_data: ZIP file content as bytes.
+             options: Import options.
+
+         Returns:
+             ProjectImportResult with details.
+
+         Raises:
+             ValueError: If import fails.
+         """
+         if options is None:
+             options = ProjectImportOptions()
+
+         # Validate archive
+         self._validate_archive(zip_data)
+
+         with zipfile.ZipFile(io.BytesIO(zip_data), 'r') as zf:
+             manifest = self._read_manifest(zf)
+             export_format = manifest.get('format')
+
+             if export_format == PROJECT_EXPORT_FORMAT_NAME:
+                 return self._import_project_export(zf, manifest, options)
+             elif export_format == EXPORT_FORMAT_NAME:
+                 # Single workflow export - delegate to workflow importer
+                 return self._import_single_workflow(zip_data, options)
+             else:
+                 raise ValueError(f"Unknown export format: {export_format}")
+
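For orientation, here is a minimal usage sketch of the public API above (preview first, then import). It is illustrative only, not package code; it assumes the caller already has an open ProjectDatabase handle, since constructing one is outside this file, and the archive path is made up.

from pathlib import Path

from ralphx.core.project_db import ProjectDatabase
from ralphx.core.project_import import (
    ConflictResolution,
    ProjectImporter,
    ProjectImportOptions,
)


def import_export_archive(db: ProjectDatabase, archive_path: Path) -> None:
    """Preview an exported archive, then import it into an open project DB."""
    zip_data = archive_path.read_bytes()
    importer = ProjectImporter(db)

    # Works for both project exports and single-workflow exports (auto-detected).
    preview = importer.get_preview(zip_data)
    if not preview.is_compatible:
        raise ValueError("; ".join(preview.compatibility_notes))

    result = importer.import_project(
        zip_data,
        ProjectImportOptions(
            selected_workflow_ids=[wf.id for wf in preview.workflows],  # or None for all
            import_shared_resources=True,
            conflict_resolution=ConflictResolution.RENAME,
        ),
    )
    for warning in result.warnings:
        print(f"warning: {warning}")
    print(f"imported {result.workflows_imported} workflow(s)")

Because both entry points detect the export format from manifest.json, the same call path handles a whole-project archive and a single-workflow archive.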
+     def _validate_archive(self, zip_data: bytes) -> None:
+         """Validate ZIP archive for security and format."""
+         # Size check
+         if len(zip_data) > MAX_IMPORT_SIZE_MB * 1024 * 1024:
+             raise ValueError(f"Archive exceeds maximum size of {MAX_IMPORT_SIZE_MB}MB")
+
+         # Check it's a valid ZIP
+         if not zipfile.is_zipfile(io.BytesIO(zip_data)):
+             raise ValueError("Invalid ZIP file")
+
+         with zipfile.ZipFile(io.BytesIO(zip_data), 'r') as zf:
+             # Check file count
+             if len(zf.namelist()) > MAX_FILES_IN_ARCHIVE:
+                 raise ValueError(f"Archive contains too many files (max {MAX_FILES_IN_ARCHIVE})")
+
+             # Calculate total uncompressed size
+             total_uncompressed = 0
+             for info in zf.infolist():
+                 # Zip Slip prevention: reject paths that could escape extraction directory
+                 filename = info.filename
+
+                 # Reject absolute paths (Unix or Windows style)
+                 if filename.startswith('/') or filename.startswith('\\'):
+                     raise ValueError(f"Absolute path not allowed in archive: {filename}")
+
+                 # Reject backslashes (Windows path separator could bypass Unix checks)
+                 if '\\' in filename:
+                     raise ValueError(f"Backslash not allowed in archive path: {filename}")
+
+                 # Check each path component for ".." traversal
+                 # This catches "foo/../bar" but NOT "foo/..bar" (valid filename)
+                 parts = filename.split('/')
+                 for part in parts:
+                     if part == '..':
+                         raise ValueError(f"Path traversal (..) not allowed in archive: {filename}")
+
+                 # Reject symlinks (external_attr high nibble 0xA = symlink)
+                 if info.external_attr >> 28 == 0xA:
+                     raise ValueError(f"Symlinks not allowed: {filename}")
+
+                 total_uncompressed += info.file_size
+
+             # Zip bomb protection
+             if len(zip_data) > 0:
+                 ratio = total_uncompressed / len(zip_data)
+                 if ratio > MAX_COMPRESSION_RATIO:
+                     raise ValueError(f"Compression ratio too high ({ratio:.0f}:1)")
+
+             if total_uncompressed > MAX_IMPORT_SIZE_MB * 1024 * 1024:
+                 raise ValueError(f"Uncompressed size exceeds {MAX_IMPORT_SIZE_MB}MB")
+
+             # Verify manifest exists
+             if 'manifest.json' not in zf.namelist():
+                 raise ValueError("Missing manifest.json")
+
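To make the Zip Slip check above concrete, here is a small self-contained sketch (not package code) that builds a throwaway archive in memory with a hostile entry name and applies the same per-component ".." test that _validate_archive runs before any entry is read.

import io
import zipfile

# Build an archive whose second entry tries to escape the extraction directory.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("manifest.json", "{}")
    zf.writestr("workflows/../../outside.txt", "escaped?")  # hostile name; writestr does not sanitize

with zipfile.ZipFile(io.BytesIO(buf.getvalue()), "r") as zf:
    for info in zf.infolist():
        # Same rule as _validate_archive: any literal ".." path component is rejected.
        if any(part == ".." for part in info.filename.split("/")):
            print(f"rejected: {info.filename}")

Run against _validate_archive itself, such an archive raises ValueError before the import path reads any entry contents.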
+     def _read_manifest(self, zf: zipfile.ZipFile) -> dict:
+         """Read and parse manifest.json."""
+         try:
+             content = zf.read('manifest.json').decode('utf-8')
+             return json.loads(content)
+         except (KeyError, json.JSONDecodeError) as e:
+             raise ValueError(f"Invalid manifest.json: {e}")
+
+     def _check_compatibility(self, manifest: dict) -> tuple[bool, list[str]]:
+         """Check if the export is compatible."""
+         notes = []
+         is_compatible = True
+
+         export_version = manifest.get('version', '0.0')
+         if _compare_versions(export_version, EXPORT_FORMAT_VERSION) > 0:
+             notes.append(f"Export format {export_version} is newer than supported {EXPORT_FORMAT_VERSION}")
+             is_compatible = False
+
+         schema_version = manifest.get('schema_version', 0)
+         if schema_version > PROJECT_SCHEMA_VERSION:
+             notes.append(f"Schema version {schema_version} is newer than current {PROJECT_SCHEMA_VERSION}")
+             is_compatible = False
+         elif schema_version < PROJECT_SCHEMA_VERSION - 5:
+             notes.append(f"Schema version {schema_version} is quite old")
+
+         return is_compatible, notes
+
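As a concrete illustration of the compatibility rules above (version numbers made up): an archive whose manifest advertises a newer export format or a newer project schema than this build understands is flagged as incompatible, and _import_project_export later refuses it outright.

# Sketch only; assumes `importer` is a ProjectImporter instance.
manifest = {
    "version": "99.0",       # newer than EXPORT_FORMAT_VERSION -> incompatible
    "schema_version": 9999,  # newer than PROJECT_SCHEMA_VERSION -> incompatible
}
is_compatible, notes = importer._check_compatibility(manifest)
assert not is_compatible
print(notes)  # two notes explaining which versions are too new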
+     def _preview_project_export(
+         self,
+         zf: zipfile.ZipFile,
+         manifest: dict,
+     ) -> ProjectImportPreview:
+         """Preview a project export."""
+         is_compatible, notes = self._check_compatibility(manifest)
+
+         workflows_info = []
+         total_items = 0
+         total_resources = 0
+
+         # Get workflow info from manifest
+         for wf_info in manifest.get('contents', {}).get('workflows', []):
+             wf_id = wf_info['id']
+             # Support both new (workflow_id) and old (namespace) path formats
+             wf_prefix = f"workflows/{wf_id}/"
+             # Check for old namespace-based paths for backward compatibility
+             old_namespace = wf_info.get('namespace')
+             if old_namespace and f"workflows/{old_namespace}/workflow.json" in zf.namelist():
+                 wf_prefix = f"workflows/{old_namespace}/"
+
+             # Count items
+             items_count = 0
+             if f"{wf_prefix}items.jsonl" in zf.namelist():
+                 try:
+                     content = zf.read(f"{wf_prefix}items.jsonl").decode('utf-8')
+                     items_count = len([l for l in content.strip().split('\n') if l.strip()])
+                 except Exception:
+                     pass
+
+             # Count resources
+             resources_count = 0
+             if f"{wf_prefix}resources/resources.json" in zf.namelist():
+                 try:
+                     content = zf.read(f"{wf_prefix}resources/resources.json").decode('utf-8')
+                     resources_count = len(json.loads(content))
+                 except Exception:
+                     pass
+
+             # Count steps from workflow.json and detect artifacts
+             steps_count = 0
+             has_step_artifacts = False
+             if f"{wf_prefix}workflow.json" in zf.namelist():
+                 try:
+                     content = zf.read(f"{wf_prefix}workflow.json").decode('utf-8')
+                     wf_data = json.loads(content)
+                     steps = wf_data.get('steps', [])
+                     steps_count = len(steps)
+                     # Check if any step has artifacts
+                     for step in steps:
+                         if step.get('artifacts'):
+                             has_step_artifacts = True
+                             break
+                 except Exception:
+                     pass
+
+             workflows_info.append(WorkflowPreviewInfo(
+                 id=wf_info['id'],
+                 name=wf_info['name'],
+                 steps_count=steps_count,
+                 items_count=items_count,
+                 resources_count=resources_count,
+                 has_step_artifacts=has_step_artifacts,
+             ))
+
+             total_items += items_count
+             total_resources += resources_count
+
+         # Count shared resources
+         shared_resources = 0
+         if 'shared-resources/resources.json' in zf.namelist():
+             try:
+                 content = zf.read('shared-resources/resources.json').decode('utf-8')
+                 shared_resources = len(json.loads(content))
+             except Exception:
+                 pass
+
+         return ProjectImportPreview(
+             is_project_export=True,
+             project_name=manifest.get('project', {}).get('name'),
+             project_slug=manifest.get('project', {}).get('slug'),
+             workflows=workflows_info,
+             total_items=total_items,
+             total_resources=total_resources,
+             shared_resources_count=shared_resources,
+             is_compatible=is_compatible,
+             compatibility_notes=notes,
+             exported_at=manifest.get('exported_at'),
+             ralphx_version=manifest.get('ralphx_version'),
+             schema_version=manifest.get('schema_version', 0),
+         )
+
+     def _preview_workflow_export(
+         self,
+         zf: zipfile.ZipFile,
+         manifest: dict,
+     ) -> ProjectImportPreview:
+         """Preview a single workflow export."""
+         is_compatible, notes = self._check_compatibility(manifest)
+
+         wf_info = manifest.get('workflow', {})
+         contents = manifest.get('contents', {})
+
+         return ProjectImportPreview(
+             is_project_export=False,
+             workflows=[WorkflowPreviewInfo(
+                 id=wf_info.get('id', ''),
+                 name=wf_info.get('name', ''),
+                 steps_count=contents.get('steps', 0),
+                 items_count=contents.get('items_total', 0),
+                 resources_count=contents.get('resources', 0),
+             )],
+             total_items=contents.get('items_total', 0),
+             total_resources=contents.get('resources', 0),
+             shared_resources_count=0,
+             is_compatible=is_compatible,
+             compatibility_notes=notes,
+             exported_at=manifest.get('exported_at'),
+             ralphx_version=manifest.get('ralphx_version'),
+             schema_version=manifest.get('schema_version', 0),
+         )
+
+     def _import_project_export(
+         self,
+         zf: zipfile.ZipFile,
+         manifest: dict,
+         options: ProjectImportOptions,
+     ) -> ProjectImportResult:
+         """Import a project export with multiple workflows."""
+         warnings = []
+         workflow_results = []
+         shared_resources_imported = 0
+
+         # Check compatibility
+         is_compatible, notes = self._check_compatibility(manifest)
+         if not is_compatible:
+             raise ValueError(f"Import not compatible: {'; '.join(notes)}")
+
+         # Import shared resources first
+         if options.import_shared_resources:
+             shared_resources_imported = self._import_shared_resources(zf)
+
+         # Get workflows to import
+         all_workflows = manifest.get('contents', {}).get('workflows', [])
+
+         if options.selected_workflow_ids is not None:
+             workflows_to_import = [
+                 w for w in all_workflows
+                 if w['id'] in options.selected_workflow_ids
+             ]
+         else:
+             workflows_to_import = all_workflows
+
+         # Import each workflow
+         for wf_info in workflows_to_import:
+             try:
+                 result = self._import_workflow_from_project(
+                     zf,
+                     wf_info,
+                     options.conflict_resolution,
+                     import_step_artifacts=options.import_step_artifacts,
+                 )
+                 workflow_results.append(result)
+             except Exception as e:
+                 warnings.append(f"Failed to import workflow '{wf_info['name']}': {e}")
+
+         return ProjectImportResult(
+             success=len(workflow_results) > 0,
+             workflows_imported=len(workflow_results),
+             workflow_results=workflow_results,
+             shared_resources_imported=shared_resources_imported,
+             warnings=warnings,
+         )
+
+     def _import_single_workflow(
+         self,
+         zip_data: bytes,
+         options: ProjectImportOptions,
+     ) -> ProjectImportResult:
+         """Import a single workflow export."""
+         import_options = ImportOptions(
+             conflict_resolution=options.conflict_resolution,
+             import_step_artifacts=options.import_step_artifacts,
+         )
+
+         result = self.workflow_importer.import_workflow(zip_data, import_options)
+
+         return ProjectImportResult(
+             success=result.success,
+             workflows_imported=1 if result.success else 0,
+             workflow_results=[result],
+             shared_resources_imported=0,
+             warnings=result.warnings,
+         )
+
+     def _import_shared_resources(self, zf: zipfile.ZipFile) -> int:
+         """Import shared project resources."""
+         if 'shared-resources/resources.json' not in zf.namelist():
+             return 0
+
+         try:
+             content = zf.read('shared-resources/resources.json').decode('utf-8')
+             resources = json.loads(content)
+         except Exception:
+             return 0
+
+         imported = 0
+         for resource in resources:
+             try:
+                 # NOTE: We intentionally ignore file_path from imports.
+                 # Accepting arbitrary file paths from imported data could allow
+                 # an attacker to plant paths that get read during later exports.
+                 self.db.create_project_resource(
+                     resource_type=resource['resource_type'],
+                     name=resource['name'],
+                     content=resource.get('content'),
+                     file_path=None,  # Never import file paths from archives
+                     auto_inherit=resource.get('auto_inherit', False),
+                 )
+                 imported += 1
+             except Exception:
+                 # Resource may already exist, skip
+                 pass
+
+         return imported
+
+     def _import_workflow_from_project(
+         self,
+         zf: zipfile.ZipFile,
+         wf_info: dict,
+         conflict_resolution: ConflictResolution,
+         import_step_artifacts: bool = False,
+     ) -> ImportResult:
+         """Import a single workflow from a project export."""
+         import hashlib
+         import uuid
+
+         wf_id = wf_info['id']
+         # Support both new (workflow_id) and old (namespace) path formats
+         wf_prefix = f"workflows/{wf_id}/"
+         # Check for old namespace-based paths for backward compatibility
+         old_namespace = wf_info.get('namespace')
+         if old_namespace and f"workflows/{old_namespace}/workflow.json" in zf.namelist():
+             wf_prefix = f"workflows/{old_namespace}/"
+
+         # Read workflow data
+         workflow_data = json.loads(zf.read(f"{wf_prefix}workflow.json").decode('utf-8'))
+
+         # Read items
+         items_data = []
+         if f"{wf_prefix}items.jsonl" in zf.namelist():
+             content = zf.read(f"{wf_prefix}items.jsonl").decode('utf-8')
+             for line in content.strip().split('\n'):
+                 if line.strip():
+                     items_data.append(json.loads(line))
+
+         # Read resources
+         resources_data = []
+         if f"{wf_prefix}resources/resources.json" in zf.namelist():
+             content = zf.read(f"{wf_prefix}resources/resources.json").decode('utf-8')
+             resources_data = json.loads(content)
+
+         # Read planning sessions
+         planning_data = []
+         if f"{wf_prefix}planning/session.json" in zf.namelist():
+             content = zf.read(f"{wf_prefix}planning/session.json").decode('utf-8')
+             planning_data = json.loads(content)
+
+         # Generate new IDs
+         old_wf_id = workflow_data['workflow']['id']
+         new_wf_id = f"wf-{uuid.uuid4().hex[:12]}"
+
+         id_mapping = {old_wf_id: new_wf_id}
+
+         # Map item IDs
+         for item in items_data:
+             old_id = item['id']
+             hash_suffix = hashlib.md5(f"{old_id}-{uuid.uuid4().hex}".encode()).hexdigest()[:8]
+             id_mapping[old_id] = f"{old_id}-{hash_suffix}"
+
+         # Create workflow
+         workflow = self.db.create_workflow(
+             id=new_wf_id,
+             name=workflow_data['workflow']['name'],
+             template_id=workflow_data['workflow'].get('template_id'),
+             status='draft',
+         )
+
+         # Create steps
+         step_id_mapping = {}
+         steps_created = 0
+         warnings = []  # Initialize warnings list before step loop
+
+         for step_def in workflow_data.get('steps', []):
+             step = self.db.create_workflow_step(
+                 workflow_id=new_wf_id,
+                 step_number=step_def['step_number'],
+                 name=step_def['name'],
+                 step_type=step_def['step_type'],
+                 config=step_def.get('config'),
+                 loop_name=step_def.get('loop_name'),
+                 status='pending',
+             )
+             step_id_mapping[step_def['id']] = step['id']
+             steps_created += 1
+
+             # Import artifacts if option is enabled and artifacts exist
+             if import_step_artifacts and step_def.get('artifacts'):
+                 try:
+                     self.db.update_workflow_step(step['id'], artifacts=step_def['artifacts'])
+                 except Exception as e:
+                     warnings.append(f"Failed to import artifacts for step {step_def['name']}: {e}")
+
+         # Import items
+         items_imported = 0
+         items_renamed = 0
+
+         # Skip item import if no steps were created
+         if not step_id_mapping:
+             if items_data:
+                 warnings.append(f"Skipping {len(items_data)} items: no steps were imported to associate them with")
+         else:
+             for item in items_data:
+                 new_item_id = id_mapping.get(item['id'], item['id'])
+
+                 # Update dependencies
+                 deps = item.get('dependencies', []) or []
+                 new_deps = [id_mapping.get(d, d) for d in deps]
+
+                 # Get step ID - use mapped step ID or fall back to first available step
+                 old_step_id = item.get('source_step_id')
+                 new_step_id = step_id_mapping.get(old_step_id) if old_step_id else None
+                 if new_step_id is None:
+                     new_step_id = list(step_id_mapping.values())[0]
+
+                 # Update duplicate_of with new ID if mapped
+                 duplicate_of = item.get('duplicate_of')
+                 if duplicate_of and duplicate_of in id_mapping:
+                     duplicate_of = id_mapping[duplicate_of]
+
+                 try:
+                     self.db.create_work_item(
+                         id=new_item_id,
+                         workflow_id=new_wf_id,
+                         source_step_id=new_step_id,
+                         content=item.get('content', ''),
+                         title=item.get('title'),
+                         priority=item.get('priority'),
+                         status='pending',
+                         category=item.get('category'),
+                         metadata=item.get('metadata'),
+                         item_type=item.get('item_type'),
+                         dependencies=new_deps,
+                         phase=item.get('phase'),
+                         duplicate_of=duplicate_of,
+                     )
+                     items_imported += 1
+                     if new_item_id != item['id']:
+                         items_renamed += 1
+                     # Update tags if present (not supported in create_work_item)
+                     if item.get('tags'):
+                         self.db.update_work_item(new_item_id, tags=item['tags'])
+                 except Exception as e:
+                     warnings.append(f"Failed to import item {item['id']}: {e}")
+
+         # Import resources
+         resources_created = 0
+         for resource in resources_data:
+             try:
+                 # NOTE: We intentionally ignore file_path from imports.
+                 self.db.create_workflow_resource(
+                     workflow_id=new_wf_id,
+                     resource_type=resource['resource_type'],
+                     name=resource['name'],
+                     content=resource.get('content'),
+                     file_path=None,  # Never import file paths from archives
+                     source='imported',
+                     enabled=resource.get('enabled', True),
+                 )
+                 resources_created += 1
+             except Exception as e:
+                 warnings.append(f"Failed to import resource {resource['name']}: {e}")
+
+         # Import planning sessions
+         planning_imported = 0
+         for session in planning_data:
+             old_step_id = session.get('step_id')
+             if old_step_id not in step_id_mapping:
+                 continue
+
+             try:
+                 new_session_id = f"ps-{uuid.uuid4().hex[:12]}"
+                 self.db.create_planning_session(
+                     id=new_session_id,
+                     workflow_id=new_wf_id,
+                     step_id=step_id_mapping[old_step_id],
+                     messages=session.get('messages', []),
+                     artifacts=session.get('artifacts'),
+                     status='completed',
+                 )
+                 planning_imported += 1
+             except Exception as e:
+                 warnings.append(f"Failed to import planning session: {e}")
+
+         return ImportResult(
+             success=True,
+             workflow_id=new_wf_id,
+             workflow_name=workflow['name'],
+             steps_created=steps_created,
+             items_imported=items_imported,
+             items_renamed=items_renamed,
+             items_skipped=len(items_data) - items_imported,
+             resources_created=resources_created,
+             resources_renamed=0,
+             planning_sessions_imported=planning_imported,
+             runs_imported=0,
+             id_mapping=id_mapping,
+             warnings=warnings,
+         )
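One detail of _import_workflow_from_project worth spelling out: every imported work item receives a fresh suffixed ID, and both dependencies and duplicate_of are rewritten through the same id_mapping, so references between imported items stay consistent while references to anything outside the archive pass through unchanged. A toy sketch of that rewrite (not package code, IDs made up):

# Old item IDs map to new suffixed IDs generated during import.
id_mapping = {"item-1": "item-1-a1b2c3d4", "item-2": "item-2-9f8e7d6c"}

old_item = {"id": "item-2", "dependencies": ["item-1", "item-external"]}
new_item = {
    "id": id_mapping.get(old_item["id"], old_item["id"]),
    # Unknown references (e.g. items that were not part of the archive) pass through as-is.
    "dependencies": [id_mapping.get(d, d) for d in old_item["dependencies"]],
}
print(new_item)  # {'id': 'item-2-9f8e7d6c', 'dependencies': ['item-1-a1b2c3d4', 'item-external']}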