@girardmedia/bootspring 1.2.0 → 2.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +107 -14
- package/bin/bootspring.js +166 -27
- package/cli/agent.js +189 -17
- package/cli/analyze.js +499 -0
- package/cli/audit.js +557 -0
- package/cli/auth.js +495 -38
- package/cli/billing.js +302 -0
- package/cli/build.js +695 -0
- package/cli/business.js +109 -26
- package/cli/checkpoint-utils.js +168 -0
- package/cli/checkpoint.js +639 -0
- package/cli/cloud-sync.js +447 -0
- package/cli/content.js +198 -0
- package/cli/context.js +1 -1
- package/cli/deploy.js +543 -0
- package/cli/fundraise.js +112 -50
- package/cli/github-cmd.js +435 -0
- package/cli/health.js +477 -0
- package/cli/init.js +84 -13
- package/cli/legal.js +107 -95
- package/cli/log.js +2 -2
- package/cli/loop.js +976 -73
- package/cli/manager.js +711 -0
- package/cli/metrics.js +480 -0
- package/cli/monitor.js +812 -0
- package/cli/onboard.js +521 -0
- package/cli/orchestrator.js +12 -24
- package/cli/prd.js +594 -0
- package/cli/preseed-start.js +1483 -0
- package/cli/preseed.js +2302 -0
- package/cli/project.js +436 -0
- package/cli/quality.js +233 -0
- package/cli/security.js +913 -0
- package/cli/seed.js +1441 -5
- package/cli/skill.js +273 -211
- package/cli/suggest.js +989 -0
- package/cli/switch.js +453 -0
- package/cli/visualize.js +527 -0
- package/cli/watch.js +769 -0
- package/cli/workspace.js +607 -0
- package/core/analyze-workflow.js +1134 -0
- package/core/api-client.js +535 -22
- package/core/audit-workflow.js +1350 -0
- package/core/build-orchestrator.js +480 -0
- package/core/build-state.js +577 -0
- package/core/checkpoint-engine.js +408 -0
- package/core/config.js +1109 -26
- package/core/context-loader.js +21 -1
- package/core/deploy-workflow.js +836 -0
- package/core/entitlements.js +93 -22
- package/core/github-sync.js +610 -0
- package/core/index.js +8 -1
- package/core/ingest.js +1111 -0
- package/core/metrics-engine.js +768 -0
- package/core/onboard-workflow.js +1007 -0
- package/core/preseed-workflow.js +934 -0
- package/core/preseed.js +1617 -0
- package/core/project-context.js +325 -0
- package/core/project-state.js +694 -0
- package/core/r2-sync.js +583 -0
- package/core/scaffold.js +525 -7
- package/core/session.js +258 -0
- package/core/task-extractor.js +758 -0
- package/core/telemetry.js +28 -6
- package/core/tier-enforcement.js +737 -0
- package/core/utils.js +38 -14
- package/generators/questionnaire.js +15 -12
- package/generators/sections/ai.js +7 -7
- package/generators/sections/content.js +300 -0
- package/generators/sections/index.js +3 -0
- package/generators/sections/plugins.js +7 -6
- package/generators/templates/build-planning.template.js +596 -0
- package/generators/templates/content.template.js +819 -0
- package/generators/templates/index.js +2 -1
- package/hooks/git-autopilot.js +1250 -0
- package/hooks/index.js +9 -0
- package/intelligence/agent-collab.js +2057 -0
- package/intelligence/auto-suggest.js +634 -0
- package/intelligence/content-gen.js +1589 -0
- package/intelligence/cross-project.js +1647 -0
- package/intelligence/index.js +184 -0
- package/intelligence/learning/insights.json +517 -7
- package/intelligence/learning/pattern-learner.js +1008 -14
- package/intelligence/memory/decision-tracker.js +1431 -31
- package/intelligence/memory/decisions.jsonl +0 -0
- package/intelligence/orchestrator.js +2896 -1
- package/intelligence/prd.js +92 -1
- package/intelligence/recommendation-weights.json +14 -2
- package/intelligence/recommendations.js +463 -9
- package/intelligence/workflow-composer.js +1451 -0
- package/marketplace/index.d.ts +324 -0
- package/marketplace/index.js +1921 -0
- package/mcp/contracts/mcp-contract.v1.json +342 -4
- package/mcp/registry.js +680 -3
- package/mcp/response-formatter.js +23 -0
- package/mcp/tools/assist-tool.js +78 -4
- package/mcp/tools/autopilot-tool.js +408 -0
- package/mcp/tools/content-tool.js +571 -0
- package/mcp/tools/dashboard-tool.js +251 -5
- package/mcp/tools/mvp-tool.js +344 -0
- package/mcp/tools/plugin-tool.js +23 -1
- package/mcp/tools/prd-tool.js +579 -0
- package/mcp/tools/seed-tool.js +447 -0
- package/mcp/tools/skill-tool.js +43 -14
- package/mcp/tools/suggest-tool.js +147 -0
- package/package.json +15 -6
- package/agents/README.md +0 -93
- package/agents/ai-integration-expert/context.md +0 -386
- package/agents/api-expert/context.md +0 -416
- package/agents/architecture-expert/context.md +0 -454
- package/agents/auth-expert/context.md +0 -399
- package/agents/backend-expert/context.md +0 -483
- package/agents/business-strategy-expert/context.md +0 -180
- package/agents/code-review-expert/context.md +0 -365
- package/agents/competitive-analysis-expert/context.md +0 -239
- package/agents/data-modeling-expert/context.md +0 -352
- package/agents/database-expert/context.md +0 -250
- package/agents/devops-expert/context.md +0 -446
- package/agents/email-expert/context.md +0 -379
- package/agents/financial-expert/context.md +0 -213
- package/agents/frontend-expert/context.md +0 -364
- package/agents/fundraising-expert/context.md +0 -257
- package/agents/growth-expert/context.md +0 -249
- package/agents/index.js +0 -140
- package/agents/investor-relations-expert/context.md +0 -266
- package/agents/legal-expert/context.md +0 -284
- package/agents/marketing-expert/context.md +0 -236
- package/agents/monitoring-expert/context.md +0 -362
- package/agents/operations-expert/context.md +0 -279
- package/agents/partnerships-expert/context.md +0 -286
- package/agents/payment-expert/context.md +0 -340
- package/agents/performance-expert/context.md +0 -377
- package/agents/private-equity-expert/context.md +0 -246
- package/agents/railway-expert/context.md +0 -284
- package/agents/research-expert/context.md +0 -245
- package/agents/sales-expert/context.md +0 -241
- package/agents/security-expert/context.md +0 -343
- package/agents/testing-expert/context.md +0 -414
- package/agents/ui-ux-expert/context.md +0 -448
- package/agents/vercel-expert/context.md +0 -426
- package/skills/index.js +0 -787
- package/skills/patterns/README.md +0 -163
- package/skills/patterns/ai/agents.md +0 -281
- package/skills/patterns/ai/claude.md +0 -138
- package/skills/patterns/ai/embeddings.md +0 -150
- package/skills/patterns/ai/rag.md +0 -266
- package/skills/patterns/ai/streaming.md +0 -170
- package/skills/patterns/ai/structured-output.md +0 -162
- package/skills/patterns/ai/tools.md +0 -154
- package/skills/patterns/analytics/tracking.md +0 -220
- package/skills/patterns/api/errors.md +0 -296
- package/skills/patterns/api/graphql.md +0 -440
- package/skills/patterns/api/middleware.md +0 -279
- package/skills/patterns/api/openapi.md +0 -285
- package/skills/patterns/api/rate-limiting.md +0 -231
- package/skills/patterns/api/route-handler.md +0 -217
- package/skills/patterns/api/server-action.md +0 -249
- package/skills/patterns/api/versioning.md +0 -443
- package/skills/patterns/api/webhooks.md +0 -247
- package/skills/patterns/auth/clerk.md +0 -132
- package/skills/patterns/auth/mfa.md +0 -313
- package/skills/patterns/auth/nextauth.md +0 -140
- package/skills/patterns/auth/oauth.md +0 -237
- package/skills/patterns/auth/rbac.md +0 -152
- package/skills/patterns/auth/session-management.md +0 -367
- package/skills/patterns/auth/session.md +0 -120
- package/skills/patterns/database/audit.md +0 -177
- package/skills/patterns/database/migrations.md +0 -177
- package/skills/patterns/database/pagination.md +0 -230
- package/skills/patterns/database/pooling.md +0 -357
- package/skills/patterns/database/prisma.md +0 -180
- package/skills/patterns/database/relations.md +0 -187
- package/skills/patterns/database/seeding.md +0 -246
- package/skills/patterns/database/soft-delete.md +0 -153
- package/skills/patterns/database/transactions.md +0 -162
- package/skills/patterns/deployment/ci-cd.md +0 -231
- package/skills/patterns/deployment/docker.md +0 -188
- package/skills/patterns/deployment/monitoring.md +0 -387
- package/skills/patterns/deployment/vercel.md +0 -160
- package/skills/patterns/email/resend.md +0 -143
- package/skills/patterns/email/templates.md +0 -245
- package/skills/patterns/email/transactional.md +0 -503
- package/skills/patterns/email/verification.md +0 -176
- package/skills/patterns/files/download.md +0 -243
- package/skills/patterns/files/upload.md +0 -239
- package/skills/patterns/i18n/nextintl.md +0 -188
- package/skills/patterns/logging/structured.md +0 -292
- package/skills/patterns/notifications/email-queue.md +0 -248
- package/skills/patterns/notifications/push.md +0 -279
- package/skills/patterns/payments/checkout.md +0 -303
- package/skills/patterns/payments/invoices.md +0 -287
- package/skills/patterns/payments/portal.md +0 -245
- package/skills/patterns/payments/stripe.md +0 -272
- package/skills/patterns/payments/subscriptions.md +0 -300
- package/skills/patterns/payments/usage.md +0 -279
- package/skills/patterns/performance/caching.md +0 -276
- package/skills/patterns/performance/code-splitting.md +0 -233
- package/skills/patterns/performance/edge.md +0 -254
- package/skills/patterns/performance/isr.md +0 -266
- package/skills/patterns/performance/lazy-loading.md +0 -281
- package/skills/patterns/realtime/sse.md +0 -327
- package/skills/patterns/realtime/websockets.md +0 -336
- package/skills/patterns/search/filtering.md +0 -329
- package/skills/patterns/search/fulltext.md +0 -260
- package/skills/patterns/security/audit-logging.md +0 -444
- package/skills/patterns/security/csrf.md +0 -234
- package/skills/patterns/security/headers.md +0 -252
- package/skills/patterns/security/sanitization.md +0 -258
- package/skills/patterns/security/secrets.md +0 -261
- package/skills/patterns/security/validation.md +0 -268
- package/skills/patterns/security/xss.md +0 -229
- package/skills/patterns/seo/metadata.md +0 -252
- package/skills/patterns/state/context.md +0 -349
- package/skills/patterns/state/react-query.md +0 -313
- package/skills/patterns/state/url-state.md +0 -482
- package/skills/patterns/state/zustand.md +0 -262
- package/skills/patterns/testing/api.md +0 -259
- package/skills/patterns/testing/component.md +0 -233
- package/skills/patterns/testing/coverage.md +0 -207
- package/skills/patterns/testing/fixtures.md +0 -225
- package/skills/patterns/testing/integration.md +0 -436
- package/skills/patterns/testing/mocking.md +0 -177
- package/skills/patterns/testing/playwright.md +0 -162
- package/skills/patterns/testing/snapshot.md +0 -175
- package/skills/patterns/testing/vitest.md +0 -307
- package/skills/patterns/ui/accordions.md +0 -395
- package/skills/patterns/ui/cards.md +0 -299
- package/skills/patterns/ui/dropdowns.md +0 -476
- package/skills/patterns/ui/empty-states.md +0 -320
- package/skills/patterns/ui/forms.md +0 -405
- package/skills/patterns/ui/inputs.md +0 -319
- package/skills/patterns/ui/layouts.md +0 -282
- package/skills/patterns/ui/loading.md +0 -291
- package/skills/patterns/ui/modals.md +0 -338
- package/skills/patterns/ui/navigation.md +0 -374
- package/skills/patterns/ui/tables.md +0 -407
- package/skills/patterns/ui/toasts.md +0 -300
- package/skills/patterns/ui/tooltips.md +0 -396
- package/skills/patterns/utils/dates.md +0 -435
- package/skills/patterns/utils/errors.md +0 -451
- package/skills/patterns/utils/formatting.md +0 -345
- package/skills/patterns/utils/validation.md +0 -434
- package/templates/bootspring.config.js +0 -83
- package/templates/business/business-model-canvas.md +0 -246
- package/templates/business/business-plan.md +0 -266
- package/templates/business/competitive-analysis.md +0 -312
- package/templates/fundraising/data-room-checklist.md +0 -300
- package/templates/fundraising/investor-research.md +0 -243
- package/templates/fundraising/pitch-deck-outline.md +0 -253
- package/templates/legal/gdpr-checklist.md +0 -339
- package/templates/legal/privacy-policy.md +0 -285
- package/templates/legal/terms-of-service.md +0 -222
- package/templates/mcp.json +0 -9
|
@@ -278,6 +278,36 @@ const WORKFLOWS = {
|
|
|
278
278
|
{ name: 'Experiment Build', agents: ['frontend-expert', 'backend-expert'], duration: '2-3 days' },
|
|
279
279
|
{ name: 'Analysis & Decision', agents: ['code-review-expert', 'architecture-expert'], duration: '1 day' }
|
|
280
280
|
]
|
|
281
|
+
},
|
|
282
|
+
'full-stack-parallel': {
|
|
283
|
+
name: 'Full Stack Parallel Development',
|
|
284
|
+
description: 'Concurrent frontend and backend development with parallel testing',
|
|
285
|
+
tier: 'free',
|
|
286
|
+
pack: null,
|
|
287
|
+
outcomes: ['Full stack feature delivered with parallel development streams'],
|
|
288
|
+
completionSignals: ['API contract finalized', 'Frontend and backend complete', 'Integration tests passing'],
|
|
289
|
+
phases: [
|
|
290
|
+
{ name: 'Design & Contract', agents: ['api-expert', 'ui-ux-expert', 'architecture-expert'], duration: '1-2 days' },
|
|
291
|
+
{ name: 'Backend Development', agents: ['backend-expert', 'database-expert'], duration: '2-4 days', parallel: true },
|
|
292
|
+
{ name: 'Frontend Development', agents: ['frontend-expert', 'ui-ux-expert'], duration: '2-4 days', parallel: true },
|
|
293
|
+
{ name: 'Integration & Testing', agents: ['testing-expert', 'security-expert'], duration: '1-2 days' },
|
|
294
|
+
{ name: 'Review & Deploy', agents: ['code-review-expert', 'devops-expert'], duration: '1 day' }
|
|
295
|
+
]
|
|
296
|
+
},
|
|
297
|
+
'comprehensive-audit': {
|
|
298
|
+
name: 'Comprehensive Audit',
|
|
299
|
+
description: 'Parallel security, performance, and code quality audits',
|
|
300
|
+
tier: 'free',
|
|
301
|
+
pack: null,
|
|
302
|
+
outcomes: ['Complete audit report with prioritized findings'],
|
|
303
|
+
completionSignals: ['All audits complete', 'Findings documented', 'Remediation plan created'],
|
|
304
|
+
phases: [
|
|
305
|
+
{ name: 'Preparation', agents: ['architecture-expert'], duration: '0.5 days' },
|
|
306
|
+
{ name: 'Security Audit', agents: ['security-expert'], duration: '1-2 days', parallel: true },
|
|
307
|
+
{ name: 'Performance Audit', agents: ['performance-expert'], duration: '1-2 days', parallel: true },
|
|
308
|
+
{ name: 'Code Quality Audit', agents: ['code-review-expert'], duration: '1-2 days', parallel: true },
|
|
309
|
+
{ name: 'Report & Plan', agents: ['architecture-expert', 'code-review-expert'], duration: '1 day' }
|
|
310
|
+
]
|
|
281
311
|
}
|
|
282
312
|
};
|
|
283
313
|
|
|
@@ -591,6 +621,7 @@ function startWorkflow(workflowName) {
|
|
|
591
621
|
|
|
592
622
|
/**
|
|
593
623
|
* Advance workflow to next step
|
|
624
|
+
* Automatically detects and handles parallel phases
|
|
594
625
|
*/
|
|
595
626
|
function advanceWorkflow() {
|
|
596
627
|
const state = loadState();
|
|
@@ -599,6 +630,25 @@ function advanceWorkflow() {
|
|
|
599
630
|
return { success: false, error: 'No active workflow' };
|
|
600
631
|
}
|
|
601
632
|
|
|
633
|
+
// Check if workflow is paused
|
|
634
|
+
if (state.workflowPaused) {
|
|
635
|
+
return {
|
|
636
|
+
success: false,
|
|
637
|
+
error: 'Workflow is paused. Use resumeWorkflow() before advancing.',
|
|
638
|
+
pausedAt: state.workflowPausedAt,
|
|
639
|
+
reason: state.workflowPauseReason
|
|
640
|
+
};
|
|
641
|
+
}
|
|
642
|
+
|
|
643
|
+
// Check if we're in parallel execution mode
|
|
644
|
+
if (state.parallelExecution) {
|
|
645
|
+
return {
|
|
646
|
+
success: false,
|
|
647
|
+
error: 'Parallel phases in progress. Use completeParallelPhase() to mark phases complete.',
|
|
648
|
+
parallelStatus: getParallelExecutionStatus()
|
|
649
|
+
};
|
|
650
|
+
}
|
|
651
|
+
|
|
602
652
|
const workflow = WORKFLOWS[state.activeWorkflow];
|
|
603
653
|
const activeWorkflowName = state.activeWorkflow;
|
|
604
654
|
state.workflowStep += 1;
|
|
@@ -626,6 +676,59 @@ function advanceWorkflow() {
|
|
|
626
676
|
return { success: true, complete: true };
|
|
627
677
|
}
|
|
628
678
|
|
|
679
|
+
// Check if next phase(s) are parallel
|
|
680
|
+
const nextPhase = workflow.phases[state.workflowStep];
|
|
681
|
+
if (nextPhase.parallel) {
|
|
682
|
+
// Find all consecutive parallel phases
|
|
683
|
+
const parallelPhases = [];
|
|
684
|
+
let i = state.workflowStep;
|
|
685
|
+
while (i < workflow.phases.length && workflow.phases[i].parallel) {
|
|
686
|
+
parallelPhases.push({
|
|
687
|
+
index: i,
|
|
688
|
+
phase: workflow.phases[i],
|
|
689
|
+
status: 'in_progress',
|
|
690
|
+
startedAt: new Date().toISOString()
|
|
691
|
+
});
|
|
692
|
+
i++;
|
|
693
|
+
}
|
|
694
|
+
|
|
695
|
+
// Initialize parallel execution state
|
|
696
|
+
state.parallelExecution = {
|
|
697
|
+
workflow: activeWorkflowName,
|
|
698
|
+
phases: parallelPhases,
|
|
699
|
+
startedAt: new Date().toISOString(),
|
|
700
|
+
completedPhases: []
|
|
701
|
+
};
|
|
702
|
+
|
|
703
|
+
state.history.push({
|
|
704
|
+
action: 'parallel_phases_started',
|
|
705
|
+
workflow: activeWorkflowName,
|
|
706
|
+
phases: parallelPhases.map(p => p.phase.name),
|
|
707
|
+
timestamp: new Date().toISOString()
|
|
708
|
+
});
|
|
709
|
+
|
|
710
|
+
saveState(state);
|
|
711
|
+
|
|
712
|
+
emitWorkflowTelemetry('parallel_phases_started', {
|
|
713
|
+
workflow: activeWorkflowName,
|
|
714
|
+
phaseCount: parallelPhases.length,
|
|
715
|
+
phases: parallelPhases.map(p => p.phase.name)
|
|
716
|
+
});
|
|
717
|
+
|
|
718
|
+
return {
|
|
719
|
+
success: true,
|
|
720
|
+
complete: false,
|
|
721
|
+
parallelExecution: true,
|
|
722
|
+
parallelPhases: parallelPhases.map(p => ({
|
|
723
|
+
index: p.index,
|
|
724
|
+
name: p.phase.name,
|
|
725
|
+
agents: p.phase.agents,
|
|
726
|
+
duration: p.phase.duration
|
|
727
|
+
})),
|
|
728
|
+
message: `Started ${parallelPhases.length} parallel phases. Complete each with completeParallelPhase(index).`
|
|
729
|
+
};
|
|
730
|
+
}
|
|
731
|
+
|
|
629
732
|
saveState(state);
|
|
630
733
|
emitWorkflowTelemetry('workflow_step_advanced', {
|
|
631
734
|
workflow: activeWorkflowName,
|
|
@@ -812,6 +915,18 @@ function displayStatus() {
|
|
|
812
915
|
if (state.activeWorkflow) {
|
|
813
916
|
const workflow = WORKFLOWS[state.activeWorkflow];
|
|
814
917
|
console.log(`${c.bold}Workflow Step:${c.reset} ${state.workflowStep + 1}/${workflow.phases.length}`);
|
|
918
|
+
|
|
919
|
+
// Show pause status
|
|
920
|
+
if (state.workflowPaused) {
|
|
921
|
+
const pausedAt = state.workflowPausedAt ? new Date(state.workflowPausedAt) : null;
|
|
922
|
+
const pauseDuration = pausedAt ? Math.round((new Date() - pausedAt) / 1000) : 0;
|
|
923
|
+
console.log(`${c.yellow}${c.bold}Workflow Status:${c.reset}${c.yellow} PAUSED${c.reset}`);
|
|
924
|
+
console.log(`${c.bold}Paused At:${c.reset} ${state.workflowPausedAt}`);
|
|
925
|
+
console.log(`${c.bold}Pause Duration:${c.reset} ${formatDuration(pauseDuration)}`);
|
|
926
|
+
if (state.workflowPauseReason) {
|
|
927
|
+
console.log(`${c.bold}Pause Reason:${c.reset} ${state.workflowPauseReason}`);
|
|
928
|
+
}
|
|
929
|
+
}
|
|
815
930
|
}
|
|
816
931
|
|
|
817
932
|
if (state.suggestions.length > 0) {
|
|
@@ -842,6 +957,8 @@ ${c.cyan}Commands:${c.reset}
|
|
|
842
957
|
workflows List available workflows
|
|
843
958
|
start <workflow> Start a workflow
|
|
844
959
|
next Advance to next workflow step
|
|
960
|
+
pause [reason] Pause the active workflow
|
|
961
|
+
resume Resume a paused workflow
|
|
845
962
|
status Show orchestrator status
|
|
846
963
|
agents List available agents
|
|
847
964
|
help Show this help
|
|
@@ -850,6 +967,8 @@ ${c.cyan}Examples:${c.reset}
|
|
|
850
967
|
node orchestrator.js analyze "building authentication api"
|
|
851
968
|
node orchestrator.js workflow feature-development
|
|
852
969
|
node orchestrator.js start feature-development
|
|
970
|
+
node orchestrator.js pause "waiting for external review"
|
|
971
|
+
node orchestrator.js resume
|
|
853
972
|
node orchestrator.js status
|
|
854
973
|
`);
|
|
855
974
|
}
|
|
@@ -924,6 +1043,41 @@ if (require.main === module) {
|
|
|
924
1043
|
break;
|
|
925
1044
|
}
|
|
926
1045
|
|
|
1046
|
+
case 'pause': {
|
|
1047
|
+
const pauseReason = args.slice(1).join(' ') || null;
|
|
1048
|
+
const pauseResult = pauseWorkflow(pauseReason);
|
|
1049
|
+
if (pauseResult.success) {
|
|
1050
|
+
console.log(`${c.yellow}Workflow paused${c.reset}`);
|
|
1051
|
+
console.log(`${c.bold}Workflow:${c.reset} ${pauseResult.workflow}`);
|
|
1052
|
+
console.log(`${c.bold}Step:${c.reset} ${pauseResult.step}/${pauseResult.totalSteps}`);
|
|
1053
|
+
console.log(`${c.bold}Phase:${c.reset} ${pauseResult.currentPhase.name}`);
|
|
1054
|
+
if (pauseResult.reason) {
|
|
1055
|
+
console.log(`${c.bold}Reason:${c.reset} ${pauseResult.reason}`);
|
|
1056
|
+
}
|
|
1057
|
+
console.log(`${c.dim}Use 'resume' to continue the workflow${c.reset}`);
|
|
1058
|
+
} else {
|
|
1059
|
+
console.log(`${c.red}Error: ${pauseResult.error}${c.reset}`);
|
|
1060
|
+
}
|
|
1061
|
+
break;
|
|
1062
|
+
}
|
|
1063
|
+
|
|
1064
|
+
case 'resume': {
|
|
1065
|
+
const resumeResult = resumeWorkflow();
|
|
1066
|
+
if (resumeResult.success) {
|
|
1067
|
+
console.log(`${c.green}Workflow resumed${c.reset}`);
|
|
1068
|
+
console.log(`${c.bold}Workflow:${c.reset} ${resumeResult.workflow}`);
|
|
1069
|
+
console.log(`${c.bold}Step:${c.reset} ${resumeResult.step}/${resumeResult.totalSteps}`);
|
|
1070
|
+
console.log(`${c.bold}Phase:${c.reset} ${resumeResult.currentPhase.name}`);
|
|
1071
|
+
console.log(`${c.bold}Pause Duration:${c.reset} ${formatDuration(resumeResult.pauseDuration)}`);
|
|
1072
|
+
if (resumeResult.previousPauseReason) {
|
|
1073
|
+
console.log(`${c.dim}Previous pause reason: ${resumeResult.previousPauseReason}${c.reset}`);
|
|
1074
|
+
}
|
|
1075
|
+
} else {
|
|
1076
|
+
console.log(`${c.red}Error: ${resumeResult.error}${c.reset}`);
|
|
1077
|
+
}
|
|
1078
|
+
break;
|
|
1079
|
+
}
|
|
1080
|
+
|
|
927
1081
|
case 'status':
|
|
928
1082
|
displayStatus();
|
|
929
1083
|
break;
|
|
@@ -945,12 +1099,2713 @@ if (require.main === module) {
|
|
|
945
1099
|
}
|
|
946
1100
|
|
|
947
1101
|
// Export for use as module
|
|
1102
|
+
/**
|
|
1103
|
+
* Remediation workflows for common failure scenarios
|
|
1104
|
+
* Enhanced with severity levels, auto-trigger options, fallback phases, and preventive actions
|
|
1105
|
+
*/
|
|
1106
|
+
const REMEDIATION_PATHS = {
|
|
1107
|
+
'testing-failure': {
|
|
1108
|
+
description: 'Tests failed during workflow',
|
|
1109
|
+
severity: 'high',
|
|
1110
|
+
autoTrigger: true,
|
|
1111
|
+
maxRetries: 3,
|
|
1112
|
+
steps: [
|
|
1113
|
+
{ action: 'analyze_failures', agent: 'testing-expert', description: 'Analyze test failures', timeout: 300 },
|
|
1114
|
+
{ action: 'fix_issues', agent: 'code-review-expert', description: 'Review and fix failing code', timeout: 600 },
|
|
1115
|
+
{ action: 'rerun_tests', agent: 'testing-expert', description: 'Re-run tests', timeout: 300 }
|
|
1116
|
+
],
|
|
1117
|
+
preventiveActions: [
|
|
1118
|
+
'Run tests locally before committing',
|
|
1119
|
+
'Check for flaky tests in CI history',
|
|
1120
|
+
'Verify test environment configuration'
|
|
1121
|
+
],
|
|
1122
|
+
fallbackPhase: { name: 'Manual Review', agents: ['code-review-expert'], duration: '1-2 days' }
|
|
1123
|
+
},
|
|
1124
|
+
'security-failure': {
|
|
1125
|
+
description: 'Security issues found during workflow',
|
|
1126
|
+
severity: 'critical',
|
|
1127
|
+
autoTrigger: true,
|
|
1128
|
+
maxRetries: 2,
|
|
1129
|
+
steps: [
|
|
1130
|
+
{ action: 'security_audit', agent: 'security-expert', description: 'Full security audit', timeout: 600 },
|
|
1131
|
+
{ action: 'remediate', agent: 'security-expert', description: 'Fix security vulnerabilities', timeout: 900 },
|
|
1132
|
+
{ action: 'verify', agent: 'security-expert', description: 'Verify fixes', timeout: 300 }
|
|
1133
|
+
],
|
|
1134
|
+
preventiveActions: [
|
|
1135
|
+
'Run security scans in pre-commit hooks',
|
|
1136
|
+
'Keep dependencies updated',
|
|
1137
|
+
'Review auth/authz changes carefully'
|
|
1138
|
+
],
|
|
1139
|
+
fallbackPhase: { name: 'Security Escalation', agents: ['security-expert', 'architecture-expert'], duration: '2-3 days' }
|
|
1140
|
+
},
|
|
1141
|
+
'performance-failure': {
|
|
1142
|
+
description: 'Performance issues detected',
|
|
1143
|
+
severity: 'medium',
|
|
1144
|
+
autoTrigger: false,
|
|
1145
|
+
maxRetries: 2,
|
|
1146
|
+
steps: [
|
|
1147
|
+
{ action: 'profile', agent: 'performance-expert', description: 'Profile application', timeout: 600 },
|
|
1148
|
+
{ action: 'optimize', agent: 'performance-expert', description: 'Implement optimizations', timeout: 900 },
|
|
1149
|
+
{ action: 'benchmark', agent: 'performance-expert', description: 'Benchmark improvements', timeout: 300 }
|
|
1150
|
+
],
|
|
1151
|
+
preventiveActions: [
|
|
1152
|
+
'Set performance budgets',
|
|
1153
|
+
'Monitor bundle sizes',
|
|
1154
|
+
'Profile database queries'
|
|
1155
|
+
],
|
|
1156
|
+
fallbackPhase: { name: 'Performance Review', agents: ['performance-expert', 'database-expert'], duration: '2-3 days' }
|
|
1157
|
+
},
|
|
1158
|
+
'deployment-failure': {
|
|
1159
|
+
description: 'Deployment failed',
|
|
1160
|
+
severity: 'high',
|
|
1161
|
+
autoTrigger: true,
|
|
1162
|
+
maxRetries: 2,
|
|
1163
|
+
steps: [
|
|
1164
|
+
{ action: 'diagnose', agent: 'devops-expert', description: 'Diagnose deployment failure', timeout: 300 },
|
|
1165
|
+
{ action: 'fix_config', agent: 'devops-expert', description: 'Fix configuration issues', timeout: 600 },
|
|
1166
|
+
{ action: 'retry_deploy', agent: 'devops-expert', description: 'Retry deployment', timeout: 300 }
|
|
1167
|
+
],
|
|
1168
|
+
preventiveActions: [
|
|
1169
|
+
'Deploy to staging first',
|
|
1170
|
+
'Verify environment variables',
|
|
1171
|
+
'Check resource limits and quotas'
|
|
1172
|
+
],
|
|
1173
|
+
fallbackPhase: { name: 'Deployment Rollback', agents: ['devops-expert'], duration: '0.5 days' }
|
|
1174
|
+
},
|
|
1175
|
+
'database-failure': {
|
|
1176
|
+
description: 'Database issues detected',
|
|
1177
|
+
severity: 'high',
|
|
1178
|
+
autoTrigger: true,
|
|
1179
|
+
maxRetries: 1,
|
|
1180
|
+
steps: [
|
|
1181
|
+
{ action: 'analyze_schema', agent: 'database-expert', description: 'Analyze database issues', timeout: 300 },
|
|
1182
|
+
{ action: 'fix_migration', agent: 'database-expert', description: 'Fix migration issues', timeout: 600 },
|
|
1183
|
+
{ action: 'verify_data', agent: 'database-expert', description: 'Verify data integrity', timeout: 300 }
|
|
1184
|
+
],
|
|
1185
|
+
preventiveActions: [
|
|
1186
|
+
'Test migrations on copy of production data',
|
|
1187
|
+
'Have rollback plan ready',
|
|
1188
|
+
'Check for breaking schema changes'
|
|
1189
|
+
],
|
|
1190
|
+
fallbackPhase: { name: 'Database Recovery', agents: ['database-expert', 'devops-expert'], duration: '1-2 days' }
|
|
1191
|
+
},
|
|
1192
|
+
'build-failure': {
|
|
1193
|
+
description: 'Build process failed',
|
|
1194
|
+
severity: 'high',
|
|
1195
|
+
autoTrigger: true,
|
|
1196
|
+
maxRetries: 3,
|
|
1197
|
+
steps: [
|
|
1198
|
+
{ action: 'analyze_build', agent: 'devops-expert', description: 'Analyze build errors', timeout: 300 },
|
|
1199
|
+
{ action: 'fix_dependencies', agent: 'backend-expert', description: 'Fix dependency issues', timeout: 600 },
|
|
1200
|
+
{ action: 'rebuild', agent: 'devops-expert', description: 'Rebuild application', timeout: 300 }
|
|
1201
|
+
],
|
|
1202
|
+
preventiveActions: [
|
|
1203
|
+
'Lock dependency versions',
|
|
1204
|
+
'Verify build environment matches CI',
|
|
1205
|
+
'Check for circular dependencies'
|
|
1206
|
+
],
|
|
1207
|
+
fallbackPhase: { name: 'Build Investigation', agents: ['devops-expert', 'backend-expert'], duration: '1 day' }
|
|
1208
|
+
},
|
|
1209
|
+
'integration-failure': {
|
|
1210
|
+
description: 'Integration issues with external services',
|
|
1211
|
+
severity: 'medium',
|
|
1212
|
+
autoTrigger: false,
|
|
1213
|
+
maxRetries: 2,
|
|
1214
|
+
steps: [
|
|
1215
|
+
{ action: 'diagnose_integration', agent: 'api-expert', description: 'Diagnose integration issues', timeout: 300 },
|
|
1216
|
+
{ action: 'fix_api', agent: 'api-expert', description: 'Fix API integration', timeout: 600 },
|
|
1217
|
+
{ action: 'verify_integration', agent: 'testing-expert', description: 'Verify integration works', timeout: 300 }
|
|
1218
|
+
],
|
|
1219
|
+
preventiveActions: [
|
|
1220
|
+
'Mock external services in tests',
|
|
1221
|
+
'Check API version compatibility',
|
|
1222
|
+
'Verify credentials and permissions'
|
|
1223
|
+
],
|
|
1224
|
+
fallbackPhase: { name: 'Integration Workaround', agents: ['api-expert', 'backend-expert'], duration: '1-2 days' }
|
|
1225
|
+
},
|
|
1226
|
+
'timeout-failure': {
|
|
1227
|
+
description: 'Operation timed out',
|
|
1228
|
+
severity: 'medium',
|
|
1229
|
+
autoTrigger: false,
|
|
1230
|
+
maxRetries: 2,
|
|
1231
|
+
steps: [
|
|
1232
|
+
{ action: 'profile_operation', agent: 'performance-expert', description: 'Profile slow operation', timeout: 600 },
|
|
1233
|
+
{ action: 'optimize_critical_path', agent: 'performance-expert', description: 'Optimize critical path', timeout: 600 },
|
|
1234
|
+
{ action: 'adjust_timeout', agent: 'devops-expert', description: 'Adjust timeout settings', timeout: 300 }
|
|
1235
|
+
],
|
|
1236
|
+
preventiveActions: [
|
|
1237
|
+
'Set appropriate timeout values',
|
|
1238
|
+
'Add progress indicators',
|
|
1239
|
+
'Consider async processing'
|
|
1240
|
+
],
|
|
1241
|
+
fallbackPhase: { name: 'Async Processing', agents: ['backend-expert', 'performance-expert'], duration: '1-2 days' }
|
|
1242
|
+
}
|
|
1243
|
+
};
|
|
1244
|
+
|
|
1245
|
+
/**
|
|
1246
|
+
* Failure pattern signatures for auto-detection
|
|
1247
|
+
* Maps keywords to failure types for intelligent failure classification
|
|
1248
|
+
*/
|
|
1249
|
+
const FAILURE_SIGNATURES = {
|
|
1250
|
+
'testing': ['test', 'spec', 'jest', 'vitest', 'assertion', 'expect', 'failing', 'failed test'],
|
|
1251
|
+
'security': ['vulnerability', 'auth', 'permission', 'unauthorized', 'csrf', 'xss', 'injection', 'security'],
|
|
1252
|
+
'performance': ['slow', 'timeout', 'memory', 'cpu', 'latency', 'bottleneck', 'performance'],
|
|
1253
|
+
'deployment': ['deploy', 'release', 'ci/cd', 'pipeline', 'staging', 'production', 'build failed'],
|
|
1254
|
+
'database': ['migration', 'schema', 'query', 'prisma', 'sql', 'database', 'connection', 'deadlock'],
|
|
1255
|
+
'build': ['build', 'compile', 'bundle', 'webpack', 'typescript', 'syntax error', 'module not found'],
|
|
1256
|
+
'integration': ['api', 'endpoint', 'external', 'service', 'http', 'request failed', '500', '503'],
|
|
1257
|
+
'timeout': ['timeout', 'deadline', 'exceeded', 'timed out', 'ETIMEDOUT']
|
|
1258
|
+
};
|
|
1259
|
+
|
|
1260
|
+
/**
|
|
1261
|
+
* Severity weights for adaptive response calculation
|
|
1262
|
+
*/
|
|
1263
|
+
const SEVERITY_WEIGHTS = {
|
|
1264
|
+
'critical': 4,
|
|
1265
|
+
'high': 3,
|
|
1266
|
+
'medium': 2,
|
|
1267
|
+
'low': 1
|
|
1268
|
+
};
|
|
1269
|
+
/**
 * Report a phase failure and get remediation options.
 * Records the failure against the active workflow's current phase, appends a
 * history entry, persists state, emits telemetry, and returns the matching
 * remediation path from REMEDIATION_PATHS (keyed as `${failureType}-failure`),
 * if one exists.
 * @param {string} failureType - Type of failure (testing, security, performance, deployment, database)
 * @param {object} details - Failure details
 * @returns {object} { success: true, failureId, remediation, ... } or { success: false, error }
 */
function reportPhaseFailure(failureType, details = {}) {
  const state = loadState();

  if (!state.activeWorkflow) {
    return { success: false, error: 'No active workflow' };
  }

  const workflow = WORKFLOWS[state.activeWorkflow];
  // Guard against a stale/unknown workflow reference, consistent with
  // pauseWorkflow/resumeWorkflow; previously this crashed reading `.phases`
  // of undefined when activeWorkflow was not a known WORKFLOWS key.
  if (!workflow) {
    return { success: false, error: 'Active workflow not found' };
  }

  const currentPhase = workflow.phases[state.workflowStep];

  // Record the failure (id is timestamp-based, unique per millisecond)
  state.workflowFailures = state.workflowFailures || [];
  const failure = {
    id: `failure-${Date.now()}`,
    workflow: state.activeWorkflow,
    phase: currentPhase.name,
    phaseIndex: state.workflowStep,
    type: failureType,
    details,
    timestamp: new Date().toISOString(),
    resolved: false
  };
  state.workflowFailures.push(failure);

  // Get remediation path for this failure type, if one is defined
  const remediationKey = `${failureType}-failure`;
  const remediation = REMEDIATION_PATHS[remediationKey];

  state.history.push({
    action: 'phase_failure_reported',
    workflow: state.activeWorkflow,
    phase: currentPhase.name,
    failureType,
    timestamp: new Date().toISOString()
  });

  saveState(state);

  emitWorkflowTelemetry('workflow_phase_failed', {
    workflow: state.activeWorkflow,
    phase: currentPhase.name,
    failureType,
    tier: workflow.tier || 'free'
  });

  return {
    success: true,
    failureId: failure.id,
    workflow: state.activeWorkflow,
    phase: currentPhase.name,
    failureType,
    remediation: remediation || null,
    message: remediation
      ? `Remediation path available: ${remediation.description}`
      : 'No specific remediation path found. Consider manual review.'
  };
}
|
|
1331
|
+
|
|
1332
|
+
/**
 * Begin guided remediation for a previously reported failure.
 * Looks up the remediation path for the failure's type, records it as the
 * active remediation (starting at step 0), and returns the first step.
 * @param {string} failureId - Failure ID
 * @returns {object} { success: true, remediation, ... } or { success: false, error }
 */
function startRemediation(failureId) {
  const state = loadState();

  const target = state.workflowFailures?.find((entry) => entry.id === failureId);
  if (!target) {
    return { success: false, error: 'Failure not found' };
  }
  if (target.resolved) {
    return { success: false, error: 'Failure already resolved' };
  }

  const path = REMEDIATION_PATHS[`${target.type}-failure`];
  if (!path) {
    return { success: false, error: 'No remediation path for this failure type' };
  }

  // Mark this remediation as the active one, positioned at the first step.
  state.activeRemediation = {
    failureId,
    type: target.type,
    currentStep: 0,
    steps: path.steps,
    startedAt: new Date().toISOString()
  };

  state.history.push({
    action: 'remediation_started',
    failureId,
    type: target.type,
    timestamp: new Date().toISOString()
  });

  saveState(state);

  const [firstStep] = path.steps;
  return {
    success: true,
    failureId,
    remediation: {
      description: path.description,
      currentStep: firstStep,
      totalSteps: path.steps.length
    },
    message: `Remediation started. First step: ${firstStep.description}`
  };
}
|
|
1384
|
+
|
|
1385
|
+
/**
 * Advance the active remediation to its next step.
 * When the final step has been passed, marks the originating failure as
 * resolved, records completion in history, and clears the active remediation.
 * @returns {object} { success, complete, ... } or { success: false, error }
 */
function advanceRemediation() {
  const state = loadState();
  const rem = state.activeRemediation;

  if (!rem) {
    return { success: false, error: 'No active remediation' };
  }

  rem.currentStep += 1;

  if (rem.currentStep >= rem.steps.length) {
    // All steps done — resolve the originating failure record if present.
    const origin = state.workflowFailures?.find((f) => f.id === rem.failureId);
    if (origin) {
      origin.resolved = true;
      origin.resolvedAt = new Date().toISOString();
    }

    state.history.push({
      action: 'remediation_completed',
      failureId: rem.failureId,
      timestamp: new Date().toISOString()
    });

    state.activeRemediation = null;
    saveState(state);

    return {
      success: true,
      complete: true,
      message: 'Remediation complete. You can resume the workflow.'
    };
  }

  saveState(state);

  const step = rem.steps[rem.currentStep];
  const stepNumber = rem.currentStep + 1;
  const totalSteps = rem.steps.length;

  return {
    success: true,
    complete: false,
    currentStep: step,
    stepNumber,
    totalSteps,
    message: `Step ${stepNumber}/${totalSteps}: ${step.description}`
  };
}
|
|
1433
|
+
|
|
1434
|
+
/**
 * Get workflow failure history.
 * @returns {Array<object>} All recorded workflow failures (empty if none).
 */
function getWorkflowFailures() {
  const { workflowFailures } = loadState();
  return workflowFailures || [];
}
|
|
1441
|
+
|
|
1442
|
+
/**
 * Pause the active workflow.
 * Snapshots everything needed for an exact resume (step, phase, any
 * in-flight parallel execution, active remediation, and workflow signals)
 * into `state.workflowPauseContext`, records history, persists state, and
 * emits telemetry.
 * @param {string|null} reason - Optional reason for pausing the workflow
 * @returns {object} { success: true, ... } or { success: false, error }
 */
function pauseWorkflow(reason = null) {
  const state = loadState();

  if (!state.activeWorkflow) {
    return { success: false, error: 'No active workflow to pause' };
  }

  if (state.workflowPaused) {
    return { success: false, error: 'Workflow is already paused' };
  }

  const workflow = WORKFLOWS[state.activeWorkflow];
  if (!workflow) {
    return { success: false, error: 'Active workflow not found' };
  }

  // Single timestamp reused everywhere so pause records agree exactly.
  const pauseTimestamp = new Date().toISOString();

  // Capture complete pause context for exact resume.
  // Shallow copies of parallelExecution/activeRemediation so later mutation
  // of live state does not rewrite the snapshot.
  state.workflowPaused = true;
  state.workflowPausedAt = pauseTimestamp;
  state.workflowPauseReason = reason;
  state.workflowPauseContext = {
    workflow: state.activeWorkflow,
    step: state.workflowStep,
    phase: workflow.phases[state.workflowStep],
    parallelExecution: state.parallelExecution ? { ...state.parallelExecution } : null,
    activeRemediation: state.activeRemediation ? { ...state.activeRemediation } : null,
    workflowSignals: state.workflowSignals?.[state.activeWorkflow] || [],
    timestamp: pauseTimestamp
  };

  state.history.push({
    action: 'workflow_paused',
    workflow: state.activeWorkflow,
    step: state.workflowStep,
    reason: reason,
    timestamp: pauseTimestamp
  });

  saveState(state);

  emitWorkflowTelemetry('workflow_paused', {
    workflow: state.activeWorkflow,
    tier: workflow.tier || 'free',
    pack: workflow.pack || null,
    step: state.workflowStep,
    reason: reason
  });

  // `step` is reported 1-based for display; state.workflowStep is 0-based.
  return {
    success: true,
    workflow: state.activeWorkflow,
    step: state.workflowStep + 1,
    totalSteps: workflow.phases.length,
    currentPhase: workflow.phases[state.workflowStep],
    pausedAt: pauseTimestamp,
    reason: reason,
    context: state.workflowPauseContext,
    message: `Workflow "${state.activeWorkflow}" paused at step ${state.workflowStep + 1}${reason ? ` (reason: ${reason})` : ''}`
  };
}
|
|
1508
|
+
|
|
1509
|
+
/**
 * Resume a paused workflow.
 * Continues from the exact point where the workflow was paused by restoring
 * step position, parallel execution, and active remediation from the saved
 * pause context, then clears all pause-related state fields.
 * @returns {object} { success: true, resumeContext, ... } or { success: false, error }
 */
function resumeWorkflow() {
  const state = loadState();

  if (!state.activeWorkflow) {
    return { success: false, error: 'No active workflow to resume' };
  }

  if (!state.workflowPaused) {
    return { success: false, error: 'Workflow is not paused' };
  }

  const workflow = WORKFLOWS[state.activeWorkflow];
  if (!workflow) {
    return { success: false, error: 'Active workflow not found' };
  }

  // Calculate pause duration
  const pausedAt = new Date(state.workflowPausedAt);
  const resumedAt = new Date();
  const pauseDuration = Math.round((resumedAt - pausedAt) / 1000); // seconds

  // Capture pause context before clearing
  const pauseContext = state.workflowPauseContext || {};
  const pauseReason = state.workflowPauseReason;

  // Verify and restore workflow state from pause context if needed.
  // This ensures we resume from the exact point even after process restart,
  // where live state may have drifted from the snapshot.
  if (pauseContext.step !== undefined && pauseContext.step !== state.workflowStep) {
    state.workflowStep = pauseContext.step;
  }

  // Restore parallel execution state if it was active during pause
  // (only when live state does not already have one — snapshot never clobbers).
  if (pauseContext.parallelExecution && !state.parallelExecution) {
    state.parallelExecution = pauseContext.parallelExecution;
  }

  // Restore active remediation if it was in progress
  if (pauseContext.activeRemediation && !state.activeRemediation) {
    state.activeRemediation = pauseContext.activeRemediation;
  }

  // Clear pause state
  state.workflowPaused = false;
  state.workflowPausedAt = null;
  state.workflowPauseReason = null;
  state.workflowPauseContext = null;

  state.history.push({
    action: 'workflow_resumed',
    workflow: state.activeWorkflow,
    step: state.workflowStep,
    pauseDurationSeconds: pauseDuration,
    previousPauseReason: pauseReason,
    timestamp: resumedAt.toISOString()
  });

  saveState(state);

  emitWorkflowTelemetry('workflow_resumed', {
    workflow: state.activeWorkflow,
    tier: workflow.tier || 'free',
    pack: workflow.pack || null,
    step: state.workflowStep,
    pauseDurationSeconds: pauseDuration,
    previousPauseReason: pauseReason
  });

  // Build resume context with all relevant state (step reported 1-based)
  const resumeContext = {
    step: state.workflowStep + 1,
    totalSteps: workflow.phases.length,
    currentPhase: workflow.phases[state.workflowStep],
    parallelExecutionActive: !!state.parallelExecution,
    remediationActive: !!state.activeRemediation
  };

  if (state.parallelExecution) {
    resumeContext.parallelExecution = getParallelExecutionStatus();
  }

  return {
    success: true,
    workflow: state.activeWorkflow,
    step: state.workflowStep + 1,
    totalSteps: workflow.phases.length,
    currentPhase: workflow.phases[state.workflowStep],
    pauseDuration: pauseDuration,
    previousPauseReason: pauseReason,
    resumeContext: resumeContext,
    message: `Workflow "${state.activeWorkflow}" resumed at step ${state.workflowStep + 1} (was paused for ${formatDuration(pauseDuration)}${pauseReason ? `, reason: ${pauseReason}` : ''})`
  };
}
|
|
1605
|
+
|
|
1606
|
+
/**
 * Format a duration in seconds as a human-readable string.
 * Examples: 45 -> "45s", 125 -> "2m 5s", 3600 -> "1h", 3725 -> "1h 2m".
 * Zero-valued trailing components are omitted.
 * @param {number} seconds - Duration in seconds
 * @returns {string} Compact human-readable duration
 */
function formatDuration(seconds) {
  if (seconds < 60) {
    return `${seconds}s`;
  }

  if (seconds < 3600) {
    const wholeMinutes = Math.floor(seconds / 60);
    const leftoverSeconds = seconds % 60;
    return leftoverSeconds > 0
      ? `${wholeMinutes}m ${leftoverSeconds}s`
      : `${wholeMinutes}m`;
  }

  const wholeHours = Math.floor(seconds / 3600);
  const leftoverMinutes = Math.floor((seconds % 3600) / 60);
  return leftoverMinutes > 0
    ? `${wholeHours}h ${leftoverMinutes}m`
    : `${wholeHours}h`;
}
|
|
1623
|
+
|
|
1624
|
+
/**
 * Check whether the workflow is paused and return pause details.
 * @returns {object} { paused: false, workflow } when not paused; otherwise a
 *   detail object with timing, reason, saved context, and a canResume flag.
 */
function isWorkflowPaused() {
  const state = loadState();

  if (!state.workflowPaused) {
    return { paused: false, workflow: state.activeWorkflow };
  }

  const definition = state.activeWorkflow ? WORKFLOWS[state.activeWorkflow] : null;

  // Seconds elapsed since the pause timestamp (0 when timestamp missing).
  let elapsedSeconds = 0;
  if (state.workflowPausedAt) {
    const pausedAtDate = new Date(state.workflowPausedAt);
    elapsedSeconds = Math.round((new Date() - pausedAtDate) / 1000);
  }

  return {
    paused: true,
    workflow: state.activeWorkflow,
    workflowName: definition?.name,
    pausedAt: state.workflowPausedAt,
    reason: state.workflowPauseReason,
    pauseDurationSeconds: elapsedSeconds,
    pauseDurationFormatted: formatDuration(elapsedSeconds),
    pauseContext: state.workflowPauseContext || null,
    canResume: !!state.activeWorkflow && !!definition
  };
}
|
|
1653
|
+
|
|
1654
|
+
/**
 * Get detailed pause state for persistence verification.
 * Useful for debugging or verifying state after process restart.
 * @returns {object} { paused: false } or a snapshot of all pause fields
 *   plus the on-disk state path.
 */
function getPauseState() {
  const state = loadState();

  if (!state.workflowPaused) {
    return { paused: false };
  }

  const {
    activeWorkflow: workflow,
    workflowStep: step,
    workflowPausedAt: pausedAt,
    workflowPauseReason: reason,
    workflowPauseContext: context
  } = state;

  return {
    paused: true,
    workflow,
    step,
    pausedAt,
    reason,
    context,
    statePath: getPaths().statePath
  };
}
|
|
1675
|
+
|
|
1676
|
+
/**
|
|
1677
|
+
* PRD-Workflow Integration
|
|
1678
|
+
* Links PRD stories to workflow phases for automatic progress tracking
|
|
1679
|
+
*/
|
|
1680
|
+
|
|
1681
|
+
/**
 * Analyze story content to determine the best workflow phase match.
 * Scores each phase against the story's combined lowercase title,
 * description, and acceptance criteria:
 *   - direct phase-name mention: +5
 *   - each curated keyword-pattern hit: +2
 *   - each agent-name keyword hit (keyword length > 2): +1
 * Confidence: score >= 5 'high', >= 2 'medium', > 0 'low', else 'none'.
 * @param {object} story - The story object ({ title, description, acceptance })
 * @param {object} workflow - The workflow definition ({ phases: [{ name, agents }] })
 * @returns {object} - { phaseIndex, confidence, score }; phaseIndex is null
 *   with confidence 'none' when nothing matches.
 */
function analyzeStoryForPhase(story, workflow) {
  const titleLower = (story.title || '').toLowerCase();
  const descLower = (story.description || '').toLowerCase();
  const acceptanceLower = (story.acceptance || []).join(' ').toLowerCase();
  const combined = `${titleLower} ${descLower} ${acceptanceLower}`;

  // Phase detection patterns, keyed by phase name
  const phasePatterns = {
    'Design': ['design', 'wireframe', 'mockup', 'layout', 'ui design', 'ux', 'prototype', 'spec', 'figma'],
    'Design & Contract': ['contract', 'api design', 'interface', 'specification'],
    'Implementation': ['implement', 'build', 'create', 'develop', 'add', 'integrate', 'code', 'feature'],
    'Backend Development': ['api', 'endpoint', 'server', 'backend', 'route', 'handler', 'service'],
    'Frontend Development': ['component', 'page', 'frontend', 'react', 'ui component', 'view', 'form'],
    'Testing': ['test', 'spec', 'coverage', 'qa', 'quality', 'verify', 'validate', 'unit test', 'e2e'],
    'Integration & Testing': ['integration', 'e2e', 'end-to-end'],
    'Review': ['review', 'audit', 'check', 'code review', 'pr review'],
    'Security': ['security', 'auth', 'permission', 'sanitize', 'vulnerability', 'encryption', 'oauth'],
    'Deployment': ['deploy', 'release', 'ci/cd', 'pipeline', 'production', 'staging'],
    'Review & Deploy': ['ship', 'merge', 'release'],
    'Schema Design': ['schema', 'model', 'database design', 'migration', 'table', 'entity'],
    'Database': ['database', 'query', 'prisma', 'sql', 'orm'],
    'Analysis': ['analyze', 'profile', 'benchmark', 'metrics', 'audit'],
    'Preparation': ['setup', 'init', 'configure', 'prepare', 'scaffold'],
    'Infrastructure': ['infrastructure', 'devops', 'docker', 'kubernetes', 'aws', 'vercel'],
    'Monitoring': ['monitor', 'logging', 'observability', 'alerts', 'metrics']
  };

  let bestMatch = null;
  let bestScore = 0;

  for (let phaseIdx = 0; phaseIdx < workflow.phases.length; phaseIdx++) {
    const phase = workflow.phases[phaseIdx];
    const phaseName = phase.name;
    const patterns = phasePatterns[phaseName] || [];

    let score = 0;

    // Check direct phase name match (highest weight)
    if (combined.includes(phaseName.toLowerCase())) {
      score += 5;
    }

    // Check pattern matches
    for (const pattern of patterns) {
      if (combined.includes(pattern)) {
        score += 2;
      }
    }

    // Check agent-related keywords. Guarded with `|| []`: phases without an
    // `agents` array previously crashed the for...of here.
    for (const agent of phase.agents || []) {
      const agentKeywords = agent.replace('-expert', '').split('-');
      for (const keyword of agentKeywords) {
        if (keyword.length > 2 && combined.includes(keyword)) {
          score += 1;
        }
      }
    }

    if (score > bestScore) {
      bestScore = score;
      bestMatch = phaseIdx;
    }
  }

  if (bestMatch !== null && bestScore > 0) {
    const confidence = bestScore >= 5 ? 'high' : bestScore >= 2 ? 'medium' : 'low';
    return { phaseIndex: bestMatch, confidence, score: bestScore };
  }

  return { phaseIndex: null, confidence: 'none', score: 0 };
}
|
|
1760
|
+
|
|
1761
|
+
/**
 * Link a PRD to a workflow.
 * Maps PRD stories to workflow phases with three strategies, in priority
 * order per story: explicit manual mapping, content analysis via
 * analyzeStoryForPhase, then even distribution across phases as a fallback.
 * Builds per-phase progress tracking, persists the link in
 * state.prdWorkflowLinks, records history, and emits telemetry.
 * @param {object} prd - The PRD object ({ name, stories: [{ id, title, status, ... }] })
 * @param {string} workflowName - Name of the workflow to link
 * @param {object} options - Optional configuration
 * @param {object} options.manualMapping - Manual story->phase mapping { storyId: phaseIndex }
 * @param {boolean} options.autoAdvance - Auto-advance workflow on phase completion (default: true)
 * @returns {object} { success: true, mapping, phaseProgress, ... } or { success: false, error }
 */
function linkPrdToWorkflow(prd, workflowName, options = {}) {
  const workflow = WORKFLOWS[workflowName];
  if (!workflow) {
    return { success: false, error: `Unknown workflow: ${workflowName}` };
  }

  if (!prd || !prd.stories) {
    return { success: false, error: 'Invalid PRD' };
  }

  const state = loadState();
  state.prdWorkflowLinks = state.prdWorkflowLinks || {};

  const storyPhaseMap = {};
  const phaseCount = workflow.phases.length;
  // Tracks which strategy mapped each story, for telemetry/debugging.
  const mappingDetails = { manual: [], analyzed: [], fallback: [] };

  prd.stories.forEach((story, index) => {
    // Check for manual mapping override (only honored when in range;
    // out-of-range manual indices fall through to analysis).
    if (options.manualMapping && options.manualMapping[story.id] !== undefined) {
      const manualPhase = options.manualMapping[story.id];
      if (manualPhase >= 0 && manualPhase < phaseCount) {
        storyPhaseMap[story.id] = {
          phase: manualPhase,
          phaseName: workflow.phases[manualPhase].name,
          storyTitle: story.title,
          status: story.status || 'pending',
          confidence: 'manual'
        };
        mappingDetails.manual.push(story.id);
        return;
      }
    }

    // Try intelligent mapping based on story content
    const analysis = analyzeStoryForPhase(story, workflow);

    if (analysis.phaseIndex !== null && analysis.confidence !== 'none') {
      storyPhaseMap[story.id] = {
        phase: analysis.phaseIndex,
        phaseName: workflow.phases[analysis.phaseIndex].name,
        storyTitle: story.title,
        status: story.status || 'pending',
        confidence: analysis.confidence,
        analysisScore: analysis.score
      };
      mappingDetails.analyzed.push(story.id);
    } else {
      // Fallback: distribute evenly across phases by story position
      const phaseIndex = Math.floor((index / prd.stories.length) * phaseCount);
      storyPhaseMap[story.id] = {
        phase: phaseIndex,
        phaseName: workflow.phases[phaseIndex].name,
        storyTitle: story.title,
        status: story.status || 'pending',
        confidence: 'fallback'
      };
      mappingDetails.fallback.push(story.id);
    }
  });

  // Build phase progress tracking (totals, completion %, story summaries)
  const phaseProgress = {};
  workflow.phases.forEach((phase, idx) => {
    const storiesInPhase = Object.entries(storyPhaseMap)
      .filter(([, mapping]) => mapping.phase === idx);
    const completedInPhase = storiesInPhase
      .filter(([, mapping]) => mapping.status === 'complete');

    phaseProgress[idx] = {
      phaseName: phase.name,
      total: storiesInPhase.length,
      completed: completedInPhase.length,
      percent: storiesInPhase.length > 0
        ? Math.round((completedInPhase.length / storiesInPhase.length) * 100)
        : 0,
      stories: storiesInPhase.map(([id, mapping]) => ({
        id,
        title: mapping.storyTitle,
        status: mapping.status,
        confidence: mapping.confidence
      }))
    };
  });

  // Track completed story IDs
  const completedStoryIds = prd.stories
    .filter(s => s.status === 'complete')
    .map(s => s.id);

  state.prdWorkflowLinks[prd.name] = {
    workflow: workflowName,
    linkedAt: new Date().toISOString(),
    storyPhaseMap,
    phaseProgress,
    totalStories: prd.stories.length,
    completedStories: completedStoryIds.length,
    completedStoryIds,
    autoAdvance: options.autoAdvance !== false,
    mappingDetails
  };

  state.history.push({
    action: 'prd_linked_to_workflow',
    prd: prd.name,
    workflow: workflowName,
    storyCount: prd.stories.length,
    mappingBreakdown: {
      manual: mappingDetails.manual.length,
      analyzed: mappingDetails.analyzed.length,
      fallback: mappingDetails.fallback.length
    },
    timestamp: new Date().toISOString()
  });

  saveState(state);

  emitWorkflowTelemetry('prd_workflow_linked', {
    prd: prd.name,
    workflow: workflowName,
    storyCount: prd.stories.length,
    analyzedMappings: mappingDetails.analyzed.length,
    fallbackMappings: mappingDetails.fallback.length
  });

  return {
    success: true,
    workflow: workflowName,
    workflowName: workflow.name,
    prd: prd.name,
    mapping: storyPhaseMap,
    phaseProgress,
    mappingDetails,
    message: `Linked ${prd.stories.length} stories to ${phaseCount} workflow phases`
  };
}
|
|
1906
|
+
|
|
1907
|
+
/**
 * Notify the workflow when a PRD story is completed.
 * Marks the story complete in the link's mapping, updates completed-story
 * tracking and the story's phase progress, and — when auto-advance is on,
 * the story belongs to the current workflow phase, and that phase is now
 * fully complete — advances the workflow via advanceWorkflow().
 * Idempotent: a story already in completedStoryIds is a no-op.
 * @param {string} prdName - Name of the PRD
 * @param {string} storyId - ID of the completed story
 * @returns {object} { success, advanced, progress, ... } or { success: false, error }
 */
function onPrdStoryComplete(prdName, storyId) {
  const state = loadState();
  const link = state.prdWorkflowLinks?.[prdName];

  if (!link) {
    return { success: false, error: 'PRD not linked to a workflow' };
  }

  const workflow = WORKFLOWS[link.workflow];
  if (!workflow) {
    return { success: false, error: 'Linked workflow not found' };
  }

  const storyMapping = link.storyPhaseMap[storyId];
  if (!storyMapping) {
    return { success: true, advanced: false, message: 'Story not mapped to workflow phase' };
  }

  // Check if already marked complete (idempotency guard)
  if (link.completedStoryIds?.includes(storyId)) {
    return {
      success: true,
      advanced: false,
      message: 'Story already marked complete',
      progress: {
        completed: link.completedStories,
        total: link.totalStories,
        percent: Math.round((link.completedStories / link.totalStories) * 100)
      }
    };
  }

  // Update story status in mapping
  storyMapping.status = 'complete';
  storyMapping.completedAt = new Date().toISOString();

  // Update completed tracking
  link.completedStoryIds = link.completedStoryIds || [];
  link.completedStoryIds.push(storyId);
  link.completedStories = link.completedStoryIds.length;

  // Update phase progress (recompute counts/percent for the story's phase)
  const storyPhase = storyMapping.phase;
  if (link.phaseProgress && link.phaseProgress[storyPhase]) {
    const phaseStories = Object.entries(link.storyPhaseMap)
      .filter(([, m]) => m.phase === storyPhase);
    const completedInPhase = phaseStories.filter(([, m]) => m.status === 'complete');

    link.phaseProgress[storyPhase].completed = completedInPhase.length;
    link.phaseProgress[storyPhase].percent = Math.round(
      (completedInPhase.length / phaseStories.length) * 100
    );

    link.phaseProgress[storyPhase].stories = phaseStories.map(([id, mapping]) => ({
      id,
      title: mapping.storyTitle,
      status: mapping.status,
      confidence: mapping.confidence
    }));
  }

  state.prdWorkflowLinks[prdName] = link;

  state.history.push({
    action: 'prd_story_completed',
    prd: prdName,
    storyId,
    phase: storyMapping.phaseName,
    timestamp: new Date().toISOString()
  });

  // Check if current workflow phase is complete
  const currentPhase = state.workflowStep || 0;
  const phaseProgress = link.phaseProgress?.[currentPhase];
  const phaseComplete = phaseProgress &&
    phaseProgress.total > 0 &&
    phaseProgress.completed >= phaseProgress.total;

  // Auto-advance if enabled and phase is complete. State is saved BEFORE
  // advanceWorkflow() so the advance operates on the updated link data.
  if (link.autoAdvance !== false &&
      phaseComplete &&
      storyMapping.phase === currentPhase &&
      state.activeWorkflow === link.workflow) {

    saveState(state);
    const advanceResult = advanceWorkflow();

    emitWorkflowTelemetry('prd_triggered_phase_advance', {
      prd: prdName,
      workflow: link.workflow,
      fromPhase: currentPhase,
      triggeredBy: storyId
    });

    return {
      success: true,
      advanced: true,
      advanceResult,
      phaseCompleted: storyMapping.phaseName,
      message: `Story completed. Phase "${storyMapping.phaseName}" complete - workflow advanced.`,
      progress: {
        completed: link.completedStories,
        total: link.totalStories,
        percent: Math.round((link.completedStories / link.totalStories) * 100),
        phaseProgress: link.phaseProgress
      }
    };
  }

  saveState(state);

  emitWorkflowTelemetry('prd_story_completed', {
    prd: prdName,
    workflow: link.workflow,
    storyId,
    phase: storyMapping.phase,
    phaseName: storyMapping.phaseName,
    phaseComplete
  });

  return {
    success: true,
    advanced: false,
    storyPhase: storyMapping.phaseName,
    phaseComplete,
    message: phaseComplete
      ? `Story completed. Phase "${storyMapping.phaseName}" is now complete.`
      : 'Story completed.',
    progress: {
      completed: link.completedStories,
      total: link.totalStories,
      percent: Math.round((link.completedStories / link.totalStories) * 100),
      phaseProgress: link.phaseProgress
    }
  };
}
|
|
2049
|
+
|
|
2050
|
+
/**
 * Get PRD-Workflow link status with detailed progress.
 * @param {string} prdName - Name of the PRD
 * @returns {object|null} - Link status or null if not linked
 */
function getPrdWorkflowStatus(prdName) {
  const state = loadState();
  const link = state.prdWorkflowLinks?.[prdName];

  if (!link) {
    return null;
  }

  const definition = WORKFLOWS[link.workflow];
  const activePhaseIndex = state.workflowStep || 0;

  const completedStories = link.completedStories || 0;
  const totalStories = link.totalStories || 0;
  const percent = link.totalStories > 0
    ? Math.round((completedStories / link.totalStories) * 100)
    : 0;

  return {
    prd: prdName,
    workflow: link.workflow,
    workflowName: definition?.name,
    linkedAt: link.linkedAt,
    autoAdvance: link.autoAdvance !== false,
    currentWorkflowPhase: activePhaseIndex,
    currentPhaseName: definition?.phases[activePhaseIndex]?.name,
    progress: {
      completed: completedStories,
      total: totalStories,
      percent
    },
    phaseProgress: link.phaseProgress,
    storyPhaseMap: link.storyPhaseMap,
    completedStoryIds: link.completedStoryIds || [],
    mappingDetails: link.mappingDetails
  };
}
|
|
2087
|
+
|
|
2088
|
+
/**
 * Get stories for a specific workflow phase
 * @param {string} prdName - Name of the PRD
 * @param {number} phaseIndex - Index of the workflow phase
 * @returns {object|null} - Phase stories info or null (unknown PRD/workflow
 *   or out-of-range phase index)
 */
function getPhaseStories(prdName, phaseIndex) {
  const state = loadState();
  const link = state.prdWorkflowLinks?.[prdName];

  if (!link) {
    return null;
  }

  const workflow = WORKFLOWS[link.workflow];
  // Bug fix: also reject negative indices. Previously a negative phaseIndex
  // passed the `>= length` check, `workflow.phases[phaseIndex]` came back
  // undefined, and `phase.name` below threw a TypeError. This also matches
  // the bounds check used by reassignStoryToPhase().
  if (!workflow || phaseIndex < 0 || phaseIndex >= workflow.phases.length) {
    return null;
  }

  const phase = workflow.phases[phaseIndex];
  const phaseProgress = link.phaseProgress?.[phaseIndex];

  // No tracked progress yet for this phase: return an empty shell so callers
  // can render the phase without special-casing.
  if (!phaseProgress) {
    return {
      phase: phaseIndex,
      phaseName: phase.name,
      agents: phase.agents,
      stories: [],
      progress: { total: 0, completed: 0, percent: 0 }
    };
  }

  return {
    phase: phaseIndex,
    phaseName: phase.name,
    agents: phase.agents,
    duration: phase.duration,
    stories: phaseProgress.stories || [],
    progress: {
      total: phaseProgress.total,
      completed: phaseProgress.completed,
      percent: phaseProgress.percent
    }
  };
}
|
|
2133
|
+
|
|
2134
|
+
/**
 * Manually reassign a story to a different workflow phase
 * @param {string} prdName - Name of the PRD
 * @param {string} storyId - ID of the story
 * @param {number} newPhaseIndex - New phase index to assign
 * @returns {object} - { success, ... } result; never throws on missing link data
 */
function reassignStoryToPhase(prdName, storyId, newPhaseIndex) {
  const state = loadState();
  const link = state.prdWorkflowLinks?.[prdName];

  if (!link) {
    return { success: false, error: 'PRD not linked to a workflow' };
  }

  const workflow = WORKFLOWS[link.workflow];
  if (!workflow) {
    return { success: false, error: 'Linked workflow not found' };
  }

  if (newPhaseIndex < 0 || newPhaseIndex >= workflow.phases.length) {
    return { success: false, error: 'Invalid phase index' };
  }

  // Bug fix: use optional chaining — a link without a storyPhaseMap used to
  // throw a TypeError here instead of returning a clean error result.
  const storyMapping = link.storyPhaseMap?.[storyId];
  if (!storyMapping) {
    return { success: false, error: 'Story not found in PRD' };
  }

  const oldPhase = storyMapping.phase;
  const oldPhaseName = storyMapping.phaseName;

  // Update mapping; 'manual' confidence marks this as a user override so
  // automatic re-mapping can respect it.
  storyMapping.phase = newPhaseIndex;
  storyMapping.phaseName = workflow.phases[newPhaseIndex].name;
  storyMapping.confidence = 'manual';
  storyMapping.reassignedAt = new Date().toISOString();

  // Bug fix: guard against an absent phaseProgress container; older links may
  // not have one and indexed assignment below would otherwise throw.
  link.phaseProgress = link.phaseProgress || {};

  // Rebuild phase progress from scratch so counts stay consistent after the move.
  workflow.phases.forEach((phase, idx) => {
    const storiesInPhase = Object.entries(link.storyPhaseMap)
      .filter(([, m]) => m.phase === idx);
    const completedInPhase = storiesInPhase.filter(([, m]) => m.status === 'complete');

    link.phaseProgress[idx] = {
      phaseName: phase.name,
      total: storiesInPhase.length,
      completed: completedInPhase.length,
      percent: storiesInPhase.length > 0
        ? Math.round((completedInPhase.length / storiesInPhase.length) * 100)
        : 0,
      stories: storiesInPhase.map(([id, mapping]) => ({
        id,
        title: mapping.storyTitle,
        status: mapping.status,
        confidence: mapping.confidence
      }))
    };
  });

  state.history.push({
    action: 'prd_story_reassigned',
    prd: prdName,
    storyId,
    fromPhase: oldPhaseName,
    toPhase: storyMapping.phaseName,
    timestamp: new Date().toISOString()
  });

  saveState(state);

  return {
    success: true,
    storyId,
    fromPhase: { index: oldPhase, name: oldPhaseName },
    toPhase: { index: newPhaseIndex, name: storyMapping.phaseName },
    message: `Story reassigned from "${oldPhaseName}" to "${storyMapping.phaseName}"`
  };
}
|
|
2212
|
+
|
|
2213
|
+
/**
 * Unlink a PRD from its workflow
 * @param {string} prdName - Name of the PRD
 * @returns {object} - { success, prd, workflow, message } or { success: false, error }
 */
function unlinkPrdFromWorkflow(prdName) {
  const currentState = loadState();
  const existingLink = currentState.prdWorkflowLinks?.[prdName];

  if (!existingLink) {
    return { success: false, error: 'PRD not linked to a workflow' };
  }

  // Drop the link, then record the removal before persisting.
  delete currentState.prdWorkflowLinks[prdName];

  currentState.history.push({
    action: 'prd_unlinked_from_workflow',
    prd: prdName,
    workflow: existingLink.workflow,
    timestamp: new Date().toISOString()
  });

  saveState(currentState);

  return {
    success: true,
    prd: prdName,
    workflow: existingLink.workflow,
    message: `PRD "${prdName}" unlinked from workflow "${existingLink.workflow}"`
  };
}
|
|
2243
|
+
|
|
2244
|
+
/**
 * Get all PRDs linked to workflows
 * @returns {Array<object>} - One summary entry per linked PRD
 */
function getLinkedPrds() {
  const currentState = loadState();
  const allLinks = currentState.prdWorkflowLinks || {};
  const summaries = [];

  for (const [name, entry] of Object.entries(allLinks)) {
    // Counters default to 0 for links that predate story tracking.
    const done = entry.completedStories || 0;
    const total = entry.totalStories || 0;
    const pct = entry.totalStories > 0
      ? Math.round((done / entry.totalStories) * 100)
      : 0;

    summaries.push({
      prd: name,
      workflow: entry.workflow,
      linkedAt: entry.linkedAt,
      progress: {
        completed: done,
        total,
        percent: pct
      }
    });
  }

  return summaries;
}
|
|
2264
|
+
|
|
2265
|
+
/**
|
|
2266
|
+
* Parallel Phase Execution
|
|
2267
|
+
* Enables concurrent execution of workflow phases marked with `parallel: true`
|
|
2268
|
+
*
|
|
2269
|
+
* Features:
|
|
2270
|
+
* - Concurrent execution of parallel phases
|
|
2271
|
+
* - Partial failure handling with configurable strategies
|
|
2272
|
+
* - Individual phase progress tracking within parallel groups
|
|
2273
|
+
* - Failure recovery and retry support
|
|
2274
|
+
*/
|
|
2275
|
+
|
|
2276
|
+
/**
 * Failure strategy options for parallel execution.
 * - 'fail-fast': Stop all phases on first failure
 * - 'continue': Continue other phases, collect failures at end
 *   (the default applied by startParallelPhases when none is given)
 * - 'retry': Allow retry of failed phases while others continue
 *
 * Keys double as the set of valid `failureStrategy` values; the string
 * values are human-readable descriptions surfaced in error messages.
 */
const PARALLEL_FAILURE_STRATEGIES = {
  'fail-fast': 'Stop all phases when one fails',
  'continue': 'Continue other phases, handle failures at completion',
  'retry': 'Allow retry of failed phases'
};
|
|
2287
|
+
|
|
2288
|
+
/**
 * Start parallel phase execution
 * Identifies consecutive parallel phases and starts them together
 * @param {string} workflowName - Workflow to execute
 * @param {number} startIndex - Starting phase index
 * @param {object} options - Execution options
 * @param {string} options.failureStrategy - How to handle failures ('fail-fast', 'continue', 'retry'); defaults to 'continue'
 * @param {number} options.maxRetries - Maximum retry attempts for 'retry' strategy (default: 2)
 * @returns {object} - { success, parallelPhases, ... } or { success: false, error } / { success: false, isSequential }
 */
function startParallelPhases(workflowName, startIndex = 0, options = {}) {
  const workflow = WORKFLOWS[workflowName];
  if (!workflow) {
    return { success: false, error: `Unknown workflow: ${workflowName}` };
  }

  // Bug fix: an out-of-range startIndex previously slipped past the later
  // `startIndex < workflow.phases.length` check and initialized a degenerate
  // parallelExecution with zero phases.
  if (startIndex < 0 || startIndex >= workflow.phases.length) {
    return { success: false, error: `Invalid start index: ${startIndex}` };
  }

  const state = loadState();
  const failureStrategy = options.failureStrategy || 'continue';
  // Bug fix: use ?? so an explicit maxRetries of 0 ("no retries") is honoured;
  // `|| 2` silently coerced 0 back to 2.
  const maxRetries = options.maxRetries ?? 2;

  // Validate failure strategy
  if (!PARALLEL_FAILURE_STRATEGIES[failureStrategy]) {
    return {
      success: false,
      error: `Invalid failure strategy: ${failureStrategy}. Valid options: ${Object.keys(PARALLEL_FAILURE_STRATEGIES).join(', ')}`
    };
  }

  // Find consecutive parallel phases starting from startIndex
  const parallelPhases = [];
  let i = startIndex;

  while (i < workflow.phases.length && workflow.phases[i].parallel) {
    parallelPhases.push({
      index: i,
      phase: workflow.phases[i],
      status: 'in_progress',
      progress: {
        percent: 0,
        currentTask: null,
        tasksCompleted: 0,
        tasksTotal: 0,
        lastUpdate: new Date().toISOString()
      },
      startedAt: new Date().toISOString(),
      retryCount: 0
    });
    i++;
  }

  // The starting phase is not marked parallel: caller should advance sequentially.
  if (parallelPhases.length === 0) {
    return { success: false, isSequential: true, message: 'Phase is sequential, not parallel' };
  }

  // Initialize parallel execution state
  state.parallelExecution = {
    workflow: workflowName,
    phases: parallelPhases,
    startedAt: new Date().toISOString(),
    completedPhases: [],
    failedPhases: [],
    failureStrategy,
    maxRetries,
    halted: false,
    haltReason: null
  };

  state.history.push({
    action: 'parallel_phases_started',
    workflow: workflowName,
    phases: parallelPhases.map(p => p.phase.name),
    failureStrategy,
    timestamp: new Date().toISOString()
  });

  saveState(state);

  emitWorkflowTelemetry('parallel_phases_started', {
    workflow: workflowName,
    phaseCount: parallelPhases.length,
    phases: parallelPhases.map(p => p.phase.name),
    failureStrategy
  });

  return {
    success: true,
    workflow: workflowName,
    failureStrategy,
    parallelPhases: parallelPhases.map(p => ({
      index: p.index,
      name: p.phase.name,
      agents: p.phase.agents,
      duration: p.phase.duration,
      progress: p.progress
    })),
    message: `Started ${parallelPhases.length} parallel phases with '${failureStrategy}' failure strategy`
  };
}
|
|
2386
|
+
|
|
2387
|
+
/**
 * Mark a parallel phase as complete
 * @param {number} phaseIndex - Index of the completed phase
 * @param {object} result - Optional result/output from the phase
 * @returns {object} - { success, ... }; when this was the last unresolved
 *   phase, delegates to finalizeParallelExecution (which persists state itself)
 */
function completeParallelPhase(phaseIndex, result = null) {
  const state = loadState();

  if (!state.parallelExecution) {
    return { success: false, error: 'No parallel execution in progress' };
  }

  const pe = state.parallelExecution;
  const workflow = WORKFLOWS[pe.workflow];
  if (!workflow) {
    return { success: false, error: 'Workflow not found' };
  }

  // Check if execution is halted (fail-fast strategy sets this on first failure)
  if (pe.halted) {
    return {
      success: false,
      error: 'Parallel execution is halted due to failure',
      haltReason: pe.haltReason
    };
  }

  // Find the phase in parallel execution
  const phaseEntry = pe.phases.find(p => p.index === phaseIndex);
  if (!phaseEntry) {
    return { success: false, error: 'Phase not in parallel execution' };
  }

  if (phaseEntry.status === 'completed') {
    return { success: false, error: 'Phase already completed' };
  }

  // A failed phase may only be completed directly under the 'retry' strategy;
  // otherwise the caller must go through retryParallelPhase() first.
  if (phaseEntry.status === 'failed' && pe.failureStrategy !== 'retry') {
    return { success: false, error: 'Phase has failed. Use retryParallelPhase() to retry.' };
  }

  // Mark phase as complete; progress is forced to 100% with all known tasks done.
  phaseEntry.status = 'completed';
  phaseEntry.completedAt = new Date().toISOString();
  phaseEntry.result = result;
  phaseEntry.progress = {
    percent: 100,
    currentTask: null,
    tasksCompleted: phaseEntry.progress?.tasksTotal || 0,
    tasksTotal: phaseEntry.progress?.tasksTotal || 0,
    lastUpdate: new Date().toISOString()
  };

  // Remove from failed if it was retried
  pe.failedPhases = pe.failedPhases.filter(idx => idx !== phaseIndex);
  pe.completedPhases.push(phaseIndex);

  state.history.push({
    action: 'parallel_phase_completed',
    workflow: pe.workflow,
    phase: phaseEntry.phase.name,
    phaseIndex,
    timestamp: new Date().toISOString()
  });

  // Check if all parallel phases are complete or resolved (no phase still running)
  const activePhases = pe.phases.filter(p => p.status === 'in_progress');
  const failedPhases = pe.phases.filter(p => p.status === 'failed');
  const allResolved = activePhases.length === 0;

  // NOTE: finalizeParallelExecution saves state itself, so we return before
  // the saveState below — do not reorder these.
  if (allResolved) {
    return finalizeParallelExecution(state, pe, workflow, failedPhases);
  }

  saveState(state);

  const remaining = activePhases.length;
  return {
    success: true,
    phaseComplete: true,
    allParallelComplete: false,
    remaining,
    failedCount: failedPhases.length,
    completedPhases: pe.completedPhases.length,
    totalPhases: pe.phases.length,
    message: `Phase completed. ${remaining} parallel phases remaining${failedPhases.length > 0 ? `, ${failedPhases.length} failed` : ''}.`
  };
}
|
|
2475
|
+
|
|
2476
|
+
/**
 * Mark a parallel phase as failed
 * @param {number} phaseIndex - Index of the failed phase
 * @param {object} error - Error details
 * @param {string} error.message - Error message
 * @param {string} error.type - Error type (e.g. 'testing', 'security', 'timeout')
 * @param {object} error.details - Additional error details
 * @returns {object} - { success, phaseFailed, halted, ... }; behavior of the
 *   aftermath depends on pe.failureStrategy ('fail-fast' halts everything,
 *   'continue'/'retry' let remaining phases run)
 */
function failParallelPhase(phaseIndex, error = {}) {
  const state = loadState();

  if (!state.parallelExecution) {
    return { success: false, error: 'No parallel execution in progress' };
  }

  const pe = state.parallelExecution;
  const workflow = WORKFLOWS[pe.workflow];
  if (!workflow) {
    return { success: false, error: 'Workflow not found' };
  }

  // Find the phase in parallel execution
  const phaseEntry = pe.phases.find(p => p.index === phaseIndex);
  if (!phaseEntry) {
    return { success: false, error: 'Phase not in parallel execution' };
  }

  if (phaseEntry.status === 'completed') {
    return { success: false, error: 'Cannot fail a completed phase' };
  }

  if (phaseEntry.status === 'failed') {
    return { success: false, error: 'Phase already marked as failed' };
  }

  // Mark phase as failed; normalize the error so downstream consumers always
  // see { message, type, details }.
  phaseEntry.status = 'failed';
  phaseEntry.failedAt = new Date().toISOString();
  phaseEntry.error = {
    message: error.message || 'Phase failed',
    type: error.type || 'unknown',
    details: error.details || {}
  };

  pe.failedPhases.push(phaseIndex);

  state.history.push({
    action: 'parallel_phase_failed',
    workflow: pe.workflow,
    phase: phaseEntry.phase.name,
    phaseIndex,
    error: phaseEntry.error,
    timestamp: new Date().toISOString()
  });

  emitWorkflowTelemetry('parallel_phase_failed', {
    workflow: pe.workflow,
    phase: phaseEntry.phase.name,
    phaseIndex,
    errorType: phaseEntry.error.type,
    failureStrategy: pe.failureStrategy
  });

  // Handle based on failure strategy
  if (pe.failureStrategy === 'fail-fast') {
    // Halt all phases: completeParallelPhase refuses further work while halted.
    pe.halted = true;
    pe.haltReason = `Phase "${phaseEntry.phase.name}" failed: ${phaseEntry.error.message}`;

    state.history.push({
      action: 'parallel_execution_halted',
      workflow: pe.workflow,
      reason: pe.haltReason,
      failedPhase: phaseEntry.phase.name,
      timestamp: new Date().toISOString()
    });

    saveState(state);

    return {
      success: true,
      phaseFailed: true,
      halted: true,
      haltReason: pe.haltReason,
      failedPhases: pe.failedPhases.map(idx => {
        const p = pe.phases.find(ph => ph.index === idx);
        return { index: idx, name: p?.phase.name, error: p?.error };
      }),
      message: `Parallel execution halted: ${pe.haltReason}`
    };
  }

  // For 'continue' and 'retry' strategies
  const activePhases = pe.phases.filter(p => p.status === 'in_progress');
  const allResolved = activePhases.length === 0;

  // NOTE: finalizeParallelExecution saves state itself, so we return before
  // the saveState below — do not reorder these.
  if (allResolved) {
    return finalizeParallelExecution(state, pe, workflow, pe.phases.filter(p => p.status === 'failed'));
  }

  saveState(state);

  const canRetry = pe.failureStrategy === 'retry' && phaseEntry.retryCount < pe.maxRetries;

  return {
    success: true,
    phaseFailed: true,
    halted: false,
    canRetry,
    retriesRemaining: canRetry ? pe.maxRetries - phaseEntry.retryCount : 0,
    activePhases: activePhases.length,
    failedPhases: pe.failedPhases.length,
    message: canRetry
      ? `Phase failed but can be retried (${pe.maxRetries - phaseEntry.retryCount} retries remaining). Other phases continue.`
      : 'Phase failed. Other phases continue.'
  };
}
|
|
2593
|
+
|
|
2594
|
+
/**
 * Retry a failed parallel phase
 * @param {number} phaseIndex - Index of the phase to retry
 * @returns {object} - { success, retryCount, retriesRemaining, ... } or { success: false, error }
 */
function retryParallelPhase(phaseIndex) {
  const currentState = loadState();
  const execution = currentState.parallelExecution;

  // Guard clauses: a retry is only meaningful when there is an active
  // parallel execution, using the 'retry' strategy, for a known failed phase
  // that still has retry budget left.
  if (!execution) {
    return { success: false, error: 'No parallel execution in progress' };
  }
  if (!WORKFLOWS[execution.workflow]) {
    return { success: false, error: 'Workflow not found' };
  }
  if (execution.failureStrategy !== 'retry') {
    return { success: false, error: 'Retry not enabled. Use "retry" failure strategy.' };
  }

  const target = execution.phases.find((entry) => entry.index === phaseIndex);
  if (!target) {
    return { success: false, error: 'Phase not in parallel execution' };
  }
  if (target.status !== 'failed') {
    return { success: false, error: 'Phase is not in failed state' };
  }
  if (target.retryCount >= execution.maxRetries) {
    return {
      success: false,
      error: `Maximum retries (${execution.maxRetries}) exceeded for this phase`,
      retryCount: target.retryCount
    };
  }

  // Reset the phase for another attempt, preserving the known task total.
  const now = new Date().toISOString();
  target.status = 'in_progress';
  target.retryCount += 1;
  target.lastRetryAt = now;
  target.progress = {
    percent: 0,
    currentTask: null,
    tasksCompleted: 0,
    tasksTotal: target.progress?.tasksTotal || 0,
    lastUpdate: now
  };

  // The phase is no longer counted as failed, and any halt is lifted.
  execution.failedPhases = execution.failedPhases.filter((idx) => idx !== phaseIndex);
  if (execution.halted) {
    execution.halted = false;
    execution.haltReason = null;
  }

  currentState.history.push({
    action: 'parallel_phase_retried',
    workflow: execution.workflow,
    phase: target.phase.name,
    phaseIndex,
    retryCount: target.retryCount,
    timestamp: new Date().toISOString()
  });

  saveState(currentState);

  emitWorkflowTelemetry('parallel_phase_retried', {
    workflow: execution.workflow,
    phase: target.phase.name,
    phaseIndex,
    retryCount: target.retryCount
  });

  return {
    success: true,
    phase: target.phase.name,
    retryCount: target.retryCount,
    retriesRemaining: execution.maxRetries - target.retryCount,
    message: `Phase "${target.phase.name}" restarted (retry ${target.retryCount}/${execution.maxRetries})`
  };
}
|
|
2679
|
+
|
|
2680
|
+
/**
 * Update progress for a parallel phase
 * @param {number} phaseIndex - Index of the phase
 * @param {object} progress - Progress update
 * @param {number} progress.percent - Completion percentage (0-100)
 * @param {string} progress.currentTask - Current task description
 * @param {number} progress.tasksCompleted - Number of tasks completed
 * @param {number} progress.tasksTotal - Total number of tasks
 * @returns {object} - { success, phaseIndex, phaseName, progress } or { success: false, error }
 */
function updateParallelPhaseProgress(phaseIndex, progress = {}) {
  const currentState = loadState();
  const execution = currentState.parallelExecution;

  if (!execution) {
    return { success: false, error: 'No parallel execution in progress' };
  }

  const entry = execution.phases.find((p) => p.index === phaseIndex);
  if (!entry) {
    return { success: false, error: 'Phase not in parallel execution' };
  }

  // Only a running phase accepts progress updates.
  if (entry.status !== 'in_progress') {
    return { success: false, error: `Cannot update progress for phase in ${entry.status} state` };
  }

  // Merge the update over what we already know; each field falls back to the
  // previous value (then a sane default) when the caller omits it.
  const prior = entry.progress ?? {};
  const rawPercent = progress.percent ?? prior.percent ?? 0;
  entry.progress = {
    percent: Math.min(100, Math.max(0, rawPercent)), // clamp to [0, 100]
    currentTask: progress.currentTask ?? prior.currentTask,
    tasksCompleted: progress.tasksCompleted ?? prior.tasksCompleted ?? 0,
    tasksTotal: progress.tasksTotal ?? prior.tasksTotal ?? 0,
    lastUpdate: new Date().toISOString()
  };

  saveState(currentState);

  return {
    success: true,
    phaseIndex,
    phaseName: entry.phase.name,
    progress: entry.progress
  };
}
|
|
2725
|
+
|
|
2726
|
+
/**
 * Finalize parallel execution when all phases are resolved.
 *
 * Three exit paths:
 *   1. Failures present: keep parallelExecution in state (marked resolved)
 *      so skipFailedParallelPhases()/retryAllFailedPhases() can recover;
 *      workflowStep is NOT advanced.
 *   2. All succeeded and the workflow has no phases left: clear both
 *      activeWorkflow and parallelExecution.
 *   3. All succeeded with phases remaining: clear parallelExecution and
 *      advance workflowStep past the parallel group.
 * In every path this function persists state via saveState.
 * @private
 */
function finalizeParallelExecution(state, pe, workflow, failedPhases) {
  const hasFailures = failedPhases.length > 0;
  const completedPhases = pe.phases.filter(p => p.status === 'completed');

  if (hasFailures) {
    // Parallel execution completed with failures
    state.history.push({
      action: 'parallel_execution_completed_with_failures',
      workflow: pe.workflow,
      completedPhases: completedPhases.map(p => p.phase.name),
      failedPhases: failedPhases.map(p => p.phase.name),
      timestamp: new Date().toISOString()
    });

    emitWorkflowTelemetry('parallel_phases_completed_with_failures', {
      workflow: pe.workflow,
      completedCount: completedPhases.length,
      failedCount: failedPhases.length,
      phases: pe.phases.map(p => ({ name: p.phase.name, status: p.status }))
    });

    // Keep parallel execution state for potential recovery (skip/retry flows
    // check pe.resolved before acting).
    pe.resolved = true;
    pe.resolvedAt = new Date().toISOString();
    saveState(state);

    return {
      success: true,
      allParallelComplete: true,
      hasFailures: true,
      completedPhases: completedPhases.map(p => ({
        index: p.index,
        name: p.phase.name,
        result: p.result
      })),
      failedPhases: failedPhases.map(p => ({
        index: p.index,
        name: p.phase.name,
        error: p.error
      })),
      message: `Parallel execution completed with ${failedPhases.length} failure(s). Review failures before proceeding.`,
      canProceed: false,
      recoveryOptions: ['retryFailed', 'skipFailed', 'cancelWorkflow']
    };
  }

  // All phases completed successfully: advance past the whole parallel group.
  const maxPhaseIndex = Math.max(...pe.phases.map(p => p.index));
  state.workflowStep = maxPhaseIndex + 1;

  state.history.push({
    action: 'parallel_execution_complete',
    workflow: pe.workflow,
    phases: pe.phases.map(p => p.phase.name),
    timestamp: new Date().toISOString()
  });

  emitWorkflowTelemetry('parallel_phases_completed', {
    workflow: pe.workflow,
    phaseCount: pe.phases.length,
    phases: pe.phases.map(p => p.phase.name)
  });

  // Check if workflow is now complete
  if (state.workflowStep >= workflow.phases.length) {
    state.activeWorkflow = null;
    state.parallelExecution = null;
    saveState(state);

    return {
      success: true,
      phaseComplete: true,
      allParallelComplete: true,
      hasFailures: false,
      workflowComplete: true,
      message: 'All parallel phases complete. Workflow finished.'
    };
  }

  // Clear parallel execution state; the workflow resumes sequentially.
  state.parallelExecution = null;
  saveState(state);

  return {
    success: true,
    phaseComplete: true,
    allParallelComplete: true,
    hasFailures: false,
    workflowComplete: false,
    nextPhase: workflow.phases[state.workflowStep],
    message: 'All parallel phases complete. Continuing to next phase.'
  };
}
|
|
2823
|
+
|
|
2824
|
+
/**
 * Skip failed phases and proceed with workflow
 * Use after parallel execution completes with failures
 * @returns {object} - { success, skippedPhases, workflowComplete, ... } or { success: false, error }
 */
function skipFailedParallelPhases() {
  const currentState = loadState();
  const execution = currentState.parallelExecution;

  if (!execution) {
    return { success: false, error: 'No parallel execution in progress' };
  }
  // Only a resolved execution (every phase completed or failed) may be skipped past.
  if (!execution.resolved) {
    return { success: false, error: 'Parallel execution not yet resolved' };
  }

  const wf = WORKFLOWS[execution.workflow];
  if (!wf) {
    return { success: false, error: 'Workflow not found' };
  }

  const skippedNames = execution.phases
    .filter((entry) => entry.status === 'failed')
    .map((entry) => entry.phase.name);

  // Move past the entire parallel group, failed phases included.
  const highestIndex = Math.max(...execution.phases.map((entry) => entry.index));
  currentState.workflowStep = highestIndex + 1;

  currentState.history.push({
    action: 'parallel_failed_phases_skipped',
    workflow: execution.workflow,
    skippedPhases: skippedNames,
    timestamp: new Date().toISOString()
  });

  // Parallel state is always cleared; the active workflow is cleared only
  // when no phases remain.
  const finished = currentState.workflowStep >= wf.phases.length;
  if (finished) {
    currentState.activeWorkflow = null;
  }
  currentState.parallelExecution = null;
  saveState(currentState);

  if (finished) {
    return {
      success: true,
      skippedPhases: skippedNames,
      workflowComplete: true,
      message: `Skipped ${skippedNames.length} failed phases. Workflow finished.`
    };
  }

  return {
    success: true,
    skippedPhases: skippedNames,
    workflowComplete: false,
    nextPhase: wf.phases[currentState.workflowStep],
    message: `Skipped ${skippedNames.length} failed phases. Continuing to next phase.`
  };
}
|
|
2884
|
+
|
|
2885
|
+
/**
 * Retry every failed phase of the current parallel execution.
 *
 * Refuses when there is no parallel execution, when nothing has failed, or
 * (under the 'retry' failure strategy) when any failed phase has already
 * used up its retry budget. Otherwise each failed phase is flipped back to
 * 'in_progress' with reset progress, the execution's halt/resolved flags
 * are cleared, and the retry is recorded in history.
 *
 * @returns {object} - { success, retriedPhases, message } or { success: false, error, ... }
 */
function retryAllFailedPhases() {
  const state = loadState();

  if (!state.parallelExecution) {
    return { success: false, error: 'No parallel execution in progress' };
  }

  const execution = state.parallelExecution;
  const failedEntries = execution.phases.filter((entry) => entry.status === 'failed');

  if (failedEntries.length === 0) {
    return { success: false, error: 'No failed phases to retry' };
  }

  // Under the 'retry' strategy, refuse when any phase is out of attempts.
  const outOfRetries = failedEntries.filter((entry) => entry.retryCount >= execution.maxRetries);
  if (outOfRetries.length > 0 && execution.failureStrategy === 'retry') {
    return {
      success: false,
      error: 'Some phases have exhausted retries',
      exhaustedPhases: outOfRetries.map((entry) => ({ name: entry.phase.name, retryCount: entry.retryCount }))
    };
  }

  const now = new Date().toISOString();
  const retriedPhases = [];

  for (const entry of failedEntries) {
    entry.status = 'in_progress';
    entry.retryCount += 1;
    entry.lastRetryAt = now;
    // Reset progress but keep the known task total from the previous run.
    entry.progress = {
      percent: 0,
      currentTask: null,
      tasksCompleted: 0,
      tasksTotal: entry.progress?.tasksTotal || 0,
      lastUpdate: now
    };
    retriedPhases.push(entry.phase.name);
  }

  // Clear failure bookkeeping so the execution is live again.
  execution.failedPhases = [];
  execution.resolved = false;
  execution.halted = false;
  execution.haltReason = null;

  state.history.push({
    action: 'parallel_all_failed_retried',
    workflow: execution.workflow,
    retriedPhases,
    timestamp: now
  });

  saveState(state);

  return {
    success: true,
    retriedPhases,
    message: `Retrying ${retriedPhases.length} failed phases`
  };
}
|
|
2949
|
+
|
|
2950
|
+
/**
 * Get parallel execution status with detailed progress tracking.
 *
 * @returns {object} - { active: false } when no parallel execution is in
 *   progress; otherwise a snapshot with per-phase detail, a status summary,
 *   and aggregate progress percentages.
 */
function getParallelExecutionStatus() {
  const state = loadState();

  if (!state.parallelExecution) {
    return { active: false };
  }

  const pe = state.parallelExecution;
  const workflow = WORKFLOWS[pe.workflow];

  // Bucket phases by status for the summary block.
  const inProgressPhases = pe.phases.filter(p => p.status === 'in_progress');
  const completedPhases = pe.phases.filter(p => p.status === 'completed');
  const failedPhases = pe.phases.filter(p => p.status === 'failed');

  // Overall progress: completed counts as 100, failed as 0, anything else
  // uses the phase's own reported percent.
  const totalProgress = pe.phases.reduce((sum, p) => {
    if (p.status === 'completed') return sum + 100;
    if (p.status === 'failed') return sum + 0;
    return sum + (p.progress?.percent || 0);
  }, 0);
  // FIX: guard against an empty phases array, which previously produced NaN.
  const overallPercent = pe.phases.length > 0
    ? Math.round(totalProgress / pe.phases.length)
    : 0;

  return {
    active: true,
    workflow: pe.workflow,
    workflowName: workflow?.name,
    startedAt: pe.startedAt,
    failureStrategy: pe.failureStrategy,
    maxRetries: pe.maxRetries,
    halted: pe.halted || false,
    haltReason: pe.haltReason,
    resolved: pe.resolved || false,
    phases: pe.phases.map(p => ({
      index: p.index,
      name: p.phase.name,
      agents: p.phase.agents,
      status: p.status,
      progress: p.progress || { percent: 0 },
      startedAt: p.startedAt,
      completedAt: p.completedAt,
      failedAt: p.failedAt,
      error: p.error,
      retryCount: p.retryCount || 0,
      result: p.result
    })),
    summary: {
      total: pe.phases.length,
      inProgress: inProgressPhases.length,
      completed: completedPhases.length,
      failed: failedPhases.length
    },
    progress: {
      completed: completedPhases.length,
      total: pe.phases.length,
      percent: overallPercent,
      byPhase: pe.phases.map(p => ({
        name: p.phase.name,
        status: p.status,
        percent: p.status === 'completed' ? 100 : p.status === 'failed' ? 0 : (p.progress?.percent || 0)
      }))
    }
  };
}
|
|
3017
|
+
|
|
3018
|
+
/**
 * Cancel the current parallel execution.
 *
 * Records a cancellation entry in history, clears the parallel execution
 * slot, and persists state.
 *
 * @returns {object} - { success, message } or { success: false, error }
 */
function cancelParallelExecution() {
  const state = loadState();

  if (!state.parallelExecution) {
    return { success: false, error: 'No parallel execution to cancel' };
  }

  const pe = state.parallelExecution;

  state.history.push({
    action: 'parallel_execution_cancelled',
    workflow: pe.workflow,
    phases: pe.phases.map(p => p.phase.name),
    // FIX: pe.completedPhases is not guaranteed to be initialized (other
    // code paths only reset pe.failedPhases); default to [] so cancelling
    // never throws.
    completedPhases: (pe.completedPhases || []).length,
    timestamp: new Date().toISOString()
  });

  state.parallelExecution = null;
  saveState(state);

  return {
    success: true,
    message: 'Parallel execution cancelled'
  };
}
|
|
3046
|
+
|
|
3047
|
+
/**
 * Check if a workflow phase supports parallel execution.
 *
 * @param {string} workflowName - Workflow name
 * @param {number} phaseIndex - Phase index (0-based)
 * @returns {boolean} - true only when the phase exists and is marked parallel
 */
function isParallelPhase(workflowName, phaseIndex) {
  const workflow = WORKFLOWS[workflowName];
  // FIX: also reject negative indices — previously phaseIndex < 0 reached
  // workflow.phases[phaseIndex].parallel on undefined and threw a TypeError.
  if (!workflow || phaseIndex < 0 || phaseIndex >= workflow.phases.length) {
    return false;
  }
  return !!workflow.phases[phaseIndex].parallel;
}
|
|
3059
|
+
|
|
3060
|
+
/**
 * Define a custom workflow with parallel phases.
 *
 * Validates the definition (parallel phases must name at least one agent)
 * and registers it in the module-level WORKFLOWS registry, defaulting
 * tier to 'free' and pack to null.
 *
 * @param {string} name - Workflow name (registry key)
 * @param {object} definition - Workflow definition { name, phases, tier?, pack? }
 * @returns {object} - { success, workflow, phases, parallelPhases } or { success: false, error }
 */
function defineWorkflowWithParallel(name, definition) {
  // FIX: require phases to actually be an array — a truthy non-array value
  // previously crashed the for...of below instead of returning an error.
  if (!definition.name || !Array.isArray(definition.phases)) {
    return { success: false, error: 'Invalid workflow definition' };
  }

  // Validate parallel phases: each must declare the agents it fans out to.
  for (const phase of definition.phases) {
    if (phase.parallel && (!phase.agents || phase.agents.length === 0)) {
      return { success: false, error: `Parallel phase "${phase.name}" must have agents` };
    }
  }

  WORKFLOWS[name] = {
    ...definition,
    tier: definition.tier || 'free',
    pack: definition.pack || null
  };

  return {
    success: true,
    workflow: name,
    phases: definition.phases.length,
    parallelPhases: definition.phases.filter(p => p.parallel).length
  };
}
|
|
3090
|
+
|
|
3091
|
+
|
|
3092
|
+
/**
|
|
3093
|
+
* =============================================================================
|
|
3094
|
+
* ADAPTIVE WORKFLOW SYSTEM
|
|
3095
|
+
* =============================================================================
|
|
3096
|
+
*
|
|
3097
|
+
* Provides intelligent failure handling with:
|
|
3098
|
+
* - Automatic failure type detection from error messages
|
|
3099
|
+
* - Adaptive remediation path selection
|
|
3100
|
+
* - Failure pattern tracking and learning
|
|
3101
|
+
* - Preventive action suggestions based on history
|
|
3102
|
+
* - Fallback phase support for workflows
|
|
3103
|
+
*/
|
|
3104
|
+
|
|
3105
|
+
/**
 * Auto-detect failure type from an error message and optional details.
 *
 * Scores every entry in FAILURE_SIGNATURES by counting keyword hits in the
 * lowercased message + serialized details, then picks the highest-scoring
 * type (first wins on ties, in signature-declaration order).
 *
 * @param {string} errorMessage - Error message or description
 * @param {object} details - Additional details about the failure
 * @returns {object} - { type, confidence, matchedKeywords, allScores }
 */
function detectFailureType(errorMessage, details = {}) {
  const haystack = (errorMessage + ' ' + JSON.stringify(details)).toLowerCase();

  // Per-type hit counts and the keywords that produced them.
  const scores = {};
  const matches = {};
  for (const [type, keywords] of Object.entries(FAILURE_SIGNATURES)) {
    const hits = keywords.filter((keyword) => haystack.includes(keyword.toLowerCase()));
    scores[type] = hits.length;
    matches[type] = hits;
  }

  // Select the best-scoring type; ties keep the earliest type seen.
  let bestType = null;
  let bestScore = 0;
  for (const [type, score] of Object.entries(scores)) {
    if (score > bestScore) {
      bestScore = score;
      bestType = type;
    }
  }

  // 3 or more keyword hits saturates confidence at 1.0.
  const confidence = bestScore > 0 ? Math.min(bestScore / 3, 1) : 0;

  return {
    type: bestType,
    confidence,
    matchedKeywords: bestType ? matches[bestType] : [],
    allScores: scores
  };
}
|
|
3151
|
+
|
|
3152
|
+
/**
 * Trigger adaptive remediation for a workflow phase failure.
 *
 * Pipeline: classify the failure with detectFailureType(), look up its
 * remediation path in REMEDIATION_PATHS (keyed as `<type>-failure`), then —
 * unless auto-trigger is disabled — report the failure, record the pattern
 * via trackFailurePattern(), and optionally start remediation. The report,
 * track, and start steps each persist state, so their order matters.
 *
 * @param {string} errorMessage - Error message
 * @param {object} details - Failure details (serialized into detection input)
 * @param {object} options - { autoStart: boolean (default true), forceStart: boolean }
 * @returns {object} - Remediation plan and status; success:false with a
 *   `detection` payload when classification is too uncertain (< 0.3).
 */
function triggerAdaptiveRemediation(errorMessage, details = {}, options = {}) {
  const state = loadState();

  // Remediation only makes sense against a running workflow.
  if (!state.activeWorkflow) {
    return { success: false, error: 'No active workflow' };
  }

  // Detect failure type
  const detection = detectFailureType(errorMessage, details);

  // Below 0.3 confidence (fewer than ~1 strong keyword hit) we refuse to
  // guess and ask the caller to classify explicitly.
  if (!detection.type || detection.confidence < 0.3) {
    return {
      success: false,
      error: 'Could not determine failure type with sufficient confidence',
      detection,
      suggestion: 'Use reportPhaseFailure() with an explicit failure type'
    };
  }

  const failureType = detection.type;
  const remediationKey = failureType + '-failure';
  const remediation = REMEDIATION_PATHS[remediationKey];

  if (!remediation) {
    return {
      success: false,
      error: 'No remediation path for detected failure type: ' + failureType,
      detection
    };
  }

  // Check if auto-trigger is enabled for this failure type
  // (options.forceStart overrides a disabled autoTrigger flag).
  if (!remediation.autoTrigger && !options.forceStart) {
    return {
      success: true,
      autoTriggered: false,
      detection,
      remediation: {
        description: remediation.description,
        severity: remediation.severity,
        steps: remediation.steps.length,
        preventiveActions: remediation.preventiveActions
      },
      message: 'Remediation available but auto-trigger disabled for ' + failureType + '. Use forceStart: true to override.'
    };
  }

  // Report the failure
  // (the detection evidence is folded into the failure record for audit).
  const failureReport = reportPhaseFailure(failureType, {
    ...details,
    errorMessage,
    detectedKeywords: detection.matchedKeywords,
    confidence: detection.confidence
  });

  // Track this failure pattern
  trackFailurePattern(failureType, state.activeWorkflow, details);

  // Auto-start remediation if requested
  // (autoStart defaults to true; only an explicit `false` skips it).
  let remediationResult = null;
  if (options.autoStart !== false) {
    remediationResult = startRemediation(failureReport.failureId);
  }

  return {
    success: true,
    autoTriggered: true,
    detection,
    failure: failureReport,
    remediation: remediationResult,
    preventiveActions: remediation.preventiveActions,
    fallbackPhase: remediation.fallbackPhase,
    message: 'Adaptive remediation ' + (options.autoStart !== false ? 'started' : 'prepared') + ' for ' + failureType + ' failure'
  };
}
|
|
3235
|
+
|
|
3236
|
+
/**
 * Track failure patterns for learning.
 *
 * Records one failure occurrence into state.failurePatterns under three
 * indexes (byType, byWorkflow, byPhase) plus a capped timeline (last 100),
 * then persists state. The current phase name is derived from the active
 * workflow step, falling back to 'unknown'.
 *
 * @param {string} failureType - Type of failure
 * @param {string} workflow - Workflow name
 * @param {object} details - Failure details (errorMessage is summarized)
 */
function trackFailurePattern(failureType, workflow, details = {}) {
  const state = loadState();

  // Lazily create the pattern store on first use.
  state.failurePatterns = state.failurePatterns || {
    byType: {},
    byWorkflow: {},
    byPhase: {},
    timeline: []
  };
  const patterns = state.failurePatterns;

  const when = new Date().toISOString();
  const workflowDef = WORKFLOWS[workflow];
  const phaseName = workflowDef && workflowDef.phases[state.workflowStep]
    ? workflowDef.phases[state.workflowStep].name
    : 'unknown';

  // Index by failure type.
  if (!patterns.byType[failureType]) {
    patterns.byType[failureType] = { count: 0, lastOccurrence: null, workflows: [] };
  }
  const typeStats = patterns.byType[failureType];
  typeStats.count += 1;
  typeStats.lastOccurrence = when;
  if (!typeStats.workflows.includes(workflow)) {
    typeStats.workflows.push(workflow);
  }

  // Index by workflow.
  if (!patterns.byWorkflow[workflow]) {
    patterns.byWorkflow[workflow] = { count: 0, failureTypes: {}, phases: {} };
  }
  const workflowStats = patterns.byWorkflow[workflow];
  workflowStats.count += 1;
  workflowStats.failureTypes[failureType] = (workflowStats.failureTypes[failureType] || 0) + 1;
  workflowStats.phases[phaseName] = (workflowStats.phases[phaseName] || 0) + 1;

  // Index by phase.
  if (!patterns.byPhase[phaseName]) {
    patterns.byPhase[phaseName] = { count: 0, failureTypes: {} };
  }
  const phaseStats = patterns.byPhase[phaseName];
  phaseStats.count += 1;
  phaseStats.failureTypes[failureType] = (phaseStats.failureTypes[failureType] || 0) + 1;

  // Append to the timeline, capped at the last 100 entries.
  patterns.timeline.push({
    type: failureType,
    workflow,
    phase: phaseName,
    timestamp: when,
    details: {
      errorSummary: details.errorMessage ? details.errorMessage.substring(0, 200) : null
    }
  });
  if (patterns.timeline.length > 100) {
    patterns.timeline = patterns.timeline.slice(-100);
  }

  saveState(state);
}
|
|
3308
|
+
|
|
3309
|
+
/**
 * Get preventive actions based on failure history.
 *
 * Analyzes recorded failure patterns and suggests actions to prevent
 * future failures, bucketed by priority (high: 5+ occurrences,
 * medium: 3-4, low: 2). Single occurrences are ignored.
 *
 * @param {string} [workflow] - Optional workflow name to focus on
 * @param {string} [phase] - Optional phase name to focus on
 * @returns {object} - { high, medium, low, patterns, workflowSpecific?, phaseSpecific? }
 */
function getPreventiveActions(workflow, phase) {
  // Normalize without reassigning parameters (file-wide idiom cleanup).
  const targetWorkflow = workflow || null;
  const targetPhase = phase || null;

  const state = loadState();
  const patterns = state.failurePatterns || { byType: {}, byWorkflow: {}, byPhase: {} };

  const suggestions = {
    high: [],
    medium: [],
    low: [],
    patterns: []
  };

  // Analyze by failure type, most frequent first.
  const sortedTypes = Object.entries(patterns.byType)
    .sort((a, b) => b[1].count - a[1].count);

  for (const [type, data] of sortedTypes) {
    const remediation = REMEDIATION_PATHS[type + '-failure'];

    // Only repeated failures (2+) with a known remediation path matter.
    if (remediation && data.count >= 2) {
      const severity = remediation.severity || 'medium';
      const priority = data.count >= 5 ? 'high' : data.count >= 3 ? 'medium' : 'low';

      suggestions.patterns.push({
        type,
        occurrences: data.count,
        severity,
        lastSeen: data.lastOccurrence
      });

      if (remediation.preventiveActions) {
        for (const action of remediation.preventiveActions) {
          if (!suggestions[priority].includes(action)) {
            suggestions[priority].push(action);
          }
        }
      }
    }
  }

  // Add workflow-specific suggestions when that workflow has history.
  if (targetWorkflow && patterns.byWorkflow[targetWorkflow]) {
    const wfPatterns = patterns.byWorkflow[targetWorkflow];
    const dominantType = Object.entries(wfPatterns.failureTypes)
      .sort((a, b) => b[1] - a[1])[0];

    if (dominantType) {
      suggestions.workflowSpecific = {
        workflow: targetWorkflow,
        dominantFailureType: dominantType[0],
        occurrences: dominantType[1],
        suggestion: 'This workflow frequently fails due to ' + dominantType[0] + ' issues. Consider adding extra validation steps.'
      };
    }
  }

  // Add phase-specific suggestions once a phase has failed 3+ times.
  if (targetPhase && patterns.byPhase[targetPhase]) {
    const phasePatterns = patterns.byPhase[targetPhase];
    if (phasePatterns.count >= 3) {
      suggestions.phaseSpecific = {
        phase: targetPhase,
        totalFailures: phasePatterns.count,
        suggestion: 'Phase "' + targetPhase + '" has ' + phasePatterns.count + ' recorded failures. Consider extending duration or adding checkpoints.'
      };
    }
  }

  return suggestions;
}
|
|
3392
|
+
|
|
3393
|
+
/**
 * Get failure pattern analytics.
 *
 * Returns a detailed analysis of the recorded failure history: totals,
 * top failure types (with severity from their remediation path), the most
 * problematic workflows and phases, a 7-day trend, and the last 10
 * timeline entries.
 *
 * @returns {object} - Failure pattern analysis
 */
function getFailureAnalytics() {
  const state = loadState();
  const patterns = state.failurePatterns || { byType: {}, byWorkflow: {}, byPhase: {}, timeline: [] };

  // Total failures across all recorded types.
  const totalFailures = Object.values(patterns.byType)
    .reduce((sum, data) => sum + data.count, 0);

  // Most common failure types, annotated with remediation severity.
  const topFailureTypes = Object.entries(patterns.byType)
    .map(([type, data]) => {
      const remediation = REMEDIATION_PATHS[type + '-failure'];
      return {
        type,
        count: data.count,
        percentage: totalFailures > 0 ? Math.round((data.count / totalFailures) * 100) : 0,
        lastOccurrence: data.lastOccurrence,
        severity: remediation ? remediation.severity : 'unknown'
      };
    })
    .sort((a, b) => b.count - a.count);

  // Workflows ranked by failure count, with their dominant issue.
  const problematicWorkflows = Object.entries(patterns.byWorkflow)
    .map(([workflow, data]) => {
      const dominant = Object.entries(data.failureTypes)
        .sort((a, b) => b[1] - a[1])[0];
      return {
        workflow,
        failureCount: data.count,
        dominantIssue: dominant ? dominant[0] : 'unknown',
        affectedPhases: Object.keys(data.phases)
      };
    })
    .sort((a, b) => b.failureCount - a.failureCount);

  // Phases ranked by failure count.
  const problematicPhases = Object.entries(patterns.byPhase)
    .map(([phase, data]) => ({
      phase,
      failureCount: data.count,
      failureTypes: Object.keys(data.failureTypes)
    }))
    .sort((a, b) => b.failureCount - a.failureCount);

  // Recent trend: last 7 days. ISO-8601 strings compare lexicographically,
  // so a plain >= works here.
  const oneWeekAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString();
  const recentFailures = patterns.timeline.filter(f => f.timestamp >= oneWeekAgo);

  const recentByType = {};
  for (const failure of recentFailures) {
    recentByType[failure.type] = (recentByType[failure.type] || 0) + 1;
  }

  return {
    summary: {
      totalFailures,
      failureTypes: Object.keys(patterns.byType).length,
      affectedWorkflows: Object.keys(patterns.byWorkflow).length,
      affectedPhases: Object.keys(patterns.byPhase).length
    },
    topFailureTypes: topFailureTypes.slice(0, 5),
    problematicWorkflows: problematicWorkflows.slice(0, 5),
    problematicPhases: problematicPhases.slice(0, 5),
    recentTrend: {
      period: '7 days',
      failures: recentFailures.length,
      byType: recentByType
    },
    timeline: patterns.timeline.slice(-10) // Last 10 failures
  };
}
|
|
3482
|
+
|
|
3483
|
+
/**
 * Apply fallback phase for a failed workflow phase.
 *
 * Looks up the failure record, resolves its remediation path (keyed as
 * `<type>-failure`), and builds a fallback phase descriptor from it. The
 * application is recorded in state.fallbackPhases, state.history, and via
 * telemetry.
 *
 * NOTE(review): despite the "Insert fallback phase after current phase"
 * comment below, this function never modifies workflow.phases itself — it
 * only records the fallback in state; presumably the scheduler consumes
 * state.fallbackPhases to actually run it — confirm against callers.
 *
 * @param {string} failureId - The failure ID (from state.workflowFailures)
 * @returns {object} - { success, workflow, originalPhase, fallbackPhase, message }
 *   or { success: false, error } when the failure, remediation fallback, or
 *   workflow cannot be found.
 */
function applyFallbackPhase(failureId) {
  const state = loadState();

  const failure = state.workflowFailures ? state.workflowFailures.find(function(f) { return f.id === failureId; }) : null;
  if (!failure) {
    return { success: false, error: 'Failure not found' };
  }

  const remediationKey = failure.type + '-failure';
  const remediation = REMEDIATION_PATHS[remediationKey];

  if (!remediation || !remediation.fallbackPhase) {
    return { success: false, error: 'No fallback phase defined for this failure type' };
  }

  const workflow = WORKFLOWS[failure.workflow];
  if (!workflow) {
    return { success: false, error: 'Workflow not found' };
  }

  // Insert fallback phase after current phase
  // (descriptor carries isFallback + the triggering failure for audit).
  const fallbackPhase = {
    name: remediation.fallbackPhase.name,
    agents: remediation.fallbackPhase.agents,
    duration: remediation.fallbackPhase.duration,
    isFallback: true,
    triggeredByFailure: failureId
  };

  // Track fallback phase insertion
  state.fallbackPhases = state.fallbackPhases || [];
  state.fallbackPhases.push({
    workflow: failure.workflow,
    originalPhase: failure.phase,
    fallbackPhase: fallbackPhase.name,
    insertedAt: new Date().toISOString(),
    failureId: failureId
  });

  state.history.push({
    action: 'fallback_phase_applied',
    workflow: failure.workflow,
    originalPhase: failure.phase,
    fallbackPhase: fallbackPhase.name,
    timestamp: new Date().toISOString()
  });

  saveState(state);

  // Telemetry is emitted after the state has been persisted.
  emitWorkflowTelemetry('fallback_phase_applied', {
    workflow: failure.workflow,
    originalPhase: failure.phase,
    fallbackPhase: fallbackPhase.name,
    failureType: failure.type
  });

  return {
    success: true,
    workflow: failure.workflow,
    originalPhase: failure.phase,
    fallbackPhase: fallbackPhase,
    message: 'Fallback phase "' + fallbackPhase.name + '" applied after "' + failure.phase + '"'
  };
}
|
|
3553
|
+
|
|
3554
|
+
/**
 * Check if remediation retries are exhausted for a failure.
 *
 * Reads the per-failure attempt counter from state.remediationAttempts and
 * compares it against the remediation path's maxRetries (default 2, also
 * used when no remediation path exists for the failure type).
 *
 * @param {string} failureId - The failure ID
 * @returns {object} - Retry status { maxRetries, attempts, retriesRemaining, exhausted, recommendation }
 */
function checkRemediationRetries(failureId) {
  const state = loadState();

  const failure = state.workflowFailures
    ? state.workflowFailures.find((f) => f.id === failureId)
    : null;
  if (!failure) {
    return { success: false, error: 'Failure not found' };
  }

  const remediation = REMEDIATION_PATHS[failure.type + '-failure'];
  const maxRetries = remediation ? (remediation.maxRetries || 2) : 2;

  // Attempts recorded so far for this specific failure.
  const attempts = (state.remediationAttempts && state.remediationAttempts[failureId]) || 0;
  const remaining = maxRetries - attempts;
  const exhausted = remaining <= 0;

  return {
    success: true,
    failureId,
    failureType: failure.type,
    maxRetries,
    attempts,
    retriesRemaining: remaining,
    exhausted,
    recommendation: exhausted
      ? 'Consider applying fallback phase or escalating'
      : remaining + ' retry attempt(s) remaining'
  };
}
|
|
3589
|
+
|
|
3590
|
+
/**
 * Record a remediation attempt for a failure.
 *
 * Increments the per-failure attempt counter, appends an entry to the
 * remediation history (capped at the last 200 entries), persists state,
 * and returns the fresh retry status.
 *
 * @param {string} failureId - The failure ID
 * @param {object} result - Result of the attempt { success, details }
 * @returns {object} - { success, failureId, attemptNumber, retryStatus }
 */
function recordRemediationAttempt(failureId, result) {
  const outcome = result || {};
  const state = loadState();

  // Bump the attempt counter for this failure.
  state.remediationAttempts = state.remediationAttempts || {};
  const attempts = state.remediationAttempts;
  attempts[failureId] = (attempts[failureId] || 0) + 1;

  state.remediationHistory = state.remediationHistory || [];
  state.remediationHistory.push({
    failureId,
    attemptNumber: attempts[failureId],
    success: outcome.success || false,
    details: outcome.details,
    timestamp: new Date().toISOString()
  });

  // Keep history manageable (last 200 entries only).
  if (state.remediationHistory.length > 200) {
    state.remediationHistory = state.remediationHistory.slice(-200);
  }

  saveState(state);

  return {
    success: true,
    failureId,
    attemptNumber: attempts[failureId],
    // checkRemediationRetries reloads persisted state, hence saveState first.
    retryStatus: checkRemediationRetries(failureId)
  };
}
|
|
3625
|
+
|
|
3626
|
+
/**
 * Get adaptive workflow recommendations based on failure patterns.
 *
 * Combines getPreventiveActions() and getFailureAnalytics() into
 * immediate / short-term / long-term recommendation buckets, with an
 * optional workflow-specific entry.
 *
 * @param {string} [workflow] - Optional specific workflow to focus on
 * @returns {object} - { immediate, shortTerm, longTerm, workflowSpecific? }
 */
function getAdaptiveRecommendations(workflow) {
  // Normalize without reassigning the parameter (file-wide idiom cleanup).
  const targetWorkflow = workflow || null;
  const preventive = getPreventiveActions(targetWorkflow);
  const analytics = getFailureAnalytics();

  const recommendations = {
    immediate: [],
    shortTerm: [],
    longTerm: []
  };

  // Immediate: spike of recent failures (more than 5 in the last week).
  if (analytics.recentTrend.failures > 5) {
    recommendations.immediate.push({
      priority: 'high',
      action: 'Review recent failure patterns',
      reason: analytics.recentTrend.failures + ' failures in the last 7 days',
      details: analytics.recentTrend.byType
    });
  }

  // Immediate: any critical-severity failure type gets its own entry.
  const criticalTypes = analytics.topFailureTypes.filter((t) => t.severity === 'critical');
  for (const critical of criticalTypes) {
    const remediation = REMEDIATION_PATHS[critical.type + '-failure'];
    recommendations.immediate.push({
      priority: 'critical',
      action: 'Address ' + critical.type + ' failures',
      reason: critical.count + ' critical failures recorded',
      preventiveActions: remediation ? remediation.preventiveActions : []
    });
  }

  // Short-term: the three most frequent failure types with 3+ occurrences.
  for (const failureType of analytics.topFailureTypes.slice(0, 3)) {
    if (failureType.count >= 3) {
      recommendations.shortTerm.push({
        priority: failureType.severity === 'high' ? 'high' : 'medium',
        action: 'Reduce ' + failureType.type + ' failures',
        reason: failureType.percentage + '% of all failures',
        preventiveActions: preventive.high.concat(preventive.medium).slice(0, 3)
      });
    }
  }

  // Long-term: the two most problematic phases, once chronic (5+ failures).
  for (const phase of analytics.problematicPhases.slice(0, 2)) {
    if (phase.failureCount >= 5) {
      recommendations.longTerm.push({
        priority: 'medium',
        action: 'Review and strengthen "' + phase.phase + '" phase',
        reason: phase.failureCount + ' total failures in this phase',
        suggestion: 'Consider adding checkpoints, extending duration, or splitting into smaller phases'
      });
    }
  }

  // Workflow-specific recommendation when one was requested and has history.
  if (targetWorkflow) {
    const wfData = analytics.problematicWorkflows.find((w) => w.workflow === targetWorkflow);
    if (wfData) {
      recommendations.workflowSpecific = {
        workflow: targetWorkflow,
        failureCount: wfData.failureCount,
        dominantIssue: wfData.dominantIssue,
        recommendation: 'Focus on ' + wfData.dominantIssue + ' issues which are the main cause of failures in this workflow'
      };
    }
  }

  return recommendations;
}
|
|
3709
|
+
|
|
3710
|
+
/**
 * Define workflow with fallback phases
 * Enhanced workflow definition with built-in fallbacks.
 * @param {string} name - Workflow name (registry key)
 * @param {object} definition - Workflow definition with fallback phases
 * @returns {object} - { success, ... } result; error string on invalid input
 */
function defineWorkflowWithFallbacks(name, definition) {
  // A usable definition needs at least a display name and a phase list.
  if (!definition.name || !definition.phases) {
    return { success: false, error: 'Invalid workflow definition' };
  }

  // Every declared fallback must map to a known remediation path.
  for (const phase of definition.phases) {
    if (!phase.fallbackOnFailure) continue;
    const remediationKey = `${phase.fallbackOnFailure}-failure`;
    if (!REMEDIATION_PATHS[remediationKey]) {
      return {
        success: false,
        error: `Invalid fallback type "${phase.fallbackOnFailure}" for phase "${phase.name}"`
      };
    }
  }

  // Register (or replace) the workflow in the module-level registry.
  WORKFLOWS[name] = {
    name: definition.name,
    description: definition.description,
    phases: definition.phases,
    outcomes: definition.outcomes,
    completionSignals: definition.completionSignals,
    tier: definition.tier || 'free',
    pack: definition.pack || null,
    hasAdaptiveFallbacks: true
  };

  const phasesWithFallbacks = definition.phases.filter((p) => p.fallbackOnFailure).length;

  return {
    success: true,
    workflow: name,
    phases: definition.phases.length,
    phasesWithFallbacks
  };
}
|
|
3755
|
+
|
|
3756
|
+
/**
 * Clear failure patterns (for testing or reset)
 * @param {object} options - { keepRecent: number } Keep last N timeline entries
 * @returns {object} - { success, message }
 */
function clearFailurePatterns(options) {
  options = options || {};
  const state = loadState();

  if (options.keepRecent) {
    // Keep only the most recent N timeline entries.
    state.failurePatterns = state.failurePatterns || { timeline: [] };
    // Guard: an existing failurePatterns object may lack a timeline array,
    // which previously crashed on .slice().
    state.failurePatterns.timeline =
      (state.failurePatterns.timeline || []).slice(-options.keepRecent);
    // Reset aggregate stats; they no longer match the truncated timeline.
    state.failurePatterns.byType = {};
    state.failurePatterns.byWorkflow = {};
    state.failurePatterns.byPhase = {};
  } else {
    // Clear everything
    state.failurePatterns = {
      byType: {},
      byWorkflow: {},
      byPhase: {},
      timeline: []
    };
  }

  // Guard: history may be absent on a freshly-initialized state object.
  state.history = state.history || [];
  state.history.push({
    action: 'failure_patterns_cleared',
    keepRecent: options.keepRecent || 0,
    timestamp: new Date().toISOString()
  });

  saveState(state);

  return { success: true, message: 'Failure patterns cleared' };
}
|
|
3792
|
+
|
|
948
3793
|
// Public API of this module: workflow lifecycle, pause/resume, failure
// handling and adaptive remediation, plus shared constants.
module.exports = {
  // Core workflow lifecycle
  analyzeContext,
  getWorkflow,
  listWorkflows,
  startWorkflow,
  advanceWorkflow,
  // Pause/Resume functionality
  pauseWorkflow,
  resumeWorkflow,
  isWorkflowPaused,
  getPauseState,
  // Failure handling
  reportPhaseFailure,
  startRemediation,
  advanceRemediation,
  getWorkflowFailures,
  markWorkflowCheckpoint,
  getWorkflowSignalProgress,
  loadState,
@@ -958,7 +3813,47 @@ module.exports = {
|
|
|
958
3813
|
  // Phase/agent introspection
  getCurrentPhase,
  agentExists,
  getAvailableAgents,
  // PRD-Workflow Integration
  linkPrdToWorkflow,
  onPrdStoryComplete,
  getPrdWorkflowStatus,
  getPhaseStories,
  reassignStoryToPhase,
  unlinkPrdFromWorkflow,
  getLinkedPrds,
  analyzeStoryForPhase,
  // Parallel execution
  startParallelPhases,
  completeParallelPhase,
  getParallelExecutionStatus,
  cancelParallelExecution,
  isParallelPhase,
  defineWorkflowWithParallel,
  failParallelPhase,
  retryParallelPhase,
  updateParallelPhaseProgress,
  skipFailedParallelPhases,
  retryAllFailedPhases,
  // Adaptive Workflows
  detectFailureType,
  triggerAdaptiveRemediation,
  trackFailurePattern,
  getPreventiveActions,
  getFailureAnalytics,
  applyFallbackPhase,
  checkRemediationRetries,
  recordRemediationAttempt,
  getAdaptiveRecommendations,
  defineWorkflowWithFallbacks,
  clearFailurePatterns,
  // Utilities
  formatDuration,
  // Constants
  PHASE_AGENTS,
  TECHNICAL_TRIGGERS,
  WORKFLOWS,
  REMEDIATION_PATHS,
  FAILURE_SIGNATURES,
  SEVERITY_WEIGHTS,
  PARALLEL_FAILURE_STRATEGIES
};