erosolar-cli 2.1.249 → 2.1.253
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/agents/general.rules.json +10 -133
- package/agents/general.rules.json.bak +278 -0
- package/agents/general.rules.json.bak2 +306 -0
- package/dist/bin/erosolar.js +9 -5
- package/dist/bin/erosolar.js.map +1 -1
- package/dist/capabilities/bidirectionalAuditCapability.d.ts +26 -0
- package/dist/capabilities/bidirectionalAuditCapability.d.ts.map +1 -0
- package/dist/capabilities/bidirectionalAuditCapability.js +44 -0
- package/dist/capabilities/bidirectionalAuditCapability.js.map +1 -0
- package/dist/capabilities/globCapability.d.ts +3 -6
- package/dist/capabilities/globCapability.d.ts.map +1 -1
- package/dist/capabilities/globCapability.js +6 -10
- package/dist/capabilities/globCapability.js.map +1 -1
- package/dist/capabilities/index.d.ts +1 -18
- package/dist/capabilities/index.d.ts.map +1 -1
- package/dist/capabilities/index.js +1 -18
- package/dist/capabilities/index.js.map +1 -1
- package/dist/capabilities/orchestrationCapability.d.ts +2 -0
- package/dist/capabilities/orchestrationCapability.d.ts.map +1 -1
- package/dist/capabilities/orchestrationCapability.js +980 -2
- package/dist/capabilities/orchestrationCapability.js.map +1 -1
- package/dist/capabilities/searchCapability.d.ts +8 -2
- package/dist/capabilities/searchCapability.d.ts.map +1 -1
- package/dist/capabilities/searchCapability.js +11 -6
- package/dist/capabilities/searchCapability.js.map +1 -1
- package/dist/contracts/tools.schema.json +9 -133
- package/dist/core/aiErrorFixer.d.ts +1 -14
- package/dist/core/aiErrorFixer.d.ts.map +1 -1
- package/dist/core/aiErrorFixer.js +51 -239
- package/dist/core/aiErrorFixer.js.map +1 -1
- package/dist/core/alphaZeroEngine.d.ts +16 -256
- package/dist/core/alphaZeroEngine.d.ts.map +1 -1
- package/dist/core/alphaZeroEngine.js +22 -513
- package/dist/core/alphaZeroEngine.js.map +1 -1
- package/dist/core/completeAttackOrchestrator.d.ts +102 -0
- package/dist/core/completeAttackOrchestrator.d.ts.map +1 -0
- package/dist/core/completeAttackOrchestrator.js +293 -0
- package/dist/core/completeAttackOrchestrator.js.map +1 -0
- package/dist/core/defensiveSecurityToolkit.d.ts +373 -0
- package/dist/core/defensiveSecurityToolkit.d.ts.map +1 -0
- package/dist/core/defensiveSecurityToolkit.js +1304 -0
- package/dist/core/defensiveSecurityToolkit.js.map +1 -0
- package/dist/core/errors/errorTypes.d.ts +30 -57
- package/dist/core/errors/errorTypes.d.ts.map +1 -1
- package/dist/core/errors/errorTypes.js +51 -228
- package/dist/core/errors/errorTypes.js.map +1 -1
- package/dist/core/errors/safetyValidator.d.ts +19 -3
- package/dist/core/errors/safetyValidator.d.ts.map +1 -1
- package/dist/core/errors/safetyValidator.js +33 -71
- package/dist/core/errors/safetyValidator.js.map +1 -1
- package/dist/core/failureRecovery.d.ts +4 -100
- package/dist/core/failureRecovery.d.ts.map +1 -1
- package/dist/core/failureRecovery.js +16 -440
- package/dist/core/failureRecovery.js.map +1 -1
- package/dist/core/intelligentTargetResearcher.d.ts +142 -0
- package/dist/core/intelligentTargetResearcher.d.ts.map +1 -0
- package/dist/core/intelligentTargetResearcher.js +367 -0
- package/dist/core/intelligentTargetResearcher.js.map +1 -0
- package/dist/core/intelligentTestFlows.d.ts +26 -107
- package/dist/core/intelligentTestFlows.d.ts.map +1 -1
- package/dist/core/intelligentTestFlows.js +15 -659
- package/dist/core/intelligentTestFlows.js.map +1 -1
- package/dist/core/learningPersistence.d.ts +45 -132
- package/dist/core/learningPersistence.d.ts.map +1 -1
- package/dist/core/learningPersistence.js +32 -463
- package/dist/core/learningPersistence.js.map +1 -1
- package/dist/core/metricsTracker.d.ts +22 -139
- package/dist/core/metricsTracker.d.ts.map +1 -1
- package/dist/core/metricsTracker.js +51 -241
- package/dist/core/metricsTracker.js.map +1 -1
- package/dist/core/performanceMonitor.d.ts +15 -109
- package/dist/core/performanceMonitor.d.ts.map +1 -1
- package/dist/core/performanceMonitor.js +27 -184
- package/dist/core/performanceMonitor.js.map +1 -1
- package/dist/core/reliabilityPrompt.d.ts.map +1 -1
- package/dist/core/reliabilityPrompt.js +14 -0
- package/dist/core/reliabilityPrompt.js.map +1 -1
- package/dist/core/resultVerification.d.ts +6 -100
- package/dist/core/resultVerification.d.ts.map +1 -1
- package/dist/core/resultVerification.js +31 -400
- package/dist/core/resultVerification.js.map +1 -1
- package/dist/core/selfEvolution.d.ts +32 -126
- package/dist/core/selfEvolution.d.ts.map +1 -1
- package/dist/core/selfEvolution.js +24 -967
- package/dist/core/selfEvolution.js.map +1 -1
- package/dist/core/selfImprovement.d.ts +50 -109
- package/dist/core/selfImprovement.d.ts.map +1 -1
- package/dist/core/selfImprovement.js +14 -689
- package/dist/core/selfImprovement.js.map +1 -1
- package/dist/core/sourceCodeManager.d.ts +89 -0
- package/dist/core/sourceCodeManager.d.ts.map +1 -0
- package/dist/core/sourceCodeManager.js +332 -0
- package/dist/core/sourceCodeManager.js.map +1 -0
- package/dist/core/unifiedOrchestrator.d.ts +88 -0
- package/dist/core/unifiedOrchestrator.d.ts.map +1 -0
- package/dist/core/unifiedOrchestrator.js +284 -0
- package/dist/core/unifiedOrchestrator.js.map +1 -0
- package/dist/core/userDefenseOrchestrator.d.ts +202 -0
- package/dist/core/userDefenseOrchestrator.d.ts.map +1 -0
- package/dist/core/userDefenseOrchestrator.js +1006 -0
- package/dist/core/userDefenseOrchestrator.js.map +1 -0
- package/dist/plugins/index.d.ts +1 -1
- package/dist/plugins/index.d.ts.map +1 -1
- package/dist/plugins/index.js +36 -26
- package/dist/plugins/index.js.map +1 -1
- package/dist/plugins/tools/bidirectionalAudit/bidirectionalAuditPlugin.d.ts +8 -0
- package/dist/plugins/tools/bidirectionalAudit/bidirectionalAuditPlugin.d.ts.map +1 -0
- package/dist/plugins/tools/bidirectionalAudit/bidirectionalAuditPlugin.js +17 -0
- package/dist/plugins/tools/bidirectionalAudit/bidirectionalAuditPlugin.js.map +1 -0
- package/dist/plugins/tools/nodeDefaults.d.ts +14 -0
- package/dist/plugins/tools/nodeDefaults.d.ts.map +1 -1
- package/dist/plugins/tools/nodeDefaults.js +17 -54
- package/dist/plugins/tools/nodeDefaults.js.map +1 -1
- package/dist/plugins/tools/orchestration/orchestrationPlugin.d.ts +9 -0
- package/dist/plugins/tools/orchestration/orchestrationPlugin.d.ts.map +1 -0
- package/dist/plugins/tools/orchestration/orchestrationPlugin.js +18 -0
- package/dist/plugins/tools/orchestration/orchestrationPlugin.js.map +1 -0
- package/dist/shell/interactiveShell.d.ts +97 -2
- package/dist/shell/interactiveShell.d.ts.map +1 -1
- package/dist/shell/interactiveShell.js +1001 -6
- package/dist/shell/interactiveShell.js.map +1 -1
- package/dist/tools/appleExposureTools.d.ts +108 -0
- package/dist/tools/appleExposureTools.d.ts.map +1 -0
- package/dist/tools/appleExposureTools.js +850 -0
- package/dist/tools/appleExposureTools.js.map +1 -0
- package/dist/tools/bidirectionalAuditTools.d.ts +104 -0
- package/dist/tools/bidirectionalAuditTools.d.ts.map +1 -0
- package/dist/tools/bidirectionalAuditTools.js +1280 -0
- package/dist/tools/bidirectionalAuditTools.js.map +1 -0
- package/dist/tools/defensiveSecurityTools.d.ts +152 -0
- package/dist/tools/defensiveSecurityTools.d.ts.map +1 -0
- package/dist/tools/defensiveSecurityTools.js +576 -0
- package/dist/tools/defensiveSecurityTools.js.map +1 -0
- package/dist/tools/forwardAttackChainTracer.d.ts +73 -0
- package/dist/tools/forwardAttackChainTracer.d.ts.map +1 -0
- package/dist/tools/forwardAttackChainTracer.js +604 -0
- package/dist/tools/forwardAttackChainTracer.js.map +1 -0
- package/dist/tools/localExplore.d.ts +12 -199
- package/dist/tools/localExplore.d.ts.map +1 -1
- package/dist/tools/localExplore.js +18 -1352
- package/dist/tools/localExplore.js.map +1 -1
- package/dist/tools/offensiveTransparencyTools.d.ts +188 -0
- package/dist/tools/offensiveTransparencyTools.d.ts.map +1 -0
- package/dist/tools/offensiveTransparencyTools.js +890 -0
- package/dist/tools/offensiveTransparencyTools.js.map +1 -0
- package/dist/tools/planningTools.d.ts +8 -17
- package/dist/tools/planningTools.d.ts.map +1 -1
- package/dist/tools/planningTools.js +31 -141
- package/dist/tools/planningTools.js.map +1 -1
- package/dist/tools/searchTools.d.ts +9 -0
- package/dist/tools/searchTools.d.ts.map +1 -1
- package/dist/tools/searchTools.js +305 -189
- package/dist/tools/searchTools.js.map +1 -1
- package/dist/tools/skillTools.d.ts +7 -5
- package/dist/tools/skillTools.d.ts.map +1 -1
- package/dist/tools/skillTools.js +13 -155
- package/dist/tools/skillTools.js.map +1 -1
- package/dist/tools/threatIntelligenceTools.d.ts +128 -0
- package/dist/tools/threatIntelligenceTools.d.ts.map +1 -0
- package/dist/tools/threatIntelligenceTools.js +712 -0
- package/dist/tools/threatIntelligenceTools.js.map +1 -0
- package/dist/ui/PromptController.d.ts +4 -0
- package/dist/ui/PromptController.d.ts.map +1 -1
- package/dist/ui/PromptController.js +32 -11
- package/dist/ui/PromptController.js.map +1 -1
- package/dist/ui/UnifiedUIRenderer.d.ts +20 -0
- package/dist/ui/UnifiedUIRenderer.d.ts.map +1 -1
- package/dist/ui/UnifiedUIRenderer.js +235 -28
- package/dist/ui/UnifiedUIRenderer.js.map +1 -1
- package/dist/ui/animatedStatus.d.ts +2 -0
- package/dist/ui/animatedStatus.d.ts.map +1 -1
- package/dist/ui/animatedStatus.js +36 -2
- package/dist/ui/animatedStatus.js.map +1 -1
- package/dist/ui/orchestration/StatusOrchestrator.d.ts +10 -0
- package/dist/ui/orchestration/StatusOrchestrator.d.ts.map +1 -1
- package/dist/ui/orchestration/StatusOrchestrator.js +36 -4
- package/dist/ui/orchestration/StatusOrchestrator.js.map +1 -1
- package/package.json +1 -1
- package/dist/capabilities/advancedTestGenerationCapability.d.ts +0 -17
- package/dist/capabilities/advancedTestGenerationCapability.d.ts.map +0 -1
- package/dist/capabilities/advancedTestGenerationCapability.js +0 -28
- package/dist/capabilities/advancedTestGenerationCapability.js.map +0 -1
- package/dist/capabilities/browserAutomationCapability.d.ts +0 -37
- package/dist/capabilities/browserAutomationCapability.d.ts.map +0 -1
- package/dist/capabilities/browserAutomationCapability.js +0 -49
- package/dist/capabilities/browserAutomationCapability.js.map +0 -1
- package/dist/capabilities/buildCapability.d.ts +0 -24
- package/dist/capabilities/buildCapability.d.ts.map +0 -1
- package/dist/capabilities/buildCapability.js +0 -25
- package/dist/capabilities/buildCapability.js.map +0 -1
- package/dist/capabilities/cloudCapability.d.ts +0 -13
- package/dist/capabilities/cloudCapability.d.ts.map +0 -1
- package/dist/capabilities/cloudCapability.js +0 -38
- package/dist/capabilities/cloudCapability.js.map +0 -1
- package/dist/capabilities/codeAnalysisCapability.d.ts +0 -13
- package/dist/capabilities/codeAnalysisCapability.d.ts.map +0 -1
- package/dist/capabilities/codeAnalysisCapability.js +0 -24
- package/dist/capabilities/codeAnalysisCapability.js.map +0 -1
- package/dist/capabilities/codeQualityCapability.d.ts +0 -13
- package/dist/capabilities/codeQualityCapability.d.ts.map +0 -1
- package/dist/capabilities/codeQualityCapability.js +0 -25
- package/dist/capabilities/codeQualityCapability.js.map +0 -1
- package/dist/capabilities/dependencySecurityCapability.d.ts +0 -13
- package/dist/capabilities/dependencySecurityCapability.d.ts.map +0 -1
- package/dist/capabilities/dependencySecurityCapability.js +0 -24
- package/dist/capabilities/dependencySecurityCapability.js.map +0 -1
- package/dist/capabilities/devCapability.d.ts +0 -13
- package/dist/capabilities/devCapability.d.ts.map +0 -1
- package/dist/capabilities/devCapability.js +0 -24
- package/dist/capabilities/devCapability.js.map +0 -1
- package/dist/capabilities/emailCapability.d.ts +0 -12
- package/dist/capabilities/emailCapability.d.ts.map +0 -1
- package/dist/capabilities/emailCapability.js +0 -22
- package/dist/capabilities/emailCapability.js.map +0 -1
- package/dist/capabilities/enhancedAnalysisCapability.d.ts +0 -13
- package/dist/capabilities/enhancedAnalysisCapability.d.ts.map +0 -1
- package/dist/capabilities/enhancedAnalysisCapability.js +0 -20
- package/dist/capabilities/enhancedAnalysisCapability.js.map +0 -1
- package/dist/capabilities/enhancedCodeIntelligenceCapability.d.ts +0 -17
- package/dist/capabilities/enhancedCodeIntelligenceCapability.d.ts.map +0 -1
- package/dist/capabilities/enhancedCodeIntelligenceCapability.js +0 -28
- package/dist/capabilities/enhancedCodeIntelligenceCapability.js.map +0 -1
- package/dist/capabilities/enhancedDevWorkflowCapability.d.ts +0 -17
- package/dist/capabilities/enhancedDevWorkflowCapability.d.ts.map +0 -1
- package/dist/capabilities/enhancedDevWorkflowCapability.js +0 -28
- package/dist/capabilities/enhancedDevWorkflowCapability.js.map +0 -1
- package/dist/capabilities/frontendTestingCapability.d.ts +0 -13
- package/dist/capabilities/frontendTestingCapability.d.ts.map +0 -1
- package/dist/capabilities/frontendTestingCapability.js +0 -28
- package/dist/capabilities/frontendTestingCapability.js.map +0 -1
- package/dist/capabilities/interactionCapability.d.ts +0 -12
- package/dist/capabilities/interactionCapability.d.ts.map +0 -1
- package/dist/capabilities/interactionCapability.js +0 -22
- package/dist/capabilities/interactionCapability.js.map +0 -1
- package/dist/capabilities/learnCapability.d.ts +0 -22
- package/dist/capabilities/learnCapability.d.ts.map +0 -1
- package/dist/capabilities/learnCapability.js +0 -37
- package/dist/capabilities/learnCapability.js.map +0 -1
- package/dist/capabilities/notebookCapability.d.ts +0 -17
- package/dist/capabilities/notebookCapability.d.ts.map +0 -1
- package/dist/capabilities/notebookCapability.js +0 -27
- package/dist/capabilities/notebookCapability.js.map +0 -1
- package/dist/capabilities/planningCapability.d.ts +0 -16
- package/dist/capabilities/planningCapability.d.ts.map +0 -1
- package/dist/capabilities/planningCapability.js +0 -26
- package/dist/capabilities/planningCapability.js.map +0 -1
- package/dist/capabilities/refactoringCapability.d.ts +0 -13
- package/dist/capabilities/refactoringCapability.d.ts.map +0 -1
- package/dist/capabilities/refactoringCapability.js +0 -25
- package/dist/capabilities/refactoringCapability.js.map +0 -1
- package/dist/capabilities/repoChecksCapability.d.ts +0 -10
- package/dist/capabilities/repoChecksCapability.d.ts.map +0 -1
- package/dist/capabilities/repoChecksCapability.js +0 -24
- package/dist/capabilities/repoChecksCapability.js.map +0 -1
- package/dist/capabilities/taskManagementCapability.d.ts +0 -12
- package/dist/capabilities/taskManagementCapability.d.ts.map +0 -1
- package/dist/capabilities/taskManagementCapability.js +0 -22
- package/dist/capabilities/taskManagementCapability.js.map +0 -1
- package/dist/capabilities/testingCapability.d.ts +0 -13
- package/dist/capabilities/testingCapability.d.ts.map +0 -1
- package/dist/capabilities/testingCapability.js +0 -25
- package/dist/capabilities/testingCapability.js.map +0 -1
- package/dist/capabilities/validationCapability.d.ts +0 -13
- package/dist/capabilities/validationCapability.d.ts.map +0 -1
- package/dist/capabilities/validationCapability.js +0 -24
- package/dist/capabilities/validationCapability.js.map +0 -1
- package/dist/capabilities/webCapability.d.ts +0 -12
- package/dist/capabilities/webCapability.d.ts.map +0 -1
- package/dist/capabilities/webCapability.js +0 -22
- package/dist/capabilities/webCapability.js.map +0 -1
- package/dist/core/deepBugAnalyzer.d.ts +0 -128
- package/dist/core/deepBugAnalyzer.d.ts.map +0 -1
- package/dist/core/deepBugAnalyzer.js +0 -406
- package/dist/core/deepBugAnalyzer.js.map +0 -1
- package/dist/core/hypothesisEngine.d.ts +0 -113
- package/dist/core/hypothesisEngine.d.ts.map +0 -1
- package/dist/core/hypothesisEngine.js +0 -264
- package/dist/core/hypothesisEngine.js.map +0 -1
- package/dist/core/productTestHarness.d.ts +0 -113
- package/dist/core/productTestHarness.d.ts.map +0 -1
- package/dist/core/productTestHarness.js +0 -351
- package/dist/core/productTestHarness.js.map +0 -1
- package/dist/core/validationRunner.d.ts +0 -106
- package/dist/core/validationRunner.d.ts.map +0 -1
- package/dist/core/validationRunner.js +0 -892
- package/dist/core/validationRunner.js.map +0 -1
- package/dist/plugins/tools/browser/browserAutomationPlugin.d.ts +0 -14
- package/dist/plugins/tools/browser/browserAutomationPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/browser/browserAutomationPlugin.js +0 -26
- package/dist/plugins/tools/browser/browserAutomationPlugin.js.map +0 -1
- package/dist/plugins/tools/checks/localRepoChecksPlugin.d.ts +0 -3
- package/dist/plugins/tools/checks/localRepoChecksPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/checks/localRepoChecksPlugin.js +0 -14
- package/dist/plugins/tools/checks/localRepoChecksPlugin.js.map +0 -1
- package/dist/plugins/tools/cloud/cloudPlugin.d.ts +0 -3
- package/dist/plugins/tools/cloud/cloudPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/cloud/cloudPlugin.js +0 -14
- package/dist/plugins/tools/cloud/cloudPlugin.js.map +0 -1
- package/dist/plugins/tools/codeAnalysis/codeAnalysisPlugin.d.ts +0 -3
- package/dist/plugins/tools/codeAnalysis/codeAnalysisPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/codeAnalysis/codeAnalysisPlugin.js +0 -14
- package/dist/plugins/tools/codeAnalysis/codeAnalysisPlugin.js.map +0 -1
- package/dist/plugins/tools/codeQuality/codeQualityPlugin.d.ts +0 -3
- package/dist/plugins/tools/codeQuality/codeQualityPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/codeQuality/codeQualityPlugin.js +0 -14
- package/dist/plugins/tools/codeQuality/codeQualityPlugin.js.map +0 -1
- package/dist/plugins/tools/dependency/dependencyPlugin.d.ts +0 -3
- package/dist/plugins/tools/dependency/dependencyPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/dependency/dependencyPlugin.js +0 -12
- package/dist/plugins/tools/dependency/dependencyPlugin.js.map +0 -1
- package/dist/plugins/tools/development/devPlugin.d.ts +0 -3
- package/dist/plugins/tools/development/devPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/development/devPlugin.js +0 -14
- package/dist/plugins/tools/development/devPlugin.js.map +0 -1
- package/dist/plugins/tools/email/emailPlugin.d.ts +0 -3
- package/dist/plugins/tools/email/emailPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/email/emailPlugin.js +0 -12
- package/dist/plugins/tools/email/emailPlugin.js.map +0 -1
- package/dist/plugins/tools/enhancedAnalysis/enhancedAnalysisPlugin.d.ts +0 -3
- package/dist/plugins/tools/enhancedAnalysis/enhancedAnalysisPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/enhancedAnalysis/enhancedAnalysisPlugin.js +0 -14
- package/dist/plugins/tools/enhancedAnalysis/enhancedAnalysisPlugin.js.map +0 -1
- package/dist/plugins/tools/enhancedCodeIntelligence/enhancedCodeIntelligencePlugin.d.ts +0 -3
- package/dist/plugins/tools/enhancedCodeIntelligence/enhancedCodeIntelligencePlugin.d.ts.map +0 -1
- package/dist/plugins/tools/enhancedCodeIntelligence/enhancedCodeIntelligencePlugin.js +0 -12
- package/dist/plugins/tools/enhancedCodeIntelligence/enhancedCodeIntelligencePlugin.js.map +0 -1
- package/dist/plugins/tools/enhancedDevWorkflow/enhancedDevWorkflowPlugin.d.ts +0 -3
- package/dist/plugins/tools/enhancedDevWorkflow/enhancedDevWorkflowPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/enhancedDevWorkflow/enhancedDevWorkflowPlugin.js +0 -12
- package/dist/plugins/tools/enhancedDevWorkflow/enhancedDevWorkflowPlugin.js.map +0 -1
- package/dist/plugins/tools/frontendTesting/frontendTestingPlugin.d.ts +0 -3
- package/dist/plugins/tools/frontendTesting/frontendTestingPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/frontendTesting/frontendTestingPlugin.js +0 -14
- package/dist/plugins/tools/frontendTesting/frontendTestingPlugin.js.map +0 -1
- package/dist/plugins/tools/interaction/interactionPlugin.d.ts +0 -3
- package/dist/plugins/tools/interaction/interactionPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/interaction/interactionPlugin.js +0 -12
- package/dist/plugins/tools/interaction/interactionPlugin.js.map +0 -1
- package/dist/plugins/tools/learn/learnPlugin.d.ts +0 -3
- package/dist/plugins/tools/learn/learnPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/learn/learnPlugin.js +0 -14
- package/dist/plugins/tools/learn/learnPlugin.js.map +0 -1
- package/dist/plugins/tools/notebook/notebookPlugin.d.ts +0 -9
- package/dist/plugins/tools/notebook/notebookPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/notebook/notebookPlugin.js +0 -15
- package/dist/plugins/tools/notebook/notebookPlugin.js.map +0 -1
- package/dist/plugins/tools/planning/planningPlugin.d.ts +0 -9
- package/dist/plugins/tools/planning/planningPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/planning/planningPlugin.js +0 -15
- package/dist/plugins/tools/planning/planningPlugin.js.map +0 -1
- package/dist/plugins/tools/refactoring/refactoringPlugin.d.ts +0 -3
- package/dist/plugins/tools/refactoring/refactoringPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/refactoring/refactoringPlugin.js +0 -12
- package/dist/plugins/tools/refactoring/refactoringPlugin.js.map +0 -1
- package/dist/plugins/tools/taskManagement/taskManagementPlugin.d.ts +0 -3
- package/dist/plugins/tools/taskManagement/taskManagementPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/taskManagement/taskManagementPlugin.js +0 -12
- package/dist/plugins/tools/taskManagement/taskManagementPlugin.js.map +0 -1
- package/dist/plugins/tools/testing/testingPlugin.d.ts +0 -3
- package/dist/plugins/tools/testing/testingPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/testing/testingPlugin.js +0 -12
- package/dist/plugins/tools/testing/testingPlugin.js.map +0 -1
- package/dist/plugins/tools/validation/validationPlugin.d.ts +0 -3
- package/dist/plugins/tools/validation/validationPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/validation/validationPlugin.js +0 -14
- package/dist/plugins/tools/validation/validationPlugin.js.map +0 -1
- package/dist/plugins/tools/web/webPlugin.d.ts +0 -3
- package/dist/plugins/tools/web/webPlugin.d.ts.map +0 -1
- package/dist/plugins/tools/web/webPlugin.js +0 -12
- package/dist/plugins/tools/web/webPlugin.js.map +0 -1
- package/dist/tools/advancedTestGenerationTools.d.ts +0 -21
- package/dist/tools/advancedTestGenerationTools.d.ts.map +0 -1
- package/dist/tools/advancedTestGenerationTools.js +0 -304
- package/dist/tools/advancedTestGenerationTools.js.map +0 -1
- package/dist/tools/browserAutomationTools.d.ts +0 -23
- package/dist/tools/browserAutomationTools.d.ts.map +0 -1
- package/dist/tools/browserAutomationTools.js +0 -916
- package/dist/tools/browserAutomationTools.js.map +0 -1
- package/dist/tools/buildTools.d.ts +0 -9
- package/dist/tools/buildTools.d.ts.map +0 -1
- package/dist/tools/buildTools.js +0 -346
- package/dist/tools/buildTools.js.map +0 -1
- package/dist/tools/cloudTools.d.ts +0 -49
- package/dist/tools/cloudTools.d.ts.map +0 -1
- package/dist/tools/cloudTools.js +0 -1258
- package/dist/tools/cloudTools.js.map +0 -1
- package/dist/tools/codeAnalysisTools.d.ts +0 -74
- package/dist/tools/codeAnalysisTools.d.ts.map +0 -1
- package/dist/tools/codeAnalysisTools.js +0 -664
- package/dist/tools/codeAnalysisTools.js.map +0 -1
- package/dist/tools/codeGenerationTools.d.ts +0 -3
- package/dist/tools/codeGenerationTools.d.ts.map +0 -1
- package/dist/tools/codeGenerationTools.js +0 -439
- package/dist/tools/codeGenerationTools.js.map +0 -1
- package/dist/tools/codeQualityTools.d.ts +0 -3
- package/dist/tools/codeQualityTools.d.ts.map +0 -1
- package/dist/tools/codeQualityTools.js +0 -297
- package/dist/tools/codeQualityTools.js.map +0 -1
- package/dist/tools/dependencyTools.d.ts +0 -3
- package/dist/tools/dependencyTools.d.ts.map +0 -1
- package/dist/tools/dependencyTools.js +0 -284
- package/dist/tools/dependencyTools.js.map +0 -1
- package/dist/tools/devTools.d.ts +0 -10
- package/dist/tools/devTools.d.ts.map +0 -1
- package/dist/tools/devTools.js +0 -2126
- package/dist/tools/devTools.js.map +0 -1
- package/dist/tools/emailTools.d.ts +0 -21
- package/dist/tools/emailTools.d.ts.map +0 -1
- package/dist/tools/emailTools.js +0 -449
- package/dist/tools/emailTools.js.map +0 -1
- package/dist/tools/enhancedAnalysisTools.d.ts +0 -9
- package/dist/tools/enhancedAnalysisTools.d.ts.map +0 -1
- package/dist/tools/enhancedAnalysisTools.js +0 -370
- package/dist/tools/enhancedAnalysisTools.js.map +0 -1
- package/dist/tools/enhancedCodeIntelligenceTools.d.ts +0 -7
- package/dist/tools/enhancedCodeIntelligenceTools.d.ts.map +0 -1
- package/dist/tools/enhancedCodeIntelligenceTools.js +0 -540
- package/dist/tools/enhancedCodeIntelligenceTools.js.map +0 -1
- package/dist/tools/enhancedDevWorkflowTools.d.ts +0 -7
- package/dist/tools/enhancedDevWorkflowTools.d.ts.map +0 -1
- package/dist/tools/enhancedDevWorkflowTools.js +0 -432
- package/dist/tools/enhancedDevWorkflowTools.js.map +0 -1
- package/dist/tools/frontendTestingTools.d.ts +0 -35
- package/dist/tools/frontendTestingTools.d.ts.map +0 -1
- package/dist/tools/frontendTestingTools.js +0 -1258
- package/dist/tools/frontendTestingTools.js.map +0 -1
- package/dist/tools/globTools.d.ts +0 -15
- package/dist/tools/globTools.d.ts.map +0 -1
- package/dist/tools/globTools.js +0 -174
- package/dist/tools/globTools.js.map +0 -1
- package/dist/tools/grepTools.d.ts +0 -19
- package/dist/tools/grepTools.d.ts.map +0 -1
- package/dist/tools/grepTools.js +0 -411
- package/dist/tools/grepTools.js.map +0 -1
- package/dist/tools/interactionTools.d.ts +0 -6
- package/dist/tools/interactionTools.d.ts.map +0 -1
- package/dist/tools/interactionTools.js +0 -209
- package/dist/tools/interactionTools.js.map +0 -1
- package/dist/tools/learnTools.d.ts +0 -164
- package/dist/tools/learnTools.d.ts.map +0 -1
- package/dist/tools/learnTools.js +0 -2098
- package/dist/tools/learnTools.js.map +0 -1
- package/dist/tools/notebookEditTools.d.ts +0 -15
- package/dist/tools/notebookEditTools.d.ts.map +0 -1
- package/dist/tools/notebookEditTools.js +0 -197
- package/dist/tools/notebookEditTools.js.map +0 -1
- package/dist/tools/refactoringTools.d.ts +0 -3
- package/dist/tools/refactoringTools.d.ts.map +0 -1
- package/dist/tools/refactoringTools.js +0 -294
- package/dist/tools/refactoringTools.js.map +0 -1
- package/dist/tools/repoChecksTools.d.ts +0 -3
- package/dist/tools/repoChecksTools.d.ts.map +0 -1
- package/dist/tools/repoChecksTools.js +0 -276
- package/dist/tools/repoChecksTools.js.map +0 -1
- package/dist/tools/taskManagementTools.d.ts +0 -10
- package/dist/tools/taskManagementTools.d.ts.map +0 -1
- package/dist/tools/taskManagementTools.js +0 -133
- package/dist/tools/taskManagementTools.js.map +0 -1
- package/dist/tools/testingTools.d.ts +0 -3
- package/dist/tools/testingTools.d.ts.map +0 -1
- package/dist/tools/testingTools.js +0 -237
- package/dist/tools/testingTools.js.map +0 -1
- package/dist/tools/validationTools.d.ts +0 -7
- package/dist/tools/validationTools.d.ts.map +0 -1
- package/dist/tools/validationTools.js +0 -344
- package/dist/tools/validationTools.js.map +0 -1
- package/dist/tools/webTools.d.ts +0 -3
- package/dist/tools/webTools.d.ts.map +0 -1
- package/dist/tools/webTools.js +0 -502
- package/dist/tools/webTools.js.map +0 -1
package/dist/tools/devTools.js
DELETED
|
@@ -1,2126 +0,0 @@
|
|
|
1
|
-
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
2
|
-
import { exec } from 'node:child_process';
|
|
3
|
-
import { existsSync, readFileSync } from 'node:fs';
|
|
4
|
-
import { join } from 'node:path';
|
|
5
|
-
import { promisify } from 'node:util';
|
|
6
|
-
import { verifiedSuccess, verifiedFailure, createCommandCheck, OutputPatterns, analyzeOutput, } from '../core/resultVerification.js';
|
|
7
|
-
const execAsync = promisify(exec);
|
|
8
|
-
export function createDevTools(workingDir) {
|
|
9
|
-
return [
|
|
10
|
-
{
|
|
11
|
-
name: 'install_dependencies',
|
|
12
|
-
description: 'Install project dependencies using npm, yarn, or pnpm',
|
|
13
|
-
parameters: {
|
|
14
|
-
type: 'object',
|
|
15
|
-
properties: {
|
|
16
|
-
packageManager: {
|
|
17
|
-
type: 'string',
|
|
18
|
-
enum: ['npm', 'yarn', 'pnpm'],
|
|
19
|
-
description: 'Package manager to use (default: npm)',
|
|
20
|
-
},
|
|
21
|
-
production: {
|
|
22
|
-
type: 'boolean',
|
|
23
|
-
description: 'Install only production dependencies',
|
|
24
|
-
},
|
|
25
|
-
},
|
|
26
|
-
additionalProperties: false,
|
|
27
|
-
},
|
|
28
|
-
handler: async (args) => {
|
|
29
|
-
const packageManager = typeof args['packageManager'] === 'string' ? args['packageManager'] : 'npm';
|
|
30
|
-
const production = args['production'] === true;
|
|
31
|
-
const startTime = Date.now();
|
|
32
|
-
try {
|
|
33
|
-
let command;
|
|
34
|
-
if (packageManager === 'npm') {
|
|
35
|
-
// Use npm install instead of npm ci if no package-lock.json exists
|
|
36
|
-
const hasLockFile = existsSync(join(workingDir, 'package-lock.json'));
|
|
37
|
-
command = hasLockFile
|
|
38
|
-
? (production ? 'npm ci --production' : 'npm ci')
|
|
39
|
-
: (production ? 'npm install --production' : 'npm install');
|
|
40
|
-
}
|
|
41
|
-
else if (packageManager === 'yarn') {
|
|
42
|
-
command = production ? 'yarn install --production' : 'yarn install';
|
|
43
|
-
}
|
|
44
|
-
else if (packageManager === 'pnpm') {
|
|
45
|
-
command = production ? 'pnpm install --prod' : 'pnpm install';
|
|
46
|
-
}
|
|
47
|
-
else {
|
|
48
|
-
return verifiedFailure(`Unsupported package manager: ${packageManager}`, 'Supported package managers: npm, yarn, pnpm', ['Use npm, yarn, or pnpm as the packageManager']);
|
|
49
|
-
}
|
|
50
|
-
const { stdout, stderr } = await execAsync(command, {
|
|
51
|
-
cwd: workingDir,
|
|
52
|
-
timeout: 300000, // 5 minutes
|
|
53
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
54
|
-
});
|
|
55
|
-
const durationMs = Date.now() - startTime;
|
|
56
|
-
const combinedOutput = [stdout, stderr].filter(Boolean).join('\n');
|
|
57
|
-
const analysis = analyzeOutput(combinedOutput, OutputPatterns.npm, 0);
|
|
58
|
-
const commandCheck = createCommandCheck('Dependency installation', 0, combinedOutput);
|
|
59
|
-
if (analysis.isFailure) {
|
|
60
|
-
return verifiedFailure('Dependency installation had errors', `Command: ${command}\n\nOutput:\n${combinedOutput}`, ['Check for version conflicts', 'Clear npm cache and retry', 'Check network connectivity'], [commandCheck], durationMs);
|
|
61
|
-
}
|
|
62
|
-
return verifiedSuccess('Dependencies installed successfully', `Command: ${command}\n\nOutput:\n${combinedOutput || '(no output)'}`, [commandCheck], durationMs);
|
|
63
|
-
}
|
|
64
|
-
catch (error) {
|
|
65
|
-
const durationMs = Date.now() - startTime;
|
|
66
|
-
const combinedError = [error.stdout, error.stderr, error.message].filter(Boolean).join('\n');
|
|
67
|
-
if (error.killed) {
|
|
68
|
-
return verifiedFailure('Dependency installation timed out', `The installation was killed after exceeding 5 minute timeout.\nPartial output:\n${combinedError || '(none)'}`, ['Check network connectivity', 'Consider using run_in_background for slow connections'], [{ check: 'Timeout', passed: false, details: 'Exceeded 5 minutes' }], durationMs);
|
|
69
|
-
}
|
|
70
|
-
return verifiedFailure(`Dependency installation failed with exit code ${error.code ?? 1}`, `Error output:\n${combinedError || '(none)'}`, ['Review error message', 'Check package.json syntax', 'Clear npm cache: npm cache clean --force'], [createCommandCheck('Dependency installation', error.code ?? 1, combinedError)], durationMs);
|
|
71
|
-
}
|
|
72
|
-
},
|
|
73
|
-
},
|
|
74
|
-
{
|
|
75
|
-
name: 'npm_publish',
|
|
76
|
-
description: 'Publish package to npm registry. Handles the full workflow: check login, commit changes, bump version, build, and publish.',
|
|
77
|
-
parameters: {
|
|
78
|
-
type: 'object',
|
|
79
|
-
properties: {
|
|
80
|
-
versionBump: {
|
|
81
|
-
type: 'string',
|
|
82
|
-
enum: ['patch', 'minor', 'major', 'prepatch', 'preminor', 'premajor', 'prerelease'],
|
|
83
|
-
description: 'Version bump type (default: patch)',
|
|
84
|
-
},
|
|
85
|
-
commitMessage: {
|
|
86
|
-
type: 'string',
|
|
87
|
-
description: 'Commit message for uncommitted changes (if any). If not provided and there are uncommitted changes, will fail.',
|
|
88
|
-
},
|
|
89
|
-
tag: {
|
|
90
|
-
type: 'string',
|
|
91
|
-
description: 'npm tag for the release (default: latest)',
|
|
92
|
-
},
|
|
93
|
-
dryRun: {
|
|
94
|
-
type: 'boolean',
|
|
95
|
-
description: 'If true, performs all steps except the actual publish',
|
|
96
|
-
},
|
|
97
|
-
skipBuild: {
|
|
98
|
-
type: 'boolean',
|
|
99
|
-
description: 'Skip the build step (use if build is handled by npm prepare script)',
|
|
100
|
-
},
|
|
101
|
-
},
|
|
102
|
-
additionalProperties: false,
|
|
103
|
-
},
|
|
104
|
-
handler: async (args) => {
|
|
105
|
-
const versionBump = typeof args['versionBump'] === 'string' ? args['versionBump'] : 'patch';
|
|
106
|
-
const commitMessage = typeof args['commitMessage'] === 'string' ? args['commitMessage'] : undefined;
|
|
107
|
-
const tag = typeof args['tag'] === 'string' ? args['tag'] : 'latest';
|
|
108
|
-
const dryRun = args['dryRun'] === true;
|
|
109
|
-
const skipBuild = args['skipBuild'] === true;
|
|
110
|
-
const steps = [];
|
|
111
|
-
const errors = [];
|
|
112
|
-
try {
|
|
113
|
-
// Step 1: Check npm login status
|
|
114
|
-
steps.push('## Step 1: Checking npm login status');
|
|
115
|
-
try {
|
|
116
|
-
const { stdout: whoami } = await execAsync('npm whoami', { cwd: workingDir, timeout: 30000 });
|
|
117
|
-
steps.push(`✓ Logged in as: ${whoami.trim()}`);
|
|
118
|
-
}
|
|
119
|
-
catch (e) {
|
|
120
|
-
errors.push('✗ Not logged into npm. Run `npm login` first.');
|
|
121
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
122
|
-
}
|
|
123
|
-
// Step 2: Check for uncommitted changes
|
|
124
|
-
steps.push('');
|
|
125
|
-
steps.push('## Step 2: Checking git status');
|
|
126
|
-
try {
|
|
127
|
-
const { stdout: gitStatus } = await execAsync('git status --porcelain', { cwd: workingDir, timeout: 10000 });
|
|
128
|
-
if (gitStatus.trim()) {
|
|
129
|
-
if (commitMessage) {
|
|
130
|
-
steps.push(`Found uncommitted changes, committing with message: "${commitMessage}"`);
|
|
131
|
-
await execAsync('git add .', { cwd: workingDir, timeout: 30000 });
|
|
132
|
-
await execAsync(`git commit -m "${commitMessage.replace(/"/g, '\\"')}"`, { cwd: workingDir, timeout: 30000 });
|
|
133
|
-
steps.push('✓ Changes committed');
|
|
134
|
-
}
|
|
135
|
-
else {
|
|
136
|
-
errors.push('✗ Uncommitted changes found. Provide a commitMessage or commit manually first.');
|
|
137
|
-
errors.push('Changed files:');
|
|
138
|
-
gitStatus.trim().split('\n').forEach(line => errors.push(` ${line}`));
|
|
139
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
140
|
-
}
|
|
141
|
-
}
|
|
142
|
-
else {
|
|
143
|
-
steps.push('✓ Working directory clean');
|
|
144
|
-
}
|
|
145
|
-
}
|
|
146
|
-
catch (e) {
|
|
147
|
-
steps.push(`Warning: Could not check git status: ${e.message}`);
|
|
148
|
-
}
|
|
149
|
-
// Step 3: Read current version
|
|
150
|
-
steps.push('');
|
|
151
|
-
steps.push('## Step 3: Reading package info');
|
|
152
|
-
const packageJsonPath = join(workingDir, 'package.json');
|
|
153
|
-
if (!existsSync(packageJsonPath)) {
|
|
154
|
-
errors.push('✗ package.json not found');
|
|
155
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
156
|
-
}
|
|
157
|
-
const packageInfo = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
|
|
158
|
-
steps.push(`✓ Package: ${packageInfo.name} v${packageInfo.version}`);
|
|
159
|
-
// Step 4: Build (unless skipped)
|
|
160
|
-
if (!skipBuild) {
|
|
161
|
-
steps.push('');
|
|
162
|
-
steps.push('## Step 4: Building project');
|
|
163
|
-
try {
|
|
164
|
-
const { stderr: buildErr } = await execAsync('npm run build', {
|
|
165
|
-
cwd: workingDir,
|
|
166
|
-
timeout: 300000,
|
|
167
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
168
|
-
});
|
|
169
|
-
steps.push('✓ Build completed');
|
|
170
|
-
if (buildErr && !buildErr.includes('npm WARN')) {
|
|
171
|
-
steps.push(`Build stderr: ${buildErr.substring(0, 500)}`);
|
|
172
|
-
}
|
|
173
|
-
}
|
|
174
|
-
catch (e) {
|
|
175
|
-
errors.push(`✗ Build failed: ${e.message}`);
|
|
176
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
177
|
-
}
|
|
178
|
-
}
|
|
179
|
-
else {
|
|
180
|
-
steps.push('');
|
|
181
|
-
steps.push('## Step 4: Build skipped (skipBuild=true)');
|
|
182
|
-
}
|
|
183
|
-
// Step 5: Version bump
|
|
184
|
-
steps.push('');
|
|
185
|
-
steps.push('## Step 5: Bumping version');
|
|
186
|
-
try {
|
|
187
|
-
const { stdout: versionOut } = await execAsync(`npm version ${versionBump}`, {
|
|
188
|
-
cwd: workingDir,
|
|
189
|
-
timeout: 30000,
|
|
190
|
-
});
|
|
191
|
-
const newVersion = versionOut.trim();
|
|
192
|
-
steps.push(`✓ Version bumped to ${newVersion}`);
|
|
193
|
-
}
|
|
194
|
-
catch (e) {
|
|
195
|
-
errors.push(`✗ Version bump failed: ${e.message}`);
|
|
196
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
197
|
-
}
|
|
198
|
-
// Step 6: Publish
|
|
199
|
-
steps.push('');
|
|
200
|
-
steps.push('## Step 6: Publishing to npm');
|
|
201
|
-
if (dryRun) {
|
|
202
|
-
steps.push('⚠ DRY RUN - skipping actual publish');
|
|
203
|
-
try {
|
|
204
|
-
const { stdout: dryRunOut } = await execAsync(`npm publish --dry-run --tag ${tag}`, {
|
|
205
|
-
cwd: workingDir,
|
|
206
|
-
timeout: 120000,
|
|
207
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
208
|
-
});
|
|
209
|
-
steps.push('Dry run output:');
|
|
210
|
-
steps.push(dryRunOut.substring(0, 2000));
|
|
211
|
-
}
|
|
212
|
-
catch (e) {
|
|
213
|
-
steps.push(`Dry run warning: ${e.message}`);
|
|
214
|
-
}
|
|
215
|
-
}
|
|
216
|
-
else {
|
|
217
|
-
try {
|
|
218
|
-
const { stdout: publishOut } = await execAsync(`npm publish --tag ${tag}`, {
|
|
219
|
-
cwd: workingDir,
|
|
220
|
-
timeout: 120000,
|
|
221
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
222
|
-
});
|
|
223
|
-
steps.push(`✓ Published successfully`);
|
|
224
|
-
// Extract package info from output
|
|
225
|
-
const match = publishOut.match(/\+ ([^\s]+)/);
|
|
226
|
-
if (match) {
|
|
227
|
-
steps.push(`Published: ${match[1]}`);
|
|
228
|
-
}
|
|
229
|
-
}
|
|
230
|
-
catch (e) {
|
|
231
|
-
errors.push(`✗ Publish failed: ${e.message}`);
|
|
232
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
233
|
-
}
|
|
234
|
-
}
|
|
235
|
-
// Step 7: Push to git
|
|
236
|
-
steps.push('');
|
|
237
|
-
steps.push('## Step 7: Pushing to git');
|
|
238
|
-
try {
|
|
239
|
-
await execAsync('git push && git push --tags', { cwd: workingDir, timeout: 60000 });
|
|
240
|
-
steps.push('✓ Pushed commits and tags to remote');
|
|
241
|
-
}
|
|
242
|
-
catch (e) {
|
|
243
|
-
steps.push(`Warning: Could not push to git: ${e.message}`);
|
|
244
|
-
}
|
|
245
|
-
// Summary
|
|
246
|
-
steps.push('');
|
|
247
|
-
steps.push('## Summary');
|
|
248
|
-
const updatedPackage = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
|
|
249
|
-
steps.push(`✓ ${updatedPackage.name}@${updatedPackage.version} ${dryRun ? '(dry run)' : 'published'}`);
|
|
250
|
-
return steps.join('\n');
|
|
251
|
-
}
|
|
252
|
-
catch (error) {
|
|
253
|
-
errors.push(`Unexpected error: ${error.message}`);
|
|
254
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
255
|
-
}
|
|
256
|
-
},
|
|
257
|
-
},
|
|
258
|
-
{
|
|
259
|
-
name: 'npm_check_auth',
|
|
260
|
-
description: 'Check npm authentication status and registry configuration',
|
|
261
|
-
parameters: {
|
|
262
|
-
type: 'object',
|
|
263
|
-
properties: {},
|
|
264
|
-
additionalProperties: false,
|
|
265
|
-
},
|
|
266
|
-
handler: async () => {
|
|
267
|
-
const output = ['# npm Authentication Status', ''];
|
|
268
|
-
try {
|
|
269
|
-
// Check whoami
|
|
270
|
-
try {
|
|
271
|
-
const { stdout: whoami } = await execAsync('npm whoami', { cwd: workingDir, timeout: 30000 });
|
|
272
|
-
output.push(`✓ Logged in as: **${whoami.trim()}**`);
|
|
273
|
-
}
|
|
274
|
-
catch {
|
|
275
|
-
output.push('✗ Not logged in to npm');
|
|
276
|
-
output.push('');
|
|
277
|
-
output.push('To login, run: `npm login`');
|
|
278
|
-
return output.join('\n');
|
|
279
|
-
}
|
|
280
|
-
// Check registry
|
|
281
|
-
try {
|
|
282
|
-
const { stdout: registry } = await execAsync('npm config get registry', { cwd: workingDir, timeout: 10000 });
|
|
283
|
-
output.push(`Registry: ${registry.trim()}`);
|
|
284
|
-
}
|
|
285
|
-
catch {
|
|
286
|
-
output.push('Registry: (could not determine)');
|
|
287
|
-
}
|
|
288
|
-
// Check token
|
|
289
|
-
output.push('');
|
|
290
|
-
output.push('## Token Status');
|
|
291
|
-
try {
|
|
292
|
-
const { stdout: tokenList } = await execAsync('npm token list 2>/dev/null | head -5', {
|
|
293
|
-
cwd: workingDir,
|
|
294
|
-
timeout: 30000,
|
|
295
|
-
shell: '/bin/bash',
|
|
296
|
-
});
|
|
297
|
-
if (tokenList.trim()) {
|
|
298
|
-
output.push('Active tokens found');
|
|
299
|
-
}
|
|
300
|
-
}
|
|
301
|
-
catch {
|
|
302
|
-
output.push('Could not list tokens (this is normal)');
|
|
303
|
-
}
|
|
304
|
-
return output.join('\n');
|
|
305
|
-
}
|
|
306
|
-
catch (error) {
|
|
307
|
-
return `Error checking npm auth: ${error.message}`;
|
|
308
|
-
}
|
|
309
|
-
},
|
|
310
|
-
},
|
|
311
|
-
// ========================================================================
|
|
312
|
-
// PyPI Publishing Tools
|
|
313
|
-
// ========================================================================
|
|
314
|
-
{
|
|
315
|
-
name: 'python_publish',
|
|
316
|
-
description: 'Publish Python package to PyPI. Handles the full workflow: check auth, commit changes, bump version, build, and publish.',
|
|
317
|
-
parameters: {
|
|
318
|
-
type: 'object',
|
|
319
|
-
properties: {
|
|
320
|
-
versionBump: {
|
|
321
|
-
type: 'string',
|
|
322
|
-
enum: ['patch', 'minor', 'major'],
|
|
323
|
-
description: 'Version bump type (default: patch)',
|
|
324
|
-
},
|
|
325
|
-
commitMessage: {
|
|
326
|
-
type: 'string',
|
|
327
|
-
description: 'Commit message for uncommitted changes (if any)',
|
|
328
|
-
},
|
|
329
|
-
dryRun: {
|
|
330
|
-
type: 'boolean',
|
|
331
|
-
description: 'If true, performs all steps except the actual publish',
|
|
332
|
-
},
|
|
333
|
-
testPypi: {
|
|
334
|
-
type: 'boolean',
|
|
335
|
-
description: 'Publish to TestPyPI instead of PyPI (default: false)',
|
|
336
|
-
},
|
|
337
|
-
},
|
|
338
|
-
additionalProperties: false,
|
|
339
|
-
},
|
|
340
|
-
handler: async (args) => {
|
|
341
|
-
const versionBump = typeof args['versionBump'] === 'string' ? args['versionBump'] : 'patch';
|
|
342
|
-
const commitMessage = typeof args['commitMessage'] === 'string' ? args['commitMessage'] : undefined;
|
|
343
|
-
const dryRun = args['dryRun'] === true;
|
|
344
|
-
const testPypi = args['testPypi'] === true;
|
|
345
|
-
const steps = [];
|
|
346
|
-
const errors = [];
|
|
347
|
-
try {
|
|
348
|
-
// Step 1: Detect Python project type and check auth
|
|
349
|
-
steps.push('## Step 1: Detecting project type and checking authentication');
|
|
350
|
-
const hasPoetry = existsSync(join(workingDir, 'pyproject.toml'));
|
|
351
|
-
const hasSetupPy = existsSync(join(workingDir, 'setup.py'));
|
|
352
|
-
const hasSetupCfg = existsSync(join(workingDir, 'setup.cfg'));
|
|
353
|
-
let projectType = 'unknown';
|
|
354
|
-
let currentVersion = '';
|
|
355
|
-
if (hasPoetry) {
|
|
356
|
-
const pyprojectContent = readFileSync(join(workingDir, 'pyproject.toml'), 'utf-8');
|
|
357
|
-
if (pyprojectContent.includes('[tool.poetry]')) {
|
|
358
|
-
projectType = 'poetry';
|
|
359
|
-
const versionMatch = pyprojectContent.match(/version\s*=\s*"([^"]+)"/);
|
|
360
|
-
if (versionMatch)
|
|
361
|
-
currentVersion = versionMatch[1] || '';
|
|
362
|
-
steps.push(`✓ Detected Poetry project (v${currentVersion})`);
|
|
363
|
-
// Check Poetry auth
|
|
364
|
-
try {
|
|
365
|
-
await execAsync('poetry config pypi-token.pypi', { cwd: workingDir, timeout: 10000 });
|
|
366
|
-
steps.push('✓ Poetry PyPI token configured');
|
|
367
|
-
}
|
|
368
|
-
catch {
|
|
369
|
-
steps.push('⚠ PyPI token may not be configured. Run: poetry config pypi-token.pypi <token>');
|
|
370
|
-
}
|
|
371
|
-
}
|
|
372
|
-
}
|
|
373
|
-
if (projectType === 'unknown' && (hasSetupPy || hasSetupCfg)) {
|
|
374
|
-
projectType = 'setuptools';
|
|
375
|
-
// Try to get version from setup.py or setup.cfg
|
|
376
|
-
if (hasSetupPy) {
|
|
377
|
-
const setupContent = readFileSync(join(workingDir, 'setup.py'), 'utf-8');
|
|
378
|
-
const versionMatch = setupContent.match(/version\s*=\s*['"]([^'"]+)['"]/);
|
|
379
|
-
if (versionMatch)
|
|
380
|
-
currentVersion = versionMatch[1] || '';
|
|
381
|
-
}
|
|
382
|
-
steps.push(`✓ Detected setuptools project${currentVersion ? ` (v${currentVersion})` : ''}`);
|
|
383
|
-
// Check twine auth
|
|
384
|
-
try {
|
|
385
|
-
await execAsync('twine --version', { cwd: workingDir, timeout: 10000 });
|
|
386
|
-
steps.push('✓ Twine is available');
|
|
387
|
-
}
|
|
388
|
-
catch {
|
|
389
|
-
errors.push('✗ Twine not found. Install with: pip install twine');
|
|
390
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
391
|
-
}
|
|
392
|
-
}
|
|
393
|
-
if (projectType === 'unknown') {
|
|
394
|
-
errors.push('✗ Could not detect Python project type. Need pyproject.toml (Poetry) or setup.py/setup.cfg (setuptools)');
|
|
395
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
396
|
-
}
|
|
397
|
-
// Step 2: Check for uncommitted changes
|
|
398
|
-
steps.push('');
|
|
399
|
-
steps.push('## Step 2: Checking git status');
|
|
400
|
-
try {
|
|
401
|
-
const { stdout: gitStatus } = await execAsync('git status --porcelain', { cwd: workingDir, timeout: 10000 });
|
|
402
|
-
if (gitStatus.trim()) {
|
|
403
|
-
if (commitMessage) {
|
|
404
|
-
steps.push(`Found uncommitted changes, committing with message: "${commitMessage}"`);
|
|
405
|
-
await execAsync('git add .', { cwd: workingDir, timeout: 30000 });
|
|
406
|
-
await execAsync(`git commit -m "${commitMessage.replace(/"/g, '\\"')}"`, { cwd: workingDir, timeout: 30000 });
|
|
407
|
-
steps.push('✓ Changes committed');
|
|
408
|
-
}
|
|
409
|
-
else {
|
|
410
|
-
errors.push('✗ Uncommitted changes found. Provide a commitMessage or commit manually first.');
|
|
411
|
-
gitStatus.trim().split('\n').forEach(line => errors.push(` ${line}`));
|
|
412
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
413
|
-
}
|
|
414
|
-
}
|
|
415
|
-
else {
|
|
416
|
-
steps.push('✓ Working directory clean');
|
|
417
|
-
}
|
|
418
|
-
}
|
|
419
|
-
catch (e) {
|
|
420
|
-
steps.push(`Warning: Could not check git status: ${e.message}`);
|
|
421
|
-
}
|
|
422
|
-
// Step 3: Bump version
|
|
423
|
-
steps.push('');
|
|
424
|
-
steps.push('## Step 3: Bumping version');
|
|
425
|
-
try {
|
|
426
|
-
if (projectType === 'poetry') {
|
|
427
|
-
const { stdout: versionOut } = await execAsync(`poetry version ${versionBump}`, {
|
|
428
|
-
cwd: workingDir,
|
|
429
|
-
timeout: 30000,
|
|
430
|
-
});
|
|
431
|
-
steps.push(`✓ ${versionOut.trim()}`);
|
|
432
|
-
}
|
|
433
|
-
else {
|
|
434
|
-
// For setuptools, use bump2version if available, otherwise manual
|
|
435
|
-
try {
|
|
436
|
-
await execAsync(`bump2version ${versionBump}`, { cwd: workingDir, timeout: 30000 });
|
|
437
|
-
steps.push(`✓ Version bumped with bump2version`);
|
|
438
|
-
}
|
|
439
|
-
catch {
|
|
440
|
-
steps.push(`⚠ bump2version not available. Please manually update version in setup.py/setup.cfg`);
|
|
441
|
-
}
|
|
442
|
-
}
|
|
443
|
-
}
|
|
444
|
-
catch (e) {
|
|
445
|
-
errors.push(`✗ Version bump failed: ${e.message}`);
|
|
446
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
447
|
-
}
|
|
448
|
-
// Step 4: Build
|
|
449
|
-
steps.push('');
|
|
450
|
-
steps.push('## Step 4: Building package');
|
|
451
|
-
try {
|
|
452
|
-
if (projectType === 'poetry') {
|
|
453
|
-
await execAsync('poetry build', {
|
|
454
|
-
cwd: workingDir,
|
|
455
|
-
timeout: 120000,
|
|
456
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
457
|
-
});
|
|
458
|
-
steps.push('✓ Built with Poetry');
|
|
459
|
-
}
|
|
460
|
-
else {
|
|
461
|
-
// Clean old builds
|
|
462
|
-
await execAsync('rm -rf dist/ build/ *.egg-info', { cwd: workingDir, timeout: 10000, shell: '/bin/bash' });
|
|
463
|
-
await execAsync('python -m build', {
|
|
464
|
-
cwd: workingDir,
|
|
465
|
-
timeout: 120000,
|
|
466
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
467
|
-
});
|
|
468
|
-
steps.push('✓ Built with python -m build');
|
|
469
|
-
}
|
|
470
|
-
}
|
|
471
|
-
catch (e) {
|
|
472
|
-
errors.push(`✗ Build failed: ${e.message}`);
|
|
473
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
474
|
-
}
|
|
475
|
-
// Step 5: Publish
|
|
476
|
-
steps.push('');
|
|
477
|
-
steps.push('## Step 5: Publishing to PyPI');
|
|
478
|
-
const repository = testPypi ? 'testpypi' : 'pypi';
|
|
479
|
-
if (dryRun) {
|
|
480
|
-
steps.push(`⚠ DRY RUN - would publish to ${repository}`);
|
|
481
|
-
steps.push('Files that would be uploaded:');
|
|
482
|
-
try {
|
|
483
|
-
const { stdout: distFiles } = await execAsync('ls -la dist/', { cwd: workingDir, timeout: 10000 });
|
|
484
|
-
steps.push(distFiles);
|
|
485
|
-
}
|
|
486
|
-
catch {
|
|
487
|
-
steps.push(' (could not list dist files)');
|
|
488
|
-
}
|
|
489
|
-
}
|
|
490
|
-
else {
|
|
491
|
-
try {
|
|
492
|
-
if (projectType === 'poetry') {
|
|
493
|
-
const publishCmd = testPypi
|
|
494
|
-
? 'poetry publish -r testpypi'
|
|
495
|
-
: 'poetry publish';
|
|
496
|
-
await execAsync(publishCmd, {
|
|
497
|
-
cwd: workingDir,
|
|
498
|
-
timeout: 120000,
|
|
499
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
500
|
-
});
|
|
501
|
-
}
|
|
502
|
-
else {
|
|
503
|
-
const uploadUrl = testPypi
|
|
504
|
-
? '--repository-url https://test.pypi.org/legacy/'
|
|
505
|
-
: '';
|
|
506
|
-
await execAsync(`twine upload ${uploadUrl} dist/*`, {
|
|
507
|
-
cwd: workingDir,
|
|
508
|
-
timeout: 120000,
|
|
509
|
-
maxBuffer: 1024 * 1024 * 10,
|
|
510
|
-
});
|
|
511
|
-
}
|
|
512
|
-
steps.push(`✓ Published to ${repository}`);
|
|
513
|
-
}
|
|
514
|
-
catch (e) {
|
|
515
|
-
errors.push(`✗ Publish failed: ${e.message}`);
|
|
516
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
517
|
-
}
|
|
518
|
-
}
|
|
519
|
-
// Step 6: Git commit version bump and push
|
|
520
|
-
steps.push('');
|
|
521
|
-
steps.push('## Step 6: Committing version bump and pushing');
|
|
522
|
-
try {
|
|
523
|
-
const { stdout: gitStatus } = await execAsync('git status --porcelain', { cwd: workingDir, timeout: 10000 });
|
|
524
|
-
if (gitStatus.trim()) {
|
|
525
|
-
await execAsync('git add .', { cwd: workingDir, timeout: 30000 });
|
|
526
|
-
await execAsync('git commit -m "Bump version"', { cwd: workingDir, timeout: 30000 });
|
|
527
|
-
steps.push('✓ Version bump committed');
|
|
528
|
-
}
|
|
529
|
-
await execAsync('git push && git push --tags', { cwd: workingDir, timeout: 60000 });
|
|
530
|
-
steps.push('✓ Pushed to remote');
|
|
531
|
-
}
|
|
532
|
-
catch (e) {
|
|
533
|
-
steps.push(`Warning: Could not push to git: ${e.message}`);
|
|
534
|
-
}
|
|
535
|
-
// Summary
|
|
536
|
-
steps.push('');
|
|
537
|
-
steps.push('## Summary');
|
|
538
|
-
steps.push(`✓ Python package ${dryRun ? '(dry run)' : 'published'} to ${repository}`);
|
|
539
|
-
return steps.join('\n');
|
|
540
|
-
}
|
|
541
|
-
catch (error) {
|
|
542
|
-
errors.push(`Unexpected error: ${error.message}`);
|
|
543
|
-
return [...steps, '', '## Errors', ...errors].join('\n');
|
|
544
|
-
}
|
|
545
|
-
},
|
|
546
|
-
},
|
|
547
|
-
{
|
|
548
|
-
name: 'pypi_check_auth',
|
|
549
|
-
description: 'Check PyPI authentication status for both Poetry and twine',
|
|
550
|
-
parameters: {
|
|
551
|
-
type: 'object',
|
|
552
|
-
properties: {},
|
|
553
|
-
additionalProperties: false,
|
|
554
|
-
},
|
|
555
|
-
handler: async () => {
|
|
556
|
-
const output = ['# PyPI Authentication Status', ''];
|
|
557
|
-
// Check Poetry
|
|
558
|
-
output.push('## Poetry');
|
|
559
|
-
try {
|
|
560
|
-
const { stdout: poetryConfig } = await execAsync('poetry config --list 2>/dev/null | grep pypi', {
|
|
561
|
-
cwd: workingDir,
|
|
562
|
-
timeout: 10000,
|
|
563
|
-
shell: '/bin/bash',
|
|
564
|
-
});
|
|
565
|
-
if (poetryConfig.includes('pypi-token')) {
|
|
566
|
-
output.push('✓ PyPI token configured in Poetry');
|
|
567
|
-
}
|
|
568
|
-
else {
|
|
569
|
-
output.push('✗ No PyPI token in Poetry');
|
|
570
|
-
output.push(' Set with: poetry config pypi-token.pypi <your-token>');
|
|
571
|
-
}
|
|
572
|
-
}
|
|
573
|
-
catch {
|
|
574
|
-
output.push('Poetry not available or not configured');
|
|
575
|
-
}
|
|
576
|
-
// Check twine/pip
|
|
577
|
-
output.push('');
|
|
578
|
-
output.push('## Twine/pip');
|
|
579
|
-
try {
|
|
580
|
-
await execAsync('twine --version', { cwd: workingDir, timeout: 10000 });
|
|
581
|
-
output.push('✓ Twine is installed');
|
|
582
|
-
// Check for .pypirc
|
|
583
|
-
const homedir = process.env['HOME'] || '';
|
|
584
|
-
if (existsSync(join(homedir, '.pypirc'))) {
|
|
585
|
-
output.push('✓ .pypirc found in home directory');
|
|
586
|
-
}
|
|
587
|
-
else {
|
|
588
|
-
output.push('⚠ No .pypirc found. Twine will prompt for credentials.');
|
|
589
|
-
}
|
|
590
|
-
}
|
|
591
|
-
catch {
|
|
592
|
-
output.push('✗ Twine not installed');
|
|
593
|
-
output.push(' Install with: pip install twine');
|
|
594
|
-
}
|
|
595
|
-
// Check for TWINE_* env vars
|
|
596
|
-
output.push('');
|
|
597
|
-
output.push('## Environment Variables');
|
|
598
|
-
const twineUser = process.env['TWINE_USERNAME'];
|
|
599
|
-
const twinePass = process.env['TWINE_PASSWORD'];
|
|
600
|
-
if (twineUser) {
|
|
601
|
-
output.push(`✓ TWINE_USERNAME set: ${twineUser}`);
|
|
602
|
-
}
|
|
603
|
-
if (twinePass) {
|
|
604
|
-
output.push('✓ TWINE_PASSWORD is set');
|
|
605
|
-
}
|
|
606
|
-
if (!twineUser && !twinePass) {
|
|
607
|
-
output.push('No TWINE_* environment variables set');
|
|
608
|
-
}
|
|
609
|
-
return output.join('\n');
|
|
610
|
-
},
|
|
611
|
-
},
|
|
612
|
-
    // ========================================================================
    // Cargo (Rust) Publishing Tools
    // ========================================================================
    {
      name: 'cargo_publish',
      description: 'Publish Rust crate to crates.io. Handles the full workflow: check auth, commit changes, bump version, build, and publish.',
      parameters: {
        type: 'object',
        properties: {
          versionBump: {
            type: 'string',
            enum: ['patch', 'minor', 'major'],
            description: 'Version bump type (default: patch)',
          },
          commitMessage: {
            type: 'string',
            description: 'Commit message for uncommitted changes (if any)',
          },
          dryRun: {
            type: 'boolean',
            description: 'If true, performs all steps except the actual publish',
          },
          allowDirty: {
            type: 'boolean',
            description: 'Allow publishing with uncommitted changes (default: false)',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const versionBump = typeof args['versionBump'] === 'string' ? args['versionBump'] : 'patch';
        const commitMessage = typeof args['commitMessage'] === 'string' ? args['commitMessage'] : undefined;
        const dryRun = args['dryRun'] === true;
        const allowDirty = args['allowDirty'] === true;
        const steps = [];
        const errors = [];
        try {
          // Step 1: Check Cargo.toml exists and auth
          steps.push('## Step 1: Checking project and authentication');
          const cargoTomlPath = join(workingDir, 'Cargo.toml');
          if (!existsSync(cargoTomlPath)) {
            errors.push('✗ Cargo.toml not found. This is not a Rust project.');
            return [...steps, '', '## Errors', ...errors].join('\n');
          }
          const cargoContent = readFileSync(cargoTomlPath, 'utf-8');
          const nameMatch = cargoContent.match(/name\s*=\s*"([^"]+)"/);
          const versionMatch = cargoContent.match(/version\s*=\s*"([^"]+)"/);
          const crateName = nameMatch ? nameMatch[1] : 'unknown';
          const currentVersion = versionMatch ? versionMatch[1] : '0.0.0';
          steps.push(`✓ Found crate: ${crateName} v${currentVersion}`);
          // Check cargo login
          try {
            const { stdout: whoami } = await execAsync('cargo owner --list 2>/dev/null | head -1', {
              cwd: workingDir,
              timeout: 30000,
              shell: '/bin/bash',
            });
            if (whoami.trim()) {
              steps.push(`✓ Logged in to crates.io`);
            }
          }
          catch {
            steps.push('⚠ Could not verify crates.io login. Make sure you ran `cargo login`');
          }
          // Step 2: Check for uncommitted changes
          steps.push('');
          steps.push('## Step 2: Checking git status');
          try {
            const { stdout: gitStatus } = await execAsync('git status --porcelain', { cwd: workingDir, timeout: 10000 });
            if (gitStatus.trim()) {
              if (commitMessage) {
                steps.push(`Found uncommitted changes, committing with message: "${commitMessage}"`);
                await execAsync('git add .', { cwd: workingDir, timeout: 30000 });
                await execAsync(`git commit -m "${commitMessage.replace(/"/g, '\\"')}"`, { cwd: workingDir, timeout: 30000 });
                steps.push('✓ Changes committed');
              }
              else if (!allowDirty) {
                errors.push('✗ Uncommitted changes found. Provide a commitMessage, commit manually, or use allowDirty=true.');
                gitStatus.trim().split('\n').forEach(line => errors.push(` ${line}`));
                return [...steps, '', '## Errors', ...errors].join('\n');
              }
              else {
                steps.push('⚠ Uncommitted changes present (allowDirty=true)');
              }
            }
            else {
              steps.push('✓ Working directory clean');
            }
          }
          catch (e) {
            steps.push(`Warning: Could not check git status: ${e.message}`);
          }
          // Step 3: Bump version
          steps.push('');
          steps.push('## Step 3: Bumping version');
          // Parse current version
          const versionParts = (currentVersion || '0.0.0').split('.').map(p => parseInt(p, 10) || 0);
          let [major = 0, minor = 0, patch = 0] = versionParts;
          switch (versionBump) {
            case 'major':
              major++;
              minor = 0;
              patch = 0;
              break;
            case 'minor':
              minor++;
              patch = 0;
              break;
            case 'patch':
            default:
              patch++;
              break;
          }
          const newVersion = `${major}.${minor}.${patch}`;
          // Update Cargo.toml
          const updatedCargo = cargoContent.replace(/version\s*=\s*"[^"]+"/, `version = "${newVersion}"`);
          const fs = await import('node:fs/promises');
          await fs.writeFile(cargoTomlPath, updatedCargo, 'utf-8');
          steps.push(`✓ Version bumped: ${currentVersion} → ${newVersion}`);
          // Update Cargo.lock
          try {
            await execAsync('cargo check', { cwd: workingDir, timeout: 120000 });
            steps.push('✓ Cargo.lock updated');
          }
          catch (e) {
            steps.push(`Warning: cargo check had issues: ${e.message}`);
          }
          // Step 4: Build and test
          steps.push('');
          steps.push('## Step 4: Building and testing');
          try {
            await execAsync('cargo build --release', {
              cwd: workingDir,
              timeout: 300000,
              maxBuffer: 1024 * 1024 * 10,
            });
            steps.push('✓ Release build successful');
            await execAsync('cargo test', {
              cwd: workingDir,
              timeout: 300000,
              maxBuffer: 1024 * 1024 * 10,
            });
            steps.push('✓ Tests passed');
          }
          catch (e) {
            errors.push(`✗ Build/test failed: ${e.message}`);
            return [...steps, '', '## Errors', ...errors].join('\n');
          }
          // Step 5: Publish
          steps.push('');
          steps.push('## Step 5: Publishing to crates.io');
          if (dryRun) {
            steps.push('⚠ DRY RUN - would publish to crates.io');
            try {
              const { stdout: dryRunOut } = await execAsync('cargo publish --dry-run', {
                cwd: workingDir,
                timeout: 120000,
                maxBuffer: 1024 * 1024 * 10,
              });
              steps.push('Dry run output:');
              steps.push(dryRunOut.substring(0, 2000));
            }
            catch (e) {
              steps.push(`Dry run notes: ${e.message}`);
            }
          }
          else {
            try {
              const publishFlags = allowDirty ? '--allow-dirty' : '';
              await execAsync(`cargo publish ${publishFlags}`, {
                cwd: workingDir,
                timeout: 180000,
                maxBuffer: 1024 * 1024 * 10,
              });
              steps.push(`✓ Published ${crateName}@${newVersion} to crates.io`);
            }
            catch (e) {
              errors.push(`✗ Publish failed: ${e.message}`);
              return [...steps, '', '## Errors', ...errors].join('\n');
            }
          }
          // Step 6: Git commit and push
          steps.push('');
          steps.push('## Step 6: Committing and pushing');
          try {
            await execAsync('git add Cargo.toml Cargo.lock', { cwd: workingDir, timeout: 10000 });
            await execAsync(`git commit -m "Release v${newVersion}"`, { cwd: workingDir, timeout: 30000 });
            await execAsync(`git tag -a "v${newVersion}" -m "Release v${newVersion}"`, { cwd: workingDir, timeout: 10000 });
            steps.push(`✓ Created tag v${newVersion}`);
            await execAsync('git push && git push --tags', { cwd: workingDir, timeout: 60000 });
            steps.push('✓ Pushed to remote');
          }
          catch (e) {
            steps.push(`Warning: Could not push to git: ${e.message}`);
          }
          // Summary
          steps.push('');
          steps.push('## Summary');
          steps.push(`✓ ${crateName}@${newVersion} ${dryRun ? '(dry run)' : 'published to crates.io'}`);
          return steps.join('\n');
        }
        catch (error) {
          errors.push(`Unexpected error: ${error.message}`);
          return [...steps, '', '## Errors', ...errors].join('\n');
        }
      },
    },
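    // --------------------------------------------------------------------
    // Illustrative sketch (not shipped code): the version-bump switch above is
    // a plain semver increment. Isolated, with `bump` as a local example name:
    //
    //   const bump = (v, kind) => {
    //     let [major = 0, minor = 0, patch = 0] = v.split('.').map(p => parseInt(p, 10) || 0);
    //     if (kind === 'major') { major++; minor = 0; patch = 0; }
    //     else if (kind === 'minor') { minor++; patch = 0; }
    //     else { patch++; }                      // 'patch' and the default case
    //     return `${major}.${minor}.${patch}`;
    //   };
    //   bump('1.4.9', 'patch'); // '1.4.10'
    //   bump('1.4.9', 'minor'); // '1.5.0'
    //   bump('1.4.9', 'major'); // '2.0.0'
    // --------------------------------------------------------------------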
    {
      name: 'cargo_check_auth',
      description: 'Check crates.io authentication status',
      parameters: {
        type: 'object',
        properties: {},
        additionalProperties: false,
      },
      handler: async () => {
        const output = ['# Crates.io Authentication Status', ''];
        // Check if cargo is available
        try {
          const { stdout: cargoVersion } = await execAsync('cargo --version', { cwd: workingDir, timeout: 10000 });
          output.push(`✓ ${cargoVersion.trim()}`);
        }
        catch {
          output.push('✗ Cargo not found. Install Rust from https://rustup.rs');
          return output.join('\n');
        }
        // Check credentials file
        const homedir = process.env['HOME'] || '';
        const credentialsPath = join(homedir, '.cargo', 'credentials.toml');
        const legacyCredentialsPath = join(homedir, '.cargo', 'credentials');
        output.push('');
        output.push('## Credentials');
        if (existsSync(credentialsPath) || existsSync(legacyCredentialsPath)) {
          output.push('✓ Credentials file found');
          output.push(' To update token, run: cargo login');
        }
        else {
          output.push('✗ No credentials file found');
          output.push(' Run: cargo login <your-api-token>');
          output.push(' Get token from: https://crates.io/settings/tokens');
        }
        // Check CARGO_REGISTRY_TOKEN env var
        output.push('');
        output.push('## Environment Variables');
        if (process.env['CARGO_REGISTRY_TOKEN']) {
          output.push('✓ CARGO_REGISTRY_TOKEN is set');
        }
        else {
          output.push('CARGO_REGISTRY_TOKEN not set (optional)');
        }
        return output.join('\n');
      },
    },
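    // --------------------------------------------------------------------
    // Illustrative sketch (an assumption, not an API exposed by this file):
    // every entry in this array follows a { name, description, parameters,
    // handler } shape, so a caller that has resolved a tool by name would
    // roughly do:
    //
    //   const tool = tools.find(t => t.name === 'cargo_check_auth');
    //   const report = await tool.handler({});   // handlers return a Markdown-ish string
    //   console.log(report);
    //
    // `tools` stands in for whatever collection this array is returned as.
    // --------------------------------------------------------------------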
    // ========================================================================
    // Git Workflow Tools
    // ========================================================================
    {
      name: 'git_release',
      description: 'Create a git release with tag, changelog generation, and optional GitHub release',
      parameters: {
        type: 'object',
        properties: {
          version: {
            type: 'string',
            description: 'Version tag (e.g., v1.0.0). If not provided, will auto-generate based on commits.',
          },
          message: {
            type: 'string',
            description: 'Release message/description',
          },
          generateChangelog: {
            type: 'boolean',
            description: 'Auto-generate changelog from commits since last tag (default: true)',
          },
          pushTag: {
            type: 'boolean',
            description: 'Push the tag to remote (default: true)',
          },
          createGithubRelease: {
            type: 'boolean',
            description: 'Create a GitHub release using gh CLI (default: false)',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const version = typeof args['version'] === 'string' ? args['version'] : undefined;
        const message = typeof args['message'] === 'string' ? args['message'] : undefined;
        const generateChangelog = args['generateChangelog'] !== false;
        const pushTag = args['pushTag'] !== false;
        const createGithubRelease = args['createGithubRelease'] === true;
        const output = ['# Git Release', ''];
        try {
          // Get last tag
          let lastTag = '';
          try {
            const { stdout } = await execAsync('git describe --tags --abbrev=0 2>/dev/null || echo ""', {
              cwd: workingDir,
              timeout: 10000,
              shell: '/bin/bash',
            });
            lastTag = stdout.trim();
          }
          catch {
            // No tags exist
          }
          // Determine version
          let tagVersion = version;
          if (!tagVersion) {
            if (lastTag) {
              // Increment patch version
              const match = lastTag.match(/v?(\d+)\.(\d+)\.(\d+)/);
              if (match) {
                const [, major, minor, patch] = match;
                tagVersion = `v${major}.${minor}.${parseInt(patch || '0', 10) + 1}`;
              }
              else {
                tagVersion = 'v0.0.1';
              }
            }
            else {
              tagVersion = 'v0.0.1';
            }
          }
          output.push(`Version: ${tagVersion}`);
          output.push(`Previous: ${lastTag || '(none)'}`);
          // Generate changelog
          let changelog = '';
          if (generateChangelog) {
            output.push('');
            output.push('## Changelog');
            try {
              const range = lastTag ? `${lastTag}..HEAD` : 'HEAD';
              const { stdout: commits } = await execAsync(`git log ${range} --pretty=format:"- %s (%h)" --no-merges 2>/dev/null | head -50`, { cwd: workingDir, timeout: 10000, shell: '/bin/bash' });
              changelog = commits.trim() || 'No commits since last release';
              output.push(changelog);
            }
            catch {
              changelog = 'Could not generate changelog';
              output.push(changelog);
            }
          }
          // Create tag
          output.push('');
          output.push('## Creating Tag');
          const tagMessage = message || `Release ${tagVersion}\n\n${changelog}`;
          try {
            await execAsync(`git tag -a "${tagVersion}" -m "${tagMessage.replace(/"/g, '\\"')}"`, {
              cwd: workingDir,
              timeout: 30000,
            });
            output.push(`✓ Created tag: ${tagVersion}`);
          }
          catch (e) {
            output.push(`✗ Failed to create tag: ${e.message}`);
            return output.join('\n');
          }
          // Push tag
          if (pushTag) {
            output.push('');
            output.push('## Pushing Tag');
            try {
              await execAsync(`git push origin "${tagVersion}"`, { cwd: workingDir, timeout: 60000 });
              output.push(`✓ Pushed tag to origin`);
            }
            catch (e) {
              output.push(`✗ Failed to push tag: ${e.message}`);
            }
          }
          // Create GitHub release
          if (createGithubRelease) {
            output.push('');
            output.push('## Creating GitHub Release');
            try {
              const releaseNotes = changelog || message || `Release ${tagVersion}`;
              await execAsync(`gh release create "${tagVersion}" --title "${tagVersion}" --notes "${releaseNotes.replace(/"/g, '\\"')}"`, { cwd: workingDir, timeout: 60000 });
              output.push(`✓ Created GitHub release`);
            }
            catch (e) {
              output.push(`✗ Failed to create GitHub release: ${e.message}`);
              output.push('Make sure gh CLI is installed and authenticated');
            }
          }
          return output.join('\n');
        }
        catch (error) {
          return `Error creating release: ${error.message}`;
        }
      },
    },
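    // --------------------------------------------------------------------
    // Illustrative sketch (not shipped code): when no explicit version is
    // given, git_release bumps the patch component of the last tag and builds
    // the changelog from the commits after it. With lastTag = 'v1.4.2':
    //
    //   const m = 'v1.4.2'.match(/v?(\d+)\.(\d+)\.(\d+)/);        // ['v1.4.2', '1', '4', '2']
    //   const next = `v${m[1]}.${m[2]}.${parseInt(m[3], 10) + 1}`; // 'v1.4.3'
    //   // changelog range used above: `${lastTag}..HEAD`, i.e. 'v1.4.2..HEAD'
    //
    // With no previous tag the handler falls back to 'v0.0.1' and logs all of HEAD.
    // --------------------------------------------------------------------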
    {
      name: 'git_sync',
      description: 'Sync local branch with remote - fetch, pull, and optionally push',
      parameters: {
        type: 'object',
        properties: {
          branch: {
            type: 'string',
            description: 'Branch to sync (default: current branch)',
          },
          push: {
            type: 'boolean',
            description: 'Push local commits after pulling (default: false)',
          },
          rebase: {
            type: 'boolean',
            description: 'Use rebase instead of merge when pulling (default: false)',
          },
          stashChanges: {
            type: 'boolean',
            description: 'Stash uncommitted changes before sync, restore after (default: true)',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const branch = typeof args['branch'] === 'string' ? args['branch'] : undefined;
        const push = args['push'] === true;
        const rebase = args['rebase'] === true;
        const stashChanges = args['stashChanges'] !== false;
        const output = ['# Git Sync', ''];
        let stashed = false;
        try {
          // Get current branch if not specified
          let targetBranch = branch;
          if (!targetBranch) {
            const { stdout } = await execAsync('git branch --show-current', { cwd: workingDir, timeout: 10000 });
            targetBranch = stdout.trim();
          }
          output.push(`Branch: ${targetBranch}`);
          // Check for uncommitted changes
          const { stdout: status } = await execAsync('git status --porcelain', { cwd: workingDir, timeout: 10000 });
          const hasChanges = status.trim().length > 0;
          if (hasChanges && stashChanges) {
            output.push('');
            output.push('## Stashing Changes');
            await execAsync('git stash push -m "git_sync auto-stash"', { cwd: workingDir, timeout: 30000 });
            output.push('✓ Stashed uncommitted changes');
            stashed = true;
          }
          else if (hasChanges) {
            output.push('');
            output.push('⚠ Warning: Uncommitted changes present, sync may fail');
          }
          // Fetch
          output.push('');
          output.push('## Fetching');
          await execAsync('git fetch --all --prune', { cwd: workingDir, timeout: 60000 });
          output.push('✓ Fetched from all remotes');
          // Pull
          output.push('');
          output.push('## Pulling');
          const pullCmd = rebase ? `git pull --rebase origin ${targetBranch}` : `git pull origin ${targetBranch}`;
          try {
            const { stdout: pullOut } = await execAsync(pullCmd, { cwd: workingDir, timeout: 120000 });
            output.push(`✓ Pulled from origin/${targetBranch}`);
            if (pullOut.includes('Already up to date')) {
              output.push('Already up to date');
            }
          }
          catch (e) {
            output.push(`✗ Pull failed: ${e.message}`);
            if (stashed) {
              await execAsync('git stash pop', { cwd: workingDir, timeout: 30000 });
              output.push('✓ Restored stashed changes');
            }
            return output.join('\n');
          }
          // Push
          if (push) {
            output.push('');
            output.push('## Pushing');
            try {
              await execAsync(`git push origin ${targetBranch}`, { cwd: workingDir, timeout: 60000 });
              output.push(`✓ Pushed to origin/${targetBranch}`);
            }
            catch (e) {
              output.push(`✗ Push failed: ${e.message}`);
            }
          }
          // Restore stash
          if (stashed) {
            output.push('');
            output.push('## Restoring Changes');
            try {
              await execAsync('git stash pop', { cwd: workingDir, timeout: 30000 });
              output.push('✓ Restored stashed changes');
            }
            catch (e) {
              output.push(`⚠ Failed to restore stash: ${e.message}`);
              output.push('Run `git stash pop` manually');
            }
          }
          return output.join('\n');
        }
        catch (error) {
          if (stashed) {
            try {
              await execAsync('git stash pop', { cwd: workingDir, timeout: 30000 });
            }
            catch {
              // Ignore
            }
          }
          return `Error syncing: ${error.message}`;
        }
      },
    },
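    // --------------------------------------------------------------------
    // Illustrative summary (not shipped code) of the git_sync ordering above,
    // which keeps uncommitted work out of the pull:
    //
    //   // 1. git stash push -m "git_sync auto-stash"  (only if dirty and stashChanges)
    //   // 2. git fetch --all --prune
    //   // 3. git pull [--rebase] origin <branch>      (on failure: pop stash, stop)
    //   // 4. git push origin <branch>                 (only if push=true)
    //   // 5. git stash pop                            (restore stashed work)
    //
    // The final catch block also attempts a best-effort `git stash pop`, so a
    // failed sync does not leave work parked in the stash.
    // --------------------------------------------------------------------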
    {
      name: 'git_cleanup',
      description: 'Clean up git repository - prune branches, garbage collect, remove untracked files',
      parameters: {
        type: 'object',
        properties: {
          pruneBranches: {
            type: 'boolean',
            description: 'Delete local branches that have been merged (default: true)',
          },
          pruneRemote: {
            type: 'boolean',
            description: 'Prune remote-tracking branches (default: true)',
          },
          gc: {
            type: 'boolean',
            description: 'Run garbage collection (default: true)',
          },
          cleanUntracked: {
            type: 'boolean',
            description: 'Remove untracked files (DANGEROUS - default: false)',
          },
          dryRun: {
            type: 'boolean',
            description: 'Show what would be done without making changes',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const pruneBranches = args['pruneBranches'] !== false;
        const pruneRemote = args['pruneRemote'] !== false;
        const gc = args['gc'] !== false;
        const cleanUntracked = args['cleanUntracked'] === true;
        const dryRun = args['dryRun'] === true;
        const output = ['# Git Cleanup', ''];
        if (dryRun)
          output.push('**DRY RUN - no changes will be made**\n');
        try {
          // Prune remote-tracking branches
          if (pruneRemote) {
            output.push('## Remote-tracking Branches');
            if (dryRun) {
              const { stdout } = await execAsync('git remote prune origin --dry-run', {
                cwd: workingDir,
                timeout: 30000,
              });
              output.push(stdout.trim() || 'No stale branches to prune');
            }
            else {
              const { stdout } = await execAsync('git remote prune origin', { cwd: workingDir, timeout: 30000 });
              output.push(stdout.trim() || '✓ No stale branches to prune');
            }
            output.push('');
          }
          // Find and delete merged branches
          if (pruneBranches) {
            output.push('## Merged Local Branches');
            const { stdout: currentBranch } = await execAsync('git branch --show-current', {
              cwd: workingDir,
              timeout: 10000,
            });
            const { stdout: mergedBranches } = await execAsync('git branch --merged | grep -v "\\*" | grep -v "main" | grep -v "master" | grep -v "develop"', { cwd: workingDir, timeout: 10000, shell: '/bin/bash' }).catch(() => ({ stdout: '' }));
            const branches = mergedBranches
              .split('\n')
              .map((b) => b.trim())
              .filter((b) => b && b !== currentBranch.trim());
            if (branches.length === 0) {
              output.push('No merged branches to delete');
            }
            else {
              for (const branch of branches) {
                if (dryRun) {
                  output.push(`Would delete: ${branch}`);
                }
                else {
                  try {
                    await execAsync(`git branch -d "${branch}"`, { cwd: workingDir, timeout: 10000 });
                    output.push(`✓ Deleted: ${branch}`);
                  }
                  catch {
                    output.push(`✗ Could not delete: ${branch}`);
                  }
                }
              }
            }
            output.push('');
          }
          // Clean untracked files
          if (cleanUntracked) {
            output.push('## Untracked Files');
            if (dryRun) {
              const { stdout } = await execAsync('git clean -n -d', { cwd: workingDir, timeout: 30000 });
              output.push(stdout.trim() || 'No untracked files to remove');
            }
            else {
              const { stdout } = await execAsync('git clean -f -d', { cwd: workingDir, timeout: 30000 });
              output.push(stdout.trim() || '✓ No untracked files to remove');
            }
            output.push('');
          }
          // Garbage collection
          if (gc && !dryRun) {
            output.push('## Garbage Collection');
            await execAsync('git gc --auto', { cwd: workingDir, timeout: 300000 });
            output.push('✓ Garbage collection complete');
          }
          return output.join('\n');
        }
        catch (error) {
          return `Error during cleanup: ${error.message}`;
        }
      },
    },
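    // --------------------------------------------------------------------
    // Illustrative sketch (not shipped code): the merged-branch pruning above
    // relies on `git branch --merged` plus greps that drop the protected
    // names. Roughly the same filter in plain JavaScript:
    //
    //   const protectedNames = ['main', 'master', 'develop'];
    //   const candidates = mergedBranches.split('\n')
    //     .map(b => b.replace('*', '').trim())
    //     .filter(b => b && b !== currentBranch.trim() && !protectedNames.includes(b));
    //
    // Deletion uses `git branch -d` (lower-case d), so anything not fully
    // merged is refused rather than force-deleted.
    // --------------------------------------------------------------------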
    // ========================================================================
    // Docker Tools
    // ========================================================================
    {
      name: 'docker_build',
      description: 'Build a Docker image with optional tagging and pushing to registry',
      parameters: {
        type: 'object',
        properties: {
          tag: {
            type: 'string',
            description: 'Image tag (e.g., myapp:latest)',
          },
          dockerfile: {
            type: 'string',
            description: 'Path to Dockerfile (default: Dockerfile)',
          },
          context: {
            type: 'string',
            description: 'Build context path (default: .)',
          },
          push: {
            type: 'boolean',
            description: 'Push to registry after building (default: false)',
          },
          noCache: {
            type: 'boolean',
            description: 'Build without cache (default: false)',
          },
          buildArgs: {
            type: 'object',
            description: 'Build arguments as key-value pairs',
          },
          platform: {
            type: 'string',
            description: 'Target platform (e.g., linux/amd64,linux/arm64)',
          },
        },
        required: ['tag'],
        additionalProperties: false,
      },
      handler: async (args) => {
        const tag = args['tag'];
        const dockerfile = typeof args['dockerfile'] === 'string' ? args['dockerfile'] : 'Dockerfile';
        const context = typeof args['context'] === 'string' ? args['context'] : '.';
        const push = args['push'] === true;
        const noCache = args['noCache'] === true;
        const buildArgs = args['buildArgs'];
        const platform = typeof args['platform'] === 'string' ? args['platform'] : undefined;
        const output = ['# Docker Build', ''];
        try {
          // Build command
          let cmd = `docker build -t "${tag}" -f "${dockerfile}"`;
          if (noCache)
            cmd += ' --no-cache';
          if (platform)
            cmd += ` --platform ${platform}`;
          if (buildArgs) {
            for (const [key, value] of Object.entries(buildArgs)) {
              cmd += ` --build-arg ${key}="${value}"`;
            }
          }
          cmd += ` "${context}"`;
          output.push(`## Building Image`);
          output.push(`Command: ${cmd}`);
          output.push('');
          const { stderr } = await execAsync(cmd, {
            cwd: workingDir,
            timeout: 600000, // 10 minutes
            maxBuffer: 1024 * 1024 * 50,
          });
          output.push('✓ Build completed');
          if (stderr && !stderr.includes('deprecated')) {
            output.push(`Warnings: ${stderr.substring(0, 500)}`);
          }
          // Push if requested
          if (push) {
            output.push('');
            output.push('## Pushing Image');
            try {
              await execAsync(`docker push "${tag}"`, { cwd: workingDir, timeout: 300000 });
              output.push(`✓ Pushed ${tag}`);
            }
            catch (e) {
              output.push(`✗ Push failed: ${e.message}`);
            }
          }
          return output.join('\n');
        }
        catch (error) {
          return `Docker build failed: ${error.message}\n${error.stderr || ''}`;
        }
      },
    },
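    // --------------------------------------------------------------------
    // Illustrative sketch (not shipped code): docker_build assembles a single
    // CLI string, so a call with buildArgs turns into --build-arg flags, e.g.
    //
    //   // args: { tag: 'myapp:1.2.0', platform: 'linux/amd64',
    //   //         buildArgs: { NODE_ENV: 'production', GIT_SHA: 'abc123' } }
    //   // command produced (wrapped here for readability):
    //   //   docker build -t "myapp:1.2.0" -f "Dockerfile" --platform linux/amd64
    //   //     --build-arg NODE_ENV="production" --build-arg GIT_SHA="abc123" "."
    //
    // Values are only wrapped in double quotes, so build-arg values containing
    // quotes or shell metacharacters would need their own escaping.
    // --------------------------------------------------------------------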
    {
      name: 'docker_compose',
      description: 'Run docker-compose commands (up, down, restart, logs, etc.)',
      parameters: {
        type: 'object',
        properties: {
          action: {
            type: 'string',
            enum: ['up', 'down', 'restart', 'logs', 'ps', 'build', 'pull', 'stop', 'start'],
            description: 'Docker compose action',
          },
          services: {
            type: 'array',
            items: { type: 'string' },
            description: 'Specific services to target (default: all)',
          },
          detach: {
            type: 'boolean',
            description: 'Run in detached mode (for up/restart)',
          },
          build: {
            type: 'boolean',
            description: 'Build images before starting (for up)',
          },
          follow: {
            type: 'boolean',
            description: 'Follow log output (for logs)',
          },
          tail: {
            type: 'number',
            description: 'Number of lines to show from end of logs',
          },
          composeFile: {
            type: 'string',
            description: 'Path to compose file (default: docker-compose.yml)',
          },
        },
        required: ['action'],
        additionalProperties: false,
      },
      handler: async (args) => {
        const action = args['action'];
        const services = Array.isArray(args['services']) ? args['services'] : [];
        const detach = args['detach'] === true;
        const build = args['build'] === true;
        const follow = args['follow'] === true;
        const tail = typeof args['tail'] === 'number' ? args['tail'] : undefined;
        const composeFile = typeof args['composeFile'] === 'string' ? args['composeFile'] : undefined;
        const output = ['# Docker Compose', ''];
        try {
          // Build command
          let cmd = 'docker compose';
          if (composeFile)
            cmd += ` -f "${composeFile}"`;
          cmd += ` ${action}`;
          // Action-specific options
          if (action === 'up') {
            if (detach)
              cmd += ' -d';
            if (build)
              cmd += ' --build';
          }
          if (action === 'logs') {
            if (follow)
              cmd += ' -f';
            if (tail)
              cmd += ` --tail=${tail}`;
          }
          if (services.length > 0) {
            cmd += ` ${services.join(' ')}`;
          }
          output.push(`Command: ${cmd}`);
          output.push('');
          const timeout = action === 'logs' && follow ? 30000 : 300000;
          const { stdout, stderr } = await execAsync(cmd, {
            cwd: workingDir,
            timeout,
            maxBuffer: 1024 * 1024 * 10,
          });
          if (stdout)
            output.push(stdout.substring(0, 5000));
          if (stderr && !stderr.includes('deprecated')) {
            output.push(`Stderr: ${stderr.substring(0, 1000)}`);
          }
          output.push('');
          output.push(`✓ ${action} completed`);
          return output.join('\n');
        }
        catch (error) {
          if (error.killed) {
            return `${output.join('\n')}\n\n(Command timed out - may still be running)`;
          }
          return `Docker compose ${action} failed: ${error.message}\n${error.stderr || ''}`;
        }
      },
    },
    // ========================================================================
    // Project Scaffolding Tools
    // ========================================================================
    {
      name: 'project_init',
      description: 'Initialize a new project with common configurations (git, eslint, prettier, typescript, etc.)',
      parameters: {
        type: 'object',
        properties: {
          type: {
            type: 'string',
            enum: ['node', 'typescript', 'react', 'nextjs', 'python', 'go'],
            description: 'Project type',
          },
          name: {
            type: 'string',
            description: 'Project name',
          },
          git: {
            type: 'boolean',
            description: 'Initialize git repository (default: true)',
          },
          eslint: {
            type: 'boolean',
            description: 'Add ESLint configuration (for JS/TS projects, default: true)',
          },
          prettier: {
            type: 'boolean',
            description: 'Add Prettier configuration (default: true)',
          },
          docker: {
            type: 'boolean',
            description: 'Add Dockerfile and docker-compose.yml (default: false)',
          },
          ci: {
            type: 'string',
            enum: ['github', 'gitlab', 'none'],
            description: 'Add CI configuration (default: none)',
          },
        },
        required: ['type'],
        additionalProperties: false,
      },
      handler: async (args) => {
        const projectType = args['type'];
        const projectName = typeof args['name'] === 'string' ? args['name'] : 'my-project';
        const initGit = args['git'] !== false;
        const addEslint = args['eslint'] !== false;
        const addPrettier = args['prettier'] !== false;
        const addDocker = args['docker'] === true;
        const ci = typeof args['ci'] === 'string' ? args['ci'] : 'none';
        const output = [`# Project Initialization: ${projectName}`, ''];
        output.push(`Type: ${projectType}`);
        output.push('');
        try {
          // Initialize based on type
          output.push('## Creating Project');
          switch (projectType) {
            case 'node':
              await execAsync('npm init -y', { cwd: workingDir, timeout: 30000 });
              output.push('✓ Initialized npm project');
              break;
            case 'typescript':
              await execAsync('npm init -y && npm install typescript @types/node --save-dev', {
                cwd: workingDir,
                timeout: 120000,
              });
              await execAsync('npx tsc --init', { cwd: workingDir, timeout: 30000 });
              output.push('✓ Initialized TypeScript project');
              break;
            case 'react':
              await execAsync(`npx create-react-app ${projectName} --template typescript`, {
                cwd: workingDir,
                timeout: 300000,
              });
              output.push('✓ Created React app');
              break;
            case 'nextjs':
              await execAsync(`npx create-next-app@latest ${projectName} --typescript --eslint --tailwind --app`, {
                cwd: workingDir,
                timeout: 300000,
              });
              output.push('✓ Created Next.js app');
              break;
            case 'python':
              await execAsync('python3 -m venv .venv', { cwd: workingDir, timeout: 60000 });
              output.push('✓ Created Python virtual environment');
              break;
            case 'go':
              await execAsync(`go mod init ${projectName}`, { cwd: workingDir, timeout: 30000 });
              output.push('✓ Initialized Go module');
              break;
          }
          // Git
          if (initGit) {
            output.push('');
            output.push('## Git');
            try {
              await execAsync('git init', { cwd: workingDir, timeout: 10000 });
              output.push('✓ Initialized git repository');
            }
            catch {
              output.push('Git already initialized or failed');
            }
          }
          // ESLint (for JS/TS projects)
          if (addEslint && ['node', 'typescript', 'react', 'nextjs'].includes(projectType)) {
            output.push('');
            output.push('## ESLint');
            try {
              await execAsync('npm install eslint --save-dev', { cwd: workingDir, timeout: 120000 });
              output.push('✓ Installed ESLint');
            }
            catch (e) {
              output.push(`ESLint setup: ${e.message}`);
            }
          }
          // Prettier
          if (addPrettier) {
            output.push('');
            output.push('## Prettier');
            try {
              await execAsync('npm install prettier --save-dev', { cwd: workingDir, timeout: 120000 });
              output.push('✓ Installed Prettier');
            }
            catch (e) {
              output.push(`Prettier setup: ${e.message}`);
            }
          }
          // Docker
          if (addDocker) {
            output.push('');
            output.push('## Docker');
            output.push('Add Dockerfile and docker-compose.yml manually based on your needs');
          }
          // CI
          if (ci !== 'none') {
            output.push('');
            output.push(`## CI (${ci})`);
            if (ci === 'github') {
              output.push('Add .github/workflows/ci.yml for GitHub Actions');
            }
            else if (ci === 'gitlab') {
              output.push('Add .gitlab-ci.yml for GitLab CI');
            }
          }
          output.push('');
          output.push('## Summary');
          output.push(`✓ Project ${projectName} initialized as ${projectType}`);
          return output.join('\n');
        }
        catch (error) {
          return `Project initialization failed: ${error.message}`;
        }
      },
    },
    // ========================================================================
    // Environment & Secret Management
    // ========================================================================
    {
      name: 'env_check',
      description: 'Check environment configuration and required variables',
      parameters: {
        type: 'object',
        properties: {
          envFile: {
            type: 'string',
            description: 'Path to .env file to check (default: .env)',
          },
          exampleFile: {
            type: 'string',
            description: 'Path to .env.example to compare against',
          },
          requiredVars: {
            type: 'array',
            items: { type: 'string' },
            description: 'List of required environment variables to check',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const envFile = typeof args['envFile'] === 'string' ? args['envFile'] : '.env';
        const exampleFile = typeof args['exampleFile'] === 'string' ? args['exampleFile'] : '.env.example';
        const requiredVars = Array.isArray(args['requiredVars']) ? args['requiredVars'] : [];
        const output = ['# Environment Check', ''];
        try {
          const envPath = join(workingDir, envFile);
          const examplePath = join(workingDir, exampleFile);
          // Parse env file
          const parseEnvFile = (content) => {
            const vars = new Map();
            for (const line of content.split('\n')) {
              const trimmed = line.trim();
              if (trimmed && !trimmed.startsWith('#')) {
                const [key, ...valueParts] = trimmed.split('=');
                if (key) {
                  vars.set(key.trim(), valueParts.join('=').trim());
                }
              }
            }
            return vars;
          };
          // Check .env exists
          let envVars = new Map();
          if (existsSync(envPath)) {
            const content = readFileSync(envPath, 'utf-8');
            envVars = parseEnvFile(content);
            output.push(`✓ Found ${envFile} with ${envVars.size} variables`);
          }
          else {
            output.push(`✗ ${envFile} not found`);
          }
          // Compare with example
          if (existsSync(examplePath)) {
            output.push('');
            output.push(`## Comparing with ${exampleFile}`);
            const exampleContent = readFileSync(examplePath, 'utf-8');
            const exampleVars = parseEnvFile(exampleContent);
            const missing = [];
            const extra = [];
            for (const key of exampleVars.keys()) {
              if (!envVars.has(key)) {
                missing.push(key);
              }
            }
            for (const key of envVars.keys()) {
              if (!exampleVars.has(key)) {
                extra.push(key);
              }
            }
            if (missing.length > 0) {
              output.push(`✗ Missing variables: ${missing.join(', ')}`);
            }
            else {
              output.push('✓ All example variables are defined');
            }
            if (extra.length > 0) {
              output.push(`ℹ Extra variables: ${extra.join(', ')}`);
            }
          }
          // Check required vars
          if (requiredVars.length > 0) {
            output.push('');
            output.push('## Required Variables');
            const missingRequired = [];
            const emptyRequired = [];
            for (const varName of requiredVars) {
              const value = envVars.get(varName) ?? process.env[varName];
              if (value === undefined) {
                missingRequired.push(varName);
              }
              else if (value === '') {
                emptyRequired.push(varName);
              }
              else {
                output.push(`✓ ${varName}: set`);
              }
            }
            if (missingRequired.length > 0) {
              output.push(`✗ Missing: ${missingRequired.join(', ')}`);
            }
            if (emptyRequired.length > 0) {
              output.push(`⚠ Empty: ${emptyRequired.join(', ')}`);
            }
          }
          // Check for sensitive patterns in .env
          output.push('');
          output.push('## Security Check');
          const sensitivePatterns = ['password', 'secret', 'key', 'token', 'credential'];
          const exposedSecrets = [];
          for (const [key, value] of envVars.entries()) {
            const lowerKey = key.toLowerCase();
            if (sensitivePatterns.some((p) => lowerKey.includes(p)) && value && value !== '***') {
              exposedSecrets.push(key);
            }
          }
          if (exposedSecrets.length > 0) {
            output.push(`⚠ Sensitive variables found: ${exposedSecrets.length}`);
            output.push('Make sure .env is in .gitignore');
          }
          else {
            output.push('✓ No obvious sensitive data patterns found');
          }
          return output.join('\n');
        }
        catch (error) {
          return `Environment check failed: ${error.message}`;
        }
      },
    },
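    // --------------------------------------------------------------------
    // Illustrative sketch (not shipped code): parseEnvFile above is a minimal
    // KEY=VALUE split that ignores blank lines and '#' comments. Example:
    //
    //   // .env contents:
    //   //   # local settings
    //   //   DATABASE_URL=postgres://localhost:5432/dev
    //   //   EMPTY_FLAG=
    //   // parsed Map:
    //   //   'DATABASE_URL' -> 'postgres://localhost:5432/dev'
    //   //   'EMPTY_FLAG'   -> ''
    //
    // Values containing '=' survive because the remainder is re-joined with
    // '='; quoting and `export` prefixes are not handled.
    // --------------------------------------------------------------------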
    // ========================================================================
    // Process & Service Management
    // ========================================================================
    {
      name: 'service_status',
      description: 'Check status of common development services (databases, caches, etc.)',
      parameters: {
        type: 'object',
        properties: {
          services: {
            type: 'array',
            items: {
              type: 'string',
              enum: ['postgres', 'mysql', 'redis', 'mongodb', 'elasticsearch', 'docker', 'nginx', 'node'],
            },
            description: 'Services to check (default: auto-detect)',
          },
          ports: {
            type: 'array',
            items: { type: 'number' },
            description: 'Additional ports to check',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const services = Array.isArray(args['services']) ? args['services'] : [];
        const ports = Array.isArray(args['ports']) ? args['ports'] : [];
        const output = ['# Service Status', ''];
        const serviceConfig = {
          postgres: { port: 5432, process: 'postgres' },
          mysql: { port: 3306, process: 'mysql' },
          redis: { port: 6379, process: 'redis' },
          mongodb: { port: 27017, process: 'mongod' },
          elasticsearch: { port: 9200, process: 'java' },
          docker: { port: 2375, process: 'docker' },
          nginx: { port: 80, process: 'nginx' },
          node: { port: 3000, process: 'node' },
        };
        const checkPort = async (port) => {
          try {
            const { stdout } = await execAsync(`lsof -i :${port} -sTCP:LISTEN 2>/dev/null | head -1`, {
              cwd: workingDir,
              timeout: 5000,
              shell: '/bin/bash',
            });
            return stdout.trim().length > 0;
          }
          catch {
            return false;
          }
        };
        const checkProcess = async (name) => {
          try {
            const { stdout } = await execAsync(`pgrep -x "${name}" 2>/dev/null`, {
              cwd: workingDir,
              timeout: 5000,
              shell: '/bin/bash',
            });
            return stdout.trim().length > 0;
          }
          catch {
            return false;
          }
        };
        // Check specified services
        const servicesToCheck = services.length > 0 ? services : Object.keys(serviceConfig);
        output.push('## Services');
        for (const service of servicesToCheck) {
          const config = serviceConfig[service];
          if (!config)
            continue;
          const portOpen = await checkPort(config.port);
          const processRunning = config.process ? await checkProcess(config.process) : false;
          const status = portOpen ? '✓' : processRunning ? '⚠' : '✗';
          const detail = portOpen ? `listening on :${config.port}` : processRunning ? 'process running' : 'not detected';
          output.push(`${status} ${service}: ${detail}`);
        }
        // Check additional ports
        if (ports.length > 0) {
          output.push('');
          output.push('## Custom Ports');
          for (const port of ports) {
            const open = await checkPort(port);
            output.push(`${open ? '✓' : '✗'} Port ${port}: ${open ? 'in use' : 'available'}`);
          }
        }
        // Docker status
        output.push('');
        output.push('## Docker');
        try {
          const { stdout: dockerPs } = await execAsync('docker ps --format "{{.Names}}: {{.Status}}" 2>/dev/null', {
            cwd: workingDir,
            timeout: 10000,
            shell: '/bin/bash',
          });
          if (dockerPs.trim()) {
            output.push('Running containers:');
            dockerPs.split('\n').forEach((line) => {
              if (line.trim())
                output.push(` ${line.trim()}`);
            });
          }
          else {
            output.push('No running containers');
          }
        }
        catch {
          output.push('Docker not available');
        }
        return output.join('\n');
      },
    },
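    // --------------------------------------------------------------------
    // Illustrative note (not shipped code): service_status reports each entry
    // with a three-state marker derived from the two probes above:
    //
    //   // port listening          -> '✓ <service>: listening on :<port>'
    //   // process only, no port   -> '⚠ <service>: process running'
    //   // neither                 -> '✗ <service>: not detected'
    //
    // The port probe uses `lsof -i :<port> -sTCP:LISTEN`, so it only counts
    // listeners, not outgoing connections.
    // --------------------------------------------------------------------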
    {
      name: 'kill_port',
      description: 'Kill process running on a specific port',
      parameters: {
        type: 'object',
        properties: {
          port: {
            type: 'number',
            description: 'Port number to free up',
          },
          force: {
            type: 'boolean',
            description: 'Force kill with SIGKILL instead of SIGTERM (default: false)',
          },
        },
        required: ['port'],
        additionalProperties: false,
      },
      handler: async (args) => {
        const port = args['port'];
        const force = args['force'] === true;
        const output = [`# Kill Process on Port ${port}`, ''];
        try {
          // Find process on port
          const { stdout: lsofOut } = await execAsync(`lsof -i :${port} -sTCP:LISTEN 2>/dev/null`, {
            cwd: workingDir,
            timeout: 10000,
            shell: '/bin/bash',
          });
          if (!lsofOut.trim()) {
            return `No process found listening on port ${port}`;
          }
          output.push('## Process Info');
          output.push(lsofOut.trim());
          output.push('');
          // Extract PID
          const lines = lsofOut.split('\n').filter((l) => l.trim());
          if (lines.length < 2) {
            return `Could not find process on port ${port}`;
          }
          const pidMatch = lines[1]?.match(/\S+\s+(\d+)/);
          if (!pidMatch) {
            return `Could not extract PID from lsof output`;
          }
          const pid = pidMatch[1];
          const signal = force ? 'KILL' : 'TERM';
          output.push(`## Killing Process`);
          output.push(`PID: ${pid}`);
          output.push(`Signal: ${signal}`);
          await execAsync(`kill -${signal} ${pid}`, { cwd: workingDir, timeout: 10000 });
          output.push(`✓ Sent ${signal} signal to process ${pid}`);
          // Wait and verify
          await new Promise((r) => setTimeout(r, 1000));
          try {
            const { stdout: checkOut } = await execAsync(`lsof -i :${port} -sTCP:LISTEN 2>/dev/null`, {
              cwd: workingDir,
              timeout: 5000,
              shell: '/bin/bash',
            });
            if (checkOut.trim()) {
              output.push(`⚠ Process may still be running. Try with force=true`);
            }
            else {
              output.push(`✓ Port ${port} is now free`);
            }
          }
          catch {
            output.push(`✓ Port ${port} is now free`);
          }
          return output.join('\n');
        }
        catch (error) {
          return `Failed to kill process on port ${port}: ${error.message}`;
        }
      },
    },
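    // --------------------------------------------------------------------
    // Illustrative sketch (not shipped code): kill_port extracts the PID from
    // the second line of `lsof` output (the first line is the header), e.g.
    //
    //   // COMMAND   PID  USER   FD   TYPE ... NAME
    //   // node    41523  dev    23u  IPv4 ... TCP *:3000 (LISTEN)
    //   const pidMatch = 'node    41523  dev'.match(/\S+\s+(\d+)/);
    //   // pidMatch[1] === '41523'  -> kill -TERM 41523 (or -KILL when force=true)
    //
    // Only the first listener is targeted; if several processes share the
    // port, the tool has to be run again.
    // --------------------------------------------------------------------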
    // ========================================================================
    // Dependency Management
    // ========================================================================
    {
      name: 'deps_audit',
      description: 'Audit dependencies for security vulnerabilities and outdated packages',
      parameters: {
        type: 'object',
        properties: {
          fix: {
            type: 'boolean',
            description: 'Attempt to fix vulnerabilities automatically (default: false)',
          },
          checkOutdated: {
            type: 'boolean',
            description: 'Check for outdated packages (default: true)',
          },
          level: {
            type: 'string',
            enum: ['info', 'low', 'moderate', 'high', 'critical'],
            description: 'Minimum vulnerability level to report (default: moderate)',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const fix = args['fix'] === true;
        const checkOutdated = args['checkOutdated'] !== false;
        const level = typeof args['level'] === 'string' ? args['level'] : 'moderate';
        const output = ['# Dependency Audit', ''];
        try {
          // npm audit
          output.push('## Security Audit');
          try {
            const auditCmd = fix ? 'npm audit fix' : `npm audit --audit-level=${level}`;
            const { stdout: auditOut, stderr: auditErr } = await execAsync(auditCmd, {
              cwd: workingDir,
              timeout: 120000,
              maxBuffer: 1024 * 1024 * 10,
            });
            output.push(auditOut.substring(0, 3000) || '✓ No vulnerabilities found');
            if (auditErr && !auditErr.includes('npm WARN')) {
              output.push(`Warnings: ${auditErr.substring(0, 500)}`);
            }
          }
          catch (e) {
            // npm audit returns non-zero if vulnerabilities found
            if (e.stdout) {
              output.push(e.stdout.substring(0, 3000));
            }
            else {
              output.push(`Audit check: ${e.message}`);
            }
          }
          // Check outdated
          if (checkOutdated) {
            output.push('');
            output.push('## Outdated Packages');
            try {
              const { stdout: outdatedOut } = await execAsync('npm outdated --json 2>/dev/null', {
                cwd: workingDir,
                timeout: 60000,
                shell: '/bin/bash',
              });
              if (outdatedOut.trim() && outdatedOut !== '{}') {
                const outdated = JSON.parse(outdatedOut);
                const entries = Object.entries(outdated);
                if (entries.length > 0) {
                  output.push(`Found ${entries.length} outdated package(s):`);
                  for (const [pkg, info] of entries.slice(0, 20)) {
                    output.push(` ${pkg}: ${info.current} → ${info.wanted} (latest: ${info.latest})`);
                  }
                  if (entries.length > 20) {
                    output.push(` ... and ${entries.length - 20} more`);
                  }
                }
                else {
                  output.push('✓ All packages are up to date');
                }
              }
              else {
                output.push('✓ All packages are up to date');
              }
            }
            catch {
              output.push('Could not check outdated packages');
            }
          }
          // License check
          output.push('');
          output.push('## License Info');
          try {
            const { stdout: licenseOut } = await execAsync('npx license-checker --summary 2>/dev/null', {
              cwd: workingDir,
              timeout: 60000,
              shell: '/bin/bash',
            });
            if (licenseOut.trim()) {
              output.push(licenseOut.substring(0, 1000));
            }
          }
          catch {
            output.push('License checker not available (install with: npm i -g license-checker)');
          }
          return output.join('\n');
        }
        catch (error) {
          return `Dependency audit failed: ${error.message}`;
        }
      },
    },
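    // --------------------------------------------------------------------
    // Illustrative note (not shipped code): `npm audit` exits non-zero when it
    // finds vulnerabilities at or above the requested level, so the report
    // often arrives through the catch branch above rather than from a
    // successful call. A minimal version of that pattern:
    //
    //   try {
    //     const { stdout } = await execAsync('npm audit --audit-level=moderate', opts);
    //     report = stdout;                  // clean: no findings at this level
    //   } catch (e) {
    //     report = e.stdout || e.message;   // findings: output rides on the error object
    //   }
    //
    // `opts` and `report` are local names used only for this example.
    // --------------------------------------------------------------------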
    {
      name: 'deps_update',
      description: 'Update dependencies to latest versions',
      parameters: {
        type: 'object',
        properties: {
          mode: {
            type: 'string',
            enum: ['patch', 'minor', 'major', 'latest'],
            description: 'Update mode - patch (safest), minor, major, or latest (default: minor)',
          },
          packages: {
            type: 'array',
            items: { type: 'string' },
            description: 'Specific packages to update (default: all)',
          },
          dryRun: {
            type: 'boolean',
            description: 'Show what would be updated without making changes',
          },
          runTests: {
            type: 'boolean',
            description: 'Run tests after update to verify (default: true)',
          },
        },
        additionalProperties: false,
      },
      handler: async (args) => {
        const mode = typeof args['mode'] === 'string' ? args['mode'] : 'minor';
        const packages = Array.isArray(args['packages']) ? args['packages'] : [];
        const dryRun = args['dryRun'] === true;
        const runTests = args['runTests'] !== false;
        const output = ['# Dependency Update', ''];
        output.push(`Mode: ${mode}`);
        if (dryRun)
          output.push('**DRY RUN**');
        output.push('');
        try {
          if (dryRun) {
            // Just check what would be updated
            output.push('## Would Update');
            const { stdout } = await execAsync('npm outdated --json 2>/dev/null || true', {
              cwd: workingDir,
              timeout: 60000,
              shell: '/bin/bash',
            });
            if (stdout.trim() && stdout !== '{}') {
              const outdated = JSON.parse(stdout);
              for (const [pkg, info] of Object.entries(outdated)) {
                if (packages.length === 0 || packages.includes(pkg)) {
                  const target = mode === 'latest' ? info.latest : mode === 'major' ? info.latest : info.wanted;
                  output.push(` ${pkg}: ${info.current} → ${target}`);
                }
              }
            }
            else {
              output.push('All packages are up to date');
            }
            return output.join('\n');
          }
          // Perform update
          output.push('## Updating');
          let updateCmd = 'npm update';
          if (packages.length > 0) {
            updateCmd = `npm update ${packages.join(' ')}`;
          }
          if (mode === 'latest' || mode === 'major') {
            // For major updates, use npm-check-updates
            output.push('Major/latest updates require npm-check-updates');
            try {
              const ncuCmd = packages.length > 0
                ? `npx npm-check-updates -u ${packages.join(' ')}`
                : 'npx npm-check-updates -u';
              await execAsync(ncuCmd, { cwd: workingDir, timeout: 120000 });
              await execAsync('npm install', { cwd: workingDir, timeout: 300000 });
              output.push('✓ Updated package.json and installed');
            }
            catch (e) {
              output.push(`Update failed: ${e.message}`);
              return output.join('\n');
            }
          }
          else {
            const { stdout: updateOut } = await execAsync(updateCmd, {
              cwd: workingDir,
              timeout: 300000,
            });
            output.push(updateOut.substring(0, 2000) || '✓ Updated');
          }
          // Run tests
          if (runTests) {
            output.push('');
            output.push('## Verifying');
            try {
              await execAsync('npm test', { cwd: workingDir, timeout: 300000 });
              output.push('✓ Tests passed');
            }
            catch (e) {
              output.push(`⚠ Tests failed: ${e.message}`);
              output.push('Consider reverting: git checkout package.json package-lock.json && npm install');
            }
          }
          return output.join('\n');
        }
        catch (error) {
          return `Dependency update failed: ${error.message}`;
        }
      },
    },
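    // --------------------------------------------------------------------
    // Illustrative note (not shipped code): both dependency tools parse
    // `npm outdated --json`, whose entries look roughly like:
    //
    //   // { "typescript": { "current": "5.3.3", "wanted": "5.3.4",
    //   //                   "latest": "5.6.2", "dependent": "erosolar-cli" } }
    //
    // In dry-run mode the target shown is `wanted` for patch/minor and `latest`
    // for major/latest; real major updates go through `npx npm-check-updates -u`
    // followed by `npm install`, as in the handler above.
    // --------------------------------------------------------------------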
  ];
}
//# sourceMappingURL=devTools.js.map
|