@noorm/marie-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/JOY_ZONING.md +200 -0
- package/LICENSE.md +190 -0
- package/README.md +94 -0
- package/dist/cli-new/components/App.js +138 -0
- package/dist/cli-new/components/App.js.map +1 -0
- package/dist/cli-new/components/ApprovalDialog.js +31 -0
- package/dist/cli-new/components/ApprovalDialog.js.map +1 -0
- package/dist/cli-new/components/Banner.js +23 -0
- package/dist/cli-new/components/Banner.js.map +1 -0
- package/dist/cli-new/components/ChatArea.js +49 -0
- package/dist/cli-new/components/ChatArea.js.map +1 -0
- package/dist/cli-new/components/Header.js +20 -0
- package/dist/cli-new/components/Header.js.map +1 -0
- package/dist/cli-new/components/InputArea.js +97 -0
- package/dist/cli-new/components/InputArea.js.map +1 -0
- package/dist/cli-new/components/MessageBubble.js +114 -0
- package/dist/cli-new/components/MessageBubble.js.map +1 -0
- package/dist/cli-new/components/SessionSwitcher.js +46 -0
- package/dist/cli-new/components/SessionSwitcher.js.map +1 -0
- package/dist/cli-new/components/SetupWizard.js +283 -0
- package/dist/cli-new/components/SetupWizard.js.map +1 -0
- package/dist/cli-new/components/ToolCallDisplay.js +45 -0
- package/dist/cli-new/components/ToolCallDisplay.js.map +1 -0
- package/dist/cli-new/hooks/useGit.js +99 -0
- package/dist/cli-new/hooks/useGit.js.map +1 -0
- package/dist/cli-new/hooks/useMarie.js +249 -0
- package/dist/cli-new/hooks/useMarie.js.map +1 -0
- package/dist/cli-new/hooks/useSessions.js +75 -0
- package/dist/cli-new/hooks/useSessions.js.map +1 -0
- package/dist/cli-new/index.js +52 -0
- package/dist/cli-new/index.js.map +1 -0
- package/dist/cli-new/styles/theme.js +68 -0
- package/dist/cli-new/styles/theme.js.map +1 -0
- package/dist/cli-new/types/cli.js +2 -0
- package/dist/cli-new/types/cli.js.map +1 -0
- package/dist/extension.cjs +655 -0
- package/dist/monolith/adapters/CliMarieAdapter.js +72 -0
- package/dist/monolith/adapters/CliMarieAdapter.js.map +1 -0
- package/dist/monolith/adapters/VscodeMarieAdapter.js +81 -0
- package/dist/monolith/adapters/VscodeMarieAdapter.js.map +1 -0
- package/dist/monolith/cli/CliFileSystemPort.js +83 -0
- package/dist/monolith/cli/CliFileSystemPort.js.map +1 -0
- package/dist/monolith/cli/MarieToolDefinitionsCLI.js +438 -0
- package/dist/monolith/cli/MarieToolDefinitionsCLI.js.map +1 -0
- package/dist/monolith/cli/index.js +272 -0
- package/dist/monolith/cli/index.js.map +1 -0
- package/dist/monolith/cli/services/JoyAutomationServiceCLI.js +80 -0
- package/dist/monolith/cli/services/JoyAutomationServiceCLI.js.map +1 -0
- package/dist/monolith/cli/services/JoyServiceCLI.js +63 -0
- package/dist/monolith/cli/services/JoyServiceCLI.js.map +1 -0
- package/dist/monolith/cli/storage.js +119 -0
- package/dist/monolith/cli/storage.js.map +1 -0
- package/dist/monolith/domain/joy/JoyTools.js +513 -0
- package/dist/monolith/domain/joy/JoyTools.js.map +1 -0
- package/dist/monolith/domain/joy/RitualService.js +51 -0
- package/dist/monolith/domain/joy/RitualService.js.map +1 -0
- package/dist/monolith/domain/marie/MarieTypes.js +2 -0
- package/dist/monolith/domain/marie/MarieTypes.js.map +1 -0
- package/dist/monolith/infrastructure/ai/agents/MarieAscendant.js +230 -0
- package/dist/monolith/infrastructure/ai/agents/MarieAscendant.js.map +1 -0
- package/dist/monolith/infrastructure/ai/agents/MarieYOLO.js +207 -0
- package/dist/monolith/infrastructure/ai/agents/MarieYOLO.js.map +1 -0
- package/dist/monolith/infrastructure/ai/context/ContextArchiveService.js +129 -0
- package/dist/monolith/infrastructure/ai/context/ContextArchiveService.js.map +1 -0
- package/dist/monolith/infrastructure/ai/context/ContextManager.js +118 -0
- package/dist/monolith/infrastructure/ai/context/ContextManager.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/FileSystemPort.js +2 -0
- package/dist/monolith/infrastructure/ai/core/FileSystemPort.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/GhostPort.js +2 -0
- package/dist/monolith/infrastructure/ai/core/GhostPort.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieAscensionTypes.js +2 -0
- package/dist/monolith/infrastructure/ai/core/MarieAscensionTypes.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieEngine.js +590 -0
- package/dist/monolith/infrastructure/ai/core/MarieEngine.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieEventDispatcher.js +161 -0
- package/dist/monolith/infrastructure/ai/core/MarieEventDispatcher.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieLockManager.js +121 -0
- package/dist/monolith/infrastructure/ai/core/MarieLockManager.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieProgressTracker.js +256 -0
- package/dist/monolith/infrastructure/ai/core/MarieProgressTracker.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MariePulseService.js +67 -0
- package/dist/monolith/infrastructure/ai/core/MariePulseService.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieResponse.js +101 -0
- package/dist/monolith/infrastructure/ai/core/MarieResponse.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieSanitizer.js +86 -0
- package/dist/monolith/infrastructure/ai/core/MarieSanitizer.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieSession.js +202 -0
- package/dist/monolith/infrastructure/ai/core/MarieSession.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieStabilityMonitor.js +58 -0
- package/dist/monolith/infrastructure/ai/core/MarieStabilityMonitor.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieToolMender.js +127 -0
- package/dist/monolith/infrastructure/ai/core/MarieToolMender.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieToolProcessor.js +548 -0
- package/dist/monolith/infrastructure/ai/core/MarieToolProcessor.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/MarieYOLOTypes.js +2 -0
- package/dist/monolith/infrastructure/ai/core/MarieYOLOTypes.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/ReasoningBudget.js +125 -0
- package/dist/monolith/infrastructure/ai/core/ReasoningBudget.js.map +1 -0
- package/dist/monolith/infrastructure/ai/core/VscodeFileSystemPort.js +30 -0
- package/dist/monolith/infrastructure/ai/core/VscodeFileSystemPort.js.map +1 -0
- package/dist/monolith/infrastructure/ai/providers/AIProvider.js +2 -0
- package/dist/monolith/infrastructure/ai/providers/AIProvider.js.map +1 -0
- package/dist/monolith/infrastructure/ai/providers/AnthropicProvider.js +148 -0
- package/dist/monolith/infrastructure/ai/providers/AnthropicProvider.js.map +1 -0
- package/dist/monolith/infrastructure/ai/providers/CerebrasProvider.js +208 -0
- package/dist/monolith/infrastructure/ai/providers/CerebrasProvider.js.map +1 -0
- package/dist/monolith/infrastructure/ai/providers/OpenRouterProvider.js +404 -0
- package/dist/monolith/infrastructure/ai/providers/OpenRouterProvider.js.map +1 -0
- package/dist/monolith/infrastructure/ai/providers/OpenRouterStreamParser.js +283 -0
- package/dist/monolith/infrastructure/ai/providers/OpenRouterStreamParser.js.map +1 -0
- package/dist/monolith/infrastructure/config/ConfigService.js +398 -0
- package/dist/monolith/infrastructure/config/ConfigService.js.map +1 -0
- package/dist/monolith/infrastructure/services/MarieMemoryStore.js +140 -0
- package/dist/monolith/infrastructure/services/MarieMemoryStore.js.map +1 -0
- package/dist/monolith/infrastructure/tools/MarieToolDefinitions.js +1568 -0
- package/dist/monolith/infrastructure/tools/MarieToolDefinitions.js.map +1 -0
- package/dist/monolith/infrastructure/tools/PureStreamParser.js +147 -0
- package/dist/monolith/infrastructure/tools/PureStreamParser.js.map +1 -0
- package/dist/monolith/infrastructure/tools/SharedToolDefinitions.js +223 -0
- package/dist/monolith/infrastructure/tools/SharedToolDefinitions.js.map +1 -0
- package/dist/monolith/infrastructure/tools/ToolRegistry.js +29 -0
- package/dist/monolith/infrastructure/tools/ToolRegistry.js.map +1 -0
- package/dist/monolith/infrastructure/tools/ToolUtils.js +59 -0
- package/dist/monolith/infrastructure/tools/ToolUtils.js.map +1 -0
- package/dist/monolith/plumbing/analysis/CodeHealthService.js +146 -0
- package/dist/monolith/plumbing/analysis/CodeHealthService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/ComplexityService.js +43 -0
- package/dist/monolith/plumbing/analysis/ComplexityService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/DependencyService.js +51 -0
- package/dist/monolith/plumbing/analysis/DependencyService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/DiscoveryService.js +49 -0
- package/dist/monolith/plumbing/analysis/DiscoveryService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/JoyMapService.js +66 -0
- package/dist/monolith/plumbing/analysis/JoyMapService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/LintService.js +132 -0
- package/dist/monolith/plumbing/analysis/LintService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/MarieSentinelService.js +276 -0
- package/dist/monolith/plumbing/analysis/MarieSentinelService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/QualityGuardrailService.js +119 -0
- package/dist/monolith/plumbing/analysis/QualityGuardrailService.js.map +1 -0
- package/dist/monolith/plumbing/analysis/SurgicalMender.js +70 -0
- package/dist/monolith/plumbing/analysis/SurgicalMender.js.map +1 -0
- package/dist/monolith/plumbing/analysis/TestService.js +104 -0
- package/dist/monolith/plumbing/analysis/TestService.js.map +1 -0
- package/dist/monolith/plumbing/filesystem/FileService.js +406 -0
- package/dist/monolith/plumbing/filesystem/FileService.js.map +1 -0
- package/dist/monolith/plumbing/filesystem/PathResolver.js +26 -0
- package/dist/monolith/plumbing/filesystem/PathResolver.js.map +1 -0
- package/dist/monolith/plumbing/git/GitService.js +71 -0
- package/dist/monolith/plumbing/git/GitService.js.map +1 -0
- package/dist/monolith/plumbing/lsp/SymbolService.js +36 -0
- package/dist/monolith/plumbing/lsp/SymbolService.js.map +1 -0
- package/dist/monolith/plumbing/terminal/ProcessRegistry.js +31 -0
- package/dist/monolith/plumbing/terminal/ProcessRegistry.js.map +1 -0
- package/dist/monolith/plumbing/terminal/TerminalService.js +180 -0
- package/dist/monolith/plumbing/terminal/TerminalService.js.map +1 -0
- package/dist/monolith/plumbing/ui/DecorationService.js +54 -0
- package/dist/monolith/plumbing/ui/DecorationService.js.map +1 -0
- package/dist/monolith/plumbing/utils/ErrorUtils.js +11 -0
- package/dist/monolith/plumbing/utils/ErrorUtils.js.map +1 -0
- package/dist/monolith/plumbing/utils/JsonUtils.js +360 -0
- package/dist/monolith/plumbing/utils/JsonUtils.js.map +1 -0
- package/dist/monolith/plumbing/utils/PrefixTree.js +153 -0
- package/dist/monolith/plumbing/utils/PrefixTree.js.map +1 -0
- package/dist/monolith/plumbing/utils/RetryUtils.js +141 -0
- package/dist/monolith/plumbing/utils/RetryUtils.js.map +1 -0
- package/dist/monolith/plumbing/utils/StreamTagDetector.js +128 -0
- package/dist/monolith/plumbing/utils/StreamTagDetector.js.map +1 -0
- package/dist/monolith/plumbing/utils/StringUtils.js +97 -0
- package/dist/monolith/plumbing/utils/StringUtils.js.map +1 -0
- package/dist/monolith/plumbing/utils/TimeoutUtils.js +21 -0
- package/dist/monolith/plumbing/utils/TimeoutUtils.js.map +1 -0
- package/dist/monolith/runtime/MarieRuntime.js +354 -0
- package/dist/monolith/runtime/MarieRuntime.js.map +1 -0
- package/dist/monolith/runtime/RuntimeAdapterBase.js +59 -0
- package/dist/monolith/runtime/RuntimeAdapterBase.js.map +1 -0
- package/dist/monolith/runtime/providerFactory.js +11 -0
- package/dist/monolith/runtime/providerFactory.js.map +1 -0
- package/dist/monolith/runtime/types.js +2 -0
- package/dist/monolith/runtime/types.js.map +1 -0
- package/dist/monolith/services/HealthService.js +38 -0
- package/dist/monolith/services/HealthService.js.map +1 -0
- package/dist/monolith/services/JoyAutomationService.js +131 -0
- package/dist/monolith/services/JoyAutomationService.js.map +1 -0
- package/dist/monolith/services/JoyLogService.js +48 -0
- package/dist/monolith/services/JoyLogService.js.map +1 -0
- package/dist/monolith/services/JoyService.js +190 -0
- package/dist/monolith/services/JoyService.js.map +1 -0
- package/dist/monolith/services/MarieGhostService.js +168 -0
- package/dist/monolith/services/MarieGhostService.js.map +1 -0
- package/dist/monolith/services/MarieSCMProvider.js +41 -0
- package/dist/monolith/services/MarieSCMProvider.js.map +1 -0
- package/package.json +168 -0
|
@@ -0,0 +1,1568 @@
|
|
|
1
|
+
import * as vscode from "vscode";
|
|
2
|
+
import * as fs from "fs/promises";
|
|
3
|
+
import * as path from "path";
|
|
4
|
+
import { getStringArg, getArrayArg } from "./ToolUtils.js";
|
|
5
|
+
import { registerSharedToolDefinitions } from "./SharedToolDefinitions.js";
|
|
6
|
+
import { withTimeout } from "../../plumbing/utils/TimeoutUtils.js";
|
|
7
|
+
import { writeFile, deleteFile, } from "../../plumbing/filesystem/FileService.js";
|
|
8
|
+
import { logGratitude, foldCode, cherishFile, generateJoyDashboard, } from "../../domain/joy/JoyTools.js";
|
|
9
|
+
import { getLettingGoMessage } from "../../../prompts.js";
|
|
10
|
+
import { gitStatus, getStagedDiff, getUnstagedDiff, } from "../../plumbing/git/GitService.js";
|
|
11
|
+
import { SymbolService } from "../../plumbing/lsp/SymbolService.js";
|
|
12
|
+
import { DependencyService } from "../../plumbing/analysis/DependencyService.js";
|
|
13
|
+
import { DiscoveryService } from "../../plumbing/analysis/DiscoveryService.js";
|
|
14
|
+
import { TestService } from "../../plumbing/analysis/TestService.js";
|
|
15
|
+
import { ComplexityService } from "../../plumbing/analysis/ComplexityService.js";
|
|
16
|
+
import { JoyMapService } from "../../plumbing/analysis/JoyMapService.js";
|
|
17
|
+
import { checkCodeHealth } from "../../plumbing/analysis/CodeHealthService.js";
|
|
18
|
+
import { ContextArchiveService } from "../ai/context/ContextArchiveService.js";
|
|
19
|
+
import { LintService } from "../../plumbing/analysis/LintService.js";
|
|
20
|
+
export function registerMarieTools(registry, automationService) {
|
|
21
|
+
// Wire the shared (CLI / VS Code common) tool definitions into the registry,
// supplying VS Code–specific adapters for path resolution, file I/O, search,
// and git context.
registerSharedToolDefinitions(registry, {
    // Paths arrive pre-resolved in the VS Code host; pass them through unchanged.
    resolvePath: (p) => p,
    writeFile: async (p, content, signal) => await writeFile(p, content, signal),
    readFile: async (p, start, end, signal) => {
        // Lazy import keeps FileService off the activation path.
        const mod = await import("../../plumbing/filesystem/FileService.js");
        return await mod.readFile(p, start, end, signal);
    },
    listDir: async (p, signal) => {
        const mod = await import("../../plumbing/filesystem/FileService.js");
        return await mod.listFiles(p, signal);
    },
    grepSearch: async (q, p, signal) => {
        const mod = await import("../../plumbing/filesystem/FileService.js");
        // FIX: also optional-chain the [0] access — `workspaceFolders` can be an
        // empty array, in which case `?.[0].uri` would throw a TypeError instead
        // of falling back to "".
        return await mod.searchFiles(q, p || vscode.workspace.workspaceFolders?.[0]?.uri.fsPath || "", signal);
    },
    getGitContext: async () => {
        // FIX: guard the [0] access as above so an empty folder list yields
        // undefined (and the "No workspace" message) rather than throwing.
        const root = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath;
        if (!root)
            return "No workspace detected.";
        // The three git queries are independent; run them concurrently.
        const [status, staged, unstaged] = await Promise.all([
            gitStatus(root),
            getStagedDiff(root),
            getUnstagedDiff(root),
        ]);
        return `# Git Context\n\n## Status\n\`\`\`\n${status}\n\`\`\`\n\n## Staged Changes\n\`\`\`\n${staged}\n\`\`\`\n\n## Unstaged Changes\n\`\`\`\n${unstaged}\n\`\`\``;
    },
});
|
|
48
|
+
// Tool: run the project's linter (whole project or a single file) and format
// the findings as a structured triage report with suggested fixes.
registry.register({
    name: "resolve_lint_errors",
    description: "Run targeted linting and receive structured errors with surgical fix suggestions.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "Optional path to a specific file to lint" },
            command: { type: "string", description: "Optional custom lint command" },
        },
    },
    execute: async (args) => {
        // FIX: optional-chain the [0] access — an empty `workspaceFolders` array
        // would otherwise throw before the process.cwd() fallback could apply.
        const workingDir = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath || process.cwd();
        const p = args.path;
        const cmd = args.command || "npm run lint";
        // Single-file lint when a path is supplied; otherwise run the full command.
        const errors = p
            ? await LintService.runLintOnFile(workingDir, p)
            : await LintService.runLint(workingDir, cmd);
        if (errors.length === 0) {
            return "No lint errors found. The garden is pure. ✨";
        }
        let result = `# 🏥 Linting Triage Report (${errors.length} issues found)\n\n`;
        errors.forEach((err, i) => {
            const fix = LintService.suggestFix(err);
            result += `### Issue ${i + 1}: ${err.file}:${err.line}\n`;
            result += `- **Rule**: ${err.ruleId || "unknown"}\n`;
            result += `- **Message**: ${err.message}\n`;
            if (fix)
                result += `- **Suggested Fix**: ${fix}\n`;
            result += "\n";
        });
        result += `**Precision Strike**: Use 'read_file' to audit the specific lines and 'replace_file_content' to apply surgical mends.`;
        return result;
    },
});
|
|
82
|
+
// Tool: project-wide lint audit that summarizes impacted files and points the
// agent at `resolve_lint_errors` for remediation.
registry.register({
    name: "self_heal",
    description: "Autonomous systemic recovery. Marie will audit the project for stability alerts and attempt autonomous fixes.",
    isDestructive: true,
    input_schema: { type: "object", properties: {} },
    execute: async () => {
        // FIX: optional-chain the [0] access — an empty `workspaceFolders` array
        // would otherwise throw before the process.cwd() fallback could apply.
        const workingDir = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath || process.cwd();
        const errors = await LintService.runLint(workingDir);
        if (errors.length === 0) {
            return "Marie's systemic audit found no regressions. Stability is absolute. ✨";
        }
        let result = `# 🧬 Autonomous Recovery Protocol\n\nDetected **${errors.length}** stability alerts in the codebase.\n\n`;
        // Deduplicate so each impacted file is listed once.
        const files = Array.from(new Set(errors.map(e => e.file)));
        result += `**Impacted Files**:\n${files.map(f => `- ${f}`).join("\n")}\n\n`;
        result += `**Trajectory**: Use 'resolve_lint_errors' to view the full triage report and begin surgical remediation.`;
        return result;
    },
});
|
|
100
|
+
// Tool: delete a file, but only after verifying that no symbol it defines is
// still referenced from other files in the workspace ("Safe Compost Check").
registry.register({
    name: "discard_file",
    description: "Permanently delete a file. This is a ritual of letting go.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            path: {
                type: "string",
                description: "The absolute path to the file to discard",
            },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        const uri = vscode.Uri.file(p);
        // 1. Safe Compost Check: Audit workspace for references to any symbols in this file
        const symbols = await vscode.commands.executeCommand("vscode.executeDocumentSymbolProvider", uri);
        if (symbols && symbols.length > 0) {
            const blockers = [];
            for (const sym of symbols) {
                // NOTE(review): `sym.location` only exists on SymbolInformation
                // results; DocumentSymbol results expose `range`/`selectionRange`
                // instead — confirm which shape this provider returns here.
                const refs = await vscode.commands.executeCommand("vscode.executeReferenceProvider", uri, sym.location.range.start);
                // Self-references inside the file being deleted don't block deletion.
                const externalRefs = refs?.filter((r) => r.uri.fsPath !== p) || [];
                if (externalRefs.length > 0) {
                    blockers.push(`Symbol \`${sym.name}\` is used in ${externalRefs.length} other file(s).`);
                }
            }
            // Any external usage vetoes the deletion and reports the blockers.
            if (blockers.length > 0) {
                return (`# 🍂 Refusal of Letting Go\n\n` +
                    `I cannot discard \`${path.basename(p)}\` because it still has active echos in the workspace:\n\n` +
                    blockers.map((b) => `- ${b}`).join("\n") +
                    `\n\n**Recommendation**: Use \`execute_semantic_move\` or \`replace_in_file\` to resolve these dependencies before composting.`);
            }
        }
        // 2. Deletion plus an audit-log entry recording what was discarded.
        await deleteFile(p);
        await logGratitude(`Discarded '${p}'`);
        return `File '${p}' has been discarded. ${getLettingGoMessage()}`;
    },
});
|
|
140
|
+
// Tool: resolve "go to definition" for the symbol at a 1-indexed file position.
registry.register({
    name: "get_symbol_definition",
    description: "Find the definition of a symbol at the specified position. Helps navigate complex codebases.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
            line: { type: "number", description: "The line number (1-indexed)" },
            character: {
                type: "number",
                description: "The character position (1-indexed)",
            },
        },
        required: ["path", "line", "character"],
    },
    execute: async (args, onProgress, signal) => {
        const filePath = getStringArg(args, "path");
        const targetUri = vscode.Uri.file(filePath);
        // Convert the 1-indexed tool coordinates to VS Code's 0-indexed Position.
        const position = new vscode.Position(args.line - 1, args.character - 1);
        return await SymbolService.getDefinitions(targetUri, position);
    },
});
|
|
164
|
+
// Tool: summarize a file's import/export network via the dependency analyzer.
registry.register({
    name: "get_file_dependencies",
    description: "Map the structural network of a file (imports/exports) to understand its dependencies and role.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        // Delegate straight to the analyzer for the requested file.
        return await DependencyService.getFileNetwork(getStringArg(args, "path"));
    },
});
|
|
179
|
+
// Tool: persist a labeled snippet/symbol/file reference into the context
// archive so it survives across conversation turns.
registry.register({
    name: "pin_context",
    description: "Anchor a critical piece of context (snippet, symbol, or file reference) for long-term strategic memory.",
    input_schema: {
        type: "object",
        properties: {
            id: {
                type: "string",
                description: "A unique identifier for this anchor (e.g., 'auth_flow')",
            },
            label: { type: "string", description: "A short, descriptive label" },
            content: { type: "string", description: "The content to anchor" },
            type: { type: "string", enum: ["snippet", "symbol", "file_ref"] },
        },
        required: ["id", "label", "content", "type"],
    },
    execute: async (args, onProgress, signal) => {
        // Extract the required string arguments once up front.
        const anchorId = getStringArg(args, "id");
        const anchorLabel = getStringArg(args, "label");
        const archive = ContextArchiveService.getInstance();
        archive.anchor({
            id: anchorId,
            label: anchorLabel,
            content: getStringArg(args, "content"),
            type: getStringArg(args, "type"),
        });
        return `Context anchored: ${anchorLabel} (ID: ${anchorId}). This information is now preserved for your strategic use. ✨`;
    },
});
|
|
206
|
+
// Tool: list VS Code's current diagnostics (errors/warnings/hints) for a file.
registry.register({
    name: "get_file_diagnostics",
    description: "Get real-time diagnostics (errors, warnings, lints) for a file from VS Code.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const filePath = getStringArg(args, "path");
        const diagnostics = vscode.languages.getDiagnostics(vscode.Uri.file(filePath));
        if (diagnostics.length === 0)
            return "No diagnostics found. The file appears clean. ✨";
        let report = `# Diagnostics for ${path.basename(filePath)}\n\n`;
        for (const d of diagnostics) {
            // Map the numeric severity back to its enum name (Error/Warning/...).
            const severity = vscode.DiagnosticSeverity[d.severity];
            report += `- [${severity}] L${d.range.start.line + 1}: ${d.message} (${d.code || "no-code"})\n`;
        }
        return report;
    },
});
|
|
230
|
+
// Tool: read a file's recent git log to surface authorship context/conventions.
registry.register({
    name: "get_file_history",
    description: "Consult the 'human echos' of a file by reading its recent git history. Use this to understand context and conventions.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        // Lazy import keeps GitService off the activation path.
        const mod = await import("../../plumbing/git/GitService.js");
        // FIX: resolve git history against the workspace root — consistent with
        // the other tools in this registry — rather than process.cwd(), which in
        // a VS Code extension host is generally NOT the user's project directory.
        const root = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath || process.cwd();
        return await mod.getFileHistory(root, p);
    },
});
|
|
246
|
+
// Tool: run a test command and return TestService's structured triage report.
registry.register({
    name: "run_test_suite",
    description: "Execute a test command and receive a structured 'Triage Report' identifying specific failures.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            command: {
                type: "string",
                description: "The test command (e.g., 'npm test')",
            },
        },
        required: ["command"],
    },
    execute: async (args, onProgress, signal) => {
        // Hand the command straight to the triage runner.
        return await TestService.runAndTriage(getStringArg(args, "command"));
    },
});
|
|
265
|
+
// Tool: report a file's complexity metrics (clutter level, cyclomatic
// complexity, LOC) plus any refactoring suggestions from ComplexityService.
registry.register({
    name: "get_code_complexity",
    description: "Analyze a file for cyclomatic complexity and clutter level to identify refactoring needs.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const filePath = getStringArg(args, "path");
        const metrics = await ComplexityService.analyze(filePath);
        let report = `# Complexity Scan: ${path.basename(filePath)}\n\n`;
        report += `- **Clutter Level**: ${metrics.clutterLevel}\n`;
        report += `- **Cyclomatic Complexity**: ${metrics.cyclomaticComplexity}\n`;
        report += `- **Lines of Code**: ${metrics.loc}\n\n`;
        if (metrics.suggestions.length > 0) {
            report += `## 🛠️ Refinement Suggestions\n`;
            for (const suggestion of metrics.suggestions) {
                report += `- ${suggestion}\n`;
            }
        }
        return report;
    },
});
|
|
289
|
+
// Tool: scan a file or directory for architectural-boundary violations
// ("backflow") as reported by CodeHealthService, and format them as a report.
registry.register({
    name: "audit_architectural_integrity",
    description: "Verify that code doesn't violate architectural boundaries (e.g., UI/Domain depending on Plumbing) across a file or directory.",
    input_schema: {
        type: "object",
        properties: {
            path: {
                type: "string",
                description: "The absolute path to the file or directory to audit",
            },
            recursive: {
                type: "boolean",
                description: "Whether to scan subdirectories (default: true)",
            },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        // Only an explicit `recursive: false` disables recursion.
        const recursive = args.recursive !== false;
        const stats = await fs.stat(p);
        const files = [];
        if (stats.isFile()) {
            files.push(p);
        }
        else {
            // Collect TS/JS source files under the directory.
            const entries = await fs.readdir(p, {
                withFileTypes: true,
                recursive,
            });
            // NOTE(review): `e.path` (the Dirent's parent directory) requires a
            // recent Node; the `|| p` fallback covers runtimes where it's absent,
            // but then nested entries would resolve against the wrong parent —
            // confirm the minimum supported Node version.
            files.push(...entries
                .filter((e) => e.isFile() && /\.(ts|tsx|js|jsx)$/.test(e.name))
                .map((e) => path.join(e.path || p, e.name)));
        }
        const violations = [];
        for (const file of files) {
            const health = await checkCodeHealth(file);
            if (health.zoningHealth.isBackflowPresent) {
                violations.push(`**${path.relative(process.cwd(), file)}**: ${health.zoningHealth.illegalImports.join(", ")}`);
            }
        }
        if (violations.length === 0)
            return `No architectural violations detected in ${files.length} file(s). The structure is pure. ✨`;
        let result = `# 👮♀️ Architectural Integrity Report\n\n`;
        result += `Audited **${files.length}** file(s). Found **${violations.length}** violation(s).\n\n`;
        result += `## ⚠️ Zone Leakage (Backflow)\n`;
        violations.forEach((v) => (result += `- ${v}\n`));
        result += `\n**Methodology Note**: Respect the Downward Flow Law (Domain -> Infrastructure -> Plumbing). Use \`trace_data_flow\` to resolve these leaks.`;
        return result;
    },
});
|
|
340
|
+
// Tool: produce a markdown API outline of a file's symbols (and their members)
// via VS Code's document-symbol provider.
registry.register({
    name: "extract_component_api",
    description: "Generate a structured markdown API summary for a component or class (functions, props, types).",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        const symbols = await vscode.commands.executeCommand("vscode.executeDocumentSymbolProvider", vscode.Uri.file(p));
        if (!symbols || symbols.length === 0)
            return "No symbols found to generate API summary.";
        let result = `# ✍️ API Summary: ${path.basename(p)}\n\n`;
        symbols.forEach((s) => {
            const kind = vscode.SymbolKind[s.kind];
            result += `### [${kind}] ${s.name}\n`;
            // FIX: the provider may return SymbolInformation objects, which have
            // no `children` property — guard so `.length` doesn't throw.
            const children = s.children ?? [];
            if (children.length > 0) {
                children.forEach((c) => {
                    const cKind = vscode.SymbolKind[c.kind];
                    result += `- **${c.name}** (${cKind})\n`;
                });
            }
            result += "\n";
        });
        return result;
    },
});
|
|
370
|
+
// Tool: render a tree view of a folder's contents up to an optional depth.
registry.register({
    name: "get_folder_structure",
    description: "Get a recursive, tree-like overview of a folder's structure.",
    input_schema: {
        type: "object",
        properties: {
            path: {
                type: "string",
                description: "The absolute path to the folder",
            },
            depth: {
                type: "number",
                description: "Maximum recursion depth (default: 3)",
            },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        // `depth` may be undefined; DiscoveryService supplies its own default.
        return await DiscoveryService.getFolderTree(getStringArg(args, "path"), args.depth);
    },
});
|
|
393
|
+
// Tool: find_symbol_references — lists workspace references for the symbol at a
// given 1-indexed line/character, capped at 50 entries.
registry.register({
    name: "find_symbol_references",
    description: "Find all references/usages of a symbol across the workspace.",
    input_schema: {
        type: "object",
        properties: {
            path: {
                type: "string",
                description: "The absolute path to the file containing the symbol",
            },
            line: { type: "number", description: "The line number (1-indexed)" },
            character: {
                type: "number",
                description: "The character position (1-indexed)",
            },
        },
        required: ["path", "line", "character"],
    },
    execute: async (args, onProgress, signal) => {
        const filePath = getStringArg(args, "path");
        // Convert the tool's 1-indexed coordinates to VS Code's 0-indexed positions.
        const position = new vscode.Position(args.line - 1, args.character - 1);
        const refs = await vscode.commands.executeCommand("vscode.executeReferenceProvider", vscode.Uri.file(filePath), position);
        if (!refs || refs.length === 0) {
            return "No references found.";
        }
        const pieces = [`Found ${refs.length} reference(s):\n`];
        for (const ref of refs.slice(0, 50)) {
            pieces.push(`- ${ref.uri.fsPath} [L${ref.range.start.line + 1}:C${ref.range.start.character + 1}]\n`);
        }
        if (refs.length > 50) {
            pieces.push(`\n... AND ${refs.length - 50} MORE.`);
        }
        return pieces.join("");
    },
});
|
|
429
|
+
// Tool: get_workspace_joy_map — project-wide health report built from
// JoyMapService plus the current automation run (if any).
registry.register({
    name: "get_workspace_joy_map",
    description: "Generate a project-wide health report. Use this to identify 'Clutter Hotspots' and prioritize refactoring efforts.",
    input_schema: { type: "object", properties: {} },
    execute: async () => {
        const root = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath;
        if (!root) {
            return "No workspace folder found.";
        }
        const joyMap = await JoyMapService.generate(root);
        const run = automationService.getCurrentRun();
        const parts = [`# 🌟 Workspace Joy Map\n\n`];
        parts.push(`**Overall Joy Score**: ${joyMap.overallJoyScore}/100\n`);
        if (run) {
            // More than three heuristic repairs in the current run is treated as fragile.
            const isFragile = run.heuristicFixes && run.heuristicFixes.length > 3;
            parts.push(`**Agent Resilience**: ${isFragile ? "Fragile ⚠️" : "Resilient ✨"}\n`);
        }
        parts.push(`**Files Scanned**: ${joyMap.totalFilesScanned}\n\n`);
        parts.push(`> ${joyMap.summary}\n\n`);
        if (joyMap.hotspots.length > 0) {
            parts.push(`## ⚠️ Clutter Hotspots\n`);
            for (const hotspot of joyMap.hotspots) {
                parts.push(`- \`${hotspot.path}\`: ${hotspot.joyScore} (${hotspot.clutterLevel})\n`);
            }
        }
        return parts.join("");
    },
});
|
|
458
|
+
// Tool: check_ripple_health — after a file is modified, walks every workspace
// file that references any of its symbols and reports whether those dependents
// currently carry Error-severity diagnostics.
registry.register({
    name: "check_ripple_health",
    description: "Verify the health of all workspace files that depend on a specific path. Use this to catch regressions in downstream consumers.",
    input_schema: {
        type: "object",
        properties: {
            path: {
                type: "string",
                description: "The absolute path to the modified file",
            },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        try {
            // 1. Get all exported symbols in the file
            // NOTE(review): this collects ALL document symbols, not only exported
            // ones — the LSP symbol provider does not distinguish export status here.
            const symbols = await vscode.commands.executeCommand("vscode.executeDocumentSymbolProvider", vscode.Uri.file(p));
            if (!symbols || symbols.length === 0)
                return "No symbols found to check ripples for.";
            // 2. Find all unique external reference files
            // Sequential awaits: one reference query per symbol, deduped into a Set.
            const uniqueFiles = new Set();
            for (const sym of symbols) {
                const refs = await vscode.commands.executeCommand("vscode.executeReferenceProvider", vscode.Uri.file(p), sym.location.range.start);
                // Provider may resolve to undefined; self-references are excluded.
                refs?.forEach((r) => {
                    if (r.uri.fsPath !== p)
                        uniqueFiles.add(r.uri.fsPath);
                });
            }
            if (uniqueFiles.size === 0)
                return "No external downstream dependents found for any symbol in this file. ✨";
            let result = `# 🌊 Comprehensive Ripple Health: ${path.basename(p)}\n\n`;
            result += `Checking **${uniqueFiles.size}** dependent(s) across all symbols...\n\n`;
            const fileList = Array.from(uniqueFiles);
            for (const file of fileList) {
                // getDiagnostics only reflects what the language server has already
                // analyzed — unopened files may report clean even if broken.
                const diags = vscode.languages.getDiagnostics(vscode.Uri.file(file));
                const errors = diags.filter((d) => d.severity === vscode.DiagnosticSeverity.Error);
                result += `- \`${path.relative(process.cwd(), file)}\`: ${errors.length > 0 ? `❌ ${errors.length} Error(s)` : `✅ Clean`}\n`;
            }
            return result;
        }
        catch (error) {
            // Any LSP/filesystem failure is reported as a tool result, not thrown.
            return `Ripple check failed: ${error}`;
        }
    },
});
|
|
504
|
+
// Tool: generate_evolution_chronicle — narrates a path's git history as a
// "story" section followed by a fixed synthesis blurb.
registry.register({
    name: "generate_evolution_chronicle",
    description: "Synthesize the 'Story' of a directory or file's evolution using git history and recent diffs.",
    input_schema: {
        type: "object",
        properties: {
            path: {
                type: "string",
                description: "The absolute path to the directory or file",
            },
        },
        required: ["path"],
    },
    execute: async (args, onProgress, signal) => {
        const targetPath = getStringArg(args, "path");
        // GitService is imported lazily so git support loads only on demand.
        const { getFileHistory } = await import("../../plumbing/git/GitService.js");
        const history = await getFileHistory(process.cwd(), targetPath);
        return (
            `# 📜 Evolution Chronicle: ${path.basename(targetPath)}\n\n` +
            `## Recent Trajectory\n` +
            history + "\n\n" +
            `## Intent Synthesis\n` +
            `Based on the echos above, this area is transitioning through the Living Project lifecycle. Ensure the next 'Pass' aligns with this momentum. ✨`
        );
    },
});
|
|
529
|
+
// Tool: predict_refactor_ripple — given a symbol and a described change,
// enumerates external files that reference the symbol and samples the first
// referencing line in each, so the agent can predict breakage before editing.
registry.register({
    name: "predict_refactor_ripple",
    description: "Simulate a structural change (e.g., changing a signature) and predict which downstream files will break.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
            symbol: { type: "string", description: "The symbol being modified" },
            changeDescription: {
                type: "string",
                description: "Description of the change (e.g., 'Removing optional param X')",
            },
        },
        required: ["path", "symbol", "changeDescription"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        const sym = getStringArg(args, "symbol");
        const uri = vscode.Uri.file(p);
        // 1. Find the symbol's actual position
        const symbols = await vscode.commands.executeCommand("vscode.executeDocumentSymbolProvider", uri);
        const target = symbols?.find((s) => s.name === sym);
        // Falls back to the top of the file when the symbol isn't found — the
        // reference query then targets position (0,0) rather than failing.
        const pos = target?.location.range.start || new vscode.Position(0, 0);
        // 2. Find references to that specific point
        const locations = await vscode.commands.executeCommand("vscode.executeReferenceProvider", uri, pos);
        if (!locations || locations.length === 0)
            return `No ripple effect detected for \`${sym}\`. The change is isolated. ✨`;
        // Only files other than the definition file count as dependents.
        const uniqueFiles = Array.from(new Set(locations.map((l) => l.uri.fsPath))).filter((f) => f !== p);
        let result = `# 🔮 Refactor Ripple Prediction: \`${sym}\`\n\n`;
        result += `Proposed Change: ${getStringArg(args, "changeDescription")}\n\n`;
        if (uniqueFiles.length > 0) {
            result += `## ⚠️ Potentially Broken Dependents (${uniqueFiles.length})\n`;
            for (const f of uniqueFiles) {
                const relativePath = path.relative(process.cwd(), f);
                const fileLocations = locations.filter((l) => l.uri.fsPath === f);
                result += `- \`${relativePath}\`: Found ${fileLocations.length} reference(s)\n`;
                // Added: Sample context from the first reference in each file
                try {
                    const content = await fs.readFile(f, "utf-8");
                    const lines = content.split("\n");
                    const sampleLine = lines[fileLocations[0].range.start.line].trim();
                    result += ` > \`${sampleLine}\` (L${fileLocations[0].range.start.line + 1})\n`;
                }
                catch (e) {
                    /* ignore read errors */
                }
            }
            result += `\nRecommendation: Use \`check_ripple_health\` after applying the change to verify these files.`;
        }
        else {
            result += `No external dependents found. The change is safe to apply within this file.`;
        }
        return result;
    },
});
|
|
584
|
+
// Tool: generate_migration_plan — emits a fixed three-step migration
// "blueprint" plus up to ten of the target files.
registry.register({
    name: "generate_migration_plan",
    description: "Generate a step-by-step 'Blueprint' for migrating code from an old pattern to a new one.",
    input_schema: {
        type: "object",
        properties: {
            fromPattern: {
                type: "string",
                description: "The pattern being replaced",
            },
            toPattern: { type: "string", description: "The new target pattern" },
            files: {
                type: "array",
                items: { type: "string" },
                description: "List of files to migrate",
            },
        },
        required: ["fromPattern", "toPattern", "files"],
    },
    execute: async (args, onProgress, signal) => {
        const files = args.files;
        let result = `# 📐 Migration Blueprint\n\n`;
        result += `**Trajectory**: From \`${args.fromPattern}\` to \`${args.toPattern}\`\n\n`;
        result += `## 🗺️ Execution Steps\n`;
        result += `1. **Analysis**: Map all instances of old pattern in ${files.length} files.\n`;
        result += `2. **Transformation**: Apply structural replacements using \`replace_in_file\`.\n`;
        result += `3. **Verification**: Run \`check_ripple_health\` and \`run_test_suite\`.\n\n`;
        result += `## 🗂️ Target Garden\n`;
        // Show at most ten files by basename; summarize the remainder.
        for (const file of files.slice(0, 10)) {
            result += `- \`${file.split("/").pop()}\`\n`;
        }
        if (files.length > 10) {
            result += `- ... and ${files.length - 10} more.\n`;
        }
        return result;
    },
});
|
|
620
|
+
// Tool: analyze_agent_telemetry — self-diagnostic over the current automation
// run: elapsed time, heuristic-fix count, and a resilience verdict.
registry.register({
    name: "analyze_agent_telemetry",
    description: "Analyze your own execution telemetry (errors, heuristic fixes, retries) to self-calibrate your strategy.",
    input_schema: { type: "object", properties: {} },
    execute: async () => {
        const run = automationService.getCurrentRun(); // Hypothetical access to current run
        if (!run) {
            return "No active telemetry found for this session.";
        }
        const elapsedSeconds = Math.round((Date.now() - run.startedAt) / 1000);
        let result = `# 🧪 Strategic Telemetry Analysis\n\n`;
        result += `- **Total Elapsed**: ${elapsedSeconds}s\n`;
        result += `- **Heuristic Fixes**: ${run.heuristicFixes?.length || 0}\n`;
        result += `- **Objective Latency**: High (Refining Strategy...)\n\n`;
        // Same fragility threshold used by get_workspace_joy_map: >3 repairs.
        if (run.heuristicFixes && run.heuristicFixes.length > 3) {
            result += `## ⚠️ Resilience Warning\n`;
            result += `Multiple heuristic repairs detected. Consider shifting to a more **Empirical Grounding** phase.`;
        }
        else {
            result += `Execution is healthy and resilient. ✨`;
        }
        return result;
    },
});
|
|
642
|
+
// Tool: execute_semantic_rename — delegates to the LSP rename provider and
// applies the resulting WorkspaceEdit; destructive because it rewrites files.
registry.register({
    name: "execute_semantic_rename",
    description: "Perform a workspace-wide, compiler-safe rename of a symbol using the LSP.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            path: {
                type: "string",
                description: "The absolute path to the file containing the symbol",
            },
            line: {
                type: "integer",
                description: "The line number of the symbol (1-indexed)",
            },
            column: {
                type: "integer",
                description: "The column number of the symbol (1-indexed)",
            },
            newName: { type: "string", description: "The new name for the symbol" },
        },
        required: ["path", "line", "column", "newName"],
    },
    execute: async (args, onProgress, signal) => {
        const filePath = getStringArg(args, "path");
        const newName = getStringArg(args, "newName");
        const uri = vscode.Uri.file(filePath);
        // LSP positions are 0-indexed; tool input is 1-indexed.
        const position = new vscode.Position(args.line - 1, args.column - 1);
        try {
            const edit = await vscode.commands.executeCommand("vscode.executeRenameProvider", uri, position, newName);
            if (!edit) {
                return `LSP could not find a renameable symbol at ${filePath}:${args.line}:${args.column}.`;
            }
            const applied = await vscode.workspace.applyEdit(edit);
            if (!applied) {
                return "Failed to apply workspace-wide semantic rename.";
            }
            return `Successfully renamed symbol to \`${newName}\` across the entire workspace. ✨`;
        }
        catch (error) {
            return `Semantic rename failed: ${error}`;
        }
    },
});
|
|
686
|
+
// Tool: simulate_semantic_edit — applies a text replacement to a hidden
// ".marie_sim" sibling copy of the file, lets VS Code lint it, and reports
// local plus workspace-wide Error diagnostics without touching the real file.
registry.register({
    name: "simulate_semantic_edit",
    description: "Simulate a change in a 'Shadow Buffer' to catch lints/errors without affecting the disk.",
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "The absolute path to the file" },
            targetContent: {
                type: "string",
                description: "Exact content to replace",
            },
            replacementContent: { type: "string", description: "New content" },
        },
        required: ["path", "targetContent", "replacementContent"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        const target = getStringArg(args, "targetContent");
        const replacement = getStringArg(args, "replacementContent");
        try {
            const content = await fs.readFile(p, "utf-8");
            if (!content.includes(target))
                return `Error: Could not find target content in ${p} for simulation.`;
            // Only the first occurrence is replaced (string-arg String.replace).
            const simulatedContent = content.replace(target, replacement);
            // Shadow Simulation: Create a hidden sibling file
            const simPath = p.replace(/(\.[^.]+)$/, ".marie_sim$1");
            await fs.writeFile(simPath, simulatedContent);
            // BUGFIX: cleanup was previously only reached on the success path, so a
            // throw while collecting diagnostics leaked the sim file (which itself
            // pollutes workspace diagnostics). Guarantee removal with try/finally.
            try {
                // Wait for VS Code to process the new file
                await new Promise((r) => setTimeout(r, 1000));
                const diagnostics = vscode.languages.getDiagnostics(vscode.Uri.file(simPath));
                const localErrors = diagnostics.filter((d) => d.severity === vscode.DiagnosticSeverity.Error);
                let result = "";
                if (localErrors.length > 0) {
                    result += `# 🌑 Shadow Realm: Local Regressions\n`;
                    localErrors.forEach((e) => (result += `- [Line ${e.range.start.line + 1}] ${e.message}\n`));
                }
                // Multi-file ripple check (the "Oracle's Sight")
                // Check if any other files in the workspace now have errors due to this "Shadow" file
                // (VS Code might not show errors for other files until they are opened,
                // but we can report on the ones it DOES detect)
                const allDiagnostics = vscode.languages.getDiagnostics();
                const externalErrors = allDiagnostics.filter(([uri, diags]) => {
                    return (uri.fsPath !== simPath &&
                        diags.some((d) => d.severity === vscode.DiagnosticSeverity.Error));
                });
                if (externalErrors.length > 0) {
                    result += `\n## ⚠️ Ripple Regressions Detected\n`;
                    externalErrors.slice(0, 5).forEach(([uri, diags]) => {
                        const file = uri.fsPath.split("/").pop();
                        result += `- **${file}**: ${diags[0].message}\n`;
                    });
                }
                if (result)
                    return result;
                return `Simulation successful. Change is syntactically sound and produces no ripple regressions. ✨`;
            }
            finally {
                // Best-effort cleanup; a failed unlink must not mask the real result.
                await fs.unlink(simPath).catch(() => { });
            }
        }
        catch (error) {
            return `Shadow simulation failed: ${error}`;
        }
    },
});
|
|
748
|
+
// Tool: execute_semantic_move — cuts a symbol's lines from one file, pastes
// them (with heuristically-selected imports) into another, then rewrites
// downstream `from '...'` specifiers that pointed at the old module.
registry.register({
    name: "execute_semantic_move",
    description: "Move a symbol (function/class) to a new file and attempt to update imports throughout the workspace.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            sourcePath: {
                type: "string",
                description: "Absolute path to source file",
            },
            targetPath: {
                type: "string",
                description: "Absolute path to target file",
            },
            symbolName: {
                type: "string",
                description: "Name of the symbol to move",
            },
        },
        required: ["sourcePath", "targetPath", "symbolName"],
    },
    execute: async (args, onProgress, signal) => {
        const src = getStringArg(args, "sourcePath");
        const dest = getStringArg(args, "targetPath");
        const sym = getStringArg(args, "symbolName");
        try {
            // 1. Locate symbol (bounded so a hung language server cannot stall us)
            const symbols = await withTimeout(vscode.commands.executeCommand("vscode.executeDocumentSymbolProvider", vscode.Uri.file(src)), 5000, "LSP Document Symbol Provider");
            const targetSym = symbols?.find((s) => s.name === sym);
            if (!targetSym)
                return `Could not find symbol \`${sym}\` in source file.`;
            const content = await fs.readFile(src, "utf-8");
            const lines = content.split("\n");
            const symContent = lines
                .slice(targetSym.location.range.start.line, targetSym.location.range.end.line + 1)
                .join("\n");
            // 2. Identify required imports (Surgical Heuristic)
            const importLines = lines.filter((l) => l.trim().startsWith("import "));
            const neededImports = importLines.filter((l) => {
                // Very basic check: does the symbol content use any identifier mentioned in this import?
                const match = l.match(/\{([^}]+)\}/);
                if (match) {
                    const idents = match[1].split(",").map((i) => i.trim());
                    return idents.some((i) => symContent.includes(i));
                }
                return false;
            });
            // 3. Update source: drop the symbol's line span.
            const newSrcContent = [
                ...lines.slice(0, targetSym.location.range.start.line),
                ...lines.slice(targetSym.location.range.end.line + 1),
            ].join("\n");
            await writeFile(src, newSrcContent, signal);
            // 4. Update destination (created with a header if it doesn't exist yet).
            let destContent = "";
            try {
                destContent = await fs.readFile(dest, "utf-8");
            }
            catch (e) {
                destContent = `// Created for moved symbol ${sym}\n`;
            }
            const finalDestContent = neededImports.join("\n") + "\n\n" + destContent + "\n\n" + symContent;
            await writeFile(dest, finalDestContent, signal);
            // Mirror Polishing I: Automated Downstream Import Migration
            const refs = await withTimeout(vscode.commands.executeCommand("vscode.executeReferenceProvider", vscode.Uri.file(src), targetSym.location.range.start), 5000, "LSP Reference Provider");
            // BUGFIX: the reference provider can resolve to undefined (sibling tools
            // guard this); calling .filter on it threw a TypeError here.
            const externalRefs = (refs ?? []).filter((r) => r.uri.fsPath !== src && r.uri.fsPath !== dest);
            const updatedFiles = [];
            if (externalRefs.length > 0) {
                // BUGFIX: removed an unused `new vscode.WorkspaceEdit()` local that was
                // allocated here but never populated or applied.
                const refFiles = Array.from(new Set(externalRefs.map((r) => r.uri.fsPath)));
                for (const refFile of refFiles) {
                    const refContent = await fs.readFile(refFile, "utf-8");
                    // Extension-less, slash-normalized specifiers relative to the consumer.
                    const srcRel = path
                        .relative(path.dirname(refFile), src)
                        .replace(/\.[^.]+$/, "")
                        .replace(/\\/g, "/");
                    const destRel = path
                        .relative(path.dirname(refFile), dest)
                        .replace(/\.[^.]+$/, "")
                        .replace(/\\/g, "/");
                    // Heuristic replacement of the import path
                    const oldImportPath = srcRel.startsWith(".")
                        ? srcRel
                        : `./${srcRel}`;
                    const newImportPath = destRel.startsWith(".")
                        ? destRel
                        : `./${destRel}`;
                    if (refContent.includes(oldImportPath)) {
                        // NOTE(review): oldImportPath is interpolated into a RegExp without
                        // escaping, so `.` in path segments matches any character — consider
                        // escaping special characters before building the pattern.
                        const newRefContent = refContent.replace(new RegExp(`from\\s+['"]${oldImportPath}['"]`, "g"), `from '${newImportPath}'`);
                        if (newRefContent !== refContent) {
                            await fs.writeFile(refFile, newRefContent);
                            updatedFiles.push(path.basename(refFile));
                        }
                    }
                }
            }
            // Mirror Polishing II: Post-Execution Tidying (Ascension Autonomy)
            await foldCode(src);
            await foldCode(dest);
            return (`Successfully moved \`${sym}\` to \`${dest.split("/").pop()}\`. 🗡️\n\n` +
                `**Methodology Complete**: Required imports were identified and copied to the destination.\n` +
                (updatedFiles.length > 0
                    ? `**Downstream Migration**: Automatically updated imports in ${updatedFiles.length} file(s): ${updatedFiles.join(", ")}. ✨`
                    : `**Mirror Polishing**: No external downstream imports required migration.`) +
                `\n\n**Autonomy Note**: Post-execution tidying completed for both source and destination. 🌸`);
        }
        catch (error) {
            // Autonomous Self-Healing Trigger
            // BUGFIX: `error` is not guaranteed to be an Error instance; accessing
            // .message on a non-Error produced `undefined` for the healing report.
            const message = error instanceof Error ? error.message : String(error);
            return await automationService.executeSelfHealing(src, message);
        }
    },
});
|
|
861
|
+
// Tool: trace_data_flow — finds every file referencing a type and buckets them
// into architectural layers by path convention, flagging Domain/Infrastructure
// boundary crossings.
registry.register({
    name: "trace_data_flow",
    description: "Analyze how a specific type or interface is used across multiple layers.",
    input_schema: {
        type: "object",
        properties: {
            typeName: {
                type: "string",
                description: "The name of the type/interface to trace",
            },
            filePath: {
                type: "string",
                description: "Path to the file where the type is defined",
            },
        },
        required: ["typeName", "filePath"],
    },
    execute: async (args, onProgress, signal) => {
        const type = getStringArg(args, "typeName");
        const definitionPath = getStringArg(args, "filePath");
        const uri = vscode.Uri.file(definitionPath);
        // 1. Resolve the declaration position; fall back to the top of the file.
        const symbols = await vscode.commands.executeCommand("vscode.executeDocumentSymbolProvider", uri);
        const declaration = symbols?.find((s) => s.name === type);
        const position = declaration?.location.range.start || new vscode.Position(0, 0);
        // 2. Use reference provider to see where this type is used.
        const refs = await vscode.commands.executeCommand("vscode.executeReferenceProvider", uri, position);
        if (!refs || refs.length === 0) {
            return `Type \`${type}\` appears to be isolated. ✨`;
        }
        const uniqueFiles = Array.from(new Set(refs.map((r) => r.uri.fsPath)));
        // Path-convention classifier for the project's layer names.
        const classifyLayer = (filePath) => {
            if (filePath.includes("/infrastructure/")) {
                return "Infrastructure 🏛️";
            }
            if (filePath.includes("/domain/")) {
                return "Domain 🧠";
            }
            if (filePath.includes("/plumbing/")) {
                return "Plumbing 🔧";
            }
            return "UI/External 🎨";
        };
        const layerCounts = {};
        for (const file of uniqueFiles) {
            const layer = classifyLayer(file);
            layerCounts[layer] = (layerCounts[layer] || 0) + 1;
        }
        let result = `# 🔮 Data Flow Trace: \`${type}\`\n\n`;
        result += `Found usages in **${uniqueFiles.length}** files.\n\n`;
        result += `## Layer Distribution\n`;
        for (const [layer, count] of Object.entries(layerCounts)) {
            result += `- **${layer}**: ${count} files\n`;
        }
        if (layerCounts["Domain 🧠"] && layerCounts["Infrastructure 🏛️"]) {
            result += `\n> [!NOTE]\n> This type crosses the Domain/Infrastructure boundary. Ensure strict mapping is in place to maintain Joyful purity. ✨`;
        }
        return result;
    },
});
|
|
916
|
+
// Tool: generate_architectural_decision — writes a timestamp-keyed ADR
// markdown file under .marie/decisions/ in the first workspace folder.
registry.register({
    name: "generate_architectural_decision",
    description: "Generate a structured ADR (Architectural Decision Record) to document a major shift in the project.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            title: { type: "string", description: "Concise title of the decision" },
            context: {
                type: "string",
                description: "The problem or context driving this change",
            },
            decision: {
                type: "string",
                description: "The chosen solution and rationale",
            },
            consequences: {
                type: "string",
                description: "Impact on the system moving forward",
            },
        },
        required: ["title", "context", "decision", "consequences"],
    },
    execute: async (args, onProgress, signal) => {
        // BUGFIX: was `?.[0].uri` — an empty (but defined) workspaceFolders array
        // made `[0]` undefined and `.uri` throw. Sibling tools use `?.[0]?.uri`.
        const root = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath;
        if (!root)
            return "No workspace detected.";
        // Second-resolution epoch keeps IDs short while staying effectively unique.
        const id = `ADR-${Math.round(Date.now() / 1000)}`;
        const adrDir = path.join(root, ".marie", "decisions");
        const adrPath = path.join(adrDir, `${id}.md`);
        const title = getStringArg(args, "title");
        // Refactor-titled ADRs get an extra section tying them to the chronicle.
        const historyContext = title.includes("Refactor")
            ? `\n## Historical Context\n> This decision aligns with the trajectory identified in the Evolution Chronicle. ✨\n`
            : "";
        const adrContent = `# 🛡️ ${id}: ${title}\n\n` +
            `**Date**: ${new Date().toLocaleDateString()}\n` +
            `**Status**: Accepted\n\n` +
            historyContext +
            `## Context\n${getStringArg(args, "context")}\n\n` +
            `## Decision\n${getStringArg(args, "decision")}\n\n` +
            `## Consequences\n${getStringArg(args, "consequences")}\n\n` +
            `--- \nGenerated by Marie Autonomous Agent ✨`;
        try {
            await fs.mkdir(adrDir, { recursive: true });
            await fs.writeFile(adrPath, adrContent);
            return `Architectural Decision Record saved to \`.marie/decisions/${id}.md\`. History preserved. 🛡️`;
        }
        catch (e) {
            return `Failed to save ADR: ${e}`;
        }
    },
});
|
|
968
|
+
// Tool: diagnose_action_failure — explains why a semantic action failed:
// either the file itself has blocking errors, or suggests a fallback path.
registry.register({
    name: "diagnose_action_failure",
    description: "Analyze why a high-order action (Rename, Move) failed and suggest a resilient fallback methodology.",
    input_schema: {
        type: "object",
        properties: {
            actionAttempted: { type: "string" },
            errorMessage: { type: "string" },
            path: { type: "string" },
        },
        required: ["actionAttempted", "errorMessage", "path"],
    },
    execute: async (args, onProgress, signal) => {
        const filePath = getStringArg(args, "path");
        const diagnostics = vscode.languages.getDiagnostics(vscode.Uri.file(filePath));
        const errors = diagnostics.filter((d) => d.severity === vscode.DiagnosticSeverity.Error);
        let result = `# 🩹 Sovereign Recovery Diagnostic\n\n`;
        result += `Failed Action: \`${args.actionAttempted}\`\n`;
        result += `Reason: ${args.errorMessage}\n\n`;
        // No file-level errors: the tool itself is fine, recommend simulation.
        if (errors.length === 0) {
            result += `## 🗺️ Fallback Trajectory\n`;
            result += `The high-order tool is healthy but the specific symbol could not be transformed. \n`;
            result += `**Trajectory**: Transition to **Shadow Realm Simulation** via \`simulate_semantic_edit\` to verify manual replacements.`;
            return result;
        }
        // File errors present: they most likely blocked the LSP action.
        result += `## ⚠️ Underlying File Errors\n`;
        result += `The LSP is likely blocked because the file has syntax/semantic errors:\n`;
        for (const err of errors.slice(0, 3)) {
            result += `- [L${err.range.start.line + 1}] ${err.message}\n`;
        }
        result += `\n**Recommendation**: Fix these errors using \`replace_in_file\` before retrying semantic actions.`;
        return result;
    },
});
|
|
1003
|
+
// Tool: sprout_new_module — creates a new file whose boilerplate depends on
// which architectural layer its path falls under, then logs the creation.
registry.register({
    name: "sprout_new_module",
    description: "Create a new file with architectural boilerplate and register it in the project's living history.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            path: { type: "string", description: "Absolute path to the new file" },
            description: {
                type: "string",
                description: "Brief description of the module's purpose",
            },
        },
        required: ["path", "description"],
    },
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "path");
        const desc = getStringArg(args, "description");
        const name = path.basename(p);
        // Hoisted: the extension-less identifier used by every template below.
        const baseName = name.replace(/\.[^.]+$/, "");
        let boilerplate = `/**\n * ${name}: ${desc}\n * \n * Part of the Living Project - Sprouted on ${new Date().toLocaleDateString()}\n */\n\n`;
        // Layer-specific scaffolding; paths outside these layers get header only.
        if (p.includes("/infrastructure/")) {
            boilerplate += `import * as vscode from 'vscode';\n\nexport class ${baseName} {\n // Implementation\n}\n`;
        }
        else if (p.includes("/domain/")) {
            boilerplate += `export interface ${baseName} {\n // Domain Logic\n}\n`;
        }
        else if (p.includes("/plumbing/")) {
            boilerplate += `/** Plumbing Utility */\nexport function ${baseName.toLowerCase()}() {\n return;\n}\n`;
        }
        // BUGFIX: forward the abort signal — every other writeFile call site in
        // this registry passes it, so cancellation now applies here too.
        await writeFile(p, boilerplate, signal);
        await logGratitude(`Sprouted new module: ${name}`);
        return (`# 🌱 New Sprout: ${name}\n\n` +
            `Module created at \`${path.relative(process.cwd(), p)}\`.\n` +
            `**Intent**: ${desc}\n\n` +
            `> **Methodology Note**: This module has been registered in the project's lineage. Please use \`get_file_diagnostics\` to verify initial health. ✨`);
    },
});
|
|
1040
|
+
registry.register({
    name: "propose_logic_clustering",
    description: "Scan a directory and propose structural reorganizations (Clustering) based on semantic co-dependency.",
    input_schema: {
        type: "object",
        properties: {
            directoryPath: {
                type: "string",
                description: "Absolute path to the directory to analyze",
            },
        },
        required: ["directoryPath"],
    },
    // Delegates analysis to the domain-layer JoyTools and renders the resulting
    // clusters as a markdown proposal for the model to act on.
    execute: async (args, onProgress, signal) => {
        const p = getStringArg(args, "directoryPath");
        const { proposeClustering } = await import("../../domain/joy/JoyTools.js");
        const clusters = await proposeClustering(p);
        if (clusters.length === 0)
            return "No significant clustering opportunities detected. The architecture appears naturally granular. ✨";
        let result = `# 🧘♂️ Convergence Ritual: Clustering Proposal\n\n`;
        // FIX: was `p.split("/").pop()`, which fails to extract the directory
        // name on Windows (backslash separators) and yields "" for paths with a
        // trailing slash. path.basename handles both platforms correctly.
        result += `Based on semantic co-dependency and zoning laws, the following 'Logic Clusters' are proposed for \`${path.basename(p)}\`:\n\n`;
        clusters.forEach((c) => {
            result += `### ${c.zone} Zone Fragment\n`;
            result += `- **File Count**: ${c.fileCount}\n`;
            result += `- **Suggested Clusters**: ${c.suggestedClusters.join(", ")}\n`;
            result += `- **Rationale**: High internal cohesion suggests these should be grouped into a single module to reduce workspace noise.\n\n`;
        });
        result += `**Action**: Use \`execute_semantic_move\` to begin converging these fragments into the suggested clusters.`;
        return result;
    },
});
|
|
1071
|
+
registry.register({
    name: "get_pinned_context",
    description: "Retrieve all anchored strategic context to refresh your memory.",
    // No parameters: always returns every pinned anchor from the archive.
    input_schema: { type: "object", properties: {} },
    execute: async () => ContextArchiveService.getInstance().getAllAnchors(),
});
|
|
1079
|
+
registry.register({
    name: "find_files",
    description: "Search for files across the workspace using a glob pattern (e.g., '**/*.test.ts').",
    input_schema: {
        type: "object",
        properties: {
            pattern: {
                type: "string",
                description: "The glob pattern to search for",
            },
            exclude: {
                type: "string",
                description: "Optional glob pattern for files to exclude",
            },
        },
        required: ["pattern"],
    },
    // Runs the workspace glob search and renders at most 50 hits, with a
    // trailing count of anything truncated.
    execute: async (args, onProgress, signal) => {
        const pattern = getStringArg(args, "pattern");
        const excludePattern = args.exclude;
        const matches = await vscode.workspace.findFiles(pattern, excludePattern);
        if (matches.length === 0) {
            return "No files found matching the pattern.";
        }
        const parts = [`Found ${matches.length} file(s) matching \`${pattern}\`:\n`];
        for (const uri of matches.slice(0, 50)) {
            parts.push(`- \`${uri.fsPath}\`\n`);
        }
        if (matches.length > 50) {
            parts.push(`\n... AND ${matches.length - 50} MORE.`);
        }
        return parts.join("");
    },
});
|
|
1109
|
+
registry.register({
    name: "list_workspace_symbols",
    description: "Search for symbols (classes, functions, etc.) across the entire workspace.",
    input_schema: {
        type: "object",
        properties: {
            query: {
                type: "string",
                description: "The symbol name or partial name to search for",
            },
        },
        required: ["query"],
    },
    // Queries the LSP workspace-symbol provider and renders up to 50 hits.
    execute: async (args, onProgress, signal) => {
        const query = getStringArg(args, "query");
        const symbols = await vscode.commands.executeCommand("vscode.executeWorkspaceSymbolProvider", query);
        if (!symbols || symbols.length === 0)
            return "No symbols found matching the query.";
        const parts = [`Found ${symbols.length} symbol(s) matching \`${query}\`:\n`];
        for (const sym of symbols.slice(0, 50)) {
            // SymbolKind is a numeric enum; reverse-index it for a readable label.
            parts.push(`- [${vscode.SymbolKind[sym.kind]}] \`${sym.name}\` in \`${sym.location.uri.fsPath}\`\n`);
        }
        if (symbols.length > 50) {
            parts.push(`\n... AND ${symbols.length - 50} MORE.`);
        }
        return parts.join("");
    },
});
|
|
1137
|
+
registry.register({
    name: "verify_workspace_health",
    description: "Perform a non-destructive health check (build/lint) to ensure recent changes haven't introduced regressions.",
    input_schema: { type: "object", properties: {} },
    execute: async () => {
        // Lazily load the terminal plumbing so this module stays import-light.
        const { TerminalService } = await import("../../plumbing/terminal/TerminalService.js");
        // Heuristic health check: run 'npm run build' or 'tsc' if available
        return TerminalService.runCommand("npm run build -- --noEmit || npx tsc --noEmit");
    },
});
|
|
1147
|
+
registry.register({
    name: "run_command",
    description: "Execute a command in the terminal. Requires user approval. Use this for running tests or building the project.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            command: {
                type: "string",
                description: "The shell command to execute",
            },
        },
        required: ["command"],
    },
    // Forwards the command — plus the abort signal for cancellation — to the
    // terminal plumbing layer.
    execute: async (args, onProgress, signal) => {
        const shellCommand = getStringArg(args, "command");
        const { TerminalService } = await import("../../plumbing/terminal/TerminalService.js");
        return TerminalService.runCommand(shellCommand, signal);
    },
});
|
|
1167
|
+
registry.register({
    name: "perform_strategic_planning",
    description: "A mandatory planning ritual. Call this at the start of any complex task.",
    input_schema: {
        type: "object",
        properties: {
            intent: { type: "string" },
            joyZone: {
                type: "string",
                enum: ["joyful", "infrastructure", "plumbing"],
            },
            projectName: { type: "string" },
            lifecycleStage: {
                type: "string",
                enum: ["sprout", "bloom", "compost"],
            },
            objectives: {
                type: "array",
                items: {
                    type: "object",
                    properties: {
                        id: { type: "string" },
                        label: { type: "string" },
                        status: {
                            type: "string",
                            enum: ["pending", "in_progress", "completed", "blocked"],
                        },
                    },
                    required: ["id", "label", "status"],
                },
            },
            ritualChecked: { type: "boolean" },
            gratitudeMoment: { type: "string" },
            dependencyRisk: { type: "string" },
            totalPasses: {
                type: "number",
                description: "Total number of passes (1-4)",
            },
            passFocus: { type: "string", description: "Initial focus of Pass 1" },
        },
        required: [
            "intent",
            "joyZone",
            "projectName",
            "lifecycleStage",
            "ritualChecked",
            "gratitudeMoment",
            "totalPasses",
            "passFocus",
        ],
    },
    // Records the accepted plan in the run-progress state via onProgress and
    // confirms acceptance to the model. Performs no file-system work itself.
    execute: async (args, onProgress) => {
        const intent = getStringArg(args, "intent");
        const joyZone = getStringArg(args, "joyZone");
        const projectName = getStringArg(args, "projectName");
        const lifecycleStage = getStringArg(args, "lifecycleStage");
        const objectives = getArrayArg(args, "objectives");
        const gratitude = getStringArg(args, "gratitudeMoment");
        // FIX: totalPasses is declared as a number in the schema, but the old
        // code read it through getStringArg (the string accessor), so the
        // numeric value never survived the lookup. Read it directly; fall back
        // to 1 only when it is null/undefined.
        const totalPasses = args.totalPasses ?? 1;
        // (Removed dead locals `layerOrder`/`currentLayerIndex`: they were
        // computed but never used anywhere in this handler.)
        // Pass ritual state to progress
        onProgress?.({
            context: `Mindfulness: ${intent} [Zone: ${joyZone}]`,
            completedObjectiveIds: [],
            // NOTE(review): `objectives` is not in `required`; assumes
            // getArrayArg returns an array (possibly empty) when absent — confirm.
            activeObjectiveId: objectives[0]?.id,
            achieved: [
                `Aligned with ${joyZone} zone protocols`,
                `Planning for ${projectName} (${lifecycleStage})`,
                `Ritual: ${gratitude}`,
            ],
            lifecycleStage,
            ritualComplete: true,
            currentPass: 1,
            totalPasses,
            passFocus: getStringArg(args, "passFocus"),
        });
        return `Strategic Plan for '${projectName}' accepted. The KonMari Waterfall (Domain -> Infrastructure -> Plumbing) has been aligned. ✨`;
    },
});
|
|
1247
|
+
registry.register({
    name: "checkpoint_pass",
    description: "Explicitly end a pass, summarize achievements, and orient for the next pass.",
    input_schema: {
        type: "object",
        properties: {
            summary: {
                type: "string",
                description: "What was achieved in this pass",
            },
            reflection: {
                type: "string",
                description: "What sparked joy or was learned in this pass? (KonMari Reflection)",
            },
            nextPassFocus: {
                type: "string",
                description: "The focus for the upcoming pass",
            },
            zoneSolidification: {
                type: "boolean",
                description: "Confirm that all new code follows JOY zoning protocols.",
            },
            tidyChecked: {
                type: "boolean",
                description: "Confirm that Joyful Tidying (fold_file) has been performed.",
            },
            // FIX: the execute body reads args.currentPass, but the schema never
            // declared it, so the model could not supply it and the pass history
            // always recorded pass 1. Declared here as an optional number.
            currentPass: {
                type: "number",
                description: "The number of the pass being closed (defaults to 1)",
            },
            isFinalPass: { type: "boolean" },
        },
        required: [
            "summary",
            "reflection",
            "nextPassFocus",
            "zoneSolidification",
            "tidyChecked",
            "isFinalPass",
        ],
    },
    // Validates the two ritual gates (zoning + tidying), then folds the pass
    // summary into the run-progress state. Gate failures are reported as error
    // strings rather than thrown, so the model can correct and retry.
    execute: async (args, onProgress) => {
        const summary = getStringArg(args, "summary");
        const reflection = getStringArg(args, "reflection");
        const nextFocus = getStringArg(args, "nextPassFocus");
        const solidified = args.zoneSolidification;
        const isFinal = args.isFinalPass;
        if (!solidified) {
            return "Error: Zone Solidification check failed. Please ensure all new code is correctly zoned before ending the pass.";
        }
        if (!args.tidyChecked) {
            return "Error: Joyful Tidying check failed. Please perform 'fold_file' on modified files to ensure code health.";
        }
        onProgress?.({
            context: `Checkpoint: ${summary} (Reflection: ${reflection})`,
            achieved: [`Completed Pass: ${summary}`],
            // We'll let the processor handle the history update
            passHistory: [{ pass: args.currentPass || 1, summary, reflection }],
            // null clears the active pass; undefined leaves it untouched.
            currentPass: isFinal ? null : undefined,
            passFocus: isFinal ? undefined : nextFocus,
        });
        return `Pass internal checkpoint reached. Focus shifting to: ${nextFocus}. Reflection: ${reflection}`;
    },
});
|
|
1307
|
+
registry.register({
    name: "map_project_context",
    description: "A mapping ritual. Returns a high-level overview of JOY zones and key inhabitants to aid strategic planning.",
    input_schema: { type: "object", properties: {} },
    // Builds a markdown map: overall JOY status, the three zones, the five most
    // recently modified files ("hotspots"), and any clustering opportunities.
    execute: async () => {
        // FIX: guard the [0] access too — workspaceFolders can be an empty
        // array, in which case the old `?.[0].uri` expression threw instead of
        // returning the "No workspace" message.
        const root = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath;
        if (!root)
            return "No workspace detected.";
        const { isProjectJoyful, proposeClustering } = await import("../../domain/joy/JoyTools.js");
        const isJoyful = await isProjectJoyful(root);
        const clustering = await proposeClustering(root);
        // Real Hotspot detection based on mtime
        const hotspots = [];
        try {
            const entries = await fs.readdir(root, {
                withFileTypes: true,
                recursive: true,
            });
            // NOTE(review): Dirent.path is deprecated in newer Node releases in
            // favor of Dirent.parentPath — confirm the minimum supported Node
            // version before migrating.
            const files = entries
                .filter((e) => e.isFile() &&
                !e.name.startsWith(".") &&
                !e.path.includes("node_modules"))
                .map((e) => path.join(e.path, e.name));
            // Stat everything in parallel, then keep the five newest files.
            const stats = await Promise.all(files.map(async (f) => ({
                path: f,
                mtime: (await fs.stat(f)).mtime.getTime(),
            })));
            const recent = stats.sort((a, b) => b.mtime - a.mtime).slice(0, 5);
            hotspots.push(...recent.map((r) => path.relative(root, r.path)));
        }
        catch (e) {
            // Best-effort: fall back to well-known hotspots rather than failing
            // the whole map when the recursive scan is unavailable.
            console.error("Hotspot detection failed:", e);
            hotspots.push("src/domain/marie/MarieTypes.ts", "src/infrastructure/ai/MarieEngine.ts");
        }
        let map = `# Project Map (JOY Structure)\n\n`;
        map += `**Status**: ${isJoyful ? "Joyful Ecosystem ✨" : "Architectural Void ⚠️ (Genesis Needed)"}\n\n`;
        map += `**Zones**:\n`;
        map += `- **Domain** (\`src/domain\`): Core logic and purity.\n`;
        map += `- **Infrastructure** (\`src/infrastructure\`): Adapters and stability.\n`;
        map += `- **Plumbing** (\`src/plumbing\`): Mechanical machinery.\n\n`;
        map += `**Hotspots** (Most Recently Modified):\n`;
        for (const h of hotspots) {
            map += `- \`${h}\`\n`;
        }
        map += `\n`;
        if (clustering.length > 0) {
            map += `**Clustering Opportunities**:\n`;
            for (const c of clustering) {
                map += `- ${c.zone}: ${c.fileCount} files. Consider sub-zones like ${c.suggestedClusters.join(", ")}.\n`;
            }
        }
        return map;
    },
});
|
|
1361
|
+
registry.register({
    name: "update_run_objectives",
    description: "Update the current run's objectives and progress context.",
    input_schema: {
        type: "object",
        properties: {
            context: { type: "string" },
            completedObjectiveIds: { type: "array", items: { type: "string" } },
            activeObjectiveId: { type: "string" },
            achieved: { type: "array", items: { type: "string" } },
            totalPasses: { type: "number" },
        },
        required: ["context"],
    },
    // Pure state relay: everything is forwarded to the progress sink; the
    // return string just echoes what was recorded.
    execute: async (args, onProgress) => {
        const progress = {
            context: getStringArg(args, "context"),
            completedObjectiveIds: getArrayArg(args, "completedObjectiveIds"),
            activeObjectiveId: getStringArg(args, "activeObjectiveId"),
            achieved: getArrayArg(args, "achieved"),
            totalPasses: args.totalPasses,
        };
        onProgress?.(progress);
        const passSuffix = progress.totalPasses ? ` (Total Passes: ${progress.totalPasses})` : "";
        return `Progress updated: ${progress.context}${passSuffix}`;
    },
});
|
|
1391
|
+
registry.register({
    name: "augment_roadmap",
    description: "Insert a new pass into the current roadmap. Use this when significant unexpected complexity is discovered.",
    input_schema: {
        type: "object",
        properties: {
            addedPassCount: {
                type: "number",
                description: "How many passes to add (usually 1)",
            },
            newPassFocus: {
                type: "string",
                description: "The focus for the upcoming augmented pass",
            },
            reason: {
                type: "string",
                description: "Why is the roadmap being augmented? (Mindfulness discovery)",
            },
        },
        required: ["addedPassCount", "newPassFocus", "reason"],
    },
    // Reports the roadmap extension to the progress sink; the processor owns
    // the actual pass-count bookkeeping.
    execute: async (args, onProgress) => {
        const extraPasses = args.addedPassCount;
        const upcomingFocus = getStringArg(args, "newPassFocus");
        const why = getStringArg(args, "reason");
        onProgress?.({
            context: `Roadmap Augmented: ${why}`,
            achieved: [`Calibrated roadmap: +${extraPasses} pass(es)`],
            // The processor will handle the totalPasses increment
            totalPasses: extraPasses, // This is a delta for the processor if we handle it there, or absolute
            passFocus: upcomingFocus,
        });
        return `Roadmap augmented with ${extraPasses} additional pass(es). Reason: ${why}`;
    },
});
|
|
1426
|
+
registry.register({
    name: "execute_genesis_ritual",
    description: "Convert an entire project to the JOY structure.",
    // No parameters: genesis always applies to the whole workspace.
    input_schema: { type: "object", properties: {} },
    // Thin delegation to the automation service.
    execute: async () => {
        return automationService.triggerGenesis();
    },
});
|
|
1432
|
+
registry.register({
    name: "sow_joy_feature",
    description: "Scaffold a new feature structure across all JOY zones.",
    input_schema: {
        type: "object",
        properties: {
            name: { type: "string" },
            intent: { type: "string" },
        },
        required: ["name", "intent"],
    },
    // Unpacks the two required arguments and hands off to the automation service.
    execute: async (args) => {
        const featureName = getStringArg(args, "name");
        const featureIntent = getStringArg(args, "intent");
        return automationService.sowJoyFeature(featureName, featureIntent);
    },
});
|
|
1445
|
+
registry.register({
    name: "perform_garden_pulse",
    description: "Deep audit of project structure and scaffolding.",
    // No parameters: the pulse always audits the whole project.
    input_schema: { type: "object", properties: {} },
    // Thin delegation to the automation service.
    execute: async () => {
        return automationService.performGardenPulse();
    },
});
|
|
1451
|
+
registry.register({
    name: "execute_joy_maintenance",
    description: "Perform autonomous maintenance on the garden structure (Restoration Ritual).",
    // Marked destructive so the approval flow gates it, like other write tools.
    isDestructive: true,
    input_schema: { type: "object", properties: {} },
    // Thin delegation to the automation service.
    execute: async () => {
        return automationService.executeAutonomousRestoration();
    },
});
|
|
1458
|
+
registry.register({
    name: "fold_file",
    description: "Format and organize imports in a file.",
    input_schema: {
        type: "object",
        properties: { path: { type: "string" } },
        required: ["path"],
    },
    // Thin wrapper over the shared foldCode helper.
    execute: async (args) => {
        const targetPath = getStringArg(args, "path");
        return foldCode(targetPath);
    },
});
|
|
1468
|
+
registry.register({
    name: "cherish_file",
    description: "Update the timestamp of a 'Sentimental' file.",
    input_schema: {
        type: "object",
        properties: { path: { type: "string" } },
        required: ["path"],
    },
    // Thin wrapper over the shared cherishFile helper.
    execute: async (args) => {
        const targetPath = getStringArg(args, "path");
        return cherishFile(targetPath);
    },
});
|
|
1478
|
+
registry.register({
    name: "check_code_health",
    description: "Analyze a file for complexity and technical debt.",
    input_schema: {
        type: "object",
        properties: { path: { type: "string" } },
        required: ["path"],
    },
    // Runs the health analyzer and serializes its report for the model.
    execute: async (args) => {
        const report = await checkCodeHealth(getStringArg(args, "path"));
        return JSON.stringify(report);
    },
});
|
|
1488
|
+
registry.register({
    name: "replace_file_content",
    description: "Replace a specific string with another in a file. Surgical and mindful.",
    isDestructive: true,
    input_schema: {
        type: "object",
        properties: {
            // The path/targetFile and targetContent/search and
            // replacementContent/replace pairs are aliases; execute accepts
            // either spelling (only "path" is formally required).
            path: { type: "string" },
            targetFile: { type: "string" },
            targetContent: { type: "string" },
            replacementContent: { type: "string" },
            search: { type: "string" },
            replace: { type: "string" },
        },
        required: ["path"],
    },
    // Performs a string replacement via FileService, folds (tidies) the file
    // afterwards, and routes any failure into the self-healing flow instead of
    // surfacing a raw error.
    execute: async (args, onProgress, signal) => {
        // NOTE(review): the `||` fallbacks assume getStringArg returns a falsy
        // value (rather than throwing) when the key is absent — confirm against
        // getStringArg's definition. If it throws, the alias fallbacks are dead.
        const p = getStringArg(args, "path") || getStringArg(args, "targetFile");
        const s = getStringArg(args, "targetContent") || getStringArg(args, "search");
        const r = getStringArg(args, "replacementContent") ||
            getStringArg(args, "replace");
        try {
            const { replaceInFile } = await import("../../plumbing/filesystem/FileService.js");
            const result = await replaceInFile(p, s, r);
            // Post-Execution Tidying
            await foldCode(p);
            return `${result}\n\n**Autonomy Note**: Post-execution tidying completed. 🌸`;
        }
        catch (error) {
            // Autonomous Self-Healing Trigger
            // Any failure (import, replace, or fold) is handed to the automation
            // service; its return value becomes the tool result.
            return await automationService.executeSelfHealing(p, error.message);
        }
    },
});
|
|
1522
|
+
registry.register({
    name: "generate_joy_dashboard",
    description: "Generate a JOY.md dashboard for the workspace.",
    input_schema: {
        type: "object",
        properties: { rootPath: { type: "string" } },
        required: ["rootPath"],
    },
    // Thin wrapper over the shared dashboard generator.
    execute: async (args) => {
        const rootPath = getStringArg(args, "rootPath");
        return generateJoyDashboard(rootPath);
    },
});
|
|
1532
|
+
registry.register({
    name: "complete_task_ritual",
    description: "A final ritual to conclude a large task. Summarizes the entire journey and expresses gratitude. WARNING: Do NOT call this tool until you have confirmed the user's request is 100% satisfied. If you have any doubts or pending verifications, use `notify_user` instead.",
    input_schema: {
        type: "object",
        properties: {
            finalSummary: {
                type: "string",
                description: "The 'Bloom Report': a synthesis of all work achieved.",
            },
            gratitude: {
                type: "string",
                description: "A final expression of gratitude for the code and the process.",
            },
            healthCheck: {
                type: "string",
                description: "A final assessment of the project's JOY state.",
            },
        },
        required: ["finalSummary", "gratitude", "healthCheck"],
    },
    // Closes out the run: clears the active pass, marks the ritual complete,
    // and emits the final human-readable report.
    execute: async (args, onProgress) => {
        const bloomReport = getStringArg(args, "finalSummary");
        const thanks = getStringArg(args, "gratitude");
        const joyState = getStringArg(args, "healthCheck");
        onProgress?.({
            context: `Bloom Ritual: ${bloomReport}`,
            achieved: [`Task Completed: ${bloomReport}`, `Code Ascension complete ✨`],
            // null (not undefined) explicitly clears the active pass.
            currentPass: null,
            passFocus: undefined,
            ritualComplete: true,
        });
        return `Task Bloom Ritual complete.\n\nReport: ${bloomReport}\n\nGarden Growth: Metrics archived in telemetry.\n\nHealth: ${joyState}\n\nGratitude: ${thanks} ✨`;
    },
});
|
|
1567
|
+
}
|
|
1568
|
+
//# sourceMappingURL=MarieToolDefinitions.js.map
|