@polterware/polter 0.4.2 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +70 -184
  2. package/dist/api.js +62 -30
  3. package/dist/app-HGIGWI7F.js +393 -0
  4. package/dist/appPanel-EZOHLTBX.js +1365 -0
  5. package/dist/applier-OEXIUYYO.js +10 -0
  6. package/dist/chunk-3RG5ZIWI.js +10 -0
  7. package/dist/chunk-45CQFZU7.js +262 -0
  8. package/dist/chunk-57CZSEY5.js +5398 -0
  9. package/dist/chunk-6IBRTRLX.js +257 -0
  10. package/dist/chunk-AK3NTS3Y.js +220 -0
  11. package/dist/chunk-BGT5TT2A.js +32 -0
  12. package/dist/chunk-BIN7BDA2.js +77 -0
  13. package/dist/chunk-E2B5FFBU.js +81 -0
  14. package/dist/chunk-EAMHFQKU.js +222 -0
  15. package/dist/chunk-ELSIHPJL.js +455 -0
  16. package/dist/{chunk-XCCKD3RZ.js → chunk-GCS7JEYU.js} +7 -3
  17. package/dist/chunk-GKROVUDG.js +15 -0
  18. package/dist/chunk-GVIKF6UI.js +738 -0
  19. package/dist/chunk-JQB2A3CA.js +72 -0
  20. package/dist/chunk-KEGROLGX.js +50 -0
  21. package/dist/chunk-OKHPN6X7.js +49 -0
  22. package/dist/chunk-RVMOIUSL.js +22 -0
  23. package/dist/chunk-TD6YNU6L.js +22 -0
  24. package/dist/chunk-U64WZOJ3.js +101 -0
  25. package/dist/chunk-U6725U7K.js +138 -0
  26. package/dist/chunk-XNRIN3VM.js +125 -0
  27. package/dist/chunk-ZU5VZHYD.js +28 -0
  28. package/dist/commands-BIIWGCVS.js +15 -0
  29. package/dist/editor-AUFJZ4PE.js +11 -0
  30. package/dist/engine-EZQ26HDJ.js +11 -0
  31. package/dist/globalConf-AGMMIKSL.js +7 -0
  32. package/dist/index.js +49 -7601
  33. package/dist/ipcServer-HXOPKNBP.js +10 -0
  34. package/dist/mcp.js +182 -13892
  35. package/dist/mcpInstaller-J2AGFNWR.js +19 -0
  36. package/dist/parser-4ZBGSI2U.js +10 -0
  37. package/dist/planner-ZVBA66V6.js +9 -0
  38. package/dist/processManager-6T5DBURV.js +37 -0
  39. package/dist/projectConfig-TRCJS3VI.js +21 -0
  40. package/dist/skillSetup-ZQEHJ5ZG.js +14 -0
  41. package/dist/status-QMRCV4XJ.js +8 -0
  42. package/dist/storage-C3D7TLJW.js +17 -0
  43. package/dist/toolResolver-A2BUT3NK.js +17 -0
  44. package/package.json +28 -3
  45. package/dist/chunk-CWBIXRZP.js +0 -2607
@@ -0,0 +1,72 @@
1
+ import {
2
+ getCommandById
3
+ } from "./chunk-GVIKF6UI.js";
4
+ import {
5
+ resolvePkgArgs,
6
+ resolveToolCommand,
7
+ runCommand
8
+ } from "./chunk-ELSIHPJL.js";
9
+
10
// src/pipeline/pipelineEvents.ts
// Shared event bus for pipeline lifecycle notifications ("stepStarted",
// "stepCompleted", "pipelineCompleted") emitted by executePipeline below.
import EE from "eventemitter3";
// eventemitter3 is exposed either as a namespace with an EventEmitter
// property or as the constructor itself, depending on bundler interop;
// accept both shapes.
var EventEmitter = EE.EventEmitter ?? EE;
var pipelineEvents = new EventEmitter();
14
+
15
// src/pipeline/engine.ts
/**
 * Executes every step of a pipeline in order.
 *
 * For each step the function resolves the underlying CLI command, runs it,
 * and records the outcome. After any state change it invokes `onProgress`
 * with a snapshot of all step results, and it mirrors the lifecycle on the
 * shared `pipelineEvents` bus. When a step fails and does not opt into
 * `continueOnError`, all remaining steps are marked "skipped".
 *
 * @param pipeline   pipeline definition with a `steps` array
 * @param onProgress callback receiving { stepResults, currentStepIndex, done }
 * @param cwd        working directory for command resolution/execution
 * @returns the final array of step results
 */
async function executePipeline(pipeline, onProgress, cwd = process.cwd()) {
  const results = pipeline.steps.map((step) => ({ step, status: "pending" }));
  const report = (index) =>
    onProgress({ stepResults: [...results], currentStepIndex: index, done: false });
  let halted = false;
  for (const [index, step] of pipeline.steps.entries()) {
    if (halted) {
      // A previous step failed hard; skip everything after it.
      results[index] = { step, status: "skipped" };
      report(index);
      continue;
    }
    results[index] = { step, status: "running" };
    pipelineEvents.emit("stepStarted", step, index);
    report(index);
    const definition = getCommandById(step.commandId);
    const toolId = definition?.tool ?? "supabase";
    const resolved = resolveToolCommand(toolId, cwd);
    let baseArgs = definition?.base ?? [];
    if (toolId === "pkg" && definition) {
      try {
        // Best-effort translation to the detected package manager.
        baseArgs = resolvePkgArgs(definition.base, cwd).args;
      } catch {
        // Translation failure falls back to the untranslated base args.
      }
    }
    const fullArgs = [...baseArgs, ...step.args, ...step.flags];
    const result = await runCommand(
      { command: resolved.command, env: resolved.env },
      fullArgs,
      cwd
    ).promise;
    const succeeded = !result.spawnError && result.exitCode === 0;
    results[index] = { step, status: succeeded ? "success" : "error", result };
    pipelineEvents.emit("stepCompleted", results[index], index);
    if (!succeeded && !step.continueOnError) {
      halted = true;
    }
    report(index);
  }
  onProgress({
    stepResults: [...results],
    currentStepIndex: pipeline.steps.length - 1,
    done: true
  });
  pipelineEvents.emit("pipelineCompleted", results);
  return results;
}
69
+
70
+ export {
71
+ executePipeline
72
+ };
@@ -0,0 +1,50 @@
1
+ // src/lib/editor.ts
2
+ import { spawnSync, spawn } from "child_process";
3
+ import { basename } from "path";
4
// Editors that must run attached to the terminal (blocking) rather than as
// detached GUI processes.
var TERMINAL_EDITORS = /* @__PURE__ */ new Set([
  "vi",
  "vim",
  "nvim",
  "nano",
  "pico",
  "emacs",
  "micro",
  "helix",
  "hx",
  "joe",
  "mcedit"
]);
/**
 * Resolves the user's preferred editor from $VISUAL (first) or $EDITOR,
 * splitting any extra arguments on whitespace. Falls back to notepad on
 * Windows and nano elsewhere.
 *
 * Fix: a whitespace-only $VISUAL/$EDITOR previously produced
 * { command: "", args: [] }, leading to a failed spawn of an empty command;
 * such values now fall through to the platform default.
 *
 * @returns {{ command: string, args: string[] }}
 */
function resolveEditor() {
  const raw = process.env["VISUAL"] || process.env["EDITOR"];
  const trimmed = raw?.trim();
  if (trimmed) {
    const parts = trimmed.split(/\s+/);
    return { command: parts[0], args: parts.slice(1) };
  }
  const fallback = process.platform === "win32" ? "notepad" : "nano";
  return { command: fallback, args: [] };
}
/** True when the command's basename is a known terminal (blocking) editor. */
function isTerminalEditor(cmd) {
  return TERMINAL_EDITORS.has(basename(cmd));
}
/**
 * Opens filePath in the resolved editor.
 *
 * Terminal editors run synchronously with inherited stdio and report their
 * real exit code; GUI editors are spawned detached (and unref'd so they do
 * not keep the process alive) and are assumed to succeed.
 *
 * @returns {{ exitCode: number | null, isTerminal: boolean }}
 */
function openInEditor(filePath) {
  const editor = resolveEditor();
  if (isTerminalEditor(editor.command)) {
    const result = spawnSync(editor.command, [...editor.args, filePath], {
      stdio: "inherit"
    });
    return { exitCode: result.status, isTerminal: true };
  }
  const child = spawn(editor.command, [...editor.args, filePath], {
    detached: true,
    stdio: "ignore"
  });
  child.unref();
  return { exitCode: 0, isTerminal: false };
}
45
+
46
+ export {
47
+ resolveEditor,
48
+ isTerminalEditor,
49
+ openInEditor
50
+ };
@@ -0,0 +1,49 @@
1
+ import {
2
+ commandExists
3
+ } from "./chunk-RVMOIUSL.js";
4
+
5
+ // src/declarative/status.ts
6
+ import { execSync } from "child_process";
7
/**
 * Runs a shell command and returns its trimmed stdout, or undefined when the
 * command exits non-zero or cannot be run. All three stdio streams are piped
 * so nothing leaks to the user's terminal.
 */
function safeExec(cmd) {
  try {
    const output = execSync(cmd, {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"]
    });
    return output.trim();
  } catch {
    return undefined;
  }
}
14
/**
 * Probes locally installed CLI tools (supabase, vercel, gh) and returns a
 * best-effort snapshot of their link/auth state for the given directory.
 * A tool's section is present only when its binary exists on PATH.
 *
 * NOTE(review): cwd is interpolated into a double-quoted shell string; a
 * path containing `"` would break the command. Acceptable for the default
 * process.cwd(), but confirm no untrusted caller supplies cwd.
 */
function getCurrentStatus(cwd = process.cwd()) {
  const result = {};
  if (commandExists("supabase")) {
    // Any successful `projects list` is treated as linked/authenticated.
    const linked = safeExec(`cd "${cwd}" && supabase projects list 2>/dev/null`) !== void 0;
    result.supabase = {
      linked,
      projectRef: void 0, // not derivable from the CLI output used here
      functions: []
    };
    const functionsOutput = safeExec(`cd "${cwd}" && supabase functions list 2>/dev/null`);
    if (functionsOutput) {
      // Drop blank lines, box-drawing border rows (U+2502 / U+250C), and
      // the header row (the .slice(1)).
      result.supabase.functions = functionsOutput.split("\n").filter((line) => line.trim() && !line.startsWith("\u2502") && !line.startsWith("\u250C")).slice(1);
    }
  }
  if (commandExists("vercel")) {
    const whoami = safeExec("vercel whoami 2>/dev/null");
    result.vercel = {
      linked: !!whoami,
      projectId: void 0
    };
  }
  if (commandExists("gh")) {
    // `gh auth status` writes to stderr, hence the 2>&1 redirect.
    const authStatus = safeExec("gh auth status 2>&1");
    const authenticated = authStatus?.includes("Logged in") ?? false;
    const repoOutput = safeExec(`cd "${cwd}" && gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null`);
    result.github = {
      repo: repoOutput || void 0, // empty output means no repo detected
      authenticated
    };
  }
  return result;
}
46
+
47
+ export {
48
+ getCurrentStatus
49
+ };
@@ -0,0 +1,22 @@
1
+ // src/lib/system.ts
2
+ import { execSync } from "child_process";
3
+ import which from "which";
4
/**
 * Reports whether an executable named `command` is available on PATH.
 * which.sync throws when the binary cannot be found; that exception is the
 * "not installed" signal.
 */
function commandExists(command) {
  try {
    which.sync(command);
  } catch {
    return false;
  }
  return true;
}
12
/**
 * Runs a shell command and returns its trimmed stdout. Unlike a "safe"
 * variant, failures (non-zero exit, missing binary) propagate as exceptions.
 */
function execCapture(command) {
  const options = {
    encoding: "utf-8",
    stdio: ["pipe", "pipe", "pipe"]
  };
  return execSync(command, options).trim();
}
18
+
19
+ export {
20
+ commandExists,
21
+ execCapture
22
+ };
@@ -0,0 +1,22 @@
1
+ // src/lib/fs.ts
2
+ import {
3
+ readFileSync,
4
+ writeFileSync,
5
+ existsSync,
6
+ mkdirSync,
7
+ rmSync,
8
+ readdirSync,
9
+ unlinkSync,
10
+ copyFileSync,
11
+ statSync
12
+ } from "fs";
13
+ import { readdir, stat, mkdtemp } from "fs/promises";
14
+
15
+ export {
16
+ readFileSync,
17
+ writeFileSync,
18
+ existsSync,
19
+ mkdirSync,
20
+ readdirSync,
21
+ unlinkSync
22
+ };
@@ -0,0 +1,101 @@
1
+ import {
2
+ deleteProjectPipeline,
3
+ getProjectPipelines,
4
+ saveProjectPipeline
5
+ } from "./chunk-BIN7BDA2.js";
6
+ import {
7
+ getConf
8
+ } from "./chunk-GKROVUDG.js";
9
+
10
+ // src/config/store.ts
11
+ import { z as z2 } from "zod";
12
+
13
+ // src/data/schemas.ts
14
+ import { z } from "zod";
15
// One pipeline step: which command to run, with what args/flags, and whether
// a failure should abort the remaining steps.
var PipelineStepSchema = z.object({
  id: z.string(),
  commandId: z.string(),
  args: z.array(z.string()),
  flags: z.array(z.string()),
  continueOnError: z.boolean(),
  label: z.string().optional()
});
// A saved, named pipeline: ordered steps plus timestamp strings.
// NOTE(review): createdAt/updatedAt are plain strings — presumably ISO-8601;
// confirm against the code that writes them.
var PipelineSchema = z.object({
  id: z.string(),
  name: z.string(),
  description: z.string().optional(),
  steps: z.array(PipelineStepSchema),
  createdAt: z.string(),
  updatedAt: z.string()
});
31
+
32
// src/config/store.ts
// Config key under which the global (cross-project) pipeline list persists.
var GLOBAL_PIPELINES_KEY = "globalPipelinesV1";
/**
 * Loads all globally stored pipelines, initializing the store entry to an
 * empty list on first access. Data that fails schema validation yields [].
 */
function getGlobalPipelines() {
  const config = getConf();
  if (!config.has(GLOBAL_PIPELINES_KEY)) {
    config.set(GLOBAL_PIPELINES_KEY, []);
  }
  const parsed = z2.array(PipelineSchema).safeParse(config.get(GLOBAL_PIPELINES_KEY));
  if (!parsed.success) {
    return [];
  }
  return parsed.data;
}
/** Inserts or replaces (matched by id) a pipeline in the global store. */
function saveGlobalPipeline(pipeline) {
  const config = getConf();
  const pipelines = getGlobalPipelines();
  const existing = pipelines.findIndex((p) => p.id === pipeline.id);
  if (existing === -1) {
    pipelines.push(pipeline);
  } else {
    pipelines[existing] = pipeline;
  }
  config.set(GLOBAL_PIPELINES_KEY, pipelines);
}
/** Removes the pipeline with the given id from the global store, if present. */
function deleteGlobalPipeline(pipelineId) {
  const config = getConf();
  const remaining = getGlobalPipelines().filter((p) => p.id !== pipelineId);
  config.set(GLOBAL_PIPELINES_KEY, remaining);
}
59
+
60
// src/pipeline/storage.ts
/**
 * Returns project-level pipelines merged with global ones, each tagged with
 * its `source`. When the same id exists in both scopes, the project-level
 * pipeline wins.
 */
function getAllPipelines(startDir) {
  const fromProject = getProjectPipelines(startDir).map((p) => ({
    ...p,
    source: "project"
  }));
  const fromGlobal = getGlobalPipelines().map((p) => ({
    ...p,
    source: "global"
  }));
  const projectIds = new Set(fromProject.map((p) => p.id));
  return [...fromProject, ...fromGlobal.filter((p) => !projectIds.has(p.id))];
}
/**
 * Persists a pipeline to the requested scope. Project saves report success
 * via the underlying helper's return value; global saves always succeed.
 */
function savePipeline(pipeline, source, startDir) {
  if (source !== "project") {
    saveGlobalPipeline(pipeline);
    return true;
  }
  return saveProjectPipeline(pipeline, startDir);
}
/** Deletes a pipeline from the given scope. */
function deletePipeline(pipelineId, source, startDir) {
  if (source === "project") {
    deleteProjectPipeline(pipelineId, startDir);
    return;
  }
  deleteGlobalPipeline(pipelineId);
}
/** Finds a pipeline (project or global) by exact name match. */
function findPipelineByName(name, startDir) {
  return getAllPipelines(startDir).find((p) => p.name === name);
}
95
+
96
+ export {
97
+ getAllPipelines,
98
+ savePipeline,
99
+ deletePipeline,
100
+ findPipelineByName
101
+ };
@@ -0,0 +1,138 @@
1
+ import {
2
+ existsSync,
3
+ readFileSync
4
+ } from "./chunk-TD6YNU6L.js";
5
+
6
+ // src/declarative/parser.ts
7
+ import { join } from "path";
8
+
9
+ // src/declarative/schema.ts
10
+ import { z } from "zod";
11
// Schema for polter.yaml, the declarative infrastructure config. Every
// section other than `version` is optional so partial configs validate.
var PolterYamlSchema = z.object({
  version: z.literal(1),
  project: z.object({
    name: z.string()
  }).optional(),
  supabase: z.object({
    project_ref: z.string().optional(),
    region: z.string().optional(),
    database: z.object({
      migrations_dir: z.string().optional()
    }).optional(),
    functions: z.array(z.object({
      name: z.string(),
      verify_jwt: z.boolean().optional()
    })).optional(),
    // presumably secret NAMES only (values live elsewhere) — confirm
    secrets: z.array(z.string()).optional()
  }).optional(),
  vercel: z.object({
    project_id: z.string().optional(),
    framework: z.string().optional(),
    domains: z.array(z.string()).optional(),
    // outer key looks like environment name, inner map var -> value — confirm
    env: z.record(z.string(), z.record(z.string(), z.string())).optional()
  }).optional(),
  github: z.object({
    repo: z.string().optional(),
    // keyed by branch name — confirm against the applier
    branch_protection: z.record(z.string(), z.object({
      required_reviews: z.number().optional(),
      require_status_checks: z.boolean().optional()
    })).optional(),
    secrets: z.array(z.string()).optional()
  }).optional(),
  pkg: z.object({
    manager: z.enum(["npm", "pnpm", "yarn", "bun"]).optional()
  }).optional(),
  // pipeline name -> definition; steps are plain strings here
  pipelines: z.record(z.string(), z.object({
    description: z.string().optional(),
    steps: z.array(z.string())
  })).optional()
});
50
+
51
// src/declarative/parser.ts
var YAML_FILE = "polter.yaml";
/**
 * Minimal indentation-based YAML subset parser (avoids an external YAML
 * dependency). Supports nested maps, lists of scalars, lists of maps (first
 * key on the "- " line), quoted strings, booleans and numbers.
 * Not supported: block scalars ("|" / ">" open an empty container instead),
 * flow syntax, anchors, multi-document files.
 */
function parseSimpleYaml(content) {
  const lines = content.split("\n");
  const result = {};
  // Stack of currently-open mappings; `indent` is the column of the key that
  // opened the mapping (-1 for the root so it is never popped).
  const stack = [
    { obj: result, indent: -1 }
  ];
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const trimmed = line.replace(/\s+$/, "");
    // Skip blank lines and full-line comments.
    if (!trimmed || trimmed.match(/^\s*#/)) continue;
    const indent = line.search(/\S/);
    if (indent < 0) continue;
    // Close every mapping opened at this indent or deeper.
    while (stack.length > 1 && stack[stack.length - 1].indent >= indent) {
      stack.pop();
    }
    const current = stack[stack.length - 1].obj;
    const content_ = trimmed.trim();
    if (content_.startsWith("- ")) {
      // List item: append to the most recently added key of the current
      // mapping — assumed to be the list opened on an earlier line.
      const parentKey = Object.keys(current).pop();
      if (parentKey) {
        const arr = current[parentKey];
        if (Array.isArray(arr)) {
          const val = content_.slice(2).trim();
          const colonIdx2 = val.indexOf(":");
          if (colonIdx2 > 0 && !val.startsWith('"') && !val.startsWith("'")) {
            // "- key: value" opens a map item; deeper lines that follow
            // belong to it (indent + 2 approximates the key's column).
            const key = val.slice(0, colonIdx2).trim();
            const rest = val.slice(colonIdx2 + 1).trim();
            const obj = { [key]: parseValue(rest) };
            arr.push(obj);
            stack.push({ obj, indent: indent + 2 });
          } else {
            arr.push(parseValue(val));
          }
        }
      }
      continue;
    }
    const colonIdx = content_.indexOf(":");
    if (colonIdx > 0) {
      const key = content_.slice(0, colonIdx).trim();
      const rest = content_.slice(colonIdx + 1).trim();
      if (rest === "" || rest === "|" || rest === ">") {
        // Key with no inline value: peek at the next line to decide whether
        // this opens a list or a nested mapping.
        const nextLine = lines[i + 1];
        const nextTrimmed = nextLine?.trim();
        if (nextTrimmed?.startsWith("- ")) {
          current[key] = [];
        } else {
          const nested = {};
          current[key] = nested;
          stack.push({ obj: nested, indent });
        }
      } else {
        current[key] = parseValue(rest);
      }
    }
  }
  return result;
}
111
/**
 * Converts a raw YAML scalar string to a JS value: strips matching
 * surrounding quotes, recognizes true/false and numeric literals, and
 * otherwise returns the string unchanged. Empty input yields "".
 */
function parseValue(raw) {
  if (!raw) return "";
  const isQuoted =
    (raw.startsWith('"') && raw.endsWith('"')) ||
    (raw.startsWith("'") && raw.endsWith("'"));
  if (isQuoted) {
    return raw.slice(1, -1);
  }
  switch (raw) {
    case "true":
      return true;
    case "false":
      return false;
    default:
      break;
  }
  const asNumber = Number(raw);
  return Number.isNaN(asNumber) ? raw : asNumber;
}
122
/** Returns the absolute path of polter.yaml inside startDir, or undefined. */
function findPolterYaml(startDir = process.cwd()) {
  const candidate = join(startDir, YAML_FILE);
  if (!existsSync(candidate)) return void 0;
  return candidate;
}
/**
 * Locates, parses and validates polter.yaml. Returns the typed config, or
 * undefined when the file is missing or fails schema validation.
 */
function parsePolterYaml(startDir = process.cwd()) {
  const filePath = findPolterYaml(startDir);
  if (!filePath) return void 0;
  const raw = parseSimpleYaml(readFileSync(filePath, "utf-8"));
  const validated = PolterYamlSchema.safeParse(raw);
  if (!validated.success) return void 0;
  return validated.data;
}
134
+
135
+ export {
136
+ findPolterYaml,
137
+ parsePolterYaml
138
+ };
@@ -0,0 +1,125 @@
1
+ import {
2
+ existsSync,
3
+ mkdirSync,
4
+ readFileSync,
5
+ writeFileSync
6
+ } from "./chunk-TD6YNU6L.js";
7
+
8
+ // src/lib/skillSetup.ts
9
+ import { join } from "path";
10
+ import { homedir } from "os";
11
+ import pc from "picocolors";
12
// Install location of the Claude Code skill: ~/.claude/skills/polter/SKILL.md
var SKILL_DIR = join(homedir(), ".claude", "skills", "polter");
var SKILL_PATH = join(SKILL_DIR, "SKILL.md");
// Canonical skill markdown written verbatim to SKILL_PATH by setupSkill();
// also returned by getSkillContent(). Keep byte-stable: setupSkill compares
// the on-disk file against this string to decide created/updated/up-to-date.
var SKILL_CONTENT = `---
name: polter
description: "Polter - Infrastructure Orchestrator. Use when developing in any project to: monitor dev processes and check logs for errors after code changes, manage pipelines (multi-step command sequences), run CLI commands (Supabase, GitHub, Vercel, Git), manage packages (install, build, publish, audit), and apply declarative infrastructure from polter.yaml."
---

# Polter Skill

You have access to Polter MCP tools for infrastructure orchestration. Use them proactively during development.

## Process Monitoring

**Tools:** \`polter_ps\`, \`polter_logs\`, \`polter_start\`, \`polter_stop\`, \`polter_find_process\`, \`polter_smart_start\`, \`polter_run_script_bg\`

- At the start of a session, run \`polter_ps\` to check for active dev processes
- After significant code edits, check \`polter_logs\` for compilation or runtime errors
- Use \`polter_find_process\` to find processes running in the current directory
- Use \`polter_smart_start\` to start package.json scripts as background processes (e.g. dev servers)
- Use \`polter_run_script_bg\` to run arbitrary scripts in the background
- Use \`polter_stop\` to stop processes that are no longer needed

## CLI Commands

**Tools:** \`polter_list_commands\`, \`polter_run_command\`, \`polter_status\`

- Execute commands for Supabase, GitHub CLI, Vercel, and Git via their command IDs
- Use \`polter_list_commands\` to discover available commands, optionally filtered by tool
- Use \`polter_run_command\` to execute a command by its ID with additional args/flags
- Check \`polter_status\` to see which CLI tools are installed and their versions

## Pipelines

**Tools:** \`polter_list_pipelines\`, \`polter_run_pipeline\`, \`polter_create_pipeline\`, \`polter_update_pipeline\`, \`polter_delete_pipeline\`

- List and run saved multi-step command sequences with \`polter_list_pipelines\` and \`polter_run_pipeline\`
- Create new pipelines for repetitive workflows (e.g. build + test + deploy) with \`polter_create_pipeline\`
- Suggest creating pipelines when you notice the user repeating the same sequence of commands

## Package Management

**Tools:** \`polter_pkg_build\`, \`polter_pkg_install\`, \`polter_pkg_publish\`, \`polter_pkg_run_script\`, \`polter_pkg_version_bump\`, \`polter_pkg_audit\`, \`polter_pkg_info\`

- Auto-detects the package manager (npm, pnpm, yarn, bun) from lockfiles
- Use \`polter_pkg_build\` for building, \`polter_pkg_install\` for installing dependencies
- Use \`polter_pkg_run_script\` to run package.json scripts
- Use \`polter_pkg_audit\` to check for vulnerabilities
- Use \`polter_pkg_version_bump\` before publishing, then \`polter_pkg_publish\`

## Declarative Infrastructure

**Tools:** \`polter_plan\`, \`polter_apply\`

- Use \`polter_plan\` to read \`polter.yaml\` and compute a diff of desired vs current state
- Use \`polter_apply\` to execute the planned infrastructure changes
- Always run \`polter_plan\` first to review changes before applying

## Workflow Recommendations

1. **Starting a session:** Run \`polter_ps\` to see what's already running
2. **After code changes:** Check \`polter_logs\` for errors in dev server output
3. **Setting up a project:** Use \`polter_status\` to verify tools, then \`polter_smart_start\` for dev server
4. **Deploying:** Create a pipeline with build + test + deploy steps
5. **Infrastructure changes:** Edit \`polter.yaml\`, run \`polter_plan\`, then \`polter_apply\`
`;
77
/**
 * Installs or refreshes the skill file at SKILL_PATH.
 *
 * @returns {{ status: "created" | "updated" | "already-up-to-date", path: string }}
 *          the action taken plus the skill file location
 */
function setupSkill() {
  if (!existsSync(SKILL_PATH)) {
    // First install: make sure the skill directory exists, then write.
    mkdirSync(SKILL_DIR, { recursive: true });
    writeFileSync(SKILL_PATH, SKILL_CONTENT, "utf-8");
    return { status: "created", path: SKILL_PATH };
  }
  const onDisk = readFileSync(SKILL_PATH, "utf-8");
  if (onDisk === SKILL_CONTENT) {
    return { status: "already-up-to-date", path: SKILL_PATH };
  }
  writeFileSync(SKILL_PATH, SKILL_CONTENT, "utf-8");
  return { status: "updated", path: SKILL_PATH };
}
90
/**
 * CLI wrapper around setupSkill(): prints a colorized message describing
 * whether the skill file was created, updated, or already current.
 * The leading newline inside each template keeps output off the prompt line.
 */
function setupSkillCli() {
  const result = setupSkill();
  switch (result.status) {
    case "created":
      process.stdout.write(pc.green(`
Skill installed at ${result.path}
`));
      process.stdout.write(pc.dim(" Use /polter in Claude Code to activate.\n\n"));
      break;
    case "updated":
      process.stdout.write(pc.green(`
Skill updated at ${result.path}
`));
      process.stdout.write(pc.dim(" Use /polter in Claude Code to activate.\n\n"));
      break;
    case "already-up-to-date":
      process.stdout.write(pc.cyan(`
Skill already up to date at ${result.path}

`));
      break;
  }
}
113
/** Returns the canned skill markdown (SKILL_CONTENT) without touching disk. */
function getSkillContent() {
  return SKILL_CONTENT;
}
/** Returns the absolute path where the skill file is (or would be) installed. */
function getSkillPath() {
  return SKILL_PATH;
}
119
+
120
+ export {
121
+ setupSkill,
122
+ setupSkillCli,
123
+ getSkillContent,
124
+ getSkillPath
125
+ };
@@ -0,0 +1,28 @@
1
+ import {
2
+ existsSync
3
+ } from "./chunk-TD6YNU6L.js";
4
+
5
+ // src/lib/packageRoot.ts
6
+ import { dirname, join, resolve } from "path";
7
// Memoizes lookups per resolved start directory; misses are cached as
// undefined so repeated failing lookups stay cheap.
var rootCache = /* @__PURE__ */ new Map();
/**
 * Walks upward from startDir until a directory containing package.json is
 * found. Returns that directory, or undefined when the filesystem root is
 * reached without a match. Results (including misses) are cached.
 */
function findNearestPackageRoot(startDir = process.cwd()) {
  const origin = resolve(startDir);
  if (rootCache.has(origin)) return rootCache.get(origin);
  for (let dir = origin; ; ) {
    if (existsSync(join(dir, "package.json"))) {
      rootCache.set(origin, dir);
      return dir;
    }
    const parent = dirname(dir);
    if (parent === dir) {
      // Reached the filesystem root without finding a package.json.
      rootCache.set(origin, void 0);
      return void 0;
    }
    dir = parent;
  }
}
25
+
26
+ export {
27
+ findNearestPackageRoot
28
+ };
@@ -0,0 +1,15 @@
1
+ import {
2
+ allCommands,
3
+ findCommandByValue,
4
+ getCommandById,
5
+ getCommandValue,
6
+ getCommandsByTool
7
+ } from "./chunk-GVIKF6UI.js";
8
+ import "./chunk-3RG5ZIWI.js";
9
+ export {
10
+ allCommands,
11
+ findCommandByValue,
12
+ getCommandById,
13
+ getCommandValue,
14
+ getCommandsByTool
15
+ };
@@ -0,0 +1,11 @@
1
+ import {
2
+ isTerminalEditor,
3
+ openInEditor,
4
+ resolveEditor
5
+ } from "./chunk-KEGROLGX.js";
6
+ import "./chunk-3RG5ZIWI.js";
7
+ export {
8
+ isTerminalEditor,
9
+ openInEditor,
10
+ resolveEditor
11
+ };
@@ -0,0 +1,11 @@
1
+ import {
2
+ executePipeline
3
+ } from "./chunk-JQB2A3CA.js";
4
+ import "./chunk-GVIKF6UI.js";
5
+ import "./chunk-ELSIHPJL.js";
6
+ import "./chunk-RVMOIUSL.js";
7
+ import "./chunk-TD6YNU6L.js";
8
+ import "./chunk-3RG5ZIWI.js";
9
+ export {
10
+ executePipeline
11
+ };
@@ -0,0 +1,7 @@
1
+ import {
2
+ getConf
3
+ } from "./chunk-GKROVUDG.js";
4
+ import "./chunk-3RG5ZIWI.js";
5
+ export {
6
+ getConf
7
+ };