claude-setup 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +51 -0
- package/dist/builder.d.ts +24 -0
- package/dist/builder.js +259 -0
- package/dist/collect.d.ts +13 -0
- package/dist/collect.js +266 -0
- package/dist/commands/add.d.ts +1 -0
- package/dist/commands/add.js +49 -0
- package/dist/commands/doctor.d.ts +1 -0
- package/dist/commands/doctor.js +1 -0
- package/dist/commands/init.d.ts +1 -0
- package/dist/commands/init.js +42 -0
- package/dist/commands/remove.d.ts +1 -0
- package/dist/commands/remove.js +33 -0
- package/dist/commands/status.d.ts +1 -0
- package/dist/commands/status.js +23 -0
- package/dist/commands/sync.d.ts +1 -0
- package/dist/commands/sync.js +63 -0
- package/dist/doctor.d.ts +1 -0
- package/dist/doctor.js +72 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +24 -0
- package/dist/manifest.d.ts +20 -0
- package/dist/manifest.js +84 -0
- package/dist/state.d.ts +24 -0
- package/dist/state.js +55 -0
- package/package.json +49 -0
- package/templates/add.md +64 -0
- package/templates/init-empty.md +53 -0
- package/templates/init.md +132 -0
- package/templates/remove.md +53 -0
- package/templates/sync.md +70 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 AbdoKnbGit
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# claude-setup
|
|
2
|
+
|
|
3
|
+
Setup layer for Claude Code. Reads your project, writes command files, Claude Code does the rest.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npx claude-setup init
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Commands
|
|
12
|
+
|
|
13
|
+
| Command | What it does |
|
|
14
|
+
|---------|-------------|
|
|
15
|
+
| `npx claude-setup init` | Full project setup — new or existing |
|
|
16
|
+
| `npx claude-setup add` | Add a multi-file capability |
|
|
17
|
+
| `npx claude-setup sync` | Update setup after project changes |
|
|
18
|
+
| `npx claude-setup status` | Show current setup |
|
|
19
|
+
| `npx claude-setup doctor` | Validate environment |
|
|
20
|
+
| `npx claude-setup remove` | Remove a capability cleanly |
|
|
21
|
+
|
|
22
|
+
## How it works
|
|
23
|
+
|
|
24
|
+
1. **CLI collects** — reads project files (configs, source samples) with strict cost controls
|
|
25
|
+
2. **CLI writes command files** — assembles markdown instructions into `.claude/commands/`
|
|
26
|
+
3. **Claude Code executes** — you run `/stack-init` (or `/stack-sync`, etc.) in Claude Code
|
|
27
|
+
|
|
28
|
+
The CLI has zero intelligence. All reasoning is delegated to Claude Code via the command files.
|
|
29
|
+
|
|
30
|
+
## Three project states
|
|
31
|
+
|
|
32
|
+
- **Empty project** — Claude Code asks 3 discovery questions, then sets up a tailored environment
|
|
33
|
+
- **In development** — reads existing files, writes setup that references actual code patterns
|
|
34
|
+
- **Production** — same as development; merge rules protect existing Claude config (append only, never rewrite)
|
|
35
|
+
|
|
36
|
+
## What it creates
|
|
37
|
+
|
|
38
|
+
- `CLAUDE.md` — project-specific context for Claude Code
|
|
39
|
+
- `.mcp.json` — MCP server connections (only if evidenced by project files)
|
|
40
|
+
- `.claude/settings.json` — hooks (only if warranted)
|
|
41
|
+
- `.claude/skills/` — reusable patterns (only if recurring)
|
|
42
|
+
- `.claude/commands/` — project-specific slash commands
|
|
43
|
+
- `.github/workflows/` — CI workflows (only if `.github/` exists)
|
|
44
|
+
|
|
45
|
+
## Cost controls
|
|
46
|
+
|
|
47
|
+
Every byte in a command file costs tokens. The CLI enforces:
|
|
48
|
+
- Source file sampling (max 10 files, smallest first)
|
|
49
|
+
- Hard truncation per file (150/400 line thresholds)
|
|
50
|
+
- Token budget cap (20,000 tokens max per command file)
|
|
51
|
+
- Universal blocklist (node_modules, dist, binaries, etc.)
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { CollectedFiles } from "./collect.js";
import { ExistingState } from "./state.js";
/** Builds the full /stack-init command file from collected project files and existing Claude state. */
export declare function buildInitCommand(collected: CollectedFiles, state: ExistingState): string;
/** Builds the init command variant used when the project has no meaningful files yet. */
export declare function buildEmptyProjectCommand(): string;
/** Builds the /stack-add command file; `input` is the user's free-form capability description. */
export declare function buildAddCommand(input: string, collected: CollectedFiles, state: ExistingState): string;
/** File-level changes fed into the sync command. */
export interface FileDiff {
    /** New files, with their full (possibly truncated) content. */
    added: Array<{
        path: string;
        content: string;
    }>;
    /** Changed files; only the current content is carried, not the previous version. */
    changed: Array<{
        path: string;
        current: string;
    }>;
    /** Paths that no longer exist. */
    deleted: string[];
}
/** Builds the /stack-sync command file from a diff plus fresh project context. */
export declare function buildSyncCommand(diff: FileDiff, collected: CollectedFiles, state: ExistingState): string;
/** Builds the /stack-remove command file; needs only existing state, no project files. */
export declare function buildRemoveCommand(input: string, state: ExistingState): string;
/** One per-target command file produced by the atomic-step decomposition of init. */
export interface AtomicStep {
    filename: string;
    content: string;
}
/** Splits init into independent per-target steps (CLAUDE.md, .mcp.json, settings, skills, commands, workflows). */
export declare function buildAtomicSteps(collected: CollectedFiles, state: ExistingState): AtomicStep[];
/** Builds the orchestrator command that runs the atomic steps in order. */
export declare function buildOrchestratorCommand(steps: AtomicStep[]): string;
|
package/dist/builder.js
ADDED
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
import { readFileSync } from "fs";
|
|
2
|
+
import { join, dirname } from "path";
|
|
3
|
+
import { fileURLToPath } from "url";
|
|
4
|
+
// Resolve module-relative paths (this file lives in dist/); templates ship one level up.
const __dirname = dirname(fileURLToPath(import.meta.url));
const TEMPLATES_DIR = join(__dirname, "..", "templates");
// Token-budget thresholds for generated command files (estimated at ~4 chars/token):
// warn above the soft limit; above the hard cap, source samples are trimmed out.
const TOKEN_SOFT_WARN = 8_000;
const TOKEN_HARD_CAP = 20_000;
|
|
8
|
+
// Rough token estimate for budgeting: ~4 characters per token, rounded up.
function estimateTokens(content) {
    const approxTokens = content.length * 0.25;
    return Math.ceil(approxTokens);
}
|
|
11
|
+
// Read a markdown template by name from the package's templates directory.
function loadTemplate(name) {
    const templatePath = join(TEMPLATES_DIR, name);
    return readFileSync(templatePath, "utf8");
}
|
|
14
|
+
// Simple {{VARIABLE}} substitution: every occurrence of each key is replaced.
function replaceVars(template, vars) {
    return Object.entries(vars).reduce(
        (acc, [name, value]) => acc.replaceAll(`{{${name}}}`, value),
        template
    );
}
|
|
22
|
+
// Conditional blocks: {{#if VAR}}...{{else}}...{{/if}} and {{#if VAR}}...{{/if}}
/**
 * Resolve {{#if VAR}} blocks against boolean flags. Supports the plain form
 * and the {{else}} form; nested conditionals are not supported.
 *
 * Each block is matched as a whole ({{#if}}..{{/if}}, non-greedy) and the
 * {{else}} split happens inside that single match. Running a separate
 * if/else regex first (as before) let its non-greedy body span an earlier
 * plain block's {{/if}}, corrupting templates with sequential blocks.
 */
function processConditionals(template, flags) {
    const blockRegex = /\{\{#if\s+(\w+)\}\}\n?([\s\S]*?)\{\{\/if\}\}/g;
    return template.replace(blockRegex, (_match, key, body) => {
        const elseTag = "{{else}}";
        const elseIdx = body.indexOf(elseTag);
        if (elseIdx === -1) {
            // Plain {{#if}} block — keep or drop wholesale.
            return flags[key] ? body : "";
        }
        const ifBlock = body.slice(0, elseIdx);
        let elseBlock = body.slice(elseIdx + elseTag.length);
        // Mirror the old regex's optional newline swallow after {{else}}.
        if (elseBlock.startsWith("\n"))
            elseBlock = elseBlock.slice(1);
        return flags[key] ? ifBlock : elseBlock;
    });
}
|
|
37
|
+
// Render config files as "#### path" headers with fenced content bodies.
function formatConfigFiles(configs) {
    const entries = Object.entries(configs);
    if (!entries.length)
        return "(no config files found)";
    const sections = entries.map(
        ([path, content]) => `#### ${path}\n\`\`\`\n${content}\n\`\`\``
    );
    return sections.join("\n\n");
}
|
|
46
|
+
// Render sampled source files as "#### path" headers with fenced bodies.
function formatSourceFiles(source) {
    if (!source.length)
        return "(no source files sampled)";
    const sections = [];
    for (const { path, content } of source) {
        sections.push(`#### ${path}\n\`\`\`\n${content}\n\`\`\``);
    }
    return sections.join("\n\n");
}
|
|
55
|
+
// One "- path — reason" bullet per skipped file; empty string when nothing was skipped.
function formatSkippedFiles(skipped) {
    const bullets = [];
    for (const entry of skipped) {
        bullets.push(`- ${entry.path} — ${entry.reason}`);
    }
    return bullets.join("\n");
}
|
|
58
|
+
// Comma-separated list, or the literal "none" when empty.
function formatList(items) {
    return items.length ? items.join(", ") : "none";
}
|
|
63
|
+
// Assemble the full {{VARIABLE}} substitution map for a template render.
function buildVars(collected, state) {
    // Wrap text in a fenced code block; yields "" when there is nothing to show.
    const fenced = (text, lang = "") =>
        text ? `\`\`\`${lang}\n${text}\n\`\`\`` : "";
    return {
        VERSION: getVersion(),
        DATE: new Date().toISOString().split("T")[0],
        CONFIG_FILES: formatConfigFiles(collected.configs),
        SOURCE_FILES: formatSourceFiles(collected.source),
        SKIPPED_LIST: formatSkippedFiles(collected.skipped),
        CLAUDE_MD_CONTENT: fenced(state.claudeMd.content),
        MCP_JSON_CONTENT: fenced(state.mcpJson.content, "json"),
        SETTINGS_CONTENT: fenced(state.settings.content, "json"),
        SKILLS_LIST: formatList(state.skills),
        COMMANDS_LIST: formatList(state.commands),
        WORKFLOWS_LIST: formatList(state.workflows),
        HAS_GITHUB_DIR: state.hasGithubDir ? "yes" : "no",
    };
}
|
|
87
|
+
// Boolean flags driving {{#if VAR}} conditionals during template rendering.
function buildFlags(collected, state) {
    const { claudeMd, mcpJson, settings, hasGithubDir } = state;
    return {
        HAS_SKIPPED: collected.skipped.length > 0,
        HAS_CLAUDE_MD: claudeMd.exists,
        HAS_MCP_JSON: mcpJson.exists,
        HAS_SETTINGS: settings.exists,
        HAS_GITHUB_DIR: hasGithubDir,
    };
}
|
|
96
|
+
// Read this package's own version from package.json; "0.0.0" on any failure.
function getVersion() {
    const fallback = "0.0.0";
    try {
        const raw = readFileSync(join(__dirname, "..", "package.json"), "utf8");
        const { version } = JSON.parse(raw);
        return version ?? fallback;
    }
    catch {
        return fallback;
    }
}
|
|
106
|
+
// Enforce the hard token cap by replacing embedded source blocks, largest
// first, with a one-line placeholder until the content fits (or none remain).
function fitToTokenBudget(content, sources) {
    let trimmed = content;
    if (estimateTokens(trimmed) <= TOKEN_HARD_CAP)
        return trimmed;
    const largestFirst = [...sources].sort((a, b) => b.content.length - a.content.length);
    for (const victim of largestFirst) {
        const block = `#### ${victim.path}\n\`\`\`\n${victim.content}\n\`\`\``;
        trimmed = trimmed.replace(block, `[${victim.path} — removed to fit token budget]`);
        if (estimateTokens(trimmed) <= TOKEN_HARD_CAP)
            break;
    }
    return trimmed;
}
|
|
119
|
+
// Load a template, substitute variables, resolve conditionals, then enforce
// the token budget (warning at the soft limit first).
function applyTemplate(templateName, collected, state, extraVars = {}) {
    const vars = Object.assign(buildVars(collected, state), extraVars);
    const flags = buildFlags(collected, state);
    const rendered = processConditionals(replaceVars(loadTemplate(templateName), vars), flags);
    const tokens = estimateTokens(rendered);
    if (tokens > TOKEN_SOFT_WARN) {
        console.warn(`⚠️ Command file is ${tokens} tokens (soft limit: ${TOKEN_SOFT_WARN})`);
    }
    return fitToTokenBudget(rendered, collected.source);
}
|
|
132
|
+
/** Render the full /stack-init command file for this project. */
export function buildInitCommand(collected, state) {
    const templateName = "init.md";
    return applyTemplate(templateName, collected, state);
}
|
|
135
|
+
/** Render the init variant for projects with nothing to collect yet. */
export function buildEmptyProjectCommand() {
    const vars = {
        VERSION: getVersion(),
        DATE: new Date().toISOString().split("T")[0],
    };
    return replaceVars(loadTemplate("init-empty.md"), vars);
}
|
|
141
|
+
/** Render the /stack-add command file; `input` is the user's capability request. */
export function buildAddCommand(input, collected, state) {
    const extra = { USER_INPUT: input };
    return applyTemplate("add.md", collected, state, extra);
}
|
|
144
|
+
/** Render the /stack-sync command file from a file diff plus fresh project context. */
export function buildSyncCommand(diff, collected, state) {
    const fencedSection = (path, body) => `#### ${path}\n\`\`\`\n${body}\n\`\`\``;
    const orNone = (items, render) =>
        items.length ? items.map(render).join("\n\n") : "(none)";
    const lastRun = state.manifest?.runs.at(-1);
    const deletedStr = diff.deleted.length
        ? diff.deleted.map(f => `- ${f}`).join("\n")
        : "(none)";
    return applyTemplate("sync.md", collected, state, {
        LAST_RUN_DATE: lastRun?.at ?? "unknown",
        ADDED_FILES: orNone(diff.added, f => fencedSection(f.path, f.content)),
        MODIFIED_FILES: orNone(diff.changed, f => fencedSection(f.path, f.current)),
        DELETED_FILES: deletedStr,
    });
}
|
|
162
|
+
/** Render the /stack-remove command file — needs no project context, only state. */
export function buildRemoveCommand(input, state) {
    // Remove uses a minimal collected set — no project files needed
    const noFiles = { configs: {}, source: [], skipped: [] };
    return applyTemplate("remove.md", noFiles, state, { USER_INPUT: input });
}
|
|
167
|
+
/**
 * Decompose init into six independent per-target command files (CLAUDE.md,
 * .mcp.json, settings, skills, commands, workflows). Every step carries the
 * project context it needs plus merge rules, so each can run — and safely
 * re-run — on its own.
 */
export function buildAtomicSteps(collected, state) {
    // NOTE(review): fullContent is computed but never used — this call still
    // reads templates and may emit the soft-limit warning; confirm intent.
    const fullContent = buildInitCommand(collected, state);
    const vars = buildVars(collected, state);
    // NOTE(review): flags is also unused below — verify whether it can go.
    const flags = buildFlags(collected, state);
    const version = getVersion();
    const date = new Date().toISOString().split("T")[0];
    // Shared header marking the files as generated output.
    const preamble = `<!-- Generated by claude-setup ${version} on ${date} — DO NOT hand-edit -->\n`;
    // Shared idempotence guard: re-running a step must be a no-op.
    const idempotentCheck = `\nBefore writing: check if what you are about to write already exists in the target file\n(current content provided below). If yes: print "SKIPPED — already up to date" and stop.\nWrite only what is genuinely missing.\n\n`;
    // Each step gets project context + specific instructions for its target
    const steps = [
        {
            filename: "stack-1-claude-md.md",
            content: preamble + idempotentCheck +
                `## Project context\n\n${vars.CONFIG_FILES}\n\n${vars.SOURCE_FILES}\n\n` +
                `## Target: CLAUDE.md\n\n` +
                // Existing CLAUDE.md is append-only; otherwise create fresh.
                (state.claudeMd.exists
                    ? `### Current CLAUDE.md — EXISTS — append only, never rewrite, never remove\n${vars.CLAUDE_MD_CONTENT}\n\n`
                    : `CLAUDE.md does not exist. Create it.\n\n`) +
                `Write or update CLAUDE.md for THIS specific project.\nMake it specific: reference actual file paths, actual patterns, actual conventions from the source above.\nNo generic boilerplate. Every line must trace back to something in the project files.\n` +
                (state.claudeMd.exists ? `\nAppend only — never rewrite or remove existing content.` : ""),
        },
        {
            filename: "stack-2-mcp.md",
            content: preamble + idempotentCheck +
                `## Project context\n\n${vars.CONFIG_FILES}\n\n` +
                `## Target: .mcp.json\n\n` +
                // Existing .mcp.json is merge-only; creation requires evidence.
                (state.mcpJson.exists
                    ? `### Current .mcp.json — EXISTS — merge only, never remove existing entries\n${vars.MCP_JSON_CONTENT}\n\n`
                    : `.mcp.json does not exist. Create only if you find evidence of external services in the config files above.\n\n`) +
                `Only add MCP servers for services evidenced in the project files. No evidence = no server.\n` +
                (state.mcpJson.exists ? `Merge only — never remove existing entries. Produce valid JSON.` : ""),
        },
        {
            filename: "stack-3-settings.md",
            content: preamble + idempotentCheck +
                `## Project context\n\n${vars.CONFIG_FILES}\n\n` +
                `## Target: .claude/settings.json\n\n` +
                // Existing settings are merge-only; hooks must be earned.
                (state.settings.exists
                    ? `### Current settings.json — EXISTS — merge only, never remove existing hooks\n${vars.SETTINGS_CONTENT}\n\n`
                    : `.claude/settings.json does not exist. Create only if hooks are genuinely warranted for this project.\n\n`) +
                `Every hook adds overhead on every Claude Code action. Only add if clearly earned for THIS project.\n` +
                (state.settings.exists ? `Merge only — never remove existing hooks. Never modify existing values.` : ""),
        },
        {
            filename: "stack-4-skills.md",
            content: preamble + idempotentCheck +
                `## Project context\n\n${vars.CONFIG_FILES}\n\n${vars.SOURCE_FILES}\n\n` +
                `## Target: .claude/skills/\n\n` +
                `Skills installed: ${vars.SKILLS_LIST}\n\n` +
                `Only create skills for patterns that recur across this codebase and benefit from automatic loading.\n` +
                `Use applies-when frontmatter so skills load only when relevant.\n` +
                `If a similar skill already exists: extend it. Do not create a parallel one.\n` +
                `Empty is fine — not every project needs skills.`,
        },
        {
            filename: "stack-5-commands.md",
            content: preamble + idempotentCheck +
                `## Project context\n\n${vars.CONFIG_FILES}\n\n` +
                `## Target: .claude/commands/ (not stack-*.md files)\n\n` +
                `Commands installed: ${vars.COMMANDS_LIST}\n\n` +
                `Only create commands that will actually be useful for this kind of project.\n` +
                `Do not duplicate existing commands. Do not create stack-*.md files.`,
        },
        {
            filename: "stack-6-workflows.md",
            content: preamble + idempotentCheck +
                `## Target: .github/workflows/\n\n` +
                `.github/ exists: ${vars.HAS_GITHUB_DIR}\n` +
                `Workflows installed: ${vars.WORKFLOWS_LIST}\n\n` +
                (state.hasGithubDir
                    ? `Only create workflows warranted by the project. If workflows already exist: do not touch them.`
                    : `.github/ does not exist. Only create workflows if the project clearly warrants them.`),
        },
    ];
    return steps;
}
|
|
243
|
+
/** Render the orchestrator command that runs the atomic steps in order. */
export function buildOrchestratorCommand(steps) {
    const version = getVersion();
    const date = new Date().toISOString().split("T")[0];
    const stepList = steps
        .map((step, index) => `${index + 1}. /${step.filename.replace(".md", "")}`)
        .join("\n");
    const lines = [
        `<!-- Generated by claude-setup ${version} on ${date} — DO NOT hand-edit -->`,
        "<!-- Run /stack-init in Claude Code -->",
        "",
        "Run these in order. If one fails, fix it and continue from that step only.",
        "Do not re-run steps that already completed.",
        "",
        stepList,
        "",
        "After all steps complete: one-line summary of what was created.",
        "",
    ];
    return lines.join("\n");
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/** Project files gathered for embedding into a generated command file. */
export interface CollectedFiles {
    /** Config file contents keyed by filename pattern; values may be truncated. */
    configs: Record<string, string>;
    /** Sampled source files; content may be truncated. */
    source: Array<{
        path: string;
        content: string;
    }>;
    /** Files found but not included, with a human-readable reason. */
    skipped: Array<{
        path: string;
        reason: string;
    }>;
}
/** Scan `cwd` (default: process.cwd()) for configs and a cost-capped sample of source files. */
export declare function collectProjectFiles(cwd?: string): Promise<CollectedFiles>;
/** True when nothing meaningful was collected — no source and no configs beyond a bare package.json. */
export declare function isEmptyProject(collected: CollectedFiles): boolean;
|
package/dist/collect.js
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
import { readFileSync, existsSync, statSync } from "fs";
|
|
2
|
+
import { glob } from "glob";
|
|
3
|
+
import { join, extname, basename, dirname } from "path";
|
|
4
|
+
// Universal blocklist — always exclude these patterns
// Directory names pruned entirely from the scan (dependency, build, VCS, cache trees).
const BLOCKED_DIRS = new Set([
    "node_modules", "vendor", ".venv", "venv", "env", "__pypackages__",
    "dist", "build", "out", ".next", ".nuxt", ".svelte-kit", "__pycache__",
    "target", ".git", ".cache", "coverage", ".nyc_output", ".pytest_cache",
    ".tox", "htmlcov", ".ruff_cache", "logs",
]);
// Extensions never sampled (binaries, media, archives, data dumps, artifacts).
// NOTE(review): Node's extname() returns only the final extension (".js" for
// "app.min.js"), so multi-part entries like ".min.js" cannot match via
// extname() — confirm how the file scan screens those.
const BLOCKED_EXTENSIONS = new Set([
    ".lock", ".pyc", ".pyo", ".class", ".o", ".a", ".so", ".dylib", ".dll",
    ".exe", ".wasm", ".min.js", ".min.css", ".bundle.js", ".chunk.js", ".map",
    ".png", ".jpg", ".jpeg", ".gif", ".webp", ".avif", ".ico",
    ".woff", ".woff2", ".ttf", ".eot", ".otf",
    ".mp4", ".mp3", ".wav", ".ogg", ".webm", ".pdf",
    ".zip", ".tar", ".gz", ".bz2", ".rar", ".7z", ".dmg", ".pkg", ".deb", ".rpm",
    ".sqlite3", ".sqlite", ".db", ".csv", ".parquet", ".arrow",
    ".pkl", ".pickle", ".npy", ".npz",
    ".log", ".swp", ".swo",
]);
// Exact basenames never sampled (lockfiles and OS litter).
const BLOCKED_FILES = new Set([
    "go.sum", "poetry.lock", "Pipfile.lock", "composer.lock",
    ".DS_Store", "Thumbs.db",
]);
// Config files to read at root (or one level deep)
// Optional `truncate` caps how much of each file is embedded.
const CONFIG_FILES = [
    { pattern: "package.json" },
    { pattern: "package-lock.json", truncate: truncatePackageLock },
    { pattern: "pyproject.toml" },
    { pattern: "setup.py", truncate: (c) => firstLines(c, 60) },
    { pattern: "requirements.txt" },
    { pattern: "Pipfile" },
    { pattern: "go.mod" },
    { pattern: "Cargo.toml" },
    { pattern: "pom.xml", truncate: (c) => firstLines(c, 80) },
    { pattern: "build.gradle", truncate: (c) => firstLines(c, 80) },
    { pattern: "build.gradle.kts", truncate: (c) => firstLines(c, 80) },
    { pattern: "composer.json" },
    { pattern: "Gemfile" },
    { pattern: "turbo.json" },
    { pattern: "nx.json" },
    { pattern: "pnpm-workspace.yaml" },
    { pattern: "lerna.json" },
    { pattern: ".env.example" },
    { pattern: ".env.sample" },
    { pattern: ".env.template" },
    { pattern: "docker-compose.yml", truncate: (c) => c.length > 8000 ? firstLines(c, 100) + "\n[... truncated]" : c },
    { pattern: "docker-compose.yaml", truncate: (c) => c.length > 8000 ? firstLines(c, 100) + "\n[... truncated]" : c },
    { pattern: "Dockerfile", truncate: (c) => firstLines(c, 50) },
];
// Extensions that count as source code for sampling.
const SOURCE_EXTENSIONS = new Set([
    ".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs",
    ".py", ".go", ".rs", ".java", ".kt", ".scala",
    ".rb", ".php", ".cs", ".swift", ".c", ".cpp", ".h",
    ".vue", ".svelte", ".astro",
]);
// Basenames (sans extension) treated as likely entry points.
const ENTRY_BASENAMES = new Set([
    "index", "main", "app", "server", "cmd", "cli", "mod", "run",
]);
// Directories searched for entry points, in priority order.
const ENTRY_DIRS = [".", "src", "app", "cmd", "bin"];
// Candidate primary source directories for the breadth sample.
const PRIMARY_SOURCE_DIRS = [
    "src", "app", "lib", "core", "pkg", "internal", "api", "cmd",
];
// Hard caps: at most 10 sampled files, none larger than 80KB.
const MAX_SOURCE_FILES = 10;
const MAX_FILE_BYTES = 80_000;
|
|
67
|
+
/**
 * Gather project context under `cwd` with strict cost controls:
 * known config files (optionally truncated), root-level *.config.* and
 * *.csproj files, plus at most MAX_SOURCE_FILES sampled source files chosen
 * as entry points → breadth sample of the primary source dir → filler.
 * Never throws: unreadable files land in `skipped` with a reason.
 */
export async function collectProjectFiles(cwd = process.cwd()) {
    const configs = {};
    const source = [];
    const skipped = [];
    // Collect config files
    for (const cfg of CONFIG_FILES) {
        const filePath = join(cwd, cfg.pattern);
        if (existsSync(filePath)) {
            try {
                let content = readFileSync(filePath, "utf8");
                if (cfg.truncate)
                    content = cfg.truncate(content);
                configs[cfg.pattern] = content;
            }
            catch {
                skipped.push({ path: cfg.pattern, reason: "could not read" });
            }
        }
    }
    // Check for .env (note existence but never read)
    if (existsSync(join(cwd, ".env")) && !configs[".env"]) {
        configs[".env"] = "[.env exists — not read for security]";
    }
    // Root-level *.config.{js,ts,mjs} files
    for (const ext of ["js", "ts", "mjs"]) {
        try {
            const matches = await glob(`*.config.${ext}`, { cwd, nodir: true });
            for (const m of matches) {
                const filePath = join(cwd, m);
                try {
                    const content = readFileSync(filePath, "utf8");
                    // Config files are capped at 100 lines each.
                    configs[m] = firstLines(content, 100);
                }
                catch {
                    skipped.push({ path: m, reason: "could not read" });
                }
            }
        }
        catch { /* glob error — skip */ }
    }
    // Root-level *.csproj files
    try {
        const csprojMatches = await glob("*.csproj", { cwd, nodir: true });
        for (const m of csprojMatches) {
            try {
                configs[m] = readFileSync(join(cwd, m), "utf8");
            }
            catch {
                skipped.push({ path: m, reason: "could not read" });
            }
        }
    }
    catch { /* skip */ }
    // Collect source files — max 10, cost-aware
    const allSourceFiles = await findSourceFiles(cwd);
    // Step 1: entry points — up to 3 files whose basename looks like an entry.
    // Note: root-level files (fileDir === ".") match on every dir iteration;
    // the includes() check below prevents duplicates.
    const entries = [];
    for (const dir of ENTRY_DIRS) {
        for (const file of allSourceFiles) {
            const base = basename(file, extname(file));
            const fileDir = dirname(file);
            if (ENTRY_BASENAMES.has(base) && (fileDir === dir || fileDir === ".")) {
                if (!entries.includes(file))
                    entries.push(file);
                if (entries.length >= 3)
                    break;
            }
        }
        if (entries.length >= 3)
            break;
    }
    // Step 2: breadth sample from primary source directory
    // (the candidate dir containing the most source files).
    let primaryDir = "";
    let maxCount = 0;
    for (const dir of PRIMARY_SOURCE_DIRS) {
        const count = allSourceFiles.filter(f => f.startsWith(dir + "/")).length;
        if (count > maxCount) {
            maxCount = count;
            primaryDir = dir;
        }
    }
    const breadthFiles = [];
    if (primaryDir) {
        const dirFiles = allSourceFiles
            .filter(f => f.startsWith(primaryDir + "/"))
            .filter(f => !entries.includes(f));
        // Sort smallest first — highest signal-to-size ratio
        const withSize = dirFiles.map(f => {
            try {
                return { path: f, size: statSync(join(cwd, f)).size };
            }
            catch {
                // Unstat-able files sort last instead of being dropped.
                return { path: f, size: Infinity };
            }
        }).sort((a, b) => a.size - b.size);
        // Cap the breadth sample at 5 files.
        for (const { path: p } of withSize) {
            if (breadthFiles.length >= 5)
                break;
            breadthFiles.push(p);
        }
    }
    // Step 3: fill remaining from other top-level dirs
    const selected = [...entries, ...breadthFiles];
    const remaining = allSourceFiles.filter(f => !selected.includes(f));
    for (const f of remaining) {
        if (selected.length >= MAX_SOURCE_FILES)
            break;
        selected.push(f);
    }
    // Step 4: read and truncate
    for (const filePath of selected) {
        const fullPath = join(cwd, filePath);
        try {
            const stat = statSync(fullPath);
            // Oversized files are skipped outright, not truncated.
            if (stat.size > MAX_FILE_BYTES) {
                skipped.push({ path: filePath, reason: `too large (${(stat.size / 1024).toFixed(0)}KB)` });
                continue;
            }
            const raw = readFileSync(fullPath, "utf8");
            source.push({ path: filePath, content: truncateSource(raw, filePath) });
        }
        catch {
            skipped.push({ path: filePath, reason: "could not read" });
        }
    }
    return { configs, source, skipped };
}
|
|
194
|
+
/**
 * Enumerate candidate source files under cwd (depth ≤ 5), excluding blocked
 * directories, blocked basenames/extensions, and generated bundles.
 * Returns forward-slash relative paths; [] on any glob failure.
 */
async function findSourceFiles(cwd) {
    // extname() only yields the final extension (".js" for "app.min.js"), so
    // the multi-part entries in BLOCKED_EXTENSIONS (.min.js, .min.css,
    // .bundle.js, .chunk.js) never match there — screen those generated-bundle
    // suffixes explicitly here.
    const generatedSuffix = /\.(?:min|bundle|chunk)\.(?:js|css)$/;
    try {
        const files = await glob("**/*", {
            cwd,
            nodir: true,
            ignore: [...BLOCKED_DIRS].map(d => `${d}/**`),
            maxDepth: 5,
        });
        return files
            .filter(f => {
                const ext = extname(f);
                const base = basename(f);
                if (BLOCKED_FILES.has(base))
                    return false;
                if (BLOCKED_EXTENSIONS.has(ext))
                    return false;
                if (generatedSuffix.test(base))
                    return false;
                // Keep only recognized source-code extensions.
                return SOURCE_EXTENSIONS.has(ext);
            })
            // Normalize Windows separators so downstream prefix checks work.
            .map(f => f.replace(/\\/g, "/"));
    }
    catch {
        return [];
    }
}
|
|
220
|
+
// Cap embedded source content: ≤150 lines pass through, ≤400 keep the first
// 100, anything bigger keeps only the first 60 lines.
// (`path` is accepted for call-site compatibility; it is not used here.)
function truncateSource(content, path) {
    const allLines = content.split("\n");
    const total = allLines.length;
    if (total > 400) {
        return `${allLines.slice(0, 60).join("\n")}\n[... truncated — ${total} lines total]`;
    }
    if (total > 150) {
        return `${allLines.slice(0, 100).join("\n")}\n[... ${total - 100} more lines truncated]`;
    }
    return content;
}
|
|
230
|
+
// Keep the first n lines, appending a count of what was cut (if anything).
function firstLines(content, n) {
    const allLines = content.split("\n");
    const hidden = allLines.length - n;
    if (hidden <= 0)
        return content;
    return `${allLines.slice(0, n).join("\n")}\n[... ${hidden} more lines]`;
}
|
|
236
|
+
// Reduce package-lock.json to its identifying header fields — the dependency
// tree itself is huge and carries no signal worth the tokens.
function truncatePackageLock(content) {
    try {
        const { name, version, lockfileVersion } = JSON.parse(content);
        return JSON.stringify({ name, version, lockfileVersion }, null, 2);
    }
    catch {
        return "[package-lock.json: could not parse]";
    }
}
|
|
249
|
+
// A project is "empty" when no source was sampled and it has either no
// configs at all or exactly one dependency-free package.json.
export function isEmptyProject(collected) {
    if (collected.source.length > 0)
        return false;
    const configKeys = Object.keys(collected.configs);
    if (configKeys.length === 0)
        return true;
    return configKeys.length === 1 &&
        configKeys[0] === "package.json" &&
        isBarePkgJson(collected.configs["package.json"]);
}
|
|
257
|
+
// True when the package.json declares no dependencies of any kind.
// Unparseable content counts as NOT bare.
function isBarePkgJson(content) {
    try {
        const pkg = JSON.parse(content);
        const groups = [pkg.dependencies, pkg.devDependencies, pkg.peerDependencies];
        return groups.every(group => Object.keys(group ?? {}).length === 0);
    }
    catch {
        return false;
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** CLI entry point for the `add` command (implementation: dist/commands/add.js). */
export declare function runAdd(): Promise<void>;
|