agents-harness 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +317 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +56 -0
- package/dist/cli.js.map +1 -0
- package/dist/commands/config.d.ts +26 -0
- package/dist/commands/config.js +91 -0
- package/dist/commands/config.js.map +1 -0
- package/dist/commands/init.d.ts +1 -0
- package/dist/commands/init.js +67 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/resume.d.ts +6 -0
- package/dist/commands/resume.js +88 -0
- package/dist/commands/resume.js.map +1 -0
- package/dist/commands/run.d.ts +8 -0
- package/dist/commands/run.js +106 -0
- package/dist/commands/run.js.map +1 -0
- package/dist/commands/status.d.ts +1 -0
- package/dist/commands/status.js +38 -0
- package/dist/commands/status.js.map +1 -0
- package/dist/core/context-manager.d.ts +19 -0
- package/dist/core/context-manager.js +118 -0
- package/dist/core/context-manager.js.map +1 -0
- package/dist/core/file-protocol.d.ts +14 -0
- package/dist/core/file-protocol.js +119 -0
- package/dist/core/file-protocol.js.map +1 -0
- package/dist/core/orchestrator.d.ts +30 -0
- package/dist/core/orchestrator.js +238 -0
- package/dist/core/orchestrator.js.map +1 -0
- package/dist/core/types.d.ts +136 -0
- package/dist/core/types.js +3 -0
- package/dist/core/types.js.map +1 -0
- package/dist/dashboard/server.d.ts +15 -0
- package/dist/dashboard/server.js +61 -0
- package/dist/dashboard/server.js.map +1 -0
- package/dist/dashboard/socket.d.ts +8 -0
- package/dist/dashboard/socket.js +22 -0
- package/dist/dashboard/socket.js.map +1 -0
- package/dist/defaults/criteria.d.ts +1 -0
- package/dist/defaults/criteria.js +23 -0
- package/dist/defaults/criteria.js.map +1 -0
- package/dist/defaults/prompts.d.ts +15 -0
- package/dist/defaults/prompts.js +123 -0
- package/dist/defaults/prompts.js.map +1 -0
- package/dist/discovery/config-loader.d.ts +12 -0
- package/dist/discovery/config-loader.js +64 -0
- package/dist/discovery/config-loader.js.map +1 -0
- package/dist/discovery/project-context.d.ts +12 -0
- package/dist/discovery/project-context.js +56 -0
- package/dist/discovery/project-context.js.map +1 -0
- package/dist/discovery/stack-detector.d.ts +15 -0
- package/dist/discovery/stack-detector.js +372 -0
- package/dist/discovery/stack-detector.js.map +1 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.js +14 -0
- package/dist/index.js.map +1 -0
- package/package.json +60 -0
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import { DEFAULT_CRITERIA } from "./criteria.js";
|
|
2
|
+
// System prompt for the "planner" role. Scope is deliberately limited to WHAT
// to build — ordered, independently testable sprints — never implementation.
const PLANNER_BASE = `You are a product planner. Your job is to convert user descriptions into comprehensive product specifications, break them into sprints, and write sprint contracts.

RULES:
- Focus on WHAT to build, not HOW to implement it
- Be ambitious in scope — define the full feature
- Define user-facing behavior, not internal architecture
- Break complex features into ordered milestones (sprints)
- Each sprint must be independently testable
- Never write code. Never suggest file names or specific implementations.
- Write your output to the file specified in the task prompt.`;
// System prompt for the "generator" role: implements the contract found in
// .harness/contract.md and is explicitly barred from judging its own work —
// a separate evaluator agent does that.
const GENERATOR_BASE = `You are a code generator. You implement features based on a sprint contract.

RULES:
- Read .harness/contract.md for what to build
- If .harness/evaluation.md exists, read it for feedback from a previous attempt
- If .harness/handoff.md exists, read it for context from a previous sprint
- Implement EXACTLY what the contract specifies — no more, no less
- Follow project conventions from CLAUDE.md
- Run the test suite before finishing. Fix any test failures.
- Commit your work with conventional commit messages.
- Do NOT evaluate your own work. Do NOT say "this looks good" or "everything is working."
Your job is to implement, not judge. A separate evaluator will assess your work.`;
// System prompt for the "evaluator" role: an adversarial reviewer that checks
// each contract criterion and writes a PASS/FAIL report to
// .harness/evaluation.md in the fixed format below.
const EVALUATOR_BASE = `You are a critical code evaluator. Your job is to find problems.

MINDSET:
- Be skeptical. Assume things are broken until proven otherwise.
- Never give the benefit of the doubt.
- A feature that "should work" but wasn't tested does NOT pass.
- Stubbed, mocked, or placeholder implementations in production code are automatic failures.
- If you can't verify it, it fails.

PROCESS:
1. Read .harness/contract.md for what was promised
2. Read the actual code that was written (use Grep and Read)
3. Run the test suite
4. Check each success criterion from the contract individually
5. Write your evaluation to .harness/evaluation.md

YOUR OUTPUT FORMAT (write to .harness/evaluation.md):
Status: PASS or FAIL
Failed criteria:
- (list each criterion that failed, one per line)
Passed criteria:
- (list each criterion that passed, one per line)
Critique: (specific, actionable feedback for each failure — what's wrong and what needs to change)`;
// Role name → base prompt. buildSystemPrompt() starts from this lookup.
const BASE_PROMPTS = {
    planner: PLANNER_BASE,
    generator: GENERATOR_BASE,
    evaluator: EVALUATOR_BASE,
};
|
|
52
|
+
/**
 * Format a ProjectContext into a readable text block for inclusion in system prompts.
 *
 * Output: repo type and root, an optional "Scope:" line, a per-workspace stack
 * summary (optional fields appear only when truthy), and the root CLAUDE.md
 * contents when present.
 */
export function formatProjectContext(ctx) {
    const out = [];
    out.push(`Repository type: ${ctx.repoType}`, `Root: ${ctx.root}`);
    if (ctx.scope && ctx.scope.length > 0) {
        out.push(`Scope: ${ctx.scope.join(", ")}`);
    }
    out.push("", "Workspaces:");
    for (const ws of ctx.workspaces) {
        const { stack } = ws;
        out.push(`  - ${ws.path}`, `    Language: ${stack.language}`);
        if (stack.framework) {
            out.push(`    Framework: ${stack.framework}`);
        }
        if (stack.testRunner) {
            out.push(`    Test runner: ${stack.testRunner}`);
        }
        // Test command is always printed, even when discovery produced "".
        out.push(`    Test command: ${stack.testCommand}`);
        if (stack.lintCommand) {
            out.push(`    Lint command: ${stack.lintCommand}`);
        }
        if (stack.buildCommand) {
            out.push(`    Build command: ${stack.buildCommand}`);
        }
        if (stack.devServer) {
            out.push(`    Dev server: ${stack.devServer}`);
        }
        if (ws.claudeMd) {
            out.push(`    CLAUDE.md: ${ws.claudeMd}`);
        }
    }
    if (ctx.rootClaudeMd) {
        out.push("", "CLAUDE.md:", ctx.rootClaudeMd);
    }
    return out.join("\n");
}
|
|
94
|
+
/**
 * Build a full system prompt for an agent role.
 *
 * Assembles, in order:
 * 1. Base prompt for the role
 * 2. PROJECT CONTEXT section (formatted project context)
 * 3. EVALUATION CRITERIA section (evaluator only — default + custom criteria)
 * 4. ADDITIONAL INSTRUCTIONS section (appendPrompt if provided)
 */
export function buildSystemPrompt(role, ctx, appendPrompt) {
    let prompt = BASE_PROMPTS[role];
    prompt += `\n\n## PROJECT CONTEXT\n\n${formatProjectContext(ctx)}`;
    if (role === "evaluator") {
        // Custom project criteria are appended after the built-in defaults.
        const custom = ctx.criteria ? `\n\n## Custom Criteria\n\n${ctx.criteria}` : "";
        prompt += `\n\n## EVALUATION CRITERIA\n\n${DEFAULT_CRITERIA}${custom}`;
    }
    if (appendPrompt) {
        prompt += `\n\n## ADDITIONAL INSTRUCTIONS\n\n${appendPrompt}`;
    }
    return prompt;
}
|
|
123
|
+
//# sourceMappingURL=prompts.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"prompts.js","sourceRoot":"","sources":["../../src/defaults/prompts.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,gBAAgB,EAAE,MAAM,eAAe,CAAC;AAEjD,MAAM,YAAY,GAAG;;;;;;;;;8DASyC,CAAC;AAE/D,MAAM,cAAc,GAAG;;;;;;;;;;;mFAW4D,CAAC;AAEpF,MAAM,cAAc,GAAG;;;;;;;;;;;;;;;;;;;;;;mGAsB4E,CAAC;AAEpG,MAAM,YAAY,GAA8B;IAC9C,OAAO,EAAE,YAAY;IACrB,SAAS,EAAE,cAAc;IACzB,SAAS,EAAE,cAAc;CAC1B,CAAC;AAEF;;GAEG;AACH,MAAM,UAAU,oBAAoB,CAAC,GAAmB;IACtD,MAAM,KAAK,GAAa,EAAE,CAAC;IAE3B,KAAK,CAAC,IAAI,CAAC,oBAAoB,GAAG,CAAC,QAAQ,EAAE,CAAC,CAAC;IAC/C,KAAK,CAAC,IAAI,CAAC,SAAS,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;IAEhC,IAAI,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtC,KAAK,CAAC,IAAI,CAAC,UAAU,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACf,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;IAC1B,KAAK,MAAM,EAAE,IAAI,GAAG,CAAC,UAAU,EAAE,CAAC;QAChC,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;QAC7B,KAAK,CAAC,IAAI,CAAC,iBAAiB,EAAE,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;QACjD,IAAI,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YACvB,KAAK,CAAC,IAAI,CAAC,kBAAkB,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC;QACrD,CAAC;QACD,IAAI,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC;YACxB,KAAK,CAAC,IAAI,CAAC,oBAAoB,EAAE,CAAC,KAAK,CAAC,UAAU,EAAE,CAAC,CAAC;QACxD,CAAC;QACD,KAAK,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC;QACxD,IAAI,EAAE,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC;YACzB,KAAK,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC;QAC1D,CAAC;QACD,IAAI,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE,CAAC;YAC1B,KAAK,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;QAC5D,CAAC;QACD,IAAI,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC;YACvB,KAAK,CAAC,IAAI,CAAC,mBAAmB,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC;QACtD,CAAC;QACD,IAAI,EAAE,CAAC,QAAQ,EAAE,CAAC;YAChB,KAAK,CAAC,IAAI,CAAC,kBAAkB,EAAE,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC9C,CAAC;IACH,CAAC;IAED,IAAI,GAAG,CAAC,YAAY,EAAE,CAAC;QACrB,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACf,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACzB,KAAK,CAAC,
IAAI,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;IAC/B,CAAC;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,iBAAiB,CAC/B,IAAe,EACf,GAAmB,EACnB,YAAqB;IAErB,MAAM,QAAQ,GAAa,EAAE,CAAC;IAE9B,iBAAiB;IACjB,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC;IAElC,qBAAqB;IACrB,QAAQ,CAAC,IAAI,CAAC,6BAA6B,oBAAoB,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAExE,0CAA0C;IAC1C,IAAI,IAAI,KAAK,WAAW,EAAE,CAAC;QACzB,IAAI,aAAa,GAAG,gBAAgB,CAAC;QACrC,IAAI,GAAG,CAAC,QAAQ,EAAE,CAAC;YACjB,aAAa,IAAI,6BAA6B,GAAG,CAAC,QAAQ,EAAE,CAAC;QAC/D,CAAC;QACD,QAAQ,CAAC,IAAI,CAAC,iCAAiC,aAAa,EAAE,CAAC,CAAC;IAClE,CAAC;IAED,6BAA6B;IAC7B,IAAI,YAAY,EAAE,CAAC;QACjB,QAAQ,CAAC,IAAI,CAAC,qCAAqC,YAAY,EAAE,CAAC,CAAC;IACrE,CAAC;IAED,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3B,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { HarnessConfig } from "../core/types.js";
/**
 * Load and parse `.harness/config.yaml` from the given root directory.
 * Returns null if the file does not exist.
 * Snake_case YAML keys are normalized to camelCase TypeScript properties.
 *
 * @param root - Project root directory containing the `.harness` folder.
 * @returns The parsed, normalized config, or null when absent.
 */
export declare function loadConfig(root: string): HarnessConfig | null;
/**
 * Load `.harness/criteria.md` from the given root directory.
 * Returns null if the file does not exist.
 *
 * @param root - Project root directory containing the `.harness` folder.
 * @returns The raw markdown contents, or null when absent.
 */
export declare function loadCriteria(root: string): string | null;
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { readFileSync, existsSync } from "node:fs";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { parse } from "yaml";
|
|
4
|
+
/**
 * Map of snake_case YAML keys to their camelCase TypeScript equivalents.
 */
const SNAKE_TO_CAMEL = {
    max_turns: "maxTurns",
    system_prompt_append: "systemPromptAppend",
    max_attempts_per_sprint: "maxAttemptsPerSprint",
    max_budget_per_sprint_usd: "maxBudgetPerSprintUsd",
    max_total_budget_usd: "maxTotalBudgetUsd",
    test_command: "testCommand",
    lint_command: "lintCommand",
    build_command: "buildCommand",
    dev_server: "devServer",
};
/**
 * Recursively normalize an object's keys from snake_case to camelCase
 * using the explicit mapping. Keys not in the mapping are kept as-is.
 * Primitives (and null/undefined) pass through untouched; arrays are
 * normalized element by element.
 */
function normalizeKeys(obj) {
    if (typeof obj !== "object" || obj == null) {
        return obj;
    }
    if (Array.isArray(obj)) {
        return obj.map((item) => normalizeKeys(item));
    }
    return Object.fromEntries(
        Object.entries(obj).map(([key, value]) => [SNAKE_TO_CAMEL[key] ?? key, normalizeKeys(value)])
    );
}
|
|
36
|
+
/**
 * Load and parse `.harness/config.yaml` from the given root directory.
 * Returns null if the file does not exist, or if the YAML does not parse
 * to an object (e.g. an empty or scalar-only document).
 * Snake_case YAML keys are normalized to camelCase TypeScript properties.
 */
export function loadConfig(root) {
    const configPath = join(root, ".harness", "config.yaml");
    if (!existsSync(configPath)) {
        return null;
    }
    const parsed = parse(readFileSync(configPath, "utf-8"));
    // `typeof undefined` is "undefined", so this single check rejects
    // null, undefined, and every non-object scalar in one place.
    if (parsed === null || typeof parsed !== "object") {
        return null;
    }
    return normalizeKeys(parsed);
}
|
|
53
|
+
/**
 * Load `.harness/criteria.md` from the given root directory.
 * Returns null if the file does not exist.
 */
export function loadCriteria(root) {
    const criteriaPath = join(root, ".harness", "criteria.md");
    return existsSync(criteriaPath) ? readFileSync(criteriaPath, "utf-8") : null;
}
|
|
64
|
+
//# sourceMappingURL=config-loader.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"config-loader.js","sourceRoot":"","sources":["../../src/discovery/config-loader.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACnD,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,KAAK,EAAE,MAAM,MAAM,CAAC;AAG7B;;GAEG;AACH,MAAM,cAAc,GAA2B;IAC7C,SAAS,EAAE,UAAU;IACrB,oBAAoB,EAAE,oBAAoB;IAC1C,uBAAuB,EAAE,sBAAsB;IAC/C,yBAAyB,EAAE,uBAAuB;IAClD,oBAAoB,EAAE,mBAAmB;IACzC,YAAY,EAAE,aAAa;IAC3B,YAAY,EAAE,aAAa;IAC3B,aAAa,EAAE,cAAc;IAC7B,UAAU,EAAE,WAAW;CACxB,CAAC;AAEF;;;GAGG;AACH,SAAS,aAAa,CAAC,GAAY;IACjC,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,SAAS,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE,CAAC;QACjE,OAAO,GAAG,CAAC;IACb,CAAC;IAED,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;QACvB,OAAO,GAAG,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC;IAED,MAAM,MAAM,GAA4B,EAAE,CAAC;IAC3C,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,GAA8B,CAAC,EAAE,CAAC;QAC1E,MAAM,QAAQ,GAAG,cAAc,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC;QAC5C,MAAM,CAAC,QAAQ,CAAC,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC;IAC1C,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,IAAY;IACrC,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,UAAU,EAAE,aAAa,CAAC,CAAC;IACzD,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;QAC5B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,GAAG,GAAG,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC9C,MAAM,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC;IAE1B,IAAI,MAAM,KAAK,IAAI,IAAI,MAAM,KAAK,SAAS,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE,CAAC;QAC1E,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,aAAa,CAAC,MAAM,CAAkB,CAAC;AAChD,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,IAAY;IACvC,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,EAAE,UAAU,EAAE,aAAa,CAAC,CAAC;IAC3D,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,CAAC;QAC9B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,YAAY,CAAC,YAAY,EAAE,OAAO,CAAC,CAAC;AAC7C,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { ProjectContext } from "../core/types.js";
/**
 * Build a complete ProjectContext by composing all discovery functions.
 *
 * 1. Detect repo type (single vs monorepo)
 * 2. Discover workspaces with their stacks and per-workspace CLAUDE.md
 * 3. Read root CLAUDE.md (.claude/CLAUDE.md or CLAUDE.md)
 * 4. Load .harness/config.yaml
 * 5. Load .harness/criteria.md
 * 6. Pass scope through
 *
 * @param root - Project root directory to discover from.
 * @param scope - Optional list of workspace paths to restrict work to, or null.
 */
export declare function buildProjectContext(root: string, scope: string[] | null): ProjectContext;
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import { readFileSync, existsSync } from "node:fs";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { detectRepoType, discoverWorkspaces } from "./stack-detector.js";
|
|
4
|
+
import { loadConfig, loadCriteria } from "./config-loader.js";
|
|
5
|
+
/**
 * Read the root-level CLAUDE.md for the project.
 * Checks `.claude/CLAUDE.md` first, then `CLAUDE.md` at the root.
 * Returns null when neither candidate exists or can be read.
 */
function readRootClaudeMd(root) {
    const candidates = [
        join(root, ".claude", "CLAUDE.md"),
        join(root, "CLAUDE.md"),
    ];
    for (const candidate of candidates) {
        if (!existsSync(candidate)) {
            continue;
        }
        try {
            return readFileSync(candidate, "utf-8");
        }
        catch {
            // Unreadable file — fall through to the next candidate.
        }
    }
    return null;
}
|
|
30
|
+
/**
 * Build a complete ProjectContext by composing all discovery functions.
 *
 * 1. Detect repo type (single vs monorepo)
 * 2. Discover workspaces with their stacks and per-workspace CLAUDE.md
 * 3. Read root CLAUDE.md (.claude/CLAUDE.md or CLAUDE.md)
 * 4. Load .harness/config.yaml
 * 5. Load .harness/criteria.md
 * 6. Pass scope through
 */
export function buildProjectContext(root, scope) {
    // Property values are evaluated in source order, preserving the
    // discovery sequence documented above.
    return {
        repoType: detectRepoType(root),
        workspaces: discoverWorkspaces(root),
        rootClaudeMd: readRootClaudeMd(root),
        config: loadConfig(root),
        criteria: loadCriteria(root),
        scope,
        root,
    };
}
|
|
56
|
+
//# sourceMappingURL=project-context.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"project-context.js","sourceRoot":"","sources":["../../src/discovery/project-context.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AACnD,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACzE,OAAO,EAAE,UAAU,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAG9D;;;GAGG;AACH,SAAS,gBAAgB,CAAC,IAAY;IACpC,MAAM,aAAa,GAAG,IAAI,CAAC,IAAI,EAAE,SAAS,EAAE,WAAW,CAAC,CAAC;IACzD,IAAI,UAAU,CAAC,aAAa,CAAC,EAAE,CAAC;QAC9B,IAAI,CAAC;YACH,OAAO,YAAY,CAAC,aAAa,EAAE,OAAO,CAAC,CAAC;QAC9C,CAAC;QAAC,MAAM,CAAC;YACP,eAAe;QACjB,CAAC;IACH,CAAC;IAED,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;IACzC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QACzB,IAAI,CAAC;YACH,OAAO,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QACzC,CAAC;QAAC,MAAM,CAAC;YACP,eAAe;QACjB,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,mBAAmB,CACjC,IAAY,EACZ,KAAsB;IAEtB,MAAM,QAAQ,GAAG,cAAc,CAAC,IAAI,CAAC,CAAC;IACtC,MAAM,UAAU,GAAG,kBAAkB,CAAC,IAAI,CAAC,CAAC;IAC5C,MAAM,YAAY,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;IAC5C,MAAM,MAAM,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC;IAChC,MAAM,QAAQ,GAAG,YAAY,CAAC,IAAI,CAAC,CAAC;IAEpC,OAAO;QACL,QAAQ;QACR,UAAU;QACV,YAAY;QACZ,MAAM;QACN,QAAQ;QACR,KAAK;QACL,IAAI;KACL,CAAC;AACJ,CAAC"}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { Stack, Workspace } from "../core/types.js";
|
|
2
|
+
/**
|
|
3
|
+
* Detect the technology stack for a project rooted at `root`.
|
|
4
|
+
*/
|
|
5
|
+
export declare function detectStack(root: string): Stack;
|
|
6
|
+
/**
|
|
7
|
+
* Determine whether the project at `root` is a single repo or a monorepo.
|
|
8
|
+
*/
|
|
9
|
+
export declare function detectRepoType(root: string): "single" | "monorepo";
|
|
10
|
+
/**
|
|
11
|
+
* Discover workspaces in the project at `root`.
|
|
12
|
+
* For a single repo, returns a single workspace at ".".
|
|
13
|
+
* For a monorepo, returns one workspace per detected sub-project.
|
|
14
|
+
*/
|
|
15
|
+
export declare function discoverWorkspaces(root: string): Workspace[];
|
|
@@ -0,0 +1,372 @@
|
|
|
1
|
+
import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
|
|
2
|
+
import { join, relative } from "node:path";
|
|
3
|
+
// --- File helpers ---
/** True when joining `segments` onto `root` names an existing path. */
function fileExists(root, ...segments) {
    return existsSync(join(root, ...segments));
}
/** Read a file as UTF-8; null on any error instead of throwing. */
function readFileSafe(path) {
    try {
        return readFileSync(path, "utf-8");
    }
    catch {
        return null;
    }
}
/** Read and JSON-parse a file; null when missing or malformed. */
function readJson(path) {
    const raw = readFileSafe(path);
    if (raw === null) {
        return null;
    }
    try {
        return JSON.parse(raw);
    }
    catch {
        return null;
    }
}
/** True when at least one of `names` exists directly under `root`. */
function anyFileExists(root, names) {
    for (const name of names) {
        if (fileExists(root, name)) {
            return true;
        }
    }
    return false;
}
|
|
29
|
+
// --- Language detection ---
/**
 * Infer the primary language from well-known manifest files at `root`.
 * First match wins; a package.json is reported as "typescript".
 */
function detectLanguage(root) {
    const markers = [
        ["typescript", ["package.json"]],
        ["python", ["requirements.txt", "pyproject.toml"]],
        ["rust", ["Cargo.toml"]],
        ["go", ["go.mod"]],
    ];
    for (const [language, files] of markers) {
        if (files.some((file) => fileExists(root, file))) {
            return language;
        }
    }
    return "unknown";
}
|
|
41
|
+
// --- Framework detection ---
/**
 * Detect a web framework from its characteristic config file.
 * Checks Next.js, then Vite, then Django; null when none matches.
 */
function detectFramework(root) {
    if (anyFileExists(root, ["next.config.js", "next.config.mjs", "next.config.ts"])) {
        return "nextjs";
    }
    if (anyFileExists(root, ["vite.config.js", "vite.config.ts", "vite.config.mjs"])) {
        return "vite";
    }
    return fileExists(root, "manage.py") ? "django" : null;
}
|
|
59
|
+
// --- Test runner detection ---
/**
 * Identify the test runner for a workspace.
 * Priority order: explicit runner config files, Python pytest markers,
 * language built-ins (Rust/Go), then the package.json "test" script.
 * Returns null when nothing recognizable is found.
 */
function detectTestRunner(root, language, pkgJson) {
    // Config-file-based detection (highest priority)
    if (anyFileExists(root, ["vitest.config.ts", "vitest.config.js", "vitest.config.mjs"])) {
        return "vitest";
    }
    if (anyFileExists(root, ["jest.config.ts", "jest.config.js", "jest.config.mjs", "jest.config.cjs"])) {
        return "jest";
    }
    // Python: pytest.ini / conftest.py, or a [tool.pytest*] table in pyproject.toml.
    if (fileExists(root, "pytest.ini") || fileExists(root, "conftest.py")) {
        return "pytest";
    }
    if (fileExists(root, "pyproject.toml")) {
        const pyproject = readFileSafe(join(root, "pyproject.toml"));
        if (pyproject && pyproject.includes("[tool.pytest")) {
            return "pytest";
        }
    }
    // Rust and Go ship their own test runners.
    if (language === "rust") {
        return "cargo test";
    }
    if (language === "go") {
        return "go test";
    }
    // Fallback: sniff the package.json "test" script for a known runner name.
    const testScript = pkgJson?.scripts?.test;
    if (testScript) {
        for (const runner of ["vitest", "jest", "mocha"]) {
            if (testScript.includes(runner)) {
                return runner;
            }
        }
    }
    return null;
}
|
|
106
|
+
// --- Command detection ---
/**
 * Resolve the command that runs the test suite.
 * A package.json "test" script wins; otherwise a default is derived from
 * the detected runner, and "" is returned when nothing is known.
 */
function detectTestCommand(runner, pkgJson) {
    const scriptCommand = pkgJson?.scripts?.test;
    if (scriptCommand) {
        return scriptCommand;
    }
    switch (runner) {
        case "pytest":
            return "pytest";
        case "cargo test":
            return "cargo test";
        case "go test":
            return "go test ./...";
        default:
            return "";
    }
}
|
|
123
|
+
/**
 * Resolve a lint command: the package.json "lint" script when present,
 * otherwise a per-language default, otherwise null.
 */
function detectLintCommand(language, pkgJson) {
    const lintScript = pkgJson?.scripts?.lint;
    if (lintScript) {
        return lintScript;
    }
    const defaults = new Map([
        ["python", "ruff check ."],
        ["rust", "cargo clippy"],
        ["go", "go vet ./..."],
    ]);
    return defaults.get(language) ?? null;
}
|
|
137
|
+
/**
 * Resolve a build command: the package.json "build" script when present,
 * otherwise a per-language default, otherwise null.
 */
function detectBuildCommand(language, pkgJson) {
    const buildScript = pkgJson?.scripts?.build;
    if (buildScript) {
        return buildScript;
    }
    const defaults = new Map([
        ["rust", "cargo build"],
        ["go", "go build ./..."],
    ]);
    return defaults.get(language) ?? null;
}
|
|
149
|
+
/**
 * Resolve a dev-server command: the package.json "dev" script, then its
 * "start" script, then Django's runserver when manage.py is present.
 */
function detectDevServer(root, pkgJson) {
    const scripts = pkgJson?.scripts;
    // Truthiness (not nullish) checks match the original script precedence.
    if (scripts?.dev) {
        return scripts.dev;
    }
    if (scripts?.start) {
        return scripts.start;
    }
    return fileExists(root, "manage.py") ? "python manage.py runserver" : null;
}
|
|
161
|
+
// --- Public API ---
/**
 * Detect the technology stack for a project rooted at `root`.
 * Composes the language, framework, test runner, and command detectors
 * into a single Stack record.
 */
export function detectStack(root) {
    const pkgJson = readJson(join(root, "package.json"));
    const language = detectLanguage(root);
    const testRunner = detectTestRunner(root, language, pkgJson);
    return {
        language,
        framework: detectFramework(root),
        testRunner,
        testCommand: detectTestCommand(testRunner, pkgJson),
        lintCommand: detectLintCommand(language, pkgJson),
        buildCommand: detectBuildCommand(language, pkgJson),
        devServer: detectDevServer(root, pkgJson),
    };
}
|
|
184
|
+
/**
 * Determine whether the project at `root` is a single repo or a monorepo.
 *
 * Signals, in order: package.json "workspaces", pnpm-workspace.yaml,
 * lerna.json, and finally a heuristic — two or more immediate
 * subdirectories that each carry their own stack marker file.
 */
export function detectRepoType(root) {
    if (readJson(join(root, "package.json"))?.workspaces) {
        return "monorepo";
    }
    if (fileExists(root, "pnpm-workspace.yaml") || fileExists(root, "lerna.json")) {
        return "monorepo";
    }
    const stackMarkers = [
        "package.json",
        "requirements.txt",
        "pyproject.toml",
        "Cargo.toml",
        "go.mod",
    ];
    let stackDirCount = 0;
    try {
        for (const entry of readdirSync(root, { withFileTypes: true })) {
            if (!entry.isDirectory()) {
                continue;
            }
            // Hidden directories and node_modules never count as sub-projects.
            if (entry.name.startsWith(".") || entry.name === "node_modules") {
                continue;
            }
            const subdir = join(root, entry.name);
            if (stackMarkers.some((marker) => existsSync(join(subdir, marker)))) {
                stackDirCount += 1;
                if (stackDirCount >= 2) {
                    return "monorepo";
                }
            }
        }
    }
    catch {
        // ignore read errors
    }
    return "single";
}
|
|
227
|
+
/**
 * Read CLAUDE.md for a given directory.
 * Checks .claude/CLAUDE.md first, then CLAUDE.md at the directory root;
 * null when neither can be read.
 */
function readClaudeMd(dir) {
    return (readFileSafe(join(dir, ".claude", "CLAUDE.md")) ??
        readFileSafe(join(dir, "CLAUDE.md")));
}
|
|
236
|
+
/**
 * Discover workspaces in the project at `root`.
 * For a single repo, returns a single workspace at ".".
 * For a monorepo, returns one workspace per detected sub-project.
 */
export function discoverWorkspaces(root) {
    if (detectRepoType(root) === "single") {
        return [{
                path: ".",
                stack: detectStack(root),
                claudeMd: readClaudeMd(root),
            }];
    }
    // Monorepo: one workspace per resolved directory, keyed by relative path.
    return findWorkspaceDirs(root).map((dir) => ({
        path: relative(root, dir) || ".",
        stack: detectStack(dir),
        claudeMd: readClaudeMd(dir),
    }));
}
|
|
260
|
+
/**
 * Resolve workspace directories for a monorepo.
 * Tries package.json workspaces globs, then pnpm-workspace.yaml patterns,
 * then falls back to scanning subdirectories for stack markers.
 */
function findWorkspaceDirs(root) {
    const workspaceGlobs = readJson(join(root, "package.json"))?.workspaces;
    if (workspaceGlobs) {
        // Array of globs like ["packages/*"].
        return resolveGlobPatterns(root, workspaceGlobs);
    }
    if (fileExists(root, "pnpm-workspace.yaml")) {
        const yamlText = readFileSafe(join(root, "pnpm-workspace.yaml"));
        if (yamlText) {
            const patterns = parsePnpmWorkspacePatterns(yamlText);
            if (patterns.length > 0) {
                return resolveGlobPatterns(root, patterns);
            }
        }
    }
    return scanForStackDirs(root);
}
|
|
283
|
+
/**
 * Resolve simple glob patterns like "packages/*" to actual directories.
 * Only handles single-level trailing wildcards; any other pattern is
 * treated as a literal path.
 */
function resolveGlobPatterns(root, patterns) {
    const resolved = [];
    for (const pattern of patterns) {
        if (!pattern.endsWith("/*")) {
            // Literal path — keep it only when it names an existing directory.
            const dir = join(root, pattern);
            if (existsSync(dir) && statSync(dir).isDirectory()) {
                resolved.push(dir);
            }
            continue;
        }
        const parent = join(root, pattern.slice(0, -2));
        try {
            for (const child of readdirSync(parent, { withFileTypes: true })) {
                if (child.isDirectory() && !child.name.startsWith(".")) {
                    resolved.push(join(parent, child.name));
                }
            }
        }
        catch {
            // parent directory doesn't exist, skip
        }
    }
    return resolved;
}
|
|
314
|
+
/**
|
|
315
|
+
* Parse pnpm-workspace.yaml to extract package patterns.
|
|
316
|
+
* Simple line-based parser — no YAML library needed for this format.
|
|
317
|
+
*/
|
|
318
|
+
function parsePnpmWorkspacePatterns(content) {
|
|
319
|
+
const patterns = [];
|
|
320
|
+
const lines = content.split("\n");
|
|
321
|
+
let inPackages = false;
|
|
322
|
+
for (const line of lines) {
|
|
323
|
+
const trimmed = line.trim();
|
|
324
|
+
if (trimmed === "packages:") {
|
|
325
|
+
inPackages = true;
|
|
326
|
+
continue;
|
|
327
|
+
}
|
|
328
|
+
if (inPackages) {
|
|
329
|
+
if (trimmed.startsWith("- ")) {
|
|
330
|
+
const value = trimmed.slice(2).replace(/['"]/g, "").trim();
|
|
331
|
+
if (value)
|
|
332
|
+
patterns.push(value);
|
|
333
|
+
}
|
|
334
|
+
else if (trimmed && !trimmed.startsWith("#")) {
|
|
335
|
+
// New top-level key, stop parsing
|
|
336
|
+
break;
|
|
337
|
+
}
|
|
338
|
+
}
|
|
339
|
+
}
|
|
340
|
+
return patterns;
|
|
341
|
+
}
|
|
342
|
+
/**
 * Scan root's immediate subdirectories for any that contain stack markers.
 * Hidden directories and node_modules are ignored; read errors yield [].
 */
function scanForStackDirs(root) {
    const stackMarkers = [
        "package.json",
        "requirements.txt",
        "pyproject.toml",
        "Cargo.toml",
        "go.mod",
    ];
    const found = [];
    try {
        for (const entry of readdirSync(root, { withFileTypes: true })) {
            if (!entry.isDirectory()) {
                continue;
            }
            if (entry.name.startsWith(".") || entry.name === "node_modules") {
                continue;
            }
            const candidate = join(root, entry.name);
            if (stackMarkers.some((marker) => existsSync(join(candidate, marker)))) {
                found.push(candidate);
            }
        }
    }
    catch {
        // ignore read errors
    }
    return found;
}
|
|
372
|
+
//# sourceMappingURL=stack-detector.js.map
|