@open330/oac 2026.4.1 → 2026.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/budget/index.d.ts +29 -1
- package/dist/budget/index.js +5 -1
- package/dist/{chunk-ZPI2VQ7U.js → chunk-2JOG3M5O.js} +564 -3
- package/dist/chunk-2JOG3M5O.js.map +1 -0
- package/dist/{chunk-UL66HWYF.js → chunk-3X6AFC3O.js} +52 -2
- package/dist/chunk-3X6AFC3O.js.map +1 -0
- package/dist/{chunk-TGZ2TGDA.js → chunk-5TSIEP6T.js} +12 -2
- package/dist/chunk-5TSIEP6T.js.map +1 -0
- package/dist/{chunk-VLR2VYFW.js → chunk-CJAJ4MBO.js} +2 -2
- package/dist/chunk-CJAJ4MBO.js.map +1 -0
- package/dist/{chunk-SZUDHVBF.js → chunk-N5OWF3UW.js} +1048 -597
- package/dist/chunk-N5OWF3UW.js.map +1 -0
- package/dist/{chunk-NZEI4RPP.js → chunk-VJW7RNZI.js} +85 -35
- package/dist/chunk-VJW7RNZI.js.map +1 -0
- package/dist/cli/cli.js +6 -6
- package/dist/cli/index.js +6 -6
- package/dist/completion/index.d.ts +2 -2
- package/dist/completion/index.js +1 -1
- package/dist/completion/index.js.map +1 -1
- package/dist/core/index.d.ts +71 -3
- package/dist/core/index.js +3 -1
- package/dist/dashboard/index.js +5 -5
- package/dist/dashboard/index.js.map +1 -1
- package/dist/discovery/index.d.ts +156 -2
- package/dist/discovery/index.js +31 -3
- package/dist/{event-bus-CRLkpNo0.d.ts → event-bus-BQLadg9l.d.ts} +1 -1
- package/dist/execution/index.d.ts +12 -3
- package/dist/execution/index.js +6 -2
- package/dist/repo/index.js +1 -1
- package/dist/{types-cJZwCZZX.d.ts → types-DyP8GmxB.d.ts} +24 -1
- package/package.json +15 -17
- package/dist/chunk-NZEI4RPP.js.map +0 -1
- package/dist/chunk-SZUDHVBF.js.map +0 -1
- package/dist/chunk-TGZ2TGDA.js.map +0 -1
- package/dist/chunk-UL66HWYF.js.map +0 -1
- package/dist/chunk-VLR2VYFW.js.map +0 -1
- package/dist/chunk-ZPI2VQ7U.js.map +0 -1
package/dist/budget/index.d.ts
CHANGED
@@ -1,3 +1,5 @@
+import { c as Epic } from '../types-DyP8GmxB.js';
+
 type AgentProviderId = "claude-code" | "codex" | "opencode" | string;
 type TaskSource = "lint" | "todo" | "test-gap" | "dead-code" | "github-issue" | "custom";
 type TaskComplexity = "trivial" | "simple" | "moderate" | "complex";
@@ -36,6 +38,11 @@ interface TokenCounter {
     readonly maxContextTokens: number;
 }
 declare function estimateTokens(task: Task, provider: AgentProviderId): Promise<TokenEstimate>;
+/**
+ * Estimate tokens for an entire epic by summing subtask estimates
+ * plus a 20% context overhead for shared module understanding.
+ */
+declare function estimateEpicTokens(epic: Epic, provider: AgentProviderId): Promise<number>;
 
 declare function estimateLocChanges(task: Task): number;
 declare function analyzeTaskComplexity(task: Task): TaskComplexity;
@@ -57,6 +64,27 @@ interface ExecutionPlan {
     remainingTokens: number;
 }
 declare function buildExecutionPlan(tasks: Task[], estimates: Map<string, TokenEstimate>, budget: number): ExecutionPlan;
+interface EpicExecutionPlan {
+    totalBudget: number;
+    selectedEpics: Array<{
+        epic: Epic;
+        estimatedTokens: number;
+        cumulativeBudgetUsed: number;
+    }>;
+    deferredEpics: Array<{
+        epic: Epic;
+        estimatedTokens: number;
+        reason: "budget_exceeded";
+    }>;
+    reserveTokens: number;
+    remainingTokens: number;
+}
+/**
+ * Build an execution plan for epics. Unlike task-level planning, epics are
+ * sorted purely by priority (not priority/tokens ratio) since they are
+ * already coherent units of work.
+ */
+declare function buildEpicExecutionPlan(epics: Epic[], budget: number): EpicExecutionPlan;
 
 declare class ClaudeTokenCounter {
     readonly invocationOverhead = 1500;
@@ -75,4 +103,4 @@ declare class CodexTokenCounter {
     countTokens(text: string): number;
 }
 
-export { type AgentProviderId, ClaudeTokenCounter, CodexTokenCounter, type ExecutionMode, type ExecutionPlan, type Task, type TaskComplexity, type TaskSource, type TokenCounter, type TokenEstimate, analyzeTaskComplexity, buildExecutionPlan, estimateLocChanges, estimateTokens };
+export { type AgentProviderId, ClaudeTokenCounter, CodexTokenCounter, type EpicExecutionPlan, type ExecutionMode, type ExecutionPlan, type Task, type TaskComplexity, type TaskSource, type TokenCounter, type TokenEstimate, analyzeTaskComplexity, buildEpicExecutionPlan, buildExecutionPlan, estimateEpicTokens, estimateLocChanges, estimateTokens };
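For consumers, the new epic-level budget API mirrors the existing task-level one. Below is a minimal sketch of wiring the two added functions together; the `@open330/oac/budget` subpath import is an assumption based on the `dist/budget` entry point above, and `buildEpicExecutionPlan` is presumed to read each epic's stored `estimatedTokens` (the grouper initializes it to 0). Only the signatures come from the declarations shown here.

```ts
// Hedged sketch, not documented usage.
import {
  estimateEpicTokens,
  buildEpicExecutionPlan,
  type EpicExecutionPlan,
} from "@open330/oac/budget"; // assumed subpath export

// Derive the epic element type from the planner's signature rather than
// guessing where the shared Epic type is re-exported from.
type EpicLike = Parameters<typeof buildEpicExecutionPlan>[0][number];

async function planWithinBudget(epics: EpicLike[], budget: number): Promise<EpicExecutionPlan> {
  for (const epic of epics) {
    // Per the doc comment: subtask estimates summed plus a 20% shared-context overhead.
    epic.estimatedTokens = await estimateEpicTokens(epic, "claude-code");
  }
  // Epics are sorted purely by priority (not priority/tokens) and selected
  // until the budget is exhausted; the rest land in deferredEpics.
  return buildEpicExecutionPlan(epics, budget);
}
```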
package/dist/budget/index.js
CHANGED
@@ -2,15 +2,19 @@ import {
   ClaudeTokenCounter,
   CodexTokenCounter,
   analyzeTaskComplexity,
+  buildEpicExecutionPlan,
   buildExecutionPlan,
+  estimateEpicTokens,
   estimateLocChanges,
   estimateTokens
-} from "../chunk-
+} from "../chunk-3X6AFC3O.js";
 export {
   ClaudeTokenCounter,
   CodexTokenCounter,
   analyzeTaskComplexity,
+  buildEpicExecutionPlan,
   buildExecutionPlan,
+  estimateEpicTokens,
   estimateLocChanges,
   estimateTokens
 };
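Downstream, an `EpicExecutionPlan` separates what fits in the budget from what does not, and `deferredEpics` only ever carries the declared reason `"budget_exceeded"`. A small hedged sketch of reporting on a plan, using the same assumed subpath import as above (the `title` field on epics is taken from the grouper code later in this diff):

```ts
import { buildEpicExecutionPlan } from "@open330/oac/budget"; // assumed subpath export

type EpicLike = Parameters<typeof buildEpicExecutionPlan>[0][number];

function reportPlan(estimatedEpics: EpicLike[], budget: number): void {
  const plan = buildEpicExecutionPlan(estimatedEpics, budget);
  for (const { epic, estimatedTokens, cumulativeBudgetUsed } of plan.selectedEpics) {
    console.log(`run "${epic.title}" (~${estimatedTokens} tokens, ${cumulativeBudgetUsed}/${plan.totalBudget} used)`);
  }
  for (const { epic, reason } of plan.deferredEpics) {
    console.log(`deferred "${epic.title}": ${reason}`); // only declared reason: "budget_exceeded"
  }
  console.log(`reserve ${plan.reserveTokens}, remaining ${plan.remainingTokens}`);
}
```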
package/dist/{chunk-ZPI2VQ7U.js → chunk-2JOG3M5O.js}
CHANGED
@@ -1352,7 +1352,9 @@ ${labelSummary}`, DESCRIPTION_LIMIT);
 }
 function mapComplexityFromLabels(labels) {
   const normalized = labels.map((label) => label.toLowerCase());
-  if (normalized.some(
+  if (normalized.some(
+    (label) => label.includes("good first issue") || label.includes("good-first-issue")
+  )) {
     return "simple";
   }
   if (normalized.some((label) => label.includes("feature"))) {
@@ -1720,6 +1722,551 @@ function clamp(value, min, max) {
   return Math.min(max, Math.max(min, value));
 }
 
+// src/discovery/analyzer.ts
+import { mkdir, readFile as readFile5, readdir as readdir3, rename, stat as stat2, unlink, writeFile } from "fs/promises";
+import { dirname, extname, join, relative as relative3, resolve as resolve5 } from "path";
+var DEFAULT_EXTENSIONS = /* @__PURE__ */ new Set([".ts", ".tsx", ".js", ".jsx"]);
+var DEFAULT_EXCLUDE_DIRS = /* @__PURE__ */ new Set(["node_modules", "dist", ".git", "coverage"]);
+var DEFAULT_EXCLUDE_SUFFIXES = [
+  ".d.ts",
+  ".test.ts",
+  ".spec.ts",
+  ".test.tsx",
+  ".spec.tsx",
+  ".test.js",
+  ".spec.js",
+  ".test.jsx",
+  ".spec.jsx"
+];
+var DEFAULT_MAX_AGE_MS = 864e5;
+var CONTEXT_DIR_NAME = ".oac/context";
+var CODEBASE_MAP_FILE = "codebase-map.json";
+var QUALITY_REPORT_FILE = "quality-report.json";
+async function analyzeCodebase(repoPath, options) {
+  const resolvedRepoPath = resolve5(repoPath);
+  const sourceDir = options?.sourceDir ?? "src";
+  const srcRoot = join(resolvedRepoPath, sourceDir);
+  const userExclude = options?.exclude ?? [];
+  const repoFullName = options?.repoFullName ?? "";
+  const headSha = options?.headSha ?? "";
+  const allFiles = await walkSourceFiles(srcRoot, userExclude);
+  const fileInfos = [];
+  for (const absPath of allFiles) {
+    const relPath = relative3(resolvedRepoPath, absPath);
+    const info = await analyzeFile(absPath, relPath);
+    fileInfos.push({ ...info, _absolutePath: absPath });
+  }
+  const moduleMap = buildModuleMap(fileInfos, sourceDir);
+  const moduleNames = new Set(Object.keys(moduleMap));
+  const modules = [];
+  for (const [moduleName, files] of Object.entries(moduleMap)) {
+    const moduleFiles = files.map(({ _absolutePath: _, ...fi }) => fi);
+    const totalLoc2 = moduleFiles.reduce((sum, f) => sum + f.loc, 0);
+    const allExports = moduleFiles.flatMap((f) => f.exports);
+    const dependencies = resolveModuleDependencies(files, moduleName, sourceDir, moduleNames);
+    modules.push({
+      name: moduleName,
+      path: moduleName === "root" ? sourceDir : `${sourceDir}/${moduleName}`,
+      files: moduleFiles,
+      totalLoc: totalLoc2,
+      exports: allExports,
+      dependencies
+    });
+  }
+  const totalFiles = fileInfos.length;
+  const totalLoc = fileInfos.reduce((sum, f) => sum + f.loc, 0);
+  const generatedAt = (/* @__PURE__ */ new Date()).toISOString();
+  const codebaseMap = {
+    version: 1,
+    generatedAt,
+    repoFullName,
+    headSha,
+    modules,
+    totalFiles,
+    totalLoc
+  };
+  const findings = await runScanners(resolvedRepoPath, sourceDir, options);
+  const qualityReport = buildQualityReport(findings, repoFullName, generatedAt);
+  return { codebaseMap, qualityReport };
+}
+async function persistContext(repoPath, codebaseMap, qualityReport, contextDir) {
+  const resolvedDir = contextDir ? resolve5(contextDir) : join(resolve5(repoPath), CONTEXT_DIR_NAME);
+  await mkdir(resolvedDir, { recursive: true });
+  await atomicWriteJson(join(resolvedDir, CODEBASE_MAP_FILE), codebaseMap);
+  await atomicWriteJson(join(resolvedDir, QUALITY_REPORT_FILE), qualityReport);
+  return resolvedDir;
+}
+async function loadContext(repoPath, contextDir) {
+  const resolvedDir = contextDir ? resolve5(contextDir) : join(resolve5(repoPath), CONTEXT_DIR_NAME);
+  try {
+    const [mapRaw, reportRaw] = await Promise.all([
+      readFile5(join(resolvedDir, CODEBASE_MAP_FILE), "utf-8"),
+      readFile5(join(resolvedDir, QUALITY_REPORT_FILE), "utf-8")
+    ]);
+    const codebaseMap = JSON.parse(mapRaw);
+    const qualityReport = JSON.parse(reportRaw);
+    return { codebaseMap, qualityReport };
+  } catch {
+    return null;
+  }
+}
+function isContextStale(codebaseMap, maxAgeMs = DEFAULT_MAX_AGE_MS) {
+  const generatedAt = new Date(codebaseMap.generatedAt).getTime();
+  if (Number.isNaN(generatedAt)) {
+    return true;
+  }
+  return Date.now() - generatedAt > maxAgeMs;
+}
+function deriveModuleFromPath(filePath, sourceDir = "src") {
+  const normalized = filePath.replace(/\\/g, "/");
+  const prefix = `${sourceDir.replace(/\\/g, "/")}/`;
+  if (!normalized.startsWith(prefix)) {
+    return "root";
+  }
+  const afterSrc = normalized.slice(prefix.length);
+  const firstSlash = afterSrc.indexOf("/");
+  if (firstSlash === -1) {
+    return "root";
+  }
+  return afterSrc.slice(0, firstSlash);
+}
+async function walkSourceFiles(dirPath, userExclude) {
+  const results = [];
+  const userExcludeDirs = /* @__PURE__ */ new Set();
+  const userExcludeSuffixes = [];
+  for (const pattern of userExclude) {
+    const cleaned = pattern.replace(/\/\*{1,2}$/, "");
+    if (cleaned.startsWith("*.")) {
+      userExcludeSuffixes.push(cleaned.slice(1));
+    } else {
+      userExcludeDirs.add(cleaned);
+    }
+  }
+  function isExcludedFile(name) {
+    if (!DEFAULT_EXTENSIONS.has(extname(name))) return true;
+    if (DEFAULT_EXCLUDE_SUFFIXES.some((suffix) => name.endsWith(suffix))) return true;
+    if (userExcludeSuffixes.some((suffix) => name.endsWith(suffix))) return true;
+    return false;
+  }
+  function isExcludedDir(name) {
+    return DEFAULT_EXCLUDE_DIRS.has(name) || userExcludeDirs.has(name);
+  }
+  async function walk(dir) {
+    let entries;
+    try {
+      entries = await readdir3(dir, { withFileTypes: true });
+    } catch {
+      return;
+    }
+    for (const entry of entries) {
+      if (entry.isDirectory() && !isExcludedDir(entry.name)) {
+        await walk(join(dir, entry.name));
+      } else if (entry.isFile() && !isExcludedFile(entry.name)) {
+        results.push(join(dir, entry.name));
+      }
+    }
+  }
+  await walk(dirPath);
+  results.sort();
+  return results;
+}
+async function analyzeFile(absolutePath, relativePath) {
+  const [content, fileStat] = await Promise.all([
+    readFile5(absolutePath, "utf-8"),
+    stat2(absolutePath)
+  ]);
+  const lines = content.split("\n");
+  const loc = lines.filter((line) => line.trim().length > 0).length;
+  const exports = extractExports(content);
+  const imports = extractImports(content);
+  return {
+    path: relativePath,
+    loc,
+    sizeBytes: fileStat.size,
+    exports,
+    imports
+  };
+}
+function extractExports(content) {
+  const exports = [];
+  const seen = /* @__PURE__ */ new Set();
+  const add = (name) => {
+    const trimmed = name.trim();
+    if (trimmed && !seen.has(trimmed)) {
+      seen.add(trimmed);
+      exports.push(trimmed);
+    }
+  };
+  const namedDeclRe = /\bexport\s+(?:async\s+)?(?:function\*?|class|const|let|var|type|interface|enum)\s+([A-Za-z_$][A-Za-z0-9_$]*)/g;
+  for (const m of content.matchAll(namedDeclRe)) {
+    add(m[1]);
+  }
+  const bracedRe = /\bexport\s*\{([^}]+)\}/g;
+  for (const m of content.matchAll(bracedRe)) {
+    const inner = m[1];
+    for (const item of inner.split(",")) {
+      const parts = item.trim().split(/\s+as\s+/);
+      const exportedName = parts.length > 1 ? parts[1].trim() : parts[0].trim();
+      if (exportedName) {
+        add(exportedName);
+      }
+    }
+  }
+  const defaultRe = /\bexport\s+default\b/;
+  if (defaultRe.test(content)) {
+    add("default");
+  }
+  return exports;
+}
+function extractImports(content) {
+  const imports = [];
+  const seen = /* @__PURE__ */ new Set();
+  const add = (path) => {
+    const trimmed = path.trim();
+    if (trimmed && !seen.has(trimmed)) {
+      seen.add(trimmed);
+      imports.push(trimmed);
+    }
+  };
+  const staticImportRe = /\bimport\s+(?:[\s\S]*?\s+from\s+)?["']([^"']+)["']/g;
+  for (const m of content.matchAll(staticImportRe)) {
+    add(m[1]);
+  }
+  const dynamicImportRe = /\bimport\s*\(\s*["']([^"']+)["']\s*\)/g;
+  for (const m of content.matchAll(dynamicImportRe)) {
+    add(m[1]);
+  }
+  return imports;
+}
+function buildModuleMap(fileInfos, sourceDir) {
+  const moduleMap = {};
+  for (const fi of fileInfos) {
+    const moduleName = deriveModuleFromPath(fi.path, sourceDir);
+    if (!moduleMap[moduleName]) {
+      moduleMap[moduleName] = [];
+    }
+    moduleMap[moduleName].push(fi);
+  }
+  return moduleMap;
+}
+function resolveModuleDependencies(files, currentModule, sourceDir, allModuleNames) {
+  const deps = /* @__PURE__ */ new Set();
+  for (const file of files) {
+    for (const importPath of file.imports) {
+      if (!importPath.startsWith(".")) continue;
+      const fileDir = dirname(file.path);
+      const resolvedImport = join(fileDir, importPath).replace(/\\/g, "/");
+      const importModule = deriveModuleFromPath(resolvedImport, sourceDir);
+      if (importModule !== currentModule && allModuleNames.has(importModule)) {
+        deps.add(importModule);
+      }
+    }
+  }
+  return [...deps].sort();
+}
+async function runScanners(repoPath, sourceDir, options) {
+  const scanners = options?.scanners;
+  const composite = scanners ? new CompositeScanner(scanners) : new CompositeScanner();
+  const scanOptions = {
+    exclude: options?.exclude
+  };
+  let tasks;
+  try {
+    tasks = await composite.scan(repoPath, scanOptions);
+  } catch {
+    tasks = [];
+  }
+  return tasks.map((task) => taskToRawFinding(task, sourceDir));
+}
+function taskToRawFinding(task, sourceDir) {
+  const filePath = task.targetFiles[0] ?? "";
+  const scannerId = typeof task.metadata?.scannerId === "string" ? task.metadata.scannerId : task.source;
+  return {
+    scannerId,
+    source: task.source,
+    filePath,
+    module: deriveModuleFromPath(filePath, sourceDir),
+    title: task.title,
+    description: task.description,
+    severity: deriveSeverity(task.source),
+    complexity: task.complexity,
+    line: typeof task.metadata?.startLine === "number" ? task.metadata.startLine : void 0,
+    metadata: task.metadata,
+    discoveredAt: task.discoveredAt
+  };
+}
+function deriveSeverity(source) {
+  switch (source) {
+    case "lint":
+      return "warning";
+    case "todo":
+      return "info";
+    case "test-gap":
+      return "info";
+    case "github-issue":
+      return "warning";
+    case "dead-code":
+      return "warning";
+    case "custom":
+      return "info";
+    default:
+      return "info";
+  }
+}
+function buildQualityReport(findings, repoFullName, generatedAt) {
+  const bySource = {};
+  const byModule = {};
+  const bySeverity = {};
+  for (const finding of findings) {
+    bySource[finding.source] = (bySource[finding.source] ?? 0) + 1;
+    const mod = finding.module ?? "root";
+    byModule[mod] = (byModule[mod] ?? 0) + 1;
+    bySeverity[finding.severity] = (bySeverity[finding.severity] ?? 0) + 1;
+  }
+  return {
+    version: 1,
+    generatedAt,
+    repoFullName,
+    findings,
+    summary: {
+      totalFindings: findings.length,
+      bySource,
+      byModule,
+      bySeverity
+    }
+  };
+}
+async function atomicWriteJson(filePath, data) {
+  const tmpPath = `${filePath}.tmp.${Date.now()}`;
+  const content = `${JSON.stringify(data, null, 2)}
+`;
+  try {
+    await writeFile(tmpPath, content, "utf-8");
+    await rename(tmpPath, filePath);
+  } catch (error) {
+    try {
+      await unlink(tmpPath);
+    } catch {
+    }
+    throw error;
+  }
+}
+
+// src/discovery/epic-grouper.ts
+import { randomUUID } from "crypto";
+var DEFAULT_MAX_SUBTASKS = 10;
+var DEFAULT_MIN_FINDINGS = 2;
+var MAX_CONTEXT_FILES = 20;
+function groupFindingsIntoEpics(findings, options) {
+  const maxSubtasks = options?.maxSubtasksPerEpic ?? DEFAULT_MAX_SUBTASKS;
+  const _minFindings = options?.minFindingsForEpic ?? DEFAULT_MIN_FINDINGS;
+  const codebaseMap = options?.codebaseMap;
+  const groups = /* @__PURE__ */ new Map();
+  for (const finding of findings) {
+    const module = finding.module ?? deriveModuleFromPath(finding.filePath);
+    const key = `${module}:${finding.source}`;
+    let group = groups.get(key);
+    if (!group) {
+      group = [];
+      groups.set(key, group);
+    }
+    group.push(finding);
+  }
+  const epics = [];
+  for (const [key, groupFindings] of groups) {
+    const [module, source] = key.split(":");
+    if (groupFindings.length <= maxSubtasks) {
+      epics.push(buildEpic(source, module, groupFindings, codebaseMap));
+    } else {
+      const chunks = chunkArray(groupFindings, maxSubtasks);
+      for (let i = 0; i < chunks.length; i++) {
+        epics.push(
+          buildEpic(source, module, chunks[i], codebaseMap, {
+            partIndex: i + 1,
+            totalParts: chunks.length
+          })
+        );
+      }
+    }
+  }
+  return epics;
+}
+function buildEpic(source, module, findings, codebaseMap, partInfo) {
+  const epicId = randomUUID().slice(0, 16);
+  const subtasks = findings.map((f) => findingToTask(f, epicId));
+  let title = buildEpicTitle(source, module, subtasks);
+  if (partInfo) {
+    title = `${title} (${partInfo.partIndex}/${partInfo.totalParts})`;
+  }
+  return {
+    id: epicId,
+    title,
+    description: buildEpicDescription(source, module, subtasks),
+    scope: module,
+    subtasks,
+    contextFiles: resolveContextFiles(module, codebaseMap),
+    status: "pending",
+    priority: computeEpicPriority(subtasks),
+    estimatedTokens: 0,
+    // filled later by estimator
+    createdAt: (/* @__PURE__ */ new Date()).toISOString(),
+    metadata: {
+      groupingStrategy: "by-module",
+      source,
+      findingCount: findings.length
+    }
+  };
+}
+function buildEpicTitle(source, module, _subtasks) {
+  const moduleLabel = module === "root" ? "" : ` for ${module} module`;
+  switch (source) {
+    case "test-gap":
+      return `Improve test coverage${moduleLabel}`;
+    case "todo":
+      return `Address TODO comments${module === "root" ? "" : ` in ${module} module`}`;
+    case "lint":
+      return `Fix lint issues${module === "root" ? "" : ` in ${module} module`}`;
+    case "dead-code":
+      return `Remove dead code${module === "root" ? "" : ` in ${module} module`}`;
+    case "github-issue":
+      return `Resolve GitHub issues${module === "root" ? "" : ` in ${module} module`}`;
+    case "custom":
+      return `Address findings${module === "root" ? "" : ` in ${module} module`}`;
+    default:
+      return `Address ${source} findings${module === "root" ? "" : ` in ${module} module`}`;
+  }
+}
+function buildEpicDescription(source, module, subtasks) {
+  const lines = [
+    `Grouped ${subtasks.length} ${source} findings in the ${module} module:`,
+    ""
+  ];
+  for (const task of subtasks) {
+    const file = task.targetFiles.length > 0 ? `[${task.targetFiles[0]}] ` : "";
+    lines.push(`- ${file}${task.title}`);
+  }
+  return lines.join("\n");
+}
+function findingToTask(finding, epicId) {
+  return {
+    id: `${finding.source}-${randomUUID().slice(0, 8)}`,
+    source: finding.source,
+    title: finding.title,
+    description: finding.description,
+    targetFiles: [finding.filePath].filter(Boolean),
+    priority: derivePriorityFromSeverity(finding.severity),
+    complexity: finding.complexity,
+    executionMode: "new-pr",
+    metadata: finding.metadata,
+    discoveredAt: finding.discoveredAt,
+    parentEpicId: epicId
+  };
+}
+function derivePriorityFromSeverity(severity) {
+  switch (severity) {
+    case "error":
+      return 80;
+    case "warning":
+      return 50;
+    case "info":
+      return 30;
+  }
+}
+function resolveContextFiles(module, codebaseMap) {
+  if (!codebaseMap) return [];
+  const moduleInfo = codebaseMap.modules.find((m) => m.name === module);
+  if (!moduleInfo) return [];
+  return moduleInfo.files.map((f) => f.path).slice(0, MAX_CONTEXT_FILES);
+}
+function computeEpicPriority(subtasks) {
+  if (subtasks.length === 0) return 0;
+  const avg = subtasks.reduce((sum, t) => sum + t.priority, 0) / subtasks.length;
+  const sizeBoost = Math.min(10, subtasks.length * 2);
+  return Math.min(100, Math.round(avg + sizeBoost));
+}
+function computeEpicComplexity(subtasks) {
+  if (subtasks.length === 1) return subtasks[0].complexity;
+  if (subtasks.length <= 3) return "simple";
+  if (subtasks.length <= 6) return "moderate";
+  return "complex";
+}
+function chunkArray(array, size) {
+  const chunks = [];
+  for (let i = 0; i < array.length; i += size) {
+    chunks.push(array.slice(i, i + size));
+  }
+  return chunks;
+}
+
+// src/discovery/backlog.ts
+import { mkdir as mkdir2, readFile as readFile6, rename as rename2, writeFile as writeFile2 } from "fs/promises";
+import { dirname as dirname2, join as join2 } from "path";
+var DEFAULT_CONTEXT_DIR = ".oac/context";
+async function persistBacklog(repoPath, backlog, contextDir) {
+  const path = join2(repoPath, contextDir ?? DEFAULT_CONTEXT_DIR, "backlog.json");
+  await mkdir2(dirname2(path), { recursive: true });
+  const tempPath = `${path}.tmp`;
+  await writeFile2(tempPath, JSON.stringify(backlog, null, 2), "utf8");
+  await rename2(tempPath, path);
+  return path;
+}
+async function loadBacklog(repoPath, contextDir) {
+  const path = join2(repoPath, contextDir ?? DEFAULT_CONTEXT_DIR, "backlog.json");
+  try {
+    const content = await readFile6(path, "utf8");
+    return JSON.parse(content);
+  } catch (err) {
+    if (err instanceof Error && "code" in err && err.code === "ENOENT") {
+      return null;
+    }
+    if (err instanceof SyntaxError) {
+      return null;
+    }
+    return null;
+  }
+}
+function createBacklog(repoFullName, headSha, epics) {
+  return {
+    version: 1,
+    lastUpdatedAt: (/* @__PURE__ */ new Date()).toISOString(),
+    repoFullName,
+    headSha,
+    epics
+  };
+}
+function updateBacklog(existing, newEpics, completedEpicIds, headSha) {
+  const epicMap = /* @__PURE__ */ new Map();
+  for (const epic of existing.epics) {
+    epicMap.set(epic.id, { ...epic });
+  }
+  for (const id of completedEpicIds) {
+    const epic = epicMap.get(id);
+    if (epic) {
+      epic.status = "completed";
+      epic.completedAt = (/* @__PURE__ */ new Date()).toISOString();
+    }
+  }
+  for (const epic of newEpics) {
+    const existing2 = epicMap.get(epic.id);
+    if (!existing2) {
+      epicMap.set(epic.id, { ...epic });
+    } else if (existing2.status === "pending") {
+      epicMap.set(epic.id, { ...epic });
+    }
+  }
+  const epics = Array.from(epicMap.values());
+  return {
+    version: 1,
+    lastUpdatedAt: (/* @__PURE__ */ new Date()).toISOString(),
+    repoFullName: existing.repoFullName,
+    headSha: headSha ?? existing.headSha,
+    epics
+  };
+}
+function getPendingEpics(backlog) {
+  return backlog.epics.filter((e) => e.status === "pending").sort((a, b) => b.priority - a.priority);
+}
+
 export {
   TodoScanner,
   LintScanner,
@@ -1727,6 +2274,20 @@ export {
   GitHubIssuesScanner,
   CompositeScanner,
   createDefaultCompositeScanner,
-  rankTasks
+  rankTasks,
+  analyzeCodebase,
+  persistContext,
+  loadContext,
+  isContextStale,
+  deriveModuleFromPath,
+  groupFindingsIntoEpics,
+  findingToTask,
+  computeEpicPriority,
+  computeEpicComplexity,
+  persistBacklog,
+  loadBacklog,
+  createBacklog,
+  updateBacklog,
+  getPendingEpics
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-2JOG3M5O.js.map