@clue-ai/cli 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -0
- package/bin/clue-tool.mjs +83 -0
- package/commands/claude-code/clue-init.md +27 -0
- package/commands/codex/clue-init.md +27 -0
- package/package.json +20 -0
- package/src/command-spec.mjs +73 -0
- package/src/contracts.mjs +124 -0
- package/src/fastapi-analyzer.mjs +340 -0
- package/src/init-tool.mjs +86 -0
- package/src/lifecycle-init.mjs +206 -0
- package/src/path-policy.mjs +75 -0
- package/src/public-schema.cjs +784 -0
- package/src/semantic-ci.mjs +1830 -0
- package/src/setup-tool.mjs +141 -0
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
import { readFile } from "node:fs/promises";
|
|
2
|
+
import { dirname, join, relative, sep } from "node:path";
|
|
3
|
+
import { createHash } from "node:crypto";
|
|
4
|
+
import { buildOperationSourceKey } from "./contracts.mjs";
|
|
5
|
+
|
|
6
|
+
// Lowercase HTTP verbs recognized in FastAPI route decorators. Interpolated
// into ROUTE_DECORATOR, where it becomes capture group 2 (after the named
// `router` group).
const METHOD_PATTERN = "(get|post|put|patch|delete|head|options)";
// Matches `@<router>.<verb>("<path>"` — a route decorator whose first argument
// is a string literal. Dynamic path expressions deliberately do not match.
const ROUTE_DECORATOR = new RegExp(
  `@(?<router>[A-Za-z_][A-Za-z0-9_]*)\\.${METHOD_PATTERN}\\(\\s*["'](?<path>[^"']+)["']`,
  "gi",
);
// Matches `@<router>.api_route("<path>", ..., methods=[...])`, capturing the
// raw bracket contents of the methods list for parseMethods.
const API_ROUTE_DECORATOR = /@(?<router>[A-Za-z_][A-Za-z0-9_]*)\.api_route\(\s*["'](?<path>[^"']+)["'][\s\S]*?methods\s*=\s*\[(?<methods>[^\]]+)\]/gi;
// Matches a Python `def`/`async def` and captures the function name. NOTE:
// this is a shared /g regex — findNextFunction drives it via lastIndex.
const FUNCTION_PATTERN = /(?:async\s+def|def)\s+(?<name>[A-Za-z_][A-Za-z0-9_]*)\s*\(/g;
// Matches `name = APIRouter(` assignments.
const ROUTER_ASSIGNMENT = /\b(?<name>[A-Za-z_][A-Za-z0-9_]*)\s*=\s*APIRouter\s*\(/g;
// Matches `<owner>.include_router(` calls.
const INCLUDE_ROUTER_CALL = /\b(?<owner>[A-Za-z_][A-Za-z0-9_]*)\.include_router\s*\(/g;
// Matches `from <module> import <names>` lines, including relative modules
// (leading dots). Parenthesized multi-line import lists do not match.
const FROM_IMPORT = /^\s*from\s+(?<module>[A-Za-z0-9_.]+|\.+[A-Za-z0-9_.]*)\s+import\s+(?<names>[A-Za-z0-9_,\s]+)$/gm;

// Content fingerprint in the "sha256:<hex>" wire format used by route records.
const sha256 = (value) => `sha256:${createHash("sha256").update(value).digest("hex")}`;
|
|
18
|
+
|
|
19
|
+
/**
 * Return the raw argument text of a call expression, starting from the index
 * of its opening parenthesis. Tracks nested parentheses and skips quoted
 * strings (including backslash escapes) so delimiters inside string literals
 * are ignored.
 *
 * @param {string} source - Full source text being scanned.
 * @param {number} openParenIndex - Index of the "(" that opens the call.
 * @returns {string} Text between the matching parentheses (exclusive).
 * @throws {Error} When no matching ")" is found before end of input.
 */
const readCallArgs = (source, openParenIndex) => {
  let parenDepth = 0;
  let activeQuote = null;
  let skipNext = false;
  let cursor = openParenIndex;
  while (cursor < source.length) {
    const current = source[cursor];
    if (activeQuote !== null) {
      if (skipNext) {
        skipNext = false;
      } else if (current === "\\") {
        skipNext = true;
      } else if (current === activeQuote) {
        activeQuote = null;
      }
    } else if (current === '"' || current === "'") {
      activeQuote = current;
    } else if (current === "(") {
      parenDepth += 1;
    } else if (current === ")") {
      parenDepth -= 1;
      if (parenDepth === 0) {
        return source.slice(openParenIndex + 1, cursor);
      }
    }
    cursor += 1;
  }
  throw new Error("unterminated FastAPI call expression");
};
|
|
52
|
+
|
|
53
|
+
/**
 * Pull a literal `prefix="..."` keyword argument out of call-argument text.
 * Returns "" when no prefix keyword is present; throws when a prefix exists
 * but is not a plain string literal (dynamic prefixes cannot be analyzed).
 *
 * @param {{args: string, relativePath: string, callName: string}} input
 * @returns {string} Normalized prefix path or "".
 */
const extractPrefix = ({ args, relativePath, callName }) => {
  const literal = /\bprefix\s*=\s*(?<quote>["'])(?<value>[^"']*)\k<quote>/m.exec(args)?.groups?.value;
  if (literal !== undefined) {
    return normalizePath(literal);
  }
  const hasDynamicPrefix = /\bprefix\s*=/.test(args);
  if (hasDynamicPrefix) {
    throw new Error(`${relativePath}: ${callName} uses a dynamic prefix that cannot be analyzed safely`);
  }
  return "";
};
|
|
63
|
+
|
|
64
|
+
/**
 * Normalize a route path fragment: trim whitespace, ensure a single leading
 * slash, and collapse duplicate slashes. FastAPI `{param}` placeholders pass
 * through untouched.
 *
 * @param {string} path - Raw path text from a decorator or prefix literal.
 * @returns {string} Normalized path beginning with "/".
 */
const normalizePath = (path) => {
  // Trim once instead of the original's three trim() calls.
  const trimmed = path.trim();
  const withSlash = trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
  // The original also ran `.replace(/\{(ident)\}/g, "{$1}")`, which rewrote
  // every placeholder to itself — a pure no-op, removed here.
  return withSlash.replace(/\/{2,}/g, "/");
};
|
|
68
|
+
|
|
69
|
+
/**
 * Join any number of path fragments into one normalized route path. Empty,
 * falsy, or slash-only fragments are dropped; the result always starts "/".
 */
const combinePaths = (...parts) => {
  const segments = [];
  for (const part of parts) {
    const trimmed = String(part || "").trim();
    const stripped = trimmed.replace(/^\/+|\/+$/g, "");
    if (stripped) {
      segments.push(stripped);
    }
  }
  return segments.length > 0 ? normalizePath(segments.join("/")) : "/";
};
|
|
78
|
+
|
|
79
|
+
/**
 * Parse the raw `methods=[...]` contents of an `api_route` decorator into
 * uppercase HTTP verbs, stripping quotes/whitespace and dropping blanks.
 */
const parseMethods = (methodsText) => {
  const verbs = [];
  for (const rawEntry of methodsText.split(",")) {
    const cleaned = rawEntry.replace(/["'\s]/g, "").toUpperCase();
    if (cleaned) {
      verbs.push(cleaned);
    }
  }
  return verbs;
};
|
|
84
|
+
|
|
85
|
+
/**
 * Split the names part of a `from X import a, b as c` statement into
 * {importedName, localName} pairs (localName reflects any `as` alias).
 */
const parseImportNames = (names) => {
  const entries = [];
  for (const piece of names.split(",")) {
    const trimmedPiece = piece.trim();
    if (!trimmedPiece) {
      continue;
    }
    const [original, alias] = trimmedPiece.split(/\s+as\s+/);
    entries.push({
      importedName: original.trim(),
      localName: (alias ?? original).trim(),
    });
  }
  return entries;
};
|
|
97
|
+
|
|
98
|
+
/**
 * Translate a Python module reference into candidate repo-relative file paths.
 * Handles absolute (`pkg.mod`) and relative (`.mod`, `..mod`, bare `.`) imports.
 *
 * @param {string} currentRelativePath - Repo-relative path of the importing file.
 * @param {string} moduleName - Module text from a `from ... import` statement.
 * @returns {string[]} Candidates: `<module>.py` and/or `<module>/__init__.py`.
 */
const normalizeModuleCandidates = (currentRelativePath, moduleName) => {
  const currentDir = dirname(currentRelativePath);
  if (moduleName.startsWith(".")) {
    const leadingDots = moduleName.match(/^\.+/)?.[0].length ?? 0;
    const rest = moduleName.slice(leadingDots);
    const baseParts = currentDir === "." ? [] : currentDir.split(/[\\/]+/);
    const keepParts = baseParts.slice(0, Math.max(0, baseParts.length - leadingDots + 1));
    const moduleParts = rest ? rest.split(".") : [];
    if (moduleParts.length === 0) {
      // Bare relative import (`from . import x`): the target is the package
      // itself, which only lives in its `__init__.py`. The previous code built
      // a bogus `<dir>.py` candidate here (literally "..py" at the repo root).
      return [join(...(keepParts.length > 0 ? keepParts : ["."]), "__init__.py")];
    }
    const base = join(...keepParts, ...moduleParts);
    return [`${base}.py`, join(base, "__init__.py")];
  }
  const base = moduleName.split(".").join(sep);
  return [`${base}.py`, join(base, "__init__.py")];
};
|
|
112
|
+
|
|
113
|
+
/**
 * Resolve an imported module name to a known repo file. Exact candidate paths
 * win; otherwise a suffix match handles files indexed under an extra
 * top-level folder.
 *
 * @returns {string|null} Matching repo-relative path, or null when unresolved.
 */
const resolveImportFile = ({ filesByRelativePath, currentRelativePath, moduleName }) => {
  const candidates = normalizeModuleCandidates(currentRelativePath, moduleName);
  for (const candidate of candidates) {
    if (filesByRelativePath.has(candidate)) {
      return candidate;
    }
  }
  for (const knownPath of filesByRelativePath.keys()) {
    // Require a separator boundary: the previous bare endsWith() let
    // candidate "b.py" match an unrelated file "ab.py".
    if (
      candidates.some(
        (candidate) => knownPath === candidate || knownPath.endsWith(`${sep}${candidate}`),
      )
    ) {
      return knownPath;
    }
  }
  return null;
};
|
|
127
|
+
|
|
128
|
+
/**
 * Map every `name = APIRouter(...)` assignment in a file to its literal
 * `prefix` (or "" when none is declared).
 *
 * @returns {Map<string, string>} router variable name -> normalized prefix.
 */
const parseRouterAssignments = (source, relativePath) => {
  const prefixes = new Map();
  for (const assignment of source.matchAll(ROUTER_ASSIGNMENT)) {
    const parenIndex = assignment.index + assignment[0].length - 1;
    const callArgs = readCallArgs(source, parenIndex);
    const prefix = extractPrefix({ args: callArgs, relativePath, callName: "APIRouter" });
    prefixes.set(assignment.groups.name, prefix);
  }
  return prefixes;
};
|
|
140
|
+
|
|
141
|
+
/**
 * Build a map from local identifier to its defining file for every
 * `from X import a, b as c` statement that resolves to a known repo file.
 * Unresolvable modules (stdlib, third-party) are silently skipped.
 *
 * @returns {Map<string, {file: string, name: string}>}
 */
const parseImports = ({ source, currentRelativePath, filesByRelativePath }) => {
  const imports = new Map();
  for (const statement of source.matchAll(FROM_IMPORT)) {
    const resolvedFile = resolveImportFile({
      filesByRelativePath,
      currentRelativePath,
      moduleName: statement.groups.module,
    });
    if (!resolvedFile) {
      continue;
    }
    for (const { localName, importedName } of parseImportNames(statement.groups.names)) {
      imports.set(localName, { file: resolvedFile, name: importedName });
    }
  }
  return imports;
};
|
|
161
|
+
|
|
162
|
+
/**
 * Collect every `app.include_router(router, ...)` call in a file, resolving
 * the router back to its defining file via the import map when possible
 * (falling back to the current file for locally defined routers).
 *
 * @returns {Array<{file: string, routerName: string, prefix: string}>}
 * @throws {Error} When the first argument is not a plain identifier.
 */
const parseIncludeRouters = ({ source, relativePath, imports }) => {
  const includes = [];
  for (const call of source.matchAll(INCLUDE_ROUTER_CALL)) {
    const parenIndex = call.index + call[0].length - 1;
    const callArgs = readCallArgs(source, parenIndex);
    const target = /^\s*(?<router>[A-Za-z_][A-Za-z0-9_]*)/.exec(callArgs)?.groups?.router;
    if (!target) {
      throw new Error(`${relativePath}: include_router target cannot be analyzed safely`);
    }
    const importedRouter = imports.get(target);
    includes.push({
      file: importedRouter?.file ?? relativePath,
      routerName: importedRouter?.name ?? target,
      prefix: extractPrefix({ args: callArgs, relativePath, callName: "include_router" }),
    });
  }
  return includes;
};
|
|
181
|
+
|
|
182
|
+
// Locate the handler function that follows a route decorator and capture a
// bounded snippet of it for fingerprinting and AI context.
// NOTE: FUNCTION_PATTERN is a shared /g regex; lastIndex is explicitly set
// before every exec, so the stateful cursor is safe as long as no other
// caller drives the same pattern concurrently.
const findNextFunction = (source, fromIndex) => {
  FUNCTION_PATTERN.lastIndex = fromIndex;
  const match = FUNCTION_PATTERN.exec(source);
  if (!match?.groups?.name) {
    // No function after the decorator: fall back to an anonymous snippet
    // (at most 1200 chars) so callers still get a fingerprintable blob.
    const snippet = source.slice(fromIndex, Math.min(source.length, fromIndex + 1200));
    return { name: null, snippet, code_structure: extractCodeStructure(snippet) };
  }
  // End the snippet at the next decorator ("\n@") or after at most 3000 chars.
  const nextDecorator = source.indexOf("\n@", match.index + 1);
  const end = nextDecorator > match.index ? nextDecorator : Math.min(source.length, match.index + 3000);
  const snippet = source.slice(match.index, end);
  return {
    name: match.groups.name,
    snippet,
    code_structure: extractCodeStructure(snippet),
  };
};
|
|
198
|
+
|
|
199
|
+
/**
 * Derive a lightweight structural summary of a Python handler snippet:
 * parameter/return annotations from the signature line, plus dotted call
 * targets and `Depends(...)` hints from the whole snippet. Purely lexical —
 * no Python parsing happens here, so multi-line signatures are only partially
 * captured.
 */
const extractCodeStructure = (snippet) => {
  const [signatureLine = ""] = snippet.split("\n");
  const annotationMatch = /\)\s*->\s*(?<annotation>[^:]+):/.exec(signatureLine);
  const responseAnnotation = annotationMatch?.groups?.annotation?.trim() ?? null;

  const annotationPairs = [];
  for (const hit of signatureLine.matchAll(/\b(?<name>[A-Za-z_][A-Za-z0-9_]*)\s*:\s*(?<annotation>[A-Za-z_][A-Za-z0-9_.\[\], ]*)/g)) {
    annotationPairs.push(`${hit.groups.name}:${hit.groups.annotation.trim()}`);
  }

  const dottedCalls = [];
  for (const hit of snippet.matchAll(/\b(?<target>[A-Za-z_][A-Za-z0-9_]*(?:\.[A-Za-z_][A-Za-z0-9_]*)+)\s*\(/g)) {
    const target = hit.groups.target;
    // json.* / logging.* calls are noise for semantic analysis.
    if (!target.startsWith("json.") && !target.startsWith("logging.")) {
      dottedCalls.push(target);
    }
  }

  const dependsHints = [];
  for (const hit of snippet.matchAll(/\bDepends\(\s*(?<target>[A-Za-z_][A-Za-z0-9_.]*)?/g)) {
    if (hit.groups.target) {
      dependsHints.push(hit.groups.target);
    }
  }

  return {
    parameter_annotations: [...new Set(annotationPairs)],
    response_annotation: responseAnnotation,
    call_targets: [...new Set(dottedCalls)].slice(0, 40),
    dependency_hints: [...new Set(dependsHints)].slice(0, 20),
  };
};
|
|
225
|
+
|
|
226
|
+
/**
 * Assemble the canonical record for one (method, path) pair: stable operation
 * key, content fingerprints, and the AI review context.
 */
const routeRecord = ({ route, method, pathTemplate, relativePath, functionInfo, decorator }) => {
  // Anonymous handlers fall back to a positional identity string.
  const handlerIdentity = functionInfo.name ?? `${relativePath}:${method}:${pathTemplate}`;
  return {
    operation_source_key: buildOperationSourceKey(method, pathTemplate),
    method,
    path_template: pathTemplate,
    source: {
      file_fingerprint: sha256(relativePath),
      handler_fingerprint: sha256(handlerIdentity),
      source_fingerprint: sha256(functionInfo.snippet),
    },
    ai_context: {
      relative_path: relativePath,
      route_decorator: decorator,
      handler_name: functionInfo.name,
      source_snippet: functionInfo.snippet,
      code_structure: functionInfo.code_structure,
      router_prefix: route.routerPrefix,
      include_prefix: route.includePrefix,
    },
  };
};
|
|
252
|
+
|
|
253
|
+
/**
 * Analyze a set of Python files for FastAPI route definitions.
 *
 * Pipeline:
 *   1. read every file and index it by repo-relative path;
 *   2. collect APIRouter prefixes and resolvable imports per file;
 *   3. collect include_router prefixes keyed by "<file>::<router>";
 *   4. expand each route decorator into one record per mount prefix;
 *   5. de-duplicate by operation key (first occurrence wins) and sort.
 *
 * @param {{repoRoot: string, files: string[]}} input
 * @returns {Promise<object[]>} Sorted, de-duplicated route records.
 */
export const analyzeFastApiRoutes = async ({ repoRoot, files }) => {
  // File reads are independent — run them in parallel (the original awaited
  // each read sequentially). Promise.all preserves input order.
  const records = await Promise.all(
    files.map(async (file) => ({
      file,
      relativePath: relative(repoRoot, file),
      source: await readFile(file, "utf8"),
    })),
  );
  const filesByRelativePath = new Map(records.map((record) => [record.relativePath, record]));

  for (const record of records) {
    record.routerPrefixes = parseRouterAssignments(record.source, record.relativePath);
    record.imports = parseImports({
      source: record.source,
      currentRelativePath: record.relativePath,
      filesByRelativePath,
    });
  }

  // "<file>::<router>" -> every prefix the router is mounted under.
  const includePrefixes = new Map();
  for (const record of records) {
    for (const include of parseIncludeRouters({
      source: record.source,
      relativePath: record.relativePath,
      imports: record.imports,
    })) {
      const key = `${include.file}::${include.routerName}`;
      const current = includePrefixes.get(key) ?? [];
      current.push(include.prefix);
      includePrefixes.set(key, current);
    }
  }

  const routes = [];
  for (const record of records) {
    const buildForDecorator = ({ match, method, decoratorPath }) => {
      const routerName = match.groups.router;
      const functionInfo = findNextFunction(record.source, match.index + match[0].length);
      const key = `${record.relativePath}::${routerName}`;
      const localRouterPrefix = record.routerPrefixes.get(routerName) ?? "";
      // A router mounted several times yields one route per mount prefix;
      // an unmounted router still yields a record with an empty prefix.
      const prefixes = includePrefixes.get(key) ?? [""];
      for (const includePrefix of prefixes) {
        routes.push(
          routeRecord({
            route: { routerPrefix: localRouterPrefix, includePrefix },
            method,
            pathTemplate: combinePaths(includePrefix, localRouterPrefix, decoratorPath),
            relativePath: record.relativePath,
            functionInfo,
            decorator: match[0],
          }),
        );
      }
    };

    for (const match of record.source.matchAll(ROUTE_DECORATOR)) {
      buildForDecorator({
        match,
        // Capture group 2 is the HTTP verb contributed by METHOD_PATTERN.
        method: String(match[2]).toUpperCase(),
        decoratorPath: normalizePath(match.groups.path),
      });
    }

    for (const match of record.source.matchAll(API_ROUTE_DECORATOR)) {
      for (const method of parseMethods(match.groups.methods)) {
        buildForDecorator({
          match,
          method,
          decoratorPath: normalizePath(match.groups.path),
        });
      }
    }
  }

  // First occurrence wins for duplicate operation keys.
  const byKey = new Map();
  for (const route of routes) {
    if (!byKey.has(route.operation_source_key)) {
      byKey.set(route.operation_source_key, route);
    }
  }
  return [...byKey.values()].sort((a, b) =>
    a.operation_source_key.localeCompare(b.operation_source_key),
  );
};
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import { mkdir, writeFile } from "node:fs/promises";
|
|
2
|
+
import { dirname, join } from "node:path";
|
|
3
|
+
import { buildInitReport, validateInitRequest } from "./contracts.mjs";
|
|
4
|
+
import { applyLifecyclePlan, planLifecycleInsertions } from "./lifecycle-init.mjs";
|
|
5
|
+
|
|
6
|
+
/**
 * Render the semantic-CI request JSON that the generated workflow writes at
 * runtime. GitHub Actions expression placeholders ("${{ ... }}") are emitted
 * verbatim here and expanded by the workflow runner, not by this code. The
 * 10-space stringify indent aligns the payload inside the workflow heredoc.
 */
const workflowRequestPayload = (request) => {
  const payload = {
    project_key: "${{ vars.CLUE_PROJECT_KEY }}",
    environment: "${{ vars.CLUE_ENVIRONMENT }}",
    service_key: request.service_key,
    repository: {
      provider: "github",
      repository_id: "${{ github.repository_id }}",
      owner: "${{ github.repository_owner }}",
      name: "${{ github.event.repository.name }}",
      default_branch: "${{ github.event.repository.default_branch }}",
      merge_commit: "${{ github.sha }}",
      workflow_run_id: "${{ github.run_id }}",
    },
    service: {
      service_key: request.service_key,
      root_path: request.allowed_source_paths[0],
      framework: request.framework,
      language: "python",
    },
    allowed_source_paths: request.allowed_source_paths,
    excluded_source_paths: request.excluded_source_paths,
    clue_api_base_url: "${{ vars.CLUE_API_BASE_URL }}",
    ai_model: "${{ vars.CLUE_AI_MODEL }}",
  };
  return JSON.stringify(payload, null, 10).trim();
};
|
|
35
|
+
|
|
36
|
+
const workflowTemplate = (request) => `name: Clue Semantic Snapshot
|
|
37
|
+
|
|
38
|
+
on:
|
|
39
|
+
push:
|
|
40
|
+
|
|
41
|
+
jobs:
|
|
42
|
+
semantic-snapshot:
|
|
43
|
+
if: github.ref_name == github.event.repository.default_branch
|
|
44
|
+
runs-on: ubuntu-latest
|
|
45
|
+
steps:
|
|
46
|
+
- uses: actions/checkout@v4
|
|
47
|
+
- uses: actions/setup-node@v4
|
|
48
|
+
with:
|
|
49
|
+
node-version: "20"
|
|
50
|
+
- name: Run Clue semantic generation
|
|
51
|
+
continue-on-error: true
|
|
52
|
+
env:
|
|
53
|
+
CLUE_API_KEY: \${{ secrets.CLUE_API_KEY }}
|
|
54
|
+
AI_PROVIDER_API_KEY: \${{ secrets.AI_PROVIDER_API_KEY }}
|
|
55
|
+
run: |
|
|
56
|
+
mkdir -p .clue
|
|
57
|
+
cat > .clue/semantic-request.runtime.json <<'JSON'
|
|
58
|
+
${workflowRequestPayload(request)}
|
|
59
|
+
JSON
|
|
60
|
+
npx @clue-ai/cli semantic-ci --request .clue/semantic-request.runtime.json --repo .
|
|
61
|
+
`;
|
|
62
|
+
|
|
63
|
+
/**
 * Execute the init tool end to end: validate the request, write the CI
 * workflow file, plan lifecycle SDK insertions via the injectable planner,
 * apply the plan, and return the combined init report.
 *
 * @param {object} input
 * @param {string} input.repoRoot - Repository root directory.
 * @param {object} input.request - Raw init request (validated here).
 * @param {object} [input.env] - Environment map forwarded to the planner.
 * @param {Function} [input.lifecyclePlanner] - Override for testing.
 */
export const runInitTool = async ({
  repoRoot,
  request: rawRequest,
  env = process.env,
  lifecyclePlanner = planLifecycleInsertions,
}) => {
  const request = validateInitRequest(rawRequest);

  // Materialize the GitHub Actions workflow before touching source files.
  const workflowPath = join(repoRoot, request.ci_workflow_path);
  await mkdir(dirname(workflowPath), { recursive: true });
  await writeFile(workflowPath, workflowTemplate(request), "utf8");

  const plan = await lifecyclePlanner({ repoRoot, request, env });
  const applied = await applyLifecyclePlan({ repoRoot, plan });

  return buildInitReport({
    request,
    lifecycleInsertions: applied.lifecycleInsertions,
    warnings: [...applied.warnings],
  });
};
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
import { readFile, writeFile } from "node:fs/promises";
|
|
2
|
+
import { isAbsolute, relative, resolve } from "node:path";
|
|
3
|
+
import { listAllowedSourceFiles } from "./path-policy.mjs";
|
|
4
|
+
|
|
5
|
+
// Lifecycle APIs the init tool is allowed to insert; anything else is
// rejected by assertApiName.
const API_NAMES = new Set([
  "ClueInit",
  "ClueIdentify",
  "ClueSetAccount",
  "ClueLogout",
]);

// File extensions treated as source code when building AI context.
const SOURCE_EXTENSIONS = [".py", ".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"];
// Prompt-size caps: max files sent, max chars taken per file, and the max
// running total across all files.
const MAX_CONTEXT_FILES = 180;
const MAX_FILE_CHARS = 12_000;
const MAX_TOTAL_CHARS = 360_000;
|
|
16
|
+
|
|
17
|
+
/**
 * Assert that `value` is a non-blank string and return it trimmed.
 * @throws {Error} "<field> is required" when missing, blank, or not a string.
 */
const nonEmpty = (value, field) => {
  const isBlank = typeof value !== "string" || value.trim() === "";
  if (isBlank) {
    throw new Error(`${field} is required`);
  }
  return value.trim();
};
|
|
23
|
+
|
|
24
|
+
/**
 * Resolve `filePath` against `repoRoot` and verify it stays inside the root.
 *
 * @returns {{absolutePath: string, relativePath: string}}
 * @throws {Error} When the resolved path escapes the repository root.
 */
const safeRelativePath = (repoRoot, filePath) => {
  const root = resolve(repoRoot);
  const absolutePath = resolve(root, filePath);
  const relativePath = relative(root, absolutePath);
  const escapesRoot = relativePath.startsWith("..") || isAbsolute(relativePath);
  if (escapesRoot) {
    throw new Error(`edit path escapes repo root: ${filePath}`);
  }
  return { absolutePath, relativePath };
};
|
|
36
|
+
|
|
37
|
+
/**
 * Validate that `apiName` is one of the supported lifecycle APIs and return
 * it unchanged.
 * @throws {Error} For any name outside API_NAMES.
 */
const assertApiName = (apiName) => {
  const isSupported = API_NAMES.has(apiName);
  if (!isSupported) {
    throw new Error(`unsupported lifecycle API: ${apiName}`);
  }
  return apiName;
};
|
|
43
|
+
|
|
44
|
+
/**
 * Reject replacement text that would add instrumentation the init tool must
 * never introduce: broad ClueTrack calls or data-clue-* DOM attributes
 * (attribute check is case-insensitive).
 */
const assertNoForbiddenInstrumentation = (replacement) => {
  const addsTracking = replacement.includes("ClueTrack");
  if (addsTracking) {
    throw new Error("init tool must not add broad ClueTrack instrumentation");
  }
  const addsDomTags = /data-clue-(id|key)/i.test(replacement);
  if (addsDomTags) {
    throw new Error("init tool must not add data-clue-id or data-clue-key");
  }
};
|
|
52
|
+
|
|
53
|
+
/**
 * Validate and normalize one lifecycle-insertion entry from the AI plan.
 * Confidence is clamped to [0, 1]; a non-numeric confidence used to coerce to
 * NaN and slip through the Math.max/Math.min clamp — it now falls back to 0.
 *
 * @throws {Error} On missing fields or an unsupported api_name.
 */
const normalizeLifecycleInsertion = (input) => {
  const rawConfidence = Number(input.confidence ?? 0);
  const confidence = Number.isFinite(rawConfidence)
    ? Math.max(0, Math.min(1, rawConfidence))
    : 0;
  return {
    api_name: assertApiName(nonEmpty(input.api_name, "api_name")),
    file_path: nonEmpty(input.file_path, "file_path"),
    confidence,
    reason: nonEmpty(input.reason, "reason"),
  };
};
|
|
59
|
+
|
|
60
|
+
/**
 * Validate the raw AI plan shape and normalize it into
 * { edits, lifecycleInsertions, warnings }. Missing lifecycle_insertions or
 * warnings arrays degrade to empty lists; non-string warnings are dropped.
 *
 * @throws {Error} When the plan is not an object or lacks an edits array.
 */
const normalizePlan = (input) => {
  if (!input || typeof input !== "object") {
    throw new Error("AI lifecycle plan must be an object");
  }
  if (!Array.isArray(input.edits)) {
    throw new Error("AI lifecycle plan must include edits");
  }
  const edits = [];
  for (const edit of input.edits) {
    edits.push({
      file_path: nonEmpty(edit.file_path, "edit.file_path"),
      find: nonEmpty(edit.find, "edit.find"),
      replace: nonEmpty(edit.replace, "edit.replace"),
    });
  }
  const rawInsertions = Array.isArray(input.lifecycle_insertions)
    ? input.lifecycle_insertions
    : [];
  const warnings = [];
  if (Array.isArray(input.warnings)) {
    for (const warning of input.warnings) {
      if (typeof warning === "string" && warning.trim()) {
        warnings.push(warning.trim());
      }
    }
  }
  return {
    edits,
    lifecycleInsertions: rawInsertions.map(normalizeLifecycleInsertion),
    warnings,
  };
};
|
|
85
|
+
|
|
86
|
+
/**
 * Apply a validated AI lifecycle plan to the repository.
 *
 * Each `find` string must occur exactly once in its file, and the replacement
 * is spliced in literally. (The previous implementation used
 * `String.prototype.replace` with a string replacement, which interprets
 * `$&`, `$'`, `$1`, etc. in the replacement text as substitution patterns —
 * corrupting any AI edit whose replacement contained a dollar sign.)
 *
 * @returns {Promise<{lifecycleInsertions: object[], warnings: string[]}>}
 * @throws {Error} On invalid plans, paths outside the repo root, forbidden
 *   instrumentation, or ambiguous/missing find strings.
 */
export const applyLifecyclePlan = async ({ repoRoot, plan: rawPlan }) => {
  const plan = normalizePlan(rawPlan);
  for (const edit of plan.edits) {
    const { absolutePath } = safeRelativePath(repoRoot, edit.file_path);
    assertNoForbiddenInstrumentation(edit.replace);
    const current = await readFile(absolutePath, "utf8");
    const segments = current.split(edit.find);
    const occurrences = segments.length - 1;
    if (occurrences !== 1) {
      throw new Error(
        `edit.find must match exactly once in ${edit.file_path}; matched ${occurrences}`,
      );
    }
    // Literal splice — immune to replacement-pattern interpretation.
    await writeFile(absolutePath, segments.join(edit.replace), "utf8");
  }
  return {
    lifecycleInsertions: plan.lifecycleInsertions,
    warnings: plan.warnings,
  };
};
|
|
105
|
+
|
|
106
|
+
/**
 * Read the allowed source files and package them as AI prompt context.
 * Each file is truncated to MAX_FILE_CHARS; iteration stops once the running
 * total would exceed MAX_TOTAL_CHARS (the file that crosses the budget is
 * dropped entirely, matching the size cap).
 */
const readContextFiles = async ({ repoRoot, request }) => {
  const allowedFiles = await listAllowedSourceFiles({
    repoRoot,
    allowedSourcePaths: request.allowed_source_paths,
    excludedSourcePaths: request.excluded_source_paths,
    extensions: SOURCE_EXTENSIONS,
  });
  const rootAbsolute = resolve(repoRoot);
  const selected = allowedFiles.slice(0, MAX_CONTEXT_FILES);
  const context = [];
  let budgetUsed = 0;
  for (const absolutePath of selected) {
    const fullText = await readFile(absolutePath, "utf8");
    const snippet = fullText.slice(0, MAX_FILE_CHARS);
    budgetUsed += snippet.length;
    if (budgetUsed > MAX_TOTAL_CHARS) {
      break;
    }
    context.push({
      file_path: relative(rootAbsolute, absolutePath),
      source: snippet,
      truncated: fullText.length > snippet.length,
    });
  }
  return context;
};
|
|
130
|
+
|
|
131
|
+
// Build the single user message for the lifecycle planner. Instructions, the
// expected output shape, and the repository file context are serialized as one
// JSON document so the model can be held to "JSON in, JSON out". The
// output_shape values are illustrative examples, not live data.
const buildLifecyclePrompt = ({ request, files }) => JSON.stringify({
  task: "Add Clue SDK lifecycle API calls to this repository using exact text replacements.",
  rules: [
    "Return JSON only.",
    "Use only exact replacements. Each find string must be copied exactly from source.",
    "Add ClueInit, ClueIdentify, ClueSetAccount, and ClueLogout where repository code has clear lifecycle points.",
    "Do not add broad ClueTrack instrumentation.",
    "Do not add data-clue-id, data-clue-key, or similar DOM tags.",
    "Do not create route semantics files or layer files.",
    "Prefer minimal edits that engineers can review in one PR.",
    "If a lifecycle point is unclear, skip that edit and include a warning.",
  ],
  repository_context: {
    target_tool: request.target_tool,
    framework: request.framework,
    project_key: request.project_key,
    environment: request.environment,
    service_key: request.service_key,
  },
  output_shape: {
    edits: [
      {
        file_path: "app/main.py",
        find: "exact original text",
        replace: "exact replacement text",
      },
    ],
    lifecycle_insertions: [
      {
        api_name: "ClueInit",
        file_path: "app/main.py",
        confidence: 0.8,
        reason: "SDK initialized where FastAPI app is created.",
      },
    ],
    warnings: ["short engineer review note"],
  },
  files,
});
|
|
170
|
+
|
|
171
|
+
/**
 * Ask the configured OpenAI-compatible provider for a lifecycle-insertion
 * plan over the repository's allowed source files.
 *
 * @param {{repoRoot: string, request: object, env: object}} input
 * @returns {Promise<object>} Raw parsed plan (later validated by normalizePlan).
 * @throws {Error} When the API key is missing, the provider call fails, or
 *   the response is empty or not valid JSON.
 */
export const planLifecycleInsertions = async ({ repoRoot, request, env }) => {
  const apiKey = env.AI_PROVIDER_API_KEY;
  if (!apiKey) {
    throw new Error("AI_PROVIDER_API_KEY is required for lifecycle API insertion");
  }
  const files = await readContextFiles({ repoRoot, request });
  const baseUrl = String(env.AI_PROVIDER_BASE_URL || "https://api.openai.com/v1").replace(/\/+$/, "");
  const model = String(env.CLUE_INIT_AI_MODEL || env.CLUE_AI_MODEL || "gpt-5.4-mini");
  const response = await fetch(`${baseUrl}/chat/completions`, {
    method: "POST",
    headers: {
      "content-type": "application/json",
      authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model,
      messages: [
        {
          role: "system",
          content: "You are a safe code-edit planner for Clue SDK initialization. Return schema-valid JSON only.",
        },
        { role: "user", content: buildLifecyclePrompt({ request, files }) },
      ],
      response_format: { type: "json_object" },
    }),
  });
  if (!response.ok) {
    // Best-effort: include the provider's error body instead of only a status.
    const detail = await response.text().catch(() => "");
    throw new Error(
      `AI provider failed during lifecycle planning: ${response.status}${detail ? ` ${detail.slice(0, 300)}` : ""}`,
    );
  }
  const body = await response.json();
  const content = body?.choices?.[0]?.message?.content;
  if (typeof content !== "string" || content.trim() === "") {
    throw new Error("AI provider returned empty lifecycle plan");
  }
  try {
    return JSON.parse(content);
  } catch (cause) {
    // Preserve the parse error while giving the caller actionable context.
    throw new Error("AI provider returned a non-JSON lifecycle plan", { cause });
  }
};
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import { readdir, stat } from "node:fs/promises";
|
|
2
|
+
import { isAbsolute, join, relative, resolve } from "node:path";
|
|
3
|
+
|
|
4
|
+
// Directory/file names always skipped during source discovery; merged with the
// request's excluded_source_paths. Matching is by exact path segment (see
// hasExcludedPart), so e.g. any nested "node_modules" directory is pruned.
const DEFAULT_EXCLUDES = [
  ".git",
  ".env",
  ".next",
  "node_modules",
  "dist",
  "build",
  "coverage",
  "logs",
  "tmp",
  "vendor",
  "__pycache__",
];
|
|
17
|
+
|
|
18
|
+
/**
 * True when any path segment of `relativePath` appears in `excludes`.
 * Splits on both "/" and "\" so Windows-style paths are handled too.
 */
const hasExcludedPart = (relativePath, excludes) => {
  for (const segment of relativePath.split(/[\\/]+/)) {
    if (segment && excludes.includes(segment)) {
      return true;
    }
  }
  return false;
};
|
|
22
|
+
|
|
23
|
+
/**
 * True when `absolutePath` is the root itself or lives somewhere beneath it.
 */
const isInsideRoot = (root, absolutePath) => {
  const relativePath = relative(root, absolutePath);
  if (relativePath === "") {
    return true;
  }
  return !relativePath.startsWith("..") && !isAbsolute(relativePath);
};
|
|
30
|
+
|
|
31
|
+
/**
 * Recursively list files under the allowed source paths, pruning excluded
 * path segments and keeping only the requested extensions.
 *
 * @param {object} input
 * @param {string} input.repoRoot - Repository root.
 * @param {string[]} input.allowedSourcePaths - Roots to walk (repo-relative).
 * @param {string[]} input.excludedSourcePaths - Extra segment names to skip.
 * @param {string[]} input.extensions - File suffixes to keep (e.g. ".py").
 * @returns {Promise<string[]>} Sorted, de-duplicated absolute file paths.
 * @throws {Error} When an allowed path escapes the repository root.
 */
export const listAllowedSourceFiles = async ({
  repoRoot,
  allowedSourcePaths,
  excludedSourcePaths,
  extensions,
}) => {
  const root = resolve(repoRoot);
  const excludes = [...new Set([...DEFAULT_EXCLUDES, ...excludedSourcePaths])];
  // A Set guards against duplicates when allowed paths overlap
  // (e.g. ["app", "app/sub"]); the original array could double-list files.
  const files = new Set();
  // Materialize once — the original spread the Set for every visited file.
  const extensionList = [...new Set(extensions)];

  const walk = async (absolutePath) => {
    const relativePath = relative(root, absolutePath);
    if (relativePath && hasExcludedPart(relativePath, excludes)) {
      return;
    }
    // NOTE(review): stat() follows symlinks, so a symlink cycle inside an
    // allowed path could recurse forever — confirm analyzed repos never
    // contain such links, or switch to lstat.
    const currentStat = await stat(absolutePath);
    if (currentStat.isDirectory()) {
      for (const entry of await readdir(absolutePath)) {
        await walk(join(absolutePath, entry));
      }
      return;
    }
    if (
      currentStat.isFile() &&
      extensionList.some((extension) => absolutePath.endsWith(extension))
    ) {
      files.add(absolutePath);
    }
  };

  for (const sourcePath of allowedSourcePaths) {
    const absolutePath = resolve(root, sourcePath);
    if (!isInsideRoot(root, absolutePath)) {
      throw new Error(`allowed_source_paths escapes repo root: ${sourcePath}`);
    }
    await walk(absolutePath);
  }

  return [...files].sort();
};
|
|
73
|
+
|
|
74
|
+
/**
 * Convenience wrapper: list allowed source files restricted to Python (.py).
 */
export async function listAllowedPythonFiles(options) {
  return listAllowedSourceFiles({ ...options, extensions: [".py"] });
}