@clue-ai/cli 0.0.4 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +59 -2
- package/bin/clue-cli.mjs +836 -17
- package/commands/claude-code/clue-init.md +7 -1
- package/commands/codex/clue-init.md +7 -1
- package/package.json +1 -1
- package/src/ai-provider.mjs +146 -0
- package/src/command-spec.mjs +7 -7
- package/src/contracts.mjs +53 -14
- package/src/init-tool.mjs +153 -20
- package/src/lifecycle-guard.mjs +141 -0
- package/src/lifecycle-init.mjs +91 -73
- package/src/path-policy.mjs +2 -0
- package/src/public-schema.cjs +27 -1
- package/src/semantic-ci.mjs +771 -122
- package/src/setup-check.mjs +436 -0
- package/src/setup-detect.mjs +198 -0
- package/src/setup-prepare.mjs +289 -0
- package/src/setup-tool.mjs +94 -27
|
@@ -0,0 +1,436 @@
|
|
|
1
|
+
import { access, readFile } from "node:fs/promises";
|
|
2
|
+
import { join, relative, resolve } from "node:path";
|
|
3
|
+
import {
|
|
4
|
+
findLifecycleCallApiNames,
|
|
5
|
+
findLifecycleGuardViolations,
|
|
6
|
+
} from "./lifecycle-guard.mjs";
|
|
7
|
+
import { listAllowedSourceFiles } from "./path-policy.mjs";
|
|
8
|
+
import { runSemanticInventory } from "./semantic-ci.mjs";
|
|
9
|
+
|
|
10
|
+
// Default location of the generated semantic snapshot GitHub Actions workflow.
const DEFAULT_WORKFLOW_PATH = ".github/workflows/clue-semantic-snapshot.yml";

// GitHub expressions/fields the workflow must NOT reference, so actor and
// repository metadata stays out of the snapshot (privacy minimization).
const disallowedWorkflowMetadataPattern =
  /github\.(actor|triggering_actor|repository_owner)|github\.event\.sender|github\.event\.repository\.name|"default_branch"\s*:/;
// Skill bundles the setup flow installs; each must exist on disk as
// <skill root>/<skill name>/SKILL.md for the "setup_skills" check to pass.
const SETUP_SKILLS = [
  "clue-setup-orchestrator",
  "clue-route-semantic-snapshot",
  "clue-semantic-ci",
  "clue-sdk-instrumentation",
  "clue-setup-audit",
  "clue-local-verification",
  "clue-setup-report",
];
// Per-agent skill root directories, expressed as path segments for join().
const TARGET_SKILL_ROOTS = {
  codex: [".agents", "skills"],
  claude_code: [".claude", "skills"],
};
// File extensions included in the source scan (lifecycle + secret checks).
const SOURCE_EXTENSIONS = [".py", ".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"];
// Clue SDK lifecycle entry points expected to appear in the scanned sources.
const REQUIRED_LIFECYCLE_APIS = [
  "ClueInit",
  "ClueIdentify",
  "ClueSetAccount",
  "ClueLogout",
];
// Package/module names indicating a backend Clue SDK dependency; both the
// dash (package name) and underscore (Python import) spellings are listed.
const BACKEND_SDK_MARKERS = [
  "clue-fastapi-sdk",
  "clue-django-sdk",
  "clue-python-sdk-core",
  "clue_fastapi_sdk",
  "clue_django_sdk",
  "clue_python_sdk_core",
];
// Dependency manifests and lockfiles searched for SDK markers and leaked
// secrets, at the repo root and under each allowed source root.
const DEPENDENCY_FILE_CANDIDATES = [
  "package.json",
  "pnpm-lock.yaml",
  "package-lock.json",
  "yarn.lock",
  "requirements.txt",
  "requirements-dev.txt",
  "pyproject.toml",
  "poetry.lock",
  "Pipfile",
];
|
|
53
|
+
// Boolean wrapper around fs access(): true when the path is reachable,
// false on any access error (missing file, permissions, …).
const exists = async (path) => {
  try {
    await access(path);
  } catch {
    return false;
  }
  return true;
};
|
|
61
|
+
|
|
62
|
+
// Canonicalize a setup target string ("Claude Code", "claude-code", …) into a
// TARGET_SKILL_ROOTS key. Blank or non-string input yields undefined; any
// other unrecognized value throws.
const normalizeTarget = (target) => {
  if (typeof target !== "string") return undefined;
  const trimmed = target.trim();
  if (trimmed === "") return undefined;
  // Collapse whitespace/dash runs to underscores: "claude code" -> "claude_code".
  const key = trimmed.toLowerCase().replace(/[\s-]+/g, "_");
  if (TARGET_SKILL_ROOTS[key]) {
    return key;
  }
  throw new Error("target must be codex or claude_code");
};
|
|
73
|
+
|
|
74
|
+
// Append one check record to the mutable `checks` array; any extra detail
// fields are merged flat into the record alongside id/passed/summary.
const addCheck = (checks, id, passed, summary, details = {}) => {
  const record = { id, passed, summary, ...details };
  checks.push(record);
};
|
|
77
|
+
|
|
78
|
+
// Read every policy-allowed source file under repoRoot and return
// { file_path, text } records, with file_path repo-relative.
const readAllowedSourceText = async ({
  repoRoot,
  allowedSourcePaths,
  excludedSourcePaths,
}) => {
  const absoluteRoot = resolve(repoRoot);
  const filePaths = await listAllowedSourceFiles({
    repoRoot,
    allowedSourcePaths,
    excludedSourcePaths,
    extensions: SOURCE_EXTENSIONS,
  });
  const results = [];
  for (const filePath of filePaths) {
    const text = await readFile(filePath, "utf8");
    results.push({ file_path: relative(absoluteRoot, filePath), text });
  }
  return results;
};
|
|
98
|
+
|
|
99
|
+
// Collect the dependency manifests/lockfiles that actually exist, looking at
// the repo root and under each provided root; returns { file_path, text }.
const readDependencyText = async ({ repoRoot, roots }) => {
  const nested = roots.flatMap((root) =>
    DEPENDENCY_FILE_CANDIDATES.map((file) => join(root, file)),
  );
  // Set preserves insertion order while deduplicating overlapping roots.
  const uniquePaths = new Set([...DEPENDENCY_FILE_CANDIDATES, ...nested]);
  const results = [];
  for (const relativePath of uniquePaths) {
    const absolutePath = join(repoRoot, relativePath);
    if (await exists(absolutePath)) {
      results.push({
        file_path: relativePath,
        text: await readFile(absolutePath, "utf8"),
      });
    }
  }
  return results;
};
|
|
117
|
+
|
|
118
|
+
// Decide which source roots to scan. Starts from the request's allowed paths
// (default "."); when frontend scanning is enabled, widens to common frontend
// directories — but only those that exist on disk. Falls back to the request
// paths when none of the candidates exist.
const setupSourcePaths = async ({ repoRoot, request, includeFrontend }) => {
  const fromRequest = request?.allowed_source_paths?.length
    ? request.allowed_source_paths
    : ["."];
  if (!includeFrontend) return fromRequest;
  const frontendGuesses = [
    "frontend/src",
    "src",
    "app",
    "apps/web/src",
    "packages/frontend",
  ];
  const found = [];
  for (const candidate of new Set([...fromRequest, ...frontendGuesses])) {
    if (await exists(join(repoRoot, candidate))) {
      found.push(candidate);
    }
  }
  return found.length > 0 ? found : fromRequest;
};
|
|
139
|
+
|
|
140
|
+
// Heuristic regexes for obvious secret material that must never be committed.
const secretLeakPatterns = [
  /pk_(live|test)_[A-Za-z0-9_-]+/, // publishable-style API key
  /sk_(live|test)_[A-Za-z0-9_-]+/, // secret-style API key
  /npm_[A-Za-z0-9]{20,}/, // npm access token
  /CLUE_API_KEY\s*[:=]\s*["'][^"']+["']/, // inline Clue API key assignment
  /CLUE_AI_PROVIDER_API_KEY\s*[:=]\s*["'][^"']+["']/, // inline provider key assignment
];
|
|
147
|
+
|
|
148
|
+
// Return the file_path of every source whose text matches at least one
// secret-leak pattern.
const findSecretLeaks = (sources) =>
  sources
    .filter((source) =>
      secretLeakPatterns.some((pattern) => pattern.test(source.text)),
    )
    .map((source) => source.file_path);
|
|
154
|
+
|
|
155
|
+
// True when filePath equals root or lives underneath it; trailing slashes on
// root are ignored for the prefix comparison.
const startsWithRoot = (filePath, root) => {
  if (filePath === root) return true;
  const prefix = `${root.replace(/\/+$/, "")}/`;
  return filePath.startsWith(prefix);
};
|
|
157
|
+
|
|
158
|
+
// True when text contains any of the literal marker substrings.
const hasAnyMarker = (text, markers) => {
  for (const marker of markers) {
    if (text.includes(marker)) return true;
  }
  return false;
};
|
|
160
|
+
|
|
161
|
+
/**
 * Textual analysis of the scanned sources for Clue SDK lifecycle usage.
 *
 * @param {object} args
 * @param {string[]} [args.backendRootPaths] - repo-relative roots treated as backend code.
 * @param {Array<{file_path: string, text: string}>} [args.dependencySources] - manifest/lockfile texts.
 * @param {Array<{file_path: string, text: string}>} args.sources - all scanned source files.
 * @returns {object} findings plus an overall `passed` flag.
 */
const checkSdkLifecycle = ({
  backendRootPaths = [],
  dependencySources = [],
  sources,
}) => {
  // Detection is textual: all checks run over concatenated file contents.
  const combined = sources.map((source) => source.text).join("\n");
  // Backend subset: files located under any declared backend root.
  const backendSources = sources.filter((source) =>
    backendRootPaths.some((root) => startsWithRoot(source.file_path, root)),
  );
  const backendCombined = backendSources
    .map((source) => source.text)
    .join("\n");
  const dependencyCombined = dependencySources
    .map((source) => source.text)
    .join("\n");
  // findLifecycleCallApiNames (lifecycle-guard.mjs) presumably returns the
  // Clue* API names invoked in the text — confirm against that module.
  const foundApiNames = findLifecycleCallApiNames(combined);
  const backendFoundApiNames = findLifecycleCallApiNames(backendCombined);
  const foundApis = REQUIRED_LIFECYCLE_APIS.filter((api) =>
    foundApiNames.includes(api),
  );
  const missingApis = REQUIRED_LIFECYCLE_APIS.filter(
    (api) => !foundApis.includes(api),
  );
  // Matches window.Clue* globals or local re-declarations of the lifecycle
  // names — i.e. no-op shims shadowing the real SDK exports.
  const noOpPattern =
    /window\.Clue(?:Init|Identify|SetAccount|Logout)|(?:function|const|let|var)\s+Clue(?:Init|Identify|SetAccount|Logout)\b/;
  // ClueInit within 1200 chars of a useEffect( opener suggests per-component
  // initialization rather than a single app-level init.
  const componentLifecycleInitFiles = sources
    .filter((source) =>
      /useEffect\s*\([\s\S]{0,1200}ClueInit/.test(source.text),
    )
    .map((source) => source.file_path);
  // Each violation is tagged with the file it came from.
  const unguardedLifecycleCalls = sources.flatMap((source) =>
    findLifecycleGuardViolations(source.text).map((violation) => ({
      file_path: source.file_path,
      ...violation,
    })),
  );
  const unguardedLifecycleFiles = [
    ...new Set(unguardedLifecycleCalls.map((violation) => violation.file_path)),
  ];
  const backendPresent = backendSources.length > 0;
  // Backend checks pass vacuously when no backend sources are in scope.
  const backendSdkPresent =
    !backendPresent ||
    hasAnyMarker(
      `${backendCombined}\n${dependencyCombined}`,
      BACKEND_SDK_MARKERS,
    );
  const backendInitPresent =
    !backendPresent ||
    /clue_init_fastapi|clue_init_django|configure_settings|CluePythonBootstrapConfig/.test(
      backendCombined,
    );
  // Keyword heuristics deciding which lifecycle APIs the backend should call.
  const backendIdentityRequired =
    backendPresent &&
    /\b(login|signin|sign_in|auth|token|session)\b/i.test(backendCombined);
  const backendLogoutRequired =
    backendPresent && /\b(logout|signout|sign_out)\b/i.test(backendCombined);
  const backendAccountRequired =
    backendPresent &&
    /\b(account|workspace|organization|tenant)\b/i.test(backendCombined);
  // Required-but-not-found APIs, judged against backend sources only.
  const backendMissingApis = [
    ...(backendIdentityRequired &&
    !backendFoundApiNames.includes("ClueIdentify")
      ? ["ClueIdentify"]
      : []),
    ...(backendLogoutRequired && !backendFoundApiNames.includes("ClueLogout")
      ? ["ClueLogout"]
      : []),
    ...(backendAccountRequired &&
    !backendFoundApiNames.includes("ClueSetAccount")
      ? ["ClueSetAccount"]
      : []),
  ];
  return {
    foundApis,
    missingApis,
    backend_lifecycle: {
      backend_present: backendPresent,
      backend_root_paths: backendRootPaths,
      dependency_files: dependencySources.map((source) => source.file_path),
      sdk_dependency_or_import_present: backendSdkPresent,
      sdk_init_present: backendInitPresent,
      required_apis: [
        ...(backendIdentityRequired ? ["ClueIdentify"] : []),
        ...(backendAccountRequired ? ["ClueSetAccount"] : []),
        ...(backendLogoutRequired ? ["ClueLogout"] : []),
      ],
      missing_apis: backendMissingApis,
    },
    has_noop_wrapper: noOpPattern.test(combined),
    component_lifecycle_init_files: componentLifecycleInitFiles,
    unguarded_lifecycle_files: unguardedLifecycleFiles,
    unguarded_lifecycle_calls: unguardedLifecycleCalls,
    // Every sub-check must hold for the lifecycle check to pass.
    passed:
      missingApis.length === 0 &&
      backendSdkPresent &&
      backendInitPresent &&
      backendMissingApis.length === 0 &&
      !noOpPattern.test(combined) &&
      componentLifecycleInitFiles.length === 0 &&
      unguardedLifecycleFiles.length === 0,
  };
};
|
|
263
|
+
|
|
264
|
+
/**
 * Run the full setup verification suite against a repository.
 *
 * Checks (in order): installed skills for the chosen agent target, the
 * semantic-snapshot workflow's canonical shape, absence of the committed
 * runtime request file, semantic inventory (when a request is given),
 * secret-value scanning, and — optionally — SDK lifecycle usage.
 *
 * @param {object} args
 * @param {string} [args.repoRoot] - repository root (defaults to cwd).
 * @param {string} [args.target] - agent target ("codex" / "claude_code"); throws on unknown values.
 * @param {object} [args.request] - setup request (allowed/excluded paths, workflow path, …).
 * @param {boolean} [args.requireSdkLifecycle] - also run the SDK lifecycle check.
 * @returns {Promise<{passed: boolean, checks: object[]}>}
 */
export const runSetupCheck = async ({
  repoRoot,
  target,
  request,
  requireSdkLifecycle = false,
}) => {
  const resolvedRepoRoot = resolve(repoRoot ?? ".");
  const checks = [];
  const normalizedTarget = normalizeTarget(target);

  // 1) Skill installation check — only when a target was given.
  if (normalizedTarget) {
    const skillRoot = join(
      resolvedRepoRoot,
      ...TARGET_SKILL_ROOTS[normalizedTarget],
    );
    const missingSkills = [];
    for (const skillName of SETUP_SKILLS) {
      const skillPath = join(skillRoot, skillName, "SKILL.md");
      if (!(await exists(skillPath))) {
        // Report the repo-relative path for readability.
        missingSkills.push(
          join(...TARGET_SKILL_ROOTS[normalizedTarget], skillName, "SKILL.md"),
        );
      }
    }
    addCheck(
      checks,
      "setup_skills",
      missingSkills.length === 0,
      missingSkills.length === 0
        ? "setup skills are installed"
        : "setup skills are missing",
      { missing_files: missingSkills },
    );
  }

  // 2) Workflow shape check: the workflow must contain the canonical
  // invocation and env wiring, and none of the disallowed metadata refs.
  const workflowPath = request?.ci_workflow_path ?? DEFAULT_WORKFLOW_PATH;
  const absoluteWorkflowPath = join(resolvedRepoRoot, workflowPath);
  if (await exists(absoluteWorkflowPath)) {
    const workflow = await readFile(absoluteWorkflowPath, "utf8");
    addCheck(
      checks,
      "semantic_workflow",
      workflow.includes(
        "npx @clue-ai/cli semantic-ci --request-env CLUE_SEMANTIC_REQUEST_JSON --repo .",
      ) &&
        workflow.includes("CLUE_SEMANTIC_REQUEST_JSON: |") &&
        workflow.includes("CLUE_API_KEY: ${{ secrets.CLUE_API_KEY }}") &&
        workflow.includes(
          "CLUE_AI_PROVIDER_API_KEY: ${{ secrets.CLUE_AI_PROVIDER_API_KEY }}",
        ) &&
        workflow.includes("CLUE_AI_PROVIDER: ${{ vars.CLUE_AI_PROVIDER }}") &&
        workflow.includes("permissions:\n contents: read") &&
        workflow.includes("persist-credentials: false") &&
        !disallowedWorkflowMetadataPattern.test(workflow),
      "semantic workflow uses the canonical env runtime request, least-privilege checkout, and privacy-minimized GitHub metadata",
      { workflow_path: workflowPath },
    );
  } else {
    addCheck(
      checks,
      "semantic_workflow",
      false,
      "semantic workflow is missing",
      {
        workflow_path: workflowPath,
      },
    );
  }

  // 3) The runtime request file must never be committed.
  addCheck(
    checks,
    "runtime_request_not_committed",
    !(await exists(
      join(resolvedRepoRoot, ".clue", "semantic-request.runtime.json"),
    )),
    ".clue/semantic-request.runtime.json is not present in the repository",
  );

  // 4) Semantic inventory dry run — failures are reported, not thrown.
  if (request) {
    try {
      const inventory = await runSemanticInventory({
        repoRoot: resolvedRepoRoot,
        request,
      });
      addCheck(
        checks,
        "semantic_inventory",
        true,
        "semantic inventory discovers routes",
        {
          route_count: inventory.route_count,
        },
      );
    } catch (error) {
      addCheck(
        checks,
        "semantic_inventory",
        false,
        "semantic inventory failed",
        {
          error: error instanceof Error ? error.message : String(error),
        },
      );
    }
  }

  // 5) Secret scan over sources, dependency files, and the workflow itself.
  // Frontend directories are widened in only when SDK lifecycle is required.
  const sourcePaths = await setupSourcePaths({
    repoRoot: resolvedRepoRoot,
    request,
    includeFrontend: requireSdkLifecycle,
  });
  const excludedPaths = request?.excluded_source_paths ?? [];
  const sources = await readAllowedSourceText({
    repoRoot: resolvedRepoRoot,
    allowedSourcePaths: sourcePaths,
    excludedSourcePaths: excludedPaths,
  });
  const dependencySources = await readDependencyText({
    repoRoot: resolvedRepoRoot,
    roots: request?.allowed_source_paths ?? [],
  });
  const secretLeaks = findSecretLeaks([
    ...sources,
    ...dependencySources,
    ...((await exists(absoluteWorkflowPath))
      ? [
          {
            file_path: workflowPath,
            text: await readFile(absoluteWorkflowPath, "utf8"),
          },
        ]
      : []),
  ]);
  addCheck(
    checks,
    "no_secret_values",
    secretLeaks.length === 0,
    "no obvious secret values found",
    {
      files: secretLeaks,
    },
  );

  // 6) Optional SDK lifecycle analysis; full findings go into the record.
  if (requireSdkLifecycle) {
    const sdkLifecycle = checkSdkLifecycle({
      backendRootPaths: request?.allowed_source_paths ?? [],
      dependencySources,
      sources,
    });
    addCheck(
      checks,
      "sdk_lifecycle",
      sdkLifecycle.passed,
      "SDK lifecycle calls resolve to real source code instead of no-op globals",
      {
        found_apis: sdkLifecycle.foundApis,
        missing_apis: sdkLifecycle.missingApis,
        backend_lifecycle: sdkLifecycle.backend_lifecycle,
        has_noop_wrapper: sdkLifecycle.has_noop_wrapper,
        component_lifecycle_init_files:
          sdkLifecycle.component_lifecycle_init_files,
        unguarded_lifecycle_files: sdkLifecycle.unguarded_lifecycle_files,
        unguarded_lifecycle_calls: sdkLifecycle.unguarded_lifecycle_calls,
      },
    );
  }

  // Overall result: every individual check must pass.
  const passed = checks.every((check) => check.passed);
  return {
    passed,
    checks,
  };
};
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
import { readdir, readFile } from "node:fs/promises";
|
|
2
|
+
import { dirname, join, relative, resolve } from "node:path";
|
|
3
|
+
import { analyzeFastApiRoutes } from "./fastapi-analyzer.mjs";
|
|
4
|
+
import { listAllowedPythonFiles } from "./path-policy.mjs";
|
|
5
|
+
|
|
6
|
+
// Derive a slug-style service key from the last path segment of a backend
// root: lowercase, runs of characters outside [a-z0-9_-] collapse to "-",
// edge dashes stripped. Throws when nothing usable remains (e.g. "." or "/").
const deriveServiceKeyFromPath = (backendRootPath) => {
  const parts = backendRootPath
    .split("/")
    .map((part) => part.trim())
    .filter(Boolean);
  const lastSegment = parts.at(-1);
  const key = lastSegment
    ?.toLowerCase()
    .replace(/[^a-z0-9_-]+/g, "-")
    .replace(/^-+|-+$/g, "");
  if (!key) {
    throw new Error("service_key cannot be derived from detected backend root");
  }
  return key;
};
|
|
21
|
+
|
|
22
|
+
// Longest common directory prefix of the given file paths, or "." when the
// list is empty or there is no shared prefix. Accepts "/" or "\" separators
// in the input; the result is always "/"-joined.
const commonDirectory = (paths) => {
  const split = paths.map((path) =>
    dirname(path).split(/[\\/]+/).filter(Boolean),
  );
  if (split.length === 0) return ".";
  const [first, ...rest] = split;
  const shared = [];
  for (const [index, part] of first.entries()) {
    if (!rest.every((directory) => directory[index] === part)) break;
    shared.push(part);
  }
  return shared.length > 0 ? shared.join("/") : ".";
};
|
|
36
|
+
|
|
37
|
+
// Directory names skipped while walking the repo for package.json files:
// VCS metadata, framework/build output, and installed dependencies.
const ignoredPackageDirs = new Set([
  ".git",
  ".next",
  ".turbo",
  "coverage",
  "dist",
  "node_modules",
]);
|
|
45
|
+
|
|
46
|
+
// Recursively collect repo-relative paths of every package.json under
// repoRoot, skipping ignoredPackageDirs subtrees entirely.
const listPackageJsonFiles = async ({ repoRoot, currentPath = "." }) => {
  const entries = await readdir(join(repoRoot, currentPath), {
    withFileTypes: true,
  });
  const found = [];
  for (const entry of entries) {
    const entryPath = join(currentPath, entry.name);
    if (entry.isFile() && entry.name === "package.json") {
      found.push(entryPath);
    } else if (entry.isDirectory() && !ignoredPackageDirs.has(entry.name)) {
      const nested = await listPackageJsonFiles({
        repoRoot,
        currentPath: entryPath,
      });
      found.push(...nested);
    }
  }
  return found;
};
|
|
67
|
+
|
|
68
|
+
// Read a file as UTF-8 and parse its contents as JSON.
const readJsonFile = async (path) => {
  const raw = await readFile(path, "utf8");
  return JSON.parse(raw);
};
|
|
69
|
+
|
|
70
|
+
// Build a slug service key for a frontend package: prefer package.json
// "name", falling back to the package's directory; keep only the final "/"
// segment, lowercase it, collapse illegal runs to "-", trim edge dashes.
const normalizePackageServiceKey = ({ packageJson, packagePath }) => {
  const name = typeof packageJson.name === "string" ? packageJson.name : "";
  const basis = name !== "" ? name : dirname(packagePath);
  const lastSegment = basis.split("/").at(-1) ?? basis;
  return lastSegment
    .toLowerCase()
    .replace(/[^a-z0-9_-]+/g, "-")
    .replace(/^-+|-+$/g, "");
};
|
|
80
|
+
|
|
81
|
+
// Extract a dev-server port from package.json scripts.
// Recognizes `--port 5173` / `-p 4000`, the equals forms `--port=5173` /
// `-p=4000` common in vite/next CLIs, and `PORT=8080` env prefixes.
// Returns the first match as a string, or null when none is found.
// Accepts a missing/undefined scripts object.
const portFromScripts = (scripts) => {
  const scriptText = Object.values(scripts ?? {})
    .filter((value) => typeof value === "string")
    .join("\n");
  // [=\s]+ accepts both whitespace and "=" between the flag and the number
  // (the original only matched whitespace, missing `--port=5173`).
  const portFlag = /(?:--port|-p)[=\s]+(\d{2,5})/.exec(scriptText);
  if (portFlag) return portFlag[1];
  const envPort = /(?:^|\s)PORT=(\d{2,5})(?:\s|$)/.exec(scriptText);
  return envPort?.[1] ?? null;
};
|
|
90
|
+
|
|
91
|
+
// Detect frontend packages by walking package.json files and matching known
// framework dependencies. Detector order encodes specificity: next before
// react (a Next app also depends on react), etc. Returns service records
// sorted by service_key.
const detectFrontendServices = async ({ repoRoot }) => {
  const detectors = [
    ["next", "nextjs"],
    ["vite", "vite"],
    ["@angular/core", "angular"],
    ["vue", "vue"],
    ["react", "react"],
  ];
  const services = [];
  for (const packagePath of await listPackageJsonFiles({ repoRoot })) {
    const packageJson = await readJsonFile(join(repoRoot, packagePath));
    // devDependencies count too (e.g. vite usually lives there).
    const dependencies = {
      ...(packageJson.dependencies ?? {}),
      ...(packageJson.devDependencies ?? {}),
    };
    const detector = detectors.find(([dep]) => dependencies[dep]);
    if (!detector) continue;
    const serviceKey = normalizePackageServiceKey({
      packageJson,
      packagePath,
    });
    if (!serviceKey) continue;
    const port = portFromScripts(packageJson.scripts);
    services.push({
      kind: "frontend",
      framework: detector[1],
      root_path: dirname(packagePath),
      service_key: serviceKey,
      local_url_candidates: port ? [`http://localhost:${port}`] : [],
    });
  }
  return services.sort((left, right) =>
    left.service_key.localeCompare(right.service_key),
  );
};
|
|
131
|
+
|
|
132
|
+
/**
 * Mechanically detect the repository's services for Clue setup.
 *
 * Frontend packages are found via package.json dependency markers; the
 * backend is found by statically discovering FastAPI routes in Python files
 * (see fastapi-analyzer.mjs for route discovery semantics).
 *
 * @param {object} args
 * @param {string} [args.repoRoot] - repository root (defaults to cwd).
 * @param {string[]} [args.excludedSourcePaths] - paths excluded from the Python scan.
 * @returns {Promise<object>} detection result: blockers, candidates, services.
 */
export const runSetupDetect = async ({ repoRoot, excludedSourcePaths = [] }) => {
  const resolvedRepoRoot = resolve(repoRoot ?? ".");
  const frontendServices = await detectFrontendServices({
    repoRoot: resolvedRepoRoot,
  });
  // Scan all allowed Python files in the repo for FastAPI routes.
  const files = await listAllowedPythonFiles({
    repoRoot: resolvedRepoRoot,
    allowedSourcePaths: ["."],
    excludedSourcePaths,
  });
  const fastApiRoutes = await analyzeFastApiRoutes({ repoRoot: resolvedRepoRoot, files });
  if (fastApiRoutes.length === 0) {
    // No backend detected: report a blocker, still return any frontends.
    return {
      detected: false,
      blockers: [
        {
          code: "NO_FASTAPI_ROUTES",
          message: "No FastAPI routes were mechanically discovered.",
        },
      ],
      candidates: [],
      services: {
        frontend: frontendServices,
        backend: [],
      },
    };
  }

  // Unique, sorted repo-relative files defining routes; their deepest common
  // directory is taken as the backend root.
  const routeFiles = [
    ...new Set(
      fastApiRoutes.map((route) => route.ai_context.relative_path).filter(Boolean),
    ),
  ].sort();
  const backendRootPath = commonDirectory(routeFiles);
  // NOTE(review): when routes sit at the repo root, commonDirectory returns
  // "." and deriveServiceKeyFromPath throws — confirm this hard failure is
  // the intended behavior rather than a blocker entry.
  const backendServiceKey = deriveServiceKeyFromPath(backendRootPath);
  return {
    detected: true,
    blockers: [],
    candidates: [
      {
        framework: "fastapi",
        backend_root_path: backendRootPath,
        service_key: backendServiceKey,
        route_count: fastApiRoutes.length,
        route_files: routeFiles,
        operation_source_keys: fastApiRoutes
          .map((route) => route.operation_source_key)
          .sort(),
      },
    ],
    services: {
      frontend: frontendServices,
      backend: [
        {
          kind: "backend",
          framework: "fastapi",
          root_path: backendRootPath,
          service_key: backendServiceKey,
          local_url_candidates: [],
        },
      ],
    },
    excluded_source_paths: excludedSourcePaths,
    scanned_python_file_count: files.length,
    // Report the repo root relative to the cwd ("." when they coincide).
    repo_root: relative(process.cwd(), resolvedRepoRoot) || ".",
  };
};
|