@fro.bot/systematic 2.2.0 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. package/agents/document-review/adversarial-document-reviewer.md +87 -0
  2. package/agents/review/adversarial-reviewer.md +107 -0
  3. package/agents/review/cli-agent-readiness-reviewer.md +443 -0
  4. package/agents/review/cli-readiness-reviewer.md +69 -0
  5. package/agents/review/previous-comments-reviewer.md +64 -0
  6. package/agents/review/project-standards-reviewer.md +80 -0
  7. package/package.json +1 -1
  8. package/skills/ce-compound/assets/resolution-template.md +90 -0
  9. package/skills/ce-compound/references/schema.yaml +222 -0
  10. package/skills/ce-compound/references/yaml-schema.md +87 -0
  11. package/skills/ce-compound-refresh/assets/resolution-template.md +90 -0
  12. package/skills/ce-compound-refresh/references/schema.yaml +222 -0
  13. package/skills/ce-compound-refresh/references/yaml-schema.md +87 -0
  14. package/skills/{ce-review-beta → ce-review}/references/findings-schema.json +8 -7
  15. package/skills/ce-review/references/persona-catalog.md +67 -0
  16. package/skills/ce-review/references/resolve-base.sh +94 -0
  17. package/skills/{ce-review-beta → ce-review}/references/review-output-template.md +36 -3
  18. package/skills/ce-review/references/subagent-template.md +84 -0
  19. package/skills/claude-permissions-optimizer/scripts/extract-commands.mjs +2 -2
  20. package/skills/claude-permissions-optimizer/scripts/normalize.mjs +8 -8
  21. package/skills/document-review/references/findings-schema.json +109 -0
  22. package/skills/document-review/references/review-output-template.md +89 -0
  23. package/skills/document-review/references/subagent-template.md +57 -0
  24. package/skills/git-clean-gone-branches/SKILL.md +63 -0
  25. package/skills/git-clean-gone-branches/scripts/clean-gone +48 -0
  26. package/skills/git-commit/SKILL.md +103 -0
  27. package/skills/git-commit-push-pr/SKILL.md +419 -0
  28. package/skills/onboarding/SKILL.md +407 -0
  29. package/skills/onboarding/scripts/inventory.mjs +1043 -0
  30. package/skills/resolve-pr-feedback/SKILL.md +374 -0
  31. package/skills/resolve-pr-feedback/scripts/get-pr-comments +104 -0
  32. package/skills/resolve-pr-feedback/scripts/get-thread-for-comment +58 -0
  33. package/skills/resolve-pr-feedback/scripts/reply-to-pr-thread +33 -0
  34. package/skills/{resolve-pr-parallel → resolve-pr-feedback}/scripts/resolve-pr-thread +0 -0
  35. package/skills/todo-create/SKILL.md +109 -0
  36. package/skills/todo-resolve/SKILL.md +68 -0
  37. package/skills/todo-triage/SKILL.md +70 -0
  38. package/skills/ce-review-beta/SKILL.md +0 -506
  39. package/skills/ce-review-beta/references/persona-catalog.md +0 -50
  40. package/skills/ce-review-beta/references/subagent-template.md +0 -56
  41. package/skills/file-todos/SKILL.md +0 -231
  42. package/skills/resolve-pr-parallel/SKILL.md +0 -96
  43. package/skills/resolve-pr-parallel/scripts/get-pr-comments +0 -68
  44. package/skills/resolve-todo-parallel/SKILL.md +0 -68
  45. package/skills/triage/SKILL.md +0 -312
  46. package/skills/workflows-brainstorm/SKILL.md +0 -11
  47. package/skills/workflows-compound/SKILL.md +0 -10
  48. package/skills/workflows-plan/SKILL.md +0 -10
  49. package/skills/workflows-review/SKILL.md +0 -10
  50. package/skills/workflows-work/SKILL.md +0 -10
  51. package/skills/{ce-review-beta → ce-review}/references/diff-scope.md +0 -0
  52. package/skills/{file-todos → todo-create}/assets/todo-template.md +0 -0
@@ -0,0 +1,1043 @@
1
+ #!/usr/bin/env node
2
+
3
+ // Produces a structured JSON inventory of a repository for the onboarding skill.
4
+ // Gathers file tree, manifest data, framework detection, entry points, scripts,
5
+ // existing documentation, and test infrastructure — all deterministic work that
6
+ // shouldn't burn model tokens.
7
+ //
8
+ // Usage: node inventory.mjs [--root <path>]
9
+ //
10
+ // Output: JSON to stdout
11
+
12
+ import { access, readdir, readFile } from 'node:fs/promises'
13
+ import { basename, join, resolve } from 'node:path'
14
+
15
+ const args = process.argv.slice(2)
16
+
17
// Value of `--name <value>` from argv, or `fallback` when the flag is
// absent or no truthy value follows it.
function flag(name, fallback) {
  const at = args.indexOf(`--${name}`)
  if (at === -1) return fallback
  return args[at + 1] || fallback
}
21
+
22
+ const root = flag('root', process.cwd())
23
+
24
+ // ── Exclusions ────────────────────────────────────────────────────────────────
25
+
26
// Directories never worth walking: dependency trees, VCS internals, build
// output, caches, virtualenvs, and editor/IDE state across ecosystems.
const EXCLUDED_DIRS = new Set([
  'node_modules',
  '.git',
  'vendor',
  'target', // Rust/Maven build output
  'dist',
  'build',
  '__pycache__',
  '.next',
  '.cache',
  '.turbo',
  '.nuxt',
  '.output',
  '.svelte-kit',
  '.parcel-cache',
  'coverage',
  '.pytest_cache',
  '.mypy_cache',
  '.tox',
  'venv',
  '.venv',
  'env',
  '.env',
  'bower_components',
  '.gradle',
  '.idea',
  '.vscode',
  'Pods', // CocoaPods
  'DerivedData', // Xcode
  'xcuserdata',
])
57
+
58
+ // ── Helpers ───────────────────────────────────────────────────────────────────
59
+
60
// True if the path is accessible on disk, false otherwise (never throws).
async function exists(p) {
  return access(p).then(
    () => true,
    () => false,
  )
}
68
+
69
// Parse a JSON file; null when the file is missing, unreadable, or malformed.
async function readJson(p) {
  try {
    const raw = await readFile(p, 'utf-8')
    return JSON.parse(raw)
  } catch {
    return null
  }
}
76
+
77
// Read a UTF-8 text file; null when the file is missing or unreadable.
async function readText(p) {
  return readFile(p, 'utf-8').catch(() => null)
}
84
+
85
// List directory entries as Dirents. Dotfile entries are filtered out
// unless includeDotfiles is set; '.github' is always kept. Missing or
// unreadable directories yield an empty list.
async function listDir(dir, { includeDotfiles = false } = {}) {
  let entries
  try {
    entries = await readdir(dir, { withFileTypes: true })
  } catch {
    return []
  }
  if (includeDotfiles) return entries
  const visible = (e) => e.name === '.github' || !e.name.startsWith('.')
  return entries.filter(visible)
}
96
+
97
// Names of visible, non-excluded subdirectories of `dir`, each with a
// trailing slash.
async function listDirNames(dir) {
  const names = []
  for (const entry of await listDir(dir)) {
    if (entry.isDirectory() && !EXCLUDED_DIRS.has(entry.name)) {
      names.push(`${entry.name}/`)
    }
  }
  return names
}
103
+
104
// Names of plain files directly inside `dir` (opts forwarded to listDir).
async function listFileNames(dir, opts) {
  const entries = await listDir(dir, opts)
  const files = entries.filter((e) => e.isFile())
  return files.map((e) => e.name)
}
108
+
109
// Non-recursive glob: file names in `dir` ending in any of `extensions`
// (all files when extensions is omitted).
async function globShallow(dir, extensions) {
  const files = await listFileNames(dir)
  if (!extensions) return files
  const matches = (name) => extensions.some((ext) => name.endsWith(ext))
  return files.filter(matches)
}
114
+
115
+ // ── Project Name ──────────────────────────────────────────────────────────────
116
+
117
// Best-effort project name: manifests in priority order, then the
// directory basename as a last resort.
async function detectName() {
  const pkg = await readJson(join(root, 'package.json'))
  if (pkg?.name) return pkg.name

  const cargo = await readText(join(root, 'Cargo.toml'))
  const cargoName = cargo?.match(/\[package\][\s\S]*?name\s*=\s*"([^"]+)"/)
  if (cargoName) return cargoName[1]

  const gomod = await readText(join(root, 'go.mod'))
  const moduleLine = gomod?.match(/^module\s+(.+)/m)
  if (moduleLine) {
    const segments = moduleLine[1].split('/')
    let name = segments.pop()
    // A trailing /vN path segment is Go's major-version suffix, not the name.
    if (/^v\d+$/.test(name) && segments.length > 0) name = segments.pop()
    return name
  }

  const pyproject = await readText(join(root, 'pyproject.toml'))
  // NOTE(review): grabs the first `name = "..."` anywhere in the file, so a
  // [tool.*] table appearing before [project] could win — confirm acceptable.
  const pyName = pyproject?.match(/name\s*=\s*"([^"]+)"/)
  if (pyName) return pyName[1]

  const [gemspec] = await globShallow(root, ['.gemspec'])
  if (gemspec) {
    const content = await readText(join(root, gemspec))
    const specName = content?.match(/\.name\s*=\s*["']([^"']+)["']/)
    if (specName) return specName[1]
  }

  return basename(resolve(root))
}
156
+
157
+ // ── Language & Framework Detection ────────────────────────────────────────────
158
+
159
// Root manifest file -> ecosystem/language it implies. Order matters: the
// scan preserves this order when building the languages list.
const MANIFEST_MAP = [
  { file: 'package.json', ecosystem: 'Node.js' },
  { file: 'tsconfig.json', ecosystem: 'TypeScript' },
  { file: 'go.mod', ecosystem: 'Go' },
  { file: 'Cargo.toml', ecosystem: 'Rust' },
  { file: 'Gemfile', ecosystem: 'Ruby' },
  { file: 'requirements.txt', ecosystem: 'Python' },
  { file: 'pyproject.toml', ecosystem: 'Python' },
  { file: 'Pipfile', ecosystem: 'Python' },
  { file: 'setup.py', ecosystem: 'Python' },
  { file: 'mix.exs', ecosystem: 'Elixir' },
  { file: 'composer.json', ecosystem: 'PHP' },
  { file: 'pubspec.yaml', ecosystem: 'Dart/Flutter' },
  { file: 'Package.swift', ecosystem: 'Swift' },
  { file: 'pom.xml', ecosystem: 'Java' },
  { file: 'build.gradle', ecosystem: 'JVM' },
  { file: 'build.gradle.kts', ecosystem: 'Kotlin/JVM' },
  { file: 'CMakeLists.txt', ecosystem: 'C/C++' },
  { file: 'Makefile', ecosystem: null }, // too generic to infer a language
  { file: 'deno.json', ecosystem: 'Deno' },
  { file: 'deno.jsonc', ecosystem: 'Deno' },
]
181
+
182
+ // Layer 3: Config-file-based framework detection/confirmation.
183
+ // These config files are strong signals even when dependencies are ambiguous.
184
+ // Pattern follows Vercel's fs-detectors and Netlify's framework-info.
185
// Layer 3: config-file-based framework detection/confirmation. Such files
// are strong signals even when dependency data is ambiguous. Pattern
// follows Vercel's fs-detectors and Netlify's framework-info.
const CONFIG_FILE_FRAMEWORKS = [
  { file: 'next.config.js', framework: 'Next.js' },
  { file: 'next.config.mjs', framework: 'Next.js' },
  { file: 'next.config.ts', framework: 'Next.js' },
  { file: 'nuxt.config.ts', framework: 'Nuxt' },
  { file: 'nuxt.config.js', framework: 'Nuxt' },
  { file: 'vite.config.ts', framework: 'Vite' },
  { file: 'vite.config.js', framework: 'Vite' },
  { file: 'vite.config.mts', framework: 'Vite' },
  { file: 'astro.config.mjs', framework: 'Astro' },
  { file: 'astro.config.ts', framework: 'Astro' },
  { file: 'svelte.config.js', framework: 'SvelteKit' },
  { file: 'svelte.config.ts', framework: 'SvelteKit' },
  { file: 'gatsby-config.js', framework: 'Gatsby' },
  { file: 'gatsby-config.ts', framework: 'Gatsby' },
  { file: 'angular.json', framework: 'Angular' },
  { file: 'remix.config.js', framework: 'Remix' },
  { file: 'remix.config.ts', framework: 'Remix' },
  { file: 'ember-cli-build.js', framework: 'Ember' },
  { file: 'quasar.config.js', framework: 'Quasar' },
  { file: 'ionic.config.json', framework: 'Ionic' },
  { file: 'electron-builder.json', framework: 'Electron' },
  { file: 'electron-builder.yml', framework: 'Electron' },
  { file: 'tauri.conf.json', framework: 'Tauri' },
  { file: 'expo-env.d.ts', framework: 'Expo' },
  { file: 'app.json', framework: null }, // too ambiguous on its own
  { file: 'webpack.config.js', framework: 'Webpack' },
  { file: 'webpack.config.ts', framework: 'Webpack' },
  { file: 'rollup.config.js', framework: 'Rollup' },
  { file: 'turbo.json', framework: 'Turborepo' },
  // Python
  { file: 'manage.py', framework: 'Django' },
  // Ruby
  { file: 'config/routes.rb', framework: 'Rails' },
  { file: 'config.ru', framework: 'Rack' },
  // PHP
  { file: 'artisan', framework: 'Laravel' },
  { file: 'symfony.lock', framework: 'Symfony' },
  // Elixir
  { file: 'config/config.exs', framework: 'Phoenix' },
]
226
+
227
+ // Known frameworks detectable from package.json dependencies.
228
+ // Sourced from Vercel's frameworks.ts and Netlify's framework-info definitions.
229
// package.json dependency name -> framework display name. Sourced from
// Vercel's frameworks.ts and Netlify's framework-info definitions.
const NODE_FRAMEWORKS = {
  // Meta-frameworks / SSR
  next: 'Next.js',
  nuxt: 'Nuxt',
  '@sveltejs/kit': 'SvelteKit',
  '@remix-run/node': 'Remix',
  remix: 'Remix',
  gatsby: 'Gatsby',
  astro: 'Astro',
  '@builder.io/qwik': 'Qwik',
  '@tanstack/react-start': 'TanStack Start',
  '@analogjs/platform': 'Analog',
  // UI libraries
  react: 'React',
  vue: 'Vue',
  svelte: 'Svelte',
  '@angular/core': 'Angular',
  'solid-js': 'Solid',
  preact: 'Preact',
  lit: 'Lit',
  // Server frameworks
  express: 'Express',
  fastify: 'Fastify',
  hono: 'Hono',
  koa: 'Koa',
  '@nestjs/core': 'NestJS',
  h3: 'H3',
  nitro: 'Nitro',
  '@elysiajs/core': 'Elysia',
  elysia: 'Elysia',
  // Build tools
  vite: 'Vite',
  esbuild: 'esbuild',
  webpack: 'Webpack',
  turbo: 'Turborepo',
  // Desktop / Mobile
  electron: 'Electron',
  tauri: 'Tauri',
  expo: 'Expo',
  'react-native': 'React Native',
  // Documentation / Static
  vitepress: 'VitePress',
  vuepress: 'VuePress',
  '@docusaurus/core': 'Docusaurus',
  '@storybook/core': 'Storybook',
  '11ty': 'Eleventy',
  '@11ty/eleventy': 'Eleventy',
  // E-commerce
  '@shopify/hydrogen': 'Hydrogen',
}
279
+
280
+ // Exclusion rules: if these packages are present, suppress the indicated framework.
281
+ // Prevents false positives from monorepo wrappers. (Pattern from Netlify)
282
// If any listed package is present, suppress the keyed framework — avoids
// false positives from monorepo wrappers. (Pattern from Netlify.)
const NODE_FRAMEWORK_EXCLUSIONS = {
  'Next.js': ['@nrwl/next'], // Nx wrapper — different build config
}
285
+
286
// package.json dependency name -> test framework display name. Iteration
// order doubles as priority: the first match wins.
const NODE_TEST_FRAMEWORKS = {
  jest: 'Jest',
  vitest: 'Vitest',
  mocha: 'Mocha',
  '@playwright/test': 'Playwright',
  cypress: 'Cypress',
  ava: 'AVA',
  tap: 'tap',
  'bun:test': 'Bun test',
}
296
+
297
// Detect languages (from manifest files), frameworks (from dependencies,
// manifest contents, and config files), the JS package manager, and the
// test framework.
//
// Returns { languages: string[], frameworks: string[],
//           packageManager: string|null, testFramework: string|null }.
async function detectLanguagesAndFrameworks() {
  const languages = new Set()
  const frameworks = []
  // Base framework names already recorded, used to dedupe the config-file
  // layer. Tracked at push time: re-deriving names with `f.split(' ')[0]`
  // (the previous approach) mangled multi-word names such as
  // "React Native 0.72" (-> "React") or "Gorilla Mux" (-> "Gorilla").
  const frameworkNames = new Set()
  const addFramework = (name, version) => {
    frameworks.push(version ? `${name} ${version}` : name)
    frameworkNames.add(name)
  }
  let packageManager = null
  let testFramework = null

  const rootFiles = await listFileNames(root)

  for (const { file, ecosystem } of MANIFEST_MAP) {
    if (ecosystem && rootFiles.includes(file)) languages.add(ecosystem)
  }

  // package.json deep inspection
  const pkg = await readJson(join(root, 'package.json'))
  if (pkg) {
    const allDeps = { ...pkg.dependencies, ...pkg.devDependencies }

    for (const [dep, fw] of Object.entries(NODE_FRAMEWORKS)) {
      if (!allDeps[dep]) continue
      // Suppress known false positives (monorepo wrappers etc.).
      const exclusions = NODE_FRAMEWORK_EXCLUSIONS[fw]
      if (exclusions && exclusions.some((ex) => allDeps[ex])) continue
      // Strip range operators; keep the first token of compound ranges.
      const ver = allDeps[dep].replace(/[\^~>=<]/g, '').split(' ')[0]
      addFramework(fw, ver)
    }

    for (const [dep, name] of Object.entries(NODE_TEST_FRAMEWORKS)) {
      if (allDeps[dep]) {
        testFramework = name
        break
      }
    }
  }

  // Lockfile-based package-manager detection — independent of package.json
  // so workspace roots that only carry a lockfile are still detected.
  if (rootFiles.includes('bun.lockb') || rootFiles.includes('bun.lock')) {
    packageManager = 'bun'
  } else if (rootFiles.includes('pnpm-lock.yaml')) {
    packageManager = 'pnpm'
  } else if (rootFiles.includes('yarn.lock')) {
    packageManager = 'yarn'
  } else if (rootFiles.includes('package-lock.json')) {
    packageManager = 'npm'
  }

  // Ruby framework detection
  if (languages.has('Ruby')) {
    const gemfile = await readText(join(root, 'Gemfile'))
    if (gemfile) {
      const gems = [
        ['rails', 'Rails'],
        ['sinatra', 'Sinatra'],
        ['hanami', 'Hanami'],
        ['grape', 'Grape'],
        ['roda', 'Roda'],
      ]
      for (const [gem, fw] of gems) {
        if (new RegExp(`gem\\s+['"]${gem}['"]`).test(gemfile)) addFramework(fw)
      }
      if (/gem\s+['"]rspec['"]/.test(gemfile)) {
        testFramework = testFramework || 'RSpec'
      } else if (/gem\s+['"]minitest['"]/.test(gemfile)) {
        testFramework = testFramework || 'Minitest'
      }
    }
  }

  // Python framework detection (requirements.txt, pyproject.toml, Pipfile)
  if (languages.has('Python')) {
    const combined = [
      await readText(join(root, 'requirements.txt')),
      await readText(join(root, 'pyproject.toml')),
      await readText(join(root, 'Pipfile')),
    ]
      .map((t) => t || '')
      .join('')

    const pyFrameworks = [
      ['django', 'Django'],
      ['fastapi', 'FastAPI'],
      ['flask', 'Flask'],
      ['starlette', 'Starlette'],
      ['streamlit', 'Streamlit'],
      ['gradio', 'Gradio'],
      ['celery', 'Celery'],
      ['sanic', 'Sanic'],
      ['tornado', 'Tornado'],
    ]
    for (const [pat, fw] of pyFrameworks) {
      if (new RegExp(`\\b${pat}\\b`, 'i').test(combined)) addFramework(fw)
    }

    if (/\bpytest\b/i.test(combined)) testFramework = testFramework || 'pytest'
    if (rootFiles.includes('pytest.ini') || rootFiles.includes('conftest.py')) {
      testFramework = testFramework || 'pytest'
    }
    if (/\bunittest\b/i.test(combined)) {
      testFramework = testFramework || 'unittest'
    }
  }

  // Go framework detection
  if (languages.has('Go')) {
    const gomod = await readText(join(root, 'go.mod'))
    if (gomod) {
      const goModules = [
        ['github.com/gin-gonic/gin', 'Gin'],
        ['github.com/labstack/echo', 'Echo'],
        ['github.com/gofiber/fiber', 'Fiber'],
        ['github.com/gorilla/mux', 'Gorilla Mux'],
        ['github.com/go-chi/chi', 'Chi'],
        ['google.golang.org/grpc', 'gRPC'],
        ['github.com/bufbuild/connect-go', 'Connect'],
      ]
      for (const [mod, fw] of goModules) {
        if (gomod.includes(mod)) addFramework(fw)
      }
    }
    testFramework = testFramework || 'go test'
  }

  // Rust framework detection
  if (languages.has('Rust')) {
    const cargo = await readText(join(root, 'Cargo.toml'))
    if (cargo) {
      const crates = [
        ['actix-web', 'Actix Web'],
        ['axum', 'Axum'],
        ['rocket', 'Rocket'],
        ['warp', 'Warp'],
        ['tokio', 'Tokio'],
        ['tauri', 'Tauri'],
      ]
      for (const [crate, fw] of crates) {
        if (new RegExp(`\\b${crate}\\b`).test(cargo)) addFramework(fw)
      }
    }
  }

  // PHP framework detection
  if (languages.has('PHP')) {
    const composer = await readJson(join(root, 'composer.json'))
    if (composer) {
      const phpDeps = { ...composer.require, ...composer['require-dev'] }
      if (phpDeps['laravel/framework']) addFramework('Laravel')
      if (phpDeps['symfony/framework-bundle']) addFramework('Symfony')
      if (phpDeps['slim/slim']) addFramework('Slim')
      if (phpDeps['phpunit/phpunit']) testFramework = testFramework || 'PHPUnit'
      if (phpDeps['pestphp/pest']) testFramework = testFramework || 'Pest'
    }
  }

  // Elixir framework detection
  if (languages.has('Elixir')) {
    const mixfile = await readText(join(root, 'mix.exs'))
    if (mixfile) {
      if (/:phoenix\b/.test(mixfile)) addFramework('Phoenix')
      if (/:plug\b/.test(mixfile)) addFramework('Plug')
    }
  }

  // Rust test framework (kept after PHP so PHPUnit/Pest can win in
  // polyglot repos, matching the original priority order).
  if (languages.has('Rust')) {
    testFramework = testFramework || 'cargo test'
  }

  // Fallback: infer the test framework from the `test` script command.
  if (!testFramework && pkg?.scripts?.test) {
    const testCmd = pkg.scripts.test
    if (/\bbun\s+test\b/.test(testCmd)) testFramework = 'bun test'
    else if (/\bjest\b/.test(testCmd)) testFramework = 'Jest'
    else if (/\bvitest\b/.test(testCmd)) testFramework = 'Vitest'
    else if (/\bmocha\b/.test(testCmd)) testFramework = 'Mocha'
    else if (/\bpytest\b/.test(testCmd)) testFramework = 'pytest'
    else if (/\brspec\b/.test(testCmd)) testFramework = 'RSpec'
  }

  // Layer 3: config files catch frameworks missed by dependency scanning
  // and confirm ambiguous cases.
  const uncheckedConfigs = CONFIG_FILE_FRAMEWORKS.filter(
    ({ framework }) => framework && !frameworkNames.has(framework),
  )
  const configResults = await Promise.all(
    uncheckedConfigs.map(async ({ file, framework }) => ({
      framework,
      found: await exists(join(root, file)),
    })),
  )
  for (const { framework, found } of configResults) {
    if (found && !frameworkNames.has(framework)) addFramework(framework)
  }

  return {
    languages: [...languages],
    frameworks,
    packageManager,
    testFramework,
  }
}
479
+
480
+ // ── Directory Structure ───────────────────────────────────────────────────────
481
+
482
// Snapshot the repository layout: top-level entries (dirs marked with a
// trailing slash) plus one level of detail inside common source dirs.
async function getStructure() {
  const topLevel = []
  for (const entry of await listDir(root)) {
    if (EXCLUDED_DIRS.has(entry.name)) continue
    topLevel.push(entry.isDirectory() ? `${entry.name}/` : entry.name)
  }

  const srcLayout = {}
  const srcDirs = ['src', 'lib', 'app', 'pkg', 'internal', 'cmd', 'server', 'api']
  for (const dir of srcDirs) {
    const dirPath = join(root, dir)
    if (!(await exists(dirPath))) continue
    const dirs = await listDirNames(dirPath)
    const files = await listFileNames(dirPath)
    if (dirs.length === 0 && files.length === 0) continue
    srcLayout[dir] = {
      dirs,
      files: files.slice(0, 10), // cap the file listing
    }
  }

  return { topLevel, srcLayout }
}
523
+
524
+ // ── Entry Points ──────────────────────────────────────────────────────────────
525
+
526
+ // Helper: check a batch of candidate paths, return those that exist.
527
// Check a batch of root-relative candidate paths in parallel; return the
// ones that exist, preserving input order.
async function filterExisting(candidates) {
  const checks = candidates.map(async (p) =>
    (await exists(join(root, p))) ? p : null,
  )
  return (await Promise.all(checks)).filter(Boolean)
}
533
+
534
// Locate likely entry-point files for the detected languages. Fixed-path
// candidates are checked in a single parallel batch; patterns that depend
// on directory contents (src/*/__main__.py, cmd/*/main.go) are listed
// afterwards. Returns a de-duplicated list of root-relative paths.
async function findEntryPoints(languages) {
  const langSet = new Set(languages)
  const isJsLike =
    langSet.has('Node.js') || langSet.has('TypeScript') || langSet.has('Deno')

  // Universal entry names, checked at the root and under src/.
  const scriptExts = ['.ts', '.js', '.mjs', '.tsx', '.jsx']
  const universal = [
    ...['index', 'main', 'app'].flatMap((base) =>
      scriptExts.map((ext) => base + ext),
    ),
    ...['.ts', '.js', '.mjs'].map((ext) => `server${ext}`),
  ]
  const candidates = [...universal, ...universal.map((f) => `src/${f}`)]

  if (isJsLike) {
    candidates.push(
      'app/page.tsx',
      'app/page.jsx',
      'app/layout.tsx',
      'app/layout.jsx',
      'src/app/page.tsx',
      'src/app/page.jsx',
      'src/app/layout.tsx',
      'src/app/layout.jsx',
      'pages/index.tsx',
      'pages/index.jsx',
      'pages/index.js',
      'src/pages/index.tsx',
      'src/pages/index.jsx',
    )
  }

  if (langSet.has('Python')) {
    candidates.push(
      'main.py',
      'app.py',
      'manage.py',
      'run.py',
      'wsgi.py',
      'asgi.py',
      'src/main.py',
      'src/app.py',
    )
  }

  if (langSet.has('Ruby')) {
    candidates.push(
      'config.ru',
      'config/routes.rb',
      'config/application.rb',
      'bin/rails',
      'Rakefile',
    )
  }

  if (langSet.has('Go')) candidates.push('main.go')
  if (langSet.has('Rust')) candidates.push('src/main.rs', 'src/lib.rs')

  // Single parallel batch for all fixed-path candidates.
  const entryPoints = await filterExisting(candidates)

  // package.json main/module fields may point at files not in the fixed list.
  if (isJsLike) {
    const pkg = await readJson(join(root, 'package.json'))
    for (const field of [pkg?.main, pkg?.module]) {
      if (!field || entryPoints.includes(field)) continue
      if (await exists(join(root, field))) entryPoints.push(field)
    }
  }

  // Python: packages under src/ may expose runnable __main__.py modules.
  if (langSet.has('Python')) {
    const srcEntries = await listDir(join(root, 'src'))
    const pyMains = await filterExisting(
      srcEntries
        .filter((e) => e.isDirectory())
        .map((e) => `src/${e.name}/__main__.py`),
    )
    entryPoints.push(...pyMains)
  }

  // Go convention: one binary per cmd/<name>/main.go.
  if (langSet.has('Go')) {
    const cmdDir = join(root, 'cmd')
    if (await exists(cmdDir)) {
      const cmds = await listDir(cmdDir)
      const goMains = await filterExisting(
        cmds.filter((c) => c.isDirectory()).map((c) => `cmd/${c.name}/main.go`),
      )
      entryPoints.push(...goMains)
    }
  }

  return [...new Set(entryPoints)]
}
664
+
665
+ // ── Scripts / Commands ────────────────────────────────────────────────────────
666
+
667
// Collect runnable commands from package.json scripts, Makefile targets,
// and Procfile process entries. Returns a name -> command map, bounded to
// keep output small.
async function detectScripts() {
  const scripts = {}

  // package.json scripts: well-known names first, then any others up to a cap.
  const pkg = await readJson(join(root, 'package.json'))
  if (pkg?.scripts) {
    const important = [
      'dev',
      'start',
      'build',
      'test',
      'lint',
      'serve',
      'preview',
      'typecheck',
      'check',
      'format',
      'migrate',
    ]
    for (const key of important) {
      if (pkg.scripts[key]) scripts[key] = pkg.scripts[key]
    }
    for (const [key, val] of Object.entries(pkg.scripts)) {
      if (!scripts[key] && Object.keys(scripts).length < 15) {
        scripts[key] = val
      }
    }
  }

  // Makefile targets — always included alongside npm scripts for polyglot
  // repos. The `(?!=)` lookahead skips `VAR := value` simple-assignments,
  // which the previous pattern wrongly reported as targets. (POSIX `::=`
  // assignments can still slip through; rare enough to ignore.)
  const makefile = await readText(join(root, 'Makefile'))
  if (makefile) {
    const targets = makefile.match(/^([a-zA-Z_][\w-]*)\s*:(?!=)/gm)
    if (targets) {
      for (const t of targets.slice(0, 15)) {
        const name = t.replace(':', '').trim()
        if (!scripts[`make ${name}`])
          scripts[`make ${name}`] = '(Makefile target)'
      }
    }
  }

  // Procfile process types (Heroku-style deployments).
  const procfile = await readText(join(root, 'Procfile'))
  if (procfile) {
    for (const line of procfile.split('\n')) {
      const m = line.match(/^(\w+):\s*(.+)/)
      if (m) scripts[`Procfile:${m[1]}`] = m[2].trim()
    }
  }

  return scripts
}
721
+
722
+ // ── Documentation Discovery ──────────────────────────────────────────────────
723
+
724
+ // Extract the first markdown heading from a file (cheap I/O, avoids model reads).
725
// First ATX heading (levels 1-3) of a markdown file, or null when the file
// is unreadable or has no heading. Cheap I/O that spares model reads.
async function extractTitle(filePath) {
  try {
    const text = await readFile(filePath, 'utf-8')
    const heading = text.match(/^#{1,3}\s+(.+)/m)
    return heading ? heading[1].trim() : null
  } catch {
    return null
  }
}
735
+
736
// Discover markdown documentation: root-level files plus common doc
// directories (one nested level), each annotated with its first heading so
// the model can triage without opening files. Returns [{ path, title? }].
async function findDocs() {
  const seen = new Set()
  const paths = []
  const add = (p) => {
    if (seen.has(p)) return
    seen.add(p)
    paths.push(p)
  }

  // Root markdown files
  for (const f of await globShallow(root, ['.md'])) add(f)

  // Common doc directories — only top-level entries here; one nested level
  // is scanned below (listing deeper paths here caused duplicates).
  const docDirs = ['docs', 'doc', 'documentation', 'wiki', '.github']
  for (const dir of docDirs) {
    const dirPath = join(root, dir)
    if (!(await exists(dirPath))) continue
    const files = await globShallow(dirPath, ['.md'])
    for (const f of files.slice(0, 10)) add(`${dir}/${f}`)
    const subdirs = await listDirNames(dirPath)
    for (const sub of subdirs.slice(0, 5)) {
      const subName = sub.replace('/', '')
      const subFiles = await globShallow(join(dirPath, subName), ['.md'])
      for (const f of subFiles.slice(0, 5)) add(`${dir}/${subName}/${f}`)
    }
  }

  // Titles extracted in parallel.
  return Promise.all(
    paths.map(async (p) => {
      const title = await extractTitle(join(root, p))
      return title ? { path: p, title } : { path: p }
    }),
  )
}
780
+
781
+ // ── Test Infrastructure ───────────────────────────────────────────────────────
782
+
783
// Report test infrastructure: conventional test directories that exist and
// test-runner config files present at the root.
async function findTestInfra() {
  const dirs = []

  const testDirs = [
    'tests',
    'test',
    'spec',
    '__tests__',
    'e2e',
    'integration',
    'src/tests',
    'src/test',
    'src/__tests__',
  ]
  for (const dir of testDirs) {
    if (await exists(join(root, dir))) dirs.push(`${dir}/`)
  }

  const testConfigs = [
    'jest.config.js',
    'jest.config.ts',
    'jest.config.mjs',
    'vitest.config.js',
    'vitest.config.ts',
    'vitest.config.mts',
    '.rspec',
    'pytest.ini',
    'conftest.py',
    'setup.cfg',
    'phpunit.xml',
    'karma.conf.js',
    'cypress.config.js',
    'cypress.config.ts',
    'playwright.config.js',
    'playwright.config.ts',
  ]
  const rootFiles = await listFileNames(root, { includeDotfiles: true })
  const config = testConfigs.filter((f) => rootFiles.includes(f))

  return { dirs, config }
}
829
+
830
+ // ── Monorepo Detection ────────────────────────────────────────────────────────
831
+
832
// Detect monorepo tooling (workspaces, Nx, Lerna, Turborepo, Cargo
// workspaces) or, failing that, a conventional multi-package layout.
// Returns { signals, workspaces } or null for single-package repos.
async function detectMonorepo() {
  const rootFiles = await listFileNames(root)
  const signals = []

  const pkg = await readJson(join(root, 'package.json'))
  if (pkg?.workspaces) signals.push('npm/yarn workspaces')

  if (rootFiles.includes('pnpm-workspace.yaml')) signals.push('pnpm workspaces')
  if (rootFiles.includes('nx.json')) signals.push('Nx')
  if (rootFiles.includes('lerna.json')) signals.push('Lerna')
  if (rootFiles.includes('turbo.json')) signals.push('Turborepo')

  const cargo = await readText(join(root, 'Cargo.toml'))
  if (cargo && /\[workspace\]/.test(cargo)) signals.push('Cargo workspace')

  if (signals.length === 0) {
    // No explicit tooling — fall back to layout conventions.
    const monoIndicators = ['apps', 'packages', 'services', 'modules', 'libs']
    let found = 0
    for (const dir of monoIndicators) {
      if (await exists(join(root, dir))) found += 1
    }
    if (found >= 2) {
      signals.push('convention-based (multiple top-level package dirs)')
    }
  }

  if (signals.length === 0) return null

  // Enumerate workspace packages from conventional container directories.
  const workspaces = []
  const wsDirs = ['apps', 'packages', 'services', 'modules', 'libs', 'plugins']
  for (const dir of wsDirs) {
    const dirPath = join(root, dir)
    if (!(await exists(dirPath))) continue
    const children = await listDirNames(dirPath)
    workspaces.push(...children.slice(0, 20).map((c) => `${dir}/${c}`))
  }

  return { signals, workspaces }
}
877
+
878
+ // ── Infrastructure & External Dependencies ────────────────────────────────────
879
+
880
async function findInfrastructure() {
  // Surveys root-level files for environment, container, and deployment
  // configuration, and infers which external services the project talks to.
  // Returns { envFiles, configFiles, services } with services deduplicated.
  const rootFiles = await listFileNames(root, { includeDotfiles: true })
  const services = []

  // Keep only the candidates that actually exist at the repo root,
  // preserving candidate order.
  const present = (candidates) => candidates.filter((f) => rootFiles.includes(f))

  // Environment files (signal for external dependencies)
  const envFiles = present([
    '.env.example',
    '.env.sample',
    '.env.template',
    '.env.local.example',
    '.env.development',
    '.env.production',
  ])

  // Docker / container config (reveals databases, caches, queues),
  // followed by deployment / infrastructure config.
  const configFiles = [
    ...present([
      'docker-compose.yml',
      'docker-compose.yaml',
      'docker-compose.dev.yml',
      'docker-compose.dev.yaml',
      'docker-compose.override.yml',
      'Dockerfile',
    ]),
    ...present([
      'fly.toml',
      'vercel.json',
      'netlify.toml',
      'render.yaml',
      'railway.json',
      'app.yaml',
      'serverless.yml',
      'sam-template.yaml',
      'Procfile',
      'nixpacks.toml',
    ]),
  ]

  // Detect common services from the first readable docker-compose file.
  const composeSignals = [
    [/postgres/i, 'PostgreSQL'],
    [/mysql|mariadb/i, 'MySQL'],
    [/mongo/i, 'MongoDB'],
    [/redis/i, 'Redis'],
    [/rabbitmq/i, 'RabbitMQ'],
    [/kafka/i, 'Kafka'],
    [/elasticsearch/i, 'Elasticsearch'],
    [/minio|localstack/i, 'S3-compatible storage'],
    [/mailhog|mailpit/i, 'Email (dev)'],
  ]
  for (const dcFile of ['docker-compose.yml', 'docker-compose.yaml']) {
    const dc = await readText(join(root, dcFile))
    if (!dc) continue
    for (const [pattern, label] of composeSignals) {
      if (pattern.test(dc)) services.push(label)
    }
    break
  }

  // Detect services from env example files. Entries may carry a guard so a
  // generic label is skipped when a more specific one was already found.
  const envSignals = [
    [
      /DATABASE_URL|DB_HOST|POSTGRES/i,
      'Database (see env config)',
      () => !services.includes('PostgreSQL') && !services.includes('MySQL'),
    ],
    [/REDIS/i, 'Redis', () => !services.includes('Redis')],
    [/STRIPE/i, 'Stripe'],
    [/OPENAI|ANTHROPIC|CLAUDE/i, 'AI/LLM API'],
    [/AWS_|S3_/i, 'AWS/S3', () => !services.includes('S3-compatible storage')],
    [/SENDGRID|MAILGUN|POSTMARK|RESEND/i, 'Email service'],
    [/TWILIO/i, 'Twilio'],
    [/SENTRY/i, 'Sentry'],
    [/AUTH0|CLERK|SUPABASE_/i, 'Auth service'],
  ]
  for (const envFile of envFiles) {
    const content = await readText(join(root, envFile))
    if (!content) continue
    for (const [pattern, label, guard] of envSignals) {
      if (pattern.test(content) && (guard ? guard() : true)) {
        services.push(label)
      }
    }
    break // Only read the first env example
  }

  return {
    envFiles,
    configFiles,
    services: [...new Set(services)],
  }
}
980
+
981
+ // ── Main ──────────────────────────────────────────────────────────────────────
982
+
983
async function main() {
  // The top-level detectors are independent of one another, so kick them
  // all off concurrently and wait for the full set.
  const detectors = [
    detectName(),
    detectLanguagesAndFrameworks(),
    getStructure(),
    findDocs(),
    findTestInfra(),
    detectScripts(),
    detectMonorepo(),
    findInfrastructure(),
  ]
  const [
    name,
    langInfo,
    structure,
    docs,
    testInfra,
    scripts,
    monorepo,
    infrastructure,
  ] = await Promise.all(detectors)

  // Entry-point detection needs the detected languages, so it runs after.
  const entryPoints = await findEntryPoints(langInfo.languages)

  const inventory = {
    name,
    languages: langInfo.languages,
    frameworks: langInfo.frameworks,
    packageManager: langInfo.packageManager,
    testFramework: langInfo.testFramework,
    monorepo,
    structure,
    entryPoints,
    scripts,
    docs,
    testInfra,
    infrastructure,
  }

  // Emit the inventory as a single line of JSON on stdout.
  process.stdout.write(`${JSON.stringify(inventory)}\n`)
}
1023
+
1024
main().catch((err) => {
  // Always exit 0 with valid JSON, even on error: consumers parse stdout
  // unconditionally, so emit an empty inventory carrying the error message
  // instead of a nonzero exit or a stack trace.
  const fallback = {
    error: err.message,
    name: basename(root),
    languages: [],
    frameworks: [],
    packageManager: null,
    testFramework: null,
    monorepo: null,
    structure: { topLevel: [], srcLayout: {} },
    entryPoints: [],
    scripts: {},
    docs: [],
    testInfra: { dirs: [], config: [] },
    infrastructure: { envFiles: [], configFiles: [], services: [] },
  }
  process.stdout.write(`${JSON.stringify(fallback)}\n`)
})