start-vibing 4.4.0 → 4.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/template/.claude/commands/e2e-audit.md +16 -0
- package/template/.claude/hooks/e2e-audit-session-start.sh +4 -0
- package/template/.claude/hooks/mcp-usage-session-start.sh +4 -0
- package/template/.claude/settings.json +8 -0
- package/template/.claude/skills/e2e-audit/SKILL.md +216 -660
- package/template/.claude/skills/e2e-audit/findings.schema.json +98 -0
- package/template/.claude/skills/e2e-audit/references/api-contract-playbook.md +66 -0
- package/template/.claude/skills/e2e-audit/references/auth-setup-playbook.md +78 -0
- package/template/.claude/skills/e2e-audit/references/coverage-gap-playbook.md +95 -0
- package/template/.claude/skills/e2e-audit/references/post-run-feedback-playbook.md +80 -0
- package/template/.claude/skills/e2e-audit/scripts/detect-stack.sh +205 -0
- package/template/.claude/skills/e2e-audit/scripts/detect-uncovered.sh +137 -0
- package/template/.claude/skills/e2e-audit/scripts/discover-api-surface.sh +242 -0
- package/template/.claude/skills/e2e-audit/scripts/discover-routes.sh +163 -0
- package/template/.claude/skills/e2e-audit/scripts/inventory-existing-tests.sh +161 -0
- package/template/.claude/skills/e2e-audit/scripts/verify-audit.sh +88 -0
- package/template/.claude/skills/e2e-audit/templates/auth-setup.ts.tpl +24 -0
- package/template/.claude/skills/e2e-audit/templates/base-fixture.ts.tpl +75 -0
- package/template/.claude/skills/e2e-audit/templates/findings-report.md.tpl +54 -0
- package/template/.claude/skills/e2e-audit/templates/post-run-feedback.md.tpl +36 -0
- package/template/.claude/skills/e2e-audit/DESIGN.md +0 -294
- package/template/.claude/skills/e2e-audit/e2e/fixtures/auth.setup.ts +0 -70
- package/template/.claude/skills/e2e-audit/e2e/fixtures/auth.ts +0 -21
- package/template/.claude/skills/e2e-audit/e2e/fixtures/base.ts +0 -90
- package/template/.claude/skills/e2e-audit/e2e/fixtures/storage/.gitkeep +0 -0
- package/template/.claude/skills/e2e-audit/e2e/fixtures/storage/admin.json +0 -50
- package/template/.claude/skills/e2e-audit/e2e/fixtures/storage/manager.json +0 -50
- package/template/.claude/skills/e2e-audit/e2e/fixtures/storage/member.json +0 -50
- package/template/.claude/skills/e2e-audit/e2e/fixtures/storage/owner.json +0 -50
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-admin.page.ts +0 -141
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-billing.page.ts +0 -47
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-chat.page.ts +0 -35
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-home.page.ts +0 -134
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-integrations.page.ts +0 -334
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-knowledge.page.ts +0 -30
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-ontology.page.ts +0 -71
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-profile.page.ts +0 -38
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-teams.page.ts +0 -123
- package/template/.claude/skills/e2e-audit/e2e/pages/dashboard-transcripts.page.ts +0 -109
- package/template/.claude/skills/e2e-audit/e2e/specs/auth/login.spec.ts +0 -59
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-admin.spec.ts +0 -233
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-billing.spec.ts +0 -44
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-chat.spec.ts +0 -50
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-home.spec.ts +0 -243
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-integrations.spec.ts +0 -472
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-knowledge.spec.ts +0 -57
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-ontology.spec.ts +0 -72
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-profile.spec.ts +0 -48
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-teams.spec.ts +0 -247
- package/template/.claude/skills/e2e-audit/e2e/specs/dashboard-transcripts.spec.ts +0 -122
- package/template/.claude/skills/e2e-audit/e2e/specs/security/headers.spec.ts +0 -39
- package/template/.claude/skills/e2e-audit/e2e/specs/security/rbac.spec.ts +0 -92
- package/template/.claude/skills/e2e-audit/e2e/specs/security/xss.spec.ts +0 -74
- package/template/.claude/skills/e2e-audit/e2e/utils/console-collector.ts +0 -89
- package/template/.claude/skills/e2e-audit/e2e/utils/security-helpers.ts +0 -114
- package/template/.claude/skills/e2e-audit/e2e/utils/test-data.ts +0 -64
- package/template/.claude/skills/e2e-audit/runbook.md +0 -115
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# detect-uncovered.sh
#
# Intersect the current branch's diff with the existing test inventory and
# emit every NEW or CHANGED surface (page route, HTTP route, tRPC procedure,
# server action) that no test references.
#
# Inputs (all produced earlier in the pipeline):
#   $1  routes.json          (from discover-routes.sh)
#   $2  api-surface.json     (from discover-api-surface.sh)
#   $3  existing-tests.json  (from inventory-existing-tests.sh)
#   $4  base-ref             (default: origin/main)
#
# Output: a single JSON object on stdout with the keys:
#   base_ref, diff_files,
#   changed_routes, changed_http, changed_trpc, changed_actions,
#   uncovered_routes, uncovered_http, uncovered_trpc, uncovered_actions
# where "uncovered_*" means: changed AND no test references its URL or file.
set -euo pipefail

# Hard dependency — every transformation below is a jq program.
command -v jq >/dev/null || { echo "jq required" >&2; exit 2; }

ROUTES_JSON="${1:?usage: detect-uncovered.sh routes.json api-surface.json existing-tests.json [base-ref]}"
API_JSON="${2:?api-surface.json required}"
TESTS_JSON="${3:?existing-tests.json required}"
BASE_REF="${4:-origin/main}"

# All three inventory files must already exist on disk.
for input in "$ROUTES_JSON" "$API_JSON" "$TESTS_JSON"; do
  [[ -f "$input" ]] || { echo "missing: $input" >&2; exit 2; }
done
|
|
36
|
+
|
|
37
|
+
# --- 1. branch diff ---------------------------------------------------------
# Collect every file touched on this branch: commits since the merge base,
# staged changes, and unstaged changes. When the base ref is unknown locally,
# fall back to the last ten commits (or HEAD for a fresh repo).
DIFF_FILES='[]'
if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
  if git rev-parse --verify "$BASE_REF" >/dev/null 2>&1; then
    MERGE_BASE="$(git merge-base "$BASE_REF" HEAD 2>/dev/null || echo "$BASE_REF")"
  else
    MERGE_BASE="$(git rev-parse HEAD~10 2>/dev/null || git rev-parse HEAD)"
  fi
  # FIX: the original appended each file with its own jq invocation, re-parsing
  # the whole accumulator every time (O(n^2) subprocess spawns). One jq -R pass
  # now slurps all three diff listings, drops blanks, and de-duplicates.
  # The `|| true` guards mirror the original's ignore-errors behavior so a
  # failing `git diff` cannot abort the script under pipefail.
  DIFF_FILES="$(
    {
      git diff --name-only "$MERGE_BASE"...HEAD 2>/dev/null || true
      git diff --name-only --cached 2>/dev/null || true
      git diff --name-only 2>/dev/null || true
    } | jq -Rn '[inputs | select(length > 0)] | unique'
  )"
fi
|
|
52
|
+
|
|
53
|
+
# --- 2. filter each inventory by membership in diff -------------------------
# filter_by_file: read a JSON array on stdin and keep only the entries whose
# .file field appears in the $DIFF_FILES array.
filter_by_file() {
  jq --argjson diff "$DIFF_FILES" '[.[] | select(.file as $f | $diff | any(. == $f))]'
}

# routes.json is already a bare array, so it can feed the filter directly.
# (FIX: the original piped it through a no-op `jq '.'` first — a useless
# extra fork.) The api-surface fields still need to be projected out.
CHANGED_ROUTES="$(filter_by_file <"$ROUTES_JSON")"
CHANGED_HTTP="$(jq '.http_routes' "$API_JSON" | filter_by_file)"
CHANGED_TRPC="$(jq '.trpc_procedures' "$API_JSON" | filter_by_file)"
CHANGED_ACTIONS="$(jq '.server_actions' "$API_JSON" | filter_by_file)"
|
|
63
|
+
|
|
64
|
+
# --- 3. load test corpus contents for string-search coverage ---------------
# Build one concatenated, lowercased blob of every test file's contents; a
# surface counts as "covered" when its URL path OR its source file path
# appears anywhere in that blob.
TEST_FILES="$(jq -r '.test_files[]?.file // empty' "$TESTS_JSON")"
# FIX: mktemp + trap instead of the predictable /tmp/e2e-audit-testblob-$$
# name, so the blob cannot be pre-created by another user and is cleaned up
# on every exit path (the explicit rm later in the script still works too).
TEST_BLOB="$(mktemp "${TMPDIR:-/tmp}/e2e-audit-testblob.XXXXXX")"
trap 'rm -f -- "$TEST_BLOB"' EXIT
while IFS= read -r tf; do
  [[ -z "$tf" ]] && continue
  if [[ -f "$tf" ]]; then
    # best effort: an unreadable test file must not abort the audit
    tr '[:upper:]' '[:lower:]' <"$tf" >>"$TEST_BLOB" || true
  fi
done <<<"$TEST_FILES"
|
|
74
|
+
|
|
75
|
+
# is_covered NEEDLE [NEEDLE ...]
#   Return 0 when any non-empty needle appears in the lowercased test blob
#   ($TEST_BLOB), either literally or with dynamic segments loosened.
is_covered() {
  # FIX: `loose` was not declared local and leaked into the global scope;
  # `echo "$n"` misbehaved for needles beginning with a dash (printf is safe);
  # grep calls now use `--` so a dash-leading needle cannot become an option.
  local n loose
  for n in "$@"; do
    [[ -z "$n" ]] && continue
    # Loosen dynamic segments for a fuzzy match: /users/[id] -> /users/[^/]+
    # (lowercased, because the blob is lowercased).
    loose="$(printf '%s' "$n" | sed -E 's#\[[^]]+\]#[^/]+#g' | tr '[:upper:]' '[:lower:]')"
    # Plain literal check first.
    if grep -qF -- "$(printf '%s' "$n" | tr '[:upper:]' '[:lower:]')" "$TEST_BLOB" 2>/dev/null; then
      return 0
    fi
    # Then the regex form with the dynamic-segment wildcards.
    if grep -Eq -- "$loose" "$TEST_BLOB" 2>/dev/null; then
      return 0
    fi
  done
  return 1
}
|
|
89
|
+
|
|
90
|
+
# --- 4. compute uncovered for each category --------------------------------
# compute_uncovered CHANGED_JSON PRIMARY_FIELD SECONDARY_FIELD
#   Echo the subset of the CHANGED_JSON array whose PRIMARY_FIELD and
#   SECONDARY_FIELD values are both absent from the test blob (per
#   is_covered). PRIMARY is e.g. the URL path or procedure name; SECONDARY
#   is typically the source file path.
compute_uncovered() {
  local changed_json="$1"
  local name_field="$2"      # which field to probe as the main test needle
  local secondary_field="$3" # fallback needle (e.g. file path as well)
  local out='[]'
  # FIX: item/primary/secondary were not `local` and leaked into the
  # caller's scope.
  local item primary secondary
  while IFS= read -r item; do
    [[ -z "$item" ]] && continue
    primary="$(jq -r --arg k "$name_field" '.[$k] // ""' <<<"$item")"
    secondary="$(jq -r --arg k "$secondary_field" '.[$k] // ""' <<<"$item")"
    if ! is_covered "$primary" "$secondary"; then
      out="$(jq --argjson o "$item" '. + [$o]' <<<"$out")"
    fi
  done < <(jq -c '.[]' <<<"$changed_json")
  echo "$out"
}
|
|
106
|
+
|
|
107
|
+
# Run the coverage intersection over every changed category. The four
# computations are independent of each other.
UNC_ACTIONS="$(compute_uncovered "$CHANGED_ACTIONS" "name" "file")"
UNC_TRPC="$(compute_uncovered "$CHANGED_TRPC" "name" "file")"
UNC_HTTP="$(compute_uncovered "$CHANGED_HTTP" "path" "file")"
UNC_ROUTES="$(compute_uncovered "$CHANGED_ROUTES" "path" "file")"

# The blob is no longer needed once coverage has been computed.
rm -f -- "$TEST_BLOB"

# --- 5. assemble ------------------------------------------------------------
# One jq -n invocation so stdout carries exactly one well-formed JSON document.
jq -n \
  --arg base_ref "$BASE_REF" \
  --argjson diff_files "$DIFF_FILES" \
  --argjson changed_routes "$CHANGED_ROUTES" \
  --argjson changed_http "$CHANGED_HTTP" \
  --argjson changed_trpc "$CHANGED_TRPC" \
  --argjson changed_actions "$CHANGED_ACTIONS" \
  --argjson uncovered_routes "$UNC_ROUTES" \
  --argjson uncovered_http "$UNC_HTTP" \
  --argjson uncovered_trpc "$UNC_TRPC" \
  --argjson uncovered_actions "$UNC_ACTIONS" \
  '{
    base_ref: $base_ref,
    diff_files: $diff_files,
    changed_routes: $changed_routes,
    changed_http: $changed_http,
    changed_trpc: $changed_trpc,
    changed_actions: $changed_actions,
    uncovered_routes: $uncovered_routes,
    uncovered_http: $uncovered_http,
    uncovered_trpc: $uncovered_trpc,
    uncovered_actions: $uncovered_actions
  }'
|
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# discover-api-surface.sh — enumerate every network-facing surface a browser
# or third-party client can hit: REST/HTTP route handlers, tRPC procedures,
# GraphQL resolvers, and server actions.
#
# Why: Playwright deduction misses endpoints that aren't touched by the flow
# you tested. A procedure only reachable from Settings > Advanced > Danger
# Zone, or an API route called only by a webhook, won't show up in a traffic
# log. This script reads the SOURCE and emits the full contract so the skill
# can report which endpoints HAVE no tests.
#
# Output: JSON object on stdout:
# {
#   "http_routes":     [{ "method", "path", "file", "auth": "protected"|"public"|"unknown", "zod_schema_found": bool }],
#   "trpc_procedures": [{ "name", "kind": "query"|"mutation"|"subscription", "file", "auth", "input_schema_found": bool }],
#   "graphql":         { "found": bool, "files": [...] },
#   "server_actions":  [{ "name", "file", "directive": "'use server'"|"file-scoped" }],
#   "middleware":      { "file": string|null, "has_auth_guard": bool, "matches_public_patterns": [...] }
# }
#
# No AST. False positives are OK — the skill cross-references this with tests.
set -euo pipefail

command -v jq >/dev/null || { echo "jq required" >&2; exit 2; }

# Accumulators; merged into one JSON object at the bottom of the script.
HTTP='[]'
TRPC='[]'
GQL_FOUND=false
GQL_FILES='[]'
ACTIONS='[]'
MW_FILE="null"   # JSON-encoded middleware path, or the JSON literal null
MW_AUTH=false
MW_PUBLIC='[]'

# --- 1. HTTP ROUTE HANDLERS -------------------------------------------------
# Next.js app router: app/**/route.{ts,tsx,js} with exported METHOD handlers.
# Next.js pages router: pages/api/**/*.{ts,tsx,js} with default export.
# Remix: resource routes (files in app/routes with a loader/action but no default).
# Express/Hono/Fastify: app.get/post/put/delete/patch etc.
# FIX: a METHODS='GET|POST|...' constant used to be defined here but was never
# referenced anywhere in the script (each scanner spells out its own method
# list), so it has been removed.
|
|
41
|
+
|
|
42
|
+
# Next.js app router route.ts handlers.
while IFS= read -r f; do
  [[ -z "$f" ]] && continue
  # Derive the URL path: strip the (src/)app prefix and the /route.* filename;
  # [param] segments are kept verbatim.
  p="$f"; p="${p#src/app/}"; p="${p#app/}"
  p="${p%/route.ts}"; p="${p%/route.tsx}"; p="${p%/route.js}"
  # FIX: a ROOT-LEVEL handler (app/route.ts) has no slash before the filename,
  # so the "/route.*" suffix strips above left "route.ts" behind and the URL
  # came out as "/route.ts" instead of "/".
  case "$p" in route.ts|route.tsx|route.js) p="" ;; esac
  # Drop route-group segments like (marketing) — they don't appear in the URL.
  p="$(echo "$p" | awk -F/ '{out=""; for(i=1;i<=NF;i++){if($i~/^\(.*\)$/)continue; out=out(out==""?"":"/")$i} print out}')"
  url="/$p"
  # Emit one entry per exported HTTP method handler found in the file.
  for m in GET POST PUT PATCH DELETE HEAD OPTIONS; do
    if grep -Eq "export[[:space:]]+(async[[:space:]]+)?function[[:space:]]+$m\\b|export[[:space:]]+const[[:space:]]+$m[[:space:]]*=" "$f" 2>/dev/null; then
      # Request-validation hint: does the file mention a zod schema at all?
      zod=false
      grep -Eq "z\\.object\\(|safeParse|\\.parse\\(|zodResolver" "$f" 2>/dev/null && zod=true
      # Auth heuristic: session/user lookups mark the handler protected; an
      # explicit "// public" or "@public" marker marks it public.
      auth="unknown"
      if grep -Eq "getServerSession|auth\\(\\)|requireAuth|requireSession|getSession|currentUser\\(|userId" "$f" 2>/dev/null; then auth="protected"
      elif grep -Eq "// public|@public" "$f" 2>/dev/null; then auth="public"
      fi
      HTTP="$(jq --arg m "$m" --arg p "$url" --arg f "$f" --arg a "$auth" --argjson z "$zod" \
        '. + [{method:$m, path:$p, file:$f, auth:$a, zod_schema_found:$z}]' <<<"$HTTP")"
    fi
  done
done < <(find app src/app -type f \( -name 'route.ts' -o -name 'route.tsx' -o -name 'route.js' \) 2>/dev/null)
|
|
66
|
+
|
|
67
|
+
# Next.js pages router: every file under pages/api is an API route whose URL
# mirrors its file path. The handler is a default export, so the method is
# recorded as "ANY".
while IFS= read -r f; do
  [[ -z "$f" ]] && continue
  rel="$f"
  rel="${rel#src/pages/}"; rel="${rel#pages/}"
  rel="${rel%.ts}"; rel="${rel%.tsx}"; rel="${rel%.js}"; rel="${rel%.jsx}"
  # an index file maps to its directory's URL
  rel="${rel%/index}"
  url="/$rel"
  # Auth heuristic, same spirit as the app-router scan above.
  auth="unknown"
  if grep -Eq "getServerSession|requireAuth|getSession|authenticate\\(|req\\.session" "$f" 2>/dev/null; then auth="protected"; fi
  # Request-validation hint.
  zod=false
  grep -Eq "z\\.object\\(|safeParse|\\.parse\\(" "$f" 2>/dev/null && zod=true
  HTTP="$(jq --arg p "$url" --arg f "$f" --arg a "$auth" --argjson z "$zod" \
    '. + [{method:"ANY", path:$p, file:$f, auth:$a, zod_schema_found:$z}]' <<<"$HTTP")"
done < <(find pages/api src/pages/api -type f \( -name '*.ts' -o -name '*.tsx' -o -name '*.js' \) 2>/dev/null)
|
|
82
|
+
|
|
83
|
+
# Express / Hono / Fastify style registrations: literal app.get("/path", ...)
# calls. Capped at 500 grep hits to bound the work on large repos.
while IFS= read -r record; do
  [[ -z "$record" ]] && continue
  # grep -H -n output is "<file>:<line>:<match>"; peel off the first two
  # colon-delimited fields (the line number itself is not needed).
  src="${record%%:*}"
  tail="${record#*:}"
  tail="${tail#*:}"
  verb="$(echo "$tail" | grep -oE "\\.(get|post|put|patch|delete|head|options)\\(" | head -1 | tr -d '.(' | tr '[:lower:]' '[:upper:]')"
  route="$(echo "$tail" | grep -oE "[\"'][^\"']+[\"']" | head -1 | tr -d "\"'")"
  [[ -z "$verb" || -z "$route" ]] && continue
  # only absolute paths look like real route registrations
  [[ "$route" =~ ^/ ]] || continue
  HTTP="$(jq --arg m "$verb" --arg p "$route" --arg f "$src" \
    '. + [{method:$m, path:$p, file:$f, auth:"unknown", zod_schema_found:false}]' <<<"$HTTP")"
done < <(grep -rEHn --include='*.ts' --include='*.tsx' --include='*.js' --include='*.mjs' \
  "\\.(get|post|put|patch|delete|head|options)\\([\"'][^\"']+[\"']" \
  src server app 2>/dev/null | head -500 || true)
|
|
98
|
+
|
|
99
|
+
# --- 2. tRPC PROCEDURES -----------------------------------------------------
# Find router files (anything using the *Procedure builders) and, with a
# line-accumulating awk scan, emit one "name|kind|file|auth|input" record per
# procedure chain, then fold the records into the TRPC JSON array.
#
# We don't build a router namespace tree; procedure names are emitted as seen.
# Nested router paths can be computed by the skill from the file layout.
#
# FIX: an earlier revision first ran a broken variant of this scan — a
# pipeline whose per-line jq calls each started from an empty array, written
# to a predictable /tmp jsonl file that was then discarded unread (its own
# comments admitted it was wrong) — before redoing the work correctly. That
# dead pass and its /tmp side effects have been removed; only the single
# correct pass below remains.
TRPC_FILES="$(grep -rl -E "createTRPCRouter|publicProcedure|protectedProcedure|router\\(\\{" \
  --include='*.ts' --include='*.tsx' \
  src server 2>/dev/null || true)"

TRPC='[]'
while IFS= read -r f; do
  [[ -z "$f" ]] && continue
  while IFS='|' read -r name kind file auth input_found; do
    [[ -z "$name" ]] && continue
    TRPC="$(jq --arg n "$name" --arg k "$kind" --arg fi "$file" --arg a "$auth" --argjson i "$input_found" \
      '. + [{name:$n, kind:$k, file:$fi, auth:$a, input_schema_found:$i}]' <<<"$TRPC")"
  done < <(
    # Accumulate each builder chain across lines until the terminal
    # .query/.mutation/.subscription call, then classify it.
    # NOTE(review): the 3-argument match(chain, regex, m) form is a GNU awk
    # extension; on BSD/macOS awk this scan fails and the failure is silenced
    # by the 2>/dev/null below — confirm gawk is available where this runs.
    awk -v file="$f" '
      /(\w+Procedure)/ { chain = chain " " $0 }
      /\.query\s*\(|\.mutation\s*\(|\.subscription\s*\(/ {
        chain = chain " " $0
        name = "_"
        if (match(chain, /([A-Za-z_][A-Za-z0-9_]*)[[:space:]]*:[[:space:]]*[A-Za-z_][A-Za-z0-9_]*Procedure/, m)) name = m[1]
        if (chain ~ /\.query[[:space:]]*\(/) kind = "query"
        else if (chain ~ /\.mutation[[:space:]]*\(/) kind = "mutation"
        else kind = "subscription"
        if (chain ~ /protectedProcedure/) auth = "protected"
        else if (chain ~ /publicProcedure/) auth = "public"
        else auth = "unknown"
        input_found = (chain ~ /\.input[[:space:]]*\(/) ? "true" : "false"
        printf "%s|%s|%s|%s|%s\n", name, kind, file, auth, input_found
        chain = ""
      }
    ' "$f" 2>/dev/null
  )
done <<<"$TRPC_FILES"
|
|
181
|
+
|
|
182
|
+
# --- 3. GRAPHQL -------------------------------------------------------------
# Presence check only: record whether any GraphQL schema/resolver markers
# exist, plus the files containing them (capped at 50).
GQL_HITS="$(grep -rl -E 'typeDefs|buildSchema|gql`|@ObjectType|@Resolver|createSchema' \
  --include='*.ts' --include='*.tsx' --include='*.graphql' --include='*.gql' \
  src server schema 2>/dev/null | head -50 || true)"
if [[ -n "$GQL_HITS" ]]; then
  GQL_FOUND=true
  GQL_FILES="$(jq -Rn '[inputs]' <<<"$GQL_HITS")"
fi
|
|
190
|
+
|
|
191
|
+
# --- 4. SERVER ACTIONS (Next.js) --------------------------------------------
# Two declaration styles exist:
#   (a) a file-scoped 'use server' directive at the top of the file — every
#       exported async function in that file is an action;
#   (b) an inline 'use server' as the first statement of a function body.
while IFS= read -r f; do
  [[ -z "$f" ]] && continue
  # (a) file-scoped directive
  if grep -Eq "^['\"]use server['\"]" "$f" 2>/dev/null; then
    while IFS= read -r fn; do
      [[ -z "$fn" ]] && continue
      ACTIONS="$(jq --arg n "$fn" --arg fi "$f" '. + [{name:$n, file:$fi, directive:"file-scoped"}]' <<<"$ACTIONS")"
    done < <(grep -Eo "export[[:space:]]+(async[[:space:]]+)?function[[:space:]]+[A-Za-z_][A-Za-z0-9_]*" "$f" \
      | awk '{print $NF}' | sed 's/^function[[:space:]]*//')
  fi
  # (b) inline directive: the function signature sits on the line just above
  while IFS= read -r sig; do
    fn="$(echo "$sig" | grep -oE "function[[:space:]]+[A-Za-z_][A-Za-z0-9_]*" | awk '{print $2}')"
    [[ -z "$fn" ]] && continue
    ACTIONS="$(jq --arg n "$fn" --arg fi "$f" '. + [{name:$n, file:$fi, directive:"'\''use server'\''"}]' <<<"$ACTIONS")"
  done < <(grep -B1 -E "^[[:space:]]*['\"]use server['\"]" "$f" 2>/dev/null | grep -E "function[[:space:]]+[A-Za-z_]" || true)
done < <(find src app server -type f \( -name '*.ts' -o -name '*.tsx' \) 2>/dev/null)
|
|
211
|
+
|
|
212
|
+
# --- 5. MIDDLEWARE (Next.js) -----------------------------------------------
# Locate the (single) middleware entry point, note whether it appears to gate
# auth, and collect the literal path patterns it mentions. First hit wins.
for cand in middleware.ts middleware.js src/middleware.ts src/middleware.js; do
  [[ -f "$cand" ]] || continue
  MW_FILE="\"$cand\""
  grep -Eq "auth|getToken|getSession|currentUser|getServerSession|redirect\\(.*sign[_-]?in" "$cand" 2>/dev/null && MW_AUTH=true
  # Quoted "/..." string literals are treated as matcher/public patterns.
  while IFS= read -r pat; do
    [[ -z "$pat" ]] && continue
    MW_PUBLIC="$(jq --arg p "$pat" '. + [$p]' <<<"$MW_PUBLIC")"
  done < <(grep -oE "['\"]/[^'\"]*['\"]" "$cand" | tr -d "'\"" | sort -u | head -40)
  break
done
|
|
225
|
+
|
|
226
|
+
# --- ASSEMBLE ---------------------------------------------------------------
# Emit one JSON document; each list is de-duplicated on its natural key so a
# surface found by more than one detector appears only once.
jq -n \
  --argjson mw_public "$MW_PUBLIC" \
  --argjson mw_auth "$MW_AUTH" \
  --argjson mw_file "$MW_FILE" \
  --argjson actions "$ACTIONS" \
  --argjson graphql_files "$GQL_FILES" \
  --argjson graphql_found "$GQL_FOUND" \
  --argjson trpc "$TRPC" \
  --argjson http "$HTTP" \
  '{
    http_routes: ($http | unique_by([.method, .path, .file])),
    trpc_procedures: ($trpc | unique_by([.name, .kind, .file])),
    graphql: { found: $graphql_found, files: $graphql_files },
    server_actions: ($actions | unique_by([.name, .file])),
    middleware: { file: $mw_file, has_auth_guard: $mw_auth, matches_public_patterns: $mw_public }
  }'
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# discover-routes.sh — enumerate every user-facing page route from the source tree.
#
# Output: JSON array on stdout. Each item:
#   { "path": "/users/[id]", "kind": "page" | "layout" | "route-group" | "parallel" | "intercepting",
#     "file": "src/app/users/[id]/page.tsx", "dynamic": true, "catch_all": false }
#
# Supports next (app + pages router), remix, sveltekit, nuxt, astro.
# Non-framework repos emit an empty array.
set -euo pipefail

command -v jq >/dev/null || { echo "jq required" >&2; exit 2; }

# Framework detection from package.json, checked in priority order.
# FIX: the original repeated the same jq probe five times with a hard-coded
# package name each time; a single parameterized query now serves all five.
detect_fw() {
  local fw pkg
  for fw in next remix sveltekit nuxt astro; do
    case "$fw" in
      next)      pkg='next' ;;
      remix)     pkg='@remix-run/react' ;;
      sveltekit) pkg='@sveltejs/kit' ;;
      nuxt)      pkg='nuxt' ;;
      astro)     pkg='astro' ;;
    esac
    # jq -e: non-zero exit when the dependency entry is null/absent.
    if jq -e --arg p "$pkg" '.dependencies[$p] // .devDependencies[$p]' package.json >/dev/null 2>&1; then
      echo "$fw"
      return 0
    fi
  done
  echo "unknown"
}

FW="$(detect_fw)"
OUT='[]'
|
|
26
|
+
|
|
27
|
+
# emit PATH KIND FILE DYNAMIC CATCH_ALL
#   Append one route record to the OUT accumulator.
emit() {
  local route_path="$1" route_kind="$2" route_file="$3" is_dyn="$4" is_catch="$5"
  OUT="$(jq --arg p "$route_path" --arg k "$route_kind" --arg f "$route_file" \
    --argjson d "$is_dyn" --argjson c "$is_catch" \
    '. + [{path:$p, kind:$k, file:$f, dynamic:$d, catch_all:$c}]' <<<"$OUT")"
}
|
|
33
|
+
|
|
34
|
+
# Translate a framework route-file path into a URL path.
#   - strips known roots: src/app, app, src/pages, pages, src/routes,
#     app/routes, routes
#   - strips the route filename/extension (page.*, +page.svelte, index.*, ...)
#   - drops Next route-group "(group)" segments and private "_segment" dirs
#   - Remix only (reads $FW): dots become slashes and $param becomes :param
#   [param] bracket notation is preserved for the caller to interpret.
path_to_url() {
  local p="$1"
  # strip known leading prefixes
  p="${p#src/app/}"; p="${p#app/}"
  p="${p#src/pages/}"; p="${p#pages/}"
  p="${p#src/routes/}"; p="${p#app/routes/}"
  p="${p#routes/}"
  # drop trailing filename
  p="${p%/page.tsx}"; p="${p%/page.ts}"; p="${p%/page.jsx}"; p="${p%/page.js}"
  p="${p%/+page.svelte}"; p="${p%/+layout.svelte}"
  p="${p%/index.tsx}"; p="${p%/index.ts}"; p="${p%/index.jsx}"; p="${p%/index.js}"
  p="${p%/index.vue}"; p="${p%/index.astro}"
  p="${p%.tsx}"; p="${p%.ts}"; p="${p%.jsx}"; p="${p%.js}"
  p="${p%.vue}"; p="${p%.astro}"; p="${p%.svelte}"
  # FIX(perf): one awk pass (the original forked awk twice) drops both Next
  # route-group "(...)" segments and private "_..." segments; the combined
  # filter is order-independent, so the result is identical.
  p="$(echo "$p" | awk -F/ '{
    out=""; for(i=1;i<=NF;i++){
      if($i ~ /^\(.*\)$/) continue;
      if($i ~ /^_/) continue;
      out = out (out==""?"":"/") $i;
    }
    print out
  }')"
  # remix: convert dot-delimited route files to slashes, $param -> :param
  if [[ "$FW" == "remix" ]]; then
    p="$(echo "$p" | sed 's/\./\//g; s/\$/:/g')"
  fi
  # sveltekit/nuxt: [[optional]] / [...rest] already close enough; leave as-is
  echo "/${p}"
}
|
|
75
|
+
|
|
76
|
+
# classify URL FILE
#   Print "<dynamic> <catch_all>" booleans for a URL. The FILE argument is
#   accepted for symmetry with the call sites but is not consulted.
classify() {
  local url="$1" file="$2"
  local is_dyn=false is_catch=false
  # [param] or :param segments mean the route is dynamic.
  if [[ "$url" == *"["*"]"* || "$url" == *":"* ]]; then is_dyn=true; fi
  # [...rest] (or a $$ splat marker) means it is a catch-all.
  if [[ "$url" == *"[..."*"]"* || "$url" == *"\$\$"* ]]; then is_catch=true; fi
  echo "$is_dyn $is_catch"
}
|
|
84
|
+
|
|
85
|
+
# --- Next.js app router -----------------------------------------------------
if [[ "$FW" == "next" ]]; then
  # page files -> one "page" route each
  while IFS= read -r src; do
    [[ -z "$src" ]] && continue
    url="$(path_to_url "$src")"
    read -r is_dyn is_catch < <(classify "$url" "$src")
    emit "$url" "page" "$src" "$is_dyn" "$is_catch"
  done < <(find app src/app -type f \( -name 'page.tsx' -o -name 'page.ts' -o -name 'page.jsx' -o -name 'page.js' \) 2>/dev/null)

  # layout files (never dynamic/catch-all on their own)
  while IFS= read -r src; do
    [[ -z "$src" ]] && continue
    emit "$(path_to_url "$src")" "layout" "$src" false false
  done < <(find app src/app -type f \( -name 'layout.tsx' -o -name 'layout.ts' \) 2>/dev/null)

  # parallel-route slots: directories named @slot
  while IFS= read -r dir; do
    [[ -z "$dir" ]] && continue
    emit "${dir#*app/}" "parallel" "$dir" false false
  done < <(find app src/app -type d -name '@*' 2>/dev/null)

  # intercepting routes: directories named (.)x / (..)x / (...)x
  while IFS= read -r dir; do
    [[ -z "$dir" ]] && continue
    emit "${dir#*app/}" "intercepting" "$dir" false false
  done < <(find app src/app -type d \( -name '(.)*' -o -name '(..)*' -o -name '(...)*' \) 2>/dev/null)

  # Pages router (legacy) — skip framework internals and API routes
  while IFS= read -r src; do
    [[ -z "$src" ]] && continue
    case "$src" in
      */_app.*|*/_document.*|*/_error.*|*/api/*) continue ;;
    esac
    url="$(path_to_url "$src")"
    read -r is_dyn is_catch < <(classify "$url" "$src")
    emit "$url" "page" "$src" "$is_dyn" "$is_catch"
  done < <(find pages src/pages -type f \( -name '*.tsx' -o -name '*.ts' -o -name '*.jsx' -o -name '*.js' \) 2>/dev/null)
fi
|
|
122
|
+
|
|
123
|
+
# --- Remix ------------------------------------------------------------------
if [[ "$FW" == "remix" ]]; then
  while IFS= read -r src; do
    [[ -z "$src" ]] && continue
    url="$(path_to_url "$src")"
    read -r is_dyn is_catch < <(classify "$url" "$src")
    emit "$url" "page" "$src" "$is_dyn" "$is_catch"
  done < <(find app/routes src/routes -type f \( -name '*.tsx' -o -name '*.ts' \) 2>/dev/null)
fi

# --- SvelteKit --------------------------------------------------------------
if [[ "$FW" == "sveltekit" ]]; then
  while IFS= read -r src; do
    [[ -z "$src" ]] && continue
    url="$(path_to_url "$src")"
    read -r is_dyn is_catch < <(classify "$url" "$src")
    emit "$url" "page" "$src" "$is_dyn" "$is_catch"
  done < <(find src/routes -type f -name '+page.svelte' 2>/dev/null)
fi

# --- Nuxt -------------------------------------------------------------------
if [[ "$FW" == "nuxt" ]]; then
  while IFS= read -r src; do
    [[ -z "$src" ]] && continue
    url="$(path_to_url "$src")"
    read -r is_dyn is_catch < <(classify "$url" "$src")
    emit "$url" "page" "$src" "$is_dyn" "$is_catch"
  done < <(find pages src/pages -type f \( -name '*.vue' \) 2>/dev/null)
fi

# --- Astro ------------------------------------------------------------------
if [[ "$FW" == "astro" ]]; then
  while IFS= read -r src; do
    [[ -z "$src" ]] && continue
    url="$(path_to_url "$src")"
    read -r is_dyn is_catch < <(classify "$url" "$src")
    emit "$url" "page" "$src" "$is_dyn" "$is_catch"
  done < <(find src/pages -type f \( -name '*.astro' -o -name '*.tsx' -o -name '*.ts' \) 2>/dev/null)
fi

# De-duplicate (path, file) pairs and print the final JSON array.
jq '. | unique_by([.path, .file])' <<<"$OUT"
|