tack-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +232 -0
- package/dist/App.d.ts +5 -0
- package/dist/App.js +17 -0
- package/dist/detectors/admin.d.ts +2 -0
- package/dist/detectors/admin.js +33 -0
- package/dist/detectors/auth.d.ts +2 -0
- package/dist/detectors/auth.js +86 -0
- package/dist/detectors/database.d.ts +2 -0
- package/dist/detectors/database.js +96 -0
- package/dist/detectors/duplicates.d.ts +2 -0
- package/dist/detectors/duplicates.js +23 -0
- package/dist/detectors/exports.d.ts +2 -0
- package/dist/detectors/exports.js +30 -0
- package/dist/detectors/framework.d.ts +2 -0
- package/dist/detectors/framework.js +71 -0
- package/dist/detectors/index.d.ts +12 -0
- package/dist/detectors/index.js +128 -0
- package/dist/detectors/jobs.d.ts +2 -0
- package/dist/detectors/jobs.js +62 -0
- package/dist/detectors/multiuser.d.ts +2 -0
- package/dist/detectors/multiuser.js +55 -0
- package/dist/detectors/payments.d.ts +2 -0
- package/dist/detectors/payments.js +49 -0
- package/dist/detectors/rules/auth.yaml +24 -0
- package/dist/detectors/rules/database.yaml +27 -0
- package/dist/detectors/rules/exports.yaml +28 -0
- package/dist/detectors/rules/framework.yaml +26 -0
- package/dist/detectors/rules/jobs.yaml +23 -0
- package/dist/detectors/rules/payments.yaml +22 -0
- package/dist/detectors/types.d.ts +2 -0
- package/dist/detectors/types.js +1 -0
- package/dist/detectors/yamlRunner.d.ts +31 -0
- package/dist/detectors/yamlRunner.js +128 -0
- package/dist/engine/cleanup.d.ts +12 -0
- package/dist/engine/cleanup.js +101 -0
- package/dist/engine/compaction.d.ts +5 -0
- package/dist/engine/compaction.js +44 -0
- package/dist/engine/compareSpec.d.ts +2 -0
- package/dist/engine/compareSpec.js +74 -0
- package/dist/engine/computeDrift.d.ts +6 -0
- package/dist/engine/computeDrift.js +133 -0
- package/dist/engine/contextPack.d.ts +4 -0
- package/dist/engine/contextPack.js +169 -0
- package/dist/engine/decisions.d.ts +4 -0
- package/dist/engine/decisions.js +21 -0
- package/dist/engine/diff.d.ts +46 -0
- package/dist/engine/diff.js +210 -0
- package/dist/engine/handoff.d.ts +7 -0
- package/dist/engine/handoff.js +469 -0
- package/dist/engine/status.d.ts +10 -0
- package/dist/engine/status.js +46 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +299 -0
- package/dist/lib/cli.d.ts +4 -0
- package/dist/lib/cli.js +8 -0
- package/dist/lib/files.d.ts +48 -0
- package/dist/lib/files.js +529 -0
- package/dist/lib/git.d.ts +9 -0
- package/dist/lib/git.js +96 -0
- package/dist/lib/logger.d.ts +3 -0
- package/dist/lib/logger.js +21 -0
- package/dist/lib/ndjson.d.ts +2 -0
- package/dist/lib/ndjson.js +45 -0
- package/dist/lib/notes.d.ts +8 -0
- package/dist/lib/notes.js +144 -0
- package/dist/lib/notify.d.ts +1 -0
- package/dist/lib/notify.js +14 -0
- package/dist/lib/project.d.ts +1 -0
- package/dist/lib/project.js +17 -0
- package/dist/lib/promptSafety.d.ts +1 -0
- package/dist/lib/promptSafety.js +20 -0
- package/dist/lib/signals.d.ts +279 -0
- package/dist/lib/signals.js +55 -0
- package/dist/lib/tty.d.ts +2 -0
- package/dist/lib/tty.js +10 -0
- package/dist/lib/validate.d.ts +9 -0
- package/dist/lib/validate.js +282 -0
- package/dist/lib/yaml.d.ts +4 -0
- package/dist/lib/yaml.js +26 -0
- package/dist/mcp.d.ts +1 -0
- package/dist/mcp.js +259 -0
- package/dist/plain/colors.d.ts +5 -0
- package/dist/plain/colors.js +16 -0
- package/dist/plain/diff.d.ts +1 -0
- package/dist/plain/diff.js +129 -0
- package/dist/plain/handoff.d.ts +1 -0
- package/dist/plain/handoff.js +9 -0
- package/dist/plain/init.d.ts +1 -0
- package/dist/plain/init.js +44 -0
- package/dist/plain/notes.d.ts +5 -0
- package/dist/plain/notes.js +49 -0
- package/dist/plain/status.d.ts +2 -0
- package/dist/plain/status.js +13 -0
- package/dist/plain/watch.d.ts +1 -0
- package/dist/plain/watch.js +78 -0
- package/dist/ui/CleanupPlan.d.ts +5 -0
- package/dist/ui/CleanupPlan.js +8 -0
- package/dist/ui/DetectorSweep.d.ts +6 -0
- package/dist/ui/DetectorSweep.js +54 -0
- package/dist/ui/DriftAlert.d.ts +7 -0
- package/dist/ui/DriftAlert.js +105 -0
- package/dist/ui/Handoff.d.ts +1 -0
- package/dist/ui/Handoff.js +37 -0
- package/dist/ui/Init.d.ts +1 -0
- package/dist/ui/Init.js +117 -0
- package/dist/ui/Logo.d.ts +1 -0
- package/dist/ui/Logo.js +13 -0
- package/dist/ui/SpecSummary.d.ts +8 -0
- package/dist/ui/SpecSummary.js +15 -0
- package/dist/ui/Status.d.ts +1 -0
- package/dist/ui/Status.js +38 -0
- package/dist/ui/Watch.d.ts +1 -0
- package/dist/ui/Watch.js +136 -0
- package/dist/yoga.wasm +0 -0
- package/package.json +50 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
# tack
|
|
2
|
+
|
|
3
|
+
Architecture drift guard. Declare your spec. Tack enforces it.
|
|
4
|
+
|
|
5
|
+
## Why Tack
|
|
6
|
+
|
|
7
|
+
`tack` is a context and change-tracking layer for agent-driven software work.
|
|
8
|
+
|
|
9
|
+
It gives agents and humans a shared project memory across sessions:
|
|
10
|
+
|
|
11
|
+
- Captures architecture intent in `spec.yaml` and supporting context docs.
|
|
12
|
+
- Detects architecture signals in code and tracks drift over time.
|
|
13
|
+
- Generates handoff artifacts (`.md` + canonical `.json`) for the next agent/session.
|
|
14
|
+
- Preserves machine history in append-only logs.
|
|
15
|
+
- Supports explicit decision and note write-back for continuity.
|
|
16
|
+
|
|
17
|
+
## Persistent Context in `.tack/`
|
|
18
|
+
|
|
19
|
+
All state lives in `./.tack/` so work survives restarts, agent changes, and handoffs:
|
|
20
|
+
|
|
21
|
+
- `context.md`, `goals.md`, `assumptions.md`, `open_questions.md` - human intent and constraints.
|
|
22
|
+
- `decisions.md` - durable decision history with reasoning.
|
|
23
|
+
- `_notes.ndjson` - timestamped agent notes between sessions.
|
|
24
|
+
- `spec.yaml` - declared architecture contract (allowed/forbidden systems, constraints, optional `domains` map).
|
|
25
|
+
- `_audit.yaml` - latest detector snapshot.
|
|
26
|
+
- `_drift.yaml` - unresolved/accepted/rejected drift items.
|
|
27
|
+
- `_logs.ndjson` - append-only machine event stream.
|
|
28
|
+
- `handoffs/*.md` and `handoffs/*.json` - transfer artifacts for the next session.
|
|
29
|
+
- `verification.md` - validation steps carried into handoffs.
|
|
30
|
+
|
|
31
|
+
Agents and tools consume this state via:
|
|
32
|
+
|
|
33
|
+
- The `tack-mcp` server (Model Context Protocol), which exposes context resources and write-back tools.
|
|
34
|
+
- Direct file access to `.tack/`, where human-authored docs and machine-managed state live together.
|
|
35
|
+
|
|
36
|
+
## Change Tracking Workflow
|
|
37
|
+
|
|
38
|
+
- `tack status` updates `_audit.yaml` and computes drift against your spec.
|
|
39
|
+
- `tack watch` continuously rescans and appends events to `_logs.ndjson`.
|
|
40
|
+
- `tack handoff` packages context + machine state + git deltas for the next session.
|
|
41
|
+
- `tack log` and `tack note` store decisions and notes that future agents can reuse.
|
|
42
|
+
|
|
43
|
+
## Install
|
|
44
|
+
|
|
45
|
+
```bash
|
|
46
|
+
npm install
|
|
47
|
+
npm run build
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
Optionally, make the `tack` CLI available on your machine:
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
npm link
|
|
54
|
+
# now `tack` is available globally on this machine
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
Or package for use in another project:
|
|
58
|
+
|
|
59
|
+
```bash
|
|
60
|
+
npm pack
|
|
61
|
+
# then install the tarball in another project if desired
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
## Usage
|
|
65
|
+
|
|
66
|
+
From any project directory:
|
|
67
|
+
|
|
68
|
+
```bash
|
|
69
|
+
node /absolute/path/to/tack/dist/index.js init
|
|
70
|
+
node /absolute/path/to/tack/dist/index.js status
|
|
71
|
+
node /absolute/path/to/tack/dist/index.js watch
|
|
72
|
+
node /absolute/path/to/tack/dist/index.js handoff
|
|
73
|
+
node /absolute/path/to/tack/dist/index.js mcp
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
Within the `tack` repo itself:
|
|
77
|
+
|
|
78
|
+
```bash
|
|
79
|
+
node dist/index.js help
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
## `tack watch` Preview
|
|
83
|
+
|
|
84
|
+

|
|
85
|
+
|
|
86
|
+
## Typical Multi-Session Loop
|
|
87
|
+
|
|
88
|
+
```bash
|
|
89
|
+
# Session start
|
|
90
|
+
tack status
|
|
91
|
+
|
|
92
|
+
# During work
|
|
93
|
+
tack watch
|
|
94
|
+
|
|
95
|
+
# Record key intent changes
|
|
96
|
+
tack log
|
|
97
|
+
tack note
|
|
98
|
+
|
|
99
|
+
# Session end
|
|
100
|
+
tack handoff
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
## Using Tack with Agents
|
|
104
|
+
|
|
105
|
+
Tack treats LLM agents as **clients of a deterministic engine**. Agents should read context from `.tack/` and write back through the documented channels instead of mutating machine-managed files directly.
|
|
106
|
+
|
|
107
|
+
### MCP (Model Context Protocol)
|
|
108
|
+
|
|
109
|
+
**Run the MCP server:** From a project that has `.tack/`, run:
|
|
110
|
+
|
|
111
|
+
```bash
|
|
112
|
+
tack mcp
|
|
113
|
+
```
|
|
114
|
+
|
|
115
|
+
If `tack` is on your PATH (e.g. `npm link` from the tack clone), that’s all you need. Or: `node /path/to/tack/dist/index.js mcp`. The server reads `.tack/` from the current working directory, so run it from your **project root**.
|
|
116
|
+
|
|
117
|
+
**Cursor MCP:** Add an MCP server in Cursor (Settings → Tools & MCP) with command `tack`, args `["mcp"]`, and **cwd** = your project root (the directory that contains `.tack/`). If `tack` isn’t on PATH, use command `node`, args `["/path/to/tack/dist/index.js", "mcp"]`, cwd = project root. Restart Cursor after changing MCP config.
|
|
118
|
+
|
|
119
|
+
The server (`tack-mcp`) exposes these key resources:
|
|
120
|
+
|
|
121
|
+
- `tack://context/intent` – `context.md`, `goals.md`, `open_questions.md`, `decisions.md`
|
|
122
|
+
- `tack://context/facts` – `implementation_status.md` and `spec.yaml`
|
|
123
|
+
- `tack://context/machine_state` – `_audit.yaml` and `_drift.yaml`
|
|
124
|
+
- `tack://context/decisions_recent` – recent decisions as markdown
|
|
125
|
+
- `tack://handoff/latest` – latest handoff JSON (`.tack/handoffs/*.json`)
|
|
126
|
+
|
|
127
|
+
And these tools for write-back:
|
|
128
|
+
|
|
129
|
+
- `log_decision` – append a decision to `.tack/decisions.md` and log a `decision` event
|
|
130
|
+
- `log_agent_note` – append an agent note to `.tack/_notes.ndjson`
|
|
131
|
+
|
|
132
|
+
### Direct File Access
|
|
133
|
+
|
|
134
|
+
Agents without MCP should:
|
|
135
|
+
|
|
136
|
+
- **Read**:
|
|
137
|
+
- `.tack/spec.yaml` — architecture guardrails
|
|
138
|
+
- `.tack/context.md`, `.tack/goals.md`, `.tack/assumptions.md`, `.tack/open_questions.md`
|
|
139
|
+
- `.tack/implementation_status.md`
|
|
140
|
+
- `.tack/_audit.yaml`, `.tack/_drift.yaml`
|
|
141
|
+
- `.tack/verification.md` — validation/verification steps to run after changes
|
|
142
|
+
- `.tack/handoffs/*.json`, `.tack/handoffs/*.md`
|
|
143
|
+
- `.tack/_notes.ndjson` — agent working notes (NDJSON)
|
|
144
|
+
- **Write back**:
|
|
145
|
+
- Append decisions to `.tack/decisions.md`: `- [YYYY-MM-DD] Decision — reason`
|
|
146
|
+
- Use the CLI to log notes: `tack note --message "..." --type discovered --actor agent:cursor`
|
|
147
|
+
- Or append NDJSON lines manually to `.tack/_notes.ndjson` if the CLI is not available
|
|
148
|
+
|
|
149
|
+
Do **not** modify `.tack/_drift.yaml`, `.tack/_audit.yaml`, or `.tack/_logs.ndjson` directly; they are machine-managed.
|
|
150
|
+
|
|
151
|
+
## Detectors and YAML rules
|
|
152
|
+
|
|
153
|
+
Detection is **YAML-driven**. Bundled rules live in `src/detectors/rules/*.yaml` and are shipped with the CLI. At runtime we also load any `*.yaml` from `.tack/detectors/` (optional project extension).
|
|
154
|
+
|
|
155
|
+
Each rule file uses this schema:
|
|
156
|
+
|
|
157
|
+
- **Top-level:** `name`, `displayName`, `signalId`, `category` (`system` | `scope` | `risk`).
|
|
158
|
+
- **`systems`:** list of entries, each with:
|
|
159
|
+
- `id` — system identifier (e.g. `nextjs`, `prisma`, `stripe`)
|
|
160
|
+
- `packages` — npm package names that imply this system
|
|
161
|
+
- `configFiles` — paths to look for (e.g. `next.config.js`)
|
|
162
|
+
- `directories` — optional dirs (e.g. `src/jobs`)
|
|
163
|
+
- `routePatterns` — optional regex strings to grep in project files
|
|
164
|
+
|
|
165
|
+
If any of packages/configFiles/directories/routePatterns match for a system, one signal is emitted (confidence 1). Invalid YAML or bad regex is skipped without failing the scan. The only detectors still implemented in TypeScript are `multiuser`, `admin`, and `duplicates`; all other primary systems (framework, auth, db, payments, background_jobs, exports) are defined in YAML.
|
|
166
|
+
|
|
167
|
+
## Commands
|
|
168
|
+
|
|
169
|
+
### `init`
|
|
170
|
+
|
|
171
|
+
- Runs a detector sweep
|
|
172
|
+
- Prompts you to classify detected systems as allowed/forbidden/skip
|
|
173
|
+
- Writes initial files under `./.tack/`
|
|
174
|
+
|
|
175
|
+
### `status`
|
|
176
|
+
|
|
177
|
+
- Runs a one-shot scan
|
|
178
|
+
- Updates `./.tack/_audit.yaml`
|
|
179
|
+
- Computes drift and prints summary
|
|
180
|
+
|
|
181
|
+
### `watch`
|
|
182
|
+
|
|
183
|
+
- Starts persistent file watching
|
|
184
|
+
- Re-scans on file changes
|
|
185
|
+
- Creates drift items for new violations/risks/undeclared systems
|
|
186
|
+
- Sends OS notifications for violations and risks
|
|
187
|
+
- Press `q` to quit
|
|
188
|
+
|
|
189
|
+
### `handoff`
|
|
190
|
+
|
|
191
|
+
- Reads context docs + current machine state
|
|
192
|
+
- Reads file-level git changes
|
|
193
|
+
- Writes `./.tack/handoffs/<timestamp>.md`
|
|
194
|
+
- Writes `./.tack/handoffs/<timestamp>.json` (canonical)
|
|
195
|
+
- Includes a **Validation / Verification** section driven by `.tack/verification.md`:
|
|
196
|
+
- Each bullet/numbered item becomes a `verification.steps` entry in JSON and a markdown bullet
|
|
197
|
+
- Intended for humans or external tools to know which commands/checks to run after applying the handoff
|
|
198
|
+
- Tack does **not** execute these commands automatically
|
|
199
|
+
|
|
200
|
+
## Keyboard Controls
|
|
201
|
+
|
|
202
|
+
In selection prompts (`init`, drift options):
|
|
203
|
+
|
|
204
|
+
- `↑` / `↓` to move
|
|
205
|
+
- `Enter` to confirm
|
|
206
|
+
|
|
207
|
+
## Development
|
|
208
|
+
|
|
209
|
+
```bash
|
|
210
|
+
npm run typecheck
|
|
211
|
+
bun test
|
|
212
|
+
npm run dev
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
Optional Bun fast path for build contributors:
|
|
216
|
+
|
|
217
|
+
```bash
|
|
218
|
+
npm run build:bun
|
|
219
|
+
```
|
|
220
|
+
|
|
221
|
+
Optional Bun source-run for contributors who have Bun:
|
|
222
|
+
|
|
223
|
+
```bash
|
|
224
|
+
npm run dev:bun
|
|
225
|
+
```
|
|
226
|
+
|
|
227
|
+
## Notes
|
|
228
|
+
|
|
229
|
+
- Offline-only (no network calls)
|
|
230
|
+
- Writes are guarded to `./.tack/` only
|
|
231
|
+
- Python virtual environments are ignored during scans (`venv`, `.venv`, `site-packages`) to avoid false positives
|
|
232
|
+
|
package/dist/App.d.ts
ADDED
package/dist/App.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { Init } from "./ui/Init.js";
|
|
3
|
+
import { Status } from "./ui/Status.js";
|
|
4
|
+
import { Watch } from "./ui/Watch.js";
|
|
5
|
+
import { Handoff } from "./ui/Handoff.js";
|
|
6
|
+
/**
 * Root UI component: maps an interactive CLI command name to its screen.
 *
 * @param {{ command: string }} props - `command` is one of the interactive
 *   commands: "init" | "status" | "watch" | "handoff".
 * @returns the screen element for the command, or `null` for any other
 *   value. Previously an unmatched command fell off the switch and returned
 *   `undefined`, which React (pre-18) rejects as a component return value.
 */
export function App({ command }) {
    switch (command) {
        case "init":
            return _jsx(Init, {});
        case "status":
            return _jsx(Status, {});
        case "watch":
            return _jsx(Watch, {});
        case "handoff":
            return _jsx(Handoff, {});
        default:
            // Non-UI commands (e.g. "help", "mcp") are presumably dispatched
            // before App renders; return null defensively if one slips through.
            return null;
    }
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { createSignal } from "../lib/signals.js";
|
|
2
|
+
import { grepFiles, listProjectFiles } from "../lib/files.js";
|
|
3
|
+
// File-path fragments that indicate dedicated admin routes/pages.
const ADMIN_ROUTE_PATTERNS = [
    /\/admin\//,
    /\/dashboard\/admin/,
    /app\/admin\//,
    /pages\/admin\//,
    /src\/routes\/admin/,
];
// Source-level patterns that indicate admin-only guards or role checks.
const ADMIN_MIDDLEWARE_PATTERNS = [
    /isAdmin|requireAdmin|adminOnly|checkAdmin|AdminGuard/,
    /role\s*===?\s*["']admin["']/,
    /roles?\s*\.includes\s*\(\s*["']admin["']\)/,
];
/**
 * Detects an admin panel: first by admin-looking file paths (confidence 0.8),
 * then — only when no route files were found — by admin guard/middleware
 * patterns in file contents (confidence 0.6). Never throws; any scan
 * failure yields an empty signal list.
 */
export function detectAdmin() {
    try {
        const signals = [];
        const projectFiles = listProjectFiles();
        const routeHits = projectFiles.filter((file) => ADMIN_ROUTE_PATTERNS.some((pattern) => pattern.test(file)));
        if (routeHits.length > 0) {
            const shownFiles = routeHits.slice(0, 5).join(", ");
            signals.push(createSignal("scope", "admin_panel", shownFiles, 0.8, `${routeHits.length} admin route file(s)`));
        }
        const guardHits = ADMIN_MIDDLEWARE_PATTERNS.flatMap((pattern) => grepFiles(projectFiles, pattern, 5));
        // Guard matches only count when no route files already produced a signal.
        if (guardHits.length > 0 && routeHits.length === 0) {
            const uniqueFiles = [...new Set(guardHits.map((hit) => hit.file))];
            signals.push(createSignal("scope", "admin_panel", uniqueFiles.join(", "), 0.6, "Admin guards/middleware found"));
        }
        return { name: "admin", signals };
    }
    catch {
        return { name: "admin", signals: [] };
    }
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import { createSignal } from "../lib/signals.js";
|
|
2
|
+
import { readJson, fileExists, grepFiles, listProjectFiles } from "../lib/files.js";
|
|
3
|
+
// Known auth providers. For each: npm packages, code patterns, and config
// files whose presence implies the provider is in use.
const AUTH_SYSTEMS = [
    {
        id: "clerk",
        packages: ["@clerk/nextjs", "@clerk/clerk-react", "@clerk/express"],
        routePatterns: [/clerkMiddleware|ClerkProvider|useAuth|useUser/],
        configFiles: [],
    },
    {
        id: "nextauth",
        packages: ["next-auth", "@auth/core"],
        routePatterns: [/NextAuth|getServerSession|useSession/],
        configFiles: ["auth.ts", "auth.config.ts", "src/auth.ts"],
    },
    {
        id: "auth0",
        packages: ["@auth0/nextjs-auth0", "@auth0/auth0-react", "auth0"],
        routePatterns: [/Auth0Provider|useUser|withPageAuthRequired/],
        configFiles: [],
    },
    {
        id: "supabase-auth",
        packages: ["@supabase/auth-helpers-nextjs", "@supabase/ssr"],
        routePatterns: [/createClientComponentClient|createServerComponentClient/],
        configFiles: [],
    },
    {
        id: "lucia",
        packages: ["lucia", "@lucia-auth/adapter-prisma", "@lucia-auth/adapter-drizzle"],
        routePatterns: [/Lucia|validateSessionCookie/],
        configFiles: [],
    },
    {
        id: "passport",
        packages: ["passport", "passport-local", "passport-google-oauth20"],
        routePatterns: [/passport\.authenticate|passport\.use/],
        configFiles: [],
    },
];
/**
 * Detects auth systems by combining three evidence sources per provider:
 * declared dependencies (0.8), a known config file (0.9), and a code-pattern
 * hit (+0.1, capped at 1). Package evidence corroborated by config or code
 * is treated as certain (confidence 1). Never throws.
 */
export function detectAuth() {
    try {
        const signals = [];
        const pkg = readJson("package.json");
        const allDeps = { ...pkg?.dependencies, ...pkg?.devDependencies };
        const projectFiles = listProjectFiles();
        for (const system of AUTH_SYSTEMS) {
            const matchedPkgs = system.packages.filter((name) => name in allDeps);
            const matchedConfig = system.configFiles.find((path) => fileExists(path));
            let matchedRouteFile;
            for (const pattern of system.routePatterns) {
                const hits = grepFiles(projectFiles, pattern, 1);
                if (hits.length > 0) {
                    matchedRouteFile = hits[0].file;
                    break;
                }
            }
            const sources = [];
            let confidence = 0;
            if (matchedPkgs.length > 0) {
                sources.push(`package.json (${matchedPkgs.join(", ")})`);
                confidence = 0.8;
            }
            if (matchedConfig) {
                sources.push(matchedConfig);
                confidence = Math.max(confidence, 0.9);
            }
            if (matchedRouteFile) {
                sources.push(matchedRouteFile);
                // NOTE(review): a route-pattern hit with no package/config yields
                // confidence 0.1 — presumably intentional (weak evidence alone);
                // confirm against the drift thresholds.
                confidence = Math.min(confidence + 0.1, 1);
            }
            if (matchedPkgs.length > 0 && (matchedConfig || matchedRouteFile)) {
                confidence = 1;
            }
            if (sources.length > 0) {
                signals.push(createSignal("system", "auth", sources.join(" + "), confidence, system.id));
            }
        }
        return { name: "auth", signals };
    }
    catch {
        return { name: "auth", signals: [] };
    }
}
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { createSignal } from "../lib/signals.js";
|
|
2
|
+
import { readJson, readFile, fileExists } from "../lib/files.js";
|
|
3
|
+
// Known ORMs: npm packages, config files, and an optional extractor that
// derives the underlying database type from the config file's content.
const ORM_SYSTEMS = [
    {
        id: "prisma",
        packages: ["prisma", "@prisma/client"],
        configFiles: ["prisma/schema.prisma"],
        dbTypeExtractor: (content) => {
            const match = content.match(/provider\s*=\s*"(postgresql|mysql|sqlite|mongodb|sqlserver|cockroachdb)"/);
            return match?.[1] ?? null;
        },
    },
    {
        id: "drizzle",
        packages: ["drizzle-orm", "drizzle-kit"],
        configFiles: ["drizzle.config.ts", "drizzle.config.js"],
        dbTypeExtractor: (content) => {
            if (content.includes("pg") || content.includes("postgres"))
                return "postgres";
            if (content.includes("mysql"))
                return "mysql";
            if (content.includes("sqlite") || content.includes("better-sqlite"))
                return "sqlite";
            return null;
        },
    },
    {
        id: "typeorm",
        packages: ["typeorm"],
        configFiles: ["ormconfig.json", "ormconfig.ts", "ormconfig.js"],
        dbTypeExtractor: null,
    },
    {
        id: "mongoose",
        packages: ["mongoose"],
        configFiles: [],
        // Mongoose always implies MongoDB; no config content is needed.
        dbTypeExtractor: () => "mongodb",
    },
    {
        id: "knex",
        packages: ["knex"],
        configFiles: ["knexfile.js", "knexfile.ts"],
        dbTypeExtractor: null,
    },
];
// Low-level driver packages used as a fallback to infer the database type
// when the ORM config didn't reveal it.
const DB_DRIVERS = [
    { pkg: "pg", dbType: "postgres" },
    { pkg: "mysql2", dbType: "mysql" },
    { pkg: "better-sqlite3", dbType: "sqlite" },
    { pkg: "mongodb", dbType: "mongodb" },
    { pkg: "@libsql/client", dbType: "sqlite" },
    { pkg: "@neondatabase/serverless", dbType: "postgres" },
    { pkg: "@planetscale/database", dbType: "mysql" },
];
/**
 * Detects database/ORM usage from package.json dependencies, refining the
 * database type from ORM config files or driver packages. Emits one "db"
 * signal per detected ORM (confidence 1 with a config file, else 0.8).
 * Never throws; any failure yields an empty signal list.
 */
export function detectDatabase() {
    try {
        const signals = [];
        const pkg = readJson("package.json");
        const allDeps = { ...pkg?.dependencies, ...pkg?.devDependencies };
        for (const orm of ORM_SYSTEMS) {
            const foundPkgs = orm.packages.filter((p) => p in allDeps);
            if (foundPkgs.length === 0)
                continue;
            let dbType = null;
            let configSource = null;
            for (const cf of orm.configFiles) {
                if (fileExists(cf)) {
                    configSource = cf;
                    if (orm.dbTypeExtractor) {
                        const content = readFile(cf);
                        if (content)
                            dbType = orm.dbTypeExtractor(content);
                    }
                    break;
                }
            }
            // Fix: ORMs with an extractor but no config files (mongoose) never
            // ran their extractor before, so their dbType stayed null and the
            // detail read "mongoose" instead of "mongoose + mongodb".
            if (!dbType && orm.configFiles.length === 0 && orm.dbTypeExtractor) {
                dbType = orm.dbTypeExtractor("");
            }
            // Driver-package fallback; mongoose is excluded because the
            // "mongodb" driver alone would misattribute the type.
            if (!dbType && orm.id !== "mongoose") {
                for (const driver of DB_DRIVERS) {
                    if (driver.pkg in allDeps) {
                        dbType = driver.dbType;
                        break;
                    }
                }
            }
            const detail = dbType ? `${orm.id} + ${dbType}` : orm.id;
            const sources = [`package.json (${foundPkgs.join(", ")})`];
            if (configSource)
                sources.push(configSource);
            signals.push(createSignal("system", "db", sources.join(" + "), configSource ? 1 : 0.8, detail));
        }
        return { name: "database", signals };
    }
    catch {
        return { name: "database", signals: [] };
    }
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { createSignal } from "../lib/signals.js";
|
|
2
|
+
/**
 * Flags the same system slot (e.g. "db", "auth") being filled by more than
 * one distinct implementation, based on already-collected system signals.
 * Emits one "risk" signal per system id with multiple distinct details.
 */
export function detectDuplicates(allSignals) {
    const bySystemId = new Map();
    for (const signal of allSignals) {
        if (signal.category !== "system")
            continue;
        const bucket = bySystemId.get(signal.id);
        if (bucket)
            bucket.push(signal);
        else
            bySystemId.set(signal.id, [signal]);
    }
    const signals = [];
    for (const [id, bucket] of bySystemId) {
        if (bucket.length <= 1)
            continue;
        // Distinct, non-empty details in first-seen order.
        const distinctDetails = [...new Set(bucket.map((s) => s.detail).filter((d) => Boolean(d)))];
        if (distinctDetails.length > 1) {
            const combinedSources = bucket.map((s) => s.source).join(" + ");
            signals.push(createSignal("risk", `duplicate_${id}`, combinedSources, 0.9, `Multiple ${id} systems: ${distinctDetails.join(" + ")}`));
        }
    }
    return { name: "duplicates", signals };
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { createSignal } from "../lib/signals.js";
|
|
2
|
+
import { readJson } from "../lib/files.js";
|
|
3
|
+
// Packages whose presence implies document/data export features.
const EXPORT_PACKAGES = [
    { pkg: "jspdf", detail: "jspdf" },
    { pkg: "pdfkit", detail: "pdfkit" },
    { pkg: "@react-pdf/renderer", detail: "react-pdf" },
    { pkg: "puppeteer", detail: "puppeteer" },
    { pkg: "playwright", detail: "playwright" },
    { pkg: "html2canvas", detail: "html2canvas" },
    { pkg: "exceljs", detail: "exceljs" },
    { pkg: "xlsx", detail: "sheetjs" },
    { pkg: "csv-writer", detail: "csv-writer" },
    { pkg: "csv-stringify", detail: "csv-stringify" },
    { pkg: "json2csv", detail: "json2csv" },
];
/**
 * Detects export/reporting capability (PDF/XLSX/CSV generation) from
 * declared dependencies. Emits at most one "exports" signal (confidence 0.9)
 * listing every matched package. Never throws.
 */
export function detectExports() {
    try {
        const manifest = readJson("package.json");
        const allDeps = { ...manifest?.dependencies, ...manifest?.devDependencies };
        const matched = EXPORT_PACKAGES.filter((entry) => entry.pkg in allDeps);
        const signals = [];
        if (matched.length > 0) {
            const source = `package.json (${matched.map((m) => m.pkg).join(", ")})`;
            const detail = matched.map((m) => m.detail).join(", ");
            signals.push(createSignal("system", "exports", source, 0.9, detail));
        }
        return { name: "exports", signals };
    }
    catch {
        return { name: "exports", signals: [] };
    }
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { createSignal } from "../lib/signals.js";
|
|
2
|
+
import { readJson, fileExists } from "../lib/files.js";
|
|
3
|
+
// Known web frameworks: npm packages and the config files that identify them.
const FRAMEWORKS = [
    {
        id: "nextjs",
        packages: ["next"],
        configFiles: ["next.config.js", "next.config.mjs", "next.config.ts"],
    },
    {
        id: "remix",
        packages: ["@remix-run/node", "@remix-run/react"],
        configFiles: ["remix.config.js"],
    },
    {
        id: "sveltekit",
        packages: ["@sveltejs/kit"],
        configFiles: ["svelte.config.js"],
    },
    {
        id: "vite",
        packages: ["vite"],
        configFiles: ["vite.config.ts", "vite.config.js"],
    },
    {
        id: "express",
        packages: ["express"],
        configFiles: [],
    },
    {
        id: "fastify",
        packages: ["fastify"],
        configFiles: [],
    },
    {
        id: "hono",
        packages: ["hono"],
        configFiles: [],
    },
    {
        id: "astro",
        packages: ["astro"],
        configFiles: ["astro.config.mjs", "astro.config.ts"],
    },
];
/**
 * Detects web frameworks from dependencies and config files. Confidence:
 * 1 when both a package and a config file match, 0.9 for package only,
 * 0.8 for config file only. Never throws.
 */
export function detectFramework() {
    try {
        const manifest = readJson("package.json");
        const allDeps = {
            ...manifest?.dependencies,
            ...manifest?.devDependencies,
        };
        const signals = [];
        for (const fw of FRAMEWORKS) {
            const pkgHit = fw.packages.find((name) => name in allDeps);
            const configHit = fw.configFiles.find((path) => fileExists(path));
            if (!pkgHit && !configHit)
                continue;
            let source;
            let confidence;
            if (pkgHit && configHit) {
                source = `package.json (${pkgHit}) + ${configHit}`;
                confidence = 1;
            }
            else if (pkgHit) {
                source = `package.json (${pkgHit})`;
                confidence = 0.9;
            }
            else {
                source = configHit;
                confidence = 0.8;
            }
            signals.push(createSignal("system", "framework", source, confidence, fw.id));
        }
        return { name: "framework", signals };
    }
    catch {
        return { name: "framework", signals: [] };
    }
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { Signal, DetectorResult } from "../lib/signals.js";
|
|
2
|
+
/**
 * A registered detector: a stable machine name, a human-readable label,
 * and a synchronous runner producing a {@link DetectorResult}.
 */
export type DetectorEntry = {
    name: string;
    displayName: string;
    run: () => DetectorResult;
};
/** The built-in detector registry — presumably the set run during a full sweep; see index.js. */
export declare const PRIMARY_DETECTORS: DetectorEntry[];
/**
 * Runs all detectors, returning each detector's result alongside the
 * combined flat list of signals.
 */
export declare function runAllDetectors(): {
    results: DetectorResult[];
    signals: Signal[];
};
/** Selects the detectors relevant to a single changed file path — presumably for incremental re-scans in watch mode. */
export declare function detectorsForFileChange(filepath: string): DetectorEntry[];
|