flaw-kit 2.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flaw_kit-2.0.0/PKG-INFO +59 -0
- flaw_kit-2.0.0/README.md +39 -0
- flaw_kit-2.0.0/flaw_audit/__init__.py +3 -0
- flaw_kit-2.0.0/flaw_audit/cli.py +71 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/backend-integrity.js +133 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/base.js +25 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/data-model.js +111 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/deployment.js +111 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/error-handling.js +317 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/feature-reality.js +115 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/frontend-wiring.js +109 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/maintainability.js +92 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/security-auth.js +155 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/smells.js +86 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/testing.js +86 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/validation.js +138 -0
- flaw_kit-2.0.0/flaw_audit/dist/analyzers/wiring.js +781 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/dep-graph.js +84 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/explain.js +580 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/fix-reporter.js +97 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/html-reporter.js +1176 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/ignore.js +74 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/promise-reality.js +231 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/prompt-reporter.js +69 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/purpose-plan.js +530 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/repl.js +602 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/reporter.js +265 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/roadmap.js +158 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/rules-generator.js +157 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/scanner.js +53 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/scorer.js +160 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/symptoms.js +136 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/triage.js +104 -0
- flaw_kit-2.0.0/flaw_audit/dist/engine/watcher.js +118 -0
- flaw_kit-2.0.0/flaw_audit/dist/index.js +269 -0
- flaw_kit-2.0.0/flaw_audit/dist/types/index.js +14 -0
- flaw_kit-2.0.0/flaw_audit/dist/utils/colors.js +39 -0
- flaw_kit-2.0.0/flaw_audit/dist/utils/fs.js +122 -0
- flaw_kit-2.0.0/flaw_audit/dist/utils/git.js +30 -0
- flaw_kit-2.0.0/flaw_audit/dist/utils/patterns.js +66 -0
- flaw_kit-2.0.0/flaw_kit.egg-info/PKG-INFO +59 -0
- flaw_kit-2.0.0/flaw_kit.egg-info/SOURCES.txt +45 -0
- flaw_kit-2.0.0/flaw_kit.egg-info/dependency_links.txt +1 -0
- flaw_kit-2.0.0/flaw_kit.egg-info/entry_points.txt +2 -0
- flaw_kit-2.0.0/flaw_kit.egg-info/top_level.txt +1 -0
- flaw_kit-2.0.0/pyproject.toml +39 -0
- flaw_kit-2.0.0/setup.cfg +4 -0
flaw_kit-2.0.0/PKG-INFO
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: flaw-kit
|
|
3
|
+
Version: 2.0.0
|
|
4
|
+
Summary: FLAW — Flow Logic Audit Watch. Code auditor for AI-generated projects.
|
|
5
|
+
Author: resetroot99, ajakvani
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/resetroot99/FLAW
|
|
8
|
+
Project-URL: Repository, https://github.com/resetroot99/FLAW
|
|
9
|
+
Project-URL: Issues, https://github.com/resetroot99/FLAW/issues
|
|
10
|
+
Keywords: audit,code-quality,ai-code-review,static-analysis
|
|
11
|
+
Classifier: Development Status :: 4 - Beta
|
|
12
|
+
Classifier: Environment :: Console
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Programming Language :: Python :: 3
|
|
16
|
+
Classifier: Topic :: Software Development :: Quality Assurance
|
|
17
|
+
Classifier: Topic :: Software Development :: Testing
|
|
18
|
+
Requires-Python: >=3.8
|
|
19
|
+
Description-Content-Type: text/markdown
|
|
20
|
+
|
|
21
|
+
# flaw-kit
|
|
22
|
+
|
|
23
|
+
**FLAW — Flow Logic Audit Watch**
|
|
24
|
+
|
|
25
|
+
Code integrity auditor for AI-generated projects. Scans your codebase and tells you what's broken, what's fake, and what's missing.
|
|
26
|
+
|
|
27
|
+
This is the Python wrapper for the FLAW engine. Requires Node.js >= 18.
|
|
28
|
+
|
|
29
|
+
## Install
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
pip install flaw-kit
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Usage
|
|
36
|
+
|
|
37
|
+
```bash
|
|
38
|
+
# Interactive mode
|
|
39
|
+
flaw
|
|
40
|
+
|
|
41
|
+
# One-shot scan
|
|
42
|
+
flaw .
|
|
43
|
+
|
|
44
|
+
# Export HTML report
|
|
45
|
+
flaw ../my-app --html
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
## Requirements
|
|
49
|
+
|
|
50
|
+
- Python >= 3.8
|
|
51
|
+
- Node.js >= 18
|
|
52
|
+
|
|
53
|
+
## Full Documentation
|
|
54
|
+
|
|
55
|
+
See the [GitHub repo](https://github.com/resetroot99/FLAW) for full documentation, features, and configuration.
|
|
56
|
+
|
|
57
|
+
## License
|
|
58
|
+
|
|
59
|
+
MIT
|
flaw_kit-2.0.0/README.md
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
# flaw-kit
|
|
2
|
+
|
|
3
|
+
**FLAW — Flow Logic Audit Watch**
|
|
4
|
+
|
|
5
|
+
Code integrity auditor for AI-generated projects. Scans your codebase and tells you what's broken, what's fake, and what's missing.
|
|
6
|
+
|
|
7
|
+
This is the Python wrapper for the FLAW engine. Requires Node.js >= 18.
|
|
8
|
+
|
|
9
|
+
## Install
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
pip install flaw-kit
|
|
13
|
+
```
|
|
14
|
+
|
|
15
|
+
## Usage
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
# Interactive mode
|
|
19
|
+
flaw
|
|
20
|
+
|
|
21
|
+
# One-shot scan
|
|
22
|
+
flaw .
|
|
23
|
+
|
|
24
|
+
# Export HTML report
|
|
25
|
+
flaw ../my-app --html
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
## Requirements
|
|
29
|
+
|
|
30
|
+
- Python >= 3.8
|
|
31
|
+
- Node.js >= 18
|
|
32
|
+
|
|
33
|
+
## Full Documentation
|
|
34
|
+
|
|
35
|
+
See the [GitHub repo](https://github.com/resetroot99/FLAW) for full documentation, features, and configuration.
|
|
36
|
+
|
|
37
|
+
## License
|
|
38
|
+
|
|
39
|
+
MIT
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""
|
|
2
|
+
FLAW CLI — Python wrapper for the FLAW TypeScript engine.
|
|
3
|
+
Requires Node.js >= 18 to be installed.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import subprocess
|
|
7
|
+
import sys
|
|
8
|
+
import os
|
|
9
|
+
import shutil
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def find_node():
    """Locate a usable Node.js executable.

    Checks PATH first via ``shutil.which``, then probes a handful of
    well-known install locations (Homebrew, /usr/local, nvm).

    Returns:
        str | None: path to the ``node`` binary, or None if not found.
    """
    found = shutil.which("node")
    if found:
        return found
    # PATH lookup failed — probe common install locations directly.
    candidates = [
        "/usr/local/bin/node",
        "/opt/homebrew/bin/node",
        os.path.expanduser("~/.nvm/current/bin/node"),
    ]
    for candidate in candidates:
        if os.path.isfile(candidate):
            return candidate
    return None
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def find_entry():
    """Locate the FLAW JS engine entry point.

    Returns:
        str | None: path to the bundled ``dist/index.js`` shipped inside
        this package, or None when no bundle is present (the caller then
        falls back to invoking the engine through ``npx flaw-kit``).
    """
    # Bundled dist/ alongside this package
    pkg_dir = os.path.dirname(os.path.abspath(__file__))
    bundled = os.path.join(pkg_dir, "dist", "index.js")
    if os.path.isfile(bundled):
        return bundled
    # No bundled engine: return None so the caller uses its npx fallback.
    # (A previous `shutil.which("npx")` probe here was dead code — both
    # branches returned None — so it has been removed; main() performs
    # the real npx lookup itself.)
    return None
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def main():
    """CLI entry point: run the FLAW engine through Node.js.

    Resolves a Node.js binary, then either executes the bundled JS entry
    point directly or falls back to ``npx flaw-kit``. All CLI arguments
    are forwarded verbatim. Exits with the child process's return code,
    or 1 when Node.js or the engine cannot be found.
    """
    node = find_node()
    if not node:
        # Node.js is a hard requirement — bail out with install hints.
        print("\033[31mError: Node.js >= 18 is required.\033[0m")
        print("Install it from https://nodejs.org or via your package manager.")
        print("")
        print(" brew install node # macOS")
        print(" curl -fsSL https://fnm.vercel.app/install | bash # fnm")
        print(" apt install nodejs # Ubuntu/Debian")
        sys.exit(1)

    entry = find_entry()
    if entry:
        # Bundled engine found — run it directly under node.
        proc = subprocess.run([node, entry] + sys.argv[1:])
        sys.exit(proc.returncode)

    # No bundled engine — try the globally installed npm package via npx.
    npx = shutil.which("npx")
    if npx:
        proc = subprocess.run([npx, "flaw-kit"] + sys.argv[1:])
        sys.exit(proc.returncode)

    print("\033[31mError: FLAW engine not found.\033[0m")
    print("Install via npm: npm install -g flaw-kit")
    sys.exit(1)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
# Allow running this module directly (e.g. `python cli.py`).
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import { makeFinding, makeSmell, emptyResult } from './base.js';
|
|
2
|
+
import { searchFiles, filesMatching, extractSnippet } from '../utils/patterns.js';
|
|
3
|
+
import { isTestFile } from '../utils/fs.js';
|
|
4
|
+
// File classifiers shared by the checks below.

// A path looks "server-side" when it contains a backend-ish segment
// (api/route/controller/...) or carries a backend-language extension.
const SERVER_NAME_RE = /\b(api|server|route|action|controller|handler|middleware|mutation)\b/i;
const SERVER_EXT_RE = /\.(py|rb|go|rs|java|php)$/;
const serverFilter = (f) =>
    !isTestFile(f) && (SERVER_NAME_RE.test(f) || SERVER_EXT_RE.test(f));

// Any non-test source file in a language this analyzer understands.
const SRC_EXT_RE = /\.(ts|tsx|js|jsx|py|rb|go|java|php)$/;
const srcFilter = (f) => !isTestFile(f) && SRC_EXT_RE.test(f);
|
|
7
|
+
/**
 * Backend-integrity heuristics: non-persisting write handlers,
 * client fetches to unknown endpoints, and (partially implemented)
 * response-contract / orphan-route checks.
 *
 * @param ctx scan context with `files` (path list) and `fileContents`
 *   (Map of path -> file text). All checks are regex heuristics over text.
 * @returns analyzer result: `{ findings, smellHits }`.
 */
export function analyzeBackendIntegrity(ctx) {
    const result = emptyResult();
    // FK-BE-PERSIST-001: Writes that don't actually persist
    // Look for handlers that return success without DB writes
    const handlerFiles = filesMatching(ctx.fileContents, /(export\s+(default\s+)?function|export\s+const)\s+\w*(POST|PUT|PATCH|DELETE|create|update|save|delete|remove)/i, serverFilter);
    for (const file of handlerFiles) {
        const content = ctx.fileContents.get(file);
        // Broad net: any ORM/db keyword anywhere in the file counts as a DB op,
        // so a single persistence call suppresses the finding for the whole file.
        const hasDbOp = /(prisma|db|knex|mongoose|sequelize|supabase|drizzle|sql|query|insert|update|\.save\(|\.create\(|\.update\(|\.delete\(|\.destroy\()/i.test(content);
        const hasResponse = /(return|res\.(json|send|status)|NextResponse|Response)/i.test(content);
        if (hasResponse && !hasDbOp) {
            result.findings.push(makeFinding({
                ruleId: 'FK-BE-PERSIST-001',
                title: 'Write handler may not persist data',
                categoryId: 'BE',
                severity: 'high',
                confidence: 'medium',
                labels: ['Fragile', 'Incomplete'],
                summary: `Handler in ${file} returns a response but has no visible database operation.`,
                impact: 'Data may not actually be saved.',
                location: { file },
                codeSnippet: extractSnippet(ctx.fileContents, file, 1, 0, 8),
                suggestedFix: 'Verify the handler connects to a real persistence layer.',
            }));
            result.smellHits.push(makeSmell('SMELL-DISCONNECTED-BACKEND', 'Disconnected backend', 1));
        }
    }
    // FK-BE-CONTRACT-001: Inconsistent response patterns
    const responsePatterns = searchFiles(ctx.fileContents, /res\.(json|send)\(\s*\{[^}]*\}\s*\)/, serverFilter);
    // Check for inconsistent response shapes (success vs error)
    // NOTE(review): successShapes/errorShapes are populated below but never
    // read afterwards — FK-BE-CONTRACT-001 currently emits no findings.
    // Either finish the rule or delete this dead section.
    const successShapes = new Set();
    const errorShapes = new Set();
    for (const hit of responsePatterns) {
        if (/error|fail|400|401|403|404|500/i.test(hit.context)) {
            // Normalize literals so structurally-equal shapes compare equal.
            const shape = hit.context.replace(/['"`][^'"`]*['"`]/g, 'STR').replace(/\d+/g, 'NUM');
            errorShapes.add(shape);
        }
        else {
            const shape = hit.context.replace(/['"`][^'"`]*['"`]/g, 'STR').replace(/\d+/g, 'NUM');
            successShapes.add(shape);
        }
    }
    // FK-BE-ENDPOINT-001: fetch/axios calls to undefined endpoints
    // Collect frontend API calls — match various URL patterns
    // NOTE(review): the char class ['"``] contains a duplicated backtick
    // (harmless, but likely a typo for ['"`]).
    const clientFetches = searchFiles(ctx.fileContents, /fetch\(\s*['"``]([^'"`]+)['"``]|axios\.\w+\(\s*['"``]([^'"`]+)['"``]|api\.\w+\(\s*['"``]([^'"`]+)['"``]/, (f) => !isTestFile(f) && /\.(tsx?|jsx?)$/.test(f));
    // Collect all backend route paths from FastAPI/Express/Next.js
    const backendRoutes = new Set();
    // Next.js file-based routes
    for (const f of ctx.files) {
        if (/\/api\//.test(f) && serverFilter(f)) {
            // Strip trailing /route.ts or /index.ts to get the URL path.
            const match = f.match(/\/api\/(.+?)(?:\/route|\/index)?\.\w+$/);
            if (match)
                backendRoutes.add(`/api/${match[1]}`);
        }
    }
    // FastAPI/Express decorator-based routes
    for (const [file, content] of ctx.fileContents) {
        // Process the file when it is Python OR looks server-side.
        if (!/\.py$/.test(file) && !serverFilter(file))
            continue;
        const lines = content.split('\n');
        // Collect router prefixes: router = APIRouter(prefix="/api/v1/foo")
        // NOTE(review): only the first APIRouter prefix per file is honored.
        let routerPrefix = '';
        const prefixMatch = content.match(/APIRouter\s*\(\s*(?:.*?)prefix\s*=\s*['"`]([^'"`]+)['"`]/);
        if (prefixMatch)
            routerPrefix = prefixMatch[1];
        for (let i = 0; i < lines.length; i++) {
            // @router.get("/path"), @app.post("/path"), etc.
            const routeMatch = lines[i].match(/@(?:router|app)\.(get|post|put|patch|delete)\(\s*['"`]([^'"`]+)['"`]/);
            if (routeMatch) {
                const path = routerPrefix + routeMatch[2];
                backendRoutes.add(path);
            }
            // Express: router.get("/path", ...) or app.post("/path", ...)
            // NOTE(review): this pattern also re-matches the decorator form
            // above (without the prefix), so a FastAPI line can add both a
            // prefixed and an unprefixed route.
            const expressMatch = lines[i].match(/(?:router|app)\.(get|post|put|patch|delete)\(\s*['"`]([^'"`]+)['"`]/);
            if (expressMatch) {
                backendRoutes.add(expressMatch[2]);
            }
        }
    }
    for (const hit of clientFetches) {
        const urlMatch = hit.match.match(/['"`]([^'"`]+?)['"`]/);
        if (!urlMatch)
            continue;
        // Drop query string and trailing slashes before comparing.
        let url = urlMatch[1].replace(/\?.*/, '').replace(/\/+$/, '');
        // Skip template strings with complex expressions, relative URLs, external URLs
        if (/\$\{/.test(url) || /^https?:\/\//.test(url) || !url.startsWith('/'))
            continue;
        // Normalize dynamic segments for matching: /api/v1/foo/123 -> /api/v1/foo/{id}
        const urlNorm = url.replace(/\/\d+/g, '/{id}');
        const exists = Array.from(backendRoutes).some(route => {
            const routeNorm = route.replace(/\{[^}]+\}/g, '{id}');
            // Accept exact match, prefix match either way — deliberately loose
            // to keep confidence 'low' findings from flooding the report.
            return urlNorm === routeNorm || url === route || url.startsWith(route + '/') || route.startsWith(url);
        });
        if (!exists && backendRoutes.size > 0) {
            result.findings.push(makeFinding({
                ruleId: 'FK-BE-ENDPOINT-001',
                title: 'Client fetches endpoint that may not exist',
                categoryId: 'BE',
                severity: 'high',
                confidence: 'low',
                labels: ['Broken', 'Fake Flow'],
                summary: `Client calls ${url} but no matching API route was found.`,
                impact: 'Frontend action has no backend support.',
                location: { file: hit.file, startLine: hit.line },
                codeSnippet: extractSnippet(ctx.fileContents, hit.file, hit.line),
                suggestedFix: 'Create the API endpoint or fix the URL.',
            }));
        }
    }
    // FK-BE-ORPHAN-001: Backend routes with no frontend caller
    // NOTE(review): this whole section is dead code — calledUrls is built
    // but never compared against backendRoutes and no finding is emitted.
    // Finish the orphan-route comparison or remove the section.
    if (backendRoutes.size > 0 && clientFetches.length > 0) {
        const calledUrls = new Set();
        for (const hit of clientFetches) {
            const urlMatch = hit.match.match(/['"`]([^'"`]+?)['"`]/);
            if (urlMatch)
                calledUrls.add(urlMatch[1].replace(/\?.*/, '').replace(/\/+$/, ''));
        }
        // Also check for URL references in template literals
        const templateFetches = searchFiles(ctx.fileContents, /['"``]\/api\/v\d+\/[a-z_/-]+['"``]/, (f) => !isTestFile(f) && /\.(tsx?|jsx?)$/.test(f));
        for (const hit of templateFetches) {
            const urlMatch = hit.match.match(/['"`]([^'"`]+?)['"`]/);
            if (urlMatch)
                calledUrls.add(urlMatch[1]);
        }
    }
    return result;
}
|
|
133
|
+
//# sourceMappingURL=backend-integrity.js.map
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
// Shared constructors for analyzer results.

// Monotonic counter backing finding ids; reset via resetFindingCounter().
let findingCounter = 0;

/**
 * Build a finding object with a sequential zero-padded id and a default
 * status of 'open'. Fields in `opts` win over the defaults.
 */
export function makeFinding(opts) {
    findingCounter += 1;
    const id = `finding_${String(findingCounter).padStart(3, '0')}`;
    return { id, status: 'open', ...opts };
}

/** Build a smell-hit record. */
export function makeSmell(id, label, count) {
    return { id, label, count };
}

/** Fresh, empty analyzer result. */
export function emptyResult() {
    return { findings: [], smellHits: [] };
}

/** Concatenate any number of analyzer results into a single result. */
export function mergeResults(...results) {
    const findings = [];
    const smellHits = [];
    for (const r of results) {
        findings.push(...r.findings);
        smellHits.push(...r.smellHits);
    }
    return { findings, smellHits };
}

/** Reset the finding id sequence (used between scans/tests). */
export function resetFindingCounter() {
    findingCounter = 0;
}
|
|
25
|
+
//# sourceMappingURL=base.js.map
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import { makeFinding, emptyResult } from './base.js';
|
|
2
|
+
import { searchFiles, filesMatching, extractSnippet } from '../utils/patterns.js';
|
|
3
|
+
import { isTestFile } from '../utils/fs.js';
|
|
4
|
+
// Any non-test file qualifies as "source" for the data-model checks.
const srcFilter = (f) => !isTestFile(f);
/**
 * Data-model heuristics: unscoped queries, missing timestamps,
 * nullable-but-important columns, and seed/demo data leaking into
 * production code paths.
 *
 * @param ctx scan context with `fileContents` (Map of path -> file text).
 * @returns analyzer result: `{ findings, smellHits }`.
 */
export function analyzeDataModel(ctx) {
    const result = emptyResult();
    // FK-DM-TENANT-001: Queries without tenant/user scoping
    // JS/TS ORM patterns
    // NOTE(review): the extension filter /\.(ts|js)$/ excludes .tsx/.jsx —
    // confirm whether queries in React files should also be checked.
    const queryWithoutScope = searchFiles(ctx.fileContents, /\.findMany\(\s*\)|\bfindAll\(\s*\)|\.\w+\.find\(\s*\{?\s*\}?\s*\)/, (f) => srcFilter(f) && /\.(ts|js)$/.test(f));
    // Python ORM patterns: SQLAlchemy session.query(...).all(), select(...), .objects.all()
    const pyQueryWithoutScope = searchFiles(ctx.fileContents, /session\.query\([^)]+\)\.all\(\)|\.execute\(\s*select\([^)]+\)\s*\)|\.objects\.all\(\)/, (f) => srcFilter(f) && /\.py$/.test(f));
    for (const hit of [...queryWithoutScope, ...pyQueryWithoutScope]) {
        const content = ctx.fileContents.get(hit.file);
        const lines = content.split('\n');
        // Look a few lines around the hit for scoping/pagination evidence.
        const region = lines.slice(Math.max(0, hit.line - 5), Math.min(lines.length, hit.line + 10)).join('\n');
        const hasScope = /(where|filter|userId|tenantId|orgId|ownerId|scope|belongsTo|tenant_id|user_id|org_id|owner_id|company_id)/i.test(region);
        // Check for limit/pagination in context — bounded queries are low risk
        const hasLimit = /(\.limit\(|\.take\(|LIMIT\s+\d|\$limit|\.slice\(|\.head\(|pagination|paginate|offset|skip\s*[:=]|limit\s*[:=])/i.test(region);
        if (!hasScope && !hasLimit) {
            result.findings.push(makeFinding({
                ruleId: 'FK-DM-TENANT-001',
                title: 'Unscoped query may return all records',
                categoryId: 'DM',
                severity: 'high',
                confidence: 'low',
                labels: ['Unsafe'],
                summary: `Query at ${hit.file}:${hit.line} fetches without visible scoping.`,
                impact: 'May expose data across users or tenants.',
                location: { file: hit.file, startLine: hit.line },
                codeSnippet: extractSnippet(ctx.fileContents, hit.file, hit.line, 2, 4),
                suggestedFix: 'Add user/tenant scoping to the query.',
            }));
        }
    }
    // FK-DM-SCHEMA-001: Schema with optional fields that should be required
    // NOTE(review): schemaFiles matches Prisma, raw SQL, and SQLAlchemy
    // models, but the loop below only inspects .prisma files — the other
    // matches are silently ignored.
    const schemaFiles = filesMatching(ctx.fileContents, /model\s+\w+\s*\{|CREATE TABLE|schema\.\w+Table|class\s+\w+\(.*Base\)/i, srcFilter);
    // Check for Prisma models missing required fields
    for (const file of schemaFiles) {
        const content = ctx.fileContents.get(file);
        if (/\.prisma$/.test(file)) {
            // Check for models without timestamps
            const models = content.match(/model\s+\w+\s*\{[^}]+\}/g) || [];
            for (const model of models) {
                const modelName = model.match(/model\s+(\w+)/)?.[1] || 'Unknown';
                if (!/createdAt|created_at|updatedAt|updated_at/i.test(model)) {
                    result.findings.push(makeFinding({
                        ruleId: 'FK-DM-SCHEMA-001',
                        title: `Model ${modelName} lacks timestamp fields`,
                        categoryId: 'DM',
                        severity: 'low',
                        confidence: 'high',
                        labels: ['Incomplete'],
                        summary: `Model ${modelName} has no createdAt/updatedAt fields.`,
                        impact: 'No audit trail for record changes.',
                        location: { file },
                        suggestedFix: 'Add createdAt and updatedAt timestamp fields.',
                    }));
                }
            }
        }
    }
    // FK-DM-NULLABLE-001: SQLAlchemy columns that are Optional/nullable without defaults
    for (const [file, content] of ctx.fileContents) {
        if (!/\.py$/.test(file) || isTestFile(file))
            continue;
        // Only inspect files that define SQLAlchemy declarative models.
        if (!/class\s+\w+\(.*Base\)/.test(content))
            continue;
        const lines = content.split('\n');
        for (let i = 0; i < lines.length; i++) {
            const line = lines[i];
            // Column(..., nullable=True) or Optional[...] = Column(...) without server_default
            if (/Column\(/.test(line) && /nullable\s*=\s*True/.test(line) && !/server_default|default=/.test(line)) {
                const colName = line.match(/(\w+)\s*(?::\s*\w+)?\s*=\s*(?:mapped_column|Column)/)?.[1];
                // Only flag columns whose names suggest they are required.
                if (colName && /(name|title|status|type|email|role)$/i.test(colName)) {
                    result.findings.push(makeFinding({
                        ruleId: 'FK-DM-NULLABLE-001',
                        title: `Critical column "${colName}" is nullable without default`,
                        categoryId: 'DM',
                        severity: 'medium',
                        confidence: 'medium',
                        labels: ['Fragile', 'Incomplete'],
                        summary: `${file}:${i + 1} — "${colName}" is nullable but looks like a required field.`,
                        impact: 'NULL values in required fields cause downstream errors.',
                        location: { file, startLine: i + 1 },
                        codeSnippet: extractSnippet(ctx.fileContents, file, i + 1, 1, 2),
                        suggestedFix: `Make "${colName}" non-nullable or add a server_default.`,
                    }));
                }
            }
        }
    }
    // FK-DM-DEMO-001: Seed data referenced in production paths
    // NOTE(review): the regex uses ^ anchors without the /m flag — unless
    // searchFiles matches line-by-line, only the first line of each file
    // can match; confirm against searchFiles' implementation.
    const seedRefs = searchFiles(ctx.fileContents, /^import\s+.*['"`].*\b(seed|fixtures?|demo|sample)\b.*['"`]|^(?:const|let|var)\s+.*=\s*require\(.*\b(seed|fixtures?|demo|sample)\b/i, (f) => srcFilter(f) && !isTestFile(f) && !/seed|fixture|demo|test|spec|analyz/i.test(f));
    for (const hit of seedRefs) {
        result.findings.push(makeFinding({
            ruleId: 'FK-DM-DEMO-001',
            title: 'Seed/demo data imported in production path',
            categoryId: 'DM',
            severity: 'medium',
            confidence: 'medium',
            labels: ['Mock Leakage'],
            summary: `Seed or demo data imported in ${hit.file}:${hit.line}.`,
            impact: 'Production logic may depend on test/demo data.',
            location: { file: hit.file, startLine: hit.line },
            codeSnippet: extractSnippet(ctx.fileContents, hit.file, hit.line),
            suggestedFix: 'Remove seed/demo imports from production code.',
        }));
    }
    return result;
}
|
|
111
|
+
//# sourceMappingURL=data-model.js.map
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import { makeFinding, emptyResult } from './base.js';
|
|
2
|
+
import { isTestFile } from '../utils/fs.js';
|
|
3
|
+
/**
 * Deployment/operations hygiene checks: README, .env.example, CI config,
 * lockfile, structured logging, and .gitignore presence. File-count
 * thresholds (>5, >10, >20) keep tiny projects from being flagged.
 *
 * NOTE(review): ruleId 'FK-DO-SETUP-001' is reused for three distinct
 * findings (missing README, missing lockfile, missing .gitignore) —
 * confirm whether downstream reporting/ignore logic keys on ruleId.
 *
 * @param ctx scan context with `files` (path list) and `fileContents`
 *   (Map of path -> file text).
 * @returns analyzer result: `{ findings, smellHits }`.
 */
export function analyzeDeployment(ctx) {
    const result = emptyResult();
    const fileSet = new Set(ctx.files);
    // FK-DO-SETUP-001: Missing README
    // The ^ anchor means only a root-level README counts — assumes paths
    // are repo-relative; TODO confirm.
    const hasReadme = ctx.files.some(f => /^readme\.(md|txt|rst)$/i.test(f));
    if (!hasReadme && ctx.files.length > 10) {
        result.findings.push(makeFinding({
            ruleId: 'FK-DO-SETUP-001',
            title: 'No README file',
            categoryId: 'DO',
            severity: 'medium',
            confidence: 'high',
            labels: ['Incomplete'],
            summary: 'Project has no README for setup instructions.',
            impact: 'New developers cannot onboard without oral tradition.',
            location: { file: '.' },
            suggestedFix: 'Add a README with setup, run, and deployment instructions.',
        }));
    }
    // FK-DO-ENV-001: No .env.example
    const hasEnvExample = ctx.files.some(f => /\.env\.(example|sample|template)$/i.test(f));
    const hasEnvUsage = Array.from(ctx.fileContents.values()).some(c => /process\.env\.\w+|import\.meta\.env\.\w+|os\.environ/i.test(c));
    if (hasEnvUsage && !hasEnvExample) {
        result.findings.push(makeFinding({
            ruleId: 'FK-DO-ENV-001',
            title: 'Environment variables used but no .env.example provided',
            categoryId: 'DO',
            severity: 'medium',
            confidence: 'high',
            labels: ['Incomplete'],
            summary: 'Code references env vars but no example env file exists.',
            impact: 'New developers must guess required configuration.',
            location: { file: '.' },
            suggestedFix: 'Create a .env.example listing all required variables.',
        }));
    }
    // FK-DO-CI-001: No CI configuration
    const hasCi = ctx.files.some(f => /\.github\/workflows\//.test(f) ||
        /\.gitlab-ci\.yml/.test(f) ||
        /Jenkinsfile/.test(f) ||
        /\.circleci\//.test(f) ||
        /bitbucket-pipelines\.yml/.test(f));
    if (!hasCi && ctx.files.length > 20) {
        result.findings.push(makeFinding({
            ruleId: 'FK-DO-CI-001',
            title: 'No CI/CD configuration found',
            categoryId: 'DO',
            severity: 'medium',
            confidence: 'high',
            labels: ['Incomplete'],
            summary: 'No CI/CD pipeline configuration detected.',
            impact: 'No automated checks on code changes.',
            location: { file: '.' },
            suggestedFix: 'Add CI config to run lint, typecheck, and tests on push.',
        }));
    }
    // FK-DO-SETUP-001: No lockfile
    const hasLockfile = ctx.files.some(f => /^(package-lock\.json|yarn\.lock|pnpm-lock\.yaml|bun\.lockb|Gemfile\.lock|poetry\.lock|go\.sum|Cargo\.lock)$/.test(f));
    const hasPackageJson = fileSet.has('package.json');
    // Only flagged for Node projects (package.json present), even though
    // the lockfile list covers other ecosystems too.
    if (hasPackageJson && !hasLockfile) {
        result.findings.push(makeFinding({
            ruleId: 'FK-DO-SETUP-001',
            title: 'No lockfile committed',
            categoryId: 'DO',
            severity: 'medium',
            confidence: 'high',
            labels: ['Fragile'],
            summary: 'package.json exists but no lockfile is committed.',
            impact: 'Builds may not be reproducible across environments.',
            location: { file: 'package.json' },
            suggestedFix: 'Commit the lockfile for your package manager.',
        }));
    }
    // FK-DO-LOGS-001: No structured logging
    // 'logger.' in the pattern also matches hand-rolled wrappers, so this
    // is a loose presence check, not a library detection.
    const hasLogger = Array.from(ctx.fileContents.values()).some(c => /\b(winston|pino|bunyan|log4js|logger\.|logging\.getLogger|structlog|slog\.)/i.test(c));
    const sourceCount = ctx.files.filter(f => /\.(ts|tsx|js|jsx|py|rb|go)$/.test(f) && !isTestFile(f)).length;
    if (!hasLogger && sourceCount > 20) {
        result.findings.push(makeFinding({
            ruleId: 'FK-DO-LOGS-001',
            title: 'No structured logging library detected',
            categoryId: 'DO',
            severity: 'low',
            confidence: 'medium',
            labels: ['Incomplete'],
            summary: 'Project uses console.log but no structured logging.',
            impact: 'Production debugging will be harder without structured logs.',
            location: { file: '.' },
            suggestedFix: 'Add a structured logger (pino, winston, etc.).',
        }));
    }
    // Gitignore check
    const hasGitignore = fileSet.has('.gitignore');
    if (!hasGitignore && ctx.files.length > 5) {
        result.findings.push(makeFinding({
            ruleId: 'FK-DO-SETUP-001',
            title: 'No .gitignore file',
            categoryId: 'DO',
            severity: 'medium',
            confidence: 'high',
            labels: ['Unsafe', 'Incomplete'],
            summary: 'Project has no .gitignore — risk of committing sensitive files.',
            impact: 'node_modules, .env, and build artifacts may be committed.',
            location: { file: '.' },
            suggestedFix: 'Add a .gitignore appropriate for your stack.',
        }));
    }
    return result;
}
|
|
111
|
+
//# sourceMappingURL=deployment.js.map
|