pumuki-ast-hooks 5.3.17 → 5.3.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/docs/VIOLATIONS_RESOLUTION_PLAN.md +23 -25
- package/docs/alerting-system.md +51 -0
- package/docs/observability.md +36 -0
- package/docs/type-safety.md +8 -0
- package/package.json +2 -2
- package/scripts/hooks-system/.AI_TOKEN_STATUS.txt +1 -1
- package/scripts/hooks-system/.audit-reports/notifications.log +11 -0
- package/scripts/hooks-system/.audit-reports/token-monitor.log +72 -0
- package/scripts/hooks-system/application/services/GitTreeState.js +139 -13
- package/scripts/hooks-system/application/services/HookSystemScheduler.js +43 -0
- package/scripts/hooks-system/application/services/PlaybookRunner.js +1 -1
- package/scripts/hooks-system/application/services/installation/GitEnvironmentService.js +1 -1
- package/scripts/hooks-system/application/services/logging/AuditLogger.js +173 -0
- package/scripts/hooks-system/application/services/monitoring/EvidenceMonitor.js +1 -0
- package/scripts/hooks-system/bin/__tests__/evidence-update.spec.js +49 -0
- package/scripts/hooks-system/bin/cli.js +1 -15
- package/scripts/hooks-system/config/env.js +33 -0
- package/scripts/hooks-system/domain/events/__tests__/EventBus.spec.js +33 -0
- package/scripts/hooks-system/domain/events/index.js +6 -16
- package/scripts/hooks-system/infrastructure/ast/ios/analyzers/__tests__/iOSASTIntelligentAnalyzer.spec.js +66 -0
- package/scripts/hooks-system/infrastructure/ast/ios/analyzers/iOSArchitectureRules.js +24 -86
@@ -3,23 +3,66 @@ const HookSystemStateMachine = require('../state/HookSystemStateMachine');
 
 class HookSystemScheduler {
   constructor({ orchestrator, contextEngine, intervalMs = 30000 }) {
+    recordMetric({
+      hook: 'hook_system_scheduler',
+      operation: 'constructor',
+      status: 'started',
+      intervalMs
+    });
+
     this.orchestrator = orchestrator;
     this.contextEngine = contextEngine;
     this.intervalMs = intervalMs;
     this.stateMachine = new HookSystemStateMachine();
     this.timer = null;
+
+    recordMetric({
+      hook: 'hook_system_scheduler',
+      operation: 'constructor',
+      status: 'success',
+      intervalMs
+    });
   }
 
   start() {
+    recordMetric({
+      hook: 'hook_system_scheduler',
+      operation: 'start',
+      status: 'started',
+      intervalMs: this.intervalMs
+    });
+
     if (this.timer) return;
+
     this.timer = setInterval(() => this.tick(), this.intervalMs);
+
+    recordMetric({
+      hook: 'hook_system_scheduler',
+      operation: 'start',
+      status: 'success',
+      intervalMs: this.intervalMs
+    });
   }
 
   stop() {
+    recordMetric({
+      hook: 'hook_system_scheduler',
+      operation: 'stop',
+      status: 'started',
+      hadTimer: !!this.timer
+    });
+
     if (this.timer) {
       clearInterval(this.timer);
       this.timer = null;
     }
+
+    recordMetric({
+      hook: 'hook_system_scheduler',
+      operation: 'stop',
+      status: 'success',
+      hadTimer: !!this.timer
+    });
   }
 
   async tick() {
@@ -110,7 +110,7 @@ fi
 
 # Try node_modules/.bin first (works with npm install)
 if [ -f "node_modules/.bin/ast-hooks" ]; then
-  OUTPUT=$(node_modules/.bin/ast-hooks ast
+  OUTPUT=$(node_modules/.bin/ast-hooks ast 2>&1)
   EXIT_CODE=$?
   echo "$OUTPUT"
   if [ $EXIT_CODE -ne 0 ]; then
@@ -0,0 +1,173 @@
+const fs = require('fs');
+const path = require('path');
+
+// Import recordMetric for prometheus metrics
+const { recordMetric } = require('../../../infrastructure/telemetry/metrics-logger');
+
+class AuditLogger {
+  /**
+   * @param {Object} options
+   * @param {string} [options.repoRoot=process.cwd()]
+   * @param {string} [options.filename='.audit_tmp/audit.log']
+   * @param {Object} [options.logger=console] - fallback logger for warnings
+   */
+  constructor({ repoRoot = process.cwd(), filename, logger = console } = {}) {
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'constructor',
+      status: 'started',
+      repoRoot: repoRoot.substring(0, 100)
+    });
+
+    this.repoRoot = repoRoot;
+    this.logger = logger;
+    this.logPath = filename
+      ? (path.isAbsolute(filename) ? filename : path.join(repoRoot, filename))
+      : path.join(repoRoot, '.audit_tmp', 'audit.log');
+
+    this.ensureDir();
+
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'constructor',
+      status: 'success',
+      repoRoot: repoRoot.substring(0, 100)
+    });
+  }
+
+  ensureDir() {
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'ensure_dir',
+      status: 'started'
+    });
+
+    try {
+      const dir = path.dirname(this.logPath);
+      if (!fs.existsSync(dir)) {
+        fs.mkdirSync(dir, { recursive: true });
+      }
+      if (!fs.existsSync(this.logPath)) {
+        fs.writeFileSync(this.logPath, '', { encoding: 'utf8' });
+      }
+    } catch (error) {
+      this.warn('AUDIT_LOGGER_INIT_ERROR', error);
+    }
+
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'ensure_dir',
+      status: 'success'
+    });
+  }
+
+  warn(message, error) {
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'warn',
+      status: 'started',
+      message: message
+    });
+
+    if (this.logger?.warn) {
+      this.logger.warn(message, { error: error?.message });
+    } else {
+      console.warn(message, error?.message);
+    }
+
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'warn',
+      status: 'success'
+    });
+  }
+
+  /**
+   * @param {Object} entry
+   * @param {string} entry.action
+   * @param {string} [entry.resource]
+   * @param {string} [entry.status='success']
+   * @param {string|null} [entry.actor=null]
+   * @param {Object} [entry.meta={}]
+   * @param {string|null} [entry.correlationId=null]
+   */
+  record(entry = {}) {
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'record',
+      status: 'started',
+      action: entry.action
+    });
+
+    if (!entry.action) {
+      recordMetric({
+        hook: 'audit_logger',
+        operation: 'record',
+        status: 'success',
+        reason: 'no_action'
+      });
+      return;
+    }
+    const safeMeta = this.sanitizeMeta(entry.meta || {});
+
+    const payload = {
+      ts: new Date().toISOString(),
+      action: entry.action,
+      resource: entry.resource || null,
+      status: entry.status || 'success',
+      actor: entry.actor || null,
+      correlationId: entry.correlationId || null,
+      meta: safeMeta
+    };
+
+    try {
+      fs.appendFileSync(this.logPath, `${JSON.stringify(payload)}\n`, { encoding: 'utf8' });
+      recordMetric({
+        hook: 'audit_logger',
+        operation: 'record',
+        status: 'success',
+        action: entry.action
+      });
+    } catch (error) {
+      this.warn('AUDIT_LOGGER_WRITE_ERROR', error);
+      recordMetric({
+        hook: 'audit_logger',
+        operation: 'record',
+        status: 'failed',
+        action: entry.action,
+        error: error.message
+      });
+    }
+  }
+
+  sanitizeMeta(meta) {
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'sanitize_meta',
+      status: 'started',
+      metaKeys: Object.keys(meta || {}).length
+    });
+
+    const forbidden = ['token', 'password', 'secret', 'authorization', 'auth', 'apiKey'];
+    const clone = {};
+    Object.entries(meta).forEach(([k, v]) => {
+      const lowered = k.toLowerCase();
+      if (forbidden.some(f => lowered.includes(f))) {
+        clone[k] = '[REDACTED]';
+      } else {
+        clone[k] = v;
+      }
+    });
+
+    recordMetric({
+      hook: 'audit_logger',
+      operation: 'sanitize_meta',
+      status: 'success',
+      metaKeys: Object.keys(clone).length
+    });
+
+    return clone;
+  }
+}
+
+module.exports = AuditLogger;
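The new AuditLogger appends one JSON line per audit entry and redacts sensitive meta keys before persisting. A minimal usage sketch, assuming the class is required directly from its path inside an installed project; the require path, action, resource, and meta values below are illustrative assumptions, not taken from the package:

```js
// Hypothetical usage of the new AuditLogger; the require path and field values are assumptions.
const AuditLogger = require('pumuki-ast-hooks/scripts/hooks-system/application/services/logging/AuditLogger');

const audit = new AuditLogger({ repoRoot: process.cwd() });

// Appends a JSON line to <repoRoot>/.audit_tmp/audit.log. Meta keys containing
// 'token', 'password', 'secret', etc. are replaced with '[REDACTED]' by sanitizeMeta.
audit.record({
  action: 'hook_install',
  resource: '.git/hooks/pre-commit',
  status: 'success',
  meta: { apiToken: 'abc123', branch: 'develop' } // apiToken is redacted before writing
});
```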
@@ -19,6 +19,7 @@ class EvidenceMonitor {
   resolveUpdateEvidenceScript() {
     const candidates = [
       path.join(this.repoRoot, 'node_modules/@pumuki/ast-intelligence-hooks/bin/update-evidence.sh'),
+      path.join(this.repoRoot, 'scripts/hooks-system/bin/update-evidence.sh'),
       path.join(this.repoRoot, 'bin/update-evidence.sh')
     ];
 
@@ -0,0 +1,49 @@
+const fs = require('fs');
+const os = require('os');
+const path = require('path');
+const { execSync } = require('child_process');
+
+describe('ast-hooks evidence:update', () => {
+  let repoRoot;
+  let subdir;
+
+  beforeEach(() => {
+    repoRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'ast-hooks-evidence-cli-'));
+    subdir = path.join(repoRoot, 'packages', 'app');
+    fs.mkdirSync(subdir, { recursive: true });
+
+    execSync('git init', { cwd: repoRoot, stdio: 'ignore' });
+    execSync('git config user.email "test@example.com"', { cwd: repoRoot, stdio: 'ignore' });
+    execSync('git config user.name "Test"', { cwd: repoRoot, stdio: 'ignore' });
+
+    fs.writeFileSync(path.join(repoRoot, 'README.md'), 'test', 'utf8');
+    execSync('git add README.md', { cwd: repoRoot, stdio: 'ignore' });
+    execSync('git commit -m "chore: init"', { cwd: repoRoot, stdio: 'ignore' });
+  });
+
+  afterEach(() => {
+    fs.rmSync(repoRoot, { recursive: true, force: true });
+  });
+
+  it('updates .AI_EVIDENCE.json in repo root even when executed from subdirectory', () => {
+    const cliPath = path.resolve(__dirname, '..', 'cli.js');
+    const output = execSync(`node ${cliPath} evidence:update`, {
+      cwd: subdir,
+      encoding: 'utf8',
+      env: {
+        ...process.env,
+        AUTO_EVIDENCE_TRIGGER: 'test',
+        AUTO_EVIDENCE_REASON: 'test',
+        AUTO_EVIDENCE_SUMMARY: 'test'
+      }
+    }).trim();
+
+    const evidencePath = path.join(repoRoot, '.AI_EVIDENCE.json');
+    expect(fs.realpathSync(output)).toBe(fs.realpathSync(evidencePath));
+    expect(fs.existsSync(evidencePath)).toBe(true);
+
+    const json = JSON.parse(fs.readFileSync(evidencePath, 'utf8'));
+    expect(typeof json.timestamp).toBe('string');
+    expect(json.timestamp.length).toBeGreaterThan(10);
+  });
+});
@@ -121,21 +121,7 @@ const commands = {
   },
 
   ast: () => {
-
-    const filteredArgs = [];
-
-    for (const arg of args) {
-      if (arg === '--staged') {
-        env.STAGING_ONLY_MODE = '1';
-      } else {
-        filteredArgs.push(arg);
-      }
-    }
-
-    execSync(
-      `node ${path.join(HOOKS_ROOT, 'infrastructure/ast/ast-intelligence.js')} ${filteredArgs.join(' ')}`,
-      { stdio: 'inherit', env }
-    );
+    execSync(`node ${path.join(HOOKS_ROOT, 'infrastructure/ast/ast-intelligence.js')}`, { stdio: 'inherit' });
   },
 
   install: () => {
@@ -0,0 +1,33 @@
+const ENV = (process.env.NODE_ENV || 'development').toLowerCase();
+
+function normalizeBool(val, defaultValue = false) {
+  if (val === undefined) return defaultValue;
+  if (typeof val === 'boolean') return val;
+  const str = String(val).trim().toLowerCase();
+  if (str === '') return defaultValue;
+  return !(['false', '0', 'no', 'off'].includes(str));
+}
+
+function get(name, defaultValue = undefined) {
+  return process.env[name] !== undefined ? process.env[name] : defaultValue;
+}
+
+function getNumber(name, defaultValue = 0) {
+  const raw = process.env[name];
+  const parsed = Number(raw);
+  return Number.isFinite(parsed) ? parsed : defaultValue;
+}
+
+function getBool(name, defaultValue = false) {
+  return normalizeBool(process.env[name], defaultValue);
+}
+
+module.exports = {
+  env: ENV,
+  isProd: ENV === 'production',
+  isStg: ENV === 'staging' || ENV === 'stage' || ENV === 'stg',
+  isDev: ENV === 'development' || ENV === 'dev',
+  get,
+  getNumber,
+  getBool,
+};
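The new config/env.js centralizes NODE_ENV detection and typed environment reads. A minimal sketch of how a caller might consume it; the variable names HOOKS_SCHEDULER_INTERVAL_MS and HOOKS_VERBOSE are illustrative assumptions, not variables defined by the package:

```js
// Hypothetical consumer of config/env.js; the env var names and require path are assumptions.
const env = require('pumuki-ast-hooks/scripts/hooks-system/config/env');

// getNumber falls back to the default when the variable is unset or not numeric.
const intervalMs = env.getNumber('HOOKS_SCHEDULER_INTERVAL_MS', 30000);

// getBool treats 'false', '0', 'no' and 'off' (case-insensitive) as false.
const verbose = env.getBool('HOOKS_VERBOSE', false);

if (!env.isProd) {
  console.log(`env=${env.env} intervalMs=${intervalMs} verbose=${verbose}`);
}
```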
@@ -0,0 +1,33 @@
+const { EventBus, DomainEvent } = require('..');
+
+describe('EventBus', () => {
+  test('does not reprocess duplicate events (idempotency by id)', async () => {
+    const bus = new EventBus();
+    const handled = [];
+    bus.subscribe('TEST_EVENT', async (evt) => {
+      handled.push(evt.id);
+    });
+
+    const evt = new DomainEvent('TEST_EVENT', { foo: 'bar' });
+    await bus.publish(evt);
+    await bus.publish(evt); // second publish with the same id
+
+    expect(handled).toHaveLength(1);
+    expect(handled[0]).toBe(evt.id);
+  });
+
+  test('trims processedIds when maxProcessed is exceeded', async () => {
+    const bus = new EventBus();
+    bus.maxProcessed = 3;
+    bus.subscribe('*', async () => { });
+
+    const idsBefore = [];
+    for (let i = 0; i < 5; i++) {
+      const evt = new DomainEvent('TEST_EVENT', { seq: i });
+      idsBefore.push(evt.id);
+      await bus.publish(evt);
+    }
+
+    expect(bus.processedIds.size).toBeLessThanOrEqual(bus.maxProcessed);
+  });
+});
@@ -1,5 +1,3 @@
-const { ValidationError } = require('../errors');
-
 class DomainEvent {
   constructor(type, payload) {
     this.type = type;
@@ -9,8 +7,8 @@ class DomainEvent {
   }
 
   validate() {
-    if (!this.type) throw new
-    if (!this.payload) throw new
+    if (!this.type) throw new Error('Event type is required');
+    if (!this.payload) throw new Error('Event payload is required');
     return true;
   }
 
@@ -31,9 +29,7 @@ class EvidenceStaleEvent extends DomainEvent {
 
   validate() {
     super.validate();
-    if (!this.payload.evidencePath)
-      throw new ValidationError('Evidence path is required', 'payload.evidencePath', this.payload.evidencePath);
-    }
+    if (!this.payload.evidencePath) throw new Error('Evidence path is required');
   }
 }
 
@@ -45,12 +41,8 @@ class GitFlowViolationEvent extends DomainEvent {
 
   validate() {
     super.validate();
-    if (!this.payload.branch)
-
-    }
-    if (!this.payload.violation) {
-      throw new ValidationError('Violation details are required', 'payload.violation', this.payload.violation);
-    }
+    if (!this.payload.branch) throw new Error('Branch name is required');
+    if (!this.payload.violation) throw new Error('Violation details are required');
   }
 }
 
@@ -62,9 +54,7 @@ class AstCriticalFoundEvent extends DomainEvent {
 
   validate() {
     super.validate();
-    if (!Array.isArray(this.payload.findings))
-      throw new ValidationError('Findings must be an array', 'payload.findings', this.payload.findings);
-    }
+    if (!Array.isArray(this.payload.findings)) throw new Error('Findings must be an array');
   }
 }
 
@@ -0,0 +1,66 @@
+const { iOSASTIntelligentAnalyzer } = require('../iOSASTIntelligentAnalyzer');
+
+describe('iOSASTIntelligentAnalyzer - event-driven navigation rules', () => {
+  const makeSUT = () => {
+    const findings = [];
+    const sut = new iOSASTIntelligentAnalyzer(findings);
+    sut.fileContent = '';
+    sut.syntaxTokens = [];
+    sut.imports = [];
+    sut.classes = [];
+    sut.structs = [];
+    return { sut, findings };
+  };
+
+  const identifierToken = (value, offset = 0) => ({
+    kind: 'source.lang.swift.syntaxtype.identifier',
+    value,
+    offset,
+    length: value.length,
+  });
+
+  it('should report CRITICAL when UIKit imperative navigation is detected', () => {
+    const { sut, findings } = makeSUT();
+    sut.fileContent = 'pushViewController';
+    sut.syntaxTokens = [identifierToken('pushViewController', 0)];
+
+    sut.analyzeAdditionalRules('/tmp/File.swift');
+
+    const rule = findings.find((f) => f.ruleId === 'ios.navigation.imperative_navigation');
+    expect(rule).toBeDefined();
+    expect(String(rule.severity).toLowerCase()).toBe('critical');
+  });
+
+  it('should report CRITICAL when SwiftUI navigation API is detected outside View types', () => {
+    const { sut, findings } = makeSUT();
+    sut.fileContent = 'NavigationLink';
+    sut.syntaxTokens = [identifierToken('NavigationLink', 0)];
+    sut.imports = [];
+    sut.classes = [];
+    sut.structs = [];
+
+    sut.analyzeAdditionalRules('/tmp/NotAView.swift');
+
+    const rule = findings.find((f) => f.ruleId === 'ios.navigation.swiftui_navigation_outside_view');
+    expect(rule).toBeDefined();
+    expect(String(rule.severity).toLowerCase()).toBe('critical');
+  });
+
+  it('should not report SwiftUI navigation outside View when file is a SwiftUI View', () => {
+    const { sut, findings } = makeSUT();
+    sut.fileContent = 'import SwiftUI\nstruct MyView: View { var body: some View { NavigationLink("x", destination: Text("y")) } }';
+    sut.syntaxTokens = [identifierToken('NavigationLink', 0)];
+    sut.imports = [{ name: 'SwiftUI', line: 1 }];
+    sut.structs = [
+      {
+        'key.name': 'MyView',
+        'key.inheritedtypes': [{ 'key.name': 'View' }],
+      },
+    ];
+
+    sut.analyzeAdditionalRules('/tmp/MyView.swift');
+
+    const rule = findings.find((f) => f.ruleId === 'ios.navigation.swiftui_navigation_outside_view');
+    expect(rule).toBeUndefined();
+  });
+});