@timmeck/brain 1.8.0 → 1.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/BRAIN_PLAN.md +3324 -3324
- package/LICENSE +21 -21
- package/dist/api/server.d.ts +4 -0
- package/dist/api/server.js +73 -0
- package/dist/api/server.js.map +1 -1
- package/dist/brain.js +2 -1
- package/dist/brain.js.map +1 -1
- package/dist/cli/commands/dashboard.js +606 -572
- package/dist/cli/commands/dashboard.js.map +1 -1
- package/dist/dashboard/server.js +25 -25
- package/dist/db/migrations/001_core_schema.js +115 -115
- package/dist/db/migrations/002_learning_schema.js +33 -33
- package/dist/db/migrations/003_code_schema.js +48 -48
- package/dist/db/migrations/004_synapses_schema.js +52 -52
- package/dist/db/migrations/005_fts_indexes.js +73 -73
- package/dist/db/migrations/007_feedback.js +8 -8
- package/dist/db/migrations/008_git_integration.js +33 -33
- package/dist/db/migrations/009_embeddings.js +3 -3
- package/dist/db/repositories/antipattern.repository.js +3 -3
- package/dist/db/repositories/code-module.repository.js +32 -32
- package/dist/db/repositories/notification.repository.js +3 -3
- package/dist/db/repositories/project.repository.js +21 -21
- package/dist/db/repositories/rule.repository.js +24 -24
- package/dist/db/repositories/solution.repository.js +50 -50
- package/dist/db/repositories/synapse.repository.js +18 -18
- package/dist/db/repositories/terminal.repository.js +24 -24
- package/dist/embeddings/engine.d.ts +2 -2
- package/dist/embeddings/engine.js +17 -4
- package/dist/embeddings/engine.js.map +1 -1
- package/dist/index.js +1 -1
- package/dist/ipc/server.d.ts +8 -0
- package/dist/ipc/server.js +67 -1
- package/dist/ipc/server.js.map +1 -1
- package/dist/matching/error-matcher.js +5 -5
- package/dist/matching/fingerprint.js +6 -1
- package/dist/matching/fingerprint.js.map +1 -1
- package/dist/mcp/http-server.js +8 -2
- package/dist/mcp/http-server.js.map +1 -1
- package/dist/services/code.service.d.ts +3 -0
- package/dist/services/code.service.js +33 -4
- package/dist/services/code.service.js.map +1 -1
- package/dist/services/error.service.js +4 -3
- package/dist/services/error.service.js.map +1 -1
- package/dist/services/git.service.js +14 -14
- package/package.json +49 -49
- package/src/api/server.ts +395 -321
- package/src/brain.ts +266 -265
- package/src/cli/colors.ts +116 -116
- package/src/cli/commands/config.ts +169 -169
- package/src/cli/commands/dashboard.ts +755 -720
- package/src/cli/commands/doctor.ts +118 -118
- package/src/cli/commands/explain.ts +83 -83
- package/src/cli/commands/export.ts +31 -31
- package/src/cli/commands/import.ts +199 -199
- package/src/cli/commands/insights.ts +65 -65
- package/src/cli/commands/learn.ts +24 -24
- package/src/cli/commands/modules.ts +53 -53
- package/src/cli/commands/network.ts +67 -67
- package/src/cli/commands/projects.ts +42 -42
- package/src/cli/commands/query.ts +120 -120
- package/src/cli/commands/start.ts +62 -62
- package/src/cli/commands/status.ts +75 -75
- package/src/cli/commands/stop.ts +34 -34
- package/src/cli/ipc-helper.ts +22 -22
- package/src/cli/update-check.ts +63 -63
- package/src/code/fingerprint.ts +87 -87
- package/src/code/parsers/generic.ts +29 -29
- package/src/code/parsers/python.ts +54 -54
- package/src/code/parsers/typescript.ts +65 -65
- package/src/code/registry.ts +60 -60
- package/src/dashboard/server.ts +142 -142
- package/src/db/connection.ts +22 -22
- package/src/db/migrations/001_core_schema.ts +120 -120
- package/src/db/migrations/002_learning_schema.ts +38 -38
- package/src/db/migrations/003_code_schema.ts +53 -53
- package/src/db/migrations/004_synapses_schema.ts +57 -57
- package/src/db/migrations/005_fts_indexes.ts +78 -78
- package/src/db/migrations/006_synapses_phase3.ts +17 -17
- package/src/db/migrations/007_feedback.ts +13 -13
- package/src/db/migrations/008_git_integration.ts +38 -38
- package/src/db/migrations/009_embeddings.ts +8 -8
- package/src/db/repositories/antipattern.repository.ts +66 -66
- package/src/db/repositories/code-module.repository.ts +142 -142
- package/src/db/repositories/notification.repository.ts +66 -66
- package/src/db/repositories/project.repository.ts +93 -93
- package/src/db/repositories/rule.repository.ts +108 -108
- package/src/db/repositories/solution.repository.ts +154 -154
- package/src/db/repositories/synapse.repository.ts +153 -153
- package/src/db/repositories/terminal.repository.ts +101 -101
- package/src/embeddings/engine.ts +238 -217
- package/src/index.ts +63 -63
- package/src/ipc/client.ts +118 -118
- package/src/ipc/protocol.ts +35 -35
- package/src/ipc/router.ts +133 -133
- package/src/ipc/server.ts +176 -110
- package/src/learning/decay.ts +46 -46
- package/src/learning/pattern-extractor.ts +90 -90
- package/src/learning/rule-generator.ts +74 -74
- package/src/matching/error-matcher.ts +5 -5
- package/src/matching/fingerprint.ts +34 -29
- package/src/matching/similarity.ts +61 -61
- package/src/matching/tfidf.ts +74 -74
- package/src/matching/tokenizer.ts +41 -41
- package/src/mcp/auto-detect.ts +93 -93
- package/src/mcp/http-server.ts +140 -137
- package/src/mcp/server.ts +73 -73
- package/src/parsing/error-parser.ts +28 -28
- package/src/parsing/parsers/compiler.ts +93 -93
- package/src/parsing/parsers/generic.ts +28 -28
- package/src/parsing/parsers/go.ts +97 -97
- package/src/parsing/parsers/node.ts +69 -69
- package/src/parsing/parsers/python.ts +62 -62
- package/src/parsing/parsers/rust.ts +50 -50
- package/src/parsing/parsers/shell.ts +42 -42
- package/src/parsing/types.ts +47 -47
- package/src/research/gap-analyzer.ts +135 -135
- package/src/research/insight-generator.ts +123 -123
- package/src/research/research-engine.ts +116 -116
- package/src/research/synergy-detector.ts +126 -126
- package/src/research/template-extractor.ts +130 -130
- package/src/research/trend-analyzer.ts +127 -127
- package/src/services/code.service.ts +271 -238
- package/src/services/error.service.ts +4 -3
- package/src/services/git.service.ts +132 -132
- package/src/services/notification.service.ts +41 -41
- package/src/services/synapse.service.ts +59 -59
- package/src/services/terminal.service.ts +81 -81
- package/src/synapses/activation.ts +80 -80
- package/src/synapses/decay.ts +38 -38
- package/src/synapses/hebbian.ts +69 -69
- package/src/synapses/pathfinder.ts +81 -81
- package/src/synapses/synapse-manager.ts +109 -109
- package/src/types/code.types.ts +52 -52
- package/src/types/error.types.ts +67 -67
- package/src/types/ipc.types.ts +8 -8
- package/src/types/mcp.types.ts +53 -53
- package/src/types/research.types.ts +28 -28
- package/src/types/solution.types.ts +30 -30
- package/src/utils/events.ts +45 -45
- package/src/utils/hash.ts +5 -5
- package/src/utils/logger.ts +48 -48
- package/src/utils/paths.ts +19 -19
- package/tests/e2e/test_code_intelligence.py +1015 -0
- package/tests/e2e/test_error_memory.py +451 -0
- package/tests/e2e/test_full_integration.py +534 -0
- package/tests/fixtures/code-modules/modules.ts +83 -83
- package/tests/fixtures/errors/go.ts +9 -9
- package/tests/fixtures/errors/node.ts +24 -24
- package/tests/fixtures/errors/python.ts +21 -21
- package/tests/fixtures/errors/rust.ts +25 -25
- package/tests/fixtures/errors/shell.ts +15 -15
- package/tests/fixtures/solutions/solutions.ts +27 -27
- package/tests/helpers/setup-db.ts +52 -52
- package/tests/integration/code-flow.test.ts +86 -86
- package/tests/integration/error-flow.test.ts +83 -83
- package/tests/integration/ipc-flow.test.ts +166 -166
- package/tests/integration/learning-cycle.test.ts +82 -82
- package/tests/integration/synapse-flow.test.ts +117 -117
- package/tests/unit/code/analyzer.test.ts +58 -58
- package/tests/unit/code/fingerprint.test.ts +51 -51
- package/tests/unit/code/scorer.test.ts +55 -55
- package/tests/unit/learning/confidence-scorer.test.ts +60 -60
- package/tests/unit/learning/decay.test.ts +45 -45
- package/tests/unit/learning/pattern-extractor.test.ts +50 -50
- package/tests/unit/matching/error-matcher.test.ts +69 -69
- package/tests/unit/matching/fingerprint.test.ts +47 -47
- package/tests/unit/matching/similarity.test.ts +65 -65
- package/tests/unit/matching/tfidf.test.ts +71 -71
- package/tests/unit/matching/tokenizer.test.ts +83 -83
- package/tests/unit/parsing/parsers.test.ts +113 -113
- package/tests/unit/research/gap-analyzer.test.ts +45 -45
- package/tests/unit/research/trend-analyzer.test.ts +45 -45
- package/tests/unit/synapses/activation.test.ts +80 -80
- package/tests/unit/synapses/decay.test.ts +27 -27
- package/tests/unit/synapses/hebbian.test.ts +96 -96
- package/tests/unit/synapses/pathfinder.test.ts +72 -72
- package/tsconfig.json +18 -18
|
@@ -0,0 +1,1015 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Brain v1.8.1 — Code Intelligence Complete Flow Test
|
|
4
|
+
Tests code analysis, registration, similarity, and reusability discovery.
|
|
5
|
+
~40 assertions covering every code-related endpoint.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import sys
|
|
9
|
+
import httpx
|
|
10
|
+
|
|
11
|
+
# API endpoint base plus the global pass/fail tallies for this script.
BASE = "http://localhost:7777/api/v1"
PASS = 0
FAIL = 0
ERRORS: list[str] = []


def check(condition: bool, label: str) -> bool:
    """Record one assertion result and echo a colored PASS/FAIL line.

    Increments the module-level PASS or FAIL counter, appends failing
    labels to ERRORS for the final summary, and returns the condition
    unchanged so callers can chain on it.
    """
    global PASS, FAIL
    if not condition:
        FAIL += 1
        ERRORS.append(label)
        print(f"  \033[31mFAIL\033[0m {label}")
        return condition
    PASS += 1
    print(f"  \033[32mPASS\033[0m {label}")
    return condition
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def post(path: str, json: dict | list | None = None) -> httpx.Response:
    """POST `json` to the API endpoint at BASE + path.

    `json` may be a dict or a list; None sends an empty JSON object.
    Uses an explicit `is None` test instead of `json or {}` so that an
    explicitly-passed empty list or dict is sent as-is rather than being
    silently collapsed to {} by truthiness.
    """
    payload = json if json is not None else {}
    return httpx.post(f"{BASE}{path}", json=payload, timeout=15)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get(path: str, params: dict | None = None) -> httpx.Response:
    """GET the API endpoint at BASE + path, forwarding optional query params."""
    url = f"{BASE}{path}"
    return httpx.get(url, params=params, timeout=15)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# ──────────────────────────────────────────────────────────────
|
|
38
|
+
# 22 Code Modules across 3 projects and 3 languages
|
|
39
|
+
# Intentionally similar pairs: #1/#6 (retry), #2/#18 (logger), #5/#22 (cache)
|
|
40
|
+
# ──────────────────────────────────────────────────────────────
|
|
41
|
+
MODULES = [
|
|
42
|
+
# ── TypeScript Modules (project: test-frontend) ──
|
|
43
|
+
{ # 1: retry logic (similar to #6)
|
|
44
|
+
"project": "test-frontend",
|
|
45
|
+
"name": "retryWithBackoff",
|
|
46
|
+
"filePath": "src/utils/retry.ts",
|
|
47
|
+
"language": "typescript",
|
|
48
|
+
"source": """export async function retryWithBackoff<T>(
|
|
49
|
+
fn: () => Promise<T>,
|
|
50
|
+
maxRetries: number = 3,
|
|
51
|
+
baseDelay: number = 1000
|
|
52
|
+
): Promise<T> {
|
|
53
|
+
let lastError: Error;
|
|
54
|
+
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
|
55
|
+
try {
|
|
56
|
+
return await fn();
|
|
57
|
+
} catch (err) {
|
|
58
|
+
lastError = err as Error;
|
|
59
|
+
const delay = baseDelay * Math.pow(2, attempt);
|
|
60
|
+
await new Promise(r => setTimeout(r, delay));
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
throw lastError!;
|
|
64
|
+
}""",
|
|
65
|
+
"description": "Retry function with exponential backoff",
|
|
66
|
+
},
|
|
67
|
+
{ # 2: logger (similar to #18)
|
|
68
|
+
"project": "test-frontend",
|
|
69
|
+
"name": "createLogger",
|
|
70
|
+
"filePath": "src/utils/logger.ts",
|
|
71
|
+
"language": "typescript",
|
|
72
|
+
"source": """export interface Logger {
|
|
73
|
+
info(msg: string, ...args: unknown[]): void;
|
|
74
|
+
warn(msg: string, ...args: unknown[]): void;
|
|
75
|
+
error(msg: string, ...args: unknown[]): void;
|
|
76
|
+
debug(msg: string, ...args: unknown[]): void;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
export function createLogger(prefix: string): Logger {
|
|
80
|
+
const fmt = (level: string, msg: string) =>
|
|
81
|
+
`[${new Date().toISOString()}] [${level}] [${prefix}] ${msg}`;
|
|
82
|
+
return {
|
|
83
|
+
info: (msg, ...args) => console.log(fmt('INFO', msg), ...args),
|
|
84
|
+
warn: (msg, ...args) => console.warn(fmt('WARN', msg), ...args),
|
|
85
|
+
error: (msg, ...args) => console.error(fmt('ERROR', msg), ...args),
|
|
86
|
+
debug: (msg, ...args) => console.debug(fmt('DEBUG', msg), ...args),
|
|
87
|
+
};
|
|
88
|
+
}""",
|
|
89
|
+
"description": "Structured logger factory with prefix support",
|
|
90
|
+
},
|
|
91
|
+
{ # 3: debounce
|
|
92
|
+
"project": "test-frontend",
|
|
93
|
+
"name": "debounce",
|
|
94
|
+
"filePath": "src/utils/debounce.ts",
|
|
95
|
+
"language": "typescript",
|
|
96
|
+
"source": """export function debounce<T extends (...args: any[]) => any>(
|
|
97
|
+
fn: T,
|
|
98
|
+
delayMs: number
|
|
99
|
+
): (...args: Parameters<T>) => void {
|
|
100
|
+
let timer: ReturnType<typeof setTimeout> | null = null;
|
|
101
|
+
return (...args: Parameters<T>) => {
|
|
102
|
+
if (timer) clearTimeout(timer);
|
|
103
|
+
timer = setTimeout(() => {
|
|
104
|
+
fn(...args);
|
|
105
|
+
timer = null;
|
|
106
|
+
}, delayMs);
|
|
107
|
+
};
|
|
108
|
+
}""",
|
|
109
|
+
"description": "Debounce utility for input handling",
|
|
110
|
+
},
|
|
111
|
+
{ # 4: fetch wrapper
|
|
112
|
+
"project": "test-frontend",
|
|
113
|
+
"name": "apiFetch",
|
|
114
|
+
"filePath": "src/api/client.ts",
|
|
115
|
+
"language": "typescript",
|
|
116
|
+
"source": """export interface ApiResponse<T> {
|
|
117
|
+
data: T;
|
|
118
|
+
status: number;
|
|
119
|
+
ok: boolean;
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
export async function apiFetch<T>(
|
|
123
|
+
url: string,
|
|
124
|
+
options: RequestInit = {}
|
|
125
|
+
): Promise<ApiResponse<T>> {
|
|
126
|
+
const token = localStorage.getItem('auth_token');
|
|
127
|
+
const res = await fetch(url, {
|
|
128
|
+
...options,
|
|
129
|
+
headers: {
|
|
130
|
+
'Content-Type': 'application/json',
|
|
131
|
+
...(token ? { Authorization: `Bearer ${token}` } : {}),
|
|
132
|
+
...options.headers,
|
|
133
|
+
},
|
|
134
|
+
});
|
|
135
|
+
const data = await res.json();
|
|
136
|
+
return { data, status: res.status, ok: res.ok };
|
|
137
|
+
}""",
|
|
138
|
+
"description": "Typed API fetch wrapper with auth token injection",
|
|
139
|
+
},
|
|
140
|
+
{ # 5: LRU cache (similar to #22)
|
|
141
|
+
"project": "test-frontend",
|
|
142
|
+
"name": "LRUCache",
|
|
143
|
+
"filePath": "src/utils/cache.ts",
|
|
144
|
+
"language": "typescript",
|
|
145
|
+
"source": """export class LRUCache<K, V> {
|
|
146
|
+
private map = new Map<K, V>();
|
|
147
|
+
constructor(private capacity: number) {}
|
|
148
|
+
|
|
149
|
+
get(key: K): V | undefined {
|
|
150
|
+
if (!this.map.has(key)) return undefined;
|
|
151
|
+
const val = this.map.get(key)!;
|
|
152
|
+
this.map.delete(key);
|
|
153
|
+
this.map.set(key, val);
|
|
154
|
+
return val;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
set(key: K, value: V): void {
|
|
158
|
+
if (this.map.has(key)) this.map.delete(key);
|
|
159
|
+
this.map.set(key, value);
|
|
160
|
+
if (this.map.size > this.capacity) {
|
|
161
|
+
const first = this.map.keys().next().value;
|
|
162
|
+
this.map.delete(first);
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
get size(): number { return this.map.size; }
|
|
167
|
+
clear(): void { this.map.clear(); }
|
|
168
|
+
}""",
|
|
169
|
+
"description": "Generic LRU cache implementation",
|
|
170
|
+
},
|
|
171
|
+
{ # 6: retry logic variant (similar to #1)
|
|
172
|
+
"project": "test-frontend",
|
|
173
|
+
"name": "fetchWithRetry",
|
|
174
|
+
"filePath": "src/api/retry-fetch.ts",
|
|
175
|
+
"language": "typescript",
|
|
176
|
+
"source": """export async function fetchWithRetry(
|
|
177
|
+
url: string,
|
|
178
|
+
options: RequestInit = {},
|
|
179
|
+
retries: number = 3,
|
|
180
|
+
delay: number = 1000
|
|
181
|
+
): Promise<Response> {
|
|
182
|
+
let lastError: Error;
|
|
183
|
+
for (let i = 0; i < retries; i++) {
|
|
184
|
+
try {
|
|
185
|
+
const res = await fetch(url, options);
|
|
186
|
+
if (res.ok) return res;
|
|
187
|
+
throw new Error(`HTTP ${res.status}`);
|
|
188
|
+
} catch (err) {
|
|
189
|
+
lastError = err as Error;
|
|
190
|
+
const backoff = delay * Math.pow(2, i);
|
|
191
|
+
await new Promise(r => setTimeout(r, backoff));
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
throw lastError!;
|
|
195
|
+
}""",
|
|
196
|
+
"description": "Fetch wrapper with retry and exponential backoff",
|
|
197
|
+
},
|
|
198
|
+
{ # 7: event emitter
|
|
199
|
+
"project": "test-frontend",
|
|
200
|
+
"name": "EventEmitter",
|
|
201
|
+
"filePath": "src/utils/events.ts",
|
|
202
|
+
"language": "typescript",
|
|
203
|
+
"source": """type Handler = (...args: any[]) => void;
|
|
204
|
+
|
|
205
|
+
export class EventEmitter {
|
|
206
|
+
private listeners = new Map<string, Set<Handler>>();
|
|
207
|
+
|
|
208
|
+
on(event: string, handler: Handler): void {
|
|
209
|
+
if (!this.listeners.has(event)) this.listeners.set(event, new Set());
|
|
210
|
+
this.listeners.get(event)!.add(handler);
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
off(event: string, handler: Handler): void {
|
|
214
|
+
this.listeners.get(event)?.delete(handler);
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
emit(event: string, ...args: any[]): void {
|
|
218
|
+
for (const handler of this.listeners.get(event) ?? []) {
|
|
219
|
+
handler(...args);
|
|
220
|
+
}
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
once(event: string, handler: Handler): void {
|
|
224
|
+
const wrapper = (...args: any[]) => { this.off(event, wrapper); handler(...args); };
|
|
225
|
+
this.on(event, wrapper);
|
|
226
|
+
}
|
|
227
|
+
}""",
|
|
228
|
+
"description": "Typed event emitter with once support",
|
|
229
|
+
},
|
|
230
|
+
|
|
231
|
+
# ── Python Modules (project: test-backend) ──
|
|
232
|
+
{ # 8: rate limiter
|
|
233
|
+
"project": "test-backend",
|
|
234
|
+
"name": "rate_limiter",
|
|
235
|
+
"filePath": "src/middleware/rate_limiter.py",
|
|
236
|
+
"language": "python",
|
|
237
|
+
"source": """import time
|
|
238
|
+
from collections import defaultdict
|
|
239
|
+
from typing import Dict, Tuple
|
|
240
|
+
|
|
241
|
+
class RateLimiter:
|
|
242
|
+
def __init__(self, max_requests: int = 100, window_seconds: int = 60):
|
|
243
|
+
self.max_requests = max_requests
|
|
244
|
+
self.window = window_seconds
|
|
245
|
+
self._store: Dict[str, list[float]] = defaultdict(list)
|
|
246
|
+
|
|
247
|
+
def is_allowed(self, key: str) -> Tuple[bool, int]:
|
|
248
|
+
now = time.time()
|
|
249
|
+
cutoff = now - self.window
|
|
250
|
+
self._store[key] = [t for t in self._store[key] if t > cutoff]
|
|
251
|
+
if len(self._store[key]) >= self.max_requests:
|
|
252
|
+
return False, 0
|
|
253
|
+
self._store[key].append(now)
|
|
254
|
+
return True, self.max_requests - len(self._store[key])
|
|
255
|
+
|
|
256
|
+
def reset(self, key: str) -> None:
|
|
257
|
+
self._store.pop(key, None)
|
|
258
|
+
""",
|
|
259
|
+
"description": "Sliding window rate limiter",
|
|
260
|
+
},
|
|
261
|
+
{ # 9: JWT auth
|
|
262
|
+
"project": "test-backend",
|
|
263
|
+
"name": "jwt_auth",
|
|
264
|
+
"filePath": "src/auth/jwt_handler.py",
|
|
265
|
+
"language": "python",
|
|
266
|
+
"source": """import jwt
|
|
267
|
+
import time
|
|
268
|
+
from dataclasses import dataclass
|
|
269
|
+
from typing import Optional
|
|
270
|
+
|
|
271
|
+
SECRET_KEY = "change-me-in-production"
|
|
272
|
+
ALGORITHM = "HS256"
|
|
273
|
+
EXPIRY_SECONDS = 3600
|
|
274
|
+
|
|
275
|
+
@dataclass
|
|
276
|
+
class TokenPayload:
|
|
277
|
+
sub: str
|
|
278
|
+
exp: float
|
|
279
|
+
iat: float
|
|
280
|
+
roles: list[str]
|
|
281
|
+
|
|
282
|
+
def create_token(user_id: str, roles: list[str] = None) -> str:
|
|
283
|
+
now = time.time()
|
|
284
|
+
payload = {"sub": user_id, "exp": now + EXPIRY_SECONDS, "iat": now, "roles": roles or []}
|
|
285
|
+
return jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM)
|
|
286
|
+
|
|
287
|
+
def verify_token(token: str) -> Optional[TokenPayload]:
|
|
288
|
+
try:
|
|
289
|
+
data = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
|
290
|
+
return TokenPayload(**data)
|
|
291
|
+
except jwt.ExpiredSignatureError:
|
|
292
|
+
return None
|
|
293
|
+
except jwt.InvalidTokenError:
|
|
294
|
+
return None
|
|
295
|
+
""",
|
|
296
|
+
"description": "JWT token creation and verification",
|
|
297
|
+
},
|
|
298
|
+
{ # 10: database pool
|
|
299
|
+
"project": "test-backend",
|
|
300
|
+
"name": "db_pool",
|
|
301
|
+
"filePath": "src/db/pool.py",
|
|
302
|
+
"language": "python",
|
|
303
|
+
"source": """import sqlite3
|
|
304
|
+
from queue import Queue, Empty
|
|
305
|
+
from contextlib import contextmanager
|
|
306
|
+
from typing import Generator
|
|
307
|
+
|
|
308
|
+
class ConnectionPool:
|
|
309
|
+
def __init__(self, db_path: str, max_connections: int = 10):
|
|
310
|
+
self.db_path = db_path
|
|
311
|
+
self._pool: Queue[sqlite3.Connection] = Queue(maxsize=max_connections)
|
|
312
|
+
for _ in range(max_connections):
|
|
313
|
+
conn = sqlite3.connect(db_path, check_same_thread=False)
|
|
314
|
+
conn.row_factory = sqlite3.Row
|
|
315
|
+
self._pool.put(conn)
|
|
316
|
+
|
|
317
|
+
@contextmanager
|
|
318
|
+
def acquire(self) -> Generator[sqlite3.Connection, None, None]:
|
|
319
|
+
conn = self._pool.get(timeout=5)
|
|
320
|
+
try:
|
|
321
|
+
yield conn
|
|
322
|
+
finally:
|
|
323
|
+
self._pool.put(conn)
|
|
324
|
+
|
|
325
|
+
def close_all(self) -> None:
|
|
326
|
+
while not self._pool.empty():
|
|
327
|
+
try:
|
|
328
|
+
conn = self._pool.get_nowait()
|
|
329
|
+
conn.close()
|
|
330
|
+
except Empty:
|
|
331
|
+
break
|
|
332
|
+
""",
|
|
333
|
+
"description": "SQLite connection pool with context manager",
|
|
334
|
+
},
|
|
335
|
+
{ # 11: pagination
|
|
336
|
+
"project": "test-backend",
|
|
337
|
+
"name": "paginator",
|
|
338
|
+
"filePath": "src/utils/pagination.py",
|
|
339
|
+
"language": "python",
|
|
340
|
+
"source": """from dataclasses import dataclass
|
|
341
|
+
from typing import TypeVar, Generic, Sequence
|
|
342
|
+
import math
|
|
343
|
+
|
|
344
|
+
T = TypeVar("T")
|
|
345
|
+
|
|
346
|
+
@dataclass
|
|
347
|
+
class Page(Generic[T]):
|
|
348
|
+
items: Sequence[T]
|
|
349
|
+
total: int
|
|
350
|
+
page: int
|
|
351
|
+
per_page: int
|
|
352
|
+
total_pages: int
|
|
353
|
+
has_next: bool
|
|
354
|
+
has_prev: bool
|
|
355
|
+
|
|
356
|
+
def paginate(items: Sequence[T], page: int = 1, per_page: int = 20) -> Page[T]:
|
|
357
|
+
total = len(items)
|
|
358
|
+
total_pages = max(1, math.ceil(total / per_page))
|
|
359
|
+
page = max(1, min(page, total_pages))
|
|
360
|
+
start = (page - 1) * per_page
|
|
361
|
+
end = start + per_page
|
|
362
|
+
return Page(
|
|
363
|
+
items=items[start:end],
|
|
364
|
+
total=total, page=page, per_page=per_page,
|
|
365
|
+
total_pages=total_pages,
|
|
366
|
+
has_next=page < total_pages,
|
|
367
|
+
has_prev=page > 1,
|
|
368
|
+
)
|
|
369
|
+
""",
|
|
370
|
+
"description": "Generic pagination utility",
|
|
371
|
+
},
|
|
372
|
+
{ # 12: config loader
|
|
373
|
+
"project": "test-backend",
|
|
374
|
+
"name": "config_loader",
|
|
375
|
+
"filePath": "src/config/loader.py",
|
|
376
|
+
"language": "python",
|
|
377
|
+
"source": """import os
|
|
378
|
+
import json
|
|
379
|
+
from pathlib import Path
|
|
380
|
+
from typing import Any, Optional
|
|
381
|
+
|
|
382
|
+
class Config:
|
|
383
|
+
def __init__(self, data: dict[str, Any]):
|
|
384
|
+
self._data = data
|
|
385
|
+
|
|
386
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
387
|
+
keys = key.split(".")
|
|
388
|
+
val = self._data
|
|
389
|
+
for k in keys:
|
|
390
|
+
if isinstance(val, dict):
|
|
391
|
+
val = val.get(k)
|
|
392
|
+
else:
|
|
393
|
+
return default
|
|
394
|
+
if val is None:
|
|
395
|
+
return default
|
|
396
|
+
return val
|
|
397
|
+
|
|
398
|
+
@classmethod
|
|
399
|
+
def from_file(cls, path: str) -> "Config":
|
|
400
|
+
with open(path) as f:
|
|
401
|
+
return cls(json.load(f))
|
|
402
|
+
|
|
403
|
+
@classmethod
|
|
404
|
+
def from_env(cls, prefix: str = "APP_") -> "Config":
|
|
405
|
+
data = {}
|
|
406
|
+
for key, val in os.environ.items():
|
|
407
|
+
if key.startswith(prefix):
|
|
408
|
+
clean = key[len(prefix):].lower().replace("__", ".")
|
|
409
|
+
data[clean] = val
|
|
410
|
+
return cls(data)
|
|
411
|
+
""",
|
|
412
|
+
"description": "Configuration loader from file and environment",
|
|
413
|
+
},
|
|
414
|
+
|
|
415
|
+
# ── Rust Modules (project: test-infra) ──
|
|
416
|
+
{ # 13: hash map cache
|
|
417
|
+
"project": "test-infra",
|
|
418
|
+
"name": "HashCache",
|
|
419
|
+
"filePath": "src/cache/mod.rs",
|
|
420
|
+
"language": "rust",
|
|
421
|
+
"source": """use std::collections::HashMap;
|
|
422
|
+
use std::time::{Duration, Instant};
|
|
423
|
+
|
|
424
|
+
pub struct HashCache<V> {
|
|
425
|
+
store: HashMap<String, (V, Instant)>,
|
|
426
|
+
ttl: Duration,
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
impl<V: Clone> HashCache<V> {
|
|
430
|
+
pub fn new(ttl_secs: u64) -> Self {
|
|
431
|
+
Self {
|
|
432
|
+
store: HashMap::new(),
|
|
433
|
+
ttl: Duration::from_secs(ttl_secs),
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
pub fn get(&self, key: &str) -> Option<&V> {
|
|
438
|
+
self.store.get(key).and_then(|(val, ts)| {
|
|
439
|
+
if ts.elapsed() < self.ttl { Some(val) } else { None }
|
|
440
|
+
})
|
|
441
|
+
}
|
|
442
|
+
|
|
443
|
+
pub fn set(&mut self, key: String, value: V) {
|
|
444
|
+
self.store.insert(key, (value, Instant::now()));
|
|
445
|
+
}
|
|
446
|
+
|
|
447
|
+
pub fn evict_expired(&mut self) {
|
|
448
|
+
self.store.retain(|_, (_, ts)| ts.elapsed() < self.ttl);
|
|
449
|
+
}
|
|
450
|
+
|
|
451
|
+
pub fn len(&self) -> usize { self.store.len() }
|
|
452
|
+
}
|
|
453
|
+
""",
|
|
454
|
+
"description": "TTL-based hash map cache",
|
|
455
|
+
},
|
|
456
|
+
{ # 14: error types
|
|
457
|
+
"project": "test-infra",
|
|
458
|
+
"name": "AppError",
|
|
459
|
+
"filePath": "src/error.rs",
|
|
460
|
+
"language": "rust",
|
|
461
|
+
"source": """use std::fmt;
|
|
462
|
+
|
|
463
|
+
#[derive(Debug)]
|
|
464
|
+
pub enum AppError {
|
|
465
|
+
NotFound(String),
|
|
466
|
+
BadRequest(String),
|
|
467
|
+
Internal(String),
|
|
468
|
+
Unauthorized,
|
|
469
|
+
Forbidden,
|
|
470
|
+
}
|
|
471
|
+
|
|
472
|
+
impl fmt::Display for AppError {
|
|
473
|
+
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
474
|
+
match self {
|
|
475
|
+
Self::NotFound(msg) => write!(f, "Not Found: {}", msg),
|
|
476
|
+
Self::BadRequest(msg) => write!(f, "Bad Request: {}", msg),
|
|
477
|
+
Self::Internal(msg) => write!(f, "Internal Error: {}", msg),
|
|
478
|
+
Self::Unauthorized => write!(f, "Unauthorized"),
|
|
479
|
+
Self::Forbidden => write!(f, "Forbidden"),
|
|
480
|
+
}
|
|
481
|
+
}
|
|
482
|
+
}
|
|
483
|
+
|
|
484
|
+
impl std::error::Error for AppError {}
|
|
485
|
+
|
|
486
|
+
impl From<std::io::Error> for AppError {
|
|
487
|
+
fn from(err: std::io::Error) -> Self {
|
|
488
|
+
Self::Internal(err.to_string())
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
""",
|
|
492
|
+
"description": "Application error enum with Display and From impls",
|
|
493
|
+
},
|
|
494
|
+
{ # 15: middleware chain
|
|
495
|
+
"project": "test-infra",
|
|
496
|
+
"name": "MiddlewareChain",
|
|
497
|
+
"filePath": "src/middleware/chain.rs",
|
|
498
|
+
"language": "rust",
|
|
499
|
+
"source": """pub type Handler = Box<dyn Fn(&mut Request, &mut Response) -> Result<(), String>>;
|
|
500
|
+
|
|
501
|
+
pub struct Request {
|
|
502
|
+
pub path: String,
|
|
503
|
+
pub method: String,
|
|
504
|
+
pub headers: Vec<(String, String)>,
|
|
505
|
+
pub body: Option<String>,
|
|
506
|
+
}
|
|
507
|
+
|
|
508
|
+
pub struct Response {
|
|
509
|
+
pub status: u16,
|
|
510
|
+
pub headers: Vec<(String, String)>,
|
|
511
|
+
pub body: String,
|
|
512
|
+
}
|
|
513
|
+
|
|
514
|
+
pub struct MiddlewareChain {
|
|
515
|
+
handlers: Vec<Handler>,
|
|
516
|
+
}
|
|
517
|
+
|
|
518
|
+
impl MiddlewareChain {
|
|
519
|
+
pub fn new() -> Self { Self { handlers: Vec::new() } }
|
|
520
|
+
|
|
521
|
+
pub fn add(&mut self, handler: Handler) {
|
|
522
|
+
self.handlers.push(handler);
|
|
523
|
+
}
|
|
524
|
+
|
|
525
|
+
pub fn execute(&self, req: &mut Request, res: &mut Response) -> Result<(), String> {
|
|
526
|
+
for handler in &self.handlers {
|
|
527
|
+
handler(req, res)?;
|
|
528
|
+
}
|
|
529
|
+
Ok(())
|
|
530
|
+
}
|
|
531
|
+
}
|
|
532
|
+
""",
|
|
533
|
+
"description": "Composable middleware chain pattern",
|
|
534
|
+
},
|
|
535
|
+
{ # 16: task queue
|
|
536
|
+
"project": "test-infra",
|
|
537
|
+
"name": "TaskQueue",
|
|
538
|
+
"filePath": "src/queue/task_queue.rs",
|
|
539
|
+
"language": "rust",
|
|
540
|
+
"source": """use std::collections::VecDeque;
|
|
541
|
+
use std::sync::{Arc, Mutex, Condvar};
|
|
542
|
+
|
|
543
|
+
pub struct TaskQueue<T> {
|
|
544
|
+
inner: Arc<(Mutex<VecDeque<T>>, Condvar)>,
|
|
545
|
+
}
|
|
546
|
+
|
|
547
|
+
impl<T> Clone for TaskQueue<T> {
|
|
548
|
+
fn clone(&self) -> Self { Self { inner: self.inner.clone() } }
|
|
549
|
+
}
|
|
550
|
+
|
|
551
|
+
impl<T> TaskQueue<T> {
|
|
552
|
+
pub fn new() -> Self {
|
|
553
|
+
Self { inner: Arc::new((Mutex::new(VecDeque::new()), Condvar::new())) }
|
|
554
|
+
}
|
|
555
|
+
|
|
556
|
+
pub fn push(&self, item: T) {
|
|
557
|
+
let (lock, cvar) = &*self.inner;
|
|
558
|
+
let mut queue = lock.lock().unwrap();
|
|
559
|
+
queue.push_back(item);
|
|
560
|
+
cvar.notify_one();
|
|
561
|
+
}
|
|
562
|
+
|
|
563
|
+
pub fn pop(&self) -> T {
|
|
564
|
+
let (lock, cvar) = &*self.inner;
|
|
565
|
+
let mut queue = lock.lock().unwrap();
|
|
566
|
+
while queue.is_empty() {
|
|
567
|
+
queue = cvar.wait(queue).unwrap();
|
|
568
|
+
}
|
|
569
|
+
queue.pop_front().unwrap()
|
|
570
|
+
}
|
|
571
|
+
|
|
572
|
+
pub fn len(&self) -> usize {
|
|
573
|
+
self.inner.0.lock().unwrap().len()
|
|
574
|
+
}
|
|
575
|
+
}
|
|
576
|
+
""",
|
|
577
|
+
"description": "Thread-safe task queue with condition variable",
|
|
578
|
+
},
|
|
579
|
+
{ # 17: result extension
|
|
580
|
+
"project": "test-infra",
|
|
581
|
+
"name": "ResultExt",
|
|
582
|
+
"filePath": "src/utils/result_ext.rs",
|
|
583
|
+
"language": "rust",
|
|
584
|
+
"source": """pub trait ResultExt<T, E> {
|
|
585
|
+
fn log_err(self, context: &str) -> Result<T, E>;
|
|
586
|
+
fn unwrap_or_log(self, default: T, context: &str) -> T;
|
|
587
|
+
}
|
|
588
|
+
|
|
589
|
+
impl<T, E: std::fmt::Display> ResultExt<T, E> for Result<T, E> {
|
|
590
|
+
fn log_err(self, context: &str) -> Result<T, E> {
|
|
591
|
+
if let Err(ref e) = self {
|
|
592
|
+
eprintln!("[ERROR] {}: {}", context, e);
|
|
593
|
+
}
|
|
594
|
+
self
|
|
595
|
+
}
|
|
596
|
+
|
|
597
|
+
fn unwrap_or_log(self, default: T, context: &str) -> T {
|
|
598
|
+
match self {
|
|
599
|
+
Ok(v) => v,
|
|
600
|
+
Err(e) => {
|
|
601
|
+
eprintln!("[ERROR] {}: {}", context, e);
|
|
602
|
+
default
|
|
603
|
+
}
|
|
604
|
+
}
|
|
605
|
+
}
|
|
606
|
+
}
|
|
607
|
+
""",
|
|
608
|
+
"description": "Result extension trait for logging errors",
|
|
609
|
+
},
|
|
610
|
+
{ # 18: Python logger (similar to #2)
|
|
611
|
+
"project": "test-backend",
|
|
612
|
+
"name": "structured_logger",
|
|
613
|
+
"filePath": "src/utils/logger.py",
|
|
614
|
+
"language": "python",
|
|
615
|
+
"source": """import json
|
|
616
|
+
import sys
|
|
617
|
+
from datetime import datetime
|
|
618
|
+
from typing import Any
|
|
619
|
+
|
|
620
|
+
class StructuredLogger:
|
|
621
|
+
def __init__(self, name: str, level: str = "INFO"):
|
|
622
|
+
self.name = name
|
|
623
|
+
self.level = level
|
|
624
|
+
self._levels = {"DEBUG": 0, "INFO": 1, "WARN": 2, "ERROR": 3}
|
|
625
|
+
|
|
626
|
+
def _log(self, level: str, msg: str, **extra: Any) -> None:
|
|
627
|
+
if self._levels.get(level, 0) < self._levels.get(self.level, 0):
|
|
628
|
+
return
|
|
629
|
+
entry = {
|
|
630
|
+
"timestamp": datetime.utcnow().isoformat(),
|
|
631
|
+
"level": level,
|
|
632
|
+
"logger": self.name,
|
|
633
|
+
"message": msg,
|
|
634
|
+
**extra,
|
|
635
|
+
}
|
|
636
|
+
print(json.dumps(entry), file=sys.stderr)
|
|
637
|
+
|
|
638
|
+
def info(self, msg: str, **kw: Any) -> None: self._log("INFO", msg, **kw)
|
|
639
|
+
def warn(self, msg: str, **kw: Any) -> None: self._log("WARN", msg, **kw)
|
|
640
|
+
def error(self, msg: str, **kw: Any) -> None: self._log("ERROR", msg, **kw)
|
|
641
|
+
def debug(self, msg: str, **kw: Any) -> None: self._log("DEBUG", msg, **kw)
|
|
642
|
+
|
|
643
|
+
def get_logger(name: str = "app") -> StructuredLogger:
|
|
644
|
+
return StructuredLogger(name)
|
|
645
|
+
""",
|
|
646
|
+
"description": "Structured JSON logger with level filtering",
|
|
647
|
+
},
|
|
648
|
+
{ # 19: Python validator
|
|
649
|
+
"project": "test-backend",
|
|
650
|
+
"name": "input_validator",
|
|
651
|
+
"filePath": "src/utils/validator.py",
|
|
652
|
+
"language": "python",
|
|
653
|
+
"source": """import re
|
|
654
|
+
from typing import Any, Optional
|
|
655
|
+
|
|
656
|
+
class ValidationError(Exception):
|
|
657
|
+
def __init__(self, field: str, message: str):
|
|
658
|
+
self.field = field
|
|
659
|
+
self.message = message
|
|
660
|
+
super().__init__(f"{field}: {message}")
|
|
661
|
+
|
|
662
|
+
def validate_email(email: str) -> bool:
|
|
663
|
+
pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$'
|
|
664
|
+
return bool(re.match(pattern, email))
|
|
665
|
+
|
|
666
|
+
def validate_required(data: dict, fields: list[str]) -> None:
|
|
667
|
+
for field in fields:
|
|
668
|
+
if field not in data or data[field] is None:
|
|
669
|
+
raise ValidationError(field, "is required")
|
|
670
|
+
|
|
671
|
+
def validate_length(value: str, field: str, min_len: int = 0, max_len: int = 255) -> None:
|
|
672
|
+
if len(value) < min_len:
|
|
673
|
+
raise ValidationError(field, f"must be at least {min_len} characters")
|
|
674
|
+
if len(value) > max_len:
|
|
675
|
+
raise ValidationError(field, f"must be at most {max_len} characters")
|
|
676
|
+
""",
|
|
677
|
+
"description": "Input validation utilities",
|
|
678
|
+
},
|
|
679
|
+
{ # 20: Rust serializer
|
|
680
|
+
"project": "test-infra",
|
|
681
|
+
"name": "JsonSerializer",
|
|
682
|
+
"filePath": "src/utils/serializer.rs",
|
|
683
|
+
"language": "rust",
|
|
684
|
+
"source": """use std::collections::HashMap;
|
|
685
|
+
|
|
686
|
+
#[derive(Debug, Clone)]
|
|
687
|
+
pub enum JsonValue {
|
|
688
|
+
Null,
|
|
689
|
+
Bool(bool),
|
|
690
|
+
Number(f64),
|
|
691
|
+
Str(String),
|
|
692
|
+
Array(Vec<JsonValue>),
|
|
693
|
+
Object(HashMap<String, JsonValue>),
|
|
694
|
+
}
|
|
695
|
+
|
|
696
|
+
impl JsonValue {
|
|
697
|
+
pub fn as_str(&self) -> Option<&str> {
|
|
698
|
+
match self { Self::Str(s) => Some(s), _ => None }
|
|
699
|
+
}
|
|
700
|
+
pub fn as_f64(&self) -> Option<f64> {
|
|
701
|
+
match self { Self::Number(n) => Some(*n), _ => None }
|
|
702
|
+
}
|
|
703
|
+
pub fn as_bool(&self) -> Option<bool> {
|
|
704
|
+
match self { Self::Bool(b) => Some(*b), _ => None }
|
|
705
|
+
}
|
|
706
|
+
pub fn is_null(&self) -> bool {
|
|
707
|
+
matches!(self, Self::Null)
|
|
708
|
+
}
|
|
709
|
+
}
|
|
710
|
+
|
|
711
|
+
impl std::fmt::Display for JsonValue {
|
|
712
|
+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
713
|
+
match self {
|
|
714
|
+
Self::Null => write!(f, "null"),
|
|
715
|
+
Self::Bool(b) => write!(f, "{}", b),
|
|
716
|
+
Self::Number(n) => write!(f, "{}", n),
|
|
717
|
+
Self::Str(s) => write!(f, "\\\"{}\\\"", s),
|
|
718
|
+
Self::Array(arr) => write!(f, "[{}]", arr.iter().map(|v| v.to_string()).collect::<Vec<_>>().join(",")),
|
|
719
|
+
Self::Object(map) => write!(f, "{{{}}}", map.iter().map(|(k, v)| format!("\\\"{}\\\":{}", k, v)).collect::<Vec<_>>().join(",")),
|
|
720
|
+
}
|
|
721
|
+
}
|
|
722
|
+
}
|
|
723
|
+
""",
|
|
724
|
+
"description": "Simple JSON value enum with accessors and Display",
|
|
725
|
+
},
|
|
726
|
+
{ # 21: Python task scheduler
|
|
727
|
+
"project": "test-backend",
|
|
728
|
+
"name": "task_scheduler",
|
|
729
|
+
"filePath": "src/tasks/scheduler.py",
|
|
730
|
+
"language": "python",
|
|
731
|
+
"source": """import time
|
|
732
|
+
import threading
|
|
733
|
+
from typing import Callable, Optional
|
|
734
|
+
from dataclasses import dataclass, field
|
|
735
|
+
|
|
736
|
+
@dataclass
|
|
737
|
+
class ScheduledTask:
|
|
738
|
+
name: str
|
|
739
|
+
fn: Callable[[], None]
|
|
740
|
+
interval_seconds: float
|
|
741
|
+
last_run: float = 0.0
|
|
742
|
+
running: bool = False
|
|
743
|
+
|
|
744
|
+
class Scheduler:
|
|
745
|
+
def __init__(self):
|
|
746
|
+
self.tasks: list[ScheduledTask] = []
|
|
747
|
+
self._stop_event = threading.Event()
|
|
748
|
+
self._thread: Optional[threading.Thread] = None
|
|
749
|
+
|
|
750
|
+
def add(self, name: str, fn: Callable, interval: float) -> None:
|
|
751
|
+
self.tasks.append(ScheduledTask(name=name, fn=fn, interval_seconds=interval))
|
|
752
|
+
|
|
753
|
+
def start(self) -> None:
|
|
754
|
+
self._stop_event.clear()
|
|
755
|
+
self._thread = threading.Thread(target=self._run, daemon=True)
|
|
756
|
+
self._thread.start()
|
|
757
|
+
|
|
758
|
+
def stop(self) -> None:
|
|
759
|
+
self._stop_event.set()
|
|
760
|
+
if self._thread:
|
|
761
|
+
self._thread.join(timeout=5)
|
|
762
|
+
|
|
763
|
+
def _run(self) -> None:
|
|
764
|
+
while not self._stop_event.is_set():
|
|
765
|
+
now = time.time()
|
|
766
|
+
for task in self.tasks:
|
|
767
|
+
if not task.running and (now - task.last_run) >= task.interval_seconds:
|
|
768
|
+
task.running = True
|
|
769
|
+
task.last_run = now
|
|
770
|
+
try:
|
|
771
|
+
task.fn()
|
|
772
|
+
except Exception as e:
|
|
773
|
+
print(f"[scheduler] {task.name} failed: {e}")
|
|
774
|
+
finally:
|
|
775
|
+
task.running = False
|
|
776
|
+
self._stop_event.wait(timeout=1.0)
|
|
777
|
+
""",
|
|
778
|
+
"description": "Simple interval-based task scheduler",
|
|
779
|
+
},
|
|
780
|
+
{ # 22: TypeScript cache (similar to #5)
|
|
781
|
+
"project": "test-frontend",
|
|
782
|
+
"name": "TTLCache",
|
|
783
|
+
"filePath": "src/utils/ttl-cache.ts",
|
|
784
|
+
"language": "typescript",
|
|
785
|
+
"source": """export class TTLCache<K, V> {
|
|
786
|
+
private map = new Map<K, { value: V; expiry: number }>();
|
|
787
|
+
constructor(private ttlMs: number) {}
|
|
788
|
+
|
|
789
|
+
get(key: K): V | undefined {
|
|
790
|
+
const entry = this.map.get(key);
|
|
791
|
+
if (!entry) return undefined;
|
|
792
|
+
if (Date.now() > entry.expiry) {
|
|
793
|
+
this.map.delete(key);
|
|
794
|
+
return undefined;
|
|
795
|
+
}
|
|
796
|
+
return entry.value;
|
|
797
|
+
}
|
|
798
|
+
|
|
799
|
+
set(key: K, value: V): void {
|
|
800
|
+
this.map.set(key, { value, expiry: Date.now() + this.ttlMs });
|
|
801
|
+
}
|
|
802
|
+
|
|
803
|
+
has(key: K): boolean {
|
|
804
|
+
return this.get(key) !== undefined;
|
|
805
|
+
}
|
|
806
|
+
|
|
807
|
+
get size(): number { return this.map.size; }
|
|
808
|
+
|
|
809
|
+
clear(): void { this.map.clear(); }
|
|
810
|
+
|
|
811
|
+
evictExpired(): number {
|
|
812
|
+
const now = Date.now();
|
|
813
|
+
let evicted = 0;
|
|
814
|
+
for (const [key, entry] of this.map) {
|
|
815
|
+
if (now > entry.expiry) { this.map.delete(key); evicted++; }
|
|
816
|
+
}
|
|
817
|
+
return evicted;
|
|
818
|
+
}
|
|
819
|
+
}""",
|
|
820
|
+
"description": "TTL-based cache with automatic expiry",
|
|
821
|
+
},
|
|
822
|
+
]
|
|
823
|
+
|
|
824
|
+
|
|
825
|
+
def main() -> int:
    """Run the Brain code-intelligence E2E flow against a local daemon.

    Registers the 22 fixture modules from the module-level MODULES list, then
    exercises duplicate/update detection, similarity, search, listing, synapse,
    project, and analytics endpoints in order. Results are accumulated through
    the module-level check() helper, which presumably updates the PASS/FAIL/
    ERRORS globals (defined earlier in this file, outside this view — confirm).

    Returns:
        0 if every check passed, 1 if any check failed.
    """
    print("\n" + "=" * 60)
    print(" BRAIN E2E TEST: Code Intelligence Complete Flow")
    print("=" * 60)

    # IDs returned by the daemon for each registered module; later steps
    # (detail lookup, synapses, path finding) index into this list.
    module_ids: list[int] = []

    # ── 1. Register 22 code modules ───────────────────────────
    print("\n[1] Registering 22 code modules across 3 projects...")
    for i, mod in enumerate(MODULES):
        r = post("/code/analyze", mod)
        ok = r.status_code == 201
        data = r.json().get("result", {})
        mid = data.get("moduleId")
        # NOTE(review): is_new is never used in this step; duplicate
        # detection re-reads "isNew" in step 2. Kept for parity.
        is_new = data.get("isNew")
        score = data.get("reusabilityScore", 0)
        check(ok and mid is not None, f"Module #{i+1} '{mod['name']}' registered (id={mid}, score={score:.2f})")
        if mid is not None:
            module_ids.append(mid)

    check(len(module_ids) == 22, f"All 22 modules created ({len(module_ids)} IDs)")

    # ── 2. Duplicate detection ─────────────────────────────────
    # Re-submitting an identical module must be recognized, not duplicated.
    print("\n[2] Testing duplicate detection...")
    r = post("/code/analyze", MODULES[0])
    data = r.json().get("result", {})
    dup_new = data.get("isNew")
    check(dup_new is False, f"Duplicate detected (isNew={dup_new})")
    check(data.get("moduleId") == module_ids[0], "Duplicate returns same moduleId")

    # ── 3. Update detection ────────────────────────────────────
    print("\n[3] Testing update detection...")
    modified = {**MODULES[0], "source": MODULES[0]["source"] + "\n// v2: added jitter to backoff\n"}
    r = post("/code/analyze", modified)
    data = r.json().get("result", {})
    # The fingerprint might change, creating a new module, or it stays the same
    check(r.status_code == 201, "Modified source accepted")
    check(data.get("moduleId") is not None, f"Update returned module id={data.get('moduleId')}")

    # ── 4. Code similarity detection ──────────────────────────
    print("\n[4] Testing code similarity detection...")
    # Check similarity of module #1 (retry) source
    r = post("/code/similarity", {
        "source": MODULES[0]["source"],
        "language": "typescript",
    })
    check(r.status_code == 201, "Similarity endpoint returns 201")
    sim_data = r.json().get("result")
    check(sim_data is not None, f"Similarity result: {type(sim_data)}")

    # Check similarity of module #5 (LRU cache) source
    r = post("/code/similarity", {
        "source": MODULES[4]["source"],
        "language": "typescript",
    })
    check(r.status_code == 201, "LRU cache similarity check returns 201")

    # Check similarity of a Python module
    r = post("/code/similarity", {
        "source": MODULES[17]["source"],  # structured_logger
        "language": "python",
    })
    check(r.status_code == 201, "Python similarity check returns 201")

    # ── 5. Find reusable code ─────────────────────────────────
    # Purpose-based search: only list-shape is asserted, not hit counts.
    print("\n[5] Finding reusable code by purpose...")
    queries = [
        ("retry", "Find retry/backoff utilities"),
        ("cache", "Find caching implementations"),
        ("logger", "Find logging utilities"),
        ("auth", "Find authentication modules"),
        ("queue", "Find queue implementations"),
    ]
    for query, label in queries:
        r = post("/code/find", {"query": query})
        check(r.status_code == 201, f"Find '{query}' returns 201")
        found = r.json().get("result", [])
        check(isinstance(found, list), f"{label}: {len(found)} result(s)")

    # ── 6. List modules with filters ──────────────────────────
    # Lower bounds (>=) rather than exact counts tolerate leftover state
    # from earlier runs against the same daemon.
    print("\n[6] Listing modules with filters...")
    r = get("/code/modules")
    check(r.status_code == 200, "List all modules returns 200")
    all_mods = r.json().get("result", [])
    check(isinstance(all_mods, list) and len(all_mods) >= 20, f"Total modules: {len(all_mods)}")

    r = get("/code/modules", params={"language": "typescript"})
    check(r.status_code == 200, "List TypeScript modules returns 200")
    ts_mods = r.json().get("result", [])
    check(isinstance(ts_mods, list) and len(ts_mods) >= 7, f"TypeScript modules: {len(ts_mods)}")

    r = get("/code/modules", params={"language": "python"})
    py_mods = r.json().get("result", [])
    check(isinstance(py_mods, list) and len(py_mods) >= 5, f"Python modules: {len(py_mods)}")

    r = get("/code/modules", params={"language": "rust"})
    rust_mods = r.json().get("result", [])
    check(isinstance(rust_mods, list) and len(rust_mods) >= 5, f"Rust modules: {len(rust_mods)}")

    r = get("/code/modules", params={"limit": "5"})
    check(r.status_code == 200, "Limit=5 returns 200")
    limited = r.json().get("result", [])
    check(len(limited) <= 5, f"Limit respected: {len(limited)} modules")

    # ── 7. Get single module details ──────────────────────────
    print("\n[7] Getting single module details...")
    if module_ids:
        r = get(f"/code/{module_ids[0]}")
        check(r.status_code == 200, "Get module by ID returns 200")
        mod_detail = r.json().get("result", {})
        check(mod_detail.get("id") == module_ids[0], "Module detail has correct ID")
        check(mod_detail.get("name") is not None, f"Module name: {mod_detail.get('name')}")
        check(mod_detail.get("language") is not None, f"Module language: {mod_detail.get('language')}")

    # ── 8. Synapse verification ───────────────────────────────
    # Registering similar modules should have produced synapse links.
    print("\n[8] Verifying synapses between similar modules...")
    r = get("/synapses/stats")
    check(r.status_code == 200, "Synapse stats returns 200")
    stats = r.json().get("result", {})
    check(stats.get("totalSynapses", 0) > 0, f"Total synapses: {stats.get('totalSynapses', 0)}")

    # ── 9. Spreading activation from module node ──────────────
    print("\n[9] Testing spreading activation...")
    if module_ids:
        r = post("/synapses/related", {
            "nodeType": "code_module",
            "nodeId": module_ids[0],
            "maxDepth": 3,
            "minWeight": 0.01,
        })
        check(r.status_code == 201, "Spreading activation returns 201")
        related = r.json().get("result", [])
        check(isinstance(related, list), f"Related nodes: {len(related)}")

    # ── 10. Path finding between modules ──────────────────────
    print("\n[10] Testing path finding between modules...")
    if len(module_ids) >= 2:
        r = post("/synapses/path", {
            "fromType": "code_module",
            "fromId": module_ids[0],
            "toType": "code_module",
            "toId": module_ids[1],
        })
        check(r.status_code == 201, "Path finding returns 201")
        path = r.json().get("result")
        # NOTE(review): this condition is a tautology (always True) — it only
        # records that the endpoint responded; consider asserting on the
        # payload shape instead.
        check(path is not None or path is None, f"Path result: {type(path)}")

    # ── 11. Verify auto-created projects ──────────────────────
    # /code/analyze is expected to have implicitly created the three projects.
    print("\n[11] Verifying auto-created projects...")
    r = get("/projects")
    check(r.status_code == 200, "Projects endpoint returns 200")
    projects = r.json().get("result", [])
    check(isinstance(projects, list) and len(projects) >= 3, f"Projects: {len(projects)}")
    project_names = [p.get("name") for p in projects if isinstance(p, dict)]
    for name in ["test-frontend", "test-backend", "test-infra"]:
        check(name in project_names, f"Project '{name}' auto-created")

    # ── 12. Check module stats via analytics ──────────────────
    print("\n[12] Checking analytics summary for module stats...")
    r = get("/analytics/summary")
    check(r.status_code == 200, "Analytics summary returns 200")
    summary = r.json().get("result", {})
    modules_data = summary.get("modules", {})
    total_modules = modules_data.get("total", 0) if isinstance(modules_data, dict) else 0
    check(total_modules >= 20, f"Analytics shows {total_modules} modules")

    # ── Summary ────────────────────────────────────────────────
    print("\n" + "=" * 60)
    total = PASS + FAIL
    print(f" Results: {PASS}/{total} passed, {FAIL} failed")
    if ERRORS:
        print(f"\n Failed tests:")
        for e in ERRORS:
            print(f"    - {e}")
    print("=" * 60 + "\n")

    return 0 if FAIL == 0 else 1
|
|
1002
|
+
|
|
1003
|
+
|
|
1004
|
+
if __name__ == "__main__":
    # Entry point: run the suite and map outcomes onto process exit codes.
    # main() yields 0 (all passed) / 1 (checks failed); exit code 2 is
    # reserved for infrastructure problems — daemon unreachable or an
    # unexpected crash inside the suite.
    status = 2
    try:
        status = main()
    except httpx.ConnectError:
        print("\n\033[31mERROR: Cannot connect to Brain daemon on port 7777.\033[0m")
        print("Run 'brain start' or 'brain doctor' first.\n")
    except Exception as exc:
        print(f"\n\033[31mFATAL: {exc}\033[0m\n")
        import traceback
        traceback.print_exc()
    sys.exit(status)
|