te.js 2.1.5 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/auto-docs/analysis/handler-analyzer.test.js +106 -0
- package/auto-docs/analysis/source-resolver.test.js +58 -0
- package/auto-docs/constants.js +13 -2
- package/auto-docs/openapi/generator.js +7 -5
- package/auto-docs/openapi/generator.test.js +132 -0
- package/auto-docs/openapi/spec-builders.js +39 -19
- package/cli/docs-command.js +44 -36
- package/cors/index.test.js +82 -0
- package/database/index.js +3 -1
- package/database/mongodb.js +17 -11
- package/database/redis.js +53 -44
- package/docs/configuration.md +24 -10
- package/docs/error-handling.md +134 -50
- package/lib/llm/client.js +40 -10
- package/lib/llm/index.js +14 -1
- package/lib/llm/parse.test.js +60 -0
- package/package.json +3 -1
- package/radar/index.js +281 -0
- package/rate-limit/index.js +8 -11
- package/rate-limit/index.test.js +64 -0
- package/server/ammo/body-parser.js +156 -152
- package/server/ammo/body-parser.test.js +79 -0
- package/server/ammo/enhancer.js +8 -4
- package/server/ammo.js +216 -17
- package/server/context/request-context.js +51 -0
- package/server/context/request-context.test.js +53 -0
- package/server/endpoint.js +15 -0
- package/server/error.js +56 -3
- package/server/error.test.js +45 -0
- package/server/errors/channels/base.js +31 -0
- package/server/errors/channels/channels.test.js +148 -0
- package/server/errors/channels/console.js +64 -0
- package/server/errors/channels/index.js +111 -0
- package/server/errors/channels/log.js +27 -0
- package/server/errors/llm-cache.js +102 -0
- package/server/errors/llm-cache.test.js +160 -0
- package/server/errors/llm-error-service.js +77 -16
- package/server/errors/llm-rate-limiter.js +72 -0
- package/server/errors/llm-rate-limiter.test.js +105 -0
- package/server/files/uploader.js +38 -26
- package/server/handler.js +5 -3
- package/server/targets/registry.js +9 -9
- package/server/targets/registry.test.js +108 -0
- package/te.js +214 -57
- package/utils/auto-register.js +1 -1
- package/utils/configuration.js +23 -9
- package/utils/configuration.test.js +58 -0
- package/utils/errors-llm-config.js +142 -9
- package/utils/request-logger.js +49 -3
|
@@ -3,11 +3,15 @@
|
|
|
3
3
|
* returns statusCode and message (and optionally devInsight in non-production).
|
|
4
4
|
* Uses shared lib/llm with errors.llm config. Developers do not pass an error object;
|
|
5
5
|
* the LLM infers from the code where ammo.throw() was called.
|
|
6
|
+
*
|
|
7
|
+
* Flow: cache check -> rate limit check -> LLM call -> record rate -> store cache -> return.
|
|
6
8
|
*/
|
|
7
9
|
|
|
8
10
|
import { createProvider } from '../../lib/llm/index.js';
|
|
9
11
|
import { extractJSON } from '../../lib/llm/parse.js';
|
|
10
12
|
import { getErrorsLlmConfig } from '../../utils/errors-llm-config.js';
|
|
13
|
+
import { getRateLimiter } from './llm-rate-limiter.js';
|
|
14
|
+
import { getCache } from './llm-cache.js';
|
|
11
15
|
|
|
12
16
|
const DEFAULT_STATUS = 500;
|
|
13
17
|
const DEFAULT_MESSAGE = 'Internal Server Error';
|
|
@@ -24,7 +28,8 @@ const DEFAULT_MESSAGE = 'Internal Server Error';
|
|
|
24
28
|
* @returns {string}
|
|
25
29
|
*/
|
|
26
30
|
function buildPrompt(context) {
|
|
27
|
-
const { codeContext, method, path, includeDevInsight, messageType, error } =
|
|
31
|
+
const { codeContext, method, path, includeDevInsight, messageType, error } =
|
|
32
|
+
context;
|
|
28
33
|
const forDeveloper = messageType === 'developer';
|
|
29
34
|
|
|
30
35
|
const requestPart = [method, path].filter(Boolean).length
|
|
@@ -35,7 +40,10 @@ function buildPrompt(context) {
|
|
|
35
40
|
if (codeContext?.snippets?.length) {
|
|
36
41
|
codePart = codeContext.snippets
|
|
37
42
|
.map((s, i) => {
|
|
38
|
-
const label =
|
|
43
|
+
const label =
|
|
44
|
+
i === 0
|
|
45
|
+
? 'Call site (where ammo.throw() was invoked)'
|
|
46
|
+
: `Upstream caller ${i}`;
|
|
39
47
|
return `--- ${label}: ${s.file} (line ${s.line}) ---\n${s.snippet}`;
|
|
40
48
|
})
|
|
41
49
|
.join('\n\n');
|
|
@@ -43,7 +51,7 @@ function buildPrompt(context) {
|
|
|
43
51
|
|
|
44
52
|
let errorPart = '';
|
|
45
53
|
if (error !== undefined && error !== null) {
|
|
46
|
-
if (error
|
|
54
|
+
if (error != null && typeof error.message === 'string') {
|
|
47
55
|
errorPart = `\nOptional error message (may be empty): ${error.message}`;
|
|
48
56
|
} else {
|
|
49
57
|
errorPart = `\nOptional error/message: ${String(error)}`;
|
|
@@ -80,27 +88,65 @@ JSON:`;
|
|
|
80
88
|
|
|
81
89
|
/**
|
|
82
90
|
* Infer HTTP statusCode and message (and optionally devInsight) from code context using the LLM.
|
|
83
|
-
*
|
|
84
|
-
* The primary input is codeContext (surrounding + upstream/downstream snippets); error is optional.
|
|
91
|
+
* Checks cache first, then rate limit. On success stores result in cache.
|
|
85
92
|
*
|
|
86
93
|
* @param {object} context - Context for the prompt.
|
|
87
|
-
* @param {{ snippets: Array<{ file: string, line: number, snippet: string }> }} context.codeContext
|
|
88
|
-
* @param {string} [context.method]
|
|
89
|
-
* @param {string} [context.path]
|
|
90
|
-
* @param {boolean} [context.includeDevInsight]
|
|
91
|
-
* @param {'endUser'|'developer'} [context.messageType]
|
|
92
|
-
* @param {string|Error|undefined} [context.error]
|
|
93
|
-
* @returns {Promise<{ statusCode: number, message: string, devInsight?: string }>}
|
|
94
|
+
* @param {{ snippets: Array<{ file: string, line: number, snippet: string }> }} context.codeContext
|
|
95
|
+
* @param {string} [context.method]
|
|
96
|
+
* @param {string} [context.path]
|
|
97
|
+
* @param {boolean} [context.includeDevInsight]
|
|
98
|
+
* @param {'endUser'|'developer'} [context.messageType]
|
|
99
|
+
* @param {string|Error|undefined} [context.error]
|
|
100
|
+
* @returns {Promise<{ statusCode: number, message: string, devInsight?: string, cached?: boolean, rateLimited?: boolean }>}
|
|
94
101
|
*/
|
|
95
102
|
export async function inferErrorFromContext(context) {
|
|
96
103
|
const config = getErrorsLlmConfig();
|
|
97
|
-
const {
|
|
98
|
-
|
|
104
|
+
const {
|
|
105
|
+
baseURL,
|
|
106
|
+
apiKey,
|
|
107
|
+
model,
|
|
108
|
+
messageType: configMessageType,
|
|
109
|
+
timeout,
|
|
110
|
+
rateLimit,
|
|
111
|
+
cache: cacheEnabled,
|
|
112
|
+
cacheTTL,
|
|
113
|
+
} = config;
|
|
99
114
|
|
|
100
115
|
const isProduction = process.env.NODE_ENV === 'production';
|
|
101
|
-
const includeDevInsight =
|
|
116
|
+
const includeDevInsight =
|
|
117
|
+
context.includeDevInsight !== false
|
|
118
|
+
? context.forceDevInsight
|
|
119
|
+
? true
|
|
120
|
+
: !isProduction
|
|
121
|
+
: false;
|
|
102
122
|
const messageType = context.messageType ?? configMessageType;
|
|
103
123
|
|
|
124
|
+
// 1. Cache check
|
|
125
|
+
if (cacheEnabled) {
|
|
126
|
+
const cache = getCache(cacheTTL);
|
|
127
|
+
const key = cache.buildKey(context.codeContext, context.error);
|
|
128
|
+
const cached = cache.get(key);
|
|
129
|
+
if (cached) {
|
|
130
|
+
return { ...cached, cached: true };
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
// 2. Rate limit check
|
|
135
|
+
const limiter = getRateLimiter(rateLimit);
|
|
136
|
+
if (!limiter.canCall()) {
|
|
137
|
+
return {
|
|
138
|
+
statusCode: DEFAULT_STATUS,
|
|
139
|
+
message: DEFAULT_MESSAGE,
|
|
140
|
+
...(includeDevInsight && {
|
|
141
|
+
devInsight: 'LLM rate limit exceeded — error was not enhanced.',
|
|
142
|
+
}),
|
|
143
|
+
rateLimited: true,
|
|
144
|
+
};
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
// 3. LLM call
|
|
148
|
+
const provider = createProvider({ baseURL, apiKey, model, timeout });
|
|
149
|
+
|
|
104
150
|
const prompt = buildPrompt({
|
|
105
151
|
codeContext: context.codeContext,
|
|
106
152
|
method: context.method,
|
|
@@ -111,6 +157,10 @@ export async function inferErrorFromContext(context) {
|
|
|
111
157
|
});
|
|
112
158
|
|
|
113
159
|
const { content } = await provider.analyze(prompt);
|
|
160
|
+
|
|
161
|
+
// 4. Record the call against the rate limit
|
|
162
|
+
limiter.record();
|
|
163
|
+
|
|
114
164
|
const parsed = extractJSON(content);
|
|
115
165
|
|
|
116
166
|
if (!parsed || typeof parsed !== 'object') {
|
|
@@ -132,9 +182,20 @@ export async function inferErrorFromContext(context) {
|
|
|
132
182
|
: DEFAULT_MESSAGE;
|
|
133
183
|
|
|
134
184
|
const result = { statusCode, message };
|
|
135
|
-
if (
|
|
185
|
+
if (
|
|
186
|
+
includeDevInsight &&
|
|
187
|
+
typeof parsed.devInsight === 'string' &&
|
|
188
|
+
parsed.devInsight.trim()
|
|
189
|
+
) {
|
|
136
190
|
result.devInsight = parsed.devInsight.trim();
|
|
137
191
|
}
|
|
138
192
|
|
|
193
|
+
// 5. Store in cache
|
|
194
|
+
if (cacheEnabled) {
|
|
195
|
+
const cache = getCache(cacheTTL);
|
|
196
|
+
const key = cache.buildKey(context.codeContext, context.error);
|
|
197
|
+
cache.set(key, result);
|
|
198
|
+
}
|
|
199
|
+
|
|
139
200
|
return result;
|
|
140
201
|
}
|
|
/**
 * In-memory sliding window rate limiter for LLM error inference calls.
 * Tracks LLM call timestamps in the last 60 seconds.
 * Shared singleton across the process; configured from errors.llm.rateLimit.
 */

class LLMRateLimiter {
  /**
   * @param {number} maxPerMinute - Maximum LLM calls allowed per 60-second window.
   *   Any value that does not floor to a positive finite integer (0, negatives,
   *   NaN, Infinity, or fractions below 1) falls back to the default of 10.
   */
  constructor(maxPerMinute) {
    // Bug fix: the previous guard `maxPerMinute > 0 ? Math.floor(maxPerMinute) : 10`
    // floored fractional values in (0, 1) down to 0, silently producing a limiter
    // that blocks ALL calls forever. Floor first, then validate the result.
    const floored = Math.floor(maxPerMinute);
    this.maxPerMinute = Number.isFinite(floored) && floored > 0 ? floored : 10;
    /** @type {number[]} timestamps of recent LLM calls (ms since epoch) */
    this._timestamps = [];
  }

  /**
   * Prune timestamps older than 60 seconds from now.
   * Timestamps are appended in chronological order, so the array is sorted
   * ascending and splicing the leading expired run is sufficient.
   */
  _prune() {
    const cutoff = Date.now() - 60_000;
    let i = 0;
    while (i < this._timestamps.length && this._timestamps[i] <= cutoff) {
      i++;
    }
    if (i > 0) this._timestamps.splice(0, i);
  }

  /**
   * Returns true if an LLM call is allowed under the current rate.
   * @returns {boolean}
   */
  canCall() {
    this._prune();
    return this._timestamps.length < this.maxPerMinute;
  }

  /**
   * Record that an LLM call was made right now.
   */
  record() {
    this._prune();
    this._timestamps.push(Date.now());
  }

  /**
   * Returns how many calls remain in the current window.
   * @returns {number}
   */
  remaining() {
    this._prune();
    return Math.max(0, this.maxPerMinute - this._timestamps.length);
  }
}

/** @type {LLMRateLimiter|null} */
let _instance = null;

/**
 * Get (or create) the singleton rate limiter.
 * Re-initializes if maxPerMinute changes.
 * NOTE(review): the comparison is against the *normalized* maxPerMinute, so a
 * raw value that normalizes differently (e.g. 4.9 -> 4) recreates the instance
 * on every call — confirm callers pass positive integers.
 * @param {number} maxPerMinute
 * @returns {LLMRateLimiter}
 */
export function getRateLimiter(maxPerMinute) {
  if (!_instance || _instance.maxPerMinute !== maxPerMinute) {
    _instance = new LLMRateLimiter(maxPerMinute);
  }
  return _instance;
}

export { LLMRateLimiter };
import { describe, it, expect, afterEach, vi } from 'vitest';
import { LLMRateLimiter, getRateLimiter } from './llm-rate-limiter.js';

describe('LLMRateLimiter', () => {
  describe('constructor', () => {
    it('uses provided maxPerMinute', () => {
      const limiter = new LLMRateLimiter(5);
      expect(limiter.maxPerMinute).toBe(5);
    });

    it('defaults to 10 when maxPerMinute is invalid', () => {
      expect(new LLMRateLimiter(0).maxPerMinute).toBe(10);
      expect(new LLMRateLimiter(-1).maxPerMinute).toBe(10);
    });

    it('floors non-integer values', () => {
      expect(new LLMRateLimiter(4.9).maxPerMinute).toBe(4);
    });
  });

  describe('canCall() and record()', () => {
    it('allows calls when under the limit', () => {
      const limiter = new LLMRateLimiter(3);
      expect(limiter.canCall()).toBe(true);
    });

    it('blocks calls when at the limit', () => {
      const limiter = new LLMRateLimiter(2);
      limiter.record();
      limiter.record();
      expect(limiter.canCall()).toBe(false);
    });

    // Renamed: the old title ("allows calls again after recording up to max")
    // contradicted the assertion, which checks that the slot is consumed.
    it('blocks further calls once the single slot is used', () => {
      const limiter = new LLMRateLimiter(1);
      expect(limiter.canCall()).toBe(true);
      limiter.record();
      expect(limiter.canCall()).toBe(false);
    });

    it('remaining() returns correct count', () => {
      const limiter = new LLMRateLimiter(3);
      expect(limiter.remaining()).toBe(3);
      limiter.record();
      expect(limiter.remaining()).toBe(2);
      limiter.record();
      expect(limiter.remaining()).toBe(1);
      limiter.record();
      expect(limiter.remaining()).toBe(0);
    });
  });

  describe('sliding window pruning', () => {
    // Restore real timers even when an assertion throws mid-test; an inline
    // vi.useRealTimers() call would be skipped on failure and leak fake
    // timers into subsequent tests.
    afterEach(() => {
      vi.useRealTimers();
    });

    it('expires old timestamps after 60 seconds', () => {
      vi.useFakeTimers();

      const limiter = new LLMRateLimiter(2);
      limiter.record();
      limiter.record();
      expect(limiter.canCall()).toBe(false);

      vi.advanceTimersByTime(61_000);

      expect(limiter.canCall()).toBe(true);
    });

    it('only expires timestamps older than 60 seconds', () => {
      vi.useFakeTimers();

      const limiter = new LLMRateLimiter(2);
      limiter.record();

      vi.advanceTimersByTime(50_000);
      limiter.record();

      // 65s after the first record: the first timestamp has expired, the
      // second (15s old) has not.
      vi.advanceTimersByTime(15_000);
      expect(limiter.canCall()).toBe(true);
      expect(limiter.remaining()).toBe(1);
    });
  });
});

describe('getRateLimiter (singleton)', () => {
  it('returns a LLMRateLimiter instance', () => {
    const limiter = getRateLimiter(10);
    expect(limiter).toBeInstanceOf(LLMRateLimiter);
  });

  it('returns same instance for same maxPerMinute', () => {
    const a = getRateLimiter(10);
    const b = getRateLimiter(10);
    expect(a).toBe(b);
  });

  it('creates a new instance when maxPerMinute changes', () => {
    const a = getRateLimiter(5);
    const b = getRateLimiter(15);
    expect(a).not.toBe(b);
    expect(b.maxPerMinute).toBe(15);
  });
});
package/server/files/uploader.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { filesize } from 'filesize';
|
|
2
|
-
import
|
|
2
|
+
import fsp from 'node:fs/promises';
|
|
3
3
|
import TejError from './../error.js';
|
|
4
4
|
import { extAndType, extract, paths } from './helper.js';
|
|
5
5
|
|
|
@@ -19,11 +19,11 @@ class TejFileUploader {
|
|
|
19
19
|
file() {
|
|
20
20
|
const keys = [...arguments];
|
|
21
21
|
return async (ammo, next) => {
|
|
22
|
-
if (!ammo.headers['content-type']
|
|
22
|
+
if (!ammo.headers['content-type']?.startsWith('multipart/form-data'))
|
|
23
23
|
return next();
|
|
24
24
|
|
|
25
25
|
const payload = ammo.payload;
|
|
26
|
-
const updatedPayload =
|
|
26
|
+
const updatedPayload = Object.create(null);
|
|
27
27
|
|
|
28
28
|
for (const part in payload) {
|
|
29
29
|
const obj = payload[part];
|
|
@@ -43,26 +43,32 @@ class TejFileUploader {
|
|
|
43
43
|
if (!filename) continue;
|
|
44
44
|
|
|
45
45
|
const { dir, absolute, relative } = paths(this.destination, filename);
|
|
46
|
-
const size = filesize(obj.value.length,
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
46
|
+
const size = filesize(obj.value.length, {
|
|
47
|
+
output: 'object',
|
|
48
|
+
round: 0,
|
|
49
|
+
});
|
|
50
|
+
const maxSize = filesize(this.maxFileSize, {
|
|
51
|
+
output: 'object',
|
|
52
|
+
round: 0,
|
|
53
|
+
});
|
|
50
54
|
if (this.maxFileSize && obj.value.length > this.maxFileSize)
|
|
51
|
-
throw new TejError(
|
|
52
|
-
|
|
55
|
+
throw new TejError(
|
|
56
|
+
413,
|
|
57
|
+
`File size exceeds ${maxSize.value} ${maxSize.symbol}`,
|
|
58
|
+
);
|
|
53
59
|
|
|
54
|
-
|
|
55
|
-
|
|
60
|
+
await fsp.mkdir(dir, { recursive: true });
|
|
61
|
+
await fsp.writeFile(absolute, obj.value, 'binary');
|
|
56
62
|
|
|
57
63
|
updatedPayload[key] = {
|
|
58
64
|
filename,
|
|
59
65
|
extension: ext,
|
|
60
66
|
path: {
|
|
61
67
|
absolute: absolute,
|
|
62
|
-
relative: relative
|
|
68
|
+
relative: relative,
|
|
63
69
|
},
|
|
64
70
|
mimetype: type,
|
|
65
|
-
size
|
|
71
|
+
size,
|
|
66
72
|
};
|
|
67
73
|
}
|
|
68
74
|
}
|
|
@@ -75,11 +81,11 @@ class TejFileUploader {
|
|
|
75
81
|
files() {
|
|
76
82
|
const keys = [...arguments];
|
|
77
83
|
return async (ammo, next) => {
|
|
78
|
-
if (!ammo.headers['content-type']
|
|
84
|
+
if (!ammo.headers['content-type']?.startsWith('multipart/form-data'))
|
|
79
85
|
return next();
|
|
80
86
|
|
|
81
87
|
const payload = ammo.payload;
|
|
82
|
-
const updatedPayload =
|
|
88
|
+
const updatedPayload = Object.create(null);
|
|
83
89
|
const files = [];
|
|
84
90
|
|
|
85
91
|
for (const part in payload) {
|
|
@@ -99,27 +105,33 @@ class TejFileUploader {
|
|
|
99
105
|
if (!filename) continue;
|
|
100
106
|
|
|
101
107
|
const { dir, absolute, relative } = paths(this.destination, filename);
|
|
102
|
-
const size = filesize(obj.value.length,
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
108
|
+
const size = filesize(obj.value.length, {
|
|
109
|
+
output: 'object',
|
|
110
|
+
round: 0,
|
|
111
|
+
});
|
|
112
|
+
const maxSize = filesize(this.maxFileSize, {
|
|
113
|
+
output: 'object',
|
|
114
|
+
round: 0,
|
|
115
|
+
});
|
|
106
116
|
if (this.maxFileSize && obj.value.length > this.maxFileSize) {
|
|
107
|
-
throw new TejError(
|
|
108
|
-
|
|
117
|
+
throw new TejError(
|
|
118
|
+
413,
|
|
119
|
+
`File size exceeds ${maxSize.value} ${maxSize.symbol}`,
|
|
120
|
+
);
|
|
109
121
|
}
|
|
110
122
|
|
|
111
|
-
|
|
112
|
-
|
|
123
|
+
await fsp.mkdir(dir, { recursive: true });
|
|
124
|
+
await fsp.writeFile(absolute, obj.value, 'binary');
|
|
113
125
|
|
|
114
126
|
files.push({
|
|
115
127
|
key,
|
|
116
128
|
filename,
|
|
117
129
|
path: {
|
|
118
130
|
absolute: absolute,
|
|
119
|
-
relative: relative
|
|
131
|
+
relative: relative,
|
|
120
132
|
},
|
|
121
133
|
mimetype: type,
|
|
122
|
-
size
|
|
134
|
+
size,
|
|
123
135
|
});
|
|
124
136
|
}
|
|
125
137
|
}
|
|
@@ -128,7 +140,7 @@ class TejFileUploader {
|
|
|
128
140
|
if (!acc[file.key]) acc[file.key] = [];
|
|
129
141
|
acc[file.key].push(file);
|
|
130
142
|
return acc;
|
|
131
|
-
},
|
|
143
|
+
}, Object.create(null));
|
|
132
144
|
|
|
133
145
|
for (const key in groupedFilesByKey) {
|
|
134
146
|
updatedPayload[key] = groupedFilesByKey[key];
|
package/server/handler.js
CHANGED
|
@@ -112,7 +112,8 @@ const executeChain = async (target, ammo) => {
|
|
|
112
112
|
* @returns {Promise<void>}
|
|
113
113
|
*/
|
|
114
114
|
const errorHandler = async (ammo, err) => {
|
|
115
|
-
|
|
115
|
+
// Pass false as second arg to suppress tej-logger's Console.trace() double-stack output.
|
|
116
|
+
if (env('LOG_EXCEPTIONS')) errorLogger.error(err, false);
|
|
116
117
|
|
|
117
118
|
const result = ammo.throw(err);
|
|
118
119
|
if (result != null && typeof result.then === 'function') {
|
|
@@ -139,7 +140,7 @@ const handler = async (req, res) => {
|
|
|
139
140
|
return;
|
|
140
141
|
}
|
|
141
142
|
|
|
142
|
-
const url = req.url.split('?')[0];
|
|
143
|
+
const url = (req.url ?? '/').split('?')[0] || '/';
|
|
143
144
|
const match = targetRegistry.aim(url);
|
|
144
145
|
const ammo = new Ammo(req, res);
|
|
145
146
|
|
|
@@ -165,7 +166,8 @@ const handler = async (req, res) => {
|
|
|
165
166
|
}
|
|
166
167
|
}
|
|
167
168
|
|
|
168
|
-
// Add route parameters to ammo.payload
|
|
169
|
+
// Add route parameters to ammo.params and ammo.payload
|
|
170
|
+
ammo.params = match.params || {};
|
|
169
171
|
if (match.params && Object.keys(match.params).length > 0) {
|
|
170
172
|
Object.assign(ammo.payload, match.params);
|
|
171
173
|
}
|
|
@@ -58,7 +58,7 @@ class TargetRegistry {
|
|
|
58
58
|
});
|
|
59
59
|
|
|
60
60
|
if (exactMatch) {
|
|
61
|
-
return { target: exactMatch, params:
|
|
61
|
+
return { target: exactMatch, params: Object.create(null) };
|
|
62
62
|
}
|
|
63
63
|
|
|
64
64
|
// Then, try parameterized route matching
|
|
@@ -94,17 +94,17 @@ class TargetRegistry {
|
|
|
94
94
|
const patternSegments = pattern.split('/').filter((s) => s.length > 0);
|
|
95
95
|
const urlSegments = url.split('/').filter((s) => s.length > 0);
|
|
96
96
|
|
|
97
|
-
// Must have same number of segments
|
|
98
|
-
if (patternSegments.length !== urlSegments.length) {
|
|
99
|
-
return null;
|
|
100
|
-
}
|
|
101
|
-
|
|
102
97
|
// If both are empty (root paths), they match
|
|
103
98
|
if (patternSegments.length === 0 && urlSegments.length === 0) {
|
|
104
99
|
return {};
|
|
105
100
|
}
|
|
106
101
|
|
|
107
|
-
|
|
102
|
+
// Must have same number of segments
|
|
103
|
+
if (patternSegments.length !== urlSegments.length) {
|
|
104
|
+
return null;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
const params = Object.create(null);
|
|
108
108
|
|
|
109
109
|
// Match each segment
|
|
110
110
|
for (let i = 0; i < patternSegments.length; i++) {
|
|
@@ -133,7 +133,7 @@ class TargetRegistry {
|
|
|
133
133
|
*/
|
|
134
134
|
getAllEndpoints(options = {}) {
|
|
135
135
|
const grouped =
|
|
136
|
-
typeof options === 'boolean' ? options :
|
|
136
|
+
typeof options === 'boolean' ? options : options && options.grouped;
|
|
137
137
|
const detailed =
|
|
138
138
|
typeof options === 'object' && options && options.detailed === true;
|
|
139
139
|
|
|
@@ -150,7 +150,7 @@ class TargetRegistry {
|
|
|
150
150
|
if (!acc[group]) acc[group] = [];
|
|
151
151
|
acc[group].push(target.getPath());
|
|
152
152
|
return acc;
|
|
153
|
-
},
|
|
153
|
+
}, Object.create(null));
|
|
154
154
|
}
|
|
155
155
|
return this.targets.map((target) => target.getPath());
|
|
156
156
|
}
|
|
/**
 * @fileoverview Tests for TargetRegistry routing logic.
 */
import { describe, it, expect, beforeEach } from 'vitest';

// The registry module exports a singleton. A dynamic `import()` does NOT
// reset the ES module cache (the old comment claimed it did), so tests share
// the one instance and simply clear its mutable state before each case.
// The previously assigned `TargetRegistry` constructor was dead code and has
// been removed.
let registry;

beforeEach(async () => {
  const mod = await import('./registry.js');
  registry = mod.default;
  registry.targets = [];
  registry.globalMiddlewares = [];
});

describe('TargetRegistry.aim', () => {
  it('should return null for unmatched routes', () => {
    expect(registry.aim('/api/users')).toBeNull();
  });

  it('should match exact path', () => {
    const mockTarget = {
      getPath: () => '/api/users',
      getMethods: () => null,
    };
    registry.targets.push(mockTarget);
    const result = registry.aim('/api/users');
    expect(result).not.toBeNull();
    expect(result.target).toBe(mockTarget);
    expect(result.params).toBeDefined();
  });

  it('should match parameterized route and extract params', () => {
    const mockTarget = {
      getPath: () => '/api/users/:id',
      getMethods: () => null,
    };
    registry.targets.push(mockTarget);
    const result = registry.aim('/api/users/42');
    expect(result).not.toBeNull();
    expect(result.params.id).toBe('42');
  });

  it('should use Object.create(null) for params (no prototype pollution)', () => {
    const mockTarget = {
      getPath: () => '/api/:resource',
      getMethods: () => null,
    };
    registry.targets.push(mockTarget);
    const result = registry.aim('/api/users');
    // Param key is the route parameter name ('resource'), not the URL value
    expect(result.params['resource']).toBe('users');
    // The params object must use null prototype (safe from prototype pollution)
    expect(Object.getPrototypeOf(result.params)).toBeNull();
  });

  it('should not match routes with different segment counts', () => {
    const mockTarget = {
      getPath: () => '/api/users/:id',
      getMethods: () => null,
    };
    registry.targets.push(mockTarget);
    expect(registry.aim('/api/users')).toBeNull();
    expect(registry.aim('/api/users/42/profile')).toBeNull();
  });
});

describe('TargetRegistry.getAllEndpoints', () => {
  it('should return flat path list by default', () => {
    registry.targets = [
      {
        getPath: () => '/api/users',
        getMetadata: () => null,
        getHandler: () => null,
      },
      {
        getPath: () => '/api/posts',
        getMetadata: () => null,
        getHandler: () => null,
      },
    ];
    expect(registry.getAllEndpoints()).toEqual(['/api/users', '/api/posts']);
  });

  it('should return grouped object when grouped=true', () => {
    registry.targets = [
      {
        getPath: () => '/api/users',
        getMetadata: () => null,
        getHandler: () => null,
      },
      {
        getPath: () => '/api/posts',
        getMetadata: () => null,
        getHandler: () => null,
      },
    ];
    const grouped = registry.getAllEndpoints(true);
    expect(grouped['api']).toContain('/api/users');
    expect(grouped['api']).toContain('/api/posts');
    // Result must be null-prototype dict
    expect(Object.getPrototypeOf(grouped)).toBeNull();
  });
});