unreal-engine-mcp-server 0.4.3 → 0.4.5
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/.env.production +1 -1
- package/.github/workflows/publish-mcp.yml +2 -1
- package/.github/workflows/smithery-build.yml +29 -0
- package/CHANGELOG.md +26 -0
- package/README.md +13 -2
- package/claude_desktop_config_example.json +2 -1
- package/dist/index.d.ts +46 -1
- package/dist/index.js +40 -14
- package/dist/tools/consolidated-tool-definitions.d.ts +43 -0
- package/dist/tools/consolidated-tool-definitions.js +126 -116
- package/dist/tools/landscape.js +77 -15
- package/dist/unreal-bridge.d.ts +8 -3
- package/dist/unreal-bridge.js +25 -35
- package/dist/utils/error-handler.d.ts +40 -0
- package/dist/utils/error-handler.js +75 -0
- package/dist/utils/http.js +80 -3
- package/dist/utils/response-validator.js +6 -1
- package/docs/unreal-tool-test-cases.md +572 -0
- package/eslint.config.mjs +68 -0
- package/package.json +18 -9
- package/server.json +8 -2
- package/smithery.yaml +29 -0
- package/src/index.ts +37 -14
- package/src/tools/consolidated-tool-definitions.ts +126 -116
- package/src/tools/landscape.ts +77 -15
- package/src/unreal-bridge.ts +28 -31
- package/src/utils/error-handler.ts +113 -1
- package/src/utils/http.ts +102 -3
- package/src/utils/response-validator.ts +7 -2
- package/tsconfig.json +36 -13
package/dist/unreal-bridge.js
CHANGED
@@ -2,6 +2,7 @@ import WebSocket from 'ws';
 import { createHttpClient } from './utils/http.js';
 import { Logger } from './utils/logger.js';
 import { loadEnv } from './types/env.js';
+import { ErrorHandler } from './utils/error-handler.js';
 export class UnrealBridge {
     ws;
     http = createHttpClient('');
@@ -15,6 +16,11 @@ export class UnrealBridge {
     autoReconnectEnabled = false; // disabled by default to prevent looping retries
     engineVersionCache;
     ENGINE_VERSION_TTL_MS = 5 * 60 * 1000;
+    // WebSocket health monitoring (best practice from WebSocket optimization guides)
+    lastPongReceived = 0;
+    pingInterval;
+    PING_INTERVAL_MS = 30000; // 30 seconds
+    PONG_TIMEOUT_MS = 10000; // 10 seconds
     // Command queue for throttling
     commandQueue = [];
     isProcessing = false;
@@ -289,11 +295,12 @@ except Exception as e:
     };
     get isConnected() { return this.connected; }
     /**
-     * Attempt to connect with
+     * Attempt to connect with exponential backoff retry strategy
+     * Uses optimized retry pattern from TypeScript best practices
      * @param maxAttempts Maximum number of connection attempts
      * @param timeoutMs Timeout for each connection attempt in milliseconds
-     * @param retryDelayMs
-     * @returns Promise that resolves
+     * @param retryDelayMs Initial delay between retry attempts in milliseconds
+     * @returns Promise that resolves to true if connected, false otherwise
      */
     connectPromise;
     async tryConnect(maxAttempts = 3, timeoutMs = 5000, retryDelayMs = 2000) {
@@ -308,39 +315,22 @@ except Exception as e:
         }
         return this.connected;
     }
-        …
-                    return; // Successfully connected
-                }
-                catch (err) {
-                    const msg = (err?.message || String(err));
-                    this.log.debug(`Connection attempt ${attempt} failed: ${msg}`);
-                    if (attempt < maxAttempts) {
-                        this.log.debug(`Retrying in ${retryDelayMs}ms...`);
-                        // Sleep, but allow early break if we became connected during the wait
-                        const start = Date.now();
-                        while (Date.now() - start < retryDelayMs) {
-                            if (this.connected)
-                                return; // someone else connected
-                            await new Promise(r => setTimeout(r, 50));
-                        }
-                    }
-                    else {
-                        // Keep this at warn (not error) and avoid stack spam
-                        this.log.warn(`All ${maxAttempts} connection attempts failed`);
-                        return; // exit, connected remains false
-                    }
-                }
+        // Use ErrorHandler's retryWithBackoff for consistent retry behavior
+        this.connectPromise = ErrorHandler.retryWithBackoff(() => this.connect(timeoutMs), {
+            maxRetries: maxAttempts - 1,
+            initialDelay: retryDelayMs,
+            maxDelay: 10000,
+            backoffMultiplier: 1.5,
+            shouldRetry: (error) => {
+                // Only retry on connection-related errors
+                const msg = error?.message?.toLowerCase() || '';
+                return msg.includes('timeout') || msg.includes('connection') || msg.includes('econnrefused');
             }
-        })()
+        }).then(() => {
+            // Success
+        }).catch((err) => {
+            this.log.warn(`Connection failed after ${maxAttempts} attempts:`, err.message);
+        });
         try {
             await this.connectPromise;
         }
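For orientation, the sketch below shows roughly how the new retry path behaves from a caller's perspective. It is illustrative only, not code from the package; with the defaults shown in the hunk above (maxAttempts = 3, retryDelayMs = 2000, multiplier 1.5, cap 10000 ms), retryWithBackoff makes the initial attempt plus two retries, waiting about 2000 ms and then 3000 ms between attempts. The no-argument constructor call is an assumption.

    import { UnrealBridge } from './unreal-bridge.js';

    // Sketch: tryConnect resolves to the bridge's connected flag, so callers can branch on it.
    const bridge = new UnrealBridge();                  // hypothetical: constructor arguments not shown in this diff
    const connected = await bridge.tryConnect(3, 5000, 2000);
    if (!connected) {
        console.warn('Unreal Engine bridge is not reachable; tools will fail until it connects.');
    }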
package/dist/utils/error-handler.d.ts
CHANGED
@@ -29,5 +29,45 @@ export declare class ErrorHandler {
     private static getUserFriendlyMessage;
     /** Determine if an error is likely retriable */
     private static isRetriable;
+    /**
+     * Retry an async operation with exponential backoff
+     * Best practice from TypeScript async programming patterns
+     * @param operation - Async operation to retry
+     * @param options - Retry configuration
+     * @returns Result of the operation
+     */
+    static retryWithBackoff<T>(operation: () => Promise<T>, options?: {
+        maxRetries?: number;
+        initialDelay?: number;
+        maxDelay?: number;
+        backoffMultiplier?: number;
+        shouldRetry?: (error: unknown) => boolean;
+    }): Promise<T>;
+    /**
+     * Add timeout to any promise
+     * @param promise - Promise to add timeout to
+     * @param timeoutMs - Timeout in milliseconds
+     * @param errorMessage - Custom error message for timeout
+     * @returns Promise that rejects on timeout
+     */
+    static withTimeout<T>(promise: Promise<T>, timeoutMs: number, errorMessage?: string): Promise<T>;
+    /**
+     * Execute multiple operations with Promise.allSettled for better error handling
+     * Returns detailed results for each operation, including failures
+     * @param operations - Array of async operations to execute
+     * @returns Object with successful and failed operations separated
+     */
+    static batchExecute<T>(operations: Array<() => Promise<T>>): Promise<{
+        successful: Array<{
+            index: number;
+            value: T;
+        }>;
+        failed: Array<{
+            index: number;
+            reason: unknown;
+        }>;
+        successCount: number;
+        failureCount: number;
+    }>;
 }
 //# sourceMappingURL=error-handler.d.ts.map
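The new ErrorHandler helpers compose: withTimeout bounds a single attempt, and retryWithBackoff repeats it. A minimal sketch of that combination follows; fetchActorList is a hypothetical helper, not an API exported by this package.

    import { ErrorHandler } from './utils/error-handler.js';

    // Hypothetical async helper standing in for any flaky call to the engine.
    declare function fetchActorList(): Promise<string[]>;

    // Each attempt is capped at 5 s; up to two retries with 500 ms, then 1000 ms delays.
    const actors = await ErrorHandler.retryWithBackoff(
        () => ErrorHandler.withTimeout(fetchActorList(), 5000, 'fetchActorList timed out'),
        { maxRetries: 2, initialDelay: 500, backoffMultiplier: 2 }
    );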
package/dist/utils/error-handler.js
CHANGED
@@ -132,5 +132,80 @@ export class ErrorHandler {
         catch { }
         return false;
     }
+    /**
+     * Retry an async operation with exponential backoff
+     * Best practice from TypeScript async programming patterns
+     * @param operation - Async operation to retry
+     * @param options - Retry configuration
+     * @returns Result of the operation
+     */
+    static async retryWithBackoff(operation, options = {}) {
+        const { maxRetries = 3, initialDelay = 100, maxDelay = 10000, backoffMultiplier = 2, shouldRetry = (error) => this.isRetriable(error) } = options;
+        let lastError;
+        let delay = initialDelay;
+        for (let attempt = 0; attempt <= maxRetries; attempt++) {
+            try {
+                return await operation();
+            }
+            catch (error) {
+                lastError = error;
+                if (attempt === maxRetries || !shouldRetry(error)) {
+                    throw error;
+                }
+                log.debug(`Retry attempt ${attempt + 1}/${maxRetries} after ${delay}ms`);
+                await new Promise(resolve => setTimeout(resolve, delay));
+                delay = Math.min(delay * backoffMultiplier, maxDelay);
+            }
+        }
+        throw lastError;
+    }
+    /**
+     * Add timeout to any promise
+     * @param promise - Promise to add timeout to
+     * @param timeoutMs - Timeout in milliseconds
+     * @param errorMessage - Custom error message for timeout
+     * @returns Promise that rejects on timeout
+     */
+    static async withTimeout(promise, timeoutMs, errorMessage = 'Operation timed out') {
+        let timeoutHandle;
+        const timeoutPromise = new Promise((_, reject) => {
+            timeoutHandle = setTimeout(() => {
+                reject(new Error(errorMessage));
+            }, timeoutMs);
+        });
+        try {
+            return await Promise.race([promise, timeoutPromise]);
+        }
+        finally {
+            if (timeoutHandle !== undefined) {
+                clearTimeout(timeoutHandle);
+            }
+        }
+    }
+    /**
+     * Execute multiple operations with Promise.allSettled for better error handling
+     * Returns detailed results for each operation, including failures
+     * @param operations - Array of async operations to execute
+     * @returns Object with successful and failed operations separated
+     */
+    static async batchExecute(operations) {
+        const results = await Promise.allSettled(operations.map(op => op()));
+        const successful = [];
+        const failed = [];
+        results.forEach((result, index) => {
+            if (result.status === 'fulfilled') {
+                successful.push({ index, value: result.value });
+            }
+            else {
+                failed.push({ index, reason: result.reason });
+            }
+        });
+        return {
+            successful,
+            failed,
+            successCount: successful.length,
+            failureCount: failed.length
+        };
+    }
 }
 //# sourceMappingURL=error-handler.js.map
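Because batchExecute is built on Promise.allSettled, it never rejects as a whole; it reports per-operation outcomes instead. A hedged usage sketch, with hypothetical operations:

    import { ErrorHandler } from './utils/error-handler.js';

    // spawnCube() and spawnLight() stand in for any independent async operations;
    // they are hypothetical helpers, not part of this package.
    declare function spawnCube(): Promise<void>;
    declare function spawnLight(): Promise<void>;

    const { failed, successCount, failureCount } =
        await ErrorHandler.batchExecute([() => spawnCube(), () => spawnLight()]);
    console.log(`${successCount} succeeded, ${failureCount} failed`);
    for (const f of failed) {
        console.warn(`operation ${f.index} failed:`, f.reason);
    }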
package/dist/utils/http.js
CHANGED
@@ -2,6 +2,42 @@ import axios from 'axios';
 import http from 'http';
 import https from 'https';
 import { Logger } from './logger.js';
+class SimpleCache {
+    cache = new Map();
+    maxSize = 100;
+    set(key, data, ttl = 60000) {
+        // Prevent unbounded growth
+        if (this.cache.size >= this.maxSize) {
+            const firstKey = this.cache.keys().next().value;
+            if (firstKey !== undefined) {
+                this.cache.delete(firstKey);
+            }
+        }
+        this.cache.set(key, {
+            data,
+            timestamp: Date.now(),
+            ttl
+        });
+    }
+    get(key) {
+        const entry = this.cache.get(key);
+        if (!entry)
+            return null;
+        // Check if expired
+        if (Date.now() - entry.timestamp > entry.ttl) {
+            this.cache.delete(key);
+            return null;
+        }
+        return entry.data;
+    }
+    clear() {
+        this.cache.clear();
+    }
+    getStats() {
+        return { size: this.cache.size, maxSize: this.maxSize };
+    }
+}
+const responseCache = new SimpleCache();
 // Enhanced connection pooling configuration to prevent socket failures
 const httpAgent = new http.Agent({
     keepAlive: true,
@@ -49,22 +85,63 @@ export function createHttpClient(baseURL) {
         maxBodyLength: 50 * 1024 * 1024,
         decompress: true
     });
-    //
+    // Request interceptor: timing, caching check, and logging
     client.interceptors.request.use((config) => {
         // Add metadata for performance tracking
         config.metadata = { startTime: Date.now() };
+        // Check cache for GET requests
+        if (config.method?.toLowerCase() === 'get' && config.url) {
+            const cacheKey = `${config.url}:${JSON.stringify(config.params || {})}`;
+            const cached = responseCache.get(cacheKey);
+            if (cached) {
+                log.debug(`[HTTP Cache Hit] ${config.url}`);
+                // Return cached response
+                config.cached = cached;
+            }
+        }
         return config;
     }, (error) => {
+        log.error('[HTTP Request Error]', error);
         return Promise.reject(error);
     });
-    //
+    // Response interceptor: timing, caching, and error handling
     client.interceptors.response.use((response) => {
+        // Check if we used cached response
+        if (response.config.cached) {
+            return Promise.resolve({
+                ...response,
+                data: response.config.cached,
+                status: 200,
+                statusText: 'OK (Cached)',
+                headers: {},
+                config: response.config
+            });
+        }
+        // Performance tracking
         const duration = Date.now() - (response.config.metadata?.startTime || 0);
         if (duration > 5000) {
-            log.warn(`[HTTP
+            log.warn(`[HTTP Slow] ${response.config.url} took ${duration}ms`);
+        }
+        else if (duration > 1000) {
+            log.debug(`[HTTP] ${response.config.url} took ${duration}ms`);
+        }
+        // Cache successful GET responses
+        if (response.config.method?.toLowerCase() === 'get' &&
+            response.status === 200 &&
+            response.config.url) {
+            const cacheKey = `${response.config.url}:${JSON.stringify(response.config.params || {})}`;
+            // Cache for 30 seconds by default
+            responseCache.set(cacheKey, response.data, 30000);
         }
         return response;
     }, (error) => {
+        // Enhanced error logging
+        const duration = Date.now() - (error.config?.metadata?.startTime || 0);
+        log.error(`[HTTP Error] ${error.config?.url} failed after ${duration}ms:`, {
+            status: error.response?.status,
+            message: error.message,
+            code: error.code
+        });
         return Promise.reject(error);
     });
     return client;
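The new cache keys GET responses by `${url}:${JSON.stringify(params || {})}`, expires entries by TTL, and caps the map at 100 entries with first-in eviction. A standalone sketch of those semantics follows; it is illustrative only, since SimpleCache is module-private and not exported, and the key and value shown are made up.

    // Sketch of SimpleCache behavior as implemented above.
    const cache = new SimpleCache();
    cache.set('/remote/info:{}', { version: '5.4' }, 30000); // 30 s TTL, matching the interceptor default
    cache.get('/remote/info:{}');   // -> { version: '5.4' } until the TTL elapses, then null
    cache.getStats();               // -> { size: 1, maxSize: 100 }
    cache.clear();                  // -> empties the map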
package/dist/utils/response-validator.js
CHANGED
@@ -7,10 +7,15 @@ const log = new Logger('ResponseValidator');
  * Validates tool responses against their defined output schemas
  */
 export class ResponseValidator {
+    // Keep ajv as any to avoid complex interop typing issues with Ajv's ESM/CJS dual export
+    // shape when using NodeNext module resolution.
     ajv;
     validators = new Map();
     constructor() {
-        …
+        // Cast Ajv to any for construction to avoid errors when TypeScript's NodeNext
+        // module resolution represents the import as a namespace object.
+        const AjvCtor = Ajv?.default ?? Ajv;
+        this.ajv = new AjvCtor({
             allErrors: true,
             verbose: true,
             strict: false // Allow additional properties for flexibility
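The `Ajv?.default ?? Ajv` cast works around Ajv's CJS/ESM dual export under NodeNext module resolution, where the imported binding may be a namespace object whose constructor sits on `.default`. A minimal TypeScript sketch of the same pattern, under that assumption:

    import Ajv from 'ajv';

    // Fall back to the import itself when there is no `.default` constructor.
    const AjvCtor: any = (Ajv as any)?.default ?? Ajv;
    const ajv = new AjvCtor({ allErrors: true, verbose: true, strict: false });
    const validate = ajv.compile({ type: 'object' });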