@dahawa/hawa-cli-analysis 1.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,85 @@
+ import os from 'os';
+ import pino from 'pino';
+ import fs from 'fs';
+ import path from 'path';
+
+ class LogManager {
+   constructor() {
+     this.allLoggers = {};
+   }
+   __createLogger(cliType, full) {
+     // Logs are written under the user's home directory
+     let homedir = os.homedir();
+     // Create config and log output under the home directory
+     let logDir = path.join(homedir, '.hawa-cli-analysis', 'logs', cliType);
+
+     // Create the log directory if it does not exist
+     if (!fs.existsSync(logDir)) {
+       fs.mkdirSync(logDir, { recursive: true });
+     }
+
+     //console.log('hawa-cli-analysis log Directory:', logDir);
+
+     // Name the file with a timestamp; it is only announced when first created
+     let timestamp = Date.now();
+     const filename = full ? `api-full-${timestamp}.log` : `api-simple-${timestamp}.log`;
+     const filepath = path.join(logDir, filename);
+
+     //console.log(`Creating ${full ? 'full' : 'simple'} log file:`, filepath);
+
+     const fd = fs.openSync(filepath, 'a');
+
+     const stream = {
+       write: (str) => {
+         // For every log entry: writeSync -> fsyncSync
+         fs.writeSync(fd, str);
+         fs.fsyncSync(fd);
+       }
+     };
+
+     // Create a Pino logger with the stream as its output target
+     const logger = pino({
+       level: process.env.LOG_LEVEL || 'debug', // configurable via environment variable
+       base: null, // drop pid and hostname
+       timestamp: () => {
+         const now = new Date();
+         const yyyy = now.getFullYear();
+         const mm = String(now.getMonth() + 1).padStart(2, '0');
+         const dd = String(now.getDate()).padStart(2, '0');
+         const hh = String(now.getHours()).padStart(2, '0');
+         const mi = String(now.getMinutes()).padStart(2, '0');
+         const ss = String(now.getSeconds()).padStart(2, '0');
+         return `,"time":"${yyyy}-${mm}-${dd} ${hh}:${mi}:${ss}"`;
+       },
+       formatters: {
+         level(label) {
+           return { level: label }; // use the string label instead of the numeric level
+         },
+       }
+     }, stream);
+
+     return logger;
+   }
+   getSystemLogger() {
+     return this.getLogger();
+   }
+   getLogger(cliType, full = true) {
+     if (!this.allLoggers["_system"]) {
+       this.allLoggers["_system"] = this.__createLogger("system", false);
+     }
+     if (cliType == null) {
+       return this.allLoggers["_system"];
+     }
+     if (!this.allLoggers[cliType]) {
+       this.allLoggers[cliType] = {
+         full: this.__createLogger(cliType, full),
+         simple: this.__createLogger(cliType, false),
+         system: this.allLoggers["_system"]
+       };
+     }
+     return this.allLoggers[cliType];
+   }
+ }
+
+ export default new LogManager();
package/logger.js ADDED
@@ -0,0 +1,112 @@
+ import os from 'os';
+ import pino from 'pino';
+ import fs from 'fs';
+ import {createStream} from 'rotating-file-stream';
+ import {fileURLToPath } from 'url';
+ import path from 'path';
+
+ // Logs are written under the user's home directory
+ //const __filename = fileURLToPath(import.meta.url);
+ let homedir = os.homedir();
+ // Create config and log output under the home directory
+ let logDir = path.join(homedir, '.hawa-cli-analysis', 'logs');
+
+ // Create the log directory if it does not exist
+ if (!fs.existsSync(logDir)) {
+   fs.mkdirSync(logDir, { recursive: true });
+ }
+
+ //console.log('hawa-cli-analysis log Directory:', logDir);
+
+ let timestamp = Date.now();
+
+ // Create a rotating-file-stream instance
+ /*
+ const stream = createStream(`api-full-${timestamp}.log`, {
+   size: "10M",      // maximum size of each log file
+   interval: "1d",   // rotate once a day
+   compress: "gzip", // compress old log files
+   path: logDir,     // directory for the log files
+ });
+ */
+ /*
+ const stream = pino.destination({
+   dest: `${logDir}/api-full-${timestamp}.log`, // file path
+   sync: false, // asynchronous writes
+ });
+ */
+ const fd = fs.openSync(`${logDir}/api-full-${timestamp}.log`, 'a');
+
+ const stream = {
+   write: (str) => {
+     // For every log entry: writeSync -> fsyncSync
+     fs.writeSync(fd, str);
+     fs.fsyncSync(fd);
+   }
+ };
+
+ // Create a Pino logger with the stream as its output target
+ const apiFullLogger = pino({
+   level: process.env.LOG_LEVEL || 'debug', // configurable via environment variable
+   base: null, // drop pid and hostname
+   timestamp: () => {
+     const now = new Date();
+     const yyyy = now.getFullYear();
+     const mm = String(now.getMonth() + 1).padStart(2, '0');
+     const dd = String(now.getDate()).padStart(2, '0');
+     const hh = String(now.getHours()).padStart(2, '0');
+     const mi = String(now.getMinutes()).padStart(2, '0');
+     const ss = String(now.getSeconds()).padStart(2, '0');
+     return `,"time":"${yyyy}-${mm}-${dd} ${hh}:${mi}:${ss}"`;
+   },
+   formatters: {
+     level(label) {
+       return { level: label }; // use the string label instead of the numeric level
+     },
+   }
+ }, stream);
+ /**
+ // Create a rotating-file-stream instance
+ const streamSimple = createStream(`api-simple-${timestamp}.log`, {
+   size: "10M",      // maximum size of each log file
+   interval: "1d",   // rotate once a day
+   compress: "gzip", // compress old log files
+   path: logDir,     // directory for the log files
+ });
+ **/
+
+ const fdsimple = fs.openSync(`${logDir}/api-simple-${timestamp}.log`, 'a');
+
+ const streamSimple = {
+   write: (str) => {
+     // For every log entry: writeSync -> fsyncSync
+     //fs.writeSync(fdsimple, str.replace(/\\n/g, '\n'));
+     fs.writeSync(fdsimple, str);
+     fs.fsyncSync(fdsimple);
+   }
+ };
+
+ // Create a Pino logger with the stream as its output target
+ export const apiSimpleLogger = pino({
+   level: process.env.LOG_LEVEL || 'debug', // configurable via environment variable
+   base: null, // drop pid and hostname
+   timestamp: () => {
+     const now = new Date();
+     const yyyy = now.getFullYear();
+     const mm = String(now.getMonth() + 1).padStart(2, '0');
+     const dd = String(now.getDate()).padStart(2, '0');
+     const hh = String(now.getHours()).padStart(2, '0');
+     const mi = String(now.getMinutes()).padStart(2, '0');
+     const ss = String(now.getSeconds()).padStart(2, '0');
+     return `,"time":"${yyyy}-${mm}-${dd} ${hh}:${mi}:${ss}"`;
+   },
+   formatters: {
+     level(label) {
+       return { level: label }; // use the string label instead of the numeric level
+     },
+   }
+ }, streamSimple);
+
+
+ export default apiFullLogger;
@@ -0,0 +1,5 @@
+ import {startMCPServerProxy} from "../codex/mcpserver.js"
+
+ startMCPServerProxy();
+
+ process.stdout.write("ok_ok\n");
@@ -0,0 +1,40 @@
+ {
+   "meta|https://radar.mcp.cloudflare.com": {
+     "issuer": "https://radar.mcp.cloudflare.com",
+     "authorization_endpoint": "https://radar.mcp.cloudflare.com/oauth/authorize",
+     "token_endpoint": "https://radar.mcp.cloudflare.com/token",
+     "registration_endpoint": "https://radar.mcp.cloudflare.com/register",
+     "response_types_supported": [
+       "code"
+     ],
+     "response_modes_supported": [
+       "query"
+     ],
+     "grant_types_supported": [
+       "authorization_code",
+       "refresh_token"
+     ],
+     "token_endpoint_auth_methods_supported": [
+       "client_secret_basic",
+       "client_secret_post",
+       "none"
+     ],
+     "revocation_endpoint": "https://radar.mcp.cloudflare.com/token",
+     "code_challenge_methods_supported": [
+       "plain",
+       "S256"
+     ]
+   },
+   "reg|https://radar.mcp.cloudflare.com|http://127.0.0.1:53175/callback": {
+     "client_id": "ZCV41vccvgvhNsEh",
+     "obtained_at": 1759938844949
+   },
+   "tokens|https://radar.mcp.cloudflare.com|ZCV41vccvgvhNsEh": {
+     "access_token": "bab2be0cfaea55f79aefd9d5c9b4cc04:S5OyKXATKP6YduMh:It8GB9SBrgrcvzf7nGU6D2xUi0WYjqKF",
+     "token_type": "bearer",
+     "expires_in": 3600,
+     "refresh_token": "bab2be0cfaea55f79aefd9d5c9b4cc04:S5OyKXATKP6YduMh:piFV9MdBn22WOLkrssCqa4qkiPPKSPv7",
+     "scope": "user:read offline_access account:read radar:read url_scanner:write",
+     "expires_at": 1759942473010
+   }
+ }
package/package.json ADDED
@@ -0,0 +1,36 @@
+ {
+   "name": "@dahawa/hawa-cli-analysis",
+   "version": "1.0.4",
+   "main": "index",
+   "bin": {
+     "uclaude": "uclaude.js",
+     "ucodex": "ucodex.js"
+   },
+   "scripts": {
+     "test": "echo \"Error: no test specified\" && exit 1"
+   },
+   "type": "module",
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/jiweigang1/hawa-cli-analysis.git"
+   },
+   "author": "",
+   "license": "MIT",
+   "bugs": {
+     "url": "https://github.com/jiweigang1/hawa-cli-analysis/issues"
+   },
+   "homepage": "https://github.com/jiweigang1/hawa-cli-analysis#readme",
+   "description": "",
+   "dependencies": {
+     "@anthropic-ai/sdk": "^0.62.0",
+     "@iarna/toml": "^2.2.5",
+     "@modelcontextprotocol/sdk": "^1.19.1",
+     "fastify": "^5.6.1",
+     "inquirer": "^12.9.6",
+     "pino": "^9.9.5",
+     "ps-list": "^9.0.0",
+     "ps-tree": "^1.2.0",
+     "rotating-file-stream": "^3.2.7",
+     "uuid": "^13.0.0"
+   }
+ }
@@ -0,0 +1,80 @@
+ import net from 'net';
+
+ /**
+  * Port manager
+  * Dynamically allocates available ports
+  */
+ class PortManager {
+   constructor() {
+     this.allocatedPorts = new Set();
+     this.startPort = 3000;
+     this.maxPort = 3999;
+   }
+
+   /**
+    * Check whether a port is available
+    * @param {number} port - the port to check
+    * @returns {Promise<boolean>} - whether the port is available
+    */
+   async isPortAvailable(port) {
+     return new Promise((resolve) => {
+       const server = net.createServer();
+
+       server.once('error', (err) => {
+         if (err.code === 'EADDRINUSE') {
+           resolve(false);
+         } else {
+           resolve(false);
+         }
+       });
+
+       server.once('listening', () => {
+         server.close();
+         resolve(true);
+       });
+
+       server.listen(port);
+     });
+   }
+
+   /**
+    * Get an available port
+    * @returns {Promise<number|null>} - an available port, or null
+    */
+   async getAvailablePort() {
+     for (let port = this.startPort; port <= this.maxPort; port++) {
+       if (this.allocatedPorts.has(port)) {
+         continue;
+       }
+
+       const isAvailable = await this.isPortAvailable(port);
+       if (isAvailable) {
+         this.allocatedPorts.add(port);
+         return port;
+       }
+     }
+
+     return null;
+   }
+
+   /**
+    * Release a port
+    * @param {number} port - the port to release
+    */
+   releasePort(port) {
+     this.allocatedPorts.delete(port);
+   }
+
+   /**
+    * Get the currently allocated ports
+    * @returns {Set<number>} - the set of allocated ports
+    */
+   getAllocatedPorts() {
+     return new Set(this.allocatedPorts);
+   }
+ }
+
+ // Create a global singleton instance
+ const portManager = new PortManager();
+
+ export default portManager;
@@ -0,0 +1,213 @@
+ // Simple example of using transformResponseIn with streaming data
+ import AnthropicTransformer from './anthropic-transformer.js';
+
+ // Example streaming response data (like the one you provided)
+ const exampleStreamData = [
+   'event: response.created\ndata: {"id":"resp_abc123","type":"response.created","created":1739558401,"response":{"id":"resp_abc123","model":"gpt-4o-mini","status":"in_progress"}}\n\n',
+   'event: response.in_progress\ndata: {"id":"resp_abc123","type":"response.in_progress","response":{"id":"resp_abc123","status":"in_progress"}}\n\n',
+   'event: response.output_text.delta\ndata: {"id":"resp_abc123","type":"response.output_text.delta","response_id":"resp_abc123","output_index":0,"delta":"早"}\n\n',
+   'event: response.output_text.delta\ndata: {"id":"resp_abc123","type":"response.output_text.delta","response_id":"resp_abc123","output_index":0,"delta":"上"}\n\n',
+   'event: response.output_text.delta\ndata: {"id":"resp_abc123","type":"response.output_text.delta","response_id":"resp_abc123","output_index":0,"delta":"好,"}\n\n',
+   'event: response.output_text.delta\ndata: {"id":"resp_abc123","type":"response.output_text.delta","response_id":"resp_abc123","output_index":0,"delta":"给你一段流式返回示例。"}\n\n',
+   'event: response.output_text.done\ndata: {"id":"resp_abc123","type":"response.output_text.done","response_id":"resp_abc123","output_index":0}\n\n'
+ ];
+
+ // Simple method to transform streaming response data
+ async function transformResponseIn(streamData) {
+   const transformer = new AnthropicTransformer();
+
+   // Create a mock Response object with a readable stream
+   const stream = new ReadableStream({
+     start(controller) {
+       // Simulate streaming by sending data chunks with delays
+       let index = 0;
+       const encoder = new TextEncoder();
+
+       const sendNextChunk = () => {
+         if (index < streamData.length) {
+           controller.enqueue(encoder.encode(streamData[index]));
+           index++;
+           setTimeout(sendNextChunk, 100); // Simulate network delay
+         } else {
+           controller.close();
+         }
+       };
+
+       sendNextChunk();
+     }
+   });
+
+   // Create a Response object that mimics the actual streaming response
+   const mockResponse = new Response(stream, {
+     headers: {
+       'Content-Type': 'text/event-stream',
+       'Cache-Control': 'no-cache',
+       'Connection': 'keep-alive'
+     }
+   });
+
+   try {
+     // Use the existing transformResponseIn method
+     const transformedResponse = await transformer.transformResponseIn(mockResponse);
+
+     // Process the transformed stream
+     const reader = transformedResponse.body.getReader();
+     const decoder = new TextDecoder();
+
+     console.log('Transformed Anthropic-style streaming response:');
+     console.log('==============================================');
+
+     while (true) {
+       const { done, value } = await reader.read();
+       if (done) break;
+
+       const chunk = decoder.decode(value);
+       const lines = chunk.split('\n');
+
+       for (const line of lines) {
+         if (line.startsWith('event:')) {
+           const eventType = line.replace('event:', '').trim();
+           console.log(`Event: ${eventType}`);
+         } else if (line.startsWith('data:')) {
+           const data = line.replace('data:', '').trim();
+           try {
+             const parsedData = JSON.parse(data);
+             console.log('Data:', JSON.stringify(parsedData, null, 2));
+           } catch (e) {
+             console.log('Data:', data);
+           }
+         }
+       }
+     }
+
+     return transformedResponse;
+   } catch (error) {
+     console.error('Error transforming response:', error);
+     throw error;
+   }
+ }
+
+ // Even simpler method for direct data transformation
+ function simpleTransformResponse(data) {
+   // Convert the streaming data to Anthropic format
+   const anthropicEvents = [];
+   let messageId = `msg_${Date.now()}`;
+   let contentIndex = 0;
+   let accumulatedText = '';
+
+   // Simulate message start
+   anthropicEvents.push({
+     event: 'message_start',
+     data: {
+       type: 'message_start',
+       message: {
+         id: messageId,
+         type: 'message',
+         role: 'assistant',
+         content: [],
+         model: 'gpt-4o-mini',
+         stop_reason: null,
+         stop_sequence: null,
+         usage: {
+           input_tokens: 0,
+           output_tokens: 0
+         }
+       }
+     }
+   });
+
+   // Process text content
+   for (const item of data) {
+     if (item.includes('response.output_text.delta')) {
+       const match = item.match(/"delta":"([^"]*)"/);
+       if (match) {
+         accumulatedText += match[1];
+       }
+     }
+   }
+
+   // Add content block
+   anthropicEvents.push({
+     event: 'content_block_start',
+     data: {
+       type: 'content_block_start',
+       index: contentIndex,
+       content_block: {
+         type: 'text',
+         text: ''
+       }
+     }
+   });
+
+   // Add the accumulated text
+   anthropicEvents.push({
+     event: 'content_block_delta',
+     data: {
+       type: 'content_block_delta',
+       index: contentIndex,
+       delta: {
+         type: 'text_delta',
+         text: accumulatedText
+       }
+     }
+   });
+
+   // Close content block
+   anthropicEvents.push({
+     event: 'content_block_stop',
+     data: {
+       type: 'content_block_stop',
+       index: contentIndex
+     }
+   });
+
+   // Message delta and stop
+   anthropicEvents.push({
+     event: 'message_delta',
+     data: {
+       type: 'message_delta',
+       delta: {
+         stop_reason: 'end_turn',
+         stop_sequence: null
+       },
+       usage: {
+         input_tokens: 10,
+         output_tokens: accumulatedText.length,
+         cache_read_input_tokens: 0
+       }
+     }
+   });
+
+   anthropicEvents.push({
+     event: 'message_stop',
+     data: {
+       type: 'message_stop'
+     }
+   });
+
+   return anthropicEvents;
+ }
+
+ // Usage examples
+ async function runExamples() {
+   console.log('=== Simple Transform Example ===');
+   const simpleResult = simpleTransformResponse(exampleStreamData);
+   simpleResult.forEach(event => {
+     console.log(`${event.event}:`, JSON.stringify(event.data, null, 2));
+   });
+
+   console.log('\n=== Full Transform Example ===');
+   try {
+     await transformResponseIn(exampleStreamData);
+   } catch (error) {
+     console.error('Error:', error.message);
+   }
+ }
+
+ // Export the functions
+ export { transformResponseIn, simpleTransformResponse, runExamples };
+
+ // Run examples if this file is executed directly
+ if (import.meta.url === `file://${process.argv[1]}`) {
+   runExamples();
+ }
@@ -0,0 +1,36 @@
+ /**
+  * Test the startup performance of the lazily loaded MCP Server
+  */
+ import { startMCPServerProxy } from '../codex/mcpserver.js';
+ import LogManager from '../logger-manager.js';
+
+ const logger = LogManager.getSystemLogger();
+
+ console.log('=== Testing MCP Server lazy-loading performance ===');
+
+ // Record the startup start time
+ const startTime = Date.now();
+
+ logger.info('Starting the MCP Server proxy...');
+
+ // Start the MCP Server proxy (it no longer connects to every MCP service immediately)
+ startMCPServerProxy();
+
+ // Record the startup completion time
+ const endTime = Date.now();
+ const startupTime = endTime - startTime;
+
+ console.log(`\n=== Test results ===`);
+ console.log(`MCP Server proxy startup time: ${startupTime}ms`);
+ console.log(`Startup mode: lazy loading (does not connect to every MCP service immediately)`);
+ console.log(`Status: the proxy is running and only connects to a specific MCP service when a client request arrives`);
+
+ logger.info(`MCP Server proxy startup finished, took: ${startupTime}ms`);
+
+ // Keep the process alive for a while so the behaviour can be observed
+ setTimeout(() => {
+   console.log('\n=== Test finished ===');
+   console.log('The MCP Server proxy uses lazy loading: it only connects to an MCP service when a client request arrives');
+   console.log('This significantly reduces startup time, especially when many MCP services are configured');
+   process.exit(0);
+ }, 3000);
package/tests/test.js ADDED
@@ -0,0 +1,30 @@
+ import {} from "../clogger.js"
+ import Anthropic from "@anthropic-ai/sdk";
+
+
+ const anthropic = new Anthropic({
+   apiKey: "sk-c1GF5uhjQpEcfqYZE3XvGf85XGpG7Rhj6E5829M3qoawzDzu", // your key
+   // if you use an Anthropic-compatible endpoint (e.g. DeepSeek), also set baseURL:
+   baseURL: "https://api.moonshot.cn/anthropic",
+ });
+
+ const message = await anthropic.messages.create({
+   model: "kimi-k2-0905-preview", // use a colon
+   max_tokens: 1000,
+   system: "You are a helpful assistant.",
+   messages: [
+     {
+       role: "user",
+       content: [
+         { type: "text", text: "Hi, how are you?" }
+       ]
+     }
+   ]
+ });
+
+ console.log(message.content); // use console.log rather than print
+
@@ -0,0 +1,51 @@
+ import { spawn } from 'child_process';
+ import JsonRpcClient from '../codex/mcpclient.js';
+ import LogManager from '../logger-manager.js';
+
+ const logger = LogManager.getSystemLogger();
+
+ async function testMCPProxy() {
+   logger.debug('Starting MCP proxy server test...');
+
+   // Start the MCP proxy server
+   const proxyProcess = spawn('node', ['../codex/mcpserverproxy.js', '--mcpServerName=supabase'], {
+     stdio: ['pipe', 'pipe', 'pipe']
+   });
+
+   proxyProcess.stderr.on('data', (data) => {
+     logger.error('Proxy stderr:', data.toString());
+   });
+
+   proxyProcess.stdout.on('data', (data) => {
+     logger.debug('Proxy stdout:', data.toString());
+   });
+
+   // Wait a bit for the server to start
+   await new Promise(resolve => setTimeout(resolve, 2000));
+
+   try {
+     logger.debug('Creating JSON-RPC client...');
+     const client = new JsonRpcClient();
+
+     logger.debug('Testing initialize...');
+     const initResult = await client.call('supabase_initialize');
+     logger.debug('Initialize result:', JSON.stringify(initResult, null, 2));
+
+     logger.debug('Testing list tools...');
+     const toolsResult = await client.call('supabase_list');
+     logger.debug('Tools result:', JSON.stringify(toolsResult, null, 2));
+
+     if (toolsResult && toolsResult.tools) {
+       logger.debug(`Found ${toolsResult.tools.length} tools via proxy`);
+     } else {
+       logger.debug('No tools found via proxy or unexpected format');
+     }
+
+   } catch (error) {
+     console.error('Error testing MCP proxy:', error);
+   } finally {
+     proxyProcess.kill();
+   }
+ }
+
+ testMCPProxy();