@mhingston5/conduit 1.1.6 → 1.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -1
- package/dist/index.js +274 -67
- package/dist/index.js.map +1 -1
- package/package.json +2 -1
- package/src/auth.cmd.ts +26 -14
- package/src/core/config.service.ts +22 -1
- package/src/core/middleware/auth.middleware.ts +1 -2
- package/src/core/security.service.ts +8 -8
- package/src/executors/isolate.executor.ts +39 -12
- package/src/gateway/auth.service.ts +55 -13
- package/src/gateway/gateway.service.ts +22 -14
- package/src/gateway/upstream.client.ts +172 -15
- package/src/index.ts +5 -1
- package/tests/__snapshots__/assets.test.ts.snap +17 -15
- package/tests/auth.service.test.ts +57 -0
- package/tests/config.service.test.ts +29 -1
- package/tests/middleware.test.ts +16 -13
- package/tests/routing.test.ts +1 -0
- package/tests/upstream.transports.test.ts +156 -0
- package/tests/debug.fallback.test.ts +0 -40
- package/tests/debug_upstream.ts +0 -69
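The test changes later in this diff exercise two new configuration surfaces: a streamableHttp/sse remote upstream type and two OAuth2 refresh options, tokenRequestFormat and tokenParams. As a rough sketch only (field names are taken from the test fixtures in this diff; the values and the combination of options are illustrative, not taken from the package documentation), a conduit YAML config using them might look like:

    upstreams:
      - id: atlassian
        type: streamableHttp              # new remote transport; the tests also cover type: sse
        url: https://mcp.atlassian.com/v1/sse
        credentials:
          type: oauth2
          clientId: my-id
          clientSecret: my-secret
          tokenUrl: https://auth.atlassian.com/oauth/token
          refreshToken: my-refresh
          tokenRequestFormat: json        # the AuthService tests expect a JSON-body token refresh
          tokenParams:                    # extra fields merged into the token request body
            audience: api.atlassian.com
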
package/tests/__snapshots__/assets.test.ts.snap
CHANGED

@@ -58,28 +58,30 @@ exports[`Asset Integrity (Golden Tests) > should match Python SDK snapshot 1`] =
 "# Generated SDK - Do not edit
 _allowed_tools = ["test__*","github__*"]
 
-class
-    def
-
-
+class _test_Namespace:
+    async def hello(self, args=None, **kwargs):
+        params = args if args is not None else kwargs
+        return await _internal_call_tool("test__hello", params)
+
+class _github_Namespace:
+    async def create_issue(self, args=None, **kwargs):
+        params = args if args is not None else kwargs
+        return await _internal_call_tool("github__create_issue", params)
 
 class _Tools:
     def __init__(self):
-        self.test =
-
-        })
-        self.github = _ToolNamespace({
-            "create_issue": lambda args, n="github__create_issue": _internal_call_tool(n, args)
-        })
+        self.test = _test_Namespace()
+        self.github = _github_Namespace()
 
     def __getattr__(self, name):
         # Flat access fallback: search all namespaces
-        for
-
-
+        for attr_name in dir(self):
+            attr = getattr(self, attr_name, None)
+            if attr and hasattr(attr, name):
+                return getattr(attr, name)
         raise AttributeError(f"Namespace or Tool '{name}' not found")
 
-    async def raw(self, name, args):
+    async def raw(self, name, args=None):
         """Call a tool by its full name (escape hatch for dynamic/unknown tools)"""
         normalized = name.replace(".", "__")
         if _allowed_tools is not None:
@@ -89,7 +91,7 @@ class _Tools:
             )
             if not allowed:
                 raise PermissionError(f"Tool {name} is not in the allowlist")
-        return await _internal_call_tool(normalized, args)
+        return await _internal_call_tool(normalized, args or {})
 
 tools = _Tools()"
 `;

package/tests/auth.service.test.ts
CHANGED

@@ -70,4 +70,61 @@ describe('AuthService', () => {
     expect(headers2['Authorization']).toBe('Bearer cached-access');
     expect(axios.post).toHaveBeenCalledTimes(1); // Still 1, not 2
   });
+
+  it('should send JSON token refresh for Atlassian token endpoint', async () => {
+    const creds: any = {
+      type: 'oauth2',
+      clientId: 'id',
+      clientSecret: 'secret',
+      tokenUrl: 'https://auth.atlassian.com/oauth/token',
+      refreshToken: 'refresh',
+    };
+
+    (axios.post as any).mockResolvedValue({
+      data: {
+        access_token: 'new-access',
+        expires_in: 0,
+      },
+    });
+
+    await authService.getAuthHeaders(creds);
+
+    const [, body, config] = (axios.post as any).mock.calls[0];
+    expect(body).toMatchObject({
+      grant_type: 'refresh_token',
+      refresh_token: 'refresh',
+      client_id: 'id',
+      client_secret: 'secret',
+    });
+    expect(config.headers['Content-Type']).toBe('application/json');
+  });
+
+  it('should include tokenParams and cache rotating refresh tokens', async () => {
+    const creds: any = {
+      type: 'oauth2',
+      clientId: 'id',
+      clientSecret: 'secret',
+      tokenUrl: 'https://auth.atlassian.com/oauth/token',
+      refreshToken: 'r1',
+      tokenRequestFormat: 'json',
+      tokenParams: { audience: 'api.atlassian.com' },
+    };
+
+    (axios.post as any)
+      .mockResolvedValueOnce({
+        data: { access_token: 'a1', expires_in: 0, refresh_token: 'r2' },
+      })
+      .mockResolvedValueOnce({
+        data: { access_token: 'a2', expires_in: 0 },
+      });
+
+    await authService.getAuthHeaders(creds);
+    await authService.getAuthHeaders(creds);
+
+    const firstBody = (axios.post as any).mock.calls[0][1];
+    expect(firstBody).toMatchObject({ refresh_token: 'r1', audience: 'api.atlassian.com' });
+
+    const secondBody = (axios.post as any).mock.calls[1][1];
+    expect(secondBody).toMatchObject({ refresh_token: 'r2', audience: 'api.atlassian.com' });
+  });
 });

package/tests/config.service.test.ts
CHANGED

@@ -81,6 +81,9 @@ upstreams:
       clientSecret: my-secret
       tokenUrl: http://token
       refreshToken: my-refresh
+      tokenRequestFormat: json
+      tokenParams:
+        audience: api.atlassian.com
 `);
 
     vi.stubEnv('CONFIG_FILE', 'conduit.test.yaml');
@@ -92,10 +95,35 @@ upstreams:
       clientId: 'my-id',
       clientSecret: 'my-secret',
       tokenUrl: 'http://token',
-      refreshToken: 'my-refresh'
+      refreshToken: 'my-refresh',
+      tokenRequestFormat: 'json',
+      tokenParams: { audience: 'api.atlassian.com' },
     });
 
     existsSpy.mockRestore();
     readSpy.mockRestore();
   });
+
+  it('should parse streamableHttp upstream correctly', () => {
+    const existsSpy = vi.spyOn(fs, 'existsSync').mockImplementation((p: any) => p.endsWith('conduit.test.yaml'));
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockReturnValue(`
+upstreams:
+  - id: atlassian
+    type: streamableHttp
+    url: https://mcp.atlassian.com/v1/sse
+    credentials:
+      type: bearer
+      bearerToken: test-token
+`);
+
+    vi.stubEnv('CONFIG_FILE', 'conduit.test.yaml');
+    const configService = new ConfigService();
+    const upstreams = configService.get('upstreams');
+    expect(upstreams).toHaveLength(1);
+    expect(upstreams![0].type).toBe('streamableHttp');
+    expect((upstreams![0] as any).url).toBe('https://mcp.atlassian.com/v1/sse');
+
+    existsSpy.mockRestore();
+    readSpy.mockRestore();
+  });
 });

package/tests/middleware.test.ts
CHANGED

@@ -15,6 +15,7 @@ describe('Middleware Tests', () => {
     validateToken: vi.fn(),
     checkRateLimit: vi.fn(),
     getIpcToken: vi.fn(),
+    isMasterToken: vi.fn(),
     validateIpcToken: vi.fn(),
     getSession: vi.fn(),
   };
@@ -39,8 +40,7 @@ describe('Middleware Tests', () => {
     authMiddleware = new AuthMiddleware(mockSecurityService as SecurityService);
   });
 
-  it('should validate bearer token', () => {
-    mockSecurityService.validateToken.mockReturnValue(true);
+  it('should validate bearer token', async () => {
     const request = {
       jsonrpc: '2.0',
       id: 1,
@@ -48,24 +48,27 @@ describe('Middleware Tests', () => {
       auth: { bearerToken: 'valid-token' }
     };
 
-
+    // Not master and not a valid session -> Forbidden
+    mockSecurityService.isMasterToken.mockReturnValue(false);
     mockSecurityService.validateIpcToken.mockReturnValue(false);
-    // Mock validateToken behavior via logic or specific mock if used, but AuthMiddleware uses getIpcToken/validateIpcToken
 
-    authMiddleware.handle(request as any, context, mockNext);
+    const result1 = await authMiddleware.handle(request as any, context, mockNext);
+    expect(mockSecurityService.isMasterToken).toHaveBeenCalledWith('valid-token');
     expect(mockSecurityService.validateIpcToken).toHaveBeenCalledWith('valid-token');
-    expect(
-
-
+    expect(result1?.error?.code).toBe(ConduitError.Forbidden);
+    expect(mockNext).not.toHaveBeenCalled();
+
+    // Master token -> allowed
+    mockNext.mockClear();
+    mockSecurityService.isMasterToken.mockReturnValue(true);
 
-
-
-    authMiddleware.handle(request as any, context, mockNext);
+    const result2 = await authMiddleware.handle(request as any, context, mockNext);
+    expect(result2?.error).toBeUndefined();
     expect(mockNext).toHaveBeenCalled();
   });
 
   it('should throw Forbidden if token is invalid', async () => {
-    mockSecurityService.
+    mockSecurityService.isMasterToken.mockReturnValue(false);
     mockSecurityService.validateIpcToken.mockReturnValue(false);
 
     const request = {
@@ -76,7 +79,7 @@ describe('Middleware Tests', () => {
     };
 
     const result = await authMiddleware.handle(request as any, context, mockNext);
-    expect(result
+    expect(result?.error?.code).toBe(ConduitError.Forbidden);
     expect(mockNext).not.toHaveBeenCalled();
   });
 });

package/tests/routing.test.ts
CHANGED

@@ -43,6 +43,7 @@ describe('RequestController Routing', () => {
     createSession: vi.fn().mockReturnValue('token'),
     invalidateSession: vi.fn(),
     getIpcToken: vi.fn().mockReturnValue('master-token'),
+    isMasterToken: vi.fn().mockReturnValue(true),
     validateIpcToken: vi.fn().mockReturnValue(true),
     getSession: vi.fn(),
     checkRateLimit: vi.fn().mockReturnValue(true),

package/tests/upstream.transports.test.ts
ADDED

@@ -0,0 +1,156 @@
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+
+const mcpClientMocks = {
+  connect: vi.fn(async () => undefined),
+  listTools: vi.fn(async () => ({ tools: [{ name: 'hello', description: 'hi', inputSchema: {} }] })),
+  callTool: vi.fn(async () => ({ content: [{ type: 'text', text: 'ok' }] })),
+  request: vi.fn(async () => ({ ok: true })),
+};
+
+const transportMocks = {
+  streamableHttpCtor: vi.fn(),
+  sseCtor: vi.fn(),
+};
+
+vi.mock('@modelcontextprotocol/sdk/client/index.js', () => {
+  return {
+    Client: class {
+      connect = mcpClientMocks.connect;
+      listTools = mcpClientMocks.listTools;
+      callTool = mcpClientMocks.callTool;
+      request = mcpClientMocks.request;
+      constructor() {}
+    },
+  };
+});
+
+vi.mock('@modelcontextprotocol/sdk/client/streamableHttp.js', () => {
+  return {
+    StreamableHTTPClientTransport: class {
+      url: URL;
+      opts: any;
+      constructor(url: URL, opts: any) {
+        this.url = url;
+        this.opts = opts;
+        transportMocks.streamableHttpCtor(url, opts);
+      }
+    },
+  };
+});
+
+vi.mock('@modelcontextprotocol/sdk/client/sse.js', () => {
+  return {
+    SSEClientTransport: class {
+      url: URL;
+      opts: any;
+      constructor(url: URL, opts: any) {
+        this.url = url;
+        this.opts = opts;
+        transportMocks.sseCtor(url, opts);
+      }
+    },
+  };
+});
+
+// Not used directly but imported by UpstreamClient
+vi.mock('@modelcontextprotocol/sdk/client/stdio.js', () => {
+  return {
+    StdioClientTransport: class {},
+  };
+});
+
+describe('UpstreamClient (remote transports)', () => {
+  const originalFetch = globalThis.fetch;
+
+  beforeEach(() => {
+    vi.clearAllMocks();
+    globalThis.fetch = vi.fn(async () => new Response(null, { status: 200 })) as any;
+  });
+
+  afterEach(() => {
+    globalThis.fetch = originalFetch;
+  });
+
+  it('uses Streamable HTTP client transport for type=streamableHttp', async () => {
+    const { UpstreamClient } = await import('../src/gateway/upstream.client.js');
+
+    const logger: any = { child: () => logger, debug: vi.fn(), info: vi.fn(), error: vi.fn() };
+    const authService: any = { getAuthHeaders: vi.fn(async () => ({ Authorization: 'Bearer t' })) };
+    const urlValidator: any = { validateUrl: vi.fn(async () => ({ valid: true })) };
+
+    const client = new UpstreamClient(
+      logger,
+      { id: 'atl', type: 'streamableHttp', url: 'https://mcp.atlassian.com/v1/sse' } as any,
+      authService,
+      urlValidator
+    );
+
+    const res = await client.call({ jsonrpc: '2.0', id: '1', method: 'tools/list' } as any, { correlationId: 'c1' } as any);
+
+    expect(transportMocks.streamableHttpCtor).toHaveBeenCalled();
+    expect(urlValidator.validateUrl).toHaveBeenCalled();
+    expect(mcpClientMocks.connect).toHaveBeenCalled();
+    expect(mcpClientMocks.listTools).toHaveBeenCalled();
+    expect(res.result).toBeDefined();
+  });
+
+  it('pins DNS resolution and blocks cross-origin fetches', async () => {
+    const { UpstreamClient } = await import('../src/gateway/upstream.client.js');
+
+    const logger: any = { child: () => logger, debug: vi.fn(), info: vi.fn(), error: vi.fn() };
+    const authService: any = { getAuthHeaders: vi.fn(async () => ({ Authorization: 'Bearer t' })) };
+    const urlValidator: any = { validateUrl: vi.fn(async () => ({ valid: true, resolvedIp: '93.184.216.34' })) };
+
+    const client = new UpstreamClient(
+      logger,
+      { id: 'atl', type: 'streamableHttp', url: 'https://mcp.atlassian.com/v1/sse' } as any,
+      authService,
+      urlValidator
+    );
+
+    // Trigger initial URL validation + pinning
+    await client.call({ jsonrpc: '2.0', id: '1', method: 'tools/list' } as any, { correlationId: 'c1' } as any);
+
+    const [, opts] = transportMocks.streamableHttpCtor.mock.calls[0];
+    const wrappedFetch = opts.fetch;
+
+    // Same-origin request should pass a dispatcher and block redirects
+    await wrappedFetch('https://mcp.atlassian.com/v1/sse', {});
+    expect((globalThis.fetch as any).mock.calls.length).toBeGreaterThan(0);
+    const [request, init] = (globalThis.fetch as any).mock.calls.at(-1);
+    expect(request).toBeInstanceOf(Request);
+    expect(request.redirect).toBe('manual');
+    expect(init?.dispatcher).toBeDefined();
+
+    // Cross-origin request should be blocked
+    await expect(wrappedFetch('https://evil.example.com/', {})).rejects.toThrow(/Forbidden upstream redirect\/origin/);
+  });
+
+  it('lazily creates SSE transport for type=sse and attaches auth headers', async () => {
+    const { UpstreamClient } = await import('../src/gateway/upstream.client.js');
+
+    const logger: any = { child: () => logger, debug: vi.fn(), info: vi.fn(), error: vi.fn() };
+    const authService: any = { getAuthHeaders: vi.fn(async () => ({ Authorization: 'Bearer t' })) };
+    const urlValidator: any = { validateUrl: vi.fn(async () => ({ valid: true })) };
+
+    const client = new UpstreamClient(
+      logger,
+      {
+        id: 'atl',
+        type: 'sse',
+        url: 'https://mcp.atlassian.com/v1/sse',
+        credentials: { type: 'bearer', bearerToken: 't' },
+      } as any,
+      authService,
+      urlValidator
+    );
+
+    await client.call({ jsonrpc: '2.0', id: '1', method: 'tools/list' } as any, { correlationId: 'c1' } as any);
+
+    expect(authService.getAuthHeaders).toHaveBeenCalled();
+    expect(transportMocks.sseCtor).toHaveBeenCalled();
+    const [, opts] = transportMocks.sseCtor.mock.calls[0];
+    expect(opts.requestInit.headers).toMatchObject({ Authorization: 'Bearer t' });
+    expect(mcpClientMocks.connect).toHaveBeenCalled();
+  });
+});

package/tests/debug.fallback.test.ts
DELETED

@@ -1,40 +0,0 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest';
-import { GatewayService } from '../src/gateway/gateway.service.js';
-import { ExecutionContext } from '../src/core/execution.context.js';
-import pino from 'pino';
-
-const logger = pino({ level: 'silent' });
-
-describe('GatewayService Namespace Fallback Debug', () => {
-  let gateway: GatewayService;
-  let context: ExecutionContext;
-
-  beforeEach(() => {
-    const securityService = {
-      validateUrl: vi.fn().mockReturnValue({ valid: true }),
-    } as any;
-    gateway = new GatewayService(logger, securityService);
-    context = new ExecutionContext({ logger });
-  });
-
-  it('should show detailed error when upstream not found', async () => {
-    const response = await gateway.callTool('nonexistent__tool', {}, context);
-    expect(response.error?.message).toContain("Upstream not found: 'nonexistent'");
-    expect(response.error?.message).toContain("Available: none");
-  });
-
-  it('should hit fallback for namespaceless tool', async () => {
-    // Register an upstream with a tool
-    gateway.registerUpstream({ id: 'fs', url: 'http://fs' });
-
-    // Mock discovery
-    vi.spyOn(gateway, 'discoverTools').mockResolvedValue([
-      { name: 'fs__list_directory', description: '', inputSchema: {} }
-    ] as any);
-
-    const response = await gateway.callTool('list_directory', {}, context);
-    // It should NOT return "Upstream not found: ''"
-    expect(response.error?.message).not.toContain("Upstream not found");
-    // It should try to call fs__list_directory (which will fail due to lack of axios mock here, but that's fine)
-  });
-});

package/tests/debug_upstream.ts
DELETED

@@ -1,69 +0,0 @@
-/**
- * Debug script to verify the filesystem upstream is working correctly.
- * Run with: npx tsx tests/debug_upstream.ts
- */
-import { ConfigService } from '../src/core/config.service.js';
-import { createLogger } from '../src/core/logger.js';
-import { GatewayService } from '../src/gateway/gateway.service.js';
-import { SecurityService } from '../src/core/security.service.js';
-import { ExecutionContext } from '../src/core/execution.context.js';
-
-async function main() {
-  console.log('=== Conduit Upstream Debug ===\n');
-
-  const configService = new ConfigService();
-  const logger = createLogger(configService);
-
-  console.log('Config loaded from:', process.cwd());
-  console.log('Upstreams configured:', JSON.stringify(configService.get('upstreams'), null, 2));
-  console.log();
-
-  const securityService = new SecurityService(logger, undefined);
-  const gatewayService = new GatewayService(logger, securityService);
-
-  // Register upstreams from config
-  const upstreams = configService.get('upstreams') || [];
-  for (const upstream of upstreams) {
-    console.log(`Registering upstream: ${upstream.id} (${upstream.type})`);
-    gatewayService.registerUpstream(upstream);
-  }
-
-  console.log('\n=== Testing Tool Discovery ===\n');
-
-  const context = new ExecutionContext({ logger });
-
-  // Give the filesystem server a moment to start
-  console.log('Waiting 3 seconds for upstream servers to initialize...');
-  await new Promise(resolve => setTimeout(resolve, 3000));
-
-  try {
-    const tools = await gatewayService.discoverTools(context);
-    console.log(`\nDiscovered ${tools.length} tools:`);
-    for (const tool of tools) {
-      console.log(`  - ${tool.name}: ${tool.description || '(no description)'}`);
-    }
-
-    // Try to find list_directory
-    const listDirTool = tools.find(t => t.name.includes('list_directory') || t.name.includes('list_dir'));
-    if (listDirTool) {
-      console.log(`\n✅ Found list_directory tool: ${listDirTool.name}`);
-
-      // Try calling it
-      console.log('\n=== Testing Tool Call ===\n');
-      const result = await gatewayService.callTool(listDirTool.name, { path: '/private/tmp' }, context);
-      console.log('Result:', JSON.stringify(result, null, 2));
-    } else {
-      console.log('\n❌ list_directory tool NOT found in discovered tools');
-    }
-  } catch (error: any) {
-    console.error('Error during discovery:', error.message);
-  }
-
-  console.log('\n=== Debug Complete ===');
-  process.exit(0);
-}
-
-main().catch(err => {
-  console.error('Fatal error:', err);
-  process.exit(1);
-});