@mhingston5/conduit 1.1.6 → 1.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +29 -1
- package/dist/index.js +157 -41
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
- package/src/auth.cmd.ts +26 -14
- package/src/core/config.service.ts +22 -1
- package/src/gateway/auth.service.ts +55 -13
- package/src/gateway/gateway.service.ts +22 -14
- package/src/gateway/upstream.client.ts +84 -15
- package/tests/__snapshots__/assets.test.ts.snap +17 -15
- package/tests/auth.service.test.ts +57 -0
- package/tests/config.service.test.ts +29 -1
- package/tests/upstream.transports.test.ts +117 -0
- package/tests/debug.fallback.test.ts +0 -40
- package/tests/debug_upstream.ts +0 -69
package/package.json
CHANGED
package/src/auth.cmd.ts
CHANGED
@@ -157,27 +157,39 @@ export async function handleAuth(options: AuthOptions)
   }

   try {
-    const …
-    …
-    …
-    …
-    …
+    const payload: Record<string, string> = {
+      grant_type: 'authorization_code',
+      code,
+      redirect_uri: redirectUri,
+      client_id: options.clientId,
+    };
+
     if (options.clientSecret) {
-      …
+      payload.client_secret = options.clientSecret;
     }
     if (codeVerifier) {
-      …
+      payload.code_verifier = codeVerifier;
     }
     if (resolvedResource) {
-      …
+      payload.resource = resolvedResource;
     }

-    const …
-    …
-    …
-    …
-    …
-    …
+    const tokenHostname = new URL(resolvedTokenUrl).hostname;
+    const useJson = tokenHostname === 'auth.atlassian.com';
+
+    const response = useJson
+      ? await axios.post(resolvedTokenUrl, payload, {
+          headers: {
+            'Content-Type': 'application/json',
+            'Accept': 'application/json',
+          },
+        })
+      : await axios.post(resolvedTokenUrl, new URLSearchParams(payload), {
+          headers: {
+            'Content-Type': 'application/x-www-form-urlencoded',
+            'Accept': 'application/json',
+          },
+        });

     const { refresh_token, access_token } = response.data;

package/src/core/config.service.ts
CHANGED

@@ -29,6 +29,8 @@ export const UpstreamCredentialsSchema = z.object({
   tokenUrl: z.string().optional(),
   refreshToken: z.string().optional(),
   scopes: z.array(z.string()).optional(),
+  tokenRequestFormat: z.enum(['form', 'json']).optional(),
+  tokenParams: z.record(z.string(), z.string()).optional(),
   apiKey: z.string().optional(),
   bearerToken: z.string().optional(),
   headerName: z.string().optional(),

@@ -41,6 +43,20 @@ export const HttpUpstreamSchema = z.object({
   credentials: UpstreamCredentialsSchema.optional(),
 });

+export const StreamableHttpUpstreamSchema = z.object({
+  id: z.string(),
+  type: z.literal('streamableHttp'),
+  url: z.string(),
+  credentials: UpstreamCredentialsSchema.optional(),
+});
+
+export const SseUpstreamSchema = z.object({
+  id: z.string(),
+  type: z.literal('sse'),
+  url: z.string(),
+  credentials: UpstreamCredentialsSchema.optional(),
+});
+
 export const StdioUpstreamSchema = z.object({
   id: z.string(),
   type: z.literal('stdio'),

@@ -49,7 +65,12 @@ export const StdioUpstreamSchema = z.object({
   env: z.record(z.string(), z.string()).optional(),
 });

-export const UpstreamInfoSchema = z.union([…
+export const UpstreamInfoSchema = z.union([
+  HttpUpstreamSchema,
+  StreamableHttpUpstreamSchema,
+  SseUpstreamSchema,
+  StdioUpstreamSchema,
+]);

 export type ResourceLimits = z.infer<typeof ResourceLimitsSchema>;

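Note on usage: with StreamableHttpUpstreamSchema and SseUpstreamSchema added to the UpstreamInfoSchema union, the config file can declare remote MCP upstreams directly. A minimal YAML sketch (the id, url, and token values are illustrative placeholders, modelled on the config.service.test.ts fixture later in this diff):

upstreams:
  - id: atlassian
    type: streamableHttp
    url: https://mcp.atlassian.com/v1/sse
    credentials:
      type: bearer
      bearerToken: <your-token>
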
package/src/gateway/auth.service.ts
CHANGED

@@ -12,6 +12,8 @@ export interface UpstreamCredentials {
   tokenUrl?: string;
   refreshToken?: string;
   scopes?: string[];
+  tokenRequestFormat?: 'form' | 'json';
+  tokenParams?: Record<string, string>;
 }

 interface CachedToken {

@@ -23,6 +25,8 @@ export class AuthService {
   private logger: Logger;
   // Cache tokens separately from credentials to avoid mutation
   private tokenCache = new Map<string, CachedToken>();
+  // Keep the latest refresh token in-memory (rotating tokens)
+  private refreshTokenCache = new Map<string, string>();
   // Prevent concurrent refresh requests for the same client
   private refreshLocks = new Map<string, Promise<string>>();

@@ -81,23 +85,56 @@ export class AuthService {
     this.logger.info({ tokenUrl: creds.tokenUrl, clientId: creds.clientId }, 'Refreshing OAuth2 token');

     try {
-      const …
-      …
-      …
-      …
+      const tokenUrl = creds.tokenUrl;
+      const cachedRefreshToken = this.refreshTokenCache.get(cacheKey);
+      const refreshToken = cachedRefreshToken || creds.refreshToken;
+
+      if (!refreshToken) {
+        throw new Error('OAuth2 credentials missing required fields for refresh');
+      }
+
+      const payload: Record<string, string> = {
+        grant_type: 'refresh_token',
+        refresh_token: refreshToken,
+        client_id: creds.clientId,
+      };
+
       if (creds.clientSecret) {
-        …
+        payload.client_secret = creds.clientSecret;
       }

-      …
-      …
-      …
-        'Accept': 'application/json',
-      },
-      });
+      if (creds.tokenParams) {
+        Object.assign(payload, creds.tokenParams);
+      }

-      const …
-      …
+      const requestFormat = (() => {
+        if (creds.tokenRequestFormat) return creds.tokenRequestFormat;
+        try {
+          const hostname = new URL(tokenUrl).hostname;
+          if (hostname === 'auth.atlassian.com') return 'json';
+        } catch {
+          // ignore
+        }
+        return 'form';
+      })();
+
+      const response = requestFormat === 'json'
+        ? await axios.post(tokenUrl, payload, {
+            headers: {
+              'Content-Type': 'application/json',
+              'Accept': 'application/json',
+            },
+          })
+        : await axios.post(tokenUrl, new URLSearchParams(payload), {
+            headers: {
+              'Content-Type': 'application/x-www-form-urlencoded',
+              'Accept': 'application/json',
+            },
+          });
+
+      const { access_token, expires_in, refresh_token } = response.data;
+      const expiresInRaw = Number(expires_in);
+      const expiresInSeconds = Number.isFinite(expiresInRaw) ? expiresInRaw : 3600;

       // Cache the token (don't mutate the input credentials)
       this.tokenCache.set(cacheKey, {

@@ -105,6 +142,11 @@ export class AuthService {
         expiresAt: Date.now() + (expiresInSeconds * 1000),
       });

+      // Some providers (e.g. Atlassian) rotate refresh tokens
+      if (typeof refresh_token === 'string' && refresh_token.length > 0) {
+        this.refreshTokenCache.set(cacheKey, refresh_token);
+      }
+
       return `Bearer ${access_token}`;
     } catch (err: any) {
       const errorMsg = err.response?.data?.error_description || err.response?.data?.error || err.message;

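Note on usage: the tokenRequestFormat and tokenParams credential fields added above drive this refresh path: tokenRequestFormat: json sends the refresh request as a JSON body (otherwise form-encoded, with auth.atlassian.com auto-detected), and tokenParams entries are merged into the refresh payload. A minimal YAML sketch of such credentials (values are placeholders, mirroring the test fixtures later in this diff):

credentials:
  type: oauth2
  clientId: my-id
  clientSecret: my-secret
  tokenUrl: https://auth.atlassian.com/oauth/token
  refreshToken: my-refresh
  tokenRequestFormat: json
  tokenParams:
    audience: api.atlassian.com
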
package/src/gateway/gateway.service.ts
CHANGED

@@ -144,15 +144,23 @@ export class GatewayService {

     let tools = this.schemaCache.get(packageId);

-    // …
+    // Discover tools if not cached
     if (!tools) {
-      …
-      …
-      …
-      …
-      …
-      …
-      …
+      // 1) Try to get manifest (if supported)
+      if (typeof (client as any).getManifest === 'function') {
+        try {
+          const manifest = await (client as any).getManifest(context);
+          if (manifest && manifest.tools) {
+            tools = manifest.tools as ToolSchema[];
+          }
+        } catch (e: any) {
+          this.logger.debug({ upstreamId: packageId, err: e.message }, 'Manifest fetch failed (will fallback)');
+        }
+      }
+
+      // 2) Fall back to RPC discovery
+      if (!tools) {
+        try {
           if (typeof (client as any).listTools === 'function') {
             tools = await (client as any).listTools();
           } else {

@@ -168,14 +176,14 @@ export class GatewayService {
             this.logger.warn({ upstreamId: packageId, error: response.error }, 'Failed to discover tools via RPC');
           }
         }
+        } catch (e: any) {
+          this.logger.error({ upstreamId: packageId, err: e.message }, 'Error during tool discovery');
       }
+      }

-      …
-      …
-      …
-      }
-    } catch (e: any) {
-      this.logger.error({ upstreamId: packageId, err: e.message }, 'Error during tool discovery');
+    if (tools && tools.length > 0) {
+      this.schemaCache.set(packageId, tools);
+      this.logger.info({ upstreamId: packageId, toolCount: tools.length }, 'Discovered tools from upstream');
     }
   }

package/src/gateway/upstream.client.ts
CHANGED

@@ -7,6 +7,8 @@ import { IUrlValidator } from '../core/interfaces/url.validator.interface.js';

 import { Client } from '@modelcontextprotocol/sdk/client/index.js';
 import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
+import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
+import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js';
 import { z } from 'zod';

 export type UpstreamInfo = {

@@ -14,6 +16,8 @@ export type UpstreamInfo = {
   credentials?: UpstreamCredentials;
 } & (
   | { type?: 'http'; url: string }
+  | { type: 'streamableHttp'; url: string }
+  | { type: 'sse'; url: string }
   | { type: 'stdio'; command: string; args?: string[]; env?: Record<string, string> }
 );

@@ -23,7 +27,7 @@ export class UpstreamClient {
   private authService: AuthService;
   private urlValidator: IUrlValidator;
   private mcpClient?: Client;
-  private transport?: StdioClientTransport;
+  private transport?: StdioClientTransport | StreamableHTTPClientTransport | SSEClientTransport;
   private connected: boolean = false;

   constructor(logger: Logger, info: UpstreamInfo, authService: AuthService, urlValidator: IUrlValidator) {

@@ -51,13 +55,72 @@ export class UpstreamClient {
       }, {
         capabilities: {},
       });
+      return;
+    }
+
+    if (this.info.type === 'streamableHttp') {
+      this.transport = new StreamableHTTPClientTransport(new URL(this.info.url), {
+        fetch: this.createAuthedFetch(),
+      });
+      this.mcpClient = new Client({
+        name: 'conduit-gateway',
+        version: '1.0.0',
+      }, {
+        capabilities: {},
+      });
+      return;
+    }
+
+    if (this.info.type === 'sse') {
+      this.mcpClient = new Client({
+        name: 'conduit-gateway',
+        version: '1.0.0',
+      }, {
+        capabilities: {},
+      });
     }
   }

-  private …
-  …
+  private createAuthedFetch() {
+    const creds = this.info.credentials;
+    if (!creds) return fetch;
+
+    return async (input: any, init: any = {}) => {
+      const headers = new Headers(init.headers || {});
+      const authHeaders = await this.authService.getAuthHeaders(creds);
+      for (const [k, v] of Object.entries(authHeaders)) {
+        headers.set(k, v);
+      }
+      return fetch(input, { ...init, headers });
+    };
+  }
+
+  private async ensureConnected() {
+    if (!this.mcpClient) return;
+
+    if (!this.transport && this.info.type === 'sse') {
+      const authHeaders = this.info.credentials
+        ? await this.authService.getAuthHeaders(this.info.credentials)
+        : {};
+
+      this.transport = new SSEClientTransport(new URL(this.info.url), {
+        fetch: this.createAuthedFetch(),
+        eventSourceInit: { headers: authHeaders } as any,
+        requestInit: { headers: authHeaders },
+      });
+    }
+
+    if (!this.transport) return;
     if (this.connected) return;

+    if (this.info.type === 'streamableHttp' || this.info.type === 'sse') {
+      const securityResult = await this.urlValidator.validateUrl(this.info.url);
+      if (!securityResult.valid) {
+        this.logger.error({ url: this.info.url }, 'Blocked upstream URL (SSRF)');
+        throw new Error(securityResult.message || 'Forbidden URL');
+      }
+    }
+
     try {
       this.logger.debug('Connecting to upstream transport...');
       await this.mcpClient.connect(this.transport);

@@ -70,19 +133,23 @@ export class UpstreamClient {
   }

   async call(request: JSONRPCRequest, context: ExecutionContext): Promise<JSONRPCResponse> {
-    …
-    …
+    const usesMcpClientTransport = (info: UpstreamInfo): info is (
+      | { type: 'stdio'; command: string; args?: string[]; env?: Record<string, string> }
+      | { type: 'streamableHttp'; url: string }
+      | { type: 'sse'; url: string }
+    ) & { id: string; credentials?: UpstreamCredentials } =>
+      info.type === 'stdio' || info.type === 'streamableHttp' || info.type === 'sse';

-    if (…
-    return this.…
-    } else {
-      return this.callHttp(request, context as ExecutionContext);
+    if (usesMcpClientTransport(this.info)) {
+      return this.callMcpClient(request);
     }
+
+    return this.callHttp(request, context as ExecutionContext);
   }

-  private async …
+  private async callMcpClient(request: JSONRPCRequest): Promise<JSONRPCResponse> {
     if (!this.mcpClient) {
-      return { jsonrpc: '2.0', id: request.id, error: { code: -32603, message: '…
+      return { jsonrpc: '2.0', id: request.id, error: { code: -32603, message: 'MCP client not initialized' } };
     }

     try {

@@ -128,13 +195,13 @@ export class UpstreamClient {
       };
     }
     } catch (error: any) {
-      this.logger.error({ err: error }, '…
+      this.logger.error({ err: error }, 'MCP call failed');
       return {
         jsonrpc: '2.0',
         id: request.id,
         error: {
           code: error.code || -32603,
-          message: error.message || 'Internal error in …
+          message: error.message || 'Internal error in MCP transport'
         }
       };
     }

@@ -142,7 +209,9 @@ export class UpstreamClient {

   private async callHttp(request: JSONRPCRequest, context: ExecutionContext): Promise<JSONRPCResponse> {
     // Narrowing for TS
-    if (this.info.type === 'stdio' …
+    if (this.info.type === 'stdio' || this.info.type === 'streamableHttp' || this.info.type === 'sse') {
+      throw new Error('Unreachable');
+    }
     const url = this.info.url;

     const headers: Record<string, string> = {

@@ -204,7 +273,7 @@
     }
   }
   async getManifest(context: ExecutionContext): Promise<ToolManifest | null> {
-    if (this.info.type !== 'http') return null;
+    if (this.info.type && this.info.type !== 'http') return null;

     try {
       const baseUrl = this.info.url.replace(/\/$/, ''); // Remove trailing slash

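Note on usage: the SSE transport is configured like streamableHttp, just with type: sse; auth headers are applied through the authed fetch and the eventSourceInit/requestInit options shown above. An illustrative YAML sketch (id, url, and token are placeholders, not taken from this diff):

upstreams:
  - id: legacy-sse-server
    type: sse
    url: https://example.invalid/sse
    credentials:
      type: bearer
      bearerToken: <your-token>
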
package/tests/__snapshots__/assets.test.ts.snap
CHANGED

@@ -58,28 +58,30 @@ exports[`Asset Integrity (Golden Tests) > should match Python SDK snapshot 1`] =
 "# Generated SDK - Do not edit
 _allowed_tools = ["test__*","github__*"]

-class …
-    def …
-    …
-    …
+class _test_Namespace:
+    async def hello(self, args=None, **kwargs):
+        params = args if args is not None else kwargs
+        return await _internal_call_tool("test__hello", params)
+
+class _github_Namespace:
+    async def create_issue(self, args=None, **kwargs):
+        params = args if args is not None else kwargs
+        return await _internal_call_tool("github__create_issue", params)

 class _Tools:
     def __init__(self):
-        self.test = …
-        …
-        })
-        self.github = _ToolNamespace({
-            "create_issue": lambda args, n="github__create_issue": _internal_call_tool(n, args)
-        })
+        self.test = _test_Namespace()
+        self.github = _github_Namespace()

     def __getattr__(self, name):
         # Flat access fallback: search all namespaces
-        for …
-        …
-        …
+        for attr_name in dir(self):
+            attr = getattr(self, attr_name, None)
+            if attr and hasattr(attr, name):
+                return getattr(attr, name)
         raise AttributeError(f"Namespace or Tool '{name}' not found")

-    async def raw(self, name, args):
+    async def raw(self, name, args=None):
         """Call a tool by its full name (escape hatch for dynamic/unknown tools)"""
         normalized = name.replace(".", "__")
         if _allowed_tools is not None:

@@ -89,7 +91,7 @@ class _Tools:
             )
             if not allowed:
                 raise PermissionError(f"Tool {name} is not in the allowlist")
-        return await _internal_call_tool(normalized, args)
+        return await _internal_call_tool(normalized, args or {})

 tools = _Tools()"
 `;

package/tests/auth.service.test.ts
CHANGED

@@ -70,4 +70,61 @@ describe('AuthService', () => {
     expect(headers2['Authorization']).toBe('Bearer cached-access');
     expect(axios.post).toHaveBeenCalledTimes(1); // Still 1, not 2
   });
+
+  it('should send JSON token refresh for Atlassian token endpoint', async () => {
+    const creds: any = {
+      type: 'oauth2',
+      clientId: 'id',
+      clientSecret: 'secret',
+      tokenUrl: 'https://auth.atlassian.com/oauth/token',
+      refreshToken: 'refresh',
+    };
+
+    (axios.post as any).mockResolvedValue({
+      data: {
+        access_token: 'new-access',
+        expires_in: 0,
+      },
+    });
+
+    await authService.getAuthHeaders(creds);
+
+    const [, body, config] = (axios.post as any).mock.calls[0];
+    expect(body).toMatchObject({
+      grant_type: 'refresh_token',
+      refresh_token: 'refresh',
+      client_id: 'id',
+      client_secret: 'secret',
+    });
+    expect(config.headers['Content-Type']).toBe('application/json');
+  });
+
+  it('should include tokenParams and cache rotating refresh tokens', async () => {
+    const creds: any = {
+      type: 'oauth2',
+      clientId: 'id',
+      clientSecret: 'secret',
+      tokenUrl: 'https://auth.atlassian.com/oauth/token',
+      refreshToken: 'r1',
+      tokenRequestFormat: 'json',
+      tokenParams: { audience: 'api.atlassian.com' },
+    };
+
+    (axios.post as any)
+      .mockResolvedValueOnce({
+        data: { access_token: 'a1', expires_in: 0, refresh_token: 'r2' },
+      })
+      .mockResolvedValueOnce({
+        data: { access_token: 'a2', expires_in: 0 },
+      });
+
+    await authService.getAuthHeaders(creds);
+    await authService.getAuthHeaders(creds);
+
+    const firstBody = (axios.post as any).mock.calls[0][1];
+    expect(firstBody).toMatchObject({ refresh_token: 'r1', audience: 'api.atlassian.com' });
+
+    const secondBody = (axios.post as any).mock.calls[1][1];
+    expect(secondBody).toMatchObject({ refresh_token: 'r2', audience: 'api.atlassian.com' });
+  });
 });

package/tests/config.service.test.ts
CHANGED

@@ -81,6 +81,9 @@ upstreams:
       clientSecret: my-secret
       tokenUrl: http://token
       refreshToken: my-refresh
+      tokenRequestFormat: json
+      tokenParams:
+        audience: api.atlassian.com
 `);

     vi.stubEnv('CONFIG_FILE', 'conduit.test.yaml');

@@ -92,10 +95,35 @@ upstreams:
       clientId: 'my-id',
       clientSecret: 'my-secret',
       tokenUrl: 'http://token',
-      refreshToken: 'my-refresh'
+      refreshToken: 'my-refresh',
+      tokenRequestFormat: 'json',
+      tokenParams: { audience: 'api.atlassian.com' },
     });

     existsSpy.mockRestore();
     readSpy.mockRestore();
   });
+
+  it('should parse streamableHttp upstream correctly', () => {
+    const existsSpy = vi.spyOn(fs, 'existsSync').mockImplementation((p: any) => p.endsWith('conduit.test.yaml'));
+    const readSpy = vi.spyOn(fs, 'readFileSync').mockReturnValue(`
+upstreams:
+  - id: atlassian
+    type: streamableHttp
+    url: https://mcp.atlassian.com/v1/sse
+    credentials:
+      type: bearer
+      bearerToken: test-token
+`);
+
+    vi.stubEnv('CONFIG_FILE', 'conduit.test.yaml');
+    const configService = new ConfigService();
+    const upstreams = configService.get('upstreams');
+    expect(upstreams).toHaveLength(1);
+    expect(upstreams![0].type).toBe('streamableHttp');
+    expect((upstreams![0] as any).url).toBe('https://mcp.atlassian.com/v1/sse');
+
+    existsSpy.mockRestore();
+    readSpy.mockRestore();
+  });
 });