@mcp-abap-adt/llm-proxy 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +526 -0
- package/dist/agent.d.ts +49 -0
- package/dist/agent.d.ts.map +1 -0
- package/dist/agent.js +96 -0
- package/dist/agent.js.map +1 -0
- package/dist/agents/anthropic-agent.d.ts +32 -0
- package/dist/agents/anthropic-agent.d.ts.map +1 -0
- package/dist/agents/anthropic-agent.js +80 -0
- package/dist/agents/anthropic-agent.js.map +1 -0
- package/dist/agents/base.d.ts +64 -0
- package/dist/agents/base.d.ts.map +1 -0
- package/dist/agents/base.js +93 -0
- package/dist/agents/base.js.map +1 -0
- package/dist/agents/deepseek-agent.d.ts +31 -0
- package/dist/agents/deepseek-agent.d.ts.map +1 -0
- package/dist/agents/deepseek-agent.js +71 -0
- package/dist/agents/deepseek-agent.js.map +1 -0
- package/dist/agents/index.d.ts +9 -0
- package/dist/agents/index.d.ts.map +1 -0
- package/dist/agents/index.js +9 -0
- package/dist/agents/index.js.map +1 -0
- package/dist/agents/openai-agent.d.ts +32 -0
- package/dist/agents/openai-agent.d.ts.map +1 -0
- package/dist/agents/openai-agent.js +69 -0
- package/dist/agents/openai-agent.js.map +1 -0
- package/dist/agents/prompt-based-agent.d.ts +28 -0
- package/dist/agents/prompt-based-agent.d.ts.map +1 -0
- package/dist/agents/prompt-based-agent.js +62 -0
- package/dist/agents/prompt-based-agent.js.map +1 -0
- package/dist/agents/sap-core-ai-agent.d.ts +22 -0
- package/dist/agents/sap-core-ai-agent.d.ts.map +1 -0
- package/dist/agents/sap-core-ai-agent.js +20 -0
- package/dist/agents/sap-core-ai-agent.js.map +1 -0
- package/dist/cli.d.ts +53 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +313 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +18 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +24 -0
- package/dist/index.js.map +1 -0
- package/dist/llm-providers/anthropic.d.ts +21 -0
- package/dist/llm-providers/anthropic.d.ts.map +1 -0
- package/dist/llm-providers/anthropic.js +58 -0
- package/dist/llm-providers/anthropic.js.map +1 -0
- package/dist/llm-providers/base.d.ts +28 -0
- package/dist/llm-providers/base.d.ts.map +1 -0
- package/dist/llm-providers/base.js +18 -0
- package/dist/llm-providers/base.js.map +1 -0
- package/dist/llm-providers/deepseek.d.ts +21 -0
- package/dist/llm-providers/deepseek.d.ts.map +1 -0
- package/dist/llm-providers/deepseek.js +50 -0
- package/dist/llm-providers/deepseek.js.map +1 -0
- package/dist/llm-providers/index.d.ts +13 -0
- package/dist/llm-providers/index.d.ts.map +1 -0
- package/dist/llm-providers/index.js +15 -0
- package/dist/llm-providers/index.js.map +1 -0
- package/dist/llm-providers/openai.d.ts +23 -0
- package/dist/llm-providers/openai.d.ts.map +1 -0
- package/dist/llm-providers/openai.js +59 -0
- package/dist/llm-providers/openai.js.map +1 -0
- package/dist/llm-providers/sap-core-ai.d.ts +72 -0
- package/dist/llm-providers/sap-core-ai.d.ts.map +1 -0
- package/dist/llm-providers/sap-core-ai.js +114 -0
- package/dist/llm-providers/sap-core-ai.js.map +1 -0
- package/dist/mcp/client.d.ts +119 -0
- package/dist/mcp/client.d.ts.map +1 -0
- package/dist/mcp/client.js +271 -0
- package/dist/mcp/client.js.map +1 -0
- package/dist/types.d.ts +43 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +5 -0
- package/dist/types.js.map +1 -0
- package/package.json +59 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Cloud LLM Hub
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
22
|
+
|
package/README.md
ADDED
|
@@ -0,0 +1,526 @@
|
|
|
1
|
+
# LLM Proxy
|
|
2
|
+
|
|
3
|
+
Minimal LLM agent that normalizes provider access and surfaces MCP tools without executing them.
|
|
4
|
+
|
|
5
|
+
## Overview
|
|
6
|
+
|
|
7
|
+
This agent acts as a thin orchestration layer between LLM providers and MCP (Model Context Protocol) servers. It provides tool catalogs to the LLM and returns the raw LLM response to the consumer.
|
|
8
|
+
|
|
9
|
+
**Important Architecture Change:**
|
|
10
|
+
- **All LLM providers are accessed through SAP AI Core**, not directly
|
|
11
|
+
- OpenAI models → SAP AI Core → OpenAI
|
|
12
|
+
- Anthropic models → SAP AI Core → Anthropic
|
|
13
|
+
- DeepSeek models → SAP AI Core → DeepSeek
|
|
14
|
+
- The model name determines which underlying provider SAP AI Core routes to
|
|
15
|
+
|
|
16
|
+
## Features
|
|
17
|
+
|
|
18
|
+
- ✅ SAP AI Core integration (all LLM providers through SAP AI Core)
|
|
19
|
+
- ✅ MCP client integration with multiple transport protocols
|
|
20
|
+
- ✅ Stdio transport (for local processes)
|
|
21
|
+
- ✅ SSE transport (Server-Sent Events)
|
|
22
|
+
- ✅ Streamable HTTP transport (bidirectional NDJSON)
|
|
23
|
+
- ✅ Auto-detection of transport from URL
|
|
24
|
+
- ✅ Tool catalog surfacing (no tool execution at this layer)
|
|
25
|
+
- ✅ Conversation history management
|
|
26
|
+
- ✅ Raw LLM response passthrough for consumers
|
|
27
|
+
- 🔄 Streaming support (planned)
|
|
28
|
+
|
|
29
|
+
## Installation
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
npm install @mcp-abap-adt/llm-proxy
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Usage
|
|
36
|
+
|
|
37
|
+
The agent can be used in two ways:
|
|
38
|
+
|
|
39
|
+
1. **Embedded in application** - Import and use directly in your CAP service or application (same process)
|
|
40
|
+
2. **Standalone service** - Run as a separate service/process
|
|
41
|
+
|
|
42
|
+
Both modes connect to MCP servers via transport protocols (HTTP/SSE/stdio), not directly to MCP server instances.
|
|
43
|
+
|
|
44
|
+
### Embedded Usage (Same Process)
|
|
45
|
+
|
|
46
|
+
When using the agent embedded in your application (e.g., in `cloud-llm-hub` CAP service), you import it as a module:
|
|
47
|
+
|
|
48
|
+
```typescript
|
|
49
|
+
// srv/agent-service.ts
|
|
50
|
+
import { SapCoreAIAgent, SapCoreAIProvider, MCPClientWrapper } from '@mcp-abap-adt/llm-proxy';
|
|
51
|
+
import { executeHttpRequest } from '@sap-cloud-sdk/http-client';
|
|
52
|
+
|
|
53
|
+
export default class AgentService extends cds.Service {
|
|
54
|
+
private agent: SapCoreAIAgent;
|
|
55
|
+
|
|
56
|
+
async init() {
|
|
57
|
+
// Create SAP AI Core provider (all LLM providers through SAP AI Core)
|
|
58
|
+
const llmProvider = new SapCoreAIProvider({
|
|
59
|
+
destinationName: 'SAP_AI_CORE_DEST', // SAP destination for AI Core
|
|
60
|
+
model: 'gpt-4o-mini', // Model name determines which provider SAP AI Core uses
|
|
61
|
+
httpClient: async (config) => {
|
|
62
|
+
return await executeHttpRequest(
|
|
63
|
+
{ destinationName: config.destinationName },
|
|
64
|
+
{
|
|
65
|
+
method: config.method as any,
|
|
66
|
+
url: config.url,
|
|
67
|
+
headers: config.headers,
|
|
68
|
+
data: config.data,
|
|
69
|
+
}
|
|
70
|
+
);
|
|
71
|
+
},
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
// Create MCP client
|
|
75
|
+
const mcpClient = new MCPClientWrapper({
|
|
76
|
+
url: 'http://localhost:4004/mcp/stream/http', // MCP proxy endpoint
|
|
77
|
+
headers: {
|
|
78
|
+
'Authorization': 'Basic YWxpY2U6',
|
|
79
|
+
'X-SAP-Destination': 'SAP_DEV_DEST',
|
|
80
|
+
},
|
|
81
|
+
});
|
|
82
|
+
|
|
83
|
+
// Create agent
|
|
84
|
+
this.agent = new SapCoreAIAgent({
|
|
85
|
+
llmProvider,
|
|
86
|
+
mcpClient,
|
|
87
|
+
});
|
|
88
|
+
|
|
89
|
+
await this.agent.connect();
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
async chat(message: string) {
|
|
93
|
+
return await this.agent.process(message);
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
```
|
|
97
|
+
|
|
98
|
+
**Architecture Note:**
|
|
99
|
+
- The agent is imported as a module (like `@fr0ster/mcp-abap-adt`)
|
|
100
|
+
- Even when embedded in the same process, the agent connects to the MCP proxy via HTTP transport
|
|
101
|
+
- The MCP proxy embeds the `mcp-abap-adt` server instance
|
|
102
|
+
- This keeps the architecture clean: agent → MCP proxy (via HTTP) → embedded MCP server
|
|
103
|
+
|
|
104
|
+
See [Embedded Usage Guide](../../docs/LLM_AGENT_EMBEDDED_USAGE.md) for complete examples including per-request agent instances and caching strategies.
|
|
105
|
+
|
|
106
|
+
### Standalone Usage (Separate Process)
|
|
107
|
+
|
|
108
|
+
### Basic Example (Stdio Transport)
|
|
109
|
+
|
|
110
|
+
```typescript
|
|
111
|
+
import { SapCoreAIAgent, SapCoreAIProvider, MCPClientWrapper } from '@mcp-abap-adt/llm-proxy';
|
|
112
|
+
import { executeHttpRequest } from '@sap-cloud-sdk/http-client';
|
|
113
|
+
|
|
114
|
+
// Create SAP AI Core provider
|
|
115
|
+
const llmProvider = new SapCoreAIProvider({
|
|
116
|
+
destinationName: 'SAP_AI_CORE_DEST',
|
|
117
|
+
model: 'gpt-4o-mini', // Routes to OpenAI through SAP AI Core
|
|
118
|
+
httpClient: async (config) => {
|
|
119
|
+
return await executeHttpRequest(
|
|
120
|
+
{ destinationName: config.destinationName },
|
|
121
|
+
{
|
|
122
|
+
method: config.method as any,
|
|
123
|
+
url: config.url,
|
|
124
|
+
headers: config.headers,
|
|
125
|
+
data: config.data,
|
|
126
|
+
}
|
|
127
|
+
);
|
|
128
|
+
},
|
|
129
|
+
});
|
|
130
|
+
|
|
131
|
+
const mcpClient = new MCPClientWrapper({
|
|
132
|
+
transport: 'stdio',
|
|
133
|
+
command: 'node',
|
|
134
|
+
args: ['path/to/mcp-server.js'],
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
const agent = new SapCoreAIAgent({
|
|
138
|
+
llmProvider,
|
|
139
|
+
mcpClient,
|
|
140
|
+
});
|
|
141
|
+
|
|
142
|
+
await agent.connect();
|
|
143
|
+
const response = await agent.process('What tools are available?');
|
|
144
|
+
console.log(response.message);
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
### HTTP Transport (Auto-Detection)
|
|
148
|
+
|
|
149
|
+
```typescript
|
|
150
|
+
import { Agent, OpenAIProvider, MCPClientWrapper } from '@mcp-abap-adt/llm-proxy';
|
|
151
|
+
|
|
152
|
+
const llmProvider = new OpenAIProvider({
|
|
153
|
+
apiKey: process.env.OPENAI_API_KEY!,
|
|
154
|
+
model: 'gpt-4o-mini',
|
|
155
|
+
});
|
|
156
|
+
|
|
157
|
+
// Auto-detects 'stream-http' from URL
|
|
158
|
+
const mcpClient = new MCPClientWrapper({
|
|
159
|
+
url: 'http://localhost:4004/mcp/stream/http',
|
|
160
|
+
headers: {
|
|
161
|
+
'Authorization': 'Basic YWxpY2U6',
|
|
162
|
+
'Content-Type': 'application/x-ndjson',
|
|
163
|
+
},
|
|
164
|
+
});
|
|
165
|
+
|
|
166
|
+
const agent = new Agent({
|
|
167
|
+
llmProvider,
|
|
168
|
+
mcpClient,
|
|
169
|
+
});
|
|
170
|
+
|
|
171
|
+
await mcpClient.connect();
|
|
172
|
+
const sessionId = mcpClient.getSessionId(); // Get session ID for subsequent requests
|
|
173
|
+
|
|
174
|
+
const response = await agent.process('What tools are available?');
|
|
175
|
+
// The response is returned as-is; tool execution is handled by the consumer.
|
|
176
|
+
console.log(response.message);
|
|
177
|
+
```
|
|
178
|
+
|
|
179
|
+
### Explicit Transport Selection
|
|
180
|
+
|
|
181
|
+
```typescript
|
|
182
|
+
// SSE transport
|
|
183
|
+
const sseClient = new MCPClientWrapper({
|
|
184
|
+
transport: 'sse',
|
|
185
|
+
url: 'http://localhost:4004/mcp/stream/sse',
|
|
186
|
+
headers: {
|
|
187
|
+
'Authorization': 'Basic YWxpY2U6',
|
|
188
|
+
},
|
|
189
|
+
});
|
|
190
|
+
|
|
191
|
+
// Streamable HTTP transport
|
|
192
|
+
const httpClient = new MCPClientWrapper({
|
|
193
|
+
transport: 'stream-http',
|
|
194
|
+
url: 'http://localhost:4004/mcp/stream/http',
|
|
195
|
+
headers: {
|
|
196
|
+
'Authorization': 'Basic YWxpY2U6',
|
|
197
|
+
},
|
|
198
|
+
});
|
|
199
|
+
```
|
|
200
|
+
|
|
201
|
+
See [src/mcp/README.md](src/mcp/README.md) for detailed transport configuration options.
|
|
202
|
+
|
|
203
|
+
### Embedded Usage in CAP Service
|
|
204
|
+
|
|
205
|
+
The agent can be imported and used directly in CAP services, similar to how `mcp-abap-adt` is used:
|
|
206
|
+
|
|
207
|
+
```typescript
|
|
208
|
+
// srv/agent-service.ts
|
|
209
|
+
import { Agent, OpenAIProvider } from '@mcp-abap-adt/llm-proxy';
|
|
210
|
+
|
|
211
|
+
export default class AgentService extends cds.Service {
|
|
212
|
+
private agent: Agent;
|
|
213
|
+
|
|
214
|
+
async init() {
|
|
215
|
+
this.agent = new Agent({
|
|
216
|
+
llmProvider: new OpenAIProvider({
|
|
217
|
+
apiKey: process.env.OPENAI_API_KEY!,
|
|
218
|
+
}),
|
|
219
|
+
mcpConfig: {
|
|
220
|
+
url: 'http://localhost:4004/mcp/stream/http',
|
|
221
|
+
headers: {
|
|
222
|
+
'Authorization': 'Basic YWxpY2U6',
|
|
223
|
+
'X-SAP-Destination': 'SAP_DEV_DEST',
|
|
224
|
+
},
|
|
225
|
+
},
|
|
226
|
+
});
|
|
227
|
+
|
|
228
|
+
await this.agent.connect();
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
async chat(message: string) {
|
|
232
|
+
return await this.agent.process(message);
|
|
233
|
+
}
|
|
234
|
+
}
|
|
235
|
+
```
|
|
236
|
+
|
|
237
|
+
See [docs/LLM_AGENT_EMBEDDED_USAGE.md](../../docs/LLM_AGENT_EMBEDDED_USAGE.md) for complete embedded usage guide.
|
|
238
|
+
|
|
239
|
+
## Development
|
|
240
|
+
|
|
241
|
+
**Cross-Platform Development:** This project is configured for consistent behavior across Windows, Linux, and macOS. See the parent project's [Cross-Platform Development Guide](../../docs/CROSS_PLATFORM_GUIDE.md) for setup instructions and troubleshooting.
|
|
242
|
+
|
|
243
|
+
**Verify your setup:** Run `npm run verify:setup` from the root project to check cross-platform configuration.
|
|
244
|
+
|
|
245
|
+
```bash
|
|
246
|
+
# Install dependencies
|
|
247
|
+
npm install
|
|
248
|
+
|
|
249
|
+
# Setup environment (copy template and fill in your values)
|
|
250
|
+
cp .env.template .env
|
|
251
|
+
# Edit .env with your API keys and settings
|
|
252
|
+
|
|
253
|
+
# Build
|
|
254
|
+
npm run build
|
|
255
|
+
|
|
256
|
+
# Development mode (with tsx for hot reload)
|
|
257
|
+
# Will automatically load .env file if it exists
|
|
258
|
+
npm run dev
|
|
259
|
+
|
|
260
|
+
# Run test launcher (after build)
|
|
261
|
+
npm start
|
|
262
|
+
|
|
263
|
+
# Or with custom message
|
|
264
|
+
npm start "List all available ABAP programs"
|
|
265
|
+
```
|
|
266
|
+
|
|
267
|
+
### Environment Configuration
|
|
268
|
+
|
|
269
|
+
The agent supports configuration via `.env` file for easier setup:
|
|
270
|
+
|
|
271
|
+
1. Copy the template:
|
|
272
|
+
```bash
|
|
273
|
+
cp .env.template .env
|
|
274
|
+
```
|
|
275
|
+
|
|
276
|
+
2. Edit `.env` with your settings:
|
|
277
|
+
```bash
|
|
278
|
+
# SAP AI Core Configuration (required)
|
|
279
|
+
# All LLM providers are accessed through SAP AI Core
|
|
280
|
+
SAP_CORE_AI_DESTINATION=SAP_AI_CORE_DEST
|
|
281
|
+
SAP_CORE_AI_MODEL=gpt-4o-mini # Model determines provider: gpt-4o-mini → OpenAI, claude-3-5-sonnet → Anthropic
|
|
282
|
+
SAP_CORE_AI_TEMPERATURE=0.7
|
|
283
|
+
SAP_CORE_AI_MAX_TOKENS=2000
|
|
284
|
+
|
|
285
|
+
# MCP Configuration (optional, for MCP integration)
|
|
286
|
+
MCP_ENDPOINT=http://localhost:4004/mcp/stream/http
|
|
287
|
+
MCP_DISABLED=false
|
|
288
|
+
```
|
|
289
|
+
|
|
290
|
+
**Note:** Legacy direct provider configuration (OPENAI_API_KEY, etc.) is deprecated.
|
|
291
|
+
All LLM providers must be accessed through SAP AI Core.
|
|
292
|
+
|
|
293
|
+
3. Run the agent - it will automatically load `.env`:
|
|
294
|
+
```bash
|
|
295
|
+
npm run dev:llm
|
|
296
|
+
```
|
|
297
|
+
|
|
298
|
+
Environment variables from `.env` can be overridden by actual environment variables.
|
|
299
|
+
|
|
300
|
+
### Test Launcher
|
|
301
|
+
|
|
302
|
+
The agent includes a simple CLI test launcher for quick testing.
|
|
303
|
+
|
|
304
|
+
**Note:** The CLI launcher currently supports legacy direct provider configuration for testing purposes. In production, all LLM providers should be accessed through SAP AI Core.
|
|
305
|
+
|
|
306
|
+
#### Test LLM Only (Without MCP)
|
|
307
|
+
|
|
308
|
+
Test just the LLM provider without MCP integration:
|
|
309
|
+
|
|
310
|
+
**Using SAP AI Core (Recommended):**
|
|
311
|
+
```bash
|
|
312
|
+
# Set SAP AI Core destination
|
|
313
|
+
export SAP_CORE_AI_DESTINATION="SAP_AI_CORE_DEST"
|
|
314
|
+
export SAP_CORE_AI_MODEL="gpt-4o-mini" # Routes to OpenAI through SAP AI Core
|
|
315
|
+
npm run dev:llm
|
|
316
|
+
```
|
|
317
|
+
|
|
318
|
+
**Legacy Direct Provider (for testing only):**
|
|
319
|
+
```bash
|
|
320
|
+
# Basic usage - set API key and run
|
|
321
|
+
export OPENAI_API_KEY="sk-proj-your-actual-key-here"
|
|
322
|
+
npm run dev:llm
|
|
323
|
+
|
|
324
|
+
# Or inline
|
|
325
|
+
OPENAI_API_KEY="sk-proj-your-key" npm run dev:llm
|
|
326
|
+
|
|
327
|
+
# With custom message
|
|
328
|
+
export OPENAI_API_KEY="sk-proj-your-key"
|
|
329
|
+
npm run dev:llm "Hello! Can you introduce yourself?"
|
|
330
|
+
|
|
331
|
+
# With specific model
|
|
332
|
+
export OPENAI_API_KEY="sk-proj-your-key"
|
|
333
|
+
export OPENAI_MODEL="gpt-4o" # or gpt-4-turbo, gpt-4o-mini, etc.
|
|
334
|
+
npm run dev:llm
|
|
335
|
+
|
|
336
|
+
# With organization ID (for team accounts)
|
|
337
|
+
export OPENAI_API_KEY="sk-proj-your-key"
|
|
338
|
+
export OPENAI_ORG="org-your-org-id"
|
|
339
|
+
npm run dev:llm
|
|
340
|
+
|
|
341
|
+
# With project ID (for project-specific billing)
|
|
342
|
+
export OPENAI_API_KEY="sk-proj-your-key"
|
|
343
|
+
export OPENAI_PROJECT="proj-your-project-id" # or OPENAI_PRJ
|
|
344
|
+
npm run dev:llm
|
|
345
|
+
|
|
346
|
+
# Full configuration
|
|
347
|
+
export OPENAI_API_KEY="sk-proj-your-key"
|
|
348
|
+
export OPENAI_MODEL="gpt-4o"
|
|
349
|
+
export OPENAI_ORG="org-your-org-id"
|
|
350
|
+
export OPENAI_PROJECT="proj-your-project-id"
|
|
351
|
+
npm run dev:llm
|
|
352
|
+
```
|
|
353
|
+
|
|
354
|
+
**Anthropic (Claude):**
|
|
355
|
+
```bash
|
|
356
|
+
# Set provider and API key
|
|
357
|
+
export LLM_PROVIDER=anthropic
|
|
358
|
+
export ANTHROPIC_API_KEY="sk-ant-your-actual-key-here"
|
|
359
|
+
npm run dev:llm
|
|
360
|
+
|
|
361
|
+
# With custom message
|
|
362
|
+
export LLM_PROVIDER=anthropic
|
|
363
|
+
export ANTHROPIC_API_KEY="sk-ant-your-key"
|
|
364
|
+
npm run dev:llm "What can you do?"
|
|
365
|
+
|
|
366
|
+
# With specific model
|
|
367
|
+
export LLM_PROVIDER=anthropic
|
|
368
|
+
export ANTHROPIC_API_KEY="sk-ant-your-key"
|
|
369
|
+
export ANTHROPIC_MODEL="claude-3-5-sonnet-20241022" # or claude-3-opus, etc.
|
|
370
|
+
npm run dev:llm
|
|
371
|
+
```
|
|
372
|
+
|
|
373
|
+
**DeepSeek:**
|
|
374
|
+
```bash
|
|
375
|
+
# Set provider and API key
|
|
376
|
+
export LLM_PROVIDER=deepseek
|
|
377
|
+
export DEEPSEEK_API_KEY="sk-your-actual-key-here"
|
|
378
|
+
npm run dev:llm
|
|
379
|
+
|
|
380
|
+
# With custom message
|
|
381
|
+
export LLM_PROVIDER=deepseek
|
|
382
|
+
export DEEPSEEK_API_KEY="sk-your-key"
|
|
383
|
+
npm run dev:llm "Explain what you can do"
|
|
384
|
+
```
|
|
385
|
+
|
|
386
|
+
**Alternative methods:**
|
|
387
|
+
```bash
|
|
388
|
+
# Method 1: Using dedicated script (recommended)
|
|
389
|
+
export OPENAI_API_KEY="sk-proj-..."
|
|
390
|
+
npm run dev:llm
|
|
391
|
+
|
|
392
|
+
# Method 2: Using flag
|
|
393
|
+
export OPENAI_API_KEY="sk-proj-..."
|
|
394
|
+
npm run dev -- --llm-only
|
|
395
|
+
|
|
396
|
+
# Method 3: Using environment variable
|
|
397
|
+
export OPENAI_API_KEY="sk-proj-..."
|
|
398
|
+
export MCP_DISABLED=true
|
|
399
|
+
npm run dev
|
|
400
|
+
```
|
|
401
|
+
|
|
402
|
+
#### Basic Usage with OpenAI (With MCP)
|
|
403
|
+
|
|
404
|
+
```bash
|
|
405
|
+
# Method 1: Export environment variable
|
|
406
|
+
export OPENAI_API_KEY="sk-proj-..."
|
|
407
|
+
export MCP_ENDPOINT="http://localhost:4004/mcp/stream/http"
|
|
408
|
+
npm run dev
|
|
409
|
+
|
|
410
|
+
# Method 2: Inline (one-time use)
|
|
411
|
+
OPENAI_API_KEY="sk-proj-..." npm run dev
|
|
412
|
+
|
|
413
|
+
# Method 3: With custom message
|
|
414
|
+
export OPENAI_API_KEY="sk-proj-..."
|
|
415
|
+
npm run dev "What ABAP programs are available?"
|
|
416
|
+
|
|
417
|
+
# Method 4: Using .env file (if you have dotenv setup)
|
|
418
|
+
# Create .env file:
|
|
419
|
+
# OPENAI_API_KEY=sk-proj-...
|
|
420
|
+
# MCP_ENDPOINT=http://localhost:4004/mcp/stream/http
|
|
421
|
+
npm run dev
|
|
422
|
+
```
|
|
423
|
+
|
|
424
|
+
#### Complete Example
|
|
425
|
+
|
|
426
|
+
```bash
|
|
427
|
+
# From project root
|
|
428
|
+
cd submodules/llm-agent
|
|
429
|
+
|
|
430
|
+
# Set required environment variables
|
|
431
|
+
export OPENAI_API_KEY="sk-proj-your-actual-key-here"
|
|
432
|
+
export MCP_ENDPOINT="http://localhost:4004/mcp/stream/http"
|
|
433
|
+
export SAP_DESTINATION="SAP_DEV_DEST" # Optional, for SAP integration
|
|
434
|
+
|
|
435
|
+
# Optional: Set model
|
|
436
|
+
export OPENAI_MODEL="gpt-4o-mini" # or gpt-4o, gpt-4-turbo, etc.
|
|
437
|
+
|
|
438
|
+
# Run test launcher
|
|
439
|
+
npm run dev
|
|
440
|
+
|
|
441
|
+
# Or with custom message
|
|
442
|
+
npm run dev "List all available tools and describe what they do"
|
|
443
|
+
```
|
|
444
|
+
|
|
445
|
+
#### Testing with Different LLM Providers
|
|
446
|
+
|
|
447
|
+
**Anthropic (Claude):**
|
|
448
|
+
```bash
|
|
449
|
+
export LLM_PROVIDER=anthropic
|
|
450
|
+
export ANTHROPIC_API_KEY="sk-ant-your-key-here"
|
|
451
|
+
export ANTHROPIC_MODEL="claude-3-5-sonnet-20241022" # Optional
|
|
452
|
+
npm run dev
|
|
453
|
+
```
|
|
454
|
+
|
|
455
|
+
**DeepSeek:**
|
|
456
|
+
```bash
|
|
457
|
+
export LLM_PROVIDER=deepseek
|
|
458
|
+
export DEEPSEEK_API_KEY="sk-your-key-here"
|
|
459
|
+
export DEEPSEEK_MODEL="deepseek-chat" # Optional
|
|
460
|
+
npm run dev
|
|
461
|
+
```
|
|
462
|
+
|
|
463
|
+
#### Example Output
|
|
464
|
+
|
|
465
|
+
```
|
|
466
|
+
🤖 LLM Proxy Test Launcher v0.0.1
|
|
467
|
+
|
|
468
|
+
📋 Configuration:
|
|
469
|
+
LLM Provider: openai
|
|
470
|
+
MCP Endpoint: http://localhost:4004/mcp/stream/http
|
|
471
|
+
Test Message: What tools are available?
|
|
472
|
+
|
|
473
|
+
✅ Created OpenAI provider
|
|
474
|
+
✅ Created MCP client
|
|
475
|
+
|
|
476
|
+
✅ Created agent instance
|
|
477
|
+
Agent type: OpenAIAgent
|
|
478
|
+
|
|
479
|
+
🔌 Connecting to MCP server...
|
|
480
|
+
✅ Connected to MCP server
|
|
481
|
+
|
|
482
|
+
📦 Available tools: 31
|
|
483
|
+
- GetProgram: Retrieve ABAP program source code...
|
|
484
|
+
- GetClass: Retrieve ABAP class source code...
|
|
485
|
+
- GetFunction: Retrieve ABAP function module...
|
|
486
|
+
... and 28 more
|
|
487
|
+
|
|
488
|
+
💬 Processing message: "What tools are available?"
|
|
489
|
+
|
|
490
|
+
📤 Response:
|
|
491
|
+
────────────────────────────────────────────────────────────
|
|
492
|
+
I can see you have 31 tools available for working with ABAP systems...
|
|
493
|
+
|
|
494
|
+
⏱️ Duration: 2341ms
|
|
495
|
+
|
|
496
|
+
📜 Conversation history: 4 messages
|
|
497
|
+
|
|
498
|
+
✅ Test completed successfully!
|
|
499
|
+
```
|
|
500
|
+
|
|
501
|
+
The test launcher will:
|
|
502
|
+
- Connect to MCP server
|
|
503
|
+
- List available tools
|
|
504
|
+
- Process a test message
|
|
505
|
+
- Show response
|
|
506
|
+
- Display conversation history
|
|
507
|
+
|
|
508
|
+
## Tool Execution Responsibility
|
|
509
|
+
|
|
510
|
+
The agent does not execute tools. It only:
|
|
511
|
+
- Fetches MCP tool catalogs
|
|
512
|
+
- Passes tool definitions to the LLM
|
|
513
|
+
- Returns the raw LLM response to the consumer
|
|
514
|
+
|
|
515
|
+
If your application needs tool execution, parse the model output in the consumer layer and call MCP tools there.
|
|
516
|
+
|
|
517
|
+
## Architecture
|
|
518
|
+
|
|
519
|
+
- `src/agents/` - Agent implementations (BaseAgent, SapCoreAIAgent, etc.)
|
|
520
|
+
- `src/llm-providers/` - LLM provider implementations (SapCoreAIProvider)
|
|
521
|
+
- `src/mcp/` - MCP client wrapper
|
|
522
|
+
- `src/types.ts` - TypeScript type definitions
|
|
523
|
+
|
|
524
|
+
## License
|
|
525
|
+
|
|
526
|
+
MIT
|
package/dist/agent.d.ts
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core Agent Orchestrator
|
|
3
|
+
*
|
|
4
|
+
* Coordinates between LLM provider and MCP tools
|
|
5
|
+
*/
|
|
6
|
+
import type { LLMProvider } from './llm-providers/base.js';
|
|
7
|
+
import { type MCPClientConfig, MCPClientWrapper } from './mcp/client.js';
|
|
8
|
+
import type { AgentResponse, Message } from './types.js';
|
|
9
|
+
/**
 * Configuration for the {@link Agent} orchestrator.
 */
export interface AgentConfig {
    /** LLM provider used to generate chat completions. */
    llmProvider: LLMProvider;
    /**
     * MCP client instance (if provided, will be used directly)
     * If not provided, will be created from mcpConfig
     */
    mcpClient?: MCPClientWrapper;
    /**
     * Direct MCP configuration (used if mcpClient is not provided)
     * If both mcpClient and mcpConfig are provided, mcpClient takes precedence
     */
    mcpConfig?: MCPClientConfig;
    /**
     * NOTE(review): declared but not read by the emitted Agent implementation
     * in dist/agent.js — confirm intended use before relying on it.
     */
    maxIterations?: number;
}
|
|
23
|
+
/**
 * Thin orchestration layer between an LLM provider and an MCP client.
 * Surfaces the MCP tool catalog to the model; does not execute tools.
 */
export declare class Agent {
    private llmProvider;
    private mcpClient;
    private conversationHistory;
    constructor(config: AgentConfig);
    /**
     * Initialize MCP client connection (call this before using the agent)
     */
    connect(): Promise<void>;
    /**
     * Process a user message and return agent response
     */
    process(userMessage: string): Promise<AgentResponse>;
    /**
     * Build system message with tool definitions
     */
    private buildSystemMessage;
    /**
     * Clear conversation history
     */
    clearHistory(): void;
    /**
     * Get conversation history
     */
    getHistory(): Message[];
}
|
|
49
|
+
//# sourceMappingURL=agent.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"agent.d.ts","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,yBAAyB,CAAC;AAC3D,OAAO,EAAE,KAAK,eAAe,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACzE,OAAO,KAAK,EAAE,aAAa,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAEzD,MAAM,WAAW,WAAW;IAC1B,WAAW,EAAE,WAAW,CAAC;IACzB;;;OAGG;IACH,SAAS,CAAC,EAAE,gBAAgB,CAAC;IAC7B;;;OAGG;IACH,SAAS,CAAC,EAAE,eAAe,CAAC;IAC5B,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,qBAAa,KAAK;IAChB,OAAO,CAAC,WAAW,CAAc;IACjC,OAAO,CAAC,SAAS,CAAmB;IACpC,OAAO,CAAC,mBAAmB,CAAiB;gBAEhC,MAAM,EAAE,WAAW;IAe/B;;OAEG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAI9B;;OAEG;IACG,OAAO,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,CAAC;IAyC1D;;OAEG;IACH,OAAO,CAAC,kBAAkB;IAc1B;;OAEG;IACH,YAAY,IAAI,IAAI;IAIpB;;OAEG;IACH,UAAU,IAAI,OAAO,EAAE;CAGxB"}
|
package/dist/agent.js
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core Agent Orchestrator
|
|
3
|
+
*
|
|
4
|
+
* Coordinates between LLM provider and MCP tools
|
|
5
|
+
*/
|
|
6
|
+
import { MCPClientWrapper } from './mcp/client.js';
|
|
7
|
+
/**
 * Thin orchestration layer between an LLM provider and an MCP client.
 *
 * The agent surfaces the MCP tool catalog to the model via the system prompt
 * and returns the raw LLM response; tool execution is intentionally left to
 * the consumer.
 */
export class Agent {
    llmProvider;
    mcpClient;
    conversationHistory = [];
    /**
     * @param {object} config
     * @param {object} config.llmProvider - Provider exposing `chat(messages)`.
     * @param {object} [config.mcpClient] - Pre-built MCP client; takes
     *   precedence over `mcpConfig` when both are supplied.
     * @param {object} [config.mcpConfig] - Options used to construct an
     *   `MCPClientWrapper` when no client instance is supplied.
     * @throws {Error} When neither `mcpClient` nor `mcpConfig` is provided.
     */
    constructor(config) {
        this.llmProvider = config.llmProvider;
        // Initialize MCP client: explicit instance wins over raw config.
        if (config.mcpClient) {
            this.mcpClient = config.mcpClient;
        }
        else if (config.mcpConfig) {
            this.mcpClient = new MCPClientWrapper(config.mcpConfig);
        }
        else {
            throw new Error('MCP client configuration required. Provide either mcpClient or mcpConfig.');
        }
    }
    /**
     * Initialize MCP client connection (call this before using the agent).
     * @returns {Promise<void>}
     */
    async connect() {
        await this.mcpClient.connect();
    }
    /**
     * Process a user message and return the agent response.
     *
     * Errors are reported via the `error` field rather than thrown. On
     * failure the user turn that was optimistically recorded is rolled back,
     * so a retry of the same message does not leave a duplicate, answerless
     * entry in the conversation history (fixes dangling-turn bug).
     *
     * @param {string} userMessage - The user's input for this turn.
     * @returns {Promise<{message: string, raw?: unknown, error?: string}>}
     */
    async process(userMessage) {
        // Remember where history stood so the turn can be rolled back on error.
        const historyLengthBefore = this.conversationHistory.length;
        try {
            // Add user message to history
            this.conversationHistory.push({
                role: 'user',
                content: userMessage,
            });
            // Get available tools from MCP (catalog only — never executed here)
            const tools = await this.mcpClient.listTools();
            // Build system message with tool definitions
            const systemMessage = this.buildSystemMessage(tools);
            // Prepare messages for LLM
            const messages = [
                { role: 'system', content: systemMessage },
                ...this.conversationHistory,
            ];
            // Get LLM response
            const llmResponse = await this.llmProvider.chat(messages);
            // Add assistant response to history
            this.conversationHistory.push({
                role: 'assistant',
                content: llmResponse.content,
            });
            return {
                message: llmResponse.content,
                raw: llmResponse.raw,
            };
        }
        catch (error) {
            // Roll back the dangling user turn so history stays consistent.
            this.conversationHistory.length = historyLengthBefore;
            // Guard with ?. — a thrown non-object must not crash the handler.
            return {
                message: '',
                error: error?.message || 'Agent processing failed',
            };
        }
    }
    /**
     * Build the system message embedding the tool catalog.
     * @param {Array<{name: string, description?: string}>} tools
     * @returns {string}
     */
    buildSystemMessage(tools) {
        const toolDescriptions = tools
            .map((tool) => {
            return `- ${tool.name}: ${tool.description || 'No description'}`;
        })
            .join('\n');
        return `You are a helpful assistant with access to the following tools:

${toolDescriptions}

If using a tool is required, describe the tool call and its parameters in your response.`;
    }
    /**
     * Clear conversation history.
     */
    clearHistory() {
        this.conversationHistory = [];
    }
    /**
     * Get a defensive copy of the conversation history.
     * @returns {Array<{role: string, content: string}>}
     */
    getHistory() {
        return [...this.conversationHistory];
    }
}
|
|
96
|
+
//# sourceMappingURL=agent.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,EAAwB,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AAkBzE,MAAM,OAAO,KAAK;IACR,WAAW,CAAc;IACzB,SAAS,CAAmB;IAC5B,mBAAmB,GAAc,EAAE,CAAC;IAE5C,YAAY,MAAmB;QAC7B,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;QAEtC,wBAAwB;QACxB,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;YACrB,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC;QACpC,CAAC;aAAM,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;YAC5B,IAAI,CAAC,SAAS,GAAG,IAAI,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;QAC1D,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,KAAK,CACb,2EAA2E,CAC5E,CAAC;QACJ,CAAC;IACH,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,OAAO;QACX,MAAM,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,CAAC;IACjC,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,OAAO,CAAC,WAAmB;QAC/B,IAAI,CAAC;YACH,8BAA8B;YAC9B,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC;gBAC5B,IAAI,EAAE,MAAM;gBACZ,OAAO,EAAE,WAAW;aACrB,CAAC,CAAC;YAEH,+BAA+B;YAC/B,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC;YAE/C,6CAA6C;YAC7C,MAAM,aAAa,GAAG,IAAI,CAAC,kBAAkB,CAAC,KAAK,CAAC,CAAC;YAErD,2BAA2B;YAC3B,MAAM,QAAQ,GAAc;gBAC1B,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,aAAa,EAAE;gBAC1C,GAAG,IAAI,CAAC,mBAAmB;aAC5B,CAAC;YAEF,mBAAmB;YACnB,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YAE1D,oCAAoC;YACpC,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC;gBAC5B,IAAI,EAAE,WAAW;gBACjB,OAAO,EAAE,WAAW,CAAC,OAAO;aAC7B,CAAC,CAAC;YAEH,OAAO;gBACL,OAAO,EAAE,WAAW,CAAC,OAAO;gBAC5B,GAAG,EAAE,WAAW,CAAC,GAAG;aACrB,CAAC;QACJ,CAAC;QAAC,OAAO,KAAU,EAAE,CAAC;YACpB,OAAO;gBACL,OAAO,EAAE,EAAE;gBACX,KAAK,EAAE,KAAK,CAAC,OAAO,IAAI,yBAAyB;aAClD,CAAC;QACJ,CAAC;IACH,CAAC;IAED;;OAEG;IACK,kBAAkB,CAAC,KAAY;QACrC,MAAM,gBAAgB,GAAG,KAAK;aAC3B,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;YACZ,OAAO,KAAK,IAAI,CAAC,IAAI,KAAK,IAAI,CAAC,WAAW,IAAI,gBAAgB,EAAE,CAAC;QACnE,CAAC,CAAC;aACD,IAAI,CAAC,IAAI,CAAC,CAAC;QAEd,OAAO;;EAET,gBAAgB;;yFAEuE,CAAC;IACxF,CAAC;IAED;;OAEG;IACH,YAAY;QACV,IAAI,CAAC,mBAAmB,GAAG,EAAE,CAAC;IAChC,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,GAAG,IAAI,CAAC,mBAAmB,CAAC,CAAC;IACvC,CAAC;CACF"}
|