hac-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +41 -0
- package/README.md +145 -0
- package/bin/hac-mcp.js +88 -0
- package/hac.js +320 -0
- package/package.json +53 -0
- package/server.js +276 -0
- package/static/app.js +650 -0
- package/static/index.html +211 -0
- package/static/style.css +282 -0
- package/storage.js +54 -0
- package/tools/context.js +107 -0
- package/tools/flexible_search.js +161 -0
- package/tools/get_type_info.js +188 -0
- package/tools/groovy_execute.js +60 -0
- package/tools/impex_import.js +180 -0
- package/tools/index.js +40 -0
- package/tools/list_cronjobs.js +74 -0
- package/tools/list_environments.js +25 -0
- package/tools/media_read.js +86 -0
- package/tools/media_write.js +122 -0
- package/tools/read_property.js +51 -0
- package/tools/resolve_pk.js +84 -0
- package/tools/run_cronjob.js +71 -0
- package/tools/search_type.js +40 -0
- package/tools/zodLoose.js +26 -0
- package/type-index.js +92 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Yunus Emre Gül
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
22
|
+
|
|
23
|
+
Commons Clause License Condition
|
|
24
|
+
|
|
25
|
+
The Software is provided to you by the Licensor under the License, as defined
|
|
26
|
+
above, subject to the following condition:
|
|
27
|
+
|
|
28
|
+
Without limiting other conditions in the License, the grant of rights under
|
|
29
|
+
the License will not include, and the License does not grant to you, the right
|
|
30
|
+
to Sell the Software.
|
|
31
|
+
|
|
32
|
+
For purposes of the foregoing, "Sell" means practicing any or all of the rights
|
|
33
|
+
granted to you under the License to provide to third parties, for a fee or other
|
|
34
|
+
consideration (including without limitation fees for hosting or consulting/
|
|
35
|
+
support services related to the Software), a product or service whose value
|
|
36
|
+
derives, entirely or substantially, from the functionality of the Software.
|
|
37
|
+
|
|
38
|
+
Any license notice or attribution required by the License must also include
|
|
39
|
+
this Commons Clause License Condition notice.
|
|
40
|
+
|
|
41
|
+
Licensor: Yunus Emre Gül
|
package/README.md
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
# hac-mcp
|
|
2
|
+
|
|
3
|
+
A [Model Context Protocol (MCP)](https://modelcontextprotocol.io/) server that provides AI assistants (like Claude) with programmatic access to SAP Commerce Cloud's **Hybris Administration Console (HAC)**. It enables automated FlexibleSearch queries, ImpEx imports, Groovy script execution, and system administration tasks across multiple environments.
|
|
4
|
+
|
|
5
|
+
It authenticates with HAC using your existing credentials — no backend changes or additional setup required. Permitted operations are configured per environment, so the AI can only do what you explicitly allow.
|
|
6
|
+
|
|
7
|
+
## What You Can Do
|
|
8
|
+
|
|
9
|
+
- *"Inspect the PromotionRule with code SUMMER25 in staging and recreate it in my local environment"*
|
|
10
|
+
- *"My ImpEx is failing on staging, check the actual values in production and fill in the correct ones"*
|
|
11
|
+
- *"Find all orders stuck in WAIT status for more than 3 days in production and give me a summary"*
|
|
12
|
+
- *"Write a Groovy script to do ... and run it on local first, if it works I will approve it for staging"*
|
|
13
|
+
- *"This code is not working as expected, can you check its edge cases using Groovy with real data on staging?"*
|
|
14
|
+
- *"I want to test this CronJob on staging, it has a media field that accepts a CSV in txt format with source-product and target-product columns. Create multiple test medias, write them, run the job for each case, and validate the results with FlexibleSearch"*
|
|
15
|
+
|
|
16
|
+

|
|
17
|
+
|
|
18
|
+
## Features
|
|
19
|
+
|
|
20
|
+
- **Multi-environment support**: configure and switch between local, staging, and production HAC instances
|
|
21
|
+
- **Fine-grained permissions**: control which operations are allowed per environment
|
|
22
|
+
- **Web UI**: browser-based management console for adding/editing environments and monitoring activity
|
|
23
|
+
- **Real-time logging**: live HAC request and MCP tool execution logs via SSE
|
|
24
|
+
- **Type search**: trigram-based fuzzy search for SAP Commerce type names with per-environment caching
|
|
25
|
+
- **FlexSearch error recovery**: when a query fails due to an unknown field or type, valid field names are fetched and returned alongside the error so the AI can correct and retry without manual intervention
|
|
26
|
+
- **ImpEx validation and enrichment**: scripts are pre-validated for missing mandatory fields before import runs, and any post-import attribute errors are resolved to valid field lists on the fly so the AI can fix and retry the script itself
|
|
27
|
+
|
|
28
|
+
## Tools
|
|
29
|
+
|
|
30
|
+
| Tool | Description |
|
|
31
|
+
|------|-------------|
|
|
32
|
+
| `list_environments` | List all configured HAC environments |
|
|
33
|
+
| `flexible_search` | Execute FlexibleSearch queries |
|
|
34
|
+
| `search_type` | Fuzzy search for type names |
|
|
35
|
+
| `get_type_info` | Retrieve type metadata, attributes, and relationships |
|
|
36
|
+
| `resolve_pk` | Resolve opaque PKs to type code and unique field values |
|
|
37
|
+
| `impex_import` | Execute ImpEx import scripts |
|
|
38
|
+
| `groovy_execute` | Execute Groovy scripts |
|
|
39
|
+
| `read_property` | Search HAC configuration properties by key/value |
|
|
40
|
+
| `media_read` | Read text/plain media content |
|
|
41
|
+
| `media_write` | Create or overwrite media models |
|
|
42
|
+
| `list_cronjobs` | List CronJobs with optional filtering |
|
|
43
|
+
| `run_cronjob` | Execute a CronJob synchronously and wait for completion |
|
|
44
|
+
|
|
45
|
+
## Installation
|
|
46
|
+
|
|
47
|
+
### Via npx (recommended)
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
npx hac-mcp
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
### Global install
|
|
54
|
+
|
|
55
|
+
```bash
|
|
56
|
+
npm install -g hac-mcp
|
|
57
|
+
hac-mcp
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
The server starts on `http://localhost:18432` by default.
|
|
61
|
+
|
|
62
|
+
```
|
|
63
|
+
Options:
|
|
64
|
+
-p, --port Port to listen on (default: 18432)
|
|
65
|
+
-v, --version Print version
|
|
66
|
+
-h, --help Show help
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
Environment configuration is stored in `~/.hac-mcp/environments.json`.
|
|
70
|
+
|
|
71
|
+
### Auto-start on system boot (optional, recommended)
|
|
72
|
+
|
|
73
|
+
To keep the server running across restarts, use the `startup` subcommand (requires [PM2](https://pm2.keymetrics.io/)):
|
|
74
|
+
|
|
75
|
+
```bash
|
|
76
|
+
npx hac-mcp startup
|
|
77
|
+
npx hac-mcp startup --port 4000 # with custom port
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
This registers the server with PM2 and runs `pm2 startup`, which prints a one-time command to run (may require `sudo` on macOS/Linux) to hook PM2 into your OS boot sequence.
|
|
81
|
+
|
|
82
|
+
## Configuration
|
|
83
|
+
|
|
84
|
+
### Via Web UI
|
|
85
|
+
|
|
86
|
+
Open `http://localhost:18432/` in your browser, click **+ Add Environment**, fill in the details (connection is tested automatically as you type), then click **Save**.
|
|
87
|
+
|
|
88
|
+

|
|
89
|
+
|
|
90
|
+
### Environment options
|
|
91
|
+
|
|
92
|
+
| Field | Type | Default | Description |
|
|
93
|
+
|-------|------|---------|-------------|
|
|
94
|
+
| `name` | string | | Display name |
|
|
95
|
+
| `description` | string | | Optional notes |
|
|
96
|
+
| `url` | string | | HAC base URL (e.g. `https://host:9002/`) |
|
|
97
|
+
| `username` | string | | HAC login username |
|
|
98
|
+
| `password` | string | | HAC login password |
|
|
99
|
+
| `dbType` | string | `MSSQL` | Database dialect: `MSSQL` or `MySQL` |
|
|
100
|
+
| `allowFlexSearch` | boolean | `true` | Allow FlexibleSearch queries |
|
|
101
|
+
| `allowImpexImport` | boolean | `false` | Allow ImpEx imports |
|
|
102
|
+
| `allowGroovyExecution` | boolean | `false` | Allow Groovy script execution |
|
|
103
|
+
| `allowGroovyCommitMode` | boolean | `false` | Allow Groovy scripts to commit changes |
|
|
104
|
+
| `allowReadProperty` | boolean | `true` | Allow reading platform config properties |
|
|
105
|
+
|
|
106
|
+
> **Tip for production:** Disable `allowImpexImport`, `allowGroovyCommitMode`, or both to prevent accidental data modifications.
|
|
107
|
+
|
|
108
|
+
## Using with Claude
|
|
109
|
+
|
|
110
|
+
Add the following to your MCP client configuration:
|
|
111
|
+
|
|
112
|
+
```json
|
|
113
|
+
{
|
|
114
|
+
"mcpServers": {
|
|
115
|
+
"hac-mcp": {
|
|
116
|
+
"url": "http://localhost:18432/mcp/sse"
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
## Project Structure
|
|
123
|
+
|
|
124
|
+
```
|
|
125
|
+
hac-mcp/
|
|
126
|
+
├── server.js # Express app, MCP SSE endpoint, REST API
|
|
127
|
+
├── hac.js # HAC client (login, FlexSearch, ImpEx, Groovy, etc.)
|
|
128
|
+
├── storage.js # Environment config persistence
|
|
129
|
+
├── type-index.js # Trigram fuzzy type search with caching
|
|
130
|
+
├── tools/
|
|
131
|
+
│ ├── index.js # Tool registry
|
|
132
|
+
│ ├── context.js # Shared runtime state (sessions, logging)
|
|
133
|
+
│ ├── zodLoose.js # Loose Zod validators (string -> number/bool)
|
|
134
|
+
│ └── *.js # One file per MCP tool
|
|
135
|
+
└── static/
|
|
136
|
+
├── index.html # Management console UI
|
|
137
|
+
├── app.js # UI logic
|
|
138
|
+
└── style.css # Styles
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
## Security Notes
|
|
142
|
+
|
|
143
|
+
- Credentials are stored in plaintext in `~/.hac-mcp/environments.json`. Avoid exposing this file.
|
|
144
|
+
- SSL certificate verification is disabled for HAC connections: be aware of this in untrusted networks.
|
|
145
|
+
- Restrict write permissions (`allowImpexImport`, `allowGroovyCommitMode`) on production environments.
|
package/bin/hac-mcp.js
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
#!/usr/bin/env node
// CLI entry point for hac-mcp: parses arguments, handles --help/--version,
// the `startup` subcommand (PM2 registration), and otherwise boots the server.
import { createRequire } from 'module';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';

const __dirname = dirname(fileURLToPath(import.meta.url));
const { version } = createRequire(import.meta.url)(join(__dirname, '../package.json'));

// ─── Arg parsing ──────────────────────────────────────────────────────────────
const args = process.argv.slice(2);

// Options that consume the following token as their value.
const VALUE_OPTIONS = new Set(['--port', '-p']);

/** True when the exact flag token is present. */
function flag(name) {
  return args.includes(name);
}

/**
 * Return the value following `name` (or `short`), or null when the option is
 * absent. Exits with an error when the option is present but has no value.
 */
function option(name, short) {
  const idx = args.findIndex(a => a === name || (short && a === short));
  if (idx === -1) return null;
  const val = args[idx + 1];
  if (!val || val.startsWith('-')) { console.error(`Missing value for ${name}`); process.exit(1); }
  return val;
}

if (flag('--help') || flag('-h')) {
  console.log(`
hac-mcp v${version}

A local MCP server for SAP Commerce Cloud HAC (Hybris Administration Console).

Usage:
  hac-mcp [options]
  hac-mcp startup [options]

Commands:
  startup     Register hac-mcp as a startup service via PM2

Options:
  -p, --port     Port to listen on (default: 18432, env: PORT)
  -v, --version  Print version
  -h, --help     Show this help
`.trim());
  process.exit(0);
}

if (flag('--version') || flag('-v')) {
  console.log(version);
  process.exit(0);
}

const port = option('--port', '-p');
if (port) process.env.PORT = port;

// ─── Commands ─────────────────────────────────────────────────────────────────
// The command is the first bare token that is neither a flag nor the value of
// a value-taking option. (The previous scan treated option values as commands,
// so `hac-mcp --port 4000` was rejected with "Unknown command: 4000".)
let command;
for (let i = 0; i < args.length; i++) {
  const arg = args[i];
  if (arg.startsWith('-')) {
    if (VALUE_OPTIONS.has(arg)) i++; // skip this option's value token
    continue;
  }
  command = arg;
  break;
}

if (command === 'startup') {
  const { execSync } = await import('child_process');

  try {
    execSync('pm2 --version', { stdio: 'ignore' });
  } catch {
    console.error('PM2 is not installed. Run: npm install -g pm2');
    process.exit(1);
  }

  try {
    // PM2 snapshots the environment of the launching process at `pm2 start`,
    // so a custom port is passed via the child env. (The CLI flag `--env`
    // selects an ecosystem-file profile; it does not set variables.)
    execSync('pm2 start hac-mcp --name hac-mcp', {
      stdio: 'inherit',
      env: port ? { ...process.env, PORT: port } : process.env,
    });
    execSync('pm2 save', { stdio: 'inherit' });
    console.log('');
    const result = execSync('pm2 startup', { encoding: 'utf8' });
    console.log(result);
    console.log('Copy and run the command above to complete startup registration.');
  } catch (e) {
    console.error('Failed to set up PM2 startup:', e.message);
    process.exit(1);
  }
} else if (command !== undefined) {
  console.error(`Unknown command: ${command}`);
  console.error('Run hac-mcp --help for usage.');
  process.exit(1);
} else {
  await import('../server.js');
}
|
package/hac.js
ADDED
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
import https from 'https';
|
|
2
|
+
import dns from 'dns';
|
|
3
|
+
import { URL } from 'url';
|
|
4
|
+
|
|
5
|
+
const dnsLookup = dns.promises.lookup;
|
|
6
|
+
|
|
7
|
+
// Per-hostname: resolved IP + keepalive agent (avoids repeated mDNS lookups)
|
|
8
|
+
const dnsCache = {};
|
|
9
|
+
const agentCache = {};
|
|
10
|
+
|
|
11
|
+
// Broadcast logger hook - server.js replaces the no-op default so HAC
// activity can be streamed to SSE clients.
let _log = () => {};

/** Install the broadcast logger used by all HAC operations. */
export function setHacLogger(fn) {
  _log = fn;
}

/** Emit a log entry, prefixing the environment name when one is given. */
function log(level, msg, envName) {
  _log({
    level,
    msg: envName ? `[${envName}] ${msg}` : msg,
    ts: Date.now(),
  });
}
|
|
18
|
+
|
|
19
|
+
/**
 * Resolve a hostname to an IPv4 address, memoizing the result in dnsCache.
 * When resolution fails, the literal hostname is cached and returned so the
 * HTTPS layer can attempt its own lookup.
 */
async function resolveHost(hostname) {
  const cached = dnsCache[hostname];
  if (cached) return cached;
  let resolved;
  try {
    const { address } = await dnsLookup(hostname, { family: 4 });
    log('info', `Resolved ${hostname} → ${address}`);
    resolved = address;
  } catch {
    // Fall back to the hostname itself; node will resolve it per-request.
    resolved = hostname;
  }
  dnsCache[hostname] = resolved;
  return resolved;
}
|
|
31
|
+
|
|
32
|
+
/**
 * Lazily create (and cache) a keep-alive HTTPS agent per hostname.
 * NOTE: rejectUnauthorized=false deliberately disables TLS verification for
 * HAC connections (see the README security notes).
 */
function getAgent(hostname) {
  let agent = agentCache[hostname];
  if (!agent) {
    agent = new https.Agent({ rejectUnauthorized: false, keepAlive: true });
    agentCache[hostname] = agent;
  }
  return agent;
}
|
|
38
|
+
|
|
39
|
+
// Hard ceiling for any single HAC request (some consoles, e.g. ImpEx, are slow).
const HTTP_TIMEOUT_MS = 60_000;

// Perform one HTTPS request and buffer the whole response body as a string.
// `options` is a node https.request options object (built by opts()); the
// private `_envName` key is only used to tag log lines. An optional request
// `body` string is written before ending the request.
// Resolves with { status, headers, body }; rejects on socket error or timeout.
function httpRequest(options, body = null) {
  return new Promise((resolve, reject) => {
    const t0 = Date.now(); // start timestamp, for the duration in the log line
    const req = https.request(options, (res) => {
      let data = '';
      res.on('data', chunk => (data += chunk));
      res.on('end', () => {
        const envTag = options._envName ? `[${options._envName}] ` : '';
        log('http', `${envTag}${options.method} ${options.path} → ${res.statusCode} (${Date.now() - t0}ms)`);
        resolve({ status: res.statusCode, headers: res.headers, body: data });
      });
    });
    // Destroying with an Error routes the failure through the 'error' handler
    // below, which rejects the promise.
    req.setTimeout(HTTP_TIMEOUT_MS, () => {
      req.destroy(new Error(`Request timed out after ${HTTP_TIMEOUT_MS}ms`));
    });
    req.on('error', err => {
      const envTag = options._envName ? `[${options._envName}] ` : '';
      log('error', `${envTag}${options.method} ${options.path} → ${err.message}`);
      reject(err);
    });
    if (body) req.write(body);
    req.end();
  });
}
|
|
65
|
+
|
|
66
|
+
/**
 * Pull the Spring Security CSRF token out of a HAC page.
 * Checks the hidden form input first, then the <meta> tag variant.
 * Returns the token string, or null when neither pattern matches.
 */
function extractCsrf(html) {
  const inputMatch = /name="_csrf"\s+value="([^"]+)"/.exec(html);
  if (inputMatch) return inputMatch[1];
  const metaMatch = /<meta name="_csrf" content="([^"]+)"/.exec(html);
  return metaMatch ? metaMatch[1] : null;
}
|
|
71
|
+
|
|
72
|
+
/**
 * Collect cookie name/value pairs from a response's Set-Cookie headers.
 * Attributes after the first ';' (Path, HttpOnly, …) are discarded.
 */
function extractCookies(headers) {
  const result = {};
  for (const raw of (headers['set-cookie'] || [])) {
    const firstPart = raw.split(';')[0];
    const sep = firstPart.indexOf('=');
    if (sep === -1) continue;
    const name = firstPart.slice(0, sep).trim();
    result[name] = firstPart.slice(sep + 1).trim();
  }
  return result;
}
|
|
81
|
+
|
|
82
|
+
/** Serialize a cookie map into a single Cookie request-header value. */
function cookieStr(cookies) {
  const parts = [];
  for (const [name, value] of Object.entries(cookies)) {
    parts.push(`${name}=${value}`);
  }
  return parts.join('; ');
}
|
|
85
|
+
|
|
86
|
+
/**
 * Decode the HTML entities HAC emits in query results and ImpEx output.
 * Non-strings pass through untouched.
 * Handles the common named/numeric entities; `&amp;` is decoded LAST so that
 * double-escaped input (e.g. "&amp;lt;") does not get decoded twice.
 */
function htmlDecode(v) {
  if (typeof v !== 'string') return v;
  return v
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&#34;/g, '"')
    .replace(/&apos;/g, "'")
    .replace(/&#39;/g, "'")
    .replace(/&amp;/g, '&');
}
|
|
90
|
+
|
|
91
|
+
/**
 * Build an https.request options object for a HAC session.
 * Connects to the cached IP while sending SNI/Host for the real hostname;
 * `extra` headers are merged last and may override the defaults.
 */
function opts(session, path, method, extra = {}) {
  const headers = {
    Host: session.host,
    'User-Agent': 'hac-mcp/1.0',
    Cookie: Object.entries(session.cookies).map(([k, v]) => `${k}=${v}`).join('; '),
    ...extra,
  };
  return {
    hostname: session.ip,
    servername: session.host,
    port: session.port,
    path,
    method,
    agent: session.agent,
    _envName: session.envName,
    headers,
  };
}
|
|
108
|
+
|
|
109
|
+
// Authenticate against HAC's Spring Security form login.
// Flow: GET {ctx}/login (session cookie + CSRF token) →
// POST {ctx}/j_spring_security_check → follow the redirect and verify the
// landing page is not the login form again.
// Returns the session object ({ host, ip, port, ctx, agent, cookies, envName })
// consumed by every other function in this module. Throws on a missing CSRF
// token or when the post-login page still looks unauthenticated.
export async function login(baseUrl, username, password, envName) {
  const url = new URL(baseUrl);
  const host = url.hostname;
  const port = parseInt(url.port) || 443;
  const ctx = url.pathname.replace(/\/+$/, ''); // context path, no trailing slash

  log('info', `Logging in to ${host}${ctx || '/'} as ${username}`, envName);

  const ip = await resolveHost(host);
  const agent = getAgent(host);
  // Pre-login session: used only to fetch the login page (no cookies yet).
  const proto = { host, ip, port, ctx, agent, cookies: {}, envName };

  log('info', `Fetching login page - getting CSRF token + session cookie`, envName);
  const loginPage = await httpRequest(opts(proto, ctx + '/login', 'GET'));
  const cookies = extractCookies(loginPage.headers);
  const csrf = extractCsrf(loginPage.body);
  if (!csrf) throw new Error('Could not extract CSRF token from login page');
  log('info', `Got CSRF token: ${csrf.slice(0, 16)}…`, envName);

  log('info', `Submitting credentials for ${username}`, envName);
  const body = new URLSearchParams({ j_username: username, j_password: password, _csrf: csrf }).toString();
  const loginRes = await httpRequest(opts({ ...proto, cookies }, ctx + '/j_spring_security_check', 'POST', {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': Buffer.byteLength(body),
    Referer: `https://${host}${ctx}/login`,
  }), body);

  // Spring may rotate the session cookie on successful auth; merge both sets
  // (the post-login cookies win on conflict).
  const sessionCookies = { ...cookies, ...extractCookies(loginRes.headers) };
  const session = { host, ip, port, ctx, agent, cookies: sessionCookies, envName };

  // NOTE(review): assumes the Location header is a path, not an absolute
  // URL — confirm against real HAC responses.
  const redirectPath = loginRes.headers.location || (ctx + '/');
  log('info', `Following redirect → ${redirectPath}`, envName);
  const home = await httpRequest(opts(session, redirectPath, 'GET'));

  // Heuristic success check: the HAC home page greets the administrator and
  // contains a logout link; the login form does neither.
  const loggedIn = home.body.includes("You're Administrator") || home.body.includes('logout');
  if (!loggedIn) throw new Error('Login failed - check credentials');
  log('ok', `Login successful for ${username}@${host}`, envName);

  return session;
}
|
|
149
|
+
|
|
150
|
+
/**
 * Thrown when HAC answers with its login page instead of the requested
 * resource, signalling callers to re-authenticate and retry.
 */
export class SessionExpiredError extends Error {
  constructor() {
    super('Session expired - HAC redirected to login page');
    this.name = 'SessionExpiredError';
  }
}
|
|
153
|
+
|
|
154
|
+
/**
 * Throw SessionExpiredError when a response is actually HAC's login flow:
 * either a 302 redirect to /login, or a 200 page containing the
 * j_spring_security_check form.
 */
function assertNotLoginPage(res) {
  const redirectedToLogin = res.status === 302 && (res.headers.location || '').includes('/login');
  const servedLoginForm = res.status === 200 && res.body.includes('j_spring_security_check');
  if (redirectedToLogin || servedLoginForm) throw new SessionExpiredError();
}
|
|
159
|
+
|
|
160
|
+
// Run a FlexibleSearch query through HAC's /console/flexsearch endpoint.
// Fetches the console page first (fresh CSRF token), then POSTs the query and
// returns HAC's parsed JSON result; resultList cell values are HTML-decoded.
// `commit` is always sent as 'false', so the query runs read-only.
// Throws SessionExpiredError when the session has lapsed.
export async function flexibleSearch(session, query, {
  maxCount = 200, user = 'admin', locale = 'en', dataSource = 'master',
} = {}) {
  const { ctx, host, envName } = session;

  log('info', `Fetching FlexSearch page for CSRF token`, envName);
  const flexPage = await httpRequest(opts(session, ctx + '/console/flexsearch', 'GET'));
  assertNotLoginPage(flexPage);
  const csrf = extractCsrf(flexPage.body);
  if (!csrf) throw new Error('Could not extract CSRF token from flexsearch page');
  log('info', `Got CSRF token: ${csrf.slice(0, 16)}…`, envName);

  log('info', `Executing query (maxCount=${maxCount}, dataSource=${dataSource}): ${query.slice(0, 80)}${query.length > 80 ? '…' : ''}`, envName);
  // sqlQuery stays empty: only the FlexibleSearch field is used.
  const body = new URLSearchParams({
    flexibleSearchQuery: query, sqlQuery: '', maxCount: String(maxCount),
    user, locale, dataSource, commit: 'false',
  }).toString();

  const res = await httpRequest(opts(session, ctx + '/console/flexsearch/execute', 'POST', {
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Content-Length': Buffer.byteLength(body),
    'X-CSRF-TOKEN': csrf,
    'X-Requested-With': 'XMLHttpRequest',
    Accept: 'application/json',
    Referer: `https://${host}${ctx}/console/flexsearch`,
  }), body);

  const result = JSON.parse(res.body);
  // HAC HTML-escapes every cell; decode so callers see raw values.
  if (result.resultList) result.resultList = result.resultList.map(row => row.map(htmlDecode));
  if (result.exception) {
    log('error', `Query failed: ${result.exception}`, envName);
  } else {
    log('ok', `Query returned ${result.resultCount} row(s) in ${result.executionTime}ms`, envName);
  }
  return result;
}
|
|
196
|
+
|
|
197
|
+
// Run an ImpEx import through HAC's /console/impex/import form and scrape the
// HTML response into { level, result, details }:
//   level   - HAC severity marker ('error' on failure)
//   result  - summary message from the result banner
//   details - contents of the first <pre> block (e.g. per-line error dump),
//             HTML-decoded and trimmed
// Throws SessionExpiredError when the session has lapsed (checked on the GET).
// NOTE(review): the POST response is not passed through assertNotLoginPage,
// unlike groovyExecute/pkAnalyze — confirm that is intentional.
export async function impexImport(session, scriptContent, {
  validationEnum = 'IMPORT_STRICT', maxThreads = 20, encoding = 'UTF-8',
  legacyMode = false, enableCodeExecution = false, distributedMode = false, sldEnabled = false,
} = {}) {
  const { ctx, host, envName } = session;

  log('info', `Fetching ImpEx page for CSRF token`, envName);
  const impexPage = await httpRequest(opts(session, ctx + '/console/impex/import', 'GET'));
  assertNotLoginPage(impexPage);
  const csrf = extractCsrf(impexPage.body);
  if (!csrf) throw new Error('Could not extract CSRF token from impex page');
  log('info', `Got CSRF token: ${csrf.slice(0, 16)}…`, envName);

  log('info', `Submitting ImpEx script (${scriptContent.length} chars, validation=${validationEnum}, threads=${maxThreads})`, envName);
  const params = new URLSearchParams({
    scriptContent, validationEnum, maxThreads: String(maxThreads), encoding,
    // Underscore-prefixed fields are always sent — presumably Spring MVC's
    // hidden checkbox markers (TODO confirm against the HAC form markup).
    _legacyMode: 'on', _enableCodeExecution: 'on', _distributedMode: 'on', _sldEnabled: 'on',
    _csrf: csrf,
  });
  // The checkbox value itself is only added when the flag is enabled.
  if (legacyMode) params.set('legacyMode', 'true');
  if (enableCodeExecution) params.set('enableCodeExecution', 'true');
  if (distributedMode) params.set('distributedMode', 'true');
  if (sldEnabled) params.set('sldEnabled', 'true');

  const body = params.toString();
  const res = await httpRequest(opts(session, ctx + '/console/impex/import', 'POST', {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': Buffer.byteLength(body),
    Referer: `https://${host}${ctx}/console/impex/import`,
  }), body);

  // The endpoint answers with HTML; scrape the outcome markers out of it.
  const levelM = res.body.match(/id="impexResult"[^>]*data-level="([^"]+)"/);
  const resultM = res.body.match(/id="impexResult"[^>]*data-result="([^"]+)"/);
  const preM = res.body.match(/<pre>([\s\S]*?)<\/pre>/);
  const decode = s => s != null ? htmlDecode(s).trim() : null;

  const result = {
    level: levelM?.[1] ?? null,
    result: resultM?.[1] ?? null,
    details: decode(preM?.[1] ?? null),
  };

  if (result.level === 'error') {
    log('error', `Import failed: ${result.result}`, envName);
  } else {
    log('ok', `Import complete: ${result.result || 'done'}`, envName);
  }
  return result;
}
|
|
246
|
+
|
|
247
|
+
// Execute a Groovy script via HAC's scripting console.
// With commit=false HAC runs the script in rollback mode; commit=true
// persists its changes. Returns HAC's parsed JSON result (the code reads
// `executionResult` and `stacktraceText` from it).
// Throws SessionExpiredError when the session has lapsed.
export async function groovyExecute(session, script, { commit = false } = {}) {
  const { ctx, host, envName } = session;

  log('info', `Fetching scripting page for CSRF token`, envName);
  const page = await httpRequest(opts(session, ctx + '/console/scripting', 'GET'));
  assertNotLoginPage(page);
  const csrf = extractCsrf(page.body);
  if (!csrf) throw new Error('Could not extract CSRF token from scripting page');
  log('info', `Got CSRF token: ${csrf.slice(0, 16)}…`, envName);

  log('info', `Executing Groovy script (${script.length} chars, commit=${commit})`, envName);
  const body = new URLSearchParams({
    script, scriptType: 'groovy', commit: commit ? 'true' : 'false',
  }).toString();

  const res = await httpRequest(opts(session, ctx + '/console/scripting/execute', 'POST', {
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Content-Length': Buffer.byteLength(body),
    'X-CSRF-TOKEN': csrf,
    'X-Requested-With': 'XMLHttpRequest',
    Accept: 'application/json',
    Referer: `https://${host}${ctx}/console/scripting`,
  }), body);

  assertNotLoginPage(res);
  const result = JSON.parse(res.body);

  // A populated stacktraceText means the script itself threw.
  if (result.stacktraceText) {
    log('error', `Groovy execution failed: ${result.stacktraceText.split('\n')[0]}`, envName);
  } else {
    log('ok', `Groovy executed. Result: ${String(result.executionResult).slice(0, 80)}`, envName);
  }
  return result;
}
|
|
281
|
+
|
|
282
|
+
// Scrape every platform configuration property from HAC's /platform/config
// page. Returns a plain { key: value } object with HTML entities decoded.
// Throws SessionExpiredError when the session has lapsed.
export async function readProperties(session) {
  const { ctx, envName } = session;

  log('info', `Fetching configuration properties page`, envName);
  const page = await httpRequest(opts(session, ctx + '/platform/config', 'GET'));
  assertNotLoginPage(page);

  // Parse <tr id="key"> ... <input ... name="key" value="val"/> ... </tr>
  // Capture 1 = property key (the row id), capture 2 = the configValue input.
  const properties = {};
  const rowRegex = /<tr\s+id="([^"]+)"[\s\S]*?<input[^>]+class="configValue"[^>]+name="[^"]*"[^>]+value="([^"]*)"[^>]*\/>/g;
  let m;
  while ((m = rowRegex.exec(page.body)) !== null) {
    properties[m[1]] = htmlDecode(m[2]);
  }

  log('ok', `Parsed ${Object.keys(properties).length} properties`, envName);
  return properties;
}
|
|
300
|
+
|
|
301
|
+
// Decompose a SAP Commerce PK via HAC's /platform/pkanalyzer endpoint.
// Fetches the analyzer page for a CSRF token, POSTs the PK, and returns
// HAC's parsed JSON response. Throws SessionExpiredError when the session
// has lapsed (checked on both the GET and the POST).
export async function pkAnalyze(session, pk) {
  const { ctx, host, envName } = session;
  log('info', `Fetching PK analyzer page for CSRF token`, envName);
  const page = await httpRequest(opts(session, ctx + '/platform/pkanalyzer', 'GET'));
  assertNotLoginPage(page);
  const csrf = extractCsrf(page.body);
  if (!csrf) throw new Error('Could not extract CSRF token from pkanalyzer page');
  log('info', `Analyzing PK: ${pk}`, envName);
  // String(pk) accepts both string and numeric/BigInt PK inputs.
  const body = new URLSearchParams({ pkString: String(pk) }).toString();
  const res = await httpRequest(opts(session, ctx + '/platform/pkanalyzer/analyze', 'POST', {
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Content-Length': Buffer.byteLength(body),
    'X-CSRF-TOKEN': csrf,
    'X-Requested-With': 'XMLHttpRequest',
    Accept: 'application/json',
    Referer: `https://${host}${ctx}/platform/pkanalyzer`,
  }), body);
  assertNotLoginPage(res);
  return JSON.parse(res.body);
}
|
package/package.json
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "hac-mcp",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "MCP server for SAP Commerce Cloud HAC — FlexibleSearch, ImpEx, Groovy, CronJobs and more from any MCP client",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"mcp",
|
|
7
|
+
"model-context-protocol",
|
|
8
|
+
"sap",
|
|
9
|
+
"sap-commerce",
|
|
10
|
+
"hybris",
|
|
11
|
+
"hac",
|
|
12
|
+
"flexiblesearch",
|
|
13
|
+
"groovy",
|
|
14
|
+
"impex",
|
|
15
|
+
"cronjob",
|
|
16
|
+
"claude"
|
|
17
|
+
],
|
|
18
|
+
"author": "Yunus Emre Gül <yunemregul@gmail.com>",
|
|
19
|
+
"license": "MIT-Commons-Clause",
|
|
20
|
+
"homepage": "https://github.com/yunusemregul/hac-mcp",
|
|
21
|
+
"repository": {
|
|
22
|
+
"type": "git",
|
|
23
|
+
"url": "git+https://github.com/yunusemregul/hac-mcp.git"
|
|
24
|
+
},
|
|
25
|
+
"bugs": {
|
|
26
|
+
"url": "https://github.com/yunusemregul/hac-mcp/issues"
|
|
27
|
+
},
|
|
28
|
+
"engines": {
|
|
29
|
+
"node": ">=18"
|
|
30
|
+
},
|
|
31
|
+
"type": "module",
|
|
32
|
+
"main": "server.js",
|
|
33
|
+
"bin": {
|
|
34
|
+
"hac-mcp": "bin/hac-mcp.js"
|
|
35
|
+
},
|
|
36
|
+
"files": [
|
|
37
|
+
"bin",
|
|
38
|
+
"tools",
|
|
39
|
+
"static",
|
|
40
|
+
"server.js",
|
|
41
|
+
"hac.js",
|
|
42
|
+
"storage.js",
|
|
43
|
+
"type-index.js"
|
|
44
|
+
],
|
|
45
|
+
"scripts": {
|
|
46
|
+
"start": "node server.js"
|
|
47
|
+
},
|
|
48
|
+
"dependencies": {
|
|
49
|
+
"@modelcontextprotocol/sdk": "^1.27.1",
|
|
50
|
+
"express": "^5.2.1",
|
|
51
|
+
"zod": "^4.3.6"
|
|
52
|
+
}
|
|
53
|
+
}
|