@arrieta1/mcp-server 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -0
- package/bin/project-graphite-mcp.js +3 -0
- package/dist/server.js +185 -0
- package/dist/tools/analyzeMapping.js +204 -0
- package/dist/tools/buildGenerationContext.js +78 -0
- package/dist/tools/callCodeGenerator.js +42 -0
- package/dist/tools/downloadApiData.js +145 -0
- package/dist/tools/getMetadata.js +44 -0
- package/dist/tools/listTransformations.js +34 -0
- package/dist/tools/readFile.js +21 -0
- package/dist/tools/readFolder.js +27 -0
- package/dist/tools/validateTransformation.js +35 -0
- package/dist/tools/writeTransformation.js +99 -0
- package/dist/types.js +5 -0
- package/dist/utils/logger.js +12 -0
- package/dist/utils/pathResolver.js +82 -0
- package/package.json +41 -0
package/README.md
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
# Project Graphite MCP Server
|
|
2
|
+
|
|
3
|
+
Packaged MCP server for transformations tooling.
|
|
4
|
+
|
|
5
|
+
## Install and run
|
|
6
|
+
|
|
7
|
+
Use the package without installing globally:
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npx -y @arrieta1/mcp-server
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
For npmjs publishing/install, authenticate first:
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
npm login
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
The server starts on `http://localhost:3333` by default.
|
|
20
|
+
|
|
21
|
+
## Configuration
|
|
22
|
+
|
|
23
|
+
Environment variables:
|
|
24
|
+
|
|
25
|
+
- `MCP_PORT` (default: `3333`)
|
|
26
|
+
- `MCP_HOST` (default: `localhost`)
|
|
27
|
+
|
|
28
|
+
Example:
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
MCP_HOST=127.0.0.1 MCP_PORT=3333 npx -y @arrieta1/mcp-server
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
## MCP client configuration example
|
|
35
|
+
|
|
36
|
+
```json
|
|
37
|
+
{
|
|
38
|
+
"mcpServers": {
|
|
39
|
+
"graphite-transformations": {
|
|
40
|
+
"command": "npx",
|
|
41
|
+
"args": ["-y", "@dayanaarrieta-graphite/mcp-server"],
|
|
42
|
+
"env": {
|
|
43
|
+
"MCP_HOST": "localhost",
|
|
44
|
+
"MCP_PORT": "3333"
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Local development
|
|
52
|
+
|
|
53
|
+
```bash
|
|
54
|
+
yarn --cwd mcp-server
|
|
55
|
+
yarn --cwd mcp-server build
|
|
56
|
+
yarn --cwd mcp-server start
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
## Publish (public)
|
|
60
|
+
|
|
61
|
+
```bash
|
|
62
|
+
npm publish --access public
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
## Quick health check
|
|
66
|
+
|
|
67
|
+
```bash
|
|
68
|
+
curl -s "http://localhost:${MCP_PORT:-3333}/health"
|
|
69
|
+
```
|
package/dist/server.js
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
// ============================================================
|
|
7
|
+
// MCP Server - Project Graphite
|
|
8
|
+
// All orchestration happens here. OpenCode/Claude only generate.
|
|
9
|
+
// ============================================================
|
|
10
|
+
const express_1 = __importDefault(require("express"));
|
|
11
|
+
const fs_1 = __importDefault(require("fs"));
|
|
12
|
+
const path_1 = __importDefault(require("path"));
|
|
13
|
+
const analyzeMapping_1 = require("./tools/analyzeMapping");
|
|
14
|
+
const buildGenerationContext_1 = require("./tools/buildGenerationContext");
|
|
15
|
+
const callCodeGenerator_1 = require("./tools/callCodeGenerator");
|
|
16
|
+
const downloadApiData_1 = require("./tools/downloadApiData");
|
|
17
|
+
const getMetadata_1 = require("./tools/getMetadata");
|
|
18
|
+
const listTransformations_1 = require("./tools/listTransformations");
|
|
19
|
+
const readFile_1 = require("./tools/readFile");
|
|
20
|
+
const readFolder_1 = require("./tools/readFolder");
|
|
21
|
+
const validateTransformation_1 = require("./tools/validateTransformation");
|
|
22
|
+
const writeTransformation_1 = require("./tools/writeTransformation");
|
|
23
|
+
const logger_1 = require("./utils/logger");
|
|
24
|
+
const pathResolver_1 = require("./utils/pathResolver");
|
|
25
|
+
const app = (0, express_1.default)();
// Generous body limit: tool calls can carry whole generated source files.
app.use(express_1.default.json({ limit: "50mb" }));
// Fix: a non-numeric MCP_PORT used to produce NaN here and crash app.listen();
// fall back to the documented default port instead.
const parsedPort = parseInt(process.env.MCP_PORT ?? "3333", 10);
const PORT = Number.isNaN(parsedPort) ? 3333 : parsedPort;
const HOST = process.env.MCP_HOST ?? "localhost";
|
|
29
|
+
/**
 * Registry of every MCP tool exposed by this server, keyed by tool name.
 * Each entry is `(args) => Promise<{ success, data?, error? }>` (the shape
 * every tool module in ./tools returns). The JSON-RPC handler below looks
 * tools up here by `params.name`.
 */
const TOOLS = {
    // File I/O
    readFile: (a) => (0, readFile_1.readFile)(a),
    readFolder: (a) => (0, readFolder_1.readFolder)(a),
    // Data acquisition
    downloadApiData: (a) => (0, downloadApiData_1.downloadApiData)(a),
    // Analysis
    analyzeMapping: (a) => (0, analyzeMapping_1.analyzeMapping)(a),
    getTransformationMetadata: (a) => (0, getMetadata_1.getTransformationMetadata)(a),
    listTransformations: (a) => (0, listTransformations_1.listTransformations)(a),
    // Generation context
    buildGenerationContext: (a) => (0, buildGenerationContext_1.buildGenerationContext)(a),
    callCodeGenerator: (a) => (0, callCodeGenerator_1.callCodeGenerator)(a),
    // Write
    writeTransformation: (a) => (0, writeTransformation_1.writeTransformation)(a),
    writeGoldenFiles: (a) => (0, writeTransformation_1.writeGoldenFiles)(a),
    // Validate
    validateTransformation: (a) => (0, validateTransformation_1.validateTransformation)(a),
    // -- High-level orchestration (full port in one call) --
    // Runs the whole 7-step pipeline: download -> analyze -> (for each of
    // enum/class/test: build context -> generate -> write) -> validate -> list.
    portTransformation: async (a) => {
        const { customerId, erpType } = a;
        const steps = [];
        const errors = [];
        // Records a progress label; `steps` is returned to the caller so a
        // client can show where the run got to.
        const step = (label) => {
            steps.push(label);
            (0, logger_1.log)("info", label);
        };
        // Records a non-fatal failure; the pipeline keeps going.
        const fail = (label) => {
            errors.push(label);
            (0, logger_1.log)("error", label);
        };
        // 1. Download API data
        step(`[1/7] Downloading API data for ${customerId}/${erpType}`);
        const download = (await (0, downloadApiData_1.downloadApiData)({ customerId, erpType }));
        if (!download.success) {
            // Non-fatal by design: downstream steps fall back to whatever
            // files already exist locally.
            fail("Download failed - using local files");
        }
        // 2. Analyze mapping + build reference context
        step("[2/7] Analyzing mapping and reference transformer");
        const analysis = (await (0, analyzeMapping_1.analyzeMapping)({ customerId, erpType }));
        if (!analysis.success) {
            // Fatal: nothing can be generated without the analysis context.
            return { success: false, steps, error: "Analysis failed" };
        }
        const analysisData = analysis.data;
        const results = {};
        for (const targetType of ["enum", "class", "test"]) {
            // 3. Build generation context
            // Step numbers 3/4/5 map to enum/class/test respectively.
            step(`[${targetType === "enum" ? 3 : targetType === "class" ? 4 : 5}/7] Building ${targetType} generation context`);
            const ctxResult = (await (0, buildGenerationContext_1.buildGenerationContext)({
                analysisData: analysisData,
                targetType,
            }));
            if (!ctxResult.success) {
                fail(`Context build failed for ${targetType}`);
                continue;
            }
            // 4. Call code generator
            step(`Calling code generator for ${targetType}`);
            // Only the class generation attaches a reference transformer file;
            // US1206 appears to be the canonical SAP example — TODO confirm.
            const referenceFiles = targetType === "class"
                ? (() => {
                    const refPath = path_1.default.join(pathResolver_1.REPO_ROOT, "src/transformers/customers/US1206/US1206_sap.ts");
                    return fs_1.default.existsSync(refPath) ? [refPath] : [];
                })()
                : [];
            const genResult = (await (0, callCodeGenerator_1.callCodeGenerator)({
                context: ctxResult.data,
                referenceFiles,
            }));
            if (!genResult.success || !genResult.data?.code) {
                fail(`Code generation failed for ${targetType}`);
                continue;
            }
            // 5. Write generated code
            step(`Writing ${targetType} file`);
            const writeResult = (await (0, writeTransformation_1.writeTransformation)({
                customerId,
                erpType,
                type: targetType,
                content: genResult.data.code,
            }));
            results[targetType] = writeResult.data;
            if (!writeResult.success) {
                fail(`Write failed for ${targetType}`);
            }
        }
        // 6. Validate
        step("[6/7] Running tests");
        const validation = (await (0, validateTransformation_1.validateTransformation)({ customerId, erpType }));
        // 7. List to confirm
        step("[7/7] Verifying transformation is listed");
        const list = (await (0, listTransformations_1.listTransformations)({ filter: customerId }));
        return {
            // Overall success requires zero recorded failures AND a passing
            // validation run.
            success: errors.length === 0 && validation.success,
            data: {
                customerId,
                erpType,
                steps,
                errors,
                files: results,
                validation: validation.data,
                listed: list.data,
            },
        };
    },
};
|
|
134
|
+
// -- Health --
// Liveness probe: reports the tool roster so clients can sanity-check wiring.
app.get("/health", (_req, res) => {
    const toolNames = Object.keys(TOOLS);
    res.json({
        status: "ok",
        tools: toolNames,
        toolCount: toolNames.length,
        port: PORT,
        timestamp: new Date().toISOString(),
    });
});
|
|
144
|
+
// -- JSON-RPC Endpoint --
// Single MCP entry point. Accepts JSON-RPC 2.0 requests; only the
// "tools/call" method is supported. Tool results are returned under
// `result`, failures as JSON-RPC error objects (-32601/-32602/-32603).
app.post("/", async (req, res) => {
    // Fix: req.body is undefined when the client sends no JSON body or the
    // wrong Content-Type; `rpc.method` then threw and produced a raw 500.
    // Default to {} so such requests get a proper -32601 error instead.
    const rpc = req.body ?? {};
    // Sends exactly one of `result`/`error`, echoing the request id.
    const respond = (result, error) => {
        const response = { jsonrpc: "2.0", id: rpc.id };
        if (error) {
            response.error = error;
        }
        else {
            response.result = result;
        }
        res.json(response);
    };
    if (rpc.method !== "tools/call") {
        return respond(undefined, { code: -32601, message: `Method not found: ${rpc.method}` });
    }
    const toolName = rpc.params?.name;
    const toolArgs = (rpc.params?.arguments ?? {});
    const tool = toolName ? TOOLS[toolName] : undefined;
    if (!tool) {
        return respond(undefined, {
            code: -32602,
            message: `Unknown tool: ${toolName}. Available: ${Object.keys(TOOLS).join(", ")}`,
        });
    }
    (0, logger_1.log)("info", `▶ Tool: ${toolName}`, { customerId: toolArgs.customerId, erpType: toolArgs.erpType });
    try {
        const result = await tool(toolArgs);
        return respond(result);
    }
    catch (err) {
        // Tools normally report failure via { success: false }; anything
        // thrown here is unexpected and surfaced as an internal error.
        (0, logger_1.log)("error", `Tool error: ${toolName}`, err);
        return respond(undefined, { code: -32603, message: String(err) });
    }
});
|
|
179
|
+
// Start the HTTP server and announce the available tools on stdout.
app.listen(PORT, HOST, () => {
    const toolNames = Object.keys(TOOLS);
    (0, logger_1.log)("info", `✅ MCP Server ready on port ${PORT}`);
    // eslint-disable-next-line no-console -- CLI startup output is intentional
    console.log(`\n✅ MCP Server ready - ${toolNames.length} tools available on port ${PORT}\n`);
    // eslint-disable-next-line no-console -- CLI startup output is intentional
    console.log("Tools:", toolNames.join(", "));
});
|
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.analyzeMapping = analyzeMapping;
|
|
7
|
+
// ============================================================
|
|
8
|
+
// analyzeMapping — Analyzes the real Graphite mappings.
// Actual structure: an array of objects with fieldName, mapping
// (group), sourceExpressionRaw, mappingCondition,
// translationTable, sourceExpression (JSON logic).
|
|
12
|
+
// ============================================================
|
|
13
|
+
const fs_1 = __importDefault(require("fs"));
|
|
14
|
+
const path_1 = __importDefault(require("path"));
|
|
15
|
+
const logger_1 = require("../utils/logger");
|
|
16
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
17
|
+
/**
 * Analyzes a customer's downloaded Graphite mapping data plus the reference
 * customer's transformer code, and assembles the full generation context.
 *
 * @param {{ customerId: string, erpType: string }} args
 * @returns {Promise<{ success: true, data: object }>} always succeeds; missing
 *   files simply yield empty strings / empty collections in the result.
 */
async function analyzeMapping(args) {
    const { customerId, erpType } = args;
    const refCustomer = (0, pathResolver_1.getReferenceCustomer)(erpType);
    const erp = erpType.toLowerCase();
    const apiDir = (0, pathResolver_1.getApiDataDir)(customerId);
    (0, logger_1.log)("info", `Analyzing ${customerId}/${erpType} - reference: ${refCustomer}`);
    // -- 1. Load real mappings from API download --
    // Every load falls back to an empty value, so a partial download still
    // produces a usable (if thinner) context.
    const mappingsRaw = loadJson(path_1.default.join(apiDir, "mappings.json"), []);
    const mappingsByGroup = loadJson(path_1.default.join(apiDir, "mappings-by-group.json"), {});
    const translationTables = loadJson(path_1.default.join(apiDir, "translation-tables.json"), {});
    const schema = loadJson(path_1.default.join(apiDir, "schema.json"), {});
    const connection = loadJson(path_1.default.join(apiDir, "connection.json"), {});
    const templates = loadJson(path_1.default.join(apiDir, "templates.json"), {});
    const goldenProfile = loadJson(path_1.default.join(apiDir, "golden-profile.json"), {});
    // -- 2. Load reference transformer code --
    const refDir = (0, pathResolver_1.getCustomerDir)(refCustomer);
    // Prefer the conventional <customer>_<erp>.ts name; otherwise fall back
    // to the first non-enum .ts file in the reference directory.
    const referenceTransformerCode = readFile(path_1.default.join(refDir, `${refCustomer}_${erp}.ts`)) || readFirstTs(refDir);
    const referenceEnumsCode = readFile(path_1.default.join(refDir, "enums.ts"));
    const referenceTestPath = resolveReferenceTestPath(erp, refCustomer);
    const referenceTestCode = referenceTestPath ? readFile(referenceTestPath) : "";
    // Base class lookup tries "<erp>-base.ts" first, then "<erp>.ts".
    const baseClassCode = readFile((0, pathResolver_1.resolveSafePath)(`src/transformers/standard/${erp}-base.ts`)) ||
        readFile((0, pathResolver_1.resolveSafePath)(`src/transformers/standard/${erp}.ts`));
    // -- 3. Extract translation table names used --
    const usedTranslationTables = [
        ...new Set(mappingsRaw.filter((m) => m.translationTable).map((m) => m.translationTable)),
    ];
    // -- 4. Extract conditional rules --
    const conditionalRules = mappingsRaw
        .filter((m) => m.mappingConditionRaw || m.mappingCondition)
        .map((m) => ({
        field: m.fieldName,
        condition: m.mappingConditionRaw ?? JSON.stringify(m.mappingCondition),
        values: extractConditionValues(m.mappingCondition),
    }));
    // -- 5. Extract all field keys from sourceExpressionRaw --
    // Field keys are the "<key>" placeholders inside source expressions.
    const allFieldKeys = [...new Set(mappingsRaw.flatMap((m) => extractFieldKeys(m.sourceExpressionRaw ?? "")))];
    // -- 6. Extract enums from reference code --
    const existingEnums = extractEnums(referenceEnumsCode);
    // -- 7. Extract base classes --
    const baseClasses = extractBaseClasses(referenceTransformerCode);
    // -- 8. Build mapping sections summary --
    const mappingSections = Object.entries(mappingsByGroup).map(([group, fields]) => ({
        group,
        fieldCount: fields.length,
        fields: fields.map((f) => f.fieldName),
    }));
    // -- 9. Build structured mapping for prompt --
    const structuredMapping = buildStructuredMapping(mappingsRaw);
    // Core context consumed by buildGenerationContext / the prompt builders.
    const context = {
        customerId,
        erpType,
        referenceTransformerCode,
        referenceEnumsCode,
        referenceTestCode,
        mappingJson: JSON.stringify(structuredMapping, null, 2),
        templateJson: JSON.stringify(templates, null, 2),
        fieldCount: mappingsRaw.length,
        translationTables: usedTranslationTables,
        conditionalRules,
        baseClasses,
        existingEnums,
    };
    return {
        success: true,
        data: {
            ...context,
            rawMappingsCount: mappingsRaw.length,
            mappingSections,
            allFieldKeys,
            translationTablesData: translationTables,
            schema,
            connection,
            goldenProfile,
            baseClassCode,
            // Quick-glance stats for callers that don't need the full payload.
            summary: {
                fieldCount: mappingsRaw.length,
                translationTableCount: usedTranslationTables.length,
                conditionalRuleCount: conditionalRules.length,
                mappingSectionCount: Object.keys(mappingsByGroup).length,
                mappingSections: Object.keys(mappingsByGroup),
                hasReferenceCode: referenceTransformerCode.length > 0,
                hasReferenceTest: referenceTestCode.length > 0,
                hasGoldenFiles: Object.keys(goldenProfile).length > 0,
                hasTranslationTables: usedTranslationTables.length > 0,
                // Thresholds 50/100 look like heuristics — TODO confirm.
                complexity: mappingsRaw.length > 100 ? "high" : mappingsRaw.length > 50 ? "medium" : "low",
                erpType,
                referenceCustomer: refCustomer,
                referenceTestPath,
            },
        },
    };
}
|
|
109
|
+
// -- Helpers --
|
|
110
|
+
/**
 * Best-effort JSON loader: parses `filePath` if it exists, otherwise (or on
 * any read/parse error) returns `fallback`.
 */
function loadJson(filePath, fallback) {
    try {
        const raw = fs_1.default.existsSync(filePath)
            ? fs_1.default.readFileSync(filePath, "utf-8")
            : null;
        return raw === null ? fallback : JSON.parse(raw);
    }
    catch {
        return fallback;
    }
}
|
|
121
|
+
/**
 * Best-effort text read: returns the file's UTF-8 contents, or "" when the
 * file is missing or unreadable.
 */
function readFile(filePath) {
    try {
        return fs_1.default.readFileSync(filePath, "utf-8");
    }
    catch {
        return "";
    }
}
|
|
129
|
+
/**
 * Returns the contents of the first non-enum .ts file in `dir`, or "" when
 * the directory is missing or holds no candidates.
 */
function readFirstTs(dir) {
    if (!fs_1.default.existsSync(dir)) {
        return "";
    }
    // Fix: readdirSync order is platform/filesystem dependent, so "first"
    // was nondeterministic; sort so the same file is picked everywhere.
    const files = fs_1.default
        .readdirSync(dir)
        .filter((f) => f.endsWith(".ts") && !f.includes("enum"))
        .sort();
    return files.length ? readFile(path_1.default.join(dir, files[0])) : "";
}
|
|
136
|
+
/**
 * Locates the reference customer's test file, trying the known repo layouts
 * in priority order. Returns "" when none of the candidates exist.
 */
function resolveReferenceTestPath(erp, refCustomer) {
    const root = pathResolver_1.REPO_ROOT;
    const candidates = [
        path_1.default.join(root, "tests/transformers/standard", erp, "erp.test.ts"),
        path_1.default.join(root, "tests/transformers/customers", `${refCustomer}.test.ts`),
        path_1.default.join(root, "tests/transformers", `${refCustomer}_${erp}.test.ts`),
    ];
    if (erp === "sap") {
        // SAP has a known extra customer test to fall back on.
        candidates.push(path_1.default.join(root, "tests/transformers/customers", "US47815.test.ts"));
    }
    return candidates.find((candidate) => fs_1.default.existsSync(candidate)) ?? "";
}
|
|
152
|
+
/**
 * Extracts the "<key>" placeholder names from a raw source expression,
 * trimmed, in order of appearance.
 */
function extractFieldKeys(expr) {
    const keys = [];
    for (const token of expr.match(/<([^>]+)>/g) ?? []) {
        keys.push(token.replace(/[<>]/g, "").trim());
    }
    return keys;
}
|
|
156
|
+
/**
 * Pulls the short quoted tokens (keys and values alike) out of a serialized
 * mapping condition, deduplicated and capped at 20 entries.
 */
function extractConditionValues(condition) {
    if (!condition) {
        return [];
    }
    const quoted = (JSON.stringify(condition).match(/"([^"]{1,50})"/g) ?? [])
        .map((token) => token.replace(/"/g, ""));
    return [...new Set(quoted)].slice(0, 20);
}
|
|
164
|
+
/**
 * Parses `export enum Name { KEY = 'value', ... }` declarations out of a
 * TypeScript source string.
 *
 * Fixes over the previous version:
 * - only the TRAILING comma is stripped (a value containing a comma was
 *   previously mangled by `replace(",", "")` removing its first comma);
 * - the entry is split on the FIRST "=" only, so values containing "=" no
 *   longer get dropped;
 * - both single- and double-quoted values are unquoted.
 *
 * @param {string} code - TypeScript source to scan.
 * @returns {{ name: string, values: Record<string, string> }[]}
 */
function extractEnums(code) {
    const enums = [];
    for (const match of code.matchAll(/export enum (\w+)\s*\{([^}]+)\}/g)) {
        const values = {};
        for (const rawLine of match[2]
            .split("\n")
            .map((l) => l.trim())
            .filter(Boolean)) {
            // Skip comment-only lines so they don't yield bogus entries.
            if (rawLine.startsWith("//")) {
                continue;
            }
            const line = rawLine.replace(/,\s*$/, "");
            const eq = line.indexOf("=");
            if (eq === -1) {
                continue;
            }
            const key = line.slice(0, eq).trim();
            const value = line.slice(eq + 1).trim().replace(/^['"]|['"]$/g, "");
            if (key) {
                values[key] = value;
            }
        }
        enums.push({ name: match[1], values });
    }
    return enums;
}
|
|
184
|
+
/**
 * Returns the unique base-class identifiers referenced via `extends` in the
 * given source string.
 *
 * Fix: the previous `.replace("extends ", "")` assumed exactly one space,
 * so `extends\tFoo` or `extends\nFoo` (allowed by the `\s+` in the regex)
 * leaked the keyword into the result. Using the capture group is exact.
 */
function extractBaseClasses(code) {
    const names = [...code.matchAll(/extends\s+(\w+)/g)].map((m) => m[1]);
    return [...new Set(names)];
}
|
|
187
|
+
/**
 * Re-shapes the flat mapping list into { group: { fieldName: details } }.
 * Optional attributes (condition, translationTable, defaultValue,
 * characterLimit) are included only when truthy on the source record.
 */
function buildStructuredMapping(mappings) {
    const result = {};
    for (const m of mappings) {
        const group = m.mapping ?? "unknown";
        if (!result[group]) {
            result[group] = {};
        }
        const entry = { sourceExpression: m.sourceExpressionRaw };
        if (m.mappingConditionRaw) {
            entry.condition = m.mappingConditionRaw;
        }
        if (m.translationTable) {
            entry.translationTable = m.translationTable;
        }
        if (m.defaultValue) {
            entry.defaultValue = m.defaultValue;
        }
        if (m.characterLimit) {
            entry.characterLimit = m.characterLimit;
        }
        entry.logic = m.sourceExpression;
        result[group][m.fieldName] = entry;
    }
    return result;
}
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.buildGenerationContext = buildGenerationContext;
|
|
4
|
+
const logger_1 = require("../utils/logger");
|
|
5
|
+
/**
 * Turns an analyzeMapping result into the generation context for one target
 * artifact ("enum" | "class" | "test"), including the LLM prompt.
 *
 * @param {{ analysisData: object, targetType: string }} args
 * @returns {Promise<{ success: true, data: object }>}
 */
async function buildGenerationContext(args) {
    const { analysisData: ctx, targetType } = args;
    (0, logger_1.log)("info", `Building ${targetType} context for ${ctx.customerId}/${ctx.erpType}`);
    // Unknown target types fall through with an empty prompt (historical
    // behavior, kept intact).
    let prompt = "";
    let promptParts;
    if (targetType === "enum") {
        prompt = buildEnumPrompt(ctx);
    }
    else if (targetType === "class") {
        prompt = buildClassPrompt(ctx);
    }
    else if (targetType === "test") {
        prompt = buildTestPrompt(ctx);
    }
    // Fix: a malformed mappingJson used to throw out of JSON.parse and crash
    // the whole orchestration; fall back to {} and log instead.
    let mappingData = {};
    try {
        mappingData = JSON.parse(ctx.mappingJson || "{}");
    }
    catch {
        (0, logger_1.log)("warn", `mappingJson is not valid JSON for ${ctx.customerId}; using {}`);
    }
    const context = {
        customerId: ctx.customerId,
        erpType: ctx.erpType,
        referenceCode: ctx.referenceTransformerCode,
        mappingData,
        enumContext: ctx.referenceEnumsCode,
        // NOTE(review): golden profile/payload are fed from mappingJson and
        // templateJson here — looks intentional but worth confirming.
        testGoldenProfile: ctx.mappingJson,
        testGoldenPayload: ctx.templateJson,
        baseClassCode: "",
        prompt,
        promptParts,
    };
    return { success: true, data: context };
}
|
|
33
|
+
// -- Enum Prompt --
/**
 * Builds the LLM prompt for generating enums.ts: instructs the generator to
 * copy the reference customer's enum file verbatim, substituting the
 * customer id and the customer's own mapping field keys.
 * The mappingJson excerpt is capped at 3000 chars to bound prompt size.
 */
function buildEnumPrompt(ctx) {
    return `You are a TypeScript expert working on Project Graphite ERP transformations.

Generate enums.ts for customer ${ctx.customerId} (${ctx.erpType}).

COPY THIS EXACT FILE — replace every "US1206" with "${ctx.customerId}":

${ctx.referenceEnumsCode}

RULES:
- Replace every "US1206" → "${ctx.customerId}"
- Keep EXACTLY the same enum names: ${ctx.customerId}Vendor and ${ctx.customerId}Body
- Replace the enum VALUES with the actual field keys from this customer's mappings:
${ctx.mappingJson.slice(0, 3000)}
- Output ONLY TypeScript code, no explanation, no markdown fences

Generate enums.ts now:`;
}
/**
 * Builds the (deliberately minimal) class prompt: the reference transformer
 * is attached as a file by callCodeGenerator, so the prompt only asks for a
 * customer-id substitution.
 */
function buildClassPrompt(ctx) {
    return `Replace every "US1206" with "${ctx.customerId}" in the attached file. Output only TypeScript code.`;
}
// -- Test Prompt --
/**
 * Builds the LLM prompt for generating the Jest test file, anchored on a
 * 3000-char excerpt of the reference test and the customer's mappings.
 */
function buildTestPrompt(ctx) {
    const refTest = ctx.referenceTestCode.slice(0, 3000);
    return `You are a TypeScript expert working on Project Graphite ERP transformations.

Generate a complete Jest test file for ${ctx.customerId} ${ctx.erpType} transformer.

RULES:
- Import: import { transformer, ${ctx.customerId}Sap } from '../../../src/transformers/customers/${ctx.customerId}/${ctx.customerId}_sap'
- Minimum 10 test cases
- Test each mapping section (0110, 0120, 0130, 0310, 0510, cc)
- Test conditional rules
- Use realistic field values
- No TODOs — all tests fully implemented
- Output ONLY TypeScript code, no explanation, no markdown fences

REFERENCE TEST (follow this structure):
${refTest}

MAPPING SECTIONS AVAILABLE:
${ctx.mappingJson.slice(0, 3000)}

Generate ${ctx.customerId}_sap.test.ts now:`;
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.callCodeGenerator = callCodeGenerator;
|
|
4
|
+
const child_process_1 = require("child_process");
|
|
5
|
+
const logger_1 = require("../utils/logger");
|
|
6
|
+
const OPENCODE_BIN = "/opt/homebrew/bin/opencode";
|
|
7
|
+
async function callCodeGenerator(args) {
|
|
8
|
+
const { context, referenceFiles = [] } = args;
|
|
9
|
+
(0, logger_1.log)("info", `Calling opencode for ${context.customerId}/${context.erpType} (${context.prompt.length} chars)`);
|
|
10
|
+
// Use exact same pattern that works on command line:
|
|
11
|
+
// opencode run "message" -f file1 -f file2
|
|
12
|
+
const spawnArgs = ["run", context.prompt, ...referenceFiles.flatMap((f) => ["-f", f])];
|
|
13
|
+
(0, logger_1.log)("info", `spawn: opencode ${spawnArgs.slice(0, 3).join(" ")} ... (${referenceFiles.length} files)`);
|
|
14
|
+
const result = (0, child_process_1.spawnSync)(OPENCODE_BIN, spawnArgs, {
|
|
15
|
+
encoding: "utf-8",
|
|
16
|
+
timeout: 180000,
|
|
17
|
+
maxBuffer: 1024 * 1024 * 20,
|
|
18
|
+
});
|
|
19
|
+
if (result.error) {
|
|
20
|
+
return { success: false, error: `opencode error: ${result.error.message}` };
|
|
21
|
+
}
|
|
22
|
+
if (result.status !== 0) {
|
|
23
|
+
return { success: false, error: `opencode exited ${result.status}: ${(result.stderr || "").slice(0, 500)}` };
|
|
24
|
+
}
|
|
25
|
+
const output = result.stdout?.trim() ?? "";
|
|
26
|
+
if (output.length < 100) {
|
|
27
|
+
return { success: false, error: `opencode returned too little output (${output.length} chars)` };
|
|
28
|
+
}
|
|
29
|
+
(0, logger_1.log)("info", `opencode returned ${output.length} chars`);
|
|
30
|
+
return {
|
|
31
|
+
success: true,
|
|
32
|
+
data: { code: stripFences(output), source: "opencode" },
|
|
33
|
+
};
|
|
34
|
+
}
|
|
35
|
+
/**
 * Removes markdown code fences that the generator sometimes wraps its
 * output in, then trims surrounding whitespace.
 *
 * Fix: only the ```typescript and ```ts tags were handled before, so a
 * fence tagged with any other language (```javascript, ```tsx, ...) left
 * the tag text inside the generated file. One pattern now accepts any
 * word-character/hyphen language tag, including none.
 */
function stripFences(code) {
    return code
        .replace(/^```[\w-]*\n?/gm, "")
        .replace(/```$/gm, "")
        .trim();
}
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.downloadApiData = downloadApiData;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const https_1 = __importDefault(require("https"));
|
|
10
|
+
const http_1 = __importDefault(require("http"));
|
|
11
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
12
|
+
const logger_1 = require("../utils/logger");
|
|
13
|
+
// Graphite public API base URL; overridable for non-production environments.
const API_BASE = process.env.GRAPHITE_API_URL ?? "https://poc.projectgraphite.com/api/public/api/v1";
// Bearer token; per the endpoint comments below it also identifies the
// customer/connection on the server side.
const API_TOKEN = process.env.GRAPHITE_API_TOKEN ?? "";
/**
 * GETs `url` with the Graphite bearer token and resolves with the parsed
 * JSON body. Rejects on HTTP >= 400, invalid JSON, transport errors, or a
 * 30s socket timeout.
 *
 * @param {string} url - absolute http(s) URL.
 * @returns {Promise<unknown>} parsed response body.
 */
function fetchJson(url) {
    return new Promise((resolve, reject) => {
        // Pick the transport module matching the URL scheme.
        const client = url.startsWith("https") ? https_1.default : http_1.default;
        const req = client.get(url, {
            headers: {
                Authorization: `Bearer ${API_TOKEN}`,
                Accept: "application/json",
            },
        }, (res) => {
            let data = "";
            res.on("data", (chunk) => {
                data += chunk;
            });
            res.on("end", () => {
                // Treat any 4xx/5xx as a failure, including the body for context.
                if (res.statusCode && res.statusCode >= 400) {
                    reject(new Error(`HTTP ${res.statusCode} -> ${url}\n${data}`));
                    return;
                }
                try {
                    resolve(JSON.parse(data));
                }
                catch (e) {
                    reject(new Error(`Invalid JSON from ${url}: ${String(e)}`));
                }
            });
        });
        req.on("error", reject);
        // Abort sockets that stall for 30s; destroy() also stops the response.
        req.setTimeout(30000, () => {
            req.destroy();
            reject(new Error(`Timeout: ${url}`));
        });
    });
}
|
|
48
|
+
/**
 * Downloads the customer's mapping data from the Graphite public API into
 * the local api-data directory, then post-processes the mappings into
 * grouped and field-list views.
 *
 * Endpoint failures are collected (not fatal); success means at least one
 * file was saved.
 *
 * @param {{ customerId: string, erpType: string }} args
 * @returns {Promise<{ success: boolean, data?: object, error?: string }>}
 */
async function downloadApiData(args) {
    const { customerId, erpType } = args;
    if (!API_TOKEN) {
        return { success: false, error: "GRAPHITE_API_TOKEN not set" };
    }
    const dir = (0, pathResolver_1.getApiDataDir)(customerId);
    // NOTE(review): ensureDir is given a FILE path (dir/.keep) — presumably
    // it creates the parent directory for it; confirm against pathResolver.
    (0, pathResolver_1.ensureDir)(path_1.default.join(dir, ".keep"));
    const saved = [];
    const errors = [];
    // -- Core endpoints (no customer ID in path — Bearer defines the customer) --
    const endpoints = [
        {
            key: "mappings",
            url: `${API_BASE}/mappings/transformation-mappings`,
            filename: "mappings.json",
        },
        {
            key: "templates",
            url: `${API_BASE}/mappings/transformation-templates`,
            filename: "templates.json",
        },
        {
            key: "tables",
            url: `${API_BASE}/mappings/transformation-tables`,
            filename: "translation-tables.json",
        },
        {
            // Schema — publicId can be empty for own customer schema
            key: "schema",
            url: `${API_BASE}/schema/${customerId}`,
            filename: "schema.json",
        },
    ];
    // NOTE: /connections/{connectionId} expects a MongoDB ObjectId — skip it.
    // The Bearer token already identifies the connection. Everything we need
    // comes from the mappings, templates, tables, and schema endpoints.
    for (const ep of endpoints) {
        try {
            (0, logger_1.log)("info", `Downloading ${ep.key}...`);
            const data = await fetchJson(ep.url);
            const filePath = path_1.default.join(dir, ep.filename);
            fs_1.default.writeFileSync(filePath, JSON.stringify(data, null, 2), "utf-8");
            saved.push(ep.filename);
            const count = Array.isArray(data) ? ` (${data.length} items)` : "";
            (0, logger_1.log)("info", `Saved ${ep.filename}${count}`);
        }
        catch (err) {
            // Per-endpoint failures are logged and reported, but don't stop
            // the remaining downloads.
            const msg = `${ep.key}: ${String(err)}`;
            errors.push(msg);
            (0, logger_1.log)("warn", msg);
        }
    }
    // -- Post-process: group mappings by section --
    const mappingsPath = path_1.default.join(dir, "mappings.json");
    if (fs_1.default.existsSync(mappingsPath)) {
        try {
            const allMappings = JSON.parse(fs_1.default.readFileSync(mappingsPath, "utf-8"));
            // Group by mapping section (0110, 0120, cc, etc.)
            const byGroup = {};
            for (const m of allMappings) {
                const group = m.mapping ?? "unknown";
                if (!byGroup[group]) {
                    byGroup[group] = [];
                }
                byGroup[group].push(m);
            }
            fs_1.default.writeFileSync(path_1.default.join(dir, "mappings-by-group.json"), JSON.stringify(byGroup, null, 2), "utf-8");
            saved.push("mappings-by-group.json");
            // Compact index of field name -> section, used for quick lookups.
            fs_1.default.writeFileSync(path_1.default.join(dir, "field-list.json"), JSON.stringify(allMappings.map((m) => ({
                fieldName: m.fieldName,
                group: m.mapping,
            })), null, 2), "utf-8");
            saved.push("field-list.json");
            const groups = Object.keys(byGroup);
            (0, logger_1.log)("info", `${groups.length} mapping sections: ${groups.join(", ")}`);
        }
        catch (err) {
            (0, logger_1.log)("warn", `Post-process error: ${String(err)}`);
        }
    }
    return {
        // At least one file must have been saved to count as success.
        success: saved.length > 0,
        data: {
            customerId,
            erpType,
            // NOTE(review): these two are always empty placeholders here —
            // callers appear to read the files from disk instead; confirm.
            mappingJson: {},
            templateJson: {},
            savedPaths: saved,
            savedFiles: saved,
            savedCount: saved.length,
            errorCount: errors.length,
            errors,
            note: errors.length === 0
                ? "All files downloaded successfully"
                : `${errors.length} endpoint(s) failed - non-critical`,
        },
    };
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.getTransformationMetadata = getTransformationMetadata;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
10
|
+
/**
 * Collects metadata about an existing customer transformation.
 *
 * @param {{ customerId: string, erpType: string }} args
 * @returns {Promise<{ success: boolean, data?: object, error?: string }>}
 *   On success, data contains existence flags, the file list, and rough
 *   line/method counts for the main transformer file.
 */
async function getTransformationMetadata(args) {
    try {
        const dir = (0, pathResolver_1.getCustomerDir)(args.customerId);
        const exists = fs_1.default.existsSync(dir);
        const files = exists ? fs_1.default.readdirSync(dir) : [];
        const erp = args.erpType.toLowerCase();
        const mainFile = `${args.customerId}_${erp}.ts`;
        // Tests follow the <customerId>_<erp>.test.ts convention (see getTestPath).
        const testFile = path_1.default.join((0, pathResolver_1.resolveSafePath)("tests/transformers"), `${args.customerId}_${erp}.test.ts`);
        let lineCount = 0;
        let methodCount = 0;
        if (files.includes(mainFile)) {
            const content = fs_1.default.readFileSync(path_1.default.join(dir, mainFile), "utf-8");
            lineCount = content.split("\n").length;
            // Heuristic method count: an indented identifier followed by "(",
            // optionally preceded by an access modifier. The negative lookahead
            // stops control-flow keywords (if/for/while/...) from being counted
            // as methods — the previous regex over-counted those.
            const methodPattern = /^\s+(?:public|private|protected)?\s*(?!if\b|for\b|while\b|switch\b|catch\b|return\b|else\b|new\b)\w+\s*\(/gm;
            methodCount = (content.match(methodPattern) ?? []).length;
        }
        return {
            success: true,
            data: {
                customerId: args.customerId,
                erpType: args.erpType,
                exists,
                files,
                mainFile,
                hasEnums: files.includes("enums.ts"),
                hasTests: fs_1.default.existsSync(testFile),
                lineCount,
                methodCount,
                directory: dir,
            },
        };
    }
    catch (err) {
        // Never throw to the caller; surface filesystem errors in the envelope.
        return { success: false, error: String(err) };
    }
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.listTransformations = listTransformations;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
10
|
+
/**
 * Lists all customer transformation directories, optionally filtered by a
 * case-insensitive substring of the customer id.
 *
 * @param {{ filter?: string }} args
 * @returns {Promise<{ success: boolean, data?: object, error?: string }>}
 */
async function listTransformations(args) {
    try {
        const customersPath = (0, pathResolver_1.resolveSafePath)("src/transformers/customers");
        if (!fs_1.default.existsSync(customersPath)) {
            return { success: true, data: { transformations: [], count: 0 } };
        }
        // Read the tests directory once up front instead of stat-ing per customer.
        const testsPath = (0, pathResolver_1.resolveSafePath)("tests/transformers");
        const testFiles = fs_1.default.existsSync(testsPath) ? fs_1.default.readdirSync(testsPath) : [];
        const customers = fs_1.default
            .readdirSync(customersPath, { withFileTypes: true })
            .filter((e) => e.isDirectory())
            .map((e) => e.name)
            .filter((c) => !args.filter || c.toLowerCase().includes(args.filter.toLowerCase()));
        const transformations = customers.map((customerId) => {
            const dir = path_1.default.join(customersPath, customerId);
            const files = fs_1.default.readdirSync(dir);
            const tsFiles = files.filter((f) => f.endsWith(".ts"));
            const hasEnums = files.includes("enums.ts");
            // Tests are named `<customerId>_<erp>.test.ts` (see getTestPath), so
            // match on the customer prefix rather than the exact-name check
            // `<customerId>.test.ts`, which never matched that convention.
            const prefix = customerId.toLowerCase();
            const hasTests = testFiles.some((f) => f.toLowerCase().startsWith(prefix) && f.endsWith(".test.ts"));
            return { customerId, files, tsFiles, hasEnums, hasTests, fileCount: files.length };
        });
        return { success: true, data: { transformations, count: transformations.length } };
    }
    catch (err) {
        return { success: false, error: String(err) };
    }
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.readFile = readFile;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
9
|
+
/**
 * Reads one file from inside the repository root.
 *
 * @param {{ path: string }} args - Repo-relative file path.
 * @returns {Promise<{ success: boolean, data?: object, error?: string }>}
 */
async function readFile(args) {
    try {
        // resolveSafePath throws on path-traversal attempts.
        const absolutePath = (0, pathResolver_1.resolveSafePath)(args.path);
        if (!fs_1.default.existsSync(absolutePath)) {
            return { success: false, error: `File not found: ${args.path}` };
        }
        const content = fs_1.default.readFileSync(absolutePath, "utf-8");
        return {
            success: true,
            data: { path: args.path, content, size: content.length },
        };
    }
    catch (err) {
        return { success: false, error: String(err) };
    }
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.readFolder = readFolder;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
10
|
+
/**
 * Lists a directory inside the repository root, optionally filtering entries
 * by a name substring.
 *
 * @param {{ path: string, pattern?: string }} args
 * @returns {Promise<{ success: boolean, data?: object, error?: string }>}
 */
async function readFolder(args) {
    try {
        // resolveSafePath throws on path-traversal attempts.
        const absolutePath = (0, pathResolver_1.resolveSafePath)(args.path);
        if (!fs_1.default.existsSync(absolutePath)) {
            return { success: false, error: `Folder not found: ${args.path}` };
        }
        const dirents = fs_1.default.readdirSync(absolutePath, { withFileTypes: true });
        let entries = dirents.map((dirent) => ({
            name: dirent.name,
            type: dirent.isDirectory() ? "directory" : "file",
            path: path_1.default.join(args.path, dirent.name),
        }));
        if (args.pattern) {
            entries = entries.filter((entry) => entry.name.includes(args.pattern));
        }
        return { success: true, data: { path: args.path, entries, count: entries.length } };
    }
    catch (err) {
        return { success: false, error: String(err) };
    }
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.validateTransformation = validateTransformation;
|
|
4
|
+
const child_process_1 = require("child_process");
|
|
5
|
+
const logger_1 = require("../utils/logger");
|
|
6
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
7
|
+
/**
 * Runs the Jest suite matching `<customerId>_<erp>` and parses the summary.
 *
 * @param {{ customerId: string, erpType: string }} args
 * @returns {Promise<{ success: boolean, data: object, error?: string }>}
 *   data contains parsed passed/failed counts and the tail of the output.
 */
async function validateTransformation(args) {
    const pattern = `${args.customerId}_${args.erpType.toLowerCase()}`;
    // SECURITY: pattern is interpolated into a shell command below. Reject
    // anything outside a safe charset so callers cannot inject shell
    // metacharacters through customerId/erpType.
    if (!/^[A-Za-z0-9_-]+$/.test(pattern)) {
        return { success: false, error: `Invalid test pattern: ${pattern}` };
    }
    (0, logger_1.log)("info", `Running tests for pattern: ${pattern}`);
    try {
        const output = (0, child_process_1.execSync)(`yarn test --testPathPatterns="${pattern}" --passWithNoTests --forceExit`, {
            encoding: "utf-8",
            timeout: 90000,
            cwd: pathResolver_1.REPO_ROOT,
        });
        const passed = parseInt((output.match(/(\d+) passed/) || [])[1] ?? "0", 10);
        const failed = parseInt((output.match(/(\d+) failed/) || [])[1] ?? "0", 10);
        return {
            success: failed === 0,
            data: { passed, failed, pattern, output: output.slice(-2000) },
        };
    }
    catch (err) {
        // execSync throws on a non-zero exit; Jest's summary is still available
        // on the error's stdout/stderr, so parse counts from there.
        const stdout = err.stdout ?? "";
        const stderr = err.stderr ?? "";
        const combined = stdout + stderr;
        const passed = parseInt((combined.match(/(\d+) passed/) || [])[1] ?? "0", 10);
        const failed = parseInt((combined.match(/(\d+) failed/) || [])[1] ?? "1", 10);
        return {
            success: false,
            data: { passed, failed, pattern, output: combined.slice(-2000) },
            error: "Tests failed",
        };
    }
}
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.writeTransformation = writeTransformation;
|
|
7
|
+
exports.writeGoldenFiles = writeGoldenFiles;
|
|
8
|
+
// ============================================================
|
|
9
|
+
// writeTransformation - Writes generated code to disk.
|
|
10
|
+
// Content is ALWAYS provided by the code generator.
|
|
11
|
+
// This tool NEVER generates code itself.
|
|
12
|
+
// ============================================================
|
|
13
|
+
const fs_1 = __importDefault(require("fs"));
|
|
14
|
+
const path_1 = __importDefault(require("path"));
|
|
15
|
+
const logger_1 = require("../utils/logger");
|
|
16
|
+
const pathResolver_1 = require("../utils/pathResolver");
|
|
17
|
+
/**
 * Writes generator-produced transformation code to disk. Content is always
 * supplied by the code generator; this tool never synthesizes code.
 *
 * @param {{ customerId: string, erpType: string, type: string, content?: string, dryRun?: boolean }} args
 * @returns {Promise<{ success: boolean, data?: object, error?: string }>}
 */
async function writeTransformation(args) {
    if (args.dryRun) {
        return {
            success: true,
            data: { dryRun: true, wouldWrite: args.type, contentLength: args.content?.length ?? 0 },
        };
    }
    // Guard against the generator handing back an empty or truncated body.
    if (!args.content || args.content.trim().length < 50) {
        return { success: false, error: "Content is empty or too short - code generator must provide full content" };
    }
    const { customerId: id, erpType: erp, type, content } = args;
    try {
        let filePath;
        switch (type) {
            case "class":
                filePath = (0, pathResolver_1.getTransformerPath)(id, erp);
                break;
            case "enum":
                filePath = (0, pathResolver_1.getEnumsPath)(id);
                break;
            case "test":
                filePath = (0, pathResolver_1.getTestPath)(id, erp);
                break;
            default:
                filePath = undefined;
        }
        if (!filePath) {
            return { success: false, error: `Unknown file type: ${type}` };
        }
        (0, pathResolver_1.ensureDir)(filePath);
        // Keep a timestamped backup of any file we are about to overwrite.
        if (fs_1.default.existsSync(filePath)) {
            const backupPath = `${filePath}.bak.${Date.now()}`;
            fs_1.default.copyFileSync(filePath, backupPath);
            (0, logger_1.log)("info", `Backed up existing file to ${path_1.default.basename(backupPath)}`);
        }
        fs_1.default.writeFileSync(filePath, content, "utf-8");
        const lines = content.split("\n").length;
        // Transformer classes may import shared data files; mirror those over.
        if (type === "class") {
            copyRequiredDataDependencies(id, erp, content);
        }
        (0, logger_1.log)("info", `Written ${type} for ${id}/${erp}: ${filePath} (${lines} lines)`);
        return {
            success: true,
            data: { path: filePath, type, lines, size: content.length },
        };
    }
    catch (err) {
        return { success: false, error: String(err) };
    }
}
|
|
60
|
+
/**
 * Copies the shared sapSubsidiaries data file from the reference SAP customer
 * into the target customer's directory, when the generated class imports it.
 *
 * @param {string} customerId - Target customer.
 * @param {string} erpType - ERP type; only "sap" (case-insensitive) applies.
 * @param {string} content - Generated transformer source to inspect.
 */
function copyRequiredDataDependencies(customerId, erpType, content) {
    // Only SAP transformers that actually import the data file need the copy.
    if (erpType.toLowerCase() !== "sap" || !content.includes("./data/sapSubsidiaries")) {
        return;
    }
    const referenceCustomer = (0, pathResolver_1.getReferenceCustomer)(erpType);
    const sourceFile = path_1.default.join((0, pathResolver_1.getCustomerDir)(referenceCustomer), "data", "sapSubsidiaries.ts");
    const targetFile = path_1.default.join((0, pathResolver_1.getCustomerDir)(customerId), "data", "sapSubsidiaries.ts");
    if (!fs_1.default.existsSync(sourceFile)) {
        // Best-effort: warn and continue rather than failing the write.
        (0, logger_1.log)("warn", `Missing reference dependency file: ${sourceFile}`);
        return;
    }
    (0, pathResolver_1.ensureDir)(targetFile);
    fs_1.default.copyFileSync(sourceFile, targetFile);
    (0, logger_1.log)("info", `Copied data dependency for ${customerId}/${erpType}: ${targetFile}`);
}
|
|
79
|
+
/**
 * Writes optional golden-fixture files (profile.json / payload.json) for a
 * customer into its golden directory.
 *
 * @param {{ customerId: string, profileContent?: string, payloadContent?: string }} args
 * @returns {Promise<{ success: boolean, data?: { written: string[] }, error?: string }>}
 */
async function writeGoldenFiles(args) {
    const goldenDir = (0, pathResolver_1.getGoldenDir)(args.customerId);
    // ensureDir creates the parent of a *file* path, so join a placeholder
    // name to make goldenDir itself exist.
    (0, pathResolver_1.ensureDir)(path_1.default.join(goldenDir, ".keep"));
    const written = [];
    const outputs = [
        ["profile.json", args.profileContent],
        ["payload.json", args.payloadContent],
    ];
    try {
        for (const [filename, body] of outputs) {
            if (!body) {
                continue; // that fixture was not supplied
            }
            const target = path_1.default.join(goldenDir, filename);
            fs_1.default.writeFileSync(target, body, "utf-8");
            written.push(target);
        }
        return { success: true, data: { written } };
    }
    catch (err) {
        return { success: false, error: String(err) };
    }
}
|
package/dist/utils/logger.js
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.log = log;
|
|
4
|
+
/**
 * Emits one structured JSON log line. "error" level goes to stderr; all
 * other levels go to stdout.
 *
 * @param {string} level - Log level, e.g. "info", "warn", "error".
 * @param {string} message - Human-readable message.
 * @param {*} [data] - Optional structured payload; omitted when undefined.
 */
function log(level, message, data) {
    const payload = {
        timestamp: new Date().toISOString(),
        level,
        message,
    };
    if (data !== undefined) {
        payload.data = data;
    }
    const stream = level === "error" ? process.stderr : process.stdout;
    stream.write(`${JSON.stringify(payload)}\n`);
}
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.ERP_REFERENCES = exports.REPO_ROOT = void 0;
|
|
7
|
+
exports.resolveSafePath = resolveSafePath;
|
|
8
|
+
exports.getCustomerDir = getCustomerDir;
|
|
9
|
+
exports.getTransformerPath = getTransformerPath;
|
|
10
|
+
exports.getEnumsPath = getEnumsPath;
|
|
11
|
+
exports.getTestPath = getTestPath;
|
|
12
|
+
exports.getGoldenDir = getGoldenDir;
|
|
13
|
+
exports.getApiDataDir = getApiDataDir;
|
|
14
|
+
exports.ensureDir = ensureDir;
|
|
15
|
+
exports.getReferenceCustomer = getReferenceCustomer;
|
|
16
|
+
const fs_1 = __importDefault(require("fs"));
|
|
17
|
+
const path_1 = __importDefault(require("path"));
|
|
18
|
+
// Marker paths that must all exist beneath a directory for it to count as
// the transformations repository root.
const expectedRepoSubpaths = [
    path_1.default.join("src", "transformers"),
    path_1.default.join("tests", "golden"),
    "package.json",
];
/**
 * Returns true when every repo-root marker exists under `candidate`.
 * @param {string} candidate - Absolute directory path to probe.
 */
function isRepoRoot(candidate) {
    for (const subpath of expectedRepoSubpaths) {
        if (!fs_1.default.existsSync(path_1.default.join(candidate, subpath))) {
            return false;
        }
    }
    return true;
}
|
|
22
|
+
/**
 * Locates the transformations repository root.
 *
 * Resolution order: GRAPHITE_REPO_ROOT env var (validated, throws if bogus),
 * then the current working directory, then this package's own location three
 * levels above dist/utils.
 *
 * @returns {string} Absolute repo root path.
 * @throws {Error} When no candidate passes the isRepoRoot check.
 */
function detectRepoRoot() {
    const configuredRoot = process.env.GRAPHITE_REPO_ROOT;
    if (configuredRoot) {
        const resolvedRoot = path_1.default.resolve(configuredRoot);
        if (!isRepoRoot(resolvedRoot)) {
            throw new Error(`GRAPHITE_REPO_ROOT does not look like repository root: ${resolvedRoot}`);
        }
        return resolvedRoot;
    }
    const cwd = process.cwd();
    if (isRepoRoot(cwd)) {
        return cwd;
    }
    const packageRelativeRoot = path_1.default.resolve(__dirname, "../../..");
    if (isRepoRoot(packageRelativeRoot)) {
        return packageRelativeRoot;
    }
    throw new Error("Could not detect repository root. Run from the transformations repo root or set GRAPHITE_REPO_ROOT.");
}
// Detected once at module load; all path helpers below anchor to this root.
exports.REPO_ROOT = detectRepoRoot();
|
|
42
|
+
/**
 * Resolves a repo-relative path and confines the result to REPO_ROOT.
 *
 * @param {string} relativePath - Path relative to the repository root.
 * @returns {string} Absolute, repo-confined path.
 * @throws {Error} When the resolved path escapes the repository root.
 */
function resolveSafePath(relativePath) {
    const resolved = path_1.default.resolve(exports.REPO_ROOT, relativePath);
    // A plain prefix check is bypassable: "/repo-evil" startsWith "/repo".
    // Accept only the root itself or paths under "<root><separator>".
    if (resolved !== exports.REPO_ROOT && !resolved.startsWith(exports.REPO_ROOT + path_1.default.sep)) {
        throw new Error(`Path traversal detected: ${relativePath}`);
    }
    return resolved;
}
|
|
49
|
+
/**
 * Directory holding one customer's transformer sources.
 * @param {string} customerId
 * @returns {string} Absolute path under src/transformers/customers.
 */
function getCustomerDir(customerId) {
    return path_1.default.join(exports.REPO_ROOT, "src", "transformers", "customers", customerId);
}
|
|
52
|
+
/**
 * Path of the main transformer class file: <customerId>_<erp>.ts.
 * @param {string} customerId
 * @param {string} erpType - Lowercased to form the file name.
 * @returns {string} Absolute file path inside the customer directory.
 */
function getTransformerPath(customerId, erpType) {
    const fileName = `${customerId}_${erpType.toLowerCase()}.ts`;
    return path_1.default.join(getCustomerDir(customerId), fileName);
}
|
|
55
|
+
/**
 * Path of the customer's shared enums.ts file.
 * @param {string} customerId
 * @returns {string} Absolute file path inside the customer directory.
 */
function getEnumsPath(customerId) {
    const customerDir = getCustomerDir(customerId);
    return path_1.default.join(customerDir, "enums.ts");
}
|
|
58
|
+
/**
 * Path of the Jest spec for a transformer:
 * tests/transformers/<customerId>_<erp>.test.ts.
 * @param {string} customerId
 * @param {string} erpType - Lowercased to form the file name.
 * @returns {string} Absolute spec file path.
 */
function getTestPath(customerId, erpType) {
    const specName = `${customerId}_${erpType.toLowerCase()}.test.ts`;
    return path_1.default.join(exports.REPO_ROOT, "tests", "transformers", specName);
}
|
|
61
|
+
/**
 * Golden-fixture directory for one customer.
 * @param {string} customerId
 * @returns {string} Absolute path under tests/golden.
 */
function getGoldenDir(customerId) {
    return path_1.default.join(exports.REPO_ROOT, "tests", "golden", customerId);
}
|
|
64
|
+
/**
 * Directory holding downloaded API snapshots for one customer.
 * @param {string} customerId
 * @returns {string} Absolute path under api-data.
 */
function getApiDataDir(customerId) {
    return path_1.default.join(exports.REPO_ROOT, "api-data", customerId);
}
|
|
67
|
+
/**
 * Ensures the parent directory of a file path exists.
 * @param {string} filePath - File (not directory) path whose parent is created.
 */
function ensureDir(filePath) {
    // recursive mkdir is a no-op when the directory already exists, so the
    // prior existsSync guard is unnecessary.
    fs_1.default.mkdirSync(path_1.default.dirname(filePath), { recursive: true });
}
|
|
73
|
+
// ERP -> reference customer mapping
|
|
74
|
+
exports.ERP_REFERENCES = {
|
|
75
|
+
SAP: "US1206",
|
|
76
|
+
ORACLE: "US17104",
|
|
77
|
+
S4: "US47815",
|
|
78
|
+
NETSUITE: "US18162",
|
|
79
|
+
};
|
|
80
|
+
function getReferenceCustomer(erpType) {
|
|
81
|
+
return exports.ERP_REFERENCES[erpType.toUpperCase()] ?? "US1206";
|
|
82
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@arrieta1/mcp-server",
|
|
3
|
+
"version": "2.0.0",
|
|
4
|
+
"description": "Project Graphite MCP server package",
|
|
5
|
+
"main": "dist/server.js",
|
|
6
|
+
"bin": {
|
|
7
|
+
"project-graphite-mcp": "bin/project-graphite-mcp.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"dist",
|
|
11
|
+
"bin",
|
|
12
|
+
"README.md"
|
|
13
|
+
],
|
|
14
|
+
"publishConfig": {
|
|
15
|
+
"access": "public"
|
|
16
|
+
},
|
|
17
|
+
"engines": {
|
|
18
|
+
"node": ">=18"
|
|
19
|
+
},
|
|
20
|
+
"scripts": {
|
|
21
|
+
"build": "tsc -p tsconfig.json",
|
|
22
|
+
"start": "node dist/server.js",
|
|
23
|
+
"dev": "ts-node src/server.ts",
|
|
24
|
+
"prepublishOnly": "npm run build"
|
|
25
|
+
},
|
|
26
|
+
"keywords": [
|
|
27
|
+
"mcp",
|
|
28
|
+
"json-rpc",
|
|
29
|
+
"graphite"
|
|
30
|
+
],
|
|
31
|
+
"license": "UNLICENSED",
|
|
32
|
+
"dependencies": {
|
|
33
|
+
"express": "^4.21.2"
|
|
34
|
+
},
|
|
35
|
+
"devDependencies": {
|
|
36
|
+
"@types/express": "^4.17.21",
|
|
37
|
+
"@types/node": "^22.15.30",
|
|
38
|
+
"ts-node": "^10.9.2",
|
|
39
|
+
"typescript": "^5.8.3"
|
|
40
|
+
}
|
|
41
|
+
}
|