@mcp-consultant-tools/azure-data-factory 27.0.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/AdfService.d.ts +156 -0
- package/build/AdfService.d.ts.map +1 -0
- package/build/AdfService.js +482 -0
- package/build/AdfService.js.map +1 -0
- package/build/index.d.ts +9 -0
- package/build/index.d.ts.map +1 -0
- package/build/index.js +911 -0
- package/build/index.js.map +1 -0
- package/build/types.d.ts +335 -0
- package/build/types.d.ts.map +1 -0
- package/build/types.js +5 -0
- package/build/types.js.map +1 -0
- package/build/utils/formatters.d.ts +41 -0
- package/build/utils/formatters.d.ts.map +1 -0
- package/build/utils/formatters.js +269 -0
- package/build/utils/formatters.js.map +1 -0
- package/package.json +57 -0
package/build/index.js
ADDED
|
@@ -0,0 +1,911 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Azure Data Factory MCP Server
|
|
4
|
+
*
|
|
5
|
+
* Provides tools for pipeline execution, monitoring, and error debugging.
|
|
6
|
+
*/
|
|
7
|
+
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
|
8
|
+
import { pathToFileURL } from 'node:url';
|
|
9
|
+
import { realpathSync } from 'node:fs';
|
|
10
|
+
import { createMcpServer, createEnvLoader } from '@mcp-consultant-tools/core';
|
|
11
|
+
import { z } from 'zod';
|
|
12
|
+
import { AdfService } from './AdfService.js';
|
|
13
|
+
import { formatPipelineRunSummary, formatActivityRuns, formatPipelineList, formatTriggerList, formatIntegrationRuntimeStatus, formatPipelineRunsJson, formatActivityRunsJson, } from './utils/formatters.js';
|
|
14
|
+
export function registerAzureDataFactoryTools(server, adfService) {
|
|
15
|
+
let service = adfService || null;
|
|
16
|
+
function getService() {
|
|
17
|
+
if (!service) {
|
|
18
|
+
const missingConfig = [];
|
|
19
|
+
let factories = [];
|
|
20
|
+
// Parse factory configuration
|
|
21
|
+
if (process.env.AZURE_DATA_FACTORIES) {
|
|
22
|
+
try {
|
|
23
|
+
factories = JSON.parse(process.env.AZURE_DATA_FACTORIES);
|
|
24
|
+
}
|
|
25
|
+
catch (error) {
|
|
26
|
+
throw new Error('Failed to parse AZURE_DATA_FACTORIES JSON');
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
else if (process.env.AZURE_DATA_FACTORY_SUBSCRIPTION_ID) {
|
|
30
|
+
// Single factory configuration
|
|
31
|
+
factories = [
|
|
32
|
+
{
|
|
33
|
+
id: 'default',
|
|
34
|
+
name: 'Default Data Factory',
|
|
35
|
+
subscriptionId: process.env.AZURE_DATA_FACTORY_SUBSCRIPTION_ID,
|
|
36
|
+
resourceGroup: process.env.AZURE_DATA_FACTORY_RESOURCE_GROUP || '',
|
|
37
|
+
factoryName: process.env.AZURE_DATA_FACTORY_NAME || '',
|
|
38
|
+
active: true,
|
|
39
|
+
},
|
|
40
|
+
];
|
|
41
|
+
if (!factories[0].resourceGroup) {
|
|
42
|
+
missingConfig.push('AZURE_DATA_FACTORY_RESOURCE_GROUP');
|
|
43
|
+
}
|
|
44
|
+
if (!factories[0].factoryName) {
|
|
45
|
+
missingConfig.push('AZURE_DATA_FACTORY_NAME');
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
else {
|
|
49
|
+
missingConfig.push('AZURE_DATA_FACTORIES or AZURE_DATA_FACTORY_SUBSCRIPTION_ID');
|
|
50
|
+
}
|
|
51
|
+
// Check Azure AD credentials
|
|
52
|
+
if (!process.env.AZURE_TENANT_ID) {
|
|
53
|
+
missingConfig.push('AZURE_TENANT_ID');
|
|
54
|
+
}
|
|
55
|
+
if (!process.env.AZURE_CLIENT_ID) {
|
|
56
|
+
missingConfig.push('AZURE_CLIENT_ID');
|
|
57
|
+
}
|
|
58
|
+
if (!process.env.AZURE_CLIENT_SECRET) {
|
|
59
|
+
missingConfig.push('AZURE_CLIENT_SECRET');
|
|
60
|
+
}
|
|
61
|
+
if (missingConfig.length > 0) {
|
|
62
|
+
throw new Error(`Missing Azure Data Factory configuration: ${missingConfig.join(', ')}`);
|
|
63
|
+
}
|
|
64
|
+
const config = {
|
|
65
|
+
factories,
|
|
66
|
+
tenantId: process.env.AZURE_TENANT_ID,
|
|
67
|
+
clientId: process.env.AZURE_CLIENT_ID,
|
|
68
|
+
clientSecret: process.env.AZURE_CLIENT_SECRET,
|
|
69
|
+
enableWrite: process.env.AZURE_DATA_FACTORY_ENABLE_WRITE?.toLowerCase() === 'true',
|
|
70
|
+
enableTriggerControl: process.env.AZURE_DATA_FACTORY_ENABLE_TRIGGER_CONTROL?.toLowerCase() ===
|
|
71
|
+
'true',
|
|
72
|
+
};
|
|
73
|
+
service = new AdfService(config);
|
|
74
|
+
console.error('Azure Data Factory service initialized');
|
|
75
|
+
}
|
|
76
|
+
return service;
|
|
77
|
+
}
|
|
78
|
+
    // ========================================
    // FACTORY TOOLS
    // ========================================
    // Lists configured factories plus the write/trigger-control capability flags,
    // so a client can discover valid factoryId values for the other tools.
    server.tool('adf-list-factories', 'List all configured Azure Data Factory instances', {}, async () => {
        try {
            const svc = getService();
            const factories = svc.getAllFactories();
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            factories: factories.map((f) => ({
                                id: f.id,
                                name: f.name,
                                factoryName: f.factoryName,
                                resourceGroup: f.resourceGroup,
                                active: f.active,
                            })),
                            writeEnabled: svc.isWriteEnabled(),
                            triggerControlEnabled: svc.isTriggerControlEnabled(),
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            // Errors (including missing configuration from getService) are returned
            // as MCP error results rather than thrown, so the client sees a message.
            console.error('Error listing factories:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to list factories: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
|
|
114
|
+
    // ========================================
    // PIPELINE TOOLS
    // ========================================
    // Markdown-formatted listing of every pipeline in the selected factory.
    server.tool('adf-list-pipelines', 'List all pipelines in an Azure Data Factory', {
        factoryId: z
            .string()
            .optional()
            .describe('Factory ID (use adf-list-factories to find IDs)'),
    }, async ({ factoryId }) => {
        try {
            const svc = getService();
            const pipelines = await svc.listPipelines(factoryId);
            const factory = svc.resolveFactory(factoryId);
            return {
                content: [
                    {
                        type: 'text',
                        text: `## Pipelines in ${factory.name}\n\n` +
                            formatPipelineList(pipelines) +
                            `\n\n**Total: ${pipelines.length} pipelines**`,
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error listing pipelines:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to list pipelines: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Raw JSON dump of a single pipeline definition (activities, parameters, etc.).
    server.tool('adf-get-pipeline', 'Get details of a specific pipeline including activities and parameters', {
        pipelineName: z.string().describe('Name of the pipeline'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ pipelineName, factoryId, }) => {
        try {
            const svc = getService();
            const pipeline = await svc.getPipeline(pipelineName, factoryId);
            return {
                content: [{ type: 'text', text: JSON.stringify(pipeline, null, 2) }],
            };
        }
        catch (error) {
            console.error('Error getting pipeline:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to get pipeline: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Write operation: triggers a run. The service is expected to enforce the
    // AZURE_DATA_FACTORY_ENABLE_WRITE gate (see tool description).
    server.tool('adf-run-pipeline', 'Trigger an Azure Data Factory pipeline run. Requires AZURE_DATA_FACTORY_ENABLE_WRITE=true.', {
        pipelineName: z.string().describe('Name of the pipeline to run'),
        parameters: z
            .record(z.any())
            .optional()
            .describe('Pipeline parameters as key-value pairs'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ pipelineName, parameters, factoryId, }) => {
        try {
            const svc = getService();
            const result = await svc.runPipeline(pipelineName, parameters, factoryId);
            const factory = svc.resolveFactory(factoryId);
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            runId: result.runId,
                            message: `Pipeline '${pipelineName}' triggered successfully`,
                            factory: factory.name,
                            // NOTE(review): URL lacks factory/subscription query context —
                            // presumably intended as a convenience link; verify it resolves.
                            monitorUrl: `https://adf.azure.com/en/monitoring/pipelineRuns/${result.runId}`,
                            nextSteps: [
                                `Use adf-get-pipeline-run with runId '${result.runId}' to check status`,
                                "Use adf-get-activity-runs to see activity-level details if the run fails",
                            ],
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error running pipeline:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to run pipeline: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Human-readable summary of one run (status, duration, error text if failed).
    server.tool('adf-get-pipeline-run', 'Get the status and details of a pipeline run including error messages if failed', {
        runId: z.string().describe('The pipeline run ID'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ runId, factoryId }) => {
        try {
            const svc = getService();
            const run = await svc.getPipelineRun(runId, factoryId);
            return {
                content: [{ type: 'text', text: formatPipelineRunSummary(run) }],
            };
        }
        catch (error) {
            console.error('Error getting pipeline run:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to get pipeline run: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Per-activity drill-down for a run; returns both a formatted view and raw JSON.
    server.tool('adf-get-activity-runs', 'Get activity-level details for a pipeline run. Essential for debugging - shows which activity failed and why.', {
        runId: z.string().describe('The pipeline run ID'),
        status: z
            .enum(['Succeeded', 'Failed', 'InProgress', 'Cancelled', 'Queued'])
            .optional()
            .describe('Filter by activity status'),
        activityName: z
            .string()
            .optional()
            .describe('Filter by specific activity name'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ runId, status, activityName, factoryId, }) => {
        try {
            const svc = getService();
            const activities = await svc.getActivityRuns(runId, factoryId, {
                status,
                activityName,
            });
            // Try to get pipeline run info for context
            let pipelineRun;
            try {
                pipelineRun = await svc.getPipelineRun(runId, factoryId);
            }
            catch {
                // Ignore - just for context; pipelineRun stays undefined on failure.
            }
            const formatted = formatActivityRuns(activities, pipelineRun);
            const json = formatActivityRunsJson(activities);
            return {
                content: [
                    { type: 'text', text: formatted },
                    {
                        type: 'text',
                        text: '\n\n---\n\n**JSON Response:**\n\n```json\n' + JSON.stringify(json, null, 2) + '\n```',
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error getting activity runs:', error);
            return {
                content: [
                    {
                        type: 'text',
                        text: `Failed to get activity runs: ${error.message}`,
                    },
                ],
                isError: true,
            };
        }
    });
    // Write operation: requests cancellation of an in-flight run (asynchronous on the ADF side).
    server.tool('adf-cancel-pipeline-run', 'Cancel a running pipeline. Requires AZURE_DATA_FACTORY_ENABLE_WRITE=true.', {
        runId: z.string().describe('The pipeline run ID to cancel'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ runId, factoryId }) => {
        try {
            const svc = getService();
            await svc.cancelPipelineRun(runId, factoryId);
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            message: `Pipeline run '${runId}' cancellation initiated`,
                            note: 'The pipeline may take a moment to fully cancel',
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error cancelling pipeline run:', error);
            return {
                content: [
                    {
                        type: 'text',
                        text: `Failed to cancel pipeline run: ${error.message}`,
                    },
                ],
                isError: true,
            };
        }
    });
|
|
313
|
+
server.tool('adf-query-pipeline-runs', 'Query pipeline runs with filters (date range, status, pipeline name)', {
|
|
314
|
+
lastDays: z
|
|
315
|
+
.number()
|
|
316
|
+
.optional()
|
|
317
|
+
.default(7)
|
|
318
|
+
.describe('Number of days to look back (default: 7)'),
|
|
319
|
+
pipelineName: z.string().optional().describe('Filter by pipeline name'),
|
|
320
|
+
status: z
|
|
321
|
+
.enum(['Queued', 'InProgress', 'Succeeded', 'Failed', 'Canceling', 'Cancelled'])
|
|
322
|
+
.optional()
|
|
323
|
+
.describe('Filter by run status'),
|
|
324
|
+
factoryId: z.string().optional().describe('Factory ID'),
|
|
325
|
+
}, async ({ lastDays, pipelineName, status, factoryId, }) => {
|
|
326
|
+
try {
|
|
327
|
+
const svc = getService();
|
|
328
|
+
const now = new Date();
|
|
329
|
+
const days = lastDays || 7;
|
|
330
|
+
const request = {
|
|
331
|
+
lastUpdatedAfter: new Date(now.getTime() - days * 24 * 60 * 60 * 1000).toISOString(),
|
|
332
|
+
lastUpdatedBefore: new Date(now.getTime() + 24 * 60 * 60 * 1000).toISOString(),
|
|
333
|
+
filters: [],
|
|
334
|
+
orderBy: [{ orderBy: 'RunStart', order: 'DESC' }],
|
|
335
|
+
};
|
|
336
|
+
if (pipelineName) {
|
|
337
|
+
request.filters.push({
|
|
338
|
+
operand: 'PipelineName',
|
|
339
|
+
operator: 'Equals',
|
|
340
|
+
values: [pipelineName],
|
|
341
|
+
});
|
|
342
|
+
}
|
|
343
|
+
if (status) {
|
|
344
|
+
request.filters.push({
|
|
345
|
+
operand: 'Status',
|
|
346
|
+
operator: 'Equals',
|
|
347
|
+
values: [status],
|
|
348
|
+
});
|
|
349
|
+
}
|
|
350
|
+
const result = await svc.queryPipelineRuns(request, factoryId);
|
|
351
|
+
const formatted = formatPipelineRunsJson(result.value);
|
|
352
|
+
return {
|
|
353
|
+
content: [{ type: 'text', text: JSON.stringify(formatted, null, 2) }],
|
|
354
|
+
};
|
|
355
|
+
}
|
|
356
|
+
catch (error) {
|
|
357
|
+
console.error('Error querying pipeline runs:', error);
|
|
358
|
+
return {
|
|
359
|
+
content: [
|
|
360
|
+
{
|
|
361
|
+
type: 'text',
|
|
362
|
+
text: `Failed to query pipeline runs: ${error.message}`,
|
|
363
|
+
},
|
|
364
|
+
],
|
|
365
|
+
isError: true,
|
|
366
|
+
};
|
|
367
|
+
}
|
|
368
|
+
});
|
|
369
|
+
    // Write operation: reruns a failed run in recovery mode by re-triggering the
    // same pipeline with the original run's parameters and a reference to the
    // failed run, so ADF can resume from the point of failure.
    server.tool('adf-rerun-pipeline', 'Rerun a failed pipeline from the point of failure (recovery mode). Requires AZURE_DATA_FACTORY_ENABLE_WRITE=true.', {
        failedRunId: z.string().describe('The run ID of the failed pipeline'),
        startFromFailure: z
            .boolean()
            .optional()
            .default(true)
            .describe('Start from failed activities (default: true)'),
        startActivityName: z
            .string()
            .optional()
            .describe('Optionally specify exact activity to start from'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ failedRunId, startFromFailure, startActivityName, factoryId, }) => {
        try {
            const svc = getService();
            // Get the original run to find the pipeline name
            const originalRun = await svc.getPipelineRun(failedRunId, factoryId);
            const result = await svc.runPipeline(originalRun.pipelineName, originalRun.parameters, factoryId, {
                referencePipelineRunId: failedRunId,
                isRecovery: true,
                // `??` keeps an explicit false; zod's .default(true) covers omission.
                startFromFailure: startFromFailure ?? true,
                startActivityName,
            });
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            runId: result.runId,
                            message: `Pipeline '${originalRun.pipelineName}' rerun initiated in recovery mode`,
                            originalRunId: failedRunId,
                            startFromFailure: startFromFailure ?? true,
                            startActivityName: startActivityName || 'Auto-detect failed activities',
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error rerunning pipeline:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to rerun pipeline: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
|
|
417
|
+
    // ========================================
    // DATASET TOOLS
    // ========================================
    // Compact summary listing of datasets (name/type/linked service/folder).
    server.tool('adf-list-datasets', 'List all datasets in an Azure Data Factory', {
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ factoryId }) => {
        try {
            const svc = getService();
            const datasets = await svc.listDatasets(factoryId);
            const factory = svc.resolveFactory(factoryId);
            const summary = datasets.map((d) => ({
                name: d.name,
                type: d.properties.type,
                linkedService: d.properties.linkedServiceName?.referenceName,
                folder: d.properties.folder?.name,
            }));
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            factory: factory.name,
                            count: datasets.length,
                            datasets: summary,
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error listing datasets:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to list datasets: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Full JSON definition of one dataset, including schema if present.
    server.tool('adf-get-dataset', 'Get details of a specific dataset including schema', {
        datasetName: z.string().describe('Name of the dataset'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ datasetName, factoryId, }) => {
        try {
            const svc = getService();
            const dataset = await svc.getDataset(datasetName, factoryId);
            return {
                content: [{ type: 'text', text: JSON.stringify(dataset, null, 2) }],
            };
        }
        catch (error) {
            console.error('Error getting dataset:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to get dataset: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
|
|
477
|
+
    // ========================================
    // LINKED SERVICE TOOLS
    // ========================================
    // Only non-sensitive metadata is surfaced; credential redaction is
    // presumably done by the service layer — TODO confirm in AdfService.
    server.tool('adf-list-linked-services', 'List all linked services in an Azure Data Factory (credentials sanitized for security)', {
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ factoryId }) => {
        try {
            const svc = getService();
            const linkedServices = await svc.listLinkedServices(factoryId);
            const factory = svc.resolveFactory(factoryId);
            const summary = linkedServices.map((ls) => ({
                name: ls.name,
                type: ls.properties.type,
                description: ls.properties.description,
                connectVia: ls.properties.connectVia?.referenceName,
            }));
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            factory: factory.name,
                            count: linkedServices.length,
                            linkedServices: summary,
                            note: 'Connection strings and credentials are redacted for security',
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error listing linked services:', error);
            return {
                content: [
                    {
                        type: 'text',
                        text: `Failed to list linked services: ${error.message}`,
                    },
                ],
                isError: true,
            };
        }
    });
|
|
520
|
+
    // ========================================
    // DATA FLOW TOOLS
    // ========================================
    // Compact summary listing of data flows (name/type/description/folder).
    server.tool('adf-list-data-flows', 'List all data flows in an Azure Data Factory', {
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ factoryId }) => {
        try {
            const svc = getService();
            const dataFlows = await svc.listDataFlows(factoryId);
            const factory = svc.resolveFactory(factoryId);
            const summary = dataFlows.map((df) => ({
                name: df.name,
                type: df.properties.type,
                description: df.properties.description,
                folder: df.properties.folder?.name,
            }));
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            factory: factory.name,
                            count: dataFlows.length,
                            dataFlows: summary,
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error listing data flows:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to list data flows: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Full JSON definition of one data flow.
    server.tool('adf-get-data-flow', 'Get details of a specific data flow', {
        dataFlowName: z.string().describe('Name of the data flow'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ dataFlowName, factoryId, }) => {
        try {
            const svc = getService();
            const dataFlow = await svc.getDataFlow(dataFlowName, factoryId);
            return {
                content: [{ type: 'text', text: JSON.stringify(dataFlow, null, 2) }],
            };
        }
        catch (error) {
            console.error('Error getting data flow:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to get data flow: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
|
|
580
|
+
    // ========================================
    // TRIGGER TOOLS
    // ========================================
    // Markdown-formatted listing of triggers with their runtime state.
    server.tool('adf-list-triggers', 'List all triggers in an Azure Data Factory with their current state', {
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ factoryId }) => {
        try {
            const svc = getService();
            const triggers = await svc.listTriggers(factoryId);
            const factory = svc.resolveFactory(factoryId);
            return {
                content: [
                    {
                        type: 'text',
                        text: `## Triggers in ${factory.name}\n\n` +
                            formatTriggerList(triggers) +
                            `\n\n**Total: ${triggers.length} triggers**`,
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error listing triggers:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to list triggers: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Full JSON definition of one trigger (schedule/tumbling-window config, etc.).
    server.tool('adf-get-trigger', 'Get details of a specific trigger including its configuration', {
        triggerName: z.string().describe('Name of the trigger'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ triggerName, factoryId, }) => {
        try {
            const svc = getService();
            const trigger = await svc.getTrigger(triggerName, factoryId);
            return {
                content: [{ type: 'text', text: JSON.stringify(trigger, null, 2) }],
            };
        }
        catch (error) {
            console.error('Error getting trigger:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to get trigger: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Gated write operation: activates a trigger (asynchronous on the ADF side).
    server.tool('adf-start-trigger', 'Start (activate) a trigger. Requires AZURE_DATA_FACTORY_ENABLE_TRIGGER_CONTROL=true.', {
        triggerName: z.string().describe('Name of the trigger to start'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ triggerName, factoryId, }) => {
        try {
            const svc = getService();
            await svc.startTrigger(triggerName, factoryId);
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            message: `Trigger '${triggerName}' start initiated`,
                            note: 'The trigger may take a moment to fully start',
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error starting trigger:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to start trigger: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
    // Gated write operation: deactivates a trigger (asynchronous on the ADF side).
    server.tool('adf-stop-trigger', 'Stop (deactivate) a trigger. Requires AZURE_DATA_FACTORY_ENABLE_TRIGGER_CONTROL=true.', {
        triggerName: z.string().describe('Name of the trigger to stop'),
        factoryId: z.string().optional().describe('Factory ID'),
    }, async ({ triggerName, factoryId, }) => {
        try {
            const svc = getService();
            await svc.stopTrigger(triggerName, factoryId);
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            message: `Trigger '${triggerName}' stop initiated`,
                            note: 'The trigger may take a moment to fully stop',
                        }, null, 2),
                    },
                ],
            };
        }
        catch (error) {
            console.error('Error stopping trigger:', error);
            return {
                content: [
                    { type: 'text', text: `Failed to stop trigger: ${error.message}` },
                ],
                isError: true,
            };
        }
    });
|
|
690
|
+
server.tool('adf-query-trigger-runs', 'Query trigger execution history', {
|
|
691
|
+
lastDays: z
|
|
692
|
+
.number()
|
|
693
|
+
.optional()
|
|
694
|
+
.default(7)
|
|
695
|
+
.describe('Number of days to look back (default: 7)'),
|
|
696
|
+
triggerName: z.string().optional().describe('Filter by trigger name'),
|
|
697
|
+
status: z
|
|
698
|
+
.enum(['Succeeded', 'Failed', 'Inprogress'])
|
|
699
|
+
.optional()
|
|
700
|
+
.describe('Filter by run status'),
|
|
701
|
+
factoryId: z.string().optional().describe('Factory ID'),
|
|
702
|
+
}, async ({ lastDays, triggerName, status, factoryId, }) => {
|
|
703
|
+
try {
|
|
704
|
+
const svc = getService();
|
|
705
|
+
const now = new Date();
|
|
706
|
+
const days = lastDays || 7;
|
|
707
|
+
const request = {
|
|
708
|
+
lastUpdatedAfter: new Date(now.getTime() - days * 24 * 60 * 60 * 1000).toISOString(),
|
|
709
|
+
lastUpdatedBefore: new Date(now.getTime() + 24 * 60 * 60 * 1000).toISOString(),
|
|
710
|
+
filters: [],
|
|
711
|
+
orderBy: [
|
|
712
|
+
{ orderBy: 'TriggerRunTimestamp', order: 'DESC' },
|
|
713
|
+
],
|
|
714
|
+
};
|
|
715
|
+
if (triggerName) {
|
|
716
|
+
request.filters.push({
|
|
717
|
+
operand: 'TriggerName',
|
|
718
|
+
operator: 'Equals',
|
|
719
|
+
values: [triggerName],
|
|
720
|
+
});
|
|
721
|
+
}
|
|
722
|
+
if (status) {
|
|
723
|
+
request.filters.push({
|
|
724
|
+
operand: 'Status',
|
|
725
|
+
operator: 'Equals',
|
|
726
|
+
values: [status],
|
|
727
|
+
});
|
|
728
|
+
}
|
|
729
|
+
const result = await svc.queryTriggerRuns(request, factoryId);
|
|
730
|
+
return {
|
|
731
|
+
content: [
|
|
732
|
+
{
|
|
733
|
+
type: 'text',
|
|
734
|
+
text: JSON.stringify({
|
|
735
|
+
count: result.value.length,
|
|
736
|
+
triggerRuns: result.value.map((tr) => ({
|
|
737
|
+
triggerRunId: tr.triggerRunId,
|
|
738
|
+
triggerName: tr.triggerName,
|
|
739
|
+
triggerType: tr.triggerType,
|
|
740
|
+
status: tr.status,
|
|
741
|
+
timestamp: tr.triggerRunTimestamp,
|
|
742
|
+
message: tr.message,
|
|
743
|
+
})),
|
|
744
|
+
}, null, 2),
|
|
745
|
+
},
|
|
746
|
+
],
|
|
747
|
+
};
|
|
748
|
+
}
|
|
749
|
+
catch (error) {
|
|
750
|
+
console.error('Error querying trigger runs:', error);
|
|
751
|
+
return {
|
|
752
|
+
content: [
|
|
753
|
+
{
|
|
754
|
+
type: 'text',
|
|
755
|
+
text: `Failed to query trigger runs: ${error.message}`,
|
|
756
|
+
},
|
|
757
|
+
],
|
|
758
|
+
isError: true,
|
|
759
|
+
};
|
|
760
|
+
}
|
|
761
|
+
});
|
|
762
|
+
// ========================================
|
|
763
|
+
// INTEGRATION RUNTIME TOOLS
|
|
764
|
+
// ========================================
|
|
765
|
+
server.tool('adf-list-integration-runtimes', 'List all integration runtimes in an Azure Data Factory', {
|
|
766
|
+
factoryId: z.string().optional().describe('Factory ID'),
|
|
767
|
+
}, async ({ factoryId }) => {
|
|
768
|
+
try {
|
|
769
|
+
const svc = getService();
|
|
770
|
+
const runtimes = await svc.listIntegrationRuntimes(factoryId);
|
|
771
|
+
const factory = svc.resolveFactory(factoryId);
|
|
772
|
+
const summary = runtimes.map((ir) => ({
|
|
773
|
+
name: ir.name,
|
|
774
|
+
type: ir.properties.type,
|
|
775
|
+
state: ir.properties.state,
|
|
776
|
+
description: ir.properties.description,
|
|
777
|
+
}));
|
|
778
|
+
return {
|
|
779
|
+
content: [
|
|
780
|
+
{
|
|
781
|
+
type: 'text',
|
|
782
|
+
text: JSON.stringify({
|
|
783
|
+
factory: factory.name,
|
|
784
|
+
count: runtimes.length,
|
|
785
|
+
integrationRuntimes: summary,
|
|
786
|
+
}, null, 2),
|
|
787
|
+
},
|
|
788
|
+
],
|
|
789
|
+
};
|
|
790
|
+
}
|
|
791
|
+
catch (error) {
|
|
792
|
+
console.error('Error listing integration runtimes:', error);
|
|
793
|
+
return {
|
|
794
|
+
content: [
|
|
795
|
+
{
|
|
796
|
+
type: 'text',
|
|
797
|
+
text: `Failed to list integration runtimes: ${error.message}`,
|
|
798
|
+
},
|
|
799
|
+
],
|
|
800
|
+
isError: true,
|
|
801
|
+
};
|
|
802
|
+
}
|
|
803
|
+
});
|
|
804
|
+
server.tool('adf-get-integration-runtime-status', 'Get detailed status of an integration runtime', {
|
|
805
|
+
irName: z.string().describe('Name of the integration runtime'),
|
|
806
|
+
factoryId: z.string().optional().describe('Factory ID'),
|
|
807
|
+
}, async ({ irName, factoryId }) => {
|
|
808
|
+
try {
|
|
809
|
+
const svc = getService();
|
|
810
|
+
const status = await svc.getIntegrationRuntimeStatus(irName, factoryId);
|
|
811
|
+
return {
|
|
812
|
+
content: [{ type: 'text', text: formatIntegrationRuntimeStatus(status) }],
|
|
813
|
+
};
|
|
814
|
+
}
|
|
815
|
+
catch (error) {
|
|
816
|
+
console.error('Error getting integration runtime status:', error);
|
|
817
|
+
return {
|
|
818
|
+
content: [
|
|
819
|
+
{
|
|
820
|
+
type: 'text',
|
|
821
|
+
text: `Failed to get integration runtime status: ${error.message}`,
|
|
822
|
+
},
|
|
823
|
+
],
|
|
824
|
+
isError: true,
|
|
825
|
+
};
|
|
826
|
+
}
|
|
827
|
+
});
|
|
828
|
+
server.tool('adf-start-integration-runtime', 'Start a managed integration runtime. Requires AZURE_DATA_FACTORY_ENABLE_WRITE=true.', {
|
|
829
|
+
irName: z.string().describe('Name of the integration runtime to start'),
|
|
830
|
+
factoryId: z.string().optional().describe('Factory ID'),
|
|
831
|
+
}, async ({ irName, factoryId }) => {
|
|
832
|
+
try {
|
|
833
|
+
const svc = getService();
|
|
834
|
+
await svc.startIntegrationRuntime(irName, factoryId);
|
|
835
|
+
return {
|
|
836
|
+
content: [
|
|
837
|
+
{
|
|
838
|
+
type: 'text',
|
|
839
|
+
text: JSON.stringify({
|
|
840
|
+
message: `Integration runtime '${irName}' start initiated`,
|
|
841
|
+
note: 'Managed IR startup can take 2-5 minutes',
|
|
842
|
+
}, null, 2),
|
|
843
|
+
},
|
|
844
|
+
],
|
|
845
|
+
};
|
|
846
|
+
}
|
|
847
|
+
catch (error) {
|
|
848
|
+
console.error('Error starting integration runtime:', error);
|
|
849
|
+
return {
|
|
850
|
+
content: [
|
|
851
|
+
{
|
|
852
|
+
type: 'text',
|
|
853
|
+
text: `Failed to start integration runtime: ${error.message}`,
|
|
854
|
+
},
|
|
855
|
+
],
|
|
856
|
+
isError: true,
|
|
857
|
+
};
|
|
858
|
+
}
|
|
859
|
+
});
|
|
860
|
+
server.tool('adf-stop-integration-runtime', 'Stop a managed integration runtime. Requires AZURE_DATA_FACTORY_ENABLE_WRITE=true.', {
|
|
861
|
+
irName: z.string().describe('Name of the integration runtime to stop'),
|
|
862
|
+
factoryId: z.string().optional().describe('Factory ID'),
|
|
863
|
+
}, async ({ irName, factoryId }) => {
|
|
864
|
+
try {
|
|
865
|
+
const svc = getService();
|
|
866
|
+
await svc.stopIntegrationRuntime(irName, factoryId);
|
|
867
|
+
return {
|
|
868
|
+
content: [
|
|
869
|
+
{
|
|
870
|
+
type: 'text',
|
|
871
|
+
text: JSON.stringify({
|
|
872
|
+
message: `Integration runtime '${irName}' stop initiated`,
|
|
873
|
+
note: 'The IR may take a moment to fully stop',
|
|
874
|
+
}, null, 2),
|
|
875
|
+
},
|
|
876
|
+
],
|
|
877
|
+
};
|
|
878
|
+
}
|
|
879
|
+
catch (error) {
|
|
880
|
+
console.error('Error stopping integration runtime:', error);
|
|
881
|
+
return {
|
|
882
|
+
content: [
|
|
883
|
+
{
|
|
884
|
+
type: 'text',
|
|
885
|
+
text: `Failed to stop integration runtime: ${error.message}`,
|
|
886
|
+
},
|
|
887
|
+
],
|
|
888
|
+
isError: true,
|
|
889
|
+
};
|
|
890
|
+
}
|
|
891
|
+
});
|
|
892
|
+
console.error('Azure Data Factory tools registered: 24 tools');
|
|
893
|
+
}
|
|
894
|
+
// CLI entry point (standalone execution): only start the stdio server when this
// file is the script Node was launched with, not when it is imported as a library.
// The process.argv[1] guard prevents realpathSync(undefined) from throwing when
// the module is loaded in a context with no script path (e.g. `node -e` or REPL).
if (process.argv[1] &&
    import.meta.url === pathToFileURL(realpathSync(process.argv[1])).href) {
    const loadEnv = createEnvLoader();
    loadEnv();
    const server = createMcpServer({
        name: 'mcp-azure-data-factory',
        version: '1.0.0',
        capabilities: { tools: {} },
    });
    registerAzureDataFactoryTools(server);
    const transport = new StdioServerTransport();
    // connect() resolves once the transport is wired up; failures are fatal.
    server.connect(transport).catch((error) => {
        console.error('Failed to start Azure Data Factory MCP server:', error);
        process.exit(1);
    });
    // Log to stderr so stdout stays reserved for the MCP stdio protocol.
    console.error('Azure Data Factory MCP server running');
}
|
|
911
|
+
//# sourceMappingURL=index.js.map
|