gd-gitlab-mcp 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +176 -0
- package/dist/config/index.js +7 -0
- package/dist/server.js +923 -0
- package/dist/services/gitlab.service.js +477 -0
- package/dist/services/review.service.js +35 -0
- package/dist/tools/getDiff.tool.js +10 -0
- package/dist/tools/listMR.tool.js +10 -0
- package/dist/tools/reviewMR.tool.js +23 -0
- package/dist/utils/index.js +17 -0
- package/package.json +29 -0
|
@@ -0,0 +1,477 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.GitLabService = exports.GitLabApiError = void 0;
|
|
7
|
+
const axios_1 = __importDefault(require("axios"));
|
|
8
|
+
const utils_1 = require("../utils");
|
|
9
|
+
// ─── Error types ─────────────────────────────────────────────────────────────
|
|
10
|
+
/**
 * Error type raised for any failed GitLab API interaction.
 * Carries the HTTP status (0 when no response was received) and the raw
 * detail string extracted from the GitLab response body.
 */
class GitLabApiError extends Error {
    /**
     * @param {string} message - Human-readable summary of the failure.
     * @param {number} status - HTTP status code, or 0 for transport errors.
     * @param {string} [detail] - Raw detail from the GitLab response, if any.
     */
    constructor(message, status, detail) {
        super(message);
        this.name = 'GitLabApiError';
        this.status = status;
        this.detail = detail;
    }
}
|
|
18
|
+
exports.GitLabApiError = GitLabApiError;
|
|
19
|
+
/**
 * Translates a raw axios error into a thrown GitLabApiError with a
 * user-facing message keyed off the HTTP status. Always throws.
 */
function mapAxiosError(error) {
    const status = error.response?.status;
    const data = error.response?.data;
    // Prefer GitLab's structured message/error fields over axios's message.
    let detail;
    if (typeof data?.message === 'string') {
        detail = data.message;
    } else if (typeof data?.error === 'string') {
        detail = data.error;
    } else {
        detail = error.message;
    }
    switch (status) {
        case 401:
            throw new GitLabApiError('Authentication failed — check your GitLab PAT', 401, detail);
        case 403:
            throw new GitLabApiError('Access denied — token lacks required permissions', 403, detail);
        case 404:
            throw new GitLabApiError('Resource not found on GitLab', 404, detail);
        case 409:
            throw new GitLabApiError('Conflict — resource already exists or state mismatch', 409, detail);
        case 422:
            throw new GitLabApiError(`Unprocessable entity: ${detail}`, 422, detail);
        case 429: {
            // Surface the server's Retry-After hint when present.
            const retryAfter = error.response?.headers?.['retry-after'];
            const suffix = retryAfter ? ` Retry after ${retryAfter}s.` : '';
            throw new GitLabApiError(`Rate limit exceeded.${suffix}`, 429, detail);
        }
        default:
            break;
    }
    if (status && status >= 500) {
        throw new GitLabApiError(`GitLab server error (${status})`, status, detail);
    }
    // Timeouts and cancellations have no response; report them as 408.
    if (error.code === 'ECONNABORTED' || error.code === 'ERR_CANCELED') {
        throw new GitLabApiError('Request timed out', 408, detail);
    }
    throw new GitLabApiError(`Request failed: ${error.message}`, status ?? 0, detail);
}
|
|
47
|
+
// ─── Retry with exponential backoff ──────────────────────────────────────────
|
|
48
|
+
// Statuses worth retrying: timeout, rate limit, and server-side failures.
const RETRYABLE_STATUSES = new Set([408, 429, 500, 502, 503, 504]);
/**
 * Runs `fn`, retrying on retryable GitLabApiError statuses with exponential
 * backoff (500ms doubling per attempt, capped at 8s). Non-retryable errors
 * and the final failed attempt are rethrown unchanged.
 */
async function withRetry(label, fn, maxAttempts = 3) {
    let lastFailure;
    let attempt = 0;
    while (attempt < maxAttempts) {
        try {
            return await fn();
        }
        catch (err) {
            lastFailure = err;
            const canRetry = err instanceof GitLabApiError && RETRYABLE_STATUSES.has(err.status);
            const isFinalAttempt = attempt === maxAttempts - 1;
            if (!canRetry || isFinalAttempt) {
                throw err;
            }
            const backoffMs = Math.min(500 * 2 ** attempt, 8000);
            (0, utils_1.logger)('warn', `${label}: retryable error on attempt ${attempt + 1}/${maxAttempts}, retrying in ${backoffMs}ms`, err.message);
            await new Promise((resolve) => setTimeout(resolve, backoffMs));
        }
        attempt += 1;
    }
    // Unreachable in practice (loop either returns or throws), kept as a guard.
    throw lastFailure;
}
|
|
67
|
+
/**
 * Minimal time-to-live cache backed by a Map. Entries expire `ttlMs`
 * milliseconds after insertion; expired entries are pruned lazily on read.
 */
class TTLCache {
    /** @param {number} ttlMs - Lifetime of each entry in milliseconds. */
    constructor(ttlMs) {
        this.ttlMs = ttlMs;
        this.store = new Map();
    }
    /** Returns the cached value, or undefined when absent or expired. */
    get(key) {
        const entry = this.store.get(key);
        if (entry !== undefined && Date.now() <= entry.expiresAt) {
            return entry.value;
        }
        // Miss or expiry: drop any stale entry so the map does not grow.
        this.store.delete(key);
        return undefined;
    }
    /** Stores a value, stamping its expiry from the configured TTL. */
    set(key, value) {
        const expiresAt = Date.now() + this.ttlMs;
        this.store.set(key, { value, expiresAt });
    }
    /** Removes a single entry immediately. */
    invalidate(key) {
        this.store.delete(key);
    }
}
|
|
87
|
+
// ─── In-flight request deduplicator ──────────────────────────────────────────
|
|
88
|
+
/**
 * Collapses concurrent async calls that share a key onto a single in-flight
 * promise. The entry is removed once the promise settles, so a later call
 * with the same key starts a fresh request.
 */
class Deduplicator {
    constructor() {
        this.inflight = new Map();
    }
    /**
     * Returns the in-flight promise for `key` if one exists; otherwise starts
     * `fn()`, registers it under `key`, and returns it.
     */
    run(key, fn) {
        const pending = this.inflight.get(key);
        if (pending !== undefined) {
            return pending;
        }
        const started = fn().finally(() => this.inflight.delete(key));
        this.inflight.set(key, started);
        return started;
    }
}
|
|
101
|
+
// ─── Service ──────────────────────────────────────────────────────────────────
|
|
102
|
+
// Default timeout for ordinary API calls (ms).
const DEFAULT_TIMEOUT = 15000;
// Longer timeout for diff/changes endpoints, which can be slow on large MRs (ms).
const DIFF_TIMEOUT = 45000;
/**
 * Thin client over the GitLab REST API v4, bound to a single personal access
 * token. Each instance owns its own axios client, a 5-minute TTL cache for
 * project resolution, and an in-flight deduplicator so concurrent
 * resolveProject calls for the same query share one request.
 *
 * A response interceptor converts every axios failure into a thrown
 * GitLabApiError (see mapAxiosError); most methods additionally retry
 * transient failures through withRetry.
 */
class GitLabService {
    /**
     * @param {string} token - GitLab PAT sent as the PRIVATE-TOKEN header.
     * @throws {Error} when no token is supplied.
     */
    constructor(token) {
        // Per-instance caches (one per user session/PAT)
        this.projectCache = new TTLCache(5 * 60000);
        this.projectDeduplicator = new Deduplicator();
        if (!token) {
            throw new Error('GitLab token is required per request. Provide user PAT via header.');
        }
        this.client = axios_1.default.create({
            // NOTE(review): if GITLAB_BASE_URL is unset this concatenates to
            // "undefined/api/v4" — confirm the env var is validated at startup.
            baseURL: process.env.GITLAB_BASE_URL + '/api/v4',
            headers: { 'PRIVATE-TOKEN': token },
            timeout: DEFAULT_TIMEOUT,
        });
        // Map axios errors to GitLabApiError
        this.client.interceptors.response.use((res) => res, (err) => { mapAxiosError(err); });
    }
    // ── Projects ────────────────────────────────────────────────────────────────
    /**
     * Searches projects the token's user is a member of, most recently
     * active first. Returns the "simple" project representation.
     */
    async searchProjects(query, perPage = 20) {
        return withRetry('searchProjects', async () => {
            const { data } = await this.client.get('/projects', {
                params: {
                    search: query,
                    simple: true,
                    membership: true,
                    per_page: perPage,
                    order_by: 'last_activity_at',
                    sort: 'desc',
                },
            });
            return data;
        });
    }
    /** Fetches a single project by numeric ID or URL-encoded path. */
    async getProjectById(projectId) {
        return withRetry('getProjectById', async () => {
            const { data } = await this.client.get(`/projects/${encodeURIComponent(projectId)}`);
            return data;
        });
    }
    /**
     * Resolves a free-form query (numeric ID, name, path, or namespaced path)
     * to a project, preferring exact matches, then prefix matches, then the
     * most recently active search hit. Results (including null "not found")
     * are cached for 5 minutes and concurrent lookups are deduplicated.
     * Returns null when nothing matches.
     */
    async resolveProject(queryOrId) {
        if (!queryOrId?.trim())
            return null;
        const query = queryOrId.trim();
        const cacheKey = `resolve:${query}`;
        const cached = this.projectCache.get(cacheKey);
        if (cached !== undefined) {
            (0, utils_1.logger)('debug', `resolveProject cache hit: ${query}`);
            return cached;
        }
        return this.projectDeduplicator.run(cacheKey, async () => {
            // Check cache again inside deduplicator (another concurrent call may have populated it)
            const hit = this.projectCache.get(cacheKey);
            if (hit !== undefined)
                return hit;
            let result = null;
            if (/^\d+$/.test(query)) {
                try {
                    result = await this.getProjectById(query);
                }
                catch {
                    // fall through to search
                }
            }
            if (!result) {
                const projects = await this.searchProjects(query, 50);
                if (projects.length > 0) {
                    const q = query.toLowerCase();
                    // Exact match on name/path/full path wins, then prefix match,
                    // then the first (most recently active) search result.
                    result =
                        projects.find((p) => [p.name, p.path, p.path_with_namespace].some((f) => f?.toLowerCase() === q)) ??
                            projects.find((p) => [p.name, p.path, p.path_with_namespace].some((f) => f?.toLowerCase().startsWith(q))) ??
                            projects[0];
                }
            }
            this.projectCache.set(cacheKey, result);
            return result;
        });
    }
    /**
     * Lists projects visible to the token's user, newest activity first.
     * scope 'owned' (default) restricts to owned projects; 'membership'
     * includes any project the user is a member of.
     */
    async listUserRepositories(options = {}) {
        return withRetry('listUserRepositories', async () => {
            const scope = options.scope ?? 'owned';
            const { data } = await this.client.get('/projects', {
                params: {
                    owned: scope === 'owned' ? true : undefined,
                    membership: scope === 'membership' ? true : undefined,
                    simple: true,
                    per_page: options.perPage ?? 50,
                    page: options.page ?? 1,
                    order_by: 'last_activity_at',
                    sort: 'desc',
                },
            });
            return data;
        });
    }
    // ── Merge Requests ──────────────────────────────────────────────────────────
    /**
     * Lists merge requests for a project, defaulting to state 'opened'.
     * All filters are optional pass-throughs to the GitLab API.
     * NOTE(review): projectId is interpolated without encodeURIComponent here
     * (unlike getProjectById) — numeric IDs are safe; namespaced paths would
     * need encoding. Confirm callers always pass numeric IDs.
     */
    async listMergeRequests(projectId, options = {}) {
        return withRetry('listMergeRequests', async () => {
            const { data } = await this.client.get(`/projects/${projectId}/merge_requests`, {
                params: {
                    state: options.state ?? 'opened',
                    source_branch: options.sourceBranch,
                    target_branch: options.targetBranch,
                    author_username: options.authorUsername,
                    reviewer_username: options.reviewerUsername,
                    search: options.search,
                    per_page: options.perPage,
                    page: options.page,
                },
            });
            return data;
        });
    }
    /**
     * Resolves a free-form project query, then lists its open merge requests.
     * @throws {Error} when no project matches the query.
     */
    async listMergeRequestsByProjectQuery(projectQuery) {
        const project = await this.resolveProject(projectQuery);
        if (!project)
            throw new Error(`Project not found for query: ${projectQuery}`);
        const mergeRequests = await this.listMergeRequests(String(project.id));
        return { project, mergeRequests };
    }
    /** Fetches a single merge request by project-scoped IID. */
    async getMergeRequest(projectId, iid) {
        return withRetry('getMergeRequest', async () => {
            const { data } = await this.client.get(`/projects/${projectId}/merge_requests/${iid}`);
            return data;
        });
    }
    /**
     * Fetches the file-level change list (diffs) for an MR. Uses the longer
     * DIFF_TIMEOUT since large MRs can take a while to serialize.
     */
    async getMergeRequestChanges(projectId, iid) {
        return withRetry('getMergeRequestChanges', async () => {
            const { data } = await this.client.get(`/projects/${projectId}/merge_requests/${iid}/changes`, { timeout: DIFF_TIMEOUT });
            return data.changes;
        });
    }
    /**
     * Fetches the approvals state for an MR. Best-effort: returns null on any
     * failure (the approvals endpoint is unavailable on some GitLab tiers),
     * so callers must treat null as "approvals unknown".
     */
    async getMergeRequestApprovals(projectId, iid) {
        try {
            const { data } = await this.client.get(`/projects/${projectId}/merge_requests/${iid}/approvals`);
            return data;
        }
        catch {
            return null;
        }
    }
    /** Fetches the MR and its approvals in parallel and bundles them. */
    async getMergeRequestDetail(projectId, iid) {
        const [mergeRequest, approvals] = await Promise.all([
            this.getMergeRequest(projectId, iid),
            this.getMergeRequestApprovals(projectId, iid),
        ]);
        return { projectId, mergeRequestIid: iid, mergeRequest, approvals };
    }
    /** Fetches up to 100 discussion threads for an MR (first page only). */
    async getMergeRequestDiscussions(projectId, iid) {
        return withRetry('getMergeRequestDiscussions', async () => {
            const { data } = await this.client.get(`/projects/${projectId}/merge_requests/${iid}/discussions`, { params: { per_page: 100 } });
            return data;
        });
    }
    /**
     * Computes a merge-readiness report for an MR: a boolean verdict, the
     * individual checks (open, not draft, no conflicts, merge status allowed,
     * blocking discussions resolved, approvals satisfied, pipeline passed or
     * not required), and human-readable reasons for each failing check.
     * Missing data (no approvals info, no blocking_discussions field, no
     * pipeline) is treated permissively — absent evidence does not block.
     */
    async getMergeReadiness(projectId, iid) {
        const mergeRequest = (await this.getMergeRequest(projectId, iid));
        const approvals = await this.getMergeRequestApprovals(projectId, iid);
        const sourceBranch = String(mergeRequest.source_branch ?? '').trim();
        const detailedMergeStatus = String(mergeRequest.detailed_merge_status ?? '').trim();
        // Both the modern `draft` flag and the legacy `work_in_progress` count.
        const draft = Boolean(mergeRequest.draft ?? mergeRequest.work_in_progress);
        const hasConflicts = Boolean(mergeRequest.has_conflicts);
        // Field absent on some GitLab versions: default to "resolved".
        const blockingDiscussionsResolved = mergeRequest.blocking_discussions_resolved === undefined
            ? true
            : Boolean(mergeRequest.blocking_discussions_resolved);
        const mergeStatusBlocked = [
            'cannot_be_merged', 'cannot_be_merged_recheck', 'not_open',
            'broken_status', 'commits_status', 'locked_lfs_files',
        ].includes(detailedMergeStatus);
        const headPipeline = mergeRequest.head_pipeline && typeof mergeRequest.head_pipeline === 'object'
            ? mergeRequest.head_pipeline
            : null;
        // Only look up the branch's latest pipeline when the MR payload
        // carried no head pipeline of its own.
        const latestPipeline = headPipeline ? null
            : sourceBranch ? await this.getLatestPipeline(projectId, sourceBranch)
                : null;
        const pipelineStatus = (headPipeline ? String(headPipeline.status ?? '') : String(latestPipeline?.status ?? '')).trim();
        // No pipeline at all counts as "not required".
        const pipelinePassedOrNotRequired = !pipelineStatus || pipelineStatus === 'success' || pipelineStatus === 'skipped';
        const approvalsRequired = approvals?.approvals_required;
        const approvalsLeft = approvals?.approvals_left;
        // Unknown approvals state (endpoint unavailable) is treated as satisfied.
        const approvalsSatisfied = approvalsLeft === undefined ? true : approvalsLeft <= 0;
        const checks = {
            isOpen: String(mergeRequest.state ?? '') === 'opened',
            notDraft: !draft,
            noConflicts: !hasConflicts,
            mergeStatusAllowed: !mergeStatusBlocked,
            noBlockingDiscussions: blockingDiscussionsResolved,
            approvalsSatisfied,
            pipelinePassedOrNotRequired,
        };
        const reasons = [];
        if (!checks.isOpen)
            reasons.push('Merge request is not in opened state');
        if (!checks.notDraft)
            reasons.push('Merge request is draft/work in progress');
        if (!checks.noConflicts)
            reasons.push('Merge request has conflicts');
        if (!checks.mergeStatusAllowed)
            reasons.push(`Detailed merge status is blocking: ${detailedMergeStatus}`);
        if (!checks.noBlockingDiscussions)
            reasons.push('Blocking discussions are unresolved');
        if (!checks.approvalsSatisfied) {
            reasons.push(`Approvals not satisfied (required: ${String(approvalsRequired ?? 'unknown')}, left: ${String(approvalsLeft ?? 'unknown')})`);
        }
        if (!checks.pipelinePassedOrNotRequired) {
            reasons.push(`Pipeline not passed (status: ${pipelineStatus || 'unknown'})`);
        }
        return {
            projectId,
            mergeRequestIid: iid,
            readyToMerge: Object.values(checks).every(Boolean),
            reasons,
            checks,
            summary: {
                state: mergeRequest.state,
                detailedMergeStatus,
                sourceBranch,
                targetBranch: mergeRequest.target_branch,
                approvalsRequired,
                approvalsLeft,
                pipelineStatus: pipelineStatus || null,
            },
            mergeRequest,
            approvals,
            pipeline: headPipeline ?? latestPipeline,
        };
    }
    /** Posts a plain note (comment) on an MR; returns the created note. */
    async commentMergeRequest(projectId, iid, body) {
        return withRetry('commentMergeRequest', async () => {
            const { data } = await this.client.post(`/projects/${projectId}/merge_requests/${iid}/notes`, { body });
            return data;
        });
    }
    /** Creates a merge request from source to target branch. */
    async createMergeRequest(projectId, payload) {
        return withRetry('createMergeRequest', async () => {
            const { data } = await this.client.post(`/projects/${projectId}/merge_requests`, {
                source_branch: payload.sourceBranch,
                target_branch: payload.targetBranch,
                title: payload.title,
                description: payload.description,
                remove_source_branch: payload.removeSourceBranch,
                squash: payload.squash,
                draft: payload.draft,
            });
            return data;
        });
    }
    /** Accepts (merges) an MR; all payload fields are optional pass-throughs. */
    async mergeMergeRequest(projectId, iid, payload = {}) {
        return withRetry('mergeMergeRequest', async () => {
            const { data } = await this.client.put(`/projects/${projectId}/merge_requests/${iid}/merge`, {
                squash: payload.squash,
                should_remove_source_branch: payload.shouldRemoveSourceBranch,
                merge_when_pipeline_succeeds: payload.mergeWhenPipelineSucceeds,
                sha: payload.sha,
                merge_commit_message: payload.mergeCommitMessage,
                squash_commit_message: payload.squashCommitMessage,
            });
            return data;
        });
    }
    /** Closes an MR without merging it. */
    async closeMergeRequest(projectId, iid) {
        return withRetry('closeMergeRequest', async () => {
            const { data } = await this.client.put(`/projects/${projectId}/merge_requests/${iid}`, {
                state_event: 'close',
            });
            return data;
        });
    }
    /** Approves an MR, optionally pinning the approval to a specific head SHA. */
    async approveMergeRequest(projectId, iid, sha) {
        return withRetry('approveMergeRequest', async () => {
            const { data } = await this.client.post(`/projects/${projectId}/merge_requests/${iid}/approve`, sha ? { sha } : {});
            return data;
        });
    }
    /** Revokes the current user's approval on an MR. */
    async unapproveMergeRequest(projectId, iid) {
        return withRetry('unapproveMergeRequest', async () => {
            const { data } = await this.client.post(`/projects/${projectId}/merge_requests/${iid}/unapprove`);
            return data;
        });
    }
    // ── Pipelines ───────────────────────────────────────────────────────────────
    /**
     * Triggers a pipeline on `ref`, converting the flat variables object into
     * GitLab's [{ key, value }] array form.
     */
    async runPipeline(projectId, ref, variables = {}) {
        return withRetry('runPipeline', async () => {
            const { data } = await this.client.post(`/projects/${projectId}/pipeline`, {
                ref,
                variables: Object.entries(variables).map(([key, value]) => ({ key, value })),
            });
            return data;
        });
    }
    /**
     * Triggers a pipeline with a STAGING variable. The ref is resolved in
     * priority order: explicit options.ref, the source branch of
     * options.mergeRequestIid, then the project's default branch.
     * @throws {Error} when no ref can be resolved.
     */
    async runStagingPipeline(projectId, staging, options = {}) {
        let ref = options.ref?.trim();
        if (!ref && options.mergeRequestIid) {
            const mr = await this.getMergeRequest(projectId, String(options.mergeRequestIid));
            ref = mr.source_branch;
        }
        if (!ref) {
            const project = await this.getProjectById(projectId);
            ref = project.default_branch;
        }
        if (!ref)
            throw new Error('Cannot resolve branch ref for pipeline. Provide ref or mergeRequestIid.');
        const pipeline = await this.runPipeline(projectId, ref, { STAGING: String(staging) });
        return { projectId, ref, staging, pipeline };
    }
    /** Fetches a single pipeline by ID. */
    async getPipeline(projectId, pipelineId) {
        return withRetry('getPipeline', async () => {
            const { data } = await this.client.get(`/projects/${projectId}/pipelines/${pipelineId}`);
            return data;
        });
    }
    /** Lists pipelines, newest first, optionally filtered by ref. */
    async listPipelines(projectId, options = {}) {
        return withRetry('listPipelines', async () => {
            const { data } = await this.client.get(`/projects/${projectId}/pipelines`, {
                params: {
                    ref: options.ref,
                    per_page: options.perPage ?? 20,
                    order_by: 'id',
                    sort: 'desc',
                },
            });
            return data;
        });
    }
    /** Returns the newest pipeline for a ref, or null when there are none. */
    async getLatestPipeline(projectId, ref) {
        const pipelines = await this.listPipelines(projectId, { ref, perPage: 1 });
        return pipelines.length > 0 ? pipelines[0] : null;
    }
    /**
     * Resolves a pipeline to report on: an explicit pipelineId wins;
     * otherwise the latest pipeline on options.ref, the MR's source branch,
     * or the project default branch (same fallback chain as runStagingPipeline).
     */
    async getPipelineStatus(projectId, options = {}) {
        if (options.pipelineId) {
            const pipeline = await this.getPipeline(projectId, options.pipelineId);
            return { projectId, source: 'pipelineId', pipeline };
        }
        let ref = options.ref?.trim();
        if (!ref && options.mergeRequestIid) {
            const mr = await this.getMergeRequest(projectId, String(options.mergeRequestIid));
            ref = mr.source_branch;
        }
        if (!ref) {
            const project = await this.getProjectById(projectId);
            ref = project.default_branch;
        }
        const latestPipeline = await this.getLatestPipeline(projectId, ref || undefined);
        return { projectId, source: 'latest', ref, pipeline: latestPipeline };
    }
    /**
     * Lists up to 100 jobs of a pipeline, optionally filtered by scope(s)
     * (e.g. 'failed', 'running'). A custom serializer emits repeated
     * `scope[]=` params, which is the form the GitLab API expects.
     */
    async listPipelineJobs(projectId, pipelineId, scopes) {
        return withRetry('listPipelineJobs', async () => {
            const { data } = await this.client.get(`/projects/${projectId}/pipelines/${pipelineId}/jobs`, {
                params: {
                    per_page: 100,
                    ...(scopes?.length ? { scope: scopes } : {}),
                },
                // GitLab accepts repeated scope params — axios serializes arrays with repeat keys
                paramsSerializer: (params) => {
                    const parts = [];
                    for (const [key, val] of Object.entries(params)) {
                        if (Array.isArray(val)) {
                            for (const v of val)
                                parts.push(`${key}[]=${encodeURIComponent(String(v))}`);
                        }
                        else if (val !== undefined) {
                            parts.push(`${key}=${encodeURIComponent(String(val))}`);
                        }
                    }
                    return parts.join('&');
                },
            });
            return data;
        });
    }
    /** Retries a single (typically failed) job; returns the new job object. */
    async retryJob(projectId, jobId) {
        return withRetry('retryJob', async () => {
            const { data } = await this.client.post(`/projects/${projectId}/jobs/${jobId}/retry`);
            return data;
        });
    }
}
|
|
477
|
+
exports.GitLabService = GitLabService;
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.ReviewService = void 0;
|
|
4
|
+
/**
 * Heuristic, rule-based merge-request reviewer. Scans a combined diff for a
 * handful of red flags and renders them as a numbered Markdown-ish report.
 */
class ReviewService {
    /** @param {GitLabService} gitlab - Client used to fetch diffs and post comments. */
    constructor(gitlab) {
        this.gitlab = gitlab;
    }
    /**
     * Scans a unified diff string and returns a review summary. Flags overly
     * large diffs, console.log usage, TODO markers, and eval() calls.
     * @param {string} diff - Combined diff text (may be empty).
     * @returns {Promise<string>} Human-readable review report.
     */
    async analyzeDiff(diff) {
        const issues = [];
        const lineCount = diff ? diff.split('\n').length : 0;
        if (lineCount > 500) {
            issues.push(`Large diff: ${lineCount} lines (consider splitting into smaller MRs)`);
        }
        // Pattern checks run in a fixed order so report numbering is stable.
        const patterns = [
            [/console\.log\(/, 'Found `console.log` statements — consider removing or using a logger'],
            [/TODO/, 'Found `TODO` comments — ensure follow-up issues are created'],
            [/eval\(/, 'Use of `eval()` detected — this is dangerous'],
        ];
        for (const [pattern, message] of patterns) {
            if (pattern.test(diff)) {
                issues.push(message);
            }
        }
        if (issues.length === 0) {
            return 'Automated review: No obvious issues found.';
        }
        const numbered = issues.map((issue, index) => `${index + 1}. ${issue}`);
        return ['Automated review:', ...numbered].join('\n');
    }
    /**
     * Fetches the MR's changes, analyzes the combined diff, posts the result
     * as an MR comment, and returns the review text.
     */
    async runAndComment(projectId, iid) {
        const changes = await this.gitlab.getMergeRequestChanges(projectId, iid);
        const combinedDiff = (changes || []).map((change) => change.diff || '').join('\n');
        const report = await this.analyzeDiff(combinedDiff);
        await this.gitlab.commentMergeRequest(projectId, iid, report);
        return report;
    }
}
|
|
35
|
+
exports.ReviewService = ReviewService;
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getDiff = getDiff;
|
|
4
|
+
const gitlab_service_1 = require("../services/gitlab.service");
|
|
5
|
+
/**
 * Fetches the file-level changes (diffs) of a merge request.
 *
 * Bug fix: GitLabService was previously constructed with no token, which its
 * constructor rejects unconditionally, so this tool could never succeed.
 * The token is now an optional third parameter defaulting to the
 * GITLAB_TOKEN environment variable — backward-compatible for existing
 * two-argument callers, which now get the env fallback instead of a
 * guaranteed throw.
 *
 * @param {string|number} projectId - GitLab project ID or URL-encoded path.
 * @param {string|number} mrIid - Project-scoped merge request IID.
 * @param {string} [token] - GitLab personal access token.
 * @returns {Promise<Array>} Change objects from the MR changes endpoint.
 */
async function getDiff(projectId, mrIid, token = process.env.GITLAB_TOKEN) {
    const gitlab = new gitlab_service_1.GitLabService(token);
    const pid = String(projectId);
    const iid = String(mrIid);
    return gitlab.getMergeRequestChanges(pid, iid);
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.listMR = listMR;
|
|
4
|
+
const gitlab_service_1 = require("../services/gitlab.service");
|
|
5
|
+
/**
 * Lists the open merge requests of a project.
 *
 * Bug fix: GitLabService was previously constructed with no token, which its
 * constructor rejects unconditionally, so this tool could never succeed.
 * The token is now an optional second parameter defaulting to the
 * GITLAB_TOKEN environment variable — backward-compatible for existing
 * one-argument callers.
 *
 * @param {string|number} projectId - GitLab project ID (required).
 * @param {string} [token] - GitLab personal access token.
 * @returns {Promise<Array>} Open merge requests for the project.
 * @throws {Error} when projectId is missing/falsy.
 */
async function listMR(projectId, token = process.env.GITLAB_TOKEN) {
    if (!projectId)
        throw new Error('projectId is required');
    const gitlab = new gitlab_service_1.GitLabService(token);
    return gitlab.listMergeRequests(String(projectId));
}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// src/tools/reviewMR.tool.ts
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.reviewMRTool = void 0;
|
|
5
|
+
const gitlab_service_1 = require("../services/gitlab.service");
|
|
6
|
+
const review_service_1 = require("../services/review.service");
|
|
7
|
+
exports.reviewMRTool = {
|
|
8
|
+
name: "review_merge_request",
|
|
9
|
+
description: "Review a GitLab merge request",
|
|
10
|
+
parameters: {
|
|
11
|
+
projectId: "string",
|
|
12
|
+
mergeRequestIid: "string",
|
|
13
|
+
},
|
|
14
|
+
execute: async ({ projectId, mergeRequestIid }) => {
|
|
15
|
+
const gitlab = new gitlab_service_1.GitLabService();
|
|
16
|
+
const reviewer = new review_service_1.ReviewService(gitlab);
|
|
17
|
+
const changes = await gitlab.getMergeRequestChanges(projectId, mergeRequestIid);
|
|
18
|
+
const combinedDiff = (changes || []).map((c) => c.diff || "").join("\n");
|
|
19
|
+
const result = await reviewer.analyzeDiff(combinedDiff);
|
|
20
|
+
await gitlab.commentMergeRequest(projectId, mergeRequestIid, result);
|
|
21
|
+
return result;
|
|
22
|
+
},
|
|
23
|
+
};
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.logger = logger;
|
|
4
|
+
// Numeric severity per supported level (higher = more severe).
const LEVEL_PRIORITY = { debug: 0, info: 1, warn: 2, error: 3 };
// Active threshold: honour LOG_LEVEL only when it names a known level;
// anything else (including unset) falls back to 'info'.
const configuredLevel = process.env.LOG_LEVEL &&
    ['debug', 'info', 'warn', 'error'].includes(process.env.LOG_LEVEL)
    ? process.env.LOG_LEVEL
    : 'info';
/**
 * Timestamped console logger tagged with the package name. Messages below the
 * configured threshold are dropped; 'error' goes to console.error, 'warn' to
 * console.warn, everything else to console.log. Extra args are forwarded to
 * the console method unchanged.
 */
function logger(level, message, ...args) {
    if (LEVEL_PRIORITY[level] < LEVEL_PRIORITY[configuredLevel]) {
        return;
    }
    const timestamp = new Date().toISOString();
    const tag = level.toUpperCase().padEnd(5);
    const line = `[${timestamp}] [gitlab-mcp] [${tag}] ${message}`;
    let sink;
    if (level === 'error') {
        sink = console.error;
    }
    else if (level === 'warn') {
        sink = console.warn;
    }
    else {
        sink = console.log;
    }
    if (args.length) {
        sink(line, ...args);
    }
    else {
        sink(line);
    }
}
|
package/package.json
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "gd-gitlab-mcp",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "GitLab MCP server — HTTP and stdio transport",
|
|
5
|
+
"main": "dist/server.js",
|
|
6
|
+
"bin": {
|
|
7
|
+
"gd-gitlab-mcp": "dist/server.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"dist"
|
|
11
|
+
],
|
|
12
|
+
"scripts": {
|
|
13
|
+
"build": "tsc",
|
|
14
|
+
"start": "node dist/server.js",
|
|
15
|
+
"start:stdio": "node dist/server.js --stdio",
|
|
16
|
+
"dev": "ts-node-dev -r dotenv/config --respawn --transpile-only --ignore-watch node_modules --watch src src/server.ts",
|
|
17
|
+
"prepublishOnly": "npm run build"
|
|
18
|
+
},
|
|
19
|
+
"dependencies": {
|
|
20
|
+
"@modelcontextprotocol/sdk": "^1.17.2",
|
|
21
|
+
"axios": "^1.4.0",
|
|
22
|
+
"dotenv": "^16.0.0"
|
|
23
|
+
},
|
|
24
|
+
"devDependencies": {
|
|
25
|
+
"ts-node": "^10.9.1",
|
|
26
|
+
"ts-node-dev": "^2.0.0",
|
|
27
|
+
"typescript": "^5.3.0"
|
|
28
|
+
}
|
|
29
|
+
}
|