mobbdev 1.0.118 → 1.0.120
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +1355 -762
- package/package.json +3 -2
package/dist/index.mjs
CHANGED
|
@@ -1,20 +1,1163 @@
|
|
|
1
1
|
// esbuild runtime helpers: lazy module initialization (__esm), live-binding
// re-exports (__export), and class-field definition (__publicField).
var __defProp = Object.defineProperty;
var __getOwnPropNames = Object.getOwnPropertyNames;
// Define `key` on `obj`. If the property already exists, redefine it as an
// enumerable/configurable/writable data property; otherwise plain-assign.
// Returns whatever the underlying operation returns (obj or the value).
var __defNormalProp = (obj, key, value) => {
  if (key in obj) {
    return __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value });
  }
  return obj[key] = value;
};
// Wrap a one-entry module object ({ "path"() { ... } }) in a memoizing
// initializer: the first call runs the body (detached `this`, argument 0)
// and caches its result; later calls return the cached result.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const moduleBody = fn[__getOwnPropNames(fn)[0]];
    fn = 0; // drop the reference so the body runs at most once
    res = moduleBody(0);
  }
  return res;
};
// Install enumerable getters on `target` for every entry in `all`,
// giving re-exports live-binding semantics.
var __export = (target, all) => {
  for (const name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Define a public class field; non-symbol keys are stringified first.
var __publicField = (obj, key, value) => {
  const normalizedKey = typeof key === "symbol" ? key : key + "";
  return __defNormalProp(obj, normalizedKey, value);
};
|
|
8
12
|
|
|
13
|
+
// src/features/analysis/scm/env.ts
// Lazily-initialized environment configuration. `z15` is the bundler's alias
// for zod shared across the whole bundle — do not rename.
import { z as z15 } from "zod";
// File-global bindings; populated on the first call to init_env().
var EnvVariablesZod, GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST, MAX_UPLOAD_FILE_SIZE_MB;
var init_env = __esm({
  "src/features/analysis/scm/env.ts"() {
    "use strict";
    // Schema for the environment variables this bundle consumes.
    EnvVariablesZod = z15.object({
      GITLAB_API_TOKEN: z15.string().optional(),
      GITHUB_API_TOKEN: z15.string().optional(),
      GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888"),
      // Coerced from string; must be > 0. Defaults to 5 MB.
      MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5)
    });
    // Destructuring *assignment* (parenthesized) into the vars declared above,
    // so other modules in the bundle see the parsed values after init_env().
    // NOTE: zod's parse throws if MAX_UPLOAD_FILE_SIZE_MB is set but invalid.
    ({
      GITLAB_API_TOKEN,
      GITHUB_API_TOKEN,
      GIT_PROXY_HOST,
      MAX_UPLOAD_FILE_SIZE_MB
    } = EnvVariablesZod.parse(process.env));
  }
});
|
|
33
|
+
|
|
34
|
+
// src/mcp/core/configs.ts
// MCP-wide tunables; initialized lazily (after init_env, whose
// MAX_UPLOAD_FILE_SIZE_MB feeds MCP_MAX_FILE_SIZE).
var MCP_DEFAULT_API_URL, MCP_API_KEY_HEADER_NAME, MCP_LOGIN_MAX_WAIT, MCP_LOGIN_CHECK_DELAY, MCP_VUL_REPORT_DIGEST_TIMEOUT_MS, MCP_MAX_FILE_SIZE, MCP_PERIODIC_CHECK_INTERVAL, MCP_DEFAULT_MAX_FILES_TO_SCAN, MCP_REPORT_ID_EXPIRATION_MS, MCP_TOOLS_BROWSER_COOLDOWN_MS, MCP_DEFAULT_LIMIT;
var init_configs = __esm({
  "src/mcp/core/configs.ts"() {
    "use strict";
    // Environment must be parsed first: MCP_MAX_FILE_SIZE depends on it.
    init_env();
    MCP_DEFAULT_API_URL = "https://api.mobb.ai/v1/graphql";
    MCP_API_KEY_HEADER_NAME = "x-mobb-key";
    MCP_LOGIN_MAX_WAIT = 2 * 60 * 1e3; // 2 minutes (ms)
    MCP_LOGIN_CHECK_DELAY = 2 * 1e3; // 2 seconds (ms)
    MCP_VUL_REPORT_DIGEST_TIMEOUT_MS = 5 * 60 * 1e3; // 5 minutes (ms)
    // Upload size cap in bytes, derived from the env-configured MB value.
    MCP_MAX_FILE_SIZE = MAX_UPLOAD_FILE_SIZE_MB * 1024 * 1024;
    MCP_PERIODIC_CHECK_INTERVAL = 15 * 60 * 1e3; // 15 minutes (ms)
    MCP_DEFAULT_MAX_FILES_TO_SCAN = 10;
    MCP_REPORT_ID_EXPIRATION_MS = 2 * 60 * 60 * 1e3; // 2 hours (ms)
    MCP_TOOLS_BROWSER_COOLDOWN_MS = 24 * 60 * 60 * 1e3; // 24 hours (ms)
    MCP_DEFAULT_LIMIT = 3;
  }
});
|
|
53
|
+
|
|
54
|
+
// src/features/analysis/scm/services/ExcludedDirs.ts
// Directory names (plus many dot-prefixed tool/artifact names) skipped when
// walking a repository root for files to scan. Consumed by
// FileUtils.processRootDirectory via an exact, case-sensitive includes(item)
// check against each root-level entry name.
var EXCLUDED_DIRS;
var init_ExcludedDirs = __esm({
  "src/features/analysis/scm/services/ExcludedDirs.ts"() {
    "use strict";
    // FIX: the original list contained "debug" twice (once after "coverage",
    // again after "target"); the redundant duplicate is removed. Membership
    // checks behave exactly as before.
    EXCLUDED_DIRS = [
      "$RECYCLE.BIN", ".7z", ".AppleDouble", ".DS_Store", ".Rproj.user",
      ".Spotlight-V100", ".Trashes", ".adoc", ".android", ".angular",
      ".atom", ".aws-sam", ".azure", ".azure-pipelines", ".babelrc",
      ".babelrc.js", ".bmp", ".brackets.json", ".browserslistrc", ".build",
      ".bundle", ".bundle.js", ".bzr", ".c8rc", ".c9",
      ".cabal", ".cabal-sandbox", ".cache", ".cache-loader", ".cargo",
      ".chunk.js", ".circleci", ".class", ".classpath", ".composer",
      ".conf", ".config", ".cpanm", ".crt", ".cvs",
      ".d.ts", ".dart_tool", ".db", ".devcontainer", ".dll",
      ".docker", ".dockerignore", ".docusaurus", ".dylib", ".ebextensions",
      ".eclipse", ".editorconfig", ".eggs", ".ember-cli", ".ensime_cache",
      ".env", ".env.vault", ".eot", ".eslintcache", ".eslintrc",
      ".eslintrc.js", ".exe", ".expo", ".expo-shared", ".flutter-plugins",
      ".fseventsd", ".gem", ".gif", ".git", ".gitattributes",
      ".github", ".gitignore", ".gitkeep", ".gitlab", ".gitlab-ci",
      ".gitlab-ci.yml", ".gitmodules", ".gradle", ".gvmrc", ".gz",
      ".hbuilder", ".helm", ".hg", ".hgignore", ".history",
      ".htaccess", ".husky", ".ico", ".idea", ".ini",
      ".ionic", ".ipynb_checkpoints", ".ivy2", ".jekyll-cache", ".jest-cache",
      ".jpeg", ".jpg", ".jscsrc", ".jshintrc", ".json",
      ".k8s", ".keep", ".key", ".kubernetes", ".lcov",
      ".lock", ".log", ".logs", ".m2", ".mailmap",
      ".md", ".metadata", ".metals", ".min.css", ".min.html",
      ".min.js", ".mvn", ".mypy_cache", ".nbproject", ".netbeans",
      ".netlify", ".next", ".node-version", ".node_modules", ".npmrc",
      ".nuget", ".nunit", ".nuxt", ".nvm", ".nvmrc",
      ".nx", ".nyc_output", ".nycrc", ".o", ".obj",
      ".otf", ".output", ".p12", ".parcel-cache", ".pem",
      ".pfx", ".phpunit.result.cache", ".png", ".pnp", ".pnp.cjs",
      ".pnp.js", ".pnpm", ".pnpm-state", ".pnpm-store", ".pnpmfile.cjs",
      ".prettierrc", ".prettierrc.js", ".project", ".project.vim", ".pub-cache",
      ".pulumi", ".pyc", ".pyenv", ".pyo", ".pytest_cache",
      ".python-version", ".pythonrc", ".quasar", ".rar", ".rbenv-version",
      ".react-native", ".rebar3", ".rollup.cache", ".rst", ".ruby-version",
      ".ruff_cache", ".rush", ".rvm", ".rvmrc", ".sass-cache",
      ".sbt", ".serverless", ".settings", ".snap", ".so",
      ".spec.js", ".spec.jsx", ".spec.ts", ".spec.tsx", ".sql",
      ".sqlite", ".stack-work", ".storybook", ".stylelintcache", ".stylelintrc",
      ".stylelintrc.js", ".sublime-project", ".sublime-workspace", ".svelte-kit", ".svg",
      ".svn", ".swcrc", ".tar", ".temp", ".terraform",
      ".test.js", ".test.jsx", ".test.ts", ".test.tsx", ".tiff",
      ".tmp", ".toml", ".tox", ".travis", ".travis.yml",
      ".ttf", ".turbo", ".txt", ".vagrant", ".venv",
      ".vite", ".vs", ".vscode", ".webp", ".webpack",
      ".woff", ".woff2", ".wrangler", ".xml", ".yaml",
      ".yarn", ".yarnrc", ".yml", ".zip", "Carthage",
      "Debug", "DerivedData", "Godeps", "Release", "TestResults",
      "__pycache__", "_build", "allure-results", "bazel", "benchmark",
      "benchmarks", "bin", "bower_components", "buck-out", "build",
      "build-cache", "build_tools", "builds", "cache", "changelog",
      "changelogs", "compiled", "copyright", "coverage", "debug",
      "deploy", "deployment", "dist", "dist-newstyle", "dist-packages",
      "docker", "dockerfile", "egg-info", "elm-stuff", "fonts",
      "gemfile", "generated", "go.mod", "go.sum", "jars",
      "jenkinsfile", "jspm_packages", "junit-reports", "makefile", "mock_data",
      "nbproject", "node_modules", "notice", "obj", "out",
      "out-tsc", "output", "packrat", "pipfile", "pycache",
      "readme", "sdist", "snapshots", "target", "temp",
      "tempfiles", "test-reports", "test-results", "test_output", "third-party",
      "third_party", "thirdparty", "tmp", "vendor", "bundle",
      "venv", "virtualenv", "webpack-cache", "www", "wwwroot",
      "xunit"
    ];
  }
});
|
|
370
|
+
|
|
371
|
+
// src/features/analysis/scm/services/ExcludedFilePatterns.ts
// File-name suffixes/names that exclude a file from scanning. Consumed by
// FileUtils.isExcludedFileType, which LOWERCASES the basename before testing
// `basename.endsWith(pattern)` — so every entry here must be lowercase.
var EXCLUDED_FILE_PATTERNS;
var init_ExcludedFilePatterns = __esm({
  "src/features/analysis/scm/services/ExcludedFilePatterns.ts"() {
    "use strict";
    // FIXES relative to the original list:
    //  - ".env" appeared twice; the duplicate is removed (matching behavior
    //    is unchanged — isExcludedFileType also special-cases ".env").
    //  - ".DS_Store" and "Thumbs.db" contained uppercase letters, so they
    //    could never match the lowercased basename; they are now lowercase,
    //    making those exclusions actually take effect as intended.
    EXCLUDED_FILE_PATTERNS = [
      ".json", ".snap", ".env.vault", ".env", ".yaml",
      ".yml", ".toml", ".ini", ".conf", ".config",
      ".xml", ".md", ".txt", ".rst", ".adoc",
      ".lock", ".png", ".jpg", ".jpeg", ".gif",
      ".svg", ".ico", ".webp", ".bmp", ".tiff",
      ".ttf", ".otf", ".woff", ".woff2", ".eot",
      ".zip", ".tar", ".gz", ".rar", ".7z",
      ".log", ".db", ".sqlite", ".sql", ".pem",
      ".crt", ".key", ".p12", ".pfx", ".editorconfig",
      ".sublime-project", ".sublime-workspace", ".ds_store", "thumbs.db", ".lcov",
      ".exe", ".dll", ".so", ".dylib", ".class",
      ".pyc", ".pyo", ".o", ".obj", ".min.js",
      ".min.css", ".min.html", ".test.js", ".test.ts", ".test.jsx",
      ".test.tsx", ".spec.js", ".spec.ts", ".spec.jsx", ".spec.tsx",
      ".d.ts", ".bundle.js", ".chunk.js", "dockerfile", "jenkinsfile",
      "go.sum", ".gitignore", ".gitattributes", ".gitmodules", ".gitkeep",
      ".keep", ".hgignore", ".nvmrc", ".node-version", ".npmrc",
      ".yarnrc", ".pnpmfile.cjs", ".ruby-version", ".python-version", ".rvmrc",
      ".rbenv-version", ".gvmrc", "makefile", "rakefile", "gulpfile.js",
      "gruntfile.js", "webpack.config.js", "webpack.config.ts", "rollup.config.js", "vite.config.js",
      "vite.config.ts", "next.config.js", "nuxt.config.js", "tailwind.config.js", "postcss.config.js",
      ".babelrc", ".babelrc.js", ".swcrc", ".browserslistrc", "jest.config.js",
      "jest.config.ts", "vitest.config.js", "karma.conf.js", "protractor.conf.js", "cypress.config.js",
      "playwright.config.js", ".nycrc", ".c8rc", ".eslintrc", ".eslintrc.js",
      ".prettierrc", ".prettierrc.js", ".stylelintrc", ".stylelintrc.js", "pipfile",
      "gemfile", "go.mod", "project.clj", "setup.py", "setup.cfg",
      "manifest.in", ".pythonrc", "readme", "changelog", "authors",
      "contributors", "license", "notice", "copyright", ".htaccess"
    ];
  }
});
|
|
521
|
+
|
|
522
|
+
// src/features/analysis/scm/services/FileUtils.ts
// Static helpers for deciding which repository files are worth uploading for
// a scan, and for walking a directory tree to find them.
import fs2 from "fs";
import { promises as fsPromises } from "fs";
import { isBinary } from "istextorbinary";
import path from "path";
var FileUtils;
var init_FileUtils = __esm({
  "src/features/analysis/scm/services/FileUtils.ts"() {
    "use strict";
    init_configs();
    init_ExcludedDirs();
    init_ExcludedFilePatterns();
    FileUtils = class {
      // True when the file name alone marks it as not-scannable
      // (.env variants, or any suffix in EXCLUDED_FILE_PATTERNS).
      // The basename is lowercased, so patterns must be lowercase.
      static isExcludedFileType(filepath) {
        const basename = path.basename(filepath).toLowerCase();
        if (basename === ".env" || basename.startsWith(".env.")) {
          return true;
        }
        if (EXCLUDED_FILE_PATTERNS.some((pattern) => basename.endsWith(pattern))) {
          return true;
        }
        return false;
      }
      // Decide whether a file should be packed for upload: not an excluded
      // type, not larger than maxFileSize (bytes), and not binary content.
      // Any filesystem error (missing file, permission) means "skip".
      // NOTE(review): reads the whole file into memory for the binary check;
      // acceptable because size was already capped at maxFileSize.
      static shouldPackFile(filepath, maxFileSize = MCP_MAX_FILE_SIZE) {
        const absoluteFilepath = path.resolve(filepath);
        if (this.isExcludedFileType(filepath)) {
          return false;
        }
        try {
          const stats = fs2.statSync(absoluteFilepath);
          if (stats.size > maxFileSize) {
            return false;
          }
          const data = fs2.readFileSync(absoluteFilepath);
          if (isBinary(null, data)) {
            return false;
          }
          return true;
        } catch {
          return false;
        }
      }
      // Process directory at repository root level with special handling for excluded root directories
      // Returns { name, fullPath, relativePath, time, isFile } records for
      // every readable file; excludedRootDirectories applies ONLY at depth 0.
      // Unreadable entries are silently skipped (best effort).
      static async processRootDirectory(dir, excludedRootDirectories) {
        try {
          await fsPromises.access(dir, fs2.constants.R_OK);
        } catch {
          return [];
        }
        const items = await fsPromises.readdir(dir);
        const results = [];
        const filePromises = [];
        for (const item of items) {
          const fullPath = path.join(dir, item);
          try {
            await fsPromises.access(fullPath, fs2.constants.R_OK);
            const stat = await fsPromises.stat(fullPath);
            if (stat.isDirectory()) {
              // Root-level exclusion: exact, case-sensitive name match.
              if (excludedRootDirectories.includes(item)) {
                continue;
              }
              // Subdirectories are walked concurrently.
              filePromises.push(this.processSubdirectory(fullPath, dir, 1));
            } else {
              results.push({
                name: item,
                fullPath,
                relativePath: item,
                time: stat.mtime.getTime(),
                isFile: true
              });
            }
          } catch {
            continue;
          }
        }
        const subdirResults = await Promise.all(filePromises);
        for (const subdirResult of subdirResults) {
          results.push(...subdirResult);
        }
        return results;
      }
      // Process subdirectories without applying root exclusions
      // Recursion is capped at depth 20 to bound traversal of pathological
      // (or cyclic, via symlinks) trees; relativePath is relative to rootDir.
      static async processSubdirectory(dir, rootDir, depth) {
        if (depth > 20) {
          return [];
        }
        try {
          await fsPromises.access(dir, fs2.constants.R_OK);
        } catch {
          return [];
        }
        const items = await fsPromises.readdir(dir);
        const results = [];
        const filePromises = [];
        for (const item of items) {
          const fullPath = path.join(dir, item);
          try {
            await fsPromises.access(fullPath, fs2.constants.R_OK);
            const stat = await fsPromises.stat(fullPath);
            if (stat.isDirectory()) {
              filePromises.push(
                this.processSubdirectory(fullPath, rootDir, depth + 1)
              );
            } else {
              results.push({
                name: item,
                fullPath,
                relativePath: path.relative(rootDir, fullPath),
                time: stat.mtime.getTime(),
                isFile: true
              });
            }
          } catch {
            continue;
          }
        }
        const subdirResults = await Promise.all(filePromises);
        for (const subdirResult of subdirResults) {
          results.push(...subdirResult);
        }
        return results;
      }
      // Walk `dir` and return the relative paths of packable files, newest
      // (by mtime) first. When isAllFilesScan is true, returns all matches;
      // otherwise at most maxFiles. Returns [] if dir is not a directory.
      static async getLastChangedFiles({
        dir,
        maxFileSize,
        maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN,
        isAllFilesScan
      }) {
        try {
          const stats = fs2.statSync(dir);
          if (!stats.isDirectory()) return [];
        } catch {
          return [];
        }
        let gitMatcher = null;
        try {
          // Lazy dynamic import: GitService also depends on FileUtils, so the
          // bundler breaks the cycle with init_GitService()/GitService_exports.
          const { GitService: GitService2 } = await Promise.resolve().then(() => (init_GitService(), GitService_exports));
          const gitService = new GitService2(dir);
          gitMatcher = await gitService.getGitignoreMatcher();
        } catch (e) {
          // Best effort: if git (or the gitignore matcher) is unavailable,
          // proceed without .gitignore filtering.
        }
        const allFiles = await this.processRootDirectory(dir, EXCLUDED_DIRS);
        const filteredFiles = allFiles.filter(
          (file) => this.shouldPackFile(file.fullPath, maxFileSize) && !gitMatcher?.ignores(file.relativePath)
        ).sort((a, b) => b.time - a.time).map((file) => file.relativePath);
        if (isAllFilesScan) {
          return filteredFiles;
        } else {
          return filteredFiles.slice(0, maxFiles);
        }
      }
    };
  }
});
|
|
676
|
+
|
|
677
|
+
// src/features/analysis/scm/services/GitService.ts
|
|
678
|
+
var GitService_exports = {};
|
|
679
|
+
__export(GitService_exports, {
|
|
680
|
+
GitService: () => GitService
|
|
681
|
+
});
|
|
682
|
+
import fs3 from "fs";
|
|
683
|
+
import ignore from "ignore";
|
|
684
|
+
import * as path2 from "path";
|
|
685
|
+
import { simpleGit } from "simple-git";
|
|
686
|
+
var GitService;
|
|
687
|
+
var init_GitService = __esm({
|
|
688
|
+
"src/features/analysis/scm/services/GitService.ts"() {
|
|
689
|
+
"use strict";
|
|
690
|
+
init_configs();
|
|
691
|
+
init_FileUtils();
|
|
692
|
+
GitService = class {
|
|
693
|
+
constructor(repositoryPath, log2) {
|
|
694
|
+
__publicField(this, "git");
|
|
695
|
+
__publicField(this, "repositoryPath");
|
|
696
|
+
__publicField(this, "log");
|
|
697
|
+
const noopLog = (_message, _level, _data) => {
|
|
698
|
+
};
|
|
699
|
+
this.log = log2 || noopLog;
|
|
700
|
+
this.git = simpleGit(repositoryPath, { binary: "git" });
|
|
701
|
+
this.repositoryPath = repositoryPath;
|
|
702
|
+
this.log("Git service initialized", "debug", { repositoryPath });
|
|
703
|
+
}
|
|
704
|
+
/**
|
|
705
|
+
* Validates that the path is a valid git repository
|
|
706
|
+
*/
|
|
707
|
+
async validateRepository() {
|
|
708
|
+
this.log("Validating git repository", "debug");
|
|
709
|
+
try {
|
|
710
|
+
const isRepo = await this.git.checkIsRepo();
|
|
711
|
+
if (!isRepo) {
|
|
712
|
+
const error = "Path is not a valid git repository";
|
|
713
|
+
this.log(error, "error");
|
|
714
|
+
return { isValid: false, error };
|
|
715
|
+
}
|
|
716
|
+
this.log("Git repository validation successful", "debug");
|
|
717
|
+
return { isValid: true };
|
|
718
|
+
} catch (error) {
|
|
719
|
+
const errorMessage = `Failed to verify git repository: ${error.message}`;
|
|
720
|
+
this.log(errorMessage, "error", { error });
|
|
721
|
+
return { isValid: false, error: errorMessage };
|
|
722
|
+
}
|
|
723
|
+
}
|
|
724
|
+
/**
|
|
725
|
+
* Gets the current git status and returns changed files
|
|
726
|
+
*/
|
|
727
|
+
async getChangedFiles() {
|
|
728
|
+
this.log("Getting git status", "debug");
|
|
729
|
+
try {
|
|
730
|
+
const status = await this.git.status();
|
|
731
|
+
const gitRoot = await this.git.revparse(["--show-toplevel"]);
|
|
732
|
+
const relativePathFromGitRoot = path2.relative(
|
|
733
|
+
gitRoot,
|
|
734
|
+
this.repositoryPath
|
|
735
|
+
);
|
|
736
|
+
const deletedFiles = status.files.filter((file) => file.index === "D" || file.working_dir === "D").map((file) => file.path);
|
|
737
|
+
const files = status.files.filter((file) => {
|
|
738
|
+
return !(file.index === "D" || file.working_dir === "D");
|
|
739
|
+
}).map((file) => {
|
|
740
|
+
const gitRelativePath = file.path;
|
|
741
|
+
if (relativePathFromGitRoot === "") {
|
|
742
|
+
return gitRelativePath;
|
|
743
|
+
}
|
|
744
|
+
if (gitRelativePath.startsWith(relativePathFromGitRoot + "/")) {
|
|
745
|
+
return gitRelativePath.substring(relativePathFromGitRoot.length + 1);
|
|
746
|
+
}
|
|
747
|
+
return path2.relative(
|
|
748
|
+
this.repositoryPath,
|
|
749
|
+
path2.join(gitRoot, gitRelativePath)
|
|
750
|
+
);
|
|
751
|
+
});
|
|
752
|
+
this.log("Git status retrieved", "info", {
|
|
753
|
+
fileCount: files.length,
|
|
754
|
+
files: files.slice(0, 10),
|
|
755
|
+
// Log first 10 files to avoid spam
|
|
756
|
+
deletedFileCount: deletedFiles.length,
|
|
757
|
+
deletedFiles: deletedFiles.slice(0, 10),
|
|
758
|
+
gitRoot,
|
|
759
|
+
workingDir: this.repositoryPath,
|
|
760
|
+
relativePathFromGitRoot
|
|
761
|
+
});
|
|
762
|
+
return { files, deletedFiles, status };
|
|
763
|
+
} catch (error) {
|
|
764
|
+
const errorMessage = `Failed to get git status: ${error.message}`;
|
|
765
|
+
this.log(errorMessage, "error", { error });
|
|
766
|
+
throw new Error(errorMessage);
|
|
767
|
+
}
|
|
768
|
+
}
|
|
769
|
+
/**
|
|
770
|
+
* Gets git repository information including remote URL, current commit hash, and branch name
|
|
771
|
+
*/
|
|
772
|
+
async getGitInfo() {
|
|
773
|
+
this.log("Getting git repository information", "debug");
|
|
774
|
+
try {
|
|
775
|
+
const [repoUrl, hash, reference] = await Promise.all([
|
|
776
|
+
this.git.getConfig("remote.origin.url"),
|
|
777
|
+
this.git.revparse(["HEAD"]),
|
|
778
|
+
this.git.revparse(["--abbrev-ref", "HEAD"])
|
|
779
|
+
]);
|
|
780
|
+
let normalizedRepoUrl = repoUrl.value || "";
|
|
781
|
+
if (normalizedRepoUrl.endsWith(".git")) {
|
|
782
|
+
normalizedRepoUrl = normalizedRepoUrl.slice(0, -".git".length);
|
|
783
|
+
}
|
|
784
|
+
if (normalizedRepoUrl.startsWith("git@github.com:")) {
|
|
785
|
+
normalizedRepoUrl = normalizedRepoUrl.replace(
|
|
786
|
+
"git@github.com:",
|
|
787
|
+
"https://github.com/"
|
|
788
|
+
);
|
|
789
|
+
}
|
|
790
|
+
this.log("Git repository information retrieved", "debug", {
|
|
791
|
+
repoUrl: normalizedRepoUrl,
|
|
792
|
+
hash,
|
|
793
|
+
reference
|
|
794
|
+
});
|
|
795
|
+
return {
|
|
796
|
+
repoUrl: normalizedRepoUrl,
|
|
797
|
+
hash,
|
|
798
|
+
reference
|
|
799
|
+
};
|
|
800
|
+
} catch (error) {
|
|
801
|
+
const errorMessage = `Failed to get git repository information: ${error.message}`;
|
|
802
|
+
this.log(errorMessage, "error", { error });
|
|
803
|
+
throw new Error(errorMessage);
|
|
804
|
+
}
|
|
805
|
+
}
|
|
806
|
+
/**
|
|
807
|
+
* Validates if a branch name is valid according to git's rules
|
|
808
|
+
*/
|
|
809
|
+
async isValidBranchName(branchName) {
|
|
810
|
+
this.log("Validating branch name", "debug", { branchName });
|
|
811
|
+
try {
|
|
812
|
+
const result = await this.git.raw([
|
|
813
|
+
"check-ref-format",
|
|
814
|
+
"--branch",
|
|
815
|
+
branchName
|
|
816
|
+
]);
|
|
817
|
+
const isValid = Boolean(result);
|
|
818
|
+
this.log("Branch name validation result", "debug", {
|
|
819
|
+
branchName,
|
|
820
|
+
isValid
|
|
821
|
+
});
|
|
822
|
+
return isValid;
|
|
823
|
+
} catch (error) {
|
|
824
|
+
this.log("Branch name validation failed", "debug", { branchName, error });
|
|
825
|
+
return false;
|
|
826
|
+
}
|
|
827
|
+
}
|
|
828
|
+
/**
|
|
829
|
+
* Gets the current branch name
|
|
830
|
+
*/
|
|
831
|
+
async getCurrentBranch() {
|
|
832
|
+
this.log("Getting current branch name", "debug");
|
|
833
|
+
try {
|
|
834
|
+
const branch = await this.git.revparse(["--abbrev-ref", "HEAD"]);
|
|
835
|
+
this.log("Current branch retrieved", "debug", { branch });
|
|
836
|
+
return branch;
|
|
837
|
+
} catch (error) {
|
|
838
|
+
const errorMessage = `Failed to get current branch: ${error.message}`;
|
|
839
|
+
this.log(errorMessage, "error", { error });
|
|
840
|
+
throw new Error(errorMessage);
|
|
841
|
+
}
|
|
842
|
+
}
|
|
843
|
+
/**
|
|
844
|
+
* Gets the current commit hash
|
|
845
|
+
*/
|
|
846
|
+
async getCurrentCommitHash() {
|
|
847
|
+
this.log("Getting current commit hash", "debug");
|
|
848
|
+
try {
|
|
849
|
+
const hash = await this.git.revparse(["HEAD"]);
|
|
850
|
+
this.log("Current commit hash retrieved", "debug", { hash });
|
|
851
|
+
return hash;
|
|
852
|
+
} catch (error) {
|
|
853
|
+
const errorMessage = `Failed to get current commit hash: ${error.message}`;
|
|
854
|
+
this.log(errorMessage, "error", { error });
|
|
855
|
+
throw new Error(errorMessage);
|
|
856
|
+
}
|
|
857
|
+
}
|
|
858
|
+
/**
|
|
859
|
+
* Gets both the current commit hash and current branch name
|
|
860
|
+
*/
|
|
861
|
+
async getCurrentCommitAndBranch() {
|
|
862
|
+
this.log("Getting current commit hash and branch", "debug");
|
|
863
|
+
try {
|
|
864
|
+
const [hash, branch] = await Promise.all([
|
|
865
|
+
this.git.revparse(["HEAD"]),
|
|
866
|
+
this.git.revparse(["--abbrev-ref", "HEAD"])
|
|
867
|
+
]);
|
|
868
|
+
this.log("Current commit hash and branch retrieved", "debug", {
|
|
869
|
+
hash,
|
|
870
|
+
branch
|
|
871
|
+
});
|
|
872
|
+
return { hash, branch };
|
|
873
|
+
} catch (error) {
|
|
874
|
+
const errorMessage = `Failed to get current commit hash and branch: ${error.message}`;
|
|
875
|
+
this.log(errorMessage, "error", { error });
|
|
876
|
+
return { hash: "", branch: "" };
|
|
877
|
+
}
|
|
878
|
+
}
|
|
879
|
+
/**
|
|
880
|
+
* Gets the remote repository URL
|
|
881
|
+
*/
|
|
882
|
+
async getRemoteUrl() {
|
|
883
|
+
this.log("Getting remote repository URL", "debug");
|
|
884
|
+
try {
|
|
885
|
+
const remoteUrl = await this.git.getConfig("remote.origin.url");
|
|
886
|
+
const url = remoteUrl.value || "";
|
|
887
|
+
let normalizedUrl = url;
|
|
888
|
+
if (normalizedUrl.endsWith(".git")) {
|
|
889
|
+
normalizedUrl = normalizedUrl.slice(0, -".git".length);
|
|
890
|
+
}
|
|
891
|
+
if (normalizedUrl.startsWith("git@github.com:")) {
|
|
892
|
+
normalizedUrl = normalizedUrl.replace(
|
|
893
|
+
"git@github.com:",
|
|
894
|
+
"https://github.com/"
|
|
895
|
+
);
|
|
896
|
+
}
|
|
897
|
+
this.log("Remote repository URL retrieved", "debug", {
|
|
898
|
+
url: normalizedUrl
|
|
899
|
+
});
|
|
900
|
+
return normalizedUrl;
|
|
901
|
+
} catch (error) {
|
|
902
|
+
const errorMessage = `Failed to get remote repository URL: ${error.message}`;
|
|
903
|
+
this.log(errorMessage, "error", { error });
|
|
904
|
+
throw new Error(errorMessage);
|
|
905
|
+
}
|
|
906
|
+
}
|
|
907
|
+
/**
|
|
908
|
+
* Gets the maxFiles most recently changed files, starting with current changes and then from commit history
|
|
909
|
+
*/
|
|
910
|
+
async getRecentlyChangedFiles({
|
|
911
|
+
maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
|
|
912
|
+
}) {
|
|
913
|
+
this.log(
|
|
914
|
+
`Getting the ${maxFiles} most recently changed files, starting with current changes`,
|
|
915
|
+
"debug"
|
|
916
|
+
);
|
|
917
|
+
try {
|
|
918
|
+
const currentChanges = await this.getChangedFiles();
|
|
919
|
+
const gitRoot = await this.git.revparse(["--show-toplevel"]);
|
|
920
|
+
const relativePathFromGitRoot = path2.relative(
|
|
921
|
+
gitRoot,
|
|
922
|
+
this.repositoryPath
|
|
923
|
+
);
|
|
924
|
+
const fileSet = /* @__PURE__ */ new Set();
|
|
925
|
+
let commitsProcessed = 0;
|
|
926
|
+
for (const file of currentChanges.files) {
|
|
927
|
+
if (fileSet.size >= maxFiles) {
|
|
928
|
+
break;
|
|
929
|
+
}
|
|
930
|
+
const fullPath = path2.join(this.repositoryPath, file);
|
|
931
|
+
if (await FileUtils.shouldPackFile(fullPath) && !file.startsWith("..")) {
|
|
932
|
+
fileSet.add(file);
|
|
933
|
+
}
|
|
934
|
+
}
|
|
935
|
+
this.log(`Added ${fileSet.size} files from current changes`, "debug", {
|
|
936
|
+
filesFromCurrentChanges: fileSet.size,
|
|
937
|
+
currentChangesTotal: currentChanges.files.length
|
|
938
|
+
});
|
|
939
|
+
const logResult = await this.git.log({
|
|
940
|
+
maxCount: maxFiles * 5,
|
|
941
|
+
// 5 times the max files to scan to ensure we find enough files
|
|
942
|
+
format: {
|
|
943
|
+
hash: "%H",
|
|
944
|
+
date: "%ai",
|
|
945
|
+
message: "%s",
|
|
946
|
+
//the field name author_name can't follow the naming convention as we are using the git log command
|
|
947
|
+
author_name: "%an"
|
|
948
|
+
}
|
|
949
|
+
});
|
|
950
|
+
for (const commit of logResult.all) {
|
|
951
|
+
if (fileSet.size >= maxFiles) {
|
|
952
|
+
break;
|
|
953
|
+
}
|
|
954
|
+
commitsProcessed++;
|
|
955
|
+
try {
|
|
956
|
+
const filesOutput = await this.git.show([
|
|
957
|
+
"--name-only",
|
|
958
|
+
"--pretty=format:",
|
|
959
|
+
commit.hash
|
|
960
|
+
]);
|
|
961
|
+
const commitFiles = filesOutput.split("\n").filter((file) => file.trim() !== "");
|
|
962
|
+
for (const file of commitFiles) {
|
|
963
|
+
if (fileSet.size >= maxFiles) {
|
|
964
|
+
break;
|
|
965
|
+
}
|
|
966
|
+
const gitRelativePath = file.trim();
|
|
967
|
+
let adjustedPath;
|
|
968
|
+
if (relativePathFromGitRoot === "") {
|
|
969
|
+
adjustedPath = gitRelativePath;
|
|
970
|
+
} else if (gitRelativePath.startsWith(relativePathFromGitRoot + "/")) {
|
|
971
|
+
adjustedPath = gitRelativePath.substring(
|
|
972
|
+
relativePathFromGitRoot.length + 1
|
|
973
|
+
);
|
|
974
|
+
} else {
|
|
975
|
+
adjustedPath = path2.relative(
|
|
976
|
+
this.repositoryPath,
|
|
977
|
+
path2.join(gitRoot, gitRelativePath)
|
|
978
|
+
);
|
|
979
|
+
}
|
|
980
|
+
this.log(`Considering file: ${adjustedPath}`, "debug");
|
|
981
|
+
if (!fileSet.has(adjustedPath) && await FileUtils.shouldPackFile(
|
|
982
|
+
path2.join(gitRoot, gitRelativePath)
|
|
983
|
+
) && !adjustedPath.startsWith("..")) {
|
|
984
|
+
fileSet.add(adjustedPath);
|
|
985
|
+
}
|
|
986
|
+
}
|
|
987
|
+
} catch (showError) {
|
|
988
|
+
this.log(`Could not get files for commit ${commit.hash}`, "debug", {
|
|
989
|
+
error: showError
|
|
990
|
+
});
|
|
991
|
+
}
|
|
992
|
+
}
|
|
993
|
+
const files = Array.from(fileSet);
|
|
994
|
+
this.log("Recently changed files retrieved", "info", {
|
|
995
|
+
fileCount: files.length,
|
|
996
|
+
commitsProcessed,
|
|
997
|
+
totalCommitsAvailable: logResult.all.length,
|
|
998
|
+
files: files.slice(0, maxFiles),
|
|
999
|
+
// Log the files (should be all of them since we limit to maxFiles)
|
|
1000
|
+
gitRoot,
|
|
1001
|
+
workingDir: this.repositoryPath,
|
|
1002
|
+
relativePathFromGitRoot
|
|
1003
|
+
});
|
|
1004
|
+
return {
|
|
1005
|
+
files,
|
|
1006
|
+
commitCount: commitsProcessed
|
|
1007
|
+
};
|
|
1008
|
+
} catch (error) {
|
|
1009
|
+
const errorMessage = `Failed to get recently changed files: ${error.message}`;
|
|
1010
|
+
this.log(errorMessage, "error", { error });
|
|
1011
|
+
throw new Error(errorMessage);
|
|
1012
|
+
}
|
|
1013
|
+
}
|
|
1014
|
+
/**
|
|
1015
|
+
* Normalizes a Git URL to HTTPS format for various Git hosting platforms
|
|
1016
|
+
* @param url The Git URL to normalize
|
|
1017
|
+
* @returns The normalized HTTPS URL
|
|
1018
|
+
*/
|
|
1019
|
+
normalizeGitUrl(url) {
|
|
1020
|
+
let normalizedUrl = url;
|
|
1021
|
+
if (normalizedUrl.endsWith(".git")) {
|
|
1022
|
+
normalizedUrl = normalizedUrl.slice(0, -".git".length);
|
|
1023
|
+
}
|
|
1024
|
+
const sshToHttpsMappings = [
|
|
1025
|
+
// GitHub
|
|
1026
|
+
{ pattern: "git@github.com:", replacement: "https://github.com/" },
|
|
1027
|
+
// GitLab
|
|
1028
|
+
{ pattern: "git@gitlab.com:", replacement: "https://gitlab.com/" },
|
|
1029
|
+
// Bitbucket
|
|
1030
|
+
{ pattern: "git@bitbucket.org:", replacement: "https://bitbucket.org/" },
|
|
1031
|
+
// Azure DevOps (SSH format)
|
|
1032
|
+
{
|
|
1033
|
+
pattern: "git@ssh.dev.azure.com:",
|
|
1034
|
+
replacement: "https://dev.azure.com/"
|
|
1035
|
+
},
|
|
1036
|
+
// Azure DevOps (alternative SSH format)
|
|
1037
|
+
{
|
|
1038
|
+
pattern: /git@([^:]+):v3\/([^/]+)\/([^/]+)\/([^/]+)/,
|
|
1039
|
+
replacement: "https://$1/$2/_git/$4"
|
|
1040
|
+
}
|
|
1041
|
+
];
|
|
1042
|
+
for (const mapping of sshToHttpsMappings) {
|
|
1043
|
+
if (typeof mapping.pattern === "string") {
|
|
1044
|
+
if (normalizedUrl.startsWith(mapping.pattern)) {
|
|
1045
|
+
normalizedUrl = normalizedUrl.replace(
|
|
1046
|
+
mapping.pattern,
|
|
1047
|
+
mapping.replacement
|
|
1048
|
+
);
|
|
1049
|
+
break;
|
|
1050
|
+
}
|
|
1051
|
+
} else {
|
|
1052
|
+
const match = normalizedUrl.match(mapping.pattern);
|
|
1053
|
+
if (match) {
|
|
1054
|
+
normalizedUrl = normalizedUrl.replace(
|
|
1055
|
+
mapping.pattern,
|
|
1056
|
+
mapping.replacement
|
|
1057
|
+
);
|
|
1058
|
+
break;
|
|
1059
|
+
}
|
|
1060
|
+
}
|
|
1061
|
+
}
|
|
1062
|
+
return normalizedUrl;
|
|
1063
|
+
}
|
|
1064
|
+
/**
|
|
1065
|
+
* Gets all remote repository URLs (equivalent to 'git remote -v')
|
|
1066
|
+
*/
|
|
1067
|
+
async getRepoUrls() {
|
|
1068
|
+
this.log("Getting all remote repository URLs", "debug");
|
|
1069
|
+
try {
|
|
1070
|
+
const remotes = await this.git.remote(["-v"]);
|
|
1071
|
+
if (!remotes) {
|
|
1072
|
+
return {};
|
|
1073
|
+
}
|
|
1074
|
+
const remoteMap = {};
|
|
1075
|
+
remotes.split("\n").forEach((line) => {
|
|
1076
|
+
if (!line.trim()) return;
|
|
1077
|
+
const [remoteName, url, type2] = line.split(/\s+/);
|
|
1078
|
+
if (!remoteName || !url || !type2) return;
|
|
1079
|
+
if (!remoteMap[remoteName]) {
|
|
1080
|
+
remoteMap[remoteName] = { fetch: "", push: "" };
|
|
1081
|
+
}
|
|
1082
|
+
const normalizedUrl = this.normalizeGitUrl(url);
|
|
1083
|
+
const remote = remoteMap[remoteName];
|
|
1084
|
+
if (type2 === "(fetch)") {
|
|
1085
|
+
remote.fetch = normalizedUrl;
|
|
1086
|
+
} else if (type2 === "(push)") {
|
|
1087
|
+
remote.push = normalizedUrl;
|
|
1088
|
+
}
|
|
1089
|
+
});
|
|
1090
|
+
this.log("Remote repository URLs retrieved", "debug", {
|
|
1091
|
+
remotes: remoteMap
|
|
1092
|
+
});
|
|
1093
|
+
return remoteMap;
|
|
1094
|
+
} catch (error) {
|
|
1095
|
+
const errorMessage = `Failed to get remote repository URLs: ${error.message}`;
|
|
1096
|
+
this.log(errorMessage, "error", { error });
|
|
1097
|
+
throw new Error(errorMessage);
|
|
1098
|
+
}
|
|
1099
|
+
}
|
|
1100
|
+
/**
|
|
1101
|
+
* Fetches the contents of the .gitignore file from the repository
|
|
1102
|
+
* @returns The contents of the .gitignore file as a string, or null if the file doesn't exist
|
|
1103
|
+
*/
|
|
1104
|
+
async getGitignoreContent() {
|
|
1105
|
+
this.log("Getting .gitignore contents", "debug");
|
|
1106
|
+
try {
|
|
1107
|
+
let combinedContent = "";
|
|
1108
|
+
const localGitignorePath = path2.join(this.repositoryPath, ".gitignore");
|
|
1109
|
+
if (fs3.existsSync(localGitignorePath)) {
|
|
1110
|
+
const localContent = fs3.readFileSync(localGitignorePath, "utf8");
|
|
1111
|
+
combinedContent += `${localContent}
|
|
1112
|
+
`;
|
|
1113
|
+
}
|
|
1114
|
+
try {
|
|
1115
|
+
const gitRoot = await this.git.revparse(["--show-toplevel"]);
|
|
1116
|
+
const rootGitignorePath = path2.join(gitRoot, ".gitignore");
|
|
1117
|
+
if (fs3.existsSync(rootGitignorePath)) {
|
|
1118
|
+
const rootContent = fs3.readFileSync(rootGitignorePath, "utf8");
|
|
1119
|
+
if (rootContent.trim() !== combinedContent.trim()) {
|
|
1120
|
+
combinedContent += `
|
|
1121
|
+
${rootContent}`;
|
|
1122
|
+
}
|
|
1123
|
+
}
|
|
1124
|
+
} catch (rootErr) {
|
|
1125
|
+
this.log(
|
|
1126
|
+
"Unable to resolve git root while reading .gitignore",
|
|
1127
|
+
"debug",
|
|
1128
|
+
{ error: rootErr }
|
|
1129
|
+
);
|
|
1130
|
+
}
|
|
1131
|
+
if (combinedContent.trim() === "") {
|
|
1132
|
+
this.log(".gitignore file not found", "debug");
|
|
1133
|
+
return null;
|
|
1134
|
+
}
|
|
1135
|
+
this.log(".gitignore contents retrieved successfully", "debug");
|
|
1136
|
+
return combinedContent.trimEnd();
|
|
1137
|
+
} catch (error) {
|
|
1138
|
+
const errorMessage = `Failed to get .gitignore contents: ${error.message}`;
|
|
1139
|
+
this.log(errorMessage, "error", { error });
|
|
1140
|
+
return null;
|
|
1141
|
+
}
|
|
1142
|
+
}
|
|
1143
|
+
async getGitignoreMatcher() {
|
|
1144
|
+
const content = await this.getGitignoreContent();
|
|
1145
|
+
if (!content) return null;
|
|
1146
|
+
return ignore().add(content);
|
|
1147
|
+
}
|
|
1148
|
+
};
|
|
1149
|
+
}
|
|
1150
|
+
});
|
|
1151
|
+
|
|
9
1152
|
// src/index.ts
|
|
10
1153
|
import Debug20 from "debug";
|
|
11
1154
|
import { hideBin } from "yargs/helpers";
|
|
12
1155
|
|
|
13
1156
|
// src/args/commands/convert_to_sarif.ts
|
|
14
|
-
import
|
|
1157
|
+
import fs6 from "fs";
|
|
15
1158
|
|
|
16
1159
|
// src/commands/convert_to_sarif.ts
|
|
17
|
-
import
|
|
1160
|
+
import fs5 from "fs";
|
|
18
1161
|
import path5 from "path";
|
|
19
1162
|
|
|
20
1163
|
// src/commands/fpr_stream_parser.ts
|
|
@@ -399,6 +1542,7 @@ var IssueType_Enum = /* @__PURE__ */ ((IssueType_Enum2) => {
|
|
|
399
1542
|
IssueType_Enum2["ImproperResourceShutdownOrRelease"] = "IMPROPER_RESOURCE_SHUTDOWN_OR_RELEASE";
|
|
400
1543
|
IssueType_Enum2["ImproperStringFormatting"] = "IMPROPER_STRING_FORMATTING";
|
|
401
1544
|
IssueType_Enum2["IncompleteHostnameRegex"] = "INCOMPLETE_HOSTNAME_REGEX";
|
|
1545
|
+
IssueType_Enum2["IncompleteSanitization"] = "INCOMPLETE_SANITIZATION";
|
|
402
1546
|
IssueType_Enum2["IncompleteUrlSanitization"] = "INCOMPLETE_URL_SANITIZATION";
|
|
403
1547
|
IssueType_Enum2["IncompleteUrlSchemeCheck"] = "INCOMPLETE_URL_SCHEME_CHECK";
|
|
404
1548
|
IssueType_Enum2["InformationExposureViaHeaders"] = "INFORMATION_EXPOSURE_VIA_HEADERS";
|
|
@@ -956,7 +2100,7 @@ var DigestVulnerabilityReportDocument = `
|
|
|
956
2100
|
}
|
|
957
2101
|
`;
|
|
958
2102
|
var SubmitVulnerabilityReportDocument = `
|
|
959
|
-
mutation SubmitVulnerabilityReport($fixReportId: String!, $repoUrl: String!, $reference: String!, $projectId: String!, $scanSource: String!, $sha: String, $experimentalEnabled: Boolean, $vulnerabilityReportFileName: String, $pullRequest: Int) {
|
|
2103
|
+
mutation SubmitVulnerabilityReport($fixReportId: String!, $repoUrl: String!, $reference: String!, $projectId: String!, $scanSource: String!, $sha: String, $experimentalEnabled: Boolean, $vulnerabilityReportFileName: String, $pullRequest: Int, $isFullScan: Boolean) {
|
|
960
2104
|
submitVulnerabilityReport(
|
|
961
2105
|
fixReportId: $fixReportId
|
|
962
2106
|
repoUrl: $repoUrl
|
|
@@ -964,6 +2108,7 @@ var SubmitVulnerabilityReportDocument = `
|
|
|
964
2108
|
sha: $sha
|
|
965
2109
|
experimentalEnabled: $experimentalEnabled
|
|
966
2110
|
pullRequest: $pullRequest
|
|
2111
|
+
isFullScan: $isFullScan
|
|
967
2112
|
projectId: $projectId
|
|
968
2113
|
vulnerabilityReportFileName: $vulnerabilityReportFileName
|
|
969
2114
|
scanSource: $scanSource
|
|
@@ -1077,6 +2222,22 @@ var AutoPrAnalysisDocument = `
|
|
|
1077
2222
|
}
|
|
1078
2223
|
}
|
|
1079
2224
|
`;
|
|
2225
|
+
var GetFixReportsByRepoUrlDocument = `
|
|
2226
|
+
query GetFixReportsByRepoUrl($repoUrl: String!) {
|
|
2227
|
+
fixReport(where: {repo: {originalUrl: {_eq: $repoUrl}}}) {
|
|
2228
|
+
id
|
|
2229
|
+
state
|
|
2230
|
+
createdOn
|
|
2231
|
+
repo {
|
|
2232
|
+
originalUrl
|
|
2233
|
+
}
|
|
2234
|
+
vulnerabilityReport {
|
|
2235
|
+
scanDate
|
|
2236
|
+
vendor
|
|
2237
|
+
}
|
|
2238
|
+
}
|
|
2239
|
+
}
|
|
2240
|
+
`;
|
|
1080
2241
|
var GetReportFixesDocument = `
|
|
1081
2242
|
query GetReportFixes($reportId: uuid!, $filters: fix_bool_exp = {}, $limit: Int!, $offset: Int!, $currentUserEmail: String!) {
|
|
1082
2243
|
fixReport(where: {_and: [{id: {_eq: $reportId}}, {state: {_eq: Finished}}]}) {
|
|
@@ -1182,6 +2343,9 @@ function getSdk(client, withWrapper = defaultWrapper) {
|
|
|
1182
2343
|
autoPrAnalysis(variables, requestHeaders, signal) {
|
|
1183
2344
|
return withWrapper((wrappedRequestHeaders) => client.request({ document: AutoPrAnalysisDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "autoPrAnalysis", "mutation", variables);
|
|
1184
2345
|
},
|
|
2346
|
+
GetFixReportsByRepoUrl(variables, requestHeaders, signal) {
|
|
2347
|
+
return withWrapper((wrappedRequestHeaders) => client.request({ document: GetFixReportsByRepoUrlDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetFixReportsByRepoUrl", "query", variables);
|
|
2348
|
+
},
|
|
1185
2349
|
GetReportFixes(variables, requestHeaders, signal) {
|
|
1186
2350
|
return withWrapper((wrappedRequestHeaders) => client.request({ document: GetReportFixesDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetReportFixes", "query", variables);
|
|
1187
2351
|
},
|
|
@@ -1469,7 +2633,8 @@ var fixDetailsData = {
|
|
|
1469
2633
|
["HTTP_PARAMETER_POLLUTION" /* HttpParameterPollution */]: {
|
|
1470
2634
|
issueDescription: "HTTP Parameter Pollution occurs when an attacker can manipulate the parameters of an HTTP request to change the behavior of the server.",
|
|
1471
2635
|
fixInstructions: "Implement proper input validation and bounds checking to prevent HTTP parameter pollution. Use safe string manipulation functions and ensure that the buffer size is properly managed."
|
|
1472
|
-
}
|
|
2636
|
+
},
|
|
2637
|
+
["INCOMPLETE_SANITIZATION" /* IncompleteSanitization */]: void 0
|
|
1473
2638
|
};
|
|
1474
2639
|
|
|
1475
2640
|
// src/features/analysis/scm/shared/src/getIssueType.ts
|
|
@@ -1586,7 +2751,8 @@ var issueTypeMap = {
|
|
|
1586
2751
|
["DO_NOT_THROW_GENERIC_EXCEPTION" /* DoNotThrowGenericException */]: "Do Not Throw Generic Exception",
|
|
1587
2752
|
["BUFFER_OVERFLOW" /* BufferOverflow */]: "Buffer Overflow",
|
|
1588
2753
|
["STRING_TERMINATION_ERROR" /* StringTerminationError */]: "String Termination Error",
|
|
1589
|
-
["HTTP_PARAMETER_POLLUTION" /* HttpParameterPollution */]: "HTTP Parameter Pollution"
|
|
2754
|
+
["HTTP_PARAMETER_POLLUTION" /* HttpParameterPollution */]: "HTTP Parameter Pollution",
|
|
2755
|
+
["INCOMPLETE_SANITIZATION" /* IncompleteSanitization */]: "Incomplete Sanitization"
|
|
1590
2756
|
};
|
|
1591
2757
|
var issueTypeZ = z.nativeEnum(IssueType_Enum);
|
|
1592
2758
|
var getIssueTypeFriendlyString = (issueType) => {
|
|
@@ -4518,26 +5684,12 @@ function getScmConfig({
|
|
|
4518
5684
|
var DEFUALT_ADO_ORIGIN = scmCloudUrl.Ado;
|
|
4519
5685
|
|
|
4520
5686
|
// src/features/analysis/scm/ado/utils.ts
|
|
5687
|
+
init_env();
|
|
4521
5688
|
import querystring from "querystring";
|
|
4522
5689
|
import * as api from "azure-devops-node-api";
|
|
4523
5690
|
import Debug from "debug";
|
|
4524
5691
|
import { z as z17 } from "zod";
|
|
4525
5692
|
|
|
4526
|
-
// src/features/analysis/scm/env.ts
|
|
4527
|
-
import { z as z15 } from "zod";
|
|
4528
|
-
var EnvVariablesZod = z15.object({
|
|
4529
|
-
GITLAB_API_TOKEN: z15.string().optional(),
|
|
4530
|
-
GITHUB_API_TOKEN: z15.string().optional(),
|
|
4531
|
-
GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888"),
|
|
4532
|
-
MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5)
|
|
4533
|
-
});
|
|
4534
|
-
var {
|
|
4535
|
-
GITLAB_API_TOKEN,
|
|
4536
|
-
GITHUB_API_TOKEN,
|
|
4537
|
-
GIT_PROXY_HOST,
|
|
4538
|
-
MAX_UPLOAD_FILE_SIZE_MB
|
|
4539
|
-
} = EnvVariablesZod.parse(process.env);
|
|
4540
|
-
|
|
4541
5693
|
// src/features/analysis/scm/ado/validation.ts
|
|
4542
5694
|
import { z as z16 } from "zod";
|
|
4543
5695
|
var ValidPullRequestStatusZ = z16.union([
|
|
@@ -5162,663 +6314,8 @@ async function getAdoRepoList({
|
|
|
5162
6314
|
// src/features/analysis/scm/ado/AdoSCMLib.ts
|
|
5163
6315
|
import { setTimeout as setTimeout2 } from "timers/promises";
|
|
5164
6316
|
|
|
5165
|
-
// src/features/analysis/scm/git/GitService.ts
|
|
5166
|
-
import * as path2 from "path";
|
|
5167
|
-
import { simpleGit } from "simple-git";
|
|
5168
|
-
|
|
5169
|
-
// src/mcp/core/configs.ts
|
|
5170
|
-
var MCP_DEFAULT_API_URL = "https://api.mobb.ai/v1/graphql";
|
|
5171
|
-
var MCP_API_KEY_HEADER_NAME = "x-mobb-key";
|
|
5172
|
-
var MCP_LOGIN_MAX_WAIT = 2 * 60 * 1e3;
|
|
5173
|
-
var MCP_LOGIN_CHECK_DELAY = 2 * 1e3;
|
|
5174
|
-
var MCP_VUL_REPORT_DIGEST_TIMEOUT_MS = 5 * 60 * 1e3;
|
|
5175
|
-
var MCP_MAX_FILE_SIZE = MAX_UPLOAD_FILE_SIZE_MB * 1024 * 1024;
|
|
5176
|
-
var MCP_PERIODIC_CHECK_INTERVAL = 15 * 60 * 1e3;
|
|
5177
|
-
var MCP_DEFAULT_MAX_FILES_TO_SCAN = 10;
|
|
5178
|
-
var MCP_REPORT_ID_EXPIRATION_MS = 2 * 60 * 60 * 1e3;
|
|
5179
|
-
var MCP_TOOLS_BROWSER_COOLDOWN_MS = 24 * 60 * 60 * 1e3;
|
|
5180
|
-
var MCP_DEFAULT_LIMIT = 3;
|
|
5181
|
-
|
|
5182
|
-
// src/features/analysis/scm/FileUtils.ts
|
|
5183
|
-
import fs2 from "fs";
|
|
5184
|
-
import { isBinary } from "istextorbinary";
|
|
5185
|
-
import path from "path";
|
|
5186
|
-
var EXCLUDED_FILE_PATTERNS = [
|
|
5187
|
-
// ... (copy the full array from FilePacking.ts)
|
|
5188
|
-
".json",
|
|
5189
|
-
".snap",
|
|
5190
|
-
".env.vault",
|
|
5191
|
-
".env",
|
|
5192
|
-
".yaml",
|
|
5193
|
-
".yml",
|
|
5194
|
-
".toml",
|
|
5195
|
-
".ini",
|
|
5196
|
-
".conf",
|
|
5197
|
-
".config",
|
|
5198
|
-
".xml",
|
|
5199
|
-
".env",
|
|
5200
|
-
".md",
|
|
5201
|
-
".txt",
|
|
5202
|
-
".rst",
|
|
5203
|
-
".adoc",
|
|
5204
|
-
".lock",
|
|
5205
|
-
".png",
|
|
5206
|
-
".jpg",
|
|
5207
|
-
".jpeg",
|
|
5208
|
-
".gif",
|
|
5209
|
-
".svg",
|
|
5210
|
-
".ico",
|
|
5211
|
-
".webp",
|
|
5212
|
-
".bmp",
|
|
5213
|
-
".tiff",
|
|
5214
|
-
".ttf",
|
|
5215
|
-
".otf",
|
|
5216
|
-
".woff",
|
|
5217
|
-
".woff2",
|
|
5218
|
-
".eot",
|
|
5219
|
-
".zip",
|
|
5220
|
-
".tar",
|
|
5221
|
-
".gz",
|
|
5222
|
-
".rar",
|
|
5223
|
-
".7z",
|
|
5224
|
-
".log",
|
|
5225
|
-
".db",
|
|
5226
|
-
".sqlite",
|
|
5227
|
-
".sql",
|
|
5228
|
-
".pem",
|
|
5229
|
-
".crt",
|
|
5230
|
-
".key",
|
|
5231
|
-
".p12",
|
|
5232
|
-
".pfx",
|
|
5233
|
-
".editorconfig",
|
|
5234
|
-
".sublime-project",
|
|
5235
|
-
".sublime-workspace",
|
|
5236
|
-
".DS_Store",
|
|
5237
|
-
"Thumbs.db",
|
|
5238
|
-
".lcov",
|
|
5239
|
-
".exe",
|
|
5240
|
-
".dll",
|
|
5241
|
-
".so",
|
|
5242
|
-
".dylib",
|
|
5243
|
-
".class",
|
|
5244
|
-
".pyc",
|
|
5245
|
-
".pyo",
|
|
5246
|
-
".o",
|
|
5247
|
-
".obj",
|
|
5248
|
-
".min.js",
|
|
5249
|
-
".min.css",
|
|
5250
|
-
".min.html",
|
|
5251
|
-
".test.js",
|
|
5252
|
-
".test.ts",
|
|
5253
|
-
".test.jsx",
|
|
5254
|
-
".test.tsx",
|
|
5255
|
-
".spec.js",
|
|
5256
|
-
".spec.ts",
|
|
5257
|
-
".spec.jsx",
|
|
5258
|
-
".spec.tsx",
|
|
5259
|
-
".d.ts",
|
|
5260
|
-
".bundle.js",
|
|
5261
|
-
".chunk.js",
|
|
5262
|
-
"dockerfile",
|
|
5263
|
-
"jenkinsfile",
|
|
5264
|
-
"go.sum",
|
|
5265
|
-
".gitignore",
|
|
5266
|
-
".gitattributes",
|
|
5267
|
-
".gitmodules",
|
|
5268
|
-
".gitkeep",
|
|
5269
|
-
".keep",
|
|
5270
|
-
".hgignore",
|
|
5271
|
-
".nvmrc",
|
|
5272
|
-
".node-version",
|
|
5273
|
-
".npmrc",
|
|
5274
|
-
".yarnrc",
|
|
5275
|
-
".pnpmfile.cjs",
|
|
5276
|
-
".ruby-version",
|
|
5277
|
-
".python-version",
|
|
5278
|
-
".rvmrc",
|
|
5279
|
-
".rbenv-version",
|
|
5280
|
-
".gvmrc",
|
|
5281
|
-
"makefile",
|
|
5282
|
-
"rakefile",
|
|
5283
|
-
"gulpfile.js",
|
|
5284
|
-
"gruntfile.js",
|
|
5285
|
-
"webpack.config.js",
|
|
5286
|
-
"webpack.config.ts",
|
|
5287
|
-
"rollup.config.js",
|
|
5288
|
-
"vite.config.js",
|
|
5289
|
-
"vite.config.ts",
|
|
5290
|
-
"next.config.js",
|
|
5291
|
-
"nuxt.config.js",
|
|
5292
|
-
"tailwind.config.js",
|
|
5293
|
-
"postcss.config.js",
|
|
5294
|
-
".babelrc",
|
|
5295
|
-
".babelrc.js",
|
|
5296
|
-
".swcrc",
|
|
5297
|
-
".browserslistrc",
|
|
5298
|
-
"jest.config.js",
|
|
5299
|
-
"jest.config.ts",
|
|
5300
|
-
"vitest.config.js",
|
|
5301
|
-
"karma.conf.js",
|
|
5302
|
-
"protractor.conf.js",
|
|
5303
|
-
"cypress.config.js",
|
|
5304
|
-
"playwright.config.js",
|
|
5305
|
-
".nycrc",
|
|
5306
|
-
".c8rc",
|
|
5307
|
-
".eslintrc",
|
|
5308
|
-
".eslintrc.js",
|
|
5309
|
-
".prettierrc",
|
|
5310
|
-
".prettierrc.js",
|
|
5311
|
-
".stylelintrc",
|
|
5312
|
-
".stylelintrc.js",
|
|
5313
|
-
"pipfile",
|
|
5314
|
-
"gemfile",
|
|
5315
|
-
"go.mod",
|
|
5316
|
-
"project.clj",
|
|
5317
|
-
"setup.py",
|
|
5318
|
-
"setup.cfg",
|
|
5319
|
-
"manifest.in",
|
|
5320
|
-
".pythonrc",
|
|
5321
|
-
"readme",
|
|
5322
|
-
"changelog",
|
|
5323
|
-
"authors",
|
|
5324
|
-
"contributors",
|
|
5325
|
-
"license",
|
|
5326
|
-
"notice",
|
|
5327
|
-
"copyright",
|
|
5328
|
-
".htaccess"
|
|
5329
|
-
];
|
|
5330
|
-
var FileUtils = class {
|
|
5331
|
-
static isExcludedFileType(filepath) {
|
|
5332
|
-
const basename = path.basename(filepath).toLowerCase();
|
|
5333
|
-
if (basename === ".env" || basename.startsWith(".env.")) {
|
|
5334
|
-
return true;
|
|
5335
|
-
}
|
|
5336
|
-
if (EXCLUDED_FILE_PATTERNS.some((pattern) => basename.endsWith(pattern))) {
|
|
5337
|
-
return true;
|
|
5338
|
-
}
|
|
5339
|
-
return false;
|
|
5340
|
-
}
|
|
5341
|
-
static shouldPackFile(filepath, maxFileSize = 1024 * 1024 * 5) {
|
|
5342
|
-
const absoluteFilepath = path.resolve(filepath);
|
|
5343
|
-
if (this.isExcludedFileType(filepath)) {
|
|
5344
|
-
return false;
|
|
5345
|
-
}
|
|
5346
|
-
if (!fs2.existsSync(absoluteFilepath)) {
|
|
5347
|
-
return false;
|
|
5348
|
-
}
|
|
5349
|
-
if (fs2.lstatSync(absoluteFilepath).size > maxFileSize) {
|
|
5350
|
-
return false;
|
|
5351
|
-
}
|
|
5352
|
-
let data;
|
|
5353
|
-
try {
|
|
5354
|
-
data = fs2.readFileSync(absoluteFilepath);
|
|
5355
|
-
} catch {
|
|
5356
|
-
return false;
|
|
5357
|
-
}
|
|
5358
|
-
if (isBinary(null, data)) {
|
|
5359
|
-
return false;
|
|
5360
|
-
}
|
|
5361
|
-
return true;
|
|
5362
|
-
}
|
|
5363
|
-
static getAllFiles(dir, rootDir) {
|
|
5364
|
-
const root = rootDir || dir;
|
|
5365
|
-
const results = [];
|
|
5366
|
-
const relativeDepth = path.relative(root, dir).split(path.sep).length;
|
|
5367
|
-
if (relativeDepth > 20) {
|
|
5368
|
-
return [];
|
|
5369
|
-
}
|
|
5370
|
-
if (results.length > 1e3) {
|
|
5371
|
-
return [];
|
|
5372
|
-
}
|
|
5373
|
-
try {
|
|
5374
|
-
fs2.accessSync(dir, fs2.constants.R_OK);
|
|
5375
|
-
} catch {
|
|
5376
|
-
return [];
|
|
5377
|
-
}
|
|
5378
|
-
const items = fs2.readdirSync(dir);
|
|
5379
|
-
for (const item of items) {
|
|
5380
|
-
const fullPath = path.join(dir, item);
|
|
5381
|
-
try {
|
|
5382
|
-
fs2.accessSync(fullPath, fs2.constants.R_OK);
|
|
5383
|
-
} catch {
|
|
5384
|
-
continue;
|
|
5385
|
-
}
|
|
5386
|
-
const stat = fs2.statSync(fullPath);
|
|
5387
|
-
if (stat.isDirectory()) {
|
|
5388
|
-
results.push(...this.getAllFiles(fullPath, root));
|
|
5389
|
-
} else {
|
|
5390
|
-
results.push({
|
|
5391
|
-
name: item,
|
|
5392
|
-
fullPath,
|
|
5393
|
-
relativePath: path.relative(root, fullPath),
|
|
5394
|
-
time: stat.mtime.getTime(),
|
|
5395
|
-
isFile: true
|
|
5396
|
-
});
|
|
5397
|
-
}
|
|
5398
|
-
}
|
|
5399
|
-
return results;
|
|
5400
|
-
}
|
|
5401
|
-
static getLastChangedFiles({
|
|
5402
|
-
dir,
|
|
5403
|
-
maxFileSize,
|
|
5404
|
-
maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
|
|
5405
|
-
}) {
|
|
5406
|
-
if (!fs2.existsSync(dir) || !fs2.lstatSync(dir).isDirectory()) return [];
|
|
5407
|
-
const files = this.getAllFiles(dir);
|
|
5408
|
-
return files.filter((file) => this.shouldPackFile(file.fullPath, maxFileSize)).sort((a, b) => b.time - a.time).slice(0, maxFiles).map((file) => file.relativePath);
|
|
5409
|
-
}
|
|
5410
|
-
};
|
|
5411
|
-
|
|
5412
|
-
// src/features/analysis/scm/git/GitService.ts
|
|
5413
|
-
var GitService = class {
|
|
5414
|
-
constructor(repositoryPath, log2) {
|
|
5415
|
-
__publicField(this, "git");
|
|
5416
|
-
__publicField(this, "repositoryPath");
|
|
5417
|
-
__publicField(this, "log");
|
|
5418
|
-
const noopLog = (_message, _level, _data) => {
|
|
5419
|
-
};
|
|
5420
|
-
this.log = log2 || noopLog;
|
|
5421
|
-
this.git = simpleGit(repositoryPath, { binary: "git" });
|
|
5422
|
-
this.repositoryPath = repositoryPath;
|
|
5423
|
-
this.log("Git service initialized", "debug", { repositoryPath });
|
|
5424
|
-
}
|
|
5425
|
-
/**
|
|
5426
|
-
* Validates that the path is a valid git repository
|
|
5427
|
-
*/
|
|
5428
|
-
async validateRepository() {
|
|
5429
|
-
this.log("Validating git repository", "debug");
|
|
5430
|
-
try {
|
|
5431
|
-
const isRepo = await this.git.checkIsRepo();
|
|
5432
|
-
if (!isRepo) {
|
|
5433
|
-
const error = "Path is not a valid git repository";
|
|
5434
|
-
this.log(error, "error");
|
|
5435
|
-
return { isValid: false, error };
|
|
5436
|
-
}
|
|
5437
|
-
this.log("Git repository validation successful", "debug");
|
|
5438
|
-
return { isValid: true };
|
|
5439
|
-
} catch (error) {
|
|
5440
|
-
const errorMessage = `Failed to verify git repository: ${error.message}`;
|
|
5441
|
-
this.log(errorMessage, "error", { error });
|
|
5442
|
-
return { isValid: false, error: errorMessage };
|
|
5443
|
-
}
|
|
5444
|
-
}
|
|
5445
|
-
/**
|
|
5446
|
-
* Gets the current git status and returns changed files
|
|
5447
|
-
*/
|
|
5448
|
-
async getChangedFiles() {
|
|
5449
|
-
this.log("Getting git status", "debug");
|
|
5450
|
-
try {
|
|
5451
|
-
const status = await this.git.status();
|
|
5452
|
-
const gitRoot = await this.git.revparse(["--show-toplevel"]);
|
|
5453
|
-
const relativePathFromGitRoot = path2.relative(
|
|
5454
|
-
gitRoot,
|
|
5455
|
-
this.repositoryPath
|
|
5456
|
-
);
|
|
5457
|
-
const deletedFiles = status.files.filter((file) => file.index === "D" || file.working_dir === "D").map((file) => file.path);
|
|
5458
|
-
const files = status.files.filter((file) => {
|
|
5459
|
-
return !(file.index === "D" || file.working_dir === "D");
|
|
5460
|
-
}).map((file) => {
|
|
5461
|
-
const gitRelativePath = file.path;
|
|
5462
|
-
if (relativePathFromGitRoot === "") {
|
|
5463
|
-
return gitRelativePath;
|
|
5464
|
-
}
|
|
5465
|
-
if (gitRelativePath.startsWith(relativePathFromGitRoot + "/")) {
|
|
5466
|
-
return gitRelativePath.substring(relativePathFromGitRoot.length + 1);
|
|
5467
|
-
}
|
|
5468
|
-
return path2.relative(
|
|
5469
|
-
this.repositoryPath,
|
|
5470
|
-
path2.join(gitRoot, gitRelativePath)
|
|
5471
|
-
);
|
|
5472
|
-
});
|
|
5473
|
-
this.log("Git status retrieved", "info", {
|
|
5474
|
-
fileCount: files.length,
|
|
5475
|
-
files: files.slice(0, 10),
|
|
5476
|
-
// Log first 10 files to avoid spam
|
|
5477
|
-
deletedFileCount: deletedFiles.length,
|
|
5478
|
-
deletedFiles: deletedFiles.slice(0, 10),
|
|
5479
|
-
gitRoot,
|
|
5480
|
-
workingDir: this.repositoryPath,
|
|
5481
|
-
relativePathFromGitRoot
|
|
5482
|
-
});
|
|
5483
|
-
return { files, deletedFiles, status };
|
|
5484
|
-
} catch (error) {
|
|
5485
|
-
const errorMessage = `Failed to get git status: ${error.message}`;
|
|
5486
|
-
this.log(errorMessage, "error", { error });
|
|
5487
|
-
throw new Error(errorMessage);
|
|
5488
|
-
}
|
|
5489
|
-
}
|
|
5490
|
-
/**
|
|
5491
|
-
* Gets git repository information including remote URL, current commit hash, and branch name
|
|
5492
|
-
*/
|
|
5493
|
-
async getGitInfo() {
|
|
5494
|
-
this.log("Getting git repository information", "debug");
|
|
5495
|
-
try {
|
|
5496
|
-
const [repoUrl, hash, reference] = await Promise.all([
|
|
5497
|
-
this.git.getConfig("remote.origin.url"),
|
|
5498
|
-
this.git.revparse(["HEAD"]),
|
|
5499
|
-
this.git.revparse(["--abbrev-ref", "HEAD"])
|
|
5500
|
-
]);
|
|
5501
|
-
let normalizedRepoUrl = repoUrl.value || "";
|
|
5502
|
-
if (normalizedRepoUrl.endsWith(".git")) {
|
|
5503
|
-
normalizedRepoUrl = normalizedRepoUrl.slice(0, -".git".length);
|
|
5504
|
-
}
|
|
5505
|
-
if (normalizedRepoUrl.startsWith("git@github.com:")) {
|
|
5506
|
-
normalizedRepoUrl = normalizedRepoUrl.replace(
|
|
5507
|
-
"git@github.com:",
|
|
5508
|
-
"https://github.com/"
|
|
5509
|
-
);
|
|
5510
|
-
}
|
|
5511
|
-
this.log("Git repository information retrieved", "debug", {
|
|
5512
|
-
repoUrl: normalizedRepoUrl,
|
|
5513
|
-
hash,
|
|
5514
|
-
reference
|
|
5515
|
-
});
|
|
5516
|
-
return {
|
|
5517
|
-
repoUrl: normalizedRepoUrl,
|
|
5518
|
-
hash,
|
|
5519
|
-
reference
|
|
5520
|
-
};
|
|
5521
|
-
} catch (error) {
|
|
5522
|
-
const errorMessage = `Failed to get git repository information: ${error.message}`;
|
|
5523
|
-
this.log(errorMessage, "error", { error });
|
|
5524
|
-
throw new Error(errorMessage);
|
|
5525
|
-
}
|
|
5526
|
-
}
|
|
5527
|
-
/**
|
|
5528
|
-
* Validates if a branch name is valid according to git's rules
|
|
5529
|
-
*/
|
|
5530
|
-
async isValidBranchName(branchName) {
|
|
5531
|
-
this.log("Validating branch name", "debug", { branchName });
|
|
5532
|
-
try {
|
|
5533
|
-
const result = await this.git.raw([
|
|
5534
|
-
"check-ref-format",
|
|
5535
|
-
"--branch",
|
|
5536
|
-
branchName
|
|
5537
|
-
]);
|
|
5538
|
-
const isValid = Boolean(result);
|
|
5539
|
-
this.log("Branch name validation result", "debug", {
|
|
5540
|
-
branchName,
|
|
5541
|
-
isValid
|
|
5542
|
-
});
|
|
5543
|
-
return isValid;
|
|
5544
|
-
} catch (error) {
|
|
5545
|
-
this.log("Branch name validation failed", "debug", { branchName, error });
|
|
5546
|
-
return false;
|
|
5547
|
-
}
|
|
5548
|
-
}
|
|
5549
|
-
/**
|
|
5550
|
-
* Gets the current branch name
|
|
5551
|
-
*/
|
|
5552
|
-
async getCurrentBranch() {
|
|
5553
|
-
this.log("Getting current branch name", "debug");
|
|
5554
|
-
try {
|
|
5555
|
-
const branch = await this.git.revparse(["--abbrev-ref", "HEAD"]);
|
|
5556
|
-
this.log("Current branch retrieved", "debug", { branch });
|
|
5557
|
-
return branch;
|
|
5558
|
-
} catch (error) {
|
|
5559
|
-
const errorMessage = `Failed to get current branch: ${error.message}`;
|
|
5560
|
-
this.log(errorMessage, "error", { error });
|
|
5561
|
-
throw new Error(errorMessage);
|
|
5562
|
-
}
|
|
5563
|
-
}
|
|
5564
|
-
/**
|
|
5565
|
-
* Gets the current commit hash
|
|
5566
|
-
*/
|
|
5567
|
-
async getCurrentCommitHash() {
|
|
5568
|
-
this.log("Getting current commit hash", "debug");
|
|
5569
|
-
try {
|
|
5570
|
-
const hash = await this.git.revparse(["HEAD"]);
|
|
5571
|
-
this.log("Current commit hash retrieved", "debug", { hash });
|
|
5572
|
-
return hash;
|
|
5573
|
-
} catch (error) {
|
|
5574
|
-
const errorMessage = `Failed to get current commit hash: ${error.message}`;
|
|
5575
|
-
this.log(errorMessage, "error", { error });
|
|
5576
|
-
throw new Error(errorMessage);
|
|
5577
|
-
}
|
|
5578
|
-
}
|
|
5579
|
-
/**
|
|
5580
|
-
* Gets both the current commit hash and current branch name
|
|
5581
|
-
*/
|
|
5582
|
-
async getCurrentCommitAndBranch() {
|
|
5583
|
-
this.log("Getting current commit hash and branch", "debug");
|
|
5584
|
-
try {
|
|
5585
|
-
const [hash, branch] = await Promise.all([
|
|
5586
|
-
this.git.revparse(["HEAD"]),
|
|
5587
|
-
this.git.revparse(["--abbrev-ref", "HEAD"])
|
|
5588
|
-
]);
|
|
5589
|
-
this.log("Current commit hash and branch retrieved", "debug", {
|
|
5590
|
-
hash,
|
|
5591
|
-
branch
|
|
5592
|
-
});
|
|
5593
|
-
return { hash, branch };
|
|
5594
|
-
} catch (error) {
|
|
5595
|
-
const errorMessage = `Failed to get current commit hash and branch: ${error.message}`;
|
|
5596
|
-
this.log(errorMessage, "error", { error });
|
|
5597
|
-
return { hash: "", branch: "" };
|
|
5598
|
-
}
|
|
5599
|
-
}
|
|
5600
|
-
/**
|
|
5601
|
-
* Gets the remote repository URL
|
|
5602
|
-
*/
|
|
5603
|
-
async getRemoteUrl() {
|
|
5604
|
-
this.log("Getting remote repository URL", "debug");
|
|
5605
|
-
try {
|
|
5606
|
-
const remoteUrl = await this.git.getConfig("remote.origin.url");
|
|
5607
|
-
const url = remoteUrl.value || "";
|
|
5608
|
-
let normalizedUrl = url;
|
|
5609
|
-
if (normalizedUrl.endsWith(".git")) {
|
|
5610
|
-
normalizedUrl = normalizedUrl.slice(0, -".git".length);
|
|
5611
|
-
}
|
|
5612
|
-
if (normalizedUrl.startsWith("git@github.com:")) {
|
|
5613
|
-
normalizedUrl = normalizedUrl.replace(
|
|
5614
|
-
"git@github.com:",
|
|
5615
|
-
"https://github.com/"
|
|
5616
|
-
);
|
|
5617
|
-
}
|
|
5618
|
-
this.log("Remote repository URL retrieved", "debug", {
|
|
5619
|
-
url: normalizedUrl
|
|
5620
|
-
});
|
|
5621
|
-
return normalizedUrl;
|
|
5622
|
-
} catch (error) {
|
|
5623
|
-
const errorMessage = `Failed to get remote repository URL: ${error.message}`;
|
|
5624
|
-
this.log(errorMessage, "error", { error });
|
|
5625
|
-
throw new Error(errorMessage);
|
|
5626
|
-
}
|
|
5627
|
-
}
|
|
5628
|
-
/**
|
|
5629
|
-
* Gets the maxFiles most recently changed files, starting with current changes and then from commit history
|
|
5630
|
-
*/
|
|
5631
|
-
async getRecentlyChangedFiles({
|
|
5632
|
-
maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
|
|
5633
|
-
}) {
|
|
5634
|
-
this.log(
|
|
5635
|
-
`Getting the ${maxFiles} most recently changed files, starting with current changes`,
|
|
5636
|
-
"debug"
|
|
5637
|
-
);
|
|
5638
|
-
try {
|
|
5639
|
-
const currentChanges = await this.getChangedFiles();
|
|
5640
|
-
const gitRoot = await this.git.revparse(["--show-toplevel"]);
|
|
5641
|
-
const relativePathFromGitRoot = path2.relative(
|
|
5642
|
-
gitRoot,
|
|
5643
|
-
this.repositoryPath
|
|
5644
|
-
);
|
|
5645
|
-
const fileSet = /* @__PURE__ */ new Set();
|
|
5646
|
-
let commitsProcessed = 0;
|
|
5647
|
-
for (const file of currentChanges.files) {
|
|
5648
|
-
if (fileSet.size >= maxFiles) {
|
|
5649
|
-
break;
|
|
5650
|
-
}
|
|
5651
|
-
const fullPath = path2.join(this.repositoryPath, file);
|
|
5652
|
-
if (FileUtils.shouldPackFile(fullPath) && !file.startsWith("..")) {
|
|
5653
|
-
fileSet.add(file);
|
|
5654
|
-
}
|
|
5655
|
-
}
|
|
5656
|
-
this.log(`Added ${fileSet.size} files from current changes`, "debug", {
|
|
5657
|
-
filesFromCurrentChanges: fileSet.size,
|
|
5658
|
-
currentChangesTotal: currentChanges.files.length
|
|
5659
|
-
});
|
|
5660
|
-
const logResult = await this.git.log({
|
|
5661
|
-
maxCount: maxFiles * 5,
|
|
5662
|
-
// 5 times the max files to scan to ensure we find enough files
|
|
5663
|
-
format: {
|
|
5664
|
-
hash: "%H",
|
|
5665
|
-
date: "%ai",
|
|
5666
|
-
message: "%s",
|
|
5667
|
-
//the field name author_name can't follow the naming convention as we are using the git log command
|
|
5668
|
-
author_name: "%an"
|
|
5669
|
-
}
|
|
5670
|
-
});
|
|
5671
|
-
for (const commit of logResult.all) {
|
|
5672
|
-
if (fileSet.size >= maxFiles) {
|
|
5673
|
-
break;
|
|
5674
|
-
}
|
|
5675
|
-
commitsProcessed++;
|
|
5676
|
-
try {
|
|
5677
|
-
const filesOutput = await this.git.show([
|
|
5678
|
-
"--name-only",
|
|
5679
|
-
"--pretty=format:",
|
|
5680
|
-
commit.hash
|
|
5681
|
-
]);
|
|
5682
|
-
const commitFiles = filesOutput.split("\n").filter((file) => file.trim() !== "");
|
|
5683
|
-
for (const file of commitFiles) {
|
|
5684
|
-
if (fileSet.size >= maxFiles) {
|
|
5685
|
-
break;
|
|
5686
|
-
}
|
|
5687
|
-
const gitRelativePath = file.trim();
|
|
5688
|
-
let adjustedPath;
|
|
5689
|
-
if (relativePathFromGitRoot === "") {
|
|
5690
|
-
adjustedPath = gitRelativePath;
|
|
5691
|
-
} else if (gitRelativePath.startsWith(relativePathFromGitRoot + "/")) {
|
|
5692
|
-
adjustedPath = gitRelativePath.substring(
|
|
5693
|
-
relativePathFromGitRoot.length + 1
|
|
5694
|
-
);
|
|
5695
|
-
} else {
|
|
5696
|
-
adjustedPath = path2.relative(
|
|
5697
|
-
this.repositoryPath,
|
|
5698
|
-
path2.join(gitRoot, gitRelativePath)
|
|
5699
|
-
);
|
|
5700
|
-
}
|
|
5701
|
-
this.log(`Considering file: ${adjustedPath}`, "debug");
|
|
5702
|
-
if (!fileSet.has(adjustedPath) && FileUtils.shouldPackFile(path2.join(gitRoot, gitRelativePath)) && !adjustedPath.startsWith("..")) {
|
|
5703
|
-
fileSet.add(adjustedPath);
|
|
5704
|
-
}
|
|
5705
|
-
}
|
|
5706
|
-
} catch (showError) {
|
|
5707
|
-
this.log(`Could not get files for commit ${commit.hash}`, "debug", {
|
|
5708
|
-
error: showError
|
|
5709
|
-
});
|
|
5710
|
-
}
|
|
5711
|
-
}
|
|
5712
|
-
const files = Array.from(fileSet);
|
|
5713
|
-
this.log("Recently changed files retrieved", "info", {
|
|
5714
|
-
fileCount: files.length,
|
|
5715
|
-
commitsProcessed,
|
|
5716
|
-
totalCommitsAvailable: logResult.all.length,
|
|
5717
|
-
files: files.slice(0, maxFiles),
|
|
5718
|
-
// Log the files (should be all of them since we limit to maxFiles)
|
|
5719
|
-
gitRoot,
|
|
5720
|
-
workingDir: this.repositoryPath,
|
|
5721
|
-
relativePathFromGitRoot
|
|
5722
|
-
});
|
|
5723
|
-
return {
|
|
5724
|
-
files,
|
|
5725
|
-
commitCount: commitsProcessed
|
|
5726
|
-
};
|
|
5727
|
-
} catch (error) {
|
|
5728
|
-
const errorMessage = `Failed to get recently changed files: ${error.message}`;
|
|
5729
|
-
this.log(errorMessage, "error", { error });
|
|
5730
|
-
throw new Error(errorMessage);
|
|
5731
|
-
}
|
|
5732
|
-
}
|
|
5733
|
-
/**
|
|
5734
|
-
* Normalizes a Git URL to HTTPS format for various Git hosting platforms
|
|
5735
|
-
* @param url The Git URL to normalize
|
|
5736
|
-
* @returns The normalized HTTPS URL
|
|
5737
|
-
*/
|
|
5738
|
-
normalizeGitUrl(url) {
|
|
5739
|
-
let normalizedUrl = url;
|
|
5740
|
-
if (normalizedUrl.endsWith(".git")) {
|
|
5741
|
-
normalizedUrl = normalizedUrl.slice(0, -".git".length);
|
|
5742
|
-
}
|
|
5743
|
-
const sshToHttpsMappings = [
|
|
5744
|
-
// GitHub
|
|
5745
|
-
{ pattern: "git@github.com:", replacement: "https://github.com/" },
|
|
5746
|
-
// GitLab
|
|
5747
|
-
{ pattern: "git@gitlab.com:", replacement: "https://gitlab.com/" },
|
|
5748
|
-
// Bitbucket
|
|
5749
|
-
{ pattern: "git@bitbucket.org:", replacement: "https://bitbucket.org/" },
|
|
5750
|
-
// Azure DevOps (SSH format)
|
|
5751
|
-
{
|
|
5752
|
-
pattern: "git@ssh.dev.azure.com:",
|
|
5753
|
-
replacement: "https://dev.azure.com/"
|
|
5754
|
-
},
|
|
5755
|
-
// Azure DevOps (alternative SSH format)
|
|
5756
|
-
{
|
|
5757
|
-
pattern: /git@([^:]+):v3\/([^/]+)\/([^/]+)\/([^/]+)/,
|
|
5758
|
-
replacement: "https://$1/$2/_git/$4"
|
|
5759
|
-
}
|
|
5760
|
-
];
|
|
5761
|
-
for (const mapping of sshToHttpsMappings) {
|
|
5762
|
-
if (typeof mapping.pattern === "string") {
|
|
5763
|
-
if (normalizedUrl.startsWith(mapping.pattern)) {
|
|
5764
|
-
normalizedUrl = normalizedUrl.replace(
|
|
5765
|
-
mapping.pattern,
|
|
5766
|
-
mapping.replacement
|
|
5767
|
-
);
|
|
5768
|
-
break;
|
|
5769
|
-
}
|
|
5770
|
-
} else {
|
|
5771
|
-
const match = normalizedUrl.match(mapping.pattern);
|
|
5772
|
-
if (match) {
|
|
5773
|
-
normalizedUrl = normalizedUrl.replace(
|
|
5774
|
-
mapping.pattern,
|
|
5775
|
-
mapping.replacement
|
|
5776
|
-
);
|
|
5777
|
-
break;
|
|
5778
|
-
}
|
|
5779
|
-
}
|
|
5780
|
-
}
|
|
5781
|
-
return normalizedUrl;
|
|
5782
|
-
}
|
|
5783
|
-
/**
|
|
5784
|
-
* Gets all remote repository URLs (equivalent to 'git remote -v')
|
|
5785
|
-
*/
|
|
5786
|
-
async getRepoUrls() {
|
|
5787
|
-
this.log("Getting all remote repository URLs", "debug");
|
|
5788
|
-
try {
|
|
5789
|
-
const remotes = await this.git.remote(["-v"]);
|
|
5790
|
-
if (!remotes) {
|
|
5791
|
-
return {};
|
|
5792
|
-
}
|
|
5793
|
-
const remoteMap = {};
|
|
5794
|
-
remotes.split("\n").forEach((line) => {
|
|
5795
|
-
if (!line.trim()) return;
|
|
5796
|
-
const [remoteName, url, type2] = line.split(/\s+/);
|
|
5797
|
-
if (!remoteName || !url || !type2) return;
|
|
5798
|
-
if (!remoteMap[remoteName]) {
|
|
5799
|
-
remoteMap[remoteName] = { fetch: "", push: "" };
|
|
5800
|
-
}
|
|
5801
|
-
const normalizedUrl = this.normalizeGitUrl(url);
|
|
5802
|
-
const remote = remoteMap[remoteName];
|
|
5803
|
-
if (type2 === "(fetch)") {
|
|
5804
|
-
remote.fetch = normalizedUrl;
|
|
5805
|
-
} else if (type2 === "(push)") {
|
|
5806
|
-
remote.push = normalizedUrl;
|
|
5807
|
-
}
|
|
5808
|
-
});
|
|
5809
|
-
this.log("Remote repository URLs retrieved", "debug", {
|
|
5810
|
-
remotes: remoteMap
|
|
5811
|
-
});
|
|
5812
|
-
return remoteMap;
|
|
5813
|
-
} catch (error) {
|
|
5814
|
-
const errorMessage = `Failed to get remote repository URLs: ${error.message}`;
|
|
5815
|
-
this.log(errorMessage, "error", { error });
|
|
5816
|
-
throw new Error(errorMessage);
|
|
5817
|
-
}
|
|
5818
|
-
}
|
|
5819
|
-
};
|
|
5820
|
-
|
|
5821
6317
|
// src/features/analysis/scm/scmSubmit/index.ts
|
|
6318
|
+
init_GitService();
|
|
5822
6319
|
var isValidBranchName = async (branchName) => {
|
|
5823
6320
|
const gitService = new GitService(process.cwd());
|
|
5824
6321
|
return gitService.isValidBranchName(branchName);
|
|
@@ -6655,6 +7152,9 @@ var MOBB_ICON_IMG = "https://app.mobb.ai/gh-action/Logo_Rounded_Icon.svg";
|
|
|
6655
7152
|
var MAX_BRANCHES_FETCH = 1e3;
|
|
6656
7153
|
var REPORT_DEFAULT_FILE_NAME = "report.json";
|
|
6657
7154
|
|
|
7155
|
+
// src/features/analysis/scm/index.ts
|
|
7156
|
+
init_env();
|
|
7157
|
+
|
|
6658
7158
|
// src/features/analysis/scm/github/GithubSCMLib.ts
|
|
6659
7159
|
import { z as z21 } from "zod";
|
|
6660
7160
|
|
|
@@ -7060,13 +7560,15 @@ function getGithubSdk(params = {}) {
|
|
|
7060
7560
|
if (!res?.repository?.object?.blame?.ranges) {
|
|
7061
7561
|
return [];
|
|
7062
7562
|
}
|
|
7063
|
-
return res.repository.object.blame.ranges.map(
|
|
7064
|
-
|
|
7065
|
-
|
|
7066
|
-
|
|
7067
|
-
|
|
7068
|
-
|
|
7069
|
-
|
|
7563
|
+
return res.repository.object.blame.ranges.map(
|
|
7564
|
+
(range) => ({
|
|
7565
|
+
startingLine: range.startingLine,
|
|
7566
|
+
endingLine: range.endingLine,
|
|
7567
|
+
email: range.commit.author.user?.email || "",
|
|
7568
|
+
name: range.commit.author.user?.name || "",
|
|
7569
|
+
login: range.commit.author.user?.login || ""
|
|
7570
|
+
})
|
|
7571
|
+
);
|
|
7070
7572
|
},
|
|
7071
7573
|
// todo: refactor the name for this function
|
|
7072
7574
|
async createPr(params2) {
|
|
@@ -7089,9 +7591,8 @@ function getGithubSdk(params = {}) {
|
|
|
7089
7591
|
sha: await octokit.rest.git.getRef({ owner, repo, ref: `heads/${defaultBranch}` }).then((response) => response.data.object.sha)
|
|
7090
7592
|
});
|
|
7091
7593
|
const decodedContent = Buffer.from(
|
|
7092
|
-
//
|
|
7093
|
-
|
|
7094
|
-
sourceFileContentResponse.data.content,
|
|
7594
|
+
// Check if file content exists and handle different response types
|
|
7595
|
+
typeof sourceFileContentResponse.data === "object" && !Array.isArray(sourceFileContentResponse.data) && "content" in sourceFileContentResponse.data && typeof sourceFileContentResponse.data.content === "string" ? sourceFileContentResponse.data.content : "",
|
|
7095
7596
|
"base64"
|
|
7096
7597
|
).toString("utf-8");
|
|
7097
7598
|
const tree = [
|
|
@@ -7109,9 +7610,8 @@ function getGithubSdk(params = {}) {
|
|
|
7109
7610
|
path: "/" + secondFilePath
|
|
7110
7611
|
});
|
|
7111
7612
|
const secondDecodedContent = Buffer.from(
|
|
7112
|
-
//
|
|
7113
|
-
|
|
7114
|
-
secondFileContentResponse.data.content,
|
|
7613
|
+
// Check if file content exists and handle different response types
|
|
7614
|
+
typeof secondFileContentResponse.data === "object" && !Array.isArray(secondFileContentResponse.data) && "content" in secondFileContentResponse.data && typeof secondFileContentResponse.data.content === "string" ? secondFileContentResponse.data.content : "",
|
|
7115
7615
|
"base64"
|
|
7116
7616
|
).toString("utf-8");
|
|
7117
7617
|
tree.push({
|
|
@@ -7125,8 +7625,6 @@ function getGithubSdk(params = {}) {
|
|
|
7125
7625
|
owner,
|
|
7126
7626
|
repo,
|
|
7127
7627
|
base_tree: await octokit.rest.git.getRef({ owner, repo, ref: `heads/${defaultBranch}` }).then((response) => response.data.object.sha),
|
|
7128
|
-
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
|
7129
|
-
// @ts-ignore
|
|
7130
7628
|
tree
|
|
7131
7629
|
});
|
|
7132
7630
|
const createCommitResponse = await octokit.rest.git.createCommit({
|
|
@@ -7466,6 +7964,7 @@ import {
|
|
|
7466
7964
|
fetch as undiciFetch,
|
|
7467
7965
|
ProxyAgent as ProxyAgent2
|
|
7468
7966
|
} from "undici";
|
|
7967
|
+
init_env();
|
|
7469
7968
|
|
|
7470
7969
|
// src/features/analysis/scm/gitlab/types.ts
|
|
7471
7970
|
import { z as z22 } from "zod";
|
|
@@ -8229,15 +8728,15 @@ function Spinner({ ci = false } = {}) {
|
|
|
8229
8728
|
}
|
|
8230
8729
|
|
|
8231
8730
|
// src/utils/check_node_version.ts
|
|
8232
|
-
import
|
|
8731
|
+
import fs4 from "fs";
|
|
8233
8732
|
import path4 from "path";
|
|
8234
8733
|
import semver from "semver";
|
|
8235
8734
|
function getPackageJson() {
|
|
8236
8735
|
let manifestPath = path4.join(getDirName(), "../package.json");
|
|
8237
|
-
if (!
|
|
8736
|
+
if (!fs4.existsSync(manifestPath)) {
|
|
8238
8737
|
manifestPath = path4.join(getDirName(), "../../package.json");
|
|
8239
8738
|
}
|
|
8240
|
-
return JSON.parse(
|
|
8739
|
+
return JSON.parse(fs4.readFileSync(manifestPath, "utf8"));
|
|
8241
8740
|
}
|
|
8242
8741
|
var packageJson = getPackageJson();
|
|
8243
8742
|
if (!semver.satisfies(process.version, packageJson.engines.node)) {
|
|
@@ -8303,7 +8802,7 @@ async function convertFprToSarif(inputFilePath, outputFilePath, codePathPatterns
|
|
|
8303
8802
|
await auditXmlSaxParser.parse();
|
|
8304
8803
|
}
|
|
8305
8804
|
await zipIn.close();
|
|
8306
|
-
const writer =
|
|
8805
|
+
const writer = fs5.createWriteStream(outputFilePath);
|
|
8307
8806
|
writer.write(`{
|
|
8308
8807
|
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
|
|
8309
8808
|
"version": "2.1.0",
|
|
@@ -8637,7 +9136,7 @@ function convertToSarifBuilder(args) {
|
|
|
8637
9136
|
).help().demandOption(["input-file-path", "input-file-format", "output-file-path"]);
|
|
8638
9137
|
}
|
|
8639
9138
|
async function validateConvertToSarifOptions(args) {
|
|
8640
|
-
if (!
|
|
9139
|
+
if (!fs6.existsSync(args.inputFilePath)) {
|
|
8641
9140
|
throw new CliError(
|
|
8642
9141
|
"\nError: --input-file-path flag should point to an existing file"
|
|
8643
9142
|
);
|
|
@@ -8663,15 +9162,15 @@ import chalk10 from "chalk";
|
|
|
8663
9162
|
import yargs from "yargs/yargs";
|
|
8664
9163
|
|
|
8665
9164
|
// src/args/commands/analyze.ts
|
|
8666
|
-
import
|
|
9165
|
+
import fs9 from "fs";
|
|
8667
9166
|
|
|
8668
9167
|
// src/commands/index.ts
|
|
8669
9168
|
import crypto from "crypto";
|
|
8670
9169
|
import os from "os";
|
|
8671
9170
|
|
|
8672
9171
|
// src/features/analysis/index.ts
|
|
8673
|
-
import
|
|
8674
|
-
import
|
|
9172
|
+
import fs8 from "fs";
|
|
9173
|
+
import fsPromises2 from "fs/promises";
|
|
8675
9174
|
import path9 from "path";
|
|
8676
9175
|
import { env as env2 } from "process";
|
|
8677
9176
|
import { pipeline } from "stream/promises";
|
|
@@ -9375,6 +9874,7 @@ async function handleAutoPr(params) {
|
|
|
9375
9874
|
}
|
|
9376
9875
|
|
|
9377
9876
|
// src/features/analysis/git.ts
|
|
9877
|
+
init_GitService();
|
|
9378
9878
|
import Debug10 from "debug";
|
|
9379
9879
|
var debug10 = Debug10("mobbdev:git");
|
|
9380
9880
|
async function getGitInfo(srcDirPath) {
|
|
@@ -9899,6 +10399,12 @@ var GQLClient = class {
|
|
|
9899
10399
|
}
|
|
9900
10400
|
);
|
|
9901
10401
|
}
|
|
10402
|
+
async getFixReportsByRepoUrl({ repoUrl }) {
|
|
10403
|
+
const res = await this._clientSdk.GetFixReportsByRepoUrl({
|
|
10404
|
+
repoUrl
|
|
10405
|
+
});
|
|
10406
|
+
return res;
|
|
10407
|
+
}
|
|
9902
10408
|
async getAnalysis(analysisId) {
|
|
9903
10409
|
const res = await this._clientSdk.getAnalysis({
|
|
9904
10410
|
analysisId
|
|
@@ -9939,7 +10445,8 @@ var GQLClient = class {
|
|
|
9939
10445
|
};
|
|
9940
10446
|
|
|
9941
10447
|
// src/features/analysis/pack.ts
|
|
9942
|
-
|
|
10448
|
+
init_configs();
|
|
10449
|
+
import fs7 from "fs";
|
|
9943
10450
|
import path7 from "path";
|
|
9944
10451
|
import AdmZip from "adm-zip";
|
|
9945
10452
|
import Debug13 from "debug";
|
|
@@ -9949,7 +10456,6 @@ import { simpleGit as simpleGit2 } from "simple-git";
|
|
|
9949
10456
|
import { parseStringPromise } from "xml2js";
|
|
9950
10457
|
import { z as z28 } from "zod";
|
|
9951
10458
|
var debug13 = Debug13("mobbdev:pack");
|
|
9952
|
-
var MAX_FILE_SIZE = 1024 * 1024 * 5;
|
|
9953
10459
|
var FPR_SOURCE_CODE_FILE_MAPPING_SCHEMA = z28.object({
|
|
9954
10460
|
properties: z28.object({
|
|
9955
10461
|
entry: z28.array(
|
|
@@ -10017,11 +10523,11 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
|
|
|
10017
10523
|
continue;
|
|
10018
10524
|
}
|
|
10019
10525
|
}
|
|
10020
|
-
if (
|
|
10526
|
+
if (fs7.lstatSync(absFilepath).size > MCP_MAX_FILE_SIZE) {
|
|
10021
10527
|
debug13("ignoring %s because the size is > 5MB", filepath);
|
|
10022
10528
|
continue;
|
|
10023
10529
|
}
|
|
10024
|
-
const data = git ? await git.showBuffer([`HEAD:./${filepath}`]) :
|
|
10530
|
+
const data = git ? await git.showBuffer([`HEAD:./${filepath}`]) : fs7.readFileSync(absFilepath);
|
|
10025
10531
|
if (isBinary2(null, data)) {
|
|
10026
10532
|
debug13("ignoring %s because is seems to be a binary file", filepath);
|
|
10027
10533
|
continue;
|
|
@@ -10445,13 +10951,13 @@ async function downloadRepo({
|
|
|
10445
10951
|
repoSpinner.error({ text: "\u{1F4BE} Repo download failed" });
|
|
10446
10952
|
throw new Error(`Can't access ${chalk5.bold(repoUrl)}`);
|
|
10447
10953
|
}
|
|
10448
|
-
const fileWriterStream =
|
|
10954
|
+
const fileWriterStream = fs8.createWriteStream(zipFilePath);
|
|
10449
10955
|
if (!response.body) {
|
|
10450
10956
|
throw new Error("Response body is empty");
|
|
10451
10957
|
}
|
|
10452
10958
|
await pipeline(response.body, fileWriterStream);
|
|
10453
10959
|
await extract(zipFilePath, { dir: dirname });
|
|
10454
|
-
const repoRoot =
|
|
10960
|
+
const repoRoot = fs8.readdirSync(dirname, { withFileTypes: true }).filter((dirent) => dirent.isDirectory()).map((dirent) => dirent.name)[0];
|
|
10455
10961
|
if (!repoRoot) {
|
|
10456
10962
|
throw new Error("Repo root not found");
|
|
10457
10963
|
}
|
|
@@ -10942,7 +11448,7 @@ async function _zipAndUploadRepo({
|
|
|
10942
11448
|
repoUploadInfo,
|
|
10943
11449
|
isIncludeAllFiles
|
|
10944
11450
|
}) {
|
|
10945
|
-
const srcFileStatus = await
|
|
11451
|
+
const srcFileStatus = await fsPromises2.lstat(srcPath);
|
|
10946
11452
|
const zippingSpinner = createSpinner4("\u{1F4E6} Zipping repo").start();
|
|
10947
11453
|
let zipBuffer;
|
|
10948
11454
|
let gitInfo = { success: false };
|
|
@@ -11376,7 +11882,7 @@ function analyzeBuilder(yargs2) {
|
|
|
11376
11882
|
).help();
|
|
11377
11883
|
}
|
|
11378
11884
|
function validateAnalyzeOptions(argv) {
|
|
11379
|
-
if (argv.f && !
|
|
11885
|
+
if (argv.f && !fs9.existsSync(argv.f)) {
|
|
11380
11886
|
throw new CliError(`
|
|
11381
11887
|
Can't access ${chalk8.bold(argv.f)}`);
|
|
11382
11888
|
}
|
|
@@ -11540,10 +12046,12 @@ import Configstore3 from "configstore";
|
|
|
11540
12046
|
import crypto3 from "crypto";
|
|
11541
12047
|
import { GraphQLClient as GraphQLClient2 } from "graphql-request";
|
|
11542
12048
|
import { v4 as uuidv42 } from "uuid";
|
|
12049
|
+
init_configs();
|
|
11543
12050
|
|
|
11544
12051
|
// src/mcp/services/McpAuthService.ts
|
|
11545
12052
|
import crypto2 from "crypto";
|
|
11546
12053
|
import os2 from "os";
|
|
12054
|
+
init_configs();
|
|
11547
12055
|
import open4 from "open";
|
|
11548
12056
|
var McpAuthService = class {
|
|
11549
12057
|
constructor(client) {
|
|
@@ -11664,7 +12172,7 @@ var McpGQLClient = class {
|
|
|
11664
12172
|
return true;
|
|
11665
12173
|
} catch (e) {
|
|
11666
12174
|
const error = e;
|
|
11667
|
-
logDebug(`API connection verification failed
|
|
12175
|
+
logDebug(`API connection verification failed`, { error });
|
|
11668
12176
|
if (error?.toString().includes("FetchError")) {
|
|
11669
12177
|
logError("API connection verification failed", { error });
|
|
11670
12178
|
return false;
|
|
@@ -12300,7 +12808,7 @@ var McpServer = class {
|
|
|
12300
12808
|
import { z as z32 } from "zod";
|
|
12301
12809
|
|
|
12302
12810
|
// src/mcp/services/PathValidation.ts
|
|
12303
|
-
import
|
|
12811
|
+
import fs10 from "fs";
|
|
12304
12812
|
import path11 from "path";
|
|
12305
12813
|
async function validatePath(inputPath) {
|
|
12306
12814
|
logDebug("Validating MCP path", { inputPath });
|
|
@@ -12355,7 +12863,7 @@ async function validatePath(inputPath) {
|
|
|
12355
12863
|
logDebug("Path validation successful", { inputPath });
|
|
12356
12864
|
logDebug("Checking path existence", { inputPath });
|
|
12357
12865
|
try {
|
|
12358
|
-
await
|
|
12866
|
+
await fs10.promises.access(inputPath);
|
|
12359
12867
|
logDebug("Path exists and is accessible", { inputPath });
|
|
12360
12868
|
return { isValid: true, path: inputPath };
|
|
12361
12869
|
} catch (error) {
|
|
@@ -12418,7 +12926,12 @@ var BaseTool = class {
|
|
|
12418
12926
|
}
|
|
12419
12927
|
};
|
|
12420
12928
|
|
|
12929
|
+
// src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesService.ts
|
|
12930
|
+
init_configs();
|
|
12931
|
+
import Configstore4 from "configstore";
|
|
12932
|
+
|
|
12421
12933
|
// src/mcp/core/prompts.ts
|
|
12934
|
+
init_configs();
|
|
12422
12935
|
function friendlyType(s) {
|
|
12423
12936
|
const withoutUnderscores = s.replace(/_/g, " ");
|
|
12424
12937
|
const result = withoutUnderscores.replace(/([a-z])([A-Z])/g, "$1 $2");
|
|
@@ -12775,28 +13288,33 @@ ${applyFixesPrompt({
|
|
|
12775
13288
|
};
|
|
12776
13289
|
|
|
12777
13290
|
// src/mcp/services/GetLocalFiles.ts
|
|
12778
|
-
|
|
13291
|
+
init_FileUtils();
|
|
13292
|
+
init_GitService();
|
|
13293
|
+
init_configs();
|
|
13294
|
+
import fs11 from "fs/promises";
|
|
12779
13295
|
import nodePath from "path";
|
|
12780
13296
|
var getLocalFiles = async ({
|
|
12781
13297
|
path: path13,
|
|
12782
|
-
maxFileSize =
|
|
12783
|
-
maxFiles
|
|
13298
|
+
maxFileSize = MCP_MAX_FILE_SIZE,
|
|
13299
|
+
maxFiles,
|
|
13300
|
+
isAllFilesScan
|
|
12784
13301
|
}) => {
|
|
12785
|
-
const resolvedRepoPath = await
|
|
13302
|
+
const resolvedRepoPath = await fs11.realpath(path13);
|
|
12786
13303
|
const gitService = new GitService(resolvedRepoPath, log);
|
|
12787
13304
|
const gitValidation = await gitService.validateRepository();
|
|
12788
13305
|
let files = [];
|
|
12789
|
-
if (!gitValidation.isValid) {
|
|
13306
|
+
if (!gitValidation.isValid || isAllFilesScan) {
|
|
12790
13307
|
logDebug(
|
|
12791
13308
|
"Git repository validation failed, using all files in the repository",
|
|
12792
13309
|
{
|
|
12793
13310
|
path: path13
|
|
12794
13311
|
}
|
|
12795
13312
|
);
|
|
12796
|
-
files = FileUtils.getLastChangedFiles({
|
|
13313
|
+
files = await FileUtils.getLastChangedFiles({
|
|
12797
13314
|
dir: path13,
|
|
12798
13315
|
maxFileSize,
|
|
12799
|
-
maxFiles
|
|
13316
|
+
maxFiles,
|
|
13317
|
+
isAllFilesScan
|
|
12800
13318
|
});
|
|
12801
13319
|
logDebug("Found files in the repository", {
|
|
12802
13320
|
files,
|
|
@@ -12840,7 +13358,7 @@ var getLocalFiles = async ({
|
|
|
12840
13358
|
const relativePath = nodePath.relative(resolvedRepoPath, absoluteFilePath);
|
|
12841
13359
|
let fileStat;
|
|
12842
13360
|
try {
|
|
12843
|
-
fileStat = await
|
|
13361
|
+
fileStat = await fs11.stat(absoluteFilePath);
|
|
12844
13362
|
} catch (e) {
|
|
12845
13363
|
logDebug("File not found", {
|
|
12846
13364
|
file
|
|
@@ -12857,8 +13375,13 @@ var getLocalFiles = async ({
|
|
|
12857
13375
|
return filesWithStats.filter((file) => file.lastEdited > 0);
|
|
12858
13376
|
};
|
|
12859
13377
|
|
|
13378
|
+
// src/mcp/services/ScanFiles.ts
|
|
13379
|
+
init_GitService();
|
|
13380
|
+
init_configs();
|
|
13381
|
+
|
|
12860
13382
|
// src/mcp/services/FileOperations.ts
|
|
12861
|
-
|
|
13383
|
+
init_FileUtils();
|
|
13384
|
+
import fs12 from "fs";
|
|
12862
13385
|
import path12 from "path";
|
|
12863
13386
|
import AdmZip2 from "adm-zip";
|
|
12864
13387
|
var FileOperations = class {
|
|
@@ -12901,7 +13424,9 @@ var FileOperations = class {
|
|
|
12901
13424
|
packedFilesCount,
|
|
12902
13425
|
totalSize: archiveBuffer.length
|
|
12903
13426
|
};
|
|
12904
|
-
logInfo(
|
|
13427
|
+
logInfo(
|
|
13428
|
+
`Files packed successfully ${packedFilesCount} files, ${result.totalSize} bytes`
|
|
13429
|
+
);
|
|
12905
13430
|
return result;
|
|
12906
13431
|
}
|
|
12907
13432
|
/**
|
|
@@ -12921,7 +13446,7 @@ var FileOperations = class {
|
|
|
12921
13446
|
continue;
|
|
12922
13447
|
}
|
|
12923
13448
|
try {
|
|
12924
|
-
await
|
|
13449
|
+
await fs12.promises.access(absoluteFilepath, fs12.constants.R_OK);
|
|
12925
13450
|
validatedPaths.push(filepath);
|
|
12926
13451
|
} catch (error) {
|
|
12927
13452
|
logDebug(`Skipping ${filepath} - file is not accessible: ${error}`);
|
|
@@ -12938,7 +13463,7 @@ var FileOperations = class {
|
|
|
12938
13463
|
const fileDataArray = [];
|
|
12939
13464
|
for (const absolutePath of filePaths) {
|
|
12940
13465
|
try {
|
|
12941
|
-
const content = await
|
|
13466
|
+
const content = await fs12.promises.readFile(absolutePath);
|
|
12942
13467
|
const relativePath = path12.basename(absolutePath);
|
|
12943
13468
|
fileDataArray.push({
|
|
12944
13469
|
relativePath,
|
|
@@ -12959,7 +13484,7 @@ var FileOperations = class {
|
|
|
12959
13484
|
*/
|
|
12960
13485
|
async readSourceFile(absoluteFilepath, relativeFilepath) {
|
|
12961
13486
|
try {
|
|
12962
|
-
return await
|
|
13487
|
+
return await fs12.promises.readFile(absoluteFilepath);
|
|
12963
13488
|
} catch (fsError) {
|
|
12964
13489
|
logError(`Failed to read ${relativeFilepath} from filesystem: ${fsError}`);
|
|
12965
13490
|
return null;
|
|
@@ -12971,9 +13496,11 @@ var FileOperations = class {
|
|
|
12971
13496
|
var scanFiles = async ({
|
|
12972
13497
|
fileList,
|
|
12973
13498
|
repositoryPath,
|
|
12974
|
-
gqlClient
|
|
13499
|
+
gqlClient,
|
|
13500
|
+
isAllDetectionRulesScan = false,
|
|
13501
|
+
scanContext
|
|
12975
13502
|
}) => {
|
|
12976
|
-
const repoUploadInfo = await initializeSecurityReport(gqlClient);
|
|
13503
|
+
const repoUploadInfo = await initializeSecurityReport(gqlClient, scanContext);
|
|
12977
13504
|
const fixReportId = repoUploadInfo.fixReportId;
|
|
12978
13505
|
const fileOperations = new FileOperations();
|
|
12979
13506
|
const packingResult = await fileOperations.createSourceCodeArchive(
|
|
@@ -12981,8 +13508,15 @@ var scanFiles = async ({
|
|
|
12981
13508
|
repositoryPath,
|
|
12982
13509
|
MCP_MAX_FILE_SIZE
|
|
12983
13510
|
);
|
|
12984
|
-
|
|
12985
|
-
|
|
13511
|
+
logDebug(
|
|
13512
|
+
`[${scanContext}] Files ${packingResult.packedFilesCount} packed successfully, ${packingResult.totalSize} bytes`
|
|
13513
|
+
);
|
|
13514
|
+
await uploadSourceCodeArchive(
|
|
13515
|
+
packingResult.archive,
|
|
13516
|
+
repoUploadInfo,
|
|
13517
|
+
scanContext
|
|
13518
|
+
);
|
|
13519
|
+
const projectId = await getProjectId(gqlClient, scanContext);
|
|
12986
13520
|
const gitService = new GitService(repositoryPath);
|
|
12987
13521
|
const { branch } = await gitService.getCurrentCommitAndBranch();
|
|
12988
13522
|
const repoUrl = await gitService.getRemoteUrl();
|
|
@@ -12990,16 +13524,18 @@ var scanFiles = async ({
|
|
|
12990
13524
|
fixReportId,
|
|
12991
13525
|
projectId,
|
|
12992
13526
|
gqlClient,
|
|
13527
|
+
isAllDetectionRulesScan,
|
|
12993
13528
|
repoUrl: repoUrl || "",
|
|
12994
13529
|
branchName: branch || "no-branch",
|
|
12995
|
-
sha: "0123456789abcdef"
|
|
13530
|
+
sha: "0123456789abcdef",
|
|
13531
|
+
scanContext
|
|
12996
13532
|
});
|
|
12997
13533
|
return {
|
|
12998
13534
|
fixReportId,
|
|
12999
13535
|
projectId
|
|
13000
13536
|
};
|
|
13001
13537
|
};
|
|
13002
|
-
var initializeSecurityReport = async (gqlClient) => {
|
|
13538
|
+
var initializeSecurityReport = async (gqlClient, scanContext) => {
|
|
13003
13539
|
if (!gqlClient) {
|
|
13004
13540
|
throw new GqlClientError();
|
|
13005
13541
|
}
|
|
@@ -13007,7 +13543,7 @@ var initializeSecurityReport = async (gqlClient) => {
|
|
|
13007
13543
|
const {
|
|
13008
13544
|
uploadS3BucketInfo: { repoUploadInfo }
|
|
13009
13545
|
} = await gqlClient.uploadS3BucketInfo();
|
|
13010
|
-
logDebug(
|
|
13546
|
+
logDebug(`[${scanContext}] Upload info retrieved`);
|
|
13011
13547
|
return repoUploadInfo;
|
|
13012
13548
|
} catch (error) {
|
|
13013
13549
|
const message = error.message;
|
|
@@ -13016,7 +13552,7 @@ var initializeSecurityReport = async (gqlClient) => {
|
|
|
13016
13552
|
);
|
|
13017
13553
|
}
|
|
13018
13554
|
};
|
|
13019
|
-
var uploadSourceCodeArchive = async (archiveBuffer, repoUploadInfo) => {
|
|
13555
|
+
var uploadSourceCodeArchive = async (archiveBuffer, repoUploadInfo, scanContext) => {
|
|
13020
13556
|
if (!repoUploadInfo) {
|
|
13021
13557
|
throw new FileUploadError("Upload info is required for source code archive");
|
|
13022
13558
|
}
|
|
@@ -13027,9 +13563,9 @@ var uploadSourceCodeArchive = async (archiveBuffer, repoUploadInfo) => {
|
|
|
13027
13563
|
uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
|
|
13028
13564
|
uploadKey: repoUploadInfo.uploadKey
|
|
13029
13565
|
});
|
|
13030
|
-
logInfo(
|
|
13566
|
+
logInfo(`[${scanContext}] File uploaded successfully`);
|
|
13031
13567
|
} catch (error) {
|
|
13032
|
-
logError(
|
|
13568
|
+
logError(`[${scanContext}] Source code archive upload failed`, {
|
|
13033
13569
|
error: error.message
|
|
13034
13570
|
});
|
|
13035
13571
|
throw new FileUploadError(
|
|
@@ -13037,36 +13573,39 @@ var uploadSourceCodeArchive = async (archiveBuffer, repoUploadInfo) => {
|
|
|
13037
13573
|
);
|
|
13038
13574
|
}
|
|
13039
13575
|
};
|
|
13040
|
-
var getProjectId = async (gqlClient) => {
|
|
13576
|
+
var getProjectId = async (gqlClient, scanContext) => {
|
|
13041
13577
|
if (!gqlClient) {
|
|
13042
13578
|
throw new GqlClientError();
|
|
13043
13579
|
}
|
|
13044
13580
|
const projectId = await gqlClient.getProjectId();
|
|
13045
|
-
logDebug(
|
|
13581
|
+
logDebug(`[${scanContext}] Project ID retrieved`);
|
|
13046
13582
|
return projectId;
|
|
13047
13583
|
};
|
|
13048
13584
|
var executeSecurityScan = async ({
|
|
13049
13585
|
fixReportId,
|
|
13050
13586
|
projectId,
|
|
13051
13587
|
gqlClient,
|
|
13588
|
+
isAllDetectionRulesScan = false,
|
|
13052
13589
|
repoUrl,
|
|
13053
13590
|
branchName,
|
|
13054
|
-
sha
|
|
13591
|
+
sha,
|
|
13592
|
+
scanContext
|
|
13055
13593
|
}) => {
|
|
13056
13594
|
if (!gqlClient) {
|
|
13057
13595
|
throw new GqlClientError();
|
|
13058
13596
|
}
|
|
13059
|
-
logInfo(
|
|
13597
|
+
logInfo(`[${scanContext}] Starting scan`);
|
|
13060
13598
|
const submitVulnerabilityReportVariables = {
|
|
13061
13599
|
fixReportId,
|
|
13062
13600
|
projectId,
|
|
13063
13601
|
repoUrl,
|
|
13064
13602
|
reference: branchName,
|
|
13065
13603
|
scanSource: "MCP" /* Mcp */,
|
|
13604
|
+
isFullScan: !!isAllDetectionRulesScan,
|
|
13066
13605
|
sha
|
|
13067
13606
|
};
|
|
13068
|
-
logInfo(
|
|
13069
|
-
logDebug(
|
|
13607
|
+
logInfo(`[${scanContext}] Submitting vulnerability report`);
|
|
13608
|
+
logDebug(`[${scanContext}] Submit vulnerability report variables`, {
|
|
13070
13609
|
submitVulnerabilityReportVariables
|
|
13071
13610
|
});
|
|
13072
13611
|
const submitRes = await gqlClient.submitVulnerabilityReport(
|
|
@@ -13078,12 +13617,12 @@ var executeSecurityScan = async ({
|
|
|
13078
13617
|
);
|
|
13079
13618
|
}
|
|
13080
13619
|
const analysisId = submitRes.submitVulnerabilityReport.fixReportId;
|
|
13081
|
-
logInfo(
|
|
13620
|
+
logInfo(`[${scanContext}] Vulnerability report submitted successfully`);
|
|
13082
13621
|
try {
|
|
13083
13622
|
await gqlClient.subscribeToGetAnalysis({
|
|
13084
13623
|
subscribeToAnalysisParams: { analysisId },
|
|
13085
13624
|
callback: async (completedAnalysisId) => {
|
|
13086
|
-
logInfo(
|
|
13625
|
+
logInfo(`[${scanContext}] Security analysis completed successfully`, {
|
|
13087
13626
|
analysisId: completedAnalysisId
|
|
13088
13627
|
});
|
|
13089
13628
|
},
|
|
@@ -13091,10 +13630,16 @@ var executeSecurityScan = async ({
|
|
|
13091
13630
|
timeoutInMs: MCP_VUL_REPORT_DIGEST_TIMEOUT_MS
|
|
13092
13631
|
});
|
|
13093
13632
|
} catch (error) {
|
|
13094
|
-
logError(
|
|
13633
|
+
logError(`[${scanContext}] Security analysis failed or timed out`, {
|
|
13634
|
+
error,
|
|
13635
|
+
analysisId
|
|
13636
|
+
});
|
|
13095
13637
|
throw new ScanError(`Security analysis failed: ${error.message}`);
|
|
13096
13638
|
}
|
|
13097
|
-
logDebug(
|
|
13639
|
+
logDebug(`[${scanContext}] Security scan completed successfully`, {
|
|
13640
|
+
fixReportId,
|
|
13641
|
+
projectId
|
|
13642
|
+
});
|
|
13098
13643
|
};
|
|
13099
13644
|
|
|
13100
13645
|
// src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesService.ts
|
|
@@ -13141,24 +13686,32 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
13141
13686
|
* since the last scan.
|
|
13142
13687
|
*/
|
|
13143
13688
|
async scanForSecurityVulnerabilities({
|
|
13144
|
-
path: path13
|
|
13689
|
+
path: path13,
|
|
13690
|
+
isAllDetectionRulesScan,
|
|
13691
|
+
isAllFilesScan,
|
|
13692
|
+
scanContext
|
|
13145
13693
|
}) {
|
|
13146
|
-
logDebug(
|
|
13694
|
+
logDebug(`[${scanContext}] Scanning for new security vulnerabilities`, {
|
|
13695
|
+
path: path13
|
|
13696
|
+
});
|
|
13147
13697
|
if (!this.gqlClient) {
|
|
13148
|
-
logInfo(
|
|
13698
|
+
logInfo(`[${scanContext}] No GQL client found, skipping scan`);
|
|
13149
13699
|
return;
|
|
13150
13700
|
}
|
|
13151
13701
|
const isConnected = await this.gqlClient.verifyApiConnection();
|
|
13152
13702
|
if (!isConnected) {
|
|
13153
|
-
logError(
|
|
13703
|
+
logError(`[${scanContext}] Failed to connect to the API, scan aborted`);
|
|
13154
13704
|
return;
|
|
13155
13705
|
}
|
|
13156
|
-
logDebug(
|
|
13706
|
+
logDebug(
|
|
13707
|
+
`[${scanContext}] Connected to the API, assembling list of files to scan`,
|
|
13708
|
+
{ path: path13 }
|
|
13709
|
+
);
|
|
13157
13710
|
const files = await getLocalFiles({
|
|
13158
13711
|
path: path13,
|
|
13159
|
-
|
|
13712
|
+
isAllFilesScan
|
|
13160
13713
|
});
|
|
13161
|
-
logDebug(
|
|
13714
|
+
logDebug(`[${scanContext}] Active files`, { files });
|
|
13162
13715
|
const filesToScan = files.filter((file) => {
|
|
13163
13716
|
const lastScannedEditTime = this.filesLastScanned[file.fullPath];
|
|
13164
13717
|
if (!lastScannedEditTime) {
|
|
@@ -13167,18 +13720,23 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
13167
13720
|
return file.lastEdited > lastScannedEditTime;
|
|
13168
13721
|
});
|
|
13169
13722
|
if (filesToScan.length === 0) {
|
|
13170
|
-
logInfo(
|
|
13723
|
+
logInfo(`[${scanContext}] No files require scanning`);
|
|
13171
13724
|
return;
|
|
13172
13725
|
}
|
|
13173
|
-
logDebug(
|
|
13726
|
+
logDebug(`[${scanContext}] Files requiring security scan`, { filesToScan });
|
|
13174
13727
|
const { fixReportId, projectId } = await scanFiles({
|
|
13175
13728
|
fileList: filesToScan.map((file) => file.relativePath),
|
|
13176
13729
|
repositoryPath: path13,
|
|
13177
|
-
gqlClient: this.gqlClient
|
|
13730
|
+
gqlClient: this.gqlClient,
|
|
13731
|
+
isAllDetectionRulesScan,
|
|
13732
|
+
scanContext
|
|
13178
13733
|
});
|
|
13179
13734
|
logInfo(
|
|
13180
|
-
`Security scan completed for ${path13} reportId: ${fixReportId} projectId: ${projectId}`
|
|
13735
|
+
`[${scanContext}] Security scan completed for ${path13} reportId: ${fixReportId} projectId: ${projectId}`
|
|
13181
13736
|
);
|
|
13737
|
+
if (isAllFilesScan) {
|
|
13738
|
+
return;
|
|
13739
|
+
}
|
|
13182
13740
|
const fixes = await this.gqlClient.getReportFixesPaginated({
|
|
13183
13741
|
reportId: fixReportId,
|
|
13184
13742
|
offset: 0,
|
|
@@ -13188,7 +13746,7 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
13188
13746
|
(fix) => !this.isFixAlreadyReported(fix)
|
|
13189
13747
|
);
|
|
13190
13748
|
logInfo(
|
|
13191
|
-
`Security fixes retrieved, total: ${fixes?.fixes?.length || 0}, new: ${newFixes?.length || 0}`
|
|
13749
|
+
`[${scanContext}] Security fixes retrieved, total: ${fixes?.fixes?.length || 0}, new: ${newFixes?.length || 0}`
|
|
13192
13750
|
);
|
|
13193
13751
|
this.updateFreshFixesCache(newFixes || [], filesToScan);
|
|
13194
13752
|
this.updateFilesScanTimestamps(filesToScan);
|
|
@@ -13246,6 +13804,7 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
13246
13804
|
if (!this.intervalId) {
|
|
13247
13805
|
this.startPeriodicScanning(path13);
|
|
13248
13806
|
this.executeInitialScan(path13);
|
|
13807
|
+
this.executeInitialFullScan(path13);
|
|
13249
13808
|
}
|
|
13250
13809
|
}
|
|
13251
13810
|
startPeriodicScanning(path13) {
|
|
@@ -13254,14 +13813,44 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
13254
13813
|
});
|
|
13255
13814
|
this.intervalId = setInterval(() => {
|
|
13256
13815
|
logDebug("Triggering periodic security scan", { path: path13 });
|
|
13257
|
-
this.scanForSecurityVulnerabilities({
|
|
13816
|
+
this.scanForSecurityVulnerabilities({
|
|
13817
|
+
path: path13,
|
|
13818
|
+
scanContext: "BACKGROUND_PERIODIC"
|
|
13819
|
+
}).catch((error) => {
|
|
13258
13820
|
logError("Error during periodic security scan", { error });
|
|
13259
13821
|
});
|
|
13260
13822
|
}, MCP_PERIODIC_CHECK_INTERVAL);
|
|
13261
13823
|
}
|
|
13824
|
+
executeInitialFullScan(path13) {
|
|
13825
|
+
logDebug("Triggering initial full security scan", { path: path13 });
|
|
13826
|
+
const mobbConfigStore2 = new Configstore4(packageJson.name, { apiToken: "" });
|
|
13827
|
+
const fullScanPathsScanned = mobbConfigStore2.get("fullScanPathsScanned") || [];
|
|
13828
|
+
logDebug("Full scan paths scanned", { fullScanPathsScanned });
|
|
13829
|
+
if (fullScanPathsScanned.includes(path13)) {
|
|
13830
|
+
logDebug("Full scan already executed for this path", { path: path13 });
|
|
13831
|
+
return;
|
|
13832
|
+
}
|
|
13833
|
+
mobbConfigStore2.set("fullScanPathsScanned", [...fullScanPathsScanned, path13]);
|
|
13834
|
+
this.scanForSecurityVulnerabilities({
|
|
13835
|
+
path: path13,
|
|
13836
|
+
isAllFilesScan: true,
|
|
13837
|
+
isAllDetectionRulesScan: true,
|
|
13838
|
+
scanContext: "FULL_SCAN"
|
|
13839
|
+
}).catch((error) => {
|
|
13840
|
+
logError("Error during initial full security scan", { error });
|
|
13841
|
+
}).then(() => {
|
|
13842
|
+
const fullScanPathsScanned2 = mobbConfigStore2.get("fullScanPathsScanned") || [];
|
|
13843
|
+
fullScanPathsScanned2.push(path13);
|
|
13844
|
+
mobbConfigStore2.set("fullScanPathsScanned", fullScanPathsScanned2);
|
|
13845
|
+
logDebug("Full scan completed", { path: path13 });
|
|
13846
|
+
});
|
|
13847
|
+
}
|
|
13262
13848
|
executeInitialScan(path13) {
|
|
13263
13849
|
logDebug("Triggering initial security scan", { path: path13 });
|
|
13264
|
-
this.scanForSecurityVulnerabilities({
|
|
13850
|
+
this.scanForSecurityVulnerabilities({
|
|
13851
|
+
path: path13,
|
|
13852
|
+
scanContext: "BACKGROUND_INITIAL"
|
|
13853
|
+
}).catch((error) => {
|
|
13265
13854
|
logError("Error during initial security scan", { error });
|
|
13266
13855
|
});
|
|
13267
13856
|
}
|
|
@@ -13343,9 +13932,11 @@ Example payload:
|
|
|
13343
13932
|
};
|
|
13344
13933
|
|
|
13345
13934
|
// src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesTool.ts
|
|
13935
|
+
init_GitService();
|
|
13346
13936
|
import { z as z33 } from "zod";
|
|
13347
13937
|
|
|
13348
13938
|
// src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesService.ts
|
|
13939
|
+
init_configs();
|
|
13349
13940
|
var _FetchAvailableFixesService = class _FetchAvailableFixesService {
|
|
13350
13941
|
constructor() {
|
|
13351
13942
|
__publicField(this, "gqlClient", null);
|
|
@@ -13505,9 +14096,11 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
|
|
|
13505
14096
|
};
|
|
13506
14097
|
|
|
13507
14098
|
// src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesTool.ts
|
|
14099
|
+
init_configs();
|
|
13508
14100
|
import z34 from "zod";
|
|
13509
14101
|
|
|
13510
14102
|
// src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesService.ts
|
|
14103
|
+
init_configs();
|
|
13511
14104
|
var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService {
|
|
13512
14105
|
constructor() {
|
|
13513
14106
|
__publicField(this, "gqlClient");
|
|
@@ -13569,7 +14162,8 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
|
|
|
13569
14162
|
const scanResult = await scanFiles({
|
|
13570
14163
|
fileList,
|
|
13571
14164
|
repositoryPath,
|
|
13572
|
-
gqlClient: this.gqlClient
|
|
14165
|
+
gqlClient: this.gqlClient,
|
|
14166
|
+
scanContext: "SCAN_AND_FIX_TOOL"
|
|
13573
14167
|
});
|
|
13574
14168
|
fixReportId = scanResult.fixReportId;
|
|
13575
14169
|
} else {
|
|
@@ -13740,8 +14334,7 @@ Example payload:
|
|
|
13740
14334
|
const path13 = pathValidationResult.path;
|
|
13741
14335
|
const files = await getLocalFiles({
|
|
13742
14336
|
path: path13,
|
|
13743
|
-
maxFileSize:
|
|
13744
|
-
// 5MB
|
|
14337
|
+
maxFileSize: MCP_MAX_FILE_SIZE,
|
|
13745
14338
|
maxFiles: args.maxFiles
|
|
13746
14339
|
});
|
|
13747
14340
|
logDebug("Files", { files });
|
|
@@ -13839,7 +14432,7 @@ var mcpHandler = async (_args) => {
|
|
|
13839
14432
|
};
|
|
13840
14433
|
|
|
13841
14434
|
// src/args/commands/review.ts
|
|
13842
|
-
import
|
|
14435
|
+
import fs13 from "fs";
|
|
13843
14436
|
import chalk9 from "chalk";
|
|
13844
14437
|
function reviewBuilder(yargs2) {
|
|
13845
14438
|
return yargs2.option("f", {
|
|
@@ -13876,7 +14469,7 @@ function reviewBuilder(yargs2) {
|
|
|
13876
14469
|
).help();
|
|
13877
14470
|
}
|
|
13878
14471
|
function validateReviewOptions(argv) {
|
|
13879
|
-
if (!
|
|
14472
|
+
if (!fs13.existsSync(argv.f)) {
|
|
13880
14473
|
throw new CliError(`
|
|
13881
14474
|
Can't access ${chalk9.bold(argv.f)}`);
|
|
13882
14475
|
}
|