mobbdev 1.3.5 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/args/commands/upload_ai_blame.d.mts +32 -32
- package/dist/args/commands/upload_ai_blame.mjs +49 -6
- package/dist/index.mjs +1725 -270
- package/package.json +4 -1
package/dist/index.mjs
CHANGED
|
@@ -129,10 +129,13 @@ function getSdk(client, withWrapper = defaultWrapper) {
|
|
|
129
129
|
},
|
|
130
130
|
ScanSkill(variables, requestHeaders, signal) {
|
|
131
131
|
return withWrapper((wrappedRequestHeaders) => client.request({ document: ScanSkillDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "ScanSkill", "mutation", variables);
|
|
132
|
+
},
|
|
133
|
+
SkillVerdictsByMd5(variables, requestHeaders, signal) {
|
|
134
|
+
return withWrapper((wrappedRequestHeaders) => client.request({ document: SkillVerdictsByMd5Document, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "SkillVerdictsByMd5", "query", variables);
|
|
132
135
|
}
|
|
133
136
|
};
|
|
134
137
|
}
|
|
135
|
-
var AiBlameInferenceType, FixQuestionInputType, Language, ManifestAction, Effort_To_Apply_Fix_Enum, Fix_Rating_Tag_Enum, Fix_Report_State_Enum, Fix_State_Enum, IssueLanguage_Enum, IssueType_Enum, Pr_Status_Enum, Project_Role_Type_Enum, Vulnerability_Report_Issue_Category_Enum, Vulnerability_Report_Issue_State_Enum, Vulnerability_Report_Issue_Tag_Enum, Vulnerability_Report_Vendor_Enum, Vulnerability_Severity_Enum, FixDetailsFragmentDoc, FixReportSummaryFieldsFragmentDoc, MeDocument, GetLastOrgAndNamedProjectDocument, GetLastOrgDocument, GetEncryptedApiTokenDocument, FixReportStateDocument, GetVulnerabilityReportPathsDocument, GetAnalysisSubscriptionDocument, GetAnalysisDocument, GetFixesDocument, GetVulByNodesMetadataDocument, GetFalsePositiveDocument, UpdateScmTokenDocument, UploadS3BucketInfoDocument, GetTracyDiffUploadUrlDocument, AnalyzeCommitForExtensionAiBlameDocument, GetAiBlameInferenceDocument, GetAiBlameAttributionPromptDocument, GetPromptSummaryDocument, UploadAiBlameInferencesInitDocument, FinalizeAiBlameInferencesUploadDocument, UploadTracyRecordsDocument, GetTracyRawDataUploadUrlDocument, DigestVulnerabilityReportDocument, SubmitVulnerabilityReportDocument, CreateCommunityUserDocument, CreateCliLoginDocument, PerformCliLoginDocument, CreateProjectDocument, ValidateRepoUrlDocument, GitReferenceDocument, AutoPrAnalysisDocument, GetFixReportsByRepoUrlDocument, GetReportFixesDocument, GetLatestReportByRepoUrlDocument, UpdateDownloadedFixDataDocument, GetUserMvsAutoFixDocument, StreamBlameAiAnalysisRequestsDocument, StreamCommitBlameRequestsDocument, ScanSkillDocument, defaultWrapper;
|
|
138
|
+
var AiBlameInferenceType, FixQuestionInputType, Language, ManifestAction, Effort_To_Apply_Fix_Enum, Fix_Rating_Tag_Enum, Fix_Report_State_Enum, Fix_State_Enum, IssueLanguage_Enum, IssueType_Enum, Pr_Status_Enum, Project_Role_Type_Enum, Vulnerability_Report_Issue_Category_Enum, Vulnerability_Report_Issue_State_Enum, Vulnerability_Report_Issue_Tag_Enum, Vulnerability_Report_Vendor_Enum, Vulnerability_Severity_Enum, FixDetailsFragmentDoc, FixReportSummaryFieldsFragmentDoc, MeDocument, GetLastOrgAndNamedProjectDocument, GetLastOrgDocument, GetEncryptedApiTokenDocument, FixReportStateDocument, GetVulnerabilityReportPathsDocument, GetAnalysisSubscriptionDocument, GetAnalysisDocument, GetFixesDocument, GetVulByNodesMetadataDocument, GetFalsePositiveDocument, UpdateScmTokenDocument, UploadS3BucketInfoDocument, GetTracyDiffUploadUrlDocument, AnalyzeCommitForExtensionAiBlameDocument, GetAiBlameInferenceDocument, GetAiBlameAttributionPromptDocument, GetPromptSummaryDocument, UploadAiBlameInferencesInitDocument, FinalizeAiBlameInferencesUploadDocument, UploadTracyRecordsDocument, GetTracyRawDataUploadUrlDocument, DigestVulnerabilityReportDocument, SubmitVulnerabilityReportDocument, CreateCommunityUserDocument, CreateCliLoginDocument, PerformCliLoginDocument, CreateProjectDocument, ValidateRepoUrlDocument, GitReferenceDocument, AutoPrAnalysisDocument, GetFixReportsByRepoUrlDocument, GetReportFixesDocument, GetLatestReportByRepoUrlDocument, UpdateDownloadedFixDataDocument, GetUserMvsAutoFixDocument, StreamBlameAiAnalysisRequestsDocument, StreamCommitBlameRequestsDocument, ScanSkillDocument, SkillVerdictsByMd5Document, defaultWrapper;
|
|
136
139
|
var init_client_generates = __esm({
|
|
137
140
|
"src/features/analysis/scm/generates/client_generates.ts"() {
|
|
138
141
|
"use strict";
|
|
@@ -326,6 +329,7 @@ var init_client_generates = __esm({
|
|
|
326
329
|
IssueType_Enum2["SystemExitShouldReraise"] = "SYSTEM_EXIT_SHOULD_RERAISE";
|
|
327
330
|
IssueType_Enum2["SystemInformationLeak"] = "SYSTEM_INFORMATION_LEAK";
|
|
328
331
|
IssueType_Enum2["SystemInformationLeakExternal"] = "SYSTEM_INFORMATION_LEAK_EXTERNAL";
|
|
332
|
+
IssueType_Enum2["TaintedNumericCast"] = "TAINTED_NUMERIC_CAST";
|
|
329
333
|
IssueType_Enum2["TarSlip"] = "TAR_SLIP";
|
|
330
334
|
IssueType_Enum2["TrustBoundaryViolation"] = "TRUST_BOUNDARY_VIOLATION";
|
|
331
335
|
IssueType_Enum2["TypeConfusion"] = "TYPE_CONFUSION";
|
|
@@ -1264,6 +1268,18 @@ var init_client_generates = __esm({
|
|
|
1264
1268
|
cached
|
|
1265
1269
|
summary
|
|
1266
1270
|
}
|
|
1271
|
+
}
|
|
1272
|
+
`;
|
|
1273
|
+
SkillVerdictsByMd5Document = `
|
|
1274
|
+
query SkillVerdictsByMd5($md5s: [String!]!) {
|
|
1275
|
+
skillVerdictsByMd5(md5s: $md5s) {
|
|
1276
|
+
md5
|
|
1277
|
+
verdict
|
|
1278
|
+
summary
|
|
1279
|
+
scannerName
|
|
1280
|
+
scannerVersion
|
|
1281
|
+
scannedAt
|
|
1282
|
+
}
|
|
1267
1283
|
}
|
|
1268
1284
|
`;
|
|
1269
1285
|
defaultWrapper = (action, _operationName, _operationType, _variables) => action();
|
|
@@ -1450,7 +1466,8 @@ var init_getIssueType = __esm({
|
|
|
1450
1466
|
["DJANGO_BLANK_FIELD_NEEDS_NULL_OR_DEFAULT" /* DjangoBlankFieldNeedsNullOrDefault */]: "Django Blank Field Needs Null or Default",
|
|
1451
1467
|
["REDUNDANT_NIL_ERROR_CHECK" /* RedundantNilErrorCheck */]: "Redundant Nil Error Check",
|
|
1452
1468
|
["MISSING_WORKFLOW_PERMISSIONS" /* MissingWorkflowPermissions */]: "Missing Workflow Permissions",
|
|
1453
|
-
["EXCESSIVE_SECRETS_EXPOSURE" /* ExcessiveSecretsExposure */]: "Excessive Secrets Exposure"
|
|
1469
|
+
["EXCESSIVE_SECRETS_EXPOSURE" /* ExcessiveSecretsExposure */]: "Excessive Secrets Exposure",
|
|
1470
|
+
["TAINTED_NUMERIC_CAST" /* TaintedNumericCast */]: "Tainted Numeric Cast"
|
|
1454
1471
|
};
|
|
1455
1472
|
issueTypeZ = z.nativeEnum(IssueType_Enum);
|
|
1456
1473
|
getIssueTypeFriendlyString = (issueType) => {
|
|
@@ -3577,8 +3594,8 @@ var init_FileUtils = __esm({
|
|
|
3577
3594
|
const fullPath = path.join(dir, item);
|
|
3578
3595
|
try {
|
|
3579
3596
|
await fsPromises.access(fullPath, fs.constants.R_OK);
|
|
3580
|
-
const
|
|
3581
|
-
if (
|
|
3597
|
+
const stat4 = await fsPromises.stat(fullPath);
|
|
3598
|
+
if (stat4.isDirectory()) {
|
|
3582
3599
|
if (isRootLevel && excludedRootDirectories.includes(item)) {
|
|
3583
3600
|
continue;
|
|
3584
3601
|
}
|
|
@@ -3590,7 +3607,7 @@ var init_FileUtils = __esm({
|
|
|
3590
3607
|
name: item,
|
|
3591
3608
|
fullPath,
|
|
3592
3609
|
relativePath: path.relative(rootDir, fullPath),
|
|
3593
|
-
time:
|
|
3610
|
+
time: stat4.mtime.getTime(),
|
|
3594
3611
|
isFile: true
|
|
3595
3612
|
});
|
|
3596
3613
|
}
|
|
@@ -4642,7 +4659,8 @@ var fixDetailsData = {
|
|
|
4642
4659
|
["DJANGO_BLANK_FIELD_NEEDS_NULL_OR_DEFAULT" /* DjangoBlankFieldNeedsNullOrDefault */]: void 0,
|
|
4643
4660
|
["REDUNDANT_NIL_ERROR_CHECK" /* RedundantNilErrorCheck */]: void 0,
|
|
4644
4661
|
["MISSING_WORKFLOW_PERMISSIONS" /* MissingWorkflowPermissions */]: void 0,
|
|
4645
|
-
["EXCESSIVE_SECRETS_EXPOSURE" /* ExcessiveSecretsExposure */]: void 0
|
|
4662
|
+
["EXCESSIVE_SECRETS_EXPOSURE" /* ExcessiveSecretsExposure */]: void 0,
|
|
4663
|
+
["TAINTED_NUMERIC_CAST" /* TaintedNumericCast */]: void 0
|
|
4646
4664
|
};
|
|
4647
4665
|
|
|
4648
4666
|
// src/features/analysis/scm/shared/src/commitDescriptionMarkup.ts
|
|
@@ -6174,6 +6192,17 @@ var openRedirect3 = {
|
|
|
6174
6192
|
}
|
|
6175
6193
|
};
|
|
6176
6194
|
|
|
6195
|
+
// src/features/analysis/scm/shared/src/storedQuestionData/python/ssrf.ts
|
|
6196
|
+
var ssrf5 = {
|
|
6197
|
+
domainsAllowlist: {
|
|
6198
|
+
content: () => "Allowed URL prefixes",
|
|
6199
|
+
description: () => `The security risk of this issue is the ability of an attacker to provide input that shoots HTTP requests from your server to arbitrary URLs, including internal ones, like \`https://admin.mycompany.com\`
|
|
6200
|
+
|
|
6201
|
+
To eliminate the risk and fix the issue, check out your app logic and make a whitelist of URLs this API should be allowed to call.`,
|
|
6202
|
+
guidance: () => ""
|
|
6203
|
+
}
|
|
6204
|
+
};
|
|
6205
|
+
|
|
6177
6206
|
// src/features/analysis/scm/shared/src/storedQuestionData/python/uncheckedLoopCondition.ts
|
|
6178
6207
|
var uncheckedLoopCondition3 = {
|
|
6179
6208
|
loopLimit: {
|
|
@@ -6195,7 +6224,8 @@ var vulnerabilities14 = {
|
|
|
6195
6224
|
["OPEN_REDIRECT" /* OpenRedirect */]: openRedirect3,
|
|
6196
6225
|
["UNCHECKED_LOOP_CONDITION" /* UncheckedLoopCondition */]: uncheckedLoopCondition3,
|
|
6197
6226
|
["DUPLICATED_STRINGS" /* DuplicatedStrings */]: duplicatedStrings2,
|
|
6198
|
-
["MISSING_ENCODING_FILE_OPEN" /* MissingEncodingFileOpen */]: missingEncoding
|
|
6227
|
+
["MISSING_ENCODING_FILE_OPEN" /* MissingEncodingFileOpen */]: missingEncoding,
|
|
6228
|
+
["SSRF" /* Ssrf */]: ssrf5
|
|
6199
6229
|
};
|
|
6200
6230
|
var python_default2 = vulnerabilities14;
|
|
6201
6231
|
|
|
@@ -7117,7 +7147,7 @@ async function getAdoSdk(params) {
|
|
|
7117
7147
|
const url = new URL(repoUrl);
|
|
7118
7148
|
const origin = url.origin.toLowerCase().endsWith(".visualstudio.com") ? DEFUALT_ADO_ORIGIN : url.origin.toLowerCase();
|
|
7119
7149
|
const params2 = `path=/&versionDescriptor[versionOptions]=0&versionDescriptor[versionType]=commit&versionDescriptor[version]=${branch}&resolveLfs=true&$format=zip&api-version=5.0&download=true`;
|
|
7120
|
-
const
|
|
7150
|
+
const path34 = [
|
|
7121
7151
|
prefixPath,
|
|
7122
7152
|
owner,
|
|
7123
7153
|
projectName,
|
|
@@ -7128,7 +7158,7 @@ async function getAdoSdk(params) {
|
|
|
7128
7158
|
"items",
|
|
7129
7159
|
"items"
|
|
7130
7160
|
].filter(Boolean).join("/");
|
|
7131
|
-
return new URL(`${
|
|
7161
|
+
return new URL(`${path34}?${params2}`, origin).toString();
|
|
7132
7162
|
},
|
|
7133
7163
|
async getAdoBranchList({ repoUrl }) {
|
|
7134
7164
|
try {
|
|
@@ -7217,8 +7247,8 @@ async function getAdoSdk(params) {
|
|
|
7217
7247
|
const changeType = entry.changeType;
|
|
7218
7248
|
return changeType !== 16 && entry.item?.path;
|
|
7219
7249
|
}).map((entry) => {
|
|
7220
|
-
const
|
|
7221
|
-
return
|
|
7250
|
+
const path34 = entry.item.path;
|
|
7251
|
+
return path34.startsWith("/") ? path34.slice(1) : path34;
|
|
7222
7252
|
});
|
|
7223
7253
|
},
|
|
7224
7254
|
async searchAdoPullRequests({
|
|
@@ -8091,6 +8121,15 @@ var BitbucketParseResultZ = z20.object({
|
|
|
8091
8121
|
repoName: z20.string(),
|
|
8092
8122
|
hostname: z20.literal(BITBUCKET_HOSTNAME)
|
|
8093
8123
|
});
|
|
8124
|
+
var UserWorkspacePermissionsRepositoriesResponseZ = z20.object({
|
|
8125
|
+
values: z20.array(
|
|
8126
|
+
z20.object({
|
|
8127
|
+
repository: z20.object({
|
|
8128
|
+
full_name: z20.string().optional()
|
|
8129
|
+
}).optional()
|
|
8130
|
+
})
|
|
8131
|
+
).optional()
|
|
8132
|
+
});
|
|
8094
8133
|
function parseBitbucketOrganizationAndRepo(bitbucketUrl) {
|
|
8095
8134
|
const parsedGitHubUrl = normalizeUrl(bitbucketUrl);
|
|
8096
8135
|
const parsingResult = parseScmURL(parsedGitHubUrl, "Bitbucket" /* Bitbucket */);
|
|
@@ -8155,12 +8194,17 @@ function getBitbucketSdk(params) {
|
|
|
8155
8194
|
const { repoUrl } = params2;
|
|
8156
8195
|
const { repo_slug, workspace } = parseBitbucketOrganizationAndRepo(repoUrl);
|
|
8157
8196
|
const fullRepoName = `${workspace}/${repo_slug}`;
|
|
8158
|
-
const res = await bitbucketClient.
|
|
8197
|
+
const res = await bitbucketClient.request({
|
|
8198
|
+
method: "GET",
|
|
8199
|
+
url: "/user/workspaces/{workspace}/permissions/repositories",
|
|
8200
|
+
workspace,
|
|
8159
8201
|
q: `repository.full_name~"${fullRepoName}"`
|
|
8160
8202
|
});
|
|
8161
|
-
|
|
8162
|
-
|
|
8163
|
-
)
|
|
8203
|
+
const parsed = UserWorkspacePermissionsRepositoriesResponseZ.safeParse(
|
|
8204
|
+
res.data
|
|
8205
|
+
);
|
|
8206
|
+
const values = parsed.success ? parsed.data.values : void 0;
|
|
8207
|
+
return values?.some((v) => v.repository?.full_name === fullRepoName) ?? false;
|
|
8164
8208
|
},
|
|
8165
8209
|
async createPullRequest(params2) {
|
|
8166
8210
|
const { repo_slug, workspace } = parseBitbucketOrganizationAndRepo(
|
|
@@ -8926,12 +8970,18 @@ function getOctoKit(options) {
|
|
|
8926
8970
|
timeout: 1e4
|
|
8927
8971
|
// 10 second timeout
|
|
8928
8972
|
},
|
|
8929
|
-
retry
|
|
8930
|
-
|
|
8931
|
-
|
|
8973
|
+
// Always retry on transient failures. 401 is intentionally retryable:
|
|
8974
|
+
// GitHub briefly returns 401 for an OAuth token in the first few seconds
|
|
8975
|
+
// after it is minted, and without this, validateRepoUrl and every
|
|
8976
|
+
// downstream SCM call can fail permanently on that propagation glitch.
|
|
8977
|
+
// Trade-off: a genuinely revoked/invalid token surfaces after ~14s of
|
|
8978
|
+
// backoff (@octokit/plugin-retry uses retryCount^2 * 1000 ms: 1s, 4s, 9s)
|
|
8979
|
+
// instead of immediately. Acceptable given the alternative is permanent
|
|
8980
|
+
// failure / 10-minute test timeouts.
|
|
8981
|
+
retry: {
|
|
8982
|
+
doNotRetry: [400, 403, 404, 422],
|
|
8932
8983
|
retries: 3
|
|
8933
|
-
|
|
8934
|
-
} : { enabled: false },
|
|
8984
|
+
},
|
|
8935
8985
|
throttle: options?.isEnableRetries ? {
|
|
8936
8986
|
onRateLimit: (retryAfter, options2, octokit, retryCount) => {
|
|
8937
8987
|
octokit.log.warn(
|
|
@@ -9593,10 +9643,13 @@ function getGithubSdk(params = {}) {
|
|
|
9593
9643
|
return res;
|
|
9594
9644
|
},
|
|
9595
9645
|
/**
|
|
9596
|
-
*
|
|
9597
|
-
* https://docs.github.com/en/rest/
|
|
9646
|
+
* List PRs using GitHub's REST `/repos/{owner}/{repo}/pulls` endpoint.
|
|
9647
|
+
* https://docs.github.com/en/rest/pulls/pulls?apiVersion=2022-11-28#list-pull-requests
|
|
9648
|
+
*
|
|
9649
|
+
* Uses the 5000/hr core rate-limit bucket and reads freshly-created PRs
|
|
9650
|
+
* without the indexing lag of the Search API.
|
|
9598
9651
|
*/
|
|
9599
|
-
async
|
|
9652
|
+
async listPullRequests(params2) {
|
|
9600
9653
|
const {
|
|
9601
9654
|
owner,
|
|
9602
9655
|
repo,
|
|
@@ -9606,26 +9659,22 @@ function getGithubSdk(params = {}) {
|
|
|
9606
9659
|
perPage = 10,
|
|
9607
9660
|
page = 1
|
|
9608
9661
|
} = params2;
|
|
9609
|
-
|
|
9610
|
-
|
|
9611
|
-
|
|
9612
|
-
|
|
9613
|
-
|
|
9614
|
-
|
|
9615
|
-
|
|
9616
|
-
}
|
|
9617
|
-
const githubSortField = sort.field === "updated" || sort.field === "created" ? sort.field : "comments";
|
|
9618
|
-
const response = await octokit.rest.search.issuesAndPullRequests({
|
|
9619
|
-
q: query,
|
|
9620
|
-
sort: githubSortField,
|
|
9621
|
-
order: sort.order,
|
|
9662
|
+
const restSortField = sort.field === "updated" || sort.field === "created" ? sort.field : "popularity";
|
|
9663
|
+
const response = await octokit.rest.pulls.list({
|
|
9664
|
+
owner,
|
|
9665
|
+
repo,
|
|
9666
|
+
state,
|
|
9667
|
+
sort: restSortField,
|
|
9668
|
+
direction: sort.order,
|
|
9622
9669
|
per_page: perPage,
|
|
9623
9670
|
page
|
|
9624
9671
|
});
|
|
9672
|
+
const filtered = updatedAfter ? response.data.filter((pr) => new Date(pr.updated_at) >= updatedAfter) : response.data;
|
|
9673
|
+
const hitDescCutoff = sort.order === "desc" && updatedAfter !== void 0 && filtered.length < response.data.length;
|
|
9674
|
+
const hasMore = response.data.length === perPage && !hitDescCutoff;
|
|
9625
9675
|
return {
|
|
9626
|
-
items:
|
|
9627
|
-
|
|
9628
|
-
hasMore: page * perPage < response.data.total_count
|
|
9676
|
+
items: filtered,
|
|
9677
|
+
hasMore
|
|
9629
9678
|
};
|
|
9630
9679
|
},
|
|
9631
9680
|
/**
|
|
@@ -10225,8 +10274,12 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
10225
10274
|
}).map((file) => file.filename);
|
|
10226
10275
|
}
|
|
10227
10276
|
/**
|
|
10228
|
-
* Override searchSubmitRequests to use GitHub's
|
|
10229
|
-
*
|
|
10277
|
+
* Override searchSubmitRequests to use GitHub's REST `/pulls` endpoint.
|
|
10278
|
+
*
|
|
10279
|
+
* The Search API we used previously has a separate 30/min secondary rate
|
|
10280
|
+
* limit and an async index that lags on just-created PRs. `/pulls` hits
|
|
10281
|
+
* GitHub's primary datastore, uses the 5000/hr core bucket, and returns
|
|
10282
|
+
* `head.ref` / `base.ref` so we can populate sourceBranch / targetBranch.
|
|
10230
10283
|
*/
|
|
10231
10284
|
async searchSubmitRequests(params) {
|
|
10232
10285
|
this._validateAccessToken();
|
|
@@ -10234,7 +10287,7 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
10234
10287
|
const page = parseCursorSafe(params.cursor, 1);
|
|
10235
10288
|
const perPage = params.limit || 10;
|
|
10236
10289
|
const sort = params.sort || { field: "updated", order: "desc" };
|
|
10237
|
-
const
|
|
10290
|
+
const listResult = await this.githubSdk.listPullRequests({
|
|
10238
10291
|
owner,
|
|
10239
10292
|
repo,
|
|
10240
10293
|
updatedAfter: params.filters?.updatedAfter,
|
|
@@ -10243,38 +10296,33 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
10243
10296
|
perPage,
|
|
10244
10297
|
page
|
|
10245
10298
|
});
|
|
10246
|
-
const results =
|
|
10299
|
+
const results = listResult.items.map((pr) => {
|
|
10247
10300
|
let status = "open";
|
|
10248
|
-
if (
|
|
10249
|
-
status =
|
|
10250
|
-
} else if (
|
|
10301
|
+
if (pr.state === "closed") {
|
|
10302
|
+
status = pr.merged_at ? "merged" : "closed";
|
|
10303
|
+
} else if (pr.draft) {
|
|
10251
10304
|
status = "draft";
|
|
10252
10305
|
}
|
|
10253
10306
|
return {
|
|
10254
|
-
submitRequestId: String(
|
|
10255
|
-
submitRequestNumber:
|
|
10256
|
-
title:
|
|
10307
|
+
submitRequestId: String(pr.number),
|
|
10308
|
+
submitRequestNumber: pr.number,
|
|
10309
|
+
title: pr.title,
|
|
10257
10310
|
status,
|
|
10258
|
-
sourceBranch: "",
|
|
10259
|
-
|
|
10260
|
-
|
|
10261
|
-
// Not available in search API
|
|
10262
|
-
authorName: issue.user?.login,
|
|
10311
|
+
sourceBranch: pr.head?.ref ?? "",
|
|
10312
|
+
targetBranch: pr.base?.ref ?? "",
|
|
10313
|
+
authorName: pr.user?.login,
|
|
10263
10314
|
authorEmail: void 0,
|
|
10264
|
-
|
|
10265
|
-
|
|
10266
|
-
|
|
10267
|
-
description: issue.body || void 0,
|
|
10315
|
+
createdAt: new Date(pr.created_at),
|
|
10316
|
+
updatedAt: new Date(pr.updated_at),
|
|
10317
|
+
description: pr.body || void 0,
|
|
10268
10318
|
tickets: [],
|
|
10269
|
-
// Would need separate parsing
|
|
10270
10319
|
changedLines: { added: 0, removed: 0 }
|
|
10271
|
-
// Not available in search API
|
|
10272
10320
|
};
|
|
10273
10321
|
});
|
|
10274
10322
|
return {
|
|
10275
10323
|
results,
|
|
10276
|
-
nextCursor:
|
|
10277
|
-
hasMore:
|
|
10324
|
+
nextCursor: listResult.hasMore ? String(page + 1) : void 0,
|
|
10325
|
+
hasMore: listResult.hasMore
|
|
10278
10326
|
};
|
|
10279
10327
|
}
|
|
10280
10328
|
/**
|
|
@@ -13506,6 +13554,10 @@ var GQLClient = class {
|
|
|
13506
13554
|
async scanSkill(variables) {
|
|
13507
13555
|
return await this._clientSdk.ScanSkill(variables);
|
|
13508
13556
|
}
|
|
13557
|
+
// T-467 — batched verdict lookup for the client-side quarantine check.
|
|
13558
|
+
async skillVerdictsByMd5(md5s) {
|
|
13559
|
+
return await this._clientSdk.SkillVerdictsByMd5({ md5s });
|
|
13560
|
+
}
|
|
13509
13561
|
};
|
|
13510
13562
|
|
|
13511
13563
|
// src/features/analysis/graphql/tracy-batch-upload.ts
|
|
@@ -13893,13 +13945,13 @@ function maskString(str, showStart = 2, showEnd = 2) {
|
|
|
13893
13945
|
}
|
|
13894
13946
|
return str.slice(0, showStart) + "*".repeat(str.length - showStart - showEnd) + str.slice(-showEnd);
|
|
13895
13947
|
}
|
|
13896
|
-
async function sanitizeDataWithCounts(obj) {
|
|
13948
|
+
async function sanitizeDataWithCounts(obj, options) {
|
|
13897
13949
|
const counts = {
|
|
13898
13950
|
detections: { total: 0, high: 0, medium: 0, low: 0 }
|
|
13899
13951
|
};
|
|
13900
13952
|
const MAX_SCAN_LENGTH = 1e5;
|
|
13901
13953
|
const sanitizeString = async (str) => {
|
|
13902
|
-
if (str.length > MAX_SCAN_LENGTH) {
|
|
13954
|
+
if (!options?.noSizeLimit && str.length > MAX_SCAN_LENGTH) {
|
|
13903
13955
|
return str;
|
|
13904
13956
|
}
|
|
13905
13957
|
let result = str;
|
|
@@ -14358,7 +14410,7 @@ async function prepareAndSendTracyRecords(client, rawRecords, workingDir, option
|
|
|
14358
14410
|
`${shouldSanitize ? "sanitize" : "serialize"} ${rawRecords.length} records`,
|
|
14359
14411
|
() => Promise.all(
|
|
14360
14412
|
rawRecords.map(async (record, index) => {
|
|
14361
|
-
if (record.rawData != null) {
|
|
14413
|
+
if (record.rawData != null && record.rawDataS3Key == null) {
|
|
14362
14414
|
const serialized = shouldSanitize ? await sanitizeRawData(record.rawData) : JSON.stringify(record.rawData);
|
|
14363
14415
|
serializedRawDataByIndex.set(index, serialized);
|
|
14364
14416
|
}
|
|
@@ -15028,7 +15080,7 @@ async function postIssueComment(params) {
|
|
|
15028
15080
|
fpDescription
|
|
15029
15081
|
} = params;
|
|
15030
15082
|
const {
|
|
15031
|
-
path:
|
|
15083
|
+
path: path34,
|
|
15032
15084
|
startLine,
|
|
15033
15085
|
vulnerabilityReportIssue: {
|
|
15034
15086
|
vulnerabilityReportIssueTags,
|
|
@@ -15043,7 +15095,7 @@ async function postIssueComment(params) {
|
|
|
15043
15095
|
Refresh the page in order to see the changes.`,
|
|
15044
15096
|
pull_number: pullRequest,
|
|
15045
15097
|
commit_id: commitSha,
|
|
15046
|
-
path:
|
|
15098
|
+
path: path34,
|
|
15047
15099
|
line: startLine
|
|
15048
15100
|
});
|
|
15049
15101
|
const commentId = commentRes.data.id;
|
|
@@ -15077,7 +15129,7 @@ async function postFixComment(params) {
|
|
|
15077
15129
|
scanner
|
|
15078
15130
|
} = params;
|
|
15079
15131
|
const {
|
|
15080
|
-
path:
|
|
15132
|
+
path: path34,
|
|
15081
15133
|
startLine,
|
|
15082
15134
|
vulnerabilityReportIssue: { fixId, vulnerabilityReportIssueTags, category },
|
|
15083
15135
|
vulnerabilityReportIssueId
|
|
@@ -15095,7 +15147,7 @@ async function postFixComment(params) {
|
|
|
15095
15147
|
Refresh the page in order to see the changes.`,
|
|
15096
15148
|
pull_number: pullRequest,
|
|
15097
15149
|
commit_id: commitSha,
|
|
15098
|
-
path:
|
|
15150
|
+
path: path34,
|
|
15099
15151
|
line: startLine
|
|
15100
15152
|
});
|
|
15101
15153
|
const commentId = commentRes.data.id;
|
|
@@ -16617,8 +16669,8 @@ async function resolveSkillScanInput(skillInput) {
|
|
|
16617
16669
|
if (!fs11.existsSync(resolvedPath)) {
|
|
16618
16670
|
return skillInput;
|
|
16619
16671
|
}
|
|
16620
|
-
const
|
|
16621
|
-
if (!
|
|
16672
|
+
const stat4 = fs11.statSync(resolvedPath);
|
|
16673
|
+
if (!stat4.isDirectory()) {
|
|
16622
16674
|
throw new CliError(
|
|
16623
16675
|
"Local skill input must be a directory containing SKILL.md"
|
|
16624
16676
|
);
|
|
@@ -17016,13 +17068,1110 @@ async function analyzeHandler(args) {
|
|
|
17016
17068
|
import { spawn } from "child_process";
|
|
17017
17069
|
|
|
17018
17070
|
// src/features/claude_code/daemon.ts
|
|
17019
|
-
import
|
|
17071
|
+
import { readFileSync, writeFileSync as writeFileSync2 } from "fs";
|
|
17072
|
+
import path21 from "path";
|
|
17020
17073
|
import { setTimeout as sleep2 } from "timers/promises";
|
|
17074
|
+
import Configstore3 from "configstore";
|
|
17075
|
+
|
|
17076
|
+
// src/features/analysis/skill_quarantine/constants.ts
|
|
17077
|
+
var HEARTBEAT_DEBOUNCE_MS = (() => {
|
|
17078
|
+
const raw = Number(process.env["MOBB_TRACY_SKILL_QUARANTINE_DEBOUNCE_MS"]);
|
|
17079
|
+
if (!Number.isFinite(raw) || raw < 0) return 3e4;
|
|
17080
|
+
return Math.min(raw, 3e5);
|
|
17081
|
+
})();
|
|
17082
|
+
var KILL_SWITCH_ENV = "MOBB_TRACY_SKILL_QUARANTINE_DISABLE";
|
|
17083
|
+
var MALICIOUS_VERDICT = "MALICIOUS";
|
|
17084
|
+
var ORPHAN_SWEEP_GRACE_MS = 10 * 60 * 1e3;
|
|
17085
|
+
|
|
17086
|
+
// src/features/analysis/context_file_processor.ts
|
|
17087
|
+
import { createHash } from "crypto";
|
|
17088
|
+
import path13 from "path";
|
|
17089
|
+
import AdmZip3 from "adm-zip";
|
|
17090
|
+
import pLimit6 from "p-limit";
|
|
17091
|
+
var SANITIZE_CONCURRENCY = 5;
|
|
17092
|
+
function md5Hex(data) {
|
|
17093
|
+
return createHash("md5").update(typeof data === "string" ? Buffer.from(data, "utf-8") : data).digest("hex");
|
|
17094
|
+
}
|
|
17095
|
+
async function sanitizeFileContent(content) {
|
|
17096
|
+
const { sanitizedData } = await sanitizeDataWithCounts(content, {
|
|
17097
|
+
noSizeLimit: true
|
|
17098
|
+
});
|
|
17099
|
+
return sanitizedData;
|
|
17100
|
+
}
|
|
17101
|
+
async function processContextFiles(regularFiles, skillGroups) {
|
|
17102
|
+
const limit = pLimit6(SANITIZE_CONCURRENCY);
|
|
17103
|
+
const processedFiles = await Promise.all(
|
|
17104
|
+
regularFiles.map(
|
|
17105
|
+
(entry) => limit(async () => {
|
|
17106
|
+
const sanitizedContent = await sanitizeFileContent(entry.content);
|
|
17107
|
+
const md5 = md5Hex(sanitizedContent);
|
|
17108
|
+
const sizeBytes = Buffer.byteLength(sanitizedContent, "utf-8");
|
|
17109
|
+
return { entry, sanitizedContent, md5, sizeBytes };
|
|
17110
|
+
})
|
|
17111
|
+
)
|
|
17112
|
+
);
|
|
17113
|
+
const processedSkills = await Promise.all(
|
|
17114
|
+
skillGroups.filter((group) => group.files.length > 0).map(
|
|
17115
|
+
(group) => limit(async () => {
|
|
17116
|
+
const zip = new AdmZip3();
|
|
17117
|
+
const sortedFiles = [...group.files].sort(
|
|
17118
|
+
(a, b) => a.path.localeCompare(b.path)
|
|
17119
|
+
);
|
|
17120
|
+
for (const file of sortedFiles) {
|
|
17121
|
+
const sanitizedContent = await sanitizeFileContent(file.content);
|
|
17122
|
+
const zipEntryName = group.isFolder ? path13.relative(group.skillPath, file.path).replace(/\\/g, "/") : path13.basename(file.path);
|
|
17123
|
+
zip.addFile(zipEntryName, Buffer.from(sanitizedContent, "utf-8"));
|
|
17124
|
+
const entry = zip.getEntry(zipEntryName);
|
|
17125
|
+
if (entry) {
|
|
17126
|
+
entry.header.time = /* @__PURE__ */ new Date(0);
|
|
17127
|
+
}
|
|
17128
|
+
}
|
|
17129
|
+
const zipBuffer = zip.toBuffer();
|
|
17130
|
+
const md5 = md5Hex(zipBuffer);
|
|
17131
|
+
return { group, zipBuffer, md5, sizeBytes: zipBuffer.byteLength };
|
|
17132
|
+
})
|
|
17133
|
+
)
|
|
17134
|
+
);
|
|
17135
|
+
return { files: processedFiles, skills: processedSkills };
|
|
17136
|
+
}
|
|
17137
|
+
|
|
17138
|
+
// src/features/analysis/context_file_scanner.ts
|
|
17139
|
+
import { lstat, readFile, stat } from "fs/promises";
|
|
17140
|
+
import { homedir } from "os";
|
|
17141
|
+
import path14 from "path";
|
|
17142
|
+
import { globby as globby2 } from "globby";
|
|
17143
|
+
import { parse as parseJsoncLib } from "jsonc-parser";
|
|
17144
|
+
|
|
17145
|
+
// src/features/analysis/context_file_scan_paths.ts
|
|
17146
|
+
var SKILL_CATEGORY = "skill";
|
|
17147
|
+
var SCAN_PATHS = {
|
|
17148
|
+
"claude-code": [
|
|
17149
|
+
{ glob: "CLAUDE.md", category: "rule", root: "workspace" },
|
|
17150
|
+
{ glob: "CLAUDE.local.md", category: "rule", root: "workspace" },
|
|
17151
|
+
{ glob: "INSIGHTS.md", category: "rule", root: "workspace" },
|
|
17152
|
+
{ glob: "AGENTS.md", category: "rule", root: "workspace" },
|
|
17153
|
+
{ glob: ".claude/rules/**/*.md", category: "rule", root: "workspace" },
|
|
17154
|
+
{ glob: ".claude/CLAUDE.md", category: "rule", root: "home" },
|
|
17155
|
+
{ glob: ".claude/INSIGHTS.md", category: "rule", root: "home" },
|
|
17156
|
+
{ glob: ".claude/rules/**/*.md", category: "rule", root: "home" },
|
|
17157
|
+
{
|
|
17158
|
+
glob: ".claude/projects/*/memory/*.md",
|
|
17159
|
+
category: "memory",
|
|
17160
|
+
root: "home"
|
|
17161
|
+
},
|
|
17162
|
+
{ kind: "skill-bundle", skillsRoot: ".claude/skills", root: "workspace" },
|
|
17163
|
+
{ glob: ".claude/commands/*.md", category: "command", root: "workspace" },
|
|
17164
|
+
{
|
|
17165
|
+
glob: ".claude/agents/*.md",
|
|
17166
|
+
category: "agent-config",
|
|
17167
|
+
root: "workspace"
|
|
17168
|
+
},
|
|
17169
|
+
{ kind: "skill-bundle", skillsRoot: ".claude/skills", root: "home" },
|
|
17170
|
+
{ glob: ".claude/commands/*.md", category: "command", root: "home" },
|
|
17171
|
+
{ glob: ".claude/agents/*.md", category: "agent-config", root: "home" },
|
|
17172
|
+
{ glob: ".claude/settings.json", category: "config", root: "workspace" },
|
|
17173
|
+
{
|
|
17174
|
+
glob: ".claude/settings.local.json",
|
|
17175
|
+
category: "config",
|
|
17176
|
+
root: "workspace"
|
|
17177
|
+
},
|
|
17178
|
+
{ glob: ".mcp.json", category: "mcp-config", root: "workspace" },
|
|
17179
|
+
{ glob: ".claude/.mcp.json", category: "mcp-config", root: "workspace" },
|
|
17180
|
+
{ glob: ".claude/settings.json", category: "config", root: "home" },
|
|
17181
|
+
{ glob: ".claudeignore", category: "ignore", root: "workspace" }
|
|
17182
|
+
],
|
|
17183
|
+
cursor: [
|
|
17184
|
+
// Legacy single-file rules
|
|
17185
|
+
{ glob: ".cursorrules", category: "rule", root: "workspace" },
|
|
17186
|
+
// Project Rules — docs support both `.mdc` and `.md` inside .cursor/rules/
|
|
17187
|
+
{ glob: ".cursor/rules/**/*.mdc", category: "rule", root: "workspace" },
|
|
17188
|
+
{ glob: ".cursor/rules/**/*.md", category: "rule", root: "workspace" },
|
|
17189
|
+
// AGENTS.md — Cursor's documented alternative to .cursor/rules/
|
|
17190
|
+
{ glob: "AGENTS.md", category: "rule", root: "workspace" },
|
|
17191
|
+
// Agent skills — Cursor auto-loads from these dirs plus compat with
|
|
17192
|
+
// Claude / Codex / generic .agents/ per Cursor docs.
|
|
17193
|
+
{ kind: "skill-bundle", skillsRoot: ".cursor/skills", root: "workspace" },
|
|
17194
|
+
{ kind: "skill-bundle", skillsRoot: ".agents/skills", root: "workspace" },
|
|
17195
|
+
{ kind: "skill-bundle", skillsRoot: ".claude/skills", root: "workspace" },
|
|
17196
|
+
{ kind: "skill-bundle", skillsRoot: ".codex/skills", root: "workspace" },
|
|
17197
|
+
// MCP — project + global
|
|
17198
|
+
{ glob: ".cursor/mcp.json", category: "mcp-config", root: "workspace" },
|
|
17199
|
+
{ glob: ".cursor/mcp.json", category: "mcp-config", root: "home" },
|
|
17200
|
+
// Home skills (user-level cross-project skills)
|
|
17201
|
+
{ kind: "skill-bundle", skillsRoot: ".cursor/skills", root: "home" },
|
|
17202
|
+
{ kind: "skill-bundle", skillsRoot: ".agents/skills", root: "home" },
|
|
17203
|
+
{ kind: "skill-bundle", skillsRoot: ".claude/skills", root: "home" },
|
|
17204
|
+
{ kind: "skill-bundle", skillsRoot: ".codex/skills", root: "home" },
|
|
17205
|
+
// Exclusion
|
|
17206
|
+
{ glob: ".cursorignore", category: "ignore", root: "workspace" }
|
|
17207
|
+
// Note: Cursor's global "Rules for AI" from Settings UI is stored in
|
|
17208
|
+
// Cursor's internal settings DB. The tracer_ext reads it via VS Code API
|
|
17209
|
+
// (vscode.workspace.getConfiguration) and includes it as a synthetic entry.
|
|
17210
|
+
],
|
|
17211
|
+
copilot: [
|
|
17212
|
+
// Instructions — workspace
|
|
17213
|
+
{
|
|
17214
|
+
glob: ".github/copilot-instructions.md",
|
|
17215
|
+
category: "rule",
|
|
17216
|
+
root: "workspace"
|
|
17217
|
+
},
|
|
17218
|
+
{
|
|
17219
|
+
glob: ".github/instructions/**/*.instructions.md",
|
|
17220
|
+
category: "rule",
|
|
17221
|
+
root: "workspace"
|
|
17222
|
+
},
|
|
17223
|
+
// AGENTS.md / CLAUDE.md family (Copilot reads these via chat.useAgentsMdFile,
|
|
17224
|
+
// chat.useClaudeMdFile for cross-compat with Claude Code / other agents).
|
|
17225
|
+
{ glob: "AGENTS.md", category: "rule", root: "workspace" },
|
|
17226
|
+
{ glob: "CLAUDE.md", category: "rule", root: "workspace" },
|
|
17227
|
+
{ glob: "CLAUDE.local.md", category: "rule", root: "workspace" },
|
|
17228
|
+
{ glob: ".claude/CLAUDE.md", category: "rule", root: "workspace" },
|
|
17229
|
+
{ glob: ".claude/rules/**/*.md", category: "rule", root: "workspace" },
|
|
17230
|
+
// Prompts — workspace
|
|
17231
|
+
{
|
|
17232
|
+
glob: ".github/prompts/*.prompt.md",
|
|
17233
|
+
category: SKILL_CATEGORY,
|
|
17234
|
+
root: "workspace"
|
|
17235
|
+
},
|
|
17236
|
+
// Custom agents — `.agent.md` is the current format; `.chatmode.md` is the
|
|
17237
|
+
// legacy naming docs recommend renaming. We scan both for transition.
|
|
17238
|
+
{
|
|
17239
|
+
glob: ".github/agents/*.agent.md",
|
|
17240
|
+
category: "agent-config",
|
|
17241
|
+
root: "workspace"
|
|
17242
|
+
},
|
|
17243
|
+
{
|
|
17244
|
+
glob: ".github/chatmodes/*.chatmode.md",
|
|
17245
|
+
category: "agent-config",
|
|
17246
|
+
root: "workspace"
|
|
17247
|
+
},
|
|
17248
|
+
{
|
|
17249
|
+
glob: ".claude/agents/*.md",
|
|
17250
|
+
category: "agent-config",
|
|
17251
|
+
root: "workspace"
|
|
17252
|
+
},
|
|
17253
|
+
// Agent skills — Copilot discovers skills in all three roots (VS Code docs:
|
|
17254
|
+
// Agent Skills). Each skill is a directory with SKILL.md plus sibling files.
|
|
17255
|
+
{ kind: "skill-bundle", skillsRoot: ".github/skills", root: "workspace" },
|
|
17256
|
+
{ kind: "skill-bundle", skillsRoot: ".claude/skills", root: "workspace" },
|
|
17257
|
+
{ kind: "skill-bundle", skillsRoot: ".agents/skills", root: "workspace" },
|
|
17258
|
+
// MCP — VS Code Copilot reads MCP servers from .vscode/mcp.json
|
|
17259
|
+
{ glob: ".vscode/mcp.json", category: "mcp-config", root: "workspace" },
|
|
17260
|
+
// Global — home (JetBrains stores global instructions here)
|
|
17261
|
+
{
|
|
17262
|
+
glob: ".config/github-copilot/global-copilot-instructions.md",
|
|
17263
|
+
category: "rule",
|
|
17264
|
+
root: "home"
|
|
17265
|
+
},
|
|
17266
|
+
// User-level Copilot customizations (~/.copilot/)
|
|
17267
|
+
{
|
|
17268
|
+
glob: ".copilot/instructions/**/*.instructions.md",
|
|
17269
|
+
category: "rule",
|
|
17270
|
+
root: "home"
|
|
17271
|
+
},
|
|
17272
|
+
{ glob: ".copilot/prompts/*.prompt.md", category: "skill", root: "home" },
|
|
17273
|
+
{
|
|
17274
|
+
glob: ".copilot/agents/*.agent.md",
|
|
17275
|
+
category: "agent-config",
|
|
17276
|
+
root: "home"
|
|
17277
|
+
},
|
|
17278
|
+
{ kind: "skill-bundle", skillsRoot: ".copilot/skills", root: "home" },
|
|
17279
|
+
// Cross-compat home paths (Copilot reads Claude / generic agent dirs too)
|
|
17280
|
+
{ glob: ".claude/CLAUDE.md", category: "rule", root: "home" },
|
|
17281
|
+
{ glob: ".claude/rules/**/*.md", category: "rule", root: "home" },
|
|
17282
|
+
{ glob: ".claude/agents/*.md", category: "agent-config", root: "home" },
|
|
17283
|
+
{ kind: "skill-bundle", skillsRoot: ".claude/skills", root: "home" },
|
|
17284
|
+
{ kind: "skill-bundle", skillsRoot: ".agents/skills", root: "home" }
|
|
17285
|
+
]
|
|
17286
|
+
};
|
|
17287
|
+
|
|
17288
|
+
// src/features/analysis/context_file_scanner.ts
|
|
17289
|
+
/** Upper bound (bytes) on any context file the scanner will read: 20 MiB. */
var MAX_CONTEXT_FILE_SIZE = 20 * 1024 * 1024;
/** Session mtime caches idle longer than 24h are evicted. */
var SESSION_TTL_MS = 24 * 60 * 60 * 1e3;
/** sessionId -> { files: path->mtimeMs, skills: sessionKey->mtimeMs, lastUpdatedAt }. */
var sessionMtimes = /* @__PURE__ */ new Map();
/**
 * Record the mtimes of files / skill groups just uploaded for a session,
 * so subsequent scans can skip anything unchanged since this upload.
 */
function markContextFilesUploaded(sessionId, files, skills) {
  const existing = sessionMtimes.get(sessionId);
  const entry = existing ?? {
    files: /* @__PURE__ */ new Map(),
    skills: /* @__PURE__ */ new Map(),
    lastUpdatedAt: Date.now()
  };
  if (!existing) {
    sessionMtimes.set(sessionId, entry);
  }
  for (const { path: filePath, mtimeMs } of files) {
    entry.files.set(filePath, mtimeMs);
  }
  for (const sg of skills ?? []) {
    entry.skills.set(sg.sessionKey, sg.maxMtimeMs);
  }
  entry.lastUpdatedAt = Date.now();
}
|
|
17308
|
+
// VS Code settings keys that let users relocate Copilot customization
// files. Each entry maps a settings key to how discovered locations are
// scanned: either as a skill-bundle root, or with a glob plus category.
var COPILOT_CUSTOM_LOCATION_SETTINGS = [
  {
    key: "chat.agentSkillsLocations",
    kind: "skill-bundle"
  },
  {
    key: "chat.instructionsFilesLocations",
    kind: "glob",
    category: "rule",
    glob: "**/*.instructions.md"
  },
  {
    key: "chat.promptFilesLocations",
    kind: "glob",
    category: "skill",
    glob: "**/*.prompt.md"
  },
  {
    key: "chat.agentFilesLocations",
    kind: "glob",
    category: "agent-config",
    glob: "**/*.agent.md"
  }
];
|
|
17332
|
+
// Claude Code settings keys (~/.claude/settings.json) that point at extra
// context locations; each resolved location is scanned with the given glob.
var CLAUDE_CODE_CUSTOM_LOCATION_SETTINGS = [
  {
    key: "autoMemoryDirectory",
    category: "memory",
    glob: "*/memory/*.md"
  }
];
|
|
17339
|
+
/**
 * Lenient JSONC parse: comments and trailing commas are tolerated.
 * Returns the parsed value, or null when nothing could be parsed.
 * Parse errors are collected by the library but intentionally ignored
 * (best-effort read of user-edited settings files).
 */
function parseJsonc(text) {
  const parseErrors = [];
  const result = parseJsoncLib(text, parseErrors, {
    allowTrailingComma: true,
    disallowComments: false
  });
  return result ?? null;
}
|
|
17347
|
+
/**
 * Normalize a settings value into a list of non-empty location strings.
 * Accepts either an array of strings or an object map { location: true }.
 * Anything else yields an empty list.
 */
function extractCustomLocations(value) {
  if (Array.isArray(value)) {
    const out = [];
    for (const candidate of value) {
      if (typeof candidate === "string" && candidate.length > 0) {
        out.push(candidate);
      }
    }
    return out;
  }
  if (value === null || typeof value !== "object") {
    return [];
  }
  const enabled = [];
  for (const [location, flag] of Object.entries(value)) {
    if (flag === true && location.length > 0) {
      enabled.push(location);
    }
  }
  return enabled;
}
|
|
17358
|
+
// Home subdirectories that must never be scanned: they commonly contain
// credentials or private keys. Runtime values are unchanged.
var SENSITIVE_HOME_SUBDIRS = [
  ".ssh",
  ".aws",
  ".gnupg",
  ".config",
  ".kube",
  ".docker",
  ".gcloud",
  ".npmrc",
  ".netrc",
  ".git-credentials",
  ".m2",
  ".pypirc",
  ".pgpass",
  ".boto",
  ".password-store"
];
/**
 * Resolve a user-configured location into a safe absolute path.
 * Supports ${workspaceFolder} substitution and leading "~" expansion.
 * Rejects (returns null): any remaining ${...} variable, "~user" forms,
 * the filesystem root, the home dir itself, sensitive home subdirs, and
 * any path escaping BOTH the workspace and the home directory.
 */
function resolveCustomLocationPath(raw, workspaceRoot, home) {
  let candidate = raw.replace(/\$\{workspaceFolder\}/g, workspaceRoot);
  // Unknown ${...} variables — refuse to guess at their expansion.
  if (/\$\{[^}]+\}/.test(candidate)) {
    return null;
  }
  // "~user" style expansion is unsupported.
  if (/^~[^/]/.test(candidate)) {
    return null;
  }
  if (candidate === "~" || candidate.startsWith("~/")) {
    candidate = path14.join(home, candidate.slice(1));
  }
  if (!path14.isAbsolute(candidate)) {
    candidate = path14.resolve(workspaceRoot, candidate);
  }
  const resolved = path14.normalize(candidate);
  if (resolved === "/" || resolved === home) {
    return null;
  }
  const hitsSensitiveDir = SENSITIVE_HOME_SUBDIRS.some((sub) => {
    const sensitive = path14.join(home, sub);
    return resolved === sensitive || resolved.startsWith(`${sensitive}${path14.sep}`);
  });
  if (hitsSensitiveDir) {
    return null;
  }
  const relWorkspace = path14.relative(workspaceRoot, resolved);
  const relHome = path14.relative(home, resolved);
  const outsideWorkspace = relWorkspace.startsWith("..") || path14.isAbsolute(relWorkspace);
  const outsideHome = relHome.startsWith("..") || path14.isAbsolute(relHome);
  return outsideWorkspace && outsideHome ? null : resolved;
}
|
|
17408
|
+
// Cap on how many settings.json parse results are memoized (FIFO eviction).
var MAX_SETTINGS_CACHE_SIZE = 50;
// settingsPath -> { mtimeMs, parsed }; mtime-keyed so edits invalidate.
var settingsCache = /* @__PURE__ */ new Map();
// Read and parse a JSONC settings file with an mtime-keyed cache.
// Returns the parsed plain object, or null when the file is missing, is a
// symlink (refused for safety), is not a regular file, is unreadable, or
// does not parse to a plain object. Definite misses are cached too.
async function readJsoncSettings(settingsPath) {
  // Refuse symlinked settings files: a committed symlink could redirect
  // the reader to an arbitrary file.
  try {
    const lst = await lstat(settingsPath);
    if (lst.isSymbolicLink()) {
      return null;
    }
  } catch {
    // lstat failed (typically the file does not exist) — cache the miss.
    putSettingsCache(settingsPath, { mtimeMs: null, parsed: null });
    return null;
  }
  let mtimeMs = null;
  try {
    const st = await stat(settingsPath);
    if (!st.isFile()) {
      putSettingsCache(settingsPath, { mtimeMs: null, parsed: null });
      return null;
    }
    mtimeMs = st.mtimeMs;
  } catch (err) {
    // Only a confirmed ENOENT is cached as a negative entry; transient
    // errors are left uncached so the next call retries.
    if (err.code === "ENOENT") {
      putSettingsCache(settingsPath, { mtimeMs: null, parsed: null });
    }
    return null;
  }
  // Serve from cache when the file is unchanged since the last parse.
  const cached = settingsCache.get(settingsPath);
  if (cached && cached.mtimeMs === mtimeMs) {
    return cached.parsed;
  }
  let text;
  try {
    text = await readFile(settingsPath, "utf-8");
  } catch {
    return null;
  }
  const parsed = parseJsonc(text);
  // Only a plain (non-array) object is a valid settings payload.
  if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
    putSettingsCache(settingsPath, { mtimeMs, parsed: null });
    return null;
  }
  const payload = parsed;
  putSettingsCache(settingsPath, { mtimeMs, parsed: payload });
  return payload;
}
|
|
17453
|
+
/**
 * Insert/update a settings cache entry, evicting the oldest-inserted key
 * (FIFO, not LRU) when adding a brand-new key would exceed the cap.
 */
function putSettingsCache(path34, entry) {
  const isNewKey = !settingsCache.has(path34);
  if (isNewKey && settingsCache.size >= MAX_SETTINGS_CACHE_SIZE) {
    const oldestKey = settingsCache.keys().next().value;
    settingsCache.delete(oldestKey);
  }
  settingsCache.set(path34, entry);
}
|
|
17459
|
+
/**
 * Read .vscode/settings.json and translate Copilot custom-location
 * settings into dynamic scan entries (absolute-base globs / skill-bundle
 * roots). Unsafe or duplicate locations are silently dropped.
 */
async function readCopilotCustomLocations(workspaceRoot) {
  const settings = await readJsoncSettings(
    path14.join(workspaceRoot, ".vscode", "settings.json")
  );
  if (!settings) {
    return [];
  }
  const home = homedir();
  const seen = /* @__PURE__ */ new Set();
  const dynamic = [];
  for (const setting of COPILOT_CUSTOM_LOCATION_SETTINGS) {
    for (const loc of extractCustomLocations(settings[setting.key])) {
      const abs = resolveCustomLocationPath(loc, workspaceRoot, home);
      if (!abs) {
        continue;
      }
      if (setting.kind === "skill-bundle") {
        const dedupKey = `skill-bundle:${abs}`;
        if (seen.has(dedupKey)) {
          continue;
        }
        seen.add(dedupKey);
        dynamic.push({
          kind: "skill-bundle",
          skillsRoot: ".",
          root: "absolute",
          absoluteBase: abs
        });
        continue;
      }
      const dedupKey = `${setting.category}:${abs}`;
      if (seen.has(dedupKey)) {
        continue;
      }
      seen.add(dedupKey);
      dynamic.push({
        kind: "glob",
        glob: setting.glob,
        category: setting.category,
        root: "absolute",
        absoluteBase: abs
      });
    }
  }
  return dynamic;
}
|
|
17505
|
+
/**
 * Read ~/.claude/settings.json and translate recognized Claude Code
 * location settings into dynamic absolute-base scan entries.
 */
async function readClaudeCodeCustomLocations() {
  const home = homedir();
  const settings = await readJsoncSettings(
    path14.join(home, ".claude", "settings.json")
  );
  if (!settings) {
    return [];
  }
  const dynamic = [];
  for (const { key, category, glob } of CLAUDE_CODE_CUSTOM_LOCATION_SETTINGS) {
    const raw = settings[key];
    if (typeof raw !== "string" || raw.length === 0) {
      continue;
    }
    // ${workspaceFolder} makes no sense in a home-scoped setting.
    if (/\$\{workspaceFolder\}/.test(raw)) {
      continue;
    }
    // Resolve relative to home on both axes: this is a user-global setting.
    const abs = resolveCustomLocationPath(raw, home, home);
    if (!abs) {
      continue;
    }
    dynamic.push({
      kind: "glob",
      glob,
      category,
      root: "absolute",
      absoluteBase: abs
    });
  }
  return dynamic;
}
|
|
17536
|
+
/** Dispatch custom-location discovery by platform; unknown platforms get none. */
async function readCustomLocations(workspaceRoot, platform2) {
  switch (platform2) {
    case "copilot":
      return readCopilotCustomLocations(workspaceRoot);
    case "claude-code":
      return readClaudeCodeCustomLocations();
    default:
      return [];
  }
}
|
|
17545
|
+
/**
 * Partition skill-category files into logical skill groups.
 * Files living under a ".../skills/<folder>/" directory are grouped per
 * folder; anything else becomes a standalone single-file skill. Each group
 * carries a stable session key and the newest mtime among its members.
 */
function groupSkills(files, root, baseDir) {
  const SKILLS_MARKER = "skills/";
  const byFolder = /* @__PURE__ */ new Map();
  const standalone = [];
  for (const file of files) {
    if (file.category !== SKILL_CATEGORY) {
      continue;
    }
    const rel = path14.relative(baseDir, file.path).replace(/\\/g, "/");
    // NOTE(review): indexOf matches any "skills/" substring, not only a
    // path-component boundary; kept as-is to preserve grouping behavior.
    const markerIdx = rel.indexOf(SKILLS_MARKER);
    if (markerIdx === -1) {
      standalone.push(file);
      continue;
    }
    const afterMarker = rel.slice(markerIdx + SKILLS_MARKER.length);
    const slashIdx = afterMarker.indexOf("/");
    if (slashIdx === -1) {
      // File sits directly in the skills root — treat as standalone.
      standalone.push(file);
      continue;
    }
    const folderName = afterMarker.slice(0, slashIdx);
    const bucket = byFolder.get(folderName);
    if (bucket) {
      bucket.push(file);
    } else {
      byFolder.set(folderName, [file]);
    }
  }
  const groups = [];
  for (const file of standalone) {
    const name = path14.basename(file.path, path14.extname(file.path));
    groups.push({
      name,
      root,
      skillPath: file.path,
      files: [file],
      isFolder: false,
      maxMtimeMs: file.mtimeMs,
      sessionKey: `skill:${root}:${name}`
    });
  }
  for (const [folderName, folderFiles] of byFolder) {
    // Recover the skill folder's absolute path from any member file.
    const anyFile = folderFiles[0];
    const rel = path14.relative(baseDir, anyFile.path).replace(/\\/g, "/");
    const markerIdx = rel.indexOf(SKILLS_MARKER);
    const folderRel = rel.slice(0, markerIdx + SKILLS_MARKER.length + folderName.length);
    groups.push({
      name: folderName,
      root,
      skillPath: path14.join(baseDir, folderRel),
      files: folderFiles,
      isFolder: true,
      maxMtimeMs: Math.max(...folderFiles.map((f) => f.mtimeMs)),
      sessionKey: `skill:${root}:${folderName}`
    });
  }
  return groups;
}
|
|
17606
|
+
/**
 * Scan all context-file locations for a platform and return what changed.
 * Combines static SCAN_PATHS entries with user-configured dynamic
 * locations, reads each matching file (skipping empty/oversized files and
 * anything already uploaded for `sessionId` at the same mtime), and groups
 * skill files into skill bundles. Returns { regularFiles, skillGroups }.
 */
async function scanContextFiles(workspaceRoot, platform2, sessionId) {
  const staticEntries = SCAN_PATHS[platform2] ?? [];
  const dynamicEntries = await readCustomLocations(workspaceRoot, platform2);
  const entries = [...staticEntries, ...dynamicEntries];
  if (entries.length === 0) {
    return { regularFiles: [], skillGroups: [] };
  }
  // Evict session mtime caches that have been idle past the TTL.
  const now = Date.now();
  for (const [sid, entry] of sessionMtimes) {
    if (now - entry.lastUpdatedAt > SESSION_TTL_MS) {
      sessionMtimes.delete(sid);
    }
  }
  const home = homedir();
  const sessionEntry = sessionId ? sessionMtimes.get(sessionId) : void 0;
  const allFiles = [];
  // batchKey ("root:baseDir") -> { root, baseDir, files }; grouped below.
  const skillBatches = /* @__PURE__ */ new Map();
  // Static and dynamic entries can overlap; process each file only once.
  const seenPaths = /* @__PURE__ */ new Set();
  for (const entry of entries) {
    const baseDir = resolveBaseDir(entry, workspaceRoot, home);
    const scope = scopeForRoot(entry.root);
    const isDynamic = entry.root === "absolute";
    const matches = entry.kind === "skill-bundle" ? await enumerateSkillBundle(baseDir, entry.skillsRoot) : await enumerateGlob(entry.glob, baseDir, entry.category, isDynamic);
    for (const { path: filePath, category } of matches) {
      if (seenPaths.has(filePath)) {
        continue;
      }
      seenPaths.add(filePath);
      try {
        const fileStat = await stat(filePath);
        // Skip empty, oversized, and non-regular files.
        if (fileStat.size === 0 || fileStat.size > MAX_CONTEXT_FILE_SIZE || !fileStat.isFile()) {
          continue;
        }
        const content = await readFile(filePath, "utf-8");
        const sizeBytes = Buffer.byteLength(content, "utf-8");
        const name = deriveIdentifier(filePath, baseDir);
        const fileEntry = {
          name,
          path: filePath,
          content,
          sizeBytes,
          category,
          mtimeMs: fileStat.mtimeMs
        };
        if (scope) {
          fileEntry.scope = scope;
        }
        if (category === SKILL_CATEGORY) {
          // Skill files are batched per (root, baseDir) for grouping;
          // dynamic absolute roots are folded into workspace batches.
          const effectiveRoot = entry.root === "home" ? "home" : "workspace";
          const batchKey = `${effectiveRoot}:${baseDir}`;
          let batch = skillBatches.get(batchKey);
          if (!batch) {
            batch = { root: effectiveRoot, baseDir, files: [] };
            skillBatches.set(batchKey, batch);
          }
          batch.files.push(fileEntry);
        } else {
          // Incremental upload: drop files unchanged since the last
          // upload recorded for this session.
          const prevMtime = sessionEntry?.files.get(filePath);
          if (prevMtime !== void 0 && fileStat.mtimeMs <= prevMtime) {
            continue;
          }
          allFiles.push(fileEntry);
        }
      } catch {
        // File vanished or is unreadable — scanning is best-effort.
      }
    }
  }
  const allSkillGroups = [];
  for (const { root, baseDir, files } of skillBatches.values()) {
    const groups = groupSkills(files, root, baseDir);
    for (const group of groups) {
      // Same incremental rule for whole skill groups, keyed by sessionKey.
      if (sessionEntry) {
        const prevMtime = sessionEntry.skills.get(group.sessionKey);
        if (prevMtime !== void 0 && group.maxMtimeMs <= prevMtime) {
          continue;
        }
      }
      allSkillGroups.push(group);
    }
  }
  return { regularFiles: allFiles, skillGroups: allSkillGroups };
}
|
|
17688
|
+
/**
 * Expand one glob pattern under `cwd` into { path, category } matches.
 * Symlinks are never followed (see inline rationale); dynamic absolute
 * bases get a traversal depth cap. Enumeration failures degrade to [].
 */
async function enumerateGlob(pattern, cwd, category, isDynamic) {
  const options = {
    cwd,
    absolute: true,
    onlyFiles: true,
    dot: true,
    // Never follow symlinks — a malicious committed repo can place a
    // symlink at a scanned path (e.g. .github/copilot-instructions.md →
    // ~/.aws/credentials) and the scanner would read and upload the target.
    followSymbolicLinks: false
  };
  if (isDynamic) {
    // Dynamic absolute bases additionally cap traversal depth so a
    // misconfigured setting can't enumerate the whole filesystem.
    options.deep = DYNAMIC_SCAN_MAX_DEPTH;
  }
  let matches;
  try {
    matches = await globby2(pattern, options);
  } catch {
    return [];
  }
  return matches.map((absPath) => ({ path: absPath, category }));
}
|
|
17709
|
+
/**
 * Enumerate every file belonging to each skill under `<baseDir>/<skillsRoot>`.
 * A skill is a directory containing SKILL.md; all files inside it (bounded
 * depth, symlinks ignored) are returned with category "skill".
 * Errors degrade to empty results.
 */
async function enumerateSkillBundle(baseDir, skillsRoot) {
  const skillsDir = path14.resolve(baseDir, skillsRoot);
  let manifests;
  try {
    manifests = await globby2("*/SKILL.md", {
      cwd: skillsDir,
      absolute: true,
      onlyFiles: true,
      dot: true,
      followSymbolicLinks: false,
      deep: SKILL_MANIFEST_SCAN_DEPTH
    });
  } catch {
    return [];
  }
  const listSkillFiles = async (manifest) => {
    try {
      return await globby2("**/*", {
        cwd: path14.dirname(manifest),
        absolute: true,
        onlyFiles: true,
        dot: true,
        followSymbolicLinks: false,
        deep: SKILL_BUNDLE_MAX_DEPTH
      });
    } catch {
      return [];
    }
  };
  const perSkill = await Promise.all(manifests.map(listSkillFiles));
  return perSkill.flat().map((filePath) => ({ path: filePath, category: "skill" }));
}
|
|
17743
|
+
// Depth caps for globby traversal (defense-in-depth against runaway scans).
var DYNAMIC_SCAN_MAX_DEPTH = 6;
// "*/SKILL.md" manifests live one level under the skills root.
var SKILL_MANIFEST_SCAN_DEPTH = 2;
// Max nesting scanned inside a single skill folder.
var SKILL_BUNDLE_MAX_DEPTH = 5;
|
|
17746
|
+
/** Map a scan entry's root kind to the directory globbing starts from. */
function resolveBaseDir(entry, workspaceRoot, home) {
  if (entry.root === "home") {
    return home;
  }
  if (entry.root === "absolute") {
    return entry.absoluteBase;
  }
  return workspaceRoot;
}
|
|
17756
|
+
/**
 * Files from home or absolute roots are tagged "user-global";
 * workspace-rooted files get no scope tag (undefined).
 */
function scopeForRoot(root) {
  switch (root) {
    case "home":
    case "absolute":
      return "user-global";
    default:
      return void 0;
  }
}
|
|
17762
|
+
/**
 * Derive a stable display identifier for a scanned file: its forward-slash
 * path relative to the scan base, or just the basename when the file lies
 * outside the base directory.
 */
function deriveIdentifier(filePath, baseDir) {
  const rel = path14.relative(baseDir, filePath);
  return rel.startsWith("..") ? path14.basename(filePath) : rel.replace(/\\/g, "/");
}
|
|
17769
|
+
|
|
17770
|
+
// src/features/analysis/skill_quarantine/enumerateInstalledSkills.ts
|
|
17771
|
+
/**
 * Enumerate installed Claude Code skills (workspace + home) with their
 * content hashes, shaped for the quarantine verdict check.
 */
async function enumerateInstalledSkills(workspaceRoot) {
  const scan = await scanContextFiles(workspaceRoot, "claude-code", void 0);
  if (scan.skillGroups.length === 0) {
    return [];
  }
  const { skills } = await processContextFiles([], scan.skillGroups);
  return skills.map((skill) => {
    const segments = skill.group.skillPath.split(/[\\/]/);
    // Fall back to the group name when the path ends in a separator.
    const origName = segments[segments.length - 1] || skill.group.name;
    return {
      skillPath: skill.group.skillPath,
      md5: skill.md5,
      origName,
      isFolder: skill.group.isFolder
    };
  });
}
|
|
17792
|
+
|
|
17793
|
+
// src/features/analysis/skill_quarantine/metrics.ts
|
|
17794
|
+
// Metric names emitted by the skill-quarantine feature; logged as the
// `metric` field on structured log lines so dashboards can aggregate them.
var Metric = {
  /** A heartbeat triggered the quarantine check (after debounce). */
  CHECK_TRIGGERED: "skill_quarantine.check_triggered",
  /** The env-var kill switch skipped the run. */
  CHECK_DISABLED_ENV: "skill_quarantine.check_disabled_env",
  /** Verdict-query call failed. Fail-open. */
  QUERY_ERROR: "skill_quarantine.query_error",
  /** Count of skills enumerated in this run (histogram-ish). */
  SKILLS_CHECKED: "skill_quarantine.skills_checked",
  /** A skill was freshly quarantined. Tagged with shape. */
  QUARANTINED: "skill_quarantine.quarantined",
  /** Presence check hit; skill already quarantined. */
  ALREADY_QUARANTINED: "skill_quarantine.already_quarantined",
  /** Move step failed. Tagged with phase (stage | publish). */
  MOVE_ERROR: "skill_quarantine.move_error",
  /** Stub creation failed after the move succeeded. */
  STUB_ERROR: "skill_quarantine.stub_error",
  /** A stale staging dir was swept. */
  ORPHAN_SWEPT: "skill_quarantine.orphan_swept",
  /** Total run duration including I/O. */
  DURATION_MS: "skill_quarantine.duration_ms"
};
|
|
17816
|
+
|
|
17817
|
+
// src/features/analysis/skill_quarantine/quarantineSkill.ts
|
|
17818
|
+
import { randomUUID } from "crypto";
|
|
17819
|
+
import { existsSync as existsSync2 } from "fs";
|
|
17820
|
+
import {
|
|
17821
|
+
mkdir,
|
|
17822
|
+
readdir,
|
|
17823
|
+
readFile as readFile2,
|
|
17824
|
+
rename,
|
|
17825
|
+
rm,
|
|
17826
|
+
stat as stat2,
|
|
17827
|
+
writeFile
|
|
17828
|
+
} from "fs/promises";
|
|
17829
|
+
import path16 from "path";
|
|
17830
|
+
import { move } from "fs-extra";
|
|
17831
|
+
|
|
17832
|
+
// src/features/analysis/skill_quarantine/paths.ts
|
|
17833
|
+
import { homedir as homedir2 } from "os";
|
|
17834
|
+
import path15 from "path";
|
|
17835
|
+
/** Root directory where quarantined Claude skills are parked. */
function getQuarantineRoot() {
  return path15.join(homedir2(), ".tracy", "quarantine", "claude", "skills");
}
/** Per-skill quarantine directory, keyed by the skill's content MD5. */
function getQuarantinedHashDir(md5) {
  return path15.join(getQuarantineRoot(), md5);
}
/** Final resting place of a quarantined skill, preserving its original name. */
function getQuarantinedTargetPath(md5, origName) {
  return path15.join(getQuarantinedHashDir(md5), origName);
}
/** Process-unique staging dir used for the two-phase (stage → publish) move. */
function getStagingDir(md5, pid, uuid) {
  return path15.join(getQuarantineRoot(), `${md5}_tmp_${pid}_${uuid}`);
}
/** Matches orphaned staging dir names; capture group 1 is the skill MD5. */
var STAGING_DIR_REGEX = /^([0-9a-f]{32})_tmp_/;
|
|
17848
|
+
|
|
17849
|
+
// src/features/analysis/skill_quarantine/stubTemplate.ts
|
|
17850
|
+
// Reason text shown when a verdict predates the summary field in the schema.
var LEGACY_SUMMARY_FALLBACK = "not available (scan predates current schema)";
/**
 * Render the user-facing markdown stub left in place of a quarantined
 * skill: why it was flagged, where the original now lives, and how to
 * restore it or report a false positive. The template text is emitted
 * verbatim into the user's skills folder — do not edit casually.
 */
function renderStub(params) {
  // Wording varies with the skill's on-disk shape (folder vs single file).
  const folderOrFile = params.isFolder ? "skill folder" : "skill file";
  const reason = params.summary ?? LEGACY_SUMMARY_FALLBACK;
  return `# \u26D4 QUARANTINED BY TRACY

This skill was flagged **MALICIOUS** by the Mobb security scanner and has been
moved out of your skills folder. **Claude Code will not execute it** while this
stub is in place.

## Why this skill was flagged

- **Reason:** ${reason}
- **Scanner:** ${params.scannerName} @ ${params.scannerVersion}
- **Scanned at:** ${params.scannedAt}
- **Content hash (MD5):** \`${params.md5}\`

## Where the original is now

The original ${folderOrFile} has been moved to:

${params.quarantinedPath}

Nothing has been deleted. The contents are intact; only the location changed.

## If this is a false positive \u2014 how to recover

If you're confident this skill is safe and want to restore it:

mv ${params.quarantinedPath} ${params.origPath}

Tracy will not re-quarantine it as long as the directory
\`~/.tracy/quarantine/claude/skills/${params.md5}/\` still exists on your
machine (even if it's empty after you moved the contents out). If you delete
that directory entirely, the next heartbeat will re-evaluate the skill from
scratch.

## How to report a false positive

Please email **security@mobb.ai** with:

- The MD5 above
- A short description of why you believe the flag was wrong
- (Optional) the skill folder contents

Your report helps tune the scanner for everyone.
`;
}
|
|
17898
|
+
|
|
17899
|
+
// src/features/analysis/skill_quarantine/quarantineSkill.ts
|
|
17900
|
+
/**
 * Quarantine a skill flagged malicious: two-phase move (stage into a
 * process-unique temp dir, then rename into the md5-keyed quarantine dir),
 * replace the original with an explanatory stub, and pre-register the
 * stub's own hash so the stub is never re-flagged. Returns a discriminated
 * status object; nothing is ever deleted.
 */
async function quarantineSkill(params) {
  const { skillPath, isFolder, md5, origName, verdict, log: log2 } = params;
  const hashDir = getQuarantinedHashDir(md5);
  // Presence of the hash dir is the idempotency marker (also honored if
  // the user emptied it after a manual restore — see stub text).
  if (existsSync2(hashDir)) {
    log2.debug(
      { md5, metric: Metric.ALREADY_QUARANTINED },
      "skill_quarantine: already quarantined, skipping"
    );
    return { status: "already_quarantined" };
  }
  const stagingDir = getStagingDir(md5, process.pid, randomUUID());
  const stagingTarget = path16.join(stagingDir, origName);
  const finalTarget = getQuarantinedTargetPath(md5, origName);
  try {
    await mkdir(stagingDir, { recursive: true });
  } catch (err) {
    log2.error(
      { err, md5, metric: Metric.MOVE_ERROR, phase: "stage" },
      "skill_quarantine: failed to create staging dir"
    );
    return { status: "move_error", phase: "stage", err };
  }
  // Phase 1: move the skill into staging (fs-extra move handles
  // cross-device copies). On failure, best-effort cleanup of staging.
  try {
    await move(skillPath, stagingTarget);
  } catch (err) {
    await tryRm(stagingDir);
    log2.error(
      { err, md5, metric: Metric.MOVE_ERROR, phase: "stage" },
      "skill_quarantine: phase-1 move failed"
    );
    return { status: "move_error", phase: "stage", err };
  }
  // Phase 2: publish by renaming staging -> hash dir (same filesystem, so
  // this is atomic). On failure, staging is kept for manual recovery.
  try {
    await rename(stagingDir, hashDir);
  } catch (err) {
    log2.error(
      {
        err,
        md5,
        stagingDir,
        metric: Metric.MOVE_ERROR,
        phase: "publish"
      },
      "skill_quarantine: phase-2 publish failed; staging dir preserved for manual recovery"
    );
    return { status: "move_error", phase: "publish", err };
  }
  const quarantinedPath = finalTarget;
  const stubContent = renderStub({
    md5,
    isFolder,
    quarantinedPath,
    origPath: skillPath,
    summary: verdict.summary,
    scannerName: verdict.scannerName,
    scannerVersion: verdict.scannerVersion,
    scannedAt: verdict.scannedAt
  });
  // Leave a stub at the original location explaining what happened.
  try {
    if (isFolder) {
      await mkdir(skillPath, { recursive: true });
      await writeFile(path16.join(skillPath, "SKILL.md"), stubContent, "utf8");
    } else {
      await writeFile(skillPath, stubContent, "utf8");
    }
  } catch (err) {
    log2.error(
      { err, md5, skillPath, metric: Metric.STUB_ERROR },
      "skill_quarantine: stub write failed; quarantine is still in place"
    );
    return { status: "stub_error", err };
  }
  await preRegisterStubMd5(skillPath, isFolder, log2);
  log2.info(
    {
      md5,
      verdict: verdict.verdict,
      shape: isFolder ? "folder" : "standalone",
      scanner: verdict.scannerName,
      scannerVersion: verdict.scannerVersion,
      metric: Metric.QUARANTINED
    },
    "skill_quarantine: quarantined"
  );
  return { status: "quarantined" };
}
|
|
17986
|
+
/**
 * Hash the freshly written stub and create an (empty) quarantine hash dir
 * for that hash, so the stub itself is never flagged and re-quarantined on
 * the next heartbeat. Best-effort: failures are logged and ignored.
 */
async function preRegisterStubMd5(skillPath, isFolder, log2) {
  try {
    const files = await gatherStubEntries(skillPath, isFolder);
    const group = {
      name: path16.basename(skillPath).replace(/\.md$/i, ""),
      root: "workspace",
      skillPath,
      files,
      isFolder,
      maxMtimeMs: Date.now(),
      sessionKey: `quarantine-stub:${skillPath}`
    };
    const { skills } = await processContextFiles([], [group]);
    if (skills.length > 0) {
      await mkdir(getQuarantinedHashDir(skills[0].md5), { recursive: true });
    }
  } catch (err) {
    log2.warn(
      { err, skillPath },
      "skill_quarantine: failed to pre-register stub md5"
    );
  }
}
|
|
18009
|
+
async function gatherStubEntries(skillPath, isFolder) {
|
|
18010
|
+
const now = Date.now();
|
|
18011
|
+
const target = isFolder ? path16.join(skillPath, "SKILL.md") : skillPath;
|
|
18012
|
+
const [st, content] = await Promise.all([
|
|
18013
|
+
stat2(target),
|
|
18014
|
+
readFile2(target, "utf8")
|
|
18015
|
+
]);
|
|
18016
|
+
return [
|
|
18017
|
+
{
|
|
18018
|
+
name: isFolder ? "SKILL.md" : path16.basename(skillPath),
|
|
18019
|
+
path: target,
|
|
18020
|
+
content,
|
|
18021
|
+
sizeBytes: st.size,
|
|
18022
|
+
category: "skill",
|
|
18023
|
+
mtimeMs: now
|
|
18024
|
+
}
|
|
18025
|
+
];
|
|
18026
|
+
}
|
|
18027
|
+
async function sweepOrphanStagingDirs(log2) {
|
|
18028
|
+
const root = getQuarantineRoot();
|
|
18029
|
+
let entries;
|
|
18030
|
+
try {
|
|
18031
|
+
entries = await readdir(root);
|
|
18032
|
+
} catch (err) {
|
|
18033
|
+
if (err.code === "ENOENT") return 0;
|
|
18034
|
+
log2.warn({ err, root }, "skill_quarantine: orphan sweep readdir failed");
|
|
18035
|
+
return 0;
|
|
18036
|
+
}
|
|
18037
|
+
const now = Date.now();
|
|
18038
|
+
let swept = 0;
|
|
18039
|
+
for (const entry of entries) {
|
|
18040
|
+
if (!STAGING_DIR_REGEX.test(entry)) continue;
|
|
18041
|
+
const full = path16.join(root, entry);
|
|
18042
|
+
let mtimeMs;
|
|
18043
|
+
try {
|
|
18044
|
+
mtimeMs = (await stat2(full)).mtimeMs;
|
|
18045
|
+
} catch {
|
|
18046
|
+
continue;
|
|
18047
|
+
}
|
|
18048
|
+
if (now - mtimeMs < ORPHAN_SWEEP_GRACE_MS) continue;
|
|
18049
|
+
try {
|
|
18050
|
+
await rm(full, { recursive: true, force: true });
|
|
18051
|
+
swept += 1;
|
|
18052
|
+
log2.info(
|
|
18053
|
+
{ path: full, metric: Metric.ORPHAN_SWEPT },
|
|
18054
|
+
"skill_quarantine: orphan swept"
|
|
18055
|
+
);
|
|
18056
|
+
} catch (err) {
|
|
18057
|
+
log2.warn({ err, path: full }, "skill_quarantine: orphan sweep rm failed");
|
|
18058
|
+
}
|
|
18059
|
+
}
|
|
18060
|
+
return swept;
|
|
18061
|
+
}
|
|
18062
|
+
async function tryRm(p) {
|
|
18063
|
+
try {
|
|
18064
|
+
await rm(p, { recursive: true, force: true });
|
|
18065
|
+
} catch {
|
|
18066
|
+
}
|
|
18067
|
+
}
|
|
18068
|
+
|
|
18069
|
+
// src/features/analysis/skill_quarantine/queryVerdicts.ts
|
|
18070
|
+
async function queryVerdicts(gqlClient, md5s, log2) {
|
|
18071
|
+
if (md5s.length === 0) {
|
|
18072
|
+
return /* @__PURE__ */ new Map();
|
|
18073
|
+
}
|
|
18074
|
+
try {
|
|
18075
|
+
const res = await gqlClient.skillVerdictsByMd5(md5s);
|
|
18076
|
+
const out = /* @__PURE__ */ new Map();
|
|
18077
|
+
for (const row of res.skillVerdictsByMd5) {
|
|
18078
|
+
out.set(row.md5, {
|
|
18079
|
+
md5: row.md5,
|
|
18080
|
+
verdict: row.verdict,
|
|
18081
|
+
summary: row.summary ?? null,
|
|
18082
|
+
scannerName: row.scannerName,
|
|
18083
|
+
scannerVersion: row.scannerVersion,
|
|
18084
|
+
scannedAt: row.scannedAt
|
|
18085
|
+
});
|
|
18086
|
+
}
|
|
18087
|
+
return out;
|
|
18088
|
+
} catch (err) {
|
|
18089
|
+
log2.warn(
|
|
18090
|
+
{ err, md5_count: md5s.length, metric: "skill_quarantine.query_error" },
|
|
18091
|
+
"skill_quarantine: verdict query failed, failing open"
|
|
18092
|
+
);
|
|
18093
|
+
return /* @__PURE__ */ new Map();
|
|
18094
|
+
}
|
|
18095
|
+
}
|
|
18096
|
+
|
|
18097
|
+
// src/features/analysis/skill_quarantine/runQuarantineCheck.ts
|
|
18098
|
+
var lastRunAt = /* @__PURE__ */ new Map();
|
|
18099
|
+
var killSwitchLogged = false;
|
|
18100
|
+
async function runQuarantineCheckIfNeeded(opts) {
|
|
18101
|
+
const { sessionId, cwd, gqlClient, log: log2 } = opts;
|
|
18102
|
+
if (process.env[KILL_SWITCH_ENV] === "1") {
|
|
18103
|
+
if (!killSwitchLogged) {
|
|
18104
|
+
log2.warn(
|
|
18105
|
+
{ metric: Metric.CHECK_DISABLED_ENV },
|
|
18106
|
+
`skill_quarantine: disabled by ${KILL_SWITCH_ENV}=1`
|
|
18107
|
+
);
|
|
18108
|
+
killSwitchLogged = true;
|
|
18109
|
+
}
|
|
18110
|
+
return;
|
|
18111
|
+
}
|
|
18112
|
+
const now = Date.now();
|
|
18113
|
+
const prev = lastRunAt.get(sessionId);
|
|
18114
|
+
if (prev !== void 0 && now - prev < HEARTBEAT_DEBOUNCE_MS) {
|
|
18115
|
+
return;
|
|
18116
|
+
}
|
|
18117
|
+
lastRunAt.set(sessionId, now);
|
|
18118
|
+
log2.info(
|
|
18119
|
+
{ sessionId, metric: Metric.CHECK_TRIGGERED },
|
|
18120
|
+
"skill_quarantine: check start"
|
|
18121
|
+
);
|
|
18122
|
+
const t0 = Date.now();
|
|
18123
|
+
try {
|
|
18124
|
+
await sweepOrphanStagingDirs(log2);
|
|
18125
|
+
const installed = await enumerateInstalledSkills(cwd);
|
|
18126
|
+
log2.info(
|
|
18127
|
+
{ sessionId, count: installed.length, metric: Metric.SKILLS_CHECKED },
|
|
18128
|
+
"skill_quarantine: skills enumerated"
|
|
18129
|
+
);
|
|
18130
|
+
if (installed.length === 0) {
|
|
18131
|
+
return;
|
|
18132
|
+
}
|
|
18133
|
+
const verdicts = await queryVerdicts(
|
|
18134
|
+
gqlClient,
|
|
18135
|
+
installed.map((s) => s.md5),
|
|
18136
|
+
log2
|
|
18137
|
+
);
|
|
18138
|
+
for (const skill of installed) {
|
|
18139
|
+
const verdict = verdicts.get(skill.md5);
|
|
18140
|
+
if (!verdict || verdict.verdict !== MALICIOUS_VERDICT) {
|
|
18141
|
+
continue;
|
|
18142
|
+
}
|
|
18143
|
+
try {
|
|
18144
|
+
await quarantineSkill({
|
|
18145
|
+
skillPath: skill.skillPath,
|
|
18146
|
+
isFolder: skill.isFolder,
|
|
18147
|
+
md5: skill.md5,
|
|
18148
|
+
origName: skill.origName,
|
|
18149
|
+
verdict,
|
|
18150
|
+
log: log2
|
|
18151
|
+
});
|
|
18152
|
+
} catch (err) {
|
|
18153
|
+
log2.error(
|
|
18154
|
+
{ err, md5: skill.md5, skillPath: skill.skillPath },
|
|
18155
|
+
"skill_quarantine: unexpected error during quarantine"
|
|
18156
|
+
);
|
|
18157
|
+
}
|
|
18158
|
+
}
|
|
18159
|
+
} finally {
|
|
18160
|
+
log2.info(
|
|
18161
|
+
{
|
|
18162
|
+
sessionId,
|
|
18163
|
+
duration_ms: Date.now() - t0,
|
|
18164
|
+
metric: Metric.DURATION_MS
|
|
18165
|
+
},
|
|
18166
|
+
"skill_quarantine: check done"
|
|
18167
|
+
);
|
|
18168
|
+
}
|
|
18169
|
+
}
|
|
17021
18170
|
|
|
17022
18171
|
// src/features/claude_code/daemon_pid_file.ts
|
|
17023
18172
|
import fs13 from "fs";
|
|
17024
18173
|
import os4 from "os";
|
|
17025
|
-
import
|
|
18174
|
+
import path17 from "path";
|
|
17026
18175
|
|
|
17027
18176
|
// src/features/claude_code/data_collector_constants.ts
|
|
17028
18177
|
var CC_VERSION_CACHE_KEY = "claudeCode.detectedCCVersion";
|
|
@@ -17031,24 +18180,28 @@ var GQL_AUTH_TIMEOUT_MS = 15e3;
|
|
|
17031
18180
|
var STALE_KEY_MAX_AGE_MS = 14 * 24 * 60 * 60 * 1e3;
|
|
17032
18181
|
var CLEANUP_INTERVAL_MS = 24 * 60 * 60 * 1e3;
|
|
17033
18182
|
var DAEMON_TTL_MS = 30 * 60 * 1e3;
|
|
17034
|
-
var DAEMON_POLL_INTERVAL_MS =
|
|
18183
|
+
var DAEMON_POLL_INTERVAL_MS = (() => {
|
|
18184
|
+
const raw = Number(process.env["MOBB_DAEMON_POLL_INTERVAL_MS"]);
|
|
18185
|
+
if (!Number.isFinite(raw) || raw <= 0) return 1e4;
|
|
18186
|
+
return Math.min(Math.max(raw, 100), 6e4);
|
|
18187
|
+
})();
|
|
17035
18188
|
var HEARTBEAT_STALE_MS = 3e4;
|
|
17036
18189
|
var TRANSCRIPT_MAX_AGE_MS = 24 * 60 * 60 * 1e3;
|
|
17037
18190
|
var DAEMON_CHUNK_SIZE = 50;
|
|
17038
18191
|
|
|
17039
18192
|
// src/features/claude_code/daemon_pid_file.ts
|
|
17040
18193
|
function getMobbdevDir() {
|
|
17041
|
-
return
|
|
18194
|
+
return path17.join(os4.homedir(), ".mobbdev");
|
|
17042
18195
|
}
|
|
17043
18196
|
function getDaemonCheckScriptPath() {
|
|
17044
|
-
return
|
|
18197
|
+
return path17.join(getMobbdevDir(), "daemon-check.js");
|
|
17045
18198
|
}
|
|
17046
18199
|
var DaemonPidFile = class {
|
|
17047
18200
|
constructor() {
|
|
17048
18201
|
__publicField(this, "data", null);
|
|
17049
18202
|
}
|
|
17050
18203
|
get filePath() {
|
|
17051
|
-
return
|
|
18204
|
+
return path17.join(getMobbdevDir(), "daemon.pid");
|
|
17052
18205
|
}
|
|
17053
18206
|
/** Ensure ~/.mobbdev/ directory exists. */
|
|
17054
18207
|
ensureDir() {
|
|
@@ -17109,10 +18262,159 @@ var DaemonPidFile = class {
|
|
|
17109
18262
|
|
|
17110
18263
|
// src/features/claude_code/data_collector.ts
|
|
17111
18264
|
import { execFile } from "child_process";
|
|
17112
|
-
import { createHash as
|
|
17113
|
-
import { access, open as open4, readdir, readFile, unlink } from "fs/promises";
|
|
17114
|
-
import
|
|
18265
|
+
import { createHash as createHash3 } from "crypto";
|
|
18266
|
+
import { access, open as open4, readdir as readdir2, readFile as readFile3, unlink } from "fs/promises";
|
|
18267
|
+
import path18 from "path";
|
|
17115
18268
|
import { promisify } from "util";
|
|
18269
|
+
|
|
18270
|
+
// src/features/analysis/context_file_uploader.ts
|
|
18271
|
+
import pLimit7 from "p-limit";
|
|
18272
|
+
init_client_generates();
|
|
18273
|
+
var UPLOAD_CONCURRENCY = 5;
|
|
18274
|
+
async function uploadContextRecords(opts) {
|
|
18275
|
+
const {
|
|
18276
|
+
processedFiles,
|
|
18277
|
+
processedSkills,
|
|
18278
|
+
keyPrefix,
|
|
18279
|
+
url,
|
|
18280
|
+
uploadFields,
|
|
18281
|
+
sessionId,
|
|
18282
|
+
now,
|
|
18283
|
+
platform: platform2,
|
|
18284
|
+
repositoryUrl,
|
|
18285
|
+
clientVersion,
|
|
18286
|
+
onFileError,
|
|
18287
|
+
onSkillError
|
|
18288
|
+
} = opts;
|
|
18289
|
+
const records = [];
|
|
18290
|
+
const uploadedFiles = [];
|
|
18291
|
+
const uploadedSkillGroups = [];
|
|
18292
|
+
const limit = pLimit7(UPLOAD_CONCURRENCY);
|
|
18293
|
+
const extraFields = {
|
|
18294
|
+
...repositoryUrl !== void 0 && { repositoryUrl },
|
|
18295
|
+
...clientVersion !== void 0 && { clientVersion }
|
|
18296
|
+
};
|
|
18297
|
+
const tasks = [
|
|
18298
|
+
...processedFiles.map(
|
|
18299
|
+
(pf) => limit(async () => {
|
|
18300
|
+
const s3Key = `${keyPrefix}ctx-${pf.md5}.bin`;
|
|
18301
|
+
try {
|
|
18302
|
+
await uploadFile({
|
|
18303
|
+
file: Buffer.from(pf.sanitizedContent, "utf-8"),
|
|
18304
|
+
url,
|
|
18305
|
+
uploadKey: s3Key,
|
|
18306
|
+
uploadFields
|
|
18307
|
+
});
|
|
18308
|
+
} catch (err) {
|
|
18309
|
+
onFileError?.(pf.entry.name, err);
|
|
18310
|
+
return;
|
|
18311
|
+
}
|
|
18312
|
+
records.push({
|
|
18313
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
18314
|
+
platform: platform2,
|
|
18315
|
+
recordId: `ctx:${sessionId}:${pf.md5}`,
|
|
18316
|
+
recordTimestamp: now,
|
|
18317
|
+
blameType: "CHAT" /* Chat */,
|
|
18318
|
+
rawDataS3Key: s3Key,
|
|
18319
|
+
...extraFields,
|
|
18320
|
+
context: {
|
|
18321
|
+
md5: pf.md5,
|
|
18322
|
+
category: pf.entry.category,
|
|
18323
|
+
name: pf.entry.name,
|
|
18324
|
+
sizeBytes: pf.sizeBytes,
|
|
18325
|
+
filePath: pf.entry.path,
|
|
18326
|
+
sessionId
|
|
18327
|
+
}
|
|
18328
|
+
});
|
|
18329
|
+
uploadedFiles.push(pf.entry);
|
|
18330
|
+
})
|
|
18331
|
+
),
|
|
18332
|
+
...processedSkills.map(
|
|
18333
|
+
(ps) => limit(async () => {
|
|
18334
|
+
const s3Key = `${keyPrefix}skill-${ps.md5}.zip`;
|
|
18335
|
+
try {
|
|
18336
|
+
await uploadFile({
|
|
18337
|
+
file: ps.zipBuffer,
|
|
18338
|
+
url,
|
|
18339
|
+
uploadKey: s3Key,
|
|
18340
|
+
uploadFields
|
|
18341
|
+
});
|
|
18342
|
+
} catch (err) {
|
|
18343
|
+
onSkillError?.(ps.group.name, err);
|
|
18344
|
+
return;
|
|
18345
|
+
}
|
|
18346
|
+
records.push({
|
|
18347
|
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
18348
|
+
platform: platform2,
|
|
18349
|
+
recordId: `ctx:${sessionId}:${ps.md5}`,
|
|
18350
|
+
recordTimestamp: now,
|
|
18351
|
+
blameType: "CHAT" /* Chat */,
|
|
18352
|
+
rawDataS3Key: s3Key,
|
|
18353
|
+
...extraFields,
|
|
18354
|
+
context: {
|
|
18355
|
+
md5: ps.md5,
|
|
18356
|
+
category: SKILL_CATEGORY,
|
|
18357
|
+
name: ps.group.name,
|
|
18358
|
+
sizeBytes: ps.sizeBytes,
|
|
18359
|
+
filePath: ps.group.skillPath,
|
|
18360
|
+
sessionId
|
|
18361
|
+
}
|
|
18362
|
+
});
|
|
18363
|
+
uploadedSkillGroups.push(ps.group);
|
|
18364
|
+
})
|
|
18365
|
+
)
|
|
18366
|
+
];
|
|
18367
|
+
await Promise.allSettled(tasks);
|
|
18368
|
+
return { records, uploadedFiles, uploadedSkillGroups };
|
|
18369
|
+
}
|
|
18370
|
+
async function runContextFileUploadPipeline(opts) {
|
|
18371
|
+
const {
|
|
18372
|
+
processedFiles,
|
|
18373
|
+
processedSkills,
|
|
18374
|
+
sessionId,
|
|
18375
|
+
platform: platform2,
|
|
18376
|
+
url,
|
|
18377
|
+
uploadFieldsJSON,
|
|
18378
|
+
keyPrefix,
|
|
18379
|
+
repositoryUrl,
|
|
18380
|
+
clientVersion,
|
|
18381
|
+
submitRecords,
|
|
18382
|
+
onFileError,
|
|
18383
|
+
onSkillError
|
|
18384
|
+
} = opts;
|
|
18385
|
+
let uploadFields;
|
|
18386
|
+
try {
|
|
18387
|
+
uploadFields = JSON.parse(uploadFieldsJSON);
|
|
18388
|
+
} catch {
|
|
18389
|
+
return null;
|
|
18390
|
+
}
|
|
18391
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
18392
|
+
const { records, uploadedFiles, uploadedSkillGroups } = await uploadContextRecords({
|
|
18393
|
+
processedFiles,
|
|
18394
|
+
processedSkills,
|
|
18395
|
+
keyPrefix,
|
|
18396
|
+
url,
|
|
18397
|
+
uploadFields,
|
|
18398
|
+
sessionId,
|
|
18399
|
+
now,
|
|
18400
|
+
platform: platform2,
|
|
18401
|
+
repositoryUrl,
|
|
18402
|
+
clientVersion,
|
|
18403
|
+
onFileError,
|
|
18404
|
+
onSkillError
|
|
18405
|
+
});
|
|
18406
|
+
if (records.length === 0) {
|
|
18407
|
+
return { fileCount: 0, skillCount: 0 };
|
|
18408
|
+
}
|
|
18409
|
+
await submitRecords(records);
|
|
18410
|
+
markContextFilesUploaded(sessionId, uploadedFiles, uploadedSkillGroups);
|
|
18411
|
+
return {
|
|
18412
|
+
fileCount: uploadedFiles.length,
|
|
18413
|
+
skillCount: uploadedSkillGroups.length
|
|
18414
|
+
};
|
|
18415
|
+
}
|
|
18416
|
+
|
|
18417
|
+
// src/features/claude_code/data_collector.ts
|
|
17116
18418
|
init_client_generates();
|
|
17117
18419
|
|
|
17118
18420
|
// src/utils/shared-logger/create-logger.ts
|
|
@@ -17126,6 +18428,8 @@ var DEFAULT_MAX_LOGS = 1e3;
|
|
|
17126
18428
|
var DEFAULT_MAX_HEARTBEAT = 100;
|
|
17127
18429
|
var LOGS_KEY = "logs";
|
|
17128
18430
|
var HEARTBEAT_KEY = "heartbeat";
|
|
18431
|
+
var MAX_DATA_CHARS = 2048;
|
|
18432
|
+
var MAX_SCOPE_KEYS = 20;
|
|
17129
18433
|
function createConfigstoreStream(store, opts) {
|
|
17130
18434
|
const maxLogs = opts.maxLogs ?? DEFAULT_MAX_LOGS;
|
|
17131
18435
|
const maxHeartbeat = opts.maxHeartbeat ?? DEFAULT_MAX_HEARTBEAT;
|
|
@@ -17141,6 +18445,10 @@ function createConfigstoreStream(store, opts) {
|
|
|
17141
18445
|
existing.push(...entries);
|
|
17142
18446
|
const trimmed = existing.length > max ? existing.slice(-max) : existing;
|
|
17143
18447
|
store.set(key, trimmed);
|
|
18448
|
+
const prefix = key.includes(":") ? key.split(":")[0] : null;
|
|
18449
|
+
if (prefix) {
|
|
18450
|
+
pruneStaleScopes(prefix);
|
|
18451
|
+
}
|
|
17144
18452
|
} catch {
|
|
17145
18453
|
try {
|
|
17146
18454
|
const lines = `${entries.map((e) => JSON.stringify(e)).join("\n")}
|
|
@@ -17150,11 +18458,40 @@ function createConfigstoreStream(store, opts) {
|
|
|
17150
18458
|
}
|
|
17151
18459
|
}
|
|
17152
18460
|
}
|
|
18461
|
+
function pruneStaleScopes(prefix) {
|
|
18462
|
+
const allKeys = Object.keys(store.all);
|
|
18463
|
+
const scopedKeys = allKeys.filter(
|
|
18464
|
+
(k) => k.startsWith(`${prefix}:`) && Array.isArray(store.get(k))
|
|
18465
|
+
);
|
|
18466
|
+
if (scopedKeys.length <= MAX_SCOPE_KEYS) {
|
|
18467
|
+
return;
|
|
18468
|
+
}
|
|
18469
|
+
const withTimestamp = scopedKeys.map((k) => {
|
|
18470
|
+
const entries = store.get(k);
|
|
18471
|
+
const last = entries.length > 0 ? entries[entries.length - 1] : void 0;
|
|
18472
|
+
return { key: k, lastTs: last?.timestamp ?? "" };
|
|
18473
|
+
});
|
|
18474
|
+
withTimestamp.sort((a, b) => a.lastTs.localeCompare(b.lastTs));
|
|
18475
|
+
const toDelete = withTimestamp.slice(
|
|
18476
|
+
0,
|
|
18477
|
+
withTimestamp.length - MAX_SCOPE_KEYS
|
|
18478
|
+
);
|
|
18479
|
+
for (const { key: k } of toDelete) {
|
|
18480
|
+
store.delete(k);
|
|
18481
|
+
}
|
|
18482
|
+
}
|
|
17153
18483
|
const writable = new stream.Writable({
|
|
17154
18484
|
write(chunk, _encoding, callback) {
|
|
17155
18485
|
callback();
|
|
17156
18486
|
try {
|
|
17157
18487
|
const parsed = JSON.parse(chunk.toString());
|
|
18488
|
+
let data = parsed.data;
|
|
18489
|
+
if (data !== void 0) {
|
|
18490
|
+
const serialized = JSON.stringify(data);
|
|
18491
|
+
if (serialized.length > MAX_DATA_CHARS) {
|
|
18492
|
+
data = `${serialized.slice(0, MAX_DATA_CHARS)}... [truncated, ${serialized.length} chars]`;
|
|
18493
|
+
}
|
|
18494
|
+
}
|
|
17158
18495
|
const entry = {
|
|
17159
18496
|
timestamp: parsed.time ? new Date(parsed.time).toISOString() : (/* @__PURE__ */ new Date()).toISOString(),
|
|
17160
18497
|
level: parsed.level ?? "info",
|
|
@@ -17162,7 +18499,7 @@ function createConfigstoreStream(store, opts) {
|
|
|
17162
18499
|
...parsed.durationMs !== void 0 && {
|
|
17163
18500
|
durationMs: parsed.durationMs
|
|
17164
18501
|
},
|
|
17165
|
-
...
|
|
18502
|
+
...data !== void 0 && { data }
|
|
17166
18503
|
};
|
|
17167
18504
|
const isHeartbeat = parsed.heartbeat === true;
|
|
17168
18505
|
if (opts.buffered) {
|
|
@@ -17192,8 +18529,8 @@ function createConfigstoreStream(store, opts) {
|
|
|
17192
18529
|
heartbeatBuffer.length = 0;
|
|
17193
18530
|
}
|
|
17194
18531
|
}
|
|
17195
|
-
function setScopePath(
|
|
17196
|
-
scopePath =
|
|
18532
|
+
function setScopePath(path34) {
|
|
18533
|
+
scopePath = path34;
|
|
17197
18534
|
}
|
|
17198
18535
|
return { writable, flush, setScopePath };
|
|
17199
18536
|
}
|
|
@@ -17274,10 +18611,10 @@ function createDdBatch(config2) {
|
|
|
17274
18611
|
}
|
|
17275
18612
|
|
|
17276
18613
|
// src/utils/shared-logger/hostname.ts
|
|
17277
|
-
import { createHash } from "crypto";
|
|
18614
|
+
import { createHash as createHash2 } from "crypto";
|
|
17278
18615
|
import os5 from "os";
|
|
17279
18616
|
function hashString(input) {
|
|
17280
|
-
return
|
|
18617
|
+
return createHash2("sha256").update(input).digest("hex").slice(0, 16);
|
|
17281
18618
|
}
|
|
17282
18619
|
function getPlainHostname() {
|
|
17283
18620
|
try {
|
|
@@ -17417,7 +18754,7 @@ function createLogger(config2) {
|
|
|
17417
18754
|
|
|
17418
18755
|
// src/features/claude_code/hook_logger.ts
|
|
17419
18756
|
var DD_RUM_TOKEN = true ? "pubf59c0182545bfb4c299175119f1abf9b" : "";
|
|
17420
|
-
var CLI_VERSION = true ? "1.
|
|
18757
|
+
var CLI_VERSION = true ? "1.4.0" : "unknown";
|
|
17421
18758
|
var NAMESPACE = "mobbdev-claude-code-hook-logs";
|
|
17422
18759
|
var claudeCodeVersion;
|
|
17423
18760
|
function buildDdTags() {
|
|
@@ -17495,11 +18832,11 @@ async function detectClaudeCodeVersion() {
|
|
|
17495
18832
|
}
|
|
17496
18833
|
function generateSyntheticId(sessionId, timestamp, type2, lineIndex) {
|
|
17497
18834
|
const input = `${sessionId ?? ""}:${timestamp ?? ""}:${type2 ?? ""}:${lineIndex}`;
|
|
17498
|
-
const hash =
|
|
18835
|
+
const hash = createHash3("sha256").update(input).digest("hex").slice(0, 16);
|
|
17499
18836
|
return `synth:${hash}`;
|
|
17500
18837
|
}
|
|
17501
18838
|
function getCursorKey(transcriptPath) {
|
|
17502
|
-
const hash =
|
|
18839
|
+
const hash = createHash3("sha256").update(transcriptPath).digest("hex").slice(0, 12);
|
|
17503
18840
|
return `cursor.${hash}`;
|
|
17504
18841
|
}
|
|
17505
18842
|
async function resolveTranscriptPath(transcriptPath, sessionId) {
|
|
@@ -17508,12 +18845,12 @@ async function resolveTranscriptPath(transcriptPath, sessionId) {
|
|
|
17508
18845
|
return transcriptPath;
|
|
17509
18846
|
} catch {
|
|
17510
18847
|
}
|
|
17511
|
-
const filename =
|
|
17512
|
-
const dirName =
|
|
17513
|
-
const projectsDir =
|
|
18848
|
+
const filename = path18.basename(transcriptPath);
|
|
18849
|
+
const dirName = path18.basename(path18.dirname(transcriptPath));
|
|
18850
|
+
const projectsDir = path18.dirname(path18.dirname(transcriptPath));
|
|
17514
18851
|
const baseDirName = dirName.replace(/[-.]claude-worktrees-.+$/, "");
|
|
17515
18852
|
if (baseDirName !== dirName) {
|
|
17516
|
-
const candidate =
|
|
18853
|
+
const candidate = path18.join(projectsDir, baseDirName, filename);
|
|
17517
18854
|
try {
|
|
17518
18855
|
await access(candidate);
|
|
17519
18856
|
hookLog.info(
|
|
@@ -17532,10 +18869,10 @@ async function resolveTranscriptPath(transcriptPath, sessionId) {
|
|
|
17532
18869
|
}
|
|
17533
18870
|
}
|
|
17534
18871
|
try {
|
|
17535
|
-
const dirs = await
|
|
18872
|
+
const dirs = await readdir2(projectsDir);
|
|
17536
18873
|
for (const dir of dirs) {
|
|
17537
18874
|
if (dir === dirName) continue;
|
|
17538
|
-
const candidate =
|
|
18875
|
+
const candidate = path18.join(projectsDir, dir, filename);
|
|
17539
18876
|
try {
|
|
17540
18877
|
await access(candidate);
|
|
17541
18878
|
hookLog.info(
|
|
@@ -17566,9 +18903,9 @@ async function readNewTranscriptEntries(transcriptPath, sessionId, sessionStore,
|
|
|
17566
18903
|
if (cursor?.byteOffset) {
|
|
17567
18904
|
const fh = await open4(transcriptPath, "r");
|
|
17568
18905
|
try {
|
|
17569
|
-
const
|
|
17570
|
-
fileSize =
|
|
17571
|
-
if (cursor.byteOffset >=
|
|
18906
|
+
const stat4 = await fh.stat();
|
|
18907
|
+
fileSize = stat4.size;
|
|
18908
|
+
if (cursor.byteOffset >= stat4.size) {
|
|
17572
18909
|
hookLog.info({ data: { sessionId } }, "No new data in transcript file");
|
|
17573
18910
|
return {
|
|
17574
18911
|
entries: [],
|
|
@@ -17576,7 +18913,7 @@ async function readNewTranscriptEntries(transcriptPath, sessionId, sessionStore,
|
|
|
17576
18913
|
resolvedTranscriptPath: transcriptPath
|
|
17577
18914
|
};
|
|
17578
18915
|
}
|
|
17579
|
-
const buf = Buffer.alloc(
|
|
18916
|
+
const buf = Buffer.alloc(stat4.size - cursor.byteOffset);
|
|
17580
18917
|
await fh.read(buf, 0, buf.length, cursor.byteOffset);
|
|
17581
18918
|
content = buf.toString("utf-8");
|
|
17582
18919
|
} finally {
|
|
@@ -17594,7 +18931,7 @@ async function readNewTranscriptEntries(transcriptPath, sessionId, sessionStore,
|
|
|
17594
18931
|
"Read transcript file from offset"
|
|
17595
18932
|
);
|
|
17596
18933
|
} else {
|
|
17597
|
-
content = await
|
|
18934
|
+
content = await readFile3(transcriptPath, "utf-8");
|
|
17598
18935
|
fileSize = Buffer.byteLength(content, "utf-8");
|
|
17599
18936
|
lineIndexOffset = 0;
|
|
17600
18937
|
hookLog.debug(
|
|
@@ -17693,14 +19030,6 @@ var FILTERED_ENTRY_TYPES = /* @__PURE__ */ new Set([
|
|
|
17693
19030
|
// Redundant — the actual user prompt is already captured in the 'user' entry.
|
|
17694
19031
|
"last-prompt"
|
|
17695
19032
|
]);
|
|
17696
|
-
var FILTERED_ASSISTANT_TOOLS = /* @__PURE__ */ new Set([
|
|
17697
|
-
// Polls for a sub-agent result. The input is just task_id + boilerplate
|
|
17698
|
-
// (block, timeout). The actual result is captured in the user:tool_result.
|
|
17699
|
-
"TaskOutput",
|
|
17700
|
-
// Discovers available deferred/MCP tools. The input is just a search query.
|
|
17701
|
-
// The discovered tools are captured in the user:tool_result.
|
|
17702
|
-
"ToolSearch"
|
|
17703
|
-
]);
|
|
17704
19033
|
function filterEntries(entries) {
|
|
17705
19034
|
const filtered = entries.filter((entry) => {
|
|
17706
19035
|
const entryType = entry.type ?? "";
|
|
@@ -17712,16 +19041,6 @@ function filterEntries(entries) {
|
|
|
17712
19041
|
const subtype = typeof data?.["type"] === "string" ? data["type"] : "";
|
|
17713
19042
|
return !FILTERED_PROGRESS_SUBTYPES.has(subtype);
|
|
17714
19043
|
}
|
|
17715
|
-
if (entryType === "assistant") {
|
|
17716
|
-
const message = entry["message"];
|
|
17717
|
-
const content = message?.["content"];
|
|
17718
|
-
if (Array.isArray(content) && content.length > 0) {
|
|
17719
|
-
const block = content[0];
|
|
17720
|
-
if (block["type"] === "tool_use" && typeof block["name"] === "string" && FILTERED_ASSISTANT_TOOLS.has(block["name"])) {
|
|
17721
|
-
return false;
|
|
17722
|
-
}
|
|
17723
|
-
}
|
|
17724
|
-
}
|
|
17725
19044
|
return true;
|
|
17726
19045
|
});
|
|
17727
19046
|
return { filtered, filteredOut: entries.length - filtered.length };
|
|
@@ -17734,13 +19053,13 @@ async function cleanupStaleSessions(configDir) {
|
|
|
17734
19053
|
const now = Date.now();
|
|
17735
19054
|
const prefix = getSessionFilePrefix();
|
|
17736
19055
|
try {
|
|
17737
|
-
const files = await
|
|
19056
|
+
const files = await readdir2(configDir);
|
|
17738
19057
|
let deletedCount = 0;
|
|
17739
19058
|
for (const file of files) {
|
|
17740
19059
|
if (!file.startsWith(prefix) || !file.endsWith(".json")) continue;
|
|
17741
|
-
const filePath =
|
|
19060
|
+
const filePath = path18.join(configDir, file);
|
|
17742
19061
|
try {
|
|
17743
|
-
const content = JSON.parse(await
|
|
19062
|
+
const content = JSON.parse(await readFile3(filePath, "utf-8"));
|
|
17744
19063
|
let newest = 0;
|
|
17745
19064
|
const cursors = content["cursor"];
|
|
17746
19065
|
if (cursors && typeof cursors === "object") {
|
|
@@ -17890,6 +19209,16 @@ async function processTranscript(input, sessionStore, log2, maxEntries = DAEMON_
|
|
|
17890
19209
|
entriesSkipped: filteredOut,
|
|
17891
19210
|
claudeCodeVersion: getClaudeCodeVersion()
|
|
17892
19211
|
});
|
|
19212
|
+
if (input.cwd) {
|
|
19213
|
+
uploadContextFilesIfNeeded(
|
|
19214
|
+
input.session_id,
|
|
19215
|
+
input.cwd,
|
|
19216
|
+
gqlClient,
|
|
19217
|
+
log2
|
|
19218
|
+
).catch((err) => {
|
|
19219
|
+
log2.error({ data: { err } }, "uploadContextFilesIfNeeded failed");
|
|
19220
|
+
});
|
|
19221
|
+
}
|
|
17893
19222
|
return {
|
|
17894
19223
|
entriesUploaded: entries.length,
|
|
17895
19224
|
entriesSkipped: filteredOut,
|
|
@@ -17906,19 +19235,84 @@ async function processTranscript(input, sessionStore, log2, maxEntries = DAEMON_
|
|
|
17906
19235
|
errors: entries.length
|
|
17907
19236
|
};
|
|
17908
19237
|
}
|
|
19238
|
+
async function uploadContextFilesIfNeeded(sessionId, cwd, gqlClient, log2) {
|
|
19239
|
+
const { regularFiles, skillGroups } = await scanContextFiles(
|
|
19240
|
+
cwd,
|
|
19241
|
+
"claude-code",
|
|
19242
|
+
sessionId
|
|
19243
|
+
);
|
|
19244
|
+
if (regularFiles.length === 0 && skillGroups.length === 0) {
|
|
19245
|
+
return;
|
|
19246
|
+
}
|
|
19247
|
+
const { files: processedFiles, skills: processedSkills } = await processContextFiles(regularFiles, skillGroups);
|
|
19248
|
+
if (processedFiles.length === 0 && processedSkills.length === 0) {
|
|
19249
|
+
return;
|
|
19250
|
+
}
|
|
19251
|
+
const uploadUrlResult = await gqlClient.getTracyRawDataUploadUrl();
|
|
19252
|
+
const { url, uploadFieldsJSON, keyPrefix } = uploadUrlResult.getTracyRawDataUploadUrl;
|
|
19253
|
+
if (!url || !uploadFieldsJSON || !keyPrefix) {
|
|
19254
|
+
log2.error(
|
|
19255
|
+
{ data: { sessionId } },
|
|
19256
|
+
"Failed to get S3 upload URL for context files"
|
|
19257
|
+
);
|
|
19258
|
+
return;
|
|
19259
|
+
}
|
|
19260
|
+
const pipelineResult = await runContextFileUploadPipeline({
|
|
19261
|
+
processedFiles,
|
|
19262
|
+
processedSkills,
|
|
19263
|
+
sessionId,
|
|
19264
|
+
platform: "CLAUDE_CODE" /* ClaudeCode */,
|
|
19265
|
+
url,
|
|
19266
|
+
uploadFieldsJSON,
|
|
19267
|
+
keyPrefix,
|
|
19268
|
+
submitRecords: async (records) => {
|
|
19269
|
+
const r = await prepareAndSendTracyRecords(gqlClient, records, cwd);
|
|
19270
|
+
if (!r.ok) {
|
|
19271
|
+
throw new Error(r.errors?.join(", ") ?? "batch upload failed");
|
|
19272
|
+
}
|
|
19273
|
+
},
|
|
19274
|
+
onFileError: (name, err) => log2.error(
|
|
19275
|
+
{ data: { sessionId, name, err } },
|
|
19276
|
+
"Failed to upload context file to S3"
|
|
19277
|
+
),
|
|
19278
|
+
onSkillError: (name, err) => log2.error(
|
|
19279
|
+
{ data: { sessionId, name, err } },
|
|
19280
|
+
"Failed to upload skill zip to S3"
|
|
19281
|
+
)
|
|
19282
|
+
});
|
|
19283
|
+
if (pipelineResult === null) {
|
|
19284
|
+
log2.error(
|
|
19285
|
+
{ data: { sessionId } },
|
|
19286
|
+
"Malformed uploadFieldsJSON for context files"
|
|
19287
|
+
);
|
|
19288
|
+
return;
|
|
19289
|
+
}
|
|
19290
|
+
if (pipelineResult.fileCount > 0 || pipelineResult.skillCount > 0) {
|
|
19291
|
+
log2.info(
|
|
19292
|
+
{
|
|
19293
|
+
data: {
|
|
19294
|
+
sessionId,
|
|
19295
|
+
fileCount: pipelineResult.fileCount,
|
|
19296
|
+
skillCount: pipelineResult.skillCount
|
|
19297
|
+
}
|
|
19298
|
+
},
|
|
19299
|
+
"Uploaded context files and skills for session"
|
|
19300
|
+
);
|
|
19301
|
+
}
|
|
19302
|
+
}
|
|
17909
19303
|
|
|
17910
19304
|
// src/features/claude_code/install_hook.ts
|
|
17911
19305
|
import fs14 from "fs";
|
|
17912
19306
|
import fsPromises4 from "fs/promises";
|
|
17913
19307
|
import os6 from "os";
|
|
17914
|
-
import
|
|
19308
|
+
import path19 from "path";
|
|
17915
19309
|
import chalk11 from "chalk";
|
|
17916
19310
|
|
|
17917
19311
|
// src/features/claude_code/daemon-check-shim.tmpl.js
|
|
17918
19312
|
var daemon_check_shim_tmpl_default = "// Mobb daemon shim \u2014 checks if daemon is alive, spawns if dead.\n// Auto-generated by mobbdev CLI. Do not edit.\nvar fs = require('fs')\nvar spawn = require('child_process').spawn\nvar path = require('path')\nvar os = require('os')\n\nvar pidFile = path.join(os.homedir(), '.mobbdev', 'daemon.pid')\nvar HEARTBEAT_STALE_MS = __HEARTBEAT_STALE_MS__\n\ntry {\n var data = JSON.parse(fs.readFileSync(pidFile, 'utf8'))\n if (Date.now() - data.heartbeat > HEARTBEAT_STALE_MS) throw new Error('stale')\n process.kill(data.pid, 0) // throws ESRCH if the process is gone\n} catch (e) {\n var localCli = process.env.MOBBDEV_LOCAL_CLI\n var child = localCli\n ? spawn('node', [localCli, 'claude-code-daemon'], { detached: true, stdio: 'ignore', windowsHide: true })\n : spawn('npx', ['--yes', 'mobbdev@latest', 'claude-code-daemon'], { detached: true, stdio: 'ignore', shell: true, windowsHide: true })\n child.unref()\n}\n";
|
|
17919
19313
|
|
|
17920
19314
|
// src/features/claude_code/install_hook.ts
|
|
17921
|
-
var CLAUDE_SETTINGS_PATH =
|
|
19315
|
+
var CLAUDE_SETTINGS_PATH = path19.join(os6.homedir(), ".claude", "settings.json");
|
|
17922
19316
|
var RECOMMENDED_MATCHER = "*";
|
|
17923
19317
|
async function claudeSettingsExists() {
|
|
17924
19318
|
try {
|
|
@@ -18064,18 +19458,18 @@ async function installMobbHooks(options = {}) {
|
|
|
18064
19458
|
}
|
|
18065
19459
|
|
|
18066
19460
|
// src/features/claude_code/transcript_scanner.ts
|
|
18067
|
-
import { open as open5, readdir as
|
|
19461
|
+
import { open as open5, readdir as readdir3, stat as stat3 } from "fs/promises";
|
|
18068
19462
|
import os7 from "os";
|
|
18069
|
-
import
|
|
19463
|
+
import path20 from "path";
|
|
18070
19464
|
var UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
|
|
18071
19465
|
function getClaudeProjectsDirs() {
|
|
18072
19466
|
const dirs = [];
|
|
18073
19467
|
const configDir = process.env["CLAUDE_CONFIG_DIR"];
|
|
18074
19468
|
if (configDir) {
|
|
18075
|
-
dirs.push(
|
|
19469
|
+
dirs.push(path20.join(configDir, "projects"));
|
|
18076
19470
|
}
|
|
18077
|
-
dirs.push(
|
|
18078
|
-
dirs.push(
|
|
19471
|
+
dirs.push(path20.join(os7.homedir(), ".config", "claude", "projects"));
|
|
19472
|
+
dirs.push(path20.join(os7.homedir(), ".claude", "projects"));
|
|
18079
19473
|
return dirs;
|
|
18080
19474
|
}
|
|
18081
19475
|
async function collectJsonlFiles(files, dir, projectDir, seen, now, results) {
|
|
@@ -18083,12 +19477,12 @@ async function collectJsonlFiles(files, dir, projectDir, seen, now, results) {
|
|
|
18083
19477
|
if (!file.endsWith(".jsonl")) continue;
|
|
18084
19478
|
const sessionId = file.replace(".jsonl", "");
|
|
18085
19479
|
if (!UUID_RE.test(sessionId)) continue;
|
|
18086
|
-
const filePath =
|
|
19480
|
+
const filePath = path20.join(dir, file);
|
|
18087
19481
|
if (seen.has(filePath)) continue;
|
|
18088
19482
|
seen.add(filePath);
|
|
18089
19483
|
let fileStat;
|
|
18090
19484
|
try {
|
|
18091
|
-
fileStat = await
|
|
19485
|
+
fileStat = await stat3(filePath);
|
|
18092
19486
|
} catch {
|
|
18093
19487
|
continue;
|
|
18094
19488
|
}
|
|
@@ -18109,33 +19503,33 @@ async function scanForTranscripts(projectsDirs = getClaudeProjectsDirs()) {
|
|
|
18109
19503
|
for (const projectsDir of projectsDirs) {
|
|
18110
19504
|
let projectDirs;
|
|
18111
19505
|
try {
|
|
18112
|
-
projectDirs = await
|
|
19506
|
+
projectDirs = await readdir3(projectsDir);
|
|
18113
19507
|
} catch {
|
|
18114
19508
|
continue;
|
|
18115
19509
|
}
|
|
18116
19510
|
for (const projName of projectDirs) {
|
|
18117
|
-
const projPath =
|
|
19511
|
+
const projPath = path20.join(projectsDir, projName);
|
|
18118
19512
|
let projStat;
|
|
18119
19513
|
try {
|
|
18120
|
-
projStat = await
|
|
19514
|
+
projStat = await stat3(projPath);
|
|
18121
19515
|
} catch {
|
|
18122
19516
|
continue;
|
|
18123
19517
|
}
|
|
18124
19518
|
if (!projStat.isDirectory()) continue;
|
|
18125
19519
|
let files;
|
|
18126
19520
|
try {
|
|
18127
|
-
files = await
|
|
19521
|
+
files = await readdir3(projPath);
|
|
18128
19522
|
} catch {
|
|
18129
19523
|
continue;
|
|
18130
19524
|
}
|
|
18131
19525
|
await collectJsonlFiles(files, projPath, projPath, seen, now, results);
|
|
18132
19526
|
for (const entry of files) {
|
|
18133
19527
|
if (!UUID_RE.test(entry)) continue;
|
|
18134
|
-
const subagentsDir =
|
|
19528
|
+
const subagentsDir = path20.join(projPath, entry, "subagents");
|
|
18135
19529
|
try {
|
|
18136
|
-
const s = await
|
|
19530
|
+
const s = await stat3(subagentsDir);
|
|
18137
19531
|
if (!s.isDirectory()) continue;
|
|
18138
|
-
const subFiles = await
|
|
19532
|
+
const subFiles = await readdir3(subagentsDir);
|
|
18139
19533
|
await collectJsonlFiles(
|
|
18140
19534
|
subFiles,
|
|
18141
19535
|
subagentsDir,
|
|
@@ -18185,6 +19579,7 @@ async function extractCwdFromTranscript(filePath) {
|
|
|
18185
19579
|
// src/features/claude_code/daemon.ts
|
|
18186
19580
|
async function startDaemon() {
|
|
18187
19581
|
hookLog.info("Daemon starting");
|
|
19582
|
+
pruneHookLogFile();
|
|
18188
19583
|
const pidFile = await acquirePidFile();
|
|
18189
19584
|
async function gracefulExit(code, reason) {
|
|
18190
19585
|
hookLog.info({ data: { code } }, `Daemon exiting: ${reason}`);
|
|
@@ -18231,7 +19626,7 @@ async function startDaemon() {
|
|
|
18231
19626
|
for (const transcript of changed) {
|
|
18232
19627
|
const sessionStore = createSessionConfigStore(transcript.sessionId);
|
|
18233
19628
|
if (!cleanupConfigDir) {
|
|
18234
|
-
cleanupConfigDir =
|
|
19629
|
+
cleanupConfigDir = path21.dirname(sessionStore.path);
|
|
18235
19630
|
}
|
|
18236
19631
|
await drainTranscript(transcript, sessionStore, gqlClient);
|
|
18237
19632
|
}
|
|
@@ -18313,6 +19708,19 @@ async function drainTranscript(transcript, sessionStore, gqlClient) {
|
|
|
18313
19708
|
"Error processing transcript \u2014 skipping"
|
|
18314
19709
|
);
|
|
18315
19710
|
}
|
|
19711
|
+
if (cwd) {
|
|
19712
|
+
runQuarantineCheckIfNeeded({
|
|
19713
|
+
sessionId: transcript.sessionId,
|
|
19714
|
+
cwd,
|
|
19715
|
+
gqlClient,
|
|
19716
|
+
log: log2
|
|
19717
|
+
}).catch((err) => {
|
|
19718
|
+
hookLog.warn(
|
|
19719
|
+
{ err, data: { sessionId: transcript.sessionId } },
|
|
19720
|
+
"runQuarantineCheckIfNeeded failed"
|
|
19721
|
+
);
|
|
19722
|
+
});
|
|
19723
|
+
}
|
|
18316
19724
|
}
|
|
18317
19725
|
async function detectChangedTranscripts(lastSeen) {
|
|
18318
19726
|
const transcripts = await scanForTranscripts();
|
|
@@ -18346,6 +19754,53 @@ async function tryAutoUpgradeHooks() {
|
|
|
18346
19754
|
hookLog.warn({ err }, "Failed to auto-upgrade hook matcher");
|
|
18347
19755
|
}
|
|
18348
19756
|
}
|
|
19757
|
+
var HOOK_LOG_MAX_SCOPE_KEYS = 20;
|
|
19758
|
+
var HOOK_LOG_MAX_ENTRIES_PER_KEY = 200;
|
|
19759
|
+
function pruneHookLogFile() {
|
|
19760
|
+
const logFilePath = new Configstore3("mobbdev-claude-code-hook-logs").path;
|
|
19761
|
+
try {
|
|
19762
|
+
const raw = readFileSync(logFilePath, "utf-8");
|
|
19763
|
+
const data = JSON.parse(raw);
|
|
19764
|
+
const prefixes = /* @__PURE__ */ new Map();
|
|
19765
|
+
for (const key of Object.keys(data)) {
|
|
19766
|
+
const colonIdx = key.indexOf(":");
|
|
19767
|
+
const prefix = colonIdx > 0 ? key.slice(0, colonIdx) : key;
|
|
19768
|
+
const group = prefixes.get(prefix) ?? [];
|
|
19769
|
+
group.push(key);
|
|
19770
|
+
prefixes.set(prefix, group);
|
|
19771
|
+
}
|
|
19772
|
+
let changed = false;
|
|
19773
|
+
for (const [, keys] of prefixes) {
|
|
19774
|
+
if (keys.length <= HOOK_LOG_MAX_SCOPE_KEYS) {
|
|
19775
|
+
continue;
|
|
19776
|
+
}
|
|
19777
|
+
const withTs = keys.map((k) => {
|
|
19778
|
+
const val = data[k];
|
|
19779
|
+
if (!Array.isArray(val) || val.length === 0) {
|
|
19780
|
+
return { key: k, lastTs: "" };
|
|
19781
|
+
}
|
|
19782
|
+
const last = val[val.length - 1];
|
|
19783
|
+
return { key: k, lastTs: last?.timestamp ?? "" };
|
|
19784
|
+
}).sort((a, b) => a.lastTs.localeCompare(b.lastTs));
|
|
19785
|
+
const toDelete = withTs.slice(0, withTs.length - HOOK_LOG_MAX_SCOPE_KEYS);
|
|
19786
|
+
for (const { key } of toDelete) {
|
|
19787
|
+
delete data[key];
|
|
19788
|
+
changed = true;
|
|
19789
|
+
}
|
|
19790
|
+
}
|
|
19791
|
+
for (const [key, val] of Object.entries(data)) {
|
|
19792
|
+
if (Array.isArray(val) && val.length > HOOK_LOG_MAX_ENTRIES_PER_KEY) {
|
|
19793
|
+
data[key] = val.slice(-HOOK_LOG_MAX_ENTRIES_PER_KEY);
|
|
19794
|
+
changed = true;
|
|
19795
|
+
}
|
|
19796
|
+
}
|
|
19797
|
+
if (changed) {
|
|
19798
|
+
writeFileSync2(logFilePath, JSON.stringify(data, null, " "));
|
|
19799
|
+
hookLog.info("Pruned hook log file");
|
|
19800
|
+
}
|
|
19801
|
+
} catch {
|
|
19802
|
+
}
|
|
19803
|
+
}
|
|
18349
19804
|
|
|
18350
19805
|
// src/args/commands/claude_code.ts
|
|
18351
19806
|
var claudeCodeInstallHookBuilder = (yargs2) => {
|
|
@@ -18434,7 +19889,7 @@ import {
|
|
|
18434
19889
|
} from "@modelcontextprotocol/sdk/types.js";
|
|
18435
19890
|
|
|
18436
19891
|
// src/mcp/Logger.ts
|
|
18437
|
-
import
|
|
19892
|
+
import Configstore4 from "configstore";
|
|
18438
19893
|
|
|
18439
19894
|
// src/mcp/services/WorkspaceService.ts
|
|
18440
19895
|
var WorkspaceService = class {
|
|
@@ -18442,8 +19897,8 @@ var WorkspaceService = class {
|
|
|
18442
19897
|
* Sets a known workspace path that was discovered through successful validation
|
|
18443
19898
|
* @param path The validated workspace path to store
|
|
18444
19899
|
*/
|
|
18445
|
-
static setKnownWorkspacePath(
|
|
18446
|
-
this.knownWorkspacePath =
|
|
19900
|
+
static setKnownWorkspacePath(path34) {
|
|
19901
|
+
this.knownWorkspacePath = path34;
|
|
18447
19902
|
}
|
|
18448
19903
|
/**
|
|
18449
19904
|
* Gets the known workspace path that was previously validated
|
|
@@ -18520,7 +19975,7 @@ var Logger = class {
|
|
|
18520
19975
|
__publicField(this, "lastKnownPath", null);
|
|
18521
19976
|
this.host = WorkspaceService.getHost();
|
|
18522
19977
|
this.unknownPathSuffix = Math.floor(1e3 + Math.random() * 9e3).toString();
|
|
18523
|
-
this.mobbConfigStore = new
|
|
19978
|
+
this.mobbConfigStore = new Configstore4("mobb-logs", {});
|
|
18524
19979
|
this.mobbConfigStore.set("version", packageJson.version);
|
|
18525
19980
|
}
|
|
18526
19981
|
/**
|
|
@@ -19304,7 +20759,7 @@ async function createAuthenticatedMcpGQLClient({
|
|
|
19304
20759
|
import { execSync as execSync2 } from "child_process";
|
|
19305
20760
|
import fs15 from "fs";
|
|
19306
20761
|
import os8 from "os";
|
|
19307
|
-
import
|
|
20762
|
+
import path22 from "path";
|
|
19308
20763
|
var IDEs = ["cursor", "windsurf", "webstorm", "vscode", "claude"];
|
|
19309
20764
|
var runCommand = (cmd) => {
|
|
19310
20765
|
try {
|
|
@@ -19319,7 +20774,7 @@ var gitInfo = {
|
|
|
19319
20774
|
};
|
|
19320
20775
|
var getClaudeWorkspacePaths = () => {
|
|
19321
20776
|
const home = os8.homedir();
|
|
19322
|
-
const claudeIdePath =
|
|
20777
|
+
const claudeIdePath = path22.join(home, ".claude", "ide");
|
|
19323
20778
|
const workspacePaths = [];
|
|
19324
20779
|
if (!fs15.existsSync(claudeIdePath)) {
|
|
19325
20780
|
return workspacePaths;
|
|
@@ -19327,7 +20782,7 @@ var getClaudeWorkspacePaths = () => {
|
|
|
19327
20782
|
try {
|
|
19328
20783
|
const lockFiles = fs15.readdirSync(claudeIdePath).filter((file) => file.endsWith(".lock"));
|
|
19329
20784
|
for (const lockFile of lockFiles) {
|
|
19330
|
-
const lockFilePath =
|
|
20785
|
+
const lockFilePath = path22.join(claudeIdePath, lockFile);
|
|
19331
20786
|
try {
|
|
19332
20787
|
const lockContent = JSON.parse(fs15.readFileSync(lockFilePath, "utf8"));
|
|
19333
20788
|
if (lockContent.workspaceFolders && Array.isArray(lockContent.workspaceFolders)) {
|
|
@@ -19352,24 +20807,24 @@ var getMCPConfigPaths = (hostName) => {
|
|
|
19352
20807
|
switch (hostName.toLowerCase()) {
|
|
19353
20808
|
case "cursor":
|
|
19354
20809
|
return [
|
|
19355
|
-
|
|
20810
|
+
path22.join(currentDir, ".cursor", "mcp.json"),
|
|
19356
20811
|
// local first
|
|
19357
|
-
|
|
20812
|
+
path22.join(home, ".cursor", "mcp.json")
|
|
19358
20813
|
];
|
|
19359
20814
|
case "windsurf":
|
|
19360
20815
|
return [
|
|
19361
|
-
|
|
20816
|
+
path22.join(currentDir, ".codeium", "mcp_config.json"),
|
|
19362
20817
|
// local first
|
|
19363
|
-
|
|
20818
|
+
path22.join(home, ".codeium", "windsurf", "mcp_config.json")
|
|
19364
20819
|
];
|
|
19365
20820
|
case "webstorm":
|
|
19366
20821
|
return [];
|
|
19367
20822
|
case "visualstudiocode":
|
|
19368
20823
|
case "vscode":
|
|
19369
20824
|
return [
|
|
19370
|
-
|
|
20825
|
+
path22.join(currentDir, ".vscode", "mcp.json"),
|
|
19371
20826
|
// local first
|
|
19372
|
-
process.platform === "win32" ?
|
|
20827
|
+
process.platform === "win32" ? path22.join(home, "AppData", "Roaming", "Code", "User", "mcp.json") : path22.join(
|
|
19373
20828
|
home,
|
|
19374
20829
|
"Library",
|
|
19375
20830
|
"Application Support",
|
|
@@ -19380,13 +20835,13 @@ var getMCPConfigPaths = (hostName) => {
|
|
|
19380
20835
|
];
|
|
19381
20836
|
case "claude": {
|
|
19382
20837
|
const claudePaths = [
|
|
19383
|
-
|
|
20838
|
+
path22.join(currentDir, ".claude.json"),
|
|
19384
20839
|
// local first
|
|
19385
|
-
|
|
20840
|
+
path22.join(home, ".claude.json")
|
|
19386
20841
|
];
|
|
19387
20842
|
const workspacePaths = getClaudeWorkspacePaths();
|
|
19388
20843
|
for (const workspacePath of workspacePaths) {
|
|
19389
|
-
claudePaths.push(
|
|
20844
|
+
claudePaths.push(path22.join(workspacePath, ".mcp.json"));
|
|
19390
20845
|
}
|
|
19391
20846
|
return claudePaths;
|
|
19392
20847
|
}
|
|
@@ -19547,10 +21002,10 @@ var getHostInfo = (additionalMcpList) => {
|
|
|
19547
21002
|
const ideConfigPaths = /* @__PURE__ */ new Set();
|
|
19548
21003
|
for (const ide of IDEs) {
|
|
19549
21004
|
const configPaths = getMCPConfigPaths(ide);
|
|
19550
|
-
configPaths.forEach((
|
|
21005
|
+
configPaths.forEach((path34) => ideConfigPaths.add(path34));
|
|
19551
21006
|
}
|
|
19552
21007
|
const uniqueAdditionalPaths = additionalMcpList.filter(
|
|
19553
|
-
(
|
|
21008
|
+
(path34) => !ideConfigPaths.has(path34)
|
|
19554
21009
|
);
|
|
19555
21010
|
for (const ide of IDEs) {
|
|
19556
21011
|
const cfg = readMCPConfig(ide);
|
|
@@ -19672,7 +21127,7 @@ init_configs();
|
|
|
19672
21127
|
init_configs();
|
|
19673
21128
|
import fs16 from "fs";
|
|
19674
21129
|
import os9 from "os";
|
|
19675
|
-
import
|
|
21130
|
+
import path23 from "path";
|
|
19676
21131
|
var MAX_DEPTH = 2;
|
|
19677
21132
|
var patterns = ["mcp", "claude"];
|
|
19678
21133
|
var isFileMatch = (fileName) => {
|
|
@@ -19692,7 +21147,7 @@ var searchDir = async (dir, depth = 0) => {
|
|
|
19692
21147
|
if (depth > MAX_DEPTH) return results;
|
|
19693
21148
|
const entries = await fs16.promises.readdir(dir, { withFileTypes: true }).catch(() => []);
|
|
19694
21149
|
for (const entry of entries) {
|
|
19695
|
-
const fullPath =
|
|
21150
|
+
const fullPath = path23.join(dir, entry.name);
|
|
19696
21151
|
if (entry.isFile() && isFileMatch(entry.name)) {
|
|
19697
21152
|
results.push(fullPath);
|
|
19698
21153
|
} else if (entry.isDirectory()) {
|
|
@@ -19709,14 +21164,14 @@ var findSystemMCPConfigs = async () => {
|
|
|
19709
21164
|
const home = os9.homedir();
|
|
19710
21165
|
const platform2 = os9.platform();
|
|
19711
21166
|
const knownDirs = platform2 === "win32" ? [
|
|
19712
|
-
|
|
19713
|
-
|
|
19714
|
-
|
|
21167
|
+
path23.join(home, ".cursor"),
|
|
21168
|
+
path23.join(home, "Documents"),
|
|
21169
|
+
path23.join(home, "Downloads")
|
|
19715
21170
|
] : [
|
|
19716
|
-
|
|
19717
|
-
process.env["XDG_CONFIG_HOME"] ||
|
|
19718
|
-
|
|
19719
|
-
|
|
21171
|
+
path23.join(home, ".cursor"),
|
|
21172
|
+
process.env["XDG_CONFIG_HOME"] || path23.join(home, ".config"),
|
|
21173
|
+
path23.join(home, "Documents"),
|
|
21174
|
+
path23.join(home, "Downloads")
|
|
19720
21175
|
];
|
|
19721
21176
|
const timeoutPromise = new Promise(
|
|
19722
21177
|
(resolve) => setTimeout(() => {
|
|
@@ -22132,13 +23587,13 @@ For a complete security audit workflow, use the \`full-security-audit\` prompt.
|
|
|
22132
23587
|
// src/mcp/services/McpDetectionService/CursorMcpDetectionService.ts
|
|
22133
23588
|
import * as fs19 from "fs";
|
|
22134
23589
|
import * as os12 from "os";
|
|
22135
|
-
import * as
|
|
23590
|
+
import * as path25 from "path";
|
|
22136
23591
|
|
|
22137
23592
|
// src/mcp/services/McpDetectionService/BaseMcpDetectionService.ts
|
|
22138
23593
|
init_configs();
|
|
22139
23594
|
import * as fs18 from "fs";
|
|
22140
23595
|
import fetch7 from "node-fetch";
|
|
22141
|
-
import * as
|
|
23596
|
+
import * as path24 from "path";
|
|
22142
23597
|
|
|
22143
23598
|
// src/mcp/services/McpDetectionService/McpDetectionServiceUtils.ts
|
|
22144
23599
|
import * as fs17 from "fs";
|
|
@@ -22147,14 +23602,14 @@ import * as os11 from "os";
|
|
|
22147
23602
|
// src/mcp/services/McpDetectionService/VscodeMcpDetectionService.ts
|
|
22148
23603
|
import * as fs20 from "fs";
|
|
22149
23604
|
import * as os13 from "os";
|
|
22150
|
-
import * as
|
|
23605
|
+
import * as path26 from "path";
|
|
22151
23606
|
|
|
22152
23607
|
// src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesTool.ts
|
|
22153
23608
|
import { z as z42 } from "zod";
|
|
22154
23609
|
|
|
22155
23610
|
// src/mcp/services/PathValidation.ts
|
|
22156
23611
|
import fs21 from "fs";
|
|
22157
|
-
import
|
|
23612
|
+
import path27 from "path";
|
|
22158
23613
|
async function validatePath(inputPath) {
|
|
22159
23614
|
logDebug("Validating MCP path", { inputPath });
|
|
22160
23615
|
if (/^\/[a-zA-Z]:\//.test(inputPath)) {
|
|
@@ -22186,7 +23641,7 @@ async function validatePath(inputPath) {
|
|
|
22186
23641
|
logError(error);
|
|
22187
23642
|
return { isValid: false, error, path: inputPath };
|
|
22188
23643
|
}
|
|
22189
|
-
const normalizedPath =
|
|
23644
|
+
const normalizedPath = path27.normalize(inputPath);
|
|
22190
23645
|
if (normalizedPath.includes("..")) {
|
|
22191
23646
|
const error = `Normalized path contains path traversal patterns: ${inputPath}`;
|
|
22192
23647
|
logError(error);
|
|
@@ -22838,7 +24293,7 @@ init_configs();
|
|
|
22838
24293
|
import fs22 from "fs/promises";
|
|
22839
24294
|
import nodePath from "path";
|
|
22840
24295
|
var getLocalFiles = async ({
|
|
22841
|
-
path:
|
|
24296
|
+
path: path34,
|
|
22842
24297
|
maxFileSize = MCP_MAX_FILE_SIZE,
|
|
22843
24298
|
maxFiles,
|
|
22844
24299
|
isAllFilesScan,
|
|
@@ -22846,17 +24301,17 @@ var getLocalFiles = async ({
|
|
|
22846
24301
|
scanRecentlyChangedFiles
|
|
22847
24302
|
}) => {
|
|
22848
24303
|
logDebug(`[${scanContext}] Starting getLocalFiles`, {
|
|
22849
|
-
path:
|
|
24304
|
+
path: path34,
|
|
22850
24305
|
maxFileSize,
|
|
22851
24306
|
maxFiles,
|
|
22852
24307
|
isAllFilesScan,
|
|
22853
24308
|
scanRecentlyChangedFiles
|
|
22854
24309
|
});
|
|
22855
24310
|
try {
|
|
22856
|
-
const resolvedRepoPath = await fs22.realpath(
|
|
24311
|
+
const resolvedRepoPath = await fs22.realpath(path34);
|
|
22857
24312
|
logDebug(`[${scanContext}] Resolved repository path`, {
|
|
22858
24313
|
resolvedRepoPath,
|
|
22859
|
-
originalPath:
|
|
24314
|
+
originalPath: path34
|
|
22860
24315
|
});
|
|
22861
24316
|
const gitService = new GitService(resolvedRepoPath, log);
|
|
22862
24317
|
const gitValidation = await gitService.validateRepository();
|
|
@@ -22869,7 +24324,7 @@ var getLocalFiles = async ({
|
|
|
22869
24324
|
if (!gitValidation.isValid || isAllFilesScan) {
|
|
22870
24325
|
try {
|
|
22871
24326
|
files = await FileUtils.getLastChangedFiles({
|
|
22872
|
-
dir:
|
|
24327
|
+
dir: path34,
|
|
22873
24328
|
maxFileSize,
|
|
22874
24329
|
maxFiles,
|
|
22875
24330
|
isAllFilesScan
|
|
@@ -22961,7 +24416,7 @@ var getLocalFiles = async ({
|
|
|
22961
24416
|
logError(`${scanContext}Unexpected error in getLocalFiles`, {
|
|
22962
24417
|
error: error instanceof Error ? error.message : String(error),
|
|
22963
24418
|
stack: error instanceof Error ? error.stack : void 0,
|
|
22964
|
-
path:
|
|
24419
|
+
path: path34
|
|
22965
24420
|
});
|
|
22966
24421
|
throw error;
|
|
22967
24422
|
}
|
|
@@ -22971,7 +24426,7 @@ var getLocalFiles = async ({
|
|
|
22971
24426
|
init_client_generates();
|
|
22972
24427
|
init_GitService();
|
|
22973
24428
|
import fs23 from "fs";
|
|
22974
|
-
import
|
|
24429
|
+
import path28 from "path";
|
|
22975
24430
|
import { z as z41 } from "zod";
|
|
22976
24431
|
function extractPathFromPatch(patch) {
|
|
22977
24432
|
const match = patch?.match(/diff --git a\/([^\s]+) b\//);
|
|
@@ -23057,7 +24512,7 @@ var LocalMobbFolderService = class {
|
|
|
23057
24512
|
"[LocalMobbFolderService] Non-git repository detected, skipping .gitignore operations"
|
|
23058
24513
|
);
|
|
23059
24514
|
}
|
|
23060
|
-
const mobbFolderPath =
|
|
24515
|
+
const mobbFolderPath = path28.join(
|
|
23061
24516
|
this.repoPath,
|
|
23062
24517
|
this.defaultMobbFolderName
|
|
23063
24518
|
);
|
|
@@ -23229,7 +24684,7 @@ var LocalMobbFolderService = class {
|
|
|
23229
24684
|
mobbFolderPath,
|
|
23230
24685
|
baseFileName
|
|
23231
24686
|
);
|
|
23232
|
-
const filePath =
|
|
24687
|
+
const filePath = path28.join(mobbFolderPath, uniqueFileName);
|
|
23233
24688
|
await fs23.promises.writeFile(filePath, patch, "utf8");
|
|
23234
24689
|
logInfo("[LocalMobbFolderService] Patch saved successfully", {
|
|
23235
24690
|
filePath,
|
|
@@ -23287,11 +24742,11 @@ var LocalMobbFolderService = class {
|
|
|
23287
24742
|
* @returns Unique filename that doesn't conflict with existing files
|
|
23288
24743
|
*/
|
|
23289
24744
|
getUniqueFileName(folderPath, baseFileName) {
|
|
23290
|
-
const baseName =
|
|
23291
|
-
const extension =
|
|
24745
|
+
const baseName = path28.parse(baseFileName).name;
|
|
24746
|
+
const extension = path28.parse(baseFileName).ext;
|
|
23292
24747
|
let uniqueFileName = baseFileName;
|
|
23293
24748
|
let index = 1;
|
|
23294
|
-
while (fs23.existsSync(
|
|
24749
|
+
while (fs23.existsSync(path28.join(folderPath, uniqueFileName))) {
|
|
23295
24750
|
uniqueFileName = `${baseName}-${index}${extension}`;
|
|
23296
24751
|
index++;
|
|
23297
24752
|
if (index > 1e3) {
|
|
@@ -23322,7 +24777,7 @@ var LocalMobbFolderService = class {
|
|
|
23322
24777
|
logDebug("[LocalMobbFolderService] Logging patch info", { fixId: fix.id });
|
|
23323
24778
|
try {
|
|
23324
24779
|
const mobbFolderPath = await this.getFolder();
|
|
23325
|
-
const patchInfoPath =
|
|
24780
|
+
const patchInfoPath = path28.join(mobbFolderPath, "patchInfo.md");
|
|
23326
24781
|
const markdownContent = this.generateFixMarkdown(fix, savedPatchFileName);
|
|
23327
24782
|
let existingContent = "";
|
|
23328
24783
|
if (fs23.existsSync(patchInfoPath)) {
|
|
@@ -23364,7 +24819,7 @@ var LocalMobbFolderService = class {
|
|
|
23364
24819
|
const timestamp = (/* @__PURE__ */ new Date()).toISOString();
|
|
23365
24820
|
const patch = this.extractPatchFromFix(fix);
|
|
23366
24821
|
const relativePatchedFilePath = patch ? extractPathFromPatch(patch) : null;
|
|
23367
|
-
const patchedFilePath = relativePatchedFilePath ?
|
|
24822
|
+
const patchedFilePath = relativePatchedFilePath ? path28.resolve(this.repoPath, relativePatchedFilePath) : null;
|
|
23368
24823
|
const fixIdentifier = savedPatchFileName ? savedPatchFileName.replace(".patch", "") : fix.id;
|
|
23369
24824
|
let markdown = `# Fix ${fixIdentifier}
|
|
23370
24825
|
|
|
@@ -23700,22 +25155,22 @@ var LocalMobbFolderService = class {
|
|
|
23700
25155
|
// src/mcp/services/PatchApplicationService.ts
|
|
23701
25156
|
init_configs();
|
|
23702
25157
|
import {
|
|
23703
|
-
existsSync as
|
|
25158
|
+
existsSync as existsSync7,
|
|
23704
25159
|
mkdirSync,
|
|
23705
|
-
readFileSync as
|
|
25160
|
+
readFileSync as readFileSync4,
|
|
23706
25161
|
unlinkSync,
|
|
23707
|
-
writeFileSync as
|
|
25162
|
+
writeFileSync as writeFileSync3
|
|
23708
25163
|
} from "fs";
|
|
23709
25164
|
import fs24 from "fs/promises";
|
|
23710
25165
|
import parseDiff2 from "parse-diff";
|
|
23711
|
-
import
|
|
25166
|
+
import path29 from "path";
|
|
23712
25167
|
var PatchApplicationService = class {
|
|
23713
25168
|
/**
|
|
23714
25169
|
* Gets the appropriate comment syntax for a file based on its extension
|
|
23715
25170
|
*/
|
|
23716
25171
|
static getCommentSyntax(filePath) {
|
|
23717
|
-
const ext =
|
|
23718
|
-
const basename2 =
|
|
25172
|
+
const ext = path29.extname(filePath).toLowerCase();
|
|
25173
|
+
const basename2 = path29.basename(filePath);
|
|
23719
25174
|
const commentMap = {
|
|
23720
25175
|
// C-style languages (single line comments)
|
|
23721
25176
|
".js": "//",
|
|
@@ -23923,18 +25378,18 @@ var PatchApplicationService = class {
|
|
|
23923
25378
|
}
|
|
23924
25379
|
);
|
|
23925
25380
|
}
|
|
23926
|
-
const dirPath =
|
|
25381
|
+
const dirPath = path29.dirname(normalizedFilePath);
|
|
23927
25382
|
mkdirSync(dirPath, { recursive: true });
|
|
23928
|
-
|
|
25383
|
+
writeFileSync3(normalizedFilePath, finalContent, "utf8");
|
|
23929
25384
|
return normalizedFilePath;
|
|
23930
25385
|
}
|
|
23931
25386
|
static resolvePathWithinRepo({
|
|
23932
25387
|
repositoryPath,
|
|
23933
25388
|
targetPath
|
|
23934
25389
|
}) {
|
|
23935
|
-
const repoRoot =
|
|
23936
|
-
const normalizedPath =
|
|
23937
|
-
const repoRootWithSep = repoRoot.endsWith(
|
|
25390
|
+
const repoRoot = path29.resolve(repositoryPath);
|
|
25391
|
+
const normalizedPath = path29.resolve(repoRoot, targetPath);
|
|
25392
|
+
const repoRootWithSep = repoRoot.endsWith(path29.sep) ? repoRoot : `${repoRoot}${path29.sep}`;
|
|
23938
25393
|
if (normalizedPath !== repoRoot && !normalizedPath.startsWith(repoRootWithSep)) {
|
|
23939
25394
|
throw new Error(
|
|
23940
25395
|
`Security violation: target path ${targetPath} resolves outside repository`
|
|
@@ -23943,7 +25398,7 @@ var PatchApplicationService = class {
|
|
|
23943
25398
|
return {
|
|
23944
25399
|
repoRoot,
|
|
23945
25400
|
normalizedPath,
|
|
23946
|
-
relativePath:
|
|
25401
|
+
relativePath: path29.relative(repoRoot, normalizedPath)
|
|
23947
25402
|
};
|
|
23948
25403
|
}
|
|
23949
25404
|
/**
|
|
@@ -24225,8 +25680,8 @@ var PatchApplicationService = class {
|
|
|
24225
25680
|
continue;
|
|
24226
25681
|
}
|
|
24227
25682
|
try {
|
|
24228
|
-
const absolutePath =
|
|
24229
|
-
if (
|
|
25683
|
+
const absolutePath = path29.resolve(repositoryPath, targetFile);
|
|
25684
|
+
if (existsSync7(absolutePath)) {
|
|
24230
25685
|
const stats = await fs24.stat(absolutePath);
|
|
24231
25686
|
const fileModTime = stats.mtime.getTime();
|
|
24232
25687
|
if (fileModTime > scanStartTime) {
|
|
@@ -24427,7 +25882,7 @@ var PatchApplicationService = class {
|
|
|
24427
25882
|
targetFile,
|
|
24428
25883
|
absoluteFilePath,
|
|
24429
25884
|
relativePath,
|
|
24430
|
-
exists:
|
|
25885
|
+
exists: existsSync7(absoluteFilePath)
|
|
24431
25886
|
});
|
|
24432
25887
|
return { absoluteFilePath, relativePath };
|
|
24433
25888
|
}
|
|
@@ -24451,7 +25906,7 @@ var PatchApplicationService = class {
|
|
|
24451
25906
|
fix,
|
|
24452
25907
|
scanContext
|
|
24453
25908
|
});
|
|
24454
|
-
appliedFiles.push(
|
|
25909
|
+
appliedFiles.push(path29.relative(repositoryPath, actualPath));
|
|
24455
25910
|
logDebug(`[${scanContext}] Created new file: ${relativePath}`);
|
|
24456
25911
|
}
|
|
24457
25912
|
/**
|
|
@@ -24463,7 +25918,7 @@ var PatchApplicationService = class {
|
|
|
24463
25918
|
appliedFiles,
|
|
24464
25919
|
scanContext
|
|
24465
25920
|
}) {
|
|
24466
|
-
if (
|
|
25921
|
+
if (existsSync7(absoluteFilePath)) {
|
|
24467
25922
|
unlinkSync(absoluteFilePath);
|
|
24468
25923
|
appliedFiles.push(relativePath);
|
|
24469
25924
|
logDebug(`[${scanContext}] Deleted file: ${relativePath}`);
|
|
@@ -24482,12 +25937,12 @@ var PatchApplicationService = class {
|
|
|
24482
25937
|
appliedFiles,
|
|
24483
25938
|
scanContext
|
|
24484
25939
|
}) {
|
|
24485
|
-
if (!
|
|
25940
|
+
if (!existsSync7(absoluteFilePath)) {
|
|
24486
25941
|
throw new Error(
|
|
24487
25942
|
`Target file does not exist: ${targetFile} (resolved to: ${absoluteFilePath})`
|
|
24488
25943
|
);
|
|
24489
25944
|
}
|
|
24490
|
-
const originalContent =
|
|
25945
|
+
const originalContent = readFileSync4(absoluteFilePath, "utf8");
|
|
24491
25946
|
const modifiedContent = this.applyHunksToFile(
|
|
24492
25947
|
originalContent,
|
|
24493
25948
|
fileDiff.chunks
|
|
@@ -24500,7 +25955,7 @@ var PatchApplicationService = class {
|
|
|
24500
25955
|
fix,
|
|
24501
25956
|
scanContext
|
|
24502
25957
|
});
|
|
24503
|
-
appliedFiles.push(
|
|
25958
|
+
appliedFiles.push(path29.relative(repositoryPath, actualPath));
|
|
24504
25959
|
logDebug(`[${scanContext}] Modified file: ${relativePath}`);
|
|
24505
25960
|
}
|
|
24506
25961
|
}
|
|
@@ -24697,8 +26152,8 @@ init_configs();
|
|
|
24697
26152
|
// src/mcp/services/FileOperations.ts
|
|
24698
26153
|
init_FileUtils();
|
|
24699
26154
|
import fs25 from "fs";
|
|
24700
|
-
import
|
|
24701
|
-
import
|
|
26155
|
+
import path30 from "path";
|
|
26156
|
+
import AdmZip4 from "adm-zip";
|
|
24702
26157
|
var FileOperations = class {
|
|
24703
26158
|
/**
|
|
24704
26159
|
* Creates a ZIP archive containing the specified source files
|
|
@@ -24713,14 +26168,14 @@ var FileOperations = class {
|
|
|
24713
26168
|
maxFileSize
|
|
24714
26169
|
}) {
|
|
24715
26170
|
logDebug("[FileOperations] Packing files");
|
|
24716
|
-
const zip = new
|
|
26171
|
+
const zip = new AdmZip4();
|
|
24717
26172
|
let packedFilesCount = 0;
|
|
24718
26173
|
const packedFiles = [];
|
|
24719
26174
|
const excludedFiles = [];
|
|
24720
|
-
const resolvedRepoPath =
|
|
26175
|
+
const resolvedRepoPath = path30.resolve(repositoryPath);
|
|
24721
26176
|
for (const filepath of fileList) {
|
|
24722
|
-
const absoluteFilepath =
|
|
24723
|
-
const resolvedFilePath =
|
|
26177
|
+
const absoluteFilepath = path30.join(repositoryPath, filepath);
|
|
26178
|
+
const resolvedFilePath = path30.resolve(absoluteFilepath);
|
|
24724
26179
|
if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
|
|
24725
26180
|
const reason = "potential path traversal security risk";
|
|
24726
26181
|
logDebug(`[FileOperations] Skipping ${filepath} due to ${reason}`);
|
|
@@ -24767,11 +26222,11 @@ var FileOperations = class {
|
|
|
24767
26222
|
fileList,
|
|
24768
26223
|
repositoryPath
|
|
24769
26224
|
}) {
|
|
24770
|
-
const resolvedRepoPath =
|
|
26225
|
+
const resolvedRepoPath = path30.resolve(repositoryPath);
|
|
24771
26226
|
const validatedPaths = [];
|
|
24772
26227
|
for (const filepath of fileList) {
|
|
24773
|
-
const absoluteFilepath =
|
|
24774
|
-
const resolvedFilePath =
|
|
26228
|
+
const absoluteFilepath = path30.join(repositoryPath, filepath);
|
|
26229
|
+
const resolvedFilePath = path30.resolve(absoluteFilepath);
|
|
24775
26230
|
if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
|
|
24776
26231
|
logDebug(
|
|
24777
26232
|
`[FileOperations] Rejecting ${filepath} - path traversal attempt detected`
|
|
@@ -24799,7 +26254,7 @@ var FileOperations = class {
|
|
|
24799
26254
|
for (const absolutePath of filePaths) {
|
|
24800
26255
|
try {
|
|
24801
26256
|
const content = await fs25.promises.readFile(absolutePath);
|
|
24802
|
-
const relativePath =
|
|
26257
|
+
const relativePath = path30.basename(absolutePath);
|
|
24803
26258
|
fileDataArray.push({
|
|
24804
26259
|
relativePath,
|
|
24805
26260
|
absolutePath,
|
|
@@ -25111,14 +26566,14 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
25111
26566
|
* since the last scan.
|
|
25112
26567
|
*/
|
|
25113
26568
|
async scanForSecurityVulnerabilities({
|
|
25114
|
-
path:
|
|
26569
|
+
path: path34,
|
|
25115
26570
|
isAllDetectionRulesScan,
|
|
25116
26571
|
isAllFilesScan,
|
|
25117
26572
|
scanContext
|
|
25118
26573
|
}) {
|
|
25119
26574
|
this.hasAuthenticationFailed = false;
|
|
25120
26575
|
logDebug(`[${scanContext}] Scanning for new security vulnerabilities`, {
|
|
25121
|
-
path:
|
|
26576
|
+
path: path34
|
|
25122
26577
|
});
|
|
25123
26578
|
if (!this.gqlClient) {
|
|
25124
26579
|
logInfo(`[${scanContext}] No GQL client found, skipping scan`);
|
|
@@ -25134,11 +26589,11 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
25134
26589
|
}
|
|
25135
26590
|
logDebug(
|
|
25136
26591
|
`[${scanContext}] Connected to the API, assembling list of files to scan`,
|
|
25137
|
-
{ path:
|
|
26592
|
+
{ path: path34 }
|
|
25138
26593
|
);
|
|
25139
26594
|
const isBackgroundScan = scanContext === ScanContext.BACKGROUND_INITIAL || scanContext === ScanContext.BACKGROUND_PERIODIC;
|
|
25140
26595
|
const files = await getLocalFiles({
|
|
25141
|
-
path:
|
|
26596
|
+
path: path34,
|
|
25142
26597
|
isAllFilesScan,
|
|
25143
26598
|
scanContext,
|
|
25144
26599
|
scanRecentlyChangedFiles: !isBackgroundScan
|
|
@@ -25164,13 +26619,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
25164
26619
|
});
|
|
25165
26620
|
const { fixReportId, projectId } = await scanFiles({
|
|
25166
26621
|
fileList: filesToScan.map((file) => file.relativePath),
|
|
25167
|
-
repositoryPath:
|
|
26622
|
+
repositoryPath: path34,
|
|
25168
26623
|
gqlClient: this.gqlClient,
|
|
25169
26624
|
isAllDetectionRulesScan,
|
|
25170
26625
|
scanContext
|
|
25171
26626
|
});
|
|
25172
26627
|
logInfo(
|
|
25173
|
-
`[${scanContext}] Security scan completed for ${
|
|
26628
|
+
`[${scanContext}] Security scan completed for ${path34} reportId: ${fixReportId} projectId: ${projectId}`
|
|
25174
26629
|
);
|
|
25175
26630
|
if (isAllFilesScan) {
|
|
25176
26631
|
return;
|
|
@@ -25464,13 +26919,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
25464
26919
|
});
|
|
25465
26920
|
return scannedFiles.some((file) => file.relativePath === fixFile);
|
|
25466
26921
|
}
|
|
25467
|
-
async getFreshFixes({ path:
|
|
26922
|
+
async getFreshFixes({ path: path34 }) {
|
|
25468
26923
|
const scanContext = ScanContext.USER_REQUEST;
|
|
25469
|
-
logDebug(`[${scanContext}] Getting fresh fixes`, { path:
|
|
25470
|
-
if (this.path !==
|
|
25471
|
-
this.path =
|
|
26924
|
+
logDebug(`[${scanContext}] Getting fresh fixes`, { path: path34 });
|
|
26925
|
+
if (this.path !== path34) {
|
|
26926
|
+
this.path = path34;
|
|
25472
26927
|
this.reset();
|
|
25473
|
-
logInfo(`[${scanContext}] Reset service state for new path`, { path:
|
|
26928
|
+
logInfo(`[${scanContext}] Reset service state for new path`, { path: path34 });
|
|
25474
26929
|
}
|
|
25475
26930
|
try {
|
|
25476
26931
|
const loginContext = createMcpLoginContext("check_new_fixes");
|
|
@@ -25489,7 +26944,7 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
25489
26944
|
}
|
|
25490
26945
|
throw error;
|
|
25491
26946
|
}
|
|
25492
|
-
this.triggerScan({ path:
|
|
26947
|
+
this.triggerScan({ path: path34, gqlClient: this.gqlClient });
|
|
25493
26948
|
let isMvsAutoFixEnabled = null;
|
|
25494
26949
|
try {
|
|
25495
26950
|
isMvsAutoFixEnabled = await this.gqlClient.getMvsAutoFixSettings();
|
|
@@ -25523,33 +26978,33 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
25523
26978
|
return noFreshFixesPrompt;
|
|
25524
26979
|
}
|
|
25525
26980
|
triggerScan({
|
|
25526
|
-
path:
|
|
26981
|
+
path: path34,
|
|
25527
26982
|
gqlClient
|
|
25528
26983
|
}) {
|
|
25529
|
-
if (this.path !==
|
|
25530
|
-
this.path =
|
|
26984
|
+
if (this.path !== path34) {
|
|
26985
|
+
this.path = path34;
|
|
25531
26986
|
this.reset();
|
|
25532
|
-
logInfo(`Reset service state for new path in triggerScan`, { path:
|
|
26987
|
+
logInfo(`Reset service state for new path in triggerScan`, { path: path34 });
|
|
25533
26988
|
}
|
|
25534
26989
|
this.gqlClient = gqlClient;
|
|
25535
26990
|
if (!this.intervalId) {
|
|
25536
|
-
this.startPeriodicScanning(
|
|
25537
|
-
this.executeInitialScan(
|
|
25538
|
-
void this.executeInitialFullScan(
|
|
26991
|
+
this.startPeriodicScanning(path34);
|
|
26992
|
+
this.executeInitialScan(path34);
|
|
26993
|
+
void this.executeInitialFullScan(path34);
|
|
25539
26994
|
}
|
|
25540
26995
|
}
|
|
25541
|
-
startPeriodicScanning(
|
|
26996
|
+
startPeriodicScanning(path34) {
|
|
25542
26997
|
const scanContext = ScanContext.BACKGROUND_PERIODIC;
|
|
25543
26998
|
logDebug(
|
|
25544
26999
|
`[${scanContext}] Starting periodic scan for new security vulnerabilities`,
|
|
25545
27000
|
{
|
|
25546
|
-
path:
|
|
27001
|
+
path: path34
|
|
25547
27002
|
}
|
|
25548
27003
|
);
|
|
25549
27004
|
this.intervalId = setInterval(() => {
|
|
25550
|
-
logDebug(`[${scanContext}] Triggering periodic security scan`, { path:
|
|
27005
|
+
logDebug(`[${scanContext}] Triggering periodic security scan`, { path: path34 });
|
|
25551
27006
|
this.scanForSecurityVulnerabilities({
|
|
25552
|
-
path:
|
|
27007
|
+
path: path34,
|
|
25553
27008
|
scanContext
|
|
25554
27009
|
}).catch((error) => {
|
|
25555
27010
|
logError(`[${scanContext}] Error during periodic security scan`, {
|
|
@@ -25558,45 +27013,45 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
|
|
|
25558
27013
|
});
|
|
25559
27014
|
}, MCP_PERIODIC_CHECK_INTERVAL);
|
|
25560
27015
|
}
|
|
25561
|
-
async executeInitialFullScan(
|
|
27016
|
+
async executeInitialFullScan(path34) {
|
|
25562
27017
|
const scanContext = ScanContext.FULL_SCAN;
|
|
25563
|
-
logDebug(`[${scanContext}] Triggering initial full security scan`, { path:
|
|
27018
|
+
logDebug(`[${scanContext}] Triggering initial full security scan`, { path: path34 });
|
|
25564
27019
|
logDebug(`[${scanContext}] Full scan paths scanned`, {
|
|
25565
27020
|
fullScanPathsScanned: this.fullScanPathsScanned
|
|
25566
27021
|
});
|
|
25567
|
-
if (this.fullScanPathsScanned.includes(
|
|
27022
|
+
if (this.fullScanPathsScanned.includes(path34)) {
|
|
25568
27023
|
logDebug(`[${scanContext}] Full scan already executed for this path`, {
|
|
25569
|
-
path:
|
|
27024
|
+
path: path34
|
|
25570
27025
|
});
|
|
25571
27026
|
return;
|
|
25572
27027
|
}
|
|
25573
27028
|
configStore.set("fullScanPathsScanned", [
|
|
25574
27029
|
...this.fullScanPathsScanned,
|
|
25575
|
-
|
|
27030
|
+
path34
|
|
25576
27031
|
]);
|
|
25577
27032
|
try {
|
|
25578
27033
|
await this.scanForSecurityVulnerabilities({
|
|
25579
|
-
path:
|
|
27034
|
+
path: path34,
|
|
25580
27035
|
isAllFilesScan: true,
|
|
25581
27036
|
isAllDetectionRulesScan: true,
|
|
25582
27037
|
scanContext: ScanContext.FULL_SCAN
|
|
25583
27038
|
});
|
|
25584
|
-
if (!this.fullScanPathsScanned.includes(
|
|
25585
|
-
this.fullScanPathsScanned.push(
|
|
27039
|
+
if (!this.fullScanPathsScanned.includes(path34)) {
|
|
27040
|
+
this.fullScanPathsScanned.push(path34);
|
|
25586
27041
|
configStore.set("fullScanPathsScanned", this.fullScanPathsScanned);
|
|
25587
27042
|
}
|
|
25588
|
-
logInfo(`[${scanContext}] Full scan completed`, { path:
|
|
27043
|
+
logInfo(`[${scanContext}] Full scan completed`, { path: path34 });
|
|
25589
27044
|
} catch (error) {
|
|
25590
27045
|
logError(`[${scanContext}] Error during initial full security scan`, {
|
|
25591
27046
|
error
|
|
25592
27047
|
});
|
|
25593
27048
|
}
|
|
25594
27049
|
}
|
|
25595
|
-
executeInitialScan(
|
|
27050
|
+
executeInitialScan(path34) {
|
|
25596
27051
|
const scanContext = ScanContext.BACKGROUND_INITIAL;
|
|
25597
|
-
logDebug(`[${scanContext}] Triggering initial security scan`, { path:
|
|
27052
|
+
logDebug(`[${scanContext}] Triggering initial security scan`, { path: path34 });
|
|
25598
27053
|
this.scanForSecurityVulnerabilities({
|
|
25599
|
-
path:
|
|
27054
|
+
path: path34,
|
|
25600
27055
|
scanContext: ScanContext.BACKGROUND_INITIAL
|
|
25601
27056
|
}).catch((error) => {
|
|
25602
27057
|
logError(`[${scanContext}] Error during initial security scan`, { error });
|
|
@@ -25693,9 +27148,9 @@ Example payload:
|
|
|
25693
27148
|
`Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
|
|
25694
27149
|
);
|
|
25695
27150
|
}
|
|
25696
|
-
const
|
|
27151
|
+
const path34 = pathValidationResult.path;
|
|
25697
27152
|
const resultText = await this.newFixesService.getFreshFixes({
|
|
25698
|
-
path:
|
|
27153
|
+
path: path34
|
|
25699
27154
|
});
|
|
25700
27155
|
logInfo("CheckForNewAvailableFixesTool execution completed", {
|
|
25701
27156
|
resultText
|
|
@@ -25873,8 +27328,8 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
|
|
|
25873
27328
|
`Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
|
|
25874
27329
|
);
|
|
25875
27330
|
}
|
|
25876
|
-
const
|
|
25877
|
-
const gitService = new GitService(
|
|
27331
|
+
const path34 = pathValidationResult.path;
|
|
27332
|
+
const gitService = new GitService(path34, log);
|
|
25878
27333
|
const gitValidation = await gitService.validateRepository();
|
|
25879
27334
|
if (!gitValidation.isValid) {
|
|
25880
27335
|
throw new Error(`Invalid git repository: ${gitValidation.error}`);
|
|
@@ -26259,9 +27714,9 @@ Example payload:
|
|
|
26259
27714
|
`Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
|
|
26260
27715
|
);
|
|
26261
27716
|
}
|
|
26262
|
-
const
|
|
27717
|
+
const path34 = pathValidationResult.path;
|
|
26263
27718
|
const files = await getLocalFiles({
|
|
26264
|
-
path:
|
|
27719
|
+
path: path34,
|
|
26265
27720
|
maxFileSize: MCP_MAX_FILE_SIZE,
|
|
26266
27721
|
maxFiles: args.maxFiles,
|
|
26267
27722
|
scanContext: ScanContext.USER_REQUEST,
|
|
@@ -26281,7 +27736,7 @@ Example payload:
|
|
|
26281
27736
|
try {
|
|
26282
27737
|
const fixResult = await this.vulnerabilityFixService.processVulnerabilities({
|
|
26283
27738
|
fileList: files.map((file) => file.relativePath),
|
|
26284
|
-
repositoryPath:
|
|
27739
|
+
repositoryPath: path34,
|
|
26285
27740
|
offset: args.offset,
|
|
26286
27741
|
limit: args.limit,
|
|
26287
27742
|
isRescan: args.rescan || !!args.maxFiles
|
|
@@ -26582,10 +28037,10 @@ init_client_generates();
|
|
|
26582
28037
|
init_urlParser2();
|
|
26583
28038
|
|
|
26584
28039
|
// src/features/codeium_intellij/codeium_language_server_grpc_client.ts
|
|
26585
|
-
import
|
|
28040
|
+
import path31 from "path";
|
|
26586
28041
|
import * as grpc from "@grpc/grpc-js";
|
|
26587
28042
|
import * as protoLoader from "@grpc/proto-loader";
|
|
26588
|
-
var PROTO_PATH =
|
|
28043
|
+
var PROTO_PATH = path31.join(
|
|
26589
28044
|
getModuleRootDir(),
|
|
26590
28045
|
"src/features/codeium_intellij/proto/exa/language_server_pb/language_server.proto"
|
|
26591
28046
|
);
|
|
@@ -26597,7 +28052,7 @@ function loadProto() {
|
|
|
26597
28052
|
defaults: true,
|
|
26598
28053
|
oneofs: true,
|
|
26599
28054
|
includeDirs: [
|
|
26600
|
-
|
|
28055
|
+
path31.join(getModuleRootDir(), "src/features/codeium_intellij/proto")
|
|
26601
28056
|
]
|
|
26602
28057
|
});
|
|
26603
28058
|
return grpc.loadPackageDefinition(
|
|
@@ -26653,28 +28108,28 @@ async function getGrpcClient(port, csrf3) {
|
|
|
26653
28108
|
// src/features/codeium_intellij/parse_intellij_logs.ts
|
|
26654
28109
|
import fs27 from "fs";
|
|
26655
28110
|
import os14 from "os";
|
|
26656
|
-
import
|
|
28111
|
+
import path32 from "path";
|
|
26657
28112
|
function getLogsDir() {
|
|
26658
28113
|
if (process.platform === "darwin") {
|
|
26659
|
-
return
|
|
28114
|
+
return path32.join(os14.homedir(), "Library/Logs/JetBrains");
|
|
26660
28115
|
} else if (process.platform === "win32") {
|
|
26661
|
-
return
|
|
26662
|
-
process.env["LOCALAPPDATA"] ||
|
|
28116
|
+
return path32.join(
|
|
28117
|
+
process.env["LOCALAPPDATA"] || path32.join(os14.homedir(), "AppData/Local"),
|
|
26663
28118
|
"JetBrains"
|
|
26664
28119
|
);
|
|
26665
28120
|
} else {
|
|
26666
|
-
return
|
|
28121
|
+
return path32.join(os14.homedir(), ".cache/JetBrains");
|
|
26667
28122
|
}
|
|
26668
28123
|
}
|
|
26669
28124
|
function parseIdeLogDir(ideLogDir) {
|
|
26670
28125
|
const logFiles = fs27.readdirSync(ideLogDir).filter((f) => /^idea(\.\d+)?\.log$/.test(f)).map((f) => ({
|
|
26671
28126
|
name: f,
|
|
26672
|
-
mtime: fs27.statSync(
|
|
28127
|
+
mtime: fs27.statSync(path32.join(ideLogDir, f)).mtimeMs
|
|
26673
28128
|
})).sort((a, b) => a.mtime - b.mtime).map((f) => f.name);
|
|
26674
28129
|
let latestCsrf = null;
|
|
26675
28130
|
let latestPort = null;
|
|
26676
28131
|
for (const logFile of logFiles) {
|
|
26677
|
-
const lines = fs27.readFileSync(
|
|
28132
|
+
const lines = fs27.readFileSync(path32.join(ideLogDir, logFile), "utf-8").split("\n");
|
|
26678
28133
|
for (const line of lines) {
|
|
26679
28134
|
if (!line.includes(
|
|
26680
28135
|
"com.codeium.intellij.language_server.LanguageServerProcessHandler"
|
|
@@ -26702,9 +28157,9 @@ function findRunningCodeiumLanguageServers() {
|
|
|
26702
28157
|
const logsDir = getLogsDir();
|
|
26703
28158
|
if (!fs27.existsSync(logsDir)) return results;
|
|
26704
28159
|
for (const ide of fs27.readdirSync(logsDir)) {
|
|
26705
|
-
let ideLogDir =
|
|
28160
|
+
let ideLogDir = path32.join(logsDir, ide);
|
|
26706
28161
|
if (process.platform !== "darwin") {
|
|
26707
|
-
ideLogDir =
|
|
28162
|
+
ideLogDir = path32.join(ideLogDir, "log");
|
|
26708
28163
|
}
|
|
26709
28164
|
if (!fs27.existsSync(ideLogDir) || !fs27.statSync(ideLogDir).isDirectory()) {
|
|
26710
28165
|
continue;
|
|
@@ -26887,10 +28342,10 @@ function processChatStepCodeAction(step) {
|
|
|
26887
28342
|
// src/features/codeium_intellij/install_hook.ts
|
|
26888
28343
|
import fsPromises5 from "fs/promises";
|
|
26889
28344
|
import os15 from "os";
|
|
26890
|
-
import
|
|
28345
|
+
import path33 from "path";
|
|
26891
28346
|
import chalk14 from "chalk";
|
|
26892
28347
|
function getCodeiumHooksPath() {
|
|
26893
|
-
return
|
|
28348
|
+
return path33.join(os15.homedir(), ".codeium", "hooks.json");
|
|
26894
28349
|
}
|
|
26895
28350
|
async function readCodeiumHooks() {
|
|
26896
28351
|
const hooksPath = getCodeiumHooksPath();
|
|
@@ -26903,7 +28358,7 @@ async function readCodeiumHooks() {
|
|
|
26903
28358
|
}
|
|
26904
28359
|
async function writeCodeiumHooks(config2) {
|
|
26905
28360
|
const hooksPath = getCodeiumHooksPath();
|
|
26906
|
-
const dir =
|
|
28361
|
+
const dir = path33.dirname(hooksPath);
|
|
26907
28362
|
await fsPromises5.mkdir(dir, { recursive: true });
|
|
26908
28363
|
await fsPromises5.writeFile(
|
|
26909
28364
|
hooksPath,
|