mobbdev 1.3.7 → 1.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -129,10 +129,13 @@ function getSdk(client, withWrapper = defaultWrapper) {
129
129
  },
130
130
  ScanSkill(variables, requestHeaders, signal) {
131
131
  return withWrapper((wrappedRequestHeaders) => client.request({ document: ScanSkillDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "ScanSkill", "mutation", variables);
132
+ },
133
+ SkillVerdictsByMd5(variables, requestHeaders, signal) {
134
+ return withWrapper((wrappedRequestHeaders) => client.request({ document: SkillVerdictsByMd5Document, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "SkillVerdictsByMd5", "query", variables);
132
135
  }
133
136
  };
134
137
  }
135
- var AiBlameInferenceType, FixQuestionInputType, Language, ManifestAction, Effort_To_Apply_Fix_Enum, Fix_Rating_Tag_Enum, Fix_Report_State_Enum, Fix_State_Enum, IssueLanguage_Enum, IssueType_Enum, Pr_Status_Enum, Project_Role_Type_Enum, Vulnerability_Report_Issue_Category_Enum, Vulnerability_Report_Issue_State_Enum, Vulnerability_Report_Issue_Tag_Enum, Vulnerability_Report_Vendor_Enum, Vulnerability_Severity_Enum, FixDetailsFragmentDoc, FixReportSummaryFieldsFragmentDoc, MeDocument, GetLastOrgAndNamedProjectDocument, GetLastOrgDocument, GetEncryptedApiTokenDocument, FixReportStateDocument, GetVulnerabilityReportPathsDocument, GetAnalysisSubscriptionDocument, GetAnalysisDocument, GetFixesDocument, GetVulByNodesMetadataDocument, GetFalsePositiveDocument, UpdateScmTokenDocument, UploadS3BucketInfoDocument, GetTracyDiffUploadUrlDocument, AnalyzeCommitForExtensionAiBlameDocument, GetAiBlameInferenceDocument, GetAiBlameAttributionPromptDocument, GetPromptSummaryDocument, UploadAiBlameInferencesInitDocument, FinalizeAiBlameInferencesUploadDocument, UploadTracyRecordsDocument, GetTracyRawDataUploadUrlDocument, DigestVulnerabilityReportDocument, SubmitVulnerabilityReportDocument, CreateCommunityUserDocument, CreateCliLoginDocument, PerformCliLoginDocument, CreateProjectDocument, ValidateRepoUrlDocument, GitReferenceDocument, AutoPrAnalysisDocument, GetFixReportsByRepoUrlDocument, GetReportFixesDocument, GetLatestReportByRepoUrlDocument, UpdateDownloadedFixDataDocument, GetUserMvsAutoFixDocument, StreamBlameAiAnalysisRequestsDocument, StreamCommitBlameRequestsDocument, ScanSkillDocument, defaultWrapper;
138
+ var AiBlameInferenceType, FixQuestionInputType, Language, ManifestAction, Effort_To_Apply_Fix_Enum, Fix_Rating_Tag_Enum, Fix_Report_State_Enum, Fix_State_Enum, IssueLanguage_Enum, IssueType_Enum, Pr_Status_Enum, Project_Role_Type_Enum, Vulnerability_Report_Issue_Category_Enum, Vulnerability_Report_Issue_State_Enum, Vulnerability_Report_Issue_Tag_Enum, Vulnerability_Report_Vendor_Enum, Vulnerability_Severity_Enum, FixDetailsFragmentDoc, FixReportSummaryFieldsFragmentDoc, MeDocument, GetLastOrgAndNamedProjectDocument, GetLastOrgDocument, GetEncryptedApiTokenDocument, FixReportStateDocument, GetVulnerabilityReportPathsDocument, GetAnalysisSubscriptionDocument, GetAnalysisDocument, GetFixesDocument, GetVulByNodesMetadataDocument, GetFalsePositiveDocument, UpdateScmTokenDocument, UploadS3BucketInfoDocument, GetTracyDiffUploadUrlDocument, AnalyzeCommitForExtensionAiBlameDocument, GetAiBlameInferenceDocument, GetAiBlameAttributionPromptDocument, GetPromptSummaryDocument, UploadAiBlameInferencesInitDocument, FinalizeAiBlameInferencesUploadDocument, UploadTracyRecordsDocument, GetTracyRawDataUploadUrlDocument, DigestVulnerabilityReportDocument, SubmitVulnerabilityReportDocument, CreateCommunityUserDocument, CreateCliLoginDocument, PerformCliLoginDocument, CreateProjectDocument, ValidateRepoUrlDocument, GitReferenceDocument, AutoPrAnalysisDocument, GetFixReportsByRepoUrlDocument, GetReportFixesDocument, GetLatestReportByRepoUrlDocument, UpdateDownloadedFixDataDocument, GetUserMvsAutoFixDocument, StreamBlameAiAnalysisRequestsDocument, StreamCommitBlameRequestsDocument, ScanSkillDocument, SkillVerdictsByMd5Document, defaultWrapper;
136
139
  var init_client_generates = __esm({
137
140
  "src/features/analysis/scm/generates/client_generates.ts"() {
138
141
  "use strict";
@@ -260,10 +263,12 @@ var init_client_generates = __esm({
260
263
  IssueType_Enum2["ImproperExceptionHandling"] = "IMPROPER_EXCEPTION_HANDLING";
261
264
  IssueType_Enum2["ImproperResourceShutdownOrRelease"] = "IMPROPER_RESOURCE_SHUTDOWN_OR_RELEASE";
262
265
  IssueType_Enum2["ImproperStringFormatting"] = "IMPROPER_STRING_FORMATTING";
266
+ IssueType_Enum2["ImproperValidationOfArrayIndex"] = "IMPROPER_VALIDATION_OF_ARRAY_INDEX";
263
267
  IssueType_Enum2["IncompleteHostnameRegex"] = "INCOMPLETE_HOSTNAME_REGEX";
264
268
  IssueType_Enum2["IncompleteSanitization"] = "INCOMPLETE_SANITIZATION";
265
269
  IssueType_Enum2["IncompleteUrlSanitization"] = "INCOMPLETE_URL_SANITIZATION";
266
270
  IssueType_Enum2["IncompleteUrlSchemeCheck"] = "INCOMPLETE_URL_SCHEME_CHECK";
271
+ IssueType_Enum2["IncorrectIntegerConversion"] = "INCORRECT_INTEGER_CONVERSION";
267
272
  IssueType_Enum2["IncorrectSqlApiUsage"] = "INCORRECT_SQL_API_USAGE";
268
273
  IssueType_Enum2["InformationExposureViaHeaders"] = "INFORMATION_EXPOSURE_VIA_HEADERS";
269
274
  IssueType_Enum2["InsecureBinderConfiguration"] = "INSECURE_BINDER_CONFIGURATION";
@@ -288,6 +293,7 @@ var init_client_generates = __esm({
288
293
  IssueType_Enum2["MissingUser"] = "MISSING_USER";
289
294
  IssueType_Enum2["MissingWhitespace"] = "MISSING_WHITESPACE";
290
295
  IssueType_Enum2["MissingWorkflowPermissions"] = "MISSING_WORKFLOW_PERMISSIONS";
296
+ IssueType_Enum2["MissingXFrameOptions"] = "MISSING_X_FRAME_OPTIONS";
291
297
  IssueType_Enum2["ModifiedDefaultParam"] = "MODIFIED_DEFAULT_PARAM";
292
298
  IssueType_Enum2["NonFinalPublicStaticField"] = "NON_FINAL_PUBLIC_STATIC_FIELD";
293
299
  IssueType_Enum2["NonReadonlyField"] = "NON_READONLY_FIELD";
@@ -405,6 +411,7 @@ var init_client_generates = __esm({
405
411
  return Vulnerability_Report_Issue_Tag_Enum3;
406
412
  })(Vulnerability_Report_Issue_Tag_Enum || {});
407
413
  Vulnerability_Report_Vendor_Enum = /* @__PURE__ */ ((Vulnerability_Report_Vendor_Enum3) => {
414
+ Vulnerability_Report_Vendor_Enum3["BlackDuck"] = "blackDuck";
408
415
  Vulnerability_Report_Vendor_Enum3["Checkmarx"] = "checkmarx";
409
416
  Vulnerability_Report_Vendor_Enum3["CheckmarxXml"] = "checkmarxXml";
410
417
  Vulnerability_Report_Vendor_Enum3["Codeql"] = "codeql";
@@ -1265,6 +1272,18 @@ var init_client_generates = __esm({
1265
1272
  cached
1266
1273
  summary
1267
1274
  }
1275
+ }
1276
+ `;
1277
+ SkillVerdictsByMd5Document = `
1278
+ query SkillVerdictsByMd5($md5s: [String!]!) {
1279
+ skillVerdictsByMd5(md5s: $md5s) {
1280
+ md5
1281
+ verdict
1282
+ summary
1283
+ scannerName
1284
+ scannerVersion
1285
+ scannedAt
1286
+ }
1268
1287
  }
1269
1288
  `;
1270
1289
  defaultWrapper = (action, _operationName, _operationType, _variables) => action();
@@ -1452,7 +1471,10 @@ var init_getIssueType = __esm({
1452
1471
  ["REDUNDANT_NIL_ERROR_CHECK" /* RedundantNilErrorCheck */]: "Redundant Nil Error Check",
1453
1472
  ["MISSING_WORKFLOW_PERMISSIONS" /* MissingWorkflowPermissions */]: "Missing Workflow Permissions",
1454
1473
  ["EXCESSIVE_SECRETS_EXPOSURE" /* ExcessiveSecretsExposure */]: "Excessive Secrets Exposure",
1455
- ["TAINTED_NUMERIC_CAST" /* TaintedNumericCast */]: "Tainted Numeric Cast"
1474
+ ["TAINTED_NUMERIC_CAST" /* TaintedNumericCast */]: "Tainted Numeric Cast",
1475
+ ["MISSING_X_FRAME_OPTIONS" /* MissingXFrameOptions */]: "Missing X-Frame-Options Header",
1476
+ ["IMPROPER_VALIDATION_OF_ARRAY_INDEX" /* ImproperValidationOfArrayIndex */]: "Improper Validation of Array Index",
1477
+ ["INCORRECT_INTEGER_CONVERSION" /* IncorrectIntegerConversion */]: "Incorrect Integer Conversion"
1456
1478
  };
1457
1479
  issueTypeZ = z.nativeEnum(IssueType_Enum);
1458
1480
  getIssueTypeFriendlyString = (issueType) => {
@@ -3579,8 +3601,8 @@ var init_FileUtils = __esm({
3579
3601
  const fullPath = path.join(dir, item);
3580
3602
  try {
3581
3603
  await fsPromises.access(fullPath, fs.constants.R_OK);
3582
- const stat3 = await fsPromises.stat(fullPath);
3583
- if (stat3.isDirectory()) {
3604
+ const stat4 = await fsPromises.stat(fullPath);
3605
+ if (stat4.isDirectory()) {
3584
3606
  if (isRootLevel && excludedRootDirectories.includes(item)) {
3585
3607
  continue;
3586
3608
  }
@@ -3592,7 +3614,7 @@ var init_FileUtils = __esm({
3592
3614
  name: item,
3593
3615
  fullPath,
3594
3616
  relativePath: path.relative(rootDir, fullPath),
3595
- time: stat3.mtime.getTime(),
3617
+ time: stat4.mtime.getTime(),
3596
3618
  isFile: true
3597
3619
  });
3598
3620
  }
@@ -4645,7 +4667,10 @@ var fixDetailsData = {
4645
4667
  ["REDUNDANT_NIL_ERROR_CHECK" /* RedundantNilErrorCheck */]: void 0,
4646
4668
  ["MISSING_WORKFLOW_PERMISSIONS" /* MissingWorkflowPermissions */]: void 0,
4647
4669
  ["EXCESSIVE_SECRETS_EXPOSURE" /* ExcessiveSecretsExposure */]: void 0,
4648
- ["TAINTED_NUMERIC_CAST" /* TaintedNumericCast */]: void 0
4670
+ ["TAINTED_NUMERIC_CAST" /* TaintedNumericCast */]: void 0,
4671
+ ["MISSING_X_FRAME_OPTIONS" /* MissingXFrameOptions */]: void 0,
4672
+ ["IMPROPER_VALIDATION_OF_ARRAY_INDEX" /* ImproperValidationOfArrayIndex */]: void 0,
4673
+ ["INCORRECT_INTEGER_CONVERSION" /* IncorrectIntegerConversion */]: void 0
4649
4674
  };
4650
4675
 
4651
4676
  // src/features/analysis/scm/shared/src/commitDescriptionMarkup.ts
@@ -5982,6 +6007,19 @@ var headerMaxAge = {
5982
6007
  }
5983
6008
  };
5984
6009
 
6010
+ // src/features/analysis/scm/shared/src/storedQuestionData/js/missingXFrameOptions.ts
6011
+ var xFrameOptionsValue = {
6012
+ xFrameOptionsValue: {
6013
+ content: () => "Please provide the value for the X-Frame-Options header",
6014
+ description: () => `The \`X-Frame-Options\` HTTP response header tells the browser whether the page is allowed to be rendered inside a \`<frame>\`, \`<iframe>\`, \`<embed>\` or \`<object>\`. Without it, attackers can embed your application in an invisible iframe and trick users into clicking on it \u2014 a class of attacks known as clickjacking (UI redressing).
6015
+ &nbsp;
6016
+ &nbsp; **Allowed values:**
6017
+ &nbsp; - \`DENY\` \u2014 the page cannot be framed by any site, including your own. Recommended default for any page that does not need to be embedded.
6018
+ &nbsp; - \`SAMEORIGIN\` \u2014 the page can only be framed by pages served from the same origin. Use this only if your own application legitimately embeds this page in an iframe.`,
6019
+ guidance: () => ``
6020
+ }
6021
+ };
6022
+
5985
6023
  // src/features/analysis/scm/shared/src/storedQuestionData/js/noLimitsOrThrottling.ts
5986
6024
  var noLimitsOrThrottling2 = {
5987
6025
  setGlobalLimiter: {
@@ -6126,6 +6164,7 @@ var vulnerabilities13 = {
6126
6164
  ["UNCHECKED_LOOP_CONDITION" /* UncheckedLoopCondition */]: uncheckedLoopCondition2,
6127
6165
  ["NO_LIMITS_OR_THROTTLING" /* NoLimitsOrThrottling */]: noLimitsOrThrottling2,
6128
6166
  ["MISSING_CSP_HEADER" /* MissingCspHeader */]: cspHeaderValue,
6167
+ ["MISSING_X_FRAME_OPTIONS" /* MissingXFrameOptions */]: xFrameOptionsValue,
6129
6168
  ["HARDCODED_DOMAIN_IN_HTML" /* HardcodedDomainInHtml */]: hardcodedDomainInHtml,
6130
6169
  ["CSRF" /* Csrf */]: csrf2
6131
6170
  };
@@ -6446,6 +6485,13 @@ var ReferenceType = /* @__PURE__ */ ((ReferenceType2) => {
6446
6485
  ReferenceType2["TAG"] = "TAG";
6447
6486
  return ReferenceType2;
6448
6487
  })(ReferenceType || {});
6488
+ var GithubFullShaZ = z13.string().regex(/^[a-f0-9]{40}$/);
6489
+ var MergedPrSurvivalMetadataZ = z13.object({
6490
+ mergeCommitShas: z13.array(GithubFullShaZ).min(1).refine((shas) => new Set(shas).size === shas.length, {
6491
+ message: "mergeCommitShas must contain unique SHAs"
6492
+ }),
6493
+ targetBranch: z13.string().min(1)
6494
+ });
6449
6495
  var ScmLibScmType = /* @__PURE__ */ ((ScmLibScmType2) => {
6450
6496
  ScmLibScmType2["GITHUB"] = "GITHUB";
6451
6497
  ScmLibScmType2["GITLAB"] = "GITLAB";
@@ -7132,7 +7178,7 @@ async function getAdoSdk(params) {
7132
7178
  const url = new URL(repoUrl);
7133
7179
  const origin = url.origin.toLowerCase().endsWith(".visualstudio.com") ? DEFUALT_ADO_ORIGIN : url.origin.toLowerCase();
7134
7180
  const params2 = `path=/&versionDescriptor[versionOptions]=0&versionDescriptor[versionType]=commit&versionDescriptor[version]=${branch}&resolveLfs=true&$format=zip&api-version=5.0&download=true`;
7135
- const path32 = [
7181
+ const path35 = [
7136
7182
  prefixPath,
7137
7183
  owner,
7138
7184
  projectName,
@@ -7143,7 +7189,7 @@ async function getAdoSdk(params) {
7143
7189
  "items",
7144
7190
  "items"
7145
7191
  ].filter(Boolean).join("/");
7146
- return new URL(`${path32}?${params2}`, origin).toString();
7192
+ return new URL(`${path35}?${params2}`, origin).toString();
7147
7193
  },
7148
7194
  async getAdoBranchList({ repoUrl }) {
7149
7195
  try {
@@ -7232,8 +7278,8 @@ async function getAdoSdk(params) {
7232
7278
  const changeType = entry.changeType;
7233
7279
  return changeType !== 16 && entry.item?.path;
7234
7280
  }).map((entry) => {
7235
- const path32 = entry.item.path;
7236
- return path32.startsWith("/") ? path32.slice(1) : path32;
7281
+ const path35 = entry.item.path;
7282
+ return path35.startsWith("/") ? path35.slice(1) : path35;
7237
7283
  });
7238
7284
  },
7239
7285
  async searchAdoPullRequests({
@@ -7634,6 +7680,12 @@ var SCMLib = class {
7634
7680
  async getPrDataBatch(_repoUrl, _prNumbers) {
7635
7681
  throw new Error("getPrDataBatch not implemented for this SCM provider");
7636
7682
  }
7683
+ /**
7684
+ * GitHub: merge detection for main-branch survival. Other providers return null.
7685
+ */
7686
+ async getMergedPrSurvivalMetadata(_prNumber) {
7687
+ return null;
7688
+ }
7637
7689
  getAccessToken() {
7638
7690
  return this.accessToken || "";
7639
7691
  }
@@ -8895,6 +8947,24 @@ async function encryptSecret(secret, key) {
8895
8947
  return sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL);
8896
8948
  }
8897
8949
 
8950
+ // src/features/analysis/scm/github/utils/mergeCommitShas.ts
8951
+ async function commitShasBetweenBaseAndMerge(githubSdk, args) {
8952
+ let compare;
8953
+ try {
8954
+ compare = await githubSdk.compareCommitsBasehead({
8955
+ owner: args.owner,
8956
+ repo: args.repo,
8957
+ basehead: `${args.baseSha}...${args.mergeCommitSha}`
8958
+ });
8959
+ } catch (err) {
8960
+ throw new Error(
8961
+ `Failed to compare commits ${args.baseSha}...${args.mergeCommitSha}: ${err instanceof Error ? err.message : String(err)}`
8962
+ );
8963
+ }
8964
+ const shas = compare.data.commits.map((c) => c.sha);
8965
+ return shas.length > 0 ? shas : [args.mergeCommitSha];
8966
+ }
8967
+
8898
8968
  // src/features/analysis/scm/github/utils/utils.ts
8899
8969
  import { Octokit } from "octokit";
8900
8970
  import { fetch as fetch2, ProxyAgent } from "undici";
@@ -9627,6 +9697,12 @@ function getGithubSdk(params = {}) {
9627
9697
  );
9628
9698
  return res;
9629
9699
  },
9700
+ async listPullRequestCommits(params2) {
9701
+ return octokit.rest.pulls.listCommits(params2);
9702
+ },
9703
+ async compareCommitsBasehead(params2) {
9704
+ return octokit.rest.repos.compareCommitsWithBasehead(params2);
9705
+ },
9630
9706
  /**
9631
9707
  * List PRs using GitHub's REST `/repos/{owner}/{repo}/pulls` endpoint.
9632
9708
  * https://docs.github.com/en/rest/pulls/pulls?apiVersion=2022-11-28#list-pull-requests
@@ -10441,6 +10517,34 @@ var GithubSCMLib = class extends SCMLib {
10441
10517
  commentIds
10442
10518
  };
10443
10519
  }
10520
+ /**
10521
+ * Detect merge strategy and SHAs on the target branch for main-branch survival (GitHub only).
10522
+ */
10523
+ async getMergedPrSurvivalMetadata(prNumber) {
10524
+ this._validateAccessTokenAndUrl();
10525
+ const { owner, repo } = parseGithubOwnerAndRepo(this.url);
10526
+ const pr = await this.githubSdk.getPr({
10527
+ owner,
10528
+ repo,
10529
+ pull_number: prNumber
10530
+ });
10531
+ if (pr.data.merged !== true || !pr.data.merge_commit_sha) {
10532
+ return null;
10533
+ }
10534
+ const mergeCommitSha = pr.data.merge_commit_sha;
10535
+ const targetBranch = pr.data.base.ref;
10536
+ const baseSha = pr.data.base.sha;
10537
+ const mergeCommitShas = await commitShasBetweenBaseAndMerge(
10538
+ this.githubSdk,
10539
+ {
10540
+ owner,
10541
+ repo,
10542
+ baseSha,
10543
+ mergeCommitSha
10544
+ }
10545
+ );
10546
+ return { mergeCommitShas, targetBranch };
10547
+ }
10444
10548
  };
10445
10549
 
10446
10550
  // src/features/analysis/scm/gitlab/gitlab.ts
@@ -12447,7 +12551,8 @@ var SCANNERS = {
12447
12551
  Snyk: "snyk",
12448
12552
  Sonarqube: "sonarqube",
12449
12553
  Semgrep: "semgrep",
12450
- Datadog: "datadog"
12554
+ Datadog: "datadog",
12555
+ BlackDuck: "blackduck"
12451
12556
  };
12452
12557
  var scannerToVulnerabilityReportVendorEnum = {
12453
12558
  [SCANNERS.Checkmarx]: "checkmarx" /* Checkmarx */,
@@ -12456,7 +12561,8 @@ var scannerToVulnerabilityReportVendorEnum = {
12456
12561
  [SCANNERS.Codeql]: "codeql" /* Codeql */,
12457
12562
  [SCANNERS.Fortify]: "fortify" /* Fortify */,
12458
12563
  [SCANNERS.Semgrep]: "semgrep" /* Semgrep */,
12459
- [SCANNERS.Datadog]: "datadog" /* Datadog */
12564
+ [SCANNERS.Datadog]: "datadog" /* Datadog */,
12565
+ [SCANNERS.BlackDuck]: "blackDuck" /* BlackDuck */
12460
12566
  };
12461
12567
  var SupportedScannersZ = z25.enum([SCANNERS.Checkmarx, SCANNERS.Snyk]);
12462
12568
  var envVariablesSchema = z25.object({
@@ -13539,6 +13645,10 @@ var GQLClient = class {
13539
13645
  async scanSkill(variables) {
13540
13646
  return await this._clientSdk.ScanSkill(variables);
13541
13647
  }
13648
+ // T-467 — batched verdict lookup for the client-side quarantine check.
13649
+ async skillVerdictsByMd5(md5s) {
13650
+ return await this._clientSdk.SkillVerdictsByMd5({ md5s });
13651
+ }
13542
13652
  };
13543
13653
 
13544
13654
  // src/features/analysis/graphql/tracy-batch-upload.ts
@@ -14871,7 +14981,8 @@ var scannerToFriendlyString = {
14871
14981
  snyk: "Snyk",
14872
14982
  sonarqube: "Sonarqube",
14873
14983
  semgrep: "Semgrep",
14874
- datadog: "Datadog"
14984
+ datadog: "Datadog",
14985
+ blackduck: "Black Duck"
14875
14986
  };
14876
14987
 
14877
14988
  // src/features/analysis/add_fix_comments_for_pr/utils/buildCommentBody.ts
@@ -15061,7 +15172,7 @@ async function postIssueComment(params) {
15061
15172
  fpDescription
15062
15173
  } = params;
15063
15174
  const {
15064
- path: path32,
15175
+ path: path35,
15065
15176
  startLine,
15066
15177
  vulnerabilityReportIssue: {
15067
15178
  vulnerabilityReportIssueTags,
@@ -15076,7 +15187,7 @@ async function postIssueComment(params) {
15076
15187
  Refresh the page in order to see the changes.`,
15077
15188
  pull_number: pullRequest,
15078
15189
  commit_id: commitSha,
15079
- path: path32,
15190
+ path: path35,
15080
15191
  line: startLine
15081
15192
  });
15082
15193
  const commentId = commentRes.data.id;
@@ -15110,7 +15221,7 @@ async function postFixComment(params) {
15110
15221
  scanner
15111
15222
  } = params;
15112
15223
  const {
15113
- path: path32,
15224
+ path: path35,
15114
15225
  startLine,
15115
15226
  vulnerabilityReportIssue: { fixId, vulnerabilityReportIssueTags, category },
15116
15227
  vulnerabilityReportIssueId
@@ -15128,7 +15239,7 @@ async function postFixComment(params) {
15128
15239
  Refresh the page in order to see the changes.`,
15129
15240
  pull_number: pullRequest,
15130
15241
  commit_id: commitSha,
15131
- path: path32,
15242
+ path: path35,
15132
15243
  line: startLine
15133
15244
  });
15134
15245
  const commentId = commentRes.data.id;
@@ -16650,8 +16761,8 @@ async function resolveSkillScanInput(skillInput) {
16650
16761
  if (!fs11.existsSync(resolvedPath)) {
16651
16762
  return skillInput;
16652
16763
  }
16653
- const stat3 = fs11.statSync(resolvedPath);
16654
- if (!stat3.isDirectory()) {
16764
+ const stat4 = fs11.statSync(resolvedPath);
16765
+ if (!stat4.isDirectory()) {
16655
16766
  throw new CliError(
16656
16767
  "Local skill input must be a directory containing SKILL.md"
16657
16768
  );
@@ -16979,7 +17090,7 @@ function analyzeBuilder(yargs2) {
16979
17090
  alias: "scan-file",
16980
17091
  type: "string",
16981
17092
  describe: chalk10.bold(
16982
- "Select the vulnerability report to analyze (Checkmarx, Snyk, Fortify, CodeQL, Sonarqube, Semgrep, Datadog)"
17093
+ "Select the vulnerability report to analyze (Checkmarx, Snyk, Fortify, CodeQL, Sonarqube, Semgrep, Datadog, Black Duck)"
16983
17094
  )
16984
17095
  }).option("repo", repoOption).option("p", {
16985
17096
  alias: "src-path",
@@ -17050,108 +17161,27 @@ import { spawn } from "child_process";
17050
17161
 
17051
17162
  // src/features/claude_code/daemon.ts
17052
17163
  import { readFileSync, writeFileSync as writeFileSync2 } from "fs";
17053
- import path19 from "path";
17164
+ import path22 from "path";
17054
17165
  import { setTimeout as sleep2 } from "timers/promises";
17055
17166
  import Configstore3 from "configstore";
17056
17167
 
17057
- // src/features/claude_code/daemon_pid_file.ts
17058
- import fs13 from "fs";
17059
- import os4 from "os";
17060
- import path13 from "path";
17061
-
17062
- // src/features/claude_code/data_collector_constants.ts
17063
- var CC_VERSION_CACHE_KEY = "claudeCode.detectedCCVersion";
17064
- var CC_VERSION_CLI_KEY = "claudeCode.detectedCCVersionCli";
17065
- var GQL_AUTH_TIMEOUT_MS = 15e3;
17066
- var STALE_KEY_MAX_AGE_MS = 14 * 24 * 60 * 60 * 1e3;
17067
- var CLEANUP_INTERVAL_MS = 24 * 60 * 60 * 1e3;
17068
- var DAEMON_TTL_MS = 30 * 60 * 1e3;
17069
- var DAEMON_POLL_INTERVAL_MS = 1e4;
17070
- var HEARTBEAT_STALE_MS = 3e4;
17071
- var TRANSCRIPT_MAX_AGE_MS = 24 * 60 * 60 * 1e3;
17072
- var DAEMON_CHUNK_SIZE = 50;
17073
-
17074
- // src/features/claude_code/daemon_pid_file.ts
17075
- function getMobbdevDir() {
17076
- return path13.join(os4.homedir(), ".mobbdev");
17077
- }
17078
- function getDaemonCheckScriptPath() {
17079
- return path13.join(getMobbdevDir(), "daemon-check.js");
17080
- }
17081
- var DaemonPidFile = class {
17082
- constructor() {
17083
- __publicField(this, "data", null);
17084
- }
17085
- get filePath() {
17086
- return path13.join(getMobbdevDir(), "daemon.pid");
17087
- }
17088
- /** Ensure ~/.mobbdev/ directory exists. */
17089
- ensureDir() {
17090
- fs13.mkdirSync(getMobbdevDir(), { recursive: true });
17091
- }
17092
- /** Read the PID file from disk. Returns the parsed data or null. */
17093
- read() {
17094
- try {
17095
- const raw = fs13.readFileSync(this.filePath, "utf8");
17096
- const parsed = JSON.parse(raw);
17097
- if (typeof parsed.pid !== "number" || typeof parsed.startedAt !== "number" || typeof parsed.heartbeat !== "number") {
17098
- this.data = null;
17099
- } else {
17100
- this.data = parsed;
17101
- }
17102
- } catch {
17103
- this.data = null;
17104
- }
17105
- return this.data;
17106
- }
17107
- /** Write a new PID file for the given process id. */
17108
- write(pid, version) {
17109
- this.data = {
17110
- pid,
17111
- startedAt: Date.now(),
17112
- heartbeat: Date.now(),
17113
- version
17114
- };
17115
- fs13.writeFileSync(this.filePath, JSON.stringify(this.data), "utf8");
17116
- }
17117
- /** Update the heartbeat timestamp of the current PID file. */
17118
- updateHeartbeat() {
17119
- if (!this.data) this.read();
17120
- if (!this.data) return;
17121
- this.data.heartbeat = Date.now();
17122
- fs13.writeFileSync(this.filePath, JSON.stringify(this.data), "utf8");
17123
- }
17124
- /** Check whether the previously read PID data represents a live daemon. */
17125
- isAlive() {
17126
- if (!this.data) return false;
17127
- if (Date.now() - this.data.heartbeat > HEARTBEAT_STALE_MS) return false;
17128
- try {
17129
- process.kill(this.data.pid, 0);
17130
- return true;
17131
- } catch {
17132
- return false;
17133
- }
17134
- }
17135
- /** Remove the PID file from disk (best-effort). */
17136
- remove() {
17137
- this.data = null;
17138
- try {
17139
- fs13.unlinkSync(this.filePath);
17140
- } catch {
17141
- }
17142
- }
17143
- };
17168
+ // src/features/analysis/skill_quarantine/constants.ts
17169
+ var HEARTBEAT_DEBOUNCE_MS = (() => {
17170
+ const raw = Number(process.env["MOBB_TRACY_SKILL_QUARANTINE_DEBOUNCE_MS"]);
17171
+ if (!Number.isFinite(raw) || raw < 0) return 3e4;
17172
+ return Math.min(raw, 3e5);
17173
+ })();
17174
+ var KILL_SWITCH_ENV = "MOBB_TRACY_SKILL_QUARANTINE_DISABLE";
17175
+ var MALICIOUS_VERDICT = "MALICIOUS";
17176
+ var ORPHAN_SWEEP_GRACE_MS = 10 * 60 * 1e3;
17144
17177
 
17145
- // src/features/claude_code/data_collector.ts
17146
- import { execFile } from "child_process";
17147
- import { createHash as createHash3 } from "crypto";
17148
- import { access, open as open4, readdir, readFile as readFile2, unlink } from "fs/promises";
17149
- import path16 from "path";
17150
- import { promisify } from "util";
17178
+ // src/features/analysis/skill_quarantine/enumerateInstalledSkills.ts
17179
+ import { homedir as homedir2 } from "os";
17180
+ import path15 from "path";
17151
17181
 
17152
17182
  // src/features/analysis/context_file_processor.ts
17153
17183
  import { createHash } from "crypto";
17154
- import path14 from "path";
17184
+ import path13 from "path";
17155
17185
  import AdmZip3 from "adm-zip";
17156
17186
  import pLimit6 from "p-limit";
17157
17187
  var SANITIZE_CONCURRENCY = 5;
@@ -17185,8 +17215,12 @@ async function processContextFiles(regularFiles, skillGroups) {
17185
17215
  );
17186
17216
  for (const file of sortedFiles) {
17187
17217
  const sanitizedContent = await sanitizeFileContent(file.content);
17188
- const zipEntryName = group.isFolder ? path14.relative(group.skillPath, file.path).replace(/\\/g, "/") : path14.basename(file.path);
17218
+ const zipEntryName = group.isFolder ? path13.relative(group.skillPath, file.path).replace(/\\/g, "/") : path13.basename(file.path);
17189
17219
  zip.addFile(zipEntryName, Buffer.from(sanitizedContent, "utf-8"));
17220
+ const entry = zip.getEntry(zipEntryName);
17221
+ if (entry) {
17222
+ entry.header.time = /* @__PURE__ */ new Date(0);
17223
+ }
17190
17224
  }
17191
17225
  const zipBuffer = zip.toBuffer();
17192
17226
  const md5 = md5Hex(zipBuffer);
@@ -17198,30 +17232,14 @@ async function processContextFiles(regularFiles, skillGroups) {
17198
17232
  }
17199
17233
 
17200
17234
  // src/features/analysis/context_file_scanner.ts
17201
- import { readFile, stat } from "fs/promises";
17235
+ import { lstat, readdir, readFile, realpath, stat } from "fs/promises";
17202
17236
  import { homedir } from "os";
17203
- import path15 from "path";
17237
+ import path14 from "path";
17204
17238
  import { globby as globby2 } from "globby";
17205
- var MAX_CONTEXT_FILE_SIZE = 20 * 1024 * 1024;
17239
+ import { parse as parseJsoncLib } from "jsonc-parser";
17240
+
17241
+ // src/features/analysis/context_file_scan_paths.ts
17206
17242
  var SKILL_CATEGORY = "skill";
17207
- var SESSION_TTL_MS = 24 * 60 * 60 * 1e3;
17208
- var sessionMtimes = /* @__PURE__ */ new Map();
17209
- function markContextFilesUploaded(sessionId, files, skills) {
17210
- let entry = sessionMtimes.get(sessionId);
17211
- if (!entry) {
17212
- entry = { files: /* @__PURE__ */ new Map(), skills: /* @__PURE__ */ new Map(), lastUpdatedAt: Date.now() };
17213
- sessionMtimes.set(sessionId, entry);
17214
- }
17215
- for (const f of files) {
17216
- entry.files.set(f.path, f.mtimeMs);
17217
- }
17218
- if (skills) {
17219
- for (const sg of skills) {
17220
- entry.skills.set(sg.sessionKey, sg.maxMtimeMs);
17221
- }
17222
- }
17223
- entry.lastUpdatedAt = Date.now();
17224
- }
17225
17243
  var SCAN_PATHS = {
17226
17244
  "claude-code": [
17227
17245
  { glob: "CLAUDE.md", category: "rule", root: "workspace" },
@@ -17237,20 +17255,16 @@ var SCAN_PATHS = {
17237
17255
  category: "memory",
17238
17256
  root: "home"
17239
17257
  },
17240
- {
17241
- glob: ".claude/skills/**/*",
17242
- category: SKILL_CATEGORY,
17243
- root: "workspace"
17244
- },
17245
- { glob: ".claude/commands/*.md", category: "command", root: "workspace" },
17258
+ { kind: "skill-bundle", skillsRoot: ".claude/skills", root: "workspace" },
17259
+ { glob: ".claude/commands/*.md", category: "skill", root: "workspace" },
17246
17260
  {
17247
17261
  glob: ".claude/agents/*.md",
17248
- category: "agent-config",
17262
+ category: SKILL_CATEGORY,
17249
17263
  root: "workspace"
17250
17264
  },
17251
- { glob: ".claude/skills/**/*", category: SKILL_CATEGORY, root: "home" },
17252
- { glob: ".claude/commands/*.md", category: "command", root: "home" },
17253
- { glob: ".claude/agents/*.md", category: "agent-config", root: "home" },
17265
+ { kind: "skill-bundle", skillsRoot: ".claude/skills", root: "home" },
17266
+ { glob: ".claude/commands/*.md", category: "skill", root: "home" },
17267
+ { glob: ".claude/agents/*.md", category: SKILL_CATEGORY, root: "home" },
17254
17268
  { glob: ".claude/settings.json", category: "config", root: "workspace" },
17255
17269
  {
17256
17270
  glob: ".claude/settings.local.json",
@@ -17263,88 +17277,415 @@ var SCAN_PATHS = {
17263
17277
  { glob: ".claudeignore", category: "ignore", root: "workspace" }
17264
17278
  ],
17265
17279
  cursor: [
17280
+ // Legacy single-file rules
17266
17281
  { glob: ".cursorrules", category: "rule", root: "workspace" },
17282
+ // Project Rules — docs support both `.mdc` and `.md` inside .cursor/rules/
17267
17283
  { glob: ".cursor/rules/**/*.mdc", category: "rule", root: "workspace" },
17284
+ { glob: ".cursor/rules/**/*.md", category: "rule", root: "workspace" },
17285
+ // AGENTS.md — Cursor's documented alternative to .cursor/rules/
17286
+ { glob: "AGENTS.md", category: "rule", root: "workspace" },
17287
+ // Agent skills — Cursor auto-loads from these dirs plus compat with
17288
+ // Claude / Codex / generic .agents/ per Cursor docs.
17289
+ { kind: "skill-bundle", skillsRoot: ".cursor/skills", root: "workspace" },
17290
+ { kind: "skill-bundle", skillsRoot: ".agents/skills", root: "workspace" },
17291
+ { kind: "skill-bundle", skillsRoot: ".claude/skills", root: "workspace" },
17292
+ { kind: "skill-bundle", skillsRoot: ".codex/skills", root: "workspace" },
17293
+ // MCP — project + global
17268
17294
  { glob: ".cursor/mcp.json", category: "mcp-config", root: "workspace" },
17269
17295
  { glob: ".cursor/mcp.json", category: "mcp-config", root: "home" },
17296
+ // Home skills (user-level cross-project skills)
17297
+ { kind: "skill-bundle", skillsRoot: ".cursor/skills", root: "home" },
17298
+ { kind: "skill-bundle", skillsRoot: ".agents/skills", root: "home" },
17299
+ { kind: "skill-bundle", skillsRoot: ".claude/skills", root: "home" },
17300
+ { kind: "skill-bundle", skillsRoot: ".codex/skills", root: "home" },
17301
+ // Exclusion
17270
17302
  { glob: ".cursorignore", category: "ignore", root: "workspace" }
17303
+ // Note: Cursor's global "Rules for AI" from Settings UI is stored in
17304
+ // Cursor's internal settings DB. The tracer_ext reads it via VS Code API
17305
+ // (vscode.workspace.getConfiguration) and includes it as a synthetic entry.
17271
17306
  ],
17272
17307
  copilot: [
17308
+ // Instructions — workspace
17273
17309
  {
17274
17310
  glob: ".github/copilot-instructions.md",
17275
17311
  category: "rule",
17276
17312
  root: "workspace"
17277
17313
  },
17278
17314
  {
17279
- glob: ".github/instructions/*.instructions.md",
17315
+ glob: ".github/instructions/**/*.instructions.md",
17280
17316
  category: "rule",
17281
17317
  root: "workspace"
17282
17318
  },
17319
+ // AGENTS.md / CLAUDE.md family (Copilot reads these via chat.useAgentsMdFile,
17320
+ // chat.useClaudeMdFile for cross-compat with Claude Code / other agents).
17321
+ { glob: "AGENTS.md", category: "rule", root: "workspace" },
17322
+ { glob: "CLAUDE.md", category: "rule", root: "workspace" },
17323
+ { glob: "CLAUDE.local.md", category: "rule", root: "workspace" },
17324
+ { glob: ".claude/CLAUDE.md", category: "rule", root: "workspace" },
17325
+ { glob: ".claude/rules/**/*.md", category: "rule", root: "workspace" },
17326
+ // Prompts — workspace
17283
17327
  {
17284
17328
  glob: ".github/prompts/*.prompt.md",
17285
17329
  category: SKILL_CATEGORY,
17286
17330
  root: "workspace"
17287
17331
  },
17332
+ // Custom agents — `.agent.md` is the current format; `.chatmode.md` is the
17333
+ // legacy naming docs recommend renaming. We scan both for transition.
17334
+ {
17335
+ glob: ".github/agents/*.agent.md",
17336
+ category: "agent-config",
17337
+ root: "workspace"
17338
+ },
17288
17339
  {
17289
17340
  glob: ".github/chatmodes/*.chatmode.md",
17341
+ category: "agent-config",
17342
+ root: "workspace"
17343
+ },
17344
+ {
17345
+ glob: ".claude/agents/*.md",
17290
17346
  category: SKILL_CATEGORY,
17291
17347
  root: "workspace"
17292
17348
  },
17349
+ // Agent skills — Copilot discovers skills in all three roots (VS Code docs:
17350
+ // Agent Skills). Each skill is a directory with SKILL.md plus sibling files.
17351
+ { kind: "skill-bundle", skillsRoot: ".github/skills", root: "workspace" },
17352
+ { kind: "skill-bundle", skillsRoot: ".claude/skills", root: "workspace" },
17353
+ { kind: "skill-bundle", skillsRoot: ".agents/skills", root: "workspace" },
17354
+ // MCP — VS Code Copilot reads MCP servers from .vscode/mcp.json
17355
+ { glob: ".vscode/mcp.json", category: "mcp-config", root: "workspace" },
17356
+ // Global — home (JetBrains stores global instructions here)
17293
17357
  {
17294
17358
  glob: ".config/github-copilot/global-copilot-instructions.md",
17295
17359
  category: "rule",
17296
17360
  root: "home"
17297
- }
17361
+ },
17362
+ // User-level Copilot customizations (~/.copilot/)
17363
+ {
17364
+ glob: ".copilot/instructions/**/*.instructions.md",
17365
+ category: "rule",
17366
+ root: "home"
17367
+ },
17368
+ { glob: ".copilot/prompts/*.prompt.md", category: "skill", root: "home" },
17369
+ {
17370
+ glob: ".copilot/agents/*.agent.md",
17371
+ category: "agent-config",
17372
+ root: "home"
17373
+ },
17374
+ { kind: "skill-bundle", skillsRoot: ".copilot/skills", root: "home" },
17375
+ // Cross-compat home paths (Copilot reads Claude / generic agent dirs too)
17376
+ { glob: ".claude/CLAUDE.md", category: "rule", root: "home" },
17377
+ { glob: ".claude/rules/**/*.md", category: "rule", root: "home" },
17378
+ { glob: ".claude/agents/*.md", category: SKILL_CATEGORY, root: "home" },
17379
+ { kind: "skill-bundle", skillsRoot: ".claude/skills", root: "home" },
17380
+ { kind: "skill-bundle", skillsRoot: ".agents/skills", root: "home" }
17298
17381
  ]
17299
17382
  };
17300
- function groupSkills(files, root, baseDir) {
17301
- const skillFiles = files.filter((f) => f.category === SKILL_CATEGORY);
17302
- const folderMap = /* @__PURE__ */ new Map();
17303
- const standalone = [];
17304
- for (const f of skillFiles) {
17305
- const rel = path15.relative(baseDir, f.path).replace(/\\/g, "/");
17306
- const skillsMarker = "skills/";
17307
- const skillsIdx = rel.indexOf(skillsMarker);
17308
- if (skillsIdx === -1) {
17309
- standalone.push(f);
17310
- continue;
17311
- }
17312
- const relFromSkills = rel.slice(skillsIdx + skillsMarker.length);
17313
- const slashIdx = relFromSkills.indexOf("/");
17314
- if (slashIdx === -1) {
17315
- standalone.push(f);
17316
- } else {
17317
- const folderName = relFromSkills.slice(0, slashIdx);
17318
- if (!folderMap.has(folderName)) {
17319
- folderMap.set(folderName, []);
17320
- }
17321
- folderMap.get(folderName).push(f);
17383
+
17384
// src/features/analysis/context_file_scanner.ts
var MAX_CONTEXT_FILE_SIZE = 20 * 1024 * 1024;
var SESSION_TTL_MS = 24 * 60 * 60 * 1e3;
var sessionMtimes = /* @__PURE__ */ new Map();
/**
 * Record, per session, the mtimes of the context files and skill groups that
 * were uploaded, so subsequent scans can skip entries that have not changed.
 */
function markContextFilesUploaded(sessionId, files, skills) {
  let record = sessionMtimes.get(sessionId);
  if (record === undefined) {
    record = {
      files: /* @__PURE__ */ new Map(),
      skills: /* @__PURE__ */ new Map(),
      lastUpdatedAt: Date.now()
    };
    sessionMtimes.set(sessionId, record);
  }
  for (const file of files) {
    record.files.set(file.path, file.mtimeMs);
  }
  for (const group of skills || []) {
    record.skills.set(group.sessionKey, group.maxMtimeMs);
  }
  record.lastUpdatedAt = Date.now();
}
17404
// VS Code settings keys through which users relocate Copilot customization
// files. `skill-bundle` locations receive the SKILL.md bundle scan; `glob`
// locations are matched with the given pattern and tagged with `category`.
var COPILOT_CUSTOM_LOCATION_SETTINGS = [
  {
    key: "chat.agentSkillsLocations",
    kind: "skill-bundle"
  },
  {
    key: "chat.instructionsFilesLocations",
    kind: "glob",
    category: "rule",
    glob: "**/*.instructions.md"
  },
  {
    key: "chat.promptFilesLocations",
    kind: "glob",
    category: "skill",
    glob: "**/*.prompt.md"
  },
  {
    key: "chat.agentFilesLocations",
    kind: "glob",
    category: "agent-config",
    glob: "**/*.agent.md"
  }
];
// Claude Code settings (~/.claude/settings.json) keys that can point at
// additional scan locations; each value is a single path string.
var CLAUDE_CODE_CUSTOM_LOCATION_SETTINGS = [
  {
    key: "autoMemoryDirectory",
    category: "memory",
    glob: "*/memory/*.md"
  }
];
17435
/**
 * Leniently parse JSONC (JSON with comments and trailing commas).
 * Parse errors are collected but tolerated; returns null when the parser
 * produced no value at all.
 */
function parseJsonc(text) {
  const parseErrors = [];
  const value = parseJsoncLib(text, parseErrors, {
    allowTrailingComma: true,
    disallowComments: false
  });
  if (value === undefined || value === null) {
    return null;
  }
  return value;
}
17443
/**
 * Normalize a custom-locations setting value into a list of path strings.
 * Accepts either an array of strings (empty strings dropped) or an object
 * map of path -> enabled flag (only `true` entries with non-empty keys kept).
 * Anything else yields an empty list.
 */
function extractCustomLocations(value) {
  if (Array.isArray(value)) {
    const paths = [];
    for (const item of value) {
      if (typeof item === "string" && item.length > 0) {
        paths.push(item);
      }
    }
    return paths;
  }
  if (value !== null && typeof value === "object") {
    const enabled = [];
    for (const [key, flag] of Object.entries(value)) {
      if (flag === true && key.length > 0) {
        enabled.push(key);
      }
    }
    return enabled;
  }
  return [];
}
17454
// Home subdirectories that commonly hold credentials or secrets; custom scan
// locations are never allowed to resolve inside any of these.
var SENSITIVE_HOME_SUBDIRS = [
  ".ssh",
  ".aws",
  ".gnupg",
  ".config",
  ".kube",
  ".docker",
  ".gcloud",
  ".npmrc",
  ".netrc",
  ".git-credentials",
  ".m2",
  ".pypirc",
  ".pgpass",
  ".boto",
  ".password-store"
];
/**
 * Resolve a user-supplied location string to a safe absolute path.
 * Rejects (returns null): unexpanded ${...} variables, `~user`-style paths,
 * the filesystem root, the home directory itself, sensitive home subdirs,
 * and any path that escapes both the workspace and the home directory.
 */
function resolveCustomLocationPath(raw, workspaceRoot, home) {
  let candidate = raw.replace(/\$\{workspaceFolder\}/g, workspaceRoot);
  // Any other ${var} we cannot expand — reject rather than guess.
  if (/\$\{[^}]+\}/.test(candidate)) {
    return null;
  }
  // `~something` with no slash is ambiguous (~user expansion) — reject.
  if (/^~[^/]/.test(candidate)) {
    return null;
  }
  if (candidate === "~" || candidate.startsWith("~/")) {
    candidate = path14.join(home, candidate.slice(1));
  }
  if (!path14.isAbsolute(candidate)) {
    candidate = path14.resolve(workspaceRoot, candidate);
  }
  const resolved = path14.normalize(candidate);
  if (resolved === "/" || resolved === home) {
    return null;
  }
  for (const sub of SENSITIVE_HOME_SUBDIRS) {
    const sensitive = path14.join(home, sub);
    if (resolved === sensitive || resolved.startsWith(sensitive + path14.sep)) {
      return null;
    }
  }
  const relToWorkspace = path14.relative(workspaceRoot, resolved);
  const relToHome = path14.relative(home, resolved);
  const outsideWorkspace = relToWorkspace.startsWith("..") || path14.isAbsolute(relToWorkspace);
  const outsideHome = relToHome.startsWith("..") || path14.isAbsolute(relToHome);
  if (outsideWorkspace && outsideHome) {
    return null;
  }
  return resolved;
}
17504
// Upper bound on cached settings files; oldest insertion is evicted first.
var MAX_SETTINGS_CACHE_SIZE = 50;
// settingsPath -> { mtimeMs, parsed } — parse results keyed by file mtime.
// Negative results ({ mtimeMs: null, parsed: null }) are cached too.
var settingsCache = /* @__PURE__ */ new Map();
// Read and parse a JSONC settings file with mtime-based caching.
// Returns null for symlinks (never followed), missing/non-regular files,
// unreadable files, or content that does not parse to a JSON object.
async function readJsoncSettings(settingsPath) {
  try {
    const lst = await lstat(settingsPath);
    // Refuse symlinked settings files — prevents redirecting the scanner at
    // arbitrary files via a planted symlink. Note: not cached, so the check
    // re-runs on every call.
    if (lst.isSymbolicLink()) {
      return null;
    }
  } catch {
    putSettingsCache(settingsPath, { mtimeMs: null, parsed: null });
    return null;
  }
  let mtimeMs = null;
  try {
    const st = await stat(settingsPath);
    if (!st.isFile()) {
      putSettingsCache(settingsPath, { mtimeMs: null, parsed: null });
      return null;
    }
    mtimeMs = st.mtimeMs;
  } catch (err) {
    // Only a confirmed "does not exist" is cached; transient stat errors
    // are not, so the next call retries.
    if (err.code === "ENOENT") {
      putSettingsCache(settingsPath, { mtimeMs: null, parsed: null });
    }
    return null;
  }
  // Cache hit: same mtime means the previously parsed value is still valid.
  const cached = settingsCache.get(settingsPath);
  if (cached && cached.mtimeMs === mtimeMs) {
    return cached.parsed;
  }
  let text;
  try {
    text = await readFile(settingsPath, "utf-8");
  } catch {
    return null;
  }
  const parsed = parseJsonc(text);
  // Only plain JSON objects are acceptable settings payloads.
  if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
    putSettingsCache(settingsPath, { mtimeMs, parsed: null });
    return null;
  }
  const payload = parsed;
  putSettingsCache(settingsPath, { mtimeMs, parsed: payload });
  return payload;
}
// Insert into the bounded cache, evicting the oldest entry (Map insertion
// order) when adding a new key at capacity.
function putSettingsCache(path35, entry) {
  if (!settingsCache.has(path35) && settingsCache.size >= MAX_SETTINGS_CACHE_SIZE) {
    settingsCache.delete(settingsCache.keys().next().value);
  }
  settingsCache.set(path35, entry);
}
17555
/**
 * Build dynamic scan entries from the Copilot-related keys in the
 * workspace's .vscode/settings.json. Each configured location is resolved
 * and sanitized via resolveCustomLocationPath, then de-duplicated per
 * (kind-or-category, absolute path) pair.
 */
async function readCopilotCustomLocations(workspaceRoot) {
  const settings = await readJsoncSettings(
    path14.join(workspaceRoot, ".vscode", "settings.json")
  );
  if (!settings) {
    return [];
  }
  const home = homedir();
  const dynamicEntries = [];
  const dedup = /* @__PURE__ */ new Set();
  for (const spec of COPILOT_CUSTOM_LOCATION_SETTINGS) {
    const locations = extractCustomLocations(settings[spec.key]);
    for (const location of locations) {
      const abs = resolveCustomLocationPath(location, workspaceRoot, home);
      if (!abs) {
        continue;
      }
      const isBundle = spec.kind === "skill-bundle";
      const dedupKey = isBundle ? `skill-bundle:${abs}` : `${spec.category}:${abs}`;
      if (dedup.has(dedupKey)) {
        continue;
      }
      dedup.add(dedupKey);
      if (isBundle) {
        dynamicEntries.push({
          kind: "skill-bundle",
          skillsRoot: ".",
          root: "absolute",
          absoluteBase: abs
        });
      } else {
        dynamicEntries.push({
          kind: "glob",
          glob: spec.glob,
          category: spec.category,
          root: "absolute",
          absoluteBase: abs
        });
      }
    }
  }
  return dynamicEntries;
}
17601
/**
 * Build dynamic scan entries from ~/.claude/settings.json. Only non-empty
 * plain string values are honored; ${workspaceFolder} has no meaning in a
 * home-level config and such values are skipped.
 */
async function readClaudeCodeCustomLocations() {
  const home = homedir();
  const settings = await readJsoncSettings(
    path14.join(home, ".claude", "settings.json")
  );
  if (!settings) {
    return [];
  }
  const dynamicEntries = [];
  for (const spec of CLAUDE_CODE_CUSTOM_LOCATION_SETTINGS) {
    const value = settings[spec.key];
    if (typeof value !== "string" || value.length === 0) {
      continue;
    }
    if (/\$\{workspaceFolder\}/.test(value)) {
      continue;
    }
    // Home doubles as the "workspace" root here: relative values resolve
    // against the home directory.
    const abs = resolveCustomLocationPath(value, home, home);
    if (!abs) {
      continue;
    }
    dynamicEntries.push({
      kind: "glob",
      glob: spec.glob,
      category: spec.category,
      root: "absolute",
      absoluteBase: abs
    });
  }
  return dynamicEntries;
}
17632
/**
 * Dispatch to the per-platform reader of user-configured scan locations.
 * Platforms without dynamic-location support yield an empty list.
 */
async function readCustomLocations(workspaceRoot, platform2) {
  switch (platform2) {
    case "copilot":
      return readCopilotCustomLocations(workspaceRoot);
    case "claude-code":
      return readClaudeCodeCustomLocations();
    default:
      return [];
  }
}
17641
+ function groupSkills(files, root, baseDir) {
17642
+ const skillFiles = files.filter((f) => f.category === SKILL_CATEGORY);
17643
+ const folderMap = /* @__PURE__ */ new Map();
17644
+ const standalone = [];
17645
+ for (const f of skillFiles) {
17646
+ const rel = path14.relative(baseDir, f.path).replace(/\\/g, "/");
17647
+ const skillsMarker = "skills/";
17648
+ const skillsIdx = rel.indexOf(skillsMarker);
17649
+ if (skillsIdx === -1) {
17650
+ standalone.push(f);
17651
+ continue;
17652
+ }
17653
+ const relFromSkills = rel.slice(skillsIdx + skillsMarker.length);
17654
+ const slashIdx = relFromSkills.indexOf("/");
17655
+ if (slashIdx === -1) {
17656
+ standalone.push(f);
17657
+ } else {
17658
+ const folderName = relFromSkills.slice(0, slashIdx);
17659
+ if (!folderMap.has(folderName)) {
17660
+ folderMap.set(folderName, []);
17661
+ }
17662
+ folderMap.get(folderName).push(f);
17663
+ }
17664
+ }
17665
+ const groups = [];
17666
+ for (const f of standalone) {
17667
+ const name = path14.basename(f.path, path14.extname(f.path));
17668
+ const sessionKey = `skill:${root}:${name}`;
17669
+ groups.push({
17670
+ name,
17671
+ root,
17672
+ skillPath: f.path,
17673
+ files: [f],
17674
+ isFolder: false,
17675
+ maxMtimeMs: f.mtimeMs,
17676
+ sessionKey
17677
+ });
17678
+ }
17679
+ for (const [folderName, folderFiles] of folderMap) {
17680
+ const maxMtimeMs = Math.max(...folderFiles.map((f) => f.mtimeMs));
17681
+ const anyFile = folderFiles[0];
17682
+ const rel = path14.relative(baseDir, anyFile.path).replace(/\\/g, "/");
17683
+ const skillsIdx = rel.indexOf("skills/");
17684
+ const skillRelPath = rel.slice(
17344
17685
  0,
17345
17686
  skillsIdx + "skills/".length + folderName.length
17346
17687
  );
17347
- const skillPath = path15.join(baseDir, skillRelPath);
17688
+ const skillPath = path14.join(baseDir, skillRelPath);
17348
17689
  const sessionKey = `skill:${root}:${folderName}`;
17349
17690
  groups.push({
17350
17691
  name: folderName,
@@ -17359,8 +17700,10 @@ function groupSkills(files, root, baseDir) {
17359
17700
  return groups;
17360
17701
  }
17361
17702
  async function scanContextFiles(workspaceRoot, platform2, sessionId) {
17362
- const entries = SCAN_PATHS[platform2];
17363
- if (!entries || entries.length === 0) {
17703
+ const staticEntries = SCAN_PATHS[platform2] ?? [];
17704
+ const dynamicEntries = await readCustomLocations(workspaceRoot, platform2);
17705
+ const entries = [...staticEntries, ...dynamicEntries];
17706
+ if (entries.length === 0) {
17364
17707
  return { regularFiles: [], skillGroups: [] };
17365
17708
  }
17366
17709
  const now = Date.now();
@@ -17372,17 +17715,14 @@ async function scanContextFiles(workspaceRoot, platform2, sessionId) {
17372
17715
  const home = homedir();
17373
17716
  const sessionEntry = sessionId ? sessionMtimes.get(sessionId) : void 0;
17374
17717
  const allFiles = [];
17375
- const skillFilesByRoot = /* @__PURE__ */ new Map();
17718
+ const skillBatches = /* @__PURE__ */ new Map();
17376
17719
  const seenPaths = /* @__PURE__ */ new Set();
17377
17720
  for (const entry of entries) {
17378
- const baseDir = entry.root === "home" ? home : workspaceRoot;
17379
- const matchedFiles = await globby2(entry.glob, {
17380
- cwd: baseDir,
17381
- absolute: true,
17382
- onlyFiles: true,
17383
- dot: true
17384
- });
17385
- for (const filePath of matchedFiles) {
17721
+ const baseDir = resolveBaseDir(entry, workspaceRoot, home);
17722
+ const scope = scopeForRoot(entry.root);
17723
+ const isDynamic = entry.root === "absolute";
17724
+ const matches = entry.kind === "skill-bundle" ? await enumerateSkillBundle(baseDir, entry.skillsRoot) : await enumerateGlob(entry.glob, baseDir, entry.category, isDynamic);
17725
+ for (const { path: filePath, category } of matches) {
17386
17726
  if (seenPaths.has(filePath)) {
17387
17727
  continue;
17388
17728
  }
@@ -17400,16 +17740,21 @@ async function scanContextFiles(workspaceRoot, platform2, sessionId) {
17400
17740
  path: filePath,
17401
17741
  content,
17402
17742
  sizeBytes,
17403
- category: entry.category,
17743
+ category,
17404
17744
  mtimeMs: fileStat.mtimeMs
17405
17745
  };
17406
- if (entry.category === SKILL_CATEGORY) {
17407
- let rootFiles = skillFilesByRoot.get(entry.root);
17408
- if (!rootFiles) {
17409
- rootFiles = [];
17410
- skillFilesByRoot.set(entry.root, rootFiles);
17746
+ if (scope) {
17747
+ fileEntry.scope = scope;
17748
+ }
17749
+ if (category === SKILL_CATEGORY) {
17750
+ const effectiveRoot = entry.root === "home" ? "home" : "workspace";
17751
+ const batchKey = `${effectiveRoot}:${baseDir}`;
17752
+ let batch = skillBatches.get(batchKey);
17753
+ if (!batch) {
17754
+ batch = { root: effectiveRoot, baseDir, files: [] };
17755
+ skillBatches.set(batchKey, batch);
17411
17756
  }
17412
- rootFiles.push(fileEntry);
17757
+ batch.files.push(fileEntry);
17413
17758
  } else {
17414
17759
  const prevMtime = sessionEntry?.files.get(filePath);
17415
17760
  if (prevMtime !== void 0 && fileStat.mtimeMs <= prevMtime) {
@@ -17417,13 +17762,12 @@ async function scanContextFiles(workspaceRoot, platform2, sessionId) {
17417
17762
  }
17418
17763
  allFiles.push(fileEntry);
17419
17764
  }
17420
- } catch (_err) {
17765
+ } catch {
17421
17766
  }
17422
17767
  }
17423
17768
  }
17424
17769
  const allSkillGroups = [];
17425
- for (const [root, files] of skillFilesByRoot) {
17426
- const baseDir = root === "home" ? home : workspaceRoot;
17770
+ for (const { root, baseDir, files } of skillBatches.values()) {
17427
17771
  const groups = groupSkills(files, root, baseDir);
17428
17772
  for (const group of groups) {
17429
17773
  if (sessionEntry) {
@@ -17437,13 +17781,715 @@ async function scanContextFiles(workspaceRoot, platform2, sessionId) {
17437
17781
  }
17438
17782
  return { regularFiles: allFiles, skillGroups: allSkillGroups };
17439
17783
  }
17784
/**
 * Expand one glob pattern under `cwd`, tagging every match with `category`.
 * Symlinks are never followed — a malicious committed repo could place a
 * symlink at a scanned path (e.g. .github/copilot-instructions.md →
 * ~/.aws/credentials) and the scanner would otherwise read and upload the
 * target. Dynamic (settings-derived) bases additionally cap traversal depth
 * so a misconfigured setting cannot enumerate the whole filesystem.
 */
async function enumerateGlob(pattern, cwd, category, isDynamic) {
  const options = {
    cwd,
    absolute: true,
    onlyFiles: true,
    dot: true,
    followSymbolicLinks: false
  };
  if (isDynamic) {
    options.deep = DYNAMIC_SCAN_MAX_DEPTH;
  }
  let matched;
  try {
    matched = await globby2(pattern, options);
  } catch {
    return [];
  }
  return matched.map((matchPath) => ({ path: matchPath, category }));
}
17805
/**
 * Enumerate skill files under `skillsRoot` (relative to `baseDir`):
 *  - folder skills: directories containing a SKILL.md, whose contents are
 *    collected to a bounded depth;
 *  - standalone skills: top-level *.md files in the skills dir;
 *  - symlinked entries, handled separately with their own category tags.
 * All direct results are tagged category "skill".
 */
async function enumerateSkillBundle(baseDir, skillsRoot) {
  const skillsDir = path14.resolve(baseDir, skillsRoot);
  let manifests;
  try {
    manifests = await globby2("*/SKILL.md", {
      cwd: skillsDir,
      absolute: true,
      onlyFiles: true,
      dot: true,
      followSymbolicLinks: false,
      deep: SKILL_MANIFEST_SCAN_DEPTH
    });
  } catch {
    return [];
  }
  // Collect every file inside one skill folder, best-effort.
  const listSkillDir = async (manifestPath) => {
    try {
      return await globby2("**/*", {
        cwd: path14.dirname(manifestPath),
        absolute: true,
        onlyFiles: true,
        dot: true,
        followSymbolicLinks: false,
        deep: SKILL_BUNDLE_MAX_DEPTH
      });
    } catch {
      return [];
    }
  };
  const perSkillFiles = await Promise.all(manifests.map(listSkillDir));
  const standaloneFiles = await enumerateStandaloneSkills(skillsDir);
  const symlinked = await enumerateSymlinkedSkills(skillsDir);
  const results = [];
  for (const filePath of perSkillFiles.flat()) {
    results.push({ path: filePath, category: "skill" });
  }
  for (const filePath of standaloneFiles) {
    results.push({ path: filePath, category: "skill" });
  }
  for (const item of symlinked) {
    results.push(item);
  }
  return results;
}
17845
/**
 * List top-level, non-hidden `*.md` files directly inside the skills dir;
 * these are treated as standalone single-file skills. Errors (e.g. the dir
 * not existing) yield an empty list.
 */
async function enumerateStandaloneSkills(skillsDir) {
  try {
    const markdownFiles = await globby2("*.md", {
      cwd: skillsDir,
      absolute: true,
      onlyFiles: true,
      dot: false,
      followSymbolicLinks: false
    });
    return markdownFiles;
  } catch {
    return [];
  }
}
17858
// Handle symlinked entries directly under the skills dir, which the
// followSymbolicLinks:false scans elsewhere deliberately skip. A symlinked
// skill folder is accepted only if it contains a SKILL.md; its real files
// are enumerated and re-mapped back onto the symlinked path so callers see
// paths under the skills dir. Symlinked *.md files become standalone skills.
// Unreadable or broken entries are silently skipped (best-effort scan).
async function enumerateSymlinkedSkills(skillsDir) {
  let dirEntries;
  try {
    dirEntries = await readdir(skillsDir, {
      withFileTypes: true,
      encoding: "utf8"
    });
  } catch {
    return [];
  }
  const results = [];
  for (const entry of dirEntries) {
    if (!entry.isSymbolicLink()) continue;
    const entryPath = path14.join(skillsDir, entry.name);
    try {
      // stat() follows the link, revealing what the target actually is.
      const st = await stat(entryPath);
      if (st.isDirectory()) {
        const hasManifest = await stat(path14.join(entryPath, "SKILL.md")).then(() => true).catch(() => false);
        if (!hasManifest) continue;
        const realDir = await realpath(entryPath);
        const realFiles = await globby2("**/*", {
          cwd: realDir,
          absolute: true,
          onlyFiles: true,
          dot: true,
          followSymbolicLinks: false,
          deep: SKILL_BUNDLE_MAX_DEPTH
        }).catch(() => []);
        for (const f of realFiles) {
          results.push({
            // Report the path via the symlink, not the resolved target.
            path: path14.join(entryPath, path14.relative(realDir, f)),
            category: "skill"
          });
        }
      } else if (st.isFile() && entry.name.endsWith(".md")) {
        results.push({ path: entryPath, category: "skill" });
      }
    } catch {
      // Broken symlink or unreadable target — ignore this entry.
    }
  }
  return results;
}
17900
// Max directory depth when expanding settings-derived (dynamic) globs.
var DYNAMIC_SCAN_MAX_DEPTH = 6;
// Depth used when locating `*/SKILL.md` manifests under a skills root.
var SKILL_MANIFEST_SCAN_DEPTH = 2;
// Max depth when collecting the files inside a single skill folder.
var SKILL_BUNDLE_MAX_DEPTH = 5;
17903
/**
 * Map a scan entry's `root` to the directory its globs resolve against:
 * "home" -> the home dir, "absolute" -> the entry's own absoluteBase,
 * anything else -> the workspace root.
 */
function resolveBaseDir(entry, workspaceRoot, home) {
  if (entry.root === "home") {
    return home;
  }
  if (entry.root === "absolute") {
    return entry.absoluteBase;
  }
  return workspaceRoot;
}
17913
/**
 * Files outside the workspace (home-dir or settings-derived absolute roots)
 * are tagged "user-global"; workspace files carry no scope (undefined).
 */
function scopeForRoot(root) {
  return root === "home" || root === "absolute" ? "user-global" : void 0;
}
17440
17919
/**
 * Derive a stable identifier for a context file: its slash-normalized path
 * relative to `baseDir`, or just the basename when the file lives outside
 * the base directory.
 */
function deriveIdentifier(filePath, baseDir) {
  const rel = path14.relative(baseDir, filePath);
  if (rel.startsWith("..")) {
    return path14.basename(filePath);
  }
  return rel.replace(/\\/g, "/");
}
17926
+
17927
// src/features/analysis/skill_quarantine/enumerateInstalledSkills.ts
/**
 * Enumerate every installed Claude Code skill — folder skills, standalone
 * skills, and agent-config files promoted to pseudo skill groups — together
 * with the content MD5 used for verdict lookups.
 */
async function enumerateInstalledSkills(workspaceRoot) {
  const scan = await scanContextFiles(workspaceRoot, "claude-code", void 0);
  const home = homedir2();
  // Promote each agent-config file to a single-file skill group so it goes
  // through the same hashing pipeline as real skills.
  const agentGroups = [];
  for (const file of scan.regularFiles) {
    if (file.category !== "agent-config") {
      continue;
    }
    agentGroups.push({
      name: path15.basename(file.path, path15.extname(file.path)),
      root: file.path.startsWith(home + path15.sep) ? "home" : "workspace",
      skillPath: file.path,
      files: [file],
      isFolder: false,
      maxMtimeMs: file.mtimeMs,
      sessionKey: `agent-config:${file.path}`
    });
  }
  const allGroups = [...scan.skillGroups, ...agentGroups];
  if (allGroups.length === 0) {
    return [];
  }
  const { skills } = await processContextFiles([], allGroups);
  return skills.map((skill) => {
    const segments = skill.group.skillPath.split(/[\\/]/);
    const origName = segments[segments.length - 1] || skill.group.name;
    return {
      skillPath: skill.group.skillPath,
      md5: skill.md5,
      origName,
      isFolder: skill.group.isFolder
    };
  });
}
17960
+
17961
// src/features/analysis/skill_quarantine/metrics.ts
// Structured-log metric names for the skill-quarantine feature. Values are
// attached to log lines as `metric` so events can be counted downstream.
var Metric = {
  /** A heartbeat triggered the quarantine check (after debounce). */
  CHECK_TRIGGERED: "skill_quarantine.check_triggered",
  /** The env-var kill switch skipped the run. */
  CHECK_DISABLED_ENV: "skill_quarantine.check_disabled_env",
  /** Verdict-query call failed. Fail-open. */
  QUERY_ERROR: "skill_quarantine.query_error",
  /** Count of skills enumerated in this run (histogram-ish). */
  SKILLS_CHECKED: "skill_quarantine.skills_checked",
  /** A skill was freshly quarantined. Tagged with shape. */
  QUARANTINED: "skill_quarantine.quarantined",
  /** Presence check hit; skill already quarantined. */
  ALREADY_QUARANTINED: "skill_quarantine.already_quarantined",
  /** Move step failed. Tagged with phase (stage | publish). */
  MOVE_ERROR: "skill_quarantine.move_error",
  /** Stub creation failed after the move succeeded. */
  STUB_ERROR: "skill_quarantine.stub_error",
  /** A stale staging dir was swept. */
  ORPHAN_SWEPT: "skill_quarantine.orphan_swept",
  /** Total run duration including I/O. */
  DURATION_MS: "skill_quarantine.duration_ms"
};
17984
+
17985
+ // src/features/analysis/skill_quarantine/quarantineSkill.ts
17986
+ import { randomUUID } from "crypto";
17987
+ import { existsSync as existsSync2 } from "fs";
17988
+ import {
17989
+ lstat as lstat2,
17990
+ mkdir,
17991
+ readdir as readdir2,
17992
+ readFile as readFile2,
17993
+ rename,
17994
+ rm,
17995
+ stat as stat2,
17996
+ unlink,
17997
+ writeFile
17998
+ } from "fs/promises";
17999
+ import path17 from "path";
18000
+ import { move } from "fs-extra";
18001
+
18002
+ // src/features/analysis/skill_quarantine/paths.ts
18003
+ import { homedir as homedir3 } from "os";
18004
+ import path16 from "path";
18005
// Root directory holding quarantined Claude skills, keyed by content MD5.
function getQuarantineRoot() {
  return path16.join(homedir3(), ".tracy", "quarantine", "claude", "skills");
}
// Per-skill quarantine directory: <quarantine-root>/<md5>.
function getQuarantinedHashDir(md5) {
  return path16.join(getQuarantineRoot(), md5);
}
// Final location of a quarantined skill: <quarantine-root>/<md5>/<origName>.
function getQuarantinedTargetPath(md5, origName) {
  return path16.join(getQuarantinedHashDir(md5), origName);
}
// Unique staging dir for an in-progress move: <md5>_tmp_<pid>_<uuid>.
function getStagingDir(md5, pid, uuid) {
  return path16.join(getQuarantineRoot(), `${md5}_tmp_${pid}_${uuid}`);
}
// Matches leftover staging dir names (32-hex md5 + `_tmp_`) for sweeping.
var STAGING_DIR_REGEX = /^([0-9a-f]{32})_tmp_/;
18018
+
18019
+ // src/features/analysis/skill_quarantine/stubTemplate.ts
18020
// Shown as the quarantine reason when an older scan has no summary field.
var LEGACY_SUMMARY_FALLBACK = "not available (scan predates current schema)";
// Render the Markdown stub left in place of a quarantined skill. The stub
// explains why the skill was flagged, where the original was moved, and how
// to restore/report a false positive. Template text is user-facing — do not
// edit casually.
function renderStub(params) {
  const folderOrFile = params.isFolder ? "skill folder" : "skill file";
  const reason = params.summary ?? LEGACY_SUMMARY_FALLBACK;
  return `# \u26D4 QUARANTINED BY TRACY

This skill was flagged **MALICIOUS** by the Mobb security scanner and has been
moved out of your skills folder. **Claude Code will not execute it** while this
stub is in place.

## Why this skill was flagged

- **Reason:** ${reason}
- **Scanner:** ${params.scannerName} @ ${params.scannerVersion}
- **Scanned at:** ${params.scannedAt}
- **Content hash (MD5):** \`${params.md5}\`

## Where the original is now

The original ${folderOrFile} has been moved to:

${params.quarantinedPath}

Nothing has been deleted. The contents are intact; only the location changed.

## If this is a false positive \u2014 how to recover

If you're confident this skill is safe and want to restore it:

mv ${params.quarantinedPath} ${params.origPath}

Tracy will not re-quarantine it as long as the directory
\`~/.tracy/quarantine/claude/skills/${params.md5}/\` still exists on your
machine (even if it's empty after you moved the contents out). If you delete
that directory entirely, the next heartbeat will re-evaluate the skill from
scratch.

## How to report a false positive

Please email **security@mobb.ai** with:

- The MD5 above
- A short description of why you believe the flag was wrong
- (Optional) the skill folder contents

Your report helps tune the scanner for everyone.
`;
}
18068
+
18069
+ // src/features/analysis/skill_quarantine/quarantineSkill.ts
18070
+ async function quarantineSkill(params) {
18071
+ const { skillPath, isFolder, md5, origName, verdict, log: log2 } = params;
18072
+ const hashDir = getQuarantinedHashDir(md5);
18073
+ if (existsSync2(hashDir)) {
18074
+ log2.debug(
18075
+ { md5, metric: Metric.ALREADY_QUARANTINED },
18076
+ "skill_quarantine: already quarantined, skipping"
18077
+ );
18078
+ return { status: "already_quarantined" };
18079
+ }
18080
+ let isSymlink = false;
18081
+ try {
18082
+ const lst = await lstat2(skillPath);
18083
+ isSymlink = lst.isSymbolicLink();
18084
+ } catch {
18085
+ }
18086
+ if (isSymlink) {
18087
+ const stubContent2 = renderStub({
18088
+ md5,
18089
+ isFolder,
18090
+ // Symlinks have no quarantine archive; note that in the stub.
18091
+ quarantinedPath: `(symlink at ${skillPath} replaced \u2014 original content was not moved)`,
18092
+ origPath: skillPath,
18093
+ summary: verdict.summary,
18094
+ scannerName: verdict.scannerName,
18095
+ scannerVersion: verdict.scannerVersion,
18096
+ scannedAt: verdict.scannedAt
18097
+ });
18098
+ try {
18099
+ await mkdir(hashDir, { recursive: true });
18100
+ if (isFolder) {
18101
+ const tmpDir = `${skillPath}.__tracy_tmp__`;
18102
+ await mkdir(tmpDir, { recursive: true });
18103
+ await writeFile(path17.join(tmpDir, "SKILL.md"), stubContent2, "utf8");
18104
+ await unlink(skillPath);
18105
+ await rename(tmpDir, skillPath);
18106
+ } else {
18107
+ const tmpFile = `${skillPath}.__tracy_tmp__`;
18108
+ await writeFile(tmpFile, stubContent2, "utf8");
18109
+ await unlink(skillPath);
18110
+ await rename(tmpFile, skillPath);
18111
+ }
18112
+ } catch (err) {
18113
+ log2.error(
18114
+ { err, md5, skillPath, metric: Metric.STUB_ERROR },
18115
+ "skill_quarantine: symlink stub write failed"
18116
+ );
18117
+ return { status: "stub_error", err };
18118
+ }
18119
+ await preRegisterStubMd5(skillPath, isFolder, log2);
18120
+ log2.info(
18121
+ {
18122
+ md5,
18123
+ verdict: verdict.verdict,
18124
+ shape: isFolder ? "folder" : "standalone",
18125
+ scanner: verdict.scannerName,
18126
+ scannerVersion: verdict.scannerVersion,
18127
+ metric: Metric.QUARANTINED
18128
+ },
18129
+ "skill_quarantine: quarantined (symlink)"
18130
+ );
18131
+ return { status: "quarantined" };
18132
+ }
18133
+ const stagingDir = getStagingDir(md5, process.pid, randomUUID());
18134
+ const stagingTarget = path17.join(stagingDir, origName);
18135
+ const finalTarget = getQuarantinedTargetPath(md5, origName);
18136
+ try {
18137
+ await mkdir(stagingDir, { recursive: true });
18138
+ } catch (err) {
18139
+ log2.error(
18140
+ { err, md5, metric: Metric.MOVE_ERROR, phase: "stage" },
18141
+ "skill_quarantine: failed to create staging dir"
18142
+ );
18143
+ return { status: "move_error", phase: "stage", err };
18144
+ }
18145
+ try {
18146
+ await move(skillPath, stagingTarget);
18147
+ } catch (err) {
18148
+ await tryRm(stagingDir);
18149
+ log2.error(
18150
+ { err, md5, metric: Metric.MOVE_ERROR, phase: "stage" },
18151
+ "skill_quarantine: phase-1 move failed"
18152
+ );
18153
+ return { status: "move_error", phase: "stage", err };
18154
+ }
18155
+ try {
18156
+ await rename(stagingDir, hashDir);
18157
+ } catch (err) {
18158
+ log2.error(
18159
+ {
18160
+ err,
18161
+ md5,
18162
+ stagingDir,
18163
+ metric: Metric.MOVE_ERROR,
18164
+ phase: "publish"
18165
+ },
18166
+ "skill_quarantine: phase-2 publish failed; staging dir preserved for manual recovery"
18167
+ );
18168
+ return { status: "move_error", phase: "publish", err };
18169
+ }
18170
+ const quarantinedPath = finalTarget;
18171
+ const stubContent = renderStub({
18172
+ md5,
18173
+ isFolder,
18174
+ quarantinedPath,
18175
+ origPath: skillPath,
18176
+ summary: verdict.summary,
18177
+ scannerName: verdict.scannerName,
18178
+ scannerVersion: verdict.scannerVersion,
18179
+ scannedAt: verdict.scannedAt
18180
+ });
18181
+ try {
18182
+ if (isFolder) {
18183
+ await mkdir(skillPath, { recursive: true });
18184
+ await writeFile(path17.join(skillPath, "SKILL.md"), stubContent, "utf8");
18185
+ } else {
18186
+ await writeFile(skillPath, stubContent, "utf8");
18187
+ }
18188
+ } catch (err) {
18189
+ log2.error(
18190
+ { err, md5, skillPath, metric: Metric.STUB_ERROR },
18191
+ "skill_quarantine: stub write failed; quarantine is still in place"
18192
+ );
18193
+ return { status: "stub_error", err };
18194
+ }
18195
+ await preRegisterStubMd5(skillPath, isFolder, log2);
18196
+ log2.info(
18197
+ {
18198
+ md5,
18199
+ verdict: verdict.verdict,
18200
+ shape: isFolder ? "folder" : "standalone",
18201
+ scanner: verdict.scannerName,
18202
+ scannerVersion: verdict.scannerVersion,
18203
+ metric: Metric.QUARANTINED
18204
+ },
18205
+ "skill_quarantine: quarantined"
18206
+ );
18207
+ return { status: "quarantined" };
18208
+ }
18209
+ async function preRegisterStubMd5(skillPath, isFolder, log2) {
18210
+ try {
18211
+ const stubEntries = await gatherStubEntries(skillPath, isFolder);
18212
+ const stubGroup = {
18213
+ name: path17.basename(skillPath).replace(/\.md$/i, ""),
18214
+ root: "workspace",
18215
+ skillPath,
18216
+ files: stubEntries,
18217
+ isFolder,
18218
+ maxMtimeMs: Date.now(),
18219
+ sessionKey: `quarantine-stub:${skillPath}`
18220
+ };
18221
+ const { skills } = await processContextFiles([], [stubGroup]);
18222
+ if (skills.length === 0) return;
18223
+ const stubMd5 = skills[0].md5;
18224
+ await mkdir(getQuarantinedHashDir(stubMd5), { recursive: true });
18225
+ } catch (err) {
18226
+ log2.warn(
18227
+ { err, skillPath },
18228
+ "skill_quarantine: failed to pre-register stub md5"
18229
+ );
18230
+ }
18231
+ }
18232
+ async function gatherStubEntries(skillPath, isFolder) {
18233
+ const now = Date.now();
18234
+ const target = isFolder ? path17.join(skillPath, "SKILL.md") : skillPath;
18235
+ const [st, content] = await Promise.all([
18236
+ stat2(target),
18237
+ readFile2(target, "utf8")
18238
+ ]);
18239
+ return [
18240
+ {
18241
+ name: isFolder ? "SKILL.md" : path17.basename(skillPath),
18242
+ path: target,
18243
+ content,
18244
+ sizeBytes: st.size,
18245
+ category: "skill",
18246
+ mtimeMs: now
18247
+ }
18248
+ ];
18249
+ }
18250
+ async function sweepOrphanStagingDirs(log2) {
18251
+ const root = getQuarantineRoot();
18252
+ let entries;
18253
+ try {
18254
+ entries = await readdir2(root);
18255
+ } catch (err) {
18256
+ if (err.code === "ENOENT") return 0;
18257
+ log2.warn({ err, root }, "skill_quarantine: orphan sweep readdir failed");
18258
+ return 0;
18259
+ }
18260
+ const now = Date.now();
18261
+ let swept = 0;
18262
+ for (const entry of entries) {
18263
+ if (!STAGING_DIR_REGEX.test(entry)) continue;
18264
+ const full = path17.join(root, entry);
18265
+ let mtimeMs;
18266
+ try {
18267
+ mtimeMs = (await stat2(full)).mtimeMs;
18268
+ } catch {
18269
+ continue;
18270
+ }
18271
+ if (now - mtimeMs < ORPHAN_SWEEP_GRACE_MS) continue;
18272
+ try {
18273
+ await rm(full, { recursive: true, force: true });
18274
+ swept += 1;
18275
+ log2.info(
18276
+ { path: full, metric: Metric.ORPHAN_SWEPT },
18277
+ "skill_quarantine: orphan swept"
18278
+ );
18279
+ } catch (err) {
18280
+ log2.warn({ err, path: full }, "skill_quarantine: orphan sweep rm failed");
18281
+ }
18282
+ }
18283
+ return swept;
18284
+ }
18285
+ async function tryRm(p) {
18286
+ try {
18287
+ await rm(p, { recursive: true, force: true });
18288
+ } catch {
18289
+ }
18290
+ }
18291
+
18292
+ // src/features/analysis/skill_quarantine/queryVerdicts.ts
18293
+ async function queryVerdicts(gqlClient, md5s, log2) {
18294
+ if (md5s.length === 0) {
18295
+ return /* @__PURE__ */ new Map();
18296
+ }
18297
+ try {
18298
+ const res = await gqlClient.skillVerdictsByMd5(md5s);
18299
+ const out = /* @__PURE__ */ new Map();
18300
+ for (const row of res.skillVerdictsByMd5) {
18301
+ out.set(row.md5, {
18302
+ md5: row.md5,
18303
+ verdict: row.verdict,
18304
+ summary: row.summary ?? null,
18305
+ scannerName: row.scannerName,
18306
+ scannerVersion: row.scannerVersion,
18307
+ scannedAt: row.scannedAt
18308
+ });
18309
+ }
18310
+ return out;
18311
+ } catch (err) {
18312
+ log2.warn(
18313
+ { err, md5_count: md5s.length, metric: "skill_quarantine.query_error" },
18314
+ "skill_quarantine: verdict query failed, failing open"
18315
+ );
18316
+ return /* @__PURE__ */ new Map();
18317
+ }
18318
+ }
18319
+
18320
+ // src/features/analysis/skill_quarantine/runQuarantineCheck.ts
18321
+ var lastRunAt = /* @__PURE__ */ new Map();
18322
+ var killSwitchLogged = false;
18323
+ async function runQuarantineCheckIfNeeded(opts) {
18324
+ const { sessionId, cwd, gqlClient, log: log2 } = opts;
18325
+ if (process.env[KILL_SWITCH_ENV] === "1") {
18326
+ if (!killSwitchLogged) {
18327
+ log2.warn(
18328
+ { metric: Metric.CHECK_DISABLED_ENV },
18329
+ `skill_quarantine: disabled by ${KILL_SWITCH_ENV}=1`
18330
+ );
18331
+ killSwitchLogged = true;
18332
+ }
18333
+ return;
18334
+ }
18335
+ const now = Date.now();
18336
+ const prev = lastRunAt.get(sessionId);
18337
+ if (prev !== void 0 && now - prev < HEARTBEAT_DEBOUNCE_MS) {
18338
+ return;
18339
+ }
18340
+ lastRunAt.set(sessionId, now);
18341
+ log2.info(
18342
+ { sessionId, metric: Metric.CHECK_TRIGGERED },
18343
+ "skill_quarantine: check start"
18344
+ );
18345
+ const t0 = Date.now();
18346
+ try {
18347
+ await sweepOrphanStagingDirs(log2);
18348
+ const installed = await enumerateInstalledSkills(cwd);
18349
+ log2.info(
18350
+ { sessionId, count: installed.length, metric: Metric.SKILLS_CHECKED },
18351
+ "skill_quarantine: skills enumerated"
18352
+ );
18353
+ if (installed.length === 0) {
18354
+ return;
18355
+ }
18356
+ const verdicts = await queryVerdicts(
18357
+ gqlClient,
18358
+ installed.map((s) => s.md5),
18359
+ log2
18360
+ );
18361
+ for (const skill of installed) {
18362
+ const verdict = verdicts.get(skill.md5);
18363
+ if (!verdict || verdict.verdict !== MALICIOUS_VERDICT) {
18364
+ continue;
18365
+ }
18366
+ try {
18367
+ await quarantineSkill({
18368
+ skillPath: skill.skillPath,
18369
+ isFolder: skill.isFolder,
18370
+ md5: skill.md5,
18371
+ origName: skill.origName,
18372
+ verdict,
18373
+ log: log2
18374
+ });
18375
+ } catch (err) {
18376
+ log2.error(
18377
+ { err, md5: skill.md5, skillPath: skill.skillPath },
18378
+ "skill_quarantine: unexpected error during quarantine"
18379
+ );
18380
+ }
18381
+ }
18382
+ } finally {
18383
+ log2.info(
18384
+ {
18385
+ sessionId,
18386
+ duration_ms: Date.now() - t0,
18387
+ metric: Metric.DURATION_MS
18388
+ },
18389
+ "skill_quarantine: check done"
18390
+ );
18391
+ }
18392
+ }
18393
+
18394
+ // src/features/claude_code/daemon_pid_file.ts
18395
+ import fs13 from "fs";
18396
+ import os4 from "os";
18397
+ import path18 from "path";
18398
+
18399
+ // src/features/claude_code/data_collector_constants.ts
18400
+ var CC_VERSION_CACHE_KEY = "claudeCode.detectedCCVersion";
18401
+ var CC_VERSION_CLI_KEY = "claudeCode.detectedCCVersionCli";
18402
+ var GQL_AUTH_TIMEOUT_MS = 15e3;
18403
+ var STALE_KEY_MAX_AGE_MS = 14 * 24 * 60 * 60 * 1e3;
18404
+ var CLEANUP_INTERVAL_MS = 24 * 60 * 60 * 1e3;
18405
+ var DAEMON_TTL_MS = 30 * 60 * 1e3;
18406
+ var DAEMON_POLL_INTERVAL_MS = (() => {
18407
+ const raw = Number(process.env["MOBB_DAEMON_POLL_INTERVAL_MS"]);
18408
+ if (!Number.isFinite(raw) || raw <= 0) return 1e4;
18409
+ return Math.min(Math.max(raw, 100), 6e4);
18410
+ })();
18411
+ var HEARTBEAT_STALE_MS = 3e4;
18412
+ var TRANSCRIPT_MAX_AGE_MS = 24 * 60 * 60 * 1e3;
18413
+ var DAEMON_CHUNK_SIZE = 50;
18414
+ var CONTEXT_SCAN_INTERVAL_MS = 5e3;
18415
+
18416
+ // src/features/claude_code/daemon_pid_file.ts
18417
+ function getMobbdevDir() {
18418
+ return path18.join(os4.homedir(), ".mobbdev");
18419
+ }
18420
+ function getDaemonCheckScriptPath() {
18421
+ return path18.join(getMobbdevDir(), "daemon-check.js");
17446
18422
  }
18423
+ var DaemonPidFile = class {
18424
+ constructor() {
18425
+ __publicField(this, "data", null);
18426
+ }
18427
+ get filePath() {
18428
+ return path18.join(getMobbdevDir(), "daemon.pid");
18429
+ }
18430
+ /** Ensure ~/.mobbdev/ directory exists. */
18431
+ ensureDir() {
18432
+ fs13.mkdirSync(getMobbdevDir(), { recursive: true });
18433
+ }
18434
+ /** Read the PID file from disk. Returns the parsed data or null. */
18435
+ read() {
18436
+ try {
18437
+ const raw = fs13.readFileSync(this.filePath, "utf8");
18438
+ const parsed = JSON.parse(raw);
18439
+ if (typeof parsed.pid !== "number" || typeof parsed.startedAt !== "number" || typeof parsed.heartbeat !== "number") {
18440
+ this.data = null;
18441
+ } else {
18442
+ this.data = parsed;
18443
+ }
18444
+ } catch {
18445
+ this.data = null;
18446
+ }
18447
+ return this.data;
18448
+ }
18449
+ /** Write a new PID file for the given process id. */
18450
+ write(pid, version) {
18451
+ this.data = {
18452
+ pid,
18453
+ startedAt: Date.now(),
18454
+ heartbeat: Date.now(),
18455
+ version
18456
+ };
18457
+ fs13.writeFileSync(this.filePath, JSON.stringify(this.data), "utf8");
18458
+ }
18459
+ /** Update the heartbeat timestamp of the current PID file. */
18460
+ updateHeartbeat() {
18461
+ if (!this.data) this.read();
18462
+ if (!this.data) return;
18463
+ this.data.heartbeat = Date.now();
18464
+ fs13.writeFileSync(this.filePath, JSON.stringify(this.data), "utf8");
18465
+ }
18466
+ /** Check whether the previously read PID data represents a live daemon. */
18467
+ isAlive() {
18468
+ if (!this.data) return false;
18469
+ if (Date.now() - this.data.heartbeat > HEARTBEAT_STALE_MS) return false;
18470
+ try {
18471
+ process.kill(this.data.pid, 0);
18472
+ return true;
18473
+ } catch {
18474
+ return false;
18475
+ }
18476
+ }
18477
+ /** Remove the PID file from disk (best-effort). */
18478
+ remove() {
18479
+ this.data = null;
18480
+ try {
18481
+ fs13.unlinkSync(this.filePath);
18482
+ } catch {
18483
+ }
18484
+ }
18485
+ };
18486
+
18487
+ // src/features/claude_code/data_collector.ts
18488
+ import { execFile } from "child_process";
18489
+ import { createHash as createHash3 } from "crypto";
18490
+ import { access, open as open4, readdir as readdir3, readFile as readFile3, unlink as unlink2 } from "fs/promises";
18491
+ import path19 from "path";
18492
+ import { promisify } from "util";
17447
18493
 
17448
18494
  // src/features/analysis/context_file_uploader.ts
17449
18495
  import pLimit7 from "p-limit";
@@ -17707,8 +18753,8 @@ function createConfigstoreStream(store, opts) {
17707
18753
  heartbeatBuffer.length = 0;
17708
18754
  }
17709
18755
  }
17710
- function setScopePath(path32) {
17711
- scopePath = path32;
18756
+ function setScopePath(path35) {
18757
+ scopePath = path35;
17712
18758
  }
17713
18759
  return { writable, flush, setScopePath };
17714
18760
  }
@@ -17932,7 +18978,7 @@ function createLogger(config2) {
17932
18978
 
17933
18979
  // src/features/claude_code/hook_logger.ts
17934
18980
  var DD_RUM_TOKEN = true ? "pubf59c0182545bfb4c299175119f1abf9b" : "";
17935
- var CLI_VERSION = true ? "1.3.7" : "unknown";
18981
+ var CLI_VERSION = true ? "1.4.1" : "unknown";
17936
18982
  var NAMESPACE = "mobbdev-claude-code-hook-logs";
17937
18983
  var claudeCodeVersion;
17938
18984
  function buildDdTags() {
@@ -18023,12 +19069,12 @@ async function resolveTranscriptPath(transcriptPath, sessionId) {
18023
19069
  return transcriptPath;
18024
19070
  } catch {
18025
19071
  }
18026
- const filename = path16.basename(transcriptPath);
18027
- const dirName = path16.basename(path16.dirname(transcriptPath));
18028
- const projectsDir = path16.dirname(path16.dirname(transcriptPath));
19072
+ const filename = path19.basename(transcriptPath);
19073
+ const dirName = path19.basename(path19.dirname(transcriptPath));
19074
+ const projectsDir = path19.dirname(path19.dirname(transcriptPath));
18029
19075
  const baseDirName = dirName.replace(/[-.]claude-worktrees-.+$/, "");
18030
19076
  if (baseDirName !== dirName) {
18031
- const candidate = path16.join(projectsDir, baseDirName, filename);
19077
+ const candidate = path19.join(projectsDir, baseDirName, filename);
18032
19078
  try {
18033
19079
  await access(candidate);
18034
19080
  hookLog.info(
@@ -18047,10 +19093,10 @@ async function resolveTranscriptPath(transcriptPath, sessionId) {
18047
19093
  }
18048
19094
  }
18049
19095
  try {
18050
- const dirs = await readdir(projectsDir);
19096
+ const dirs = await readdir3(projectsDir);
18051
19097
  for (const dir of dirs) {
18052
19098
  if (dir === dirName) continue;
18053
- const candidate = path16.join(projectsDir, dir, filename);
19099
+ const candidate = path19.join(projectsDir, dir, filename);
18054
19100
  try {
18055
19101
  await access(candidate);
18056
19102
  hookLog.info(
@@ -18081,9 +19127,9 @@ async function readNewTranscriptEntries(transcriptPath, sessionId, sessionStore,
18081
19127
  if (cursor?.byteOffset) {
18082
19128
  const fh = await open4(transcriptPath, "r");
18083
19129
  try {
18084
- const stat3 = await fh.stat();
18085
- fileSize = stat3.size;
18086
- if (cursor.byteOffset >= stat3.size) {
19130
+ const stat4 = await fh.stat();
19131
+ fileSize = stat4.size;
19132
+ if (cursor.byteOffset >= stat4.size) {
18087
19133
  hookLog.info({ data: { sessionId } }, "No new data in transcript file");
18088
19134
  return {
18089
19135
  entries: [],
@@ -18091,7 +19137,7 @@ async function readNewTranscriptEntries(transcriptPath, sessionId, sessionStore,
18091
19137
  resolvedTranscriptPath: transcriptPath
18092
19138
  };
18093
19139
  }
18094
- const buf = Buffer.alloc(stat3.size - cursor.byteOffset);
19140
+ const buf = Buffer.alloc(stat4.size - cursor.byteOffset);
18095
19141
  await fh.read(buf, 0, buf.length, cursor.byteOffset);
18096
19142
  content = buf.toString("utf-8");
18097
19143
  } finally {
@@ -18109,7 +19155,7 @@ async function readNewTranscriptEntries(transcriptPath, sessionId, sessionStore,
18109
19155
  "Read transcript file from offset"
18110
19156
  );
18111
19157
  } else {
18112
- content = await readFile2(transcriptPath, "utf-8");
19158
+ content = await readFile3(transcriptPath, "utf-8");
18113
19159
  fileSize = Buffer.byteLength(content, "utf-8");
18114
19160
  lineIndexOffset = 0;
18115
19161
  hookLog.debug(
@@ -18231,13 +19277,13 @@ async function cleanupStaleSessions(configDir) {
18231
19277
  const now = Date.now();
18232
19278
  const prefix = getSessionFilePrefix();
18233
19279
  try {
18234
- const files = await readdir(configDir);
19280
+ const files = await readdir3(configDir);
18235
19281
  let deletedCount = 0;
18236
19282
  for (const file of files) {
18237
19283
  if (!file.startsWith(prefix) || !file.endsWith(".json")) continue;
18238
- const filePath = path16.join(configDir, file);
19284
+ const filePath = path19.join(configDir, file);
18239
19285
  try {
18240
- const content = JSON.parse(await readFile2(filePath, "utf-8"));
19286
+ const content = JSON.parse(await readFile3(filePath, "utf-8"));
18241
19287
  let newest = 0;
18242
19288
  const cursors = content["cursor"];
18243
19289
  if (cursors && typeof cursors === "object") {
@@ -18247,7 +19293,7 @@ async function cleanupStaleSessions(configDir) {
18247
19293
  }
18248
19294
  }
18249
19295
  if (newest > 0 && now - newest > STALE_KEY_MAX_AGE_MS) {
18250
- await unlink(filePath);
19296
+ await unlink2(filePath);
18251
19297
  deletedCount++;
18252
19298
  }
18253
19299
  } catch {
@@ -18387,16 +19433,6 @@ async function processTranscript(input, sessionStore, log2, maxEntries = DAEMON_
18387
19433
  entriesSkipped: filteredOut,
18388
19434
  claudeCodeVersion: getClaudeCodeVersion()
18389
19435
  });
18390
- if (input.cwd) {
18391
- uploadContextFilesIfNeeded(
18392
- input.session_id,
18393
- input.cwd,
18394
- gqlClient,
18395
- log2
18396
- ).catch((err) => {
18397
- log2.error({ data: { err } }, "uploadContextFilesIfNeeded failed");
18398
- });
18399
- }
18400
19436
  return {
18401
19437
  entriesUploaded: entries.length,
18402
19438
  entriesSkipped: filteredOut,
@@ -18483,14 +19519,14 @@ async function uploadContextFilesIfNeeded(sessionId, cwd, gqlClient, log2) {
18483
19519
  import fs14 from "fs";
18484
19520
  import fsPromises4 from "fs/promises";
18485
19521
  import os6 from "os";
18486
- import path17 from "path";
19522
+ import path20 from "path";
18487
19523
  import chalk11 from "chalk";
18488
19524
 
18489
19525
  // src/features/claude_code/daemon-check-shim.tmpl.js
18490
19526
  var daemon_check_shim_tmpl_default = "// Mobb daemon shim \u2014 checks if daemon is alive, spawns if dead.\n// Auto-generated by mobbdev CLI. Do not edit.\nvar fs = require('fs')\nvar spawn = require('child_process').spawn\nvar path = require('path')\nvar os = require('os')\n\nvar pidFile = path.join(os.homedir(), '.mobbdev', 'daemon.pid')\nvar HEARTBEAT_STALE_MS = __HEARTBEAT_STALE_MS__\n\ntry {\n var data = JSON.parse(fs.readFileSync(pidFile, 'utf8'))\n if (Date.now() - data.heartbeat > HEARTBEAT_STALE_MS) throw new Error('stale')\n process.kill(data.pid, 0) // throws ESRCH if the process is gone\n} catch (e) {\n var localCli = process.env.MOBBDEV_LOCAL_CLI\n var child = localCli\n ? spawn('node', [localCli, 'claude-code-daemon'], { detached: true, stdio: 'ignore', windowsHide: true })\n : spawn('npx', ['--yes', 'mobbdev@latest', 'claude-code-daemon'], { detached: true, stdio: 'ignore', shell: true, windowsHide: true })\n child.unref()\n}\n";
18491
19527
 
18492
19528
  // src/features/claude_code/install_hook.ts
18493
- var CLAUDE_SETTINGS_PATH = path17.join(os6.homedir(), ".claude", "settings.json");
19529
+ var CLAUDE_SETTINGS_PATH = path20.join(os6.homedir(), ".claude", "settings.json");
18494
19530
  var RECOMMENDED_MATCHER = "*";
18495
19531
  async function claudeSettingsExists() {
18496
19532
  try {
@@ -18636,18 +19672,18 @@ async function installMobbHooks(options = {}) {
18636
19672
  }
18637
19673
 
18638
19674
  // src/features/claude_code/transcript_scanner.ts
18639
- import { open as open5, readdir as readdir2, stat as stat2 } from "fs/promises";
19675
+ import { open as open5, readdir as readdir4, stat as stat3 } from "fs/promises";
18640
19676
  import os7 from "os";
18641
- import path18 from "path";
19677
+ import path21 from "path";
18642
19678
  var UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
18643
19679
  function getClaudeProjectsDirs() {
18644
19680
  const dirs = [];
18645
19681
  const configDir = process.env["CLAUDE_CONFIG_DIR"];
18646
19682
  if (configDir) {
18647
- dirs.push(path18.join(configDir, "projects"));
19683
+ dirs.push(path21.join(configDir, "projects"));
18648
19684
  }
18649
- dirs.push(path18.join(os7.homedir(), ".config", "claude", "projects"));
18650
- dirs.push(path18.join(os7.homedir(), ".claude", "projects"));
19685
+ dirs.push(path21.join(os7.homedir(), ".config", "claude", "projects"));
19686
+ dirs.push(path21.join(os7.homedir(), ".claude", "projects"));
18651
19687
  return dirs;
18652
19688
  }
18653
19689
  async function collectJsonlFiles(files, dir, projectDir, seen, now, results) {
@@ -18655,12 +19691,12 @@ async function collectJsonlFiles(files, dir, projectDir, seen, now, results) {
18655
19691
  if (!file.endsWith(".jsonl")) continue;
18656
19692
  const sessionId = file.replace(".jsonl", "");
18657
19693
  if (!UUID_RE.test(sessionId)) continue;
18658
- const filePath = path18.join(dir, file);
19694
+ const filePath = path21.join(dir, file);
18659
19695
  if (seen.has(filePath)) continue;
18660
19696
  seen.add(filePath);
18661
19697
  let fileStat;
18662
19698
  try {
18663
- fileStat = await stat2(filePath);
19699
+ fileStat = await stat3(filePath);
18664
19700
  } catch {
18665
19701
  continue;
18666
19702
  }
@@ -18681,33 +19717,33 @@ async function scanForTranscripts(projectsDirs = getClaudeProjectsDirs()) {
18681
19717
  for (const projectsDir of projectsDirs) {
18682
19718
  let projectDirs;
18683
19719
  try {
18684
- projectDirs = await readdir2(projectsDir);
19720
+ projectDirs = await readdir4(projectsDir);
18685
19721
  } catch {
18686
19722
  continue;
18687
19723
  }
18688
19724
  for (const projName of projectDirs) {
18689
- const projPath = path18.join(projectsDir, projName);
19725
+ const projPath = path21.join(projectsDir, projName);
18690
19726
  let projStat;
18691
19727
  try {
18692
- projStat = await stat2(projPath);
19728
+ projStat = await stat3(projPath);
18693
19729
  } catch {
18694
19730
  continue;
18695
19731
  }
18696
19732
  if (!projStat.isDirectory()) continue;
18697
19733
  let files;
18698
19734
  try {
18699
- files = await readdir2(projPath);
19735
+ files = await readdir4(projPath);
18700
19736
  } catch {
18701
19737
  continue;
18702
19738
  }
18703
19739
  await collectJsonlFiles(files, projPath, projPath, seen, now, results);
18704
19740
  for (const entry of files) {
18705
19741
  if (!UUID_RE.test(entry)) continue;
18706
- const subagentsDir = path18.join(projPath, entry, "subagents");
19742
+ const subagentsDir = path21.join(projPath, entry, "subagents");
18707
19743
  try {
18708
- const s = await stat2(subagentsDir);
19744
+ const s = await stat3(subagentsDir);
18709
19745
  if (!s.isDirectory()) continue;
18710
- const subFiles = await readdir2(subagentsDir);
19746
+ const subFiles = await readdir4(subagentsDir);
18711
19747
  await collectJsonlFiles(
18712
19748
  subFiles,
18713
19749
  subagentsDir,
@@ -18791,6 +19827,8 @@ async function startDaemon() {
18791
19827
  const startedAt = Date.now();
18792
19828
  const lastSeen = /* @__PURE__ */ new Map();
18793
19829
  let cleanupConfigDir;
19830
+ const sessionCwdCache = /* @__PURE__ */ new Map();
19831
+ let lastContextScanMs = 0;
18794
19832
  while (true) {
18795
19833
  if (shuttingDown) {
18796
19834
  await gracefulExit(0, "signal");
@@ -18804,9 +19842,29 @@ async function startDaemon() {
18804
19842
  for (const transcript of changed) {
18805
19843
  const sessionStore = createSessionConfigStore(transcript.sessionId);
18806
19844
  if (!cleanupConfigDir) {
18807
- cleanupConfigDir = path19.dirname(sessionStore.path);
19845
+ cleanupConfigDir = path22.dirname(sessionStore.path);
19846
+ }
19847
+ await drainTranscript(
19848
+ transcript,
19849
+ sessionStore,
19850
+ gqlClient,
19851
+ sessionCwdCache
19852
+ );
19853
+ }
19854
+ if (lastSeen.size > 0) {
19855
+ for (const filePath of sessionCwdCache.keys()) {
19856
+ if (!lastSeen.has(filePath)) sessionCwdCache.delete(filePath);
19857
+ }
19858
+ }
19859
+ const now = Date.now();
19860
+ if (now - lastContextScanMs >= CONTEXT_SCAN_INTERVAL_MS) {
19861
+ lastContextScanMs = now;
19862
+ for (const { sessionId, cwd } of sessionCwdCache.values()) {
19863
+ const log2 = createScopedHookLog(cwd, { daemonMode: true });
19864
+ uploadContextFilesIfNeeded(sessionId, cwd, gqlClient, log2).catch(
19865
+ (err) => log2.warn({ err }, "Context file scan failed")
19866
+ );
18808
19867
  }
18809
- await drainTranscript(transcript, sessionStore, gqlClient);
18810
19868
  }
18811
19869
  if (cleanupConfigDir) {
18812
19870
  await cleanupStaleSessions(cleanupConfigDir);
@@ -18847,11 +19905,17 @@ async function authenticateOrExit(exit) {
18847
19905
  return exit(1, "auth failed");
18848
19906
  }
18849
19907
  }
18850
- async function drainTranscript(transcript, sessionStore, gqlClient) {
19908
+ async function drainTranscript(transcript, sessionStore, gqlClient, sessionCwdCache) {
18851
19909
  const cwd = await extractCwdFromTranscript(transcript.filePath);
18852
19910
  const log2 = createScopedHookLog(cwd ?? transcript.projectDir, {
18853
19911
  daemonMode: true
18854
19912
  });
19913
+ if (cwd) {
19914
+ sessionCwdCache.set(transcript.filePath, {
19915
+ sessionId: transcript.sessionId,
19916
+ cwd
19917
+ });
19918
+ }
18855
19919
  try {
18856
19920
  let hasMore = true;
18857
19921
  while (hasMore) {
@@ -18886,6 +19950,19 @@ async function drainTranscript(transcript, sessionStore, gqlClient) {
18886
19950
  "Error processing transcript \u2014 skipping"
18887
19951
  );
18888
19952
  }
19953
+ if (cwd) {
19954
+ runQuarantineCheckIfNeeded({
19955
+ sessionId: transcript.sessionId,
19956
+ cwd,
19957
+ gqlClient,
19958
+ log: log2
19959
+ }).catch((err) => {
19960
+ hookLog.warn(
19961
+ { err, data: { sessionId: transcript.sessionId } },
19962
+ "runQuarantineCheckIfNeeded failed"
19963
+ );
19964
+ });
19965
+ }
18889
19966
  }
18890
19967
  async function detectChangedTranscripts(lastSeen) {
18891
19968
  const transcripts = await scanForTranscripts();
@@ -19062,8 +20139,8 @@ var WorkspaceService = class {
19062
20139
  * Sets a known workspace path that was discovered through successful validation
19063
20140
  * @param path The validated workspace path to store
19064
20141
  */
19065
- static setKnownWorkspacePath(path32) {
19066
- this.knownWorkspacePath = path32;
20142
+ static setKnownWorkspacePath(path35) {
20143
+ this.knownWorkspacePath = path35;
19067
20144
  }
19068
20145
  /**
19069
20146
  * Gets the known workspace path that was previously validated
@@ -19924,7 +21001,7 @@ async function createAuthenticatedMcpGQLClient({
19924
21001
  import { execSync as execSync2 } from "child_process";
19925
21002
  import fs15 from "fs";
19926
21003
  import os8 from "os";
19927
- import path20 from "path";
21004
+ import path23 from "path";
19928
21005
  var IDEs = ["cursor", "windsurf", "webstorm", "vscode", "claude"];
19929
21006
  var runCommand = (cmd) => {
19930
21007
  try {
@@ -19939,7 +21016,7 @@ var gitInfo = {
19939
21016
  };
19940
21017
  var getClaudeWorkspacePaths = () => {
19941
21018
  const home = os8.homedir();
19942
- const claudeIdePath = path20.join(home, ".claude", "ide");
21019
+ const claudeIdePath = path23.join(home, ".claude", "ide");
19943
21020
  const workspacePaths = [];
19944
21021
  if (!fs15.existsSync(claudeIdePath)) {
19945
21022
  return workspacePaths;
@@ -19947,7 +21024,7 @@ var getClaudeWorkspacePaths = () => {
19947
21024
  try {
19948
21025
  const lockFiles = fs15.readdirSync(claudeIdePath).filter((file) => file.endsWith(".lock"));
19949
21026
  for (const lockFile of lockFiles) {
19950
- const lockFilePath = path20.join(claudeIdePath, lockFile);
21027
+ const lockFilePath = path23.join(claudeIdePath, lockFile);
19951
21028
  try {
19952
21029
  const lockContent = JSON.parse(fs15.readFileSync(lockFilePath, "utf8"));
19953
21030
  if (lockContent.workspaceFolders && Array.isArray(lockContent.workspaceFolders)) {
@@ -19972,24 +21049,24 @@ var getMCPConfigPaths = (hostName) => {
19972
21049
  switch (hostName.toLowerCase()) {
19973
21050
  case "cursor":
19974
21051
  return [
19975
- path20.join(currentDir, ".cursor", "mcp.json"),
21052
+ path23.join(currentDir, ".cursor", "mcp.json"),
19976
21053
  // local first
19977
- path20.join(home, ".cursor", "mcp.json")
21054
+ path23.join(home, ".cursor", "mcp.json")
19978
21055
  ];
19979
21056
  case "windsurf":
19980
21057
  return [
19981
- path20.join(currentDir, ".codeium", "mcp_config.json"),
21058
+ path23.join(currentDir, ".codeium", "mcp_config.json"),
19982
21059
  // local first
19983
- path20.join(home, ".codeium", "windsurf", "mcp_config.json")
21060
+ path23.join(home, ".codeium", "windsurf", "mcp_config.json")
19984
21061
  ];
19985
21062
  case "webstorm":
19986
21063
  return [];
19987
21064
  case "visualstudiocode":
19988
21065
  case "vscode":
19989
21066
  return [
19990
- path20.join(currentDir, ".vscode", "mcp.json"),
21067
+ path23.join(currentDir, ".vscode", "mcp.json"),
19991
21068
  // local first
19992
- process.platform === "win32" ? path20.join(home, "AppData", "Roaming", "Code", "User", "mcp.json") : path20.join(
21069
+ process.platform === "win32" ? path23.join(home, "AppData", "Roaming", "Code", "User", "mcp.json") : path23.join(
19993
21070
  home,
19994
21071
  "Library",
19995
21072
  "Application Support",
@@ -20000,13 +21077,13 @@ var getMCPConfigPaths = (hostName) => {
20000
21077
  ];
20001
21078
  case "claude": {
20002
21079
  const claudePaths = [
20003
- path20.join(currentDir, ".claude.json"),
21080
+ path23.join(currentDir, ".claude.json"),
20004
21081
  // local first
20005
- path20.join(home, ".claude.json")
21082
+ path23.join(home, ".claude.json")
20006
21083
  ];
20007
21084
  const workspacePaths = getClaudeWorkspacePaths();
20008
21085
  for (const workspacePath of workspacePaths) {
20009
- claudePaths.push(path20.join(workspacePath, ".mcp.json"));
21086
+ claudePaths.push(path23.join(workspacePath, ".mcp.json"));
20010
21087
  }
20011
21088
  return claudePaths;
20012
21089
  }
@@ -20167,10 +21244,10 @@ var getHostInfo = (additionalMcpList) => {
20167
21244
  const ideConfigPaths = /* @__PURE__ */ new Set();
20168
21245
  for (const ide of IDEs) {
20169
21246
  const configPaths = getMCPConfigPaths(ide);
20170
- configPaths.forEach((path32) => ideConfigPaths.add(path32));
21247
+ configPaths.forEach((path35) => ideConfigPaths.add(path35));
20171
21248
  }
20172
21249
  const uniqueAdditionalPaths = additionalMcpList.filter(
20173
- (path32) => !ideConfigPaths.has(path32)
21250
+ (path35) => !ideConfigPaths.has(path35)
20174
21251
  );
20175
21252
  for (const ide of IDEs) {
20176
21253
  const cfg = readMCPConfig(ide);
@@ -20292,7 +21369,7 @@ init_configs();
20292
21369
  init_configs();
20293
21370
  import fs16 from "fs";
20294
21371
  import os9 from "os";
20295
- import path21 from "path";
21372
+ import path24 from "path";
20296
21373
  var MAX_DEPTH = 2;
20297
21374
  var patterns = ["mcp", "claude"];
20298
21375
  var isFileMatch = (fileName) => {
@@ -20312,7 +21389,7 @@ var searchDir = async (dir, depth = 0) => {
20312
21389
  if (depth > MAX_DEPTH) return results;
20313
21390
  const entries = await fs16.promises.readdir(dir, { withFileTypes: true }).catch(() => []);
20314
21391
  for (const entry of entries) {
20315
- const fullPath = path21.join(dir, entry.name);
21392
+ const fullPath = path24.join(dir, entry.name);
20316
21393
  if (entry.isFile() && isFileMatch(entry.name)) {
20317
21394
  results.push(fullPath);
20318
21395
  } else if (entry.isDirectory()) {
@@ -20329,14 +21406,14 @@ var findSystemMCPConfigs = async () => {
20329
21406
  const home = os9.homedir();
20330
21407
  const platform2 = os9.platform();
20331
21408
  const knownDirs = platform2 === "win32" ? [
20332
- path21.join(home, ".cursor"),
20333
- path21.join(home, "Documents"),
20334
- path21.join(home, "Downloads")
21409
+ path24.join(home, ".cursor"),
21410
+ path24.join(home, "Documents"),
21411
+ path24.join(home, "Downloads")
20335
21412
  ] : [
20336
- path21.join(home, ".cursor"),
20337
- process.env["XDG_CONFIG_HOME"] || path21.join(home, ".config"),
20338
- path21.join(home, "Documents"),
20339
- path21.join(home, "Downloads")
21413
+ path24.join(home, ".cursor"),
21414
+ process.env["XDG_CONFIG_HOME"] || path24.join(home, ".config"),
21415
+ path24.join(home, "Documents"),
21416
+ path24.join(home, "Downloads")
20340
21417
  ];
20341
21418
  const timeoutPromise = new Promise(
20342
21419
  (resolve) => setTimeout(() => {
@@ -22752,13 +23829,13 @@ For a complete security audit workflow, use the \`full-security-audit\` prompt.
22752
23829
  // src/mcp/services/McpDetectionService/CursorMcpDetectionService.ts
22753
23830
  import * as fs19 from "fs";
22754
23831
  import * as os12 from "os";
22755
- import * as path23 from "path";
23832
+ import * as path26 from "path";
22756
23833
 
22757
23834
  // src/mcp/services/McpDetectionService/BaseMcpDetectionService.ts
22758
23835
  init_configs();
22759
23836
  import * as fs18 from "fs";
22760
23837
  import fetch7 from "node-fetch";
22761
- import * as path22 from "path";
23838
+ import * as path25 from "path";
22762
23839
 
22763
23840
  // src/mcp/services/McpDetectionService/McpDetectionServiceUtils.ts
22764
23841
  import * as fs17 from "fs";
@@ -22767,14 +23844,14 @@ import * as os11 from "os";
22767
23844
  // src/mcp/services/McpDetectionService/VscodeMcpDetectionService.ts
22768
23845
  import * as fs20 from "fs";
22769
23846
  import * as os13 from "os";
22770
- import * as path24 from "path";
23847
+ import * as path27 from "path";
22771
23848
 
22772
23849
  // src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesTool.ts
22773
23850
  import { z as z42 } from "zod";
22774
23851
 
22775
23852
  // src/mcp/services/PathValidation.ts
22776
23853
  import fs21 from "fs";
22777
- import path25 from "path";
23854
+ import path28 from "path";
22778
23855
  async function validatePath(inputPath) {
22779
23856
  logDebug("Validating MCP path", { inputPath });
22780
23857
  if (/^\/[a-zA-Z]:\//.test(inputPath)) {
@@ -22806,7 +23883,7 @@ async function validatePath(inputPath) {
22806
23883
  logError(error);
22807
23884
  return { isValid: false, error, path: inputPath };
22808
23885
  }
22809
- const normalizedPath = path25.normalize(inputPath);
23886
+ const normalizedPath = path28.normalize(inputPath);
22810
23887
  if (normalizedPath.includes("..")) {
22811
23888
  const error = `Normalized path contains path traversal patterns: ${inputPath}`;
22812
23889
  logError(error);
@@ -23458,7 +24535,7 @@ init_configs();
23458
24535
  import fs22 from "fs/promises";
23459
24536
  import nodePath from "path";
23460
24537
  var getLocalFiles = async ({
23461
- path: path32,
24538
+ path: path35,
23462
24539
  maxFileSize = MCP_MAX_FILE_SIZE,
23463
24540
  maxFiles,
23464
24541
  isAllFilesScan,
@@ -23466,17 +24543,17 @@ var getLocalFiles = async ({
23466
24543
  scanRecentlyChangedFiles
23467
24544
  }) => {
23468
24545
  logDebug(`[${scanContext}] Starting getLocalFiles`, {
23469
- path: path32,
24546
+ path: path35,
23470
24547
  maxFileSize,
23471
24548
  maxFiles,
23472
24549
  isAllFilesScan,
23473
24550
  scanRecentlyChangedFiles
23474
24551
  });
23475
24552
  try {
23476
- const resolvedRepoPath = await fs22.realpath(path32);
24553
+ const resolvedRepoPath = await fs22.realpath(path35);
23477
24554
  logDebug(`[${scanContext}] Resolved repository path`, {
23478
24555
  resolvedRepoPath,
23479
- originalPath: path32
24556
+ originalPath: path35
23480
24557
  });
23481
24558
  const gitService = new GitService(resolvedRepoPath, log);
23482
24559
  const gitValidation = await gitService.validateRepository();
@@ -23489,7 +24566,7 @@ var getLocalFiles = async ({
23489
24566
  if (!gitValidation.isValid || isAllFilesScan) {
23490
24567
  try {
23491
24568
  files = await FileUtils.getLastChangedFiles({
23492
- dir: path32,
24569
+ dir: path35,
23493
24570
  maxFileSize,
23494
24571
  maxFiles,
23495
24572
  isAllFilesScan
@@ -23581,7 +24658,7 @@ var getLocalFiles = async ({
23581
24658
  logError(`${scanContext}Unexpected error in getLocalFiles`, {
23582
24659
  error: error instanceof Error ? error.message : String(error),
23583
24660
  stack: error instanceof Error ? error.stack : void 0,
23584
- path: path32
24661
+ path: path35
23585
24662
  });
23586
24663
  throw error;
23587
24664
  }
@@ -23591,7 +24668,7 @@ var getLocalFiles = async ({
23591
24668
  init_client_generates();
23592
24669
  init_GitService();
23593
24670
  import fs23 from "fs";
23594
- import path26 from "path";
24671
+ import path29 from "path";
23595
24672
  import { z as z41 } from "zod";
23596
24673
  function extractPathFromPatch(patch) {
23597
24674
  const match = patch?.match(/diff --git a\/([^\s]+) b\//);
@@ -23677,7 +24754,7 @@ var LocalMobbFolderService = class {
23677
24754
  "[LocalMobbFolderService] Non-git repository detected, skipping .gitignore operations"
23678
24755
  );
23679
24756
  }
23680
- const mobbFolderPath = path26.join(
24757
+ const mobbFolderPath = path29.join(
23681
24758
  this.repoPath,
23682
24759
  this.defaultMobbFolderName
23683
24760
  );
@@ -23849,7 +24926,7 @@ var LocalMobbFolderService = class {
23849
24926
  mobbFolderPath,
23850
24927
  baseFileName
23851
24928
  );
23852
- const filePath = path26.join(mobbFolderPath, uniqueFileName);
24929
+ const filePath = path29.join(mobbFolderPath, uniqueFileName);
23853
24930
  await fs23.promises.writeFile(filePath, patch, "utf8");
23854
24931
  logInfo("[LocalMobbFolderService] Patch saved successfully", {
23855
24932
  filePath,
@@ -23907,11 +24984,11 @@ var LocalMobbFolderService = class {
23907
24984
  * @returns Unique filename that doesn't conflict with existing files
23908
24985
  */
23909
24986
  getUniqueFileName(folderPath, baseFileName) {
23910
- const baseName = path26.parse(baseFileName).name;
23911
- const extension = path26.parse(baseFileName).ext;
24987
+ const baseName = path29.parse(baseFileName).name;
24988
+ const extension = path29.parse(baseFileName).ext;
23912
24989
  let uniqueFileName = baseFileName;
23913
24990
  let index = 1;
23914
- while (fs23.existsSync(path26.join(folderPath, uniqueFileName))) {
24991
+ while (fs23.existsSync(path29.join(folderPath, uniqueFileName))) {
23915
24992
  uniqueFileName = `${baseName}-${index}${extension}`;
23916
24993
  index++;
23917
24994
  if (index > 1e3) {
@@ -23942,7 +25019,7 @@ var LocalMobbFolderService = class {
23942
25019
  logDebug("[LocalMobbFolderService] Logging patch info", { fixId: fix.id });
23943
25020
  try {
23944
25021
  const mobbFolderPath = await this.getFolder();
23945
- const patchInfoPath = path26.join(mobbFolderPath, "patchInfo.md");
25022
+ const patchInfoPath = path29.join(mobbFolderPath, "patchInfo.md");
23946
25023
  const markdownContent = this.generateFixMarkdown(fix, savedPatchFileName);
23947
25024
  let existingContent = "";
23948
25025
  if (fs23.existsSync(patchInfoPath)) {
@@ -23984,7 +25061,7 @@ var LocalMobbFolderService = class {
23984
25061
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
23985
25062
  const patch = this.extractPatchFromFix(fix);
23986
25063
  const relativePatchedFilePath = patch ? extractPathFromPatch(patch) : null;
23987
- const patchedFilePath = relativePatchedFilePath ? path26.resolve(this.repoPath, relativePatchedFilePath) : null;
25064
+ const patchedFilePath = relativePatchedFilePath ? path29.resolve(this.repoPath, relativePatchedFilePath) : null;
23988
25065
  const fixIdentifier = savedPatchFileName ? savedPatchFileName.replace(".patch", "") : fix.id;
23989
25066
  let markdown = `# Fix ${fixIdentifier}
23990
25067
 
@@ -24320,7 +25397,7 @@ var LocalMobbFolderService = class {
24320
25397
  // src/mcp/services/PatchApplicationService.ts
24321
25398
  init_configs();
24322
25399
  import {
24323
- existsSync as existsSync6,
25400
+ existsSync as existsSync7,
24324
25401
  mkdirSync,
24325
25402
  readFileSync as readFileSync4,
24326
25403
  unlinkSync,
@@ -24328,14 +25405,14 @@ import {
24328
25405
  } from "fs";
24329
25406
  import fs24 from "fs/promises";
24330
25407
  import parseDiff2 from "parse-diff";
24331
- import path27 from "path";
25408
+ import path30 from "path";
24332
25409
  var PatchApplicationService = class {
24333
25410
  /**
24334
25411
  * Gets the appropriate comment syntax for a file based on its extension
24335
25412
  */
24336
25413
  static getCommentSyntax(filePath) {
24337
- const ext = path27.extname(filePath).toLowerCase();
24338
- const basename2 = path27.basename(filePath);
25414
+ const ext = path30.extname(filePath).toLowerCase();
25415
+ const basename2 = path30.basename(filePath);
24339
25416
  const commentMap = {
24340
25417
  // C-style languages (single line comments)
24341
25418
  ".js": "//",
@@ -24543,7 +25620,7 @@ var PatchApplicationService = class {
24543
25620
  }
24544
25621
  );
24545
25622
  }
24546
- const dirPath = path27.dirname(normalizedFilePath);
25623
+ const dirPath = path30.dirname(normalizedFilePath);
24547
25624
  mkdirSync(dirPath, { recursive: true });
24548
25625
  writeFileSync3(normalizedFilePath, finalContent, "utf8");
24549
25626
  return normalizedFilePath;
@@ -24552,9 +25629,9 @@ var PatchApplicationService = class {
24552
25629
  repositoryPath,
24553
25630
  targetPath
24554
25631
  }) {
24555
- const repoRoot = path27.resolve(repositoryPath);
24556
- const normalizedPath = path27.resolve(repoRoot, targetPath);
24557
- const repoRootWithSep = repoRoot.endsWith(path27.sep) ? repoRoot : `${repoRoot}${path27.sep}`;
25632
+ const repoRoot = path30.resolve(repositoryPath);
25633
+ const normalizedPath = path30.resolve(repoRoot, targetPath);
25634
+ const repoRootWithSep = repoRoot.endsWith(path30.sep) ? repoRoot : `${repoRoot}${path30.sep}`;
24558
25635
  if (normalizedPath !== repoRoot && !normalizedPath.startsWith(repoRootWithSep)) {
24559
25636
  throw new Error(
24560
25637
  `Security violation: target path ${targetPath} resolves outside repository`
@@ -24563,7 +25640,7 @@ var PatchApplicationService = class {
24563
25640
  return {
24564
25641
  repoRoot,
24565
25642
  normalizedPath,
24566
- relativePath: path27.relative(repoRoot, normalizedPath)
25643
+ relativePath: path30.relative(repoRoot, normalizedPath)
24567
25644
  };
24568
25645
  }
24569
25646
  /**
@@ -24845,8 +25922,8 @@ var PatchApplicationService = class {
24845
25922
  continue;
24846
25923
  }
24847
25924
  try {
24848
- const absolutePath = path27.resolve(repositoryPath, targetFile);
24849
- if (existsSync6(absolutePath)) {
25925
+ const absolutePath = path30.resolve(repositoryPath, targetFile);
25926
+ if (existsSync7(absolutePath)) {
24850
25927
  const stats = await fs24.stat(absolutePath);
24851
25928
  const fileModTime = stats.mtime.getTime();
24852
25929
  if (fileModTime > scanStartTime) {
@@ -25047,7 +26124,7 @@ var PatchApplicationService = class {
25047
26124
  targetFile,
25048
26125
  absoluteFilePath,
25049
26126
  relativePath,
25050
- exists: existsSync6(absoluteFilePath)
26127
+ exists: existsSync7(absoluteFilePath)
25051
26128
  });
25052
26129
  return { absoluteFilePath, relativePath };
25053
26130
  }
@@ -25071,7 +26148,7 @@ var PatchApplicationService = class {
25071
26148
  fix,
25072
26149
  scanContext
25073
26150
  });
25074
- appliedFiles.push(path27.relative(repositoryPath, actualPath));
26151
+ appliedFiles.push(path30.relative(repositoryPath, actualPath));
25075
26152
  logDebug(`[${scanContext}] Created new file: ${relativePath}`);
25076
26153
  }
25077
26154
  /**
@@ -25083,7 +26160,7 @@ var PatchApplicationService = class {
25083
26160
  appliedFiles,
25084
26161
  scanContext
25085
26162
  }) {
25086
- if (existsSync6(absoluteFilePath)) {
26163
+ if (existsSync7(absoluteFilePath)) {
25087
26164
  unlinkSync(absoluteFilePath);
25088
26165
  appliedFiles.push(relativePath);
25089
26166
  logDebug(`[${scanContext}] Deleted file: ${relativePath}`);
@@ -25102,7 +26179,7 @@ var PatchApplicationService = class {
25102
26179
  appliedFiles,
25103
26180
  scanContext
25104
26181
  }) {
25105
- if (!existsSync6(absoluteFilePath)) {
26182
+ if (!existsSync7(absoluteFilePath)) {
25106
26183
  throw new Error(
25107
26184
  `Target file does not exist: ${targetFile} (resolved to: ${absoluteFilePath})`
25108
26185
  );
@@ -25120,7 +26197,7 @@ var PatchApplicationService = class {
25120
26197
  fix,
25121
26198
  scanContext
25122
26199
  });
25123
- appliedFiles.push(path27.relative(repositoryPath, actualPath));
26200
+ appliedFiles.push(path30.relative(repositoryPath, actualPath));
25124
26201
  logDebug(`[${scanContext}] Modified file: ${relativePath}`);
25125
26202
  }
25126
26203
  }
@@ -25317,7 +26394,7 @@ init_configs();
25317
26394
  // src/mcp/services/FileOperations.ts
25318
26395
  init_FileUtils();
25319
26396
  import fs25 from "fs";
25320
- import path28 from "path";
26397
+ import path31 from "path";
25321
26398
  import AdmZip4 from "adm-zip";
25322
26399
  var FileOperations = class {
25323
26400
  /**
@@ -25337,10 +26414,10 @@ var FileOperations = class {
25337
26414
  let packedFilesCount = 0;
25338
26415
  const packedFiles = [];
25339
26416
  const excludedFiles = [];
25340
- const resolvedRepoPath = path28.resolve(repositoryPath);
26417
+ const resolvedRepoPath = path31.resolve(repositoryPath);
25341
26418
  for (const filepath of fileList) {
25342
- const absoluteFilepath = path28.join(repositoryPath, filepath);
25343
- const resolvedFilePath = path28.resolve(absoluteFilepath);
26419
+ const absoluteFilepath = path31.join(repositoryPath, filepath);
26420
+ const resolvedFilePath = path31.resolve(absoluteFilepath);
25344
26421
  if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
25345
26422
  const reason = "potential path traversal security risk";
25346
26423
  logDebug(`[FileOperations] Skipping ${filepath} due to ${reason}`);
@@ -25387,11 +26464,11 @@ var FileOperations = class {
25387
26464
  fileList,
25388
26465
  repositoryPath
25389
26466
  }) {
25390
- const resolvedRepoPath = path28.resolve(repositoryPath);
26467
+ const resolvedRepoPath = path31.resolve(repositoryPath);
25391
26468
  const validatedPaths = [];
25392
26469
  for (const filepath of fileList) {
25393
- const absoluteFilepath = path28.join(repositoryPath, filepath);
25394
- const resolvedFilePath = path28.resolve(absoluteFilepath);
26470
+ const absoluteFilepath = path31.join(repositoryPath, filepath);
26471
+ const resolvedFilePath = path31.resolve(absoluteFilepath);
25395
26472
  if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
25396
26473
  logDebug(
25397
26474
  `[FileOperations] Rejecting ${filepath} - path traversal attempt detected`
@@ -25419,7 +26496,7 @@ var FileOperations = class {
25419
26496
  for (const absolutePath of filePaths) {
25420
26497
  try {
25421
26498
  const content = await fs25.promises.readFile(absolutePath);
25422
- const relativePath = path28.basename(absolutePath);
26499
+ const relativePath = path31.basename(absolutePath);
25423
26500
  fileDataArray.push({
25424
26501
  relativePath,
25425
26502
  absolutePath,
@@ -25731,14 +26808,14 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
25731
26808
  * since the last scan.
25732
26809
  */
25733
26810
  async scanForSecurityVulnerabilities({
25734
- path: path32,
26811
+ path: path35,
25735
26812
  isAllDetectionRulesScan,
25736
26813
  isAllFilesScan,
25737
26814
  scanContext
25738
26815
  }) {
25739
26816
  this.hasAuthenticationFailed = false;
25740
26817
  logDebug(`[${scanContext}] Scanning for new security vulnerabilities`, {
25741
- path: path32
26818
+ path: path35
25742
26819
  });
25743
26820
  if (!this.gqlClient) {
25744
26821
  logInfo(`[${scanContext}] No GQL client found, skipping scan`);
@@ -25754,11 +26831,11 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
25754
26831
  }
25755
26832
  logDebug(
25756
26833
  `[${scanContext}] Connected to the API, assembling list of files to scan`,
25757
- { path: path32 }
26834
+ { path: path35 }
25758
26835
  );
25759
26836
  const isBackgroundScan = scanContext === ScanContext.BACKGROUND_INITIAL || scanContext === ScanContext.BACKGROUND_PERIODIC;
25760
26837
  const files = await getLocalFiles({
25761
- path: path32,
26838
+ path: path35,
25762
26839
  isAllFilesScan,
25763
26840
  scanContext,
25764
26841
  scanRecentlyChangedFiles: !isBackgroundScan
@@ -25784,13 +26861,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
25784
26861
  });
25785
26862
  const { fixReportId, projectId } = await scanFiles({
25786
26863
  fileList: filesToScan.map((file) => file.relativePath),
25787
- repositoryPath: path32,
26864
+ repositoryPath: path35,
25788
26865
  gqlClient: this.gqlClient,
25789
26866
  isAllDetectionRulesScan,
25790
26867
  scanContext
25791
26868
  });
25792
26869
  logInfo(
25793
- `[${scanContext}] Security scan completed for ${path32} reportId: ${fixReportId} projectId: ${projectId}`
26870
+ `[${scanContext}] Security scan completed for ${path35} reportId: ${fixReportId} projectId: ${projectId}`
25794
26871
  );
25795
26872
  if (isAllFilesScan) {
25796
26873
  return;
@@ -26084,13 +27161,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
26084
27161
  });
26085
27162
  return scannedFiles.some((file) => file.relativePath === fixFile);
26086
27163
  }
26087
- async getFreshFixes({ path: path32 }) {
27164
+ async getFreshFixes({ path: path35 }) {
26088
27165
  const scanContext = ScanContext.USER_REQUEST;
26089
- logDebug(`[${scanContext}] Getting fresh fixes`, { path: path32 });
26090
- if (this.path !== path32) {
26091
- this.path = path32;
27166
+ logDebug(`[${scanContext}] Getting fresh fixes`, { path: path35 });
27167
+ if (this.path !== path35) {
27168
+ this.path = path35;
26092
27169
  this.reset();
26093
- logInfo(`[${scanContext}] Reset service state for new path`, { path: path32 });
27170
+ logInfo(`[${scanContext}] Reset service state for new path`, { path: path35 });
26094
27171
  }
26095
27172
  try {
26096
27173
  const loginContext = createMcpLoginContext("check_new_fixes");
@@ -26109,7 +27186,7 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
26109
27186
  }
26110
27187
  throw error;
26111
27188
  }
26112
- this.triggerScan({ path: path32, gqlClient: this.gqlClient });
27189
+ this.triggerScan({ path: path35, gqlClient: this.gqlClient });
26113
27190
  let isMvsAutoFixEnabled = null;
26114
27191
  try {
26115
27192
  isMvsAutoFixEnabled = await this.gqlClient.getMvsAutoFixSettings();
@@ -26143,33 +27220,33 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
26143
27220
  return noFreshFixesPrompt;
26144
27221
  }
26145
27222
  triggerScan({
26146
- path: path32,
27223
+ path: path35,
26147
27224
  gqlClient
26148
27225
  }) {
26149
- if (this.path !== path32) {
26150
- this.path = path32;
27226
+ if (this.path !== path35) {
27227
+ this.path = path35;
26151
27228
  this.reset();
26152
- logInfo(`Reset service state for new path in triggerScan`, { path: path32 });
27229
+ logInfo(`Reset service state for new path in triggerScan`, { path: path35 });
26153
27230
  }
26154
27231
  this.gqlClient = gqlClient;
26155
27232
  if (!this.intervalId) {
26156
- this.startPeriodicScanning(path32);
26157
- this.executeInitialScan(path32);
26158
- void this.executeInitialFullScan(path32);
27233
+ this.startPeriodicScanning(path35);
27234
+ this.executeInitialScan(path35);
27235
+ void this.executeInitialFullScan(path35);
26159
27236
  }
26160
27237
  }
26161
- startPeriodicScanning(path32) {
27238
+ startPeriodicScanning(path35) {
26162
27239
  const scanContext = ScanContext.BACKGROUND_PERIODIC;
26163
27240
  logDebug(
26164
27241
  `[${scanContext}] Starting periodic scan for new security vulnerabilities`,
26165
27242
  {
26166
- path: path32
27243
+ path: path35
26167
27244
  }
26168
27245
  );
26169
27246
  this.intervalId = setInterval(() => {
26170
- logDebug(`[${scanContext}] Triggering periodic security scan`, { path: path32 });
27247
+ logDebug(`[${scanContext}] Triggering periodic security scan`, { path: path35 });
26171
27248
  this.scanForSecurityVulnerabilities({
26172
- path: path32,
27249
+ path: path35,
26173
27250
  scanContext
26174
27251
  }).catch((error) => {
26175
27252
  logError(`[${scanContext}] Error during periodic security scan`, {
@@ -26178,45 +27255,45 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
26178
27255
  });
26179
27256
  }, MCP_PERIODIC_CHECK_INTERVAL);
26180
27257
  }
26181
- async executeInitialFullScan(path32) {
27258
+ async executeInitialFullScan(path35) {
26182
27259
  const scanContext = ScanContext.FULL_SCAN;
26183
- logDebug(`[${scanContext}] Triggering initial full security scan`, { path: path32 });
27260
+ logDebug(`[${scanContext}] Triggering initial full security scan`, { path: path35 });
26184
27261
  logDebug(`[${scanContext}] Full scan paths scanned`, {
26185
27262
  fullScanPathsScanned: this.fullScanPathsScanned
26186
27263
  });
26187
- if (this.fullScanPathsScanned.includes(path32)) {
27264
+ if (this.fullScanPathsScanned.includes(path35)) {
26188
27265
  logDebug(`[${scanContext}] Full scan already executed for this path`, {
26189
- path: path32
27266
+ path: path35
26190
27267
  });
26191
27268
  return;
26192
27269
  }
26193
27270
  configStore.set("fullScanPathsScanned", [
26194
27271
  ...this.fullScanPathsScanned,
26195
- path32
27272
+ path35
26196
27273
  ]);
26197
27274
  try {
26198
27275
  await this.scanForSecurityVulnerabilities({
26199
- path: path32,
27276
+ path: path35,
26200
27277
  isAllFilesScan: true,
26201
27278
  isAllDetectionRulesScan: true,
26202
27279
  scanContext: ScanContext.FULL_SCAN
26203
27280
  });
26204
- if (!this.fullScanPathsScanned.includes(path32)) {
26205
- this.fullScanPathsScanned.push(path32);
27281
+ if (!this.fullScanPathsScanned.includes(path35)) {
27282
+ this.fullScanPathsScanned.push(path35);
26206
27283
  configStore.set("fullScanPathsScanned", this.fullScanPathsScanned);
26207
27284
  }
26208
- logInfo(`[${scanContext}] Full scan completed`, { path: path32 });
27285
+ logInfo(`[${scanContext}] Full scan completed`, { path: path35 });
26209
27286
  } catch (error) {
26210
27287
  logError(`[${scanContext}] Error during initial full security scan`, {
26211
27288
  error
26212
27289
  });
26213
27290
  }
26214
27291
  }
26215
- executeInitialScan(path32) {
27292
+ executeInitialScan(path35) {
26216
27293
  const scanContext = ScanContext.BACKGROUND_INITIAL;
26217
- logDebug(`[${scanContext}] Triggering initial security scan`, { path: path32 });
27294
+ logDebug(`[${scanContext}] Triggering initial security scan`, { path: path35 });
26218
27295
  this.scanForSecurityVulnerabilities({
26219
- path: path32,
27296
+ path: path35,
26220
27297
  scanContext: ScanContext.BACKGROUND_INITIAL
26221
27298
  }).catch((error) => {
26222
27299
  logError(`[${scanContext}] Error during initial security scan`, { error });
@@ -26313,9 +27390,9 @@ Example payload:
26313
27390
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
26314
27391
  );
26315
27392
  }
26316
- const path32 = pathValidationResult.path;
27393
+ const path35 = pathValidationResult.path;
26317
27394
  const resultText = await this.newFixesService.getFreshFixes({
26318
- path: path32
27395
+ path: path35
26319
27396
  });
26320
27397
  logInfo("CheckForNewAvailableFixesTool execution completed", {
26321
27398
  resultText
@@ -26493,8 +27570,8 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
26493
27570
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
26494
27571
  );
26495
27572
  }
26496
- const path32 = pathValidationResult.path;
26497
- const gitService = new GitService(path32, log);
27573
+ const path35 = pathValidationResult.path;
27574
+ const gitService = new GitService(path35, log);
26498
27575
  const gitValidation = await gitService.validateRepository();
26499
27576
  if (!gitValidation.isValid) {
26500
27577
  throw new Error(`Invalid git repository: ${gitValidation.error}`);
@@ -26879,9 +27956,9 @@ Example payload:
26879
27956
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
26880
27957
  );
26881
27958
  }
26882
- const path32 = pathValidationResult.path;
27959
+ const path35 = pathValidationResult.path;
26883
27960
  const files = await getLocalFiles({
26884
- path: path32,
27961
+ path: path35,
26885
27962
  maxFileSize: MCP_MAX_FILE_SIZE,
26886
27963
  maxFiles: args.maxFiles,
26887
27964
  scanContext: ScanContext.USER_REQUEST,
@@ -26901,7 +27978,7 @@ Example payload:
26901
27978
  try {
26902
27979
  const fixResult = await this.vulnerabilityFixService.processVulnerabilities({
26903
27980
  fileList: files.map((file) => file.relativePath),
26904
- repositoryPath: path32,
27981
+ repositoryPath: path35,
26905
27982
  offset: args.offset,
26906
27983
  limit: args.limit,
26907
27984
  isRescan: args.rescan || !!args.maxFiles
@@ -27202,10 +28279,10 @@ init_client_generates();
27202
28279
  init_urlParser2();
27203
28280
 
27204
28281
  // src/features/codeium_intellij/codeium_language_server_grpc_client.ts
27205
- import path29 from "path";
28282
+ import path32 from "path";
27206
28283
  import * as grpc from "@grpc/grpc-js";
27207
28284
  import * as protoLoader from "@grpc/proto-loader";
27208
- var PROTO_PATH = path29.join(
28285
+ var PROTO_PATH = path32.join(
27209
28286
  getModuleRootDir(),
27210
28287
  "src/features/codeium_intellij/proto/exa/language_server_pb/language_server.proto"
27211
28288
  );
@@ -27217,7 +28294,7 @@ function loadProto() {
27217
28294
  defaults: true,
27218
28295
  oneofs: true,
27219
28296
  includeDirs: [
27220
- path29.join(getModuleRootDir(), "src/features/codeium_intellij/proto")
28297
+ path32.join(getModuleRootDir(), "src/features/codeium_intellij/proto")
27221
28298
  ]
27222
28299
  });
27223
28300
  return grpc.loadPackageDefinition(
@@ -27273,28 +28350,28 @@ async function getGrpcClient(port, csrf3) {
27273
28350
  // src/features/codeium_intellij/parse_intellij_logs.ts
27274
28351
  import fs27 from "fs";
27275
28352
  import os14 from "os";
27276
- import path30 from "path";
28353
+ import path33 from "path";
27277
28354
  function getLogsDir() {
27278
28355
  if (process.platform === "darwin") {
27279
- return path30.join(os14.homedir(), "Library/Logs/JetBrains");
28356
+ return path33.join(os14.homedir(), "Library/Logs/JetBrains");
27280
28357
  } else if (process.platform === "win32") {
27281
- return path30.join(
27282
- process.env["LOCALAPPDATA"] || path30.join(os14.homedir(), "AppData/Local"),
28358
+ return path33.join(
28359
+ process.env["LOCALAPPDATA"] || path33.join(os14.homedir(), "AppData/Local"),
27283
28360
  "JetBrains"
27284
28361
  );
27285
28362
  } else {
27286
- return path30.join(os14.homedir(), ".cache/JetBrains");
28363
+ return path33.join(os14.homedir(), ".cache/JetBrains");
27287
28364
  }
27288
28365
  }
27289
28366
  function parseIdeLogDir(ideLogDir) {
27290
28367
  const logFiles = fs27.readdirSync(ideLogDir).filter((f) => /^idea(\.\d+)?\.log$/.test(f)).map((f) => ({
27291
28368
  name: f,
27292
- mtime: fs27.statSync(path30.join(ideLogDir, f)).mtimeMs
28369
+ mtime: fs27.statSync(path33.join(ideLogDir, f)).mtimeMs
27293
28370
  })).sort((a, b) => a.mtime - b.mtime).map((f) => f.name);
27294
28371
  let latestCsrf = null;
27295
28372
  let latestPort = null;
27296
28373
  for (const logFile of logFiles) {
27297
- const lines = fs27.readFileSync(path30.join(ideLogDir, logFile), "utf-8").split("\n");
28374
+ const lines = fs27.readFileSync(path33.join(ideLogDir, logFile), "utf-8").split("\n");
27298
28375
  for (const line of lines) {
27299
28376
  if (!line.includes(
27300
28377
  "com.codeium.intellij.language_server.LanguageServerProcessHandler"
@@ -27322,9 +28399,9 @@ function findRunningCodeiumLanguageServers() {
27322
28399
  const logsDir = getLogsDir();
27323
28400
  if (!fs27.existsSync(logsDir)) return results;
27324
28401
  for (const ide of fs27.readdirSync(logsDir)) {
27325
- let ideLogDir = path30.join(logsDir, ide);
28402
+ let ideLogDir = path33.join(logsDir, ide);
27326
28403
  if (process.platform !== "darwin") {
27327
- ideLogDir = path30.join(ideLogDir, "log");
28404
+ ideLogDir = path33.join(ideLogDir, "log");
27328
28405
  }
27329
28406
  if (!fs27.existsSync(ideLogDir) || !fs27.statSync(ideLogDir).isDirectory()) {
27330
28407
  continue;
@@ -27507,10 +28584,10 @@ function processChatStepCodeAction(step) {
27507
28584
  // src/features/codeium_intellij/install_hook.ts
27508
28585
  import fsPromises5 from "fs/promises";
27509
28586
  import os15 from "os";
27510
- import path31 from "path";
28587
+ import path34 from "path";
27511
28588
  import chalk14 from "chalk";
27512
28589
  function getCodeiumHooksPath() {
27513
- return path31.join(os15.homedir(), ".codeium", "hooks.json");
28590
+ return path34.join(os15.homedir(), ".codeium", "hooks.json");
27514
28591
  }
27515
28592
  async function readCodeiumHooks() {
27516
28593
  const hooksPath = getCodeiumHooksPath();
@@ -27523,7 +28600,7 @@ async function readCodeiumHooks() {
27523
28600
  }
27524
28601
  async function writeCodeiumHooks(config2) {
27525
28602
  const hooksPath = getCodeiumHooksPath();
27526
- const dir = path31.dirname(hooksPath);
28603
+ const dir = path34.dirname(hooksPath);
27527
28604
  await fsPromises5.mkdir(dir, { recursive: true });
27528
28605
  await fsPromises5.writeFile(
27529
28606
  hooksPath,