opencode-sonarqube 0.2.10 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +128 -4
- package/dist/index.js +151 -185
- package/package.json +2 -2
package/README.md
CHANGED
@@ -4,7 +4,7 @@ OpenCode Plugin for SonarQube integration - Enterprise-level code quality from t
 
 [](https://sonarqube.example.com)
 [](https://sonarqube.example.com)
-[](https://sonarqube.example.com)
 [](./LICENSE)
 
 ## Features
@@ -125,6 +125,11 @@ Create `.sonarqube/config.json` in your project root:
 | `sources` | `string` | `"src"` | Source directories (comma-separated) |
 | `tests` | `string` | - | Test directories (comma-separated) |
 | `exclusions` | `string` | - | File exclusion patterns (glob) |
+| `analyzeBeforeCommit` | `boolean` | `true` | Run analysis before git commit |
+| `blockCommit` | `boolean` | `false` | Block commit if blocking issues exist |
+| `blockPush` | `boolean` | `false` | Block push if blocking issues exist |
+| `blockingSeverity` | `"BLOCKER"` \| `"CRITICAL"` \| `"MAJOR"` | `"CRITICAL"` | Minimum severity that blocks operations |
+| `fixBeforeCommit` | `boolean` | `false` | Attempt auto-fix before commit |
 
 ### Strictness Levels
 
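The five new keys land in the same `.sonarqube/config.json` documented above. As a rough, illustrative sketch (not lifted from the package docs), a config that turns the gate from warn-only into blocking, with the other keys left at their documented defaults, could look like:

```json
{
  "level": "enterprise",
  "sources": "src",
  "analyzeBeforeCommit": true,
  "blockCommit": true,
  "blockPush": true,
  "blockingSeverity": "CRITICAL",
  "fixBeforeCommit": false
}
```

Leaving `blockCommit` and `blockPush` at their `false` defaults keeps the previous warn-only behaviour shown in the git-operations hunk below.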
@@ -238,7 +243,8 @@ The plugin automatically handles many scenarios without user intervention:
 |-----------|-------------------|
 | `git pull` / `git merge` | Suggests checking for new issues |
 | `git checkout` (with changes) | Suggests running analysis |
-| `git commit` (enterprise mode) | Warns if BLOCKER/CRITICAL issues exist |
+| `git commit` (enterprise mode) | Warns/blocks if BLOCKER/CRITICAL issues exist |
+| `git push` (enterprise mode) | Warns/blocks if BLOCKER/CRITICAL issues exist |
 | `git push` | Shows notification that code was pushed |
 
 ### System Prompt Injection
@@ -381,6 +387,80 @@ The plugin provides 12 API modules for SonarQube interaction:
 | `MetricsAPI` | Get detailed metrics with period comparison |
 | `ComponentsAPI` | Get files/directories with issue counts |
 
+## First Time Setup
+
+When you use the plugin in a new project for the first time, you need to initialize it:
+
+### Option 1: Let the AI do it
+Simply tell the AI: "Set up SonarQube for this project" or "Initialize SonarQube"
+
+### Option 2: Use the tool directly
+```typescript
+sonarqube({ action: "setup" })
+```
+
+This will:
+1. Create a new project on your SonarQube server
+2. Generate an authentication token
+3. Create `.sonarqube/project.json` with the project configuration
+4. Add `.sonarqube/` to your `.gitignore`
+
+**Note:** The `.sonarqube/` directory contains sensitive tokens - never commit it!
+
+## FAQ
+
+### Where is the configuration stored?
+
+| What | Location |
+|------|----------|
+| **Server credentials** | Environment variables (`SONAR_HOST_URL`, `SONAR_USER`, `SONAR_PASSWORD`) |
+| **Plugin settings** | `.sonarqube/config.json` in your project (optional) |
+| **Project state/tokens** | `.sonarqube/project.json` (auto-generated, don't commit!) |
+| **OpenCode plugin list** | `opencode.json` |
+
+### How do I enable debug logging?
+
+Set the environment variable before starting OpenCode:
+```bash
+export SONARQUBE_DEBUG=true
+```
+
+Logs are written to `/tmp/sonarqube-plugin-debug.log`
+
+### The plugin uses the wrong project directory
+
+This can happen when multiple projects are open in OpenCode Desktop. The plugin uses `import.meta.url` to determine which project's `node_modules` it was loaded from. Make sure each project has its own installation:
+
+```bash
+cd /path/to/your/project
+bun add opencode-sonarqube
+```
+
+### The quality gate shows issues but I just started
+
+Run the setup first:
+```typescript
+sonarqube({ action: "setup" })
+```
+
+Then run an analysis:
+```typescript
+sonarqube({ action: "analyze" })
+```
+
+### How do I use this with multiple SonarQube servers?
+
+Currently, the plugin uses global environment variables. For different servers per project, you'd need to set the environment variables differently per terminal session.
+
+### Can I use this without OpenCode?
+
+Yes! Use the CLI:
+```bash
+bun run src/index.ts --setup
+bun run src/index.ts --analyze
+bun run src/index.ts --status
+```
+
 ## Requirements
 
 - SonarQube server 9.9+ (tested with 26.1)
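On the new "multiple SonarQube servers" FAQ entry: because the server credentials come from global environment variables, the practical workaround today is one shell per server. A rough sketch with placeholder URLs and credentials (not taken from the package docs):

```bash
# Shell/session for project A - placeholder server and credentials
export SONAR_HOST_URL="https://sonarqube-a.example.com"
export SONAR_USER="<user>"
export SONAR_PASSWORD="<password>"
# ...then start OpenCode from this shell for project A

# Shell/session for project B - placeholder server and credentials
export SONAR_HOST_URL="http://localhost:9000"
export SONAR_USER="<user>"
export SONAR_PASSWORD="<password>"
# ...then start OpenCode from this shell for project B
```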
@@ -393,17 +473,61 @@ This project maintains enterprise-level quality:
 
 | Metric | Value |
 |--------|-------|
-| Test Coverage |
-| Tests |
+| Test Coverage | 100% |
+| Tests | 625 |
 | Bugs | 0 |
 | Vulnerabilities | 0 |
 | Code Smells | 0 |
+| Security Hotspots | 0 (reviewed) |
 | Duplications | 0% |
 | Reliability Rating | A |
 | Security Rating | A |
 | Maintainability Rating | A |
 | Lines of Code | ~6,000 |
 
+## CI/CD Pipeline
+
+All builds, tests, and releases are automated via GitHub Actions.
+
+### Pipeline Stages
+
+```
+┌─────────────┐     ┌─────────────────────┐     ┌─────────────────┐
+│   Build &   │────▶│  SonarQube Quality  │────▶│  Publish to npm │
+│    Test     │     │        Gate         │     │   (tags only)   │
+└─────────────┘     └─────────────────────┘     └─────────────────┘
+```
+
+1. **Build & Test**: Type check, unit tests, build
+2. **Quality Gate**: SonarQube analysis must pass (0 bugs, 0 vulnerabilities, 0 code smells)
+3. **Publish**: Only on version tags, only if quality gate passes
+
+### Creating a Release
+
+```bash
+# 1. Update version in package.json
+npm version patch   # 0.3.0 → 0.3.1
+# or: npm version minor  # 0.3.0 → 0.4.0
+# or: npm version major  # 0.3.0 → 1.0.0
+
+# 2. Push code and tag
+git push && git push --tags
+```
+
+The pipeline will automatically:
+- Run all tests
+- Check SonarQube quality gate
+- Publish to npm (if quality gate passes)
+- Create GitHub release
+
+### Required GitHub Secrets
+
+| Secret | Description |
+|--------|-------------|
+| `NPM_TOKEN` | npm access token with publish permissions |
+| `SONAR_TOKEN` | SonarQube token for analysis |
+| `SONAR_HOST_URL` | SonarQube server URL |
+
 ## License
 
 MIT
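One way to provision the three repository secrets listed in the new "Required GitHub Secrets" table is the GitHub CLI. This is an illustrative sketch, not part of the package, and every value below is a placeholder:

```bash
# Run inside a clone of the repository; replace the placeholder values.
gh secret set NPM_TOKEN --body "<npm token with publish permission>"
gh secret set SONAR_TOKEN --body "<SonarQube analysis token>"
gh secret set SONAR_HOST_URL --body "https://sonarqube.example.com"
```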
package/dist/index.js
CHANGED
@@ -1,20 +1,5 @@
 import { createRequire } from "node:module";
-var __create = Object.create;
-var __getProtoOf = Object.getPrototypeOf;
 var __defProp = Object.defineProperty;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __toESM = (mod, isNodeMode, target) => {
-  target = mod != null ? __create(__getProtoOf(mod)) : {};
-  const to = isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target;
-  for (let key of __getOwnPropNames(mod))
-    if (!__hasOwnProp.call(to, key))
-      __defProp(to, key, {
-        get: () => mod[key],
-        enumerable: true
-      });
-  return to;
-};
 var __export = (target, all) => {
   for (var name in all)
     __defProp(target, name, {
@@ -4072,7 +4057,12 @@ var init_types2 = __esm(() => {
     newCodeDefinition: exports_external2.enum(["previous_version", "number_of_days", "reference_branch", "specific_analysis"]).default("previous_version").describe("How to define 'new code' for analysis"),
     sources: exports_external2.string().default("src").describe("Source directories to analyze"),
     tests: exports_external2.string().optional().describe("Test directories"),
-    exclusions: exports_external2.string().optional().describe("File exclusion patterns")
+    exclusions: exports_external2.string().optional().describe("File exclusion patterns"),
+    analyzeBeforeCommit: exports_external2.boolean().default(true).describe("Run analysis before git commit"),
+    blockCommit: exports_external2.boolean().default(false).describe("Block commit if BLOCKER/CRITICAL issues exist"),
+    blockPush: exports_external2.boolean().default(false).describe("Block push if BLOCKER/CRITICAL issues exist"),
+    blockingSeverity: exports_external2.enum(["BLOCKER", "CRITICAL", "MAJOR"]).default("CRITICAL").describe("Minimum severity that blocks operations"),
+    fixBeforeCommit: exports_external2.boolean().default(false).describe("Attempt auto-fix before commit")
   });
   ProjectStateSchema = exports_external2.object({
     projectKey: exports_external2.string(),
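In the bundle above, `exports_external2` appears to be the bundled Zod namespace (`zod` is listed in the package's dependencies), so the added fields are ordinary Zod declarations. Purely as an illustration of what the new options accept and default to, here is a standalone sketch of just those fields (the schema name is made up):

```typescript
import { z } from "zod";

// Illustrative reconstruction of the config fields added in this hunk.
const commitGateFields = z.object({
  analyzeBeforeCommit: z.boolean().default(true),   // run analysis before `git commit`
  blockCommit: z.boolean().default(false),          // block commits on blocking issues
  blockPush: z.boolean().default(false),            // block pushes on blocking issues
  blockingSeverity: z.enum(["BLOCKER", "CRITICAL", "MAJOR"]).default("CRITICAL"),
  fixBeforeCommit: z.boolean().default(false)       // attempt auto-fix before commit
});

// Parsing an empty object fills in the documented defaults.
console.log(commitGateFields.parse({}));
// { analyzeBeforeCommit: true, blockCommit: false, blockPush: false,
//   blockingSeverity: "CRITICAL", fixBeforeCommit: false }
```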
@@ -4120,7 +4110,6 @@ var init_types2 = __esm(() => {
 });
 
 // src/utils/state.ts
-import { appendFileSync as appendFileSync2 } from "node:fs";
 function getStatePath(directory) {
   return `${directory}/${STATE_DIR}/${STATE_FILE}`;
 }
@@ -4133,27 +4122,14 @@ async function hasProjectState(directory) {
 }
 async function loadProjectState(directory) {
   const statePath = getStatePath(directory);
-  const stack = new Error().stack?.split(`
-`).slice(1, 5).join(" <- ") || "no stack";
-  logger4.info(">>> loadProjectState called", { directory, statePath, caller: stack });
   const exists = await Bun.file(statePath).exists();
-  logger4.info("State file exists check", { exists, statePath });
   if (!exists) {
-    logger4.info("No project state file found", { directory, statePath });
     return null;
   }
   try {
     const content = await Bun.file(statePath).text();
-    logger4.info("State file content loaded", { contentLength: content.length });
     const data = JSON.parse(content);
-    logger4.info("State file parsed", { keys: Object.keys(data) });
     const state = ProjectStateSchema.parse(data);
-    logger4.info("<<< loadProjectState success", {
-      projectKey: state.projectKey,
-      projectKeyLength: state.projectKey?.length,
-      hasToken: !!state.projectToken,
-      tokenLength: state.projectToken?.length
-    });
     return state;
   } catch (error45) {
     logger4.error("Failed to load project state", {
@@ -4212,34 +4188,14 @@ ${entry}
   await Bun.write(gitignorePath, newContent);
   logger4.info("Added SonarQube exclusion to .gitignore");
 }
-var
+var logger4, STATE_DIR = ".sonarqube", STATE_FILE = "project.json";
 var init_state = __esm(() => {
   init_types2();
   logger4 = {
-    info: (msg, extra) => {
-      try {
-        appendFileSync2(LOG_FILE2, `${new Date().toISOString()} [STATE-INFO] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-      } catch {}
-    },
-    warn: (msg, extra) => {
-      try {
-        appendFileSync2(LOG_FILE2, `${new Date().toISOString()} [STATE-WARN] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-      } catch {}
-    },
-    error: (msg, extra) => {
-      try {
-        appendFileSync2(LOG_FILE2, `${new Date().toISOString()} [STATE-ERROR] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-      } catch {}
-    },
-    debug: (msg, extra) => {
-      try {
-        appendFileSync2(LOG_FILE2, `${new Date().toISOString()} [STATE-DEBUG] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-      } catch {}
-    }
+    info: (_msg, _extra) => {},
+    warn: (_msg, _extra) => {},
+    error: (_msg, _extra) => {},
+    debug: (_msg, _extra) => {}
   };
 });
 
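This hunk (and the matching config/bootstrap logger hunks further down) swaps the always-on file loggers for no-ops in the published bundle. The README still documents `SONARQUBE_DEBUG=true` with logs going to `/tmp/sonarqube-plugin-debug.log`; purely as an illustration of that documented switch, and not code taken from this package, an env-gated variant of such a logger might look like:

```typescript
import { appendFileSync } from "node:fs";

// Illustrative only: gate file logging behind the documented env variable.
const LOG_FILE = "/tmp/sonarqube-plugin-debug.log";
const debugEnabled = process.env["SONARQUBE_DEBUG"] === "true";

const log = (tag: string, msg: string, extra?: unknown): void => {
  if (!debugEnabled) return;                        // stays a no-op unless SONARQUBE_DEBUG=true
  try {
    const suffix = extra ? ` ${JSON.stringify(extra)}` : "";
    appendFileSync(LOG_FILE, `${new Date().toISOString()} [${tag}] ${msg}${suffix}\n`);
  } catch {
    // never let logging failures break the plugin
  }
};

log("STATE-INFO", "example entry", { projectKey: "demo" });
```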
@@ -16565,27 +16521,10 @@ function tool(input) {
 tool.schema = exports_external;
 // src/utils/config.ts
 init_types2();
-import { appendFileSync } from "node:fs";
-var LOG_FILE = "/tmp/sonarqube-plugin-debug.log";
 var configLogger = {
-  info: (msg, extra) => {
-    try {
-      appendFileSync(LOG_FILE, `${new Date().toISOString()} [CONFIG-INFO] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-    } catch {}
-  },
-  warn: (msg, extra) => {
-    try {
-      appendFileSync(LOG_FILE, `${new Date().toISOString()} [CONFIG-WARN] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-    } catch {}
-  },
-  error: (msg, extra) => {
-    try {
-      appendFileSync(LOG_FILE, `${new Date().toISOString()} [CONFIG-ERROR] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-    } catch {}
-  }
+  info: (_msg, _extra) => {},
+  warn: (_msg, _extra) => {},
+  error: (_msg, _extra) => {}
 };
 var DEFAULT_CONFIG = {
   level: "enterprise",
@@ -17106,11 +17045,6 @@ class SonarQubeClient {
     if (requestBody) {
       headers["Content-Type"] = "application/x-www-form-urlencoded";
     }
-    try {
-      const { appendFileSync: appendFileSync2 } = await import("node:fs");
-      appendFileSync2("/tmp/sonarqube-plugin-debug.log", `${new Date().toISOString()} [API] >>> ${method} ${endpoint} ${JSON.stringify({ url: url2, params, hasBody: !!body, bodyKeys: body ? Object.keys(body) : [] })}
-`);
-    } catch {}
     try {
       const response = await fetch(url2, {
         method,
@@ -19126,27 +19060,10 @@ function shouldBlockOnResult(result, level) {
 // src/bootstrap/index.ts
 init_types2();
 init_state();
-import { appendFileSync as appendFileSync3 } from "node:fs";
-var LOG_FILE3 = "/tmp/sonarqube-plugin-debug.log";
 var logger5 = {
-  info: (msg, extra) => {
-    try {
-      appendFileSync3(LOG_FILE3, `${new Date().toISOString()} [BOOTSTRAP-INFO] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-    } catch {}
-  },
-  warn: (msg, extra) => {
-    try {
-      appendFileSync3(LOG_FILE3, `${new Date().toISOString()} [BOOTSTRAP-WARN] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-    } catch {}
-  },
-  error: (msg, extra) => {
-    try {
-      appendFileSync3(LOG_FILE3, `${new Date().toISOString()} [BOOTSTRAP-ERROR] ${msg} ${extra ? JSON.stringify(extra) : ""}
-`);
-    } catch {}
-  }
+  info: (_msg, _extra) => {},
+  warn: (_msg, _extra) => {},
+  error: (_msg, _extra) => {}
 };
 var QUALITY_GATE_MAPPING = {
   enterprise: "Sonar way",
@@ -19213,11 +19130,9 @@ async function bootstrap(options) {
     const resolved = resolveDirectoryFromImportMeta();
     if (resolved) {
       directory = resolved;
-      logger5.info("Resolved directory from import.meta.url", { directory });
     }
   }
   if (!isValidDirectory(directory)) {
-    logger5.error("Invalid directory for bootstrap", { directory });
     return {
       success: false,
       projectKey: "",
@@ -19403,12 +19318,6 @@ function formatActionPrompt(result, config2) {
 }
 function createIdleHook(getConfig, getDirectory) {
   return async function handleSessionIdle() {
-    try {
-      const { appendFileSync: appendFileSync4 } = await import("node:fs");
-      const dir = getDirectory();
-      appendFileSync4("/tmp/sonarqube-plugin-debug.log", `${new Date().toISOString()} [IDLE-HOOK] getDirectory()=${dir}
-`);
-    } catch {}
     const rawConfig = getConfig()?.["sonarqube"];
     const config2 = loadConfig(rawConfig);
     if (!isAnalysisEnabled(config2)) {
@@ -20093,22 +20002,11 @@ function getSeveritiesFromLevel(level) {
 }
 
 // src/index.ts
-import {
-try {
-  const moduleLoadId = Math.random().toString(36).substring(7);
-  appendFileSync4("/tmp/sonarqube-plugin-debug.log", `${new Date().toISOString()} [LOAD] Module loaded! id=${moduleLoadId} cwd=${process.cwd()} import.meta.url=${import.meta.url}
-`);
-} catch {}
+import { readFileSync, writeFileSync } from "node:fs";
 var SHARED_STATE_FILE = "/tmp/sonarqube-plugin-shared-state.json";
-var globalSafeLog = (msg) => {
-  try {
-    appendFileSync4("/tmp/sonarqube-plugin-debug.log", `${new Date().toISOString()} [GLOBAL] ${msg}
-`);
-  } catch {}
-};
 var readSharedState = () => {
   try {
-    const content =
+    const content = readFileSync(SHARED_STATE_FILE, "utf-8");
     return JSON.parse(content);
   } catch {
     return { sessionToDirectory: {}, registeredDirectories: [], lastUpdated: "" };
@@ -20117,16 +20015,13 @@ var readSharedState = () => {
 var writeSharedState = (state) => {
   try {
     state.lastUpdated = new Date().toISOString();
-
-  } catch (e) {
-    globalSafeLog(`Failed to write shared state: ${e}`);
-  }
+    writeFileSync(SHARED_STATE_FILE, JSON.stringify(state, null, 2));
+  } catch {}
 };
 var mapSessionToDirectory = (sessionId, directory) => {
   const state = readSharedState();
   state.sessionToDirectory[sessionId] = directory;
   writeSharedState(state);
-  globalSafeLog(`Mapped session ${sessionId} to ${directory}`);
 };
 var getDirectoryForSession = (sessionId) => {
   const state = readSharedState();
@@ -20138,7 +20033,6 @@ var registerDirectory = (directory) => {
     state.registeredDirectories.push(directory);
     writeSharedState(state);
   }
-  globalSafeLog(`Registered directory: ${directory}, total: ${state.registeredDirectories.length}`);
 };
 var IGNORED_FILE_PATTERNS2 = [
   /node_modules/,
@@ -20156,17 +20050,7 @@ function shouldIgnoreFile2(filePath) {
   return IGNORED_FILE_PATTERNS2.some((pattern) => pattern.test(filePath));
 }
 var SonarQubePlugin = async ({ client, directory, worktree }) => {
-  const safeLog = (msg) => {
-    try {
-      appendFileSync4("/tmp/sonarqube-plugin-debug.log", `${new Date().toISOString()} [PLUGIN] ${msg}
-`);
-    } catch {}
-  };
-  safeLog(`=== PLUGIN START ===`);
-  safeLog(` directory param: "${directory}"`);
-  safeLog(` worktree param: "${worktree}"`);
-  safeLog(` process.cwd(): "${process.cwd()}"`);
-  safeLog(` import.meta.url: "${import.meta.url}"`);
+  const safeLog = (_msg) => {};
   const pluginImportUrl = import.meta.url;
   const resolveDirectoryFromImportUrl = () => {
     try {
@@ -20184,26 +20068,16 @@ var SonarQubePlugin = async ({ client, directory, worktree }) => {
   };
   const resolveValidDirectory = () => {
     const fromImportUrl = resolveDirectoryFromImportUrl();
-    if (fromImportUrl)
-      safeLog(`USING import.meta.url derived path=${fromImportUrl}`);
+    if (fromImportUrl)
       return fromImportUrl;
-
-    if (worktree && worktree !== "/" && worktree.length > 1) {
-      safeLog(`USING worktree=${worktree}`);
+    if (worktree && worktree !== "/" && worktree.length > 1)
       return worktree;
-
-    if (directory && directory !== "/" && directory.length > 1) {
-      safeLog(`USING directory=${directory}`);
+    if (directory && directory !== "/" && directory.length > 1)
      return directory;
-    }
     const cwd = process.cwd();
-    if (cwd && cwd !== "/" && cwd.length > 1)
-      safeLog(`USING cwd=${cwd}`);
+    if (cwd && cwd !== "/" && cwd.length > 1)
      return cwd;
-
-    const homeDir = process.env["HOME"] || "/Users";
-    safeLog(`FALLBACK home=${homeDir}`);
-    return homeDir;
+    return process.env["HOME"] || "/Users";
   };
   const effectiveDirectory = resolveValidDirectory();
   safeLog(`FINAL effectiveDirectory=${effectiveDirectory}`);
@@ -20456,44 +20330,120 @@ Issues: ${issues.blocker} blockers, ${issues.critical} critical, ${issues.major}
 
 ${statusNote}`;
   };
-  const
+  const sendAutoFixPrompt = async (analysisResult) => {
+    await client.session.prompt({
+      path: { id: currentSessionId },
+      body: {
+        parts: [{
+          type: "text",
+          text: `## SonarQube: Auto-Fix Required
+
+Found blocking issues before commit. Attempting automatic fix...
+
+${formatAnalysisResult(analysisResult)}
+
+Please fix these issues and then try committing again.`
+        }]
+      }
+    });
+    await showToast("SonarQube: Fixing issues before commit...", "info");
+  };
+  const sendBlockingMessage = async (issues, shouldBlock, autoFixAvailable) => {
+    const statusText = shouldBlock ? "Commit Blocked" : "Pre-Commit Warning";
+    const labelText = shouldBlock ? "BLOCKED" : "WARNING";
+    const actionText = shouldBlock ? "Commit is blocked until these issues are fixed." : "Consider fixing these before committing.";
+    const autoFixHint = autoFixAvailable ? '\nOr run `sonarqube({ action: "analyze", fix: true })` to auto-fix.' : "";
+    const warningMessage = `## SonarQube: ${statusText}
+
+**${labelText}:** Found ${issues.blocker} BLOCKER and ${issues.critical} CRITICAL issues.
+
+${actionText}
+
+Run \`sonarqube({ action: "issues", severity: "critical" })\` to see details.${autoFixHint}`;
+    await client.session.prompt({
+      path: { id: currentSessionId },
+      body: { noReply: !shouldBlock, parts: [{ type: "text", text: warningMessage }] }
+    });
+    const toastMessage = shouldBlock ? "SonarQube: Commit BLOCKED - fix issues first" : "SonarQube: Issues found";
+    await showToast(toastMessage, "error");
+    return warningMessage;
+  };
+  const checkExistingIssuesAndBlock = async (api2, projectKey, operationType, blockingSeverity, shouldBlock) => {
+    const counts = await api2.issues.getCounts(projectKey);
+    const hasBlockingIssues = checkBlockingIssues(counts, blockingSeverity);
+    if (!hasBlockingIssues || !shouldBlock) {
+      return { block: false };
+    }
+    const opName = operationType === "commit" ? "Commit" : "Push";
+    const message = `## SonarQube: ${opName} Blocked
+
+**BLOCKED:** There are ${counts.blocker} BLOCKER, ${counts.critical} CRITICAL, and ${counts.major} MAJOR issues.
+
+Fix these issues before ${operationType === "commit" ? "committing" : "pushing"}.`;
+    await client.session.prompt({
+      path: { id: currentSessionId },
+      body: { noReply: true, parts: [{ type: "text", text: message }] }
+    });
+    await showToast(`SonarQube: ${operationType} BLOCKED`, "error");
+    return { block: true, message };
+  };
+  const handleGitOperationCheck = async (output, operationType) => {
     const args = output.args;
     const command = args?.command ?? "";
-
-
-
+    const isCommit = /git\s+commit\b/.test(command) && !/--amend/.test(command);
+    const isPush = /git\s+push\b/.test(command);
+    const isMatchingOperation = operationType === "commit" && isCommit || operationType === "push" && isPush;
+    if (!isMatchingOperation)
+      return { block: false };
     await loadPluginConfig();
     const sonarConfig = pluginConfig?.["sonarqube"];
     const config2 = loadConfig(sonarConfig);
-    if (config2
-      return;
-    }
+    if (!config2 || config2.level === "off")
+      return { block: false };
+    const { analyzeBeforeCommit = true, blockCommit = false, blockPush = false } = config2;
+    const { fixBeforeCommit = false, blockingSeverity = "CRITICAL", autoFix = false } = config2;
+    const shouldBlock = operationType === "commit" && blockCommit || operationType === "push" && blockPush;
     try {
-      const
+      const dir = getDirectory();
+      const state = await getProjectState(dir);
      if (!state?.projectKey)
-        return;
+        return { block: false };
      const api2 = createSonarQubeAPI(config2, state);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      }
-    } catch {
+      if (operationType === "push" || !analyzeBeforeCommit) {
+        return checkExistingIssuesAndBlock(api2, state.projectKey, operationType, blockingSeverity, shouldBlock);
+      }
+      await showToast("SonarQube: Running pre-commit analysis...", "info");
+      const analysisResult = await runAnalysis(config2, state, { projectKey: state.projectKey }, dir);
+      if (analysisResult.qualityGateStatus === "OK") {
+        await showToast("SonarQube: Quality check passed!", "success");
+        return { block: false };
+      }
+      const hasBlockingIssues = checkBlockingIssues(analysisResult.issues, blockingSeverity);
+      if (!hasBlockingIssues) {
+        await showToast("SonarQube: Quality check passed!", "success");
+        return { block: false };
+      }
+      if (fixBeforeCommit && autoFix) {
+        await sendAutoFixPrompt(analysisResult);
+        return { block: shouldBlock, message: "SonarQube is fixing issues. Please wait and try again." };
+      }
+      const warningMessage = await sendBlockingMessage(analysisResult.issues, shouldBlock, autoFix);
+      return { block: shouldBlock, message: warningMessage };
+    } catch {
+      return { block: false };
+    }
+  };
+  const checkBlockingIssues = (issues, threshold) => {
+    switch (threshold) {
+      case "BLOCKER":
+        return issues.blocker > 0;
+      case "CRITICAL":
+        return issues.blocker > 0 || issues.critical > 0;
+      case "MAJOR":
+        return issues.blocker > 0 || issues.critical > 0 || issues.major > 0;
+      default:
+        return issues.blocker > 0 || issues.critical > 0;
+    }
   };
   const logSonarQubeResult = async (input, output) => {
     if (input.tool !== "sonarqube")
@@ -20612,7 +20562,20 @@ Git operation completed with changes. Consider running:
       }
       const isBashTool = input.tool === "bash" || input.tool === "mcp_bash";
       if (isBashTool && currentSessionId) {
-
+        const args = output.args;
+        const command = args?.command ?? "";
+        if (/git\s+commit\b/.test(command) && !/--amend/.test(command)) {
+          const result = await handleGitOperationCheck(output, "commit");
+          if (result.block) {
+            safeLog(`Commit blocked by quality gate`);
+          }
+        }
+        if (/git\s+push\b/.test(command)) {
+          const result = await handleGitOperationCheck(output, "push");
+          if (result.block) {
+            safeLog(`Push blocked by quality gate`);
+          }
+        }
       }
     }, "tool.execute.before"),
     "tool.execute.after": safeAsync(async (input, output) => {
@@ -20916,6 +20879,9 @@ if (isDirectCLI) {
   await executeCLI();
 }
 export {
+  runCLI,
+  executeCLI,
   src_default as default,
-  SonarQubePlugin
+  SonarQubePlugin,
+  CLI_HELP
 };
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-sonarqube",
-  "version": "0.2.10",
+  "version": "1.2.0",
   "description": "OpenCode Plugin for SonarQube integration - Enterprise-level code quality from the start",
   "type": "module",
   "main": "dist/index.js",
@@ -38,7 +38,7 @@
   "homepage": "https://github.com/mguttmann/opencode-sonarqube#readme",
   "dependencies": {
     "@opencode-ai/plugin": "^1.1.34",
-    "opencode-sonarqube": "0.2.
+    "opencode-sonarqube": "0.2.10",
     "zod": "^3.24.0"
   },
   "devDependencies": {