@nforma.ai/nforma 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +1024 -0
- package/agents/qgsd-codebase-mapper.md +764 -0
- package/agents/qgsd-debugger.md +1201 -0
- package/agents/qgsd-executor.md +472 -0
- package/agents/qgsd-integration-checker.md +443 -0
- package/agents/qgsd-phase-researcher.md +502 -0
- package/agents/qgsd-plan-checker.md +643 -0
- package/agents/qgsd-planner.md +1182 -0
- package/agents/qgsd-project-researcher.md +621 -0
- package/agents/qgsd-quorum-orchestrator.md +628 -0
- package/agents/qgsd-quorum-slot-worker.md +41 -0
- package/agents/qgsd-quorum-synthesizer.md +133 -0
- package/agents/qgsd-quorum-test-worker.md +37 -0
- package/agents/qgsd-quorum-worker.md +161 -0
- package/agents/qgsd-research-synthesizer.md +239 -0
- package/agents/qgsd-roadmapper.md +660 -0
- package/agents/qgsd-verifier.md +628 -0
- package/bin/accept-debug-invariant.cjs +165 -0
- package/bin/account-manager.cjs +719 -0
- package/bin/aggregate-requirements.cjs +466 -0
- package/bin/analyze-assumptions.cjs +757 -0
- package/bin/analyze-state-space.cjs +921 -0
- package/bin/attribute-trace-divergence.cjs +150 -0
- package/bin/auth-drivers/gh-cli.cjs +93 -0
- package/bin/auth-drivers/index.cjs +46 -0
- package/bin/auth-drivers/pool.cjs +67 -0
- package/bin/auth-drivers/simple.cjs +95 -0
- package/bin/autoClosePtoF.cjs +110 -0
- package/bin/blessed-terminal.cjs +350 -0
- package/bin/build-phase-index.cjs +472 -0
- package/bin/call-quorum-slot.cjs +541 -0
- package/bin/ccr-secure-config.cjs +99 -0
- package/bin/ccr-secure-start.cjs +83 -0
- package/bin/check-bundled-sdks.cjs +177 -0
- package/bin/check-coverage-guard.cjs +112 -0
- package/bin/check-liveness-fairness.cjs +95 -0
- package/bin/check-mcp-health.cjs +123 -0
- package/bin/check-provider-health.cjs +395 -0
- package/bin/check-results-exit.cjs +24 -0
- package/bin/check-spec-sync.cjs +360 -0
- package/bin/check-trace-redaction.cjs +271 -0
- package/bin/check-trace-schema-drift.cjs +99 -0
- package/bin/compareDrift.cjs +21 -0
- package/bin/conformance-schema.cjs +12 -0
- package/bin/count-scenarios.cjs +420 -0
- package/bin/debt-dedup.cjs +144 -0
- package/bin/debt-ledger.cjs +61 -0
- package/bin/debt-retention.cjs +76 -0
- package/bin/debt-state-machine.cjs +80 -0
- package/bin/detect-coverage-gaps.cjs +204 -0
- package/bin/detect-project-intent.cjs +362 -0
- package/bin/export-prism-constants.cjs +164 -0
- package/bin/extract-annotations.cjs +633 -0
- package/bin/extractFormalExpected.cjs +104 -0
- package/bin/fingerprint-drift.cjs +24 -0
- package/bin/fingerprint-issue.cjs +46 -0
- package/bin/formal-core.cjs +519 -0
- package/bin/formal-ref-linker.cjs +141 -0
- package/bin/formal-test-sync.cjs +788 -0
- package/bin/generate-formal-specs.cjs +588 -0
- package/bin/generate-petri-net.cjs +397 -0
- package/bin/generate-phase-spec.cjs +249 -0
- package/bin/generate-proposed-changes.cjs +194 -0
- package/bin/generate-tla-cfg.cjs +122 -0
- package/bin/generate-traceability-matrix.cjs +701 -0
- package/bin/generate-triage-bundle.cjs +300 -0
- package/bin/gh-account-rotate.cjs +34 -0
- package/bin/initialize-model-registry.cjs +105 -0
- package/bin/install-formal-tools.cjs +382 -0
- package/bin/install.js +2424 -0
- package/bin/isNumericThreshold.cjs +34 -0
- package/bin/issue-classifier.cjs +151 -0
- package/bin/levenshtein.cjs +74 -0
- package/bin/lint-formal-models.cjs +580 -0
- package/bin/load-baseline-requirements.cjs +275 -0
- package/bin/manage-agents-core.cjs +815 -0
- package/bin/migrate-formal-dir.cjs +172 -0
- package/bin/migrate-planning.cjs +206 -0
- package/bin/migrate-to-slots.cjs +255 -0
- package/bin/nForma.cjs +2726 -0
- package/bin/observe-config.cjs +353 -0
- package/bin/observe-debt-writer.cjs +140 -0
- package/bin/observe-handler-grafana.cjs +128 -0
- package/bin/observe-handler-internal.cjs +301 -0
- package/bin/observe-handler-logstash.cjs +153 -0
- package/bin/observe-handler-prometheus.cjs +185 -0
- package/bin/observe-handlers.cjs +436 -0
- package/bin/observe-registry.cjs +131 -0
- package/bin/observe-render.cjs +168 -0
- package/bin/planning-paths.cjs +167 -0
- package/bin/polyrepo.cjs +560 -0
- package/bin/prism-priority.cjs +153 -0
- package/bin/probe-quorum-slots.cjs +167 -0
- package/bin/promote-model.cjs +225 -0
- package/bin/propose-debug-invariants.cjs +165 -0
- package/bin/providers.json +392 -0
- package/bin/pty-proxy.py +129 -0
- package/bin/qgsd-solve.cjs +2477 -0
- package/bin/quorum-consensus-gate.cjs +238 -0
- package/bin/quorum-formal-context.cjs +183 -0
- package/bin/quorum-slot-dispatch.cjs +934 -0
- package/bin/read-policy.cjs +60 -0
- package/bin/requirement-map.cjs +63 -0
- package/bin/requirements-core.cjs +247 -0
- package/bin/resolve-cli.cjs +101 -0
- package/bin/review-mcp-logs.cjs +294 -0
- package/bin/run-account-manager-tlc.cjs +188 -0
- package/bin/run-account-pool-alloy.cjs +158 -0
- package/bin/run-alloy.cjs +153 -0
- package/bin/run-audit-alloy.cjs +187 -0
- package/bin/run-breaker-tlc.cjs +181 -0
- package/bin/run-formal-check.cjs +395 -0
- package/bin/run-formal-verify.cjs +701 -0
- package/bin/run-installer-alloy.cjs +188 -0
- package/bin/run-oauth-rotation-prism.cjs +132 -0
- package/bin/run-oscillation-tlc.cjs +202 -0
- package/bin/run-phase-tlc.cjs +228 -0
- package/bin/run-prism.cjs +446 -0
- package/bin/run-protocol-tlc.cjs +201 -0
- package/bin/run-quorum-composition-alloy.cjs +155 -0
- package/bin/run-sensitivity-sweep.cjs +231 -0
- package/bin/run-stop-hook-tlc.cjs +188 -0
- package/bin/run-tlc.cjs +467 -0
- package/bin/run-transcript-alloy.cjs +173 -0
- package/bin/run-uppaal.cjs +264 -0
- package/bin/secrets.cjs +134 -0
- package/bin/sensitivity-report.cjs +219 -0
- package/bin/sensitivity-sweep-feedback.cjs +194 -0
- package/bin/set-secret.cjs +29 -0
- package/bin/setup-telemetry-cron.sh +36 -0
- package/bin/sweepPtoF.cjs +63 -0
- package/bin/sync-baseline-requirements.cjs +290 -0
- package/bin/task-envelope.cjs +360 -0
- package/bin/telemetry-collector.cjs +229 -0
- package/bin/unified-mcp-server.mjs +735 -0
- package/bin/update-agents.cjs +369 -0
- package/bin/update-scoreboard.cjs +1134 -0
- package/bin/validate-debt-entry.cjs +207 -0
- package/bin/validate-invariant.cjs +419 -0
- package/bin/validate-memory.cjs +389 -0
- package/bin/validate-requirements-haiku.cjs +435 -0
- package/bin/validate-traces.cjs +438 -0
- package/bin/verify-formal-results.cjs +124 -0
- package/bin/verify-quorum-health.cjs +273 -0
- package/bin/write-check-result.cjs +106 -0
- package/bin/xstate-to-tla.cjs +483 -0
- package/bin/xstate-trace-walker.cjs +205 -0
- package/commands/qgsd/add-phase.md +43 -0
- package/commands/qgsd/add-requirement.md +24 -0
- package/commands/qgsd/add-todo.md +47 -0
- package/commands/qgsd/audit-milestone.md +37 -0
- package/commands/qgsd/check-todos.md +45 -0
- package/commands/qgsd/cleanup.md +18 -0
- package/commands/qgsd/close-formal-gaps.md +33 -0
- package/commands/qgsd/complete-milestone.md +136 -0
- package/commands/qgsd/debug.md +166 -0
- package/commands/qgsd/discuss-phase.md +83 -0
- package/commands/qgsd/execute-phase.md +117 -0
- package/commands/qgsd/fix-tests.md +27 -0
- package/commands/qgsd/formal-test-sync.md +32 -0
- package/commands/qgsd/health.md +22 -0
- package/commands/qgsd/help.md +22 -0
- package/commands/qgsd/insert-phase.md +32 -0
- package/commands/qgsd/join-discord.md +18 -0
- package/commands/qgsd/list-phase-assumptions.md +46 -0
- package/commands/qgsd/map-codebase.md +71 -0
- package/commands/qgsd/map-requirements.md +20 -0
- package/commands/qgsd/mcp-restart.md +176 -0
- package/commands/qgsd/mcp-set-model.md +134 -0
- package/commands/qgsd/mcp-setup.md +1371 -0
- package/commands/qgsd/mcp-status.md +274 -0
- package/commands/qgsd/mcp-update.md +238 -0
- package/commands/qgsd/new-milestone.md +44 -0
- package/commands/qgsd/new-project.md +42 -0
- package/commands/qgsd/observe.md +260 -0
- package/commands/qgsd/pause-work.md +38 -0
- package/commands/qgsd/plan-milestone-gaps.md +34 -0
- package/commands/qgsd/plan-phase.md +44 -0
- package/commands/qgsd/polyrepo.md +50 -0
- package/commands/qgsd/progress.md +24 -0
- package/commands/qgsd/queue.md +54 -0
- package/commands/qgsd/quick.md +133 -0
- package/commands/qgsd/quorum-test.md +275 -0
- package/commands/qgsd/quorum.md +707 -0
- package/commands/qgsd/reapply-patches.md +110 -0
- package/commands/qgsd/remove-phase.md +31 -0
- package/commands/qgsd/research-phase.md +189 -0
- package/commands/qgsd/resume-work.md +40 -0
- package/commands/qgsd/set-profile.md +34 -0
- package/commands/qgsd/settings.md +39 -0
- package/commands/qgsd/solve.md +565 -0
- package/commands/qgsd/sync-baselines.md +119 -0
- package/commands/qgsd/triage.md +233 -0
- package/commands/qgsd/update.md +37 -0
- package/commands/qgsd/verify-work.md +38 -0
- package/hooks/dist/config-loader.js +297 -0
- package/hooks/dist/conformance-schema.cjs +12 -0
- package/hooks/dist/gsd-context-monitor.js +64 -0
- package/hooks/dist/qgsd-check-update.js +62 -0
- package/hooks/dist/qgsd-circuit-breaker.js +682 -0
- package/hooks/dist/qgsd-precompact.js +156 -0
- package/hooks/dist/qgsd-prompt.js +653 -0
- package/hooks/dist/qgsd-session-start.js +122 -0
- package/hooks/dist/qgsd-slot-correlator.js +58 -0
- package/hooks/dist/qgsd-spec-regen.js +86 -0
- package/hooks/dist/qgsd-statusline.js +91 -0
- package/hooks/dist/qgsd-stop.js +553 -0
- package/hooks/dist/qgsd-token-collector.js +133 -0
- package/hooks/dist/unified-mcp-server.mjs +669 -0
- package/package.json +95 -0
- package/scripts/build-hooks.js +46 -0
- package/scripts/postinstall.js +48 -0
- package/scripts/secret-audit.sh +45 -0
- package/templates/qgsd.json +49 -0
|
@@ -0,0 +1,301 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Internal work detection handler for /qgsd:observe
|
|
3
|
+
* Scans local project state for:
|
|
4
|
+
* 1. Unfinished quick tasks (PLAN.md without SUMMARY.md)
|
|
5
|
+
* 2. Stale debug sessions (quorum-debug-latest.md)
|
|
6
|
+
* 3. TODO/FIXME/HACK/XXX comments in codebase (tracked as debt)
|
|
7
|
+
* 4. Active milestone phases without VERIFICATION.md
|
|
8
|
+
*
|
|
9
|
+
* Returns standard observe schema: { source_label, source_type, status, issues[] }
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
const fs = require('node:fs');
|
|
13
|
+
const path = require('node:path');
|
|
14
|
+
const { execFileSync } = require('node:child_process');
|
|
15
|
+
|
|
16
|
+
/**
 * Format age from mtime (Date) to human-readable string.
 * Named distinctly from observe-handlers.cjs formatAge(isoString) to avoid confusion.
 * @param {Date} mtime - File modification time
 * @returns {string} Human-readable age like "5m", "2h", "3d"; "unknown" for
 *   non-Date input, "future" when mtime is ahead of the current clock
 */
function formatAgeFromMtime(mtime) {
  if (!mtime || !(mtime instanceof Date)) return 'unknown';
  const diffMs = Date.now() - mtime.getTime();
  if (diffMs < 0) return 'future';
  const minutes = Math.floor(diffMs / 60000);
  if (minutes < 60) return `${minutes}m`;
  const hours = Math.floor(minutes / 60);
  if (hours < 24) return `${hours}h`;
  return `${Math.floor(hours / 24)}d`;
}

/**
 * Category 1: unfinished quick tasks.
 * A task directory under .planning/quick/ named "<num>-<slug>" that contains
 * <num>-PLAN.md but no <num>-SUMMARY.md is considered unfinished.
 * Fail-open: errors are logged as warnings and scanning continues.
 * @param {string} projectRoot - Project root directory
 * @param {Array<object>} issues - Accumulator, mutated in place
 */
function scanQuickTasks(projectRoot, issues) {
  try {
    const quickDir = path.resolve(projectRoot, '.planning/quick');
    if (!fs.existsSync(quickDir)) return;

    for (const entry of fs.readdirSync(quickDir, { withFileTypes: true })) {
      if (!entry.isDirectory()) continue;

      // Directory naming convention: "<num>-<slug>", e.g. "168-add-internal-work-detection"
      const match = entry.name.match(/^(\d+)-/);
      if (!match) continue;

      const taskNum = match[1];
      const slug = entry.name.slice(match[0].length);
      const planPath = path.join(quickDir, entry.name, `${taskNum}-PLAN.md`);
      const summaryPath = path.join(quickDir, entry.name, `${taskNum}-SUMMARY.md`);

      // PLAN exists but SUMMARY does not => work was started, never finished
      if (fs.existsSync(planPath) && !fs.existsSync(summaryPath)) {
        const planStat = fs.statSync(planPath);
        issues.push({
          id: `internal-quick-${taskNum}`,
          title: `Unfinished quick task #${taskNum}: ${slug}`,
          severity: 'warning',
          url: '',
          age: formatAgeFromMtime(planStat.mtime),
          created_at: planStat.mtime.toISOString(),
          meta: 'PLAN exists, no SUMMARY',
          source_type: 'internal',
          issue_type: 'issue',
          _route: `/qgsd:quick "${slug}"`
        });
      }
    }
  } catch (err) {
    // Log warning but continue with other scans (fail-open)
    console.warn(`[observe-internal] Warning scanning quick tasks: ${err.message}`);
  }
}

/**
 * Category 2: stale debug sessions.
 * Reports quorum-debug-latest.md when it is less than 7 days old and its
 * content still reads as unresolved ("unresolved" or "status: open").
 * @param {string} projectRoot - Project root directory
 * @param {Array<object>} issues - Accumulator, mutated in place
 */
function scanDebugSessions(projectRoot, issues) {
  try {
    const debugPath = path.resolve(projectRoot, '.planning/quick/quorum-debug-latest.md');
    if (!fs.existsSync(debugPath)) return;

    const stat = fs.statSync(debugPath);
    const sevenDaysMs = 7 * 24 * 60 * 60 * 1000;
    // Sessions older than 7 days are considered abandoned, not actionable.
    if (Date.now() - stat.mtime.getTime() >= sevenDaysMs) return;

    const content = fs.readFileSync(debugPath, 'utf8');
    if (!/unresolved|status:\s*open/i.test(content)) return;

    issues.push({
      id: 'internal-debug-latest',
      title: 'Unresolved debug session: quorum-debug-latest.md',
      severity: 'info',
      url: '',
      age: formatAgeFromMtime(stat.mtime),
      created_at: stat.mtime.toISOString(),
      meta: 'Debug session may need resolution',
      source_type: 'internal',
      issue_type: 'issue',
      _route: '/qgsd:debug --resume'
    });
  } catch (err) {
    // Log warning but continue with other scans (fail-open)
    console.warn(`[observe-internal] Warning scanning debug sessions: ${err.message}`);
  }
}

/**
 * Parse one grep match line into { filePath, lineNum, content }.
 * With grep -Z the format is "file\0line:content"; for grep builds without
 * -Z support, the legacy "file:line:content" colon format is used as a
 * fallback (which can misparse paths containing colons — hence -Z first).
 * @param {string} line - A single line of grep output
 * @returns {{filePath: string, lineNum: string, content: string}|null}
 */
function parseGrepLine(line) {
  const nulIdx = line.indexOf('\0');
  if (nulIdx >= 0) {
    const remainder = line.slice(nulIdx + 1);
    const colonIdx = remainder.indexOf(':');
    if (colonIdx < 0) return null;
    return {
      filePath: line.slice(0, nulIdx),
      lineNum: remainder.slice(0, colonIdx),
      content: remainder.slice(colonIdx + 1).trim()
    };
  }
  // Fallback: colon-split parsing (file:line:content)
  const firstColon = line.indexOf(':');
  if (firstColon < 0) return null;
  const secondColon = line.indexOf(':', firstColon + 1);
  if (secondColon < 0) return null;
  return {
    filePath: line.slice(0, firstColon),
    lineNum: line.slice(firstColon + 1, secondColon),
    content: line.slice(secondColon + 1).trim()
  };
}

/**
 * Category 3: TODO/FIXME/HACK/XXX comments in the codebase.
 * Runs a single bounded grep (5MB output cap, 15s timeout) over source
 * extensions, excluding dependency/build dirs and .planning/ so the result
 * limit applies to real findings. FIXME/HACK/XXX rank 'warning', TODO 'info'.
 * Issues carry exception_type/function_name fingerprint fields consumed by
 * the debt writer (fingerprintIssue).
 * @param {string} projectRoot - Project root directory
 * @param {Array<object>} issues - Accumulator, mutated in place
 * @param {number} [limitOverride] - Max TODO issues to emit (default 50)
 */
function scanTodoComments(projectRoot, issues, limitOverride) {
  try {
    // Validate projectRoot exists before running grep
    if (!fs.existsSync(projectRoot)) {
      console.warn(`[observe-internal] projectRoot does not exist: ${projectRoot}, skipping TODO scan`);
      return;
    }

    // FIXME/HACK/XXX are treated as more urgent than a plain TODO.
    const todoSeverityMap = { FIXME: 'warning', HACK: 'warning', XXX: 'warning', TODO: 'info' };

    // Exclude at grep level (not post-filter) so the limit cap applies to
    // real results, not .planning/ noise.
    const excludeGlobs = [
      'node_modules', '.git', '.planning',
      'vendor', 'dist', '.next', 'coverage'
    ].map(d => `--exclude-dir=${d}`);

    // File extensions to scan
    const includeExts = [
      '*.js', '*.cjs', '*.mjs', '*.ts', '*.tsx', '*.jsx',
      '*.md', '*.json', '*.py', '*.sh', '*.css', '*.html'
    ].map(g => `--include=${g}`);

    // Single grep call for all tags; -Z gives NUL-separated file names so
    // paths containing colons parse unambiguously.
    const pattern = '\\b(TODO|FIXME|HACK|XXX)\\b';
    const grepArgs = ['-rnZ', '-E', pattern, ...includeExts, ...excludeGlobs, projectRoot];

    let grepOutput = '';
    try {
      grepOutput = execFileSync('grep', grepArgs, {
        encoding: 'utf8',
        maxBuffer: 5 * 1024 * 1024, // 5MB cap
        timeout: 15000 // 15s timeout
      });
    } catch (grepErr) {
      // grep exits 1 when no matches found — that's fine
      if (grepErr.status !== 1) {
        console.warn(`[observe-internal] grep failed: ${grepErr.message}`);
      }
    }
    if (!grepOutput) return;

    // Cap to avoid noise (falsy override intentionally falls back to 50).
    const limit = limitOverride || 50;
    let count = 0;

    for (const line of grepOutput.split('\n')) {
      if (!line.trim()) continue;
      if (count >= limit) break;

      const parsed = parseGrepLine(line);
      if (!parsed) continue;
      const { filePath, lineNum, content } = parsed;

      // Determine which tag matched (first tag in the line wins).
      const tagMatch = content.match(/\b(TODO|FIXME|HACK|XXX)\b/);
      const tag = tagMatch ? tagMatch[1] : 'TODO';
      const severity = todoSeverityMap[tag] || 'info';

      // Relative path reads better in reports
      const relPath = path.relative(projectRoot, filePath);

      issues.push({
        id: `internal-todo-${relPath}:${lineNum}`,
        title: `${tag} in ${relPath}:${lineNum}`,
        severity,
        url: '',
        age: '',
        created_at: new Date().toISOString(),
        meta: content.slice(0, 120),
        source_type: 'internal',
        issue_type: 'issue',
        // Fingerprint fields for the debt writer (fingerprintIssue expects
        // exception_type / function_name / message-like fields).
        exception_type: tag,
        function_name: relPath,
        _route: `/qgsd:quick "Resolve ${tag} at ${relPath}:${lineNum}"`
      });
      count++;
    }
  } catch (err) {
    console.warn(`[observe-internal] Warning scanning TODOs: ${err.message}`);
  }
}

/**
 * Category 4: active milestone phases without VERIFICATION.md.
 * Reads the "Phase:" field from .planning/STATE.md and flags the phase when
 * its directory contains no *-VERIFICATION.md file.
 * @param {string} projectRoot - Project root directory
 * @param {Array<object>} issues - Accumulator, mutated in place
 */
function scanUnverifiedPhases(projectRoot, issues) {
  try {
    const stateFilePath = path.resolve(projectRoot, '.planning/STATE.md');
    if (!fs.existsSync(stateFilePath)) return;

    const stateContent = fs.readFileSync(stateFilePath, 'utf8');
    const phaseMatch = stateContent.match(/^Phase:\s+(.+?)$/m);
    if (!phaseMatch) return;

    let phase = phaseMatch[1].trim();
    // Skip empty or placeholder values
    if (!phase || phase === '-' || phase === '---') return;

    // Sanitize to prevent path traversal (note: also strips uppercase chars,
    // so phases are expected to be lowercase-kebab-case).
    phase = phase.replace(/[^a-z0-9-]/g, '');
    if (!phase) return;

    const phaseDir = path.join(projectRoot, '.planning/phases', phase);
    if (!fs.existsSync(phaseDir) || !fs.statSync(phaseDir).isDirectory()) return;

    const hasVerification = fs.readdirSync(phaseDir).some(f => f.endsWith('-VERIFICATION.md'));
    if (hasVerification) return;

    issues.push({
      id: `internal-milestone-${phase}`,
      title: `Active phase ${phase} has no verification`,
      severity: 'warning',
      url: '',
      age: '',
      created_at: new Date().toISOString(),
      meta: 'Phase active in STATE.md but no VERIFICATION.md found',
      source_type: 'internal',
      issue_type: 'issue',
      _route: '/qgsd:solve'
    });
  } catch (err) {
    // Log warning but continue with other scans (fail-open)
    console.warn(`[observe-internal] Warning scanning milestone phases: ${err.message}`);
  }
}

/**
 * Internal work detection handler.
 * Scans four categories: unfinished quick tasks, stale debug sessions,
 * TODO/FIXME/HACK/XXX comments, and active unverified milestone phases.
 * Each category scan is fail-open (warn and continue); only an unexpected
 * failure outside the scans yields status 'error'.
 *
 * @param {object} sourceConfig - { label?, ...other config }
 * @param {object} options - { projectRoot?, limitOverride? }
 * @returns {object} Standard observe schema: { source_label, source_type, status, issues[] }
 */
function handleInternal(sourceConfig, options) {
  const label = sourceConfig.label || 'Internal Work';
  const projectRoot = options.projectRoot || process.cwd();
  const issues = [];

  try {
    scanQuickTasks(projectRoot, issues);
    scanDebugSessions(projectRoot, issues);
    scanTodoComments(projectRoot, issues, options.limitOverride);
    scanUnverifiedPhases(projectRoot, issues);

    return {
      source_label: label,
      source_type: 'internal',
      status: 'ok',
      issues
    };
  } catch (err) {
    return {
      source_label: label,
      source_type: 'internal',
      status: 'error',
      error: `Internal work detection failed: ${err.message}`,
      issues: []
    };
  }
}
|
|
300
|
+
|
|
301
|
+
module.exports = { handleInternal, formatAgeFromMtime };
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Logstash/Elasticsearch source handler for /qgsd:observe
|
|
3
|
+
* Queries Elasticsearch indices for log entries matching severity filters
|
|
4
|
+
* Returns standard issue schema for the observe registry
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
/**
 * Convert an ISO8601 timestamp into a compact relative-age string.
 * @param {string} isoDate - ISO8601 date string
 * @returns {string} Age such as "12m", "3h", "5d"; "unknown" for a missing
 *   date, "future" when the timestamp is ahead of the current clock
 */
function formatAge(isoDate) {
  if (!isoDate) return 'unknown';
  const elapsedMs = Date.now() - new Date(isoDate).getTime();
  if (elapsedMs < 0) return 'future';
  const totalMinutes = Math.floor(elapsedMs / 60000);
  if (totalMinutes < 60) return `${totalMinutes}m`;
  const totalHours = Math.floor(totalMinutes / 60);
  return totalHours < 24 ? `${totalHours}h` : `${Math.floor(totalHours / 24)}d`;
}
|
|
23
|
+
|
|
24
|
+
/**
 * Normalize a raw log level to one of the standard severities
 * ('error' | 'warning' | 'info').
 *
 * Implemented as a switch rather than an object-literal lookup: with the
 * previous `mapping[lower] || 'info'` form, inherited Object.prototype keys
 * (e.g. level "constructor" or "toString") returned prototype members
 * instead of 'info'.
 *
 * @param {string} level - Log level from Elasticsearch hit
 * @returns {string} Normalized severity; unknown or empty levels map to 'info'
 */
function normalizeSeverity(level) {
  if (!level) return 'info';
  switch (String(level).toLowerCase()) {
    case 'error':
    case 'fatal':
    case 'critical':
      return 'error';
    case 'warn':
    case 'warning':
      return 'warning';
    case 'info':
    case 'debug':
    case 'trace':
      return 'info';
    default:
      // Anything unrecognized is treated as informational.
      return 'info';
  }
}
|
|
44
|
+
|
|
45
|
+
/**
 * Logstash/Elasticsearch source handler.
 * Runs a bool-filter query against the configured index for recent log
 * entries at the configured severity levels and maps each hit to the
 * standard issue schema for the observe registry.
 *
 * @param {object} sourceConfig - { type, label, endpoint, index?, auth_env?, auth_type?, filter?, issue_type? }
 * @param {object} options - { fetchFn? }
 * @returns {Promise<object>} Standard schema result
 */
async function handleLogstash(sourceConfig, options) {
  const label = sourceConfig.label || 'Logstash';
  const endpoint = (sourceConfig.endpoint || '').replace(/\/$/, '');
  const index = sourceConfig.index || 'logstash-*';
  const fetchFn = (options && options.fetchFn) || globalThis.fetch;
  const filter = sourceConfig.filter || {};

  try {
    // Optional auth header: "<auth_type> <token>", ApiKey by default.
    const headers = { 'Content-Type': 'application/json' };
    if (sourceConfig.auth_env) {
      const token = process.env[sourceConfig.auth_env];
      if (token) {
        headers['Authorization'] = `${sourceConfig.auth_type || 'ApiKey'} ${token}`;
      }
    }

    // Elasticsearch DSL: time window + level whitelist, newest first.
    const dsl = {
      query: {
        bool: {
          filter: [
            { range: { '@timestamp': { gte: `now-${filter.since || '1h'}` } } },
            { terms: { level: filter.levels || ['error', 'warn'] } }
          ]
        }
      },
      size: filter.limit || 50,
      sort: [{ '@timestamp': 'desc' }]
    };

    const response = await fetchFn(`${endpoint}/${index}/_search`, {
      method: 'POST',
      headers,
      body: JSON.stringify(dsl)
    });

    if (!response.ok) {
      return {
        source_label: label,
        source_type: 'logstash',
        status: 'error',
        error: `HTTP ${response.status} from Elasticsearch`,
        issues: []
      };
    }

    const payload = await response.json();
    const rawHits = Array.isArray(payload?.hits?.hits) ? payload.hits.hits : [];

    const issues = [];
    for (let idx = 0; idx < rawHits.length; idx++) {
      const hit = rawHits[idx];
      const source = hit._source || {};
      const message = source.message || 'No message';
      const timestamp = source['@timestamp'] || new Date().toISOString();

      // Everything except message/@timestamp/level becomes compact "k=v" meta.
      const metaText = Object.entries(source)
        .filter(([key]) => key !== 'message' && key !== '@timestamp' && key !== 'level')
        .map(([key, val]) => `${key}=${val}`)
        .join(', ');

      issues.push({
        id: `es-${hit._id || idx}`,
        title: message.length > 120 ? message.slice(0, 120) + '...' : message,
        severity: normalizeSeverity(source.level || ''),
        url: '',
        age: formatAge(timestamp),
        created_at: timestamp,
        meta: metaText.length > 200 ? metaText.slice(0, 200) + '...' : metaText,
        source_type: 'logstash',
        issue_type: sourceConfig.issue_type || 'issue'
      });
    }

    return {
      source_label: label,
      source_type: 'logstash',
      status: 'ok',
      issues
    };
  } catch (err) {
    return {
      source_label: label,
      source_type: 'logstash',
      status: 'error',
      error: `Elasticsearch fetch failed: ${err.message}`,
      issues: []
    };
  }
}
|
|
152
|
+
|
|
153
|
+
module.exports = { handleLogstash };
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Prometheus source handler for /qgsd:observe
|
|
3
|
+
* Supports: /api/v1/alerts (active alerts) and /api/v1/query (PromQL)
|
|
4
|
+
* Returns standard issue schema for the observe registry
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
/**
 * Render an ISO8601 timestamp as a short relative age.
 * @param {string} isoDate - ISO8601 date string
 * @returns {string} "Nm", "Nh", or "Nd"; "unknown" when no date is given,
 *   "future" when the timestamp is ahead of now
 */
function formatAge(isoDate) {
  if (!isoDate) return 'unknown';
  const delta = Date.now() - new Date(isoDate).getTime();
  if (delta < 0) return 'future';
  const mins = Math.floor(delta / 60000);
  if (mins >= 60) {
    const hrs = Math.floor(mins / 60);
    return hrs >= 24 ? `${Math.floor(hrs / 24)}d` : `${hrs}h`;
  }
  return `${mins}m`;
}
|
|
23
|
+
|
|
24
|
+
/**
 * Prometheus source handler.
 * When sourceConfig.query is a non-blank string, runs it as PromQL via
 * /api/v1/query; otherwise fetches active alerts via /api/v1/alerts.
 * Results are mapped to the standard issue schema by the matching mapper.
 *
 * @param {object} sourceConfig - { type, label, endpoint, auth_env?, query?, threshold?, issue_type? }
 * @param {object} options - { fetchFn? }
 * @returns {Promise<object>} Standard schema result
 */
async function handlePrometheus(sourceConfig, options) {
  const label = sourceConfig.label || 'Prometheus';
  const endpoint = (sourceConfig.endpoint || '').replace(/\/$/, '');
  const fetchFn = (options && options.fetchFn) || globalThis.fetch;

  try {
    // Optional bearer-token auth from the named environment variable.
    const headers = {};
    if (sourceConfig.auth_env) {
      const token = process.env[sourceConfig.auth_env];
      if (token) headers['Authorization'] = `Bearer ${token}`;
    }

    // Mode selection: a non-blank query string means PromQL, else alerts.
    const useQueryMode = Boolean(sourceConfig.query && sourceConfig.query.trim().length > 0);
    const requestUrl = useQueryMode
      ? `${endpoint}/api/v1/query?query=${encodeURIComponent(sourceConfig.query)}`
      : `${endpoint}/api/v1/alerts`;

    const response = await fetchFn(requestUrl, { headers });
    if (!response.ok) {
      return {
        source_label: label,
        source_type: 'prometheus',
        status: 'error',
        error: `HTTP ${response.status} from Prometheus`,
        issues: []
      };
    }

    const payload = await response.json();
    return useQueryMode
      ? mapQueryResult(payload, sourceConfig, label)
      : mapAlertsResult(payload, sourceConfig, label);
  } catch (err) {
    return {
      source_label: label,
      source_type: 'prometheus',
      status: 'error',
      error: `Prometheus fetch failed: ${err.message}`,
      issues: []
    };
  }
}
|
|
82
|
+
|
|
83
|
+
/**
 * Map a Prometheus /api/v1/alerts response to the standard schema.
 * Each alert becomes one issue; severity comes from the alert's own
 * `severity` label (default 'warning').
 */
function mapAlertsResult(data, sourceConfig, label) {
  const alerts = Array.isArray(data?.data?.alerts) ? data.data.alerts : [];
  const issues = [];

  alerts.forEach((alert, idx) => {
    const alertLabels = alert.labels || {};
    const annotations = alert.annotations || {};
    const activeAt = alert.activeAt || new Date().toISOString();

    // meta = "state: X | <summary> | k=v, k=v" (alertname/severity excluded)
    const metaParts = [`state: ${alert.state || 'unknown'}`];
    if (annotations.summary) metaParts.push(annotations.summary);
    const extraKeys = Object.keys(alertLabels).filter(k => k !== 'alertname' && k !== 'severity');
    if (extraKeys.length > 0) {
      metaParts.push(extraKeys.map(k => `${k}=${alertLabels[k]}`).join(', '));
    }

    issues.push({
      id: `prom-alert-${idx}`,
      title: alertLabels.alertname || `alert-${idx}`,
      severity: alertLabels.severity || 'warning',
      url: alert.generatorURL || `${sourceConfig.endpoint || ''}/alerts`,
      age: formatAge(activeAt),
      created_at: activeAt,
      meta: metaParts.join(' | '),
      source_type: 'prometheus',
      issue_type: sourceConfig.issue_type || 'drift'
    });
  });

  return {
    source_label: label,
    source_type: 'prometheus',
    status: 'ok',
    issues
  };
}
|
|
125
|
+
|
|
126
|
+
/**
 * Map a Prometheus /api/v1/query response to the standard schema.
 * Scalar results yield a single issue; vector results yield one issue per
 * series, titled as `metric_name{label="value", ...}`.
 */
function mapQueryResult(data, sourceConfig, label) {
  const resultType = data?.data?.resultType || 'vector';
  const result = data?.data?.result || [];

  let issues;

  if (resultType === 'scalar') {
    // Scalar payload shape: [unixTimestamp, "value"]
    const scalarValue = Array.isArray(result) ? result[1] : String(result);
    issues = [{
      id: 'prom-query-scalar',
      title: `Query result: ${sourceConfig.query || 'scalar'}`,
      severity: 'info',
      url: sourceConfig.endpoint || '',
      age: '',
      created_at: new Date().toISOString(),
      meta: `value: ${scalarValue}`,
      source_type: 'prometheus',
      issue_type: sourceConfig.issue_type || 'drift'
    }];
  } else {
    // Vector payload: [{ metric: {...labels}, value: [ts, "val"] }, ...]
    const rows = Array.isArray(result) ? result : [];
    issues = rows.map((row, idx) => {
      const metric = row.metric || {};
      const name = metric.__name__ || 'unknown_metric';
      const selector = Object.entries(metric)
        .filter(([key]) => key !== '__name__')
        .map(([key, val]) => `${key}="${val}"`)
        .join(', ');

      return {
        id: `prom-query-${idx}`,
        title: selector ? `${name}{${selector}}` : name,
        severity: 'info',
        url: sourceConfig.endpoint || '',
        age: '',
        created_at: new Date().toISOString(),
        meta: `value: ${Array.isArray(row.value) ? row.value[1] : '0'}`,
        source_type: 'prometheus',
        issue_type: sourceConfig.issue_type || 'drift'
      };
    });
  }

  return {
    source_label: label,
    source_type: 'prometheus',
    status: 'ok',
    issues
  };
}
|
|
184
|
+
|
|
185
|
+
module.exports = { handlePrometheus };
|