@nforma.ai/nforma 0.2.1 → 0.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/agents/{qgsd-codebase-mapper.md → nf-codebase-mapper.md} +1 -1
- package/agents/{qgsd-debugger.md → nf-debugger.md} +3 -3
- package/agents/{qgsd-executor.md → nf-executor.md} +14 -14
- package/agents/{qgsd-integration-checker.md → nf-integration-checker.md} +1 -1
- package/agents/{qgsd-phase-researcher.md → nf-phase-researcher.md} +6 -6
- package/agents/{qgsd-plan-checker.md → nf-plan-checker.md} +9 -9
- package/agents/{qgsd-planner.md → nf-planner.md} +9 -9
- package/agents/{qgsd-project-researcher.md → nf-project-researcher.md} +2 -2
- package/agents/{qgsd-quorum-orchestrator.md → nf-quorum-orchestrator.md} +33 -33
- package/agents/{qgsd-quorum-slot-worker.md → nf-quorum-slot-worker.md} +3 -3
- package/agents/{qgsd-quorum-synthesizer.md → nf-quorum-synthesizer.md} +3 -3
- package/agents/{qgsd-quorum-test-worker.md → nf-quorum-test-worker.md} +1 -1
- package/agents/{qgsd-quorum-worker.md → nf-quorum-worker.md} +6 -6
- package/agents/{qgsd-research-synthesizer.md → nf-research-synthesizer.md} +5 -5
- package/agents/{qgsd-roadmapper.md → nf-roadmapper.md} +3 -3
- package/agents/{qgsd-verifier.md → nf-verifier.md} +8 -8
- package/bin/accept-debug-invariant.cjs +2 -2
- package/bin/account-manager.cjs +10 -10
- package/bin/aggregate-requirements.cjs +1 -1
- package/bin/analyze-assumptions.cjs +3 -3
- package/bin/analyze-state-space.cjs +14 -14
- package/bin/assumption-register.cjs +146 -0
- package/bin/attribute-trace-divergence.cjs +1 -1
- package/bin/auth-drivers/gh-cli.cjs +1 -1
- package/bin/auth-drivers/pool.cjs +1 -1
- package/bin/autoClosePtoF.cjs +3 -3
- package/bin/budget-tracker.cjs +77 -0
- package/bin/build-layer-manifest.cjs +153 -0
- package/bin/call-quorum-slot.cjs +3 -3
- package/bin/ccr-secure-config.cjs +5 -5
- package/bin/check-bundled-sdks.cjs +1 -1
- package/bin/check-mcp-health.cjs +1 -1
- package/bin/check-provider-health.cjs +6 -6
- package/bin/check-spec-sync.cjs +26 -26
- package/bin/check-trace-schema-drift.cjs +5 -5
- package/bin/conformance-schema.cjs +2 -2
- package/bin/cross-layer-dashboard.cjs +297 -0
- package/bin/design-impact.cjs +377 -0
- package/bin/detect-coverage-gaps.cjs +7 -7
- package/bin/failure-mode-catalog.cjs +227 -0
- package/bin/failure-taxonomy.cjs +177 -0
- package/bin/formal-scope-scan.cjs +179 -0
- package/bin/gate-a-grounding.cjs +334 -0
- package/bin/gate-b-abstraction.cjs +243 -0
- package/bin/gate-c-validation.cjs +166 -0
- package/bin/generate-formal-specs.cjs +17 -17
- package/bin/generate-petri-net.cjs +3 -3
- package/bin/generate-tla-cfg.cjs +5 -5
- package/bin/git-heatmap.cjs +571 -0
- package/bin/harness-diagnostic.cjs +326 -0
- package/bin/hazard-model.cjs +261 -0
- package/bin/install-formal-tools.cjs +1 -1
- package/bin/install.js +184 -139
- package/bin/instrumentation-map.cjs +178 -0
- package/bin/invariant-catalog.cjs +437 -0
- package/bin/issue-classifier.cjs +2 -2
- package/bin/load-baseline-requirements.cjs +4 -4
- package/bin/manage-agents-core.cjs +32 -32
- package/bin/migrate-to-slots.cjs +39 -39
- package/bin/mismatch-register.cjs +217 -0
- package/bin/nForma.cjs +176 -81
- package/bin/{qgsd-solve.cjs → nf-solve.cjs} +327 -14
- package/bin/observe-config.cjs +8 -0
- package/bin/observe-debt-writer.cjs +1 -1
- package/bin/observe-handler-deps.cjs +356 -0
- package/bin/observe-handler-grafana.cjs +2 -17
- package/bin/observe-handler-internal.cjs +5 -5
- package/bin/observe-handler-logstash.cjs +2 -17
- package/bin/observe-handler-prometheus.cjs +2 -17
- package/bin/observe-handler-upstream.cjs +251 -0
- package/bin/observe-handlers.cjs +12 -33
- package/bin/observe-render.cjs +68 -22
- package/bin/observe-utils.cjs +37 -0
- package/bin/observed-fsm.cjs +324 -0
- package/bin/planning-paths.cjs +6 -0
- package/bin/polyrepo.cjs +1 -1
- package/bin/probe-quorum-slots.cjs +1 -1
- package/bin/promote-gate-maturity.cjs +274 -0
- package/bin/promote-model.cjs +1 -1
- package/bin/propose-debug-invariants.cjs +1 -1
- package/bin/quorum-cache.cjs +144 -0
- package/bin/quorum-consensus-gate.cjs +1 -1
- package/bin/quorum-preflight.cjs +89 -0
- package/bin/quorum-slot-dispatch.cjs +6 -6
- package/bin/requirements-core.cjs +1 -1
- package/bin/review-mcp-logs.cjs +1 -1
- package/bin/risk-heatmap.cjs +151 -0
- package/bin/run-account-manager-tlc.cjs +4 -4
- package/bin/run-account-pool-alloy.cjs +2 -2
- package/bin/run-alloy.cjs +2 -2
- package/bin/run-audit-alloy.cjs +2 -2
- package/bin/run-breaker-tlc.cjs +3 -3
- package/bin/run-formal-check.cjs +9 -9
- package/bin/run-formal-verify.cjs +30 -9
- package/bin/run-installer-alloy.cjs +2 -2
- package/bin/run-oscillation-tlc.cjs +4 -4
- package/bin/run-phase-tlc.cjs +1 -1
- package/bin/run-protocol-tlc.cjs +4 -4
- package/bin/run-quorum-composition-alloy.cjs +2 -2
- package/bin/run-sensitivity-sweep.cjs +2 -2
- package/bin/run-stop-hook-tlc.cjs +3 -3
- package/bin/run-tlc.cjs +21 -21
- package/bin/run-transcript-alloy.cjs +2 -2
- package/bin/secrets.cjs +5 -5
- package/bin/security-sweep.cjs +238 -0
- package/bin/sensitivity-report.cjs +3 -3
- package/bin/set-secret.cjs +5 -5
- package/bin/setup-telemetry-cron.sh +3 -3
- package/bin/stall-detector.cjs +126 -0
- package/bin/state-candidates.cjs +206 -0
- package/bin/sync-baseline-requirements.cjs +1 -1
- package/bin/telemetry-collector.cjs +1 -1
- package/bin/test-changed.cjs +111 -0
- package/bin/test-recipe-gen.cjs +250 -0
- package/bin/trace-corpus-stats.cjs +211 -0
- package/bin/unified-mcp-server.mjs +3 -3
- package/bin/update-scoreboard.cjs +1 -1
- package/bin/validate-memory.cjs +2 -2
- package/bin/validate-traces.cjs +10 -10
- package/bin/verify-quorum-health.cjs +66 -5
- package/bin/xstate-to-tla.cjs +4 -4
- package/bin/xstate-trace-walker.cjs +3 -3
- package/commands/{qgsd → nf}/add-phase.md +3 -3
- package/commands/{qgsd → nf}/add-requirement.md +3 -3
- package/commands/{qgsd → nf}/add-todo.md +3 -3
- package/commands/{qgsd → nf}/audit-milestone.md +4 -4
- package/commands/{qgsd → nf}/check-todos.md +3 -3
- package/commands/{qgsd → nf}/cleanup.md +3 -3
- package/commands/{qgsd → nf}/close-formal-gaps.md +2 -2
- package/commands/{qgsd → nf}/complete-milestone.md +9 -9
- package/commands/{qgsd → nf}/debug.md +9 -9
- package/commands/{qgsd → nf}/discuss-phase.md +3 -3
- package/commands/{qgsd → nf}/execute-phase.md +15 -15
- package/commands/{qgsd → nf}/fix-tests.md +3 -3
- package/commands/{qgsd → nf}/formal-test-sync.md +1 -1
- package/commands/{qgsd → nf}/health.md +3 -3
- package/commands/{qgsd → nf}/help.md +3 -3
- package/commands/{qgsd → nf}/insert-phase.md +3 -3
- package/commands/nf/join-discord.md +18 -0
- package/commands/{qgsd → nf}/list-phase-assumptions.md +2 -2
- package/commands/{qgsd → nf}/map-codebase.md +7 -7
- package/commands/{qgsd → nf}/map-requirements.md +3 -3
- package/commands/{qgsd → nf}/mcp-restart.md +3 -3
- package/commands/{qgsd → nf}/mcp-set-model.md +8 -8
- package/commands/{qgsd → nf}/mcp-setup.md +63 -63
- package/commands/{qgsd → nf}/mcp-status.md +3 -3
- package/commands/{qgsd → nf}/mcp-update.md +7 -7
- package/commands/{qgsd → nf}/new-milestone.md +8 -8
- package/commands/{qgsd → nf}/new-project.md +8 -8
- package/commands/{qgsd → nf}/observe.md +49 -16
- package/commands/{qgsd → nf}/pause-work.md +3 -3
- package/commands/{qgsd → nf}/plan-milestone-gaps.md +5 -5
- package/commands/{qgsd → nf}/plan-phase.md +6 -6
- package/commands/{qgsd → nf}/polyrepo.md +2 -2
- package/commands/{qgsd → nf}/progress.md +3 -3
- package/commands/{qgsd → nf}/queue.md +2 -2
- package/commands/{qgsd → nf}/quick.md +8 -8
- package/commands/{qgsd → nf}/quorum-test.md +10 -10
- package/commands/{qgsd → nf}/quorum.md +36 -86
- package/commands/{qgsd → nf}/reapply-patches.md +2 -2
- package/commands/{qgsd → nf}/remove-phase.md +3 -3
- package/commands/{qgsd → nf}/research-phase.md +12 -12
- package/commands/{qgsd → nf}/resume-work.md +3 -3
- package/commands/nf/review-requirements.md +31 -0
- package/commands/{qgsd → nf}/set-profile.md +3 -3
- package/commands/{qgsd → nf}/settings.md +6 -6
- package/commands/{qgsd → nf}/solve.md +35 -35
- package/commands/{qgsd → nf}/sync-baselines.md +4 -4
- package/commands/{qgsd → nf}/triage.md +10 -10
- package/commands/{qgsd → nf}/update.md +3 -3
- package/commands/{qgsd → nf}/verify-work.md +5 -5
- package/hooks/dist/config-loader.js +188 -32
- package/hooks/dist/conformance-schema.cjs +2 -2
- package/hooks/dist/gsd-context-monitor.js +118 -13
- package/hooks/dist/{qgsd-check-update.js → nf-check-update.js} +5 -5
- package/hooks/dist/{qgsd-circuit-breaker.js → nf-circuit-breaker.js} +35 -24
- package/hooks/dist/{qgsd-precompact.js → nf-precompact.js} +13 -13
- package/hooks/dist/{qgsd-prompt.js → nf-prompt.js} +110 -33
- package/hooks/dist/nf-session-start.js +185 -0
- package/hooks/dist/{qgsd-slot-correlator.js → nf-slot-correlator.js} +13 -5
- package/hooks/dist/{qgsd-spec-regen.js → nf-spec-regen.js} +17 -8
- package/hooks/dist/{qgsd-statusline.js → nf-statusline.js} +12 -3
- package/hooks/dist/{qgsd-stop.js → nf-stop.js} +152 -18
- package/hooks/dist/{qgsd-token-collector.js → nf-token-collector.js} +12 -4
- package/hooks/dist/unified-mcp-server.mjs +2 -2
- package/package.json +6 -4
- package/scripts/build-hooks.js +13 -6
- package/scripts/secret-audit.sh +1 -1
- package/scripts/verify-hooks-sync.cjs +90 -0
- package/templates/{qgsd.json → nf.json} +4 -4
- package/commands/qgsd/join-discord.md +0 -18
- package/hooks/dist/qgsd-session-start.js +0 -122
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
'use strict';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* test-recipe-gen.cjs — Transform L3 failure modes into executable test recipe JSON.
|
|
6
|
+
*
|
|
7
|
+
* Reads failure-mode-catalog.json, hazard-model.json, and risk-heatmap.json
|
|
8
|
+
* to produce test recipes with setup, input_sequence, expected_outcome,
|
|
9
|
+
* oracle, risk_context, and derived_from for each failure mode.
|
|
10
|
+
*
|
|
11
|
+
* Requirements: RSN-04
|
|
12
|
+
*
|
|
13
|
+
* Usage:
|
|
14
|
+
* node bin/test-recipe-gen.cjs # print summary to stdout
|
|
15
|
+
* node bin/test-recipe-gen.cjs --json # print full results JSON to stdout
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
const fs = require('fs');
|
|
19
|
+
const path = require('path');
|
|
20
|
+
|
|
21
|
+
const ROOT = process.env.PROJECT_ROOT
|
|
22
|
+
|| (process.argv.find(a => a.startsWith('--project-root=')) || '').replace('--project-root=', '')
|
|
23
|
+
|| path.join(__dirname, '..');
|
|
24
|
+
const FORMAL = path.join(ROOT, '.planning', 'formal');
|
|
25
|
+
const REASONING_DIR = path.join(FORMAL, 'reasoning');
|
|
26
|
+
const OUT_DIR = path.join(FORMAL, 'test-recipes');
|
|
27
|
+
const OUT_FILE = path.join(OUT_DIR, 'test-recipes.json');
|
|
28
|
+
|
|
29
|
+
const JSON_FLAG = process.argv.includes('--json');
|
|
30
|
+
|
|
31
|
+
// ── Recipe builders ──────────────────────────────────────────────────────
|
|
32
|
+
|
|
33
|
+
/**
 * Build the setup section of a test recipe.
 * @param {object} fm - Failure-mode record; only `state` is read.
 * @returns {{initial_state: string, preconditions: string[]}}
 */
function buildSetup(fm) {
  const initialState = fm.state;
  const preconditions = [`Machine is in ${initialState} state`];
  return { initial_state: initialState, preconditions };
}
|
|
39
|
+
|
|
40
|
+
/**
 * Build the input sequence for a test recipe: a single event sent to
 * the machine while it sits in the failure mode's source state.
 * @param {object} fm - Failure-mode record; reads `event` and `state`.
 * @returns {Array<{event: string, description: string}>}
 */
function buildInputSequence(fm) {
  const step = {
    event: fm.event,
    description: `Send ${fm.event} event to machine in ${fm.state} state`,
  };
  return [step];
}
|
|
48
|
+
|
|
49
|
+
/**
 * Build the expected outcome for a failure-mode test.
 * - omission:   the transition must fire and land in `to_state`.
 * - commission: the unmodeled event must be rejected (state unchanged).
 * - corruption: exactly `to_state` must be reached, no other state.
 * Unknown modes fall back to a bare final-state expectation.
 * @param {object} fm - Failure-mode record.
 * @returns {object} expected-outcome descriptor
 */
function buildExpectedOutcome(fm) {
  const mode = fm.failure_mode;
  if (mode === 'omission') {
    return {
      final_state: fm.to_state,
      assertions: [`Machine transitions to ${fm.to_state}`],
    };
  }
  if (mode === 'commission') {
    return {
      rejection: true,
      assertions: [`Machine remains in ${fm.state}; event ${fm.event} is rejected`],
    };
  }
  if (mode === 'corruption') {
    return {
      final_state: fm.to_state,
      not_state: 'any_other',
      assertions: [`Machine transitions to exactly ${fm.to_state}, not any other state`],
    };
  }
  return { final_state: fm.to_state, assertions: [] };
}
|
|
71
|
+
|
|
72
|
+
/**
 * Build the oracle (pass/fail check) for a failure-mode test.
 * Oracle type is a function of the failure mode:
 *   omission → state_assertion, commission → guard_rejection,
 *   corruption → state_equality; anything else → unknown.
 * @param {object} fm - Failure-mode record.
 * @returns {{type: string, check: string, failure_indicates: string}}
 */
function buildOracle(fm) {
  const mode = fm.failure_mode;
  if (mode === 'omission') {
    return {
      type: 'state_assertion',
      check: `machine.state === '${fm.to_state}'`,
      failure_indicates: 'Omission: transition did not fire',
    };
  }
  if (mode === 'commission') {
    return {
      type: 'guard_rejection',
      check: `machine.state === '${fm.state}'`,
      failure_indicates: 'Commission: unmodeled event was accepted',
    };
  }
  if (mode === 'corruption') {
    return {
      type: 'state_equality',
      check: `machine.state === '${fm.to_state}'`,
      failure_indicates: 'Corruption: wrong target state reached',
    };
  }
  return { type: 'unknown', check: '', failure_indicates: '' };
}
|
|
96
|
+
|
|
97
|
+
/**
 * Attach risk metadata to a recipe by joining the failure mode against
 * the hazard model and risk heatmap on the `${state}-${event}` key.
 * Missing entries yield null fields rather than being omitted.
 * @param {object} fm - Failure-mode record.
 * @param {Map<string, object>} hazardMap - hazards keyed by state-event.
 * @param {Map<string, object>} riskMap - risk transitions keyed by state-event.
 * @returns {{severity_class: *, rpn: *, risk_tier: *}}
 */
function buildRiskContext(fm, hazardMap, riskMap) {
  const lookupKey = `${fm.state}-${fm.event}`;
  const hazardEntry = hazardMap.get(lookupKey);
  const riskEntry = riskMap.get(lookupKey);

  const context = { severity_class: fm.severity_class || null };
  context.rpn = hazardEntry ? hazardEntry.rpn : null;
  context.risk_tier = riskEntry ? riskEntry.risk_tier : null;
  return context;
}
|
|
108
|
+
|
|
109
|
+
/**
 * Build the traceability links for a recipe. Every recipe references its
 * failure-mode catalog entry; when the failure mode itself was derived
 * from the hazard model, that upstream reference is carried through too.
 * @param {object} fm - Failure-mode record (optional `derived_from` array).
 * @returns {Array<{layer: string, artifact: string, ref: string}>}
 */
function buildDerivedFrom(fm) {
  const refs = [
    {
      layer: 'L3',
      artifact: 'reasoning/failure-mode-catalog.json',
      ref: `failure_modes[id=${fm.id}]`,
    },
  ];

  // Carry through an upstream hazard-model reference when present.
  const upstream = fm.derived_from || [];
  const hazardLink = upstream.find(d => d.artifact === 'reasoning/hazard-model.json');
  if (hazardLink) {
    refs.push({ layer: 'L3', artifact: hazardLink.artifact, ref: hazardLink.ref });
  }

  return refs;
}
|
|
134
|
+
|
|
135
|
+
/**
 * Assemble one executable test recipe for a single failure mode by
 * delegating each section to its dedicated builder.
 * @param {object} fm - Failure-mode record.
 * @param {Map<string, object>} hazardMap - hazards keyed by `${state}-${event}`.
 * @param {Map<string, object>} riskMap - risk transitions keyed by `${state}-${event}`.
 * @returns {object} recipe with id `TR-<fm.id>`
 */
function generateRecipe(fm, hazardMap, riskMap) {
  const recipe = {
    id: `TR-${fm.id}`,
    failure_mode_id: fm.id,
    title: `Test ${fm.failure_mode} of ${fm.state} --[${fm.event}]--> ${fm.to_state}`,
  };
  recipe.setup = buildSetup(fm);
  recipe.input_sequence = buildInputSequence(fm);
  recipe.expected_outcome = buildExpectedOutcome(fm);
  recipe.oracle = buildOracle(fm);
  recipe.risk_context = buildRiskContext(fm, hazardMap, riskMap);
  recipe.derived_from = buildDerivedFrom(fm);
  return recipe;
}
|
|
148
|
+
|
|
149
|
+
// ── Main ─────────────────────────────────────────────────────────────────
|
|
150
|
+
|
|
151
|
+
/**
 * Generate all test recipes and a roll-up summary.
 * Hazards and risk transitions are indexed by `${state}-${event}` so the
 * per-recipe join is O(1); the summary buckets recipes by failure mode
 * (recovered from the oracle type) and by risk tier.
 * @param {Array<object>} failureModes - entries from failure-mode-catalog.json
 * @param {Array<object>} hazards - entries from hazard-model.json
 * @param {Array<object>} riskTransitions - entries from risk-heatmap.json
 * @returns {object} full result document (schema_version, recipes, summary, …)
 */
function generateAllRecipes(failureModes, hazards, riskTransitions) {
  const keyOf = (x) => `${x.state}-${x.event}`;
  const hazardMap = new Map(hazards.map(h => [keyOf(h), h]));
  const riskMap = new Map(riskTransitions.map(t => [keyOf(t), t]));

  const recipes = failureModes.map(fm => generateRecipe(fm, hazardMap, riskMap));

  // Oracle type → failure-mode label (inverse of buildOracle's mapping).
  const oracleTypeToMode = {
    state_assertion: 'omission',
    guard_rejection: 'commission',
    state_equality: 'corruption',
  };

  const byFailureMode = {};
  const byRiskTier = {};
  for (const recipe of recipes) {
    const mode = oracleTypeToMode[recipe.oracle.type] || 'unknown';
    byFailureMode[mode] = (byFailureMode[mode] || 0) + 1;

    const tier = recipe.risk_context.risk_tier || 'unclassified';
    byRiskTier[tier] = (byRiskTier[tier] || 0) + 1;
  }

  return {
    schema_version: '1',
    generated: new Date().toISOString(),
    source: {
      failure_mode_catalog: 'reasoning/failure-mode-catalog.json',
      hazard_model: 'reasoning/hazard-model.json',
      risk_heatmap: 'reasoning/risk-heatmap.json',
    },
    recipes,
    summary: {
      total_recipes: recipes.length,
      by_failure_mode: byFailureMode,
      by_risk_tier: byRiskTier,
    },
  };
}
|
|
195
|
+
|
|
196
|
+
/**
 * CLI entry point: load the failure-mode catalog (required) plus the
 * hazard model and risk heatmap (optional), generate the recipe set,
 * write it to OUT_FILE, and print either the full JSON (--json) or a
 * short human-readable summary.
 * Exits 1 when the failure-mode catalog is missing.
 */
function main() {
  // Required input: the L3 failure-mode catalog.
  const fmPath = path.join(REASONING_DIR, 'failure-mode-catalog.json');
  if (!fs.existsSync(fmPath)) {
    console.error('ERROR: failure-mode-catalog.json not found at', fmPath);
    process.exit(1);
  }
  const failureModes = JSON.parse(fs.readFileSync(fmPath, 'utf8')).failure_modes || [];

  // Optional inputs: enrich risk_context when present, else empty arrays.
  const readOptionalArray = (file, key) => {
    const p = path.join(REASONING_DIR, file);
    if (!fs.existsSync(p)) return [];
    return JSON.parse(fs.readFileSync(p, 'utf8'))[key] || [];
  };
  const hazards = readOptionalArray('hazard-model.json', 'hazards');
  const riskTransitions = readOptionalArray('risk-heatmap.json', 'transitions');

  const result = generateAllRecipes(failureModes, hazards, riskTransitions);

  // Persist the full document before any console output.
  fs.mkdirSync(OUT_DIR, { recursive: true });
  fs.writeFileSync(OUT_FILE, JSON.stringify(result, null, 2) + '\n');

  if (JSON_FLAG) {
    process.stdout.write(JSON.stringify(result));
  } else {
    console.log('Test Recipe Generator');
    console.log(` Total recipes: ${result.summary.total_recipes}`);
    console.log(` By failure mode: ${JSON.stringify(result.summary.by_failure_mode)}`);
    console.log(` By risk tier: ${JSON.stringify(result.summary.by_risk_tier)}`);
    console.log(` Output: ${OUT_FILE}`);
  }
}
|
|
238
|
+
|
|
239
|
+
// CLI entry point: generate recipes when run directly (node bin/test-recipe-gen.cjs).
if (require.main === module) main();

// Library surface: each recipe-section builder is exported individually so
// unit tests can exercise them in isolation.
module.exports = {
  generateRecipe,
  generateAllRecipes,
  buildInputSequence,
  buildExpectedOutcome,
  buildOracle,
  buildSetup,
  buildRiskContext,
  buildDerivedFrom,
};
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
'use strict';
|
|
3
|
+
// bin/trace-corpus-stats.cjs
|
|
4
|
+
// Indexes conformance events by session, action type, and state transition.
|
|
5
|
+
// Infers sessions using timestamp gaps.
|
|
6
|
+
//
|
|
7
|
+
// Requirement: EVID-02
|
|
8
|
+
|
|
9
|
+
const fs = require('fs');
|
|
10
|
+
const path = require('path');
|
|
11
|
+
|
|
12
|
+
const ROOT = process.env.PROJECT_ROOT || path.join(__dirname, '..');
|
|
13
|
+
const EVIDENCE_DIR = path.join(ROOT, '.planning', 'formal', 'evidence');
|
|
14
|
+
const VOCAB_PATH = path.join(EVIDENCE_DIR, 'event-vocabulary.json');
|
|
15
|
+
const OUTPUT_PATH = path.join(EVIDENCE_DIR, 'trace-corpus-stats.json');
|
|
16
|
+
|
|
17
|
+
const JSON_FLAG = process.argv.includes('--json');
|
|
18
|
+
|
|
19
|
+
// Parse --session-gap-ms from CLI
|
|
20
|
+
const DEFAULT_SESSION_GAP_MS = 300000; // 5 minutes
|
|
21
|
+
/**
 * Read the `--session-gap-ms <n>` CLI option.
 * Falls back to DEFAULT_SESSION_GAP_MS when the flag is absent, has no
 * value, or the value is not a positive integer.
 * @returns {number} session gap threshold in milliseconds (> 0)
 */
function parseSessionGapMs() {
  const flagIndex = process.argv.indexOf('--session-gap-ms');
  const rawValue = flagIndex >= 0 ? process.argv[flagIndex + 1] : undefined;
  if (rawValue) {
    const parsed = parseInt(rawValue, 10);
    if (!isNaN(parsed) && parsed > 0) return parsed;
  }
  return DEFAULT_SESSION_GAP_MS;
}
|
|
29
|
+
|
|
30
|
+
// ── Session inference ───────────────────────────────────────────────────────
|
|
31
|
+
|
|
32
|
+
/**
 * Infer sessions from timestamp-sorted events using gap analysis: a gap
 * greater than sessionGapMs between consecutive events starts a new
 * session. Sessions are named session-001, session-002, … in order.
 * @param {Array<object>} events - events carrying `ts` or `timestamp`.
 * @param {number} sessionGapMs - gap threshold in milliseconds.
 * @returns {Array<{id: string, events: Array<object>}>}
 */
function inferSessions(events, sessionGapMs) {
  if (events.length === 0) return [];

  const tsOf = (e) => new Date(e.ts || e.timestamp).getTime();

  const buckets = [];
  let bucket = [events[0]];
  for (let i = 1; i < events.length; i++) {
    if (tsOf(events[i]) - tsOf(events[i - 1]) > sessionGapMs) {
      buckets.push(bucket);
      bucket = [events[i]];
    } else {
      bucket.push(events[i]);
    }
  }
  buckets.push(bucket);

  return buckets.map((sessionEvents, idx) => ({
    id: `session-${String(idx + 1).padStart(3, '0')}`,
    events: sessionEvents,
  }));
}
|
|
65
|
+
|
|
66
|
+
/**
 * Summarize a raw session (id + events) into its reporting shape:
 * ISO start/end derived from min/max valid timestamps, total event
 * count, and a per-action histogram. Events with no `action` field are
 * bucketed under the literal key 'undefined'.
 * @param {{id: string, events: Array<object>}} session
 * @returns {{id: string, start: ?string, end: ?string, event_count: number, actions: Object<string, number>}}
 */
function summarizeSession(session) {
  const sessionEvents = session.events;
  const epochTimes = sessionEvents
    .map(e => new Date(e.ts || e.timestamp).getTime())
    .filter(t => !isNaN(t));
  const hasTimes = epochTimes.length > 0;

  const actionCounts = {};
  for (const evt of sessionEvents) {
    const name = evt.action || 'undefined';
    actionCounts[name] = (actionCounts[name] || 0) + 1;
  }

  return {
    id: session.id,
    start: hasTimes ? new Date(Math.min(...epochTimes)).toISOString() : null,
    end: hasTimes ? new Date(Math.max(...epochTimes)).toISOString() : null,
    event_count: sessionEvents.length,
    actions: actionCounts,
  };
}
|
|
89
|
+
|
|
90
|
+
// ── Main ────────────────────────────────────────────────────────────────────
|
|
91
|
+
|
|
92
|
+
/**
 * CLI entry point: read the conformance-events JSONL corpus, infer
 * sessions by timestamp gap, index events by action and by state
 * transition, validate observed actions against the event vocabulary,
 * and write trace-corpus-stats.json.
 *
 * Exits 1 with a clear message when a required input file is missing.
 */
function main() {
  const sessionGapMs = parseSessionGapMs();

  // Ensure the output directory exists before any write.
  if (!fs.existsSync(EVIDENCE_DIR)) {
    fs.mkdirSync(EVIDENCE_DIR, { recursive: true });
  }

  // Resolve the conformance-events file via the shared planning-paths helper.
  const pp = require('./planning-paths.cjs');
  const eventsPath = pp.resolve(process.cwd(), 'conformance-events');

  if (!fs.existsSync(eventsPath)) {
    console.error(`Conformance events file not found: ${eventsPath}`);
    process.exit(1);
  }

  // Parse JSONL best-effort: malformed lines are dropped, not fatal.
  const raw = fs.readFileSync(eventsPath, 'utf8');
  const lines = raw.split('\n').filter(l => l.trim());
  const events = [];
  for (const line of lines) {
    try {
      events.push(JSON.parse(line));
    } catch (_) {
      // Skip malformed lines
    }
  }

  // Sort chronologically so gap-based session inference is meaningful.
  events.sort((a, b) => {
    const tsA = new Date(a.ts || a.timestamp).getTime();
    const tsB = new Date(b.ts || b.timestamp).getTime();
    return tsA - tsB;
  });

  // Infer sessions and summarize each one.
  const rawSessions = inferSessions(events, sessionGapMs);
  const sessions = rawSessions.map(summarizeSession);

  // Action index: total count and contributing session ids per action.
  const actionIndex = {};
  for (const session of sessions) {
    for (const [action, count] of Object.entries(session.actions)) {
      if (!actionIndex[action]) {
        actionIndex[action] = { total: 0, sessions: [] };
      }
      actionIndex[action].total += count;
      actionIndex[action].sessions.push(session.id);
    }
  }

  // Transition index: occurrence count per from_state→to_state pair.
  const transitionCounts = {};
  for (const event of events) {
    if (event.from_state && event.to_state) {
      const key = `${event.from_state}→${event.to_state}`;
      transitionCounts[key] = (transitionCounts[key] || 0) + 1;
    }
  }
  const transitionIndex = Object.entries(transitionCounts).map(([key, count]) => {
    const [from, to] = key.split('→');
    return { from, to, count };
  });

  // Overall date range of the corpus (null bounds when no valid timestamps).
  const timestamps = events.map(e => new Date(e.ts || e.timestamp).getTime()).filter(t => !isNaN(t));
  const dateRange = timestamps.length > 0
    ? { earliest: new Date(Math.min(...timestamps)).toISOString(), latest: new Date(Math.max(...timestamps)).toISOString() }
    : { earliest: null, latest: null };

  // Vocabulary validation. Guard the read so a missing vocabulary file
  // produces a clear error instead of a raw ENOENT stack trace — this
  // mirrors the existsSync guard used for eventsPath above.
  if (!fs.existsSync(VOCAB_PATH)) {
    console.error(`Event vocabulary file not found: ${VOCAB_PATH}`);
    process.exit(1);
  }
  const vocab = JSON.parse(fs.readFileSync(VOCAB_PATH, 'utf8'));
  const vocabActions = new Set(Object.keys(vocab.vocabulary));
  const allActions = new Set(Object.keys(actionIndex));
  const known = [...allActions].filter(a => vocabActions.has(a));
  const unknown = [...allActions].filter(a => !vocabActions.has(a));

  // Assemble the output document.
  const result = {
    schema_version: '1',
    generated: new Date().toISOString(),
    session_gap_ms: sessionGapMs,
    total_events: events.length,
    date_range: dateRange,
    sessions,
    action_index: actionIndex,
    transition_index: transitionIndex,
    vocabulary_validation: {
      known: known.length,
      unknown: unknown.length,
      unknown_actions: unknown,
    },
    summary: `${events.length} events across ${sessions.length} sessions ` +
      `(${dateRange.earliest} to ${dateRange.latest}). ` +
      `${known.length} known actions, ${unknown.length} unknown. ` +
      `${transitionIndex.length} distinct transitions.`,
  };

  fs.writeFileSync(OUTPUT_PATH, JSON.stringify(result, null, 2) + '\n', 'utf8');

  if (JSON_FLAG) {
    console.log(JSON.stringify(result, null, 2));
  } else {
    console.log(`Trace Corpus Stats Generated`);
    console.log(` Total events: ${events.length}`);
    console.log(` Sessions: ${sessions.length}`);
    console.log(` Date range: ${dateRange.earliest} to ${dateRange.latest}`);
    console.log(` Known actions: ${known.length}, Unknown: ${unknown.length}`);
    console.log(` Transitions: ${transitionIndex.length}`);
  }
}
|
|
204
|
+
|
|
205
|
+
// Export for testing — session inference and summarization are pure
// functions and can be unit-tested without touching the filesystem.
module.exports = { inferSessions, summarizeSession, parseSessionGapMs, DEFAULT_SESSION_GAP_MS };

// Run if invoked directly (skipped when required as a library)
if (require.main === module) {
  main();
}
|
|
@@ -110,7 +110,7 @@ function buildSlotTools(provider) {
|
|
|
110
110
|
// Universal: identity
|
|
111
111
|
tools.push({
|
|
112
112
|
name: 'identity',
|
|
113
|
-
description: 'Get server identity: name, version, active LLM model, and MCP server name. Used by
|
|
113
|
+
description: 'Get server identity: name, version, active LLM model, and MCP server name. Used by nForma to fingerprint the active quorum team.',
|
|
114
114
|
inputSchema: NO_ARGS_SCHEMA,
|
|
115
115
|
});
|
|
116
116
|
|
|
@@ -526,7 +526,7 @@ function buildIdentityResult(provider) {
|
|
|
526
526
|
model,
|
|
527
527
|
display_provider: provider.display_provider ?? null,
|
|
528
528
|
provider: provider.description,
|
|
529
|
-
install_method: '
|
|
529
|
+
install_method: 'nf-monorepo',
|
|
530
530
|
});
|
|
531
531
|
}
|
|
532
532
|
|
|
@@ -699,7 +699,7 @@ async function main() {
|
|
|
699
699
|
const keytarAccount = 'ANTHROPIC_API_KEY_' + SLOT.toUpperCase().replace(/-/g, '_');
|
|
700
700
|
try {
|
|
701
701
|
const { default: keytar } = await import('keytar');
|
|
702
|
-
const secret = await keytar.getPassword('
|
|
702
|
+
const secret = await keytar.getPassword('nforma', keytarAccount);
|
|
703
703
|
if (secret) {
|
|
704
704
|
process.env.ANTHROPIC_API_KEY = secret;
|
|
705
705
|
process.stderr.write(`[unified-mcp-server] Loaded API key for slot ${SLOT} from keychain\n`);
|
|
@@ -555,7 +555,7 @@ async function initTeam(argv) {
|
|
|
555
555
|
let mcps = [];
|
|
556
556
|
let plugins = [];
|
|
557
557
|
try {
|
|
558
|
-
const claudeJsonPath = process.env.
|
|
558
|
+
const claudeJsonPath = process.env.NF_CLAUDE_JSON || path.join(os.homedir(), '.claude.json');
|
|
559
559
|
const claudeJson = JSON.parse(fs.readFileSync(claudeJsonPath, 'utf8'));
|
|
560
560
|
mcps = Object.keys(claudeJson.mcpServers || {});
|
|
561
561
|
plugins = claudeJson.plugins || [];
|
package/bin/validate-memory.cjs
CHANGED
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
/**
|
|
5
5
|
* validate-memory.cjs
|
|
6
6
|
*
|
|
7
|
-
* Memory staleness checker for
|
|
7
|
+
* Memory staleness checker for nForma auto-memory (MEMORY.md).
|
|
8
8
|
*
|
|
9
9
|
* Checks:
|
|
10
10
|
* 1. Stale counts — numbers referencing requirements.json count vs actual
|
|
@@ -147,7 +147,7 @@ function checkDeadFileRefs(memoryContent, cwd) {
|
|
|
147
147
|
|
|
148
148
|
// Skip URLs, patterns, and common non-path references
|
|
149
149
|
if (ref.includes('://') || ref.includes('*') || ref.includes('{')) continue;
|
|
150
|
-
// Skip package names like
|
|
150
|
+
// Skip package names like nforma@0.2.0
|
|
151
151
|
if (ref.includes('@')) continue;
|
|
152
152
|
// Skip references that start with ~ (home dir)
|
|
153
153
|
if (ref.startsWith('~')) {
|
package/bin/validate-traces.cjs
CHANGED
|
@@ -120,8 +120,8 @@ function buildTTrace(event, actualState, expectedStateName, divergenceType, scor
|
|
|
120
120
|
const _path = require('path');
|
|
121
121
|
const _fs = require('fs');
|
|
122
122
|
const _machinePath = (() => {
|
|
123
|
-
const repoDist = _path.join(__dirname, '..', 'dist', 'machines', '
|
|
124
|
-
const installDist = _path.join(__dirname, 'dist', 'machines', '
|
|
123
|
+
const repoDist = _path.join(__dirname, '..', 'dist', 'machines', 'nf-workflow.machine.js');
|
|
124
|
+
const installDist = _path.join(__dirname, 'dist', 'machines', 'nf-workflow.machine.js');
|
|
125
125
|
return _fs.existsSync(repoDist) ? repoDist : installDist;
|
|
126
126
|
})();
|
|
127
127
|
const { createActor } = require(_machinePath);
|
|
@@ -237,15 +237,15 @@ if (require.main === module) {
|
|
|
237
237
|
const _startMs = Date.now();
|
|
238
238
|
const { writeCheckResult } = require('./write-check-result.cjs');
|
|
239
239
|
// Machine CJS path: in the repo, ../dist/machines/ (bin/ → dist/machines/)
|
|
240
|
-
// When installed at ~/.claude/
|
|
240
|
+
// When installed at ~/.claude/nf-bin/, ./dist/machines/ (nf-bin/ → nf-bin/dist/machines/)
|
|
241
241
|
const machinePath = (function () {
|
|
242
|
-
const repoDist = path.join(__dirname, '..', 'dist', 'machines', '
|
|
243
|
-
const installDist = path.join(__dirname, 'dist', 'machines', '
|
|
242
|
+
const repoDist = path.join(__dirname, '..', 'dist', 'machines', 'nf-workflow.machine.js');
|
|
243
|
+
const installDist = path.join(__dirname, 'dist', 'machines', 'nf-workflow.machine.js');
|
|
244
244
|
if (fs.existsSync(repoDist)) return repoDist;
|
|
245
245
|
if (fs.existsSync(installDist)) return installDist;
|
|
246
|
-
throw new Error('[validate-traces] Cannot find
|
|
246
|
+
throw new Error('[validate-traces] Cannot find nf-workflow.machine.js in ' + repoDist + ' or ' + installDist);
|
|
247
247
|
})();
|
|
248
|
-
const { createActor,
|
|
248
|
+
const { createActor, nfWorkflowMachine } = require(machinePath);
|
|
249
249
|
|
|
250
250
|
// Load walker for TTrace export (DIAG-01) — lazy-loaded here to avoid breaking module.exports usage
|
|
251
251
|
let walker = null;
|
|
@@ -358,7 +358,7 @@ if (require.main === module) {
|
|
|
358
358
|
}
|
|
359
359
|
|
|
360
360
|
// Fresh actor per event — each conformance event is a single-step trace
|
|
361
|
-
const actor = createActor(
|
|
361
|
+
const actor = createActor(nfWorkflowMachine);
|
|
362
362
|
actor.start();
|
|
363
363
|
actor.send(xstateEvent);
|
|
364
364
|
const snapshot = actor.getSnapshot();
|
|
@@ -372,7 +372,7 @@ if (require.main === module) {
|
|
|
372
372
|
const roundKey = event.round_id || event.session_id || '__standalone__' + event._lineIndex;
|
|
373
373
|
const roundEvents = roundGroups.get(roundKey) || [event];
|
|
374
374
|
divergences.push(
|
|
375
|
-
buildTTrace(event, snapshot.value, expected, 'state_mismatch', scoreboardMeta, confidence, walker,
|
|
375
|
+
buildTTrace(event, snapshot.value, expected, 'state_mismatch', scoreboardMeta, confidence, walker, nfWorkflowMachine, roundEvents)
|
|
376
376
|
);
|
|
377
377
|
}
|
|
378
378
|
}
|
|
@@ -434,5 +434,5 @@ if (require.main === module) {
|
|
|
434
434
|
}
|
|
435
435
|
|
|
436
436
|
if (typeof module !== 'undefined') {
|
|
437
|
-
module.exports = { computeConfidenceTier, CONFIDENCE_THRESHOLDS, validateMCPMetadata, buildTTrace };
|
|
437
|
+
module.exports = { computeConfidenceTier, CONFIDENCE_THRESHOLDS, validateMCPMetadata, buildTTrace, mapToXStateEvent };
|
|
438
438
|
}
|