mindforge-cc 3.0.0 → 4.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agent/CLAUDE.md +50 -545
- package/.claude/CLAUDE.md +50 -545
- package/.mindforge/audit/AUDIT-SCHEMA.md +20 -1
- package/.mindforge/engine/persona-factory.md +45 -0
- package/.mindforge/engine/swarm-controller.md +59 -0
- package/.mindforge/engine/wave-executor.md +104 -54
- package/.mindforge/memory/pattern-library.jsonl +1 -2
- package/.mindforge/personas/swarm-templates.json +118 -0
- package/.planning/ROI.jsonl +2 -0
- package/CHANGELOG.md +63 -0
- package/MINDFORGE.md +75 -106
- package/README.md +31 -13
- package/RELEASENOTES.md +29 -24
- package/bin/engine/feedback-loop.js +71 -0
- package/bin/engine/nexus-tracer.js +150 -0
- package/bin/engine/temporal-hindsight.js +88 -0
- package/bin/governance/trust-verifier.js +81 -0
- package/bin/governance/ztai-archiver.js +104 -0
- package/bin/governance/ztai-manager.js +203 -0
- package/bin/memory/ghost-pattern-detector.js +69 -0
- package/bin/memory/semantic-hub.js +104 -0
- package/bin/models/finops-hub.js +79 -0
- package/bin/models/model-broker.js +110 -0
- package/docs/INTELLIGENCE-MESH.md +32 -0
- package/docs/PERSONAS.md +63 -0
- package/docs/architecture/NEXUS-DASHBOARD.md +35 -0
- package/docs/architecture/V4-SWARM-MESH.md +77 -0
- package/docs/feature-dashboard.md +6 -1
- package/docs/governance-guide.md +27 -18
- package/docs/references/audit-events.md +6 -0
- package/docs/security/SECURITY.md +15 -4
- package/docs/security/ZTAI-OVERVIEW.md +37 -0
- package/docs/usp-features.md +76 -5
- package/package.json +1 -1
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MindForge ZTAI Trust Verifier
|
|
3
|
+
* v4.2.0-alpha.ztai
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const ztai = require('./ztai-manager');
|
|
8
|
+
|
|
9
|
+
class TrustVerifier {
  /**
   * Verifies a single audit entry's ZTAI identity and signature.
   * @param {object} entry - The audit entry object (must carry `did` and `signature`)
   * @returns {object} - { valid: boolean, error: string|null, tier: number }
   */
  verifyEntry(entry) {
    if (!entry.did || !entry.signature) {
      return { valid: false, error: 'Missing ZTAI identity (did/signature)', tier: 0 };
    }

    try {
      // Reconstruct the signed payload for verification (signature is stripped).
      // NOTE: relies on JSON.stringify preserving the key order used at signing time.
      const { signature, ...payloadObj } = entry;
      const payload = JSON.stringify(payloadObj);

      const isValid = ztai.verifySignature(entry.did, payload, signature);
      const agent = ztai.getAgent(entry.did);

      if (!isValid) {
        return { valid: false, error: 'Cryptographic signature mismatch', tier: agent ? agent.tier : 0 };
      }

      // Guard the tier lookup the same way as the mismatch branch above,
      // so a missing registry record can never throw here.
      return { valid: true, error: null, tier: agent ? agent.tier : 0 };
    } catch (err) {
      return { valid: false, error: err.message, tier: 0 };
    }
  }

  /**
   * Validates a Tier 3 action requirement.
   * @param {string} did - Executing agent DID
   * @returns {boolean} - true only when the agent exists and holds Tier 3+
   */
  isAuthorizedForTier3(did) {
    const agent = ztai.getAgent(did);
    // Boolean() so unknown DIDs yield false rather than undefined.
    return Boolean(agent && agent.tier >= 3);
  }

  /**
   * Scans a file of JSONL audit entries for integrity.
   * @param {string} filePath
   * @returns {Promise<{total: number, valid: number, invalid: number, errors: string[]}>}
   */
  async verifyAuditLog(filePath) {
    // Non-blocking read: the original readFileSync stalled the event loop
    // inside an async method on large audit logs.
    const content = await fs.promises.readFile(filePath, 'utf8');
    const lines = content.split('\n').filter(l => l.trim());
    const results = {
      total: lines.length,
      valid: 0,
      invalid: 0,
      errors: []
    };

    for (const [index, line] of lines.entries()) {
      try {
        const entry = JSON.parse(line);
        const { valid, error } = this.verifyEntry(entry);
        if (valid) {
          results.valid++;
        } else {
          results.invalid++;
          results.errors.push(`Line ${index + 1}: ${error}`);
        }
      } catch (err) {
        results.invalid++;
        results.errors.push(`Line ${index + 1}: Invalid JSON`);
      }
    }

    return results;
  }
}
|
|
80
|
+
|
|
81
|
+
// Shared singleton: all callers verify against the same ZTAI registry state.
module.exports = new TrustVerifier();
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MindForge ZTAI Audit Archiver
|
|
3
|
+
* v4.2.5 — Non-Repudiation Engine
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const crypto = require('node:crypto');
|
|
7
|
+
const fs = require('node:fs/promises');
|
|
8
|
+
const path = require('node:path');
|
|
9
|
+
const ztai = require('./ztai-manager');
|
|
10
|
+
|
|
11
|
+
/**
 * Signs and archives blocks of audit-log entries so that past entries
 * cannot be silently rewritten (non-repudiation).
 */
class ZTAIArchiver {
  constructor(auditPath = '.mindforge/audit/AUDIT.jsonl') {
    this.auditPath = auditPath;
    this.manifestDir = '.mindforge/audit/manifests';
  }

  /**
   * Builds a signed integrity manifest for a block of audit entries.
   * @param {Array<Object>} entries - A block of entries to sign.
   * @param {string} archiverDid - The DID of the archiver (e.g., Release Manager)
   * @returns {Promise<Object|null>} - The signed manifest, or null for an empty block
   */
  async generateManifest(entries, archiverDid) {
    if (!entries || entries.length === 0) {
      return null;
    }

    // Fold the per-entry SHA-256 digests into one cumulative chain hash
    // (a lightweight stand-in for a true Merkle root).
    const sha256 = (input) => crypto.createHash('sha256').update(input).digest('hex');
    const cumulativeHash = entries
      .map((entry) => sha256(JSON.stringify(entry)))
      .reduce((acc, entryHash) => sha256(acc + entryHash), '');

    // Key order matters: the signature below covers this exact serialization.
    const manifestMetadata = {
      blockStart: entries[0].timestamp,
      blockEnd: entries[entries.length - 1].timestamp,
      entryCount: entries.length,
      merkleRoot: cumulativeHash,
      archivedAt: new Date().toISOString(),
      archiver: archiverDid,
      version: 'v4.2.5'
    };

    // Sign the metadata with the archiver's DID.
    const signature = await ztai.signData(archiverDid, JSON.stringify(manifestMetadata));

    return { ...manifestMetadata, signature };
  }

  /**
   * Reads the current AUDIT.jsonl and emits one signed manifest covering
   * every entry in the file. Best-effort: returns null on any failure.
   */
  async archiveAuditLog(archiverDid) {
    try {
      const data = await fs.readFile(this.auditPath, 'utf8');
      const entries = data
        .split('\n')
        .filter(l => l.trim() !== '')
        .map(l => JSON.parse(l));

      if (entries.length === 0) {
        return null;
      }

      const manifest = await this.generateManifest(entries, archiverDid);

      // Make sure the manifest directory exists before writing.
      await fs.mkdir(this.manifestDir, { recursive: true });

      const manifestPath = path.join(this.manifestDir, `manifest_${Date.now()}.json`);
      await fs.writeFile(manifestPath, JSON.stringify(manifest, null, 2));

      console.log(`[ZTAI-ARCHIVER] Manifest generated and signed by ${archiverDid}: ${manifestPath}`);
      return manifest;
    } catch (err) {
      console.error(`[ZTAI-ARCHIVER] Failed to archive audit log: ${err.message}`);
      return null;
    }
  }

  /**
   * Verifies a stored manifest by checking the archiver's signature over
   * its metadata. (Merkle-root recomputation against the raw log is
   * simulated in this version.)
   */
  async verifyIntegrity(manifestPath) {
    const raw = await fs.readFile(manifestPath, 'utf8');
    const { signature, ...manifestMetadata } = JSON.parse(raw);

    // 1. Verify Archiver Signature
    const signatureOk = ztai.verifySignature(
      manifestMetadata.archiver,
      JSON.stringify(manifestMetadata),
      signature
    );
    if (!signatureOk) {
      throw new Error(`CRITICAL: Manifest signature invalid for ${manifestPath}`);
    }

    // 2. Recalculate and Verify Merkle Root (Simulated)
    // In a real environment, this would compare against the actual AUDIT.jsonl data slices.
    console.log(`[ZTAI-ARCHIVER] Integrity Verified for block ending ${manifestMetadata.blockEnd}`);
    return true;
  }
}
|
|
103
|
+
|
|
104
|
+
// Exported as a class (not a singleton): callers choose the audit path.
module.exports = ZTAIArchiver;
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MindForge ZTAI (Zero-Trust Agentic Identity) Manager
|
|
3
|
+
* v4.2.5 — Beast Mode Hardening
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const crypto = require('node:crypto');
|
|
7
|
+
const { promisify } = require('node:util');
|
|
8
|
+
|
|
9
|
+
const generateKeyPair = promisify(crypto.generateKeyPair);
|
|
10
|
+
|
|
11
|
+
/**
 * Abstract base class defining the key-provider contract.
 * Concrete providers must override every method; these defaults
 * throw so an incomplete implementation fails loudly.
 */
class KeyProvider {
  // Generates a key pair for the DID and returns the public key.
  async generate(did) {
    throw new Error('Not implemented');
  }

  // Signs `data` on behalf of the DID.
  async sign(did, data) {
    throw new Error('Not implemented');
  }

  // Replaces the DID's key material.
  async rotate(did) {
    throw new Error('Not implemented');
  }

  // Destroys the DID's key material.
  delete(did) {
    throw new Error('Not implemented');
  }
}
|
|
20
|
+
|
|
21
|
+
/**
 * Standard In-Memory Key Provider (Tier 1-2)
 * Holds Ed25519 private keys as PEM strings in process memory.
 */
class LocalKeyProvider extends KeyProvider {
  constructor() {
    super();
    // DID -> privateKeyPEM
    this.keys = new Map();
  }

  // Creates a fresh Ed25519 pair, stores the private half, returns the public PEM.
  async generate(did) {
    const pair = await generateKeyPair('ed25519');
    this.keys.set(did, pair.privateKey.export({ type: 'pkcs8', format: 'pem' }));
    return pair.publicKey.export({ type: 'spki', format: 'pem' });
  }

  // Signs `data` with the stored key; throws when the DID has no key on file.
  async sign(did, data) {
    const privPEM = this.keys.get(did);
    if (!privPEM) {
      throw new Error(`Private key not found in local store for ${did}`);
    }

    return crypto
      .sign(null, Buffer.from(data), crypto.createPrivateKey(privPEM))
      .toString('base64');
  }

  // Rotation is simply regeneration: the old key is overwritten.
  async rotate(did) {
    return this.generate(did);
  }

  // Drops the DID's private key from memory.
  delete(did) {
    this.keys.delete(did);
  }
}
|
|
55
|
+
|
|
56
|
+
/**
 * Simulated Hardware Security Enclave (Tier 3)
 * Mocks a TPM/KMS environment where keys never leave the "hardware".
 */
class SecureEnclaveProvider extends KeyProvider {
  constructor() {
    super();
    // DID -> { privateKey, provisionedAt, integrityCheck }
    this.enclaveStore = new Map();
  }

  // Provisions a new enclave-held Ed25519 identity; returns the public PEM.
  async generate(did) {
    console.log(`[ZTAI-HSM] Provisioning protected identity enclave for ${did}...`);
    const { publicKey, privateKey } = await generateKeyPair('ed25519');

    this.enclaveStore.set(did, {
      // In a real HSM this would be a key handle/ID rather than the key itself.
      privateKey,
      provisionedAt: new Date().toISOString(),
      integrityCheck: crypto.randomBytes(32).toString('hex')
    });

    return publicKey.export({ type: 'spki', format: 'pem' });
  }

  // Signs inside the simulated enclave; the private key never leaves it.
  async sign(did, data) {
    const record = this.enclaveStore.get(did);
    if (!record) {
      throw new Error(`Enclave record not found for ${did}`);
    }

    console.log(`[ZTAI-HSM] Delegating signature to hardware enclave [DID: ${did}]`);

    // A production enclave would attach verifiable enclave metadata to the
    // signature; this simulation just logs the security event and signs.
    return crypto.sign(null, Buffer.from(data), record.privateKey).toString('base64');
  }

  // Re-provisions the enclave identity; old key material is discarded.
  async rotate(did) {
    console.log(`[ZTAI-HSM] Rotating enclave keys for ${did}...`);
    return this.generate(did);
  }

  // Destroys the enclave record for the DID.
  delete(did) {
    this.enclaveStore.delete(did);
  }
}
|
|
103
|
+
|
|
104
|
+
/**
 * Central registry of agent identities (DIDs): key provisioning,
 * signing, verification, rotation, revocation, and tier checks.
 */
class ZTAIManager {
  constructor() {
    // DID -> { publicKey, persona, tier, providerType, createdAt, rotatedAt? }
    this.agentRegistry = new Map();
    this.providers = {
      local: new LocalKeyProvider(),
      enclave: new SecureEnclaveProvider()
    };
  }

  /**
   * Registers a new agent and assigns a provider based on Trust Tier.
   * @returns {Promise<string>} the newly minted DID
   */
  async registerAgent(persona, tier = 1) {
    const did = `did:mindforge:${crypto.randomUUID()}`;

    // Tier 3 agents use the SecureEnclaveProvider; everyone else keeps local keys.
    const providerType = tier >= 3 ? 'enclave' : 'local';
    const publicKey = await this.providers[providerType].generate(did);

    this.agentRegistry.set(did, {
      publicKey,
      persona,
      tier,
      providerType,
      createdAt: new Date().toISOString()
    });

    return did;
  }

  /**
   * Signs data using the provider associated with the DID.
   */
  async signData(did, data) {
    const agent = this.agentRegistry.get(did);
    if (!agent) {
      throw new Error(`Agent not registered: ${did}`);
    }

    return this.providers[agent.providerType].sign(did, data);
  }

  /**
   * Verifies a signature against the registered public key.
   */
  verifySignature(did, data, signature) {
    const agent = this.agentRegistry.get(did);
    if (!agent) {
      throw new Error(`Agent not registered: ${did}`);
    }

    return crypto.verify(
      null,
      Buffer.from(data),
      crypto.createPublicKey(agent.publicKey),
      Buffer.from(signature, 'base64')
    );
  }

  // Truthy when the agent exists and meets the required tier.
  isAuthorized(did, requiredTier) {
    const agent = this.agentRegistry.get(did);
    return agent && agent.tier >= requiredTier;
  }

  // Rotates the agent's keys in place via its provider.
  async rotateKeys(did) {
    const agent = this.agentRegistry.get(did);
    if (!agent) {
      throw new Error(`Agent not found: ${did}`);
    }

    agent.publicKey = await this.providers[agent.providerType].rotate(did);
    agent.rotatedAt = new Date().toISOString();
    return true;
  }

  // Destroys key material and removes the agent from the registry.
  revokeAgent(did) {
    const agent = this.agentRegistry.get(did);
    if (agent) {
      this.providers[agent.providerType].delete(did);
      this.agentRegistry.delete(did);
    }
  }

  // Returns the registry record for a DID (undefined when unknown).
  getAgent(did) {
    return this.agentRegistry.get(did);
  }

  /**
   * Specialized signing for FinOps budget decisions (Pillar V).
   */
  async signFinOpsDecision(did, decision) {
    return this.signData(did, JSON.stringify(decision));
  }

  /**
   * Specialized signing for Self-Healing repair plans (Pillar VI).
   */
  async signSelfHealPlan(did, plan) {
    return this.signData(did, JSON.stringify(plan));
  }
}

// Singleton: every module shares one identity registry.
module.exports = new ZTAIManager();
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MindForge Ghost Pattern Detector
|
|
3
|
+
* v4.2.5 — Proactive Risk Mitigation
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const semanticHub = require('./semantic-hub');
|
|
7
|
+
const fs = require('node:fs/promises');
|
|
8
|
+
|
|
9
|
+
class GhostPatternDetector {
  /**
   * Analyzes a newly proposed pattern against the global Ghost Hub.
   * @param {Object} proposedPattern - The architecture/pattern being proposed
   *   (expects `id`; `tags` array is optional).
   * @returns {Promise<Array<Object>>} - List of detected risks.
   */
  async analyzeRisk(proposedPattern) {
    console.log(`[GHOST-DETECTOR] Analyzing proposed pattern: ${proposedPattern.id}`);

    // 1. Fetch ghost patterns from semantic hub
    const ghostPatterns = await semanticHub.getGhostPatterns();
    if (ghostPatterns.length === 0) return [];

    // FIX: tolerate patterns without a `tags` array instead of crashing the scan.
    const proposedTags = proposedPattern.tags || [];

    // 2. Fuzzy match or Tag overlap logic (Simulated)
    const risks = ghostPatterns.filter(ghost => {
      const ghostTags = ghost.tags || [];

      // Check for tag overlap
      const overlap = ghostTags.filter(t => proposedTags.includes(t));

      // Risk triggers on a significant overlap (2+ shared tags) or when the
      // ghost itself is flagged 'critical-fail'.
      const isRisk = (overlap.length >= 2 || ghostTags.includes('critical-fail'));
      if (isRisk) {
        console.warn(`[GHOST-DETECTOR] Found potential ghost match: ${ghost.id} (Tags: ${overlap.join(',')})`);
      }
      return isRisk;
    });

    return risks.map(r => ({
      ghostId: r.id,
      riskLevel: (r.tags || []).includes('p0') ? 'CRITICAL' : 'HIGH',
      description: `Pattern similarity detected with past failure: ${r.failureContext || 'N/A'}`,
      mitigation: r.mitigationStrategy || 'Consult mf-reviewer for deep-audit.'
    }));
  }

  /**
   * Batch scan the local project for ghost patterns.
   * @returns {Promise<Array<{patternId: string, risks: Array<Object>}>>}
   */
  async fullScan() {
    const localPatterns = await this.loadLocalPatterns();
    const allRisks = [];

    for (const p of localPatterns) {
      const risks = await this.analyzeRisk(p);
      if (risks.length > 0) allRisks.push({ patternId: p.id, risks });
    }

    return allRisks;
  }

  /**
   * Loads the project-local pattern library (JSONL).
   * Returns [] when the file is missing or unreadable.
   */
  async loadLocalPatterns() {
    const localFile = '.mindforge/memory/pattern-library.jsonl';
    try {
      const data = await fs.readFile(localFile, 'utf8');
      return data.split('\n').filter(Boolean).map(JSON.parse);
    } catch (e) {
      return [];
    }
  }
}
|
|
68
|
+
|
|
69
|
+
// Shared singleton detector instance.
module.exports = new GhostPatternDetector();
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MindForge Semantic Hub
|
|
3
|
+
* v4.2.5 — Global Intelligence Mesh
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const fs = require('node:fs/promises');
|
|
7
|
+
const path = require('node:path');
|
|
8
|
+
const os = require('node:os');
|
|
9
|
+
|
|
10
|
+
class SemanticHub {
  constructor() {
    this.localPath = '.mindforge/memory';
    this.globalPath = path.join(os.homedir(), '.mindforge/memory/global');
    this.syncManifest = path.join(this.localPath, 'sync-manifest.json');
  }

  /**
   * Initializes the global memory store if it doesn't exist.
   */
  async ensureGlobalStore() {
    try {
      await fs.mkdir(this.globalPath, { recursive: true });
      console.log(`[SEMANTIC-HUB] Global Store Initialized: ${this.globalPath}`);
    } catch (err) {
      console.error(`[SEMANTIC-HUB] Failed to initialize global store: ${err.message}`);
    }
  }

  /**
   * Syncs a local library with the global hub (append-only, deduplicated by `id`).
   * @param {string} libraryName - e.g., 'pattern-library.jsonl'
   * @returns {Promise<boolean>} - true on success, false on any failure
   */
  async syncLibrary(libraryName) {
    const localFile = path.join(this.localPath, libraryName);
    const globalFile = path.join(this.globalPath, libraryName);

    try {
      // 1. Read local entries
      const localData = await fs.readFile(localFile, 'utf8');
      const localEntries = localData.split('\n').filter(Boolean).map(JSON.parse);

      // 2. Read global entries (if exist)
      let globalEntries = [];
      try {
        const globalData = await fs.readFile(globalFile, 'utf8');
        globalEntries = globalData.split('\n').filter(Boolean).map(JSON.parse);
      } catch (e) {
        // Global doesn't exist yet, that's fine.
      }

      // 3. Simple ID-based deduplication Logic
      const globalIds = new Set(globalEntries.map(e => e.id));
      const newEntries = localEntries.filter(e => !globalIds.has(e.id));

      if (newEntries.length > 0) {
        // FIX: appendFile fails with ENOENT when the global directory was
        // never initialized; create it here rather than requiring callers
        // to have run ensureGlobalStore() first.
        await fs.mkdir(this.globalPath, { recursive: true });

        // Append new entries to global store
        const appendData = newEntries.map(e => JSON.stringify(e)).join('\n') + '\n';
        await fs.appendFile(globalFile, appendData);
        console.log(`[SEMANTIC-HUB] Synced ${newEntries.length} new entries to global ${libraryName}`);
      }

      // 4. Update sync manifest
      await this.updateManifest(libraryName, localEntries.length);

      return true;
    } catch (err) {
      console.error(`[SEMANTIC-HUB] Sync failed for ${libraryName}: ${err.message}`);
      return false;
    }
  }

  /**
   * Records the last sync time and local entry count for a library.
   */
  async updateManifest(libraryName, count) {
    let manifest = {};
    try {
      const data = await fs.readFile(this.syncManifest, 'utf8');
      manifest = JSON.parse(data);
    } catch (e) {
      // Missing or corrupt manifest: start fresh.
    }

    manifest[libraryName] = {
      lastSync: new Date().toISOString(),
      localCount: count
    };

    await fs.writeFile(this.syncManifest, JSON.stringify(manifest, null, 2));
  }

  /**
   * Retrieves all ghost patterns (type 'ghost-pattern' or tagged 'failure')
   * from the global hub. Returns [] when the global library is missing.
   */
  async getGhostPatterns() {
    const patternFile = path.join(this.globalPath, 'pattern-library.jsonl');
    try {
      const data = await fs.readFile(patternFile, 'utf8');
      return data.split('\n')
        .filter(Boolean)
        .map(JSON.parse)
        .filter(p => p.type === 'ghost-pattern' || p.tags?.includes('failure'));
    } catch (e) {
      return [];
    }
  }
}
|
|
103
|
+
|
|
104
|
+
// Shared singleton hub instance.
module.exports = new SemanticHub();
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MindForge — FinOps Hub (Pillar V: Autonomous FinOps Hub)
|
|
3
|
+
* Enterprise-grade monitoring and budget enforcement for agentic workloads.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const path = require('path');
|
|
8
|
+
|
|
9
|
+
/**
 * Tracks agentic spend against a monthly budget and reports ROI
 * figures for the Nexus Dashboard.
 */
class FinOpsHub {
  constructor(config = {}) {
    this.projectRoot = config.projectRoot || process.cwd();
    // Default allowance: $100.00 USD per month.
    this.budgetLimit = config.budgetLimit || 100.00;
    this.monthlyUsage = 0.00;
  }

  /**
   * Loads accumulated spend from the project's ROI ledger (JSONL).
   * Malformed ledger lines are skipped.
   */
  async init() {
    const roiPath = path.join(this.projectRoot, '.planning', 'ROI.jsonl');
    if (!fs.existsSync(roiPath)) {
      return;
    }

    const lines = fs.readFileSync(roiPath, 'utf8').split('\n').filter(Boolean);
    for (const line of lines) {
      try {
        this.monthlyUsage += JSON.parse(line).estimatedCostUSD || 0;
      } catch (e) {
        // Skip malformed lines
      }
    }
  }

  /**
   * Checks if the task is within budget constraints.
   * @param {Object} task - Task details (difficulty, priority)
   * @returns {Object} - Budget check result (status, reasoning)
   */
  checkBudget(task) {
    const limit = this.budgetLimit;

    if (this.monthlyUsage >= limit) {
      return { status: 'DENIED', reason: `Monthly budget limit ($${limit}) reached.` };
    }

    if (this.monthlyUsage >= limit * 0.9) {
      return { status: 'WARNING', reason: `Project has consumed 90% of the allocated budget ($${this.monthlyUsage.toFixed(2)} / $${limit.toFixed(2)}).` };
    }

    return { status: 'APPROVED', usage: this.monthlyUsage };
  }

  /**
   * Generates a "Spending Profile" for the project.
   * Used for the Nexus Dashboard to visualize ROI.
   */
  getSpendingProfile() {
    const roiPath = path.join(this.projectRoot, '.planning', 'ROI.jsonl');
    if (!fs.existsSync(roiPath)) {
      return { totalSpend: 0, goalsAchieved: 0, roi: 0 };
    }

    const logs = fs.readFileSync(roiPath, 'utf8').split('\n').filter(Boolean).map(JSON.parse);
    let totalSpend = 0;
    let goalsAchieved = 0;
    for (const entry of logs) {
      totalSpend += entry.estimatedCostUSD || 0;
      goalsAchieved += entry.goalAchieved || 0;
    }

    return {
      totalSpend: totalSpend.toFixed(2),
      goalsAchieved,
      roi: totalSpend > 0 ? (goalsAchieved / totalSpend).toFixed(2) : 0,
      tokenEfficiency: logs.length > 0 ? (totalSpend / logs.length).toFixed(4) : 0,
    };
  }

  /**
   * Resets the usage counter (system-level call).
   */
  resetMonthlyUsage() {
    this.monthlyUsage = 0.00;
  }
}
|
|
78
|
+
|
|
79
|
+
// Exported as a class: callers configure projectRoot/budgetLimit per instance.
module.exports = FinOpsHub;
|