agentic-qe 3.7.14 → 3.7.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/skills/skills-manifest.json +1 -1
- package/CHANGELOG.md +27 -0
- package/dist/cli/bundle.js +1187 -508
- package/dist/cli/commands/prove.d.ts +60 -0
- package/dist/cli/commands/prove.js +167 -0
- package/dist/cli/index.js +2 -0
- package/dist/domains/test-generation/pattern-injection/edge-case-injector.d.ts +6 -0
- package/dist/domains/test-generation/pattern-injection/edge-case-injector.js +30 -0
- package/dist/kernel/unified-memory-schemas.d.ts +2 -2
- package/dist/kernel/unified-memory-schemas.js +26 -1
- package/dist/kernel/unified-memory.js +32 -0
- package/dist/learning/aqe-learning-engine.js +2 -1
- package/dist/learning/daily-log.d.ts +43 -0
- package/dist/learning/daily-log.js +91 -0
- package/dist/learning/experience-capture.d.ts +42 -0
- package/dist/learning/experience-capture.js +94 -4
- package/dist/learning/index.d.ts +4 -0
- package/dist/learning/index.js +8 -0
- package/dist/learning/opd-remediation.d.ts +55 -0
- package/dist/learning/opd-remediation.js +130 -0
- package/dist/learning/pattern-lifecycle.d.ts +12 -1
- package/dist/learning/pattern-lifecycle.js +18 -2
- package/dist/learning/pattern-store.d.ts +12 -4
- package/dist/learning/pattern-store.js +178 -19
- package/dist/learning/qe-hooks.d.ts +1 -0
- package/dist/learning/qe-hooks.js +30 -0
- package/dist/learning/qe-patterns.d.ts +6 -0
- package/dist/learning/qe-patterns.js +10 -1
- package/dist/learning/sqlite-persistence.d.ts +40 -0
- package/dist/learning/sqlite-persistence.js +228 -1
- package/dist/mcp/bundle.js +647 -20
- package/dist/mcp/handlers/core-handlers.d.ts +5 -0
- package/dist/mcp/handlers/core-handlers.js +11 -0
- package/dist/mcp/index.d.ts +1 -0
- package/dist/mcp/index.js +2 -0
- package/dist/mcp/tool-scoping.d.ts +36 -0
- package/dist/mcp/tool-scoping.js +129 -0
- package/package.json +1 -1
|
/**
 * Agentic QE v3 - Proof-of-Quality (PoQ) Command
 *
 * Generates a verifiable quality attestation with SHA-256 hash.
 * Proves that quality checks were actually run, not just claimed.
 *
 * Usage: aqe prove [--format json|markdown] [--output file]
 */
import { Command } from 'commander';
import type { CLIContext } from '../handlers/interfaces.js';
/**
 * A verifiable snapshot of project quality state.
 * The `hash` field is a SHA-256 hex digest computed over every other field
 * (see hashAttestation), so tampering with any value invalidates the hash.
 */
export interface QualityAttestation {
    /** Attestation schema version (currently "1.0.0"). */
    version: string;
    /** ISO-8601 timestamp of when the attestation was generated. */
    timestamp: string;
    /** Root directory of the project the attestation describes. */
    projectRoot: string;
    /** Boolean claims derived from the metrics below. */
    attestation: {
        testsExecuted: boolean;
        coverageChecked: boolean;
        securityScanned: boolean;
        qualityGatePassed: boolean;
    };
    /** Raw numbers backing the attestation booleans. */
    metrics: {
        testCount: number;
        passRate: number;
        coveragePercent: number;
        vulnerabilities: number;
        qualityScore: number;
        patternsUsed: number;
    };
    /** SHA-256 hex digest over all fields above (excluding this one). */
    hash: string;
    /** Identifier of the tool that produced this attestation. */
    generatedBy: string;
}
/**
 * Generate SHA-256 hash of attestation data (excluding the hash field itself).
 */
export declare function hashAttestation(data: Omit<QualityAttestation, 'hash'>): string;
/**
 * Collect quality metrics from project artifacts on disk.
 */
export declare function collectMetrics(projectRoot: string): Promise<QualityAttestation['metrics']>;
/**
 * Build a full attestation from metrics.
 */
export declare function buildAttestation(projectRoot: string, metrics: QualityAttestation['metrics']): QualityAttestation;
/**
 * Format attestation as Markdown.
 */
export declare function formatMarkdown(att: QualityAttestation): string;
/**
 * Main prove handler (exported for direct use and testing).
 */
export declare function handleProve(options: {
    format?: 'json' | 'markdown';
    output?: string;
    projectRoot?: string;
}): Promise<QualityAttestation>;
/**
 * Create the Commander command following the project convention.
 */
export declare function createProveCommand(_context: CLIContext, cleanupAndExit: (code: number) => Promise<never>, _ensureInitialized: () => Promise<boolean>): Command;
//# sourceMappingURL=prove.d.ts.map
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Agentic QE v3 - Proof-of-Quality (PoQ) Command
|
|
3
|
+
*
|
|
4
|
+
* Generates a verifiable quality attestation with SHA-256 hash.
|
|
5
|
+
* Proves that quality checks were actually run, not just claimed.
|
|
6
|
+
*
|
|
7
|
+
* Usage: aqe prove [--format json|markdown] [--output file]
|
|
8
|
+
*/
|
|
9
|
+
import { Command } from 'commander';
|
|
10
|
+
import * as crypto from 'crypto';
|
|
11
|
+
import * as fs from 'fs';
|
|
12
|
+
import * as path from 'path';
|
|
13
|
+
/**
 * Generate SHA-256 hash of attestation data (excluding the hash field itself).
 *
 * Serialization uses JSON.stringify in the object's own key order, so the
 * digest is stable for objects produced by buildAttestation (fixed key order).
 */
export function hashAttestation(data) {
    const payload = JSON.stringify(data, null, 0);
    const digest = crypto.createHash('sha256');
    digest.update(payload);
    return digest.digest('hex');
}
|
|
20
|
+
/**
 * Collect quality metrics from project artifacts on disk.
 *
 * All artifacts are optional and read failures are non-fatal:
 *  - junit.xml                       -> test count and pass rate
 *  - coverage/coverage-summary.json  -> line coverage percent
 *  - .agentic-qe/memory.db           -> pattern-usage flag (presence only)
 *
 * NOTE(review): vulnerabilities is never populated here (always 0) — confirm
 * whether a security-scan artifact was meant to feed it.
 */
export async function collectMetrics(projectRoot) {
    let testCount = 0;
    let passRate = 0;
    let coveragePercent = 0;
    const vulnerabilities = 0;
    let patternsUsed = 0;
    // Test results: pull counts out of junit.xml attributes (first occurrence).
    try {
        const junitFile = path.join(projectRoot, 'junit.xml');
        if (fs.existsSync(junitFile)) {
            const xml = fs.readFileSync(junitFile, 'utf-8');
            const testsAttr = xml.match(/tests="(\d+)"/);
            const failuresAttr = xml.match(/failures="(\d+)"/);
            if (testsAttr) {
                testCount = parseInt(testsAttr[1], 10);
                const failed = failuresAttr ? parseInt(failuresAttr[1], 10) : 0;
                passRate = testCount > 0 ? ((testCount - failed) / testCount) * 100 : 0;
            }
        }
    }
    catch { /* non-critical */ }
    // Coverage: istanbul/nyc style summary JSON.
    try {
        const summaryFile = path.join(projectRoot, 'coverage', 'coverage-summary.json');
        if (fs.existsSync(summaryFile)) {
            const summary = JSON.parse(fs.readFileSync(summaryFile, 'utf-8'));
            coveragePercent = summary?.total?.lines?.pct ?? 0;
        }
    }
    catch { /* non-critical */ }
    // Learning patterns: the memory DB existing at all counts as usage.
    try {
        if (fs.existsSync(path.join(projectRoot, '.agentic-qe', 'memory.db'))) {
            patternsUsed = 1;
        }
    }
    catch { /* non-critical */ }
    // Weighted quality score: 40% pass rate, 30% coverage, 30% security.
    const securityScore = vulnerabilities === 0 ? 100 : Math.max(0, 100 - vulnerabilities * 10);
    const qualityScore = Math.round(passRate * 0.4 + coveragePercent * 0.3 + securityScore * 0.3);
    // Key order matters downstream: this object is embedded verbatim in the
    // attestation whose SHA-256 hash depends on serialization order.
    return { testCount, passRate, coveragePercent, vulnerabilities, qualityScore, patternsUsed };
}
|
|
72
|
+
/**
 * Build a full attestation from metrics.
 *
 * The hash is computed over the unsigned payload and appended last, so key
 * order (and therefore the digest) is deterministic.
 *
 * NOTE(review): securityScanned is derived from `vulnerabilities === 0`, which
 * reports "scanned" even when no scan produced the count — confirm semantics.
 */
export function buildAttestation(projectRoot, metrics) {
    const unsigned = {
        version: '1.0.0',
        timestamp: new Date().toISOString(),
        projectRoot,
        attestation: {
            testsExecuted: metrics.testCount > 0,
            coverageChecked: metrics.coveragePercent > 0,
            securityScanned: metrics.vulnerabilities === 0,
            qualityGatePassed: metrics.qualityScore >= 70,
        },
        metrics,
        generatedBy: 'agentic-qe prove',
    };
    return { ...unsigned, hash: hashAttestation(unsigned) };
}
|
|
92
|
+
/**
 * Format attestation as Markdown.
 *
 * Produces a fixed-layout report: header (timestamp / project / hash),
 * attestation check table, metric table, and a generator footer.
 */
export function formatMarkdown(att) {
    const pass = (ok) => (ok ? 'PASS' : 'FAIL');
    const header = [
        '# Proof of Quality',
        '',
        `**Generated:** ${att.timestamp}`,
        `**Project:** ${att.projectRoot}`,
        `**Hash:** \`${att.hash}\``,
    ];
    const checks = [
        '',
        '## Attestation',
        '',
        '| Check | Status |',
        '|-------|--------|',
        `| Tests Executed | ${pass(att.attestation.testsExecuted)} |`,
        `| Coverage Checked | ${pass(att.attestation.coverageChecked)} |`,
        `| Security Scanned | ${pass(att.attestation.securityScanned)} |`,
        `| Quality Gate | ${att.attestation.qualityGatePassed ? 'PASSED' : 'FAILED'} |`,
    ];
    const metricRows = [
        '',
        '## Metrics',
        '',
        '| Metric | Value |',
        '|--------|-------|',
        `| Tests | ${att.metrics.testCount} |`,
        `| Pass Rate | ${att.metrics.passRate.toFixed(1)}% |`,
        `| Coverage | ${att.metrics.coveragePercent.toFixed(1)}% |`,
        `| Vulnerabilities | ${att.metrics.vulnerabilities} |`,
        `| Quality Score | ${att.metrics.qualityScore}/100 |`,
    ];
    const footer = ['', '---', `*${att.generatedBy}*`];
    return [...header, ...checks, ...metricRows, ...footer].join('\n');
}
|
|
126
|
+
/**
 * Main prove handler (exported for direct use and testing).
 *
 * Collects metrics, builds the signed attestation, renders it as JSON
 * (default) or Markdown, and either writes it to `options.output` or prints
 * it to stdout. Returns the attestation object either way.
 */
export async function handleProve(options) {
    const root = options.projectRoot ?? process.cwd();
    const metrics = await collectMetrics(root);
    const attestation = buildAttestation(root, metrics);
    const rendered = options.format === 'markdown'
        ? formatMarkdown(attestation)
        : JSON.stringify(attestation, null, 2);
    if (options.output) {
        fs.writeFileSync(options.output, rendered);
        console.log(`Quality attestation written to ${options.output}`);
    }
    else {
        console.log(rendered);
    }
    return attestation;
}
|
|
145
|
+
/**
 * Create the Commander command following the project convention.
 *
 * The action always exits via cleanupAndExit (0 on success, 1 on failure)
 * so resources are released consistently with the other CLI commands.
 */
export function createProveCommand(_context, cleanupAndExit, _ensureInitialized) {
    const cmd = new Command('prove');
    cmd.description('Generate a verifiable Proof-of-Quality attestation');
    cmd.option('-F, --format <format>', 'Output format (json|markdown)', 'json');
    cmd.option('-o, --output <path>', 'Write attestation to file');
    cmd.action(async (options) => {
        try {
            await handleProve({ format: options.format, output: options.output });
            await cleanupAndExit(0);
        }
        catch (error) {
            console.error('Failed to generate proof-of-quality:', error);
            await cleanupAndExit(1);
        }
    });
    return cmd;
}
|
|
167
|
+
//# sourceMappingURL=prove.js.map
|
package/dist/cli/index.js
CHANGED
|
@@ -820,6 +820,7 @@ import { createHooksCommand } from './commands/hooks.js';
|
|
|
820
820
|
import { createLearningCommand } from './commands/learning.js';
|
|
821
821
|
import { createMcpCommand } from './commands/mcp.js';
|
|
822
822
|
import { createPlatformCommand } from './commands/platform.js';
|
|
823
|
+
import { createProveCommand } from './commands/prove.js';
|
|
823
824
|
program.addCommand(createTokenUsageCommand());
|
|
824
825
|
program.addCommand(createLLMRouterCommand());
|
|
825
826
|
program.addCommand(createSyncCommands());
|
|
@@ -827,6 +828,7 @@ program.addCommand(createHooksCommand());
|
|
|
827
828
|
program.addCommand(createLearningCommand());
|
|
828
829
|
program.addCommand(createMcpCommand());
|
|
829
830
|
program.addCommand(createPlatformCommand());
|
|
831
|
+
program.addCommand(createProveCommand(context, cleanupAndExit, ensureInitialized));
|
|
830
832
|
// ============================================================================
|
|
831
833
|
// Shutdown Handlers
|
|
832
834
|
// ============================================================================
|
|
@@ -58,8 +58,14 @@ export declare class EdgeCaseInjector {
|
|
|
58
58
|
private parsePattern;
|
|
59
59
|
/**
|
|
60
60
|
* Format selected patterns into a prompt context string.
|
|
61
|
+
* Appends OPD remediation hints for patterns with low success rates.
|
|
61
62
|
*/
|
|
62
63
|
private formatPromptContext;
|
|
64
|
+
/**
|
|
65
|
+
* Build a synthetic execution history from pattern metadata.
|
|
66
|
+
* Used to feed into OPD remediation when full history is unavailable.
|
|
67
|
+
*/
|
|
68
|
+
private buildSyntheticHistory;
|
|
63
69
|
/**
|
|
64
70
|
* Infer a short tag from pattern name when no tags are available.
|
|
65
71
|
*/
|
|
@@ -9,6 +9,7 @@
|
|
|
9
9
|
* This creates a continuously improving test generation system where
|
|
10
10
|
* patterns discovered in past testing inform future test generation.
|
|
11
11
|
*/
|
|
12
|
+
import { generateRemediationHints } from '../../../learning/opd-remediation.js';
|
|
12
13
|
export const DEFAULT_INJECTION_CONFIG = {
|
|
13
14
|
topN: 3,
|
|
14
15
|
minConfidence: 0.5,
|
|
@@ -191,6 +192,7 @@ export class EdgeCaseInjector {
|
|
|
191
192
|
}
|
|
192
193
|
/**
|
|
193
194
|
* Format selected patterns into a prompt context string.
|
|
195
|
+
* Appends OPD remediation hints for patterns with low success rates.
|
|
194
196
|
*/
|
|
195
197
|
formatPromptContext(patterns) {
|
|
196
198
|
const lines = ['## Historical Edge Cases (from patterns that caught real bugs):'];
|
|
@@ -200,8 +202,36 @@ export class EdgeCaseInjector {
|
|
|
200
202
|
const desc = p.description || p.name;
|
|
201
203
|
lines.push(`${i + 1}. [${tag}] ${desc}`);
|
|
202
204
|
}
|
|
205
|
+
// OPD: Append remediation hints for weak patterns (successRate < 0.5)
|
|
206
|
+
const weakPatterns = patterns.filter(p => p.successRate < 0.5);
|
|
207
|
+
const allHints = [];
|
|
208
|
+
for (const wp of weakPatterns) {
|
|
209
|
+
const hints = generateRemediationHints({ id: wp.key, name: wp.name, description: wp.description, successRate: wp.successRate, usageCount: wp.usageCount, confidence: wp.confidence, tags: wp.tags }, this.buildSyntheticHistory(wp));
|
|
210
|
+
allHints.push(...hints);
|
|
211
|
+
}
|
|
212
|
+
if (allHints.length > 0) {
|
|
213
|
+
lines.push('');
|
|
214
|
+
lines.push('## Remediation Notes (patterns with known issues):');
|
|
215
|
+
for (const hint of allHints.slice(0, 3)) {
|
|
216
|
+
lines.push(`- [${hint.category}] ${hint.suggestion}`);
|
|
217
|
+
}
|
|
218
|
+
}
|
|
203
219
|
return lines.join('\n');
|
|
204
220
|
}
|
|
221
|
+
/**
|
|
222
|
+
* Build a synthetic execution history from pattern metadata.
|
|
223
|
+
* Used to feed into OPD remediation when full history is unavailable.
|
|
224
|
+
*/
|
|
225
|
+
buildSyntheticHistory(pattern) {
|
|
226
|
+
const total = Math.max(pattern.usageCount, 1);
|
|
227
|
+
const successes = Math.round(total * pattern.successRate);
|
|
228
|
+
const history = [];
|
|
229
|
+
for (let i = 0; i < successes; i++)
|
|
230
|
+
history.push({ success: true });
|
|
231
|
+
for (let i = 0; i < total - successes; i++)
|
|
232
|
+
history.push({ success: false });
|
|
233
|
+
return history;
|
|
234
|
+
}
|
|
205
235
|
/**
|
|
206
236
|
* Infer a short tag from pattern name when no tags are available.
|
|
207
237
|
*/
|
|
@@ -6,14 +6,14 @@
|
|
|
6
6
|
*/
|
|
7
7
|
import { HYPERGRAPH_SCHEMA } from '../migrations/20260120_add_hypergraph_tables.js';
|
|
8
8
|
export { HYPERGRAPH_SCHEMA };
|
|
9
|
-
export declare const SCHEMA_VERSION =
|
|
9
|
+
export declare const SCHEMA_VERSION = 9;
|
|
10
10
|
export declare const SCHEMA_VERSION_TABLE = "\n CREATE TABLE IF NOT EXISTS schema_version (\n id INTEGER PRIMARY KEY CHECK (id = 1),\n version INTEGER NOT NULL,\n migrated_at TEXT DEFAULT (datetime('now'))\n );\n";
|
|
11
11
|
export declare const KV_STORE_SCHEMA = "\n -- Key-Value Store (v2 compatible - same schema as HybridBackend)\n CREATE TABLE IF NOT EXISTS kv_store (\n key TEXT NOT NULL,\n namespace TEXT NOT NULL,\n value TEXT NOT NULL,\n expires_at INTEGER,\n created_at INTEGER DEFAULT (strftime('%s', 'now') * 1000),\n PRIMARY KEY (namespace, key)\n );\n CREATE INDEX IF NOT EXISTS idx_kv_namespace ON kv_store(namespace);\n CREATE INDEX IF NOT EXISTS idx_kv_expires ON kv_store(expires_at) WHERE expires_at IS NOT NULL;\n";
|
|
12
12
|
export declare const VECTORS_SCHEMA = "\n -- Vector Embeddings (new in v3 - replaces in-memory AgentDB)\n CREATE TABLE IF NOT EXISTS vectors (\n id TEXT PRIMARY KEY,\n namespace TEXT NOT NULL DEFAULT 'default',\n embedding BLOB NOT NULL,\n dimensions INTEGER NOT NULL,\n metadata TEXT,\n created_at TEXT DEFAULT (datetime('now')),\n updated_at TEXT DEFAULT (datetime('now'))\n );\n CREATE INDEX IF NOT EXISTS idx_vectors_namespace ON vectors(namespace);\n CREATE INDEX IF NOT EXISTS idx_vectors_dimensions ON vectors(dimensions);\n";
|
|
13
13
|
export declare const RL_QVALUES_SCHEMA = "\n -- Q-Values for RL algorithms (ADR-046)\n CREATE TABLE IF NOT EXISTS rl_q_values (\n id TEXT PRIMARY KEY,\n algorithm TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n state_key TEXT NOT NULL,\n action_key TEXT NOT NULL,\n q_value REAL NOT NULL DEFAULT 0.0,\n visits INTEGER NOT NULL DEFAULT 0,\n last_reward REAL,\n domain TEXT,\n created_at TEXT DEFAULT (datetime('now')),\n updated_at TEXT DEFAULT (datetime('now')),\n UNIQUE(algorithm, agent_id, state_key, action_key)\n );\n CREATE INDEX IF NOT EXISTS idx_qvalues_agent ON rl_q_values(agent_id);\n CREATE INDEX IF NOT EXISTS idx_qvalues_algorithm ON rl_q_values(algorithm);\n CREATE INDEX IF NOT EXISTS idx_qvalues_state ON rl_q_values(agent_id, state_key);\n CREATE INDEX IF NOT EXISTS idx_qvalues_domain ON rl_q_values(domain);\n CREATE INDEX IF NOT EXISTS idx_qvalues_updated ON rl_q_values(updated_at);\n";
|
|
14
14
|
export declare const GOAP_SCHEMA = "\n -- GOAP Goals\n CREATE TABLE IF NOT EXISTS goap_goals (\n id TEXT PRIMARY KEY,\n name TEXT NOT NULL,\n description TEXT,\n conditions TEXT NOT NULL,\n priority INTEGER DEFAULT 3,\n qe_domain TEXT,\n created_at TEXT DEFAULT (datetime('now'))\n );\n\n -- GOAP Actions\n CREATE TABLE IF NOT EXISTS goap_actions (\n id TEXT PRIMARY KEY,\n name TEXT NOT NULL,\n description TEXT,\n agent_type TEXT NOT NULL,\n preconditions TEXT NOT NULL,\n effects TEXT NOT NULL,\n cost REAL DEFAULT 1.0,\n estimated_duration_ms INTEGER,\n success_rate REAL DEFAULT 1.0,\n execution_count INTEGER DEFAULT 0,\n category TEXT NOT NULL,\n qe_domain TEXT,\n created_at TEXT DEFAULT (datetime('now')),\n updated_at TEXT DEFAULT (datetime('now'))\n );\n\n -- GOAP Plans\n CREATE TABLE IF NOT EXISTS goap_plans (\n id TEXT PRIMARY KEY,\n goal_id TEXT,\n initial_state TEXT NOT NULL,\n goal_state TEXT NOT NULL,\n action_sequence TEXT NOT NULL,\n total_cost REAL,\n estimated_duration_ms INTEGER,\n status TEXT DEFAULT 'pending',\n reused_from TEXT,\n similarity_score REAL,\n created_at TEXT DEFAULT (datetime('now')),\n executed_at TEXT,\n completed_at TEXT,\n FOREIGN KEY (goal_id) REFERENCES goap_goals(id)\n );\n\n -- Plan Signatures (for similarity matching)\n CREATE TABLE IF NOT EXISTS goap_plan_signatures (\n id TEXT PRIMARY KEY,\n plan_id TEXT NOT NULL UNIQUE,\n goal_hash TEXT NOT NULL,\n state_vector TEXT NOT NULL,\n action_sequence TEXT NOT NULL,\n total_cost REAL NOT NULL,\n success_rate REAL DEFAULT 1.0,\n usage_count INTEGER DEFAULT 0,\n created_at TEXT DEFAULT (datetime('now'))\n );\n\n -- GOAP Indexes\n CREATE INDEX IF NOT EXISTS idx_goap_actions_category ON goap_actions(category);\n CREATE INDEX IF NOT EXISTS idx_goap_actions_agent ON goap_actions(agent_type);\n CREATE INDEX IF NOT EXISTS idx_goap_plans_status ON goap_plans(status);\n CREATE INDEX IF NOT EXISTS idx_goap_sig_goal ON goap_plan_signatures(goal_hash);\n";
|
|
15
15
|
export declare const DREAM_SCHEMA = "\n -- Concept Graph Nodes (Dream Engine)\n CREATE TABLE IF NOT EXISTS concept_nodes (\n id TEXT PRIMARY KEY,\n concept_type TEXT NOT NULL,\n content TEXT NOT NULL,\n embedding BLOB,\n activation_level REAL DEFAULT 0.0,\n last_activated TEXT,\n pattern_id TEXT,\n metadata TEXT,\n created_at TEXT DEFAULT (datetime('now'))\n );\n\n -- Concept Edges\n CREATE TABLE IF NOT EXISTS concept_edges (\n id TEXT PRIMARY KEY,\n source TEXT NOT NULL,\n target TEXT NOT NULL,\n weight REAL NOT NULL DEFAULT 1.0,\n edge_type TEXT NOT NULL,\n evidence INTEGER DEFAULT 1,\n created_at TEXT DEFAULT (datetime('now')),\n updated_at TEXT DEFAULT (datetime('now')),\n FOREIGN KEY (source) REFERENCES concept_nodes(id) ON DELETE CASCADE,\n FOREIGN KEY (target) REFERENCES concept_nodes(id) ON DELETE CASCADE\n );\n\n -- Dream Cycles\n CREATE TABLE IF NOT EXISTS dream_cycles (\n id TEXT PRIMARY KEY,\n start_time TEXT NOT NULL,\n end_time TEXT,\n duration_ms INTEGER,\n concepts_processed INTEGER DEFAULT 0,\n associations_found INTEGER DEFAULT 0,\n insights_generated INTEGER DEFAULT 0,\n status TEXT DEFAULT 'running',\n error TEXT,\n created_at TEXT DEFAULT (datetime('now'))\n );\n\n -- Dream Insights\n CREATE TABLE IF NOT EXISTS dream_insights (\n id TEXT PRIMARY KEY,\n cycle_id TEXT NOT NULL,\n insight_type TEXT NOT NULL,\n source_concepts TEXT NOT NULL,\n description TEXT NOT NULL,\n novelty_score REAL DEFAULT 0.5,\n confidence_score REAL DEFAULT 0.5,\n actionable INTEGER DEFAULT 0,\n applied INTEGER DEFAULT 0,\n suggested_action TEXT,\n pattern_id TEXT,\n created_at TEXT DEFAULT (datetime('now')),\n FOREIGN KEY (cycle_id) REFERENCES dream_cycles(id) ON DELETE CASCADE\n );\n\n -- Dream Indexes\n CREATE INDEX IF NOT EXISTS idx_concept_type ON concept_nodes(concept_type);\n CREATE INDEX IF NOT EXISTS idx_concept_activation ON concept_nodes(activation_level);\n CREATE INDEX IF NOT EXISTS idx_concept_pattern ON concept_nodes(pattern_id);\n CREATE INDEX IF NOT 
EXISTS idx_edge_source ON concept_edges(source);\n CREATE INDEX IF NOT EXISTS idx_edge_target ON concept_edges(target);\n CREATE INDEX IF NOT EXISTS idx_edge_type ON concept_edges(edge_type);\n CREATE INDEX IF NOT EXISTS idx_edge_weight ON concept_edges(weight DESC);\n CREATE INDEX IF NOT EXISTS idx_insight_cycle ON dream_insights(cycle_id);\n CREATE INDEX IF NOT EXISTS idx_dream_status ON dream_cycles(status);\n";
|
|
16
|
-
export declare const QE_PATTERNS_SCHEMA = "\n -- QE Patterns table (unified from sqlite-persistence.ts)\n CREATE TABLE IF NOT EXISTS qe_patterns (\n id TEXT PRIMARY KEY,\n pattern_type TEXT NOT NULL,\n qe_domain TEXT NOT NULL,\n domain TEXT NOT NULL,\n name TEXT NOT NULL,\n description TEXT,\n confidence REAL DEFAULT 0.5,\n usage_count INTEGER DEFAULT 0,\n success_rate REAL DEFAULT 0.0,\n quality_score REAL DEFAULT 0.0,\n tier TEXT DEFAULT 'short-term',\n template_json TEXT,\n context_json TEXT,\n created_at TEXT DEFAULT (datetime('now')),\n updated_at TEXT DEFAULT (datetime('now')),\n last_used_at TEXT,\n successful_uses INTEGER DEFAULT 0,\n tokens_used INTEGER,\n input_tokens INTEGER,\n output_tokens INTEGER,\n latency_ms REAL,\n reusable INTEGER DEFAULT 0,\n reuse_count INTEGER DEFAULT 0,\n average_token_savings REAL DEFAULT 0,\n total_tokens_saved INTEGER\n );\n\n -- Pattern embeddings table (BLOB storage for vectors)\n CREATE TABLE IF NOT EXISTS qe_pattern_embeddings (\n pattern_id TEXT PRIMARY KEY,\n embedding BLOB NOT NULL,\n dimension INTEGER NOT NULL,\n model TEXT DEFAULT 'all-MiniLM-L6-v2',\n created_at TEXT DEFAULT (datetime('now')),\n FOREIGN KEY (pattern_id) REFERENCES qe_patterns(id) ON DELETE CASCADE\n );\n\n -- Pattern usage history (no FK -- used as analytics log by hooks with synthetic IDs)\n CREATE TABLE IF NOT EXISTS qe_pattern_usage (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n pattern_id TEXT NOT NULL,\n success INTEGER NOT NULL,\n metrics_json TEXT,\n feedback TEXT,\n created_at TEXT DEFAULT (datetime('now'))\n );\n\n -- Learning trajectories\n CREATE TABLE IF NOT EXISTS qe_trajectories (\n id TEXT PRIMARY KEY,\n task TEXT NOT NULL,\n agent TEXT,\n domain TEXT,\n started_at TEXT DEFAULT (datetime('now')),\n ended_at TEXT,\n success INTEGER,\n steps_json TEXT,\n metadata_json TEXT\n );\n\n -- Embeddings table (unified from EmbeddingCache.ts)\n -- Renamed from 'embedding_cache' to 'embeddings' to match existing code\n CREATE TABLE IF NOT EXISTS 
embeddings (\n key TEXT NOT NULL,\n namespace TEXT NOT NULL,\n vector BLOB NOT NULL,\n dimension INTEGER NOT NULL,\n text TEXT NOT NULL,\n timestamp INTEGER NOT NULL,\n quantization TEXT NOT NULL,\n metadata TEXT,\n access_count INTEGER DEFAULT 1,\n last_access INTEGER NOT NULL,\n PRIMARY KEY (key, namespace)\n );\n\n -- Execution results table (unified from plan-executor.ts)\n CREATE TABLE IF NOT EXISTS execution_results (\n id TEXT PRIMARY KEY,\n plan_id TEXT NOT NULL,\n status TEXT NOT NULL,\n steps_completed INTEGER DEFAULT 0,\n steps_failed INTEGER DEFAULT 0,\n total_duration_ms INTEGER DEFAULT 0,\n final_world_state TEXT,\n error_message TEXT,\n created_at TEXT DEFAULT (datetime('now'))\n );\n\n -- Executed steps table (unified from plan-executor.ts)\n CREATE TABLE IF NOT EXISTS executed_steps (\n id TEXT PRIMARY KEY,\n execution_id TEXT NOT NULL,\n plan_id TEXT NOT NULL,\n action_id TEXT NOT NULL,\n step_order INTEGER NOT NULL,\n status TEXT NOT NULL,\n retries INTEGER DEFAULT 0,\n started_at TEXT NOT NULL,\n completed_at TEXT,\n duration_ms INTEGER,\n agent_id TEXT,\n agent_output TEXT,\n world_state_before TEXT,\n world_state_after TEXT,\n error_message TEXT,\n FOREIGN KEY (execution_id) REFERENCES execution_results(id)\n );\n\n -- QE Patterns indexes\n CREATE INDEX IF NOT EXISTS idx_qe_patterns_domain ON qe_patterns(qe_domain);\n CREATE INDEX IF NOT EXISTS idx_qe_patterns_type ON qe_patterns(pattern_type);\n CREATE INDEX IF NOT EXISTS idx_qe_patterns_tier ON qe_patterns(tier);\n CREATE INDEX IF NOT EXISTS idx_qe_patterns_quality ON qe_patterns(quality_score DESC);\n CREATE INDEX IF NOT EXISTS idx_qe_usage_pattern ON qe_pattern_usage(pattern_id);\n CREATE INDEX IF NOT EXISTS idx_qe_trajectories_domain ON qe_trajectories(domain);\n CREATE INDEX IF NOT EXISTS idx_embeddings_namespace ON embeddings(namespace);\n CREATE INDEX IF NOT EXISTS idx_embeddings_timestamp ON embeddings(timestamp);\n CREATE INDEX IF NOT EXISTS idx_execution_results_plan ON 
execution_results(plan_id);\n CREATE INDEX IF NOT EXISTS idx_execution_results_status ON execution_results(status);\n CREATE INDEX IF NOT EXISTS idx_executed_steps_execution ON executed_steps(execution_id);\n CREATE INDEX IF NOT EXISTS idx_executed_steps_action ON executed_steps(action_id);\n";
|
|
16
|
+
/**
 * DDL for the QE pattern-learning store (schema v9): qe_patterns plus
 * embeddings, usage/trajectory logs, plan-execution tables, the
 * qe_patterns_fts FTS5 index with its insert/delete/update sync triggers,
 * and all supporting indexes. Safe to re-run (CREATE ... IF NOT EXISTS).
 */
export declare const QE_PATTERNS_SCHEMA = "\n  -- QE Patterns table (unified from sqlite-persistence.ts)\n  CREATE TABLE IF NOT EXISTS qe_patterns (\n    id TEXT PRIMARY KEY,\n    pattern_type TEXT NOT NULL,\n    qe_domain TEXT NOT NULL,\n    domain TEXT NOT NULL,\n    name TEXT NOT NULL,\n    description TEXT,\n    confidence REAL DEFAULT 0.5,\n    usage_count INTEGER DEFAULT 0,\n    success_rate REAL DEFAULT 0.0,\n    quality_score REAL DEFAULT 0.0,\n    tier TEXT DEFAULT 'short-term',\n    template_json TEXT,\n    context_json TEXT,\n    created_at TEXT DEFAULT (datetime('now')),\n    updated_at TEXT DEFAULT (datetime('now')),\n    last_used_at TEXT,\n    successful_uses INTEGER DEFAULT 0,\n    tokens_used INTEGER,\n    input_tokens INTEGER,\n    output_tokens INTEGER,\n    latency_ms REAL,\n    reusable INTEGER DEFAULT 0,\n    reuse_count INTEGER DEFAULT 0,\n    average_token_savings REAL DEFAULT 0,\n    total_tokens_saved INTEGER\n  );\n\n  -- Pattern embeddings table (BLOB storage for vectors)\n  CREATE TABLE IF NOT EXISTS qe_pattern_embeddings (\n    pattern_id TEXT PRIMARY KEY,\n    embedding BLOB NOT NULL,\n    dimension INTEGER NOT NULL,\n    model TEXT DEFAULT 'all-MiniLM-L6-v2',\n    created_at TEXT DEFAULT (datetime('now')),\n    FOREIGN KEY (pattern_id) REFERENCES qe_patterns(id) ON DELETE CASCADE\n  );\n\n  -- Pattern usage history (no FK -- used as analytics log by hooks with synthetic IDs)\n  CREATE TABLE IF NOT EXISTS qe_pattern_usage (\n    id INTEGER PRIMARY KEY AUTOINCREMENT,\n    pattern_id TEXT NOT NULL,\n    success INTEGER NOT NULL,\n    metrics_json TEXT,\n    feedback TEXT,\n    created_at TEXT DEFAULT (datetime('now'))\n  );\n\n  -- Learning trajectories\n  CREATE TABLE IF NOT EXISTS qe_trajectories (\n    id TEXT PRIMARY KEY,\n    task TEXT NOT NULL,\n    agent TEXT,\n    domain TEXT,\n    started_at TEXT DEFAULT (datetime('now')),\n    ended_at TEXT,\n    success INTEGER,\n    steps_json TEXT,\n    metadata_json TEXT\n  );\n\n  -- Embeddings table (unified from EmbeddingCache.ts)\n  -- Renamed from 'embedding_cache' to 'embeddings' to match existing code\n  CREATE TABLE IF NOT EXISTS embeddings (\n    key TEXT NOT NULL,\n    namespace TEXT NOT NULL,\n    vector BLOB NOT NULL,\n    dimension INTEGER NOT NULL,\n    text TEXT NOT NULL,\n    timestamp INTEGER NOT NULL,\n    quantization TEXT NOT NULL,\n    metadata TEXT,\n    access_count INTEGER DEFAULT 1,\n    last_access INTEGER NOT NULL,\n    PRIMARY KEY (key, namespace)\n  );\n\n  -- Execution results table (unified from plan-executor.ts)\n  CREATE TABLE IF NOT EXISTS execution_results (\n    id TEXT PRIMARY KEY,\n    plan_id TEXT NOT NULL,\n    status TEXT NOT NULL,\n    steps_completed INTEGER DEFAULT 0,\n    steps_failed INTEGER DEFAULT 0,\n    total_duration_ms INTEGER DEFAULT 0,\n    final_world_state TEXT,\n    error_message TEXT,\n    created_at TEXT DEFAULT (datetime('now'))\n  );\n\n  -- Executed steps table (unified from plan-executor.ts)\n  CREATE TABLE IF NOT EXISTS executed_steps (\n    id TEXT PRIMARY KEY,\n    execution_id TEXT NOT NULL,\n    plan_id TEXT NOT NULL,\n    action_id TEXT NOT NULL,\n    step_order INTEGER NOT NULL,\n    status TEXT NOT NULL,\n    retries INTEGER DEFAULT 0,\n    started_at TEXT NOT NULL,\n    completed_at TEXT,\n    duration_ms INTEGER,\n    agent_id TEXT,\n    agent_output TEXT,\n    world_state_before TEXT,\n    world_state_after TEXT,\n    error_message TEXT,\n    FOREIGN KEY (execution_id) REFERENCES execution_results(id)\n  );\n\n  -- FTS5 full-text search index for hybrid vector/text search\n  CREATE VIRTUAL TABLE IF NOT EXISTS qe_patterns_fts USING fts5(\n    name, description, pattern_type, qe_domain,\n    content='qe_patterns',\n    content_rowid='rowid'\n  );\n\n  -- FTS5 triggers to keep index in sync\n  CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_insert AFTER INSERT ON qe_patterns BEGIN\n    INSERT INTO qe_patterns_fts(rowid, name, description, pattern_type, qe_domain)\n    VALUES (new.rowid, new.name, new.description, new.pattern_type, new.qe_domain);\n  END;\n\n  CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_delete AFTER DELETE ON qe_patterns BEGIN\n    INSERT INTO qe_patterns_fts(qe_patterns_fts, rowid, name, description, pattern_type, qe_domain)\n    VALUES ('delete', old.rowid, old.name, old.description, old.pattern_type, old.qe_domain);\n  END;\n\n  CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_update AFTER UPDATE ON qe_patterns BEGIN\n    INSERT INTO qe_patterns_fts(qe_patterns_fts, rowid, name, description, pattern_type, qe_domain)\n    VALUES ('delete', old.rowid, old.name, old.description, old.pattern_type, old.qe_domain);\n    INSERT INTO qe_patterns_fts(rowid, name, description, pattern_type, qe_domain)\n    VALUES (new.rowid, new.name, new.description, new.pattern_type, new.qe_domain);\n  END;\n\n  -- QE Patterns indexes\n  CREATE INDEX IF NOT EXISTS idx_qe_patterns_domain ON qe_patterns(qe_domain);\n  CREATE INDEX IF NOT EXISTS idx_qe_patterns_type ON qe_patterns(pattern_type);\n  CREATE INDEX IF NOT EXISTS idx_qe_patterns_tier ON qe_patterns(tier);\n  CREATE INDEX IF NOT EXISTS idx_qe_patterns_quality ON qe_patterns(quality_score DESC);\n  CREATE INDEX IF NOT EXISTS idx_qe_usage_pattern ON qe_pattern_usage(pattern_id);\n  CREATE INDEX IF NOT EXISTS idx_qe_trajectories_domain ON qe_trajectories(domain);\n  CREATE INDEX IF NOT EXISTS idx_embeddings_namespace ON embeddings(namespace);\n  CREATE INDEX IF NOT EXISTS idx_embeddings_timestamp ON embeddings(timestamp);\n  CREATE INDEX IF NOT EXISTS idx_execution_results_plan ON execution_results(plan_id);\n  CREATE INDEX IF NOT EXISTS idx_execution_results_status ON execution_results(status);\n  CREATE INDEX IF NOT EXISTS idx_executed_steps_execution ON executed_steps(execution_id);\n  CREATE INDEX IF NOT EXISTS idx_executed_steps_action ON executed_steps(action_id);\n";
|
|
17
17
|
/**
 * DDL for MinCut graph-resilience tracking (ADR-047): graph snapshots,
 * time-series MinCut history, detected weak vertices, alerts, self-healing
 * action history, strange-loop observations, and their indexes.
 * Safe to re-run (CREATE ... IF NOT EXISTS).
 */
export declare const MINCUT_SCHEMA = "\n  -- MinCut Graph Snapshots (ADR-047)\n  CREATE TABLE IF NOT EXISTS mincut_snapshots (\n    id TEXT PRIMARY KEY,\n    timestamp TEXT NOT NULL DEFAULT (datetime('now')),\n    vertex_count INTEGER NOT NULL,\n    edge_count INTEGER NOT NULL,\n    total_weight REAL NOT NULL DEFAULT 0.0,\n    is_connected INTEGER NOT NULL DEFAULT 1,\n    component_count INTEGER NOT NULL DEFAULT 1,\n    vertices_json TEXT NOT NULL,\n    edges_json TEXT NOT NULL,\n    created_at TEXT DEFAULT (datetime('now'))\n  );\n\n  -- MinCut History (time-series MinCut values)\n  CREATE TABLE IF NOT EXISTS mincut_history (\n    id INTEGER PRIMARY KEY AUTOINCREMENT,\n    timestamp TEXT NOT NULL DEFAULT (datetime('now')),\n    mincut_value REAL NOT NULL,\n    vertex_count INTEGER NOT NULL,\n    edge_count INTEGER NOT NULL,\n    algorithm TEXT NOT NULL DEFAULT 'weighted-degree',\n    duration_ms INTEGER,\n    snapshot_id TEXT,\n    created_at TEXT DEFAULT (datetime('now')),\n    FOREIGN KEY (snapshot_id) REFERENCES mincut_snapshots(id) ON DELETE SET NULL\n  );\n\n  -- MinCut Weak Vertices (detected bottlenecks)\n  CREATE TABLE IF NOT EXISTS mincut_weak_vertices (\n    id TEXT PRIMARY KEY,\n    vertex_id TEXT NOT NULL,\n    weighted_degree REAL NOT NULL,\n    risk_score REAL NOT NULL,\n    reason TEXT NOT NULL,\n    domain TEXT,\n    vertex_type TEXT NOT NULL,\n    suggestions_json TEXT,\n    detected_at TEXT NOT NULL DEFAULT (datetime('now')),\n    resolved_at TEXT,\n    snapshot_id TEXT,\n    created_at TEXT DEFAULT (datetime('now')),\n    FOREIGN KEY (snapshot_id) REFERENCES mincut_snapshots(id) ON DELETE SET NULL\n  );\n\n  -- MinCut Alerts\n  CREATE TABLE IF NOT EXISTS mincut_alerts (\n    id TEXT PRIMARY KEY,\n    severity TEXT NOT NULL,\n    message TEXT NOT NULL,\n    mincut_value REAL NOT NULL,\n    threshold REAL NOT NULL,\n    affected_vertices_json TEXT,\n    remediations_json TEXT,\n    acknowledged INTEGER DEFAULT 0,\n    acknowledged_at TEXT,\n    acknowledged_by TEXT,\n    timestamp TEXT NOT NULL DEFAULT (datetime('now')),\n    created_at TEXT DEFAULT (datetime('now'))\n  );\n\n  -- MinCut Healing Actions (self-healing history)\n  CREATE TABLE IF NOT EXISTS mincut_healing_actions (\n    id TEXT PRIMARY KEY,\n    action_type TEXT NOT NULL,\n    action_params_json TEXT NOT NULL,\n    success INTEGER NOT NULL,\n    mincut_before REAL NOT NULL,\n    mincut_after REAL NOT NULL,\n    improvement REAL NOT NULL DEFAULT 0.0,\n    error_message TEXT,\n    duration_ms INTEGER NOT NULL,\n    triggered_by TEXT,\n    snapshot_before_id TEXT,\n    snapshot_after_id TEXT,\n    created_at TEXT DEFAULT (datetime('now')),\n    FOREIGN KEY (snapshot_before_id) REFERENCES mincut_snapshots(id) ON DELETE SET NULL,\n    FOREIGN KEY (snapshot_after_id) REFERENCES mincut_snapshots(id) ON DELETE SET NULL\n  );\n\n  -- MinCut Strange Loop Observations (P1: self-organizing)\n  CREATE TABLE IF NOT EXISTS mincut_observations (\n    id TEXT PRIMARY KEY,\n    iteration INTEGER NOT NULL,\n    mincut_value REAL NOT NULL,\n    weak_vertex_count INTEGER NOT NULL DEFAULT 0,\n    weak_vertices_json TEXT,\n    snapshot_id TEXT,\n    prediction_json TEXT,\n    actual_vs_predicted_diff REAL,\n    timestamp TEXT NOT NULL DEFAULT (datetime('now')),\n    FOREIGN KEY (snapshot_id) REFERENCES mincut_snapshots(id) ON DELETE SET NULL\n  );\n\n  -- MinCut Indexes\n  CREATE INDEX IF NOT EXISTS idx_mincut_history_timestamp ON mincut_history(timestamp DESC);\n  CREATE INDEX IF NOT EXISTS idx_mincut_history_value ON mincut_history(mincut_value);\n  CREATE INDEX IF NOT EXISTS idx_mincut_weak_vertex ON mincut_weak_vertices(vertex_id);\n  CREATE INDEX IF NOT EXISTS idx_mincut_weak_risk ON mincut_weak_vertices(risk_score DESC);\n  CREATE INDEX IF NOT EXISTS idx_mincut_weak_resolved ON mincut_weak_vertices(resolved_at);\n  CREATE INDEX IF NOT EXISTS idx_mincut_alerts_severity ON mincut_alerts(severity);\n  CREATE INDEX IF NOT EXISTS idx_mincut_alerts_ack ON mincut_alerts(acknowledged);\n  CREATE INDEX IF NOT EXISTS idx_mincut_healing_type ON mincut_healing_actions(action_type);\n  CREATE INDEX IF NOT EXISTS idx_mincut_healing_success ON mincut_healing_actions(success);\n  CREATE INDEX IF NOT EXISTS idx_mincut_observations_iter ON mincut_observations(iteration);\n";
|
|
18
18
|
/**
 * DDL for SONA pattern persistence (ADR-046): a single sona_patterns
 * table holding state/action embeddings as BLOBs plus outcome and usage
 * statistics, with indexes on type, domain, confidence, and updated_at.
 * Safe to re-run (CREATE ... IF NOT EXISTS).
 */
export declare const SONA_PATTERNS_SCHEMA = "\n  -- SONA Patterns table (ADR-046: Pattern Persistence for Neural Backbone)\n  CREATE TABLE IF NOT EXISTS sona_patterns (\n    id TEXT PRIMARY KEY,\n    type TEXT NOT NULL,\n    domain TEXT NOT NULL,\n    state_embedding BLOB,\n    action_embedding BLOB,\n    action_type TEXT NOT NULL,\n    action_value TEXT,\n    outcome_reward REAL NOT NULL DEFAULT 0.0,\n    outcome_success INTEGER NOT NULL DEFAULT 0,\n    outcome_quality REAL NOT NULL DEFAULT 0.0,\n    confidence REAL DEFAULT 0.5,\n    usage_count INTEGER DEFAULT 0,\n    success_count INTEGER DEFAULT 0,\n    failure_count INTEGER DEFAULT 0,\n    metadata TEXT,\n    created_at TEXT DEFAULT (datetime('now')),\n    updated_at TEXT DEFAULT (datetime('now')),\n    last_used_at TEXT\n  );\n  CREATE INDEX IF NOT EXISTS idx_sona_patterns_type ON sona_patterns(type);\n  CREATE INDEX IF NOT EXISTS idx_sona_patterns_domain ON sona_patterns(domain);\n  CREATE INDEX IF NOT EXISTS idx_sona_patterns_confidence ON sona_patterns(confidence DESC);\n  CREATE INDEX IF NOT EXISTS idx_sona_patterns_updated ON sona_patterns(updated_at DESC);\n";
|
|
19
19
|
/**
 * DDL for the witness chain (ADR-070): an append-only audit table where
 * each row carries prev_hash/action_hash, forming a hash-linked record of
 * QE decisions, indexed by action_type and timestamp.
 * Safe to re-run (CREATE ... IF NOT EXISTS).
 */
export declare const WITNESS_CHAIN_SCHEMA = "\n  -- Witness Chain (ADR-070: Cryptographic audit trail for QE decisions)\n  CREATE TABLE IF NOT EXISTS witness_chain (\n    id INTEGER PRIMARY KEY AUTOINCREMENT,\n    prev_hash TEXT NOT NULL,\n    action_hash TEXT NOT NULL,\n    action_type TEXT NOT NULL,\n    action_data TEXT,\n    timestamp TEXT NOT NULL,\n    actor TEXT NOT NULL\n  );\n  CREATE INDEX IF NOT EXISTS idx_witness_action_type ON witness_chain(action_type);\n  CREATE INDEX IF NOT EXISTS idx_witness_timestamp ON witness_chain(timestamp);\n";
|
|
@@ -10,7 +10,7 @@ export { HYPERGRAPH_SCHEMA };
|
|
|
10
10
|
// ============================================================================
|
|
11
11
|
// Schema Version for Migrations
|
|
12
12
|
// ============================================================================
|
|
13
|
-
export const SCHEMA_VERSION =
|
|
13
|
+
export const SCHEMA_VERSION = 9; // v9: adds FTS5 full-text search for qe_patterns (hybrid vector/text search)
|
|
14
14
|
export const SCHEMA_VERSION_TABLE = `
|
|
15
15
|
CREATE TABLE IF NOT EXISTS schema_version (
|
|
16
16
|
id INTEGER PRIMARY KEY CHECK (id = 1),
|
|
@@ -319,6 +319,31 @@ export const QE_PATTERNS_SCHEMA = `
|
|
|
319
319
|
FOREIGN KEY (execution_id) REFERENCES execution_results(id)
|
|
320
320
|
);
|
|
321
321
|
|
|
322
|
+
-- FTS5 full-text search index for hybrid vector/text search
|
|
323
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS qe_patterns_fts USING fts5(
|
|
324
|
+
name, description, pattern_type, qe_domain,
|
|
325
|
+
content='qe_patterns',
|
|
326
|
+
content_rowid='rowid'
|
|
327
|
+
);
|
|
328
|
+
|
|
329
|
+
-- FTS5 triggers to keep index in sync
|
|
330
|
+
CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_insert AFTER INSERT ON qe_patterns BEGIN
|
|
331
|
+
INSERT INTO qe_patterns_fts(rowid, name, description, pattern_type, qe_domain)
|
|
332
|
+
VALUES (new.rowid, new.name, new.description, new.pattern_type, new.qe_domain);
|
|
333
|
+
END;
|
|
334
|
+
|
|
335
|
+
CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_delete AFTER DELETE ON qe_patterns BEGIN
|
|
336
|
+
INSERT INTO qe_patterns_fts(qe_patterns_fts, rowid, name, description, pattern_type, qe_domain)
|
|
337
|
+
VALUES ('delete', old.rowid, old.name, old.description, old.pattern_type, old.qe_domain);
|
|
338
|
+
END;
|
|
339
|
+
|
|
340
|
+
CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_update AFTER UPDATE ON qe_patterns BEGIN
|
|
341
|
+
INSERT INTO qe_patterns_fts(qe_patterns_fts, rowid, name, description, pattern_type, qe_domain)
|
|
342
|
+
VALUES ('delete', old.rowid, old.name, old.description, old.pattern_type, old.qe_domain);
|
|
343
|
+
INSERT INTO qe_patterns_fts(rowid, name, description, pattern_type, qe_domain)
|
|
344
|
+
VALUES (new.rowid, new.name, new.description, new.pattern_type, new.qe_domain);
|
|
345
|
+
END;
|
|
346
|
+
|
|
322
347
|
-- QE Patterns indexes
|
|
323
348
|
CREATE INDEX IF NOT EXISTS idx_qe_patterns_domain ON qe_patterns(qe_domain);
|
|
324
349
|
CREATE INDEX IF NOT EXISTS idx_qe_patterns_type ON qe_patterns(pattern_type);
|
|
@@ -423,6 +423,38 @@ export class UnifiedMemoryManager {
|
|
|
423
423
|
this.db.exec(SONA_PATTERNS_SCHEMA);
|
|
424
424
|
if (currentVersion < 8)
|
|
425
425
|
this.db.exec(FEEDBACK_SCHEMA);
|
|
426
|
+
if (currentVersion < 9) {
|
|
427
|
+
// Add FTS5 full-text search for qe_patterns (hybrid vector/text search)
|
|
428
|
+
this.db.exec(`
|
|
429
|
+
CREATE VIRTUAL TABLE IF NOT EXISTS qe_patterns_fts USING fts5(
|
|
430
|
+
name, description, pattern_type, qe_domain,
|
|
431
|
+
content='qe_patterns',
|
|
432
|
+
content_rowid='rowid'
|
|
433
|
+
);
|
|
434
|
+
|
|
435
|
+
CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_insert AFTER INSERT ON qe_patterns BEGIN
|
|
436
|
+
INSERT INTO qe_patterns_fts(rowid, name, description, pattern_type, qe_domain)
|
|
437
|
+
VALUES (new.rowid, new.name, new.description, new.pattern_type, new.qe_domain);
|
|
438
|
+
END;
|
|
439
|
+
|
|
440
|
+
CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_delete AFTER DELETE ON qe_patterns BEGIN
|
|
441
|
+
INSERT INTO qe_patterns_fts(qe_patterns_fts, rowid, name, description, pattern_type, qe_domain)
|
|
442
|
+
VALUES ('delete', old.rowid, old.name, old.description, old.pattern_type, old.qe_domain);
|
|
443
|
+
END;
|
|
444
|
+
|
|
445
|
+
CREATE TRIGGER IF NOT EXISTS qe_patterns_fts_update AFTER UPDATE ON qe_patterns BEGIN
|
|
446
|
+
INSERT INTO qe_patterns_fts(qe_patterns_fts, rowid, name, description, pattern_type, qe_domain)
|
|
447
|
+
VALUES ('delete', old.rowid, old.name, old.description, old.pattern_type, old.qe_domain);
|
|
448
|
+
INSERT INTO qe_patterns_fts(rowid, name, description, pattern_type, qe_domain)
|
|
449
|
+
VALUES (new.rowid, new.name, new.description, new.pattern_type, new.qe_domain);
|
|
450
|
+
END;
|
|
451
|
+
`);
|
|
452
|
+
// Populate FTS5 index from existing patterns
|
|
453
|
+
this.db.exec(`
|
|
454
|
+
INSERT INTO qe_patterns_fts(rowid, name, description, pattern_type, qe_domain)
|
|
455
|
+
SELECT rowid, name, description, pattern_type, qe_domain FROM qe_patterns;
|
|
456
|
+
`);
|
|
457
|
+
}
|
|
426
458
|
this.db.prepare(`
|
|
427
459
|
INSERT OR REPLACE INTO schema_version (id, version, migrated_at)
|
|
428
460
|
VALUES (1, ?, datetime('now'))
|
|
@@ -41,6 +41,7 @@ import { ok, err } from '../shared/types/index.js';
|
|
|
41
41
|
import { toErrorMessage } from '../shared/error-utils.js';
|
|
42
42
|
import { safeJsonParse } from '../shared/safe-json.js';
|
|
43
43
|
import { createQEReasoningBank, } from './qe-reasoning-bank.js';
|
|
44
|
+
import { PROMOTION_THRESHOLD } from './qe-patterns.js';
|
|
44
45
|
import { createClaudeFlowBridge, } from '../adapters/claude-flow/index.js';
|
|
45
46
|
import { createExperienceCaptureService, } from './experience-capture.js';
|
|
46
47
|
import { createPatternStore } from './pattern-store.js';
|
|
@@ -52,7 +53,7 @@ export const DEFAULT_ENGINE_CONFIG = {
|
|
|
52
53
|
enableClaudeFlow: true,
|
|
53
54
|
enableExperienceCapture: true,
|
|
54
55
|
enablePatternPromotion: true,
|
|
55
|
-
promotionThreshold:
|
|
56
|
+
promotionThreshold: PROMOTION_THRESHOLD,
|
|
56
57
|
};
|
|
57
58
|
// ============================================================================
|
|
58
59
|
// AQE Learning Engine
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Daily Log Tier - Human-readable Markdown audit trail
|
|
3
|
+
*
|
|
4
|
+
* Writes daily learning summaries to memory/YYYY-MM-DD.md files.
|
|
5
|
+
* Provides a browsable history of what AQE learned each day.
|
|
6
|
+
*/
|
|
7
|
+
export interface DailyLogEntry {
|
|
8
|
+
timestamp: Date;
|
|
9
|
+
type: 'pattern-learned' | 'pattern-promoted' | 'pattern-quarantined' | 'experience-captured' | 'search-performed' | 'reward-assigned';
|
|
10
|
+
summary: string;
|
|
11
|
+
details?: Record<string, unknown>;
|
|
12
|
+
}
|
|
13
|
+
export interface DailyLogConfig {
|
|
14
|
+
/** Directory to write daily logs (default: .agentic-qe/logs) */
|
|
15
|
+
logDir?: string;
|
|
16
|
+
/** Whether daily logging is enabled (default: true) */
|
|
17
|
+
enabled?: boolean;
|
|
18
|
+
/** Maximum entries per daily log before rotation (default: 500) */
|
|
19
|
+
maxEntriesPerDay?: number;
|
|
20
|
+
}
|
|
21
|
+
export declare class DailyLogger {
|
|
22
|
+
private config;
|
|
23
|
+
private buffer;
|
|
24
|
+
private flushTimer;
|
|
25
|
+
constructor(config?: DailyLogConfig);
|
|
26
|
+
/**
|
|
27
|
+
* Log a learning event
|
|
28
|
+
*/
|
|
29
|
+
log(entry: DailyLogEntry): void;
|
|
30
|
+
/**
|
|
31
|
+
* Flush buffered entries to the daily log file
|
|
32
|
+
*/
|
|
33
|
+
flush(): void;
|
|
34
|
+
/**
|
|
35
|
+
* Get the path to today's log file
|
|
36
|
+
*/
|
|
37
|
+
getTodayLogPath(): string;
|
|
38
|
+
/**
|
|
39
|
+
* Dispose and flush remaining entries
|
|
40
|
+
*/
|
|
41
|
+
dispose(): void;
|
|
42
|
+
}
|
|
43
|
+
//# sourceMappingURL=daily-log.d.ts.map
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Daily Log Tier - Human-readable Markdown audit trail
|
|
3
|
+
*
|
|
4
|
+
* Writes daily learning summaries to memory/YYYY-MM-DD.md files.
|
|
5
|
+
* Provides a browsable history of what AQE learned each day.
|
|
6
|
+
*/
|
|
7
|
+
import * as fs from 'fs';
|
|
8
|
+
import * as path from 'path';
|
|
9
|
+
// Emoji prefix shown next to each event type in the Markdown table rows.
// Keys mirror DailyLogEntry['type']; unknown types fall back to a memo icon
// at the call site.
const LOG_ICONS = {
    'pattern-learned': '\u{1F9E0}', // brain
    'pattern-promoted': '\u2B06\uFE0F', // up arrow
    'pattern-quarantined': '\u{1F512}', // lock
    'experience-captured': '\u{1F4F8}', // camera
    'search-performed': '\u{1F50D}', // magnifying glass
    'reward-assigned': '\u{1F3AF}', // target
};
|
|
17
|
+
/**
 * Buffers learning events and appends them to a per-day Markdown file as
 * table rows. Writes are best-effort: any I/O failure is logged via
 * console.debug and never propagated to the caller.
 */
export class DailyLogger {
    config;
    buffer = [];
    // Reserved for interval-based flushing; only cleared in dispose().
    flushTimer = null;
    /**
     * @param config Optional overrides. Defaults: logDir = <cwd>/.agentic-qe/logs,
     *               enabled = true, maxEntriesPerDay = 500.
     *               NOTE(review): maxEntriesPerDay is stored but not consulted by
     *               the visible code — rotation appears unimplemented; confirm.
     */
    constructor(config) {
        const { logDir, enabled, maxEntriesPerDay } = config ?? {};
        this.config = {
            logDir: logDir ?? path.join(process.cwd(), '.agentic-qe', 'logs'),
            enabled: enabled ?? true,
            maxEntriesPerDay: maxEntriesPerDay ?? 500,
        };
    }
    /**
     * Queue a learning event. Auto-flushes once 10 entries accumulate.
     * No-op when logging is disabled.
     */
    log(entry) {
        if (!this.config.enabled)
            return;
        this.buffer.push(entry);
        // Auto-flush every 10 entries
        if (this.buffer.length >= 10)
            this.flush();
    }
    /**
     * Drain the buffer and append the entries to today's log file as
     * Markdown table rows, creating the directory and file header as needed.
     */
    flush() {
        if (this.buffer.length === 0)
            return;
        const date = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
        const filePath = path.join(this.config.logDir, `${date}.md`);
        try {
            fs.mkdirSync(this.config.logDir, { recursive: true });
            const pending = this.buffer.splice(0);
            const rows = [];
            if (!fs.existsSync(filePath)) {
                // First write of the day: emit the title and table header.
                rows.push(`# AQE Daily Log \u2014 ${date}\n`);
                rows.push('| Time | Event | Summary |');
                rows.push('|------|-------|---------|');
            }
            for (const item of pending) {
                const clock = item.timestamp.toISOString().split('T')[1]?.slice(0, 8) ?? '00:00:00';
                const glyph = LOG_ICONS[item.type] ?? '\u{1F4DD}';
                // Escape backslashes and pipes so the Markdown table stays
                // intact; collapse newlines into spaces.
                const safeSummary = item.summary.replace(/\\/g, '\\\\').replace(/\|/g, '\\|').replace(/\n/g, ' ');
                rows.push(`| ${clock} | ${glyph} ${item.type} | ${safeSummary} |`);
            }
            fs.appendFileSync(filePath, rows.join('\n') + '\n');
        }
        catch (error) {
            // Non-critical - don't fail the learning pipeline for logging
            console.debug('[DailyLog] Write failed:', error);
        }
    }
    /**
     * Path of today's log file: <logDir>/YYYY-MM-DD.md.
     */
    getTodayLogPath() {
        const date = new Date().toISOString().split('T')[0];
        return path.join(this.config.logDir, `${date}.md`);
    }
    /**
     * Flush any remaining entries and cancel the (unused) flush timer.
     */
    dispose() {
        this.flush();
        if (this.flushTimer !== null) {
            clearInterval(this.flushTimer);
            this.flushTimer = null;
        }
    }
}
//# sourceMappingURL=daily-log.js.map
|