agentic-qe 3.8.11 → 3.8.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/skills/qe-code-intelligence/SKILL.md +29 -20
- package/.claude/skills/qe-code-intelligence/evals/qe-code-intelligence.yaml +3 -3
- package/.claude/skills/qe-quality-assessment/SKILL.md +1 -1
- package/.claude/skills/qe-test-generation/SKILL.md +1 -1
- package/.claude/skills/skills-manifest.json +1 -1
- package/CHANGELOG.md +45 -0
- package/README.md +9 -0
- package/assets/skills/qe-code-intelligence/SKILL.md +29 -20
- package/assets/skills/qe-code-intelligence/evals/qe-code-intelligence.yaml +3 -3
- package/assets/skills/qe-quality-assessment/SKILL.md +1 -1
- package/assets/skills/qe-test-generation/SKILL.md +1 -1
- package/dist/cli/bundle.js +1162 -1046
- package/dist/cli/commands/code.js +149 -11
- package/dist/cli/commands/init.js +3 -2
- package/dist/cli/commands/ruvector-commands.js +17 -0
- package/dist/cli/handlers/init-handler.d.ts +1 -0
- package/dist/cli/handlers/init-handler.js +15 -10
- package/dist/cli/utils/file-discovery.d.ts +1 -0
- package/dist/cli/utils/file-discovery.js +1 -1
- package/dist/domains/code-intelligence/coordinator-gnn.d.ts +21 -0
- package/dist/domains/code-intelligence/coordinator-gnn.js +102 -0
- package/dist/domains/contract-testing/coordinator.js +13 -0
- package/dist/domains/coverage-analysis/coordinator.js +5 -0
- package/dist/domains/defect-intelligence/coordinator.d.ts +1 -0
- package/dist/domains/defect-intelligence/coordinator.js +43 -0
- package/dist/domains/quality-assessment/coordinator.js +26 -0
- package/dist/domains/test-generation/coordinator.js +14 -0
- package/dist/init/orchestrator.js +1 -0
- package/dist/init/phases/08-mcp.js +4 -4
- package/dist/init/phases/phase-interface.d.ts +3 -1
- package/dist/integrations/agentic-flow/reasoning-bank/experience-replay.d.ts +11 -0
- package/dist/integrations/agentic-flow/reasoning-bank/experience-replay.js +44 -1
- package/dist/integrations/rl-suite/algorithms/eprop.d.ts +79 -0
- package/dist/integrations/rl-suite/algorithms/eprop.js +284 -0
- package/dist/integrations/rl-suite/algorithms/index.d.ts +2 -1
- package/dist/integrations/rl-suite/algorithms/index.js +2 -1
- package/dist/integrations/rl-suite/index.d.ts +2 -2
- package/dist/integrations/rl-suite/index.js +2 -2
- package/dist/integrations/rl-suite/interfaces.d.ts +3 -3
- package/dist/integrations/rl-suite/interfaces.js +1 -1
- package/dist/integrations/rl-suite/orchestrator.d.ts +2 -2
- package/dist/integrations/rl-suite/orchestrator.js +3 -2
- package/dist/integrations/rl-suite/reward-signals.d.ts +1 -1
- package/dist/integrations/rl-suite/reward-signals.js +1 -1
- package/dist/integrations/ruvector/coherence-gate-cohomology.d.ts +41 -0
- package/dist/integrations/ruvector/coherence-gate-cohomology.js +47 -0
- package/dist/integrations/ruvector/coherence-gate-core.d.ts +200 -0
- package/dist/integrations/ruvector/coherence-gate-core.js +294 -0
- package/dist/integrations/ruvector/coherence-gate-energy.d.ts +136 -0
- package/dist/integrations/ruvector/coherence-gate-energy.js +373 -0
- package/dist/integrations/ruvector/coherence-gate-vector.d.ts +38 -0
- package/dist/integrations/ruvector/coherence-gate-vector.js +76 -0
- package/dist/integrations/ruvector/coherence-gate.d.ts +10 -311
- package/dist/integrations/ruvector/coherence-gate.js +10 -652
- package/dist/integrations/ruvector/cold-tier-trainer.d.ts +103 -0
- package/dist/integrations/ruvector/cold-tier-trainer.js +377 -0
- package/dist/integrations/ruvector/cusum-detector.d.ts +70 -0
- package/dist/integrations/ruvector/cusum-detector.js +142 -0
- package/dist/integrations/ruvector/delta-tracker.d.ts +122 -0
- package/dist/integrations/ruvector/delta-tracker.js +311 -0
- package/dist/integrations/ruvector/domain-transfer.d.ts +79 -1
- package/dist/integrations/ruvector/domain-transfer.js +158 -2
- package/dist/integrations/ruvector/eprop-learner.d.ts +135 -0
- package/dist/integrations/ruvector/eprop-learner.js +351 -0
- package/dist/integrations/ruvector/feature-flags.d.ts +177 -0
- package/dist/integrations/ruvector/feature-flags.js +145 -0
- package/dist/integrations/ruvector/graphmae-encoder.d.ts +88 -0
- package/dist/integrations/ruvector/graphmae-encoder.js +360 -0
- package/dist/integrations/ruvector/hdc-fingerprint.d.ts +127 -0
- package/dist/integrations/ruvector/hdc-fingerprint.js +222 -0
- package/dist/integrations/ruvector/hopfield-memory.d.ts +97 -0
- package/dist/integrations/ruvector/hopfield-memory.js +238 -0
- package/dist/integrations/ruvector/index.d.ts +13 -2
- package/dist/integrations/ruvector/index.js +46 -2
- package/dist/integrations/ruvector/mincut-wrapper.d.ts +7 -0
- package/dist/integrations/ruvector/mincut-wrapper.js +54 -2
- package/dist/integrations/ruvector/reservoir-replay.d.ts +172 -0
- package/dist/integrations/ruvector/reservoir-replay.js +335 -0
- package/dist/integrations/ruvector/solver-adapter.d.ts +93 -0
- package/dist/integrations/ruvector/solver-adapter.js +299 -0
- package/dist/integrations/ruvector/sona-persistence.d.ts +33 -0
- package/dist/integrations/ruvector/sona-persistence.js +47 -0
- package/dist/integrations/ruvector/spectral-sparsifier.d.ts +154 -0
- package/dist/integrations/ruvector/spectral-sparsifier.js +389 -0
- package/dist/integrations/ruvector/temporal-causality.d.ts +63 -0
- package/dist/integrations/ruvector/temporal-causality.js +317 -0
- package/dist/learning/pattern-promotion.d.ts +63 -0
- package/dist/learning/pattern-promotion.js +235 -1
- package/dist/learning/pattern-store.d.ts +2 -0
- package/dist/learning/pattern-store.js +187 -1
- package/dist/learning/sqlite-persistence.d.ts +2 -0
- package/dist/learning/sqlite-persistence.js +4 -0
- package/dist/mcp/bundle.js +506 -427
- package/dist/shared/utils/index.d.ts +1 -0
- package/dist/shared/utils/index.js +1 -0
- package/dist/shared/utils/xorshift128.d.ts +24 -0
- package/dist/shared/utils/xorshift128.js +50 -0
- package/package.json +1 -1
|
@@ -0,0 +1,317 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Granger Causality for Test Failure Prediction (R12, ADR-087 Milestone 4)
|
|
3
|
+
*
|
|
4
|
+
* Temporal causal discovery: finds causal chains between test failures.
|
|
5
|
+
* E.g. "when test_login fails, test_checkout fails 5min later at 87%."
|
|
6
|
+
*
|
|
7
|
+
* Algorithm: Bivariate VAR(p) + F-test with OLS via Gaussian elimination
|
|
8
|
+
* and regularized incomplete beta function for F-distribution CDF.
|
|
9
|
+
*
|
|
10
|
+
* @module integrations/ruvector/temporal-causality
|
|
11
|
+
*/
|
|
12
|
+
// Tunable defaults for GrangerAnalyzer: scan lags 1..maxLag, report links with
// p < alpha, and require at least minSeriesLength observations per series.
const DEFAULT_CONFIG = { maxLag: 5, alpha: 0.05, minSeriesLength: 30 };
// Iteration cap for the incomplete-beta continued fraction (Lentz's method).
const BETA_CF_MAX_ITER = 200;
// Convergence threshold / underflow floor for the continued-fraction terms.
const BETA_CF_EPS = 1e-10;
// Numerical-singularity threshold shared by the OLS / Gaussian-elimination code.
const OLS_EPS = 1e-12;
|
|
16
|
+
// -- Statistical math: lnGamma, incomplete beta, F-distribution CDF ---------
|
|
17
|
+
/**
 * Natural log of the Gamma function, ln(Γ(z)), via the 9-term Lanczos series
 * (g = 7). Accurate to roughly double precision for z >= 0.5; arguments
 * below 0.5 are handled with the reflection formula. Returns Infinity for
 * z <= 0 (outside the supported domain).
 */
function lnGamma(z) {
    if (z <= 0)
        return Infinity;
    // Reflection formula keeps the series evaluation in its accurate region.
    if (z < 0.5)
        return Math.log(Math.PI / Math.sin(Math.PI * z)) - lnGamma(1 - z);
    const coeffs = [0.99999999999980993, 676.5203681218851, -1259.1392167224028,
        771.32342877765313, -176.61502916214059, 12.507343278686905,
        -0.13857109526572012, 9.9843695780195716e-6, 1.5056327351493116e-7];
    const w = z - 1;
    let series = coeffs[0];
    for (let i = 1; i < coeffs.length; i++)
        series += coeffs[i] / (w + i);
    const t = w + 7.5;
    return 0.5 * Math.log(2 * Math.PI) + (w + 0.5) * Math.log(t) - t + Math.log(series);
}
|
|
33
|
+
/** ln(B(a,b)) = lnΓ(a) + lnΓ(b) - lnΓ(a+b), computed in log space to avoid overflow. */
function lnBeta(a, b) {
    return lnGamma(a) + lnGamma(b) - lnGamma(a + b);
}
|
|
36
|
+
/**
 * Regularized incomplete beta I_x(a,b) via continued fraction (Lentz).
 *
 * Follows the classic Numerical Recipes `betacf` scheme: one loop iteration
 * applies the even-indexed and then the odd-indexed continued-fraction term,
 * clamping near-zero intermediates to BETA_CF_EPS to avoid division blowup.
 * Returns 0 for x <= 0, 1 for x >= 1, NaN for non-positive shape parameters.
 */
function regularizedIncompleteBeta(x, a, b) {
    if (x <= 0)
        return 0;
    if (x >= 1)
        return 1;
    if (a <= 0 || b <= 0)
        return NaN;
    // Use the symmetry I_x(a,b) = 1 - I_{1-x}(b,a) so the continued fraction
    // is evaluated in its fast-converging region.
    if (x > (a + 1) / (a + b + 2))
        return 1 - regularizedIncompleteBeta(1 - x, b, a);
    // Prefactor x^a (1-x)^b / (a B(a,b)), assembled in log space.
    const front = Math.exp(a * Math.log(x) + b * Math.log(1 - x) - lnBeta(a, b) - Math.log(a));
    let c = 1, d = 1 - (a + b) * x / (a + 1);
    if (Math.abs(d) < BETA_CF_EPS)
        d = BETA_CF_EPS;
    d = 1 / d;
    let f = d;
    for (let m = 1; m <= BETA_CF_MAX_ITER; m++) {
        // Even step: term d_{2m}.
        let num = m * (b - m) * x / ((a + 2 * m - 1) * (a + 2 * m));
        d = 1 + num * d;
        if (Math.abs(d) < BETA_CF_EPS)
            d = BETA_CF_EPS;
        c = 1 + num / c;
        if (Math.abs(c) < BETA_CF_EPS)
            c = BETA_CF_EPS;
        d = 1 / d;
        f *= c * d;
        // Odd step: term d_{2m+1}.
        num = -(a + m) * (a + b + m) * x / ((a + 2 * m) * (a + 2 * m + 1));
        d = 1 + num * d;
        if (Math.abs(d) < BETA_CF_EPS)
            d = BETA_CF_EPS;
        c = 1 + num / c;
        if (Math.abs(c) < BETA_CF_EPS)
            c = BETA_CF_EPS;
        d = 1 / d;
        const delta = c * d;
        f *= delta;
        // Converged when the multiplicative update is within tolerance of 1.
        if (Math.abs(delta - 1) < BETA_CF_EPS)
            break;
    }
    return front * f;
}
|
|
77
|
+
/**
 * CDF of the F-distribution with (d1, d2) degrees of freedom, expressed via
 * the regularized incomplete beta: P(F <= x) = I_{d1*x/(d1*x+d2)}(d1/2, d2/2).
 * Returns 0 for x <= 0, NaN for non-positive degrees of freedom, 1 for
 * infinite x.
 */
function fDistributionCDF(x, d1, d2) {
    if (x <= 0) return 0;
    if (d1 <= 0 || d2 <= 0) return NaN;
    if (!isFinite(x)) return 1;
    const u = (d1 * x) / (d1 * x + d2);
    return regularizedIncompleteBeta(u, d1 / 2, d2 / 2);
}
|
|
87
|
+
// -- OLS Regression ---------------------------------------------------------
|
|
88
|
+
/**
 * Solve the square linear system A*x = b by Gaussian elimination with
 * partial pivoting on an augmented matrix.
 * Returns the solution vector, or null when the system is numerically
 * singular (pivot magnitude below OLS_EPS).
 */
function solveLinearSystem(A, b) {
    const size = b.length;
    // Augment each row of A with the matching entry of b (copies; A is untouched).
    const m = A.map((row, idx) => row.concat(b[idx]));
    for (let k = 0; k < size; k++) {
        // Partial pivoting: pick the row with the largest |value| in column k.
        let pivotRow = k;
        for (let r = k + 1; r < size; r++) {
            if (Math.abs(m[r][k]) > Math.abs(m[pivotRow][k]))
                pivotRow = r;
        }
        if (Math.abs(m[pivotRow][k]) < OLS_EPS)
            return null; // singular (or numerically so)
        if (pivotRow !== k) {
            const tmp = m[k];
            m[k] = m[pivotRow];
            m[pivotRow] = tmp;
        }
        // Eliminate column k below the pivot.
        for (let r = k + 1; r < size; r++) {
            const factor = m[r][k] / m[k][k];
            for (let c = k; c <= size; c++)
                m[r][c] -= factor * m[k][c];
        }
    }
    // Back substitution on the upper-triangular system.
    const solution = new Array(size).fill(0);
    for (let r = size - 1; r >= 0; r--) {
        if (Math.abs(m[r][r]) < OLS_EPS)
            return null;
        let acc = m[r][size];
        for (let c = r + 1; c < size; c++)
            acc -= m[r][c] * solution[c];
        solution[r] = acc / m[r][r];
    }
    return solution;
}
|
|
122
|
+
/**
 * Ordinary least squares: solve the normal equations (X'X) beta = X'y.
 * X is n-by-p with rows as observations; returns the p coefficient
 * estimates, or null when X'X is numerically singular.
 */
function olsRegression(X, y) {
    const rows = y.length;
    const cols = X[0].length;
    // X'X is symmetric: compute the upper triangle once and mirror it.
    const gram = Array.from({ length: cols }, () => new Array(cols).fill(0));
    for (let a = 0; a < cols; a++) {
        for (let b = a; b < cols; b++) {
            let acc = 0;
            for (let r = 0; r < rows; r++)
                acc += X[r][a] * X[r][b];
            gram[a][b] = acc;
            gram[b][a] = acc;
        }
    }
    // Right-hand side X'y.
    const rhs = new Array(cols).fill(0);
    for (let a = 0; a < cols; a++) {
        let acc = 0;
        for (let r = 0; r < rows; r++)
            acc += X[r][a] * y[r];
        rhs[a] = acc;
    }
    return solveLinearSystem(gram, rhs);
}
|
|
144
|
+
/**
 * Residual sum of squares for the linear model y = X * beta:
 * sum over observations of (y_i - X_i . beta)^2.
 */
function computeRSS(X, y, beta) {
    return y.reduce((total, yi, i) => {
        const fitted = beta.reduce((acc, bj, j) => acc + X[i][j] * bj, 0);
        const residual = yi - fitted;
        return total + residual * residual;
    }, 0);
}
|
|
156
|
+
// -- Time series alignment --------------------------------------------------
|
|
157
|
+
/**
 * Project two event series onto a shared uniform time grid so they can be
 * compared sample-by-sample.
 *
 * The grid spans the overlapping time range, with a bin width equal to the
 * median positive inter-sample gap across both series. Each sample's outcome
 * is written into its nearest bin (a later sample landing in the same bin
 * overwrites an earlier one; empty bins stay 0).
 * Returns null when the series do not overlap or no usable bin width exists.
 */
function alignTimeSeries(source, target) {
    const start = Math.max(source.timestamps[0] ?? Infinity, target.timestamps[0] ?? Infinity);
    const end = Math.min(source.timestamps[source.timestamps.length - 1] ?? -Infinity, target.timestamps[target.timestamps.length - 1] ?? -Infinity);
    if (start >= end)
        return null;
    // Gather every positive inter-sample gap from both series.
    const gaps = [];
    for (const series of [source, target]) {
        for (let i = 1; i < series.timestamps.length; i++) {
            const gap = series.timestamps[i] - series.timestamps[i - 1];
            if (gap > 0)
                gaps.push(gap);
        }
    }
    if (gaps.length === 0)
        return null;
    // The median gap becomes the bin width.
    gaps.sort((x, y) => x - y);
    const binWidth = gaps[Math.floor(gaps.length / 2)];
    if (binWidth <= 0)
        return null;
    const binCount = Math.floor((end - start) / binWidth) + 1;
    if (binCount < 2)
        return null;
    // Scatter a series' outcomes into their nearest bins.
    const project = (series) => {
        const grid = new Array(binCount).fill(0);
        series.timestamps.forEach((t, i) => {
            const slot = Math.round((t - start) / binWidth);
            if (slot >= 0 && slot < binCount)
                grid[slot] = series.outcomes[i];
        });
        return grid;
    };
    return { sourceOutcomes: project(source), targetOutcomes: project(target) };
}
|
|
190
|
+
// -- Granger Analyzer -------------------------------------------------------
|
|
191
|
+
/**
 * Pairwise Granger-causality analyzer over test-failure time series.
 *
 * For each ordered pair (source, target) it fits two autoregressive models of
 * the target series — one using only the target's own lags (restricted) and
 * one that also includes the source's lags (unrestricted) — and applies an
 * F-test on the residual sums of squares to decide whether the source's
 * history adds predictive power ("Granger-causes" the target).
 */
export class GrangerAnalyzer {
    // Effective settings: caller overrides merged over DEFAULT_CONFIG.
    config;
    constructor(config) {
        this.config = { ...DEFAULT_CONFIG, ...config };
    }
    /** Analyze all pairs; returns only significant links (p < alpha). */
    analyzeCausality(timeSeries) {
        if (timeSeries.length < 2)
            return [];
        const links = [];
        // Every ordered pair (i -> j), i != j: causality is directional.
        for (let i = 0; i < timeSeries.length; i++) {
            for (let j = 0; j < timeSeries.length; j++) {
                if (i === j)
                    continue;
                const src = timeSeries[i], tgt = timeSeries[j];
                // Short series give unreliable fits; skip the pair outright.
                if (src.outcomes.length < this.config.minSeriesLength ||
                    tgt.outcomes.length < this.config.minSeriesLength)
                    continue;
                // Keep only the most significant lag (smallest p-value) per pair.
                let best = null;
                for (let lag = 1; lag <= this.config.maxLag; lag++) {
                    const link = this.testPairwise(src, tgt, lag);
                    if (this.significanceTest(link) && (best === null || link.pValue < best.pValue))
                        best = link;
                }
                if (best)
                    links.push(best);
            }
        }
        // Most significant links first.
        links.sort((a, b) => a.pValue - b.pValue);
        return links;
    }
    /** Test Granger causality for one pair at one lag via VAR(p) + F-test. */
    testPairwise(source, target, lag) {
        // Neutral "no causality" result returned on any degenerate condition.
        const def = {
            sourceTestId: source.testId, targetTestId: target.testId,
            lag, fStatistic: 0, pValue: 1, strength: 0, direction: 'positive',
        };
        // Use raw outcomes when both series share the identical time grid;
        // otherwise re-bin them onto a common uniform grid first.
        let srcData, tgtData;
        if (this.arraysEqual(source.timestamps, target.timestamps)) {
            srcData = source.outcomes;
            tgtData = target.outcomes;
        }
        else {
            const a = alignTimeSeries(source, target);
            if (!a)
                return def;
            srcData = a.sourceOutcomes;
            tgtData = a.targetOutcomes;
        }
        // effN usable observations remain after dropping the first `lag`
        // samples; dfDenom subtracts the unrestricted model's 2*lag+1 params.
        const n = tgtData.length, effN = n - lag, dfDenom = effN - 2 * lag - 1;
        if (dfDenom <= 0 || effN < this.config.minSeriesLength)
            return def;
        const y = new Array(effN);
        for (let t = 0; t < effN; t++)
            y[t] = tgtData[t + lag];
        // A (near-)constant target series has no variance to explain.
        const yMean = y.reduce((s, v) => s + v, 0) / y.length;
        if (y.reduce((s, v) => s + (v - yMean) ** 2, 0) < OLS_EPS)
            return def;
        // Restricted: [intercept, Y(t-1)..Y(t-lag)]
        const Xr = new Array(effN);
        for (let t = 0; t < effN; t++) {
            const row = new Array(lag + 1);
            row[0] = 1;
            for (let k = 1; k <= lag; k++)
                row[k] = tgtData[t + lag - k];
            Xr[t] = row;
        }
        // Unrestricted: [intercept, Y lags, X lags]
        const Xu = new Array(effN);
        for (let t = 0; t < effN; t++) {
            const row = new Array(2 * lag + 1);
            row[0] = 1;
            for (let k = 1; k <= lag; k++)
                row[k] = tgtData[t + lag - k];
            for (let k = 1; k <= lag; k++)
                row[lag + k] = srcData[t + lag - k];
            Xu[t] = row;
        }
        const betaR = olsRegression(Xr, y);
        if (!betaR)
            return def;
        const rssR = computeRSS(Xr, y, betaR);
        const betaU = olsRegression(Xu, y);
        if (!betaU)
            return def;
        const rssU = computeRSS(Xu, y, betaU);
        // Adding regressors can never increase RSS in exact arithmetic.
        if (rssU > rssR)
            return def; // Numerical noise
        // F = ((RSS_r - RSS_u) / lag) / (RSS_u / dfDenom)
        const num = (rssR - rssU) / lag;
        const den = rssU / dfDenom;
        // The source-lag coefficients summarize effect size (mean |coeff|)
        // and sign (mean coeff) of the causal influence.
        const srcCoeffs = betaU.slice(lag + 1, 2 * lag + 1);
        const strength = srcCoeffs.reduce((s, c) => s + Math.abs(c), 0) / lag;
        const avgCoeff = srcCoeffs.reduce((s, c) => s + c, 0) / lag;
        const dir = avgCoeff >= 0 ? 'positive' : 'negative';
        // Near-perfect unrestricted fit: any real improvement is treated as
        // maximally significant rather than dividing by ~0.
        if (den < OLS_EPS) {
            const fS = num > OLS_EPS ? 1e6 : 0;
            return { ...def, fStatistic: fS, pValue: fS > 0 ? 0 : 1, strength, direction: dir };
        }
        const fStat = num / den;
        const pValue = 1 - fDistributionCDF(fStat, lag, dfDenom);
        return {
            sourceTestId: source.testId, targetTestId: target.testId, lag,
            fStatistic: Math.max(0, fStat),
            pValue: Math.max(0, Math.min(1, pValue)),
            strength, direction: dir,
        };
    }
    /** True if the link is statistically significant at the configured alpha. */
    significanceTest(link) {
        return link.pValue < this.config.alpha && link.fStatistic > 0;
    }
    /** Element-wise equality of two arrays (used to compare time grids). */
    arraysEqual(a, b) {
        if (a.length !== b.length)
            return false;
        for (let i = 0; i < a.length; i++)
            if (a[i] !== b[i])
                return false;
        return true;
    }
}
|
|
311
|
+
// -- Factory ----------------------------------------------------------------
|
|
312
|
+
/**
 * Convenience factory for GrangerAnalyzer. Missing config fields fall back
 * to DEFAULT_CONFIG inside the constructor.
 */
export function createGrangerAnalyzer(config) {
    const analyzer = new GrangerAnalyzer(config);
    return analyzer;
}
|
|
315
|
+
// -- Exported internals for testing -----------------------------------------
|
|
316
|
+
export { regularizedIncompleteBeta as _regularizedIncompleteBeta, fDistributionCDF as _fDistributionCDF, lnGamma as _lnGamma, olsRegression as _olsRegression, alignTimeSeries as _alignTimeSeries, };
|
|
317
|
+
//# sourceMappingURL=temporal-causality.js.map
|
|
@@ -11,6 +11,8 @@ import type { PatternStore, PatternSearchResult } from './pattern-store.js';
|
|
|
11
11
|
import type { SQLitePatternStore } from './sqlite-persistence.js';
|
|
12
12
|
import type { RvfDualWriter } from '../integrations/ruvector/rvf-dual-writer.js';
|
|
13
13
|
import type { Result } from '../shared/types/index.js';
|
|
14
|
+
import { type PatternGraph } from '../integrations/ruvector/solver-adapter.js';
|
|
15
|
+
import type { Database as DatabaseType } from 'better-sqlite3';
|
|
14
16
|
/**
|
|
15
17
|
* Dependencies needed for promotion checks
|
|
16
18
|
*/
|
|
@@ -60,4 +62,65 @@ export declare function seedCrossDomainPatterns(deps: SeedingDeps): Promise<{
|
|
|
60
62
|
transferred: number;
|
|
61
63
|
skipped: number;
|
|
62
64
|
}>;
|
|
65
|
+
/** SQLite schema for pattern citation graph edges */
|
|
66
|
+
export declare const PATTERN_CITATIONS_SCHEMA = "\n CREATE TABLE IF NOT EXISTS pattern_citations (\n source_pattern_id TEXT NOT NULL,\n target_pattern_id TEXT NOT NULL,\n weight REAL NOT NULL DEFAULT 1.0,\n relationship TEXT NOT NULL DEFAULT 'co-occurrence',\n created_at TEXT DEFAULT (datetime('now')),\n PRIMARY KEY (source_pattern_id, target_pattern_id)\n );\n CREATE INDEX IF NOT EXISTS idx_citations_source ON pattern_citations(source_pattern_id);\n CREATE INDEX IF NOT EXISTS idx_citations_target ON pattern_citations(target_pattern_id);\n";
|
|
67
|
+
/**
|
|
68
|
+
* Manages the pattern citation graph in SQLite. Handles schema initialization,
|
|
69
|
+
* recording co-occurrence edges, and building PatternGraph for PageRank.
|
|
70
|
+
*/
|
|
71
|
+
export declare class PatternCitationGraph {
|
|
72
|
+
private readonly db;
|
|
73
|
+
private initialized;
|
|
74
|
+
constructor(db: DatabaseType);
|
|
75
|
+
/** Ensure the pattern_citations table exists */
|
|
76
|
+
ensureSchema(): void;
|
|
77
|
+
/**
|
|
78
|
+
* Record that two patterns co-occurred in the same assessment/session.
|
|
79
|
+
* Increments weight if the edge already exists (stronger co-occurrence).
|
|
80
|
+
*/
|
|
81
|
+
recordCoOccurrence(patternIdA: string, patternIdB: string): void;
|
|
82
|
+
/**
|
|
83
|
+
* Record that pattern B was derived from or supersedes pattern A.
|
|
84
|
+
*/
|
|
85
|
+
recordDerivation(sourcePatternId: string, derivedPatternId: string): void;
|
|
86
|
+
/**
|
|
87
|
+
* Build a PatternGraph from all citation edges in the database.
|
|
88
|
+
* Returns the graph ready for PageRank computation.
|
|
89
|
+
*/
|
|
90
|
+
buildGraph(): PatternGraph;
|
|
91
|
+
/**
|
|
92
|
+
* Bootstrap the citation graph from existing pattern data.
|
|
93
|
+
*
|
|
94
|
+
* Sources:
|
|
95
|
+
* 1. Same-domain co-occurrence: patterns in the same qe_domain are linked
|
|
96
|
+
* (weight = 1.0 per shared domain, reflecting that they co-occur in
|
|
97
|
+
* assessments for that domain).
|
|
98
|
+
* 2. Existing pattern_relationships: "merged" relationships become derivation
|
|
99
|
+
* edges (weight = 2.0).
|
|
100
|
+
*
|
|
101
|
+
* This is idempotent — uses INSERT OR IGNORE so repeated calls don't
|
|
102
|
+
* create duplicate edges (but also don't increment weights).
|
|
103
|
+
*
|
|
104
|
+
* @returns Number of edges created
|
|
105
|
+
*/
|
|
106
|
+
bootstrapFromExistingData(): number;
|
|
107
|
+
/** Get the number of citation edges */
|
|
108
|
+
getEdgeCount(): number;
|
|
109
|
+
}
|
|
110
|
+
/**
|
|
111
|
+
* Compute blended importance scores using PageRank over the citation graph
|
|
112
|
+
* combined with the existing weighted quality formula.
|
|
113
|
+
*
|
|
114
|
+
* Blending: final = (1 - alpha) * qualityScore + alpha * pageRankScore
|
|
115
|
+
*
|
|
116
|
+
* @param patterns - Patterns to score
|
|
117
|
+
* @param citationGraph - The pattern citation graph (from PatternCitationGraph.buildGraph())
|
|
118
|
+
* @param alpha - Blend weight for PageRank (0 = ignore, 1 = only PageRank)
|
|
119
|
+
*/
|
|
120
|
+
export declare function computeBlendedImportance(patterns: Array<{
|
|
121
|
+
id: string;
|
|
122
|
+
confidence: number;
|
|
123
|
+
usageCount: number;
|
|
124
|
+
successRate: number;
|
|
125
|
+
}>, citationGraph: PatternGraph, alpha?: number): Map<string, number>;
|
|
63
126
|
//# sourceMappingURL=pattern-promotion.d.ts.map
|
|
@@ -7,9 +7,11 @@
|
|
|
7
7
|
*/
|
|
8
8
|
import { LoggerFactory } from '../logging/index.js';
|
|
9
9
|
import { toErrorMessage } from '../shared/error-utils.js';
|
|
10
|
-
import { shouldPromotePattern } from './qe-patterns.js';
|
|
10
|
+
import { shouldPromotePattern, calculateQualityScore } from './qe-patterns.js';
|
|
11
11
|
import { getWitnessChain } from '../audit/witness-chain.js';
|
|
12
12
|
import { RELATED_DOMAINS } from './agent-routing.js';
|
|
13
|
+
import { getRuVectorFeatureFlags } from '../integrations/ruvector/feature-flags.js';
|
|
14
|
+
import { PageRankSolver } from '../integrations/ruvector/solver-adapter.js';
|
|
13
15
|
const logger = LoggerFactory.create('PatternPromotion');
|
|
14
16
|
/**
|
|
15
17
|
* Check if a pattern should be promoted with coherence gate (ADR-052)
|
|
@@ -81,6 +83,34 @@ export async function promotePattern(patternId, deps) {
|
|
|
81
83
|
error: toErrorMessage(e),
|
|
82
84
|
});
|
|
83
85
|
}
|
|
86
|
+
// Record citation co-occurrence: promoted pattern cites existing long-term patterns
|
|
87
|
+
// in the same domain (R8, ADR-087)
|
|
88
|
+
if (getRuVectorFeatureFlags().useSublinearSolver) {
|
|
89
|
+
try {
|
|
90
|
+
const db = deps.getSqliteStore().getDatabase?.();
|
|
91
|
+
if (db) {
|
|
92
|
+
const citationGraph = new PatternCitationGraph(db);
|
|
93
|
+
const promoted = await deps.getPattern(patternId);
|
|
94
|
+
if (promoted) {
|
|
95
|
+
const peers = await deps.searchPatterns('', {
|
|
96
|
+
domain: promoted.qeDomain, tier: 'long-term', limit: 20,
|
|
97
|
+
});
|
|
98
|
+
if (peers.success) {
|
|
99
|
+
for (const { pattern: peer } of peers.value) {
|
|
100
|
+
if (peer.id !== patternId) {
|
|
101
|
+
citationGraph.recordCoOccurrence(patternId, peer.id);
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
catch (e) {
|
|
109
|
+
logger.warn('Citation graph update failed (non-fatal)', {
|
|
110
|
+
error: toErrorMessage(e),
|
|
111
|
+
});
|
|
112
|
+
}
|
|
113
|
+
}
|
|
84
114
|
if (deps.rvfDualWriter) {
|
|
85
115
|
try {
|
|
86
116
|
const promoted = await deps.getPattern(patternId);
|
|
@@ -184,4 +214,208 @@ export async function seedCrossDomainPatterns(deps) {
|
|
|
184
214
|
logger.info('Cross-domain transfer complete', { transferred, skipped });
|
|
185
215
|
return { transferred, skipped };
|
|
186
216
|
}
|
|
217
|
+
// ============================================================================
|
|
218
|
+
// Pattern Citation Graph & PageRank Scoring (R8, ADR-087)
|
|
219
|
+
// ============================================================================
|
|
220
|
+
/**
 * SQLite schema for pattern citation graph edges. One row per directed edge;
 * the composite primary key deduplicates edges, and the two indexes support
 * lookups from either endpoint.
 */
export const PATTERN_CITATIONS_SCHEMA = `
  CREATE TABLE IF NOT EXISTS pattern_citations (
    source_pattern_id TEXT NOT NULL,
    target_pattern_id TEXT NOT NULL,
    weight REAL NOT NULL DEFAULT 1.0,
    relationship TEXT NOT NULL DEFAULT 'co-occurrence',
    created_at TEXT DEFAULT (datetime('now')),
    PRIMARY KEY (source_pattern_id, target_pattern_id)
  );
  CREATE INDEX IF NOT EXISTS idx_citations_source ON pattern_citations(source_pattern_id);
  CREATE INDEX IF NOT EXISTS idx_citations_target ON pattern_citations(target_pattern_id);
`;
|
|
233
|
+
/**
 * Manages the pattern citation graph in SQLite. Handles schema initialization,
 * recording co-occurrence edges, and building PatternGraph for PageRank.
 */
export class PatternCitationGraph {
    // better-sqlite3 database handle supplied by the caller; this class never
    // opens or closes the connection itself.
    db;
    // Lazily flipped by ensureSchema() so the DDL runs at most once per instance.
    initialized = false;
    constructor(db) {
        this.db = db;
    }
|
|
243
|
+
    /**
     * Ensure the pattern_citations table (and its indexes) exist.
     * Idempotent per instance: the DDL is executed at most once.
     */
    ensureSchema() {
        if (this.initialized)
            return;
        this.db.exec(PATTERN_CITATIONS_SCHEMA);
        this.initialized = true;
    }
|
|
250
|
+
    /**
     * Record that two patterns co-occurred in the same assessment/session.
     * Increments weight if the edge already exists (stronger co-occurrence).
     * The pair is stored undirected: IDs are canonically ordered first.
     */
    recordCoOccurrence(patternIdA, patternIdB) {
        this.ensureSchema();
        // Canonical ordering so (A,B) and (B,A) map to the same edge
        const [src, tgt] = patternIdA < patternIdB
            ? [patternIdA, patternIdB]
            : [patternIdB, patternIdA];
        // Upsert: first sighting inserts weight 1.0, repeats bump it by 1.0.
        this.db.prepare(`
      INSERT INTO pattern_citations (source_pattern_id, target_pattern_id, weight, relationship)
      VALUES (?, ?, 1.0, 'co-occurrence')
      ON CONFLICT(source_pattern_id, target_pattern_id)
      DO UPDATE SET weight = weight + 1.0
    `).run(src, tgt);
    }
|
|
267
|
+
    /**
     * Record that pattern B was derived from or supersedes pattern A.
     * Stored as a directed edge source -> derived; if the edge already exists
     * it is upgraded to a derivation with at least weight 2.0 (derivations
     * outrank plain co-occurrence).
     */
    recordDerivation(sourcePatternId, derivedPatternId) {
        this.ensureSchema();
        this.db.prepare(`
      INSERT INTO pattern_citations (source_pattern_id, target_pattern_id, weight, relationship)
      VALUES (?, ?, 2.0, 'derivation')
      ON CONFLICT(source_pattern_id, target_pattern_id)
      DO UPDATE SET weight = MAX(weight, 2.0), relationship = 'derivation'
    `).run(sourcePatternId, derivedPatternId);
    }
|
|
279
|
+
    /**
     * Build a PatternGraph from all citation edges in the database.
     * Returns the graph ready for PageRank computation: `nodes` is the list of
     * pattern IDs, `edges` is a list of [sourceIndex, targetIndex, weight]
     * triples indexing into `nodes`. Co-occurrence edges are emitted in both
     * directions (they are undirected); derivation edges stay one-way.
     */
    buildGraph() {
        this.ensureSchema();
        const rows = this.db.prepare(`SELECT source_pattern_id, target_pattern_id, weight, relationship FROM pattern_citations`).all();
        // Collect unique node IDs
        const nodeSet = new Set();
        for (const row of rows) {
            nodeSet.add(row.source_pattern_id);
            nodeSet.add(row.target_pattern_id);
        }
        const nodes = Array.from(nodeSet);
        // Map each pattern ID to its index in `nodes`.
        const nodeIndex = new Map();
        nodes.forEach((id, i) => nodeIndex.set(id, i));
        // Build directed edges (both directions for undirected co-occurrence)
        const edges = [];
        for (const row of rows) {
            const si = nodeIndex.get(row.source_pattern_id);
            const ti = nodeIndex.get(row.target_pattern_id);
            edges.push([si, ti, row.weight]);
            if (row.relationship === 'co-occurrence') {
                edges.push([ti, si, row.weight]); // bidirectional for co-occurrence
            }
        }
        return { nodes, edges };
    }
|
|
307
|
+
    /**
     * Bootstrap the citation graph from existing pattern data.
     *
     * Sources:
     * 1. Same-domain co-occurrence: patterns in the same qe_domain are linked
     *    (weight = 1.0 per shared domain, reflecting that they co-occur in
     *    assessments for that domain).
     * 2. Existing pattern_relationships: "merged"/"derived"/"superseded"
     *    relationships become derivation edges (weight = 2.0).
     *
     * This is idempotent — uses INSERT OR IGNORE so repeated calls don't
     * create duplicate edges (but also don't increment weights).
     *
     * @returns Number of edges created
     */
    bootstrapFromExistingData() {
        this.ensureSchema();
        let edgesCreated = 0;
        // 1. Same-domain co-occurrence from qe_patterns
        // Group patterns by domain, then create pairwise edges within each domain
        const domainGroups = this.db.prepare(`
      SELECT id, qe_domain FROM qe_patterns WHERE qe_domain IS NOT NULL ORDER BY qe_domain
    `).all();
        const byDomain = new Map();
        for (const row of domainGroups) {
            const group = byDomain.get(row.qe_domain) ?? [];
            group.push(row.id);
            byDomain.set(row.qe_domain, group);
        }
        const insertCoOccurrence = this.db.prepare(`
      INSERT OR IGNORE INTO pattern_citations (source_pattern_id, target_pattern_id, weight, relationship)
      VALUES (?, ?, 1.0, 'co-occurrence')
    `);
        // Wrap all inserts in one transaction for speed; `changes > 0` counts
        // only edges that did not already exist (INSERT OR IGNORE).
        const insertBatch = this.db.transaction((pairs) => {
            for (const [a, b] of pairs) {
                const result = insertCoOccurrence.run(a, b);
                if (result.changes > 0)
                    edgesCreated++;
            }
        });
        const pairs = [];
        for (const [, ids] of byDomain) {
            // Create pairwise edges (canonical ordering, cap at 50 patterns per domain
            // to avoid O(n^2) blowup for large domains)
            const capped = ids.slice(0, 50);
            for (let i = 0; i < capped.length; i++) {
                for (let j = i + 1; j < capped.length; j++) {
                    const [src, tgt] = capped[i] < capped[j]
                        ? [capped[i], capped[j]]
                        : [capped[j], capped[i]];
                    pairs.push([src, tgt]);
                }
            }
        }
        insertBatch(pairs);
        // 2. Existing pattern_relationships → derivation edges
        const relationships = this.db.prepare(`
      SELECT source_pattern_id, target_pattern_id, relationship_type
      FROM pattern_relationships
      WHERE relationship_type IN ('merged', 'derived', 'superseded')
    `).all();
        const insertDerivation = this.db.prepare(`
      INSERT OR IGNORE INTO pattern_citations (source_pattern_id, target_pattern_id, weight, relationship)
      VALUES (?, ?, 2.0, 'derivation')
    `);
        for (const rel of relationships) {
            const result = insertDerivation.run(rel.source_pattern_id, rel.target_pattern_id);
            if (result.changes > 0)
                edgesCreated++;
        }
        return edgesCreated;
    }
|
|
379
|
+
/** Get the number of citation edges */
|
|
380
|
+
getEdgeCount() {
|
|
381
|
+
this.ensureSchema();
|
|
382
|
+
const row = this.db.prepare('SELECT COUNT(*) as cnt FROM pattern_citations').get();
|
|
383
|
+
return row.cnt;
|
|
384
|
+
}
|
|
385
|
+
}
|
|
386
|
+
/**
 * Compute blended importance scores using PageRank over the citation graph
 * combined with the existing weighted quality formula.
 *
 * Blending: final = (1 - alpha) * qualityScore + alpha * pageRankScore
 *
 * @param patterns - Patterns to score
 * @param citationGraph - The pattern citation graph (from PatternCitationGraph.buildGraph())
 * @param alpha - Blend weight for PageRank (0 = ignore, 1 = only PageRank)
 */
export function computeBlendedImportance(patterns, citationGraph, alpha = 0.3) {
    // Baseline quality score for every pattern, computed once up front.
    const qualityScores = new Map(patterns.map((p) => [p.id, calculateQualityScore(p)]));
    // Feature flag off → plain quality scores, no graph work at all.
    if (!getRuVectorFeatureFlags().useSublinearSolver) {
        return qualityScores;
    }
    const { nodes, edges } = citationGraph;
    // A graph this sparse gives PageRank nothing meaningful to rank.
    if (nodes.length < 3 || edges.length < 2) {
        return qualityScores;
    }
    const pageRankScores = new PageRankSolver().computeImportance(citationGraph);
    const blended = new Map();
    for (const p of patterns) {
        const quality = qualityScores.get(p.id) ?? 0;
        // Scale by node count so an average node lands near 1, then clamp to 1
        // (assumes PageRank mass sums to ~1 across nodes — TODO confirm against
        // PageRankSolver.computeImportance).
        const scaledPR = Math.min((pageRankScores.get(p.id) ?? 0) * nodes.length, 1);
        blended.set(p.id, (1 - alpha) * quality + alpha * scaledPR);
    }
    return blended;
}
|
|
187
421
|
//# sourceMappingURL=pattern-promotion.js.map
|
|
@@ -199,6 +199,8 @@ export declare class PatternStore implements IPatternStore {
|
|
|
199
199
|
private hnswIndex;
|
|
200
200
|
private hnswAvailable;
|
|
201
201
|
private hnswInitPromise;
|
|
202
|
+
private hdcCache;
|
|
203
|
+
private deltaTracker;
|
|
202
204
|
private stats;
|
|
203
205
|
constructor(memory: MemoryBackend, config?: Partial<PatternStoreConfig>);
|
|
204
206
|
/**
|