@wundam/orchex 1.0.0-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +65 -0
- package/README.md +332 -0
- package/bin/orchex.js +2 -0
- package/dist/artifacts.d.ts +132 -0
- package/dist/artifacts.js +832 -0
- package/dist/claude-executor.d.ts +31 -0
- package/dist/claude-executor.js +200 -0
- package/dist/commands.d.ts +36 -0
- package/dist/commands.js +264 -0
- package/dist/config.d.ts +100 -0
- package/dist/config.js +172 -0
- package/dist/context-builder.d.ts +46 -0
- package/dist/context-builder.js +506 -0
- package/dist/cost.d.ts +29 -0
- package/dist/cost.js +60 -0
- package/dist/execution-broadcaster.d.ts +18 -0
- package/dist/execution-broadcaster.js +17 -0
- package/dist/executors/base.d.ts +99 -0
- package/dist/executors/base.js +206 -0
- package/dist/executors/circuit-breaker.d.ts +36 -0
- package/dist/executors/circuit-breaker.js +109 -0
- package/dist/executors/deepseek-executor.d.ts +22 -0
- package/dist/executors/deepseek-executor.js +145 -0
- package/dist/executors/gemini-executor.d.ts +20 -0
- package/dist/executors/gemini-executor.js +176 -0
- package/dist/executors/index.d.ts +81 -0
- package/dist/executors/index.js +193 -0
- package/dist/executors/ollama-executor.d.ts +25 -0
- package/dist/executors/ollama-executor.js +184 -0
- package/dist/executors/openai-executor.d.ts +22 -0
- package/dist/executors/openai-executor.js +142 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +115 -0
- package/dist/intelligence/anti-pattern-detector.d.ts +117 -0
- package/dist/intelligence/anti-pattern-detector.js +327 -0
- package/dist/intelligence/budget-enforcer.d.ts +119 -0
- package/dist/intelligence/budget-enforcer.js +226 -0
- package/dist/intelligence/context-optimizer.d.ts +111 -0
- package/dist/intelligence/context-optimizer.js +282 -0
- package/dist/intelligence/cost-tracker.d.ts +114 -0
- package/dist/intelligence/cost-tracker.js +183 -0
- package/dist/intelligence/deliverable-extractor.d.ts +134 -0
- package/dist/intelligence/deliverable-extractor.js +909 -0
- package/dist/intelligence/dependency-inferrer.d.ts +87 -0
- package/dist/intelligence/dependency-inferrer.js +403 -0
- package/dist/intelligence/diagnostics.d.ts +25 -0
- package/dist/intelligence/diagnostics.js +36 -0
- package/dist/intelligence/error-analyzer.d.ts +7 -0
- package/dist/intelligence/error-analyzer.js +76 -0
- package/dist/intelligence/file-chunker.d.ts +15 -0
- package/dist/intelligence/file-chunker.js +64 -0
- package/dist/intelligence/fix-stream-manager.d.ts +59 -0
- package/dist/intelligence/fix-stream-manager.js +212 -0
- package/dist/intelligence/heuristics.d.ts +23 -0
- package/dist/intelligence/heuristics.js +124 -0
- package/dist/intelligence/learning-engine.d.ts +157 -0
- package/dist/intelligence/learning-engine.js +433 -0
- package/dist/intelligence/learning-feedback.d.ts +96 -0
- package/dist/intelligence/learning-feedback.js +202 -0
- package/dist/intelligence/pattern-analyzer.d.ts +35 -0
- package/dist/intelligence/pattern-analyzer.js +189 -0
- package/dist/intelligence/plan-parser.d.ts +124 -0
- package/dist/intelligence/plan-parser.js +498 -0
- package/dist/intelligence/planner.d.ts +29 -0
- package/dist/intelligence/planner.js +86 -0
- package/dist/intelligence/self-healer.d.ts +16 -0
- package/dist/intelligence/self-healer.js +84 -0
- package/dist/intelligence/slicing-metrics.d.ts +62 -0
- package/dist/intelligence/slicing-metrics.js +202 -0
- package/dist/intelligence/slicing-templates.d.ts +81 -0
- package/dist/intelligence/slicing-templates.js +420 -0
- package/dist/intelligence/split-suggester.d.ts +69 -0
- package/dist/intelligence/split-suggester.js +176 -0
- package/dist/intelligence/stream-generator.d.ts +90 -0
- package/dist/intelligence/stream-generator.js +452 -0
- package/dist/logger.d.ts +34 -0
- package/dist/logger.js +83 -0
- package/dist/logging.d.ts +5 -0
- package/dist/logging.js +38 -0
- package/dist/manifest.d.ts +56 -0
- package/dist/manifest.js +254 -0
- package/dist/metrics.d.ts +35 -0
- package/dist/metrics.js +75 -0
- package/dist/orchestrator.d.ts +35 -0
- package/dist/orchestrator.js +723 -0
- package/dist/ownership.d.ts +44 -0
- package/dist/ownership.js +250 -0
- package/dist/semaphore.d.ts +12 -0
- package/dist/semaphore.js +34 -0
- package/dist/telemetry/telemetry-types.d.ts +85 -0
- package/dist/telemetry/telemetry-types.js +1 -0
- package/dist/tier-gating.d.ts +24 -0
- package/dist/tier-gating.js +88 -0
- package/dist/tiers.d.ts +92 -0
- package/dist/tiers.js +108 -0
- package/dist/tools.d.ts +18 -0
- package/dist/tools.js +1363 -0
- package/dist/types.d.ts +740 -0
- package/dist/types.js +160 -0
- package/dist/utils/ownership-validator.d.ts +6 -0
- package/dist/utils/ownership-validator.js +21 -0
- package/dist/waves.d.ts +21 -0
- package/dist/waves.js +146 -0
- package/package.json +120 -0
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
/**
 * Dependency inference from deliverables.
 * Builds a dependency graph and detects cycles.
 */
import type { Deliverable } from './deliverable-extractor.js';
import type { LearnDiagnostics } from './diagnostics.js';
/**
 * Edge in the dependency graph. Direction is "from depends on to":
 * `from` must wait for `to` to complete.
 */
export interface DependencyEdge {
    /** Source deliverable ID (the one that depends on target) */
    from: string;
    /** Target deliverable ID (the one being depended upon) */
    to: string;
    /** How this dependency was inferred */
    reason: 'explicit' | 'file-ownership' | 'content-pattern';
    /** Human-readable explanation */
    explanation: string;
}
/**
 * Result of dependency analysis.
 */
export interface DependencyGraph {
    /** Map of deliverable ID → IDs it depends on */
    dependencies: Map<string, string[]>;
    /** All edges with explanations */
    edges: DependencyEdge[];
    /** Detected cycles (if any); each cycle is a path of node IDs */
    cycles: string[][];
    /** Whether the graph is acyclic (valid) */
    isAcyclic: boolean;
}
/**
 * Match a deliverable ID or name to find the target in the deliverables list.
 * Handles fuzzy matching for explicit deps that may use different casing.
 *
 * When a match resolves to a split sub-stream, automatically resolves to the
 * `-core` sibling to ensure correct dependency targeting.
 *
 * Returns undefined when no deliverable matches the query.
 */
export declare function findDeliverableMatch(query: string, deliverables: Deliverable[]): Deliverable | undefined;
/**
 * Detect if a deliverable is a cleanup/removal stream.
 * Cleanup streams DELETE files rather than CREATE them.
 */
export declare function isCleanupStream(d: Deliverable): boolean;
/**
 * Infer dependencies from file ownership overlaps.
 * If deliverable B reads files owned by deliverable A, B depends on A.
 *
 * NOTE: Cleanup streams are excluded from ownership registration because
 * they DELETE files rather than CREATE them. This prevents incorrect
 * dependencies where readers would wait for cleanup (backwards order).
 */
export declare function inferFileOwnershipDeps(deliverables: Deliverable[]): DependencyEdge[];
/**
 * Infer dependencies from content type patterns.
 * - Tests depend on their corresponding implementation
 * - Docs may depend on the feature they document
 */
export declare function inferContentPatternDeps(deliverables: Deliverable[]): DependencyEdge[];
/**
 * Convert explicit dependency mentions to edges.
 * Mentions that match no deliverable are recorded on `options.diagnostics`
 * (unmatchedDeps and warnings) when diagnostics are provided.
 */
export declare function inferExplicitDeps(deliverables: Deliverable[], options?: {
    diagnostics?: LearnDiagnostics;
}): DependencyEdge[];
/**
 * Detect cycles in the dependency graph using DFS.
 * Returns all cycles found.
 */
export declare function detectCycles(dependencies: Map<string, string[]>): string[][];
/**
 * Build a complete dependency graph from deliverables.
 * Combines explicit, file-ownership, and content-pattern dependencies.
 */
export declare function buildDependencyGraph(deliverables: Deliverable[], options?: {
    diagnostics?: LearnDiagnostics;
}): DependencyGraph;
/**
 * Apply inferred dependencies back to deliverables.
 * Returns a new array with updated explicitDeps.
 */
export declare function applyInferredDeps(deliverables: Deliverable[], graph: DependencyGraph): Deliverable[];
/**
 * Format dependency graph as human-readable report.
 */
export declare function formatDependencyReport(graph: DependencyGraph): string;
|
|
@@ -0,0 +1,403 @@
|
|
|
1
|
+
/**
 * Dependency inference from deliverables.
 * Builds a dependency graph and detects cycles.
 */
/**
 * Concern suffixes used by the semantic splitter.
 * When a deliverable is split, its ID becomes `{parentId}-{concern}`.
 */
const SPLIT_CONCERN_SUFFIXES = ['-types', '-migrations', '-core', '-tests', '-docs'];
/**
 * When a fuzzy match resolves to a split sub-stream (e.g., `feature-migrations`),
 * resolve to the `-core` sibling instead — the actual implementation.
 *
 * Rationale: external deps like "Depends on Feature X" target the implementation,
 * not migrations or types. The `-core` split owns the source code that downstream
 * streams import from. If no `-core` exists, falls back to the latest implementation
 * concern (migrations > types) before test/docs concerns.
 *
 * @param matched - Deliverable found by fuzzy matching.
 * @param deliverables - Full list, searched for split siblings.
 * @returns The resolved sibling, or `matched` unchanged when not a split.
 */
function resolveToCoreSibling(matched, deliverables) {
    const suffix = SPLIT_CONCERN_SUFFIXES.find(s => matched.id.endsWith(s));
    if (!suffix)
        return matched; // not a split sub-stream
    const parentPrefix = matched.id.slice(0, -suffix.length);
    // Find all siblings from the same split parent
    const siblings = deliverables.filter(d => SPLIT_CONCERN_SUFFIXES.some(s => d.id === `${parentPrefix}${s}`));
    if (siblings.length <= 1)
        return matched; // no siblings — not actually a split
    // Prefer -core (the implementation)
    const coreSibling = siblings.find(d => d.id === `${parentPrefix}-core`);
    if (coreSibling)
        return coreSibling;
    // No core sibling — pick last implementation concern (skip tests/docs)
    const implOrder = ['-migrations', '-types'];
    for (const implSuffix of implOrder) {
        const sibling = siblings.find(d => d.id === `${parentPrefix}${implSuffix}`);
        if (sibling)
            return sibling;
    }
    return matched;
}
/**
 * Match a deliverable ID or name to find the target in the deliverables list.
 * Handles fuzzy matching for explicit deps that may use different casing.
 *
 * Matching strategies, in priority order:
 *   1. exact ID (case-sensitive; no sibling resolution — explicit IDs are intentional)
 *   2. exact name (case-insensitive)
 *   3. partial ID containment (either direction)
 *   4. word overlap between query and name (>= 50% of usable query words)
 *
 * When a match resolves to a split sub-stream, automatically resolves to the
 * `-core` sibling to ensure correct dependency targeting.
 *
 * @returns The matched deliverable, or undefined when nothing matches.
 */
export function findDeliverableMatch(query, deliverables) {
    const queryLower = query.toLowerCase().trim();
    // Exact ID match (no sibling resolution — explicit IDs are intentional)
    const exactId = deliverables.find(d => d.id === query);
    if (exactId)
        return exactId;
    // Exact name match (case insensitive)
    const exactName = deliverables.find(d => d.name.toLowerCase() === queryLower);
    if (exactName)
        return resolveToCoreSibling(exactName, deliverables);
    // ID contains query or query contains ID
    const partialId = deliverables.find(d => d.id.includes(queryLower) || queryLower.includes(d.id));
    if (partialId)
        return resolveToCoreSibling(partialId, deliverables);
    // Name similarity (words overlap)
    const queryWords = queryLower.split(/[\s-_]+/).filter(w => w.length > 2);
    // BUGFIX: with no usable query words (every token <= 2 chars) the overlap
    // threshold `overlap.length >= queryWords.length * 0.5` degenerated to
    // `0 >= 0`, spuriously matching the FIRST deliverable. Bail out instead.
    if (queryWords.length === 0)
        return undefined;
    const byNameOverlap = deliverables.find(d => {
        const nameWords = d.name.toLowerCase().split(/[\s-_]+/);
        const overlap = queryWords.filter(qw => nameWords.some(nw => nw.includes(qw) || qw.includes(nw)));
        return overlap.length >= queryWords.length * 0.5;
    });
    if (byNameOverlap)
        return resolveToCoreSibling(byNameOverlap, deliverables);
    return undefined;
}
|
|
73
|
+
/**
 * Detect whether a deliverable is a cleanup/removal stream.
 * Cleanup streams DELETE files rather than CREATE them, so they are treated
 * differently during ownership-based dependency inference.
 */
export function isCleanupStream(d) {
    const CLEANUP_KEYWORDS = /\b(cleanup|clean\s*up|remove|delete|drop)\b/;
    const haystack = `${d.name} ${d.description}`.toLowerCase();
    return CLEANUP_KEYWORDS.test(haystack);
}
|
|
81
|
+
/**
 * Infer dependencies from file ownership overlaps.
 * If deliverable B reads files owned by deliverable A, B depends on A.
 *
 * NOTE: Cleanup streams are excluded from ownership registration because
 * they DELETE files rather than CREATE them. This prevents incorrect
 * dependencies where readers would wait for cleanup (backwards order).
 *
 * Mutual pairs (A→B and B→A) are dropped entirely: both streams reading
 * each other's files is context sharing, not a sequencing constraint.
 */
export function inferFileOwnershipDeps(deliverables) {
    // file → owning deliverable (cleanup streams never register ownership)
    const ownerByFile = new Map();
    for (const candidate of deliverables) {
        if (isCleanupStream(candidate))
            continue;
        for (const owned of candidate.ownedFiles) {
            ownerByFile.set(owned, candidate);
        }
    }
    // One edge reader → owner per read file that some other stream owns
    const edges = [];
    for (const reader of deliverables) {
        for (const file of reader.readFiles) {
            const owner = ownerByFile.get(file);
            if (!owner || owner.id === reader.id)
                continue;
            edges.push({
                from: reader.id,
                to: owner.id,
                reason: 'file-ownership',
                explanation: `${reader.id} reads ${file} which is owned by ${owner.id}`,
            });
        }
    }
    // Drop mutual file-ownership pairs: if A→B and B→A both exist,
    // these are context reads, not sequencing dependencies.
    const keyOf = (a, b) => `${a}→${b}`;
    const presentKeys = new Set(edges.map(e => keyOf(e.from, e.to)));
    return edges.filter(e => !presentKeys.has(keyOf(e.to, e.from)));
}
|
|
129
|
+
/**
 * Infer dependencies from content type patterns.
 * - Tests depend on their corresponding implementation
 * - Docs/tutorials may depend on the feature they document
 */
export function inferContentPatternDeps(deliverables) {
    const edges = [];
    const addEdge = (from, to, explanation) => {
        edges.push({ from, to, reason: 'content-pattern', explanation });
    };
    for (const d of deliverables) {
        // Test deliverables depend on their implementation counterpart
        if (d.category === 'test') {
            const baseName = d.id.replace(/-tests?$/, '').replace(/^tests?-/, '');
            const candidateIds = [
                baseName,
                `${baseName}-core`,
                `${baseName}-service`,
                `${baseName}-implementation`,
            ];
            const impl = deliverables.find(other => other.id !== d.id &&
                other.category === 'code' &&
                candidateIds.includes(other.id));
            if (impl) {
                addEdge(d.id, impl.id, `${d.id} (test) depends on ${impl.id} (implementation)`);
            }
        }
        // Docs/tutorials depend on the feature they document
        if (d.category === 'docs' || d.category === 'tutorial') {
            // Strip common doc/tutorial prefixes and suffixes to recover the feature name
            const baseName = d.id
                .replace(/^docs?-/, '')
                .replace(/-docs?$/, '')
                .replace(/^tutorials?-/, '')
                .replace(/-tutorials?$/, '');
            const codeStreams = deliverables.filter(other => other.id !== d.id && other.category === 'code');
            // Priority: exact id, then service/core/implementation variants,
            // then anything prefixed with the base name (except -types streams)
            const feature = codeStreams.find(other => other.id === baseName)
                ?? codeStreams.find(other => ['-service', '-core', '-implementation'].some(sfx => other.id === `${baseName}${sfx}`))
                ?? codeStreams.find(other => other.id.startsWith(`${baseName}-`) && !other.id.includes('types'));
            if (feature) {
                addEdge(d.id, feature.id, `${d.id} (${d.category}) documents ${feature.id}`);
            }
        }
    }
    return edges;
}
|
|
187
|
+
/**
 * Convert explicit dependency mentions to edges.
 * Mentions that match no deliverable are recorded on the optional
 * diagnostics object (unmatchedDeps and warnings); self-matches are
 * silently ignored.
 */
export function inferExplicitDeps(deliverables, options) {
    const edges = [];
    const diagnostics = options?.diagnostics;
    for (const d of deliverables) {
        for (const mention of d.explicitDeps) {
            const target = findDeliverableMatch(mention, deliverables);
            if (!target) {
                const msg = `Stream '${d.id}': unmatched explicit dep '${mention}'`;
                diagnostics?.unmatchedDeps.push(msg);
                diagnostics?.warnings.push(msg);
                continue;
            }
            if (target.id === d.id)
                continue; // self-dependency — nothing to record
            edges.push({
                from: d.id,
                to: target.id,
                reason: 'explicit',
                explanation: `${d.id} explicitly depends on "${mention}" (matched ${target.id})`,
            });
        }
    }
    return edges;
}
|
|
212
|
+
/**
 * Detect cycles in the dependency graph using depth-first search.
 * Each reported cycle is the path of node IDs with the entry node
 * repeated at the end (e.g. ['a', 'b', 'a']).
 */
export function detectCycles(dependencies) {
    const cycles = [];
    const finished = new Set();
    const onStack = new Set();
    const trail = [];
    const visit = (node) => {
        finished.add(node);
        onStack.add(node);
        trail.push(node);
        for (const next of dependencies.get(node) ?? []) {
            if (!finished.has(next)) {
                visit(next);
            }
            else if (onStack.has(next)) {
                // Back edge into the current DFS path — slice out the cycle
                const start = trail.indexOf(next);
                if (start !== -1) {
                    cycles.push([...trail.slice(start), next]);
                }
            }
        }
        trail.pop();
        onStack.delete(node);
    };
    for (const root of dependencies.keys()) {
        if (!finished.has(root)) {
            visit(root);
        }
    }
    return cycles;
}
|
|
250
|
+
/**
 * Break cycles that consist entirely of file-ownership edges.
 * For each such cycle, remove the edge from the stream with the fewest
 * total dependencies (least connected). Ties broken alphabetically for
 * determinism.
 *
 * Mixed-reason cycles (containing explicit or content-pattern edges)
 * are left intact — those are genuine user errors.
 *
 * NOTE: mutates both `edges` and the arrays inside `dependencies` in place
 * (via splice); callers rely on these side effects.
 *
 * @param cycles - Cycles as node-ID paths (entry node repeated at the end).
 * @param edges - All unique edges; removed edges are spliced out.
 * @param dependencies - Adjacency map (id → ids it depends on), updated in place.
 * @returns `{ broken }` — the number of edges removed.
 */
function breakFileOwnershipCycles(cycles, edges, dependencies) {
    let broken = 0;
    // Keys "from→to" already removed, so later cycles cannot re-match them
    const removedKeys = new Set();
    for (const cycle of cycles) {
        // Get edges that form this cycle (consecutive node pairs along the path)
        const cycleEdgePairs = [];
        for (let i = 0; i < cycle.length - 1; i++) {
            cycleEdgePairs.push({ from: cycle[i], to: cycle[i + 1] });
        }
        // Find matching edges and check if all are file-ownership
        const matchedEdges = [];
        for (const pair of cycleEdgePairs) {
            const edge = edges.find(e => e.from === pair.from && e.to === pair.to && !removedKeys.has(`${e.from}→${e.to}`));
            if (edge)
                matchedEdges.push(edge);
        }
        if (matchedEdges.length === 0)
            continue;
        const allFileOwnership = matchedEdges.every(e => e.reason === 'file-ownership');
        if (!allFileOwnership)
            continue;
        // Pick the weakest edge: from the stream with fewest total deps,
        // then alphabetically for determinism
        const edgeToRemove = matchedEdges.reduce((weakest, current) => {
            const weakestDeps = dependencies.get(weakest.from)?.length ?? 0;
            const currentDeps = dependencies.get(current.from)?.length ?? 0;
            if (currentDeps < weakestDeps)
                return current;
            if (currentDeps === weakestDeps && current.from < weakest.from)
                return current;
            return weakest;
        });
        // Remove from edges array
        const idx = edges.indexOf(edgeToRemove);
        if (idx !== -1)
            edges.splice(idx, 1);
        // Remove from dependency map
        const fromDeps = dependencies.get(edgeToRemove.from);
        if (fromDeps) {
            const depIdx = fromDeps.indexOf(edgeToRemove.to);
            if (depIdx !== -1)
                fromDeps.splice(depIdx, 1);
        }
        removedKeys.add(`${edgeToRemove.from}→${edgeToRemove.to}`);
        broken++;
    }
    return { broken };
}
|
|
307
|
+
/**
 * Build a complete dependency graph from deliverables.
 * Combines explicit, file-ownership, and content-pattern dependencies,
 * deduplicates edges, and auto-resolves cycles made purely of
 * file-ownership edges before reporting remaining cycles.
 */
export function buildDependencyGraph(deliverables, options) {
    // Gather candidate edges from all three inference strategies
    const candidates = [
        ...inferExplicitDeps(deliverables, options),
        ...inferFileOwnershipDeps(deliverables),
        ...inferContentPatternDeps(deliverables),
    ];
    // Keep only the first edge seen for each from→to pair
    const edgeByKey = new Map();
    for (const edge of candidates) {
        const key = `${edge.from}→${edge.to}`;
        if (!edgeByKey.has(key)) {
            edgeByKey.set(key, edge);
        }
    }
    const uniqueEdges = [...edgeByKey.values()];
    // Adjacency map, seeded so every deliverable has an entry
    const dependencies = new Map(deliverables.map(d => [d.id, []]));
    for (const edge of uniqueEdges) {
        const deps = dependencies.get(edge.from) ?? [];
        if (!deps.includes(edge.to)) {
            deps.push(edge.to);
        }
        dependencies.set(edge.from, deps);
    }
    // Detect cycles; auto-resolve those made entirely of file-ownership edges
    let cycles = detectCycles(dependencies);
    if (cycles.length > 0) {
        const { broken } = breakFileOwnershipCycles(cycles, uniqueEdges, dependencies);
        if (broken > 0) {
            cycles = detectCycles(dependencies); // re-detect after edge removal
        }
    }
    return {
        dependencies,
        edges: uniqueEdges,
        cycles,
        isAcyclic: cycles.length === 0,
    };
}
|
|
359
|
+
/**
 * Apply inferred dependencies back to deliverables.
 * Returns a new array of shallow copies whose explicitDeps is the
 * deduplicated union of the original explicit deps (first) and the
 * graph-inferred deps (appended).
 */
export function applyInferredDeps(deliverables, graph) {
    return deliverables.map(original => {
        const inferred = graph.dependencies.get(original.id) ?? [];
        // Set preserves insertion order: explicit deps first, then inferred
        const merged = new Set(original.explicitDeps);
        for (const dep of inferred) {
            merged.add(dep);
        }
        return { ...original, explicitDeps: [...merged] };
    });
}
|
|
374
|
+
/**
 * Format a dependency graph as a human-readable, multi-section report:
 * summary, detected cycles (if any), per-deliverable dependencies, and
 * edge-by-edge details with explanations.
 */
export function formatDependencyReport(graph) {
    const out = ['=== Dependency Analysis ===', ''];
    // Summary
    out.push(`Total edges: ${graph.edges.length}`, `Acyclic: ${graph.isAcyclic ? 'Yes ✓' : 'No ⚠️'}`);
    if (graph.cycles.length > 0) {
        out.push('', '--- Cycles Detected ---');
        for (const cycle of graph.cycles) {
            out.push(`  ${cycle.join(' → ')}`);
        }
    }
    out.push('', '--- Dependencies by Deliverable ---');
    for (const [id, deps] of graph.dependencies) {
        if (deps.length > 0) {
            out.push(`${id}: ${deps.join(', ')}`);
        }
    }
    out.push('', '--- Edge Details ---');
    for (const edge of graph.edges) {
        out.push(`${edge.from} → ${edge.to} (${edge.reason})`, `  ${edge.explanation}`);
    }
    return out.join('\n');
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Diagnostics collected during the learn pipeline.
 * Threaded as an optional parameter; returned in tool responses.
 */
export interface LearnDiagnostics {
    /** Number of YAML blocks found in the plan input. */
    yamlBlocksFound: number;
    /** Number of those YAML blocks that parsed successfully. */
    yamlBlocksParsed: number;
    /** Parse error messages for YAML blocks that failed. */
    yamlParseErrors: string[];
    /** Extraction strategy that produced the deliverables; null when none ran. */
    extractionPath: 'yaml' | 'markdown' | null;
    /** Section counts — presumably keyed by heading level; confirm in plan-parser. */
    sectionsFound: Record<number, number>;
    /** Section titles filtered out as meta (non-deliverable) content. */
    sectionsFilteredAsMeta: string[];
    /** Total deliverables extracted. */
    deliverableCount: number;
    /** Number of deliverables split into sub-streams. */
    splitCount: number;
    /** Explicit dependency mentions that matched no deliverable. */
    unmatchedDeps: string[];
    /** Descriptions of files claimed by more than one stream. */
    ownershipConflicts: string[];
    /** General warnings accumulated across the pipeline. */
    warnings: string[];
}
/** Create a fresh diagnostics record: zeroed counters, empty lists, null path. */
export declare function createDiagnostics(): LearnDiagnostics;
/**
 * Detect files claimed by multiple streams.
 * Returns human-readable conflict descriptions.
 */
export declare function detectOwnershipConflicts(streams: Record<string, {
    /** Files this stream claims ownership of (absent/empty means none). */
    owns?: string[];
}>): string[];
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
/**
 * Create a fresh LearnDiagnostics record: every counter starts at zero,
 * every list starts empty, and no extraction path has been chosen yet.
 * A brand-new object (with brand-new arrays) is returned on every call,
 * so separate pipeline runs never share mutable state.
 */
export function createDiagnostics() {
    const fresh = {
        yamlBlocksFound: 0,
        yamlBlocksParsed: 0,
        yamlParseErrors: [],
        extractionPath: null,
        sectionsFound: {},
        sectionsFilteredAsMeta: [],
        deliverableCount: 0,
        splitCount: 0,
        unmatchedDeps: [],
        ownershipConflicts: [],
        warnings: [],
    };
    return fresh;
}
|
|
16
|
+
/**
 * Detect files claimed by multiple streams.
 * Returns human-readable conflict descriptions, one per multiply-owned file.
 */
export function detectOwnershipConflicts(streams) {
    // file → every stream id that claims it (in stream iteration order)
    const claimants = new Map();
    for (const [streamId, stream] of Object.entries(streams)) {
        for (const file of stream.owns || []) {
            if (!claimants.has(file)) {
                claimants.set(file, []);
            }
            claimants.get(file).push(streamId);
        }
    }
    const conflicts = [];
    for (const [file, owners] of claimants) {
        if (owners.length > 1) {
            conflicts.push(`${file} owned by: ${owners.join(', ')}`);
        }
    }
    return conflicts;
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
/** Result of classifying a raw error string into a known failure category. */
export interface ErrorAnalysis {
    /** The failure category the error text matched ('unknown' when none did). */
    category: ErrorCategory;
    /** Whether retrying (typically with improved context) may resolve the failure. */
    retryable: boolean;
    /** Human-readable guidance on how to handle or retry this failure. */
    suggestion: string;
}
/**
 * Known failure categories, covering infrastructure issues (timeout, network,
 * rate_limit, environment) and agent-fixable issues (edit_mismatch,
 * invalid_artifact, ownership_violation, test_failure, lint_error,
 * runtime_error), with 'unknown' as the fallback.
 */
export type ErrorCategory = 'timeout' | 'network' | 'rate_limit' | 'edit_mismatch' | 'invalid_artifact' | 'environment' | 'ownership_violation' | 'test_failure' | 'lint_error' | 'runtime_error' | 'unknown';
/**
 * Classify a raw error string by pattern matching.
 * Unrecognized errors fall back to category 'unknown' and are treated as retryable.
 */
export declare function analyzeError(errorString: string): ErrorAnalysis;
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
/**
 * Ordered classification table: the FIRST matching pattern wins, so
 * infrastructure failures (timeout/network/rate-limit) are checked before
 * agent-fixable ones. Patterns and suggestion strings are part of the
 * observable behavior and are preserved verbatim.
 */
const PATTERNS = [
    // Infrastructure errors — not fixable by regenerating prompts
    { pattern: /timed?\s*out|ETIMEDOUT|deadline|timeout/i, category: 'timeout', retryable: false, suggestion: 'Timeout is an infrastructure issue. Consider increasing stream timeout or reducing scope.' },
    { pattern: /network error|ECONNREFUSED|ECONNRESET|EHOSTUNREACH|socket hang up/i, category: 'network', retryable: false, suggestion: 'Network connectivity issue. Check API endpoint availability and network configuration.' },
    { pattern: /rate limit|too many requests|429|quota exceeded/i, category: 'rate_limit', retryable: false, suggestion: 'Rate limited by API. Wait before retrying or reduce parallel stream count.' },
    // Fixable errors — can be resolved by retry with better context
    { pattern: /old_?content.*not found|edit.*mismatch|does not match|oldContent/i, category: 'edit_mismatch', retryable: true, suggestion: 'The file content changed since context was built. Re-read the file and retry with updated content.' },
    { pattern: /invalid.*artifact|parse.*error|orchex-artifact.*not found|JSON\.parse/i, category: 'invalid_artifact', retryable: true, suggestion: 'The agent produced malformed output. Retry with clearer instructions about the artifact format.' },
    { pattern: /ENOENT|EACCES|EPERM|ENOSPC|no such file|permission denied/i, category: 'environment', retryable: false, suggestion: 'File system error. Check file paths and permissions.' },
    { pattern: /ownership violation|outside owned files|SECURITY.*path traversal|SECURITY.*absolute path/i, category: 'ownership_violation', retryable: true, suggestion: 'File operation attempted outside owned files. The fix stream should only modify files in the owns list, or the owns list should be updated to include the new file.' },
    { pattern: /test.*fail|expect.*received|assertion.*error|FAIL\s+tests\//i, category: 'test_failure', retryable: true, suggestion: 'Tests are failing. Include the test error output in the retry prompt so the agent can fix the issue.' },
    { pattern: /lint|eslint|prettier|formatting/i, category: 'lint_error', retryable: true, suggestion: 'Lint or formatting error. Include the lint output in the retry prompt.' },
    { pattern: /TypeError|ReferenceError|SyntaxError|Cannot find module|cannot find name|TS\d{4}/i, category: 'runtime_error', retryable: true, suggestion: 'Code has type or runtime errors. Include the error output and relevant type definitions in the retry prompt.' },
];
/**
 * Classify a raw error string against the pattern table above.
 * Unrecognized errors default to the 'unknown' category and are considered
 * retryable, since including the raw message in a retry prompt may help.
 */
export function analyzeError(errorString) {
    const rule = PATTERNS.find(({ pattern }) => pattern.test(errorString));
    if (rule) {
        return { category: rule.category, retryable: rule.retryable, suggestion: rule.suggestion };
    }
    return {
        category: 'unknown',
        retryable: true,
        suggestion: 'Unknown error. Retry with the full error message included in the prompt.',
    };
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/**
 * Chunks a file's content, selecting only the most relevant sections if it's large.
 * For files over minLines, returns sections around lines matching any of the keywords (case-insensitive).
 *
 * - Keeps several lines of surrounding context (`contextRadius`).
 * - If no matches, falls back to the first and last section slices.
 *
 * @param content The full file content
 * @param keywords List of relevant keywords (from the stream plan)
 * @param minLines Minimum lines above which chunking is triggered (default: 300)
 * @param contextRadius How many lines before/after a match to include (default: 10)
 * @param maxTotalLines Max total lines to include from this file (default: 120)
 * @param withLineNumbers Presumably prefixes emitted lines with their original line numbers — undocumented upstream; confirm against the implementation
 * @returns string (either chunks joined by \n, or full content if not large enough)
 */
export declare function extractRelevantChunks(content: string, keywords: string[], minLines?: number, contextRadius?: number, maxTotalLines?: number, withLineNumbers?: boolean): string;
|