@zoebuildsai/trace 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.gitignore +115 -0
- package/.trace/progress.json +22 -0
- package/README.md +466 -0
- package/RELEASE-NOTES-1.5.0.md +410 -0
- package/STATUS.md +245 -0
- package/dist/auto-commit.d.ts +66 -0
- package/dist/auto-commit.d.ts.map +1 -0
- package/dist/auto-commit.js +180 -0
- package/dist/auto-commit.js.map +1 -0
- package/dist/cli.d.ts +7 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +246 -0
- package/dist/cli.js.map +1 -0
- package/dist/commands.d.ts +46 -0
- package/dist/commands.d.ts.map +1 -0
- package/dist/commands.js +256 -0
- package/dist/commands.js.map +1 -0
- package/dist/diff.d.ts +23 -0
- package/dist/diff.d.ts.map +1 -0
- package/dist/diff.js +106 -0
- package/dist/diff.js.map +1 -0
- package/dist/github.d.ts.map +1 -0
- package/dist/github.js.map +1 -0
- package/dist/index-cache.d.ts +35 -0
- package/dist/index-cache.d.ts.map +1 -0
- package/dist/index-cache.js +114 -0
- package/dist/index-cache.js.map +1 -0
- package/dist/index.d.ts +15 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +25 -0
- package/dist/index.js.map +1 -0
- package/dist/storage.d.ts +45 -0
- package/dist/storage.d.ts.map +1 -0
- package/dist/storage.js +151 -0
- package/dist/storage.js.map +1 -0
- package/dist/sync.d.ts +60 -0
- package/dist/sync.js +184 -0
- package/dist/tags.d.ts +85 -0
- package/dist/tags.d.ts.map +1 -0
- package/dist/tags.js +219 -0
- package/dist/tags.js.map +1 -0
- package/dist/types.d.ts +102 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +6 -0
- package/dist/types.js.map +1 -0
- package/docs/.nojekyll +0 -0
- package/docs/README.md +73 -0
- package/docs/_config.yml +2 -0
- package/docs/index.html +960 -0
- package/docs-website/package.json +20 -0
- package/jest.config.js +21 -0
- package/package.json +50 -0
- package/scripts/init.ts +290 -0
- package/src/agent-audit.ts +270 -0
- package/src/agent-checkout.ts +227 -0
- package/src/agent-coordination.ts +318 -0
- package/src/async-queue.ts +203 -0
- package/src/auto-branching.ts +279 -0
- package/src/auto-commit.ts +166 -0
- package/src/cherry-pick.ts +252 -0
- package/src/chunked-upload.ts +224 -0
- package/src/cli-v2.ts +335 -0
- package/src/cli.ts +318 -0
- package/src/cliff-detection.ts +232 -0
- package/src/commands.ts +267 -0
- package/src/commit-hash-system.ts +351 -0
- package/src/compression.ts +176 -0
- package/src/conflict-resolution-ui.ts +277 -0
- package/src/conflict-visualization.ts +238 -0
- package/src/diff-formatter.ts +184 -0
- package/src/diff.ts +124 -0
- package/src/distributed-coordination.ts +273 -0
- package/src/git-interop.ts +316 -0
- package/src/index-cache.ts +88 -0
- package/src/index.ts +38 -0
- package/src/merge-engine.ts +143 -0
- package/src/message-search.ts +370 -0
- package/src/performance-monitoring.ts +236 -0
- package/src/rebase.ts +327 -0
- package/src/rollback.ts +215 -0
- package/src/semantic-grouping.ts +245 -0
- package/src/stage-area.ts +324 -0
- package/src/stash.ts +278 -0
- package/src/storage.ts +131 -0
- package/src/sync.ts +205 -0
- package/src/tags.ts +244 -0
- package/src/types.ts +119 -0
- package/src/webhooks.ts +119 -0
- package/src/workspace-isolation.ts +298 -0
- package/tests/auto-commit.test.ts +308 -0
- package/tests/checkout.test.ts +136 -0
- package/tests/commit.test.ts +118 -0
- package/tests/diff.test.ts +191 -0
- package/tests/github.test.ts +94 -0
- package/tests/integration.test.ts +267 -0
- package/tests/log.test.ts +125 -0
- package/tests/phase2-integration.test.ts +370 -0
- package/tests/storage.test.ts +167 -0
- package/tests/tags.test.ts +477 -0
- package/tests/types.test.ts +75 -0
- package/tests/v1.1/agent-audit.test.ts +472 -0
- package/tests/v1.1/agent-coordination.test.ts +308 -0
- package/tests/v1.1/async-queue.test.ts +253 -0
- package/tests/v1.1/comprehensive.test.ts +521 -0
- package/tests/v1.1/diff-formatter.test.ts +238 -0
- package/tests/v1.1/integration.test.ts +389 -0
- package/tests/v1.1/onboarding.test.ts +365 -0
- package/tests/v1.1/rollback.test.ts +370 -0
- package/tests/v1.1/semantic-grouping.test.ts +230 -0
- package/tests/v1.2/chunked-upload.test.ts +301 -0
- package/tests/v1.2/cliff-detection.test.ts +272 -0
- package/tests/v1.2/commit-hash-system.test.ts +288 -0
- package/tests/v1.2/compression.test.ts +220 -0
- package/tests/v1.2/conflict-visualization.test.ts +263 -0
- package/tests/v1.2/distributed.test.ts +261 -0
- package/tests/v1.2/performance-monitoring.test.ts +328 -0
- package/tests/v1.3/auto-branching.test.ts +270 -0
- package/tests/v1.3/message-search.test.ts +264 -0
- package/tests/v1.3/stage-area.test.ts +330 -0
- package/tests/v1.3/stash-rebase-cherry-pick.test.ts +361 -0
- package/tests/v1.4/cli.test.ts +171 -0
- package/tests/v1.4/conflict-resolution-advanced.test.ts +429 -0
- package/tests/v1.4/conflict-resolution-ui.test.ts +286 -0
- package/tests/v1.4/workspace-isolation-advanced.test.ts +382 -0
- package/tests/v1.4/workspace-isolation.test.ts +268 -0
- package/tests/v1.5/agent-coordination.real.test.ts +401 -0
- package/tests/v1.5/cli-v2.test.ts +354 -0
- package/tests/v1.5/git-interop.real.test.ts +358 -0
- package/tests/v1.5/integration-testing.real.test.ts +440 -0
- package/tsconfig.json +26 -0
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
/**
 * Performance Monitoring for Trace
 * Track operation times and identify bottlenecks
 */

/**
 * A single recorded measurement for one completed operation.
 */
export interface PerformanceMetric {
  /** Logical operation name used for grouping in getStats(). */
  operation: string;
  duration: number; // milliseconds (wall-clock, Date.now() based)
  /** Epoch millis at which the operation finished. */
  timestamp: number;
  /** Optional caller-supplied context attached to the measurement. */
  metadata?: Record<string, unknown>;
  /** Outcome of the operation; feeds failureRate in PerformanceStats. */
  status: 'success' | 'failure';
}
+
|
|
14
|
+
/**
 * Aggregated latency/failure statistics for one operation name,
 * as computed by PerformanceMonitoring.getStats().
 */
export interface PerformanceStats {
  /** Operation name these statistics describe. */
  operation: string;
  /** Number of recorded metrics for this operation. */
  count: number;
  /** Mean duration in milliseconds. */
  avgDuration: number;
  /** Smallest recorded duration in milliseconds. */
  minDuration: number;
  /** Largest recorded duration in milliseconds. */
  maxDuration: number;
  /** 95th-percentile duration in ms (nearest-rank on sorted samples). */
  p95Duration: number;
  /** 99th-percentile duration in ms (nearest-rank on sorted samples). */
  p99Duration: number;
  /** Fraction of metrics with status 'failure', in [0, 1]. */
  failureRate: number;
}
|
|
25
|
+
/**
 * Snapshot produced by PerformanceMonitoring.generateReport().
 */
export interface PerformanceReport {
  /** Epoch millis at which the report was generated. */
  timestamp: number;
  /** Total number of metrics recorded so far (across all operations). */
  totalOperations: number;
  /** Mean of the per-operation average durations, in milliseconds. */
  averageLatency: number;
  /** Stats for operations slower than the baseline, sorted slowest first. */
  bottlenecks: PerformanceStats[];
  /** Human-readable optimization hints derived from the bottlenecks. */
  recommendations: string[];
}
+
|
|
33
|
+
export class PerformanceMonitoring {
|
|
34
|
+
private metrics: PerformanceMetric[] = [];
|
|
35
|
+
private operationTimers: Map<string, number> = new Map();
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Start tracking operation
|
|
39
|
+
*/
|
|
40
|
+
startOperation(operationId: string): void {
|
|
41
|
+
this.operationTimers.set(operationId, Date.now());
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/**
|
|
45
|
+
* End tracking operation
|
|
46
|
+
*/
|
|
47
|
+
endOperation(
|
|
48
|
+
operationId: string,
|
|
49
|
+
operation: string,
|
|
50
|
+
status: 'success' | 'failure' = 'success',
|
|
51
|
+
metadata?: Record<string, unknown>
|
|
52
|
+
): PerformanceMetric {
|
|
53
|
+
const startTime = this.operationTimers.get(operationId);
|
|
54
|
+
if (!startTime) {
|
|
55
|
+
throw new Error(`No timer found for operation ${operationId}`);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
const duration = Date.now() - startTime;
|
|
59
|
+
const metric: PerformanceMetric = {
|
|
60
|
+
operation,
|
|
61
|
+
duration,
|
|
62
|
+
timestamp: Date.now(),
|
|
63
|
+
metadata,
|
|
64
|
+
status,
|
|
65
|
+
};
|
|
66
|
+
|
|
67
|
+
this.metrics.push(metric);
|
|
68
|
+
this.operationTimers.delete(operationId);
|
|
69
|
+
|
|
70
|
+
return metric;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
/**
|
|
74
|
+
* Get statistics for operation
|
|
75
|
+
*/
|
|
76
|
+
getStats(operation: string): PerformanceStats {
|
|
77
|
+
const filtered = this.metrics.filter(m => m.operation === operation);
|
|
78
|
+
|
|
79
|
+
if (filtered.length === 0) {
|
|
80
|
+
return {
|
|
81
|
+
operation,
|
|
82
|
+
count: 0,
|
|
83
|
+
avgDuration: 0,
|
|
84
|
+
minDuration: 0,
|
|
85
|
+
maxDuration: 0,
|
|
86
|
+
p95Duration: 0,
|
|
87
|
+
p99Duration: 0,
|
|
88
|
+
failureRate: 0,
|
|
89
|
+
};
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
const durations = filtered.map(m => m.duration).sort((a, b) => a - b);
|
|
93
|
+
const failures = filtered.filter(m => m.status === 'failure').length;
|
|
94
|
+
|
|
95
|
+
return {
|
|
96
|
+
operation,
|
|
97
|
+
count: filtered.length,
|
|
98
|
+
avgDuration: durations.reduce((a, b) => a + b, 0) / durations.length,
|
|
99
|
+
minDuration: durations[0],
|
|
100
|
+
maxDuration: durations[durations.length - 1],
|
|
101
|
+
p95Duration: durations[Math.floor(durations.length * 0.95)],
|
|
102
|
+
p99Duration: durations[Math.floor(durations.length * 0.99)],
|
|
103
|
+
failureRate: failures / filtered.length,
|
|
104
|
+
};
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Get all operation stats
|
|
109
|
+
*/
|
|
110
|
+
getAllStats(): PerformanceStats[] {
|
|
111
|
+
const operations = new Set(this.metrics.map(m => m.operation));
|
|
112
|
+
return Array.from(operations).map(op => this.getStats(op));
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
/**
|
|
116
|
+
* Find bottlenecks (operations slower than baseline)
|
|
117
|
+
*/
|
|
118
|
+
findBottlenecks(baselineMs: number = 100): PerformanceStats[] {
|
|
119
|
+
return this.getAllStats().filter(stats => stats.avgDuration > baselineMs);
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
/**
|
|
123
|
+
* Generate performance report
|
|
124
|
+
*/
|
|
125
|
+
generateReport(): PerformanceReport {
|
|
126
|
+
const allStats = this.getAllStats();
|
|
127
|
+
const bottlenecks = this.findBottlenecks();
|
|
128
|
+
const recommendations: string[] = [];
|
|
129
|
+
|
|
130
|
+
// Calculate average latency
|
|
131
|
+
const avgLatency = allStats.length > 0
|
|
132
|
+
? allStats.reduce((sum, s) => sum + s.avgDuration, 0) / allStats.length
|
|
133
|
+
: 0;
|
|
134
|
+
|
|
135
|
+
// Generate recommendations
|
|
136
|
+
for (const bottleneck of bottlenecks) {
|
|
137
|
+
if (bottleneck.p99Duration > 500) {
|
|
138
|
+
recommendations.push(
|
|
139
|
+
`${bottleneck.operation}: P99 latency ${bottleneck.p99Duration}ms. Consider caching or optimization.`
|
|
140
|
+
);
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
if (bottleneck.failureRate > 0.1) {
|
|
144
|
+
recommendations.push(
|
|
145
|
+
`${bottleneck.operation}: ${(bottleneck.failureRate * 100).toFixed(1)}% failure rate. Investigate error handling.`
|
|
146
|
+
);
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
if (bottleneck.maxDuration > bottleneck.avgDuration * 3) {
|
|
150
|
+
recommendations.push(
|
|
151
|
+
`${bottleneck.operation}: High variance (${bottleneck.maxDuration}ms max). Check for edge cases.`
|
|
152
|
+
);
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
return {
|
|
157
|
+
timestamp: Date.now(),
|
|
158
|
+
totalOperations: this.metrics.length,
|
|
159
|
+
averageLatency: avgLatency,
|
|
160
|
+
bottlenecks: bottlenecks.sort((a, b) => b.avgDuration - a.avgDuration),
|
|
161
|
+
recommendations,
|
|
162
|
+
};
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
/**
|
|
166
|
+
* Format report for display
|
|
167
|
+
*/
|
|
168
|
+
static formatReport(report: PerformanceReport): string {
|
|
169
|
+
let output = `\n📊 PERFORMANCE REPORT\n`;
|
|
170
|
+
output += `Generated: ${new Date(report.timestamp).toISOString()}\n`;
|
|
171
|
+
output += `Total Operations: ${report.totalOperations}\n`;
|
|
172
|
+
output += `Average Latency: ${report.averageLatency.toFixed(2)}ms\n\n`;
|
|
173
|
+
|
|
174
|
+
if (report.bottlenecks.length === 0) {
|
|
175
|
+
output += `✅ All operations performing within baseline. No bottlenecks detected.\n`;
|
|
176
|
+
return output;
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
output += `⚠️ ${report.bottlenecks.length} BOTTLENECK(S) DETECTED:\n\n`;
|
|
180
|
+
|
|
181
|
+
for (let i = 0; i < report.bottlenecks.length; i++) {
|
|
182
|
+
const b = report.bottlenecks[i];
|
|
183
|
+
output += `${i + 1}. ${b.operation}\n`;
|
|
184
|
+
output += ` Avg: ${b.avgDuration.toFixed(2)}ms | Min: ${b.minDuration.toFixed(2)}ms | Max: ${b.maxDuration.toFixed(2)}ms\n`;
|
|
185
|
+
output += ` P95: ${b.p95Duration.toFixed(2)}ms | P99: ${b.p99Duration.toFixed(2)}ms\n`;
|
|
186
|
+
output += ` Failure Rate: ${(b.failureRate * 100).toFixed(1)}%\n`;
|
|
187
|
+
output += ` Operations: ${b.count}\n\n`;
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
if (report.recommendations.length > 0) {
|
|
191
|
+
output += `💡 RECOMMENDATIONS:\n`;
|
|
192
|
+
for (const rec of report.recommendations) {
|
|
193
|
+
output += `- ${rec}\n`;
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
return output;
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
/**
|
|
201
|
+
* Clear metrics (for testing or resetting)
|
|
202
|
+
*/
|
|
203
|
+
clearMetrics(): void {
|
|
204
|
+
this.metrics = [];
|
|
205
|
+
this.operationTimers.clear();
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
/**
|
|
209
|
+
* Get metrics summary
|
|
210
|
+
*/
|
|
211
|
+
getSummary(): {
|
|
212
|
+
totalMetrics: number;
|
|
213
|
+
uniqueOperations: number;
|
|
214
|
+
timeRange: { start: number; end: number };
|
|
215
|
+
} {
|
|
216
|
+
if (this.metrics.length === 0) {
|
|
217
|
+
return {
|
|
218
|
+
totalMetrics: 0,
|
|
219
|
+
uniqueOperations: 0,
|
|
220
|
+
timeRange: { start: 0, end: 0 },
|
|
221
|
+
};
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
const timestamps = this.metrics.map(m => m.timestamp);
|
|
225
|
+
return {
|
|
226
|
+
totalMetrics: this.metrics.length,
|
|
227
|
+
uniqueOperations: new Set(this.metrics.map(m => m.operation)).size,
|
|
228
|
+
timeRange: {
|
|
229
|
+
start: Math.min(...timestamps),
|
|
230
|
+
end: Math.max(...timestamps),
|
|
231
|
+
},
|
|
232
|
+
};
|
|
233
|
+
}
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
export default PerformanceMonitoring;
|
package/src/rebase.ts
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
1
|
+
/**
 * Rebase for Trace
 * Atomic history rewriting for clean linear history
 */

/**
 * Record of one completed history-rewriting operation
 * (rebase, squash, or reorder).
 */
export interface RebaseOperation {
  /** Hash of the oldest commit included in the operation. */
  fromCommit: string;
  /** Hash of the newest commit included in the operation. */
  toCommit: string;
  /** Hash the range was re-parented onto ('' when unknown). */
  newParent: string;
  /** Epoch millis (Date.now()) when the operation was performed. */
  operationTime: number;
  /** True when at least one conflict was detected during the operation. */
  conflicts: boolean;
}
|
|
14
|
+
/**
 * A conflict reported while replaying a single commit during rebase().
 */
export interface RebaseConflict {
  /** Hash of the commit whose replay conflicted. */
  commitHash: string;
  /** Commit message of the conflicting commit. */
  message: string;
  /** Paths reported as conflicting (currently a placeholder — see rebase()). */
  conflictingFiles: string[];
}
|
|
20
|
+
export class Rebase {
|
|
21
|
+
private operations: RebaseOperation[] = [];
|
|
22
|
+
private conflictLog: RebaseConflict[] = [];
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Rebase branch onto new parent (interactive rebase)
|
|
26
|
+
*/
|
|
27
|
+
rebase(
|
|
28
|
+
commits: Map<string, any>, // hash -> commit object
|
|
29
|
+
fromHash: string,
|
|
30
|
+
toHash: string,
|
|
31
|
+
newParentHash: string
|
|
32
|
+
): { success: boolean; newHashes: Map<string, string>; conflicts: RebaseConflict[] } {
|
|
33
|
+
const newHashes = new Map<string, string>();
|
|
34
|
+
const conflicts: RebaseConflict[] = [];
|
|
35
|
+
|
|
36
|
+
// Get list of commits to rebase
|
|
37
|
+
const commitsToRebase = this.getCommitsInRange(commits, fromHash, toHash);
|
|
38
|
+
|
|
39
|
+
if (commitsToRebase.length === 0) {
|
|
40
|
+
return { success: true, newHashes, conflicts };
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
// Rebase each commit
|
|
44
|
+
let currentParent = newParentHash;
|
|
45
|
+
|
|
46
|
+
for (const commit of commitsToRebase) {
|
|
47
|
+
// In real implementation, would:
|
|
48
|
+
// 1. Apply commit changes
|
|
49
|
+
// 2. Check for merge conflicts
|
|
50
|
+
// 3. Create new commit with new parent
|
|
51
|
+
// 4. Update author/timestamp
|
|
52
|
+
|
|
53
|
+
// Simulate conflict detection
|
|
54
|
+
const hasConflict = Math.random() < 0.1; // 10% conflict rate for testing
|
|
55
|
+
|
|
56
|
+
if (hasConflict) {
|
|
57
|
+
conflicts.push({
|
|
58
|
+
commitHash: commit.hash,
|
|
59
|
+
message: commit.message,
|
|
60
|
+
conflictingFiles: ['src/file.ts'], // Would detect actual conflicts
|
|
61
|
+
});
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
// Generate new hash for rebased commit
|
|
65
|
+
const newHash = this.generateNewHash(commit, currentParent);
|
|
66
|
+
newHashes.set(commit.hash, newHash);
|
|
67
|
+
currentParent = newHash;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
const operation: RebaseOperation = {
|
|
71
|
+
fromCommit: fromHash,
|
|
72
|
+
toCommit: toHash,
|
|
73
|
+
newParent: newParentHash,
|
|
74
|
+
operationTime: Date.now(),
|
|
75
|
+
conflicts: conflicts.length > 0,
|
|
76
|
+
};
|
|
77
|
+
|
|
78
|
+
this.operations.push(operation);
|
|
79
|
+
this.conflictLog.push(...conflicts);
|
|
80
|
+
|
|
81
|
+
return {
|
|
82
|
+
success: conflicts.length === 0,
|
|
83
|
+
newHashes,
|
|
84
|
+
conflicts,
|
|
85
|
+
};
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
/**
|
|
89
|
+
* Squash commits into one
|
|
90
|
+
*/
|
|
91
|
+
squash(
|
|
92
|
+
commits: Map<string, any>,
|
|
93
|
+
fromHash: string,
|
|
94
|
+
toHash: string,
|
|
95
|
+
newMessage: string
|
|
96
|
+
): {
|
|
97
|
+
success: boolean;
|
|
98
|
+
newHash: string;
|
|
99
|
+
squashedCount: number;
|
|
100
|
+
} {
|
|
101
|
+
const toSquash = this.getCommitsInRange(commits, fromHash, toHash);
|
|
102
|
+
|
|
103
|
+
if (toSquash.length === 0) {
|
|
104
|
+
return { success: false, newHash: '', squashedCount: 0 };
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// Combine all files from commits
|
|
108
|
+
const combinedFiles = new Map<string, string>();
|
|
109
|
+
const baseCommit = commits.get(fromHash);
|
|
110
|
+
|
|
111
|
+
for (const commit of toSquash) {
|
|
112
|
+
const commitData = commits.get(commit.hash);
|
|
113
|
+
if (commitData?.files) {
|
|
114
|
+
for (const [path, content] of commitData.files) {
|
|
115
|
+
combinedFiles.set(path, content);
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
// Create new squashed commit
|
|
121
|
+
const newHash = this.generateSquashedHash(newMessage, combinedFiles);
|
|
122
|
+
|
|
123
|
+
const operation: RebaseOperation = {
|
|
124
|
+
fromCommit: fromHash,
|
|
125
|
+
toCommit: toHash,
|
|
126
|
+
newParent: baseCommit?.parent || '',
|
|
127
|
+
operationTime: Date.now(),
|
|
128
|
+
conflicts: false,
|
|
129
|
+
};
|
|
130
|
+
|
|
131
|
+
this.operations.push(operation);
|
|
132
|
+
|
|
133
|
+
return {
|
|
134
|
+
success: true,
|
|
135
|
+
newHash,
|
|
136
|
+
squashedCount: toSquash.length,
|
|
137
|
+
};
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
/**
|
|
141
|
+
* Autosquash (combine fixup! commits)
|
|
142
|
+
*/
|
|
143
|
+
autosquash(commits: Map<string, any>): {
|
|
144
|
+
success: boolean;
|
|
145
|
+
squashedGroups: number;
|
|
146
|
+
} {
|
|
147
|
+
const commitList = Array.from(commits.values());
|
|
148
|
+
let squashedGroups = 0;
|
|
149
|
+
|
|
150
|
+
// Find fixup! and squash! commits
|
|
151
|
+
const fixups = new Map<string, string[]>(); // target -> [fixup hashes]
|
|
152
|
+
|
|
153
|
+
for (const commit of commitList) {
|
|
154
|
+
if (commit.message.startsWith('fixup!')) {
|
|
155
|
+
const targetMsg = commit.message.replace('fixup! ', '');
|
|
156
|
+
const target = commitList.find(c => c.message.startsWith(targetMsg));
|
|
157
|
+
|
|
158
|
+
if (target) {
|
|
159
|
+
if (!fixups.has(target.hash)) {
|
|
160
|
+
fixups.set(target.hash, []);
|
|
161
|
+
}
|
|
162
|
+
fixups.get(target.hash)!.push(commit.hash);
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
// Apply autosquash
|
|
168
|
+
for (const [targetHash, fixupHashes] of fixups) {
|
|
169
|
+
if (fixupHashes.length > 0) {
|
|
170
|
+
this.squash(commits, targetHash, fixupHashes[fixupHashes.length - 1], '');
|
|
171
|
+
squashedGroups += fixupHashes.length;
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
return { success: true, squashedGroups };
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
/**
|
|
179
|
+
* Reorder commits (interactive)
|
|
180
|
+
*/
|
|
181
|
+
reorder(
|
|
182
|
+
commits: Map<string, any>,
|
|
183
|
+
fromHash: string,
|
|
184
|
+
order: string[] // ordered commit hashes
|
|
185
|
+
): boolean {
|
|
186
|
+
if (order.length === 0) {
|
|
187
|
+
return false;
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
// Validate all commits exist
|
|
191
|
+
for (const hash of order) {
|
|
192
|
+
if (!commits.has(hash)) {
|
|
193
|
+
return false;
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
// In real implementation, would reorder and rebase
|
|
198
|
+
const operation: RebaseOperation = {
|
|
199
|
+
fromCommit: fromHash,
|
|
200
|
+
toCommit: order[order.length - 1],
|
|
201
|
+
newParent: commits.get(fromHash)?.parent || '',
|
|
202
|
+
operationTime: Date.now(),
|
|
203
|
+
conflicts: false,
|
|
204
|
+
};
|
|
205
|
+
|
|
206
|
+
this.operations.push(operation);
|
|
207
|
+
return true;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
/**
|
|
211
|
+
* Get commits in range (from ancestor to descendant)
|
|
212
|
+
*/
|
|
213
|
+
private getCommitsInRange(
|
|
214
|
+
commits: Map<string, any>,
|
|
215
|
+
fromHash: string,
|
|
216
|
+
toHash: string
|
|
217
|
+
): any[] {
|
|
218
|
+
const result: any[] = [];
|
|
219
|
+
let current = commits.get(toHash);
|
|
220
|
+
|
|
221
|
+
while (current && current.hash !== fromHash) {
|
|
222
|
+
result.push(current);
|
|
223
|
+
current = current.parent ? commits.get(current.parent) : null;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
if (current && current.hash === fromHash) {
|
|
227
|
+
result.push(current);
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
return result.reverse();
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
/**
|
|
234
|
+
* Generate new hash for rebased commit
|
|
235
|
+
*/
|
|
236
|
+
private generateNewHash(commit: any, newParent: string): string {
|
|
237
|
+
const content = [commit.message, newParent, JSON.stringify(commit.files)].join('|');
|
|
238
|
+
const hash = require('crypto')
|
|
239
|
+
.createHash('sha256')
|
|
240
|
+
.update(content)
|
|
241
|
+
.digest('hex')
|
|
242
|
+
.substring(0, 16);
|
|
243
|
+
return hash;
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
/**
|
|
247
|
+
* Generate hash for squashed commit
|
|
248
|
+
*/
|
|
249
|
+
private generateSquashedHash(message: string, files: Map<string, string>): string {
|
|
250
|
+
const fileStr = Array.from(files.entries())
|
|
251
|
+
.map(([path, content]) => `${path}:${content}`)
|
|
252
|
+
.join('|');
|
|
253
|
+
const content = [message, fileStr].join('|');
|
|
254
|
+
const hash = require('crypto')
|
|
255
|
+
.createHash('sha256')
|
|
256
|
+
.update(content)
|
|
257
|
+
.digest('hex')
|
|
258
|
+
.substring(0, 16);
|
|
259
|
+
return hash;
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
/**
|
|
263
|
+
* Get rebase operations history
|
|
264
|
+
*/
|
|
265
|
+
getOperations(): RebaseOperation[] {
|
|
266
|
+
return [...this.operations];
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
/**
|
|
270
|
+
* Get conflict log
|
|
271
|
+
*/
|
|
272
|
+
getConflictLog(): RebaseConflict[] {
|
|
273
|
+
return [...this.conflictLog];
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
/**
|
|
277
|
+
* Abort rebase (rollback)
|
|
278
|
+
*/
|
|
279
|
+
abort(): boolean {
|
|
280
|
+
if (this.operations.length === 0) {
|
|
281
|
+
return false;
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
// Remove last operation
|
|
285
|
+
this.operations.pop();
|
|
286
|
+
return true;
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
/**
|
|
290
|
+
* Continue after conflict resolution
|
|
291
|
+
*/
|
|
292
|
+
continueAfterResolution(): boolean {
|
|
293
|
+
// In real implementation, would resume rebase after conflicts are fixed
|
|
294
|
+
return true;
|
|
295
|
+
}
|
|
296
|
+
|
|
297
|
+
/**
|
|
298
|
+
* Get stats
|
|
299
|
+
*/
|
|
300
|
+
getStats(): {
|
|
301
|
+
totalRebases: number;
|
|
302
|
+
totalConflicts: number;
|
|
303
|
+
averageCommitsPerRebase: number;
|
|
304
|
+
} {
|
|
305
|
+
return {
|
|
306
|
+
totalRebases: this.operations.length,
|
|
307
|
+
totalConflicts: this.conflictLog.length,
|
|
308
|
+
averageCommitsPerRebase:
|
|
309
|
+
this.operations.length > 0
|
|
310
|
+
? Math.round(
|
|
311
|
+
this.operations.reduce((sum, op) => sum + (op.conflicts ? 1 : 0), 0) /
|
|
312
|
+
this.operations.length
|
|
313
|
+
)
|
|
314
|
+
: 0,
|
|
315
|
+
};
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
/**
|
|
319
|
+
* Clear history (for testing)
|
|
320
|
+
*/
|
|
321
|
+
clearHistory(): void {
|
|
322
|
+
this.operations = [];
|
|
323
|
+
this.conflictLog = [];
|
|
324
|
+
}
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
export default Rebase;
|