@exaudeus/workrail 3.11.2 → 3.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/console/assets/index-DW78t31j.css +1 -0
- package/dist/console/assets/index-EsSXrC_a.js +28 -0
- package/dist/console/index.html +2 -2
- package/dist/di/container.js +8 -0
- package/dist/di/tokens.d.ts +1 -0
- package/dist/di/tokens.js +1 -0
- package/dist/infrastructure/session/HttpServer.js +2 -14
- package/dist/manifest.json +139 -91
- package/dist/mcp/boundary-coercion.d.ts +2 -0
- package/dist/mcp/boundary-coercion.js +73 -0
- package/dist/mcp/handler-factory.d.ts +1 -1
- package/dist/mcp/handler-factory.js +13 -6
- package/dist/mcp/handlers/shared/request-workflow-reader.d.ts +10 -2
- package/dist/mcp/handlers/shared/request-workflow-reader.js +27 -10
- package/dist/mcp/handlers/shared/workflow-source-visibility.d.ts +3 -1
- package/dist/mcp/handlers/shared/workflow-source-visibility.js +7 -3
- package/dist/mcp/handlers/v2-execution/replay.js +25 -1
- package/dist/mcp/handlers/v2-execution/start.js +23 -17
- package/dist/mcp/handlers/v2-manage-workflow-source.d.ts +7 -0
- package/dist/mcp/handlers/v2-manage-workflow-source.js +50 -0
- package/dist/mcp/handlers/v2-workflow.js +123 -8
- package/dist/mcp/output-schemas.d.ts +393 -0
- package/dist/mcp/output-schemas.js +49 -1
- package/dist/mcp/server.js +2 -0
- package/dist/mcp/tool-descriptions.js +20 -0
- package/dist/mcp/tools.js +6 -0
- package/dist/mcp/types/tool-description-types.d.ts +1 -1
- package/dist/mcp/types/tool-description-types.js +1 -0
- package/dist/mcp/types/workflow-tool-edition.d.ts +1 -1
- package/dist/mcp/types.d.ts +2 -0
- package/dist/mcp/v2/tool-registry.js +8 -0
- package/dist/mcp/v2/tools.d.ts +15 -0
- package/dist/mcp/v2/tools.js +8 -1
- package/dist/v2/durable-core/constants.d.ts +1 -0
- package/dist/v2/durable-core/constants.js +2 -1
- package/dist/v2/durable-core/domain/observation-builder.d.ts +4 -1
- package/dist/v2/durable-core/domain/observation-builder.js +9 -0
- package/dist/v2/durable-core/schemas/export-bundle/index.d.ts +76 -16
- package/dist/v2/durable-core/schemas/session/events.d.ts +26 -5
- package/dist/v2/durable-core/schemas/session/events.js +2 -1
- package/dist/v2/infra/in-memory/managed-source-store/index.d.ts +8 -0
- package/dist/v2/infra/in-memory/managed-source-store/index.js +33 -0
- package/dist/v2/infra/local/data-dir/index.d.ts +2 -0
- package/dist/v2/infra/local/data-dir/index.js +6 -0
- package/dist/v2/infra/local/managed-source-store/index.d.ts +15 -0
- package/dist/v2/infra/local/managed-source-store/index.js +164 -0
- package/dist/v2/infra/local/session-summary-provider/index.js +2 -0
- package/dist/v2/infra/local/workspace-anchor/index.js +1 -0
- package/dist/v2/ports/data-dir.port.d.ts +2 -0
- package/dist/v2/ports/managed-source-store.port.d.ts +25 -0
- package/dist/v2/ports/managed-source-store.port.js +2 -0
- package/dist/v2/ports/workspace-anchor.port.d.ts +3 -0
- package/dist/v2/projections/resume-ranking.d.ts +1 -0
- package/dist/v2/usecases/console-routes.js +26 -0
- package/dist/v2/usecases/console-service.js +25 -6
- package/dist/v2/usecases/console-types.d.ts +22 -1
- package/dist/v2/usecases/worktree-service.d.ts +10 -0
- package/dist/v2/usecases/worktree-service.js +136 -0
- package/package.json +1 -1
- package/workflows/adaptive-ticket-creation.json +276 -282
- package/workflows/architecture-scalability-audit.json +317 -0
- package/workflows/document-creation-workflow.json +70 -191
- package/workflows/documentation-update-workflow.json +59 -309
- package/workflows/intelligent-test-case-generation.json +37 -212
- package/workflows/personal-learning-materials-creation-branched.json +1 -21
- package/workflows/presentation-creation.json +143 -308
- package/workflows/relocation-workflow-us.json +161 -535
- package/workflows/routines/tension-driven-design.json +5 -5
- package/workflows/scoped-documentation-workflow.json +110 -181
- package/workflows/workflow-for-workflows.v2.json +21 -5
- package/dist/console/assets/index-C5C4nDs4.css +0 -1
- package/dist/console/assets/index-CSUqsoQl.js +0 -28
- package/workflows/CHANGELOG-bug-investigation.md +0 -298
- package/workflows/bug-investigation.agentic.json +0 -212
- package/workflows/bug-investigation.json +0 -112
- package/workflows/mr-review-workflow.agentic.json +0 -538
- package/workflows/mr-review-workflow.json +0 -277
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.LocalManagedSourceStoreV2 = void 0;
|
|
7
|
+
const path_1 = __importDefault(require("path"));
|
|
8
|
+
const zod_1 = require("zod");
|
|
9
|
+
const neverthrow_1 = require("neverthrow");
|
|
10
|
+
const MANAGED_SOURCE_LOCK_RETRY_MS = 250;
|
|
11
|
+
const jcs_js_1 = require("../../../durable-core/canonical/jcs.js");
|
|
12
|
+
// Zod schema for one persisted managed-source record: the source's path and
// the epoch-milliseconds timestamp at which it was attached.
const ManagedSourceRecordSchema = zod_1.z.object({
    path: zod_1.z.string(),
    addedAtMs: zod_1.z.number().int().nonnegative(),
});
// Versioned envelope for the on-disk managed-sources JSON file.
// `v: 1` is a literal so future format revisions fail validation loudly
// instead of being silently misread.
const ManagedSourcesFileSchema = zod_1.z.object({
    v: zod_1.z.literal(1),
    sources: zod_1.z.array(ManagedSourceRecordSchema),
});
|
|
20
|
+
/**
 * Translate a filesystem-port error into a ManagedSourceStoreError.
 *
 * FS_ALREADY_EXISTS is interpreted as lock contention (another process
 * currently holds the managed-sources lock) and becomes a retryable
 * MANAGED_SOURCE_BUSY; every other failure becomes MANAGED_SOURCE_IO_ERROR.
 * Callers that know the concrete lock path override the placeholder
 * `lockPath` value returned here.
 */
function mapFsToManagedSourceError(e) {
    const isLockContention = e.code === 'FS_ALREADY_EXISTS';
    if (!isLockContention) {
        return { code: 'MANAGED_SOURCE_IO_ERROR', message: e.message };
    }
    return {
        code: 'MANAGED_SOURCE_BUSY',
        message: 'Managed sources are being updated by another WorkRail process.',
        retry: { kind: 'retryable_after_ms', afterMs: MANAGED_SOURCE_LOCK_RETRY_MS },
        lockPath: 'managed-sources.lock',
    };
}
|
|
31
|
+
/**
 * Normalize managed-source records: resolve every path to an absolute path
 * and drop duplicates, keeping the first occurrence (and its addedAtMs) of
 * each resolved path. Input order is otherwise preserved.
 */
function normalizeRecords(sources) {
    const byResolvedPath = new Map();
    for (const record of sources) {
        const absolutePath = path_1.default.resolve(record.path);
        if (!byResolvedPath.has(absolutePath)) {
            byResolvedPath.set(absolutePath, { path: absolutePath, addedAtMs: record.addedAtMs });
        }
    }
    return [...byResolvedPath.values()];
}
|
|
43
|
+
/**
 * Filesystem-backed implementation of the managed-source store port.
 *
 * Persists the set of attached workflow source paths as a single
 * canonical-JSON file under the data directory. Mutations are serialized
 * across WorkRail processes with an exclusive lock file, and writes use a
 * write-temp-then-rename protocol (with fsyncs) for atomicity/durability.
 *
 * @param dataDir - port exposing managedSourcesPath() / managedSourcesLockPath()
 * @param fs - filesystem port (readFileUtf8, mkdirp, openExclusive, openWriteTruncate,
 *             writeAll, fsyncFile, fsyncDir, closeFile, rename, unlink)
 */
class LocalManagedSourceStoreV2 {
    constructor(dataDir, fs) {
        this.dataDir = dataDir;
        this.fs = fs;
    }
    /** List all recorded managed sources (resolved and deduplicated). */
    list() {
        return this.readSources();
    }
    /**
     * Attach a source path under the lock. The path is resolved to an
     * absolute path first; attaching an already-present path succeeds
     * without rewriting the file (idempotent).
     */
    attach(sourcePath) {
        const normalizedPath = path_1.default.resolve(sourcePath);
        const nowMs = Date.now();
        return this.withLock(() => this.readSources().andThen((sources) => {
            const alreadyPresent = sources.some((s) => s.path === normalizedPath);
            if (alreadyPresent)
                return (0, neverthrow_1.okAsync)(undefined);
            const next = [...sources, { path: normalizedPath, addedAtMs: nowMs }];
            return this.persist(next);
        }));
    }
    /**
     * Detach a source path under the lock. Detaching a path that is not
     * present succeeds without rewriting the file (idempotent).
     */
    detach(sourcePath) {
        const normalizedPath = path_1.default.resolve(sourcePath);
        return this.withLock(() => this.readSources().andThen((sources) => {
            const next = sources.filter((s) => s.path !== normalizedPath);
            if (next.length === sources.length)
                return (0, neverthrow_1.okAsync)(undefined);
            return this.persist(next);
        }));
    }
    /**
     * Read and validate the managed-sources file.
     * A missing file (FS_NOT_FOUND) is treated as an empty list; invalid
     * JSON or an unexpected shape maps to MANAGED_SOURCE_CORRUPTION.
     * Records are normalized (resolved + deduplicated) before returning.
     */
    readSources() {
        const filePath = this.dataDir.managedSourcesPath();
        return this.fs.readFileUtf8(filePath)
            .orElse((e) => {
            // First run: no file yet — treat as empty content, not an error.
            if (e.code === 'FS_NOT_FOUND')
                return (0, neverthrow_1.okAsync)('');
            return (0, neverthrow_1.errAsync)(mapFsToManagedSourceError(e));
        })
            .andThen((raw) => {
            if (raw === '')
                return (0, neverthrow_1.okAsync)([]);
            let parsed;
            try {
                parsed = JSON.parse(raw);
            }
            catch {
                return (0, neverthrow_1.errAsync)({
                    code: 'MANAGED_SOURCE_CORRUPTION',
                    message: `Invalid JSON in managed sources file: ${filePath}`,
                });
            }
            const validated = ManagedSourcesFileSchema.safeParse(parsed);
            if (!validated.success) {
                return (0, neverthrow_1.errAsync)({
                    code: 'MANAGED_SOURCE_CORRUPTION',
                    message: `Managed sources file has invalid shape: ${filePath}`,
                });
            }
            return (0, neverthrow_1.okAsync)(normalizeRecords(validated.data.sources));
        });
    }
    /**
     * Atomically persist `sources`: canonicalize (JCS) -> write to a `.tmp`
     * sibling -> fsync -> rename over the real file -> fsync the directory.
     * Canonicalization failure is surfaced as MANAGED_SOURCE_IO_ERROR.
     * NOTE(review): a failed write can leave the .tmp file behind; it is
     * truncated on the next persist, so this appears benign — confirm.
     */
    persist(sources) {
        const filePath = this.dataDir.managedSourcesPath();
        const dir = path_1.default.dirname(filePath);
        const tmpPath = `${filePath}.tmp`;
        const fileValue = {
            v: 1,
            sources: [...normalizeRecords(sources)],
        };
        const canonical = (0, jcs_js_1.toCanonicalBytes)(fileValue).mapErr((e) => ({
            code: 'MANAGED_SOURCE_IO_ERROR',
            message: `Failed to canonicalize managed sources state: ${e.message}`,
        }));
        if (canonical.isErr())
            return (0, neverthrow_1.errAsync)(canonical.error);
        const bytes = canonical.value;
        return this.fs.mkdirp(dir)
            .mapErr(mapFsToManagedSourceError)
            .andThen(() => this.fs.openWriteTruncate(tmpPath).mapErr(mapFsToManagedSourceError))
            .andThen(({ fd }) => this.fs.writeAll(fd, bytes)
            .mapErr(mapFsToManagedSourceError)
            .andThen(() => this.fs.fsyncFile(fd).mapErr(mapFsToManagedSourceError))
            .andThen(() => this.fs.closeFile(fd).mapErr(mapFsToManagedSourceError))
            // On write/fsync failure, still close the fd but surface the
            // original error, not the close result.
            .orElse((e) => this.fs.closeFile(fd)
            .mapErr(() => e)
            .andThen(() => (0, neverthrow_1.errAsync)(e))))
            .andThen(() => this.fs.rename(tmpPath, filePath).mapErr(mapFsToManagedSourceError))
            .andThen(() => this.fs.fsyncDir(dir).mapErr(mapFsToManagedSourceError));
    }
    /**
     * Run `run` while holding the exclusive lock file. An already-existing
     * lock surfaces as MANAGED_SOURCE_BUSY with the concrete lock path
     * substituted in. The lock file is unlinked on both the success and
     * failure paths of `run` (a missing lock at cleanup is tolerated).
     * NOTE(review): if fsyncFile on the freshly created lock fails, the lock
     * file is not unlinked here, and the fd is not closed — confirm whether
     * stale-lock recovery exists elsewhere.
     */
    withLock(run) {
        const lockPath = this.dataDir.managedSourcesLockPath();
        const dir = path_1.default.dirname(lockPath);
        // Lock payload records the owning pid for debuggability.
        const lockBytes = new TextEncoder().encode(JSON.stringify({ v: 1, pid: process.pid }));
        return this.fs.mkdirp(dir)
            .mapErr(mapFsToManagedSourceError)
            .andThen(() => this.fs.openExclusive(lockPath, lockBytes)
            .mapErr((e) => {
            const mapped = mapFsToManagedSourceError(e);
            if (mapped.code === 'MANAGED_SOURCE_BUSY') {
                return { ...mapped, lockPath };
            }
            return mapped;
        }))
            .andThen(({ fd }) => this.fs.fsyncFile(fd)
            .mapErr(mapFsToManagedSourceError)
            .andThen(() => this.fs.closeFile(fd).mapErr(mapFsToManagedSourceError))
            .andThen(() => run())
            .andThen((value) => this.fs.unlink(lockPath)
            .orElse((e) => {
            if (e.code === 'FS_NOT_FOUND')
                return (0, neverthrow_1.okAsync)(undefined);
            return (0, neverthrow_1.errAsync)(mapFsToManagedSourceError(e));
        })
            .map(() => value))
            .orElse((error) => this.fs.unlink(lockPath)
            .orElse((e) => {
            if (e.code === 'FS_NOT_FOUND')
                return (0, neverthrow_1.okAsync)(undefined);
            return (0, neverthrow_1.errAsync)(mapFsToManagedSourceError(e));
        })
            .andThen(() => (0, neverthrow_1.errAsync)(error))));
    }
}
|
|
164
|
+
exports.LocalManagedSourceStoreV2 = LocalManagedSourceStoreV2;
|
|
@@ -17,6 +17,7 @@ const EMPTY_OBSERVATIONS = {
|
|
|
17
17
|
gitHeadSha: null,
|
|
18
18
|
gitBranch: null,
|
|
19
19
|
repoRootHash: null,
|
|
20
|
+
repoRoot: null,
|
|
20
21
|
};
|
|
21
22
|
const TITLE_CONTEXT_KEYS = ['goal', 'taskDescription', 'mrTitle', 'prTitle', 'ticketTitle', 'problem'];
|
|
22
23
|
class LocalSessionSummaryProviderV2 {
|
|
@@ -131,6 +132,7 @@ function extractObservations(events) {
|
|
|
131
132
|
case 'git_head_sha': return { ...acc, gitHeadSha: e.data.value.value };
|
|
132
133
|
case 'git_branch': return { ...acc, gitBranch: e.data.value.value };
|
|
133
134
|
case 'repo_root_hash': return { ...acc, repoRootHash: e.data.value.value };
|
|
135
|
+
case 'repo_root': return { ...acc, repoRoot: e.data.value.value };
|
|
134
136
|
}
|
|
135
137
|
}, EMPTY_OBSERVATIONS);
|
|
136
138
|
}
|
|
@@ -40,6 +40,7 @@ class LocalWorkspaceAnchorV2 {
|
|
|
40
40
|
if (repoRootHash) {
|
|
41
41
|
anchors.push({ key: 'repo_root_hash', value: repoRootHash });
|
|
42
42
|
}
|
|
43
|
+
anchors.push({ key: 'repo_root', value: repoRoot });
|
|
43
44
|
const branch = await this.gitCommand('git rev-parse --abbrev-ref HEAD', cwd);
|
|
44
45
|
if (branch && branch !== 'HEAD') {
|
|
45
46
|
anchors.push({ key: 'git_branch', value: branch });
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import type { ResultAsync } from 'neverthrow';
|
|
2
|
+
/**
 * Errors surfaced by the managed-source store port.
 * Discriminated on `code`.
 */
export type ManagedSourceStoreError = {
    /** Another process holds the managed-sources lock. */
    readonly code: 'MANAGED_SOURCE_BUSY';
    readonly message: string;
    /** Callers may retry after `afterMs` milliseconds. */
    readonly retry: {
        readonly kind: 'retryable_after_ms';
        readonly afterMs: number;
    };
    /** Path of the lock file that blocked the operation. */
    readonly lockPath: string;
} | {
    /** Underlying filesystem failure (read/write/rename/fsync). */
    readonly code: 'MANAGED_SOURCE_IO_ERROR';
    readonly message: string;
} | {
    /** The on-disk file exists but is invalid JSON or has an unexpected shape. */
    readonly code: 'MANAGED_SOURCE_CORRUPTION';
    readonly message: string;
};
/** One externally managed workflow source registered with WorkRail. */
export interface ManagedSourceRecordV2 {
    /** Filesystem path of the source (the local store resolves these to absolute paths). */
    readonly path: string;
    /** Epoch milliseconds at which the source was attached. */
    readonly addedAtMs: number;
}
/** Port for listing, attaching, and detaching managed workflow sources. */
export interface ManagedSourceStorePortV2 {
    list(): ResultAsync<readonly ManagedSourceRecordV2[], ManagedSourceStoreError>;
    attach(path: string): ResultAsync<void, ManagedSourceStoreError>;
    detach(path: string): ResultAsync<void, ManagedSourceStoreError>;
}
|
|
@@ -7,6 +7,7 @@ exports.mountConsoleRoutes = mountConsoleRoutes;
|
|
|
7
7
|
const express_1 = __importDefault(require("express"));
|
|
8
8
|
const path_1 = __importDefault(require("path"));
|
|
9
9
|
const fs_1 = __importDefault(require("fs"));
|
|
10
|
+
const worktree_service_js_1 = require("./worktree-service.js");
|
|
10
11
|
function resolveConsoleDist() {
|
|
11
12
|
const releasedDist = path_1.default.join(__dirname, '../../console');
|
|
12
13
|
if (fs_1.default.existsSync(releasedDist))
|
|
@@ -24,6 +25,31 @@ function mountConsoleRoutes(app, consoleService) {
|
|
|
24
25
|
const result = await consoleService.getSessionList();
|
|
25
26
|
result.match((data) => res.json({ success: true, data }), (error) => res.status(500).json({ success: false, error: error.message }));
|
|
26
27
|
});
|
|
28
|
+
let cwdRepoRootPromise = null;
|
|
29
|
+
const REPO_ROOTS_TTL_MS = 60000;
|
|
30
|
+
let cachedRepoRoots = [];
|
|
31
|
+
let repoRootsExpiresAt = 0;
|
|
32
|
+
app.get('/api/v2/worktrees', async (_req, res) => {
|
|
33
|
+
try {
|
|
34
|
+
const sessionResult = await consoleService.getSessionList();
|
|
35
|
+
const sessions = sessionResult.isOk() ? sessionResult.value.sessions : [];
|
|
36
|
+
const activeSessions = (0, worktree_service_js_1.buildActiveSessionCounts)(sessions);
|
|
37
|
+
if (Date.now() > repoRootsExpiresAt) {
|
|
38
|
+
cwdRepoRootPromise ?? (cwdRepoRootPromise = (0, worktree_service_js_1.resolveRepoRoot)(process.cwd()));
|
|
39
|
+
const cwdRoot = await cwdRepoRootPromise;
|
|
40
|
+
const repoRootSet = new Set(sessions.map(s => s.repoRoot).filter((r) => r !== null));
|
|
41
|
+
if (cwdRoot !== null)
|
|
42
|
+
repoRootSet.add(cwdRoot);
|
|
43
|
+
cachedRepoRoots = [...repoRootSet];
|
|
44
|
+
repoRootsExpiresAt = Date.now() + REPO_ROOTS_TTL_MS;
|
|
45
|
+
}
|
|
46
|
+
const data = await (0, worktree_service_js_1.getWorktreeList)(cachedRepoRoots, activeSessions);
|
|
47
|
+
res.json({ success: true, data });
|
|
48
|
+
}
|
|
49
|
+
catch (e) {
|
|
50
|
+
res.status(500).json({ success: false, error: e instanceof Error ? e.message : String(e) });
|
|
51
|
+
}
|
|
52
|
+
});
|
|
27
53
|
app.get('/api/v2/sessions/:sessionId', async (req, res) => {
|
|
28
54
|
const { sessionId } = req.params;
|
|
29
55
|
const result = await consoleService.getSessionDetail(sessionId);
|
|
@@ -14,6 +14,7 @@ const run_context_js_1 = require("../projections/run-context.js");
|
|
|
14
14
|
const constants_js_1 = require("../durable-core/constants.js");
|
|
15
15
|
const index_js_1 = require("../durable-core/ids/index.js");
|
|
16
16
|
const MAX_SESSIONS_TO_LOAD = 500;
|
|
17
|
+
const DORMANCY_THRESHOLD_MS = 3 * 24 * 60 * 60 * 1000;
|
|
17
18
|
class ConsoleService {
|
|
18
19
|
constructor(ports) {
|
|
19
20
|
this.ports = ports;
|
|
@@ -91,13 +92,14 @@ class ConsoleService {
|
|
|
91
92
|
});
|
|
92
93
|
}
|
|
93
94
|
collectSessionSummaries(sessionIds, mtimeBySessionId) {
|
|
94
|
-
const
|
|
95
|
+
const nowMs = Date.now();
|
|
96
|
+
const tasks = sessionIds.map((id) => this.loadSessionSummary(id, mtimeBySessionId.get(id) ?? 0, nowMs));
|
|
95
97
|
return neverthrow_1.ResultAsync.combine(tasks).map((results) => {
|
|
96
98
|
const sessions = results.filter((s) => s !== null);
|
|
97
99
|
return { sessions, totalCount: sessions.length };
|
|
98
100
|
});
|
|
99
101
|
}
|
|
100
|
-
loadSessionSummary(sessionId, lastModifiedMs) {
|
|
102
|
+
loadSessionSummary(sessionId, lastModifiedMs, nowMs) {
|
|
101
103
|
return this.ports.sessionStore
|
|
102
104
|
.load(sessionId)
|
|
103
105
|
.andThen((truth) => {
|
|
@@ -108,7 +110,7 @@ class ConsoleService {
|
|
|
108
110
|
return neverthrow_1.ResultAsync.combine([
|
|
109
111
|
resolveRunCompletion(truth.events, this.ports.snapshotStore),
|
|
110
112
|
workflowNamesRA,
|
|
111
|
-
]).map(([completionMap, workflowNames]) => projectSessionSummary(sessionId, truth, completionMap, workflowNames, lastModifiedMs));
|
|
113
|
+
]).map(([completionMap, workflowNames]) => projectSessionSummary(sessionId, truth, completionMap, workflowNames, lastModifiedMs, nowMs));
|
|
112
114
|
})
|
|
113
115
|
.orElse(() => (0, neverthrow_2.okAsync)(null));
|
|
114
116
|
}
|
|
@@ -313,12 +315,22 @@ function extractGitBranch(events) {
|
|
|
313
315
|
}
|
|
314
316
|
return null;
|
|
315
317
|
}
|
|
318
|
+
function extractRepoRoot(events) {
|
|
319
|
+
for (const e of events) {
|
|
320
|
+
if (e.kind !== constants_js_1.EVENT_KIND.OBSERVATION_RECORDED)
|
|
321
|
+
continue;
|
|
322
|
+
if (e.data.key === 'repo_root') {
|
|
323
|
+
return e.data.value.value;
|
|
324
|
+
}
|
|
325
|
+
}
|
|
326
|
+
return null;
|
|
327
|
+
}
|
|
316
328
|
function truncateTitle(text, maxLen = 120) {
|
|
317
329
|
if (text.length <= maxLen)
|
|
318
330
|
return text;
|
|
319
331
|
return text.slice(0, maxLen - 1) + '…';
|
|
320
332
|
}
|
|
321
|
-
function projectSessionSummary(sessionId, truth, completionByRunId, workflowNames, lastModifiedMs) {
|
|
333
|
+
function projectSessionSummary(sessionId, truth, completionByRunId, workflowNames, lastModifiedMs, nowMs) {
|
|
322
334
|
const { events } = truth;
|
|
323
335
|
const health = (0, session_health_js_1.projectSessionHealthV2)(truth);
|
|
324
336
|
if (health.isErr())
|
|
@@ -332,9 +344,11 @@ function projectSessionSummary(sessionId, truth, completionByRunId, workflowName
|
|
|
332
344
|
const gapsRes = (0, gaps_js_1.projectGapsV2)(events);
|
|
333
345
|
const sessionTitle = deriveSessionTitle(events);
|
|
334
346
|
const gitBranch = extractGitBranch(events);
|
|
347
|
+
const repoRoot = extractRepoRoot(events);
|
|
335
348
|
const runs = Object.values(dag.runsById);
|
|
336
349
|
const run = runs[0];
|
|
337
350
|
if (!run) {
|
|
351
|
+
const noRunStatus = nowMs - lastModifiedMs > DORMANCY_THRESHOLD_MS ? 'dormant' : 'in_progress';
|
|
338
352
|
return {
|
|
339
353
|
sessionId,
|
|
340
354
|
sessionTitle,
|
|
@@ -342,7 +356,7 @@ function projectSessionSummary(sessionId, truth, completionByRunId, workflowName
|
|
|
342
356
|
workflowName: null,
|
|
343
357
|
workflowHash: null,
|
|
344
358
|
runId: null,
|
|
345
|
-
status:
|
|
359
|
+
status: noRunStatus,
|
|
346
360
|
health: sessionHealth,
|
|
347
361
|
nodeCount: 0,
|
|
348
362
|
edgeCount: 0,
|
|
@@ -350,6 +364,7 @@ function projectSessionSummary(sessionId, truth, completionByRunId, workflowName
|
|
|
350
364
|
hasUnresolvedGaps: false,
|
|
351
365
|
recapSnippet: null,
|
|
352
366
|
gitBranch,
|
|
367
|
+
repoRoot,
|
|
353
368
|
lastModifiedMs,
|
|
354
369
|
};
|
|
355
370
|
}
|
|
@@ -358,7 +373,10 @@ function projectSessionSummary(sessionId, truth, completionByRunId, workflowName
|
|
|
358
373
|
const workflowHash = workflow.kind === 'with_workflow' ? workflow.workflowHash : null;
|
|
359
374
|
const workflowName = workflowHash ? (workflowNames[workflowHash] ?? null) : null;
|
|
360
375
|
const statusSignals = statusRes.isOk() ? statusRes.value.byRunId[run.runId] : undefined;
|
|
361
|
-
const
|
|
376
|
+
const runStatus = deriveRunStatus(statusSignals?.isBlocked ?? false, statusSignals?.hasUnresolvedCriticalGaps ?? false, completionByRunId[run.runId] ?? false);
|
|
377
|
+
const status = runStatus === 'in_progress' && nowMs - lastModifiedMs > DORMANCY_THRESHOLD_MS
|
|
378
|
+
? 'dormant'
|
|
379
|
+
: runStatus;
|
|
362
380
|
const hasUnresolvedGaps = gapsRes.isOk()
|
|
363
381
|
? Object.keys(gapsRes.value.unresolvedCriticalByRunId).length > 0
|
|
364
382
|
: false;
|
|
@@ -389,6 +407,7 @@ function projectSessionSummary(sessionId, truth, completionByRunId, workflowName
|
|
|
389
407
|
hasUnresolvedGaps,
|
|
390
408
|
recapSnippet,
|
|
391
409
|
gitBranch,
|
|
410
|
+
repoRoot,
|
|
392
411
|
lastModifiedMs,
|
|
393
412
|
};
|
|
394
413
|
}
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
export type ConsoleRunStatus = 'in_progress' | 'complete' | 'complete_with_gaps' | 'blocked';
|
|
2
|
+
export type ConsoleSessionStatus = ConsoleRunStatus | 'dormant';
|
|
2
3
|
export type ConsoleSessionHealth = 'healthy' | 'corrupt';
|
|
3
4
|
export interface ConsoleSessionSummary {
|
|
4
5
|
readonly sessionId: string;
|
|
@@ -7,7 +8,7 @@ export interface ConsoleSessionSummary {
|
|
|
7
8
|
readonly workflowName: string | null;
|
|
8
9
|
readonly workflowHash: string | null;
|
|
9
10
|
readonly runId: string | null;
|
|
10
|
-
readonly status:
|
|
11
|
+
readonly status: ConsoleSessionStatus;
|
|
11
12
|
readonly health: ConsoleSessionHealth;
|
|
12
13
|
readonly nodeCount: number;
|
|
13
14
|
readonly edgeCount: number;
|
|
@@ -15,6 +16,7 @@ export interface ConsoleSessionSummary {
|
|
|
15
16
|
readonly hasUnresolvedGaps: boolean;
|
|
16
17
|
readonly recapSnippet: string | null;
|
|
17
18
|
readonly gitBranch: string | null;
|
|
19
|
+
readonly repoRoot: string | null;
|
|
18
20
|
readonly lastModifiedMs: number;
|
|
19
21
|
}
|
|
20
22
|
export interface ConsoleSessionListResponse {
|
|
@@ -81,6 +83,25 @@ export interface ConsoleArtifact {
|
|
|
81
83
|
readonly byteLength: number;
|
|
82
84
|
readonly content: unknown;
|
|
83
85
|
}
|
|
86
|
+
/** One git worktree with enrichment data for the console UI. */
export interface ConsoleWorktreeSummary {
    /** Absolute path of the worktree checkout. */
    readonly path: string;
    /** Directory basename of `path`, used as a display name. */
    readonly name: string;
    /** Checked-out branch name, or null for a detached HEAD. */
    readonly branch: string | null;
    readonly headHash: string;
    /** Subject line of the HEAD commit. */
    readonly headMessage: string;
    /** HEAD commit timestamp in epoch milliseconds (0 when unavailable). */
    readonly headTimestampMs: number;
    /** Number of changed entries in the working tree. */
    readonly changedCount: number;
    /** Commits ahead of the upstream default branch (0 when unknown). */
    readonly aheadCount: number;
    /** In-progress WorkRail sessions correlated to this worktree's branch. */
    readonly activeSessionCount: number;
}
/** All worktrees belonging to one repository. */
export interface ConsoleRepoWorktrees {
    /** Directory basename of `repoRoot`, used as a display name. */
    readonly repoName: string;
    /** Absolute path of the repository's main checkout root. */
    readonly repoRoot: string;
    readonly worktrees: readonly ConsoleWorktreeSummary[];
}
/** Response payload for the console worktree listing endpoint. */
export interface ConsoleWorktreeListResponse {
    readonly repos: readonly ConsoleRepoWorktrees[];
}
|
|
84
105
|
export interface ConsoleNodeDetail {
|
|
85
106
|
readonly nodeId: string;
|
|
86
107
|
readonly nodeKind: 'step' | 'checkpoint' | 'blocked_attempt';
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { ConsoleWorktreeListResponse, ConsoleSessionStatus } from './console-types.js';
|
|
2
|
+
/**
 * Resolve the git repository root containing `path`.
 * Resolves to null when the path is not inside a repository or git fails.
 */
export declare function resolveRepoRoot(path: string): Promise<string | null>;
/** Counts of active sessions keyed by git branch name. */
export interface ActiveSessionsByBranch {
    readonly counts: ReadonlyMap<string, number>;
}
/**
 * Tally in-progress sessions per git branch; sessions without a branch
 * are ignored.
 */
export declare function buildActiveSessionCounts(sessions: ReadonlyArray<{
    gitBranch: string | null;
    status: ConsoleSessionStatus;
}>): ActiveSessionsByBranch;
/** Build the full worktree listing across the given repository roots. */
export declare function getWorktreeList(repoRoots: readonly string[], activeSessions: ActiveSessionsByBranch): Promise<ConsoleWorktreeListResponse>;
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.resolveRepoRoot = resolveRepoRoot;
|
|
4
|
+
exports.buildActiveSessionCounts = buildActiveSessionCounts;
|
|
5
|
+
exports.getWorktreeList = getWorktreeList;
|
|
6
|
+
const child_process_1 = require("child_process");
|
|
7
|
+
const util_1 = require("util");
|
|
8
|
+
const path_1 = require("path");
|
|
9
|
+
const execFileAsync = (0, util_1.promisify)(child_process_1.execFile);
|
|
10
|
+
const GIT_TIMEOUT_MS = 5000;
|
|
11
|
+
/**
 * Heuristic: true when `e` looks like a child_process exec failure — an
 * Error object carrying a `killed` property — as opposed to an unexpected
 * programming error that should propagate.
 */
function isExecError(e) {
    if (!(e instanceof Error)) {
        return false;
    }
    return 'killed' in e;
}
|
|
14
|
+
/**
 * Run a git command in `cwd` and return its trimmed stdout.
 * Exec-style failures (non-zero exit, timeout — anything matching
 * isExecError) resolve to null; any other error is rethrown.
 */
async function git(cwd, args) {
    const options = {
        cwd,
        encoding: 'utf-8',
        timeout: GIT_TIMEOUT_MS,
    };
    try {
        const result = await execFileAsync('git', [...args], options);
        return result.stdout.trim();
    }
    catch (e) {
        if (!isExecError(e)) {
            throw e;
        }
        return null;
    }
}
|
|
29
|
+
/**
 * Parse `git worktree list --porcelain` output into entries of
 * { path, head, branch }. Blocks lacking a `worktree` or `HEAD` line are
 * skipped; detached worktrees (no `branch` line) yield branch = null.
 */
function parseWorktreePorcelain(raw) {
    const blocks = raw.split(/\n\n+/);
    const entries = [];
    for (const block of blocks) {
        const lines = block.trim().split('\n');
        const findValue = (prefix) => {
            const match = lines.find((line) => line.startsWith(prefix));
            return match === undefined ? null : match.slice(prefix.length).trim();
        };
        const path = findValue('worktree ');
        const head = findValue('HEAD ');
        if (path === null || head === null) {
            continue;
        }
        // The branch line is `branch refs/heads/<name>`; strip the fixed
        // ref prefix to recover the short branch name.
        const branchLine = lines.find((line) => line.startsWith('branch '));
        const branch = branchLine === undefined
            ? null
            : branchLine.slice('branch refs/heads/'.length).trim();
        entries.push({ path, head, branch });
    }
    return entries;
}
|
|
47
|
+
/**
 * Collect commit/status details for one worktree via three parallel git
 * calls. Any individual git failure degrades gracefully: the hash falls
 * back to a 7-char prefix of the porcelain HEAD, the message to '', and
 * the counts to 0.
 * NOTE(review): the ahead count is hardcoded against origin/main — repos
 * whose default branch differs will always report 0; confirm acceptable.
 */
async function enrichWorktree(wt) {
    const logTask = git(wt.path, ['log', '-1', '--format=%h%n%s%n%ct']);
    const statusTask = git(wt.path, ['status', '--short']);
    const aheadTask = git(wt.path, ['rev-list', '--count', 'origin/main..HEAD']);
    const [logRaw, statusRaw, aheadRaw] = await Promise.all([logTask, statusTask, aheadTask]);
    const logLines = logRaw?.split('\n') ?? [];
    const headHash = logLines[0]?.trim() || wt.head.slice(0, 7);
    const headMessage = logLines[1]?.trim() ?? '';
    const timestampLine = logLines[2];
    // Git reports seconds; the console expects milliseconds.
    const headTimestampMs = timestampLine ? parseInt(timestampLine.trim(), 10) * 1000 : 0;
    let changedCount = 0;
    if (statusRaw !== null) {
        changedCount = statusRaw.split('\n').filter((line) => line.trim()).length;
    }
    const parsedAhead = aheadRaw !== null ? parseInt(aheadRaw, 10) : NaN;
    const aheadCount = Number.isNaN(parsedAhead) ? 0 : parsedAhead;
    return { headHash, headMessage, headTimestampMs, changedCount, aheadCount };
}
|
|
64
|
+
/**
 * Resolve the git repository root containing `path`.
 * Returns null when the path is not inside a repository or git fails.
 */
async function resolveRepoRoot(path) {
    const toplevel = await git(path, ['rev-parse', '--show-toplevel']);
    return toplevel;
}
|
|
67
|
+
// Build the sorted worktree summaries for a single repository root.
// Returns null when `git worktree list` itself fails (e.g. the root is
// not a usable git repository).
async function enrichRepo(repoRoot, activeSessions) {
    const porcelain = await git(repoRoot, ['worktree', 'list', '--porcelain']);
    if (porcelain === null)
        return null;
    const rawWorktrees = parseWorktreePorcelain(porcelain);
    // Enrich all worktrees in parallel; one failure must not sink the rest.
    const results = await Promise.allSettled(rawWorktrees.map(wt => enrichWorktree(wt)));
    const worktrees = rawWorktrees.flatMap((wt, i) => {
        const result = results[i];
        if (result.status === 'rejected') {
            console.warn(`[WorktreeService] Failed to enrich worktree at ${wt.path}:`, result.reason);
            return []; // drop worktrees we could not inspect
        }
        const e = result.value;
        return [{
                path: wt.path,
                name: (0, path_1.basename)(wt.path),
                branch: wt.branch,
                headHash: e.headHash,
                headMessage: e.headMessage,
                headTimestampMs: e.headTimestampMs,
                changedCount: e.changedCount,
                aheadCount: e.aheadCount,
                // Sessions are correlated to worktrees by branch name; detached
                // worktrees (branch === null) can never match a session.
                activeSessionCount: wt.branch ? (activeSessions.counts.get(wt.branch) ?? 0) : 0,
            }];
    });
    // Sort: most active sessions first, then most local changes, then
    // most recent HEAD commit.
    return [...worktrees].sort((a, b) => {
        if (b.activeSessionCount !== a.activeSessionCount)
            return b.activeSessionCount - a.activeSessionCount;
        if (b.changedCount !== a.changedCount)
            return b.changedCount - a.changedCount;
        return b.headTimestampMs - a.headTimestampMs;
    });
}
|
|
100
|
+
/**
 * Tally in-progress sessions per git branch.
 * Sessions without a branch, or whose status is not 'in_progress', are
 * ignored.
 */
function buildActiveSessionCounts(sessions) {
    const counts = sessions.reduce((acc, session) => {
        const eligible = Boolean(session.gitBranch) && session.status === 'in_progress';
        if (eligible) {
            acc.set(session.gitBranch, (acc.get(session.gitBranch) ?? 0) + 1);
        }
        return acc;
    }, new Map());
    return { counts };
}
|
|
109
|
+
// Aggregate worktree summaries across all known repository roots.
// Repositories that fail entirely are logged and omitted rather than
// failing the whole listing.
async function getWorktreeList(repoRoots, activeSessions) {
    const repoResults = await Promise.allSettled(repoRoots.map(async (repoRoot) => {
        const worktrees = await enrichRepo(repoRoot, activeSessions);
        return { repoRoot, worktrees };
    }));
    const repos = repoResults.flatMap((result) => {
        if (result.status === 'rejected') {
            console.warn(`[WorktreeService] Failed to enrich repo:`, result.reason);
            return [];
        }
        const { repoRoot, worktrees } = result.value;
        // enrichRepo returns null when the root is not a usable git repo;
        // empty repos are also dropped from the listing.
        if (!worktrees || worktrees.length === 0)
            return [];
        return [{
                repoName: (0, path_1.basename)(repoRoot),
                repoRoot,
                worktrees,
            }];
    });
    // Repos containing at least one worktree with active sessions sort
    // first; ties break alphabetically by repo name.
    const sortedRepos = [...repos].sort((a, b) => {
        const aActive = a.worktrees.some(w => w.activeSessionCount > 0) ? 0 : 1;
        const bActive = b.worktrees.some(w => w.activeSessionCount > 0) ? 0 : 1;
        if (aActive !== bActive)
            return aActive - bActive;
        return a.repoName.localeCompare(b.repoName);
    });
    return { repos: sortedRepos };
}
|