harness-async 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +152 -0
- package/dist/dashboard/assets/index-TGNGdtwt.js +246 -0
- package/dist/dashboard/assets/index-f4TpA4iP.css +1 -0
- package/dist/dashboard/index.html +13 -0
- package/dist/src/adapters/claude-adapter.js +52 -0
- package/dist/src/adapters/codex-adapter.js +55 -0
- package/dist/src/adapters/index.js +14 -0
- package/dist/src/adapters/shared.js +74 -0
- package/dist/src/cli/commands/daemon.js +116 -0
- package/dist/src/cli/commands/doctor.js +50 -0
- package/dist/src/cli/commands/hook.js +188 -0
- package/dist/src/cli/commands/init.js +22 -0
- package/dist/src/cli/commands/run.js +129 -0
- package/dist/src/cli/commands/schedule.js +105 -0
- package/dist/src/cli/commands/task.js +188 -0
- package/dist/src/cli/index.js +23 -0
- package/dist/src/cli/utils/notify.js +32 -0
- package/dist/src/cli/utils/output.js +94 -0
- package/dist/src/core/daemon.js +375 -0
- package/dist/src/core/dag.js +80 -0
- package/dist/src/core/event-log.js +34 -0
- package/dist/src/core/lock.js +25 -0
- package/dist/src/core/run-manager.js +265 -0
- package/dist/src/core/run-orchestrator.js +193 -0
- package/dist/src/core/scheduler.js +106 -0
- package/dist/src/core/sessions.js +48 -0
- package/dist/src/core/store.js +225 -0
- package/dist/src/core/task-manager.js +375 -0
- package/dist/src/core/tmux.js +51 -0
- package/dist/src/daemon.js +35 -0
- package/dist/src/dashboard/routes.js +107 -0
- package/dist/src/dashboard/server.js +142 -0
- package/dist/src/dashboard/ws.js +75 -0
- package/dist/src/types/adapter.js +30 -0
- package/dist/src/types/index.js +87 -0
- package/package.json +65 -0
|
@@ -0,0 +1,375 @@
|
|
|
1
|
+
import { execFile } from 'node:child_process';
|
|
2
|
+
import { access, mkdir, readFile, rm, writeFile } from 'node:fs/promises';
|
|
3
|
+
import { dirname, join, resolve } from 'node:path';
|
|
4
|
+
import { promisify } from 'node:util';
|
|
5
|
+
import chokidar from 'chokidar';
|
|
6
|
+
import cron from 'node-cron';
|
|
7
|
+
import { notifyTaskEvent } from '../cli/utils/notify.js';
|
|
8
|
+
import { startTaskRun } from './run-orchestrator.js';
|
|
9
|
+
import { triggerSchedule, listSchedules } from './scheduler.js';
|
|
10
|
+
import { readConfig, readIndex, readTaskFile, resolveStoreDir } from './store.js';
|
|
11
|
+
import { eventSchema } from '../types/index.js';
|
|
12
|
+
// Promise-returning wrapper around child_process.execFile.
const execFileAsync = promisify(execFile);
// launchd job label the daemon is registered under on macOS; also names the plist file.
const DAEMON_LABEL = 'com.harness-agent.daemon';
|
|
14
|
+
/**
 * Locate the daemon entry script relative to the CLI entry that launched us.
 *
 * Falls back to `<cwd>/dist/src/daemon.js` when the CLI entry is unknown or
 * does not match a recognized layout.
 */
export function resolveDaemonEntryPath(options) {
  const cliEntry = options.cliEntry ?? process.argv[1];
  // Normalize Windows separators so the suffix checks below work everywhere.
  const normalized = cliEntry?.replace(/\\/g, '/');
  if (cliEntry) {
    // Built CLI: dist/src/cli/index.js -> sibling dist/src/daemon.js.
    if (normalized?.endsWith('/dist/src/cli/index.js')) {
      return resolve(dirname(cliEntry), '..', 'daemon.js');
    }
    // Dev CLI (TypeScript source): src/cli/index.ts -> compiled dist build.
    if (normalized?.endsWith('/src/cli/index.ts')) {
      return resolve(dirname(cliEntry), '..', '..', 'dist', 'src', 'daemon.js');
    }
  }
  return resolve(options.cwd, 'dist', 'src', 'daemon.js');
}
|
|
25
|
+
/**
 * Compute every filesystem location the launchd daemon uses (plist + logs).
 *
 * Home resolution order: explicit option, HA_HOME env override, then $HOME.
 * Throws when no home directory can be determined.
 */
export function resolveDaemonPaths(options) {
  const homeDir = options.homeDir ?? process.env.HA_HOME ?? process.env.HOME;
  if (!homeDir) {
    throw new Error('Unable to resolve home directory for daemon');
  }
  const launchAgentsDir = options.launchAgentsDir ?? join(homeDir, 'Library', 'LaunchAgents');
  const logsDir = options.logsDir ?? join(homeDir, 'Library', 'Logs', 'harness-async');
  const plistPath = join(launchAgentsDir, `${DAEMON_LABEL}.plist`);
  const stdoutPath = join(logsDir, 'daemon.out.log');
  const stderrPath = join(logsDir, 'daemon.err.log');
  return { homeDir, launchAgentsDir, logsDir, plistPath, stdoutPath, stderrPath };
}
|
|
41
|
+
// Escape characters that are illegal inside plist <string> character data.
// Paths may legally contain `&`, `<`, or `>`; unescaped they produce an
// invalid plist that launchctl refuses to load.
function escapePlistString(value) {
  return String(value)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;');
}
/**
 * Render the launchd property list that runs the daemon at load and keeps it
 * alive. All interpolated paths are XML-escaped; DAEMON_LABEL is a fixed safe
 * constant and needs no escaping.
 */
export function renderLaunchdPlist(options) {
  const nodePath = escapePlistString(options.nodePath);
  const daemonEntry = escapePlistString(options.daemonEntry);
  const cwd = escapePlistString(options.cwd);
  const homeDir = escapePlistString(options.paths.homeDir);
  const stdoutPath = escapePlistString(options.paths.stdoutPath);
  const stderrPath = escapePlistString(options.paths.stderrPath);
  return `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>${DAEMON_LABEL}</string>
<key>ProgramArguments</key>
<array>
<string>${nodePath}</string>
<string>${daemonEntry}</string>
</array>
<key>WorkingDirectory</key>
<string>${cwd}</string>
<key>EnvironmentVariables</key>
<dict>
<key>HA_HOME</key>
<string>${homeDir}</string>
<key>HA_PROJECT_ROOT</key>
<string>${cwd}</string>
</dict>
<key>StandardOutPath</key>
<string>${stdoutPath}</string>
<key>StandardErrorPath</key>
<string>${stderrPath}</string>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
`;
}
|
|
74
|
+
/**
 * Install and load the daemon as a macOS launchd agent.
 *
 * Writes the plist under ~/Library/LaunchAgents and loads it with `-w` so the
 * enabled state persists across reboots. Returns the resolved daemon paths.
 */
export async function startLaunchdDaemon(options) {
  const paths = resolveDaemonPaths(options);
  const runLaunchctl = options.runLaunchctl ?? createLaunchctlRunner(options.launchctlBin);
  const daemonEntry = options.daemonEntry ?? resolveDaemonEntryPath({ cwd: options.cwd });
  const nodePath = options.nodePath ?? process.execPath;
  // Make sure both target directories exist before any file is written.
  await mkdir(paths.launchAgentsDir, { recursive: true });
  await mkdir(paths.logsDir, { recursive: true });
  const plist = renderLaunchdPlist({
    cwd: options.cwd,
    daemonEntry,
    nodePath,
    paths,
  });
  await writeFile(paths.plistPath, plist, 'utf8');
  await runLaunchctl(['load', '-w', paths.plistPath]);
  return paths;
}
|
|
90
|
+
/**
 * Build the daemon image and launch it as a detached Docker container, with
 * the ~/.ha store mounted at /data and the dashboard port published.
 */
export async function startDockerDaemon(options) {
  const paths = resolveDaemonPaths(options);
  const runDocker = options.runDocker ?? createDockerRunner(options.dockerBin);
  const imageName = options.imageName ?? 'ha-daemon';
  const containerName = options.containerName ?? 'ha-daemon';
  await runDocker(['build', '-t', imageName, '.'], { cwd: options.cwd });
  const runArgs = [
    'run',
    '-d',
    '--name', containerName,
    '-v', `${join(paths.homeDir, '.ha')}:/data`,
    '-e', 'HA_HOME=/data',
    '-e', 'HA_PROJECT_ROOT=/app',
    '-p', '3777:3777',
    imageName,
  ];
  await runDocker(runArgs, { cwd: options.cwd });
}
|
|
112
|
+
/**
 * Unload the launchd agent (when its plist is installed) and remove the plist.
 *
 * A missing plist is treated as "never installed" and is not an error; the
 * trailing rm uses { force: true } so it is a no-op in that case too.
 */
export async function stopLaunchdDaemon(options) {
  const paths = resolveDaemonPaths(options);
  const runLaunchctl = options.runLaunchctl ?? createLaunchctlRunner(options.launchctlBin);
  if (await fileExists(paths.plistPath)) {
    await runLaunchctl(['unload', '-w', paths.plistPath]);
  }
  await rm(paths.plistPath, { force: true });
  return paths;
}
|
|
127
|
+
/**
 * Stop and remove the daemon's Docker container.
 * Both commands run from the project root via the injected/real docker runner.
 */
export async function stopDockerDaemon(options) {
  const runDocker = options.runDocker ?? createDockerRunner(options.dockerBin);
  const containerName = options.containerName ?? 'ha-daemon';
  for (const command of ['stop', 'rm']) {
    await runDocker([command, containerName], { cwd: options.cwd });
  }
}
|
|
133
|
+
/**
 * Report the daemon's launchd state.
 *
 * States: 'running' (launchctl reports a PID), 'loaded' (registered but no
 * PID), 'installed' (plist on disk but not loaded), 'stopped' (neither).
 * The returned object also carries every resolved daemon path.
 */
export async function getDaemonStatus(options) {
  const paths = resolveDaemonPaths(options);
  const runLaunchctl = options.runLaunchctl ?? createLaunchctlRunner(options.launchctlBin);
  const plistExists = await fileExists(paths.plistPath);
  let state;
  let pid = null;
  try {
    // `launchctl list <label>` prints a dict-like dump; a "PID" entry means
    // the job is actually running.
    const { stdout } = await runLaunchctl(['list', DAEMON_LABEL]);
    const pidMatch = stdout.match(/"PID"\s*=\s*(\d+)/);
    if (pidMatch) {
      state = 'running';
      pid = Number(pidMatch[1]);
    }
    else {
      state = 'loaded';
    }
  }
  catch {
    // launchctl exits non-zero when the label is not loaded at all.
    state = plistExists ? 'installed' : 'stopped';
  }
  return { ...paths, state, pid };
}
|
|
161
|
+
/**
 * Read the daemon's stdout/stderr log files, returning '' for any file that
 * does not exist yet. The two reads are independent, so they run in parallel.
 */
export async function readDaemonLogs(options) {
  const paths = resolveDaemonPaths(options);
  const [stdout, stderr] = await Promise.all([
    readFileIfExists(paths.stdoutPath),
    readFileIfExists(paths.stderrPath),
  ]);
  return { stdout, stderr };
}
|
|
168
|
+
/**
 * Boot the in-process daemon runtime.
 *
 * Sets up two kinds of background work:
 *  1. a node-cron job per enabled schedule, firing triggerSchedule;
 *  2. a chokidar watcher per store tailing events.ndjson, replaying each new
 *     event into broadcast, notification, and auto-start handling.
 *
 * Returns a handle whose stop() cancels the cron jobs and closes the watchers.
 */
export async function startDaemonRuntime(options) {
  const stores = resolveRuntimeStores(options.cwd, options.homeDir);
  const schedules = await listSchedules({
    cwd: options.cwd,
    homeDir: options.homeDir,
  });
  const jobs = schedules
    .filter((schedule) => schedule.enabled)
    .map((schedule) => cron.schedule(schedule.cron, () => {
      // Fire-and-forget: a failed trigger is logged, never thrown into cron.
      void triggerSchedule({
        cwd: options.cwd,
        homeDir: options.homeDir,
        name: schedule.name,
      }).catch((error) => {
        options.logger?.(`Failed to trigger schedule ${schedule.name}: ${error.message}`);
      });
    }));
  // Per-file cursor: number of NDJSON lines already handled for each path.
  const processedLines = new Map();
  // awaitWriteFinish debounces partially-written NDJSON lines before events fire.
  const watchers = stores.map((storeDir) => chokidar.watch(join(storeDir, 'events.ndjson'), {
    ignoreInitial: false,
    awaitWriteFinish: {
      stabilityThreshold: 50,
      pollInterval: 10,
    },
  }));
  const processEvents = async (storeDir) => {
    const eventsPath = join(storeDir, 'events.ndjson');
    try {
      const raw = await readFile(eventsPath, 'utf8');
      const lines = raw.split('\n').filter(Boolean);
      // Only handle lines appended since the last pass, then advance the cursor.
      const freshLines = lines.slice(processedLines.get(eventsPath) ?? 0);
      processedLines.set(eventsPath, lines.length);
      for (const line of freshLines) {
        const event = eventSchema.parse(JSON.parse(line));
        await options.broadcastEvent?.(event);
        const notification = await mapEventToNotification(storeDir, event);
        if (notification) {
          await notifyTaskEvent(notification, options.notifyAdapter);
        }
        // Dependency-unlock events may kick off an agent run automatically.
        await maybeAutoStartUnlockedTask(storeDir, event, options);
      }
    }
    catch (error) {
      // The events file may simply not exist yet for a fresh store.
      if (error.code === 'ENOENT') {
        return;
      }
      options.logger?.(`Failed to process daemon events: ${error.message}`);
    }
  };
  for (const [index, watcher] of watchers.entries()) {
    const storeDir = stores[index];
    watcher.on('add', () => {
      void processEvents(storeDir);
    });
    watcher.on('change', () => {
      void processEvents(storeDir);
    });
    // Catch up on events written before the watcher came online.
    await processEvents(storeDir);
  }
  return {
    async stop() {
      for (const job of jobs) {
        job.stop();
      }
      await Promise.all(watchers.map(async (watcher) => watcher.close()));
    },
  };
}
|
|
236
|
+
/**
 * Translate a store event into a user-facing notification payload, or null
 * when the event is not notification-worthy.
 *
 * Notifiable events: task.dependency_unlocked, and task.status_changed whose
 * target status maps to a notification type.
 */
export async function mapEventToNotification(storeDir, event) {
  if (!event.taskId) {
    return null;
  }
  let notificationType = null;
  if (event.type === 'task.dependency_unlocked') {
    notificationType = 'task.dependency_unlocked';
  }
  else if (event.type === 'task.status_changed' && event.to) {
    notificationType = statusToNotificationType(event.to);
  }
  if (!notificationType) {
    return null;
  }
  return {
    type: notificationType,
    taskId: event.taskId,
    title: await resolveTaskTitle(storeDir, event.taskId),
  };
}
|
|
260
|
+
// Map a task status to its notification type; statuses that do not produce a
// notification (e.g. 'pending', 'running') map to null.
function statusToNotificationType(status) {
  switch (status) {
    case 'completed':
      return 'task.completed';
    case 'failed':
      return 'task.failed';
    case 'paused':
      return 'task.paused';
    case 'waiting-review':
      return 'task.waiting-review';
    default:
      return null;
  }
}
|
|
275
|
+
// Look up a task's human-readable title; any lookup failure (missing index,
// missing record, unreadable task file) falls back to "Task <id>".
async function resolveTaskTitle(storeDir, taskId) {
  const fallback = `Task ${taskId}`;
  try {
    const index = await readIndex(storeDir);
    const record = index.tasks[String(taskId)];
    if (!record) {
      return fallback;
    }
    const task = await readTaskFile(join(storeDir, 'tasks', record.file));
    return task.title;
  }
  catch {
    return fallback;
  }
}
|
|
289
|
+
// The daemon watches both the project-local store and the global store.
function resolveRuntimeStores(cwd, homeDir) {
  return ['project', 'global'].map((scope) => resolveStoreDir({ cwd, homeDir, scope }));
}
|
|
295
|
+
/**
 * When a dependency-unlock event frees an L1, non-human task, kick off an
 * agent run for it automatically.
 *
 * 'auto' assignees resolve to the store's configured default agent; anything
 * other than claude/codex is left alone. options.runAutoStart (tests / DI)
 * takes precedence over the real startTaskRun.
 */
async function maybeAutoStartUnlockedTask(storeDir, event, options) {
  if (event.type !== 'task.dependency_unlocked' || !event.taskId) {
    return;
  }
  const task = await resolveTaskFromStore(storeDir, event.taskId);
  // Auto-start is limited to L1 tasks that are not assigned to a human.
  if (!task || task.level !== 'L1' || task.assignee === 'human') {
    return;
  }
  const config = await readConfig(storeDir);
  const tool = task.assignee === 'auto' ? config.defaultAgent : task.assignee;
  if (tool !== 'claude' && tool !== 'codex') {
    return;
  }
  if (options.runAutoStart) {
    await options.runAutoStart({
      taskId: task.id,
      tool,
      scope: task.scope,
    });
    return;
  }
  await startTaskRun({
    cwd: options.cwd,
    homeDir: options.homeDir,
    taskId: task.id,
    tool,
    scope: task.scope,
  });
}
|
|
322
|
+
// Load a task's full file via the store index; unknown ids resolve to null
// rather than throwing.
async function resolveTaskFromStore(storeDir, taskId) {
  const index = await readIndex(storeDir);
  const record = index.tasks[String(taskId)];
  return record ? readTaskFile(join(storeDir, 'tasks', record.file)) : null;
}
|
|
330
|
+
// Build an async (args) => { stdout, stderr } wrapper around launchctl.
// The binary is overridable via option or the HA_LAUNCHCTL_BIN env var.
function createLaunchctlRunner(launchctlBin = process.env.HA_LAUNCHCTL_BIN ?? 'launchctl') {
  return async (args) => {
    const { stdout, stderr } = await execFileAsync(launchctlBin, args, { env: process.env });
    return { stdout, stderr };
  };
}
|
|
341
|
+
// Build an async (args, { cwd }) => { stdout, stderr } wrapper around docker.
// The binary is overridable via option or the HA_DOCKER_BIN env var.
function createDockerRunner(dockerBin = process.env.HA_DOCKER_BIN ?? 'docker') {
  return async (args, options) => {
    const { stdout, stderr } = await execFileAsync(dockerBin, args, {
      cwd: options.cwd,
      env: process.env,
    });
    return { stdout, stderr };
  };
}
|
|
353
|
+
// Read a UTF-8 file, treating a missing file as empty string; any other
// failure (permissions, I/O) propagates to the caller.
async function readFileIfExists(filePath) {
  try {
    return await readFile(filePath, 'utf8');
  }
  catch (error) {
    if (error.code !== 'ENOENT') {
      throw error;
    }
    return '';
  }
}
|
|
364
|
+
// True when the path exists (access resolves), false on ENOENT; any other
// access failure is rethrown rather than masked as "missing".
async function fileExists(filePath) {
  try {
    await access(filePath);
    return true;
  }
  catch (error) {
    if (error.code !== 'ENOENT') {
      throw error;
    }
    return false;
  }
}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/**
 * Topologically order the task ids in the index (Kahn's algorithm).
 *
 * Nodes are the keys of index.tasks; edges come from index.dag.edges
 * ({ from, to } pairs of task-id strings). Returns numeric task ids in an
 * order where every dependency precedes its dependents; tasks trapped in a
 * cycle are omitted, and an empty index yields [].
 */
export function getExecutionOrder(index) {
  const nodes = Object.keys(index.tasks);
  if (nodes.length === 0) {
    return [];
  }
  const indegree = new Map();
  const adjacency = new Map();
  for (const node of nodes) {
    indegree.set(node, 0);
    adjacency.set(node, []);
  }
  for (const edge of index.dag.edges) {
    adjacency.set(edge.from, [...(adjacency.get(edge.from) ?? []), edge.to]);
    indegree.set(edge.to, (indegree.get(edge.to) ?? 0) + 1);
  }
  const queue = nodes.filter((node) => (indegree.get(node) ?? 0) === 0);
  const result = [];
  // Index cursor instead of Array#shift() keeps the scan O(V + E);
  // shift() re-indexes the array on every dequeue, making the loop O(V^2).
  for (let head = 0; head < queue.length; head += 1) {
    const node = queue[head];
    result.push(Number(node));
    for (const next of adjacency.get(node) ?? []) {
      const nextDegree = (indegree.get(next) ?? 0) - 1;
      indegree.set(next, nextDegree);
      if (nextDegree === 0) {
        queue.push(next);
      }
    }
  }
  return result;
}
|
|
34
|
+
/**
 * Return the ids (ascending) of tasks that can start now: status 'pending'
 * with every dependency already 'completed'.
 */
export function getRunnableTasks(index) {
  const runnable = [];
  for (const [taskId, task] of Object.entries(index.tasks)) {
    if (task.status !== 'pending') {
      continue;
    }
    const depsDone = task.deps.every((depId) => index.tasks[String(depId)]?.status === 'completed');
    if (depsDone) {
      runnable.push(Number(taskId));
    }
  }
  return runnable.sort((left, right) => left - right);
}
|
|
41
|
+
/**
 * Return the ids (ascending) of tasks that are waiting: status 'pending'
 * with at least one dependency not yet 'completed' (or unknown).
 */
export function getBlockedTasks(index) {
  const blocked = [];
  for (const [taskId, task] of Object.entries(index.tasks)) {
    if (task.status !== 'pending') {
      continue;
    }
    const hasOpenDep = task.deps.some((depId) => index.tasks[String(depId)]?.status !== 'completed');
    if (hasOpenDep) {
      blocked.push(Number(taskId));
    }
  }
  return blocked.sort((left, right) => left - right);
}
|
|
48
|
+
/**
 * Detect whether the dependency graph contains a cycle (Kahn's algorithm):
 * if topological processing cannot visit every node, a cycle exists.
 *
 * Unlike getExecutionOrder, edge endpoints missing from index.tasks are still
 * counted as nodes, so cycles among dangling edges are detected too.
 * Returns true when a cycle exists.
 */
export function detectCycle(index) {
  const nodes = new Set(Object.keys(index.tasks));
  const indegree = new Map();
  const adjacency = new Map();
  for (const node of nodes) {
    indegree.set(node, 0);
    adjacency.set(node, []);
  }
  for (const edge of index.dag.edges) {
    nodes.add(edge.from);
    nodes.add(edge.to);
    adjacency.set(edge.from, [...(adjacency.get(edge.from) ?? []), edge.to]);
    indegree.set(edge.to, (indegree.get(edge.to) ?? 0) + 1);
    // Ensure edge-only sources have an indegree entry without clobbering one.
    indegree.set(edge.from, indegree.get(edge.from) ?? 0);
  }
  const queue = [...nodes].filter((node) => (indegree.get(node) ?? 0) === 0);
  let visited = 0;
  // Index cursor instead of Array#shift() keeps the scan O(V + E);
  // shift() re-indexes the array on every dequeue, making the loop O(V^2).
  for (let head = 0; head < queue.length; head += 1) {
    const node = queue[head];
    visited += 1;
    for (const next of adjacency.get(node) ?? []) {
      const nextDegree = (indegree.get(next) ?? 0) - 1;
      indegree.set(next, nextDegree);
      if (nextDegree === 0) {
        queue.push(next);
      }
    }
  }
  return visited !== nodes.size;
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { appendFile, mkdir, readFile } from 'node:fs/promises';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { eventSchema } from '../types/index.js';
|
|
4
|
+
// Append-only NDJSON event log inside a store directory, one JSON event per line.
const EVENTS_FILE = 'events.ndjson';
|
|
5
|
+
/**
 * Validate an event against eventSchema and append it as one NDJSON line to
 * the store's events.ndjson, creating the store directory if needed.
 * Throws if the event fails schema validation.
 */
export async function appendEvent(storeDir, event) {
  const parsed = eventSchema.parse(event);
  await mkdir(storeDir, { recursive: true });
  await appendFile(join(storeDir, EVENTS_FILE), `${JSON.stringify(parsed)}\n`, 'utf8');
}
|
|
10
|
+
/**
 * Read and validate every event in the store's NDJSON log, optionally
 * filtered by taskId and/or type. A missing log file yields []; any other
 * read error, and any schema/JSON parse error, propagates.
 */
export async function readEvents(storeDir, filter = {}) {
  const filePath = join(storeDir, EVENTS_FILE);
  let raw;
  try {
    raw = await readFile(filePath, 'utf8');
  }
  catch (error) {
    if (error.code === 'ENOENT') {
      return [];
    }
    throw error;
  }
  const events = raw
    .split('\n')
    .filter(Boolean)
    .map((line) => eventSchema.parse(JSON.parse(line)));
  return events.filter((event) => {
    const taskMatches = filter.taskId === undefined || event.taskId === filter.taskId;
    const typeMatches = filter.type === undefined || event.type === filter.type;
    return taskMatches && typeMatches;
  });
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { open, mkdir } from 'node:fs/promises';
|
|
2
|
+
import { dirname } from 'node:path';
|
|
3
|
+
import lockfile from 'proper-lockfile';
|
|
4
|
+
// Create the file's parent directories and touch the file itself. Append-mode
// open creates a missing file without truncating an existing one, so this is
// safe to call repeatedly.
async function ensureFileExists(filePath) {
  await mkdir(dirname(filePath), { recursive: true });
  const handle = await open(filePath, 'a');
  try {
    // nothing to write — the open itself created the file if needed
  }
  finally {
    await handle.close();
  }
}
|
|
9
|
+
/**
 * Run fn() while holding an exclusive advisory lock on filePath.
 *
 * proper-lockfile requires the target to exist, so the file is touched first.
 * Acquisition retries with exponential backoff (up to 10 attempts); the lock
 * is always released, even when fn throws.
 */
export async function withLock(filePath, fn) {
  await ensureFileExists(filePath);
  const lockOptions = {
    retries: {
      retries: 10,
      factor: 1.5,
      minTimeout: 10,
      maxTimeout: 100,
    },
  };
  const release = await lockfile.lock(filePath, lockOptions);
  try {
    return await fn();
  }
  finally {
    await release();
  }
}
|