bosun 0.41.2 → 0.41.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +1 -1
- package/agent/agent-pool.mjs +9 -2
- package/agent/agent-prompt-catalog.mjs +971 -0
- package/agent/agent-prompts.mjs +2 -970
- package/agent/agent-supervisor.mjs +119 -6
- package/agent/autofix-git.mjs +33 -0
- package/agent/autofix-prompts.mjs +151 -0
- package/agent/autofix.mjs +11 -175
- package/agent/bosun-skills.mjs +3 -2
- package/bosun.config.example.json +17 -0
- package/bosun.schema.json +87 -188
- package/cli.mjs +34 -1
- package/config/config-doctor.mjs +5 -250
- package/config/config-file-names.mjs +5 -0
- package/config/config.mjs +89 -493
- package/config/executor-config.mjs +493 -0
- package/config/repo-root.mjs +1 -2
- package/config/workspace-health.mjs +242 -0
- package/git/git-safety.mjs +15 -0
- package/github/github-oauth-portal.mjs +46 -0
- package/infra/library-manager-utils.mjs +22 -0
- package/infra/library-manager-well-known-sources.mjs +578 -0
- package/infra/library-manager.mjs +512 -1030
- package/infra/monitor.mjs +35 -9
- package/infra/session-tracker.mjs +10 -7
- package/kanban/kanban-adapter.mjs +17 -1
- package/lib/codebase-audit-manifests.mjs +117 -0
- package/lib/codebase-audit.mjs +18 -115
- package/package.json +18 -3
- package/server/setup-web-server.mjs +58 -5
- package/server/ui-server.mjs +1394 -79
- package/shell/codex-config-file.mjs +178 -0
- package/shell/codex-config.mjs +538 -575
- package/task/task-cli.mjs +54 -3
- package/task/task-executor.mjs +143 -13
- package/task/task-store.mjs +409 -1
- package/telegram/telegram-bot.mjs +127 -0
- package/tools/apply-pr-suggestions.mjs +401 -0
- package/tools/syntax-check.mjs +28 -9
- package/ui/app.js +3 -14
- package/ui/components/kanban-board.js +227 -4
- package/ui/components/session-list.js +85 -5
- package/ui/demo-defaults.js +338 -84
- package/ui/demo.html +155 -0
- package/ui/modules/session-api.js +96 -0
- package/ui/modules/settings-schema.js +1 -2
- package/ui/modules/state.js +43 -3
- package/ui/setup.html +4 -5
- package/ui/styles/components.css +58 -4
- package/ui/tabs/agents.js +12 -15
- package/ui/tabs/control.js +1 -0
- package/ui/tabs/library.js +484 -22
- package/ui/tabs/manual-flows.js +105 -29
- package/ui/tabs/tasks.js +848 -141
- package/ui/tabs/telemetry.js +129 -11
- package/ui/tabs/workflow-canvas-utils.mjs +130 -0
- package/ui/tabs/workflows.js +293 -23
- package/voice/voice-tool-definitions.mjs +757 -0
- package/voice/voice-tools.mjs +34 -778
- package/workflow/manual-flow-audit.mjs +165 -0
- package/workflow/manual-flows.mjs +164 -259
- package/workflow/workflow-engine.mjs +147 -58
- package/workflow/workflow-nodes/definitions.mjs +1207 -0
- package/workflow/workflow-nodes/transforms.mjs +612 -0
- package/workflow/workflow-nodes.mjs +358 -63
- package/workflow/workflow-templates.mjs +313 -191
- package/workflow-templates/_helpers.mjs +154 -0
- package/workflow-templates/agents.mjs +61 -4
- package/workflow-templates/code-quality.mjs +7 -7
- package/workflow-templates/github.mjs +20 -10
- package/workflow-templates/task-batch.mjs +44 -11
- package/workflow-templates/task-lifecycle.mjs +31 -6
- package/workspace/worktree-manager.mjs +277 -3
|
@@ -0,0 +1,612 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* workflow-nodes.mjs — Built-in Workflow Node Types for Bosun
|
|
3
|
+
*
|
|
4
|
+
* Registers all standard node types that can be used in workflow definitions.
|
|
5
|
+
* Node types are organized by category:
|
|
6
|
+
*
|
|
7
|
+
* TRIGGERS — Events that start workflow execution
|
|
8
|
+
* CONDITIONS — Branching logic / gates
|
|
9
|
+
* ACTIONS — Side-effect operations (run agent, create task, etc.)
|
|
10
|
+
* VALIDATION — Verification gates (screenshots, tests, model review)
|
|
11
|
+
* TRANSFORM — Data transformation / aggregation
|
|
12
|
+
* NOTIFY — Notifications (telegram, log, etc.)
|
|
13
|
+
*
|
|
14
|
+
* Each node type must export:
|
|
15
|
+
* execute(node, ctx, engine) → Promise<any> — The node's logic
|
|
16
|
+
* describe() → string — Human-readable description
|
|
17
|
+
* schema → object — JSON Schema for node config
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync } from "node:fs";
|
|
21
|
+
import { resolve, dirname } from "node:path";
|
|
22
|
+
import { execSync, execFileSync, spawn } from "node:child_process";
|
|
23
|
+
import { createHash, randomUUID } from "node:crypto";
|
|
24
|
+
import { getAgentToolConfig, getEffectiveTools } from "../../agent/agent-tool-config.mjs";
|
|
25
|
+
import { getToolsPromptBlock } from "../../agent/agent-custom-tools.mjs";
|
|
26
|
+
import { buildRelevantSkillsPromptBlock, findRelevantSkills } from "../../agent/bosun-skills.mjs";
|
|
27
|
+
import { getSessionTracker } from "../../infra/session-tracker.mjs";
|
|
28
|
+
import { fixGitConfigCorruption } from "../../workspace/worktree-manager.mjs";
|
|
29
|
+
|
|
30
|
+
import {
|
|
31
|
+
registerNodeType,
|
|
32
|
+
BOSUN_ATTACHED_PR_LABEL,
|
|
33
|
+
PORTABLE_PRUNE_AND_COUNT_WORKTREES_COMMAND,
|
|
34
|
+
PORTABLE_WORKTREE_COUNT_COMMAND,
|
|
35
|
+
TAG,
|
|
36
|
+
WORKFLOW_AGENT_EVENT_PREVIEW_LIMIT,
|
|
37
|
+
WORKFLOW_AGENT_HEARTBEAT_MS,
|
|
38
|
+
WORKFLOW_TELEGRAM_ICON_MAP,
|
|
39
|
+
bindTaskContext,
|
|
40
|
+
buildAgentEventPreview,
|
|
41
|
+
buildAgentExecutionDigest,
|
|
42
|
+
buildGitExecutionEnv,
|
|
43
|
+
buildTaskContextBlock,
|
|
44
|
+
buildWorkflowAgentToolContract,
|
|
45
|
+
collectWakePhraseCandidates,
|
|
46
|
+
condenseAgentItems,
|
|
47
|
+
createKanbanTaskWithProject,
|
|
48
|
+
decodeWorkflowUnicodeIconToken,
|
|
49
|
+
deriveManagedWorktreeDirName,
|
|
50
|
+
detectWakePhraseMatch,
|
|
51
|
+
execGitArgsSync,
|
|
52
|
+
extractStreamText,
|
|
53
|
+
extractSymbolHint,
|
|
54
|
+
formatAttachmentLine,
|
|
55
|
+
formatCommentLine,
|
|
56
|
+
getPathValue,
|
|
57
|
+
isBosunStateComment,
|
|
58
|
+
isManagedBosunWorktree,
|
|
59
|
+
makeIsolatedGitEnv,
|
|
60
|
+
normalizeLegacyWorkflowCommand,
|
|
61
|
+
normalizeLineEndings,
|
|
62
|
+
normalizeNarrativeText,
|
|
63
|
+
normalizeTaskAttachments,
|
|
64
|
+
normalizeTaskComments,
|
|
65
|
+
normalizeWorkflowStack,
|
|
66
|
+
normalizeWorkflowTelegramText,
|
|
67
|
+
parseBooleanSetting,
|
|
68
|
+
parsePathListingLine,
|
|
69
|
+
resolveGitCandidates,
|
|
70
|
+
resolveWorkflowNodeValue,
|
|
71
|
+
simplifyPathLabel,
|
|
72
|
+
summarizeAgentStreamEvent,
|
|
73
|
+
summarizeAssistantMessageData,
|
|
74
|
+
summarizeAssistantUsage,
|
|
75
|
+
summarizePathListingBlock,
|
|
76
|
+
trimLogText,
|
|
77
|
+
} from "./definitions.mjs";
|
|
78
|
+
|
|
79
|
+
registerNodeType("transform.json_parse", {
  describe: () => "Parse JSON from a previous node's output",
  schema: {
    type: "object",
    properties: {
      input: { type: "string", description: "Source: node ID or {{variable}}" },
      field: { type: "string", description: "Field in source output containing JSON" },
    },
  },
  async execute(node, ctx) {
    // Prefer a named upstream node's field; otherwise resolve the literal `value` config.
    const upstreamId = node.config?.input;
    const fieldName = node.config?.field || "output";
    let payload;
    if (upstreamId) {
      payload = ctx.getNodeOutput(upstreamId)?.[fieldName];
    } else {
      payload = ctx.resolve(node.config?.value || "");
    }
    // Non-string payloads are serialized first so JSON.parse always sees a string.
    const text = typeof payload === "string" ? payload : JSON.stringify(payload);
    try {
      return { data: JSON.parse(text), success: true };
    } catch (err) {
      // Parse failures are reported, not thrown, so downstream conditions can route on them.
      return { success: false, error: err.message };
    }
  },
});
|
|
100
|
+
|
|
101
|
+
registerNodeType("transform.template", {
  describe: () => "Render a text template with context variables",
  schema: {
    type: "object",
    properties: {
      template: { type: "string", description: "Template text with {{variables}}" },
    },
    required: ["template"],
  },
  async execute(node, ctx) {
    // Substitute {{variables}} from the workflow context into the template string.
    return { text: ctx.resolve(node.config?.template || "") };
  },
});
|
|
115
|
+
|
|
116
|
+
registerNodeType("transform.aggregate", {
  describe: () => "Aggregate outputs from multiple nodes into a single object",
  schema: {
    type: "object",
    properties: {
      sources: { type: "array", items: { type: "string" }, description: "Node IDs to aggregate" },
    },
  },
  async execute(node, ctx) {
    const nodeIds = node.config?.sources || [];
    const collected = {};
    // Gather each referenced node's output under its own ID.
    for (const nodeId of nodeIds) {
      collected[nodeId] = ctx.getNodeOutput(nodeId);
    }
    return { aggregated: collected, count: nodeIds.length };
  },
});
|
|
133
|
+
|
|
134
|
+
registerNodeType("transform.llm_parse", {
  describe: () =>
    "Parse unstructured LLM output into structured fields using regex patterns " +
    "or keyword extraction. Essential for routing decisions based on LLM verdicts " +
    "(e.g., PASS/FAIL/PARTIAL, correct/minor/critical).",
  schema: {
    type: "object",
    properties: {
      input: {
        type: "string",
        description: "Source text to parse — node ID, {{variable}}, or literal text",
      },
      field: {
        type: "string",
        default: "output",
        description: "Field name within source node output (when input is a node ID)",
      },
      patterns: {
        type: "object",
        description:
          "Map of field names to regex patterns. Each pattern is applied to the input; " +
          "the first capture group (or full match) is stored under that key. " +
          'Example: { "verdict": "\\\\b(PASS|FAIL|PARTIAL)\\\\b", "score": "score:\\\\s*(\\\\d+)" }',
        additionalProperties: { type: "string" },
      },
      keywords: {
        type: "object",
        description:
          "Map of field names to keyword lists. The first keyword found in the input is stored. " +
          'Example: { "severity": ["critical", "minor", "correct"] }',
        additionalProperties: {
          type: "array",
          items: { type: "string" },
        },
      },
      outputPort: {
        type: "string",
        description:
          "Which parsed field to use as the matchedPort for downstream routing. " +
          "If set, the value of that parsed field becomes the output port.",
      },
    },
    required: [],
  },
  async execute(node, ctx) {
    // Resolve the input text: `input` may be a node ID, a {{template}}, or a literal.
    let text = "";
    const inputRef = ctx.resolve(node.config?.input || "");
    const field = node.config?.field || "output";

    if (inputRef && ctx.getNodeOutput(inputRef)) {
      // Input is a node ID — grab the specified field, falling back through common
      // output field names before stringifying the whole output object.
      const nodeOutput = ctx.getNodeOutput(inputRef);
      text = String(
        nodeOutput?.[field] ?? nodeOutput?.reviewOutput ?? nodeOutput?.text ?? JSON.stringify(nodeOutput) ?? "",
      );
    } else {
      // Input is a template/literal
      text = String(inputRef || "");
    }

    const parsed = {};

    // Apply regex patterns (compiled case-insensitive). The first capture group wins;
    // a pattern with no groups stores the full match. No match → null.
    const patterns = node.config?.patterns || {};
    for (const [key, patternStr] of Object.entries(patterns)) {
      try {
        const regex = new RegExp(patternStr, "i");
        const match = text.match(regex);
        if (match) {
          parsed[key] = match[1] !== undefined ? match[1] : match[0];
        } else {
          parsed[key] = null;
        }
      } catch (err) {
        // User-supplied regex failed to compile: warn and record null instead of failing the node.
        ctx.log(node.id, `Pattern "${key}" error: ${err.message}`, "warn");
        parsed[key] = null;
      }
    }

    // Apply keyword extraction — the first keyword found as a case-insensitive
    // substring of the input wins; none found → null.
    const keywords = node.config?.keywords || {};
    const lowerText = text.toLowerCase();
    for (const [key, wordList] of Object.entries(keywords)) {
      if (!Array.isArray(wordList)) continue;
      const found = wordList.find((w) => lowerText.includes(String(w).toLowerCase()));
      parsed[key] = found || null;
    }

    // Determine output port for routing: the lowercased/trimmed value of the chosen
    // parsed field, or "default" when outputPort is unset or the field is null.
    const portField = node.config?.outputPort || "";
    let matchedPort = "default";
    if (portField && parsed[portField] != null) {
      matchedPort = String(parsed[portField]).toLowerCase().trim();
    }

    ctx.log(node.id, `Parsed: ${JSON.stringify(parsed)}, port=${matchedPort}`);

    return {
      parsed,
      matchedPort,
      port: matchedPort, // alias of matchedPort for downstream convenience
      inputLength: text.length,
    };
  },
});
|
|
240
|
+
|
|
241
|
+
// ═══════════════════════════════════════════════════════════════════════════
|
|
242
|
+
// NOTIFY — Notifications
|
|
243
|
+
// ═══════════════════════════════════════════════════════════════════════════
|
|
244
|
+
|
|
245
|
+
registerNodeType("transform.mcp_extract", {
  describe: () =>
    "Extract and reshape structured data from an upstream MCP tool call or " +
    "any node output. Supports dot-path fields, JSON pointers, array wildcards, " +
    "type coercion, default values, and output mapping. Essential for piping " +
    "specific data points between MCP tool calls in a workflow.",
  schema: {
    type: "object",
    properties: {
      source: {
        type: "string",
        description: "Source node ID to extract from (e.g. 'mcp-github-prs')",
      },
      sourceField: {
        type: "string",
        default: "data",
        description: "Field within the source node's output to extract from",
      },
      root: {
        type: "string",
        description: "Root path within the source data (narrows extraction scope)",
      },
      fields: {
        type: "object",
        description:
          "Map of outputKey → sourcePath (dot-path, JSON pointer, or wildcard). " +
          "Example: { 'prTitles': 'items[*].title', 'firstAuthor': 'items[0].user.login' }",
        additionalProperties: { type: "string" },
      },
      defaults: {
        type: "object",
        description: "Default values for missing fields",
        additionalProperties: true,
      },
      types: {
        type: "object",
        description: "Type coercion: fieldName → 'string'|'number'|'boolean'|'array'|'integer'|'json'",
        additionalProperties: { type: "string" },
      },
      outputMap: {
        type: "object",
        description: "Additional output mapping/reshaping after extraction",
        additionalProperties: true,
      },
      outputVariable: {
        type: "string",
        description: "Variable name to store extracted data in ctx.data",
      },
    },
    required: ["source", "fields"],
  },
  async execute(node, ctx) {
    const sourceNodeId = ctx.resolve(node.config?.source || "");
    const sourceField = node.config?.sourceField || "data";

    // Fatal config error: without a source node there is nothing to extract from.
    if (!sourceNodeId) throw new Error("transform.mcp_extract: 'source' node ID is required");

    const sourceOutput = ctx.getNodeOutput(sourceNodeId);
    if (!sourceOutput) {
      // Soft failure — report instead of throwing so conditions can route on it.
      ctx.log(node.id, `Source node "${sourceNodeId}" has no output — using empty object`);
      return { success: false, error: `No output from node "${sourceNodeId}"`, extracted: {} };
    }

    // Get the specific field from the source output.
    // NOTE(review): getMcpAdapter is neither imported nor defined anywhere in this
    // module — as written this line throws a ReferenceError at runtime. Confirm it
    // should be imported (e.g. from definitions.mjs or an MCP adapter module).
    const adapter = await getMcpAdapter();
    let sourceData = sourceField ? adapter.getByPath(sourceOutput, sourceField) : sourceOutput;

    // Fall back to full output if field doesn't exist
    if (sourceData === undefined) {
      sourceData = sourceOutput;
    }

    // Extract fields per the configured dot-paths/pointers, with defaults + coercions.
    const extractConfig = {
      root: node.config?.root,
      fields: node.config?.fields || {},
      defaults: node.config?.defaults || {},
      types: node.config?.types || {},
    };

    const extracted = adapter.extractMcpOutput(sourceData, extractConfig);
    ctx.log(node.id, `Extracted ${Object.keys(extracted).length} field(s) from "${sourceNodeId}"`);

    // Optional output mapping; extracted keys are also spread top-level for easy templating.
    let finalOutput = { success: true, extracted, ...extracted };

    if (node.config?.outputMap) {
      const mapped = adapter.mapOutputFields(finalOutput, node.config.outputMap, ctx);
      finalOutput = { ...finalOutput, mapped, ...mapped };
    }

    // Optionally persist the result into shared workflow data under a named variable.
    if (node.config?.outputVariable) {
      ctx.data[node.config.outputVariable] = finalOutput;
    }

    return finalOutput;
  },
});
|
|
343
|
+
|
|
344
|
+
// ═══════════════════════════════════════════════════════════════════════════
|
|
345
|
+
// TASK LIFECYCLE — Workflow-first task execution primitives
|
|
346
|
+
//
|
|
347
|
+
// These node types decompose the monolithic TaskExecutor.executeTask() flow
|
|
348
|
+
// into composable DAG nodes, enabling the full task lifecycle to run as a
|
|
349
|
+
// native workflow (template-task-lifecycle).
|
|
350
|
+
//
|
|
351
|
+
// Every node follows the contract:
|
|
352
|
+
// execute(node, ctx, engine) → { success: boolean, ... }
|
|
353
|
+
// describe() → string
|
|
354
|
+
// schema → JSON Schema with required[] where applicable
|
|
355
|
+
//
|
|
356
|
+
// Design principles:
|
|
357
|
+
// 1. Idempotent cleanup — release nodes are safe on double-call
|
|
358
|
+
// 2. Context-first — nodes auto-read ctx.data when config is omitted
|
|
359
|
+
// 3. Rich return values — every return contains enough info for conditions
|
|
360
|
+
// 4. Error boundary — nodes never throw unless config is fatally wrong
|
|
361
|
+
// ═══════════════════════════════════════════════════════════════════════════
|
|
362
|
+
|
|
363
|
+
/** Module-scope lazy caches for task lifecycle imports. */
let _taskClaimsMod = null; // ../../task/task-claims.mjs namespace, loaded on first use
let _taskClaimsInitPromise = null; // in-flight/completed initTaskClaims() promise; reset to null on failure
let _taskComplexityMod = null; // ../../task/task-complexity.mjs
let _kanbanAdapterMod = null; // ../../kanban/kanban-adapter.mjs
let _agentPoolMod = null; // ../../agent/agent-pool.mjs
let _gitSafetyMod = null; // ../../git/git-safety.mjs
let _diffStatsMod = null; // ../../git/diff-stats.mjs
|
|
371
|
+
|
|
372
|
+
/** Lazily import and memoize the task-claims module. */
async function ensureTaskClaimsMod() {
  if (_taskClaimsMod === null) {
    _taskClaimsMod = await import("../../task/task-claims.mjs");
  }
  return _taskClaimsMod;
}
|
|
376
|
+
/** Return the first candidate that is a non-empty string after trimming, else "". */
function pickTaskString(...values) {
  for (const candidate of values) {
    const text = String(candidate || "").trim();
    if (text !== "") return text;
  }
  return "";
}
|
|
383
|
+
/**
 * Derive a git branch name for a task.
 * Prefers an explicitly configured branch (task.branch / branchName / meta / metadata),
 * otherwise generates `task/<id up to 12 alnum chars>-<title slug up to 48 chars>`.
 */
function deriveTaskBranch(task = {}) {
  // Local equivalent of pickTaskString: first trimmed non-empty candidate wins.
  const firstNonEmpty = (...candidates) => {
    for (const candidate of candidates) {
      const text = String(candidate || "").trim();
      if (text) return text;
    }
    return "";
  };

  const explicitBranch = firstNonEmpty(
    task?.branch,
    task?.branchName,
    task?.meta?.branch,
    task?.metadata?.branch,
  );
  if (explicitBranch) return explicitBranch;

  // Generated name: alphanumeric-only ID prefix plus a hyphenated title slug.
  const idPart = firstNonEmpty(task?.id, task?.task_id).replace(/[^a-zA-Z0-9]/g, "").slice(0, 12);
  const slug = firstNonEmpty(task?.title, "task")
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, "-")
    .replace(/^-+|-+$/g, "")
    .slice(0, 48);
  const suffix = slug || "task";
  return idPart ? `task/${idPart}-${suffix}` : `task/${suffix}`;
}
|
|
401
|
+
/** True when value looks like an absolute path: POSIX "/", backslash/UNC "\\", or "C:\" / "C:/". */
function looksLikeFilesystemPath(value) {
  const text = String(value || "").trim();
  if (text.startsWith("/") || text.startsWith("\\")) return true;
  return /^[a-zA-Z]:[\\/]/.test(text);
}
|
|
405
|
+
/**
 * Locate the on-disk git root for a task's repository ("org/name") relative to
 * the current repo root.
 *
 * Resolution order:
 *   1. If currentRepoRoot lives inside a `.bosun/workspaces/` mirror, infer the
 *      original checkout above the mirror and use it when it has a `.git`.
 *   2. Otherwise probe sibling-directory and workspace-mirror candidates.
 *
 * @param {string} taskRepository - Repository slug, e.g. "org/repo".
 * @param {string} currentRepoRoot - Absolute path of the current repo root.
 * @returns {string} Absolute path containing `.git`, or "" when none found.
 */
function resolveTaskRepositoryRoot(taskRepository, currentRepoRoot) {
  const repository = String(taskRepository || "").trim();
  const repoRoot = String(currentRepoRoot || "").trim();
  if (!repository || !repoRoot) return "";
  const repoName = repository.split("/").pop();
  if (!repoName) return "";
  // Normalize to forward slashes so the mirror-token search works on Windows paths too.
  const normalizedRepoRoot = repoRoot.replace(/\\/g, "/");
  const mirrorToken = "/.bosun/workspaces/";
  if (normalizedRepoRoot.includes(mirrorToken)) {
    const prefix = normalizedRepoRoot.slice(0, normalizedRepoRoot.indexOf(mirrorToken));
    const prefixName = String(prefix.split("/").filter(Boolean).pop() || "").toLowerCase();
    // If the directory above the mirror already IS the repo, use it directly;
    // otherwise assume the repo is a child of that directory.
    const inferredRepoRoot = prefixName === String(repoName).toLowerCase()
      ? prefix
      : resolve(prefix, repoName);
    try {
      if (existsSync(resolve(inferredRepoRoot, ".git"))) return inferredRepoRoot;
    } catch {
      // ignore invalid inferred path
    }
  }
  // Probe likely checkout locations. A candidate built from an empty
  // BOSUN_WORKSPACE is kept as-is; resolve() normalizes the empty segment away.
  const candidates = [
    resolve(repoRoot, "..", repoName),
    resolve(repoRoot, ".bosun", "workspaces", String(process.env.BOSUN_WORKSPACE || "").trim(), repoName),
  ];
  for (const candidate of candidates) {
    try {
      if (existsSync(resolve(candidate, ".git"))) return candidate;
    } catch {
      // ignore invalid candidate
    }
  }
  return "";
}
|
|
441
|
+
/**
 * Initialize the task-claims subsystem exactly once per process.
 * The init promise is cached module-wide; a failed init clears the cache so a
 * later call can retry. No-op when the claims module lacks initTaskClaims.
 */
async function ensureTaskClaimsInitialized(ctx, claims) {
  if (typeof claims?.initTaskClaims !== "function") return;
  if (!_taskClaimsInitPromise) {
    // Best available repo root: explicit ctx data, then workspace, then cwd.
    const repoRoot = pickTaskString(ctx?.data?.repoRoot, ctx?.data?.workspace, process.cwd());
    const initPromise = claims.initTaskClaims({ repoRoot }).catch((err) => {
      // Drop the cached promise so the next caller retries initialization.
      _taskClaimsInitPromise = null;
      throw err;
    });
    _taskClaimsInitPromise = initPromise;
  }
  await _taskClaimsInitPromise;
}
|
|
456
|
+
/** Lazily import and memoize the task-complexity module. */
async function ensureTaskComplexityMod() {
  if (_taskComplexityMod === null) {
    _taskComplexityMod = await import("../../task/task-complexity.mjs");
  }
  return _taskComplexityMod;
}
|
|
460
|
+
/** Lazily import and memoize the kanban-adapter module. */
async function ensureKanbanAdapterMod() {
  if (_kanbanAdapterMod === null) {
    _kanbanAdapterMod = await import("../../kanban/kanban-adapter.mjs");
  }
  return _kanbanAdapterMod;
}
|
|
464
|
+
/** Lazily import and memoize the agent-pool module. */
async function ensureAgentPoolMod() {
  if (_agentPoolMod === null) {
    _agentPoolMod = await import("../../agent/agent-pool.mjs");
  }
  return _agentPoolMod;
}
|
|
468
|
+
/** Lazily import and memoize the git-safety module. */
async function ensureGitSafetyMod() {
  if (_gitSafetyMod === null) {
    _gitSafetyMod = await import("../../git/git-safety.mjs");
  }
  return _gitSafetyMod;
}
|
|
472
|
+
/** Lazily import and memoize the diff-stats module. */
async function ensureDiffStatsMod() {
  if (_diffStatsMod === null) {
    _diffStatsMod = await import("../../git/diff-stats.mjs");
  }
  return _diffStatsMod;
}
|
|
476
|
+
// Lazy cache + loader for the task-store module.
let _taskStoreMod = null;
async function ensureTaskStoreMod() {
  if (_taskStoreMod === null) {
    _taskStoreMod = await import("../../task/task-store.mjs");
  }
  return _taskStoreMod;
}
|
|
481
|
+
|
|
482
|
+
/**
 * Normalize a can-start guard result into a consistent object shape.
 *
 * Accepts either a bare boolean or a partial result object and always returns
 * the full set of keys so downstream consumers never see `undefined` fields.
 * (Fix: the boolean branch previously omitted sprintOrderMode /
 * sprintTaskOrderMode, giving the two branches different shapes.)
 *
 * @param {boolean|object} raw - Raw guard result.
 * @returns {{canStart: boolean, reason: string, blockingTaskIds: string[],
 *   missingDependencyTaskIds: string[], blockingSprintIds: string[],
 *   blockingEpicIds: string[], sprintOrderMode: any, sprintTaskOrderMode: any}}
 */
function normalizeCanStartGuardResult(raw) {
  if (typeof raw === "boolean") {
    return {
      canStart: raw,
      reason: raw ? "ok" : "blocked",
      blockingTaskIds: [],
      missingDependencyTaskIds: [],
      blockingSprintIds: [],
      blockingEpicIds: [],
      sprintOrderMode: null,
      sprintTaskOrderMode: null,
    };
  }
  const data = raw && typeof raw === "object" ? raw : {};
  // Only an explicit `canStart: false` blocks; anything else (missing, truthy) allows.
  const canStart = data.canStart !== false;
  return {
    canStart,
    // Trimmed caller-supplied reason, falling back to "ok"/"blocked" when empty.
    reason: String(data.reason || (canStart ? "ok" : "blocked")).trim() || (canStart ? "ok" : "blocked"),
    blockingTaskIds: Array.isArray(data.blockingTaskIds) ? data.blockingTaskIds : [],
    missingDependencyTaskIds: Array.isArray(data.missingDependencyTaskIds) ? data.missingDependencyTaskIds : [],
    blockingSprintIds: Array.isArray(data.blockingSprintIds) ? data.blockingSprintIds : [],
    blockingEpicIds: Array.isArray(data.blockingEpicIds) ? data.blockingEpicIds : [],
    sprintOrderMode: data.sprintOrderMode || null,
    sprintTaskOrderMode: data.sprintTaskOrderMode || null,
  };
}
|
|
506
|
+
/** Resolve a config value, falling back to ctx.data, then defaultVal. */
function cfgOrCtx(node, ctx, key, defaultVal = "") {
  // Explicit node config wins and is template-resolved against the context.
  const fromConfig = node.config?.[key];
  if (fromConfig != null && fromConfig !== "") return ctx.resolve(String(fromConfig));
  // Otherwise read the shared workflow data bag verbatim (stringified).
  const fromContext = ctx.data?.[key];
  return fromContext != null && fromContext !== "" ? String(fromContext) : defaultVal;
}
|
|
514
|
+
|
|
515
|
+
/**
 * Return the per-context runtime-state bag, creating it on first access.
 * A non-object ctx yields a throwaway empty object (nothing to persist on).
 */
function getWorkflowRuntimeState(ctx) {
  if (!ctx || typeof ctx !== "object") return {};
  const current = ctx.__workflowRuntimeState;
  if (!current || typeof current !== "object") {
    ctx.__workflowRuntimeState = {};
  }
  return ctx.__workflowRuntimeState;
}
|
|
522
|
+
|
|
523
|
+
/** True when the text still contains a non-empty {{placeholder}} that was never substituted. */
function isUnresolvedTemplateToken(value) {
  const text = String(value || "");
  return /{{[^{}]+}}/.test(text);
}
|
|
526
|
+
|
|
527
|
+
/**
 * Normalize a candidate git ref: trim it, and reject empties, unresolved
 * {{template}} tokens, and the stringified nil values "null"/"undefined".
 * Returns the trimmed ref or "".
 */
function normalizeGitRefValue(value) {
  const text = String(value ?? "").trim();
  if (!text) return "";
  // Inline unresolved-template check (same regex as isUnresolvedTemplateToken).
  if (/{{[^{}]+}}/.test(text)) return "";
  const lowered = text.toLowerCase();
  return lowered === "null" || lowered === "undefined" ? "" : text;
}
|
|
534
|
+
|
|
535
|
+
/** Return the first candidate that normalizes to a usable git ref, else "". */
function pickGitRef(...candidates) {
  let picked = "";
  candidates.some((candidate) => {
    picked = normalizeGitRefValue(candidate);
    return picked !== "";
  });
  return picked;
}
|
|
542
|
+
|
|
543
|
+
/**
 * Build a compact one-line summary of an execSync-style error:
 * stderr, stdout, and message (non-empty parts, in that order) joined with
 * " | ", then trimmed to 420 chars via trimLogText.
 */
function formatExecSyncError(err) {
  if (!err) return "unknown error";
  const parts = [];
  for (const chunk of [err?.stderr, err?.stdout, err?.message]) {
    const text = String(chunk || "").trim();
    if (text) parts.push(text);
  }
  const detail = parts.join(" | ");
  return trimLogText(detail || String(err?.message || err), 420);
}
|
|
551
|
+
|
|
552
|
+
/** True when the error text indicates the branch/worktree already exists or is checked out. */
function isExistingBranchWorktreeError(err) {
  const message = formatExecSyncError(err).toLowerCase();
  return ["already exists", "is already checked out"].some((phrase) => message.includes(phrase));
}
|
|
556
|
+
|
|
557
|
+
/**
 * Anti-thrash state — module-scope to survive across workflow runs.
 * Mirrors TaskExecutor._noCommitCounts / _skipUntil / _completedWithPR.
 */
const _noCommitCounts = new Map(); // per-task count of agent runs that produced no commits
const _skipUntil = new Map(); // per-task timestamp before which the task is skipped (cooldown)
const _completedWithPR = new Set(); // task IDs already completed with an attached PR
const MAX_NO_COMMIT_ATTEMPTS = 3; // attempts allowed before a cooldown is imposed
const NO_COMMIT_BASE_COOLDOWN_MS = 15 * 60 * 1000; // 15 min
const NO_COMMIT_MAX_COOLDOWN_MS = 2 * 60 * 60 * 1000; // 2 hours
// Env flag (1/true/yes/on, case-insensitive): treat a missing task as a hard
// start-guard failure instead of letting the run proceed.
const STRICT_START_GUARD_MISSING_TASK = /^(1|true|yes|on)$/i.test(
  String(process.env.BOSUN_STRICT_START_GUARD_MISSING_TASK || "").trim(),
);
|
|
570
|
+
|
|
571
|
+
// ── trigger.task_available ──────────────────────────────────────────────────
|
|
572
|
+
|
|
573
|
+
|
|
574
|
+
|
|
575
|
+
|
|
576
|
+
|
|
577
|
+
|
|
578
|
+
|
|
579
|
+
// Shared helpers and guard/runtime utilities used by sibling workflow-node modules.
export {
  _completedWithPR,
  cfgOrCtx,
  ensureKanbanAdapterMod,
  ensureTaskClaimsInitialized,
  ensureTaskClaimsMod,
  ensureTaskStoreMod,
  formatExecSyncError,
  getWorkflowRuntimeState,
  isExistingBranchWorktreeError,
  isUnresolvedTemplateToken,
  normalizeCanStartGuardResult,
  normalizeGitRefValue,
  pickGitRef,
  STRICT_START_GUARD_MISSING_TASK,
};

// Anti-thrash bookkeeping (no-commit attempt counters and cooldowns).
export {
  _noCommitCounts,
  _skipUntil,
  MAX_NO_COMMIT_ATTEMPTS,
};

// Task identity / repository-path helpers.
export {
  deriveTaskBranch,
  looksLikeFilesystemPath,
  pickTaskString,
  resolveTaskRepositoryRoot,
};

// Remaining lazy module loaders.
export {
  ensureAgentPoolMod,
  ensureTaskComplexityMod,
};
|