@geekbeer/minion 2.23.0 → 2.32.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/core/lib/platform.js +117 -0
- package/{routes → core/routes}/health.js +1 -1
- package/{routes → core/routes}/routines.js +44 -4
- package/{routes → core/routes}/skills.js +3 -3
- package/{routes → core/routes}/workflows.js +4 -4
- package/{chat-store.js → core/stores/chat-store.js} +1 -1
- package/{execution-store.js → core/stores/execution-store.js} +1 -1
- package/{routine-store.js → core/stores/routine-store.js} +1 -1
- package/{workflow-store.js → core/stores/workflow-store.js} +1 -1
- package/{minion-cli.sh → linux/minion-cli.sh} +245 -4
- package/{routes → linux/routes}/chat.js +3 -3
- package/{routes → linux/routes}/commands.js +1 -1
- package/{routes → linux/routes}/config.js +3 -3
- package/{routes → linux/routes}/directives.js +5 -5
- package/{routes → linux/routes}/files.js +2 -2
- package/{routes → linux/routes}/terminal.js +2 -2
- package/{routine-runner.js → linux/routine-runner.js} +4 -4
- package/{server.js → linux/server.js} +71 -36
- package/{workflow-runner.js → linux/workflow-runner.js} +4 -4
- package/package.json +16 -20
- package/win/bin/hq-win.js +18 -0
- package/win/bin/hq.ps1 +108 -0
- package/win/bin/minion-cli-win.js +20 -0
- package/win/lib/llm-checker.js +115 -0
- package/win/lib/log-manager.js +119 -0
- package/win/lib/process-manager.js +112 -0
- package/win/minion-cli.ps1 +869 -0
- package/win/routes/chat.js +280 -0
- package/win/routes/commands.js +101 -0
- package/win/routes/config.js +227 -0
- package/win/routes/directives.js +136 -0
- package/win/routes/files.js +283 -0
- package/win/routes/terminal.js +316 -0
- package/win/routine-runner.js +324 -0
- package/win/server.js +230 -0
- package/win/terminal-server.js +234 -0
- package/win/workflow-runner.js +380 -0
- package/routes/index.js +0 -106
- /package/{api.js → core/api.js} +0 -0
- /package/{config.js → core/config.js} +0 -0
- /package/{lib → core/lib}/auth.js +0 -0
- /package/{lib → core/lib}/llm-checker.js +0 -0
- /package/{lib → core/lib}/log-manager.js +0 -0
- /package/{routes → core/routes}/auth.js +0 -0
- /package/{bin → linux/bin}/hq +0 -0
- /package/{lib → linux/lib}/process-manager.js +0 -0
- /package/{terminal-proxy.js → linux/terminal-proxy.js} +0 -0
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Windows Workflow Runner
|
|
3
|
+
*
|
|
4
|
+
* Manages cron-based workflow execution using node-pty instead of tmux.
|
|
5
|
+
* Drop-in replacement for workflow-runner.js on Windows.
|
|
6
|
+
*
|
|
7
|
+
* Key differences from Linux version:
|
|
8
|
+
* - Uses node-pty to spawn CLI sessions (replaces tmux)
|
|
9
|
+
* - Exit codes captured via onExit callback (no file polling)
|
|
10
|
+
* - Output logged via onData handler (replaces tmux pipe-pane)
|
|
11
|
+
* - Sessions tracked in-memory via activeSessions Map
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
const { Cron } = require('croner')
|
|
15
|
+
const crypto = require('crypto')
|
|
16
|
+
const path = require('path')
|
|
17
|
+
const fs = require('fs').promises
|
|
18
|
+
const fsSync = require('fs')
|
|
19
|
+
|
|
20
|
+
const { config } = require('../core/config')
|
|
21
|
+
const executionStore = require('../core/stores/execution-store')
|
|
22
|
+
const workflowStore = require('../core/stores/workflow-store')
|
|
23
|
+
const logManager = require('./lib/log-manager')
|
|
24
|
+
const { MARKER_DIR, buildExtendedPath } = require('../core/lib/platform')
|
|
25
|
+
|
|
26
|
+
// Active cron jobs keyed by workflow ID
|
|
27
|
+
const activeJobs = new Map()
|
|
28
|
+
|
|
29
|
+
// Currently running executions
|
|
30
|
+
const runningExecutions = new Map()
|
|
31
|
+
|
|
32
|
+
/**
|
|
33
|
+
* Active pty sessions keyed by session name.
|
|
34
|
+
* Shared with terminal-routes.js for session listing/capture.
|
|
35
|
+
* @type {Map<string, {pty: object, buffer: string, logStream: object|null, completed: boolean, exitCode: number|null}>}
|
|
36
|
+
*/
|
|
37
|
+
const activeSessions = new Map()
|
|
38
|
+
|
|
39
|
+
/**
 * Pause for the given number of milliseconds.
 * NOTE(review): not referenced anywhere in this module — confirm whether
 * it is still needed before removing.
 * @param {number} ms - Delay in milliseconds.
 * @returns {Promise<void>} Resolves once the delay has elapsed.
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms)
  })
}
|
|
42
|
+
|
|
43
|
+
/**
 * Derive a deterministic pty session name from workflow/execution IDs.
 * Format: `wf-<first 8 chars of workflowId>-<first 4 chars of executionId>`.
 * Falls back to "manual" when no workflow ID is given, and omits the
 * execution suffix when no execution ID is given.
 * @param {string|null|undefined} workflowId
 * @param {string|null|undefined} executionId
 * @returns {string} Session name.
 */
function generateSessionName(workflowId, executionId) {
  const wfPart = workflowId ? workflowId.slice(0, 8) : 'manual'
  const execPart = executionId ? executionId.slice(0, 4) : ''
  if (execPart) {
    return `wf-${wfPart}-${execPart}`
  }
  return `wf-${wfPart}`
}
|
|
48
|
+
|
|
49
|
+
/**
 * Persist a JSON marker file describing a session under MARKER_DIR.
 * Best-effort: any failure is logged and swallowed so marker bookkeeping
 * can never break workflow execution.
 * @param {string} sessionName - Session the marker belongs to.
 * @param {object} data - JSON-serializable marker payload.
 */
async function writeMarkerFile(sessionName, data) {
  try {
    await fs.mkdir(MARKER_DIR, { recursive: true })
    const markerPath = path.join(MARKER_DIR, `${sessionName}.json`)
    await fs.writeFile(markerPath, JSON.stringify(data, null, 2), 'utf-8')
    console.log(`[WorkflowRunner] Wrote marker file: ${markerPath}`)
  } catch (err) {
    console.error(`[WorkflowRunner] Failed to write marker file: ${err.message}`)
  }
}
|
|
59
|
+
|
|
60
|
+
/**
 * Delete the marker file for a finished session.
 * Missing files (or any other unlink error) are silently ignored.
 * @param {string} sessionName
 */
async function cleanupMarkerFile(sessionName) {
  try {
    const markerPath = path.join(MARKER_DIR, `${sessionName}.json`)
    await fs.unlink(markerPath)
    console.log(`[WorkflowRunner] Cleaned up marker file: ${markerPath}`)
  } catch {
    // Marker already gone (or not removable) -- nothing to do.
  }
}
|
|
67
|
+
|
|
68
|
+
/**
 * Resolve a node-pty implementation at runtime (it is an optionalDependency).
 * Tries the prebuilt-binary package first so users do not need native build
 * tools, then falls back to the source-compiled package.
 * @returns {object} The loaded node-pty module.
 * @throws {Error} When neither package can be loaded.
 */
function loadNodePty() {
  const candidates = ['node-pty-prebuilt-multiarch', 'node-pty']
  for (const moduleName of candidates) {
    try {
      return require(moduleName)
    } catch {
      // Not installed or failed to load -- try the next candidate.
    }
  }
  throw new Error(
    'node-pty is required for Windows workflow execution. ' +
    'Install with: npm install node-pty-prebuilt-multiarch'
  )
}
|
|
82
|
+
|
|
83
|
+
/**
 * Execute a workflow's skills in one node-pty session.
 *
 * All skills run sequentially inside a single CLI session so context is
 * preserved between them. This function resolves (never rejects) with a
 * result object; the marker file is always cleaned up on the way out.
 *
 * @param {object} workflow - Workflow record ({ id, name, ... }).
 * @param {string} executionId - Unique execution identifier.
 * @param {string[]} skillNames - Skill names to run, in order.
 * @param {object} [options] - Optional tweaks: `role` (run under a named
 *   role), `revisionFeedback` (reviewer feedback to address),
 *   `skipExecutionReport` (omit the trailing /execution-report step).
 * @returns {Promise<{success: boolean, error?: string, sessionName: string}>}
 */
async function executeWorkflowSession(workflow, executionId, skillNames, options = {}) {
  const pty = loadNodePty()
  const homeDir = config.HOME_DIR
  const sessionName = generateSessionName(workflow.id, executionId)

  // Build prompt
  const skillCommands = skillNames.map(name => `/${name}`).join(', then ')

  const rolePrefix = options.role
    ? `You are acting as the "${options.role}" role in this session. Read ~/.minion/roles/${options.role}.md for your role guidelines before proceeding.\n\n`
    : ''

  const revisionContext = options.revisionFeedback
    ? `## Revision Feedback\nThe reviewer requested changes to your previous output. Address the following feedback:\n${options.revisionFeedback}\n\n`
    : ''

  const prompt = options.skipExecutionReport
    ? `${rolePrefix}${revisionContext}Run the following skills in order: ${skillCommands}.`
    : `${rolePrefix}${revisionContext}Run the following skills in order: ${skillCommands}. After completing all skills, run /execution-report to report the results.`

  const extendedPath = buildExtendedPath(homeDir)
  const logFile = logManager.getLogPath(executionId)

  console.log(`[WorkflowRunner] Executing workflow: ${workflow.name}`)
  console.log(`[WorkflowRunner] Skills: ${skillNames.join(' -> ')} -> execution-report`)
  console.log(`[WorkflowRunner] Session: ${sessionName}`)
  console.log(`[WorkflowRunner] Log file: ${logFile}`)
  console.log(`[WorkflowRunner] HOME: ${homeDir}`)

  try {
    await logManager.ensureLogDir()
    await logManager.pruneOldLogs()

    // Kill any stale session with the same name before starting a new one.
    const existing = activeSessions.get(sessionName)
    if (existing && existing.pty) {
      try { existing.pty.kill() } catch { /* ignore */ }
      activeSessions.delete(sessionName)
    }

    // Write marker file BEFORE starting session so the session can be
    // associated with this execution even if we crash mid-run.
    await writeMarkerFile(sessionName, {
      execution_id: executionId,
      workflow_id: workflow.id,
      workflow_name: workflow.name,
      skill_names: skillNames,
      started_at: new Date().toISOString(),
    })

    // Build the LLM command
    if (!config.LLM_COMMAND) {
      throw new Error('LLM_COMMAND is not configured. Set LLM_COMMAND in minion.env')
    }
    // NOTE(review): doubling single quotes is PowerShell-style escaping, but
    // the default shell below is cmd.exe (`COMSPEC`) -- confirm quoting is
    // correct for prompts containing quotes when cmd is used.
    const escapedPrompt = prompt.replace(/'/g, "''")
    const llmCommand = config.LLM_COMMAND.replace(/\{prompt\}/g, escapedPrompt)

    // Build environment: point HOME/USERPROFILE at the minion home dir and
    // expose execution metadata to the spawned CLI.
    const env = {
      ...process.env,
      HOME: homeDir,
      USERPROFILE: homeDir,
      PATH: extendedPath,
      MINION_EXECUTION_ID: executionId,
      MINION_WORKFLOW_ID: workflow.id,
      MINION_WORKFLOW_NAME: workflow.name,
    }

    // Open log file for streaming writes
    const logDir = path.dirname(logFile)
    await fs.mkdir(logDir, { recursive: true })
    const logStream = fsSync.createWriteStream(logFile, { flags: 'a' })

    // Spawn pty process: cmd.exe takes /c, anything else (PowerShell) -Command.
    const shell = process.env.COMSPEC || 'cmd.exe'
    const shellArgs = shell.toLowerCase().includes('cmd') ? ['/c', llmCommand] : ['-Command', llmCommand]

    const ptyProcess = pty.spawn(shell, shellArgs, {
      name: 'xterm-256color',
      cols: 200,
      rows: 50,
      cwd: homeDir,
      env,
    })

    // Track session so terminal routes can list/capture it.
    const session = {
      pty: ptyProcess,
      buffer: '',
      logStream,
      completed: false,
      exitCode: null,
      startedAt: new Date().toISOString(),
    }
    activeSessions.set(sessionName, session)

    console.log(`[WorkflowRunner] Started pty session: ${sessionName} (PID: ${ptyProcess.pid})`)

    // Wait for completion. `new Promise` is the right tool here: we are
    // adapting node-pty's callback API (onData/onExit) to a promise.
    return await new Promise((resolve) => {
      const timeout = 60 * 60 * 1000 // 60 minutes
      const timer = setTimeout(() => {
        console.error(`[WorkflowRunner] Workflow ${workflow.name} timed out after 60 minutes`)
        try { ptyProcess.kill() } catch { /* ignore */ }
        resolve({ success: false, error: 'Execution timeout (60 minutes)', sessionName })
      }, timeout)

      ptyProcess.onData((data) => {
        // FIX: the previous version also appended every chunk to a local
        // `outputBuffer` that was never read and never capped -- unbounded
        // memory growth over a long run. Only the capped session buffer
        // and the log stream are needed.
        session.buffer += data
        // Cap buffer at 1MB to prevent memory issues
        if (session.buffer.length > 1024 * 1024) {
          session.buffer = session.buffer.slice(-512 * 1024)
        }
        // Write to log file
        try { logStream.write(data) } catch { /* ignore */ }
      })

      ptyProcess.onExit(({ exitCode }) => {
        clearTimeout(timer)
        session.completed = true
        session.exitCode = exitCode

        // Close log stream
        try { logStream.end() } catch { /* ignore */ }

        if (exitCode === 0) {
          console.log(`[WorkflowRunner] Workflow ${workflow.name} completed successfully`)
          resolve({ success: true, sessionName })
        } else {
          console.error(`[WorkflowRunner] Workflow ${workflow.name} failed with exit code: ${exitCode}`)
          resolve({ success: false, error: `Exit code: ${exitCode}`, sessionName })
        }
      })
    })
  } catch (error) {
    console.error(`[WorkflowRunner] Workflow ${workflow.name} failed: ${error.message}`)
    return { success: false, error: error.message, sessionName }
  } finally {
    await cleanupMarkerFile(sessionName)
  }
}
|
|
229
|
+
|
|
230
|
+
/**
 * Persist an execution record, logging (but swallowing) store errors so
 * bookkeeping failures never abort a running workflow.
 * @param {object} executionData - Execution record to save.
 */
async function saveExecution(executionData) {
  try {
    await executionStore.save(executionData)
  } catch (saveErr) {
    console.error(`[WorkflowRunner] Failed to save execution: ${saveErr.message}`)
  }
}
|
|
237
|
+
|
|
238
|
+
/**
 * Run a workflow end-to-end: record the execution as running, execute the
 * skill pipeline in a pty session, then persist the final status and update
 * the workflow's last-run timestamp.
 *
 * @param {object} workflow - Workflow record ({ id, name,
 *   pipeline_skill_names, ... }).
 * @param {object} [options] - Passed through to executeWorkflowSession; may
 *   also carry a pre-allocated `executionId`.
 * @returns {Promise<{execution_id: string|null, session_name: string|null}>}
 *   IDs for the started execution, or nulls when the pipeline is empty.
 */
async function runWorkflow(workflow, options = {}) {
  const pipelineSkillNames = workflow.pipeline_skill_names || []
  if (pipelineSkillNames.length === 0) {
    console.log(`[WorkflowRunner] Workflow "${workflow.name}" has empty pipeline, skipping`)
    return { execution_id: null, session_name: null }
  }

  const executionId = options.executionId || crypto.randomUUID()
  const startedAt = new Date().toISOString()
  const sessionName = generateSessionName(workflow.id, executionId)

  console.log(`[WorkflowRunner] Starting workflow: ${workflow.name} (execution: ${executionId})`)

  runningExecutions.set(executionId, {
    workflow_name: workflow.name,
    skill_names: pipelineSkillNames,
    started_at: startedAt,
    session_name: sessionName,
  })

  const logFile = logManager.getLogPath(executionId)

  // Fields shared by the initial "running" record and the final record.
  const baseRecord = {
    id: executionId,
    skill_name: pipelineSkillNames.join(' -> '),
    workflow_id: workflow.id,
    workflow_name: workflow.name,
    started_at: startedAt,
    parent_execution_id: null,
    log_file: logFile,
  }

  try {
    await saveExecution({
      ...baseRecord,
      status: 'running',
      outcome: null,
      completed_at: null,
      error_message: null,
    })

    const result = await executeWorkflowSession(workflow, executionId, pipelineSkillNames, options)
    const completedAt = new Date().toISOString()

    await saveExecution({
      ...baseRecord,
      status: result.success ? 'completed' : 'failed',
      outcome: result.success ? null : 'failure',
      completed_at: completedAt,
      error_message: result.error || null,
    })

    await workflowStore.updateLastRun(workflow.id)
  } finally {
    // FIX: previously this delete ran unconditionally at the end, so an
    // uncaught throw (e.g. from workflowStore.updateLastRun) leaked the
    // in-memory entry and getStatus() reported a phantom running execution
    // forever. The finally guarantees cleanup on every path.
    runningExecutions.delete(executionId)
  }

  console.log(`[WorkflowRunner] Completed workflow: ${workflow.name}`)

  return { execution_id: executionId, session_name: sessionName }
}
|
|
298
|
+
|
|
299
|
+
/**
 * (Re)register cron jobs for the given workflows.
 * Any previously registered jobs are stopped first. Inactive workflows are
 * skipped entirely; workflows without a cron expression are tracked (so they
 * can be triggered manually) but get no scheduled job.
 * @param {object[]} workflows - Workflow records to load.
 */
function loadWorkflows(workflows) {
  stopAll()
  let scheduledCount = 0

  for (const workflow of workflows) {
    if (!workflow.is_active) {
      console.log(`[WorkflowRunner] Skipping inactive workflow: ${workflow.name}`)
      continue
    }

    if (!workflow.cron_expression) {
      console.log(`[WorkflowRunner] Workflow "${workflow.name}" has no schedule (manual trigger only)`)
      activeJobs.set(workflow.id, { job: null, workflow })
      continue
    }

    try {
      const onTick = () => {
        // runWorkflow is intentionally not awaited; cron fires and forgets.
        runWorkflow(workflow).catch((err) => {
          console.error(`[WorkflowRunner] Unhandled error in ${workflow.name}: ${err.message}`)
        })
      }
      const cronJob = new Cron(workflow.cron_expression, onTick)
      activeJobs.set(workflow.id, { job: cronJob, workflow })
      scheduledCount += 1
      console.log(`[WorkflowRunner] Registered: ${workflow.name} (${workflow.cron_expression})`)
    } catch (err) {
      console.error(`[WorkflowRunner] Failed to register ${workflow.name}: ${err.message}`)
    }
  }

  console.log(`[WorkflowRunner] Loaded ${workflows.length} workflows, ${scheduledCount} with cron schedule`)
}
|
|
329
|
+
|
|
330
|
+
/**
 * Stop every registered cron job and forget all loaded workflows.
 */
function stopAll() {
  for (const entry of activeJobs.values()) {
    if (entry.job) {
      entry.job.stop()
    }
    console.log(`[WorkflowRunner] Stopped: ${entry.workflow.name}`)
  }
  activeJobs.clear()
}
|
|
337
|
+
|
|
338
|
+
/**
 * Snapshot the runner state.
 * @returns {{active_workflows: number, running_executions: number,
 *   workflows: object[], running: object[]}} Registered workflows (with
 *   next scheduled run, when a cron job exists) and in-flight executions.
 */
function getStatus() {
  const workflows = [...activeJobs].map(([id, { job, workflow }]) => ({
    id,
    name: workflow.name,
    cron_expression: workflow.cron_expression,
    next_run: job?.nextRun()?.toISOString() || null,
  }))

  const running = [...runningExecutions].map(([id, info]) => ({
    execution_id: id,
    workflow_name: info.workflow_name,
    skill_names: info.skill_names,
    session_name: info.session_name,
    started_at: info.started_at,
  }))

  return {
    active_workflows: activeJobs.size,
    running_executions: runningExecutions.size,
    workflows,
    running,
  }
}
|
|
365
|
+
|
|
366
|
+
/**
 * Look up a loaded workflow by its ID.
 * @param {string} workflowId
 * @returns {object|null} The workflow record, or null when not loaded.
 */
function getWorkflowById(workflowId) {
  const entry = activeJobs.get(workflowId)
  if (!entry) {
    return null
  }
  return entry.workflow || null
}
|
|
370
|
+
|
|
371
|
+
module.exports = {
|
|
372
|
+
loadWorkflows,
|
|
373
|
+
stopAll,
|
|
374
|
+
getStatus,
|
|
375
|
+
runWorkflow,
|
|
376
|
+
getWorkflowById,
|
|
377
|
+
generateSessionName,
|
|
378
|
+
activeSessions,
|
|
379
|
+
MARKER_DIR,
|
|
380
|
+
}
|
package/routes/index.js
DELETED
|
@@ -1,106 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Route aggregator for minion agent
|
|
3
|
-
*
|
|
4
|
-
* Registers all route plugins with Fastify.
|
|
5
|
-
*
|
|
6
|
-
* API Overview:
|
|
7
|
-
* ─────────────────────────────────────────────────────────────────────────────
|
|
8
|
-
* Health & Status (routes/health.js)
|
|
9
|
-
* GET /api/health - Health check (no auth)
|
|
10
|
-
* GET /api/status - Get current status (no auth)
|
|
11
|
-
* POST /api/status - Update status (no auth)
|
|
12
|
-
*
|
|
13
|
-
* Commands (routes/commands.js)
|
|
14
|
-
* GET /api/commands - List available commands (auth required)
|
|
15
|
-
* POST /api/command - Execute a whitelisted command (auth required)
|
|
16
|
-
*
|
|
17
|
-
* Skills (routes/skills.js)
|
|
18
|
-
* GET /api/list-skills - List deployed skills (auth required)
|
|
19
|
-
* POST /api/deploy-skill - Deploy a skill (auth required)
|
|
20
|
-
*
|
|
21
|
-
* Workflows (routes/workflows.js)
|
|
22
|
-
* GET /api/workflows - List all workflows with status (auth required)
|
|
23
|
-
* POST /api/workflows - Receive workflows from HQ (auth required)
|
|
24
|
-
* PUT /api/workflows/:id - Update workflow schedule/status (auth required)
|
|
25
|
-
* DELETE /api/workflows/:id - Remove a workflow (auth required)
|
|
26
|
-
* POST /api/workflows/trigger - Manual trigger for a workflow (auth required)
|
|
27
|
-
* GET /api/executions - List execution history (auth required)
|
|
28
|
-
* GET /api/executions/:id - Get single execution details (auth required)
|
|
29
|
-
* GET /api/executions/:id/log - Get execution log file content (auth required)
|
|
30
|
-
* POST /api/executions/:id/outcome - Update execution outcome (no auth, local only)
|
|
31
|
-
*
|
|
32
|
-
* Terminal (routes/terminal.js)
|
|
33
|
-
* GET /api/terminal/sessions - List tmux sessions (auth required)
|
|
34
|
-
* GET /api/terminal/capture - Capture pane content (auth required)
|
|
35
|
-
* POST /api/terminal/send - Send keys to session (auth required)
|
|
36
|
-
* POST /api/terminal/kill - Kill a session (auth required)
|
|
37
|
-
*
|
|
38
|
-
* Routines (routes/routines.js)
|
|
39
|
-
* GET /api/routines - List all routines with status (auth required)
|
|
40
|
-
* POST /api/routines - Receive routines from HQ (auth required)
|
|
41
|
-
* POST /api/routines/sync - Pull routines from HQ (auth required)
|
|
42
|
-
* PUT /api/routines/:id/schedule - Update routine schedule (auth required)
|
|
43
|
-
* DELETE /api/routines/:id - Remove a routine (auth required)
|
|
44
|
-
* POST /api/routines/trigger - Manual trigger for a routine (auth required)
|
|
45
|
-
*
|
|
46
|
-
* Files (routes/files.js)
|
|
47
|
-
* GET /api/files - List files in directory (auth required)
|
|
48
|
-
* GET /api/files/* - Download a file (auth required)
|
|
49
|
-
* POST /api/files/* - Upload a file (auth required)
|
|
50
|
-
* DELETE /api/files/* - Delete a file (auth required)
|
|
51
|
-
*
|
|
52
|
-
* Directives (routes/directives.js)
|
|
53
|
-
* POST /api/directive - Receive and execute a temp skill directive (auth required)
|
|
54
|
-
*
|
|
55
|
-
* Auth (routes/auth.js)
|
|
56
|
-
* GET /api/auth/status - Get LLM authentication status (auth required)
|
|
57
|
-
*
|
|
58
|
-
* Chat (routes/chat.js)
|
|
59
|
-
* POST /api/chat - Send message, get SSE stream (auth required)
|
|
60
|
-
* GET /api/chat/session - Get active session (auth required)
|
|
61
|
-
* POST /api/chat/clear - Clear session (auth required)
|
|
62
|
-
*
|
|
63
|
-
* Config (routes/config.js)
|
|
64
|
-
* GET /api/config/backup - Download config files as tar.gz (auth required)
|
|
65
|
-
* GET /api/config/env/:key - Get environment variable value (auth required)
|
|
66
|
-
* PUT /api/config/env - Update an environment variable (auth required, whitelisted keys only)
|
|
67
|
-
* ─────────────────────────────────────────────────────────────────────────────
|
|
68
|
-
*/
|
|
69
|
-
|
|
70
|
-
const { healthRoutes, setOffline } = require('./health')
|
|
71
|
-
const { commandRoutes, getProcessManager, getAllowedCommands } = require('./commands')
|
|
72
|
-
const { skillRoutes } = require('./skills')
|
|
73
|
-
const { workflowRoutes } = require('./workflows')
|
|
74
|
-
const { routineRoutes } = require('./routines')
|
|
75
|
-
const { terminalRoutes } = require('./terminal')
|
|
76
|
-
const { fileRoutes } = require('./files')
|
|
77
|
-
const { directiveRoutes } = require('./directives')
|
|
78
|
-
const { authRoutes } = require('./auth')
|
|
79
|
-
const { chatRoutes } = require('./chat')
|
|
80
|
-
const { configRoutes } = require('./config')
|
|
81
|
-
|
|
82
|
-
/**
|
|
83
|
-
* Register all routes with Fastify instance
|
|
84
|
-
* @param {import('fastify').FastifyInstance} fastify
|
|
85
|
-
*/
|
|
86
|
-
async function registerRoutes(fastify) {
|
|
87
|
-
await fastify.register(healthRoutes)
|
|
88
|
-
await fastify.register(commandRoutes)
|
|
89
|
-
await fastify.register(skillRoutes)
|
|
90
|
-
await fastify.register(workflowRoutes)
|
|
91
|
-
await fastify.register(routineRoutes)
|
|
92
|
-
await fastify.register(terminalRoutes)
|
|
93
|
-
await fastify.register(fileRoutes)
|
|
94
|
-
await fastify.register(directiveRoutes)
|
|
95
|
-
await fastify.register(authRoutes)
|
|
96
|
-
await fastify.register(chatRoutes)
|
|
97
|
-
await fastify.register(configRoutes)
|
|
98
|
-
}
|
|
99
|
-
|
|
100
|
-
module.exports = {
|
|
101
|
-
registerRoutes,
|
|
102
|
-
// Re-export utilities used by server.js
|
|
103
|
-
setOffline,
|
|
104
|
-
getProcessManager,
|
|
105
|
-
getAllowedCommands,
|
|
106
|
-
}
|
/package/{api.js → core/api.js}
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
/package/{bin → linux/bin}/hq
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|