@lovelybunch/api 1.0.69-alpha.1 → 1.0.69-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/lib/git.d.ts +29 -1
- package/dist/lib/git.js +145 -3
- package/dist/lib/jobs/job-runner.d.ts +4 -0
- package/dist/lib/jobs/job-runner.js +126 -15
- package/dist/lib/jobs/job-store.js +6 -0
- package/dist/lib/user-preferences.d.ts +3 -1
- package/dist/routes/api/v1/config/route.d.ts +1 -1
- package/dist/routes/api/v1/config/route.js +45 -1
- package/dist/routes/api/v1/context/knowledge/[filename]/route.js +18 -11
- package/dist/routes/api/v1/context/knowledge/route.js +5 -2
- package/dist/routes/api/v1/git/index.js +115 -4
- package/dist/routes/api/v1/jobs/[id]/route.d.ts +6 -0
- package/dist/routes/api/v1/jobs/[id]/route.js +9 -0
- package/dist/routes/api/v1/jobs/[id]/runs/[runId]/log/route.d.ts +14 -0
- package/dist/routes/api/v1/jobs/[id]/runs/[runId]/log/route.js +64 -0
- package/dist/routes/api/v1/jobs/[id]/runs/[runId]/route.d.ts +28 -0
- package/dist/routes/api/v1/jobs/[id]/runs/[runId]/route.js +39 -0
- package/dist/routes/api/v1/jobs/index.js +4 -0
- package/dist/routes/api/v1/jobs/route.d.ts +6 -0
- package/dist/routes/api/v1/jobs/route.js +3 -0
- package/dist/routes/api/v1/user/settings/route.js +43 -3
- package/package.json +4 -4
- package/static/assets/index-CDMfOGVc.css +33 -0
- package/static/assets/index-DarwWmEe.js +911 -0
- package/static/index.html +2 -2
- package/static/assets/index-DIVD0EVP.css +0 -33
- package/static/assets/index-gdnIvn_s.js +0 -894
package/dist/lib/git.d.ts
CHANGED
@@ -43,7 +43,8 @@ export declare function getCredentialConfig(): Promise<{
     helper?: string;
     origin?: string;
 }>;
-export declare function
+export declare function setRemoteUrl(remoteUrl: string): Promise<void>;
+export declare function storeCredentials(username: string, password: string, remoteUrl?: string): Promise<void>;
 export interface WorktreeInfo {
     name: string;
     path: string;
@@ -63,4 +64,31 @@ export declare function commitInWorktree(name: string, message: string, files?:
 }>;
 export declare function pushWorktree(name: string): Promise<string>;
 export declare function pullWorktree(name: string, strategy?: PullStrategy): Promise<string>;
+export interface CommitFileChange {
+    path: string;
+    status: string;
+    insertions: number;
+    deletions: number;
+}
+export interface CommitDetails {
+    sha: string;
+    message: string;
+    author: {
+        name: string;
+        email: string;
+        date: string;
+    };
+    committer: {
+        name: string;
+        email: string;
+        date: string;
+    };
+    branch?: string;
+    filesChanged: number;
+    insertions: number;
+    deletions: number;
+    files: CommitFileChange[];
+}
+export declare function getCommitDetails(sha: string): Promise<CommitDetails>;
+export declare function getFileDiff(sha: string, filepath: string): Promise<string>;
 export {};

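The hunks above add remote configuration, credential storage, and commit inspection to the git helper's public surface. As a rough orientation only, a consumer sketch; the import path, remote URL, credentials, and SHA are placeholder assumptions, and only the signatures come from the declarations above:

```ts
// Hypothetical usage of the new exports; only the signatures are taken from the diff.
import { setRemoteUrl, storeCredentials, getCommitDetails, getFileDiff } from '@lovelybunch/api/dist/lib/git.js';

async function inspectCommit(sha: string): Promise<void> {
    // Point origin at a remote and persist HTTPS credentials for it.
    await setRemoteUrl('https://example.com/acme/repo.git');
    await storeCredentials('alice', 'app-password', 'https://example.com/acme/repo.git');

    // Fetch commit metadata plus per-file stats, then pull each file's diff.
    const details = await getCommitDetails(sha);
    console.log(`${details.sha.slice(0, 7)} ${details.message}`);
    console.log(`${details.filesChanged} files, +${details.insertions} -${details.deletions}`);
    for (const file of details.files) {
        const diff = await getFileDiff(details.sha, file.path);
        console.log(`${file.status} ${file.path} (+${file.insertions} -${file.deletions})`);
        console.log(diff);
    }
}
```
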
package/dist/lib/git.js
CHANGED
@@ -211,10 +211,41 @@ export async function getCredentialConfig() {
         return {};
     }
 }
-export async function
+export async function setRemoteUrl(remoteUrl) {
+    // Validate the remote URL
+    if (!remoteUrl.trim()) {
+        throw new Error('Remote URL is required');
+    }
+    const trimmed = remoteUrl.trim();
+    // Check if it's a valid URL format
+    if (!trimmed.startsWith('https://') && !trimmed.startsWith('http://') && !trimmed.startsWith('git@')) {
+        throw new Error('Remote URL must start with https://, http://, or git@');
+    }
+    // Check if origin already exists
+    try {
+        await runGit(['config', '--get', 'remote.origin.url']);
+        // If we get here, origin exists, so update it
+        await runGit(['remote', 'set-url', 'origin', trimmed]);
+    }
+    catch {
+        // Origin doesn't exist, add it
+        await runGit(['remote', 'add', 'origin', trimmed]);
+    }
+}
+export async function storeCredentials(username, password, remoteUrl) {
+    // If a remote URL is provided, set it first
+    if (remoteUrl && remoteUrl.trim()) {
+        await setRemoteUrl(remoteUrl);
+    }
     // Get remote URL to determine protocol and host
-
-
+    let remote;
+    try {
+        const { stdout: remoteUrlOutput } = await runGit(['config', '--get', 'remote.origin.url']);
+        remote = remoteUrlOutput.trim();
+    }
+    catch {
+        throw new Error('No git remote configured. Please provide a remote URL.');
+    }
     // Parse the remote URL to extract protocol and host
     let protocol = 'https';
     let host = '';
@@ -366,3 +397,114 @@ export async function pullWorktree(name, strategy = 'rebase') {
     const { stdout } = await runGit(args, { timeout: 30000 }); // 30 second timeout
     return stdout;
 }
+export async function getCommitDetails(sha) {
+    // Get commit metadata
+    const { stdout: metaOutput } = await runGit([
+        'show',
+        '--format=%H%n%an%n%ae%n%aI%n%cn%n%ce%n%cI%n%B',
+        '--no-patch',
+        sha
+    ]);
+    const lines = metaOutput.trim().split('\n');
+    const commitSha = lines[0];
+    const authorName = lines[1];
+    const authorEmail = lines[2];
+    const authorDate = lines[3];
+    const committerName = lines[4];
+    const committerEmail = lines[5];
+    const committerDate = lines[6];
+    const message = lines.slice(7).join('\n').trim();
+    // Get file changes with status
+    const { stdout: filesOutput } = await runGit([
+        'diff-tree',
+        '--no-commit-id',
+        '--name-status',
+        '-r',
+        sha
+    ]);
+    // Get numstat for insertions/deletions per file
+    const { stdout: numstatOutput } = await runGit([
+        'show',
+        '--numstat',
+        '--format=',
+        sha
+    ]);
+    // Parse file changes
+    const fileStatusMap = new Map();
+    filesOutput.trim().split('\n').filter(Boolean).forEach((line) => {
+        const [status, ...pathParts] = line.split('\t');
+        const path = pathParts.join('\t'); // Handle filenames with tabs
+        fileStatusMap.set(path, status);
+    });
+    const files = [];
+    let totalInsertions = 0;
+    let totalDeletions = 0;
+    numstatOutput.trim().split('\n').filter(Boolean).forEach((line) => {
+        const parts = line.split('\t');
+        if (parts.length >= 3) {
+            const insertions = parts[0] === '-' ? 0 : parseInt(parts[0], 10);
+            const deletions = parts[1] === '-' ? 0 : parseInt(parts[1], 10);
+            const path = parts.slice(2).join('\t');
+            const status = fileStatusMap.get(path) || 'M';
+            files.push({
+                path,
+                status,
+                insertions,
+                deletions
+            });
+            totalInsertions += insertions;
+            totalDeletions += deletions;
+        }
+    });
+    // Try to get branch name (may not always be available)
+    let branch;
+    try {
+        const { stdout: branchOutput } = await runGit([
+            'branch',
+            '--contains',
+            sha,
+            '--format=%(refname:short)'
+        ]);
+        const branches = branchOutput.trim().split('\n').filter(Boolean);
+        branch = branches[0]; // Use first branch that contains this commit
+    }
+    catch {
+        // Branch info not available
+    }
+    return {
+        sha: commitSha,
+        message,
+        author: {
+            name: authorName,
+            email: authorEmail,
+            date: authorDate
+        },
+        committer: {
+            name: committerName,
+            email: committerEmail,
+            date: committerDate
+        },
+        branch,
+        filesChanged: files.length,
+        insertions: totalInsertions,
+        deletions: totalDeletions,
+        files
+    };
+}
+export async function getFileDiff(sha, filepath) {
+    const { stdout } = await runGit([
+        'show',
+        `${sha}:${filepath}`,
+        '--',
+        filepath
+    ]);
+    // Get the actual diff for this file
+    const { stdout: diffOutput } = await runGit([
+        'show',
+        '--format=',
+        sha,
+        '--',
+        filepath
+    ]);
+    return diffOutput;
+}

package/dist/lib/jobs/job-runner.d.ts
CHANGED
@@ -11,6 +11,10 @@ export declare class JobRunner {
     constructor();
     private ensureCliAvailable;
     private ensureLogPath;
+    private loadAgentInstructions;
+    private loadMcpConfigs;
+    private writeMcpJson;
+    private cleanupMcpJson;
     private buildInstruction;
     run(job: ScheduledJob, runId: string): Promise<JobRunResult>;
 }

package/dist/lib/jobs/job-runner.js
CHANGED
@@ -18,28 +18,39 @@ function resolveAgent(model) {
         return 'codex';
     return 'claude';
 }
-function buildCommand(agent, instruction,
+function buildCommand(agent, instruction, config) {
     const quotedInstruction = shellQuote(instruction);
+    let mainCommand = '';
     switch (agent) {
         case 'gemini': {
-
-
+            // For non-Claude agents, use the --mcp flag approach if supported
+            const mcpFlags = config.mcpServers && config.mcpServers.length > 0
+                ? config.mcpServers.map(server => `--mcp ${shellQuote(server)}`).join(' ')
+                : '';
+            mainCommand = `gemini --yolo ${mcpFlags} -i ${quotedInstruction}`;
+            break;
         }
         case 'codex': {
-
-            const
-
+            // For non-Claude agents, use the --mcp flag approach if supported
+            const mcpFlags = config.mcpServers && config.mcpServers.length > 0
+                ? config.mcpServers.map(server => `--mcp ${shellQuote(server)}`).join(' ')
+                : '';
+            const baseCmd = `codex ${quotedInstruction} --dangerously-bypass-approvals-and-sandbox ${mcpFlags}`.trim();
+            const needsPseudoTty = config.runningAsRoot && process.platform !== 'win32';
+            mainCommand = needsPseudoTty
                 ? `script -q -e -c ${shellQuote(baseCmd)} /dev/null`
                 : baseCmd;
-
+            break;
         }
         case 'claude':
         default: {
-
-            const
-
+            // Claude uses .mcp.json for MCP server configuration (no --mcp flag)
+            const prefix = config.runningAsRoot ? 'IS_SANDBOX=1 ' : '';
+            mainCommand = `${prefix}claude ${quotedInstruction} --dangerously-skip-permissions`.trim();
+            break;
         }
     }
+    return { command: agent === 'claude' ? 'claude' : agent, shellCommand: mainCommand };
 }
 const CLI_AGENT_LABEL = {
     claude: 'Claude',
@@ -80,18 +91,103 @@ export class JobRunner {
         await fs.mkdir(logsDir, { recursive: true });
         return path.join(logsDir, `${runId}.log`);
     }
-
-
+    async loadAgentInstructions(agentId) {
+        try {
+            const projectRoot = await this.projectRootPromise;
+            const agentPath = path.join(projectRoot, '.nut', 'agents', agentId);
+            const content = await fs.readFile(agentPath, 'utf-8');
+            // Extract content after frontmatter
+            const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
+            if (frontmatterMatch && frontmatterMatch[2]) {
+                return frontmatterMatch[2].trim();
+            }
+            return content.trim();
+        }
+        catch (error) {
+            console.warn(`Failed to load agent ${agentId}:`, error);
+            return null;
+        }
+    }
+    async loadMcpConfigs() {
+        try {
+            const projectRoot = await this.projectRootPromise;
+            const mcpConfigPath = path.join(projectRoot, '.nut', 'mcp', 'config.json');
+            const content = await fs.readFile(mcpConfigPath, 'utf-8');
+            const json = JSON.parse(content);
+            return json.mcpServers || {};
+        }
+        catch (error) {
+            if (error.code !== 'ENOENT') {
+                console.warn('Failed to load MCP config:', error);
+            }
+            return {};
+        }
+    }
+    async writeMcpJson(mcpServers, allMcpConfigs) {
+        const projectRoot = await this.projectRootPromise;
+        const mcpJsonPath = path.join(projectRoot, '.mcp.json');
+        // Filter to only include the requested servers that are enabled
+        const filteredConfigs = {};
+        for (const serverName of mcpServers) {
+            const config = allMcpConfigs[serverName];
+            if (config && config.enabled !== false) {
+                filteredConfigs[serverName] = config;
+            }
+        }
+        const mcpJson = {
+            mcpServers: filteredConfigs
+        };
+        await fs.writeFile(mcpJsonPath, JSON.stringify(mcpJson, null, 2), 'utf-8');
+    }
+    async cleanupMcpJson() {
+        try {
+            const projectRoot = await this.projectRootPromise;
+            const mcpJsonPath = path.join(projectRoot, '.mcp.json');
+            // Reset to empty mcpServers object
+            const emptyMcpJson = {
+                mcpServers: {}
+            };
+            await fs.writeFile(mcpJsonPath, JSON.stringify(emptyMcpJson, null, 2), 'utf-8');
+        }
+        catch (error) {
+            // Don't fail the job if cleanup fails
+            console.warn('Failed to cleanup .mcp.json:', error);
+        }
+    }
+    async buildInstruction(job, agentLabel) {
         const scheduleDescription = job.schedule.type === 'cron'
             ? `Cron: ${job.schedule.expression}`
             : `Interval: every ${job.schedule.hours}h on ${job.schedule.daysOfWeek.join(', ')}`;
-
+        let instruction = `Run this scheduled Coconut job (${job.id}).\nSchedule: ${scheduleDescription}.\nPreferred CLI agent: ${agentLabel}.\n\n`;
+        // If an agent is specified, load and use its instructions
+        if (job.agentId) {
+            const agentInstructions = await this.loadAgentInstructions(job.agentId);
+            if (agentInstructions) {
+                instruction += `Agent Instructions (${job.agentId}):\n${agentInstructions}\n\n`;
+            }
+        }
+        // Add custom instructions (or main prompt if no agent)
+        const customInstructions = job.prompt.trim();
+        if (customInstructions) {
+            const label = job.agentId ? 'Additional Custom Instructions' : 'Instructions';
+            instruction += `${label}:\n${customInstructions}`;
+        }
+        return instruction;
     }
     async run(job, runId) {
         const agent = resolveAgent(job.model);
-        const instruction = this.buildInstruction(job, CLI_AGENT_LABEL[agent] || agent);
+        const instruction = await this.buildInstruction(job, CLI_AGENT_LABEL[agent] || agent);
         const runningAsRoot = typeof process.getuid === 'function' && process.getuid() === 0;
-
+        // Write .mcp.json if MCP servers are specified (Claude reads this from project root)
+        const mcpJsonWritten = job.mcpServers && job.mcpServers.length > 0 && agent === 'claude';
+        if (mcpJsonWritten) {
+            const allMcpConfigs = await this.loadMcpConfigs();
+            await this.writeMcpJson(job.mcpServers, allMcpConfigs);
+        }
+        const { shellCommand } = buildCommand(agent, instruction, {
+            runningAsRoot,
+            mcpServers: job.mcpServers
+        });
         const projectRoot = await this.projectRootPromise;
         const logPath = await this.ensureLogPath(job.id, runId);
         const logStream = createWriteStream(logPath, { flags: 'a' });
@@ -99,6 +195,9 @@ export class JobRunner {
         logStream.write(`[${new Date().toISOString()}] Starting job ${job.id} using ${agent} CLI\n`);
         logStream.write(`Instruction: ${instruction}\n`);
         logStream.write(`Command: ${shellCommand}\n`);
+        if (job.mcpServers && job.mcpServers.length > 0) {
+            logStream.write(`MCP Servers: ${job.mcpServers.join(', ')} (configured via .mcp.json)\n`);
+        }
         return new Promise((resolve) => {
             let cliMissingError = null;
             try {
@@ -111,6 +210,10 @@ export class JobRunner {
                 const message = cliMissingError.message;
                 logStream.write(`${message}\n`);
                 logStream.end();
+                // Cleanup .mcp.json if it was written
+                if (mcpJsonWritten) {
+                    this.cleanupMcpJson().catch(err => console.warn('Cleanup failed:', err));
+                }
                 resolve({
                     status: 'failed',
                     error: message,
@@ -146,6 +249,10 @@ export class JobRunner {
                 logStream.write(`${message}\n`);
                 logStream.end();
                 clearTimeout(abortTimeout);
+                // Cleanup .mcp.json if it was written
+                if (mcpJsonWritten) {
+                    this.cleanupMcpJson().catch(err => console.warn('Cleanup failed:', err));
+                }
                 resolve({
                     status: 'failed',
                     error: message,
@@ -159,6 +266,10 @@ export class JobRunner {
                 logStream.write(`\n[${new Date().toISOString()}] Job ${job.id} completed with exit code ${code}\n`);
                 logStream.end();
                 clearTimeout(abortTimeout);
+                // Cleanup .mcp.json if it was written
+                if (mcpJsonWritten) {
+                    this.cleanupMcpJson().catch(err => console.warn('Cleanup failed:', err));
+                }
                 const summary = summaryChunks.join('');
                 resolve({
                     status,

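Reading the runner changes together: when a job names MCP servers and resolves to the Claude CLI, the runner loads `.nut/mcp/config.json`, keeps only the requested entries that are enabled, writes them to `.mcp.json` in the project root before spawning the CLI, and resets that file after the run (or on failure). A hedged sketch of the data involved; the `github` and `scratch` server names and their commands are invented for illustration:

```ts
// Hypothetical .nut/mcp/config.json contents (shape inferred from loadMcpConfigs/writeMcpJson above).
const nutMcpConfig = {
    mcpServers: {
        github: { command: 'npx', args: ['-y', '@modelcontextprotocol/server-github'], enabled: true },
        scratch: { command: 'node', args: ['scratch-server.js'], enabled: false },
    },
};

// A job that requests only the "github" server.
const job = { mcpServers: ['github'] };

// What writeMcpJson would emit to <projectRoot>/.mcp.json for that job:
// unrequested and disabled entries are filtered out.
const mcpJson = { mcpServers: { github: nutMcpConfig.mcpServers.github } };
console.log(JSON.stringify(mcpJson, null, 2));
```
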
package/dist/lib/jobs/job-store.js
CHANGED
@@ -175,6 +175,9 @@ export class JobStore {
             runs,
             tags: data.tags ?? [],
             contextPaths: data.contextPaths ?? [],
+            agentId: data.agentId,
+            agentIds: data.agentIds,
+            mcpServers: data.mcpServers,
         };
     }
     toFrontmatter(job) {
@@ -206,6 +209,9 @@ export class JobStore {
             })),
             tags: job.tags ?? [],
             contextPaths: job.contextPaths ?? [],
+            agentId: job.agentId,
+            agentIds: job.agentIds,
+            mcpServers: job.mcpServers,
         };
     }
 }

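With the job-store changes, `agentId`, `agentIds`, and `mcpServers` now round-trip through a job's frontmatter alongside the existing fields. A speculative example of a job definition carrying the new fields; the field names come from the diffs above, every value is invented:

```ts
// Illustrative ScheduledJob-like shape; only the field names are taken from the diffs.
const exampleJob = {
    id: 'nightly-digest',
    model: 'claude',
    schedule: { type: 'cron', expression: '0 6 * * *' },
    prompt: 'Summarize yesterday\'s commits and update the knowledge base.',
    tags: ['reporting'],
    contextPaths: ['docs/'],
    agentId: 'release-notes.md',    // resolved against .nut/agents/<agentId> by the runner
    agentIds: ['release-notes.md'],
    mcpServers: ['github'],         // selects entries from .nut/mcp/config.json
};
```
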
package/dist/lib/user-preferences.d.ts
CHANGED
@@ -1,8 +1,10 @@
+import type { AgentRoleId } from '@lovelybunch/types';
 export interface UserProfile {
     firstName?: string;
     lastName?: string;
     email?: string;
-    role?:
+    role?: AgentRoleId;
+    roleDescription?: string;
 }
 export interface UserPreferences {
     theme?: 'light' | 'dark' | 'coconut' | 'system';

package/dist/routes/api/v1/config/route.d.ts
CHANGED
@@ -16,7 +16,7 @@ export declare function PUT(c: Context): Promise<(Response & import("hono").Type
     error: string;
 }, 500, "json">) | (Response & import("hono").TypedResponse<{
     error: string;
-},
+}, 404, "json">)>;
 export declare function TEST(c: Context): Promise<(Response & import("hono").TypedResponse<{
     success: false;
     message: string;

package/dist/routes/api/v1/config/route.js
CHANGED
@@ -124,7 +124,51 @@ export async function PUT(c) {
         }
     }
     else {
-
+        // Update project config (coconut object)
+        try {
+            const body = await c.req.json();
+            const { coconut } = body;
+            if (!coconut) {
+                return c.json({ error: 'Missing coconut object in request body' }, 400);
+            }
+            const configPath = await getConfigPath();
+            if (!configPath) {
+                return c.json({ error: 'GAIT directory not found' }, 404);
+            }
+            // Load existing config
+            let config = {};
+            try {
+                const content = await fs.readFile(configPath, 'utf-8');
+                config = JSON.parse(content);
+            }
+            catch (error) {
+                if (error.code === 'ENOENT') {
+                    config = {};
+                }
+                else {
+                    throw error;
+                }
+            }
+            // Ensure .nut directory exists
+            const configDir = path.dirname(configPath);
+            await fs.mkdir(configDir, { recursive: true });
+            // Merge coconut object with existing config
+            config.coconut = {
+                ...config.coconut,
+                ...coconut,
+            };
+            // Save config
+            await fs.writeFile(configPath, JSON.stringify(config, null, 2), 'utf-8');
+            return c.json({
+                success: true,
+                message: 'Project configuration updated successfully',
+                data: config
+            });
+        }
+        catch (error) {
+            console.error('Error updating project config:', error);
+            return c.json({ error: 'Failed to update project config' }, 500);
+        }
     }
 }
 // POST /api/v1/config/test

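The PUT config handler's new branch accepts a `coconut` object in the JSON body and shallow-merges it into the project's config file, answering 400 when the key is missing, 404 when the config directory cannot be located, and 500 if the update fails. A hedged client-side sketch; the base URL and the fields inside `coconut` are placeholders, not defined by this diff:

```ts
// Hypothetical client call; only the { coconut: ... } body shape comes from the diff.
async function updateProjectConfig(): Promise<void> {
    const res = await fetch('http://localhost:3000/api/v1/config', {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            coconut: {
                defaultModel: 'claude', // illustrative field, not defined by the package
            },
        }),
    });
    if (!res.ok) {
        // 400: missing coconut object; 404: config directory not found; 500: write failure
        throw new Error(`Config update failed: ${res.status}`);
    }
    const { data } = await res.json();
    console.log('Merged config:', data);
}
```
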
package/dist/routes/api/v1/context/knowledge/[filename]/route.js
CHANGED
@@ -59,14 +59,16 @@ app.get('/:filename', async (c) => {
         const filePath = path.join(knowledgePath, actualFilename);
         const fileContent = await fs.readFile(filePath, 'utf-8');
         const { data, content } = matter(fileContent);
-        // Extract title from first heading or use filename
-        const title =
+        // Extract title from metadata, first heading, or use filename
+        const title = data.title ||
+            content.match(/^#\s+(.+)$/m)?.[1] ||
             actualFilename.replace('.md', '').replace(/[_-]/g, ' ').replace(/\w\S*/g, (txt) => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase());
         const document = {
             filename: actualFilename,
             metadata: {
                 ...data,
-
+                title, // Include title in metadata
+                updated: data.updated || new Date().toISOString(),
                 tags: data.tags || [],
                 sources: data.sources || []
             },
@@ -107,11 +109,19 @@ app.put('/:filename', async (c) => {
         // Read current content
         const currentContent = await fs.readFile(filePath, 'utf-8');
         const { data: currentData, content: currentMarkdown } = matter(currentContent);
+        // Extract current title from markdown content or metadata
+        const currentTitle = currentMarkdown.match(/^#\s+(.+)$/m)?.[1] ||
+            currentData.title ||
+            actualFilename.replace('.md', '').replace(/[_-]/g, ' ').replace(/\w\S*/g, (txt) => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase());
         // Prepare updated content
         const updatedContent = body.content !== undefined ? body.content : currentMarkdown;
+        // Determine if title has changed
+        const newTitle = body.title !== undefined ? body.title : currentTitle;
+        const titleChanged = body.title !== undefined && body.title !== currentTitle;
         const updatedMetadata = {
             ...currentData,
             ...body.metadata,
+            title: newTitle, // Store title in metadata
             updated: new Date().toISOString(),
             // Ensure these are arrays
             tags: body.metadata?.tags !== undefined ? body.metadata.tags : (currentData.tags || []),
@@ -120,8 +130,9 @@ app.put('/:filename', async (c) => {
         // Handle title change - might need to rename file
         let newFilename = actualFilename;
         let newFilePath = filePath;
-        if
-
+        // Only rename file if title explicitly changed
+        if (titleChanged) {
+            newFilename = generateFilename(newTitle);
             newFilePath = path.join(knowledgePath, newFilename);
             // Check if new filename conflicts with existing file (unless it's the same file)
             if (newFilename !== actualFilename) {
@@ -142,10 +153,6 @@ app.put('/:filename', async (c) => {
         if (newFilename !== actualFilename) {
             await fs.unlink(filePath);
         }
-        // Extract updated title
-        const title = body.title ||
-            updatedContent.match(/^#\s+(.+)$/m)?.[1] ||
-            newFilename.replace('.md', '').replace(/[_-]/g, ' ').replace(/\w\S*/g, (txt) => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase());
         // Log knowledge update event
         try {
             const session = await requireAuth(c);
@@ -159,7 +166,7 @@ app.put('/:filename', async (c) => {
                 payload: {
                     filename: newFilename,
                     oldFilename: actualFilename !== newFilename ? actualFilename : undefined,
-                    title,
+                    title: newTitle,
                     category: updatedMetadata.category,
                     summary: generateSummary(updatedContent),
                 }
@@ -172,7 +179,7 @@ app.put('/:filename', async (c) => {
             success: true,
             document: {
                 filename: newFilename,
-                title,
+                title: newTitle,
                 metadata: updatedMetadata,
                 content: updatedContent
             }

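Both knowledge route files now derive a document title the same way in their read paths: an explicit `title` in the frontmatter wins, then the first `#` heading in the body, then a title-cased version of the filename (the PUT handler computes its `currentTitle` similarly, preferring the heading). A small self-contained sketch of that precedence; the helper name is mine, not the package's:

```ts
// Hypothetical helper mirroring the title precedence used in the knowledge routes.
function resolveTitle(frontmatterTitle: string | undefined, markdown: string, filename: string): string {
    return (
        frontmatterTitle ||
        markdown.match(/^#\s+(.+)$/m)?.[1] ||
        filename
            .replace('.md', '')
            .replace(/[_-]/g, ' ')
            .replace(/\w\S*/g, (txt) => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase())
    );
}

// e.g. resolveTitle(undefined, '# Release Checklist\n...', 'release_checklist.md') === 'Release Checklist'
```
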
package/dist/routes/api/v1/context/knowledge/route.js
CHANGED
@@ -64,13 +64,15 @@ app.get('/', async (c) => {
             const filePath = path.join(knowledgePath, file);
             const fileContent = await fs.readFile(filePath, 'utf-8');
             const { data, content } = matter(fileContent);
-            // Extract title from first heading or use filename
-            const title =
+            // Extract title from metadata, first heading, or use filename
+            const title = data.title ||
+                content.match(/^#\s+(.+)$/m)?.[1] ||
                 file.replace('.md', '').replace(/[_-]/g, ' ').replace(/\w\S*/g, (txt) => txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase());
             return {
                 filename: file,
                 metadata: {
                     ...data,
+                    title, // Include title in metadata
                     updated: data.updated || new Date().toISOString(),
                     tags: data.tags || [],
                     sources: data.sources || []
@@ -119,6 +121,7 @@ app.post('/', async (c) => {
         const now = new Date();
         const frontmatter = {
             version: '1.0',
+            title: body.title, // Store title in metadata
             updated: now.toISOString(),
             type: 'knowledge',
             category: body.metadata?.category || 'general',