@ian2018cs/agenthub 0.1.52 → 0.1.53
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/assets/index-BdtjtPre.css +32 -0
- package/dist/assets/index-_a9nlevD.js +162 -0
- package/dist/assets/{vendor-icons-KP5LHo3O.js → vendor-icons-D0_WToWG.js} +93 -73
- package/dist/index.html +3 -3
- package/package.json +1 -1
- package/server/database/db.js +77 -1
- package/server/database/init.sql +23 -1
- package/server/index.js +4 -0
- package/server/projects.js +2 -1
- package/server/routes/agents.js +838 -0
- package/server/services/llm.js +46 -0
- package/server/services/system-agent-repo.js +276 -0
- package/server/services/user-directories.js +5 -1
- package/dist/assets/index-BXrxw5Li.js +0 -152
- package/dist/assets/index-DsfWMhMj.css +0 -32
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generic LLM chat completion utility.
|
|
3
|
+
* Uses OPENAI_API_KEY + OPENAI_BASE_URL (OpenAI-compatible API).
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
/**
 * Call an OpenAI-compatible chat completion endpoint.
 *
 * Reads OPENAI_API_KEY (required) and OPENAI_BASE_URL (optional, defaults
 * to https://api.openai.com) from the environment.
 *
 * @param {object} options
 * @param {string} options.systemPrompt - System prompt
 * @param {string} options.userPrompt - User message
 * @param {string} [options.model] - Model ID (default: gpt-4o-mini)
 * @param {number} [options.maxTokens] - Max output tokens (default: 1024)
 * @param {number} [options.temperature] - Sampling temperature (default: 0.7)
 * @returns {Promise<string>} The assistant reply text ('' when the API returns no content)
 * @throws {Error} When the API key is missing or the API responds with a non-2xx status
 */
export async function chatCompletion({ systemPrompt, userPrompt, model = 'gpt-4o-mini', maxTokens = 1024, temperature = 0.7 }) {
  const apiKey = process.env.OPENAI_API_KEY;
  if (!apiKey) throw new Error('OPENAI_API_KEY is not configured');

  // Strip a single trailing slash so the /v1 path joins cleanly.
  const baseUrl = (process.env.OPENAI_BASE_URL || 'https://api.openai.com').replace(/\/$/, '');

  const payload = {
    model,
    messages: [
      { role: 'system', content: systemPrompt },
      { role: 'user', content: userPrompt }
    ],
    max_tokens: maxTokens,
    temperature
  };

  const response = await fetch(`${baseUrl}/v1/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${apiKey}`
    },
    body: JSON.stringify(payload)
  });

  if (!response.ok) {
    // Prefer the API's own error message; fall back to the HTTP status code.
    const err = await response.json().catch(() => ({}));
    throw new Error(err.error?.message || `LLM API error: ${response.status}`);
  }

  const data = await response.json();
  return data.choices?.[0]?.message?.content?.trim() || '';
}
|
|
@@ -0,0 +1,276 @@
|
|
|
1
|
+
import { promises as fs } from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { spawn } from 'child_process';
|
|
4
|
+
import { getPublicPaths } from './user-directories.js';
|
|
5
|
+
|
|
6
|
+
// Git remote for the shared system agent repository. Cloned over SSH, so the
// host needs SSH access to this remote configured out of band.
export const SYSTEM_AGENT_REPO_URL = 'git@git.amberweather.com:mcp-server/hupoer-agents.git';
// Owner/name pair; used below to build the local clone path under agentRepoDir.
export const SYSTEM_AGENT_REPO_OWNER = 'mcp-server';
export const SYSTEM_AGENT_REPO_NAME = 'hupoer-agents';
|
|
9
|
+
|
|
10
|
+
/**
 * Run a git command and capture its output.
 * GIT_TERMINAL_PROMPT=0 keeps git from blocking on credential prompts.
 *
 * @param {string[]} args - Arguments passed to the git binary
 * @param {string|null} [cwd] - Working directory (inherited when null)
 * @returns {Promise<{stdout: string, stderr: string}>} Captured output on exit code 0
 * @throws {Error} When git exits non-zero or cannot be spawned
 */
function runGit(args, cwd = null) {
  return new Promise((resolve, reject) => {
    const options = {
      stdio: ['ignore', 'pipe', 'pipe'],
      env: { ...process.env, GIT_TERMINAL_PROMPT: '0' }
    };
    if (cwd) options.cwd = cwd;

    const child = spawn('git', args, options);
    let out = '';
    let errOut = '';
    child.stdout.on('data', chunk => { out += chunk.toString(); });
    child.stderr.on('data', chunk => { errOut += chunk.toString(); });
    child.on('error', reject);
    child.on('close', exitCode => {
      if (exitCode !== 0) {
        // Surface git's stderr when available; otherwise a generic message.
        reject(new Error(errOut || `git ${args[0]} failed with code ${exitCode}`));
        return;
      }
      resolve({ stdout: out, stderr: errOut });
    });
  });
}
|
|
30
|
+
|
|
31
|
+
/**
 * Local filesystem location of the system agent repo clone:
 * <agentRepoDir>/<owner>/<repo>.
 *
 * @returns {string} Absolute path of the (possibly not-yet-cloned) repo
 */
function getRepoPath() {
  const { agentRepoDir } = getPublicPaths();
  return path.join(agentRepoDir, SYSTEM_AGENT_REPO_OWNER, SYSTEM_AGENT_REPO_NAME);
}
|
|
35
|
+
|
|
36
|
+
/**
 * Ensure the system agent repo exists locally.
 *
 * Existing clone: best-effort fast-forward pull (a failed pull is logged and
 * the stale clone is used). Missing clone: shallow clone (failure is fatal).
 *
 * @returns {Promise<string>} Path to the local clone
 * @throws {Error} When the initial clone fails
 */
export async function ensureAgentRepo() {
  const repoPath = getRepoPath();

  const alreadyCloned = await fs.access(repoPath).then(() => true, () => false);

  if (alreadyCloned) {
    // Refresh, but never fail just because the remote is unreachable.
    try {
      await runGit(['pull', '--ff-only'], repoPath);
    } catch (err) {
      console.log('[AgentRepo] Failed to pull, using existing clone:', err.message);
    }
    return repoPath;
  }

  // First use on this machine: create the parent directory and clone shallow.
  await fs.mkdir(path.dirname(repoPath), { recursive: true });
  try {
    await runGit(['clone', '--depth', '1', SYSTEM_AGENT_REPO_URL, repoPath]);
    console.log('[AgentRepo] Cloned agent repo to', repoPath);
  } catch (err) {
    console.error('[AgentRepo] Failed to clone:', err.message);
    throw err;
  }

  return repoPath;
}
|
|
65
|
+
|
|
66
|
+
/**
 * Extract a `- name:` / `repo:` item list from YAML content.
 * Shared by the skills: and mcps: sections, which have identical shape.
 *
 * @param {string} content - Full agent.yaml text
 * @param {string} key - Top-level list key ('skills' or 'mcps')
 * @returns {Array<{name: string, repo?: string}>} Parsed items (possibly empty)
 */
function parseNamedList(content, key) {
  const items = [];
  const section = content.match(new RegExp(`^${key}:\\s*\\n((?:[ \\t]+.+\\n?)*)`, 'm'));
  if (!section) return items;

  let current = null;
  for (const line of section[1].split('\n')) {
    const nameLine = line.match(/^\s+-\s+name:\s*["']?(.+?)["']?\s*$/);
    const repoLine = line.match(/^\s+repo:\s*["']?(.+?)["']?\s*$/);
    if (nameLine) {
      current = { name: nameLine[1].trim() };
      items.push(current);
    } else if (repoLine && current) {
      // repo lines attach to the most recent `- name:` entry.
      current.repo = repoLine[1].trim();
    }
  }
  return items;
}

/**
 * Parse agent.yaml from an agent directory.
 *
 * Uses a minimal line-based parser for the expected fields only (name,
 * display_name, version, author, description, skills, mcps, files) — it is
 * NOT a general YAML parser. Missing fields fall back to defaults derived
 * from the directory name.
 *
 * @param {string} agentDir - Absolute path to the agent directory
 * @returns {Promise<object|null>} Parsed metadata, or null when agent.yaml
 *   is missing or unreadable
 */
async function parseAgentYaml(agentDir) {
  try {
    const yamlPath = path.join(agentDir, 'agent.yaml');
    const content = await fs.readFile(yamlPath, 'utf-8');

    const result = {
      name: path.basename(agentDir),
      display_name: path.basename(agentDir),
      description: '',
      version: '1.0.0',
      author: '',
      skills: [],
      mcps: [],
      files: []
    };

    // Scalar fields: `key: value`, with optional surrounding quotes.
    const scalarPattern = (key) => new RegExp(`^${key}:\\s*["']?(.+?)["']?\\s*$`, 'm');
    const nameMatch = content.match(scalarPattern('name'));
    const displayMatch = content.match(scalarPattern('display_name'));
    const versionMatch = content.match(scalarPattern('version'));
    const authorMatch = content.match(scalarPattern('author'));

    if (nameMatch) result.name = nameMatch[1].trim();
    if (displayMatch) result.display_name = displayMatch[1].trim();
    if (versionMatch) result.version = versionMatch[1].trim();
    if (authorMatch) result.author = authorMatch[1].trim();

    // Block scalar description (`description: |`): capture the indented lines
    // that follow. BUG FIX: the previous pattern ended in (?=^\S|\Z), but
    // JavaScript regexes treat \Z as a literal "Z", so a description block at
    // the end of the file (or containing a "Z") never matched correctly and
    // fell through to the inline branch, yielding "|".
    const descBlockMatch = content.match(/^description:\s*\|[ \t]*\n((?:[ \t]+.*\n?)*)/m);
    if (descBlockMatch) {
      const rawLines = descBlockMatch[1].split('\n');
      // Dedent by the indentation of the first non-empty line.
      const firstLine = rawLines.find(l => l.trim() !== '');
      const indent = firstLine ? firstLine.match(/^[ \t]*/)[0] : '';
      result.description = rawLines
        .map(l => (l.startsWith(indent) ? l.slice(indent.length) : l))
        .join('\n')
        .trim();
    } else {
      const descInlineMatch = content.match(/^description:\s*(.+)$/m);
      if (descInlineMatch) result.description = descInlineMatch[1].trim();
    }

    // skills: and mcps: share the same `- name:` / `repo:` structure.
    result.skills = parseNamedList(content, 'skills');
    result.mcps = parseNamedList(content, 'mcps');

    // files: plain list of (optionally quoted) paths.
    const filesSection = content.match(/^files:\s*\n((?:[ \t]+.+\n?)*)/m);
    if (filesSection) {
      for (const line of filesSection[1].split('\n')) {
        const fileLine = line.match(/^\s+-\s+["']?(.+?)["']?\s*$/);
        if (fileLine) result.files.push(fileLine[1].trim());
      }
    }

    return result;
  } catch {
    // Any read failure means "not a valid agent directory".
    return null;
  }
}
|
|
153
|
+
|
|
154
|
+
/**
 * Scan the local agent repo clone for available agents.
 *
 * Each non-hidden top-level directory with a parseable agent.yaml becomes
 * one entry. Repo access failures degrade to an empty list rather than
 * throwing.
 *
 * @returns {Promise<object[]>} Agent metadata objects, each augmented with
 *   dirName, path, and hasClaudeMd
 */
export async function scanAgents() {
  let repoPath;
  try {
    repoPath = await ensureAgentRepo();
  } catch (err) {
    console.error('[AgentRepo] Could not access agent repo:', err.message);
    return [];
  }

  let entries;
  try {
    entries = await fs.readdir(repoPath, { withFileTypes: true });
  } catch {
    return [];
  }

  const agents = [];
  for (const entry of entries) {
    // Skip hidden entries (.git etc.) and plain files.
    if (!entry.isDirectory() || entry.name.startsWith('.')) continue;

    const agentDir = path.join(repoPath, entry.name);
    const metadata = await parseAgentYaml(agentDir);
    if (metadata === null) continue;

    // CLAUDE.md is optional; its presence is reported, not required.
    const hasClaudeMd = await fs
      .access(path.join(agentDir, 'CLAUDE.md'))
      .then(() => true, () => false);

    agents.push({
      ...metadata,
      dirName: entry.name,
      path: agentDir,
      hasClaudeMd
    });
  }

  return agents;
}
|
|
200
|
+
|
|
201
|
+
/**
 * Return `version` with its patch component incremented, padding missing
 * components with zeros first: "1.0.0" → "1.0.1", "2.3" → "2.3.1".
 * A missing/empty version yields the starting version "1.0.0".
 *
 * @param {string} version - Semver-ish version string
 * @returns {string} Incremented version string
 */
export function incrementPatchVersion(version) {
  if (!version) return '1.0.0';
  // Non-numeric components degrade to 0 rather than NaN.
  const numbers = version.split('.').map(part => parseInt(part, 10) || 0);
  // Pad to at least major.minor.patch, but keep any extra components.
  const padded = [...numbers, 0, 0, 0].slice(0, Math.max(3, numbers.length));
  padded[2] += 1;
  return padded.join('.');
}
|
|
212
|
+
|
|
213
|
+
/**
 * Recursively copy the contents of `src` into `dst`, creating `dst` (and
 * any intermediate directories) as needed. Files are copied byte-for-byte;
 * subdirectories are descended into.
 *
 * @param {string} src - Source directory (must exist and be readable)
 * @param {string} dst - Destination directory (created if missing)
 */
async function copyDirRecursive(src, dst) {
  await fs.mkdir(dst, { recursive: true });
  for (const entry of await fs.readdir(src, { withFileTypes: true })) {
    const from = path.join(src, entry.name);
    const to = path.join(dst, entry.name);
    await (entry.isDirectory() ? copyDirRecursive(from, to) : fs.copyFile(from, to));
  }
}
|
|
229
|
+
|
|
230
|
+
/**
 * Publish an approved agent to the shared agent repo.
 *
 * Sequence: fast-forward pull → copy the approved files into the repo →
 * stamp version/updated_at in agent.yaml → git add/commit/push.
 *
 * @param {string} agentName - Directory name in the repo
 * @param {string} extractedDir - Local directory with the approved agent files
 * @param {string} newVersion - New version string
 * @param {string} [submitterName] - For commit message attribution
 * @throws {Error} When the pull or any git publish step fails
 */
export async function publishAgentToRepo(agentName, extractedDir, newVersion, submitterName = 'system') {
  const repoPath = getRepoPath();

  // 1. Sync with the remote first so the commit applies on top of HEAD.
  try {
    await runGit(['pull', '--ff-only'], repoPath);
  } catch (err) {
    console.error('[AgentRepo] Pull before publish failed:', err.message);
    throw err;
  }

  // 2. Copy the full tree (subdirectories carry referenced files).
  const agentRepoDir = path.join(repoPath, agentName);
  await copyDirRecursive(extractedDir, agentRepoDir);

  // 3. Stamp version + updated_at into agent.yaml. Best effort: a missing or
  //    unreadable yaml is logged but does not block the publish itself.
  const yamlPath = path.join(agentRepoDir, 'agent.yaml');
  try {
    const now = new Date().toISOString();
    let yamlContent = await fs.readFile(yamlPath, 'utf-8');
    yamlContent = yamlContent
      .replace(/^version:\s*.+$/m, `version: "${newVersion}"`)
      .replace(/^updated_at:\s*.+$/m, `updated_at: "${now}"`);
    // Append updated_at if the file never had one to replace.
    if (!yamlContent.match(/^updated_at:/m)) {
      yamlContent += `\nupdated_at: "${now}"\n`;
    }
    await fs.writeFile(yamlPath, yamlContent, 'utf-8');
  } catch (err) {
    console.error('[AgentRepo] Failed to update agent.yaml version:', err.message);
  }

  // 4. Stage only this agent's directory, then commit and push.
  await runGit(['add', agentName], repoPath);
  const commitMsg = `feat: publish ${agentName} v${newVersion} (by ${submitterName})`;
  await runGit(['commit', '-m', commitMsg], repoPath);
  await runGit(['push'], repoPath);

  console.log(`[AgentRepo] Published ${agentName} v${newVersion}`);
}
|
|
@@ -41,6 +41,8 @@ export function getPublicPaths() {
|
|
|
41
41
|
return {
|
|
42
42
|
skillsRepoDir: path.join(DATA_DIR, 'skills-repo'),
|
|
43
43
|
mcpRepoDir: path.join(DATA_DIR, 'mcp-repo'),
|
|
44
|
+
agentRepoDir: path.join(DATA_DIR, 'agent-repo'),
|
|
45
|
+
agentSubmissionsDir: path.join(DATA_DIR, 'agent-submissions'),
|
|
44
46
|
};
|
|
45
47
|
}
|
|
46
48
|
|
|
@@ -258,9 +260,11 @@ export async function initUserDirectories(userUuid) {
|
|
|
258
260
|
|
|
259
261
|
// Create mcp-repo directory for the user
|
|
260
262
|
await fs.mkdir(paths.mcpRepoDir, { recursive: true });
|
|
261
|
-
// Ensure shared public
|
|
263
|
+
// Ensure shared public directories exist
|
|
262
264
|
const publicPaths = getPublicPaths();
|
|
263
265
|
await fs.mkdir(publicPaths.mcpRepoDir, { recursive: true });
|
|
266
|
+
await fs.mkdir(publicPaths.agentRepoDir, { recursive: true });
|
|
267
|
+
await fs.mkdir(publicPaths.agentSubmissionsDir, { recursive: true });
|
|
264
268
|
|
|
265
269
|
// Initialize codex home directory with config files (after skillsDir exists for symlink)
|
|
266
270
|
await initCodexDirectories(userUuid);
|