openclaw-node-harness 2.0.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/mesh-agent.js +417 -94
- package/bin/mesh-join-token.js +129 -0
- package/bin/mesh-node-remove.js +277 -0
- package/bin/mesh-task-daemon.js +723 -15
- package/bin/openclaw-node-init.js +674 -0
- package/lib/llm-providers.js +262 -0
- package/lib/mesh-collab.js +549 -0
- package/lib/mesh-plans.js +528 -0
- package/lib/mesh-tasks.js +50 -34
- package/package.json +1 -1
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* llm-providers.js — LLM-agnostic provider abstraction for mesh agents.
|
|
3
|
+
*
|
|
4
|
+
* Each provider defines how to spawn a CLI process with a prompt.
|
|
5
|
+
* The mesh agent doesn't care what LLM runs — it only cares about:
|
|
6
|
+
* 1. Send prompt → get text output
|
|
7
|
+
* 2. Exit code 0 = success
|
|
8
|
+
*
|
|
9
|
+
* Built-in providers cover the major ecosystems. Any CLI tool that
|
|
10
|
+
* accepts a prompt and returns text can be registered at runtime
|
|
11
|
+
* or loaded from ~/.openclaw/mesh-providers.json.
|
|
12
|
+
*
|
|
13
|
+
* Resolution order for LLM_PROVIDER:
|
|
14
|
+
* 1. Task-level: task.llm_provider field
|
|
15
|
+
* 2. Environment: MESH_LLM_PROVIDER env var
|
|
16
|
+
* 3. CLI: --provider flag
|
|
17
|
+
* 4. Default: 'claude'
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
const path = require('path');
|
|
21
|
+
const fs = require('fs');
|
|
22
|
+
const os = require('os');
|
|
23
|
+
|
|
24
|
+
// ── Generic Provider Factory ────────────────────────
|
|
25
|
+
// Most agentic coding CLIs follow a similar pattern:
|
|
26
|
+
// binary [prompt-flag] "prompt" [model-flag] model [cwd-flag] dir
|
|
27
|
+
// This factory builds a provider from a simple config.
|
|
28
|
+
|
|
29
|
+
/**
 * Build a provider object from a declarative config.
 *
 * Most agentic coding CLIs follow the same shape:
 *   binary [prompt-flag] "prompt" [model-flag] model [cwd-flag] dir
 *
 * @param {object} config
 * @param {string} config.name - Provider key (e.g. 'gemini').
 * @param {string} config.binary - Executable name or path to spawn.
 * @param {?string} [config.promptFlag='-p'] - Flag preceding the prompt; null → bare positional prompt.
 * @param {?string} [config.modelFlag='--model'] - Flag preceding the model; null → no model arg.
 * @param {?string} [config.cwdFlag='--cwd'] - Flag preceding the target dir; null → no cwd arg.
 * @param {?string} [config.defaultModel] - Model used when the caller passes none.
 * @param {string[]} [config.extraArgs=[]] - Appended verbatim after the generated args.
 * @param {string[]} [config.envStrip=[]] - Env var names/prefixes removed by cleanEnv().
 * @param {Function} [config.buildArgs] - Custom arg builder; overrides the generic pattern
 *   (e.g. ollama's positional `run <model> "prompt"` form).
 * @param {Function} [config.cleanEnv] - Custom env sanitizer; overrides the envStrip-based one.
 * @returns {{name: string, binary: string, buildArgs: Function, cleanEnv: Function, defaultModel: ?string}}
 */
function makeGenericProvider({
  name,
  binary,
  promptFlag = '-p',
  modelFlag = '--model',
  cwdFlag = '--cwd',
  defaultModel,
  extraArgs = [],
  envStrip = [],
  buildArgs,
  cleanEnv,
}) {
  return {
    name,
    binary,
    // Honor a caller-supplied buildArgs; previously it was silently dropped,
    // which forced post-hoc patching of providers like ollama.
    buildArgs: buildArgs || function (prompt, model, task, targetDir) {
      const args = [];
      if (promptFlag) args.push(promptFlag, prompt);
      else args.push(prompt); // bare positional prompt
      if (modelFlag && (model || defaultModel)) args.push(modelFlag, model || defaultModel);
      if (cwdFlag && targetDir) args.push(cwdFlag, targetDir);
      args.push(...extraArgs);
      return args;
    },
    cleanEnv: cleanEnv || function (env) {
      // With nothing to strip, still return a copy so callers can mutate safely.
      if (envStrip.length === 0) return { ...env };
      const clean = { ...env };
      for (const prefix of envStrip) {
        for (const key of Object.keys(clean)) {
          if (key.startsWith(prefix) || key === prefix) delete clean[key];
        }
      }
      return clean;
    },
    defaultModel,
  };
}
|
|
55
|
+
|
|
56
|
+
// ── Built-in Provider Definitions ───────────────────
|
|
57
|
+
|
|
58
|
+
// Registry of built-in providers, keyed by provider name.
// Each entry exposes: name, binary, buildArgs(prompt, model, task, targetDir,
// workspaceDir), cleanEnv(env), defaultModel.
const PROVIDERS = {
  // ─── Anthropic ───
  claude: {
    name: 'claude',
    binary: process.env.CLAUDE_PATH || 'claude',
    // Builds the claude CLI invocation. targetDir/workspaceDir (plus any
    // task.scope entries resolving inside them, or under /tmp/) are granted
    // access via --add-dir.
    buildArgs(prompt, model, task, targetDir, workspaceDir) {
      const args = [
        '-p', prompt,
        '--output-format', 'text',
        '--model', model || 'sonnet',
        '--permission-mode', 'bypassPermissions',
      ];

      if (targetDir) args.push('--add-dir', targetDir);
      if (workspaceDir && workspaceDir !== targetDir) args.push('--add-dir', workspaceDir);

      // Add scope directories
      if (task.scope && task.scope.length > 0) {
        const addedDirs = new Set([targetDir, workspaceDir].filter(Boolean));
        for (const s of task.scope) {
          for (const base of [targetDir, workspaceDir].filter(Boolean)) {
            const resolved = path.resolve(base, s);
            const resolvedDir = path.dirname(resolved);
            // Containment check uses a path-separator boundary so a sibling
            // like "/repo-evil" is not treated as inside "/repo" (a bare
            // startsWith(base) would accept it).
            const insideBase = resolved === base || resolved.startsWith(base + path.sep);
            if (!insideBase && !resolved.startsWith('/tmp/')) continue;
            if (addedDirs.has(resolvedDir)) continue;
            addedDirs.add(resolvedDir);
            args.push('--add-dir', resolvedDir);
          }
        }
      }

      return args;
    },
    // Strip claude-code session variables so the spawned CLI starts clean.
    cleanEnv(env) {
      const clean = { ...env };
      for (const key of Object.keys(clean)) {
        if (key.startsWith('CLAUDE_CODE') || key === 'CLAUDECODE') delete clean[key];
      }
      return clean;
    },
    defaultModel: 'sonnet',
  },

  // ─── OpenAI ───
  openai: makeGenericProvider({
    name: 'openai',
    binary: process.env.OPENAI_PATH || 'codex',
    defaultModel: 'gpt-4.1',
    cwdFlag: '--cwd',
  }),

  // ─── Google Gemini ───
  gemini: makeGenericProvider({
    name: 'gemini',
    binary: process.env.GEMINI_PATH || 'gemini',
    defaultModel: 'gemini-2.5-pro',
  }),

  // ─── DeepSeek ───
  deepseek: makeGenericProvider({
    name: 'deepseek',
    binary: process.env.DEEPSEEK_PATH || 'deepseek',
    defaultModel: 'deepseek-chat',
  }),

  // ─── Kimi (Moonshot AI) ───
  kimi: makeGenericProvider({
    name: 'kimi',
    binary: process.env.KIMI_PATH || 'kimi',
    defaultModel: 'kimi',
  }),

  // ─── MiniMax ───
  minimax: makeGenericProvider({
    name: 'minimax',
    binary: process.env.MINIMAX_PATH || 'minimax',
    defaultModel: 'minimax-01',
  }),

  // ─── Meta Llama (via Ollama) ───
  ollama: makeGenericProvider({
    name: 'ollama',
    binary: process.env.OLLAMA_PATH || 'ollama',
    promptFlag: null, // ollama run <model> "prompt"
    modelFlag: null,
    cwdFlag: null,
    defaultModel: 'llama3',
    // NOTE: an identical buildArgs is also assigned after this object literal
    // (see the override below PROVIDERS) — behavior is the same either way.
    buildArgs(prompt, model) {
      return ['run', model || 'llama3', prompt];
    },
  }),

  // ─── Aider (multi-provider CLI — works with OpenAI, Anthropic, Gemini, DeepSeek, etc.) ───
  aider: makeGenericProvider({
    name: 'aider',
    binary: process.env.AIDER_PATH || 'aider',
    promptFlag: '--message',
    modelFlag: '--model',
    cwdFlag: null,
    defaultModel: null,
  }),

  // ─── Shell (no LLM — raw command execution) ───
  // Shell provider ignores the formatted prompt and runs task.description
  // as a raw shell command. The prompt from buildInitialPrompt() is markdown
  // with headers, bullet points, and retry context — bash can't execute that.
  shell: {
    name: 'shell',
    binary: '/bin/bash',
    buildArgs(prompt, model, task) {
      // Use task.description (the raw command) if available, fall back to prompt
      const cmd = (task && task.description) ? task.description : prompt;
      return ['-c', cmd];
    },
    cleanEnv(env) {
      // No env stripping needed for plain shell; still return a copy.
      return { ...env };
    },
    defaultModel: null,
  },
};
|
|
178
|
+
|
|
179
|
+
// The generic factory can't express ollama's positional invocation
// (`ollama run <model> "<prompt>"`), so patch buildArgs in after the fact.
PROVIDERS.ollama.buildArgs = (prompt, model) => {
  const chosenModel = model || 'llama3';
  return ['run', chosenModel, prompt];
};
|
|
183
|
+
|
|
184
|
+
// ── Provider Resolution ──────────────────────────────
|
|
185
|
+
|
|
186
|
+
/**
 * Resolve which LLM provider to use.
 * Priority: task.llm_provider > MESH_LLM_PROVIDER env > --provider CLI flag > default ('claude').
 * @param {?object} task - Task record; may carry an llm_provider field.
 * @param {?string} cliProvider - Value of the --provider CLI flag.
 * @param {?string} envProvider - Value of the MESH_LLM_PROVIDER env var.
 * @returns {object} The matching provider object from PROVIDERS.
 * @throws {Error} When the resolved name is not a registered provider.
 */
function resolveProvider(task, cliProvider, envProvider) {
  let name = task && task.llm_provider;
  if (!name) name = envProvider;
  if (!name) name = cliProvider;
  if (!name) name = 'claude';

  const provider = PROVIDERS[name];
  if (provider) return provider;
  throw new Error(`Unknown LLM provider: "${name}". Available: ${Object.keys(PROVIDERS).join(', ')}. Register custom providers via registerProvider() or ~/.openclaw/mesh-providers.json`);
}
|
|
202
|
+
|
|
203
|
+
/**
 * Resolve the model to use.
 * Priority: task.llm_model > --model CLI flag > provider default.
 * @param {?object} task - Task record; may carry an llm_model field.
 * @param {?string} cliModel - Value of the --model CLI flag.
 * @param {object} provider - Resolved provider (supplies defaultModel).
 * @returns {?string} The model name, or the provider default (possibly null).
 */
function resolveModel(task, cliModel, provider) {
  const taskModel = task ? task.llm_model : undefined;
  if (taskModel) return taskModel;
  if (cliModel) return cliModel;
  return provider.defaultModel;
}
|
|
212
|
+
|
|
213
|
+
/**
 * Register a custom provider at runtime.
 * Accepts either a full provider object (with its own buildArgs function) or
 * a simple declarative config (binary, defaultModel, …) handed to the
 * generic factory.
 * @param {string} name - Registry key for the provider.
 * @param {object} config - Full provider object or generic-factory config.
 * @throws {Error} When config has neither a buildArgs function nor a binary.
 */
function registerProvider(name, config) {
  if (typeof config.buildArgs === 'function') {
    // Full provider object: fill in safe defaults, let config win.
    PROVIDERS[name] = { name, cleanEnv: (env) => ({ ...env }), defaultModel: null, ...config };
    return;
  }
  if (config.binary) {
    // Simple config → use generic factory
    PROVIDERS[name] = makeGenericProvider({ name, ...config });
    return;
  }
  throw new Error('Provider must have binary (and optionally buildArgs)');
}
|
|
229
|
+
|
|
230
|
+
/**
 * Load custom providers from ~/.openclaw/mesh-providers.json if it exists.
 * Format: { "provider-name": { "binary": "/path/to/cli", "defaultModel": "model-name", ... } }
 * Built-in provider names are never overridden. Any read/parse error is
 * deliberately swallowed — a bad config file must not crash the agent.
 * @returns {number} How many custom providers were registered.
 */
function loadCustomProviders() {
  const configPath = path.join(os.homedir(), '.openclaw', 'mesh-providers.json');
  let registered = 0;
  try {
    if (!fs.existsSync(configPath)) return 0;
    const parsed = JSON.parse(fs.readFileSync(configPath, 'utf8'));
    for (const [name, config] of Object.entries(parsed)) {
      if (PROVIDERS[name]) continue; // don't override built-ins
      registerProvider(name, config);
      registered += 1;
    }
    return registered;
  } catch (e) {
    // Silent fail — bad config shouldn't crash the agent
    return 0;
  }
}
|
|
251
|
+
|
|
252
|
+
// Auto-load custom providers on require() so definitions from
// ~/.openclaw/mesh-providers.json are registered before the first
// resolveProvider() call. The count is retained for inspection only.
const _customLoaded = loadCustomProviders();

module.exports = {
  PROVIDERS,           // registry: name → provider object
  resolveProvider,     // pick provider: task > env > CLI flag > 'claude'
  resolveModel,        // pick model: task > CLI flag > provider default
  registerProvider,    // add a provider at runtime
  loadCustomProviders, // re-read the custom-provider config file
  makeGenericProvider, // factory for flag-style CLI providers
};
|