@hitechclaw/clawspark 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +35 -0
- package/LICENSE +21 -0
- package/README.md +378 -0
- package/clawspark +2715 -0
- package/configs/models.yaml +108 -0
- package/configs/skill-packs.yaml +44 -0
- package/configs/skills.yaml +37 -0
- package/install.sh +387 -0
- package/lib/common.sh +249 -0
- package/lib/detect-hardware.sh +156 -0
- package/lib/diagnose.sh +636 -0
- package/lib/render-diagram.sh +47 -0
- package/lib/sandbox-commands.sh +415 -0
- package/lib/secure.sh +244 -0
- package/lib/select-model.sh +442 -0
- package/lib/setup-browser.sh +138 -0
- package/lib/setup-dashboard.sh +228 -0
- package/lib/setup-inference.sh +128 -0
- package/lib/setup-mcp.sh +142 -0
- package/lib/setup-messaging.sh +242 -0
- package/lib/setup-models.sh +121 -0
- package/lib/setup-openclaw.sh +808 -0
- package/lib/setup-sandbox.sh +188 -0
- package/lib/setup-skills.sh +113 -0
- package/lib/setup-systemd.sh +224 -0
- package/lib/setup-tailscale.sh +188 -0
- package/lib/setup-voice.sh +101 -0
- package/lib/skill-audit.sh +449 -0
- package/lib/verify.sh +177 -0
- package/package.json +57 -0
- package/scripts/release.sh +133 -0
- package/uninstall.sh +161 -0
- package/v2/README.md +50 -0
- package/v2/configs/providers.yaml +79 -0
- package/v2/configs/skills.yaml +36 -0
- package/v2/install.sh +116 -0
- package/v2/lib/common.sh +285 -0
- package/v2/lib/detect-hardware.sh +119 -0
- package/v2/lib/select-runtime.sh +273 -0
- package/v2/lib/setup-extras.sh +95 -0
- package/v2/lib/setup-openclaw.sh +187 -0
- package/v2/lib/setup-provider.sh +131 -0
- package/v2/lib/verify.sh +133 -0
- package/web/index.html +1835 -0
- package/web/install.sh +387 -0
- package/web/logo-hero.svg +11 -0
- package/web/logo-icon.svg +12 -0
- package/web/logo.svg +17 -0
- package/web/vercel.json +8 -0
package/clawspark
ADDED
|
@@ -0,0 +1,2715 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# clawspark - CLI management tool for the clawspark installation.
# Installed to /usr/local/bin/clawspark by install.sh.
set -euo pipefail

# Candidate installation roots: the v1/"standard" layout lives under
# ~/.clawspark, the v2 layout under ~/.clawspark-v2.
CLAWSPARK_DEFAULT_HOME="${HOME}/.clawspark"
CLAWSPARK_V2_HOME="${HOME}/.clawspark-v2"
|
|
9
|
+
# Resolve the active installation root on stdout.
# Priority: explicit CLAWSPARK_HOME > CLAWSPARK_PROFILE hint > on-disk probe.
_detect_clawspark_home() {
  local resolved=""

  if [[ -n "${CLAWSPARK_HOME:-}" ]]; then
    resolved="${CLAWSPARK_HOME}"
  else
    case "${CLAWSPARK_PROFILE:-}" in
      v2)
        resolved="${CLAWSPARK_V2_HOME}"
        ;;
      standard|v1)
        resolved="${CLAWSPARK_DEFAULT_HOME}"
        ;;
      *)
        # No explicit hint: prefer the v2 tree only when it is the sole
        # installation present on disk.
        if [[ -d "${CLAWSPARK_V2_HOME}" && ! -d "${CLAWSPARK_DEFAULT_HOME}" ]]; then
          resolved="${CLAWSPARK_V2_HOME}"
        else
          resolved="${CLAWSPARK_DEFAULT_HOME}"
        fi
        ;;
    esac
  fi

  printf '%s' "${resolved}"
}
|
|
32
|
+
|
|
33
|
+
# Derived globals for the rest of the script.
CLAWSPARK_DIR="$(_detect_clawspark_home)"
CLAWSPARK_LOG="${CLAWSPARK_DIR}/install.log"
# CLAWSPARK_DEFAULTS=true means non-interactive mode: prompts return defaults.
CLAWSPARK_DEFAULTS="${CLAWSPARK_DEFAULTS:-false}"
CLAWSPARK_PROFILE_NAME="standard"
# Left side of && is exempt from `set -e`, so a non-match here is safe.
[[ "${CLAWSPARK_DIR}" == "${CLAWSPARK_V2_HOME}" ]] && CLAWSPARK_PROFILE_NAME="v2"

# ── Source common helpers ───────────────────────────────────────────────────
|
|
40
|
+
# Source the shared helper library from the installation's lib/ directory.
# If it is missing (partial/broken install) define just enough fallbacks —
# log helpers, color constants, check_command, hr, print_box — for the CLI
# to keep working.
_load_common() {
  local lib_dir="${CLAWSPARK_DIR}/lib"
  if [[ -f "${lib_dir}/common.sh" ]]; then
    # shellcheck source=/dev/null
    source "${lib_dir}/common.sh"
  else
    # Minimal fallback if lib is missing
    RED=$'\033[0;31m'; GREEN=$'\033[0;32m'; YELLOW=$'\033[0;33m'
    BLUE=$'\033[0;34m'; CYAN=$'\033[0;36m'; BOLD=$'\033[1m'; RESET=$'\033[0m'
    log_info() { printf '[INFO] %s\n' "$*"; }
    log_warn() { printf '[WARN] %s\n' "$*" >&2; }
    log_error() { printf '[ERROR] %s\n' "$*" >&2; }
    log_success() { printf '[OK] %s\n' "$*"; }
    check_command() { command -v "$1" &>/dev/null; }
    # Horizontal rule sized to the terminal width (60 cols if tput fails).
    hr() { printf '%*s\n' "$(tput cols 2>/dev/null || echo 60)" '' | tr ' ' '-'; }
    print_box() { for l in "$@"; do printf '  %s\n' "$l"; done; }
  fi
}
|
|
58
|
+
|
|
59
|
+
# Load shared helpers (or the inline fallbacks) before any command runs.
_load_common
|
|
60
|
+
|
|
61
|
+
# Read a dotted path (e.g. "agents.defaults.model") from
# ~/.openclaw/openclaw.json and print its value.
# Scalars print raw, dicts/lists print as JSON, null/missing keys print "".
# Returns 1 only when the config file does not exist; python errors are
# swallowed so callers see empty output instead of a failure.
_read_openclaw_json_value() {
  local json_path="$1"
  local config_file="${HOME}/.openclaw/openclaw.json"

  [[ -f "${config_file}" ]] || return 1

  python3 -c '
import json, sys

config_file = sys.argv[1]
json_path = sys.argv[2].split(".")

with open(config_file, "r", encoding="utf-8") as fh:
    value = json.load(fh)

for part in json_path:
    if not isinstance(value, dict) or part not in value:
        print("")
        raise SystemExit(0)
    value = value[part]

if value is None:
    print("")
elif isinstance(value, (dict, list)):
    print(json.dumps(value))
else:
    print(value)
' "${config_file}" "${json_path}" 2>/dev/null || true
}
|
|
90
|
+
|
|
91
|
+
# Print the configured default model ref (e.g. "ollama/llama3").
# Prefers the openclaw CLI; falls back to reading the JSON config directly.
_current_model_ref() {
  local from_cli
  from_cli="$(openclaw config get agents.defaults.model 2>/dev/null | tr -d '"' || true)"

  if [[ -z "${from_cli}" ]]; then
    _read_openclaw_json_value agents.defaults.model
    return
  fi

  printf '%s' "${from_cli}"
}
|
|
101
|
+
|
|
102
|
+
# Print the v2 primary provider recorded in openclaw.json (empty if unset).
_current_v2_provider_from_config() {
  _read_openclaw_json_value clawsparkV2.primaryProvider
}
|
|
105
|
+
|
|
106
|
+
# Print the custom provider's display label.
# The CUSTOM_AI_PROVIDER_NAME env var wins; otherwise read the stored config.
_current_custom_provider_label() {
  local label="${CUSTOM_AI_PROVIDER_NAME:-}"

  if [[ -z "${label}" ]]; then
    _read_openclaw_json_value clawsparkV2.customProviderName
    return
  fi

  printf '%s' "${label}"
}
|
|
114
|
+
|
|
115
|
+
# Print the active provider's base URL by indirectly expanding the env var
# named by _provider_base_url_env_name (e.g. OLLAMA_BASE_URL).
# Returns 1 for providers with no known base-URL env var.
_current_provider_base_url() {
  local env_name
  env_name="$(_provider_base_url_env_name "$(_current_provider)")" || return 1
  # ${!env_name:-} — indirect expansion; empty if the var is unset.
  printf '%s' "${!env_name:-}"
}
|
|
120
|
+
|
|
121
|
+
# Print a human-readable reachability status for an HTTP endpoint.
# Possible outputs: "not configured", "skipped", "curl unavailable",
# "unreachable", or "responding (HTTP <code>)". Always returns 0.
_probe_remote_endpoint_status() {
  local target="$1"
  local status_code

  if [[ -z "${target}" ]]; then
    printf 'not configured'
    return 0
  fi

  # Operators may opt out of network probes entirely.
  if [[ "${CLAWSPARK_SKIP_REMOTE_HEALTHCHECK:-false}" == "true" ]]; then
    printf 'skipped'
    return 0
  fi

  if ! check_command curl; then
    printf 'curl unavailable'
    return 0
  fi

  # A 3-second budget keeps diagnostics snappy; any transport failure
  # collapses to the sentinel code 000.
  status_code=$(curl -sS -o /dev/null -w '%{http_code}' --max-time 3 "${target}" 2>/dev/null || printf '000')

  if [[ "${status_code}" == "000" ]]; then
    printf 'unreachable'
  else
    printf 'responding (HTTP %s)' "${status_code}"
  fi
}
|
|
148
|
+
|
|
149
|
+
# Print the active provider's display name; "custom" providers include
# their configured label, e.g. "custom (acme)".
_current_provider_display() {
  local name label
  name="$(_current_provider)"

  if [[ "${name}" == "custom" ]]; then
    label="$(_current_custom_provider_label)"
    if [[ -n "${label}" ]]; then
      printf 'custom (%s)' "${label}"
      return 0
    fi
  fi

  printf '%s' "${name}"
}
|
|
164
|
+
|
|
165
|
+
# Map a provider id to the env var carrying its API key.
# Defaults to the currently configured provider when no arg is given.
# Returns 1 for unknown providers.
_provider_api_key_env_name() {
  local target="${1:-$(_current_provider)}"
  local env_name

  case "${target}" in
    openai)     env_name='OPENAI_API_KEY' ;;
    anthropic)  env_name='ANTHROPIC_API_KEY' ;;
    openrouter) env_name='OPENROUTER_API_KEY' ;;
    google)     env_name='GOOGLE_API_KEY' ;;
    custom)     env_name='CUSTOM_AI_API_KEY' ;;
    ollama)     env_name='OLLAMA_API_KEY' ;;
    *)          return 1 ;;
  esac

  printf '%s' "${env_name}"
}
|
|
177
|
+
|
|
178
|
+
# Map a provider id to the env var carrying its base URL.
# Returns 1 for unknown providers.
_provider_base_url_env_name() {
  local target="$1"
  local env_name

  case "${target}" in
    ollama)     env_name='OLLAMA_BASE_URL' ;;
    openai)     env_name='OPENAI_BASE_URL' ;;
    anthropic)  env_name='ANTHROPIC_BASE_URL' ;;
    openrouter) env_name='OPENROUTER_BASE_URL' ;;
    google)     env_name='GOOGLE_BASE_URL' ;;
    custom)     env_name='CUSTOM_AI_BASE_URL' ;;
    *)          return 1 ;;
  esac

  printf '%s' "${env_name}"
}
|
|
190
|
+
|
|
191
|
+
# Format a provider id for display; "custom" gains its label when one is
# supplied, e.g. "custom (acme)".
_provider_display() {
  local name="$1"
  local label="${2:-}"

  if [[ "${name}" != "custom" || -z "${label}" ]]; then
    printf '%s' "${name}"
    return 0
  fi

  printf 'custom (%s)' "${label}"
}
|
|
202
|
+
|
|
203
|
+
# Join the remaining arguments with the first argument as delimiter.
# Prints nothing (and returns 0) when no items are given.
_join_by() {
  local sep="$1"
  shift || true

  if [[ $# -eq 0 ]]; then
    return 0
  fi

  printf '%s' "$1"
  shift

  local piece
  for piece in "$@"; do
    printf '%s%s' "${sep}" "${piece}"
  done
}
|
|
218
|
+
|
|
219
|
+
# Emit the static provider catalog, one pipe-delimited row per provider:
#   provider|type|runtime-modes|default-base-url|notes
# Consumers parse these rows with IFS='|' read loops.
_provider_catalog_rows() {
  cat <<'EOF'
ollama|local|local-gpu,local-cpu,hybrid|http://127.0.0.1:11434/v1|Local Ollama runtime
openai|api|api-only,hybrid|https://api.openai.com/v1|OpenAI hosted API
anthropic|api|api-only,hybrid|https://api.anthropic.com|Anthropic hosted API
openrouter|api|api-only,hybrid|https://openrouter.ai/api/v1|OpenRouter aggregator
google|api|api-only,hybrid|https://generativelanguage.googleapis.com/v1beta/openai|Google OpenAI-compatible endpoint
custom|api|api-only,hybrid|https://your-provider.example.com/v1|OpenAI-compatible custom endpoint
EOF
}
|
|
229
|
+
|
|
230
|
+
# Print the catalog's default base URL for a provider; returns 1 when the
# provider has no catalog row.
_provider_default_base_url() {
  local wanted="$1"
  local name _kind _modes url _notes

  while IFS='|' read -r name _kind _modes url _notes; do
    [[ "${name}" == "${wanted}" ]] || continue
    printf '%s' "${url}"
    return 0
  done < <(_provider_catalog_rows)

  return 1
}
|
|
240
|
+
|
|
241
|
+
# Default menu index for the provider prompt.
# NOTE: the previous implementation branched on CLAWSPARK_DEFAULTS but ran
# identical logic on both paths, so the dead branch has been removed; both
# interactive and --defaults runs simply delegate.
_provider_default_index() {
  _provider_default_index_from_current
}
|
|
249
|
+
|
|
250
|
+
# Print the 0-based menu index of the given provider (defaults to the
# currently configured one). Unknown providers map to index 0 (ollama).
_provider_default_index_from_current() {
  local provider="${1:-$(_current_provider)}"
  local options=("ollama" "openai" "anthropic" "openrouter" "google" "custom")
  local i

  # C-style loop instead of `seq` — no subprocess per call.
  for (( i = 0; i < ${#options[@]}; i++ )); do
    if [[ "${options[$i]}" == "${provider}" ]]; then
      printf '%s' "${i}"
      return 0
    fi
  done

  printf '0'
}
|
|
264
|
+
|
|
265
|
+
# Prompt the user (via the controlling terminal) for a free-text value.
# $1 - question text, $2 - optional default shown in brackets.
# In --defaults mode the default is returned without prompting.
# Prompt output and input go through /dev/tty so the function's stdout
# carries only the answer (callers capture it with $(...)).
_prompt_text_value() {
  local question="$1"
  local default_value="${2:-}"
  local value=""

  if [[ "${CLAWSPARK_DEFAULTS}" == "true" ]]; then
    printf '%s' "${default_value}"
    return 0
  fi

  if [[ -n "${default_value}" ]]; then
    printf '\n%s%s [%s]:%s ' "${BOLD}" "${question}" "${default_value}" "${RESET}" >/dev/tty
  else
    printf '\n%s%s:%s ' "${BOLD}" "${question}" "${RESET}" >/dev/tty
  fi

  # EOF (e.g. ctrl-D) falls back to the default rather than aborting.
  read -r value </dev/tty || value=""
  if [[ -z "${value}" ]]; then
    value="${default_value}"
  fi
  printf '%s' "${value}"
}
|
|
287
|
+
|
|
288
|
+
# Print the active provider id.
# Resolution order: CLAWSPARK_V2_PRIMARY_PROVIDER env override > v2 config
# metadata > inference from the model ref's prefix (defaulting to ollama).
_current_provider() {
  local resolved

  if [[ -n "${CLAWSPARK_V2_PRIMARY_PROVIDER:-}" ]]; then
    printf '%s' "${CLAWSPARK_V2_PRIMARY_PROVIDER}"
    return 0
  fi

  resolved="$(_current_v2_provider_from_config)"
  if [[ -n "${resolved}" ]]; then
    printf '%s' "${resolved}"
    return 0
  fi

  case "$(_current_model_ref)" in
    ollama/*)    printf 'ollama' ;;
    anthropic/*) printf 'anthropic' ;;
    openai/*)    printf 'openai' ;;
    custom/*)    printf 'custom' ;;
    *)           printf 'ollama' ;;
  esac
}
|
|
311
|
+
|
|
312
|
+
# True (exit 0) when the active provider is the local Ollama runtime.
_uses_local_ollama() {
  [[ "$(_current_provider)" == "ollama" ]]
}
|
|
315
|
+
|
|
316
|
+
# Prefix a bare model name with the gateway namespace for its provider.
# Refs already containing a slash pass through untouched. OpenAI-compatible
# providers (openai/openrouter/google/custom) share the "openai/" prefix.
_normalize_model_ref() {
  local model="$1"
  local provider="${2:-$(_current_provider)}"

  case "${model}" in
    */*)
      printf '%s' "${model}"
      ;;
    *)
      case "${provider}" in
        anthropic)
          printf 'anthropic/%s' "${model}"
          ;;
        openrouter|google|openai|custom)
          printf 'openai/%s' "${model}"
          ;;
        *)
          printf 'ollama/%s' "${model}"
          ;;
      esac
      ;;
  esac
}
|
|
332
|
+
|
|
333
|
+
# Print the path of the env file consumed by the gateway service.
_gateway_env_file() {
  printf '%s/.openclaw/gateway.env' "${HOME}"
}
|
|
336
|
+
|
|
337
|
+
# Create ~/.openclaw if it does not already exist (idempotent).
_ensure_openclaw_dir() {
  mkdir -p "${HOME}/.openclaw"
}
|
|
340
|
+
|
|
341
|
+
# Insert or replace KEY=VALUE in the gateway env file, then export it into
# the current process. Duplicate KEY= lines are collapsed to one; all other
# lines are preserved in order. Exits 1 (logging the error) if the rewrite
# fails; python stderr goes to the install log.
_upsert_gateway_env_var() {
  local key="$1"
  local value="$2"
  local env_file
  env_file="$(_gateway_env_file)"

  _ensure_openclaw_dir
  touch "${env_file}"

  python3 -c '
import pathlib, sys

path = pathlib.Path(sys.argv[1])
key = sys.argv[2]
value = sys.argv[3]
line = f"{key}={value}"

lines = []
found = False
if path.exists():
    lines = path.read_text(encoding="utf-8").splitlines()

updated = []
for existing in lines:
    if existing.startswith(f"{key}="):
        if not found:
            updated.append(line)
        found = True
    else:
        updated.append(existing)

if not found:
    updated.append(line)

path.write_text("\n".join(updated) + "\n", encoding="utf-8")
' "${env_file}" "${key}" "${value}" 2>> "${CLAWSPARK_LOG}" || {
    log_error "Failed to update ${key} in ${env_file}."
    exit 1
  }

  # The env file may hold API keys — keep it owner-readable only.
  chmod 600 "${env_file}" 2>/dev/null || true
  export "${key}=${value}"
}
|
|
384
|
+
|
|
385
|
+
# Record the v2 primary provider (and optional custom label) under the
# "clawsparkV2" key of ~/.openclaw/openclaw.json.
# The custom label is removed when switching to a non-custom provider.
# Exits 1 if the config file is missing or the rewrite fails.
_update_v2_provider_metadata() {
  local provider="$1"
  local custom_name="${2:-}"
  local config_file="${HOME}/.openclaw/openclaw.json"

  if [[ ! -f "${config_file}" ]]; then
    log_error "Config not found at ${config_file}. Run install.sh first."
    exit 1
  fi

  python3 -c '
import json, sys

path = sys.argv[1]
provider = sys.argv[2]
custom_name = sys.argv[3]

with open(path, "r", encoding="utf-8") as fh:
    cfg = json.load(fh)

v2 = cfg.setdefault("clawsparkV2", {})
v2["primaryProvider"] = provider
if custom_name:
    v2["customProviderName"] = custom_name
elif provider != "custom":
    v2.pop("customProviderName", None)

with open(path, "w", encoding="utf-8") as fh:
    json.dump(cfg, fh, indent=2)
' "${config_file}" "${provider}" "${custom_name}" 2>> "${CLAWSPARK_LOG}" || {
    log_error "Failed to update provider metadata in ${config_file}."
    exit 1
  }
}
|
|
419
|
+
|
|
420
|
+
# `clawspark provider show` — print the active profile, provider, base URL
# and API-key status, plus a hint for updating non-local providers.
_cmd_provider_show() {
  local provider_display provider base_url api_key_env
  provider_display="$(_current_provider_display)"
  provider="$(_current_provider)"
  base_url="$(_current_provider_base_url 2>/dev/null || true)"
  api_key_env="$(_provider_api_key_env_name "${provider}" 2>/dev/null || true)"

  printf '\n%s%s clawspark provider%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"
  printf ' %-22s %s\n' "Profile" "${CLAWSPARK_PROFILE_NAME}"
  printf ' %-22s %s\n' "Provider" "${provider_display}"

  if [[ -n "${base_url}" ]]; then
    printf ' %-22s %s\n' "Base URL" "${base_url}"
  else
    printf ' %-22s %s\n' "Base URL" "(not configured)"
  fi

  # Only report key status for providers with a known API-key env var;
  # the key itself is never printed.
  if [[ -n "${api_key_env}" ]]; then
    if [[ -n "${!api_key_env:-}" ]]; then
      printf ' %-22s %s\n' "API key" "configured via ${api_key_env}"
    else
      printf ' %-22s %s\n' "API key" "missing (${api_key_env})"
    fi
  fi

  if [[ "${provider}" == "custom" ]]; then
    printf '\n Update custom provider:\n'
    printf ' clawspark provider set custom --name <label> --base-url <url> [--api-key <key>]\n'
  elif [[ "${provider}" != "ollama" ]]; then
    printf '\n Update remote provider:\n'
    printf ' clawspark provider set %s --base-url <url> [--api-key <key>]\n' "${provider}"
  fi
  printf '\n'
}
|
|
454
|
+
|
|
455
|
+
# `clawspark provider list` — render the provider catalog as a table,
# marking the active provider with "*".
_cmd_provider_list() {
  local current_provider current_base_url
  current_provider="$(_current_provider)"
  current_base_url="$(_current_provider_base_url 2>/dev/null || true)"

  printf '\n%s%s clawspark provider catalog%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"
  printf ' %-10s %-8s %-23s %-58s %s\n' "Provider" "Type" "Runtime Modes" "Default Base URL" "Notes"
  printf ' %-10s %-8s %-23s %-58s %s\n' "--------" "----" "-------------" "----------------" "-----"

  # Rows come from the static catalog; fields are pipe-delimited.
  while IFS='|' read -r provider provider_type runtime_modes default_base_url notes; do
    local marker=" "
    if [[ "${provider}" == "${current_provider}" ]]; then
      marker="*"
    fi
    printf '%s %-10s %-8s %-23s %-58s %s\n' "${marker}" "${provider}" "${provider_type}" "${runtime_modes}" "${default_base_url}" "${notes}"
  done < <(_provider_catalog_rows)

  if [[ -n "${current_base_url}" ]]; then
    printf '\n Active provider URL: %s\n' "${current_base_url}"
  fi
  printf ' Use %sclawspark provider set <provider> ...%s to switch providers.\n\n' "${CYAN}" "${RESET}"
}
|
|
477
|
+
|
|
478
|
+
# `clawspark provider doctor [--json]` — diagnose the active provider setup:
# base URL present, API key present, model-ref prefix consistent with the
# provider, and (for remote providers) endpoint reachability.
# Returns 0 when clean, 1 when any issue was found. --json prints a machine-
# readable payload instead of the text report.
# NOTE(review): "${issues[@]}" on an empty array errors under `set -u` on
# bash < 4.4 — confirm the supported bash baseline.
_cmd_provider_doctor() {
  local output_format="text"
  local provider provider_display base_url api_key_env api_key_value model_ref endpoint_state custom_label
  local has_issue=0
  local -a issues=()
  local -a suggestions=()

  while [[ $# -gt 0 ]]; do
    case "$1" in
      --json)
        output_format="json"
        shift
        ;;
      *)
        log_error "Unknown provider doctor option: $1"
        printf ' Usage: clawspark provider doctor [--json]\n'
        exit 1
        ;;
    esac
  done

  # Gather current state; failures degrade to empty strings.
  provider="$(_current_provider)"
  provider_display="$(_current_provider_display)"
  base_url="$(_current_provider_base_url 2>/dev/null || true)"
  api_key_env="$(_provider_api_key_env_name "${provider}" 2>/dev/null || true)"
  model_ref="$(_current_model_ref)"
  custom_label="$(_current_custom_provider_label 2>/dev/null || true)"

  if [[ -n "${api_key_env}" ]]; then
    # Indirect expansion: read the value of the named env var.
    api_key_value="${!api_key_env:-}"
  else
    api_key_value=""
  fi

  # Check 1: base URL configured.
  if [[ -n "${base_url}" ]]; then
    :
  else
    issues+=("Base URL is not configured")
    suggestions+=("Run clawspark provider set ${provider} --base-url <url> to configure the endpoint")
    has_issue=1
  fi

  # Check 2: custom providers need a display label.
  if [[ "${provider}" == "custom" ]]; then
    if [[ -n "${custom_label}" ]]; then
      :
    else
      issues+=("Custom provider label is missing")
      suggestions+=("Run clawspark provider set custom --name <label> --base-url <url>")
      has_issue=1
    fi
  fi

  # Check 3: API key — missing key is only advisory for local ollama.
  if [[ -n "${api_key_env}" ]]; then
    if [[ -n "${api_key_value}" ]]; then
      :
    elif [[ "${provider}" == "ollama" ]]; then
      suggestions+=("Set OLLAMA_API_KEY=ollama in ~/.openclaw/gateway.env for local compatibility")
    else
      issues+=("API key is missing (${api_key_env})")
      suggestions+=("Export ${api_key_env} in ~/.openclaw/gateway.env or rerun clawspark provider set ${provider} --api-key <key>")
      has_issue=1
    fi
  fi

  # Check 4: model-ref prefix must match the provider's gateway namespace.
  case "${provider}" in
    ollama)
      if [[ -n "${model_ref}" && "${model_ref}" != ollama/* ]]; then
        issues+=("Model ref does not match provider. Expected ollama/* but found ${model_ref}")
        suggestions+=("Switch to an Ollama model with clawspark model switch <model>")
        has_issue=1
      fi
      ;;
    anthropic)
      if [[ -n "${model_ref}" && "${model_ref}" != anthropic/* ]]; then
        issues+=("Model ref does not match provider. Expected anthropic/* but found ${model_ref}")
        suggestions+=("Update the default model to anthropic/<model> in OpenClaw config")
        has_issue=1
      fi
      ;;
    openai|openrouter|google|custom)
      if [[ -n "${model_ref}" && "${model_ref}" != openai/* ]]; then
        issues+=("Model ref does not match provider. Expected openai/* but found ${model_ref}")
        suggestions+=("Update the default model to openai/<model> in OpenClaw config")
        has_issue=1
      fi
      ;;
  esac

  # Check 5: probe remote endpoints; "skipped"/"curl unavailable" and any
  # HTTP response count as OK — only transport failure is an issue.
  if [[ "${provider}" != "ollama" ]]; then
    endpoint_state="$(_probe_remote_endpoint_status "${base_url}")"
    case "${endpoint_state}" in
      responding*|skipped|curl\ unavailable)
        ;;
      *)
        issues+=("Endpoint probe reported: ${endpoint_state}")
        suggestions+=("Verify network reachability and confirm the configured base URL: ${base_url}")
        has_issue=1
        ;;
    esac
  else
    endpoint_state="local"
  fi

  # JSON output path: serialize findings via python (issues/suggestions are
  # joined with "||" and split again on the python side).
  if [[ "${output_format}" == "json" ]]; then
    local issues_joined suggestions_joined
    issues_joined="$(_join_by '||' "${issues[@]}")"
    suggestions_joined="$(_join_by '||' "${suggestions[@]}")"

    python3 - <<'PY' "${provider}" "${provider_display}" "${model_ref}" "${base_url}" "${api_key_env}" "${api_key_value}" "${custom_label}" "${endpoint_state}" "${has_issue}" "${issues_joined}" "${suggestions_joined}"
import json, sys

provider, provider_display, model_ref, base_url, api_key_env, api_key_value, custom_label, endpoint_state, has_issue, issues_raw, suggestions_raw = sys.argv[1:]

payload = {
    "provider": provider,
    "providerDisplay": provider_display,
    "model": model_ref,
    "baseUrl": base_url,
    "apiKeyEnv": api_key_env,
    "apiKeyConfigured": bool(api_key_value),
    "customProviderName": custom_label,
    "endpointProbe": endpoint_state,
    "status": "issues" if has_issue == "1" else "ok",
    "issues": [item for item in issues_raw.split("||") if item],
    "suggestions": [item for item in suggestions_raw.split("||") if item],
}

print(json.dumps(payload, indent=2))
PY

    if [[ ${has_issue} -eq 0 ]]; then
      return 0
    fi
    return 1
  fi

  # Text output path.
  printf '\n%s%s clawspark provider doctor%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"
  printf ' %-22s %s\n' "Provider" "${provider_display}"
  printf ' %-22s %s\n' "Model" "${model_ref:-not configured}"
  printf ' %-22s %s\n' "Base URL" "${base_url:-missing}"
  if [[ "${provider}" == "custom" ]]; then
    printf ' %-22s %s\n' "Custom label" "${custom_label:-missing}"
  fi
  if [[ -n "${api_key_env}" ]]; then
    if [[ -n "${api_key_value}" ]]; then
      printf ' %-22s configured via %s\n' "API key" "${api_key_env}"
    else
      printf ' %-22s missing (%s)\n' "API key" "${api_key_env}"
    fi
  fi
  printf ' %-22s %s\n' "Endpoint probe" "${endpoint_state}"

  if [[ -n "${base_url}" ]]; then
    log_success "Base URL is configured"
  else
    log_warn "Base URL is not configured"
  fi
  if [[ "${provider}" == "custom" ]]; then
    if [[ -n "${custom_label}" ]]; then
      log_success "Custom provider label is set: ${custom_label}"
    else
      log_warn "Custom provider label is missing"
    fi
  fi
  if [[ -n "${api_key_env}" ]]; then
    if [[ -n "${api_key_value}" ]]; then
      log_success "API key is configured via ${api_key_env}"
    elif [[ "${provider}" == "ollama" ]]; then
      log_warn "OLLAMA_API_KEY is missing; local mode usually works best with OLLAMA_API_KEY=ollama"
    else
      log_warn "API key is missing (${api_key_env})"
    fi
  fi
  if [[ ${#issues[@]} -eq 0 ]]; then
    log_success "Model ref matches provider"
    if [[ "${provider}" != "ollama" ]]; then
      log_success "Endpoint probe completed: ${endpoint_state}"
    fi
  else
    local issue
    for issue in "${issues[@]}"; do
      log_warn "${issue}"
    done
  fi

  if [[ ${#suggestions[@]} -gt 0 ]]; then
    local suggestion
    printf '\n Suggestions:\n'
    for suggestion in "${suggestions[@]}"; do
      printf ' - %s\n' "${suggestion}"
    done
  fi

  printf '\n'
  if [[ ${has_issue} -eq 0 ]]; then
    log_success "Provider diagnostics passed"
    return 0
  fi

  log_warn "Provider diagnostics found issues"
  return 1
}
|
|
680
|
+
|
|
681
|
+
# `clawspark provider set <provider> [--name <label>] [--base-url <url>]
# [--api-key <key>]` — persist a provider selection into the gateway env
# file and the v2 config metadata, then tell the user to restart.
# NOTE(review): the usage strings say --base-url is optional for ollama, but
# the code below exits when it is absent — confirm intended behavior.
_cmd_provider_set_remote() {
  local provider="$1"
  shift || true

  local base_url=""
  local api_key=""
  local provider_name=""
  local api_key_env=""
  local base_url_env=""

  # Validate the provider id up front.
  case "${provider}" in
    ollama|openai|anthropic|openrouter|google|custom)
      ;;
    *)
      log_error "Unsupported remote provider: ${provider}"
      printf ' Usage: clawspark provider set <ollama|openai|anthropic|openrouter|google|custom> [options]\n'
      exit 1
      ;;
  esac

  # Parse options; unknown flags print a provider-specific usage line.
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --name)
        provider_name="${2:-}"
        shift 2
        ;;
      --base-url)
        base_url="${2:-}"
        shift 2
        ;;
      --api-key)
        api_key="${2:-}"
        shift 2
        ;;
      *)
        log_error "Unknown provider option: $1"
        if [[ "${provider}" == "custom" ]]; then
          printf ' Usage: clawspark provider set custom --name <label> --base-url <url> [--api-key <key>]\n'
        elif [[ "${provider}" == "ollama" ]]; then
          printf ' Usage: clawspark provider set ollama [--base-url <url>]\n'
        else
          printf ' Usage: clawspark provider set %s --base-url <url> [--api-key <key>]\n' "${provider}"
        fi
        exit 1
        ;;
    esac
  done

  # A base URL is required for every provider (see NOTE above re: ollama).
  [[ -z "${base_url}" ]] && {
    if [[ "${provider}" == "custom" ]]; then
      log_error "Usage: clawspark provider set custom --name <label> --base-url <url> [--api-key <key>]"
    elif [[ "${provider}" == "ollama" ]]; then
      log_error "Usage: clawspark provider set ollama [--base-url <url>]"
    else
      log_error "Usage: clawspark provider set ${provider} --base-url <url> [--api-key <key>]"
    fi
    exit 1
  }

  if [[ ! "${base_url}" =~ ^https?:// ]]; then
    log_error "Provider base URL must start with http:// or https://"
    exit 1
  fi

  # Custom providers additionally require a display label.
  if [[ "${provider}" == "custom" && -z "${provider_name}" ]]; then
    log_error "Usage: clawspark provider set custom --name <label> --base-url <url> [--api-key <key>]"
    exit 1
  fi

  base_url_env="$(_provider_base_url_env_name "${provider}")"
  api_key_env="$(_provider_api_key_env_name "${provider}")"

  # Persist selection into the gateway env file (also exports each var).
  _upsert_gateway_env_var "CLAWSPARK_V2_PRIMARY_PROVIDER" "${provider}"
  _upsert_gateway_env_var "${base_url_env}" "${base_url}"
  if [[ -n "${api_key}" ]]; then
    _upsert_gateway_env_var "${api_key_env}" "${api_key}"
  fi

  # Mirror the selection into openclaw.json metadata.
  if [[ "${provider}" == "custom" ]]; then
    _upsert_gateway_env_var "CUSTOM_AI_PROVIDER_NAME" "${provider_name}"
    _update_v2_provider_metadata "custom" "${provider_name}"
  else
    _update_v2_provider_metadata "${provider}"
  fi

  # Setting a provider moves this install onto the v2 profile.
  CLAWSPARK_PROFILE_NAME="v2"

  # Slightly different success wording when invoked via the set-custom alias.
  if [[ "${provider}" == "custom" && "${CLAWSPARK_PROVIDER_SET_ALIAS:-}" == "set-custom" ]]; then
    log_success "Custom provider updated: ${provider_name}"
  else
    log_success "Provider updated: $(_provider_display "${provider}" "${provider_name}")"
  fi
  log_info "Provider base URL: ${base_url}"
  if [[ -n "${api_key}" ]]; then
    log_info "API key saved to ${api_key_env}"
  fi
  log_info "Restart to apply: clawspark restart"
}
|
|
779
|
+
|
|
780
|
+
# Switch the active inference provider, filling in sensible defaults.
# Arguments:
#   $1 - provider id (ollama|openai|anthropic|openrouter|google|custom);
#        prompts interactively when omitted
#   remaining: [--base-url <url>] [--api-key <key>] [--name <label>]
# Persistence is delegated to _cmd_provider_set_remote; this wrapper only
# resolves defaults, validates input, and warns about missing API keys.
_cmd_provider_use() {
  local provider="${1:-}"
  shift || true   # tolerate being called with no arguments at all

  local base_url=""
  local api_key=""
  local provider_name=""
  local set_args=()

  # No provider given: prompt with the catalog, preselecting the current one.
  if [[ -z "${provider}" ]]; then
    local provider_options=("ollama" "openai" "anthropic" "openrouter" "google" "custom")
    local default_idx
    default_idx="$(_provider_default_index)"
    provider=$(prompt_choice "Choose provider" provider_options "${default_idx}")
  fi

  # Parse the optional flags that follow the provider id.
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --base-url)
        base_url="${2:-}"
        shift 2
        ;;
      --api-key)
        api_key="${2:-}"
        shift 2
        ;;
      --name)
        provider_name="${2:-}"
        shift 2
        ;;
      *)
        log_error "Unknown provider option: $1"
        printf ' Usage: clawspark provider use <ollama|openai|anthropic|openrouter|google|custom> [--base-url <url>] [--api-key <key>] [--name <label>]\n'
        exit 1
        ;;
    esac
  done

  # Reject anything outside the supported catalog.
  case "${provider}" in
    ollama|openai|anthropic|openrouter|google|custom)
      ;;
    *)
      log_error "Unsupported provider: ${provider}"
      exit 1
      ;;
  esac

  # Resolve a base URL: custom providers are prompted (seeded with the
  # current or default value), known providers fall back to their default.
  if [[ -z "${base_url}" ]]; then
    if [[ "${provider}" == "custom" ]]; then
      base_url="$(_prompt_text_value "Enter custom provider base URL" "$(_current_provider_base_url 2>/dev/null || _provider_default_base_url custom || true)")"
    else
      base_url="$(_provider_default_base_url "${provider}" 2>/dev/null || true)"
    fi
  fi

  if [[ -z "${base_url}" ]]; then
    log_error "No default base URL is available for provider: ${provider}"
    exit 1
  fi

  # A custom provider must carry a display label; prompt if not given.
  if [[ "${provider}" == "custom" && -z "${provider_name}" ]]; then
    provider_name="$(_prompt_text_value "Enter custom provider name" "$(_current_custom_provider_label 2>/dev/null || true)")"
    if [[ -z "${provider_name}" ]]; then
      log_error "Custom provider requires --name with clawspark provider use custom"
      exit 1
    fi
  fi

  # Custom endpoints must be explicit http(s) URLs.
  if [[ "${provider}" == "custom" && ! "${base_url}" =~ ^https?:// ]]; then
    log_error "Custom provider requires a valid http:// or https:// base URL"
    exit 1
  fi

  # Local Ollama accepts any token; use a placeholder so downstream
  # clients that require a key still work.
  if [[ "${provider}" == "ollama" && -z "${api_key}" && -z "${OLLAMA_API_KEY:-}" ]]; then
    api_key="ollama"
  fi

  # Build the argument vector for _cmd_provider_set_remote.
  set_args=(--base-url "${base_url}")
  if [[ -n "${api_key}" ]]; then
    set_args+=(--api-key "${api_key}")
  fi
  if [[ -n "${provider_name}" ]]; then
    set_args+=(--name "${provider_name}")
  fi

  _cmd_provider_set_remote "${provider}" "${set_args[@]}"

  # Warn when no key was supplied and none is already exported for the
  # provider's env var (Ollama is exempt -- see placeholder above).
  if [[ -n "${api_key}" ]]; then
    :   # key already persisted by _cmd_provider_set_remote
  else
    local api_key_env
    api_key_env="$(_provider_api_key_env_name "${provider}" 2>/dev/null || true)"
    # ${!api_key_env} is bash indirection: the value of the named env var.
    if [[ -n "${api_key_env}" && -z "${!api_key_env:-}" && "${provider}" != "ollama" ]]; then
      log_warn "No API key was provided. Set ${api_key_env} before using remote inference."
    fi
  fi
}
|
|
877
|
+
|
|
878
|
+
# Alias for "provider set custom". The alias marker is a dynamically scoped
# local that _cmd_provider_set_remote reads to tailor its success message.
_cmd_provider_set_custom() {
  local CLAWSPARK_PROVIDER_SET_ALIAS
  CLAWSPARK_PROVIDER_SET_ALIAS="set-custom"
  _cmd_provider_set_remote custom "$@"
}
|
|
882
|
+
|
|
883
|
+
# Dispatch "clawspark provider <subcommand>" to the matching helper.
# With no subcommand, shows the current provider configuration.
_cmd_provider() {
  local action="${1:-show}"
  shift || true

  case "${action}" in
    list)
      _cmd_provider_list
      ;;
    doctor)
      _cmd_provider_doctor "$@"
      ;;
    show)
      _cmd_provider_show
      ;;
    set)
      local target="${1:-}"
      if [[ -z "${target}" ]]; then
        log_error "Usage: clawspark provider set <ollama|openai|anthropic|openrouter|google|custom> [options]"
        exit 1
      fi
      shift || true
      _cmd_provider_set_remote "${target}" "$@"
      ;;
    use)
      _cmd_provider_use "$@"
      ;;
    set-custom)
      _cmd_provider_set_custom "$@"
      ;;
    *)
      log_error "Unknown provider subcommand: ${action}"
      printf ' Usage: clawspark provider [show|list|doctor [--json]|set <provider> [options]|use [provider] [options]|set-custom --name <label> --base-url <url> [--api-key <key>]]\n'
      exit 1
      ;;
  esac
}
|
|
919
|
+
|
|
920
|
+
# ── Source sandbox commands ────────────────────────────────────────────────
|
|
921
|
+
# ── Source sandbox commands ────────────────────────────────────────────────
# Source the sandbox command module when it is installed; silently a no-op
# otherwise so the core CLI still works without the optional module.
_load_sandbox_commands() {
  local module="${CLAWSPARK_DIR}/lib/sandbox-commands.sh"
  if [[ -f "${module}" ]]; then
    # shellcheck source=/dev/null
    source "${module}"
  fi
}

_load_sandbox_commands
|
|
930
|
+
|
|
931
|
+
# ── Source diagnose module ────────────────────────────────────────────────
|
|
932
|
+
# ── Source diagnose module ────────────────────────────────────────────────
# Source the diagnostics module when installed; no-op when absent.
_load_diagnose() {
  local module="${CLAWSPARK_DIR}/lib/diagnose.sh"
  if [[ -f "${module}" ]]; then
    # shellcheck source=/dev/null
    source "${module}"
  fi
}
|
|
939
|
+
|
|
940
|
+
# ── Source skill audit module ─────────────────────────────────────────────
|
|
941
|
+
# ── Source skill audit module ─────────────────────────────────────────────
# Source the skill-audit module when installed; no-op when absent.
_load_skill_audit() {
  local module="${CLAWSPARK_DIR}/lib/skill-audit.sh"
  if [[ -f "${module}" ]]; then
    # shellcheck source=/dev/null
    source "${module}"
  fi
}
|
|
948
|
+
|
|
949
|
+
# ── Version ─────────────────────────────────────────────────────────────────
|
|
950
|
+
CLAWSPARK_VERSION="2.0.0"
|
|
951
|
+
|
|
952
|
+
# ── Usage ───────────────────────────────────────────────────────────────────
|
|
953
|
+
# Print the top-level CLI help text. The unquoted heredoc delimiter lets
# ${BOLD}/${GREEN}/${RESET} color codes and ${CLAWSPARK_VERSION} expand.
_usage() {
  cat <<USAGE
${BOLD}clawspark${RESET} v${CLAWSPARK_VERSION} - manage your OpenClaw AI agent

${BOLD}Usage:${RESET}
  clawspark <command> [options]

${BOLD}Commands:${RESET}
  ${GREEN}status${RESET}                  Show status of all components
  ${GREEN}skills sync${RESET}             Re-read skills.yaml and install/remove skills
  ${GREEN}skills add <name>${RESET}       Add a skill and install it
  ${GREEN}skills remove <name>${RESET}    Remove a skill
  ${GREEN}skills pack [name]${RESET}      Install a curated skill pack (or list available packs)
  ${GREEN}skills audit${RESET}            Scan installed skills for security issues
  ${GREEN}benchmark${RESET}               Run a performance benchmark
  ${GREEN}update${RESET}                  Update OpenClaw and re-sync skills
  ${GREEN}dashboard${RESET}               Show dashboard URLs and start ClawMetry if needed
  ${GREEN}tailscale setup${RESET}         Configure Tailscale remote access
  ${GREEN}tailscale status${RESET}        Show Tailscale connection status and URLs
  ${GREEN}airgap on|off${RESET}           Toggle air-gap mode
  ${GREEN}logs${RESET}                    Tail gateway logs
  ${GREEN}start${RESET}                   Start all services (Ollama, gateway, node host, dashboard)
  ${GREEN}stop${RESET}                    Stop services (add --all to also stop Ollama)
  ${GREEN}restart${RESET}                 Restart Ollama, gateway, node host, and dashboard
  ${GREEN}uninstall${RESET}               Remove everything (with confirmation)
  ${GREEN}model list${RESET}              Show available and active models
  ${GREEN}model switch <model>${RESET}    Change the primary chat model
  ${GREEN}model vision${RESET}            Set or show the vision model
  ${GREEN}provider${RESET}                Show current provider configuration
  ${GREEN}provider list${RESET}           Show supported provider catalog
  ${GREEN}provider doctor${RESET}         Validate current provider setup
  ${GREEN}provider doctor --json${RESET}  Output diagnostics as JSON
  ${GREEN}provider set <p> ...${RESET}    Update remote provider settings
  ${GREEN}provider use [p]${RESET}        Switch provider using default endpoints
  ${GREEN}provider set-custom ...${RESET} Update custom provider settings
  ${GREEN}tools${RESET}                   List agent tools and their status
  ${GREEN}tools enable <name>${RESET}     Enable an additional tool (e.g. email, browser)
  ${GREEN}tools disable <name>${RESET}    Disable a tool for the personal agent
  ${GREEN}sandbox${RESET}                 Show sandbox status
  ${GREEN}sandbox on${RESET}              Enable Docker sandbox for code execution
  ${GREEN}sandbox off${RESET}             Disable sandbox (run code on host)
  ${GREEN}sandbox test${RESET}            Verify sandbox isolation with test suite
  ${GREEN}mcp list${RESET}                List configured MCP servers
  ${GREEN}mcp setup${RESET}               Install default MCP servers (diagrams, memory, etc.)
  ${GREEN}mcp add <n> <cmd>${RESET}       Add a custom MCP server
  ${GREEN}mcp remove <n>${RESET}          Remove an MCP server
  ${GREEN}diagnose${RESET}                Run full system diagnostics (alias: doctor)
  ${GREEN}help${RESET}                    Show this help message

USAGE
}
|
|
1004
|
+
|
|
1005
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
1006
|
+
# HELPERS
|
|
1007
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
1008
|
+
|
|
1009
|
+
# Kill a process from a PID file, then remove the file
|
|
1010
|
+
# Kill the process recorded in a PID file, then remove the file.
# Arguments: $1 - path to the PID file. Missing file is a no-op; a file
# holding anything non-numeric is simply deleted without signalling.
_kill_pid_file() {
  local pidfile="$1" pid=""
  [[ -f "${pidfile}" ]] || return 0
  pid=$(cat "${pidfile}" 2>/dev/null || echo "")
  # Only signal when the contents look like a PID (also implies non-empty).
  if [[ "${pid}" =~ ^[0-9]+$ ]]; then
    kill "${pid}" 2>/dev/null || true
  fi
  rm -f "${pidfile}"
}
|
|
1021
|
+
|
|
1022
|
+
# Also kill by process name pattern as a fallback
|
|
1023
|
+
# Fallback kill by command-line pattern when no PID file is available.
# Best-effort: "no process matched" and permission errors are ignored.
_kill_by_name() {
  pkill -f "$1" 2>/dev/null || true
}
|
|
1027
|
+
|
|
1028
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
1029
|
+
# SHARED HELPERS
|
|
1030
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
1031
|
+
|
|
1032
|
+
# _check_service_running LABEL SYSTEMD_UNIT PID_FILE
|
|
1033
|
+
# Prints the status line for a service, checking systemd first then PID file.
|
|
1034
|
+
# _check_service_running LABEL SYSTEMD_UNIT PID_FILE
# Prints the status line for a service, checking systemd first then PID file.
# Arguments:
#   $1 - display label for the status line
#   $2 - systemd unit file name (looked up under /etc/systemd/system)
#   $3 - PID file path used when no active systemd unit exists
_check_service_running() {
  local label="$1"
  local unit="$2"
  local pid_file="$3"
  # An installed AND active systemd unit takes precedence over PID files.
  if [[ -f "/etc/systemd/system/${unit}" ]] && \
     systemctl is-active --quiet "${unit}" 2>/dev/null; then
    printf ' %s✓%s %-17s running (systemd)\n' "${GREEN}" "${RESET}" "${label}"
  elif [[ -f "${pid_file}" ]]; then
    local pid
    pid=$(cat "${pid_file}" 2>/dev/null || echo "")
    # kill -0 probes process existence without sending a signal.
    if [[ -n "${pid}" ]] && kill -0 "${pid}" 2>/dev/null; then
      printf ' %s✓%s %-17s running (PID %s)\n' "${GREEN}" "${RESET}" "${label}" "${pid}"
    else
      printf ' %s✗%s %-17s not running (stale PID)\n' "${YELLOW}" "${RESET}" "${label}"
    fi
  else
    printf ' %s-%s %-17s not started\n' "${YELLOW}" "${RESET}" "${label}"
  fi
}
|
|
1053
|
+
|
|
1054
|
+
# Load KEY=VALUE pairs from ~/.openclaw/gateway.env into the environment.
# Skips comments and blank lines; strips one layer of surrounding double
# or single quotes; exports each key so child processes inherit it.
_source_gateway_env() {
  local env_file="${HOME}/.openclaw/gateway.env"
  if [[ -f "${env_file}" ]]; then
    local line key val
    # "|| [[ -n line ]]" also processes a final line lacking a newline.
    while IFS= read -r line || [[ -n "${line}" ]]; do
      [[ "${line}" =~ ^[[:space:]]*# ]] && continue   # skip comments
      [[ -z "${line// }" ]] && continue               # skip blank/whitespace-only
      if [[ "${line}" =~ ^([A-Za-z_][A-Za-z0-9_]*)=(.*) ]]; then
        key="${BASH_REMATCH[1]}"
        val="${BASH_REMATCH[2]}"
        # Strip one leading/trailing double quote, then single quote.
        val="${val%\"}" ; val="${val#\"}"
        val="${val%\'}" ; val="${val#\'}"
        export "${key}=${val}"
      fi
    done < "${env_file}"
  fi

  # NOTE(review): the condition reads CLAWSPARK_PROFILE but the assignment
  # targets CLAWSPARK_PROFILE_NAME -- confirm which variable is intended;
  # this looks like a possible variable-name mismatch.
  if [[ -z "${CLAWSPARK_PROFILE:-}" && -n "${CLAWSPARK_V2_RUNTIME_MODE:-}" ]]; then
    CLAWSPARK_PROFILE_NAME="v2"
  fi
}

# Load the gateway environment as soon as the script runs.
_source_gateway_env
|
|
1077
|
+
|
|
1078
|
+
# Poll the local Ollama HTTP endpoint until it responds, up to 15 attempts
# one second apart. Returns 0 once reachable, 1 (after logging) on timeout.
_wait_for_ollama() {
  local try
  for (( try = 0; try < 15; try++ )); do
    if curl -sf http://127.0.0.1:11434/ &>/dev/null; then
      return 0
    fi
    sleep 1
  done
  log_error "Ollama failed to become ready after 15 seconds."
  return 1
}
|
|
1088
|
+
|
|
1089
|
+
# Echo "true" when systemd is present AND the clawspark gateway unit is
# installed; echo "false" otherwise. Output is consumed, not the status.
_detect_systemd() {
  if check_command systemctl && systemctl cat clawspark-gateway.service &>/dev/null; then
    echo "true"
  else
    echo "false"
  fi
}
|
|
1098
|
+
|
|
1099
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
1100
|
+
# COMMANDS
|
|
1101
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
1102
|
+
|
|
1103
|
+
# ── status ──────────────────────────────────────────────────────────────────
|
|
1104
|
+
# ── status ──────────────────────────────────────────────────────────────────
# Print a human-readable summary of every component: profile, provider,
# Ollama or remote endpoint, OpenClaw CLI, background services, skills,
# air-gap mode, sandbox, and gateway token presence.
_cmd_status() {
  printf '\n%s%s clawspark status%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  printf ' %s-%s Profile %s\n' "${CYAN}" "${RESET}" "${CLAWSPARK_PROFILE_NAME}"

  local provider
  provider="$(_current_provider)"
  local provider_display
  provider_display="$(_current_provider_display)"
  printf ' %s-%s Provider %s\n' "${CYAN}" "${RESET}" "${provider_display}"

  # Ollama -- only probed when the active provider uses the local runtime.
  if _uses_local_ollama; then
    if curl -sf http://127.0.0.1:11434/ &>/dev/null; then
      printf ' %s✓%s Ollama running\n' "${GREEN}" "${RESET}"
    else
      printf ' %s✗%s Ollama not running\n' "${RED}" "${RESET}"
    fi

    # Models -- "tail -n +2" drops the header row of `ollama list`.
    if check_command ollama; then
      local model_count
      model_count=$(ollama list 2>/dev/null | tail -n +2 | wc -l | tr -d ' ')
      printf ' %s✓%s Models %s loaded\n' "${GREEN}" "${RESET}" "${model_count}"
    else
      printf ' %s✗%s Models ollama not found\n' "${RED}" "${RESET}"
    fi
  else
    # Remote provider: report endpoint reachability instead of Ollama.
    printf ' %s-%s Ollama not required for %s\n' "${CYAN}" "${RESET}" "${provider}"

    local endpoint endpoint_state
    endpoint="$(_current_provider_base_url)"
    endpoint_state="$(_probe_remote_endpoint_status "${endpoint}")"
    printf ' %s-%s Remote endpoint %s\n' "${CYAN}" "${RESET}" "${endpoint_state}"
    if [[ -n "${endpoint}" ]]; then
      printf ' %s-%s Base URL %s\n' "${CYAN}" "${RESET}" "${endpoint}"
    fi
  fi

  # OpenClaw CLI
  if check_command openclaw; then
    local oc_ver
    oc_ver=$(openclaw --version 2>/dev/null || echo "unknown")
    printf ' %s✓%s OpenClaw %s\n' "${GREEN}" "${RESET}" "${oc_ver}"
  else
    printf ' %s✗%s OpenClaw not installed\n' "${RED}" "${RESET}"
  fi

  # Background services (systemd unit first, PID file fallback).
  _check_service_running "Gateway" "clawspark-gateway.service" "${CLAWSPARK_DIR}/gateway.pid"
  _check_service_running "Node host" "clawspark-nodehost.service" "${CLAWSPARK_DIR}/node.pid"
  _check_service_running "Dashboard" "clawspark-dashboard.service" "${CLAWSPARK_DIR}/dashboard.pid"

  # Skills -- count list items ("- ...") in skills.yaml.
  local skill_count=0
  if [[ -f "${CLAWSPARK_DIR}/skills.yaml" ]]; then
    skill_count=$(grep -c '^ *- ' "${CLAWSPARK_DIR}/skills.yaml" 2>/dev/null || echo 0)
  fi
  printf ' %s✓%s Skills %s configured\n' "${GREEN}" "${RESET}" "${skill_count}"

  # Air-gap state file holds the literal string "true" when enabled.
  local ag_state="off"
  if [[ -f "${CLAWSPARK_DIR}/airgap.state" ]]; then
    ag_state=$(cat "${CLAWSPARK_DIR}/airgap.state")
    [[ "${ag_state}" == "true" ]] && ag_state="ON" || ag_state="off"
  fi
  printf ' %s-%s Air-gap %s\n' "${CYAN}" "${RESET}" "${ag_state}"

  # Sandbox state plus whether the Docker image has been built.
  local sb_state="off"
  if [[ -f "${CLAWSPARK_DIR}/sandbox.state" ]]; then
    sb_state=$(cat "${CLAWSPARK_DIR}/sandbox.state")
    [[ "${sb_state}" == "true" ]] && sb_state="ON" || sb_state="off"
  fi
  if check_command docker && docker image inspect clawspark-sandbox:latest &>/dev/null 2>&1; then
    printf ' %s✓%s Sandbox %s (image ready)\n' "${GREEN}" "${RESET}" "${sb_state}"
  else
    printf ' %s-%s Sandbox %s\n' "${CYAN}" "${RESET}" "${sb_state}"
  fi

  # Gateway auth token
  if [[ -f "${CLAWSPARK_DIR}/token" ]]; then
    printf ' %s✓%s Token present\n' "${GREEN}" "${RESET}"
  else
    printf ' %s-%s Token not generated\n' "${YELLOW}" "${RESET}"
  fi

  printf '\n'
}
|
|
1192
|
+
|
|
1193
|
+
# ── skills ──────────────────────────────────────────────────────────────────
|
|
1194
|
+
# ── skills ──────────────────────────────────────────────────────────────────
# Dispatch "clawspark skills <subcommand>" to the matching helper.
_cmd_skills() {
  local action="${1:-}"
  shift || true

  case "${action}" in
    sync)
      _skills_sync
      ;;
    add)
      local skill_slug="${1:-}"
      if [[ -z "${skill_slug}" ]]; then
        log_error "Usage: clawspark skills add <name>"
        exit 1
      fi
      _skills_add "${skill_slug}"
      ;;
    remove)
      local skill_slug="${1:-}"
      if [[ -z "${skill_slug}" ]]; then
        log_error "Usage: clawspark skills remove <name>"
        exit 1
      fi
      _skills_remove "${skill_slug}"
      ;;
    pack)
      # Without a pack name, just list the available packs.
      local pack="${1:-}"
      if [[ -z "${pack}" ]]; then
        _skills_list_packs
        return 0
      fi
      _skills_install_pack "${pack}"
      ;;
    audit)
      # The audit implementation lives in an optional module.
      _load_skill_audit
      if command -v audit_skills &>/dev/null; then
        audit_skills
      else
        log_error "Skill audit module not found. Re-install clawspark."
        exit 1
      fi
      ;;
    *)
      log_error "Unknown skills subcommand: ${action}"
      printf ' Usage: clawspark skills [sync|add <name>|remove <name>|pack [name]|audit]\n'
      exit 1
      ;;
  esac
}
|
|
1233
|
+
|
|
1234
|
+
# Re-read skills.yaml and (re)install every enabled skill via clawhub.
# Relies on _parse_enabled_skills to emit one slug per line; install
# output is appended to the clawspark log, only ✓/✗ is shown inline.
_skills_sync() {
  local skills_file="${CLAWSPARK_DIR}/skills.yaml"
  if [[ ! -f "${skills_file}" ]]; then
    log_error "No skills.yaml found at ${skills_file}"
    exit 1
  fi

  log_info "Syncing skills from ${skills_file}..."

  # Process substitution keeps the while-loop in the current shell so
  # the skills array survives the loop.
  local -a skills=()
  while IFS= read -r slug; do
    skills+=("${slug}")
  done < <(_parse_enabled_skills "${skills_file}")

  log_info "Found ${#skills[@]} skill(s). Installing..."
  local installed=0 failed=0
  for skill in "${skills[@]}"; do
    printf ' %s→%s %s ... ' "${CYAN}" "${RESET}" "${skill}"
    # --force reinstalls even when already present, ensuring updates land.
    if npx --yes clawhub@latest install --force "${skill}" >> "${CLAWSPARK_LOG}" 2>&1; then
      printf '%s✓%s\n' "${GREEN}" "${RESET}"
      installed=$(( installed + 1 ))
    else
      printf '%s✗%s\n' "${YELLOW}" "${RESET}"
      failed=$(( failed + 1 ))
    fi
  done
  log_success "Sync complete: ${installed} installed, ${failed} failed."
}
|
|
1262
|
+
|
|
1263
|
+
# Add a skill to skills.yaml (if missing) and install it via clawhub.
# Arguments: $1 - skill slug
# Fix: the presence check is now anchored at end-of-line, so adding
# "git" is no longer mistaken for an already-present "github".
_skills_add() {
  local name="$1"
  local skills_file="${CLAWSPARK_DIR}/skills.yaml"

  # Check if already present (handles both "- name: slug" and "- slug").
  if grep -q "name: ${name}\$" "${skills_file}" 2>/dev/null || \
     grep -q "^ *- ${name}\$" "${skills_file}" 2>/dev/null; then
    log_info "Skill '${name}' is already in skills.yaml."
  else
    # Append under the enabled section: insert the new entry just before
    # the "custom:" key that terminates it.
    local tmpfile
    tmpfile=$(mktemp)
    awk -v skill="${name}" '
      /enabled:/ { in_enabled=1 }
      in_enabled && /^[[:space:]]*custom:/ {
        printf "  - name: %s\n    description: User-added skill\n", skill
        in_enabled=0
      }
      { print }
    ' "${skills_file}" > "${tmpfile}"
    mv "${tmpfile}" "${skills_file}"
    log_success "Added '${name}' to skills.yaml."
  fi

  log_info "Installing '${name}'..."
  if npx --yes clawhub@latest install --force "${name}" >> "${CLAWSPARK_LOG}" 2>&1; then
    log_success "Skill '${name}' installed."
  else
    log_error "Failed to install '${name}'. Check ${CLAWSPARK_LOG}."
  fi
}
|
|
1295
|
+
|
|
1296
|
+
# Remove a skill entry from skills.yaml by exact slug.
# Handles the multiline "- name: slug" form (dropping its description line
# too) and the single-line "- slug" form.
# Fix: matching is now an exact field comparison instead of a substring
# regex, so removing "git" no longer deletes the "github" entry.
_skills_remove() {
  local name="$1"
  local skills_file="${CLAWSPARK_DIR}/skills.yaml"

  local tmpfile
  tmpfile=$(mktemp)
  awk -v skill="${name}" '
    # "- name: slug" lines: compare the slug field exactly.
    /^[[:space:]]*- name:/ {
      if ($NF == skill) { skip=1; next }
    }
    # Swallow the description line that belongs to a removed entry.
    skip && /^[[:space:]]+description:/ { skip=0; next }
    skip { skip=0 }
    # Single-line "- slug" form: field 2 is the slug ($2 is "name:" for
    # the multiline form, so those lines never match here).
    /^[[:space:]]*-[[:space:]]+/ {
      if ($2 == skill) next
    }
    { print }
  ' "${skills_file}" > "${tmpfile}"
  mv "${tmpfile}" "${skills_file}"

  log_success "Removed '${name}' from skills.yaml."
  log_info "Note: The skill package may still be cached locally."
}
|
|
1315
|
+
|
|
1316
|
+
# List the curated skill packs from skill-packs.yaml: one line per pack
# with its description and skill count. Parsing is a small line-by-line
# state machine keyed on indentation (2-space keys are pack names).
_skills_list_packs() {
  local packs_file="${CLAWSPARK_DIR}/configs/skill-packs.yaml"
  # Fall back to the copy shipped alongside this script.
  if [[ ! -f "${packs_file}" ]]; then
    packs_file="$(cd "$(dirname "$0")" && pwd)/configs/skill-packs.yaml"
  fi
  if [[ ! -f "${packs_file}" ]]; then
    log_error "No skill-packs.yaml found."
    exit 1
  fi

  printf '\n%s%sAvailable skill packs%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  local current_pack="" current_desc="" skill_count=0
  while IFS= read -r line; do
    [[ "${line}" =~ ^[[:space:]]*# ]] && continue   # comments
    [[ -z "${line// }" ]] && continue               # blanks

    # Detect pack name (indented exactly 2 spaces, ends with colon)
    if [[ "${line}" =~ ^[[:space:]]{2}([a-zA-Z][a-zA-Z0-9_-]*):[[:space:]]*$ ]]; then
      # Flush the previously accumulated pack before starting a new one.
      if [[ -n "${current_pack}" ]]; then
        printf ' %-14s %s (%d skills)\n' "${current_pack}" "${current_desc}" "${skill_count}"
      fi
      current_pack="${BASH_REMATCH[1]}"
      current_desc=""
      skill_count=0
      continue
    fi

    # Detect description line (optionally quoted value).
    if [[ "${line}" =~ ^[[:space:]]+description:[[:space:]]*\"?([^\"]*)\"? ]]; then
      current_desc="${BASH_REMATCH[1]}"
      continue
    fi

    # Count skill entries ("- ..." list items under the current pack).
    if [[ "${line}" =~ ^[[:space:]]*-[[:space:]]+ ]] && [[ -n "${current_pack}" ]]; then
      skill_count=$(( skill_count + 1 ))
    fi
  done < "${packs_file}"

  # Flush the final pack (the loop only prints on seeing the NEXT pack).
  if [[ -n "${current_pack}" ]]; then
    printf ' %-14s %s (%d skills)\n' "${current_pack}" "${current_desc}" "${skill_count}"
  fi

  printf '\nInstall a pack: clawspark skills pack <name>\n\n'
}
|
|
1364
|
+
|
|
1365
|
+
# Install every skill of one curated pack from skill-packs.yaml.
# Arguments: $1 - pack name as listed by _skills_list_packs.
# Parses the YAML with a two-flag state machine (inside the requested
# pack / inside its skills list) and installs each slug via clawhub.
_skills_install_pack() {
  local pack_name="$1"
  local packs_file="${CLAWSPARK_DIR}/configs/skill-packs.yaml"
  # Fall back to the copy shipped alongside this script.
  if [[ ! -f "${packs_file}" ]]; then
    packs_file="$(cd "$(dirname "$0")" && pwd)/configs/skill-packs.yaml"
  fi
  if [[ ! -f "${packs_file}" ]]; then
    log_error "No skill-packs.yaml found."
    exit 1
  fi

  # Parse skills for the requested pack
  local -a skills=()
  local in_pack=false in_skills=false

  while IFS= read -r line; do
    [[ "${line}" =~ ^[[:space:]]*# ]] && continue   # comments
    [[ -z "${line// }" ]] && continue               # blanks

    # Detect pack name line (2-space indented key).
    if [[ "${line}" =~ ^[[:space:]]{2}([a-zA-Z][a-zA-Z0-9_-]*):[[:space:]]*$ ]]; then
      if [[ "${BASH_REMATCH[1]}" == "${pack_name}" ]]; then
        in_pack=true
      elif ${in_pack}; then
        # A new pack begins after ours: everything is collected, stop.
        break
      fi
      in_skills=false
      continue
    fi

    if ${in_pack}; then
      if [[ "${line}" =~ ^[[:space:]]+skills:[[:space:]]*$ ]]; then
        in_skills=true
        continue
      fi
      # Another key under this pack (not skills list) -- stop reading skills
      if [[ "${line}" =~ ^[[:space:]]{4}[a-zA-Z] ]] && [[ ! "${line}" =~ ^[[:space:]]*- ]]; then
        in_skills=false
        continue
      fi
      if ${in_skills} && [[ "${line}" =~ ^[[:space:]]*-[[:space:]]+(.*) ]]; then
        local slug="${BASH_REMATCH[1]}"
        slug="${slug## }"; slug="${slug%% }"   # trim surrounding spaces
        skills+=("${slug}")
      fi
    fi
  done < "${packs_file}"

  if [[ ${#skills[@]} -eq 0 ]]; then
    log_error "Pack '${pack_name}' not found. Run 'clawspark skills pack' to see available packs."
    exit 1
  fi

  printf '\n%s%sInstalling skill pack: %s%s (%d skills)\n\n' "${BOLD}" "${BLUE}" "${pack_name}" "${RESET}" "${#skills[@]}"

  local installed=0 failed=0
  for skill in "${skills[@]}"; do
    printf ' %s->%s %s ... ' "${CYAN}" "${RESET}" "${skill}"
    # Install output goes to the log; only done/failed is shown inline.
    if npx --yes clawhub@latest install --force "${skill}" >> "${CLAWSPARK_LOG}" 2>&1; then
      printf '%sdone%s\n' "${GREEN}" "${RESET}"
      installed=$(( installed + 1 ))
    else
      printf '%sfailed%s\n' "${YELLOW}" "${RESET}"
      failed=$(( failed + 1 ))
    fi
  done

  printf '\n'
  log_success "Pack '${pack_name}' complete: ${installed} installed, ${failed} failed."
}
|
|
1435
|
+
|
|
1436
|
+
# ── benchmark ───────────────────────────────────────────────────────────────
|
|
1437
|
+
# ── benchmark ───────────────────────────────────────────────────────────────
# Run a quick tokens-per-second benchmark against the local Ollama server
# using three fixed prompts, then print the aggregate in a summary box.
# Only works for local-Ollama profiles; remote providers are rejected.
_cmd_benchmark() {
  printf '\n%s%s clawspark benchmark%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  if ! _uses_local_ollama; then
    log_error "Benchmark currently supports local Ollama profiles only."
    exit 1
  fi

  if ! curl -sf http://127.0.0.1:11434/ &>/dev/null; then
    log_error "Ollama is not running. Start it first: clawspark restart"
    exit 1
  fi

  # Determine model from config
  local model_id=""
  model_id=$(openclaw config get agents.defaults.model 2>/dev/null | tr -d '"' || echo "")
  # Strip the "ollama/" provider prefix for Ollama API calls
  model_id="${model_id#ollama/}"
  # Fall back to the first locally pulled model when config has none.
  if [[ -z "${model_id}" ]]; then
    model_id=$(ollama list 2>/dev/null | tail -n +2 | head -1 | awk '{print $1}')
  fi
  if [[ -z "${model_id}" ]]; then
    log_error "No model found. Pull one with: ollama pull <model>"
    exit 1
  fi

  log_info "Benchmarking model: ${model_id}"

  local prompts=(
    "Explain quantum computing in one sentence."
    "Write a haiku about artificial intelligence."
    "Count from 1 to 20."
  )

  local total_tokens=0
  local total_duration_ns=0

  for prompt in "${prompts[@]}"; do
    printf ' %s→%s "%s" ... ' "${CYAN}" "${RESET}" "${prompt}"
    local response
    # num_predict:50 caps generation so each probe stays short.
    response=$(curl -sf --max-time 60 http://127.0.0.1:11434/api/generate \
      -d "{\"model\":\"${model_id}\",\"prompt\":\"${prompt}\",\"stream\":false,\"options\":{\"num_predict\":50}}" 2>/dev/null || echo "")

    if [[ -z "${response}" ]]; then
      printf '%sfailed%s\n' "${RED}" "${RESET}"
      continue
    fi

    # Scrape token count and duration (nanoseconds) out of the JSON reply.
    local eval_count eval_duration_ns
    eval_count=$(echo "${response}" | grep -o '"eval_count":[0-9]*' | cut -d: -f2 || echo 0)
    eval_duration_ns=$(echo "${response}" | grep -o '"eval_duration":[0-9]*' | cut -d: -f2 || echo 0)

    if [[ -n "${eval_count}" && "${eval_count}" -gt 0 && -n "${eval_duration_ns}" && "${eval_duration_ns}" -gt 0 ]]; then
      # awk does the floating-point division bash cannot.
      local tps
      tps=$(awk "BEGIN {printf \"%.1f\", ${eval_count} / (${eval_duration_ns} / 1000000000)}")
      printf '%s%s tok/s%s (%s tokens)\n' "${GREEN}" "${tps}" "${RESET}" "${eval_count}"
      total_tokens=$(( total_tokens + eval_count ))
      total_duration_ns=$(( total_duration_ns + eval_duration_ns ))
    else
      printf '%sdone%s\n' "${GREEN}" "${RESET}"
    fi
  done

  printf '\n'
  if (( total_duration_ns > 0 )); then
    local avg_tps
    avg_tps=$(awk "BEGIN {printf \"%.1f\", ${total_tokens} / (${total_duration_ns} / 1000000000)}")
    print_box \
      "${BOLD}Benchmark Results${RESET}" \
      "" \
      "Model : ${model_id}" \
      "Total tokens: ${total_tokens}" \
      "Average : ${CYAN}${avg_tps} tok/s${RESET}"
  else
    log_warn "Could not compute benchmark results."
  fi
  printf '\n'
}
|
|
1515
|
+
|
|
1516
|
+
# ── update ──────────────────────────────────────────────────────────────────
|
|
1517
|
+
# ── update ──────────────────────────────────────────────────────────────────
# Update the OpenClaw package, re-apply local patches that the update
# overwrites, restart services, and re-sync skills.
_cmd_update() {
  log_info "Updating OpenClaw..."
  sudo npm install -g openclaw@latest 2>&1 | tee -a "${CLAWSPARK_LOG}"
  log_success "OpenClaw updated to $(openclaw --version 2>/dev/null || echo 'latest')."

  # Re-apply patches and dual-agent config (npm update overwrites dist files)
  log_info "Re-applying patches and config..."
  local lib_dir="${CLAWSPARK_DIR}/lib"
  if [[ -f "${lib_dir}/setup-openclaw.sh" ]]; then
    # Sourcing brings the _patch_* / _setup_* helpers into scope.
    source "${lib_dir}/setup-openclaw.sh"
    _patch_sync_full_history
    _patch_baileys_browser
    _patch_mention_detection
    _setup_agent_config
  else
    log_warn "lib/setup-openclaw.sh not found -- patches not re-applied."
    log_warn "Re-run install.sh to apply patches."
  fi

  # Restart gateway with correct env
  log_info "Restarting gateway..."
  _cmd_restart

  log_info "Syncing skills..."
  _skills_sync
}
|
|
1543
|
+
|
|
1544
|
+
# ── airgap ──────────────────────────────────────────────────────────────────
|
|
1545
|
+
#######################################
# Toggle or report "air-gap" mode (block all outbound traffic except
# RFC1918/loopback ranges and DNS).
# Prefers a user-provided toggle script; falls back to direct UFW rules.
# Globals:   CLAWSPARK_DIR (read; airgap.state written)
# Arguments: $1 - "on", "off", or empty to show current state
# Returns:   exits 1 when UFW is required but not installed
#######################################
_cmd_airgap() {
  local mode="${1:-}"
  local toggle_script="${CLAWSPARK_DIR}/airgap-toggle.sh"

  case "${mode}" in
    on|off)
      if [[ -x "${toggle_script}" ]]; then
        # Delegate to the install-time toggle script when present.
        bash "${toggle_script}" "${mode}"
      else
        if ! check_command ufw; then
          log_error "UFW is not installed. Cannot toggle air-gap."
          exit 1
        fi
        if [[ "${mode}" == "on" ]]; then
          # Deny all egress, then re-allow private ranges, loopback, and DNS
          # so local inference and LAN services keep working.
          sudo ufw default deny outgoing
          sudo ufw allow out to 10.0.0.0/8
          sudo ufw allow out to 172.16.0.0/12
          sudo ufw allow out to 192.168.0.0/16
          sudo ufw allow out to 127.0.0.0/8
          sudo ufw allow out 53
          echo "true" > "${CLAWSPARK_DIR}/airgap.state"
          log_success "Air-gap mode ENABLED."
        else
          sudo ufw default allow outgoing
          echo "false" > "${CLAWSPARK_DIR}/airgap.state"
          log_success "Air-gap mode DISABLED."
        fi
      fi
      ;;
    *)
      # Fix: report OFF in the same casing as ON (was lowercase "off").
      local current="OFF"
      [[ -f "${CLAWSPARK_DIR}/airgap.state" && "$(cat "${CLAWSPARK_DIR}/airgap.state")" == "true" ]] && current="ON"
      printf ' Air-gap mode is currently: %s%s%s\n' "${BOLD}" "${current}" "${RESET}"
      printf ' Usage: clawspark airgap [on|off]\n'
      ;;
  esac
}
|
|
1582
|
+
|
|
1583
|
+
# ── dashboard ──────────────────────────────────────────────────────────
|
|
1584
|
+
#######################################
# Show dashboard URLs, starting the ClawMetry metrics server first if it
# is not already running (detected via PID file, then HTTP probe).
# Globals:   CLAWSPARK_DIR, CLAWSPARK_LOG (read/append); color vars
# Outputs:   local (and, when connected, Tailscale) URLs on stdout
#######################################
_cmd_dashboard() {
  printf '\n%s%s clawspark dashboard%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  # Check if ClawMetry is running: a live PID recorded in dashboard.pid.
  local clawmetry_running=false
  local clawmetry_pid_file="${CLAWSPARK_DIR}/dashboard.pid"

  if [[ -f "${clawmetry_pid_file}" ]]; then
    local cm_pid
    cm_pid=$(cat "${clawmetry_pid_file}")
    # kill -0 only tests process existence; it sends no signal.
    if kill -0 "${cm_pid}" 2>/dev/null; then
      clawmetry_running=true
    fi
  fi

  # Fallback: check via HTTP (covers a server started outside our PID file).
  if ! ${clawmetry_running}; then
    if curl -sf http://127.0.0.1:8900/ &>/dev/null; then
      clawmetry_running=true
    fi
  fi

  # Start ClawMetry if not running. Three launchers are tried in order:
  # bundled start.sh, the python module, then a `clawmetry` CLI on PATH.
  if ! ${clawmetry_running}; then
    log_info "ClawMetry is not running. Starting it..."
    if [[ -x "${CLAWSPARK_DIR}/clawmetry/start.sh" ]]; then
      bash "${CLAWSPARK_DIR}/clawmetry/start.sh" >> "${CLAWSPARK_LOG}" 2>&1
      log_success "ClawMetry started."
    elif python3 -m clawmetry --help &>/dev/null; then
      nohup python3 -m clawmetry --port 8900 --host 127.0.0.1 >> "${CLAWSPARK_DIR}/dashboard.log" 2>&1 &
      echo $! > "${clawmetry_pid_file}"
      log_success "ClawMetry started (PID $!)."
    elif check_command clawmetry; then
      nohup clawmetry serve >> "${CLAWSPARK_DIR}/dashboard.log" 2>&1 &
      echo $! > "${clawmetry_pid_file}"
      log_success "ClawMetry started (PID $!)."
    else
      log_warn "ClawMetry is not installed. Metrics dashboard unavailable."
    fi
  fi

  printf ' %s%sChat UI:%s http://localhost:18789/__openclaw__/canvas/\n' "${BOLD}" "${GREEN}" "${RESET}"
  printf ' %s%sMetrics:%s http://localhost:8900\n' "${BOLD}" "${GREEN}" "${RESET}"

  # Show Tailscale URLs if connected (empty ts_ip means not connected).
  if check_command tailscale; then
    local ts_ip
    ts_ip=$(tailscale ip -4 2>/dev/null || true)
    if [[ -n "${ts_ip}" ]]; then
      printf '\n %s%sTailscale (remote access):%s\n' "${BOLD}" "${CYAN}" "${RESET}"
      printf ' Chat UI: http://%s:18789\n' "${ts_ip}"
      printf ' Metrics: http://%s:8900\n' "${ts_ip}"
    fi
  fi

  printf '\n'
}
|
|
1641
|
+
|
|
1642
|
+
# ── tailscale ──────────────────────────────────────────────────────────
|
|
1643
|
+
#######################################
# Dispatch `clawspark tailscale <subcommand>`.
# Arguments: $1 - "setup" or "status"
# Returns:   exits 1 (after printing usage) on any other subcommand
#######################################
_cmd_tailscale() {
  local action="${1:-}"
  shift || true

  if [[ "${action}" == "setup" ]]; then
    _tailscale_setup
  elif [[ "${action}" == "status" ]]; then
    _tailscale_status
  else
    printf ' Usage: clawspark tailscale [setup|status]\n'
    exit 1
  fi
}
|
|
1660
|
+
|
|
1661
|
+
#######################################
# Install (if needed) and bring up Tailscale, then print the node's
# Tailscale IPv4 address and the service URLs reachable through it.
# Globals:   CLAWSPARK_LOG (appended); color vars
# Returns:   exits 1 on unsupported platform or failed install
#######################################
_tailscale_setup() {
  printf '\n%s%s clawspark tailscale setup%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  # Check if Tailscale is installed; install per-platform if not.
  if ! check_command tailscale; then
    log_info "Tailscale is not installed. Installing..."
    if [[ "$(uname)" == "Linux" ]]; then
      # NOTE(review): curl|sh runs the vendor installer unverified; this is
      # Tailscale's documented install path, but worth flagging.
      curl -fsSL https://tailscale.com/install.sh | sh 2>&1 | tee -a "${CLAWSPARK_LOG}"
    elif [[ "$(uname)" == "Darwin" ]]; then
      if check_command brew; then
        brew install --cask tailscale 2>&1 | tee -a "${CLAWSPARK_LOG}"
      else
        log_error "Please install Tailscale manually: https://tailscale.com/download"
        exit 1
      fi
    else
      log_error "Unsupported platform. Please install Tailscale manually: https://tailscale.com/download"
      exit 1
    fi
  fi

  # Re-check: the installer may have failed silently behind the tee pipe.
  if ! check_command tailscale; then
    log_error "Tailscale installation failed. Please install manually: https://tailscale.com/download"
    exit 1
  fi

  log_info "Starting Tailscale..."
  # Interactive: `tailscale up` may print an auth URL for the user.
  sudo tailscale up 2>&1

  # Report the Tailscale IPv4 and the gateway/metrics URLs behind it.
  local ts_ip
  ts_ip=$(tailscale ip -4 2>/dev/null || true)
  if [[ -n "${ts_ip}" ]]; then
    log_success "Tailscale is connected."
    printf '\n Tailscale IP: %s%s%s\n' "${CYAN}" "${ts_ip}" "${RESET}"
    printf ' Chat UI: http://%s:18789\n' "${ts_ip}"
    printf ' Metrics: http://%s:8900\n' "${ts_ip}"
    printf '\n'
    log_info "Make sure your Tailscale ACLs allow access to ports 18789 and 8900."
  else
    log_warn "Tailscale is running but no IPv4 address was found. Check 'tailscale status'."
  fi
}
|
|
1704
|
+
|
|
1705
|
+
#######################################
# Report Tailscale state: installed? daemon responding? connected?
# Prints the node's IPv4 and service URLs when connected.
# Globals:   color vars (read)
#######################################
_tailscale_status() {
  printf '\n%s%s clawspark tailscale status%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  # Guard: binary not installed at all.
  if ! check_command tailscale; then
    printf ' %s✗%s Tailscale is not installed.\n' "${RED}" "${RESET}"
    printf ' Run: clawspark tailscale setup\n\n'
    return
  fi

  # Guard: daemon not answering (empty JSON status).
  local state_json
  state_json=$(tailscale status --json 2>/dev/null || true)
  if [[ -z "${state_json}" ]]; then
    printf ' %s✗%s Tailscale is not running.\n' "${RED}" "${RESET}"
    printf ' Run: clawspark tailscale setup\n\n'
    return
  fi

  # An assigned IPv4 is our signal for "connected".
  local addr
  addr=$(tailscale ip -4 2>/dev/null || true)
  if [[ -n "${addr}" ]]; then
    printf ' %s✓%s Tailscale connected\n' "${GREEN}" "${RESET}"
    printf ' %s-%s Tailscale IP %s\n' "${CYAN}" "${RESET}" "${addr}"
    printf ' %s-%s Chat UI http://%s:18789\n' "${CYAN}" "${RESET}" "${addr}"
    printf ' %s-%s Metrics http://%s:8900\n' "${CYAN}" "${RESET}" "${addr}"
  else
    printf ' %s✗%s Tailscale not connected\n' "${YELLOW}" "${RESET}"
    printf ' Run: clawspark tailscale setup\n'
  fi

  printf '\n'
}
|
|
1738
|
+
|
|
1739
|
+
# ── logs ────────────────────────────────────────────────────────────────────
|
|
1740
|
+
#######################################
# Follow the gateway log with `tail -f`, or advise a restart when the
# log file does not exist yet.
# Globals:   CLAWSPARK_DIR (read)
#######################################
_cmd_logs() {
  local log_path="${CLAWSPARK_DIR}/gateway.log"
  if [[ ! -f "${log_path}" ]]; then
    log_warn "Gateway log not found at ${log_path}"
    log_info "Try: clawspark restart"
    return
  fi
  # Blocks until interrupted; this is the intended UX.
  tail -f "${log_path}"
}
|
|
1749
|
+
|
|
1750
|
+
# ── restart ─────────────────────────────────────────────────────────────────
|
|
1751
|
+
#######################################
# Restart the full stack: Ollama (local profiles only), gateway, node
# host, and dashboard — via systemd when managed, else via PID files.
# Globals:   CLAWSPARK_DIR (read/write PID+log files)
# Returns:   1 when Ollama never becomes ready or openclaw is missing
#######################################
_cmd_restart() {
  log_info "Restarting services..."
  _source_gateway_env
  local use_systemd
  use_systemd=$(_detect_systemd)

  # Restart Ollama when using local inference
  if _uses_local_ollama; then
    if check_command systemctl && systemctl is-enabled ollama &>/dev/null; then
      # sudo -n: never prompt; degrade to a warning when sudo is unavailable.
      if sudo -n systemctl restart ollama 2>/dev/null; then
        log_success "Ollama restarted via systemctl."
      elif curl -sf http://127.0.0.1:11434/ &>/dev/null; then
        log_info "Ollama already running (systemd-managed, no sudo available)."
      else
        log_warn "Ollama needs sudo to restart. Run: sudo systemctl restart ollama"
      fi
    elif [[ -f "${CLAWSPARK_DIR}/ollama.pid" ]]; then
      # Self-managed instance: kill the recorded PID and relaunch.
      _kill_pid_file "${CLAWSPARK_DIR}/ollama.pid"
      sleep 1
      nohup ollama serve >> "${CLAWSPARK_DIR}/ollama.log" 2>&1 &
      echo $! > "${CLAWSPARK_DIR}/ollama.pid"
      log_success "Ollama restarted (PID $!)."
    else
      # No record of a previous instance: first-time start.
      nohup ollama serve >> "${CLAWSPARK_DIR}/ollama.log" 2>&1 &
      echo $! > "${CLAWSPARK_DIR}/ollama.pid"
      log_success "Ollama started (PID $!)."
    fi

    # Gateway depends on the inference backend; abort if it never comes up.
    if ! _wait_for_ollama; then
      log_error "Aborting: Ollama backend is not ready."
      return 1
    fi
  else
    log_info "Remote/API provider detected; skipping Ollama restart."
  fi

  if [[ "${use_systemd}" == "true" ]]; then
    sudo -n systemctl restart clawspark-gateway.service 2>/dev/null || {
      log_warn "Gateway needs sudo to restart. Run: sudo systemctl restart clawspark-gateway"
    }
    # Give the gateway a head start before the node host reconnects.
    sleep 3
    sudo -n systemctl restart clawspark-nodehost.service 2>/dev/null || {
      log_warn "Node host needs sudo to restart. Run: sudo systemctl restart clawspark-nodehost"
    }
    # Dashboard is optional; restart only when its unit is enabled.
    if systemctl is-enabled --quiet clawspark-dashboard.service 2>/dev/null; then
      sudo -n systemctl restart clawspark-dashboard.service 2>/dev/null || true
    fi
    sleep 2
    # Verify
    systemctl is-active --quiet clawspark-gateway.service 2>/dev/null && log_success "Gateway restarted (systemd)." || log_warn "Gateway may not have restarted."
    systemctl is-active --quiet clawspark-nodehost.service 2>/dev/null && log_success "Node host restarted (systemd)." || log_warn "Node host may not have restarted."
  else
    # PID-based management: tear everything down first, then relaunch.
    _kill_pid_file "${CLAWSPARK_DIR}/gateway.pid"
    _kill_pid_file "${CLAWSPARK_DIR}/node.pid"
    _kill_pid_file "${CLAWSPARK_DIR}/dashboard.pid"
    # Also kill by name in case PID files are stale or missing.
    _kill_by_name "openclaw gateway"
    _kill_by_name "openclaw node"
    sleep 2

    if check_command openclaw; then
      nohup openclaw gateway run --bind loopback > "${CLAWSPARK_DIR}/gateway.log" 2>&1 &
      echo $! > "${CLAWSPARK_DIR}/gateway.pid"
      log_success "Gateway started (PID $!)."
    else
      log_warn "openclaw not found, gateway not started."
      return 1
    fi

    sleep 3

    nohup openclaw node run --host 127.0.0.1 --port 18789 > "${CLAWSPARK_DIR}/node.log" 2>&1 &
    echo $! > "${CLAWSPARK_DIR}/node.pid"
    log_success "Node host started (PID $!)."

    _start_dashboard
  fi

  sleep 2
  log_success "All services restarted."
}
|
|
1832
|
+
|
|
1833
|
+
# ── start ──────────────────────────────────────────────────────────────────
|
|
1834
|
+
#######################################
# Start any services that are not already running (idempotent, unlike
# _cmd_restart): Ollama (local profiles), gateway, node host, dashboard.
# Globals:   CLAWSPARK_DIR (read/write PID+log files)
# Returns:   1 when Ollama never becomes ready or openclaw is missing
#######################################
_cmd_start() {
  log_info "Starting services..."
  _source_gateway_env
  local use_systemd
  use_systemd=$(_detect_systemd)

  # Start Ollama when required
  if _uses_local_ollama; then
    # HTTP probe first: works regardless of who manages the process.
    if curl -sf http://127.0.0.1:11434/ &>/dev/null; then
      log_info "Ollama already running."
    elif check_command systemctl && systemctl is-enabled ollama &>/dev/null; then
      sudo -n systemctl start ollama 2>/dev/null || log_warn "Ollama needs sudo to start. Run: sudo systemctl start ollama"
    else
      nohup ollama serve >> "${CLAWSPARK_DIR}/ollama.log" 2>&1 &
      echo $! > "${CLAWSPARK_DIR}/ollama.pid"
      log_success "Ollama started (PID $!)."
    fi

    if ! _wait_for_ollama; then
      log_error "Aborting: Ollama backend is not ready."
      return 1
    fi
  else
    log_info "Remote/API provider detected; skipping Ollama startup."
  fi

  if [[ "${use_systemd}" == "true" ]]; then
    if systemctl is-active --quiet clawspark-gateway.service 2>/dev/null; then
      log_info "Gateway already running (systemd)."
    else
      sudo -n systemctl start clawspark-gateway.service 2>/dev/null || log_warn "Gateway needs sudo. Run: sudo systemctl start clawspark-gateway"
    fi
    sleep 2
    if systemctl is-active --quiet clawspark-nodehost.service 2>/dev/null; then
      log_info "Node host already running (systemd)."
    else
      sudo -n systemctl start clawspark-nodehost.service 2>/dev/null || log_warn "Node host needs sudo. Run: sudo systemctl start clawspark-nodehost"
    fi
    # Dashboard unit is optional; only start when enabled and not active.
    if systemctl is-enabled --quiet clawspark-dashboard.service 2>/dev/null; then
      if ! systemctl is-active --quiet clawspark-dashboard.service 2>/dev/null; then
        sudo -n systemctl start clawspark-dashboard.service 2>/dev/null || true
      fi
    fi
  else
    # PID-based management: liveness = PID file exists and kill -0 succeeds.
    if [[ -f "${CLAWSPARK_DIR}/gateway.pid" ]] && kill -0 "$(cat "${CLAWSPARK_DIR}/gateway.pid" 2>/dev/null)" 2>/dev/null; then
      log_info "Gateway already running."
    elif check_command openclaw; then
      nohup openclaw gateway run --bind loopback > "${CLAWSPARK_DIR}/gateway.log" 2>&1 &
      echo $! > "${CLAWSPARK_DIR}/gateway.pid"
      log_success "Gateway started (PID $!)."
      sleep 3
    else
      log_warn "openclaw not found, gateway not started."
      return 1
    fi

    if [[ -f "${CLAWSPARK_DIR}/node.pid" ]] && kill -0 "$(cat "${CLAWSPARK_DIR}/node.pid" 2>/dev/null)" 2>/dev/null; then
      log_info "Node host already running."
    elif check_command openclaw; then
      nohup openclaw node run --host 127.0.0.1 --port 18789 > "${CLAWSPARK_DIR}/node.log" 2>&1 &
      echo $! > "${CLAWSPARK_DIR}/node.pid"
      log_success "Node host started (PID $!)."
    fi

    if [[ -f "${CLAWSPARK_DIR}/dashboard.pid" ]] && kill -0 "$(cat "${CLAWSPARK_DIR}/dashboard.pid" 2>/dev/null)" 2>/dev/null; then
      log_info "Dashboard already running."
    else
      _start_dashboard
    fi
  fi
}
|
|
1906
|
+
|
|
1907
|
+
# ── stop ───────────────────────────────────────────────────────────────────
|
|
1908
|
+
#######################################
# Stop gateway, node host, and dashboard (systemd or PID-file based).
# Ollama is stopped only when --all is passed on a local profile.
# Globals:   CLAWSPARK_DIR (read)
# Arguments: "$@" - optional flags; only --all is recognized
#######################################
_cmd_stop() {
  local want_all=false
  for arg in "$@"; do
    if [[ "${arg}" == "--all" ]]; then
      want_all=true
    fi
  done

  log_info "Stopping services..."

  # A systemd-managed install is identified by an enabled gateway unit.
  local managed=false
  if check_command systemctl && systemctl is-enabled --quiet clawspark-gateway.service 2>/dev/null; then
    managed=true
  fi

  if [[ "${managed}" == "true" ]]; then
    # Stop in reverse dependency order: dashboard, node host, gateway.
    if systemctl is-enabled --quiet clawspark-dashboard.service 2>/dev/null; then
      sudo -n systemctl stop clawspark-dashboard.service 2>/dev/null || true
      log_success "Dashboard stopped (systemd)."
    fi
    sudo -n systemctl stop clawspark-nodehost.service 2>/dev/null || true
    log_success "Node host stopped (systemd)."
    sudo -n systemctl stop clawspark-gateway.service 2>/dev/null || true
    log_success "Gateway stopped (systemd)."
  else
    # PID-file based teardown, with kill-by-name as a stale-PID fallback.
    _kill_pid_file "${CLAWSPARK_DIR}/dashboard.pid"
    _kill_by_name "clawmetry"
    log_success "Dashboard stopped."

    _kill_pid_file "${CLAWSPARK_DIR}/node.pid"
    _kill_by_name "openclaw node"
    log_success "Node host stopped."

    _kill_pid_file "${CLAWSPARK_DIR}/gateway.pid"
    _kill_by_name "openclaw gateway"
    log_success "Gateway stopped."
  fi

  # Ollama is left alone unless --all was requested on a local profile.
  if ${want_all} && _uses_local_ollama; then
    if check_command systemctl && systemctl is-enabled ollama &>/dev/null; then
      sudo -n systemctl stop ollama 2>/dev/null || log_warn "Ollama needs sudo to stop. Run: sudo systemctl stop ollama"
    else
      _kill_pid_file "${CLAWSPARK_DIR}/ollama.pid"
      log_success "Ollama stopped."
    fi
  else
    log_info "Ollama left unchanged. Use --all on local profiles to also stop Ollama."
  fi
}
|
|
1959
|
+
|
|
1960
|
+
# ── _start_dashboard (helper) ─────────────────────────────────────────────
|
|
1961
|
+
#######################################
# Start the ClawMetry dashboard, trying four launchers in order of
# preference; records the child PID in dashboard.pid on success.
# Globals:   CLAWSPARK_DIR (dashboard.log/pid written), HOME (read)
# Outputs:   warning when no launcher is available
#######################################
_start_dashboard() {
  # Method 1: clawmetry CLI in PATH
  if check_command clawmetry; then
    nohup clawmetry --port 8900 --host 127.0.0.1 >> "${CLAWSPARK_DIR}/dashboard.log" 2>&1 &
    echo $! > "${CLAWSPARK_DIR}/dashboard.pid"
    log_success "Dashboard started via clawmetry (PID $!)."
    return
  fi

  # Method 2: clawmetry in ~/.local/bin (pip --user installs land here)
  if [[ -x "${HOME}/.local/bin/clawmetry" ]]; then
    nohup "${HOME}/.local/bin/clawmetry" --port 8900 --host 127.0.0.1 >> "${CLAWSPARK_DIR}/dashboard.log" 2>&1 &
    echo $! > "${CLAWSPARK_DIR}/dashboard.pid"
    log_success "Dashboard started via ~/.local/bin/clawmetry (PID $!)."
    return
  fi

  # Method 3: waitress + clawmetry Python module (production WSGI server)
  if python3 -c "from clawmetry import create_app; from waitress import serve" 2>/dev/null; then
    nohup python3 -c "from clawmetry import create_app; from waitress import serve; serve(create_app(), host='127.0.0.1', port=8900)" >> "${CLAWSPARK_DIR}/dashboard.log" 2>&1 &
    echo $! > "${CLAWSPARK_DIR}/dashboard.pid"
    log_success "Dashboard started via waitress (PID $!)."
    return
  fi

  # Method 4: Flask dev server (last resort; not production-grade)
  if python3 -c "from clawmetry import create_app" 2>/dev/null; then
    nohup python3 -c "from clawmetry import create_app; app = create_app(); app.run(host='127.0.0.1', port=8900)" >> "${CLAWSPARK_DIR}/dashboard.log" 2>&1 &
    echo $! > "${CLAWSPARK_DIR}/dashboard.pid"
    log_success "Dashboard started via Flask dev server (PID $!)."
    return
  fi

  log_warn "ClawMetry is not installed. Dashboard not started."
}
|
|
1996
|
+
|
|
1997
|
+
# ── model ──────────────────────────────────────────────────────────────────
|
|
1998
|
+
#######################################
# Dispatch `clawspark model [list|switch <model>|vision [model]]`.
# Arguments: $1 - subcommand (defaults to "list"); $2 - model name
# Returns:   exits 1 on missing argument or unknown subcommand
#######################################
_cmd_model() {
  local action="${1:-list}"
  shift || true

  case "${action}" in
    list)
      _model_list
      ;;
    switch)
      local target="${1:-}"
      if [[ -z "${target}" ]]; then
        log_error "Usage: clawspark model switch <model>"
        exit 1
      fi
      _model_switch "${target}"
      ;;
    vision)
      # With no argument, show the current vision model instead of setting.
      local target="${1:-}"
      if [[ -n "${target}" ]]; then
        _model_vision_set "${target}"
      else
        _model_vision_show
      fi
      ;;
    *)
      log_error "Unknown model subcommand: ${action}"
      printf ' Usage: clawspark model [list|switch <model>|vision [model]]\n'
      exit 1
      ;;
  esac
}
|
|
2026
|
+
|
|
2027
|
+
#######################################
# List the active models from ~/.openclaw/openclaw.json, and — on local
# profiles — the pulled Ollama models with primary/vision markers.
# Globals:   HOME (read); color vars
#######################################
_model_list() {
  printf '\n%s%s clawspark models%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  local config_file="${HOME}/.openclaw/openclaw.json"
  local primary_model="" vision_model="" imagegen_model=""

  if [[ -f "${config_file}" ]]; then
    # Emit the three configured model ids, one per line, via python.
    local models_output
    models_output=$(python3 -c "
import json, sys
with open(sys.argv[1]) as f:
    d = json.load(f).get('agents', {}).get('defaults', {})
print(d.get('model', ''))
print(d.get('imageModel', ''))
print(d.get('imageGenerationModel', ''))
" "${config_file}" 2>/dev/null || echo "")
    if [[ -n "${models_output}" ]]; then
      # sed -n 'Np' picks line N of the three-line python output.
      primary_model=$(echo "${models_output}" | sed -n '1p')
      vision_model=$(echo "${models_output}" | sed -n '2p')
      imagegen_model=$(echo "${models_output}" | sed -n '3p')
    fi
  fi

  printf ' %s%sActive Models:%s\n\n' "${BOLD}" "${GREEN}" "${RESET}"
  if [[ -n "${primary_model}" ]]; then
    printf ' %s✓%s Primary (chat) : %s%s%s\n' "${GREEN}" "${RESET}" "${CYAN}" "${primary_model}" "${RESET}"
  else
    printf ' %s-%s Primary (chat) : %snot set%s\n' "${YELLOW}" "${RESET}" "${YELLOW}" "${RESET}"
  fi
  if [[ -n "${vision_model}" ]]; then
    printf ' %s✓%s Vision (images) : %s%s%s\n' "${GREEN}" "${RESET}" "${CYAN}" "${vision_model}" "${RESET}"
  else
    printf ' %s-%s Vision (images) : %snot configured%s\n' "${YELLOW}" "${RESET}" "${YELLOW}" "${RESET}"
  fi
  if [[ -n "${imagegen_model}" ]]; then
    printf ' %s✓%s Image generation : %s%s%s\n' "${GREEN}" "${RESET}" "${CYAN}" "${imagegen_model}" "${RESET}"
  else
    printf ' %s-%s Image generation : %snot configured%s\n' "${YELLOW}" "${RESET}" "${YELLOW}" "${RESET}"
  fi

  if _uses_local_ollama; then
    printf '\n %s%sOllama Models (pulled):%s\n\n' "${BOLD}" "${GREEN}" "${RESET}"
    if check_command ollama; then
      local model_output
      model_output=$(ollama list 2>/dev/null || echo "")
      if [[ -n "${model_output}" ]]; then
        # tail -n +2 skips the `ollama list` header row. The loop runs in
        # a pipeline subshell, which is fine: it only prints.
        echo "${model_output}" | tail -n +2 | while IFS= read -r line; do
          local mname
          mname=$(echo "${line}" | awk '{print $1}')
          local msize
          msize=$(echo "${line}" | awk '{print $3, $4}')
          # Tag the row when it matches the configured primary/vision model
          # (config ids carry an "ollama/" prefix; ollama list does not).
          local marker=""
          if [[ "ollama/${mname}" == "${primary_model}" ]]; then
            marker=" ${GREEN}(primary)${RESET}"
          elif [[ "ollama/${mname}" == "${vision_model}" ]]; then
            marker=" ${CYAN}(vision)${RESET}"
          fi
          printf ' %-35s %s%s\n' "${mname}" "${msize}" "${marker}"
        done
      else
        printf ' %s(no models pulled)%s\n' "${YELLOW}" "${RESET}"
      fi
    else
      printf ' %s(ollama not found)%s\n' "${RED}" "${RESET}"
    fi

    printf '\n Switch model: clawspark model switch <model>\n'
    printf ' Set vision: clawspark model vision <model>\n'
    printf ' Pull a model: ollama pull <model>\n\n'
  else
    local provider_display endpoint
    provider_display="$(_current_provider_display)"
    endpoint="$(_current_provider_base_url)"

    printf '\n %s%sRemote/API Provider:%s\n\n' "${BOLD}" "${GREEN}" "${RESET}"
    printf ' %-35s %s\n' "Provider" "${provider_display}"
    if [[ -n "${endpoint}" ]]; then
      printf ' %-35s %s\n' "Base URL" "${endpoint}"
    else
      printf ' %-35s %s\n' "Base URL" "(not exported in current environment)"
    fi
    printf ' %-35s %s\n' "Model switching" "No local pull required"

    printf '\n Switch model: clawspark model switch <model>\n'
    printf ' Set vision: clawspark model vision <model>\n'
    printf ' Example: clawspark model switch openai/gpt-4.1-mini\n\n'
  fi
}
|
|
2115
|
+
|
|
2116
|
+
#######################################
# Set the primary chat model in ~/.openclaw/openclaw.json and restart
# the gateway so it takes effect.
# Globals:   HOME, CLAWSPARK_LOG (read/append)
# Arguments: $1 - model name (raw or provider-prefixed)
# Returns:   exits 1 when the config is missing or cannot be updated
#######################################
_model_switch() {
  local model="$1"
  local config_file="${HOME}/.openclaw/openclaw.json"

  if [[ ! -f "${config_file}" ]]; then
    log_error "Config not found at ${config_file}. Run install.sh first."
    exit 1
  fi

  local model_id
  model_id=$(_normalize_model_ref "${model}")

  # Check if the model is pulled in Ollama. The previous grep treated the
  # model name as a regex ("qwen2.5" would match "qwenX5..."); use awk for
  # a literal match against the NAME column: exact id, or id plus a tag
  # (e.g. "llama3" matches "llama3:8b").
  if [[ "${model_id}" == ollama/* ]] && check_command ollama; then
    if ! ollama list 2>/dev/null | awk -v m="${model,,}" '
        NR > 1 {
          n = tolower($1)
          if (n == m || index(n, m ":") == 1) found = 1
        }
        END { exit !found }'; then
      log_warn "Model '${model}' does not appear in ollama list."
      log_warn "Pull it first: ollama pull ${model}"
      log_warn "Setting config anyway in case the model name is correct."
    fi
  fi

  # Update the config (python handles JSON safely; the model id is passed
  # as argv, never interpolated into the program text).
  python3 -c "
import json, sys

path = sys.argv[1]
model = sys.argv[2]

with open(path, 'r') as f:
    cfg = json.load(f)

cfg.setdefault('agents', {}).setdefault('defaults', {})
cfg['agents']['defaults']['model'] = model

with open(path, 'w') as f:
    json.dump(cfg, f, indent=2)
print('ok')
" "${config_file}" "${model_id}" 2>> "${CLAWSPARK_LOG}" || {
    log_error "Failed to update config."
    exit 1
  }

  log_success "Primary model set to: ${model_id}"
  log_info "Restarting gateway to apply..."
  _cmd_restart
}
|
|
2162
|
+
|
|
2163
|
+
#######################################
# Print the currently configured vision (image-analysis) model, or
# usage hints when none is set.
# Globals:   HOME (read); color vars
#######################################
_model_vision_show() {
  local cfg_path="${HOME}/.openclaw/openclaw.json"
  local configured=""

  if [[ -f "${cfg_path}" ]]; then
    configured=$(python3 -c "
import json, sys
with open(sys.argv[1]) as f:
    cfg = json.load(f)
print(cfg.get('agents', {}).get('defaults', {}).get('imageModel', ''))
" "${cfg_path}" 2>/dev/null || echo "")
  fi

  # Guard: nothing configured — print hints and bail.
  if [[ -z "${configured}" ]]; then
    printf ' Vision model: %snot configured%s\n' "${YELLOW}" "${RESET}"
    printf ' Set one with: clawspark model vision <model>\n'
    printf ' Example: clawspark model vision qwen2.5-vl:7b\n'
    return
  fi

  printf ' Vision model: %s%s%s\n' "${CYAN}" "${configured}" "${RESET}"
}
|
|
2184
|
+
|
|
2185
|
+
#######################################
# Set the vision model in ~/.openclaw/openclaw.json. For ollama/* models
# this also registers an inline provider entry with image input enabled
# so OpenClaw routes image analysis correctly.
# Globals:   HOME, CLAWSPARK_LOG (read/append)
# Arguments: $1 - model name (raw or provider-prefixed)
# Returns:   exits 1 when the config is missing or cannot be updated
#######################################
_model_vision_set() {
  local model="$1"
  local config_file="${HOME}/.openclaw/openclaw.json"

  if [[ ! -f "${config_file}" ]]; then
    log_error "Config not found at ${config_file}. Run install.sh first."
    exit 1
  fi

  local model_id
  model_id=$(_normalize_model_ref "${model}")

  # Check if the model is pulled (advisory only: we warn but still write
  # the config below).
  # NOTE(review): grep treats ${model} as a regex, so dots in names like
  # "qwen2.5-vl" match any character — could false-positive; confirm.
  if [[ "${model_id}" == ollama/* ]] && check_command ollama; then
    if ! ollama list 2>/dev/null | grep -qi "^${model}"; then
      log_warn "Model '${model}' does not appear in ollama list."
      log_warn "Pull it first: ollama pull ${model}"
      log_warn "Setting config anyway in case the model name is correct."
    fi
  fi

  # Update the config via python; the model id is passed as argv.
  python3 -c "
import json, sys

path = sys.argv[1]
model = sys.argv[2]

with open(path, 'r') as f:
    cfg = json.load(f)

cfg.setdefault('agents', {}).setdefault('defaults', {})
cfg['agents']['defaults']['imageModel'] = model

# Register inline model with vision capability and openai-completions api
# so OpenClaw's image analysis path can find the right streaming provider.
if model.startswith('ollama/'):
    raw_id = model.replace('ollama/', '', 1)
    prov = cfg.setdefault('models', {}).setdefault('providers', {}).setdefault('ollama', {})
    prov.setdefault('baseUrl', 'http://127.0.0.1:11434/v1')
    prov['api'] = 'openai-completions'
    prov['models'] = [
        {
            'id': raw_id,
            'name': raw_id.replace(':', ' ').title(),
            'api': 'openai-completions',
            'input': ['text', 'image'],
            'contextWindow': 32768,
            'maxTokens': 8192
        }
    ]

with open(path, 'w') as f:
    json.dump(cfg, f, indent=2)
print('ok')
" "${config_file}" "${model_id}" 2>> "${CLAWSPARK_LOG}" || {
    log_error "Failed to update config."
    exit 1
  }

  log_success "Vision model set to: ${model_id}"
  log_info "Restart to apply: clawspark restart"
}
|
|
2247
|
+
|
|
2248
|
+
# ── mcp ─────────────────────────────────────────────────────────────────────
|
|
2249
|
+
# Manage MCP (Model Context Protocol) servers registered with mcporter.
#   list|ls     show configured servers
#   setup       install the default server set (lib/setup-mcp.sh)
#   add         register a custom server in the mcporter config
#   remove|rm   unregister a server
_cmd_mcp() {
  local subcmd="${1:-list}"
  shift || true

  # mcporter keeps all server definitions in a single JSON file.
  local mcporter_config="${HOME}/.mcporter/mcporter.json"

  case "${subcmd}" in
    list|ls)
      if ! check_command mcporter; then
        log_error "mcporter not installed. Run: npm install -g mcporter"
        return 1
      fi
      printf '\n %s%sMCP Servers:%s\n\n' "${BOLD}" "${CYAN}" "${RESET}"
      if [[ -f "${mcporter_config}" ]]; then
        # Pass the config path via argv instead of interpolating it into
        # the Python source: a path containing a quote would otherwise
        # break (or inject into) the script. This also matches how the
        # add/remove subcommands below invoke python3.
        python3 -c "
import json, sys
with open(sys.argv[1]) as f:
    cfg = json.load(f)
servers = cfg.get('mcpServers', {})
if not servers:
    print(' No MCP servers configured.')
else:
    for name, conf in servers.items():
        cmd = conf.get('command', conf.get('baseUrl', '?'))
        args = ' '.join(conf.get('args', []))
        print(f' \033[32m*\033[0m {name:20s} {cmd} {args}')
" "${mcporter_config}" 2>/dev/null || log_warn "Could not read mcporter config."
      else
        log_info "No mcporter config found. Run: clawspark mcp setup"
      fi
      printf '\n'
      ;;
    setup)
      if [[ -f "${CLAWSPARK_DIR}/lib/setup-mcp.sh" ]]; then
        source "${CLAWSPARK_DIR}/lib/setup-mcp.sh"
        setup_mcp
      else
        log_error "setup-mcp.sh not found. Reinstall clawspark."
      fi
      ;;
    add)
      local server_name="${1:-}"
      local server_cmd="${2:-}"
      [[ -z "${server_name}" ]] && { log_error "Usage: clawspark mcp add <name> <command> [args...]"; return 1; }
      [[ -z "${server_cmd}" ]] && { log_error "Usage: clawspark mcp add <name> <command> [args...]"; return 1; }
      shift 2 || true
      local server_args=("$@")

      mkdir -p "$(dirname "${mcporter_config}")"
      # ${arr[@]+...} guards the expansion so an empty args array does
      # not trip `set -u` on bash < 4.4.
      python3 -c "
import json, sys, os

config_path = sys.argv[1]
name = sys.argv[2]
cmd = sys.argv[3]
args = sys.argv[4:]

cfg = {}
if os.path.exists(config_path):
    with open(config_path) as f:
        cfg = json.load(f)

cfg.setdefault('mcpServers', {})[name] = {
    'command': cmd,
    'args': args,
    'env': {}
}

with open(config_path, 'w') as f:
    json.dump(cfg, f, indent=2)
print('ok')
" "${mcporter_config}" "${server_name}" "${server_cmd}" ${server_args[@]+"${server_args[@]}"} 2>> "${CLAWSPARK_LOG}" || {
        log_error "Failed to add MCP server."
        return 1
      }
      log_success "Added MCP server: ${server_name}"
      ;;
    remove|rm)
      local server_name="${1:-}"
      [[ -z "${server_name}" ]] && { log_error "Usage: clawspark mcp remove <name>"; return 1; }
      # Capture the script's verdict so we only report success when the
      # server actually existed. Previously this logged "Removed" even
      # when the name was unknown or no config file was present.
      local result
      result=$(python3 -c "
import json, sys, os
config_path = sys.argv[1]
name = sys.argv[2]
if not os.path.exists(config_path):
    print('no config')
    sys.exit(0)
with open(config_path) as f:
    cfg = json.load(f)
servers = cfg.get('mcpServers', {})
if name in servers:
    del servers[name]
    with open(config_path, 'w') as f:
        json.dump(cfg, f, indent=2)
    print('removed')
else:
    print('not found')
" "${mcporter_config}" "${server_name}" 2>> "${CLAWSPARK_LOG}") || true
      if [[ "${result}" == "removed" ]]; then
        log_success "Removed MCP server: ${server_name}"
      else
        log_warn "MCP server not found: ${server_name}"
      fi
      ;;
    *)
      printf 'Usage: clawspark mcp <command>\n\n'
      printf 'Commands:\n'
      printf ' list List configured MCP servers\n'
      printf ' setup Run MCP setup (install defaults)\n'
      printf ' add <n> <cmd> Add a custom MCP server\n'
      printf ' remove <n> Remove an MCP server\n'
      ;;
  esac
}
|
|
2358
|
+
|
|
2359
|
+
# ── tools ───────────────────────────────────────────────────────────────────
|
|
2360
|
+
# Dispatch `clawspark tools <enable|disable>` to the matching helper;
# anything else (including no subcommand) prints the tool catalog.
_cmd_tools() {
  local action="${1:-}"
  shift || true

  case "${action}" in
    enable|disable)
      local tool_name="${1:-}"
      if [[ -z "${tool_name}" ]]; then
        log_error "Usage: clawspark tools ${action} <tool>"
        exit 1
      fi
      if [[ "${action}" == "enable" ]]; then
        _tools_enable "${tool_name}"
      else
        _tools_disable "${tool_name}"
      fi
      ;;
    *)
      _tools_list
      ;;
  esac
}
|
|
2380
|
+
|
|
2381
|
+
# Render the agent tool catalog: built-in tools first, then optional ones
# with their setup requirement. Tools present in the personal agent's
# deny list are marked as disabled.
_tools_list() {
  printf '\n%s%s clawspark tools%s\n\n' "${BOLD}" "${BLUE}" "${RESET}"

  local config_file="${HOME}/.openclaw/openclaw.json"

  # Built-in tools that work out of the box
  # Entry format: "name:description:status" — split on ':' when rendered.
  local -a builtin_tools=(
    "exec:Shell commands (bash, docker, kubectl):yes"
    "web_fetch:HTTP requests and web search via DDG:yes"
    "read:Read files on the host:yes"
    "write:Write and create files:yes"
    "edit:Edit files in place:yes"
    "message:WhatsApp / Telegram replies:yes"
    "canvas:Interactive web UI:yes"
    "process:List, monitor, kill processes:yes"
    "cron:Scheduled tasks:yes"
    "sessions_spawn:Sub-agent sessions:yes"
    "nodes:Remote/paired node execution:yes"
    "memory_search:Search stored context:yes"
    "memory_store:Save info across sessions:yes"
    "transcribe:Audio transcription (local Whisper):yes"
    "vision:Image analysis (model-dependent):yes"
  )

  # Optional tools that need configuration
  # Third field here is the setup requirement text, not a status flag.
  local -a optional_tools=(
    "browser:Browser automation:needs chromium/chrome installed"
    "email:Send and read emails:needs email account config"
    "calendar:Calendar management:needs calendar integration"
    "contacts:Contact lookup:needs contacts provider"
    "image_generate:Text-to-image:needs image generation API"
    "code_interpret:Sandboxed code runner:needs sandbox/Docker"
    "webhook:Webhook management:needs webhook endpoints"
  )

  # Check which optional tools are currently enabled (not in deny list).
  # Result: the personal agent's deny list, space-joined; empty when the
  # config is missing/unreadable or no 'personal' agent exists.
  local personal_deny=""
  if [[ -f "${config_file}" ]]; then
    personal_deny=$(python3 -c "
import json, sys
with open(sys.argv[1]) as f:
    cfg = json.load(f)
agents = cfg.get('agents', {}).get('list', [])
for a in agents:
    if a.get('id') == 'personal':
        deny = a.get('tools', {}).get('deny', [])
        print(' '.join(deny))
        break
" "${config_file}" 2>/dev/null || echo "")
  fi

  printf ' %s%sBuilt-in (work out of the box):%s\n\n' "${BOLD}" "${GREEN}" "${RESET}"
  for entry in "${builtin_tools[@]}"; do
    IFS=: read -r name desc status <<< "${entry}"
    # Pad both sides with spaces so the glob matches whole tool names
    # only (e.g. 'read' cannot match inside another name).
    if [[ " ${personal_deny} " == *" ${name} "* ]]; then
      printf ' %s✗%s %-18s %s %s(disabled)%s\n' "${RED}" "${RESET}" "${name}" "${desc}" "${RED}" "${RESET}"
    else
      printf ' %s✓%s %-18s %s\n' "${GREEN}" "${RESET}" "${name}" "${desc}"
    fi
  done

  printf '\n %s%sOptional (need extra setup):%s\n\n' "${BOLD}" "${YELLOW}" "${RESET}"
  for entry in "${optional_tools[@]}"; do
    IFS=: read -r name desc requirement <<< "${entry}"
    if [[ " ${personal_deny} " == *" ${name} "* ]]; then
      printf ' %s✗%s %-18s %s %s(disabled)%s\n' "${RED}" "${RESET}" "${name}" "${desc}" "${RED}" "${RESET}"
    else
      printf ' %s-%s %-18s %s -- %s%s%s\n' "${YELLOW}" "${RESET}" "${name}" "${desc}" "${CYAN}" "${requirement}" "${RESET}"
    fi
  done

  printf '\n Enable: clawspark tools enable <name>\n'
  printf ' Disable: clawspark tools disable <name>\n\n'
}
|
|
2455
|
+
|
|
2456
|
+
# Enable a tool for the personal agent: print any per-tool setup guidance,
# then remove the tool from the personal agent's deny list in openclaw.json.
# $1 - tool name (built-in or optional).
_tools_enable() {
  local tool="$1"
  local config_file="${HOME}/.openclaw/openclaw.json"

  if [[ ! -f "${config_file}" ]]; then
    log_error "Config not found at ${config_file}. Run install.sh first."
    exit 1
  fi

  # Setup instructions for optional tools
  # NOTE(review): 'web_search' has a case arm here but is not listed in
  # _tools_list (built-ins include 'web_fetch') — confirm which name the
  # gateway actually recognizes.
  case "${tool}" in
    browser)
      log_info "Enabling browser tool..."
      log_info "Requirement: Chromium or Chrome must be installed."
      # Accept any of the common browser binary names.
      if check_command chromium-browser || check_command chromium || check_command google-chrome; then
        log_success "Browser binary found."
      else
        log_warn "No browser binary found. Install with: sudo apt install chromium-browser"
        log_warn "Enabling anyway -- install a browser before using the tool."
      fi
      ;;
    web_search)
      log_info "Enabling web_search tool..."
      printf ' This requires a Brave Search API key.\n'
      printf ' Get one at: https://brave.com/search/api/\n\n'
      printf ' %sEnter your Brave API key (or press Enter to skip):%s ' "${CYAN}" "${RESET}"
      local api_key
      read -r api_key
      if [[ -n "${api_key}" ]]; then
        local env_file="${HOME}/.openclaw/gateway.env"
        # Append on first save; otherwise replace the existing line.
        if ! grep -q 'BRAVE_API_KEY' "${env_file}" 2>/dev/null; then
          echo "BRAVE_API_KEY=${api_key}" >> "${env_file}"
        else
          # NOTE(review): the key is interpolated into the sed replacement;
          # a key containing '/' or '&' would corrupt the env file —
          # confirm the allowed key charset.
          sed -i "s/BRAVE_API_KEY=.*/BRAVE_API_KEY=${api_key}/" "${env_file}"
        fi
        # Keep the secret readable by the owner only.
        chmod 600 "${env_file}"
        log_success "Brave API key saved to ${env_file}"
        log_info "Restart the gateway: clawspark restart"
      else
        log_warn "No API key provided. web_search will not work without it."
        log_warn "Enabling the tool config anyway."
      fi
      ;;
    email)
      log_info "Enabling email tool..."
      printf ' Email requires IMAP/SMTP configuration in OpenClaw.\n'
      printf ' After enabling, configure via the OpenClaw control UI:\n'
      printf ' http://localhost:18789 -> Settings -> Integrations -> Email\n\n'
      ;;
    calendar)
      log_info "Enabling calendar tool..."
      printf ' Calendar requires Google or Outlook integration.\n'
      printf ' After enabling, configure via the OpenClaw control UI:\n'
      printf ' http://localhost:18789 -> Settings -> Integrations -> Calendar\n\n'
      ;;
    contacts)
      log_info "Enabling contacts tool..."
      printf ' After enabling, configure via the OpenClaw control UI:\n'
      printf ' http://localhost:18789 -> Settings -> Integrations -> Contacts\n\n'
      ;;
    image_generate)
      log_info "Enabling image_generate tool..."
      printf ' Image generation requires an API (e.g. DALL-E, Stable Diffusion).\n'
      printf ' After enabling, configure the provider in OpenClaw settings.\n\n'
      ;;
    code_interpret)
      log_info "Enabling code_interpret tool..."
      if check_command docker; then
        log_success "Docker is available for sandboxed execution."
      else
        log_warn "Docker not found. code_interpret may run unsandboxed."
      fi
      ;;
    webhook)
      log_info "Enabling webhook tool..."
      printf ' Configure webhook endpoints in OpenClaw settings after enabling.\n\n'
      ;;
    *)
      # For built-in tools, just remove from deny list if present
      log_info "Enabling ${tool}..."
      ;;
  esac

  # Remove tool from personal agent's deny list (if present).
  # Drops the 'deny' key entirely when the list becomes empty; failure to
  # edit the config is tolerated (|| true) since the tool may simply not
  # have been denied.
  python3 -c "
import json, sys

tool_name = sys.argv[1]
path = sys.argv[2]

with open(path, 'r') as f:
    cfg = json.load(f)

agents = cfg.get('agents', {}).get('list', [])
changed = False

for agent in agents:
    if agent.get('id') == 'personal':
        deny = agent.get('tools', {}).get('deny', [])
        if tool_name in deny:
            deny.remove(tool_name)
            if not deny:
                agent['tools'].pop('deny', None)
            changed = True
        break

if changed:
    with open(path, 'w') as f:
        json.dump(cfg, f, indent=2)
    print('removed_from_deny')
else:
    print('not_in_deny')
" "${tool}" "${config_file}" 2>> "${CLAWSPARK_LOG}" || true

  log_success "Tool '${tool}' enabled for personal agent (DMs)."
  log_info "Note: Group agent remains restricted (messaging-only). This is by design."
  log_info "Restart to apply: clawspark restart"
}
|
|
2574
|
+
|
|
2575
|
+
# Disable a tool for the personal agent by adding it to the agent's deny
# list in openclaw.json. The 'message' tool cannot be disabled.
# $1 - tool name.
_tools_disable() {
  local tool="$1"
  local config_file="${HOME}/.openclaw/openclaw.json"

  if [[ ! -f "${config_file}" ]]; then
    log_error "Config not found at ${config_file}. Run install.sh first."
    exit 1
  fi

  # Don't allow disabling message tool
  if [[ "${tool}" == "message" ]]; then
    log_error "Cannot disable 'message' -- it's required for WhatsApp/Telegram."
    exit 1
  fi

  log_info "Disabling ${tool} for personal agent..."

  # Add tool to personal agent's deny list (idempotent: no duplicate
  # entries; the config is rewritten either way).
  python3 -c "
import json, sys

tool_name = sys.argv[1]
path = sys.argv[2]

with open(path, 'r') as f:
    cfg = json.load(f)

agents = cfg.get('agents', {}).get('list', [])

for agent in agents:
    if agent.get('id') == 'personal':
        if 'tools' not in agent:
            agent['tools'] = {}
        deny = agent['tools'].get('deny', [])
        if tool_name not in deny:
            deny.append(tool_name)
        agent['tools']['deny'] = deny
        break

with open(path, 'w') as f:
    json.dump(cfg, f, indent=2)
print('ok')
" "${tool}" "${config_file}" 2>> "${CLAWSPARK_LOG}" || {
    log_error "Failed to update config."
    exit 1
  }

  log_success "Tool '${tool}' disabled for personal agent."
  log_info "Restart to apply: clawspark restart"
}
|
|
2625
|
+
|
|
2626
|
+
# ── uninstall ───────────────────────────────────────────────────────────────
|
|
2627
|
+
# Interactively confirm, then remove OpenClaw, clawspark, and all config.
# Delegates to the packaged uninstall.sh when available, otherwise runs
# the built-in minimal cleanup.
_cmd_uninstall() {
  printf '\n%s%s clawspark uninstall%s\n\n' "${BOLD}" "${RED}" "${RESET}"
  printf ' This will remove OpenClaw, clawspark, and all configuration.\n\n'

  printf ' %sAre you sure? (type YES to confirm):%s ' "${RED}" "${RESET}"
  local answer
  read -r answer
  # Require the literal word YES — anything else aborts cleanly.
  [[ "${answer}" == "YES" ]] || { log_info "Uninstall cancelled."; exit 0; }

  # Prefer the full uninstall script shipped with the package.
  local full_script="${CLAWSPARK_DIR}/lib/../uninstall.sh"
  if [[ -f "${full_script}" ]]; then
    bash "${full_script}" --confirmed
  else
    _inline_uninstall
  fi
}
|
|
2648
|
+
|
|
2649
|
+
# Minimal built-in uninstall: stop every managed process, remove the
# OpenClaw npm package, and delete all clawspark/OpenClaw state.
_inline_uninstall() {
  # Stop PID-file-tracked services first, then any stragglers by name.
  local pidfile
  for pidfile in gateway.pid node.pid dashboard.pid ollama.pid; do
    _kill_pid_file "${CLAWSPARK_DIR}/${pidfile}"
  done
  local proc
  for proc in openclaw-gateway openclaw-node clawmetry; do
    _kill_by_name "${proc}"
  done
  sleep 2

  # Remove the globally installed OpenClaw package, if present.
  if check_command openclaw; then
    npm uninstall -g openclaw 2>/dev/null || true
    log_info "OpenClaw removed."
  fi

  # Remove configs and state directories.
  rm -rf "${HOME}/.openclaw"
  rm -rf "${CLAWSPARK_DIR}"
  # Try the system-wide binary first (non-interactive sudo), then the
  # per-user location; never fail the uninstall over this step.
  sudo -n rm -f /usr/local/bin/clawspark 2>/dev/null || rm -f "${HOME}/.local/bin/clawspark" 2>/dev/null || true

  log_success "clawspark has been fully removed."
}
|
|
2673
|
+
|
|
2674
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
2675
|
+
# MAIN DISPATCH
|
|
2676
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
2677
|
+
|
|
2678
|
+
# Entry point: the first CLI argument names the subcommand (default: help);
# remaining arguments are forwarded to the matching _cmd_* handler.
cmd="${1:-help}"
shift || true  # tolerate invocation with zero arguments

case "${cmd}" in
  status) _cmd_status ;;
  skills) _cmd_skills "$@" ;;
  tools) _cmd_tools "$@" ;;
  mcp) _cmd_mcp "$@" ;;
  model) _cmd_model "$@" ;;
  provider) _cmd_provider "$@" ;;
  benchmark) _cmd_benchmark ;;
  update) _cmd_update ;;
  dashboard) _cmd_dashboard ;;
  tailscale) _cmd_tailscale "$@" ;;
  airgap) _cmd_airgap "$@" ;;
  sandbox) _cmd_sandbox "$@" ;;
  logs) _cmd_logs ;;
  start) _cmd_start ;;
  stop) _cmd_stop "$@" ;;
  restart) _cmd_restart ;;
  uninstall) _cmd_uninstall ;;
  diagnose|doctor)
    # Diagnostics live in a separately loaded module; verify the entry
    # function actually came into scope before calling it.
    _load_diagnose
    if command -v diagnose_system &>/dev/null; then
      diagnose_system
    else
      log_error "Diagnose module not found. Re-install clawspark."
      exit 1
    fi
    ;;
  help|-h|--help) _usage ;;
  version|--version|-v) echo "clawspark v${CLAWSPARK_VERSION}" ;;
  *)
    # Unknown command: print usage and exit non-zero for scripted callers.
    log_error "Unknown command: ${cmd}"
    _usage
    exit 1
    ;;
esac
|