traderclaw-cli 1.0.63 → 1.0.66

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -308,17 +308,25 @@ function privilegeRemediationMessage(cmd, args = [], customLines = []) {
308
308
 
309
309
  function gatewayTimeoutRemediation() {
310
310
  return [
311
- "Gateway bootstrap timed out waiting for health checks.",
312
- "Run these commands in terminal, then click Start Installation again:",
313
- "1) openclaw gateway status --json || true",
314
- "2) openclaw gateway probe || true",
311
+ "Gateway failed to start: service stayed stopped and health checks did not pass.",
312
+ "This usually means the gateway service is misconfigured, crashed at launch, or the system is out of resources.",
313
+ "",
314
+ "Run these commands in your VPS terminal to diagnose and recover:",
315
+ "1) openclaw gateway status --json || true # check current state",
316
+ "2) journalctl --user -u openclaw-gateway -n 50 --no-pager || true # check service logs",
315
317
  "3) openclaw gateway stop || true",
316
318
  "4) openclaw gateway install",
317
319
  "5) openclaw gateway restart",
318
- "6) openclaw gateway status --json",
320
+ "6) openclaw gateway status --json # should show running + rpc.ok=true",
319
321
  "7) tailscale funnel --bg 18789",
320
322
  "8) tailscale funnel status",
321
- "If gateway still fails on a low-memory VM, add swap or use a larger staging size (>=2GB RAM recommended).",
323
+ "",
324
+ "If the gateway still fails:",
325
+ "- Check RAM: openclaw gateway requires >=512MB free (>=2GB total recommended)",
326
+ "- Check disk: df -h ~/.openclaw",
327
+ "- Try: openclaw config validate && openclaw gateway doctor || true",
328
+ "- If config schema error appears, run: npm install -g openclaw@latest",
329
+ "Once the gateway shows 'running' in status, click Start Installation again.",
322
330
  ].join("\n");
323
331
  }
324
332
 
@@ -1303,7 +1311,12 @@ function resolveLlmModelSelection(provider, requestedModel) {
1303
1311
  return { model: chosen || availableModels[0], source: "provider_default", availableModels, warnings };
1304
1312
  }
1305
1313
 
1306
- warnings.push(`No discoverable model list found for provider '${provider}'. Falling back to '${fallbackModelForProvider(provider)}'.`);
1314
+ warnings.push(
1315
+ `[ALERT] No discoverable model list found for provider '${provider}'. ` +
1316
+ `Auto-selecting hardcoded default '${fallbackModelForProvider(provider)}' — ` +
1317
+ `this model will be billed to your API key. ` +
1318
+ `To use a different model, after finishing setup, use openclaw config and set the model manually.`,
1319
+ );
1307
1320
  return { model: fallbackModelForProvider(provider), source: "fallback_guess", availableModels, warnings };
1308
1321
  }
1309
1322
 
@@ -1870,12 +1883,15 @@ export class InstallerStepEngine {
1870
1883
  } catch (err) {
1871
1884
  const text = `${err?.message || ""}\n${err?.stderr || ""}\n${err?.stdout || ""}`.toLowerCase();
1872
1885
  const gatewayModeUnset = text.includes("gateway.mode=local") && text.includes("current: unset");
1873
- if (
1886
+ const gatewayStartFailed =
1874
1887
  text.includes("gateway restart timed out")
1875
1888
  || text.includes("timed out after 60s waiting for health checks")
1876
1889
  || text.includes("waiting for gateway port")
1877
- || gatewayModeUnset
1878
- ) {
1890
+ // Newer OpenClaw versions: shorter-timeout variant of the same class of failure
1891
+ || (text.includes("gateway restart failed") && text.includes("service stayed stopped"))
1892
+ || text.includes("health checks never came up")
1893
+ || text.includes("service stayed stopped");
1894
+ if (gatewayStartFailed || gatewayModeUnset) {
1879
1895
  const recovered = await this.tryAutoRecoverGatewayMode("gateway_bootstrap");
1880
1896
  if (recovered.success) {
1881
1897
  return this.runFunnel();
@@ -9,26 +9,23 @@ import { randomUUID, createPrivateKey, sign as cryptoSign } from "crypto";
9
9
  import { execFile, execSync } from "child_process";
10
10
  import { promisify } from "util";
11
11
  import { createServer } from "http";
12
- import { sortModelsByPreference, MAX_MODELS_PER_PROVIDER_SORT } from "./llm-model-preference.mjs";
13
12
  import { resolvePluginPackageRoot } from "./resolve-plugin-root.mjs";
14
13
 
15
14
  const execFileAsync = promisify(execFile);
16
15
 
17
- /** Fast wizard catalog lookup: prefer one full list, then only probe key providers. */
18
- const OPENCLAW_MODELS_FLAT_TIMEOUT_MS = 7_500;
19
- const OPENCLAW_MODELS_PER_PROVIDER_TIMEOUT_MS = 4_500;
20
- const WIZARD_PRIORITY_PROVIDERS = [
16
+ /**
17
+ * Ordered list of providers for display in the wizard dropdown.
18
+ * The most commonly used providers appear first.
19
+ */
20
+ const WIZARD_PROVIDER_PRIORITY = [
21
21
  "anthropic",
22
22
  "openai",
23
- "openrouter",
24
23
  "google",
24
+ "openrouter",
25
25
  "xai",
26
26
  "deepseek",
27
27
  "groq",
28
28
  "mistral",
29
- ];
30
- const WIZARD_PROVIDER_PRIORITY = [
31
- ...WIZARD_PRIORITY_PROVIDERS,
32
29
  "perplexity",
33
30
  "together",
34
31
  "openai-codex",
@@ -43,14 +40,6 @@ const WIZARD_PROVIDER_PRIORITY = [
43
40
  ];
44
41
  let wizardLlmCatalogPromise = null;
45
42
 
46
- function compareWizardProviderPriority(a, b) {
47
- const ai = WIZARD_PROVIDER_PRIORITY.indexOf(a);
48
- const bi = WIZARD_PROVIDER_PRIORITY.indexOf(b);
49
- const aRank = ai >= 0 ? ai : Number.MAX_SAFE_INTEGER;
50
- const bRank = bi >= 0 ? bi : Number.MAX_SAFE_INTEGER;
51
- return aRank - bRank || a.localeCompare(b);
52
- }
53
-
54
43
  const PLUGIN_ROOT = resolvePluginPackageRoot(import.meta.url);
55
44
  const PLUGIN_PACKAGE_JSON = JSON.parse(readFileSync(join(PLUGIN_ROOT, "package.json"), "utf-8"));
56
45
  const PLUGIN_VERSION =
@@ -1916,35 +1905,29 @@ function parseJsonBody(req) {
1916
1905
  });
1917
1906
  }
1918
1907
 
1908
+ /**
1909
+ * Returns the wizard LLM catalog immediately from the curated static list.
1910
+ *
1911
+ * WHY we no longer call `openclaw models list --all`:
1912
+ * The OpenClaw CLI enumerates models by making live network calls to each
1913
+ * provider's API (provider plugins inject catalogs via network). The wizard
1914
+ * runs BEFORE any API credentials have been configured, so every probe will
1915
+ * always ETIMEDOUT — there is nothing to authenticate with yet. Calling the
1916
+ * CLI here is architecturally wrong for this stage of setup.
1917
+ *
1918
+ * The curated list below is the correct source for the wizard: it covers all
1919
+ * supported providers with their recommended models and loads in < 1 ms.
1920
+ * After installation the user can run `openclaw models list` to see the full
1921
+ * live catalog for their configured providers.
1922
+ */
1919
1923
  async function loadWizardLlmCatalogAsync() {
1920
- const supportedProviders = new Set([
1921
- "anthropic",
1922
- "openai",
1923
- "openai-codex",
1924
- "openrouter",
1925
- "groq",
1926
- "mistral",
1927
- "google",
1928
- "google-vertex",
1929
- "xai",
1930
- "deepseek",
1931
- "together",
1932
- "perplexity",
1933
- "amazon-bedrock",
1934
- "vercel-ai-gateway",
1935
- "minimax",
1936
- "moonshot",
1937
- "nvidia",
1938
- "qwen",
1939
- "cerebras",
1940
- ]);
1941
- const fallback = {
1942
- source: "fallback",
1924
+ return {
1925
+ source: "curated",
1943
1926
  providers: [
1944
1927
  {
1945
1928
  id: "anthropic",
1946
1929
  models: [
1947
- { id: "anthropic/claude-sonnet-4-6", name: "Claude Sonnet 4.6 (recommended default)" },
1930
+ { id: "anthropic/claude-sonnet-4-6", name: "Claude Sonnet 4.6 (recommended)" },
1948
1931
  { id: "anthropic/claude-opus-4-6", name: "Claude Opus 4.6" },
1949
1932
  { id: "anthropic/claude-haiku-4-5", name: "Claude Haiku 4.5" },
1950
1933
  ],
@@ -1952,7 +1935,7 @@ async function loadWizardLlmCatalogAsync() {
1952
1935
  {
1953
1936
  id: "openai",
1954
1937
  models: [
1955
- { id: "openai/gpt-5.4", name: "GPT-5.4 (recommended default)" },
1938
+ { id: "openai/gpt-5.4", name: "GPT-5.4 (recommended)" },
1956
1939
  { id: "openai/gpt-5.4-mini", name: "GPT-5.4 mini" },
1957
1940
  { id: "openai/gpt-5.4-nano", name: "GPT-5.4 nano" },
1958
1941
  ],
@@ -1961,29 +1944,48 @@ async function loadWizardLlmCatalogAsync() {
1961
1944
  id: "openai-codex",
1962
1945
  models: [{ id: "openai-codex/gpt-5-codex", name: "GPT-5 Codex" }],
1963
1946
  },
1947
+ {
1948
+ id: "google",
1949
+ models: [
1950
+ { id: "google/gemini-2.5-flash", name: "Gemini 2.5 Flash (recommended)" },
1951
+ { id: "google/gemini-2.5-pro", name: "Gemini 2.5 Pro" },
1952
+ ],
1953
+ },
1964
1954
  {
1965
1955
  id: "xai",
1966
- models: [{ id: "xai/grok-4", name: "Grok 4" }],
1956
+ models: [
1957
+ { id: "xai/grok-4", name: "Grok 4 (recommended)" },
1958
+ { id: "xai/grok-3", name: "Grok 3" },
1959
+ ],
1967
1960
  },
1968
1961
  {
1969
1962
  id: "deepseek",
1970
- models: [{ id: "deepseek/deepseek-chat", name: "DeepSeek Chat (V3.2)" }],
1963
+ models: [
1964
+ { id: "deepseek/deepseek-chat", name: "DeepSeek Chat V3 (recommended)" },
1965
+ { id: "deepseek/deepseek-reasoner", name: "DeepSeek Reasoner R1" },
1966
+ ],
1971
1967
  },
1972
1968
  {
1973
- id: "google",
1974
- models: [{ id: "google/gemini-2.5-flash", name: "Gemini 2.5 Flash" }],
1969
+ id: "openrouter",
1970
+ models: [
1971
+ { id: "openrouter/anthropic/claude-sonnet-4-6", name: "Claude Sonnet 4.6 via OpenRouter (recommended)" },
1972
+ { id: "openrouter/openai/gpt-5.4", name: "GPT-5.4 via OpenRouter" },
1973
+ { id: "openrouter/google/gemini-2.5-flash", name: "Gemini 2.5 Flash via OpenRouter" },
1974
+ ],
1975
1975
  },
1976
1976
  {
1977
1977
  id: "groq",
1978
- models: [{ id: "groq/llama-4-scout-17b-16e-instruct", name: "Llama 4 Scout" }],
1979
- },
1980
- {
1981
- id: "openrouter",
1982
- models: [{ id: "openrouter/anthropic/claude-sonnet-4-6", name: "Claude Sonnet 4.6 (via OpenRouter)" }],
1978
+ models: [
1979
+ { id: "groq/llama-4-scout-17b-16e-instruct", name: "Llama 4 Scout (recommended)" },
1980
+ { id: "groq/llama-4-maverick-17b-128e-instruct", name: "Llama 4 Maverick" },
1981
+ ],
1983
1982
  },
1984
1983
  {
1985
1984
  id: "mistral",
1986
- models: [{ id: "mistral/mistral-large-latest", name: "Mistral Large" }],
1985
+ models: [
1986
+ { id: "mistral/mistral-large-latest", name: "Mistral Large (recommended)" },
1987
+ { id: "mistral/mistral-medium-latest", name: "Mistral Medium" },
1988
+ ],
1987
1989
  },
1988
1990
  {
1989
1991
  id: "perplexity",
@@ -2001,174 +2003,13 @@ async function loadWizardLlmCatalogAsync() {
2001
2003
  id: "qwen",
2002
2004
  models: [{ id: "qwen/qwen3-235b-a22b", name: "Qwen3 235B A22B" }],
2003
2005
  },
2006
+ {
2007
+ id: "moonshot",
2008
+ models: [{ id: "moonshot/kimi-k2", name: "Kimi K2" }],
2009
+ },
2004
2010
  ],
2011
+ generatedAt: new Date().toISOString(),
2005
2012
  };
2006
-
2007
- if (!commandExists("openclaw")) {
2008
- return { ...fallback, warning: "openclaw_not_found" };
2009
- }
2010
-
2011
- const providerIds = [...supportedProviders].sort(compareWizardProviderPriority);
2012
- const priorityProviderIds = providerIds.filter((id) => WIZARD_PRIORITY_PROVIDERS.includes(id));
2013
-
2014
- async function fetchModelsForProvider(provider) {
2015
- try {
2016
- const { stdout } = await execFileAsync(
2017
- "openclaw",
2018
- ["models", "list", "--all", "--provider", provider, "--json"],
2019
- {
2020
- encoding: "utf-8",
2021
- maxBuffer: 25 * 1024 * 1024,
2022
- timeout: OPENCLAW_MODELS_PER_PROVIDER_TIMEOUT_MS,
2023
- env: NO_COLOR_ENV,
2024
- },
2025
- );
2026
- return { provider, stdout };
2027
- } catch (err) {
2028
- return { provider, error: err };
2029
- }
2030
- }
2031
-
2032
- function seedFallbackProviderMap() {
2033
- return new Map(
2034
- fallback.providers.map((entry) => [
2035
- entry.id,
2036
- entry.models.map((model) => ({ ...model })),
2037
- ]),
2038
- );
2039
- }
2040
-
2041
- function mergeCatalogModelsIntoMap(providerMap, models, expectedProvider = "") {
2042
- let added = 0;
2043
- for (const entry of models) {
2044
- if (!entry || typeof entry.key !== "string") continue;
2045
- const modelId = String(entry.key);
2046
- const slash = modelId.indexOf("/");
2047
- if (slash <= 0 || slash === modelId.length - 1) continue;
2048
- const provider = modelId.slice(0, slash);
2049
- if (!supportedProviders.has(provider)) continue;
2050
- if (expectedProvider && provider !== expectedProvider) continue;
2051
- const existing = providerMap.get(provider) || [];
2052
- existing.push({
2053
- id: modelId,
2054
- name: typeof entry.name === "string" && entry.name.trim() ? entry.name : modelId,
2055
- });
2056
- providerMap.set(provider, existing);
2057
- added += 1;
2058
- }
2059
- return added;
2060
- }
2061
-
2062
- function buildProvidersFromMap(providerMap) {
2063
- return providerIds
2064
- .map((id) => {
2065
- const rawModels = providerMap.get(id) || [];
2066
- const sortedIds = sortModelsByPreference(
2067
- id,
2068
- rawModels.map((m) => m.id),
2069
- );
2070
- const byId = new Map(rawModels.map((m) => [m.id, m]));
2071
- const limitedIds = sortedIds.slice(0, MAX_MODELS_PER_PROVIDER_SORT);
2072
- const models = limitedIds.map((mid) => byId.get(mid)).filter(Boolean);
2073
- return { id, models };
2074
- })
2075
- .filter((entry) => supportedProviders.has(entry.id))
2076
- .filter((entry) => entry.models.length > 0);
2077
- }
2078
-
2079
- /** When `openclaw models list --all --json` returns models; used if per-provider calls yield nothing. */
2080
- function mergeFlatCatalogIntoMap(providerMap) {
2081
- const raw = execSync("openclaw models list --all --json", {
2082
- encoding: "utf-8",
2083
- stdio: ["ignore", "pipe", "pipe"],
2084
- maxBuffer: 50 * 1024 * 1024,
2085
- timeout: OPENCLAW_MODELS_FLAT_TIMEOUT_MS,
2086
- env: NO_COLOR_ENV,
2087
- });
2088
- const parsed = extractJson(raw);
2089
- if (!parsed) return 0;
2090
- const models = Array.isArray(parsed?.models) ? parsed.models : [];
2091
- return mergeCatalogModelsIntoMap(providerMap, models);
2092
- }
2093
-
2094
- try {
2095
- const t0 = Date.now();
2096
- const liveProviderMap = new Map();
2097
- let catalogStrategy = "flat_all";
2098
- let flatError = "";
2099
- let liveAdded = 0;
2100
- try {
2101
- liveAdded = mergeFlatCatalogIntoMap(liveProviderMap);
2102
- } catch (err) {
2103
- flatError = err instanceof Error ? err.message : String(err);
2104
- }
2105
-
2106
- let batches = [];
2107
- if (liveAdded === 0) {
2108
- catalogStrategy = "priority_parallel";
2109
- batches = await Promise.all(priorityProviderIds.map((p) => fetchModelsForProvider(p)));
2110
- for (const batch of batches) {
2111
- if (batch.error || !batch.stdout) continue;
2112
- const parsed = extractJson(batch.stdout);
2113
- if (!parsed) continue;
2114
- const models = Array.isArray(parsed?.models) ? parsed.models : [];
2115
- liveAdded += mergeCatalogModelsIntoMap(liveProviderMap, models, batch.provider);
2116
- }
2117
- }
2118
-
2119
- let providers = buildProvidersFromMap(liveProviderMap);
2120
- if (providers.length === 0) {
2121
- const failedParallel = batches.filter((b) => b.error).length;
2122
- const details = [];
2123
- if (flatError) details.push(`flat list failed: ${flatError.slice(0, 160)}`);
2124
- if (failedParallel > 0) {
2125
- details.push(
2126
- `${failedParallel}/${priorityProviderIds.length} priority provider lookups failed`,
2127
- );
2128
- }
2129
- return {
2130
- ...fallback,
2131
- warning: `openclaw_model_catalog_unavailable${details.length ? ` (${details.join("; ")})` : ""}`,
2132
- };
2133
- }
2134
-
2135
- const mergedProviderMap = new Map(liveProviderMap);
2136
- for (const [provider, models] of seedFallbackProviderMap()) {
2137
- if (!mergedProviderMap.has(provider) || (mergedProviderMap.get(provider) || []).length === 0) {
2138
- mergedProviderMap.set(provider, models);
2139
- }
2140
- }
2141
- providers = buildProvidersFromMap(mergedProviderMap);
2142
-
2143
- const elapsedMs = Date.now() - t0;
2144
- const source =
2145
- providers.length > buildProvidersFromMap(liveProviderMap).length ? "hybrid" : "openclaw";
2146
- const warning =
2147
- source === "hybrid" && flatError
2148
- ? `loaded priority providers; kept curated defaults for the rest (${flatError.slice(0, 160)})`
2149
- : source === "hybrid"
2150
- ? "loaded priority providers; kept curated defaults for the rest"
2151
- : "";
2152
- return {
2153
- source,
2154
- providers,
2155
- generatedAt: new Date().toISOString(),
2156
- catalogFetchMs: elapsedMs,
2157
- catalogStrategy,
2158
- ...(warning ? { warning } : {}),
2159
- };
2160
- } catch (err) {
2161
- const detail = err?.message || String(err);
2162
- const isBufferErr = detail.includes("maxBuffer") || detail.includes("ENOBUFS");
2163
- const hint = isBufferErr
2164
- ? " (stdout exceeded buffer — OpenClaw model catalog may have grown; this version raises the limit)"
2165
- : "";
2166
- console.error(`[traderclaw] loadWizardLlmCatalog failed${hint}: ${detail.slice(0, 500)}`);
2167
- return {
2168
- ...fallback,
2169
- warning: `openclaw_models_list_failed: ${detail}`,
2170
- };
2171
- }
2172
2013
  }
2173
2014
 
2174
2015
  function wizardHtml(defaults) {
@@ -2467,11 +2308,11 @@ function wizardHtml(defaults) {
2467
2308
  startBtn.textContent = "Loading providers...";
2468
2309
  const updateHint = () => {
2469
2310
  const elapsedSeconds = Math.max(1, Math.floor((Date.now() - llmLoadStartedAt) / 1000));
2470
- if (elapsedSeconds >= 8) {
2471
- llmLoadingHintTextEl.textContent = "Still loading provider catalog (" + elapsedSeconds + "s). This should usually finish in under ~10s.";
2311
+ if (elapsedSeconds >= 5) {
2312
+ llmLoadingHintTextEl.textContent = "Still loading (" + elapsedSeconds + "s). Check network or reload.";
2472
2313
  return;
2473
2314
  }
2474
- llmLoadingHintTextEl.textContent = "Fetching provider list (" + elapsedSeconds + "s)...";
2315
+ llmLoadingHintTextEl.textContent = "Loading provider list (" + elapsedSeconds + "s)...";
2475
2316
  };
2476
2317
  updateHint();
2477
2318
  stopLlmLoadTicker();
@@ -2522,7 +2363,7 @@ function wizardHtml(defaults) {
2522
2363
  setLlmCatalogLoading(true);
2523
2364
  setSelectOptions(llmProviderEl, [{ value: "", label: "Loading providers..." }], "");
2524
2365
  setSelectOptions(llmModelEl, [{ value: "", label: "Loading models..." }], "");
2525
- setLlmCatalogReady(false, "Loading LLM provider catalog... this can take a few seconds.");
2366
+ setLlmCatalogReady(false, "Loading provider list...");
2526
2367
  try {
2527
2368
  const res = await fetch("/api/llm/options");
2528
2369
  const data = await res.json();
@@ -2531,21 +2372,15 @@ function wizardHtml(defaults) {
2531
2372
  if (providers.length === 0) {
2532
2373
  setSelectOptions(llmProviderEl, [{ value: "", label: "No providers available" }], "");
2533
2374
  refreshModelOptions("");
2534
- setLlmCatalogReady(false, "No LLM providers were found from OpenClaw. Please check OpenClaw model setup.", true);
2375
+ setLlmCatalogReady(false, "No LLM providers found. Reload the page to try again.", true);
2535
2376
  return;
2536
2377
  }
2537
2378
  setSelectOptions(llmProviderEl, providers, "${defaults.llmProvider}");
2538
2379
  refreshModelOptions("${defaults.llmModel}");
2539
- const isFallback = llmCatalog.source === "fallback";
2540
- const isHybrid = llmCatalog.source === "hybrid";
2541
- const catalogMsg = isFallback
2542
- ? "Showing curated safe defaults only (could not load live OpenClaw catalog" + (llmCatalog.warning ? ": " + llmCatalog.warning : "") + "). Anthropic and OpenAI stay ready first, with several other providers still available."
2543
- : isHybrid
2544
- ? "Loaded priority providers first and filled the rest with curated defaults" + (llmCatalog.warning ? " (" + llmCatalog.warning + ")" : "") + "."
2545
- : "LLM providers loaded. Select provider and paste credential to continue. Model selection is optional.";
2546
- setLlmCatalogReady(true, catalogMsg, isFallback || isHybrid);
2380
+ const catalogMsg = "Select your provider, paste your API key, and start installation. After setup, run \`openclaw models list\` to explore your live catalog.";
2381
+ setLlmCatalogReady(true, catalogMsg, false);
2547
2382
  } catch (err) {
2548
- setLlmCatalogReady(false, "Failed to load LLM providers. Check OpenClaw and reload this page.", true);
2383
+ setLlmCatalogReady(false, "Failed to load LLM providers. Reload the page and try again.", true);
2549
2384
  manualEl.textContent = "Failed to load LLM provider catalog: " + (err && err.message ? err.message : String(err));
2550
2385
  } finally {
2551
2386
  setLlmCatalogLoading(false);
@@ -2598,7 +2433,7 @@ function wizardHtml(defaults) {
2598
2433
 
2599
2434
  const payload = {
2600
2435
  llmProvider: llmProviderEl.value.trim(),
2601
- llmModel: llmModelManualEl.checked ? llmModelEl.value.trim() : "",
2436
+ llmModel: llmModelEl.value.trim(),
2602
2437
  llmCredential: llmCredentialEl.value.trim(),
2603
2438
  apiKey: document.getElementById("apiKey").value.trim(),
2604
2439
  telegramToken: document.getElementById("telegramToken").value.trim(),
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "traderclaw-cli",
3
- "version": "1.0.63",
3
+ "version": "1.0.66",
4
4
  "description": "Global TraderClaw CLI (install --wizard, setup, precheck). Installs solana-traderclaw as a dependency for OpenClaw plugin files.",
5
5
  "type": "module",
6
6
  "bin": {
@@ -17,7 +17,7 @@
17
17
  "node": ">=22"
18
18
  },
19
19
  "dependencies": {
20
- "solana-traderclaw": "^1.0.63"
20
+ "solana-traderclaw": "^1.0.66"
21
21
  },
22
22
  "keywords": [
23
23
  "traderclaw",