@joshuaswarren/openclaw-engram 9.1.12 → 9.1.14
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions exactly as they appear in the public registry.
- package/dist/access-cli.js +1 -1
- package/dist/{chunk-BEELVTEN.js → chunk-WUKDMJRD.js} +121 -6
- package/dist/chunk-WUKDMJRD.js.map +1 -0
- package/dist/index.js +29 -1
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
- package/scripts/rebuild-native.mjs +15 -6
- package/dist/chunk-BEELVTEN.js.map +0 -1
package/dist/access-cli.js
CHANGED
@@ -3412,6 +3412,7 @@ var LOCAL_SERVERS = [
     detectFn: (resp) => resp === "" || typeof resp === "object" && resp !== null
   }
 ];
+var LOCAL_LLM_GLOBAL_BACKEND_STATE = "__openclawEngramLocalLlmBackendState";
 var LocalLlmClient = class _LocalLlmClient {
   config;
   isAvailable = null;
@@ -3476,6 +3477,61 @@ var LocalLlmClient = class _LocalLlmClient {
   getDetectedType() {
     return this.detectedType;
   }
+  getBackendKey() {
+    return this.config.localLlmUrl.replace("localhost", "127.0.0.1").replace(/\/+$/, "").replace(/\/v1$/, "");
+  }
+  getGlobalBackendState() {
+    const globalAny = globalThis;
+    if (!globalAny[LOCAL_LLM_GLOBAL_BACKEND_STATE]) {
+      globalAny[LOCAL_LLM_GLOBAL_BACKEND_STATE] = /* @__PURE__ */ new Map();
+    }
+    return globalAny[LOCAL_LLM_GLOBAL_BACKEND_STATE];
+  }
+  getTrippedBackendState(now) {
+    const state = this.getGlobalBackendState().get(this.getBackendKey()) ?? null;
+    if (!state) return null;
+    if (state.untilMs <= now) {
+      this.getGlobalBackendState().delete(this.getBackendKey());
+      this.lastHealthCheck = 0;
+      return null;
+    }
+    return state;
+  }
+  markBackendUnavailable(reason, durationMs) {
+    const normalizedReason = this.normalizeBackendTripReason(reason);
+    if (durationMs > 0) {
+      const untilMs = Date.now() + durationMs;
+      this.getGlobalBackendState().set(this.getBackendKey(), { untilMs, reason: normalizedReason });
+    } else {
+      this.getGlobalBackendState().delete(this.getBackendKey());
+    }
+    this.isAvailable = false;
+    this.lastHealthCheck = 0;
+    log.warn(
+      `local LLM backend unavailable for ${durationMs}ms: model=${this.config.localLlmModel} reason=${normalizedReason}`
+    );
+  }
+  extractNonRecoverableBackendReason(reason) {
+    const match = reason.match(
+      /Failed to load model|Library not loaded|different Team IDs|code signature|llm_engine_mlx_amphibian/i
+    );
+    return match?.[0] ?? null;
+  }
+  extractNonRecoverableBackendReasonFromErrorText(errorText) {
+    const directReason = this.extractNonRecoverableBackendReason(errorText);
+    if (directReason) return directReason;
+    try {
+      const parsed = JSON.parse(errorText);
+      return this.extractNonRecoverableBackendReason(parsed?.error?.message ?? "");
+    } catch {
+      return null;
+    }
+  }
+  normalizeBackendTripReason(reason) {
+    const cleaned = reason.replace(/\s+/g, " ").replace(/^[-:–—\s]+/, "").trim();
+    if (!cleaned) return "unknown local backend failure";
+    return cleaned.length > 160 ? `${cleaned.slice(0, 157)}...` : cleaned;
+  }
   /**
    * Fetch with timeout for health checks
    */
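
The block above adds a process-wide circuit breaker: trip state is keyed by the normalized backend URL and stored on globalThis, so every LocalLlmClient instance in the process shares one cooldown record per backend. A minimal sketch of the pattern, with illustrative names rather than the package's actual API:

// Illustrative sketch of the circuit-breaker pattern above; names are
// hypothetical, only the technique mirrors the diff.
const STATE_KEY = "__exampleLlmBackendState";

// Normalize so "http://localhost:1234/v1/" and "http://127.0.0.1:1234"
// resolve to the same circuit.
function backendKey(url) {
  return url.replace("localhost", "127.0.0.1").replace(/\/+$/, "").replace(/\/v1$/, "");
}

// One Map per process, shared across all client instances via globalThis.
function stateMap() {
  globalThis[STATE_KEY] ??= new Map();
  return globalThis[STATE_KEY];
}

function tripBackend(url, reason, durationMs) {
  stateMap().set(backendKey(url), { untilMs: Date.now() + durationMs, reason });
}

function isTripped(url, now = Date.now()) {
  const state = stateMap().get(backendKey(url));
  if (!state) return false;
  if (state.untilMs <= now) {
    stateMap().delete(backendKey(url)); // cooldown elapsed: close the circuit
    return false;
  }
  return true;
}

tripBackend("http://localhost:1234/v1", "Failed to load model", 60_000);
console.log(isTripped("http://127.0.0.1:1234")); // true: same normalized key

Keeping the map on globalThis rather than in module scope means the trip record survives even if the class is instantiated from more than one bundled copy of the module.
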
@@ -3489,17 +3545,17 @@ var LocalLlmClient = class _LocalLlmClient {
       });
       clearTimeout(timeout);
       if (!response.ok) {
-        return { ok: false, data: null };
+        return { ok: false, data: null, status: response.status };
       }
       const contentType = response.headers.get("content-type");
       if (contentType?.includes("application/json")) {
-        return { ok: true, data: await response.json() };
+        return { ok: true, data: await response.json(), status: response.status };
       } else {
-        return { ok: true, data: await response.text() };
+        return { ok: true, data: await response.text(), status: response.status };
       }
     } catch (err) {
       clearTimeout(timeout);
-      return { ok: false, data: null };
+      return { ok: false, data: null, status: null };
     }
   }
   /**
@@ -3508,10 +3564,20 @@ var LocalLlmClient = class _LocalLlmClient {
    */
   async checkAvailability() {
     const now = Date.now();
+    const trippedState = this.getTrippedBackendState(now);
+    if (trippedState) {
+      this.isAvailable = false;
+      this.lastHealthCheck = 0;
+      log.info(
+        `local LLM availability: backend circuit open for ${Math.max(0, trippedState.untilMs - now)}ms (${trippedState.reason})`
+      );
+      return false;
+    }
     if (this.isAvailable !== null && now - this.lastHealthCheck < _LocalLlmClient.HEALTH_CHECK_INTERVAL_MS) {
       return this.isAvailable;
     }
     const baseUrl = this.config.localLlmUrl.replace("localhost", "127.0.0.1").replace(/\/+$/, "");
+    let sawUnauthorizedProbe = false;
     for (const serverConfig of LOCAL_SERVERS) {
       const healthUrl = `${baseUrl}${serverConfig.healthEndpoint}`;
       log.debug(`checking ${serverConfig.type} at ${healthUrl}`);
@@ -3523,6 +3589,9 @@ var LocalLlmClient = class _LocalLlmClient {
         log.info(`detected ${serverConfig.type} at ${baseUrl}`);
         return true;
       }
+      if (result.status === 401 || result.status === 403) {
+        sawUnauthorizedProbe = true;
+      }
     }
     try {
       const modelsUrl = `${baseUrl}/v1/models`;
@@ -3534,11 +3603,19 @@ var LocalLlmClient = class _LocalLlmClient {
         log.info(`detected generic OpenAI-compatible server at ${baseUrl}`);
         return true;
       }
+      if (result.status === 401 || result.status === 403) {
+        sawUnauthorizedProbe = true;
+      }
     } catch {
     }
     this.isAvailable = false;
     this.detectedType = null;
     this.lastHealthCheck = now;
+    if (sawUnauthorizedProbe) {
+      log.warn(
+        `local LLM availability probe was unauthorized at ${baseUrl}; verify localLlmApiKey and localLlmAuthHeader settings`
+      );
+    }
     log.debug("local LLM not available at", baseUrl);
     return false;
   }
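
With fetchWithTimeout now reporting the HTTP status alongside ok/data, checkAvailability can distinguish a backend that is down from one that is rejecting credentials. A condensed sketch of that probe flow, with assumed endpoint lists and helper names:

// Hypothetical condensed version of the status-aware probing above.
async function probe(url, timeoutMs) {
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), timeoutMs);
  try {
    const response = await fetch(url, { signal: controller.signal });
    clearTimeout(timeout);
    return { ok: response.ok, status: response.status };
  } catch {
    clearTimeout(timeout);
    return { ok: false, status: null }; // network error: no status at all
  }
}

async function checkBackends(baseUrl, endpoints) {
  let sawUnauthorized = false;
  for (const endpoint of endpoints) {
    const result = await probe(`${baseUrl}${endpoint}`, 3000);
    if (result.ok) return true;
    // 401/403 means the server answered but rejected our credentials.
    if (result.status === 401 || result.status === 403) sawUnauthorized = true;
  }
  if (sawUnauthorized) {
    console.warn(`probe unauthorized at ${baseUrl}; check API key settings`);
  }
  return false;
}

// e.g. await checkBackends("http://127.0.0.1:1234", ["/health", "/v1/models"]);
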
@@ -3908,6 +3985,22 @@ var LocalLlmClient = class _LocalLlmClient {
         clearTimeout(attemptTimeout);
       }
       if (response.ok) break;
+      if (response.status >= 500 && attempt < maxAttempts) {
+        try {
+          const errorText = await response.clone().text();
+          const nonRecoverableReason = this.extractNonRecoverableBackendReasonFromErrorText(errorText);
+          if (nonRecoverableReason) {
+            this.markBackendUnavailable(
+              nonRecoverableReason,
+              this.config.localLlm400CooldownMs
+            );
+            this.consecutive400s = 0;
+            return null;
+          }
+        } catch (e) {
+          log.debug(`local LLM failed to inspect retryable error body: ${e}`);
+        }
+      }
       if (response.status < 500 || attempt >= maxAttempts) break;
       const backoffMs = this.config.localLlmRetryBackoffMs * attempt;
       log.warn(
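
The retry loop peeks at 5xx bodies through response.clone() because a fetch body is a one-shot stream: reading it directly would leave nothing for the later !response.ok error-reporting path. A self-contained demonstration using a constructed Response:

// A Response body can be consumed only once; clone() forks the stream so
// two readers can each get the full text. Runs on Node 18+ (global Response).
const response = new Response('{"error":{"message":"Failed to load model"}}', { status: 500 });
const peek = await response.clone().text(); // classify from the clone...
const full = await response.text(); // ...the original stays readable
console.log(peek === full); // true
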
@@ -3932,8 +4025,9 @@ var LocalLlmClient = class _LocalLlmClient {
     }
     if (!response.ok) {
       let reason = "";
+      let errorText = "";
       try {
-        const errorText = await response.text();
+        errorText = await response.text();
         try {
           const parsed = JSON.parse(errorText);
           reason = parsed?.error?.message ? ` \u2014 ${parsed.error.message}` : "";
@@ -3946,6 +4040,15 @@ var LocalLlmClient = class _LocalLlmClient {
       log.warn(
         `local LLM request failed: ${response.status} ${response.statusText}${reason} (op=${operation}, model=${this.config.localLlmModel}, url=${chatUrl}, promptChars=${promptChars}, maxTokens=${requestBody.max_tokens})`
       );
+      const nonRecoverableReason = this.extractNonRecoverableBackendReason(reason) ?? this.extractNonRecoverableBackendReasonFromErrorText(errorText);
+      if (nonRecoverableReason) {
+        this.markBackendUnavailable(
+          nonRecoverableReason,
+          this.config.localLlm400CooldownMs
+        );
+        this.consecutive400s = 0;
+        return null;
+      }
       if (response.status === 400) {
         this.consecutive400s += 1;
         if (this.consecutive400s >= this.config.localLlm400TripThreshold) {
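
The failure paths now classify error text against a fixed list of messages that indicate an unrecoverable backend (model load failures, macOS code-signing and dylib problems) and trip the circuit immediately, rather than waiting out the consecutive-400 threshold. A standalone sketch of that classification:

// Hypothetical standalone version of the classifier; the regex mirrors the
// one in the diff (minus the backend-specific llm_engine_mlx_amphibian term).
const NON_RECOVERABLE = /Failed to load model|Library not loaded|different Team IDs|code signature/i;

function fatalReason(errorText) {
  const direct = errorText.match(NON_RECOVERABLE)?.[0] ?? null;
  if (direct) return direct;
  try {
    // OpenAI-compatible servers wrap errors as {"error":{"message":"..."}}.
    const parsed = JSON.parse(errorText);
    return (parsed?.error?.message ?? "").match(NON_RECOVERABLE)?.[0] ?? null;
  } catch {
    return null;
  }
}

console.log(fatalReason('{"error":{"message":"Failed to load model X"}}')); // "Failed to load model"
console.log(fatalReason("503 Service Unavailable")); // null: retryable
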
@@ -3997,6 +4100,13 @@ var LocalLlmClient = class _LocalLlmClient {
       }
       log.warn(`local LLM request error: op=${operation} error=${errMsg}`);
       this.isAvailable = false;
+      const nonRecoverableReason = this.extractNonRecoverableBackendReason(errMsg);
+      if (nonRecoverableReason) {
+        this.markBackendUnavailable(
+          nonRecoverableReason,
+          this.config.localLlm400CooldownMs
+        );
+      }
       return null;
     } finally {
       if (queueMeta) {
@@ -4019,6 +4129,11 @@ var LocalLlmClient = class _LocalLlmClient {
     try {
       const result = await this.fetchWithTimeout(modelsUrl, 3e3);
       if (!result.ok) {
+        if (result.status === 401 || result.status === 403) {
+          log.warn(
+            `Local LLM: unauthorized while fetching models from ${modelsUrl}; verify localLlmApiKey and localLlmAuthHeader settings`
+          );
+        }
         log.warn(`Local LLM: Failed to fetch models from ${modelsUrl} - server returned error`);
         return null;
       }
@@ -31330,4 +31445,4 @@ export {
   EngramAccessInputError,
   EngramAccessService
 };
-//# sourceMappingURL=chunk-BEELVTEN.js.map
+//# sourceMappingURL=chunk-WUKDMJRD.js.map