@eko-ai/eko 4.1.2 → 4.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +22 -9
- package/dist/index.cjs.map +1 -1
- package/dist/index.esm.js +22 -9
- package/dist/index.esm.js.map +1 -1
- package/dist/llm/react.d.ts.map +1 -1
- package/dist/llm/rlm.d.ts.map +1 -1
- package/package.json +2 -2
package/dist/index.cjs
CHANGED
|
@@ -29813,7 +29813,7 @@ function withUserAgentSuffix(headers, ...userAgentSuffixParts) {
|
|
|
29813
29813
|
}
|
|
29814
29814
|
|
|
29815
29815
|
// src/version.ts
|
|
29816
|
-
var VERSION$1 = "1.5.3";
|
|
29816
|
+
var VERSION$1 = "1.5.4";
|
|
29817
29817
|
|
|
29818
29818
|
// src/provider.ts
|
|
29819
29819
|
function createOpenRouter(options = {}) {
|
|
@@ -37707,7 +37707,7 @@ class RetryLanguageModel {
|
|
|
37707
37707
|
this.llms = llms;
|
|
37708
37708
|
this.names = names || [];
|
|
37709
37709
|
context && this.setContext(context);
|
|
37710
|
-
this.stream_first_timeout = stream_first_timeout ||
|
|
37710
|
+
this.stream_first_timeout = stream_first_timeout || 45000;
|
|
37711
37711
|
this.stream_token_timeout = stream_token_timeout || 180000;
|
|
37712
37712
|
if (this.names.indexOf("default") == -1) {
|
|
37713
37713
|
this.names.push("default");
|
|
@@ -37874,12 +37874,11 @@ class RetryLanguageModel {
|
|
|
37874
37874
|
}
|
|
37875
37875
|
lastError = e;
|
|
37876
37876
|
if (Log.isEnableInfo()) {
|
|
37877
|
-
Log.info(`LLM stream request, name: ${name} => `, {
|
|
37878
|
-
tools: _options.tools,
|
|
37877
|
+
Log.info(`LLM stream request, name: ${name} => `, e, {
|
|
37879
37878
|
messages: _options.prompt,
|
|
37879
|
+
tools: _options.tools,
|
|
37880
37880
|
});
|
|
37881
37881
|
}
|
|
37882
|
-
Log.error(`LLM error, name: ${name} => `, e);
|
|
37883
37882
|
}
|
|
37884
37883
|
}
|
|
37885
37884
|
return Promise.reject(lastError ? lastError : new Error("No LLM available"));
|
|
@@ -38383,14 +38382,28 @@ async function callLLM(rlm, request, streamCallback, errorHandler, finishHandler
|
|
|
38383
38382
|
}
|
|
38384
38383
|
}
|
|
38385
38384
|
catch (e) {
|
|
38386
|
-
if (retryNum < config$1.maxRetryNum) {
|
|
38387
|
-
await sleep(200 * (retryNum + 1) * (retryNum + 1));
|
|
38385
|
+
if (e instanceof Error && e.name === "AbortError") {
|
|
38388
38386
|
if (errorHandler) {
|
|
38389
38387
|
await errorHandler(request, e, retryNum);
|
|
38390
38388
|
}
|
|
38391
|
-
|
|
38389
|
+
Log.warn("callLLM abort error: ", e);
|
|
38390
|
+
return streamText
|
|
38391
|
+
? [
|
|
38392
|
+
{ type: "text", text: streamText },
|
|
38393
|
+
...toolParts,
|
|
38394
|
+
]
|
|
38395
|
+
: toolParts;
|
|
38396
|
+
}
|
|
38397
|
+
else {
|
|
38398
|
+
if (retryNum < config$1.maxRetryNum) {
|
|
38399
|
+
await sleep(200 * (retryNum + 1) * (retryNum + 1));
|
|
38400
|
+
if (errorHandler) {
|
|
38401
|
+
await errorHandler(request, e, retryNum);
|
|
38402
|
+
}
|
|
38403
|
+
return callLLM(rlm, request, streamCallback, errorHandler, finishHandler, ++retryNum);
|
|
38404
|
+
}
|
|
38405
|
+
throw e;
|
|
38392
38406
|
}
|
|
38393
|
-
throw e;
|
|
38394
38407
|
}
|
|
38395
38408
|
finally {
|
|
38396
38409
|
reader && reader.releaseLock();
|