@marizmelo/llm-cli 0.0.4 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundle/gemini.js +24 -17
- package/package.json +1 -1
package/bundle/gemini.js
CHANGED
@@ -139658,8 +139658,8 @@ var GIT_COMMIT_INFO, CLI_VERSION;
 var init_git_commit = __esm({
   "packages/core/dist/src/generated/git-commit.js"() {
     "use strict";
-    GIT_COMMIT_INFO = "
-    CLI_VERSION = "0.0.4";
+    GIT_COMMIT_INFO = "65cf1981";
+    CLI_VERSION = "0.0.5";
   }
 });
 
@@ -141938,11 +141938,7 @@ var init_ollama_provider = __esm({
         messages,
         stream: false,
         ...tools.length > 0 ? { tools } : {},
-        options: {
-          temperature: request3.generationConfig?.temperature ?? request3.config?.temperature ?? 0,
-          top_p: request3.generationConfig?.topP ?? request3.config?.topP ?? 1,
-          num_predict: request3.generationConfig?.maxOutputTokens ?? request3.config?.maxOutputTokens
-        }
+        options: this.buildOptions(request3)
       };
       const response = await fetch(`${this.baseUrl}/api/chat`, {
         method: "POST",
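Both call sites (stream: false here, stream: true in a later hunk) now delegate sampling parameters to a shared buildOptions helper, which this diff adds further down. A minimal TypeScript sketch of the payload assembly, assuming a hypothetical assembleChatRequest helper and unknown element types; only the fields visible in this hunk are modeled:

interface OllamaChatRequest {
  messages: unknown[];
  stream: boolean;
  tools?: unknown[];
  options?: Record<string, number>;
}

function assembleChatRequest(
  messages: unknown[],
  tools: unknown[],
  options: Record<string, number>
): OllamaChatRequest {
  return {
    messages,
    stream: false,
    // Conditional spread: the tools key appears only when there is at least
    // one tool, mirroring `...tools.length > 0 ? { tools } : {}` above.
    ...(tools.length > 0 ? { tools } : {}),
    options
  };
}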
@@ -141952,7 +141948,8 @@ var init_ollama_provider = __esm({
         body: JSON.stringify(ollamaRequest)
       });
       if (!response.ok) {
-
+        const body = await response.text().catch(() => "");
+        throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${body}`);
       }
       const ollamaResponse = await response.json();
       return this.convertFromOllamaChatResponse(ollamaResponse);
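The new error path deserves a note: it reads the response body before throwing, so Ollama's error detail reaches the caller, and the .catch(() => "") guarantees that a failed body read can never mask the original HTTP status. The same pattern in isolation, as a self-contained TypeScript sketch (postJsonOrThrow is a hypothetical name, not from the bundle):

async function postJsonOrThrow(url: string, payload: unknown): Promise<unknown> {
  const response = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload)
  });
  if (!response.ok) {
    // Best-effort read of the error body; fall back to "" rather than let a
    // secondary stream failure replace the HTTP-status error thrown below.
    const body = await response.text().catch(() => "");
    throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${body}`);
  }
  return response.json();
}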
@@ -141965,11 +141962,7 @@ var init_ollama_provider = __esm({
         messages,
         stream: true,
         ...tools.length > 0 ? { tools } : {},
-        options: {
-          temperature: request3.generationConfig?.temperature ?? request3.config?.temperature ?? 0,
-          top_p: request3.generationConfig?.topP ?? request3.config?.topP ?? 1,
-          num_predict: request3.generationConfig?.maxOutputTokens ?? request3.config?.maxOutputTokens
-        }
+        options: this.buildOptions(request3)
       };
       const response = await fetch(`${this.baseUrl}/api/chat`, {
         method: "POST",
@@ -141979,7 +141972,8 @@ var init_ollama_provider = __esm({
         body: JSON.stringify(ollamaRequest)
       });
       if (!response.ok) {
-
+        const body = await response.text().catch(() => "");
+        throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${body}`);
       }
       const reader = response.body?.getReader();
       if (!reader) {
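This hunk ends just as the streaming body's reader is obtained; the consumption loop itself is outside the diff. For orientation, a hedged sketch of how Ollama's newline-delimited JSON stream is typically drained (readNdjson and every name in it are assumptions, not taken from the bundle):

async function* readNdjson(body: ReadableStream<Uint8Array>): AsyncGenerator<unknown> {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let buffered = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffered += decoder.decode(value, { stream: true });
    // Each complete line is one JSON chunk in Ollama's streaming format.
    let newline: number;
    while ((newline = buffered.indexOf("\n")) >= 0) {
      const line = buffered.slice(0, newline).trim();
      buffered = buffered.slice(newline + 1);
      if (line) yield JSON.parse(line);
    }
  }
}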
@@ -142027,6 +142021,19 @@ var init_ollama_provider = __esm({
       validateConfig(config) {
         return config.provider === "ollama" && !!config.model;
       }
+      buildOptions(request3) {
+        const options2 = {};
+        const temp = request3.generationConfig?.temperature ?? request3.config?.temperature;
+        if (temp !== void 0)
+          options2.temperature = temp;
+        const topP = request3.generationConfig?.topP ?? request3.config?.topP;
+        if (topP !== void 0)
+          options2.top_p = topP;
+        const maxTokens = request3.generationConfig?.maxOutputTokens ?? request3.config?.maxOutputTokens;
+        if (maxTokens !== void 0)
+          options2.num_predict = maxTokens;
+        return options2;
+      }
       convertToOllamaMessages(request3) {
         const messages = [];
         if (request3.systemInstruction) {
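This helper is the substantive change of the release. The old inline object coerced unset values (temperature fell back to 0, top_p to 1), so every request pinned deterministic sampling even when the user had configured nothing; buildOptions emits a key only when a value was explicitly set, leaving everything else to the model's server-side defaults. The same logic as a typed sketch, with SamplingConfig standing in for the bundle's request shape:

interface SamplingConfig {
  temperature?: number;
  topP?: number;
  maxOutputTokens?: number;
}

// Map configured values to Ollama option names, skipping anything unset
// so the server's model defaults apply.
function buildOptions(cfg: SamplingConfig): Record<string, number> {
  const options: Record<string, number> = {};
  if (cfg.temperature !== undefined) options.temperature = cfg.temperature;
  if (cfg.topP !== undefined) options.top_p = cfg.topP;
  if (cfg.maxOutputTokens !== undefined) options.num_predict = cfg.maxOutputTokens;
  return options;
}

Sending an empty options object is safe here: Ollama treats options as an optional parameter map and falls back to model defaults for any key that is absent.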
@@ -142365,7 +142372,7 @@ function createContentGeneratorConfig(config, authType) {
   return contentGeneratorConfig;
 }
 async function createContentGenerator(config, gcConfig, sessionId2) {
-  const version2 = "0.0.4";
+  const version2 = "0.0.5";
   const userAgent2 = `GeminiCLI/${version2} (${process.platform}; ${process.arch})`;
   const baseHeaders = {
     "User-Agent": userAgent2
@@ -274727,7 +274734,7 @@ async function getPackageJson() {
 // packages/cli/src/utils/version.ts
 async function getCliVersion() {
   const pkgJson = await getPackageJson();
-  return "0.0.4";
+  return "0.0.5";
 }
 
 // packages/cli/src/ui/commands/aboutCommand.ts
@@ -274779,7 +274786,7 @@ init_open();
 import process30 from "node:process";
 
 // packages/cli/src/generated/git-commit.ts
-var GIT_COMMIT_INFO2 = "
+var GIT_COMMIT_INFO2 = "37f235d7";
 
 // packages/cli/src/ui/commands/bugCommand.ts
 init_dist3();