@marizmelo/llm-cli 0.0.5 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/bundle/gemini.js +43 -19
  2. package/package.json +1 -1
package/bundle/gemini.js CHANGED
@@ -139658,8 +139658,8 @@ var GIT_COMMIT_INFO, CLI_VERSION;
139658
139658
  var init_git_commit = __esm({
139659
139659
  "packages/core/dist/src/generated/git-commit.js"() {
139660
139660
  "use strict";
139661
- GIT_COMMIT_INFO = "65cf1981";
139662
- CLI_VERSION = "0.0.5";
139661
+ GIT_COMMIT_INFO = "4e5c77fc";
139662
+ CLI_VERSION = "0.0.6";
139663
139663
  }
139664
139664
  });
139665
139665
 
@@ -141926,13 +141926,14 @@ var init_ollama_provider = __esm({
141926
141926
  name = "ollama";
141927
141927
  baseUrl;
141928
141928
  model;
141929
+ toolsSupported = true;
141929
141930
  constructor(config, gcConfig) {
141930
141931
  this.baseUrl = config.baseUrl || "http://localhost:11434";
141931
141932
  this.model = config.model;
141932
141933
  }
141933
141934
  async generateContent(request3, userPromptId) {
141934
141935
  const messages = this.convertToOllamaMessages(request3);
141935
- const tools = this.convertToOllamaTools(request3);
141936
+ const tools = this.toolsSupported ? this.convertToOllamaTools(request3) : [];
141936
141937
  const ollamaRequest = {
141937
141938
  model: this.model,
141938
141939
  messages,
@@ -141942,13 +141943,15 @@ var init_ollama_provider = __esm({
141942
141943
  };
141943
141944
  const response = await fetch(`${this.baseUrl}/api/chat`, {
141944
141945
  method: "POST",
141945
- headers: {
141946
- "Content-Type": "application/json"
141947
- },
141946
+ headers: { "Content-Type": "application/json" },
141948
141947
  body: JSON.stringify(ollamaRequest)
141949
141948
  });
141950
141949
  if (!response.ok) {
141951
141950
  const body = await response.text().catch(() => "");
141951
+ if (this.isToolsNotSupportedError(body) && tools.length > 0) {
141952
+ this.toolsSupported = false;
141953
+ return this.generateContent(request3, userPromptId);
141954
+ }
141952
141955
  throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${body}`);
141953
141956
  }
141954
141957
  const ollamaResponse = await response.json();
@@ -141956,7 +141959,7 @@ var init_ollama_provider = __esm({
141956
141959
  }
141957
141960
  async generateContentStream(request3, userPromptId) {
141958
141961
  const messages = this.convertToOllamaMessages(request3);
141959
- const tools = this.convertToOllamaTools(request3);
141962
+ const tools = this.toolsSupported ? this.convertToOllamaTools(request3) : [];
141960
141963
  const ollamaRequest = {
141961
141964
  model: this.model,
141962
141965
  messages,
@@ -141966,13 +141969,15 @@ var init_ollama_provider = __esm({
141966
141969
  };
141967
141970
  const response = await fetch(`${this.baseUrl}/api/chat`, {
141968
141971
  method: "POST",
141969
- headers: {
141970
- "Content-Type": "application/json"
141971
- },
141972
+ headers: { "Content-Type": "application/json" },
141972
141973
  body: JSON.stringify(ollamaRequest)
141973
141974
  });
141974
141975
  if (!response.ok) {
141975
141976
  const body = await response.text().catch(() => "");
141977
+ if (this.isToolsNotSupportedError(body) && tools.length > 0) {
141978
+ this.toolsSupported = false;
141979
+ return this.generateContentStream(request3, userPromptId);
141980
+ }
141976
141981
  throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${body}`);
141977
141982
  }
141978
141983
  const reader = response.body?.getReader();
@@ -142021,6 +142026,24 @@ var init_ollama_provider = __esm({
142021
142026
  validateConfig(config) {
142022
142027
  return config.provider === "ollama" && !!config.model;
142023
142028
  }
142029
+ isToolsNotSupportedError(body) {
142030
+ const lower = body.toLowerCase();
142031
+ return lower.includes("does not support tools") || lower.includes("tool use is not supported");
142032
+ }
142033
+ parseToolArgs(args) {
142034
+ if (args === null || args === void 0)
142035
+ return {};
142036
+ if (typeof args === "object" && !Array.isArray(args))
142037
+ return args;
142038
+ if (typeof args === "string") {
142039
+ try {
142040
+ return JSON.parse(args);
142041
+ } catch {
142042
+ return {};
142043
+ }
142044
+ }
142045
+ return {};
142046
+ }
142024
142047
  buildOptions(request3) {
142025
142048
  const options2 = {};
142026
142049
  const temp = request3.generationConfig?.temperature ?? request3.config?.temperature;
@@ -142114,12 +142137,11 @@ var init_ollama_provider = __esm({
142114
142137
  }
142115
142138
  if (message?.tool_calls) {
142116
142139
  for (const toolCall of message.tool_calls) {
142117
- if (toolCall.function) {
142118
- const args = typeof toolCall.function.arguments === "string" ? JSON.parse(toolCall.function.arguments) : toolCall.function.arguments || {};
142140
+ if (toolCall.function?.name) {
142119
142141
  parts.push({
142120
142142
  functionCall: {
142121
142143
  name: toolCall.function.name,
142122
- args
142144
+ args: this.parseToolArgs(toolCall.function.arguments)
142123
142145
  }
142124
142146
  });
142125
142147
  }
@@ -142173,10 +142195,12 @@ var init_ollama_provider = __esm({
142173
142195
  }
142174
142196
  if (chunk.message?.tool_calls) {
142175
142197
  for (const toolCall of chunk.message.tool_calls) {
142176
- if (toolCall.function) {
142177
- const args = typeof toolCall.function.arguments === "string" ? JSON.parse(toolCall.function.arguments) : toolCall.function.arguments || {};
142198
+ if (toolCall.function?.name) {
142178
142199
  accumulatedToolCalls.push({
142179
- function: { name: toolCall.function.name, arguments: args }
142200
+ function: {
142201
+ name: toolCall.function.name,
142202
+ arguments: this.parseToolArgs(toolCall.function.arguments)
142203
+ }
142180
142204
  });
142181
142205
  }
142182
142206
  }
@@ -142372,7 +142396,7 @@ function createContentGeneratorConfig(config, authType) {
142372
142396
  return contentGeneratorConfig;
142373
142397
  }
142374
142398
  async function createContentGenerator(config, gcConfig, sessionId2) {
142375
- const version2 = "0.0.5";
142399
+ const version2 = "0.0.6";
142376
142400
  const userAgent2 = `GeminiCLI/${version2} (${process.platform}; ${process.arch})`;
142377
142401
  const baseHeaders = {
142378
142402
  "User-Agent": userAgent2
@@ -274734,7 +274758,7 @@ async function getPackageJson() {
274734
274758
  // packages/cli/src/utils/version.ts
274735
274759
  async function getCliVersion() {
274736
274760
  const pkgJson = await getPackageJson();
274737
- return "0.0.5";
274761
+ return "0.0.6";
274738
274762
  }
274739
274763
 
274740
274764
  // packages/cli/src/ui/commands/aboutCommand.ts
@@ -274786,7 +274810,7 @@ init_open();
274786
274810
  import process30 from "node:process";
274787
274811
 
274788
274812
  // packages/cli/src/generated/git-commit.ts
274789
- var GIT_COMMIT_INFO2 = "37f235d7";
274813
+ var GIT_COMMIT_INFO2 = "4e5c77fc";
274790
274814
 
274791
274815
  // packages/cli/src/ui/commands/bugCommand.ts
274792
274816
  init_dist3();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@marizmelo/llm-cli",
3
- "version": "0.0.5",
3
+ "version": "0.0.6",
4
4
  "engines": {
5
5
  "node": ">=20.0.0"
6
6
  },