fss-link 1.2.15 → 1.2.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/bundle/fss-link.js  +19 -12
  2. package/package.json  +1 -1

package/bundle/fss-link.js CHANGED
@@ -19872,11 +19872,7 @@ var init_openaiContentGenerator = __esm({
   if (this.contentGeneratorConfig.authType === "ollama") {
   return true;
   }
- const baseUrl = this.contentGeneratorConfig.baseUrl;
- if (!baseUrl)
- return false;
- return baseUrl.includes("localhost:11434") || baseUrl.includes("127.0.0.1:11434") || baseUrl.includes(":11434/v1") || baseUrl.includes(":11435/v1") || // Support proxy port
- baseUrl.includes("/ollama");
+ return false;
   }
   /**
   * Determine if this is a custom OpenAI-compatible endpoint (proxy, local server, etc.)
@@ -22347,7 +22343,7 @@ async function createContentGeneratorConfig(config, authType) {
   async function createContentGenerator(config, gcConfig, sessionId2) {
   if (DEBUG_CONTENT)
   console.log(`\u{1F41B} DEBUG createContentGenerator: authType=${config.authType}, apiKey=${config.apiKey}, baseUrl=${config.baseUrl}`);
- const version = "1.2.15";
+ const version = "1.2.17";
   const userAgent = `FSS-Link/${version} (${process.platform}; ${process.arch})`;
   const baseHeaders = {
   "User-Agent": userAgent
@@ -68985,7 +68981,7 @@ var init_turn = __esm({
   /*curated*/
   true
   ), req];
- await reportError(error, "Error when talking to Gemini API", contextForReport, "Turn.run-sendMessageStream");
+ await reportError(error, "Error when talking to LLM API", contextForReport, "Turn.run-sendMessageStream");
   const status = typeof error === "object" && error !== null && "status" in error && typeof error.status === "number" ? error.status : void 0;
   const structuredError = {
   message: getErrorMessage(error),
@@ -86148,15 +86144,20 @@ var init_modelManager = __esm({
   /**
   * Normalize base URLs for OpenAI-compatible endpoints
   * Inspired by BobAI proxy's excellent URL normalization
+ *
+ * CRITICAL: Only strip /v1 for OpenAI provider where SDK adds it automatically.
+ * For LM Studio and Ollama, /v1 is part of their actual API path.
   */
   normalizeEndpointUrl(url2, authType) {
   if (!url2) return void 0;
   let normalized2 = url2.trim();
- if (normalized2.endsWith("/v1")) {
- if (DEBUG_MODEL) console.log(`\u{1F527} [URL-NORMALIZE] Stripping /v1 suffix from: ${normalized2}`);
+ if (normalized2.endsWith("/v1") && authType === AuthType.USE_OPENAI) {
+ if (DEBUG_MODEL) console.log(`\u{1F527} [URL-NORMALIZE] Stripping /v1 suffix from OpenAI endpoint: ${normalized2}`);
   normalized2 = normalized2.slice(0, -3);
   }
- normalized2 = normalized2.replace(/\/+$/, "");
+ if (!normalized2.endsWith("/v1")) {
+ normalized2 = normalized2.replace(/\/+$/, "");
+ }
   if (!normalized2.startsWith("http://localhost") && !normalized2.startsWith("http://127.0.0.1") && normalized2.startsWith("http://")) {
   if (DEBUG_MODEL) console.log(`\u{1F527} [URL-NORMALIZE] Upgrading HTTP to HTTPS: ${normalized2}`);
   normalized2 = normalized2.replace("http://", "https://");
@@ -95414,7 +95415,7 @@ async function getPackageJson() {
   // packages/cli/src/utils/version.ts
   async function getCliVersion() {
   const pkgJson = await getPackageJson();
- return "1.2.15";
+ return "1.2.17";
   }

   // packages/cli/src/ui/commands/aboutCommand.ts
@@ -95466,7 +95467,7 @@ import open4 from "open";
   import process11 from "node:process";

   // packages/cli/src/generated/git-commit.ts
- var GIT_COMMIT_INFO = "60f96849";
+ var GIT_COMMIT_INFO = "6daf471c";

   // packages/cli/src/ui/commands/bugCommand.ts
   init_dist2();
@@ -127920,6 +127921,7 @@ async function runNonInteractive(config, input, prompt_id) {
   abortController.signal,
   prompt_id
   );
+ let hasFinishedEvent = false;
   for await (const event of responseStream) {
   if (abortController.signal.aborted) {
   console.error("Operation cancelled.");
@@ -127935,6 +127937,8 @@ async function runNonInteractive(config, input, prompt_id) {
   id: toolCallRequest.callId
   };
   functionCalls.push(fc);
+ } else if (event.type === GeminiEventType.Finished) {
+ hasFinishedEvent = true;
   }
   }
   if (functionCalls.length > 0) {
@@ -127970,6 +127974,9 @@ async function runNonInteractive(config, input, prompt_id) {
   }
   }
   currentMessages = [{ role: "user", parts: toolResponseParts }];
+ } else if (hasFinishedEvent) {
+ process.stdout.write("\n");
+ return;
   } else {
   process.stdout.write("\n");
   return;
package/package.json CHANGED
@@ -1,6 +1,6 @@
   {
   "name": "fss-link",
- "version": "1.2.15",
+ "version": "1.2.17",
   "engines": {
   "node": ">=20.0.0"
   },