@jaydennleemc/qwen-code-local 0.12.3-beta2 → 0.12.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +92 -17
  2. package/package.json +2 -2
package/dist/cli.js CHANGED
@@ -142684,11 +142684,12 @@ var init_default = __esm({
142684
142684
  return customHeaders ? { ...defaultHeaders, ...customHeaders } : defaultHeaders;
142685
142685
  }
142686
142686
  buildClient() {
142687
- const { apiKey, baseUrl, timeout: timeout2 = DEFAULT_TIMEOUT, maxRetries = DEFAULT_MAX_RETRIES } = this.contentGeneratorConfig;
142687
+ const { apiKey, baseUrl, timeout: timeout2 = DEFAULT_TIMEOUT, maxRetries = DEFAULT_MAX_RETRIES, authType } = this.contentGeneratorConfig;
142688
142688
  const defaultHeaders = this.buildHeaders();
142689
142689
  const runtimeOptions = buildRuntimeFetchOptions("openai", this.cliConfig.getProxy());
142690
+ const isLocalProvider = authType === "ollama" || authType === "lm-studio";
142690
142691
  return new OpenAI({
142691
- apiKey,
142692
+ apiKey: isLocalProvider ? "" : apiKey,
142692
142693
  baseURL: baseUrl,
142693
142694
  timeout: timeout2,
142694
142695
  maxRetries,
@@ -157729,7 +157730,7 @@ __export(geminiContentGenerator_exports, {
157729
157730
  createGeminiContentGenerator: () => createGeminiContentGenerator
157730
157731
  });
157731
157732
  function createGeminiContentGenerator(config2, gcConfig) {
157732
- const version2 = "0.12.3-beta2";
157733
+ const version2 = "0.12.6";
157733
157734
  const userAgent2 = config2.userAgent || `QwenCode/${version2} (${process.platform}; ${process.arch})`;
157734
157735
  const baseHeaders = {
157735
157736
  "User-Agent": userAgent2
@@ -157807,7 +157808,8 @@ function validateModelConfig(config2, isStrictModelProvider = false) {
157807
157808
  if (config2.authType === AuthType2.QWEN_OAUTH) {
157808
157809
  return { valid: true, errors: [] };
157809
157810
  }
157810
- if (!config2.apiKey) {
157811
+ const isLocalModelProvider = config2.authType === AuthType2.USE_OLLAMA || config2.authType === AuthType2.USE_LM_STUDIO;
157812
+ if (!config2.apiKey && !isLocalModelProvider) {
157811
157813
  if (isStrictModelProvider) {
157812
157814
  errors.push(new StrictMissingCredentialsError(config2.authType, config2.model, config2.apiKeyEnvKey));
157813
157815
  } else {
@@ -389669,7 +389671,7 @@ __name(getPackageJson, "getPackageJson");
389669
389671
  // packages/cli/src/utils/version.ts
389670
389672
  async function getCliVersion() {
389671
389673
  const pkgJson = await getPackageJson();
389672
- return "0.12.3-beta2";
389674
+ return "0.12.6";
389673
389675
  }
389674
389676
  __name(getCliVersion, "getCliVersion");
389675
389677
 
@@ -397233,7 +397235,7 @@ var formatDuration = /* @__PURE__ */ __name((milliseconds) => {
397233
397235
 
397234
397236
  // packages/cli/src/generated/git-commit.ts
397235
397237
  init_esbuild_shims();
397236
- var GIT_COMMIT_INFO = "927ad2e0";
397238
+ var GIT_COMMIT_INFO = "56e98fc24";
397237
397239
 
397238
397240
  // packages/cli/src/utils/systemInfo.ts
397239
397241
  async function getNpmVersion() {
@@ -435868,7 +435870,12 @@ function AuthDialog() {
435868
435870
  "http://localhost:11434/v1"
435869
435871
  );
435870
435872
  const [ollamaApiKey, setOllamaApiKey] = (0, import_react83.useState)("");
435871
- const [ollamaStep, setOllamaStep] = (0, import_react83.useState)("baseUrl");
435873
+ const [ollamaStep, setOllamaStep] = (0, import_react83.useState)(
435874
+ "baseUrl"
435875
+ );
435876
+ const [ollamaModels, setOllamaModels] = (0, import_react83.useState)([]);
435877
+ const [loadingOllamaModels, setLoadingOllamaModels] = (0, import_react83.useState)(false);
435878
+ const [selectedOllamaModel, setSelectedOllamaModel] = (0, import_react83.useState)("");
435872
435879
  const mainItems = [
435873
435880
  {
435874
435881
  key: AuthType2.QWEN_OAUTH,
@@ -436035,19 +436042,46 @@ function AuthDialog() {
436035
436042
  }, "handleLmStudioSubmit");
436036
436043
  const handleOllamaSubmit = /* @__PURE__ */ __name(async () => {
436037
436044
  setErrorMessage(null);
436038
- if (!ollamaApiKey.trim()) {
436039
- setErrorMessage(t4("API key cannot be empty."));
436040
- return;
436045
+ setLoadingOllamaModels(true);
436046
+ try {
436047
+ const url2 = `${ollamaBaseUrl.replace("/v1", "")}/api/tags`;
436048
+ const response = await fetch(url2, {
436049
+ method: "GET",
436050
+ headers: ollamaApiKey ? { Authorization: `Bearer ${ollamaApiKey}` } : {}
436051
+ });
436052
+ if (!response.ok) {
436053
+ throw new Error(`Failed to fetch models: ${response.status}`);
436054
+ }
436055
+ const data = await response.json();
436056
+ const models = (data.models || []).map((m3) => m3.name);
436057
+ if (models.length === 0) {
436058
+ setErrorMessage(t4("No models found on Ollama server"));
436059
+ return;
436060
+ }
436061
+ setOllamaModels(models);
436062
+ setSelectedOllamaModel(models[0]);
436063
+ setOllamaStep("models");
436064
+ } catch (err) {
436065
+ setErrorMessage(
436066
+ t4("Failed to connect to Ollama: {{error}}", {
436067
+ error: err instanceof Error ? err.message : "Unknown error"
436068
+ })
436069
+ );
436070
+ } finally {
436071
+ setLoadingOllamaModels(false);
436041
436072
  }
436073
+ }, "handleOllamaSubmit");
436074
+ const handleOllamaModelSelect = /* @__PURE__ */ __name(async () => {
436042
436075
  await onAuthSelect(AuthType2.USE_OLLAMA, {
436043
436076
  apiKey: ollamaApiKey,
436044
- baseUrl: ollamaBaseUrl
436077
+ baseUrl: ollamaBaseUrl,
436078
+ model: selectedOllamaModel
436045
436079
  });
436046
- }, "handleOllamaSubmit");
436080
+ }, "handleOllamaModelSelect");
436047
436081
  const handleGoBack = /* @__PURE__ */ __name(() => {
436048
436082
  setErrorMessage(null);
436049
436083
  onAuthError(null);
436050
- if (viewLevel === "region-select" || viewLevel === "custom-info" || viewLevel === "lm-studio-input" || viewLevel === "ollama-input") {
436084
+ if (viewLevel === "region-select" || viewLevel === "custom-info" || viewLevel === "lm-studio-input" || viewLevel === "ollama-input" || viewLevel === "ollama-models") {
436051
436085
  setViewLevel("main");
436052
436086
  setLmStudioStep("baseUrl");
436053
436087
  setOllamaStep("baseUrl");
@@ -436075,7 +436109,9 @@ function AuthDialog() {
436075
436109
  return;
436076
436110
  }
436077
436111
  if (viewLevel === "ollama-input") {
436078
- if (ollamaStep === "apiKey") {
436112
+ if (ollamaStep === "models") {
436113
+ setOllamaStep("apiKey");
436114
+ } else if (ollamaStep === "apiKey") {
436079
436115
  setOllamaStep("baseUrl");
436080
436116
  } else {
436081
436117
  handleGoBack();
@@ -436209,7 +436245,35 @@ function AuthDialog() {
436209
436245
  }
436210
436246
  ) }),
436211
436247
  errorMessage && /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 1, children: /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.status.error, children: errorMessage }) }),
436212
- /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 1, children: /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.text.secondary, children: t4("\u2191\u2193 to navigate, Enter to submit, Esc to go back") }) })
436248
+ /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 1, children: /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.text.secondary, children: t4("\u2191\u2193 to navigate, Enter to fetch models, Esc to go back") }) })
436249
+ ] }),
436250
+ ollamaStep === "models" && /* @__PURE__ */ (0, import_jsx_runtime76.jsxs)(import_jsx_runtime76.Fragment, { children: [
436251
+ /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 1, children: /* @__PURE__ */ (0, import_jsx_runtime76.jsxs)(Text3, { color: theme.text.secondary, children: [
436252
+ t4("Server"),
436253
+ ":",
436254
+ " ",
436255
+ /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.text.primary, children: ollamaBaseUrl })
436256
+ ] }) }),
436257
+ /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 1, children: /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.text.secondary, children: t4("Select Model:") }) }),
436258
+ /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 0, children: loadingOllamaModels ? /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.text.secondary, children: t4("Loading models...") }) : /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(
436259
+ DescriptiveRadioButtonSelect,
436260
+ {
436261
+ items: ollamaModels.map((m3) => ({
436262
+ key: m3,
436263
+ title: m3,
436264
+ description: "",
436265
+ value: m3
436266
+ })),
436267
+ initialIndex: ollamaModels.indexOf(selectedOllamaModel),
436268
+ onSelect: (val) => {
436269
+ setSelectedOllamaModel(val);
436270
+ handleOllamaModelSelect();
436271
+ },
436272
+ maxItemsToShow: 5
436273
+ }
436274
+ ) }),
436275
+ errorMessage && /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 1, children: /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.status.error, children: errorMessage }) }),
436276
+ /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Box_default, { marginTop: 1, children: /* @__PURE__ */ (0, import_jsx_runtime76.jsx)(Text3, { color: theme.text.secondary, children: t4("\u2191\u2193 to navigate, Enter to select, Esc to go back") }) })
436213
436277
  ] })
436214
436278
  ] }), "renderOllamaInputView");
436215
436279
  const getViewTitle = /* @__PURE__ */ __name(() => {
@@ -443283,7 +443347,7 @@ var useAuthCommand = /* @__PURE__ */ __name((settings, config2, addItem, onAuthC
443283
443347
  setAuthError(null);
443284
443348
  setIsAuthDialogOpen(false);
443285
443349
  setIsAuthenticating(true);
443286
- if (authType === AuthType2.USE_OPENAI || authType === AuthType2.USE_LM_STUDIO) {
443350
+ if (authType === AuthType2.USE_OPENAI || authType === AuthType2.USE_LM_STUDIO || authType === AuthType2.USE_OLLAMA) {
443287
443351
  if (credentials) {
443288
443352
  const settingsGenerationConfig = settings.merged.model?.generationConfig;
443289
443353
  config2.updateCredentials(
@@ -443294,6 +443358,17 @@ var useAuthCommand = /* @__PURE__ */ __name((settings, config2, addItem, onAuthC
443294
443358
  },
443295
443359
  settingsGenerationConfig
443296
443360
  );
443361
+ if (authType === AuthType2.USE_OLLAMA && credentials.model) {
443362
+ const authTypeScope = getPersistScopeForModelSelection(settings);
443363
+ settings.setValue(authTypeScope, "security.auth", {
443364
+ selectedType: authType
443365
+ });
443366
+ settings.setValue(authTypeScope, "model.name", credentials.model);
443367
+ updateSettingsFilePreservingFormat(
443368
+ settings.user.path,
443369
+ settings.user.originalSettings
443370
+ );
443371
+ }
443297
443372
  await performAuth(authType, credentials);
443298
443373
  }
443299
443374
  return;
@@ -456129,7 +456204,7 @@ var QwenAgent = class {
456129
456204
  async initialize(args) {
456130
456205
  this.clientCapabilities = args.clientCapabilities;
456131
456206
  const authMethods = buildAuthMethods();
456132
- const version2 = "0.12.3-beta2";
456207
+ const version2 = "0.12.6";
456133
456208
  return {
456134
456209
  protocolVersion: PROTOCOL_VERSION,
456135
456210
  agentInfo: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@jaydennleemc/qwen-code-local",
3
- "version": "0.12.3-beta2",
3
+ "version": "0.12.6",
4
4
  "engines": {
5
5
  "node": ">=20.0.0"
6
6
  },
@@ -13,7 +13,7 @@
13
13
  "url": "git+https://github.com/jaydennleemc/qwen-code.git"
14
14
  },
15
15
  "config": {
16
- "sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.12.3"
16
+ "sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.12.6"
17
17
  },
18
18
  "scripts": {
19
19
  "start": "cross-env node scripts/start.js",