koishi-plugin-chatluna-google-gemini-adapter 1.3.10 → 1.3.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.cjs CHANGED
@@ -345,17 +345,21 @@ function prepareModelConfig(params, pluginConfig) {
  } else if (thinkingBudget >= 0 && thinkingBudget < 128) {
  thinkingBudget = 128;
  }
- if (model.includes("-thinking") && model.includes("gemini-3.0")) {
+ if (model.includes("gemini-3")) {
  enabledThinking = true;
- const match = model.match(/-(low|medium|high)-thinking/);
- if (match) {
- thinkingLevel = match[1];
+ thinkingBudget = void 0;
+ const match = model.match(/-(low|medium|high|tiny)-thinking/);
+ if (match && match[1]) {
  model = model.replace(`-${match[1]}-thinking`, "");
+ }
+ if (match && match[1] !== "tiny") {
+ thinkingLevel = match[1];
+ } else if (match[1] === "tiny") {
+ thinkingLevel = void 0;
+ thinkingBudget = 128;
  } else {
  thinkingLevel = "THINKING_LEVEL_UNSPECIFIED";
- model = model.replace("-thinking", "");
  }
- thinkingBudget = void 0;
  } else {
  thinkingLevel = void 0;
  }
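
In practice, the rewritten branch above means every gemini-3 model name now enables thinking: a -low/-medium/-high-thinking suffix is stripped from the name and carried over as thinkingLevel, while the new -tiny-thinking suffix is stripped and translated into a fixed thinkingBudget of 128 instead of a level. A minimal sketch of that mapping, using a hypothetical helper name (parseGemini3Thinking) and an explicit guard on the no-suffix case, rather than the plugin's exact code:

// Illustration only: restates the suffix handling shown in the hunk above.
function parseGemini3Thinking(model) {
  const match = model.match(/-(low|medium|high|tiny)-thinking/);
  const baseModel = match ? model.replace(`-${match[1]}-thinking`, "") : model;
  if (match && match[1] !== "tiny") {
    // low / medium / high become the thinkingLevel; no explicit budget is sent
    return { model: baseModel, thinkingLevel: match[1], thinkingBudget: undefined };
  }
  if (match && match[1] === "tiny") {
    // "tiny" is not passed as a level: it maps to a minimal 128-token thinking budget
    return { model: baseModel, thinkingLevel: undefined, thinkingBudget: 128 };
  }
  // no recognized suffix: leave the level unspecified, as in the else branch above
  return { model: baseModel, thinkingLevel: "THINKING_LEVEL_UNSPECIFIED", thinkingBudget: undefined };
}

// parseGemini3Thinking("gemini-3-pro-low-thinking")  -> { model: "gemini-3-pro", thinkingLevel: "low", thinkingBudget: undefined }
// parseGemini3Thinking("gemini-3-pro-tiny-thinking") -> { model: "gemini-3-pro", thinkingLevel: undefined, thinkingBudget: 128 }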
@@ -1020,6 +1024,9 @@ var GeminiClient = class extends import_client.PlatformModelAndEmbeddingsClient
  return logger;
  }
  async refreshModels(config) {
+ const thinkingModel = ["gemini-2.5-pro", "gemini-2.5-flash"];
+ const thinkingLevelModel = ["gemini-3-pro"];
+ const imageResolutionModel = ["gemini-3-pro-image"];
  try {
  const rawModels = await this._requester.getModels(config);
  if (!rawModels.length) {
@@ -1030,6 +1037,7 @@ var GeminiClient = class extends import_client.PlatformModelAndEmbeddingsClient
  }
  const models = [];
  for (const model of rawModels) {
+ const modelNameLower = model.name.toLowerCase();
  const info = {
  name: model.name,
  maxTokens: model.inputTokenLimit,
@@ -1039,11 +1047,16 @@ var GeminiClient = class extends import_client.PlatformModelAndEmbeddingsClient
  import_types2.ModelCapabilities.ToolCall
  ]
  };
- const thinkingModel = ["gemini-2.5-pro", "gemini-2.5-flash"];
- const thinkingLevelModel = ["gemini-3.0-pro"];
- const imageResolutionModel = ["gemini-3.0-pro-image"];
- if (thinkingModel.some(
- (name2) => model.name.toLowerCase().includes(name2) && !model.name.toLowerCase().includes("image")
+ if (imageResolutionModel.some(
+ (name2) => modelNameLower.includes(name2)
+ )) {
+ models.push(
+ { ...info, name: model.name + "-2K" },
+ { ...info, name: model.name + "-4K" },
+ info
+ );
+ } else if (thinkingModel.some(
+ (name2) => modelNameLower.includes(name2) && !modelNameLower.includes("image")
  )) {
  if (!model.name.includes("-thinking")) {
  models.push(
@@ -1055,18 +1068,12 @@ var GeminiClient = class extends import_client.PlatformModelAndEmbeddingsClient
  models.push(info);
  }
  } else if (thinkingLevelModel.some(
- (name2) => model.name.toLowerCase().includes(name2) && !model.name.toLowerCase().includes("image")
+ (name2) => modelNameLower.includes(name2) && !modelNameLower.includes("image")
  )) {
  models.push(
  { ...info, name: model.name + "-low-thinking" },
- info
- );
- } else if (imageResolutionModel.some(
- (name2) => model.name.toLowerCase().includes(name2)
- )) {
- models.push(
- { ...info, name: model.name + "-2K" },
- { ...info, name: model.name + "-4K" },
+ { ...info, name: model.name + "-high-thinking" },
+ { ...info, name: model.name + "-tiny-thinking" },
  info
  );
  } else {
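
Taken together, the refreshModels hunks change how each matching raw model is expanded into advertised variants: gemini-3-pro-image names gain -2K and -4K resolution entries, gemini-3-pro names gain -low/-high/-tiny-thinking entries alongside the base name, and the image-resolution check now runs before the thinking checks. A rough sketch of the expansion rules under those assumptions; expandModelVariants is an illustrative helper, not the plugin's API, and the unchanged gemini-2.5 "-thinking" handling that sits between the hunks is omitted:

// Sketch of the per-model variant expansion described above (names illustrative).
function expandModelVariants(name) {
  const lower = name.toLowerCase();
  if (lower.includes("gemini-3-pro-image")) {
    // image-capable model: advertise output-resolution variants plus the base entry
    return [`${name}-2K`, `${name}-4K`, name];
  }
  if (lower.includes("gemini-3-pro") && !lower.includes("image")) {
    // thinking-level model: low, high, and tiny variants are now all advertised
    return [`${name}-low-thinking`, `${name}-high-thinking`, `${name}-tiny-thinking`, name];
  }
  // gemini-2.5-pro / gemini-2.5-flash keep their existing "-thinking" handling (not shown in this diff)
  return [name];
}

// expandModelVariants("gemini-3-pro-image") -> ["gemini-3-pro-image-2K", "gemini-3-pro-image-4K", "gemini-3-pro-image"]
// expandModelVariants("gemini-3-pro")       -> ["gemini-3-pro-low-thinking", "gemini-3-pro-high-thinking", "gemini-3-pro-tiny-thinking", "gemini-3-pro"]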
package/lib/index.mjs CHANGED
@@ -342,17 +342,21 @@ function prepareModelConfig(params, pluginConfig) {
  } else if (thinkingBudget >= 0 && thinkingBudget < 128) {
  thinkingBudget = 128;
  }
- if (model.includes("-thinking") && model.includes("gemini-3.0")) {
+ if (model.includes("gemini-3")) {
  enabledThinking = true;
- const match = model.match(/-(low|medium|high)-thinking/);
- if (match) {
- thinkingLevel = match[1];
+ thinkingBudget = void 0;
+ const match = model.match(/-(low|medium|high|tiny)-thinking/);
+ if (match && match[1]) {
  model = model.replace(`-${match[1]}-thinking`, "");
+ }
+ if (match && match[1] !== "tiny") {
+ thinkingLevel = match[1];
+ } else if (match[1] === "tiny") {
+ thinkingLevel = void 0;
+ thinkingBudget = 128;
  } else {
  thinkingLevel = "THINKING_LEVEL_UNSPECIFIED";
- model = model.replace("-thinking", "");
  }
- thinkingBudget = void 0;
  } else {
  thinkingLevel = void 0;
  }
@@ -1017,6 +1021,9 @@ var GeminiClient = class extends PlatformModelAndEmbeddingsClient {
  return logger;
  }
  async refreshModels(config) {
+ const thinkingModel = ["gemini-2.5-pro", "gemini-2.5-flash"];
+ const thinkingLevelModel = ["gemini-3-pro"];
+ const imageResolutionModel = ["gemini-3-pro-image"];
  try {
  const rawModels = await this._requester.getModels(config);
  if (!rawModels.length) {
@@ -1027,6 +1034,7 @@ var GeminiClient = class extends PlatformModelAndEmbeddingsClient {
  }
  const models = [];
  for (const model of rawModels) {
+ const modelNameLower = model.name.toLowerCase();
  const info = {
  name: model.name,
  maxTokens: model.inputTokenLimit,
@@ -1036,11 +1044,16 @@ var GeminiClient = class extends PlatformModelAndEmbeddingsClient {
  ModelCapabilities.ToolCall
  ]
  };
- const thinkingModel = ["gemini-2.5-pro", "gemini-2.5-flash"];
- const thinkingLevelModel = ["gemini-3.0-pro"];
- const imageResolutionModel = ["gemini-3.0-pro-image"];
- if (thinkingModel.some(
- (name2) => model.name.toLowerCase().includes(name2) && !model.name.toLowerCase().includes("image")
+ if (imageResolutionModel.some(
+ (name2) => modelNameLower.includes(name2)
+ )) {
+ models.push(
+ { ...info, name: model.name + "-2K" },
+ { ...info, name: model.name + "-4K" },
+ info
+ );
+ } else if (thinkingModel.some(
+ (name2) => modelNameLower.includes(name2) && !modelNameLower.includes("image")
  )) {
  if (!model.name.includes("-thinking")) {
  models.push(
@@ -1052,18 +1065,12 @@ var GeminiClient = class extends PlatformModelAndEmbeddingsClient {
  models.push(info);
  }
  } else if (thinkingLevelModel.some(
- (name2) => model.name.toLowerCase().includes(name2) && !model.name.toLowerCase().includes("image")
+ (name2) => modelNameLower.includes(name2) && !modelNameLower.includes("image")
  )) {
  models.push(
  { ...info, name: model.name + "-low-thinking" },
- info
- );
- } else if (imageResolutionModel.some(
- (name2) => model.name.toLowerCase().includes(name2)
- )) {
- models.push(
- { ...info, name: model.name + "-2K" },
- { ...info, name: model.name + "-4K" },
+ { ...info, name: model.name + "-high-thinking" },
+ { ...info, name: model.name + "-tiny-thinking" },
  info
  );
  } else {
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "koishi-plugin-chatluna-google-gemini-adapter",
  "description": "google-gemini adapter for chatluna",
- "version": "1.3.10",
+ "version": "1.3.12",
  "main": "lib/index.cjs",
  "module": "lib/index.mjs",
  "typings": "lib/index.d.ts",
@@ -63,7 +63,7 @@
  ],
  "dependencies": {
  "@anatine/zod-openapi": "^2.2.8",
- "@chatluna/v1-shared-adapter": "^1.0.17",
+ "@chatluna/v1-shared-adapter": "^1.0.20",
  "@langchain/core": "0.3.62",
  "openapi3-ts": "^4.5.0",
  "zod": "3.25.76",
@@ -75,7 +75,7 @@
  },
  "peerDependencies": {
  "koishi": "^4.18.9",
- "koishi-plugin-chatluna": "^1.3.1",
+ "koishi-plugin-chatluna": "^1.3.4",
  "koishi-plugin-chatluna-storage-service": "^0.0.11"
  },
  "peerDependenciesMeta": {