@dahawa/hawa-cli-analysis 1.0.6 → 1.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.hcode/.env ADDED
File without changes
@@ -178,6 +178,16 @@ async function handel(request, reply, endpoint){
178
178
  return fetch(url, fetchOptions);
179
179
  }
180
180
 
181
+ if (request.body && request.body.model) {
182
+ // Filter out undefined/null values from models array
183
+ const validModels = models.filter(model => model != null && model !== '');
184
+
185
+ if (!validModels.includes(request.body.model)) {
186
+ logger.system.debug(`Model "${request.body.model}" not in allowed models, changing to "${process.env["MODEL"]}"`);
187
+ request.body.model = process.env["MODEL"];
188
+ }
189
+ }
190
+
181
191
  //console.log("请求地址: " + url);
182
192
  //转换前的请求
183
193
  let initBody = request.body;
@@ -175,8 +175,24 @@ async function handel(request, reply, endpoint){
175
175
 
176
176
  return fetch(url, fetchOptions);
177
177
  }
178
-
179
- //打印请求信息 request.body
178
+
179
+ const models = [process.env["MODEL"] , process.env["SMALL_FAST_MODEL"]]
180
+
181
+
182
+ if (request.body && request.body.model) {
183
+ // Filter out undefined/null values from models array
184
+ const validModels = models.filter(model => model != null && model !== '');
185
+
186
+ if (!validModels.includes(request.body.model)) {
187
+ logger.system.debug(`Model "${request.body.model}" not in allowed models, changing to "${process.env["MODEL"]}"`);
188
+ request.body.model = process.env["MODEL"];
189
+ }
190
+ }
191
+
192
+
193
+
194
+
195
+ //判断 request.body 中的模型,如果不属于 models 类型,修改为 process.env["MODEL"] //打印请求信息 request.body
180
196
  let processedBody = JSON.stringify(request.body);
181
197
 
182
198
  logger.system.debug('请求 body' + processedBody);
@@ -332,7 +348,7 @@ const startServer = async () => {
332
348
  logger.system.debug(`✅ Server started on port ${port}`);
333
349
 
334
350
  // 输出端口信息到标准输出,供父进程读取
335
- console.log(`PROXY_PORT:${port}`);
351
+ logger.system.debug(`PROXY_PORT:${port}`);
336
352
  } catch (err) {
337
353
  fastify.log.error(err);
338
354
  process.exit(1);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@dahawa/hawa-cli-analysis",
3
- "version": "1.0.6",
3
+ "version": "1.0.7",
4
4
  "main": "index",
5
5
  "bin": {
6
6
  "uclaude": "uclaude.js",
package/tests/test.js CHANGED
@@ -1,30 +1,30 @@
1
- import {} from "../clogger.js"
2
- import Anthropic from "@anthropic-ai/sdk";
3
-
4
-
5
- const anthropic = new Anthropic({
6
- apiKey: "sk-c1GF5uhjQpEcfqYZE3XvGf85XGpG7Rhj6E5829M3qoawzDzu", // 你的 key
7
- // 如果你走 DeepSeek 的 Anthropic 兼容端点,也加上 baseURL:
8
- baseURL: "https://api.moonshot.cn/anthropic",
9
- });
10
-
11
- const message = await anthropic.messages.create({
12
- model: "kimi-k2-0905-preview", // 用冒号
13
- max_tokens: 1000,
14
- system: "You are a helpful assistant.",
15
- messages: [
16
- {
17
- role: "user",
18
- content: [
19
- { type: "text", text: "Hi, how are you?" }
20
- ]
21
- }
22
- ]
23
- });
24
-
25
- console.log(message.content); // 用 console.log 而不是 print
26
-
27
-
28
-
29
-
30
-
1
+ import {} from "../clogger.js"
2
+ import Anthropic from "@anthropic-ai/sdk";
3
+
4
+
5
+ const anthropic = new Anthropic({
6
+ apiKey: "sk-c1GF5uhjQpEcfqYZE3XvGf85XGpG7Rhj6E5829M3qoawzDzu", // 你的 key
7
+ // 如果你走 DeepSeek 的 Anthropic 兼容端点,也加上 baseURL:
8
+ baseURL: "https://api.moonshot.cn/anthropic",
9
+ });
10
+
11
+ const message = await anthropic.messages.create({
12
+ model: "kimi-k2-0905-preview", // 用冒号
13
+ max_tokens: 1000,
14
+ system: "You are a helpful assistant.",
15
+ messages: [
16
+ {
17
+ role: "user",
18
+ content: [
19
+ { type: "text", text: "Hi, how are you?" }
20
+ ]
21
+ }
22
+ ]
23
+ });
24
+
25
+ console.log(message.content); // 用 console.log 而不是 print
26
+
27
+
28
+
29
+
30
+
package/uclaude.js CHANGED
@@ -14,7 +14,7 @@ import portManager from './port-manager.js';
14
14
  import { join } from 'path';
15
15
 
16
16
 
17
- const startServer = async (openai, base_url, port = null) => {
17
+ const startServer = async (openai, base_url, port = null ,MODEL , SMALL_FAST_MODEL) => {
18
18
  let dir = path.dirname(fileURLToPath(import.meta.url));
19
19
  const child = spawn('node ' + path.join(dir, "claude" , openai?"claude-openai-proxy.js":'claude-proxy.js'), [],{
20
20
  stdio: ['ignore', 'pipe', 'pipe'],
@@ -22,7 +22,9 @@ const startServer = async (openai, base_url, port = null) => {
22
22
  env:{
23
23
  ...process.env,
24
24
  BASE_URL: base_url,
25
- PROXY_PORT: port
25
+ PROXY_PORT: port,
26
+ MODEL:MODEL,
27
+ SMALL_FAST_MODEL:SMALL_FAST_MODEL
26
28
  }
27
29
  });
28
30
 
@@ -102,7 +104,7 @@ function start(){
102
104
  logger.error("无法获取可用端口,程序退出");
103
105
  process.exit(1);
104
106
  }
105
- startServer(true, BASE_URL, proxyPort);
107
+ startServer(true, BASE_URL, proxyPort , env['MODEL'], env['SMALL_FAST_MODEL']);
106
108
  }else{
107
109
  //启动 claude-proxy.js 代理
108
110
  proxyPort = await portManager.getAvailablePort();
@@ -111,7 +113,7 @@ function start(){
111
113
  logger.error("无法获取可用端口,程序退出");
112
114
  process.exit(1);
113
115
  }
114
- startServer(false, BASE_URL, proxyPort);
116
+ startServer(false, BASE_URL, proxyPort, env['MODEL'], env['SMALL_FAST_MODEL']);
115
117
  }
116
118
 
117
119
  // 设置代理地址
@@ -121,7 +123,7 @@ function start(){
121
123
 
122
124
  logger.debug(`启动 Claude 进程: ${claudePath}`);
123
125
 
124
- const child = spawn(claudePath,process.argv.slice(2),{
126
+ const child = spawn(claudePath,[],{
125
127
  env:{
126
128
  ...process.env, //包含原先环境变量
127
129
  ...anthropicEnv,