gitlab-ai-review 4.2.3 → 6.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -2
- package/cli.js +118 -32
- package/index.js +741 -335
- package/lib/ai-client.js +145 -61
- package/lib/config.js +798 -11
- package/lib/diff-parser.js +143 -44
- package/lib/document-loader.js +329 -0
- package/lib/export-analyzer.js +384 -0
- package/lib/gitlab-client.js +588 -7
- package/lib/prompt-tools.js +241 -453
- package/package.json +52 -50
- package/lib/impact-analyzer.js +0 -700
- package/lib/incremental-callchain-analyzer.js +0 -764
package/lib/ai-client.js
CHANGED
|
@@ -1,30 +1,75 @@
|
|
|
1
1
|
/**
|
|
2
|
-
* AI 客户端 - 基于
|
|
3
|
-
*
|
|
2
|
+
* AI 客户端 - 基于 LangChain ChatOpenAI (WPS LLM Proxy)
|
|
3
|
+
* 使用 @langchain/openai 实现
|
|
4
4
|
*/
|
|
5
5
|
|
|
6
|
-
import
|
|
6
|
+
import { ChatOpenAI } from '@langchain/openai';
|
|
7
|
+
import { HumanMessage, SystemMessage, AIMessage } from '@langchain/core/messages';
|
|
7
8
|
|
|
8
9
|
/**
 * AI client backed by LangChain ChatOpenAI, talking to the WPS LLM proxy.
 *
 * Credentials and endpoint resolve from the config object first, then from
 * the ARK_API_KEY / WPS_BASE_URL / WPS_USER_ID environment variables.
 */
export class AIClient {
  /**
   * @param {Object} [config] - optional overrides: apiKey, baseURL, model,
   *   userId, temperature. Missing credentials fall back to env vars.
   * @throws {Error} when apiKey, baseURL, or userId cannot be resolved.
   */
  constructor(config = {}) {
    const apiKey = config.apiKey || process.env.ARK_API_KEY;
    const baseURL = config.baseURL || process.env.WPS_BASE_URL;
    const model = config.model || '643045305/doubao/Doubao-Seed-1.6//public';
    const userId = config.userId || process.env.WPS_USER_ID;

    if (!apiKey) {
      throw new Error('ARK_API_KEY 未配置');
    }

    if (!baseURL) {
      throw new Error('WPS_BASE_URL 未配置');
    }

    if (!userId) {
      throw new Error('WPS_USER_ID 未配置');
    }

    this.apiKey = apiKey;
    this.baseURL = baseURL;
    this.model = model;
    this.userId = userId;
    this.config = config;

    // Default ChatOpenAI instance, reused by calls without per-call overrides.
    this.chatModel = this._buildChatModel();
  }

  /**
   * Build a ChatOpenAI instance for this client with optional per-call
   * overrides. Centralizes the construction that was previously duplicated
   * verbatim in the constructor, sendMessage, and sendMessageStream.
   *
   * FIX: temperature uses ?? instead of ||, so an explicit temperature of 0
   * is honored instead of silently becoming 0.7, and per-call requests fall
   * back to the config-level temperature before the 0.7 default. Likewise
   * seed: 0 is now a valid seed (the old truthiness check dropped it).
   *
   * @param {{model?: string, temperature?: number, seed?: number}} [options]
   * @returns {ChatOpenAI}
   */
  _buildChatModel(options = {}) {
    return new ChatOpenAI({
      model: options.model || this.model,
      temperature: options.temperature ?? this.config.temperature ?? 0.7,
      openAIApiKey: this.apiKey,
      configuration: {
        baseURL: this.baseURL,
        defaultHeaders: {
          'X-WPS-User-ID': this.userId,
          'X-Api-Key': this.apiKey,
        },
      },
      // seed of 0 is valid; only omit when the caller passed nothing.
      modelKwargs: options.seed != null ? { seed: options.seed } : undefined,
    });
  }

  /**
   * Pick the model for a call: reuse the default instance unless any
   * per-call option (model / temperature / seed) was supplied.
   *
   * @param {Object} options - per-call options from sendMessage/-Stream
   * @returns {ChatOpenAI}
   */
  _resolveModel(options) {
    if (options.model || options.temperature !== undefined || options.seed !== undefined) {
      return this._buildChatModel(options);
    }
    return this.chatModel;
  }

  /**
   * Convert a prompt (plain string or array of {role, content} messages)
   * into LangChain message objects. Unknown roles fall back to HumanMessage.
   *
   * @param {Array<{role: string, content: string}>|string} prompt
   * @returns {Array} LangChain message array
   */
  _convertToLangChainMessages(prompt) {
    if (typeof prompt === 'string') {
      return [new HumanMessage(prompt)];
    }

    return prompt.map((msg) => {
      switch (msg.role) {
        case 'system':
          return new SystemMessage(msg.content);
        case 'assistant':
          return new AIMessage(msg.content);
        case 'user':
        default:
          return new HumanMessage(msg.content);
      }
    });
  }

  /**
   * Send a prompt and await the complete response.
   *
   * @param {Array|string} prompt - message array or single prompt string
   * @param {{model?: string, temperature?: number, seed?: number}} [options]
   * @returns {Promise<{reasoning: string, content: string, usage: Object, raw: Object}>}
   * @throws re-throws any error from the underlying model invocation.
   */
  async sendMessage(prompt, options = {}) {
    try {
      const messages = this._convertToLangChainMessages(prompt);
      const model = this._resolveModel(options);

      const response = await model.invoke(messages);

      return {
        reasoning: response.additional_kwargs?.reasoning_content || '',
        content: response.content || '',
        usage: response.response_metadata?.usage || {},
        raw: response,
      };
    } catch (error) {
      console.error('AI 调用失败:', error);
      throw error;
    }
  }

  /**
   * Send a prompt and stream the response chunk by chunk.
   *
   * @param {Array|string} prompt - message array or single prompt string
   * @param {(chunk: {reasoning: string, content: string}) => void} [onChunk]
   *   invoked once per streamed chunk with its incremental text.
   * @param {{model?: string, temperature?: number, seed?: number}} [options]
   * @returns {Promise<{reasoning: string, content: string}>} accumulated result
   * @throws re-throws any error from the underlying stream.
   */
  async sendMessageStream(prompt, onChunk, options = {}) {
    try {
      const messages = this._convertToLangChainMessages(prompt);
      const model = this._resolveModel(options);

      let fullReasoning = '';
      let fullContent = '';

      const stream = await model.stream(messages);

      for await (const chunk of stream) {
        const reasoning = chunk.additional_kwargs?.reasoning_content || '';
        const content = chunk.content || '';

        fullReasoning += reasoning;
        fullContent += content;

        if (onChunk) {
          onChunk({ reasoning, content });
        }
      }

      return {
        reasoning: fullReasoning,
        content: fullContent,
      };
    } catch (error) {
      console.error('AI 流式调用失败:', error);
      throw error;
    }
  }

  /**
   * Current effective configuration: resolved model/baseURL/userId merged
   * with the raw config object (raw config keys win, as before).
   * @returns {Object} configuration snapshot
   */
  getConfig() {
    return {
      model: this.model,
      baseURL: this.baseURL,
      userId: this.userId,
      ...this.config,
    };
  }

  /**
   * Back-compat accessor for the raw client.
   * @returns {AIClient} this instance
   */
  getClient() {
    return this;
  }

  /**
   * Access the underlying LangChain model.
   * @returns {ChatOpenAI} the default ChatOpenAI instance
   */
  getChatModel() {
    return this.chatModel;
  }
}

export default AIClient;
|
|
120
|
-
|