tabby-ai-assistant 1.0.12 → 1.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/.editorconfig +18 -0
  2. package/README.md +113 -55
  3. package/dist/index.js +1 -1
  4. package/package.json +6 -4
  5. package/src/components/chat/ai-sidebar.component.scss +220 -9
  6. package/src/components/chat/ai-sidebar.component.ts +364 -29
  7. package/src/components/chat/chat-input.component.ts +36 -4
  8. package/src/components/chat/chat-interface.component.ts +225 -5
  9. package/src/components/chat/chat-message.component.ts +6 -1
  10. package/src/components/settings/context-settings.component.ts +91 -91
  11. package/src/components/terminal/ai-toolbar-button.component.ts +4 -2
  12. package/src/components/terminal/command-suggestion.component.ts +148 -6
  13. package/src/index.ts +0 -6
  14. package/src/providers/tabby/ai-toolbar-button.provider.ts +7 -3
  15. package/src/services/chat/ai-sidebar.service.ts +414 -410
  16. package/src/services/chat/chat-session.service.ts +36 -12
  17. package/src/services/context/compaction.ts +110 -134
  18. package/src/services/context/manager.ts +27 -7
  19. package/src/services/context/memory.ts +17 -33
  20. package/src/services/context/summary.service.ts +136 -0
  21. package/src/services/core/ai-assistant.service.ts +1060 -37
  22. package/src/services/core/ai-provider-manager.service.ts +154 -25
  23. package/src/services/core/checkpoint.service.ts +218 -18
  24. package/src/services/core/config-provider.service.ts +4 -12
  25. package/src/services/core/toast.service.ts +106 -106
  26. package/src/services/providers/anthropic-provider.service.ts +126 -202
  27. package/src/services/providers/base-provider.service.ts +315 -21
  28. package/src/services/providers/glm-provider.service.ts +151 -233
  29. package/src/services/providers/minimax-provider.service.ts +55 -238
  30. package/src/services/providers/ollama-provider.service.ts +117 -188
  31. package/src/services/providers/openai-compatible.service.ts +165 -177
  32. package/src/services/providers/openai-provider.service.ts +170 -177
  33. package/src/services/providers/vllm-provider.service.ts +116 -188
  34. package/src/services/terminal/terminal-context.service.ts +265 -5
  35. package/src/services/terminal/terminal-manager.service.ts +748 -748
  36. package/src/services/terminal/terminal-tools.service.ts +612 -441
  37. package/src/types/ai.types.ts +156 -3
  38. package/src/types/provider.types.ts +206 -75
  39. package/src/utils/cost.utils.ts +249 -0
  40. package/src/utils/validation.utils.ts +306 -2
  41. package/dist/index.js.LICENSE.txt +0 -18
  42. package/src/index.ts.backup +0 -165
  43. package/src/services/chat/chat-history.service.ts.backup +0 -239
  44. package/src/services/terminal/command-analyzer.service.ts +0 -43
  45. package/src/services/terminal/context-menu.service.ts +0 -45
  46. package/src/services/terminal/hotkey.service.ts +0 -53
  47. package/webpack.config.js.backup +0 -57
package/src/services/providers/vllm-provider.service.ts
@@ -1,7 +1,7 @@
 import { Injectable } from '@angular/core';
 import { Observable, Observer } from 'rxjs';
 import { BaseAiProvider } from './base-provider.service';
-import { ProviderCapability, HealthStatus, ValidationResult } from '../../types/provider.types';
+import { ProviderCapability, ValidationResult } from '../../types/provider.types';
 import { ChatRequest, ChatResponse, StreamEvent, MessageRole, CommandRequest, CommandResponse, ExplainRequest, ExplainResponse, AnalysisRequest, AnalysisResponse } from '../../types/ai.types';
 import { LoggerService } from '../core/logger.service';
 
@@ -28,10 +28,6 @@ export class VllmProviderService extends BaseAiProvider {
     super(logger);
   }
 
-  protected getDefaultBaseURL(): string {
-    return 'http://localhost:8000/v1';
-  }
-
   /**
    * Get authentication headers
    */
@@ -93,7 +89,7 @@ export class VllmProviderService extends BaseAiProvider {
   }
 
   /**
-   * Streaming chat
+   * Streaming chat - supports tool-call events
    */
   chatStream(request: ChatRequest): Observable<StreamEvent> {
     return new Observable<StreamEvent>((subscriber: Observer<StreamEvent>) => {
@@ -127,9 +123,16 @@ export class VllmProviderService extends BaseAiProvider {
           throw new Error('No response body');
         }
 
+        // Tool-call state tracking
+        let currentToolCallId = '';
+        let currentToolCallName = '';
+        let currentToolInput = '';
+        let currentToolIndex = -1;
         let fullContent = '';
 
         while (true) {
+          if (abortController.signal.aborted) break;
+
           const { done, value } = await reader.read();
           if (done) break;
 
@@ -142,8 +145,62 @@ export class VllmProviderService extends BaseAiProvider {
 
           try {
             const parsed = JSON.parse(data);
-            const delta = parsed.choices[0]?.delta?.content;
-            if (delta) {
+            const choice = parsed.choices?.[0];
+
+            this.logger.debug('Stream event', { type: 'delta', hasToolCalls: !!choice?.delta?.tool_calls });
+
+            // Handle tool-call chunks
+            if (choice?.delta?.tool_calls?.length > 0) {
+              for (const toolCall of choice.delta.tool_calls) {
+                const index = toolCall.index || 0;
+
+                // A new tool call begins
+                if (currentToolIndex !== index) {
+                  if (currentToolIndex >= 0) {
+                    // Emit the end event for the previous tool call
+                    let parsedInput = {};
+                    try {
+                      parsedInput = JSON.parse(currentToolInput || '{}');
+                    } catch (e) {
+                      // Fall back to the raw input
+                    }
+                    subscriber.next({
+                      type: 'tool_use_end',
+                      toolCall: {
+                        id: currentToolCallId,
+                        name: currentToolCallName,
+                        input: parsedInput
+                      }
+                    });
+                    this.logger.debug('Stream event', { type: 'tool_use_end', name: currentToolCallName });
+                  }
+
+                  currentToolIndex = index;
+                  currentToolCallId = toolCall.id || `tool_${Date.now()}_${index}`;
+                  currentToolCallName = toolCall.function?.name || '';
+                  currentToolInput = toolCall.function?.arguments || '';
+
+                  // Emit the tool-call start event
+                  subscriber.next({
+                    type: 'tool_use_start',
+                    toolCall: {
+                      id: currentToolCallId,
+                      name: currentToolCallName,
+                      input: {}
+                    }
+                  });
+                  this.logger.debug('Stream event', { type: 'tool_use_start', name: currentToolCallName });
+                } else {
+                  // Keep accumulating arguments
+                  if (toolCall.function?.arguments) {
+                    currentToolInput += toolCall.function.arguments;
+                  }
+                }
+              }
+            }
+            // Handle text deltas
+            else if (choice?.delta?.content) {
+              const delta = choice.delta.content;
              fullContent += delta;
              subscriber.next({
                type: 'text_delta',
@@ -156,6 +213,25 @@ export class VllmProviderService extends BaseAiProvider {
           }
         }
 
+        // Emit the end event for the last tool call
+        if (currentToolIndex >= 0) {
+          let parsedInput = {};
+          try {
+            parsedInput = JSON.parse(currentToolInput || '{}');
+          } catch (e) {
+            // Fall back to the raw input
+          }
+          subscriber.next({
+            type: 'tool_use_end',
+            toolCall: {
+              id: currentToolCallId,
+              name: currentToolCallName,
+              input: parsedInput
+            }
+          });
+          this.logger.debug('Stream event', { type: 'tool_use_end', name: currentToolCallName });
+        }
+
         subscriber.next({
           type: 'message_end',
           message: {
@@ -165,11 +241,14 @@ export class VllmProviderService extends BaseAiProvider {
            timestamp: new Date()
          }
        });
+        this.logger.debug('Stream event', { type: 'message_end', contentLength: fullContent.length });
        subscriber.complete();
      } catch (error) {
        if ((error as any).name !== 'AbortError') {
+          const errorMessage = `vLLM stream failed: ${error instanceof Error ? error.message : String(error)}`;
          this.logError(error, { request });
-          subscriber.error(new Error(`vLLM stream failed: ${error instanceof Error ? error.message : String(error)}`));
+          subscriber.next({ type: 'error', error: errorMessage });
+          subscriber.error(new Error(errorMessage));
        }
      }
    };
@@ -181,31 +260,36 @@ export class VllmProviderService extends BaseAiProvider {
    });
  }
 
-  /**
-   * Health check - verify whether the vLLM service is running
-   */
-  async healthCheck(): Promise<HealthStatus> {
-    try {
-      const controller = new AbortController();
-      const timeoutId = setTimeout(() => controller.abort(), 5000);
-
-      const response = await fetch(`${this.getBaseURL()}/models`, {
-        method: 'GET',
-        headers: this.getAuthHeaders(),
-        signal: controller.signal
-      });
-
-      clearTimeout(timeoutId);
+  protected async sendTestRequest(request: ChatRequest): Promise<ChatResponse> {
+    const response = await fetch(`${this.getBaseURL()}/chat/completions`, {
+      method: 'POST',
+      headers: this.getAuthHeaders(),
+      body: JSON.stringify({
+        model: this.config?.model || 'meta-llama/Llama-3.1-8B',
+        messages: this.transformMessages(request.messages),
+        max_tokens: request.maxTokens || 1,
+        temperature: request.temperature || 0
+      })
+    });
 
-      if (response.ok) {
-        this.lastHealthCheck = { status: HealthStatus.HEALTHY, timestamp: new Date() };
-        return HealthStatus.HEALTHY;
-      }
-      return HealthStatus.DEGRADED;
-    } catch (error) {
-      this.logger.warn('vLLM health check failed', error);
-      return HealthStatus.UNHEALTHY;
+    if (!response.ok) {
+      throw new Error(`vLLM API error: ${response.status}`);
    }
+
+    const data = await response.json();
+    return {
+      message: {
+        id: this.generateId(),
+        role: MessageRole.ASSISTANT,
+        content: data.choices[0]?.message?.content || '',
+        timestamp: new Date()
+      },
+      usage: data.usage ? {
+        promptTokens: data.usage.prompt_tokens,
+        completionTokens: data.usage.completion_tokens,
+        totalTokens: data.usage.total_tokens
+      } : undefined
+    };
  }
 
  /**
@@ -304,160 +388,4 @@ export class VllmProviderService extends BaseAiProvider {
      content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content)
    }));
  }
-
-  /**
-   * Build the command-generation prompt
-   */
-  private buildCommandPrompt(request: CommandRequest): string {
-    let prompt = `请将以下自然语言描述转换为准确的终端命令:\n\n"${request.naturalLanguage}"\n\n`;
-
-    if (request.context) {
-      prompt += `当前环境:\n`;
-      if (request.context.currentDirectory) {
-        prompt += `- 当前目录:${request.context.currentDirectory}\n`;
-      }
-      if (request.context.operatingSystem) {
-        prompt += `- 操作系统:${request.context.operatingSystem}\n`;
-      }
-      if (request.context.shell) {
-        prompt += `- Shell:${request.context.shell}\n`;
-      }
-    }
-
-    prompt += `\n请直接返回JSON格式:\n`;
-    prompt += `{\n`;
-    prompt += ` "command": "具体命令",\n`;
-    prompt += ` "explanation": "命令解释",\n`;
-    prompt += ` "confidence": 0.95\n`;
-    prompt += `}\n`;
-
-    return prompt;
-  }
-
-  /**
-   * Build the command-explanation prompt
-   */
-  private buildExplainPrompt(request: ExplainRequest): string {
-    let prompt = `请详细解释以下终端命令:\n\n\`${request.command}\`\n\n`;
-
-    if (request.context?.currentDirectory) {
-      prompt += `当前目录:${request.context.currentDirectory}\n`;
-    }
-    if (request.context?.operatingSystem) {
-      prompt += `操作系统:${request.context.operatingSystem}\n`;
-    }
-
-    prompt += `\n请按以下JSON格式返回:\n`;
-    prompt += `{\n`;
-    prompt += ` "explanation": "整体解释",\n`;
-    prompt += ` "breakdown": [\n`;
-    prompt += ` {"part": "命令部分", "description": "说明"}\n`;
-    prompt += ` ],\n`;
-    prompt += ` "examples": ["使用示例"]\n`;
-    prompt += `}\n`;
-
-    return prompt;
-  }
-
-  /**
-   * Build the result-analysis prompt
-   */
-  private buildAnalysisPrompt(request: AnalysisRequest): string {
-    let prompt = `请分析以下命令执行结果:\n\n`;
-    prompt += `命令:${request.command}\n`;
-    prompt += `退出码:${request.exitCode}\n`;
-    prompt += `输出:\n${request.output}\n\n`;
-
-    if (request.context?.workingDirectory) {
-      prompt += `工作目录:${request.context.workingDirectory}\n`;
-    }
-
-    prompt += `\n请按以下JSON格式返回:\n`;
-    prompt += `{\n`;
-    prompt += ` "summary": "结果总结",\n`;
-    prompt += ` "insights": ["洞察1", "洞察2"],\n`;
-    prompt += ` "success": true/false,\n`;
-    prompt += ` "issues": [\n`;
-    prompt += ` {"severity": "warning|error|info", "message": "问题描述", "suggestion": "建议"}\n`;
-    prompt += ` ]\n`;
-    prompt += `}\n`;
-
-    return prompt;
-  }
-
-  /**
-   * Parse the command response
-   */
-  private parseCommandResponse(content: string): CommandResponse {
-    try {
-      const match = content.match(/\{[\s\S]*\}/);
-      if (match) {
-        const parsed = JSON.parse(match[0]);
-        return {
-          command: parsed.command || '',
-          explanation: parsed.explanation || '',
-          confidence: parsed.confidence || 0.5
-        };
-      }
-    } catch (error) {
-      this.logger.warn('Failed to parse vLLM command response as JSON', error);
-    }
-
-    const lines = content.split('\n').map(l => l.trim()).filter(l => l);
-    return {
-      command: lines[0] || '',
-      explanation: lines.slice(1).join(' ') || 'AI生成的命令',
-      confidence: 0.5
-    };
-  }
-
-  /**
-   * Parse the explanation response
-   */
-  private parseExplainResponse(content: string): ExplainResponse {
-    try {
-      const match = content.match(/\{[\s\S]*\}/);
-      if (match) {
-        const parsed = JSON.parse(match[0]);
-        return {
-          explanation: parsed.explanation || '',
-          breakdown: parsed.breakdown || [],
-          examples: parsed.examples || []
-        };
-      }
-    } catch (error) {
-      this.logger.warn('Failed to parse vLLM explain response as JSON', error);
-    }
-
-    return {
-      explanation: content,
-      breakdown: []
-    };
-  }
-
-  /**
-   * Parse the analysis response
-   */
-  private parseAnalysisResponse(content: string): AnalysisResponse {
-    try {
-      const match = content.match(/\{[\s\S]*\}/);
-      if (match) {
-        const parsed = JSON.parse(match[0]);
-        return {
-          summary: parsed.summary || '',
-          insights: parsed.insights || [],
-          success: parsed.success !== false,
-          issues: parsed.issues || []
-        };
-      }
-    } catch (error) {
-      this.logger.warn('Failed to parse vLLM analysis response as JSON', error);
-    }
-
-    return {
-      summary: content,
-      insights: [],
-      success: true
-    };
-  }
 }
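
Note: the hunks above turn chatStream() from a plain text stream into one that also emits tool-call lifecycle events ('tool_use_start', 'tool_use_end') alongside 'text_delta', 'message_end', and 'error'. The sketch below is illustrative only and not part of the package; the event `type` values and the `toolCall` shape are taken from the diff, while every other field name (such as `delta` on text events) is an assumption made for the example.

```typescript
import { Observable } from 'rxjs';

// Local approximation of the package's StreamEvent union (field names beyond
// `type` and `toolCall` are assumed, not confirmed by the diff).
type SketchStreamEvent =
  | { type: 'text_delta'; delta?: string }
  | { type: 'tool_use_start' | 'tool_use_end'; toolCall: { id: string; name: string; input: unknown } }
  | { type: 'message_end'; message: { content: string } }
  | { type: 'error'; error: string };

// Collects streamed text and logs tool-call lifecycle events.
function logStream(events$: Observable<SketchStreamEvent>): Promise<string> {
  return new Promise((resolve, reject) => {
    let text = '';
    events$.subscribe({
      next: (event) => {
        switch (event.type) {
          case 'tool_use_start':
            console.log(`tool started: ${event.toolCall.name}`);
            break;
          case 'tool_use_end':
            console.log(`tool finished: ${event.toolCall.name}`, event.toolCall.input);
            break;
          case 'text_delta':
            text += event.delta ?? '';
            break;
          case 'error':
            console.error('stream error:', event.error);
            break;
          case 'message_end':
            console.log(`assistant message complete (${event.message.content.length} chars)`);
            break;
        }
      },
      error: reject,
      complete: () => resolve(text),
    });
  });
}
```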
package/src/services/terminal/terminal-context.service.ts
@@ -1,8 +1,20 @@
 import { Injectable } from '@angular/core';
 import { Subject, Observable } from 'rxjs';
-import { TerminalContext, TerminalSession, TerminalError, CommandResult, SystemInfo, ProcessInfo } from '../../types/terminal.types';
+import { TerminalContext, TerminalSession, TerminalError, CommandResult, SystemInfo, ProcessInfo, ProjectInfo } from '../../types/terminal.types';
 import { LoggerService } from '../core/logger.service';
 
+/**
+ * Project detection configuration
+ */
+interface ProjectDetector {
+  pattern: RegExp;
+  type: ProjectInfo['type'];
+  configFiles: string[];
+  parseConfig: (content: string) => Partial<ProjectInfo>;
+  language: string;
+  framework?: string;
+}
+
 @Injectable({ providedIn: 'root' })
 export class TerminalContextService {
   private currentContext: TerminalContext | null = null;
@@ -10,6 +22,127 @@ export class TerminalContextService {
   private errorDetected$ = new Subject<TerminalError>();
   private commandExecuted$ = new Subject<CommandResult>();
 
+  // Project detector configuration
+  private readonly projectDetectors: ProjectDetector[] = [
+    {
+      pattern: /package\.json$/,
+      type: 'npm',
+      configFiles: ['package.json'],
+      parseConfig: (content: string) => {
+        try {
+          const pkg = JSON.parse(content);
+          return {
+            name: pkg.name,
+            version: pkg.version,
+            dependencies: Object.keys(pkg.dependencies || {}),
+            scripts: pkg.scripts,
+            description: pkg.description,
+            framework: pkg.dependencies ? this.detectFramework(Object.keys(pkg.dependencies)) : undefined
+          };
+        } catch {
+          return {};
+        }
+      },
+      language: 'JavaScript/TypeScript'
+    },
+    {
+      pattern: /pom\.xml$/,
+      type: 'maven',
+      configFiles: ['pom.xml'],
+      parseConfig: (content: string) => {
+        const nameMatch = content.match(/<artifactId>([^<]+)<\/artifactId>/);
+        const versionMatch = content.match(/<version>([^<]+)<\/version>/);
+        return {
+          name: nameMatch?.[1],
+          version: versionMatch?.[1],
+          language: 'Java'
+        };
+      },
+      language: 'Java'
+    },
+    {
+      pattern: /build\.gradle$/,
+      type: 'gradle',
+      configFiles: ['build.gradle', 'build.gradle.kts'],
+      parseConfig: (content: string) => {
+        const nameMatch = content.match(/rootProject\.name\s*=\s*['"]([^'"]+)['"]/);
+        const versionMatch = content.match(/version\s*=\s*['"]([^'"]+)['"]/);
+        return {
+          name: nameMatch?.[1],
+          version: versionMatch?.[1],
+          language: 'Java/Kotlin'
+        };
+      },
+      language: 'Java/Kotlin'
+    },
+    {
+      pattern: /requirements\.txt$/,
+      type: 'pip',
+      configFiles: ['requirements.txt'],
+      parseConfig: (content: string) => {
+        const deps = content.split('\n')
+          .map(line => line.split(/[>=<!]/)[0].trim())
+          .filter(d => d.length > 0);
+        return {
+          dependencies: deps,
+          language: 'Python',
+          framework: this.detectPythonFramework(deps)
+        };
+      },
+      language: 'Python'
+    },
+    {
+      pattern: /Cargo\.toml$/,
+      type: 'cargo',
+      configFiles: ['Cargo.toml'],
+      parseConfig: (content: string) => {
+        const nameMatch = content.match(/name\s*=\s*["']([^"']+)["']/);
+        const versionMatch = content.match(/version\s*=\s*["']([^"']+)["']/);
+        return {
+          name: nameMatch?.[1],
+          version: versionMatch?.[1],
+          language: 'Rust'
+        };
+      },
+      language: 'Rust'
+    },
+    {
+      pattern: /go\.mod$/,
+      type: 'go',
+      configFiles: ['go.mod'],
+      parseConfig: (content: string) => {
+        const moduleMatch = content.match(/module\s+([^\s]+)/);
+        const versionMatch = content.match(/go\s+([\d.]+)/);
+        return {
+          name: moduleMatch?.[1],
+          version: versionMatch?.[1],
+          language: 'Go'
+        };
+      },
+      language: 'Go'
+    },
+    {
+      pattern: /yarn\.lock$/,
+      type: 'yarn',
+      configFiles: ['package.json', 'yarn.lock'],
+      parseConfig: (content: string) => {
+        try {
+          const pkg = JSON.parse(content);
+          return {
+            name: pkg.name,
+            version: pkg.version,
+            dependencies: Object.keys(pkg.dependencies || {}),
+            scripts: pkg.scripts,
+            description: pkg.description
+          };
+        } catch {
+          return {};
+        }
+      },
+      language: 'JavaScript/TypeScript'
+    }
+  ];
+
   constructor(private logger: LoggerService) {
     this.initializeContext();
   }
@@ -25,7 +158,7 @@ export class TerminalContextService {
       isRunning: false,
       recentCommands: [],
       systemInfo,
-      projectInfo
+      projectInfo: projectInfo || undefined
     };
 
     this.logger.info('Terminal context initialized', { context: this.currentContext });
@@ -244,13 +377,140 @@ export class TerminalContextService {
 
   /**
    * Detect project information
+   * Detects the project type and metadata from config files in the current working directory
    */
-  private async detectProjectInfo(): Promise<any> {
-    // TODO: implement project detection logic
-    // Detect .git, package.json, pom.xml, build.gradle, etc.
+  async detectProjectInfo(): Promise<ProjectInfo | null> {
+    const cwd = this.currentContext?.session.cwd || process.cwd();
+
+    // Check for a .git directory (Git project)
+    const hasGit = await this.checkFileExists('.git');
+    if (hasGit) {
+      return {
+        type: 'git',
+        root: cwd,
+        name: this.extractProjectName(cwd),
+        language: 'N/A'
+      };
+    }
+
+    // Iterate over the project detectors
+    for (const detector of this.projectDetectors) {
+      for (const configFile of detector.configFiles) {
+        const content = await this.readFileContent(configFile);
+        if (content) {
+          const config = detector.parseConfig(content);
+          return {
+            type: detector.type,
+            root: cwd,
+            name: config.name || this.extractProjectName(cwd),
+            version: config.version,
+            dependencies: config.dependencies,
+            scripts: config.scripts,
+            description: config.description,
+            language: detector.language,
+            framework: config.framework || detector.framework
+          };
+        }
+      }
+    }
+
+    // No project detected
     return null;
   }
 
+  /**
+   * Manually trigger project re-detection
+   */
+  async refreshProjectInfo(): Promise<void> {
+    if (!this.currentContext) return;
+
+    const projectInfo = await this.detectProjectInfo();
+    this.currentContext.projectInfo = projectInfo || undefined;
+    this.contextChange$.next(this.currentContext);
+
+    this.logger.info('Project info refreshed', { projectInfo });
+  }
+
+  /**
+   * Get all detectable project types
+   */
+  getSupportedProjectTypes(): ProjectInfo['type'][] {
+    return this.projectDetectors.map(d => d.type);
+  }
+
+  /**
+   * Check whether a file exists (mock implementation)
+   */
+  private async checkFileExists(_filename: string): Promise<boolean> {
+    // In a browser environment this method needs to integrate with the actual Tabby API
+    // A mock value is returned here for demonstration
+    return false;
+  }
+
+  /**
+   * Read file contents (mock implementation)
+   */
+  private async readFileContent(_filename: string): Promise<string | null> {
+    // In a browser environment this method needs to integrate with the actual Tabby API
+    // A mock value is returned here for demonstration
+    return null;
+  }
+
+  /**
+   * Extract the project name from a path
+   */
+  private extractProjectName(path: string): string {
+    const parts = path.split('/');
+    return parts[parts.length - 1] || 'unknown-project';
+  }
+
+  /**
+   * Detect the framework of a JavaScript/TypeScript project
+   */
+  private detectFramework(dependencies: string[]): string | undefined {
+    const frameworkIndicators: { [key: string]: string[] } = {
+      'React': ['react', 'react-dom'],
+      'Vue': ['vue'],
+      'Angular': ['@angular/core'],
+      'Next.js': ['next'],
+      'Nuxt': ['nuxt'],
+      'Svelte': ['svelte'],
+      'Express': ['express'],
+      'NestJS': ['@nestjs/core'],
+      'Electron': ['electron'],
+      'Expo': ['expo']
+    };
+
+    for (const [framework, indicators] of Object.entries(frameworkIndicators)) {
+      if (indicators.some(ind => dependencies.includes(ind))) {
+        return framework;
+      }
+    }
+    return undefined;
+  }
+
+  /**
+   * Detect the framework of a Python project
+   */
+  private detectPythonFramework(dependencies: string[]): string | undefined {
+    const frameworkIndicators: { [key: string]: string[] } = {
+      'Django': ['django'],
+      'Flask': ['flask'],
+      'FastAPI': ['fastapi'],
+      'Pyramid': ['pyramid'],
+      'Tornado': ['tornado'],
+      'Web2py': ['web2py'],
+      'CherryPy': ['cherrypy']
+    };
+
+    for (const [framework, indicators] of Object.entries(frameworkIndicators)) {
+      if (indicators.some(ind => dependencies.includes(ind))) {
+        return framework;
+      }
+    }
+    return undefined;
+  }
+
   /**
    * Detect the current shell
    */
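
Note: the detectProjectInfo() rewrite above is driven by the projectDetectors table: each entry pairs a config-file pattern with a parseConfig function, and the first config file that can be read decides the project type. The sketch below is a standalone illustration of that pattern, not package code; the local types only approximate the package's ProjectDetector/ProjectInfo shapes, and file contents are passed in directly because the real service reads them through the checkFileExists()/readFileContent() stubs shown in the diff.

```typescript
// Sketch only - local stand-ins for the package's ProjectDetector/ProjectInfo.
interface SketchProjectInfo {
  type: string;
  root: string;
  name?: string;
  version?: string;
  dependencies?: string[];
  language?: string;
  framework?: string;
}

interface SketchDetector {
  pattern: RegExp;
  type: string;
  configFiles: string[];
  parseConfig: (content: string) => Partial<SketchProjectInfo>;
  language: string;
}

// Example detector in the same shape as the npm/pip/cargo entries in the diff.
const pipDetector: SketchDetector = {
  pattern: /requirements\.txt$/,
  type: 'pip',
  configFiles: ['requirements.txt'],
  parseConfig: (content) => ({
    dependencies: content
      .split('\n')
      .map((line) => line.split(/[>=<!]/)[0].trim())
      .filter((d) => d.length > 0),
    language: 'Python',
  }),
  language: 'Python',
};

// Mirror of the detection loop: the first readable config file wins.
function detect(files: Record<string, string>, cwd: string): SketchProjectInfo | null {
  for (const configFile of pipDetector.configFiles) {
    const content = files[configFile];
    if (content) {
      const config = pipDetector.parseConfig(content);
      return { type: pipDetector.type, root: cwd, language: pipDetector.language, ...config };
    }
  }
  return null;
}

console.log(detect({ 'requirements.txt': 'fastapi>=0.110\nuvicorn' }, '/home/user/api'));
// → { type: 'pip', root: '/home/user/api', language: 'Python', dependencies: ['fastapi', 'uvicorn'] }
```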