@spaceflow/core 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. package/CHANGELOG.md +1176 -0
  2. package/README.md +105 -0
  3. package/nest-cli.json +10 -0
  4. package/package.json +128 -0
  5. package/rspack.config.mjs +62 -0
  6. package/src/__mocks__/@opencode-ai/sdk.js +9 -0
  7. package/src/__mocks__/c12.ts +3 -0
  8. package/src/app.module.ts +18 -0
  9. package/src/config/ci.config.ts +29 -0
  10. package/src/config/config-loader.ts +101 -0
  11. package/src/config/config-reader.module.ts +16 -0
  12. package/src/config/config-reader.service.ts +133 -0
  13. package/src/config/feishu.config.ts +35 -0
  14. package/src/config/git-provider.config.ts +29 -0
  15. package/src/config/index.ts +29 -0
  16. package/src/config/llm.config.ts +110 -0
  17. package/src/config/schema-generator.service.ts +129 -0
  18. package/src/config/spaceflow.config.ts +292 -0
  19. package/src/config/storage.config.ts +33 -0
  20. package/src/extension-system/extension.interface.ts +221 -0
  21. package/src/extension-system/index.ts +1 -0
  22. package/src/index.ts +80 -0
  23. package/src/locales/en/translation.json +11 -0
  24. package/src/locales/zh-cn/translation.json +11 -0
  25. package/src/shared/claude-setup/claude-setup.module.ts +8 -0
  26. package/src/shared/claude-setup/claude-setup.service.ts +131 -0
  27. package/src/shared/claude-setup/index.ts +2 -0
  28. package/src/shared/editor-config/index.ts +23 -0
  29. package/src/shared/feishu-sdk/feishu-sdk.module.ts +77 -0
  30. package/src/shared/feishu-sdk/feishu-sdk.service.ts +130 -0
  31. package/src/shared/feishu-sdk/fieshu-card.service.ts +139 -0
  32. package/src/shared/feishu-sdk/index.ts +4 -0
  33. package/src/shared/feishu-sdk/types/card-action.ts +132 -0
  34. package/src/shared/feishu-sdk/types/card.ts +64 -0
  35. package/src/shared/feishu-sdk/types/common.ts +22 -0
  36. package/src/shared/feishu-sdk/types/index.ts +46 -0
  37. package/src/shared/feishu-sdk/types/message.ts +35 -0
  38. package/src/shared/feishu-sdk/types/module.ts +21 -0
  39. package/src/shared/feishu-sdk/types/user.ts +77 -0
  40. package/src/shared/git-provider/adapters/gitea.adapter.spec.ts +473 -0
  41. package/src/shared/git-provider/adapters/gitea.adapter.ts +499 -0
  42. package/src/shared/git-provider/adapters/github.adapter.spec.ts +341 -0
  43. package/src/shared/git-provider/adapters/github.adapter.ts +830 -0
  44. package/src/shared/git-provider/adapters/gitlab.adapter.ts +839 -0
  45. package/src/shared/git-provider/adapters/index.ts +3 -0
  46. package/src/shared/git-provider/detect-provider.spec.ts +195 -0
  47. package/src/shared/git-provider/detect-provider.ts +112 -0
  48. package/src/shared/git-provider/git-provider.interface.ts +188 -0
  49. package/src/shared/git-provider/git-provider.module.ts +73 -0
  50. package/src/shared/git-provider/git-provider.service.spec.ts +282 -0
  51. package/src/shared/git-provider/git-provider.service.ts +309 -0
  52. package/src/shared/git-provider/index.ts +7 -0
  53. package/src/shared/git-provider/parse-repo-url.spec.ts +221 -0
  54. package/src/shared/git-provider/parse-repo-url.ts +155 -0
  55. package/src/shared/git-provider/types.ts +434 -0
  56. package/src/shared/git-sdk/git-sdk-diff.utils.spec.ts +344 -0
  57. package/src/shared/git-sdk/git-sdk-diff.utils.ts +151 -0
  58. package/src/shared/git-sdk/git-sdk.module.ts +8 -0
  59. package/src/shared/git-sdk/git-sdk.service.ts +235 -0
  60. package/src/shared/git-sdk/git-sdk.types.ts +25 -0
  61. package/src/shared/git-sdk/index.ts +4 -0
  62. package/src/shared/i18n/i18n.spec.ts +96 -0
  63. package/src/shared/i18n/i18n.ts +86 -0
  64. package/src/shared/i18n/index.ts +1 -0
  65. package/src/shared/i18n/locale-detect.ts +134 -0
  66. package/src/shared/llm-jsonput/index.ts +94 -0
  67. package/src/shared/llm-jsonput/types.ts +17 -0
  68. package/src/shared/llm-proxy/adapters/claude-code.adapter.spec.ts +131 -0
  69. package/src/shared/llm-proxy/adapters/claude-code.adapter.ts +208 -0
  70. package/src/shared/llm-proxy/adapters/index.ts +4 -0
  71. package/src/shared/llm-proxy/adapters/llm-adapter.interface.ts +23 -0
  72. package/src/shared/llm-proxy/adapters/open-code.adapter.ts +342 -0
  73. package/src/shared/llm-proxy/adapters/openai.adapter.spec.ts +215 -0
  74. package/src/shared/llm-proxy/adapters/openai.adapter.ts +153 -0
  75. package/src/shared/llm-proxy/index.ts +6 -0
  76. package/src/shared/llm-proxy/interfaces/config.interface.ts +32 -0
  77. package/src/shared/llm-proxy/interfaces/index.ts +4 -0
  78. package/src/shared/llm-proxy/interfaces/message.interface.ts +48 -0
  79. package/src/shared/llm-proxy/interfaces/session.interface.ts +28 -0
  80. package/src/shared/llm-proxy/llm-proxy.module.ts +140 -0
  81. package/src/shared/llm-proxy/llm-proxy.service.spec.ts +303 -0
  82. package/src/shared/llm-proxy/llm-proxy.service.ts +132 -0
  83. package/src/shared/llm-proxy/llm-session.spec.ts +111 -0
  84. package/src/shared/llm-proxy/llm-session.ts +109 -0
  85. package/src/shared/llm-proxy/stream-logger.ts +97 -0
  86. package/src/shared/logger/index.ts +11 -0
  87. package/src/shared/logger/logger.interface.ts +93 -0
  88. package/src/shared/logger/logger.spec.ts +178 -0
  89. package/src/shared/logger/logger.ts +175 -0
  90. package/src/shared/logger/renderers/plain.renderer.ts +116 -0
  91. package/src/shared/logger/renderers/tui.renderer.ts +162 -0
  92. package/src/shared/mcp/index.ts +332 -0
  93. package/src/shared/output/index.ts +2 -0
  94. package/src/shared/output/output.module.ts +9 -0
  95. package/src/shared/output/output.service.ts +97 -0
  96. package/src/shared/package-manager/index.ts +115 -0
  97. package/src/shared/parallel/index.ts +1 -0
  98. package/src/shared/parallel/parallel-executor.ts +169 -0
  99. package/src/shared/rspack-config/index.ts +1 -0
  100. package/src/shared/rspack-config/rspack-config.ts +157 -0
  101. package/src/shared/source-utils/index.ts +130 -0
  102. package/src/shared/spaceflow-dir/index.ts +158 -0
  103. package/src/shared/storage/adapters/file.adapter.ts +113 -0
  104. package/src/shared/storage/adapters/index.ts +3 -0
  105. package/src/shared/storage/adapters/memory.adapter.ts +50 -0
  106. package/src/shared/storage/adapters/storage-adapter.interface.ts +48 -0
  107. package/src/shared/storage/index.ts +4 -0
  108. package/src/shared/storage/storage.module.ts +150 -0
  109. package/src/shared/storage/storage.service.ts +293 -0
  110. package/src/shared/storage/types.ts +51 -0
  111. package/src/shared/verbose/index.ts +73 -0
  112. package/test/app.e2e-spec.ts +22 -0
  113. package/tsconfig.build.json +4 -0
  114. package/tsconfig.json +25 -0
  115. package/tsconfig.skill.json +18 -0
  116. package/vitest.config.ts +58 -0
The hunks below expand four of these files: the LLM proxy service and session implementations, together with their specs.

package/src/shared/llm-proxy/llm-proxy.service.spec.ts
@@ -0,0 +1,303 @@
+ import { vi, type Mocked } from "vitest";
+ import { Test, TestingModule } from "@nestjs/testing";
+ import { LlmProxyService, ChatOptions } from "./llm-proxy.service";
+ import { ClaudeCodeAdapter } from "./adapters/claude-code.adapter";
+ import { OpenAIAdapter } from "./adapters/openai.adapter";
+ import { OpenCodeAdapter } from "./adapters/open-code.adapter";
+
+ vi.mock("@anthropic-ai/claude-agent-sdk", () => ({
+   query: vi.fn(),
+ }));
+
+ describe("LlmProxyService", () => {
+   let service: LlmProxyService;
+   let claudeAdapter: Mocked<ClaudeCodeAdapter>;
+   let openaiAdapter: Mocked<OpenAIAdapter>;
+   let opencodeAdapter: Mocked<OpenCodeAdapter>;
+
+   const mockConfig = {
+     defaultAdapter: "claude-code" as const,
+   };
+
+   beforeEach(async () => {
+     const mockClaude = {
+       name: "claude-code",
+       chat: vi.fn(),
+       chatStream: vi.fn(),
+       isConfigured: vi.fn().mockReturnValue(true),
+       isSupportJsonSchema: vi.fn().mockReturnValue(true),
+     };
+     const mockOpenAI = {
+       name: "openai",
+       chat: vi.fn(),
+       chatStream: vi.fn(),
+       isConfigured: vi.fn().mockReturnValue(true),
+       isSupportJsonSchema: vi.fn().mockReturnValue(true),
+     };
+     const mockOpenCode = {
+       name: "open-code",
+       chat: vi.fn(),
+       chatStream: vi.fn(),
+       isConfigured: vi.fn().mockReturnValue(true),
+       isSupportJsonSchema: vi.fn().mockReturnValue(true),
+     };
+
+     const module: TestingModule = await Test.createTestingModule({
+       providers: [
+         LlmProxyService,
+         { provide: "LLM_PROXY_CONFIG", useValue: mockConfig },
+         { provide: ClaudeCodeAdapter, useValue: mockClaude },
+         { provide: OpenAIAdapter, useValue: mockOpenAI },
+         { provide: OpenCodeAdapter, useValue: mockOpenCode },
+       ],
+     }).compile();
+
+     service = module.get<LlmProxyService>(LlmProxyService);
+     claudeAdapter = module.get(ClaudeCodeAdapter);
+     openaiAdapter = module.get(OpenAIAdapter);
+     opencodeAdapter = module.get(OpenCodeAdapter);
+   });
+
+   it("should be defined", () => {
+     expect(service).toBeDefined();
+   });
+
+   describe("createSession", () => {
+     it("should create a session with default adapter", () => {
+       const session = service.createSession();
+       expect(session).toBeDefined();
+       expect(session.adapterName).toBe("claude-code");
+     });
+
+     it("should create a session with specified adapter", () => {
+       const session = service.createSession("openai");
+       expect(session).toBeDefined();
+       expect(session.adapterName).toBe("openai");
+     });
+
+     it("should throw error if adapter is not configured", () => {
+       claudeAdapter.isConfigured.mockReturnValue(false);
+       expect(() => service.createSession("claude-code")).toThrow('Adapter "claude-code" is not configured');
+     });
+   });
+
+   describe("chat", () => {
+     it("should call adapter.chat and return response", async () => {
+       const messages = [{ role: "user", content: "hello" }] as any;
+       const mockResponse = { content: "hi", role: "assistant" };
+       claudeAdapter.chat.mockResolvedValue(mockResponse as any);
+
+       const result = await service.chat(messages);
+
+       expect(claudeAdapter.chat).toHaveBeenCalledWith(messages, undefined);
+       expect(result).toEqual(mockResponse);
+     });
+
+     it("should use specified adapter", async () => {
+       const messages = [{ role: "user", content: "hello" }] as any;
+       const options: ChatOptions = { adapter: "openai" };
+       openaiAdapter.chat.mockResolvedValue({ content: "hi" } as any);
+
+       await service.chat(messages, options);
+
+       expect(openaiAdapter.chat).toHaveBeenCalled();
+       expect(claudeAdapter.chat).not.toHaveBeenCalled();
+     });
+
+     it("should handle jsonSchema and parse output", async () => {
+       const messages = [{ role: "user", content: "hello" }] as any;
+       const mockParsed = { foo: "bar" };
+       const mockJsonSchema = {
+         parse: vi.fn().mockResolvedValue(mockParsed),
+         getSchema: vi.fn().mockReturnValue({ type: "object" }),
+       };
+       const options = { jsonSchema: mockJsonSchema };
+       const mockResponse = { content: '{"foo":"bar"}', role: "assistant" };
+       claudeAdapter.chat.mockResolvedValue(mockResponse as any);
+
+       const result = await service.chat(messages, options as any);
+
+       expect(result.structuredOutput).toEqual(mockParsed);
+       expect(mockJsonSchema.parse).toHaveBeenCalledWith(mockResponse.content);
+     });
+   });
+
+   describe("chatStream", () => {
+     it("should delegate to adapter.chatStream", async () => {
+       const messages = [{ role: "user", content: "hello" }] as any;
+       const mockStream = (async function* () {
+         yield { type: "text", content: "hi" };
+       })();
+       claudeAdapter.chatStream.mockReturnValue(mockStream as any);
+
+       const stream = service.chatStream(messages);
+       const chunks: any[] = [];
+       for await (const chunk of stream) {
+         chunks.push(chunk);
+       }
+
+       expect(chunks).toEqual([{ type: "text", content: "hi" }]);
+       expect(claudeAdapter.chatStream).toHaveBeenCalledWith(messages, undefined);
+     });
+   });
+
+   describe("chat with jsonSchema fallback", () => {
+     it("should append jsonSchema prompt when adapter does not support jsonSchema", async () => {
+       claudeAdapter.isSupportJsonSchema.mockReturnValue(false);
+       const mockJsonSchema = {
+         parse: vi.fn().mockResolvedValue({ foo: "bar" }),
+         getSchema: vi.fn(),
+         jsonFormatInstruction: "Please return JSON",
+         isMatched: vi.fn().mockReturnValue(false),
+       };
+       const messages = [
+         { role: "system", content: "You are an assistant" },
+         { role: "user", content: "hello" },
+       ] as any;
+       claudeAdapter.chat.mockResolvedValue({ content: '{"foo":"bar"}' } as any);
+       const result = await service.chat(messages, { jsonSchema: mockJsonSchema } as any);
+       expect(messages[0].content).toContain("Please return JSON");
+       expect(result.structuredOutput).toEqual({ foo: "bar" });
+     });
+
+     it("should not append jsonSchema prompt if already matched", async () => {
+       claudeAdapter.isSupportJsonSchema.mockReturnValue(false);
+       const mockJsonSchema = {
+         parse: vi.fn().mockResolvedValue({}),
+         getSchema: vi.fn(),
+         jsonFormatInstruction: "Please return JSON",
+         isMatched: vi.fn().mockReturnValue(true),
+       };
+       const messages = [{ role: "system", content: "Already contains JSON instructions" }] as any;
+       claudeAdapter.chat.mockResolvedValue({ content: "{}" } as any);
+       await service.chat(messages, { jsonSchema: mockJsonSchema } as any);
+       expect(messages[0].content).toBe("Already contains JSON instructions");
+     });
+
+     it("should add system message if none exists", async () => {
+       claudeAdapter.isSupportJsonSchema.mockReturnValue(false);
+       const mockJsonSchema = {
+         parse: vi.fn().mockResolvedValue({}),
+         getSchema: vi.fn(),
+         jsonFormatInstruction: "Please return JSON",
+         isMatched: vi.fn().mockReturnValue(false),
+       };
+       const messages = [{ role: "user", content: "hello" }] as any;
+       claudeAdapter.chat.mockResolvedValue({ content: "{}" } as any);
+       await service.chat(messages, { jsonSchema: mockJsonSchema } as any);
+       expect(messages[0].role).toBe("system");
+       expect(messages[0].content).toBe("Please return JSON");
+     });
+
+     it("should not parse if response has no content", async () => {
+       const mockJsonSchema = {
+         parse: vi.fn(),
+         getSchema: vi.fn(),
+       };
+       claudeAdapter.chat.mockResolvedValue({ content: "" } as any);
+       const result = await service.chat(
+         [{ role: "user", content: "hello" }] as any,
+         { jsonSchema: mockJsonSchema } as any,
+       );
+       expect(mockJsonSchema.parse).not.toHaveBeenCalled();
+       expect(result.structuredOutput).toBeUndefined();
+     });
+
+     it("should not parse if structuredOutput already exists", async () => {
+       const mockJsonSchema = {
+         parse: vi.fn(),
+         getSchema: vi.fn(),
+       };
+       claudeAdapter.chat.mockResolvedValue({
+         content: "{}",
+         structuredOutput: { existing: true },
+       } as any);
+       const result = await service.chat(
+         [{ role: "user", content: "hello" }] as any,
+         { jsonSchema: mockJsonSchema } as any,
+       );
+       expect(mockJsonSchema.parse).not.toHaveBeenCalled();
+       expect(result.structuredOutput).toEqual({ existing: true });
+     });
+   });
+
+   describe("chatStream with jsonSchema", () => {
+     it("should parse jsonSchema on result event", async () => {
+       const mockJsonSchema = {
+         parse: vi.fn().mockResolvedValue({ parsed: true }),
+         getSchema: vi.fn(),
+       };
+       const mockStream = (async function* () {
+         yield { type: "result", response: { content: '{"parsed":true}' } };
+       })();
+       claudeAdapter.chatStream.mockReturnValue(mockStream as any);
+       const chunks: any[] = [];
+       for await (const chunk of service.chatStream(
+         [{ role: "user", content: "hello" }] as any,
+         { jsonSchema: mockJsonSchema } as any,
+       )) {
+         chunks.push(chunk);
+       }
+       expect(chunks[0].response.structuredOutput).toEqual({ parsed: true });
+     });
+
+     it("should handle jsonSchema parse error gracefully", async () => {
+       const mockJsonSchema = {
+         parse: vi.fn().mockRejectedValue(new Error("parse error")),
+         getSchema: vi.fn(),
+       };
+       const mockStream = (async function* () {
+         yield { type: "result", response: { content: "invalid json" } };
+       })();
+       claudeAdapter.chatStream.mockReturnValue(mockStream as any);
+       const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
+       const chunks: any[] = [];
+       for await (const chunk of service.chatStream(
+         [{ role: "user", content: "hello" }] as any,
+         { jsonSchema: mockJsonSchema } as any,
+       )) {
+         chunks.push(chunk);
+       }
+       expect(consoleSpy).toHaveBeenCalled();
+       expect(chunks[0].response.structuredOutput).toBeUndefined();
+       consoleSpy.mockRestore();
+     });
+
+     it("should use specified adapter for chatStream", async () => {
+       const mockStream = (async function* () {
+         yield { type: "text", content: "hi" };
+       })();
+       openaiAdapter.chatStream.mockReturnValue(mockStream as any);
+       const chunks: any[] = [];
+       for await (const chunk of service.chatStream([{ role: "user", content: "hello" }] as any, {
+         adapter: "openai",
+       })) {
+         chunks.push(chunk);
+       }
+       expect(openaiAdapter.chatStream).toHaveBeenCalled();
+     });
+   });
+
+   describe("getAvailableAdapters", () => {
+     it("should return list of configured adapters", () => {
+       claudeAdapter.isConfigured.mockReturnValue(true);
+       openaiAdapter.isConfigured.mockReturnValue(false);
+       opencodeAdapter.isConfigured.mockReturnValue(false);
+
+       const available = service.getAvailableAdapters();
+
+       expect(available).toEqual(["claude-code"]);
+     });
+
+     it("should return all adapters when all configured", () => {
+       const available = service.getAvailableAdapters();
+       expect(available).toEqual(["claude-code", "openai", "open-code"]);
+     });
+   });
+
+   describe("getAdapter errors", () => {
+     it("should throw for unsupported adapter type", () => {
+       expect(() => service.createSession("unknown" as any)).toThrow("Unsupported LLM type: unknown");
+     });
+   });
+ });
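
The fallback tests above pin down the contract the service expects from a jsonSchema object: parse, getSchema, jsonFormatInstruction, and isMatched. For orientation, a conforming object might look like the sketch below; the shape is inferred from the mocks in this spec, and the real interface lives in package/src/shared/llm-jsonput/types.ts, which this diff lists but does not expand.

// Hypothetical LlmJsonPut-style object, inferred from the mocks above;
// not the package's actual implementation.
const jsonSchema = {
  getSchema: () => ({ type: "object", properties: { foo: { type: "string" } } }),
  parse: async (raw: string) => JSON.parse(raw),
  jsonFormatInstruction: "Reply with a single JSON object and nothing else.",
  isMatched: (systemPrompt: string) => systemPrompt.includes("single JSON object"),
};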
package/src/shared/llm-proxy/llm-proxy.service.ts
@@ -0,0 +1,132 @@
+ import { Injectable, Inject } from "@nestjs/common";
+ import type { LlmAdapter } from "./adapters";
+ import { ClaudeCodeAdapter } from "./adapters/claude-code.adapter";
+ import { OpenAIAdapter } from "./adapters/openai.adapter";
+ import { OpenCodeAdapter } from "./adapters/open-code.adapter";
+ import { LlmSessionImpl } from "./llm-session";
+ import type {
+   LlmMessage,
+   LlmRequestOptions,
+   LlmResponse,
+   LlmSession,
+   SessionOptions,
+   LlmProxyConfig,
+   LLMMode,
+ } from "./interfaces";
+ import type { LlmJsonPut } from "../llm-jsonput";
+
+ export interface ChatOptions extends LlmRequestOptions {
+   adapter?: LLMMode;
+ }
+
+ @Injectable()
+ export class LlmProxyService {
+   private adapters: Map<LLMMode, LlmAdapter> = new Map();
+
+   constructor(
+     @Inject("LLM_PROXY_CONFIG") private readonly config: LlmProxyConfig,
+     private readonly claudeCodeAdapter: ClaudeCodeAdapter,
+     private readonly openaiAdapter: OpenAIAdapter,
+     private readonly openCodeAdapter: OpenCodeAdapter,
+   ) {
+     this.adapters.set("claude-code", claudeCodeAdapter);
+     this.adapters.set("openai", openaiAdapter);
+     this.adapters.set("open-code", openCodeAdapter);
+   }
+
+   createSession(adapterType?: LLMMode, options?: SessionOptions): LlmSession {
+     const type = adapterType || this.getDefaultAdapterType();
+     const adapter = this.getAdapter(type);
+
+     return new LlmSessionImpl(adapter, options);
+   }
+
+   async chat(messages: LlmMessage[], options?: ChatOptions): Promise<LlmResponse> {
+     const adapterType = options?.adapter || this.getDefaultAdapterType();
+     const adapter = this.getAdapter(adapterType);
+
+     if (!adapter.isSupportJsonSchema() && options?.jsonSchema) {
+       messages = this.appendJsonSchemaSystemPrompt(messages, options.jsonSchema);
+     }
+
+     const response = await adapter.chat(messages, options);
+
+     if (options?.jsonSchema && response.content && !response.structuredOutput) {
+       response.structuredOutput = await options.jsonSchema.parse(response.content);
+     }
+
+     return response;
+   }
+
+   async *chatStream(
+     messages: LlmMessage[],
+     options?: ChatOptions,
+   ): AsyncIterable<import("./interfaces").LlmStreamEvent> {
+     const adapterType = options?.adapter || this.getDefaultAdapterType();
+     const adapter = this.getAdapter(adapterType);
+
+     if (!adapter.isSupportJsonSchema() && options?.jsonSchema) {
+       messages = this.appendJsonSchemaSystemPrompt(messages, options.jsonSchema);
+     }
+
+     for await (const event of adapter.chatStream(messages, options)) {
+       if (
+         event.type === "result" &&
+         options?.jsonSchema &&
+         event.response.content &&
+         !event.response.structuredOutput
+       ) {
+         try {
+           event.response.structuredOutput = await options.jsonSchema.parse(event.response.content);
+         } catch (error: any) {
+           // JSON parsing failed; leave structuredOutput undefined
+           console.error("[LLMProxyService.chatStream] JSON parse failed:", error);
+         }
+       }
+       yield event;
+     }
+   }
+
+   appendJsonSchemaSystemPrompt(messages: LlmMessage[], jsonSchema: LlmJsonPut): LlmMessage[] {
+     const systemMsg = messages.find((msg) => msg.role === "system");
+     if (jsonSchema.isMatched(systemMsg?.content || "")) {
+       return messages;
+     }
+     if (systemMsg) {
+       systemMsg.content += `\n\n${jsonSchema.jsonFormatInstruction}`;
+     } else {
+       messages.unshift({ role: "system", content: jsonSchema.jsonFormatInstruction });
+     }
+     return messages;
+   }
+
+   getAvailableAdapters(): LLMMode[] {
+     const available: LLMMode[] = [];
+
+     for (const [type, adapter] of this.adapters) {
+       if (adapter.isConfigured()) {
+         available.push(type);
+       }
+     }
+
+     return available;
+   }
+
+   private getDefaultAdapterType(): LLMMode {
+     return this.config.defaultAdapter || "openai";
+   }
+
+   private getAdapter(type: LLMMode): LlmAdapter {
+     const adapter = this.adapters.get(type);
+
+     if (!adapter) {
+       throw new Error(`[LLMProxy.getAdapter] Unsupported LLM type: ${type}`);
+     }
+
+     if (!adapter.isConfigured()) {
+       throw new Error(`[LLMProxy.getAdapter] Adapter "${type}" is not configured`);
+     }
+
+     return adapter;
+   }
+ }
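
For orientation, a minimal consumer sketch of the surface shown above: one-shot chat with an explicit adapter, plus a streaming variant. The DiffSummarizer class is hypothetical; it assumes LlmProxyService is provided through Nest DI with the "openai" adapter configured (the wiring lives in llm-proxy.module.ts, listed above but not expanded in this diff).

import { Injectable } from "@nestjs/common";
import { LlmProxyService } from "./llm-proxy.service";

// Hypothetical consumer; assumes Nest DI provides LlmProxyService
// with the "openai" adapter configured.
@Injectable()
export class DiffSummarizer {
  constructor(private readonly llm: LlmProxyService) {}

  async summarize(diff: string): Promise<string> {
    const response = await this.llm.chat(
      [{ role: "user", content: `Summarize this diff:\n${diff}` }],
      { adapter: "openai" },
    );
    return response.content;
  }

  // Streaming variant: forward text chunks as they arrive.
  async *summarizeStream(diff: string): AsyncIterable<string> {
    for await (const event of this.llm.chatStream([{ role: "user", content: diff }])) {
      if (event.type === "text") yield event.content;
    }
  }
}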
package/src/shared/llm-proxy/llm-session.spec.ts
@@ -0,0 +1,111 @@
+ import { vi, type Mocked } from "vitest";
+ import { LlmSessionImpl } from "./llm-session";
+ import { LlmAdapter } from "./adapters";
+ import { LlmStreamEvent } from "./interfaces";
+
+ describe("LlmSessionImpl", () => {
+   let adapter: Mocked<LlmAdapter>;
+   let session: LlmSessionImpl;
+
+   beforeEach(() => {
+     adapter = {
+       name: "test-adapter",
+       chat: vi.fn(),
+       chatStream: vi.fn(),
+       isConfigured: vi.fn().mockReturnValue(true),
+     } as any;
+
+     session = new LlmSessionImpl(adapter);
+   });
+
+   it("should initialize with a random UUID and adapter name", () => {
+     expect(session.id).toBeDefined();
+     expect(session.adapterName).toBe("test-adapter");
+   });
+
+   it("should initialize with session options", () => {
+     const options = {
+       systemPrompt: "You are a helper",
+       model: "gpt-4",
+       verbose: 1 as const,
+     };
+     const sessionWithOptions = new LlmSessionImpl(adapter, options);
+
+     // Testing private state via history building and chat
+     expect(sessionWithOptions.getHistory()).toEqual([]);
+   });
+
+   describe("send", () => {
+     it("should add messages to history and call adapter.chat", async () => {
+       const mockResponse = { content: "Hello there!", role: "assistant" };
+       adapter.chat.mockResolvedValue(mockResponse as any);
+
+       const response = await session.send("Hi");
+
+       expect(response).toEqual(mockResponse);
+       const history = session.getHistory();
+       expect(history).toHaveLength(2);
+       expect(history[0]).toEqual({ role: "user", content: "Hi" });
+       expect(history[1]).toEqual({ role: "assistant", content: "Hello there!" });
+
+       expect(adapter.chat).toHaveBeenCalledWith(
+         [{ role: "user", content: "Hi" }],
+         expect.objectContaining({ model: undefined, verbose: 0 }),
+       );
+     });
+
+     it("should include system prompt in messages sent to adapter", async () => {
+       session.setSystemPrompt("System instruction");
+       adapter.chat.mockResolvedValue({ content: "OK", role: "assistant" } as any);
+
+       await session.send("Hi");
+
+       expect(adapter.chat).toHaveBeenCalledWith(
+         [
+           { role: "system", content: "System instruction" },
+           { role: "user", content: "Hi" },
+         ],
+         expect.anything(),
+       );
+     });
+   });
+
+   describe("sendStream", () => {
+     it("should handle streaming events and update history", async () => {
+       const mockStream = (async function* () {
+         yield { type: "text", content: "Hello" } as LlmStreamEvent;
+         yield { type: "text", content: " world" } as LlmStreamEvent;
+         yield {
+           type: "result",
+           response: { content: "Hello world", role: "assistant" },
+         } as LlmStreamEvent;
+       })();
+       adapter.chatStream.mockReturnValue(mockStream as any);
+
+       const events: LlmStreamEvent[] = [];
+       for await (const event of session.sendStream("Hi")) {
+         events.push(event);
+       }
+
+       expect(events).toHaveLength(3);
+       const history = session.getHistory();
+       expect(history).toHaveLength(2);
+       expect(history[1].content).toBe("Hello world");
+     });
+   });
+
+   describe("history management", () => {
+     it("should clear history", () => {
+       // Manually trigger a message (using send to populate)
+       // We'll mock it to be simple
+       session.clearHistory();
+       expect(session.getHistory()).toEqual([]);
+     });
+
+     it("should return a copy of history", () => {
+       const history = session.getHistory();
+       history.push({ role: "user", content: "modified" });
+       expect(session.getHistory()).toEqual([]);
+     });
+   });
+ });
package/src/shared/llm-proxy/llm-session.ts
@@ -0,0 +1,109 @@
+ import { randomUUID } from "crypto";
+ import type { LlmAdapter } from "./adapters";
+ import type {
+   LlmMessage,
+   LlmRequestOptions,
+   LlmResponse,
+   LlmStreamEvent,
+   LlmSession,
+   SessionOptions,
+ } from "./interfaces";
+ import { type VerboseLevel, normalizeVerbose } from "../verbose";
+
+ export class LlmSessionImpl implements LlmSession {
+   readonly id: string;
+   readonly adapterName: string;
+
+   private history: LlmMessage[] = [];
+   private systemPrompt: string = "";
+   private defaultModel?: string;
+   private verbose: VerboseLevel = 0;
+
+   constructor(
+     private readonly adapter: LlmAdapter,
+     options?: SessionOptions,
+   ) {
+     this.id = randomUUID();
+     this.adapterName = adapter.name;
+
+     if (options?.systemPrompt) {
+       this.systemPrompt = options.systemPrompt;
+     }
+     if (options?.model) {
+       this.defaultModel = options.model;
+     }
+     if (options?.verbose !== undefined) {
+       this.verbose = normalizeVerbose(options.verbose);
+     }
+   }
+
+   async send(content: string, options?: LlmRequestOptions): Promise<LlmResponse> {
+     const userMessage: LlmMessage = { role: "user", content };
+     this.history.push(userMessage);
+
+     const messages = this.buildMessages();
+     const mergedOptions = this.mergeOptions(options);
+
+     const response = await this.adapter.chat(messages, mergedOptions);
+
+     const assistantMessage: LlmMessage = { role: "assistant", content: response.content };
+     this.history.push(assistantMessage);
+
+     return response;
+   }
+
+   async *sendStream(content: string, options?: LlmRequestOptions): AsyncIterable<LlmStreamEvent> {
+     const userMessage: LlmMessage = { role: "user", content };
+     this.history.push(userMessage);
+
+     const messages = this.buildMessages();
+     const mergedOptions = this.mergeOptions(options);
+
+     let fullContent = "";
+
+     for await (const event of this.adapter.chatStream(messages, mergedOptions)) {
+       yield event;
+
+       if (event.type === "text") {
+         fullContent += event.content;
+       } else if (event.type === "result") {
+         fullContent = event.response.content;
+       }
+     }
+
+     const assistantMessage: LlmMessage = { role: "assistant", content: fullContent };
+     this.history.push(assistantMessage);
+   }
+
+   getHistory(): LlmMessage[] {
+     return [...this.history];
+   }
+
+   clearHistory(): void {
+     this.history = [];
+   }
+
+   setSystemPrompt(prompt: string): void {
+     this.systemPrompt = prompt;
+   }
+
+   private buildMessages(): LlmMessage[] {
+     const messages: LlmMessage[] = [];
+
+     if (this.systemPrompt) {
+       messages.push({ role: "system", content: this.systemPrompt });
+     }
+
+     messages.push(...this.history);
+
+     return messages;
+   }
+
+   private mergeOptions(options?: LlmRequestOptions): LlmRequestOptions {
+     return {
+       model: options?.model || this.defaultModel,
+       verbose: options?.verbose ?? this.verbose,
+       ...options,
+     };
+   }
+ }
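
To close the loop, a minimal sketch of driving a session directly. The adapter constant is a stand-in for any configured LlmAdapter implementation; in practice LlmProxyService.createSession constructs the session for you.

import { LlmSessionImpl } from "./llm-session";
import type { LlmAdapter } from "./adapters";

// Stand-in for a configured adapter instance; construction details
// depend on the adapter and are not shown in this diff.
declare const adapter: LlmAdapter;

async function demo(): Promise<void> {
  const session = new LlmSessionImpl(adapter, { systemPrompt: "You are concise." });

  const first = await session.send("Name three TypeScript compiler flags.");
  console.log(first.content);

  // The session replays its history on every call, so the follow-up
  // carries the previous turns automatically.
  const second = await session.send("Explain the first one.");
  console.log(second.content);
  console.log(session.getHistory().length); // 4: two user turns, two assistant turns
}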