@spaceflow/core 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116) hide show
  1. package/CHANGELOG.md +1176 -0
  2. package/README.md +105 -0
  3. package/nest-cli.json +10 -0
  4. package/package.json +128 -0
  5. package/rspack.config.mjs +62 -0
  6. package/src/__mocks__/@opencode-ai/sdk.js +9 -0
  7. package/src/__mocks__/c12.ts +3 -0
  8. package/src/app.module.ts +18 -0
  9. package/src/config/ci.config.ts +29 -0
  10. package/src/config/config-loader.ts +101 -0
  11. package/src/config/config-reader.module.ts +16 -0
  12. package/src/config/config-reader.service.ts +133 -0
  13. package/src/config/feishu.config.ts +35 -0
  14. package/src/config/git-provider.config.ts +29 -0
  15. package/src/config/index.ts +29 -0
  16. package/src/config/llm.config.ts +110 -0
  17. package/src/config/schema-generator.service.ts +129 -0
  18. package/src/config/spaceflow.config.ts +292 -0
  19. package/src/config/storage.config.ts +33 -0
  20. package/src/extension-system/extension.interface.ts +221 -0
  21. package/src/extension-system/index.ts +1 -0
  22. package/src/index.ts +80 -0
  23. package/src/locales/en/translation.json +11 -0
  24. package/src/locales/zh-cn/translation.json +11 -0
  25. package/src/shared/claude-setup/claude-setup.module.ts +8 -0
  26. package/src/shared/claude-setup/claude-setup.service.ts +131 -0
  27. package/src/shared/claude-setup/index.ts +2 -0
  28. package/src/shared/editor-config/index.ts +23 -0
  29. package/src/shared/feishu-sdk/feishu-sdk.module.ts +77 -0
  30. package/src/shared/feishu-sdk/feishu-sdk.service.ts +130 -0
  31. package/src/shared/feishu-sdk/fieshu-card.service.ts +139 -0
  32. package/src/shared/feishu-sdk/index.ts +4 -0
  33. package/src/shared/feishu-sdk/types/card-action.ts +132 -0
  34. package/src/shared/feishu-sdk/types/card.ts +64 -0
  35. package/src/shared/feishu-sdk/types/common.ts +22 -0
  36. package/src/shared/feishu-sdk/types/index.ts +46 -0
  37. package/src/shared/feishu-sdk/types/message.ts +35 -0
  38. package/src/shared/feishu-sdk/types/module.ts +21 -0
  39. package/src/shared/feishu-sdk/types/user.ts +77 -0
  40. package/src/shared/git-provider/adapters/gitea.adapter.spec.ts +473 -0
  41. package/src/shared/git-provider/adapters/gitea.adapter.ts +499 -0
  42. package/src/shared/git-provider/adapters/github.adapter.spec.ts +341 -0
  43. package/src/shared/git-provider/adapters/github.adapter.ts +830 -0
  44. package/src/shared/git-provider/adapters/gitlab.adapter.ts +839 -0
  45. package/src/shared/git-provider/adapters/index.ts +3 -0
  46. package/src/shared/git-provider/detect-provider.spec.ts +195 -0
  47. package/src/shared/git-provider/detect-provider.ts +112 -0
  48. package/src/shared/git-provider/git-provider.interface.ts +188 -0
  49. package/src/shared/git-provider/git-provider.module.ts +73 -0
  50. package/src/shared/git-provider/git-provider.service.spec.ts +282 -0
  51. package/src/shared/git-provider/git-provider.service.ts +309 -0
  52. package/src/shared/git-provider/index.ts +7 -0
  53. package/src/shared/git-provider/parse-repo-url.spec.ts +221 -0
  54. package/src/shared/git-provider/parse-repo-url.ts +155 -0
  55. package/src/shared/git-provider/types.ts +434 -0
  56. package/src/shared/git-sdk/git-sdk-diff.utils.spec.ts +344 -0
  57. package/src/shared/git-sdk/git-sdk-diff.utils.ts +151 -0
  58. package/src/shared/git-sdk/git-sdk.module.ts +8 -0
  59. package/src/shared/git-sdk/git-sdk.service.ts +235 -0
  60. package/src/shared/git-sdk/git-sdk.types.ts +25 -0
  61. package/src/shared/git-sdk/index.ts +4 -0
  62. package/src/shared/i18n/i18n.spec.ts +96 -0
  63. package/src/shared/i18n/i18n.ts +86 -0
  64. package/src/shared/i18n/index.ts +1 -0
  65. package/src/shared/i18n/locale-detect.ts +134 -0
  66. package/src/shared/llm-jsonput/index.ts +94 -0
  67. package/src/shared/llm-jsonput/types.ts +17 -0
  68. package/src/shared/llm-proxy/adapters/claude-code.adapter.spec.ts +131 -0
  69. package/src/shared/llm-proxy/adapters/claude-code.adapter.ts +208 -0
  70. package/src/shared/llm-proxy/adapters/index.ts +4 -0
  71. package/src/shared/llm-proxy/adapters/llm-adapter.interface.ts +23 -0
  72. package/src/shared/llm-proxy/adapters/open-code.adapter.ts +342 -0
  73. package/src/shared/llm-proxy/adapters/openai.adapter.spec.ts +215 -0
  74. package/src/shared/llm-proxy/adapters/openai.adapter.ts +153 -0
  75. package/src/shared/llm-proxy/index.ts +6 -0
  76. package/src/shared/llm-proxy/interfaces/config.interface.ts +32 -0
  77. package/src/shared/llm-proxy/interfaces/index.ts +4 -0
  78. package/src/shared/llm-proxy/interfaces/message.interface.ts +48 -0
  79. package/src/shared/llm-proxy/interfaces/session.interface.ts +28 -0
  80. package/src/shared/llm-proxy/llm-proxy.module.ts +140 -0
  81. package/src/shared/llm-proxy/llm-proxy.service.spec.ts +303 -0
  82. package/src/shared/llm-proxy/llm-proxy.service.ts +132 -0
  83. package/src/shared/llm-proxy/llm-session.spec.ts +111 -0
  84. package/src/shared/llm-proxy/llm-session.ts +109 -0
  85. package/src/shared/llm-proxy/stream-logger.ts +97 -0
  86. package/src/shared/logger/index.ts +11 -0
  87. package/src/shared/logger/logger.interface.ts +93 -0
  88. package/src/shared/logger/logger.spec.ts +178 -0
  89. package/src/shared/logger/logger.ts +175 -0
  90. package/src/shared/logger/renderers/plain.renderer.ts +116 -0
  91. package/src/shared/logger/renderers/tui.renderer.ts +162 -0
  92. package/src/shared/mcp/index.ts +332 -0
  93. package/src/shared/output/index.ts +2 -0
  94. package/src/shared/output/output.module.ts +9 -0
  95. package/src/shared/output/output.service.ts +97 -0
  96. package/src/shared/package-manager/index.ts +115 -0
  97. package/src/shared/parallel/index.ts +1 -0
  98. package/src/shared/parallel/parallel-executor.ts +169 -0
  99. package/src/shared/rspack-config/index.ts +1 -0
  100. package/src/shared/rspack-config/rspack-config.ts +157 -0
  101. package/src/shared/source-utils/index.ts +130 -0
  102. package/src/shared/spaceflow-dir/index.ts +158 -0
  103. package/src/shared/storage/adapters/file.adapter.ts +113 -0
  104. package/src/shared/storage/adapters/index.ts +3 -0
  105. package/src/shared/storage/adapters/memory.adapter.ts +50 -0
  106. package/src/shared/storage/adapters/storage-adapter.interface.ts +48 -0
  107. package/src/shared/storage/index.ts +4 -0
  108. package/src/shared/storage/storage.module.ts +150 -0
  109. package/src/shared/storage/storage.service.ts +293 -0
  110. package/src/shared/storage/types.ts +51 -0
  111. package/src/shared/verbose/index.ts +73 -0
  112. package/test/app.e2e-spec.ts +22 -0
  113. package/tsconfig.build.json +4 -0
  114. package/tsconfig.json +25 -0
  115. package/tsconfig.skill.json +18 -0
  116. package/vitest.config.ts +58 -0
@@ -0,0 +1,131 @@
1
// Unit tests for ClaudeCodeAdapter: DI wiring, configuration detection,
// and the chatStream event contract (success, EPIPE recovery, error propagation).
import { vi, type Mocked, type Mock } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { ClaudeCodeAdapter } from "./claude-code.adapter";
import { ClaudeSetupService } from "../../claude-setup";
import { LlmStreamEvent } from "../interfaces";

// vi.mock is hoisted by vitest above all imports, so the `query` import
// below receives the mocked module even though it appears after this call.
vi.mock("@anthropic-ai/claude-agent-sdk", () => ({
  query: vi.fn(),
}));

import { query } from "@anthropic-ai/claude-agent-sdk";

describe("ClaudeAdapter", () => {
  let adapter: ClaudeCodeAdapter;
  let claudeSetupService: Mocked<ClaudeSetupService>;

  // Minimal LLM_PROXY_CONFIG with a claudeCode section so isConfigured() is true.
  const mockConfig = {
    claudeCode: {
      model: "claude-3-5-sonnet",
      baseUrl: "https://api.anthropic.com",
    },
  };

  beforeEach(async () => {
    // Stub out ClaudeSetupService so no real config files are touched.
    const mockSetup = {
      configure: vi.fn().mockResolvedValue(undefined),
      backup: vi.fn().mockResolvedValue(undefined),
      restore: vi.fn().mockResolvedValue(undefined),
      withTemporaryConfig: vi.fn(),
    };

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ClaudeCodeAdapter,
        { provide: "LLM_PROXY_CONFIG", useValue: mockConfig },
        { provide: ClaudeSetupService, useValue: mockSetup },
      ],
    }).compile();

    adapter = module.get<ClaudeCodeAdapter>(ClaudeCodeAdapter);
    claudeSetupService = module.get(ClaudeSetupService);
  });

  it("should be defined", () => {
    expect(adapter).toBeDefined();
    expect(adapter.name).toBe("claude-code");
  });

  describe("isConfigured", () => {
    it("should return true if claude config exists", () => {
      expect(adapter.isConfigured()).toBe(true);
    });

    it("should return false if claude config is missing", async () => {
      // Fresh module with an empty config object: no claudeCode section present.
      const module: TestingModule = await Test.createTestingModule({
        providers: [
          ClaudeCodeAdapter,
          { provide: "LLM_PROXY_CONFIG", useValue: {} },
          { provide: ClaudeSetupService, useValue: { configure: vi.fn() } },
        ],
      }).compile();
      const unconfiguredAdapter = module.get<ClaudeCodeAdapter>(ClaudeCodeAdapter);
      expect(unconfiguredAdapter.isConfigured()).toBe(false);
    });
  });

  describe("chatStream", () => {
    it("should call claudeSetupService.configure and query with correct params", async () => {
      const messages = [{ role: "user", content: "hello" }] as any;
      // Emulate the SDK's async-iterable response: one assistant chunk, then a result.
      const mockQueryResponse = (async function* () {
        yield { type: "assistant", message: { content: "hi" } };
        yield { type: "result", subtype: "success", response: { content: "hi" } };
      })();
      (query as Mock).mockReturnValue(mockQueryResponse);

      const stream = adapter.chatStream(messages);
      const events: LlmStreamEvent[] = [];
      for await (const event of stream) {
        events.push(event);
      }

      expect(claudeSetupService.configure).toHaveBeenCalled();
      expect(query).toHaveBeenCalledWith(
        expect.objectContaining({
          prompt: "hello",
          options: expect.objectContaining({
            model: "claude-3-5-sonnet",
          }),
        }),
      );
      expect(events).toContainEqual({ type: "text", content: "hi" });
    });

    it("should handle EPIPE error during stream", async () => {
      const messages = [{ role: "user", content: "hello" }] as any;
      const epipeError = new Error("EPIPE: broken pipe");
      (epipeError as any).code = "EPIPE";

      (query as Mock).mockImplementation(() => {
        throw epipeError;
      });

      // EPIPE must not propagate: the adapter converts it into an error event.
      const stream = adapter.chatStream(messages);
      const events: LlmStreamEvent[] = [];
      for await (const event of stream) {
        events.push(event);
      }

      expect(events[0]).toMatchObject({
        type: "error",
        message: expect.stringContaining("连接中断 (EPIPE)"),
      });
    });

    it("should throw other errors during stream", async () => {
      const messages = [{ role: "user", content: "hello" }] as any;
      const unexpectedError = new Error("Unexpected error");

      (query as Mock).mockImplementation(() => {
        throw unexpectedError;
      });

      // Non-EPIPE failures are rethrown to the consumer of the stream.
      const stream = adapter.chatStream(messages);
      await expect(async () => {
        for await (const _ of stream) {
          // ignore
        }
      }).rejects.toThrow("Unexpected error");
    });
  });
});
@@ -0,0 +1,208 @@
1
+ import { Injectable, Inject } from "@nestjs/common";
2
+ import { query, type SpawnOptions } from "@anthropic-ai/claude-agent-sdk";
3
+ import { spawn } from "child_process";
4
+ import type { LlmAdapter } from "./llm-adapter.interface";
5
+ import type {
6
+ LlmMessage,
7
+ LlmRequestOptions,
8
+ LlmResponse,
9
+ LlmStreamEvent,
10
+ LlmProxyConfig,
11
+ } from "../interfaces";
12
+ import { ClaudeSetupService } from "../../claude-setup";
13
+ import { shouldLog } from "../../verbose";
14
+
15
+ @Injectable()
16
+ export class ClaudeCodeAdapter implements LlmAdapter {
17
+ readonly name = "claude-code";
18
+
19
+ constructor(
20
+ @Inject("LLM_PROXY_CONFIG") private readonly config: LlmProxyConfig,
21
+ private readonly claudeSetupService: ClaudeSetupService,
22
+ ) {}
23
+
24
+ isConfigured(): boolean {
25
+ return !!this.config.claudeCode;
26
+ }
27
+
28
+ async chat(messages: LlmMessage[], options?: LlmRequestOptions): Promise<LlmResponse> {
29
+ let result: LlmResponse = { content: "" };
30
+
31
+ for await (const event of this.chatStream(messages, options)) {
32
+ if (event.type === "result") {
33
+ result = event.response;
34
+ } else if (event.type === "error") {
35
+ throw new Error(event.message);
36
+ }
37
+ }
38
+
39
+ return result;
40
+ }
41
+
42
+ async *chatStream(
43
+ messages: LlmMessage[],
44
+ options?: LlmRequestOptions,
45
+ ): AsyncIterable<LlmStreamEvent> {
46
+ // 备份原有配置
47
+ await this.claudeSetupService.backup();
48
+
49
+ try {
50
+ // 应用临时配置
51
+ await this.claudeSetupService.configure(options?.verbose);
52
+
53
+ const claudeConf = this.config.claudeCode;
54
+
55
+ if (!claudeConf) {
56
+ yield {
57
+ type: "error",
58
+ message: "[LLMProxy.ClaudeCodeAdapter.chatStream] 未配置 claude 设置",
59
+ };
60
+ return;
61
+ }
62
+
63
+ const model = options?.model || claudeConf.model || "claude-sonnet-4-5";
64
+ const systemPrompt = this.extractSystemPrompt(messages);
65
+ const userPrompt = this.extractUserPrompt(messages);
66
+
67
+ if (shouldLog(options?.verbose, 1)) {
68
+ console.log(
69
+ `[LLMProxy.ClaudeCodeAdapter.chatStream] 配置: Model=${model}, BaseURL=${claudeConf.baseUrl || "(默认)"}`,
70
+ );
71
+ }
72
+
73
+ const handleUncaughtError = (err: Error) => {
74
+ if ((err as any).code === "EPIPE") {
75
+ console.error(
76
+ "[LLMProxy.ClaudeCodeAdapter.chatStream] EPIPE 错误: Claude CLI 子进程意外退出",
77
+ );
78
+ throw err;
79
+ }
80
+ };
81
+ process.on("uncaughtException", handleUncaughtError);
82
+
83
+ try {
84
+ const spawnEnv = { ...process.env };
85
+ if (claudeConf.baseUrl) spawnEnv.ANTHROPIC_BASE_URL = claudeConf.baseUrl;
86
+ if (claudeConf.authToken) spawnEnv.ANTHROPIC_AUTH_TOKEN = claudeConf.authToken;
87
+
88
+ const spawnClaudeCodeProcess = (spawnOptions: SpawnOptions) => {
89
+ if (shouldLog(options?.verbose, 2)) {
90
+ console.log(
91
+ `[LLMProxy.ClaudeCodeAdapter.chatStream] Spawning: ${spawnOptions.command} ${spawnOptions.args?.join(" ")}`,
92
+ );
93
+ }
94
+ const child = spawn(spawnOptions.command, spawnOptions.args || [], {
95
+ ...spawnOptions,
96
+ stdio: ["pipe", "pipe", "pipe"],
97
+ env: spawnEnv,
98
+ });
99
+
100
+ child.stderr?.on("data", (data) => {
101
+ console.error(`[LLMProxy.ClaudeCodeAdapter.chatStream] CLI stderr: ${data.toString()}`);
102
+ });
103
+
104
+ return child;
105
+ };
106
+
107
+ const queryOptions: Parameters<typeof query>[0]["options"] = {
108
+ model,
109
+ systemPrompt,
110
+ permissionMode: "default",
111
+ spawnClaudeCodeProcess,
112
+ };
113
+
114
+ if (options?.allowedTools?.length) {
115
+ queryOptions.allowedTools = options.allowedTools as any;
116
+ }
117
+
118
+ if (options?.jsonSchema) {
119
+ queryOptions.outputFormat = {
120
+ type: "json_schema",
121
+ schema: options.jsonSchema.getSchema(),
122
+ };
123
+ }
124
+
125
+ const response = query({
126
+ prompt: userPrompt,
127
+ options: queryOptions,
128
+ });
129
+
130
+ let finalContent = "";
131
+ let structuredOutput: unknown = undefined;
132
+
133
+ for await (const message of response) {
134
+ if (message.type === "assistant") {
135
+ const content = message.message.content;
136
+ if (typeof content === "string") {
137
+ yield { type: "text", content };
138
+ finalContent += content;
139
+ } else if (Array.isArray(content)) {
140
+ for (const block of content) {
141
+ if (block.type === "text") {
142
+ yield { type: "text", content: block.text };
143
+ finalContent += block.text;
144
+ } else if (block.type === "tool_use") {
145
+ yield { type: "tool_use", name: block.name, input: block.input };
146
+ } else if (block.type === ("thought" as any)) {
147
+ yield { type: "thought", content: (block as any).thought };
148
+ }
149
+ }
150
+ }
151
+ }
152
+
153
+ if (message.type === "result") {
154
+ if (message.subtype === "success") {
155
+ if (message.structured_output) {
156
+ structuredOutput = message.structured_output;
157
+ }
158
+ yield {
159
+ type: "result",
160
+ response: {
161
+ content: finalContent,
162
+ structuredOutput,
163
+ },
164
+ };
165
+ } else {
166
+ yield {
167
+ type: "error",
168
+ message: `[LLMProxy.ClaudeCodeAdapter.chatStream] ${message.errors?.join(", ") || "未知错误"}`,
169
+ };
170
+ }
171
+ }
172
+ }
173
+ } catch (error: any) {
174
+ if (error?.code === "EPIPE" || error?.message?.includes("EPIPE")) {
175
+ yield {
176
+ type: "error",
177
+ message:
178
+ "[LLMProxy.ClaudeCodeAdapter.chatStream] 连接中断 (EPIPE)。请检查:\n" +
179
+ "1. ANTHROPIC_AUTH_TOKEN 环境变量是否正确设置\n" +
180
+ "2. ANTHROPIC_BASE_URL 是否与 Claude Agent SDK 兼容\n" +
181
+ "3. Claude CLI 是否已正确安装",
182
+ };
183
+ } else {
184
+ throw error;
185
+ }
186
+ } finally {
187
+ process.removeListener("uncaughtException", handleUncaughtError);
188
+ }
189
+ } finally {
190
+ // 恢复原有配置
191
+ await this.claudeSetupService.restore();
192
+ }
193
+ }
194
+
195
+ private extractSystemPrompt(messages: LlmMessage[]): string {
196
+ const systemMessage = messages.find((m) => m.role === "system");
197
+ return systemMessage?.content || "";
198
+ }
199
+
200
+ private extractUserPrompt(messages: LlmMessage[]): string {
201
+ const userMessages = messages.filter((m) => m.role === "user");
202
+ return userMessages.map((m) => m.content).join("\n\n");
203
+ }
204
+
205
+ isSupportJsonSchema(): boolean {
206
+ return true;
207
+ }
208
+ }
@@ -0,0 +1,4 @@
1
// Barrel file: re-exports the adapter contract and every concrete LLM adapter.
export * from "./llm-adapter.interface";
export * from "./claude-code.adapter";
export * from "./openai.adapter";
export * from "./open-code.adapter";
@@ -0,0 +1,23 @@
1
import type { LlmMessage, LlmRequestOptions, LlmResponse, LlmStreamEvent } from "../interfaces";
import type { VerboseLevel } from "../../verbose";

/**
 * Common connection/model settings shared by concrete adapter configs.
 */
export interface LlmAdapterConfig {
  // Model identifier; adapters fall back to their own default when omitted.
  model?: string;
  // API endpoint override; adapters use the provider default when omitted.
  baseUrl?: string;
  apiKey?: string;
  // Logging verbosity threshold for adapter diagnostics.
  verbose?: VerboseLevel;
}

/**
 * Contract every LLM backend adapter implements so LlmProxyService can
 * drive them interchangeably.
 */
export interface LlmAdapter {
  /** Stable adapter identifier, e.g. "claude-code" or "open-code". */
  readonly name: string;

  /** One-shot chat: resolves with the final response or rejects on a stream error. */
  chat(messages: LlmMessage[], options?: LlmRequestOptions): Promise<LlmResponse>;

  /** Streaming chat: yields incremental events ending in a `result` or `error` event. */
  chatStream(messages: LlmMessage[], options?: LlmRequestOptions): AsyncIterable<LlmStreamEvent>;

  /** Whether this adapter has enough configuration to be used. */
  isConfigured(): boolean;

  /** Whether the backend can produce structured (JSON-schema) output. */
  isSupportJsonSchema(): boolean;
}

/** DI token under which the active LlmAdapter implementation is provided. */
export const LLM_ADAPTER = Symbol("LLM_ADAPTER");
@@ -0,0 +1,342 @@
1
+ import { Injectable, Inject } from "@nestjs/common";
2
+ import { createOpencode } from "@opencode-ai/sdk";
3
+ import type { LlmAdapter } from "./llm-adapter.interface";
4
+ import type {
5
+ LlmMessage,
6
+ LlmRequestOptions,
7
+ LlmResponse,
8
+ LlmStreamEvent,
9
+ LlmProxyConfig,
10
+ OpenCodeAdapterConfig,
11
+ } from "../interfaces";
12
+ import { shouldLog } from "../../verbose";
13
+
14
@Injectable()
export class OpenCodeAdapter implements LlmAdapter {
  readonly name = "open-code";

  constructor(@Inject("LLM_PROXY_CONFIG") private readonly config: LlmProxyConfig) {}

  // The adapter is usable only when the proxy config contains an `openCode` section.
  isConfigured(): boolean {
    return !!this.config.openCode;
  }

  /**
   * Non-streaming chat: drains chatStream and returns the final result event.
   * Throws when the stream emits an `error` event.
   */
  async chat(messages: LlmMessage[], options?: LlmRequestOptions): Promise<LlmResponse> {
    let result: LlmResponse = { content: "" };

    for await (const event of this.chatStream(messages, options)) {
      if (event.type === "result") {
        result = event.response;
      } else if (event.type === "error") {
        throw new Error(event.message);
      }
    }

    return result;
  }

  /**
   * Runs one prompt through an embedded OpenCode server.
   *
   * Flow: start a local OpenCode server on a random port, register API-key auth
   * for a custom provider, create a session, optionally send the system prompt
   * as a no-reply message, send the user prompt, then translate each response
   * part into an LlmStreamEvent. The server is always closed in `finally`.
   */
  async *chatStream(
    messages: LlmMessage[],
    options?: LlmRequestOptions,
  ): AsyncIterable<LlmStreamEvent> {
    const openCodeConf = this.config.openCode;

    if (!openCodeConf) {
      yield {
        type: "error",
        message: "[LLMProxy.OpenCodeAdapter.chatStream] 未配置 openCode 设置",
      };
      return;
    }

    // Normalize the model id to "provider/model" form.
    const providerID = openCodeConf.providerID || "openai";
    const configModel = options?.model || openCodeConf.model || "gpt-4o";
    const model = configModel.includes("/") ? configModel : `${providerID}/${configModel}`;

    if (shouldLog(options?.verbose, 1)) {
      console.log(
        `[LLMProxy.OpenCodeAdapter.chatStream] 配置: Model=${model}, ProviderID=${providerID}, BaseURL=${openCodeConf.baseUrl || "默认"}`,
      );
    }

    // Create the OpenCode instance (auto-starts a server; a dynamic port is
    // picked to reduce collisions).
    // NOTE(review): a random port can still collide with another process —
    // presumably createOpencode fails fast in that case; verify.
    let opencode: Awaited<ReturnType<typeof createOpencode>> | null = null;
    const port = 4096 + Math.floor(Math.random() * 1000);

    // Ensure the embedded server is shut down if the process exits mid-stream.
    // NOTE(review): registering SIGINT/SIGTERM listeners disables Node's
    // default terminate-on-signal behavior while they are installed — this
    // handler closes the server but does not exit; confirm that is intended.
    const cleanup = () => {
      if (opencode?.server) {
        opencode.server.close();
        opencode = null;
      }
    };
    process.once("exit", cleanup);
    process.once("SIGINT", cleanup);
    process.once("SIGTERM", cleanup);

    try {
      opencode = await createOpencode({
        port,
        config: this.buildOpenCodeConfig(openCodeConf, model),
      });

      const { client } = opencode;

      // Register provider auth under the custom provider id (see
      // buildOpenCodeConfig for why a custom provider is used).
      const customProviderID = "custom-openai";
      if (openCodeConf.apiKey) {
        await client.auth.set({
          path: { id: customProviderID },
          body: { type: "api", key: openCodeConf.apiKey },
        });
      }

      const session = await client.session.create({
        body: { title: `spaceflow-${Date.now()}` },
      });

      if (!session.data?.id) {
        yield {
          type: "error",
          message: "[LLMProxy.OpenCodeAdapter.chatStream] 创建 session 失败",
        };
        return;
      }

      const sessionId = session.data.id;
      const systemPrompt = this.extractSystemPrompt(messages);
      const userPrompt = this.extractUserPrompt(messages);

      // Seed the session with the system prompt without triggering a reply.
      if (systemPrompt) {
        await client.session.prompt({
          path: { id: sessionId },
          body: {
            noReply: true,
            parts: [{ type: "text", text: systemPrompt }],
          },
        });
      }

      // Extract the bare model id from "provider/model", but address it via
      // the custom provider id registered in buildOpenCodeConfig.
      const [, modelID] = model.includes("/") ? model.split("/", 2) : [customProviderID, model];

      if (shouldLog(options?.verbose, 2)) {
        console.log(
          `[LLMProxy.OpenCodeAdapter.chatStream] 发送 prompt: model=${customProviderID}/${modelID}, userPrompt长度=${userPrompt.length}`,
        );
      }

      const result = await client.session.prompt({
        path: { id: sessionId },
        body: {
          model: { providerID: customProviderID, modelID },
          parts: [{ type: "text", text: userPrompt }],
        },
      });

      if (shouldLog(options?.verbose, 2)) {
        console.log(
          `[LLMProxy.OpenCodeAdapter.chatStream] 完整响应对象:\n${JSON.stringify(result, null, 2)}`,
        );
        console.log(
          `[LLMProxy.OpenCodeAdapter.chatStream] result.data:\n${JSON.stringify(result.data, null, 2)}`,
        );
      }

      let finalContent = "";

      // Translate each response part into a stream event; only "text" parts
      // contribute to the final aggregated content.
      if (result.data?.parts) {
        for (const part of result.data.parts) {
          const partType = part.type;

          switch (partType) {
            case "text": {
              const text = (part as any).text || "";
              yield { type: "text", content: text };
              finalContent += text;
              break;
            }

            case "tool": {
              // Tool invocation (ToolPart).
              const toolPart = part as any;
              const state = toolPart.state || {};
              yield {
                type: "tool_use",
                name: toolPart.tool || "unknown",
                input: state.input || {},
                status: state.status,
                output: state.output,
                title: state.title,
              };
              break;
            }

            case "agent": {
              // Sub-agent invocation (AgentPart).
              const agentPart = part as any;
              yield {
                type: "agent",
                name: agentPart.name || "unknown",
                source: agentPart.source?.value,
              };
              break;
            }

            case "subtask": {
              // Sub-task spawned by the agent.
              const subtaskPart = part as any;
              yield {
                type: "subtask",
                agent: subtaskPart.agent,
                prompt: subtaskPart.prompt,
                description: subtaskPart.description,
              };
              break;
            }

            case "step-start": {
              yield {
                type: "step_start",
                snapshot: (part as any).snapshot,
              };
              break;
            }

            case "step-finish": {
              const stepPart = part as any;
              yield {
                type: "step_finish",
                reason: stepPart.reason,
                tokens: stepPart.tokens,
                cost: stepPart.cost,
              };
              break;
            }

            case "reasoning": {
              const reasoningPart = part as any;
              yield {
                type: "reasoning",
                content: reasoningPart.text || "",
              };
              break;
            }

            default:
              // Other part types (file, snapshot, patch, retry, compaction, …)
              // are intentionally not handled yet.
              if (shouldLog(options?.verbose, 2)) {
                console.log(
                  `[LLMProxy.OpenCodeAdapter.chatStream] 未处理的 part 类型: ${partType}`,
                );
              }
              break;
          }
        }
      }

      if (shouldLog(options?.verbose, 1) && !finalContent) {
        console.warn(
          `[LLMProxy.OpenCodeAdapter.chatStream] 警告: 响应内容为空,parts=${JSON.stringify(result.data?.parts)}`,
        );
      }

      yield {
        type: "result",
        response: {
          content: finalContent,
        },
      };

      // Best-effort session deletion; the server shutdown below covers failures.
      try {
        await client.session.delete({ path: { id: sessionId } });
      } catch {
        // ignore cleanup errors
      }
    } catch (error: any) {
      yield {
        type: "error",
        message:
          `[LLMProxy.OpenCodeAdapter.chatStream] 错误: ${error.message}\n` +
          `请检查:\n` +
          `1. baseUrl 配置是否正确\n` +
          `2. apiKey 是否有效\n` +
          `3. 模型配置是否有效`,
      };
    } finally {
      // Remove the process-level listeners registered above…
      process.removeListener("exit", cleanup);
      process.removeListener("SIGINT", cleanup);
      process.removeListener("SIGTERM", cleanup);
      // …and shut down the embedded server.
      cleanup();
    }
  }

  /**
   * Builds the OpenCode configuration for this run.
   *
   * A custom provider id ("custom-openai") is used instead of "openai" because
   * OpenCode chooses the SDK method by provider id: the builtin openai provider
   * calls sdk.responses(), while a custom provider goes through
   * @ai-sdk/openai-compatible (Chat Completions API, /chat/completions).
   */
  private buildOpenCodeConfig(
    openCodeConf: OpenCodeAdapterConfig,
    model: string,
  ): Record<string, any> {
    const customProviderID = "custom-openai";
    const [, modelID] = model.includes("/") ? model.split("/", 2) : [customProviderID, model];

    const config: Record<string, any> = {
      model: `${customProviderID}/${modelID}`,
      provider: {
        [customProviderID]: {
          npm: "@ai-sdk/openai-compatible",
          name: "Custom OpenAI Compatible",
        },
      },
    };

    // Point the provider at the configured endpoint, when given.
    if (openCodeConf.baseUrl) {
      config.provider[customProviderID].options = {
        baseURL: openCodeConf.baseUrl,
      };
    }

    // Register the model under the custom provider.
    // NOTE(review): capabilities (attachment/tool_call) and the 128k/16k limits
    // are hard-coded regardless of the actual model — confirm acceptable.
    config.provider[customProviderID].models = {
      [modelID]: {
        name: modelID,
        attachment: true,
        reasoning: false,
        temperature: true,
        tool_call: true,
        cost: {
          input: 0,
          output: 0,
        },
        limit: {
          context: 128000,
          output: 16000,
        },
      },
    };

    return config;
  }

  // Returns the content of the first system message, or "" when there is none.
  private extractSystemPrompt(messages: LlmMessage[]): string {
    const systemMessage = messages.find((m) => m.role === "system");
    return systemMessage?.content || "";
  }

  // Concatenates all user messages (blank-line separated) into a single prompt.
  private extractUserPrompt(messages: LlmMessage[]): string {
    const userMessages = messages.filter((m) => m.role === "user");
    return userMessages.map((m) => m.content).join("\n\n");
  }

  // Structured (JSON-schema) output is not supported by this adapter.
  isSupportJsonSchema(): boolean {
    return false;
  }
}