@spaceflow/review 0.29.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/CHANGELOG.md +533 -0
  2. package/README.md +124 -0
  3. package/dist/551.js +9 -0
  4. package/dist/index.js +5704 -0
  5. package/package.json +50 -0
  6. package/src/README.md +364 -0
  7. package/src/__mocks__/@anthropic-ai/claude-agent-sdk.js +3 -0
  8. package/src/__mocks__/json-stringify-pretty-compact.ts +4 -0
  9. package/src/deletion-impact.service.spec.ts +974 -0
  10. package/src/deletion-impact.service.ts +879 -0
  11. package/src/dto/mcp.dto.ts +42 -0
  12. package/src/index.ts +32 -0
  13. package/src/issue-verify.service.spec.ts +460 -0
  14. package/src/issue-verify.service.ts +309 -0
  15. package/src/locales/en/review.json +31 -0
  16. package/src/locales/index.ts +11 -0
  17. package/src/locales/zh-cn/review.json +31 -0
  18. package/src/parse-title-options.spec.ts +251 -0
  19. package/src/parse-title-options.ts +185 -0
  20. package/src/review-report/formatters/deletion-impact.formatter.ts +144 -0
  21. package/src/review-report/formatters/index.ts +4 -0
  22. package/src/review-report/formatters/json.formatter.ts +8 -0
  23. package/src/review-report/formatters/markdown.formatter.ts +291 -0
  24. package/src/review-report/formatters/terminal.formatter.ts +130 -0
  25. package/src/review-report/index.ts +4 -0
  26. package/src/review-report/review-report.module.ts +8 -0
  27. package/src/review-report/review-report.service.ts +58 -0
  28. package/src/review-report/types.ts +26 -0
  29. package/src/review-spec/index.ts +3 -0
  30. package/src/review-spec/review-spec.module.ts +10 -0
  31. package/src/review-spec/review-spec.service.spec.ts +1543 -0
  32. package/src/review-spec/review-spec.service.ts +902 -0
  33. package/src/review-spec/types.ts +143 -0
  34. package/src/review.command.ts +244 -0
  35. package/src/review.config.ts +58 -0
  36. package/src/review.mcp.ts +184 -0
  37. package/src/review.module.ts +52 -0
  38. package/src/review.service.spec.ts +3007 -0
  39. package/src/review.service.ts +2603 -0
  40. package/tsconfig.json +8 -0
  41. package/vitest.config.ts +34 -0
@@ -0,0 +1,42 @@
1
+ import {
2
+ ApiProperty,
3
+ ApiPropertyOptional,
4
+ IsString,
5
+ IsBoolean,
6
+ IsOptional,
7
+ t,
8
+ } from "@spaceflow/core";
9
+
10
+ export class ListRulesInput {
11
+ @ApiPropertyOptional({ description: t("review:mcp.dto.cwd") })
12
+ @IsString()
13
+ @IsOptional()
14
+ cwd?: string;
15
+ }
16
+
17
+ export class GetRulesForFileInput {
18
+ @ApiProperty({ description: t("review:mcp.dto.filePath") })
19
+ @IsString()
20
+ filePath!: string;
21
+
22
+ @ApiPropertyOptional({ description: t("review:mcp.dto.cwd") })
23
+ @IsString()
24
+ @IsOptional()
25
+ cwd?: string;
26
+
27
+ @ApiPropertyOptional({ description: t("review:mcp.dto.includeExamples") })
28
+ @IsBoolean()
29
+ @IsOptional()
30
+ includeExamples?: boolean;
31
+ }
32
+
33
+ export class GetRuleDetailInput {
34
+ @ApiProperty({ description: t("review:mcp.dto.ruleId") })
35
+ @IsString()
36
+ ruleId!: string;
37
+
38
+ @ApiPropertyOptional({ description: t("review:mcp.dto.cwd") })
39
+ @IsString()
40
+ @IsOptional()
41
+ cwd?: string;
42
+ }
package/src/index.ts ADDED
@@ -0,0 +1,32 @@
1
+ import "./locales";
2
+ import { SpaceflowExtension, SpaceflowExtensionMetadata, t } from "@spaceflow/core";
3
+ import { ReviewModule } from "./review.module";
4
+ import { reviewSchema } from "./review.config";
5
+ /** review Extension 元数据 */
6
+ export const reviewMetadata: SpaceflowExtensionMetadata = {
7
+ name: "review",
8
+ commands: ["review"],
9
+ configKey: "review",
10
+ configSchema: reviewSchema,
11
+ version: "1.0.0",
12
+ description: t("review:extensionDescription"),
13
+ };
14
+
15
+ export class ReviewExtension implements SpaceflowExtension {
16
+ getMetadata(): SpaceflowExtensionMetadata {
17
+ return reviewMetadata;
18
+ }
19
+
20
+ getModule() {
21
+ return ReviewModule;
22
+ }
23
+ }
24
+
25
+ export default ReviewExtension;
26
+
27
+ export * from "./review.module";
28
+ export * from "./review.command";
29
+ export * from "./review.service";
30
+ export * from "./review.mcp";
31
+ export * from "./issue-verify.service";
32
+ export * from "./deletion-impact.service";
@@ -0,0 +1,460 @@
1
+ import { vi, type Mocked } from "vitest";
2
+ import { Test, TestingModule } from "@nestjs/testing";
3
+ import { LlmProxyService } from "@spaceflow/core";
4
+ import { ReviewIssue, FileContentsMap, ReviewSpecService } from "./review-spec";
5
+ import { IssueVerifyService } from "./issue-verify.service";
6
+
7
+ vi.mock("@anthropic-ai/claude-agent-sdk", () => ({
8
+ query: vi.fn(),
9
+ }));
10
+
11
+ describe("IssueVerifyService", () => {
12
+ let service: IssueVerifyService;
13
+ let llmProxyService: Mocked<LlmProxyService>;
14
+
15
+ beforeEach(async () => {
16
+ const mockLlmProxyService = {
17
+ chatStream: vi.fn(),
18
+ };
19
+
20
+ const mockReviewSpecService = {
21
+ findRuleById: vi.fn(),
22
+ buildSpecsSection: vi.fn().mockReturnValue("mock rule specs"),
23
+ };
24
+
25
+ const module: TestingModule = await Test.createTestingModule({
26
+ providers: [
27
+ IssueVerifyService,
28
+ {
29
+ provide: LlmProxyService,
30
+ useValue: mockLlmProxyService,
31
+ },
32
+ {
33
+ provide: ReviewSpecService,
34
+ useValue: mockReviewSpecService,
35
+ },
36
+ ],
37
+ }).compile();
38
+
39
+ service = module.get<IssueVerifyService>(IssueVerifyService);
40
+ llmProxyService = module.get(LlmProxyService) as Mocked<LlmProxyService>;
41
+ });
42
+
43
+ it("should return empty array if no issues provided", async () => {
44
+ const result = await service.verifyIssueFixes([], new Map(), [], "openai");
45
+ expect(result).toEqual([]);
46
+ });
47
+
48
+ it("should skip already fixed issues", async () => {
49
+ const issues: ReviewIssue[] = [
50
+ {
51
+ file: "test.ts",
52
+ line: "10",
53
+ ruleId: "R1",
54
+ specFile: "s1.md",
55
+ reason: "r1",
56
+ fixed: "2023-01-01",
57
+ round: 1,
58
+ } as any,
59
+ ];
60
+ const result = await service.verifyIssueFixes(
61
+ issues,
62
+ new Map() as FileContentsMap,
63
+ [],
64
+ "openai",
65
+ );
66
+ expect(result).toHaveLength(1);
67
+ expect(result[0]).toBe(issues[0]);
68
+ expect(llmProxyService.chatStream).not.toHaveBeenCalled();
69
+ });
70
+
71
+ it("should mark as fixed if file is deleted", async () => {
72
+ const issues: ReviewIssue[] = [
73
+ {
74
+ file: "deleted.ts",
75
+ line: "10",
76
+ ruleId: "R1",
77
+ specFile: "s1.md",
78
+ reason: "r1",
79
+ round: 1,
80
+ } as any,
81
+ ];
82
+ const fileContents: FileContentsMap = new Map();
83
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
84
+ expect(result).toHaveLength(1);
85
+ expect(result[0].fixed).toBeDefined();
86
+ expect(llmProxyService.chatStream).not.toHaveBeenCalled();
87
+ });
88
+
89
+ it("should call LLM to verify issue fix", async () => {
90
+ const issues: ReviewIssue[] = [
91
+ {
92
+ file: "test.ts",
93
+ line: "10",
94
+ ruleId: "R1",
95
+ specFile: "s1.md",
96
+ reason: "r1",
97
+ round: 1,
98
+ } as any,
99
+ ];
100
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "new content"]]]]);
101
+
102
+ const mockStream = (async function* () {
103
+ yield {
104
+ type: "result",
105
+ response: { structuredOutput: { fixed: true, reason: "Fixed now" } },
106
+ };
107
+ })();
108
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
109
+
110
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
111
+
112
+ expect(result).toHaveLength(1);
113
+ expect(result[0].fixed).toBeDefined();
114
+ expect(llmProxyService.chatStream).toHaveBeenCalled();
115
+ });
116
+
117
+ it("should handle LLM saying issue is not fixed", async () => {
118
+ const issues: ReviewIssue[] = [
119
+ {
120
+ file: "test.ts",
121
+ line: "10",
122
+ ruleId: "R1",
123
+ specFile: "s1.md",
124
+ reason: "r1",
125
+ round: 1,
126
+ } as any,
127
+ ];
128
+ const fileContents: FileContentsMap = new Map([
129
+ ["test.ts", [["-------", "still bad content"]]],
130
+ ]);
131
+
132
+ const mockStream = (async function* () {
133
+ yield {
134
+ type: "result",
135
+ response: { structuredOutput: { fixed: false, reason: "Still broken" } },
136
+ };
137
+ })();
138
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
139
+
140
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
141
+
142
+ expect(result).toHaveLength(1);
143
+ expect(result[0].fixed).toBeUndefined();
144
+ });
145
+
146
+ it("should handle invalid issue from LLM", async () => {
147
+ const issues: ReviewIssue[] = [
148
+ {
149
+ file: "test.ts",
150
+ line: "10",
151
+ ruleId: "R1",
152
+ specFile: "s1.md",
153
+ reason: "r1",
154
+ round: 1,
155
+ } as any,
156
+ ];
157
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
158
+
159
+ const mockStream = (async function* () {
160
+ yield {
161
+ type: "result",
162
+ response: { structuredOutput: { fixed: false, valid: false, reason: "False positive" } },
163
+ };
164
+ })();
165
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
166
+
167
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
168
+
169
+ expect(result).toHaveLength(1);
170
+ expect(result[0].valid).toBe("false");
171
+ expect(result[0].fixed).toBeUndefined();
172
+ });
173
+
174
+ it("should handle error in LLM stream", async () => {
175
+ const issues: ReviewIssue[] = [
176
+ {
177
+ file: "test.ts",
178
+ line: "10",
179
+ ruleId: "R1",
180
+ specFile: "s1.md",
181
+ reason: "r1",
182
+ round: 1,
183
+ } as any,
184
+ ];
185
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
186
+
187
+ const mockStream = (async function* () {
188
+ yield { type: "error", message: "LLM error" };
189
+ })();
190
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
191
+
192
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
193
+
194
+ expect(result).toHaveLength(1);
195
+ expect(result[0]).toBe(issues[0]); // Returns original issue on error
196
+ });
197
+
198
+ it("should handle exception during LLM call", async () => {
199
+ const issues: ReviewIssue[] = [
200
+ {
201
+ file: "test.ts",
202
+ line: "10",
203
+ ruleId: "R1",
204
+ specFile: "s1.md",
205
+ reason: "r1",
206
+ round: 1,
207
+ } as any,
208
+ ];
209
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
210
+
211
+ llmProxyService.chatStream.mockImplementation(() => {
212
+ throw new Error("Critical failure");
213
+ });
214
+
215
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
216
+
217
+ expect(result).toHaveLength(1);
218
+ expect(result[0]).toBe(issues[0]);
219
+ });
220
+
221
+ it("should skip issues with valid=false", async () => {
222
+ const issues: ReviewIssue[] = [
223
+ {
224
+ file: "test.ts",
225
+ line: "10",
226
+ ruleId: "R1",
227
+ specFile: "s1.md",
228
+ reason: "r1",
229
+ valid: "false",
230
+ round: 1,
231
+ } as any,
232
+ ];
233
+ const result = await service.verifyIssueFixes(
234
+ issues,
235
+ new Map() as FileContentsMap,
236
+ [],
237
+ "openai",
238
+ );
239
+ expect(result).toHaveLength(1);
240
+ expect(result[0]).toBe(issues[0]);
241
+ expect(llmProxyService.chatStream).not.toHaveBeenCalled();
242
+ });
243
+
244
+ it("should handle non-Error exception during LLM call", async () => {
245
+ const issues: ReviewIssue[] = [
246
+ {
247
+ file: "test.ts",
248
+ line: "10",
249
+ ruleId: "R1",
250
+ specFile: "s1.md",
251
+ reason: "r1",
252
+ round: 1,
253
+ } as any,
254
+ ];
255
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
256
+
257
+ llmProxyService.chatStream.mockImplementation(() => {
258
+ throw "string error";
259
+ });
260
+
261
+ const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
262
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
263
+ expect(result).toHaveLength(1);
264
+ expect(result[0]).toBe(issues[0]);
265
+ consoleSpy.mockRestore();
266
+ });
267
+
268
+ it("should return original issue when LLM returns no structured output", async () => {
269
+ const issues: ReviewIssue[] = [
270
+ {
271
+ file: "test.ts",
272
+ line: "10",
273
+ ruleId: "R1",
274
+ specFile: "s1.md",
275
+ reason: "r1",
276
+ round: 1,
277
+ } as any,
278
+ ];
279
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
280
+
281
+ const mockStream = (async function* () {
282
+ yield { type: "result", response: { content: "no json" } };
283
+ })();
284
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
285
+
286
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
287
+ expect(result).toHaveLength(1);
288
+ expect(result[0]).toBe(issues[0]);
289
+ });
290
+
291
+ it("should include suggestion in prompt when present", async () => {
292
+ const issues: ReviewIssue[] = [
293
+ {
294
+ file: "test.ts",
295
+ line: "10",
296
+ ruleId: "R1",
297
+ specFile: "s1.md",
298
+ reason: "r1",
299
+ suggestion: "fix this way",
300
+ round: 1,
301
+ } as any,
302
+ ];
303
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
304
+
305
+ const mockStream = (async function* () {
306
+ yield {
307
+ type: "result",
308
+ response: { structuredOutput: { fixed: true, valid: true, reason: "ok" } },
309
+ };
310
+ })();
311
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
312
+
313
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
314
+ expect(result).toHaveLength(1);
315
+ expect(result[0].fixed).toBeDefined();
316
+ const callArgs = llmProxyService.chatStream.mock.calls[0][0];
317
+ expect(callArgs[1].content).toContain("fix this way");
318
+ });
319
+
320
+ it("should log verbose messages when verbose=1", async () => {
321
+ const issues: ReviewIssue[] = [
322
+ {
323
+ file: "test.ts",
324
+ line: "10",
325
+ ruleId: "R1",
326
+ specFile: "s1.md",
327
+ reason: "r1",
328
+ round: 1,
329
+ } as any,
330
+ ];
331
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
332
+
333
+ const mockStream = (async function* () {
334
+ yield {
335
+ type: "result",
336
+ response: { structuredOutput: { fixed: true, valid: true, reason: "Fixed" } },
337
+ };
338
+ })();
339
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
340
+
341
+ const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
342
+ await service.verifyIssueFixes(issues, fileContents, [], "openai", 1);
343
+ expect(consoleSpy).toHaveBeenCalled();
344
+ consoleSpy.mockRestore();
345
+ });
346
+
347
+ it("should log verbose for invalid issues", async () => {
348
+ const issues: ReviewIssue[] = [
349
+ {
350
+ file: "test.ts",
351
+ line: "10",
352
+ ruleId: "R1",
353
+ specFile: "s1.md",
354
+ reason: "r1",
355
+ round: 1,
356
+ } as any,
357
+ ];
358
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
359
+
360
+ const mockStream = (async function* () {
361
+ yield {
362
+ type: "result",
363
+ response: { structuredOutput: { fixed: false, valid: false, reason: "Invalid" } },
364
+ };
365
+ })();
366
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
367
+
368
+ const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
369
+ await service.verifyIssueFixes(issues, fileContents, [], "openai", 1);
370
+ const logMessages = consoleSpy.mock.calls.map((c) => c[0]);
371
+ expect(logMessages.some((m: string) => m.includes("无效问题"))).toBe(true);
372
+ consoleSpy.mockRestore();
373
+ });
374
+
375
+ it("should log verbose for unfixed issues", async () => {
376
+ const issues: ReviewIssue[] = [
377
+ {
378
+ file: "test.ts",
379
+ line: "10",
380
+ ruleId: "R1",
381
+ specFile: "s1.md",
382
+ reason: "r1",
383
+ round: 1,
384
+ } as any,
385
+ ];
386
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
387
+
388
+ const mockStream = (async function* () {
389
+ yield {
390
+ type: "result",
391
+ response: { structuredOutput: { fixed: false, valid: true, reason: "Still broken" } },
392
+ };
393
+ })();
394
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
395
+
396
+ const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
397
+ await service.verifyIssueFixes(issues, fileContents, [], "openai", 1);
398
+ const logMessages = consoleSpy.mock.calls.map((c) => c[0]);
399
+ expect(logMessages.some((m: string) => m.includes("未修复"))).toBe(true);
400
+ consoleSpy.mockRestore();
401
+ });
402
+
403
+ it("should use ruleInfo when available", async () => {
404
+ const mockReviewSpecService = (service as any).reviewSpecService;
405
+ mockReviewSpecService.findRuleById.mockReturnValue({
406
+ rule: { id: "R1", description: "test rule" },
407
+ spec: { name: "test-spec" },
408
+ });
409
+
410
+ const issues: ReviewIssue[] = [
411
+ {
412
+ file: "test.ts",
413
+ line: "10",
414
+ ruleId: "R1",
415
+ specFile: "s1.md",
416
+ reason: "r1",
417
+ round: 1,
418
+ } as any,
419
+ ];
420
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
421
+ const specs = [{ name: "test-spec" }] as any;
422
+
423
+ const mockStream = (async function* () {
424
+ yield {
425
+ type: "result",
426
+ response: { structuredOutput: { fixed: false, valid: true, reason: "ok" } },
427
+ };
428
+ })();
429
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
430
+
431
+ await service.verifyIssueFixes(issues, fileContents, specs, "openai");
432
+ expect(mockReviewSpecService.buildSpecsSection).toHaveBeenCalled();
433
+ });
434
+
435
+ it("should handle valid=true and fixed=true result", async () => {
436
+ const issues: ReviewIssue[] = [
437
+ {
438
+ file: "test.ts",
439
+ line: "10",
440
+ ruleId: "R1",
441
+ specFile: "s1.md",
442
+ reason: "r1",
443
+ round: 1,
444
+ } as any,
445
+ ];
446
+ const fileContents: FileContentsMap = new Map([["test.ts", [["-------", "content"]]]]);
447
+
448
+ const mockStream = (async function* () {
449
+ yield {
450
+ type: "result",
451
+ response: { structuredOutput: { fixed: true, valid: true, reason: "Fixed" } },
452
+ };
453
+ })();
454
+ llmProxyService.chatStream.mockReturnValue(mockStream as any);
455
+
456
+ const result = await service.verifyIssueFixes(issues, fileContents, [], "openai");
457
+ expect(result[0].fixed).toBeDefined();
458
+ expect(result[0].valid).toBe("true");
459
+ });
460
+ });