revspec 0.1.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,398 @@
import { describe, it, expect, beforeEach, afterEach } from "bun:test"
import { appendFileSync, existsSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "fs"
import { join, resolve } from "path"
import { tmpdir } from "os"
import { appendEvent, readEventsFromOffset, replayEventsToThreads } from "../src/protocol/live-events"
import { mergeJsonlIntoReview } from "../src/protocol/live-merge"
import { ReviewState } from "../src/state/review-state"
8
+
9
+ const CLI = resolve(import.meta.dir, "../bin/revspec.ts")
10
+
11
+ interface SpawnResult {
12
+ exitCode: number
13
+ stdout: string
14
+ stderr: string
15
+ }
16
+
17
+ async function runCli(
18
+ args: string[],
19
+ env: Record<string, string> = {}
20
+ ): Promise<SpawnResult> {
21
+ const proc = Bun.spawn(["bun", "run", CLI, ...args], {
22
+ env: { ...process.env, REVSPEC_WATCH_NO_BLOCK: "1", ...env },
23
+ stdout: "pipe",
24
+ stderr: "pipe",
25
+ })
26
+
27
+ const [stdout, stderr] = await Promise.all([
28
+ new Response(proc.stdout).text(),
29
+ new Response(proc.stderr).text(),
30
+ ])
31
+
32
+ const exitCode = await proc.exited
33
+ return { exitCode, stdout, stderr }
34
+ }
35
+
36
+ describe("live interaction: multi-turn conversation", () => {
37
+ let dir: string
38
+ let specPath: string
39
+ let jsonlPath: string
40
+
41
+ beforeEach(() => {
42
+ dir = mkdtempSync(join(tmpdir(), "revspec-interaction-"))
43
+ specPath = join(dir, "spec.md")
44
+ jsonlPath = join(dir, "spec.review.live.jsonl")
45
+ writeFileSync(specPath, "# Spec\n\nLine 3.\n\nLine 5.\n")
46
+ })
47
+
48
+ afterEach(() => rmSync(dir, { recursive: true }))
49
+
50
+ it("multiple replies in sequence: all messages in JSONL in correct order with correct authors", async () => {
51
+ // Reviewer comments
52
+ appendEvent(jsonlPath, {
53
+ type: "comment", threadId: "t1", line: 3,
54
+ author: "reviewer", text: "first comment", ts: 1000,
55
+ })
56
+
57
+ // AI replies
58
+ const reply1 = Bun.spawn(["bun", "run", CLI, "reply", specPath, "t1", "first AI reply"], {
59
+ stdout: "pipe", stderr: "pipe",
60
+ })
61
+ await reply1.exited
62
+ expect(reply1.exitCode).toBe(0)
63
+
64
+ // Reviewer replies back
65
+ appendEvent(jsonlPath, {
66
+ type: "reply", threadId: "t1",
67
+ author: "reviewer", text: "second comment", ts: 3000,
68
+ })
69
+
70
+ // AI replies again
71
+ const reply2 = Bun.spawn(["bun", "run", CLI, "reply", specPath, "t1", "second AI reply"], {
72
+ stdout: "pipe", stderr: "pipe",
73
+ })
74
+ await reply2.exited
75
+ expect(reply2.exitCode).toBe(0)
76
+
77
+ // Verify all four messages are in JSONL in correct order
78
+ const { events } = readEventsFromOffset(jsonlPath, 0)
79
+ expect(events).toHaveLength(4)
80
+
81
+ expect(events[0].type).toBe("comment")
82
+ expect(events[0].author).toBe("reviewer")
83
+ expect(events[0].text).toBe("first comment")
84
+
85
+ expect(events[1].type).toBe("reply")
86
+ expect(events[1].author).toBe("owner")
87
+ expect(events[1].text).toBe("first AI reply")
88
+
89
+ expect(events[2].type).toBe("reply")
90
+ expect(events[2].author).toBe("reviewer")
91
+ expect(events[2].text).toBe("second comment")
92
+
93
+ expect(events[3].type).toBe("reply")
94
+ expect(events[3].author).toBe("owner")
95
+ expect(events[3].text).toBe("second AI reply")
96
+
97
+ // Verify replayed thread has all 4 messages
98
+ const threads = replayEventsToThreads(events)
99
+ expect(threads).toHaveLength(1)
100
+ expect(threads[0].messages).toHaveLength(4)
101
+ expect(threads[0].messages[0].author).toBe("reviewer")
102
+ expect(threads[0].messages[1].author).toBe("owner")
103
+ expect(threads[0].messages[2].author).toBe("reviewer")
104
+ expect(threads[0].messages[3].author).toBe("owner")
105
+ })
106
+
107
+ it("reply after resolve: merged status reflects AI reply (pending, not resolved)", async () => {
108
+ // Reviewer comments
109
+ appendEvent(jsonlPath, {
110
+ type: "comment", threadId: "t1", line: 3,
111
+ author: "reviewer", text: "something", ts: 1000,
112
+ })
113
+
114
+ // Reviewer resolves
115
+ appendEvent(jsonlPath, {
116
+ type: "resolve", threadId: "t1",
117
+ author: "reviewer", ts: 2000,
118
+ })
119
+
120
+ // AI replies after resolve (race condition)
121
+ const reply = Bun.spawn(["bun", "run", CLI, "reply", specPath, "t1", "AI reply after resolve"], {
122
+ stdout: "pipe", stderr: "pipe",
123
+ })
124
+ await reply.exited
125
+ expect(reply.exitCode).toBe(0)
126
+
127
+ // Merge JSONL: last event is AI reply so status should be pending
128
+ const merged = mergeJsonlIntoReview(jsonlPath, null, specPath)
129
+ expect(merged.threads).toHaveLength(1)
130
+ expect(merged.threads[0].status).toBe("pending")
131
+ expect(merged.threads[0].messages).toHaveLength(2)
132
+ expect(merged.threads[0].messages[1].author).toBe("owner")
133
+ expect(merged.threads[0].messages[1].text).toBe("AI reply after resolve")
134
+ })
135
+
136
+ it("watch filters out owner events: AI own reply not shown back to AI", async () => {
137
+ // Reviewer comments
138
+ appendEvent(jsonlPath, {
139
+ type: "comment", threadId: "t1", line: 3,
140
+ author: "reviewer", text: "initial comment", ts: 1000,
141
+ })
142
+
143
+ // Run watch once — consumes the comment, advances offset
144
+ const watch1 = await runCli(["watch", specPath])
145
+ expect(watch1.exitCode).toBe(0)
146
+ expect(watch1.stdout).toContain("initial comment")
147
+
148
+ // AI replies
149
+ const reply = Bun.spawn(["bun", "run", CLI, "reply", specPath, "t1", "AI reply text"], {
150
+ stdout: "pipe", stderr: "pipe",
151
+ })
152
+ await reply.exited
153
+ expect(reply.exitCode).toBe(0)
154
+
155
+ // Run watch again — offset now points past the comment but at the AI reply
156
+ // The AI reply is an "owner" event, so watch should filter it out and produce no output
157
+ const watch2 = await runCli(["watch", specPath])
158
+ expect(watch2.exitCode).toBe(0)
159
+ expect(watch2.stdout).not.toContain("AI reply text")
160
+ expect(watch2.stdout.trim()).toBe("")
161
+ })
162
+
163
+ it("watch only returns actionable events (comments and replies), ignores resolves and deletes", async () => {
164
+ // Set up: write comment and advance offset past it
165
+ appendEvent(jsonlPath, {
166
+ type: "comment", threadId: "t1", line: 3,
167
+ author: "reviewer", text: "initial comment", ts: 1000,
168
+ })
169
+ await runCli(["watch", specPath])
170
+
171
+ // Append mixed batch: resolve + delete (non-actionable) + new comment + reply (actionable)
172
+ appendEvent(jsonlPath, {
173
+ type: "resolve", threadId: "t1",
174
+ author: "reviewer", ts: 2000,
175
+ })
176
+ appendEvent(jsonlPath, {
177
+ type: "delete", threadId: "t1",
178
+ author: "reviewer", ts: 2001,
179
+ })
180
+ appendEvent(jsonlPath, {
181
+ type: "comment", threadId: "t3", line: 3,
182
+ author: "reviewer", text: "fresh comment", ts: 2002,
183
+ })
184
+ appendEvent(jsonlPath, {
185
+ type: "reply", threadId: "t1",
186
+ author: "reviewer", text: "actually not resolved", ts: 2003,
187
+ })
188
+
189
+ const result = await runCli(["watch", specPath])
190
+ expect(result.exitCode).toBe(0)
191
+
192
+ // Only actionable sections appear
193
+ expect(result.stdout).toContain("New Comments")
194
+ expect(result.stdout).toContain("Replies")
195
+ expect(result.stdout).not.toContain("Resolved")
196
+ expect(result.stdout).not.toContain("Deleted")
197
+
198
+ expect(result.stdout).toContain("fresh comment")
199
+ expect(result.stdout).toContain("actually not resolved")
200
+ })
201
+ })
202
+
203
+ describe("live interaction: thread popup state flow", () => {
204
+ const SPEC = ["line one", "line two", "line three", "line four", "line five"]
205
+
206
+ it("addOwnerReply + markRead + unreadCount cycle", () => {
207
+ const state = new ReviewState(SPEC, [])
208
+
209
+ // Add a comment
210
+ state.addComment(1, "please fix this")
211
+ const threadId = state.threads[0].id // "t1"
212
+
213
+ // Initially no unread
214
+ expect(state.unreadCount()).toBe(0)
215
+ expect(state.isThreadUnread(threadId)).toBe(false)
216
+
217
+ // AI (owner) replies — creates unread
218
+ state.addOwnerReply(threadId, "done", 1001)
219
+ expect(state.unreadCount()).toBe(1)
220
+ expect(state.isThreadUnread(threadId)).toBe(true)
221
+ expect(state.threads[0].status).toBe("pending")
222
+
223
+ // Popup opens, user reads — markRead
224
+ state.markRead(threadId)
225
+ expect(state.unreadCount()).toBe(0)
226
+ expect(state.isThreadUnread(threadId)).toBe(false)
227
+
228
+ // Another AI reply arrives
229
+ state.addOwnerReply(threadId, "also this", 2000)
230
+ expect(state.unreadCount()).toBe(1)
231
+ expect(state.isThreadUnread(threadId)).toBe(true)
232
+ })
233
+
234
+ it("markRead does not affect other unread threads", () => {
235
+ const state = new ReviewState(SPEC, [])
236
+
237
+ state.addComment(1, "comment A")
238
+ state.addComment(3, "comment B")
239
+ const t1 = state.threads[0].id
240
+ const t2 = state.threads[1].id
241
+
242
+ state.addOwnerReply(t1, "reply A", 1001)
243
+ state.addOwnerReply(t2, "reply B", 1002)
244
+ expect(state.unreadCount()).toBe(2)
245
+
246
+ // Mark t1 read — t2 stays unread
247
+ state.markRead(t1)
248
+ expect(state.unreadCount()).toBe(1)
249
+ expect(state.isThreadUnread(t1)).toBe(false)
250
+ expect(state.isThreadUnread(t2)).toBe(true)
251
+ })
252
+
253
+ it("addOwnerReply sets thread status to pending regardless of prior status", () => {
254
+ const state = new ReviewState(SPEC, [])
255
+ state.addComment(2, "fix me")
256
+ const threadId = state.threads[0].id
257
+
258
+ // Manually set to resolved
259
+ state.resolveThread(threadId)
260
+ expect(state.threads[0].status).toBe("resolved")
261
+
262
+ // AI reply overrides to pending
263
+ state.addOwnerReply(threadId, "actually addressed", 5000)
264
+ expect(state.threads[0].status).toBe("pending")
265
+ })
266
+
267
+ it("unreadCount resets correctly when all threads marked read", () => {
268
+ const state = new ReviewState(SPEC, [])
269
+ state.addComment(1, "a")
270
+ state.addComment(2, "b")
271
+ state.addComment(3, "c")
272
+ const ids = state.threads.map((t) => t.id)
273
+
274
+ for (const id of ids) {
275
+ state.addOwnerReply(id, "reply", 1000)
276
+ }
277
+ expect(state.unreadCount()).toBe(3)
278
+
279
+ for (const id of ids) {
280
+ state.markRead(id)
281
+ }
282
+ expect(state.unreadCount()).toBe(0)
283
+ })
284
+ })
285
+
286
+ describe("live interaction: crash recovery via event replay", () => {
287
+ let dir: string
288
+ let specPath: string
289
+ let jsonlPath: string
290
+
291
+ beforeEach(() => {
292
+ dir = mkdtempSync(join(tmpdir(), "revspec-crash-"))
293
+ specPath = join(dir, "spec.md")
294
+ jsonlPath = join(dir, "spec.review.live.jsonl")
295
+ writeFileSync(specPath, "# Spec\n\nContent here.\n")
296
+ })
297
+
298
+ afterEach(() => rmSync(dir, { recursive: true }))
299
+
300
+ it("replaying events from JSONL restores thread state matching what was written", async () => {
301
+ // Write a sequence of events (as if TUI was running)
302
+ const events = [
303
+ { type: "comment" as const, threadId: "t1", line: 3, author: "reviewer", text: "needs work", ts: 1000 },
304
+ { type: "reply" as const, threadId: "t1", author: "owner", text: "working on it", ts: 2000 },
305
+ { type: "reply" as const, threadId: "t1", author: "reviewer", text: "looks better", ts: 3000 },
306
+ { type: "comment" as const, threadId: "t2", line: 1, author: "reviewer", text: "other issue", ts: 4000 },
307
+ { type: "resolve" as const, threadId: "t1", author: "reviewer", ts: 5000 },
308
+ ]
309
+
310
+ for (const ev of events) {
311
+ appendEvent(jsonlPath, ev)
312
+ }
313
+
314
+ // Simulate crash recovery: startup replays entire JSONL from byte 0
315
+ const { events: replayed } = readEventsFromOffset(jsonlPath, 0)
316
+ expect(replayed).toHaveLength(events.length)
317
+
318
+ // Verify each event matches what was written
319
+ for (let i = 0; i < events.length; i++) {
320
+ expect(replayed[i].type).toBe(events[i].type)
321
+ expect(replayed[i].author).toBe(events[i].author)
322
+ expect(replayed[i].ts).toBe(events[i].ts)
323
+ if ('text' in events[i]) {
324
+ expect(replayed[i].text).toBe((events[i] as any).text)
325
+ }
326
+ }
327
+
328
+ // Verify replayed state is correct
329
+ const threads = replayEventsToThreads(replayed)
330
+ expect(threads).toHaveLength(2)
331
+
332
+ const t1 = threads.find((t) => t.id === "t1")!
333
+ expect(t1).toBeDefined()
334
+ expect(t1.status).toBe("resolved")
335
+ expect(t1.messages).toHaveLength(3)
336
+ expect(t1.messages[0].author).toBe("reviewer")
337
+ expect(t1.messages[0].text).toBe("needs work")
338
+ expect(t1.messages[1].author).toBe("owner")
339
+ expect(t1.messages[2].author).toBe("reviewer")
340
+
341
+ const t2 = threads.find((t) => t.id === "t2")!
342
+ expect(t2).toBeDefined()
343
+ expect(t2.status).toBe("open")
344
+ expect(t2.messages).toHaveLength(1)
345
+ })
346
+
347
+ it("crash recovery: mergeJsonlIntoReview restores prior review + new JSONL events", async () => {
348
+ // Prior round left a resolved thread (written to JSON)
349
+ const priorReview = {
350
+ file: specPath,
351
+ threads: [{
352
+ id: "t1", line: 3, status: "resolved" as const,
353
+ messages: [
354
+ { author: "reviewer" as const, text: "old issue", ts: 100 },
355
+ { author: "owner" as const, text: "fixed", ts: 200 },
356
+ ],
357
+ }],
358
+ }
359
+
360
+ // New round JSONL has a new comment plus AI reply
361
+ appendEvent(jsonlPath, { type: "comment", threadId: "t2", line: 1, author: "reviewer", text: "new round comment", ts: 5000 })
362
+ appendEvent(jsonlPath, { type: "reply", threadId: "t2", author: "owner", text: "AI response", ts: 6000 })
363
+
364
+ // Crash recovery: replay merges existing review with new JSONL
365
+ const recovered = mergeJsonlIntoReview(jsonlPath, priorReview, specPath)
366
+ expect(recovered.threads).toHaveLength(2)
367
+
368
+ const recoveredT1 = recovered.threads.find((t) => t.id === "t1")!
369
+ expect(recoveredT1.status).toBe("resolved")
370
+ expect(recoveredT1.messages).toHaveLength(2)
371
+
372
+ const recoveredT2 = recovered.threads.find((t) => t.id === "t2")!
373
+ expect(recoveredT2.status).toBe("pending")
374
+ expect(recoveredT2.messages).toHaveLength(2)
375
+ expect(recoveredT2.messages[0].text).toBe("new round comment")
376
+ expect(recoveredT2.messages[1].text).toBe("AI response")
377
+ })
378
+
379
+ it("crash recovery: truncated JSONL (mid-write) does not corrupt earlier events", async () => {
380
+ // Write valid events
381
+ appendEvent(jsonlPath, { type: "comment", threadId: "t1", line: 3, author: "reviewer", text: "valid", ts: 1000 })
382
+ appendEvent(jsonlPath, { type: "reply", threadId: "t1", author: "owner", text: "valid reply", ts: 2000 })
383
+
384
+ // Simulate crash mid-write: append a partial/malformed line
385
+ const { appendFileSync } = await import("fs")
386
+ appendFileSync(jsonlPath, '{"type":"reply","threadId":"t1","author":"owner","text":"interrupted')
387
+
388
+ // Replay should recover the 2 valid events and discard the malformed line
389
+ const { events } = readEventsFromOffset(jsonlPath, 0)
390
+ expect(events).toHaveLength(2)
391
+
392
+ const threads = replayEventsToThreads(events)
393
+ expect(threads).toHaveLength(1)
394
+ expect(threads[0].messages).toHaveLength(2)
395
+ expect(threads[0].messages[0].text).toBe("valid")
396
+ expect(threads[0].messages[1].text).toBe("valid reply")
397
+ })
398
+ })