prjct-cli 1.7.0 → 1.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,68 @@
  # Changelog
 
+ ## [1.7.2] - 2026-02-07
+
+ ### Bug Fixes
+
+ - **Fix state machine completeness: missing transitions and dead-end states (PRJ-280) (#141)**: Added the missing transitions (`completed → pause`, `paused → ship`, `completed → reopen`), added subtask states `skipped` and `blocked` with reason tracking, migrated `previousTask` to a `pausedTasks[]` array with a max limit (5) and staleness detection (30 days), and enforced all transitions through the state machine at the storage level.
+
+ ### Implementation Details
+ Added a `reopen` command to the `WorkflowCommand` type. Updated `getCurrentState()` to detect the paused state from the `pausedTasks[]` array as well as the legacy `previousTask`. `failSubtask()` now advances to the next subtask instead of halting. New `skipSubtask(reason)` and `blockSubtask(blocker)` methods mark subtasks and advance. `pauseTask()` pushes onto a `pausedTasks[]` array (max 5); `resumeTask()` pops from the array or resumes by ID. `getPausedTasksFromState()` handles backward compatibility by migrating the legacy `previousTask` format. All storage mutation methods (`startTask`, `completeTask`, `pauseTask`, `resumeTask`) validate transitions through the state machine before executing.
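As a rough illustration of the transition table this describes, here is a sketch using the state and command names exercised by the tests added in this release; it is illustrative, not the actual `WorkflowStateMachine` source:

```typescript
// Sketch of the expanded transition table; the real implementation may differ.
type WorkflowState = 'idle' | 'working' | 'paused' | 'completed' | 'shipped'
type WorkflowCommand = 'task' | 'next' | 'done' | 'pause' | 'resume' | 'ship' | 'reopen'

// Each state maps the commands it accepts to the state they lead to.
const transitions: Record<WorkflowState, Partial<Record<WorkflowCommand, WorkflowState>>> = {
  idle: { task: 'working', next: 'idle' },
  working: { done: 'completed', pause: 'paused' },
  paused: { resume: 'working', task: 'working', ship: 'shipped' }, // paused → ship is new
  completed: {
    ship: 'shipped',
    task: 'working',
    next: 'completed',
    pause: 'paused',   // new: completed → pause
    reopen: 'working', // new: completed → reopen
  },
  shipped: { task: 'working', next: 'shipped' },
}

function canTransition(state: WorkflowState, command: WorkflowCommand): boolean {
  return transitions[state][command] !== undefined
}

function getNextState(state: WorkflowState, command: WorkflowCommand): WorkflowState | null {
  return transitions[state][command] ?? null
}
```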
+
+ ### Test Plan
+
+ #### For QA
+ 1. Verify the `completed → pause`, `paused → ship`, and `completed → reopen` transitions work
+ 2. Start a task with subtasks and call `failSubtask()` — verify it records the reason AND advances to the next subtask
+ 3. Call `skipSubtask(reason)` and `blockSubtask(blocker)` — verify they record reasons and advance
+ 4. Pause 3+ tasks — verify the `pausedTasks[]` array stores all of them and respects the max limit of 5
+ 5. Give state.json the old `previousTask` format — verify it auto-migrates into the array
+ 6. Attempt an invalid transition (e.g., `done` from `idle`) — verify an error is thrown at the storage level (see the sketch after this list)
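Step 6 relies on storage methods consulting the state machine before mutating anything. A minimal sketch of that guard, using the `canTransition` API shown in the tests further down this diff (the surrounding storage call is illustrative):

```typescript
import { WorkflowStateMachine } from '../workflow/state-machine' // import path is illustrative

const sm = new WorkflowStateMachine()

// e.g. inside completeTask(): `done` is not a valid command from `idle`,
// so the method throws before state.json is touched.
const check = sm.canTransition('idle', 'done')
if (!check.valid) {
  throw new Error(check.error) // canTransition() also returns a `suggestion` for the caller
}
```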
+
+ #### For Users
+ **What changed:** The workflow now supports reopening completed tasks, shipping paused tasks directly, and keeping multiple paused tasks. Subtask failures auto-advance instead of halting.
+ **Breaking changes:** `previousTask` is deprecated in favor of `pausedTasks[]`. Backward compatibility is maintained via auto-migration (sketched below).
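The `previousTask` → `pausedTasks[]` auto-migration could look roughly like the following sketch. The field shapes follow `StateJsonSchema` as changed later in this diff, but the helper bodies are illustrative, not the actual `getPausedTasksFromState()` and `pauseTask()` code:

```typescript
// Illustrative types: PausedTask stands in for PreviousTaskSchema's inferred type.
interface PausedTask { id: string; status?: string; pausedAt?: string }

interface StateJson {
  currentTask: unknown
  previousTask?: PausedTask | null // deprecated legacy field
  pausedTasks?: PausedTask[]       // replacement array (max 5 entries)
  lastUpdated: string
}

const MAX_PAUSED_TASKS = 5

// Sketch: prefer the new array, fall back to wrapping the legacy single task.
function getPausedTasksFromState(state: StateJson): PausedTask[] {
  if (state.pausedTasks?.length) return state.pausedTasks
  return state.previousTask ? [state.previousTask] : []
}

// Sketch: pausing pushes onto the array and drops the oldest entry past the cap.
function pushPaused(state: StateJson, task: PausedTask): PausedTask[] {
  return [...getPausedTasksFromState(state), task].slice(-MAX_PAUSED_TASKS)
}
```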
+
+ ## [1.7.1] - 2026-02-07
+
+ ### Bug Fixes
+
+ - **Add Zod validation on all storage reads (PRJ-279) (#140)**: Created a `safeRead<T>()` utility that wraps `JSON.parse` + `schema.safeParse()`. All 5 `StorageManager` subclasses (state, queue, ideas, shipped, metrics) now validate reads against their Zod schemas. Corrupted files now produce a logged warning and a `.backup` file instead of silently crashing downstream code.
+
+ ### Implementation Details
+ Created `core/storage/safe-reader.ts` with a `ValidationSchema` interface decoupled from Zod generics to avoid strict type-parameter matching. The `StorageManager` base class accepts an optional schema via its constructor, so subclasses pass their Zod schema with a single import and argument change. `safeRead` returns the raw parsed JSON (not the Zod-transformed `result.data`) to preserve extra fields for forward compatibility. Also fixed `ShippedJsonSchema`, which used `items` instead of `shipped` (a pre-existing schema bug), and made `changes` optional to match the actual data.
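A minimal sketch of the `safeRead` shape described above, matching the behavior the new tests check: a missing file returns `null` with no backup, malformed JSON or a schema mismatch writes a `.backup` and returns `null`, and the raw parsed JSON is returned so unknown keys survive. The `ValidationSchema` shape here is an assumption about the interface, not the actual source:

```typescript
import fs from 'node:fs/promises'

// Decoupled from Zod generics: anything with a safeParse(data) method works.
export interface ValidationSchema {
  safeParse(data: unknown): { success: boolean; error?: unknown }
}

export async function safeRead<T>(filePath: string, schema: ValidationSchema): Promise<T | null> {
  let raw: string
  try {
    raw = await fs.readFile(filePath, 'utf-8')
  } catch {
    return null // missing file: no backup, the caller falls back to its defaults
  }

  let parsed: unknown
  try {
    parsed = JSON.parse(raw)
  } catch {
    await backup(filePath, raw)
    return null // malformed JSON
  }

  if (!schema.safeParse(parsed).success) {
    await backup(filePath, raw)
    return null // valid JSON, wrong shape
  }

  // Return the raw parsed object (not Zod's result.data) so extra fields are preserved.
  return parsed as T
}

async function backup(filePath: string, contents: string): Promise<void> {
  console.warn(`[storage] invalid data in ${filePath}, saving ${filePath}.backup`)
  await fs.writeFile(`${filePath}.backup`, contents)
}
```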
+
+ ### Learnings
+ - Zod's default `strip` mode silently drops unknown keys from `result.data`, so we must return the raw JSON to preserve extra state.json fields (projectId, stack, domains, etc.); see the snippet below
+ - `ShippedJsonSchema` had `items` instead of `shipped` as the array key, a pre-existing schema/data mismatch
+ - A `ValidationSchema` interface avoids Zod's generic constraints while still providing type-safe validation
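To make the first learning concrete, here is a small illustration of Zod's default strip behavior (the `projectId` key is just an example of an extra field):

```typescript
import { z } from 'zod'

const Schema = z.object({ currentTask: z.null(), lastUpdated: z.string() })

const onDisk = { currentTask: null, lastUpdated: '2026-02-07', projectId: 'abc' }
const result = Schema.safeParse(onDisk)

if (result.success) {
  console.log('projectId' in result.data) // false: strip mode dropped the unknown key
  console.log('projectId' in onDisk)      // true: the raw parsed JSON keeps it
}
```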
+
+ ### Test Plan
+
+ #### For QA
+ 1. Create a valid `state.json` — verify it reads correctly with no warnings
+ 2. Corrupt a storage file with invalid JSON — verify a `.backup` is created and defaults are returned (see the usage sketch after this list)
+ 3. Write valid JSON with the wrong schema — verify `.backup` and defaults
+ 4. Add extra fields not in the schema — verify they are preserved after a read
+ 5. Run `bun test` — verify all 438 tests pass (16 new for `safeRead`)
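For steps 2 and 4, a throwaway script along these lines reproduces the behavior; the import path and the minimal schema are illustrative, and the defaults come from each storage class's `getDefault()`:

```typescript
import fs from 'node:fs/promises'
import { z } from 'zod'
import { safeRead } from './core/storage/safe-reader' // path is illustrative

const StateSchema = z.object({ currentTask: z.unknown().nullable(), lastUpdated: z.string() })

// Step 2: corrupt the file, then expect null (the storage layer falls back to defaults) plus a .backup.
await fs.writeFile('state.json', 'not valid json {{{')
console.log(await safeRead('state.json', StateSchema)) // null
console.log(await fs.readFile('state.json.backup', 'utf-8')) // the corrupted contents, preserved

// Step 4: extra fields survive because safeRead returns the raw parsed JSON, not Zod's stripped output.
await fs.writeFile('state.json', JSON.stringify({ currentTask: null, lastUpdated: '2026-02-07', projectId: 'x' }))
console.log(await safeRead('state.json', StateSchema)) // projectId is still present
```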
+
+ #### For Users
+ **What changed:** Storage reads are now validated against Zod schemas. Corrupted files no longer cause silent crashes.
+ **How to use:** No action needed — automatic.
+ **Breaking changes:** None.
+
  ## [1.7.0] - 2026-02-07
 
  ### Features
@@ -7,7 +70,6 @@
  - use relative timestamps to reduce token waste (PRJ-274) (#139)
  - use relative timestamps to reduce token waste (PRJ-274)
 
-
  ## [1.6.16] - 2026-02-07
 
  ### Improvement
@@ -0,0 +1,262 @@
+ /**
+  * Safe Reader Tests
+  *
+  * Tests for Zod-validated storage reads:
+  * - Valid data passes through
+  * - Corrupted JSON creates .backup + returns null
+  * - Valid JSON with wrong schema creates .backup + returns null
+  * - Missing files return null (no backup)
+  * - Extra fields are preserved (forward compatibility)
+  */
+
+ import { afterEach, beforeEach, describe, expect, it } from 'bun:test'
+ import fs from 'node:fs/promises'
+ import os from 'node:os'
+ import path from 'node:path'
+ import { z } from 'zod'
+ import { safeRead } from '../../storage/safe-reader'
+
+ // =============================================================================
+ // Test Schema
+ // =============================================================================
+
+ const TestSchema = z.object({
+   name: z.string(),
+   count: z.number(),
+   items: z.array(z.string()),
+ })
+
+ type TestData = z.infer<typeof TestSchema>
+
+ // =============================================================================
+ // Setup
+ // =============================================================================
+
+ let tmpDir: string
+
+ beforeEach(async () => {
+   tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'prjct-safe-reader-test-'))
+ })
+
+ afterEach(async () => {
+   await fs.rm(tmpDir, { recursive: true, force: true })
+ })
+
+ // =============================================================================
+ // Tests
+ // =============================================================================
+
+ describe('safeRead', () => {
+   describe('valid data', () => {
+     it('should return validated data for valid JSON matching schema', async () => {
+       const filePath = path.join(tmpDir, 'valid.json')
+       const data: TestData = { name: 'test', count: 42, items: ['a', 'b'] }
+       await fs.writeFile(filePath, JSON.stringify(data, null, 2))
+
+       const result = await safeRead<TestData>(filePath, TestSchema)
+
+       expect(result).toEqual(data)
+     })
+
+     it('should preserve extra fields not in schema', async () => {
+       const filePath = path.join(tmpDir, 'extra-fields.json')
+       const data = {
+         name: 'test',
+         count: 1,
+         items: [],
+         extraField: 'preserved',
+         nested: { deep: true },
+       }
+       await fs.writeFile(filePath, JSON.stringify(data, null, 2))
+
+       const result = await safeRead<typeof data>(filePath, TestSchema)
+
+       expect(result).not.toBeNull()
+       expect(result!.name).toBe('test')
+       expect(result!.extraField).toBe('preserved')
+       expect(result!.nested).toEqual({ deep: true })
+     })
+
+     it('should not create .backup for valid data', async () => {
+       const filePath = path.join(tmpDir, 'no-backup.json')
+       const data: TestData = { name: 'ok', count: 0, items: [] }
+       await fs.writeFile(filePath, JSON.stringify(data))
+
+       await safeRead<TestData>(filePath, TestSchema)
+
+       const backupExists = await fs
+         .access(`${filePath}.backup`)
+         .then(() => true)
+         .catch(() => false)
+       expect(backupExists).toBe(false)
+     })
+   })
+
+   describe('missing files', () => {
+     it('should return null for non-existent file', async () => {
+       const result = await safeRead<TestData>(path.join(tmpDir, 'missing.json'), TestSchema)
+
+       expect(result).toBeNull()
+     })
+
+     it('should not create .backup for missing file', async () => {
+       const filePath = path.join(tmpDir, 'missing.json')
+       await safeRead<TestData>(filePath, TestSchema)
+
+       const backupExists = await fs
+         .access(`${filePath}.backup`)
+         .then(() => true)
+         .catch(() => false)
+       expect(backupExists).toBe(false)
+     })
+   })
+
+   describe('corrupted JSON', () => {
+     it('should return null for malformed JSON', async () => {
+       const filePath = path.join(tmpDir, 'malformed.json')
+       await fs.writeFile(filePath, 'not valid json {{{')
+
+       const result = await safeRead<TestData>(filePath, TestSchema)
+
+       expect(result).toBeNull()
+     })
+
+     it('should create .backup for malformed JSON', async () => {
+       const filePath = path.join(tmpDir, 'malformed.json')
+       const badContent = 'not valid json {{{'
+       await fs.writeFile(filePath, badContent)
+
+       await safeRead<TestData>(filePath, TestSchema)
+
+       const backup = await fs.readFile(`${filePath}.backup`, 'utf-8')
+       expect(backup).toBe(badContent)
+     })
+
+     it('should return null for empty file', async () => {
+       const filePath = path.join(tmpDir, 'empty.json')
+       await fs.writeFile(filePath, '')
+
+       const result = await safeRead<TestData>(filePath, TestSchema)
+
+       expect(result).toBeNull()
+     })
+   })
+
+   describe('valid JSON with wrong schema', () => {
+     it('should return null when required field is missing', async () => {
+       const filePath = path.join(tmpDir, 'missing-field.json')
+       await fs.writeFile(filePath, JSON.stringify({ name: 'test' })) // missing count and items
+
+       const result = await safeRead<TestData>(filePath, TestSchema)
+
+       expect(result).toBeNull()
+     })
+
+     it('should create .backup when schema validation fails', async () => {
+       const filePath = path.join(tmpDir, 'wrong-schema.json')
+       const data = { name: 123, count: 'not a number', items: 'not an array' }
+       await fs.writeFile(filePath, JSON.stringify(data))
+
+       await safeRead<TestData>(filePath, TestSchema)
+
+       const backupExists = await fs
+         .access(`${filePath}.backup`)
+         .then(() => true)
+         .catch(() => false)
+       expect(backupExists).toBe(true)
+     })
+
+     it('should return null when field has wrong type', async () => {
+       const filePath = path.join(tmpDir, 'wrong-type.json')
+       await fs.writeFile(filePath, JSON.stringify({ name: 42, count: 1, items: [] }))
+
+       const result = await safeRead<TestData>(filePath, TestSchema)
+
+       expect(result).toBeNull()
+     })
+
+     it('should return null when array contains wrong types', async () => {
+       const filePath = path.join(tmpDir, 'wrong-array.json')
+       await fs.writeFile(filePath, JSON.stringify({ name: 'test', count: 1, items: [1, 2, 3] }))
+
+       const result = await safeRead<TestData>(filePath, TestSchema)
+
+       expect(result).toBeNull()
+     })
+   })
+
+   describe('optional fields and defaults', () => {
+     it('should handle schema with optional fields', async () => {
+       const OptionalSchema = z.object({
+         name: z.string(),
+         description: z.string().optional(),
+       })
+
+       const filePath = path.join(tmpDir, 'optional.json')
+       await fs.writeFile(filePath, JSON.stringify({ name: 'test' }))
+
+       const result = await safeRead<z.infer<typeof OptionalSchema>>(filePath, OptionalSchema)
+
+       expect(result).not.toBeNull()
+       expect(result!.name).toBe('test')
+       expect(result!.description).toBeUndefined()
+     })
+
+     it('should handle schema with nullable fields', async () => {
+       const NullableSchema = z.object({
+         currentTask: z.object({ id: z.string() }).nullable(),
+         lastUpdated: z.string(),
+       })
+
+       const filePath = path.join(tmpDir, 'nullable.json')
+       await fs.writeFile(filePath, JSON.stringify({ currentTask: null, lastUpdated: '2026-01-01' }))
+
+       const result = await safeRead<z.infer<typeof NullableSchema>>(filePath, NullableSchema)
+
+       expect(result).not.toBeNull()
+       expect(result!.currentTask).toBeNull()
+     })
+   })
+
+   describe('integration with StorageManager pattern', () => {
+     it('should work with real StateJsonSchema', async () => {
+       // Import the actual schema used in production
+       const { StateJsonSchema } = await import('../../schemas/state')
+
+       const filePath = path.join(tmpDir, 'state.json')
+       const stateData = {
+         currentTask: null,
+         lastUpdated: '2026-02-07T00:00:00.000Z',
+         // Extra fields that exist in real state.json but not in schema
+         projectId: 'test-123',
+         stack: { language: 'TypeScript', framework: 'Hono' },
+       }
+       await fs.writeFile(filePath, JSON.stringify(stateData, null, 2))
+
+       const result = await safeRead<typeof stateData>(filePath, StateJsonSchema)
+
+       expect(result).not.toBeNull()
+       expect(result!.currentTask).toBeNull()
+       expect(result!.projectId).toBe('test-123') // Extra field preserved
+     })
+
+     it('should reject corrupted state data', async () => {
+       const { StateJsonSchema } = await import('../../schemas/state')
+
+       const filePath = path.join(tmpDir, 'bad-state.json')
+       // currentTask should be an object or null, not a number
+       const badData = { currentTask: 42, lastUpdated: '2026-02-07' }
+       await fs.writeFile(filePath, JSON.stringify(badData))
+
+       const result = await safeRead(filePath, StateJsonSchema)
+
+       expect(result).toBeNull()
+       // Backup should exist
+       const backupExists = await fs
+         .access(`${filePath}.backup`)
+         .then(() => true)
+         .catch(() => false)
+       expect(backupExists).toBe(true)
+     })
+   })
+ })
@@ -0,0 +1,216 @@
+ /**
+  * State Machine Tests
+  *
+  * Tests for workflow state machine transitions:
+  * - All valid transitions work
+  * - Invalid transitions are rejected
+  * - New transitions: completed→paused, paused→shipped, completed→reopen
+  * - getCurrentState detects paused tasks from pausedTasks array
+  */
+
+ import { describe, expect, it } from 'bun:test'
+ import type { WorkflowCommand, WorkflowState } from '../../workflow/state-machine'
+ import { WorkflowStateMachine } from '../../workflow/state-machine'
+
+ const sm = new WorkflowStateMachine()
+
+ // =============================================================================
+ // getCurrentState
+ // =============================================================================
+
+ describe('getCurrentState', () => {
+   it('returns idle when no task and no paused tasks', () => {
+     expect(sm.getCurrentState({ currentTask: null })).toBe('idle')
+     expect(sm.getCurrentState({})).toBe('idle')
+   })
+
+   it('returns working for in_progress status', () => {
+     expect(sm.getCurrentState({ currentTask: { status: 'in_progress' } })).toBe('working')
+     expect(sm.getCurrentState({ currentTask: { status: 'working' } })).toBe('working')
+   })
+
+   it('returns completed for completed/done status', () => {
+     expect(sm.getCurrentState({ currentTask: { status: 'completed' } })).toBe('completed')
+     expect(sm.getCurrentState({ currentTask: { status: 'done' } })).toBe('completed')
+   })
+
+   it('returns shipped for shipped status', () => {
+     expect(sm.getCurrentState({ currentTask: { status: 'shipped' } })).toBe('shipped')
+   })
+
+   it('returns paused when currentTask has paused status', () => {
+     expect(sm.getCurrentState({ currentTask: { status: 'paused' } })).toBe('paused')
+   })
+
+   it('returns paused when no currentTask but pausedTasks array has entries', () => {
+     expect(sm.getCurrentState({ currentTask: null, pausedTasks: [{ id: '1' }] })).toBe('paused')
+   })
+
+   it('returns paused when no currentTask but legacy previousTask is paused', () => {
+     expect(sm.getCurrentState({ currentTask: null, previousTask: { status: 'paused' } })).toBe(
+       'paused'
+     )
+   })
+
+   it('returns idle when no currentTask and empty pausedTasks', () => {
+     expect(sm.getCurrentState({ currentTask: null, pausedTasks: [] })).toBe('idle')
+   })
+
+   it('returns working for unknown status when task exists', () => {
+     expect(sm.getCurrentState({ currentTask: { status: 'active' } })).toBe('working')
+     expect(sm.getCurrentState({ currentTask: {} })).toBe('working')
+   })
+ })
+
+ // =============================================================================
+ // canTransition - valid transitions
+ // =============================================================================
+
+ describe('canTransition - valid', () => {
+   const validTransitions: [WorkflowState, WorkflowCommand][] = [
+     // idle
+     ['idle', 'task'],
+     ['idle', 'next'],
+     // working
+     ['working', 'done'],
+     ['working', 'pause'],
+     // paused
+     ['paused', 'resume'],
+     ['paused', 'task'],
+     ['paused', 'ship'], // NEW: fast-track ship
+     // completed
+     ['completed', 'ship'],
+     ['completed', 'task'],
+     ['completed', 'next'],
+     ['completed', 'pause'], // NEW: reopen for review
+     ['completed', 'reopen'], // NEW: reopen for rework
+     // shipped
+     ['shipped', 'task'],
+     ['shipped', 'next'],
+   ]
+
+   for (const [state, command] of validTransitions) {
+     it(`${state} → ${command} is valid`, () => {
+       const result = sm.canTransition(state, command)
+       expect(result.valid).toBe(true)
+       expect(result.error).toBeUndefined()
+     })
+   }
+ })
+
+ // =============================================================================
+ // canTransition - invalid transitions
+ // =============================================================================
+
+ describe('canTransition - invalid', () => {
+   const invalidTransitions: [WorkflowState, WorkflowCommand][] = [
+     ['idle', 'done'],
+     ['idle', 'pause'],
+     ['idle', 'resume'],
+     ['idle', 'ship'],
+     ['idle', 'reopen'],
+     ['working', 'task'],
+     ['working', 'ship'],
+     ['working', 'resume'],
+     ['working', 'next'],
+     ['working', 'reopen'],
+     ['paused', 'done'],
+     ['paused', 'pause'],
+     ['paused', 'reopen'],
+     ['shipped', 'done'],
+     ['shipped', 'pause'],
+     ['shipped', 'resume'],
+     ['shipped', 'ship'],
+     ['shipped', 'reopen'],
+   ]
+
+   for (const [state, command] of invalidTransitions) {
+     it(`${state} → ${command} is invalid`, () => {
+       const result = sm.canTransition(state, command)
+       expect(result.valid).toBe(false)
+       expect(result.error).toBeDefined()
+       expect(result.suggestion).toBeDefined()
+     })
+   }
+ })
+
+ // =============================================================================
+ // getNextState
+ // =============================================================================
+
+ describe('getNextState', () => {
+   it('task → working', () => {
+     expect(sm.getNextState('idle', 'task')).toBe('working')
+     expect(sm.getNextState('paused', 'task')).toBe('working')
+     expect(sm.getNextState('completed', 'task')).toBe('working')
+   })
+
+   it('done → completed', () => {
+     expect(sm.getNextState('working', 'done')).toBe('completed')
+   })
+
+   it('pause → paused', () => {
+     expect(sm.getNextState('working', 'pause')).toBe('paused')
+     expect(sm.getNextState('completed', 'pause')).toBe('paused')
+   })
+
+   it('resume → working', () => {
+     expect(sm.getNextState('paused', 'resume')).toBe('working')
+   })
+
+   it('ship → shipped', () => {
+     expect(sm.getNextState('completed', 'ship')).toBe('shipped')
+     expect(sm.getNextState('paused', 'ship')).toBe('shipped')
+   })
+
+   it('reopen → working', () => {
+     expect(sm.getNextState('completed', 'reopen')).toBe('working')
+   })
+
+   it('next preserves current state', () => {
+     expect(sm.getNextState('idle', 'next')).toBe('idle')
+     expect(sm.getNextState('completed', 'next')).toBe('completed')
+   })
+ })
+
+ // =============================================================================
+ // getValidCommands
+ // =============================================================================
+
+ describe('getValidCommands', () => {
+   it('idle allows task, next', () => {
+     expect(sm.getValidCommands('idle')).toEqual(['task', 'next'])
+   })
+
+   it('working allows done, pause', () => {
+     expect(sm.getValidCommands('working')).toEqual(['done', 'pause'])
+   })
+
+   it('paused allows resume, task, ship', () => {
+     expect(sm.getValidCommands('paused')).toEqual(['resume', 'task', 'ship'])
+   })
+
+   it('completed allows ship, task, next, pause, reopen', () => {
+     expect(sm.getValidCommands('completed')).toEqual(['ship', 'task', 'next', 'pause', 'reopen'])
+   })
+
+   it('shipped allows task, next', () => {
+     expect(sm.getValidCommands('shipped')).toEqual(['task', 'next'])
+   })
+ })
+
+ // =============================================================================
+ // formatNextSteps
+ // =============================================================================
+
+ describe('formatNextSteps', () => {
+   it('includes reopen in completed state steps', () => {
+     const steps = sm.formatNextSteps('completed')
+     expect(steps.some((s) => s.includes('reopen'))).toBe(true)
+   })
+
+   it('includes ship in paused state steps', () => {
+     const steps = sm.formatNextSteps('paused')
+     expect(steps.some((s) => s.includes('ship'))).toBe(true)
+   })
+ })
@@ -56,7 +56,7 @@ export const ShippedItemSchema = z.object({
    type: ShipTypeSchema,
    agent: z.string().optional(), // "fe+be", "be", "fe"
    description: z.string().optional(),
-   changes: z.array(ShipChangeSchema),
+   changes: z.array(ShipChangeSchema).optional(),
    codeSnippets: z.array(z.string()).optional(),
    commit: CommitInfoSchema.optional(),
    codeMetrics: CodeMetricsSchema.optional(),
@@ -69,7 +69,7 @@ export const ShippedItemSchema = z.object({
  })
 
  export const ShippedJsonSchema = z.object({
-   items: z.array(ShippedItemSchema),
+   shipped: z.array(ShippedItemSchema),
    lastUpdated: z.string(),
  })
 
@@ -104,6 +104,6 @@ export const safeParseShipped = (data: unknown) => ShippedJsonSchema.safeParse(d
  // =============================================================================
 
  export const DEFAULT_SHIPPED: ShippedJson = {
-   items: [],
+   shipped: [],
    lastUpdated: '',
  }
@@ -24,6 +24,7 @@ export const TaskStatusSchema = z.enum([
    'blocked',
    'paused',
    'failed',
+   'skipped',
  ])
  export const ActivityTypeSchema = z.enum([
    'task_completed',
@@ -59,6 +60,8 @@ export const SubtaskSchema = z.object({
    completedAt: z.string().optional(), // ISO8601
    output: z.string().optional(), // Brief output description
    summary: SubtaskSummarySchema.optional(), // Full summary for context handoff
+   skipReason: z.string().optional(), // Why this subtask was skipped
+   blockReason: z.string().optional(), // What is blocking this subtask
  })
 
  // Subtask progress tracking
@@ -94,7 +97,8 @@ export const PreviousTaskSchema = z.object({
 
  export const StateJsonSchema = z.object({
    currentTask: CurrentTaskSchema.nullable(),
-   previousTask: PreviousTaskSchema.nullable().optional(),
+   previousTask: PreviousTaskSchema.nullable().optional(), // deprecated: use pausedTasks
+   pausedTasks: z.array(PreviousTaskSchema).optional(), // replaces previousTask
    lastUpdated: z.string(),
  })
 
@@ -190,6 +194,7 @@ export const safeParseQueue = (data: unknown) => QueueJsonSchema.safeParse(data)
 
  export const DEFAULT_STATE: StateJson = {
    currentTask: null,
+   pausedTasks: [],
    lastUpdated: '',
  }
 
@@ -6,13 +6,14 @@
  */
 
  import { generateUUID } from '../schemas'
+ import { IdeasJsonSchema } from '../schemas/ideas'
  import type { Idea, IdeaPriority, IdeaStatus, IdeasJson } from '../types'
  import { getTimestamp, toRelative } from '../utils/date-helper'
  import { StorageManager } from './storage-manager'
 
  class IdeasStorage extends StorageManager<IdeasJson> {
    constructor() {
-     super('ideas.json')
+     super('ideas.json', IdeasJsonSchema)
    }
 
    protected getDefault(): IdeasJson {
@@ -18,13 +18,14 @@ import {
    estimateCostSaved,
    formatCost,
    type MetricsJson,
+   MetricsJsonSchema,
  } from '../schemas/metrics'
  import { getTimestamp } from '../utils/date-helper'
  import { StorageManager } from './storage-manager'
 
  class MetricsStorage extends StorageManager<MetricsJson> {
    constructor() {
-     super('metrics.json')
+     super('metrics.json', MetricsJsonSchema)
    }
 
    protected getDefault(): MetricsJson {
@@ -7,12 +7,13 @@
 
  import { generateUUID } from '../schemas'
  import type { Priority, QueueJson, QueueTask, TaskSection } from '../schemas/state'
+ import { QueueJsonSchema } from '../schemas/state'
  import { getTimestamp } from '../utils/date-helper'
  import { StorageManager } from './storage-manager'
 
  class QueueStorage extends StorageManager<QueueJson> {
    constructor() {
-     super('queue.json')
+     super('queue.json', QueueJsonSchema)
    }
 
    protected getDefault(): QueueJson {