prjct-cli 1.18.0 → 1.20.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +122 -0
- package/core/__tests__/storage/archive-storage.test.ts +455 -0
- package/core/__tests__/utils/retry.test.ts +381 -0
- package/core/agentic/tool-registry.ts +40 -12
- package/core/schemas/ideas.ts +1 -1
- package/core/services/agent-generator.ts +35 -8
- package/core/services/agent-service.ts +17 -12
- package/core/services/memory-service.ts +42 -0
- package/core/services/sync-service.ts +51 -0
- package/core/storage/archive-storage.ts +205 -0
- package/core/storage/database.ts +24 -0
- package/core/storage/ideas-storage.ts +54 -2
- package/core/storage/index.ts +2 -0
- package/core/storage/queue-storage.ts +43 -1
- package/core/storage/shipped-storage.ts +45 -1
- package/core/storage/state-storage.ts +16 -2
- package/core/types/storage.ts +1 -1
- package/core/utils/retry.ts +318 -0
- package/dist/bin/prjct.mjs +612 -21
- package/package.json +1 -1
|
@@ -0,0 +1,318 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Retry Policy Utility
|
|
3
|
+
*
|
|
4
|
+
* Provides exponential backoff retry logic with error classification and circuit breaker.
|
|
5
|
+
* Used to make agent and tool operations resilient against transient failures.
|
|
6
|
+
*
|
|
7
|
+
* @module utils/retry
|
|
8
|
+
* @version 1.0.0
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
// =============================================================================
|
|
12
|
+
// Types
|
|
13
|
+
// =============================================================================
|
|
14
|
+
|
|
15
|
+
export interface RetryOptions {
|
|
16
|
+
/** Maximum number of retry attempts (default: 3) */
|
|
17
|
+
maxAttempts: number
|
|
18
|
+
|
|
19
|
+
/** Base delay in milliseconds for exponential backoff (default: 1000) */
|
|
20
|
+
baseDelayMs: number
|
|
21
|
+
|
|
22
|
+
/** Maximum delay in milliseconds (default: 8000) */
|
|
23
|
+
maxDelayMs: number
|
|
24
|
+
|
|
25
|
+
/** Number of consecutive failures before opening circuit (default: 5) */
|
|
26
|
+
circuitBreakerThreshold?: number
|
|
27
|
+
|
|
28
|
+
/** Time in milliseconds to keep circuit open (default: 60000) */
|
|
29
|
+
circuitBreakerTimeoutMs?: number
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export interface CircuitState {
|
|
33
|
+
consecutiveFailures: number
|
|
34
|
+
openedAt: number | null
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
// =============================================================================
|
|
38
|
+
// Error Classification
|
|
39
|
+
// =============================================================================
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* Node.js error codes that indicate transient failures worth retrying
|
|
43
|
+
*/
|
|
44
|
+
const TRANSIENT_ERROR_CODES = new Set([
|
|
45
|
+
'EBUSY', // Resource busy
|
|
46
|
+
'EAGAIN', // Resource temporarily unavailable
|
|
47
|
+
'ETIMEDOUT', // Operation timed out
|
|
48
|
+
'ECONNRESET', // Connection reset by peer
|
|
49
|
+
'ECONNREFUSED', // Connection refused (may be temporary)
|
|
50
|
+
'ENOTFOUND', // DNS lookup failed (may be temporary)
|
|
51
|
+
'EAI_AGAIN', // DNS temporary failure
|
|
52
|
+
])
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Node.js error codes that indicate permanent failures (fail fast)
|
|
56
|
+
*/
|
|
57
|
+
const PERMANENT_ERROR_CODES = new Set([
|
|
58
|
+
'ENOENT', // No such file or directory
|
|
59
|
+
'EACCES', // Permission denied
|
|
60
|
+
'EPERM', // Operation not permitted
|
|
61
|
+
'EISDIR', // Is a directory
|
|
62
|
+
'ENOTDIR', // Not a directory
|
|
63
|
+
'EINVAL', // Invalid argument
|
|
64
|
+
])
|
|
65
|
+
|
|
66
|
+
/**
|
|
67
|
+
* Check if an error is transient (worth retrying)
|
|
68
|
+
*/
|
|
69
|
+
export function isTransientError(error: unknown): boolean {
|
|
70
|
+
if (!error || typeof error !== 'object') {
|
|
71
|
+
return false
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
const err = error as { code?: string; errno?: number; message?: string }
|
|
75
|
+
|
|
76
|
+
// Check error code
|
|
77
|
+
if (err.code && TRANSIENT_ERROR_CODES.has(err.code)) {
|
|
78
|
+
return true
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// Permanent errors should never be retried
|
|
82
|
+
if (err.code && PERMANENT_ERROR_CODES.has(err.code)) {
|
|
83
|
+
return false
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
// Check message for timeout indicators
|
|
87
|
+
if (err.message) {
|
|
88
|
+
const msg = err.message.toLowerCase()
|
|
89
|
+
if (msg.includes('timeout') || msg.includes('timed out')) {
|
|
90
|
+
return true
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
// Unknown errors are not retried by default (fail fast)
|
|
95
|
+
return false
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Check if an error is permanent (should not retry)
|
|
100
|
+
*/
|
|
101
|
+
export function isPermanentError(error: unknown): boolean {
|
|
102
|
+
if (!error || typeof error !== 'object') {
|
|
103
|
+
return false
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
const err = error as { code?: string }
|
|
107
|
+
return !!(err.code && PERMANENT_ERROR_CODES.has(err.code))
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
// =============================================================================
|
|
111
|
+
// Circuit Breaker
|
|
112
|
+
// =============================================================================
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* Circuit breaker state registry (per operation ID)
|
|
116
|
+
*/
|
|
117
|
+
const circuitStates = new Map<string, CircuitState>()
|
|
118
|
+
|
|
119
|
+
/**
|
|
120
|
+
* Check if circuit is open for a given operation
|
|
121
|
+
*/
|
|
122
|
+
function isCircuitOpen(operationId: string, threshold: number, timeoutMs: number): boolean {
|
|
123
|
+
const state = circuitStates.get(operationId)
|
|
124
|
+
if (!state) {
|
|
125
|
+
return false
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
// Circuit is open if threshold exceeded
|
|
129
|
+
if (state.consecutiveFailures >= threshold && state.openedAt) {
|
|
130
|
+
const elapsed = Date.now() - state.openedAt
|
|
131
|
+
// Circuit closes after timeout
|
|
132
|
+
if (elapsed >= timeoutMs) {
|
|
133
|
+
// Reset circuit
|
|
134
|
+
circuitStates.delete(operationId)
|
|
135
|
+
return false
|
|
136
|
+
}
|
|
137
|
+
return true
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
return false
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
/**
|
|
144
|
+
* Record a failure for circuit breaker
|
|
145
|
+
*/
|
|
146
|
+
function recordFailure(operationId: string, threshold: number): void {
|
|
147
|
+
const state = circuitStates.get(operationId) || {
|
|
148
|
+
consecutiveFailures: 0,
|
|
149
|
+
openedAt: null,
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
state.consecutiveFailures++
|
|
153
|
+
|
|
154
|
+
// Open circuit if threshold reached
|
|
155
|
+
if (state.consecutiveFailures >= threshold && !state.openedAt) {
|
|
156
|
+
state.openedAt = Date.now()
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
circuitStates.set(operationId, state)
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
/**
|
|
163
|
+
* Record a success (reset circuit breaker)
|
|
164
|
+
*/
|
|
165
|
+
function recordSuccess(operationId: string): void {
|
|
166
|
+
circuitStates.delete(operationId)
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
// =============================================================================
|
|
170
|
+
// Retry Policy
|
|
171
|
+
// =============================================================================
|
|
172
|
+
|
|
173
|
+
export class RetryPolicy {
|
|
174
|
+
private options: Required<RetryOptions>
|
|
175
|
+
|
|
176
|
+
constructor(options: Partial<RetryOptions> = {}) {
|
|
177
|
+
this.options = {
|
|
178
|
+
maxAttempts: options.maxAttempts ?? 3,
|
|
179
|
+
baseDelayMs: options.baseDelayMs ?? 1000,
|
|
180
|
+
maxDelayMs: options.maxDelayMs ?? 8000,
|
|
181
|
+
circuitBreakerThreshold: options.circuitBreakerThreshold ?? 5,
|
|
182
|
+
circuitBreakerTimeoutMs: options.circuitBreakerTimeoutMs ?? 60000,
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
/**
|
|
187
|
+
* Execute an operation with retry logic
|
|
188
|
+
*
|
|
189
|
+
* @param operation - Async function to execute
|
|
190
|
+
* @param operationId - Optional ID for circuit breaker tracking
|
|
191
|
+
* @returns Result of the operation
|
|
192
|
+
* @throws Error if all attempts fail or circuit is open
|
|
193
|
+
*/
|
|
194
|
+
async execute<T>(operation: () => Promise<T>, operationId: string = 'default'): Promise<T> {
|
|
195
|
+
// Check circuit breaker
|
|
196
|
+
if (
|
|
197
|
+
isCircuitOpen(
|
|
198
|
+
operationId,
|
|
199
|
+
this.options.circuitBreakerThreshold,
|
|
200
|
+
this.options.circuitBreakerTimeoutMs
|
|
201
|
+
)
|
|
202
|
+
) {
|
|
203
|
+
throw new Error(
|
|
204
|
+
`Circuit breaker is open for operation: ${operationId}. Too many consecutive failures.`
|
|
205
|
+
)
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
let lastError: unknown
|
|
209
|
+
let attempt = 0
|
|
210
|
+
|
|
211
|
+
while (attempt < this.options.maxAttempts) {
|
|
212
|
+
try {
|
|
213
|
+
const result = await operation()
|
|
214
|
+
// Success - reset circuit breaker
|
|
215
|
+
recordSuccess(operationId)
|
|
216
|
+
return result
|
|
217
|
+
} catch (error) {
|
|
218
|
+
lastError = error
|
|
219
|
+
attempt++
|
|
220
|
+
|
|
221
|
+
// Check if error is permanent (fail fast)
|
|
222
|
+
if (isPermanentError(error)) {
|
|
223
|
+
recordFailure(operationId, this.options.circuitBreakerThreshold)
|
|
224
|
+
throw error
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
// Check if error is transient and we have attempts left
|
|
228
|
+
const shouldRetry = isTransientError(error) && attempt < this.options.maxAttempts
|
|
229
|
+
|
|
230
|
+
if (!shouldRetry) {
|
|
231
|
+
// Not transient or out of attempts
|
|
232
|
+
recordFailure(operationId, this.options.circuitBreakerThreshold)
|
|
233
|
+
throw error
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
// Calculate delay with exponential backoff
|
|
237
|
+
const delay = Math.min(
|
|
238
|
+
this.options.baseDelayMs * 2 ** (attempt - 1),
|
|
239
|
+
this.options.maxDelayMs
|
|
240
|
+
)
|
|
241
|
+
|
|
242
|
+
// Wait before retry
|
|
243
|
+
await new Promise((resolve) => setTimeout(resolve, delay))
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
// All attempts failed
|
|
248
|
+
recordFailure(operationId, this.options.circuitBreakerThreshold)
|
|
249
|
+
throw lastError
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
/**
|
|
253
|
+
* Check if an error is transient (exposed for testing)
|
|
254
|
+
*/
|
|
255
|
+
isTransientError(error: unknown): boolean {
|
|
256
|
+
return isTransientError(error)
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
/**
|
|
260
|
+
* Check if circuit is open for an operation (exposed for testing)
|
|
261
|
+
*/
|
|
262
|
+
isCircuitOpen(operationId: string): boolean {
|
|
263
|
+
return isCircuitOpen(
|
|
264
|
+
operationId,
|
|
265
|
+
this.options.circuitBreakerThreshold,
|
|
266
|
+
this.options.circuitBreakerTimeoutMs
|
|
267
|
+
)
|
|
268
|
+
}
|
|
269
|
+
|
|
270
|
+
/**
|
|
271
|
+
* Get current circuit state for an operation (exposed for testing)
|
|
272
|
+
*/
|
|
273
|
+
getCircuitState(operationId: string): CircuitState | undefined {
|
|
274
|
+
return circuitStates.get(operationId)
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
/**
|
|
278
|
+
* Reset circuit breaker for an operation (exposed for testing)
|
|
279
|
+
*/
|
|
280
|
+
resetCircuit(operationId: string): void {
|
|
281
|
+
circuitStates.delete(operationId)
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
/**
|
|
285
|
+
* Reset all circuit breakers (exposed for testing)
|
|
286
|
+
*/
|
|
287
|
+
resetAllCircuits(): void {
|
|
288
|
+
circuitStates.clear()
|
|
289
|
+
}
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
// =============================================================================
// Exports
// =============================================================================

/**
 * Default retry policy for agent operations:
 * 3 attempts, exponential backoff from a 1s base delay up to an 8s cap.
 * Circuit breaker settings fall back to the RetryPolicy defaults.
 */
export const defaultAgentRetryPolicy = new RetryPolicy({
  maxAttempts: 3,
  baseDelayMs: 1000,
  maxDelayMs: 8000,
})

/**
 * Retry policy for tool operations (less aggressive):
 * 2 attempts, exponential backoff from a 500ms base delay up to a 2s cap.
 * Circuit breaker settings fall back to the RetryPolicy defaults.
 */
export const defaultToolRetryPolicy = new RetryPolicy({
  maxAttempts: 2,
  baseDelayMs: 500,
  maxDelayMs: 2000,
})
|