@stravigor/saina 0.4.8 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/providers/anthropic_provider.ts +18 -20
- package/src/providers/openai_provider.ts +24 -30
- package/src/types.ts +2 -0
- package/src/utils/retry.ts +100 -0
package/src/providers/anthropic_provider.ts
CHANGED

@@ -1,4 +1,5 @@
 import { parseSSE } from '../utils/sse_parser.ts'
+import { retryableFetch, type RetryOptions } from '../utils/retry.ts'
 import { ExternalServiceError } from '@stravigor/core/exceptions/errors'
 import type {
   AIProvider,
@@ -23,6 +24,7 @@ export class AnthropicProvider implements AIProvider {
   private baseUrl: string
   private defaultModel: string
   private defaultMaxTokens: number
+  private retryOptions: RetryOptions

   constructor(config: ProviderConfig) {
     this.name = 'anthropic'
@@ -30,21 +32,21 @@ export class AnthropicProvider implements AIProvider {
     this.baseUrl = (config.baseUrl ?? 'https://api.anthropic.com').replace(/\/$/, '')
     this.defaultModel = config.model
     this.defaultMaxTokens = config.maxTokens ?? 4096
+    this.retryOptions = {
+      maxRetries: config.maxRetries ?? 3,
+      baseDelay: config.retryBaseDelay ?? 1000,
+    }
   }

   async complete(request: CompletionRequest): Promise<CompletionResponse> {
     const body = this.buildRequestBody(request, false)

-    const response = await fetch(`${this.baseUrl}/v1/messages`, {
-      method: 'POST',
-      headers: this.buildHeaders(),
-      body: JSON.stringify(body),
-    })
-
-    if (!response.ok) {
-      const text = await response.text()
-      throw new ExternalServiceError('Anthropic', response.status, text)
-    }
+    const response = await retryableFetch(
+      'Anthropic',
+      `${this.baseUrl}/v1/messages`,
+      { method: 'POST', headers: this.buildHeaders(), body: JSON.stringify(body) },
+      this.retryOptions
+    )

     const data: any = await response.json()
     return this.parseResponse(data)
@@ -53,16 +55,12 @@ export class AnthropicProvider implements AIProvider {
   async *stream(request: CompletionRequest): AsyncIterable<StreamChunk> {
     const body = this.buildRequestBody(request, true)

-    const response = await fetch(`${this.baseUrl}/v1/messages`, {
-      method: 'POST',
-      headers: this.buildHeaders(),
-      body: JSON.stringify(body),
-    })
-
-    if (!response.ok) {
-      const text = await response.text()
-      throw new ExternalServiceError('Anthropic', response.status, text)
-    }
+    const response = await retryableFetch(
+      'Anthropic',
+      `${this.baseUrl}/v1/messages`,
+      { method: 'POST', headers: this.buildHeaders(), body: JSON.stringify(body) },
+      this.retryOptions
+    )

     if (!response.body) {
       throw new ExternalServiceError('Anthropic', undefined, 'No stream body returned')
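For consumers of the provider, nothing changes at the call site: complete() and stream() return the same values and still throw ExternalServiceError, except that the error now surfaces only after retries are exhausted (or immediately for a non-retryable status such as 401). A minimal caller-side sketch; the import path, model id, and request shape below are illustrative assumptions, not something this diff shows:

// Hypothetical caller: import path, model id, and request shape are assumptions.
import { AnthropicProvider } from '@stravigor/saina'
import { ExternalServiceError } from '@stravigor/core/exceptions/errors'

const provider = new AnthropicProvider({
  model: 'claude-sonnet-4-5', // plus whatever auth fields ProviderConfig requires (not shown in this diff)
})

try {
  // Transient 429/5xx responses and network errors are now retried inside retryableFetch.
  const result = await provider.complete({ messages: [{ role: 'user', content: 'ping' }] })
  console.log(result)
} catch (err) {
  // Reached only after retries are exhausted, or straight away for a non-retryable status.
  if (err instanceof ExternalServiceError) console.error('Anthropic request failed:', err)
  else throw err
}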
package/src/providers/openai_provider.ts
CHANGED

@@ -1,4 +1,5 @@
 import { parseSSE } from '../utils/sse_parser.ts'
+import { retryableFetch, type RetryOptions } from '../utils/retry.ts'
 import { ExternalServiceError } from '@stravigor/core/exceptions/errors'
 import type {
   AIProvider,
@@ -24,6 +25,7 @@ export class OpenAIProvider implements AIProvider {
   private baseUrl: string
   private defaultModel: string
   private defaultMaxTokens?: number
+  private retryOptions: RetryOptions

   constructor(config: ProviderConfig, name?: string) {
     this.name = name ?? 'openai'
@@ -31,6 +33,10 @@ export class OpenAIProvider implements AIProvider {
     this.baseUrl = (config.baseUrl ?? 'https://api.openai.com').replace(/\/$/, '')
     this.defaultModel = config.model
     this.defaultMaxTokens = config.maxTokens
+    this.retryOptions = {
+      maxRetries: config.maxRetries ?? 3,
+      baseDelay: config.retryBaseDelay ?? 1000,
+    }
   }

   /** Whether this provider supports OpenAI's native json_schema response format. */
@@ -41,16 +47,12 @@ export class OpenAIProvider implements AIProvider {
   async complete(request: CompletionRequest): Promise<CompletionResponse> {
     const body = this.buildRequestBody(request, false)

-    const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
-      method: 'POST',
-      headers: this.buildHeaders(),
-      body: JSON.stringify(body),
-    })
-
-    if (!response.ok) {
-      const text = await response.text()
-      throw new ExternalServiceError('OpenAI', response.status, text)
-    }
+    const response = await retryableFetch(
+      'OpenAI',
+      `${this.baseUrl}/v1/chat/completions`,
+      { method: 'POST', headers: this.buildHeaders(), body: JSON.stringify(body) },
+      this.retryOptions
+    )

     const data: any = await response.json()
     return this.parseResponse(data)
@@ -59,16 +61,12 @@ export class OpenAIProvider implements AIProvider {
   async *stream(request: CompletionRequest): AsyncIterable<StreamChunk> {
     const body = this.buildRequestBody(request, true)

-    const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
-      method: 'POST',
-      headers: this.buildHeaders(),
-      body: JSON.stringify(body),
-    })
-
-    if (!response.ok) {
-      const text = await response.text()
-      throw new ExternalServiceError('OpenAI', response.status, text)
-    }
+    const response = await retryableFetch(
+      'OpenAI',
+      `${this.baseUrl}/v1/chat/completions`,
+      { method: 'POST', headers: this.buildHeaders(), body: JSON.stringify(body) },
+      this.retryOptions
+    )

     if (!response.body) {
       throw new ExternalServiceError('OpenAI', undefined, 'No stream body returned')
@@ -157,16 +155,12 @@ export class OpenAIProvider implements AIProvider {
       model: model ?? 'text-embedding-3-small',
     }

-    const response = await fetch(`${this.baseUrl}/v1/embeddings`, {
-      method: 'POST',
-      headers: this.buildHeaders(),
-      body: JSON.stringify(body),
-    })
-
-    if (!response.ok) {
-      const text = await response.text()
-      throw new ExternalServiceError('OpenAI', response.status, text)
-    }
+    const response = await retryableFetch(
+      'OpenAI',
+      `${this.baseUrl}/v1/embeddings`,
+      { method: 'POST', headers: this.buildHeaders(), body: JSON.stringify(body) },
+      this.retryOptions
+    )

     const data: any = await response.json()

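Both providers thread the same two new ProviderConfig fields into the retry layer: maxRetries (default 3) and retryBaseDelay (default 1000 ms). The other knobs on RetryOptions (maxDelay, retryableStatuses) are not exposed through ProviderConfig in this release and fall back to the defaults inside retryableFetch. A small configuration sketch; the export path and the non-retry fields are assumptions:

// Hypothetical configuration: export path and non-retry fields are illustrative assumptions.
import { OpenAIProvider } from '@stravigor/saina'

const provider = new OpenAIProvider({
  model: 'gpt-4o-mini',  // illustrative model id
  maxTokens: 1024,
  // New in 0.5.0, both optional:
  maxRetries: 5,         // retries after the initial attempt (up to 6 requests total); defaults to 3
  retryBaseDelay: 500,   // backoff base in milliseconds; defaults to 1000
})

Note that omitting both fields still enables retries (3 retries with a 1 s base delay), which is a behavioural change from 0.4.8, where a 429 or 5xx failed on the first response.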
package/src/utils/retry.ts
ADDED
@@ -0,0 +1,100 @@
+import { ExternalServiceError } from '@stravigor/core/exceptions/errors'
+
+export interface RetryOptions {
+  maxRetries?: number
+  baseDelay?: number
+  maxDelay?: number
+  retryableStatuses?: number[]
+}
+
+const DEFAULT_RETRYABLE = [429, 500, 502, 503, 529]
+
+/**
+ * Fetch with automatic retry and exponential backoff for transient errors.
+ *
+ * Retries on 429 (rate limit), 5xx, and network failures.
+ * Parses the `retry-after` header when available; otherwise uses
+ * exponential backoff with jitter.
+ *
+ * Returns the successful `Response`. On final failure, throws
+ * `ExternalServiceError` with the last status and body.
+ */
+export async function retryableFetch(
+  service: string,
+  url: string,
+  init: RequestInit,
+  options?: RetryOptions
+): Promise<Response> {
+  const maxRetries = options?.maxRetries ?? 3
+  const baseDelay = options?.baseDelay ?? 1000
+  const maxDelay = options?.maxDelay ?? 60_000
+  const retryable = options?.retryableStatuses ?? DEFAULT_RETRYABLE
+
+  for (let attempt = 0; attempt <= maxRetries; attempt++) {
+    let response: Response
+
+    try {
+      response = await fetch(url, init)
+    } catch (err) {
+      // Network error (DNS, connection refused, etc.)
+      if (attempt === maxRetries) {
+        throw new ExternalServiceError(
+          service,
+          undefined,
+          err instanceof Error ? err.message : String(err)
+        )
+      }
+      await sleep(backoffDelay(attempt, baseDelay, maxDelay))
+      continue
+    }
+
+    if (response.ok) return response
+
+    // Non-retryable status — fail immediately
+    if (!retryable.includes(response.status)) {
+      const text = await response.text()
+      throw new ExternalServiceError(service, response.status, text)
+    }
+
+    // Retryable status — wait and retry (unless last attempt)
+    if (attempt === maxRetries) {
+      const text = await response.text()
+      throw new ExternalServiceError(service, response.status, text)
+    }
+
+    const delay = parseRetryAfter(response) ?? backoffDelay(attempt, baseDelay, maxDelay)
+    await sleep(delay)
+  }
+
+  // Unreachable, but satisfies TypeScript
+  throw new ExternalServiceError(service, undefined, 'Retry loop exited unexpectedly')
+}
+
+/**
+ * Parse the `retry-after` header into milliseconds.
+ * Supports both delta-seconds ("2") and HTTP-date formats.
+ */
+function parseRetryAfter(response: Response): number | null {
+  const header = response.headers.get('retry-after')
+  if (!header) return null
+
+  const seconds = Number(header)
+  if (!Number.isNaN(seconds)) return seconds * 1000
+
+  // HTTP-date format
+  const date = Date.parse(header)
+  if (!Number.isNaN(date)) return Math.max(0, date - Date.now())
+
+  return null
+}
+
+/** Exponential backoff with jitter: base * 2^attempt + random jitter, capped at maxDelay. */
+function backoffDelay(attempt: number, baseDelay: number, maxDelay: number): number {
+  const exp = baseDelay * 2 ** attempt
+  const jitter = Math.random() * baseDelay
+  return Math.min(exp + jitter, maxDelay)
+}
+
+function sleep(ms: number): Promise<void> {
+  return new Promise(resolve => setTimeout(resolve, ms))
+}
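Since retryableFetch is a plain exported function, it can be reused inside the package for any other upstream call that should get the same treatment (whether it is re-exported publicly is not shown here). With the defaults, the computed backoff is baseDelay * 2^attempt plus up to baseDelay of random jitter, capped at maxDelay, so roughly 1 to 2 s after the first failure, 2 to 3 s after the second, and 4 to 5 s after the third; a parseable retry-after header takes precedence. A usage sketch with a made-up endpoint, header, and import path:

// Hypothetical direct usage: the endpoint, header, and import path are placeholders.
import { retryableFetch } from './utils/retry.ts'

const response = await retryableFetch(
  'ExampleService',
  'https://api.example.com/v1/things',
  { method: 'GET', headers: { authorization: 'Bearer <token>' } },
  { maxRetries: 2, baseDelay: 250, maxDelay: 5_000 }
)
const data = await response.json()

// Worst-case waits with these options and no retry-after header:
//   attempt 0 fails -> sleep 250-500 ms
//   attempt 1 fails -> sleep 500-750 ms
//   attempt 2 fails -> ExternalServiceError thrown with the last status and body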