@twelvehart/supermemory-runtime 1.0.0-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +57 -0
- package/README.md +374 -0
- package/dist/index.js +189 -0
- package/dist/mcp/index.js +1132 -0
- package/docker-compose.prod.yml +91 -0
- package/docker-compose.yml +358 -0
- package/drizzle/0000_dapper_the_professor.sql +159 -0
- package/drizzle/0001_api_keys.sql +51 -0
- package/drizzle/meta/0000_snapshot.json +1532 -0
- package/drizzle/meta/_journal.json +13 -0
- package/drizzle.config.ts +20 -0
- package/package.json +114 -0
- package/scripts/add-extraction-job.ts +122 -0
- package/scripts/benchmark-pgvector.ts +122 -0
- package/scripts/bootstrap.sh +209 -0
- package/scripts/check-runtime-pack.ts +111 -0
- package/scripts/claude-mcp-config.ts +336 -0
- package/scripts/docker-entrypoint.sh +183 -0
- package/scripts/doctor.ts +377 -0
- package/scripts/init-db.sql +33 -0
- package/scripts/install.sh +1110 -0
- package/scripts/mcp-setup.ts +271 -0
- package/scripts/migrations/001_create_pgvector_extension.sql +31 -0
- package/scripts/migrations/002_create_memory_embeddings_table.sql +75 -0
- package/scripts/migrations/003_create_hnsw_index.sql +94 -0
- package/scripts/migrations/004_create_memory_embeddings_standalone.sql +70 -0
- package/scripts/migrations/005_create_chunks_table.sql +95 -0
- package/scripts/migrations/006_create_processing_queue.sql +45 -0
- package/scripts/migrations/generate_test_data.sql +42 -0
- package/scripts/migrations/phase1_comprehensive_test.sql +204 -0
- package/scripts/migrations/run_migrations.sh +286 -0
- package/scripts/migrations/test_hnsw_index.sql +255 -0
- package/scripts/pre-commit-secrets +282 -0
- package/scripts/run-extraction-worker.ts +46 -0
- package/scripts/run-phase1-tests.sh +291 -0
- package/scripts/setup.ts +222 -0
- package/scripts/smoke-install.sh +12 -0
- package/scripts/test-health-endpoint.sh +328 -0
- package/src/api/index.ts +2 -0
- package/src/api/middleware/auth.ts +80 -0
- package/src/api/middleware/csrf.ts +308 -0
- package/src/api/middleware/errorHandler.ts +166 -0
- package/src/api/middleware/rateLimit.ts +360 -0
- package/src/api/middleware/validation.ts +514 -0
- package/src/api/routes/documents.ts +286 -0
- package/src/api/routes/profiles.ts +237 -0
- package/src/api/routes/search.ts +71 -0
- package/src/api/stores/index.ts +58 -0
- package/src/config/bootstrap-env.ts +3 -0
- package/src/config/env.ts +71 -0
- package/src/config/feature-flags.ts +25 -0
- package/src/config/index.ts +140 -0
- package/src/config/secrets.config.ts +291 -0
- package/src/db/client.ts +92 -0
- package/src/db/index.ts +73 -0
- package/src/db/postgres.ts +72 -0
- package/src/db/schema/chunks.schema.ts +31 -0
- package/src/db/schema/containers.schema.ts +46 -0
- package/src/db/schema/documents.schema.ts +49 -0
- package/src/db/schema/embeddings.schema.ts +32 -0
- package/src/db/schema/index.ts +11 -0
- package/src/db/schema/memories.schema.ts +72 -0
- package/src/db/schema/profiles.schema.ts +34 -0
- package/src/db/schema/queue.schema.ts +59 -0
- package/src/db/schema/relationships.schema.ts +42 -0
- package/src/db/schema.ts +223 -0
- package/src/db/worker-connection.ts +47 -0
- package/src/index.ts +235 -0
- package/src/mcp/CLAUDE.md +1 -0
- package/src/mcp/index.ts +1380 -0
- package/src/mcp/legacyState.ts +22 -0
- package/src/mcp/rateLimit.ts +358 -0
- package/src/mcp/resources.ts +309 -0
- package/src/mcp/results.ts +104 -0
- package/src/mcp/tools.ts +401 -0
- package/src/queues/config.ts +119 -0
- package/src/queues/index.ts +289 -0
- package/src/sdk/client.ts +225 -0
- package/src/sdk/errors.ts +266 -0
- package/src/sdk/http.ts +560 -0
- package/src/sdk/index.ts +244 -0
- package/src/sdk/resources/base.ts +65 -0
- package/src/sdk/resources/connections.ts +204 -0
- package/src/sdk/resources/documents.ts +163 -0
- package/src/sdk/resources/index.ts +10 -0
- package/src/sdk/resources/memories.ts +150 -0
- package/src/sdk/resources/search.ts +60 -0
- package/src/sdk/resources/settings.ts +36 -0
- package/src/sdk/types.ts +674 -0
- package/src/services/chunking/index.ts +451 -0
- package/src/services/chunking.service.ts +650 -0
- package/src/services/csrf.service.ts +252 -0
- package/src/services/documents.repository.ts +219 -0
- package/src/services/documents.service.ts +191 -0
- package/src/services/embedding.service.ts +404 -0
- package/src/services/extraction.service.ts +300 -0
- package/src/services/extractors/code.extractor.ts +451 -0
- package/src/services/extractors/index.ts +9 -0
- package/src/services/extractors/markdown.extractor.ts +461 -0
- package/src/services/extractors/pdf.extractor.ts +315 -0
- package/src/services/extractors/text.extractor.ts +118 -0
- package/src/services/extractors/url.extractor.ts +243 -0
- package/src/services/index.ts +235 -0
- package/src/services/ingestion.service.ts +177 -0
- package/src/services/llm/anthropic.ts +400 -0
- package/src/services/llm/base.ts +460 -0
- package/src/services/llm/contradiction-detector.service.ts +526 -0
- package/src/services/llm/heuristics.ts +148 -0
- package/src/services/llm/index.ts +309 -0
- package/src/services/llm/memory-classifier.service.ts +383 -0
- package/src/services/llm/memory-extension-detector.service.ts +523 -0
- package/src/services/llm/mock.ts +470 -0
- package/src/services/llm/openai.ts +398 -0
- package/src/services/llm/prompts.ts +438 -0
- package/src/services/llm/types.ts +373 -0
- package/src/services/memory.repository.ts +1769 -0
- package/src/services/memory.service.ts +1338 -0
- package/src/services/memory.types.ts +234 -0
- package/src/services/persistence/index.ts +295 -0
- package/src/services/pipeline.service.ts +509 -0
- package/src/services/profile.repository.ts +436 -0
- package/src/services/profile.service.ts +560 -0
- package/src/services/profile.types.ts +270 -0
- package/src/services/relationships/detector.ts +1128 -0
- package/src/services/relationships/index.ts +268 -0
- package/src/services/relationships/memory-integration.ts +459 -0
- package/src/services/relationships/strategies.ts +132 -0
- package/src/services/relationships/types.ts +370 -0
- package/src/services/search.service.ts +761 -0
- package/src/services/search.types.ts +220 -0
- package/src/services/secrets.service.ts +384 -0
- package/src/services/vectorstore/base.ts +327 -0
- package/src/services/vectorstore/index.ts +444 -0
- package/src/services/vectorstore/memory.ts +286 -0
- package/src/services/vectorstore/migration.ts +295 -0
- package/src/services/vectorstore/mock.ts +403 -0
- package/src/services/vectorstore/pgvector.ts +695 -0
- package/src/services/vectorstore/types.ts +247 -0
- package/src/startup.ts +389 -0
- package/src/types/api.types.ts +193 -0
- package/src/types/document.types.ts +103 -0
- package/src/types/index.ts +241 -0
- package/src/types/profile.base.ts +133 -0
- package/src/utils/errors.ts +447 -0
- package/src/utils/id.ts +15 -0
- package/src/utils/index.ts +101 -0
- package/src/utils/logger.ts +313 -0
- package/src/utils/sanitization.ts +501 -0
- package/src/utils/secret-validation.ts +273 -0
- package/src/utils/synonyms.ts +188 -0
- package/src/utils/validation.ts +581 -0
- package/src/workers/chunking.worker.ts +242 -0
- package/src/workers/embedding.worker.ts +358 -0
- package/src/workers/extraction.worker.ts +346 -0
- package/src/workers/indexing.worker.ts +505 -0
- package/tsconfig.json +38 -0
|
@@ -0,0 +1,400 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Anthropic LLM Provider
|
|
3
|
+
*
|
|
4
|
+
* Implements LLM-based memory extraction using Anthropic's Claude models.
|
|
5
|
+
* Uses the Anthropic SDK with structured output prompting.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { getLogger } from '../../utils/logger.js'
|
|
9
|
+
import { BaseLLMProvider, LLMError } from './base.js'
|
|
10
|
+
import type {
|
|
11
|
+
AnthropicLLMConfig,
|
|
12
|
+
LLMProviderType,
|
|
13
|
+
ExtractedMemory,
|
|
14
|
+
DetectedRelationship,
|
|
15
|
+
ExtractionOptions,
|
|
16
|
+
RelationshipDetectionOptions,
|
|
17
|
+
} from './types.js'
|
|
18
|
+
import { LLMErrorCode } from './types.js'
|
|
19
|
+
import type { MemoryType } from '../../types/index.js'
|
|
20
|
+
import {
|
|
21
|
+
MEMORY_EXTRACTION_SYSTEM_PROMPT,
|
|
22
|
+
MEMORY_EXTRACTION_EXAMPLES,
|
|
23
|
+
RELATIONSHIP_DETECTION_SYSTEM_PROMPT,
|
|
24
|
+
RELATIONSHIP_DETECTION_EXAMPLES,
|
|
25
|
+
generateExtractionPrompt,
|
|
26
|
+
generateRelationshipPrompt,
|
|
27
|
+
normalizeJsonResponse,
|
|
28
|
+
parseExtractionResponse,
|
|
29
|
+
parseRelationshipResponse,
|
|
30
|
+
} from './prompts.js'
|
|
31
|
+
|
|
32
|
+
const logger = getLogger('AnthropicProvider')
|
|
33
|
+
|
|
34
|
+
// ============================================================================
|
|
35
|
+
// Default Configuration
|
|
36
|
+
// ============================================================================
|
|
37
|
+
|
|
38
|
+
// Baseline settings merged under the caller-supplied config in the
// AnthropicLLMProvider constructor (caller values win via object spread).
const DEFAULT_ANTHROPIC_CONFIG: Partial<AnthropicLLMConfig> = {
  model: 'claude-3-haiku-20240307', // default model used when config.model is unset
  maxTokens: 2000, // cap on generated output tokens per request
  temperature: 0.1, // low temperature for near-deterministic structured JSON output
  timeoutMs: 30000, // per-request abort timeout enforced in callAnthropicAPI
  maxRetries: 3, // retry settings; presumably consumed by BaseLLMProvider -- TODO confirm
  retryDelayMs: 1000,
}
|
|
46
|
+
|
|
47
|
+
// ============================================================================
|
|
48
|
+
// Anthropic API Types (minimal, since we're not using the full SDK)
|
|
49
|
+
// ============================================================================
|
|
50
|
+
|
|
51
|
+
/**
 * A single chat turn in the Messages API request body.
 * Minimal hand-rolled shape — the full Anthropic SDK is deliberately not used.
 */
interface AnthropicMessage {
  role: 'user' | 'assistant'
  content: string
}
|
|
55
|
+
|
|
56
|
+
/**
 * Successful response body from POST /v1/messages.
 * Only the fields this provider actually reads are modeled
 * (content[0].text and the usage token counts).
 */
interface AnthropicResponse {
  id: string
  type: 'message'
  role: 'assistant'
  // Ordered content blocks; this provider consumes only the first text block.
  content: Array<{
    type: 'text'
    text: string
  }>
  model: string
  stop_reason: string
  // Token accounting surfaced to callers as tokensUsed.
  usage: {
    input_tokens: number
    output_tokens: number
  }
}
|
|
71
|
+
|
|
72
|
+
/**
 * Error envelope returned by the Anthropic API on non-2xx responses.
 * `error.message` is extracted in handleAnthropicHttpError to classify
 * the failure (content filtering vs. token limit, etc.).
 */
interface AnthropicError {
  type: 'error'
  error: {
    type: string
    message: string
  }
}
|
|
79
|
+
|
|
80
|
+
// ============================================================================
|
|
81
|
+
// Anthropic Provider Implementation
|
|
82
|
+
// ============================================================================
|
|
83
|
+
|
|
84
|
+
export class AnthropicLLMProvider extends BaseLLMProvider {
|
|
85
|
+
readonly type: LLMProviderType = 'anthropic'
|
|
86
|
+
|
|
87
|
+
private readonly apiKey?: string
|
|
88
|
+
private readonly model: string
|
|
89
|
+
private readonly baseUrl: string
|
|
90
|
+
|
|
91
|
+
constructor(config: AnthropicLLMConfig) {
|
|
92
|
+
super({
|
|
93
|
+
...DEFAULT_ANTHROPIC_CONFIG,
|
|
94
|
+
...config,
|
|
95
|
+
})
|
|
96
|
+
|
|
97
|
+
this.apiKey = config.apiKey
|
|
98
|
+
this.model = config.model ?? DEFAULT_ANTHROPIC_CONFIG.model!
|
|
99
|
+
this.baseUrl = config.baseUrl ?? 'https://api.anthropic.com'
|
|
100
|
+
|
|
101
|
+
logger.debug('Anthropic provider initialized', {
|
|
102
|
+
model: this.model,
|
|
103
|
+
hasApiKey: !!this.apiKey,
|
|
104
|
+
})
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// ============================================================================
|
|
108
|
+
// Availability Check
|
|
109
|
+
// ============================================================================
|
|
110
|
+
|
|
111
|
+
isAvailable(): boolean {
|
|
112
|
+
return !!this.apiKey
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
// ============================================================================
|
|
116
|
+
// API Communication
|
|
117
|
+
// ============================================================================
|
|
118
|
+
|
|
119
|
+
private async callAnthropicAPI(
|
|
120
|
+
systemPrompt: string,
|
|
121
|
+
userMessage: string
|
|
122
|
+
): Promise<{ content: string; usage: { input: number; output: number } }> {
|
|
123
|
+
if (!this.apiKey) {
|
|
124
|
+
throw LLMError.providerUnavailable('anthropic')
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
const messages: AnthropicMessage[] = [{ role: 'user', content: userMessage }]
|
|
128
|
+
|
|
129
|
+
const requestBody = {
|
|
130
|
+
model: this.model,
|
|
131
|
+
max_tokens: this.config.maxTokens,
|
|
132
|
+
temperature: this.config.temperature,
|
|
133
|
+
system: systemPrompt,
|
|
134
|
+
messages,
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
const controller = new AbortController()
|
|
138
|
+
const timeoutId = setTimeout(() => controller.abort(), this.config.timeoutMs)
|
|
139
|
+
|
|
140
|
+
try {
|
|
141
|
+
const response = await fetch(`${this.baseUrl}/v1/messages`, {
|
|
142
|
+
method: 'POST',
|
|
143
|
+
headers: {
|
|
144
|
+
'Content-Type': 'application/json',
|
|
145
|
+
'x-api-key': this.apiKey,
|
|
146
|
+
'anthropic-version': '2023-06-01',
|
|
147
|
+
},
|
|
148
|
+
body: JSON.stringify(requestBody),
|
|
149
|
+
signal: controller.signal,
|
|
150
|
+
})
|
|
151
|
+
|
|
152
|
+
clearTimeout(timeoutId)
|
|
153
|
+
|
|
154
|
+
if (!response.ok) {
|
|
155
|
+
const errorBody = (await response.json().catch(() => ({}))) as AnthropicError | Record<string, unknown>
|
|
156
|
+
throw this.handleAnthropicHttpError(response.status, errorBody)
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
// Parse and validate JSON response (with error handling for concurrent request corruption)
|
|
160
|
+
let data: AnthropicResponse
|
|
161
|
+
try {
|
|
162
|
+
data = (await response.json()) as AnthropicResponse
|
|
163
|
+
} catch (parseError) {
|
|
164
|
+
throw LLMError.invalidResponse(
|
|
165
|
+
'anthropic',
|
|
166
|
+
`JSON parse error: ${parseError instanceof Error ? parseError.message : String(parseError)}`
|
|
167
|
+
)
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
// Validate response structure (handles concurrent request JSON corruption)
|
|
171
|
+
if (!data || typeof data !== 'object') {
|
|
172
|
+
throw LLMError.invalidResponse('anthropic', 'Malformed JSON response')
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
if (!data.content || !Array.isArray(data.content)) {
|
|
176
|
+
throw LLMError.invalidResponse('anthropic', 'Invalid response structure: missing content array')
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
if (!data.content?.[0]?.text) {
|
|
180
|
+
throw LLMError.invalidResponse('anthropic', 'Empty response from model')
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
return {
|
|
184
|
+
content: data.content[0].text,
|
|
185
|
+
usage: {
|
|
186
|
+
input: data.usage.input_tokens,
|
|
187
|
+
output: data.usage.output_tokens,
|
|
188
|
+
},
|
|
189
|
+
}
|
|
190
|
+
} catch (error) {
|
|
191
|
+
clearTimeout(timeoutId)
|
|
192
|
+
|
|
193
|
+
if (error instanceof LLMError) {
|
|
194
|
+
throw error
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
if (error instanceof Error && error.name === 'AbortError') {
|
|
198
|
+
throw LLMError.timeout('anthropic')
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
throw this.handleAnthropicError(error)
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
// ============================================================================
|
|
206
|
+
// Memory Extraction
|
|
207
|
+
// ============================================================================
|
|
208
|
+
|
|
209
|
+
protected async doExtractMemories(
|
|
210
|
+
text: string,
|
|
211
|
+
options: ExtractionOptions
|
|
212
|
+
): Promise<{
|
|
213
|
+
memories: ExtractedMemory[]
|
|
214
|
+
rawResponse?: string
|
|
215
|
+
tokensUsed?: { prompt: number; completion: number; total: number }
|
|
216
|
+
}> {
|
|
217
|
+
const systemPrompt = `${MEMORY_EXTRACTION_SYSTEM_PROMPT}\n\n${MEMORY_EXTRACTION_EXAMPLES}`
|
|
218
|
+
const userPrompt = generateExtractionPrompt(text, options)
|
|
219
|
+
|
|
220
|
+
const response = await this.callAnthropicAPI(systemPrompt, userPrompt)
|
|
221
|
+
const parsed = parseExtractionResponse(response.content)
|
|
222
|
+
|
|
223
|
+
// Filter by confidence if specified
|
|
224
|
+
let memories: ExtractedMemory[] = parsed.memories.map((m) => ({
|
|
225
|
+
content: m.content,
|
|
226
|
+
type: m.type,
|
|
227
|
+
confidence: m.confidence,
|
|
228
|
+
entities: m.entities.map((e) => ({
|
|
229
|
+
name: e.name,
|
|
230
|
+
type: e.type as 'person' | 'place' | 'organization' | 'date' | 'concept' | 'other',
|
|
231
|
+
mentions: 1,
|
|
232
|
+
})),
|
|
233
|
+
keywords: m.keywords,
|
|
234
|
+
}))
|
|
235
|
+
|
|
236
|
+
if (options.minConfidence) {
|
|
237
|
+
memories = memories.filter((m) => m.confidence >= options.minConfidence!)
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
if (options.maxMemories) {
|
|
241
|
+
memories = memories.slice(0, options.maxMemories)
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
return {
|
|
245
|
+
memories,
|
|
246
|
+
rawResponse: response.content,
|
|
247
|
+
tokensUsed: {
|
|
248
|
+
prompt: response.usage.input,
|
|
249
|
+
completion: response.usage.output,
|
|
250
|
+
total: response.usage.input + response.usage.output,
|
|
251
|
+
},
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
// ============================================================================
|
|
256
|
+
// Relationship Detection
|
|
257
|
+
// ============================================================================
|
|
258
|
+
|
|
259
|
+
protected async doDetectRelationships(
|
|
260
|
+
newMemory: { id: string; content: string; type: MemoryType },
|
|
261
|
+
existingMemories: Array<{ id: string; content: string; type: MemoryType }>,
|
|
262
|
+
options: RelationshipDetectionOptions
|
|
263
|
+
): Promise<{
|
|
264
|
+
relationships: DetectedRelationship[]
|
|
265
|
+
supersededMemoryIds: string[]
|
|
266
|
+
}> {
|
|
267
|
+
// If no existing memories, return empty
|
|
268
|
+
if (existingMemories.length === 0) {
|
|
269
|
+
return { relationships: [], supersededMemoryIds: [] }
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
const systemPrompt = `${RELATIONSHIP_DETECTION_SYSTEM_PROMPT}\n\n${RELATIONSHIP_DETECTION_EXAMPLES}`
|
|
273
|
+
const userPrompt = generateRelationshipPrompt(newMemory, existingMemories, options)
|
|
274
|
+
|
|
275
|
+
const response = await this.callAnthropicAPI(systemPrompt, userPrompt)
|
|
276
|
+
const parsed = parseRelationshipResponse(response.content)
|
|
277
|
+
|
|
278
|
+
// Filter and validate relationships
|
|
279
|
+
let relationships: DetectedRelationship[] = parsed.relationships.map((r) => ({
|
|
280
|
+
sourceMemoryId: r.sourceMemoryId,
|
|
281
|
+
targetMemoryId: r.targetMemoryId,
|
|
282
|
+
type: r.type as DetectedRelationship['type'],
|
|
283
|
+
confidence: r.confidence,
|
|
284
|
+
reason: r.reason,
|
|
285
|
+
}))
|
|
286
|
+
|
|
287
|
+
if (options.minConfidence) {
|
|
288
|
+
relationships = relationships.filter((r) => r.confidence >= options.minConfidence!)
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
if (options.maxRelationships) {
|
|
292
|
+
relationships = relationships.slice(0, options.maxRelationships)
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
return {
|
|
296
|
+
relationships,
|
|
297
|
+
supersededMemoryIds: parsed.supersededMemoryIds,
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
// ============================================================================
|
|
302
|
+
// Generic JSON Task
|
|
303
|
+
// ============================================================================
|
|
304
|
+
|
|
305
|
+
protected async doGenerateJson(
|
|
306
|
+
systemPrompt: string,
|
|
307
|
+
userPrompt: string
|
|
308
|
+
): Promise<{
|
|
309
|
+
rawResponse: string
|
|
310
|
+
tokensUsed?: { prompt: number; completion: number; total: number }
|
|
311
|
+
}> {
|
|
312
|
+
const response = await this.callAnthropicAPI(systemPrompt, userPrompt)
|
|
313
|
+
const normalized = normalizeJsonResponse(response.content)
|
|
314
|
+
|
|
315
|
+
return {
|
|
316
|
+
rawResponse: normalized,
|
|
317
|
+
tokensUsed: {
|
|
318
|
+
prompt: response.usage.input,
|
|
319
|
+
completion: response.usage.output,
|
|
320
|
+
total: response.usage.input + response.usage.output,
|
|
321
|
+
},
|
|
322
|
+
}
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
// ============================================================================
|
|
326
|
+
// Error Handling
|
|
327
|
+
// ============================================================================
|
|
328
|
+
|
|
329
|
+
private handleAnthropicHttpError(status: number, body: AnthropicError | Record<string, unknown>): LLMError {
|
|
330
|
+
const message =
|
|
331
|
+
'error' in body && typeof body.error === 'object' && body.error
|
|
332
|
+
? ((body.error as { message?: string }).message ?? 'Unknown error')
|
|
333
|
+
: 'Unknown error'
|
|
334
|
+
|
|
335
|
+
// Rate limiting
|
|
336
|
+
if (status === 429) {
|
|
337
|
+
return LLMError.rateLimited('anthropic', 60000)
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
// Authentication errors
|
|
341
|
+
if (status === 401) {
|
|
342
|
+
return LLMError.invalidApiKey('anthropic')
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
// Content filtering (400 with specific message)
|
|
346
|
+
if (status === 400 && message.toLowerCase().includes('content')) {
|
|
347
|
+
return new LLMError('Content was filtered by Anthropic', LLMErrorCode.CONTENT_FILTERED, 'anthropic', false)
|
|
348
|
+
}
|
|
349
|
+
|
|
350
|
+
// Token limit exceeded
|
|
351
|
+
if (status === 400 && message.toLowerCase().includes('token')) {
|
|
352
|
+
return new LLMError('Token limit exceeded', LLMErrorCode.TOKEN_LIMIT_EXCEEDED, 'anthropic', false)
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
// Overloaded (503)
|
|
356
|
+
if (status === 529 || status === 503) {
|
|
357
|
+
return LLMError.providerUnavailable('anthropic')
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
// Server errors (retryable)
|
|
361
|
+
if (status >= 500) {
|
|
362
|
+
return LLMError.providerUnavailable('anthropic')
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
// Default
|
|
366
|
+
return LLMError.invalidResponse('anthropic', message)
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
private handleAnthropicError(error: unknown): LLMError {
|
|
370
|
+
if (error instanceof LLMError) {
|
|
371
|
+
return error
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
if (error instanceof Error) {
|
|
375
|
+
// Network errors
|
|
376
|
+
if (
|
|
377
|
+
error.message.includes('fetch') ||
|
|
378
|
+
error.message.includes('network') ||
|
|
379
|
+
error.message.includes('ECONNREFUSED')
|
|
380
|
+
) {
|
|
381
|
+
return LLMError.providerUnavailable('anthropic')
|
|
382
|
+
}
|
|
383
|
+
|
|
384
|
+
return new LLMError(error.message, LLMErrorCode.PROVIDER_UNAVAILABLE, 'anthropic', true)
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
return new LLMError(String(error), LLMErrorCode.PROVIDER_UNAVAILABLE, 'anthropic', true)
|
|
388
|
+
}
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
// ============================================================================
|
|
392
|
+
// Factory Function
|
|
393
|
+
// ============================================================================
|
|
394
|
+
|
|
395
|
+
/**
|
|
396
|
+
* Create an Anthropic LLM provider
|
|
397
|
+
*/
|
|
398
|
+
export function createAnthropicProvider(config: AnthropicLLMConfig): AnthropicLLMProvider {
|
|
399
|
+
return new AnthropicLLMProvider(config)
|
|
400
|
+
}
|