@namzu/sdk 0.1.6 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/CHANGELOG.md +31 -0
  2. package/README.md +14 -9
  3. package/dist/constants/provider/index.d.ts +0 -3
  4. package/dist/constants/provider/index.d.ts.map +1 -1
  5. package/dist/constants/provider/index.js +0 -18
  6. package/dist/constants/provider/index.js.map +1 -1
  7. package/dist/index.d.ts +1 -1
  8. package/dist/index.d.ts.map +1 -1
  9. package/dist/index.js +1 -1
  10. package/dist/index.js.map +1 -1
  11. package/dist/provider/__tests__/registry.test.d.ts +11 -0
  12. package/dist/provider/__tests__/registry.test.d.ts.map +1 -0
  13. package/dist/provider/__tests__/registry.test.js +118 -0
  14. package/dist/provider/__tests__/registry.test.js.map +1 -0
  15. package/dist/provider/index.d.ts +3 -3
  16. package/dist/provider/index.d.ts.map +1 -1
  17. package/dist/provider/index.js +3 -3
  18. package/dist/provider/index.js.map +1 -1
  19. package/dist/provider/mock-register.d.ts +12 -0
  20. package/dist/provider/mock-register.d.ts.map +1 -0
  21. package/dist/provider/mock-register.js +24 -0
  22. package/dist/provider/mock-register.js.map +1 -0
  23. package/dist/provider/mock.d.ts +26 -0
  24. package/dist/provider/mock.d.ts.map +1 -0
  25. package/dist/provider/{factory.js → mock.js} +3 -45
  26. package/dist/provider/mock.js.map +1 -0
  27. package/dist/provider/registry.d.ts +47 -0
  28. package/dist/provider/registry.d.ts.map +1 -0
  29. package/dist/provider/registry.js +89 -0
  30. package/dist/provider/registry.js.map +1 -0
  31. package/dist/rag/rag-tool.d.ts +1 -1
  32. package/dist/types/provider/config.d.ts +29 -21
  33. package/dist/types/provider/config.d.ts.map +1 -1
  34. package/dist/types/provider/index.d.ts +1 -1
  35. package/dist/types/provider/index.d.ts.map +1 -1
  36. package/package.json +4 -2
  37. package/src/constants/provider/index.ts +0 -22
  38. package/src/index.ts +5 -4
  39. package/src/provider/__tests__/registry.test.ts +155 -0
  40. package/src/provider/index.ts +3 -3
  41. package/src/provider/mock-register.ts +27 -0
  42. package/src/provider/{factory.ts → mock.ts} +2 -57
  43. package/src/provider/registry.ts +118 -0
  44. package/src/types/provider/config.ts +31 -29
  45. package/src/types/provider/index.ts +3 -4
  46. package/dist/provider/bedrock/client.d.ts +0 -14
  47. package/dist/provider/bedrock/client.d.ts.map +0 -1
  48. package/dist/provider/bedrock/client.js +0 -460
  49. package/dist/provider/bedrock/client.js.map +0 -1
  50. package/dist/provider/factory.d.ts +0 -39
  51. package/dist/provider/factory.d.ts.map +0 -1
  52. package/dist/provider/factory.js.map +0 -1
  53. package/dist/provider/openrouter/client.d.ts +0 -17
  54. package/dist/provider/openrouter/client.d.ts.map +0 -1
  55. package/dist/provider/openrouter/client.js +0 -305
  56. package/dist/provider/openrouter/client.js.map +0 -1
  57. package/src/provider/bedrock/client.ts +0 -548
  58. package/src/provider/openrouter/client.ts +0 -390
@@ -1,390 +0,0 @@
1
- import { SpanStatusCode } from '@opentelemetry/api'
2
- import { OPENROUTER_BASE_URL } from '../../constants/provider/index.js'
3
- import { GENAI, NAMZU, chatSpanName } from '../../telemetry/attributes.js'
4
- import type { TokenUsage } from '../../types/common/index.js'
5
- import type {
6
- ChatCompletionParams,
7
- ChatCompletionResponse,
8
- LLMProvider,
9
- ModelInfo,
10
- StreamChunk,
11
- ToolChoice,
12
- } from '../../types/provider/index.js'
13
- import type { OpenRouterConfig } from '../../types/provider/index.js'
14
- import { toErrorMessage } from '../../utils/error.js'
15
- import { getRootLogger } from '../../utils/logger.js'
16
- import { getTracer } from '../telemetry/setup.js'
17
-
18
// Module-scoped logger; every entry emitted from this file is tagged with the
// 'OpenRouterProvider' component so provider traffic can be filtered in logs.
const logger = getRootLogger().child({ component: 'OpenRouterProvider' })
19
-
20
// Token-usage payload exactly as OpenRouter returns it (snake_case wire format).
// The cache-related fields are optional and vary by upstream model provider:
//  - prompt_tokens_details.cached_tokens: OpenAI-style cached prompt token count
//  - cache_read_input_tokens / cache_creation_input_tokens: Anthropic-style counters
//  - cache_discount: discount applied for cache hits — NOTE(review): units (USD vs
//    ratio) are not visible here; confirm against OpenRouter's usage docs.
interface RawUsage {
  prompt_tokens: number
  completion_tokens: number
  total_tokens: number
  prompt_tokens_details?: {
    cached_tokens?: number
  }
  cache_discount?: number
  cache_read_input_tokens?: number
  cache_creation_input_tokens?: number
}
31
-
32
- function parseUsage(raw?: RawUsage): TokenUsage {
33
- if (!raw) {
34
- return {
35
- promptTokens: 0,
36
- completionTokens: 0,
37
- totalTokens: 0,
38
- cachedTokens: 0,
39
- cacheWriteTokens: 0,
40
- }
41
- }
42
- return {
43
- promptTokens: raw.prompt_tokens,
44
- completionTokens: raw.completion_tokens,
45
- totalTokens: raw.total_tokens,
46
- cachedTokens: raw.prompt_tokens_details?.cached_tokens ?? raw.cache_read_input_tokens ?? 0,
47
- cacheWriteTokens: raw.cache_creation_input_tokens ?? 0,
48
- }
49
- }
50
-
51
- function parseCacheDiscount(raw?: RawUsage): number {
52
- return raw?.cache_discount ?? 0
53
- }
54
-
55
- function formatToolChoice(tc: ToolChoice): unknown {
56
- if (typeof tc === 'string') return tc
57
- return tc
58
- }
59
-
60
/**
 * LLMProvider backed by the OpenRouter HTTP API.
 *
 * Uses the global `fetch` with `AbortSignal.timeout` for all requests.
 * Non-streaming chat calls are wrapped in an OpenTelemetry span; streaming
 * calls (`chatStream`) are not traced in this implementation.
 */
export class OpenRouterProvider implements LLMProvider {
  readonly id = 'openrouter'
  readonly name = 'OpenRouter'

  private config: OpenRouterConfig
  private baseUrl: string

  /**
   * @param config Provider configuration; `apiKey` is mandatory.
   * @throws Error when no API key is present.
   */
  constructor(config: OpenRouterConfig) {
    if (!config.apiKey) {
      throw new Error('OpenRouter API key is required. Set OPENROUTER_API_KEY env variable.')
    }
    this.config = config
    // Allow overriding the endpoint (e.g. for proxies/tests); default to the shared constant.
    this.baseUrl = config.baseUrl ?? OPENROUTER_BASE_URL
  }

  /**
   * Build request headers. `HTTP-Referer` and `X-Title` are the optional
   * OpenRouter attribution headers, sent only when configured.
   */
  private getHeaders(): Record<string, string> {
    const headers: Record<string, string> = {
      Authorization: `Bearer ${this.config.apiKey}`,
      'Content-Type': 'application/json',
    }
    if (this.config.siteUrl) {
      headers['HTTP-Referer'] = this.config.siteUrl
    }
    if (this.config.siteName) {
      headers['X-Title'] = this.config.siteName
    }
    return headers
  }

  /**
   * Map SDK messages to the OpenAI-compatible wire shape:
   * tool results gain `tool_call_id`, assistant messages carry `tool_calls`,
   * everything else passes through as `{ role, content }`.
   */
  private formatMessages(messages: ChatCompletionParams['messages']): unknown[] {
    return messages.map((msg) => {
      if (msg.role === 'tool') {
        return {
          role: 'tool',
          content: msg.content,
          // Cast needed because the union's tool variant is not narrowed here.
          tool_call_id: (msg as { toolCallId?: string }).toolCallId,
        }
      }
      if (msg.role === 'assistant' && 'toolCalls' in msg && msg.toolCalls) {
        return {
          role: 'assistant',
          content: msg.content,
          tool_calls: msg.toolCalls.map((tc) => ({
            id: tc.id,
            type: tc.type,
            function: tc.function,
          })),
        }
      }
      return { role: msg.role, content: msg.content }
    })
  }

  /**
   * Assemble the JSON body for /chat/completions, translating camelCase
   * params to snake_case wire fields. Optional fields are only included when
   * explicitly set, so OpenRouter's defaults apply otherwise.
   */
  private buildRequestBody(params: ChatCompletionParams, stream: boolean): Record<string, unknown> {
    const body: Record<string, unknown> = {
      model: params.model,
      messages: this.formatMessages(params.messages),
      stream,
    }

    // Tool calling: empty tools array is deliberately omitted.
    if (params.tools && params.tools.length > 0) {
      body.tools = params.tools
    }
    if (params.toolChoice !== undefined) {
      body.tool_choice = formatToolChoice(params.toolChoice)
    }
    if (params.parallelToolCalls !== undefined) {
      body.parallel_tool_calls = params.parallelToolCalls
    }

    // Sampling parameters — forwarded only when the caller set them.
    if (params.temperature !== undefined) body.temperature = params.temperature
    if (params.maxTokens !== undefined) body.max_tokens = params.maxTokens
    if (params.topP !== undefined) body.top_p = params.topP
    if (params.topK !== undefined) body.top_k = params.topK
    if (params.frequencyPenalty !== undefined) body.frequency_penalty = params.frequencyPenalty
    if (params.presencePenalty !== undefined) body.presence_penalty = params.presencePenalty
    if (params.repetitionPenalty !== undefined) body.repetition_penalty = params.repetitionPenalty
    if (params.stop) body.stop = params.stop

    if (params.cacheControl) {
      body.cache_control = params.cacheControl
    }

    if (params.responseFormat) {
      body.response_format = params.responseFormat
    }

    return body
  }

  /**
   * Non-streaming chat completion.
   *
   * Runs inside an active OTel span: request attributes are set up front,
   * response/usage attributes after success, and any error is recorded on the
   * span and rethrown. The span is always ended in `finally`.
   *
   * @throws Error on non-2xx responses (message includes status and body) or
   *         when the API returns an empty `choices` array.
   */
  async chat(params: ChatCompletionParams): Promise<ChatCompletionResponse> {
    const tracer = getTracer()

    return tracer.startActiveSpan(chatSpanName(params.model), async (span) => {
      span.setAttributes({
        [GENAI.OPERATION_NAME]: 'chat',
        [GENAI.SYSTEM]: 'openrouter',
        [GENAI.REQUEST_MODEL]: params.model,
      })
      if (params.temperature !== undefined) {
        span.setAttribute(GENAI.REQUEST_TEMPERATURE, params.temperature)
      }
      if (params.maxTokens !== undefined) {
        span.setAttribute(GENAI.REQUEST_MAX_TOKENS, params.maxTokens)
      }

      try {
        const body = this.buildRequestBody(params, false)

        logger.debug('Sending chat completion request', { model: params.model })

        const response = await fetch(`${this.baseUrl}/chat/completions`, {
          method: 'POST',
          headers: this.getHeaders(),
          body: JSON.stringify(body),
          // Default timeout of 120s unless configured.
          signal: AbortSignal.timeout(this.config.timeout ?? 120_000),
        })

        if (!response.ok) {
          const errorBody = await response.text()
          logger.error('OpenRouter API error', {
            status: response.status,
            body: errorBody,
          })
          throw new Error(`OpenRouter API error (${response.status}): ${errorBody}`)
        }

        // Inline structural type for the subset of the response we consume.
        // NOTE(review): response is cast, not validated at runtime.
        const data = (await response.json()) as {
          id: string
          model: string
          choices: Array<{
            message: {
              role: string
              content: string | null
              tool_calls?: Array<{
                id: string
                type: string
                function: { name: string; arguments: string }
              }>
            }
            finish_reason: string
          }>
          usage?: RawUsage
        }

        // Only the first choice is used; n>1 responses are not supported here.
        const choice = data.choices[0]
        if (!choice) {
          throw new Error('OpenRouter returned empty choices')
        }

        const usage = parseUsage(data.usage)
        const cacheDiscount = parseCacheDiscount(data.usage)

        const result: ChatCompletionResponse = {
          id: data.id,
          model: data.model,
          message: {
            role: 'assistant',
            content: choice.message.content,
            toolCalls: choice.message.tool_calls?.map((tc) => ({
              id: tc.id,
              type: 'function' as const,
              function: tc.function,
            })),
          },
          finishReason: choice.finish_reason as ChatCompletionResponse['finishReason'],
          usage,
        }

        // Record response + token/cache accounting on the span.
        span.setAttributes({
          [GENAI.RESPONSE_ID]: data.id,
          [GENAI.RESPONSE_MODEL]: data.model,
          [GENAI.RESPONSE_FINISH_REASONS]: choice.finish_reason,
          [GENAI.USAGE_INPUT_TOKENS]: usage.promptTokens,
          [GENAI.USAGE_OUTPUT_TOKENS]: usage.completionTokens,
          [NAMZU.CACHE_READ_TOKENS]: usage.cachedTokens,
          [NAMZU.CACHE_WRITE_TOKENS]: usage.cacheWriteTokens,
          [NAMZU.CACHE_DISCOUNT]: cacheDiscount,
        })
        span.setStatus({ code: SpanStatusCode.OK })

        return result
      } catch (err) {
        span.setStatus({
          code: SpanStatusCode.ERROR,
          message: toErrorMessage(err),
        })
        span.recordException(err instanceof Error ? err : new Error(String(err)))
        throw err
      } finally {
        span.end()
      }
    })
  }

  /**
   * Streaming chat completion as an async iterable of `StreamChunk`s.
   *
   * Parses the SSE response line by line: incomplete lines are buffered
   * across reads, `data: [DONE]` terminates the stream, and a chunk that
   * fails to JSON-parse is surfaced as an error chunk rather than aborting
   * the whole stream. The reader lock is always released in `finally`.
   *
   * NOTE(review): a trailing buffered line without a final newline is dropped
   * if the stream ends before `[DONE]` — confirm upstream always terminates
   * with `[DONE]`.
   *
   * @throws Error on non-2xx responses or a missing response body.
   */
  async *chatStream(params: ChatCompletionParams): AsyncIterable<StreamChunk> {
    const body = this.buildRequestBody(params, true)

    const response = await fetch(`${this.baseUrl}/chat/completions`, {
      method: 'POST',
      headers: this.getHeaders(),
      body: JSON.stringify(body),
      signal: AbortSignal.timeout(this.config.timeout ?? 120_000),
    })

    if (!response.ok) {
      const errorBody = await response.text()
      throw new Error(`OpenRouter API error (${response.status}): ${errorBody}`)
    }

    if (!response.body) {
      throw new Error('OpenRouter returned no stream body')
    }

    const reader = response.body.getReader()
    const decoder = new TextDecoder()
    let buffer = ''

    try {
      while (true) {
        const { done, value } = await reader.read()
        if (done) break

        // stream:true keeps multi-byte characters split across reads intact.
        buffer += decoder.decode(value, { stream: true })
        const lines = buffer.split('\n')
        // Last element may be a partial line — keep it for the next read.
        buffer = lines.pop() ?? ''

        for (const line of lines) {
          const trimmed = line.trim()
          if (!trimmed || !trimmed.startsWith('data: ')) continue
          const data = trimmed.slice(6)
          if (data === '[DONE]') return

          try {
            // Structural type for the SSE delta payload; cast, not validated.
            const parsed = JSON.parse(data) as {
              id: string
              choices: Array<{
                delta: {
                  content?: string
                  tool_calls?: Array<{
                    index: number
                    id?: string
                    type?: string
                    function?: { name?: string; arguments?: string }
                  }>
                }
                finish_reason?: string
              }>
              usage?: RawUsage
            }

            const choice = parsed.choices[0]
            if (!choice) continue

            yield {
              id: parsed.id,
              delta: {
                content: choice.delta.content,
                toolCalls: choice.delta.tool_calls?.map((tc) => ({
                  index: tc.index,
                  id: tc.id,
                  type: tc.type as 'function' | undefined,
                  function: tc.function,
                })),
              },
              finishReason: choice.finish_reason as StreamChunk['finishReason'],
              usage: parsed.usage ? parseUsage(parsed.usage) : undefined,
            }
          } catch (parseErr) {
            // Malformed chunk: log (truncated) and emit an error chunk so the
            // consumer can decide whether to continue or abort.
            logger.warn('Failed to parse streaming chunk', {
              error: toErrorMessage(parseErr),
              data: data.slice(0, 200),
            })
            yield {
              id: '',
              delta: { content: undefined },
              finishReason: undefined,
              usage: undefined,
              error: `Stream parse error: ${toErrorMessage(parseErr)}`,
            }
          }
        }
      }
    } finally {
      reader.releaseLock()
    }
  }

  /**
   * Fetch the model catalog from GET /models and map it to `ModelInfo`.
   *
   * Pricing strings are multiplied by 1_000_000 — presumably converting
   * per-token USD to per-million-token USD; verify against the ModelInfo
   * contract. `supportsToolUse`/`supportsStreaming` are hard-coded true for
   * all models here.
   *
   * @throws Error on non-2xx responses.
   */
  async listModels(): Promise<ModelInfo[]> {
    const response = await fetch(`${this.baseUrl}/models`, {
      headers: this.getHeaders(),
    })

    if (!response.ok) {
      throw new Error(`Failed to list models: ${response.status}`)
    }

    const data = (await response.json()) as {
      data: Array<{
        id: string
        name: string
        context_length: number
        top_provider?: { max_completion_tokens?: number }
        pricing?: { prompt: string; completion: string }
      }>
    }

    return data.data.map((m) => ({
      id: m.id,
      name: m.name,
      contextWindow: m.context_length,
      // Fallback output cap when the upstream provider does not report one.
      maxOutputTokens: m.top_provider?.max_completion_tokens ?? 4096,
      inputPrice: Number.parseFloat(m.pricing?.prompt ?? '0') * 1_000_000,
      outputPrice: Number.parseFloat(m.pricing?.completion ?? '0') * 1_000_000,
      supportsToolUse: true,
      supportsStreaming: true,
    }))
  }

  /**
   * Lightweight liveness probe: GET /models with a 5s timeout.
   * Never throws — any failure (network, timeout, non-2xx) returns false.
   */
  async healthCheck(): Promise<boolean> {
    try {
      const response = await fetch(`${this.baseUrl}/models`, {
        headers: this.getHeaders(),
        signal: AbortSignal.timeout(5000),
      })
      return response.ok
    } catch {
      return false
    }
  }
}