@yourgpt/llm-sdk 1.0.1 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/README.md +23 -15
  2. package/dist/adapters/index.d.mts +23 -9
  3. package/dist/adapters/index.d.ts +23 -9
  4. package/dist/adapters/index.js.map +1 -1
  5. package/dist/adapters/index.mjs.map +1 -1
  6. package/dist/{base-D_FyHFKj.d.mts → base-CXNMfvXg.d.mts} +10 -2
  7. package/dist/{base-D_FyHFKj.d.ts → base-CXNMfvXg.d.ts} +10 -2
  8. package/dist/index.d.mts +33 -13
  9. package/dist/index.d.ts +33 -13
  10. package/dist/index.js +160 -177
  11. package/dist/index.js.map +1 -1
  12. package/dist/index.mjs +160 -177
  13. package/dist/index.mjs.map +1 -1
  14. package/dist/providers/anthropic/index.d.mts +2 -2
  15. package/dist/providers/anthropic/index.d.ts +2 -2
  16. package/dist/providers/anthropic/index.js.map +1 -1
  17. package/dist/providers/anthropic/index.mjs.map +1 -1
  18. package/dist/providers/azure/index.d.mts +2 -2
  19. package/dist/providers/azure/index.d.ts +2 -2
  20. package/dist/providers/azure/index.js.map +1 -1
  21. package/dist/providers/azure/index.mjs.map +1 -1
  22. package/dist/providers/google/index.d.mts +23 -10
  23. package/dist/providers/google/index.d.ts +23 -10
  24. package/dist/providers/google/index.js +160 -177
  25. package/dist/providers/google/index.js.map +1 -1
  26. package/dist/providers/google/index.mjs +160 -177
  27. package/dist/providers/google/index.mjs.map +1 -1
  28. package/dist/providers/ollama/index.d.mts +2 -2
  29. package/dist/providers/ollama/index.d.ts +2 -2
  30. package/dist/providers/ollama/index.js.map +1 -1
  31. package/dist/providers/ollama/index.mjs.map +1 -1
  32. package/dist/providers/openai/index.d.mts +2 -2
  33. package/dist/providers/openai/index.d.ts +2 -2
  34. package/dist/providers/openai/index.js.map +1 -1
  35. package/dist/providers/openai/index.mjs.map +1 -1
  36. package/dist/providers/xai/index.d.mts +2 -2
  37. package/dist/providers/xai/index.d.ts +2 -2
  38. package/dist/providers/xai/index.js.map +1 -1
  39. package/dist/providers/xai/index.mjs.map +1 -1
  40. package/dist/{types-BBCZ3Fxy.d.mts → types-B8rxpnYi.d.mts} +1 -1
  41. package/dist/{types-DcoCaVVC.d.ts → types-CrQftISG.d.ts} +1 -1
  42. package/package.json +3 -7
package/README.md CHANGED
@@ -5,7 +5,13 @@ Multi-provider LLM SDK with streaming. One API, any provider.
5
5
  ## Installation
6
6
 
7
7
  ```bash
8
- npm install @yourgpt/llm-sdk
8
+ npm install @yourgpt/llm-sdk openai
9
+ ```
10
+
11
+ For Anthropic, install `@anthropic-ai/sdk` instead:
12
+
13
+ ```bash
14
+ npm install @yourgpt/llm-sdk @anthropic-ai/sdk
9
15
  ```
10
16
 
11
17
  ## Quick Start
@@ -18,7 +24,7 @@ export async function POST(req: Request) {
18
24
  const { messages } = await req.json();
19
25
 
20
26
  const result = await streamText({
21
- model: openai("gpt-5"),
27
+ model: openai("gpt-4o"),
22
28
  system: "You are a helpful assistant.",
23
29
  messages,
24
30
  });
@@ -36,16 +42,16 @@ import { google } from "@yourgpt/llm-sdk/google";
36
42
  import { xai } from "@yourgpt/llm-sdk/xai";
37
43
 
38
44
  // OpenAI
39
- await streamText({ model: openai("gpt-5"), messages });
45
+ await streamText({ model: openai("gpt-4o"), messages });
40
46
 
41
47
  // Anthropic
42
48
  await streamText({ model: anthropic("claude-sonnet-4-20250514"), messages });
43
49
 
44
- // Google
50
+ // Google Gemini (uses OpenAI-compatible API)
45
51
  await streamText({ model: google("gemini-2.0-flash"), messages });
46
52
 
47
- // xAI
48
- await streamText({ model: xai("grok-3"), messages });
53
+ // xAI Grok (uses OpenAI-compatible API)
54
+ await streamText({ model: xai("grok-3-fast-beta"), messages });
49
55
  ```
50
56
 
51
57
  ## Server-Side Tools
@@ -56,7 +62,7 @@ import { openai } from "@yourgpt/llm-sdk/openai";
56
62
  import { z } from "zod";
57
63
 
58
64
  const result = await streamText({
59
- model: openai("gpt-5"),
65
+ model: openai("gpt-4o"),
60
66
  messages,
61
67
  tools: {
62
68
  getWeather: tool({
@@ -77,14 +83,16 @@ return result.toDataStreamResponse();
77
83
 
78
84
  ## Supported Providers
79
85
 
80
- | Provider | Import |
81
- | ------------- | ---------------------------- |
82
- | OpenAI | `@yourgpt/llm-sdk/openai` |
83
- | Anthropic | `@yourgpt/llm-sdk/anthropic` |
84
- | Google Gemini | `@yourgpt/llm-sdk/google` |
85
- | xAI (Grok) | `@yourgpt/llm-sdk/xai` |
86
- | Ollama | `@yourgpt/llm-sdk/ollama` |
87
- | Azure OpenAI | `@yourgpt/llm-sdk/azure` |
86
+ | Provider | Import | SDK Required |
87
+ | ------------- | ---------------------------- | ------------------- |
88
+ | OpenAI | `@yourgpt/llm-sdk/openai` | `openai` |
89
+ | Anthropic | `@yourgpt/llm-sdk/anthropic` | `@anthropic-ai/sdk` |
90
+ | Google Gemini | `@yourgpt/llm-sdk/google` | `openai` |
91
+ | xAI (Grok) | `@yourgpt/llm-sdk/xai` | `openai` |
92
+ | Ollama | `@yourgpt/llm-sdk/ollama` | `openai` |
93
+ | Azure OpenAI | `@yourgpt/llm-sdk/azure` | `openai` |
94
+
95
+ > **Note:** OpenAI, Google, xAI, Ollama, and Azure all use the `openai` SDK because they have OpenAI-compatible APIs. Only Anthropic requires its native SDK for full feature support.
88
96
 
89
97
  ## Documentation
90
98
 
@@ -1,14 +1,16 @@
1
- import { L as LLMAdapter, C as ChatCompletionRequest, a as CompletionResult } from '../base-D_FyHFKj.mjs';
2
- export { A as AdapterFactory, l as AnthropicContentBlock, O as OpenAIContentBlock, j as attachmentToAnthropicDocument, i as attachmentToAnthropicImage, k as attachmentToOpenAIImage, f as formatMessages, c as formatMessagesForAnthropic, d as formatMessagesForOpenAI, b as formatTools, h as hasImageAttachments, g as hasMediaAttachments, m as messageToAnthropicContent, e as messageToOpenAIContent } from '../base-D_FyHFKj.mjs';
3
- import { LLMConfig, StreamEvent } from '@yourgpt/copilot-sdk/core';
1
+ import { L as LLMAdapter, C as ChatCompletionRequest, a as CompletionResult } from '../base-CXNMfvXg.mjs';
2
+ export { A as AdapterFactory, l as AnthropicContentBlock, O as OpenAIContentBlock, j as attachmentToAnthropicDocument, i as attachmentToAnthropicImage, k as attachmentToOpenAIImage, f as formatMessages, c as formatMessagesForAnthropic, d as formatMessagesForOpenAI, b as formatTools, h as hasImageAttachments, g as hasMediaAttachments, m as messageToAnthropicContent, e as messageToOpenAIContent } from '../base-CXNMfvXg.mjs';
3
+ import { StreamEvent } from '@yourgpt/copilot-sdk/core';
4
4
 
5
5
  /**
6
6
  * OpenAI adapter configuration
7
7
  */
8
- interface OpenAIAdapterConfig extends Partial<LLMConfig> {
8
+ interface OpenAIAdapterConfig {
9
9
  apiKey: string;
10
10
  model?: string;
11
11
  baseUrl?: string;
12
+ temperature?: number;
13
+ maxTokens?: number;
12
14
  }
13
15
  /**
14
16
  * OpenAI LLM Adapter
@@ -40,11 +42,15 @@ interface ThinkingConfig {
40
42
  /**
41
43
  * Anthropic adapter configuration
42
44
  */
43
- interface AnthropicAdapterConfig extends Partial<LLMConfig> {
45
+ interface AnthropicAdapterConfig {
44
46
  apiKey: string;
45
47
  model?: string;
48
+ /** Base URL for API endpoint */
49
+ baseUrl?: string;
46
50
  /** Enable extended thinking (for Claude 3.7 Sonnet, Claude 4) */
47
51
  thinking?: ThinkingConfig;
52
+ temperature?: number;
53
+ maxTokens?: number;
48
54
  }
49
55
  /**
50
56
  * Anthropic LLM Adapter
@@ -88,9 +94,11 @@ declare function createAnthropicAdapter(config: AnthropicAdapterConfig): Anthrop
88
94
  /**
89
95
  * Ollama adapter configuration
90
96
  */
91
- interface OllamaAdapterConfig extends Partial<LLMConfig> {
97
+ interface OllamaAdapterConfig {
92
98
  model?: string;
93
99
  baseUrl?: string;
100
+ temperature?: number;
101
+ maxTokens?: number;
94
102
  }
95
103
  /**
96
104
  * Ollama LLM Adapter (Local models)
@@ -120,10 +128,12 @@ declare function createOllamaAdapter(config?: OllamaAdapterConfig): OllamaAdapte
120
128
  /**
121
129
  * Google adapter configuration
122
130
  */
123
- interface GoogleAdapterConfig extends Partial<LLMConfig> {
131
+ interface GoogleAdapterConfig {
124
132
  apiKey: string;
125
133
  model?: string;
126
134
  baseUrl?: string;
135
+ temperature?: number;
136
+ maxTokens?: number;
127
137
  /** Safety settings */
128
138
  safetySettings?: Array<{
129
139
  category: string;
@@ -164,10 +174,12 @@ declare function createGoogleAdapter(config: GoogleAdapterConfig): GoogleAdapter
164
174
  /**
165
175
  * xAI adapter configuration
166
176
  */
167
- interface XAIAdapterConfig extends Partial<LLMConfig> {
177
+ interface XAIAdapterConfig {
168
178
  apiKey: string;
169
179
  model?: string;
170
180
  baseUrl?: string;
181
+ temperature?: number;
182
+ maxTokens?: number;
171
183
  }
172
184
  /**
173
185
  * xAI Grok LLM Adapter
@@ -205,7 +217,7 @@ declare function createXAIAdapter(config: XAIAdapterConfig): XAIAdapter;
205
217
  /**
206
218
  * Azure OpenAI adapter configuration
207
219
  */
208
- interface AzureAdapterConfig extends Partial<LLMConfig> {
220
+ interface AzureAdapterConfig {
209
221
  /** Azure OpenAI API key */
210
222
  apiKey: string;
211
223
  /** Azure resource name (e.g., 'my-resource') */
@@ -214,6 +226,8 @@ interface AzureAdapterConfig extends Partial<LLMConfig> {
214
226
  deploymentName: string;
215
227
  /** API version (default: 2024-08-01-preview) */
216
228
  apiVersion?: string;
229
+ temperature?: number;
230
+ maxTokens?: number;
217
231
  /** Custom endpoint URL (optional, overrides resourceName) */
218
232
  baseUrl?: string;
219
233
  }
@@ -1,14 +1,16 @@
1
- import { L as LLMAdapter, C as ChatCompletionRequest, a as CompletionResult } from '../base-D_FyHFKj.js';
2
- export { A as AdapterFactory, l as AnthropicContentBlock, O as OpenAIContentBlock, j as attachmentToAnthropicDocument, i as attachmentToAnthropicImage, k as attachmentToOpenAIImage, f as formatMessages, c as formatMessagesForAnthropic, d as formatMessagesForOpenAI, b as formatTools, h as hasImageAttachments, g as hasMediaAttachments, m as messageToAnthropicContent, e as messageToOpenAIContent } from '../base-D_FyHFKj.js';
3
- import { LLMConfig, StreamEvent } from '@yourgpt/copilot-sdk/core';
1
+ import { L as LLMAdapter, C as ChatCompletionRequest, a as CompletionResult } from '../base-CXNMfvXg.js';
2
+ export { A as AdapterFactory, l as AnthropicContentBlock, O as OpenAIContentBlock, j as attachmentToAnthropicDocument, i as attachmentToAnthropicImage, k as attachmentToOpenAIImage, f as formatMessages, c as formatMessagesForAnthropic, d as formatMessagesForOpenAI, b as formatTools, h as hasImageAttachments, g as hasMediaAttachments, m as messageToAnthropicContent, e as messageToOpenAIContent } from '../base-CXNMfvXg.js';
3
+ import { StreamEvent } from '@yourgpt/copilot-sdk/core';
4
4
 
5
5
  /**
6
6
  * OpenAI adapter configuration
7
7
  */
8
- interface OpenAIAdapterConfig extends Partial<LLMConfig> {
8
+ interface OpenAIAdapterConfig {
9
9
  apiKey: string;
10
10
  model?: string;
11
11
  baseUrl?: string;
12
+ temperature?: number;
13
+ maxTokens?: number;
12
14
  }
13
15
  /**
14
16
  * OpenAI LLM Adapter
@@ -40,11 +42,15 @@ interface ThinkingConfig {
40
42
  /**
41
43
  * Anthropic adapter configuration
42
44
  */
43
- interface AnthropicAdapterConfig extends Partial<LLMConfig> {
45
+ interface AnthropicAdapterConfig {
44
46
  apiKey: string;
45
47
  model?: string;
48
+ /** Base URL for API endpoint */
49
+ baseUrl?: string;
46
50
  /** Enable extended thinking (for Claude 3.7 Sonnet, Claude 4) */
47
51
  thinking?: ThinkingConfig;
52
+ temperature?: number;
53
+ maxTokens?: number;
48
54
  }
49
55
  /**
50
56
  * Anthropic LLM Adapter
@@ -88,9 +94,11 @@ declare function createAnthropicAdapter(config: AnthropicAdapterConfig): Anthrop
88
94
  /**
89
95
  * Ollama adapter configuration
90
96
  */
91
- interface OllamaAdapterConfig extends Partial<LLMConfig> {
97
+ interface OllamaAdapterConfig {
92
98
  model?: string;
93
99
  baseUrl?: string;
100
+ temperature?: number;
101
+ maxTokens?: number;
94
102
  }
95
103
  /**
96
104
  * Ollama LLM Adapter (Local models)
@@ -120,10 +128,12 @@ declare function createOllamaAdapter(config?: OllamaAdapterConfig): OllamaAdapte
120
128
  /**
121
129
  * Google adapter configuration
122
130
  */
123
- interface GoogleAdapterConfig extends Partial<LLMConfig> {
131
+ interface GoogleAdapterConfig {
124
132
  apiKey: string;
125
133
  model?: string;
126
134
  baseUrl?: string;
135
+ temperature?: number;
136
+ maxTokens?: number;
127
137
  /** Safety settings */
128
138
  safetySettings?: Array<{
129
139
  category: string;
@@ -164,10 +174,12 @@ declare function createGoogleAdapter(config: GoogleAdapterConfig): GoogleAdapter
164
174
  /**
165
175
  * xAI adapter configuration
166
176
  */
167
- interface XAIAdapterConfig extends Partial<LLMConfig> {
177
+ interface XAIAdapterConfig {
168
178
  apiKey: string;
169
179
  model?: string;
170
180
  baseUrl?: string;
181
+ temperature?: number;
182
+ maxTokens?: number;
171
183
  }
172
184
  /**
173
185
  * xAI Grok LLM Adapter
@@ -205,7 +217,7 @@ declare function createXAIAdapter(config: XAIAdapterConfig): XAIAdapter;
205
217
  /**
206
218
  * Azure OpenAI adapter configuration
207
219
  */
208
- interface AzureAdapterConfig extends Partial<LLMConfig> {
220
+ interface AzureAdapterConfig {
209
221
  /** Azure OpenAI API key */
210
222
  apiKey: string;
211
223
  /** Azure resource name (e.g., 'my-resource') */
@@ -214,6 +226,8 @@ interface AzureAdapterConfig extends Partial<LLMConfig> {
214
226
  deploymentName: string;
215
227
  /** API version (default: 2024-08-01-preview) */
216
228
  apiVersion?: string;
229
+ temperature?: number;
230
+ maxTokens?: number;
217
231
  /** Custom endpoint URL (optional, overrides resourceName) */
218
232
  baseUrl?: string;
219
233
  }