@yourgpt/llm-sdk 0.1.0 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/README.md +61 -40
  2. package/dist/adapters/index.d.mts +4 -258
  3. package/dist/adapters/index.d.ts +4 -258
  4. package/dist/adapters/index.js +0 -113
  5. package/dist/adapters/index.js.map +1 -1
  6. package/dist/adapters/index.mjs +1 -112
  7. package/dist/adapters/index.mjs.map +1 -1
  8. package/dist/base-D_FyHFKj.d.mts +235 -0
  9. package/dist/base-D_FyHFKj.d.ts +235 -0
  10. package/dist/index.d.mts +209 -451
  11. package/dist/index.d.ts +209 -451
  12. package/dist/index.js +1905 -311
  13. package/dist/index.js.map +1 -1
  14. package/dist/index.mjs +1895 -309
  15. package/dist/index.mjs.map +1 -1
  16. package/dist/providers/anthropic/index.d.mts +61 -0
  17. package/dist/providers/anthropic/index.d.ts +61 -0
  18. package/dist/providers/anthropic/index.js +939 -0
  19. package/dist/providers/anthropic/index.js.map +1 -0
  20. package/dist/providers/anthropic/index.mjs +934 -0
  21. package/dist/providers/anthropic/index.mjs.map +1 -0
  22. package/dist/providers/azure/index.d.mts +38 -0
  23. package/dist/providers/azure/index.d.ts +38 -0
  24. package/dist/providers/azure/index.js +380 -0
  25. package/dist/providers/azure/index.js.map +1 -0
  26. package/dist/providers/azure/index.mjs +377 -0
  27. package/dist/providers/azure/index.mjs.map +1 -0
  28. package/dist/providers/google/index.d.mts +72 -0
  29. package/dist/providers/google/index.d.ts +72 -0
  30. package/dist/providers/google/index.js +790 -0
  31. package/dist/providers/google/index.js.map +1 -0
  32. package/dist/providers/google/index.mjs +785 -0
  33. package/dist/providers/google/index.mjs.map +1 -0
  34. package/dist/providers/ollama/index.d.mts +24 -0
  35. package/dist/providers/ollama/index.d.ts +24 -0
  36. package/dist/providers/ollama/index.js +235 -0
  37. package/dist/providers/ollama/index.js.map +1 -0
  38. package/dist/providers/ollama/index.mjs +232 -0
  39. package/dist/providers/ollama/index.mjs.map +1 -0
  40. package/dist/providers/openai/index.d.mts +82 -0
  41. package/dist/providers/openai/index.d.ts +82 -0
  42. package/dist/providers/openai/index.js +679 -0
  43. package/dist/providers/openai/index.js.map +1 -0
  44. package/dist/providers/openai/index.mjs +674 -0
  45. package/dist/providers/openai/index.mjs.map +1 -0
  46. package/dist/providers/xai/index.d.mts +78 -0
  47. package/dist/providers/xai/index.d.ts +78 -0
  48. package/dist/providers/xai/index.js +671 -0
  49. package/dist/providers/xai/index.js.map +1 -0
  50. package/dist/providers/xai/index.mjs +666 -0
  51. package/dist/providers/xai/index.mjs.map +1 -0
  52. package/dist/types-BBCZ3Fxy.d.mts +308 -0
  53. package/dist/types-CdORv1Yu.d.mts +338 -0
  54. package/dist/types-CdORv1Yu.d.ts +338 -0
  55. package/dist/types-DcoCaVVC.d.ts +308 -0
  56. package/package.json +34 -3
@@ -0,0 +1,235 @@
1
+ import { Message, ActionDefinition, LLMConfig, StreamEvent, MessageAttachment } from '@yourgpt/copilot-sdk/core';
2
+
3
+ /**
4
+ * Chat completion request
5
+ */
6
+ interface ChatCompletionRequest {
7
+ /** Conversation messages */
8
+ messages: Message[];
9
+ /**
10
+ * Raw provider-formatted messages (for agent loop with tool calls)
11
+ * When provided, these are used instead of converting from Message[]
12
+ * This allows passing messages with tool_calls and tool role
13
+ */
14
+ rawMessages?: Array<Record<string, unknown>>;
15
+ /** Available actions/tools */
16
+ actions?: ActionDefinition[];
17
+ /** System prompt */
18
+ systemPrompt?: string;
19
+ /** LLM configuration overrides */
20
+ config?: Partial<LLMConfig>;
21
+ /** Abort signal for cancellation */
22
+ signal?: AbortSignal;
23
+ }
24
+ /**
25
+ * Non-streaming completion result
26
+ */
27
+ interface CompletionResult {
28
+ /** Text content */
29
+ content: string;
30
+ /** Tool calls */
31
+ toolCalls: Array<{
32
+ id: string;
33
+ name: string;
34
+ args: Record<string, unknown>;
35
+ }>;
36
+ /** Thinking content (if extended thinking enabled) */
37
+ thinking?: string;
38
+ /** Raw provider response for debugging */
39
+ rawResponse: Record<string, unknown>;
40
+ }
41
+ /**
42
+ * Base LLM adapter interface
43
+ */
44
+ interface LLMAdapter {
45
+ /** Provider name */
46
+ readonly provider: string;
47
+ /** Model name */
48
+ readonly model: string;
49
+ /**
50
+ * Stream a chat completion
51
+ */
52
+ stream(request: ChatCompletionRequest): AsyncGenerator<StreamEvent>;
53
+ /**
54
+ * Non-streaming chat completion (for debugging/comparison)
55
+ */
56
+ complete?(request: ChatCompletionRequest): Promise<CompletionResult>;
57
+ }
58
+ /**
59
+ * Adapter factory function type
60
+ */
61
+ type AdapterFactory = (config: LLMConfig) => LLMAdapter;
62
+ /**
63
+ * Convert messages to provider format (simple text only)
64
+ */
65
+ declare function formatMessages(messages: Message[], systemPrompt?: string): Array<{
66
+ role: string;
67
+ content: string;
68
+ }>;
69
+ /**
70
+ * Convert actions to OpenAI tool format
71
+ */
72
+ declare function formatTools(actions: ActionDefinition[]): Array<{
73
+ type: "function";
74
+ function: {
75
+ name: string;
76
+ description: string;
77
+ parameters: object;
78
+ };
79
+ }>;
80
+ /**
81
+ * Content block types for multimodal messages
82
+ */
83
+ type AnthropicContentBlock = {
84
+ type: "text";
85
+ text: string;
86
+ } | {
87
+ type: "image";
88
+ source: {
89
+ type: "base64";
90
+ media_type: string;
91
+ data: string;
92
+ } | {
93
+ type: "url";
94
+ url: string;
95
+ };
96
+ } | {
97
+ type: "document";
98
+ source: {
99
+ type: "base64";
100
+ media_type: string;
101
+ data: string;
102
+ } | {
103
+ type: "url";
104
+ url: string;
105
+ };
106
+ };
107
+ type OpenAIContentBlock = {
108
+ type: "text";
109
+ text: string;
110
+ } | {
111
+ type: "image_url";
112
+ image_url: {
113
+ url: string;
114
+ detail?: "low" | "high" | "auto";
115
+ };
116
+ };
117
+ /**
118
+ * Check if a message has image attachments
119
+ * Supports both new format (metadata.attachments) and legacy (attachments)
120
+ */
121
+ declare function hasImageAttachments(message: Message): boolean;
122
+ /**
123
+ * Check if a message has media attachments (images or PDFs)
124
+ */
125
+ declare function hasMediaAttachments(message: Message): boolean;
126
+ /**
127
+ * Convert MessageAttachment to Anthropic image content block
128
+ *
129
+ * Anthropic format:
130
+ * {
131
+ * type: "image",
132
+ * source: {
133
+ * type: "base64",
134
+ * media_type: "image/png",
135
+ * data: "base64data..."
136
+ * }
137
+ * }
138
+ */
139
+ declare function attachmentToAnthropicImage(attachment: MessageAttachment): AnthropicContentBlock | null;
140
+ /**
141
+ * Convert MessageAttachment to OpenAI image_url content block
142
+ *
143
+ * OpenAI format:
144
+ * {
145
+ * type: "image_url",
146
+ * image_url: {
147
+ * url: "data:image/png;base64,..."
148
+ * }
149
+ * }
150
+ */
151
+ declare function attachmentToOpenAIImage(attachment: MessageAttachment): OpenAIContentBlock | null;
152
+ /**
153
+ * Convert MessageAttachment (PDF) to Anthropic document content block
154
+ *
155
+ * Anthropic format:
156
+ * {
157
+ * type: "document",
158
+ * source: {
159
+ * type: "base64",
160
+ * media_type: "application/pdf",
161
+ * data: "base64data..."
162
+ * }
163
+ * }
164
+ */
165
+ declare function attachmentToAnthropicDocument(attachment: MessageAttachment): AnthropicContentBlock | null;
166
+ /**
167
+ * Convert a Message to Anthropic multimodal content blocks
168
+ */
169
+ declare function messageToAnthropicContent(message: Message): string | AnthropicContentBlock[];
170
+ /**
171
+ * Convert a Message to OpenAI multimodal content blocks
172
+ */
173
+ declare function messageToOpenAIContent(message: Message): string | OpenAIContentBlock[];
174
+ /**
175
+ * Anthropic content block types (extended for tools)
176
+ */
177
+ type AnthropicToolUseBlock = {
178
+ type: "tool_use";
179
+ id: string;
180
+ name: string;
181
+ input: Record<string, unknown>;
182
+ };
183
+ type AnthropicToolResultBlock = {
184
+ type: "tool_result";
185
+ tool_use_id: string;
186
+ content: string;
187
+ };
188
+ type AnthropicMessageContent = string | Array<AnthropicContentBlock | AnthropicToolUseBlock | AnthropicToolResultBlock>;
189
+ /**
190
+ * Format messages for Anthropic with full tool support
191
+ * Handles: text, images, tool_use, and tool_result
192
+ *
193
+ * Key differences from OpenAI:
194
+ * - tool_calls become tool_use blocks in assistant content
195
+ * - tool results become tool_result blocks in user content
196
+ */
197
+ declare function formatMessagesForAnthropic(messages: Message[], systemPrompt?: string): {
198
+ system: string;
199
+ messages: Array<{
200
+ role: "user" | "assistant";
201
+ content: AnthropicMessageContent;
202
+ }>;
203
+ };
204
+ /**
205
+ * OpenAI message format with tool support
206
+ */
207
+ type OpenAIMessage = {
208
+ role: "system";
209
+ content: string;
210
+ } | {
211
+ role: "user";
212
+ content: string | OpenAIContentBlock[];
213
+ } | {
214
+ role: "assistant";
215
+ content: string | null;
216
+ tool_calls?: Array<{
217
+ id: string;
218
+ type: "function";
219
+ function: {
220
+ name: string;
221
+ arguments: string;
222
+ };
223
+ }>;
224
+ } | {
225
+ role: "tool";
226
+ content: string;
227
+ tool_call_id: string;
228
+ };
229
+ /**
230
+ * Format messages for OpenAI with full tool support
231
+ * Handles: text, images, tool_calls, and tool results
232
+ */
233
+ declare function formatMessagesForOpenAI(messages: Message[], systemPrompt?: string): OpenAIMessage[];
234
+
235
+ export { type AdapterFactory as A, type ChatCompletionRequest as C, type LLMAdapter as L, type OpenAIContentBlock as O, type CompletionResult as a, formatTools as b, formatMessagesForAnthropic as c, formatMessagesForOpenAI as d, messageToOpenAIContent as e, formatMessages as f, hasMediaAttachments as g, hasImageAttachments as h, attachmentToAnthropicImage as i, attachmentToAnthropicDocument as j, attachmentToOpenAIImage as k, type AnthropicContentBlock as l, messageToAnthropicContent as m };
@@ -0,0 +1,235 @@
1
+ import { Message, ActionDefinition, LLMConfig, StreamEvent, MessageAttachment } from '@yourgpt/copilot-sdk/core';
2
+
3
+ /**
4
+ * Chat completion request
5
+ */
6
+ interface ChatCompletionRequest {
7
+ /** Conversation messages */
8
+ messages: Message[];
9
+ /**
10
+ * Raw provider-formatted messages (for agent loop with tool calls)
11
+ * When provided, these are used instead of converting from Message[]
12
+ * This allows passing messages with tool_calls and tool role
13
+ */
14
+ rawMessages?: Array<Record<string, unknown>>;
15
+ /** Available actions/tools */
16
+ actions?: ActionDefinition[];
17
+ /** System prompt */
18
+ systemPrompt?: string;
19
+ /** LLM configuration overrides */
20
+ config?: Partial<LLMConfig>;
21
+ /** Abort signal for cancellation */
22
+ signal?: AbortSignal;
23
+ }
24
+ /**
25
+ * Non-streaming completion result
26
+ */
27
+ interface CompletionResult {
28
+ /** Text content */
29
+ content: string;
30
+ /** Tool calls */
31
+ toolCalls: Array<{
32
+ id: string;
33
+ name: string;
34
+ args: Record<string, unknown>;
35
+ }>;
36
+ /** Thinking content (if extended thinking enabled) */
37
+ thinking?: string;
38
+ /** Raw provider response for debugging */
39
+ rawResponse: Record<string, unknown>;
40
+ }
41
+ /**
42
+ * Base LLM adapter interface
43
+ */
44
+ interface LLMAdapter {
45
+ /** Provider name */
46
+ readonly provider: string;
47
+ /** Model name */
48
+ readonly model: string;
49
+ /**
50
+ * Stream a chat completion
51
+ */
52
+ stream(request: ChatCompletionRequest): AsyncGenerator<StreamEvent>;
53
+ /**
54
+ * Non-streaming chat completion (for debugging/comparison)
55
+ */
56
+ complete?(request: ChatCompletionRequest): Promise<CompletionResult>;
57
+ }
58
+ /**
59
+ * Adapter factory function type
60
+ */
61
+ type AdapterFactory = (config: LLMConfig) => LLMAdapter;
62
+ /**
63
+ * Convert messages to provider format (simple text only)
64
+ */
65
+ declare function formatMessages(messages: Message[], systemPrompt?: string): Array<{
66
+ role: string;
67
+ content: string;
68
+ }>;
69
+ /**
70
+ * Convert actions to OpenAI tool format
71
+ */
72
+ declare function formatTools(actions: ActionDefinition[]): Array<{
73
+ type: "function";
74
+ function: {
75
+ name: string;
76
+ description: string;
77
+ parameters: object;
78
+ };
79
+ }>;
80
+ /**
81
+ * Content block types for multimodal messages
82
+ */
83
+ type AnthropicContentBlock = {
84
+ type: "text";
85
+ text: string;
86
+ } | {
87
+ type: "image";
88
+ source: {
89
+ type: "base64";
90
+ media_type: string;
91
+ data: string;
92
+ } | {
93
+ type: "url";
94
+ url: string;
95
+ };
96
+ } | {
97
+ type: "document";
98
+ source: {
99
+ type: "base64";
100
+ media_type: string;
101
+ data: string;
102
+ } | {
103
+ type: "url";
104
+ url: string;
105
+ };
106
+ };
107
+ type OpenAIContentBlock = {
108
+ type: "text";
109
+ text: string;
110
+ } | {
111
+ type: "image_url";
112
+ image_url: {
113
+ url: string;
114
+ detail?: "low" | "high" | "auto";
115
+ };
116
+ };
117
+ /**
118
+ * Check if a message has image attachments
119
+ * Supports both new format (metadata.attachments) and legacy (attachments)
120
+ */
121
+ declare function hasImageAttachments(message: Message): boolean;
122
+ /**
123
+ * Check if a message has media attachments (images or PDFs)
124
+ */
125
+ declare function hasMediaAttachments(message: Message): boolean;
126
+ /**
127
+ * Convert MessageAttachment to Anthropic image content block
128
+ *
129
+ * Anthropic format:
130
+ * {
131
+ * type: "image",
132
+ * source: {
133
+ * type: "base64",
134
+ * media_type: "image/png",
135
+ * data: "base64data..."
136
+ * }
137
+ * }
138
+ */
139
+ declare function attachmentToAnthropicImage(attachment: MessageAttachment): AnthropicContentBlock | null;
140
+ /**
141
+ * Convert MessageAttachment to OpenAI image_url content block
142
+ *
143
+ * OpenAI format:
144
+ * {
145
+ * type: "image_url",
146
+ * image_url: {
147
+ * url: "data:image/png;base64,..."
148
+ * }
149
+ * }
150
+ */
151
+ declare function attachmentToOpenAIImage(attachment: MessageAttachment): OpenAIContentBlock | null;
152
+ /**
153
+ * Convert MessageAttachment (PDF) to Anthropic document content block
154
+ *
155
+ * Anthropic format:
156
+ * {
157
+ * type: "document",
158
+ * source: {
159
+ * type: "base64",
160
+ * media_type: "application/pdf",
161
+ * data: "base64data..."
162
+ * }
163
+ * }
164
+ */
165
+ declare function attachmentToAnthropicDocument(attachment: MessageAttachment): AnthropicContentBlock | null;
166
+ /**
167
+ * Convert a Message to Anthropic multimodal content blocks
168
+ */
169
+ declare function messageToAnthropicContent(message: Message): string | AnthropicContentBlock[];
170
+ /**
171
+ * Convert a Message to OpenAI multimodal content blocks
172
+ */
173
+ declare function messageToOpenAIContent(message: Message): string | OpenAIContentBlock[];
174
+ /**
175
+ * Anthropic content block types (extended for tools)
176
+ */
177
+ type AnthropicToolUseBlock = {
178
+ type: "tool_use";
179
+ id: string;
180
+ name: string;
181
+ input: Record<string, unknown>;
182
+ };
183
+ type AnthropicToolResultBlock = {
184
+ type: "tool_result";
185
+ tool_use_id: string;
186
+ content: string;
187
+ };
188
+ type AnthropicMessageContent = string | Array<AnthropicContentBlock | AnthropicToolUseBlock | AnthropicToolResultBlock>;
189
+ /**
190
+ * Format messages for Anthropic with full tool support
191
+ * Handles: text, images, tool_use, and tool_result
192
+ *
193
+ * Key differences from OpenAI:
194
+ * - tool_calls become tool_use blocks in assistant content
195
+ * - tool results become tool_result blocks in user content
196
+ */
197
+ declare function formatMessagesForAnthropic(messages: Message[], systemPrompt?: string): {
198
+ system: string;
199
+ messages: Array<{
200
+ role: "user" | "assistant";
201
+ content: AnthropicMessageContent;
202
+ }>;
203
+ };
204
+ /**
205
+ * OpenAI message format with tool support
206
+ */
207
+ type OpenAIMessage = {
208
+ role: "system";
209
+ content: string;
210
+ } | {
211
+ role: "user";
212
+ content: string | OpenAIContentBlock[];
213
+ } | {
214
+ role: "assistant";
215
+ content: string | null;
216
+ tool_calls?: Array<{
217
+ id: string;
218
+ type: "function";
219
+ function: {
220
+ name: string;
221
+ arguments: string;
222
+ };
223
+ }>;
224
+ } | {
225
+ role: "tool";
226
+ content: string;
227
+ tool_call_id: string;
228
+ };
229
+ /**
230
+ * Format messages for OpenAI with full tool support
231
+ * Handles: text, images, tool_calls, and tool results
232
+ */
233
+ declare function formatMessagesForOpenAI(messages: Message[], systemPrompt?: string): OpenAIMessage[];
234
+
235
+ export { type AdapterFactory as A, type ChatCompletionRequest as C, type LLMAdapter as L, type OpenAIContentBlock as O, type CompletionResult as a, formatTools as b, formatMessagesForAnthropic as c, formatMessagesForOpenAI as d, messageToOpenAIContent as e, formatMessages as f, hasMediaAttachments as g, hasImageAttachments as h, attachmentToAnthropicImage as i, attachmentToAnthropicDocument as j, attachmentToOpenAIImage as k, type AnthropicContentBlock as l, messageToAnthropicContent as m };