@effect/ai-openai 0.17.1 → 0.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. package/OpenAiEmbeddingModel/package.json +6 -0
  2. package/OpenAiLanguageModel/package.json +6 -0
  3. package/dist/cjs/OpenAiClient.js +111 -109
  4. package/dist/cjs/OpenAiClient.js.map +1 -1
  5. package/dist/cjs/OpenAiConfig.js +2 -2
  6. package/dist/cjs/OpenAiConfig.js.map +1 -1
  7. package/dist/cjs/{OpenAiEmbeddings.js → OpenAiEmbeddingModel.js} +22 -51
  8. package/dist/cjs/OpenAiEmbeddingModel.js.map +1 -0
  9. package/dist/cjs/OpenAiLanguageModel.js +490 -0
  10. package/dist/cjs/OpenAiLanguageModel.js.map +1 -0
  11. package/dist/cjs/OpenAiTelemetry.js +1 -1
  12. package/dist/cjs/OpenAiTelemetry.js.map +1 -1
  13. package/dist/cjs/OpenAiTokenizer.js +11 -13
  14. package/dist/cjs/OpenAiTokenizer.js.map +1 -1
  15. package/dist/cjs/index.js +5 -5
  16. package/dist/cjs/internal/utilities.js +25 -0
  17. package/dist/cjs/internal/utilities.js.map +1 -0
  18. package/dist/dts/OpenAiClient.d.ts +32 -75
  19. package/dist/dts/OpenAiClient.d.ts.map +1 -1
  20. package/dist/dts/OpenAiConfig.d.ts +5 -6
  21. package/dist/dts/OpenAiConfig.d.ts.map +1 -1
  22. package/dist/dts/{OpenAiEmbeddings.d.ts → OpenAiEmbeddingModel.d.ts} +16 -37
  23. package/dist/dts/OpenAiEmbeddingModel.d.ts.map +1 -0
  24. package/dist/dts/OpenAiLanguageModel.d.ts +108 -0
  25. package/dist/dts/OpenAiLanguageModel.d.ts.map +1 -0
  26. package/dist/dts/OpenAiTelemetry.d.ts +59 -64
  27. package/dist/dts/OpenAiTelemetry.d.ts.map +1 -1
  28. package/dist/dts/OpenAiTokenizer.d.ts +2 -2
  29. package/dist/dts/OpenAiTokenizer.d.ts.map +1 -1
  30. package/dist/dts/index.d.ts +3 -3
  31. package/dist/dts/index.d.ts.map +1 -1
  32. package/dist/dts/internal/utilities.d.ts +2 -0
  33. package/dist/dts/internal/utilities.d.ts.map +1 -0
  34. package/dist/esm/OpenAiClient.js +110 -106
  35. package/dist/esm/OpenAiClient.js.map +1 -1
  36. package/dist/esm/OpenAiConfig.js +2 -2
  37. package/dist/esm/OpenAiConfig.js.map +1 -1
  38. package/dist/esm/{OpenAiEmbeddings.js → OpenAiEmbeddingModel.js} +21 -48
  39. package/dist/esm/OpenAiEmbeddingModel.js.map +1 -0
  40. package/dist/esm/OpenAiLanguageModel.js +478 -0
  41. package/dist/esm/OpenAiLanguageModel.js.map +1 -0
  42. package/dist/esm/OpenAiTelemetry.js +1 -1
  43. package/dist/esm/OpenAiTelemetry.js.map +1 -1
  44. package/dist/esm/OpenAiTokenizer.js +11 -13
  45. package/dist/esm/OpenAiTokenizer.js.map +1 -1
  46. package/dist/esm/index.js +3 -3
  47. package/dist/esm/index.js.map +1 -1
  48. package/dist/esm/internal/utilities.js +16 -0
  49. package/dist/esm/internal/utilities.js.map +1 -0
  50. package/package.json +19 -19
  51. package/src/OpenAiClient.ts +184 -191
  52. package/src/OpenAiConfig.ts +7 -8
  53. package/src/{OpenAiEmbeddings.ts → OpenAiEmbeddingModel.ts} +39 -85
  54. package/src/OpenAiLanguageModel.ts +601 -0
  55. package/src/OpenAiTelemetry.ts +66 -71
  56. package/src/OpenAiTokenizer.ts +35 -26
  57. package/src/index.ts +3 -3
  58. package/src/internal/utilities.ts +19 -0
  59. package/OpenAiCompletions/package.json +0 -6
  60. package/OpenAiEmbeddings/package.json +0 -6
  61. package/dist/cjs/OpenAiCompletions.js +0 -358
  62. package/dist/cjs/OpenAiCompletions.js.map +0 -1
  63. package/dist/cjs/OpenAiEmbeddings.js.map +0 -1
  64. package/dist/dts/OpenAiCompletions.d.ts +0 -75
  65. package/dist/dts/OpenAiCompletions.d.ts.map +0 -1
  66. package/dist/dts/OpenAiEmbeddings.d.ts.map +0 -1
  67. package/dist/esm/OpenAiCompletions.js +0 -345
  68. package/dist/esm/OpenAiCompletions.js.map +0 -1
  69. package/dist/esm/OpenAiEmbeddings.js.map +0 -1
  70. package/src/OpenAiCompletions.ts +0 -500
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@effect/ai-openai",
3
- "version": "0.17.1",
3
+ "version": "0.19.0",
4
4
  "description": "Effect modules for working with AI apis",
5
5
  "license": "MIT",
6
6
  "repository": {
@@ -14,10 +14,10 @@
14
14
  "gpt-tokenizer": "^2.9.0"
15
15
  },
16
16
  "peerDependencies": {
17
- "@effect/ai": "^0.14.1",
18
- "@effect/experimental": "^0.45.1",
19
- "@effect/platform": "^0.81.1",
20
- "effect": "^3.14.22"
17
+ "@effect/ai": "^0.16.0",
18
+ "@effect/experimental": "^0.46.0",
19
+ "@effect/platform": "^0.82.0",
20
+ "effect": "^3.15.0"
21
21
  },
22
22
  "publishConfig": {
23
23
  "provenance": true
@@ -42,20 +42,20 @@
42
42
  "import": "./dist/esm/OpenAiClient.js",
43
43
  "default": "./dist/cjs/OpenAiClient.js"
44
44
  },
45
- "./OpenAiCompletions": {
46
- "types": "./dist/dts/OpenAiCompletions.d.ts",
47
- "import": "./dist/esm/OpenAiCompletions.js",
48
- "default": "./dist/cjs/OpenAiCompletions.js"
49
- },
50
45
  "./OpenAiConfig": {
51
46
  "types": "./dist/dts/OpenAiConfig.d.ts",
52
47
  "import": "./dist/esm/OpenAiConfig.js",
53
48
  "default": "./dist/cjs/OpenAiConfig.js"
54
49
  },
55
- "./OpenAiEmbeddings": {
56
- "types": "./dist/dts/OpenAiEmbeddings.d.ts",
57
- "import": "./dist/esm/OpenAiEmbeddings.js",
58
- "default": "./dist/cjs/OpenAiEmbeddings.js"
50
+ "./OpenAiEmbeddingModel": {
51
+ "types": "./dist/dts/OpenAiEmbeddingModel.d.ts",
52
+ "import": "./dist/esm/OpenAiEmbeddingModel.js",
53
+ "default": "./dist/cjs/OpenAiEmbeddingModel.js"
54
+ },
55
+ "./OpenAiLanguageModel": {
56
+ "types": "./dist/dts/OpenAiLanguageModel.d.ts",
57
+ "import": "./dist/esm/OpenAiLanguageModel.js",
58
+ "default": "./dist/cjs/OpenAiLanguageModel.js"
59
59
  },
60
60
  "./OpenAiTelemetry": {
61
61
  "types": "./dist/dts/OpenAiTelemetry.d.ts",
@@ -81,14 +81,14 @@
81
81
  "OpenAiClient": [
82
82
  "./dist/dts/OpenAiClient.d.ts"
83
83
  ],
84
- "OpenAiCompletions": [
85
- "./dist/dts/OpenAiCompletions.d.ts"
86
- ],
87
84
  "OpenAiConfig": [
88
85
  "./dist/dts/OpenAiConfig.d.ts"
89
86
  ],
90
- "OpenAiEmbeddings": [
91
- "./dist/dts/OpenAiEmbeddings.d.ts"
87
+ "OpenAiEmbeddingModel": [
88
+ "./dist/dts/OpenAiEmbeddingModel.d.ts"
89
+ ],
90
+ "OpenAiLanguageModel": [
91
+ "./dist/dts/OpenAiLanguageModel.d.ts"
92
92
  ],
93
93
  "OpenAiTelemetry": [
94
94
  "./dist/dts/OpenAiTelemetry.d.ts"
@@ -1,31 +1,32 @@
1
1
  /**
2
2
  * @since 1.0.0
3
3
  */
4
- import * as AiModels from "@effect/ai/AiModels"
4
+ import type { ToolCallId } from "@effect/ai/AiInput"
5
5
  import * as AiResponse from "@effect/ai/AiResponse"
6
- import * as AiRole from "@effect/ai/AiRole"
7
6
  import * as Sse from "@effect/experimental/Sse"
8
7
  import * as HttpBody from "@effect/platform/HttpBody"
9
8
  import * as HttpClient from "@effect/platform/HttpClient"
10
9
  import type * as HttpClientError from "@effect/platform/HttpClientError"
11
10
  import * as HttpClientRequest from "@effect/platform/HttpClientRequest"
12
- import * as Chunk from "effect/Chunk"
13
11
  import * as Config from "effect/Config"
14
12
  import type { ConfigError } from "effect/ConfigError"
15
13
  import * as Context from "effect/Context"
16
- import * as Data from "effect/Data"
17
14
  import * as Effect from "effect/Effect"
18
15
  import { identity } from "effect/Function"
19
16
  import * as Layer from "effect/Layer"
20
17
  import * as Option from "effect/Option"
18
+ import * as Predicate from "effect/Predicate"
21
19
  import * as Redacted from "effect/Redacted"
22
20
  import * as Stream from "effect/Stream"
23
21
  import * as Generated from "./Generated.js"
22
+ import * as InternalUtilities from "./internal/utilities.js"
24
23
  import { OpenAiConfig } from "./OpenAiConfig.js"
25
24
 
25
+ const constDisableValidation = { disableValidation: true } as const
26
+
26
27
  /**
27
28
  * @since 1.0.0
28
- * @category tags
29
+ * @category Context
29
30
  */
30
31
  export class OpenAiClient extends Context.Tag("@effect/ai-openai/OpenAiClient")<
31
32
  OpenAiClient,
@@ -34,12 +35,11 @@ export class OpenAiClient extends Context.Tag("@effect/ai-openai/OpenAiClient")<
34
35
 
35
36
  /**
36
37
  * @since 1.0.0
37
- * @category models
38
38
  */
39
39
  export declare namespace OpenAiClient {
40
40
  /**
41
41
  * @since 1.0.0
42
- * @category models
42
+ * @category Models
43
43
  */
44
44
  export interface Service {
45
45
  readonly client: Generated.Client
@@ -48,20 +48,44 @@ export declare namespace OpenAiClient {
48
48
  ) => Stream.Stream<A, HttpClientError.HttpClientError>
49
49
  readonly stream: (
50
50
  request: StreamCompletionRequest
51
- ) => Stream.Stream<StreamChunk, HttpClientError.HttpClientError>
51
+ ) => Stream.Stream<AiResponse.AiResponse, HttpClientError.HttpClientError>
52
52
  }
53
53
  }
54
54
 
55
55
  /**
56
56
  * @since 1.0.0
57
- * @category constructors
57
+ * @category Models
58
+ */
59
+ export type StreamCompletionRequest = Omit<typeof Generated.CreateChatCompletionRequest.Encoded, "stream">
60
+
61
+ /**
62
+ * @since 1.0.0
63
+ * @category Constructors
58
64
  */
59
65
  export const make = (options: {
66
+ /**
67
+ * The API key to use to communicate with the OpenAi API.
68
+ */
60
69
  readonly apiKey?: Redacted.Redacted | undefined
70
+ /**
71
+ * The URL to use to communicate with the OpenAi API.
72
+ */
61
73
  readonly apiUrl?: string | undefined
74
+ /**
75
+ * The OpenAi organization identifier to use when communicating with the
76
+ * OpenAi API.
77
+ */
62
78
  readonly organizationId?: Redacted.Redacted | undefined
79
+ /**
80
+ * The OpenAi project identifier to use when communicating with the OpenAi
81
+ * API.
82
+ */
63
83
  readonly projectId?: Redacted.Redacted | undefined
64
- readonly transformClient?: (client: HttpClient.HttpClient) => HttpClient.HttpClient
84
+ /**
85
+ * A method which can be used to transform the underlying `HttpClient` which
86
+ * will be used to communicate with the OpenAi API.
87
+ */
88
+ readonly transformClient?: ((client: HttpClient.HttpClient) => HttpClient.HttpClient) | undefined
65
89
  }): Effect.Effect<OpenAiClient.Service, never, HttpClient.HttpClient> =>
66
90
  Effect.gen(function*() {
67
91
  const httpClient = (yield* HttpClient.HttpClient).pipe(
@@ -87,6 +111,7 @@ export const make = (options: {
87
111
  Effect.map((config) => config?.transformClient ? config.transformClient(client) : client)
88
112
  )
89
113
  })
114
+
90
115
  const streamRequest = <A = unknown>(request: HttpClientRequest.HttpClientRequest) =>
91
116
  httpClientOk.execute(request).pipe(
92
117
  Effect.map((r) => r.stream),
@@ -96,9 +121,22 @@ export const make = (options: {
96
121
  Stream.takeWhile((event) => event.data !== "[DONE]"),
97
122
  Stream.map((event) => JSON.parse(event.data) as A)
98
123
  )
124
+
99
125
  const stream = (request: StreamCompletionRequest) =>
100
126
  Stream.suspend(() => {
101
- const finishReasons: Array<string> = []
127
+ const toolCalls = {} as Record<number, RawToolCall & { isFinished: boolean }>
128
+ let isFirstChunk = false
129
+ let toolCallIndex: number | undefined = undefined
130
+ let finishReason: AiResponse.FinishReason = "unknown"
131
+ let usage: AiResponse.Usage = {
132
+ inputTokens: 0,
133
+ outputTokens: 0,
134
+ totalTokens: 0,
135
+ reasoningTokens: 0,
136
+ cacheReadInputTokens: 0,
137
+ cacheWriteInputTokens: 0
138
+ }
139
+ let metadata: Record<string, unknown> = {}
102
140
  return streamRequest<RawCompletionChunk>(HttpClientRequest.post("/chat/completions", {
103
141
  body: HttpBody.unsafeJson({
104
142
  ...request,
@@ -106,83 +144,105 @@ export const make = (options: {
106
144
  stream_options: { include_usage: true }
107
145
  })
108
146
  })).pipe(
109
- Stream.mapAccum(new Map<number, ContentPart | Array<ToolCallPart>>(), (acc, chunk) => {
110
- const parts: Array<StreamChunkPart> = []
111
- if (chunk.usage !== null) {
112
- parts.push({
113
- _tag: "Usage",
114
- id: chunk.id,
115
- model: chunk.model,
147
+ Stream.filterMap((chunk) => {
148
+ const parts: Array<AiResponse.Part> = []
149
+
150
+ // Add response metadata immediately once available
151
+ if (isFirstChunk) {
152
+ isFirstChunk = false
153
+ parts.push(
154
+ new AiResponse.MetadataPart({
155
+ id: chunk.id,
156
+ model: chunk.model,
157
+ timestamp: new Date(chunk.created * 1000)
158
+ }, constDisableValidation)
159
+ )
160
+ }
161
+
162
+ // Track usage information
163
+ if (Predicate.isNotNullable(chunk.usage)) {
164
+ usage = {
116
165
  inputTokens: chunk.usage.prompt_tokens,
117
166
  outputTokens: chunk.usage.completion_tokens,
118
- finishReasons,
167
+ totalTokens: chunk.usage.prompt_tokens + chunk.usage.completion_tokens,
168
+ reasoningTokens: chunk.usage.completion_tokens_details.reasoning_tokens,
169
+ cacheReadInputTokens: chunk.usage.prompt_tokens_details.cached_tokens,
170
+ cacheWriteInputTokens: usage.cacheWriteInputTokens
171
+ }
172
+ metadata = {
173
+ ...metadata,
174
+ serviceTier: chunk.service_tier,
119
175
  systemFingerprint: chunk.system_fingerprint,
120
- serviceTier: chunk.service_tier
121
- })
176
+ acceptedPredictionTokens: chunk.usage.completion_tokens_details.accepted_prediction_tokens,
177
+ rejectedPredictionTokens: chunk.usage.completion_tokens_details.rejected_prediction_tokens,
178
+ inputAudioTokens: chunk.usage.prompt_tokens_details.audio_tokens,
179
+ outputAudioTokens: chunk.usage.completion_tokens_details.audio_tokens
180
+ }
122
181
  }
182
+
123
183
  for (let i = 0; i < chunk.choices.length; i++) {
124
184
  const choice = chunk.choices[i]
125
- if (choice.finish_reason !== null) {
126
- finishReasons.push(choice.finish_reason)
127
- }
128
- if ("content" in choice.delta && typeof choice.delta.content === "string") {
129
- let part = acc.get(choice.index) as ContentPart | undefined
130
- part = {
131
- _tag: "Content",
132
- content: choice.delta.content
185
+
186
+ // Track the finish reason for the response
187
+ if (Predicate.isNotNullable(choice.finish_reason)) {
188
+ finishReason = InternalUtilities.resolveFinishReason(choice.finish_reason)
189
+ if (finishReason === "tool-calls" && Predicate.isNotUndefined(toolCallIndex)) {
190
+ finishToolCall(toolCalls[toolCallIndex], parts)
133
191
  }
134
- acc.set(choice.index, part)
135
- parts.push(part)
136
- } else if ("tool_calls" in choice.delta && Array.isArray(choice.delta.tool_calls)) {
137
- const parts = (acc.get(choice.index) ?? []) as Array<ToolCallPart>
138
- for (const toolCall of choice.delta.tool_calls) {
139
- const part = parts[toolCall.index]
140
- const toolPart = part?._tag === "ToolCall" ?
141
- {
142
- ...part,
143
- arguments: part.arguments + toolCall.function.arguments
144
- } :
145
- {
146
- _tag: "ToolCall",
147
- ...toolCall,
148
- ...toolCall.function,
149
- role: choice.delta.role!
150
- } as any
151
- parts[toolCall.index] = toolPart
192
+ if (finishReason === "stop") {
193
+ parts.push(
194
+ new AiResponse.FinishPart({
195
+ usage,
196
+ reason: finishReason,
197
+ providerMetadata: { [InternalUtilities.ProviderMetadataKey]: metadata }
198
+ }, constDisableValidation)
199
+ )
152
200
  }
153
- acc.set(choice.index, parts)
154
- } else if (choice.finish_reason === "tool_calls") {
155
- const toolParts = acc.get(choice.index) as Array<ToolCallPart>
156
- for (const part of toolParts) {
157
- try {
158
- const args = JSON.parse(part.arguments as string)
159
- parts.push({
160
- _tag: "ToolCall",
161
- id: part.id,
162
- name: part.name,
163
- arguments: args
164
- })
165
- // eslint-disable-next-line no-empty
166
- } catch {}
201
+ }
202
+
203
+ // Handle text deltas
204
+ if (Predicate.isNotNullable(choice.delta.content)) {
205
+ parts.push(
206
+ new AiResponse.TextPart({
207
+ text: choice.delta.content
208
+ }, constDisableValidation)
209
+ )
210
+ }
211
+
212
+ // Handle tool call deltas
213
+ if (Predicate.hasProperty(choice.delta, "tool_calls") && Array.isArray(choice.delta.tool_calls)) {
214
+ for (const delta of choice.delta.tool_calls) {
215
+ // Make sure to emit any previous tool calls before starting a new one
216
+ if (Predicate.isNotUndefined(toolCallIndex) && toolCallIndex !== delta.index) {
217
+ finishToolCall(toolCalls[toolCallIndex], parts)
218
+ toolCallIndex = delta.index
219
+ }
220
+
221
+ if (Predicate.isUndefined(toolCallIndex)) {
222
+ const toolCall = delta as unknown as RawToolCall
223
+ // All information except arguments are returned with the first tool call delta
224
+ toolCalls[delta.index] = { ...toolCall, isFinished: false }
225
+ toolCallIndex = delta.index
226
+ } else {
227
+ toolCalls[delta.index].function.arguments += delta.function.arguments
228
+ }
167
229
  }
168
230
  }
169
231
  }
170
- return [
171
- acc,
172
- parts.length === 0
173
- ? Option.none()
174
- : Option.some(new StreamChunk({ parts }))
175
- ]
176
- }),
177
- Stream.filterMap(identity)
232
+
233
+ return parts.length === 0
234
+ ? Option.none()
235
+ : Option.some(AiResponse.AiResponse.make({ parts }, constDisableValidation))
236
+ })
178
237
  )
179
238
  })
239
+
180
240
  return OpenAiClient.of({ client, streamRequest, stream })
181
241
  })
182
242
 
183
243
  /**
184
244
  * @since 1.0.0
185
- * @category layers
245
+ * @category Layers
186
246
  */
187
247
  export const layer = (options: {
188
248
  readonly apiKey?: Redacted.Redacted | undefined
@@ -190,15 +250,11 @@ export const layer = (options: {
190
250
  readonly organizationId?: Redacted.Redacted | undefined
191
251
  readonly projectId?: Redacted.Redacted | undefined
192
252
  readonly transformClient?: (client: HttpClient.HttpClient) => HttpClient.HttpClient
193
- }): Layer.Layer<AiModels.AiModels | OpenAiClient, never, HttpClient.HttpClient> =>
194
- Layer.merge(
195
- AiModels.layer,
196
- Layer.effect(OpenAiClient, make(options))
197
- )
253
+ }): Layer.Layer<OpenAiClient, never, HttpClient.HttpClient> => Layer.effect(OpenAiClient, make(options))
198
254
 
199
255
  /**
200
256
  * @since 1.0.0
201
- * @category layers
257
+ * @category Layers
202
258
  */
203
259
  export const layerConfig = (
204
260
  options: Config.Config.Wrap<{
@@ -208,43 +264,27 @@ export const layerConfig = (
208
264
  readonly projectId?: Redacted.Redacted | undefined
209
265
  readonly transformClient?: (client: HttpClient.HttpClient) => HttpClient.HttpClient
210
266
  }>
211
- ): Layer.Layer<AiModels.AiModels | OpenAiClient, ConfigError, HttpClient.HttpClient> =>
267
+ ): Layer.Layer<OpenAiClient, ConfigError, HttpClient.HttpClient> =>
212
268
  Config.unwrap(options).pipe(
213
269
  Effect.flatMap(make),
214
- Layer.effect(OpenAiClient),
215
- Layer.merge(AiModels.layer)
270
+ Layer.effect(OpenAiClient)
216
271
  )
217
272
 
218
- /**
219
- * @since 1.0.0
220
- * @category models
221
- */
222
- export type StreamCompletionRequest = Omit<typeof Generated.CreateChatCompletionRequest.Encoded, "stream">
223
-
224
273
  interface RawCompletionChunk {
225
274
  readonly id: string
226
275
  readonly object: "chat.completion.chunk"
227
276
  readonly created: number
228
277
  readonly model: string
229
- readonly choices: Array<
230
- {
231
- readonly index: number
232
- readonly finish_reason: null
233
- readonly delta: RawDelta
234
- } | {
235
- readonly index: number
236
- readonly finish_reason: string
237
- readonly delta: {}
238
- }
239
- >
278
+ readonly choices: ReadonlyArray<RawChoice>
240
279
  readonly system_fingerprint: string
241
- readonly service_tier: string
280
+ readonly service_tier: string | null
242
281
  readonly usage: RawUsage | null
243
282
  }
244
283
 
245
- interface RawUsage {
246
- readonly prompt_tokens: number
247
- readonly completion_tokens: number
284
+ interface RawChoice {
285
+ readonly index: number
286
+ readonly finish_reason: "stop" | "length" | "content_filter" | "function_call" | "tool_calls" | null
287
+ readonly delta: RawDelta
248
288
  }
249
289
 
250
290
  type RawDelta = {
@@ -255,7 +295,23 @@ type RawDelta = {
255
295
  readonly index?: number
256
296
  readonly role?: string
257
297
  readonly content?: null
258
- readonly tool_calls: Array<RawToolCall>
298
+ readonly tool_calls: Array<RawToolDelta>
299
+ }
300
+
301
+ interface RawUsage {
302
+ readonly prompt_tokens: number
303
+ readonly completion_tokens: number
304
+ readonly total_tokens: number
305
+ readonly completion_tokens_details: {
306
+ readonly accepted_prediction_tokens: number
307
+ readonly audio_tokens: number
308
+ readonly reasoning_tokens: number
309
+ readonly rejected_prediction_tokens: number
310
+ }
311
+ readonly prompt_tokens_details: {
312
+ readonly audio_tokens: number
313
+ readonly cached_tokens: number
314
+ }
259
315
  }
260
316
 
261
317
  type RawToolCall = {
@@ -264,103 +320,40 @@ type RawToolCall = {
264
320
  readonly type: "function"
265
321
  readonly function: {
266
322
  readonly name: string
267
- readonly arguments: string
323
+ arguments: string
268
324
  }
269
- } | {
325
+ }
326
+
327
+ type RawToolDelta = RawToolCall | {
270
328
  readonly index: number
271
329
  readonly function: {
272
330
  readonly arguments: string
273
331
  }
274
332
  }
275
333
 
276
- /**
277
- * @since 1.0.0
278
- * @category models
279
- */
280
- export class StreamChunk extends Data.Class<{
281
- readonly parts: Array<StreamChunkPart>
282
- }> {
283
- /**
284
- * @since 1.0.0
285
- */
286
- get text(): Option.Option<string> {
287
- const firstContentPart = this.parts.find((part) => part._tag === "Content")
288
- return firstContentPart ? Option.some(firstContentPart.content) : Option.none()
289
- }
290
- /**
291
- * @since 1.0.0
292
- */
293
- get asAiResponse(): AiResponse.AiResponse {
294
- const aiResponseParts: Array<AiResponse.Part> = []
295
-
296
- for (let i = 0; i < this.parts.length; i++) {
297
- const part = this.parts[i]
298
- switch (part._tag) {
299
- case "Content":
300
- aiResponseParts.push(AiResponse.TextPart.fromContent(part.content))
301
- break
302
- case "ToolCall":
303
- aiResponseParts.push(AiResponse.ToolCallPart.fromUnknown({
304
- id: part.id,
305
- name: part.name,
306
- params: part.arguments
307
- }))
308
- break
309
- }
310
- }
311
-
312
- if (aiResponseParts.length === 0) {
313
- return AiResponse.AiResponse.fromText({
314
- role: AiRole.model,
315
- content: ""
316
- })
317
- }
334
+ // =============================================================================
335
+ // Utilities
336
+ // =============================================================================
318
337
 
319
- return new AiResponse.AiResponse({
320
- role: AiRole.model,
321
- parts: Chunk.unsafeFromArray(aiResponseParts)
322
- })
338
+ const finishToolCall = (
339
+ toolCall: RawToolCall & { isFinished: boolean },
340
+ parts: Array<AiResponse.Part>
341
+ ) => {
342
+ // Don't emit the tool call if it's already been emitted
343
+ if (toolCall.isFinished) {
344
+ return
323
345
  }
324
- }
325
-
326
- /**
327
- * @since 1.0.0
328
- * @category models
329
- */
330
- export type StreamChunkPart = ContentPart | ToolCallPart | UsagePart
331
-
332
- /**
333
- * @since 1.0.0
334
- * @category models
335
- */
336
- export interface ContentPart {
337
- readonly _tag: "Content"
338
- readonly name?: string
339
- readonly content: string
340
- }
341
-
342
- /**
343
- * @since 1.0.0
344
- * @category models
345
- */
346
- export interface ToolCallPart {
347
- readonly _tag: "ToolCall"
348
- readonly id: string
349
- readonly name: string
350
- readonly arguments: unknown
351
- }
352
-
353
- /**
354
- * @since 1.0.0
355
- * @category models
356
- */
357
- export interface UsagePart {
358
- readonly _tag: "Usage"
359
- readonly id: string
360
- readonly model: string
361
- readonly inputTokens: number
362
- readonly outputTokens: number
363
- readonly finishReasons: ReadonlyArray<string>
364
- readonly systemFingerprint: string
365
- readonly serviceTier: string | null
346
+ try {
347
+ const params = JSON.parse(toolCall.function.arguments)
348
+ parts.push(
349
+ new AiResponse.ToolCallPart({
350
+ id: toolCall.id as ToolCallId,
351
+ name: toolCall.function.name,
352
+ params
353
+ })
354
+ )
355
+ toolCall.isFinished = true
356
+ // TODO:
357
+ // eslint-disable-next-line no-empty
358
+ } catch (e) {}
366
359
  }
@@ -8,7 +8,7 @@ import { dual } from "effect/Function"
8
8
 
9
9
  /**
10
10
  * @since 1.0.0
11
- * @category tags
11
+ * @category Context
12
12
  */
13
13
  export class OpenAiConfig extends Context.Tag("@effect/ai-openai/OpenAiConfig")<
14
14
  OpenAiConfig,
@@ -25,12 +25,11 @@ export class OpenAiConfig extends Context.Tag("@effect/ai-openai/OpenAiConfig")<
25
25
 
26
26
  /**
27
27
  * @since 1.0.0
28
- * @category models
29
28
  */
30
29
  export declare namespace OpenAiConfig {
31
30
  /**
32
31
  * @since 1.0.0
33
- * @category models
32
+ * @category Models
34
33
  */
35
34
  export interface Service {
36
35
  readonly transformClient?: (client: HttpClient) => HttpClient
@@ -39,17 +38,17 @@ export declare namespace OpenAiConfig {
39
38
 
40
39
  /**
41
40
  * @since 1.0.0
42
- * @category configuration
41
+ * @category Configuration
43
42
  */
44
43
  export const withClientTransform: {
45
44
  /**
46
45
  * @since 1.0.0
47
- * @category configuration
46
+ * @category Configuration
48
47
  */
49
48
  (transform: (client: HttpClient) => HttpClient): <A, E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>
50
49
  /**
51
50
  * @since 1.0.0
52
- * @category configuration
51
+ * @category Configuration
53
52
  */
54
53
  <A, E, R>(
55
54
  self: Effect.Effect<A, E, R>,
@@ -58,12 +57,12 @@ export const withClientTransform: {
58
57
  } = dual<
59
58
  /**
60
59
  * @since 1.0.0
61
- * @category configuration
60
+ * @category Configuration
62
61
  */
63
62
  (transform: (client: HttpClient) => HttpClient) => <A, E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>,
64
63
  /**
65
64
  * @since 1.0.0
66
- * @category configuration
65
+ * @category Configuration
67
66
  */
68
67
  <A, E, R>(
69
68
  self: Effect.Effect<A, E, R>,