@effect/ai-openai-compat 4.0.0-beta.4 → 4.0.0-beta.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/OpenAiClient.d.ts +17 -15
- package/dist/OpenAiClient.d.ts.map +1 -1
- package/dist/OpenAiClient.js +11 -1
- package/dist/OpenAiClient.js.map +1 -1
- package/dist/OpenAiEmbeddingModel.d.ts +85 -0
- package/dist/OpenAiEmbeddingModel.d.ts.map +1 -0
- package/dist/OpenAiEmbeddingModel.js +119 -0
- package/dist/OpenAiEmbeddingModel.js.map +1 -0
- package/dist/OpenAiError.d.ts +22 -32
- package/dist/OpenAiError.d.ts.map +1 -1
- package/dist/OpenAiLanguageModel.d.ts +1 -0
- package/dist/OpenAiLanguageModel.d.ts.map +1 -1
- package/dist/OpenAiLanguageModel.js +26 -10
- package/dist/OpenAiLanguageModel.js.map +1 -1
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +8 -0
- package/dist/index.js.map +1 -1
- package/dist/internal/errors.js +4 -4
- package/dist/internal/errors.js.map +1 -1
- package/package.json +3 -3
- package/src/OpenAiClient.ts +18 -4
- package/src/OpenAiEmbeddingModel.ts +203 -0
- package/src/OpenAiError.ts +24 -32
- package/src/OpenAiLanguageModel.ts +58 -13
- package/src/index.ts +9 -0
- package/src/internal/errors.ts +4 -4
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* OpenAI Embedding Model implementation.
|
|
3
|
+
*
|
|
4
|
+
* Provides an EmbeddingModel implementation for OpenAI-compatible embeddings APIs.
|
|
5
|
+
*
|
|
6
|
+
* @since 1.0.0
|
|
7
|
+
*/
|
|
8
|
+
import * as Effect from "effect/Effect"
|
|
9
|
+
import { dual } from "effect/Function"
|
|
10
|
+
import * as Layer from "effect/Layer"
|
|
11
|
+
import * as ServiceMap from "effect/ServiceMap"
|
|
12
|
+
import type { Simplify } from "effect/Types"
|
|
13
|
+
import * as AiError from "effect/unstable/ai/AiError"
|
|
14
|
+
import * as EmbeddingModel from "effect/unstable/ai/EmbeddingModel"
|
|
15
|
+
import * as AiModel from "effect/unstable/ai/Model"
|
|
16
|
+
import type { CreateEmbedding200, CreateEmbeddingRequestJson } from "./OpenAiClient.ts"
|
|
17
|
+
import { OpenAiClient } from "./OpenAiClient.ts"
|
|
18
|
+
|
|
19
|
+
/**
 * The name of the embedding model to request from the provider.
 *
 * @since 1.0.0
 * @category models
 */
export type Model = string
|
|
24
|
+
|
|
25
|
+
/**
 * Service definition for OpenAI embedding model configuration.
 *
 * The service value is a partial `CreateEmbeddingRequestJson` (minus the
 * per-call `input` field) plus arbitrary extra provider properties; it is
 * spread into each embeddings request as override configuration.
 *
 * @since 1.0.0
 * @category context
 */
export class Config extends ServiceMap.Service<
  Config,
  Simplify<
    & Partial<
      Omit<
        CreateEmbeddingRequestJson,
        "input"
      >
    >
    & {
      readonly [x: string]: unknown
    }
  >
>()("@effect/ai-openai-compat/OpenAiEmbeddingModel/Config") {}
|
|
45
|
+
|
|
46
|
+
/**
|
|
47
|
+
* @since 1.0.0
|
|
48
|
+
* @category constructors
|
|
49
|
+
*/
|
|
50
|
+
export const model = (
|
|
51
|
+
model: string,
|
|
52
|
+
options: {
|
|
53
|
+
readonly dimensions: number
|
|
54
|
+
readonly config?: Omit<typeof Config.Service, "model" | "dimensions">
|
|
55
|
+
}
|
|
56
|
+
): AiModel.Model<"openai", EmbeddingModel.EmbeddingModel | EmbeddingModel.Dimensions, OpenAiClient> =>
|
|
57
|
+
AiModel.make(
|
|
58
|
+
"openai",
|
|
59
|
+
model,
|
|
60
|
+
Layer.merge(
|
|
61
|
+
layer({
|
|
62
|
+
model,
|
|
63
|
+
config: {
|
|
64
|
+
...options.config,
|
|
65
|
+
dimensions: options.dimensions
|
|
66
|
+
}
|
|
67
|
+
}),
|
|
68
|
+
Layer.succeed(EmbeddingModel.Dimensions, options.dimensions)
|
|
69
|
+
)
|
|
70
|
+
)
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Creates an OpenAI embedding model service.
|
|
74
|
+
*
|
|
75
|
+
* @since 1.0.0
|
|
76
|
+
* @category constructors
|
|
77
|
+
*/
|
|
78
|
+
export const make = Effect.fnUntraced(function*({ model, config: providerConfig }: {
|
|
79
|
+
readonly model: string
|
|
80
|
+
readonly config?: Omit<typeof Config.Service, "model"> | undefined
|
|
81
|
+
}): Effect.fn.Return<EmbeddingModel.Service, never, OpenAiClient> {
|
|
82
|
+
const client = yield* OpenAiClient
|
|
83
|
+
|
|
84
|
+
const makeConfig = Effect.gen(function*() {
|
|
85
|
+
const services = yield* Effect.services<never>()
|
|
86
|
+
return { model, ...providerConfig, ...services.mapUnsafe.get(Config.key) }
|
|
87
|
+
})
|
|
88
|
+
|
|
89
|
+
return yield* EmbeddingModel.make({
|
|
90
|
+
embedMany: Effect.fnUntraced(function*({ inputs }) {
|
|
91
|
+
const config = yield* makeConfig
|
|
92
|
+
const response = yield* client.createEmbedding({ ...config, input: inputs })
|
|
93
|
+
return yield* mapProviderResponse(inputs.length, response)
|
|
94
|
+
})
|
|
95
|
+
})
|
|
96
|
+
})
|
|
97
|
+
|
|
98
|
+
/**
|
|
99
|
+
* Creates a layer for the OpenAI embedding model.
|
|
100
|
+
*
|
|
101
|
+
* @since 1.0.0
|
|
102
|
+
* @category layers
|
|
103
|
+
*/
|
|
104
|
+
export const layer = (options: {
|
|
105
|
+
readonly model: string
|
|
106
|
+
readonly config?: Omit<typeof Config.Service, "model"> | undefined
|
|
107
|
+
}): Layer.Layer<EmbeddingModel.EmbeddingModel, never, OpenAiClient> =>
|
|
108
|
+
Layer.effect(EmbeddingModel.EmbeddingModel, make(options))
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Provides config overrides for OpenAI embedding model operations.
|
|
112
|
+
*
|
|
113
|
+
* @since 1.0.0
|
|
114
|
+
* @category configuration
|
|
115
|
+
*/
|
|
116
|
+
export const withConfigOverride: {
|
|
117
|
+
/**
|
|
118
|
+
* Provides config overrides for OpenAI embedding model operations.
|
|
119
|
+
*
|
|
120
|
+
* @since 1.0.0
|
|
121
|
+
* @category configuration
|
|
122
|
+
*/
|
|
123
|
+
(overrides: typeof Config.Service): <A, E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<A, E, Exclude<R, Config>>
|
|
124
|
+
/**
|
|
125
|
+
* Provides config overrides for OpenAI embedding model operations.
|
|
126
|
+
*
|
|
127
|
+
* @since 1.0.0
|
|
128
|
+
* @category configuration
|
|
129
|
+
*/
|
|
130
|
+
<A, E, R>(self: Effect.Effect<A, E, R>, overrides: typeof Config.Service): Effect.Effect<A, E, Exclude<R, Config>>
|
|
131
|
+
} = dual<
|
|
132
|
+
/**
|
|
133
|
+
* Provides config overrides for OpenAI embedding model operations.
|
|
134
|
+
*
|
|
135
|
+
* @since 1.0.0
|
|
136
|
+
* @category configuration
|
|
137
|
+
*/
|
|
138
|
+
(overrides: typeof Config.Service) => <A, E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<A, E, Exclude<R, Config>>,
|
|
139
|
+
/**
|
|
140
|
+
* Provides config overrides for OpenAI embedding model operations.
|
|
141
|
+
*
|
|
142
|
+
* @since 1.0.0
|
|
143
|
+
* @category configuration
|
|
144
|
+
*/
|
|
145
|
+
<A, E, R>(self: Effect.Effect<A, E, R>, overrides: typeof Config.Service) => Effect.Effect<A, E, Exclude<R, Config>>
|
|
146
|
+
>(2, (self, overrides) =>
|
|
147
|
+
Effect.flatMap(
|
|
148
|
+
Effect.serviceOption(Config),
|
|
149
|
+
(config) =>
|
|
150
|
+
Effect.provideService(self, Config, {
|
|
151
|
+
...(config._tag === "Some" ? config.value : {}),
|
|
152
|
+
...overrides
|
|
153
|
+
})
|
|
154
|
+
))
|
|
155
|
+
|
|
156
|
+
const mapProviderResponse = (
|
|
157
|
+
inputLength: number,
|
|
158
|
+
response: CreateEmbedding200
|
|
159
|
+
): Effect.Effect<EmbeddingModel.ProviderResponse, AiError.AiError> => {
|
|
160
|
+
if (response.data.length !== inputLength) {
|
|
161
|
+
return Effect.fail(
|
|
162
|
+
invalidOutput(`Provider returned ${response.data.length} embeddings but expected ${inputLength}`)
|
|
163
|
+
)
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
const results = new Array<Array<number>>(inputLength)
|
|
167
|
+
const seen = new Set<number>()
|
|
168
|
+
|
|
169
|
+
for (const entry of response.data) {
|
|
170
|
+
if (!Number.isInteger(entry.index) || entry.index < 0 || entry.index >= inputLength) {
|
|
171
|
+
return Effect.fail(invalidOutput(`Provider returned invalid embedding index: ${entry.index}`))
|
|
172
|
+
}
|
|
173
|
+
if (seen.has(entry.index)) {
|
|
174
|
+
return Effect.fail(invalidOutput(`Provider returned duplicate embedding index: ${entry.index}`))
|
|
175
|
+
}
|
|
176
|
+
if (!Array.isArray(entry.embedding)) {
|
|
177
|
+
return Effect.fail(invalidOutput(`Provider returned non-vector embedding at index ${entry.index}`))
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
seen.add(entry.index)
|
|
181
|
+
results[entry.index] = [...entry.embedding]
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
if (seen.size !== inputLength) {
|
|
185
|
+
return Effect.fail(
|
|
186
|
+
invalidOutput(`Provider returned embeddings for ${seen.size} inputs but expected ${inputLength}`)
|
|
187
|
+
)
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
return Effect.succeed({
|
|
191
|
+
results,
|
|
192
|
+
usage: {
|
|
193
|
+
inputTokens: response.usage?.prompt_tokens
|
|
194
|
+
}
|
|
195
|
+
})
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
const invalidOutput = (description: string): AiError.AiError =>
|
|
199
|
+
AiError.make({
|
|
200
|
+
module: "OpenAiEmbeddingModel",
|
|
201
|
+
method: "embedMany",
|
|
202
|
+
reason: new AiError.InvalidOutputError({ description })
|
|
203
|
+
})
|
package/src/OpenAiError.ts
CHANGED
|
@@ -52,51 +52,43 @@ export type OpenAiRateLimitMetadata = OpenAiErrorMetadata & {
|
|
|
52
52
|
}
|
|
53
53
|
|
|
54
54
|
declare module "effect/unstable/ai/AiError" {
|
|
55
|
-
export interface
|
|
56
|
-
readonly
|
|
57
|
-
readonly openai?: OpenAiRateLimitMetadata | null
|
|
58
|
-
}
|
|
55
|
+
export interface RateLimitErrorMetadata {
|
|
56
|
+
readonly openai?: OpenAiRateLimitMetadata | null
|
|
59
57
|
}
|
|
60
58
|
|
|
61
|
-
export interface
|
|
62
|
-
readonly
|
|
63
|
-
readonly openai?: OpenAiErrorMetadata | null
|
|
64
|
-
}
|
|
59
|
+
export interface QuotaExhaustedErrorMetadata {
|
|
60
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
65
61
|
}
|
|
66
62
|
|
|
67
|
-
export interface
|
|
68
|
-
readonly
|
|
69
|
-
readonly openai?: OpenAiErrorMetadata | null
|
|
70
|
-
}
|
|
63
|
+
export interface AuthenticationErrorMetadata {
|
|
64
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
71
65
|
}
|
|
72
66
|
|
|
73
|
-
export interface
|
|
74
|
-
readonly
|
|
75
|
-
readonly openai?: OpenAiErrorMetadata | null
|
|
76
|
-
}
|
|
67
|
+
export interface ContentPolicyErrorMetadata {
|
|
68
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
77
69
|
}
|
|
78
70
|
|
|
79
|
-
export interface
|
|
80
|
-
readonly
|
|
81
|
-
readonly openai?: OpenAiErrorMetadata | null
|
|
82
|
-
}
|
|
71
|
+
export interface InvalidRequestErrorMetadata {
|
|
72
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
83
73
|
}
|
|
84
74
|
|
|
85
|
-
export interface
|
|
86
|
-
readonly
|
|
87
|
-
readonly openai?: OpenAiErrorMetadata | null
|
|
88
|
-
}
|
|
75
|
+
export interface InternalProviderErrorMetadata {
|
|
76
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
89
77
|
}
|
|
90
78
|
|
|
91
|
-
export interface
|
|
92
|
-
readonly
|
|
93
|
-
readonly openai?: OpenAiErrorMetadata | null
|
|
94
|
-
}
|
|
79
|
+
export interface InvalidOutputErrorMetadata {
|
|
80
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
95
81
|
}
|
|
96
82
|
|
|
97
|
-
export interface
|
|
98
|
-
readonly
|
|
99
|
-
|
|
100
|
-
|
|
83
|
+
export interface StructuredOutputErrorMetadata {
|
|
84
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
export interface UnsupportedSchemaErrorMetadata {
|
|
88
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
export interface UnknownErrorMetadata {
|
|
92
|
+
readonly openai?: OpenAiErrorMetadata | null
|
|
101
93
|
}
|
|
102
94
|
}
|
|
@@ -8,9 +8,10 @@
|
|
|
8
8
|
*/
|
|
9
9
|
import * as DateTime from "effect/DateTime"
|
|
10
10
|
import * as Effect from "effect/Effect"
|
|
11
|
-
import * as
|
|
11
|
+
import * as Encoding from "effect/Encoding"
|
|
12
12
|
import { dual } from "effect/Function"
|
|
13
13
|
import * as Layer from "effect/Layer"
|
|
14
|
+
import * as Option from "effect/Option"
|
|
14
15
|
import * as Predicate from "effect/Predicate"
|
|
15
16
|
import * as Redactable from "effect/Redactable"
|
|
16
17
|
import type * as Schema from "effect/Schema"
|
|
@@ -101,6 +102,7 @@ export class Config extends ServiceMap.Service<
|
|
|
101
102
|
* Defaults to `true`.
|
|
102
103
|
*/
|
|
103
104
|
readonly strictJsonSchema?: boolean | undefined
|
|
105
|
+
readonly [x: string]: unknown
|
|
104
106
|
}
|
|
105
107
|
>
|
|
106
108
|
>()("@effect/ai-openai-compat/OpenAiLanguageModel/Config") {}
|
|
@@ -316,7 +318,7 @@ export const model = (
|
|
|
316
318
|
model: string,
|
|
317
319
|
config?: Omit<typeof Config.Service, "model">
|
|
318
320
|
): AiModel.Model<"openai", LanguageModel.LanguageModel, OpenAiClient> =>
|
|
319
|
-
AiModel.make("openai", layer({ model, config }))
|
|
321
|
+
AiModel.make("openai", model, layer({ model, config }))
|
|
320
322
|
|
|
321
323
|
// TODO
|
|
322
324
|
// /**
|
|
@@ -327,7 +329,7 @@ export const model = (
|
|
|
327
329
|
// model: string,
|
|
328
330
|
// config?: Omit<typeof Config.Service, "model">
|
|
329
331
|
// ): AiModel.Model<"openai", LanguageModel.LanguageModel | Tokenizer.Tokenizer, OpenAiClient> =>
|
|
330
|
-
// AiModel.make("openai", layerWithTokenizer({ model, config }))
|
|
332
|
+
// AiModel.make("openai", model, layerWithTokenizer({ model, config }))
|
|
331
333
|
|
|
332
334
|
/**
|
|
333
335
|
* Creates an OpenAI language model service.
|
|
@@ -386,6 +388,7 @@ export const make = Effect.fnUntraced(function*({ model, config: providerConfig
|
|
|
386
388
|
)
|
|
387
389
|
|
|
388
390
|
return yield* LanguageModel.make({
|
|
391
|
+
codecTransformer: toCodecOpenAI,
|
|
389
392
|
generateText: Effect.fnUntraced(
|
|
390
393
|
function*(options) {
|
|
391
394
|
const config = yield* makeConfig
|
|
@@ -423,10 +426,7 @@ export const make = Effect.fnUntraced(function*({ model, config: providerConfig
|
|
|
423
426
|
})
|
|
424
427
|
)
|
|
425
428
|
)
|
|
426
|
-
})
|
|
427
|
-
LanguageModel.CurrentCodecTransformer,
|
|
428
|
-
toCodecOpenAI
|
|
429
|
-
))
|
|
429
|
+
})
|
|
430
430
|
})
|
|
431
431
|
|
|
432
432
|
/**
|
|
@@ -561,7 +561,7 @@ const prepareMessages = Effect.fnUntraced(
|
|
|
561
561
|
}
|
|
562
562
|
|
|
563
563
|
if (part.data instanceof Uint8Array) {
|
|
564
|
-
const base64 =
|
|
564
|
+
const base64 = Encoding.encodeBase64(part.data)
|
|
565
565
|
const imageUrl = `data:${mediaType};base64,${base64}`
|
|
566
566
|
content.push({ type: "input_image", image_url: imageUrl, detail })
|
|
567
567
|
}
|
|
@@ -575,7 +575,7 @@ const prepareMessages = Effect.fnUntraced(
|
|
|
575
575
|
}
|
|
576
576
|
|
|
577
577
|
if (part.data instanceof Uint8Array) {
|
|
578
|
-
const base64 =
|
|
578
|
+
const base64 = Encoding.encodeBase64(part.data)
|
|
579
579
|
const fileName = part.fileName ?? `part-${index}.pdf`
|
|
580
580
|
const fileData = `data:application/pdf;base64,${base64}`
|
|
581
581
|
content.push({ type: "input_file", filename: fileName, file_data: fileData })
|
|
@@ -782,7 +782,7 @@ const buildHttpRequestDetails = (
|
|
|
782
782
|
method: request.method,
|
|
783
783
|
url: request.url,
|
|
784
784
|
urlParams: Array.from(request.urlParams),
|
|
785
|
-
hash: request.hash,
|
|
785
|
+
hash: Option.getOrUndefined(request.hash),
|
|
786
786
|
headers: Redactable.redact(request.headers) as Record<string, string>
|
|
787
787
|
})
|
|
788
788
|
|
|
@@ -1002,11 +1002,13 @@ const makeStreamResponse = Effect.fnUntraced(
|
|
|
1002
1002
|
hasToolCalls = hasToolCalls || choice.delta.tool_calls.length > 0
|
|
1003
1003
|
choice.delta.tool_calls.forEach((deltaTool, indexInChunk) => {
|
|
1004
1004
|
const toolIndex = deltaTool.index ?? indexInChunk
|
|
1005
|
-
const
|
|
1005
|
+
const activeToolCall = activeToolCalls[toolIndex]
|
|
1006
|
+
const toolId = activeToolCall?.id ?? deltaTool.id ?? `${event.id}_tool_${toolIndex}`
|
|
1006
1007
|
const providerToolName = deltaTool.function?.name
|
|
1007
|
-
const toolName =
|
|
1008
|
+
const toolName = providerToolName !== undefined
|
|
1009
|
+
? toolNameMapper.getCustomName(providerToolName)
|
|
1010
|
+
: activeToolCall?.name ?? toolNameMapper.getCustomName("unknown_tool")
|
|
1008
1011
|
const argumentsDelta = deltaTool.function?.arguments ?? ""
|
|
1009
|
-
const activeToolCall = activeToolCalls[toolIndex]
|
|
1010
1012
|
|
|
1011
1013
|
if (Predicate.isUndefined(activeToolCall)) {
|
|
1012
1014
|
activeToolCalls[toolIndex] = {
|
|
@@ -1220,6 +1222,7 @@ const toChatCompletionsRequest = (payload: CreateResponse): CreateResponseReques
|
|
|
1220
1222
|
const toolChoice = toChatToolChoice(payload.tool_choice)
|
|
1221
1223
|
|
|
1222
1224
|
return {
|
|
1225
|
+
...extractCustomRequestProperties(payload),
|
|
1223
1226
|
model: payload.model ?? "",
|
|
1224
1227
|
messages: messages.length > 0 ? messages : [{ role: "user", content: "" }],
|
|
1225
1228
|
...(payload.temperature !== undefined ? { temperature: payload.temperature } : undefined),
|
|
@@ -1231,12 +1234,54 @@ const toChatCompletionsRequest = (payload: CreateResponse): CreateResponseReques
|
|
|
1231
1234
|
? { parallel_tool_calls: payload.parallel_tool_calls }
|
|
1232
1235
|
: undefined),
|
|
1233
1236
|
...(payload.service_tier !== undefined ? { service_tier: payload.service_tier } : undefined),
|
|
1237
|
+
...(payload.reasoning !== undefined ? { reasoning: payload.reasoning } : undefined),
|
|
1234
1238
|
...(responseFormat !== undefined ? { response_format: responseFormat } : undefined),
|
|
1235
1239
|
...(tools.length > 0 ? { tools } : undefined),
|
|
1236
1240
|
...(toolChoice !== undefined ? { tool_choice: toolChoice } : undefined)
|
|
1237
1241
|
}
|
|
1238
1242
|
}
|
|
1239
1243
|
|
|
1244
|
+
const createResponseKnownProperties = new Set<string>([
|
|
1245
|
+
"metadata",
|
|
1246
|
+
"top_logprobs",
|
|
1247
|
+
"temperature",
|
|
1248
|
+
"top_p",
|
|
1249
|
+
"user",
|
|
1250
|
+
"safety_identifier",
|
|
1251
|
+
"prompt_cache_key",
|
|
1252
|
+
"service_tier",
|
|
1253
|
+
"prompt_cache_retention",
|
|
1254
|
+
"previous_response_id",
|
|
1255
|
+
"model",
|
|
1256
|
+
"reasoning",
|
|
1257
|
+
"background",
|
|
1258
|
+
"max_output_tokens",
|
|
1259
|
+
"max_tool_calls",
|
|
1260
|
+
"text",
|
|
1261
|
+
"tools",
|
|
1262
|
+
"tool_choice",
|
|
1263
|
+
"truncation",
|
|
1264
|
+
"input",
|
|
1265
|
+
"include",
|
|
1266
|
+
"parallel_tool_calls",
|
|
1267
|
+
"store",
|
|
1268
|
+
"instructions",
|
|
1269
|
+
"stream",
|
|
1270
|
+
"conversation",
|
|
1271
|
+
"modalities",
|
|
1272
|
+
"seed"
|
|
1273
|
+
])
|
|
1274
|
+
|
|
1275
|
+
const extractCustomRequestProperties = (payload: CreateResponse): Record<string, unknown> => {
|
|
1276
|
+
const customProperties: Record<string, unknown> = {}
|
|
1277
|
+
for (const [key, value] of Object.entries(payload)) {
|
|
1278
|
+
if (!createResponseKnownProperties.has(key)) {
|
|
1279
|
+
customProperties[key] = value
|
|
1280
|
+
}
|
|
1281
|
+
}
|
|
1282
|
+
return customProperties
|
|
1283
|
+
}
|
|
1284
|
+
|
|
1240
1285
|
const toChatResponseFormat = (
|
|
1241
1286
|
format: TextResponseFormatConfiguration | undefined
|
|
1242
1287
|
): CreateResponseRequestJson["response_format"] | undefined => {
|
package/src/index.ts
CHANGED
|
@@ -14,6 +14,15 @@ export * as OpenAiClient from "./OpenAiClient.ts"
|
|
|
14
14
|
*/
|
|
15
15
|
export * as OpenAiConfig from "./OpenAiConfig.ts"
|
|
16
16
|
|
|
17
|
+
/**
|
|
18
|
+
* OpenAI Embedding Model implementation.
|
|
19
|
+
*
|
|
20
|
+
* Provides an EmbeddingModel implementation for OpenAI-compatible embeddings APIs.
|
|
21
|
+
*
|
|
22
|
+
* @since 1.0.0
|
|
23
|
+
*/
|
|
24
|
+
export * as OpenAiEmbeddingModel from "./OpenAiEmbeddingModel.ts"
|
|
25
|
+
|
|
17
26
|
/**
|
|
18
27
|
* @since 1.0.0
|
|
19
28
|
*/
|
package/src/internal/errors.ts
CHANGED
|
@@ -153,12 +153,12 @@ export const parseRateLimitHeaders = (headers: Record<string, string>) => {
|
|
|
153
153
|
let retryAfter: Duration.Duration | undefined
|
|
154
154
|
if (retryAfterRaw !== undefined) {
|
|
155
155
|
const parsed = Number.parse(retryAfterRaw)
|
|
156
|
-
if (parsed
|
|
157
|
-
retryAfter = Duration.seconds(parsed)
|
|
156
|
+
if (Option.isSome(parsed)) {
|
|
157
|
+
retryAfter = Duration.seconds(parsed.value)
|
|
158
158
|
}
|
|
159
159
|
}
|
|
160
160
|
const remainingRaw = headers["x-ratelimit-remaining-requests"]
|
|
161
|
-
const remaining = remainingRaw !== undefined ? Number.parse(remainingRaw)
|
|
161
|
+
const remaining = remainingRaw !== undefined ? Option.getOrNull(Number.parse(remainingRaw)) : null
|
|
162
162
|
return {
|
|
163
163
|
retryAfter,
|
|
164
164
|
limit: headers["x-ratelimit-limit-requests"] ?? null,
|
|
@@ -175,7 +175,7 @@ export const buildHttpRequestDetails = (
|
|
|
175
175
|
method: request.method,
|
|
176
176
|
url: request.url,
|
|
177
177
|
urlParams: Array.from(request.urlParams),
|
|
178
|
-
hash: request.hash,
|
|
178
|
+
hash: Option.getOrUndefined(request.hash),
|
|
179
179
|
headers: Redactable.redact(request.headers) as Record<string, string>
|
|
180
180
|
})
|
|
181
181
|
|