@luketandjung/dedalus-labs 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1320 -0
- package/dist/index.d.ts +992 -0
- package/dist/index.js +1293 -0
- package/package.json +55 -0
package/dist/index.d.ts
ADDED
@@ -0,0 +1,992 @@
declare namespace exports_Generated {
  export { make, ValidationError, UrlCitation, TopLogprob, ToolChoiceEnum, ToolChoice, ThinkingConfigEnabled, ThinkingConfigDisabled, ReasoningSummaryEnum, ReasoningGenerateSummaryEnum, ReasoningEffortEnum, Reasoning, QueryParams, PromptTokensDetails, Models2 as Models, ModelSettingsTruncationEnum, ModelSettings, ModelId, MCPToolChoice, HeaderParams, HTTPValidationError, FunctionCall, Function, Embedding, DedalusModelChoice2 as DedalusModelChoice, DedalusModel, Custom, CreateEmbeddingResponse2 as CreateEmbeddingResponse, CreateEmbeddingRequestModelEnum2 as CreateEmbeddingRequestModelEnum, CreateEmbeddingRequestEncodingFormat, CreateEmbeddingRequest2 as CreateEmbeddingRequest, CompletionUsage2 as CompletionUsage, CompletionTokensDetails, Client2 as Client, ChoiceLogprobs2 as ChoiceLogprobs, ChoiceFinishReasonEnum2 as ChoiceFinishReasonEnum, Choice, ChatCompletionTokenLogprob, ChatCompletionServiceTierEnum2 as ChatCompletionServiceTierEnum, ChatCompletionResponseMessage, ChatCompletionRequestVerbosityEnum, ChatCompletionRequestServiceTierEnum, ChatCompletionRequestReasoningEffortEnum, ChatCompletionRequest2 as ChatCompletionRequest, ChatCompletionMessageToolCall, ChatCompletionMessageCustomToolCall, ChatCompletion2 as ChatCompletion, Audio, AnnotationsItem };
}
import * as HttpClient from "@effect/platform/HttpClient";
import * as HttpClientError from "@effect/platform/HttpClientError";
import { AiError } from "@luketandjung/ariadne";
import * as Effect from "effect/Effect";
import { ParseError } from "effect/ParseResult";
import * as S from "effect/Schema";
/**
 * Model identifier string (e.g., 'openai/gpt-5', 'anthropic/claude-3-5-sonnet').
 */
declare class ModelId extends S.String {}
declare class ToolChoiceEnum extends S.Literal("auto", "required", "none") {}
declare class MCPToolChoice extends S.Class<MCPToolChoice>("MCPToolChoice")({
  server_label: S.String,
  name: S.String
}) {}
declare class ToolChoice extends S.Union(ToolChoiceEnum, S.String, S.Record({
  key: S.String,
  value: S.Unknown
}), MCPToolChoice, S.Null) {}
declare class ModelSettingsTruncationEnum extends S.Literal("auto", "disabled") {}
declare class ReasoningEffortEnum extends S.Literal("minimal", "low", "medium", "high") {}
declare class ReasoningGenerateSummaryEnum extends S.Literal("auto", "concise", "detailed") {}
declare class ReasoningSummaryEnum extends S.Literal("auto", "concise", "detailed") {}
declare class Reasoning extends S.Class<Reasoning>("Reasoning")({
  effort: S.optionalWith(ReasoningEffortEnum, { nullable: true }),
  generate_summary: S.optionalWith(ReasoningGenerateSummaryEnum, { nullable: true }),
  summary: S.optionalWith(ReasoningSummaryEnum, { nullable: true })
}) {}
declare class QueryParams extends S.Record({
  key: S.String,
  value: S.Unknown
}) {}
declare class HeaderParams extends S.Record({
  key: S.String,
  value: S.Unknown
}) {}
declare class ModelSettings extends S.Class<ModelSettings>("ModelSettings")({
  temperature: S.optionalWith(S.Number, { nullable: true }),
  top_p: S.optionalWith(S.Number, { nullable: true }),
  frequency_penalty: S.optionalWith(S.Number, { nullable: true }),
  presence_penalty: S.optionalWith(S.Number, { nullable: true }),
  stop: S.optionalWith(S.Union(S.String, S.Array(S.String)), { nullable: true }),
  seed: S.optionalWith(S.Int, { nullable: true }),
  logit_bias: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  logprobs: S.optionalWith(S.Boolean, { nullable: true }),
  top_logprobs: S.optionalWith(S.Int, { nullable: true }),
  n: S.optionalWith(S.Int, { nullable: true }),
  user: S.optionalWith(S.String, { nullable: true }),
  response_format: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  stream: S.optionalWith(S.Boolean, { nullable: true }),
  stream_options: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  audio: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  service_tier: S.optionalWith(S.String, { nullable: true }),
  prediction: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  tool_choice: S.optionalWith(ToolChoice, { nullable: true }),
  parallel_tool_calls: S.optionalWith(S.Boolean, { nullable: true }),
  truncation: S.optionalWith(ModelSettingsTruncationEnum, { nullable: true }),
  max_tokens: S.optionalWith(S.Int, { nullable: true }),
  max_completion_tokens: S.optionalWith(S.Int, { nullable: true }),
  reasoning: S.optionalWith(Reasoning, { nullable: true }),
  reasoning_effort: S.optionalWith(S.String, { nullable: true }),
  metadata: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  store: S.optionalWith(S.Boolean, { nullable: true }),
  include_usage: S.optionalWith(S.Boolean, { nullable: true }),
  timeout: S.optionalWith(S.Number, { nullable: true }),
  prompt_cache_key: S.optionalWith(S.String, { nullable: true }),
  safety_identifier: S.optionalWith(S.String, { nullable: true }),
  verbosity: S.optionalWith(S.String, { nullable: true }),
  web_search_options: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  response_include: S.optionalWith(S.Array(S.Literal("code_interpreter_call.outputs", "computer_call_output.output.image_url", "file_search_call.results", "message.input_image.image_url", "message.output_text.logprobs", "reasoning.encrypted_content")), { nullable: true }),
  use_responses: S.optionalWith(S.Boolean, {
    nullable: true,
    default: () => false as const
  }),
  extra_query: S.optionalWith(QueryParams, { nullable: true }),
  extra_headers: S.optionalWith(HeaderParams, { nullable: true }),
  extra_args: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  attributes: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  voice: S.optionalWith(S.String, { nullable: true }),
  modalities: S.optionalWith(S.Array(S.String), { nullable: true }),
  input_audio_format: S.optionalWith(S.String, { nullable: true }),
  output_audio_format: S.optionalWith(S.String, { nullable: true }),
  input_audio_transcription: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  turn_detection: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  thinking: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  top_k: S.optionalWith(S.Int, { nullable: true }),
  generation_config: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  system_instruction: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  safety_settings: S.optionalWith(S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  })), { nullable: true }),
  tool_config: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  disable_automatic_function_calling: S.optionalWith(S.Boolean, {
    nullable: true,
    default: () => true as const
  }),
  search_parameters: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  deferred: S.optionalWith(S.Boolean, { nullable: true })
}) {}
/**
 * Structured model selection entry used in request payloads.
 *
 * Supports OpenAI-style semantics (string model id) while enabling
 * optional per-model default settings for Dedalus multi-model routing.
 */
declare class DedalusModel extends S.Class<DedalusModel>("DedalusModel")({
  model: S.String,
  settings: S.optionalWith(ModelSettings, { nullable: true })
}) {}
/**
 * Dedalus model choice - either a string ID or DedalusModel configuration object.
 */
declare class DedalusModelChoice2 extends S.Union(ModelId, DedalusModel) {}
/**
 * List of models for multi-model routing.
 */
declare class Models2 extends S.Array(DedalusModelChoice2) {}
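// Illustrative sketch (not part of the published declarations): the encoded shape
// of `Models` mixes bare model ids with per-model default settings for routing.
// The model names and settings values below are placeholders.
import { Generated } from "@luketandjung/dedalus-labs";

const routedModels: typeof Generated.Models.Encoded = [
  "openai/gpt-5",
  { model: "anthropic/claude-3-5-sonnet", settings: { temperature: 0.2, max_tokens: 1024 } }
];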
/**
 * Fields:
 * - type (required): Literal['disabled']
 */
declare class ThinkingConfigDisabled extends S.Class<ThinkingConfigDisabled>("ThinkingConfigDisabled")({ type: S.Literal("disabled") }) {}
/**
 * Fields:
 * - budget_tokens (required): int
 * - type (required): Literal['enabled']
 */
declare class ThinkingConfigEnabled extends S.Class<ThinkingConfigEnabled>("ThinkingConfigEnabled")({
  budget_tokens: S.Int.pipe(S.greaterThanOrEqualTo(1024)),
  type: S.Literal("enabled")
}) {}
declare class ChatCompletionRequestReasoningEffortEnum extends S.Literal("low", "medium", "high") {}
declare class ChatCompletionRequestServiceTierEnum extends S.Literal("auto", "default") {}
declare class ChatCompletionRequestVerbosityEnum extends S.Literal("low", "medium", "high") {}
/**
 * Chat completion request (OpenAI-compatible).
 *
 * Stateless chat completion endpoint. For stateful conversations with threads,
 * use the Responses API instead.
 */
declare class ChatCompletionRequest2 extends S.Class<ChatCompletionRequest2>("ChatCompletionRequest")({
  model: S.Union(DedalusModelChoice2, Models2),
  messages: S.Union(S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  })), S.String),
  input: S.optionalWith(S.Union(S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  })), S.String), { nullable: true }),
  temperature: S.optionalWith(S.Number.pipe(S.greaterThanOrEqualTo(0), S.lessThanOrEqualTo(2)), { nullable: true }),
  top_p: S.optionalWith(S.Number.pipe(S.greaterThanOrEqualTo(0), S.lessThanOrEqualTo(1)), { nullable: true }),
  max_tokens: S.optionalWith(S.Int.pipe(S.greaterThanOrEqualTo(1)), { nullable: true }),
  presence_penalty: S.optionalWith(S.Number.pipe(S.greaterThanOrEqualTo(-2), S.lessThanOrEqualTo(2)), { nullable: true }),
  frequency_penalty: S.optionalWith(S.Number.pipe(S.greaterThanOrEqualTo(-2), S.lessThanOrEqualTo(2)), { nullable: true }),
  logit_bias: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  stop: S.optionalWith(S.Array(S.String), { nullable: true }),
  thinking: S.optionalWith(S.Union(ThinkingConfigDisabled, ThinkingConfigEnabled), { nullable: true }),
  top_k: S.optionalWith(S.Int.pipe(S.greaterThanOrEqualTo(0)), { nullable: true }),
  system: S.optionalWith(S.Union(S.String, S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  }))), { nullable: true }),
  instructions: S.optionalWith(S.Union(S.String, S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  }))), { nullable: true }),
  generation_config: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  safety_settings: S.optionalWith(S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  })), { nullable: true }),
  tool_config: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  disable_automatic_function_calling: S.optionalWith(S.Boolean, { nullable: true }),
  seed: S.optionalWith(S.Int, { nullable: true }),
  user: S.optionalWith(S.String, { nullable: true }),
  n: S.optionalWith(S.Int.pipe(S.greaterThanOrEqualTo(1), S.lessThanOrEqualTo(128)), { nullable: true }),
  stream: S.optionalWith(S.Boolean, {
    nullable: true,
    default: () => false as const
  }),
  stream_options: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  response_format: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  tools: S.optionalWith(S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  })), { nullable: true }),
  tool_choice: S.optionalWith(S.Union(S.String, S.Record({
    key: S.String,
    value: S.Unknown
  })), { nullable: true }),
  parallel_tool_calls: S.optionalWith(S.Boolean, { nullable: true }),
  functions: S.optionalWith(S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  })), { nullable: true }),
  function_call: S.optionalWith(S.Union(S.String, S.Record({
    key: S.String,
    value: S.Unknown
  })), { nullable: true }),
  logprobs: S.optionalWith(S.Boolean, { nullable: true }),
  top_logprobs: S.optionalWith(S.Int.pipe(S.greaterThanOrEqualTo(0), S.lessThanOrEqualTo(20)), { nullable: true }),
  max_completion_tokens: S.optionalWith(S.Int.pipe(S.greaterThanOrEqualTo(1)), { nullable: true }),
  reasoning_effort: S.optionalWith(ChatCompletionRequestReasoningEffortEnum, { nullable: true }),
  audio: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  modalities: S.optionalWith(S.Array(S.String), { nullable: true }),
  prediction: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  metadata: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  store: S.optionalWith(S.Boolean, { nullable: true }),
  service_tier: S.optionalWith(ChatCompletionRequestServiceTierEnum, { nullable: true }),
  prompt_cache_key: S.optionalWith(S.String, { nullable: true }),
  safety_identifier: S.optionalWith(S.String, { nullable: true }),
  verbosity: S.optionalWith(ChatCompletionRequestVerbosityEnum, { nullable: true }),
  web_search_options: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  search_parameters: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  deferred: S.optionalWith(S.Boolean, { nullable: true }),
  mcp_servers: S.optionalWith(S.Union(S.String, S.Array(S.String)), { nullable: true }),
  guardrails: S.optionalWith(S.Array(S.Record({
    key: S.String,
    value: S.Unknown
  })), { nullable: true }),
  handoff_config: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  model_attributes: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  agent_attributes: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true }),
  max_turns: S.optionalWith(S.Int.pipe(S.greaterThanOrEqualTo(1), S.lessThanOrEqualTo(100)), { nullable: true }),
  auto_execute_tools: S.optionalWith(S.Boolean, {
    nullable: true,
    default: () => true as const
  })
}) {}
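// Illustrative sketch (placeholder values): the encoded request accepted by the
// schema above. Only `model` and `messages` are required; `mcp_servers` and
// `auto_execute_tools` are Dedalus extensions shown here with made-up values.
import { Generated } from "@luketandjung/dedalus-labs";

const request: typeof Generated.ChatCompletionRequest.Encoded = {
  model: "openai/gpt-5",
  messages: [
    { role: "system", content: "You are a terse assistant." },
    { role: "user", content: "Summarize the Dedalus chat API in one sentence." }
  ],
  temperature: 0.3,
  max_tokens: 256,
  mcp_servers: ["my-mcp-server"],
  auto_execute_tools: true
};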
declare class ChoiceFinishReasonEnum2 extends S.Literal("stop", "length", "tool_calls", "content_filter", "function_call") {}
/**
 * The function that the model called.
 *
 * Fields:
 * - name (required): str
 * - arguments (required): str
 */
declare class Function extends S.Class<Function>("Function")({
  name: S.String,
  arguments: S.String
}) {}
/**
 * A call to a function tool created by the model.
 *
 * Fields:
 * - id (required): str
 * - type (required): Literal['function']
 * - function (required): Function
 */
declare class ChatCompletionMessageToolCall extends S.Class<ChatCompletionMessageToolCall>("ChatCompletionMessageToolCall")({
  id: S.String,
  type: S.Literal("function"),
  function: Function
}) {}
/**
 * The custom tool that the model called.
 *
 * Fields:
 * - name (required): str
 * - input (required): str
 */
declare class Custom extends S.Class<Custom>("Custom")({
  name: S.String,
  input: S.String
}) {}
/**
 * A call to a custom tool created by the model.
 *
 * Fields:
 * - id (required): str
 * - type (required): Literal['custom']
 * - custom (required): Custom
 */
declare class ChatCompletionMessageCustomToolCall extends S.Class<ChatCompletionMessageCustomToolCall>("ChatCompletionMessageCustomToolCall")({
  id: S.String,
  type: S.Literal("custom"),
  custom: Custom
}) {}
/**
 * A URL citation when using web search.
 *
 * Fields:
 * - end_index (required): int
 * - start_index (required): int
 * - url (required): str
 * - title (required): str
 */
declare class UrlCitation extends S.Class<UrlCitation>("UrlCitation")({
  end_index: S.Int,
  start_index: S.Int,
  url: S.String,
  title: S.String
}) {}
/**
 * A URL citation when using web search.
 *
 * Fields:
 * - type (required): Literal['url_citation']
 * - url_citation (required): UrlCitation
 */
declare class AnnotationsItem extends S.Class<AnnotationsItem>("AnnotationsItem")({
  type: S.Literal("url_citation"),
  url_citation: UrlCitation
}) {}
/**
 * Deprecated and replaced by `tool_calls`. The name and arguments of a function that should be called, as generated by the model.
 *
 * Fields:
 * - arguments (required): str
 * - name (required): str
 */
declare class FunctionCall extends S.Class<FunctionCall>("FunctionCall")({
  arguments: S.String,
  name: S.String
}) {}
/**
 * If the audio output modality is requested, this object contains data
 *
 * about the audio response from the model. [Learn more](https://platform.openai.com/docs/guides/audio).
 *
 * Fields:
 * - id (required): str
 * - expires_at (required): int
 * - data (required): str
 * - transcript (required): str
 */
declare class Audio extends S.Class<Audio>("Audio")({
  id: S.String,
  expires_at: S.Int,
  data: S.String,
  transcript: S.String
}) {}
/**
 * A chat completion message generated by the model.
 *
 * Fields:
 * - content (required): str | None
 * - refusal (required): str | None
 * - tool_calls (optional): ChatCompletionMessageToolCalls
 * - annotations (optional): list[AnnotationsItem]
 * - role (required): Literal['assistant']
 * - function_call (optional): FunctionCall
 * - audio (optional): Audio | None
 */
declare class ChatCompletionResponseMessage extends S.Class<ChatCompletionResponseMessage>("ChatCompletionResponseMessage")({
  content: S.NullOr(S.String),
  refusal: S.optionalWith(S.NullOr(S.String), { nullable: true }),
  tool_calls: S.optionalWith(S.Array(S.Union(ChatCompletionMessageToolCall, ChatCompletionMessageCustomToolCall)), { nullable: true }),
  annotations: S.optionalWith(S.Array(AnnotationsItem), { nullable: true }),
  role: S.Literal("assistant"),
  function_call: S.optionalWith(FunctionCall, { nullable: true }),
  audio: S.optionalWith(Audio, { nullable: true })
}) {}
/**
 * Token and its log probability.
 */
declare class TopLogprob extends S.Class<TopLogprob>("TopLogprob")({
  token: S.String,
  logprob: S.Number,
  bytes: S.NullOr(S.Array(S.Int))
}) {}
/**
 * Token log probability information.
 */
declare class ChatCompletionTokenLogprob extends S.Class<ChatCompletionTokenLogprob>("ChatCompletionTokenLogprob")({
  token: S.String,
  logprob: S.Number,
  bytes: S.NullOr(S.Array(S.Int)),
  top_logprobs: S.Array(TopLogprob)
}) {}
/**
 * Log probability information for the choice.
 */
declare class ChoiceLogprobs2 extends S.Class<ChoiceLogprobs2>("ChoiceLogprobs")({
  content: S.optionalWith(S.Array(ChatCompletionTokenLogprob), { nullable: true }),
  refusal: S.optionalWith(S.Array(ChatCompletionTokenLogprob), { nullable: true })
}) {}
/**
 * A chat completion choice.
 *
 * OpenAI-compatible choice object for non-streaming responses.
 * Part of the ChatCompletion response.
 */
declare class Choice extends S.Class<Choice>("Choice")({
  finish_reason: S.optionalWith(ChoiceFinishReasonEnum2, { nullable: true }),
  index: S.Int,
  message: ChatCompletionResponseMessage,
  logprobs: S.optionalWith(ChoiceLogprobs2, { nullable: true })
}) {}
declare class ChatCompletionServiceTierEnum2 extends S.Literal("auto", "default", "flex", "scale", "priority") {}
/**
 * Breakdown of tokens used in a completion.
 *
 * Fields:
 * - accepted_prediction_tokens (optional): int
 * - audio_tokens (optional): int
 * - reasoning_tokens (optional): int
 * - rejected_prediction_tokens (optional): int
 */
declare class CompletionTokensDetails extends S.Class<CompletionTokensDetails>("CompletionTokensDetails")({
  accepted_prediction_tokens: S.optionalWith(S.Int, {
    nullable: true,
    default: () => 0 as const
  }),
  audio_tokens: S.optionalWith(S.Int, {
    nullable: true,
    default: () => 0 as const
  }),
  reasoning_tokens: S.optionalWith(S.Int, {
    nullable: true,
    default: () => 0 as const
  }),
  rejected_prediction_tokens: S.optionalWith(S.Int, {
    nullable: true,
    default: () => 0 as const
  })
}) {}
/**
 * Breakdown of tokens used in the prompt.
 *
 * Fields:
 * - audio_tokens (optional): int
 * - cached_tokens (optional): int
 */
declare class PromptTokensDetails extends S.Class<PromptTokensDetails>("PromptTokensDetails")({
  audio_tokens: S.optionalWith(S.Int, {
    nullable: true,
    default: () => 0 as const
  }),
  cached_tokens: S.optionalWith(S.Int, {
    nullable: true,
    default: () => 0 as const
  })
}) {}
/**
 * Usage statistics for the completion request.
 *
 * Fields:
 * - completion_tokens (required): int
 * - prompt_tokens (required): int
 * - total_tokens (required): int
 * - completion_tokens_details (optional): CompletionTokensDetails
 * - prompt_tokens_details (optional): PromptTokensDetails
 */
declare class CompletionUsage2 extends S.Class<CompletionUsage2>("CompletionUsage")({
  completion_tokens: S.Int,
  prompt_tokens: S.Int,
  total_tokens: S.Int,
  completion_tokens_details: S.optionalWith(CompletionTokensDetails, { nullable: true }),
  prompt_tokens_details: S.optionalWith(PromptTokensDetails, { nullable: true })
}) {}
/**
 * Chat completion response for Dedalus API.
 *
 * OpenAI-compatible chat completion response with Dedalus extensions.
 * Maintains full compatibility with OpenAI API while providing additional
 * features like server-side tool execution tracking and MCP error reporting.
 */
declare class ChatCompletion2 extends S.Class<ChatCompletion2>("ChatCompletion")({
  id: S.String,
  choices: S.Array(Choice),
  created: S.Int,
  model: S.String,
  service_tier: S.optionalWith(ChatCompletionServiceTierEnum2, { nullable: true }),
  system_fingerprint: S.optionalWith(S.String, { nullable: true }),
  object: S.Literal("chat.completion"),
  usage: S.optionalWith(CompletionUsage2, { nullable: true }),
  tools_executed: S.optionalWith(S.Array(S.String), { nullable: true }),
  mcp_server_errors: S.optionalWith(S.Record({
    key: S.String,
    value: S.Unknown
  }), { nullable: true })
}) {}
declare class ValidationError extends S.Class<ValidationError>("ValidationError")({
  loc: S.Array(S.Union(S.String, S.Int)),
  msg: S.String,
  type: S.String
}) {}
declare class HTTPValidationError extends S.Class<HTTPValidationError>("HTTPValidationError")({ detail: S.optionalWith(S.Array(ValidationError), { nullable: true }) }) {}
declare const make: (httpClient: HttpClient.HttpClient, options?: {
  readonly transformClient?: ((client: HttpClient.HttpClient) => Effect.Effect<HttpClient.HttpClient>) | undefined
}) => Client2;
interface Client2 {
  readonly httpClient: HttpClient.HttpClient;
  /**
   * Generate a model response. Supports streaming, tools, and MCP servers.
   */
  readonly createChatCompletionV1ChatCompletionsPost: (options: typeof ChatCompletionRequest2.Encoded) => Effect.Effect<typeof ChatCompletion2.Type, HttpClientError.HttpClientError | ParseError | AiError.MalformedInput>;
  /**
   * Create embeddings using the configured provider.
   */
  readonly createEmbeddingsV1EmbeddingsPost: (options: typeof CreateEmbeddingRequest2.Encoded) => Effect.Effect<typeof CreateEmbeddingResponse2.Type, HttpClientError.HttpClientError | ParseError | AiError.MalformedInput>;
}
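// Illustrative sketch of calling the low-level generated client directly. It
// assumes @effect/platform's FetchHttpClient, that requests are issued relative
// to a base URL configured on the HttpClient, and that auth is a bearer header;
// the environment variable name is a placeholder. The higher-level DedalusClient
// further below handles keys and base URLs for you.
import { FetchHttpClient, HttpClient, HttpClientRequest } from "@effect/platform";
import * as Effect from "effect/Effect";
import { Generated } from "@luketandjung/dedalus-labs";

const completion = Effect.gen(function* () {
  const baseClient = (yield* HttpClient.HttpClient).pipe(
    HttpClient.mapRequest(HttpClientRequest.prependUrl("https://api.dedaluslabs.ai/v1")),
    HttpClient.mapRequest(HttpClientRequest.setHeader("Authorization", `Bearer ${process.env.DEDALUS_API_KEY}`))
  );
  const client = Generated.make(baseClient);
  return yield* client.createChatCompletionV1ChatCompletionsPost({
    model: "openai/gpt-5",
    messages: [{ role: "user", content: "Hello" }]
  });
}).pipe(Effect.provide(FetchHttpClient.layer));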
declare class CreateEmbeddingRequestModelEnum2 extends S.Literal("openai/text-embedding-ada-002", "openai/text-embedding-3-small", "openai/text-embedding-3-large", "google/text-embedding-004") {}
/**
 * The format to return the embeddings in. Can be either `float` or [`base64`](https://pypi.org/project/pybase64/).
 */
declare class CreateEmbeddingRequestEncodingFormat extends S.Literal("float", "base64") {}
/**
 * Fields:
 * - input (required): str | Annotated[list[str], MinLen(1), MaxLen(2048)] | Annotated[list[int], MinLen(1), MaxLen(2048)] | Annotated[list[Annotated[list[int], MinLen(1)]], MinLen(1), MaxLen(2048)]
 * - model (required): str | Literal['openai/text-embedding-ada-002', 'openai/text-embedding-3-small', 'openai/text-embedding-3-large', 'google/text-embedding-004']
 * - encoding_format (optional): Literal['float', 'base64']
 * - dimensions (optional): int
 * - user (optional): str
 */
declare class CreateEmbeddingRequest2 extends S.Class<CreateEmbeddingRequest2>("CreateEmbeddingRequest")({
  input: S.Union(S.String, S.NonEmptyArray(S.String).pipe(S.minItems(1), S.maxItems(2048)), S.NonEmptyArray(S.Int).pipe(S.minItems(1), S.maxItems(2048)), S.NonEmptyArray(S.NonEmptyArray(S.Int).pipe(S.minItems(1))).pipe(S.minItems(1), S.maxItems(2048))),
  model: S.Union(S.String, CreateEmbeddingRequestModelEnum2),
  encoding_format: S.optionalWith(CreateEmbeddingRequestEncodingFormat, {
    nullable: true,
    default: () => "float" as const
  }),
  dimensions: S.optionalWith(S.Int.pipe(S.greaterThanOrEqualTo(1)), { nullable: true }),
  user: S.optionalWith(S.String, { nullable: true })
}) {}
/**
 * Single embedding object.
 */
declare class Embedding extends S.Class<Embedding>("Embedding")({
  object: S.optionalWith(S.Literal("embedding"), {
    nullable: true,
    default: () => "embedding" as const
  }),
  embedding: S.Union(S.Array(S.Number), S.String),
  index: S.Int
}) {}
/**
 * Response from embeddings endpoint.
 */
declare class CreateEmbeddingResponse2 extends S.Class<CreateEmbeddingResponse2>("CreateEmbeddingResponse")({
  object: S.optionalWith(S.Literal("list"), {
    nullable: true,
    default: () => "list" as const
  }),
  data: S.Array(Embedding),
  model: S.String,
  usage: S.Record({
    key: S.String,
    value: S.Unknown
  })
}) {}
declare namespace exports_DedalusClient {
  export { make2 as make, layerConfig, layer, StreamChatCompletionRequest, Service2 as Service, DedalusClient, ChatCompletionChunkDelta, ChatCompletionChunkChoice, ChatCompletionChunk };
}
import * as HttpClient2 from "@effect/platform/HttpClient";
import { AiError as AiError2 } from "@luketandjung/ariadne";
import * as Config from "effect/Config";
import { ConfigError } from "effect/ConfigError";
import * as Context from "effect/Context";
import * as Effect2 from "effect/Effect";
import * as Layer from "effect/Layer";
import * as Redacted from "effect/Redacted";
import * as Schema from "effect/Schema";
import * as Scope from "effect/Scope";
import * as Stream from "effect/Stream";
/**
 * @since 1.0.0
 * @category Context
 */
declare class DedalusClient extends Context.Tag("@dedalus-labs/DedalusClient")<DedalusClient, Service2>() {}
/**
 * @since 1.0.0
 * @category Models
 */
interface Service2 {
  readonly client: exports_Generated.Client;
  readonly createChatCompletion: (options: typeof exports_Generated.ChatCompletionRequest.Encoded) => Effect2.Effect<exports_Generated.ChatCompletion, AiError2.AiError>;
  readonly createChatCompletionStream: (options: Omit<typeof exports_Generated.ChatCompletionRequest.Encoded, "stream">) => Stream.Stream<ChatCompletionChunk, AiError2.AiError>;
  readonly createEmbedding: (options: typeof exports_Generated.CreateEmbeddingRequest.Encoded) => Effect2.Effect<exports_Generated.CreateEmbeddingResponse, AiError2.AiError>;
}
/**
 * @since 1.0.0
 * @category Models
 */
type StreamChatCompletionRequest = Omit<typeof exports_Generated.ChatCompletionRequest.Encoded, "stream">;
/**
 * @since 1.0.0
 * @category Constructors
 */
declare const make2: (options: {
  /**
   * Standard OAuth-style API key to use to communicate with the Dedalus API.
   */
  readonly apiKey?: Redacted.Redacted | undefined
  /**
   * An alternative API gateway/proxy style API key to use to communicate with the Dedalus API.
   */
  readonly xApiKey?: Redacted.Redacted | undefined
  /**
   * The model provider. Only for users with access to and using a BYOK API key.
   */
  readonly provider?: string | undefined
  /**
   * The model provider key. Only for users with access to and using a BYOK API key.
   */
  readonly providerKey?: Redacted.Redacted | undefined
  /**
   * The environment to use. Determines the base URL if `apiUrl` is not provided.
   * - `"production"` uses https://api.dedaluslabs.ai/v1
   * - `"development"` uses http://localhost:8080/v1
   * Defaults to `"production"`.
   */
  readonly environment?: "production" | "development" | undefined
  /**
   * The URL to use to communicate with the Dedalus API.
   * Overrides the `environment` setting if provided.
   */
  readonly apiUrl?: string | undefined
  /**
   * A method which can be used to transform the underlying `HttpClient` which
   * will be used to communicate with the Dedalus API.
   */
  readonly transformClient?: ((client: HttpClient2.HttpClient) => HttpClient2.HttpClient) | undefined
}) => Effect2.Effect<Service2, never, HttpClient2.HttpClient | Scope.Scope>;
/**
 * @since 1.0.0
 * @category Layers
 */
declare const layer: (options: {
  readonly apiKey?: Redacted.Redacted | undefined
  readonly xApiKey?: Redacted.Redacted | undefined
  readonly provider?: string | undefined
  readonly providerKey?: Redacted.Redacted | undefined
  readonly environment?: "production" | "development" | undefined
  readonly apiUrl?: string | undefined
  readonly transformClient?: (client: HttpClient2.HttpClient) => HttpClient2.HttpClient
}) => Layer.Layer<DedalusClient, never, HttpClient2.HttpClient>;
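// Illustrative sketch: building the DedalusClient layer and making one request.
// The environment variable name and model id are placeholders; `layerConfig`
// (below) is the equivalent entry point when the options come from effect/Config.
import { FetchHttpClient } from "@effect/platform";
import * as Effect from "effect/Effect";
import * as Layer from "effect/Layer";
import * as Redacted from "effect/Redacted";
import { DedalusClient } from "@luketandjung/dedalus-labs";

const DedalusLive = DedalusClient.layer({
  apiKey: Redacted.make(process.env.DEDALUS_API_KEY ?? "")
}).pipe(Layer.provide(FetchHttpClient.layer));

const program = Effect.gen(function* () {
  const dedalus = yield* DedalusClient.DedalusClient;
  const response = yield* dedalus.createChatCompletion({
    model: "openai/gpt-5",
    messages: [{ role: "user", content: "Hello from Effect" }]
  });
  return response.choices[0]?.message.content;
}).pipe(Effect.provide(DedalusLive));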
/**
 * @since 1.0.0
 * @category Layers
 */
declare const layerConfig: (options: {
  readonly apiKey?: Config.Config<Redacted.Redacted | undefined> | undefined
  readonly xApiKey?: Config.Config<Redacted.Redacted | undefined> | undefined
  readonly provider?: Config.Config<string | undefined> | undefined
  readonly providerKey?: Config.Config<Redacted.Redacted | undefined> | undefined
  readonly environment?: Config.Config<"production" | "development" | undefined> | undefined
  readonly apiUrl?: Config.Config<string | undefined> | undefined
  readonly transformClient?: (client: HttpClient2.HttpClient) => HttpClient2.HttpClient
}) => Layer.Layer<DedalusClient, ConfigError, HttpClient2.HttpClient>;
/**
 * A delta message in a streaming chat completion chunk.
 *
 * @since 1.0.0
 * @category Schemas
 */
declare class ChatCompletionChunkDelta extends Schema.Class<ChatCompletionChunkDelta>("@dedalus-labs/ChatCompletionChunkDelta")({
  role: Schema.optional(Schema.Literal("developer", "system", "user", "assistant", "tool")),
  content: Schema.optional(Schema.NullOr(Schema.String)),
  refusal: Schema.optional(Schema.NullOr(Schema.String)),
  tool_calls: Schema.optional(Schema.Array(Schema.Struct({
    index: Schema.Int,
    id: Schema.optional(Schema.String),
    type: Schema.optional(Schema.Literal("function")),
    function: Schema.optional(Schema.Struct({
      name: Schema.optional(Schema.String),
      arguments: Schema.optional(Schema.String)
    }))
  })))
}) {}
/**
 * A choice in a streaming chat completion chunk.
 *
 * @since 1.0.0
 * @category Schemas
 */
declare class ChatCompletionChunkChoice extends Schema.Class<ChatCompletionChunkChoice>("@dedalus-labs/ChatCompletionChunkChoice")({
  index: Schema.Int,
  delta: ChatCompletionChunkDelta,
  logprobs: Schema.optional(Schema.NullOr(exports_Generated.ChoiceLogprobs)),
  finish_reason: Schema.NullOr(exports_Generated.ChoiceFinishReasonEnum)
}) {}
/**
 * A streaming chat completion chunk (OpenAI-compatible).
 *
 * @since 1.0.0
 * @category Schemas
 */
declare class ChatCompletionChunk extends Schema.Class<ChatCompletionChunk>("@dedalus-labs/ChatCompletionChunk")({
  id: Schema.String,
  object: Schema.Literal("chat.completion.chunk"),
  created: Schema.Int,
  model: Schema.String,
  system_fingerprint: Schema.optional(Schema.NullOr(Schema.String)),
  choices: Schema.Array(ChatCompletionChunkChoice),
  usage: Schema.optional(Schema.NullOr(exports_Generated.CompletionUsage)),
  service_tier: Schema.optional(Schema.NullOr(exports_Generated.ChatCompletionServiceTierEnum))
}) {}
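// Illustrative sketch: consuming the streaming variant. `createChatCompletionStream`
// emits `ChatCompletionChunk` values whose deltas carry incremental content. A
// DedalusClient layer (like `DedalusLive` sketched above) still has to be provided
// before running.
import * as Effect from "effect/Effect";
import * as Stream from "effect/Stream";
import { DedalusClient } from "@luketandjung/dedalus-labs";

const streamed = Effect.gen(function* () {
  const dedalus = yield* DedalusClient.DedalusClient;
  yield* dedalus.createChatCompletionStream({
    model: "openai/gpt-5",
    messages: [{ role: "user", content: "Stream a short haiku" }]
  }).pipe(
    Stream.runForEach((chunk) =>
      Effect.sync(() => process.stdout.write(chunk.choices[0]?.delta.content ?? ""))
    )
  );
});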
declare namespace exports_DedalusConfig {
  export { withClientTransform, DedalusConfig };
}
import { HttpClient as HttpClient3 } from "@effect/platform/HttpClient";
import * as Context2 from "effect/Context";
import * as Effect3 from "effect/Effect";
/**
 * @since 1.0.0
 * @category Context
 */
declare class DedalusConfig extends Context2.Tag("@dedalus-labs/DedalusConfig")<DedalusConfig, DedalusConfig.Service>() {
  /**
   * @since 1.0.0
   */
  static readonly getOrUndefined: Effect3.Effect<DedalusConfig.Service | undefined>;
}
/**
 * @since 1.0.0
 */
declare namespace DedalusConfig {
  /**
   * @since 1.0.0
   * @category Models
   */
  interface Service {
    readonly transformClient?: (client: HttpClient3) => HttpClient3;
  }
}
/**
 * @since 1.0.0
 * @category Configuration
 */
declare const withClientTransform: {
  (transform: (client: HttpClient3) => HttpClient3): <A, E, R>(self: Effect3.Effect<A, E, R>) => Effect3.Effect<A, E, R>
  <A, E, R>(self: Effect3.Effect<A, E, R>, transform: (client: HttpClient3) => HttpClient3): Effect3.Effect<A, E, R>
};
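// Illustrative sketch: `withClientTransform` swaps in a modified HttpClient for
// the wrapped effect; here it tags outgoing requests with a header. The header
// name and value are placeholders.
import { HttpClient, HttpClientRequest } from "@effect/platform";
import { DedalusConfig } from "@luketandjung/dedalus-labs";

const withRequestTag = DedalusConfig.withClientTransform(
  HttpClient.mapRequest(HttpClientRequest.setHeader("x-request-source", "billing-job"))
);
// usage: someDedalusEffect.pipe(withRequestTag)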
declare namespace exports_DedalusEmbeddingModel {
  export { withConfigOverride, model, makeDataLoader, layerDataLoader, layerBatched, Model, Config2 as Config };
}
import { Model as AiModel, EmbeddingModel } from "@luketandjung/ariadne";
import * as Context3 from "effect/Context";
import * as Duration from "effect/Duration";
import * as Effect4 from "effect/Effect";
import * as Layer2 from "effect/Layer";
import { Simplify } from "effect/Types";
/**
 * @since 1.0.0
 * @category Models
 */
type Model = typeof exports_Generated.CreateEmbeddingRequestModelEnum.Encoded;
/**
 * @since 1.0.0
 * @category Context
 */
declare class Config2 extends Context3.Tag("@dedalus-labs/DedalusEmbeddingModel/Config")<Config2, Config2.Service>() {
  /**
   * @since 1.0.0
   */
  static readonly getOrUndefined: Effect4.Effect<Config2.Service | undefined>;
}
/**
 * @since 1.0.0
 */
declare namespace Config2 {
  /**
   * @since 1.0.0
   * @category Configuration
   */
  interface Service extends Simplify<Partial<Omit<typeof exports_Generated.CreateEmbeddingRequest.Encoded, "input">>> {}
  /**
   * @since 1.0.0
   * @category Configuration
   */
  interface Batched extends Omit<Config2.Service, "model"> {
    readonly maxBatchSize?: number;
    readonly cache?: {
      readonly capacity: number
      readonly timeToLive: Duration.DurationInput
    };
  }
  /**
   * @since 1.0.0
   * @category Configuration
   */
  interface DataLoader extends Omit<Config2.Service, "model"> {
    readonly window: Duration.DurationInput;
    readonly maxBatchSize?: number;
  }
}
/**
 * @since 1.0.0
 * @category Models
 */
declare const model: (model: (string & {}) | Model, { mode, ...config }: Simplify<({
  readonly mode: "batched"
} & Config2.Batched) | ({
  readonly mode: "data-loader"
} & Config2.DataLoader)>) => AiModel.Model<"dedalus-labs", EmbeddingModel.EmbeddingModel, DedalusClient>;
/**
 * @since 1.0.0
 * @category Constructors
 */
declare const makeDataLoader: unknown;
/**
 * @since 1.0.0
 * @category Layers
 */
declare const layerBatched: (options: {
  readonly model: (string & {}) | Model
  readonly config?: Config2.Batched
}) => Layer2.Layer<EmbeddingModel.EmbeddingModel, never, DedalusClient>;
/**
 * @since 1.0.0
 * @category Layers
 */
declare const layerDataLoader: (options: {
  readonly model: (string & {}) | Model
  readonly config: Config2.DataLoader
}) => Layer2.Layer<EmbeddingModel.EmbeddingModel, never, DedalusClient>;
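// Illustrative sketch: exposing an Ariadne EmbeddingModel backed by Dedalus.
// Reading the options above, `layerBatched` groups embedding calls up to
// `maxBatchSize` and can cache results, while `layerDataLoader` also waits up to
// `window` to coalesce requests; the numbers below are placeholders. The result
// still needs a DedalusClient API key and an HttpClient layer to run.
import * as Layer from "effect/Layer";
import { DedalusClient, DedalusEmbeddingModel } from "@luketandjung/dedalus-labs";

const EmbeddingsLive = DedalusEmbeddingModel.layerBatched({
  model: "openai/text-embedding-3-small",
  config: { maxBatchSize: 64, cache: { capacity: 1024, timeToLive: "5 minutes" } }
}).pipe(Layer.provide(DedalusClient.layer({})));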
/**
 * @since 1.0.0
 * @category Configuration
 */
declare const withConfigOverride: {
  (config: Config2.Service): <A, E, R>(self: Effect4.Effect<A, E, R>) => Effect4.Effect<A, E, R>
  <A, E, R>(self: Effect4.Effect<A, E, R>, config: Config2.Service): Effect4.Effect<A, E, R>
};
declare namespace exports_DedalusLanguageModel {
  export { withConfigOverride2 as withConfigOverride, model2 as model, make3 as make, layer2 as layer, Model2 as Model, Config3 as Config };
}
import { Model as AiModel2, LanguageModel } from "@luketandjung/ariadne";
import * as Context4 from "effect/Context";
import * as Effect5 from "effect/Effect";
import * as Layer3 from "effect/Layer";
import { Simplify as Simplify2 } from "effect/Types";
/**
 * @since 1.0.0
 * @category Models
 */
type Model2 = typeof exports_Generated.DedalusModelChoice.Encoded | typeof exports_Generated.Models.Encoded;
/**
 * @since 1.0.0
 * @category Context
 */
declare class Config3 extends Context4.Tag("@dedalus-labs/DedalusLanguageModel/Config")<Config3, Config3.Service>() {
  /**
   * @since 1.0.0
   */
  static readonly getOrUndefined: Effect5.Effect<Config3.Service | undefined>;
}
/**
 * @since 1.0.0
 */
declare namespace Config3 {
  /**
   * @since 1.0.0
   * @category Models
   */
  interface Service extends Simplify2<Partial<Omit<typeof exports_Generated.ChatCompletionRequest.Encoded, "messages" | "input" | "instructions" | "system" | "tools" | "tool_choice" | "response_format" | "stream">>> {}
}
/**
 * @since 1.0.0
 * @category Ai Models
 */
declare const model2: (model: (string & {}) | Model2, config?: Omit<Config3.Service, "model">) => AiModel2.Model<"dedalus-labs", LanguageModel.LanguageModel, DedalusClient>;
/**
 * @since 1.0.0
 * @category Constructors
 */
declare const make3: unknown;
/**
 * @since 1.0.0
 * @category Layers
 */
declare const layer2: (options: {
  readonly model: (string & {}) | Model2
  readonly config?: Omit<Config3.Service, "model">
}) => Layer3.Layer<LanguageModel.LanguageModel, never, DedalusClient>;
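// Illustrative sketch: exposing an Ariadne LanguageModel backed by Dedalus, here
// routed across two models with a request-level default. Programs written against
// the LanguageModel service are then run with Effect.provide(LanguageModelLive)
// plus an HttpClient layer; model names and settings are placeholders.
import * as Layer from "effect/Layer";
import { DedalusClient, DedalusLanguageModel } from "@luketandjung/dedalus-labs";

const LanguageModelLive = DedalusLanguageModel.layer({
  model: ["openai/gpt-5", { model: "anthropic/claude-3-5-sonnet", settings: { temperature: 0.1 } }],
  config: { max_turns: 5 }
}).pipe(Layer.provide(DedalusClient.layer({})));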
/**
 * @since 1.0.0
 * @category Configuration
 */
declare const withConfigOverride2: {
  (overrides: Config3.Service): <A, E, R>(self: Effect5.Effect<A, E, R>) => Effect5.Effect<A, E, R>
  <A, E, R>(self: Effect5.Effect<A, E, R>, overrides: Config3.Service): Effect5.Effect<A, E, R>
};
export { exports_Generated as Generated, exports_DedalusLanguageModel as DedalusLanguageModel, exports_DedalusEmbeddingModel as DedalusEmbeddingModel, exports_DedalusConfig as DedalusConfig, exports_DedalusClient as DedalusClient };