@heungtae/codex-chat-bridge 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/main.rs ADDED
@@ -0,0 +1,1172 @@
1
+ use anyhow::Context;
2
+ use anyhow::Result;
3
+ use anyhow::anyhow;
4
+ use async_stream::stream;
5
+ use axum::Router;
6
+ use axum::body::Body;
7
+ use axum::body::Bytes;
8
+ use axum::extract::State;
9
+ use axum::http::HeaderMap;
10
+ use axum::http::HeaderName;
11
+ use axum::http::HeaderValue;
12
+ use axum::http::StatusCode;
13
+ use axum::http::header::CACHE_CONTROL;
14
+ use axum::http::header::CONTENT_TYPE;
15
+ use axum::response::IntoResponse;
16
+ use axum::response::Response;
17
+ use axum::routing::get;
18
+ use axum::routing::post;
19
+ use clap::Parser;
20
+ use futures::Stream;
21
+ use futures::StreamExt;
22
+ use reqwest::Client;
23
+ use serde::Deserialize;
24
+ use serde::Serialize;
25
+ use serde_json::Value;
26
+ use serde_json::json;
27
+ use std::collections::BTreeMap;
28
+ use std::fs::File;
29
+ use std::fs::{self};
30
+ use std::io::Write;
31
+ use std::path::Path;
32
+ use std::path::PathBuf;
33
+ use std::sync::Arc;
34
+ use tracing::info;
35
+ use tracing::warn;
36
+ use uuid::Uuid;
37
+
38
/// Command-line arguments for the bridge binary.
///
/// Any value left unset here falls back to the config file, then to a
/// built-in default (see `resolve_config`).
#[derive(Debug, Clone, Parser)]
#[command(
    name = "codex-chat-bridge",
    about = "Responses-to-Chat completions bridge"
)]
struct Args {
    // Path to the optional TOML configuration file.
    #[arg(long, value_name = "FILE", default_value = "conf.toml")]
    config: PathBuf,

    // Interface to bind; defaults to "127.0.0.1" when unset everywhere.
    #[arg(long)]
    host: Option<String>,

    // Port to bind; unset means port 0 (OS-assigned ephemeral port).
    #[arg(long)]
    port: Option<u16>,

    // Upstream Chat Completions endpoint URL.
    #[arg(long)]
    upstream_url: Option<String>,

    // Name of the environment variable holding the upstream API key
    // (the key itself is never passed on the command line).
    #[arg(long)]
    api_key_env: Option<String>,

    // Where to write the JSON `{port, pid}` server-info file.
    #[arg(long, value_name = "FILE")]
    server_info: Option<PathBuf>,

    // Enables the GET /shutdown endpoint.
    #[arg(long)]
    http_shutdown: bool,
}

/// Optional overrides read from the TOML config file; every field may be
/// absent, in which case CLI/defaults apply.
#[derive(Debug, Clone, Default, Deserialize)]
struct FileConfig {
    host: Option<String>,
    port: Option<u16>,
    upstream_url: Option<String>,
    api_key_env: Option<String>,
    server_info: Option<PathBuf>,
    http_shutdown: Option<bool>,
}

/// Final configuration after merging CLI args over file config over defaults.
#[derive(Debug, Clone)]
struct ResolvedConfig {
    host: String,
    port: Option<u16>,
    upstream_url: String,
    api_key_env: String,
    server_info: Option<PathBuf>,
    http_shutdown: bool,
}

/// Shared state handed to every axum handler via `State<Arc<AppState>>`.
#[derive(Clone)]
struct AppState {
    client: Client,
    upstream_url: String,
    api_key: String,
    http_shutdown: bool,
}

/// JSON payload written to the `--server-info` file so a parent process can
/// discover the bound port and this process's pid.
#[derive(Serialize)]
struct ServerInfo {
    port: u16,
    pid: u32,
}

/// A translated upstream chat request plus the synthetic Responses-API id
/// used in every SSE event emitted for it.
#[derive(Debug)]
struct BridgeRequest {
    chat_request: Value,
    response_id: String,
}

/// One decoded chunk of the upstream Chat Completions SSE stream.
#[derive(Debug, Deserialize)]
struct ChatChunk {
    #[allow(dead_code)]
    id: Option<String>,
    #[serde(default)]
    choices: Vec<ChatChoice>,
    #[serde(default)]
    usage: Option<ChatUsage>,
}

#[derive(Debug, Deserialize)]
struct ChatChoice {
    #[serde(default)]
    delta: Option<ChatDelta>,
    #[allow(dead_code)]
    #[serde(default)]
    finish_reason: Option<String>,
}

/// Incremental delta inside a chat choice: text content and/or tool calls.
#[derive(Debug, Deserialize)]
struct ChatDelta {
    #[serde(default)]
    content: Option<String>,
    #[serde(default)]
    tool_calls: Option<Vec<ChatToolCallDelta>>,
}

/// Streamed fragment of a tool call; fields arrive piecemeal across chunks.
#[derive(Debug, Deserialize)]
struct ChatToolCallDelta {
    #[serde(default)]
    index: Option<usize>,
    #[serde(default)]
    id: Option<String>,
    #[serde(default)]
    function: Option<ChatFunctionDelta>,
}

#[derive(Debug, Deserialize)]
struct ChatFunctionDelta {
    #[serde(default)]
    name: Option<String>,
    #[serde(default)]
    arguments: Option<String>,
}

/// Token usage block, typically present only on the final upstream chunk.
#[derive(Debug, Deserialize, Clone)]
struct ChatUsage {
    #[serde(default)]
    prompt_tokens: i64,
    #[serde(default)]
    completion_tokens: i64,
    #[serde(default)]
    total_tokens: i64,
}

/// Accumulates the streamed fragments of a single tool call; `arguments` is
/// concatenated from partial JSON strings across chunks.
#[derive(Debug, Default)]
struct ToolCallAccumulator {
    id: Option<String>,
    name: Option<String>,
    arguments: String,
}

/// Aggregate state for one translated stream: full assistant text, tool calls
/// keyed by upstream index (BTreeMap keeps emission order stable), and usage.
#[derive(Debug, Default)]
struct StreamAccumulator {
    assistant_text: String,
    tool_calls: BTreeMap<usize, ToolCallAccumulator>,
    usage: Option<ChatUsage>,
}

/// Incremental parser for Server-Sent-Events `data:` payloads; tolerates
/// events split across arbitrary chunk boundaries.
#[derive(Debug, Default)]
struct SseParser {
    buffer: String,
    current_data_lines: Vec<String>,
}
180
+
181
/// Entry point: initialize logging, merge configuration, build the router,
/// bind the listener, optionally publish server info, then serve forever.
#[tokio::main]
async fn main() -> Result<()> {
    // Respect RUST_LOG-style env filtering; default level is `info`.
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::try_from_default_env()
                .unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info")),
        )
        .init();

    let args = Args::parse();
    let file_config = load_file_config(&args.config)?;
    let config = resolve_config(args, file_config);

    // Only the env var *name* is configurable; the key value must be present
    // and non-blank in the environment.
    let api_key = std::env::var(&config.api_key_env)
        .ok()
        .filter(|v| !v.trim().is_empty())
        .ok_or_else(|| anyhow!("missing or empty env var: {}", config.api_key_env))?;

    let client = Client::builder()
        .build()
        .context("building reqwest client")?;

    let state = Arc::new(AppState {
        client,
        upstream_url: config.upstream_url.clone(),
        api_key,
        http_shutdown: config.http_shutdown,
    });

    let app = Router::new()
        .route("/v1/responses", post(handle_responses))
        .route("/healthz", get(healthz))
        .route("/shutdown", get(shutdown))
        .with_state(state);

    // Port 0 asks the OS for an ephemeral port; the actual port is read back
    // from `local_addr` below.
    let bind_addr = format!("{}:{}", config.host, config.port.unwrap_or(0));
    let listener = tokio::net::TcpListener::bind(&bind_addr)
        .await
        .with_context(|| format!("binding {bind_addr}"))?;
    let local_addr = listener.local_addr().context("reading local_addr")?;

    // Publish the chosen port/pid so a parent process can discover them.
    if let Some(path) = config.server_info.as_ref() {
        write_server_info(path, local_addr.port())?;
    }

    info!("codex-chat-bridge listening on {}", local_addr);
    axum::serve(listener, app)
        .await
        .context("serving axum app")?;
    Ok(())
}
232
+
233
+ fn load_file_config(path: &Path) -> Result<Option<FileConfig>> {
234
+ if !path.exists() {
235
+ return Ok(None);
236
+ }
237
+
238
+ let raw = fs::read_to_string(path)
239
+ .with_context(|| format!("reading config file {}", path.display()))?;
240
+ let parsed: FileConfig = toml::from_str(&raw)
241
+ .with_context(|| format!("parsing config file {}", path.display()))?;
242
+ info!("loaded config file {}", path.display());
243
+ Ok(Some(parsed))
244
+ }
245
+
246
+ fn resolve_config(args: Args, file_config: Option<FileConfig>) -> ResolvedConfig {
247
+ let file_config = file_config.unwrap_or_default();
248
+
249
+ ResolvedConfig {
250
+ host: args
251
+ .host
252
+ .or(file_config.host)
253
+ .unwrap_or_else(|| "127.0.0.1".to_string()),
254
+ port: args.port.or(file_config.port),
255
+ upstream_url: args
256
+ .upstream_url
257
+ .or(file_config.upstream_url)
258
+ .unwrap_or_else(|| "https://api.openai.com/v1/chat/completions".to_string()),
259
+ api_key_env: args
260
+ .api_key_env
261
+ .or(file_config.api_key_env)
262
+ .unwrap_or_else(|| "OPENAI_API_KEY".to_string()),
263
+ server_info: args.server_info.or(file_config.server_info),
264
+ http_shutdown: args.http_shutdown || file_config.http_shutdown.unwrap_or(false),
265
+ }
266
+ }
267
+
268
+ fn write_server_info(path: &Path, port: u16) -> Result<()> {
269
+ if let Some(parent) = path.parent()
270
+ && !parent.as_os_str().is_empty()
271
+ {
272
+ fs::create_dir_all(parent)?;
273
+ }
274
+
275
+ let info = ServerInfo {
276
+ port,
277
+ pid: std::process::id(),
278
+ };
279
+ let mut data = serde_json::to_string(&info)?;
280
+ data.push('\n');
281
+ let mut f = File::create(path)?;
282
+ f.write_all(data.as_bytes())?;
283
+ Ok(())
284
+ }
285
+
286
+ async fn healthz() -> impl IntoResponse {
287
+ (StatusCode::OK, "ok")
288
+ }
289
+
290
+ async fn shutdown(State(state): State<Arc<AppState>>) -> impl IntoResponse {
291
+ if !state.http_shutdown {
292
+ return (StatusCode::NOT_FOUND, "not found").into_response();
293
+ }
294
+
295
+ tokio::spawn(async {
296
+ tokio::time::sleep(std::time::Duration::from_millis(50)).await;
297
+ std::process::exit(0);
298
+ });
299
+
300
+ (StatusCode::OK, "shutting down").into_response()
301
+ }
302
+
303
/// POST /v1/responses handler: translate the incoming Responses-API request
/// into a Chat Completions call, proxy it upstream, and stream the translated
/// SSE events back. All errors are reported in-band as an SSE
/// `response.failed` event served with HTTP 200 (see `sse_error_response`).
async fn handle_responses(
    State(state): State<Arc<AppState>>,
    headers: HeaderMap,
    body: String,
) -> Response {
    let request_value: Value = match serde_json::from_str(&body) {
        Ok(v) => v,
        Err(err) => {
            return sse_error_response(
                "invalid_request",
                &format!("failed to parse request JSON: {err}"),
            );
        }
    };

    let bridge_request = match map_responses_to_chat_request(&request_value) {
        Ok(v) => v,
        Err(err) => return sse_error_response("invalid_request", &err.to_string()),
    };

    let mut upstream_request = state
        .client
        .post(&state.upstream_url)
        .bearer_auth(&state.api_key)
        .header(CONTENT_TYPE, "application/json")
        .json(&bridge_request.chat_request);

    // Forward a fixed allowlist of OpenAI/Codex headers untouched.
    for header_name in [
        "openai-organization",
        "openai-project",
        "x-openai-subagent",
        "x-codex-turn-state",
        "x-codex-turn-metadata",
    ] {
        if let Some(value) = headers.get(header_name) {
            upstream_request = upstream_request.header(header_name, value.clone());
        }
    }

    let upstream_response = match upstream_request.send().await {
        Ok(response) => response,
        Err(err) => {
            return sse_error_response(
                "upstream_transport_error",
                &format!("failed to call upstream chat endpoint: {err}"),
            );
        }
    };

    // Non-2xx upstream: surface status and (best-effort) body in-band.
    if !upstream_response.status().is_success() {
        let status = upstream_response.status();
        let body = upstream_response
            .text()
            .await
            .unwrap_or_else(|_| "<failed to read error body>".to_string());
        let message = format!("upstream returned {status}: {body}");
        return sse_error_response("upstream_error", &message);
    }

    // Success: lazily translate the upstream byte stream into Responses SSE.
    let response_stream =
        translate_upstream_stream(upstream_response.bytes_stream(), bridge_request.response_id);

    let body = Body::from_stream(response_stream);
    (
        StatusCode::OK,
        [
            (CONTENT_TYPE, HeaderValue::from_static("text/event-stream")),
            (CACHE_CONTROL, HeaderValue::from_static("no-cache")),
            (
                // Disables proxy response buffering so deltas flush promptly.
                HeaderName::from_static("x-accel-buffering"),
                HeaderValue::from_static("no"),
            ),
        ],
        body,
    )
        .into_response()
}
380
+
381
/// Translate an upstream Chat Completions SSE byte stream into a
/// Responses-API SSE event stream.
///
/// Event order: `response.created` first; `response.output_item.added` before
/// the first text delta; `response.output_text.delta` per content fragment;
/// then, after upstream ends, `response.output_item.done` for the assistant
/// message and each accumulated tool call; finally `response.completed`.
/// A mid-stream transport error yields `response.failed` and stops.
fn translate_upstream_stream<S>(
    upstream_stream: S,
    response_id: String,
) -> impl Stream<Item = Result<Bytes, std::convert::Infallible>> + Send + 'static
where
    S: Stream<Item = Result<Bytes, reqwest::Error>> + Send + 'static,
{
    stream! {
        let mut upstream_stream = Box::pin(upstream_stream);
        let mut parser = SseParser::default();
        let mut acc = StreamAccumulator::default();
        // Tracks whether the assistant message item has been opened yet.
        let mut assistant_item_added = false;

        yield Ok(sse_event(
            "response.created",
            &json!({
                "type": "response.created",
                "response": {
                    "id": response_id.clone(),
                }
            }),
        ));

        while let Some(chunk_result) = upstream_stream.next().await {
            let chunk = match chunk_result {
                Ok(chunk) => chunk,
                Err(err) => {
                    // Transport failure mid-stream: report in-band and stop.
                    yield Ok(sse_event(
                        "response.failed",
                        &json!({
                            "type": "response.failed",
                            "response": {
                                "id": response_id.clone(),
                                "error": {
                                    "code": "upstream_stream_error",
                                    "message": err.to_string(),
                                }
                            }
                        }),
                    ));
                    return;
                }
            };

            // NOTE(review): lossy UTF-8 conversion assumes multi-byte chars
            // are not split across chunk boundaries — TODO confirm upstream
            // framing guarantees this.
            let text = String::from_utf8_lossy(&chunk);
            let events = parser.feed(&text);
            for data in events {
                // Upstream terminator sentinel; not a JSON payload.
                if data == "[DONE]" {
                    continue;
                }

                match serde_json::from_str::<ChatChunk>(&data) {
                    Ok(chat_chunk) => {
                        // Later usage blocks overwrite earlier ones.
                        if let Some(usage) = chat_chunk.usage.clone() {
                            acc.usage = Some(usage);
                        }

                        for choice in chat_chunk.choices {
                            if let Some(delta) = choice.delta {
                                if let Some(content) = delta.content
                                    && !content.is_empty()
                                {
                                    // First text delta opens the assistant
                                    // message output item.
                                    if !assistant_item_added {
                                        yield Ok(sse_event(
                                            "response.output_item.added",
                                            &json!({
                                                "type": "response.output_item.added",
                                                "item": {
                                                    "type": "message",
                                                    "role": "assistant",
                                                    "content": [
                                                        {
                                                            "type": "output_text",
                                                            "text": "",
                                                        }
                                                    ]
                                                }
                                            }),
                                        ));
                                        assistant_item_added = true;
                                    }
                                    acc.assistant_text.push_str(&content);
                                    yield Ok(sse_event(
                                        "response.output_text.delta",
                                        &json!({
                                            "type": "response.output_text.delta",
                                            "delta": content,
                                        }),
                                    ));
                                }

                                if let Some(tool_calls) = delta.tool_calls {
                                    for tool_call in tool_calls {
                                        // Fall back to insertion order when
                                        // upstream omits `index`.
                                        let index = tool_call.index.unwrap_or(acc.tool_calls.len());
                                        let entry = acc.tool_calls.entry(index).or_default();

                                        if let Some(id) = tool_call.id {
                                            entry.id = Some(id);
                                        }

                                        if let Some(function) = tool_call.function {
                                            if let Some(name) = function.name {
                                                entry.name = Some(name);
                                            }
                                            // Arguments arrive as partial JSON
                                            // strings; concatenate fragments.
                                            if let Some(arguments) = function.arguments {
                                                entry.arguments.push_str(&arguments);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    Err(err) => {
                        // Skip undecodable chunks rather than failing the stream.
                        warn!("failed to decode upstream chat chunk: {err}");
                    }
                }
            }
        }

        // Anything left unterminated in the parser is unexpected; log it.
        if let Some(data) = parser.finish()
            && data != "[DONE]"
        {
            warn!("bridge received trailing SSE payload: {data}");
        }

        // Close the assistant message item with the full accumulated text.
        if !acc.assistant_text.is_empty() {
            yield Ok(sse_event(
                "response.output_item.done",
                &json!({
                    "type": "response.output_item.done",
                    "item": {
                        "type": "message",
                        "role": "assistant",
                        "content": [
                            {
                                "type": "output_text",
                                "text": acc.assistant_text,
                            }
                        ]
                    }
                }),
            ));
        }

        // Emit completed tool calls in index order (BTreeMap iteration).
        for (index, tool_call) in acc.tool_calls {
            // Synthesize ids/names when upstream never provided them.
            let call_id = tool_call
                .id
                .unwrap_or_else(|| format!("call_{}_{}", Uuid::now_v7(), index));
            let name = tool_call
                .name
                .unwrap_or_else(|| "unknown_function".to_string());

            yield Ok(sse_event(
                "response.output_item.done",
                &json!({
                    "type": "response.output_item.done",
                    "item": {
                        "type": "function_call",
                        "name": name,
                        "arguments": tool_call.arguments,
                        "call_id": call_id,
                    }
                }),
            ));
        }

        // Map chat usage fields onto the Responses usage shape.
        let usage_json = acc.usage.map(|usage| {
            json!({
                "input_tokens": usage.prompt_tokens,
                "input_tokens_details": null,
                "output_tokens": usage.completion_tokens,
                "output_tokens_details": null,
                "total_tokens": usage.total_tokens,
            })
        });

        yield Ok(sse_event(
            "response.completed",
            &json!({
                "type": "response.completed",
                "response": {
                    "id": response_id.clone(),
                    "usage": usage_json,
                }
            }),
        ));
    }
}
570
+
571
+ fn sse_event(event_name: &str, payload: &Value) -> Bytes {
572
+ let json_payload = serde_json::to_string(payload).unwrap_or_else(|_| {
573
+ "{\"type\":\"response.failed\",\"response\":{\"error\":{\"message\":\"internal serialization error\"}}}".to_string()
574
+ });
575
+ Bytes::from(format!("event: {event_name}\ndata: {json_payload}\n\n"))
576
+ }
577
+
578
+ fn sse_error_response(code: &str, message: &str) -> Response {
579
+ let response_id = format!("resp_bridge_{}", Uuid::now_v7());
580
+ let mut body = Vec::new();
581
+ body.extend_from_slice(&sse_event(
582
+ "response.created",
583
+ &json!({
584
+ "type": "response.created",
585
+ "response": {
586
+ "id": response_id.clone(),
587
+ }
588
+ }),
589
+ ));
590
+ body.extend_from_slice(&sse_event(
591
+ "response.failed",
592
+ &json!({
593
+ "type": "response.failed",
594
+ "response": {
595
+ "id": response_id,
596
+ "error": {
597
+ "code": code,
598
+ "message": message,
599
+ }
600
+ }
601
+ }),
602
+ ));
603
+
604
+ (
605
+ StatusCode::OK,
606
+ [
607
+ (CONTENT_TYPE, HeaderValue::from_static("text/event-stream")),
608
+ (CACHE_CONTROL, HeaderValue::from_static("no-cache")),
609
+ ],
610
+ body,
611
+ )
612
+ .into_response()
613
+ }
614
+
615
+ fn map_responses_to_chat_request(request: &Value) -> Result<BridgeRequest> {
616
+ let model = request
617
+ .get("model")
618
+ .and_then(Value::as_str)
619
+ .ok_or_else(|| anyhow!("missing `model`"))?
620
+ .to_string();
621
+
622
+ let instructions = request
623
+ .get("instructions")
624
+ .and_then(Value::as_str)
625
+ .unwrap_or_default()
626
+ .to_string();
627
+
628
+ let input_items = request
629
+ .get("input")
630
+ .and_then(Value::as_array)
631
+ .ok_or_else(|| anyhow!("missing `input` array"))?;
632
+
633
+ let tools = request
634
+ .get("tools")
635
+ .and_then(Value::as_array)
636
+ .cloned()
637
+ .unwrap_or_default();
638
+
639
+ let tool_choice = request
640
+ .get("tool_choice")
641
+ .cloned()
642
+ .unwrap_or_else(|| Value::String("auto".to_string()));
643
+
644
+ let parallel_tool_calls = request
645
+ .get("parallel_tool_calls")
646
+ .and_then(Value::as_bool)
647
+ .unwrap_or(true);
648
+
649
+ let mut messages = Vec::new();
650
+
651
+ if !instructions.trim().is_empty() {
652
+ messages.push(json!({
653
+ "role": "system",
654
+ "content": instructions,
655
+ }));
656
+ }
657
+
658
+ for item in input_items {
659
+ let item_type = item.get("type").and_then(Value::as_str).unwrap_or_default();
660
+
661
+ match item_type {
662
+ "message" => {
663
+ let role = item.get("role").and_then(Value::as_str).unwrap_or("user");
664
+ let content = item
665
+ .get("content")
666
+ .and_then(Value::as_array)
667
+ .map(Vec::as_slice)
668
+ .map_or_else(String::new, flatten_content_items);
669
+
670
+ if !content.trim().is_empty() {
671
+ messages.push(json!({
672
+ "role": role,
673
+ "content": content,
674
+ }));
675
+ }
676
+ }
677
+ "function_call_output" => {
678
+ let call_id = item
679
+ .get("call_id")
680
+ .and_then(Value::as_str)
681
+ .unwrap_or_default();
682
+ let output_text = item
683
+ .get("output")
684
+ .map(function_output_to_text)
685
+ .unwrap_or_default();
686
+ messages.push(json!({
687
+ "role": "tool",
688
+ "tool_call_id": call_id,
689
+ "content": output_text,
690
+ }));
691
+ }
692
+ "custom_tool_call_output" => {
693
+ let call_id = item
694
+ .get("call_id")
695
+ .and_then(Value::as_str)
696
+ .unwrap_or_default();
697
+ let output_text = item
698
+ .get("output")
699
+ .and_then(Value::as_str)
700
+ .unwrap_or_default();
701
+ messages.push(json!({
702
+ "role": "tool",
703
+ "tool_call_id": call_id,
704
+ "content": output_text,
705
+ }));
706
+ }
707
+ "mcp_tool_call_output" => {
708
+ let call_id = item
709
+ .get("call_id")
710
+ .and_then(Value::as_str)
711
+ .unwrap_or_default();
712
+ let output_text = item
713
+ .get("result")
714
+ .map(|v| v.to_string())
715
+ .unwrap_or_default();
716
+ messages.push(json!({
717
+ "role": "tool",
718
+ "tool_call_id": call_id,
719
+ "content": output_text,
720
+ }));
721
+ }
722
+ _ => {
723
+ warn!("ignoring unsupported input item type: {item_type}");
724
+ }
725
+ }
726
+ }
727
+
728
+ let chat_tools = normalize_chat_tools(tools);
729
+ let chat_tool_choice = normalize_tool_choice(tool_choice);
730
+
731
+ let response_id = format!("resp_bridge_{}", Uuid::now_v7());
732
+
733
+ let mut chat_request = json!({
734
+ "model": model,
735
+ "messages": messages,
736
+ "stream": true,
737
+ "stream_options": { "include_usage": true },
738
+ "tools": chat_tools,
739
+ "tool_choice": chat_tool_choice,
740
+ "parallel_tool_calls": parallel_tool_calls,
741
+ });
742
+
743
+ if chat_request
744
+ .get("tools")
745
+ .and_then(Value::as_array)
746
+ .is_some_and(Vec::is_empty)
747
+ {
748
+ if let Some(obj) = chat_request.as_object_mut() {
749
+ obj.remove("tools");
750
+ obj.remove("tool_choice");
751
+ }
752
+ }
753
+
754
+ Ok(BridgeRequest {
755
+ chat_request,
756
+ response_id,
757
+ })
758
+ }
759
+
760
+ fn flatten_content_items(items: &[Value]) -> String {
761
+ let mut parts = Vec::new();
762
+ for item in items {
763
+ let item_type = item.get("type").and_then(Value::as_str).unwrap_or_default();
764
+ if matches!(item_type, "input_text" | "output_text")
765
+ && let Some(text) = item.get("text").and_then(Value::as_str)
766
+ && !text.is_empty()
767
+ {
768
+ parts.push(text.to_string());
769
+ }
770
+ }
771
+
772
+ parts.join("\n")
773
+ }
774
+
775
+ fn function_output_to_text(value: &Value) -> String {
776
+ match value {
777
+ Value::String(s) => s.clone(),
778
+ Value::Array(items) => flatten_content_items(items),
779
+ other => other.to_string(),
780
+ }
781
+ }
782
+
783
+ fn normalize_chat_tools(tools: Vec<Value>) -> Vec<Value> {
784
+ tools
785
+ .into_iter()
786
+ .filter_map(|tool| {
787
+ if tool.get("type").and_then(Value::as_str) != Some("function") {
788
+ return Some(tool);
789
+ }
790
+
791
+ if tool.get("function").is_some() {
792
+ return Some(tool);
793
+ }
794
+
795
+ let name = tool.get("name")?.as_str()?.to_string();
796
+ let description = tool
797
+ .get("description")
798
+ .and_then(Value::as_str)
799
+ .unwrap_or_default()
800
+ .to_string();
801
+ let parameters = tool
802
+ .get("parameters")
803
+ .cloned()
804
+ .unwrap_or_else(|| json!({"type": "object", "properties": {}}));
805
+
806
+ Some(json!({
807
+ "type": "function",
808
+ "function": {
809
+ "name": name,
810
+ "description": description,
811
+ "parameters": parameters,
812
+ }
813
+ }))
814
+ })
815
+ .collect()
816
+ }
817
+
818
+ fn normalize_tool_choice(tool_choice: Value) -> Value {
819
+ if let Some(s) = tool_choice.as_str() {
820
+ return Value::String(s.to_string());
821
+ }
822
+
823
+ let Some(obj) = tool_choice.as_object() else {
824
+ return Value::String("auto".to_string());
825
+ };
826
+
827
+ if obj.get("function").is_some() {
828
+ return tool_choice;
829
+ }
830
+
831
+ if obj.get("type").and_then(Value::as_str) == Some("function")
832
+ && let Some(name) = obj.get("name").and_then(Value::as_str)
833
+ {
834
+ return json!({
835
+ "type": "function",
836
+ "function": {
837
+ "name": name,
838
+ }
839
+ });
840
+ }
841
+
842
+ Value::String("auto".to_string())
843
+ }
844
+
845
+ impl SseParser {
846
+ fn feed(&mut self, chunk: &str) -> Vec<String> {
847
+ self.buffer.push_str(chunk);
848
+ let mut events = Vec::new();
849
+
850
+ while let Some(pos) = self.buffer.find('\n') {
851
+ let mut line = self.buffer[..pos].to_string();
852
+ self.buffer.drain(..=pos);
853
+
854
+ if line.ends_with('\r') {
855
+ line.pop();
856
+ }
857
+
858
+ if line.is_empty() {
859
+ if !self.current_data_lines.is_empty() {
860
+ events.push(self.current_data_lines.join("\n"));
861
+ self.current_data_lines.clear();
862
+ }
863
+ continue;
864
+ }
865
+
866
+ if let Some(rest) = line.strip_prefix("data:") {
867
+ let data = rest.strip_prefix(' ').unwrap_or(rest).to_string();
868
+ self.current_data_lines.push(data);
869
+ }
870
+ }
871
+
872
+ events
873
+ }
874
+
875
+ fn finish(&mut self) -> Option<String> {
876
+ if self.current_data_lines.is_empty() {
877
+ None
878
+ } else {
879
+ Some(self.current_data_lines.join("\n"))
880
+ }
881
+ }
882
+ }
883
+
884
#[cfg(test)]
mod tests {
    use super::*;
    use futures::stream;

    // --- request mapping ---

    #[test]
    fn maps_responses_request_to_chat_request_with_function_tool() {
        let input = json!({
            "model": "gpt-4.1",
            "instructions": "You are helpful",
            "input": [
                {
                    "type": "message",
                    "role": "user",
                    "content": [{"type": "input_text", "text": "hello"}]
                }
            ],
            "tools": [
                {
                    "type": "function",
                    "name": "get_weather",
                    "description": "Get weather",
                    "parameters": {"type":"object","properties":{"city":{"type":"string"}}}
                }
            ],
            "tool_choice": "auto",
            "parallel_tool_calls": true
        });

        let req = map_responses_to_chat_request(&input).expect("should map");
        // system message (from instructions) + user message
        let messages = req
            .chat_request
            .get("messages")
            .and_then(Value::as_array)
            .expect("messages array");
        assert_eq!(messages.len(), 2);

        // flat tool must have been wrapped into {type, function:{...}}
        let tools = req
            .chat_request
            .get("tools")
            .and_then(Value::as_array)
            .expect("tools array");
        assert_eq!(
            tools[0]
                .get("function")
                .and_then(Value::as_object)
                .is_some(),
            true
        );
    }

    // --- SSE parsing ---

    #[test]
    fn sse_parser_collects_data_events() {
        let mut parser = SseParser::default();
        let chunk = "event: message\ndata: {\"a\":1}\n\n";
        let events = parser.feed(chunk);
        assert_eq!(events.len(), 1);
        assert_eq!(events[0], "{\"a\":1}");
    }

    #[test]
    fn normalize_tool_choice_wraps_function_name() {
        let choice = json!({"type":"function", "name":"f"});
        let normalized = normalize_tool_choice(choice);
        assert_eq!(
            normalized,
            json!({"type":"function", "function": {"name":"f"}})
        );
    }

    #[test]
    fn function_output_text_handles_array_items() {
        let value = json!([
            {"type": "input_text", "text": "line1"},
            {"type": "output_text", "text": "line2"}
        ]);
        assert_eq!(function_output_to_text(&value), "line1\nline2");
    }

    #[test]
    fn normalize_chat_tools_passes_non_function_tool() {
        let tools = vec![json!({"type": "web_search_preview"})];
        let out = normalize_chat_tools(tools);
        assert_eq!(out, vec![json!({"type": "web_search_preview"})]);
    }

    #[test]
    fn flatten_content_items_filters_non_text() {
        let items = vec![
            json!({"type":"input_text","text":"a"}),
            json!({"type":"input_image","image_url":"x"}),
            json!({"type":"output_text","text":"b"}),
        ];
        assert_eq!(flatten_content_items(&items), "a\nb");
    }

    #[test]
    fn map_supports_function_call_output_to_tool_message() {
        let input = json!({
            "model": "gpt-4.1",
            "input": [
                {
                    "type": "function_call_output",
                    "call_id": "call_1",
                    "output": "{\"ok\":true}"
                }
            ],
            "tools": []
        });

        let req = map_responses_to_chat_request(&input).expect("should map");
        let messages = req
            .chat_request
            .get("messages")
            .and_then(Value::as_array)
            .expect("messages");
        assert_eq!(messages.len(), 1);
        assert_eq!(messages[0]["role"], "tool");
        assert_eq!(messages[0]["tool_call_id"], "call_1");
    }

    #[test]
    fn map_defaults_tool_choice_when_invalid() {
        // Non-string, non-object tool_choice falls back to "auto".
        let input = json!({
            "model": "gpt-4.1",
            "input": [{"type":"message","role":"user","content":[{"type":"input_text","text":"hi"}]}],
            "tools": [{"type":"function","name":"f","parameters":{"type":"object"}}],
            "tool_choice": 123
        });

        let req = map_responses_to_chat_request(&input).expect("should map");
        assert_eq!(req.chat_request["tool_choice"], "auto");
    }

    #[test]
    fn map_requires_input_array() {
        let input = json!({"model":"gpt-4.1"});
        let err = map_responses_to_chat_request(&input).expect_err("must fail");
        assert!(err.to_string().contains("missing `input` array"));
    }

    #[test]
    fn parser_handles_split_chunks() {
        // An event split across two feeds must only surface once complete.
        let mut parser = SseParser::default();
        let first = parser.feed("data: {\"a\":");
        assert!(first.is_empty());
        let second = parser.feed("1}\n\n");
        assert_eq!(second, vec!["{\"a\":1}".to_string()]);
    }

    #[test]
    fn map_removes_tool_fields_when_tools_empty() {
        let input = json!({
            "model": "gpt-4.1",
            "input": [{"type":"message","role":"user","content":[{"type":"input_text","text":"hi"}]}],
            "tools": []
        });
        let req = map_responses_to_chat_request(&input).expect("ok");
        let obj = req.chat_request.as_object().expect("object");
        assert!(!obj.contains_key("tools"));
        assert!(!obj.contains_key("tool_choice"));
    }

    #[test]
    fn sse_error_response_contains_failed_event() {
        // Errors are always served as HTTP 200 with in-band SSE failure events.
        let response = sse_error_response("x", "y");
        assert_eq!(response.status(), StatusCode::OK);
    }

    #[test]
    fn parser_ignores_non_data_lines() {
        // SSE comments (": ...") and `event:` fields must not leak into data.
        let mut parser = SseParser::default();
        let out = parser.feed(": ping\nevent: hello\ndata: {\"z\":1}\n\n");
        assert_eq!(out, vec!["{\"z\":1}".to_string()]);
    }

    #[test]
    fn normalize_chat_tools_keeps_function_already_wrapped() {
        let tools = vec![json!({
            "type": "function",
            "function": {"name":"f", "parameters": {"type":"object"}}
        })];
        let out = normalize_chat_tools(tools.clone());
        assert_eq!(out, tools);
    }

    #[test]
    fn normalize_tool_choice_preserves_wrapped_choice() {
        let choice = json!({"type":"function", "function":{"name":"do_it"}});
        assert_eq!(normalize_tool_choice(choice.clone()), choice);
    }

    #[test]
    fn function_output_to_text_for_object_uses_json() {
        let value = json!({"ok": true});
        assert_eq!(function_output_to_text(&value), "{\"ok\":true}");
    }

    #[test]
    fn map_includes_system_message_from_instructions() {
        let input = json!({
            "model": "gpt-4.1",
            "instructions": "sys",
            "input": [{"type":"message","role":"user","content":[{"type":"input_text","text":"hi"}]}],
            "tools": []
        });
        let req = map_responses_to_chat_request(&input).expect("ok");
        let messages = req.chat_request["messages"].as_array().expect("array");
        assert_eq!(messages[0]["role"], "system");
    }

    // --- stream translation ---

    #[tokio::test]
    async fn stream_emits_output_item_added_before_text_delta() {
        let upstream = stream::iter(vec![Ok::<Bytes, reqwest::Error>(Bytes::from(
            "data: {\"choices\":[{\"delta\":{\"content\":\"Hi\"}}]}\n\n\
            data: [DONE]\n\n",
        ))]);
        let mut output = Box::pin(translate_upstream_stream(upstream, "resp_1".to_string()));
        let mut payload = String::new();

        while let Some(event) = output.next().await {
            payload.push_str(&String::from_utf8_lossy(&event.expect("stream event")));
        }

        // The assistant item must be opened before the first text delta.
        let added_idx = payload
            .find("event: response.output_item.added")
            .expect("added event");
        let delta_idx = payload
            .find("event: response.output_text.delta")
            .expect("delta event");
        assert!(added_idx < delta_idx);
    }

    // --- configuration resolution ---

    #[test]
    fn resolve_config_prefers_cli_over_file_and_defaults() {
        let args = Args {
            config: PathBuf::from("conf.toml"),
            host: Some("0.0.0.0".to_string()),
            port: Some(9999),
            upstream_url: None,
            api_key_env: Some("CLI_API_KEY".to_string()),
            server_info: None,
            http_shutdown: true,
        };
        let file = FileConfig {
            host: Some("127.0.0.1".to_string()),
            port: Some(8787),
            upstream_url: Some("https://example.com/v1/chat/completions".to_string()),
            api_key_env: Some("FILE_API_KEY".to_string()),
            server_info: Some(PathBuf::from("/tmp/server.json")),
            http_shutdown: Some(false),
        };

        let resolved = resolve_config(args, Some(file));
        assert_eq!(resolved.host, "0.0.0.0");
        assert_eq!(resolved.port, Some(9999));
        assert_eq!(
            resolved.upstream_url,
            "https://example.com/v1/chat/completions"
        );
        assert_eq!(resolved.api_key_env, "CLI_API_KEY");
        assert_eq!(resolved.server_info, Some(PathBuf::from("/tmp/server.json")));
        assert!(resolved.http_shutdown);
    }

    #[test]
    fn resolve_config_uses_defaults_when_missing() {
        let args = Args {
            config: PathBuf::from("conf.toml"),
            host: None,
            port: None,
            upstream_url: None,
            api_key_env: None,
            server_info: None,
            http_shutdown: false,
        };

        let resolved = resolve_config(args, None);
        assert_eq!(resolved.host, "127.0.0.1");
        assert_eq!(resolved.port, None);
        assert_eq!(
            resolved.upstream_url,
            "https://api.openai.com/v1/chat/completions"
        );
        assert_eq!(resolved.api_key_env, "OPENAI_API_KEY");
        assert_eq!(resolved.server_info, None);
        assert!(!resolved.http_shutdown);
    }
}