@heungtae/codex-chat-bridge 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -5
- package/USAGE.md +16 -4
- package/package.json +2 -3
- package/scripts/run_codex_with_bridge.sh +2 -4
- package/src/main.rs +60 -6
- package/conf.toml +0 -10
package/README.md
CHANGED

@@ -14,11 +14,10 @@ Node.js 20+ and Rust/Cargo are required because npm installation compiles the Rust
 npm install @heungtae/codex-chat-bridge
 ```
 
-
+Install globally if you want the `codex-chat-bridge` command on PATH:
 
 ```bash
-npm
-npm install @heungtae/codex-chat-bridge --registry <private-registry>
+npm install -g @heungtae/codex-chat-bridge
 ```
 
 ## What it does
@@ -27,7 +26,7 @@ npm install @heungtae/codex-chat-bridge --registry <private-registry>
 - Translates request payload into `POST /v1/chat/completions`
 - Streams upstream Chat Completions chunks back as Responses-style SSE events:
   - `response.created`
-  - `response.output_item.added` (assistant
+  - `response.output_item.added` (assistant text starts; only emitted when text delta exists)
   - `response.output_text.delta`
   - `response.output_item.done` (assistant message and function calls)
   - `response.completed`
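The changed line above tightens when `response.output_item.added` fires: a turn that produces no assistant text (for example, one that only emits a function call) skips the `added`/`delta` pair. A minimal sketch of that ordering, assuming the gating works as the docs describe; only the event names come from the README, the function and flags are illustrative:

```rust
// Sketch of the per-turn SSE event order the README lists.
// Only the event names are from the docs; the gating flags are assumed.
fn turn_event_order(has_text_delta: bool, has_function_call: bool) -> Vec<&'static str> {
    let mut events = vec!["response.created"];
    if has_text_delta {
        // Per the changed doc line: only emitted once a text delta exists.
        events.push("response.output_item.added");
        events.push("response.output_text.delta");
    }
    if has_text_delta || has_function_call {
        events.push("response.output_item.done");
    }
    events.push("response.completed");
    events
}

fn main() {
    // A text-producing turn sees the full five-event sequence.
    assert_eq!(
        turn_event_order(true, false),
        [
            "response.created",
            "response.output_item.added",
            "response.output_text.delta",
            "response.output_item.done",
            "response.completed",
        ]
    );
}
```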
@@ -39,7 +38,8 @@ npm install @heungtae/codex-chat-bridge --registry <private-registry>
 npx @heungtae/codex-chat-bridge --port 8787 --api-key-env OPENAI_API_KEY
 ```
 
-By default, the bridge
+By default, the bridge uses `~/.config/codex-chat-bridge/conf.toml`.
+If the file does not exist, it is created automatically with commented defaults.
 CLI flags override file values.
 
 Or run the binary directly via Cargo:
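The stated precedence (CLI flags over file values over built-in defaults) amounts to `Option` chaining; a minimal sketch, with the helper name and values invented for illustration (the real logic lives in `resolve_config` in `src/main.rs`):

```rust
// Hypothetical helper mirroring the documented precedence:
// CLI flag > config-file value > built-in default.
fn pick<T>(cli: Option<T>, file: Option<T>, default: T) -> T {
    cli.or(file).unwrap_or(default)
}

fn main() {
    // --port 9999 on the CLI beats port = 8080 in conf.toml and the default.
    assert_eq!(pick(Some(9999), Some(8080), 8787), 9999);
    // With no CLI flag, the file value wins over the default.
    assert_eq!(pick(None, Some(8080), 8787), 8080);
    // With neither, the built-in default applies.
    assert_eq!(pick::<u16>(None, None, 8787), 8787);
}
```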
@@ -73,6 +73,11 @@ Use `scripts/run_codex_with_bridge.sh` to run the bridge and `codex exec` together
 scripts/run_codex_with_bridge.sh "Summarize this repo."
 ```
 
+Defaults:
+- `API_KEY_ENV=OPENAI_API_KEY`
+- `UPSTREAM_URL=https://api.openai.com/v1/chat/completions`
+- The script does not force `model`; pass it as extra args when needed (for example: `--model gpt-4.1`).
+
 ## Package Scripts
 
 ```bash
package/USAGE.md
CHANGED

@@ -9,7 +9,13 @@ npm install @heungtae/codex-chat-bridge
 npx @heungtae/codex-chat-bridge --help
 ```
 
-
+Public npm publish (the current default):
+
+```bash
+npm publish --access public
+```
+
+Private-registry publish (optional):
 
 ```bash
 npm publish --registry <private-registry> --access restricted
@@ -60,8 +66,9 @@ npx @heungtae/codex-chat-bridge -- \
 --api-key-env OPENAI_API_KEY
 ```
 
-By default
-
+By default, `~/.config/codex-chat-bridge/conf.toml` is used.
+If the file does not exist, it is created automatically from the default template.
+The precedence is `CLI options > config file > built-in defaults`.
 
 Or run directly via Cargo:
 
@@ -78,7 +85,7 @@ npx @heungtae/codex-chat-bridge --config /path/to/conf.toml
 ```
 
 Option reference:
-- `--config <FILE>`: config file path (default:
+- `--config <FILE>`: config file path (default: `~/.config/codex-chat-bridge/conf.toml`)
 - `--port`: bridge port (default: a random port)
 - `--api-key-env`: name of the environment variable that holds the upstream API key
 - `--upstream-url`: defaults to `https://api.openai.com/v1/chat/completions`
@@ -133,6 +140,11 @@ codex exec 'Run a simple test'
 scripts/run_codex_with_bridge.sh "Explain the structure of this repository"
 ```
 
+Defaults:
+- `API_KEY_ENV=OPENAI_API_KEY`
+- `UPSTREAM_URL=https://api.openai.com/v1/chat/completions`
+- The wrapper script does not force `model` (pass it as an extra argument when needed)
+
 Passing extra arguments:
 
 ```bash
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@heungtae/codex-chat-bridge",
-  "version": "0.1.0",
+  "version": "0.1.2",
   "description": "Responses-to-chat/completions bridge for Codex workflows",
   "license": "Apache-2.0",
   "type": "commonjs",

@@ -11,13 +11,12 @@
     "node": ">=20"
   },
   "publishConfig": {
-    "access": "restricted"
+    "access": "public"
   },
   "files": [
     "bin/",
     "scripts/",
     "src/",
-    "conf.toml",
     "Cargo.toml",
     "Cargo.lock",
     "README.md",
package/scripts/run_codex_with_bridge.sh
CHANGED

@@ -9,8 +9,8 @@ fi
 PROMPT="$1"
 shift || true
 
-API_KEY_ENV="${API_KEY_ENV:-
-UPSTREAM_URL="${UPSTREAM_URL:-https://
+API_KEY_ENV="${API_KEY_ENV:-OPENAI_API_KEY}"
+UPSTREAM_URL="${UPSTREAM_URL:-https://api.openai.com/v1/chat/completions}"
 BRIDGE_PORT="${BRIDGE_PORT:-8787}"
 SERVER_INFO="${SERVER_INFO:-/tmp/codex-chat-bridge-info.json}"
 CODEX_BRIDGE_RUST_LOG="${CODEX_BRIDGE_RUST_LOG:-${RUST_LOG:-info,codex_core::rollout::list=off}}"

@@ -42,6 +42,4 @@ done
 RUST_LOG="${CODEX_BRIDGE_RUST_LOG}" codex exec \
   -c "model_providers.chat-bridge={name='Chat Bridge',base_url='http://127.0.0.1:${BRIDGE_PORT}/v1',env_key='${API_KEY_ENV}',wire_api='responses'}" \
   -c 'model_provider="chat-bridge"' \
-  -c 'model="arcee-ai/trinity-large-preview:free"' \
-  -c 'web_search="disabled"' \
   "$PROMPT" "$@"
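Since the wrapper no longer pins `model` or `web_search`, callers choose the model themselves; extra arguments flow through to `codex exec` via `"$@"`. A sketch of driving the script programmatically, using the env-var knobs shown above (the `Command`-based launcher and the `gpt-4.1` value are illustrative, not part of the package):

```rust
use std::process::Command;

// Illustrative launcher only; equivalent to:
//   BRIDGE_PORT=8787 scripts/run_codex_with_bridge.sh "Summarize this repo." --model gpt-4.1
fn main() -> std::io::Result<()> {
    let status = Command::new("scripts/run_codex_with_bridge.sh")
        .env("API_KEY_ENV", "OPENAI_API_KEY") // the script's default, shown explicitly
        .env("BRIDGE_PORT", "8787")
        .arg("Summarize this repo.") // $1: the prompt
        .args(["--model", "gpt-4.1"]) // forwarded to `codex exec` via "$@"
        .status()?;
    std::process::exit(status.code().unwrap_or(1));
}
```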
package/src/main.rs
CHANGED

@@ -38,11 +38,16 @@ use uuid::Uuid;
 #[derive(Debug, Clone, Parser)]
 #[command(
     name = "codex-chat-bridge",
-    about = "Responses-to-Chat completions bridge"
+    about = "Responses-to-Chat completions bridge",
+    version
 )]
 struct Args {
-    #[arg(
-
+    #[arg(
+        long,
+        value_name = "FILE",
+        help = "config file path (default: ~/.config/codex-chat-bridge/conf.toml)"
+    )]
+    config: Option<PathBuf>,
 
     #[arg(long)]
     host: Option<String>,
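With clap's derive API, the new optional field becomes a `--config <FILE>` flag automatically. A quick parsing sketch, assuming the `Args` struct above is in scope (the module and test names are invented for illustration):

```rust
#[cfg(test)]
mod config_flag_sketch {
    use super::Args;
    use clap::Parser;
    use std::path::PathBuf;

    // Hypothetical test: `--config` parses into Args::config, and omitting it
    // leaves the field None so the default-path logic can take over.
    #[test]
    fn parses_optional_config_flag() {
        let args = Args::try_parse_from(["codex-chat-bridge", "--config", "/tmp/conf.toml"])
            .expect("valid flags");
        assert_eq!(args.config, Some(PathBuf::from("/tmp/conf.toml")));

        let args = Args::try_parse_from(["codex-chat-bridge"]).expect("no flags required");
        assert_eq!(args.config, None);
    }
}
```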
@@ -83,6 +88,18 @@ struct ResolvedConfig {
     http_shutdown: bool,
 }
 
+const DEFAULT_CONFIG_TEMPLATE: &str = r#"# codex-chat-bridge runtime configuration
+#
+# Priority: CLI flags > config file > built-in defaults
+
+# host = "127.0.0.1"
+# port = 8787
+# upstream_url = "https://api.openai.com/v1/chat/completions"
+# api_key_env = "OPENAI_API_KEY"
+# server_info = "/tmp/codex-chat-bridge-info.json"
+# http_shutdown = false
+"#;
+
 #[derive(Clone)]
 struct AppState {
     client: Client,
@@ -188,7 +205,9 @@ async fn main() -> Result<()> {
         .init();
 
     let args = Args::parse();
-    let
+    let config_path = resolve_config_path(args.config.clone())?;
+    ensure_default_config_file(&config_path)?;
+    let file_config = load_file_config(&config_path)?;
     let config = resolve_config(args, file_config);
 
     let api_key = std::env::var(&config.api_key_env)
@@ -243,6 +262,35 @@ fn load_file_config(path: &Path) -> Result<Option<FileConfig>> {
     Ok(Some(parsed))
 }
 
+fn resolve_config_path(cli_path: Option<PathBuf>) -> Result<PathBuf> {
+    if let Some(path) = cli_path {
+        return Ok(path);
+    }
+
+    let home = std::env::var_os("HOME")
+        .ok_or_else(|| anyhow!("HOME environment variable is not set"))?;
+    Ok(PathBuf::from(home)
+        .join(".config")
+        .join("codex-chat-bridge")
+        .join("conf.toml"))
+}
+
+fn ensure_default_config_file(path: &Path) -> Result<()> {
+    if path.exists() {
+        return Ok(());
+    }
+
+    if let Some(parent) = path.parent() {
+        fs::create_dir_all(parent)
+            .with_context(|| format!("creating config directory {}", parent.display()))?;
+    }
+
+    fs::write(path, DEFAULT_CONFIG_TEMPLATE)
+        .with_context(|| format!("creating default config file {}", path.display()))?;
+    info!("created default config file {}", path.display());
+    Ok(())
+}
+
 fn resolve_config(args: Args, file_config: Option<FileConfig>) -> ResolvedConfig {
     let file_config = file_config.unwrap_or_default();
 
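The test added further down in this diff covers only the CLI branch of `resolve_config_path`; the fallback branch composes the XDG-style default under `$HOME`. A companion sketch for that branch (the test name and `HOME` value are invented; `std::env::set_var` is assumed safe here, as in pre-2024-edition Rust):

```rust
#[test]
fn resolve_config_path_falls_back_to_home_default() {
    // Illustration only: pin HOME so the expected path is deterministic.
    std::env::set_var("HOME", "/home/alice");
    let path = resolve_config_path(None).expect("HOME is set");
    assert_eq!(
        path,
        std::path::PathBuf::from("/home/alice/.config/codex-chat-bridge/conf.toml")
    );
}
```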
@@ -1117,7 +1165,7 @@ mod tests {
     #[test]
     fn resolve_config_prefers_cli_over_file_and_defaults() {
         let args = Args {
-            config:
+            config: None,
             host: Some("0.0.0.0".to_string()),
             port: Some(9999),
             upstream_url: None,
@@ -1149,7 +1197,7 @@
     #[test]
     fn resolve_config_uses_defaults_when_missing() {
         let args = Args {
-            config:
+            config: None,
            host: None,
             port: None,
             upstream_url: None,
@@ -1169,4 +1217,10 @@
         assert_eq!(resolved.server_info, None);
         assert!(!resolved.http_shutdown);
     }
+
+    #[test]
+    fn resolve_config_path_prefers_cli_value() {
+        let path = resolve_config_path(Some(PathBuf::from("/tmp/custom.toml"))).expect("ok");
+        assert_eq!(path, PathBuf::from("/tmp/custom.toml"));
+    }
 }
package/conf.toml
DELETED

@@ -1,10 +0,0 @@
-# codex-chat-bridge runtime configuration
-#
-# Priority: CLI flags > conf.toml > built-in defaults
-
-# host = "127.0.0.1"
-# port = 8787
-# upstream_url = "https://api.openai.com/v1/chat/completions"
-# api_key_env = "OPENAI_API_KEY"
-# server_info = "/tmp/codex-chat-bridge-info.json"
-# http_shutdown = false