@heungtae/codex-chat-bridge 0.1.0 → 0.1.1
- package/README.md +4 -4
- package/USAGE.md +4 -3
- package/package.json +2 -3
- package/src/main.rs +58 -5
- package/conf.toml +0 -10
package/README.md  CHANGED

@@ -14,11 +14,10 @@ Node.js 20+ and Rust/Cargo are required because npm installation compiles the Ru
 npm install @heungtae/codex-chat-bridge
 ```

-
+Install globally if you want the `codex-chat-bridge` command on PATH:

 ```bash
-npm
-npm install @heungtae/codex-chat-bridge --registry <private-registry>
+npm install -g @heungtae/codex-chat-bridge
 ```

 ## What it does
@@ -39,7 +38,8 @@ npm install @heungtae/codex-chat-bridge --registry <private-registry>
 npx @heungtae/codex-chat-bridge --port 8787 --api-key-env OPENAI_API_KEY
 ```

-By default, the bridge
+By default, the bridge uses `~/.config/codex-chat-bridge/conf.toml`.
+If the file does not exist, it is created automatically with commented defaults.
 CLI flags override file values.

 Or run the binary directly via Cargo:
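The override rule described in the README (CLI flags over `conf.toml` values over built-in defaults) can be pictured with a minimal sketch. This is illustration only, not the package's `resolve_config` implementation, whose body is not part of this diff; the 8787 port value comes from the commented template, and 0 merely stands in for the "random port" built-in default.

```rust
// Illustration only: pick a value by the documented precedence
// CLI flag > config file > built-in default.
fn pick<T>(cli: Option<T>, file: Option<T>, default: T) -> T {
    cli.or(file).unwrap_or(default)
}

fn main() {
    // No --port flag, conf.toml sets 8787, the built-in default stands in as 0.
    let port: u16 = pick(None, Some(8787), 0);
    assert_eq!(port, 8787);

    // A --port flag always wins over the file value.
    let port: u16 = pick(Some(9999), Some(8787), 0);
    assert_eq!(port, 9999);
}
```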
package/USAGE.md  CHANGED

@@ -60,8 +60,9 @@ npx @heungtae/codex-chat-bridge -- \
 --api-key-env OPENAI_API_KEY
 ```

-By default
-
+By default, `~/.config/codex-chat-bridge/conf.toml` is used.
+If the file does not exist, it is created automatically from the default template.
+The precedence is `CLI options > config file > built-in defaults`.

 Or run directly via Cargo:

@@ -78,7 +79,7 @@ npx @heungtae/codex-chat-bridge --config /path/to/conf.toml
 ```

 Option descriptions:
-- `--config <FILE>`: config file path (default:
+- `--config <FILE>`: config file path (default: `~/.config/codex-chat-bridge/conf.toml`)
 - `--port`: bridge port (default: random port)
 - `--api-key-env`: name of the environment variable holding the API key for upstream calls
 - `--upstream-url`: defaults to `https://api.openai.com/v1/chat/completions`
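For orientation, the flags listed above could map onto the crate's clap-derived `Args` struct roughly as sketched below. Only the `config` and `host` fields appear in this diff; the remaining fields, their types, and their attributes are assumptions inferred from the option descriptions and the test code in `src/main.rs`.

```rust
use std::path::PathBuf;

use clap::Parser;

/// Sketch of the documented CLI surface; everything beyond `config`
/// and `host` is assumed, not taken from the diff.
#[derive(Parser)]
struct Args {
    /// Config file path (default: ~/.config/codex-chat-bridge/conf.toml)
    #[arg(long, value_name = "FILE")]
    config: Option<PathBuf>,

    #[arg(long)]
    host: Option<String>,

    /// Bridge port; a random port is used when omitted.
    #[arg(long)]
    port: Option<u16>,

    /// Environment variable holding the API key for upstream calls.
    #[arg(long)]
    api_key_env: Option<String>,

    /// Defaults to https://api.openai.com/v1/chat/completions
    #[arg(long)]
    upstream_url: Option<String>,
}
```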
package/package.json  CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@heungtae/codex-chat-bridge",
-  "version": "0.1.0",
+  "version": "0.1.1",
   "description": "Responses-to-chat/completions bridge for Codex workflows",
   "license": "Apache-2.0",
   "type": "commonjs",
@@ -11,13 +11,12 @@
     "node": ">=20"
   },
   "publishConfig": {
-    "access": "
+    "access": "public"
   },
   "files": [
     "bin/",
     "scripts/",
     "src/",
-    "conf.toml",
     "Cargo.toml",
     "Cargo.lock",
     "README.md",
package/src/main.rs  CHANGED

@@ -41,8 +41,12 @@ use uuid::Uuid;
     about = "Responses-to-Chat completions bridge"
 )]
 struct Args {
-    #[arg(
-
+    #[arg(
+        long,
+        value_name = "FILE",
+        help = "config file path (default: ~/.config/codex-chat-bridge/conf.toml)"
+    )]
+    config: Option<PathBuf>,

     #[arg(long)]
     host: Option<String>,
@@ -83,6 +87,18 @@ struct ResolvedConfig {
     http_shutdown: bool,
 }

+const DEFAULT_CONFIG_TEMPLATE: &str = r#"# codex-chat-bridge runtime configuration
+#
+# Priority: CLI flags > config file > built-in defaults
+
+# host = "127.0.0.1"
+# port = 8787
+# upstream_url = "https://api.openai.com/v1/chat/completions"
+# api_key_env = "OPENAI_API_KEY"
+# server_info = "/tmp/codex-chat-bridge-info.json"
+# http_shutdown = false
+"#;
+
 #[derive(Clone)]
 struct AppState {
     client: Client,
@@ -188,7 +204,9 @@ async fn main() -> Result<()> {
         .init();

     let args = Args::parse();
-    let
+    let config_path = resolve_config_path(args.config.clone())?;
+    ensure_default_config_file(&config_path)?;
+    let file_config = load_file_config(&config_path)?;
     let config = resolve_config(args, file_config);

     let api_key = std::env::var(&config.api_key_env)
@@ -243,6 +261,35 @@ fn load_file_config(path: &Path) -> Result<Option<FileConfig>> {
     Ok(Some(parsed))
 }

+fn resolve_config_path(cli_path: Option<PathBuf>) -> Result<PathBuf> {
+    if let Some(path) = cli_path {
+        return Ok(path);
+    }
+
+    let home = std::env::var_os("HOME")
+        .ok_or_else(|| anyhow!("HOME environment variable is not set"))?;
+    Ok(PathBuf::from(home)
+        .join(".config")
+        .join("codex-chat-bridge")
+        .join("conf.toml"))
+}
+
+fn ensure_default_config_file(path: &Path) -> Result<()> {
+    if path.exists() {
+        return Ok(());
+    }
+
+    if let Some(parent) = path.parent() {
+        fs::create_dir_all(parent)
+            .with_context(|| format!("creating config directory {}", parent.display()))?;
+    }
+
+    fs::write(path, DEFAULT_CONFIG_TEMPLATE)
+        .with_context(|| format!("creating default config file {}", path.display()))?;
+    info!("created default config file {}", path.display());
+    Ok(())
+}
+
 fn resolve_config(args: Args, file_config: Option<FileConfig>) -> ResolvedConfig {
     let file_config = file_config.unwrap_or_default();

@@ -1117,7 +1164,7 @@ mod tests {
     #[test]
     fn resolve_config_prefers_cli_over_file_and_defaults() {
         let args = Args {
-            config:
+            config: None,
             host: Some("0.0.0.0".to_string()),
             port: Some(9999),
             upstream_url: None,
@@ -1149,7 +1196,7 @@ mod tests {
     #[test]
     fn resolve_config_uses_defaults_when_missing() {
         let args = Args {
-            config:
+            config: None,
             host: None,
             port: None,
             upstream_url: None,
@@ -1169,4 +1216,10 @@ mod tests {
         assert_eq!(resolved.server_info, None);
         assert!(!resolved.http_shutdown);
     }
+
+    #[test]
+    fn resolve_config_path_prefers_cli_value() {
+        let path = resolve_config_path(Some(PathBuf::from("/tmp/custom.toml"))).expect("ok");
+        assert_eq!(path, PathBuf::from("/tmp/custom.toml"));
+    }
 }
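A sketch of how the new `ensure_default_config_file` helper could be exercised next to the existing tests, assuming the signatures shown above; the test name and temp-directory layout are illustrative only and are not part of the package.

```rust
#[test]
fn ensure_default_config_file_writes_template_once() {
    let dir = std::env::temp_dir().join("codex-chat-bridge-doc-example");
    let path = dir.join("conf.toml");
    let _ = std::fs::remove_dir_all(&dir);

    // First call creates the parent directory and writes the commented template.
    ensure_default_config_file(&path).expect("create default config");
    let written = std::fs::read_to_string(&path).expect("read back");
    assert!(written.contains("# codex-chat-bridge runtime configuration"));

    // Second call returns early because the file already exists.
    ensure_default_config_file(&path).expect("existing file is left alone");

    let _ = std::fs::remove_dir_all(&dir);
}
```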
package/conf.toml  DELETED

@@ -1,10 +0,0 @@
-# codex-chat-bridge runtime configuration
-#
-# Priority: CLI flags > conf.toml > built-in defaults
-
-# host = "127.0.0.1"
-# port = 8787
-# upstream_url = "https://api.openai.com/v1/chat/completions"
-# api_key_env = "OPENAI_API_KEY"
-# server_info = "/tmp/codex-chat-bridge-info.json"
-# http_shutdown = false