@jhzhu89/m2r 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +167 -0
  2. package/dist/index.js +6 -3
  3. package/package.json +1 -1
package/README.md ADDED
@@ -0,0 +1,167 @@
1
+ # m2r
2
+
3
+ Anthropic Messages API to Azure OpenAI Responses API proxy.
4
+
5
+ Enables Claude Code CLI and other Anthropic-compatible clients to use Azure OpenAI as the backend.
6
+
7
+ ## Installation
8
+
9
+ ```bash
10
+ npm install -g @jhzhu89/m2r
11
+ ```
12
+
13
+ ## Configuration
14
+
15
+ Create `~/.m2rrc` with your Azure OpenAI credentials:
16
+
17
+ ```bash
18
+ AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com
19
+ AZURE_OPENAI_API_KEY=your-api-key
20
+ AZURE_OPENAI_DEPLOYMENT=your-deployment-name
21
+ PROXY_PORT=8001
22
+ LOG_LEVEL=info
23
+ ```
24
+
25
+ ## Usage
26
+
27
+ Start the proxy server:
28
+
29
+ ```bash
30
+ m2r
31
+ ```
32
+
33
+ Then point your Anthropic client to `http://localhost:8001`.
34
+
35
+ ## Shell Integration
36
+
37
+ These shell functions automatically start `m2r` when you run `claude` and configure the necessary environment variables.
38
+
39
+ ### Zsh / Bash
40
+
41
+ Add to `~/.zshrc` or `~/.bashrc`:
42
+
43
+ ```bash
44
+ claude() {
45
+ local proxy_port=8001
46
+ local m2rrc="$HOME/.m2rrc"
47
+
48
+ if [[ -f "$m2rrc" ]]; then
49
+ local port_line=$(grep '^PROXY_PORT=' "$m2rrc")
50
+ if [[ -n "$port_line" ]]; then
51
+ proxy_port="${port_line#PROXY_PORT=}"
52
+ fi
53
+ fi
54
+
55
+ if ! nc -z localhost "$proxy_port" 2>/dev/null; then
56
+ echo "Starting m2r on port $proxy_port..."
57
+ mkdir -p "$HOME/.local/log"
58
+ nohup m2r >> "$HOME/.local/log/m2r.log" 2>&1 &
59
+ sleep 1
60
+ fi
61
+
62
+ ANTHROPIC_BASE_URL="http://localhost:$proxy_port" \
63
+ ANTHROPIC_API_KEY="x" \
64
+ CLAUDE_CODE_MAX_OUTPUT_TOKENS="64000" \
65
+ command claude "$@"
66
+ }
67
+
68
+ m2r-log() {
69
+ local log="$HOME/.local/log/m2r.log"
70
+ local follow=false tail=50
71
+ while [[ $# -gt 0 ]]; do
72
+ case "$1" in
73
+ -f|--follow) follow=true; shift ;;
74
+ -n|--tail) tail="$2"; shift 2 ;;
75
+ *) shift ;;
76
+ esac
77
+ done
78
+ [[ ! -f "$log" ]] && echo "Log not found: $log" && return 1
79
+ $follow && tail -n "$tail" -f "$log" || tail -n "$tail" "$log"
80
+ }
81
+
82
+ m2r-restart() {
83
+ local proxy_port=8001 m2rrc="$HOME/.m2rrc"
84
+ if [[ -f "$m2rrc" ]]; then
85
+ local port_line=$(grep '^PROXY_PORT=' "$m2rrc")
86
+ [[ -n "$port_line" ]] && proxy_port="${port_line#PROXY_PORT=}"
87
+ fi
88
+ pkill -f "node.*m2r" 2>/dev/null && echo "Stopped m2r" || echo "m2r not running"
89
+ mkdir -p "$HOME/.local/log"
90
+ nohup m2r >> "$HOME/.local/log/m2r.log" 2>&1 &
91
+ for i in {1..10}; do
92
+ sleep 0.3
93
+ nc -z localhost "$proxy_port" 2>/dev/null && echo "m2r started on port $proxy_port" && return 0
94
+ done
95
+ echo "Failed to start m2r"; return 1
96
+ }
97
+ ```
98
+
99
+ ### PowerShell
100
+
101
+ Add to your `$PROFILE`:
102
+
103
+ ```powershell
104
+ function Get-M2rPort {
105
+ $m2rrc = "$HOME\.m2rrc"
106
+ if (Test-Path $m2rrc) {
107
+ switch -Regex -File $m2rrc { '^PROXY_PORT=(\d+)' { return [int]$Matches[1] } }
108
+ }
109
+ return 8001
110
+ }
111
+
112
+ function Test-M2rRunning($port) {
113
+ try { $tcp = [System.Net.Sockets.TcpClient]::new("localhost", $port); $tcp.Dispose(); return $true } catch { return $false }
114
+ }
115
+
116
+ function Start-M2r($port) {
117
+ $logDir = "$HOME\.local\log"
118
+ New-Item -ItemType Directory -Path $logDir -Force -ErrorAction SilentlyContinue | Out-Null
119
+ Start-Process powershell -ArgumentList "-WindowStyle Hidden -Command `"m2r *>> '$logDir\m2r.log'`"" -WindowStyle Hidden
120
+ for ($i = 0; $i -lt 20; $i++) {
121
+ Start-Sleep -Milliseconds 250
122
+ if (Test-M2rRunning $port) { return $true }
123
+ }
124
+ return $false
125
+ }
126
+
127
+ function claude {
128
+ $port = Get-M2rPort
129
+ if (-not (Test-M2rRunning $port)) {
130
+ Write-Host "Starting m2r on port $port..." -ForegroundColor Cyan
131
+ if (-not (Start-M2r $port)) { Write-Host "Failed to start m2r" -ForegroundColor Red; return }
132
+ }
133
+ $env:ANTHROPIC_BASE_URL = "http://localhost:$port"
134
+ $env:ANTHROPIC_API_KEY = "x"
135
+ $env:CLAUDE_CODE_MAX_OUTPUT_TOKENS = "64000"
136
+ & (Get-Command claude -CommandType Application)[0].Source @args
137
+ }
138
+
139
+ function m2r-restart {
140
+ $port = Get-M2rPort
141
+ $stopped = $false
142
+ Get-CimInstance Win32_Process -Filter "Name='bun.exe'" | Where-Object { $_.CommandLine -match 'm2r' } | ForEach-Object {
143
+ Stop-Process -Id $_.ProcessId -Force -ErrorAction SilentlyContinue
144
+ $stopped = $true
145
+ }
146
+ Write-Host $(if ($stopped) { "Stopped m2r" } else { "m2r not running" })
147
+ if (Start-M2r $port) { Write-Host "m2r started on port $port" } else { Write-Host "Failed to start m2r" -ForegroundColor Red }
148
+ }
149
+
150
+ function m2r-log {
151
+ param([switch]$Follow, [int]$Tail = 50)
152
+ $log = "$HOME\.local\log\m2r.log"
153
+ if (-not (Test-Path $log)) {
154
+ Write-Host "Log file not found: $log" -ForegroundColor Yellow
155
+ return
156
+ }
157
+ if ($Follow) {
158
+ Get-Content $log -Wait -Tail $Tail
159
+ } else {
160
+ Get-Content $log -Tail $Tail
161
+ }
162
+ }
163
+ ```
164
+
165
+ ## License
166
+
167
+ MIT
package/dist/index.js CHANGED
@@ -26753,7 +26753,8 @@ var import_pino, logger;
26753
26753
  var init_logger = __esm(() => {
26754
26754
  import_pino = __toESM(require_pino(), 1);
26755
26755
  logger = import_pino.default({
26756
- level: process.env.LOG_LEVEL || "info"
26756
+ level: process.env.LOG_LEVEL || "info",
26757
+ timestamp: import_pino.default.stdTimeFunctions.isoTime
26757
26758
  });
26758
26759
  });
26759
26760
 
@@ -54004,7 +54005,8 @@ function createClient() {
54004
54005
  return new AzureOpenAI({
54005
54006
  azureADTokenProvider,
54006
54007
  endpoint: config2.azure.endpoint,
54007
- apiVersion: config2.azure.apiVersion
54008
+ apiVersion: config2.azure.apiVersion,
54009
+ timeout: 300000
54008
54010
  });
54009
54011
  }
54010
54012
  var init_client2 = __esm(() => {
@@ -54025,6 +54027,7 @@ var { logger: logger23 } = await Promise.resolve().then(() => (init_logger(), ex
54025
54027
  var app = createApp2(createClient2());
54026
54028
  var server = Bun.serve({
54027
54029
  fetch: app.fetch,
54028
- port: config3.port
54030
+ port: config3.port,
54031
+ idleTimeout: 300
54029
54032
  });
54030
54033
  logger23.info({ port: server.port }, "proxy server started");
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@jhzhu89/m2r",
3
- "version": "0.1.2",
3
+ "version": "0.1.4",
4
4
  "description": "Anthropic Messages API to Azure OpenAI Responses API proxy",
5
5
  "type": "module",
6
6
  "license": "MIT",