@jhzhu89/m2r 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +142 -97
- package/package.json +2 -2
package/dist/index.js
CHANGED
@@ -16160,16 +16160,22 @@ function fromRequest(body) {
   for (const block of blocks) {
     if (block.type === "text") {
       content2.push({ type: "text", text: block.text });
-    } else if (block.type === "image") {
+      continue;
+    }
+    if (block.type === "image") {
       content2.push(parseImageBlock(block));
-    } else if (block.type === "tool_use") {
+      continue;
+    }
+    if (block.type === "tool_use") {
       content2.push({
         type: "tool_call",
         id: block.id,
         name: block.name,
         arguments: block.input
       });
-    } else if (block.type === "tool_result") {
+      continue;
+    }
+    if (block.type === "tool_result") {
       content2.push({
         type: "tool_result",
         id: block.tool_use_id,
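Note: this hunk flattens an `else if` chain in `fromRequest` into guard clauses that `continue` after each matched block type. A minimal TypeScript sketch of the pattern, with stand-in types rather than the package's real ones:

type Block =
  | { type: "text"; text: string }
  | { type: "tool_use"; id: string; name: string; input: unknown };

function mapBlocks(blocks: Block[]): unknown[] {
  const content: unknown[] = [];
  for (const block of blocks) {
    if (block.type === "text") {
      content.push({ type: "text", text: block.text });
      continue; // early exit replaces the old else-if chaining
    }
    if (block.type === "tool_use") {
      content.push({ type: "tool_call", id: block.id, name: block.name, arguments: block.input });
      continue;
    }
    // the image and tool_result branches follow the same shape in the bundle
  }
  return content;
}

Behavior is unchanged; the guard-clause form just keeps each block type's handling independent.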
@@ -16207,13 +16213,6 @@ function fromRequest(body) {
 function generateId(prefix = "msg") {
   return `${prefix}_${Date.now().toString(36)}${Math.random().toString(36).slice(2, 8)}`;
 }
-function mapStopReason(status, hasToolCall) {
-  if (hasToolCall)
-    return "tool_use";
-  if (status === "incomplete")
-    return "max_tokens";
-  return "end_turn";
-}
 
 // src/anthropic/to-response.ts
 function toContentBlock(c) {
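Note: `mapStopReason` is relocated rather than dropped: it reappears verbatim as `mapOpenAIStopReason` under the new `// src/openai/stop-reason.ts` banner in the `@@ -27063` hunk below, next to the OpenAI response mapping that uses it.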
@@ -26803,14 +26802,16 @@ function extractUpstreamRequestId(error49) {
   }
   return null;
 }
-function handleError(
+function handleError(errorOrUnused, maybeError, ctx) {
+  const error49 = maybeError ?? errorOrUnused;
+  const context = ctx ?? { reqId: "unknown" };
   const status = extractStatus(error49);
   const message2 = extractMessage(error49);
   const errorType = mapStatusToErrorType(status);
   const httpCode = mapStatusToHttpCode(status);
   logger.error({
-    reqId:
-    model:
+    reqId: context.reqId,
+    model: context.model,
     status,
     errorType,
     message: message2,
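Note: `handleError` now takes its logging context explicitly and tolerates the error arriving in either of its first two slots. A sketch of the normalization, assuming an `ErrorContext` shape inferred from the fields logged in this hunk (not the package's declared types):

interface ErrorContext {
  reqId: string;
  model?: string;
}

function handleError(errorOrUnused: unknown, maybeError?: unknown, ctx?: ErrorContext): void {
  // Prefer the second slot when it is populated, as the bundle does.
  const error = maybeError ?? errorOrUnused;
  // Never log without a request id; fall back to a sentinel.
  const context: ErrorContext = ctx ?? { reqId: "unknown" };
  console.error({ reqId: context.reqId, model: context.model, error });
}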
@@ -26846,61 +26847,12 @@ var init_error_mapper = __esm(() => {
   init_logger();
 });
 
-// src/config/index.ts
-var exports_config = {};
-__export(exports_config, {
-  resolveModel: () => resolveModel,
-  config: () => config2
-});
-function parseModelMap(env2) {
-  if (!env2)
-    return {};
-  const parsed = JSON.parse(env2);
-  if (typeof parsed !== "object" || parsed === null)
-    return {};
-  const result = {};
-  for (const [key, value] of Object.entries(parsed)) {
-    if (typeof value === "string") {
-      result[key] = value;
-    }
-  }
-  return result;
-}
-function resolveModel(alias) {
-  if (config2.modelMap[alias]) {
-    return config2.modelMap[alias];
-  }
-  const lower = alias.toLowerCase();
-  for (const [tier, model] of Object.entries(config2.tiers)) {
-    if (lower.includes(tier)) {
-      return model;
-    }
-  }
-  return alias;
-}
-var config2;
-var init_config = __esm(() => {
-  config2 = {
-    port: parseInt(process.env.PROXY_PORT || "8000"),
-    azure: {
-      endpoint: process.env.AZURE_OPENAI_ENDPOINT || "",
-      apiVersion: process.env.AZURE_OPENAI_API_VERSION || "2025-04-01-preview"
-    },
-    modelMap: parseModelMap(process.env.MODEL_MAP),
-    tiers: {
-      haiku: process.env.TIER_HAIKU || "gpt-5-mini",
-      sonnet: process.env.TIER_SONNET || "gpt-5.2",
-      opus: process.env.TIER_OPUS || "gpt-5.1-codex-max"
-    }
-  };
-});
-
 // src/config/model-config.ts
 function getModelConfig(slug) {
   const lower = slug.toLowerCase();
-  for (const { prefix, config:
+  for (const { prefix, config: config2 } of MODEL_FAMILIES) {
     if (lower.startsWith(prefix)) {
-      return
+      return config2;
     }
   }
   return DEFAULT_CONFIG;
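Note: the `src/config/index.ts` block removed above is not gone; the `@@ -26983` hunk below reintroduces it with `resolveModel` extracted into a new `src/config/routing.ts`. `parseModelMap` itself carries over unchanged: it reads a JSON object of alias-to-deployment names from `MODEL_MAP` and keeps only string values. A usage sketch with illustrative names (not shipped defaults):

// MODEL_MAP='{"claude-sonnet-4-5":"my-azure-deployment","bad":42}'
// parseModelMap keeps only the string-valued entries:
const map = parseModelMap(process.env.MODEL_MAP);
// -> { "claude-sonnet-4-5": "my-azure-deployment" }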
@@ -26983,6 +26935,74 @@ var init_model_config = __esm(() => {
   };
 });
 
+// src/config/routing.ts
+function resolveModel(config2, alias) {
+  if (config2.modelMap[alias]) {
+    return config2.modelMap[alias];
+  }
+  const lower = alias.toLowerCase();
+  for (const [tier, model] of Object.entries(config2.tiers)) {
+    if (lower.includes(tier)) {
+      return model;
+    }
+  }
+  return alias;
+}
+function resolveModelConfig(config2, model) {
+  const resolvedModel = resolveModel(config2, model);
+  const configSnapshot = getModelConfig(resolvedModel);
+  return { model: resolvedModel, config: configSnapshot };
+}
+var init_routing = __esm(() => {
+  init_model_config();
+});
+
+// src/config/index.ts
+var exports_config = {};
+__export(exports_config, {
+  resolveModelConfig: () => resolveModelConfig2,
+  resolveModel: () => resolveModel2,
+  config: () => config2
+});
+function parseModelMap(env2) {
+  if (!env2)
+    return {};
+  const parsed = JSON.parse(env2);
+  if (typeof parsed !== "object" || parsed === null)
+    return {};
+  const result = {};
+  for (const [key, value] of Object.entries(parsed)) {
+    if (typeof value === "string") {
+      result[key] = value;
+    }
+  }
+  return result;
+}
+var configSnapshot, config2, resolveModel2, resolveModelConfig2;
+var init_config = __esm(() => {
+  init_routing();
+  configSnapshot = {
+    port: parseInt(process.env.PROXY_PORT || "8000"),
+    azure: {
+      endpoint: process.env.AZURE_OPENAI_ENDPOINT || "",
+      apiVersion: process.env.AZURE_OPENAI_API_VERSION || "2025-04-01-preview"
+    },
+    modelMap: parseModelMap(process.env.MODEL_MAP),
+    tiers: {
+      haiku: process.env.TIER_HAIKU || "gpt-5-mini",
+      sonnet: process.env.TIER_SONNET || "gpt-5.2",
+      opus: process.env.TIER_OPUS || "gpt-5.1-codex-max"
+    }
+  };
+  config2 = {
+    ...configSnapshot,
+    resolveModel: (alias) => resolveModel(configSnapshot, alias),
+    resolveModelConfig: (model) => resolveModelConfig(configSnapshot, model)
+  };
+  resolveModel2 = config2.resolveModel;
+  resolveModelConfig2 = config2.resolveModelConfig;
+});
+
 // src/openai/to-request.ts
 function mapToolChoice(tc) {
   if (!tc || tc.type === "auto")
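Note: the net effect of this hunk and the previous one is a dependency-injection refactor. `resolveModel` and the new `resolveModelConfig` take the config snapshot as an explicit first argument instead of closing over module state, and `config2` re-exposes them pre-bound to `configSnapshot`. A sketch of the lookup order, using the tier defaults from this hunk and an illustrative alias:

const snapshot = {
  modelMap: { "my-alias": "my-deployment" },
  tiers: { haiku: "gpt-5-mini", sonnet: "gpt-5.2", opus: "gpt-5.1-codex-max" }
};

resolveModel(snapshot, "my-alias");          // exact modelMap hit -> "my-deployment"
resolveModel(snapshot, "claude-sonnet-4-5"); // tier substring hit -> "gpt-5.2"
resolveModel(snapshot, "gpt-4o");            // no match -> returned unchanged

Passing the snapshot explicitly makes the routing functions testable without touching process.env.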
@@ -26991,7 +27011,7 @@ function mapToolChoice(tc) {
     return "required";
   return { type: "function", name: tc.name };
 }
-function
+function buildInput(ir) {
   const input = [];
   for (const msg of ir.messages) {
     if (msg.role === "system") {
@@ -26999,14 +27019,20 @@ function toResponsesRequest(ir) {
         role: "system",
         content: msg.content.map((c) => c.type === "text" ? c.text : "").join("")
       });
-    } else if (msg.role === "user") {
+      continue;
+    }
+    if (msg.role === "user") {
       const parts = [];
       for (const c of msg.content) {
         if (c.type === "text") {
           parts.push({ type: "input_text", text: c.text });
-        } else if (c.type === "image") {
+          continue;
+        }
+        if (c.type === "image") {
           parts.push({ type: "input_image", image_url: c.url, detail: "auto" });
-        } else if (c.type === "tool_result") {
+          continue;
+        }
+        if (c.type === "tool_result") {
           input.push({
             type: "function_call_output",
             call_id: c.id,
@@ -27017,22 +27043,24 @@ function toResponsesRequest(ir) {
       if (parts.length > 0) {
         input.push({ type: "message", role: "user", content: parts });
       }
-
-
-
-
-
-
-
-
-      }
-
-
-
-
-
-
-
+      continue;
+    }
+    for (const c of msg.content) {
+      if (c.type === "text") {
+        input.push({
+          type: "message",
+          role: "assistant",
+          content: c.text
+        });
+        continue;
+      }
+      if (c.type === "tool_call") {
+        input.push({
+          type: "function_call",
+          call_id: c.id,
+          name: c.name,
+          arguments: JSON.stringify(c.arguments)
+        });
     }
   }
 }
@@ -27043,13 +27071,15 @@ function toResponsesRequest(ir) {
     parameters: { type: "object", ...t.inputSchema },
     strict: null
   }));
-  const
-
+  const toolChoice2 = mapToolChoice(ir.toolChoice);
+  return { input, tools, toolChoice: toolChoice2 };
+}
+function buildOpenAIRequest(ir, model, modelConfig) {
+  const { input, tools, toolChoice: toolChoice2 } = buildInput(ir);
   const reasoning = modelConfig.supportsReasoningSummaries ? {
     effort: ir.thinking?.effort ?? modelConfig.defaultReasoningEffort ?? "medium",
     summary: "auto"
   } : undefined;
-  const toolChoice2 = mapToolChoice(ir.toolChoice);
   return {
     model,
     input,
@@ -27063,11 +27093,23 @@ function toResponsesRequest(ir) {
     ...ir.topP !== undefined && { top_p: ir.topP }
   };
 }
+function toResponsesRequest(ir, resolved) {
+  const resolution = resolved ?? config2.resolveModelConfig(ir.model);
+  return buildOpenAIRequest(ir, resolution.model, resolution.config);
+}
 var init_to_request = __esm(() => {
   init_config();
-  init_model_config();
 });
 
+// src/openai/stop-reason.ts
+function mapOpenAIStopReason(status, hasToolCall) {
+  if (hasToolCall)
+    return "tool_use";
+  if (status === "incomplete")
+    return "max_tokens";
+  return "end_turn";
+}
+
 // src/openai/from-response.ts
 function isOutputText(c) {
   return c.type === "output_text";
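Note: the old monolithic `toResponsesRequest(ir)` is split three ways: `buildInput` assembles input, tools, and tool choice; `buildOpenAIRequest` layers on model-specific settings such as reasoning; and the new two-argument `toResponsesRequest(ir, resolved)` accepts a precomputed model resolution, falling back to `config2.resolveModelConfig(ir.model)` when none is given. A sketch of the intended call shape (`ir` stands in for a validated intermediate request):

// Resolve once, then reuse the same resolution for request building,
// logging, and error reporting:
const resolved = config2.resolveModelConfig(ir.model); // { model, config }
const openaiReq = toResponsesRequest(ir, resolved);
// toResponsesRequest(ir) alone still works and resolves internally.

`mapOpenAIStopReason` is the former `mapStopReason` from the Anthropic mapper, relocated beside the OpenAI response code that consumes it.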
@@ -27096,7 +27138,7 @@ function fromResponse(response) {
   const hasToolCall = content2.some((c) => c.type === "tool_call");
   return {
     content: content2,
-    stopReason:
+    stopReason: mapOpenAIStopReason(response.status, hasToolCall),
     usage: {
       inputTokens: response.usage?.input_tokens ?? 0,
       outputTokens: response.usage?.output_tokens ?? 0
@@ -27183,7 +27225,7 @@ function* fromStreamEvent(event) {
   const hasToolCall = event.response.output.some((item) => item.type === "function_call");
   yield {
     type: "done",
-    stopReason:
+    stopReason: mapOpenAIStopReason(event.response.status, hasToolCall),
     usage: {
       inputTokens: event.response.usage?.input_tokens ?? 0,
       outputTokens: event.response.usage?.output_tokens ?? 0
@@ -31769,19 +31811,19 @@ function createApp(client) {
     }
     const ir = fromRequest(validation.data);
     const reqId = generateId("req");
-    const
+    const resolved = resolveModelConfig2(ir.model);
     c.header("x-request-id", reqId);
     logger.info({
       reqId,
       model: ir.model,
-      resolvedModel,
+      resolvedModel: resolved.model,
       stream: ir.stream,
       maxTokens: ir.maxTokens,
       tools: ir.tools?.length ?? 0,
       thinking: ir.thinking?.type === "enabled"
     }, "request received");
     if (!ir.stream) {
-      const openaiReq = toResponsesRequest(ir);
+      const openaiReq = toResponsesRequest(ir, resolved);
       try {
         const openaiRes = await client.responses.create(openaiReq);
         const irRes = fromResponse(openaiRes);
@@ -31794,14 +31836,14 @@ function createApp(client) {
       }, "non-streaming response complete");
       return c.json(anthropicRes);
     } catch (error49) {
-      return handleError(
+      return handleError(error49, { reqId, model: resolved.model });
     }
   }
   c.header("Content-Type", "text/event-stream");
   c.header("Cache-Control", "no-cache");
   c.header("Connection", "keep-alive");
   return stream(c, async (s) => {
-    const openaiReq = toResponsesRequest(ir);
+    const openaiReq = toResponsesRequest(ir, resolved);
     try {
       const openaiStream = await client.responses.create({
         ...openaiReq,
@@ -31817,7 +31859,10 @@ function createApp(client) {
       }
       logger.info({ reqId }, "streaming response complete");
     } catch (error49) {
-      for (const sse of streamError(error49, {
+      for (const sse of streamError(error49, {
+        reqId,
+        model: resolved.model
+      })) {
         await s.write(sse);
       }
     }
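Note: these three hunks wire the refactor into the route handler. The model is resolved once per request via `resolveModelConfig2(ir.model)`, and the same `resolved` value feeds the access log, both the non-streaming and streaming `toResponsesRequest` calls, and both error paths (`handleError` and `streamError`), so logs and error reports consistently name the actual upstream model.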
@@ -54028,6 +54073,6 @@ var app = createApp2(createClient2());
 var server = Bun.serve({
   fetch: app.fetch,
   port: config3.port,
-  idleTimeout:
+  idleTimeout: 255
 });
 logger23.info({ port: server.port }, "proxy server started");
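Note: Bun's `Bun.serve` measures `idleTimeout` in seconds and documents 255 as its maximum value, so `idleTimeout: 255` pins idle connections at the longest allowed lifetime, a sensible choice for long streamed responses.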
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@jhzhu89/m2r",
-  "version": "0.1.4",
+  "version": "0.1.6",
   "description": "Anthropic Messages API to Azure OpenAI Responses API proxy",
   "type": "module",
   "license": "MIT",
@@ -36,7 +36,6 @@
     "test:watch": "bun test --watch ./test/unit"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.71.2",
     "@azure/identity": "^4.13.0",
     "dotenv": "^17.2.3",
     "gpt-tokenizer": "^3.4.0",
@@ -46,6 +45,7 @@
|
|
|
46
45
|
"zod": "^4.3.4"
|
|
47
46
|
},
|
|
48
47
|
"devDependencies": {
|
|
48
|
+
"@anthropic-ai/sdk": "^0.71.2",
|
|
49
49
|
"@eslint/js": "^9.39.2",
|
|
50
50
|
"@types/bun": "latest",
|
|
51
51
|
"eslint": "^9.39.2",
|