local-openai2anthropic 0.2.9__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- local_openai2anthropic/router.py +70 -8
- {local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/METADATA +1 -1
- {local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/RECORD +6 -6
- {local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/WHEEL +0 -0
- {local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/entry_points.txt +0 -0
- {local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/licenses/LICENSE +0 -0
local_openai2anthropic/router.py
CHANGED
@@ -61,6 +61,10 @@ async def _stream_response(
     """
     Stream response from OpenAI and convert to Anthropic format.
     """
+    # Log streaming request start
+    logger.info(f"[OpenAI Stream] Starting streaming request to {url}")
+    logger.info(f"[OpenAI Stream] Request model: {json_data.get('model', 'unknown')}")
+
     try:
         async with client.stream(
             "POST", url, headers=headers, json=json_data
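
The surrounding context shows the upstream request going through httpx's client.stream(...). A minimal, self-contained sketch of that call pattern, assuming a local OpenAI-compatible endpoint; the URL, headers, and payload are illustrative placeholders, not the package's configuration:

# Sketch only: stream an OpenAI-compatible chat completion over SSE with httpx.
import asyncio
import httpx

async def stream_chat_completions(url: str, headers: dict, json_data: dict) -> None:
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("POST", url, headers=headers, json=json_data) as response:
            response.raise_for_status()
            async for line in response.aiter_lines():
                # Each OpenAI streaming chunk arrives as a "data: {...}" SSE line.
                if line.startswith("data: "):
                    print(line)

if __name__ == "__main__":
    asyncio.run(stream_chat_completions(
        "http://localhost:8000/v1/chat/completions",  # placeholder endpoint
        {"Content-Type": "application/json"},
        {"model": "my-local-model", "stream": True,
         "messages": [{"role": "user", "content": "hello"}]},
    ))
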
@@ -269,13 +273,18 @@ async def _stream_response(
             # Handle new tool call (with id) - use separate if, not elif
             # because a chunk may have both id AND arguments
             if tool_call.get("id"):
+                func = tool_call.get("function") or {}
+                tool_name = func.get("name", "")
+                logger.info(
+                    f"[OpenAI Stream] Tool call started - id={tool_call['id']}, name={tool_name}"
+                )
+
                 if content_block_started:
                     yield f"event: content_block_stop\ndata: {json.dumps({'type': 'content_block_stop', 'index': content_block_index})}\n\n"
                     content_block_started = False
                     content_block_index += 1

-
-                yield f"event: content_block_start\ndata: {json.dumps({'type': 'content_block_start', 'index': content_block_index, 'content_block': {'type': 'tool_use', 'id': tool_call['id'], 'name': func.get('name', ''), 'input': {}}})}\n\n"
+                yield f"event: content_block_start\ndata: {json.dumps({'type': 'content_block_start', 'index': content_block_index, 'content_block': {'type': 'tool_use', 'id': tool_call['id'], 'name': tool_name, 'input': {}}})}\n\n"
                 content_block_started = True
                 current_block_type = "tool_use"

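
The yielded f-strings above are Anthropic-style SSE frames. A self-contained sketch of the two frames that bracket a tool_use block, built from hypothetical id and name values (the tool-argument deltas that stream in between are not shown):

import json

# Hypothetical stand-ins for values taken from the OpenAI chunk stream.
content_block_index = 1
tool_call = {"id": "call_abc123", "function": {"name": "get_weather"}}

func = tool_call.get("function") or {}
tool_name = func.get("name", "")

start = {
    "type": "content_block_start",
    "index": content_block_index,
    "content_block": {"type": "tool_use", "id": tool_call["id"], "name": tool_name, "input": {}},
}
stop = {"type": "content_block_stop", "index": content_block_index}

# Mirrors the event/data framing of the yields in the hunk above.
print(f"event: content_block_start\ndata: {json.dumps(start)}\n")
print(f"event: content_block_stop\ndata: {json.dumps(stop)}\n")
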
@@ -296,6 +305,13 @@ async def _stream_response(
         )
         yield f"event: content_block_stop\ndata: {json.dumps(stop_block)}\n\n"

+        # Log stream summary before ending
+        logger.info(
+            f"[OpenAI Stream] Stream ended - message_id={message_id}, "
+            f"finish_reason={finish_reason}, input_tokens={input_tokens}, "
+            f"output_tokens={output_tokens}, content_blocks={content_block_index + 1}"
+        )
+
         # Message stop
         stop_event = {"type": "message_stop"}
         logger.debug(
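
The summary line gives one greppable record per stream. A hedged sketch of pulling the counters back out of a line in that format; the parsing is a hypothetical debugging aid, not part of the package, and the values are made up:

import re

line = ("[OpenAI Stream] Stream ended - message_id=msg_0123, "
        "finish_reason=stop, input_tokens=512, output_tokens=87, content_blocks=2")

match = re.search(
    r"message_id=(?P<message_id>\S+), finish_reason=(?P<finish_reason>\S+), "
    r"input_tokens=(?P<input_tokens>\d+), output_tokens=(?P<output_tokens>\d+), "
    r"content_blocks=(?P<content_blocks>\d+)",
    line,
)
if match:
    print(match.groupdict())
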
@@ -512,8 +528,9 @@ async def _handle_with_server_tools(
     )

     completion_data = response.json()
-
-
+    # Log raw OpenAI response for server tools
+    logger.info(
+        f"[OpenAI Response (Server Tools)] {json.dumps(completion_data, ensure_ascii=False, indent=2)[:2000]}"
     )
     from openai.types.chat import ChatCompletion

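
Several of the new log lines share the json.dumps(...)[:2000] pattern so a single oversized payload cannot flood the log. A small hypothetical helper expressing that pattern (log_truncated is illustrative, not a function in the package):

import json
import logging

logger = logging.getLogger(__name__)

def log_truncated(label: str, payload: dict, limit: int = 2000) -> None:
    # Pretty-print with non-ASCII preserved, then cap the length, as the diff's
    # json.dumps(..., ensure_ascii=False, indent=2)[:2000] calls do.
    logger.info(f"[{label}] {json.dumps(payload, ensure_ascii=False, indent=2)[:limit]}")

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    log_truncated("OpenAI Response (Server Tools)", {"choices": [{"message": {"content": "hi"}}]})
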
@@ -531,7 +548,12 @@ async def _handle_with_server_tools(
     if tool_calls:
         for tc in tool_calls:
             func_name = tc.function.name if tc.function else ""
+            func_args = tc.function.arguments if tc.function else "{}"
             logger.info(f"  Tool call: {func_name}")
+            logger.info(f"  Tool ID: {tc.id}")
+            logger.info(
+                f"  Arguments: {func_args[:200]}"
+            )  # Log first 200 chars

     # Generate Anthropic-style ID for server tools
     is_server = handler.is_server_tool_call(
@@ -859,20 +881,60 @@ async def create_message(
         )

         openai_completion = response.json()
-
-
+        # Log raw OpenAI response
+        logger.info(
+            f"[OpenAI Raw Response] {json.dumps(openai_completion, ensure_ascii=False, indent=2)[:2000]}"
         )

+        # Log response details
+        if openai_completion.get("choices"):
+            choice = openai_completion["choices"][0]
+            message = choice.get("message", {})
+            finish_reason = choice.get("finish_reason")
+            content_preview = (
+                message.get("content", "")[:100]
+                if message.get("content")
+                else ""
+            )
+            tool_calls_count = (
+                len(message.get("tool_calls", []))
+                if message.get("tool_calls")
+                else 0
+            )
+            logger.info(
+                f"[OpenAI Response Details] finish_reason={finish_reason}, "
+                f"content_length={len(message.get('content', ''))}, "
+                f"tool_calls={tool_calls_count}, "
+                f"content_preview={content_preview[:50]!r}"
+            )
+
         from openai.types.chat import ChatCompletion

         completion = ChatCompletion.model_validate(openai_completion)
         anthropic_message = convert_openai_to_anthropic(completion, model)

         anthropic_response = anthropic_message.model_dump()
-
-
+        # Log converted Anthropic response
+        logger.info(
+            f"[Anthropic Converted Response] {json.dumps(anthropic_response, ensure_ascii=False, indent=2)[:2000]}"
         )

+        # Log Anthropic response details
+        content_blocks = anthropic_response.get("content", [])
+        stop_reason = anthropic_response.get("stop_reason")
+        usage = anthropic_response.get("usage", {})
+        logger.info(
+            f"[Anthropic Response Details] stop_reason={stop_reason}, "
+            f"content_blocks={len(content_blocks)}, "
+            f"input_tokens={usage.get('input_tokens')}, "
+            f"output_tokens={usage.get('output_tokens')}"
+        )
+
+        # Log content block types
+        if content_blocks:
+            block_types = [block.get("type") for block in content_blocks]
+            logger.info(f"[Anthropic Content Blocks] types={block_types}")
+
         return JSONResponse(content=anthropic_response)

     except httpx.TimeoutException:
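
The [Anthropic Response Details] and [Anthropic Content Blocks] lines are derived purely from the converted response dict. A self-contained sketch of that derivation on a hypothetical converted payload (values are illustrative only):

# Hypothetical converted response; the keys match the ones read in the hunk above.
anthropic_response = {
    "stop_reason": "tool_use",
    "usage": {"input_tokens": 512, "output_tokens": 87},
    "content": [
        {"type": "text", "text": "Let me check the weather."},
        {"type": "tool_use", "id": "toolu_01", "name": "get_weather", "input": {"city": "Berlin"}},
    ],
}

content_blocks = anthropic_response.get("content", [])
stop_reason = anthropic_response.get("stop_reason")
usage = anthropic_response.get("usage", {})

print(
    f"[Anthropic Response Details] stop_reason={stop_reason}, "
    f"content_blocks={len(content_blocks)}, "
    f"input_tokens={usage.get('input_tokens')}, "
    f"output_tokens={usage.get('output_tokens')}"
)
print(f"[Anthropic Content Blocks] types={[block.get('type') for block in content_blocks]}")
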
{local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: local-openai2anthropic
-Version: 0.2.9
+Version: 0.3.1
 Summary: A lightweight proxy server that converts Anthropic Messages API to OpenAI API
 Project-URL: Homepage, https://github.com/dongfangzan/local-openai2anthropic
 Project-URL: Repository, https://github.com/dongfangzan/local-openai2anthropic
{local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/RECORD
RENAMED
@@ -7,13 +7,13 @@ local_openai2anthropic/daemon_runner.py,sha256=rguOH0PgpbjqNsKYei0uCQX8JQOQ1wmtQ
 local_openai2anthropic/main.py,sha256=FK5JBBpzB_T44y3N16lPl1hK4ht4LEQqRKzVmkIjIoo,9866
 local_openai2anthropic/openai_types.py,sha256=jFdCvLwtXYoo5gGRqOhbHQcVaxcsxNnCP_yFPIv7rG4,3823
 local_openai2anthropic/protocol.py,sha256=vUEgxtRPFll6jEtLc4DyxTLCBjrWIEScZXhEqe4uibk,5185
-local_openai2anthropic/router.py,sha256=
+local_openai2anthropic/router.py,sha256=jS6-IvAHNYhyGYhR0gJ-wm0Je0Jtkt-e5ca4hnmG3GM,47015
 local_openai2anthropic/tavily_client.py,sha256=QsBhnyF8BFWPAxB4XtWCCpHCquNL5SW93-zjTTi4Meg,3774
 local_openai2anthropic/server_tools/__init__.py,sha256=QlJfjEta-HOCtLe7NaY_fpbEKv-ZpInjAnfmSqE9tbk,615
 local_openai2anthropic/server_tools/base.py,sha256=pNFsv-jSgxVrkY004AHAcYMNZgVSO8ZOeCzQBUtQ3vU,5633
 local_openai2anthropic/server_tools/web_search.py,sha256=1C7lX_cm-tMaN3MsCjinEZYPJc_Hj4yAxYay9h8Zbvs,6543
-local_openai2anthropic-0.2.9.dist-info/METADATA,sha256=
-local_openai2anthropic-0.2.9.dist-info/WHEEL,sha256=
-local_openai2anthropic-0.2.9.dist-info/entry_points.txt,sha256=
-local_openai2anthropic-0.2.9.dist-info/licenses/LICENSE,sha256=
-local_openai2anthropic-0.2.9.dist-info/RECORD,,
+local_openai2anthropic-0.3.1.dist-info/METADATA,sha256=3NC5cFpYZ_EqOZ5Adoeq1wPqqXIcE8UoKDGxL7vkGGg,11240
+local_openai2anthropic-0.3.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+local_openai2anthropic-0.3.1.dist-info/entry_points.txt,sha256=hdc9tSJUNxyNLXcTYye5SuD2K0bEQhxBhGnWTFup6ZM,116
+local_openai2anthropic-0.3.1.dist-info/licenses/LICENSE,sha256=X3_kZy3lJvd_xp8IeyUcIAO2Y367MXZc6aaRx8BYR_s,11369
+local_openai2anthropic-0.3.1.dist-info/RECORD,,
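
Each RECORD entry is path,sha256=<digest>,<size>, where the digest is the urlsafe base64 of the file's SHA-256 with the '=' padding stripped. A hedged sketch of recomputing such an entry for any file on disk; the path below is a placeholder:

import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    # Wheel RECORD digests: urlsafe base64 of the raw SHA-256 digest, '=' padding removed.
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"

if __name__ == "__main__":
    print(record_entry("local_openai2anthropic/router.py"))  # placeholder path
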
{local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/WHEEL
RENAMED
File without changes
{local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/entry_points.txt
RENAMED
File without changes

{local_openai2anthropic-0.2.9.dist-info → local_openai2anthropic-0.3.1.dist-info}/licenses/LICENSE
RENAMED
File without changes