local-openai2anthropic 0.3.2__py3-none-any.whl → 0.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- local_openai2anthropic/__init__.py +1 -1
- local_openai2anthropic/converter.py +2 -0
- local_openai2anthropic/router.py +19 -4
- {local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/METADATA +1 -1
- {local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/RECORD +8 -8
- {local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/WHEEL +0 -0
- {local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/entry_points.txt +0 -0
- {local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/licenses/LICENSE +0 -0
local_openai2anthropic/converter.py
CHANGED

@@ -54,6 +54,7 @@ def convert_anthropic_to_openai(
     tools = anthropic_params.get("tools")
     top_k = anthropic_params.get("top_k")
     top_p = anthropic_params.get("top_p", 0.95)
+    repetition_penalty = anthropic_params.get("repetition_penalty", 1.1)
     thinking = anthropic_params.get("thinking")
     # metadata is accepted but not forwarded to OpenAI

@@ -102,6 +103,7 @@ def convert_anthropic_to_openai(
         "messages": openai_messages,
         "max_tokens": max_tokens,
         "stream": stream,
+        "repetition_penalty": repetition_penalty,
     }

     # Always include usage in stream for accurate token counting
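For readers skimming the converter change: the new field is read from the incoming Anthropic-style request body and forwarded verbatim in the OpenAI-compatible payload, defaulting to 1.1 when the client does not set it. A minimal standalone sketch of that mapping (the helper name and the surrounding fields here are illustrative, not the package's actual converter):

    # Sketch only: mirrors the idea of the repetition_penalty change above.
    def build_openai_payload(anthropic_params: dict) -> dict:
        # Defaults to 1.1, matching the default added in convert_anthropic_to_openai.
        repetition_penalty = anthropic_params.get("repetition_penalty", 1.1)
        return {
            "model": anthropic_params.get("model"),
            "max_tokens": anthropic_params.get("max_tokens", 1024),
            "stream": anthropic_params.get("stream", False),
            # Not part of the official OpenAI schema; vLLM-style local backends
            # accept it, other servers may ignore or reject it.
            "repetition_penalty": repetition_penalty,
        }

    # A request that never sets repetition_penalty still gets the 1.1 default.
    print(build_openai_payload({"model": "my-local-model", "max_tokens": 256}))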
local_openai2anthropic/router.py
CHANGED
@@ -179,6 +179,7 @@ async def _stream_response(
     output_tokens = 0
     message_id = None
     sent_message_delta = False
+    pending_text_prefix = ""

     async for line in response.aiter_lines():
         if not line.startswith("data: "):

@@ -299,6 +300,7 @@ async def _stream_response(
             # Handle reasoning content (thinking)
             if delta.get("reasoning_content"):
                 reasoning = delta["reasoning_content"]
+                pending_text_prefix = ""
                 # Start thinking content block if not already started
                 if not content_block_started or current_block_type != "thinking":
                     # Close previous block if exists

@@ -338,8 +340,16 @@ async def _stream_response(
                 continue

             # Handle content
-            if delta.get("content"):
+            if isinstance(delta.get("content"), str):
+                content_text = delta.get("content", "")
+                if not content_text:
+                    continue
+                if content_text.strip() == "(no content)":
+                    continue
                 if not content_block_started or current_block_type != "text":
+                    if not content_text.strip():
+                        pending_text_prefix += content_text
+                        continue
                     # Close previous block if exists
                     if content_block_started:
                         stop_block = {

@@ -363,16 +373,21 @@ async def _stream_response(
                     content_block_started = True
                     current_block_type = "text"

-
+                if pending_text_prefix:
+                    content_text = pending_text_prefix + content_text
+                    pending_text_prefix = ""
+
+                output_tokens += _count_tokens(content_text)
                 delta_block = {
                     "type": "content_block_delta",
                     "index": content_block_index,
-                    "delta": {"type": "text_delta", "text":
+                    "delta": {"type": "text_delta", "text": content_text},
                 }
                 yield f"event: content_block_delta\ndata: {json.dumps(delta_block)}\n\n"

             # Handle tool calls
             if delta.get("tool_calls"):
+                pending_text_prefix = ""
                 for tool_call in delta["tool_calls"]:
                     tool_call_idx = tool_call.get("index", 0)

@@ -415,7 +430,7 @@ async def _stream_response(
                     tool_call_buffers[tool_call_idx] = (
                         tool_call_buffers.get(tool_call_idx, "") + args
                     )
-                    yield f"event: content_block_delta\ndata: {json.dumps({'type': 'content_block_delta', 'index': content_block_index, 'delta': {'type': 'input_json_delta', 'partial_json':
+                    yield f"event: content_block_delta\ndata: {json.dumps({'type': 'content_block_delta', 'index': content_block_index, 'delta': {'type': 'input_json_delta', 'partial_json': args}})}\n\n"

    # Close final content block
    if content_block_started:
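The pending_text_prefix additions are easier to follow outside the streaming loop: whitespace-only text deltas that arrive while no text block is open are buffered instead of opening a content block, then prepended to the first substantive chunk; literal "(no content)" placeholders are dropped; and the buffer is cleared whenever a reasoning or tool-call delta takes over. A rough standalone sketch of just that buffering behaviour (the function and its names are illustrative, not the router's internals):

    # Sketch only: isolates the whitespace-buffering idea from _stream_response.
    def coalesce_text_deltas(deltas):
        """Yield text chunks, holding leading whitespace until real text arrives."""
        pending_prefix = ""
        block_open = False
        for chunk in deltas:
            if not isinstance(chunk, str) or not chunk:
                continue
            if chunk.strip() == "(no content)":
                continue  # placeholder text some backends emit; never forwarded
            if not block_open and not chunk.strip():
                pending_prefix += chunk  # hold blank chunks instead of opening a block
                continue
            if pending_prefix:
                chunk = pending_prefix + chunk
                pending_prefix = ""
            block_open = True
            yield chunk

    # The leading newlines are folded into the first real delta:
    print(list(coalesce_text_deltas(["\n\n", "Hello", " world", "(no content)"])))
    # ['\n\nHello', ' world']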
{local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: local-openai2anthropic
-Version: 0.3.2
+Version: 0.3.4
 Summary: A lightweight proxy server that converts Anthropic Messages API to OpenAI API
 Project-URL: Homepage, https://github.com/dongfangzan/local-openai2anthropic
 Project-URL: Repository, https://github.com/dongfangzan/local-openai2anthropic
{local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/RECORD
RENAMED

@@ -1,19 +1,19 @@
-local_openai2anthropic/__init__.py,sha256=
+local_openai2anthropic/__init__.py,sha256=IyFQQ77mhxr7_28OFUMuranzRsXTvtL-qpR03jONDgM,1059
 local_openai2anthropic/__main__.py,sha256=K21u5u7FN8-DbO67TT_XDF0neGqJeFrVNkteRauCRQk,179
 local_openai2anthropic/config.py,sha256=3M5ZAz3uYNMGxaottEBseEOZF-GnVaGuioH9Hpmgnd8,1918
-local_openai2anthropic/converter.py,sha256=
+local_openai2anthropic/converter.py,sha256=BkZwnyqxlGhjhg2dpCDsA0qGUWO8srMdMfD5ZvlLPQ0,15979
 local_openai2anthropic/daemon.py,sha256=pZnRojGFcuIpR8yLDNjV-b0LJRBVhgRAa-dKeRRse44,10017
 local_openai2anthropic/daemon_runner.py,sha256=rguOH0PgpbjqNsKYei0uCQX8JQOQ1wmtQH1CtW95Dbw,3274
 local_openai2anthropic/main.py,sha256=FK5JBBpzB_T44y3N16lPl1hK4ht4LEQqRKzVmkIjIoo,9866
 local_openai2anthropic/openai_types.py,sha256=jFdCvLwtXYoo5gGRqOhbHQcVaxcsxNnCP_yFPIv7rG4,3823
 local_openai2anthropic/protocol.py,sha256=vUEgxtRPFll6jEtLc4DyxTLCBjrWIEScZXhEqe4uibk,5185
-local_openai2anthropic/router.py,sha256=
+local_openai2anthropic/router.py,sha256=tUsF4QzQA9zGTqOtUARbLhSFMoCgTqCmpv5AJW1ayGM,53479
 local_openai2anthropic/tavily_client.py,sha256=QsBhnyF8BFWPAxB4XtWCCpHCquNL5SW93-zjTTi4Meg,3774
 local_openai2anthropic/server_tools/__init__.py,sha256=QlJfjEta-HOCtLe7NaY_fpbEKv-ZpInjAnfmSqE9tbk,615
 local_openai2anthropic/server_tools/base.py,sha256=pNFsv-jSgxVrkY004AHAcYMNZgVSO8ZOeCzQBUtQ3vU,5633
 local_openai2anthropic/server_tools/web_search.py,sha256=1C7lX_cm-tMaN3MsCjinEZYPJc_Hj4yAxYay9h8Zbvs,6543
-local_openai2anthropic-0.3.
-local_openai2anthropic-0.3.
-local_openai2anthropic-0.3.
-local_openai2anthropic-0.3.
-local_openai2anthropic-0.3.
+local_openai2anthropic-0.3.4.dist-info/METADATA,sha256=FXWYHsj5HiViIwzdk6QMj6Zufenka_jJPI2MFcOndcI,11240
+local_openai2anthropic-0.3.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+local_openai2anthropic-0.3.4.dist-info/entry_points.txt,sha256=hdc9tSJUNxyNLXcTYye5SuD2K0bEQhxBhGnWTFup6ZM,116
+local_openai2anthropic-0.3.4.dist-info/licenses/LICENSE,sha256=X3_kZy3lJvd_xp8IeyUcIAO2Y367MXZc6aaRx8BYR_s,11369
+local_openai2anthropic-0.3.4.dist-info/RECORD,,
{local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/WHEEL
RENAMED
File without changes

{local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/entry_points.txt
RENAMED
File without changes

{local_openai2anthropic-0.3.2.dist-info → local_openai2anthropic-0.3.4.dist-info}/licenses/LICENSE
RENAMED
File without changes