pygpt-net 2.6.36__py3-none-any.whl → 2.6.37__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as published to a supported registry. It is provided for informational purposes only.
- pygpt_net/CHANGELOG.txt +5 -0
- pygpt_net/__init__.py +3 -3
- pygpt_net/controller/chat/handler/anthropic_stream.py +166 -0
- pygpt_net/controller/chat/handler/google_stream.py +181 -0
- pygpt_net/controller/chat/handler/langchain_stream.py +24 -0
- pygpt_net/controller/chat/handler/llamaindex_stream.py +47 -0
- pygpt_net/controller/chat/handler/openai_stream.py +260 -0
- pygpt_net/controller/chat/handler/utils.py +210 -0
- pygpt_net/controller/chat/handler/worker.py +566 -0
- pygpt_net/controller/chat/handler/xai_stream.py +135 -0
- pygpt_net/controller/chat/stream.py +1 -1
- pygpt_net/controller/ctx/ctx.py +1 -1
- pygpt_net/controller/model/editor.py +3 -0
- pygpt_net/core/bridge/context.py +35 -35
- pygpt_net/core/bridge/worker.py +40 -16
- pygpt_net/core/render/web/body.py +29 -34
- pygpt_net/data/config/config.json +10 -3
- pygpt_net/data/config/models.json +3 -3
- pygpt_net/data/config/settings.json +105 -0
- pygpt_net/data/css/style.dark.css +2 -3
- pygpt_net/data/css/style.light.css +2 -3
- pygpt_net/data/locale/locale.de.ini +3 -1
- pygpt_net/data/locale/locale.en.ini +19 -1
- pygpt_net/data/locale/locale.es.ini +3 -1
- pygpt_net/data/locale/locale.fr.ini +3 -1
- pygpt_net/data/locale/locale.it.ini +3 -1
- pygpt_net/data/locale/locale.pl.ini +4 -2
- pygpt_net/data/locale/locale.uk.ini +3 -1
- pygpt_net/data/locale/locale.zh.ini +3 -1
- pygpt_net/provider/api/__init__.py +5 -3
- pygpt_net/provider/api/anthropic/__init__.py +190 -29
- pygpt_net/provider/api/anthropic/audio.py +30 -0
- pygpt_net/provider/api/anthropic/chat.py +341 -0
- pygpt_net/provider/api/anthropic/image.py +25 -0
- pygpt_net/provider/api/anthropic/tools.py +266 -0
- pygpt_net/provider/api/anthropic/vision.py +142 -0
- pygpt_net/provider/api/google/chat.py +2 -2
- pygpt_net/provider/api/google/tools.py +58 -48
- pygpt_net/provider/api/google/vision.py +7 -1
- pygpt_net/provider/api/openai/chat.py +1 -0
- pygpt_net/provider/api/openai/vision.py +6 -0
- pygpt_net/provider/api/x_ai/__init__.py +247 -0
- pygpt_net/provider/api/x_ai/audio.py +32 -0
- pygpt_net/provider/api/x_ai/chat.py +968 -0
- pygpt_net/provider/api/x_ai/image.py +208 -0
- pygpt_net/provider/api/x_ai/remote.py +262 -0
- pygpt_net/provider/api/x_ai/tools.py +120 -0
- pygpt_net/provider/api/x_ai/vision.py +119 -0
- pygpt_net/provider/core/config/patch.py +28 -0
- pygpt_net/provider/llms/anthropic.py +4 -2
- pygpt_net/ui/base/config_dialog.py +5 -11
- pygpt_net/ui/dialog/models.py +2 -4
- pygpt_net/ui/dialog/plugins.py +40 -43
- pygpt_net/ui/widget/element/labels.py +19 -3
- pygpt_net/ui/widget/textarea/web.py +1 -1
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.37.dist-info}/METADATA +11 -6
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.37.dist-info}/RECORD +60 -41
- pygpt_net/controller/chat/handler/stream_worker.py +0 -1136
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.37.dist-info}/LICENSE +0 -0
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.37.dist-info}/WHEEL +0 -0
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.37.dist-info}/entry_points.txt +0 -0
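The most significant change in 2.6.37 is structural: the monolithic `controller/chat/handler/stream_worker.py` (1136 lines) is removed and replaced by per-vendor stream handlers under `controller/chat/handler/` (`worker.py` plus `anthropic_stream.py`, `google_stream.py`, `langchain_stream.py`, `llamaindex_stream.py`, `openai_stream.py`, `xai_stream.py` and a shared `utils.py`), and a new xAI provider is added under `provider/api/x_ai/`. The sketch below is not code from the package; it only illustrates, using the handler functions and signatures visible further down in this diff, how a caller could route streaming chunks to the new modules.

```python
# Illustrative sketch only -- the real dispatch lives in the new
# controller/chat/handler/worker.py, which is not reproduced in this excerpt.
# The imports and signatures below are taken from the files shown later in this diff.
from typing import Optional

from pygpt_net.controller.chat.handler.anthropic_stream import process_anthropic_chunk
from pygpt_net.controller.chat.handler.google_stream import process_google_chunk
from pygpt_net.controller.chat.handler.langchain_stream import process_langchain_chat
from pygpt_net.controller.chat.handler.llamaindex_stream import process_llama_chat


def route_chunk(vendor: str, ctx, core, state, chunk) -> Optional[str]:
    """Route one streaming chunk to the matching vendor handler (hypothetical helper)."""
    if vendor == "anthropic":
        return process_anthropic_chunk(ctx, core, state, chunk)
    if vendor == "google":
        return process_google_chunk(ctx, core, state, chunk)
    if vendor == "langchain":
        return process_langchain_chat(chunk)
    if vendor == "llama_index":
        return process_llama_chat(state, chunk)
    # openai_stream.py and xai_stream.py also ship in this release, but their
    # function names are not shown in this excerpt.
    return None
```

The vendor keys and the `route_chunk` helper are hypothetical; in the package the selection presumably happens inside the new `worker.py`.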
pygpt_net/CHANGELOG.txt
CHANGED
pygpt_net/__init__.py
CHANGED
```diff
@@ -6,15 +6,15 @@
 # GitHub: https://github.com/szczyglis-dev/py-gpt #
 # MIT License #
 # Created By : Marcin Szczygliński #
-# Updated Date: 2025.09.
+# Updated Date: 2025.09.05 00:00:00 #
 # ================================================== #

 __author__ = "Marcin Szczygliński"
 __copyright__ = "Copyright 2025, Marcin Szczygliński"
 __credits__ = ["Marcin Szczygliński"]
 __license__ = "MIT"
-__version__ = "2.6.36"
-__build__ = "2025-09-
+__version__ = "2.6.37"
+__build__ = "2025-09-05"
 __maintainer__ = "Marcin Szczygliński"
 __github__ = "https://github.com/szczyglis-dev/py-gpt"
 __report__ = "https://github.com/szczyglis-dev/py-gpt/issues"
```
pygpt_net/controller/chat/handler/anthropic_stream.py
ADDED
```diff
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# ================================================== #
+# This file is a part of PYGPT package #
+# Website: https://pygpt.net #
+# GitHub: https://github.com/szczyglis-dev/py-gpt #
+# MIT License #
+# Created By : Marcin Szczygliński #
+# Updated Date: 2025.09.05 00:00:00 #
+# ================================================== #
+
+import io
+from typing import Optional
+
+from .utils import as_int
+
+
+def process_anthropic_chunk(ctx, core, state, chunk) -> Optional[str]:
+    """
+    Anthropic streaming events handler. Supports both full event objects and top-level delta objects.
+
+    :param ctx: Chat context
+    :param core: Core controller
+    :param state: Chat state
+    :param chunk: Incoming streaming chunk
+    :return: Extracted text delta or None
+    """
+    state.usage_vendor = "anthropic"
+    etype = str(getattr(chunk, "type", "") or "")
+    response: Optional[str] = None
+
+    # --- Top-level delta objects (when SDK yields deltas directly) ---
+    if etype == "text_delta":
+        txt = getattr(chunk, "text", None)
+        return str(txt) if txt is not None else None
+
+    if etype == "thinking_delta":
+        return None
+
+    if etype == "input_json_delta":
+        pj = getattr(chunk, "partial_json", "") or ""
+        buf = state.fn_args_buffers.get("__anthropic_last__")
+        if buf is None:
+            buf = io.StringIO()
+            state.fn_args_buffers["__anthropic_last__"] = buf
+        buf.write(pj)
+        if state.tool_calls:
+            state.tool_calls[-1]["function"]["arguments"] = buf.getvalue()
+        return None
+
+    if etype == "signature_delta":
+        return None
+
+    # --- Standard event flow ---
+    if etype == "message_start":
+        try:
+            msg = getattr(chunk, "message", None)
+            um = getattr(msg, "usage", None) if msg else None
+            if um:
+                inp = as_int(getattr(um, "input_tokens", None))
+                if inp is not None:
+                    state.usage_payload["in"] = inp
+        except Exception:
+            pass
+        return None
+
+    if etype == "content_block_start":
+        try:
+            cb = getattr(chunk, "content_block", None)
+            if cb and getattr(cb, "type", "") == "tool_use":
+                idx = getattr(chunk, "index", 0) or 0
+                tid = getattr(cb, "id", "") or ""
+                name = getattr(cb, "name", "") or ""
+                state.tool_calls.append({
+                    "id": tid,
+                    "type": "function",
+                    "function": {"name": name, "arguments": ""}
+                })
+                state.fn_args_buffers[str(idx)] = io.StringIO()
+                state.fn_args_buffers["__anthropic_last__"] = state.fn_args_buffers[str(idx)]
+        except Exception:
+            pass
+
+        try:
+            cb = getattr(chunk, "content_block", None)
+            if cb and getattr(cb, "type", "") == "web_search_tool_result":
+                results = getattr(cb, "content", None) or []
+                for r in results:
+                    url = r.get("url") if isinstance(r, dict) else None
+                    if url:
+                        if ctx.urls is None:
+                            ctx.urls = []
+                        if url not in ctx.urls:
+                            ctx.urls.append(url)
+        except Exception:
+            pass
+
+        return None
+
+    if etype == "content_block_delta":
+        try:
+            delta = getattr(chunk, "delta", None)
+            if not delta:
+                return None
+            if getattr(delta, "type", "") == "text_delta":
+                txt = getattr(delta, "text", None)
+                if txt is not None:
+                    response = str(txt)
+            elif getattr(delta, "type", "") == "input_json_delta":
+                idx = str(getattr(chunk, "index", 0) or 0)
+                buf = state.fn_args_buffers.get(idx)
+                pj = getattr(delta, "partial_json", "") or ""
+                if buf is None:
+                    buf = io.StringIO()
+                    state.fn_args_buffers[idx] = buf
+                buf.write(pj)
+                state.fn_args_buffers["__anthropic_last__"] = buf
+                try:
+                    if state.tool_calls:
+                        tc = state.tool_calls[-1]
+                        tc["function"]["arguments"] = buf.getvalue()
+                except Exception:
+                    pass
+        except Exception:
+            pass
+        return response
+
+    if etype == "content_block_stop":
+        try:
+            idx = str(getattr(chunk, "index", 0) or 0)
+            buf = state.fn_args_buffers.pop(idx, None)
+            if buf is not None:
+                try:
+                    args_val = buf.getvalue()
+                finally:
+                    try:
+                        buf.close()
+                    except Exception:
+                        pass
+                if state.tool_calls:
+                    state.tool_calls[-1]["function"]["arguments"] = args_val
+                if state.fn_args_buffers.get("__anthropic_last__") is buf:
+                    state.fn_args_buffers.pop("__anthropic_last__", None)
+        except Exception:
+            pass
+        return None
+
+    if etype == "message_delta":
+        try:
+            usage = getattr(chunk, "usage", None)
+            if usage:
+                out_tok = as_int(getattr(usage, "output_tokens", None))
+                if out_tok is not None:
+                    state.usage_payload["out"] = out_tok
+            delta = getattr(chunk, "delta", None)
+            stop_reason = getattr(delta, "stop_reason", None) if delta else None
+            if stop_reason == "tool_use":
+                state.force_func_call = True
+        except Exception:
+            pass
+        return None
+
+    if etype == "message_stop":
+        return None
+
+    return None
```
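To make the state contract of `process_anthropic_chunk` concrete, here is a minimal, hypothetical driver. `FakeState` and the `SimpleNamespace` events are stand-ins, not pygpt-net or Anthropic SDK types; the handler only reads the attributes shown and never touches `core` on this path.

```python
# Stand-in objects for illustration only; real events come from the Anthropic SDK
# stream and real state comes from the new handler worker.
from types import SimpleNamespace

from pygpt_net.controller.chat.handler.anthropic_stream import process_anthropic_chunk


class FakeState:
    def __init__(self):
        self.usage_vendor = None
        self.usage_payload = {}
        self.fn_args_buffers = {}   # block index -> io.StringIO of partial tool-call JSON
        self.tool_calls = []        # OpenAI-style tool call dicts
        self.force_func_call = False


ctx = SimpleNamespace(urls=None)
state = FakeState()

# Feed Anthropic SSE events as they arrive; only text deltas yield a string.
events = [
    SimpleNamespace(type="message_start",
                    message=SimpleNamespace(usage=SimpleNamespace(input_tokens=12))),
    SimpleNamespace(type="content_block_delta", index=0,
                    delta=SimpleNamespace(type="text_delta", text="Hello")),
    SimpleNamespace(type="message_delta",
                    usage=SimpleNamespace(output_tokens=3),
                    delta=SimpleNamespace(stop_reason="end_turn")),
    SimpleNamespace(type="message_stop"),
]

text = ""
for ev in events:
    piece = process_anthropic_chunk(ctx, core=None, state=state, chunk=ev)
    if piece:
        text += piece

print(text)                  # "Hello"
print(state.usage_payload)   # expected {'in': 12, 'out': 3}, assuming as_int coerces ints
```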
pygpt_net/controller/chat/handler/google_stream.py
ADDED
```diff
@@ -0,0 +1,181 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# ================================================== #
+# This file is a part of PYGPT package #
+# Website: https://pygpt.net #
+# GitHub: https://github.com/szczyglis-dev/py-gpt #
+# MIT License #
+# Created By : Marcin Szczygliński #
+# Updated Date: 2025.09.05 00:00:00 #
+# ================================================== #
+
+import base64
+import json
+from typing import Optional, Any
+
+from .utils import capture_google_usage, collect_google_citations
+
+
+def process_google_chunk(ctx, core, state, chunk) -> Optional[str]:
+    """
+    Google python-genai streaming chunk.
+
+    :param ctx: Chat context
+    :param core: Core controller
+    :param state: Chat state
+    :param chunk: Incoming streaming chunk
+    :return: Extracted text delta or None
+    """
+    response_parts: list[str] = []
+
+    if state.google_stream_ref is None:
+        state.google_stream_ref = state.generator
+
+    try:
+        um = getattr(chunk, "usage_metadata", None)
+        if um:
+            capture_google_usage(state, um)
+    except Exception:
+        pass
+
+    try:
+        t = getattr(chunk, "text", None)
+        if t:
+            response_parts.append(t)
+    except Exception:
+        pass
+
+    fc_list = []
+    try:
+        fc_list = getattr(chunk, "function_calls", None) or []
+    except Exception:
+        fc_list = []
+
+    new_calls = []
+
+    def _to_plain_dict(obj: Any):
+        """Best-effort conversion of SDK objects to plain dict/list."""
+        try:
+            if hasattr(obj, "to_json_dict"):
+                return obj.to_json_dict()
+            if hasattr(obj, "model_dump"):
+                return obj.model_dump()
+            if hasattr(obj, "to_dict"):
+                return obj.to_dict()
+        except Exception:
+            pass
+        if isinstance(obj, dict):
+            return {k: _to_plain_dict(v) for k, v in obj.items()}
+        if isinstance(obj, (list, tuple)):
+            return [_to_plain_dict(x) for x in obj]
+        return obj
+
+    if fc_list:
+        for fc in fc_list:
+            name = getattr(fc, "name", "") or ""
+            args_obj = getattr(fc, "args", {}) or {}
+            args_dict = _to_plain_dict(args_obj) or {}
+            new_calls.append({
+                "id": getattr(fc, "id", "") or "",
+                "type": "function",
+                "function": {
+                    "name": name,
+                    "arguments": json.dumps(args_dict, ensure_ascii=False),
+                }
+            })
+    else:
+        try:
+            cands = getattr(chunk, "candidates", None) or []
+            for cand in cands:
+                content = getattr(cand, "content", None)
+                parts = getattr(content, "parts", None) or []
+                for p in parts:
+                    fn = getattr(p, "function_call", None)
+                    if not fn:
+                        continue
+                    name = getattr(fn, "name", "") or ""
+                    args_obj = getattr(fn, "args", {}) or {}
+                    args_dict = _to_plain_dict(args_obj) or {}
+                    new_calls.append({
+                        "id": getattr(fn, "id", "") or "",
+                        "type": "function",
+                        "function": {
+                            "name": name,
+                            "arguments": json.dumps(args_dict, ensure_ascii=False),
+                        }
+                    })
+        except Exception:
+            pass
+
+    if new_calls:
+        seen = {(tc["function"]["name"], tc["function"]["arguments"]) for tc in state.tool_calls}
+        for tc in new_calls:
+            key = (tc["function"]["name"], tc["function"]["arguments"])
+            if key not in seen:
+                state.tool_calls.append(tc)
+                seen.add(key)
+
+    try:
+        cands = getattr(chunk, "candidates", None) or []
+        for cand in cands:
+            content = getattr(cand, "content", None)
+            parts = getattr(content, "parts", None) or []
+
+            for p in parts:
+                ex = getattr(p, "executable_code", None)
+                if ex:
+                    lang = (getattr(ex, "language", None) or "python").strip() or "python"
+                    code_txt = (
+                        getattr(ex, "code", None) or
+                        getattr(ex, "program", None) or
+                        getattr(ex, "source", None) or
+                        ""
+                    )
+                    if code_txt is None:
+                        code_txt = ""
+                    if not state.is_code:
+                        response_parts.append(f"\n\n**Code interpreter**\n```{lang.lower()}\n{code_txt}")
+                        state.is_code = True
+                    else:
+                        response_parts.append(str(code_txt))
+
+                cer = getattr(p, "code_execution_result", None)
+                if cer:
+                    if state.is_code:
+                        response_parts.append("\n\n```\n-----------\n")
+                        state.is_code = False
+
+                blob = getattr(p, "inline_data", None)
+                if blob:
+                    mime = (getattr(blob, "mime_type", "") or "").lower()
+                    if mime.startswith("image/"):
+                        data = getattr(blob, "data", None)
+                        if data:
+                            if isinstance(data, (bytes, bytearray)):
+                                img_bytes = bytes(data)
+                            else:
+                                img_bytes = base64.b64decode(data)
+                            save_path = core.image.gen_unique_path(ctx)
+                            with open(save_path, "wb") as f:
+                                f.write(img_bytes)
+                            if not isinstance(ctx.images, list):
+                                ctx.images = []
+                            ctx.images.append(save_path)
+                            state.image_paths.append(save_path)
+                            state.has_google_inline_image = True
+
+                fdata = getattr(p, "file_data", None)
+                if fdata:
+                    uri = getattr(fdata, "file_uri", None) or getattr(fdata, "uri", None)
+                    mime = (getattr(fdata, "mime_type", "") or "").lower()
+                    if uri and mime.startswith("image/") and (uri.startswith("http://") or uri.startswith("https://")):
+                        if ctx.urls is None:
+                            ctx.urls = []
+                        ctx.urls.append(uri)
+
+        collect_google_citations(ctx, state, chunk)
+
+    except Exception:
+        pass
+
+    return "".join(response_parts) if response_parts else None
```
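As with the Anthropic handler, a small hedged sketch shows how a python-genai function call ends up as an OpenAI-style tool-call dict on `state.tool_calls`. The chunk and state objects below are stand-ins, not google-genai or pygpt-net types; `core` is unused on this path (it is only needed when inline image data has to be saved).

```python
# Stand-in objects for illustration only; real chunks come from google-genai's
# streaming iterator and real state comes from the handler worker.
from types import SimpleNamespace

from pygpt_net.controller.chat.handler.google_stream import process_google_chunk

state = SimpleNamespace(
    google_stream_ref=None,  # the handler keeps a reference to the stream generator
    generator=None,
    tool_calls=[],           # filled with OpenAI-style tool-call dicts
)
ctx = SimpleNamespace(urls=None, images=None)

chunk = SimpleNamespace(
    usage_metadata=None,
    text=None,
    function_calls=[SimpleNamespace(id="call_1", name="get_weather",
                                    args={"city": "Kraków"})],
    candidates=[],
)

process_google_chunk(ctx, core=None, state=state, chunk=chunk)
print(state.tool_calls)
# expected: [{'id': 'call_1', 'type': 'function',
#             'function': {'name': 'get_weather', 'arguments': '{"city": "Kraków"}'}}]
```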
pygpt_net/controller/chat/handler/langchain_stream.py
ADDED
```diff
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# ================================================== #
+# This file is a part of PYGPT package #
+# Website: https://pygpt.net #
+# GitHub: https://github.com/szczyglis-dev/py-gpt #
+# MIT License #
+# Created By : Marcin Szczygliński #
+# Updated Date: 2025.09.05 00:00:00 #
+# ================================================== #
+
+from typing import Optional
+
+
+def process_langchain_chat(chunk) -> Optional[str]:
+    """
+    LangChain chat streaming delta.
+
+    :param chunk: Incoming streaming chunk
+    :return: Extracted text delta or None
+    """
+    if getattr(chunk, "content", None) is not None:
+        return str(chunk.content)
+    return None
```
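The LangChain handler is intentionally minimal: anything exposing a `.content` attribute yields its string content. A stand-in example (not a real LangChain message chunk):

```python
from types import SimpleNamespace

from pygpt_net.controller.chat.handler.langchain_stream import process_langchain_chat

# Any object with a .content attribute is accepted; in the app the producer is
# presumably a LangChain message chunk.
print(process_langchain_chat(SimpleNamespace(content="partial text")))  # "partial text"
```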
pygpt_net/controller/chat/handler/llamaindex_stream.py
ADDED
```diff
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# ================================================== #
+# This file is a part of PYGPT package #
+# Website: https://pygpt.net #
+# GitHub: https://github.com/szczyglis-dev/py-gpt #
+# MIT License #
+# Created By : Marcin Szczygliński #
+# Updated Date: 2025.09.05 00:00:00 #
+# ================================================== #
+
+from typing import Optional
+
+
+def process_llama_chat(state, chunk) -> Optional[str]:
+    """
+    Llama chat streaming delta with optional tool call extraction.
+
+    :param state: Chat state
+    :param chunk: Incoming streaming chunk
+    :return: Extracted text delta or None
+    """
+    response = None
+    if getattr(chunk, "delta", None) is not None:
+        response = str(chunk.delta)
+
+    tool_chunks = getattr(getattr(chunk, "message", None), "additional_kwargs", {}).get("tool_calls", [])
+    if tool_chunks:
+        for tool_chunk in tool_chunks:
+            id_val = getattr(tool_chunk, "call_id", None) or getattr(tool_chunk, "id", None)
+            name = getattr(tool_chunk, "name", None) or getattr(getattr(tool_chunk, "function", None), "name", None)
+            args = getattr(tool_chunk, "arguments", None)
+            if args is None:
+                f = getattr(tool_chunk, "function", None)
+                args = getattr(f, "arguments", None) if f else None
+            if id_val:
+                if not args:
+                    args = "{}"
+                tool_call = {
+                    "id": id_val,
+                    "type": "function",
+                    "function": {"name": name, "arguments": args}
+                }
+                state.tool_calls.clear()
+                state.tool_calls.append(tool_call)
+
+    return response
```