pygpt-net 2.7.5__py3-none-any.whl → 2.7.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. pygpt_net/CHANGELOG.txt +14 -0
  2. pygpt_net/__init__.py +4 -4
  3. pygpt_net/controller/chat/remote_tools.py +3 -9
  4. pygpt_net/controller/chat/stream.py +2 -2
  5. pygpt_net/controller/chat/{handler/worker.py → stream_worker.py} +20 -64
  6. pygpt_net/controller/debug/fixtures.py +3 -2
  7. pygpt_net/controller/files/files.py +65 -4
  8. pygpt_net/core/debug/models.py +2 -2
  9. pygpt_net/core/filesystem/url.py +4 -1
  10. pygpt_net/core/render/web/body.py +3 -2
  11. pygpt_net/core/types/chunk.py +27 -0
  12. pygpt_net/data/config/config.json +14 -4
  13. pygpt_net/data/config/models.json +192 -4
  14. pygpt_net/data/config/settings.json +126 -36
  15. pygpt_net/data/js/app/template.js +1 -1
  16. pygpt_net/data/js/app.min.js +2 -2
  17. pygpt_net/data/locale/locale.de.ini +5 -0
  18. pygpt_net/data/locale/locale.en.ini +35 -8
  19. pygpt_net/data/locale/locale.es.ini +5 -0
  20. pygpt_net/data/locale/locale.fr.ini +5 -0
  21. pygpt_net/data/locale/locale.it.ini +5 -0
  22. pygpt_net/data/locale/locale.pl.ini +5 -0
  23. pygpt_net/data/locale/locale.uk.ini +5 -0
  24. pygpt_net/data/locale/locale.zh.ini +5 -0
  25. pygpt_net/data/locale/plugin.cmd_mouse_control.en.ini +2 -2
  26. pygpt_net/item/ctx.py +3 -5
  27. pygpt_net/js_rc.py +2449 -2447
  28. pygpt_net/plugin/cmd_mouse_control/config.py +8 -7
  29. pygpt_net/plugin/cmd_mouse_control/plugin.py +3 -4
  30. pygpt_net/plugin/cmd_mouse_control/worker.py +2 -1
  31. pygpt_net/plugin/cmd_mouse_control/worker_sandbox.py +2 -1
  32. pygpt_net/provider/api/anthropic/__init__.py +16 -9
  33. pygpt_net/provider/api/anthropic/chat.py +259 -11
  34. pygpt_net/provider/api/anthropic/computer.py +844 -0
  35. pygpt_net/provider/api/anthropic/remote_tools.py +172 -0
  36. pygpt_net/{controller/chat/handler/anthropic_stream.py → provider/api/anthropic/stream.py} +24 -10
  37. pygpt_net/provider/api/anthropic/tools.py +32 -77
  38. pygpt_net/provider/api/anthropic/utils.py +30 -0
  39. pygpt_net/provider/api/google/__init__.py +6 -5
  40. pygpt_net/provider/api/google/chat.py +3 -8
  41. pygpt_net/{controller/chat/handler/google_stream.py → provider/api/google/stream.py} +1 -1
  42. pygpt_net/provider/api/google/utils.py +185 -0
  43. pygpt_net/{controller/chat/handler → provider/api/langchain}/__init__.py +0 -0
  44. pygpt_net/{controller/chat/handler/langchain_stream.py → provider/api/langchain/stream.py} +1 -1
  45. pygpt_net/provider/api/llama_index/__init__.py +0 -0
  46. pygpt_net/{controller/chat/handler/llamaindex_stream.py → provider/api/llama_index/stream.py} +1 -1
  47. pygpt_net/provider/api/openai/__init__.py +7 -3
  48. pygpt_net/provider/api/openai/image.py +2 -2
  49. pygpt_net/provider/api/openai/responses.py +0 -0
  50. pygpt_net/{controller/chat/handler/openai_stream.py → provider/api/openai/stream.py} +1 -1
  51. pygpt_net/provider/api/openai/utils.py +69 -3
  52. pygpt_net/provider/api/x_ai/__init__.py +117 -17
  53. pygpt_net/provider/api/x_ai/chat.py +272 -102
  54. pygpt_net/provider/api/x_ai/image.py +149 -47
  55. pygpt_net/provider/api/x_ai/{remote.py → remote_tools.py} +165 -70
  56. pygpt_net/provider/api/x_ai/responses.py +507 -0
  57. pygpt_net/provider/api/x_ai/stream.py +715 -0
  58. pygpt_net/provider/api/x_ai/tools.py +59 -8
  59. pygpt_net/{controller/chat/handler → provider/api/x_ai}/utils.py +1 -2
  60. pygpt_net/provider/api/x_ai/vision.py +1 -4
  61. pygpt_net/provider/core/config/patch.py +22 -1
  62. pygpt_net/provider/core/model/patch.py +26 -1
  63. pygpt_net/tools/image_viewer/ui/dialogs.py +300 -13
  64. pygpt_net/tools/text_editor/ui/dialogs.py +3 -2
  65. pygpt_net/tools/text_editor/ui/widgets.py +5 -1
  66. pygpt_net/ui/base/context_menu.py +44 -1
  67. pygpt_net/ui/layout/toolbox/indexes.py +22 -19
  68. pygpt_net/ui/layout/toolbox/model.py +28 -5
  69. pygpt_net/ui/widget/dialog/base.py +16 -5
  70. pygpt_net/ui/widget/image/display.py +25 -8
  71. pygpt_net/ui/widget/tabs/output.py +9 -1
  72. pygpt_net/ui/widget/textarea/editor.py +14 -1
  73. pygpt_net/ui/widget/textarea/input.py +20 -7
  74. pygpt_net/ui/widget/textarea/notepad.py +24 -1
  75. pygpt_net/ui/widget/textarea/output.py +23 -1
  76. pygpt_net/ui/widget/textarea/web.py +16 -1
  77. {pygpt_net-2.7.5.dist-info → pygpt_net-2.7.7.dist-info}/METADATA +16 -2
  78. {pygpt_net-2.7.5.dist-info → pygpt_net-2.7.7.dist-info}/RECORD +80 -73
  79. pygpt_net/controller/chat/handler/xai_stream.py +0 -135
  80. {pygpt_net-2.7.5.dist-info → pygpt_net-2.7.7.dist-info}/LICENSE +0 -0
  81. {pygpt_net-2.7.5.dist-info → pygpt_net-2.7.7.dist-info}/WHEEL +0 -0
  82. {pygpt_net-2.7.5.dist-info → pygpt_net-2.7.7.dist-info}/entry_points.txt +0 -0
pygpt_net/provider/api/x_ai/stream.py (added)
@@ -0,0 +1,715 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+ # ================================================== #
+ # This file is a part of PYGPT package #
+ # Website: https://pygpt.net #
+ # GitHub: https://github.com/szczyglis-dev/py-gpt #
+ # MIT License #
+ # Created By : Marcin Szczygliński #
+ # Updated Date: 2026.01.05 20:00:00 #
+ # ================================================== #
+
+ from typing import Optional, List, Dict, Any
+ import base64
+ import re
+
+
+ def _stringify_content(content) -> Optional[str]:
+     """
+     Convert various xAI content shapes into a plain text string.
+     Handles:
+     - str
+     - list of parts (dicts with {'type':'text','text':...} or str)
+     - objects with .text or .content attributes
+     - dict with 'text' or nested shapes
+     """
+     try:
+         if content is None:
+             return None
+         if isinstance(content, str):
+             return content
+         if isinstance(content, list):
+             buf: List[str] = []
+             for p in content:
+                 if isinstance(p, str):
+                     buf.append(p)
+                 elif isinstance(p, dict):
+                     if isinstance(p.get("text"), str):
+                         buf.append(p["text"])
+                     elif isinstance(p.get("content"), str):
+                         buf.append(p["content"])
+                     elif isinstance(p.get("delta"), str):
+                         buf.append(p["delta"])
+                 else:
+                     t = getattr(p, "text", None)
+                     if isinstance(t, str):
+                         buf.append(t)
+             return "".join(buf) if buf else None
+         if isinstance(content, dict):
+             if isinstance(content.get("text"), str):
+                 return content["text"]
+             if isinstance(content.get("content"), str):
+                 return content["content"]
+             if isinstance(content.get("delta"), str):
+                 return content["delta"]
+         t = getattr(content, "text", None)
+         if isinstance(t, str):
+             return t
+         c = getattr(content, "content", None)
+         if isinstance(c, str):
+             return c
+         return str(content)
+     except Exception:
+         return None
+
+
+ def _extract_http_urls_from_text(text: Optional[str]) -> List[str]:
+     """
+     Extract http(s) URLs from plain text.
+     """
+     if not text or not isinstance(text, str):
+         return []
+     # Basic, conservative URL regex
+     pattern = re.compile(r"(https?://[^\s)>\]\"']+)", re.IGNORECASE)
+     urls = pattern.findall(text)
+     # Deduplicate while preserving order
+     out, seen = [], set()
+     for u in urls:
+         if u not in seen:
+             out.append(u)
+             seen.add(u)
+     return out
+
+
+ def _append_urls(ctx, state, urls: List[str]):
+     """
+     Merge a list of URLs into state.citations and ctx.urls (unique, http/https only).
+     """
+     if not urls:
+         return
+     if not isinstance(state.citations, list):
+         state.citations = []
+     if ctx.urls is None:
+         ctx.urls = []
+     seen = set(state.citations) | set(ctx.urls)
+     for u in urls:
+         if not isinstance(u, str):
+             continue
+         if not (u.startswith("http://") or u.startswith("https://")):
+             continue
+         if u in seen:
+             continue
+         state.citations.append(u)
+         ctx.urls.append(u)
+         seen.add(u)
+
+
+ def _try_save_data_url_image(core, ctx, data_url: str) -> Optional[str]:
+     """
+     Save data:image/*;base64,... to file and return path.
+     """
+     try:
+         if not data_url.startswith("data:image/"):
+             return None
+         header, b64 = data_url.split(",", 1)
+         ext = "png"
+         if ";base64" in header:
+             mime = header.split(";")[0].split(":")[1].lower()
+             if "jpeg" in mime or "jpg" in mime:
+                 ext = "jpg"
+             elif "png" in mime:
+                 ext = "png"
+         img_bytes = base64.b64decode(b64)
+         save_path = core.image.gen_unique_path(ctx, ext=ext)
+         with open(save_path, "wb") as f:
+             f.write(img_bytes)
+         return save_path
+     except Exception:
+         return None
+
+
+ def _process_message_content_for_outputs(core, ctx, state, content):
+     """
+     Inspect assistant message content (list of parts) for image_url outputs and URLs.
+     - If image_url.url is data:... -> save to file and append to state.image_paths + ctx.images
+     - If image_url.url is http(s) -> append to ctx.urls
+     - Extract URLs from adjacent text parts conservatively
+     """
+     if not isinstance(content, list):
+         return
+     any_image = False
+     for p in content:
+         if not isinstance(p, dict):
+             continue
+         ptype = p.get("type")
+         if ptype == "image_url":
+             img = p.get("image_url") or {}
+             url = img.get("url")
+             if isinstance(url, str):
+                 if url.startswith("data:image/"):
+                     path = _try_save_data_url_image(core, ctx, url)
+                     if path:
+                         if not isinstance(ctx.images, list):
+                             ctx.images = []
+                         ctx.images.append(path)
+                         state.image_paths.append(path)
+                         any_image = True
+                 elif url.startswith("http://") or url.startswith("https://"):
+                     _append_urls(ctx, state, [url])
+         elif ptype == "text":
+             t = p.get("text")
+             if isinstance(t, str):
+                 urls = _extract_http_urls_from_text(t)
+                 if urls:
+                     _append_urls(ctx, state, urls)
+     # If images were added, mark flag similarly to Google path
+     if any_image:
+         try:
+             state.has_xai_inline_image = True
+         except Exception:
+             pass
+
+
+ def _merge_tool_calls(state, new_calls: List[Dict[str, Any]]):
+     """
+     Merge a list of tool_calls (dict-like) into state.tool_calls with de-duplication and streaming concat of arguments.
+     """
+     if not new_calls:
+         return
+     if not isinstance(state.tool_calls, list):
+         state.tool_calls = []
+
+     def _norm(tc: Dict[str, Any]) -> Dict[str, Any]:
+         fn = tc.get("function") or {}
+         args = fn.get("arguments")
+         if isinstance(args, (dict, list)):
+             try:
+                 import json as _json
+                 args = _json.dumps(args, ensure_ascii=False)
+             except Exception:
+                 args = str(args)
+         return {
+             "id": tc.get("id") or "",
+             "type": "function",
+             "function": {
+                 "name": fn.get("name") or "",
+                 "arguments": args or "",
+             },
+         }
+
+     def _find_existing(key_id: str, key_name: str) -> Optional[Dict[str, Any]]:
+         if not state.tool_calls:
+             return None
+         for ex in state.tool_calls:
+             if key_id and ex.get("id") == key_id:
+                 return ex
+             if key_name and ex.get("function", {}).get("name") == key_name:
+                 # name match as fallback for SDKs that stream without ids
+                 return ex
+         return None
+
+     for tc in new_calls:
+         if not isinstance(tc, dict):
+             continue
+         nid = tc.get("id") or ""
+         fn = tc.get("function") or {}
+         nname = fn.get("name") or ""
+         nargs = fn.get("arguments")
+         if isinstance(nargs, (dict, list)):
+             try:
+                 import json as _json
+                 nargs = _json.dumps(nargs, ensure_ascii=False)
+             except Exception:
+                 nargs = str(nargs)
+
+         existing = _find_existing(nid, nname)
+         if existing is None:
+             state.tool_calls.append(_norm(tc))
+         else:
+             if nname:
+                 existing["function"]["name"] = nname
+             if nargs:
+                 existing["function"]["arguments"] = (existing["function"].get("arguments", "") or "") + str(nargs)
+
+
+ def _maybe_collect_tail_meta(state, obj: Dict[str, Any], ctx=None):
+     """
+     Collect tail metadata like citations and usage into state (and ctx for urls), if these fields exist.
+     """
+     try:
+         if not isinstance(obj, dict):
+             return
+         if "citations" in obj:
+             c = obj.get("citations") or []
+             if isinstance(c, list):
+                 if ctx is not None:
+                     _append_urls(ctx, state, [u for u in c if isinstance(u, str)])
+                 else:
+                     try:
+                         setattr(state, "xai_stream_citations", c)
+                     except Exception:
+                         pass
+         if "usage" in obj and isinstance(obj["usage"], dict):
+             try:
+                 state.usage_vendor = "xai"
+                 u = obj["usage"]
+                 def _as_int(v):
+                     try:
+                         return int(v)
+                     except Exception:
+                         try:
+                             return int(float(v))
+                         except Exception:
+                             return 0
+                 p = _as_int(u.get("prompt_tokens") or u.get("input_tokens") or 0)
+                 c = _as_int(u.get("completion_tokens") or u.get("output_tokens") or 0)
+                 r = _as_int(u.get("reasoning_tokens") or 0)
+                 t = _as_int(u.get("total_tokens") or (p + c + r))
+                 state.usage_payload = {"in": p, "out": max(0, t - p) if t else c, "reasoning": r, "total": t}
+             except Exception:
+                 pass
+     except Exception:
+         pass
+
+
+ def process_xai_sdk_chunk(ctx, core, state, item) -> Optional[str]:
+     """
+     xAI SDK native streaming chunk.
+
+     :param ctx: Chat context
+     :param core: Core controller
+     :param state: Chat state
+     :param item: Incoming streaming chunk; supports:
+         - tuple(response, chunk) [old/new SDK style]
+         - chunk object with .delta/.content/.tool_calls/.tool_outputs/.citations
+         - dict/SimpleNamespace with OpenAI-like choices[0].delta.content
+         - dict event style with root 'delta' or 'message'
+     :return: Extracted text delta or None
+     """
+     response = None
+     chunk = None
+     try:
+         if isinstance(item, (list, tuple)) and len(item) == 2:
+             response, chunk = item
+         else:
+             chunk = item
+     except Exception:
+         return None
+
+     # persist last response and attach response id to ctx once
+     try:
+         if response is not None:
+             state.xai_last_response = response
+             rid = getattr(response, "id", None)
+             if rid and not getattr(ctx, "msg_id", None):
+                 ctx.msg_id = str(rid)
+                 if not isinstance(ctx.extra, dict):
+                     ctx.extra = {}
+                 ctx.extra["xai_response_id"] = ctx.msg_id
+                 try:
+                     core.ctx.update_item(ctx)
+                 except Exception:
+                     pass
+     except Exception:
+         pass
+
+     # Citations at chunk-level (last chunk in live search)
+     try:
+         cites = getattr(chunk, "citations", None)
+         if cites and isinstance(cites, list):
+             _append_urls(ctx, state, [u for u in cites if isinstance(u, str)])
+     except Exception:
+         pass
+
+     # Tool calls emitted as dedicated SDK objects
+     try:
+         tc_list = getattr(chunk, "tool_calls", None) or []
+         if tc_list:
+             norm_list = []
+             for tc in tc_list:
+                 if tc is None:
+                     continue
+                 fn = getattr(tc, "function", None)
+                 name = getattr(fn, "name", "") if fn is not None else ""
+                 args = getattr(fn, "arguments", "") if fn is not None else ""
+                 if isinstance(args, (dict, list)):
+                     try:
+                         import json as _json
+                         args = _json.dumps(args, ensure_ascii=False)
+                     except Exception:
+                         args = str(args)
+                 norm_list.append({
+                     "id": getattr(tc, "id", "") or "",
+                     "type": "function",
+                     "function": {"name": name or "", "arguments": args or ""},
+                 })
+             if norm_list:
+                 _merge_tool_calls(state, norm_list)
+                 state.force_func_call = True
+     except Exception:
+         pass
+
+     # Tool outputs: scan for URLs to enrich ctx.urls (best effort)
+     try:
+         to_list = getattr(chunk, "tool_outputs", None) or []
+         for to in to_list:
+             content = getattr(to, "content", None)
+             if isinstance(content, str):
+                 _append_urls(ctx, state, _extract_http_urls_from_text(content))
+     except Exception:
+         pass
+
+     # 1) Direct .content (SDK chunk)
+     try:
+         if hasattr(chunk, "content"):
+             t = _stringify_content(getattr(chunk, "content"))
+             if t:
+                 # collect URLs from text content conservatively
+                 _append_urls(ctx, state, _extract_http_urls_from_text(t))
+                 return str(t)
+     except Exception:
+         pass
+
+     # 2) .delta object or dict
+     try:
+         delta = getattr(chunk, "delta", None)
+         if delta is not None:
+             try:
+                 tc = delta.get("tool_calls") if isinstance(delta, dict) else getattr(delta, "tool_calls", None)
+                 if tc:
+                     if isinstance(tc, list):
+                         # already OpenAI-like dicts
+                         _merge_tool_calls(state, tc)
+                         state.force_func_call = True
+             except Exception:
+                 pass
+
+             dc = delta.get("content") if isinstance(delta, dict) else getattr(delta, "content", None)
+             if dc is not None:
+                 t = _stringify_content(dc)
+                 if t:
+                     _append_urls(ctx, state, _extract_http_urls_from_text(t))
+                     return str(t)
+             if isinstance(delta, str) and delta:
+                 _append_urls(ctx, state, _extract_http_urls_from_text(delta))
+                 return delta
+     except Exception:
+         pass
+
+     # 3) OpenAI-like dict with choices[0].delta or choices[0].message
+     try:
+         if isinstance(chunk, dict):
+             # tools/citations/usage meta
+             try:
+                 chs = chunk.get("choices") or []
+                 if chs:
+                     candidate = chs[0] or {}
+                     tc = (candidate.get("delta") or {}).get("tool_calls") or candidate.get("tool_calls") or []
+                     if tc:
+                         _merge_tool_calls(state, tc)
+                         state.force_func_call = True
+             except Exception:
+                 pass
+
+             # text delta/message content
+             chs = chunk.get("choices") or []
+             if chs:
+                 first = chs[0] or {}
+                 d = first.get("delta") or {}
+                 m = first.get("message") or {}
+                 if "content" in d and d["content"] is not None:
+                     t = _stringify_content(d["content"])
+                     if t:
+                         _append_urls(ctx, state, _extract_http_urls_from_text(t))
+                         return str(t)
+                 if "content" in m and m["content"] is not None:
+                     mc = m["content"]
+                     # inspect for image_url outputs and URLs
+                     _process_message_content_for_outputs(core, ctx, state, mc if isinstance(mc, list) else [])
+                     if isinstance(mc, str):
+                         _append_urls(ctx, state, _extract_http_urls_from_text(mc))
+                         return mc
+                     elif isinstance(mc, list):
+                         out_parts: List[str] = []
+                         for p in mc:
+                             if isinstance(p, dict) and p.get("type") == "text":
+                                 t = p.get("text")
+                                 if isinstance(t, str):
+                                     out_parts.append(t)
+                         if out_parts:
+                             txt = "".join(out_parts)
+                             _append_urls(ctx, state, _extract_http_urls_from_text(txt))
+                             return txt
+
+             # root-level delta/message
+             if isinstance(chunk.get("delta"), dict) and "content" in chunk["delta"]:
+                 t = _stringify_content(chunk["delta"]["content"])
+                 if t:
+                     _append_urls(ctx, state, _extract_http_urls_from_text(t))
+                     return str(t)
+             if isinstance(chunk.get("message"), dict) and "content" in chunk["message"]:
+                 mc = chunk["message"]["content"]
+                 _process_message_content_for_outputs(core, ctx, state, mc if isinstance(mc, list) else [])
+                 if isinstance(mc, str):
+                     _append_urls(ctx, state, _extract_http_urls_from_text(mc))
+                     return mc
+
+             # tail metadata: citations and usage
+             _maybe_collect_tail_meta(state, chunk, ctx=ctx)
+     except Exception:
+         pass
+
+     # 4) SimpleNamespace with choices[0].delta/message
+     try:
+         chs = getattr(chunk, "choices", None)
+         if chs:
+             first = chs[0]
+             delta = getattr(first, "delta", None)
+             message = getattr(first, "message", None)
+             if delta is not None:
+                 try:
+                     tc = getattr(delta, "tool_calls", None)
+                     if tc:
+                         _merge_tool_calls(state, tc if isinstance(tc, list) else [])
+                         state.force_func_call = True
+                 except Exception:
+                     pass
+                 c = getattr(delta, "content", None)
+                 if c is not None:
+                     t = _stringify_content(c)
+                     if t:
+                         _append_urls(ctx, state, _extract_http_urls_from_text(t))
+                         return str(t)
+             if message is not None:
+                 c = getattr(message, "content", None)
+                 if c is not None:
+                     if isinstance(c, list):
+                         _process_message_content_for_outputs(core, ctx, state, c)
+                         # optional: also extract text parts
+                         out_parts: List[str] = []
+                         for p in c:
+                             if isinstance(p, dict) and p.get("type") == "text":
+                                 t = p.get("text")
+                                 if isinstance(t, str):
+                                     out_parts.append(t)
+                         if out_parts:
+                             txt = "".join(out_parts)
+                             _append_urls(ctx, state, _extract_http_urls_from_text(txt))
+                             return txt
+                     else:
+                         t = _stringify_content(c)
+                         if t:
+                             _append_urls(ctx, state, _extract_http_urls_from_text(t))
+                             return str(t)
+     except Exception:
+         pass
+
+     # 5) Plain string
+     if isinstance(chunk, str):
+         _append_urls(ctx, state, _extract_http_urls_from_text(chunk))
+         return chunk
+
+     return None
+
+
+ def xai_extract_tool_calls(response) -> list[dict]:
+     """
+     Extract tool calls from xAI final response (proto or modern SDK/message shapes).
+
+     :param response: xAI final response object or dict
+     :return: List of tool calls in normalized dict format
+     """
+     out: list[dict] = []
+
+     def _append_from_msg(msg_obj):
+         try:
+             if not msg_obj:
+                 return
+             tcs = getattr(msg_obj, "tool_calls", None)
+             if not tcs and isinstance(msg_obj, dict):
+                 tcs = msg_obj.get("tool_calls")
+             if not tcs:
+                 return
+             for tc in tcs:
+                 try:
+                     fn = getattr(tc, "function", None)
+                     if isinstance(tc, dict):
+                         fn = tc.get("function", fn)
+                     name = getattr(fn, "name", None) if fn is not None else None
+                     args = getattr(fn, "arguments", None) if fn is not None else None
+                     if isinstance(fn, dict):
+                         name = fn.get("name", name)
+                         args = fn.get("arguments", args)
+                     if isinstance(args, (dict, list)):
+                         try:
+                             import json as _json
+                             args = _json.dumps(args, ensure_ascii=False)
+                         except Exception:
+                             args = str(args)
+                     out.append({
+                         "id": (getattr(tc, "id", None) if not isinstance(tc, dict) else tc.get("id")) or "",
+                         "type": "function",
+                         "function": {"name": name or "", "arguments": args or "{}"},
+                     })
+                 except Exception:
+                     continue
+         except Exception:
+             pass
+
+     try:
+         if isinstance(response, dict):
+             ch = (response.get("choices") or [])
+             if ch:
+                 _append_from_msg(ch[0].get("message") or {})
+             if "message" in response:
+                 _append_from_msg(response.get("message"))
+             if "output_message" in response:
+                 _append_from_msg(response.get("output_message"))
+             if out:
+                 return out
+     except Exception:
+         pass
+
+     try:
+         _append_from_msg(getattr(response, "message", None))
+         _append_from_msg(getattr(response, "output_message", None))
+         if out:
+             return out
+     except Exception:
+         pass
+
+     try:
+         proto = getattr(response, "proto", None)
+         if not proto:
+             return out
+         choices = getattr(proto, "choices", None) or []
+         if not choices:
+             return out
+         msg = getattr(choices[0], "message", None)
+         _append_from_msg(msg)
+     except Exception:
+         pass
+     return out
+
+
+ def xai_extract_citations(response) -> list[str]:
+     """
+     Extract citations (URLs) from xAI final response if present.
+
+     :param response: xAI final response object
+     :return: List of citation URLs
+     """
+     def _norm_urls(raw) -> List[str]:
+         urls: List[str] = []
+         seen = set()
+         if isinstance(raw, list):
+             for it in raw:
+                 u = None
+                 if isinstance(it, str):
+                     u = it
+                 elif isinstance(it, dict):
+                     u = (it.get("url") or it.get("uri") or
+                          (it.get("source") or {}).get("url") or (it.get("source") or {}).get("uri"))
+                 if isinstance(u, str) and (u.startswith("http://") or u.startswith("https://")):
+                     if u not in seen:
+                         urls.append(u); seen.add(u)
+         return urls
+
+     urls: list[str] = []
+     try:
+         cites = getattr(response, "citations", None)
+         if cites is None and isinstance(response, dict):
+             cites = response.get("citations")
+         urls.extend([u for u in _norm_urls(cites or []) if u not in urls])
+     except Exception:
+         pass
+
+     try:
+         msg = getattr(response, "message", None)
+         if msg is None and isinstance(response, dict):
+             msg = response.get("message")
+         if msg:
+             mc = getattr(msg, "citations", None)
+             if mc is None and isinstance(msg, dict):
+                 mc = msg.get("citations")
+             urls.extend([u for u in _norm_urls(mc or []) if u not in urls])
+     except Exception:
+         pass
+
+     try:
+         out_msg = getattr(response, "output_message", None)
+         if out_msg:
+             mc = getattr(out_msg, "citations", None)
+             if mc is None and isinstance(out_msg, dict):
+                 mc = out_msg.get("citations")
+             urls.extend([u for u in _norm_urls(mc or []) if u not in urls])
+     except Exception:
+         pass
+
+     try:
+         proto = getattr(response, "proto", None)
+         if proto:
+             proto_cites = getattr(proto, "citations", None) or []
+             urls.extend([u for u in _norm_urls(proto_cites) if u not in urls])
+             choices = getattr(proto, "choices", None) or []
+             if choices:
+                 m = getattr(choices[0], "message", None)
+                 if m:
+                     urls.extend([u for u in _norm_urls(getattr(m, "citations", None) or []) if u not in urls])
+     except Exception:
+         pass
+     return urls
+
+
+ def xai_extract_usage(response) -> dict:
+     """
+     Extract usage from xAI final response via proto or modern usage fields. Return {'in','out','reasoning','total'}.
+
+     :param response: xAI final response object
+     :return: Usage dict
+     """
+     def _as_int(v):
+         try:
+             return int(v)
+         except Exception:
+             try:
+                 return int(float(v))
+             except Exception:
+                 return 0
+
+     def _from_usage_dict(usage: Dict[str, Any]) -> dict:
+         p = usage.get("prompt_tokens", usage.get("input_tokens", 0)) or 0
+         c = usage.get("completion_tokens", usage.get("output_tokens", 0)) or 0
+         r = usage.get("reasoning_tokens", 0) or 0
+         t = usage.get("total_tokens")
+         p = _as_int(p); c = _as_int(c); r = _as_int(r)
+         t = _as_int(t if t is not None else (p + c + r))
+         out_total = max(0, t - p) if t else c
+         return {"in": p, "out": out_total, "reasoning": r, "total": t}
+
+     if isinstance(response, dict):
+         u = response.get("usage")
+         if isinstance(u, dict):
+             return _from_usage_dict(u)
+
+     try:
+         u = getattr(response, "usage", None)
+         if isinstance(u, dict):
+             return _from_usage_dict(u)
+     except Exception:
+         pass
+
+     try:
+         proto = getattr(response, "proto", None)
+         usage = getattr(proto, "usage", None) if proto else None
+         if usage:
+             p = _as_int(getattr(usage, "prompt_tokens", 0) or 0)
+             c = _as_int(getattr(usage, "completion_tokens", 0) or 0)
+             r = _as_int(getattr(usage, "reasoning_tokens", 0) or 0)
+             t = _as_int(getattr(usage, "total_tokens", (p + c + r)) or (p + c + r))
+             out_total = max(0, t - p) if t else c
+             return {"in": p, "out": out_total, "reasoning": r, "total": t}
+     except Exception:
+         pass
+
+     return {}
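
For orientation, a minimal sketch of how the new process_xai_sdk_chunk helper could be driven in a streaming loop. The SimpleNamespace stand-ins for the chat state and context item below are assumptions for illustration only; the real application passes its own state and context objects.

# Hypothetical driver for process_xai_sdk_chunk; the state/ctx objects below are
# simplified stand-ins, not pygpt-net's real chat state or context item classes.
from types import SimpleNamespace

from pygpt_net.provider.api.x_ai.stream import process_xai_sdk_chunk

# minimal state carrying only the fields the helper reads or writes
state = SimpleNamespace(
    citations=[], tool_calls=[], image_paths=[],
    force_func_call=False, usage_vendor=None, usage_payload=None,
)
# minimal context item stand-in
ctx = SimpleNamespace(urls=None, images=None, extra=None, msg_id=None)
core = None  # only needed for the image-saving / ctx-persistence paths

# chunks may arrive as plain strings, OpenAI-like dicts, or SDK objects
chunks = [
    "Hello ",
    {"choices": [{"delta": {"content": "world, see https://example.com"}}]},
    {"usage": {"prompt_tokens": 12, "completion_tokens": 7, "total_tokens": 19}},
]

output = ""
for item in chunks:
    delta = process_xai_sdk_chunk(ctx, core, state, item)
    if delta:
        output += delta

print(output)               # "Hello world, see https://example.com"
print(ctx.urls)             # ["https://example.com"]
print(state.usage_payload)  # {'in': 12, 'out': 7, 'reasoning': 0, 'total': 19}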