pygpt-net 2.6.36__py3-none-any.whl → 2.6.38__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygpt_net/CHANGELOG.txt +12 -0
- pygpt_net/__init__.py +3 -3
- pygpt_net/controller/chat/handler/anthropic_stream.py +164 -0
- pygpt_net/controller/chat/handler/google_stream.py +181 -0
- pygpt_net/controller/chat/handler/langchain_stream.py +24 -0
- pygpt_net/controller/chat/handler/llamaindex_stream.py +47 -0
- pygpt_net/controller/chat/handler/openai_stream.py +260 -0
- pygpt_net/controller/chat/handler/utils.py +210 -0
- pygpt_net/controller/chat/handler/worker.py +570 -0
- pygpt_net/controller/chat/handler/xai_stream.py +135 -0
- pygpt_net/controller/chat/stream.py +1 -1
- pygpt_net/controller/ctx/ctx.py +1 -1
- pygpt_net/controller/debug/debug.py +6 -6
- pygpt_net/controller/model/editor.py +3 -0
- pygpt_net/controller/model/importer.py +9 -2
- pygpt_net/controller/plugins/plugins.py +11 -3
- pygpt_net/controller/presets/presets.py +2 -2
- pygpt_net/core/bridge/context.py +35 -35
- pygpt_net/core/bridge/worker.py +40 -16
- pygpt_net/core/ctx/bag.py +7 -2
- pygpt_net/core/ctx/reply.py +17 -2
- pygpt_net/core/db/viewer.py +19 -34
- pygpt_net/core/render/plain/pid.py +12 -1
- pygpt_net/core/render/web/body.py +30 -39
- pygpt_net/core/tabs/tab.py +24 -1
- pygpt_net/data/config/config.json +10 -3
- pygpt_net/data/config/models.json +3 -3
- pygpt_net/data/config/settings.json +105 -0
- pygpt_net/data/css/style.dark.css +2 -3
- pygpt_net/data/css/style.light.css +2 -3
- pygpt_net/data/locale/locale.de.ini +3 -1
- pygpt_net/data/locale/locale.en.ini +19 -1
- pygpt_net/data/locale/locale.es.ini +3 -1
- pygpt_net/data/locale/locale.fr.ini +3 -1
- pygpt_net/data/locale/locale.it.ini +3 -1
- pygpt_net/data/locale/locale.pl.ini +4 -2
- pygpt_net/data/locale/locale.uk.ini +3 -1
- pygpt_net/data/locale/locale.zh.ini +3 -1
- pygpt_net/item/assistant.py +51 -2
- pygpt_net/item/attachment.py +21 -20
- pygpt_net/item/calendar_note.py +19 -2
- pygpt_net/item/ctx.py +115 -2
- pygpt_net/item/index.py +9 -2
- pygpt_net/item/mode.py +9 -6
- pygpt_net/item/model.py +20 -3
- pygpt_net/item/notepad.py +14 -2
- pygpt_net/item/preset.py +42 -2
- pygpt_net/item/prompt.py +8 -2
- pygpt_net/plugin/cmd_files/plugin.py +2 -2
- pygpt_net/provider/api/__init__.py +5 -3
- pygpt_net/provider/api/anthropic/__init__.py +190 -29
- pygpt_net/provider/api/anthropic/audio.py +30 -0
- pygpt_net/provider/api/anthropic/chat.py +341 -0
- pygpt_net/provider/api/anthropic/image.py +25 -0
- pygpt_net/provider/api/anthropic/tools.py +266 -0
- pygpt_net/provider/api/anthropic/vision.py +142 -0
- pygpt_net/provider/api/google/chat.py +2 -2
- pygpt_net/provider/api/google/realtime/client.py +2 -2
- pygpt_net/provider/api/google/tools.py +58 -48
- pygpt_net/provider/api/google/vision.py +7 -1
- pygpt_net/provider/api/openai/chat.py +1 -0
- pygpt_net/provider/api/openai/vision.py +6 -0
- pygpt_net/provider/api/x_ai/__init__.py +247 -0
- pygpt_net/provider/api/x_ai/audio.py +32 -0
- pygpt_net/provider/api/x_ai/chat.py +968 -0
- pygpt_net/provider/api/x_ai/image.py +208 -0
- pygpt_net/provider/api/x_ai/remote.py +262 -0
- pygpt_net/provider/api/x_ai/tools.py +120 -0
- pygpt_net/provider/api/x_ai/vision.py +119 -0
- pygpt_net/provider/core/attachment/json_file.py +2 -2
- pygpt_net/provider/core/config/patch.py +28 -0
- pygpt_net/provider/llms/anthropic.py +4 -2
- pygpt_net/tools/text_editor/tool.py +4 -1
- pygpt_net/tools/text_editor/ui/dialogs.py +1 -1
- pygpt_net/ui/base/config_dialog.py +5 -11
- pygpt_net/ui/dialog/db.py +177 -59
- pygpt_net/ui/dialog/dictionary.py +57 -59
- pygpt_net/ui/dialog/editor.py +3 -2
- pygpt_net/ui/dialog/image.py +1 -1
- pygpt_net/ui/dialog/logger.py +3 -2
- pygpt_net/ui/dialog/models.py +16 -16
- pygpt_net/ui/dialog/plugins.py +63 -60
- pygpt_net/ui/layout/ctx/ctx_list.py +3 -4
- pygpt_net/ui/layout/toolbox/__init__.py +2 -2
- pygpt_net/ui/layout/toolbox/assistants.py +8 -9
- pygpt_net/ui/layout/toolbox/presets.py +2 -2
- pygpt_net/ui/main.py +9 -4
- pygpt_net/ui/widget/element/labels.py +20 -4
- pygpt_net/ui/widget/textarea/editor.py +0 -4
- pygpt_net/ui/widget/textarea/web.py +1 -1
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.38.dist-info}/METADATA +18 -6
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.38.dist-info}/RECORD +95 -76
- pygpt_net/controller/chat/handler/stream_worker.py +0 -1136
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.38.dist-info}/LICENSE +0 -0
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.38.dist-info}/WHEEL +0 -0
- {pygpt_net-2.6.36.dist-info → pygpt_net-2.6.38.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
# ================================================== #
|
|
4
|
+
# This file is a part of PYGPT package #
|
|
5
|
+
# Website: https://pygpt.net #
|
|
6
|
+
# GitHub: https://github.com/szczyglis-dev/py-gpt #
|
|
7
|
+
# MIT License #
|
|
8
|
+
# Created By : Marcin Szczygliński #
|
|
9
|
+
# Updated Date: 2025.09.05 01:00:00 #
|
|
10
|
+
# ================================================== #
|
|
11
|
+
|
|
12
|
+
import json
|
|
13
|
+
from typing import List, Any, Dict, Optional
|
|
14
|
+
|
|
15
|
+
from pygpt_net.item.model import ModelItem
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class Tools:
    def __init__(self, window=None):
        """
        Tools mapper for Anthropic Messages API.

        Converts app-level function definitions into Anthropic tool dicts
        and builds server-side ("remote") tools from config flags.

        :param window: Window instance
        """
        self.window = window

    def _sanitize_schema(self, schema: Any) -> Any:
        """
        Sanitize JSON Schema dict for Anthropic input_schema.

        - Remove unsupported or risky keywords.
        - Normalize 'type' to a single lowercase string (or drop it).
        - Keep 'enum' only on string-typed schemas.
        - Ensure properties/items are recursively valid.

        :param schema: JSON Schema (dict or list)
        :return: Sanitized JSON Schema (dict, or scalar passed through)
        """
        # 1) entry point: if list, take the first element
        if isinstance(schema, list):
            # If it's a list of schemas/types, take the first one
            return self._sanitize_schema(schema[0]) if schema else {}

        if not isinstance(schema, dict):
            return schema  # scalars pass through unchanged

        # 2) remove keys the Anthropic API does not accept
        banned = {
            "unevaluatedProperties",
            "$defs", "$ref", "$schema", "$id",
            "examples", "readOnly", "writeOnly", "nullable",
            "dependentSchemas", "dependentRequired",
            "oneOf", "anyOf", "allOf", "patternProperties", "dependencies",
            "additional_properties",  # underscore variant
            "additionalProperties",   # camelCase variant
        }
        for k in list(schema.keys()):
            if k in banned:
                schema.pop(k, None)

        # 3) normalize 'type'
        t = schema.get("type")

        # a) union list of types -> take the first non-null
        if isinstance(t, list):
            t_no_null = [x for x in t if isinstance(x, str) and x.lower() != "null"]
            schema["type"] = t_no_null[0] if t_no_null else "object"
            t = schema["type"]

        # b) if 'type' is not a string (e.g., dict), try to infer or remove it
        if not isinstance(t, str):
            if "properties" in schema:
                schema["type"] = "object"
            elif "items" in schema:
                schema["type"] = "array"
            elif "enum" in schema and isinstance(schema["enum"], list) and all(
                    isinstance(x, str) for x in schema["enum"]):
                schema["type"] = "string"
            else:
                # no reasonable type — leave without 'type' and continue
                schema.pop("type", None)
        else:
            schema["type"] = t.lower()

        # safe form of type for further comparisons
        t_val = schema.get("type")
        type_l = t_val.lower() if isinstance(t_val, str) else ""

        # 4) enum only for string
        if "enum" in schema and type_l != "string":
            schema.pop("enum", None)

        # 5) Object: sanitize each property, validate 'required'
        if type_l == "object":
            props = schema.get("properties")
            if not isinstance(props, dict):
                props = {}
            clean_props: Dict[str, Any] = {}
            for pname, pval in props.items():
                clean_props[pname] = self._sanitize_schema(pval)
            schema["properties"] = clean_props

            req = schema.get("required")
            # 'required' must be a non-empty list of strings, otherwise drop it
            if not (isinstance(req, list) and all(isinstance(x, str) for x in req) and len(req) > 0):
                schema.pop("required", None)

        # 6) Array: force 'items' into a single dict schema
        elif type_l == "array":
            items = schema.get("items")
            if isinstance(items, list):
                items = items[0] if items else {"type": "string"}
            if not isinstance(items, dict):
                items = {"type": "string"}
            schema["items"] = self._sanitize_schema(items)

        # 7) Recursion over remaining nestings,
        # but skip 'properties' and 'items' — we've already sanitized them
        for k, v in list(schema.items()):
            if k in ("properties", "items"):
                continue
            if isinstance(v, dict):
                schema[k] = self._sanitize_schema(v)
            elif isinstance(v, list):
                schema[k] = [self._sanitize_schema(x) for x in v]

        return schema

    def prepare(self, model: "ModelItem", functions: list) -> List[dict]:
        """
        Prepare Anthropic tool definitions: [{"name","description","input_schema"}].

        :param model: ModelItem (not used here; kept for interface parity)
        :param functions: List of app function dicts ({"name","desc","params"})
        :return: List of tool dicts for Anthropic
        """
        if not functions or not isinstance(functions, list):
            return []

        tools: List[dict] = []
        for fn in functions:
            name = str(fn.get("name") or "").strip()
            if not name:
                continue  # a tool without a name is unusable
            desc = fn.get("desc") or ""

            params: dict = {}
            if fn.get("params"):
                try:
                    params = json.loads(fn["params"])
                except Exception:
                    params = {}  # best-effort: invalid JSON -> empty schema
            params = self._sanitize_schema(params or {})
            # The sanitizer passes non-dict scalars through unchanged
            # (e.g. params JSON '"123"' -> 123); coerce to a dict so the
            # .get()/assignment below cannot raise AttributeError.
            if not isinstance(params, dict):
                params = {}
            if not params.get("type"):
                params["type"] = "object"

            tools.append({
                "name": name,
                "description": desc,
                "input_schema": params or {"type": "object"},
            })

        return tools

    def build_remote_tools(self, model: "ModelItem" = None) -> List[dict]:
        """
        Build Anthropic server tools (remote tools) based on config flags.
        Currently supports: Web Search tool.

        Returns a list of tool dicts to be appended to 'tools' in messages.create.

        :param model: ModelItem
        :return: List of remote tool dicts
        """
        cfg = self.window.core.config
        tools: List[dict] = []

        # sonnet-3.5 is not supported
        if model and model.id and model.id.startswith("claude-3.5"):
            return tools

        # Web Search tool
        if cfg.get("remote_tools.anthropic.web_search"):
            ttype = cfg.get("remote_tools.anthropic.web_search.type", "web_search_20250305")  # stable as of docs
            tname = "web_search"

            tool_def: Dict[str, Any] = {
                "type": ttype,
                "name": tname,
            }

            # Optional params
            max_uses = cfg.get("remote_tools.anthropic.web_search.max_uses")
            if isinstance(max_uses, int) and max_uses > 0:
                tool_def["max_uses"] = max_uses

            def parse_csv_list(key: str) -> list:
                # Accept either a real list or a comma-separated string in config
                raw = cfg.get(key, "")
                if not raw:
                    return []
                if isinstance(raw, list):
                    return [str(x).strip() for x in raw if str(x).strip()]
                return [s.strip() for s in str(raw).split(",") if s.strip()]

            allowed = parse_csv_list("remote_tools.anthropic.web_search.allowed_domains")
            blocked = parse_csv_list("remote_tools.anthropic.web_search.blocked_domains")
            # allowed_domains and blocked_domains are mutually exclusive;
            # allowed takes precedence when both are configured
            if allowed:
                tool_def["allowed_domains"] = allowed
            elif blocked:
                tool_def["blocked_domains"] = blocked

            # Location (approximate)
            loc_city = cfg.get("remote_tools.anthropic.web_search.user_location.city")
            loc_region = cfg.get("remote_tools.anthropic.web_search.user_location.region")
            loc_country = cfg.get("remote_tools.anthropic.web_search.user_location.country")
            loc_tz = cfg.get("remote_tools.anthropic.web_search.user_location.timezone")
            if any([loc_city, loc_region, loc_country, loc_tz]):
                tool_def["user_location"] = {
                    "type": "approximate",
                    "city": str(loc_city) if loc_city else None,
                    "region": str(loc_region) if loc_region else None,
                    "country": str(loc_country) if loc_country else None,
                    "timezone": str(loc_tz) if loc_tz else None,
                }
                # remove None fields
                tool_def["user_location"] = {k: v for k, v in tool_def["user_location"].items() if v is not None}

            tools.append(tool_def)

        return tools

    def merge_tools_dedup(self, primary: List[dict], secondary: List[dict]) -> List[dict]:
        """
        Remove duplicate tools by name, preserving order:

        - First from primary list
        - Then from secondary list if name not already present

        :param primary: Primary list of tool dicts
        :param secondary: Secondary list of tool dicts
        :return: Merged list of tool dicts without duplicates
        """
        result: List[dict] = []
        seen = set()
        for t in primary or []:
            n = t.get("name")
            if n and n not in seen:
                seen.add(n)
                result.append(t)
        for t in secondary or []:
            n = t.get("name")
            if not n or n in seen:
                continue
            seen.add(n)
            result.append(t)
        return result

    def get_all_tools(self, model: "ModelItem", functions: list) -> List[dict]:
        """
        Get combined list of all tools (app functions + remote tools) for Anthropic.

        :param model: ModelItem
        :param functions: List of app function dicts
        :return: Combined list of tool dicts
        """
        base_tools = self.prepare(model, functions)
        remote_tools = self.build_remote_tools(model)
        return self.merge_tools_dedup(base_tools, remote_tools)
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
# ================================================== #
|
|
4
|
+
# This file is a part of PYGPT package #
|
|
5
|
+
# Website: https://pygpt.net #
|
|
6
|
+
# GitHub: https://github.com/szczyglis-dev/py-gpt #
|
|
7
|
+
# MIT License #
|
|
8
|
+
# Created By : Marcin Szczygliński #
|
|
9
|
+
# Updated Date: 2025.09.05 01:00:00 #
|
|
10
|
+
# ================================================== #
|
|
11
|
+
|
|
12
|
+
import base64
|
|
13
|
+
import os
|
|
14
|
+
from typing import Optional, Dict, List, Union
|
|
15
|
+
|
|
16
|
+
from pygpt_net.item.attachment import AttachmentItem
|
|
17
|
+
from pygpt_net.item.ctx import CtxItem
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class Vision:
    def __init__(self, window=None):
        """
        Vision helpers for Anthropic (image input blocks).

        :param window: Window instance
        """
        self.window = window
        self.attachments: Dict[str, str] = {}  # id -> local path of images sent in last call
        self.urls: List[str] = []  # unused for Anthropic (no URL image sources here)
        self.input_tokens = 0  # input tokens counter

    def build_blocks(
            self,
            content: Union[str, list],
            attachments: Optional[Dict[str, "AttachmentItem"]] = None,
    ) -> List[dict]:
        """
        Build image content blocks from local attachments.

        Each existing local image attachment becomes a base64 "image" block.
        Attachments that cannot be read are skipped (best-effort), so a
        single broken file does not abort the whole request.

        :param content: User message text (unused here)
        :param attachments: Attachments dict (id -> AttachmentItem)
        :return: List of Anthropic content blocks
        """
        blocks: List[dict] = []
        self.attachments = {}
        self.urls = []

        if attachments:
            for id_, attachment in attachments.items():
                try:
                    if attachment.path and os.path.exists(attachment.path):
                        if self.is_image(attachment.path):
                            mime = self._guess_mime(attachment.path)
                            with open(attachment.path, "rb") as f:
                                data = f.read()
                            b64 = base64.b64encode(data).decode("utf-8")
                            blocks.append({
                                "type": "image",
                                "source": {
                                    "type": "base64",
                                    "media_type": mime,
                                    "data": b64,
                                }
                            })
                            self.attachments[id_] = attachment.path
                            attachment.consumed = True
                except Exception:
                    continue  # best-effort: skip unreadable attachments

        return blocks

    def is_image(self, path: str) -> bool:
        """
        Check if path looks like an image (by extension, case-insensitive).

        :param path: File path
        :return: True if path has image file extension
        """
        return path.lower().endswith(('.png', '.jpg', '.jpeg', '.tiff', '.bmp', '.gif', '.webp'))

    def _guess_mime(self, path: str) -> str:
        """
        Guess mime type from file extension.

        :param path: File path
        :return: MIME type string (defaults to image/jpeg for unknown extensions)
        """
        ext = os.path.splitext(path)[1].lower().lstrip(".")
        mapping = {
            "jpg": "image/jpeg",
            "jpeg": "image/jpeg",
            "png": "image/png",
            "gif": "image/gif",
            "bmp": "image/bmp",
            "webp": "image/webp",
            "tiff": "image/tiff",
        }
        return mapping.get(ext, "image/jpeg")

    def append_images(self, ctx: "CtxItem"):
        """
        Append sent images paths to context for UI/history.

        :param ctx: CtxItem
        """
        images = self.get_attachments()
        if len(images) > 0:
            ctx.images = self.window.core.filesystem.make_local_list(list(images.values()))

    def get_attachments(self) -> Dict[str, str]:
        """
        Return attachments dict (id -> path).

        :return: Attachments dictionary
        """
        return self.attachments

    def get_urls(self) -> List[str]:
        """
        Return image urls (unused).

        :return: List of image URLs
        """
        return self.urls

    def reset_tokens(self):
        """Reset input tokens counter."""
        self.input_tokens = 0

    def get_used_tokens(self) -> int:
        """
        Return input tokens counter.

        :return: Number of input tokens
        """
        return self.input_tokens

    def reset(self):
        """Reset state (attachments, urls, token counter)."""
        self.attachments = {}
        self.urls = []
        self.input_tokens = 0
|
@@ -401,8 +401,7 @@ class Chat:
|
|
|
401
401
|
# Store only as URL; downloading is out of scope here.
|
|
402
402
|
if uri.startswith("http://") or uri.startswith("https://"):
|
|
403
403
|
urls.append(uri)
|
|
404
|
-
except Exception:
|
|
405
|
-
# Best-effort only
|
|
404
|
+
except Exception as e:
|
|
406
405
|
pass
|
|
407
406
|
|
|
408
407
|
if images:
|
|
@@ -497,6 +496,7 @@ class Chat:
|
|
|
497
496
|
:param multimodal_ctx: MultimodalContext for audio
|
|
498
497
|
:return: List of Part
|
|
499
498
|
"""
|
|
499
|
+
self.window.core.api.google.vision.reset()
|
|
500
500
|
parts: List[Part] = []
|
|
501
501
|
if content:
|
|
502
502
|
parts.append(Part.from_text(text=str(content)))
|
|
@@ -890,7 +890,7 @@ class GoogleLiveClient:
|
|
|
890
890
|
"arguments": json.dumps(args_dict, ensure_ascii=False),
|
|
891
891
|
}
|
|
892
892
|
})
|
|
893
|
-
self._rt_state["force_func_call"] = True
|
|
893
|
+
# self._rt_state["force_func_call"] = True
|
|
894
894
|
self._last_tool_calls = list(self._rt_state["tool_calls"])
|
|
895
895
|
turn_finished = True # let the app run tools now
|
|
896
896
|
|
|
@@ -1004,7 +1004,7 @@ class GoogleLiveClient:
|
|
|
1004
1004
|
if key not in seen:
|
|
1005
1005
|
self._rt_state["tool_calls"].append(c)
|
|
1006
1006
|
seen.add(key)
|
|
1007
|
-
self._rt_state["force_func_call"] = True
|
|
1007
|
+
# self._rt_state["force_func_call"] = True
|
|
1008
1008
|
self._last_tool_calls = list(self._rt_state["tool_calls"])
|
|
1009
1009
|
turn_finished = True
|
|
1010
1010
|
|
|
@@ -28,61 +28,70 @@ class Tools:
|
|
|
28
28
|
# -------- SANITIZER --------
|
|
29
29
|
def _sanitize_schema(self, schema: Any) -> Any:
|
|
30
30
|
"""
|
|
31
|
-
Sanitize JSON Schema dict
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
:param schema: Any JSON Schema as dict or list
|
|
42
|
-
:return: Sanitized schema dict
|
|
31
|
+
Sanitize a JSON Schema dict for Google GenAI (function parameters).
|
|
32
|
+
|
|
33
|
+
Key points:
|
|
34
|
+
- Remove unsupported JSON Schema keywords (additionalProperties, oneOf, $ref, ...).
|
|
35
|
+
- Normalize "type" so that it's either a single lowercase string or absent.
|
|
36
|
+
Handle lists (unions), non-string types (e.g., dict), and infer a type when possible.
|
|
37
|
+
- Keep "enum" only when type is string.
|
|
38
|
+
- For objects, sanitize only "properties" (each property's schema) and validate "required".
|
|
39
|
+
- For arrays, sanitize "items" into a single schema (object, not list).
|
|
40
|
+
- Do not recurse into "properties" itself as a map, nor into "required"/"enum" as they are scalars/lists.
|
|
43
41
|
"""
|
|
42
|
+
# 1) Fast exits
|
|
44
43
|
if isinstance(schema, list):
|
|
45
|
-
|
|
44
|
+
# Only descend into lists of dicts (complex schemas). For scalar lists (required/enum), return as is.
|
|
45
|
+
if schema and all(isinstance(x, dict) for x in schema):
|
|
46
|
+
return [self._sanitize_schema(x) for x in schema]
|
|
47
|
+
return schema
|
|
46
48
|
|
|
47
49
|
if not isinstance(schema, dict):
|
|
48
50
|
return schema
|
|
49
51
|
|
|
52
|
+
# 2) Remove unsupported/problematic keywords for Google function parameters
|
|
50
53
|
banned = {
|
|
51
|
-
"additionalProperties",
|
|
52
|
-
"
|
|
53
|
-
"
|
|
54
|
-
"
|
|
55
|
-
"
|
|
56
|
-
"
|
|
57
|
-
"dependentRequired",
|
|
58
|
-
"oneOf",
|
|
59
|
-
"anyOf",
|
|
60
|
-
"allOf",
|
|
61
|
-
"$defs",
|
|
62
|
-
"$ref",
|
|
63
|
-
"$schema",
|
|
64
|
-
"$id",
|
|
65
|
-
"examples",
|
|
66
|
-
"readOnly",
|
|
67
|
-
"writeOnly",
|
|
68
|
-
"nullable",
|
|
54
|
+
"additionalProperties", "additional_properties",
|
|
55
|
+
"unevaluatedProperties", "patternProperties",
|
|
56
|
+
"dependencies", "dependentSchemas", "dependentRequired",
|
|
57
|
+
"oneOf", "anyOf", "allOf",
|
|
58
|
+
"$defs", "$ref", "$schema", "$id",
|
|
59
|
+
"examples", "readOnly", "writeOnly", "nullable",
|
|
69
60
|
}
|
|
70
61
|
for k in list(schema.keys()):
|
|
71
62
|
if k in banned:
|
|
72
63
|
schema.pop(k, None)
|
|
73
64
|
|
|
74
|
-
#
|
|
65
|
+
# 3) Normalize "type" safely
|
|
75
66
|
t = schema.get("type")
|
|
76
|
-
if isinstance(t, list):
|
|
77
|
-
t_no_null = [x for x in t if x != "null"]
|
|
78
|
-
schema["type"] = t_no_null[0] if t_no_null else "string"
|
|
79
67
|
|
|
80
|
-
#
|
|
81
|
-
if
|
|
68
|
+
# a) If it's a list (union), pick the first non-null string, otherwise default to "object"
|
|
69
|
+
if isinstance(t, list):
|
|
70
|
+
t_no_null = [x for x in t if isinstance(x, str) and x.lower() != "null"]
|
|
71
|
+
schema["type"] = t_no_null[0] if t_no_null else "object"
|
|
72
|
+
t = schema["type"]
|
|
73
|
+
|
|
74
|
+
# b) If "type" is not a string (could be dict or missing), try to infer; otherwise drop it
|
|
75
|
+
if not isinstance(t, str):
|
|
76
|
+
if isinstance(schema.get("properties"), dict):
|
|
77
|
+
schema["type"] = "object"
|
|
78
|
+
elif "items" in schema:
|
|
79
|
+
schema["type"] = "array"
|
|
80
|
+
elif isinstance(schema.get("enum"), list) and all(isinstance(x, str) for x in schema["enum"]):
|
|
81
|
+
schema["type"] = "string"
|
|
82
|
+
else:
|
|
83
|
+
schema.pop("type", None)
|
|
84
|
+
else:
|
|
85
|
+
schema["type"] = t.lower()
|
|
86
|
+
|
|
87
|
+
type_l = schema["type"].lower() if isinstance(schema.get("type"), str) else ""
|
|
88
|
+
|
|
89
|
+
# 4) Keep enum only for string-typed schemas
|
|
90
|
+
if "enum" in schema and type_l != "string":
|
|
82
91
|
schema.pop("enum", None)
|
|
83
92
|
|
|
84
|
-
#
|
|
85
|
-
if
|
|
93
|
+
# 5) Objects: sanitize properties and required
|
|
94
|
+
if type_l == "object":
|
|
86
95
|
props = schema.get("properties")
|
|
87
96
|
if not isinstance(props, dict):
|
|
88
97
|
props = {}
|
|
@@ -92,25 +101,26 @@ class Tools:
|
|
|
92
101
|
schema["properties"] = clean_props
|
|
93
102
|
|
|
94
103
|
req = schema.get("required")
|
|
95
|
-
if not isinstance(req, list)
|
|
96
|
-
schema.pop("required", None)
|
|
97
|
-
elif len(req) == 0:
|
|
104
|
+
if not (isinstance(req, list) and all(isinstance(x, str) for x in req) and len(req) > 0):
|
|
98
105
|
schema.pop("required", None)
|
|
99
106
|
|
|
100
|
-
#
|
|
101
|
-
|
|
107
|
+
# 6) Arrays: ensure "items" is a single dict schema
|
|
108
|
+
elif type_l == "array":
|
|
102
109
|
items = schema.get("items")
|
|
103
|
-
if isinstance(items, list)
|
|
104
|
-
items = items[0]
|
|
110
|
+
if isinstance(items, list):
|
|
111
|
+
items = items[0] if items else {"type": "string"}
|
|
105
112
|
if not isinstance(items, dict):
|
|
106
113
|
items = {"type": "string"}
|
|
107
114
|
schema["items"] = self._sanitize_schema(items)
|
|
108
115
|
|
|
109
|
-
#
|
|
116
|
+
# 7) Recurse into the remaining nested dict/list values,
|
|
117
|
+
# but skip "properties", "items", "required", and "enum" (already handled)
|
|
110
118
|
for k, v in list(schema.items()):
|
|
119
|
+
if k in ("properties", "items", "required", "enum"):
|
|
120
|
+
continue
|
|
111
121
|
if isinstance(v, dict):
|
|
112
122
|
schema[k] = self._sanitize_schema(v)
|
|
113
|
-
elif isinstance(v, list):
|
|
123
|
+
elif isinstance(v, list) and v and all(isinstance(x, dict) for x in v):
|
|
114
124
|
schema[k] = [self._sanitize_schema(x) for x in v]
|
|
115
125
|
|
|
116
126
|
return schema
|
|
@@ -369,6 +369,12 @@ class Vision:
|
|
|
369
369
|
"""Reset input tokens counter"""
|
|
370
370
|
self.input_tokens = 0
|
|
371
371
|
|
|
372
|
+
def reset(self):
|
|
373
|
+
"""Reset attachments, urls and input tokens"""
|
|
374
|
+
self.attachments = {}
|
|
375
|
+
self.urls = []
|
|
376
|
+
self.input_tokens = 0
|
|
377
|
+
|
|
372
378
|
def get_attachments(self) -> Dict[str, str]:
|
|
373
379
|
"""
|
|
374
380
|
Get attachments
|