pygpt-net 2.6.8__py3-none-any.whl → 2.6.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygpt_net/CHANGELOG.txt +12 -0
- pygpt_net/__init__.py +3 -3
- pygpt_net/app.py +4 -0
- pygpt_net/controller/ctx/common.py +9 -3
- pygpt_net/controller/ctx/ctx.py +19 -17
- pygpt_net/controller/kernel/kernel.py +1 -2
- pygpt_net/core/agents/runner.py +19 -0
- pygpt_net/core/agents/tools.py +93 -52
- pygpt_net/core/render/web/body.py +11 -33
- pygpt_net/core/render/web/renderer.py +52 -79
- pygpt_net/data/config/config.json +4 -3
- pygpt_net/data/config/models.json +3 -3
- pygpt_net/data/config/presets/agent_openai_supervisor.json +54 -0
- pygpt_net/data/config/presets/agent_supervisor.json +52 -0
- pygpt_net/data/config/settings.json +14 -0
- pygpt_net/data/locale/locale.de.ini +2 -0
- pygpt_net/data/locale/locale.en.ini +2 -0
- pygpt_net/data/locale/locale.es.ini +2 -0
- pygpt_net/data/locale/locale.fr.ini +2 -0
- pygpt_net/data/locale/locale.it.ini +2 -0
- pygpt_net/data/locale/locale.pl.ini +3 -1
- pygpt_net/data/locale/locale.uk.ini +2 -0
- pygpt_net/data/locale/locale.zh.ini +2 -0
- pygpt_net/plugin/google/config.py +306 -1
- pygpt_net/plugin/google/plugin.py +22 -0
- pygpt_net/plugin/google/worker.py +579 -3
- pygpt_net/provider/agents/llama_index/supervisor_workflow.py +116 -0
- pygpt_net/provider/agents/llama_index/workflow/supervisor.py +303 -0
- pygpt_net/provider/agents/openai/supervisor.py +361 -0
- pygpt_net/provider/core/config/patch.py +11 -0
- pygpt_net/provider/core/preset/patch.py +18 -0
- pygpt_net/ui/main.py +1 -1
- pygpt_net/ui/widget/lists/context.py +10 -1
- pygpt_net/ui/widget/textarea/web.py +47 -4
- {pygpt_net-2.6.8.dist-info → pygpt_net-2.6.10.dist-info}/METADATA +93 -29
- {pygpt_net-2.6.8.dist-info → pygpt_net-2.6.10.dist-info}/RECORD +39 -34
- {pygpt_net-2.6.8.dist-info → pygpt_net-2.6.10.dist-info}/LICENSE +0 -0
- {pygpt_net-2.6.8.dist-info → pygpt_net-2.6.10.dist-info}/WHEEL +0 -0
- {pygpt_net-2.6.8.dist-info → pygpt_net-2.6.10.dist-info}/entry_points.txt +0 -0
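
Below is a minimal sketch of the command payload shape that the new Google Docs / Maps / Colab handlers in `pygpt_net/plugin/google/worker.py` consume, inferred from the dispatch and `make_response` calls shown in the diff that follows; the command name and parameter values here are illustrative only.

```python
# Hypothetical command item, shaped like the dicts the worker dispatches on:
# item["cmd"] selects the handler, item["params"] carries its arguments
# (see the worker.py hunks below).
item = {
    "cmd": "docs_create",                  # routed to Worker.cmd_docs_create(item)
    "params": {"title": "Meeting notes"},  # illustrative value
}

# On success, cmd_docs_create() returns self.make_response(item, data), where
# data carries "documentId", "title", and "link" (per the diff below).
```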
pygpt_net/plugin/google/worker.py

@@ -17,6 +17,8 @@ import io
 import json
 import os
 import re
+
+from uuid import uuid4
 from email.message import EmailMessage
 from email.mime.base import MIMEBase
 from email.mime.multipart import MIMEMultipart
@@ -32,7 +34,7 @@ from pygpt_net.plugin.base.worker import BaseWorker, BaseSignals
 # Google libs
 from googleapiclient.discovery import build
 from googleapiclient.errors import HttpError
-from googleapiclient.http import MediaIoBaseDownload, MediaFileUpload
+from googleapiclient.http import MediaIoBaseDownload, MediaFileUpload, MediaIoBaseUpload
 from google.oauth2.credentials import Credentials
 from google.oauth2.service_account import Credentials as ServiceAccountCredentials
 from google_auth_oauthlib.flow import InstalledAppFlow
@@ -49,6 +51,11 @@ try:
 except Exception:
     gkeepapi = None
 
+try:
+    import requests  # for Google Maps REST
+except Exception:
+    requests = None
+
 
 class WorkerSignals(BaseSignals):
     pass
@@ -72,12 +79,13 @@ class Worker(BaseWorker):
         "https://www.googleapis.com/auth/keep",
         "https://www.googleapis.com/auth/keep.readonly",
     ]
+    DOCS_SCOPES = ["https://www.googleapis.com/auth/documents"]
 
     ALL_SCOPES = sorted(
-        set(GMAIL_SCOPES + CAL_SCOPES + DRIVE_SCOPES + PEOPLE_SCOPES + YT_SCOPES)
+        set(GMAIL_SCOPES + CAL_SCOPES + DRIVE_SCOPES + PEOPLE_SCOPES + YT_SCOPES + DOCS_SCOPES)
     )
     ALL_SCOPES_WITH_KEEP = sorted(
-        set(GMAIL_SCOPES + CAL_SCOPES + DRIVE_SCOPES + PEOPLE_SCOPES + YT_SCOPES + KEEP_SCOPES)
+        set(GMAIL_SCOPES + CAL_SCOPES + DRIVE_SCOPES + PEOPLE_SCOPES + YT_SCOPES + DOCS_SCOPES + KEEP_SCOPES)
     )
 
     def __init__(self, *args, **kwargs):
@@ -159,6 +167,56 @@ class Worker(BaseWorker):
                 elif item["cmd"] == "contacts_add":
                     response = self.cmd_contacts_add(item)
 
+                # Google Docs
+                elif item["cmd"] == "docs_create":
+                    response = self.cmd_docs_create(item)
+                elif item["cmd"] == "docs_get":
+                    response = self.cmd_docs_get(item)
+                elif item["cmd"] == "docs_list":
+                    response = self.cmd_docs_list(item)
+                elif item["cmd"] == "docs_append_text":
+                    response = self.cmd_docs_append_text(item)
+                elif item["cmd"] == "docs_replace_text":
+                    response = self.cmd_docs_replace_text(item)
+                elif item["cmd"] == "docs_insert_heading":
+                    response = self.cmd_docs_insert_heading(item)
+                elif item["cmd"] == "docs_export":
+                    response = self.cmd_docs_export(item)
+                elif item["cmd"] == "docs_copy_from_template":
+                    response = self.cmd_docs_copy_from_template(item)
+
+                # Google Maps
+                elif item["cmd"] == "maps_geocode":
+                    response = self.cmd_maps_geocode(item)
+                elif item["cmd"] == "maps_reverse_geocode":
+                    response = self.cmd_maps_reverse_geocode(item)
+                elif item["cmd"] == "maps_directions":
+                    response = self.cmd_maps_directions(item)
+                elif item["cmd"] == "maps_distance_matrix":
+                    response = self.cmd_maps_distance_matrix(item)
+                elif item["cmd"] == "maps_places_textsearch":
+                    response = self.cmd_maps_places_textsearch(item)
+                elif item["cmd"] == "maps_places_nearby":
+                    response = self.cmd_maps_places_nearby(item)
+                elif item["cmd"] == "maps_static_map":
+                    response = self.cmd_maps_static_map(item)
+
+                # Google Colab
+                elif item["cmd"] == "colab_list_notebooks":
+                    response = self.cmd_colab_list_notebooks(item)
+                elif item["cmd"] == "colab_create_notebook":
+                    response = self.cmd_colab_create_notebook(item)
+                elif item["cmd"] == "colab_add_code_cell":
+                    response = self.cmd_colab_add_code_cell(item)
+                elif item["cmd"] == "colab_add_markdown_cell":
+                    response = self.cmd_colab_add_markdown_cell(item)
+                elif item["cmd"] == "colab_get_link":
+                    response = self.cmd_colab_get_link(item)
+                elif item["cmd"] == "colab_rename":
+                    response = self.cmd_colab_rename(item)
+                elif item["cmd"] == "colab_duplicate":
+                    response = self.cmd_colab_duplicate(item)
+
                 if response:
                     responses.append(response)
 
@@ -798,6 +856,524 @@ class Worker(BaseWorker):
         created = svc.people().createContact(body=body).execute()
         return self.make_response(item, created)
 
+    # -------------- Google Docs --------------
+
+    def _docs_service(self):
+        return self._service("docs", "v1", scopes=self.DOCS_SCOPES)
+
+    def _docs_end_index(self, doc: Dict[str, Any]) -> int:
+        content = (doc.get("body") or {}).get("content") or []
+        if not content:
+            return 1
+        return content[-1].get("endIndex", 1)
+
+    def _docs_extract_text(self, doc: Dict[str, Any]) -> str:
+        # Extract plain text from document structure
+        out = []
+        for elem in (doc.get("body") or {}).get("content", []):
+            para = (elem.get("paragraph") or {})
+            for run in (para.get("elements") or []):
+                tr = run.get("textRun")
+                if tr and "content" in tr:
+                    out.append(tr["content"])
+        return "".join(out)
+
+    def _drive_meta(self, svc, file_id: str, fields: str = "id, name, mimeType, parents"):
+        return svc.files().get(fileId=file_id, fields=fields).execute()
+
+    def _resolve_drive_id(self, svc, file_id: Optional[str], path: Optional[str]) -> Optional[str]:
+        if file_id:
+            return file_id
+        if path:
+            node = self._drive_find_by_path(svc, path)
+            if node:
+                return node["id"]
+        return None
+
+    def cmd_docs_create(self, item: dict) -> dict:
+        p = item.get("params", {})
+        title = p.get("title") or "Untitled"
+        svc = self._docs_service()
+        doc = svc.documents().create(body={"title": title}).execute()
+        doc_id = doc.get("documentId")
+        link = f"https://docs.google.com/document/d/{doc_id}/edit"
+        return self.make_response(item, {"documentId": doc_id, "title": title, "link": link})
+
+    def cmd_docs_get(self, item: dict) -> dict:
+        p = item.get("params", {})
+        doc_id = p.get("document_id")
+        if not doc_id and p.get("path"):
+            dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+            doc_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        if not doc_id:
+            return self.make_response(item, "Param 'document_id' or 'path' required")
+        svc = self._docs_service()
+        doc = svc.documents().get(documentId=doc_id).execute()
+        text = self._docs_extract_text(doc)
+        return self.make_response(item, {"document": doc, "text": text})
+
+    def cmd_docs_list(self, item: dict) -> dict:
+        p = item.get("params", {})
+        q_extra = p.get("q")
+        q = "mimeType = 'application/vnd.google-apps.document' and trashed=false"
+        if q_extra:
+            # simple name filter
+            name = q_extra.replace("'", "\\'")
+            q += f" and name contains '{name}'"
+        page_size = int(p.get("page_size", 100))
+        svc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        res = svc.files().list(q=q, pageSize=page_size, fields="files(id,name,parents,modifiedTime)").execute()
+        return self.make_response(item, res.get("files", []))
+
+    def cmd_docs_append_text(self, item: dict) -> dict:
+        p = item.get("params", {})
+        doc_id = p.get("document_id")
+        text = p.get("text") or ""
+        newline = bool(p.get("newline", True))
+        if not (doc_id or p.get("path")):
+            return self.make_response(item, "Param 'document_id' or 'path' required")
+        if not text:
+            return self.make_response(item, "Param 'text' required")
+        if not doc_id and p.get("path"):
+            dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+            doc_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        svc = self._docs_service()
+        doc = svc.documents().get(documentId=doc_id).execute()
+        end_idx = self._docs_end_index(doc)
+        ins_text = (("\n" if newline else "") + text)
+        reqs = [{"insertText": {"location": {"index": end_idx - 1}, "text": ins_text}}]
+        updated = svc.documents().batchUpdate(documentId=doc_id, body={"requests": reqs}).execute()
+        return self.make_response(item,
+            {"documentId": doc_id, "status": "OK", "updates": updated.get("replies", [])})
+
+    def cmd_docs_replace_text(self, item: dict) -> dict:
+        p = item.get("params", {})
+        doc_id = p.get("document_id")
+        if not doc_id and p.get("path"):
+            dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+            doc_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        find = p.get("find")
+        replace = p.get("replace", "")
+        match_case = bool(p.get("matchCase", False))
+        if not (doc_id and find):
+            return self.make_response(item, "Params 'document_id' (or 'path') and 'find' required")
+        svc = self._docs_service()
+        reqs = [{
+            "replaceAllText": {
+                "containsText": {"text": find, "matchCase": match_case},
+                "replaceText": replace
+            }
+        }]
+        out = svc.documents().batchUpdate(documentId=doc_id, body={"requests": reqs}).execute()
+        return self.make_response(item, {"documentId": doc_id, "status": "OK", "updates": out.get("replies", [])})
+
+    def cmd_docs_insert_heading(self, item: dict) -> dict:
+        p = item.get("params", {})
+        doc_id = p.get("document_id")
+        if not doc_id and p.get("path"):
+            dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+            doc_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        text = p.get("text") or ""
+        level = int(p.get("level", 1))
+        level = min(max(level, 1), 6)
+        if not (doc_id and text):
+            return self.make_response(item, "Params 'document_id' (or 'path') and 'text' required")
+        svc = self._docs_service()
+        doc = svc.documents().get(documentId=doc_id).execute()
+        start = self._docs_end_index(doc) - 1
+        ins = text + "\n"
+        reqs = [
+            {"insertText": {"location": {"index": start}, "text": ins}},
+            {"updateParagraphStyle": {
+                "range": {"startIndex": start, "endIndex": start + len(ins)},
+                "paragraphStyle": {"namedStyleType": f"HEADING_{level}"},
+                "fields": "namedStyleType"
+            }},
+        ]
+        out = svc.documents().batchUpdate(documentId=doc_id, body={"requests": reqs}).execute()
+        return self.make_response(item, {"documentId": doc_id, "status": "OK", "updates": out.get("replies", [])})
+
+    def cmd_docs_export(self, item: dict) -> dict:
+        p = item.get("params", {})
+        doc_id = p.get("document_id")
+        if not doc_id and p.get("path"):
+            dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+            doc_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        mime = p.get("mime") or "application/pdf"
+        out_path = self.prepare_path(p.get("out") or "")
+        if not doc_id:
+            return self.make_response(item, "Param 'document_id' or 'path' required")
+        dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        meta = dsvc.files().get(fileId=doc_id, fields="id, name").execute()
+        target = out_path or self.prepare_path(meta["name"] + (".pdf" if mime == "application/pdf" else ""))
+        fh = io.FileIO(target, "wb")
+        try:
+            req = dsvc.files().export_media(fileId=doc_id, mimeType=mime)
+            downloader = MediaIoBaseDownload(fh, req)
+            done = False
+            while not done:
+                status, done = downloader.next_chunk()
+        finally:
+            fh.close()
+        return self.make_response(item, {"path": target, "id": meta["id"], "name": meta["name"]})
+
+    def cmd_docs_copy_from_template(self, item: dict) -> dict:
+        p = item.get("params", {})
+        template_id = p.get("template_id")
+        new_title = p.get("title") or "Copy"
+        if not template_id and p.get("template_path"):
+            dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+            template_id = self._resolve_drive_id(dsvc, None, p.get("template_path"))
+        if not template_id:
+            return self.make_response(item, "Param 'template_id' or 'template_path' required")
+        dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        copied = dsvc.files().copy(fileId=template_id, body={"name": new_title}).execute()
+        link = f"https://docs.google.com/document/d/{copied['id']}/edit"
+        return self.make_response(item, {"id": copied["id"], "name": copied["name"], "link": link})
+
+    # -------------- Google Maps (REST, API key) --------------
+
+    def _maps_key(self) -> Optional[str]:
+        return self.plugin.get_option_value("google_maps_api_key") or self.plugin.get_option_value("maps_api_key")
+
+    def _check_requests(self):
+        if requests is None:
+            raise RuntimeError("Python 'requests' not installed - required for Google Maps calls.")
+
+    def cmd_maps_geocode(self, item: dict) -> dict:
+        self._check_requests()
+        p = item.get("params", {})
+        key = self._maps_key()
+        if not key:
+            return self.make_response(item, "Missing 'google_maps_api_key' in plugin options")
+        address = p.get("address")
+        if not address:
+            return self.make_response(item, "Param 'address' required")
+        params = {"address": address, "key": key}
+        if p.get("language"):
+            params["language"] = p["language"]
+        if p.get("region"):
+            params["region"] = p["region"]
+        r = requests.get("https://maps.googleapis.com/maps/api/geocode/json", params=params, timeout=20)
+        data = r.json()
+        return self.make_response(item, data)
+
+    def cmd_maps_reverse_geocode(self, item: dict) -> dict:
+        self._check_requests()
+        p = item.get("params", {})
+        key = self._maps_key()
+        if not key:
+            return self.make_response(item, "Missing 'google_maps_api_key'")
+        lat = p.get("lat")
+        lng = p.get("lng")
+        if not (lat and lng):
+            return self.make_response(item, "Params 'lat' and 'lng' required")
+        params = {"latlng": f"{lat},{lng}", "key": key}
+        if p.get("language"):
+            params["language"] = p["language"]
+        r = requests.get("https://maps.googleapis.com/maps/api/geocode/json", params=params, timeout=20)
+        return self.make_response(item, r.json())
+
+    def cmd_maps_directions(self, item: dict) -> dict:
+        self._check_requests()
+        p = item.get("params", {})
+        key = self._maps_key()
+        if not key:
+            return self.make_response(item, "Missing 'google_maps_api_key'")
+        origin = p.get("origin")
+        destination = p.get("destination")
+        if not (origin and destination):
+            return self.make_response(item, "Params 'origin' and 'destination' required")
+        params = {
+            "origin": origin,
+            "destination": destination,
+            "mode": p.get("mode", "driving"),
+            "key": key,
+        }
+        if p.get("waypoints"):
+            if isinstance(p["waypoints"], list):
+                params["waypoints"] = "|".join(p["waypoints"])
+            else:
+                params["waypoints"] = str(p["waypoints"])
+        if p.get("departure_time"):
+            params["departure_time"] = p["departure_time"]  # 'now' or epoch seconds
+        r = requests.get("https://maps.googleapis.com/maps/api/directions/json", params=params, timeout=30)
+        return self.make_response(item, r.json())
+
+    def cmd_maps_distance_matrix(self, item: dict) -> dict:
+        self._check_requests()
+        p = item.get("params", {})
+        key = self._maps_key()
+        if not key:
+            return self.make_response(item, "Missing 'google_maps_api_key'")
+        origins = p.get("origins")
+        destinations = p.get("destinations")
+        if not (origins and destinations):
+            return self.make_response(item, "Params 'origins' and 'destinations' required")
+        if isinstance(origins, list):
+            origins = "|".join(origins)
+        if isinstance(destinations, list):
+            destinations = "|".join(destinations)
+        params = {
+            "origins": origins,
+            "destinations": destinations,
+            "mode": p.get("mode", "driving"),
+            "key": key,
+        }
+        r = requests.get("https://maps.googleapis.com/maps/api/distancematrix/json", params=params, timeout=20)
+        return self.make_response(item, r.json())
+
+    def cmd_maps_places_textsearch(self, item: dict) -> dict:
+        self._check_requests()
+        p = item.get("params", {})
+        key = self._maps_key()
+        if not key:
+            return self.make_response(item, "Missing 'google_maps_api_key'")
+        query = p.get("query")
+        if not query:
+            return self.make_response(item, "Param 'query' required")
+        params = {"query": query, "key": key}
+        if p.get("location"):
+            params["location"] = p["location"]  # "lat,lng"
+        if p.get("radius"):
+            params["radius"] = int(p["radius"])
+        if p.get("type"):
+            params["type"] = p["type"]
+        if p.get("opennow") is not None:
+            params["opennow"] = "true" if p.get("opennow") else "false"
+        r = requests.get("https://maps.googleapis.com/maps/api/place/textsearch/json", params=params, timeout=20)
+        return self.make_response(item, r.json())
+
+    def cmd_maps_places_nearby(self, item: dict) -> dict:
+        self._check_requests()
+        p = item.get("params", {})
+        key = self._maps_key()
+        if not key:
+            return self.make_response(item, "Missing 'google_maps_api_key'")
+        location = p.get("location")
+        radius = p.get("radius")
+        if not (location and radius):
+            return self.make_response(item, "Params 'location' (lat,lng) and 'radius' required")
+        params = {"location": location, "radius": int(radius), "key": key}
+        if p.get("keyword"):
+            params["keyword"] = p["keyword"]
+        if p.get("type"):
+            params["type"] = p["type"]
+        r = requests.get("https://maps.googleapis.com/maps/api/place/nearbysearch/json", params=params, timeout=20)
+        return self.make_response(item, r.json())
+
+    def cmd_maps_static_map(self, item: dict) -> dict:
+        self._check_requests()
+        p = item.get("params", {})
+        key = self._maps_key()
+        if not key:
+            return self.make_response(item, "Missing 'google_maps_api_key'")
+        center = p.get("center")
+        zoom = p.get("zoom", 13)
+        size = p.get("size", "600x400")
+        markers = p.get("markers")  # list of "lat,lng" or dict spec
+        maptype = p.get("maptype", "roadmap")
+        out_path = self.prepare_path(p.get("out") or "static_map.png")
+        params = {"key": key, "zoom": zoom, "size": size, "maptype": maptype}
+        if center:
+            params["center"] = center
+        if markers:
+            if isinstance(markers, list):
+                for m in markers:
+                    params.setdefault("markers", [])
+            # requests will encode list as repeated params
+            params["markers"] = markers if isinstance(markers, list) else [markers]
+        r = requests.get("https://maps.googleapis.com/maps/api/staticmap", params=params, timeout=20)
+        if r.status_code != 200 or r.headers.get("Content-Type", "").startswith("application/json"):
+            try:
+                return self.make_response(item, r.json())
+            except Exception:
+                return self.make_response(item, f"Static map error: HTTP {r.status_code}")
+        with open(out_path, "wb") as f:
+            f.write(r.content)
+        return self.make_response(item, {"path": out_path, "bytes": len(r.content)})
+
+    # -------------- Google Colab (via Drive + ipynb JSON) --------------
+
+    def _colab_nb_template(self, first_md: Optional[str] = None, first_code: Optional[str] = None) -> Dict[
+        str, Any]:
+        md_cell = None
+        code_cell = None
+        if first_md:
+            md_cell = {
+                "cell_type": "markdown",
+                "metadata": {"id": str(uuid4())},
+                "source": [first_md if first_md.endswith("\n") else first_md + "\n"],
+            }
+        if first_code:
+            code_cell = {
+                "cell_type": "code",
+                "metadata": {"id": str(uuid4())},
+                "source": [s if s.endswith("\n") else s + "\n" for s in first_code.splitlines()],
+                "outputs": [],
+                "execution_count": None,
+            }
+        cells = []
+        if md_cell:
+            cells.append(md_cell)
+        if code_cell:
+            cells.append(code_cell)
+        if not cells:
+            cells = [{
+                "cell_type": "markdown",
+                "metadata": {"id": str(uuid4())},
+                "source": ["# New notebook\n"],
+            }]
+        return {
+            "nbformat": 4,
+            "nbformat_minor": 5,
+            "metadata": {"colab": {"provenance": []}},
+            "cells": cells,
+        }
+
+    def _colab_download_nb(self, dsvc, file_id: str) -> Dict[str, Any]:
+        req = dsvc.files().get_media(fileId=file_id)
+        buf = io.BytesIO()
+        downloader = MediaIoBaseDownload(buf, req)
+        done = False
+        while not done:
+            status, done = downloader.next_chunk()
+        buf.seek(0)
+        return json.loads(buf.read().decode("utf-8"))
+
+    def _colab_upload_nb(self, dsvc, file_id: str, nb: Dict[str, Any]) -> Dict[str, Any]:
+        data = json.dumps(nb, ensure_ascii=False).encode("utf-8")
+        media = MediaIoBaseUpload(io.BytesIO(data), mimetype="application/json", resumable=False)
+        return dsvc.files().update(fileId=file_id, media_body=media, fields="id, name, mimeType").execute()
+
+    def _ensure_parent_id(self, dsvc, remote_parent_path: Optional[str]) -> Optional[str]:
+        if not remote_parent_path:
+            return None
+        node = self._drive_find_by_path(dsvc, remote_parent_path)
+        if not node:
+            return None
+        return node["id"]
+
+    def cmd_colab_list_notebooks(self, item: dict) -> dict:
+        p = item.get("params", {})
+        page_size = int(p.get("page_size", 100))
+        q = "trashed=false and (mimeType='application/vnd.google.colaboratory' or name contains '.ipynb')"
+        if p.get("q"):
+            name = p["q"].replace("'", "\\'")
+            q += f" and name contains '{name}'"
+        dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        res = dsvc.files().list(q=q, pageSize=page_size,
+                                fields="files(id,name,mimeType,parents,modifiedTime)").execute()
+        return self.make_response(item, res.get("files", []))
+
+    def cmd_colab_create_notebook(self, item: dict) -> dict:
+        p = item.get("params", {})
+        name = p.get("name") or "notebook.ipynb"
+        if not name.endswith(".ipynb"):
+            name += ".ipynb"
+        first_md = p.get("markdown")
+        first_code = p.get("code")
+        dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        parents = []
+        parent_path = p.get("remote_parent_path")
+        if parent_path:
+            pid = self._ensure_parent_id(dsvc, parent_path)
+            if not pid:
+                return self.make_response(item, f"Remote parent path not found: {parent_path}")
+            parents = [pid]
+        nb = self._colab_nb_template(first_md, first_code)
+        media = MediaIoBaseUpload(io.BytesIO(json.dumps(nb).encode("utf-8")), mimetype="application/json")
+        body = {"name": name}
+        if parents:
+            body["parents"] = parents
+        created = dsvc.files().create(body=body, media_body=media, fields="id, name, mimeType, parents").execute()
+        link = f"https://colab.research.google.com/drive/{created['id']}"
+        return self.make_response(item, {"id": created["id"], "name": created["name"], "link": link})
+
+    def _colab_add_cell_common(self, item: dict, cell_type: str) -> dict:
+        p = item.get("params", {})
+        file_id = p.get("file_id")
+        dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        if not file_id and p.get("path"):
+            file_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        if not file_id:
+            return self.make_response(item, "Param 'file_id' or 'path' required")
+        nb = self._colab_download_nb(dsvc, file_id)
+        pos = p.get("position")
+        if cell_type == "code":
+            source = p.get("code") or ""
+            cell = {
+                "cell_type": "code",
+                "metadata": {"id": str(uuid4())},
+                "source": [s if s.endswith("\n") else s + "\n" for s in source.splitlines()],
+                "outputs": [],
+                "execution_count": None,
+            }
+        else:
+            source = p.get("markdown") or ""
+            cell = {
+                "cell_type": "markdown",
+                "metadata": {"id": str(uuid4())},
+                "source": [source if source.endswith("\n") else source + "\n"],
+            }
+        if isinstance(pos, int) and 0 <= pos <= len(nb["cells"]):
+            nb["cells"].insert(pos, cell)
+        else:
+            nb["cells"].append(cell)
+        updated = self._colab_upload_nb(dsvc, file_id, nb)
+        return self.make_response(item, {"id": updated["id"], "cells": len(nb["cells"])})
+
+    def cmd_colab_add_code_cell(self, item: dict) -> dict:
+        return self._colab_add_cell_common(item, "code")
+
+    def cmd_colab_add_markdown_cell(self, item: dict) -> dict:
+        return self._colab_add_cell_common(item, "markdown")
+
+    def cmd_colab_get_link(self, item: dict) -> dict:
+        p = item.get("params", {})
+        file_id = p.get("file_id")
+        if not file_id and p.get("path"):
+            dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+            file_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        if not file_id:
+            return self.make_response(item, "Param 'file_id' or 'path' required")
+        link = f"https://colab.research.google.com/drive/{file_id}"
+        return self.make_response(item, {"file_id": file_id, "link": link})
+
+    def cmd_colab_rename(self, item: dict) -> dict:
+        p = item.get("params", {})
+        new_name = p.get("name")
+        if not new_name:
+            return self.make_response(item, "Param 'name' required")
+        file_id = p.get("file_id")
+        dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        if not file_id and p.get("path"):
+            file_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        if not file_id:
+            return self.make_response(item, "Param 'file_id' or 'path' required")
+        updated = dsvc.files().update(fileId=file_id, body={"name": new_name}, fields="id,name").execute()
+        return self.make_response(item, updated)
+
+    def cmd_colab_duplicate(self, item: dict) -> dict:
+        p = item.get("params", {})
+        file_id = p.get("file_id")
+        dsvc = self._service("drive", "v3", scopes=self.DRIVE_SCOPES)
+        if not file_id and p.get("path"):
+            file_id = self._resolve_drive_id(dsvc, None, p.get("path"))
+        if not file_id:
+            return self.make_response(item, "Param 'file_id' or 'path' required")
+        name = p.get("name") or "Copy.ipynb"
+        body = {"name": name}
+        parent_path = p.get("remote_parent_path")
+        if parent_path:
+            pid = self._ensure_parent_id(dsvc, parent_path)
+            if not pid:
+                return self.make_response(item, f"Remote parent path not found: {parent_path}")
+            body["parents"] = [pid]
+        copied = dsvc.files().copy(fileId=file_id, body=body, fields="id,name,parents").execute()
+        link = f"https://colab.research.google.com/drive/{copied['id']}"
+        return self.make_response(item, {"id": copied["id"], "name": copied["name"], "link": link})
+
     def prepare_path(self, path: str) -> str:
         """
         Prepare path