MindsDB 25.7.2.0-py3-none-any.whl → 25.7.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of MindsDB might be problematic.
- mindsdb/__about__.py +1 -1
- mindsdb/__main__.py +1 -1
- mindsdb/api/a2a/common/server/server.py +16 -6
- mindsdb/api/executor/command_executor.py +213 -137
- mindsdb/api/executor/datahub/datanodes/integration_datanode.py +5 -1
- mindsdb/api/executor/datahub/datanodes/project_datanode.py +14 -3
- mindsdb/api/executor/planner/plan_join.py +3 -0
- mindsdb/api/executor/planner/plan_join_ts.py +117 -100
- mindsdb/api/executor/planner/query_planner.py +1 -0
- mindsdb/api/executor/sql_query/steps/apply_predictor_step.py +54 -85
- mindsdb/api/http/initialize.py +16 -43
- mindsdb/api/http/namespaces/agents.py +24 -21
- mindsdb/api/http/namespaces/chatbots.py +83 -120
- mindsdb/api/http/namespaces/file.py +1 -1
- mindsdb/api/http/namespaces/jobs.py +38 -60
- mindsdb/api/http/namespaces/tree.py +69 -61
- mindsdb/api/mcp/start.py +2 -0
- mindsdb/api/mysql/mysql_proxy/utilities/dump.py +3 -2
- mindsdb/integrations/handlers/autogluon_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/autosklearn_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/bigquery_handler/bigquery_handler.py +25 -5
- mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py +3 -3
- mindsdb/integrations/handlers/flaml_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/google_calendar_handler/google_calendar_tables.py +82 -73
- mindsdb/integrations/handlers/hubspot_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/langchain_handler/langchain_handler.py +83 -76
- mindsdb/integrations/handlers/lightwood_handler/requirements.txt +4 -4
- mindsdb/integrations/handlers/litellm_handler/litellm_handler.py +16 -3
- mindsdb/integrations/handlers/litellm_handler/settings.py +2 -1
- mindsdb/integrations/handlers/llama_index_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/pgvector_handler/pgvector_handler.py +106 -90
- mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +41 -39
- mindsdb/integrations/handlers/s3_handler/s3_handler.py +72 -70
- mindsdb/integrations/handlers/salesforce_handler/constants.py +208 -0
- mindsdb/integrations/handlers/salesforce_handler/salesforce_handler.py +142 -81
- mindsdb/integrations/handlers/salesforce_handler/salesforce_tables.py +12 -4
- mindsdb/integrations/handlers/slack_handler/slack_tables.py +141 -161
- mindsdb/integrations/handlers/tpot_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/web_handler/urlcrawl_helpers.py +32 -17
- mindsdb/integrations/handlers/web_handler/web_handler.py +19 -22
- mindsdb/integrations/handlers/youtube_handler/youtube_tables.py +183 -55
- mindsdb/integrations/libs/vectordatabase_handler.py +10 -1
- mindsdb/integrations/utilities/handler_utils.py +32 -12
- mindsdb/interfaces/agents/agents_controller.py +169 -110
- mindsdb/interfaces/agents/langchain_agent.py +10 -3
- mindsdb/interfaces/data_catalog/data_catalog_loader.py +22 -8
- mindsdb/interfaces/database/database.py +38 -13
- mindsdb/interfaces/database/integrations.py +20 -5
- mindsdb/interfaces/database/projects.py +63 -16
- mindsdb/interfaces/database/views.py +86 -60
- mindsdb/interfaces/jobs/jobs_controller.py +103 -110
- mindsdb/interfaces/knowledge_base/controller.py +33 -5
- mindsdb/interfaces/knowledge_base/evaluate.py +53 -9
- mindsdb/interfaces/knowledge_base/executor.py +24 -0
- mindsdb/interfaces/knowledge_base/llm_client.py +3 -3
- mindsdb/interfaces/knowledge_base/preprocessing/document_preprocessor.py +21 -13
- mindsdb/interfaces/query_context/context_controller.py +100 -133
- mindsdb/interfaces/skills/skills_controller.py +18 -6
- mindsdb/interfaces/storage/db.py +40 -6
- mindsdb/interfaces/variables/variables_controller.py +8 -15
- mindsdb/utilities/config.py +3 -3
- mindsdb/utilities/functions.py +72 -60
- mindsdb/utilities/log.py +38 -6
- mindsdb/utilities/ps.py +7 -7
- {mindsdb-25.7.2.0.dist-info → mindsdb-25.7.4.0.dist-info}/METADATA +262 -263
- {mindsdb-25.7.2.0.dist-info → mindsdb-25.7.4.0.dist-info}/RECORD +69 -68
- {mindsdb-25.7.2.0.dist-info → mindsdb-25.7.4.0.dist-info}/WHEEL +0 -0
- {mindsdb-25.7.2.0.dist-info → mindsdb-25.7.4.0.dist-info}/licenses/LICENSE +0 -0
- {mindsdb-25.7.2.0.dist-info → mindsdb-25.7.4.0.dist-info}/top_level.txt +0 -0

mindsdb/api/http/namespaces/tree.py
CHANGED

@@ -9,97 +9,105 @@ from mindsdb.api.http.namespaces.configs.tree import ns_conf
 from mindsdb.metrics.metrics import api_endpoint_metrics
 
 
-@ns_conf.route(
+@ns_conf.route("/")
 class GetRoot(Resource):
-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("get_tree_root")
+    @api_endpoint_metrics("GET", "/tree")
     def get(self):
         databases = ca.database_controller.get_list()
-        result = [
-
-
-
-
-
-
-
+        result = [
+            {
+                "name": x["name"],
+                "class": "db",
+                "type": x["type"],
+                "engine": x["engine"],
+                "deletable": x["deletable"],
+                "visible": x["visible"],
+            }
+            for x in databases
+        ]
         return result
 
 
-@ns_conf.route(
-@ns_conf.param(
+@ns_conf.route("/<db_name>")
+@ns_conf.param("db_name", "Name of the database")
 class GetLeaf(Resource):
-    @ns_conf.doc(
-    @api_endpoint_metrics(
+    @ns_conf.doc("get_tree_leaf")
+    @api_endpoint_metrics("GET", "/tree/database")
     def get(self, db_name):
-        with_schemas = request.args.get(
+        with_schemas = request.args.get("all_schemas")
         if isinstance(with_schemas, str):
-            with_schemas = with_schemas.lower() in (
+            with_schemas = with_schemas.lower() in ("1", "true")
         else:
             with_schemas = False
         db_name = db_name.lower()
         databases = ca.database_controller.get_dict()
         if db_name not in databases:
-            return http_error(
-                400,
-                "Error",
-                f"There is no element with name '{db_name}'"
-            )
+            return http_error(400, "Error", f"There is no element with name '{db_name}'")
         db = databases[db_name]
-        if db[
+        if db["type"] == "project":
             project = ca.database_controller.get_project(db_name)
             tables = project.get_tables()
-            tables = [
-
-
-
-
-
-
-
-
+            tables = [
+                {
+                    "name": key,
+                    "schema": None,
+                    "class": "table",
+                    "type": val["type"],
+                    "engine": val.get("engine"),
+                    "deletable": val.get("deletable"),
+                }
+                for key, val in tables.items()
+            ]
+
+            jobs = ca.jobs_controller.get_list(db_name)
+            tables = tables + [
+                {"name": job["name"], "schema": None, "class": "job", "type": "job", "engine": "job", "deletable": True}
+                for job in jobs
+            ]
+        elif db["type"] == "data":
             handler = ca.integration_controller.get_data_handler(db_name)
-            if
+            if "all" in inspect.signature(handler.get_tables).parameters:
                 response = handler.get_tables(all=with_schemas)
             else:
                 response = handler.get_tables()
-            if response.type !=
+            if response.type != "table":
                 return []
-            table_types = {
-
-                'VIEW': 'view'
-            }
-            tables = response.data_frame.to_dict(orient='records')
+            table_types = {"BASE TABLE": "table", "VIEW": "view"}
+            tables = response.data_frame.to_dict(orient="records")
 
             schemas = defaultdict(list)
 
             for table_meta in tables:
                 table_meta = {key.lower(): val for key, val in table_meta.items()}
-                schama = table_meta.get(
-                schemas[schama].append(
-
-
-
-
-
-
+                schama = table_meta.get("table_schema")
+                schemas[schama].append(
+                    {
+                        "name": table_meta["table_name"],
+                        "class": "table",
+                        "type": table_types.get(table_meta.get("table_type")),
+                        "engine": None,
+                        "deletable": False,
+                    }
+                )
             if len(schemas) == 1 and list(schemas.keys())[0] is None:
                 tables = schemas[None]
             else:
-                tables = [
-
-
-
-
-                } for key, val in schemas.items()]
-        elif db['type'] == 'system':
+                tables = [
+                    {"name": key, "class": "schema", "deletable": False, "children": val}
+                    for key, val in schemas.items()
+                ]
+        elif db["type"] == "system":
             system_db = ca.database_controller.get_system_db(db_name)
             tables = system_db.get_tree_tables()
-            tables = [
-
-
-
-
-
-
+            tables = [
+                {
+                    "name": table.name,
+                    "class": table.kind,
+                    "type": "system view",
+                    "engine": None,
+                    "deletable": table.deletable,
+                }
+                for table in tables.values()
+            ]
         return tables
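
The inspect.signature check in GetLeaf is worth calling out: it feature-detects the optional `all` keyword before passing it, so data handlers that predate the flag keep working. A minimal sketch of the pattern (both handler classes below are hypothetical stand-ins, not MindsDB code):

import inspect


def call_with_optional_flag(fn, *, all_schemas: bool):
    # Pass the keyword only if the callee's signature declares it;
    # older implementations without the parameter are called plainly.
    if "all" in inspect.signature(fn).parameters:
        return fn(all=all_schemas)
    return fn()


class OldHandler:  # hypothetical: predates the "all" flag
    def get_tables(self):
        return ["t1"]


class NewHandler:  # hypothetical: supports schema-wide listing
    def get_tables(self, all=False):
        return ["t1", "t2"] if all else ["t1"]


print(call_with_optional_flag(OldHandler().get_tables, all_schemas=True))  # ['t1']
print(call_with_optional_flag(NewHandler().get_tables, all_schemas=True))  # ['t1', 't2']
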
mindsdb/api/mcp/start.py
CHANGED

@@ -15,6 +15,7 @@ from starlette.responses import Response
 from mindsdb.api.mysql.mysql_proxy.classes.fake_mysql_proxy import FakeMysqlProxy
 from mindsdb.api.executor.data_types.response_type import RESPONSE_TYPE as SQL_RESPONSE_TYPE
 from mindsdb.utilities import log
+from mindsdb.utilities.log import get_uvicorn_logging_config
 from mindsdb.utilities.config import Config
 from mindsdb.interfaces.storage import db
 
@@ -173,6 +174,7 @@ async def run_sse_async() -> None:
         host=mcp.settings.host,
         port=mcp.settings.port,
         log_level=mcp.settings.log_level.lower(),
+        log_config=get_uvicorn_logging_config("uvicorn_mcp"),
     )
     server = uvicorn.Server(config)
     await server.serve()
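
For context, uvicorn.Config takes a log_config dict in logging.config.dictConfig format, which is what the new get_uvicorn_logging_config("uvicorn_mcp") argument supplies. A rough sketch of the same wiring with an illustrative config (this dict is a placeholder, not the output of MindsDB's helper):

import uvicorn

# Illustrative dictConfig; MindsDB's get_uvicorn_logging_config builds its own.
LOG_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {"default": {"format": "%(asctime)s %(levelname)s %(name)s %(message)s"}},
    "handlers": {"default": {"class": "logging.StreamHandler", "formatter": "default"}},
    "loggers": {"uvicorn": {"handlers": ["default"], "level": "INFO"}},
}

config = uvicorn.Config("app:app", host="127.0.0.1", port=8000, log_config=LOG_CONFIG)
server = uvicorn.Server(config)
# server.run()  # or: await server.serve() inside an event loop
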
mindsdb/api/mysql/mysql_proxy/utilities/dump.py
CHANGED

@@ -118,8 +118,9 @@ def _dump_str(var: Any) -> str | None:
         return json_encoder.encode(var)
     except Exception:
         return str(var)
-
-
+    # pd.isna returns array of bools for list
+    # and the truth value of a numpy array is ambiguous
+    if isinstance(var, (list, np.ndarray)) is False and pd.isna(var):
         return None
     return str(var)
 
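
The added comment describes a real pandas pitfall: pd.isna is elementwise on sequences, so for a list or ndarray it returns an array of booleans, and using that array in an `if` raises ValueError. A quick demonstration:

import pandas as pd

print(pd.isna(None))          # True -- a scalar
print(pd.isna([1, None, 3]))  # [False  True False] -- an array, not a scalar
try:
    if pd.isna([1, None, 3]):  # truth value of an array is ambiguous
        pass
except ValueError as e:
    print(e)  # "The truth value of an array with more than one element is ambiguous..."
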
mindsdb/integrations/handlers/autogluon_handler/requirements.txt
CHANGED

@@ -1,2 +1,2 @@
 autogluon
-type_infer==0.0.
+type_infer==0.0.23
mindsdb/integrations/handlers/autosklearn_handler/requirements.txt
CHANGED

@@ -1,2 +1,2 @@
 auto-sklearn
-type_infer==0.0.
+type_infer==0.0.23
mindsdb/integrations/handlers/bigquery_handler/bigquery_handler.py
CHANGED

@@ -1,8 +1,10 @@
-
-from
+import json
+from typing import Any, Dict, Optional, Text
+
+from google.cloud.bigquery import Client, QueryJobConfig, DEFAULT_RETRY
+from google.api_core.exceptions import BadRequest, NotFound
 import pandas as pd
 from sqlalchemy_bigquery.base import BigQueryDialect
-from typing import Any, Dict, Optional, Text
 
 from mindsdb.utilities import log
 from mindsdb_sql_parser.ast.base import ASTNode
@@ -54,9 +56,22 @@ class BigQueryHandler(MetaDatabaseHandler):
         if not all(key in self.connection_data for key in ["project_id", "dataset"]):
             raise ValueError("Required parameters (project_id, dataset) must be provided.")
 
+        service_account_json = self.connection_data.get("service_account_json")
+        if isinstance(service_account_json, str):
+            # GUI send it as str
+            try:
+                service_account_json = json.loads(service_account_json)
+            except json.decoder.JSONDecodeError:
+                raise ValueError("'service_account_json' is not valid JSON")
+        if isinstance(service_account_json, dict) and isinstance(service_account_json.get("private_key"), str):
+            # some editors may escape new line symbol, also replace windows-like newlines
+            service_account_json["private_key"] = (
+                service_account_json["private_key"].replace("\\n", "\n").replace("\r\n", "\n")
+            )
+
         google_sa_oauth2_manager = GoogleServiceAccountOAuth2Manager(
             credentials_file=self.connection_data.get("service_account_keys"),
-            credentials_json=
+            credentials_json=service_account_json,
         )
         credentials = google_sa_oauth2_manager.get_oauth2_credentials()
 
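
The new block exists because a service-account credential pasted through a GUI arrives as a JSON string, often with the private key's newlines left as literal "\n" sequences. A small sketch of the round trip with a hypothetical payload:

import json

# Hypothetical payload: the private_key value holds literal backslash-n, not newlines.
raw = json.dumps({"private_key": "-----BEGIN KEY-----\\nABC\\n-----END KEY-----"})

creds = json.loads(raw)
assert "\n" not in creds["private_key"]  # still escaped after parsing
# Un-escape "\n" and normalize Windows line endings, as the handler now does.
creds["private_key"] = creds["private_key"].replace("\\n", "\n").replace("\r\n", "\n")
assert creds["private_key"].count("\n") == 2  # real newlines restored
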
@@ -85,7 +100,7 @@ class BigQueryHandler(MetaDatabaseHandler):
 
         try:
             connection = self.connect()
-            connection.query("SELECT 1;")
+            connection.query("SELECT 1;", timeout=10, retry=DEFAULT_RETRY.with_deadline(10))
 
             # Check if the dataset exists
             connection.get_dataset(self.connection_data["dataset"])
@@ -94,6 +109,11 @@ class BigQueryHandler(MetaDatabaseHandler):
         except (BadRequest, ValueError) as e:
             logger.error(f"Error connecting to BigQuery {self.connection_data['project_id']}, {e}!")
             response.error_message = e
+        except NotFound:
+            response.error_message = (
+                f"Error connecting to BigQuery {self.connection_data['project_id']}: "
+                f"dataset '{self.connection_data['dataset']}' not found"
+            )
 
         if response.success is False and self.is_connected is True:
             self.is_connected = False
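
Both changes harden check_connection: the probe query now carries a 10-second timeout and a retry policy capped at the same deadline (the library's default retry can otherwise keep a dead connection hanging far longer), and a missing dataset surfaces as a clear NotFound message. A sketch of the bounded health check, assuming ambient (ADC) credentials:

from google.cloud import bigquery
from google.cloud.bigquery import DEFAULT_RETRY

client = bigquery.Client()  # assumes ambient credentials

# Cap both the API call and its retry budget at 10 seconds so an
# unreachable backend fails fast instead of retrying for minutes.
job = client.query("SELECT 1;", timeout=10, retry=DEFAULT_RETRY.with_deadline(10))
print(list(job.result(timeout=10)))
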
mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py
CHANGED

@@ -59,6 +59,7 @@ class ChromaDBHandler(VectorStoreHandler):
         self._client = None
         self.persist_directory = None
         self.is_connected = False
+        self._use_handler_storage = False
 
         config = self.validate_connection_parameters(name, **kwargs)
 
@@ -72,8 +73,6 @@ class ChromaDBHandler(VectorStoreHandler):
             "hnsw:space": config.distance,
         }
 
-        self._use_handler_storage = False
-
         self.connect()
 
     def validate_connection_parameters(self, name, **kwargs):
@@ -395,7 +394,7 @@ class ChromaDBHandler(VectorStoreHandler):
 
         return df
 
-    def insert(self, collection_name: str, df: pd.DataFrame):
+    def insert(self, collection_name: str, df: pd.DataFrame) -> Response:
         """
         Insert/Upsert data into ChromaDB collection.
         If records with same IDs exist, they will be updated.
@@ -433,6 +432,7 @@ class ChromaDBHandler(VectorStoreHandler):
         except Exception as e:
             logger.error(f"Error during upsert operation: {str(e)}")
             raise Exception(f"Failed to insert/update data: {str(e)}")
+        return Response(RESPONSE_TYPE.OK, affected_rows=len(df))
 
     def upsert(self, table_name: str, data: pd.DataFrame):
         """
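
Returning affected_rows=len(df) works because ChromaDB's write path is an upsert: re-sending an existing ID updates the record in place rather than duplicating it. A minimal sketch against the chromadb client (collection name and vectors are arbitrary):

import chromadb

client = chromadb.Client()  # ephemeral in-memory instance
col = client.create_collection("demo")

col.upsert(ids=["a"], embeddings=[[0.1, 0.2]], documents=["first version"])
col.upsert(ids=["a"], embeddings=[[0.3, 0.4]], documents=["second version"])  # same ID: update

print(col.count())                      # 1 -- still one record
print(col.get(ids=["a"])["documents"])  # ['second version']
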
mindsdb/integrations/handlers/flaml_handler/requirements.txt
CHANGED

@@ -1,2 +1,2 @@
 flaml<=1.2.3
-type_infer==0.0.
+type_infer==0.0.23
mindsdb/integrations/handlers/google_calendar_handler/google_calendar_tables.py
CHANGED

@@ -9,7 +9,6 @@ from mindsdb.integrations.utilities.sql_utils import extract_comparison_conditions
 
 
 class GoogleCalendarEventsTable(APITable):
-
     def select(self, query: ast.Select) -> DataFrame:
         """
         Gets all events from the calendar.
@@ -26,33 +25,33 @@ class GoogleCalendarEventsTable(APITable):
         # Get the start and end times from the conditions.
         params = {}
         for op, arg1, arg2 in conditions:
-            if arg1 ==
+            if arg1 == "timeMax" or arg1 == "timeMin":
                 date = parse_utc_date(arg2)
-                if op ==
+                if op == "=":
                     params[arg1] = date
                 else:
                     raise NotImplementedError
-            elif arg1 ==
+            elif arg1 == "timeZone":
                 params[arg1] = arg2
-            elif arg1 ==
+            elif arg1 == "maxAttendees":
                 params[arg1] = arg2
-            elif arg1 ==
+            elif arg1 == "q":
                 params[arg1] = arg2
 
         # Get the order by from the query.
         if query.order_by is not None:
-            if query.order_by[0].value ==
-                params[
-            elif query.order_by[0].value ==
-                params[
+            if query.order_by[0].value == "start_time":
+                params["orderBy"] = "startTime"
+            elif query.order_by[0].value == "updated":
+                params["orderBy"] = "updated"
             else:
                 raise NotImplementedError
 
         if query.limit is not None:
-            params[
+            params["maxResults"] = query.limit.value
 
         # Get the events from the Google Calendar API.
-        events = self.handler.call_application_api(method_name=
+        events = self.handler.call_application_api(method_name="get_events", params=params)
 
         selected_columns = []
         for target in query.targets:
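
The select path above is a translation layer: extract_comparison_conditions flattens the SQL WHERE clause into (op, column, value) triples, which are then mapped onto Calendar API query parameters. Roughly, with a hypothetical triple list in that shape (the real code also parses dates and only restricts the operator for the time bounds):

# Hypothetical triples in the (op, column, value) shape that
# extract_comparison_conditions produces from a WHERE clause.
conditions = [("=", "timeMin", "2025-07-01T00:00:00Z"), ("=", "q", "standup")]

params = {}
for op, col, val in conditions:
    if col in ("timeMin", "timeMax", "timeZone", "maxAttendees", "q"):
        if op != "=":
            raise NotImplementedError(f"only '=' is supported for {col}")
        params[col] = val

print(params)  # {'timeMin': '2025-07-01T00:00:00Z', 'q': 'standup'}
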
@@ -87,9 +86,20 @@ class GoogleCalendarEventsTable(APITable):
         values = query.values[0]
         # Get the event data from the values.
         event_data = {}
-        timestamp_columns = {
-        regular_columns = {
-
+        timestamp_columns = {"start_time", "end_time", "created", "updated"}
+        regular_columns = {
+            "summary",
+            "description",
+            "location",
+            "status",
+            "html_link",
+            "creator",
+            "organizer",
+            "reminders",
+            "timeZone",
+            "calendar_id",
+            "attendees",
+        }
 
         # TODO: check why query.columns is None
         for col, val in zip(query.columns, values):
@@ -100,24 +110,18 @@ class GoogleCalendarEventsTable(APITable):
             else:
                 raise NotImplementedError
 
-        st = datetime.datetime.
-        et = datetime.datetime.
+        st = datetime.datetime.fromtimestamp(event_data["start_time"] / 1000, datetime.timezone.utc).isoformat() + "Z"
+        et = datetime.datetime.fromtimestamp(event_data["end_time"] / 1000, datetime.timezone.utc).isoformat() + "Z"
 
-        event_data[
-            'dateTime': st,
-            'timeZone': event_data['timeZone']
-        }
+        event_data["start"] = {"dateTime": st, "timeZone": event_data["timeZone"]}
 
-        event_data[
-            'dateTime': et,
-            'timeZone': event_data['timeZone']
-        }
+        event_data["end"] = {"dateTime": et, "timeZone": event_data["timeZone"]}
 
-        event_data[
-        event_data[
+        event_data["attendees"] = event_data["attendees"].split(",")
+        event_data["attendees"] = [{"email": attendee} for attendee in event_data["attendees"]]
 
         # Insert the event into the Google Calendar API.
-        self.handler.call_application_api(method_name=
+        self.handler.call_application_api(method_name="create_event", params=event_data)
 
     def update(self, query: ast.Update):
         """
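
The reformatted lines above convert stored epoch-millisecond values into the RFC 3339 datetimes the Calendar API expects: divide by 1000, attach an explicit UTC timezone, serialize with isoformat(). For example:

import datetime

ms = 1753920000000  # epoch milliseconds, e.g. a stored start_time value
st = datetime.datetime.fromtimestamp(ms / 1000, datetime.timezone.utc).isoformat()
print(st)  # 2025-07-31T00:00:00+00:00
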
@@ -135,44 +139,48 @@ class GoogleCalendarEventsTable(APITable):
         # Get the event data from the values.
         event_data = {}
         for col, val in zip(query.update_columns, values):
-            if col ==
+            if col == "start_time" or col == "end_time" or col == "created" or col == "updated":
                 event_data[col] = utc_date_str_to_timestamp_ms(val)
-            elif
-
-
+            elif (
+                col == "summary"
+                or col == "description"
+                or col == "location"
+                or col == "status"
+                or col == "html_link"
+                or col == "creator"
+                or col == "organizer"
+                or col == "reminders"
+                or col == "timeZone"
+                or col == "calendar_id"
+                or col == "attendees"
+            ):
                 event_data[col] = val
             else:
                 raise NotImplementedError
 
-        event_data[
-            'dateTime': event_data['start_time'],
-            'timeZone': event_data['timeZone']
-        }
+        event_data["start"] = {"dateTime": event_data["start_time"], "timeZone": event_data["timeZone"]}
 
-        event_data[
-            'dateTime': event_data['end_time'],
-            'timeZone': event_data['timeZone']
-        }
+        event_data["end"] = {"dateTime": event_data["end_time"], "timeZone": event_data["timeZone"]}
 
-        event_data[
-        event_data[
+        event_data["attendees"] = event_data.get("attendees").split(",")
+        event_data["attendees"] = [{"email": attendee} for attendee in event_data["attendees"]]
 
         conditions = extract_comparison_conditions(query.where)
         for op, arg1, arg2 in conditions:
-            if arg1 ==
-                if op ==
-                    event_data[
-                elif op ==
-                    event_data[
-                elif op ==
-                    event_data[
+            if arg1 == "event_id":
+                if op == "=":
+                    event_data["event_id"] = arg2
+                elif op == ">":
+                    event_data["start_id"] = arg2
+                elif op == "<":
+                    event_data["end_id"] = arg2
                 else:
                     raise NotImplementedError
             else:
                 raise NotImplementedError
 
         # Update the event in the Google Calendar API.
-        self.handler.call_application_api(method_name=
+        self.handler.call_application_api(method_name="update_event", params=event_data)
 
     def delete(self, query: ast.Delete):
         """
@@ -190,35 +198,36 @@ class GoogleCalendarEventsTable(APITable):
         # Get the start and end times from the conditions.
         params = {}
         for op, arg1, arg2 in conditions:
-            if arg1 ==
-                if op ==
+            if arg1 == "event_id":
+                if op == "=":
                     params[arg1] = arg2
-                elif op ==
-                    params[
-                elif op ==
-                    params[
+                elif op == ">":
+                    params["start_id"] = arg2
+                elif op == "<":
+                    params["end_id"] = arg2
             else:
                 raise NotImplementedError
 
         # Delete the events in the Google Calendar API.
-        self.handler.call_application_api(method_name=
+        self.handler.call_application_api(method_name="delete_event", params=params)
 
     def get_columns(self) -> list:
         """Gets all columns to be returned in pandas DataFrame responses"""
         return [
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            "etag",
+            "id",
+            "status",
+            "htmlLink",
+            "created",
+            "updated",
+            "summary",
+            "creator",
+            "organizer",
+            "start",
+            "end",
+            "timeZone",
+            "iCalUID",
+            "sequence",
+            "reminders",
+            "eventType",
+        ]
mindsdb/integrations/handlers/hubspot_handler/requirements.txt
CHANGED

@@ -1 +1 @@
-hubspot-api-client
+hubspot-api-client==11.1.0