MindsDB 25.7.3.0__py3-none-any.whl → 25.7.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of MindsDB has been flagged as potentially problematic.

Files changed (61)
  1. mindsdb/__about__.py +1 -1
  2. mindsdb/api/a2a/common/server/server.py +16 -6
  3. mindsdb/api/executor/command_executor.py +206 -135
  4. mindsdb/api/executor/datahub/datanodes/project_datanode.py +14 -3
  5. mindsdb/api/executor/planner/plan_join.py +3 -0
  6. mindsdb/api/executor/planner/plan_join_ts.py +117 -100
  7. mindsdb/api/executor/planner/query_planner.py +1 -0
  8. mindsdb/api/executor/sql_query/steps/apply_predictor_step.py +54 -85
  9. mindsdb/api/http/initialize.py +16 -43
  10. mindsdb/api/http/namespaces/agents.py +23 -20
  11. mindsdb/api/http/namespaces/chatbots.py +83 -120
  12. mindsdb/api/http/namespaces/file.py +1 -1
  13. mindsdb/api/http/namespaces/jobs.py +38 -60
  14. mindsdb/api/http/namespaces/tree.py +69 -61
  15. mindsdb/api/mcp/start.py +2 -0
  16. mindsdb/api/mysql/mysql_proxy/utilities/dump.py +3 -2
  17. mindsdb/integrations/handlers/autogluon_handler/requirements.txt +1 -1
  18. mindsdb/integrations/handlers/autosklearn_handler/requirements.txt +1 -1
  19. mindsdb/integrations/handlers/bigquery_handler/bigquery_handler.py +25 -5
  20. mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py +3 -3
  21. mindsdb/integrations/handlers/flaml_handler/requirements.txt +1 -1
  22. mindsdb/integrations/handlers/google_calendar_handler/google_calendar_tables.py +82 -73
  23. mindsdb/integrations/handlers/hubspot_handler/requirements.txt +1 -1
  24. mindsdb/integrations/handlers/langchain_handler/langchain_handler.py +83 -76
  25. mindsdb/integrations/handlers/lightwood_handler/requirements.txt +4 -4
  26. mindsdb/integrations/handlers/litellm_handler/litellm_handler.py +5 -2
  27. mindsdb/integrations/handlers/litellm_handler/settings.py +2 -1
  28. mindsdb/integrations/handlers/pgvector_handler/pgvector_handler.py +106 -90
  29. mindsdb/integrations/handlers/postgres_handler/postgres_handler.py +41 -39
  30. mindsdb/integrations/handlers/salesforce_handler/constants.py +208 -0
  31. mindsdb/integrations/handlers/salesforce_handler/salesforce_handler.py +141 -80
  32. mindsdb/integrations/handlers/salesforce_handler/salesforce_tables.py +0 -1
  33. mindsdb/integrations/handlers/tpot_handler/requirements.txt +1 -1
  34. mindsdb/integrations/handlers/web_handler/urlcrawl_helpers.py +32 -17
  35. mindsdb/integrations/handlers/web_handler/web_handler.py +19 -22
  36. mindsdb/integrations/libs/vectordatabase_handler.py +10 -1
  37. mindsdb/integrations/utilities/handler_utils.py +32 -12
  38. mindsdb/interfaces/agents/agents_controller.py +167 -108
  39. mindsdb/interfaces/agents/langchain_agent.py +10 -3
  40. mindsdb/interfaces/data_catalog/data_catalog_loader.py +4 -4
  41. mindsdb/interfaces/database/database.py +38 -13
  42. mindsdb/interfaces/database/integrations.py +20 -5
  43. mindsdb/interfaces/database/projects.py +63 -16
  44. mindsdb/interfaces/database/views.py +86 -60
  45. mindsdb/interfaces/jobs/jobs_controller.py +103 -110
  46. mindsdb/interfaces/knowledge_base/controller.py +26 -5
  47. mindsdb/interfaces/knowledge_base/evaluate.py +2 -1
  48. mindsdb/interfaces/knowledge_base/executor.py +24 -0
  49. mindsdb/interfaces/query_context/context_controller.py +100 -133
  50. mindsdb/interfaces/skills/skills_controller.py +18 -6
  51. mindsdb/interfaces/storage/db.py +40 -6
  52. mindsdb/interfaces/variables/variables_controller.py +8 -15
  53. mindsdb/utilities/config.py +3 -3
  54. mindsdb/utilities/functions.py +72 -60
  55. mindsdb/utilities/log.py +38 -6
  56. mindsdb/utilities/ps.py +7 -7
  57. {mindsdb-25.7.3.0.dist-info → mindsdb-25.7.4.0.dist-info}/METADATA +246 -247
  58. {mindsdb-25.7.3.0.dist-info → mindsdb-25.7.4.0.dist-info}/RECORD +61 -60
  59. {mindsdb-25.7.3.0.dist-info → mindsdb-25.7.4.0.dist-info}/WHEEL +0 -0
  60. {mindsdb-25.7.3.0.dist-info → mindsdb-25.7.4.0.dist-info}/licenses/LICENSE +0 -0
  61. {mindsdb-25.7.3.0.dist-info → mindsdb-25.7.4.0.dist-info}/top_level.txt +0 -0
mindsdb/api/http/namespaces/tree.py CHANGED
@@ -9,97 +9,105 @@ from mindsdb.api.http.namespaces.configs.tree import ns_conf
 from mindsdb.metrics.metrics import api_endpoint_metrics
 
 
-@ns_conf.route('/')
+@ns_conf.route("/")
 class GetRoot(Resource):
-    @ns_conf.doc('get_tree_root')
-    @api_endpoint_metrics('GET', '/tree')
+    @ns_conf.doc("get_tree_root")
+    @api_endpoint_metrics("GET", "/tree")
     def get(self):
         databases = ca.database_controller.get_list()
-        result = [{
-            'name': x['name'],
-            'class': 'db',
-            'type': x['type'],
-            'engine': x['engine'],
-            'deletable': x['deletable'],
-            'visible': x['visible']
-        } for x in databases]
+        result = [
+            {
+                "name": x["name"],
+                "class": "db",
+                "type": x["type"],
+                "engine": x["engine"],
+                "deletable": x["deletable"],
+                "visible": x["visible"],
+            }
+            for x in databases
+        ]
         return result
 
 
-@ns_conf.route('/<db_name>')
-@ns_conf.param('db_name', "Name of the database")
+@ns_conf.route("/<db_name>")
+@ns_conf.param("db_name", "Name of the database")
 class GetLeaf(Resource):
-    @ns_conf.doc('get_tree_leaf')
-    @api_endpoint_metrics('GET', '/tree/database')
+    @ns_conf.doc("get_tree_leaf")
+    @api_endpoint_metrics("GET", "/tree/database")
     def get(self, db_name):
-        with_schemas = request.args.get('all_schemas')
+        with_schemas = request.args.get("all_schemas")
         if isinstance(with_schemas, str):
-            with_schemas = with_schemas.lower() in ('1', 'true')
+            with_schemas = with_schemas.lower() in ("1", "true")
         else:
            with_schemas = False
         db_name = db_name.lower()
         databases = ca.database_controller.get_dict()
         if db_name not in databases:
-            return http_error(
-                400,
-                "Error",
-                f"There is no element with name '{db_name}'"
-            )
+            return http_error(400, "Error", f"There is no element with name '{db_name}'")
         db = databases[db_name]
-        if db['type'] == 'project':
+        if db["type"] == "project":
            project = ca.database_controller.get_project(db_name)
            tables = project.get_tables()
-            tables = [{
-                'name': key,
-                'schema': None,
-                'class': 'table',
-                'type': val['type'],
-                'engine': val.get('engine'),
-                'deletable': val.get('deletable')
-            } for key, val in tables.items()]
-        elif db['type'] == 'data':
+            tables = [
+                {
+                    "name": key,
+                    "schema": None,
+                    "class": "table",
+                    "type": val["type"],
+                    "engine": val.get("engine"),
+                    "deletable": val.get("deletable"),
+                }
+                for key, val in tables.items()
+            ]
+
+            jobs = ca.jobs_controller.get_list(db_name)
+            tables = tables + [
+                {"name": job["name"], "schema": None, "class": "job", "type": "job", "engine": "job", "deletable": True}
+                for job in jobs
+            ]
+        elif db["type"] == "data":
            handler = ca.integration_controller.get_data_handler(db_name)
-            if 'all' in inspect.signature(handler.get_tables).parameters:
+            if "all" in inspect.signature(handler.get_tables).parameters:
                response = handler.get_tables(all=with_schemas)
            else:
                response = handler.get_tables()
-            if response.type != 'table':
+            if response.type != "table":
                return []
-            table_types = {
-                'BASE TABLE': 'table',
-                'VIEW': 'view'
-            }
-            tables = response.data_frame.to_dict(orient='records')
+            table_types = {"BASE TABLE": "table", "VIEW": "view"}
+            tables = response.data_frame.to_dict(orient="records")
 
            schemas = defaultdict(list)
 
            for table_meta in tables:
                table_meta = {key.lower(): val for key, val in table_meta.items()}
-                schama = table_meta.get('table_schema')
-                schemas[schama].append({
-                    'name': table_meta['table_name'],
-                    'class': 'table',
-                    'type': table_types.get(table_meta.get('table_type')),
-                    'engine': None,
-                    'deletable': False
-                })
+                schama = table_meta.get("table_schema")
+                schemas[schama].append(
+                    {
+                        "name": table_meta["table_name"],
+                        "class": "table",
+                        "type": table_types.get(table_meta.get("table_type")),
+                        "engine": None,
+                        "deletable": False,
+                    }
+                )
            if len(schemas) == 1 and list(schemas.keys())[0] is None:
                tables = schemas[None]
            else:
-                tables = [{
-                    'name': key,
-                    'class': 'schema',
-                    'deletable': False,
-                    'children': val
-                } for key, val in schemas.items()]
-        elif db['type'] == 'system':
+                tables = [
+                    {"name": key, "class": "schema", "deletable": False, "children": val}
+                    for key, val in schemas.items()
+                ]
+        elif db["type"] == "system":
            system_db = ca.database_controller.get_system_db(db_name)
            tables = system_db.get_tree_tables()
-            tables = [{
-                'name': table.name,
-                'class': table.kind,
-                'type': 'system view',
-                'engine': None,
-                'deletable': table.deletable,
-            } for table in tables.values()]
+            tables = [
+                {
+                    "name": table.name,
+                    "class": table.kind,
+                    "type": "system view",
+                    "engine": None,
+                    "deletable": table.deletable,
+                }
+                for table in tables.values()
+            ]
         return tables
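
Beyond the quote-style reformatting, the substantive change in this file is that project nodes in the tree now also list jobs. A minimal sketch of the merged payload for a project, where `my_view` and `nightly_retrain` are illustrative names, not values from the source:

    # Shape of GET /tree/<project_name> results after this change (sketch):
    tables = [
        {"name": "my_view", "schema": None, "class": "table", "type": "view", "engine": None, "deletable": True},
    ]
    jobs = [{"name": "nightly_retrain"}]  # stand-in for ca.jobs_controller.get_list(db_name)
    tables = tables + [
        {"name": job["name"], "schema": None, "class": "job", "type": "job", "engine": "job", "deletable": True}
        for job in jobs
    ]
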
mindsdb/api/mcp/start.py CHANGED
@@ -15,6 +15,7 @@ from starlette.responses import Response
 from mindsdb.api.mysql.mysql_proxy.classes.fake_mysql_proxy import FakeMysqlProxy
 from mindsdb.api.executor.data_types.response_type import RESPONSE_TYPE as SQL_RESPONSE_TYPE
 from mindsdb.utilities import log
+from mindsdb.utilities.log import get_uvicorn_logging_config
 from mindsdb.utilities.config import Config
 from mindsdb.interfaces.storage import db
 
@@ -173,6 +174,7 @@ async def run_sse_async() -> None:
         host=mcp.settings.host,
         port=mcp.settings.port,
         log_level=mcp.settings.log_level.lower(),
+        log_config=get_uvicorn_logging_config("uvicorn_mcp"),
     )
     server = uvicorn.Server(config)
     await server.serve()
mindsdb/api/mysql/mysql_proxy/utilities/dump.py CHANGED
@@ -118,8 +118,9 @@ def _dump_str(var: Any) -> str | None:
             return json_encoder.encode(var)
         except Exception:
             return str(var)
-    if isinstance(var, list) is False and pd.isna(var):
-        # pd.isna returns array of bools for list, so we need to check if it is not a list
+    # pd.isna returns array of bools for list
+    # and the truth value of a numpy array is ambiguous
+    if isinstance(var, (list, np.ndarray)) is False and pd.isna(var):
         return None
     return str(var)
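
The rationale for widening the `isinstance` check: `pd.isna` is element-wise on sequences, and using its array result as a boolean raises. A minimal, self-contained sketch of the failure the old code allowed for numpy arrays:

    import numpy as np
    import pandas as pd

    pd.isna(None)  # scalar input -> single bool: True

    # Array input -> element-wise array of bools, which cannot be used in an `if`:
    try:
        if pd.isna(np.array([1.0, np.nan])):
            pass
    except ValueError as e:
        # "The truth value of an array with more than one element is ambiguous..."
        print(e)
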
mindsdb/integrations/handlers/autogluon_handler/requirements.txt CHANGED
@@ -1,2 +1,2 @@
 autogluon
-type_infer==0.0.20
+type_infer==0.0.23
mindsdb/integrations/handlers/autosklearn_handler/requirements.txt CHANGED
@@ -1,2 +1,2 @@
 auto-sklearn
-type_infer==0.0.20
+type_infer==0.0.23
mindsdb/integrations/handlers/bigquery_handler/bigquery_handler.py CHANGED
@@ -1,8 +1,10 @@
-from google.cloud.bigquery import Client, QueryJobConfig
-from google.api_core.exceptions import BadRequest
+import json
+from typing import Any, Dict, Optional, Text
+
+from google.cloud.bigquery import Client, QueryJobConfig, DEFAULT_RETRY
+from google.api_core.exceptions import BadRequest, NotFound
 import pandas as pd
 from sqlalchemy_bigquery.base import BigQueryDialect
-from typing import Any, Dict, Optional, Text
 
 from mindsdb.utilities import log
 from mindsdb_sql_parser.ast.base import ASTNode
@@ -54,9 +56,22 @@ class BigQueryHandler(MetaDatabaseHandler):
         if not all(key in self.connection_data for key in ["project_id", "dataset"]):
             raise ValueError("Required parameters (project_id, dataset) must be provided.")
 
+        service_account_json = self.connection_data.get("service_account_json")
+        if isinstance(service_account_json, str):
+            # GUI send it as str
+            try:
+                service_account_json = json.loads(service_account_json)
+            except json.decoder.JSONDecodeError:
+                raise ValueError("'service_account_json' is not valid JSON")
+        if isinstance(service_account_json, dict) and isinstance(service_account_json.get("private_key"), str):
+            # some editors may escape new line symbol, also replace windows-like newlines
+            service_account_json["private_key"] = (
+                service_account_json["private_key"].replace("\\n", "\n").replace("\r\n", "\n")
+            )
+
         google_sa_oauth2_manager = GoogleServiceAccountOAuth2Manager(
             credentials_file=self.connection_data.get("service_account_keys"),
-            credentials_json=self.connection_data.get("service_account_json"),
+            credentials_json=service_account_json,
         )
         credentials = google_sa_oauth2_manager.get_oauth2_credentials()
 
@@ -85,7 +100,7 @@ class BigQueryHandler(MetaDatabaseHandler):
 
         try:
             connection = self.connect()
-            connection.query("SELECT 1;")
+            connection.query("SELECT 1;", timeout=10, retry=DEFAULT_RETRY.with_deadline(10))
 
             # Check if the dataset exists
             connection.get_dataset(self.connection_data["dataset"])
@@ -94,6 +109,11 @@ class BigQueryHandler(MetaDatabaseHandler):
         except (BadRequest, ValueError) as e:
             logger.error(f"Error connecting to BigQuery {self.connection_data['project_id']}, {e}!")
             response.error_message = e
+        except NotFound:
+            response.error_message = (
+                f"Error connecting to BigQuery {self.connection_data['project_id']}: "
+                f"dataset '{self.connection_data['dataset']}' not found"
+            )
 
         if response.success is False and self.is_connected is True:
             self.is_connected = False
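
The `private_key` normalization above guards against credentials pasted through a GUI or editor where the PEM newlines arrive as the literal two-character sequence backslash + "n". A self-contained sketch of the problem and the fix (the key material is fake):

    import json

    # Fake credential for illustration: the JSON text carries an escaped "\n",
    # so after json.loads the key holds a literal backslash-n, not a newline.
    raw = r'{"private_key": "-----BEGIN PRIVATE KEY-----\\nABC123\\n-----END PRIVATE KEY-----\\n"}'
    sa = json.loads(raw)
    assert "\\n" in sa["private_key"]  # literal backslash-n: unusable as PEM

    # The handler's normalization restores real newlines (and unifies \r\n):
    sa["private_key"] = sa["private_key"].replace("\\n", "\n").replace("\r\n", "\n")
    assert sa["private_key"].startswith("-----BEGIN PRIVATE KEY-----\n")
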
mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py CHANGED
@@ -59,6 +59,7 @@ class ChromaDBHandler(VectorStoreHandler):
         self._client = None
         self.persist_directory = None
         self.is_connected = False
+        self._use_handler_storage = False
 
         config = self.validate_connection_parameters(name, **kwargs)
 
@@ -72,8 +73,6 @@ class ChromaDBHandler(VectorStoreHandler):
             "hnsw:space": config.distance,
         }
 
-        self._use_handler_storage = False
-
         self.connect()
 
     def validate_connection_parameters(self, name, **kwargs):
@@ -395,7 +394,7 @@ class ChromaDBHandler(VectorStoreHandler):
 
         return df
 
-    def insert(self, collection_name: str, df: pd.DataFrame):
+    def insert(self, collection_name: str, df: pd.DataFrame) -> Response:
         """
         Insert/Upsert data into ChromaDB collection.
         If records with same IDs exist, they will be updated.
@@ -433,6 +432,7 @@ class ChromaDBHandler(VectorStoreHandler):
         except Exception as e:
             logger.error(f"Error during upsert operation: {str(e)}")
             raise Exception(f"Failed to insert/update data: {str(e)}")
+        return Response(RESPONSE_TYPE.OK, affected_rows=len(df))
 
     def upsert(self, table_name: str, data: pd.DataFrame):
         """
mindsdb/integrations/handlers/flaml_handler/requirements.txt CHANGED
@@ -1,2 +1,2 @@
 flaml<=1.2.3
-type_infer==0.0.20
+type_infer==0.0.23
mindsdb/integrations/handlers/google_calendar_handler/google_calendar_tables.py CHANGED
@@ -9,7 +9,6 @@ from mindsdb.integrations.utilities.sql_utils import extract_comparison_conditions
 
 
 class GoogleCalendarEventsTable(APITable):
-
     def select(self, query: ast.Select) -> DataFrame:
         """
         Gets all events from the calendar.
@@ -26,33 +25,33 @@ class GoogleCalendarEventsTable(APITable):
         # Get the start and end times from the conditions.
         params = {}
         for op, arg1, arg2 in conditions:
-            if arg1 == 'timeMax' or arg1 == 'timeMin':
+            if arg1 == "timeMax" or arg1 == "timeMin":
                 date = parse_utc_date(arg2)
-                if op == '=':
+                if op == "=":
                     params[arg1] = date
                 else:
                     raise NotImplementedError
-            elif arg1 == 'timeZone':
+            elif arg1 == "timeZone":
                 params[arg1] = arg2
-            elif arg1 == 'maxAttendees':
+            elif arg1 == "maxAttendees":
                 params[arg1] = arg2
-            elif arg1 == 'q':
+            elif arg1 == "q":
                 params[arg1] = arg2
 
         # Get the order by from the query.
         if query.order_by is not None:
-            if query.order_by[0].value == 'start_time':
-                params['orderBy'] = 'startTime'
-            elif query.order_by[0].value == 'updated':
-                params['orderBy'] = 'updated'
+            if query.order_by[0].value == "start_time":
+                params["orderBy"] = "startTime"
+            elif query.order_by[0].value == "updated":
+                params["orderBy"] = "updated"
             else:
                 raise NotImplementedError
 
         if query.limit is not None:
-            params['maxResults'] = query.limit.value
+            params["maxResults"] = query.limit.value
 
         # Get the events from the Google Calendar API.
-        events = self.handler.call_application_api(method_name='get_events', params=params)
+        events = self.handler.call_application_api(method_name="get_events", params=params)
 
         selected_columns = []
         for target in query.targets:
@@ -87,9 +86,20 @@ class GoogleCalendarEventsTable(APITable):
         values = query.values[0]
         # Get the event data from the values.
         event_data = {}
-        timestamp_columns = {'start_time', 'end_time', 'created', 'updated'}
-        regular_columns = {'summary', 'description', 'location', 'status', 'html_link',
-                           'creator', 'organizer', 'reminders', 'timeZone', 'calendar_id', 'attendees'}
+        timestamp_columns = {"start_time", "end_time", "created", "updated"}
+        regular_columns = {
+            "summary",
+            "description",
+            "location",
+            "status",
+            "html_link",
+            "creator",
+            "organizer",
+            "reminders",
+            "timeZone",
+            "calendar_id",
+            "attendees",
+        }
 
         # TODO: check why query.columns is None
         for col, val in zip(query.columns, values):
@@ -100,24 +110,18 @@ class GoogleCalendarEventsTable(APITable):
             else:
                 raise NotImplementedError
 
-        st = datetime.datetime.utcfromtimestamp(event_data['start_time'] / 1000).isoformat() + 'Z'
-        et = datetime.datetime.utcfromtimestamp(event_data['end_time'] / 1000).isoformat() + 'Z'
+        st = datetime.datetime.fromtimestamp(event_data["start_time"] / 1000, datetime.timezone.utc).isoformat() + "Z"
+        et = datetime.datetime.fromtimestamp(event_data["end_time"] / 1000, datetime.timezone.utc).isoformat() + "Z"
 
-        event_data['start'] = {
-            'dateTime': st,
-            'timeZone': event_data['timeZone']
-        }
+        event_data["start"] = {"dateTime": st, "timeZone": event_data["timeZone"]}
 
-        event_data['end'] = {
-            'dateTime': et,
-            'timeZone': event_data['timeZone']
-        }
+        event_data["end"] = {"dateTime": et, "timeZone": event_data["timeZone"]}
 
-        event_data['attendees'] = event_data['attendees'].split(',')
-        event_data['attendees'] = [{'email': attendee} for attendee in event_data['attendees']]
+        event_data["attendees"] = event_data["attendees"].split(",")
+        event_data["attendees"] = [{"email": attendee} for attendee in event_data["attendees"]]
 
         # Insert the event into the Google Calendar API.
-        self.handler.call_application_api(method_name='create_event', params=event_data)
+        self.handler.call_application_api(method_name="create_event", params=event_data)
 
     def update(self, query: ast.Update):
         """
@@ -135,44 +139,48 @@ class GoogleCalendarEventsTable(APITable):
         # Get the event data from the values.
         event_data = {}
         for col, val in zip(query.update_columns, values):
-            if col == 'start_time' or col == 'end_time' or col == 'created' or col == 'updated':
+            if col == "start_time" or col == "end_time" or col == "created" or col == "updated":
                 event_data[col] = utc_date_str_to_timestamp_ms(val)
-            elif col == 'summary' or col == 'description' or col == 'location' or col == 'status' or col == 'html_link' \
-                    or col == 'creator' or col == 'organizer' or col == 'reminders' \
-                    or col == 'timeZone' or col == 'calendar_id' or col == 'attendees':
+            elif (
+                col == "summary"
+                or col == "description"
+                or col == "location"
+                or col == "status"
+                or col == "html_link"
+                or col == "creator"
+                or col == "organizer"
+                or col == "reminders"
+                or col == "timeZone"
+                or col == "calendar_id"
+                or col == "attendees"
+            ):
                 event_data[col] = val
             else:
                 raise NotImplementedError
 
-        event_data['start'] = {
-            'dateTime': event_data['start_time'],
-            'timeZone': event_data['timeZone']
-        }
+        event_data["start"] = {"dateTime": event_data["start_time"], "timeZone": event_data["timeZone"]}
 
-        event_data['end'] = {
-            'dateTime': event_data['end_time'],
-            'timeZone': event_data['timeZone']
-        }
+        event_data["end"] = {"dateTime": event_data["end_time"], "timeZone": event_data["timeZone"]}
 
-        event_data['attendees'] = event_data.get('attendees').split(',')
-        event_data['attendees'] = [{'email': attendee} for attendee in event_data['attendees']]
+        event_data["attendees"] = event_data.get("attendees").split(",")
+        event_data["attendees"] = [{"email": attendee} for attendee in event_data["attendees"]]
 
         conditions = extract_comparison_conditions(query.where)
         for op, arg1, arg2 in conditions:
-            if arg1 == 'event_id':
-                if op == '=':
-                    event_data['event_id'] = arg2
-                elif op == '>':
-                    event_data['start_id'] = arg2
-                elif op == '<':
-                    event_data['end_id'] = arg2
+            if arg1 == "event_id":
+                if op == "=":
+                    event_data["event_id"] = arg2
+                elif op == ">":
+                    event_data["start_id"] = arg2
+                elif op == "<":
+                    event_data["end_id"] = arg2
                 else:
                     raise NotImplementedError
            else:
                raise NotImplementedError
 
         # Update the event in the Google Calendar API.
-        self.handler.call_application_api(method_name='update_event', params=event_data)
+        self.handler.call_application_api(method_name="update_event", params=event_data)
 
     def delete(self, query: ast.Delete):
         """
@@ -190,35 +198,36 @@ class GoogleCalendarEventsTable(APITable):
         # Get the start and end times from the conditions.
         params = {}
         for op, arg1, arg2 in conditions:
-            if arg1 == 'event_id':
-                if op == '=':
+            if arg1 == "event_id":
+                if op == "=":
                     params[arg1] = arg2
-                elif op == '>':
-                    params['start_id'] = arg2
-                elif op == '<':
-                    params['end_id'] = arg2
+                elif op == ">":
+                    params["start_id"] = arg2
+                elif op == "<":
+                    params["end_id"] = arg2
                 else:
                     raise NotImplementedError
 
         # Delete the events in the Google Calendar API.
-        self.handler.call_application_api(method_name='delete_event', params=params)
+        self.handler.call_application_api(method_name="delete_event", params=params)
 
     def get_columns(self) -> list:
         """Gets all columns to be returned in pandas DataFrame responses"""
         return [
-            'etag',
-            'id',
-            'status',
-            'htmlLink',
-            'created',
-            'updated',
-            'summary',
-            'creator',
-            'organizer',
-            'start',
-            'end',
-            'timeZone',
-            'iCalUID',
-            'sequence',
-            'reminders',
-            'eventType']
+            "etag",
+            "id",
+            "status",
+            "htmlLink",
+            "created",
+            "updated",
+            "summary",
+            "creator",
+            "organizer",
+            "start",
+            "end",
+            "timeZone",
+            "iCalUID",
+            "sequence",
+            "reminders",
+            "eventType",
+        ]
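
The timestamp change in this handler swaps `datetime.datetime.utcfromtimestamp`, deprecated since Python 3.12, for the timezone-aware `datetime.datetime.fromtimestamp(..., datetime.timezone.utc)`. A minimal sketch of the two calls (the epoch value is illustrative):

    import datetime

    ts_ms = 1721930400000  # illustrative epoch milliseconds, as stored in event_data

    # Old call: deprecated since Python 3.12, returns a naive datetime.
    # datetime.datetime.utcfromtimestamp(ts_ms / 1000)

    # New call: timezone-aware UTC datetime with the same wall-clock value.
    aware = datetime.datetime.fromtimestamp(ts_ms / 1000, datetime.timezone.utc)
    print(aware.isoformat())  # 2024-07-25T18:00:00+00:00

One behavioral nuance: the aware variant's `isoformat()` already carries a "+00:00" offset, so the handler's trailing `+ "Z"` now produces strings like "...+00:00Z".
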
mindsdb/integrations/handlers/hubspot_handler/requirements.txt CHANGED
@@ -1 +1 @@
-hubspot-api-client
+hubspot-api-client==11.1.0