MindsDB mindsdb-25.3.2.0-py3-none-any.whl → mindsdb-25.3.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of MindsDB has been flagged as possibly problematic.
- mindsdb/__about__.py +1 -1
- mindsdb/__main__.py +0 -1
- mindsdb/api/executor/datahub/datanodes/information_schema_datanode.py +2 -6
- mindsdb/api/executor/datahub/datanodes/mindsdb_tables.py +1 -1
- mindsdb/api/http/namespaces/agents.py +9 -5
- mindsdb/api/http/namespaces/chatbots.py +6 -5
- mindsdb/api/http/namespaces/databases.py +5 -6
- mindsdb/api/http/namespaces/skills.py +5 -4
- mindsdb/api/http/namespaces/views.py +6 -7
- mindsdb/integrations/handlers/chromadb_handler/chromadb_handler.py +23 -2
- mindsdb/integrations/handlers/dummy_data_handler/dummy_data_handler.py +16 -6
- mindsdb/integrations/handlers/file_handler/tests/test_file_handler.py +64 -83
- mindsdb/integrations/handlers/github_handler/generate_api.py +228 -0
- mindsdb/integrations/handlers/github_handler/github_handler.py +15 -8
- mindsdb/integrations/handlers/github_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/huggingface_handler/requirements.txt +5 -4
- mindsdb/integrations/handlers/huggingface_handler/requirements_cpu.txt +5 -5
- mindsdb/integrations/handlers/ms_one_drive_handler/ms_graph_api_one_drive_client.py +1 -1
- mindsdb/integrations/handlers/ms_teams_handler/ms_graph_api_teams_client.py +278 -0
- mindsdb/integrations/handlers/ms_teams_handler/ms_teams_handler.py +114 -70
- mindsdb/integrations/handlers/ms_teams_handler/ms_teams_tables.py +431 -0
- mindsdb/integrations/handlers/pgvector_handler/pgvector_handler.py +18 -4
- mindsdb/integrations/handlers/redshift_handler/redshift_handler.py +1 -0
- mindsdb/integrations/handlers/salesforce_handler/requirements.txt +1 -1
- mindsdb/integrations/handlers/salesforce_handler/salesforce_handler.py +20 -25
- mindsdb/integrations/handlers/salesforce_handler/salesforce_tables.py +2 -2
- mindsdb/integrations/handlers/timescaledb_handler/timescaledb_handler.py +11 -6
- mindsdb/integrations/libs/ml_handler_process/learn_process.py +9 -3
- mindsdb/integrations/libs/vectordatabase_handler.py +2 -2
- mindsdb/integrations/utilities/files/file_reader.py +3 -3
- mindsdb/integrations/utilities/handlers/api_utilities/microsoft/ms_graph_api_utilities.py +36 -2
- mindsdb/integrations/utilities/rag/settings.py +1 -0
- mindsdb/interfaces/chatbot/chatbot_controller.py +6 -4
- mindsdb/interfaces/jobs/jobs_controller.py +1 -4
- mindsdb/interfaces/knowledge_base/controller.py +9 -28
- mindsdb/interfaces/knowledge_base/preprocessing/document_preprocessor.py +1 -1
- mindsdb/interfaces/skills/skills_controller.py +8 -7
- mindsdb/utilities/render/sqlalchemy_render.py +11 -5
- {mindsdb-25.3.2.0.dist-info → mindsdb-25.3.4.0.dist-info}/METADATA +236 -233
- {mindsdb-25.3.2.0.dist-info → mindsdb-25.3.4.0.dist-info}/RECORD +43 -42
- {mindsdb-25.3.2.0.dist-info → mindsdb-25.3.4.0.dist-info}/WHEEL +1 -1
- mindsdb/integrations/handlers/timescaledb_handler/tests/__init__.py +0 -0
- mindsdb/integrations/handlers/timescaledb_handler/tests/test_timescaledb_handler.py +0 -47
- {mindsdb-25.3.2.0.dist-info → mindsdb-25.3.4.0.dist-info/licenses}/LICENSE +0 -0
- {mindsdb-25.3.2.0.dist-info → mindsdb-25.3.4.0.dist-info}/top_level.txt +0 -0
mindsdb/integrations/handlers/github_handler/generate_api.py (new file):

@@ -0,0 +1,228 @@
+import inspect
+from typing import List
+from dataclasses import dataclass
+
+import pandas as pd
+import github
+
+from mindsdb.integrations.utilities.sql_utils import (FilterCondition, FilterOperator, SortColumn)
+from mindsdb.integrations.libs.api_handler import APIResource
+
+
+@dataclass
+class Type:
+    name: str
+    sub_type: str = None
+    optional: bool = False
+
+
+@dataclass
+class GHMethod:
+    name: str
+    table_name: str
+    params: dict
+    output: Type
+
+
+def parse_annotations(annotations):
+    '''
+    Parse string annotation, and extract type, input examples:
+    - Milestone | Opt[str]
+    - PaginatedList[Issue]
+    '''
+    type_name, sub_type = None, None
+    if not isinstance(annotations, str):
+
+        return Type(getattr(annotations, '__name__', None))
+    for item in annotations.split('|'):
+        item = item.strip()
+        if item is None:
+            continue
+        if '[' in item:
+            type_name = item[: item.find('[')]
+            item2 = item[item.find('[') + 1: item.rfind(']')]
+            if type_name == 'Opt':
+                inner_type = parse_annotations(item2)
+                inner_type.optional = Type
+                return inner_type
+            if type_name == 'dict':
+                item2 = item2[item2.find(',') + 1:]
+            sub_type = parse_annotations(item2).name
+        else:
+            type_name = item
+        # get only first type
+        break
+    return Type(type_name, sub_type)
+
+
+def get_properties(cls):
+    # find properties of the class
+
+    properties = {}
+    for prop_name, prop in inspect.getmembers(cls):
+        if prop_name.startswith('_'):
+            continue
+        if not isinstance(prop, property):
+            continue
+        sig2 = inspect.signature(prop.fget)
+
+        properties[prop_name] = parse_annotations(sig2.return_annotation)
+    return properties
+
+
+def get_github_types():
+    # get github types
+    types = {}
+
+    GithubObject = github.GithubObject.GithubObject
+    for module_name, module in inspect.getmembers(github, inspect.ismodule):
+        cls = getattr(module, module_name, None)
+        if cls is None:
+            continue
+        if issubclass(cls, GithubObject):
+
+            # remove inherited props
+            parent_props = []
+            for cls2 in cls.__bases__:
+                parent_props += get_properties(cls2).keys()
+
+            properties = {}
+            for k, v in get_properties(cls).items():
+                if k not in parent_props:
+                    properties[k] = v
+
+            types[module_name] = properties
+    return types
+
+
+def get_github_methods(cls):
+    '''
+    Analyse class in order to find methods which return list of objects.
+    '''
+    methods = []
+
+    for method_name, method in inspect.getmembers(cls, inspect.isfunction):
+        sig = inspect.signature(method)
+
+        return_type = parse_annotations(sig.return_annotation)
+        list_prefix = 'get_'
+        if not (method_name.startswith(list_prefix) and return_type.name == 'PaginatedList'):
+            continue
+
+        table_name = method_name[len(list_prefix):]
+
+        params = {}
+        for param_name, param in sig.parameters.items():
+            params[param_name] = parse_annotations(param.annotation)
+
+        methods.append(GHMethod(
+            name=method_name,
+            table_name=table_name,
+            params=params,
+            output=return_type
+        ))
+    return methods
+
+
+class GHTable(APIResource):
+    def __init__(self, *args, method: GHMethod = None, github_types=None, **kwargs):
+        self.method = method
+        self.github_types = github_types
+
+        self.output_columns = {}
+        if method.output.sub_type in self.github_types:
+            self.output_columns = self.github_types[method.output.sub_type]
+
+        # check params:
+        self.params, self.list_params = [], []
+        for name, param_type in method.params.items():
+            self.params.append(name)
+            if param_type.name == 'list':
+                self.list_params.append(name)
+
+        self._allow_sort = 'sort' in method.params
+
+        super().__init__(*args, **kwargs)
+
+    def repr_value(self, value, type_name):
+        if value is None or type_name in ('bool', 'int', 'float'):
+            return value
+        if type_name in self.github_types:
+            properties = self.github_types[type_name]
+            if 'login' in properties:
+                value = getattr(value, 'login')
+            elif 'url' in properties:
+                value = getattr(value, 'url')
+        return str(value)
+
+    def get_columns(self) -> List[str]:
+        return list(self.output_columns.keys())
+
+    def list(
+        self,
+        conditions: List[FilterCondition] = None,
+        limit: int = None,
+        sort: List[SortColumn] = None,
+        targets: List[str] = None,
+        **kwargs
+    ) -> pd.DataFrame:
+
+        if limit is None:
+            limit = 20
+
+        method_kwargs = {}
+        if sort is not None and self._allow_sort:
+            for col in sort:
+                method_kwargs['sort'] = col.column
+                method_kwargs['direction'] = 'asc' if col.ascending else 'desc'
+                sort.applied = True
+                # supported only 1 column
+                break
+
+        if conditions:
+            for condition in conditions:
+                if condition.column not in self.params:
+                    continue
+
+                if condition.column in self.list_params:
+                    if condition.op == FilterOperator.IN:
+                        method_kwargs[condition.column] = condition.value
+                    elif condition.op == FilterOperator.EQUAL:
+                        method_kwargs[condition.column] = [condition]
+                    condition.applied = True
+                else:
+                    method_kwargs[condition.column] = condition.value
+                    condition.applied = True
+
+        connection = self.handler.connect()
+        method = getattr(connection.get_repo(self.handler.repository), self.method.name)
+
+        data = []
+        count = 0
+        for record in method(**method_kwargs):
+            item = {}
+            for name, output_type in self.output_columns.items():
+
+                # workaround to prevent making addition request per property.
+                if name in targets:
+                    # request only if is required
+                    value = getattr(record, name)
+                else:
+                    value = getattr(record, '_' + name).value
+                if value is not None:
+                    if output_type.name == 'list':
+                        value = ",".join([
+                            str(self.repr_value(i, output_type.sub_type))
+                            for i in value
+                        ])
+                    else:
+                        value = self.repr_value(value, output_type.name)
+                item[name] = value
+
+            data.append(item)
+
+            count += 1
+            if limit <= count:
+                break
+
+        return pd.DataFrame(data, columns=self.get_columns())
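The new generate_api.py drives the auto-generated GitHub tables: get_github_methods() inspects a PyGithub class for get_* methods whose return annotation is a PaginatedList, and each hit becomes a GHTable. As a rough standalone check — a hedged sketch using only PyGithub, not code from the release — the same filter can be reproduced like this:

import inspect

import github

# Hypothetical stand-in mirroring generate_api.get_github_methods for
# github.Repository.Repository: list the get_* methods that return a
# PaginatedList and would therefore back a generated table.
for name, fn in inspect.getmembers(github.Repository.Repository, inspect.isfunction):
    annotation = inspect.signature(fn).return_annotation
    if name.startswith("get_") and "PaginatedList" in str(annotation):
        print(name[len("get_"):])  # candidate table name

Each printed name would correspond to a table the handler registers in the next hunk, unless a hand-written table already claims that name.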
mindsdb/integrations/handlers/github_handler/github_handler.py:

@@ -12,7 +12,7 @@ from mindsdb.integrations.handlers.github_handler.github_tables import (
     GithubMilestonesTable,
     GithubProjectsTable, GithubFilesTable
 )
-
+from mindsdb.integrations.handlers.github_handler.generate_api import get_github_types, get_github_methods, GHTable
 from mindsdb.integrations.libs.api_handler import APIHandler
 from mindsdb.integrations.libs.response import (
     HandlerStatusResponse as StatusResponse,
@@ -22,6 +22,7 @@ from mindsdb.utilities import log

 logger = log.getLogger(__name__)

+
 class GithubHandler(APIHandler):
     """The GitHub handler implementation"""

@@ -43,16 +44,22 @@ class GithubHandler(APIHandler):
         self.connection = None
         self.is_connected = False

+        # custom tables
         self._register_table("issues", GithubIssuesTable(self))
-        self._register_table("pull_requests", GithubPullRequestsTable(self))
-        self._register_table("commits", GithubCommitsTable(self))
-        self._register_table("releases", GithubReleasesTable(self))
-        self._register_table("branches", GithubBranchesTable(self))
-        self._register_table("contributors", GithubContributorsTable(self))
-        self._register_table("milestones", GithubMilestonesTable(self))
-        self._register_table("projects", GithubProjectsTable(self))
         self._register_table("files", GithubFilesTable(self))

+        # generated tables
+        github_types = get_github_types()
+
+        # generate tables from repository object
+        for method in get_github_methods(github.Repository.Repository):
+            if method.table_name in self._tables:
+                continue
+
+            table = GHTable(self, github_types=github_types, method=method)
+            self._register_table(method.table_name, table)
+
+
     def connect(self) -> StatusResponse:
         """Set up the connection required by the handler.

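The handler hunk above registers a generated table only when no hand-written table has already claimed the name. Inside GHTable.list (previous file), SQL filters on those tables are then mapped onto keyword arguments of the matching PyGithub getter. A minimal stand-in sketch of that mapping, using plain Python and assumed parameter names rather than the mindsdb classes:

# Hedged stand-in for GHTable.list's condition handling: only columns that are
# parameters of the PyGithub getter are forwarded; list-typed parameters appear
# intended to wrap an equality value in a single-element list.
PARAMS = {"state", "labels", "sort", "direction"}   # e.g. a subset of repo.get_issues params
LIST_PARAMS = {"labels"}

def to_kwargs(column: str, op: str, value):
    if column not in PARAMS:
        return {}                   # unknown columns are left for post-filtering
    if column in LIST_PARAMS and op == "=":
        return {column: [value]}    # scalar equality on a list parameter
    return {column: value}

print(to_kwargs("state", "=", "closed"))   # {'state': 'closed'}
print(to_kwargs("labels", "=", "bug"))     # {'labels': ['bug']}
print(to_kwargs("title", "=", "x"))        # {} -- not a getter parameter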
mindsdb/integrations/handlers/github_handler/requirements.txt:

@@ -1 +1 @@
-pygithub
+pygithub==2.6.1
mindsdb/integrations/handlers/huggingface_handler/requirements_cpu.txt:

@@ -1,6 +1,6 @@
-datasets==2.16.1
-evaluate
-nltk>=3.9
-huggingface-hub
 # Needs to be installed with `pip install --extra-index-url https://download.pytorch.org/whl/ .[huggingface_cpu]`
-
+datasets==2.16.1
+evaluate==0.4.3
+nltk==3.9.1
+huggingface-hub==0.29.3
+torch==2.6.0+cpu
mindsdb/integrations/handlers/ms_teams_handler/ms_graph_api_teams_client.py (new file):

@@ -0,0 +1,278 @@
+from typing import Text, List, Dict, Optional
+
+from requests.exceptions import RequestException
+
+from mindsdb.integrations.utilities.handlers.api_utilities.microsoft.ms_graph_api_utilities import MSGraphAPIBaseClient
+from mindsdb.utilities import log
+
+logger = log.getLogger(__name__)
+
+
+class MSGraphAPITeamsDelegatedPermissionsClient(MSGraphAPIBaseClient):
+    """
+    The Microsoft Graph API client for the Microsoft Teams handler with delegated permissions.
+    This client is used for accessing the Microsoft Teams specific endpoints of the Microsoft Graph API.
+    Several common methods for submitting requests, fetching data, etc. are inherited from the base class.
+    """
+
+    def check_connection(self) -> bool:
+        """
+        Check if the connection to Microsoft Teams is established.
+
+        Returns:
+            bool: True if the connection is established, False otherwise.
+        """
+        try:
+            self.fetch_data_json("me/joinedTeams")
+            return True
+        except RequestException as request_error:
+            logger.error(f"Failed to check connection to Microsoft Teams: {request_error}")
+            return False
+
+    def get_all_groups(self) -> List[Dict]:
+        """
+        Get all groups that the signed in user is a member of.
+
+        Returns:
+            List[Dict]: The groups data.
+        """
+        return self.fetch_data_json("me/joinedTeams")
+
+    def _get_all_group_ids(self) -> List[Text]:
+        """
+        Get all group IDs related to Microsoft Teams.
+
+        Returns:
+            List[Text]: The group IDs.
+        """
+        if not self._group_ids:
+            groups = self.get_all_groups()
+            self._group_ids = [group["id"] for group in groups]
+
+        return self._group_ids
+
+    def get_channel_in_group_by_id(self, group_id: Text, channel_id: Text) -> Dict:
+        """
+        Get a channel by its ID and the ID of the group that it belongs to.
+
+        Args:
+            group_id (Text): The ID of the group that the channel belongs to.
+            channel_id (Text): The ID of the channel.
+
+        Returns:
+            Dict: The channel data.
+        """
+        channel = self.fetch_data_json(f"teams/{group_id}/channels/{channel_id}")
+        # Add the group ID to the channel data.
+        channel.update({"teamId": group_id})
+
+        return channel
+
+    def get_channels_in_group_by_ids(self, group_id: Text, channel_ids: List[Text]) -> List[Dict]:
+        """
+        Get channels by their IDs and the ID of the group that they belong to.
+
+        Args:
+            group_id (Text): The ID of the group that the channels belong to.
+            channel_ids (List[Text]): The IDs of the channels.
+
+        Returns:
+            List[Dict]: The channels data.
+        """
+        channels = []
+        for channel_id in channel_ids:
+            channels.append(self.get_channel_in_group_by_id(group_id, channel_id))
+
+        return channels
+
+    def get_all_channels_in_group(self, group_id: Text) -> List[Dict]:
+        """
+        Get all channels of a group by its ID.
+
+        Args:
+            group_id (Text): The ID of the group.
+
+        Returns:
+            List[Dict]: The channels data.
+        """
+        channels = self.fetch_data_json(f"teams/{group_id}/channels")
+        for channel in channels:
+            channel["teamId"] = group_id
+
+        return channels
+
+    def get_all_channels_across_all_groups(self) -> List[Dict]:
+        """
+        Get all channels across all groups that the signed in user is a member of.
+
+        Returns:
+            List[Dict]: The channels data.
+        """
+        channels = []
+        for group_id in self._get_all_group_ids():
+            channels += self.get_all_channels_in_group(group_id)
+
+        return channels
+
+    def get_channels_across_all_groups_by_ids(self, channel_ids: List[Text]) -> List[Dict]:
+        """
+        Get channels by their IDs.
+
+        Args:
+            channel_ids (List[Text]): The IDs of the channels.
+
+        Returns:
+            List[Dict]: The channels data.
+        """
+        channels = self.get_all_channels_across_all_groups()
+
+        return [channel for channel in channels if channel["id"] in channel_ids]
+
+    def get_message_in_channel_by_id(self, group_id: Text, channel_id: Text, message_id: Text) -> Dict:
+        """
+        Get a message by its ID, the ID of the group that it belongs to, and the ID of the channel that it belongs to.
+
+        Args:
+            group_id (Text): The ID of the group that the channel belongs to.
+            channel_id (Text): The ID of the channel that the message belongs to.
+            message_id (Text): The ID of the message.
+
+        Returns:
+            Dict: The message data.
+        """
+        return self.fetch_data_json(f"teams/{group_id}/channels/{channel_id}/messages/{message_id}")
+
+    def get_messages_in_channel_by_ids(self, group_id: Text, channel_id: Text, message_ids: List[Text]) -> List[Dict]:
+        """
+        Get messages by their IDs, the ID of the group that they belong to, and the ID of the channel that they belong to.
+
+        Args:
+            group_id (Text): The ID of the group that the channel belongs to.
+            channel_id (Text): The ID of the channel that the messages belong to.
+            message_ids (List[Text]): The IDs of the messages.
+
+        Returns:
+            List[Dict]: The messages data.
+        """
+        messages = []
+        for message_id in message_ids:
+            messages.append(self.get_message_in_channel_by_id(group_id, channel_id, message_id))
+
+        return messages
+
+    def get_all_messages_in_channel(self, group_id: Text, channel_id: Text, limit: int = None) -> List[Dict]:
+        """
+        Get messages of a channel by its ID and the ID of the group that it belongs to.
+
+        Args:
+            group_id (Text): The ID of the group that the channel belongs to.
+            channel_id (Text): The ID of the channel.
+
+        Returns:
+            List[Dict]: The messages data.
+        """
+        messages = []
+        for messages_batch in self.fetch_paginated_data(f"teams/{group_id}/channels/{channel_id}/messages"):
+            messages += messages_batch
+
+            if limit and len(messages) >= limit:
+                break
+
+        return messages[:limit]
+
+    def get_chat_by_id(self, chat_id: Text) -> Dict:
+        """
+        Get a chat by its ID.
+
+        Args:
+            chat_id (Text): The ID of the chat.
+
+        Returns:
+            Dict: The chat data.
+        """
+        return self.fetch_data_json(f"/me/chats/{chat_id}")
+
+    def get_chats_by_ids(self, chat_ids: List[Text]) -> List[Dict]:
+        """
+        Get chats by their IDs.
+
+        Args:
+            chat_ids (List[Text]): The IDs of the chats.
+
+        Returns:
+            List[Dict]: The chats data.
+        """
+        chats = []
+        for chat_id in chat_ids:
+            chats.append(self.get_chat_by_id(chat_id))
+
+        return chats
+
+    def get_all_chats(self, limit: int = None) -> List[Dict]:
+        """
+        Get all chats of the signed in user.
+
+        Args:
+            limit (int): The maximum number of chats to return.
+
+        Returns:
+            List[Dict]: The chats data.
+        """
+        chats = []
+        for chat_batch in self.fetch_paginated_data("me/chats"):
+            chats += chat_batch
+
+            if limit and len(chats) >= limit:
+                break
+
+        return chats[:limit]
+
+    def get_message_in_chat_by_id(self, chat_id: Text, message_id: Text) -> Dict:
+        """
+        Get a message by its ID and the ID of the chat that it belongs to.
+
+        Args:
+            chat_id (Text): The ID of the chat that the message belongs to.
+            message_id (Text): The ID of the message.
+
+        Returns:
+            Dict: The message data.
+        """
+        return self.fetch_data_json(f"me/chats/{chat_id}/messages/{message_id}")
+
+    def get_messages_in_chat_by_ids(self, chat_id: Text, message_ids: List[Text]) -> List[Dict]:
+        """
+        Get messages by their IDs and the ID of the chat that they belong to.
+
+        Args:
+            chat_id (Text): The ID of the chat that the messages belong to.
+            message_ids (List[Text]): The IDs of the messages.
+
+        Returns:
+            List[Dict]: The messages data.
+        """
+        messages = []
+        for message_id in message_ids:
+            messages.append(self.get_message_in_chat_by_id(chat_id, message_id))
+
+        return messages
+
+    def get_all_messages_in_chat(self, chat_id: Text, limit: int = None) -> List[Dict]:
+        """
+        Get messages of a chat by its ID.
+
+        Args:
+            chat_id (Text): The ID of the chat.
+
+        Returns:
+            List[Dict]: The messages data.
+        """
+        messages = []
+        for messages_batch in self.fetch_paginated_data(f"me/chats/{chat_id}/messages"):
+            messages += messages_batch
+
+            if limit and len(messages) >= limit:
+                break
+
+        return messages[:limit]
+
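The get_all_* methods above share one pagination pattern: accumulate batches from fetch_paginated_data until an optional limit is reached, then slice. A self-contained sketch of just that pattern (an illustrative stand-in, not the Graph API client itself):

from typing import Dict, Iterable, List, Optional

def collect_with_limit(batches: Iterable[List[Dict]], limit: Optional[int] = None) -> List[Dict]:
    # Mirrors get_all_chats / get_all_messages_in_*: stop pulling pages once enough
    # items are gathered, then trim; a None limit returns everything (lst[:None] == lst).
    items: List[Dict] = []
    for batch in batches:
        items += batch
        if limit and len(items) >= limit:
            break
    return items[:limit]

# In-memory "pages" standing in for paginated Graph API responses:
pages = [[{"id": "1"}, {"id": "2"}], [{"id": "3"}, {"id": "4"}]]
print(collect_with_limit(pages, limit=3))   # [{'id': '1'}, {'id': '2'}, {'id': '3'}]
print(len(collect_with_limit(pages)))       # 4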