rdxz2-utill 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rdxz2-utill might be problematic; review the advisory details on the package's registry page before use.

utill/my_file.py ADDED
@@ -0,0 +1,60 @@
1
+ import gzip
2
+ import os
3
+ import shutil
4
+
5
+ from loguru import logger
6
+
7
+
8
def compress(src_file: str, keep: bool = False) -> str:
    """Gzip-compress *src_file* into ``<src_file>.gz`` next to it.

    Args:
        src_file: Path of the file to compress (``~`` is expanded).
        keep: If True the original file is kept, otherwise it is removed.

    Returns:
        The path of the created ``.gz`` file.
    """
    src_file = os.path.expanduser(src_file)
    dst_file = src_file + '.gz'

    # Remove a stale destination so the new archive starts clean.
    if os.path.exists(dst_file):
        os.remove(dst_file)
    # Fixed: the original logged the destination path twice.
    logger.debug(f'📄 Compress {src_file} --> {dst_file}')
    with open(src_file, 'rb') as f_in, gzip.open(dst_file, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)

    if not keep:
        os.remove(src_file)

    return dst_file
21
+
22
+
23
def decompress(src_file: str, keep: bool = False) -> str:
    """Decompress a ``.gz`` file into a file of the same name without suffix.

    Args:
        src_file: Path of the ``.gz`` file (``~`` is expanded).
        keep: If True the compressed file is kept, otherwise it is removed.

    Returns:
        The path of the decompressed file.

    Raises:
        ValueError: If *src_file* does not end with ``.gz``.
    """
    if not src_file.endswith('.gz'):
        raise ValueError('File name not ends with .gz!')

    src_file = os.path.expanduser(src_file)
    dst_file = src_file.removesuffix('.gz')

    # Remove a stale destination so the new file starts clean.
    if os.path.exists(dst_file):
        os.remove(dst_file)
    logger.debug(f'📄 Decompress {src_file} --> {dst_file}')
    with gzip.open(src_file, 'rb') as f_in, open(dst_file, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)

    # Plain `if` instead of the original `keep or os.remove(...)` side-effect
    # expression, matching compress() above.
    if not keep:
        os.remove(src_file)

    return dst_file
39
+
40
+
41
def make_sure_directory_exists(dirname: str):
    """Create the directory containing *dirname* if it does not exist.

    Note: ``os.path.dirname`` is applied, so for the intended trailing-``os.sep``
    inputs (see ``make_sure_path_is_directory``) the directory itself is
    created; for a bare file path its parent is created.
    """
    target = os.path.dirname(os.path.expanduser(dirname))
    # exist_ok avoids the check-then-create race of the original
    # exists()/makedirs() pair; skip empty dirnames (plain file names).
    if target:
        os.makedirs(target, exist_ok=True)
44
+
45
+
46
def make_sure_path_is_directory(path: str):
    """Validate that *path* is written as a directory path.

    Raises:
        ValueError: If *path* does not end with the platform separator.
    """
    # Guard clause: a trailing separator marks the path as a directory.
    if path.endswith(os.sep):
        return
    raise ValueError(f'Please specify directory name ending with \'{os.sep}\' character, example for Linux: \'/home/my_username/Downloads/my_folder/\'!')
49
+
50
+
51
def read_last_line(filename: str) -> str:
    """Return the last line of *filename*, decoded (trailing newline kept).

    Scans backwards from the end of the file so the whole file is never
    loaded into memory.
    """
    filename = os.path.expanduser(filename)
    with open(filename, 'rb') as fh:
        try:
            # Step back from EOF until the byte just read is a newline; the
            # file position then sits at the start of the final line.
            fh.seek(-2, os.SEEK_END)
            while fh.read(1) != b'\n':
                fh.seek(-2, os.SEEK_CUR)
        except OSError:
            # Seeking before offset 0 failed: single-line (or empty) file.
            fh.seek(0)
        return fh.readline().decode()
utill/my_gcs.py ADDED
@@ -0,0 +1,117 @@
1
+ import os
2
+ import re
3
+
4
+ from google.cloud import storage
5
+ from loguru import logger
6
+
7
+ from .my_env import envs
8
+
9
+
10
class GCS:
    """Convenience wrapper around the ``google.cloud.storage`` client.

    ``bucket_name`` may include a base path (e.g. ``'my-bucket/some/prefix'``);
    every relative blob path is then resolved under that prefix. Usable as a
    context manager — the client is closed on exit.
    """

    def __init__(self, project: str = None, bucket_name: str = None):
        """Open a storage client for *project* (default ``envs.GCP_PROJECT_ID``)
        and select *bucket_name* (default ``envs.GCS_BUCKET``)."""
        self.project = project if project is not None else envs.GCP_PROJECT_ID
        self.client = storage.Client(project=self.project)

        bucket_name_parts = (bucket_name or envs.GCS_BUCKET).split('/')
        self.change_bucket(bucket_name_parts[0])
        self.base_path = '/'.join(bucket_name_parts[1:]) if len(bucket_name_parts) > 1 else None
        if self.base_path:
            logger.debug(f'Base path: {self.base_path}')

        # Fixed: report the resolved project, not only the constructor argument
        # (the original printed "<application-default>" even when the project
        # came from envs.GCP_PROJECT_ID).
        logger.debug(f'GCS client open, project: {self.project or "<application-default>"}')

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Always release the underlying HTTP transport on context exit.
        self.close_client()

    def _construct_path(self, path: str) -> str:
        """Prefix *path* with the configured base path, if any."""
        return f'{self.base_path}/{path}' if self.base_path else path

    def change_bucket(self, bucket_name: str):
        """Point this helper at another bucket."""
        self.bucket = self.client.bucket(bucket_name)
        logger.debug(f'Change bucket to {self.bucket.name}')

    def get(self, path: str) -> storage.Blob:
        """Return a lazy blob handle for *path* (no API call)."""
        path = self._construct_path(path)
        return self.bucket.blob(path)

    def list(self, path: str) -> list[storage.Blob]:
        """List blobs under *path*; a ``*`` wildcard is matched client-side."""
        path = self._construct_path(path)
        if '*' in path:
            # Use the prefix before the first wildcard server-side, then
            # filter candidates with a regex built from the full pattern.
            path_prefix = path.split('*')[0]
            regex_pattern = '^' + re.escape(path).replace('\\*', '.*') + '$'
            regex = re.compile(regex_pattern)
            return [x for x in self.bucket.list_blobs(prefix=path_prefix) if regex.match(x.name)]

        return list(self.bucket.list_blobs(prefix=path))

    def copy(self, src_path: str, dst_path: str, mv: bool = False):
        """Copy (or move, when *mv*) a blob within the current bucket."""
        src_blob = self.get(src_path)
        dst_blob = self.get(dst_path)

        dst_blob.rewrite(src_blob)

        logger.debug(f'✅ Copy gs://{src_blob.bucket.name}/{src_blob.name} to gs://{dst_blob.bucket.name}/{dst_blob.name}')

        if mv:
            GCS.remove_blob(src_blob)

        return dst_blob

    def copy_to_other_gcs(self, src_blob: storage.Blob, dst_gcs: "GCS", dst_path: str, mv: bool = False):
        """Copy (or move, when *mv*) *src_blob* into another GCS wrapper's bucket."""
        self.bucket.copy_blob(src_blob, dst_gcs.bucket, dst_path)
        dst_blob = dst_gcs.get(dst_path)

        if mv:
            GCS.remove_blob(src_blob)

        return dst_blob

    def upload(self, local_path: str, remote_path: str, mv: bool = False):
        """Upload a local file; when *mv*, delete the local copy afterwards.

        Raises:
            FileNotFoundError: If *local_path* does not exist.
        """
        local_path = os.path.expanduser(local_path)

        if not os.path.exists(local_path):
            raise FileNotFoundError(f'File not found: {local_path}')

        blob = self.get(remote_path)
        blob.upload_from_filename(local_path)

        logger.debug(f'✅ Upload {local_path} to gs://{self.bucket.name}/{blob.name}')

        if mv:
            os.remove(local_path)

        return blob

    def download(self, obj: str | storage.Blob, local_path: str, mv: bool = False):
        """Download a blob (by remote path or Blob handle) to *local_path*.

        If *local_path* is an existing directory the blob's file name is
        appended. When *mv*, the remote blob is deleted after download.

        Raises:
            FileNotFoundError: If the destination directory does not exist.
        """
        local_path = os.path.expanduser(local_path)
        is_blob = isinstance(obj, storage.Blob)  # fixed: isinstance over type()==

        if os.path.isdir(local_path):
            local_path = os.path.join(local_path, obj.name.split('/')[-1] if is_blob else os.path.basename(obj))
        # Fixed: the original raised only when the dirname was EMPTY (a bare
        # file name in the cwd) and never when the directory was missing.
        dst_dir = os.path.dirname(local_path)
        if dst_dir and not os.path.isdir(dst_dir):
            raise FileNotFoundError(f'Destination directory not found: {dst_dir}')

        blob = obj if is_blob else self.get(obj)
        blob.download_to_filename(local_path)

        logger.debug(f'✅ Download gs://{self.bucket.name}/{blob.name} to {local_path}')

        if mv:
            GCS.remove_blob(blob)

        return blob

    def remove(self, remote_path: str):
        """Delete the blob at *remote_path* and return its handle."""
        blob = self.get(remote_path)

        GCS.remove_blob(blob)

        return blob

    def close_client(self):
        """Close the underlying HTTP transport."""
        self.client.close()
        logger.debug('GCS client close')

    @staticmethod
    def remove_blob(blob: storage.Blob):
        """Delete *blob* and log the removal."""
        blob.delete()
        logger.debug(f'🗑️ Remove gs://{blob.bucket.name}/{blob.name}')
utill/my_input.py ADDED
@@ -0,0 +1,11 @@
1
+ from .my_style import italic
2
+
3
+
4
def ask_yes_no(prompt: str = 'Continue?', yes_strings: tuple[str, ...] = ('y', ), throw_if_no: bool = False) -> bool:
    """Ask a yes/no question on stdin.

    Args:
        prompt: Question text; ``' (<yes>/no) : '`` is appended.
        yes_strings: Answers counted as "yes"; the first one is shown in the prompt.
        throw_if_no: If True, raise instead of returning False on a "no".

    Returns:
        True when the typed answer is one of *yes_strings*.
        (Fixed: the original annotated the return as ``str``.)

    Raises:
        Exception: When the answer is "no" and *throw_if_no* is set.
    """
    prompt = f'{prompt} ({yes_strings[0]}/no) : '
    yes = input(f'\n{italic(prompt)}') in yes_strings
    if not yes and throw_if_no:
        raise Exception('Aborted by user')

    return yes
utill/my_json.py ADDED
@@ -0,0 +1,62 @@
1
+ import json
2
+ import re
3
+
4
+
5
+ def _crawl_dictionary_keys(d: dict, path: tuple = ()) -> list[str]:
6
+ paths: list[tuple] = []
7
+
8
+ for key in d.keys():
9
+ key_path = path + (key, )
10
+
11
+ # Recursively traverse nested dictionary
12
+ if type(d[key]) is dict:
13
+ result = _crawl_dictionary_keys(d[key], key_path)
14
+ else:
15
+ result = [key_path]
16
+
17
+ paths += result # Combine the array
18
+
19
+ return paths
20
+
21
+
22
def traverse(data: str | dict) -> list:
    """Return the leaf key-paths of *data* (a JSON string or a dict)."""
    # isinstance instead of type()==, so str subclasses are parsed too.
    if isinstance(data, str):
        data = json.loads(data)

    return _crawl_dictionary_keys(data)
27
+
28
+
29
def flatten(data: str | dict) -> list:
    """Alias of ``traverse``: leaf key-paths of a JSON string or dict."""
    # traverse() already parses JSON strings itself, so the json.loads call
    # the original duplicated here was redundant.
    return traverse(data)
34
+
35
+
36
def get_path(data: dict, path: str) -> str:
    """Resolve a dotted *path* (e.g. ``'a.b.c'``) inside the nested dict *data*.

    Returns:
        The value at the path, or None when any segment is missing.

    Raises:
        ValueError: If *data* (or an intermediate value) is not a dict.
    """
    # isinstance instead of type()!=, so dict subclasses are accepted.
    if not isinstance(data, dict):
        raise ValueError('data is not a dictionary!')

    items = path.split('.')
    item = items[0]
    path_remaining = '.'.join(items[1:]) if len(items) > 1 else None

    if item not in data:
        return None

    if path_remaining is None:
        return data[item]

    # Recurse into the child with the rest of the dotted path.
    return get_path(data[item], path_remaining)
51
+
52
+
53
def load_jsonc_file(path) -> dict:
    """Parse a .jsonc (JSON-with-comments) file.

    ``//`` line comments and ``/* */`` block comments are stripped before
    the text is handed to ``json.loads``; comment-like text inside string
    literals is preserved.
    """
    with open(path, 'r') as f:
        raw = f.read()

    # Group 1 captures string literals so they survive the substitution; any
    # match outside a string is a comment and gets dropped.
    comment_or_string = r'("(?:\\.|[^"\\])*")|\/\/.*|\/\*[\s\S]*?\*\/'
    stripped = re.sub(comment_or_string, lambda m: m.group(1) or '', raw)
    return json.loads(stripped)
utill/my_mb.py ADDED
@@ -0,0 +1,375 @@
1
+
2
+ import csv
3
+ import json
4
+ import os
5
+ import requests
6
+
7
+ from loguru import logger
8
+
9
+ from .my_const import HttpMethod
10
+ from .my_csv import write as csv_write
11
+ from .my_dict import AutoPopulatingDict
12
+ from .my_env import MB_FILENAME
13
+
14
+
15
+ def _decode_collection_location_to_group(collections_dict: dict, location: str):
16
+ return ' > '.join(map(lambda x: collections_dict[x], map(int, location.strip('/').split('/'))))
17
+
18
+
19
+ def _translate_user_group_ids(user: dict) -> set:
20
+ return set(user['group_ids']) - {1} # Exclude 'All Users' group
21
+
22
+
23
class MB:
    """Metabase administration client.

    Reads ``base_url`` and ``api_key`` from the JSON config file at
    ``MB_FILENAME``. User, group and collection metadata are fetched lazily
    from the REST API and cached on the instance; questions, dashboards and
    collections are cached per key via ``AutoPopulatingDict``.
    """

    def __init__(self, base_url: str = None) -> None:
        # NOTE(review): the config file handle is never explicitly closed.
        config = json.loads(open(MB_FILENAME, 'r').read())

        self.base_url = base_url or config['base_url']
        self.api_key = config['api_key']

        # Lazy-initialization flags for the cached lookup tables below.
        self._is_user_initialized = False
        self._is_group_initialized = False
        self._is_collection_initialized = False

        # Self-populating caches: a missing key triggers the given fetcher.
        self.dict__question_id__question = AutoPopulatingDict(self._fetch_question_by_id)
        self.dict__question_url__question = AutoPopulatingDict(self._fetch_question_by_url)
        self.dict__dashboard_id__dashboard = AutoPopulatingDict(self._fetch_dashboard_by_id)
        self.dict__collection_id__collection = AutoPopulatingDict(self._fetch_collection_by_id)

        logger.info(f'✅ Initialized {self.base_url}')

    # <<----- START: Util

    def send_request(self, method: HttpMethod, endpoint: str, json_data: dict = None) -> requests.Response:
        """Send an authenticated request to the Metabase API.

        Args:
            method: One of the HttpMethod enum members (GET/POST/PUT/DELETE).
            endpoint: Path relative to the base URL, e.g. ``'api/user'``.
            json_data: JSON body for POST/PUT/DELETE requests.

        Raises:
            ValueError: On an unsupported HTTP method.
            Exception: On a non-2xx response status.
        """
        url = f'{self.base_url}/{endpoint}'
        logger.debug(f'🚗 [{method}] {endpoint}')

        headers = {
            'x-api-key': self.api_key
        }

        if method == HttpMethod.GET:
            response = requests.get(url, headers=headers)
        elif method == HttpMethod.POST:
            response = requests.post(url, headers=headers, json=json_data)
        elif method == HttpMethod.PUT:
            response = requests.put(url, headers=headers, json=json_data)
        elif method == HttpMethod.DELETE:
            response = requests.delete(url, headers=headers, json=json_data)
        else:
            raise ValueError(f'HTTP method {method} not recognized!')

        if not (200 <= response.status_code < 300):
            raise Exception(f'HTTP error {response.status_code}: {response.text}')

        return response

    def reinit(self):
        # Re-run __init__ to reload config and drop all caches.
        # NOTE(review): this discards any base_url override passed originally.
        self.__init__()

    # END: Util ----->>

    # <<----- START: User

    def _init_all_users(self):
        # Fetch all users once and build the id->user and email->user lookups.
        if not self._is_user_initialized:
            logger.debug('🕐 Initialize user data')
            response_json = self.send_request(HttpMethod.GET, 'api/user').json()['data']
            self._dict__user_id__user = {x['id']: x for x in response_json}
            self._dict__user_email__user = {x['email']: x for x in response_json}
            self._is_user_initialized = True

    @property
    def dict__user_id__user(self) -> dict:
        """User id -> user record, fetched lazily on first access."""
        self._init_all_users()
        return self._dict__user_id__user

    @property
    def dict__user_email__user(self) -> dict:
        """User email -> user record, fetched lazily on first access."""
        self._init_all_users()
        return self._dict__user_email__user

    def make_sure_all_email_exists(self, emails: list[str]):
        """Raise ValueError listing every email not registered in Metabase."""
        not_exists = []
        for email in emails:
            try:
                self.dict__user_email__user[email]
            except KeyError:
                not_exists.append(email)

        if not_exists:
            raise ValueError(f'Email not exists: {not_exists}')

    def create_user(self, first_name: str, last_name: str, email: str, group_ids: list):
        """Create a Metabase user and invalidate the local user cache."""
        self.send_request(HttpMethod.POST, 'api/user', {
            'first_name': first_name,
            'last_name': last_name,
            'email': email,
            'user_group_memberships': group_ids,
        }).json()
        self._is_user_initialized = False
        logger.info(f'✅ Create user {email}')

    def deactivate_user_by_email(self, email: str):
        """Deactivate the user with *email* and drop it from the local cache."""
        user = self.dict__user_email__user[email]
        self.send_request(HttpMethod.DELETE, f'api/user/{user["id"]}')
        # Mutates the cached mapping returned by the property.
        del self.dict__user_email__user[email]
        logger.info(f'✅ Deactivate user [{user["id"]}] {email}')

    def reset_password_by_email(self, email: str):
        """Trigger Metabase's forgot-password flow for an existing *email*."""
        try:
            self.dict__user_email__user[email]
        except KeyError as e:
            logger.error(f'User {email} not exists')
            raise e
        self.send_request(HttpMethod.POST, 'api/session/forgot_password', {'email': email})
        logger.info(f'✅ Reset password {email}')

    # END: User ----->>

    # <<----- START: Group

    def _init_all_groups(self):
        # Fetch all permission groups once; build id->group and name->group lookups.
        if not self._is_group_initialized:
            logger.debug('🕐 Initialize group data')
            response_json = self.send_request(HttpMethod.GET, 'api/permissions/group').json()
            self._dict__group_id__group = {x['id']: x for x in response_json}
            self._dict__group_name__group = {x['name']: x for x in response_json}
            self._is_group_initialized = True

    @property
    def dict__group_id__group(self) -> dict:
        """Group id -> group record, fetched lazily on first access."""
        self._init_all_groups()
        return self._dict__group_id__group

    @property
    def dict__group_name__group(self) -> dict:
        """Group name -> group record, fetched lazily on first access."""
        self._init_all_groups()
        return self._dict__group_name__group

    def create_group(self, group_name: str):
        """Create a permission group and invalidate the local group cache."""
        self.send_request(HttpMethod.POST, 'api/permissions/group', {
            'name': group_name,
        })
        self._is_group_initialized = False
        logger.info(f'✅ Create group {group_name}')

    def delete_group(self, group_name: str):
        """Delete the permission group named *group_name*."""
        self.send_request(HttpMethod.DELETE, f'api/permissions/group/{self.dict__group_name__group[group_name]["id"]}')
        self._is_group_initialized = False
        logger.info(f'✅ Delete group {group_name}')

    # END: Group ----->>

    # <<----- START: Permission

    def grant_user_id_to_group_by_id(self, user_id: int, group_id: int) -> None:
        """Add the user to the group, mirroring the change into local caches."""
        self.send_request(HttpMethod.POST, 'api/permissions/membership', {
            'group_id': group_id,
            'user_id': user_id,
        })

        # Update locally
        self.dict__user_id__user[user_id]['group_ids'].append(group_id)
        self.dict__user_email__user[self.dict__user_id__user[user_id]['email']]['group_ids'].append(group_id)

        logger.info(f'✅ Grant user \'{self.dict__user_id__user[user_id]["email"]}\' to group \'{self.dict__group_id__group[group_id]["name"]}\'')

    def mirror_user_permission_by_email(self, source_email: str, target_email: str) -> None:
        """Grant *target_email* every group membership *source_email* has
        (excluding the built-in 'All Users' group)."""
        source_user = self.dict__user_email__user[source_email]
        target_user = self.dict__user_email__user[target_email]

        source_user_group_ids = _translate_user_group_ids(source_user)
        target_user_group_ids = _translate_user_group_ids(target_user)

        to_be_granted_group_ids = source_user_group_ids - target_user_group_ids
        existing_group_ids = source_user_group_ids - to_be_granted_group_ids
        if existing_group_ids:
            # NOTE(review): deliberate no-op — groups both users already share
            # are simply skipped.
            pass
        for group_id in to_be_granted_group_ids:
            self.grant_user_id_to_group_by_id(target_user['id'], group_id)

    def grant_group_id_to_collection_by_id(self, group_id: int, collection_id: int):
        """Give *group_id* read access to *collection_id* via the permission graph.

        No-op (with a warning) when the group already has any non-'none'
        permission on the collection.
        """
        # Get latest revision
        graph = self.send_request(HttpMethod.GET, 'api/collection/graph').json()
        logger.debug(f'Latest revision: {graph["revision"]}')

        # The permission graph keys group/collection ids as strings.
        group_id_str = str(group_id)
        collection_id_str = str(collection_id)

        # Test group existence
        try:
            self.dict__group_id__group[group_id]
        except KeyError as e:
            logger.error(f'Group ID {group_id} not exists')
            raise e

        # Test collection existence
        try:
            self.dict__collection_id__collection[collection_id]
        except KeyError as e:
            logger.error(f'Collection ID {collection_id} not exists')
            raise e

        if graph['groups'][group_id_str][collection_id_str] != 'none':
            logger.warning(f'Group {self.dict__group_id__group[group_id]["name"]} already has permission {graph["groups"][group_id_str][collection_id_str]} to collection {self.dict__collection_id__collection[collection_id]["name"]}')
            return
        graph['groups'][group_id_str][collection_id_str] = 'read'

        self.send_request(HttpMethod.PUT, 'api/collection/graph', {
            'revision': graph['revision'],
            'groups': {
                group_id_str: {
                    collection_id_str: 'read'
                }
            },
        })

        logger.info(f'✅ Grant group \'{self.dict__group_id__group[group_id]["name"]}\' to collection \'{self.dict__collection_id__collection[collection_id]["name"]}\'')

    def grant_user_email_to_dashboard_by_url(self, email: str, dashboard_url: str):
        """Grant *email* access to the dashboard at *dashboard_url* through the
        group mapped to the dashboard's collection (creating the group and the
        collection grant when missing)."""
        # Get user
        user = self.dict__user_email__user[email]
        user_group_ids = _translate_user_group_ids(user)

        # Get dashboard
        dashboard_id = int(dashboard_url.split(f'{self.base_url}/dashboard/')[1].split('-')[0])
        dashboard = self.dict__dashboard_id__dashboard[dashboard_id]

        # Get collection
        collection_id = dashboard['collection_id']
        collection = self.dict__collection_id__collection[collection_id]

        # Get collection's group
        try:
            group = self.dict__group_name__group[collection['group_name']]
        except KeyError:
            # Create group if not exists
            self.create_group(collection['group_name'])
            group = self.dict__group_name__group[collection['group_name']]

        # Grant group to collection
        self.grant_group_id_to_collection_by_id(group['id'], collection_id)

        # Skip if user already in group
        if group['id'] in user_group_ids:
            logger.warning(f'{dashboard_url}: User {email} already in group {group["name"]}')
            return

        # Grant
        self.grant_user_id_to_group_by_id(user['id'], group['id'])

    def grant_user_email_to_collection_by_url(self, email: str, collection_url: str):
        """Grant *email* access to the collection at *collection_url* through
        the group mapped to that collection (creating it when missing)."""
        # Get user
        user = self.dict__user_email__user[email]
        user_group_ids = _translate_user_group_ids(user)

        # Get collection
        collection_id = int(collection_url.split(f'{self.base_url}/collection/')[1].split('-')[0])
        collection = self.dict__collection_id__collection[collection_id]

        # Get collection's group
        try:
            group = self.dict__group_name__group[collection['group_name']]
        except KeyError:
            # Create group if not exists
            self.create_group(collection['group_name'])
            group = self.dict__group_name__group[collection['group_name']]

        # Grant group to collection
        self.grant_group_id_to_collection_by_id(group['id'], collection_id)

        # Skip if user already in group
        if group['id'] in user_group_ids:
            logger.warning(f'{collection_url}: User {email} already in group {group["name"]}')
            return

        # Grant
        self.grant_user_id_to_group_by_id(user['id'], group['id'])

    def grant_user_email_to_question_by_url(self, email: str, question_url: str):
        """Grant *email* access to the question at *question_url* through the
        group mapped to the question's collection (creating it when missing)."""
        # Get user
        user = self.dict__user_email__user[email]
        user_group_ids = _translate_user_group_ids(user)

        # Get question
        question = self.dict__question_url__question[question_url]

        # Get question's collection
        collection_id = question['collection_id']
        collection = self.dict__collection_id__collection[question['collection_id']]

        # Get collection's group
        try:
            group = self.dict__group_name__group[collection['group_name']]
        except KeyError:
            # Create group if not exists
            self.create_group(collection['group_name'])
            group = self.dict__group_name__group[collection['group_name']]

        # Grant group to collection
        self.grant_group_id_to_collection_by_id(group['id'], collection_id)

        # Skip if user already in group
        if group['id'] in user_group_ids:
            logger.warning(f'{question_url}: User {email} already in group {group["name"]}')
            return

        # Grant
        self.grant_user_id_to_group_by_id(user['id'], group['id'])

    # END: Permission ----->>

    # <<----- START: Collection

    def _fetch_collection_by_id(self, collection_id: int) -> dict:
        """Resolve a collection record, initializing the collection cache first.

        NOTE(review): on first call this REPLACES the AutoPopulatingDict in
        ``self.dict__collection_id__collection`` with a plain dict of
        non-personal collections enriched with a ``'group_name'`` key.
        Collections not in that dict (e.g. personal ones) are fetched directly.
        """
        if not self._is_collection_initialized:
            logger.debug('🕐 Initialize collection data')
            response_json = [x for x in self.send_request(HttpMethod.GET, 'api/collection').json()[1:]]  # Exclude root collection
            self.dict__collection_id__collection_name = {x['id']: x['name'] for x in response_json}
            self.dict__collection_id__collection = {x['id']: {
                **x,
                'group_name': ' > '.join([_decode_collection_location_to_group(self.dict__collection_id__collection_name, x['location']), x['name']]) if x['location'] != '/' else x['name']
            } for x in response_json if x['personal_owner_id'] is None}
            self._is_collection_initialized = True

        if collection_id in self.dict__collection_id__collection:
            return self.dict__collection_id__collection[collection_id]
        else:
            return self.send_request(HttpMethod.GET, f'api/collection/{collection_id}').json()

    # END: Collection ----->>

    # <<----- START: Dashboard

    def _fetch_dashboard_by_id(self, dashboard_id: int) -> dict:
        """Fetch a dashboard record by id (uncached fetcher for the cache dict)."""
        return self.send_request(HttpMethod.GET, f'api/dashboard/{dashboard_id}').json()

    # END: Dashboard ----->>

    # <<----- START: Question

    def _fetch_question_by_id(self, question_id: int) -> dict:
        """Fetch a question (card) record by id."""
        return self.send_request(HttpMethod.GET, f'api/card/{question_id}').json()

    def _fetch_question_by_url(self, question_url: str) -> dict:
        """Fetch a question record by its Metabase URL (id parsed from the slug)."""
        question_id = int(question_url.split(f'{self.base_url}/question/')[1].split('-')[0])
        return self._fetch_question_by_id(question_id)

    def download_question_as_csv(self, card_id: int, dst_filename: str = None):
        """Run question *card_id* and write its CSV result to *dst_filename*.

        NOTE(review): the ``dst_filename=None`` default crashes in
        ``os.path.expanduser`` — confirm callers always pass a path.
        """
        dst_filename = os.path.expanduser(dst_filename)
        response = self.send_request(HttpMethod.POST, f'api/card/{card_id}/query/csv')
        content_decoded = response.content.decode()
        csvreader = csv.reader(content_decoded.splitlines(), delimiter=',')
        data = list(csvreader)

        csv_write(dst_filename, data)

    def archive_question_by_url(self, question_url: str) -> None:
        """Archive the question at *question_url* (id parsed from the slug)."""
        question_id = int(question_url.split(f'{self.base_url}/question/')[1].split('-')[0])
        self.send_request(HttpMethod.PUT, f'api/card/{question_id}', {
            'archived': True
        })
        logger.info(f'✅ Archive question {question_url}')

    # END: Question ----->>