brynq-sdk-sharepoint 2.0.0__tar.gz → 2.0.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 1.0
2
2
  Name: brynq_sdk_sharepoint
3
- Version: 2.0.0
3
+ Version: 2.0.2
4
4
  Summary: Sharepoint wrapper from BrynQ
5
5
  Home-page: UNKNOWN
6
6
  Author: BrynQ
@@ -0,0 +1 @@
1
+ from .sharepoint import Sharepoint
@@ -0,0 +1,250 @@
1
+ from brynq_sdk_brynq import BrynQ
2
+ import os
3
+ from typing import List, Union
4
+ import requests
5
+ import json
6
+ from io import BytesIO
7
+ import typing
8
+
9
+
10
class Sharepoint(BrynQ):
    """
    Wrapper around the Microsoft Graph v1.0 API for uploading, downloading,
    listing and deleting files on a SharePoint site drive.

    Access tokens are obtained and refreshed through BrynQ system credentials
    (system='sharepoint'); every request targets https://graph.microsoft.com/v1.0.
    """

    def __init__(self, label: Union[str, List], site: str = None, site_id: str = None, json_subset: int = None, site_name: str = None, debug: bool = False):
        """
        :param label: label of the sharepoint system in BrynQ
        :param site: base url of the sharepoint site (only used together with site_id)
        :param site_id: site id of the sharepoint site
        :param json_subset: index into the ``value`` list of the site's /drives
            response, selecting which drive to operate on
        :param site_name: name of the site; when given, the site id is resolved via
            the Graph search endpoint and json_subset defaults to 0
        :param debug: set to True to enable debug logging
        :raises KeyError: when neither site_name nor site_id is provided
        """
        super().__init__()
        credentials = self.get_system_credential(system='sharepoint', label=label)
        self.debug = debug
        self.timeout = 3600
        if self.debug:
            print(f"credentials: {credentials}")
        self.access_token = credentials['auth']['access_token']
        self.brynq_system_id = credentials['id']
        if site_name is not None:
            self.json_subset = 0 if json_subset is None else json_subset
            self.site_id = self.get_site_id(site_name=site_name)
        elif site_id is not None:
            # Graph composite site id: "<hostname>,<site-collection-id>"
            self.site_id = f"{site},{site_id}"
            # NOTE(review): json_subset may still be None here; get_driveid will
            # then fail on indexing — callers using site_id should pass it explicitly.
            self.json_subset = json_subset
        else:
            raise KeyError('Either site_name or site_id, site and json_subset must be provided')
        if self.debug:
            print(f"site_id: {self.site_id}, json_subset: {self.json_subset}, credentials: {credentials}, brynq_system_id: {self.brynq_system_id}")

    def _get_sharepoint_headers(self) -> dict:
        """Refresh the OAuth token via BrynQ and build the Authorization header."""
        access_token = self.refresh_system_credential(system='sharepoint', system_id=self.brynq_system_id)['access_token']
        headers = {'Authorization': f'Bearer {access_token}'}
        if self.debug:
            print(headers)

        return headers

    def get_site_id(self, site_name: str) -> str:
        """
        Get the site id of a site via the Graph search endpoint.
        Takes the first search hit.
        :param site_name: name of the site
        :return: site id
        :raises requests.HTTPError: when the Graph call fails
        """
        url = f'https://graph.microsoft.com/v1.0/sites?search={site_name}'
        if self.debug:
            print(f"url: {url}")
        response = requests.get(url=url, headers=self._get_sharepoint_headers(), timeout=self.timeout)
        response.raise_for_status()
        site_id = response.json()['value'][0]['id']
        if self.debug:
            print(f"site_id: {site_id}")

        return site_id

    def get_driveid(self) -> str:
        """
        Derive the drive id the files are uploaded to / downloaded from.
        Selects the drive at index ``self.json_subset`` of the site's drive list.
        :return: the drive id
        :raises requests.HTTPError: when the Graph call fails
        """
        url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives'
        if self.debug:
            print(f"url: {url}")
        response = requests.get(url, headers=self._get_sharepoint_headers(), timeout=self.timeout)
        response.raise_for_status()
        drive_id = response.json()['value'][self.json_subset]['id']
        if self.debug:
            print(f"drive_id: {drive_id}")

        return drive_id

    def upload_file(self, local_file_path: str, remote_file_path: str):
        """
        Upload a local file to the configured site + drive via an upload session.
        The whole file is sent in a single ranged PUT.
        :param local_file_path: local path of the file you want to upload
        :param remote_file_path: remote path (folder and filename) on SharePoint
        :return: the upload response
        :raises requests.HTTPError: when creating the session or uploading fails
        """
        drive_id = self.get_driveid()
        url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_file_path}:/createUploadSession'
        if self.debug:
            print(f"url: {url}")
        headers = self._get_sharepoint_headers()
        response = requests.post(url, headers=headers, timeout=self.timeout)
        response.raise_for_status()
        upload_url = response.json()['uploadUrl']
        if self.debug:
            print(f"upload_url: {upload_url}")
        with open(local_file_path, 'rb') as file_input:
            file_bytes = os.path.getsize(local_file_path)
            # Content-Range must cover the full file since it is sent in one chunk.
            headers_upload = {'Content-Type': 'application/json',
                              'Content-Length': f'{file_bytes}',
                              'Content-Range': f'bytes 0-{file_bytes - 1}/{file_bytes}'}
            response_upload = requests.put(url=upload_url, headers=headers_upload, data=file_input, timeout=self.timeout)
            response_upload.raise_for_status()

        return response_upload

    def open_file(self, remote_file_path: str) -> bytes:
        """
        Get a file from SharePoint as bytes (in memory, nothing written to disk).
        :param remote_file_path: filepath on SharePoint
        :return: bytes of the file object
        :raises requests.HTTPError: when the metadata or download request fails
        """
        drive_id = self.get_driveid()
        url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_file_path}'
        if self.debug:
            print(f"url: {url}")
        headers = self._get_sharepoint_headers()
        response = requests.get(url=url, headers=headers, timeout=self.timeout)
        response.raise_for_status()
        download_url = response.json()['@microsoft.graph.downloadUrl']
        if self.debug:
            print(f"download_url: {download_url}")
        response_download = requests.get(url=download_url, headers=headers, timeout=self.timeout)
        response_download.raise_for_status()

        return response_download.content

    def download_file(self, local_file_path: str, remote_file_path: str):
        """
        Download a single file from SharePoint to the local machine.
        :param local_file_path: local path (including filename) to write to
        :param remote_file_path: remote path of the file on SharePoint
        :return: the download response
        :raises requests.HTTPError: when the metadata or download request fails
        """
        driveid = self.get_driveid()
        url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{driveid}/root:/{remote_file_path}'
        headers = self._get_sharepoint_headers()
        response = requests.get(url=url, headers=headers, timeout=self.timeout)
        response.raise_for_status()
        download_url = response.json()['@microsoft.graph.downloadUrl']
        response_download = requests.get(url=download_url, headers=headers, timeout=self.timeout)
        response_download.raise_for_status()
        with open(file=local_file_path, mode='wb') as f:
            f.write(response_download.content)

        return response_download

    def download_files(self, local_folder_path: str, remote_folder_path: str):
        """
        Download all files in a SharePoint folder to the local machine.
        Subdirectories are skipped (they cannot be downloaded as files).
        :param local_folder_path: local folder where the files will be written
        :param remote_folder_path: remote folder path on SharePoint
        :return: list of download responses, one per file
        :raises requests.HTTPError: when any metadata or download request fails
        """
        driveid = self.get_driveid()
        folder_content = self.list_dir(remote_folder_path=remote_folder_path)
        # Keep only driveItems that represent files; folders have no download URL.
        folder_content = [item for item in folder_content if 'file' in item]
        if self.debug:
            print(f"folder_content: {folder_content}")
        filecount = 0

        responses = []
        for file in folder_content:
            url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{driveid}/root:/{remote_folder_path}{file["name"]}'
            if self.debug:
                print(f"url: {url}")
            headers = self._get_sharepoint_headers()
            response = requests.get(url=url, headers=headers, timeout=self.timeout)
            response.raise_for_status()
            download_url = response.json()['@microsoft.graph.downloadUrl']
            response_download = requests.get(url=download_url, headers=headers, timeout=self.timeout)
            # Fail fast on a broken download instead of writing an error body to disk.
            response_download.raise_for_status()
            with open(file=f'{local_folder_path}{file["name"]}', mode='wb') as f:
                f.write(response_download.content)
            filecount += 1
            responses.append(response_download)
        print(f'{filecount} files downloaded')

        return responses

    def list_dir(self, remote_folder_path: str, get_files_from_nested_folders: bool = False) -> list:
        """
        Fetch the contents of a folder and return its "children": the driveItem
        metadata of every item directly under that folder.
        :param remote_folder_path: folder path you want to list
        :param get_files_from_nested_folders: when True, recurse into subfolders
            and return a flat list of {"folder", "file", "id"} dicts instead
        :return: list with the folder's items
        :raises requests.HTTPError: when the Graph call fails
        """
        if get_files_from_nested_folders:
            return list(self._get_all_files_in_folder(folder_path=remote_folder_path))

        drive_id = self.get_driveid()
        url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_folder_path}?expand=children'
        if self.debug:
            print(f"url: {url}")
        response = requests.get(url, headers=self._get_sharepoint_headers(), timeout=self.timeout)
        response.raise_for_status()

        return response.json()['children']

    def _get_all_files_in_folder(self, folder_path) -> typing.Generator:
        """Recursively yield {"folder", "file", "id"} for every file under folder_path."""
        children = self.list_dir(remote_folder_path=folder_path)
        for child in children:
            if 'file' in child:
                yield {"folder": folder_path, "file": child['name'], "id": child['id']}
            else:
                yield from self._get_all_files_in_folder(folder_path=f"{folder_path}/{child['name']}")

    def remove_file(self, remote_file_path: str):
        """
        Remove a file from SharePoint.
        :param remote_file_path: complete path including filename
        :return: response from SharePoint
        :raises requests.HTTPError: when the delete request fails
        """
        drive_id = self.get_driveid()
        url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_file_path}'
        if self.debug:
            print(f"url: {url}")
        response = requests.delete(url=url, headers=self._get_sharepoint_headers(), timeout=self.timeout)
        response.raise_for_status()

        return response

    def remove_files(self, remote_folder_path: str):
        """
        Remove every item directly inside a SharePoint folder.
        :param remote_folder_path: folder path that you want to empty
        :return: list of delete responses, one per item
        :raises requests.HTTPError: when any delete request fails
        """
        drive_id = self.get_driveid()
        folder_content = self.list_dir(remote_folder_path=remote_folder_path)
        responses = []
        for file in folder_content:
            url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_folder_path}{file["name"]}'
            if self.debug:
                print(f"url: {url}")
            response = requests.delete(url=url, headers=self._get_sharepoint_headers(), timeout=self.timeout)
            response.raise_for_status()
            responses.append(response)

        return responses

    def remove_folder(self, folder_id: str):
        """
        Remove a folder from SharePoint by its driveItem id.
        :param folder_id: id of the folder that you want to delete
        :return: response from SharePoint
        :raises requests.HTTPError: when the delete request fails
        """
        drive_id = self.get_driveid()
        url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/items/{folder_id}'
        if self.debug:
            print(f"url: {url}")
        response = requests.delete(url=url, headers=self._get_sharepoint_headers(), timeout=self.timeout)
        response.raise_for_status()

        return response
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 1.0
2
2
  Name: brynq-sdk-sharepoint
3
- Version: 2.0.0
3
+ Version: 2.0.2
4
4
  Summary: Sharepoint wrapper from BrynQ
5
5
  Home-page: UNKNOWN
6
6
  Author: BrynQ
@@ -1,4 +1,6 @@
1
1
  setup.py
2
+ brynq_sdk_sharepoint/__init__.py
3
+ brynq_sdk_sharepoint/sharepoint.py
2
4
  brynq_sdk_sharepoint.egg-info/PKG-INFO
3
5
  brynq_sdk_sharepoint.egg-info/SOURCES.txt
4
6
  brynq_sdk_sharepoint.egg-info/dependency_links.txt
@@ -0,0 +1 @@
1
+ brynq_sdk_sharepoint
@@ -2,7 +2,7 @@ from setuptools import setup, find_namespace_packages
2
2
 
3
3
  setup(
4
4
  name='brynq_sdk_sharepoint',
5
- version='2.0.0',
5
+ version='2.0.2',
6
6
  description='Sharepoint wrapper from BrynQ',
7
7
  long_description='Sharepoint wrapper from BrynQ',
8
8
  author='BrynQ',