brynq-sdk-sharepoint 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,10 @@
+ Metadata-Version: 1.0
+ Name: brynq_sdk_sharepoint
+ Version: 1.0.0
+ Summary: Sharepoint wrapper from BrynQ
+ Home-page: UNKNOWN
+ Author: BrynQ
+ Author-email: support@brynq.com
+ License: BrynQ License
+ Description: Sharepoint wrapper from BrynQ
+ Platform: UNKNOWN
@@ -0,0 +1 @@
+ from brynq_sdk.sharepoint.sharepoint import Sharepoint
@@ -0,0 +1,232 @@
+ import os
+ from typing import List, Union
+ import requests
+ import json
+ from io import BytesIO
+ import typing
+ from brynq_sdk.brynq import BrynQ
+
+
+ class Sharepoint(BrynQ):
+     def __init__(self, label: Union[str, List], site: str = None, site_id: str = None, json_subset: int = None, site_name: str = None, debug: bool = False):
+         """
+         :param label: label of the sharepoint system in BrynQ
+         :param site: base url of the sharepoint site
+         :param site_id: site id of the sharepoint site
+         :param json_subset: fill in the part of the json that needs to be accessed to get the wanted drive id, accompanying the drive you are looking for
+         :param site_name: name of the sharepoint site, used to look up the site id via the Graph API
+         :param debug: set to True to enable debug logging
+         """
+         super().__init__()
+         credentials = self.get_system_credential(system='sharepoint', label=label)
+         self.debug = debug
+         if self.debug:
+             print(f"credentials: {credentials}")
+         self.access_token = credentials['auth']['access_token']
+         self.brynq_system_id = credentials['id']
+         if site_name is not None:
+             self.json_subset = 0 if json_subset is None else json_subset
+             self.site_id = self.get_site_id(site_name=site_name)
+         elif site_id is not None:
+             self.site_id = f"{site},{site_id}"
+             self.json_subset = json_subset
+         else:
+             raise KeyError('Either site_name or site_id, site and json_subset must be provided')
+         if self.debug:
+             print(f"site_id: {self.site_id}, json_subset: {self.json_subset}, credentials: {credentials}, brynq_system_id: {self.brynq_system_id}")
+
+     def _get_headers(self):
+         access_token = self.refresh_system_credential(system='sharepoint', system_id=self.brynq_system_id)['access_token']
+         headers = {'Authorization': f'Bearer {access_token}'}
+         if self.debug:
+             print(headers)
+
+         return headers
+
+     def get_site_id(self, site_name: str) -> str:
+         """
+         Get the site id of a site
+         :param site_name: name of the site
+         :return: site id
+         """
+         url = f'https://graph.microsoft.com/v1.0/sites?search={site_name}'
+         if self.debug:
+             print(f"url: {url}")
+         response = requests.get(url=url, headers=self._get_headers())
+         response.raise_for_status()
+         site_id = response.json()['value'][0]['id']
+         if self.debug:
+             print(f"site_id: {site_id}")
+
+         return site_id
+
+     def get_driveid(self):
+         """
+         This method is used to derive the driveid to which the files have to be uploaded. Needed in the upload url for file upload.
+         :return: returns the needed driveid
+         """
+         url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives'
+         if self.debug:
+             print(f"url: {url}")
+         response = requests.get(url, headers=self._get_headers())
+         response.raise_for_status()
+         drive_id = response.json()['value'][self.json_subset]['id']
+         if self.debug:
+             print(f"drive_id: {drive_id}")
+
+         return drive_id
+
+     def upload_file(self, local_file_path: str, remote_file_path: str):
+         """
+         This method performs the actual file upload to the previously derived site + drive.
+         local_file_path: local path of the file you want to upload
+         remote_file_path: remote path of the folder and filename where you want to place the file
+         """
+         drive_id = self.get_driveid()
+         url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_file_path}:/createUploadSession'
+         if self.debug:
+             print(f"url: {url}")
+         headers = self._get_headers()
+         response = requests.post(url, headers=headers)
+         response.raise_for_status()
+         upload_url = response.json()['uploadUrl']
+         if self.debug:
+             print(f"upload_url: {upload_url}")
+         with open(f'{local_file_path}', 'rb') as file_input:
+             file_bytes = os.path.getsize(f'{local_file_path}')
+             headers_upload = {'Content-Type': 'application/json',
+                               'Content-Length': f'{file_bytes}',
+                               'Content-Range': f'bytes 0-{file_bytes - 1}/{file_bytes}'}
+             response_upload = requests.put(url=upload_url, headers=headers_upload, data=file_input)
+             response_upload.raise_for_status()
+
+     def open_file(self, remote_file_path: str) -> bytes:
+         """
+         Get a file from sharepoint as a byte stream
+         remote_file_path: filepath on sharepoint
+         :return: bytes of file object
+         """
+         drive_id = self.get_driveid()
+         url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_file_path}'
+         if self.debug:
+             print(f"url: {url}")
+         headers = self._get_headers()
+         response = requests.get(url=url, headers=headers)
+         response.raise_for_status()
+         download_url = response.json()['@microsoft.graph.downloadUrl']
+         if self.debug:
+             print(f"download_url: {download_url}")
+         response_download = requests.get(url=download_url, headers=headers)
+         response_download.raise_for_status()
+
+         return response_download.content
+
+     def download_file(self, local_file_path: str, remote_file_path: str):
+         """
+         This method downloads a file from sharepoint to the local machine.
+         local_file_path: local folder where the file will be downloaded to
+         remote_file_path: remote path of the file on sharepoint
+         """
+         driveid = self.get_driveid()
+         url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{driveid}/root:/{remote_file_path}'
+         headers = self._get_headers()
+         response = requests.get(url=url, headers=headers)
+         response.raise_for_status()
+         download_url = response.json()['@microsoft.graph.downloadUrl']
+         response_download = requests.get(url=download_url, headers=headers)
+         response_download.raise_for_status()
+         with open(file=f'{local_file_path}', mode='wb') as f:
+             f.write(BytesIO(response_download.content).read())
+
+     def download_files(self, local_folder_path: str, remote_folder_path: str):
+         """
+         This method downloads all files in a folder from sharepoint to the local machine.
+         local_folder_path: local folder where the files will be downloaded to
+         remote_folder_path: remote path of the folder you want to get on sharepoint
+         """
+         driveid = self.get_driveid()
+         folder_content = self.list_dir(remote_folder_path=remote_folder_path)
+         # remove subdirectories, they can not be downloaded
+         folder_content = [item for item in folder_content if 'file' in item]
+         if self.debug:
+             print(f"folder_content: {folder_content}")
+         filecount = 0
+         for file in folder_content:
+             url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{driveid}/root:/{remote_folder_path}{file["name"]}'
+             if self.debug:
+                 print(f"url: {url}")
+             headers = self._get_headers()
+             response = requests.get(url=url, headers=headers)
+             response.raise_for_status()
+             download_url = response.json()['@microsoft.graph.downloadUrl']
+             response_download = requests.get(url=download_url, headers=headers)
+             with open(file=f'{local_folder_path}{file["name"]}', mode='wb') as f:
+                 f.write(BytesIO(response_download.content).read())
+             filecount += 1
+         print(f'{filecount} files downloaded')
+
+     def list_dir(self, remote_folder_path: str, get_files_from_nested_folders: bool = False) -> Union[list, typing.Generator]:
+         """
+         Fetch the contents of the API and return the "children"
+         which have the information of all the items under that folder
+         remote_folder_path: folder path you want to list
+         :return: all the contents of the folder items
+         """
+         if get_files_from_nested_folders:
+             return list(self._get_all_files_in_folder(folder_path=remote_folder_path))
+
+         drive_id = self.get_driveid()
+         url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_folder_path}?expand=children'
+         if self.debug:
+             print(f"url: {url}")
+         response = requests.get(url, headers=self._get_headers(), timeout=120)
+         response.raise_for_status()
+
+         return response.json()['children']
+
+     # helper function to get all files in a nested directory
+     def _get_all_files_in_folder(self, folder_path) -> typing.Generator:
+         children = self.list_dir(remote_folder_path=folder_path)
+         for child in children:
+             if 'file' in child:
+                 yield {"folder": folder_path, "file": child['name'], "id": child['id']}
+             else:
+                 yield from self._get_all_files_in_folder(folder_path=f"{folder_path}/{child['name']}")
+
+     def remove_file(self, remote_file_path: str):
+         """
+         Remove a file from Sharepoint
+         remote_file_path: complete path including filename
+         """
+         drive_id = self.get_driveid()
+         url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_file_path}'
+         if self.debug:
+             print(f"url: {url}")
+         response = requests.delete(url=url, headers=self._get_headers())
+         response.raise_for_status()
+
+     def remove_files(self, remote_folder_path: str):
+         """
+         Remove all files in a folder from Sharepoint
+         remote_folder_path: folder path that you want to empty
+         """
+         drive_id = self.get_driveid()
+         folder_content = self.list_dir(remote_folder_path=remote_folder_path)
+         for file in folder_content:
+             url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/root:/{remote_folder_path}{file["name"]}'
+             if self.debug:
+                 print(f"url: {url}")
+             response = requests.delete(url=url, headers=self._get_headers())
+             response.raise_for_status()
+
+     def remove_folder(self, folder_id: str):
+         """
+         Remove a folder from Sharepoint
+         folder_id: id of the folder that you want to delete
+         """
+         drive_id = self.get_driveid()
+         url = f'https://graph.microsoft.com/v1.0/sites/{self.site_id}/drives/{drive_id}/items/{folder_id}'
+         if self.debug:
+             print(f"url: {url}")
+         response = requests.delete(url=url, headers=self._get_headers())
+         response.raise_for_status()
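For reference, a minimal usage sketch of the Sharepoint class added above. This is not part of the package; the label, site name, and file paths are illustrative placeholders and must match your own BrynQ and SharePoint configuration.

    from brynq_sdk.sharepoint import Sharepoint

    # Hypothetical label and site name; replace with values configured in your BrynQ environment.
    sharepoint = Sharepoint(label='sharepoint', site_name='My Team Site', debug=True)

    # Upload a local file, then read it back as bytes.
    sharepoint.upload_file(local_file_path='report.xlsx', remote_file_path='Reports/report.xlsx')
    content = sharepoint.open_file(remote_file_path='Reports/report.xlsx')

    # List a folder, including files in nested subfolders.
    items = sharepoint.list_dir(remote_folder_path='Reports', get_files_from_nested_folders=True)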
@@ -0,0 +1,10 @@
+ Metadata-Version: 1.0
+ Name: brynq-sdk-sharepoint
+ Version: 1.0.0
+ Summary: Sharepoint wrapper from BrynQ
+ Home-page: UNKNOWN
+ Author: BrynQ
+ Author-email: support@brynq.com
+ License: BrynQ License
+ Description: Sharepoint wrapper from BrynQ
+ Platform: UNKNOWN
@@ -0,0 +1,9 @@
+ setup.py
+ brynq_sdk/sharepoint/__init__.py
+ brynq_sdk/sharepoint/sharepoint.py
+ brynq_sdk_sharepoint.egg-info/PKG-INFO
+ brynq_sdk_sharepoint.egg-info/SOURCES.txt
+ brynq_sdk_sharepoint.egg-info/dependency_links.txt
+ brynq_sdk_sharepoint.egg-info/not-zip-safe
+ brynq_sdk_sharepoint.egg-info/requires.txt
+ brynq_sdk_sharepoint.egg-info/top_level.txt
@@ -0,0 +1,2 @@
+ brynq-sdk-brynq>=1
+ requests<=3,>=2
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -0,0 +1,18 @@
+ from setuptools import setup
+
+
+ setup(
+     name='brynq_sdk_sharepoint',
+     version='1.0.0',
+     description='Sharepoint wrapper from BrynQ',
+     long_description='Sharepoint wrapper from BrynQ',
+     author='BrynQ',
+     author_email='support@brynq.com',
+     packages=["brynq_sdk.sharepoint"],
+     license='BrynQ License',
+     install_requires=[
+         'brynq-sdk-brynq>=1',
+         'requests>=2,<=3'
+     ],
+     zip_safe=False,
+ )