brynq-sdk-azure 2.0.1__tar.gz → 2.0.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/PKG-INFO +1 -1
- brynq_sdk_azure-2.0.2/brynq_sdk_azure/__init__.py +3 -0
- brynq_sdk_azure-2.0.2/brynq_sdk_azure/azure_connection.py +104 -0
- brynq_sdk_azure-2.0.2/brynq_sdk_azure/blob_storage.py +175 -0
- brynq_sdk_azure-2.0.2/brynq_sdk_azure/entra.py +339 -0
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/PKG-INFO +1 -1
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/SOURCES.txt +4 -0
- brynq_sdk_azure-2.0.2/brynq_sdk_azure.egg-info/top_level.txt +1 -0
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/setup.py +1 -1
- brynq_sdk_azure-2.0.1/brynq_sdk_azure.egg-info/top_level.txt +0 -1
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/dependency_links.txt +0 -0
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/not-zip-safe +0 -0
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/requires.txt +0 -0
- {brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/setup.cfg +0 -0
brynq_sdk_azure-2.0.2/brynq_sdk_azure/azure_connection.py ADDED
@@ -0,0 +1,104 @@
+"""
+See how-to on our confluence page for more details.
+Use the Azure Python SDK to sync files with the Azure Files Share service.
+The config file shall have settings in a format like:
+azure_config = {
+    'azure_connection_string': r'{the azure connection string}',
+    'share_name': "/sharename/",
+    'parent_dir_path': r"volume/data/",  # ALWAYS start with a test file, to make sure you don't mess up the other folders/files
+}
+"""
+
+import os
+import sys
+from azure.storage.fileshare import ShareClient
+
+basedir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(basedir)
+
+
+class AzureConnection:
+    """
+    connection_string: the connection string which functions as a token for the Azure connection
+    share_name: the share name in Azure
+    """
+    def __init__(self):
+        # This ugly fix is needed so the other packages can run without a config.py file
+        import config
+        self.connection_string = config.azure_config['azure_connection_string']
+        self.share_name = config.azure_config['share_name']
+        self.share_client = ShareClient.from_connection_string(self.connection_string, share_name=self.share_name)
+
+    def list_files_and_dirs(self, dir_client):
+        """
+        List all the files and folders under this directory_path.
+        :param dir_client: The connection to a specified directory
+        :return: two lists consisting of files and subfolders separately
+        """
+        my_list = list(dir_client.list_directories_and_files())
+        subdir_list = [x['name'] for x in my_list if x['is_directory'] is True]
+        file_list = [x['name'] for x in my_list if x['is_directory'] is False]
+        return file_list, subdir_list
+
+    def create_directory(self, dir_path):
+        """
+        Create a ShareDirectoryClient from a connection string
+        :param dir_path: The directory_path
+        :return: a share_client which connects with the specified directory,
+        and a flag to indicate whether this directory already existed
+        """
+        dir_client = self.share_client.get_directory_client(directory_path=dir_path)
+        dir_already_existed = False
+        try:
+            dir_client.create_directory()
+        except:
+            dir_already_existed = True
+        return dir_client, dir_already_existed
+
+
+    def empty_folder(self, dir_client, delete_folder=False):
+        """
+        Empty a folder, including all the subfolders and files within.
+        :param dir_client: The share client connected to this directory
+        :param delete_folder: Delete the folder as well. If yes, the whole folder will be removed, otherwise only the files.
+        """
+        file_list, subdir_list = self.list_files_and_dirs(dir_client)
+        if len(subdir_list) > 0:
+            for subdir in subdir_list:
+                self.empty_folder(dir_client.get_subdirectory_client(subdir), delete_folder=True)
+        for file in file_list:
+            dir_client.delete_file(file)
+        if delete_folder:
+            dir_client.delete_directory()
+
+    def create_subdirectory_and_upload_file(self, parentdir_path, subdir_path, local_file_path, filename):
+        """
+        Create a subfolder and upload files to this folder
+        :param parentdir_path: The parent directory
+        :param subdir_path: the subfolder directory
+        :param local_file_path: local file directory from which to upload the files
+        :param filename: filename to be uploaded
+        :return:
+        """
+        _, _ = self.create_directory(parentdir_path)
+        dir_path = os.path.join(parentdir_path, subdir_path)
+        subdir, dir_already_existed = self.create_directory(dir_path)
+        if dir_already_existed:
+            self.empty_folder(subdir, delete_folder=False)
+        # Upload a file to the subdirectory
+        with open(os.path.join(local_file_path, filename), "rb") as source:
+            subdir.upload_file(file_name=filename, data=source)
+
+    def create_directory_and_upload_file(self, parentdir_path, local_file_path, filename):
+        """
+        Create a folder and upload files to this folder
+        :param parentdir_path: The parent directory
+        :param local_file_path: local file directory from which to upload the files
+        :param filename: filename to be uploaded
+        :return:
+        """
+        # Get the directory client
+        parentdir_dir, _ = self.create_directory(parentdir_path)
+        # Upload a file to the directory
+        with open(os.path.join(local_file_path, filename), "rb") as source:
+            parentdir_dir.upload_file(file_name=filename, data=source)
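
For context, a minimal usage sketch of the new AzureConnection class, assuming a config.py that defines the azure_config dict described in the module docstring above; the share paths and file names below are placeholders:

# Sketch only: requires a config.py with azure_config as documented above.
from brynq_sdk_azure.azure_connection import AzureConnection

azure = AzureConnection()

# Create (or reuse) volume/data/test_subdir on the share and upload ./data/test.txt into it.
# If the subfolder already existed, it is emptied first.
azure.create_subdirectory_and_upload_file(
    parentdir_path="volume/data",   # placeholder parent directory on the share
    subdir_path="test_subdir",      # placeholder subfolder name
    local_file_path="./data",       # placeholder local folder
    filename="test.txt",            # placeholder file name
)

# Inspect the parent directory afterwards
dir_client, _ = azure.create_directory("volume/data")
files, subdirs = azure.list_files_and_dirs(dir_client)
print(files, subdirs)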
brynq_sdk_azure-2.0.2/brynq_sdk_azure/blob_storage.py ADDED
@@ -0,0 +1,175 @@
+from brynq_sdk_brynq import BrynQ
+from azure.storage.blob import BlobServiceClient, BlobClient, ContainerClient, generate_account_sas, ResourceTypes, AccountSasPermissions
+from typing import Union, List, Tuple
+from datetime import datetime, timedelta
+
+
+class BlobStorage(BrynQ):
+    def __init__(self, label: Union[str, List]):
+        super().__init__()
+        self.blob_service_client = self.__get_authentication(label=label)
+
+    def __get_authentication(self, label):
+        credentials = self.get_system_credential(system='azure-blob-storage', label=label)
+        storage_account_name = credentials['storage_account_name']
+        storage_account_key = credentials['storage_account_key']
+        sas_token = generate_account_sas(
+            account_name=storage_account_name,
+            account_key=storage_account_key,
+            resource_types=ResourceTypes(service=True, container=True, object=True),
+            permission=AccountSasPermissions(read=True, write=True, list=True, delete=True, add=True, create=True, update=True, process=True),
+            expiry=datetime.utcnow() + timedelta(hours=1)
+        )
+        blob_service_client = BlobServiceClient(
+            account_url=f"https://{storage_account_name}.blob.core.windows.net",
+            credential=sas_token
+        )
+
+        return blob_service_client
+
+    def get_containers(self):
+        all_containers = self.blob_service_client.list_containers(include_metadata=True)
+        container_list = []
+        for container in all_containers:
+            container_info = {
+                'name': container.name,
+                'last_modified': container.last_modified,
+                'etag': container.etag,
+                'lease_state': container.lease,
+                'has_immutability_policy': container.has_immutability_policy,
+                'has_legal_hold': container.has_legal_hold,
+                'metadata': container.metadata
+            }
+            container_list.append(container_info)
+
+        return container_list
+
+    def get_container(self, container_name: str):
+        """
+        Get a container from the blob storage
+        """
+        container = self.blob_service_client.get_container_client(container_name)
+        return container
+
+    def create_container(self, container_name: str):
+        """
+        Create a container in the blob storage
+        """
+        response = self.blob_service_client.create_container(container_name)
+        return response
+
+    def update_container(self):
+        pass
+
+    def delete_container(self):
+        pass
+
+    def get_folders(self, container_name: str):
+        """
+        Retrieves a list of 'folders' in the specified container.
+        Since Azure Blob Storage uses a flat namespace, folders are simulated using prefixes.
+
+        :param container_name: The name of the container.
+        :return: A list of folder names.
+        """
+        container_client = self.get_container(container_name)
+        blobs_list = container_client.list_blobs()
+
+        folder_set = set()
+        for blob in blobs_list:
+            if '/' in blob.name:
+                folder = blob.name.split('/')[0]
+                folder_set.add(folder)
+        folders = list(folder_set)
+        return folders
+
+    def create_folder(self, container_name: str, folder_name: str):
+        """
+        Create a file with a 0 as content. Because the file is created, the folder is also created. After the file and the folder are created,
+        delete the file so the folder will stay. According to the Azure docs, it should be possible to create empty files, but this is not working.
+        """
+        # Split the url and add the container and folder name in between the url
+        original_url = self.blob_service_client.url.split('?')
+        url = f"{original_url[0]}/{container_name}/{folder_name}/empty_file?{original_url[1]}"
+        blob = BlobClient.from_blob_url(blob_url=url)
+
+        # Now create the file and delete it so the folder will stay
+        response = blob.upload_blob(b"0", blob_type='AppendBlob')
+        blob.delete_blob()
+        return response
+
+    def delete_folder(self, container_name: str, folder_name: str):
+        """
+        Deletes all the blobs (files) within a folder, effectively deleting the folder.
+        :param container_name: The name of the container.
+        :param folder_name: The name of the folder to delete.
+        """
+        container_client = self.get_container(container_name)
+        blobs = container_client.list_blobs(name_starts_with=f"{folder_name}/")
+        for blob in blobs:
+            blob_client = container_client.get_blob_client(blob)
+            blob_client.delete_blob()
+        return f"Deleted folder {folder_name} and all its contents."
+
+    def get_files(self, container_name: str, folder_name: str = ""):
+        """
+        Retrieves all files in a container, optionally filtered by folder.
+        :param container_name: The name of the container.
+        :param folder_name: The name of the folder (optional). If provided, only files in this folder will be listed.
+        :return: A list of file names in the container or folder.
+        """
+        container_client = self.get_container(container_name)
+        blobs_list = container_client.list_blobs(name_starts_with=f"{folder_name}/" if folder_name else "")
+
+        file_list = []
+        for blob in blobs_list:
+            if not blob.name.endswith('/'):  # Exclude folder markers
+                file_list.append(blob.name)
+
+        return file_list
+
+    def upload_file(self, container_name: str, blob_name: str, file_path: str, overwrite: bool = False):
+        """
+        Uploads a single file to Azure Blob Storage.
+        :param container_name: The name of the container to upload to.
+        :param blob_name: The name of the blob (the file name in blob storage).
+        :param file_path: The local path to the file to upload.
+        :param overwrite: Whether to overwrite an existing blob. Default is False.
+        """
+        # Get the container client
+        container_client = self.get_container(container_name)
+
+        # Get the blob client
+        blob_client = container_client.get_blob_client(blob_name)
+
+        # Open the file and upload
+        with open(file_path, "rb") as data:
+            blob_client.upload_blob(data, overwrite=overwrite)
+
+        print(f"Successfully uploaded {file_path} to {blob_client.url}")
+        return blob_client.url
+
+    def upload_files(self, container_name: str, files: List[Tuple[str, str]], overwrite: bool = False):
+        """
+        Uploads multiple files to Azure Blob Storage.
+        :param container_name: The name of the container to upload to.
+        :param files: A list of tuples (blob_name, file_path), where blob_name is the name of the blob in storage, and file_path is the local file path.
+        :param overwrite: Whether to overwrite existing blobs. Default is False.
+        """
+        success = True
+        for blob_name, file_path in files:
+            result = self.upload_file(container_name, blob_name, file_path, overwrite=overwrite)
+            if result is None:
+                success = False
+        return success
+
+    def delete_file(self, container_name: str, blob_name: str):
+        """
+        Deletes a specific file from Azure Blob Storage.
+        :param container_name: The name of the container.
+        :param blob_name: The name of the blob (the file) to delete.
+        """
+        container_client = self.get_container(container_name)
+        blob_client = container_client.get_blob_client(blob_name)
+        blob_client.delete_blob()
+        return f"Deleted file {blob_name} from container {container_name}."
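
For context, a minimal usage sketch of the new BlobStorage class, assuming an 'azure-blob-storage' credential (storage_account_name and storage_account_key) is configured in BrynQ under the label used here; the label, container, folder and file names are placeholders:

# Sketch only: the label and all names below are placeholders.
from brynq_sdk_azure.blob_storage import BlobStorage

storage = BlobStorage(label="my-blob-label")

print(storage.get_containers())          # containers with their metadata

storage.create_container("example-container")
storage.create_folder("example-container", "exports")

# Upload a local file as exports/report.csv, then list the folder
storage.upload_file(
    container_name="example-container",
    blob_name="exports/report.csv",
    file_path="./report.csv",
    overwrite=True,
)
print(storage.get_files("example-container", folder_name="exports"))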
brynq_sdk_azure-2.0.2/brynq_sdk_azure/entra.py ADDED
@@ -0,0 +1,339 @@
+from brynq_sdk_brynq import BrynQ
+import urllib.parse
+import warnings
+import requests
+import random
+import string
+import json
+import pandas as pd
+from pandas import json_normalize
+from msal import ConfidentialClientApplication
+from typing import Union, List
+import os
+
+class Entra(BrynQ):
+
+    def __init__(self, label: Union[str, List], debug: bool = False):
+        super().__init__()
+        self.headers = self.__get_headers(label=label)
+        self.endpoint = "https://graph.microsoft.com/v1.0"
+
+    def __get_headers(self, label):
+        credentials = self.get_system_credential(system='azure-entra-token', label=label)
+        tenant_id = credentials['tenant_id']
+        client_id = credentials['client_id']
+        client_secret = credentials['client_secret']
+        authority = f"https://login.microsoftonline.com/{tenant_id}"
+
+        # Create a ConfidentialClientApplication for authentication
+        app = ConfidentialClientApplication(
+            client_id,
+            authority=authority,
+            client_credential=client_secret,
+        )
+
+        # Get an access token for the Graph API
+        result = app.acquire_token_for_client(scopes=["https://graph.microsoft.com/.default"])
+        headers = {
+            'Authorization': f"Bearer {result['access_token']}",
+            'Content-Type': 'application/json'
+        }
+
+        return headers
+
+    def __add_attribute_information(self, payload, custom_attributes):
+        # First get the official name of the custom attribute and all the other information
+        payload.update({"customSecurityAttributes": {}})
+        metadata = requests.get('https://graph.microsoft.com/v1.0/directory/customSecurityAttributeDefinitions', headers=self.headers).json()
+        # Now loop through the given metadata and add the corresponding metadata and the values themselves to the payload
+        for attr, value in custom_attributes.items():
+            for meta in metadata["value"]:
+                if meta["name"] == attr:
+                    attr_set = meta["attributeSet"]
+                    attr_type = meta["type"]
+                    is_collection = meta["isCollection"]
+                    if attr_set not in payload["customSecurityAttributes"]:
+                        payload["customSecurityAttributes"][attr_set] = {"@odata.type": "#microsoft.graph.customSecurityAttributeValue"}
+                    # In case of an integer, the field type should be given as well
+                    if attr_type == "Integer":
+                        if is_collection:
+                            payload["customSecurityAttributes"][attr_set][f"{attr}@odata.type"] = "#Collection(Int32)"
+                        else:
+                            payload["customSecurityAttributes"][attr_set][f"{attr}@odata.type"] = "#Int32"
+                        payload["customSecurityAttributes"][attr_set][attr] = value
+                    # In case of a boolean, only the value should be given, the field type itself is not relevant
+                    elif attr_type == "Boolean":
+                        payload["customSecurityAttributes"][attr_set][attr] = value
+                    # In case of a string, the field type should be given if the field is a collection of values. If it's a single value, the field type is not relevant
+                    else:
+                        if is_collection:
+                            payload["customSecurityAttributes"][attr_set][f"{attr}@odata.type"] = "#Collection(String)"
+                        payload["customSecurityAttributes"][attr_set][attr] = value
+        return payload
+
+    def __generate_password(self):
+        special_characters = string.punctuation
+        digits = string.digits
+        uppercase_letters = string.ascii_uppercase
+        lowercase_letters = string.ascii_lowercase
+
+        # Create a pool of characters
+        pool = special_characters + digits + uppercase_letters + lowercase_letters
+
+        # Ensure at least one character of each type
+        password = random.choice(special_characters)
+        password += random.choice(digits)
+        password += random.choice(uppercase_letters)
+        password += random.choice(lowercase_letters)
+
+        # Fill the remaining length with random characters
+        password += ''.join(random.choice(pool) for _ in range(20 - 4))
+
+        # Shuffle the characters to make the password more random
+        password_list = list(password)
+        random.shuffle(password_list)
+        password = ''.join(password_list)
+
+        return password
+
+    def get_groups(self) -> pd.DataFrame:
+        """
+        Get all groups from Azure Entra
+        :return: pd.DataFrame with the groups
+        """
+        endpoint = "https://graph.microsoft.com/v1.0"
+        df = pd.DataFrame()
+        loop = True
+        url = f"{endpoint}/groups"
+        while loop:
+            response = requests.get(url, headers=self.headers)
+            groups = response.json()['value']
+            df_temp = pd.json_normalize(groups)
+            df = pd.concat([df, df_temp], ignore_index=True)
+            if '@odata.nextLink' in response.json():
+                url = response.json()['@odata.nextLink']
+            else:
+                loop = False
+        df = df.reset_index(drop=True)
+        return df
+
+    def get_group_members(self, group_id: str = '') -> pd.DataFrame:
+        """
+        Get all users from a group in Azure Entra
+        :param group_id: ID of the group. If no ID is given, all possible groups will be returned
+        :return: pd.DataFrame with the users
+        """
+        group_url = "https://graph.microsoft.com/v1.0/groups/"
+        df = pd.DataFrame()
+        while group_url:
+            graph_r = requests.get(group_url, headers=self.headers)
+            graph_json = graph_r.json()
+            groups = graph_json.get('value')
+            for group in groups:
+                print(f"Group ID: {group['id']}, Group Name: {group['displayName']}")
+                # Get users in each group
+                next_url_members = f"https://graph.microsoft.com/v1.0/groups/{group['id']}/members"
+                while next_url_members:
+                    members_r = requests.get(next_url_members, headers=self.headers)
+                    members_json = members_r.json()
+                    members = members_json.get('value')
+                    df_temp = pd.json_normalize(members)
+                    if len(df_temp) > 0:
+                        df_temp['group_id'] = group['id']
+                        df_temp['group'] = group['displayName']
+                        df_temp.rename(columns={'id': 'user_id'}, inplace=True)
+                        df = pd.concat([df, df_temp], ignore_index=True)
+                    next_url_members = members_json.get('@odata.nextLink')
+            group_url = graph_json.get('@odata.nextLink')
+
+        df = df.reset_index(drop=True)
+        return df
+
+    def create_group(self, name: str = '', description: str = '', mail_enabled: bool = False, mail_nickname: str = '', security_enabled: bool = True):
+        """
+        Create a new group in Azure Entra
+        :param name: Name of the group
+        :param description: Description of the group
+        :param mail_enabled: Is the group mail enabled?
+        :param mail_nickname: Mail nickname of the group
+        :param security_enabled: Is the group security enabled?
+        :return: Response of the request
+        """
+        endpoint = "https://graph.microsoft.com/v1.0/groups"
+        payload = {
+            "displayName": f"{name}",
+            "description": f"{description}",
+            "mailEnabled": mail_enabled,
+            "mailNickname": f"{mail_nickname}",
+            "securityEnabled": security_enabled
+        }
+        response = requests.post(endpoint, headers=self.headers, json=payload)
+        return response
+
+    def update_group(self, id: int, name: str = '', description: str = '', mail_enabled: bool = False, mail_nickname: str = '', security_enabled: bool = True):
+        """
+        Update an existing group in Azure Entra
+        :param id: ID of the group
+        :param name: Name of the group
+        :param description: Description of the group
+        :param mail_enabled: Is the group mail enabled?
+        :param mail_nickname: Mail nickname of the group
+        :param security_enabled: Is the group security enabled?
+        :return: Response of the request
+        """
+        endpoint = f"https://graph.microsoft.com/v1.0/groups/{id}"
+        payload = {
+            "displayName": f"{name}",
+            "description": f"{description}",
+            "mailEnabled": mail_enabled,
+            "mailNickname": f"{mail_nickname}",
+            "securityEnabled": security_enabled
+        }
+        response = requests.patch(endpoint, headers=self.headers, json=payload)
+        return response
+
+    def delete_group(self, group_id):
+        """
+        Delete a group in Azure Entra
+        :param group_id: ID of the group
+        :return: Response of the request
+        """
+        endpoint = f"https://graph.microsoft.com/v1.0/groups/{group_id}"
+        response = requests.delete(endpoint, headers=self.headers)
+        return response
+
+    def get_users(self, extra_fields: list = [], custom_attributes: bool = False, expand: str = '', expand_select: str = '') -> pd.DataFrame:
+        """
+        Get all users from Azure Entra
+        :param extra_fields: Besides the default fields, you can add extra fields to the request. Put them in a list
+        :param custom_attributes: Get the custom attributes of the users. If True, all the custom attributes will be returned
+        :return: pd.DataFrame with the users
+        """
+        fields = ['businessPhones', 'displayName', 'givenName', 'id', 'jobTitle', 'mail', 'mobilePhone',
+                  'officeLocation', 'preferredLanguage', 'surname', 'userPrincipalName'] + extra_fields
+        fields = ','.join(fields)
+        endpoint = f"https://graph.microsoft.com/v1.0/users?$select={fields}"
+        if custom_attributes:
+            endpoint = f"https://graph.microsoft.com/beta/users?$select={fields},customSecurityAttributes"
+        # Adding expand and select parameters if provided
+        if expand:
+            if expand_select:
+                endpoint += f",&$expand={expand}($select={expand_select})"
+            else:
+                endpoint += f",&$expand={expand}"
+
+        df = pd.DataFrame()
+        while endpoint:
+            response = requests.get(endpoint, headers=self.headers)
+            endpoint = response.json().get('@odata.nextLink')
+            data = response.json().get('value')
+            df_temp = json_normalize(data, sep='.')
+            df_temp = df_temp.drop([col for col in df_temp.columns if 'odata.type' in col], axis=1)
+            df = pd.concat([df, df_temp], ignore_index=True)
+        df = df.reset_index(drop=True)
+        return df
+
+    def create_user(self, account_enabled=True, display_name='', mail_nickname='', user_principal_name='', password='', force_change_password_next_sign_in=False, extra_fields={}, custom_attributes={}):
+        """
+        Create a new user in Azure Entra
+        :param account_enabled: Is the account enabled? By default True
+        :param display_name: Display name of the user
+        :param mail_nickname: Mail nickname of the user (the part before the @)
+        :param user_principal_name: User principal name of the user
+        :param password: Password of the user. If no password is given, a random password will be generated
+        :param force_change_password_next_sign_in: Force the user to change the password on the next sign-in. By default False
+        :param extra_fields: Extra fields you want to add to the user. Put them in a dictionary
+        :param custom_attributes: A dictionary with the name of the custom attribute and the value. It could be multiple custom attributes
+        """
+        # Custom attributes are only available in the beta version of the API
+        endpoint = 'https://graph.microsoft.com/beta/users' if custom_attributes else 'https://graph.microsoft.com/v1.0/users'
+        if password == '':
+            password = self.__generate_password()
+
+        payload = {
+            "accountEnabled": account_enabled,
+            "displayName": f"{display_name}",
+            "mailNickname": f"{mail_nickname}",
+            "userPrincipalName": user_principal_name,
+            "passwordProfile": {
+                "forceChangePasswordNextSignIn": force_change_password_next_sign_in,
+                "password": f"{password}"
+            },
+        }
+        payload.update(extra_fields)
+
+        # If there are any custom attributes, add them to the payload. But since the endpoint needs extra metadata, we need to do some extra work
+        if len(custom_attributes) > 0:
+            payload = self.__add_attribute_information(payload, custom_attributes)
+        response = requests.post(endpoint, headers=self.headers, json=payload)
+        return response
+
+    def update_user(self, user_id, fields_to_update: dict = {}, custom_attributes: dict = {}, update_password: bool = False):
+        """
+        Update a user in Azure Entra
+        :param user_id: The Azure AD ID of the user
+        :param fields_to_update: A dictionary with the fields you want to update. Don't put the custom attributes in this dictionary
+        :param custom_attributes: A dictionary with the name of the custom attribute and the value. It could be multiple custom attributes
+        :param update_password: If True, the password will be updated with a random value. If False, the password will not be updated
+        """
+        endpoint = f'https://graph.microsoft.com/beta/users/{user_id}' if len(custom_attributes) > 0 else f'https://graph.microsoft.com/v1.0/users/{user_id}'
+        payload = fields_to_update
+        if update_password:
+            password = self.__generate_password()
+            payload.update({"passwordProfile": {
+                "forceChangePasswordNextSignIn": False,
+                "password": f"{password}"
+            }})
+        if len(custom_attributes) > 0:
+            payload = self.__add_attribute_information(payload, custom_attributes)
+        response = requests.patch(endpoint, headers=self.headers, json=payload)
+        return response
+
+    def delete_user(self, user_id, delete=False):
+        """
+        Delete (soft or hard) a user from Azure Entra
+        :param user_id: The Azure AD ID of the user
+        :param delete: If True, the user will be deleted permanently. If False, the user will be soft deleted
+        """
+        endpoint = f"https://graph.microsoft.com/v1.0/users/{user_id}"
+        if delete:
+            response = requests.delete(endpoint, headers=self.headers)
+        else:
+            payload = {"accountEnabled": False}
+            response = requests.patch(endpoint, headers=self.headers, data=json.dumps(payload))
+        return response
+
+    def assign_user_to_group(self, user_id, group_id):
+        """
+        Assign a user to a group
+        :param user_id: The Azure AD ID of the user
+        :param group_id: The Azure AD ID of the group
+        :return: response
+        """
+        url = f"https://graph.microsoft.com/v1.0/groups/{group_id}/members/$ref"
+        data = {"@odata.id": f"https://graph.microsoft.com/v1.0/directoryObjects/{user_id}"}
+        response = requests.post(url, headers=self.headers, data=json.dumps(data))
+        return response
+
+    def update_manager(self, user_id, manager_id):
+        """
+        Update the manager of a user
+        :param user_id: The Azure AD ID of the user
+        :param manager_id: The Azure AD ID of the manager
+        :return: response
+        """
+        url = f"https://graph.microsoft.com/v1.0/users/{user_id}/manager/$ref"
+        content = {"@odata.id": f"https://graph.microsoft.com/v1.0/users/{manager_id}"}
+        response = requests.put(url, headers=self.headers, data=json.dumps(content))
+        return response
+
+    def remove_user_from_group(self, user_id, group_id):
+        """
+        Remove a user from a group
+        :param user_id: The Azure AD ID of the user
+        :param group_id: The Azure AD ID of the group
+        :return: response
+        """
+        url = f"https://graph.microsoft.com/v1.0/groups/{group_id}/members/{user_id}/$ref"
+        response = requests.delete(url, headers=self.headers)
+        return response
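
For context, a minimal usage sketch of the new Entra class, assuming an 'azure-entra-token' credential (tenant_id, client_id, client_secret) is configured in BrynQ under the label used here and the app registration has the required Microsoft Graph application permissions; the label, names and addresses are placeholders:

# Sketch only: the label and all user/group values below are placeholders.
from brynq_sdk_azure.entra import Entra

entra = Entra(label="my-entra-label")

# Read users, including custom security attributes (switches to the beta endpoint)
users = entra.get_users(extra_fields=["employeeId"], custom_attributes=True)
print(users.head())

# Create a user (a random password is generated when none is passed) and add it to a new group
user_response = entra.create_user(
    display_name="Test User",
    mail_nickname="test.user",
    user_principal_name="test.user@example.com",
)
group_response = entra.create_group(name="Example Group", mail_nickname="examplegroup")
entra.assign_user_to_group(
    user_id=user_response.json().get("id"),
    group_id=group_response.json().get("id"),
)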
brynq_sdk_azure-2.0.2/brynq_sdk_azure.egg-info/top_level.txt ADDED
@@ -0,0 +1 @@
+brynq_sdk_azure

brynq_sdk_azure-2.0.1/brynq_sdk_azure.egg-info/top_level.txt DELETED
@@ -1 +0,0 @@
-

{brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/dependency_links.txt RENAMED
File without changes
{brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/not-zip-safe RENAMED
File without changes
{brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/brynq_sdk_azure.egg-info/requires.txt RENAMED
File without changes
{brynq_sdk_azure-2.0.1 → brynq_sdk_azure-2.0.2}/setup.cfg RENAMED
File without changes