semantic-link-labs 0.7.2-py3-none-any.whl → 0.7.4-py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release. This version of semantic-link-labs might be problematic.

Files changed (82)
  1. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/METADATA +15 -3
  2. semantic_link_labs-0.7.4.dist-info/RECORD +134 -0
  3. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +120 -24
  5. sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
  6. sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
  9. sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
  10. sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
  11. sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
  12. sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
  13. sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
  14. sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
  16. sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
  17. sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
  18. sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
  19. sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
  20. sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
  22. sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
  23. sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
  24. sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
  27. sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
  28. sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
  29. sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
  30. sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
  32. sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +914 -0
  36. sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
  37. sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
  38. sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
  41. sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
  42. sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
  43. sempy_labs/_capacities.py +541 -0
  44. sempy_labs/_clear_cache.py +298 -3
  45. sempy_labs/_connections.py +138 -0
  46. sempy_labs/_dataflows.py +130 -0
  47. sempy_labs/_deployment_pipelines.py +171 -0
  48. sempy_labs/_environments.py +156 -0
  49. sempy_labs/_generate_semantic_model.py +148 -27
  50. sempy_labs/_git.py +380 -0
  51. sempy_labs/_helper_functions.py +203 -8
  52. sempy_labs/_icons.py +43 -0
  53. sempy_labs/_list_functions.py +170 -1012
  54. sempy_labs/_model_bpa.py +90 -112
  55. sempy_labs/_model_bpa_bulk.py +3 -1
  56. sempy_labs/_model_bpa_rules.py +788 -800
  57. sempy_labs/_notebooks.py +143 -0
  58. sempy_labs/_query_scale_out.py +28 -7
  59. sempy_labs/_spark.py +465 -0
  60. sempy_labs/_sql.py +120 -0
  61. sempy_labs/_translations.py +3 -1
  62. sempy_labs/_vertipaq.py +160 -99
  63. sempy_labs/_workspace_identity.py +66 -0
  64. sempy_labs/_workspaces.py +294 -0
  65. sempy_labs/directlake/__init__.py +2 -0
  66. sempy_labs/directlake/_directlake_schema_compare.py +1 -2
  67. sempy_labs/directlake/_directlake_schema_sync.py +1 -2
  68. sempy_labs/directlake/_dl_helper.py +4 -7
  69. sempy_labs/directlake/_generate_shared_expression.py +85 -0
  70. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -2
  71. sempy_labs/lakehouse/_get_lakehouse_tables.py +7 -3
  72. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  73. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +5 -0
  74. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +6 -2
  75. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -5
  76. sempy_labs/migration/_migration_validation.py +6 -0
  77. sempy_labs/report/_report_functions.py +21 -42
  78. sempy_labs/report/_report_rebind.py +5 -0
  79. sempy_labs/tom/_model.py +95 -52
  80. semantic_link_labs-0.7.2.dist-info/RECORD +0 -111
  81. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/top_level.txt +0 -0
sempy_labs/_clear_cache.py
@@ -1,7 +1,14 @@
  import sempy.fabric as fabric
- from ._helper_functions import resolve_dataset_id, is_default_semantic_model
+ from sempy_labs._helper_functions import (
+     resolve_dataset_id,
+     is_default_semantic_model,
+     get_adls_client,
+ )
  from typing import Optional
  import sempy_labs._icons as icons
+ from sempy._utils._log import log
+ import pandas as pd
+ from sempy.fabric.exceptions import FabricHTTPException


  def clear_cache(dataset: str, workspace: Optional[str] = None):
@@ -36,7 +43,295 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
      </ClearCache>
      """
      fabric.execute_xmla(dataset=dataset, xmla_command=xmla, workspace=workspace)
+     print(
+         f"{icons.green_dot} Cache cleared for the '{dataset}' semantic model within the '{workspace}' workspace."
+     )

-     outputtext = f"{icons.green_dot} Cache cleared for the '{dataset}' semantic model within the '{workspace}' workspace."

-     return outputtext
+ @log
+ def backup_semantic_model(
+     dataset: str,
+     file_path: str,
+     allow_overwrite: Optional[bool] = True,
+     apply_compression: Optional[bool] = True,
+     workspace: Optional[str] = None,
+ ):
+     """
+     `Backs up <https://learn.microsoft.com/azure/analysis-services/analysis-services-backup>`_ a semantic model to the ADLS Gen2 storage account connected to the workspace.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     file_path : str
+         The ADLS Gen2 storage account location in which to back up the semantic model. Always saves within the 'power-bi-backup/<workspace name>' folder.
+         Must end in '.abf'.
+         Example 1: file_path = 'MyModel.abf'
+         Example 2: file_path = 'MyFolder/MyModel.abf'
+     allow_overwrite : bool, default=True
+         If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location.
+     apply_compression : bool, default=True
+         If True, compresses the backup file. Compressed backup files save disk space, but require slightly higher CPU utilization.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     if not file_path.endswith(".abf"):
+         raise ValueError(
+             f"{icons.red_dot} The backup file must be in the .abf format."
+         )
+
+     workspace = fabric.resolve_workspace_name(workspace)
+
+     tmsl = {
+         "backup": {
+             "database": dataset,
+             "file": file_path,
+             "allowOverwrite": allow_overwrite,
+             "applyCompression": apply_compression,
+         }
+     }
+
+     fabric.execute_tmsl(script=tmsl, workspace=workspace)
+     print(
+         f"{icons.green_dot} The '{dataset}' semantic model within the '{workspace}' workspace has been backed up to the '{file_path}' location."
+     )
+
+
+ @log
+ def restore_semantic_model(
+     dataset: str,
+     file_path: str,
+     allow_overwrite: Optional[bool] = True,
+     ignore_incompatibilities: Optional[bool] = True,
+     force_restore: Optional[bool] = False,
+     workspace: Optional[str] = None,
+ ):
+     """
+     `Restores <https://learn.microsoft.com/power-bi/enterprise/service-premium-backup-restore-dataset>`_ a semantic model based on a backup (.abf) file
+     within the ADLS Gen2 storage account connected to the workspace.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     file_path : str
+         The location of the backup file. Must end in '.abf'.
+         Example 1: file_path = 'MyModel.abf'
+         Example 2: file_path = 'MyFolder/MyModel.abf'
+     allow_overwrite : bool, default=True
+         If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location.
+     ignore_incompatibilities : bool, default=True
+         If True, ignores incompatibilities between Azure Analysis Services and Power BI Premium.
+     force_restore : bool, default=False
+         If True, restores the semantic model with the existing semantic model unloaded and offline.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+     # https://learn.microsoft.com/en-us/power-bi/enterprise/service-premium-backup-restore-dataset
+
+     if not file_path.endswith(".abf"):
+         raise ValueError(
+             f"{icons.red_dot} The backup file for restoring must be in the .abf format."
+         )
+
+     workspace = fabric.resolve_workspace_name(workspace)
+
+     tmsl = {
+         "restore": {
+             "database": dataset,
+             "file": file_path,
+             "allowOverwrite": allow_overwrite,
+             "security": "copyAll",
+             "ignoreIncompatibilities": ignore_incompatibilities,
+         }
+     }
+
+     if force_restore:
+         tmsl["restore"]["forceRestore"] = force_restore
+
+     fabric.execute_tmsl(script=tmsl, workspace=workspace)
+
+     print(
+         f"{icons.green_dot} The '{dataset}' semantic model has been restored to the '{workspace}' workspace based on the '{file_path}' backup file."
+     )
+
+
+ @log
+ def copy_semantic_model_backup_file(
+     source_workspace: str,
+     target_workspace: str,
+     source_file_name: str,
+     target_file_name: str,
+     storage_account: str,
+     source_file_system: Optional[str] = "power-bi-backup",
+     target_file_system: Optional[str] = "power-bi-backup",
+ ):
+     """
+     Copies a semantic model backup file (.abf) from an Azure storage account to another location within the Azure storage account.
+
+     Requirements:
+         1. Must have an Azure storage account and connect it to both the source and target workspace.
+         2. Must be a 'Storage Blob Data Contributor' for the storage account.
+     Steps:
+         1. Navigate to the storage account within the Azure Portal
+         2. Navigate to 'Access Control (IAM)'
+         3. Click '+ Add' -> Add Role Assignment
+         4. Search for 'Storage Blob Data Contributor', select it and click 'Next'
+         5. Add yourself as a member, click 'Next'
+         6. Click 'Review + assign'
+
+     Parameters
+     ----------
+     source_workspace : str
+         The workspace name of the source semantic model backup file.
+     target_workspace : str
+         The workspace name of the target semantic model backup file destination.
+     source_file_name : str
+         The name of the source backup file (i.e. MyModel.abf).
+     target_file_name : str
+         The name of the target backup file (i.e. MyModel.abf).
+     storage_account : str
+         The name of the storage account.
+     source_file_system : str, default="power-bi-backup"
+         The container in which the source backup file is located.
+     target_file_system : str, default="power-bi-backup"
+         The container in which the target backup file will be saved.
+     """
+
+     suffix = ".abf"
+
+     if not source_file_name.endswith(suffix):
+         source_file_name = f"{source_file_name}{suffix}"
+     if not target_file_name.endswith(suffix):
+         target_file_name = f"{target_file_name}{suffix}"
+
+     source_path = f"/{source_workspace}/{source_file_name}"
+     target_path = f"/{target_workspace}/{target_file_name}"
+
+     client = get_adls_client(account_name=storage_account)
+
+     source_file_system_client = client.get_file_system_client(
+         file_system=source_file_system
+     )
+     destination_file_system_client = client.get_file_system_client(
+         file_system=target_file_system
+     )
+
+     source_file_client = source_file_system_client.get_file_client(source_path)
+     destination_file_client = destination_file_system_client.get_file_client(
+         target_path
+     )
+
+     download = source_file_client.download_file()
+     file_content = download.readall()
+
+     # Upload the content to the destination file
+     destination_file_client.create_file()  # Create the destination file
+     destination_file_client.append_data(
+         data=file_content, offset=0, length=len(file_content)
+     )
+     destination_file_client.flush_data(len(file_content))
+
+     print(
+         f"{icons.green_dot} The backup file of the '{source_file_name}' semantic model from the '{source_workspace}' workspace has been copied as the '{target_file_name}' semantic model backup file within the '{target_workspace}' workspace."
+     )
+
+
+ @log
+ def list_backups(workspace: Optional[str] = None) -> pd.DataFrame:
+     """
+     Shows a list of backup files contained within a workspace's ADLS Gen2 storage account.
+     Requirement: An ADLS Gen2 storage account must be `connected to the workspace <https://learn.microsoft.com/power-bi/transform-model/dataflows/dataflows-azure-data-lake-storage-integration#connect-to-an-azure-data-lake-gen-2-at-a-workspace-level>`_.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of backup files contained within a workspace's ADLS Gen2 storage account.
+     """
+
+     client = fabric.PowerBIRestClient()
+     workspace = fabric.resolve_workspace_name(workspace)
+     workspace_id = fabric.resolve_workspace_id(workspace)
+     response = client.get(
+         f"/v1.0/myorg/resources?resourceType=StorageAccount&folderObjectId={workspace_id}"
+     )
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     v = response.json().get("value", [])
+     if not v:
+         raise ValueError(
+             f"{icons.red_dot} A storage account is not associated with the '{workspace}' workspace."
+         )
+     storage_account = v[0]["resourceName"]
+
+     df = list_storage_account_files(storage_account=storage_account)
+     colName = "Storage Account Name"
+     df.insert(0, colName, df.pop(colName))
+
+     return df
+
+
+ @log
+ def list_storage_account_files(
+     storage_account: str, container: Optional[str] = "power-bi-backup"
+ ) -> pd.DataFrame:
+     """
+     Shows a list of files within an ADLS Gen2 storage account.
+
+     Parameters
+     ----------
+     storage_account : str
+         The name of the ADLS Gen2 storage account.
+     container : str, default='power-bi-backup'
+         The name of the container.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of files contained within an ADLS Gen2 storage account.
+     """
+
+     df = pd.DataFrame(
+         columns=[
+             "File Path",
+             "File Size",
+             "Creation Time",
+             "Last Modified",
+             "Expiry Time",
+             "Encryption Scope",
+         ]
+     )
+
+     onelake = get_adls_client(storage_account)
+     fs = onelake.get_file_system_client(container)
+
+     for x in list(fs.get_paths()):
+         if not x.is_directory:
+             new_data = {
+                 "File Path": x.name,
+                 "File Size": x.content_length,
+                 "Creation Time": x.creation_time,
+                 "Last Modified": x.last_modified,
+                 "Expiry Time": x.expiry_time,
+                 "Encryption Scope": x.encryption_scope,
+             }
+
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     df["File Size"] = df["File Size"].astype(int)
+
+     return df
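
For orientation, here is a minimal usage sketch of the backup/restore helpers added above. It assumes these functions are re-exported from the top-level sempy_labs namespace (the expanded sempy_labs/__init__.py in this release suggests so; otherwise import them from sempy_labs._clear_cache), and all dataset, workspace, file, and storage account names are placeholders:

    import sempy_labs as labs

    # Back up a model to the ADLS Gen2 account connected to its workspace ...
    labs.backup_semantic_model(dataset="Sales", file_path="Sales.abf", workspace="Prod")

    # ... copy the .abf file into the backup container of another workspace ...
    labs.copy_semantic_model_backup_file(
        source_workspace="Prod",
        target_workspace="Dev",
        source_file_name="Sales.abf",
        target_file_name="Sales.abf",
        storage_account="mystorageacct",  # placeholder storage account name
    )

    # ... and restore it there. list_backups() shows the available .abf files.
    labs.restore_semantic_model(dataset="Sales", file_path="Sales.abf", workspace="Dev")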
sempy_labs/_connections.py
@@ -1,6 +1,144 @@
  import sempy.fabric as fabric
  import pandas as pd
  from sempy.fabric.exceptions import FabricHTTPException
+ from typing import Optional
+ import sempy_labs._icons as icons
+ from sempy_labs._helper_functions import pagination
+
+
+ def list_connections() -> pd.DataFrame:
+     """
+     Lists all available connections.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing all available connections.
+     """
+
+     client = fabric.FabricRestClient()
+     response = client.get("/v1/connections")
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     df = pd.DataFrame(
+         columns=[
+             "Connection Id",
+             "Connection Name",
+             "Gateway Id",
+             "Connectivity Type",
+             "Connection Path",
+             "Connection Type",
+             "Privacy Level",
+             "Credential Type",
+             "Single Sign On Type",
+             "Connection Encryption",
+             "Skip Test Connection",
+         ]
+     )
+
+     for i in response.json().get("value", []):
+         connection_details = i.get("connectionDetails", {})
+         credential_details = i.get("credentialDetails", {})
+
+         new_data = {
+             "Connection Id": i.get("id"),
+             "Connection Name": i.get("displayName"),
+             "Gateway Id": i.get("gatewayId"),
+             "Connectivity Type": i.get("connectivityType"),
+             "Connection Path": connection_details.get("path"),
+             "Connection Type": connection_details.get("type"),
+             "Privacy Level": i.get("privacyLevel"),
+             "Credential Type": (
+                 credential_details.get("credentialType") if credential_details else None
+             ),
+             "Single Sign On Type": (
+                 credential_details.get("singleSignOnType")
+                 if credential_details
+                 else None
+             ),
+             "Connection Encryption": (
+                 credential_details.get("connectionEncryption")
+                 if credential_details
+                 else None
+             ),
+             "Skip Test Connection": (
+                 credential_details.get("skipTestConnection")
+                 if credential_details
+                 else None
+             ),
+         }
+
+         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+     bool_cols = ["Skip Test Connection"]
+     df[bool_cols] = df[bool_cols].astype(bool)
+
+     return df
+
+
+ def list_item_connections(item_name: str, item_type: str, workspace: Optional[str] = None) -> pd.DataFrame:
+
+     """
+     Shows the list of connections that the specified item is connected to.
+
+     Parameters
+     ----------
+     item_name : str
+         The item name.
+     item_type : str
+         The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/update-item?tabs=HTTP#itemtype>`_.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the list of connections that the specified item is connected to.
+     """
+
+     # https://learn.microsoft.com/en-us/rest/api/fabric/core/items/list-item-connections?tabs=HTTP
+
+     workspace = fabric.resolve_workspace_name(workspace)
+     workspace_id = fabric.resolve_workspace_id(workspace)
+     item_type = item_type[0].upper() + item_type[1:]
+     item_id = fabric.resolve_item_id(item_name=item_name, type=item_type, workspace=workspace)
+
+     client = fabric.FabricRestClient()
+     response = client.post(f"/v1/workspaces/{workspace_id}/items/{item_id}/connections")
+
+     df = pd.DataFrame(
+         columns=[
+             "Connection Name",
+             "Connection Id",
+             "Connectivity Type",
+             "Connection Type",
+             "Connection Path",
+             "Gateway Id",
+         ]
+     )
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     responses = pagination(client, response)
+
+     for r in responses:
+         for v in r.get('value', []):
+             new_data = {
+                 "Connection Name": v.get('displayName'),
+                 "Connection Id": v.get('id'),
+                 "Connectivity Type": v.get('connectivityType'),
+                 "Connection Type": v.get('connectionDetails', {}).get('type'),
+                 "Connection Path": v.get('connectionDetails', {}).get('path'),
+                 "Gateway Id": v.get('gatewayId'),
+             }
+
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df


  def create_connection_cloud(
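
A brief sketch of how the two new connection listing functions might be called, under the same assumption about package-level re-exports; the item name and item type below are illustrative:

    import sempy_labs as labs

    # All connections visible to the caller (Fabric REST: GET /v1/connections).
    connections = labs.list_connections()

    # Connections used by a specific item in the current workspace.
    item_connections = labs.list_item_connections(
        item_name="Sales", item_type="SemanticModel"
    )
    print(item_connections[["Connection Name", "Connection Type", "Connection Path"]])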
sempy_labs/_dataflows.py
@@ -0,0 +1,130 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+ )
+ from typing import Optional
+ import sempy_labs._icons as icons
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ def list_dataflows(workspace: Optional[str] = None):
+     """
+     Shows a list of all dataflows which exist within a workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the dataflows which exist within a workspace.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     client = fabric.PowerBIRestClient()
+     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dataflows")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     df = pd.DataFrame(
+         columns=["Dataflow Id", "Dataflow Name", "Configured By", "Users", "Generation"]
+     )
+
+     for v in response.json().get("value", []):
+         new_data = {
+             "Dataflow Id": v.get("objectId"),
+             "Dataflow Name": v.get("name"),
+             "Configured By": v.get("configuredBy"),
+             "Users": [v.get("users")],
+             "Generation": v.get("generation"),
+         }
+         df = pd.concat(
+             [df, pd.DataFrame(new_data, index=[0])],
+             ignore_index=True,
+         )
+
+     df["Generation"] = df["Generation"].astype(int)
+
+     return df
+
+
+ def assign_workspace_to_dataflow_storage(
+     dataflow_storage_account: str, workspace: Optional[str] = None
+ ):
+     """
+     Assigns a dataflow storage account to a workspace.
+
+     Parameters
+     ----------
+     dataflow_storage_account : str
+         The name of the dataflow storage account.
+     workspace : str, default=None
+         The name of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     df = list_dataflow_storage_accounts()
+     df_filt = df[df["Dataflow Storage Account Name"] == dataflow_storage_account]
+
+     if len(df_filt) == 0:
+         raise ValueError(
+             f"{icons.red_dot} The '{dataflow_storage_account}' does not exist."
+         )
+
+     dataflow_storage_id = df_filt["Dataflow Storage Account ID"].iloc[0]
+     client = fabric.PowerBIRestClient()
+
+     request_body = {"dataflowStorageId": dataflow_storage_id}
+
+     response = client.post(
+         f"/v1.0/myorg/groups/{workspace_id}/AssignToDataflowStorage", json=request_body
+     )
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+     print(
+         f"{icons.green_dot} The '{dataflow_storage_account}' dataflow storage account has been assigned to the '{workspace}' workspace."
+     )
+
+
+ def list_dataflow_storage_accounts() -> pd.DataFrame:
+     """
+     Shows the accessible dataflow storage accounts.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the accessible dataflow storage accounts.
+     """
+
+     df = pd.DataFrame(
+         columns=[
+             "Dataflow Storage Account ID",
+             "Dataflow Storage Account Name",
+             "Enabled",
+         ]
+     )
+     client = fabric.PowerBIRestClient()
+     response = client.get("/v1.0/myorg/dataflowStorageAccounts")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     for v in response.json().get("value", []):
+         new_data = {
+             "Dataflow Storage Account ID": v.get("id"),
+             "Dataflow Storage Account Name": v.get("name"),
+             "Enabled": v.get("isEnabled"),
+         }
+         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     df["Enabled"] = df["Enabled"].astype(bool)
+
+     return df
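
Finally, a short sketch of the new dataflow helpers, again assuming top-level re-export and using placeholder workspace and account names:

    import sempy_labs as labs

    # Dataflows in the current (or a named) workspace, via the Power BI REST API.
    dataflows = labs.list_dataflows(workspace="Dev")

    # Storage accounts available for dataflow storage, and assigning one to a workspace.
    accounts = labs.list_dataflow_storage_accounts()
    labs.assign_workspace_to_dataflow_storage(
        dataflow_storage_account="MyDataflowStorage", workspace="Dev"
    )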