semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (71)
  1. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +19 -2
  2. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +71 -64
  3. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +33 -4
  5. sempy_labs/_capacities.py +59 -128
  6. sempy_labs/_capacity_migration.py +19 -21
  7. sempy_labs/_connections.py +2 -4
  8. sempy_labs/_dashboards.py +60 -0
  9. sempy_labs/_data_pipelines.py +5 -31
  10. sempy_labs/_dataflows.py +2 -2
  11. sempy_labs/_dax_query_view.py +55 -0
  12. sempy_labs/_delta_analyzer.py +16 -14
  13. sempy_labs/_environments.py +28 -49
  14. sempy_labs/_eventhouses.py +27 -53
  15. sempy_labs/_eventstreams.py +16 -34
  16. sempy_labs/_external_data_shares.py +4 -10
  17. sempy_labs/_gateways.py +4 -4
  18. sempy_labs/_generate_semantic_model.py +2 -2
  19. sempy_labs/_git.py +90 -1
  20. sempy_labs/_graphQL.py +8 -21
  21. sempy_labs/_helper_functions.py +440 -91
  22. sempy_labs/_kql_databases.py +24 -35
  23. sempy_labs/_kql_querysets.py +15 -32
  24. sempy_labs/_list_functions.py +17 -192
  25. sempy_labs/_managed_private_endpoints.py +9 -2
  26. sempy_labs/_mirrored_databases.py +17 -49
  27. sempy_labs/_ml_experiments.py +6 -31
  28. sempy_labs/_ml_models.py +4 -28
  29. sempy_labs/_model_bpa.py +4 -11
  30. sempy_labs/_model_bpa_bulk.py +23 -27
  31. sempy_labs/_mounted_data_factories.py +119 -0
  32. sempy_labs/_notebooks.py +16 -26
  33. sempy_labs/_one_lake_integration.py +2 -1
  34. sempy_labs/_semantic_models.py +20 -0
  35. sempy_labs/_sql.py +13 -8
  36. sempy_labs/_sqldatabase.py +61 -100
  37. sempy_labs/_utils.py +42 -0
  38. sempy_labs/_vertipaq.py +25 -13
  39. sempy_labs/_warehouses.py +19 -20
  40. sempy_labs/_workloads.py +23 -9
  41. sempy_labs/_workspace_identity.py +6 -0
  42. sempy_labs/_workspaces.py +55 -7
  43. sempy_labs/admin/__init__.py +21 -1
  44. sempy_labs/admin/_apps.py +1 -1
  45. sempy_labs/admin/_artifacts.py +62 -0
  46. sempy_labs/admin/_basic_functions.py +3 -54
  47. sempy_labs/admin/_capacities.py +61 -0
  48. sempy_labs/admin/_reports.py +74 -0
  49. sempy_labs/admin/_scanner.py +2 -2
  50. sempy_labs/admin/_shared.py +4 -2
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  54. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  55. sempy_labs/directlake/_dl_helper.py +0 -6
  56. sempy_labs/directlake/_generate_shared_expression.py +10 -11
  57. sempy_labs/directlake/_guardrails.py +2 -1
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +11 -3
  60. sempy_labs/lakehouse/__init__.py +2 -0
  61. sempy_labs/lakehouse/_lakehouse.py +6 -7
  62. sempy_labs/lakehouse/_shortcuts.py +198 -57
  63. sempy_labs/migration/_migration_validation.py +0 -4
  64. sempy_labs/report/_download_report.py +4 -6
  65. sempy_labs/report/_generate_report.py +15 -23
  66. sempy_labs/report/_report_bpa.py +12 -19
  67. sempy_labs/report/_report_functions.py +2 -1
  68. sempy_labs/report/_report_rebind.py +8 -6
  69. sempy_labs/tom/_model.py +34 -16
  70. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
  71. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_capacities.py CHANGED
@@ -242,7 +242,7 @@ def list_vcores() -> pd.DataFrame:
 
 def get_capacity_resource_governance(capacity_name: str):
 
-    dfC = fabric.list_capacities()
+    dfC = list_capacities()
     dfC_filt = dfC[dfC["Display Name"] == capacity_name]
     capacity_id = dfC_filt["Id"].iloc[0].upper()
 
@@ -256,7 +256,6 @@ def suspend_fabric_capacity(
     capacity_name: str,
     azure_subscription_id: str,
     resource_group: str,
-    **kwargs,
 ):
     """
     This function suspends a Fabric capacity.
@@ -275,26 +274,9 @@ def suspend_fabric_capacity(
         The name of the Azure resource group.
     """
 
-    token_provider = auth.token_provider.get()
-    if token_provider is None:
-        token_provider = ServicePrincipalTokenProvider.from_azure_key_vault(
-            key_vault_uri=kwargs["key_vault_uri"],
-            key_vault_tenant_id=kwargs["key_vault_tenant_id"],
-            key_vault_client_id=kwargs["key_vault_client_id"],
-            key_vault_client_secret=kwargs["key_vault_client_secret"],
-        )
-        print(
-            f"{icons.info} Please use the 'token_provider' parameter instead of the key vault parameters within this function as the key vault parameters have been deprecated."
-        )
-
-    headers = _get_headers(token_provider, audience="azure")
-
     url = f"https://management.azure.com/subscriptions/{azure_subscription_id}/resourceGroups/{resource_group}/providers/Microsoft.Fabric/capacities/{capacity_name}/suspend?api-version={icons.azure_api_version}"
 
-    response = requests.post(url, headers=headers)
-
-    if response.status_code != 202:
-        raise FabricHTTPException(response)
+    _base_api(request=url, client="azure", method="post", status_codes=202)
 
     print(f"{icons.green_dot} The '{capacity_name}' capacity has been suspended.")
 
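Throughout this file, the old pattern — resolve a token provider (with a key-vault fallback driven by **kwargs), build headers via _get_headers, call requests directly, and hand-check the status code — collapses into a single _base_api call, and the deprecated key-vault kwargs disappear from the signatures. The helper itself lives in sempy_labs/_helper_functions.py and is not shown in this diff; the sketch below is only an illustration of what such a wrapper consolidates, with assumed names, defaults, and error type:

    import requests

    def base_api_sketch(request: str, method: str = "get", payload=None,
                        status_codes=200, token: str = ""):
        # Accept a single status code or a list of them.
        if isinstance(status_codes, int):
            status_codes = [status_codes]
        headers = {"Authorization": f"Bearer {token}"}
        # One choke point for every HTTP verb used in this module.
        response = requests.request(method.upper(), request, headers=headers, json=payload)
        if response.status_code not in status_codes:
            raise RuntimeError(f"{method.upper()} {request} -> {response.status_code}")
        return response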
@@ -304,7 +286,6 @@ def resume_fabric_capacity(
     capacity_name: str,
     azure_subscription_id: str,
     resource_group: str,
-    **kwargs,
 ):
     """
     This function resumes a Fabric capacity.
@@ -323,26 +304,9 @@ def resume_fabric_capacity(
         The name of the Azure resource group.
     """
 
-    token_provider = auth.token_provider.get()
-    if token_provider is None:
-        token_provider = ServicePrincipalTokenProvider.from_azure_key_vault(
-            key_vault_uri=kwargs["key_vault_uri"],
-            key_vault_tenant_id=kwargs["key_vault_tenant_id"],
-            key_vault_client_id=kwargs["key_vault_client_id"],
-            key_vault_client_secret=kwargs["key_vault_client_secret"],
-        )
-        print(
-            f"{icons.info} Please use the 'token_provider' parameter instead of the key vault parameters within this function as the key vault parameters have been deprecated."
-        )
-
-    headers = _get_headers(token_provider, audience="azure")
-
     url = f"https://management.azure.com/subscriptions/{azure_subscription_id}/resourceGroups/{resource_group}/providers/Microsoft.Fabric/capacities/{capacity_name}/resume?api-version={icons.azure_api_version}"
 
-    response = requests.post(url, headers=headers)
-
-    if response.status_code != 202:
-        raise FabricHTTPException(response)
+    _base_api(request=url, client="azure", method="post", status_codes=202)
 
     print(f"{icons.green_dot} The '{capacity_name}' capacity has been resumed.")
 
@@ -352,7 +316,6 @@ def delete_embedded_capacity(
     capacity_name: str,
     azure_subscription_id: str,
     resource_group: str,
-    **kwargs,
 ):
     """
     This function deletes a Power BI Embedded capacity.
@@ -369,53 +332,36 @@ def delete_embedded_capacity(
         The name of the Azure resource group.
     """
 
-    token_provider = auth.token_provider.get()
-    if token_provider is None:
-        token_provider = ServicePrincipalTokenProvider.from_azure_key_vault(
-            key_vault_uri=kwargs["key_vault_uri"],
-            key_vault_tenant_id=kwargs["key_vault_tenant_id"],
-            key_vault_client_id=kwargs["key_vault_client_id"],
-            key_vault_client_secret=kwargs["key_vault_client_secret"],
-        )
-        print(
-            f"{icons.info} Please use the 'token_provider' parameter instead of the key vault parameters within this function as the key vault parameters have been deprecated."
-        )
-
-    headers = _get_headers(token_provider, audience="azure")
-
     url = f"https://management.azure.com/subscriptions/{azure_subscription_id}/resourceGroups/{resource_group}/providers/Microsoft.PowerBIDedicated/capacities/{capacity_name}?api-version={icons.azure_api_version}"
 
-    response = requests.delete(url, headers=headers)
-
-    if response.status_code not in [200, 202]:
-        raise FabricHTTPException(response)
+    _base_api(request=url, client="azure", method="delete", status_codes=[200, 202])
 
     print(f"{icons.green_dot} The '{capacity_name}' capacity has been deleted.")
 
 
 @log
-def delete_premium_capacity(capacity_name: str):
+def delete_premium_capacity(capacity: str | UUID, **kwargs):
     """
     This function deletes a Power BI Premium capacity.
 
     Parameters
     ----------
-    capacity_name : str
-        Name of the Fabric capacity.
+    capacity : str | uuid.UUID
+        Name or ID of the Fabric capacity.
     """
+    from sempy_labs._helper_functions import resolve_capacity_id
 
-    dfC = fabric.list_capacities()
-
-    dfC_filt = dfC[dfC["Display Name"] == capacity_name]
-    if len(dfC_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
+    if "capacity_name" in kwargs:
+        capacity = kwargs["capacity_name"]
+        print(
+            f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
         )
-    capacity_id = dfC_filt["Id"].iloc[0].upper()
+
+    capacity_id = resolve_capacity_id(capacity=capacity).upper()
 
     _base_api(request=f"capacities/{capacity_id}", method="delete", status_codes=204)
 
-    print(f"{icons.green_dot} The '{capacity_name}' capacity has been deleted.")
+    print(f"{icons.green_dot} The '{capacity}' capacity has been deleted.")
 
 
 @log
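delete_premium_capacity renames capacity_name to capacity (now accepting a name or a UUID) but keeps old call sites working through a **kwargs shim that remaps the deprecated keyword and prints a warning. The same pattern in isolation, as a hypothetical example (the library prints an icons.warning message rather than raising DeprecationWarning):

    import warnings

    def delete_thing(thing: str, **kwargs):
        # Back-compat shim: honor the old keyword, steer callers to the new one.
        if "thing_name" in kwargs:
            thing = kwargs["thing_name"]
            warnings.warn("'thing_name' is deprecated; use 'thing' instead.",
                          DeprecationWarning, stacklevel=2)
        print(f"Deleting '{thing}'...")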
@@ -423,7 +369,6 @@ def delete_fabric_capacity(
     capacity_name: str,
     azure_subscription_id: str,
     resource_group: str,
-    **kwargs,
 ):
     """
     This function deletes a Fabric capacity.
@@ -442,26 +387,9 @@ def delete_fabric_capacity(
         The name of the Azure resource group.
     """
 
-    token_provider = auth.token_provider.get()
-    if token_provider is None:
-        token_provider = ServicePrincipalTokenProvider.from_azure_key_vault(
-            key_vault_uri=kwargs["key_vault_uri"],
-            key_vault_tenant_id=kwargs["key_vault_tenant_id"],
-            key_vault_client_id=kwargs["key_vault_client_id"],
-            key_vault_client_secret=kwargs["key_vault_client_secret"],
-        )
-        print(
-            f"{icons.info} Please use the 'token_provider' parameter instead of the key vault parameters within this function as the key vault parameters have been deprecated."
-        )
-
-    headers = _get_headers(token_provider, audience="azure")
-
     url = f"https://management.azure.com/subscriptions/{azure_subscription_id}/resourceGroups/{resource_group}/providers/Microsoft.Fabric/capacities/{capacity_name}?api-version={icons.azure_api_version}"
 
-    response = requests.delete(url, headers=headers)
-
-    if response.status_code != 202:
-        raise FabricHTTPException(response)
+    _base_api(request=url, client="azure", method="delete", status_codes=202)
 
     print(f"{icons.green_dot} The '{capacity_name}' capacity has been deleted.")
 
@@ -474,7 +402,6 @@ def update_fabric_capacity(
     sku: Optional[str] = None,
     admin_members: Optional[str | List[str]] = None,
     tags: Optional[dict] = None,
-    **kwargs,
 ):
     """
     This function updates a Fabric capacity's properties.
@@ -499,25 +426,9 @@ def update_fabric_capacity(
         Tag(s) to add to the capacity. Example: {'tagName': 'tagValue'}.
     """
 
-    token_provider = auth.token_provider.get()
-    if token_provider is None:
-        token_provider = ServicePrincipalTokenProvider.from_azure_key_vault(
-            key_vault_uri=kwargs["key_vault_uri"],
-            key_vault_tenant_id=kwargs["key_vault_tenant_id"],
-            key_vault_client_id=kwargs["key_vault_client_id"],
-            key_vault_client_secret=kwargs["key_vault_client_secret"],
-        )
-        print(
-            f"{icons.info} Please use the 'token_provider' parameter instead of the key vault parameters within this function as the key vault parameters have been deprecated."
-        )
-
-    headers = _get_headers(token_provider, audience="azure")
-
     url = f"https://management.azure.com/subscriptions/{azure_subscription_id}/resourceGroups/{resource_group}/providers/Microsoft.Fabric/capacities/{capacity_name}?api-version={icons.azure_api_version}"
 
-    get_response = requests.get(url, headers=headers)
-    if get_response.status_code != 200:
-        raise FabricHTTPException(get_response)
+    get_response = _base_api(request=url, client="azure")
 
     get_json = get_response.json()
     current_sku = get_json.get("sku", {}).get("name")
@@ -549,10 +460,9 @@ def update_fabric_capacity(
         return
 
     payload = _add_sll_tag(payload, tags)
-    response = requests.patch(url, headers=headers, json=payload)
-
-    if response.status_code != 202:
-        raise FabricHTTPException(response)
+    _base_api(
+        request=url, client="azure", method="patch", payload=payload, status_codes=202
+    )
 
     print(
         f"{icons.green_dot} The '{capacity_name}' capacity has been updated accordingly."
@@ -588,28 +498,13 @@ def check_fabric_capacity_name_availablility(
         An indication as to whether the Fabric capacity name is available or not.
     """
 
-    token_provider = auth.token_provider.get()
-    if token_provider is None:
-        token_provider = ServicePrincipalTokenProvider.from_azure_key_vault(
-            key_vault_uri=kwargs["key_vault_uri"],
-            key_vault_tenant_id=kwargs["key_vault_tenant_id"],
-            key_vault_client_id=kwargs["key_vault_client_id"],
-            key_vault_client_secret=kwargs["key_vault_client_secret"],
-        )
-        print(
-            f"{icons.info} Please use the 'token_provider' parameter instead of the key vault parameters within this function as the key vault parameters have been deprecated."
-        )
-
-    headers = _get_headers(token_provider, audience="azure")
-
     payload = {"name": capacity_name, "type": "Microsoft.Fabric/capacities"}
 
     url = f"https://management.azure.com/subscriptions/{azure_subscription_id}/providers/Microsoft.Fabric/locations/{region}/checkNameAvailability?api-version={icons.azure_api_version}"
 
-    response = requests.post(url, headers=headers, json=payload)
-
-    if response.status_code != 202:
-        raise FabricHTTPException(response)
+    response = _base_api(
+        request=url, client="azure", method="post", payload=payload, status_codes=202
+    )
 
     return bool(response.json().get("nameAvailable"))
 
@@ -1236,3 +1131,39 @@ def get_resource_group(azure_subscription_id: str, resource_group: str) -> pd.Da
     }
 
     return pd.DataFrame(new_data, index=[0])
+
+
+def list_capacities() -> pd.DataFrame:
+    """
+    Shows the capacities and their properties.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the capacities and their properties
+    """
+
+    columns = {
+        "Id": "string",
+        "Display Name": "string",
+        "Sku": "string",
+        "Region": "string",
+        "State": "string",
+        "Admins": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(request="/v1.0/myorg/capacities", client="fabric_sp")
+
+    for i in response.json().get("value", []):
+        new_data = {
+            "Id": i.get("id").lower(),
+            "Display Name": i.get("displayName"),
+            "Sku": i.get("sku"),
+            "Region": i.get("region"),
+            "State": i.get("state"),
+            "Admins": [i.get("admins", [])],
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
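The new module-level list_capacities replaces fabric.list_capacities and routes through _base_api with the fabric_sp client, which appears intended to keep the call working under service-principal authentication. Typical usage (the import path is an assumption; the function may also be re-exported from the package root given the __init__.py changes):

    from sempy_labs._capacities import list_capacities

    dfC = list_capacities()
    # Same lookup pattern get_capacity_resource_governance now uses:
    capacity_id = dfC[dfC["Display Name"] == "My Capacity"]["Id"].iloc[0].upper()
    print(capacity_id)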
sempy_labs/_capacity_migration.py CHANGED
@@ -16,6 +16,7 @@ from sempy_labs._helper_functions import (
     _base_api,
 )
 from sempy_labs._capacities import create_fabric_capacity
+from uuid import UUID
 
 
 def _migrate_settings(source_capacity: str, target_capacity: str):
@@ -105,17 +106,13 @@ def migrate_workspaces(
     migrated_workspaces = []
 
     for i, r in dfW.iterrows():
-        workspace = r["Name"]
-
-        if workspaces is None or workspace in workspaces:
-            pass
-        else:
-            continue
-
-        if assign_workspace_to_capacity(
-            capacity_name=target_capacity, workspace=workspace
-        ):
-            migrated_workspaces.append(workspace)
+        workspace_id = r["Id"]
+        workspace_name = r["Name"]
+        if workspaces is None or workspace_name in workspaces:
+            assign_workspace_to_capacity(
+                capacity=target_capacity, workspace=workspace_id
+            )
+            migrated_workspaces.append(workspace_name)
 
     if len(migrated_workspaces) < workspace_count:
         print(
@@ -123,10 +120,11 @@ def migrate_workspaces(
         )
         print(f"{icons.in_progress} Initiating rollback...")
         for i, r in dfW.iterrows():
-            workspace = r["Name"]
-            if workspace in migrated_workspaces:
+            workspace_id = r["Id"]
+            workspace_name = r["Name"]
+            if workspace_name in migrated_workspaces:
                 assign_workspace_to_capacity(
-                    capacity_name=source_capacity, workspace=workspace
+                    capacity=source_capacity, workspace=workspace_id
                 )
         print(
             f"{icons.green_dot} Rollback of the workspaces to the '{source_capacity}' capacity is complete."
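The rewritten migration loop assigns workspaces by ID instead of by display name and records each success in migrated_workspaces; if fewer workspaces moved than expected, every migrated workspace is reassigned to the source capacity. A generic sketch of that apply-then-roll-back shape (here triggered by an exception, whereas the library compares counts; illustrative only, not library code):

    def apply_with_rollback(items, apply, undo):
        done = []
        try:
            for item in items:
                apply(item)
                done.append(item)
        except Exception:
            # Undo completed work before re-raising.
            for item in reversed(done):
                undo(item)
            raise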
@@ -531,7 +529,7 @@ def _migrate_delegated_tenant_settings(source_capacity: str, target_capacity: st
 
 
 @log
-def _migrate_spark_settings(source_capacity: str, target_capacity: str):
+def _migrate_spark_settings(source_capacity: str | UUID, target_capacity: str | UUID):
     """
     This function migrates a capacity's spark settings to another capacity.
 
@@ -539,14 +537,14 @@ def _migrate_spark_settings(source_capacity: str, target_capacity: str):
 
     Parameters
     ----------
-    source_capacity : str
-        Name of the source capacity.
-    target_capacity : str
-        Name of the target capacity.
+    source_capacity : str | uuid.UUID
+        Name or ID of the source capacity.
+    target_capacity : str | uuid.UUID
+        Name or ID of the target capacity.
     """
 
-    source_capacity_id = resolve_capacity_id(capacity_name=source_capacity)
-    target_capacity_id = resolve_capacity_id(capacity_name=target_capacity)
+    source_capacity_id = resolve_capacity_id(capacity=source_capacity)
+    target_capacity_id = resolve_capacity_id(capacity=target_capacity)
 
     # Get source capacity server dns
     response = _base_api(request=f"metadata/capacityInformation/{source_capacity_id}")
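_migrate_spark_settings now accepts either a capacity name or a UUID, and resolve_capacity_id's keyword changes from capacity_name to capacity accordingly. resolve_capacity_id itself is not shown in this diff; resolvers for str | UUID parameters typically start by testing whether the input parses as a UUID, roughly like this (illustrative sketch):

    from uuid import UUID

    def _is_uuid(value) -> bool:
        # True if the value parses as a UUID, i.e. an ID rather than a name.
        try:
            UUID(str(value))
            return True
        except ValueError:
            return False

    print(_is_uuid("5a2c12f4-0d4d-4a4b-9a63-7c1e0e0d8f10"))  # True
    print(_is_uuid("My Capacity"))                            # False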
sempy_labs/_connections.py CHANGED
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
+    resolve_item_id,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -230,9 +230,7 @@ def list_item_connections(
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_type = item_type[0].upper() + item_type[1:]
-    item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     columns = {
         "Connection Name": "string",
sempy_labs/_dashboards.py ADDED
@@ -0,0 +1,60 @@
+from typing import Optional
+from uuid import UUID
+import pandas as pd
+from sempy_labs._helper_functions import (
+    _create_dataframe,
+    _base_api,
+    resolve_workspace_name_and_id,
+    _update_dataframe_datatypes,
+)
+
+
+def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows a list of the dashboards within a workspace.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the dashboards within a workspace.
+    """
+
+    columns = {
+        "Dashboard ID": "string",
+        "Dashboard Name": "string",
+        "Read Only": "bool",
+        "Web URL": "string",
+        "Embed URL": "string",
+        "Data Classification": "string",
+        "Users": "string",
+        "Subscriptions": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dashboards")
+
+    for v in response.json().get("value", []):
+        new_data = {
+            "Dashboard ID": v.get("id"),
+            "Dashboard Name": v.get("displayName"),
+            "Read Only": v.get("isReadOnly"),
+            "Web URL": v.get("webUrl"),
+            "Embed URL": v.get("embedUrl"),
+            "Data Classification": v.get("dataClassification"),
+            "Users": v.get("users"),
+            "Subscriptions": v.get("subscriptions"),
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
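list_dashboards is a new wrapper over the Power BI GET /v1.0/myorg/groups/{groupId}/dashboards endpoint. A typical call from a Fabric notebook (assuming the function is re-exported from the package root, which the __init__.py changes suggest):

    import sempy_labs as labs

    df = labs.list_dashboards()                         # workspace of the current session
    df_sales = labs.list_dashboards(workspace="Sales")  # by name, or pass a workspace UUID
    print(df[["Dashboard Name", "Web URL"]])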
sempy_labs/_data_pipelines.py CHANGED
@@ -1,13 +1,13 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _decode_b64,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
+    delete_item,
+    create_item,
 )
 from uuid import UUID
 
@@ -76,25 +76,8 @@ def create_data_pipeline(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/dataPipelines",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="DataPipeline", workspace=workspace
     )
 
 
@@ -114,16 +97,7 @@ def delete_data_pipeline(name: str | UUID, workspace: Optional[str | UUID] = Non
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    delete_item(item=name, type="DataPipeline", workspace=workspace)
 
 
 def get_data_pipeline_definition(
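Creation and deletion of data pipelines now delegate to the generic create_item and delete_item helpers, which is why this module (and siblings such as _eventstreams.py and _ml_models.py elsewhere in this release) shrinks. The public signatures are unchanged, so existing calls still work:

    from sempy_labs import create_data_pipeline, delete_data_pipeline

    create_data_pipeline(name="Nightly Load", description="Loads staging tables", workspace="Dev")
    delete_data_pipeline(name="Nightly Load", workspace="Dev")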
sempy_labs/_dataflows.py CHANGED
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
@@ -6,6 +5,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
+    resolve_workspace_name,
 )
 from typing import Optional, Tuple
 import sempy_labs._icons as icons
@@ -187,7 +187,7 @@ def list_upstream_dataflows(
     for v in values:
         tgt_dataflow_id = v.get("targetDataflowId")
         tgt_workspace_id = v.get("groupId")
-        tgt_workspace_name = fabric.resolve_workspace_name(tgt_workspace_id)
+        tgt_workspace_name = resolve_workspace_name(workspace_id=tgt_workspace_id)
         (tgt_dataflow_name, _) = _resolve_dataflow_name_and_id(
             dataflow=tgt_dataflow_id, workspace=tgt_workspace_id
         )
sempy_labs/_dax_query_view.py ADDED
@@ -0,0 +1,55 @@
+from typing import Optional
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    _get_fabric_context_setting,
+    resolve_workspace_id,
+)
+import gzip
+import base64
+import urllib.parse
+
+
+def generate_dax_query_view_url(
+    dataset: str | UUID, dax_string: str, workspace: Optional[str | UUID] = None
+):
+    """
+    Prints a URL based on query provided. This URL opens `DAX query view <https://learn.microsoft.com/power-bi/transform-model/dax-query-view>`_ in the Power BI service, connected to the semantic model and using the query provided.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        The semantic model name or ID.
+    dax_string : str
+        The DAX query string.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    workspace_id = resolve_workspace_id(workspace=workspace)
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace_id)
+
+    prefix = _get_fabric_context_setting(name="spark.trident.pbienv").lower()
+
+    if prefix == "prod":
+        prefix = "app"
+
+    def gzip_base64_urlsafe(input_string):
+        # Compress the string with gzip
+        compressed_data = gzip.compress(input_string.encode("utf-8"))
+
+        # Encode the compressed data in base64
+        base64_data = base64.b64encode(compressed_data)
+
+        # Make the base64 string URL-safe
+        urlsafe_data = urllib.parse.quote_plus(base64_data.decode("utf-8"))
+
+        return urlsafe_data
+
+    formatted_query = gzip_base64_urlsafe(dax_string)
+
+    url = f"https://{prefix}.powerbi.com/groups/{workspace_id}/modeling/{dataset_id}/daxQueryView?query={formatted_query}"
+
+    print(url)
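generate_dax_query_view_url deep-links into DAX query view by gzip-compressing the query, base64-encoding it, and URL-escaping the result before embedding it in the query string. For example (assuming the function is exported from the package root):

    from sempy_labs import generate_dax_query_view_url

    generate_dax_query_view_url(
        dataset="AdventureWorks",
        dax_string='EVALUATE SUMMARIZECOLUMNS(\'Date\'[Year], "Sales", [Total Sales])',
    )
    # Prints something like:
    # https://app.powerbi.com/groups/<workspace-id>/modeling/<dataset-id>/daxQueryView?query=<encoded>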