semantic-link-labs 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (83)
  1. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
  2. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_authentication.py +0 -2
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +72 -105
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +303 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +57 -101
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +175 -30
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +262 -164
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_dependencies.py +2 -0
  33. sempy_labs/_notebooks.py +28 -29
  34. sempy_labs/_one_lake_integration.py +2 -0
  35. sempy_labs/_query_scale_out.py +63 -81
  36. sempy_labs/_refresh_semantic_model.py +12 -14
  37. sempy_labs/_spark.py +54 -79
  38. sempy_labs/_sql.py +7 -11
  39. sempy_labs/_vertipaq.py +8 -3
  40. sempy_labs/_warehouses.py +30 -33
  41. sempy_labs/_workloads.py +15 -20
  42. sempy_labs/_workspace_identity.py +13 -17
  43. sempy_labs/_workspaces.py +49 -48
  44. sempy_labs/admin/__init__.py +2 -0
  45. sempy_labs/admin/_basic_functions.py +244 -281
  46. sempy_labs/admin/_domains.py +188 -103
  47. sempy_labs/admin/_external_data_share.py +26 -31
  48. sempy_labs/admin/_git.py +17 -22
  49. sempy_labs/admin/_items.py +34 -48
  50. sempy_labs/admin/_scanner.py +20 -13
  51. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  52. sempy_labs/directlake/_dl_helper.py +10 -11
  53. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  54. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  55. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  56. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  57. sempy_labs/directlake/_warm_cache.py +2 -0
  58. sempy_labs/graph/__init__.py +33 -0
  59. sempy_labs/graph/_groups.py +402 -0
  60. sempy_labs/graph/_teams.py +113 -0
  61. sempy_labs/graph/_users.py +191 -0
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
  64. sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
  65. sempy_labs/lakehouse/_lakehouse.py +101 -4
  66. sempy_labs/lakehouse/_shortcuts.py +42 -20
  67. sempy_labs/migration/__init__.py +4 -0
  68. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  73. sempy_labs/report/_download_report.py +8 -13
  74. sempy_labs/report/_generate_report.py +49 -46
  75. sempy_labs/report/_paginated.py +20 -26
  76. sempy_labs/report/_report_functions.py +50 -45
  77. sempy_labs/report/_report_list_functions.py +2 -0
  78. sempy_labs/report/_report_rebind.py +6 -10
  79. sempy_labs/report/_reportwrapper.py +187 -220
  80. sempy_labs/tom/_model.py +8 -5
  81. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
  83. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0

sempy_labs/admin/_items.py
@@ -1,19 +1,17 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from sempy_labs.admin._basic_functions import (
     _resolve_capacity_name_and_id,
     _resolve_workspace_name_and_id,
 )
 from sempy_labs._helper_functions import (
-    pagination,
     _is_valid_uuid,
     _build_url,
+    _base_api,
+    _create_dataframe,
 )
-import sempy_labs._authentication as auth


 def _resolve_item_id(
@@ -106,27 +104,23 @@ def list_items(
         capacity = kwargs["capacity_name"]
         del kwargs["capacity_name"]

-    df = pd.DataFrame(
-        columns=[
-            "Item Id",
-            "Item Name",
-            "Type",
-            "Description",
-            "State",
-            "Last Updated Date",
-            "Creator Principal Id",
-            "Creator Principal Display Name",
-            "Creator Principal Type",
-            "Creator User Principal Name",
-            "Workspace Id",
-            "Capacity Id",
-        ]
-    )
-
-    client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
+    columns = {
+        "Item Id": "string",
+        "Item Name": "string",
+        "Type": "string",
+        "Description": "string",
+        "State": "string",
+        "Last Updated Date": "string",
+        "Creator Principal Id": "string",
+        "Creator Principal Display Name": "string",
+        "Creator Principal Type": "string",
+        "Creator User Principal Name": "string",
+        "Workspace Id": "string",
+        "Capacity Id": "string",
+    }
+    df = _create_dataframe(columns=columns)

     params = {}
-
     url = "/v1/admin/items"

     if capacity is not None:
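
Throughout this release, empty result frames are built from a column-name → dtype mapping via the internal _create_dataframe helper instead of pd.DataFrame(columns=[...]), so empty frames keep explicit dtypes. A minimal sketch of what such a helper plausibly does, inferred only from its call sites in this diff (the actual implementation in sempy_labs/_helper_functions.py may differ):

    import pandas as pd

    def _create_dataframe(columns: dict) -> pd.DataFrame:
        # Build an empty DataFrame with an explicit dtype per column, so that
        # "string" columns stay strings even while the frame has no rows.
        return pd.DataFrame(
            {name: pd.Series(dtype=dtype) for name, dtype in columns.items()}
        )
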
@@ -143,12 +137,7 @@ def list_items(

     url = _build_url(url, params)

-    response = client.get(url)
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

     for r in responses:
         for v in r.get("itemEntities", []):
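
The removed client.get / status-check / pagination sequence collapses into a single _base_api call. A hedged sketch of the consolidation, reconstructed only from the code these hunks delete (the real helper lives in sempy_labs/_helper_functions.py; client selection, e.g. "fabric_sp" for service-principal-capable auth, is elided):

    import sempy.fabric as fabric
    from sempy.fabric.exceptions import FabricHTTPException

    def _base_api(request, client=None, method="get", payload=None,
                  status_codes=200, uses_pagination=False):
        # Issue the request, enforce the expected status code, and optionally
        # return paginated payloads (the old pagination() behavior).
        rest_client = fabric.FabricRestClient()
        response = getattr(rest_client, method)(request, json=payload)
        if response.status_code != status_codes:
            raise FabricHTTPException(response)
        # Real pagination would follow continuation links; one page shown here.
        return [response.json()] if uses_pagination else response
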
@@ -222,32 +211,29 @@ def list_item_access_details(
             f"{icons.red_dot} The parameter 'item' and 'type' are mandatory."
         )

-    client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
-
     workspace_name, workspace_id = _resolve_workspace_name_and_id(workspace)
     item_name, item_id = _resolve_item_name_and_id(
         item=item, type=type, workspace=workspace_name
     )

-    df = pd.DataFrame(
-        columns=[
-            "User Id",
-            "User Name",
-            "User Type",
-            "User Principal Name",
-            "Item Name",
-            "Item Type",
-            "Item Id",
-            "Permissions",
-            "Additional Permissions",
-        ]
+    columns = {
+        "User Id": "string",
+        "User Name": "string",
+        "User Type": "string",
+        "User Principal Name": "string",
+        "Item Name": "string",
+        "Item Type": "string",
+        "Item Id": "string",
+        "Permissions": "string",
+        "Additional Permissions": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(
+        request=f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/users",
+        client="fabric_sp",
     )

-    response = client.get(f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/users")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     for v in response.json().get("accessDetails", []):
         new_data = {
             "User Id": v.get("principal", {}).get("id"),

sempy_labs/admin/_scanner.py
@@ -6,7 +6,9 @@ import numpy as np
 import time
 from sempy_labs.admin._basic_functions import list_workspaces
 from sempy._utils._log import log
-import sempy_labs._authentication as auth
+from sempy_labs._helper_functions import (
+    _base_api,
+)


 @log
@@ -54,8 +56,6 @@ def scan_workspaces(
         "misconfiguredDatasourceInstances": [],
     }

-    client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
-
     if workspace is None:
         workspace = fabric.resolve_workspace_name()

@@ -72,25 +72,32 @@ def scan_workspaces(
     batch_size = 99
     for i in range(0, len(workspaces), batch_size):
         batch = workspaces[i : i + batch_size].tolist()
-        request_body = {"workspaces": batch}
+        payload = {"workspaces": batch}

-        response_clause = f"/v1.0/myorg/admin/workspaces/getInfo?lineage={lineage}&datasourceDetails={data_source_details}&datasetSchema={dataset_schema}&datasetExpressions={dataset_expressions}&getArtifactUsers={artifact_users}"
-        response = client.post(response_clause, json=request_body)
+        url = f"/v1.0/myorg/admin/workspaces/getInfo?lineage={lineage}&datasourceDetails={data_source_details}&datasetSchema={dataset_schema}&datasetExpressions={dataset_expressions}&getArtifactUsers={artifact_users}"
+        response = _base_api(
+            request=url,
+            method="post",
+            payload=payload,
+            status_codes=202,
+            client="fabric_sp",
+        )

-        if response.status_code != 202:
-            raise FabricHTTPException(response)
         scan_id = response.json()["id"]
         scan_status = response.json().get("status")
         while scan_status not in ["Succeeded", "Failed"]:
             time.sleep(1)
-            response = client.get(f"/v1.0/myorg/admin/workspaces/scanStatus/{scan_id}")
+            response = _base_api(
+                request=f"/v1.0/myorg/admin/workspaces/scanStatus/{scan_id}",
+                client="fabric_sp",
+            )
             scan_status = response.json().get("status")
         if scan_status == "Failed":
             raise FabricHTTPException(response)
-        response = client.get(f"/v1.0/myorg/admin/workspaces/scanResult/{scan_id}")
-        if response.status_code != 200:
-            raise FabricHTTPException(response)
-
+        response = _base_api(
+            request=f"/v1.0/myorg/admin/workspaces/scanResult/{scan_id}",
+            client="fabric_sp",
+        )
         responseJson = response.json()

         if "workspaces" in responseJson:

sempy_labs/directlake/_directlake_schema_compare.py
@@ -44,6 +44,8 @@ def direct_lake_schema_compare(
         )
         del kwargs["lakehouse_workspace"]

+    fabric.refresh_tom_cache(workspace=workspace)
+
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

sempy_labs/directlake/_dl_helper.py
@@ -11,6 +11,7 @@ from sempy_labs._helper_functions import (
     _convert_data_type,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
+    _base_api,
 )


@@ -246,18 +247,16 @@ def get_direct_lake_source(

     return artifact_type, artifact_name, artifact_id, workspace_id

-    # client = fabric.PowerBIRestClient()
-    # request_body = {
-    #     "artifacts": [
-    #         {
-    #             "objectId": dataset_id,
-    #             "type": "dataset",
-    #         }
-    #     ]
+    # payload = {
+    #     "artifacts": [
+    #         {
+    #             "objectId": dataset_id,
+    #             "type": "dataset",
+    #         }
+    #     ]
     # }
-    # response = client.post(
-    #     "metadata/relations/upstream?apiVersion=3", json=request_body
-    # )
+
+    # response = _base_api(request="metadata/relations/upstream?apiVersion=3", payload=payload, method="post")

     # artifacts = response.json().get("artifacts", [])
     # sql_id, sql_object_name, sql_workspace_id, artifact_type = None, None, None, None

sempy_labs/directlake/_generate_shared_expression.py
@@ -4,10 +4,10 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
     resolve_warehouse_id,
     resolve_workspace_name_and_id,
+    _base_api,
 )
 from typing import Optional
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID

@@ -53,11 +53,10 @@ def generate_shared_expression(
     elif item_type == "Warehouse":
         item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace_id)

-    client = fabric.FabricRestClient()
     item_type_rest = f"{item_type.lower()}s"
-    response = client.get(f"/v1/workspaces/{workspace_id}/{item_type_rest}/{item_id}")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    response = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/{item_type_rest}/{item_id}"
+    )

     prop = response.json().get("properties")

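Taken together, these hunks converge on one internal request helper. The call shapes below are copied from this diff; the implied defaults (GET, expected status 200, the standard client when client= is omitted) are inferences from these call sites, not documented behavior:

    # GET with defaults (generate_shared_expression above):
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/{item_type_rest}/{item_id}"
    )

    # Service-principal-capable client plus pagination (list_items):
    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

    # POST with a payload and a non-200 expected status (scan_workspaces):
    response = _base_api(request=url, method="post", payload=payload,
                         status_codes=202, client="fabric_sp")
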
sempy_labs/directlake/_get_directlake_lakehouse.py
@@ -51,6 +51,7 @@ def get_direct_lake_lakehouse(
     if artifact_type in ["Lakehouse", "Warehouse"]:
         return artifact_name, artifact_id
     else:
+        fabric.refresh_tom_cache(workspace=workspace)
         dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
         dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
         if dfP_filt.empty:

sempy_labs/directlake/_list_directlake_model_calc_tables.py
@@ -50,6 +50,7 @@ def list_direct_lake_model_calc_tables(
             f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
         )
     else:
+        fabric.refresh_tom_cache(workspace=workspace)
         dfA = fabric.list_annotations(dataset=dataset_id, workspace=workspace_id)
         dfT = list_tables(dataset_id, workspace_id)
         dfA_filt = dfA[

sempy_labs/directlake/_show_unsupported_directlake_objects.py
@@ -38,6 +38,8 @@ def show_unsupported_direct_lake_objects(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

+    fabric.refresh_tom_cache(workspace=workspace)
+
     dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
     dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
     dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)

sempy_labs/directlake/_warm_cache.py
@@ -48,6 +48,8 @@ def warm_direct_lake_cache_perspective(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

+    fabric.refresh_tom_cache(workspace=workspace)
+
     dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)
     if not any(r["Mode"] == "DirectLake" for _, r in dfP.iterrows()):
         raise ValueError(

sempy_labs/graph/__init__.py (new file)
@@ -0,0 +1,33 @@
+from sempy_labs.graph._groups import (
+    list_groups,
+    list_group_owners,
+    list_group_members,
+    add_group_members,
+    add_group_owners,
+    resolve_group_id,
+    renew_group,
+)
+from sempy_labs.graph._users import (
+    resolve_user_id,
+    get_user,
+    list_users,
+    send_mail,
+)
+from sempy_labs.graph._teams import (
+    list_teams,
+)
+
+__all__ = [
+    "list_groups",
+    "list_group_owners",
+    "list_group_members",
+    "add_group_members",
+    "add_group_owners",
+    "renew_group",
+    "resolve_group_id",
+    "resolve_user_id",
+    "get_user",
+    "list_users",
+    "send_mail",
+    "list_teams",
+]
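
The new sempy_labs.graph subpackage exposes Microsoft Graph helpers for groups, users, and teams. A hedged usage sketch: the function names are exactly the exports above, but the parameter names are illustrative assumptions, not documented signatures:

    from sempy_labs import graph

    groups = graph.list_groups()                              # enumerate groups
    owners = graph.list_group_owners(group="Sales Team")      # 'group' kwarg assumed
    user_id = graph.resolve_user_id(user="user@contoso.com")  # 'user' kwarg assumed
    teams = graph.list_teams()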