semantic-link-labs 0.9.1__py3-none-any.whl → 0.9.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of semantic-link-labs might be problematic.
Files changed (87)
  1. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/METADATA +67 -8
  2. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/RECORD +87 -80
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_ai.py +8 -5
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +107 -104
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +321 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +103 -99
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +227 -36
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +264 -167
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_bpa.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +8 -6
  34. sempy_labs/_model_dependencies.py +2 -0
  35. sempy_labs/_notebooks.py +28 -29
  36. sempy_labs/_one_lake_integration.py +2 -0
  37. sempy_labs/_query_scale_out.py +63 -81
  38. sempy_labs/_refresh_semantic_model.py +12 -14
  39. sempy_labs/_spark.py +54 -79
  40. sempy_labs/_sql.py +7 -11
  41. sempy_labs/_translations.py +2 -2
  42. sempy_labs/_vertipaq.py +11 -6
  43. sempy_labs/_warehouses.py +30 -33
  44. sempy_labs/_workloads.py +15 -20
  45. sempy_labs/_workspace_identity.py +13 -17
  46. sempy_labs/_workspaces.py +49 -48
  47. sempy_labs/admin/__init__.py +2 -0
  48. sempy_labs/admin/_basic_functions.py +244 -281
  49. sempy_labs/admin/_domains.py +186 -103
  50. sempy_labs/admin/_external_data_share.py +26 -31
  51. sempy_labs/admin/_git.py +17 -22
  52. sempy_labs/admin/_items.py +34 -48
  53. sempy_labs/admin/_scanner.py +61 -49
  54. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  55. sempy_labs/directlake/_dl_helper.py +10 -11
  56. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  57. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  58. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  59. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  60. sempy_labs/directlake/_warm_cache.py +2 -0
  61. sempy_labs/graph/__init__.py +33 -0
  62. sempy_labs/graph/_groups.py +402 -0
  63. sempy_labs/graph/_teams.py +113 -0
  64. sempy_labs/graph/_users.py +191 -0
  65. sempy_labs/lakehouse/__init__.py +4 -0
  66. sempy_labs/lakehouse/_get_lakehouse_columns.py +12 -12
  67. sempy_labs/lakehouse/_get_lakehouse_tables.py +16 -22
  68. sempy_labs/lakehouse/_lakehouse.py +104 -7
  69. sempy_labs/lakehouse/_shortcuts.py +42 -20
  70. sempy_labs/migration/__init__.py +4 -0
  71. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  72. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  73. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  74. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  75. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  76. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  77. sempy_labs/report/_download_report.py +8 -13
  78. sempy_labs/report/_generate_report.py +49 -46
  79. sempy_labs/report/_paginated.py +20 -26
  80. sempy_labs/report/_report_functions.py +52 -47
  81. sempy_labs/report/_report_list_functions.py +2 -0
  82. sempy_labs/report/_report_rebind.py +6 -10
  83. sempy_labs/report/_reportwrapper.py +187 -220
  84. sempy_labs/tom/_model.py +12 -6
  85. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/LICENSE +0 -0
  86. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/WHEEL +0 -0
  87. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/top_level.txt +0 -0
sempy_labs/_eventhouses.py

@@ -1,13 +1,13 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -33,20 +33,23 @@ def create_eventhouse(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    request_body = {"displayName": name}
+    payload = {"displayName": name}

     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/eventhouses", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventhouses",
+        method="post",
+        status_codes=[201, 202],
+        payload=payload,
+        lro_return_status_code=True,
     )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' eventhouse has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name,
+        item_type="eventhouse",
+        workspace_name=workspace_name,
+        action="created",
     )


@@ -69,16 +72,18 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         A pandas dataframe showing the eventhouses within a workspace.
     """

-    df = pd.DataFrame(columns=["Eventhouse Name", "Eventhouse Id", "Description"])
+    columns = {
+        "Eventhouse Name": "string",
+        "Eventhouse Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/eventhouses")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventhouses", uses_pagination=True
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -109,17 +114,12 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="Eventhouse", workspace=workspace_id
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/eventhouses/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' eventhouse within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="Eventhouse", workspace=workspace)
+
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name,
+        item_type="eventhouse",
+        workspace_name=workspace_name,
+        action="deleted",
     )
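
Taken together, the eventhouse hunks show the refactor that repeats across most of the 87 files in this release: the inline FabricRestClient / lro / pagination plumbing is replaced by a single _base_api helper, and per-function prints by _print_success. The helper's internals are not part of this diff; the following is a minimal sketch of what a wrapper with this call-site contract might look like, assuming requests-style verb methods on FabricRestClient and continuationUri-based paging. It is not the actual implementation in sempy_labs/_helper_functions.py, and LRO polling (lro_return_status_code) is omitted for brevity.

import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException


def _base_api_sketch(
    request: str,
    method: str = "get",
    payload: dict | None = None,
    status_codes: int | list[int] = 200,
    uses_pagination: bool = False,
):
    # Call sites pass an int or a list of acceptable status codes; normalize.
    if isinstance(status_codes, int):
        status_codes = [status_codes]

    client = fabric.FabricRestClient()
    # Dispatch to the matching HTTP verb (get/post/delete/...).
    response = getattr(client, method)(request, json=payload)
    if response.status_code not in status_codes:
        raise FabricHTTPException(response)

    if not uses_pagination:
        return response

    # Follow continuation links, collecting each page's JSON body, so callers
    # can iterate `for r in responses: for v in r.get("value", []): ...`.
    pages = [response.json()]
    while pages[-1].get("continuationUri"):
        response = client.get(pages[-1]["continuationUri"])
        if response.status_code != 200:
            raise FabricHTTPException(response)
        pages.append(response.json())
    return pages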
sempy_labs/_eventstreams.py

@@ -1,13 +1,13 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID


@@ -30,16 +30,17 @@ def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         A pandas dataframe showing the eventstreams within a workspace.
     """

-    df = pd.DataFrame(columns=["Eventstream Name", "Eventstream Id", "Description"])
+    columns = {
+        "Eventstream Name": "string",
+        "Eventstream Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams", uses_pagination=True
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -75,24 +76,27 @@ def create_eventstream(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    request_body = {"displayName": name}
+    payload = {"displayName": name}

     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/eventstreams", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams",
+        method="post",
+        payload=payload,
+        status_codes=[201, 202],
+        lro_return_status_code=True,
     )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' eventstream has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name,
+        item_type="eventstream",
+        workspace_name=workspace_name,
+        action="created",
     )


-def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):
+def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric eventstream.

@@ -100,8 +104,8 @@ def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    name: str
-        Name of the eventstream.
+    name: str | uuid.UUID
+        Name or ID of the eventstream.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -109,17 +113,12 @@ def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="Eventstream", workspace=workspace_id
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/eventstreams/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' eventstream within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="Eventstream", workspace=workspace)
+
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name,
+        item_type="eventstream",
+        workspace_name=workspace_name,
+        action="deleted",
     )
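
The eventstream hunks repeat the same pattern, and additionally widen delete_eventstream to accept a name or a UUID via the new resolve_item_id helper. Two of the smaller helpers introduced here can likewise be sketched from their call sites alone; the names, message wording, and dtype handling below are assumptions for illustration, not the library's actual code.

import pandas as pd


def _create_dataframe_sketch(columns: dict[str, str]) -> pd.DataFrame:
    # Empty frame with dtypes declared up front, instead of the old
    # pd.DataFrame(columns=[...]) pattern that yields all-object columns.
    return pd.DataFrame(
        {name: pd.Series(dtype=dtype) for name, dtype in columns.items()}
    )


def _print_success_sketch(item_name, item_type, workspace_name, action):
    # One shared formatter replacing the per-function f-string prints removed
    # above (the real helper presumably still prefixes icons.green_dot).
    print(
        f"The '{item_name}' {item_type} has been {action} "
        f"within the '{workspace_name}' workspace."
    )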
sempy_labs/_external_data_shares.py

@@ -5,9 +5,9 @@ from typing import Optional, List
 import sempy_labs._icons as icons
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    pagination,
+    _base_api,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException


 def create_external_data_share(
@@ -48,15 +48,12 @@ def create_external_data_share(

     payload = {"paths": paths, "recipient": {"userPrincipalName": recipient}}

-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
-        json=payload,
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
+        method="post",
+        status_codes=201,
+        payload=payload,
     )
-
-    if response.status_code != 201:
-        raise FabricHTTPException(response)
-
     print(
         f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
     )
@@ -92,14 +89,10 @@ def revoke_external_data_share(
         item_name=item_name, type=item_type, workspace=workspace_id
     )

-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke"
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
+        method="post",
     )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
         f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
     )
@@ -135,32 +128,26 @@ def list_external_data_shares_in_item(
         item_name=item_name, type=item_type, workspace=workspace_id
     )

-    client = fabric.FabricRestClient()
-    response = client.get(
-        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares"
+    columns = {
+        "External Data Share Id": "string",
+        "Paths": "string",
+        "Creator Principal Id": "string",
+        "Creator Principal Type": "string",
+        "Recipient User Principal Name": "string",
+        "Status": "string",
+        "Expiration Time UTC": "string",
+        "Workspace Id": "string",
+        "Item Id": "string",
+        "Item Name": "string",
+        "Item Type": "string",
+        "Invitation URL": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
+        uses_pagination=True,
     )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    df = pd.DataFrame(
-        columns=[
-            "External Data Share Id",
-            "Paths",
-            "Creator Principal Id",
-            "Creater Principal Type",
-            "Recipient User Principal Name",
-            "Status",
-            "Expiration Time UTC",
-            "Workspace Id",
-            "Item Id",
-            "Item Name",
-            "Item Type",
-            "Invitation URL",
-        ]
-    )
-
-    responses = pagination(client, response)
     dfs = []

     for r in responses:
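
The last hunk is truncated just as the response loop begins (dfs = [], for r in responses:). Note that the rewrite also fixes the old "Creater Principal Type" column typo. For orientation, here is a hedged sketch of how such a loop typically flattens the paginated payload into the prepared DataFrame; the JSON field names (id, creatorPrincipal, expirationTimeUtc, and so on) are inferred from the column names and the public Fabric REST API shape, not taken from this diff.

import pandas as pd

dfs = []
for r in responses:
    for v in r.get("value", []):
        # Map one external-data-share record onto the declared columns.
        row = {
            "External Data Share Id": v.get("id"),
            "Paths": str(v.get("paths")),
            "Creator Principal Id": v.get("creatorPrincipal", {}).get("id"),
            "Creator Principal Type": v.get("creatorPrincipal", {}).get("type"),
            "Recipient User Principal Name": v.get("recipient", {}).get(
                "userPrincipalName"
            ),
            "Status": v.get("status"),
            "Expiration Time UTC": v.get("expirationTimeUtc"),
            "Workspace Id": v.get("workspaceId"),
            "Item Id": v.get("itemId"),
            "Invitation URL": v.get("invitationUrl"),
        }
        dfs.append(pd.DataFrame(row, index=[0]))

# Concatenate all pages at the end rather than appending row by row.
if dfs:
    df = pd.concat(dfs, ignore_index=True)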