semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
sempy_labs/_job_scheduler.py
@@ -0,0 +1,227 @@
+ import sempy.fabric as fabric
+ from sempy._utils._log import log
+ import pandas as pd
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     resolve_item_name_and_id,
+     pagination,
+     lro,
+ )
+ from sempy.fabric.exceptions import FabricHTTPException
+ from uuid import UUID
+ import sempy_labs._icons as icons
+
+
+ @log
+ def list_item_job_instances(
+     item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
+ ) -> pd.DataFrame:
+     """
+     Returns a list of job instances for the specified item.
+
+     This is a wrapper function for the following API: `Job Scheduler - List Item Job Instances <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-job-instances>`_.
+
+     Parameters
+     ----------
+     item : str | uuid.UUID
+         The item name or ID
+     type : str, default=None
+         The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID used by the lakehouse.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         Shows a list of job instances for the specified item.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (item_name, item_id) = resolve_item_name_and_id(
+         item=item, type=type, workspace=workspace
+     )
+
+     client = fabric.FabricRestClient()
+     response = client.get(
+         f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances"
+     )
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     df = pd.DataFrame(
+         columns=[
+             "Job Instance Id",
+             "Item Name",
+             "Item Id",
+             "Item Type",
+             "Job Type",
+             "Invoke Type",
+             "Status",
+             "Root Activity Id",
+             "Start Time UTC",
+             "End Time UTC",
+             "Failure Reason",
+         ]
+     )
+
+     responses = pagination(client, response)
+
+     if not responses[0].get("value"):
+         return df
+
+     dfs = []
+     for r in responses:
+         for v in r.get("value", []):
+             fail = v.get("failureReason", {})
+             new_data = {
+                 "Job Instance Id": v.get("id"),
+                 "Item Name": item_name,
+                 "Item Id": v.get("itemId"),
+                 "Item Type": type,
+                 "Job Type": v.get("jobType"),
+                 "Invoke Type": v.get("invokeType"),
+                 "Status": v.get("status"),
+                 "Root Activity Id": v.get("rootActivityId"),
+                 "Start Time UTC": v.get("startTimeUtc"),
+                 "End Time UTC": v.get("endTimeUtc"),
+                 "Failure Reason": fail.get("message") if fail is not None else "",
+             }
+             dfs.append(pd.DataFrame(new_data, index=[0]))
+
+     if dfs:
+         df = pd.concat(dfs, ignore_index=True)
+
+     return df
+
+
+ @log
+ def list_item_schedules(
+     item: str | UUID,
+     type: Optional[str] = None,
+     job_type: str = "DefaultJob",
+     workspace: Optional[str | UUID] = None,
+ ) -> pd.DataFrame:
+     """
+     Get scheduling settings for one specific item.
+
+     This is a wrapper function for the following API: `Job Scheduler - List Item Schedules <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-schedules>`_.
+
+     Parameters
+     ----------
+     item : str | uuid.UUID
+         The item name or ID
+     type : str, default=None
+         The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
+     job_type : str, default="DefaultJob"
+         The job type.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID used by the lakehouse.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         Shows a list of scheduling settings for one specific item.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (item_name, item_id) = resolve_item_name_and_id(
+         item=item, type=type, workspace=workspace
+     )
+
+     df = pd.DataFrame(
+         columns=[
+             "Job Schedule Id",
+             "Enabled",
+             "Created Date Time",
+             "Start Date Time",
+             "End Date Time",
+             "Local Time Zone Id",
+             "Type",
+             "Interval",
+             "Weekdays",
+             "Times",
+             "Owner Id",
+             "Owner Type",
+         ]
+     )
+
+     client = fabric.FabricRestClient()
+     response = client.get(
+         f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules"
+     )
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     for v in response.json().get("value", []):
+         config = v.get("configuration", {})
+         own = v.get("owner", {})
+         new_data = {
+             "Job Schedule Id": v.get("id"),
+             "Enabled": v.get("enabled"),
+             "Created Date Time": v.get("createdDateTime"),
+             "Start Date Time": config.get("startDateTime"),
+             "End Date Time": config.get("endDateTime"),
+             "Local Time Zone Id": config.get("localTimeZoneId"),
+             "Type": config.get("type"),
+             "Interval": config.get("interval"),
+             "Weekdays": config.get("weekdays"),
+             "Times": config.get("times"),
+             "Owner Id": own.get("id"),
+             "Owner Type": own.get("type"),
+         }
+
+         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     df["Enabled"] = df["Enabled"].astype(bool)
+     df["Created Date Time"] = pd.to_datetime(df["Created Date Time"])
+     df["Start Date Time"] = pd.to_datetime(df["Start Date Time"])
+
+     return df
+
+
+ @log
+ def run_on_demand_item_job(
+     item: str | UUID,
+     type: Optional[str] = None,
+     job_type: str = "DefaultJob",
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Run on-demand item job instance.
+
+     This is a wrapper function for the following API: `Job Scheduler - Run On Demand Item Job <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/run-on-demand-item-job>`_.
+
+     Parameters
+     ----------
+     item : str | uuid.UUID
+         The item name or ID
+     type : str, default=None
+         The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
+     job_type : str, default="DefaultJob"
+         The job type.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID used by the lakehouse.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (item_name, item_id) = resolve_item_name_and_id(
+         item=item, type=type, workspace=workspace
+     )
+
+     client = fabric.FabricRestClient()
+     response = client.post(
+         f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances?jobType={job_type}"
+     )
+
+     lro(client, response, return_status_code=True)
+
+     print(f"{icons.green_dot} The '{item_name}' {type.lower()} has been executed.")
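The new module wraps three Job Scheduler endpoints: listing job instances, listing schedules, and running a job on demand. A minimal usage sketch, assuming the functions are re-exported from the package root (the sempy_labs/__init__.py change in the file list suggests they are) and using hypothetical item and workspace names:

import sempy_labs as labs

# "Nightly ETL" and "Sales Ops" are hypothetical placeholders for an item
# name and a workspace name; type disambiguates lookups when the item is
# given by name.
jobs_df = labs.list_item_job_instances(
    item="Nightly ETL", type="Notebook", workspace="Sales Ops"
)

# Scheduling settings for the same item (job_type defaults to "DefaultJob").
schedules_df = labs.list_item_schedules(
    item="Nightly ETL", type="Notebook", workspace="Sales Ops"
)

# Trigger the item on demand.
labs.run_on_demand_item_job(item="Nightly ETL", type="Notebook", workspace="Sales Ops")

Note that run_on_demand_item_job ends with type.lower(), so calling it with the item given by UUID and type omitted will raise; passing type explicitly sidesteps this.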
sempy_labs/_kql_databases.py
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
      pagination,
  )
  from sempy.fabric.exceptions import FabricHTTPException
+ from uuid import UUID
  
  
- def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
+ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
      """
      Shows the KQL databases within a workspace.
  
@@ -18,8 +19,8 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
  
      Parameters
      ----------
-     workspace : str, default=None
-         The Fabric workspace name.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
  
@@ -41,7 +42,7 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
          ]
      )
  
-     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
  
      client = fabric.FabricRestClient()
      response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
@@ -69,7 +70,7 @@ def list_kql_databases(workspace: Optional[str] = None) -> pd.DataFrame:
  
  
  def create_kql_database(
-     name: str, description: Optional[str] = None, workspace: Optional[str] = None
+     name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
  ):
      """
      Creates a KQL database.
@@ -82,13 +83,13 @@ def create_kql_database(
          Name of the KQL database.
      description : str, default=None
          A description of the environment.
-     workspace : str, default=None
-         The Fabric workspace name.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """
  
-     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
  
      request_body = {"displayName": name}
  
@@ -103,11 +104,11 @@ def create_kql_database(
      lro(client, response, status_codes=[201, 202])
  
      print(
-         f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace}' workspace."
+         f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace_name}' workspace."
      )
  
  
- def delete_kql_database(name: str, workspace: Optional[str] = None):
+ def delete_kql_database(name: str, workspace: Optional[str | UUID] = None):
      """
      Deletes a KQL database.
  
@@ -117,15 +118,15 @@ def delete_kql_database(name: str, workspace: Optional[str] = None):
      ----------
      name: str
          Name of the KQL database.
-     workspace : str, default=None
-         The Fabric workspace name.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """
  
-     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
      kql_database_id = fabric.resolve_item_id(
-         item_name=name, type="KQLDatabase", workspace=workspace
+         item_name=name, type="KQLDatabase", workspace=workspace_id
      )
  
      client = fabric.FabricRestClient()
@@ -136,5 +137,5 @@ def delete_kql_database(name: str, workspace: Optional[str] = None):
      if response.status_code != 200:
          raise FabricHTTPException(response)
      print(
-         f"{icons.green_dot} The '{name}' KQL database within the '{workspace}' workspace has been deleted."
+         f"{icons.green_dot} The '{name}' KQL database within the '{workspace_name}' workspace has been deleted."
      )
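This file and the _kql_querysets.py changes below follow the same 0.9.0 pattern: workspace parameters are widened from Optional[str] to Optional[str | UUID], the resolved workspace_name is used in status messages while workspace_id feeds the REST paths, and fabric.resolve_item_id now receives the workspace ID. A sketch of the widened call surface, with a hypothetical workspace name and UUID:

from uuid import UUID
import sempy_labs as labs

# Hypothetical identifiers for the same workspace.
ws_name = "Sales Ops"
ws_id = UUID("00000000-0000-0000-0000-000000000000")

# Either form is accepted; resolve_workspace_name_and_id turns the input
# into a (name, id) pair internally.
df_by_name = labs.list_kql_databases(workspace=ws_name)
df_by_id = labs.list_kql_databases(workspace=ws_id)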
sempy_labs/_kql_querysets.py
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
      pagination,
  )
  from sempy.fabric.exceptions import FabricHTTPException
+ from uuid import UUID
  
  
- def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
+ def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
      """
      Shows the KQL querysets within a workspace.
  
@@ -18,8 +19,8 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
  
      Parameters
      ----------
-     workspace : str, default=None
-         The Fabric workspace name.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
  
@@ -37,7 +38,7 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
          ]
      )
  
-     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
  
      client = fabric.FabricRestClient()
      response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
@@ -59,7 +60,7 @@ def list_kql_querysets(workspace: Optional[str] = None) -> pd.DataFrame:
  
  
  def create_kql_queryset(
-     name: str, description: Optional[str] = None, workspace: Optional[str] = None
+     name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
  ):
      """
      Creates a KQL queryset.
@@ -72,13 +73,13 @@ def create_kql_queryset(
          Name of the KQL queryset.
      description : str, default=None
          A description of the environment.
-     workspace : str, default=None
-         The Fabric workspace name.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """
  
-     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
  
      request_body = {"displayName": name}
  
@@ -93,11 +94,11 @@ def create_kql_queryset(
      lro(client, response, status_codes=[201, 202])
  
      print(
-         f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{workspace}' workspace."
+         f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{workspace_name}' workspace."
      )
  
  
- def delete_kql_queryset(name: str, workspace: Optional[str] = None):
+ def delete_kql_queryset(name: str, workspace: Optional[str | UUID] = None):
      """
      Deletes a KQL queryset.
  
@@ -107,15 +108,15 @@ def delete_kql_queryset(name: str, workspace: Optional[str] = None):
      ----------
      name: str
          Name of the KQL queryset.
-     workspace : str, default=None
-         The Fabric workspace name.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """
  
-     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
      kql_database_id = fabric.resolve_item_id(
-         item_name=name, type="KQLQueryset", workspace=workspace
+         item_name=name, type="KQLQueryset", workspace=workspace_id
      )
  
      client = fabric.FabricRestClient()
@@ -126,5 +127,5 @@ def delete_kql_queryset(name: str, workspace: Optional[str] = None):
      if response.status_code != 200:
          raise FabricHTTPException(response)
      print(
-         f"{icons.green_dot} The '{name}' KQL queryset within the '{workspace}' workspace has been deleted."
+         f"{icons.green_dot} The '{name}' KQL queryset within the '{workspace_name}' workspace has been deleted."
      )
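Creation and deletion follow suit, reporting the resolved workspace name in their status messages. A round-trip sketch with hypothetical names, again assuming package-root re-exports:

import sempy_labs as labs

# "Ad Hoc Queries" and "Sales Ops" are hypothetical placeholders for a
# queryset name and a workspace name.
labs.create_kql_queryset(name="Ad Hoc Queries", workspace="Sales Ops")
labs.delete_kql_queryset(name="Ad Hoc Queries", workspace="Sales Ops")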