semantic-link-labs 0.12.3-py3-none-any.whl → 0.12.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (47)
  1. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/METADATA +5 -3
  2. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/RECORD +45 -37
  3. sempy_labs/__init__.py +20 -16
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +1 -1
  6. sempy_labs/_capacities.py +1 -1
  7. sempy_labs/_dataflows.py +98 -10
  8. sempy_labs/_git.py +1 -1
  9. sempy_labs/_helper_functions.py +32 -5
  10. sempy_labs/_list_functions.py +55 -5
  11. sempy_labs/_managed_private_endpoints.py +63 -1
  12. sempy_labs/_model_bpa.py +6 -0
  13. sempy_labs/_notebooks.py +4 -2
  14. sempy_labs/_onelake.py +131 -0
  15. sempy_labs/_sql_audit_settings.py +208 -0
  16. sempy_labs/_sql_endpoints.py +18 -3
  17. sempy_labs/_utils.py +2 -0
  18. sempy_labs/admin/__init__.py +6 -0
  19. sempy_labs/admin/_basic_functions.py +17 -13
  20. sempy_labs/admin/_items.py +3 -3
  21. sempy_labs/admin/_labels.py +211 -0
  22. sempy_labs/admin/_workspaces.py +2 -2
  23. sempy_labs/deployment_pipeline/__init__.py +21 -0
  24. sempy_labs/deployment_pipeline/_items.py +486 -0
  25. sempy_labs/directlake/_update_directlake_partition_entity.py +73 -41
  26. sempy_labs/directlake/_warm_cache.py +3 -1
  27. sempy_labs/eventstream/__init__.py +37 -0
  28. sempy_labs/eventstream/_items.py +263 -0
  29. sempy_labs/eventstream/_topology.py +652 -0
  30. sempy_labs/graph/__init__.py +10 -0
  31. sempy_labs/graph/_groups.py +123 -53
  32. sempy_labs/graph/_sensitivity_labels.py +39 -0
  33. sempy_labs/graph/_teams.py +19 -18
  34. sempy_labs/graph/_user_licenses.py +96 -0
  35. sempy_labs/graph/_users.py +69 -18
  36. sempy_labs/lakehouse/_get_lakehouse_tables.py +33 -1
  37. sempy_labs/lakehouse/_lakehouse.py +6 -2
  38. sempy_labs/lakehouse/_partitioning.py +165 -0
  39. sempy_labs/report/_export_report.py +0 -22
  40. sempy_labs/report/_report_rebind.py +29 -43
  41. sempy_labs/report/_reportwrapper.py +80 -35
  42. sempy_labs/tom/_model.py +81 -4
  43. sempy_labs/_deployment_pipelines.py +0 -209
  44. sempy_labs/_eventstreams.py +0 -123
  45. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/WHEEL +0 -0
  46. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/licenses/LICENSE +0 -0
  47. {semantic_link_labs-0.12.3.dist-info → semantic_link_labs-0.12.5.dist-info}/top_level.txt +0 -0
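
Taken together, the file list shows the flat private modules sempy_labs/_deployment_pipelines.py and sempy_labs/_eventstreams.py being removed while new deployment_pipeline/ and eventstream/ subpackages are added. A minimal migration sketch follows; the new import paths and the assumption that the subpackages keep the 0.12.3 function names are inferred from the file layout above, not verified against the 0.12.5 API:

# Hedged sketch: import paths assumed from the new package layout above.
# 0.12.3 (removed modules):
#   from sempy_labs._deployment_pipelines import list_deployment_pipelines
#   from sempy_labs._eventstreams import list_eventstreams
# 0.12.5 (assumed replacements):
from sempy_labs import deployment_pipeline as dp  # backed by deployment_pipeline/_items.py
from sempy_labs import eventstream as es          # backed by eventstream/_items.py

pipelines = dp.list_deployment_pipelines()        # assumed to keep its 0.12.3 name
streams = es.list_eventstreams()                  # assumed to keep its 0.12.3 name
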
sempy_labs/_deployment_pipelines.py (deleted)
@@ -1,209 +0,0 @@
- import pandas as pd
- from sempy_labs._helper_functions import (
-     _is_valid_uuid,
-     _base_api,
-     _update_dataframe_datatypes,
-     _create_dataframe,
- )
- import sempy_labs._icons as icons
- from uuid import UUID
- from sempy._utils._log import log
-
-
- @log
- def list_deployment_pipelines() -> pd.DataFrame:
-     """
-     Shows a list of deployment pipelines the user can access.
-
-     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipelines <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipelines>`_.
-
-     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-     Returns
-     -------
-     pandas.DataFrame
-         A pandas dataframe showing a list of deployment pipelines the user can access.
-     """
-
-     columns = {
-         "Deployment Pipeline Id": "string",
-         "Deployment Pipeline Name": "string",
-         "Description": "string",
-     }
-     df = _create_dataframe(columns=columns)
-
-     responses = _base_api(
-         request="/v1/deploymentPipelines",
-         status_codes=200,
-         uses_pagination=True,
-         client="fabric_sp",
-     )
-
-     rows = []
-     for r in responses:
-         for v in r.get("value", []):
-             rows.append(
-                 {
-                     "Deployment Pipeline Id": v.get("id"),
-                     "Deployment Pipeline Name": v.get("displayName"),
-                     "Description": v.get("description"),
-                 }
-             )
-
-     if rows:
-         df = pd.DataFrame(rows, columns=columns.keys())
-
-     return df
-
-
- @log
- def list_deployment_pipeline_stages(deployment_pipeline: str | UUID) -> pd.DataFrame:
-     """
-     Shows the specified deployment pipeline stages.
-
-     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stages <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stages>`_.
-
-     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-     Parameters
-     ----------
-     deployment_pipeline : str | uuid.UUID
-         The deployment pipeline name or ID.
-
-     Returns
-     -------
-     pandas.DataFrame
-         A pandas dataframe showing the specified deployment pipeline stages.
-     """
-
-     from sempy_labs._helper_functions import resolve_deployment_pipeline_id
-
-     columns = {
-         "Deployment Pipeline Stage Id": "string",
-         "Deployment Pipeline Stage Name": "string",
-         "Order": "int",
-         "Description": "string",
-         "Workspace Id": "string",
-         "Workspace Name": "string",
-         "Public": "bool",
-     }
-     df = _create_dataframe(columns=columns)
-
-     deployment_pipeline_id = resolve_deployment_pipeline_id(
-         deployment_pipeline=deployment_pipeline
-     )
-
-     responses = _base_api(
-         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages",
-         status_codes=200,
-         uses_pagination=True,
-         client="fabric_sp",
-     )
-
-     rows = []
-     for r in responses:
-         for v in r.get("value", []):
-             rows.append(
-                 {
-                     "Deployment Pipeline Stage Id": v.get("id"),
-                     "Deployment Pipeline Stage Name": v.get("displayName"),
-                     "Description": v.get("description"),
-                     "Order": v.get("order"),
-                     "Workspace Id": v.get("workspaceId"),
-                     "Workspace Name": v.get("workspaceName"),
-                     "Public": v.get("isPublic"),
-                 }
-             )
-     if rows:
-         df = pd.DataFrame(rows, columns=list(columns.keys()))
-         _update_dataframe_datatypes(df, columns)
-
-     return df
-
-
- @log
- def list_deployment_pipeline_stage_items(
-     deployment_pipeline: str | UUID,
-     stage: str | UUID,
- ) -> pd.DataFrame:
-     """
-     Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
-
-     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stage Items <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stage-items>`_.
-
-     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-     Parameters
-     ----------
-     deployment_pipeline : str | uuid.UUID
-         The deployment pipeline name or ID.
-     stage : str | uuid.UUID
-         The deployment pipeline stage name or ID.
-
-     Returns
-     -------
-     pandas.DataFrame
-         A pandas dataframe showing the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
-     """
-
-     from sempy_labs._helper_functions import resolve_deployment_pipeline_id
-
-     columns = {
-         "Deployment Pipeline Stage Item Id": "string",
-         "Deployment Pipeline Stage Item Name": "string",
-         "Item Type": "string",
-         "Source Item Id": "string",
-         "Target Item Id": "string",
-         "Last Deployment Time": "string",
-     }
-     df = _create_dataframe(columns=columns)
-
-     deployment_pipeline_id = resolve_deployment_pipeline_id(
-         deployment_pipeline=deployment_pipeline
-     )
-
-     def resolve_deployment_pipeline_stage_id(
-         deployment_pipeline_id: UUID, stage: str | UUID
-     ):
-
-         dfPS = list_deployment_pipeline_stages(
-             deployment_pipeline=deployment_pipeline_id
-         )
-
-         if _is_valid_uuid(stage):
-             dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Id"] == stage]
-         else:
-             dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Name"] == stage]
-         if dfPS.empty:
-             raise ValueError(
-                 f"{icons.red_dot} The '{stage}' stage does not exist within the '{deployment_pipeline}' deployment pipeline."
-             )
-         return dfPS_filt["Deployment Pipeline Stage Id"].iloc[0]
-
-     stage_id = resolve_deployment_pipeline_stage_id(deployment_pipeline_id, stage)
-
-     responses = _base_api(
-         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages/{stage_id}/items",
-         status_codes=200,
-         uses_pagination=True,
-         client="fabric_sp",
-     )
-
-     rows = []
-     for r in responses:
-         for v in r.get("value", []):
-             rows.append(
-                 {
-                     "Deployment Pipeline Stage Item Id": v.get("itemId"),
-                     "Deployment Pipeline Stage Item Name": v.get("itemDisplayName"),
-                     "Item Type": v.get("itemType"),
-                     "Source Item Id": v.get("sourceItemId"),
-                     "Target Item Id": v.get("targetItemId"),
-                     "Last Deployment Time": v.get("lastDeploymentTime"),
-                 }
-             )
-
-     if rows:
-         df = pd.DataFrame(rows, columns=list(columns.keys()))
-
-     return df
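
The removed module above exposed three read-only wrappers over the Fabric deployment-pipelines REST API. A short usage sketch, assuming the functions keep their names under the new sempy_labs.deployment_pipeline subpackage (the import path is inferred from the file list, not confirmed):

from sempy_labs import deployment_pipeline as dp  # assumed new location

# Walk from pipelines to stages to the items deployed in a stage,
# using the column names defined in the removed code above.
df_pipelines = dp.list_deployment_pipelines()
pipeline_id = df_pipelines["Deployment Pipeline Id"].iloc[0]

df_stages = dp.list_deployment_pipeline_stages(deployment_pipeline=pipeline_id)
stage_id = df_stages["Deployment Pipeline Stage Id"].iloc[0]

df_items = dp.list_deployment_pipeline_stage_items(
    deployment_pipeline=pipeline_id, stage=stage_id
)
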
sempy_labs/_eventstreams.py (deleted)
@@ -1,123 +0,0 @@
- import pandas as pd
- from typing import Optional
- from sempy_labs._helper_functions import (
-     _base_api,
-     delete_item,
-     _create_dataframe,
-     create_item,
-     resolve_workspace_id,
- )
- from uuid import UUID
- import sempy_labs._icons as icons
- from sempy._utils._log import log
-
-
- @log
- def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
-     """
-     Shows the eventstreams within a workspace.
-
-     This is a wrapper function for the following API: `Items - List Eventstreams <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventstreams>`_.
-
-     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-     Parameters
-     ----------
-     workspace : str | uuid.UUID, default=None
-         The Fabric workspace name or ID.
-         Defaults to None which resolves to the workspace of the attached lakehouse
-         or if no lakehouse attached, resolves to the workspace of the notebook.
-
-     Returns
-     -------
-     pandas.DataFrame
-         A pandas dataframe showing the eventstreams within a workspace.
-     """
-
-     columns = {
-         "Eventstream Name": "string",
-         "Eventstream Id": "string",
-         "Description": "string",
-     }
-     df = _create_dataframe(columns=columns)
-
-     workspace_id = resolve_workspace_id(workspace)
-     responses = _base_api(
-         request=f"/v1/workspaces/{workspace_id}/eventstreams",
-         uses_pagination=True,
-         client="fabric_sp",
-     )
-
-     rows = []
-     for r in responses:
-         for v in r.get("value", []):
-             rows.append(
-                 {
-                     "Eventstream Name": v.get("displayName"),
-                     "Eventstream Id": v.get("id"),
-                     "Description": v.get("description"),
-                 }
-             )
-
-     if rows:
-         df = pd.DataFrame(rows, columns=list(columns.keys()))
-
-     return df
-
-
- @log
- def create_eventstream(
-     name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
- ):
-     """
-     Creates a Fabric eventstream.
-
-     This is a wrapper function for the following API: `Items - Create Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventstream>`_.
-
-     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-     Parameters
-     ----------
-     name: str
-         Name of the eventstream.
-     description : str, default=None
-         A description of the environment.
-     workspace : str | uuid.UUID, default=None
-         The Fabric workspace name or ID.
-         Defaults to None which resolves to the workspace of the attached lakehouse
-         or if no lakehouse attached, resolves to the workspace of the notebook.
-     """
-
-     create_item(
-         name=name, description=description, type="Eventstream", workspace=workspace
-     )
-
-
- @log
- def delete_eventstream(
-     eventstream: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
- ):
-     """
-     Deletes a Fabric eventstream.
-
-     This is a wrapper function for the following API: `Items - Delete Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventstream>`_.
-
-     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-     Parameters
-     ----------
-     eventstream: str | uuid.UUID
-         Name or ID of the eventstream.
-     workspace : str | uuid.UUID, default=None
-         The Fabric workspace name or ID.
-         Defaults to None which resolves to the workspace of the attached lakehouse
-         or if no lakehouse attached, resolves to the workspace of the notebook.
-     """
-
-     if "name" in kwargs:
-         eventstream = kwargs["name"]
-         print(
-             f"{icons.warning} The 'name' parameter is deprecated. Please use 'eventstream' instead."
-         )
-
-     delete_item(item=eventstream, type="Eventstream", workspace=workspace)
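
Likewise, the removed eventstream module wrapped the list/create/delete item APIs. A minimal usage sketch, assuming the functions now live in the sempy_labs.eventstream subpackage added in this release (import path inferred from the file list, not confirmed):

from sempy_labs import eventstream as es  # assumed new location

# List existing eventstreams, create one, then delete it again.
df = es.list_eventstreams(workspace="My Workspace")
es.create_eventstream(name="SalesEvents", description="Demo stream", workspace="My Workspace")
es.delete_eventstream(eventstream="SalesEvents", workspace="My Workspace")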