semantic-link-labs 0.12.2__py3-none-any.whl → 0.12.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic. Click here for more details.
- {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/METADATA +5 -3
- {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/RECORD +39 -31
- sempy_labs/__init__.py +18 -10
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_authentication.py +1 -1
- sempy_labs/_capacities.py +1 -1
- sempy_labs/_generate_semantic_model.py +2 -2
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +1 -1
- sempy_labs/_helper_functions.py +28 -4
- sempy_labs/_list_functions.py +55 -5
- sempy_labs/_managed_private_endpoints.py +1 -1
- sempy_labs/_notebooks.py +4 -2
- sempy_labs/_semantic_models.py +118 -0
- sempy_labs/_sql_audit_settings.py +208 -0
- sempy_labs/_sql_endpoints.py +27 -24
- sempy_labs/_utils.py +2 -0
- sempy_labs/_warehouses.py +1 -56
- sempy_labs/admin/__init__.py +6 -0
- sempy_labs/admin/_items.py +3 -3
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/directlake/_warm_cache.py +3 -1
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +12 -0
- sempy_labs/graph/_groups.py +60 -53
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +19 -18
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +23 -16
- sempy_labs/lakehouse/_get_lakehouse_tables.py +33 -1
- sempy_labs/lakehouse/_lakehouse.py +6 -2
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/report/_reportwrapper.py +15 -5
- sempy_labs/tom/_model.py +111 -16
- sempy_labs/_eventstreams.py +0 -123
- {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,652 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from typing import Optional
|
|
3
|
+
from sempy_labs._helper_functions import (
|
|
4
|
+
_base_api,
|
|
5
|
+
resolve_item_id,
|
|
6
|
+
resolve_workspace_id,
|
|
7
|
+
resolve_workspace_name_and_id,
|
|
8
|
+
resolve_item_name_and_id,
|
|
9
|
+
_create_dataframe,
|
|
10
|
+
)
|
|
11
|
+
from uuid import UUID
|
|
12
|
+
import sempy_labs._icons as icons
|
|
13
|
+
from sempy._utils._log import log
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@log
def get_eventstream_destination(
    eventstream: str | UUID,
    destination_id: UUID,
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Returns the specified destination of the eventstream.

    This is a wrapper function for the following API: `Topology - Get Eventstream Destination <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/get-eventstream-destination>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    destination_id : uuid.UUID
        The ID of the destination.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the details of the destination.
    """

    workspace_id = resolve_workspace_id(workspace)
    eventstream_id = resolve_item_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/destinations/{destination_id}",
        client="fabric_sp",
    )

    columns = {
        "Eventstream Destination Id": "str",
        "Eventstream Destination Name": "str",
        "Eventstream Destination Type": "str",
        "Workspace Id": "str",
        "Item Id": "str",
        "Schema": "str",
        "Delta Table": "str",
        "Input Serialization Type": "str",
        "Input Serialization Encoding": "str",
        "Input Nodes": "str",
        "Status": "str",
        "Error": "str",
    }

    # Start from an empty, typed frame; replaced below once a row exists.
    df = _create_dataframe(columns)

    payload = response.json()
    props = payload.get("properties", {})
    serialization = props.get("inputSerialization", {})
    node_names = [node.get("name") for node in payload.get("inputNodes", [])]

    record = {
        "Eventstream Destination Id": payload.get("id"),
        "Eventstream Destination Name": payload.get("name"),
        "Eventstream Destination Type": payload.get("type"),
        "Workspace Id": props.get("workspaceId"),
        "Item Id": props.get("itemId"),
        "Schema": props.get("schema"),
        "Delta Table": props.get("deltaTable"),
        "Input Serialization Type": serialization.get("type"),
        "Input Serialization Encoding": serialization.get("properties", {}).get(
            "encoding"
        ),
        "Input Nodes": ", ".join(node_names) if payload.get("inputNodes") else None,
        "Status": payload.get("status"),
        "Error": payload.get("error"),
    }
    rows = [record]

    if rows:
        df = pd.DataFrame(rows, columns=columns.keys())

    return df
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
@log
def get_eventstream_destination_connection(
    eventstream: str | UUID,
    destination_id: UUID,
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Returns the connection information of a specified destination of the eventstream. Only custom endpoints destinations are supported.

    This is a wrapper function for the following API: `Topology - Get Eventstream Destination Connection <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/get-eventstream-destination-connection>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    destination_id : uuid.UUID
        The ID of the destination.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the connection details of the destination.
    """

    workspace_id = resolve_workspace_id(workspace)
    eventstream_id = resolve_item_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/destinations/{destination_id}/connection",
        client="fabric_sp",
    )

    columns = {
        "Fully Qualified Namespace": "str",
        "EventHub Name": "str",
        "Consumer Group Name": "str",
        "Primary Key": "str",
        "Secondary Key": "str",
        "Primary Connection String": "str",
        "Secondary Connection String": "str",
    }

    df = _create_dataframe(columns=columns)

    payload = response.json()
    # All key/connection-string fields live under a single "accessKeys" object.
    access_keys = payload.get("accessKeys", {})

    rows = [
        {
            "Fully Qualified Namespace": payload.get("fullyQualifiedNamespace"),
            "EventHub Name": payload.get("eventHubName"),
            "Consumer Group Name": payload.get("consumerGroupName"),
            "Primary Key": access_keys.get("primaryKey"),
            "Secondary Key": access_keys.get("secondaryKey"),
            "Primary Connection String": access_keys.get("primaryConnectionString"),
            "Secondary Connection String": access_keys.get("secondaryConnectionString"),
        }
    ]

    if rows:
        df = pd.DataFrame(rows, columns=columns.keys())

    return df
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
@log
def get_eventstream_source(
    eventstream: str | UUID, source_id: UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Returns the specified source of the eventstream.

    This is a wrapper function for the following API: `Topology - Get Eventstream Source <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/get-eventstream-source>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    source_id : uuid.UUID
        The ID of the source.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the details of the source.
    """

    workspace_id = resolve_workspace_id(workspace)
    eventstream_id = resolve_item_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/sources/{source_id}",
        client="fabric_sp",
    )

    columns = {
        "Eventstream Source Id": "str",
        "Eventstream Source Name": "str",
        "Eventstream Source Type": "str",
        "Data Connection Id": "str",
        "Consumer Group Name": "str",
        "Input Serialization Type": "str",
        "Input Serialization Encoding": "str",
        "Status": "str",
        "Error": "str",
    }

    df = _create_dataframe(columns=columns)

    payload = response.json()
    props = payload.get("properties", {})
    serialization = props.get("inputSerialization", {})

    rows = [
        {
            "Eventstream Source Id": payload.get("id"),
            "Eventstream Source Name": payload.get("name"),
            "Eventstream Source Type": payload.get("type"),
            "Data Connection Id": props.get("dataConnectionId"),
            "Consumer Group Name": props.get("consumerGroupName"),
            "Input Serialization Type": serialization.get("type"),
            "Input Serialization Encoding": serialization.get("properties", {}).get(
                "encoding"
            ),
            "Status": payload.get("status"),
            "Error": payload.get("error"),
        }
    ]

    if rows:
        df = pd.DataFrame(rows, columns=columns.keys())

    return df
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
@log
def get_eventstream_source_connection(
    eventstream: str | UUID, source_id: UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Returns the connection information of specified source of the eventstream. Only custom endpoints sources are supported.

    This is a wrapper function for the following API: `Topology - Get Eventstream Source Connection <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/get-eventstream-source-connection>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    source_id : uuid.UUID
        The ID of the source.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the connection details of the source.
    """

    workspace_id = resolve_workspace_id(workspace)
    eventstream_id = resolve_item_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/sources/{source_id}/connection",
        client="fabric_sp",
    )
    payload = response.json()

    columns = {
        "Fully Qualified Namespace": "str",
        "EventHub Name": "str",
        "Consumer Group Name": "str",
        "Primary Key": "str",
        "Secondary Key": "str",
        "Primary Connection String": "str",
        "Secondary Connection String": "str",
    }
    df = _create_dataframe(columns=columns)

    # All key/connection-string fields live under a single "accessKeys" object.
    access_keys = payload.get("accessKeys", {})

    rows = [
        {
            "Fully Qualified Namespace": payload.get("fullyQualifiedNamespace"),
            "EventHub Name": payload.get("eventHubName"),
            "Consumer Group Name": payload.get("consumerGroupName"),
            "Primary Key": access_keys.get("primaryKey"),
            "Secondary Key": access_keys.get("secondaryKey"),
            "Primary Connection String": access_keys.get("primaryConnectionString"),
            "Secondary Connection String": access_keys.get("secondaryConnectionString"),
        }
    ]

    if rows:
        df = pd.DataFrame(rows, columns=columns.keys())

    return df
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
@log
def get_eventstream_topology(
    eventstream: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Returns the topology of the specified eventstream.

    This is a wrapper function for the following API: `Topology - Get Eventstream Topology <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/get-eventstream-topology>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the topology of the eventstream, including sources and destinations.
    """

    workspace_id = resolve_workspace_id(workspace)
    eventstream_id = resolve_item_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/topology",
        client="fabric_sp",
    )

    columns = {
        "Eventstream Source Id": "str",
        "Eventstream Source Name": "str",
        "Eventstream Source Type": "str",
        "Data Connection Id": "str",
        "Consumer Group Name": "str",
        "Input Serialization Type": "str",
        "Input Serialization Encoding": "str",
        "Status": "str",
        "Error": "str",
        "Type": "str",
        "Region": "str",
        "Topic": "str",
        "Auto Offset Reset": "str",
        "SASL Mechanism": "str",
        "Security Protocol": "str",
        "Container Name": "str",
        "Database Name": "str",
        "Offset Policy": "str",
        "Table Name": "str",
        "Server Id": "str",
        "Port": "str",
        "Slot Name": "str",
    }
    df = _create_dataframe(columns=columns)

    rows = []
    for source in response.json().get("sources", []):
        details = source.get("properties", {})
        serialization = details.get("inputSerialization", {})
        rows.append(
            {
                "Eventstream Source Id": source.get("id"),
                "Eventstream Source Name": source.get("name"),
                "Eventstream Source Type": source.get("type"),
                "Data Connection Id": details.get("dataConnectionId"),
                "Consumer Group Name": details.get("consumerGroupName"),
                "Input Serialization Type": serialization.get("type"),
                "Input Serialization Encoding": serialization.get(
                    "properties", {}
                ).get("encoding"),
                "Status": source.get("status"),
                "Error": source.get("error"),
                "Type": details.get("type"),
                "Region": details.get("region"),
                "Topic": details.get("topic"),
                "Auto Offset Reset": details.get("autoOffsetReset"),
                "SASL Mechanism": details.get("saslMechanism"),
                "Security Protocol": details.get("securityProtocol"),
                "Container Name": details.get("containerName"),
                "Database Name": details.get("databaseName"),
                "Offset Policy": details.get("offsetPolicy"),
                "Table Name": details.get("tableName"),
                "Server Id": details.get("serverId"),
                "Port": details.get("port"),
                "Slot Name": details.get("slotName"),
            }
        )

    if rows:
        df = pd.DataFrame(rows, columns=columns.keys())

    return df
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
@log
def resume_eventstream(eventstream: str | UUID, workspace: Optional[str | UUID] = None):
    """
    Resume running all supported sources and destinations of the eventstream.

    This is a wrapper function for the following API: `Topology - Resume Eventstream <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/resume-eventstream>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    eventstream_name, eventstream_id = resolve_item_name_and_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )

    endpoint = f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/resume"
    _base_api(request=endpoint, client="fabric_sp", method="post")

    print(
        f"{icons.green_dot} The '{eventstream_name}' eventstream within the '{workspace_name}' workspace has been resumed."
    )
|
|
461
|
+
|
|
462
|
+
|
|
463
|
+
@log
def resume_eventstream_destination(
    eventstream: str | UUID,
    destination_id: UUID,
    workspace: Optional[str | UUID] = None,
):
    """
    Resume running the specified destination in the eventstream.

    This is a wrapper function for the following API: `Topology - Resume Eventstream Destination <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/resume-eventstream-destination>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    destination_id : uuid.UUID
        The ID of the destination.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    eventstream_name, eventstream_id = resolve_item_name_and_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )

    endpoint = f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/destinations/{destination_id}/resume"
    _base_api(request=endpoint, client="fabric_sp", method="post")

    print(
        f"{icons.green_dot} The '{destination_id}' destination in the '{eventstream_name}' eventstream within the '{workspace_name}' workspace has been resumed."
    )
|
|
501
|
+
|
|
502
|
+
|
|
503
|
+
@log
def resume_eventstream_source(
    eventstream: str | UUID, source_id: UUID, workspace: Optional[str | UUID] = None
):
    """
    Resume running the specified source in the eventstream.

    This is a wrapper function for the following API: `Topology - Resume Eventstream Source <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/resume-eventstream-source>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    source_id : uuid.UUID
        The ID of the source.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    eventstream_name, eventstream_id = resolve_item_name_and_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )

    endpoint = f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/sources/{source_id}/resume"
    _base_api(request=endpoint, client="fabric_sp", method="post")

    print(
        f"{icons.green_dot} The '{source_id}' source in the '{eventstream_name}' eventstream within the '{workspace_name}' workspace has been resumed."
    )
|
|
539
|
+
|
|
540
|
+
|
|
541
|
+
@log
def pause_eventstream(eventstream: str | UUID, workspace: Optional[str | UUID] = None):
    """
    Pause running all supported sources and destinations of the eventstream.

    This is a wrapper function for the following API: `Topology - Pause Eventstream <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/pause-eventstream>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """
    # Fix: the previous signature was annotated `-> dict` but the function never
    # returns a value (it implicitly returns None). The annotation is removed to
    # match actual behavior and the sibling `resume_eventstream` signature.

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (eventstream_name, eventstream_id) = resolve_item_name_and_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )
    _base_api(
        request=f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/pause",
        client="fabric_sp",
        method="post",
    )

    print(
        f"{icons.green_dot} The '{eventstream_name}' eventstream within the '{workspace_name}' workspace has been paused."
    )
|
|
575
|
+
|
|
576
|
+
|
|
577
|
+
@log
def pause_eventstream_destination(
    eventstream: str | UUID,
    destination_id: UUID,
    workspace: Optional[str | UUID] = None,
):
    """
    Pause running the specified destination in the eventstream.

    This is a wrapper function for the following API: `Topology - Pause Eventstream Destination <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/pause-eventstream-destination>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    destination_id : uuid.UUID
        The ID of the destination.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    eventstream_name, eventstream_id = resolve_item_name_and_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )

    endpoint = f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/destinations/{destination_id}/pause"
    _base_api(request=endpoint, client="fabric_sp", method="post")

    print(
        f"{icons.green_dot} The '{destination_id}' destination in the '{eventstream_name}' eventstream within the '{workspace_name}' workspace has been paused."
    )
|
|
615
|
+
|
|
616
|
+
|
|
617
|
+
@log
def pause_eventstream_source(
    eventstream: str | UUID, source_id: UUID, workspace: Optional[str | UUID] = None
):
    """
    Pause running the specified source in the eventstream.

    This is a wrapper function for the following API: `Topology - Pause Eventstream Source <https://learn.microsoft.com/rest/api/fabric/eventstream/topology/pause-eventstream-source>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    eventstream : str | uuid.UUID
        The name or ID of the eventstream.
    source_id : uuid.UUID
        The ID of the source.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    eventstream_name, eventstream_id = resolve_item_name_and_id(
        item=eventstream, type="Eventstream", workspace=workspace_id
    )

    endpoint = f"/v1/workspaces/{workspace_id}/eventstreams/{eventstream_id}/sources/{source_id}/pause"
    _base_api(request=endpoint, client="fabric_sp", method="post")

    print(
        f"{icons.green_dot} The '{source_id}' source in the '{eventstream_name}' eventstream within the '{workspace_name}' workspace has been paused."
    )
|
sempy_labs/graph/__init__.py
CHANGED
|
@@ -22,6 +22,14 @@ from ._users import (
|
|
|
22
22
|
from ._teams import (
|
|
23
23
|
list_teams,
|
|
24
24
|
)
|
|
25
|
+
from ._sensitivity_labels import (
|
|
26
|
+
list_sensitivity_labels,
|
|
27
|
+
resolve_sensitivity_label_id,
|
|
28
|
+
)
|
|
29
|
+
from ._user_licenses import (
|
|
30
|
+
add_user_license,
|
|
31
|
+
remove_user_license,
|
|
32
|
+
)
|
|
25
33
|
|
|
26
34
|
__all__ = [
|
|
27
35
|
"list_groups",
|
|
@@ -42,4 +50,8 @@ __all__ = [
|
|
|
42
50
|
"delete_group",
|
|
43
51
|
"update_user",
|
|
44
52
|
"update_group",
|
|
53
|
+
"list_sensitivity_labels",
|
|
54
|
+
"resolve_sensitivity_label_id",
|
|
55
|
+
"add_user_license",
|
|
56
|
+
"remove_user_license",
|
|
45
57
|
]
|