semantic-link-labs 0.9.2__py3-none-any.whl → 0.9.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +10 -6
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +54 -44
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +27 -1
- sempy_labs/_ai.py +8 -5
- sempy_labs/_capacity_migration.py +3 -2
- sempy_labs/_connections.py +45 -9
- sempy_labs/_dax.py +17 -3
- sempy_labs/_delta_analyzer.py +308 -138
- sempy_labs/_eventhouses.py +70 -1
- sempy_labs/_gateways.py +56 -8
- sempy_labs/_generate_semantic_model.py +30 -9
- sempy_labs/_helper_functions.py +84 -9
- sempy_labs/_job_scheduler.py +226 -2
- sempy_labs/_list_functions.py +42 -19
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +17 -2
- sempy_labs/_model_bpa_rules.py +20 -8
- sempy_labs/_semantic_models.py +117 -0
- sempy_labs/_sql.py +73 -6
- sempy_labs/_sqldatabase.py +227 -0
- sempy_labs/_translations.py +2 -2
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_warehouses.py +1 -1
- sempy_labs/admin/__init__.py +49 -8
- sempy_labs/admin/_activities.py +166 -0
- sempy_labs/admin/_apps.py +143 -0
- sempy_labs/admin/_basic_functions.py +32 -652
- sempy_labs/admin/_capacities.py +250 -0
- sempy_labs/admin/_datasets.py +184 -0
- sempy_labs/admin/_domains.py +1 -3
- sempy_labs/admin/_items.py +3 -1
- sempy_labs/admin/_reports.py +165 -0
- sempy_labs/admin/_scanner.py +53 -49
- sempy_labs/admin/_shared.py +74 -0
- sempy_labs/admin/_tenant.py +489 -0
- sempy_labs/directlake/_dl_helper.py +0 -1
- sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
- sempy_labs/graph/_teams.py +1 -1
- sempy_labs/graph/_users.py +9 -1
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +2 -2
- sempy_labs/lakehouse/_lakehouse.py +3 -3
- sempy_labs/lakehouse/_shortcuts.py +29 -16
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/report/__init__.py +3 -1
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +272 -0
- sempy_labs/report/_report_functions.py +11 -263
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/tom/_model.py +281 -29
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
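The bulk of this release is a reorganization of the admin subpackage: `sempy_labs/admin/_basic_functions.py` loses roughly 650 lines while new topic modules (`_activities.py`, `_apps.py`, `_capacities.py`, `_datasets.py`, `_reports.py`, `_shared.py`, `_tenant.py`) are added and `sempy_labs/admin/__init__.py` grows by 49 lines. The diff below (which matches the `+32 -652` entry for `_basic_functions.py`) shows `list_capacities`, `list_tenant_settings`, `list_capacities_delegated_tenant_settings`, `list_datasets`, `list_activity_events` and the capacity helpers being removed from that module, with `_resolve_capacity_name_and_id` now imported from `sempy_labs.admin._capacities`. The sketch that follows is a minimal smoke test under the assumption that the relocated functions keep their 0.9.2 signatures and remain re-exported from `sempy_labs.admin`; the diff itself does not show the new modules' contents.

```python
# Minimal smoke test for the relocated admin functions (a sketch, not part of the
# package). Assumptions: the functions removed from _basic_functions.py in this
# diff keep their 0.9.2 signatures, are still re-exported from sempy_labs.admin
# after moving into the new _capacities/_tenant/_datasets/_activities modules,
# and the code runs inside a Microsoft Fabric notebook.
from sempy_labs import admin

# In 0.9.2, list_capacities() returned a DataFrame with "Capacity Id",
# "Capacity Name", "Sku", "Region", "State" and "Admins" columns (see the
# removed function body in the diff below).
capacities = admin.list_capacities()
print(capacities[["Capacity Name", "Sku", "State"]].head())

# list_activity_events() required start/end times within the same UTC day.
events = admin.list_activity_events(
    start_time="2024-09-25T07:55:00",
    end_time="2024-09-25T08:55:00",
    activity_filter="viewreport",
)
print(len(events))
```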
```diff
@@ -8,7 +8,6 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
-    get_capacity_id,
 )
 from sempy._utils._log import log
 import numpy as np
@@ -47,6 +46,9 @@ def list_workspaces(
     pandas.DataFrame
         A pandas dataframe showing a list of workspaces for the organization.
     """
+
+    from sempy_labs.admin._capacities import _resolve_capacity_name_and_id
+
     if "filter" in kwargs:
         print(
             "The 'filter' parameter has been deprecated. Please remove this parameter from the function going forward."
@@ -118,63 +120,6 @@ def list_workspaces(
     return df
 
 
-@log
-def list_capacities(
-    capacity: Optional[str | UUID] = None,
-) -> pd.DataFrame:
-    """
-    Shows the a list of capacities and their properties.
-
-    This is a wrapper function for the following API: `Admin - Get Capacities As Admin <https://learn.microsoft.com/rest/api/power-bi/admin/get-capacities-as-admin>`_.
-
-    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-    Parameters
-    ----------
-    capacity : str | uuid.UUID, default=None
-        Capacity name or id to filter.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the capacities and their properties.
-    """
-
-    columns = {
-        "Capacity Id": "string",
-        "Capacity Name": "string",
-        "Sku": "string",
-        "Region": "string",
-        "State": "string",
-        "Admins": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    responses = _base_api(
-        request="/v1.0/myorg/admin/capacities", client="fabric_sp", uses_pagination=True
-    )
-
-    for r in responses:
-        for i in r.get("value", []):
-            new_data = {
-                "Capacity Id": i.get("id").lower(),
-                "Capacity Name": i.get("displayName"),
-                "Sku": i.get("sku"),
-                "Region": i.get("region"),
-                "State": i.get("state"),
-                "Admins": [i.get("admins", [])],
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    if capacity is not None:
-        if _is_valid_uuid(capacity):
-            df = df[df["Capacity Id"] == capacity.lower()]
-        else:
-            df = df[df["Capacity Name"] == capacity]
-
-    return df
-
-
 @log
 def assign_workspaces_to_capacity(
     source_capacity: Optional[str | UUID] = None,
@@ -193,9 +138,11 @@ def assign_workspaces_to_capacity(
     target_capacity : str | uuid.UUID, default=None
         The name of the target capacity.
     workspace : str | List[str] | uuid.UUID | List[uuid.UUID], default=None
-        The name or
+        The name or ID of the workspace(s).
         Defaults to None which resolves to migrating all workspaces within the source capacity to the target capacity.
     """
+    from sempy_labs.admin._capacities import _resolve_capacity_name_and_id
+
     if target_capacity is None:
         raise ValueError(
             f"{icons.red_dot} The parameter 'target_capacity' is mandatory."
@@ -276,7 +223,7 @@ def unassign_workspaces_from_capacity(
     Parameters
     ----------
     workspaces : str | List[str] | uuid.UUID | List[uuid.UUID]
-        The
+        The workspace name(s) or ID(s).
     """
     if isinstance(workspaces, str):
         workspaces = [workspaces]
@@ -302,133 +249,6 @@ def unassign_workspaces_from_capacity(
     )
 
 
-@log
-def list_tenant_settings() -> pd.DataFrame:
-    """
-    Lists all tenant settings.
-
-    This is a wrapper function for the following API: `Tenants - List Tenant Settings <https://learn.microsoft.com/rest/api/fabric/admin/tenants/list-tenant-settings>`_.
-
-    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the tenant settings.
-    """
-
-    columns = {
-        "Setting Name": "string",
-        "Title": "string",
-        "Enabled": "bool",
-        "Can Specify Security Groups": "bool",
-        "Tenant Setting Group": "string",
-        "Enabled Security Groups": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    response = _base_api(request="/v1/admin/tenantsettings", client="fabric_sp")
-
-    for i in response.json().get("value", []):
-        new_data = {
-            "Setting Name": i.get("settingName"),
-            "Title": i.get("title"),
-            "Enabled": i.get("enabled"),
-            "Can Specify Security Groups": i.get("canSpecifySecurityGroups"),
-            "Tenant Setting Group": i.get("tenantSettingGroup"),
-            "Enabled Security Groups": [i.get("enabledSecurityGroups", [])],
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
-
-    return df
-
-
-def list_capacities_delegated_tenant_settings(
-    return_dataframe: bool = True,
-) -> pd.DataFrame | dict:
-    """
-    Returns list of tenant setting overrides that override at the capacities.
-
-    This is a wrapper function for the following API: `Tenants - List Capacities Tenant Settings Overrides <https://learn.microsoft.com/rest/api/fabric/admin/tenants/list-capacities-tenant-settings-overrides>`_.
-
-    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-    Parameters
-    ----------
-    return_dataframe : bool, default=True
-        If True, returns a dataframe. If False, returns a dictionary.
-
-    Returns
-    -------
-    pandas.DataFrame | dict
-        A pandas dataframe showing a list of tenant setting overrides that override at the capacities.
-    """
-
-    columns = {
-        "Capacity Id": "string",
-        "Setting Name": "string",
-        "Setting Title": "string",
-        "Setting Enabled": "bool",
-        "Can Specify Security Groups": "bool",
-        "Enabled Security Groups": "string",
-        "Tenant Setting Group": "string",
-        "Tenant Setting Properties": "string",
-        "Delegate to Workspace": "bool",
-        "Delegated From": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    responses = _base_api(
-        request="/v1/admin/capacities/delegatedTenantSettingOverrides",
-        client="fabric_sp",
-        uses_pagination=True,
-    )
-
-    if return_dataframe:
-        for r in responses:
-            for i in r.get("Overrides", []):
-                tenant_settings = i.get("tenantSettings", [])
-                for setting in tenant_settings:
-                    new_data = {
-                        "Capacity Id": i.get("id"),
-                        "Setting Name": setting.get("settingName"),
-                        "Setting Title": setting.get("title"),
-                        "Setting Enabled": setting.get("enabled"),
-                        "Can Specify Security Groups": setting.get(
-                            "canSpecifySecurityGroups"
-                        ),
-                        "Enabled Security Groups": [
-                            setting.get("enabledSecurityGroups", [])
-                        ],
-                        "Tenant Setting Group": setting.get("tenantSettingGroup"),
-                        "Tenant Setting Properties": [setting.get("properties", [])],
-                        "Delegate to Workspace": setting.get("delegateToWorkspace"),
-                        "Delegated From": setting.get("delegatedFrom"),
-                    }
-
-                    df = pd.concat(
-                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                    )
-
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
-
-        return df
-    else:
-        combined_response = {
-            "overrides": [],
-            "continuationUri": "",
-            "continuationToken": "",
-        }
-        for r in responses:
-            combined_response["overrides"].extend(r["Overrides"])
-            combined_response["continuationUri"] = r["continuationUri"]
-            combined_response["continuationToken"] = r["continuationToken"]
-
-        return combined_response
-
-
 def list_modified_workspaces(
     modified_since: Optional[str] = None,
     exclude_inactive_workspaces: Optional[bool] = False,
@@ -478,112 +298,6 @@ def list_modified_workspaces(
     return df
 
 
-def list_datasets(
-    top: Optional[int] = None,
-    filter: Optional[str] = None,
-    skip: Optional[int] = None,
-) -> pd.DataFrame:
-    """
-    Shows a list of datasets for the organization.
-
-    This is a wrapper function for the following API: `Admin - Datasets GetDatasetsAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/datasets-get-datasets-as-admin>`_.
-
-    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-    Parameters
-    ----------
-    top : int, default=None
-        Returns only the first n results.
-    filter : str, default=None
-        Returns a subset of a results based on Odata filter query parameter condition.
-    skip : int, default=None
-        Skips the first n results.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing a list of datasets for the organization.
-    """
-
-    columns = {
-        "Dataset Id": "string",
-        "Dataset Name": "string",
-        "Web URL": "string",
-        "Add Rows API Enabled": "bool",
-        "Configured By": "string",
-        "Is Refreshable": "bool",
-        "Is Effective Identity Required": "bool",
-        "Is Effective Identity Roles Required": "bool",
-        "Target Storage Mode": "string",
-        "Created Date": "datetime",
-        "Content Provider Type": "string",
-        "Create Report Embed URL": "string",
-        "QnA Embed URL": "string",
-        "Upstream Datasets": "string",
-        "Users": "string",
-        "Is In Place Sharing Enabled": "bool",
-        "Workspace Id": "string",
-        "Auto Sync Read Only Replicas": "bool",
-        "Max Read Only Replicas": "int",
-    }
-
-    df = _create_dataframe(columns=columns)
-
-    params = {}
-    url = "/v1.0/myorg/admin/datasets"
-
-    if top is not None:
-        params["$top"] = top
-
-    if filter is not None:
-        params["$filter"] = filter
-
-    if skip is not None:
-        params["$skip"] = skip
-
-    url = _build_url(url, params)
-    response = _base_api(request=url, client="fabric_sp")
-
-    rows = []
-    for v in response.json().get("value", []):
-        rows.append(
-            {
-                "Dataset Id": v.get("id"),
-                "Dataset Name": v.get("name"),
-                "Web URL": v.get("webUrl"),
-                "Add Rows API Enabled": v.get("addRowsAPIEnabled"),
-                "Configured By": v.get("configuredBy"),
-                "Is Refreshable": v.get("isRefreshable"),
-                "Is Effective Identity Required": v.get("isEffectiveIdentityRequired"),
-                "Is Effective Identity Roles Required": v.get(
-                    "isEffectiveIdentityRolesRequired"
-                ),
-                "Target Storage Mode": v.get("targetStorageMode"),
-                "Created Date": pd.to_datetime(v.get("createdDate")),
-                "Content Provider Type": v.get("contentProviderType"),
-                "Create Report Embed URL": v.get("createReportEmbedURL"),
-                "QnA Embed URL": v.get("qnaEmbedURL"),
-                "Upstream Datasets": v.get("upstreamDatasets", []),
-                "Users": v.get("users", []),
-                "Is In Place Sharing Enabled": v.get("isInPlaceSharingEnabled"),
-                "Workspace Id": v.get("workspaceId"),
-                "Auto Sync Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
-                    "autoSyncReadOnlyReplicas"
-                ),
-                "Max Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
-                    "maxReadOnlyReplicas"
-                ),
-            }
-        )
-
-    if rows:
-        df = pd.DataFrame(rows, columns=list(columns.keys()))
-
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
-
-    return df
-
-
 def list_access_entities(
     user_email_address: str,
 ) -> pd.DataFrame:
@@ -649,7 +363,7 @@ def list_workspace_access_details(
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
-        The
+        The workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -688,216 +402,6 @@ def list_workspace_access_details(
     return df
 
 
-def list_activity_events(
-    start_time: str,
-    end_time: str,
-    activity_filter: Optional[str] = None,
-    user_id_filter: Optional[str] = None,
-    return_dataframe: bool = True,
-) -> pd.DataFrame | dict:
-    """
-    Shows a list of audit activity events for a tenant.
-
-    This is a wrapper function for the following API: `Admin - Get Activity Events <https://learn.microsoft.com/rest/api/power-bi/admin/get-activity-events>`_.
-
-    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-    Parameters
-    ----------
-    start_time : str
-        Start date and time of the window for audit event results. Example: "2024-09-25T07:55:00".
-    end_time : str
-        End date and time of the window for audit event results. Example: "2024-09-25T08:55:00".
-    activity_filter : str, default=None
-        Filter value for activities. Example: 'viewreport'.
-    user_id_filter : str, default=None
-        Email address of the user.
-    return_dataframe : bool, default=True
-        If True the response is a pandas.DataFrame. If False returns the original Json. Default True
-
-    Returns
-    -------
-    pandas.DataFrame | dict
-        A pandas dataframe or json showing a list of audit activity events for a tenant.
-    """
-    start_dt = dtparser(start_time)
-    end_dt = dtparser(end_time)
-
-    if not start_dt.date() == end_dt.date():
-        raise ValueError(
-            f"{icons.red_dot} Start and End Times must be within the same UTC day. Please refer to the documentation here: https://learn.microsoft.com/rest/api/power-bi/admin/get-activity-events#get-audit-activity-events-within-a-time-window-and-for-a-specific-activity-type-and-user-id-example"
-        )
-
-    columns = {
-        "Id": "string",
-        "Record Type": "string",
-        "Creation Time": "datetime",
-        "Operation": "string",
-        "Organization Id": "string",
-        "User Type": "string",
-        "User Key": "string",
-        "Workload": "string",
-        "Result Status": "string",
-        "User Id": "string",
-        "Client IP": "string",
-        "User Agent": "string",
-        "Activity": "string",
-        "Workspace Name": "string",
-        "Workspace Id": "string",
-        "Object Id": "string",
-        "Request Id": "string",
-        "Object Type": "string",
-        "Object Display Name": "string",
-        "Experience": "string",
-        "Refresh Enforcement Policy": "string",
-        "Is Success": "bool",
-        "Activity Id": "string",
-        "Item Name": "string",
-        "Dataset Name": "string",
-        "Report Name": "string",
-        "Capacity Id": "string",
-        "Capacity Name": "string",
-        "App Name": "string",
-        "Dataset Id": "string",
-        "Report Id": "string",
-        "Artifact Id": "string",
-        "Artifact Name": "string",
-        "Report Type": "string",
-        "App Report Id": "string",
-        "Distribution Method": "string",
-        "Consumption Method": "string",
-        "Artifact Kind": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    response_json = {"activityEventEntities": []}
-    url = f"/v1.0/myorg/admin/activityevents?startDateTime='{start_time}'&endDateTime='{end_time}'"
-
-    conditions = []
-    if activity_filter is not None:
-        conditions.append(f"Activity eq '{activity_filter}'")
-    if user_id_filter is not None:
-        conditions.append(f"UserId eq '{user_id_filter}'")
-
-    if conditions:
-        url += f"&$filter={f' and '.join(conditions)}"
-
-    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)
-
-    for r in responses:
-        if return_dataframe:
-            for i in r.get("activityEventEntities", []):
-                new_data = {
-                    "Id": i.get("id"),
-                    "Record Type": i.get("RecordType"),
-                    "Creation Time": i.get("CreationTime"),
-                    "Operation": i.get("Operation"),
-                    "Organization Id": i.get("OrganizationId"),
-                    "User Type": i.get("UserType"),
-                    "User Key": i.get("UserKey"),
-                    "Workload": i.get("Workload"),
-                    "Result Status": i.get("ResultStatus"),
-                    "User Id": i.get("UserId"),
-                    "Client IP": i.get("ClientIP"),
-                    "User Agent": i.get("UserAgent"),
-                    "Activity": i.get("Activity"),
-                    "Workspace Name": i.get("WorkSpaceName"),
-                    "Workspace Id": i.get("WorkspaceId"),
-                    "Object Id": i.get("ObjectId"),
-                    "Request Id": i.get("RequestId"),
-                    "Object Type": i.get("ObjectType"),
-                    "Object Display Name": i.get("ObjectDisplayName"),
-                    "Experience": i.get("Experience"),
-                    "Refresh Enforcement Policy": i.get("RefreshEnforcementPolicy"),
-                    "Is Success": i.get("IsSuccess"),
-                    "Activity Id": i.get("ActivityId"),
-                    "Item Name": i.get("ItemName"),
-                    "Dataset Name": i.get("DatasetName"),
-                    "Report Name": i.get("ReportName"),
-                    "Capacity Id": i.get("CapacityId"),
-                    "Capacity Name": i.get("CapacityName"),
-                    "App Name": i.get("AppName"),
-                    "Dataset Id": i.get("DatasetId"),
-                    "Report Id": i.get("ReportId"),
-                    "Artifact Id": i.get("ArtifactId"),
-                    "Artifact Name": i.get("ArtifactName"),
-                    "Report Type": i.get("ReportType"),
-                    "App Report Id": i.get("AppReportId"),
-                    "Distribution Method": i.get("DistributionMethod"),
-                    "Consumption Method": i.get("ConsumptionMethod"),
-                    "Artifact Kind": i.get("ArtifactKind"),
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])],
-                    ignore_index=True,
-                )
-        else:
-            response_json["activityEventEntities"].extend(
-                r.get("activityEventEntities")
-            )
-
-    if return_dataframe:
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
-        return df
-    else:
-        return response_json
-
-
-def _resolve_capacity_name_and_id(
-    capacity: str | UUID,
-) -> Tuple[str, UUID]:
-
-    dfC = list_capacities(capacity=capacity)
-    try:
-        capacity_name = dfC["Capacity Name"].iloc[0]
-        capacity_id = dfC["Capacity Id"].iloc[0]
-    except Exception:
-        raise ValueError(f"{icons.red_dot} The '{capacity}' capacity was not found.")
-
-    return capacity_name, capacity_id
-
-
-def _list_capacities_meta() -> pd.DataFrame:
-    """
-    Shows the a list of capacities and their properties. This function is the admin version.
-
-    This is a wrapper function for the following API: `Admin - Get Capacities As Admin <https://learn.microsoft.com/rest/api/power-bi/admin/get-capacities-as-admin>`_.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the capacities and their properties
-    """
-
-    columns = {
-        "Capacity Id": "string",
-        "Capacity Name": "string",
-        "Sku": "string",
-        "Region": "string",
-        "State": "string",
-        "Admins": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    responses = _base_api(
-        request="/v1.0/myorg/admin/capacities", client="fabric_sp", uses_pagination=True
-    )
-
-    for r in responses:
-        for i in r.get("value", []):
-            new_data = {
-                "Capacity Id": i.get("id").lower(),
-                "Capacity Name": i.get("displayName"),
-                "Sku": i.get("sku"),
-                "Region": i.get("region"),
-                "State": i.get("state"),
-                "Admins": [i.get("admins", [])],
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
 def _resolve_workspace_name_and_id(
     workspace: str | UUID,
 ) -> Tuple[str, UUID]:
@@ -918,83 +422,53 @@ def _resolve_workspace_name_and_id(
     return workspace_name, workspace_id
 
 
-def
-    top: Optional[int] = None,
-    skip: Optional[int] = None,
-    filter: Optional[str] = None,
-) -> pd.DataFrame:
+def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
-    Shows a list of
+    Shows a list of users that have access to the specified workspace.
 
-    This is a wrapper function for the following API: `Admin -
+    This is a wrapper function for the following API: `Admin - Groups GetGroupUsersAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/groups-get-group-users-as-admin>`_.
 
     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
 
     Parameters
     ----------
-
-
-
-
-    filter : str, default=None
-        Returns a subset of a results based on Odata filter query parameter condition.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
 
     Returns
    -------
     pandas.DataFrame
-        A pandas dataframe showing a list of
+        A pandas dataframe showing a list of users that have access to the specified workspace.
     """
 
+    (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
+
     columns = {
-        "
-        "
-        "
-        "
-        "
-        "
-        "Created Date": "datetime_coerce",
-        "Modified Date": "datetime_coerce",
-        "Created By": "string",
-        "Modified By": "string",
-        "Sensitivity Label Id": "string",
-        "Users": "string",
-        "Subscriptions": "string",
-        "Workspace Id": "string",
-        "Report Flags": "int",
+        "User Name": "string",
+        "Email Address": "string",
+        "Group User Access Right": "string",
+        "Identifier": "string",
+        "Graph Id": "string",
+        "Principal Type": "string",
     }
 
     df = _create_dataframe(columns=columns)
 
-    url = "/v1.0/myorg/admin/
-    if top is not None:
-        url += f"$top={top}&"
-    if skip is not None:
-        url += f"$skip={skip}&"
-    if filter is not None:
-        url += f"$filter={filter}&"
-
-    url.rstrip("$").rstrip("?")
+    url = f"/v1.0/myorg/admin/groups/{workspace_id}/users"
     response = _base_api(request=url, client="fabric_sp")
-    rows = []
 
+    rows = []
     for v in response.json().get("value", []):
         rows.append(
             {
-                "
-                "
-                "
-                "
-                "
-                "
-                "Created Date": v.get("createdDateTime"),
-                "Modified Date": v.get("modifiedDateTime"),
-                "Created By": v.get("createdBy"),
-                "Modified By": v.get("modifiedBy"),
-                "Sensitivity Label Id": v.get("sensitivityLabel", {}).get("labelId"),
-                "Users": v.get("users"),
-                "Subscriptions": v.get("subscriptions"),
-                "Workspace Id": v.get("workspaceId"),
-                "Report Flags": v.get("reportFlags"),
+                "User Name": v.get("displayName"),
+                "Email Address": v.get("emailAddress"),
+                "Group User Access Right": v.get("groupUserAccessRight"),
+                "Identifier": v.get("identifier"),
+                "Graph Id": v.get("graphId"),
+                "Principal Type": v.get("principalType"),
             }
         )
 
@@ -1004,97 +478,3 @@ def list_reports(
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
-
-
-def get_capacity_assignment_status(
-    workspace: Optional[str | UUID] = None,
-) -> pd.DataFrame:
-    """
-    Gets the status of the assignment-to-capacity operation for the specified workspace.
-
-    This is a wrapper function for the following API: `Capacities - Groups CapacityAssignmentStatus <https://learn.microsoft.com/rest/api/power-bi/capacities/groups-capacity-assignment-status>`_.
-
-    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-    Parameters
-    ----------
-    workspace : str | uuid.UUID, default=None
-        The Fabric workspace name or id.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the status of the assignment-to-capacity operation for the specified workspace.
-    """
-
-    (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
-
-    columns = {
-        "Status": "string",
-        "Activity Id": "string",
-        "Start Time": "datetime",
-        "End Time": "datetime",
-        "Capacity Id": "string",
-        "Capacity Name": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    response = _base_api(
-        request=f"/v1.0/myorg/groups/{workspace_id}/CapacityAssignmentStatus",
-        client="fabric_sp",
-    )
-    v = response.json()
-    capacity_id = v.get("capacityId")
-
-    (capacity_name, capacity_id) = _resolve_capacity_name_and_id(capacity=capacity_id)
-
-    new_data = {
-        "Status": v.get("status"),
-        "Activity Id": v.get("activityId"),
-        "Start Time": v.get("startTime"),
-        "End Time": v.get("endTime"),
-        "Capacity Id": capacity_id,
-        "Capacity Name": capacity_name,
-    }
-
-    df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True)
-
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
-
-    return df
-
-
-def get_capacity_state(capacity: Optional[str | UUID] = None):
-    """
-    Gets the state of a capacity.
-
-    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
-
-    Parameters
-    ----------
-    capacity : str | uuid.UUID, default=None
-        The capacity name or ID.
-        Defaults to None which resolves to the capacity of the attached lakehouse
-        or if no lakehouse is attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    str
-        The capacity state.
-    """
-
-    df = list_capacities()
-
-    if capacity is None:
-        capacity = get_capacity_id()
-    if _is_valid_uuid(capacity):
-        df_filt = df[df["Capacity Id"] == capacity]
-    else:
-        df_filt = df[df["Capacity Name"] == capacity]
-
-    if df_filt.empty:
-        raise ValueError(f"{icons.red_dot} The capacity '{capacity}' was not found.")
-
-    return df_filt["State"].iloc[0]
```