semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs has been flagged as potentially problematic; consult the registry's advisory page for details.

Files changed (68):
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -4
  5. sempy_labs/_capacities.py +22 -127
  6. sempy_labs/_capacity_migration.py +11 -9
  7. sempy_labs/_dashboards.py +60 -0
  8. sempy_labs/_data_pipelines.py +5 -31
  9. sempy_labs/_dax.py +17 -3
  10. sempy_labs/_delta_analyzer.py +279 -127
  11. sempy_labs/_environments.py +20 -48
  12. sempy_labs/_eventhouses.py +69 -30
  13. sempy_labs/_eventstreams.py +16 -34
  14. sempy_labs/_gateways.py +4 -4
  15. sempy_labs/_generate_semantic_model.py +30 -10
  16. sempy_labs/_git.py +90 -1
  17. sempy_labs/_graphQL.py +3 -20
  18. sempy_labs/_helper_functions.py +201 -44
  19. sempy_labs/_job_scheduler.py +226 -2
  20. sempy_labs/_kql_databases.py +19 -34
  21. sempy_labs/_kql_querysets.py +15 -32
  22. sempy_labs/_list_functions.py +14 -133
  23. sempy_labs/_mirrored_databases.py +14 -48
  24. sempy_labs/_ml_experiments.py +5 -30
  25. sempy_labs/_ml_models.py +4 -28
  26. sempy_labs/_model_bpa.py +17 -0
  27. sempy_labs/_model_bpa_rules.py +12 -2
  28. sempy_labs/_mounted_data_factories.py +119 -0
  29. sempy_labs/_notebooks.py +16 -26
  30. sempy_labs/_semantic_models.py +117 -0
  31. sempy_labs/_sql.py +78 -10
  32. sempy_labs/_sqldatabase.py +227 -0
  33. sempy_labs/_utils.py +42 -0
  34. sempy_labs/_vertipaq.py +17 -2
  35. sempy_labs/_warehouses.py +5 -17
  36. sempy_labs/_workloads.py +23 -9
  37. sempy_labs/_workspaces.py +13 -5
  38. sempy_labs/admin/__init__.py +70 -9
  39. sempy_labs/admin/_activities.py +166 -0
  40. sempy_labs/admin/_apps.py +143 -0
  41. sempy_labs/admin/_artifacts.py +62 -0
  42. sempy_labs/admin/_basic_functions.py +32 -704
  43. sempy_labs/admin/_capacities.py +311 -0
  44. sempy_labs/admin/_datasets.py +184 -0
  45. sempy_labs/admin/_domains.py +1 -1
  46. sempy_labs/admin/_items.py +3 -1
  47. sempy_labs/admin/_reports.py +239 -0
  48. sempy_labs/admin/_scanner.py +0 -1
  49. sempy_labs/admin/_shared.py +76 -0
  50. sempy_labs/admin/_tenant.py +489 -0
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_dl_helper.py +0 -1
  54. sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
  55. sempy_labs/graph/_teams.py +1 -1
  56. sempy_labs/graph/_users.py +9 -1
  57. sempy_labs/lakehouse/__init__.py +2 -0
  58. sempy_labs/lakehouse/_lakehouse.py +6 -7
  59. sempy_labs/lakehouse/_shortcuts.py +216 -64
  60. sempy_labs/report/__init__.py +3 -1
  61. sempy_labs/report/_download_report.py +4 -1
  62. sempy_labs/report/_export_report.py +272 -0
  63. sempy_labs/report/_generate_report.py +9 -17
  64. sempy_labs/report/_report_bpa.py +12 -19
  65. sempy_labs/report/_report_functions.py +9 -261
  66. sempy_labs/tom/_model.py +307 -40
  67. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  68. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/_utils.py ADDED
@@ -0,0 +1,42 @@
1
+ item_types = {
2
+ "Dashboard": ["Dashboard", "dashboards"],
3
+ "DataPipeline": ["Data Pipeline", "dataPipelines", "pipeline-content.json"],
4
+ "Datamart": ["Datamart", "datamarts"],
5
+ "Environment": ["Environment", "environments"],
6
+ "Eventhouse": ["Eventhouse", "eventhouses", "EventhouseProperties.json"],
7
+ "Eventstream": ["Eventstream", "eventstreams", "eventstream.json"],
8
+ "GraphQLApi": ["GraphQL Api", "GraphQLApis"],
9
+ "KQLDashboard": ["KQL Dashboard", "kqlDashboards", "RealTimeDashboard.json"],
10
+ "KQLDatabase": [
11
+ "KQL Database",
12
+ "kqlDatabases",
13
+ ], # "DatabaseProperties.json", "DatabaseSchema.kql"
14
+ "KQLQueryset": ["KQL Queryset", "kqlQuerysets", "RealTimeQueryset.json"],
15
+ "Lakehouse": ["Lakehouse", "lakehouses"],
16
+ "MLExperiment": ["ML Experiment", "mlExperiments"],
17
+ "MLModel": ["ML Model", "mlModels"],
18
+ "MirroredDatabase": [
19
+ "Mirrored Database",
20
+ "mirroredDatabases",
21
+ "mirroredDatabase.json",
22
+ ],
23
+ "MirroredWarehouse": ["Mirrored Warehouse", "mirroredWarehouses"],
24
+ "MountedDataFactory": [
25
+ "Mounted Data Factory",
26
+ "mountedDataFactories",
27
+ "mountedDataFactory-content.json",
28
+ ],
29
+ "Notebook": ["Notebook", "notebooks"],
30
+ "PaginatedReport": ["Paginated Report", "paginatedReports"],
31
+ "Reflex": ["Reflex", "reflexes", "ReflexEntities.json"],
32
+ "Report": ["Report", "reports", "report.json"],
33
+ "SQLDatabase": ["SQL Database", "sqlDatabases"],
34
+ "SQLEndpoint": ["SQL Endpoint", "sqlEndpoints"],
35
+ "SemanticModel": ["Semantic Model", "semanticModels", "model.bim"],
36
+ "SparkJobDefinition": [
37
+ "Spark Job Definition",
38
+ "sparkJobDefinitions",
39
+ "SparkJobDefinitionV1.json",
40
+ ],
41
+ "Warehouse": ["Warehouse", "warehouses"],
42
+ }
sempy_labs/_vertipaq.py CHANGED
@@ -33,9 +33,11 @@ def vertipaq_analyzer(
33
33
  export: Optional[str] = None,
34
34
  read_stats_from_data: bool = False,
35
35
  **kwargs,
36
- ):
36
+ ) -> dict[str, pd.DataFrame]:
37
37
  """
38
- Displays an HTML visualization of the Vertipaq Analyzer statistics from a semantic model.
38
+ Displays an HTML visualization of the `Vertipaq Analyzer <https://www.sqlbi.com/tools/vertipaq-analyzer/>`_ statistics from a semantic model.
39
+
40
+ `Vertipaq Analyzer <https://www.sqlbi.com/tools/vertipaq-analyzer/>`_ is an open-sourced tool built by SQLBI. It provides a detailed analysis of the VertiPaq engine, which is the in-memory engine used by Power BI and Analysis Services Tabular models.
39
41
 
40
42
  Parameters
41
43
  ----------
@@ -51,6 +53,11 @@ def vertipaq_analyzer(
51
53
  Default value: None.
52
54
  read_stats_from_data : bool, default=False
53
55
  Setting this parameter to true has the function get Column Cardinality and Missing Rows using DAX (Direct Lake semantic models achieve this using a Spark query to the lakehouse).
56
+
57
+ Returns
58
+ -------
59
+ dict[str, pandas.DataFrame]
60
+ A dictionary of pandas dataframes showing the vertipaq analyzer statistics.
54
61
  """
55
62
 
56
63
  from sempy_labs.tom import connect_semantic_model
@@ -502,6 +509,14 @@ def vertipaq_analyzer(
502
509
 
503
510
  if export is None:
504
511
  visualize_vertipaq(dfs)
512
+ return {
513
+ "Model Summary": export_Model,
514
+ "Tables": export_Table,
515
+ "Partitions": export_Part,
516
+ "Columns": export_Col,
517
+ "Relationships": export_Rel,
518
+ "Hierarchies": export_Hier,
519
+ }
505
520
 
506
521
  # Export vertipaq to delta tables in lakehouse
507
522
  if export in ["table", "zip"]:
sempy_labs/_warehouses.py CHANGED
@@ -1,9 +1,9 @@
1
- import sempy.fabric as fabric
2
1
  from sempy_labs._helper_functions import (
3
2
  resolve_workspace_name_and_id,
4
3
  _base_api,
5
4
  _create_dataframe,
6
5
  _update_dataframe_datatypes,
6
+ delete_item,
7
7
  )
8
8
  import pandas as pd
9
9
  from typing import Optional
@@ -115,7 +115,7 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
115
115
  return df
116
116
 
117
117
 
118
- def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
118
+ def delete_warehouse(name: str | UUID, workspace: Optional[str | UUID] = None):
119
119
  """
120
120
  Deletes a Fabric warehouse.
121
121
 
@@ -123,27 +123,15 @@ def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
123
123
 
124
124
  Parameters
125
125
  ----------
126
- name: str
127
- Name of the warehouse.
126
+ name: str | uuid.UUID
127
+ Name or ID of the warehouse.
128
128
  workspace : str | uuid.UUID, default=None
129
129
  The Fabric workspace name or ID.
130
130
  Defaults to None which resolves to the workspace of the attached lakehouse
131
131
  or if no lakehouse attached, resolves to the workspace of the notebook.
132
132
  """
133
133
 
134
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
135
-
136
- item_id = fabric.resolve_item_id(
137
- item_name=name, type="Warehouse", workspace=workspace_id
138
- )
139
-
140
- _base_api(
141
- request=f"/v1/workspaces/{workspace_id}/warehouses/{item_id}", method="delete"
142
- )
143
-
144
- print(
145
- f"{icons.green_dot} The '{name}' warehouse within the '{workspace_name}' workspace has been deleted."
146
- )
134
+ delete_item(item=name, type="Warehouse", workspace=workspace)
147
135
 
148
136
 
149
137
  def get_warehouse_tables(
sempy_labs/_workloads.py CHANGED
@@ -6,9 +6,10 @@ from sempy_labs._helper_functions import (
6
6
  _base_api,
7
7
  _create_dataframe,
8
8
  )
9
+ from uuid import UUID
9
10
 
10
11
 
11
- def list_workloads(capacity_name: str) -> pd.DataFrame:
12
+ def list_workloads(capacity: str | UUID, **kwargs) -> pd.DataFrame:
12
13
  """
13
14
  Returns the current state of the specified capacity workloads.
14
15
  If a workload is enabled, the percentage of maximum memory that the workload can consume is also returned.
@@ -17,8 +18,8 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
17
18
 
18
19
  Parameters
19
20
  ----------
20
- capacity_name : str
21
- The capacity name.
21
+ capacity : str | uuid.UUID
22
+ The capacity name or ID.
22
23
 
23
24
  Returns
24
25
  -------
@@ -28,6 +29,12 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
28
29
 
29
30
  from sempy_labs._helper_functions import resolve_capacity_id
30
31
 
32
+ if "capacity_name" in kwargs:
33
+ capacity = kwargs["capacity_name"]
34
+ print(
35
+ f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
36
+ )
37
+
31
38
  columns = {
32
39
  "Workload Name": "string",
33
40
  "State": "string",
@@ -35,7 +42,7 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
35
42
  }
36
43
  df = _create_dataframe(columns=columns)
37
44
 
38
- capacity_id = resolve_capacity_id(capacity_name=capacity_name)
45
+ capacity_id = resolve_capacity_id(capacity=capacity)
39
46
 
40
47
  response = _base_api(request=f"/v1.0/myorg/capacities/{capacity_id}/Workloads")
41
48
 
@@ -53,10 +60,11 @@ def list_workloads(capacity_name: str) -> pd.DataFrame:
53
60
 
54
61
 
55
62
  def patch_workload(
56
- capacity_name: str,
63
+ capacity: str | UUID,
57
64
  workload_name: str,
58
65
  state: Optional[str] = None,
59
66
  max_memory_percentage: Optional[int] = None,
67
+ **kwargs,
60
68
  ):
61
69
  """
62
70
  Changes the state of a specific workload to Enabled or Disabled.
@@ -66,8 +74,8 @@ def patch_workload(
66
74
 
67
75
  Parameters
68
76
  ----------
69
- capacity_name : str
70
- The capacity name.
77
+ capacity : str | uuid.UUID
78
+ The capacity name or ID.
71
79
  workload_name : str
72
80
  The workload name.
73
81
  state : str, default=None
@@ -78,7 +86,13 @@ def patch_workload(
78
86
 
79
87
  from sempy_labs._helper_functions import resolve_capacity_id
80
88
 
81
- capacity_id = resolve_capacity_id(capacity_name=capacity_name)
89
+ if "capacity_name" in kwargs:
90
+ capacity = kwargs["capacity_name"]
91
+ print(
92
+ f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
93
+ )
94
+
95
+ capacity_id = resolve_capacity_id(capacity=capacity)
82
96
 
83
97
  states = ["Disabled", "Enabled", "Unsupported"]
84
98
  state = state.capitalize()
@@ -119,5 +133,5 @@ def patch_workload(
119
133
  _base_api(request=url, method="patch", payload=payload)
120
134
 
121
135
  print(
122
- f"The '{workload_name}' workload within the '{capacity_name}' capacity has been updated accordingly."
136
+ f"The '{workload_name}' workload within the '{capacity}' capacity has been updated accordingly."
123
137
  )
sempy_labs/_workspaces.py CHANGED
@@ -204,7 +204,9 @@ def add_user_to_workspace(
204
204
 
205
205
 
206
206
  def assign_workspace_to_capacity(
207
- capacity_name: str, workspace: Optional[str | UUID] = None
207
+ capacity: str | UUID,
208
+ workspace: Optional[str | UUID] = None,
209
+ **kwargs,
208
210
  ):
209
211
  """
210
212
  Assigns a workspace to a capacity.
@@ -213,16 +215,22 @@ def assign_workspace_to_capacity(
213
215
 
214
216
  Parameters
215
217
  ----------
216
- capacity_name : str
217
- The name of the capacity.
218
+ capacity : str | uuid.UUID
219
+ The name or ID of the capacity.
218
220
  workspace : str | uuid.UUID, default=None
219
221
  The name or ID of the Fabric workspace.
220
222
  Defaults to None which resolves to the workspace of the attached lakehouse
221
223
  or if no lakehouse attached, resolves to the workspace of the notebook.
222
224
  """
223
225
 
226
+ if "capacity_name" in kwargs:
227
+ capacity = kwargs["capacity_name"]
228
+ print(
229
+ f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
230
+ )
231
+
224
232
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
225
- capacity_id = resolve_capacity_id(capacity_name=capacity_name)
233
+ capacity_id = resolve_capacity_id(capacity=capacity)
226
234
 
227
235
  payload = {"capacityId": capacity_id}
228
236
 
@@ -233,7 +241,7 @@ def assign_workspace_to_capacity(
233
241
  status_codes=[200, 202],
234
242
  )
235
243
  print(
236
- f"{icons.green_dot} The '{workspace_name}' workspace has been assigned to the '{capacity_name}' capacity."
244
+ f"{icons.green_dot} The '{workspace_name}' workspace has been assigned to the '{capacity}' capacity."
237
245
  )
238
246
 
239
247
 
@@ -1,21 +1,61 @@
1
+ from sempy_labs.admin._users import (
2
+ list_access_entities,
3
+ list_user_subscriptions,
4
+ )
5
+ from sempy_labs.admin._workspaces import (
6
+ add_user_to_workspace,
7
+ delete_user_from_workspace,
8
+ restore_deleted_workspace,
9
+ )
10
+ from sempy_labs.admin._artifacts import (
11
+ list_unused_artifacts,
12
+ )
13
+ from sempy_labs.admin._shared import (
14
+ list_widely_shared_artifacts,
15
+ )
16
+ from sempy_labs.admin._datasets import (
17
+ list_datasets,
18
+ list_dataset_users,
19
+ )
20
+ from sempy_labs.admin._apps import (
21
+ list_apps,
22
+ list_app_users,
23
+ )
24
+ from sempy_labs.admin._reports import (
25
+ list_reports,
26
+ list_report_users,
27
+ list_report_subscriptions,
28
+ )
29
+ from sempy_labs.admin._activities import (
30
+ list_activity_events,
31
+ )
1
32
  from sempy_labs.admin._scanner import (
2
33
  scan_workspaces,
3
34
  )
35
+ from sempy_labs.admin._capacities import (
36
+ patch_capacity,
37
+ list_capacities,
38
+ get_capacity_assignment_status,
39
+ get_capacity_state,
40
+ list_capacity_users,
41
+ )
42
+ from sempy_labs.admin._tenant import (
43
+ list_tenant_settings,
44
+ delete_capacity_tenant_setting_override,
45
+ update_tenant_setting,
46
+ update_capacity_tenant_setting_override,
47
+ list_workspaces_tenant_settings_overrides,
48
+ list_capacity_tenant_settings_overrides,
49
+ list_capacities_delegated_tenant_settings,
50
+ list_domain_tenant_settings_overrides,
51
+ )
4
52
  from sempy_labs.admin._basic_functions import (
5
53
  assign_workspaces_to_capacity,
6
54
  unassign_workspaces_from_capacity,
7
55
  list_workspaces,
8
56
  list_workspace_access_details,
9
57
  list_modified_workspaces,
10
- list_datasets,
11
- list_reports,
12
- list_capacities,
13
- list_tenant_settings,
14
- list_capacities_delegated_tenant_settings,
15
- list_access_entities,
16
- list_activity_events,
17
- get_capacity_assignment_status,
18
- get_capacity_state,
58
+ list_workspace_users,
19
59
  )
20
60
  from sempy_labs.admin._domains import (
21
61
  list_domains,
@@ -72,4 +112,25 @@ __all__ = [
72
112
  "get_capacity_assignment_status",
73
113
  "scan_workspaces",
74
114
  "get_capacity_state",
115
+ "list_apps",
116
+ "list_app_users",
117
+ "list_dataset_users",
118
+ "list_report_users",
119
+ "patch_capacity",
120
+ "list_workspace_users",
121
+ "list_widely_shared_artifacts",
122
+ "delete_capacity_tenant_setting_override",
123
+ "update_tenant_setting",
124
+ "update_capacity_tenant_setting_override",
125
+ "list_workspaces_tenant_settings_overrides",
126
+ "list_capacity_tenant_settings_overrides",
127
+ "list_capacities_delegated_tenant_settings",
128
+ "list_domain_tenant_settings_overrides",
129
+ "list_unused_artifacts",
130
+ "add_user_to_workspace",
131
+ "delete_user_from_workspace",
132
+ "restore_deleted_workspace",
133
+ "list_capacity_users",
134
+ "list_user_subscriptions",
135
+ "list_report_subscriptions",
75
136
  ]
@@ -0,0 +1,166 @@
1
+ import pandas as pd
2
+ from typing import Optional
3
+ from dateutil.parser import parse as dtparser
4
+ from sempy._utils._log import log
5
+ from sempy_labs._helper_functions import (
6
+ _base_api,
7
+ _create_dataframe,
8
+ _update_dataframe_datatypes,
9
+ )
10
+ import sempy_labs._icons as icons
11
+
12
+
13
@log
def list_activity_events(
    start_time: str,
    end_time: str,
    activity_filter: Optional[str] = None,
    user_id_filter: Optional[str] = None,
    return_dataframe: bool = True,
) -> pd.DataFrame | dict:
    """
    Shows a list of audit activity events for a tenant.

    This is a wrapper function for the following API: `Admin - Get Activity Events <https://learn.microsoft.com/rest/api/power-bi/admin/get-activity-events>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    start_time : str
        Start date and time of the window for audit event results. Example: "2024-09-25T07:55:00".
    end_time : str
        End date and time of the window for audit event results. Example: "2024-09-25T08:55:00".
    activity_filter : str, default=None
        Filter value for activities. Example: 'viewreport'.
    user_id_filter : str, default=None
        Email address of the user.
    return_dataframe : bool, default=True
        If True the response is a pandas.DataFrame. If False returns the original Json. Default True

    Returns
    -------
    pandas.DataFrame | dict
        A pandas dataframe or json showing a list of audit activity events for a tenant.

    Raises
    ------
    ValueError
        If the start and end times do not fall within the same UTC day (an API restriction).
    """
    start_dt = dtparser(start_time)
    end_dt = dtparser(end_time)

    # The REST API restricts the window to a single UTC day.
    if start_dt.date() != end_dt.date():
        raise ValueError(
            f"{icons.red_dot} Start and End Times must be within the same UTC day. Please refer to the documentation here: https://learn.microsoft.com/rest/api/power-bi/admin/get-activity-events#get-audit-activity-events-within-a-time-window-and-for-a-specific-activity-type-and-user-id-example"
        )

    columns = {
        "Id": "string",
        "Record Type": "string",
        "Creation Time": "datetime",
        "Operation": "string",
        "Organization Id": "string",
        "User Type": "string",
        "User Key": "string",
        "Workload": "string",
        "Result Status": "string",
        "User Id": "string",
        "Client IP": "string",
        "User Agent": "string",
        "Activity": "string",
        "Workspace Name": "string",
        "Workspace Id": "string",
        "Object Id": "string",
        "Request Id": "string",
        "Object Type": "string",
        "Object Display Name": "string",
        "Experience": "string",
        "Refresh Enforcement Policy": "string",
        "Is Success": "bool",
        "Activity Id": "string",
        "Item Name": "string",
        "Dataset Name": "string",
        "Report Name": "string",
        "Capacity Id": "string",
        "Capacity Name": "string",
        "App Name": "string",
        "Dataset Id": "string",
        "Report Id": "string",
        "Artifact Id": "string",
        "Artifact Name": "string",
        "Report Type": "string",
        "App Report Id": "string",
        "Distribution Method": "string",
        "Consumption Method": "string",
        "Artifact Kind": "string",
    }
    df = _create_dataframe(columns=columns)

    response_json = {"activityEventEntities": []}
    url = f"/v1.0/myorg/admin/activityevents?startDateTime='{start_time}'&endDateTime='{end_time}'"

    conditions = []
    if activity_filter is not None:
        conditions.append(f"Activity eq '{activity_filter}'")
    if user_id_filter is not None:
        conditions.append(f"UserId eq '{user_id_filter}'")

    if conditions:
        url += f"&$filter={' and '.join(conditions)}"

    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

    if return_dataframe:
        # Accumulate plain dicts and build the dataframe once at the end;
        # pd.concat inside the loop is quadratic in the number of events.
        rows = []
        for r in responses:
            for i in r.get("activityEventEntities", []):
                rows.append(
                    {
                        "Id": i.get("id"),
                        "Record Type": i.get("RecordType"),
                        "Creation Time": i.get("CreationTime"),
                        "Operation": i.get("Operation"),
                        "Organization Id": i.get("OrganizationId"),
                        "User Type": i.get("UserType"),
                        "User Key": i.get("UserKey"),
                        "Workload": i.get("Workload"),
                        "Result Status": i.get("ResultStatus"),
                        "User Id": i.get("UserId"),
                        "Client IP": i.get("ClientIP"),
                        "User Agent": i.get("UserAgent"),
                        "Activity": i.get("Activity"),
                        "Workspace Name": i.get("WorkSpaceName"),
                        "Workspace Id": i.get("WorkspaceId"),
                        "Object Id": i.get("ObjectId"),
                        "Request Id": i.get("RequestId"),
                        "Object Type": i.get("ObjectType"),
                        "Object Display Name": i.get("ObjectDisplayName"),
                        "Experience": i.get("Experience"),
                        "Refresh Enforcement Policy": i.get("RefreshEnforcementPolicy"),
                        "Is Success": i.get("IsSuccess"),
                        "Activity Id": i.get("ActivityId"),
                        "Item Name": i.get("ItemName"),
                        "Dataset Name": i.get("DatasetName"),
                        "Report Name": i.get("ReportName"),
                        "Capacity Id": i.get("CapacityId"),
                        "Capacity Name": i.get("CapacityName"),
                        "App Name": i.get("AppName"),
                        "Dataset Id": i.get("DatasetId"),
                        "Report Id": i.get("ReportId"),
                        "Artifact Id": i.get("ArtifactId"),
                        "Artifact Name": i.get("ArtifactName"),
                        "Report Type": i.get("ReportType"),
                        "App Report Id": i.get("AppReportId"),
                        "Distribution Method": i.get("DistributionMethod"),
                        "Consumption Method": i.get("ConsumptionMethod"),
                        "Artifact Kind": i.get("ArtifactKind"),
                    }
                )
        if rows:
            df = pd.DataFrame(rows, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)
        return df

    for r in responses:
        # Default to an empty list so a page without the key cannot
        # extend(None) and crash.
        response_json["activityEventEntities"].extend(
            r.get("activityEventEntities", [])
        )
    return response_json
@@ -0,0 +1,143 @@
1
+ import pandas as pd
2
+ from typing import Optional
3
+ from sempy_labs._helper_functions import (
4
+ _build_url,
5
+ _base_api,
6
+ _create_dataframe,
7
+ _update_dataframe_datatypes,
8
+ _is_valid_uuid,
9
+ )
10
+ from uuid import UUID
11
+ import sempy_labs._icons as icons
12
+
13
+
14
def list_apps(
    top: Optional[int] = 1000,
    skip: Optional[int] = None,
) -> pd.DataFrame:
    """
    Shows a list of apps in the organization.

    This is a wrapper function for the following API: `Admin - Apps GetAppsAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/apps-get-apps-as-admin>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    top : int, default=1000
        Returns only the first n results.
    skip : int, default=None
        Skips the first n results.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of apps in the organization.
    """

    columns = {
        "App Name": "string",
        "App Id": "string",
        "Description": "string",
        "Published By": "string",
        "Last Update": "datetime_coerce",
    }

    df = _create_dataframe(columns=columns)

    params = {}
    url = "/v1.0/myorg/admin/apps"

    # Only include paging parameters that were actually provided; setting
    # "$top" unconditionally would serialize an explicit top=None as a
    # literal "$top=None" query value.
    if top is not None:
        params["$top"] = top

    if skip is not None:
        params["$skip"] = skip

    url = _build_url(url, params)
    response = _base_api(request=url, client="fabric_sp")

    rows = []
    for v in response.json().get("value", []):
        rows.append(
            {
                "App Name": v.get("name"),
                "App Id": v.get("id"),
                "Description": v.get("description"),
                "Published By": v.get("publishedBy"),
                "Last Update": v.get("lastUpdate"),
            }
        )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
77
+
78
+
79
def _resolve_app_id(app: str | UUID) -> str:
    """
    Resolve an app name or ID to the app ID.

    A value that is already a valid UUID is returned as-is; otherwise the
    organization's apps are listed and matched by name.

    Raises
    ------
    ValueError
        If no app with the given name exists.
    """

    # Already an ID -> nothing to resolve.
    if _is_valid_uuid(app):
        return app

    apps = list_apps()
    matches = apps[apps["App Name"] == app]
    if matches.empty:
        raise ValueError(f"{icons.red_dot} The '{app}' app does not exist.")
    return matches["App Id"].iloc[0]
88
+
89
+
90
def list_app_users(app: str | UUID) -> pd.DataFrame:
    """
    Shows a list of users that have access to the specified app.

    This is a wrapper function for the following API: `Admin - Apps GetAppUsersAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/apps-get-app-users-as-admin>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    app : str | uuid.UUID
        The name or ID of the app.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of users that have access to the specified app.
    """

    app_id = _resolve_app_id(app)

    columns = {
        "User Name": "string",
        "Email Address": "string",
        "App User Access Right": "string",
        "Identifier": "string",
        "Graph Id": "string",
        "Principal Type": "string",
    }

    df = _create_dataframe(columns=columns)

    response = _base_api(
        request=f"/v1.0/myorg/admin/apps/{app_id}/users", client="fabric_sp"
    )

    # Map the API's camelCase fields onto the dataframe's column names.
    records = [
        {
            "User Name": user.get("displayName"),
            "Email Address": user.get("emailAddress"),
            "App User Access Right": user.get("appUserAccessRight"),
            "Identifier": user.get("identifier"),
            "Graph Id": user.get("graphId"),
            "Principal Type": user.get("principalType"),
        }
        for user in response.json().get("value", [])
    ]

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))

    _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df