semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (41)
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +9 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +41 -31
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +27 -1
  5. sempy_labs/_capacity_migration.py +3 -2
  6. sempy_labs/_dax.py +17 -3
  7. sempy_labs/_delta_analyzer.py +279 -127
  8. sempy_labs/_eventhouses.py +70 -1
  9. sempy_labs/_generate_semantic_model.py +30 -9
  10. sempy_labs/_helper_functions.py +30 -1
  11. sempy_labs/_job_scheduler.py +226 -2
  12. sempy_labs/_list_functions.py +40 -16
  13. sempy_labs/_model_bpa.py +15 -0
  14. sempy_labs/_model_bpa_rules.py +12 -2
  15. sempy_labs/_semantic_models.py +117 -0
  16. sempy_labs/_sql.py +73 -6
  17. sempy_labs/_sqldatabase.py +227 -0
  18. sempy_labs/admin/__init__.py +49 -8
  19. sempy_labs/admin/_activities.py +166 -0
  20. sempy_labs/admin/_apps.py +143 -0
  21. sempy_labs/admin/_basic_functions.py +32 -652
  22. sempy_labs/admin/_capacities.py +250 -0
  23. sempy_labs/admin/_datasets.py +184 -0
  24. sempy_labs/admin/_domains.py +1 -1
  25. sempy_labs/admin/_items.py +3 -1
  26. sempy_labs/admin/_reports.py +165 -0
  27. sempy_labs/admin/_scanner.py +0 -1
  28. sempy_labs/admin/_shared.py +74 -0
  29. sempy_labs/admin/_tenant.py +489 -0
  30. sempy_labs/directlake/_dl_helper.py +0 -1
  31. sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
  32. sempy_labs/graph/_teams.py +1 -1
  33. sempy_labs/graph/_users.py +9 -1
  34. sempy_labs/lakehouse/_shortcuts.py +28 -15
  35. sempy_labs/report/__init__.py +3 -1
  36. sempy_labs/report/_download_report.py +4 -1
  37. sempy_labs/report/_export_report.py +272 -0
  38. sempy_labs/report/_report_functions.py +9 -261
  39. sempy_labs/tom/_model.py +278 -29
  40. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
  41. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
sempy_labs/_sql.py CHANGED
@@ -34,7 +34,7 @@ def _bytes2mswin_bstr(value: bytes) -> bytes:
 class ConnectBase:
     def __init__(
         self,
-        item: str,
+        item: str | UUID,
         workspace: Optional[Union[str, UUID]] = None,
         timeout: Optional[int] = None,
         endpoint_type: str = "warehouse",
@@ -49,6 +49,11 @@ class ConnectBase:
             (resource_name, resource_id) = resolve_item_name_and_id(
                 item=item, type=endpoint_type.capitalize(), workspace=workspace_id
             )
+        if endpoint_type == "sqldatabase":
+            # SQLDatabase has a special case for resolving the name and id
+            (resource_name, resource_id) = resolve_item_name_and_id(
+                item=item, type="SQLDatabase", workspace=workspace_id
+            )
         else:
             (resource_name, resource_id) = resolve_lakehouse_name_and_id(
                 lakehouse=item, workspace=workspace_id
@@ -61,6 +66,8 @@ class ConnectBase:
 
         if endpoint_type == "warehouse":
             tds_endpoint = response.json().get("properties", {}).get("connectionString")
+        if endpoint_type == "sqldatabase":
+            tds_endpoint = response.json().get("properties", {}).get("serverFqdn")
         else:
             tds_endpoint = (
                 response.json()
@@ -72,7 +79,10 @@ class ConnectBase:
         # Set up the connection string
         access_token = SynapseTokenProvider()()
         tokenstruct = _bytes2mswin_bstr(access_token.encode())
-        conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name};Encrypt=Yes;"
+        if endpoint_type == "sqldatabase":
+            conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name}-{resource_id};Encrypt=Yes;"
+        else:
+            conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name};Encrypt=Yes;"
 
         if timeout is not None:
             conn_str += f"Connect Timeout={timeout};"
@@ -141,10 +151,24 @@
 class ConnectWarehouse(ConnectBase):
     def __init__(
         self,
-        warehouse: str,
+        warehouse: str | UUID,
         workspace: Optional[Union[str, UUID]] = None,
-        timeout: Optional[int] = None,
+        timeout: int = 30,
     ):
+        """
+        Run a SQL or T-SQL query against a Fabric Warehouse.
+
+        Parameters
+        ----------
+        warehouse : str | uuid.UUID
+            The name or ID of the Fabric warehouse.
+        workspace : str | uuid.UUID, default=None
+            The name or ID of the workspace.
+            Defaults to None which resolves to the workspace of the attached lakehouse
+            or if no lakehouse attached, resolves to the workspace of the notebook.
+        timeout : int, default=30
+            The timeout for the connection in seconds.
+        """
         super().__init__(
             item=warehouse,
             workspace=workspace,
@@ -156,13 +180,56 @@ class ConnectWarehouse(ConnectBase):
 class ConnectLakehouse(ConnectBase):
     def __init__(
         self,
-        lakehouse: str,
+        lakehouse: str | UUID,
         workspace: Optional[Union[str, UUID]] = None,
-        timeout: Optional[int] = None,
+        timeout: int = 30,
     ):
+        """
+        Run a SQL or T-SQL query against a Fabric lakehouse.
+
+        Parameters
+        ----------
+        lakehouse : str | uuid.UUID
+            The name or ID of the Fabric lakehouse.
+        workspace : str | uuid.UUID, default=None
+            The name or ID of the workspace.
+            Defaults to None which resolves to the workspace of the attached lakehouse
+            or if no lakehouse attached, resolves to the workspace of the notebook.
+        timeout : int, default=30
+            The timeout for the connection in seconds.
+        """
         super().__init__(
             item=lakehouse,
             workspace=workspace,
             timeout=timeout,
             endpoint_type="lakehouse",
         )
+
+
+class ConnectSQLDatabase(ConnectBase):
+    def __init__(
+        self,
+        sql_database: str | UUID,
+        workspace: Optional[Union[str, UUID]] = None,
+        timeout: int = 30,
+    ):
+        """
+        Run a SQL or T-SQL query against a Fabric SQL database.
+
+        Parameters
+        ----------
+        sql_database : str | uuid.UUID
+            The name or ID of the Fabric SQL database.
+        workspace : str | uuid.UUID, default=None
+            The name or ID of the workspace.
+            Defaults to None which resolves to the workspace of the attached lakehouse
+            or if no lakehouse attached, resolves to the workspace of the notebook.
+        timeout : int, default=30
+            The timeout for the connection in seconds.
+        """
+        super().__init__(
+            item=sql_database,
+            workspace=workspace,
+            timeout=timeout,
+            endpoint_type="sqldatabase",
+        )
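Taken together, these hunks extend the TDS connection path already used for warehouses and lakehouses to Fabric SQL databases, which resolve by the SQLDatabase item type, expose their endpoint as serverFqdn, and use a {name}-{id} database name. A minimal usage sketch, assuming the context-manager protocol and query method that ConnectBase evidently provides (both are exercised by get_sql_database_tables later in this diff); the database name is a placeholder:

from sempy_labs._sql import ConnectSQLDatabase

# "AdventureWorks" is a placeholder name, not taken from this diff.
with ConnectSQLDatabase(sql_database="AdventureWorks", timeout=30) as sql:
    # ConnectBase resolves the item ID, fetches an AAD token, and builds the
    # ODBC connection string (DATABASE={resource_name}-{resource_id} here).
    df = sql.query("SELECT TOP 10 name FROM sys.tables")
print(df)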
sempy_labs/_sqldatabase.py ADDED
@@ -0,0 +1,227 @@
+import sempy.fabric as fabric
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+)
+import pandas as pd
+from typing import Optional
+import sempy_labs._icons as icons
+from uuid import UUID
+
+## Still debugging the creation of an sql database
+# def create_warehouse(
+#     warehouse: str,
+#     description: Optional[str] = None,
+#     case_insensitive_collation: bool = False,
+#     workspace: Optional[str | UUID] = None,
+# ):
+#     """
+#     Creates a Fabric warehouse.
+
+#     This is a wrapper function for the following API: `Items - Create Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/create-warehouse>`_.
+
+#     Parameters
+#     ----------
+#     warehouse: str
+#         Name of the warehouse.
+#     description : str, default=None
+#         A description of the warehouse.
+#     case_insensitive_collation: bool, default=False
+#         If True, creates the warehouse with case-insensitive collation.
+#     workspace : str | uuid.UUID, default=None
+#         The Fabric workspace name or ID.
+#         Defaults to None which resolves to the workspace of the attached lakehouse
+#         or if no lakehouse attached, resolves to the workspace of the notebook.
+#     """
+
+#     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+#     payload = {"displayName": warehouse}
+
+#     if description:
+#         payload["description"] = description
+#     if case_insensitive_collation:
+#         payload.setdefault("creationPayload", {})
+#         payload["creationPayload"][
+#             "defaultCollation"
+#         ] = "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
+
+#     _base_api(
+#         request=f"/v1/workspaces/{workspace_id}/warehouses",
+#         payload=payload,
+#         method="post",
+#         lro_return_status_code=True,
+#         status_codes=[201, 202],
+#     )
+
+#     print(
+#         f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
+#     )
+
+
+def _list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the databases within a workspace.
+
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the SQL databases within a workspace.
+    """
+
+    columns = {
+        "SQL Database Name": "string",
+        "SQL Database Id": "string",
+        "Description": "string",
+        "Connection Type": "string",
+        "Connection Info": "string",
+        "Database Name": "string",
+        "Server FQDN": "string",
+        "Provisioning Status": "string",
+        "Created Date": "datetime",
+        "Last Updated Time UTC": "datetime",
+    }
+    df = _create_dataframe(columns=columns)
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/sqldatabases", uses_pagination=True
+    )
+
+    for r in responses:
+        for v in r.get("value", []):
+            prop = v.get("properties", {})
+
+            new_data = {
+                "SQL Database Name": v.get("displayName"),
+                "SQL Database Id": v.get("id"),
+                "Description": v.get("description"),
+                "Connection Type": v.get("type"),
+                "Connection Info": prop.get("connectionInfo"),
+                "Database Name": prop.get("databaseName"),
+                "Server FQDN": prop.get("serverFqdn"),
+                "Provisioning Status": prop.get("provisioningState"),
+                "Created Date": prop.get("createdDate"),
+                "Last Updated Time UTC": prop.get("lastUpdatedTimeUtc"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+## Still debugging the deletion of an sql database
+# def delete_warehouse(name: str, workspace: Optional[str | UUID] = None):
+#     """
+#     Deletes a Fabric warehouse.
+
+#     This is a wrapper function for the following API: `Items - Delete Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/delete-warehouse>`_.
+
+#     Parameters
+#     ----------
+#     name: str
+#         Name of the warehouse.
+#     workspace : str | uuid.UUID, default=None
+#         The Fabric workspace name or ID.
+#         Defaults to None which resolves to the workspace of the attached lakehouse
+#         or if no lakehouse attached, resolves to the workspace of the notebook.
+#     """
+
+#     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+#     item_id = fabric.resolve_item_id(
+#         item_name=name, type="Warehouse", workspace=workspace_id
+#     )
+
+#     _base_api(
+#         request=f"/v1/workspaces/{workspace_id}/warehouses/{item_id}", method="delete"
+#     )
+
+#     print(
+#         f"{icons.green_dot} The '{name}' warehouse within the '{workspace_name}' workspace has been deleted."
+#     )
+
+
+def get_sql_database_tables(
+    sql_database: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Shows a list of the tables in the Fabric SQL database. This function is based on INFORMATION_SCHEMA.TABLES.
+
+    Parameters
+    ----------
+    sql_database : str | uuid.UUID
+        Name or ID of the Fabric SQL database.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the tables in the Fabric SQL database.
+    """
+
+    from sempy_labs._sql import ConnectSQLDatabase
+
+    with ConnectSQLDatabase(sql_database=sql_database, workspace=workspace) as sql:
+        df = sql.query(
+            """
+            SELECT TABLE_SCHEMA AS [Schema], TABLE_NAME AS [Table Name], TABLE_TYPE AS [Table Type]
+            FROM INFORMATION_SCHEMA.TABLES
+            WHERE TABLE_TYPE = 'BASE TABLE'
+            """
+        )
+
+    return df
+
+
+def get_sql_database_columns(
+    sql_database: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Shows a list of the columns in each table within the Fabric SQL database. This function is based on INFORMATION_SCHEMA.COLUMNS.
+
+    Parameters
+    ----------
+    sql_database : str | uuid.UUID
+        Name or ID of the Fabric SQL database.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the columns in each table within the Fabric SQL database.
+    """
+
+    from sempy_labs._sql import ConnectSQLDatabase
+
+    with ConnectSQLDatabase(sql_database=sql_database, workspace=workspace) as sql:
+        df = sql.query(
+            """
+            SELECT t.TABLE_SCHEMA AS [Schema], t.TABLE_NAME AS [Table Name], c.COLUMN_NAME AS [Column Name], c.DATA_TYPE AS [Data Type], c.IS_NULLABLE AS [Is Nullable], c.CHARACTER_MAXIMUM_LENGTH AS [Character Max Length]
+            FROM INFORMATION_SCHEMA.TABLES AS t
+            LEFT JOIN INFORMATION_SCHEMA.COLUMNS AS c
+                ON t.TABLE_NAME = c.TABLE_NAME
+                AND t.TABLE_SCHEMA = c.TABLE_SCHEMA
+            WHERE t.TABLE_TYPE = 'BASE TABLE'
+            """
+        )
+
+    return df
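A short usage sketch for the new module; the workspace and database names are placeholders, and note that _list_sql_databases is underscore-prefixed (private), so it may change without notice:

from sempy_labs._sqldatabase import (
    _list_sql_databases,
    get_sql_database_tables,
    get_sql_database_columns,
)

# "My Workspace" and "Sales" are placeholder names.
df_dbs = _list_sql_databases(workspace="My Workspace")
df_tables = get_sql_database_tables(sql_database="Sales", workspace="My Workspace")
df_cols = get_sql_database_columns(sql_database="Sales", workspace="My Workspace")
print(df_tables.head())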
sempy_labs/admin/__init__.py CHANGED
@@ -1,21 +1,48 @@
+from sempy_labs.admin._shared import (
+    list_widely_shared_artifacts,
+)
+from sempy_labs.admin._datasets import (
+    list_datasets,
+    list_dataset_users,
+)
+from sempy_labs.admin._apps import (
+    list_apps,
+    list_app_users,
+)
+from sempy_labs.admin._reports import (
+    list_reports,
+    list_report_users,
+)
+from sempy_labs.admin._activities import (
+    list_activity_events,
+)
 from sempy_labs.admin._scanner import (
     scan_workspaces,
 )
+from sempy_labs.admin._capacities import (
+    patch_capacity,
+    list_capacities,
+    get_capacity_assignment_status,
+    get_capacity_state,
+)
+from sempy_labs.admin._tenant import (
+    list_tenant_settings,
+    delete_capacity_tenant_setting_override,
+    update_tenant_setting,
+    update_capacity_tenant_setting_override,
+    list_workspaces_tenant_settings_overrides,
+    list_capacity_tenant_settings_overrides,
+    list_capacities_delegated_tenant_settings,
+    list_domain_tenant_settings_overrides,
+)
 from sempy_labs.admin._basic_functions import (
     assign_workspaces_to_capacity,
     unassign_workspaces_from_capacity,
     list_workspaces,
     list_workspace_access_details,
     list_modified_workspaces,
-    list_datasets,
-    list_reports,
-    list_capacities,
-    list_tenant_settings,
-    list_capacities_delegated_tenant_settings,
     list_access_entities,
-    list_activity_events,
-    get_capacity_assignment_status,
-    get_capacity_state,
+    list_workspace_users,
 )
 from sempy_labs.admin._domains import (
     list_domains,
@@ -72,4 +99,18 @@ __all__ = [
     "get_capacity_assignment_status",
     "scan_workspaces",
     "get_capacity_state",
+    "list_apps",
+    "list_app_users",
+    "list_dataset_users",
+    "list_report_users",
+    "patch_capacity",
+    "list_workspace_users",
+    "list_widely_shared_artifacts",
+    "delete_capacity_tenant_setting_override",
+    "update_tenant_setting",
+    "update_capacity_tenant_setting_override",
+    "list_workspaces_tenant_settings_overrides",
+    "list_capacity_tenant_settings_overrides",
+    "list_capacities_delegated_tenant_settings",
+    "list_domain_tenant_settings_overrides",
 ]
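The net effect of this reshuffle: admin helpers now live in topic-specific modules while sempy_labs.admin keeps re-exporting them, so existing call sites continue to resolve. The import paths implied by the hunks above:

# Re-exported at the package root, as before:
from sempy_labs.admin import list_capacities, list_apps, list_activity_events

# New homes for the relocated helpers:
from sempy_labs.admin._capacities import get_capacity_state
from sempy_labs.admin._tenant import list_tenant_settings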
sempy_labs/admin/_activities.py ADDED
@@ -0,0 +1,166 @@
+import pandas as pd
+from typing import Optional
+from dateutil.parser import parse as dtparser
+from sempy._utils._log import log
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+)
+import sempy_labs._icons as icons
+
+
+@log
+def list_activity_events(
+    start_time: str,
+    end_time: str,
+    activity_filter: Optional[str] = None,
+    user_id_filter: Optional[str] = None,
+    return_dataframe: bool = True,
+) -> pd.DataFrame | dict:
+    """
+    Shows a list of audit activity events for a tenant.
+
+    This is a wrapper function for the following API: `Admin - Get Activity Events <https://learn.microsoft.com/rest/api/power-bi/admin/get-activity-events>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    start_time : str
+        Start date and time of the window for audit event results. Example: "2024-09-25T07:55:00".
+    end_time : str
+        End date and time of the window for audit event results. Example: "2024-09-25T08:55:00".
+    activity_filter : str, default=None
+        Filter value for activities. Example: 'viewreport'.
+    user_id_filter : str, default=None
+        Email address of the user.
+    return_dataframe : bool, default=True
+        If True, returns a pandas.DataFrame; if False, returns the original JSON.
+
+    Returns
+    -------
+    pandas.DataFrame | dict
+        A pandas dataframe or JSON showing a list of audit activity events for a tenant.
+    """
+    start_dt = dtparser(start_time)
+    end_dt = dtparser(end_time)
+
+    if not start_dt.date() == end_dt.date():
+        raise ValueError(
+            f"{icons.red_dot} Start and End Times must be within the same UTC day. Please refer to the documentation here: https://learn.microsoft.com/rest/api/power-bi/admin/get-activity-events#get-audit-activity-events-within-a-time-window-and-for-a-specific-activity-type-and-user-id-example"
+        )
+
+    columns = {
+        "Id": "string",
+        "Record Type": "string",
+        "Creation Time": "datetime",
+        "Operation": "string",
+        "Organization Id": "string",
+        "User Type": "string",
+        "User Key": "string",
+        "Workload": "string",
+        "Result Status": "string",
+        "User Id": "string",
+        "Client IP": "string",
+        "User Agent": "string",
+        "Activity": "string",
+        "Workspace Name": "string",
+        "Workspace Id": "string",
+        "Object Id": "string",
+        "Request Id": "string",
+        "Object Type": "string",
+        "Object Display Name": "string",
+        "Experience": "string",
+        "Refresh Enforcement Policy": "string",
+        "Is Success": "bool",
+        "Activity Id": "string",
+        "Item Name": "string",
+        "Dataset Name": "string",
+        "Report Name": "string",
+        "Capacity Id": "string",
+        "Capacity Name": "string",
+        "App Name": "string",
+        "Dataset Id": "string",
+        "Report Id": "string",
+        "Artifact Id": "string",
+        "Artifact Name": "string",
+        "Report Type": "string",
+        "App Report Id": "string",
+        "Distribution Method": "string",
+        "Consumption Method": "string",
+        "Artifact Kind": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response_json = {"activityEventEntities": []}
+    url = f"/v1.0/myorg/admin/activityevents?startDateTime='{start_time}'&endDateTime='{end_time}'"
+
+    conditions = []
+    if activity_filter is not None:
+        conditions.append(f"Activity eq '{activity_filter}'")
+    if user_id_filter is not None:
+        conditions.append(f"UserId eq '{user_id_filter}'")
+
+    if conditions:
+        url += f"&$filter={f' and '.join(conditions)}"
+
+    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)
+
+    for r in responses:
+        if return_dataframe:
+            for i in r.get("activityEventEntities", []):
+                new_data = {
+                    "Id": i.get("id"),
+                    "Record Type": i.get("RecordType"),
+                    "Creation Time": i.get("CreationTime"),
+                    "Operation": i.get("Operation"),
+                    "Organization Id": i.get("OrganizationId"),
+                    "User Type": i.get("UserType"),
+                    "User Key": i.get("UserKey"),
+                    "Workload": i.get("Workload"),
+                    "Result Status": i.get("ResultStatus"),
+                    "User Id": i.get("UserId"),
+                    "Client IP": i.get("ClientIP"),
+                    "User Agent": i.get("UserAgent"),
+                    "Activity": i.get("Activity"),
+                    "Workspace Name": i.get("WorkSpaceName"),
+                    "Workspace Id": i.get("WorkspaceId"),
+                    "Object Id": i.get("ObjectId"),
+                    "Request Id": i.get("RequestId"),
+                    "Object Type": i.get("ObjectType"),
+                    "Object Display Name": i.get("ObjectDisplayName"),
+                    "Experience": i.get("Experience"),
+                    "Refresh Enforcement Policy": i.get("RefreshEnforcementPolicy"),
+                    "Is Success": i.get("IsSuccess"),
+                    "Activity Id": i.get("ActivityId"),
+                    "Item Name": i.get("ItemName"),
+                    "Dataset Name": i.get("DatasetName"),
+                    "Report Name": i.get("ReportName"),
+                    "Capacity Id": i.get("CapacityId"),
+                    "Capacity Name": i.get("CapacityName"),
+                    "App Name": i.get("AppName"),
+                    "Dataset Id": i.get("DatasetId"),
+                    "Report Id": i.get("ReportId"),
+                    "Artifact Id": i.get("ArtifactId"),
+                    "Artifact Name": i.get("ArtifactName"),
+                    "Report Type": i.get("ReportType"),
+                    "App Report Id": i.get("AppReportId"),
+                    "Distribution Method": i.get("DistributionMethod"),
+                    "Consumption Method": i.get("ConsumptionMethod"),
+                    "Artifact Kind": i.get("ArtifactKind"),
+                }
+                df = pd.concat(
+                    [df, pd.DataFrame(new_data, index=[0])],
+                    ignore_index=True,
+                )
+        else:
+            response_json["activityEventEntities"].extend(
+                r.get("activityEventEntities")
+            )
+
+    if return_dataframe:
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+        return df
+    else:
+        return response_json
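A usage sketch for the new wrapper; the timestamps and the 'viewreport' filter come straight from the docstring above, while the email address is a placeholder:

from sempy_labs.admin import list_activity_events

# Both timestamps must fall on the same UTC day, or the function raises ValueError.
df = list_activity_events(
    start_time="2024-09-25T07:55:00",
    end_time="2024-09-25T08:55:00",
    activity_filter="viewreport",       # optional: restrict to one activity type
    user_id_filter="user@contoso.com",  # placeholder email; optional filter
)
print(df[["Creation Time", "Activity", "User Id", "Report Name"]].head())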