semantic-link-labs 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic; see the package's entry on its registry for details.

Files changed (131)
  1. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +6 -4
  2. semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
  3. sempy_labs/__init__.py +56 -56
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_ai.py +1 -1
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +5 -5
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +2 -2
  10. sempy_labs/_dashboards.py +16 -16
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +101 -26
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_dax_query_view.py +1 -1
  15. sempy_labs/_delta_analyzer.py +4 -4
  16. sempy_labs/_delta_analyzer_history.py +1 -1
  17. sempy_labs/_deployment_pipelines.py +1 -1
  18. sempy_labs/_environments.py +22 -21
  19. sempy_labs/_eventhouses.py +12 -11
  20. sempy_labs/_eventstreams.py +12 -11
  21. sempy_labs/_external_data_shares.py +23 -22
  22. sempy_labs/_gateways.py +47 -45
  23. sempy_labs/_generate_semantic_model.py +3 -3
  24. sempy_labs/_git.py +1 -1
  25. sempy_labs/_graphQL.py +12 -11
  26. sempy_labs/_job_scheduler.py +56 -54
  27. sempy_labs/_kql_databases.py +16 -17
  28. sempy_labs/_kql_querysets.py +12 -11
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_list_functions.py +1 -1
  31. sempy_labs/_managed_private_endpoints.py +18 -15
  32. sempy_labs/_mirrored_databases.py +16 -15
  33. sempy_labs/_mirrored_warehouses.py +12 -11
  34. sempy_labs/_ml_experiments.py +11 -10
  35. sempy_labs/_ml_models.py +11 -10
  36. sempy_labs/_model_auto_build.py +3 -3
  37. sempy_labs/_model_bpa.py +5 -5
  38. sempy_labs/_model_bpa_bulk.py +3 -3
  39. sempy_labs/_model_dependencies.py +1 -1
  40. sempy_labs/_mounted_data_factories.py +12 -12
  41. sempy_labs/_notebooks.py +1 -1
  42. sempy_labs/_one_lake_integration.py +1 -1
  43. sempy_labs/_query_scale_out.py +1 -1
  44. sempy_labs/_refresh_semantic_model.py +1 -1
  45. sempy_labs/_semantic_models.py +30 -28
  46. sempy_labs/_spark.py +1 -1
  47. sempy_labs/_sql.py +1 -1
  48. sempy_labs/_sql_endpoints.py +12 -11
  49. sempy_labs/_sqldatabase.py +15 -15
  50. sempy_labs/_tags.py +11 -10
  51. sempy_labs/_translations.py +1 -1
  52. sempy_labs/_user_delegation_key.py +2 -2
  53. sempy_labs/_variable_libraries.py +13 -12
  54. sempy_labs/_vertipaq.py +3 -3
  55. sempy_labs/_vpax.py +1 -1
  56. sempy_labs/_warehouses.py +15 -14
  57. sempy_labs/_workloads.py +1 -1
  58. sempy_labs/_workspace_identity.py +1 -1
  59. sempy_labs/_workspaces.py +14 -13
  60. sempy_labs/admin/__init__.py +18 -18
  61. sempy_labs/admin/_activities.py +46 -46
  62. sempy_labs/admin/_apps.py +28 -26
  63. sempy_labs/admin/_artifacts.py +15 -15
  64. sempy_labs/admin/_basic_functions.py +1 -2
  65. sempy_labs/admin/_capacities.py +86 -82
  66. sempy_labs/admin/_dataflows.py +2 -2
  67. sempy_labs/admin/_datasets.py +50 -48
  68. sempy_labs/admin/_domains.py +25 -19
  69. sempy_labs/admin/_external_data_share.py +24 -22
  70. sempy_labs/admin/_git.py +17 -17
  71. sempy_labs/admin/_items.py +47 -45
  72. sempy_labs/admin/_reports.py +61 -58
  73. sempy_labs/admin/_scanner.py +2 -2
  74. sempy_labs/admin/_shared.py +18 -18
  75. sempy_labs/admin/_tags.py +2 -2
  76. sempy_labs/admin/_tenant.py +57 -51
  77. sempy_labs/admin/_users.py +16 -15
  78. sempy_labs/admin/_workspaces.py +2 -2
  79. sempy_labs/directlake/__init__.py +12 -12
  80. sempy_labs/directlake/_directlake_schema_compare.py +3 -3
  81. sempy_labs/directlake/_directlake_schema_sync.py +9 -7
  82. sempy_labs/directlake/_dl_helper.py +1 -1
  83. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  84. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  85. sempy_labs/directlake/_guardrails.py +1 -1
  86. sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
  90. sempy_labs/directlake/_warm_cache.py +3 -3
  91. sempy_labs/graph/__init__.py +3 -3
  92. sempy_labs/graph/_groups.py +81 -78
  93. sempy_labs/graph/_teams.py +21 -21
  94. sempy_labs/graph/_users.py +111 -10
  95. sempy_labs/lakehouse/__init__.py +7 -7
  96. sempy_labs/lakehouse/_blobs.py +30 -30
  97. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  98. sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
  99. sempy_labs/lakehouse/_helper.py +30 -2
  100. sempy_labs/lakehouse/_lakehouse.py +2 -2
  101. sempy_labs/lakehouse/_livy_sessions.py +47 -42
  102. sempy_labs/lakehouse/_shortcuts.py +22 -21
  103. sempy_labs/migration/__init__.py +8 -8
  104. sempy_labs/migration/_create_pqt_file.py +2 -2
  105. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
  106. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
  107. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
  108. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
  109. sempy_labs/migration/_migration_validation.py +1 -2
  110. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  111. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
  112. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
  113. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
  114. sempy_labs/report/__init__.py +10 -10
  115. sempy_labs/report/_download_report.py +2 -2
  116. sempy_labs/report/_export_report.py +2 -2
  117. sempy_labs/report/_generate_report.py +1 -1
  118. sempy_labs/report/_paginated.py +1 -1
  119. sempy_labs/report/_report_bpa.py +4 -3
  120. sempy_labs/report/_report_functions.py +3 -3
  121. sempy_labs/report/_report_list_functions.py +3 -3
  122. sempy_labs/report/_report_rebind.py +1 -1
  123. sempy_labs/report/_reportwrapper.py +247 -249
  124. sempy_labs/report/_save_report.py +3 -3
  125. sempy_labs/theme/_org_themes.py +35 -1
  126. sempy_labs/tom/__init__.py +1 -1
  127. sempy_labs/tom/_model.py +23 -20
  128. semantic_link_labs-0.11.0.dist-info/RECORD +0 -210
  129. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
  130. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
  131. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0

sempy_labs/admin/_datasets.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _build_url,
     _base_api,
     _create_dataframe,
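
This import rewrite repeats across the admin modules in this release: absolute imports of sibling sempy_labs modules become package-relative imports. A minimal sketch of the equivalence, reusing a helper name that appears in the diff (not re-verified against the wheel):

    # Inside sempy_labs/admin/_datasets.py both forms resolve to the same module:
    from sempy_labs._helper_functions import _base_api   # absolute form (0.11.0)
    from .._helper_functions import _base_api            # relative form (0.11.2)

The relative form is resolved against the importing module's own package (sempy_labs.admin), so it keeps working even if the top-level package is vendored or renamed.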
@@ -54,8 +54,8 @@ def list_datasets(
         "Content Provider Type": "string",
         "Create Report Embed URL": "string",
         "QnA Embed URL": "string",
-        "Upstream Datasets": "string",
-        "Users": "string",
+        "Upstream Datasets": "list",
+        "Users": "list",
         "Is In Place Sharing Enabled": "bool",
         "Workspace Id": "string",
         "Auto Sync Read Only Replicas": "bool",
@@ -79,39 +79,40 @@ def list_datasets(
     url = _build_url(url, params)
     response = _base_api(request=url, client="fabric_sp")
 
-    dfs = []
+    rows = []
     for v in response.json().get("value", []):
-        new_data = {
-            "Dataset Id": v.get("id"),
-            "Dataset Name": v.get("name"),
-            "Web URL": v.get("webUrl"),
-            "Add Rows API Enabled": v.get("addRowsAPIEnabled"),
-            "Configured By": v.get("configuredBy"),
-            "Is Refreshable": v.get("isRefreshable"),
-            "Is Effective Identity Required": v.get("isEffectiveIdentityRequired"),
-            "Is Effective Identity Roles Required": v.get(
-                "isEffectiveIdentityRolesRequired"
-            ),
-            "Target Storage Mode": v.get("targetStorageMode"),
-            "Created Date": pd.to_datetime(v.get("createdDate")),
-            "Content Provider Type": v.get("contentProviderType"),
-            "Create Report Embed URL": v.get("createReportEmbedURL"),
-            "QnA Embed URL": v.get("qnaEmbedURL"),
-            "Upstream Datasets": v.get("upstreamDatasets", []),
-            "Users": v.get("users", []),
-            "Is In Place Sharing Enabled": v.get("isInPlaceSharingEnabled"),
-            "Workspace Id": v.get("workspaceId"),
-            "Auto Sync Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
-                "autoSyncReadOnlyReplicas"
-            ),
-            "Max Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
-                "maxReadOnlyReplicas"
-            ),
-        }
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "Dataset Id": v.get("id"),
+                "Dataset Name": v.get("name"),
+                "Web URL": v.get("webUrl"),
+                "Add Rows API Enabled": v.get("addRowsAPIEnabled"),
+                "Configured By": v.get("configuredBy"),
+                "Is Refreshable": v.get("isRefreshable"),
+                "Is Effective Identity Required": v.get("isEffectiveIdentityRequired"),
+                "Is Effective Identity Roles Required": v.get(
+                    "isEffectiveIdentityRolesRequired"
+                ),
+                "Target Storage Mode": v.get("targetStorageMode"),
+                "Created Date": pd.to_datetime(v.get("createdDate")),
+                "Content Provider Type": v.get("contentProviderType"),
+                "Create Report Embed URL": v.get("createReportEmbedURL"),
+                "QnA Embed URL": v.get("qnaEmbedURL"),
+                "Upstream Datasets": v.get("upstreamDatasets", []),
+                "Users": v.get("users", []),
+                "Is In Place Sharing Enabled": v.get("isInPlaceSharingEnabled"),
+                "Workspace Id": v.get("workspaceId"),
+                "Auto Sync Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
+                    "autoSyncReadOnlyReplicas"
+                ),
+                "Max Read Only Replicas": v.get("queryScaleOutSettings", {}).get(
+                    "maxReadOnlyReplicas"
+                ),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
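
This hunk shows the refactor that repeats across the admin modules in 0.11.2: instead of wrapping each API record in a single-row DataFrame and concatenating them all at the end, the loop now collects plain dicts and builds one DataFrame, pinning the columns to the predeclared schema. A stripped-down sketch of the two patterns with generic field names (not the library's own helpers or real API data):

    import pandas as pd

    records = [{"id": "a", "name": "First"}, {"id": "b", "name": "Second"}]
    columns = {"Id": "string", "Name": "string"}

    # 0.11.0 pattern: one single-row DataFrame per record, concatenated at the end.
    dfs = [pd.DataFrame({"Id": r["id"], "Name": r["name"]}, index=[0]) for r in records]
    old_df = pd.concat(dfs, ignore_index=True)

    # 0.11.2 pattern: accumulate plain dicts and build the frame once; the explicit
    # columns= keeps the declared column order even if a record omits a key.
    rows = [{"Id": r["id"], "Name": r["name"]} for r in records]
    new_df = pd.DataFrame(rows, columns=list(columns.keys()))

    assert old_df.equals(new_df)

Building the frame in a single call avoids the per-row pd.concat overhead and the FutureWarning recent pandas versions can emit when concatenating empty or all-NA frames, and it keeps list-valued cells (such as the "Upstream Datasets" and "Users" fields above) as single cells, whereas pd.DataFrame(new_data, index=[0]) interprets a bare list as a column of values that must match the one-row index.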
@@ -165,20 +166,21 @@ def list_dataset_users(dataset: str | UUID) -> pd.DataFrame:
     url = f"/v1.0/myorg/admin/datasets/{dataset_id}/users"
     response = _base_api(request=url, client="fabric_sp")
 
-    dfs = []
+    rows = []
     for v in response.json().get("value", []):
-        new_data = {
-            "User Name": v.get("displayName"),
-            "Email Address": v.get("emailAddress"),
-            "Dataset User Access Right": v.get("datasetUserAccessRight"),
-            "Identifier": v.get("identifier"),
-            "Graph Id": v.get("graphId"),
-            "Principal Type": v.get("principalType"),
-        }
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "User Name": v.get("displayName"),
+                "Email Address": v.get("emailAddress"),
+                "Dataset User Access Right": v.get("datasetUserAccessRight"),
+                "Identifier": v.get("identifier"),
+                "Graph Id": v.get("graphId"),
+                "Principal Type": v.get("principalType"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
sempy_labs/admin/_domains.py CHANGED
@@ -2,8 +2,8 @@ from typing import Optional, List
 import sempy_labs._icons as icons
 import pandas as pd
 from uuid import UUID
-from sempy_labs.admin._basic_functions import list_workspaces
-from sempy_labs._helper_functions import (
+from ._basic_functions import list_workspaces
+from .._helper_functions import (
     _base_api,
     _create_dataframe,
     _is_valid_uuid,
@@ -119,19 +119,20 @@ def list_domains(non_empty_only: bool = False) -> pd.DataFrame:
 
     response = _base_api(request=url, client="fabric_sp")
 
-    dfs = []
+    rows = []
     for v in response.json().get("domains", []):
-        new_data = {
-            "Domain ID": v.get("id"),
-            "Domain Name": v.get("displayName"),
-            "Description": v.get("description"),
-            "Parent Domain ID": v.get("parentDomainId"),
-            "Contributors Scope": v.get("contributorsScope"),
-        }
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "Domain ID": v.get("id"),
+                "Domain Name": v.get("displayName"),
+                "Description": v.get("description"),
+                "Parent Domain ID": v.get("parentDomainId"),
+                "Contributors Scope": v.get("contributorsScope"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
@@ -177,12 +178,17 @@ def list_domain_workspaces(domain: Optional[str] = None, **kwargs) -> pd.DataFra
         request=f"/v1/admin/domains/{domain_id}/workspaces", client="fabric_sp"
     )
 
+    rows = []
     for v in response.json().get("value", []):
-        new_data = {
-            "Workspace ID": v.get("id"),
-            "Workspace Name": v.get("displayName"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+        rows.append(
+            {
+                "Workspace ID": v.get("id"),
+                "Workspace Name": v.get("displayName"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
sempy_labs/admin/_external_data_share.py CHANGED
@@ -1,8 +1,8 @@
 from uuid import UUID
 import sempy_labs._icons as icons
 import pandas as pd
-from sempy_labs.admin._basic_functions import _resolve_workspace_name_and_id
-from sempy_labs._helper_functions import (
+from ._basic_functions import _resolve_workspace_name_and_id
+from .._helper_functions import (
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
@@ -41,28 +41,30 @@ def list_external_data_shares() -> pd.DataFrame:
 
     response = _base_api(request="/v1/admin/items/externalDataShares")
 
-    dfs = []
+    rows = []
    for i in response.json().get("value", []):
         cp = i.get("creatorPrincipal", {})
-        new_data = {
-            "External Data Share Id": i.get("id"),
-            "Paths": [i.get("paths", [])],
-            "Creater Principal Id": cp.get("id"),
-            "Creater Principal Name": cp.get("displayName"),
-            "Creater Principal Type": cp.get("type"),
-            "Creater Principal UPN": cp.get("userDetails", {}).get("userPrincipalName"),
-            "Recipient UPN": i.get("recipient", {}).get("userPrincipalName"),
-            "Status": i.get("status"),
-            "Expiration Time UTC": i.get("expirationTimeUtc"),
-            "Workspace Id": i.get("workspaceId"),
-            "Item Id": i.get("itemId"),
-            "Invitation URL": i.get("invitationUrl"),
-        }
-
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "External Data Share Id": i.get("id"),
+                "Paths": [i.get("paths", [])],
+                "Creater Principal Id": cp.get("id"),
+                "Creater Principal Name": cp.get("displayName"),
+                "Creater Principal Type": cp.get("type"),
+                "Creater Principal UPN": cp.get("userDetails", {}).get(
+                    "userPrincipalName"
+                ),
+                "Recipient UPN": i.get("recipient", {}).get("userPrincipalName"),
+                "Status": i.get("status"),
+                "Expiration Time UTC": i.get("expirationTimeUtc"),
+                "Workspace Id": i.get("workspaceId"),
+                "Item Id": i.get("itemId"),
+                "Invitation URL": i.get("invitationUrl"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
sempy_labs/admin/_git.py CHANGED
@@ -1,9 +1,9 @@
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _base_api,
     _create_dataframe,
 )
 import pandas as pd
-from sempy_labs.admin._basic_functions import list_workspaces
+from ._basic_functions import list_workspaces
 from sempy._utils._log import log
 
 
@@ -40,25 +40,25 @@ def list_git_connections() -> pd.DataFrame:
         uses_pagination=True,
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             git = v.get("gitProviderDetails", {})
-            new_data = {
-                "Workspace Id": v.get("workspaceId"),
-                "Organization Name": git.get("organizationName"),
-                "Owner Name": git.get("ownerName"),
-                "Project Name": git.get("projectName"),
-                "Git Provider Type": git.get("gitProviderType"),
-                "Repository Name": git.get("repositoryName"),
-                "Branch Name": git.get("branchName"),
-                "Directory Name": git.get("directoryName"),
-            }
+            rows.append(
+                {
+                    "Workspace Id": v.get("workspaceId"),
+                    "Organization Name": git.get("organizationName"),
+                    "Owner Name": git.get("ownerName"),
+                    "Project Name": git.get("projectName"),
+                    "Git Provider Type": git.get("gitProviderType"),
+                    "Repository Name": git.get("repositoryName"),
+                    "Branch Name": git.get("branchName"),
+                    "Directory Name": git.get("directoryName"),
+                }
+            )
 
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         dfW = list_workspaces()
         df = pd.merge(
             df, dfW[["Id", "Name"]], left_on="Workspace Id", right_on="Id", how="left"
sempy_labs/admin/_items.py CHANGED
@@ -2,13 +2,13 @@ import pandas as pd
 from typing import Optional, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
-from sempy_labs.admin._basic_functions import (
+from ._basic_functions import (
     _resolve_workspace_name_and_id,
 )
-from sempy_labs.admin._capacities import (
+from ._capacities import (
     _resolve_capacity_name_and_id,
 )
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _is_valid_uuid,
     _build_url,
     _base_api,
@@ -150,31 +150,32 @@ def list_items(
 
     responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("itemEntities", []):
-            new_data = {
-                "Item Id": v.get("id"),
-                "Type": v.get("type"),
-                "Item Name": v.get("name"),
-                "Description": v.get("description"),
-                "State": v.get("state"),
-                "Last Updated Date": v.get("lastUpdatedDate"),
-                "Creator Principal Id": v.get("creatorPrincipal", {}).get("id"),
-                "Creator Principal Display Name": v.get("creatorPrincipal", {}).get(
-                    "displayName"
-                ),
-                "Creator Principal Type": v.get("creatorPrincipal", {}).get("type"),
-                "Creator User Principal Name": v.get("creatorPrincipal", {})
-                .get("userDetails", {})
-                .get("userPrincipalName"),
-                "Workspace Id": v.get("workspaceId"),
-                "Capacity Id": v.get("capacityId"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
+            rows.append(
+                {
+                    "Item Id": v.get("id"),
+                    "Type": v.get("type"),
+                    "Item Name": v.get("name"),
+                    "Description": v.get("description"),
+                    "State": v.get("state"),
+                    "Last Updated Date": v.get("lastUpdatedDate"),
+                    "Creator Principal Id": v.get("creatorPrincipal", {}).get("id"),
+                    "Creator Principal Display Name": v.get("creatorPrincipal", {}).get(
+                        "displayName"
+                    ),
+                    "Creator Principal Type": v.get("creatorPrincipal", {}).get("type"),
+                    "Creator User Principal Name": v.get("creatorPrincipal", {})
+                    .get("userDetails", {})
+                    .get("userPrincipalName"),
+                    "Workspace Id": v.get("workspaceId"),
+                    "Capacity Id": v.get("capacityId"),
+                }
+            )
 
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     if item is not None:
         if _is_valid_uuid(item):
@@ -250,26 +251,27 @@ def list_item_access_details(
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for v in response.json().get("accessDetails", []):
-        new_data = {
-            "User Id": v.get("principal", {}).get("id"),
-            "User Name": v.get("principal", {}).get("displayName"),
-            "User Type": v.get("principal", {}).get("type"),
-            "User Principal Name": v.get("principal", {})
-            .get("userDetails", {})
-            .get("userPrincipalName"),
-            "Item Type": v.get("itemAccessDetails", {}).get("type"),
-            "Permissions": v.get("itemAccessDetails", {}).get("permissions"),
-            "Additional Permissions": v.get("itemAccessDetails", {}).get(
-                "additionalPermissions"
-            ),
-            "Item Name": item_name,
-            "Item Id": item_id,
-        }
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "User Id": v.get("principal", {}).get("id"),
+                "User Name": v.get("principal", {}).get("displayName"),
+                "User Type": v.get("principal", {}).get("type"),
+                "User Principal Name": v.get("principal", {})
+                .get("userDetails", {})
+                .get("userPrincipalName"),
+                "Item Type": v.get("itemAccessDetails", {}).get("type"),
+                "Permissions": v.get("itemAccessDetails", {}).get("permissions"),
+                "Additional Permissions": v.get("itemAccessDetails", {}).get(
+                    "additionalPermissions"
+                ),
+                "Item Name": item_name,
+                "Item Id": item_id,
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
sempy_labs/admin/_reports.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
@@ -70,29 +70,30 @@ def list_reports(
     url.rstrip("$").rstrip("?")
     response = _base_api(request=url, client="fabric_sp")
 
-    dfs = []
+    rows = []
     for v in response.json().get("value", []):
-        new_data = {
-            "Report Id": v.get("id"),
-            "Report Name": v.get("name"),
-            "Type": v.get("reportType"),
-            "Web URL": v.get("webUrl"),
-            "Embed URL": v.get("embedUrl"),
-            "Dataset Id": v.get("datasetId"),
-            "Created Date": v.get("createdDateTime"),
-            "Modified Date": v.get("modifiedDateTime"),
-            "Created By": v.get("createdBy"),
-            "Modified By": v.get("modifiedBy"),
-            "Sensitivity Label Id": v.get("sensitivityLabel", {}).get("labelId"),
-            "Users": v.get("users"),
-            "Subscriptions": v.get("subscriptions"),
-            "Workspace Id": v.get("workspaceId"),
-            "Report Flags": v.get("reportFlags"),
-        }
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "Report Id": v.get("id"),
+                "Report Name": v.get("name"),
+                "Type": v.get("reportType"),
+                "Web URL": v.get("webUrl"),
+                "Embed URL": v.get("embedUrl"),
+                "Dataset Id": v.get("datasetId"),
+                "Created Date": v.get("createdDateTime"),
+                "Modified Date": v.get("modifiedDateTime"),
+                "Created By": v.get("createdBy"),
+                "Modified By": v.get("modifiedBy"),
+                "Sensitivity Label Id": v.get("sensitivityLabel", {}).get("labelId"),
+                "Users": v.get("users"),
+                "Subscriptions": v.get("subscriptions"),
+                "Workspace Id": v.get("workspaceId"),
+                "Report Flags": v.get("reportFlags"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
@@ -146,20 +147,21 @@ def list_report_users(report: str | UUID) -> pd.DataFrame:
     url = f"/v1.0/myorg/admin/reports/{report_id}/users"
     response = _base_api(request=url, client="fabric_sp")
 
-    dfs = []
+    rows = []
     for v in response.json().get("value", []):
-        new_data = {
-            "User Name": v.get("displayName"),
-            "Email Address": v.get("emailAddress"),
-            "Report User Access Right": v.get("reportUserAccessRight"),
-            "Identifier": v.get("identifier"),
-            "Graph Id": v.get("graphId"),
-            "Principal Type": v.get("principalType"),
-        }
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "User Name": v.get("displayName"),
+                "Email Address": v.get("emailAddress"),
+                "Report User Access Right": v.get("reportUserAccessRight"),
+                "Identifier": v.get("identifier"),
+                "Graph Id": v.get("graphId"),
+                "Principal Type": v.get("principalType"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
@@ -201,7 +203,7 @@ def list_report_subscriptions(report: str | UUID) -> pd.DataFrame:
         "Link To Content": "bool",
         "Preview Image": "bool",
         "Attachment Format": "string",
-        "Users": "string",
+        "Users": "list",
     }
 
     df = _create_dataframe(columns=columns)
@@ -211,28 +213,29 @@
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for v in response.json().get("value", []):
-        new_data = {
-            "Subscription Id": v.get("id"),
-            "Title": v.get("title"),
-            "Artifact Id": v.get("artifactId"),
-            "Artifact Name": v.get("artifactDisplayName"),
-            "Sub Artifact Name": v.get("subArtifactDisplayName"),
-            "Artifact Type": v.get("artifactType"),
-            "Is Enabled": v.get("isEnabled"),
-            "Frequency": v.get("frequency"),
-            "Start Date": v.get("startDate"),
-            "End Date": v.get("endDate"),
-            "Link To Content": v.get("linkToContent"),
-            "Preview Image": v.get("previewImage"),
-            "Attachment Format": v.get("attachmentFormat"),
-            "Users": str(v.get("users")),
-        }
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "Subscription Id": v.get("id"),
+                "Title": v.get("title"),
+                "Artifact Id": v.get("artifactId"),
+                "Artifact Name": v.get("artifactDisplayName"),
+                "Sub Artifact Name": v.get("subArtifactDisplayName"),
+                "Artifact Type": v.get("artifactType"),
+                "Is Enabled": v.get("isEnabled"),
+                "Frequency": v.get("frequency"),
+                "Start Date": v.get("startDate"),
+                "End Date": v.get("endDate"),
+                "Link To Content": v.get("linkToContent"),
+                "Preview Image": v.get("previewImage"),
+                "Attachment Format": v.get("attachmentFormat"),
+                "Users": v.get("users", []),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
         _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
sempy_labs/admin/_scanner.py CHANGED
@@ -3,9 +3,9 @@ from uuid import UUID
 from sempy.fabric.exceptions import FabricHTTPException
 import time
 import sempy_labs._icons as icons
-from sempy_labs.admin._basic_functions import list_workspaces
+from ._basic_functions import list_workspaces
 from sempy._utils._log import log
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _base_api,
     _is_valid_uuid,
     _build_url,