semantic-link-labs 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Automated checks flagged this version of semantic-link-labs as potentially problematic. Consult the package registry's advisory page for this release for further details.

Files changed (131):
  1. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +6 -4
  2. semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
  3. sempy_labs/__init__.py +56 -56
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_ai.py +1 -1
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +5 -5
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +2 -2
  10. sempy_labs/_dashboards.py +16 -16
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +101 -26
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_dax_query_view.py +1 -1
  15. sempy_labs/_delta_analyzer.py +4 -4
  16. sempy_labs/_delta_analyzer_history.py +1 -1
  17. sempy_labs/_deployment_pipelines.py +1 -1
  18. sempy_labs/_environments.py +22 -21
  19. sempy_labs/_eventhouses.py +12 -11
  20. sempy_labs/_eventstreams.py +12 -11
  21. sempy_labs/_external_data_shares.py +23 -22
  22. sempy_labs/_gateways.py +47 -45
  23. sempy_labs/_generate_semantic_model.py +3 -3
  24. sempy_labs/_git.py +1 -1
  25. sempy_labs/_graphQL.py +12 -11
  26. sempy_labs/_job_scheduler.py +56 -54
  27. sempy_labs/_kql_databases.py +16 -17
  28. sempy_labs/_kql_querysets.py +12 -11
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_list_functions.py +1 -1
  31. sempy_labs/_managed_private_endpoints.py +18 -15
  32. sempy_labs/_mirrored_databases.py +16 -15
  33. sempy_labs/_mirrored_warehouses.py +12 -11
  34. sempy_labs/_ml_experiments.py +11 -10
  35. sempy_labs/_ml_models.py +11 -10
  36. sempy_labs/_model_auto_build.py +3 -3
  37. sempy_labs/_model_bpa.py +5 -5
  38. sempy_labs/_model_bpa_bulk.py +3 -3
  39. sempy_labs/_model_dependencies.py +1 -1
  40. sempy_labs/_mounted_data_factories.py +12 -12
  41. sempy_labs/_notebooks.py +1 -1
  42. sempy_labs/_one_lake_integration.py +1 -1
  43. sempy_labs/_query_scale_out.py +1 -1
  44. sempy_labs/_refresh_semantic_model.py +1 -1
  45. sempy_labs/_semantic_models.py +30 -28
  46. sempy_labs/_spark.py +1 -1
  47. sempy_labs/_sql.py +1 -1
  48. sempy_labs/_sql_endpoints.py +12 -11
  49. sempy_labs/_sqldatabase.py +15 -15
  50. sempy_labs/_tags.py +11 -10
  51. sempy_labs/_translations.py +1 -1
  52. sempy_labs/_user_delegation_key.py +2 -2
  53. sempy_labs/_variable_libraries.py +13 -12
  54. sempy_labs/_vertipaq.py +3 -3
  55. sempy_labs/_vpax.py +1 -1
  56. sempy_labs/_warehouses.py +15 -14
  57. sempy_labs/_workloads.py +1 -1
  58. sempy_labs/_workspace_identity.py +1 -1
  59. sempy_labs/_workspaces.py +14 -13
  60. sempy_labs/admin/__init__.py +18 -18
  61. sempy_labs/admin/_activities.py +46 -46
  62. sempy_labs/admin/_apps.py +28 -26
  63. sempy_labs/admin/_artifacts.py +15 -15
  64. sempy_labs/admin/_basic_functions.py +1 -2
  65. sempy_labs/admin/_capacities.py +86 -82
  66. sempy_labs/admin/_dataflows.py +2 -2
  67. sempy_labs/admin/_datasets.py +50 -48
  68. sempy_labs/admin/_domains.py +25 -19
  69. sempy_labs/admin/_external_data_share.py +24 -22
  70. sempy_labs/admin/_git.py +17 -17
  71. sempy_labs/admin/_items.py +47 -45
  72. sempy_labs/admin/_reports.py +61 -58
  73. sempy_labs/admin/_scanner.py +2 -2
  74. sempy_labs/admin/_shared.py +18 -18
  75. sempy_labs/admin/_tags.py +2 -2
  76. sempy_labs/admin/_tenant.py +57 -51
  77. sempy_labs/admin/_users.py +16 -15
  78. sempy_labs/admin/_workspaces.py +2 -2
  79. sempy_labs/directlake/__init__.py +12 -12
  80. sempy_labs/directlake/_directlake_schema_compare.py +3 -3
  81. sempy_labs/directlake/_directlake_schema_sync.py +9 -7
  82. sempy_labs/directlake/_dl_helper.py +1 -1
  83. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  84. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  85. sempy_labs/directlake/_guardrails.py +1 -1
  86. sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
  90. sempy_labs/directlake/_warm_cache.py +3 -3
  91. sempy_labs/graph/__init__.py +3 -3
  92. sempy_labs/graph/_groups.py +81 -78
  93. sempy_labs/graph/_teams.py +21 -21
  94. sempy_labs/graph/_users.py +111 -10
  95. sempy_labs/lakehouse/__init__.py +7 -7
  96. sempy_labs/lakehouse/_blobs.py +30 -30
  97. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  98. sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
  99. sempy_labs/lakehouse/_helper.py +30 -2
  100. sempy_labs/lakehouse/_lakehouse.py +2 -2
  101. sempy_labs/lakehouse/_livy_sessions.py +47 -42
  102. sempy_labs/lakehouse/_shortcuts.py +22 -21
  103. sempy_labs/migration/__init__.py +8 -8
  104. sempy_labs/migration/_create_pqt_file.py +2 -2
  105. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
  106. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
  107. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
  108. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
  109. sempy_labs/migration/_migration_validation.py +1 -2
  110. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  111. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
  112. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
  113. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
  114. sempy_labs/report/__init__.py +10 -10
  115. sempy_labs/report/_download_report.py +2 -2
  116. sempy_labs/report/_export_report.py +2 -2
  117. sempy_labs/report/_generate_report.py +1 -1
  118. sempy_labs/report/_paginated.py +1 -1
  119. sempy_labs/report/_report_bpa.py +4 -3
  120. sempy_labs/report/_report_functions.py +3 -3
  121. sempy_labs/report/_report_list_functions.py +3 -3
  122. sempy_labs/report/_report_rebind.py +1 -1
  123. sempy_labs/report/_reportwrapper.py +247 -249
  124. sempy_labs/report/_save_report.py +3 -3
  125. sempy_labs/theme/_org_themes.py +35 -1
  126. sempy_labs/tom/__init__.py +1 -1
  127. sempy_labs/tom/_model.py +23 -20
  128. semantic_link_labs-0.11.0.dist-info/RECORD +0 -210
  129. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
  130. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
  131. {semantic_link_labs-0.11.0.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0
sempy_labs/_ml_models.py CHANGED
@@ -1,6 +1,6 @@
1
1
  import pandas as pd
2
2
  from typing import Optional
3
- from sempy_labs._helper_functions import (
3
+ from ._helper_functions import (
4
4
  resolve_workspace_id,
5
5
  _base_api,
6
6
  delete_item,
@@ -46,22 +46,23 @@ def list_ml_models(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
46
46
  uses_pagination=True,
47
47
  )
48
48
 
49
- dfs = []
49
+ rows = []
50
50
  for r in responses:
51
51
  for v in r.get("value", []):
52
52
  model_id = v.get("id")
53
53
  modelName = v.get("displayName")
54
54
  desc = v.get("description")
55
55
 
56
- new_data = {
57
- "ML Model Name": modelName,
58
- "ML Model Id": model_id,
59
- "Description": desc,
60
- }
61
- dfs.append(pd.DataFrame(new_data, index=[0]))
56
+ rows.append(
57
+ {
58
+ "ML Model Name": modelName,
59
+ "ML Model Id": model_id,
60
+ "Description": desc,
61
+ }
62
+ )
62
63
 
63
- if dfs:
64
- df = pd.concat(dfs, ignore_index=True)
64
+ if rows:
65
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
65
66
 
66
67
  return df
67
68
 
@@ -1,8 +1,8 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs.tom import connect_semantic_model
4
- from sempy_labs._generate_semantic_model import create_blank_semantic_model
5
- from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
3
+ from .tom import connect_semantic_model
4
+ from ._generate_semantic_model import create_blank_semantic_model
5
+ from .directlake._generate_shared_expression import generate_shared_expression
6
6
  from typing import Optional
7
7
  from sempy._utils._log import log
8
8
 
sempy_labs/_model_bpa.py CHANGED
@@ -3,8 +3,8 @@ import pandas as pd
3
3
  import warnings
4
4
  import datetime
5
5
  from IPython.display import display, HTML
6
- from sempy_labs._model_dependencies import get_model_calc_dependencies
7
- from sempy_labs._helper_functions import (
6
+ from ._model_dependencies import get_model_calc_dependencies
7
+ from ._helper_functions import (
8
8
  format_dax_object_name,
9
9
  create_relationship_name,
10
10
  save_as_delta_table,
@@ -15,9 +15,9 @@ from sempy_labs._helper_functions import (
15
15
  resolve_workspace_name_and_id,
16
16
  _create_spark_session,
17
17
  )
18
- from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
19
- from sempy_labs.tom import connect_semantic_model
20
- from sempy_labs._model_bpa_rules import model_bpa_rules
18
+ from .lakehouse import get_lakehouse_tables, lakehouse_attached
19
+ from .tom import connect_semantic_model
20
+ from ._model_bpa_rules import model_bpa_rules
21
21
  from typing import Optional
22
22
  from sempy._utils._log import log
23
23
  import sempy_labs._icons as icons
@@ -1,7 +1,7 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  import datetime
4
- from sempy_labs._helper_functions import (
4
+ from ._helper_functions import (
5
5
  save_as_delta_table,
6
6
  resolve_workspace_capacity,
7
7
  retry,
@@ -9,11 +9,11 @@ from sempy_labs._helper_functions import (
9
9
  resolve_workspace_id,
10
10
  resolve_lakehouse_name_and_id,
11
11
  )
12
- from sempy_labs.lakehouse import (
12
+ from .lakehouse import (
13
13
  get_lakehouse_tables,
14
14
  lakehouse_attached,
15
15
  )
16
- from sempy_labs._model_bpa import run_model_bpa
16
+ from ._model_bpa import run_model_bpa
17
17
  from typing import Optional, List
18
18
  from sempy._utils._log import log
19
19
  import sempy_labs._icons as icons
@@ -1,6 +1,6 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs._helper_functions import (
3
+ from ._helper_functions import (
4
4
  format_dax_object_name,
5
5
  resolve_dataset_name_and_id,
6
6
  resolve_workspace_name_and_id,
@@ -1,6 +1,6 @@
1
1
  import pandas as pd
2
2
  from typing import Optional
3
- from sempy_labs._helper_functions import (
3
+ from ._helper_functions import (
4
4
  resolve_workspace_id,
5
5
  _base_api,
6
6
  _create_dataframe,
@@ -49,19 +49,19 @@ def list_mounted_data_factories(
49
49
  uses_pagination=True,
50
50
  )
51
51
 
52
- dfs = []
52
+ rows = []
53
53
  for r in responses:
54
54
  for v in r.get("value", []):
55
- new_data = {
56
- "Mounted Data Factory Name": v.get("displayName"),
57
- "Mounted Data Factory Id": v.get("id"),
58
- "Description": v.get("description"),
59
- }
60
-
61
- dfs.append(pd.DataFrame(new_data, index=[0]))
62
-
63
- if dfs:
64
- df = pd.concat(dfs, ignore_index=True)
55
+ rows.append(
56
+ {
57
+ "Mounted Data Factory Name": v.get("displayName"),
58
+ "Mounted Data Factory Id": v.get("id"),
59
+ "Description": v.get("description"),
60
+ }
61
+ )
62
+
63
+ if rows:
64
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
65
65
  _update_dataframe_datatypes(dataframe=df, column_map=columns)
66
66
 
67
67
  return df
sempy_labs/_notebooks.py CHANGED
@@ -5,7 +5,7 @@ from typing import Optional
5
5
  import base64
6
6
  import requests
7
7
  from sempy._utils._log import log
8
- from sempy_labs._helper_functions import (
8
+ from ._helper_functions import (
9
9
  resolve_workspace_name_and_id,
10
10
  resolve_workspace_id,
11
11
  _decode_b64,
@@ -2,7 +2,7 @@ import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  from typing import Optional
4
4
  from sempy._utils._log import log
5
- from sempy_labs._helper_functions import (
5
+ from ._helper_functions import (
6
6
  resolve_workspace_name_and_id,
7
7
  resolve_dataset_name_and_id,
8
8
  resolve_workspace_id,
@@ -1,6 +1,6 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs._helper_functions import (
3
+ from ._helper_functions import (
4
4
  resolve_workspace_name_and_id,
5
5
  resolve_dataset_name_and_id,
6
6
  _update_dataframe_datatypes,
@@ -1,6 +1,6 @@
1
1
  import sempy.fabric as fabric
2
2
  import time
3
- from sempy_labs._helper_functions import (
3
+ from ._helper_functions import (
4
4
  resolve_workspace_name_and_id,
5
5
  _get_partition_map,
6
6
  _process_and_display_chart,
@@ -1,7 +1,7 @@
1
1
  from uuid import UUID
2
2
  from typing import Optional, List
3
3
  import pandas as pd
4
- from sempy_labs._helper_functions import (
4
+ from ._helper_functions import (
5
5
  _create_dataframe,
6
6
  _base_api,
7
7
  _update_dataframe_datatypes,
@@ -298,39 +298,41 @@ def list_semantic_model_datasources(
298
298
  client="fabric_sp",
299
299
  )
300
300
 
301
- dfs = []
301
+ rows = []
302
302
  for item in response.json().get("value", []):
303
303
  ds_type = item.get("datasourceType")
304
304
  conn_details = item.get("connectionDetails", {})
305
305
  ds_id = item.get("datasourceId")
306
306
  gateway_id = item.get("gatewayId")
307
307
  if expand_details:
308
- new_data = {
309
- "Datasource Type": ds_type,
310
- "Connection Server": conn_details.get("server"),
311
- "Connection Database": conn_details.get("database"),
312
- "Connection Path": conn_details.get("path"),
313
- "Connection Account": conn_details.get("account"),
314
- "Connection Domain": conn_details.get("domain"),
315
- "Connection Kind": conn_details.get("kind"),
316
- "Connection Email Address": conn_details.get("emailAddress"),
317
- "Connection URL": conn_details.get("url"),
318
- "Connection Class Info": conn_details.get("classInfo"),
319
- "Connection Login Server": conn_details.get("loginServer"),
320
- "Datasource Id": ds_id,
321
- "Gateway Id": gateway_id,
322
- }
323
- dfs.append(pd.DataFrame(new_data, index=[0]))
308
+ rows.append(
309
+ {
310
+ "Datasource Type": ds_type,
311
+ "Connection Server": conn_details.get("server"),
312
+ "Connection Database": conn_details.get("database"),
313
+ "Connection Path": conn_details.get("path"),
314
+ "Connection Account": conn_details.get("account"),
315
+ "Connection Domain": conn_details.get("domain"),
316
+ "Connection Kind": conn_details.get("kind"),
317
+ "Connection Email Address": conn_details.get("emailAddress"),
318
+ "Connection URL": conn_details.get("url"),
319
+ "Connection Class Info": conn_details.get("classInfo"),
320
+ "Connection Login Server": conn_details.get("loginServer"),
321
+ "Datasource Id": ds_id,
322
+ "Gateway Id": gateway_id,
323
+ }
324
+ )
324
325
  else:
325
- new_data = {
326
- "Datasource Type": ds_type,
327
- "Connection Details": conn_details,
328
- "Datasource Id": ds_id,
329
- "Gateway Id": gateway_id,
330
- }
331
- dfs.append(pd.DataFrame([new_data]))
332
-
333
- if dfs:
334
- df = pd.concat(dfs, ignore_index=True)
326
+ rows.append(
327
+ {
328
+ "Datasource Type": ds_type,
329
+ "Connection Details": conn_details,
330
+ "Datasource Id": ds_id,
331
+ "Gateway Id": gateway_id,
332
+ }
333
+ )
334
+
335
+ if rows:
336
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
335
337
 
336
338
  return df
sempy_labs/_spark.py CHANGED
@@ -1,7 +1,7 @@
1
1
  import pandas as pd
2
2
  import sempy_labs._icons as icons
3
3
  from typing import Optional
4
- from sempy_labs._helper_functions import (
4
+ from ._helper_functions import (
5
5
  resolve_workspace_name_and_id,
6
6
  _update_dataframe_datatypes,
7
7
  _base_api,
sempy_labs/_sql.py CHANGED
@@ -3,7 +3,7 @@ from typing import Optional, Union, List
3
3
  from sempy._utils._log import log
4
4
  import struct
5
5
  from itertools import chain, repeat
6
- from sempy_labs._helper_functions import (
6
+ from ._helper_functions import (
7
7
  resolve_lakehouse_name_and_id,
8
8
  resolve_item_name_and_id,
9
9
  resolve_workspace_name_and_id,
@@ -1,7 +1,7 @@
1
1
  from typing import Optional, Literal
2
2
  from uuid import UUID
3
3
  import pandas as pd
4
- from sempy_labs._helper_functions import (
4
+ from ._helper_functions import (
5
5
  _base_api,
6
6
  _create_dataframe,
7
7
  resolve_workspace_name_and_id,
@@ -44,19 +44,19 @@ def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
44
44
  request=f"/v1/workspaces/{workspace_id}/sqlEndpoints", uses_pagination=True
45
45
  )
46
46
 
47
- dfs = []
47
+ rows = []
48
48
  for r in responses:
49
49
  for v in r.get("value", []):
50
+ rows.append(
51
+ {
52
+ "SQL Endpoint Id": v.get("id"),
53
+ "SQL Endpoint Name": v.get("displayName"),
54
+ "Description": v.get("description"),
55
+ }
56
+ )
50
57
 
51
- new_data = {
52
- "SQL Endpoint Id": v.get("id"),
53
- "SQL Endpoint Name": v.get("displayName"),
54
- "Description": v.get("description"),
55
- }
56
- dfs.append(pd.DataFrame(new_data, index=[0]))
57
-
58
- if dfs:
59
- df = pd.concat(dfs, ignore_index=True)
58
+ if rows:
59
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
60
60
 
61
61
  return df
62
62
 
@@ -142,6 +142,7 @@ def refresh_sql_endpoint_metadata(
142
142
  result = _base_api(
143
143
  request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata?preview=true",
144
144
  method="post",
145
+ client="fabric_sp",
145
146
  status_codes=[200, 202],
146
147
  lro_return_json=True,
147
148
  payload=payload,
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from ._helper_functions import (
2
2
  resolve_workspace_id,
3
3
  _base_api,
4
4
  _create_dataframe,
@@ -100,23 +100,23 @@ def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
100
100
  client="fabric_sp",
101
101
  )
102
102
 
103
- dfs = []
103
+ rows = []
104
104
  for r in responses:
105
105
  for v in r.get("value", []):
106
106
  prop = v.get("properties", {})
107
- new_data = {
108
- "SQL Database Name": v.get("displayName"),
109
- "SQL Database Id": v.get("id"),
110
- "Description": v.get("description"),
111
- "Connection Info": prop.get("connectionInfo"),
112
- "Database Name": prop.get("databaseName"),
113
- "Server FQDN": prop.get("serverFqdn"),
114
- }
115
-
116
- dfs.append(pd.DataFrame(new_data, index=[0]))
117
-
118
- if dfs:
119
- df = pd.concat(dfs, ignore_index=True)
107
+ rows.append(
108
+ {
109
+ "SQL Database Name": v.get("displayName"),
110
+ "SQL Database Id": v.get("id"),
111
+ "Description": v.get("description"),
112
+ "Connection Info": prop.get("connectionInfo"),
113
+ "Database Name": prop.get("databaseName"),
114
+ "Server FQDN": prop.get("serverFqdn"),
115
+ }
116
+ )
117
+
118
+ if rows:
119
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
120
120
  _update_dataframe_datatypes(dataframe=df, column_map=columns)
121
121
 
122
122
  return df
sempy_labs/_tags.py CHANGED
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from ._helper_functions import (
2
2
  _base_api,
3
3
  _create_dataframe,
4
4
  _update_dataframe_datatypes,
@@ -40,17 +40,18 @@ def list_tags() -> pd.DataFrame:
40
40
  client="fabric_sp",
41
41
  )
42
42
 
43
- dfs = []
43
+ rows = []
44
44
  for r in responses:
45
45
  for v in r.get("value", []):
46
- new_data = {
47
- "Tag Name": v.get("displayName"),
48
- "Tag Id": v.get("id"),
49
- }
50
- dfs.append(pd.DataFrame(new_data, index=[0]))
51
-
52
- if dfs:
53
- df = pd.concat(dfs, ignore_index=True)
46
+ rows.append(
47
+ {
48
+ "Tag Name": v.get("displayName"),
49
+ "Tag Id": v.get("id"),
50
+ }
51
+ )
52
+
53
+ if rows:
54
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
54
55
  _update_dataframe_datatypes(dataframe=df, column_map=columns)
55
56
 
56
57
  return df
@@ -3,7 +3,7 @@ import pandas as pd
3
3
  from typing import List, Optional, Union
4
4
  from sempy._utils._log import log
5
5
  import sempy_labs._icons as icons
6
- from sempy_labs._helper_functions import (
6
+ from ._helper_functions import (
7
7
  get_language_codes,
8
8
  _create_spark_session,
9
9
  )
@@ -1,5 +1,5 @@
1
- from sempy_labs.lakehouse._blobs import _request_blob_api
2
- from sempy_labs._helper_functions import (
1
+ from .lakehouse._blobs import _request_blob_api
2
+ from ._helper_functions import (
3
3
  _xml_to_dict,
4
4
  )
5
5
  from datetime import datetime, timedelta, timezone
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from ._helper_functions import (
2
2
  resolve_workspace_id,
3
3
  _base_api,
4
4
  _create_dataframe,
@@ -49,21 +49,22 @@ def list_variable_libraries(workspace: Optional[str | UUID] = None) -> pd.DataFr
49
49
  client="fabric_sp",
50
50
  )
51
51
 
52
- dfs = []
52
+ rows = []
53
53
  for r in responses:
54
54
  for v in r.get("value", []):
55
55
  prop = v.get("properties", {})
56
56
 
57
- new_data = {
58
- "Variable Library Name": v.get("displayName"),
59
- "Variable Library Id": v.get("id"),
60
- "Description": v.get("description"),
61
- "Active Value Set Name": prop.get("activeValueSetName"),
62
- }
63
- dfs.append(pd.DataFrame(new_data, index=[0]))
64
-
65
- if dfs:
66
- df = pd.concat(dfs, ignore_index=True)
57
+ rows.append(
58
+ {
59
+ "Variable Library Name": v.get("displayName"),
60
+ "Variable Library Id": v.get("id"),
61
+ "Description": v.get("description"),
62
+ "Active Value Set Name": prop.get("activeValueSetName"),
63
+ }
64
+ )
65
+
66
+ if rows:
67
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
67
68
  _update_dataframe_datatypes(dataframe=df, column_map=columns)
68
69
 
69
70
  return df
sempy_labs/_vertipaq.py CHANGED
@@ -6,7 +6,7 @@ import os
6
6
  import shutil
7
7
  import datetime
8
8
  import warnings
9
- from sempy_labs._helper_functions import (
9
+ from ._helper_functions import (
10
10
  format_dax_object_name,
11
11
  save_as_delta_table,
12
12
  resolve_workspace_capacity,
@@ -17,8 +17,8 @@ from sempy_labs._helper_functions import (
17
17
  resolve_workspace_id,
18
18
  resolve_workspace_name,
19
19
  )
20
- from sempy_labs._list_functions import list_relationships, list_tables
21
- from sempy_labs.lakehouse import lakehouse_attached, get_lakehouse_tables
20
+ from ._list_functions import list_relationships, list_tables
21
+ from .lakehouse import lakehouse_attached, get_lakehouse_tables
22
22
  from typing import Optional
23
23
  from sempy._utils._log import log
24
24
  import sempy_labs._icons as icons
sempy_labs/_vpax.py CHANGED
@@ -6,7 +6,7 @@ import sys
6
6
  from pathlib import Path
7
7
  from typing import Optional
8
8
  from uuid import UUID
9
- from sempy_labs._helper_functions import (
9
+ from ._helper_functions import (
10
10
  resolve_workspace_name_and_id,
11
11
  resolve_dataset_name_and_id,
12
12
  resolve_lakehouse_name_and_id,
sempy_labs/_warehouses.py CHANGED
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from ._helper_functions import (
2
2
  resolve_workspace_name_and_id,
3
3
  _base_api,
4
4
  _create_dataframe,
@@ -111,23 +111,24 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
111
111
  client="fabric_sp",
112
112
  )
113
113
 
114
- dfs = []
114
+ rows = []
115
115
  for r in responses:
116
116
  for v in r.get("value", []):
117
117
  prop = v.get("properties", {})
118
118
 
119
- new_data = {
120
- "Warehouse Name": v.get("displayName"),
121
- "Warehouse Id": v.get("id"),
122
- "Description": v.get("description"),
123
- "Connection Info": prop.get("connectionInfo"),
124
- "Created Date": prop.get("createdDate"),
125
- "Last Updated Time": prop.get("lastUpdatedTime"),
126
- }
127
- dfs.append(pd.DataFrame(new_data, index=[0]))
128
-
129
- if dfs:
130
- df = pd.concat(dfs, ignore_index=True)
119
+ rows.append(
120
+ {
121
+ "Warehouse Name": v.get("displayName"),
122
+ "Warehouse Id": v.get("id"),
123
+ "Description": v.get("description"),
124
+ "Connection Info": prop.get("connectionInfo"),
125
+ "Created Date": prop.get("createdDate"),
126
+ "Last Updated Time": prop.get("lastUpdatedTime"),
127
+ }
128
+ )
129
+
130
+ if rows:
131
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
131
132
  _update_dataframe_datatypes(dataframe=df, column_map=columns)
132
133
 
133
134
  return df
sempy_labs/_workloads.py CHANGED
@@ -1,7 +1,7 @@
1
1
  import pandas as pd
2
2
  from typing import Optional
3
3
  import sempy_labs._icons as icons
4
- from sempy_labs._helper_functions import (
4
+ from ._helper_functions import (
5
5
  _update_dataframe_datatypes,
6
6
  _base_api,
7
7
  _create_dataframe,
@@ -1,4 +1,4 @@
1
- from sempy_labs._helper_functions import (
1
+ from ._helper_functions import (
2
2
  resolve_workspace_name_and_id,
3
3
  _base_api,
4
4
  )
sempy_labs/_workspaces.py CHANGED
@@ -1,7 +1,7 @@
1
1
  import pandas as pd
2
2
  import sempy_labs._icons as icons
3
3
  from typing import Optional
4
- from sempy_labs._helper_functions import (
4
+ from ._helper_functions import (
5
5
  resolve_workspace_name_and_id,
6
6
  resolve_capacity_id,
7
7
  _base_api,
@@ -144,21 +144,22 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame
144
144
  client="fabric_sp",
145
145
  )
146
146
 
147
- dfs = []
147
+ rows = []
148
148
  for r in responses:
149
149
  for v in r.get("value", []):
150
150
  p = v.get("principal", {})
151
- new_data = {
152
- "User Name": p.get("displayName"),
153
- "User ID": p.get("id"),
154
- "Type": p.get("type"),
155
- "Role": v.get("role"),
156
- "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
157
- }
158
- dfs.append(pd.DataFrame(new_data, index=[0]))
159
-
160
- if dfs:
161
- df = pd.concat(dfs, ignore_index=True)
151
+ rows.append(
152
+ {
153
+ "User Name": p.get("displayName"),
154
+ "User ID": p.get("id"),
155
+ "Type": p.get("type"),
156
+ "Role": v.get("role"),
157
+ "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
158
+ }
159
+ )
160
+
161
+ if rows:
162
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
162
163
 
163
164
  return df
164
165