semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff shows the content of publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between the two published versions.

Files changed (109)
  1. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +38 -8
  2. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +109 -104
  3. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +53 -1
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +49 -14
  44. sempy_labs/_capacity_migration.py +1 -7
  45. sempy_labs/_data_pipelines.py +6 -0
  46. sempy_labs/_dataflows.py +118 -1
  47. sempy_labs/_dax.py +189 -3
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +6 -4
  53. sempy_labs/_generate_semantic_model.py +26 -3
  54. sempy_labs/_git.py +14 -14
  55. sempy_labs/_helper_functions.py +197 -1
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +1 -1
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_databases.py +428 -0
  62. sempy_labs/_mirrored_warehouses.py +2 -0
  63. sempy_labs/_ml_experiments.py +6 -0
  64. sempy_labs/_ml_models.py +7 -1
  65. sempy_labs/_model_bpa.py +215 -181
  66. sempy_labs/_model_bpa_bulk.py +46 -42
  67. sempy_labs/_model_bpa_rules.py +8 -3
  68. sempy_labs/_model_dependencies.py +41 -87
  69. sempy_labs/_notebooks.py +107 -12
  70. sempy_labs/_query_scale_out.py +8 -6
  71. sempy_labs/_refresh_semantic_model.py +299 -49
  72. sempy_labs/_spark.py +12 -5
  73. sempy_labs/_translations.py +2 -0
  74. sempy_labs/_vertipaq.py +89 -86
  75. sempy_labs/_warehouses.py +79 -0
  76. sempy_labs/_workloads.py +128 -0
  77. sempy_labs/_workspace_identity.py +4 -4
  78. sempy_labs/_workspaces.py +14 -1
  79. sempy_labs/admin/__init__.py +2 -0
  80. sempy_labs/admin/_basic_functions.py +131 -43
  81. sempy_labs/admin/_domains.py +18 -18
  82. sempy_labs/directlake/__init__.py +2 -0
  83. sempy_labs/directlake/_directlake_schema_sync.py +2 -1
  84. sempy_labs/directlake/_dl_helper.py +4 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +2 -1
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/directlake/_warm_cache.py +10 -9
  91. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  92. sempy_labs/lakehouse/_shortcuts.py +4 -0
  93. sempy_labs/migration/_create_pqt_file.py +5 -2
  94. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  95. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  96. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  97. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  98. sempy_labs/migration/_migration_validation.py +2 -0
  99. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  100. sempy_labs/report/__init__.py +6 -1
  101. sempy_labs/report/_download_report.py +75 -0
  102. sempy_labs/report/_generate_report.py +6 -0
  103. sempy_labs/report/_paginated.py +74 -0
  104. sempy_labs/report/_report_functions.py +6 -0
  105. sempy_labs/report/_report_rebind.py +2 -0
  106. sempy_labs/report/_reportwrapper.py +4 -2
  107. sempy_labs/tom/_model.py +135 -68
  108. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
  109. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0
sempy_labs/_vertipaq.py CHANGED
@@ -21,6 +21,7 @@ from sempy_labs.directlake import get_direct_lake_source
  from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons
+ from pathlib import Path


  @log
@@ -69,80 +70,86 @@ def vertipaq_analyzer(

  workspace = fabric.resolve_workspace_name(workspace)

- data_type_string = "string"
- data_type_long = "long"
- data_type_timestamp = "timestamp"
- data_type_double = "double"
- data_type_bool = "bool"
- int_format = "int"
- pct_format = "pct"
- no_format = ""
-
  vertipaq_map = {
  "Model": {
- "Dataset Name": [data_type_string, no_format],
- "Total Size": [data_type_long, int_format],
- "Table Count": [data_type_long, int_format],
- "Column Count": [data_type_long, int_format],
- "Compatibility Level": [data_type_long, no_format],
- "Default Mode": [data_type_string, no_format],
+ "Dataset Name": [icons.data_type_string, icons.no_format],
+ "Total Size": [icons.data_type_long, icons.int_format],
+ "Table Count": [icons.data_type_long, icons.int_format],
+ "Column Count": [icons.data_type_long, icons.int_format],
+ "Compatibility Level": [icons.data_type_long, icons.no_format],
+ "Default Mode": [icons.data_type_string, icons.no_format],
  },
  "Tables": {
- "Table Name": [data_type_string, no_format],
- "Type": [data_type_string, no_format],
- "Row Count": [data_type_long, int_format],
- "Total Size": [data_type_long, int_format],
- "Dictionary Size": [data_type_long, int_format],
- "Data Size": [data_type_long, int_format],
- "Hierarchy Size": [data_type_long, int_format],
- "Relationship Size": [data_type_long, int_format],
- "User Hierarchy Size": [data_type_long, int_format],
- "Partitions": [data_type_long, int_format],
- "Columns": [data_type_long, int_format],
- "% DB": [data_type_double, pct_format],
+ "Table Name": [icons.data_type_string, icons.no_format],
+ "Type": [icons.data_type_string, icons.no_format],
+ "Row Count": [icons.data_type_long, icons.int_format],
+ "Total Size": [icons.data_type_long, icons.int_format],
+ "Dictionary Size": [icons.data_type_long, icons.int_format],
+ "Data Size": [icons.data_type_long, icons.int_format],
+ "Hierarchy Size": [icons.data_type_long, icons.int_format],
+ "Relationship Size": [icons.data_type_long, icons.int_format],
+ "User Hierarchy Size": [icons.data_type_long, icons.int_format],
+ "Partitions": [icons.data_type_long, icons.int_format],
+ "Columns": [icons.data_type_long, icons.int_format],
+ "% DB": [icons.data_type_double, icons.pct_format],
  },
  "Partitions": {
- "Table Name": [data_type_string, no_format],
- "Partition Name": [data_type_string, no_format],
- "Mode": [data_type_string, no_format],
- "Record Count": [data_type_long, int_format],
- "Segment Count": [data_type_long, int_format],
- "Records per Segment": [data_type_double, int_format],
+ "Table Name": [icons.data_type_string, icons.no_format],
+ "Partition Name": [icons.data_type_string, icons.no_format],
+ "Mode": [icons.data_type_string, icons.no_format],
+ "Record Count": [icons.data_type_long, icons.int_format],
+ "Segment Count": [icons.data_type_long, icons.int_format],
+ "Records per Segment": [icons.data_type_double, icons.int_format],
  },
  "Columns": {
- "Table Name": [data_type_string, no_format],
- "Column Name": [data_type_string, no_format],
- "Type": [data_type_string, no_format],
- "Cardinality": [data_type_long, int_format],
- "Total Size": [data_type_long, int_format],
- "Data Size": [data_type_long, int_format],
- "Dictionary Size": [data_type_long, int_format],
- "Hierarchy Size": [data_type_long, int_format],
- "% Table": [data_type_double, pct_format],
- "% DB": [data_type_double, pct_format],
- "Data Type": [data_type_string, no_format],
- "Encoding": [data_type_string, no_format],
- "Is Resident": [data_type_bool, no_format],
- "Temperature": [data_type_double, int_format],
- "Last Accessed": [data_type_timestamp, no_format],
+ "Table Name": [icons.data_type_string, icons.no_format],
+ "Column Name": [icons.data_type_string, icons.no_format],
+ "Type": [icons.data_type_string, icons.no_format],
+ "Cardinality": [icons.data_type_long, icons.int_format],
+ "Total Size": [icons.data_type_long, icons.int_format],
+ "Data Size": [icons.data_type_long, icons.int_format],
+ "Dictionary Size": [icons.data_type_long, icons.int_format],
+ "Hierarchy Size": [icons.data_type_long, icons.int_format],
+ "% Table": [icons.data_type_double, icons.pct_format],
+ "% DB": [icons.data_type_double, icons.pct_format],
+ "Data Type": [icons.data_type_string, icons.no_format],
+ "Encoding": [icons.data_type_string, icons.no_format],
+ "Is Resident": [icons.data_type_bool, icons.no_format],
+ "Temperature": [icons.data_type_double, icons.int_format],
+ "Last Accessed": [icons.data_type_timestamp, icons.no_format],
  },
  "Hierarchies": {
- "Table Name": [data_type_string, no_format],
- "Hierarchy Name": [data_type_string, no_format],
- "Used Size": [data_type_long, int_format],
+ "Table Name": [icons.data_type_string, icons.no_format],
+ "Hierarchy Name": [icons.data_type_string, icons.no_format],
+ "Used Size": [icons.data_type_long, icons.int_format],
  },
  "Relationships": {
- "From Object": [data_type_string, no_format],
- "To Object": [data_type_string, no_format],
- "Multiplicity": [data_type_string, no_format],
- "Used Size": [data_type_long, int_format],
- "Max From Cardinality": [data_type_long, int_format],
- "Max To Cardinality": [data_type_long, int_format],
- "Missing Rows": [data_type_long, int_format],
+ "From Object": [icons.data_type_string, icons.no_format],
+ "To Object": [icons.data_type_string, icons.no_format],
+ "Multiplicity": [icons.data_type_string, icons.no_format],
+ "Used Size": [icons.data_type_long, icons.int_format],
+ "Max From Cardinality": [icons.data_type_long, icons.int_format],
+ "Max To Cardinality": [icons.data_type_long, icons.int_format],
+ "Missing Rows": [icons.data_type_long, icons.int_format],
  },
  }

+ with connect_semantic_model(
+ dataset=dataset, workspace=workspace, readonly=True
+ ) as tom:
+ compat_level = tom.model.Model.Database.CompatibilityLevel
+ is_direct_lake = tom.is_direct_lake()
+ def_mode = tom.model.DefaultMode
+ table_count = tom.model.Tables.Count
+ column_count = len(list(tom.all_columns()))
+ if table_count == 0:
+ print(
+ f"{icons.warning} The '{dataset}' semantic model within the '{workspace}' workspace has no tables. Vertipaq Analyzer can only be run if the semantic model has tables."
+ )
+ return
+
  dfT = list_tables(dataset=dataset, extended=True, workspace=workspace)
+
  dfT.rename(columns={"Name": "Table Name"}, inplace=True)
  columns_to_keep = list(vertipaq_map["Tables"].keys())
  dfT = dfT[dfT.columns.intersection(columns_to_keep)]
@@ -157,15 +164,6 @@ def vertipaq_analyzer(
  get_direct_lake_source(dataset=dataset, workspace=workspace)
  )

- with connect_semantic_model(
- dataset=dataset, workspace=workspace, readonly=True
- ) as tom:
- compat_level = tom.model.Model.Database.CompatibilityLevel
- is_direct_lake = tom.is_direct_lake()
- def_mode = tom.model.DefaultMode
- table_count = tom.model.Tables.Count
- column_count = len(list(tom.all_columns()))
-
  dfR["Missing Rows"] = 0
  dfR["Missing Rows"] = dfR["Missing Rows"].astype(int)

@@ -403,6 +401,8 @@ def vertipaq_analyzer(
  y = db_total_size / (1024**2) * 1000000
  elif db_total_size >= 1000:
  y = db_total_size / (1024) * 1000
+ else:
+ y = db_total_size
  y = round(y)

  dfModel = pd.DataFrame(
@@ -421,18 +421,19 @@ def vertipaq_analyzer(
  export_Model = dfModel.copy()

  def _style_columns_based_on_types(dataframe: pd.DataFrame, column_type_mapping):
-
- format_mapping = {
- "int": "{:,}",
- "pct": "{:.2f}%",
- "": "{}",
+ # Define formatting functions based on the type mappings
+ format_funcs = {
+ "int": lambda x: "{:,}".format(x) if pd.notnull(x) else "",
+ "pct": lambda x: "{:.2f}%".format(x) if pd.notnull(x) else "",
+ "": lambda x: "{}".format(x),
  }

- format_dict = {
- col: format_mapping[dt] for col, dt in column_type_mapping.items()
- }
+ # Apply the formatting function to each column based on its specified type
+ for col, dt in column_type_mapping.items():
+ if dt in format_funcs:
+ dataframe[col] = dataframe[col].map(format_funcs[dt])

- return dataframe.style.format(format_dict)
+ return dataframe

  dfModel = _style_columns_based_on_types(
  dfModel,
@@ -565,13 +566,13 @@ def vertipaq_analyzer(
  df.columns = df.columns.str.replace(" ", "_")

  schema = {
- "Capacity_Name": data_type_string,
- "Capacity_Id": data_type_string,
- "Workspace_Name": data_type_string,
- "Workspace_Id": data_type_string,
- "Dataset_Name": data_type_string,
- "Dataset_Id": data_type_string,
- "Configured_By": data_type_string,
+ "Capacity_Name": icons.data_type_string,
+ "Capacity_Id": icons.data_type_string,
+ "Workspace_Name": icons.data_type_string,
+ "Workspace_Id": icons.data_type_string,
+ "Dataset_Name": icons.data_type_string,
+ "Dataset_Id": icons.data_type_string,
+ "Configured_By": icons.data_type_string,
  }

  schema.update(
@@ -580,8 +581,8 @@ def vertipaq_analyzer(
  for key, value in vertipaq_map[key_name].items()
  }
  )
- schema["RunId"] = data_type_long
- schema["Timestamp"] = data_type_timestamp
+ schema["RunId"] = icons.data_type_long
+ schema["Timestamp"] = icons.data_type_timestamp

  delta_table_name = f"VertipaqAnalyzer_{obj}".lower()
  save_as_delta_table(
@@ -988,7 +989,9 @@ def import_vertipaq_analyzer(folder_path: str, file_name: str):
  dfs = {}
  for file_name in zip_ref.namelist():
  df = pd.read_csv(extracted_dir + "/" + file_name)
- dfs[file_name] = df
+ file_path = Path(file_name)
+ df_name = file_path.stem
+ dfs[df_name] = df

  visualize_vertipaq(dfs)
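The `_style_columns_based_on_types` change above drops the pandas `Styler.format` call in favor of plain per-column formatting functions, so the helper now returns a regular DataFrame and null values render as empty strings. A minimal, self-contained sketch of that approach (toy column names, not the analyzer's real output):

```python
import pandas as pd

# Format functions keyed by type tag, mirroring the logic added in this diff;
# nulls become empty strings instead of breaking the formatter.
format_funcs = {
    "int": lambda x: "{:,}".format(x) if pd.notnull(x) else "",
    "pct": lambda x: "{:.2f}%".format(x) if pd.notnull(x) else "",
    "": lambda x: "{}".format(x),
}

# Toy data and column-to-type mapping (illustrative names only).
df = pd.DataFrame({"Total Size": [123456, 7890], "% DB": [12.3456, None]})
column_type_mapping = {"Total Size": "int", "% DB": "pct"}

for col, dt in column_type_mapping.items():
    if dt in format_funcs:
        df[col] = df[col].map(format_funcs[dt])

print(df)  # "Total Size" -> "123,456" / "7,890"; "% DB" -> "12.35%" / ""
```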
sempy_labs/_warehouses.py CHANGED
@@ -19,6 +19,8 @@ def create_warehouse(
  """
  Creates a Fabric warehouse.

+ This is a wrapper function for the following API: `Items - Create Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/create-warehouse>`_.
+
  Parameters
  ----------
  warehouse: str
@@ -61,6 +63,8 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
  """
  Shows the warehouses within a workspace.

+ This is a wrapper function for the following API: `Items - List Warehouses <https://learn.microsoft.com/rest/api/fabric/warehouse/items/list-warehouses>`_.
+
  Parameters
  ----------
  workspace : str, default=None
@@ -115,6 +119,8 @@ def delete_warehouse(name: str, workspace: Optional[str] = None):
  """
  Deletes a Fabric warehouse.

+ This is a wrapper function for the following API: `Items - Delete Warehouse <https://learn.microsoft.com/rest/api/fabric/warehouse/items/delete-warehouse>`_.
+
  Parameters
  ----------
  name: str
@@ -140,3 +146,76 @@ def delete_warehouse(name: str, workspace: Optional[str] = None):
  print(
  f"{icons.green_dot} The '{name}' warehouse within the '{workspace}' workspace has been deleted."
  )
+
+
+ def get_warehouse_tables(
+ warehouse: str, workspace: Optional[str] = None
+ ) -> pd.DataFrame:
+ """
+ Shows a list of the tables in the Fabric warehouse. This function is based on INFORMATION_SCHEMA.TABLES.
+
+ Parameters
+ ----------
+ warehouse : str
+ Name of the Fabric warehouse.
+ workspace : str, default=None
+ The Fabric workspace name.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+
+ Returns
+ -------
+ pandas.DataFrame
+ A pandas dataframe showing a list of the tables in the Fabric warehouse.
+ """
+
+ from sempy_labs._sql import ConnectWarehouse
+
+ with ConnectWarehouse(warehouse=warehouse, workspace=workspace) as sql:
+ df = sql.query(
+ """
+ SELECT TABLE_SCHEMA AS [Schema], TABLE_NAME AS [Table Name], TABLE_TYPE AS [Table Type]
+ FROM INFORMATION_SCHEMA.TABLES
+ WHERE TABLE_TYPE = 'BASE TABLE'
+ """
+ )
+
+ return df
+
+
+ def get_warehouse_columns(
+ warehouse: str, workspace: Optional[str] = None
+ ) -> pd.DataFrame:
+ """
+ Shows a list of the columns in each table within the Fabric warehouse. This function is based on INFORMATION_SCHEMA.COLUMNS.
+
+ Parameters
+ ----------
+ warehouse : str
+ Name of the Fabric warehouse.
+ workspace : str, default=None
+ The Fabric workspace name.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+
+ Returns
+ -------
+ pandas.DataFrame
+ A pandas dataframe showing a list of the columns in each table within the Fabric warehouse.
+ """
+
+ from sempy_labs._sql import ConnectWarehouse
+
+ with ConnectWarehouse(warehouse=warehouse, workspace=workspace) as sql:
+ df = sql.query(
+ """
+ SELECT t.TABLE_SCHEMA AS [Schema], t.TABLE_NAME AS [Table Name], c.COLUMN_NAME AS [Column Name], c.DATA_TYPE AS [Data Type], c.IS_NULLABLE AS [Is Nullable], c.CHARACTER_MAXIMUM_LENGTH AS [Character Max Length]
+ FROM INFORMATION_SCHEMA.TABLES AS t
+ LEFT JOIN INFORMATION_SCHEMA.COLUMNS AS c
+ ON t.TABLE_NAME = c.TABLE_NAME
+ AND t.TABLE_SCHEMA = c.TABLE_SCHEMA
+ WHERE t.TABLE_TYPE = 'BASE TABLE'
+ """
+ )
+
+ return df
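For reference, a hypothetical usage sketch for the two new warehouse metadata helpers; the warehouse and workspace names are placeholders, and re-export from the package root is assumed (otherwise import them from `sempy_labs._warehouses`):

```python
import sempy_labs as labs  # assumes the helpers are re-exported at package level

# List base tables in a warehouse, then count columns per table.
tables_df = labs.get_warehouse_tables(warehouse="MyWarehouse", workspace="MyWorkspace")
columns_df = labs.get_warehouse_columns(warehouse="MyWarehouse", workspace="MyWorkspace")

column_counts = (
    columns_df.groupby(["Schema", "Table Name"])["Column Name"]
    .count()
    .reset_index(name="Column Count")
)
print(tables_df)
print(column_counts)
```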
sempy_labs/_workloads.py ADDED
@@ -0,0 +1,128 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ from typing import Optional
+ import sempy_labs._icons as icons
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ def list_workloads(capacity_name: str) -> pd.DataFrame:
+ """
+ Returns the current state of the specified capacity workloads.
+ If a workload is enabled, the percentage of maximum memory that the workload can consume is also returned.
+
+ This is a wrapper function for the following API: `Capacities - Get Workloads <https://learn.microsoft.com/rest/api/power-bi/capacities/get-workloads>`_.
+
+ Parameters
+ ----------
+ capacity_name : str
+ The capacity name.
+
+ Returns
+ -------
+ pandas.DataFrame
+ A pandas dataframe showing the current state of the specified capacity workloads.
+ """
+
+ from sempy_labs._helper_functions import resolve_capacity_id
+
+ df = pd.DataFrame(
+ columns=["Workload Name", "State", "Max Memory Percentage Set By User"]
+ )
+
+ capacity_id = resolve_capacity_id(capacity_name=capacity_name)
+
+ client = fabric.PowerBIRestClient()
+ response = client.get(f"/v1.0/myorg/capacities/{capacity_id}/Workloads")
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+
+ for v in response.json().get("value", []):
+ new_data = {
+ "Workload Name": v.get("name"),
+ "State": v.get("state"),
+ "Max Memory Percentage Set By User": v.get("maxMemoryPercentageSetByUser"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+ int_cols = ["Max Memory Percentage Set By User"]
+ df[int_cols] = df[int_cols].astype(int)
+
+ return df
+
+
+ def patch_workload(
+ capacity_name: str,
+ workload_name: str,
+ state: Optional[str] = None,
+ max_memory_percentage: Optional[int] = None,
+ ):
+ """
+ Changes the state of a specific workload to Enabled or Disabled.
+ When enabling a workload, specify the percentage of maximum memory that the workload can consume.
+
+ This is a wrapper function for the following API: `Capacities - Patch Workload <https://learn.microsoft.com/rest/api/power-bi/capacities/patch-workload>`_.
+
+ Parameters
+ ----------
+ capacity_name : str
+ The capacity name.
+ workload_name : str
+ The workload name.
+ state : str, default=None
+ The capacity workload state.
+ max_memory_percentage : int, default=None
+ The percentage of the maximum memory that a workload can consume (set by the user).
+ """
+
+ from sempy_labs._helper_functions import resolve_capacity_id
+
+ capacity_id = resolve_capacity_id(capacity_name=capacity_name)
+
+ states = ["Disabled", "Enabled", "Unsupported"]
+ state = state.capitalize()
+ if state is not None and state not in states:
+ raise ValueError(
+ f"{icons.red_dot} Invalid 'state' parameter. Please choose from these options: {states}."
+ )
+ if max_memory_percentage is not None and (
+ max_memory_percentage < 0 or max_memory_percentage > 100
+ ):
+ raise ValueError(
+ f"{icons.red_dot} Invalid max memory percentage. Must be a value between 0-100."
+ )
+
+ client = fabric.PowerBIRestClient()
+ url = f"/v1.0/myorg/capacities/{capacity_id}/Workloads/{workload_name}"
+ get_response = client.get(url)
+ if get_response.status_code != 200:
+ raise FabricHTTPException(get_response)
+
+ get_json = get_response.json().get("value")
+ current_state = get_json.get("state")
+ current_max_memory = get_json.get("maxMemoryPercentageSetByUser")
+
+ if current_state == state and str(current_max_memory) == str(max_memory_percentage):
+ print(
+ f"{icons.info} The current workload settings are the same as those specified in the parameters of this function. The workload has not been updated."
+ )
+ return
+
+ payload = {}
+ if state is not None:
+ payload["state"] = state
+ else:
+ payload["state"] = current_state
+ if max_memory_percentage is not None:
+ payload["maxMemoryPercentageSetByUser"] = max_memory_percentage
+ else:
+ payload["maxMemoryPercentageSetByUser"] = current_max_memory
+
+ response = client.patch(url, json=payload)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+
+ print(
+ f"The '{workload_name}' workload within the '{capacity_name}' capacity has been updated accordingly."
+ )
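A hypothetical usage sketch for the new `_workloads.py` module; the capacity and workload names are placeholders, and package-level re-exports are assumed:

```python
import sempy_labs as labs  # assumes list_workloads/patch_workload are re-exported

# Inspect the current workload states and memory limits for a capacity.
workloads = labs.list_workloads(capacity_name="My Capacity")
print(workloads)

# Enable one workload and cap it at 40% of the capacity's memory.
labs.patch_workload(
    capacity_name="My Capacity",
    workload_name="Dataflows",
    state="Enabled",
    max_memory_percentage=40,
)
```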
sempy_labs/_workspace_identity.py CHANGED
@@ -12,6 +12,8 @@ def provision_workspace_identity(workspace: Optional[str] = None):
  """
  Provisions a workspace identity for a workspace.

+ This is a wrapper function for the following API: `Workspaces - Provision Identity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/provision-identity>`_.
+
  Parameters
  ----------
  workspace : str, default=None
@@ -20,8 +22,6 @@ def provision_workspace_identity(workspace: Optional[str] = None):
  or if no lakehouse attached, resolves to the workspace of the notebook.
  """

- # https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/provision-identity?tabs=HTTP
-
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
@@ -41,6 +41,8 @@ def deprovision_workspace_identity(workspace: Optional[str] = None):
  """
  Deprovisions a workspace identity for a workspace.

+ This is a wrapper function for the following API: `Workspaces - Derovision Identity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/deprovision-identity>`_.
+
  Parameters
  ----------
  workspace : str, default=None
@@ -49,8 +51,6 @@ def deprovision_workspace_identity(workspace: Optional[str] = None):
  or if no lakehouse attached, resolves to the workspace of the notebook.
  """

- # https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/deprovision-identity?tabs=HTTP
-
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
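A short, hypothetical usage sketch for the two identity wrappers documented above (the workspace name is a placeholder):

```python
import sempy_labs as labs

# Create a workspace identity, then remove it again when no longer needed.
labs.provision_workspace_identity(workspace="MyWorkspace")
labs.deprovision_workspace_identity(workspace="MyWorkspace")
```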
sempy_labs/_workspaces.py CHANGED
@@ -14,6 +14,8 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = No
  """
  Removes a user from a workspace.

+ This is a wrapper function for the following API: `Groups - Delete User In Group <https://learn.microsoft.com/rest/api/power-bi/groups/delete-user-in-group>`_.
+
  Parameters
  ----------
  email_address : str
@@ -45,6 +47,8 @@ def update_workspace_user(
  """
  Updates a user's role within a workspace.

+ This is a wrapper function for the following API: `Groups - Update Group User <https://learn.microsoft.com/rest/api/power-bi/groups/update-group-user>`_.
+
  Parameters
  ----------
  email_address : str
@@ -95,6 +99,8 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
  """
  A list of all the users of a workspace and their roles.

+ This is a wrapper function for the following API: `Workspaces - List Workspace Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/workspaces/list-workspace-role-assignments>`_.
+
  Parameters
  ----------
  workspace : str, default=None
@@ -142,6 +148,8 @@ def add_user_to_workspace(
  """
  Adds a user to a workspace.

+ This is a wrapper function for the following API: `Groups - Add Group User <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user>`_.
+
  Parameters
  ----------
  email_address : str
@@ -196,6 +204,8 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] =
  """
  Assigns a workspace to a capacity.

+ This is a wrapper function for the following API: `Workspaces - Assign To Capacity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/assign-to-capacity>`_.
+
  Parameters
  ----------
  capacity_name : str
@@ -228,6 +238,8 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
  """
  Unassigns a workspace from its assigned capacity.

+ This is a wrapper function for the following API: `Workspaces - Unassign From Capacity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/unassign-from-capacity>`_.
+
  Parameters
  ----------
  workspace : str, default=None
@@ -236,7 +248,6 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
  or if no lakehouse attached, resolves to the workspace of the notebook.
  """

- # https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/unassign-from-capacity?tabs=HTTP
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
@@ -253,6 +264,8 @@ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataF
  """
  Shows the members of a given workspace.

+ This is a wrapper function for the following API: `Workspaces - List Workspace Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/workspaces/list-workspace-role-assignments>`_.
+
  Parameters
  ----------
  workspace : str, default=None
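A hypothetical usage sketch combining the capacity-assignment and role-listing wrappers documented above; all names are placeholders:

```python
import sempy_labs as labs

# Move a workspace to a capacity and inspect who has access to it.
labs.assign_workspace_to_capacity(capacity_name="My Capacity", workspace="MyWorkspace")
print(labs.list_workspace_users(workspace="MyWorkspace"))

# Detach the workspace from the capacity again.
labs.unassign_workspace_from_capacity(workspace="MyWorkspace")
```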
sempy_labs/admin/__init__.py CHANGED
@@ -13,6 +13,7 @@ from sempy_labs.admin._basic_functions import (
  list_workspace_access_details,
  list_items,
  list_activity_events,
+ list_modified_workspaces,
  )
  from sempy_labs.admin._domains import (
  list_domains,
@@ -52,4 +53,5 @@ __all__ = [
  "list_external_data_shares",
  "revoke_external_data_share",
  "list_activity_events",
+ "list_modified_workspaces",
  ]
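The admin package now re-exports `list_modified_workspaces`. A hypothetical call, assuming its optional parameters (not shown in this diff) can be omitted:

```python
from sempy_labs import admin

# List workspaces modified in the tenant (admin permissions required).
modified = admin.list_modified_workspaces()
print(modified)
```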