semantic-link-labs 0.6.0-py3-none-any.whl → 0.7.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of semantic-link-labs might be problematic.

Files changed (103)
  1. semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -2
  5. sempy_labs/_ai.py +3 -65
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +9 -4
  33. sempy_labs/_generate_semantic_model.py +30 -56
  34. sempy_labs/_helper_functions.py +358 -14
  35. sempy_labs/_icons.py +10 -1
  36. sempy_labs/_list_functions.py +478 -237
  37. sempy_labs/_model_bpa.py +194 -18
  38. sempy_labs/_model_bpa_bulk.py +363 -0
  39. sempy_labs/_model_bpa_rules.py +4 -4
  40. sempy_labs/_model_dependencies.py +12 -10
  41. sempy_labs/_one_lake_integration.py +7 -7
  42. sempy_labs/_query_scale_out.py +45 -66
  43. sempy_labs/_refresh_semantic_model.py +7 -0
  44. sempy_labs/_translations.py +154 -1
  45. sempy_labs/_vertipaq.py +103 -90
  46. sempy_labs/directlake/__init__.py +5 -1
  47. sempy_labs/directlake/_directlake_schema_compare.py +27 -31
  48. sempy_labs/directlake/_directlake_schema_sync.py +55 -66
  49. sempy_labs/directlake/_dl_helper.py +233 -0
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
  51. sempy_labs/directlake/_get_shared_expression.py +1 -1
  52. sempy_labs/directlake/_guardrails.py +17 -13
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
  54. sempy_labs/directlake/_warm_cache.py +1 -1
  55. sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
  56. sempy_labs/lakehouse/_lakehouse.py +3 -2
  57. sempy_labs/lakehouse/_shortcuts.py +1 -1
  58. sempy_labs/migration/_create_pqt_file.py +174 -182
  59. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
  60. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
  61. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
  62. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
  63. sempy_labs/migration/_refresh_calc_tables.py +92 -101
  64. sempy_labs/report/_BPAReportTemplate.json +232 -0
  65. sempy_labs/report/__init__.py +6 -2
  66. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  67. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  68. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  69. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  70. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  71. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  72. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  73. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  74. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  75. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  76. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  77. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  92. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  93. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  94. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  95. sempy_labs/report/_generate_report.py +255 -139
  96. sempy_labs/report/_report_functions.py +26 -33
  97. sempy_labs/report/_report_rebind.py +31 -26
  98. sempy_labs/tom/_model.py +75 -58
  99. semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
  100. semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
  101. sempy_labs/directlake/_fallback.py +0 -60
  102. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
  103. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0

sempy_labs/_model_dependencies.py

@@ -8,7 +8,9 @@ from sempy._utils._log import log
 
 
 @log
-def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
+def get_measure_dependencies(
+    dataset: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
     """
     Shows all dependencies for all measures in a semantic model.
 
@@ -33,7 +35,7 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
         dataset=dataset,
         workspace=workspace,
         dax_string="""
-        SELECT
+        SELECT
         [TABLE] AS [Table Name]
         ,[OBJECT] AS [Object Name]
         ,[OBJECT_TYPE] AS [Object Type]
@@ -132,7 +134,9 @@ def get_measure_dependencies(dataset: str, workspace: Optional[str] = None):
 
 
 @log
-def get_model_calc_dependencies(dataset: str, workspace: Optional[str] = None):
+def get_model_calc_dependencies(
+    dataset: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
     """
     Shows all dependencies for all objects in a semantic model.
 
@@ -288,7 +292,7 @@ def measure_dependency_tree(
 
     if len(dfM_filt) == 0:
         print(
-            f"The '{measure_name}' measure does not exist in the '{dataset}' semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{measure_name}' measure does not exist in the '{dataset}' semantic model in the '{workspace}' workspace."
         )
         return
 
@@ -311,19 +315,17 @@ def measure_dependency_tree(
         if parent_node is None:
             parent_node = Node(parent_node_name)
             node_dict[parent_node_name] = parent_node
-            parent_node.custom_property = icons.measure_icon + " "
+            parent_node.custom_property = f"{icons.measure_icon} "
 
         # Create the child node
         child_node_name = ref_obj_name
         child_node = Node(child_node_name, parent=parent_node)
         if ref_obj_type == "Column":
-            child_node.custom_property = (
-                icons.column_icon + " '" + ref_obj_table_name + "'"
-            )
+            child_node.custom_property = f"{icons.column_icon} '{ref_obj_table_name}'"
         elif ref_obj_type == "Table":
-            child_node.custom_property = icons.table_icon + " "
+            child_node.custom_property = f"{icons.table_icon} "
         elif ref_obj_type == "Measure":
-            child_node.custom_property = icons.measure_icon + " "
+            child_node.custom_property = f"{icons.measure_icon} "
 
         # Update the dictionary with the child node
         node_dict[child_node_name] = child_node
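
Both dependency functions now declare an explicit pd.DataFrame return type. A minimal usage sketch (the dataset and workspace names are placeholders, and the functions are assumed to be exported at the package root as in prior releases):

    from sempy_labs import get_measure_dependencies, get_model_calc_dependencies

    # Each call returns a pandas DataFrame with Table Name / Object Name /
    # Object Type columns, per the DAX aliases in the diff above.
    measure_deps = get_measure_dependencies("AdventureWorks", workspace="Sales")
    calc_deps = get_model_calc_dependencies("AdventureWorks", workspace="Sales")
    print(measure_deps.head())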

sempy_labs/_one_lake_integration.py

@@ -50,13 +50,13 @@ def export_model_to_onelake(
     {{
         'export': {{
             'layout': 'delta',
-            'type': 'full',
-            'objects': [
-                {{
+            'type': 'full',
+            'objects': [
+                {{
                 'database': '{dataset}'
-                }}
-            ]
-        }}
+                }}
+            ]
+        }}
     }}
     """
 
@@ -120,7 +120,7 @@ def export_model_to_onelake(
 
     print(f"{icons.in_progress} Creating shortcuts...\n")
     for tableName in tables:
-        tablePath = "Tables/" + tableName
+        tablePath = f"Tables/{tableName}"
         shortcutName = tableName.replace(" ", "")
         request_body = {
             "path": "Tables",

sempy_labs/_query_scale_out.py

@@ -1,7 +1,10 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import resolve_dataset_id
-from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    resolve_workspace_name_and_id,
+)
+from typing import Optional, Tuple
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
 
@@ -26,12 +29,7 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
 
     # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     client = fabric.PowerBIRestClient()
@@ -46,7 +44,9 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
     )
 
 
-def qso_sync_status(dataset: str, workspace: Optional[str] = None):
+def qso_sync_status(
+    dataset: str, workspace: Optional[str] = None
+) -> Tuple[pd.DataFrame, pd.DataFrame]:
     """
     Returns the query scale-out sync status for the specified dataset from the specified workspace.
 
@@ -61,6 +61,8 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
 
     Returns
     -------
+    Tuple[pandas.DataFrame, pandas.DataFrame]
+        2 pandas dataframes showing the query scale-out sync status.
 
     """
 
@@ -84,12 +86,7 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
         columns=["Replica ID", "Replica Type", "Replica Version", "Replica Timestamp"]
     )
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     client = fabric.PowerBIRestClient()
@@ -101,29 +98,29 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
         raise FabricHTTPException(response)
 
     o = response.json()
-    sos = o["scaleOutStatus"]
+    sos = o.get("scaleOutStatus")
 
     if sos == "Enabled":
         new_data = {
-            "Scale Out Status": o["scaleOutStatus"],
-            "Sync Start Time": o["syncStartTime"],
-            "Sync End Time": o["syncEndTime"],
-            "Commit Version": o["commitVersion"],
-            "Commit Timestamp": o["commitTimestamp"],
-            "Target Sync Version": o["targetSyncVersion"],
-            "Target Sync Timestamp": o["targetSyncTimestamp"],
-            "Trigger Reason": o["triggerReason"],
-            "Min Active Read Version": o["minActiveReadVersion"],
-            "Min Active Read Timestamp": o["minActiveReadTimestamp"],
+            "Scale Out Status": o.get("scaleOutStatus"),
+            "Sync Start Time": o.get("syncStartTime"),
+            "Sync End Time": o.get("syncEndTime"),
+            "Commit Version": o.get("commitVersion"),
+            "Commit Timestamp": o.get("commitTimestamp"),
+            "Target Sync Version": o.get("targetSyncVersion"),
+            "Target Sync Timestamp": o.get("targetSyncTimestamp"),
+            "Trigger Reason": o.get("triggerReason"),
+            "Min Active Read Version": o.get("minActiveReadVersion"),
+            "Min Active Read Timestamp": o.get("minActiveReadTimestamp"),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-        for r in o["scaleOutReplicas"]:
+        for r in o.get("scaleOutReplicas", []):
             new_data = {
-                "Replica ID": r["replicaId"],
-                "Replica Type": r["replicaType"],
-                "Replica Version": str(r["replicaVersion"]),
-                "Replica Timestamp": r["replicaTimestamp"],
+                "Replica ID": r.get("replicaId"),
+                "Replica Type": r.get("replicaType"),
+                "Replica Version": str(r.get("replicaVersion")),
+                "Replica Timestamp": r.get("replicaTimestamp"),
             }
             dfRep = pd.concat(
                 [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
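
The move from bracket indexing to dict.get() means a missing key in the scale-out REST payload yields None (or a supplied default) instead of raising KeyError. This is plain Python dict semantics:

    o = {"scaleOutStatus": "Enabled"}        # payload where optional keys are absent
    o.get("syncStartTime")                   # None; o["syncStartTime"] would raise KeyError
    for r in o.get("scaleOutReplicas", []):  # safe to iterate even when the key is missing
        pass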
@@ -147,7 +144,7 @@ def qso_sync_status(dataset: str, workspace: Optional[str] = None):
     return df, dfRep
 
 
-def disable_qso(dataset: str, workspace: Optional[str] = None):
+def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Sets the max read-only replicas to 0, disabling query scale out.
 
@@ -162,15 +159,12 @@ def disable_qso(dataset: str, workspace: Optional[str] = None):
 
     Returns
     -------
+    pandas.DataFrame
+        A pandas dataframe showing the current query scale out settings.
 
     """
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     request_body = {"queryScaleOutSettings": {"maxReadOnlyReplicas": "0"}}
@@ -195,7 +189,7 @@ def set_qso(
     auto_sync: Optional[bool] = True,
     max_read_only_replicas: Optional[int] = -1,
     workspace: Optional[str] = None,
-):
+) -> pd.DataFrame:
     """
     Sets the query scale out settings for a semantic model.
 
@@ -214,17 +208,14 @@ def set_qso(
 
     Returns
     -------
+    pandas.DataFrame
+        A pandas dataframe showing the current query scale-out settings.
 
     """
 
     # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/update-dataset-in-group
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     if max_read_only_replicas == 0:
@@ -283,12 +274,7 @@ def set_semantic_model_storage_format(
 
     """
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
-
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
     dataset_id = resolve_dataset_id(dataset, workspace)
 
     storage_format = storage_format.capitalize()
@@ -317,10 +303,10 @@ def set_semantic_model_storage_format(
         raise FabricHTTPException(response)
     print(f"{icons.green_dot} Semantic model storage format set to '{storage_format}'.")
 
-    return response.status_code
 
-
-def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] = None):
+def list_qso_settings(
+    dataset: Optional[str] = None, workspace: Optional[str] = None
+) -> pd.DataFrame:
     """
     Shows the query scale out settings for a semantic model (or all semantic models within a workspace).
 
@@ -339,11 +325,7 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] =
         A pandas dataframe showing the query scale out settings.
     """
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     if dataset is not None:
         dataset_id = resolve_dataset_id(dataset, workspace)
@@ -360,7 +342,8 @@ def list_qso_settings(dataset: Optional[str] = None, workspace: Optional[str] =
     )
     client = fabric.PowerBIRestClient()
    response = client.get(f"/v1.0/myorg/groups/{workspace_id}/datasets")
-    for v in response.json()["value"]:
+
+    for v in response.json().get("value", []):
         tsm = v.get("targetStorageMode")
         if tsm == "Abf":
             sm = "Small"
@@ -416,14 +399,10 @@ def set_workspace_default_storage_format(
 
     if storage_format not in storageFormats:
         print(
-            f"Invalid storage format. Please choose from these options: {storageFormats}."
+            f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
         )
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"name": workspace, "defaultDatasetStorageFormat": storage_format}
 
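Every function in this module swaps the same five-line workspace lookup for a single call to resolve_workspace_name_and_id, newly imported from sempy_labs._helper_functions. The helper's body is not part of this diff; a sketch consistent with the inline logic it replaces:

    import sempy.fabric as fabric
    from typing import Optional, Tuple

    def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str, str]:
        # Mirrors the removed inline blocks: default to the attached workspace,
        # otherwise resolve the given workspace name to its ID.
        if workspace is None:
            workspace_id = fabric.get_workspace_id()
            workspace = fabric.resolve_workspace_name(workspace_id)
        else:
            workspace_id = fabric.resolve_workspace_id(workspace)
        return workspace, workspace_id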

sempy_labs/_refresh_semantic_model.py

@@ -16,6 +16,7 @@ def refresh_semantic_model(
     refresh_type: Optional[str] = None,
     retry_count: Optional[int] = 0,
     apply_refresh_policy: Optional[bool] = True,
+    max_parallelism: Optional[int] = 10,
     workspace: Optional[str] = None,
 ):
     """
@@ -35,6 +36,10 @@ def refresh_semantic_model(
         Number of times the operation retries before failing.
     apply_refresh_policy : bool, default=True
         If an incremental refresh policy is defined, determines whether to apply the policy. Modes are true or false. If the policy isn't applied, the full process leaves partition definitions unchanged, and fully refreshes all partitions in the table. If commitMode is transactional, applyRefreshPolicy can be true or false. If commitMode is partialBatch, applyRefreshPolicy of true isn't supported, and applyRefreshPolicy must be set to false.
+    max_parallelism : int, default=10
+        Determines the maximum number of threads that can run the processing commands in parallel.
+        This value aligns with the MaxParallelism property that can be set in the TMSL Sequence command or by using other methods.
+        Defaults to 10.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -90,6 +95,7 @@ def refresh_semantic_model(
             refresh_type=refresh_type,
             retry_count=retry_count,
             apply_refresh_policy=apply_refresh_policy,
+            max_parallelism=max_parallelism,
         )
     else:
         requestID = fabric.refresh_dataset(
@@ -98,6 +104,7 @@ def refresh_semantic_model(
             refresh_type=refresh_type,
             retry_count=retry_count,
             apply_refresh_policy=apply_refresh_policy,
+            max_parallelism=max_parallelism,
             objects=objects,
         )
     print(
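
A usage sketch for the new parameter (dataset and workspace names are placeholders):

    from sempy_labs import refresh_semantic_model

    # Caps concurrent processing commands at 4 instead of the default 10,
    # mirroring the TMSL MaxParallelism property.
    refresh_semantic_model(
        dataset="AdventureWorks",
        refresh_type="full",
        max_parallelism=4,
        workspace="Sales",
    )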

sempy_labs/_translations.py

@@ -1,3 +1,4 @@
+import sempy
 import pandas as pd
 from typing import List, Optional, Union
 from sempy._utils._log import log
@@ -10,7 +11,7 @@ def translate_semantic_model(
     languages: Union[str, List[str]],
     exclude_characters: Optional[str] = None,
     workspace: Optional[str] = None,
-):
+) -> pd.DataFrame:
     """
     Translates names, descriptions, display folders for all objects in a semantic model.
 
@@ -29,6 +30,8 @@ def translate_semantic_model(
 
     Returns
     -------
+    pandas.DataFrame
+        Shows a pandas dataframe which displays all of the translations in the semantic model.
 
     """
 
@@ -302,3 +305,153 @@ def translate_semantic_model(
                 lang,
                 i,
             )
+    result = pd.DataFrame(
+        columns=[
+            "Language",
+            "Object Type",
+            "Table Name",
+            "Object Name",
+            "Translated Object Name",
+            "Description",
+            "Translated Description",
+            "Display Folder",
+            "Translated Display Folder",
+        ]
+    )
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+
+        sempy.fabric._client._utils._init_analysis_services()
+        import Microsoft.AnalysisServices.Tabular as TOM
+
+        for c in tom.model.Cultures:
+            for tr in c.ObjectTranslations:
+                oType = str(tr.Object.ObjectType)
+                oName = tr.Object.Name
+                tValue = tr.Value
+                prop = str(tr.Property)
+
+                if tr.Object.ObjectType == TOM.ObjectType.Table:
+                    desc = tom.model.Tables[oName].Description
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": oName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == oName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Level:
+                    hierarchyName = tr.Object.Parent.Name
+                    tName = tr.Object.Parent.Parent.Name
+                    levelName = "'" + hierarchyName + "'[" + oName + "]"
+                    desc = (
+                        tom.model.Tables[tName]
+                        .Hierarchies[hierarchyName]
+                        .Levels[oName]
+                        .Description
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": levelName,
+                        "Object Type": oType,
+                        "Description": desc,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == levelName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Column:
+                    tName = tr.Object.Table.Name
+                    desc = tom.model.Tables[tName].Columns[oName].Description
+                    display_folder = (
+                        tom.model.Tables[tName].Columns[oName].DisplayFolder
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                        "Display Folder": display_folder,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Measure:
+                    tName = tr.Object.Table.Name
+                    desc = tom.model.Tables[tName].Measures[oName].Description
+                    display_folder = (
+                        tom.model.Tables[tName].Measures[oName].DisplayFolder
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                        "Display Folder": display_folder,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+                elif tr.Object.ObjectType == TOM.ObjectType.Hierarchy:
+                    tName = tr.Object.Table.Name
+                    desc = tom.model.Tables[tName].Hierarchies[oName].Description
+                    display_folder = (
+                        tom.model.Tables[tName].Hierarchies[oName].DisplayFolder
+                    )
+                    new_data = {
+                        "Language": c.Name,
+                        "Table Name": tName,
+                        "Object Name": oName,
+                        "Object Type": oType,
+                        "Description": desc,
+                        "Display Folder": display_folder,
+                    }
+                    result = pd.concat(
+                        [result, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+                    condition = (
+                        (result["Language"] == c.Name)
+                        & (result["Table Name"] == tName)
+                        & (result["Object Name"] == oName)
+                        & (result["Object Type"] == oType)
+                    )
+
+                if prop == "Caption":
+                    result.loc[condition, "Translated Object Name"] = tValue
+                elif prop == "Description":
+                    result.loc[condition, "Translated Description"] = tValue
+                else:
+                    result.loc[condition, "Translated Display Folder"] = tValue
+    result.fillna("", inplace=True)
+
+    return result
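
With this change, translate_semantic_model both applies the translations and returns them. A sketch of the new return value (dataset and workspace names are placeholders):

    from sempy_labs import translate_semantic_model

    df = translate_semantic_model(
        dataset="AdventureWorks",
        languages=["it-IT", "ja-JP"],
        workspace="Sales",
    )
    # Columns match the result frame built in the added block above.
    print(df[["Language", "Object Type", "Object Name", "Translated Object Name"]].head())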