semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (101) hide show
  1. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +33 -8
  2. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +101 -98
  3. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +24 -0
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +49 -14
  44. sempy_labs/_capacity_migration.py +1 -7
  45. sempy_labs/_data_pipelines.py +6 -0
  46. sempy_labs/_dataflows.py +4 -0
  47. sempy_labs/_deployment_pipelines.py +13 -7
  48. sempy_labs/_environments.py +6 -0
  49. sempy_labs/_eventhouses.py +6 -0
  50. sempy_labs/_eventstreams.py +6 -0
  51. sempy_labs/_external_data_shares.py +6 -4
  52. sempy_labs/_generate_semantic_model.py +26 -3
  53. sempy_labs/_git.py +14 -14
  54. sempy_labs/_helper_functions.py +172 -0
  55. sempy_labs/_icons.py +55 -22
  56. sempy_labs/_kql_databases.py +6 -0
  57. sempy_labs/_kql_querysets.py +6 -0
  58. sempy_labs/_list_functions.py +1 -1
  59. sempy_labs/_managed_private_endpoints.py +166 -0
  60. sempy_labs/_mirrored_warehouses.py +2 -0
  61. sempy_labs/_ml_experiments.py +6 -0
  62. sempy_labs/_ml_models.py +6 -0
  63. sempy_labs/_model_bpa.py +6 -1
  64. sempy_labs/_model_bpa_bulk.py +11 -25
  65. sempy_labs/_model_bpa_rules.py +8 -3
  66. sempy_labs/_notebooks.py +107 -12
  67. sempy_labs/_query_scale_out.py +8 -6
  68. sempy_labs/_refresh_semantic_model.py +299 -49
  69. sempy_labs/_spark.py +12 -5
  70. sempy_labs/_translations.py +2 -0
  71. sempy_labs/_vertipaq.py +58 -67
  72. sempy_labs/_warehouses.py +79 -0
  73. sempy_labs/_workloads.py +128 -0
  74. sempy_labs/_workspace_identity.py +4 -4
  75. sempy_labs/_workspaces.py +14 -1
  76. sempy_labs/admin/_basic_functions.py +85 -43
  77. sempy_labs/admin/_domains.py +18 -18
  78. sempy_labs/directlake/__init__.py +2 -0
  79. sempy_labs/directlake/_directlake_schema_sync.py +2 -1
  80. sempy_labs/directlake/_dl_helper.py +4 -1
  81. sempy_labs/directlake/_get_shared_expression.py +7 -1
  82. sempy_labs/directlake/_guardrails.py +2 -1
  83. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
  84. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  85. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  86. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  87. sempy_labs/lakehouse/_shortcuts.py +4 -0
  88. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  89. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  90. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  91. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  92. sempy_labs/migration/_migration_validation.py +2 -0
  93. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  94. sempy_labs/report/__init__.py +4 -1
  95. sempy_labs/report/_generate_report.py +6 -0
  96. sempy_labs/report/_paginated.py +74 -0
  97. sempy_labs/report/_report_functions.py +6 -0
  98. sempy_labs/report/_report_rebind.py +2 -0
  99. sempy_labs/tom/_model.py +64 -33
  100. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
  101. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
import sempy.fabric as fabric
from typing import Optional
from sempy.fabric.exceptions import FabricHTTPException
import pandas as pd


def get_report_datasources(
    report: str,
    workspace: Optional[str] = None,
) -> pd.DataFrame:
    """
    Returns a list of data sources for the specified paginated report (RDL) from the specified workspace.

    Parameters
    ----------
    report : str
        Name of the paginated report (RDL).
    workspace : str, default=None
        The name of the Fabric workspace in which the report resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of data sources for the specified paginated report (RDL) from the specified workspace.

    Raises
    ------
    FabricHTTPException
        If the REST call to the datasources endpoint does not return HTTP 200.
    """

    columns = [
        "Report Name",
        "Report Id",
        "Datasource Id",
        "Datasource Type",
        "Gateway Id",
        "Server",
        "Database",
    ]

    # Resolve the workspace name/id pair; None falls back to the notebook's
    # (or attached lakehouse's) workspace.
    if workspace is None:
        workspace_id = fabric.get_workspace_id()
        workspace = fabric.resolve_workspace_name(workspace_id)
    else:
        workspace_id = fabric.resolve_workspace_id(workspace)

    report_id = fabric.resolve_item_id(
        item_name=report, type="PaginatedReport", workspace=workspace
    )

    client = fabric.PowerBIRestClient()

    response = client.get(
        f"/v1.0/myorg/groups/{workspace_id}/reports/{report_id}/datasources"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Collect plain dict rows and build the DataFrame once at the end:
    # pd.concat inside the loop re-copies the frame on every iteration
    # (quadratic in the number of datasources).
    rows = []
    for i in response.json().get("value", []):
        # `connectionDetails` may be absent OR present-but-null; `or {}`
        # normalizes both cases so .get() below is always safe.
        conn = i.get("connectionDetails") or {}
        rows.append(
            {
                "Report Name": report,
                "Report Id": report_id,
                "Datasource Id": i.get("datasourceId"),
                "Datasource Type": i.get("datasourceType"),
                "Gateway Id": i.get("gatewayId"),
                "Server": conn.get("server"),
                "Database": conn.get("database"),
            }
        )

    return pd.DataFrame(rows, columns=columns)
@@ -33,6 +33,8 @@ def get_report_json(
33
33
  """
34
34
  Gets the report.json file content of a Power BI report.
35
35
 
36
+ This is a wrapper function for the following API: `Items - Get Report Definition <https://learn.microsoft.com/rest/api/fabric/report/items/get-report-definition>`_.
37
+
36
38
  Parameters
37
39
  ----------
38
40
  report : str
@@ -157,6 +159,8 @@ def export_report(
157
159
  """
158
160
  Exports a Power BI report to a file in your lakehouse.
159
161
 
162
+ This is a wrapper function for the following APIs: `Reports - Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/export-to-file-in-group>`_, `Reports - Get Export To File Status In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-export-to-file-status-in-group>`_, `Reports - Get File Of Export To File In Group <https://learn.microsoft.com/rest/api/power-bi/reports/get-file-of-export-to-file-in-group>`_.
163
+
160
164
  Parameters
161
165
  ----------
162
166
  report : str
@@ -401,6 +405,8 @@ def clone_report(
401
405
  """
402
406
  Clones a Power BI report.
403
407
 
408
+ This is a wrapper function for the following API: `Reports - Clone Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/clone-report-in-group>`_.
409
+
404
410
  Parameters
405
411
  ----------
406
412
  report : str
@@ -19,6 +19,8 @@ def report_rebind(
19
19
  """
20
20
  Rebinds a report to a semantic model.
21
21
 
22
+ This is a wrapper function for the following API: `Reports - Rebind Report In Group <https://learn.microsoft.com/rest/api/power-bi/reports/rebind-report-in-group>`_.
23
+
22
24
  Parameters
23
25
  ----------
24
26
  report : str | List[str]
sempy_labs/tom/_model.py CHANGED
@@ -6,6 +6,7 @@ from datetime import datetime
6
6
  from sempy_labs._helper_functions import (
7
7
  format_dax_object_name,
8
8
  generate_guid,
9
+ _make_list_unique,
9
10
  )
10
11
  from sempy_labs._list_functions import list_relationships
11
12
  from sempy_labs._refresh_semantic_model import refresh_semantic_model
@@ -15,6 +16,7 @@ from typing import List, Iterator, Optional, Union, TYPE_CHECKING
15
16
  from sempy._utils._log import log
16
17
  import sempy_labs._icons as icons
17
18
  from sempy.fabric.exceptions import FabricHTTPException
19
+ import ast
18
20
 
19
21
  if TYPE_CHECKING:
20
22
  import Microsoft.AnalysisServices.Tabular
@@ -914,6 +916,7 @@ class TOMWrapper:
914
916
  part = TOM.Partition()
915
917
  part.Name = name
916
918
  part.Source = TOM.CalculationGroupSource()
919
+ part.Mode = TOM.ModeType.Import
917
920
  tbl.Partitions.Add(part)
918
921
 
919
922
  sortCol = "Ordinal"
@@ -988,10 +991,13 @@ class TOMWrapper:
988
991
  """
989
992
  import Microsoft.AnalysisServices.Tabular as TOM
990
993
 
991
- cul = TOM.Culture()
992
- cul.Name = language
993
-
994
994
  if not any(c.Name == language for c in self.model.Cultures):
995
+ cul = TOM.Culture()
996
+ cul.Name = language
997
+ lm = TOM.LinguisticMetadata()
998
+ lm.ContentType = TOM.ContentType.Json
999
+ lm.Content = f'{{"Version": "1.0.0", "Language": "{language}"}}'
1000
+ cul.LinguisticMetadata = lm
995
1001
  self.model.Cultures.Add(cul)
996
1002
 
997
1003
  def add_perspective(self, perspective_name: str):
@@ -2109,7 +2115,9 @@ class TOMWrapper:
2109
2115
  if c.Parent.Name == table_name and c.Parent.DataCategory == "Time"
2110
2116
  )
2111
2117
 
2112
- def mark_as_date_table(self, table_name: str, column_name: str):
2118
+ def mark_as_date_table(
2119
+ self, table_name: str, column_name: str, validate: bool = False
2120
+ ):
2113
2121
  """
2114
2122
  Marks a table as a `date table <https://learn.microsoft.com/power-bi/transform-model/desktop-date-tables>`_.
2115
2123
 
@@ -2119,6 +2127,8 @@ class TOMWrapper:
2119
2127
  Name of the table.
2120
2128
  column_name : str
2121
2129
  Name of the date column in the table.
2130
+ validate : bool, default=False
2131
+ If True, performs a validation on whether the date table is viable.
2122
2132
  """
2123
2133
  import Microsoft.AnalysisServices.Tabular as TOM
2124
2134
 
@@ -2129,31 +2139,32 @@ class TOMWrapper:
2129
2139
  f"{icons.red_dot} The column specified in the 'column_name' parameter in this function must be of DateTime data type."
2130
2140
  )
2131
2141
 
2132
- daxQuery = f"""
2133
- define measure '{table_name}'[test] =
2134
- var mn = MIN('{table_name}'[{column_name}])
2135
- var ma = MAX('{table_name}'[{column_name}])
2136
- var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
2137
- var y = DATEDIFF(mn, ma, DAY) + 1
2138
- return if(y = x, 1,0)
2139
-
2140
- EVALUATE
2141
- SUMMARIZECOLUMNS(
2142
- "1",[test]
2143
- )
2144
- """
2145
- df = fabric.evaluate_dax(
2146
- dataset=self._dataset, workspace=self._workspace, dax_string=daxQuery
2147
- )
2148
- value = df["1"].iloc[0]
2149
- if value != "1":
2150
- raise ValueError(
2151
- f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
2142
+ if validate:
2143
+ dax_query = f"""
2144
+ define measure '{table_name}'[test] =
2145
+ var mn = MIN('{table_name}'[{column_name}])
2146
+ var ma = MAX('{table_name}'[{column_name}])
2147
+ var x = COUNTROWS(DISTINCT('{table_name}'[{column_name}]))
2148
+ var y = DATEDIFF(mn, ma, DAY) + 1
2149
+ return if(y = x, 1,0)
2150
+
2151
+ EVALUATE
2152
+ SUMMARIZECOLUMNS(
2153
+ "1",[test]
2154
+ )
2155
+ """
2156
+ df = fabric.evaluate_dax(
2157
+ dataset=self._dataset, workspace=self._workspace, dax_string=dax_query
2152
2158
  )
2159
+ value = df["[1]"].iloc[0]
2160
+ if value != "1":
2161
+ raise ValueError(
2162
+ f"{icons.red_dot} The '{column_name}' within the '{table_name}' table does not contain contiguous date values."
2163
+ )
2153
2164
 
2154
2165
  # Mark as a date table
2155
2166
  t.DataCategory = "Time"
2156
- c.Columns[column_name].IsKey = True
2167
+ c.IsKey = True
2157
2168
  print(
2158
2169
  f"{icons.green_dot} The '{table_name}' table has been marked as a date table using the '{column_name}' column as its primary date key."
2159
2170
  )
@@ -2692,6 +2703,7 @@ class TOMWrapper:
2692
2703
 
2693
2704
  par = TOM.Partition()
2694
2705
  par.Name = name
2706
+ par.Mode = TOM.ModeType.Import
2695
2707
 
2696
2708
  parSource = TOM.CalculatedPartitionSource()
2697
2709
  parSource.Expression = expression
@@ -2949,6 +2961,8 @@ class TOMWrapper:
2949
2961
  runId = "1"
2950
2962
  self.set_annotation(object=self.model, name="Vertipaq_Run", value=runId)
2951
2963
 
2964
+ icons.sll_tags.append("VertipaqAnnotations")
2965
+
2952
2966
  def row_count(self, object: Union["TOM.Partition", "TOM.Table"]):
2953
2967
  """
2954
2968
  Obtains the row count of a table or partition within a semantic model.
@@ -4182,15 +4196,7 @@ class TOMWrapper:
4182
4196
  Name of the column to use for sorting. Must be of integer (Int64) data type.
4183
4197
  """
4184
4198
 
4185
- import Microsoft.AnalysisServices.Tabular as TOM
4186
-
4187
4199
  sbc = self.model.Tables[table_name].Columns[sort_by_column]
4188
-
4189
- if sbc.DataType != TOM.DataType.Int64:
4190
- raise ValueError(
4191
- f"{icons.red_dot} Invalid sort by column data type. The sort by column must be of 'Int64' data type."
4192
- )
4193
-
4194
4200
  self.model.Tables[table_name].Columns[column_name].SortByColumn = sbc
4195
4201
 
4196
4202
  def remove_sort_by_column(self, table_name: str, column_name: str):
@@ -4476,6 +4482,7 @@ class TOMWrapper:
4476
4482
  # executor.map(process_measure, self.all_measures())
4477
4483
 
4478
4484
  def close(self):
4485
+
4479
4486
  if not self._readonly and self.model is not None:
4480
4487
 
4481
4488
  import Microsoft.AnalysisServices.Tabular as TOM
@@ -4500,6 +4507,30 @@ class TOMWrapper:
4500
4507
  if self._column_map.get(c.LineageTag)[1] != c.DataType:
4501
4508
  self.add_changed_property(object=c, property="DataType")
4502
4509
 
4510
+ tags = [f"{icons.sll_prefix}{a}" for a in icons.sll_tags]
4511
+ tags.append("SLL")
4512
+
4513
+ if not any(a.Name == icons.sll_ann_name for a in self.model.Annotations):
4514
+ ann_list = _make_list_unique(tags)
4515
+ new_ann_value = str(ann_list).replace("'", '"')
4516
+ self.set_annotation(
4517
+ object=self.model, name=icons.sll_ann_name, value=new_ann_value
4518
+ )
4519
+ else:
4520
+ try:
4521
+ ann_value = self.get_annotation_value(
4522
+ object=self.model, name=icons.sll_ann_name
4523
+ )
4524
+ ann_list = ast.literal_eval(ann_value)
4525
+ ann_list += tags
4526
+ ann_list = _make_list_unique(ann_list)
4527
+ new_ann_value = str(ann_list).replace("'", '"')
4528
+ self.set_annotation(
4529
+ object=self.model, name=icons.sll_ann_name, value=new_ann_value
4530
+ )
4531
+ except Exception:
4532
+ pass
4533
+
4503
4534
  self.model.SaveChanges()
4504
4535
 
4505
4536
  if len(self._tables_added) > 0: