semantic-link-labs 0.12.1__py3-none-any.whl → 0.12.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the registry's advisory page for details.

Files changed (32)
  1. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/METADATA +4 -2
  2. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/RECORD +32 -26
  3. sempy_labs/__init__.py +12 -0
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_data_access_security.py +98 -0
  6. sempy_labs/_data_pipelines.py +23 -9
  7. sempy_labs/_dataflows.py +0 -1
  8. sempy_labs/_deployment_pipelines.py +49 -27
  9. sempy_labs/_eventstreams.py +9 -1
  10. sempy_labs/_generate_semantic_model.py +2 -2
  11. sempy_labs/_get_connection_string.py +84 -0
  12. sempy_labs/_helper_functions.py +17 -1
  13. sempy_labs/_job_scheduler.py +63 -33
  14. sempy_labs/_labels.py +4 -6
  15. sempy_labs/_model_dependencies.py +5 -2
  16. sempy_labs/_semantic_models.py +118 -0
  17. sempy_labs/_sql_endpoints.py +12 -24
  18. sempy_labs/_warehouses.py +1 -1
  19. sempy_labs/admin/__init__.py +6 -0
  20. sempy_labs/admin/_sharing_links.py +110 -0
  21. sempy_labs/graph/__init__.py +16 -0
  22. sempy_labs/graph/_groups.py +157 -2
  23. sempy_labs/graph/_sensitivity_labels.py +81 -0
  24. sempy_labs/graph/_users.py +162 -0
  25. sempy_labs/lakehouse/_shortcuts.py +16 -11
  26. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  27. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  28. sempy_labs/report/_reportwrapper.py +53 -6
  29. sempy_labs/tom/_model.py +49 -18
  30. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/WHEEL +0 -0
  31. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/licenses/LICENSE +0 -0
  32. {semantic_link_labs-0.12.1.dist-info → semantic_link_labs-0.12.3.dist-info}/top_level.txt +0 -0
sempy_labs/tom/_model.py CHANGED
@@ -6,21 +6,21 @@ import os
6
6
  import json
7
7
  from datetime import datetime
8
8
  from decimal import Decimal
9
- from .._helper_functions import (
9
+ from sempy_labs._helper_functions import (
10
10
  format_dax_object_name,
11
11
  generate_guid,
12
12
  _make_list_unique,
13
13
  resolve_dataset_name_and_id,
14
14
  resolve_workspace_name_and_id,
15
- _base_api,
16
15
  resolve_workspace_id,
17
16
  resolve_item_id,
18
17
  resolve_lakehouse_id,
19
18
  _validate_weight,
19
+ _get_url_prefix,
20
20
  )
21
- from .._list_functions import list_relationships
22
- from .._refresh_semantic_model import refresh_semantic_model
23
- from ..directlake._dl_helper import check_fallback_reason
21
+ from sempy_labs._list_functions import list_relationships
22
+ from sempy_labs._refresh_semantic_model import refresh_semantic_model
23
+ from sempy_labs.directlake._dl_helper import check_fallback_reason
24
24
  from contextlib import contextmanager
25
25
  from typing import List, Iterator, Optional, Union, TYPE_CHECKING, Literal
26
26
  from sempy._utils._log import log
@@ -28,7 +28,9 @@ import sempy_labs._icons as icons
28
28
  import ast
29
29
  from uuid import UUID
30
30
  import sempy_labs._authentication as auth
31
- from ..lakehouse._lakehouse import lakehouse_attached
31
+ from sempy_labs.lakehouse._lakehouse import lakehouse_attached
32
+ import requests
33
+ from sempy.fabric.exceptions import FabricHTTPException
32
34
 
33
35
 
34
36
  if TYPE_CHECKING:
@@ -4601,8 +4603,12 @@ class TOMWrapper:
4601
4603
  pandas.DataFrame
4602
4604
  A pandas dataframe showing the updated measure(s) and their new description(s).
4603
4605
  """
4606
+ import notebookutils
4607
+
4604
4608
  icons.sll_tags.append("GenerateMeasureDescriptions")
4605
4609
 
4610
+ prefix = _get_url_prefix()
4611
+
4606
4612
  df = pd.DataFrame(
4607
4613
  columns=["Table Name", "Measure Name", "Expression", "Description"]
4608
4614
  )
@@ -4655,11 +4661,15 @@ class TOMWrapper:
4655
4661
  "modelItems"
4656
4662
  ].append(new_item)
4657
4663
 
4658
- response = _base_api(
4659
- request="/explore/v202304/nl2nl/completions",
4660
- method="post",
4661
- payload=payload,
4664
+ token = notebookutils.credentials.getToken("pbi")
4665
+ headers = {"Authorization": f"Bearer {token}"}
4666
+ response = requests.post(
4667
+ f"{prefix}/explore/v202304/nl2nl/completions",
4668
+ headers=headers,
4669
+ json=payload,
4662
4670
  )
4671
+ if response.status_code != 200:
4672
+ raise FabricHTTPException(response)
4663
4673
 
4664
4674
  for item in response.json().get("modelItems", []):
4665
4675
  ms_name = item["urn"]
@@ -5096,7 +5106,9 @@ class TOMWrapper:
5096
5106
  """
5097
5107
  import Microsoft.AnalysisServices.Tabular as TOM
5098
5108
 
5099
- p = next(p for p in self.model.Tables[table_name].Partitions)
5109
+ t = self.model.Tables[table_name]
5110
+
5111
+ p = next(p for p in t.Partitions)
5100
5112
  if p.Mode != TOM.ModeType.DirectLake:
5101
5113
  print(f"{icons.info} The '{table_name}' table is not in Direct Lake mode.")
5102
5114
  return
@@ -5106,9 +5118,7 @@ class TOMWrapper:
5106
5118
  partition_schema = schema or p.Source.SchemaName
5107
5119
 
5108
5120
  # Update name of the Direct Lake partition (will be removed later)
5109
- self.model.Tables[table_name].Partitions[
5110
- partition_name
5111
- ].Name = f"{partition_name}_remove"
5121
+ t.Partitions[partition_name].Name = f"{partition_name}_remove"
5112
5122
 
5113
5123
  source_workspace_id = resolve_workspace_id(workspace=source_workspace)
5114
5124
  if source_type == "Lakehouse":
@@ -5120,21 +5130,41 @@ class TOMWrapper:
5120
5130
  item=source, type=source_type, workspace=source_workspace_id
5121
5131
  )
5122
5132
 
5133
+ column_pairs = []
5134
+ m_filter = None
5135
+ for c in t.Columns:
5136
+ if c.Type == TOM.ColumnType.Data:
5137
+ if c.Name != c.SourceColumn:
5138
+ column_pairs.append((c.SourceColumn, c.Name))
5139
+
5140
+ if column_pairs:
5141
+ m_filter = (
5142
+ f'#"Renamed Columns" = Table.RenameColumns(ToDelta, {{'
5143
+ + ", ".join([f'{{"{old}", "{new}"}}' for old, new in column_pairs])
5144
+ + "})"
5145
+ )
5146
+
5123
5147
  def _generate_m_expression(
5124
- workspace_id, artifact_id, artifact_type, table_name, schema_name
5148
+ workspace_id, artifact_id, artifact_type, table_name, schema_name, m_filter
5125
5149
  ):
5126
5150
  """
5127
- Generates the M expression for the import partition.
5151
+ Generates the M expression for the import partition. Adds a rename step if any columns have been renamed in the model.
5128
5152
  """
5129
5153
 
5130
5154
  full_table_name = (
5131
5155
  f"{schema_name}/{table_name}" if schema_name else table_name
5132
5156
  )
5133
5157
 
5134
- return f"""let\n\tSource = AzureStorage.DataLake("https://onelake.dfs.fabric.microsoft.com/{workspace_id}/{artifact_id}", [HierarchicalNavigation=true]),
5158
+ code = f"""let\n\tSource = AzureStorage.DataLake("https://onelake.dfs.fabric.microsoft.com/{workspace_id}/{artifact_id}", [HierarchicalNavigation=true]),
5135
5159
  Tables = Source{{[Name = "Tables"]}}[Content],
5136
5160
  ExpressionTable = Tables{{[Name = "{full_table_name}"]}}[Content],
5137
- ToDelta = DeltaLake.Table(ExpressionTable)\nin\n\tToDelta"""
5161
+ ToDelta = DeltaLake.Table(ExpressionTable)"""
5162
+ if m_filter is None:
5163
+ code += "\n in\n\tToDelta"
5164
+ else:
5165
+ code += f',\n\t {m_filter} \n in\n\t#"Renamed Columns"'
5166
+
5167
+ return code
5138
5168
 
5139
5169
  m_expression = _generate_m_expression(
5140
5170
  source_workspace_id,
@@ -5142,6 +5172,7 @@ class TOMWrapper:
5142
5172
  source_type,
5143
5173
  partition_entity_name,
5144
5174
  partition_schema,
5175
+ m_filter,
5145
5176
  )
5146
5177
 
5147
5178
  # Add the import partition