semantic-link-labs 0.9.2-py3-none-any.whl → 0.9.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

@@ -6,6 +6,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     resolve_lakehouse_name_and_id,
     resolve_workspace_name_and_id,
+    _create_spark_session,
 )
 import sempy_labs._icons as icons
 import re
@@ -54,7 +55,6 @@ def optimize_lakehouse_tables(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    from pyspark.sql import SparkSession
     from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
     from delta import DeltaTable
 
@@ -69,7 +69,7 @@ def optimize_lakehouse_tables(
     else:
         tables_filt = lakeTablesDelta.copy()
 
-    spark = SparkSession.builder.getOrCreate()
+    spark = _create_spark_session()
 
     for _, r in (bar := tqdm(tables_filt.iterrows())):
         tableName = r["Table Name"]
@@ -122,7 +122,7 @@ def vacuum_lakehouse_tables(
     else:
         tables_filt = lakeTablesDelta.copy()
 
-    spark = SparkSession.builder.getOrCreate()
+    spark = _create_spark_session()
     spark.conf.set("spark.databricks.delta.vacuum.parallelDelete.enabled", "true")
 
     for _, r in (bar := tqdm(tables_filt.iterrows())):
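For context, vacuuming a Delta table from a Spark session typically looks like the sketch below. The table path is hypothetical (the real code resolves each table's location from the attached lakehouse), and the parallelDelete setting mirrors the context line in the hunk above.

from delta import DeltaTable

spark = _create_spark_session()
# Allow vacuum to delete stale files in parallel, as configured above.
spark.conf.set("spark.databricks.delta.vacuum.parallelDelete.enabled", "true")

# Hypothetical default-lakehouse mount path used for illustration only.
table_path = "/lakehouse/default/Tables/sales"
DeltaTable.forPath(spark, table_path).vacuum(retentionHours=168)  # keep 7 days of history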
@@ -219,7 +219,7 @@ def delete_shortcut(
     )
 
 
-def reset_shortcut_cache(workspace: Optional[str | UUID]):
+def reset_shortcut_cache(workspace: Optional[str | UUID] = None):
     """
     Deletes any cached files that were stored while reading from shortcuts.
 
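The added default makes the workspace argument truly optional at the call site; under the old signature the parameter was annotated Optional but still had to be passed. A brief usage sketch, with the import path assumed:

from sempy_labs.lakehouse import reset_shortcut_cache  # import path assumed

reset_shortcut_cache()                          # now valid; previously a missing-argument TypeError
reset_shortcut_cache(workspace="My Workspace")  # passing a name or UUID still works as before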
@@ -9,9 +9,9 @@ from sempy_labs._helper_functions import (
     create_abfss_path,
     retry,
     generate_guid,
+    _create_spark_session,
 )
 from sempy_labs.tom import connect_semantic_model
-from pyspark.sql import SparkSession
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -98,7 +98,7 @@ def migrate_calc_tables_to_lakehouse(
     if killFunction:
         return
 
-    spark = SparkSession.builder.getOrCreate()
+    spark = _create_spark_session()
 
     if len(dfP_filt) == 0:
         print(
@@ -2,7 +2,6 @@ import sempy.fabric as fabric
 import pandas as pd
 import re
 from sempy_labs._helper_functions import retry
-from pyspark.sql import SparkSession
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
@@ -11,6 +10,7 @@ from uuid import UUID
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    _create_spark_session,
 )
 
 
@@ -29,7 +29,7 @@ def refresh_calc_tables(dataset: str | UUID, workspace: Optional[str | UUID] = N
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    spark = SparkSession.builder.getOrCreate()
+    spark = _create_spark_session()
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
     icons.sll_tags.append("DirectLakeMigration")
@@ -19,6 +19,7 @@ from sempy_labs._helper_functions import (
     resolve_dataset_id,
     _update_dataframe_datatypes,
     _base_api,
+    _create_spark_session,
 )
 from typing import List, Optional, Union
 from sempy._utils._log import log
@@ -726,7 +727,6 @@ def translate_report_titles(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
     from synapse.ml.services import Translate
-    from pyspark.sql import SparkSession
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
@@ -738,7 +738,7 @@ def translate_report_titles(
 
     reportJson = get_report_json(report=report, workspace=workspace_id)
     dfV = list_report_visuals(report=report, workspace=workspace_id)
-    spark = SparkSession.builder.getOrCreate()
+    spark = _create_spark_session()
     df = spark.createDataFrame(dfV)
     columnToTranslate = "Title"
 
@@ -58,7 +58,7 @@ def report_rebind(
     _base_api(
         request=f"v1.0/myorg/groups/{report_workspace_id}/reports/{report_id}/Rebind",
         method="post",
-        json=payload,
+        payload=payload,
     )
 
     print(
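The json → payload rename only matters because keyword arguments must match the helper's parameter names. Assuming a signature roughly like the sketch below (the real _base_api in sempy_labs._helper_functions is not shown in this diff), the pre-0.9.3 keyword would be rejected outright:

# Illustrative signature only, not the library's actual definition.
def _base_api(request: str, method: str = "get", payload: dict | None = None):
    ...

# Placeholder IDs and body for illustration.
_base_api(request="v1.0/myorg/groups/ws-id/reports/rep-id/Rebind",
          method="post",
          payload={"datasetId": "target-dataset-id"})
# Under that assumed signature, the old call style would fail with:
#   TypeError: _base_api() got an unexpected keyword argument 'json'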
sempy_labs/tom/_model.py CHANGED
@@ -3412,7 +3412,10 @@ class TOMWrapper:
                 if c.Name == obj.Name:
                     tableList.append(c.Parent.Name)
                 if (
-                    re.search(create_pattern(tableList, obj.Name), object.Expression)
+                    re.search(
+                        create_pattern(tableList, re.escape(obj.Name)),
+                        object.Expression,
+                    )
                     is not None
                 ):
                     yield obj
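The re.escape call matters whenever a measure or column name contains regex metacharacters such as parentheses; without it, the pattern built by create_pattern no longer matches the literal name. A standalone illustration, using a simplified stand-in for create_pattern (its real implementation is not shown here):

import re

def create_pattern(table_list, name):
    # Simplified stand-in for the library's helper: match 'Table'[Name]-style references.
    tables = "|".join(re.escape(t) for t in table_list)
    return rf"'?(?:{tables})'?\[{name}\]"

expression = "SUMX ( 'Sales', 'Sales'[Amount (USD)] * 2 )"

# Unescaped: "(USD)" is parsed as a regex group, so the literal text is not found.
print(re.search(create_pattern(["Sales"], "Amount (USD)"), expression))             # None
# Escaped, as in 0.9.3: metacharacters are matched literally and the reference is found.
print(re.search(create_pattern(["Sales"], re.escape("Amount (USD)")), expression))  # match object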